Compare commits: refactorin ... bigRefacto (6 commits)

Commits (SHA1): 95f48ffcb7, 163f0f3047, 1cf22d2602, d8ac25234f, e00d76ce3b, fec83c3a62
Changed paths:
- src/main/java/de/dhbwstuttgart/bytecode/genericsGenerator
- src/main/java/de/dhbwstuttgart/bytecode/insertGenerics
- src/main/java/de/dhbwstuttgart/core
- src/main/java/de/dhbwstuttgart/parser
- src/main/java/de/dhbwstuttgart/scope
- src/main/java/de/dhbwstuttgart/syntaxtree
- src/main/java/de/dhbwstuttgart/typeinference
- src/test
File: GeneratedGenericsFinder.java
@@ -9,7 +9,6 @@ import java.util.List;
import java.util.Optional;

import de.dhbwstuttgart.bytecode.TPHExtractor;
import de.dhbwstuttgart.bytecode.insertGenerics.FamilyOfGeneratedGenerics;
import de.dhbwstuttgart.bytecode.genericsGeneratorTypes.GenericGenratorResultForSourceFile;
import de.dhbwstuttgart.bytecode.genericsGeneratorTypes.GenericsGeneratorResultForClass;
import de.dhbwstuttgart.bytecode.genericsGeneratorTypes.MethodAndConstraints;
@@ -75,7 +74,6 @@ public class GeneratedGenericsFinder implements ASTVisitor {
    private GenericGenratorResultForSourceFile generatedGenericsForSF; // result of the GGenerics
    private ResultSet resultSet;
    private final List<String> methodNameAndParamsT = new ArrayList<>();
    private FamilyOfGeneratedGenerics fogg;

    private String pkgName;
    private JavaClassName className;
@@ -133,10 +131,6 @@ public class GeneratedGenericsFinder implements ASTVisitor {
    tphExtractor.setResultSet(resultSet);
    resolver = new Resolver(resultSet);
    classOrInterface.accept(tphExtractor);
    //PL 2020-10-16: from here on, GGenerics to be implemented by Ali
    //result goes to generatedGenericsForSF
    fogg = new FamilyOfGeneratedGenerics(tphExtractor);

    tphsClass = tphExtractor.tphsClass;
    simplifiedConstraints = GenericsGenerator.simplifyConstraints(tphExtractor, tphsClass);
    if(!isVisited) {
@@ -151,8 +145,8 @@ public class GeneratedGenericsFinder implements ASTVisitor {

    if(ggResult != null)
        generatedGenericsForSF.addGenericGeneratorResultClass(ggResult);
    System.out.println("ddd");



    }

    }
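For orientation, the hunks above wire the newly computed FamilyOfGeneratedGenerics into the visitor. A minimal sketch of how the computed family is meant to be consumed (the helper class and method are illustrative and not part of the diff; only the constructor and the public fields shown in this comparison are assumed):

```java
import java.util.List;

import de.dhbwstuttgart.bytecode.TPHExtractor;
import de.dhbwstuttgart.bytecode.insertGenerics.ClassConstraint;
import de.dhbwstuttgart.bytecode.insertGenerics.FamilyOfGeneratedGenerics;
import de.dhbwstuttgart.bytecode.insertGenerics.MethodConstraint;

class FamilyUsageSketch {
    static void printFamily(TPHExtractor tphExtractor) {
        // Same call as in GeneratedGenericsFinder above.
        FamilyOfGeneratedGenerics fogg = new FamilyOfGeneratedGenerics(tphExtractor);
        List<ClassConstraint> cs_cl = fogg.classConstraints;   // class-level generic constraints
        List<MethodConstraint> cs_m = fogg.methodConstraints;  // method-level generic constraints
        System.out.println("cs_cl = " + cs_cl + ", cs_m = " + cs_m);
    }
}
```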
File removed: ClassConstraint.java
@@ -1,15 +0,0 @@
package de.dhbwstuttgart.bytecode.insertGenerics;

import de.dhbwstuttgart.bytecode.constraint.TPHConstraint;

public class ClassConstraint extends TPHConstraint {
    //private TPHConstraint constraint;

    public ClassConstraint(String left, String right, Relation rel) {
        super(left, right, rel);
    }
    // better?
    /*public ClassConstraint(TPHConstraint constraint) {
        this.constraint = constraint;
    }*/
}
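The two removed marker classes (ClassConstraint here, MethodConstraint further below) are thin wrappers that only tag a TPHConstraint as belonging to the class or to a method. A minimal usage sketch, with made-up TPH names and imports assuming the packages shown in this comparison:

```java
import de.dhbwstuttgart.bytecode.constraint.TPHConstraint;
import de.dhbwstuttgart.bytecode.constraint.TPHConstraint.Relation;
import de.dhbwstuttgart.bytecode.insertGenerics.ClassConstraint;
import de.dhbwstuttgart.bytecode.insertGenerics.MethodConstraint;

public class ConstraintTagDemo {
    public static void main(String[] args) {
        // "T" extends "U", recorded once as a class-level and once as a method-level constraint.
        TPHConstraint asClassLevel  = new ClassConstraint("T", "U", Relation.EXTENDS);
        TPHConstraint asMethodLevel = new MethodConstraint("T", "U", Relation.EXTENDS);
        // Both behave like plain TPHConstraints; only their runtime type differs.
        System.out.println(asClassLevel.getLeft() + " <. " + asClassLevel.getRight());
        System.out.println(asMethodLevel.getClass().getSimpleName());
    }
}
```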
File removed: FamilyOfGeneratedGenerics.java
@@ -1,303 +0,0 @@
package de.dhbwstuttgart.bytecode.insertGenerics;

import de.dhbwstuttgart.bytecode.TPHExtractor;
import de.dhbwstuttgart.bytecode.constraint.TPHConstraint;
import de.dhbwstuttgart.bytecode.constraint.TPHConstraint.Relation;
import de.dhbwstuttgart.bytecode.genericsGeneratorTypes.GenericsGeneratorResult;
import de.dhbwstuttgart.bytecode.utilities.MethodAndTPH;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

public class FamilyOfGeneratedGenerics {
    public List<TPHConstraint> allConstraints = new ArrayList<>();
    // the HashMap stores whether a TPH occurs in a method or in the class; and if it is in a method, in which method
    public HashMap<String, PairTphMethod<PositionFinder.Position, String>> posOfTPHs = new HashMap<>();
    public List<ClassConstraint> classConstraints = new ArrayList<>();
    public List<MethodConstraint> methodConstraints = new ArrayList<>();

    public FamilyOfGeneratedGenerics(TPHExtractor tphExtractor) {
        this.allConstraints = tphExtractor.allCons;
        this.posOfTPHs = positionConverter(tphExtractor.allTPHS, tphExtractor.ListOfMethodsAndTph);
        this.classConstraints = getClassConstraints(allConstraints,posOfTPHs);
        // this.methodConstraints =
    }

    public static List<ClassConstraint> getClassConstraints(List<TPHConstraint> cs, HashMap<String, PairTphMethod<PositionFinder.Position, String>> posOfTphs) { // input parameter List<TPHConstraint> constraintsSet removed
        List<ClassConstraint> cs_cl = new ArrayList<>();
        List<ClassConstraint> classConstraints1 = typeOfANodeOfAField(cs, posOfTphs);
        for (ClassConstraint cons: classConstraints1) {
            if (!checkForDuplicates(cons, cs_cl)) {
                cs_cl.add(cons);
            }
        }
        List<ClassConstraint> classConstraints2 = transitiveSubtypeForClassTypes(cs, cs_cl); // pass classConstraints1 or constraintsSet? rather both
        for (ClassConstraint cons: classConstraints2) {
            if (!checkForDuplicates(cons, cs_cl)) {
                cs_cl.add(cons);
            }
        }
        List<ClassConstraint> classConstraints3 = hasNoSupertypeForClassTypes(cs, cs_cl, posOfTphs);
        for (ClassConstraint cons: classConstraints3) {
            if (!checkForDuplicates(cons, cs_cl)) {
                cs_cl.add(cons);
            }
        }
        return cs_cl;
    }

    public static List<MethodConstraint> getMethodConstraints(List<TPHConstraint> cs, HashMap<String, PairTphMethod<PositionFinder.Position, String>> posOfTphs) {
        //TODO: rules
        List<MethodConstraint> cs_m = new ArrayList<>();
        List<MethodConstraint> methodConstraints1 = typeOfTheMethodInClSigma(cs, cs_m, posOfTphs);
        for (MethodConstraint cons: methodConstraints1) {
            if (!checkForDuplicates(cons, cs_m)) {
                cs_m.add(cons);
            }
        }
        //List<MethodConstraint> methodConstraints2 = firstTransitiveSubtypeForMethodTypes();
        return cs_m;
    }

    /**
     * Def. FGG: first line of cs_cl
     * {T <. T' | T is a type variable in a type of a node of a field}
     */
    public static List<ClassConstraint> typeOfANodeOfAField(List<TPHConstraint> allConstraints, HashMap<String, PairTphMethod<PositionFinder.Position, String>> posOfTphs) {
        //RuntimeException re = new RuntimeException("enthält EQUALS-Relation");
        List<ClassConstraint> tempCC= new ArrayList<>();
        for(TPHConstraint allCons: allConstraints){
            if(posOfTphs.containsKey(allCons.getLeft()) && allCons.getRight()!=null && allCons.getRel()==Relation.EXTENDS) {
                for(String tph: posOfTphs.keySet()) {
                    if(tph == allCons.getLeft() && posOfTphs.get(tph).fst == PositionFinder.Position.FIELD) {
                        ClassConstraint consToAdd = new ClassConstraint(tph, allCons.getRight(), allCons.getRel());
                        if (!checkForDuplicates(consToAdd, tempCC)) {
                            tempCC.add(consToAdd);
                        }
                    }
                }
            }
            /*else if (allCons.getRel() != Relation.EXTENDS) {
                throw re;
            }*/
        }
        return tempCC;
    }

    /**
     * Def. FGG: second line of cs_cl
     * {T' <. T'' | \exists T: (T <. T') \in cs_cl, (T' <. T'') \in cs }
     */
    public static List<ClassConstraint> transitiveSubtypeForClassTypes(List<TPHConstraint> allConstraints, List<ClassConstraint> cs_cl) {
        List<ClassConstraint> tempCC= new ArrayList<>();
        for(ClassConstraint cCons: cs_cl) {
            if(cCons.getLeft() != null && cCons.getRel()==Relation.EXTENDS) {
                for(TPHConstraint allCons: allConstraints) {
                    if(cCons.getRight() == allCons.getLeft() && allCons.getRight() != null && allCons.getRel()==Relation.EXTENDS){
                        ClassConstraint consToAdd = new ClassConstraint(allCons.getLeft(), allCons.getRight(), allCons.getRel());
                        if (!checkForDuplicates(consToAdd, tempCC)) {
                            tempCC.add(consToAdd);
                        }
                    }
                }
            }
        }
        return tempCC;
    }

    /**
     * Def. FGG: third line of cs_cl
     * {T <. Object | ((T is a type variable in a type of a node of a field
     * or (\exists T~: (T~ <. T) \in cs_cl)) and (\existsnot T': T <. T') \in cs)}
     */
    public static List<ClassConstraint> hasNoSupertypeForClassTypes(List<TPHConstraint> allConstraints, List<ClassConstraint> cs_cl, HashMap<String, PairTphMethod<PositionFinder.Position, String>> posOfTphs) {
        List<ClassConstraint> tempCC= new ArrayList<>();
        for(TPHConstraint allCons: allConstraints) {
            for(ClassConstraint cCons: cs_cl) {
                for(String tph: posOfTphs.keySet()) {
                    boolean tvInField = posOfTphs.get(tph).fst == PositionFinder.Position.FIELD;
                    boolean hasSmallerTVInClCons = (posOfTphs.containsKey(cCons.getRight()) && cCons.getRight() == tph && cCons.getLeft() != null);
                    if( ((tvInField || hasSmallerTVInClCons) && cCons.getRel()==Relation.EXTENDS) &&
                            checkUpperBound(allConstraints, tph) && allCons.getRel()==Relation.EXTENDS) {
                        ClassConstraint consToAdd = new ClassConstraint(tph, "Object", Relation.EXTENDS);
                        if (!checkForDuplicates(consToAdd, tempCC)){
                            tempCC.add(consToAdd);
                        }
                    }
                }
            }
        }
        return tempCC;
    }

    public static boolean checkUpperBound(List<TPHConstraint> cs, String tph) {
        for(int i=0; i<cs.size(); i++) {
            if(cs.get(i).getLeft() == tph) {
                return false;
            }
        }
        return true;
    }

    /**
     * Def. FGG: first line of cs_m
     * {T <. T' | T is a type variable in a type of the method/constructor m in cl_\sigma, (T <. T') \in cs}
     */

    public static List<MethodConstraint> typeOfTheMethodInClSigma(List<TPHConstraint> allConstraints, List<MethodConstraint> cs_m, HashMap<String, PairTphMethod<PositionFinder.Position, String>> posOfTphs) { // cl_\sigma??
        //TODO:
        List<MethodConstraint> tempCC= new ArrayList<>();
        for(TPHConstraint allCons: allConstraints){
            if(posOfTphs.containsKey(allCons.getLeft()) && allCons.getRight()!=null && allCons.getRel()==Relation.EXTENDS) {
                for(String tph: posOfTphs.keySet()) {
                    if(tph == allCons.getLeft() && (posOfTphs.get(tph).fst == PositionFinder.Position.METHOD || posOfTphs.get(tph).fst == PositionFinder.Position.CONSTRUCTOR)) {
                        MethodConstraint consToAdd = new MethodConstraint(allCons.getLeft(), allCons.getRight(), allCons.getRel());
                        if (!checkForDuplicates(consToAdd, tempCC)) {
                            tempCC.add(consToAdd);
                        }
                    }
                }
            }
        }
        return tempCC;
    }


    /**
     * Def. FGG: second line of cs_m
     * {R' <. S | (R <. R'), (S <. S') \in cs_m and (R',S) is in the transitive closure of cs}
     */

    public static List<MethodConstraint> firstTransitiveSubtypeForMethodTypes(List<TPHConstraint> allConstraints, List<MethodConstraint> cs_m, HashMap<String, PairTphMethod<PositionFinder.Position, String>> posOfTphs) { //transitive closure of cs
        //TODO:
        List<MethodConstraint> tempCC= new ArrayList<>();
        List<TPHConstraint> tcOfCs = buildTransitiveClosure(allConstraints);
        for(MethodConstraint mC1 : cs_m) {
            for(MethodConstraint mC2 : cs_m) {
                String rightSide = mC1.getRight();
                String leftSide = mC2.getLeft();
                for(TPHConstraint tphC : tcOfCs) {
                    if(tphC.getLeft().equals(leftSide)&&tphC.getRight().equals(rightSide)) {
                        tempCC.add((MethodConstraint) tphC);
                    }
                }
            }
        }
        return tempCC;
    }

    /**
     * Def. FGG: third line of cs_m
     * {R' <. S | (R <. R') \in cs_m, (S <. S') \in cs_cl and (R',S) is in the transitive closure of cs}
     */

    public static List<MethodConstraint> secondTransitiveSubtypeForMethodTypes(List<TPHConstraint> allConstraints, List<ClassConstraint> cs_cl, List<MethodConstraint> cs_m, HashMap<String, PairTphMethod<PositionFinder.Position, String>> posOfTphs) {
        //TODO:
        List<MethodConstraint> tempCC= new ArrayList<>();
        List<TPHConstraint> tcOfCs = buildTransitiveClosure(allConstraints);
        for(ClassConstraint cC : cs_cl) {
            for(MethodConstraint mC : cs_m) {
                String rightSide = mC.getRight();
                String leftSide = cC.getLeft();
                for(TPHConstraint tphC : tcOfCs) {
                    if(tphC.getLeft().equals(leftSide)&&tphC.getRight().equals(rightSide)) {
                        tempCC.add((MethodConstraint) tphC);
                    }
                }
            }
        }
        return tempCC;
    }

    /**
     * Def. FGG: fourth line of cs_m
     * {T <. Object | (T is a type variable in a type of a node of the method/constructor m in cl_\sigma),
     * (\existsnot T': T <. T') \in cs)}
     */

    public static List<MethodConstraint> hasNoSupertypeForMethodTypes() {
        //TODO:
        return null;
    }

    /**
     * remove the set cs_cl from cs_m
     */
    public static List<MethodConstraint> methodTypesWithoutClassTypes() {
        //TODO:
        return null;
    }

    public static boolean checkForDuplicates(TPHConstraint constraint, List list) {
        List<TPHConstraint> tempList = list;
        boolean hasSame = false;
        for (TPHConstraint tphC: tempList) {
            hasSame = constraint.getLeft() == tphC.getLeft() &&
                      constraint.getRight() == tphC.getRight() &&
                      constraint.getRel() == tphC.getRel(); //constraint already in ArrayList if true
            if (hasSame)
                return true;
        }
        return false;
    }

    public static List<TPHConstraint> buildTransitiveClosure(List list) {
        List<TPHConstraint> iterList = new ArrayList<>(list);
        List<TPHConstraint> runList = new ArrayList<>(list);
        List<TPHConstraint> tcList = new ArrayList<>(list);
        boolean addedConToList = false;
        for (TPHConstraint cons: iterList) {
            for (TPHConstraint cons2: runList) {
                if(cons.getRight() == cons2.getLeft()) {
                    TPHConstraint consToAdd = new TPHConstraint(cons.getLeft(), cons2.getRight(), Relation.EXTENDS);
                    if (!checkForDuplicates(consToAdd,tcList)) {
                        tcList.add(consToAdd);
                        addedConToList = true;
                        if (addedConToList) {
                            return buildTransitiveClosure(tcList);
                        }
                    }
                }
            }
        }
        return tcList;
    }


    public static HashMap<String, PairTphMethod<PositionFinder.Position, String>> positionConverter(HashMap<String, Boolean> allTphs, List<MethodAndTPH> listOfMethodsAndTphs) {
        HashMap<String, PairTphMethod<PositionFinder.Position, String>> convertedPositions = new HashMap<>();
        for(String tph: allTphs.keySet()) {
            if(allTphs.get(tph)) { //if true, then tph is a method-TPH
                for(MethodAndTPH methTph: listOfMethodsAndTphs) {
                    if (methTph.getTphs().contains(tph)) {
                        convertedPositions.put(tph, new PairTphMethod<>(PositionFinder.Position.METHOD, methTph.getId()));
                    }
                }
            } else { // else it is in the class-TPH
                convertedPositions.put(tph, new PairTphMethod<>(PositionFinder.Position.FIELD, null));
            }
        }
        return convertedPositions;
    }

    /* public PositionFinder.Position positionConverter(TPHExtractor tphExtractor) {
        if(tphExtractor.allTPHS.keySet())
    }*/


    /* GeneratedGenericsFinder genGenFinder;
    ConstraintsSimplierResult simplifiedConstraints = null;
    GenericsGeneratorResultForClass ggResult = null;
    Method m;

    public void addMethodConstraints(List<MethodConstraint> cs_m) {
        genGenFinder.addMethodConstraints(simplifiedConstraints, ggResult, m);
        cs_m.add();
    }
    */

}
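The class above builds cs_cl and cs_m by repeatedly closing the EXTENDS constraints transitively and filtering duplicates. A standalone sketch of that closure step, with TPHs modelled as plain string pairs (names A, B, C are made up) and String.equals() instead of the reference comparison used in buildTransitiveClosure:

```java
import java.util.HashSet;
import java.util.Set;

// Self-contained illustration of the transitive closure of "extends" constraints.
public class TransitiveClosureDemo {
    record Pair(String left, String right) {}

    static Set<Pair> transitiveClosure(Set<Pair> cs) {
        Set<Pair> closure = new HashSet<>(cs);
        boolean changed = true;
        while (changed) {                       // repeat until no new pair is added
            changed = false;
            Set<Pair> toAdd = new HashSet<>();
            for (Pair p : closure)
                for (Pair q : closure)
                    if (p.right().equals(q.left()))
                        toAdd.add(new Pair(p.left(), q.right()));
            changed = closure.addAll(toAdd);
        }
        return closure;
    }

    public static void main(String[] args) {
        // A <. B, B <. C  ==>  the closure additionally contains A <. C
        Set<Pair> cs = Set.of(new Pair("A", "B"), new Pair("B", "C"));
        System.out.println(transitiveClosure(cs));
    }
}
```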
File removed: GGenerics.java
@@ -1,42 +0,0 @@
/*
package de.dhbwstuttgart.bytecode.gGenericsAli;

import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.typeinference.result.ResultSet;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

*/
/**
 * gets set of typed variable constraints with substitutions and the set of typed classes
 * and returns the set of families of generated generics and the set of families of type variable mappings
 *//*

public class GGenerics implements preGGenerics {
    private TVarConstraints tVarCons;
    private Substitutions subst;
    private TClass typedClass;


    public GGenerics(TVarConstraints tVarCons, Substitutions subst, TClass typedClass) throws IOException, ClassNotFoundException {
        this.tVarCons = tVarCons;
        this.subst = subst;
        this.typedClass = typedClass;
    }

    List<File> input = new ArrayList<>();
    List<File> classpath = new ArrayList<>();

    JavaTXCompiler compiler = new JavaTXCompiler(input, classpath);
    compiler.typeInference();

    public List<ResultSet> getResultOfTypeInference() {
        return null;
    }


}
*/
File removed: MethodConstraint.java
@@ -1,9 +0,0 @@
package de.dhbwstuttgart.bytecode.insertGenerics;

import de.dhbwstuttgart.bytecode.constraint.TPHConstraint;

public class MethodConstraint extends TPHConstraint {
    public MethodConstraint(String left, String right, Relation rel) {
        super(left, right, rel);
    }
}
File removed: PairTphMethod.java
@@ -1,43 +0,0 @@
package de.dhbwstuttgart.bytecode.insertGenerics;

import java.util.Objects;

/** A generic class for pairs.
 *
 * <p><b>This is NOT part of any supported API.
 * If you write code that depends on this, you do so at your own risk.
 * This code and its internal interfaces are subject to change or
 * deletion without notice.</b>
 */


public class PairTphMethod<A, B> {
    public final A fst;
    public final B snd;

    public PairTphMethod(A fst, B snd) {
        this.fst = fst;
        this.snd = snd;
    }

    public String toString() {
        return "PairTphMethod[" + fst + "," + snd + "]";
    }

    public boolean equals(Object other) {
        return
            other instanceof PairTphMethod<?,?> &&
            Objects.equals(fst, ((PairTphMethod<?,?>)other).fst) &&
            Objects.equals(snd, ((PairTphMethod<?,?>)other).snd);
    }

    public int hashCode() {
        if (fst == null) return (snd == null) ? 0 : snd.hashCode() + 1;
        else if (snd == null) return fst.hashCode() + 2;
        else return fst.hashCode() * 17 + snd.hashCode();
    }

    public static <A,B> PairTphMethod<A,B> of(A a, B b) {
        return new PairTphMethod<>(a,b);
    }
}
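A quick illustration of the removed pair class (the values are made up):

```java
import de.dhbwstuttgart.bytecode.insertGenerics.PairTphMethod;

class PairTphMethodDemo {
    public static void main(String[] args) {
        PairTphMethod<String, Integer> p = PairTphMethod.of("TPH A", 1);
        PairTphMethod<String, Integer> q = new PairTphMethod<>("TPH A", 1);
        System.out.println(p);               // PairTphMethod[TPH A,1]
        System.out.println(p.equals(q));     // true: equals/hashCode are value-based
        System.out.println(p.fst + " / " + p.snd);
    }
}
```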
File removed: PositionFinder.java
@@ -1,79 +0,0 @@
package de.dhbwstuttgart.bytecode.insertGenerics;

import de.dhbwstuttgart.syntaxtree.*;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;

import java.util.HashMap;
import java.util.Set;

public class PositionFinder{
    static HashMap<String, PairTphMethod<Position, String>> posOfTphs = new HashMap<String, PairTphMethod<Position, String>>();

    static PairTphMethod<Position, String> whichMethod; // indicates in which method the TPH is located (Position.METHOD, id_of_method)

    public enum Position{
        METHOD,
        CONSTRUCTOR,
        FIELD
    }

    public static HashMap<String, PairTphMethod<Position, String>> getPositionOfTPH(SourceFile sf, Set<String> tphs) {

        new Walker().visit(sf);
        for (String tph: posOfTphs.keySet()) {
            System.out.println(tph + " " + posOfTphs.get(tph));
        }

        return null;
    }
    public static void putPositionInMethod(String tph, String methodId) {
        posOfTphs.put(tph, new PairTphMethod<>(Position.METHOD, methodId));
    }

    public static void putPositionInField(String tph) {
        posOfTphs.put(tph, new PairTphMethod<>(Position.FIELD, null));
    }

    public static void putPositionInConstructor(String tph, String id) {
        posOfTphs.put(tph, new PairTphMethod<>(Position.CONSTRUCTOR, id));
    }



    static class Walker extends AbstractASTWalker{
        Boolean inMethod = false;
        Boolean inConstructor = false;

        @Override
        public void visit(TypePlaceholder tph) {
            if (inMethod) {
                if (inConstructor) {
                    // System.out.println(tph);
                    // putPositionInConstructor(tph.getName(),);
                }
                // System.out.println(tph);
                // putPositionInMethod(tph.getName(),);
            } else {
                putPositionInField(tph.getName());
            }
        }

        @Override
        public void visit(Field field) {
            super.visit(field);
        }

        @Override
        public void visit(Method method) {
            inMethod = true;
            super.visit(method);
        }

        @Override
        public void visit(Constructor cons) {
            inConstructor = true;
            super.visit(cons);
        }
    }

}
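The interplay of PositionFinder.Position and PairTphMethod is easiest to see on a hand-built map of the kind FamilyOfGeneratedGenerics.positionConverter produces. A minimal sketch (TPH names A and B and the method id "m1" are made up; imports assume the packages shown above):

```java
import java.util.HashMap;

import de.dhbwstuttgart.bytecode.insertGenerics.PairTphMethod;
import de.dhbwstuttgart.bytecode.insertGenerics.PositionFinder;

class PositionMapSketch {
    static HashMap<String, PairTphMethod<PositionFinder.Position, String>> example() {
        HashMap<String, PairTphMethod<PositionFinder.Position, String>> posOfTphs = new HashMap<>();
        // A class-level TPH: position FIELD, no owning method.
        posOfTphs.put("A", new PairTphMethod<>(PositionFinder.Position.FIELD, null));
        // A method-level TPH: position METHOD plus the id of the owning method.
        posOfTphs.put("B", new PairTphMethod<>(PositionFinder.Position.METHOD, "m1"));
        return posOfTphs;
    }
}
```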
File removed: preGGenerics.java
@@ -1,9 +0,0 @@
package de.dhbwstuttgart.bytecode.insertGenerics;

import de.dhbwstuttgart.typeinference.result.ResultSet;

import java.util.List;

interface preGGenerics {
    public List<ResultSet> getResultOfTypeInference();
}
File: JavaTXCompiler.java
@@ -20,10 +20,12 @@ import de.dhbwstuttgart.syntaxtree.GenericTypeVar;
import de.dhbwstuttgart.syntaxtree.Method;
import de.dhbwstuttgart.syntaxtree.ParameterList;
import de.dhbwstuttgart.syntaxtree.SourceFile;
import de.dhbwstuttgart.syntaxtree.TypeScope;
import de.dhbwstuttgart.syntaxtree.FormalParameter;
import de.dhbwstuttgart.syntaxtree.GenericDeclarationList;
import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.syntaxtree.statement.Block;
import de.dhbwstuttgart.syntaxtree.type.ExtendsWildcardType;
import de.dhbwstuttgart.syntaxtree.type.GenericRefType;
import de.dhbwstuttgart.syntaxtree.type.RefType;
@@ -46,16 +48,26 @@ import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.UnifyResultListener;
import de.dhbwstuttgart.typeinference.unify.UnifyResultListenerImpl;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;

import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.io.OutputStreamWriter;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Collectors;

import org.antlr.v4.runtime.Token;
import org.apache.commons.io.output.NullOutputStream;


public class JavaTXCompiler {

@@ -64,6 +76,7 @@ public class JavaTXCompiler {
    Boolean resultmodel = true;
    public final Map<File, SourceFile> sourceFiles = new HashMap<>();
    Boolean log = true; // indicates whether a log file should be written to System.getProperty("user.dir")+"src/test/java/logFiles"
    public volatile UnifyTaskModel usedTasks = new UnifyTaskModel();
    private final ClassLoader classLoader;

    public JavaTXCompiler(File sourceFile) throws IOException, ClassNotFoundException {
@@ -181,7 +194,7 @@ public class JavaTXCompiler {


    public List<ClassOrInterface> getAvailableClasses(SourceFile forSourceFile) throws ClassNotFoundException {
        //PL 2018-09-18: List replaced by Set so that the classes are added only once
        //PL 2018-09-18: List replaced by Set so that the classes are added only once
        //List<ClassOrInterface> allClasses = new ArrayList<>();//environment.getAllAvailableClasses();
        Set<ClassOrInterface> allClasses = new HashSet<>();
@ -216,6 +229,321 @@ public class JavaTXCompiler {
|
||||
return new ArrayList<>(allClasses);
|
||||
}
|
||||
|
||||
/*
|
||||
* public List<ResultSet> typeInferenceOld() throws ClassNotFoundException {
|
||||
* List<ClassOrInterface> allClasses = new
|
||||
* ArrayList<>();//environment.getAllAvailableClasses(); //Alle Importierten
|
||||
* Klassen in allen geparsten Sourcefiles kommen ins FC for(SourceFile sf :
|
||||
* this.sourceFiles.values()) { allClasses.addAll(getAvailableClasses(sf));
|
||||
* allClasses.addAll(sf.getClasses()); }
|
||||
*
|
||||
* final ConstraintSet<Pair> cons = getConstraints();
|
||||
*
|
||||
* FiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses);
|
||||
* System.out.println(finiteClosure); ConstraintSet<UnifyPair> unifyCons =
|
||||
* UnifyTypeFactory.convert(cons);
|
||||
*
|
||||
* TypeUnify unify = new TypeUnify(); Set<Set<UnifyPair>> results = new
|
||||
* HashSet<>(); try { File logPath = new
|
||||
* File(System.getProperty("user.dir")+"/target/logFiles/"); logPath.mkdirs();
|
||||
* FileWriter logFile = new FileWriter(new File(logPath, "log"));
|
||||
* logFile.write("FC:\\" + finiteClosure.toString()+"\n"); for(SourceFile sf :
|
||||
* this.sourceFiles.values()) { logFile.write(ASTTypePrinter.print(sf)); }
|
||||
* logFile.flush(); Set<List<Constraint<UnifyPair>>> cardProd =
|
||||
* unifyCons.cartesianProduct(); for (List<Constraint<UnifyPair>> xCons :
|
||||
* cardProd ){ Set<UnifyPair> xConsSet = new HashSet<>(); for
|
||||
* (Constraint<UnifyPair> constraint : xCons) { xConsSet.addAll(constraint); }
|
||||
* //.collect(Collectors.toCollection(ArrayList::new))))
|
||||
* System.out.println(xConsSet); Set<String> methodParaTypeVarNames =
|
||||
* allClasses.stream().map(x -> x.getMethods().stream().map(y ->
|
||||
* y.getParameterList().getFormalparalist() .stream().filter(z -> z.getType()
|
||||
* instanceof TypePlaceholder) .map(z ->
|
||||
* ((TypePlaceholder)z.getType()).getName()).collect(Collectors.toCollection(
|
||||
* HashSet::new))) .reduce(new HashSet<String>(), (a,b) -> { a.addAll(b); return
|
||||
* a;}, (a,b) -> { a.addAll(b); return a;} ) ) .reduce(new HashSet<String>(),
|
||||
* (a,b) -> { a.addAll(b); return a;} );
|
||||
*
|
||||
* Set<String> constructorParaTypeVarNames = allClasses.stream().map(x ->
|
||||
* x.getConstructors().stream().map(y ->
|
||||
* y.getParameterList().getFormalparalist() .stream().filter(z -> z.getType()
|
||||
* instanceof TypePlaceholder) .map(z ->
|
||||
* ((TypePlaceholder)z.getType()).getName()).collect(Collectors.toCollection(
|
||||
* HashSet::new))) .reduce(new HashSet<String>(), (a,b) -> { a.addAll(b); return
|
||||
* a;}, (a,b) -> { a.addAll(b); return a;} ) ) .reduce(new HashSet<String>(),
|
||||
* (a,b) -> { a.addAll(b); return a;} );
|
||||
*
|
||||
* Set<String> paraTypeVarNames = methodParaTypeVarNames;
|
||||
* paraTypeVarNames.addAll(constructorParaTypeVarNames);
|
||||
*
|
||||
* Set<String> returnTypeVarNames = allClasses.stream().map(x ->
|
||||
* x.getMethods().stream().filter(y -> y.getReturnType() instanceof
|
||||
* TypePlaceholder) .map(z ->
|
||||
* ((TypePlaceholder)z.getReturnType()).getName()).collect(Collectors.
|
||||
* toCollection(HashSet::new))).reduce((a,b) -> { a.addAll(b); return a;}
|
||||
* ).get();
|
||||
*
|
||||
* Set<String> fieldTypeVarNames = allClasses.stream().map(x ->
|
||||
* x.getFieldDecl().stream().filter(y -> y.getReturnType() instanceof
|
||||
* TypePlaceholder) .map(z ->
|
||||
* ((TypePlaceholder)z.getReturnType()).getName()).collect(Collectors.
|
||||
* toCollection(HashSet::new))).reduce((a,b) -> { a.addAll(b); return a;}
|
||||
* ).get();
|
||||
*
|
||||
* returnTypeVarNames.addAll(fieldTypeVarNames);
|
||||
*
|
||||
* xConsSet = xConsSet.stream().map(x -> { //Hier muss ueberlegt werden, ob //1.
|
||||
* alle Argument- und Retuntyp-Variablen in allen UnifyPairs // mit
|
||||
* disableWildcardtable() werden. //2. alle Typvariablen mit Argument- oder
|
||||
* Retuntyp-Variablen //in Beziehung auch auf disableWildcardtable() gesetzt
|
||||
* werden muessen //PL 2018-04-23 if ((x.getLhsType() instanceof
|
||||
* PlaceholderType)) { if (paraTypeVarNames.contains(x.getLhsType().getName()))
|
||||
* { ((PlaceholderType)x.getLhsType()).setVariance((byte)1);
|
||||
* ((PlaceholderType)x.getLhsType()).disableWildcardtable(); } if
|
||||
* (returnTypeVarNames.contains(x.getLhsType().getName())) {
|
||||
* ((PlaceholderType)x.getLhsType()).setVariance((byte)-1);
|
||||
* ((PlaceholderType)x.getLhsType()).disableWildcardtable(); } } if
|
||||
* ((x.getRhsType() instanceof PlaceholderType)) { if
|
||||
* (paraTypeVarNames.contains(x.getRhsType().getName())) {
|
||||
* ((PlaceholderType)x.getRhsType()).setVariance((byte)1);
|
||||
* ((PlaceholderType)x.getRhsType()).disableWildcardtable(); } if
|
||||
* (returnTypeVarNames.contains(x.getRhsType().getName())) {
|
||||
* ((PlaceholderType)x.getRhsType()).setVariance((byte)-1);
|
||||
* ((PlaceholderType)x.getRhsType()).disableWildcardtable(); } } return x;//HIER
|
||||
* DIE JEWEILS RECHT BZW. LINKE SEITE AUF GLEICHE VARIANZ SETZEN WIE DIE JEWEILS
|
||||
* ANDERE SEITE }).map( y -> { if ((y.getLhsType() instanceof PlaceholderType)
|
||||
* && (y.getRhsType() instanceof PlaceholderType)) { if
|
||||
* (((PlaceholderType)y.getLhsType()).getVariance() != 0 &&
|
||||
* ((PlaceholderType)y.getRhsType()).getVariance() == 0) {
|
||||
* ((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType(
|
||||
* )).getVariance()); } if (((PlaceholderType)y.getLhsType()).getVariance() == 0
|
||||
* && ((PlaceholderType)y.getRhsType()).getVariance() != 0) {
|
||||
* ((PlaceholderType)y.getLhsType()).setVariance(((PlaceholderType)y.getRhsType(
|
||||
* )).getVariance()); } } return y; } )
|
||||
* .collect(Collectors.toCollection(HashSet::new));
|
||||
* varianceInheritance(xConsSet); Set<Set<UnifyPair>> result =
|
||||
* unify.unifySequential(xConsSet, finiteClosure, logFile, log);
|
||||
* //Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
|
||||
* System.out.println("RESULT: " + result); logFile.write("RES: " +
|
||||
* result.toString()+"\n"); logFile.flush(); results.addAll(result); }
|
||||
*
|
||||
* results = results.stream().map(x -> { Optional<Set<UnifyPair>> res = new
|
||||
* RuleSet().subst(x.stream().map(y -> { if (y.getPairOp() ==
|
||||
* PairOperator.SMALLERDOTWC) y.setPairOp(PairOperator.EQUALSDOT); return y;
|
||||
* //alle Paare a <.? b erden durch a =. b ersetzt
|
||||
* }).collect(Collectors.toCollection(HashSet::new))); if (res.isPresent())
|
||||
* {//wenn subst ein Erg liefert wurde was veraendert return new
|
||||
* TypeUnifyTask().applyTypeUnificationRules(res.get(), finiteClosure); } else
|
||||
* return x; //wenn nichts veraendert wurde wird x zurueckgegeben
|
||||
* }).collect(Collectors.toCollection(HashSet::new));
|
||||
* System.out.println("RESULT Final: " + results); logFile.write("RES_FINAL: " +
|
||||
* results.toString()+"\n"); logFile.flush(); logFile.write("PLACEHOLDERS: " +
|
||||
* PlaceholderType.EXISTING_PLACEHOLDERS); logFile.flush(); } catch (IOException
|
||||
* e) { e.printStackTrace(); } return results.stream().map((unifyPairs -> new
|
||||
* ResultSet(UnifyTypeFactory.convert(unifyPairs,
|
||||
* generateTPHMap(cons))))).collect(Collectors.toList()); }
|
||||
*/
|
||||
/**
|
||||
* Vererbt alle Variancen bei Paaren (a <. theta) oder (Theta <. a) wenn a eine
|
||||
* Variance !=0 hat auf alle Typvariablen in Theta.
|
||||
*
|
||||
*
|
||||
*/
|
||||
/*
|
||||
* private void varianceInheritance(Set<UnifyPair> eq) { Set<PlaceholderType>
|
||||
* usedTPH = new HashSet<>(); Set<PlaceholderType> phSet = eq.stream().map(x ->
|
||||
* { Set<PlaceholderType> pair = new HashSet<>(); if (x.getLhsType() instanceof
|
||||
* PlaceholderType) pair.add((PlaceholderType)x.getLhsType()); if
|
||||
* (x.getRhsType() instanceof PlaceholderType)
|
||||
* pair.add((PlaceholderType)x.getRhsType()); return pair; }).reduce(new
|
||||
* HashSet<>(), (a,b) -> { a.addAll(b); return a;} , (c,d) -> { c.addAll(d);
|
||||
* return c;});
|
||||
*
|
||||
* ArrayList<PlaceholderType> phSetVariance = new ArrayList<>(phSet);
|
||||
* phSetVariance.removeIf(x -> (x.getVariance() == 0));
|
||||
* while(!phSetVariance.isEmpty()) { PlaceholderType a =
|
||||
* phSetVariance.remove(0); usedTPH.add(a); //HashMap<PlaceholderType,Integer>
|
||||
* ht = new HashMap<>(); //ht.put(a, a.getVariance()); Set<UnifyPair> eq1 = new
|
||||
* HashSet<>(eq); eq1.removeIf(x -> !(x.getLhsType() instanceof PlaceholderType
|
||||
* && ((PlaceholderType)x.getLhsType()).equals(a))); eq1.stream().forEach(x -> {
|
||||
* x.getRhsType().accept(new distributeVariance(), a.getVariance());}); eq1 =
|
||||
* new HashSet<>(eq); eq1.removeIf(x -> !(x.getRhsType() instanceof
|
||||
* PlaceholderType && ((PlaceholderType)x.getRhsType()).equals(a)));
|
||||
* eq1.stream().forEach(x -> { x.getLhsType().accept(new distributeVariance(),
|
||||
* a.getVariance());}); phSetVariance = new ArrayList<>(phSet);
|
||||
* phSetVariance.removeIf(x -> (x.getVariance() == 0 || usedTPH.contains(x))); }
|
||||
* }
|
||||
*/
|
||||
|
||||
public UnifyResultModel typeInferenceAsync(UnifyResultListener resultListener, Writer logFile)
|
||||
throws ClassNotFoundException, IOException {
|
||||
List<ClassOrInterface> allClasses = new ArrayList<>();// environment.getAllAvailableClasses();
|
||||
// Alle Importierten Klassen in allen geparsten Sourcefiles kommen ins FC
|
||||
for (File f : this.sourceFiles.keySet()) {
|
||||
SourceFile sf = sourceFiles.get(f);
|
||||
allClasses.addAll(getAvailableClasses(sf));
|
||||
allClasses.addAll(sf.getClasses());
|
||||
allClasses.addAll(CompilationEnvironment.loadDefaultPackageClasses(f,classLoader).stream().map(ASTFactory::createClass).collect(Collectors.toList()));
|
||||
}
|
||||
|
||||
final ConstraintSet<Pair> cons = getConstraints();
|
||||
Set<Set<UnifyPair>> results = new HashSet<>();
|
||||
UnifyResultModel urm = null;
|
||||
// urm.addUnifyResultListener(resultListener);
|
||||
try {
|
||||
logFile = logFile == null
|
||||
? new FileWriter(new File("log_" + sourceFiles.keySet().iterator().next().getName()))
|
||||
: logFile;
|
||||
IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses, logFile, classLoader);
|
||||
System.out.println(finiteClosure);
|
||||
urm = new UnifyResultModel(cons, finiteClosure);
|
||||
urm.addUnifyResultListener(resultListener);
|
||||
ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(cons);
|
||||
|
||||
Function<UnifyPair, UnifyPair> distributeInnerVars = x -> {
|
||||
UnifyType lhs, rhs;
|
||||
if (((lhs = x.getLhsType()) instanceof PlaceholderType)
|
||||
&& ((rhs = x.getRhsType()) instanceof PlaceholderType)
|
||||
&& (((PlaceholderType) lhs).isInnerType() || ((PlaceholderType) rhs).isInnerType())) {
|
||||
((PlaceholderType) lhs).setInnerType(true);
|
||||
((PlaceholderType) rhs).setInnerType(true);
|
||||
}
|
||||
return x;
|
||||
|
||||
};
|
||||
logFile.write(unifyCons.toString());
|
||||
unifyCons = unifyCons.map(distributeInnerVars);
|
||||
logFile.write(unifyCons.toString());
|
||||
TypeUnify unify = new TypeUnify();
|
||||
// Set<Set<UnifyPair>> results = new HashSet<>(); Nach vorne gezogen
|
||||
logFile.write("FC:\\" + finiteClosure.toString() + "\n");
|
||||
for (SourceFile sf : this.sourceFiles.values()) {
|
||||
logFile.write(ASTTypePrinter.print(sf));
|
||||
}
|
||||
logFile.flush();
|
||||
|
||||
Set<String> methodParaTypeVarNames = allClasses.stream().map(x -> x.getMethods().stream()
|
||||
.map(y -> y.getParameterList().getFormalparalist().stream()
|
||||
.filter(z -> z.getType() instanceof TypePlaceholder)
|
||||
.map(z -> ((TypePlaceholder) z.getType()).getName())
|
||||
.collect(Collectors.toCollection(HashSet::new)))
|
||||
.reduce(new HashSet<String>(), (a, b) -> {
|
||||
a.addAll(b);
|
||||
return a;
|
||||
}, (a, b) -> {
|
||||
a.addAll(b);
|
||||
return a;
|
||||
})).reduce(new HashSet<String>(), (a, b) -> {
|
||||
a.addAll(b);
|
||||
return a;
|
||||
});
|
||||
|
||||
Set<String> constructorParaTypeVarNames = allClasses.stream().map(x -> x.getConstructors().stream()
|
||||
.map(y -> y.getParameterList().getFormalparalist().stream()
|
||||
.filter(z -> z.getType() instanceof TypePlaceholder)
|
||||
.map(z -> ((TypePlaceholder) z.getType()).getName())
|
||||
.collect(Collectors.toCollection(HashSet::new)))
|
||||
.reduce(new HashSet<String>(), (a, b) -> {
|
||||
a.addAll(b);
|
||||
return a;
|
||||
}, (a, b) -> {
|
||||
a.addAll(b);
|
||||
return a;
|
||||
})).reduce(new HashSet<String>(), (a, b) -> {
|
||||
a.addAll(b);
|
||||
return a;
|
||||
});
|
||||
|
||||
Set<String> paraTypeVarNames = methodParaTypeVarNames;
|
||||
paraTypeVarNames.addAll(constructorParaTypeVarNames);
|
||||
|
||||
Set<String> returnTypeVarNames = allClasses.stream()
|
||||
.map(x -> x.getMethods().stream().filter(y -> y.getReturnType() instanceof TypePlaceholder)
|
||||
.map(z -> ((TypePlaceholder) z.getReturnType()).getName())
|
||||
.collect(Collectors.toCollection(HashSet::new)))
|
||||
.reduce((a, b) -> {
|
||||
a.addAll(b);
|
||||
return a;
|
||||
}).get();
|
||||
|
||||
Set<String> fieldTypeVarNames = allClasses.stream()
|
||||
.map(x -> x.getFieldDecl().stream().filter(y -> y.getReturnType() instanceof TypePlaceholder)
|
||||
.map(z -> ((TypePlaceholder) z.getReturnType()).getName())
|
||||
.collect(Collectors.toCollection(HashSet::new)))
|
||||
.reduce((a, b) -> {
|
||||
a.addAll(b);
|
||||
return a;
|
||||
}).get();
|
||||
|
||||
returnTypeVarNames.addAll(fieldTypeVarNames);
|
||||
|
||||
unifyCons = unifyCons.map(x -> {
|
||||
// Hier muss ueberlegt werden, ob
|
||||
// 1. alle Argument- und Retuntyp-Variablen in allen UnifyPairs
|
||||
// mit disableWildcardtable() werden.
|
||||
// 2. alle Typvariablen mit Argument- oder Retuntyp-Variablen
|
||||
// in Beziehung auch auf disableWildcardtable() gesetzt werden muessen
|
||||
// PL 2018-04-23
|
||||
if ((x.getLhsType() instanceof PlaceholderType)) {
|
||||
if (paraTypeVarNames.contains(x.getLhsType().getName())) {
|
||||
((PlaceholderType) x.getLhsType()).setVariance((byte) 1);
|
||||
((PlaceholderType) x.getLhsType()).disableWildcardtable();
|
||||
}
|
||||
if (returnTypeVarNames.contains(x.getLhsType().getName())) {
|
||||
((PlaceholderType) x.getLhsType()).setVariance((byte) -1);
|
||||
((PlaceholderType) x.getLhsType()).disableWildcardtable();
|
||||
}
|
||||
}
|
||||
if ((x.getRhsType() instanceof PlaceholderType)) {
|
||||
if (paraTypeVarNames.contains(x.getRhsType().getName())) {
|
||||
((PlaceholderType) x.getRhsType()).setVariance((byte) 1);
|
||||
((PlaceholderType) x.getRhsType()).disableWildcardtable();
|
||||
}
|
||||
if (returnTypeVarNames.contains(x.getRhsType().getName())) {
|
||||
((PlaceholderType) x.getRhsType()).setVariance((byte) -1);
|
||||
((PlaceholderType) x.getRhsType()).disableWildcardtable();
|
||||
}
|
||||
}
|
||||
return x;// HIER DIE JEWEILS RECHT BZW. LINKE SEITE AUF GLEICHE VARIANZ SETZEN WIE DIE
|
||||
// JEWEILS ANDERE SEITE
|
||||
});
|
||||
Set<PlaceholderType> varianceTPHold;
|
||||
Set<PlaceholderType> varianceTPH = new HashSet<>();
|
||||
varianceTPH = varianceInheritanceConstraintSet(unifyCons);
|
||||
|
||||
/*
|
||||
* PL 2018-11-07 wird in varianceInheritanceConstraintSet erledigt do { //PL
|
||||
* 2018-11-05 Huellenbildung Variance auf alle TPHs der Terme auf der jeweiligen
|
||||
* //anderen Seite übertragen varianceTPHold = new HashSet<>(varianceTPH);
|
||||
* varianceTPH = varianceInheritanceConstraintSet(unifyCons); unifyCons.map( y
|
||||
* -> { if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType()
|
||||
* instanceof PlaceholderType)) { if
|
||||
* (((PlaceholderType)y.getLhsType()).getVariance() != 0 &&
|
||||
* ((PlaceholderType)y.getRhsType()).getVariance() == 0) {
|
||||
* ((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType(
|
||||
* )).getVariance()); } if (((PlaceholderType)y.getLhsType()).getVariance() == 0
|
||||
* && ((PlaceholderType)y.getRhsType()).getVariance() != 0) {
|
||||
* ((PlaceholderType)y.getLhsType()).setVariance(((PlaceholderType)y.getRhsType(
|
||||
* )).getVariance()); } } return y; } ); } while
|
||||
* (!varianceTPHold.equals(varianceTPH));
|
||||
*/
|
||||
|
||||
// Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure,
|
||||
// logFile, log);
|
||||
// Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
|
||||
List<Set<Constraint<UnifyPair>>> oderConstraints = unifyCons.getOderConstraints()/*.stream().map(x -> {
|
||||
Set<Set<UnifyPair>> ret = new HashSet<>();
|
||||
for (Constraint<UnifyPair> y : x) {
|
||||
ret.add(new HashSet<>(y));
|
||||
}
|
||||
return ret;
|
||||
}).collect(Collectors.toCollection(ArrayList::new))*/;
|
||||
unify.unifyAsync(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, urm,
|
||||
usedTasks);
|
||||
} catch (IOException e) {
|
||||
System.err.println("kein LogFile");
|
||||
}
|
||||
return urm;
|
||||
}
|
||||
|
||||
public List<ResultSet> typeInference() throws ClassNotFoundException, IOException {
|
||||
List<ClassOrInterface> allClasses = new ArrayList<>();// environment.getAllAvailableClasses();
|
||||
@ -364,12 +692,46 @@ public class JavaTXCompiler {
|
||||
}
|
||||
})));
|
||||
*/
|
||||
|
||||
System.out.println("Unify nach Oder-Constraints-Anpassung:" + unifyCons.toString());
|
||||
Set<PlaceholderType> varianceTPHold;
|
||||
Set<PlaceholderType> varianceTPH = new HashSet<>();
|
||||
varianceTPH = varianceInheritanceConstraintSet(unifyCons);
|
||||
|
||||
List<Set<Constraint<UnifyPair>>> oderConstraints = unifyCons.getOderConstraints();
|
||||
/*
|
||||
* PL 2018-11-07 wird in varianceInheritanceConstraintSet erledigt do { //PL
|
||||
* 2018-11-05 Huellenbildung Variance auf alle TPHs der Terme auf der jeweiligen
|
||||
* //anderen Seite übertragen varianceTPHold = new HashSet<>(varianceTPH);
|
||||
* varianceTPH = varianceInheritanceConstraintSet(unifyCons); unifyCons.map( y
|
||||
* -> { if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType()
|
||||
* instanceof PlaceholderType)) { if
|
||||
* (((PlaceholderType)y.getLhsType()).getVariance() != 0 &&
|
||||
* ((PlaceholderType)y.getRhsType()).getVariance() == 0) {
|
||||
* ((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType(
|
||||
* )).getVariance()); } if (((PlaceholderType)y.getLhsType()).getVariance() == 0
|
||||
* && ((PlaceholderType)y.getRhsType()).getVariance() != 0) {
|
||||
* ((PlaceholderType)y.getLhsType()).setVariance(((PlaceholderType)y.getRhsType(
|
||||
* )).getVariance()); } } return y; } ); } while
|
||||
* (!varianceTPHold.equals(varianceTPH));
|
||||
*/
|
||||
|
||||
// Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure,
|
||||
// logFile, log);
|
||||
// Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
|
||||
List<Set<Constraint<UnifyPair>>> oderConstraints = unifyCons.getOderConstraints()//.stream().map(x -> {
|
||||
/*Set<Set<UnifyPair>> ret = new HashSet<>();
|
||||
for (Constraint<UnifyPair> y : x) {
|
||||
ret.add(new HashSet<>(y));
|
||||
}
|
||||
return ret;
|
||||
}).collect(Collectors.toCollection(ArrayList::new))*/;
|
||||
if (resultmodel) {
|
||||
/* UnifyResultModel Anfang */
|
||||
UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
|
||||
unify.unifyParallel(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, urm);
|
||||
UnifyResultListenerImpl li = new UnifyResultListenerImpl();
|
||||
urm.addUnifyResultListener(li);
|
||||
unify.unifyParallel(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, urm,
|
||||
usedTasks);
|
||||
System.out.println("RESULT Final: " + li.getResults());
|
||||
logFile.write("RES_FINAL: " + li.getResults().toString() + "\n");
|
||||
logFile.flush();
|
||||
@@ -381,7 +743,7 @@ public class JavaTXCompiler {
            // oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons,
            // finiteClosure));
            Set<Set<UnifyPair>> result = unify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints,
                    finiteClosure, logFile, log, new UnifyResultModel(cons, finiteClosure));
                    finiteClosure, logFile, log, new UnifyResultModel(cons, finiteClosure), usedTasks);
            System.out.println("RESULT: " + result);
            logFile.write("RES: " + result.toString() + "\n");
            logFile.flush();
@@ -532,10 +894,6 @@ public class JavaTXCompiler {
    /**
     * @param path - can be null, then class file output is in the same directory as the parsed source files
     */
        List<ResultSet> typeinferenceResult = this.typeInference();
        List<GenericGenratorResultForSourceFile> simplifyResultsForAllSourceFiles = getGeneratedGenericResultsForAllSourceFiles(
                typeinferenceResult);
        generateBytecode(path, typeinferenceResult, simplifyResultsForAllSourceFiles);
    public void generateBytecode(File path) throws ClassNotFoundException, IOException, BytecodeGeneratorError {
        List<ResultSet> typeinferenceResult = this.typeInference();
        List<GenericGenratorResultForSourceFile> simplifyResultsForAllSourceFiles = getGeneratedGenericResultsForAllSourceFiles(
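For context, a minimal driver of the compiler as suggested by the commented-out GGenerics code and the generateBytecode overload above. The source path is a placeholder, and the exact constructor and exception signatures are assumptions inferred from this comparison, not confirmed API:

```java
import java.io.File;
import java.util.ArrayList;
import java.util.List;

import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.typeinference.result.ResultSet;

class CompilerDriverSketch {
    public static void main(String[] args) throws Exception {
        List<File> input = new ArrayList<>();
        List<File> classpath = new ArrayList<>();
        input.add(new File("path/to/Example.jav"));              // placeholder path

        JavaTXCompiler compiler = new JavaTXCompiler(input, classpath);
        List<ResultSet> results = compiler.typeInference();      // global type inference
        compiler.generateBytecode(new File("target/classes"));   // null would mean: next to the sources
        System.out.println(results.size() + " result set(s)");
    }
}
```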
File: GatherNames.java
@@ -43,7 +43,7 @@ public class GatherNames {
        }
        else{
            if(typeDecl.classDeclaration().normalClassDeclaration() != null){
                if(!pkgName.isEmpty()){
                if(pkgName != ""){
                    nameString = pkgName + "." + typeDecl.classDeclaration().normalClassDeclaration().Identifier().toString();
                }
                else{
File: TypePlaceholder.java
@@ -64,10 +64,6 @@ public class TypePlaceholder extends RefTypeOrTPHOrWildcardOrGeneric
        }
    }

    @Override
    public int hashCode() {
        return this.getName().hashCode();
    }

    public String toString()
    {
File: TYPEStmt.java
@@ -587,9 +587,13 @@ public class TYPEStmt implements StatementVisitor{
        */

        RefTypeOrTPHOrWildcardOrGeneric retType = assumption.getReceiverType(resolver);
        methodConstraint.add(forMethod.name.equals("apply") ? //PL 2019-11-29: ternary operator inserted, because for lambda expressions no subtype FunN$$ exists
                new Pair(forMethod.receiver.getType(), retType, PairOperator.EQUALSDOT)
                : new Pair(forMethod.receiver.getType(), retType, PairOperator.SMALLERDOT));
        methodConstraint.add(new Pair(forMethod.receiver.getType(), retType,
                PairOperator.EQUALSDOT));//PL 2020-03-17 SMALLERDOT converted to EQUALSDOT, because all inherited methods are contained in the respective classes

        //For bytecode generation, PL 2020-03-09, currently not used BEGIN
        //methodConstraint.add(new Pair(forMethod.receiverType, retType,
        //        PairOperator.EQUALSDOT));
        //For bytecode generation, PL 2020-03-09, currently not used END

        methodConstraint.add(new Pair(assumption.getReturnType(resolver), forMethod.getType(),
                PairOperator.EQUALSDOT));
File: RuleSet.java
@@ -655,7 +655,6 @@ public class RuleSet implements IRuleSet{
        else
            t1.getTypeParams().forEach(x -> occuringTypes.push(x));
    }

    Queue<UnifyPair> result1 = new LinkedList<UnifyPair>(pairs);
    ArrayList<UnifyPair> result = new ArrayList<UnifyPair>();
    boolean applied = false;
@@ -669,6 +668,7 @@ public class RuleSet implements IRuleSet{
            && pair.getLhsType() instanceof PlaceholderType)
            lhsType = (PlaceholderType) pair.getLhsType();
        rhsType = pair.getRhsType(); //PL inserted 2017-09-29 instead of !((rhsType = pair.getRhsType()) instanceof PlaceholderType)

        if(lhsType != null
            //&& !((rhsType = pair.getRhsType()) instanceof PlaceholderType) //PL deleted on 2017-09-29, reason: type variables must also be replaced
            && typeMap.get(lhsType) > 1 // The type occurs in more pairs in the set than just the recent pair.
File: TypeUnify.java
@@ -28,8 +28,8 @@ public class TypeUnify {
     * @param cons
     * @return
     */
    public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret) {
        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret);
    public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
        ForkJoinPool pool = new ForkJoinPool();
        pool.invoke(unifyTask);
        Set<Set<UnifyPair>> res = unifyTask.join();
@@ -54,8 +54,8 @@ public class TypeUnify {
     * @param ret
     * @return
     */
    public UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret) {
        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret);
    public UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
        ForkJoinPool pool = new ForkJoinPool();
        pool.invoke(unifyTask);
        return ret;
@@ -68,11 +68,12 @@ public class TypeUnify {
     * @param fc
     * @param logFile
     * @param log
     * @param cons
     * @param ret
     * @return
     */
    public UnifyResultModel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret) {
        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret);
    public UnifyResultModel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
        ForkJoinPool pool = new ForkJoinPool();
        pool.invoke(unifyTask);
        Set<Set<UnifyPair>> res = unifyTask.join();
@@ -101,10 +102,11 @@ public class TypeUnify {
     * @param fc
     * @param logFile
     * @param log
     * @param cons
     * @return
     */
    public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret) {
        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0, ret);
    public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0, ret, usedTasks);
        Set<Set<UnifyPair>> res = unifyTask.compute();
        try {
            logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
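The common thread of these signature changes is that every entry point now also takes the shared UnifyTaskModel. A sketch of a caller, mirroring JavaTXCompiler.typeInferenceAsync; the import path of TypeUnify is assumed from the neighbouring classes in the same package, and the remaining arguments are passed in rather than constructed here:

```java
import java.io.Writer;
import java.util.List;
import java.util.Set;

import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

class UnifyCallSketch {
    UnifyResultModel unifyWithTaskModel(Set<UnifyPair> undConstraints,
                                        List<Set<Constraint<UnifyPair>>> oderConstraints,
                                        IFiniteClosure fc, Writer logFile,
                                        UnifyResultModel ret) {
        // One task model per compiler run; every spawned TypeUnifyTask registers
        // itself there so the whole group can later be cancelled together.
        UnifyTaskModel usedTasks = new UnifyTaskModel();
        return new TypeUnify().unifyAsync(undConstraints, oderConstraints, fc,
                logFile, true, ret, usedTasks);
    }
}
```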
File removed: TypeUnify2Future.java
@@ -1,30 +0,0 @@
package de.dhbwstuttgart.typeinference.unify;

import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

import java.io.Writer;
import java.util.List;
import java.util.Set;

public class TypeUnify2Future {

    Set<Set<UnifyPair>> setToFlatten;

    public TypeUnify2Future(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm) {
        super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm);
        this.setToFlatten = setToFlatten;
        this.nextSetElement = nextSetElement;
    }

    Set<UnifyPair> getNextSetElement() {
        return nextSetElement;
    }

    @Override
    protected Set<Set<UnifyPair>> compute() {
        Set<Set<UnifyPair>> res = unify2(setToFlatten, eq, oderConstraintsField, fc, parallel, rekTiefeField, true);
        return res;
    }
}
File: TypeUnify2Task.java
@@ -18,8 +18,8 @@ public class TypeUnify2Task extends TypeUnifyTask {

    Set<Set<UnifyPair>> setToFlatten;

    public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm) {
        super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm);
    public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks) {
        super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
        this.setToFlatten = setToFlatten;
        this.nextSetElement = nextSetElement;
    }
@@ -30,7 +30,35 @@ public class TypeUnify2Task extends TypeUnifyTask {

    @Override
    protected Set<Set<UnifyPair>> compute() {
        Set<Set<UnifyPair>> res = unify2(setToFlatten, eq, oderConstraintsField, fc, parallel, rekTiefeField, true);
        return res;
        if (one) {
            System.out.println("two");
        }
        one = true;
        Set<Set<UnifyPair>> res = unify2(setToFlatten, eq, oderConstraintsField, fc, parallel, rekTiefeField);
        /*if (isUndefinedPairSetSet(res)) {
            return new HashSet<>(); }
        else
        */
        //writeLog("xxx");
        //noOfThread--;
        synchronized (usedTasks) {
            if (this.myIsCancelled()) {
                return new HashSet<>();
            }
            else {
                return res;
            }
        }
    }

    public void closeLogFile() {

        try {
            logFile.close();
        }
        catch (IOException ioE) {
            System.err.println("no log-File" + thNo);
        }

    }
}
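The new compute() body illustrates the cancellation discipline introduced in this comparison: before publishing a result, a task checks under the shared lock whether the whole task group has been cancelled. A standalone sketch of that pattern (class names are hypothetical; the real classes are TypeUnifyTask/UnifyTaskModel):

```java
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;

class CancellableTaskModel {
    private final List<CancellableDemoTask> tasks = new ArrayList<>();
    synchronized void add(CancellableDemoTask t) { tasks.add(t); }
    synchronized void cancelAll() { tasks.forEach(CancellableDemoTask::cancelTask); }

    public static void main(String[] args) {
        CancellableTaskModel model = new CancellableTaskModel();
        System.out.println(new ForkJoinPool().invoke(new CancellableDemoTask(model)));
    }
}

class CancellableDemoTask extends RecursiveTask<Set<String>> {
    private final CancellableTaskModel usedTasks;
    private volatile boolean cancelled = false;

    CancellableDemoTask(CancellableTaskModel usedTasks) {
        this.usedTasks = usedTasks;
        usedTasks.add(this);            // register, as TypeUnifyTask's constructor does
    }

    void cancelTask() { cancelled = true; }

    @Override
    protected Set<String> compute() {
        Set<String> res = Set.of("some", "result"); // stands in for unify2(...)
        synchronized (usedTasks) {      // same discipline as compute() in the diff
            return cancelled ? new HashSet<>() : res;
        }
    }
}
```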
(File diff suppressed because it is too large.)
File: TypeUnifyTask.java
@@ -60,33 +60,7 @@ import com.google.common.collect.Ordering;
 * Implementation of the type unification algorithm
 * @author Florian Steurer
 */
public class TypeUnifyTask {

    public static Set<Set<UnifyPair>> typeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm){
        Set<UnifyPair> neweq = new HashSet<>(eq);
        /* 1-element or-constraints are converted into and-constraints */
        oderConstraints.stream()
            .filter(x -> x.size()==1)
            .map(y -> y.stream().findFirst().get()).forEach(x -> neweq.addAll(x));
        ArrayList<Set<Constraint<UnifyPair>>> remainingOderconstraints = oderConstraints.stream()
            .filter(x -> x.size()>1)
            .collect(Collectors.toCollection(ArrayList::new));
        Set<Set<UnifyPair>> res = unify(neweq, remainingOderconstraints, fc, parallel, rekTiefe, true);
        try {
            logFile.close();
        }
        catch (IOException ioE) {
            System.err.println("no log-File");
        }
        if (isUndefinedPairSetSet(res)) {
            throw new TypeinferenceException("Unresolved constraints: " + res.toString(), new NullToken()); //return new HashSet<>();
        }
        else {
            return res;
        }


    }
public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {

    private static final long serialVersionUID = 1L;
    private static int i = 0;
@@ -97,11 +71,16 @@ public class TypeUnifyTask {
     * Element from the nextSet that was added to this thread's equations
     */
    Set<UnifyPair> nextSetElement;
    Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm

    /**
     * For the threads
     */
    UnifyResultModel urm;
    protected static int noOfThread = 0;
    private static int totalnoOfThread = 0;
    int thNo;
    protected boolean one = false;
    Integer MaxNoOfThreads = 8;

    public static final String rootDirectory = System.getProperty("user.dir")+"/test/logFiles/";
    Writer logFile;
@@ -131,6 +110,9 @@ public class TypeUnifyTask {

    protected boolean parallel;

    //Gives if unify is not called from checkA
    private boolean finalresult = true;

    int rekTiefeField;

    Integer nOfUnify = 0;
@@ -144,13 +126,31 @@ public class TypeUnifyTask {
    static int noBacktracking;

    static Integer noShortendElements = 0;


    Boolean myIsCanceled = false;

    volatile UnifyTaskModel usedTasks;

    public TypeUnifyTask() {
        rules = new RuleSet();
    }

    /*
    public TypeUnifyTask(Set<UnifyPair> eq, IFiniteClosure fc, boolean parallel, FileWriter logFile, Boolean log) {
        this.eq = eq;
        this.fc = fc;
        this.oup = new OrderingUnifyPair(fc);
        this.parallel = parallel;
        this.logFile = logFile;
        this.log = log;
        rules = new RuleSet(logFile);
        noOfThread++;
        thNo = noOfThread;
    }
    */


    public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm) {
    public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks) {
        synchronized (this) {
            this.eq = eq;
            //this.oderConstraints = oderConstraints.stream().map(x -> x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
@@ -170,7 +170,21 @@ public class TypeUnifyTask {
            this.parallel = parallel;
            this.logFile = logFile;
            this.log = log;


            noOfThread++;
            totalnoOfThread++;
            //writeLog("thNo1 " + thNo);
            thNo = totalnoOfThread;
            writeLog("thNo2 " + thNo);
            try {
                this.logFile = //new OutputStreamWriter(new NullOutputStream());
                        //new FileWriter(new File(System.getProperty("user.dir")+"/src/test/resources/logFiles/"+"Thread_"+thNo));
                        new FileWriter(new File(System.getProperty("user.dir")+"/logFiles/"+"Thread_"+thNo));
                logFile.write("");
            }
            catch (IOException e) {
                System.err.println("log-File nicht vorhanden");
            }
            /* cancellation test
            if (thNo > 10) {
                System.out.println("cancel");
@@ -188,10 +202,56 @@ public class TypeUnifyTask {
            rules = new RuleSet(logFile);
            this.rekTiefeField = rekTiefe;
            this.urm = urm;
            this.usedTasks = usedTasks;
            this.usedTasks.add(this);
        }
    }

    /**
     * Inherits all variances
     * @param eq The set of constraints
     */
    /* PL 2018-05-17 moved to JavaTXCompiler
    private void varianceInheritance(Set<UnifyPair> eq) {
|
||||
Set<PlaceholderType> usedTPH = new HashSet<>();
|
||||
Set<PlaceholderType> phSet = eq.stream().map(x -> {
|
||||
Set<PlaceholderType> pair = new HashSet<>();
|
||||
if (x.getLhsType() instanceof PlaceholderType) pair.add((PlaceholderType)x.getLhsType());
|
||||
if (x.getRhsType() instanceof PlaceholderType) pair.add((PlaceholderType)x.getRhsType());
|
||||
return pair;
|
||||
}).reduce(new HashSet<>(), (a,b) -> { a.addAll(b); return a;} , (c,d) -> { c.addAll(d); return c;});
|
||||
|
||||
ArrayList<PlaceholderType> phSetVariance = new ArrayList<>(phSet);
|
||||
phSetVariance.removeIf(x -> (x.getVariance() == 0));
|
||||
while(!phSetVariance.isEmpty()) {
|
||||
PlaceholderType a = phSetVariance.remove(0);
|
||||
usedTPH.add(a);
|
||||
//HashMap<PlaceholderType,Integer> ht = new HashMap<>();
|
||||
//ht.put(a, a.getVariance());
|
||||
Set<UnifyPair> eq1 = new HashSet<>(eq);
|
||||
eq1.removeIf(x -> !(x.getLhsType() instanceof PlaceholderType && ((PlaceholderType)x.getLhsType()).equals(a)));
|
||||
eq1.stream().forEach(x -> { x.getRhsType().accept(new distributeVariance(), a.getVariance());});
|
||||
eq1 = new HashSet<>(eq);
|
||||
eq1.removeIf(x -> !(x.getRhsType() instanceof PlaceholderType && ((PlaceholderType)x.getRhsType()).equals(a)));
|
||||
eq1.stream().forEach(x -> { x.getLhsType().accept(new distributeVariance(), a.getVariance());});
|
||||
phSetVariance = new ArrayList<>(phSet);
|
||||
phSetVariance.removeIf(x -> (x.getVariance() == 0 || usedTPH.contains(x)));
|
||||
}
|
||||
}
|
||||
*/
|
||||
void myCancel(Boolean b) {
|
||||
myIsCanceled = true;
|
||||
}
|
||||
|
||||
public boolean myIsCancelled() {
|
||||
return myIsCanceled;
|
||||
}
|
||||
|
||||
protected Set<Set<UnifyPair>> compute() {
|
||||
if (one) {
|
||||
System.out.println("two");
|
||||
}
|
||||
one = true;
|
||||
Set<UnifyPair> neweq = new HashSet<>(eq);
|
||||
/* 1-elementige Oder-Constraints werden in und-Constraints umgewandelt */
|
||||
oderConstraintsField.stream()
|
||||
@ -200,7 +260,8 @@ public class TypeUnifyTask {
|
||||
ArrayList<Set<Constraint<UnifyPair>>> remainingOderconstraints = oderConstraintsField.stream()
|
||||
.filter(x -> x.size()>1)
|
||||
.collect(Collectors.toCollection(ArrayList::new));
|
||||
Set<Set<UnifyPair>> res = unify(neweq, remainingOderconstraints, fc, parallel, rekTiefeField, true);
|
||||
Set<Set<UnifyPair>> res = unify(neweq, remainingOderconstraints, fc, parallel, rekTiefeField);
|
||||
noOfThread--;
|
||||
try {
|
||||
logFile.close();
|
||||
}
|
||||
@ -211,10 +272,15 @@ public class TypeUnifyTask {
|
||||
throw new TypeinferenceException("Unresolved constraints: " + res.toString(), new NullToken()); //return new HashSet<>();
|
||||
}
|
||||
else {
|
||||
synchronized (usedTasks) {
|
||||
if (this.myIsCancelled()) {
|
||||
return new HashSet<>();
|
||||
}
|
||||
else {
|
||||
return res;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
/*
|
||||
@Override
|
||||
@ -236,9 +302,26 @@ public class TypeUnifyTask {
|
||||
* @param fc The finite closure
|
||||
* @return The set of all principal type unifiers
|
||||
*/
|
||||
protected static Set<Set<UnifyPair>> unify(final Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe, Boolean finalresult) {
|
||||
protected Set<Set<UnifyPair>> unify(final Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
|
||||
//Set<UnifyPair> aas = eq.stream().filter(x -> x.getLhsType().getName().equals("AA") //&& x.getPairOp().equals(PairOperator.SMALLERDOT)
|
||||
// ).collect(Collectors.toCollection(HashSet::new));
|
||||
//writeLog(nOfUnify.toString() + " AA: " + aas.toString());
|
||||
//if (aas.isEmpty()) {
|
||||
// System.out.println("");
|
||||
//}
|
||||
|
||||
//.collect(Collectors.toCollection(HashSet::new)));
|
||||
|
||||
synchronized (usedTasks) {
|
||||
if (this.myIsCancelled()) {
|
||||
return new HashSet<>();
|
||||
}
|
||||
}
|
||||
|
||||
rekTiefe++;
|
||||
nOfUnify++;
|
||||
writeLog(nOfUnify.toString() + " Unifikation: " + eq.toString());
|
||||
writeLog(nOfUnify.toString() + " Oderconstraints: " + oderConstraints.toString());
|
||||
|
||||
/*
|
||||
* Variancen auf alle Gleichungen vererben
|
||||
@ -257,6 +340,29 @@ public class TypeUnifyTask {
|
||||
return ret;
|
||||
}
|
||||
|
||||
/*
|
||||
* Occurs-Check durchfuehren
|
||||
*/
|
||||
|
||||
Set<UnifyPair> ocurrPairs = eq.stream().filter(x -> {
|
||||
UnifyType lhs, rhs;
|
||||
return (lhs = x.getLhsType()) instanceof PlaceholderType
|
||||
&& !((rhs = x.getRhsType()) instanceof PlaceholderType)
|
||||
&& rhs.getTypeParams().occurs((PlaceholderType)lhs);})
|
||||
.map(x -> { x.setUndefinedPair(); return x;})
|
||||
.collect(Collectors.toCollection(HashSet::new));
|
||||
writeLog("ocurrPairs: " + ocurrPairs);
|
||||
if (ocurrPairs.size() > 0) {
|
||||
Set<Set<UnifyPair>> ret = new HashSet<>();
|
||||
ret.add(ocurrPairs);
|
||||
return ret;
|
||||
}
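		// Editorial example (not part of this commit): a pair such as a <. List<a>,
		// where the placeholder a occurs among the type parameters of the
		// non-placeholder right-hand side, is caught here, marked as undefined and
		// returned as the single (error) result set, so this branch of the
		// unification stops immediately.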
|
||||
|
||||
|
||||
|
||||
/*
|
||||
* Step 1: Repeated application of reduce, adapt, erase, swap
|
||||
*/
|
||||
Set<UnifyPair> eq0;
|
||||
Set<UnifyPair> eq0Prime;
|
||||
Optional<Set<UnifyPair>> eqSubst = Optional.of(eq);
|
||||
@ -375,15 +481,30 @@ public class TypeUnifyTask {
|
||||
|
||||
//Aufruf von computeCartesianRecursive ANFANG
|
||||
//writeLog("topLevelSets: " + topLevelSets.toString());
|
||||
return computeCartesianRecursive(new HashSet<>(), new ArrayList<>(topLevelSets), eq, oderConstraintsOutput, fc, rekTiefe, finalresult);
|
||||
return computeCartesianRecursive(new ArrayList<>(topLevelSets), eq, oderConstraintsOutput, fc, parallel, rekTiefe);
|
||||
|
||||
}
|
||||
|
||||
|
||||
Set<Set<UnifyPair>> unify2(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe, Boolean finalresult) {
|
||||
Set<Set<UnifyPair>> unify2(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
|
||||
//Aufruf von computeCartesianRecursive ENDE
|
||||
|
||||
|
||||
//keine Ahnung woher das kommt
|
||||
//Set<Set<UnifyPair>> setToFlatten = topLevelSets.stream().map(x -> x.iterator().next()).collect(Collectors.toCollection(HashSet::new));
|
||||
|
||||
//Muss auskommentiert werden, wenn computeCartesianRecursive ANFANG
|
||||
// Cartesian product over all (up to 10) top level sets
|
||||
//Set<Set<Set<UnifyPair>>> eqPrimeSet = setOps.cartesianProduct(topLevelSets)
|
||||
// .stream().map(x -> new HashSet<>(x))
|
||||
// .collect(Collectors.toCollection(HashSet::new));
|
||||
//Muss auskommentiert werden, wenn computeCartesianRecursive ENDE
|
||||
|
||||
synchronized (usedTasks) {
|
||||
if (this.myIsCancelled()) {
|
||||
return new HashSet<>();
|
||||
}
|
||||
}
|
||||
|
||||
Set<Set<UnifyPair>> eqPrimePrimeSet = new HashSet<>();
|
||||
|
||||
Set<TypeUnifyTask> forks = new HashSet<>();
|
||||
@ -406,9 +527,47 @@ public class TypeUnifyTask {
|
||||
Set<Set<UnifyPair>> unifyres1 = null;
|
||||
Set<Set<UnifyPair>> unifyres2 = null;
|
||||
if (!ocString.equals(newOderConstraints.toString())) writeLog("nach Subst: " + newOderConstraints);
|
||||
{// sequentiell (Step 6b is included)
|
||||
//writeLog("nach Subst: " + eqPrimePrime);
|
||||
/*
|
||||
* Step 6 a) Restart (fork) for pairs where subst was applied
|
||||
*/
|
||||
/*
|
||||
if(parallel) {
|
||||
if (eqPrime.equals(eq) && !eqPrimePrime.isPresent()
|
||||
&& oderConstraints.isEmpty()) {
|
||||
&& oderConstraints.isEmpty()) //PL 2017-09-29 //(!eqPrimePrime.isPresent()) auskommentiert und durch
|
||||
//PL 2017-09-29 dies ersetzt //(!eqPrimePrime.isPresent())
|
||||
//PL 2018-05-18 beide Bedingungen muessen gelten, da eqPrime Veränderungen in allem ausser subst
|
||||
//eqPrimePrime Veraenderungen in subst repraesentieren.
|
||||
eqPrimePrimeSet.add(eqPrime);
|
||||
else if(eqPrimePrime.isPresent()) {
|
||||
//System.out.println("nextStep: " + eqPrimePrime.get());
|
||||
TypeUnifyTask fork = new TypeUnifyTask(eqPrimePrime.get(), fc, true, logFile, log);
|
||||
forks.add(fork);
|
||||
fork.fork();
|
||||
}
|
||||
else {
|
||||
//System.out.println("nextStep: " + eqPrime);
|
||||
TypeUnifyTask fork = new TypeUnifyTask(eqPrime, fc, true, logFile, log);
|
||||
forks.add(fork);
|
||||
fork.fork();
|
||||
}
|
||||
}
|
||||
else */
|
||||
{// sequentiell (Step 6b is included)
|
||||
if (printtag) System.out.println("nextStep: " + eqPrimePrime);
|
||||
if (eqPrime.equals(eq) && !eqPrimePrime.isPresent()
|
||||
&& oderConstraints.isEmpty()) { //PL 2017-09-29 //(!eqPrimePrime.isPresent()) auskommentiert und durch
|
||||
//PL 2017-09-29 dies ersetzt //(!eqPrimePrime.isPresent())
|
||||
//PL 2018-05-18 beide Bedingungen muessen gelten, da eqPrime Veränderungen in allem ausser subst
|
||||
//eqPrimePrime Veraenderungen in subst repraesentieren.
|
||||
//try {
|
||||
//if (isSolvedForm(eqPrime)) {
|
||||
// writeLog("eqPrime:" + eqPrime.toString()+"\n");
|
||||
//}
|
||||
//}
|
||||
//catch (IOException e) {
|
||||
// System.err.println("log-File nicht vorhanden");
|
||||
//}
|
||||
eqPrimePrimeSet.add(eqPrime);
|
||||
if (finalresult && isSolvedForm(eqPrime)) {
|
||||
writeLog("eqPrime:" + eqPrime.toString()+"\n");
|
||||
@ -416,17 +575,34 @@ public class TypeUnifyTask {
|
||||
}
|
||||
}
|
||||
else if(eqPrimePrime.isPresent()) {
|
||||
Set<Set<UnifyPair>> unifyres = unifyres1 = unify(eqPrimePrime.get(), newOderConstraints, fc, parallel, rekTiefe, finalresult);
|
||||
Set<Set<UnifyPair>> unifyres = unifyres1 = unify(eqPrimePrime.get(), newOderConstraints, fc, parallel, rekTiefe);
|
||||
|
||||
eqPrimePrimeSet.addAll(unifyres);
|
||||
}
|
||||
else {
|
||||
Set<Set<UnifyPair>> unifyres = unifyres2 = unify(eqPrime, newOderConstraints, fc, parallel, rekTiefe, finalresult);
|
||||
Set<Set<UnifyPair>> unifyres = unifyres2 = unify(eqPrime, newOderConstraints, fc, parallel, rekTiefe);
|
||||
|
||||
|
||||
eqPrimePrimeSet.addAll(unifyres);
|
||||
}
|
||||
}
|
||||
//Muss auskommentiert werden, wenn computeCartesianRecursive ANFANG
|
||||
//}
|
||||
//Muss auskommentiert werden, wenn computeCartesianRecursive ENDE
|
||||
|
||||
/*
|
||||
* Step 6 b) Build the union over everything.
|
||||
*/
|
||||
/*
|
||||
* PL 2019-01-22: geloescht
|
||||
|
||||
if(parallel)
|
||||
for(TypeUnifyTask fork : forks)
|
||||
eqPrimePrimeSet.addAll(fork.join());
|
||||
*/
|
||||
/*
|
||||
* Step 7: Filter empty sets;
|
||||
*/
|
||||
eqPrimePrimeSet = eqPrimePrimeSet.stream().filter(x -> isSolvedForm(x) || this.isUndefinedPairSet(x)).collect(Collectors.toCollection(HashSet::new));
|
||||
if (!eqPrimePrimeSet.isEmpty() && !isUndefinedPairSetSet(eqPrimePrimeSet)) {
|
||||
writeLog("Result1 " + eqPrimePrimeSet.toString());
|
||||
@ -435,25 +611,56 @@ public class TypeUnifyTask {
|
||||
}
|
||||
|
||||
|
||||
|
||||
Set<Set<UnifyPair>> computeCartesianRecursive(Set<Set<UnifyPair>> fstElems, ArrayList<Set<? extends Set<UnifyPair>>> topLevelSets, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, int rekTiefe, Boolean finalresult) {
|
||||
fstElems.addAll(topLevelSets.stream()
|
||||
	/**
	 * Computes the cartesian product of topLevelSets step by step.
	 * @param topLevelSets List of sets of sets over which the cartesian product has to be built.
	 *        Ex.: [{{a =. Integer}, {a =. Object}}, {{a =. Vector<b>, b =. Integer}, {a =. Vector<b>, b =. Object}}]
	 * @param eq Original set of equations which should be unified
	 * @param oderConstraints Remaining or-constraints
	 * @param fc The finite closure
	 * @param parallel Whether the algorithm should be run in parallel
	 * @param rekTiefe Depth of the recursive calls
	 * @return The set of all principal type unifiers
	 */
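	// Editorial walk-through (not part of this commit): for the example above,
	// the product is expanded one multi-element set at a time. oneElems collects
	// the 1-element sets up front; one multi-element set, e.g. {{a =. Integer}, {a =. Object}},
	// is then processed element by element, with the remaining multi-element sets
	// handled by the recursive call. Conceptually this yields the four candidates
	//   {a =. Integer, a =. Vector<b>, b =. Integer}, {a =. Integer, a =. Vector<b>, b =. Object},
	//   {a =. Object,  a =. Vector<b>, b =. Integer}, {a =. Object,  a =. Vector<b>, b =. Object},
	// explored in an order driven by the variance of the involved placeholders.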
|
||||
Set<Set<UnifyPair>> computeCartesianRecursive(ArrayList<Set<? extends Set<UnifyPair>>> topLevelSets, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe) {
|
||||
|
||||
//oneElems: Alle 1-elementigen Mengen, die nur ein Paar
|
||||
//a <. theta, theta <. a oder a =. theta enthalten
|
||||
Set<Set<UnifyPair>> oneElems = new HashSet<>();
|
||||
oneElems.addAll(topLevelSets.stream()
|
||||
.filter(x -> x.size()==1)
|
||||
.map(y -> y.stream().findFirst().get())
|
||||
.collect(Collectors.toCollection(HashSet::new)));
|
||||
ArrayList<Set<? extends Set<UnifyPair>>> remainingSets = topLevelSets.stream()
|
||||
.filter(x -> x.size()>1)
|
||||
.collect(Collectors.toCollection(ArrayList::new));
|
||||
if (remainingSets.isEmpty()) {//Alle Elemente sind 1-elementig
|
||||
Set<Set<UnifyPair>> result = unify2(fstElems, eq, oderConstraints, fc, parallel, rekTiefe, finalresult);
|
||||
|
||||
//optNextSet: Eine mehrelementige Menge, wenn vorhanden
|
||||
Optional<Set<? extends Set<UnifyPair>>> optNextSet = topLevelSets.stream().filter(x -> x.size()>1).findAny();
|
||||
|
||||
if (!optNextSet.isPresent()) {//Alle Elemente sind 1-elementig
|
||||
Set<Set<UnifyPair>> result = unify2(oneElems, eq, oderConstraints, fc, parallel, rekTiefe);
|
||||
return result;
|
||||
}
|
||||
Set<? extends Set<UnifyPair>> nextSet = remainingSets.remove(0);
|
||||
|
||||
Set<? extends Set<UnifyPair>> nextSet = optNextSet.get();
|
||||
//writeLog("nextSet: " + nextSet.toString());
|
||||
List<Set<UnifyPair>> nextSetasList =new ArrayList<>(nextSet);
|
||||
|
||||
/*
|
||||
try {
|
||||
//List<Set<UnifyPair>>
|
||||
//nextSetasList = oup.sortedCopy(nextSet);//new ArrayList<>(nextSet);
|
||||
}
|
||||
catch (java.lang.IllegalArgumentException e) {
|
||||
System.out.print("");
|
||||
}
|
||||
*/
|
||||
Set<Set<UnifyPair>> result = new HashSet<>();
|
||||
int variance = 0;
|
||||
|
||||
/* Varianzbestimmung Anfang
|
||||
* Oderconstraint, wenn entweder kein Basepair oder unterschiedliche Basepairs => oderConstraint = true;
|
||||
* Varianz = 1 => Argumentvariable
|
||||
* Varianz = -1 => Rückgabevariable
|
||||
* Varianz = 0 => unklar
|
||||
* Varianz = 2 => Operatoren oderConstraints */
|
||||
ArrayList<UnifyPair> zeroNextElem = new ArrayList<>(nextSetasList.get(0));
|
||||
UnifyPair fstBasePair = zeroNextElem.remove(0).getBasePair();
|
||||
Boolean oderConstraint = false;
|
||||
@ -476,16 +683,15 @@ public class TypeUnifyTask {
|
||||
}
|
||||
}
|
||||
else {
|
||||
//variance = 2;
|
||||
oderConstraint = true;
|
||||
}
|
||||
}
|
||||
else {
|
||||
//variance = 2;
|
||||
oderConstraint = true;
|
||||
}
|
||||
|
||||
if (oderConstraint) {//Varianz-Bestimmung Oder-Constraints
|
||||
//Varianz-Bestimmung Oder-Constraints
|
||||
if (oderConstraint) {
|
||||
if (printtag) System.out.println("nextSetasList " + nextSetasList);
|
||||
Optional<Integer> optVariance =
|
||||
nextSetasList.iterator()
|
||||
@ -498,24 +704,33 @@ public class TypeUnifyTask {
|
||||
((PlaceholderType)x.getGroundBasePair().getLhsType()).getVariance())
|
||||
.findAny();
|
||||
//Fuer Operatorenaufrufe wird variance auf 2 gesetzt.
|
||||
//da kein Receiver existiert also keon x.getGroundBasePair().getLhsType() instanceof PlaceholderType
|
||||
//Es werden alle Elemente des Kartesischen Produkts abgearbeitet
|
||||
//da kein Receiver existiert also kein x.getGroundBasePair().getLhsType() instanceof PlaceholderType
|
||||
//Bei Varianz = 2 werden alle Elemente des Kartesischen Produkts abgearbeitet
|
||||
variance = optVariance.isPresent() ? optVariance.get() : 2;
|
||||
}
|
||||
/* Varianzbestimmung Ende */
|
||||
|
||||
if (!nextSetasList.iterator().hasNext())
|
||||
System.out.print("");
|
||||
if (nextSetasList.iterator().next().stream().filter(x -> x.getLhsType().getName().equals("D")).findFirst().isPresent() && nextSetasList.size()>1)
|
||||
System.out.print("");
|
||||
|
||||
//writeLog("nextSetasList: " + nextSetasList.toString());
|
||||
Set<UnifyPair> nextSetElem = nextSetasList.get(0);
|
||||
/* sameEqSet-Bestimmung: Wenn a = ty \in nextSet dann enthaelt sameEqSet alle Paare a < ty1 oder ty2 < a aus fstElems */
|
||||
//writeLog("BasePair1: " + nextSetElem + " " + nextSetElem.iterator().next().getBasePair());
|
||||
|
||||
/* sameEqSet-Bestimmung: Wenn a = ty \in nextSet dann enthaelt sameEqSet
|
||||
* alle Paare a < ty1 oder ty2 < a aus oneElems */
|
||||
Set<UnifyPair> sameEqSet = new HashSet<>();
|
||||
|
||||
//optOrigPair enthaelt ggf. das Paar a = ty \in nextSet
|
||||
Optional<UnifyPair> optOrigPair = null;
|
||||
//if (variance != 2) {
|
||||
if (!oderConstraint) {
|
||||
optOrigPair = nextSetElem.stream().filter(x -> (
|
||||
(x.getPairOp().equals(PairOperator.EQUALSDOT)
|
||||
//x.getBasePair() != null && ist gegeben wenn variance != 2
|
||||
//x.getBasePair().getPairOp().equals(PairOperator.SMALLERDOT) &&
|
||||
(x.getPairOp().equals(PairOperator.EQUALSDOT)
|
||||
/*
|
||||
(x.getBasePair().getLhsType() instanceof PlaceholderType
|
||||
&& x.getLhsType().equals(x.getBasePair().getLhsType()))
|
||||
|| (x.getBasePair().getRhsType() instanceof PlaceholderType
|
||||
&& x.getLhsType().equals(x.getBasePair().getRhsType())
|
||||
*/
|
||||
))).filter(x -> //Sicherstellen, dass bei a = ty a auch wirklich die gesuchte Typvariable ist
|
||||
x.getLhsType().equals(x.getBasePair().getLhsType()) ||
|
||||
x.getLhsType().equals(x.getBasePair().getRhsType())
|
||||
@ -528,7 +743,7 @@ public class TypeUnifyTask {
|
||||
tyVar = origPair.getRhsType();
|
||||
}
|
||||
UnifyType tyVarEF = tyVar;
|
||||
sameEqSet = fstElems.stream().map(xx -> xx.iterator().next())
|
||||
sameEqSet = oneElems.stream().map(xx -> xx.iterator().next())
|
||||
.filter(x -> (((x.getLhsType().equals(tyVarEF) && !(x.getRhsType() instanceof PlaceholderType))
|
||||
|| (x.getRhsType().equals(tyVarEF) && !(x.getLhsType() instanceof PlaceholderType)))))
|
||||
.collect(Collectors.toCollection(HashSet::new));
|
||||
@ -537,14 +752,21 @@ public class TypeUnifyTask {
|
||||
/* sameEqSet-Bestimmung Ende */
|
||||
|
||||
Set<UnifyPair> a = null;
|
||||
while (nextSetasList.size() > 0) { //(nextSetasList.size() != 0) {
|
||||
while (nextSetasList.size() > 0) {
|
||||
Set<UnifyPair> a_last = a;
|
||||
|
||||
//Liste der Faelle für die parallele Verarbeitung
|
||||
/* Liste der Faelle für die parallele Verarbeitung
|
||||
* Enthaelt Elemente, die nicht in Relation zu aktuellem Fall in der
|
||||
* Variablen a stehen. Diese muesse auf alle Faelle bearbeitet werden,
|
||||
* Deshalb wird ihre Berechnung parallel angestossen.
|
||||
*/
|
||||
List<Set<UnifyPair>> nextSetasListRest = new ArrayList<>();
|
||||
|
||||
//Liste der Faelle, bei dem Receiver jeweils "? extends" enthaelt bzw. nicht enthaelt
|
||||
//In der Regel ein Element
|
||||
/* Liste der Faelle, bei dem Receiver jeweils "? extends" enthaelt bzw. nicht enthaelt
|
||||
* In der Regel ist dies genau ein Element
|
||||
* Dieses Element wird später aus nextSetasList geloescht, wenn das jeweils andere Element zum Erfolg
|
||||
* gefuehrt hat.
|
||||
*/
|
||||
List<Set<UnifyPair>> nextSetasListOderConstraints = new ArrayList<>();
|
||||
|
||||
writeLog("nextSet: " + nextSet.toString());
|
||||
@ -567,6 +789,7 @@ public class TypeUnifyTask {
|
||||
}
|
||||
|
||||
//Alle maximale Elemente in nextSetasListRest bestimmen
|
||||
//nur für diese wird parallele Berechnung angestossen.
|
||||
nextSetasListRest = oup.maxElements(nextSetasListRest);
|
||||
}
|
||||
else if (variance == -1) {
|
||||
@ -587,10 +810,13 @@ public class TypeUnifyTask {
|
||||
}
|
||||
}
|
||||
//Alle minimalen Elemente in nextSetasListRest bestimmen
|
||||
//nur für diese wird parallele Berechnung angestossen.
|
||||
nextSetasListRest = oup.minElements(nextSetasListRest);
|
||||
}
|
||||
else if (variance == 2) {
|
||||
a = nextSetasList.remove(0);
|
||||
|
||||
//Fuer alle Elemente wird parallele Berechnung angestossen.
|
||||
nextSetasListRest = new ArrayList<>(nextSetasList);
|
||||
}
|
||||
else if (variance == 0) {
|
||||
@ -616,55 +842,33 @@ public class TypeUnifyTask {
|
||||
}
|
||||
}
|
||||
}
|
||||
//writeLog("nextSet: " + nextSetasList.toString()+ "\n");
|
||||
//nextSetasList.remove(a);
|
||||
|
||||
//PL 2018-03-01
|
||||
//TODO: 1. Maximum und Minimum unterscheiden
|
||||
//TODO: 2. compare noch für alle Elmemente die nicht X =. ty sind erweitern
|
||||
//for(Set<UnifyPair> a : newSet) {
|
||||
i++;
|
||||
Set<Set<UnifyPair>> elems = new HashSet<Set<UnifyPair>>(fstElems);
|
||||
Set<Set<UnifyPair>> elems = new HashSet<Set<UnifyPair>>(oneElems);
|
||||
writeLog("a1: " + rekTiefe + " "+ "variance: "+ variance + " " + a.toString()+ "\n");
|
||||
//elems.add(a); PL 2019-01-20 Muss weg, weil das in jeweiligen Thread erfolgen muss. Fuer den sequentiellen Fall
|
||||
//im else-Zweig
|
||||
//if (remainingSets.isEmpty()) {//muss immer gegeben sein, weil nur 1 Element der topLevelSets mehr als ein Elemet enthaelt
|
||||
//writeLog("Vor unify2 Aufruf: " + elems.toString());
|
||||
|
||||
//Ergebnisvariable für den aktuelle Thread
|
||||
Set<Set<UnifyPair>> res = new HashSet<>();
|
||||
|
||||
//Menge der Ergebnisse der geforkten Threads
|
||||
Set<Set<Set<UnifyPair>>> add_res = new HashSet<>();
|
||||
|
||||
|
||||
Set<Set<UnifyPair>> aParDef = new HashSet<>();
|
||||
|
||||
/* PL 2019-03-11 Anfang eingefuegt Vergleich mit anderen Paaren ggf. loeschen */
|
||||
if (!oderConstraint && !sameEqSet.isEmpty()) {
|
||||
Optional<UnifyPair> optAPair = a.stream().filter(x -> (
|
||||
//x.getBasePair() != null && ist gegeben wenn variance != 2
|
||||
//x.getBasePair().getPairOp().equals(PairOperator.SMALLERDOT) &&
|
||||
(x.getPairOp().equals(PairOperator.EQUALSDOT)
|
||||
/*
|
||||
(x.getBasePair().getLhsType() instanceof PlaceholderType
|
||||
&& x.getLhsType().equals(x.getBasePair().getLhsType()))
|
||||
|| (x.getBasePair().getRhsType() instanceof PlaceholderType
|
||||
&& x.getLhsType().equals(x.getBasePair().getRhsType())
|
||||
*/
|
||||
))).filter(x -> //Sicherstellen, dass bei a = ty a auch wirklich die gesuchte Typvariable ist
|
||||
x.getLhsType().equals(x.getBasePair().getLhsType()) ||
|
||||
x.getLhsType().equals(x.getBasePair().getRhsType())
|
||||
).findFirst();
|
||||
|
||||
if (optAPair.isPresent()) {//basepair ist entweder a <. Ty oder ty <. a
|
||||
UnifyPair aPair = optAPair.get();
|
||||
//writeLog("optOrigPair: " + optOrigPair + " " + "aPair: " + aPair+ " " + "aPair.basePair(): " + aPair.getBasePair());
|
||||
writeLog("variance: " + new Integer(variance).toString() + "sameEqSet:" + sameEqSet);
|
||||
if (!checkA(aPair, sameEqSet, elems, result)) {
|
||||
a = null;
|
||||
noShortendElements++;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
/* Wenn bei (a \in theta) \in a zu Widerspruch in oneElems wird
|
||||
* a verworfen und zu nächstem Element von nextSetasList gegangen
|
||||
*/
|
||||
if (!oderConstraint && !sameEqSet.isEmpty() && !checkNoContradiction(a, sameEqSet, result)) {
|
||||
a = null;
|
||||
noShortendElements++;
|
||||
continue;
|
||||
}
|
||||
/* PL 2019-03-11 Ende eingefuegt Vergleich mit anderen Paaren ggf. loeschen */
|
||||
|
||||
if(parallel && (variance == 1)) {
|
||||
/* Wenn parallel gearbeitet wird, wird je nach Varianz ein neuer Thread
|
||||
* gestartet, der parallel weiterarbeitet.
|
||||
*/
|
||||
if(parallel && (variance == 1) && noOfThread <= MaxNoOfThreads) {
|
||||
Set<TypeUnify2Task> forks = new HashSet<>();
|
||||
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
|
||||
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
|
||||
@ -672,8 +876,14 @@ public class TypeUnifyTask {
|
||||
newElemsOrig.add(a);
|
||||
|
||||
/* FORK ANFANG */
|
||||
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm);
|
||||
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
|
||||
//forks.add(forkOrig);
|
||||
synchronized(usedTasks) {
|
||||
if (this.myIsCancelled()) {
|
||||
return new HashSet<>();
|
||||
}
|
||||
forkOrig.fork();
|
||||
}
|
||||
/* FORK ENDE */
|
||||
|
||||
synchronized (this) {
|
||||
@ -686,35 +896,14 @@ public class TypeUnifyTask {
|
||||
writeLog("1 RM" + nSaL.toString());
|
||||
}
|
||||
|
||||
/* PL 2019-03-13 Anfang eingefuegt Vergleich mit anderen Paaren ggf. loeschen */
|
||||
if (!oderConstraint) {//weiss nicht ob das wirklich stimmt
|
||||
Optional<UnifyPair> optAPair = nSaL.stream().filter(x -> (
|
||||
//x.getBasePair() != null && ist gegeben wenn variance != 2
|
||||
//x.getBasePair().getPairOp().equals(PairOperator.SMALLERDOT) &&
|
||||
(x.getPairOp().equals(PairOperator.EQUALSDOT)
|
||||
/*
|
||||
(x.getBasePair().getLhsType() instanceof PlaceholderType
|
||||
&& x.getLhsType().equals(x.getBasePair().getLhsType()))
|
||||
|| (x.getBasePair().getRhsType() instanceof PlaceholderType
|
||||
&& x.getLhsType().equals(x.getBasePair().getRhsType())
|
||||
*/
|
||||
))).filter(x -> //Sicherstellen, dass bei a = ty a auch wirklich die gesuchte Typvariable ist
|
||||
x.getLhsType().equals(x.getBasePair().getLhsType()) ||
|
||||
x.getLhsType().equals(x.getBasePair().getRhsType())
|
||||
).findFirst();
|
||||
|
||||
if (optAPair.isPresent()) {//basepair ist entweder a <. Ty oder ty <. a
|
||||
UnifyPair aPair = optAPair.get();
|
||||
//writeLog("optOrigPair: " + optOrigPair + " " + "aPair: " + aPair+ " " + "aPair.basePair(): " + aPair.getBasePair());
|
||||
writeLog("variance: " + new Integer(variance).toString() + "sameEqSet:" + sameEqSet);
|
||||
if (!sameEqSet.isEmpty() && !checkA(aPair, sameEqSet, elems, result)) {
|
||||
nSaL = null;
|
||||
noShortendElements++;
|
||||
continue;
|
||||
}
|
||||
if (!oderConstraint) {
|
||||
//ueberpruefung ob zu a =. ty \in nSaL in sameEqSet ein Widerspruch besteht
|
||||
if (!sameEqSet.isEmpty() && !checkNoContradiction(nSaL, sameEqSet, result)) {
|
||||
nSaL = null;
|
||||
noShortendElements++;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
/* PL 2019-03-13 Ende eingefuegt Vergleich mit anderen Paaren ggf. loeschen */
|
||||
else {
|
||||
nextSetasListOderConstraints.add(((Constraint<UnifyPair>)nSaL).getExtendConstraint());
|
||||
}
|
||||
@ -722,14 +911,41 @@ public class TypeUnifyTask {
|
||||
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
|
||||
newElems.add(nSaL);
|
||||
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm);
|
||||
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
|
||||
forks.add(fork);
|
||||
synchronized(usedTasks) {
|
||||
if (this.myIsCancelled()) {
|
||||
return new HashSet<>();
|
||||
}
|
||||
fork.fork();
|
||||
}
|
||||
}
|
||||
//res = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
|
||||
|
||||
res = forkOrig.join();
|
||||
|
||||
|
||||
/* FORK ANFANG */
|
||||
synchronized (this) {
|
||||
writeLog("wait "+ forkOrig.thNo);
|
||||
noOfThread--;
|
||||
res = forkOrig.join();
|
||||
synchronized (usedTasks) {
|
||||
if (this.myIsCancelled()) {
|
||||
return new HashSet<>();
|
||||
}
|
||||
}
|
||||
//noOfThread++;
|
||||
forkOrig.writeLog("final Orig 1");
|
||||
forkOrig.closeLogFile();
|
||||
//Set<Set<UnifyPair>> fork_res = forkOrig.join();
|
||||
writeLog("JoinOrig " + new Integer(forkOrig.thNo).toString());
|
||||
//noOfThread--; an das Ende von compute verschoben
|
||||
//add_res.add(fork_res);
|
||||
};
|
||||
/* FORK ENDE */
|
||||
|
||||
forks.forEach(x -> writeLog("wait: " + x.thNo));
|
||||
for(TypeUnify2Task fork : forks) {
|
||||
synchronized (this) {
|
||||
noOfThread--;
|
||||
Set<Set<UnifyPair>> fork_res = fork.join();
|
||||
synchronized (usedTasks) {
|
||||
if (this.myIsCancelled()) {
|
||||
@ -746,18 +962,28 @@ public class TypeUnifyTask {
|
||||
aParDef.add(fork.getNextSetElement());
|
||||
}
|
||||
fork.writeLog("final 1");
|
||||
fork.closeLogFile();
|
||||
};
|
||||
}
|
||||
//noOfThread++;
|
||||
} else {
|
||||
if((variance == -1)) {
|
||||
if(parallel && (variance == -1) && noOfThread <= MaxNoOfThreads) {
|
||||
Set<TypeUnify2Task> forks = new HashSet<>();
|
||||
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
|
||||
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
|
||||
newElemsOrig.add(a);
|
||||
|
||||
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm);
|
||||
|
||||
/* FORK ANFANG */
|
||||
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
|
||||
//forks.add(forkOrig);
|
||||
synchronized(usedTasks) {
|
||||
if (this.myIsCancelled()) {
|
||||
return new HashSet<>();
|
||||
}
|
||||
forkOrig.fork();
|
||||
}
|
||||
/* FORK ENDE */
|
||||
|
||||
synchronized (this) {
|
||||
writeLog("a in " + variance + " "+ a);
|
||||
@ -769,35 +995,14 @@ public class TypeUnifyTask {
|
||||
writeLog("-1 RM" + nSaL.toString());
|
||||
}
|
||||
|
||||
if (!oderConstraint) {//weiss nicht ob das wirklich stimmt
|
||||
/* PL 2019-03-13 Anfang eingefuegt Vergleich mit anderen Paaren ggf. loeschen */
|
||||
Optional<UnifyPair> optAPair = nSaL.stream().filter(x -> (
|
||||
//x.getBasePair() != null && ist gegeben wenn variance != 2
|
||||
//x.getBasePair().getPairOp().equals(PairOperator.SMALLERDOT) &&
|
||||
(x.getPairOp().equals(PairOperator.EQUALSDOT)
|
||||
/*
|
||||
(x.getBasePair().getLhsType() instanceof PlaceholderType
|
||||
&& x.getLhsType().equals(x.getBasePair().getLhsType()))
|
||||
|| (x.getBasePair().getRhsType() instanceof PlaceholderType
|
||||
&& x.getLhsType().equals(x.getBasePair().getRhsType())
|
||||
*/
|
||||
))).filter(x -> //Sicherstellen, dass bei a = ty a auch wirklich die gesuchte Typvariable ist
|
||||
x.getLhsType().equals(x.getBasePair().getLhsType()) ||
|
||||
x.getLhsType().equals(x.getBasePair().getRhsType())
|
||||
).findFirst();
|
||||
|
||||
if (optAPair.isPresent()) {//basepair ist entweder a <. Ty oder ty <. a
|
||||
UnifyPair aPair = optAPair.get();
|
||||
//writeLog("optOrigPair: " + optOrigPair + " " + "aPair: " + aPair+ " " + "aPair.basePair(): " + aPair.getBasePair());
|
||||
writeLog("variance: " + new Integer(variance).toString() + "sameEqSet:" + sameEqSet);
|
||||
if (!sameEqSet.isEmpty() && !checkA(aPair, sameEqSet, elems, result)) {
|
||||
nSaL = null;
|
||||
noShortendElements++;
|
||||
continue;
|
||||
}
|
||||
if (!oderConstraint) {
|
||||
//ueberpruefung ob zu a =. ty \in nSaL in sameEqSet ein Widerspruch besteht
|
||||
if (!sameEqSet.isEmpty() && !checkNoContradiction(nSaL, sameEqSet, result)) {
|
||||
nSaL = null;
|
||||
noShortendElements++;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
/* PL 2019-03-13 Ende eingefuegt Vergleich mit anderen Paaren ggf. loeschen */
|
||||
else {
|
||||
nextSetasListOderConstraints.add(((Constraint<UnifyPair>)nSaL).getExtendConstraint());
|
||||
}
|
||||
@ -940,11 +1145,12 @@ public class TypeUnifyTask {
|
||||
};
|
||||
}
|
||||
//noOfThread++;
|
||||
} else {
|
||||
//parallel = false; //Wenn MaxNoOfThreads erreicht ist, sequentiell weiterarbeiten
|
||||
} else {//parallel = false oder MaxNoOfThreads ist erreicht, sequentiell weiterarbeiten
|
||||
elems.add(a); //PL 2019-01-16 muss das wirklich hin steht schon in Zeile 859 ja braucht man siehe Zeile 859
|
||||
res = unify2(elems, eq, oderConstraints, fc, parallel, rekTiefe, finalresult);
|
||||
res = unify2(elems, eq, oderConstraints, fc, parallel, rekTiefe);
|
||||
}}}
|
||||
|
||||
//Ab hier alle parallele Berechnungen wieder zusammengeführt.
|
||||
if (!isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result)) {
|
||||
//wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen
|
||||
result = res;
|
||||
@ -1031,7 +1237,7 @@ public class TypeUnifyTask {
|
||||
}
|
||||
else {
|
||||
//alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden
|
||||
writeLog("RES Fst: reuslt: " + result.toString() + " res: " + res.toString());
|
||||
writeLog("RES Fst: result: " + result.toString() + " res: " + res.toString());
|
||||
result.addAll(res);
|
||||
}
|
||||
}
|
||||
@ -1043,10 +1249,6 @@ public class TypeUnifyTask {
|
||||
//}
|
||||
}
|
||||
|
||||
//}
|
||||
//else {//duerfte gar nicht mehr vorkommen PL 2018-04-03
|
||||
//result.addAll(computeCartesianRecursive(elems, remainingSets, eq, fc, parallel));
|
||||
//}
|
||||
if (parallel) {
|
||||
for (Set<Set<UnifyPair>> par_res : add_res) {
|
||||
if (!isUndefinedPairSetSet(par_res) && isUndefinedPairSetSet(result)) {
|
||||
@ -1283,7 +1485,29 @@ public class TypeUnifyTask {
|
||||
return result;
|
||||
}
|
||||
|
||||
protected Boolean checkA (UnifyPair aPair, Set<UnifyPair> sameEqSet, Set<Set<UnifyPair>> elems, Set<Set<UnifyPair>> result) {
|
||||
	/**
	 * Checks whether (a =. ty) \in a is in contradiction with sameEqSet.
	 * @param a The currently chosen element of the constraint set, containing a =. ty
	 * @param sameEqSet Set of constraints of the form a <. ty' or ty' <. a
	 * @param result Set of results which contains the correct solutions and the
	 *        error constraints; error constraints are added to it here
	 * @return false if (a =. ty) contradicts sameEqSet, true otherwise
	 */
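	// Editorial example (not part of this commit): if the chosen element a contains
	// a =. Integer and sameEqSet contains String <. a, the local unification of
	// String with Integer below fails, the resulting error constraints are recorded
	// in result, and the method returns false so that this element of the cartesian
	// product is skipped.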
|
||||
protected Boolean checkNoContradiction(Set<UnifyPair> a, Set<UnifyPair> sameEqSet, Set<Set<UnifyPair>> result) {
|
||||
|
||||
//optAPair enthaelt ggf. das Paar a = ty' \in a
|
||||
//unterscheidet sich von optOrigPair, da dort a = ty
|
||||
Optional<UnifyPair> optAPair =
|
||||
a.stream().filter(x -> (x.getPairOp().equals(PairOperator.EQUALSDOT)))
|
||||
.filter(x -> //Sicherstellen, dass bei a = ty a auch wirklich die gesuchte Typvariable ist
|
||||
x.getLhsType().equals(x.getBasePair().getLhsType()) ||
|
||||
x.getLhsType().equals(x.getBasePair().getRhsType()))
|
||||
.findFirst();
|
||||
|
||||
if (optAPair.isPresent()) {//basepair ist entweder a <. Ty oder ty <. a
|
||||
UnifyPair aPair = optAPair.get();
|
||||
//writeLog("optOrigPair: " + optOrigPair + " " + "aPair: " + aPair+ " " + "aPair.basePair(): " + aPair.getBasePair());
|
||||
|
||||
writeLog("checkA: " + aPair + "sameEqSet: " + sameEqSet);
|
||||
for (UnifyPair sameEq : sameEqSet) {
|
||||
if (sameEq.getLhsType() instanceof PlaceholderType) {
|
||||
@ -1293,12 +1517,14 @@ public class TypeUnifyTask {
|
||||
unitedSubst.addAll(sameEq.getAllSubstitutions());
|
||||
unitedSubst.addAll(sameEq.getAllBases());
|
||||
localEq.add(new UnifyPair(aPair.getRhsType(), sameEq.getRhsType(), sameEq.getPairOp(), unitedSubst, null));
|
||||
Set<Set<UnifyPair>> localRes = unify(localEq, new ArrayList<>(), fc, false, 0, false);
|
||||
finalresult = false;
|
||||
Set<Set<UnifyPair>> localRes = unify(localEq, new ArrayList<>(), fc, false, 0);
|
||||
finalresult = true;
|
||||
if (isUndefinedPairSetSet(localRes)) {
|
||||
if (result.isEmpty() || isUndefinedPairSetSet(result)) {
|
||||
result.addAll(localRes);
|
||||
}
|
||||
//writeLog("FALSE: " + aPair + "sameEqSet: " + sameEqSet);
|
||||
writeLog("FALSE: " + aPair + "sameEqSet: " + sameEqSet);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@ -1309,20 +1535,25 @@ public class TypeUnifyTask {
|
||||
unitedSubst.addAll(sameEq.getAllSubstitutions());
|
||||
unitedSubst.addAll(sameEq.getAllBases());
|
||||
localEq.add(new UnifyPair(sameEq.getLhsType(), aPair.getRhsType(), sameEq.getPairOp(), unitedSubst, null));
|
||||
Set<Set<UnifyPair>> localRes = unify(localEq, new ArrayList<>(), fc, false, 0, false);
|
||||
finalresult = false;
|
||||
Set<Set<UnifyPair>> localRes = unify(localEq, new ArrayList<>(), fc, false, 0);
|
||||
finalresult = true;
|
||||
if (isUndefinedPairSetSet(localRes)) {
|
||||
if (result.isEmpty() || isUndefinedPairSetSet(result)) {
|
||||
result.addAll(localRes);
|
||||
}
|
||||
//writeLog("FALSE: " + aPair + "sameEqSet: " + sameEqSet);
|
||||
writeLog("FALSE: " + aPair + "sameEqSet: " + sameEqSet);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
//writeLog("TRUE: " + aPair + "sameEqSet: " + sameEqSet);
|
||||
writeLog("TRUE: " + aPair + "sameEqSet: " + sameEqSet);
|
||||
return true;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
protected boolean couldBecorrect(Set<Pair<Set<UnifyPair>, UnifyPair>> reducedUndefResSubstGroundedBasePair, Set<UnifyPair> nextElem) {
|
||||
return reducedUndefResSubstGroundedBasePair.stream()
|
||||
.map(pair -> {
|
||||
@ -1412,7 +1643,7 @@ public class TypeUnifyTask {
|
||||
return true;}).reduce((xx, yy) -> xx || yy).get();
|
||||
}
|
||||
|
||||
protected static boolean isUndefinedPairSet(Set<UnifyPair> s) {
|
||||
protected boolean isUndefinedPairSet(Set<UnifyPair> s) {
|
||||
if (s.size() >= 1 ) {
|
||||
Boolean ret = s.stream().map(x -> x.isUndefinedPair()).reduce(true, (x,y)-> (x && y));
|
||||
return ret;
|
||||
@ -1422,7 +1653,7 @@ public class TypeUnifyTask {
|
||||
}
|
||||
}
|
||||
|
||||
protected static boolean isUndefinedPairSetSet(Set<Set<UnifyPair>> s) {
|
||||
protected boolean isUndefinedPairSetSet(Set<Set<UnifyPair>> s) {
|
||||
if (s.size() >= 1) {
|
||||
Boolean ret = s.stream(). map(x -> isUndefinedPairSet(x)).reduce(true, (x,y)-> (x && y));
|
||||
return ret;
|
||||
@ -1481,7 +1712,7 @@ public class TypeUnifyTask {
|
||||
/*
|
||||
* Apply rules until the queue is empty
|
||||
*/
|
||||
while(!eqQueue.isEmpty()) {
|
||||
while(!eqQueue.isEmpty()) {
|
||||
UnifyPair pair = eqQueue.pollFirst();
|
||||
|
||||
// ReduceUp, ReduceLow, ReduceUpLow
|
||||
@ -2289,7 +2520,7 @@ public class TypeUnifyTask {
|
||||
|
||||
void writeLog(String str) {
|
||||
synchronized ( this ) {
|
||||
if (log) {
|
||||
if (log && finalresult) {
|
||||
try {
|
||||
logFile.write("Thread no.:" + thNo + "\n");
|
||||
logFile.write("noOfThread:" + noOfThread + "\n");
|
||||
|
@ -0,0 +1,18 @@
package de.dhbwstuttgart.typeinference.unify;

import java.util.List;

import de.dhbwstuttgart.typeinference.result.ResultSet;

public class UnifyResultEvent {

	private List<ResultSet> newTypeResult;

	public UnifyResultEvent(List<ResultSet> newTypeResult) {
		this.newTypeResult = newTypeResult;
	}

	public List<ResultSet> getNewTypeResult() {
		return newTypeResult;
	}
}
@ -0,0 +1,7 @@
package de.dhbwstuttgart.typeinference.unify;

public interface UnifyResultListener {

	void onNewTypeResultFound(UnifyResultEvent evt);

}
@ -0,0 +1,21 @@
package de.dhbwstuttgart.typeinference.unify;

import java.util.ArrayList;
import java.util.List;

import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

public class UnifyResultListenerImpl implements UnifyResultListener {

	List<ResultSet> results = new ArrayList<>();

	public synchronized void onNewTypeResultFound(UnifyResultEvent evt) {
		results.addAll(evt.getNewTypeResult());
	}

	public List<ResultSet> getResults() {
		return results;
	}

}
@ -0,0 +1,59 @@
package de.dhbwstuttgart.typeinference.unify;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

public class UnifyResultModel {

	ConstraintSet<de.dhbwstuttgart.typeinference.constraints.Pair> cons;

	IFiniteClosure fc;

	public UnifyResultModel(ConstraintSet<de.dhbwstuttgart.typeinference.constraints.Pair> cons,
			IFiniteClosure fc) {
		this.cons = cons;
		this.fc = fc;
	}

	private List<UnifyResultListener> listeners = new ArrayList<>();

	public void addUnifyResultListener(UnifyResultListener listenerToAdd) {
		listeners.add(listenerToAdd);
	}

	public void removeUnifyResultListener(UnifyResultListener listenerToRemove) {
		listeners.remove(listenerToRemove);
	}

	public void notify(Set<Set<UnifyPair>> eqPrimePrimeSet) {
		Set<Set<UnifyPair>> eqPrimePrimeSetRet = eqPrimePrimeSet.stream().map(x -> {
			Optional<Set<UnifyPair>> res = new RuleSet().subst(x.stream().map(y -> {
				if (y.getPairOp() == PairOperator.SMALLERDOTWC) y.setPairOp(PairOperator.EQUALSDOT);
				return y; //alle Paare a <.? b erden durch a =. b ersetzt
			}).collect(Collectors.toCollection(HashSet::new)));
			if (res.isPresent()) {//wenn subst ein Erg liefert wurde was veraendert
				return new TypeUnifyTask().applyTypeUnificationRules(res.get(), fc);
			}
			else return x; //wenn nichts veraendert wurde wird x zurueckgegeben
		}).collect(Collectors.toCollection(HashSet::new));
		List<ResultSet> newResult = eqPrimePrimeSetRet.stream().map(unifyPairs ->
			new ResultSet(UnifyTypeFactory.convert(unifyPairs, de.dhbwstuttgart.typeinference.constraints.Pair.generateTPHMap(cons))))
			.collect(Collectors.toList());
		UnifyResultEvent evt = new UnifyResultEvent(newResult);

		for (UnifyResultListener listener : listeners) {
			listener.onNewTypeResultFound(evt);
		}
	}
}
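The three new classes above (UnifyResultEvent, UnifyResultListener, UnifyResultModel) form a small observer pipeline: the model registers listeners and pushes freshly converted result sets to them. The following minimal, self-contained sketch illustrates that flow with a plain String payload standing in for List<ResultSet>; all names in it (ResultEvent, ResultListener, CollectingListener, ResultModel, ListenerSketch) are invented for the example and are not part of this patch.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Simplified event carrying the newly found results (String stands in for ResultSet).
class ResultEvent {
    private final List<String> results;
    ResultEvent(List<String> results) { this.results = results; }
    List<String> getResults() { return results; }
}

// Listener interface, mirroring UnifyResultListener.
interface ResultListener {
    void onNewResult(ResultEvent evt);
}

// Collecting listener, mirroring UnifyResultListenerImpl.
class CollectingListener implements ResultListener {
    final List<String> collected = new ArrayList<>();
    public synchronized void onNewResult(ResultEvent evt) {
        collected.addAll(evt.getResults());
    }
}

// Model that registers listeners and publishes results, mirroring UnifyResultModel.notify.
class ResultModel {
    private final List<ResultListener> listeners = new ArrayList<>();
    void addListener(ResultListener l) { listeners.add(l); }
    void publish(List<String> newResults) {
        ResultEvent evt = new ResultEvent(newResults);
        for (ResultListener l : listeners) {
            l.onNewResult(evt);
        }
    }
}

public class ListenerSketch {
    public static void main(String[] args) {
        ResultModel model = new ResultModel();
        CollectingListener listener = new CollectingListener();
        model.addListener(listener);
        model.publish(Arrays.asList("a =. Integer", "b =. String"));
        System.out.println(listener.collected); // [a =. Integer, b =. String]
    }
}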
@ -0,0 +1,18 @@
package de.dhbwstuttgart.typeinference.unify;

import java.util.ArrayList;

public class UnifyTaskModel {

	ArrayList<TypeUnifyTask> usedTasks = new ArrayList<>();

	public synchronized void add(TypeUnifyTask t) {
		usedTasks.add(t);
	}

	public synchronized void cancel() {
		for(TypeUnifyTask t : usedTasks) {
			t.myCancel(true);
		}
	}
}
@ -29,6 +29,8 @@ public class UnifyPair {
|
||||
*/
|
||||
private PairOperator pairOp;
|
||||
|
||||
private boolean undefinedPair = false;
|
||||
|
||||
/**
|
||||
* Unifier/substitute that generated this pair
|
||||
* PL 2018-03-15
|
||||
@ -112,7 +114,10 @@ public class UnifyPair {
|
||||
public void addSubstitutions(Set<UnifyPair> sup) {
|
||||
substitution.addAll(sup);
|
||||
}
|
||||
|
||||
|
||||
public void setUndefinedPair() {
|
||||
undefinedPair = true;
|
||||
}
|
||||
public Set<UnifyPair> getSubstitution() {
|
||||
return new HashSet<>(substitution);
|
||||
}
|
||||
@ -120,6 +125,10 @@ public class UnifyPair {
|
||||
public UnifyPair getBasePair() {
|
||||
return basePair;
|
||||
}
|
||||
public boolean isUndefinedPair() {
|
||||
return undefinedPair;
|
||||
}
|
||||
|
||||
public Set<UnifyPair> getAllSubstitutions () {
|
||||
Set<UnifyPair> ret = new HashSet<>();
|
||||
ret.addAll(new ArrayList<>(getSubstitution()));
|
||||
|
@ -114,7 +114,6 @@ public abstract class UnifyType {

	@Override
	public boolean equals(Object obj) {
		if(obj == null)return false;
		return this.toString().equals(obj.toString());
	}
}
@ -40,7 +40,8 @@ public class AllgemeinTest {
		//String className = "FCTest3";
		//String className = "Var";
		//String className = "Put";
		String className = "Cycle";
		//String className = "Twice";
		String className = "TestSubTypless";
		//PL 2019-10-24: genutzt fuer unterschiedliche Tests
		path = System.getProperty("user.dir")+"/src/test/resources/AllgemeinTest/" + className + ".jav";
		//path = System.getProperty("user.dir")+"/src/test/resources/AllgemeinTest/Overloading_Generics.jav";
@ -1,68 +0,0 @@
|
||||
package constraintSimplify;
|
||||
|
||||
import de.dhbwstuttgart.bytecode.TPHExtractor;
|
||||
import de.dhbwstuttgart.bytecode.insertGenerics.PositionFinder;
|
||||
import de.dhbwstuttgart.bytecode.genericsGenerator.GeneratedGenericsFinder;
|
||||
import de.dhbwstuttgart.parser.NullToken;
|
||||
import de.dhbwstuttgart.parser.scope.JavaClassName;
|
||||
import de.dhbwstuttgart.syntaxtree.*;
|
||||
import de.dhbwstuttgart.syntaxtree.statement.Block;
|
||||
import de.dhbwstuttgart.syntaxtree.type.RefType;
|
||||
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
|
||||
import de.dhbwstuttgart.typeinference.result.ResultSet;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.lang.reflect.Modifier;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
public class FamilyOfGenerics {
|
||||
|
||||
@Test
|
||||
public void generateBC() throws Exception {
|
||||
SourceFile sf = generateAST();
|
||||
PositionFinder.getPositionOfTPH(sf, null);
|
||||
TPHExtractor tphExtractor = new TPHExtractor();
|
||||
List<ResultSet> results = new ArrayList<ResultSet>();
|
||||
GeneratedGenericsFinder generatedGenericsFinder = new GeneratedGenericsFinder(sf, results);
|
||||
}
|
||||
|
||||
public static SourceFile generateAST(){
|
||||
ArrayList<ClassOrInterface> classes = new ArrayList<>();
|
||||
ArrayList<Field> fields = new ArrayList<>();
|
||||
ArrayList<Method> methods = new ArrayList<>();
|
||||
|
||||
fields.add(generateField("fld1"));
|
||||
String[] paramNames = {"a"};
|
||||
methods.add(generateMethod("testMethode", paramNames));
|
||||
|
||||
classes.add(new ClassOrInterface(Modifier.PUBLIC, new JavaClassName("Test"), fields, Optional.empty(), methods,
|
||||
new ArrayList<>(), generateEmptyGenericDeclList(),
|
||||
new RefType(new JavaClassName("java.lang.Object"), new NullToken()),
|
||||
false, new ArrayList<>(), new NullToken()));
|
||||
|
||||
return new SourceFile("Test.jav", classes, new HashSet<>());
|
||||
}
|
||||
|
||||
public static Method generateMethod(String methodName, String[] paramNames){
|
||||
ArrayList<FormalParameter> parameters = new ArrayList<>();
|
||||
for(String str: paramNames) {
|
||||
FormalParameter fp = new FormalParameter(str, TypePlaceholder.fresh(new NullToken()), new NullToken());
|
||||
parameters.add(fp);
|
||||
|
||||
}
|
||||
ParameterList parameterList = new ParameterList(parameters, new NullToken());
|
||||
return new Method(Modifier.PUBLIC, methodName, TypePlaceholder.fresh(new NullToken()), parameterList,
|
||||
new Block(new ArrayList<>(), new NullToken()), generateEmptyGenericDeclList(), new NullToken());
|
||||
}
|
||||
|
||||
public static GenericDeclarationList generateEmptyGenericDeclList(){
|
||||
return new GenericDeclarationList(new ArrayList<>(), new NullToken());
|
||||
}
|
||||
|
||||
public static Field generateField(String fieldName) {
|
||||
return new Field(fieldName, TypePlaceholder.fresh(new NullToken()), Modifier.PUBLIC, new NullToken());
|
||||
}
|
||||
}
|
@ -1,206 +0,0 @@
|
||||
package insertGenerics;
|
||||
|
||||
import de.dhbwstuttgart.bytecode.constraint.TPHConstraint;
|
||||
import de.dhbwstuttgart.bytecode.insertGenerics.*;
|
||||
import de.dhbwstuttgart.bytecode.utilities.MethodAndTPH;
|
||||
import junit.framework.TestCase;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
|
||||
public class FamilyOfGeneratedGenericsTest extends TestCase {
|
||||
|
||||
public void testIdentityMethod(){
|
||||
/*
|
||||
Example method:
|
||||
A id(B i) return i;
|
||||
gives constraint: B <. A, which is a method constraint
|
||||
*/
|
||||
|
||||
List<TPHConstraint> inputConstraints = new ArrayList<>();
|
||||
inputConstraints.add(new TPHConstraint("B", "A", TPHConstraint.Relation.EXTENDS));
|
||||
|
||||
HashMap<String, PairTphMethod<PositionFinder.Position, String>> tphPositions = new HashMap<>();
|
||||
PairTphMethod<PositionFinder.Position, String> meth1 = new PairTphMethod<PositionFinder.Position, String>(PositionFinder.Position.METHOD, "m1");
|
||||
tphPositions.put("A", meth1);
|
||||
tphPositions.put("B", meth1);
|
||||
|
||||
List<ClassConstraint> classConstraints = FamilyOfGeneratedGenerics.getClassConstraints(inputConstraints, tphPositions);
|
||||
assertTrue(classConstraints.isEmpty());
|
||||
|
||||
/*
|
||||
MethodConstraints should be the same as the input constraint
|
||||
*/
|
||||
List<MethodConstraint> methodConstraints = FamilyOfGeneratedGenerics.getMethodConstraints(inputConstraints, tphPositions);
|
||||
assertTrue(methodConstraints.size() == 1);
|
||||
assertTrue(methodConstraints.get(0).getLeft().equals("B"));
|
||||
assertTrue(methodConstraints.get(0).getRight().equals("A"));
|
||||
}
|
||||
|
||||
public void testClassField(){
|
||||
/*
|
||||
class Example{
|
||||
A f;
|
||||
B fReturn(){
|
||||
return f;
|
||||
}
|
||||
}
|
||||
gives constraint: A <. B, which is a class constraint
|
||||
*/
|
||||
|
||||
List<TPHConstraint> inputConstraints = new ArrayList<>();
|
||||
inputConstraints.add(new TPHConstraint("A", "B", TPHConstraint.Relation.EXTENDS));
|
||||
|
||||
HashMap<String, PairTphMethod<PositionFinder.Position, String>> tphPositions = new HashMap<>();
|
||||
PairTphMethod<PositionFinder.Position, String> posOfA = new PairTphMethod<>(PositionFinder.Position.FIELD, null);
|
||||
tphPositions.put("A", posOfA);
|
||||
PairTphMethod<PositionFinder.Position, String> posOfB = new PairTphMethod<>(PositionFinder.Position.METHOD, "fReturn");
|
||||
tphPositions.put("B", posOfB);
|
||||
|
||||
/*
|
||||
ClassConstraints should not be the same as the input constraint
|
||||
*/
|
||||
List<ClassConstraint> classConstraints = FamilyOfGeneratedGenerics.getClassConstraints(inputConstraints, tphPositions);
|
||||
System.out.println(classConstraints);
|
||||
assertTrue(classConstraints.size() == 2);
|
||||
//assertTrue(classConstraints.get(0).getLeft().equals("A"));
|
||||
//assertTrue(classConstraints.get(0).getRight().equals("B"));
|
||||
}
|
||||
|
||||
public void testSecondLineOfClassConstraints() {
|
||||
/*
|
||||
class Example() {
|
||||
A a;
|
||||
B b = a;
|
||||
C anyMethod() {
|
||||
F f;
|
||||
return f;
|
||||
}
|
||||
D otherMethod(E e) {
|
||||
this.b = e;
|
||||
e = this.a;
|
||||
return e;
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
||||
List<TPHConstraint> inputConstraints = new ArrayList<>();
|
||||
inputConstraints.add(new TPHConstraint("A", "B", TPHConstraint.Relation.EXTENDS));
|
||||
inputConstraints.add(new TPHConstraint("F", "C", TPHConstraint.Relation.EXTENDS));
|
||||
inputConstraints.add(new TPHConstraint("E", "B", TPHConstraint.Relation.EXTENDS));
|
||||
inputConstraints.add(new TPHConstraint("A", "E", TPHConstraint.Relation.EXTENDS));
|
||||
inputConstraints.add(new TPHConstraint("E", "D", TPHConstraint.Relation.EXTENDS));
|
||||
|
||||
HashMap<String, PairTphMethod<PositionFinder.Position, String>> tphPositions = new HashMap<>();
|
||||
PairTphMethod<PositionFinder.Position, String> posOfA = new PairTphMethod<>(PositionFinder.Position.FIELD, null);
|
||||
PairTphMethod<PositionFinder.Position, String> posOfB = new PairTphMethod<>(PositionFinder.Position.FIELD, null);
|
||||
PairTphMethod<PositionFinder.Position, String> posOfC = new PairTphMethod<>(PositionFinder.Position.METHOD, "anyMethod");
|
||||
PairTphMethod<PositionFinder.Position, String> posOfD = new PairTphMethod<>(PositionFinder.Position.METHOD, "otherMethod");
|
||||
PairTphMethod<PositionFinder.Position, String> posOfE = new PairTphMethod<>(PositionFinder.Position.METHOD, "otherMethod");
|
||||
PairTphMethod<PositionFinder.Position, String> posOfF = new PairTphMethod<>(PositionFinder.Position.METHOD, "anyMethod");
|
||||
|
||||
tphPositions.put("A", posOfA);
|
||||
tphPositions.put("B", posOfB);
|
||||
tphPositions.put("C", posOfC);
|
||||
tphPositions.put("F", posOfF);
|
||||
tphPositions.put("D", posOfD);
|
||||
tphPositions.put("E", posOfE);
|
||||
|
||||
List<ClassConstraint> classConstraints = FamilyOfGeneratedGenerics.getClassConstraints(inputConstraints, tphPositions);
|
||||
System.out.println(classConstraints);
|
||||
assertFalse(classConstraints.isEmpty());
|
||||
assertTrue(classConstraints.size() == 6);
|
||||
|
||||
}
|
||||
|
||||
    public void testTPHsAndGenerics() {
        /*
        class TPHsAndGenerics {
            Fun1<A,B> id = x -> x;
            C id2 (D x) {
                return id.apply(x);
            }
            E m(F a, G b){
                var c = m2(a,b);
                return a;
            }
            H m2(I a, J b){
                return b;
            }
        }
        */

        List<TPHConstraint> inputConstraints = new ArrayList<>();
        inputConstraints.add(new TPHConstraint("A","B", TPHConstraint.Relation.EXTENDS));
        inputConstraints.add(new TPHConstraint("B","C", TPHConstraint.Relation.EXTENDS));
        inputConstraints.add(new TPHConstraint("D","A", TPHConstraint.Relation.EXTENDS));
        inputConstraints.add(new TPHConstraint("F","E", TPHConstraint.Relation.EXTENDS));
        inputConstraints.add(new TPHConstraint("F","I", TPHConstraint.Relation.EXTENDS));
        inputConstraints.add(new TPHConstraint("G","J", TPHConstraint.Relation.EXTENDS));
        inputConstraints.add(new TPHConstraint("J","H", TPHConstraint.Relation.EXTENDS));

        HashMap<String, PairTphMethod<PositionFinder.Position, String>> tphPositions = new HashMap<>();
        PairTphMethod<PositionFinder.Position, String> posOfA = new PairTphMethod<>(PositionFinder.Position.FIELD, null);
        PairTphMethod<PositionFinder.Position, String> posOfB = new PairTphMethod<>(PositionFinder.Position.FIELD, null);
        PairTphMethod<PositionFinder.Position, String> posOfC = new PairTphMethod<>(PositionFinder.Position.METHOD, "id2");
        PairTphMethod<PositionFinder.Position, String> posOfD = new PairTphMethod<>(PositionFinder.Position.METHOD, "id2");
        PairTphMethod<PositionFinder.Position, String> posOfE = new PairTphMethod<>(PositionFinder.Position.METHOD, "m");
        PairTphMethod<PositionFinder.Position, String> posOfF = new PairTphMethod<>(PositionFinder.Position.METHOD, "m");
        PairTphMethod<PositionFinder.Position, String> posOfG = new PairTphMethod<>(PositionFinder.Position.METHOD, "m");
        PairTphMethod<PositionFinder.Position, String> posOfH = new PairTphMethod<>(PositionFinder.Position.METHOD, "m2");
        PairTphMethod<PositionFinder.Position, String> posOfI = new PairTphMethod<>(PositionFinder.Position.METHOD, "m2");
        PairTphMethod<PositionFinder.Position, String> posOfJ = new PairTphMethod<>(PositionFinder.Position.METHOD, "m2");

        tphPositions.put("A", posOfA);
        tphPositions.put("B", posOfB);
        tphPositions.put("C", posOfC);
        tphPositions.put("D", posOfD);
        tphPositions.put("E", posOfE);
        tphPositions.put("F", posOfF);
        tphPositions.put("G", posOfG);
        tphPositions.put("H", posOfH);
        tphPositions.put("I", posOfI);
        tphPositions.put("J", posOfJ);

        List<ClassConstraint> classConstraints = FamilyOfGeneratedGenerics.getClassConstraints(inputConstraints, tphPositions);
        System.out.println(classConstraints);

        assertFalse(classConstraints.isEmpty());
        assertTrue(classConstraints.size() == 3);
    }

    public void testPositionConverter() {
        HashMap<String, Boolean> allTphsOld = new HashMap<>();
        List<MethodAndTPH> listOfMethodsAndTphs = new ArrayList<>();
        allTphsOld.put("A", true);
        allTphsOld.put("B", false);
        MethodAndTPH m1 = new MethodAndTPH("m1");
        m1.getTphs().add("A");
        MethodAndTPH bla = new MethodAndTPH("bla");
        MethodAndTPH blubb = new MethodAndTPH("blubb");
        // blubb.getTphs().add("A");
        listOfMethodsAndTphs.add(bla);
        listOfMethodsAndTphs.add(blubb);
        listOfMethodsAndTphs.add(m1);

        HashMap<String, PairTphMethod<PositionFinder.Position, String>> allTphsNew = FamilyOfGeneratedGenerics.positionConverter(allTphsOld, listOfMethodsAndTphs);
        System.out.println(allTphsNew);
        // What should happen when two (or more) methods contain the same TPH?
        // Is that even possible, or are TPHs always initialised distinctly and only mapped onto each other at the end?
        // Rework this, or leave it as it is?

        assertTrue(allTphsNew.get("A").fst.equals(PositionFinder.Position.METHOD));
        assertTrue(allTphsNew.get("B").fst.equals(PositionFinder.Position.FIELD));
    }

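    // Sketch (not part of the original test): one way to probe the open question above,
    // namely what positionConverter reports when two methods contain the same TPH.
    // The exact expected pair is deliberately left open; the sketch only prints the mapping
    // and checks the position, which should presumably still be METHOD.
    public void testPositionConverterSharedTphSketch() {
        HashMap<String, Boolean> allTphsOld = new HashMap<>();
        allTphsOld.put("A", true);                      // "A" is a method TPH ...
        MethodAndTPH m1 = new MethodAndTPH("m1");
        MethodAndTPH m2 = new MethodAndTPH("m2");
        m1.getTphs().add("A");                          // ... occurring in m1
        m2.getTphs().add("A");                          // ... and also in m2
        List<MethodAndTPH> methods = new ArrayList<>();
        methods.add(m1);
        methods.add(m2);
        HashMap<String, PairTphMethod<PositionFinder.Position, String>> converted =
                FamilyOfGeneratedGenerics.positionConverter(allTphsOld, methods);
        System.out.println(converted);                  // which of m1/m2 ends up in the pair is the open point
        assertTrue(converted.get("A").fst.equals(PositionFinder.Position.METHOD));
    }
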
    public void testFirstTransitiveSubtypeForMethodTypes() {
        // TODO: not yet implemented
    }

}

@ -1,30 +0,0 @@
package insertGenerics;

import de.dhbwstuttgart.bytecode.genericsGenerator.GeneratedGenericsFinder;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.syntaxtree.ClassOrInterface;
import de.dhbwstuttgart.syntaxtree.SourceFile;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import junit.framework.TestResult;
import org.junit.Test;

import java.io.File;
import java.io.IOException;
import java.util.List;

public class MethodsTest {
    public static final String rootDirectory = System.getProperty("user.dir")+"/src/test/resources/insertGenericsJav/";
    public static final String fileDirectory = rootDirectory + "TestGGFinder.jav";

    @Test
    // Note: JUnit 4 does not run test methods that declare parameters; as written, this method
    // has to be invoked manually with a ClassOrInterface taken from the parsed source file.
    public void testVisit(ClassOrInterface coi) throws IOException, ClassNotFoundException {
        JavaTXCompiler compiler = new JavaTXCompiler(new File(fileDirectory));
        // Look the SourceFile up by its File key; the previous keySet() lookup always returned null.
        SourceFile sf = compiler.sourceFiles.get(new File(fileDirectory));
        List<ResultSet> results = compiler.typeInference();

        GeneratedGenericsFinder ggf = new GeneratedGenericsFinder(sf, results);
        ClassOrInterface coiHere = coi;
        ggf.visit(coiHere);
        //assert results.size()>0;
    }
}

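As an aside: TryTest further down in this changeset avoids guessing the exact File key and instead iterates the key set of the parsed sources. A minimal sketch of that pattern, reusing only calls that already appear in these tests, would be:

    JavaTXCompiler compiler = new JavaTXCompiler(new File(fileDirectory));
    for (File f : compiler.sourceFiles.keySet()) {
        SourceFile sf = compiler.sourceFiles.get(f);   // one entry per parsed .jav file
        // ... hand sf to GeneratedGenericsFinder as in testVisit above
    }
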
@ -1,56 +0,0 @@
/*
package insertGenerics;

import de.dhbwstuttgart.bytecode.constraint.TPHConstraint;
import de.dhbwstuttgart.bytecode.constraint.TPHConstraint.Relation;
import de.dhbwstuttgart.bytecode.insertGenerics.ClassConstraint;
import de.dhbwstuttgart.bytecode.insertGenerics.FamilyOfGeneratedGenerics;
import de.dhbwstuttgart.bytecode.insertGenerics.MethodConstraint;
import de.dhbwstuttgart.bytecode.insertGenerics.PositionFinder;
import org.junit.Test;
import static org.junit.Assert.assertEquals;

import java.io.BufferedReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

public class TestExample42 {
    public List<TPHConstraint> fillConstraintsList() {
        List<TPHConstraint> cs = new ArrayList<>();
        cs.add(new TPHConstraint("M", "N", Relation.EXTENDS));
        cs.add(new TPHConstraint("N", "Z", Relation.EXTENDS));
        cs.add(new TPHConstraint("Q", "K", Relation.EXTENDS));
        cs.add(new TPHConstraint("K", "P", Relation.EXTENDS));
        cs.add(new TPHConstraint("W", "M", Relation.EXTENDS));
        cs.add(new TPHConstraint("Z", "V", Relation.EXTENDS));
        return cs;
    }

    public HashMap<String, PositionFinder.Position> fillPosOfTphs() {
        BufferedReader l;
        HashMap<String, PositionFinder.Position> posOfTphs = new HashMap<>();
        posOfTphs.put("K", PositionFinder.Position.FIELD);
        posOfTphs.put("L", PositionFinder.Position.METHOD);
        posOfTphs.put("M", PositionFinder.Position.METHOD);
        posOfTphs.put("N", PositionFinder.Position.METHOD);
        posOfTphs.put("P", PositionFinder.Position.METHOD);
        posOfTphs.put("Q", PositionFinder.Position.METHOD);
        posOfTphs.put("U", PositionFinder.Position.METHOD);
        posOfTphs.put("V", PositionFinder.Position.METHOD);
        posOfTphs.put("W", PositionFinder.Position.METHOD);
        posOfTphs.put("Z", PositionFinder.Position.METHOD);
        return posOfTphs;
    }

    @Test
    public void genericTest() {
        List<ClassConstraint> classConstraints = FamilyOfGeneratedGenerics.getClassConstraints(fillConstraintsList(), fillPosOfTphs());
        System.out.println("ClassConstraints: " + classConstraints);
        List<MethodConstraint> methodConstraints = FamilyOfGeneratedGenerics.getMethodConstraints(fillConstraintsList(), fillPosOfTphs());
        System.out.println("MethodConstraints: " + methodConstraints);

        List<TPHConstraint> testCons;
    }
}
*/

@ -1,28 +0,0 @@
package insertGenerics;

import de.dhbwstuttgart.bytecode.constraint.TPHConstraint;
import de.dhbwstuttgart.bytecode.constraint.TPHConstraint.Relation;
import de.dhbwstuttgart.bytecode.insertGenerics.FamilyOfGeneratedGenerics;
import org.junit.Test;
import static org.junit.Assert.assertEquals;

import java.util.ArrayList;
import java.util.List;

public class TestTransitiveClosure {
    public List<TPHConstraint> fillList() {
        List<TPHConstraint> list = new ArrayList<>();
        list.add(new TPHConstraint("A", "B", Relation.EXTENDS));
        list.add(new TPHConstraint("B", "C", Relation.EXTENDS));
        list.add(new TPHConstraint("C", "D", Relation.EXTENDS));
        return list;
    }

    @Test
    public void genericTest() {
        List<TPHConstraint> testCons = FamilyOfGeneratedGenerics.buildTransitiveClosure(fillList());
        System.out.println(testCons);
    }

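    // Sketch (not part of the original test): for the input A < B, B < C, C < D the transitive
    // closure should, at minimum, also relate A < C, A < D and B < D. containsConstraint() is a
    // hypothetical helper; it assumes the getLeft()/getRight() accessors used by the other tests
    // in this changeset.
    private boolean containsConstraint(List<TPHConstraint> cons, String left, String right) {
        for (TPHConstraint c : cons) {
            if (c.getLeft().equals(left) && c.getRight().equals(right)) {
                return true;
            }
        }
        return false;
    }

    public void genericTestWithAssertionsSketch() {
        List<TPHConstraint> closure = FamilyOfGeneratedGenerics.buildTransitiveClosure(fillList());
        assert containsConstraint(closure, "A", "C");
        assert containsConstraint(closure, "A", "D");
        assert containsConstraint(closure, "B", "D");
    }
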
}

@ -1,92 +0,0 @@
package insertGenerics;

import de.dhbwstuttgart.bytecode.genericsGeneratorTypes.GenericGenratorResultForSourceFile;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.syntaxtree.SourceFile;
import de.dhbwstuttgart.syntaxtree.visual.ASTPrinter;
import de.dhbwstuttgart.syntaxtree.visual.ASTTypePrinter;
import de.dhbwstuttgart.typedeployment.TypeInsert;
import de.dhbwstuttgart.typedeployment.TypeInsertFactory;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import org.junit.Test;

import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class TryTest {

    public static final String rootDirectory = System.getProperty("user.dir")+"/src/test/resources/insertGenericsJav/";

    @Test
    public void ggFinder() throws IOException, ClassNotFoundException {
        execute(new File(rootDirectory+"TestGGFinder.jav"));
    }

    private static class TestResultSet{

    }

    public TestResultSet execute(File fileToTest) throws IOException, ClassNotFoundException {
        //filesToTest.add(new File(rootDirectory+"fc.jav"));
        //filesToTest.add(new File(rootDirectory+"Lambda.jav"));
        //filesToTest.add(new File(rootDirectory+"Lambda2.jav"));
        //filesToTest.add(new File(rootDirectory+"Lambda3.jav"));
        //filesToTest.add(new File(rootDirectory+"Vector.jav"));
        //filesToTest.add(new File(rootDirectory+"Generics.jav"));
        //filesToTest.add(new File(rootDirectory+"MethodsEasy.jav"));
        //filesToTest.add(new File(rootDirectory+"Matrix.jav"));
        //filesToTest.add(new File(rootDirectory+"Import.jav"));
        // //filesToTest.add(new File(rootDirectory+"Faculty.jav"));
        // //filesToTest.add(new File(rootDirectory+"mathStruc.jav"));
        // //filesToTest.add(new File(rootDirectory+"test.jav"));
        JavaTXCompiler compiler = new JavaTXCompiler(fileToTest);
        for(File f : compiler.sourceFiles.keySet()){
            SourceFile sf = compiler.sourceFiles.get(f);
            // System.out.println(ASTTypePrinter.print(sf));
            // System.out.println("---------------------------1");
            // System.out.println(ASTPrinter.print(sf));
            // System.out.println("---------------------------2");
        }
        List<ResultSet> results = compiler.typeInference();
        List<GenericGenratorResultForSourceFile> simplifyResultsForAllSourceFiles = compiler.getGeneratedGenericResultsForAllSourceFiles(results);
        //compiler.generateBytecode(rootDirectory+"xxx.class", results, simplifyResultsForAllSourceFiles);
        for(File f : compiler.sourceFiles.keySet()){
            SourceFile sf = compiler.sourceFiles.get(f);
            System.out.println(ASTTypePrinter.print(sf));
            // System.out.println("---------------------------3");
            System.out.println(ASTPrinter.print(sf));
            // System.out.println("---------------------------4");
            //List<ResultSet> results = compiler.typeInference(); PL 2017-10-03 moved in front of the for loop
            assert results.size()>0;
            Set<String> insertedTypes = new HashSet<>();
            for(ResultSet resultSet : results){
                Set<TypeInsert> result = TypeInsertFactory.createTypeInsertPoints(sf, resultSet, results, simplifyResultsForAllSourceFiles);
                assert result.size()>0;
                String content = readFile(f.getPath(), StandardCharsets.UTF_8);
                for(TypeInsert tip : result){
                    insertedTypes.add(tip.insert(content));
                }
            }
            for(String s : insertedTypes){
                System.out.println("---------------------------51");
                System.out.println(s);
                System.out.println("---------------------------52");
            }
        }
        return new TestResultSet();
    }

    static String readFile(String path, Charset encoding)
            throws IOException
    {
        byte[] encoded = Files.readAllBytes(Paths.get(path));
        return new String(encoded, encoding);
    }
}

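Side note: the readFile helper above is a plain byte read followed by a decode. On Java 11 or newer the same helper could be written with Files.readString; a hedged alternative, not part of this changeset:

    static String readFile(String path, Charset encoding) throws IOException {
        return Files.readString(Paths.get(path), encoding);   // Java 11+: reads and decodes in one call
    }
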
@ -1,11 +0,0 @@
class Cycle{

    <A> A und(A a, A b){
        return a;
    }

    m(a){
        return und(m(m(a)), a);
    }

}

@ -1,5 +0,0 @@
class Twice{
    m(x, f){
        return f.apply(f.apply(x));
    }
}

@ -1,17 +0,0 @@
class Generics{
    a;

    id(b) {
        var c = b;
        return c;
    }

    setA(x) {
        a = x;
        return a;
    }

    m(x,y) {
        x = id(y);
    }
}