forked from i21017/JavaCompilerCore
Compare commits
3 Commits
c0d8dc4692
...
58c3947167
| Author | SHA1 | Date |
|---|---|---|
| | 58c3947167 | |
| | 2b9a9c2732 | |
| | 79c1a69ea3 | |
@@ -1,4 +1,3 @@
//PL 2018-12-19: typeInferenceOld transferred into typeInference
package de.dhbwstuttgart.core;

import com.google.common.collect.Lists;
@@ -13,7 +12,6 @@ import de.dhbwstuttgart.parser.SyntaxTreeGenerator.SyntaxTreeGenerator;
import de.dhbwstuttgart.parser.antlr.Java8Parser.CompilationUnitContext;
import de.dhbwstuttgart.parser.scope.GenericsRegistry;
import de.dhbwstuttgart.parser.scope.JavaClassName;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.syntaxtree.*;
import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
import de.dhbwstuttgart.syntaxtree.type.*;
@@ -24,545 +22,476 @@ import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.typeAlgo.TYPE;
import de.dhbwstuttgart.typeinference.unify.UnifyResultListener;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModelParallel;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import org.apache.commons.io.output.NullOutputStream;

import javax.annotation.Nullable;
import java.io.*;
import java.util.*;
import java.util.stream.Collectors;

public class JavaTXCompiler {

//public static JavaTXCompiler INSTANCE;
final CompilationEnvironment environment;
Boolean useResultModel = true;
Optional<String> unificationServer = Optional.empty();
public final Map<File, SourceFile> sourceFiles = new HashMap<>();
Boolean log = false; //indicates whether a log file should be written to System.getProperty("user.dir") + "/logFiles/"
private final DirectoryClassLoader classLoader;
static Writer statistics;

public JavaTXCompiler(File sourceFile) throws IOException, ClassNotFoundException {
this(Arrays.asList(sourceFile), null);
}

public JavaTXCompiler(File sourceFile, Boolean log) throws IOException, ClassNotFoundException {
this(sourceFile);
this.log = log;
}

public JavaTXCompiler(List<File> sourceFiles) throws IOException, ClassNotFoundException {
this(sourceFiles, null);
}

public JavaTXCompiler(List<File> sources, List<File> contextPath) throws IOException, ClassNotFoundException {
//statistics = new FileWriter(new File(System.getProperty("user.dir") + "/" + sources.get(0).getName() + "_"+ new Timestamp(System.currentTimeMillis())));
statistics = new OutputStreamWriter(new NullOutputStream());
statistics.write("test");
if (contextPath == null || contextPath.isEmpty()) {
//When no contextPaths are given, the working directory is the sources root
contextPath = Lists.newArrayList(new File(System.getProperty("user.dir")));
}
classLoader = new DirectoryClassLoader(contextPath, ClassLoader.getSystemClassLoader());
environment = new CompilationEnvironment(sources);
for (File s : sources) {
sourceFiles.put(s, parse(s));
}
//INSTANCE = this;
}

public JavaTXCompiler(List<File> source, List<File> contextPath, String unificationServer) throws IOException, ClassNotFoundException {
this(source, contextPath);
this.unificationServer = unificationServer == null ? Optional.empty() : Optional.of(unificationServer);
}

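A minimal usage sketch of these constructors; the source file name and driver class are hypothetical:

```java
import de.dhbwstuttgart.core.JavaTXCompiler;
import java.io.File;
import java.util.List;

// Hypothetical driver: parse one .jav file, infer its types and emit .class files next to it.
public class CompileExample {
    public static void main(String[] args) throws Exception {
        // Context path is null, so the working directory is used as the sources root.
        JavaTXCompiler compiler = new JavaTXCompiler(List.of(new File("Example.jav")), null);
        // No output path: class files are written alongside the parsed source files.
        compiler.generateBytecode();
    }
}
```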
public ConstraintSet<Pair> getConstraints() throws ClassNotFoundException, IOException {
List<ClassOrInterface> allClasses = new ArrayList<>();//environment.getAllAvailableClasses();
List<ClassOrInterface> importedClasses = new ArrayList<>();
ClassOrInterface objectClass = ASTFactory.createClass(
classLoader.loadClass(new JavaClassName("java.lang.Object").toString())
);

//All classes imported in any of the parsed source files go into the FC (finite closure)
for (File forSourceFile : sourceFiles.keySet()) {
for (JavaClassName name : sourceFiles.get(forSourceFile).getImports()) {
//TODO: imports of the compiler's own (.jav) classes are not considered here
ClassOrInterface importedClass = ASTFactory.createClass(
classLoader.loadClass(name.toString()));
importedClasses.add(importedClass);
}
for (Class<?> c : CompilationEnvironment.loadDefaultPackageClasses(forSourceFile, classLoader)) {
ClassOrInterface importedClass = ASTFactory.createClass(c);
importedClasses.add(importedClass);
}
}
for (File f : this.sourceFiles.keySet()) {
SourceFile sf = sourceFiles.get(f);
sf = new SourceFile(sf.getPkgName(),
sf.KlassenVektor.stream()
.map(cl -> new ClassOrInterface(cl))
.collect(Collectors.toCollection(ArrayList::new)),
sf.imports);
//sf now holds a new SourceFile, new class objects and new
//ArrayList objects for fields, constructors and methods.
//All other objects are merely copied.
SourceFile sf_new = sf;
sf.KlassenVektor.forEach(cl -> addMethods(sf_new, cl, importedClasses, objectClass));
allClasses.addAll(sf.getClasses());
}
allClasses.addAll(importedClasses);
TYPE ty = new TYPE(sourceFiles.values(), allClasses);
return ty.getConstraints();
}

void addMethods(SourceFile sf, ClassOrInterface cl, List<ClassOrInterface> importedClasses, ClassOrInterface objectClass) {
if (!cl.areMethodsAdded()) {
ClassOrInterface superclass = null;
if (cl.getSuperClass().getName().equals(new JavaClassName("java.lang.Object"))) {
superclass = objectClass;
} else {
Optional<ClassOrInterface> optSuperclass =
importedClasses.stream().filter(x -> x.getClassName().equals(
cl.getSuperClass().getName())).findFirst();
if (optSuperclass.isPresent()) {
superclass = optSuperclass.get();
} else {
optSuperclass =
sf.KlassenVektor.stream().filter(x -> x.getClassName().equals(
cl.getSuperClass().getName())).findFirst();
if (optSuperclass.isPresent()) {
superclass = optSuperclass.get();
addMethods(sf, superclass, importedClasses, objectClass);
} else {
//throw new ClassNotFoundException("");
}
}
}
Iterator<RefTypeOrTPHOrWildcardOrGeneric> paraIt = cl.getSuperClass().getParaList().iterator();
Iterator<GenericTypeVar> tvarVarIt = superclass.getGenerics().iterator();

HashMap<String, RefTypeOrTPHOrWildcardOrGeneric> gtvs = new HashMap<>();
while (paraIt.hasNext()) {
gtvs.put(tvarVarIt.next().getName(), paraIt.next());
}
//TODO: PL 2020-05-06: overridden methods still have to be filtered out here
for (Method m : superclass.getMethods()) {
ParameterList newParaList = new ParameterList(
m.getParameterList()
.getFormalparalist()
.stream()
.map(fp -> new FormalParameter(fp.getName(), fp.getType().acceptTV(new TypeExchanger(gtvs)), fp.getOffset()))
.collect(Collectors.toCollection(ArrayList::new)), m.getParameterList().getOffset());
cl.getMethods().add(new Method(m.modifier, m.name, m.getReturnType().acceptTV(new TypeExchanger(gtvs)), newParaList, m.block,
//new GenericDeclarationList(newGenericsList, ((GenericDeclarationList)m.getGenerics()).getOffset()),
(GenericDeclarationList) m.getGenerics(),
m.getOffset(), true));
}

}
cl.setMethodsAdded();
}

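The gtvs map built by addMethods pairs each generic type variable of the superclass with the concrete type argument from the subclass's extends clause; TypeExchanger then substitutes those variables in every inherited parameter and return type. A simplified, stand-alone sketch of that substitution idea (plain strings stand in for the compiler's AST types; all names are illustrative only):

```java
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

// Illustration only: substitute superclass type variables with the subclass's type arguments.
public class GtvSubstitutionSketch {
    public static void main(String[] args) {
        // class Sub extends Super<String, Integer>  with  class Super<K, V> { V get(K key) { ... } }
        List<String> superclassTypeVars = List.of("K", "V");              // superclass.getGenerics()
        List<String> actualTypeArguments = List.of("String", "Integer");  // cl.getSuperClass().getParaList()

        Map<String, String> gtvs = new HashMap<>();
        Iterator<String> args2 = actualTypeArguments.iterator();
        for (String tv : superclassTypeVars) {
            gtvs.put(tv, args2.next());
        }

        // The inherited signature "V get(K)" becomes "Integer get(String)" in the subclass.
        System.out.println(gtvs.getOrDefault("V", "V") + " get(" + gtvs.getOrDefault("K", "K") + ")");
    }
}
```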
public List<ClassOrInterface> getAvailableClasses(SourceFile forSourceFile) throws ClassNotFoundException {
// PL 2018-09-18: replaced List with Set so that each class is only added once
// List<ClassOrInterface> allClasses = new ArrayList<>();//environment.getAllAvailableClasses();
Set<ClassOrInterface> allClasses = new HashSet<>();
List<ClassOrInterface> importedClasses = new ArrayList<>();
for (JavaClassName name : forSourceFile.getImports()) {
// TODO: imports of the compiler's own (.jav) classes are not considered here
ClassOrInterface importedClass = ASTFactory
.createClass(classLoader.loadClass(name.toString()));
importedClasses.add(importedClass);
allClasses.addAll(importedClasses);
}
return new ArrayList<>(allClasses);
}

/*
|
||||
* PL 2018-09-19 geloescht werden bereits in typeInference hinzugefuegt for
|
||||
* (SourceFile sf : sourceFiles.values()) { allClasses.addAll(sf.getClasses());
|
||||
* }
|
||||
*/
|
||||
/*
|
||||
* public List<ResultSet> typeInferenceOld() throws ClassNotFoundException {
|
||||
* List<ClassOrInterface> allClasses = new
|
||||
* ArrayList<>();//environment.getAllAvailableClasses(); //Alle Importierten
|
||||
* Klassen in allen geparsten Sourcefiles kommen ins FC for(SourceFile sf :
|
||||
* this.sourceFiles.values()) { allClasses.addAll(getAvailableClasses(sf));
|
||||
* allClasses.addAll(sf.getClasses()); }
|
||||
*
|
||||
* final ConstraintSet<Pair> cons = getConstraints();
|
||||
*
|
||||
* FiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses);
|
||||
* System.out.println(finiteClosure); ConstraintSet<UnifyPair> unifyCons =
|
||||
* UnifyTypeFactory.convert(cons);
|
||||
*
|
||||
* TypeUnify unify = new TypeUnify(); Set<Set<UnifyPair>> results = new
|
||||
* HashSet<>(); try { File logPath = new
|
||||
* File(System.getProperty("user.dir")+"/target/logFiles/"); logPath.mkdirs();
|
||||
* FileWriter logFile = new FileWriter(new File(logPath, "log"));
|
||||
* logFile.write("FC:\\" + finiteClosure.toString()+"\n"); for(SourceFile sf :
|
||||
* this.sourceFiles.values()) { logFile.write(ASTTypePrinter.print(sf)); }
|
||||
* logFile.flush(); Set<List<Constraint<UnifyPair>>> cardProd =
|
||||
* unifyCons.cartesianProduct(); for (List<Constraint<UnifyPair>> xCons :
|
||||
* cardProd ){ Set<UnifyPair> xConsSet = new HashSet<>(); for
|
||||
* (Constraint<UnifyPair> constraint : xCons) { xConsSet.addAll(constraint); }
|
||||
* //.collect(Collectors.toCollection(ArrayList::new))))
|
||||
* System.out.println(xConsSet); Set<String> methodParaTypeVarNames =
|
||||
* allClasses.stream().map(x -> x.getMethods().stream().map(y ->
|
||||
* y.getParameterList().getFormalparalist() .stream().filter(z -> z.getType()
|
||||
* instanceof TypePlaceholder) .map(z ->
|
||||
* ((TypePlaceholder)z.getType()).getName()).collect(Collectors.toCollection(
|
||||
* HashSet::new))) .reduce(new HashSet<String>(), (a,b) -> { a.addAll(b); return
|
||||
* a;}, (a,b) -> { a.addAll(b); return a;} ) ) .reduce(new HashSet<String>(),
|
||||
* (a,b) -> { a.addAll(b); return a;} );
|
||||
*
|
||||
* Set<String> constructorParaTypeVarNames = allClasses.stream().map(x ->
|
||||
* x.getConstructors().stream().map(y ->
|
||||
* y.getParameterList().getFormalparalist() .stream().filter(z -> z.getType()
|
||||
* instanceof TypePlaceholder) .map(z ->
|
||||
* ((TypePlaceholder)z.getType()).getName()).collect(Collectors.toCollection(
|
||||
* HashSet::new))) .reduce(new HashSet<String>(), (a,b) -> { a.addAll(b); return
|
||||
* a;}, (a,b) -> { a.addAll(b); return a;} ) ) .reduce(new HashSet<String>(),
|
||||
* (a,b) -> { a.addAll(b); return a;} );
|
||||
*
|
||||
* Set<String> paraTypeVarNames = methodParaTypeVarNames;
|
||||
* paraTypeVarNames.addAll(constructorParaTypeVarNames);
|
||||
*
|
||||
* Set<String> returnTypeVarNames = allClasses.stream().map(x ->
|
||||
* x.getMethods().stream().filter(y -> y.getReturnType() instanceof
|
||||
* TypePlaceholder) .map(z ->
|
||||
* ((TypePlaceholder)z.getReturnType()).getName()).collect(Collectors.
|
||||
* toCollection(HashSet::new))).reduce((a,b) -> { a.addAll(b); return a;}
|
||||
* ).get();
|
||||
*
|
||||
* Set<String> fieldTypeVarNames = allClasses.stream().map(x ->
|
||||
* x.getFieldDecl().stream().filter(y -> y.getReturnType() instanceof
|
||||
* TypePlaceholder) .map(z ->
|
||||
* ((TypePlaceholder)z.getReturnType()).getName()).collect(Collectors.
|
||||
* toCollection(HashSet::new))).reduce((a,b) -> { a.addAll(b); return a;}
|
||||
* ).get();
|
||||
*
|
||||
* returnTypeVarNames.addAll(fieldTypeVarNames);
|
||||
*
|
||||
* xConsSet = xConsSet.stream().map(x -> { //Hier muss ueberlegt werden, ob //1.
|
||||
* alle Argument- und Retuntyp-Variablen in allen UnifyPairs // mit
|
||||
* disableWildcardtable() werden. //2. alle Typvariablen mit Argument- oder
|
||||
* Retuntyp-Variablen //in Beziehung auch auf disableWildcardtable() gesetzt
|
||||
* werden muessen //PL 2018-04-23 if ((x.getLhsType() instanceof
|
||||
* PlaceholderType)) { if (paraTypeVarNames.contains(x.getLhsType().getName()))
|
||||
* { ((PlaceholderType)x.getLhsType()).setVariance((byte)1);
|
||||
* ((PlaceholderType)x.getLhsType()).disableWildcardtable(); } if
|
||||
* (returnTypeVarNames.contains(x.getLhsType().getName())) {
|
||||
* ((PlaceholderType)x.getLhsType()).setVariance((byte)-1);
|
||||
* ((PlaceholderType)x.getLhsType()).disableWildcardtable(); } } if
|
||||
* ((x.getRhsType() instanceof PlaceholderType)) { if
|
||||
* (paraTypeVarNames.contains(x.getRhsType().getName())) {
|
||||
* ((PlaceholderType)x.getRhsType()).setVariance((byte)1);
|
||||
* ((PlaceholderType)x.getRhsType()).disableWildcardtable(); } if
|
||||
* (returnTypeVarNames.contains(x.getRhsType().getName())) {
|
||||
* ((PlaceholderType)x.getRhsType()).setVariance((byte)-1);
|
||||
* ((PlaceholderType)x.getRhsType()).disableWildcardtable(); } } return x;//HIER
|
||||
* DIE JEWEILS RECHT BZW. LINKE SEITE AUF GLEICHE VARIANZ SETZEN WIE DIE JEWEILS
|
||||
* ANDERE SEITE }).map( y -> { if ((y.getLhsType() instanceof PlaceholderType)
|
||||
* && (y.getRhsType() instanceof PlaceholderType)) { if
|
||||
* (((PlaceholderType)y.getLhsType()).getVariance() != 0 &&
|
||||
* ((PlaceholderType)y.getRhsType()).getVariance() == 0) {
|
||||
* ((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType(
|
||||
* )).getVariance()); } if (((PlaceholderType)y.getLhsType()).getVariance() == 0
|
||||
* && ((PlaceholderType)y.getRhsType()).getVariance() != 0) {
|
||||
* ((PlaceholderType)y.getLhsType()).setVariance(((PlaceholderType)y.getRhsType(
|
||||
* )).getVariance()); } } return y; } )
|
||||
* .collect(Collectors.toCollection(HashSet::new));
|
||||
* varianceInheritance(xConsSet); Set<Set<UnifyPair>> result =
|
||||
* unify.unifySequential(xConsSet, finiteClosure, logFile, log);
|
||||
* //Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
|
||||
* System.out.println("RESULT: " + result); logFile.write("RES: " +
|
||||
* result.toString()+"\n"); logFile.flush(); results.addAll(result); }
|
||||
*
|
||||
* results = results.stream().map(x -> { Optional<Set<UnifyPair>> res = new
|
||||
* RuleSet().subst(x.stream().map(y -> { if (y.getPairOp() ==
|
||||
* PairOperator.SMALLERDOTWC) y.setPairOp(PairOperator.EQUALSDOT); return y;
|
||||
* //alle Paare a <.? b erden durch a =. b ersetzt
|
||||
* }).collect(Collectors.toCollection(HashSet::new))); if (res.isPresent())
|
||||
* {//wenn subst ein Erg liefert wurde was veraendert return new
|
||||
* TypeUnifyTask().applyTypeUnificationRules(res.get(), finiteClosure); } else
|
||||
* return x; //wenn nichts veraendert wurde wird x zurueckgegeben
|
||||
* }).collect(Collectors.toCollection(HashSet::new));
|
||||
* System.out.println("RESULT Final: " + results); logFile.write("RES_FINAL: " +
|
||||
* results.toString()+"\n"); logFile.flush(); logFile.write("PLACEHOLDERS: " +
|
||||
* PlaceholderType.EXISTING_PLACEHOLDERS); logFile.flush(); } catch (IOException
|
||||
* e) { e.printStackTrace(); } return results.stream().map((unifyPairs -> new
|
||||
* ResultSet(UnifyTypeFactory.convert(unifyPairs,
|
||||
* generateTPHMap(cons))))).collect(Collectors.toList()); }
|
||||
*/
|
||||
|
||||
/**
|
||||
* Vererbt alle Variancen bei Paaren (a <. theta) oder (Theta <. a) wenn a eine
|
||||
* Variance !=0 hat auf alle Typvariablen in Theta.
|
||||
*/
|
||||
/*
|
||||
* private void varianceInheritance(Set<UnifyPair> eq) { Set<PlaceholderType>
|
||||
* usedTPH = new HashSet<>(); Set<PlaceholderType> phSet = eq.stream().map(x ->
|
||||
* { Set<PlaceholderType> pair = new HashSet<>(); if (x.getLhsType() instanceof
|
||||
* PlaceholderType) pair.add((PlaceholderType)x.getLhsType()); if
|
||||
* (x.getRhsType() instanceof PlaceholderType)
|
||||
* pair.add((PlaceholderType)x.getRhsType()); return pair; }).reduce(new
|
||||
* HashSet<>(), (a,b) -> { a.addAll(b); return a;} , (c,d) -> { c.addAll(d);
|
||||
* return c;});
|
||||
*
|
||||
* ArrayList<PlaceholderType> phSetVariance = new ArrayList<>(phSet);
|
||||
* phSetVariance.removeIf(x -> (x.getVariance() == 0));
|
||||
* while(!phSetVariance.isEmpty()) { PlaceholderType a =
|
||||
* phSetVariance.remove(0); usedTPH.add(a); //HashMap<PlaceholderType,Integer>
|
||||
* ht = new HashMap<>(); //ht.put(a, a.getVariance()); Set<UnifyPair> eq1 = new
|
||||
* HashSet<>(eq); eq1.removeIf(x -> !(x.getLhsType() instanceof PlaceholderType
|
||||
* && ((PlaceholderType)x.getLhsType()).equals(a))); eq1.stream().forEach(x -> {
|
||||
* x.getRhsType().accept(new distributeVariance(), a.getVariance());}); eq1 =
|
||||
* new HashSet<>(eq); eq1.removeIf(x -> !(x.getRhsType() instanceof
|
||||
* PlaceholderType && ((PlaceholderType)x.getRhsType()).equals(a)));
|
||||
* eq1.stream().forEach(x -> { x.getLhsType().accept(new distributeVariance(),
|
||||
* a.getVariance());}); phSetVariance = new ArrayList<>(phSet);
|
||||
* phSetVariance.removeIf(x -> (x.getVariance() == 0 || usedTPH.contains(x))); }
|
||||
* }
|
||||
*/
|
||||
/**
* TODO: can this be removed?
*
* @param resultListener
* @param logFile
* @return
* @throws ClassNotFoundException
* @throws IOException
*/
public UnifyResultModelParallel typeInferenceAsync(UnifyResultListener resultListener, Writer logFile)
throws ClassNotFoundException, IOException {
/*
* Gather all available (imported) classes from all parsed source files for putting them into the finite closure
*/
List<ClassOrInterface> allClasses = new ArrayList<>();// environment.getAllAvailableClasses();
for (File f : this.sourceFiles.keySet()) {
SourceFile sf = sourceFiles.get(f);
allClasses.addAll(getAvailableClasses(sf));
allClasses.addAll(sf.getClasses());
allClasses.addAll(CompilationEnvironment.loadDefaultPackageClasses(f, classLoader).stream().map(ASTFactory::createClass).collect(Collectors.toList()));
}

/*
* Run the actual type inference process
*/
TypeInference typeInference = new TypeInference(
getConstraints(),
allClasses,
classLoader,
useResultModel,
log,
logFile,
statistics
);
return typeInference.executeAsync(resultListener);
}

public List<ResultSet> typeInference() throws ClassNotFoundException, IOException {

/*
* Gather all available (imported) classes from all parsed source files for putting them into the finite closure
*/
List<ClassOrInterface> allClasses = new ArrayList<>();// environment.getAllAvailableClasses();
for (File f : this.sourceFiles.keySet()) {
SourceFile sf = sourceFiles.get(f);
allClasses.addAll(getAvailableClasses(sf));
allClasses.addAll(sf.getClasses());
allClasses.addAll(CompilationEnvironment.loadDefaultPackageClasses(f,classLoader).stream().map(ASTFactory::createClass).toList());
}

/*
* Generate log file writer
*/
Writer logFile;
try {
// TODO: check if this makes sense and does not only always use the first source file key...
logFile = log ? new FileWriter(System.getProperty("user.dir") + "/logFiles/" + "log_" + sourceFiles.keySet().iterator().next().getName())
: new OutputStreamWriter(new NullOutputStream());
} catch (IOException ioException) {
System.err.println("IO Exception: " + ioException.getMessage());
logFile = new OutputStreamWriter(new NullOutputStream());
}

/*
* Run the actual type inference process
*/
TypeInference typeInference = new TypeInference(
getConstraints(),
allClasses,
classLoader,
useResultModel,
log,
logFile,
statistics
);
return typeInference.execute(this.unificationServer);
}

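The result list returned by typeInference() can be reused directly, for example to generate bytecode once and then inspect the computed generics. A sketch, assuming an existing compiler instance and a hypothetical output directory:

```java
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import java.io.File;
import java.util.List;

// Sketch: run inference once, reuse the results for codegen and for the generics lookup.
class InferenceThenCodegen {
    static void compileTo(JavaTXCompiler compiler, File outputDir) throws Exception {
        List<ResultSet> results = compiler.typeInference();
        compiler.generateBytecode(outputDir, results);
        compiler.getGeneratedGenerics().forEach((file, generics) ->
                System.out.println(file.getName() + ": " + generics));
    }
}
```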
private SourceFile parse(File sourceFile) throws IOException, java.lang.ClassNotFoundException {
CompilationUnitContext tree = JavaTXParser.parse(sourceFile);
SyntaxTreeGenerator generator = new SyntaxTreeGenerator(environment.getRegistry(sourceFile, classLoader),
new GenericsRegistry(null));
return generator.convert(tree, environment.packageCrawler, classLoader);
}

public void generateBytecode() throws ClassNotFoundException, IOException {
generateBytecode((File) null);
}

/**
* @param path - can be null, then class file output is in the same directory as the parsed source files
*/
public void generateBytecode(String path) throws ClassNotFoundException, IOException {
if (path != null)
generateBytecode(new File(path));
else
generateBytecode();
}

/**
* @param path - output directory, can be null, then class file output is in the same directory as the parsed source files
*/
public void generateBytecode(File path) throws ClassNotFoundException, IOException {
List<ResultSet> typeinferenceResult = this.typeInference();
generateBytecode(path, typeinferenceResult);
}

private final Map<File, List<GenericsResult>> generatedGenerics = new HashMap<>();

// TODO This is a temporary solution, we should integrate with the old API for getting Generics
public Map<File, List<GenericsResult>> getGeneratedGenerics() {
return generatedGenerics;
}

/**
* @param outputPath - can be null, then class file output is in the same directory as the parsed source files
* @param typeinferenceResult
* @throws IOException
*/
public void generateBytecode(@Nullable File outputPath, List<ResultSet> typeinferenceResult) throws IOException {
for (File f : sourceFiles.keySet()) {
SourceFile sf = sourceFiles.get(f);
File path;
if (outputPath == null) {
path = f.getParentFile(); //Set path to path of the parsed .jav file
} else {
path = new File(outputPath, sf.getPkgName().replace(".", "/")); //add package path to root path
}

var converter = new ASTToTargetAST(typeinferenceResult, sf, classLoader);
var generatedClasses = new HashMap<JavaClassName, byte[]>();
for (var clazz : sf.getClasses()) {
var codegen = new Codegen(converter.convert(clazz));
var code = codegen.generate();
generatedClasses.put(clazz.getClassName(), code);
converter.auxiliaries.forEach((name, source) -> {
generatedClasses.put(new JavaClassName(name), source);
});
}
generatedGenerics.put(f, converter.computedGenerics());
writeClassFile(generatedClasses, path);
}
}

private void writeClassFile(HashMap<JavaClassName, byte[]> classFiles, File path) throws IOException {
FileOutputStream output;
for (JavaClassName name : classFiles.keySet()) {
byte[] bytecode = classFiles.get(name);
System.out.println("generating " + name + ".class file ...");
// output = new FileOutputStream(new File(System.getProperty("user.dir") +
// "/testBytecode/generatedBC/" +name+".class"));
File outputFile = new File(path, name.getClassName() + ".class");
outputFile.getAbsoluteFile().getParentFile().mkdirs();
output = new FileOutputStream(outputFile);
output.write(bytecode);
output.close();
System.out.println(name + ".class file generated");
}
}

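writeClassFile leaves the FileOutputStream open if write() throws; a sketch of an equivalent variant using try-with-resources (not part of this change):

```java
// Sketch: same behaviour, but the stream is closed even when write() fails.
private void writeClassFile(HashMap<JavaClassName, byte[]> classFiles, File path) throws IOException {
    for (JavaClassName name : classFiles.keySet()) {
        File outputFile = new File(path, name.getClassName() + ".class");
        outputFile.getAbsoluteFile().getParentFile().mkdirs();
        try (FileOutputStream output = new FileOutputStream(outputFile)) {
            output.write(classFiles.get(name));
        }
        System.out.println(name + ".class file generated");
    }
}
```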
public List<GenericGenratorResultForSourceFile> getGeneratedGenericResultsForAllSourceFiles(List<ResultSet> results) {
// FIXME
return null;
}

/* PL 2020-03-17: merge with TypeExchanger in FCGenerator.java */
/**
* Replaces the GTVs in a type with the corresponding types from the given map.
*/
private static class TypeExchanger implements TypeVisitor<RefTypeOrTPHOrWildcardOrGeneric> {

private final HashMap<String, RefTypeOrTPHOrWildcardOrGeneric> gtvs;

TypeExchanger(HashMap<String, RefTypeOrTPHOrWildcardOrGeneric> gtvs) {
this.gtvs = gtvs;
}

@Override
public RefTypeOrTPHOrWildcardOrGeneric visit(RefType refType) {
List<RefTypeOrTPHOrWildcardOrGeneric> params = new ArrayList<>();
for (RefTypeOrTPHOrWildcardOrGeneric param : refType.getParaList()) {
params.add(param.acceptTV(this));
}
return new RefType(refType.getName(), params, new NullToken());
}

@Override
public RefTypeOrTPHOrWildcardOrGeneric visit(SuperWildcardType superWildcardType) {
return new SuperWildcardType(superWildcardType.getInnerType().acceptTV(this), superWildcardType.getOffset());
}

@Override
public RefTypeOrTPHOrWildcardOrGeneric visit(TypePlaceholder typePlaceholder) {
return typePlaceholder; //a TypePlaceholder that is inherited cannot be instantiated during inheritance
}

@Override
public RefTypeOrTPHOrWildcardOrGeneric visit(ExtendsWildcardType extendsWildcardType) {
return new ExtendsWildcardType(extendsWildcardType.getInnerType().acceptTV(this), extendsWildcardType.getOffset());
}

@Override
public RefTypeOrTPHOrWildcardOrGeneric visit(GenericRefType genericRefType) {
if (!gtvs.containsKey(genericRefType.getParsedName()))
throw new DebugException("Cannot visit unknown generic type variable!");
return gtvs.get(genericRefType.getParsedName());
}

@@ -4,12 +4,12 @@ import de.dhbwstuttgart.server.SocketServer;

public class JavaTXServer {

JavaTXServer(int port) {
try {
SocketServer socketServer = new SocketServer(port);
socketServer.start();
} catch (Exception e) {
e.printStackTrace();
}
}
}

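For context, the server side is started simply by constructing JavaTXServer with a port. A hypothetical entry point (class name and port number are made up, and it would have to live in the same package as JavaTXServer because the constructor is package-private):

```java
// Sketch: hypothetical entry point that starts the unification server on port 8080.
public class ServerMain {
    public static void main(String[] args) {
        new JavaTXServer(8080); // the constructor creates and starts the SocketServer
    }
}
```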
@@ -1,26 +1,21 @@
package de.dhbwstuttgart.server;

import com.fasterxml.jackson.core.JsonProcessingException;
import de.dhbwstuttgart.server.packet.*;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

import java.net.URI;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.java_websocket.client.WebSocketClient;
import org.java_websocket.enums.ReadyState;
import org.java_websocket.handshake.ServerHandshake;

/**
@@ -28,111 +23,111 @@ import org.java_websocket.handshake.ServerHandshake;
*/
public class SocketClient extends WebSocketClient {

// use a latch to wait until the connection is closed by the remote host
private final CountDownLatch closeLatch = new CountDownLatch(1);
// temporarily: The received unify result packet
private UnifyResultPacket unifyResultPacket = null;

public SocketClient(String url) {
super(URI.create(url));
// make sure the url is in a valid format
final String regex = "^wss?://(\\w+(\\.\\w+)?)*:(\\d+)$";
final Matcher matcher = Pattern.compile(regex).matcher(url);
if (!matcher.find()) {
throw new RuntimeException("Provided string \"" + url + "\" is not a valid server URL! Use pattern ws(s?)://<host.name>:<port>");
}
}

public SocketClient(String host, int port, boolean secure) {
super(URI.create(String.format("%s://%s:%d/", secure ? "wss" : "ws", host, port)));
}

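The pattern used in the SocketClient(String url) constructor accepts host:port WebSocket URLs without a path or trailing slash. A quick stand-alone check of that regex (example URLs and class name are made up):

```java
import java.util.regex.Pattern;

// Sketch: which strings the constructor's validation pattern accepts.
public class UrlPatternCheck {
    public static void main(String[] args) {
        Pattern p = Pattern.compile("^wss?://(\\w+(\\.\\w+)?)*:(\\d+)$");
        System.out.println(p.matcher("ws://localhost:8080").find());          // true
        System.out.println(p.matcher("wss://unify.example.com:443").find());  // true
        System.out.println(p.matcher("http://localhost:8080").find());        // false: wrong scheme
        System.out.println(p.matcher("ws://localhost:8080/path").find());     // false: no path allowed
    }
}
```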
/**
* The main method for connecting, requesting and waiting for the server to unify.
* This is synchronized to prevent multiple WebSocket connections at the moment, but it is not called from any
* thread except the main thread right now and is probably not necessary at all. Maybe remove it later.
*/
synchronized public List<ResultSet> execute(
IFiniteClosure finiteClosure,
ConstraintSet<Pair> constraints,
ConstraintSet<UnifyPair> unifyConstraints
) throws JsonProcessingException {
try {
// wait for the connection to be set up
this.connectBlocking();
// make sure the connection has been established successfully
if (this.getReadyState() != ReadyState.OPEN) {
throw new RuntimeException("WebSocket Client could not connect to remote host at " + this.uri);
}

// send the unify task request
UnifyRequestPacket packet = UnifyRequestPacket.create(finiteClosure, constraints, unifyConstraints);
String json = PacketContainer.serialize(packet);
this.send(json);

// block the thread, until the connection is closed by the remote host (usually after sending the results)
this.waitUntilClosed();
// wait for the connection to fully close
this.closeBlocking();
} catch (InterruptedException exception) {
System.err.println("Interrupted: " + exception);
this.notifyAll();
}

// detect error cases, in which no error was thrown, but also no result was sent back from the server
if (this.unifyResultPacket == null) {
throw new RuntimeException("Did not receive server response but closed connection already");
}

return unifyResultPacket.getResultSet();
}

/**
* Specific client-side implementations to handle incoming packets
*/
protected void handleReceivedPacket(IPacket packet) {
if (packet instanceof InvalidPacket) {
System.err.println("[socket] " + ((InvalidPacket) packet).error);
} else if (packet instanceof MessagePacket) {
System.out.println("[socket] " + ((MessagePacket) packet).message);
} else if (packet instanceof ErrorPacket) {
System.err.println("[socket] " + ((ErrorPacket) packet).error);
} else if (packet instanceof UnifyResultPacket) {
System.out.println("[socket] Received unify result");
unifyResultPacket = (UnifyResultPacket) packet;
}
}

@Override
|
||||
public void onMessage(String message) {
|
||||
// System.out.println("received: " + message);
|
||||
IPacket packet = PacketContainer.deserialize(message);
|
||||
this.handleReceivedPacket(packet);
|
||||
}
|
||||
@Override
|
||||
public void onOpen(ServerHandshake handshakedata) {
|
||||
System.out.println("Connected to server with status " + handshakedata.getHttpStatus());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onClose(int code, String reason, boolean remote) {
|
||||
System.out.println(
|
||||
"Disconnected from server " +
|
||||
"with code " + code + " " +
|
||||
(reason.isEmpty() ? "" : "and reason " + reason + " ") +
|
||||
"(closed by remote: " + remote + ")"
|
||||
);
|
||||
this.closeLatch.countDown();
|
||||
}
|
||||
@Override
|
||||
public void onMessage(String message) {
|
||||
// System.out.println("received: " + message);
|
||||
IPacket packet = PacketContainer.deserialize(message);
|
||||
this.handleReceivedPacket(packet);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onError(Exception e) {
|
||||
System.out.println("Error: " + e.getMessage());
|
||||
e.printStackTrace();
|
||||
}
|
||||
@Override
|
||||
public void onClose(int code, String reason, boolean remote) {
|
||||
System.out.println(
|
||||
"Disconnected from server " +
|
||||
"with code " + code + " " +
|
||||
(reason.isEmpty() ? "" : "and reason " + reason + " ") +
|
||||
"(closed by remote: " + remote + ")"
|
||||
);
|
||||
this.closeLatch.countDown();
|
||||
}
|
||||
|
||||
public void waitUntilClosed() throws InterruptedException {
|
||||
closeLatch.await();
|
||||
}
|
||||
@Override
|
||||
public void onError(Exception e) {
|
||||
System.out.println("Error: " + e.getMessage());
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
public void waitUntilClosed() throws InterruptedException {
|
||||
closeLatch.await();
|
||||
}
|
||||
}
|
||||
|
||||
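The execute() method above is the whole client-side protocol: connect, send one UnifyRequestPacket, block until the server closes the connection, then return the deserialized result sets. A minimal caller-side sketch, not part of this changeset, could look like the following; the host, port and surrounding class are placeholders, and the import path assumes SocketClient lives in de.dhbwstuttgart.server alongside SocketServer.

import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

import java.util.List;

// Hypothetical caller: delegates one unification to a remote server.
public class RemoteUnifyExample {
    public static List<ResultSet> unifyRemotely(
            IFiniteClosure finiteClosure,
            ConstraintSet<Pair> constraints,
            ConstraintSet<UnifyPair> unifyConstraints) throws Exception {
        // "localhost", 8080 and false (plain ws://) are example values, not taken from the diff.
        SocketClient client = new SocketClient("localhost", 8080, false);
        // execute() connects, sends the request and blocks until the server has answered and closed.
        return client.execute(finiteClosure, constraints, unifyConstraints);
    }
}
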
@@ -6,12 +6,12 @@ package de.dhbwstuttgart.server;
 */
public class SocketData {

    public final String id;
    // used for the timeout of 10 seconds, until an unused open connection is automatically closed
    public boolean hasSentTask = false;

    public SocketData(String id) {
        this.id = id;
    }

}

@@ -1,138 +1,135 @@
package de.dhbwstuttgart.server;

import de.dhbwstuttgart.server.packet.IPacket;
import de.dhbwstuttgart.server.packet.MessagePacket;
import de.dhbwstuttgart.server.packet.PacketContainer;
import de.dhbwstuttgart.server.packet.UnifyRequestPacket;
import de.dhbwstuttgart.server.packet.UnifyResultPacket;
import de.dhbwstuttgart.server.packet.*;
import de.dhbwstuttgart.typeinference.TypeInference;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import org.java_websocket.WebSocket;
import org.java_websocket.handshake.ClientHandshake;
import org.java_websocket.server.WebSocketServer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.net.InetSocketAddress;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class SocketServer extends WebSocketServer {

    private static final Logger log = LoggerFactory.getLogger(SocketServer.class);

    public SocketServer(int port) {
        super(new InetSocketAddress(port));
    }

    @Override
    public void onOpen(WebSocket webSocket, ClientHandshake clientHandshake) {
        System.out.println("New connection: " + webSocket.getResourceDescriptor());
        webSocket.setAttachment(new SocketData(UUID.randomUUID().toString()));

        try {
            sendMessage(webSocket, "Welcome to the server!");

            // wait 10 seconds for the client to send a task and close the connection, if nothing has been received until then
            ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
            Runnable task = () -> {
                if (webSocket.<SocketData>getAttachment().hasSentTask || !webSocket.isOpen()) {
                    return;
                }
                sendMessage(webSocket, "No task received after 10 seconds. Closing connection...");
                webSocket.close();
            };
            executor.schedule(task, 10, TimeUnit.SECONDS);
            executor.shutdown();

        // and finally, when your program wants to exit
        } catch (Exception e) {
            log.error("e: ", e);
            webSocket.close(1, e.getMessage());
        }
    }

    @Override
    public void onClose(WebSocket webSocket, int code, String reason, boolean remote) {
        System.out.println("Connection closed: " + webSocket.getResourceDescriptor());
        System.out.println(
                "Disconnected client " + webSocket.getResourceDescriptor() + " " +
                "with code " + code + " " +
                (reason.isEmpty() ? "" : "and reason " + reason + " ") +
                "(closed by client: " + remote + ")"
        );
    }

    @Override
    public void onMessage(WebSocket webSocket, String s) {
        // System.out.println("Received: " + s.substring(0, 50));
        IPacket reconstructedPacket = PacketContainer.deserialize(s);
        this.onPacketReceived(webSocket, reconstructedPacket);
    }

    @Override
    public void onError(WebSocket webSocket, Exception e) {
        webSocket.close();
    }

    @Override
    public void onStart() {
        System.out.println("Websocket server started");
    }

    /**
     * A shorthand method for sending informational messages to the client
     */
    public void sendMessage(WebSocket webSocket, String text) {
        try {
            MessagePacket message = new MessagePacket();
            message.message = text;
            webSocket.send(PacketContainer.serialize(message));
        } catch (Exception e) {
            System.err.println("Failed to send message: " + text);
            System.err.println(e);
        }
    }

    /**
     * The server-side implementation on how to handle certain packets when received
     */
    private void onPacketReceived(WebSocket webSocket, IPacket packet) {
        if (packet instanceof UnifyRequestPacket unifyRequestPacket) {

            // TODO: this static property will be a problem once we send more than one request per server and
            // should be replaced by a dynamic object property
            PlaceholderType.EXISTING_PLACEHOLDERS.clear();
            sendMessage(webSocket, "You requested a unify! Please wait until I calculated everything...");
            System.out.println("Client " + webSocket.<SocketData>getAttachment().id + " requested a unification. Starting now...");
            webSocket.<SocketData>getAttachment().hasSentTask = true;

            try {
                // start the unification algorithm from the received data
                List<ResultSet> result = TypeInference.executeWithoutContext(
                        unifyRequestPacket.retrieveFiniteClosure(),
                        unifyRequestPacket.retrieveConstraints(),
                        unifyRequestPacket.retrieveUnifyConstraints()
                );
                System.out.println("Finished unification for client " + webSocket.<SocketData>getAttachment().id);
                sendMessage(webSocket, "Unification finished. Found " + result.size() + " result sets");

                if (webSocket.isOpen()) {
                    UnifyResultPacket resultPacket = UnifyResultPacket.create(result);
                    webSocket.send(PacketContainer.serialize(resultPacket));
                }
            } catch (Exception e) {
                System.err.println(e);
                log.error("e: ", e);
            }

            webSocket.close();
        } else {
            sendMessage(webSocket, "The packet of type " + packet.getClass().getName() + " is not handled by the server!");
        }
    }
}

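SocketServer only reacts to events, so something still has to construct and start it. A minimal bootstrap sketch follows; it is not part of the diff, the main class and default port are assumptions, and start() is the method inherited from org.java_websocket's WebSocketServer (onStart() above then prints the startup message).

import de.dhbwstuttgart.server.SocketServer;

// Hypothetical bootstrap: listens for unify requests until the process is stopped.
public class ServerMain {
    public static void main(String[] args) {
        // example default port; real deployments would pass their own
        int port = args.length > 0 ? Integer.parseInt(args[0]) : 8080;
        SocketServer server = new SocketServer(port);
        server.start(); // non-blocking; the WebSocket library runs its own selector thread
    }
}
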
@@ -5,10 +5,10 @@ package de.dhbwstuttgart.server.packet;
 */
public class ErrorPacket implements IPacket {

    /**
     * The error message from the server, that should be logged or output
     */
    public String error;

}

@@ -7,12 +7,11 @@ package de.dhbwstuttgart.server.packet;
 * - Have only serializable public properties (or disable them via jackson annotations)
 * A packet should have, for easy usage and consistency:
 * - a static create() method
 *
 */
public interface IPacket {

    interface IDataContainer<T> {
        T toObject();
    }

}

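The interface comment spells out the packet conventions: only serializable public fields, and a static create() factory. A hypothetical new packet following those rules could look like the sketch below. PingPacket is illustrative only, it is not part of the changeset, and it would additionally have to be registered in PacketContainer's serialize and deserialize methods, as the comments there note.

import de.dhbwstuttgart.server.packet.IPacket;

// Hypothetical example packet, following the conventions listed above.
public class PingPacket implements IPacket {

    // only serializable public properties
    public long sentAtMillis;

    // a static create() method for easy, consistent construction
    public static PingPacket create() {
        PingPacket packet = new PingPacket();
        packet.sentAtMillis = System.currentTimeMillis();
        return packet;
    }
}
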
@@ -5,9 +5,9 @@ package de.dhbwstuttgart.server.packet;
 */
public class InvalidPacket implements IPacket {

    /**
     * If available, the error that caused this packet to appear
     */
    public String error = "<unknown error>";

}

@@ -5,9 +5,9 @@ package de.dhbwstuttgart.server.packet;
 */
public class MessagePacket implements IPacket {

    /**
     * The informational message from the server, that should be logged or output
     */
    public String message;

}

@@ -11,74 +11,74 @@ import com.fasterxml.jackson.databind.ObjectMapper;
@JsonInclude(JsonInclude.Include.NON_NULL)
public class PacketContainer {

    // The jackson serializer / deserializer tool
    private static final ObjectMapper objectMapper = new ObjectMapper();

    /*
     * The available packet types. The one type that is represented in the JSON should always be the ONLY non-null value.
     * They have to be public (for the moment) to let jackson fill them in while deserializing
     */
    public ErrorPacket errorPacket = null;
    public MessagePacket messagePacket = null;
    public InvalidPacket invalidPacket = null;
    public UnifyRequestPacket unifyRequestPacket = null;
    public UnifyResultPacket unifyResultPacket = null;

    /**
     * Generate the JSON string for the given packet
     *
     * @param packet The packet to serialize
     * @return The json representation of the packet
     */
    public static String serialize(IPacket packet) throws JsonProcessingException {
        objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
        PacketContainer container = new PacketContainer();

        if (packet instanceof ErrorPacket)
            container.errorPacket = (ErrorPacket) packet;
        else if (packet instanceof MessagePacket)
            container.messagePacket = (MessagePacket) packet;
        else if (packet instanceof UnifyRequestPacket)
            container.unifyRequestPacket = (UnifyRequestPacket) packet;
        else if (packet instanceof UnifyResultPacket)
            container.unifyResultPacket = (UnifyResultPacket) packet;
        // Add new packets here and in the deserialize method

        return objectMapper.writeValueAsString(container);
    }

    /**
     * Use the JSON string to generate the matching packet object
     *
     * @param json The serialized representation of a packet container
     * @return The deserialized Packet object
     */
    public static IPacket deserialize(String json) {
        objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);

        try {
            PacketContainer container = objectMapper.readValue(json, PacketContainer.class);

            if (container.errorPacket != null)
                return container.errorPacket;
            if (container.messagePacket != null)
                return container.messagePacket;
            if (container.invalidPacket != null)
                return container.invalidPacket;
            if (container.unifyRequestPacket != null)
                return container.unifyRequestPacket;
            if (container.unifyResultPacket != null)
                return container.unifyResultPacket;
            // Add new packets here and in the serialize method

            throw new RuntimeException("Cannot map received json to any known packet class");
        } catch (Exception e) {
            InvalidPacket packet = new InvalidPacket();
            packet.error = e.getMessage();
            return packet;
        }
    }
}

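A round trip through PacketContainer is one call in each direction, and deserialize() never throws: any parsing problem is wrapped in an InvalidPacket. The sketch below is illustrative and not from the diff; given the field names and the NON_NULL inclusion, the serialized form of a MessagePacket should be an envelope along the lines of {"messagePacket":{"message":"hello"}}, though the exact JSON layout is an assumption here.

import com.fasterxml.jackson.core.JsonProcessingException;
import de.dhbwstuttgart.server.packet.IPacket;
import de.dhbwstuttgart.server.packet.InvalidPacket;
import de.dhbwstuttgart.server.packet.MessagePacket;
import de.dhbwstuttgart.server.packet.PacketContainer;

public class PacketRoundTripExample {
    public static void main(String[] args) throws JsonProcessingException {
        MessagePacket hello = new MessagePacket();
        hello.message = "hello";

        String json = PacketContainer.serialize(hello);      // envelope with only messagePacket set
        IPacket back = PacketContainer.deserialize(json);     // yields the MessagePacket again
        System.out.println(back instanceof MessagePacket);    // true

        IPacket broken = PacketContainer.deserialize("not json");
        System.out.println(broken instanceof InvalidPacket);  // true: parse errors become InvalidPacket
    }
}
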
@@ -14,35 +14,35 @@ import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
 */
public class UnifyRequestPacket implements IPacket {

    public SerializedFiniteClosure finiteClosure;
    public SerializedPairConstraintSet constraints;
    public SerializedUnifyConstraintSet unifyConstraints;

    public static UnifyRequestPacket create(
            IFiniteClosure finiteClosure,
            ConstraintSet<Pair> constraints,
            ConstraintSet<UnifyPair> unifyConstraints
    ) {
        UnifyRequestPacket packet = new UnifyRequestPacket();
        packet.finiteClosure = SerializedFiniteClosure.create(finiteClosure);
        packet.constraints = SerializedPairConstraintSet.create(constraints);
        packet.unifyConstraints = SerializedUnifyConstraintSet.create(unifyConstraints);
        return packet;
    }

    @JsonIgnore
    public IFiniteClosure retrieveFiniteClosure() {
        return this.finiteClosure.toObject();
    }

    @JsonIgnore
    public ConstraintSet<Pair> retrieveConstraints() {
        return this.constraints.toObject();
    }

    @JsonIgnore
    public ConstraintSet<UnifyPair> retrieveUnifyConstraints() {
        return this.unifyConstraints.toObject();
    }

}

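The request packet is simply the three inputs of the unification, each wrapped in its serializable counterpart: create() wraps them on the client and the retrieve*() methods unwrap them again on the server. A compact, illustrative sketch of that symmetry (the helper class is hypothetical):

import com.fasterxml.jackson.core.JsonProcessingException;
import de.dhbwstuttgart.server.packet.PacketContainer;
import de.dhbwstuttgart.server.packet.UnifyRequestPacket;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

public class UnifyRequestExample {
    // Client side: wrap the inputs and turn them into the JSON that goes over the socket.
    public static String buildRequestJson(IFiniteClosure fc,
                                          ConstraintSet<Pair> cons,
                                          ConstraintSet<UnifyPair> unifyCons) throws JsonProcessingException {
        UnifyRequestPacket packet = UnifyRequestPacket.create(fc, cons, unifyCons);
        return PacketContainer.serialize(packet);
    }

    // Server side: the mirror image, as used by SocketServer.onPacketReceived above.
    public static IFiniteClosure unwrapFiniteClosure(UnifyRequestPacket packet) {
        return packet.retrieveFiniteClosure();
    }
}
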
@@ -3,6 +3,7 @@ package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.packet.dataContainers.SerializedResultSet;
import de.dhbwstuttgart.typeinference.result.ResultSet;

import java.util.Arrays;
import java.util.List;

@@ -11,17 +12,17 @@ import java.util.List;
 */
public class UnifyResultPacket implements IPacket {

    public SerializedResultSet[] results;

    public static UnifyResultPacket create(List<ResultSet> resultSets) {
        UnifyResultPacket serialized = new UnifyResultPacket();
        serialized.results = resultSets.stream().map(SerializedResultSet::create).toArray(SerializedResultSet[]::new);
        return serialized;
    }

    @JsonIgnore
    public List<ResultSet> getResultSet() {
        return Arrays.stream(this.results).map(SerializedResultSet::toObject).toList();
    }

}

@@ -3,6 +3,7 @@ package de.dhbwstuttgart.server.packet.dataContainers;
import de.dhbwstuttgart.server.packet.IPacket;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;

import java.util.Arrays;
import java.util.HashSet;

@@ -12,19 +13,19 @@ import java.util.HashSet;
 * @see IFiniteClosure
 */
public class SerializedFiniteClosure implements IPacket.IDataContainer<IFiniteClosure> {

    public SerializedUnifyPair[] pairs;

    public static SerializedFiniteClosure create(IFiniteClosure finiteClosure) {
        SerializedFiniteClosure fc = new SerializedFiniteClosure();
        fc.pairs = finiteClosure.getPairs().stream().map(SerializedUnifyPair::create).toArray(SerializedUnifyPair[]::new);
        return fc;
    }

    @Override
    public IFiniteClosure toObject() {
        return new FiniteClosure(
                new HashSet<>(Arrays.stream(pairs).map(SerializedUnifyPair::toObject).toList()),
                null
        );
    }
}

@@ -12,26 +12,26 @@ import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
 */
public class SerializedPair implements IPacket.IDataContainer<Pair> {

    @JsonProperty("o")
    public PairOperator operator;
    public SerializedTokenWrapper ta1;
    public SerializedTokenWrapper ta2;

    public static SerializedPair create(Pair pair) {
        SerializedPair sPair = new SerializedPair();
        sPair.operator = pair.GetOperator();
        sPair.ta1 = SerializedTokenWrapper.create(pair.TA1);
        sPair.ta2 = SerializedTokenWrapper.create(pair.TA2);
        return sPair;
    }

    @Override
    public Pair toObject() {
        return new Pair(
                ta1.toObject(),
                ta2.toObject(),
                operator
        );
    }
}

@@ -5,6 +5,7 @@ import com.fasterxml.jackson.annotation.JsonProperty;
import de.dhbwstuttgart.server.packet.IPacket;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.Pair;

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;

@@ -17,66 +18,66 @@ import java.util.HashSet;
@JsonInclude(JsonInclude.Include.NON_NULL)
public class SerializedPairConstraint implements IPacket.IDataContainer<Constraint<Pair>> {

    // serialize recursive structure
    public static final HashMap<Constraint<Pair>, String> UNIQUE_CONSTRAINT_KEY_MAP = new HashMap<>();
    public static final HashMap<String, SerializedPairConstraint> UNIQUE_CONSTRAINT_MAP = new HashMap<>();
    // deserialize recursive structure
    private static final HashMap<String, Constraint<Pair>> UNIQUE_OBJECT_MAP = new HashMap<>();

    @JsonProperty("i")
    public boolean isInherited;
    @JsonProperty("u")
    public String uniqueKey;
    @JsonProperty("e")
    public String extendedConstraint = null;
    @JsonProperty("m")
    public SerializedPair[] methodSignatureConstraint;
    @JsonProperty("c")
    public SerializedPair[] constraintElements;

    public static SerializedPairConstraint create(Constraint<Pair> constraint) {
        final String uniqueKey = UNIQUE_CONSTRAINT_KEY_MAP.getOrDefault(constraint, "_" + UNIQUE_CONSTRAINT_MAP.size());

        if (UNIQUE_CONSTRAINT_MAP.containsKey(uniqueKey)) {
            return UNIQUE_CONSTRAINT_MAP.get(uniqueKey);
        }

        SerializedPairConstraint pairConstraint = new SerializedPairConstraint();
        pairConstraint.uniqueKey = uniqueKey;
        UNIQUE_CONSTRAINT_KEY_MAP.put(constraint, uniqueKey);
        UNIQUE_CONSTRAINT_MAP.put(uniqueKey, pairConstraint);

        pairConstraint.constraintElements = constraint.stream().map(SerializedPair::create).toArray(SerializedPair[]::new);
        pairConstraint.isInherited = constraint.isInherited();
        pairConstraint.methodSignatureConstraint = constraint.getmethodSignatureConstraint().stream().map(SerializedPair::create).toArray(SerializedPair[]::new);
        if (constraint.getExtendConstraint() != null) {
            pairConstraint.extendedConstraint = SerializedPairConstraint.create(constraint.getExtendConstraint()).uniqueKey;
        }

        return pairConstraint;
    }

    @Override
    public Constraint<Pair> toObject() {
        if (UNIQUE_OBJECT_MAP.containsKey(uniqueKey)) {
            return UNIQUE_OBJECT_MAP.get(uniqueKey);
        }

        Constraint<Pair> constraint = new Constraint<>();
        UNIQUE_OBJECT_MAP.put(uniqueKey, constraint);

        constraint.addAll(Arrays.stream(constraintElements).map(SerializedPair::toObject).toList());
        constraint.setIsInherited(isInherited);
        constraint.setmethodSignatureConstraint(new HashSet<>(
                Arrays.stream(methodSignatureConstraint).map(SerializedPair::toObject).toList()
        ));
        SerializedPairConstraint extendedConstraint = this.extendedConstraint == null ? null :
                SerializedPairConstraint.UNIQUE_CONSTRAINT_MAP.get(this.extendedConstraint);
        if (extendedConstraint != null) {
            constraint.setExtendConstraint(extendedConstraint.toObject());
        }

        return constraint;
    }
}

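The unique-key machinery above exists because constraints can reference each other through getExtendConstraint(): serializing such a graph naively would either duplicate shared constraints or recurse forever. The sketch below is illustrative only; it assumes SerializedPairConstraint sits in the dataContainers package like its siblings, and that a freshly constructed Constraint accepts setExtendConstraint() and setmethodSignatureConstraint() as used elsewhere in this diff.

import de.dhbwstuttgart.server.packet.dataContainers.SerializedPairConstraint;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.Pair;

import java.util.HashSet;

public class ConstraintGraphExample {
    public static void main(String[] args) {
        // Two (empty) constraints that reference each other via extendConstraint.
        Constraint<Pair> a = new Constraint<>();
        Constraint<Pair> b = new Constraint<>();
        // defensive: create() streams this set, so make sure it is non-null (assumption about Constraint's defaults)
        a.setmethodSignatureConstraint(new HashSet<>());
        b.setmethodSignatureConstraint(new HashSet<>());
        a.setExtendConstraint(b);
        b.setExtendConstraint(a);

        SerializedPairConstraint serializedA = SerializedPairConstraint.create(a);

        // The cycle is broken by the unique keys: b is serialized once, and a's second
        // visit just returns the entry already stored in UNIQUE_CONSTRAINT_MAP.
        System.out.println(SerializedPairConstraint.UNIQUE_CONSTRAINT_MAP.size()); // 2 in a fresh JVM
        System.out.println(serializedA.extendedConstraint);                        // b's key, e.g. "_1"
    }
}
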
@@ -4,13 +4,8 @@ import de.dhbwstuttgart.server.packet.IPacket;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import java.util.*;

/**
 * Serializable container of
@@ -19,42 +14,42 @@ import java.util.Set;
 */
public class SerializedPairConstraintSet implements IPacket.IDataContainer<ConstraintSet<Pair>> {

    public SerializedPair[] undConstraints;
    public SerializedPairConstraint[][] oderConstraints;
    public Map<String, SerializedPairConstraint> uniqueConstraintMap = new HashMap<>();

    public static SerializedPairConstraintSet create(ConstraintSet<Pair> constraints) {
        SerializedPairConstraintSet constraintSet = new SerializedPairConstraintSet();

        constraintSet.undConstraints = constraints.getUndConstraints().stream().map(SerializedPair::create).toArray(SerializedPair[]::new);
        constraintSet.oderConstraints = constraints.getOderConstraints().stream().map(consSet ->
                consSet.stream().map(SerializedPairConstraint::create).toArray(SerializedPairConstraint[]::new)
        ).toArray(SerializedPairConstraint[][]::new);

        // add all the gathered constraints to a serializable property
        constraintSet.uniqueConstraintMap.putAll(SerializedPairConstraint.UNIQUE_CONSTRAINT_MAP);

        return constraintSet;
    }

    @Override
    public ConstraintSet<Pair> toObject() {
        ConstraintSet<Pair> consSet = new ConstraintSet<>();

        // read all the constraints from the serializable property
        SerializedPairConstraint.UNIQUE_CONSTRAINT_MAP.putAll(this.uniqueConstraintMap);

        Constraint<Pair> undCons = new Constraint<>();
        undCons.addAll(Arrays.stream(undConstraints).map(SerializedPair::toObject).toList());
        consSet.addAllUndConstraint(undCons);

        List<Set<Constraint<Pair>>> oderCons = new ArrayList<>(Arrays.stream(oderConstraints).map(cons ->
                new HashSet<>(
                        Arrays.stream(cons).map(SerializedPairConstraint::toObject).toList()
                )
        ).toList());
        consSet.addAllOderConstraint(oderCons);

        return consSet;
    }
}

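One consequence of routing shared constraints through the static UNIQUE_CONSTRAINT_MAP is that serialization state outlives a single request, much like the PlaceholderType.EXISTING_PLACEHOLDERS issue flagged in the server's TODO. A host that serializes several unrelated constraint sets in one JVM would probably want to reset the public maps in between; the helper below is a hedged sketch, not something this changeset does, and the private UNIQUE_OBJECT_MAP cannot be cleared from outside.

import de.dhbwstuttgart.server.packet.dataContainers.SerializedPairConstraint;
import de.dhbwstuttgart.server.packet.dataContainers.SerializedPairConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;

public class ConstraintSetSerializationExample {
    // Hypothetical helper: serialize one constraint set with clean deduplication state.
    public static SerializedPairConstraintSet serializeIsolated(ConstraintSet<Pair> constraints) {
        SerializedPairConstraint.UNIQUE_CONSTRAINT_KEY_MAP.clear();
        SerializedPairConstraint.UNIQUE_CONSTRAINT_MAP.clear();
        return SerializedPairConstraintSet.create(constraints);
    }
}
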
@@ -8,11 +8,7 @@ import de.dhbwstuttgart.server.packet.dataContainers.resultPairs.SerializedPairT
import de.dhbwstuttgart.server.packet.dataContainers.resultPairs.SerializedPairTPHequalRefTypeOrWildcardType;
import de.dhbwstuttgart.server.packet.dataContainers.resultPairs.SerializedPairTPHsmallerTPH;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.typeinference.result.PairNoResult;
import de.dhbwstuttgart.typeinference.result.PairTPHEqualTPH;
import de.dhbwstuttgart.typeinference.result.PairTPHequalRefTypeOrWildcardType;
import de.dhbwstuttgart.typeinference.result.PairTPHsmallerTPH;
import de.dhbwstuttgart.typeinference.result.ResultPair;
import de.dhbwstuttgart.typeinference.result.*;

/**
 * Serializable container of
@@ -22,42 +18,42 @@ import de.dhbwstuttgart.typeinference.result.ResultPair;
@JsonInclude(JsonInclude.Include.NON_NULL)
public class SerializedResultPairWrapper implements IPacket.IDataContainer<ResultPair<RefTypeOrTPHOrWildcardOrGeneric, RefTypeOrTPHOrWildcardOrGeneric>> {

    @JsonProperty("nr")
    public SerializedPairNoResult noResult = null;
    @JsonProperty("teo")
    public SerializedPairTPHequalRefTypeOrWildcardType tphEqualOther = null;
    @JsonProperty("tet")
    public SerializedPairTPHEqualTPH tphEqualTph = null;
    @JsonProperty("tst")
    public SerializedPairTPHsmallerTPH tphSmallerTPH = null;

    public static <A extends RefTypeOrTPHOrWildcardOrGeneric, B extends RefTypeOrTPHOrWildcardOrGeneric> SerializedResultPairWrapper create(ResultPair<A, B> pair) {
        SerializedResultPairWrapper serialized = new SerializedResultPairWrapper();

        if (pair instanceof PairNoResult noResult)
            serialized.noResult = SerializedPairNoResult.create(noResult);
        else if (pair instanceof PairTPHequalRefTypeOrWildcardType tphEqualOther)
            serialized.tphEqualOther = SerializedPairTPHequalRefTypeOrWildcardType.create(tphEqualOther);
        else if (pair instanceof PairTPHEqualTPH tphEqualTph)
            serialized.tphEqualTph = SerializedPairTPHEqualTPH.create(tphEqualTph);
        else if (pair instanceof PairTPHsmallerTPH tphSmallerTPH)
            serialized.tphSmallerTPH = SerializedPairTPHsmallerTPH.create(tphSmallerTPH);

        return serialized;
    }

    @Override
    @SuppressWarnings("rawtypes") // this is not optimal, but we have to conform to interface specifications
    public ResultPair toObject() {
        if (noResult != null)
            return noResult.toObject();
        else if (tphEqualOther != null)
            return tphEqualOther.toObject();
        else if (tphEqualTph != null)
            return tphEqualTph.toObject();
        else if (tphSmallerTPH != null)
            return tphSmallerTPH.toObject();
        return null;
    }
}

@@ -2,6 +2,7 @@ package de.dhbwstuttgart.server.packet.dataContainers;

import de.dhbwstuttgart.server.packet.IPacket;
import de.dhbwstuttgart.typeinference.result.ResultSet;

import java.util.Arrays;
import java.util.HashSet;

@@ -12,20 +13,20 @@ import java.util.HashSet;
 */
public class SerializedResultSet implements IPacket.IDataContainer<ResultSet> {

    public SerializedResultPairWrapper[] results;

    public static SerializedResultSet create(ResultSet resultSet) {
        SerializedResultSet serialized = new SerializedResultSet();
        serialized.results = resultSet.results.stream().map(SerializedResultPairWrapper::create).toArray(SerializedResultPairWrapper[]::new);
        return serialized;
    }

    public ResultSet toObject() {
        return new ResultSet(
                new HashSet<>(
                        Arrays.stream(this.results).map(SerializedResultPairWrapper::toObject).toList()
                )
        );
    }

}

@@ -3,19 +3,9 @@ package de.dhbwstuttgart.server.packet.dataContainers;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import de.dhbwstuttgart.server.packet.IPacket;
import de.dhbwstuttgart.server.packet.dataContainers.token.SerializedExtendsWildcardType;
import de.dhbwstuttgart.server.packet.dataContainers.token.SerializedGenericRefType;
import de.dhbwstuttgart.server.packet.dataContainers.token.SerializedPlaceholderType;
import de.dhbwstuttgart.server.packet.dataContainers.token.SerializedRefType;
import de.dhbwstuttgart.server.packet.dataContainers.token.SerializedSuperWildcardType;
import de.dhbwstuttgart.server.packet.dataContainers.token.SerializedVoidType;
import de.dhbwstuttgart.syntaxtree.type.ExtendsWildcardType;
import de.dhbwstuttgart.syntaxtree.type.GenericRefType;
import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.SuperWildcardType;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.server.packet.dataContainers.token.*;
import de.dhbwstuttgart.syntaxtree.type.Void;
import de.dhbwstuttgart.syntaxtree.type.*;

/**
 * Serializable container of
@@ -25,48 +15,48 @@ import de.dhbwstuttgart.syntaxtree.type.Void;
@JsonInclude(JsonInclude.Include.NON_NULL)
public class SerializedTokenWrapper implements IPacket.IDataContainer<RefTypeOrTPHOrWildcardOrGeneric> {

    @JsonProperty("ew")
    public SerializedExtendsWildcardType extendsWildcardType = null;
    @JsonProperty("gr")
    public SerializedGenericRefType genericRefType = null;
    @JsonProperty("p")
    public SerializedPlaceholderType placeholderType = null;
    @JsonProperty("r")
    public SerializedRefType refType = null;
    @JsonProperty("sw")
    public SerializedSuperWildcardType superWildcardType = null;
    @JsonProperty("v")
    public SerializedVoidType voidType = null;

    public static SerializedTokenWrapper create(RefTypeOrTPHOrWildcardOrGeneric type) {
        SerializedTokenWrapper wrapper = new SerializedTokenWrapper();

        if (type instanceof ExtendsWildcardType)
            wrapper.extendsWildcardType = SerializedExtendsWildcardType.create((ExtendsWildcardType) type);
        else if (type instanceof GenericRefType)
            wrapper.genericRefType = SerializedGenericRefType.create((GenericRefType) type);
        else if (type instanceof TypePlaceholder)
            wrapper.placeholderType = SerializedPlaceholderType.create((TypePlaceholder) type);
        else if (type instanceof Void)
            wrapper.voidType = SerializedVoidType.create((Void) type);
        else if (type instanceof RefType)
            wrapper.refType = SerializedRefType.create((RefType) type);
        else if (type instanceof SuperWildcardType)
            wrapper.superWildcardType = SerializedSuperWildcardType.create((SuperWildcardType) type);

        return wrapper;
    }

    @Override
    public RefTypeOrTPHOrWildcardOrGeneric toObject() {
        if (extendsWildcardType != null) return extendsWildcardType.toObject();
        if (genericRefType != null) return genericRefType.toObject();
        if (placeholderType != null) return placeholderType.toObject();
        if (refType != null) return refType.toObject();
        if (superWildcardType != null) return superWildcardType.toObject();
        if (voidType != null) return voidType.toObject();
        return null;
    }
}

@@ -4,11 +4,8 @@ import de.dhbwstuttgart.server.packet.IPacket;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import java.util.*;

/**
 * Serializable container of
@@ -17,33 +14,33 @@ import java.util.Set;
 */
public class SerializedUnifyConstraintSet implements IPacket.IDataContainer<ConstraintSet<UnifyPair>> {

    public SerializedUnifyPair[] undConstraints;
    public SerializedUnifyPairConstraint[][] oderConstraints;

    public static SerializedUnifyConstraintSet create(ConstraintSet<UnifyPair> unifyCons) {
        SerializedUnifyConstraintSet constraintSet = new SerializedUnifyConstraintSet();
        constraintSet.undConstraints = unifyCons.getUndConstraints().stream().map(SerializedUnifyPair::create).toArray(SerializedUnifyPair[]::new);
        constraintSet.oderConstraints = unifyCons.getOderConstraints().stream().map(constraints ->
                constraints.stream().map(SerializedUnifyPairConstraint::create).toArray(SerializedUnifyPairConstraint[]::new)
        ).toArray(SerializedUnifyPairConstraint[][]::new);
        return constraintSet;
    }

    @Override
    public ConstraintSet<UnifyPair> toObject() {
        ConstraintSet<UnifyPair> consSet = new ConstraintSet<>();

        Constraint<UnifyPair> undCons = new Constraint<>();
        undCons.addAll(Arrays.stream(undConstraints).map(SerializedUnifyPair::toObject).toList());
        consSet.addAllUndConstraint(undCons);

        List<Set<Constraint<UnifyPair>>> oderCons = new ArrayList<>(Arrays.stream(oderConstraints).map(oderConsSet ->
                new HashSet<>(
                        Arrays.stream(oderConsSet).map(SerializedUnifyPairConstraint::toObject).toList()
                )
        ).toList());
        consSet.addAllOderConstraint(oderCons);

        return consSet;
    }
}

@@ -12,21 +12,21 @@ import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
 */
public class SerializedUnifyPair implements IPacket.IDataContainer<UnifyPair> {

    public SerializedUnifyTypeWrapper lhs;
    public SerializedUnifyTypeWrapper rhs;
    @JsonProperty("o")
    public PairOperator operator;

    public static SerializedUnifyPair create(UnifyPair unifyPair) {
        SerializedUnifyPair pair = new SerializedUnifyPair();
        pair.lhs = SerializedUnifyTypeWrapper.create(unifyPair.getLhsType());
        pair.rhs = SerializedUnifyTypeWrapper.create(unifyPair.getRhsType());
        pair.operator = unifyPair.getPairOp();
        return pair;
    }

    @Override
    public UnifyPair toObject() {
        return new UnifyPair(lhs.toObject(), rhs.toObject(), operator);
    }
}

@@ -5,6 +5,7 @@ import com.fasterxml.jackson.annotation.JsonProperty;
import de.dhbwstuttgart.server.packet.IPacket;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

import java.util.Arrays;
import java.util.HashSet;

@@ -16,37 +17,37 @@ import java.util.HashSet;
@JsonInclude(JsonInclude.Include.NON_NULL)
public class SerializedUnifyPairConstraint implements IPacket.IDataContainer<Constraint<UnifyPair>> {

    @JsonProperty("i")
    public boolean isInherited;
    @JsonProperty("ec")
    public SerializedUnifyPairConstraint extendedConstraint = null;
    @JsonProperty("p")
    public SerializedUnifyPair[] methodSignatureConstraints;
    @JsonProperty("c")
    public SerializedUnifyPair[] constraintElements;

    public static SerializedUnifyPairConstraint create(Constraint<UnifyPair> constraint) {
        SerializedUnifyPairConstraint cons = new SerializedUnifyPairConstraint();
        cons.constraintElements = constraint.stream().map(SerializedUnifyPair::create).toArray(SerializedUnifyPair[]::new);
        cons.isInherited = constraint.isInherited();
        if (constraint.getExtendConstraint() != null) {
            cons.extendedConstraint = SerializedUnifyPairConstraint.create(constraint.getExtendConstraint());
        }
        cons.methodSignatureConstraints = constraint.getmethodSignatureConstraint().stream().map(SerializedUnifyPair::create).toArray(SerializedUnifyPair[]::new);
        return cons;
    }

    @Override
    public Constraint<UnifyPair> toObject() {
        Constraint<UnifyPair> cons = new Constraint<>();

        cons.addAll(Arrays.stream(constraintElements).map(SerializedUnifyPair::toObject).toList());
        cons.setIsInherited(isInherited);
        if (extendedConstraint != null) {
            cons.setExtendConstraint(extendedConstraint.toObject());
        }
        cons.setmethodSignatureConstraint(new HashSet<>(Arrays.stream(methodSignatureConstraints).map(SerializedUnifyPair::toObject).toList()));
        return cons;
    }
}

@@ -3,17 +3,8 @@ package de.dhbwstuttgart.server.packet.dataContainers;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import de.dhbwstuttgart.server.packet.IPacket;
import de.dhbwstuttgart.server.packet.dataContainers.unifyType.SerializedExtendsType;
import de.dhbwstuttgart.server.packet.dataContainers.unifyType.SerializedFunNType;
import de.dhbwstuttgart.server.packet.dataContainers.unifyType.SerializedPlaceholderType;
import de.dhbwstuttgart.server.packet.dataContainers.unifyType.SerializedReferenceType;
import de.dhbwstuttgart.server.packet.dataContainers.unifyType.SerializedSuperType;
import de.dhbwstuttgart.typeinference.unify.model.ExtendsType;
import de.dhbwstuttgart.typeinference.unify.model.FunNType;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.ReferenceType;
import de.dhbwstuttgart.typeinference.unify.model.SuperType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import de.dhbwstuttgart.server.packet.dataContainers.unifyType.*;
import de.dhbwstuttgart.typeinference.unify.model.*;

/**
 * Serializable container of
@@ -23,43 +14,43 @@ import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
@JsonInclude(JsonInclude.Include.NON_NULL)
public class SerializedUnifyTypeWrapper implements IPacket.IDataContainer<UnifyType> {

    @JsonProperty("e")
    public SerializedExtendsType extendsType = null;
    @JsonProperty("s")
    public SerializedSuperType superType = null;
    @JsonProperty("r")
    public SerializedReferenceType referenceType = null;
    @JsonProperty("f")
    public SerializedFunNType funNType = null;
    @JsonProperty("p")
    public SerializedPlaceholderType placeholderType = null;


    public static SerializedUnifyTypeWrapper create(UnifyType unifyType) {
        SerializedUnifyTypeWrapper wrapper = new SerializedUnifyTypeWrapper();

        if (unifyType instanceof ExtendsType)
            wrapper.extendsType = SerializedExtendsType.create((ExtendsType) unifyType);
        else if (unifyType instanceof SuperType)
            wrapper.superType = SerializedSuperType.create((SuperType) unifyType);
        else if (unifyType instanceof ReferenceType)
            wrapper.referenceType = SerializedReferenceType.create((ReferenceType) unifyType);
        else if (unifyType instanceof FunNType)
            wrapper.funNType = SerializedFunNType.create((FunNType) unifyType);
        else if (unifyType instanceof PlaceholderType)
            wrapper.placeholderType = SerializedPlaceholderType.create((PlaceholderType) unifyType);

        return wrapper;
    }

    @Override
    public UnifyType toObject() {
        if (extendsType != null) return extendsType.toObject();
        if (superType != null) return superType.toObject();
        if (referenceType != null) return referenceType.toObject();
        if (funNType != null) return funNType.toObject();
        if (placeholderType != null) return placeholderType.toObject();
        return null;
    }
}
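
The wrapper keeps exactly one non-null field per concrete UnifyType subtype, and @JsonInclude(NON_NULL) then omits the other four from the JSON, so the payload stays compact and the receiving side can dispatch on whichever property is present. A minimal sketch of the effect, with an assumed mapper and an assumed example placeholder:

// Sketch only: serializing a wrapper around a placeholder type named "T".
ObjectMapper mapper = new ObjectMapper();
SerializedUnifyTypeWrapper wrapper = SerializedUnifyTypeWrapper.create(new PlaceholderType("T", true));
// With NON_NULL inclusion only the "p" property is emitted, roughly {"p":{"n":"T", ...}}.
System.out.println(mapper.writeValueAsString(wrapper));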

@@ -12,27 +12,27 @@ import de.dhbwstuttgart.typeinference.result.PairNoResult;
 */
public class SerializedPairNoResult implements IPacket.IDataContainer<PairNoResult> {

    @JsonProperty("l")
    public SerializedTokenWrapper left;
    @JsonProperty("r")
    public SerializedTokenWrapper right;

    /**
     * Use a static create method to leave the default constructor and simplify list conversions
     */
    public static SerializedPairNoResult create(PairNoResult pair) {
        SerializedPairNoResult serialized = new SerializedPairNoResult();
        serialized.left = SerializedTokenWrapper.create(pair.getLeft());
        serialized.right = SerializedTokenWrapper.create(pair.getRight());
        return serialized;
    }

    @Override
    public PairNoResult toObject() {
        return new PairNoResult(
                left.toObject(),
                right.toObject()
        );
    }

}
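
The static create factories mentioned in the Javadoc above are what keep the array and list conversions elsewhere in this change one-liners; a small illustration (the input list here is an assumed placeholder):

// Sketch: converting result pairs to and from their serialized form via method references.
List<PairNoResult> pairs = List.of();                            // assumed input
SerializedPairNoResult[] serialized = pairs.stream()
        .map(SerializedPairNoResult::create)
        .toArray(SerializedPairNoResult[]::new);
List<PairNoResult> restored = Arrays.stream(serialized)
        .map(SerializedPairNoResult::toObject)
        .toList();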

@@ -12,23 +12,23 @@ import de.dhbwstuttgart.typeinference.result.PairTPHEqualTPH;
 */
public class SerializedPairTPHEqualTPH implements IPacket.IDataContainer<PairTPHEqualTPH> {

    @JsonProperty("l")
    public SerializedPlaceholderType left;
    @JsonProperty("r")
    public SerializedPlaceholderType right;

    public static SerializedPairTPHEqualTPH create(PairTPHEqualTPH pair) {
        SerializedPairTPHEqualTPH serialized = new SerializedPairTPHEqualTPH();
        serialized.left = SerializedPlaceholderType.create(pair.getLeft());
        serialized.right = SerializedPlaceholderType.create(pair.getRight());
        return serialized;
    }

    @Override
    public PairTPHEqualTPH toObject() {
        return new PairTPHEqualTPH(
                left.toObject(),
                right.toObject()
        );
    }
}

@@ -13,24 +13,24 @@ import de.dhbwstuttgart.typeinference.result.PairTPHequalRefTypeOrWildcardType;
 */
public class SerializedPairTPHequalRefTypeOrWildcardType implements IPacket.IDataContainer<PairTPHequalRefTypeOrWildcardType> {

    @JsonProperty("l")
    public SerializedPlaceholderType left;
    @JsonProperty("r")
    public SerializedTokenWrapper right;

    public static SerializedPairTPHequalRefTypeOrWildcardType create(PairTPHequalRefTypeOrWildcardType pair) {
        SerializedPairTPHequalRefTypeOrWildcardType serialized = new SerializedPairTPHequalRefTypeOrWildcardType();
        serialized.left = SerializedPlaceholderType.create(pair.left);
        serialized.right = SerializedTokenWrapper.create(pair.right);
        return serialized;
    }

    @Override
    public PairTPHequalRefTypeOrWildcardType toObject() {
        return new PairTPHequalRefTypeOrWildcardType(
                left.toObject(),
                right.toObject()
        );
    }

}

@@ -12,23 +12,23 @@ import de.dhbwstuttgart.typeinference.result.PairTPHsmallerTPH;
 */
public class SerializedPairTPHsmallerTPH implements IPacket.IDataContainer<PairTPHsmallerTPH> {

    @JsonProperty("l")
    public SerializedPlaceholderType left;
    @JsonProperty("r")
    public SerializedPlaceholderType right;

    public static SerializedPairTPHsmallerTPH create(PairTPHsmallerTPH pair) {
        SerializedPairTPHsmallerTPH serialized = new SerializedPairTPHsmallerTPH();
        serialized.left = SerializedPlaceholderType.create(pair.left);
        serialized.right = SerializedPlaceholderType.create(pair.right);
        return serialized;
    }

    @Override
    public PairTPHsmallerTPH toObject() {
        return new PairTPHsmallerTPH(
                left.toObject(),
                right.toObject()
        );
    }
}

@@ -13,17 +13,17 @@ import de.dhbwstuttgart.syntaxtree.type.ExtendsWildcardType;
 */
public class SerializedExtendsWildcardType implements IPacket.IDataContainer<ExtendsWildcardType> {

    @JsonProperty("e")
    public SerializedTokenWrapper extendsType;

    public static SerializedExtendsWildcardType create(ExtendsWildcardType extendsWildcardType) {
        SerializedExtendsWildcardType type = new SerializedExtendsWildcardType();
        type.extendsType = SerializedTokenWrapper.create(extendsWildcardType.getInnerType());
        return type;
    }

    @Override
    public ExtendsWildcardType toObject() {
        return new ExtendsWildcardType(extendsType.toObject(), new NullToken());
    }
}

@@ -12,17 +12,17 @@ import de.dhbwstuttgart.syntaxtree.type.GenericRefType;
 */
public class SerializedGenericRefType implements IPacket.IDataContainer<GenericRefType> {

    @JsonProperty("n")
    public String name;

    public static SerializedGenericRefType create(GenericRefType genericRefType) {
        SerializedGenericRefType serialized = new SerializedGenericRefType();
        serialized.name = genericRefType.getParsedName();
        return serialized;
    }

    @Override
    public GenericRefType toObject() {
        return new GenericRefType(name, new NullToken());
    }
}

@@ -11,17 +11,17 @@ import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
 */
public class SerializedPlaceholderType implements IPacket.IDataContainer<TypePlaceholder> {

    @JsonProperty("n")
    public String name;

    public static SerializedPlaceholderType create(TypePlaceholder typePlaceholder) {
        SerializedPlaceholderType serialized = new SerializedPlaceholderType();
        serialized.name = typePlaceholder.getName();
        return serialized;
    }

    @Override
    public TypePlaceholder toObject() {
        return (TypePlaceholder) TypePlaceholder.of(name);
    }
}

@@ -6,6 +6,7 @@ import de.dhbwstuttgart.parser.scope.JavaClassName;
import de.dhbwstuttgart.server.packet.IPacket;
import de.dhbwstuttgart.server.packet.dataContainers.SerializedTokenWrapper;
import de.dhbwstuttgart.syntaxtree.type.RefType;

import java.util.Arrays;

/**
@@ -15,26 +16,26 @@ import java.util.Arrays;
 */
public class SerializedRefType implements IPacket.IDataContainer<RefType> {

    @JsonProperty("n")
    public String name;
    @JsonProperty("p")
    public SerializedTokenWrapper[] parameters;

    public static SerializedRefType create(RefType refType) {
        SerializedRefType serialized = new SerializedRefType();

        serialized.name = refType.getName().toString();
        serialized.parameters = refType.getParaList().stream().map(SerializedTokenWrapper::create).toArray(SerializedTokenWrapper[]::new);

        return serialized;
    }

    @Override
    public RefType toObject() {
        return new RefType(
                new JavaClassName(name),
                Arrays.stream(parameters).map(SerializedTokenWrapper::toObject).toList(),
                new NullToken()
        );
    }
}

@@ -13,18 +13,18 @@ import de.dhbwstuttgart.syntaxtree.type.SuperWildcardType;
 */
public class SerializedSuperWildcardType implements IPacket.IDataContainer<SuperWildcardType> {

    @JsonProperty("s")
    public SerializedTokenWrapper superType;

    public static SerializedSuperWildcardType create(SuperWildcardType superWildcardType) {
        SerializedSuperWildcardType type = new SerializedSuperWildcardType();
        type.superType = SerializedTokenWrapper.create(superWildcardType.getInnerType());
        return type;
    }

    @Override
    public SuperWildcardType toObject() {
        return new SuperWildcardType(superType.toObject(), new NullToken());
    }
}

@@ -11,14 +11,14 @@ import de.dhbwstuttgart.syntaxtree.type.Void;
 */
public class SerializedVoidType implements IPacket.IDataContainer<Void> {

    public int i = 0;

    public static SerializedVoidType create(Void voidType) {
        return new SerializedVoidType();
    }

    @Override
    public Void toObject() {
        return new Void(new NullToken());
    }
}

@@ -10,17 +10,17 @@ import de.dhbwstuttgart.typeinference.unify.model.ExtendsType;
 */
public class SerializedExtendsType extends SerializedWildcardType implements IPacket.IDataContainer<ExtendsType> {

    public static SerializedExtendsType create(ExtendsType extendsType) {
        SerializedExtendsType type = new SerializedExtendsType();
        type.readWildcardTypeValues(extendsType);
        return type;
    }

    @Override
    public ExtendsType toObject() {
        return new ExtendsType(
                this.wildcardedType.toObject()
        );
    }

}

@@ -4,6 +4,7 @@ import de.dhbwstuttgart.server.packet.IPacket;
import de.dhbwstuttgart.server.packet.dataContainers.SerializedUnifyTypeWrapper;
import de.dhbwstuttgart.typeinference.unify.model.FunNType;
import de.dhbwstuttgart.typeinference.unify.model.TypeParams;

import java.util.Arrays;

/**
@@ -14,19 +15,19 @@ import java.util.Arrays;
public class SerializedFunNType extends SerializedUnifyType implements IPacket.IDataContainer<FunNType> {


    public static SerializedFunNType create(FunNType funN) {
        SerializedFunNType type = new SerializedFunNType();
        type.readUnifyTypeValues(funN);
        return type;
    }

    @Override
    public FunNType toObject() {
        return FunNType.getFunNType(
                new TypeParams(
                        Arrays.stream(this.params).map(SerializedUnifyTypeWrapper::toObject).toList()
                )
        );
    }

}

@@ -10,29 +10,29 @@ import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
 */
public class SerializedPlaceholderType extends SerializedUnifyType implements IPacket.IDataContainer<PlaceholderType> {

    public boolean isGenerated;
    public boolean isInnerType;
    public int variance;
    public byte orCons;

    public static SerializedPlaceholderType create(PlaceholderType placeholder) {
        SerializedPlaceholderType type = new SerializedPlaceholderType();
        type.readUnifyTypeValues(placeholder);
        type.isGenerated = placeholder.isGenerated();
        type.isInnerType = placeholder.isInnerType();
        type.variance = placeholder.getVariance();
        type.orCons = placeholder.getOrCons();
        return type;
    }

    @Override
    public PlaceholderType toObject() {
        PlaceholderType placeholderType = new PlaceholderType(this.name, this.isGenerated);
        placeholderType.setInnerType(this.isInnerType);
        placeholderType.setVariance(this.variance);
        placeholderType.setOrCons(this.orCons);
        return placeholderType;
    }

}
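
Unlike the syntax-tree placeholder container earlier in this change, this unify-model variant also carries the solver's bookkeeping (variance, inner-type flag, orCons), so a create/toObject round trip preserves that state. A short sketch with made-up values:

// Sketch: the solver flags survive the serialization round trip.
PlaceholderType original = new PlaceholderType("T", true);       // assumed example placeholder
original.setVariance((byte) 1);
original.setInnerType(true);

PlaceholderType restored = SerializedPlaceholderType.create(original).toObject();
// restored.getVariance() == 1 and restored.isInnerType() is true again.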

@@ -5,6 +5,7 @@ import de.dhbwstuttgart.server.packet.IPacket;
import de.dhbwstuttgart.server.packet.dataContainers.SerializedUnifyTypeWrapper;
import de.dhbwstuttgart.typeinference.unify.model.ReferenceType;
import de.dhbwstuttgart.typeinference.unify.model.TypeParams;

import java.util.Arrays;

/**
@@ -14,24 +15,24 @@ import java.util.Arrays;
 */
public class SerializedReferenceType extends SerializedUnifyType implements IPacket.IDataContainer<ReferenceType> {

    @JsonProperty("gt")
    public boolean genericTypeVar;

    public static SerializedReferenceType create(ReferenceType referenceType) {
        SerializedReferenceType type = new SerializedReferenceType();
        type.readUnifyTypeValues(referenceType);
        type.genericTypeVar = referenceType.isGenTypeVar();
        return type;
    }

    @Override
    public ReferenceType toObject() {
        ReferenceType referenceType = new ReferenceType(this.name, this.genericTypeVar);
        return (ReferenceType) referenceType.setTypeParams(
                new TypeParams(
                        Arrays.stream(this.params).map(SerializedUnifyTypeWrapper::toObject).toList()
                )
        );
    }

}

@@ -10,17 +10,17 @@ import de.dhbwstuttgart.typeinference.unify.model.SuperType;
 */
public class SerializedSuperType extends SerializedWildcardType implements IPacket.IDataContainer<SuperType> {

    public static SerializedSuperType create(SuperType superType) {
        SerializedSuperType type = new SerializedSuperType();
        type.readWildcardTypeValues(superType);
        return type;
    }

    @Override
    public SuperType toObject() {
        return new SuperType(
                this.wildcardedType.toObject()
        );
    }

}

@@ -3,6 +3,7 @@ package de.dhbwstuttgart.server.packet.dataContainers.unifyType;
import com.fasterxml.jackson.annotation.JsonProperty;
import de.dhbwstuttgart.server.packet.dataContainers.SerializedUnifyTypeWrapper;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;

import java.util.Arrays;

/**
@@ -12,13 +13,13 @@ import java.util.Arrays;
 */
abstract class SerializedUnifyType {

    @JsonProperty("n")
    public String name;
    @JsonProperty("p")
    public SerializedUnifyTypeWrapper[] params = new SerializedUnifyTypeWrapper[]{};

    public void readUnifyTypeValues(UnifyType unifyType) {
        name = unifyType.getName();
        params = Arrays.stream(unifyType.getTypeParams().get()).map(SerializedUnifyTypeWrapper::create).toArray(SerializedUnifyTypeWrapper[]::new);
    }
}

@@ -10,11 +10,11 @@ import de.dhbwstuttgart.typeinference.unify.model.WildcardType;
 */
abstract class SerializedWildcardType extends SerializedUnifyType {

    public SerializedUnifyTypeWrapper wildcardedType;

    public void readWildcardTypeValues(WildcardType wildcardType) {
        this.readUnifyTypeValues(wildcardType);
        wildcardedType = SerializedUnifyTypeWrapper.create(wildcardType.getWildcardedType());
    }

}

@@ -182,7 +182,7 @@ public class UnifyTypeFactory {
        if (lhs.getName().equals("AQ")) {
            System.out.println("");
        }
        ((PlaceholderType)rhs).enableWildcardtable();
        ((PlaceholderType)rhs).enableWildcardable();
    }

    if (((rhs = ret.getLhsType()) instanceof PlaceholderType)
@@ -191,7 +191,7 @@
        if (rhs.getName().equals("AQ")) {
            System.out.println("");
        }
        ((PlaceholderType)lhs).enableWildcardtable();
        ((PlaceholderType)lhs).enableWildcardable();
    }
    return ret;
}
@@ -280,4 +280,4 @@
        return ret;
    }

}
}

@@ -1,6 +1,5 @@
package de.dhbwstuttgart.typeinference;

import com.fasterxml.jackson.core.JsonProcessingException;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.syntaxtree.ClassOrInterface;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
@@ -8,17 +7,15 @@ import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.RuleSet;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.UnifyResultListener;
import de.dhbwstuttgart.typeinference.unify.UnifyResultListenerImpl;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModelParallel;
import de.dhbwstuttgart.typeinference.unify.UnifyTaskModelParallel;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import org.apache.commons.io.output.NullOutputStream;
import org.apache.commons.io.output.NullWriter;

import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
@@ -27,8 +24,6 @@ import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.commons.io.output.NullOutputStream;
import org.apache.commons.io.output.NullWriter;

/**
 * Provides the entry point of the type inference and unification algorithm via the methods
@@ -38,334 +33,194 @@
 */
public class TypeInference {

    protected final ConstraintSet<Pair> constraints;
    protected final List<ClassOrInterface> allClasses;
    protected final ClassLoader classLoader;
    protected final boolean shouldUnifyParallel;
    protected final boolean shouldLog;
    protected final Writer logFileWriter;
    protected final Writer statisticFileWriter;

    public TypeInference(
            ConstraintSet<Pair> constraints,
            List<ClassOrInterface> allClasses,
            ClassLoader classLoader,
            boolean shouldUnifyParallel,
            boolean shouldLog,
            Writer logFileWriter,
            Writer statisticFileWriter
    ) {
        this.constraints = constraints;
        this.allClasses = allClasses;
        this.classLoader = classLoader;
        this.shouldUnifyParallel = shouldUnifyParallel;
        this.shouldLog = shouldLog;
        this.logFileWriter = logFileWriter;
        this.statisticFileWriter = statisticFileWriter;
    }

    public UnifyResultModelParallel executeAsync(
            UnifyResultListener resultListener
    ) throws ClassNotFoundException, IOException {

        // generate finite closure
        IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses, logFileWriter, classLoader);
        logFileWriter.write("FC:\\" + finiteClosure.toString() + "\n");
        System.out.println(finiteClosure);

        // generate unifyConstraints
        ConstraintSet<UnifyPair> unifyConstraints = TypeInferenceHelper.constraintsToUnifyConstraints(constraints);
        logFileWriter.write("\nUnify_distributeInnerVars: " + unifyConstraints.toString());

        TypeUnify unify = new TypeUnify();
        TypeUnify.statistics = statisticFileWriter;

        Set<String> paraTypeVarNames = new HashSet<>();
        paraTypeVarNames.addAll(TypeInferenceHelper.methodParaTypeVarNames(allClasses));
        paraTypeVarNames.addAll(TypeInferenceHelper.constructorParaTypeVarNames(allClasses));

        Set<String> returnAndFieldTypeVarNames = new HashSet<>();
        returnAndFieldTypeVarNames.addAll(TypeInferenceHelper.returnTypeVarNames(allClasses));
        returnAndFieldTypeVarNames.addAll(TypeInferenceHelper.fieldTypeVarNames(allClasses));


        UnifyResultModelParallel urm = new UnifyResultModelParallel(constraints, finiteClosure);
        urm.addUnifyResultListener(resultListener);

        unifyConstraints = unifyConstraints.map(x -> {
            // It still has to be considered whether
            // 1. all argument and return type variables in all UnifyPairs
            //    should be set with disableWildcardtable(), and
            // 2. all type variables related to argument or return type variables
            //    must be set to disableWildcardtable() as well.
            // PL 2018-04-23
            if ((x.getLhsType() instanceof PlaceholderType)) {
                if (paraTypeVarNames.contains(x.getLhsType().getName())) {
                    ((PlaceholderType) x.getLhsType()).setVariance((byte) 1);
                    ((PlaceholderType) x.getLhsType()).disableWildcardtable();
                }
                if (returnAndFieldTypeVarNames.contains(x.getLhsType().getName())) {
                    ((PlaceholderType) x.getLhsType()).setVariance((byte) -1);
                    ((PlaceholderType) x.getLhsType()).disableWildcardtable();
                }
            }
            if ((x.getRhsType() instanceof PlaceholderType)) {
                if (paraTypeVarNames.contains(x.getRhsType().getName())) {
                    ((PlaceholderType) x.getRhsType()).setVariance((byte) 1);
                    ((PlaceholderType) x.getRhsType()).disableWildcardtable();
                }
                if (returnAndFieldTypeVarNames.contains(x.getRhsType().getName())) {
                    ((PlaceholderType) x.getRhsType()).setVariance((byte) -1);
                    ((PlaceholderType) x.getRhsType()).disableWildcardtable();
                }
            }
            return x; // HERE: set the right resp. left side to the same variance as the respective other side
        });
        Set<PlaceholderType> varianceTPHold;
        Set<PlaceholderType> varianceTPH = new HashSet<>();
        varianceTPH = TypeInferenceHelper.varianceInheritanceConstraintSet(unifyConstraints);

        /*
         * PL 2018-11-07: now handled in varianceInheritanceConstraintSet
         * do { // PL 2018-11-05: closure step, propagate the variance to all TPHs of the terms on the respective other side
         *     varianceTPHold = new HashSet<>(varianceTPH);
         *     varianceTPH = varianceInheritanceConstraintSet(unifyCons);
         *     unifyCons.map( y -> {
         *         if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType() instanceof PlaceholderType)) {
         *             if (((PlaceholderType)y.getLhsType()).getVariance() != 0 && ((PlaceholderType)y.getRhsType()).getVariance() == 0) {
         *                 ((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType()).getVariance());
         *             }
         *             if (((PlaceholderType)y.getLhsType()).getVariance() == 0 && ((PlaceholderType)y.getRhsType()).getVariance() != 0) {
         *                 ((PlaceholderType)y.getLhsType()).setVariance(((PlaceholderType)y.getRhsType()).getVariance());
         *             }
         *         }
         *         return y;
         *     });
         * } while (!varianceTPHold.equals(varianceTPH));
         */

        // Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure,
        // logFileWriter, log);
        // Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
        List<Set<Constraint<UnifyPair>>> oderConstraints = unifyConstraints.getOderConstraints()/*.stream().map(x -> {
            Set<Set<UnifyPair>> ret = new HashSet<>();
            for (Constraint<UnifyPair> y : x) {
                ret.add(new HashSet<>(y));
            }
            return ret;
        }).collect(Collectors.toCollection(ArrayList::new))*/;

        UnifyTaskModelParallel usedTasks = new UnifyTaskModelParallel();

        unify.unifyAsync(
                unifyConstraints.getUndConstraints(),
                oderConstraints,
                finiteClosure,
                logFileWriter,
                shouldLog,
                urm,
                usedTasks
        );

        return urm;
    }

    public List<ResultSet> execute(Optional<String> remoteServer) throws ClassNotFoundException, IOException {
        // generate finite closure
        IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses, logFileWriter, classLoader);
        logFileWriter.write("FC:\\" + finiteClosure.toString() + "\n");
        System.out.println(finiteClosure);

        // generate unifyConstraints
        ConstraintSet<UnifyPair> unifyConstraints = TypeInferenceHelper.constraintsToUnifyConstraints(constraints);
        logFileWriter.write("\nUnify_distributeInnerVars: " + unifyConstraints.toString());

        TypeUnify unify = new TypeUnify();
        TypeUnify.statistics = statisticFileWriter;

        Set<String> paraTypeVarNames = new HashSet<>();
        paraTypeVarNames.addAll(TypeInferenceHelper.methodParaTypeVarNames(allClasses));
        paraTypeVarNames.addAll(TypeInferenceHelper.constructorParaTypeVarNames(allClasses));

        Set<String> returnAndFieldTypeVarNames = new HashSet<>();
        returnAndFieldTypeVarNames.addAll(TypeInferenceHelper.returnTypeVarNames(allClasses));
        returnAndFieldTypeVarNames.addAll(TypeInferenceHelper.fieldTypeVarNames(allClasses));

        unifyConstraints = TypeInferenceHelper.makeFixedPlaceholdersNonWildcardable(unifyConstraints, paraTypeVarNames, returnAndFieldTypeVarNames);

        // PL 2020-02-05: the receiver and the parameters of all Oder-constraints are set to variance 1
        // It is assumed that in Oder-constraints the argument types of parameter conditions are on the left-hand side
        // and the return types are always on the right-hand side

        /*
        unifyCons.getOderConstraints().forEach(z -> z.forEach(y -> y.forEach(x -> {
            if ((x.getLhsType() instanceof PlaceholderType) && x.getPairOp().compareTo(PairOperator.SMALLERDOT) == 0) {
                ((PlaceholderType) x.getLhsType()).setVariance((byte)1);
            }
            else if ((x.getRhsType() instanceof PlaceholderType) && x.getPairOp().compareTo(PairOperator.EQUALSDOT) == 0) {
                ((PlaceholderType) x.getRhsType()).setVariance((byte)-1);
            }
        })));
        */

        System.out.println("Unify after Oder-constraints adjustment:" + unifyConstraints.toString());
        Set<PlaceholderType> varianceTPHold;
        Set<PlaceholderType> varianceTPH = TypeInferenceHelper.varianceInheritanceConstraintSet(unifyConstraints);

        /*
         * PL 2018-11-07: now handled in varianceInheritanceConstraintSet
         * do { // PL 2018-11-05: closure step, propagate the variance to all TPHs of the terms on the respective other side
         *     varianceTPHold = new HashSet<>(varianceTPH);
         *     varianceTPH = varianceInheritanceConstraintSet(unifyCons);
         *     unifyCons.map( y -> {
         *         if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType() instanceof PlaceholderType)) {
         *             if (((PlaceholderType)y.getLhsType()).getVariance() != 0 && ((PlaceholderType)y.getRhsType()).getVariance() == 0) {
         *                 ((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType()).getVariance());
         *             }
         *             if (((PlaceholderType)y.getLhsType()).getVariance() == 0 && ((PlaceholderType)y.getRhsType()).getVariance() != 0) {
         *                 ((PlaceholderType)y.getLhsType()).setVariance(((PlaceholderType)y.getRhsType()).getVariance());
         *             }
         *         }
         *         return y;
         *     });
         * } while (!varianceTPHold.equals(varianceTPH));
         */

        // Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure,
        // logFile, log);
        // Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
        List<Set<Constraint<UnifyPair>>> oderConstraints = unifyConstraints.getOderConstraints()//.stream().map(x -> {
            /*Set<Set<UnifyPair>> ret = new HashSet<>();
            for (Constraint<UnifyPair> y : x) {
                ret.add(new HashSet<>(y));
            }
            return ret;
        }).collect(Collectors.toCollection(ArrayList::new))*/;

        UnifyTaskModelParallel usedTasks = new UnifyTaskModelParallel();

        List<ResultSet> results;
        if (remoteServer.isPresent()) {
            SocketClient socketClient = new SocketClient(remoteServer.get());
            results = socketClient.execute(
                    finiteClosure,
                    constraints,
                    unifyConstraints
            );
        } else if (shouldUnifyParallel) {
            results = this.executeParallel(
                    unify,
                    unifyConstraints,
                    oderConstraints,
                    finiteClosure,
                    usedTasks
            );
        } else {
            results = this.executeSequential(
                    unify,
                    unifyConstraints,
                    oderConstraints,
                    finiteClosure,
                    usedTasks
            );
    public TypeInference(
            ConstraintSet<Pair> constraints,
            List<ClassOrInterface> allClasses,
            ClassLoader classLoader,
            boolean shouldUnifyParallel,
            boolean shouldLog,
            Writer logFileWriter,
            Writer statisticFileWriter
    ) {
        this.constraints = constraints;
        this.allClasses = allClasses;
        this.classLoader = classLoader;
        this.shouldUnifyParallel = shouldUnifyParallel;
        this.shouldLog = shouldLog;
        this.logFileWriter = logFileWriter;
        this.statisticFileWriter = statisticFileWriter;
    }

        System.out.println("Found " + results.size() + " results");
    public static List<ResultSet> executeWithoutContext(
            IFiniteClosure finiteClosure,
            ConstraintSet<Pair> constraints,
            ConstraintSet<UnifyPair> unifyConstraints
    ) {
        UnifyResultModelParallel urm = new UnifyResultModelParallel(constraints, finiteClosure);
        TypeUnify.statistics = new NullWriter();

        statisticFileWriter.close();
        return results;
    }
        (new TypeUnify()).unifyParallel(
                unifyConstraints.getUndConstraints(),
                unifyConstraints.getOderConstraints(),
                finiteClosure,
                new OutputStreamWriter(new NullOutputStream()),
                false,
                urm,
                new UnifyTaskModelParallel() // TODO: move this to SocketData to cancel the pool once a client disconnects
        );

    /**
     * We provide a UnifyTaskModelParallel anyway to reuse code
     */
    protected List<ResultSet> executeSequential(
            TypeUnify unify,
            ConstraintSet<UnifyPair> unifyCons,
            List<Set<Constraint<UnifyPair>>> oderConstraints,
            IFiniteClosure finiteClosure,
            UnifyTaskModelParallel taskModel
        return urm.getResults();
    }

    ) throws IOException {
        Set<Set<UnifyPair>> result = unify.unifyOderConstraints(
                unifyCons.getUndConstraints(),
                oderConstraints,
                finiteClosure,
                logFileWriter,
                shouldLog,
                new UnifyResultModelParallel(constraints, finiteClosure),
                taskModel
        );
        System.out.println("RESULT: " + result);
        logFileWriter.write("RES: " + result.toString() + "\n");
        logFileWriter.flush();
    public List<ResultSet> execute(Optional<String> remoteServer) throws ClassNotFoundException, IOException {
        // generate finite closure
        IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses, logFileWriter, classLoader);
        logFileWriter.write("FC:\\" + finiteClosure + "\n");
        System.out.println(finiteClosure);

        Set<Set<UnifyPair>> results = new HashSet<>(result);
        // generate unifyConstraints
        ConstraintSet<UnifyPair> unifyConstraints = TypeInferenceHelper.constraintsToUnifyConstraints(constraints);
        logFileWriter.write("\nUnify_distributeInnerVars: " + unifyConstraints.toString());

        results = results.stream().map(x -> {
            // all pairs a <.? b are replaced by a =. b
            Optional<Set<UnifyPair>> res = new RuleSet().subst(x.stream().map(y -> {
                if (y.getPairOp() == PairOperator.SMALLERDOTWC)
                    y.setPairOp(PairOperator.EQUALSDOT);
                return y;
            }).collect(Collectors.toCollection(HashSet::new)));
            if (res.isPresent()) {
                // if subst returns a result, something was changed
                return new TypeUnifyTask().applyTypeUnificationRules(res.get(), finiteClosure);
            } else
                // if nothing was changed, x is returned
                return x;
        }).collect(Collectors.toCollection(HashSet::new));
        System.out.println("RESULT Final: " + results);
        System.out.println("Constraints for Generated Generics: " + " ???");
        logFileWriter.write("RES_FINAL: " + results.toString() + "\n");
        logFileWriter.write("PLACEHOLDERS: " + PlaceholderType.EXISTING_PLACEHOLDERS);
        logFileWriter.flush();
        TypeUnify unify = new TypeUnify();
        TypeUnify.statistics = statisticFileWriter;

        return results.stream()
                .map((unifyPairs -> new ResultSet(UnifyTypeFactory.convert(unifyPairs, Pair.generateTPHMap(constraints)))))
                .collect(Collectors.toList());
    }
        Set<String> paraTypeVarNames = new HashSet<>();
        paraTypeVarNames.addAll(TypeInferenceHelper.methodParaTypeVarNames(allClasses));
        paraTypeVarNames.addAll(TypeInferenceHelper.constructorParaTypeVarNames(allClasses));

    protected List<ResultSet> executeParallel(
            TypeUnify unify,
            ConstraintSet<UnifyPair> unifyCons,
            List<Set<Constraint<UnifyPair>>> oderConstraints,
            IFiniteClosure finiteClosure,
            UnifyTaskModelParallel taskModel
    ) throws IOException {
        UnifyResultModelParallel urm = new UnifyResultModelParallel(constraints, finiteClosure);
        UnifyResultListenerImpl li = new UnifyResultListenerImpl();
        urm.addUnifyResultListener(li);
        unify.unifyParallel(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFileWriter, shouldLog, urm,
                taskModel);
        System.out.println("RESULT Final: " + li.getResults());
        System.out.println("Constraints for Generated Generics: " + " ???");
        logFileWriter.write("RES_FINAL: " + li.getResults().toString() + "\n");
        logFileWriter.flush();
        statisticFileWriter.close();
        return li.getResults();
    }
        Set<String> returnAndFieldTypeVarNames = new HashSet<>();
        returnAndFieldTypeVarNames.addAll(TypeInferenceHelper.returnTypeVarNames(allClasses));
        returnAndFieldTypeVarNames.addAll(TypeInferenceHelper.fieldTypeVarNames(allClasses));

    public static List<ResultSet> executeWithoutContext(
            IFiniteClosure finiteClosure,
            ConstraintSet<Pair> constraints,
            ConstraintSet<UnifyPair> unifyConstraints
    ) {
        UnifyResultModelParallel urm = new UnifyResultModelParallel(constraints, finiteClosure);
        UnifyResultListenerImpl li = new UnifyResultListenerImpl();
        urm.addUnifyResultListener(li);
        unifyConstraints = TypeInferenceHelper.makeFixedPlaceholdersNonWildcardable(unifyConstraints, paraTypeVarNames, returnAndFieldTypeVarNames);

        TypeUnify.statistics = new NullWriter();
        // PL 2020-02-05: the receiver and the parameters of all Oder-constraints are set to variance 1
        // It is assumed that in Oder-constraints the argument types of parameter conditions are on the left-hand side
        // and the return types are always on the right-hand side

        (new TypeUnify()).unifyParallel(
                unifyConstraints.getUndConstraints(),
                unifyConstraints.getOderConstraints(),
                finiteClosure,
                new OutputStreamWriter(new NullOutputStream()),
                false,
                urm,
                new UnifyTaskModelParallel() // TODO: move this to SocketData to cancel the pool once a client disconnects
        );
        System.out.println("Unify after Oder-constraints adjustment:" + unifyConstraints.toString());
        Set<PlaceholderType> varianceTPHold;
        Set<PlaceholderType> varianceTPH = TypeInferenceHelper.varianceInheritanceConstraintSet(unifyConstraints);

        return li.getResults();
    }
        /*
         * PL 2018-11-07: now handled in varianceInheritanceConstraintSet
         * do { // PL 2018-11-05: closure step, propagate the variance to all TPHs of the terms on the respective other side
         *     varianceTPHold = new HashSet<>(varianceTPH);
         *     varianceTPH = varianceInheritanceConstraintSet(unifyCons);
         *     unifyCons.map( y -> {
         *         if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType() instanceof PlaceholderType)) {
         *             if (((PlaceholderType)y.getLhsType()).getVariance() != 0 && ((PlaceholderType)y.getRhsType()).getVariance() == 0) {
         *                 ((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType()).getVariance());
         *             }
         *             if (((PlaceholderType)y.getLhsType()).getVariance() == 0 && ((PlaceholderType)y.getRhsType()).getVariance() != 0) {
         *                 ((PlaceholderType)y.getLhsType()).setVariance(((PlaceholderType)y.getRhsType()).getVariance());
         *             }
         *         }
         *         return y;
         *     });
         * } while (!varianceTPHold.equals(varianceTPH));
         */


        List<Set<Constraint<UnifyPair>>> oderConstraints = unifyConstraints.getOderConstraints();
        UnifyTaskModelParallel usedTasks = new UnifyTaskModelParallel();

        List<ResultSet> results;
        if (remoteServer.isPresent()) {
            SocketClient socketClient = new SocketClient(remoteServer.get());
            results = socketClient.execute(
                    finiteClosure,
                    constraints,
                    unifyConstraints
            );
        } else if (shouldUnifyParallel) {
            results = this.executeParallel(
                    unify,
                    unifyConstraints,
                    oderConstraints,
                    finiteClosure,
                    usedTasks
            );
        } else {
            results = this.executeSequential(
                    unify,
                    unifyConstraints,
                    oderConstraints,
                    finiteClosure
            );
        }

        System.out.println("Found " + results.size() + " results");

        statisticFileWriter.close();
        return results;
    }

    /**
     * We provide a UnifyTaskModelParallel anyway to reuse code
     */
    protected List<ResultSet> executeSequential(
            TypeUnify unify,
            ConstraintSet<UnifyPair> unifyCons,
            List<Set<Constraint<UnifyPair>>> oderConstraints,
            IFiniteClosure finiteClosure
    ) throws IOException {
        Set<Set<UnifyPair>> result = unify.unifyOderConstraints(
                unifyCons.getUndConstraints(),
                oderConstraints,
                finiteClosure,
                logFileWriter,
                shouldLog,
                new UnifyResultModelParallel(constraints, finiteClosure)
        );
        System.out.println("RESULT: " + result);
        logFileWriter.write("RES: " + result.toString() + "\n");
        logFileWriter.flush();

        Set<Set<UnifyPair>> results = TypeInferenceHelper.resolveSubstitutions(new HashSet<>(result), finiteClosure);

        System.out.println("RESULT Final: " + results);
        System.out.println("Constraints for Generated Generics: " + " ???");
        logFileWriter.write("RES_FINAL: " + results.toString() + "\n");
        logFileWriter.write("PLACEHOLDERS: " + PlaceholderType.EXISTING_PLACEHOLDERS);
        logFileWriter.flush();

        return results.stream()
                .map((unifyPairs -> new ResultSet(UnifyTypeFactory.convert(unifyPairs, Pair.generateTPHMap(constraints)))))
                .collect(Collectors.toList());
    }

    protected List<ResultSet> executeParallel(
            TypeUnify unify,
            ConstraintSet<UnifyPair> unifyCons,
            List<Set<Constraint<UnifyPair>>> oderConstraints,
            IFiniteClosure finiteClosure,
            UnifyTaskModelParallel taskModel
    ) throws IOException {
        UnifyResultModelParallel urm = new UnifyResultModelParallel(constraints, finiteClosure);
        unify.unifyParallel(
                unifyCons.getUndConstraints(),
                oderConstraints,
                finiteClosure,
                logFileWriter,
                shouldLog,
                urm,
                taskModel
        );
        System.out.println("RESULT Final: " + urm.getResults());
        System.out.println("Constraints for Generated Generics: " + " ???");
        logFileWriter.write("RESULT_FINAL: " + urm.getResults().toString() + "\n");
        logFileWriter.flush();
        statisticFileWriter.close();

        return urm.getResults();
    }

}
|
||||
|
||||
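Note: the three branches above only differ in where unification runs; every path hands back a List<ResultSet>. The fragment below is a hedged sketch and not part of the commit; it assumes the project classes and imports are available and uses ResultSet.resolveType and ResolvedType exactly as they appear further down in this diff.

    // hedged sketch (not in the commit): print what a placeholder resolves to in each result set
    static void printResolution(List<ResultSet> results, TypePlaceholder tph) {
        for (ResultSet resultSet : results) {
            ResolvedType resolved = resultSet.resolveType(tph); // falls back to the tph itself if unresolved
            System.out.println(tph + " -> " + resolved.resolvedType);
        }
    }
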
@@ -5,45 +5,64 @@ import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
|
||||
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
|
||||
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
|
||||
import de.dhbwstuttgart.typeinference.constraints.Pair;
|
||||
import de.dhbwstuttgart.typeinference.unify.distributeVariance;
|
||||
import de.dhbwstuttgart.typeinference.unify.DistributeVariance;
|
||||
import de.dhbwstuttgart.typeinference.unify.RuleSet;
|
||||
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
|
||||
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
|
||||
|
||||
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
/**
|
||||
* Provides static helper methods for the TypeInferenceAlgorithm to reduce method size
|
||||
* Provides static helper methods for the TypeInferenceAlgorithm to reduce method size and prevent duplicate code
|
||||
*/
|
||||
public class TypeInferenceHelper {
|
||||
|
||||
public static Set<Set<UnifyPair>> resolveSubstitutions(Set<Set<UnifyPair>> results, IFiniteClosure finiteClosure) {
|
||||
return results.stream().map(x -> {
|
||||
// replace all (a <.? b) constraints with (a =. b) and apply the substitution rule
|
||||
Optional<Set<UnifyPair>> res = new RuleSet().subst(x.stream().map(y -> {
|
||||
if (y.getPairOp() == PairOperator.SMALLERDOTWC)
|
||||
y.setPairOp(PairOperator.EQUALSDOT);
|
||||
return y;
|
||||
}).collect(Collectors.toCollection(HashSet::new)));
|
||||
|
||||
if (res.isPresent()) {
|
||||
// if any constraints have been changed by the substitution, apply all the unification rules again
|
||||
return new TypeUnifyTask().applyTypeUnificationRules(res.get(), finiteClosure);
|
||||
} else {
|
||||
// Otherwise nothing was changed, so return the constraints as they are
|
||||
return x;
|
||||
}
|
||||
}).collect(Collectors.toCollection(HashSet::new));
|
||||
}
|
||||
|
||||
public static ConstraintSet<UnifyPair> constraintsToUnifyConstraints(ConstraintSet<Pair> constraints) {
|
||||
ConstraintSet<UnifyPair> unifyConstraints = UnifyTypeFactory.convert(constraints);
|
||||
Function<UnifyPair, UnifyPair> distributeInnerVars = x -> {
|
||||
UnifyType lhs, rhs;
|
||||
if (((lhs = x.getLhsType()) instanceof PlaceholderType)
|
||||
&& ((rhs = x.getRhsType()) instanceof PlaceholderType)
|
||||
&& (((PlaceholderType) lhs).isInnerType() || ((PlaceholderType) rhs).isInnerType())) {
|
||||
&& ((rhs = x.getRhsType()) instanceof PlaceholderType)
|
||||
&& (((PlaceholderType) lhs).isInnerType() || ((PlaceholderType) rhs).isInnerType())) {
|
||||
((PlaceholderType) lhs).setInnerType(true);
|
||||
((PlaceholderType) rhs).setInnerType(true);
|
||||
}
|
||||
return x;
|
||||
};
|
||||
System.out.println("Unify:" + unifyConstraints.toString());
|
||||
unifyConstraints = unifyConstraints.map(distributeInnerVars);
|
||||
|
||||
return unifyConstraints;
|
||||
return unifyConstraints.map(distributeInnerVars);
|
||||
}
|
||||
|
||||
public static ConstraintSet<UnifyPair> makeFixedPlaceholdersNonWildcardable(
|
||||
ConstraintSet<UnifyPair> unifyConstraints,
|
||||
Set<String> paraTypeVarNames,
|
||||
Set<String> returnAndFieldTypeVarNames
|
||||
ConstraintSet<UnifyPair> unifyConstraints,
|
||||
Set<String> paraTypeVarNames,
|
||||
Set<String> returnAndFieldTypeVarNames
|
||||
) {
|
||||
return unifyConstraints.map(x -> {
|
||||
// Here it has to be considered whether
|
||||
@@ -55,41 +74,41 @@ public class TypeInferenceHelper {
|
||||
if ((x.getLhsType() instanceof PlaceholderType)) {
|
||||
if (paraTypeVarNames.contains(x.getLhsType().getName())) {
|
||||
((PlaceholderType) x.getLhsType()).setVariance((byte) 1);
|
||||
((PlaceholderType) x.getLhsType()).disableWildcardtable();
|
||||
((PlaceholderType) x.getLhsType()).disableWildcardable();
|
||||
}
|
||||
if (returnAndFieldTypeVarNames.contains(x.getLhsType().getName())) {
|
||||
((PlaceholderType) x.getLhsType()).setVariance((byte) -1);
|
||||
((PlaceholderType) x.getLhsType()).disableWildcardtable();
|
||||
((PlaceholderType) x.getLhsType()).disableWildcardable();
|
||||
}
|
||||
}
|
||||
if ((x.getRhsType() instanceof PlaceholderType)) {
|
||||
if (paraTypeVarNames.contains(x.getRhsType().getName())) {
|
||||
((PlaceholderType) x.getRhsType()).setVariance((byte) 1);
|
||||
((PlaceholderType) x.getRhsType()).disableWildcardtable();
|
||||
((PlaceholderType) x.getRhsType()).disableWildcardable();
|
||||
}
|
||||
if (returnAndFieldTypeVarNames.contains(x.getRhsType().getName())) {
|
||||
((PlaceholderType) x.getRhsType()).setVariance((byte) -1);
|
||||
((PlaceholderType) x.getRhsType()).disableWildcardtable();
|
||||
((PlaceholderType) x.getRhsType()).disableWildcardable();
|
||||
}
|
||||
}
|
||||
return x;// HIER DIE JEWEILS RECHT BZW. LINKE SEITE AUF GLEICHE VARIANZ SETZEN WIE DIE
|
||||
// JEWEILS ANDERE SEITE
|
||||
// Hier die jeweils rechte bzw. linke Seite auf die gleiche Varianz setzen, wie die jeweils andere Seite
|
||||
return x;
|
||||
});
|
||||
}
|
||||
|
||||
public static Set<String> methodParaTypeVarNames(List<ClassOrInterface> allClasses) {
|
||||
return allClasses.stream().map(x -> x.getMethods().stream()
|
||||
.map(y -> y.getParameterList().getFormalparalist().stream()
|
||||
.filter(z -> z.getType() instanceof TypePlaceholder)
|
||||
.map(z -> ((TypePlaceholder) z.getType()).getName())
|
||||
.collect(Collectors.toCollection(HashSet::new)))
|
||||
.reduce(new HashSet<String>(), (a, b) -> {
|
||||
a.addAll(b);
|
||||
return a;
|
||||
}, (a, b) -> {
|
||||
a.addAll(b);
|
||||
return a;
|
||||
})).reduce(new HashSet<String>(), (a, b) -> {
|
||||
.map(y -> y.getParameterList().getFormalparalist().stream()
|
||||
.filter(z -> z.getType() instanceof TypePlaceholder)
|
||||
.map(z -> ((TypePlaceholder) z.getType()).getName())
|
||||
.collect(Collectors.toCollection(HashSet::new)))
|
||||
.reduce(new HashSet<String>(), (a, b) -> {
|
||||
a.addAll(b);
|
||||
return a;
|
||||
}, (a, b) -> {
|
||||
a.addAll(b);
|
||||
return a;
|
||||
})).reduce(new HashSet<>(), (a, b) -> {
|
||||
a.addAll(b);
|
||||
return a;
|
||||
});
|
||||
@@ -97,17 +116,17 @@ public class TypeInferenceHelper {
|
||||
|
||||
public static Set<String> constructorParaTypeVarNames(List<ClassOrInterface> allClasses) {
|
||||
return allClasses.stream().map(x -> x.getConstructors().stream()
|
||||
.map(y -> y.getParameterList().getFormalparalist().stream()
|
||||
.filter(z -> z.getType() instanceof TypePlaceholder)
|
||||
.map(z -> ((TypePlaceholder) z.getType()).getName())
|
||||
.collect(Collectors.toCollection(HashSet::new)))
|
||||
.reduce(new HashSet<String>(), (a, b) -> {
|
||||
a.addAll(b);
|
||||
return a;
|
||||
}, (a, b) -> {
|
||||
a.addAll(b);
|
||||
return a;
|
||||
})).reduce(new HashSet<String>(), (a, b) -> {
|
||||
.map(y -> y.getParameterList().getFormalparalist().stream()
|
||||
.filter(z -> z.getType() instanceof TypePlaceholder)
|
||||
.map(z -> ((TypePlaceholder) z.getType()).getName())
|
||||
.collect(Collectors.toCollection(HashSet::new)))
|
||||
.reduce(new HashSet<String>(), (a, b) -> {
|
||||
a.addAll(b);
|
||||
return a;
|
||||
}, (a, b) -> {
|
||||
a.addAll(b);
|
||||
return a;
|
||||
})).reduce(new HashSet<>(), (a, b) -> {
|
||||
a.addAll(b);
|
||||
return a;
|
||||
});
|
||||
@@ -115,31 +134,29 @@ public class TypeInferenceHelper {
|
||||
|
||||
public static Set<String> returnTypeVarNames(List<ClassOrInterface> allClasses) {
|
||||
return allClasses.stream()
|
||||
.map(x -> x.getMethods().stream().filter(y -> y.getReturnType() instanceof TypePlaceholder)
|
||||
.map(z -> ((TypePlaceholder) z.getReturnType()).getName())
|
||||
.collect(Collectors.toCollection(HashSet::new)))
|
||||
.reduce((a, b) -> {
|
||||
a.addAll(b);
|
||||
return a;
|
||||
}).get();
|
||||
.map(x -> x.getMethods().stream().filter(y -> y.getReturnType() instanceof TypePlaceholder)
|
||||
.map(z -> ((TypePlaceholder) z.getReturnType()).getName())
|
||||
.collect(Collectors.toCollection(HashSet::new)))
|
||||
.reduce((a, b) -> {
|
||||
a.addAll(b);
|
||||
return a;
|
||||
}).get();
|
||||
}
|
||||
|
||||
public static Set<String> fieldTypeVarNames(List<ClassOrInterface> allClasses) {
|
||||
return allClasses.stream()
|
||||
.map(x -> x.getFieldDecl().stream().filter(y -> y.getReturnType() instanceof TypePlaceholder)
|
||||
.map(z -> ((TypePlaceholder) z.getReturnType()).getName())
|
||||
.collect(Collectors.toCollection(HashSet::new)))
|
||||
.reduce((a, b) -> {
|
||||
a.addAll(b);
|
||||
return a;
|
||||
}).get();
|
||||
.map(x -> x.getFieldDecl().stream().filter(y -> y.getReturnType() instanceof TypePlaceholder)
|
||||
.map(z -> ((TypePlaceholder) z.getReturnType()).getName())
|
||||
.collect(Collectors.toCollection(HashSet::new)))
|
||||
.reduce((a, b) -> {
|
||||
a.addAll(b);
|
||||
return a;
|
||||
}).get();
|
||||
}
|
||||
|
||||
/**
|
||||
 * Propagates all variances of pairs (a <. Theta) or (Theta <. a), where a has a
 * variance != 0, to all type variables occurring in Theta.
|
||||
*
|
||||
*
|
||||
*/
|
||||
public static Set<PlaceholderType> varianceInheritanceConstraintSet(ConstraintSet<UnifyPair> cons) {
|
||||
Set<UnifyPair> eq = cons.getAll();
|
||||
@@ -162,7 +179,7 @@ public class TypeInferenceHelper {
|
||||
ArrayList<PlaceholderType> phSetVariance = new ArrayList<>(phSet);
|
||||
phSetVariance.removeIf(x -> (x.getVariance() == 0));
|
||||
while (!phSetVariance.isEmpty()) {
|
||||
PlaceholderType a = phSetVariance.remove(0);
|
||||
PlaceholderType a = phSetVariance.removeFirst();
|
||||
usedTPH.add(a);
|
||||
// HashMap<PlaceholderType,Integer> ht = new HashMap<>();
|
||||
// ht.put(a, a.getVariance());
|
||||
@@ -171,8 +188,8 @@ public class TypeInferenceHelper {
|
||||
// ((PlaceholderType)x.getLhsType()).equals(a)));
|
||||
// durch if-Abfrage im foreach geloest
|
||||
cons.forEach(x -> {
|
||||
if (x.getLhsType() instanceof PlaceholderType && ((PlaceholderType) x.getLhsType()).equals(a)) {
|
||||
x.getRhsType().accept(new distributeVariance(), a.getVariance());
|
||||
if (x.getLhsType() instanceof PlaceholderType && x.getLhsType().equals(a)) {
|
||||
x.getRhsType().accept(new DistributeVariance(), a.getVariance());
|
||||
}
|
||||
});
|
||||
// ` eq1 = new HashSet<>(eq);
|
||||
@@ -180,8 +197,8 @@ public class TypeInferenceHelper {
|
||||
// ((PlaceholderType)x.getRhsType()).equals(a)));
|
||||
// durch if-Abfrage im foreach geloest
|
||||
cons.forEach(x -> {
|
||||
if (x.getRhsType() instanceof PlaceholderType && ((PlaceholderType) x.getRhsType()).equals(a)) {
|
||||
x.getLhsType().accept(new distributeVariance(), a.getVariance());
|
||||
if (x.getRhsType() instanceof PlaceholderType && x.getRhsType().equals(a)) {
|
||||
x.getLhsType().accept(new DistributeVariance(), a.getVariance());
|
||||
}
|
||||
});
|
||||
phSetVariance = new ArrayList<>(phSet); // macht vermutlich keinen Sinn PL 2018-10-18, doch, es koennen neue
|
||||
|
||||
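The variance-inheritance helper above implements the rule documented in its comment: whenever a pair (a <. Theta) or (Theta <. a) exists and a already carries a variance other than 0, every type variable occurring in Theta inherits that variance. The following self-contained sketch deliberately uses plain strings instead of the project's PlaceholderType/UnifyPair classes and only illustrates that one propagation step.

    import java.util.*;

    class VarianceInheritanceDemo {
        public static void main(String[] args) {
            Map<String, Integer> variance = new TreeMap<>();
            variance.put("a", 1);                          // a is covariant, i.e. variance != 0
            List<String> varsInTheta = List.of("b", "c");  // type variables occurring in Theta of (a <. Theta)
            if (variance.getOrDefault("a", 0) != 0) {
                for (String tv : varsInTheta) {
                    variance.putIfAbsent(tv, variance.get("a")); // b and c inherit a's variance
                }
            }
            System.out.println(variance);                  // {a=1, b=1, c=1}
        }
    }
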
@@ -1,6 +1,5 @@
|
||||
package de.dhbwstuttgart.typeinference.assumptions;
|
||||
|
||||
import de.dhbwstuttgart.exceptions.NotImplementedException;
|
||||
import de.dhbwstuttgart.parser.NullToken;
|
||||
import de.dhbwstuttgart.syntaxtree.ClassOrInterface;
|
||||
import de.dhbwstuttgart.syntaxtree.GenericTypeVar;
|
||||
@@ -13,13 +12,13 @@ import de.dhbwstuttgart.typeinference.constraints.GenericsResolver;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
public class FieldAssumption extends Assumption{
|
||||
private ClassOrInterface receiverClass;
|
||||
private RefTypeOrTPHOrWildcardOrGeneric type;
|
||||
private String name;
|
||||
public class FieldAssumption extends Assumption {
|
||||
private final ClassOrInterface receiverClass;
|
||||
private final RefTypeOrTPHOrWildcardOrGeneric type;
|
||||
private final String name;
|
||||
|
||||
public FieldAssumption(String fieldName, ClassOrInterface receiverType,
|
||||
RefTypeOrTPHOrWildcardOrGeneric type, TypeScope scope){
|
||||
RefTypeOrTPHOrWildcardOrGeneric type, TypeScope scope) {
|
||||
super(scope);
|
||||
this.type = type;
|
||||
this.receiverClass = receiverType;
|
||||
@@ -36,11 +35,11 @@ public class FieldAssumption extends Assumption{
|
||||
|
||||
public RefTypeOrTPHOrWildcardOrGeneric getReceiverType(GenericsResolver resolver) {
|
||||
List<RefTypeOrTPHOrWildcardOrGeneric> params = new ArrayList<>();
|
||||
for(GenericTypeVar gtv : receiverClass.getGenerics()){
|
||||
for (GenericTypeVar gtv : receiverClass.getGenerics()) {
|
||||
//Here a GenericRefType is built whose name is unique to this field
|
||||
GenericRefType genericRefType =
|
||||
new GenericRefType(gtv.getName()
|
||||
, new NullToken());
|
||||
new GenericRefType(gtv.getName()
|
||||
, new NullToken());
|
||||
//This is then resolved correctly by the resolver:
|
||||
params.add(resolver.resolve(genericRefType));
|
||||
}
|
||||
|
||||
@@ -7,13 +7,7 @@ import de.dhbwstuttgart.syntaxtree.GenericDeclarationList;
|
||||
import de.dhbwstuttgart.syntaxtree.GenericTypeVar;
|
||||
import de.dhbwstuttgart.syntaxtree.Method;
|
||||
import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
|
||||
import de.dhbwstuttgart.syntaxtree.factory.NameGenerator;
|
||||
import de.dhbwstuttgart.syntaxtree.type.GenericRefType;
|
||||
import de.dhbwstuttgart.syntaxtree.type.RefType;
|
||||
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
|
||||
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
|
||||
|
||||
import org.antlr.v4.runtime.Token;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
@@ -21,19 +15,19 @@ import java.util.Optional;
|
||||
|
||||
public class FunNClass extends ClassOrInterface {
|
||||
public FunNClass(List<GenericRefType> funNParams) {
|
||||
super(0, new JavaClassName("Fun"+(funNParams.size()-1)), new ArrayList<>(), Optional.empty() /* eingefuegt PL 2018-11-24 */,
|
||||
createMethods(funNParams), new ArrayList<>(), createGenerics(funNParams),
|
||||
ASTFactory.createObjectType(), true, new ArrayList<>(), new NullToken());
|
||||
super(0, new JavaClassName("Fun" + (funNParams.size() - 1)), new ArrayList<>(), Optional.empty() /* eingefuegt PL 2018-11-24 */,
|
||||
createMethods(funNParams), new ArrayList<>(), createGenerics(funNParams),
|
||||
ASTFactory.createObjectType(), true, new ArrayList<>(), new NullToken());
|
||||
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
private static GenericDeclarationList createGenerics(List<GenericRefType> funNParams) {
|
||||
//PL 2018-06-22: so geaendert, dass generierte Generics den Namen der funParams entsprechen.
|
||||
//PL 2018-06-22: so geaendert, dass generierte Generics den Namen der funParams entsprechen.
|
||||
List<GenericTypeVar> generics = new ArrayList<>();
|
||||
for(GenericRefType param : funNParams){
|
||||
for (GenericRefType param : funNParams) {
|
||||
generics.add(new GenericTypeVar(param.getParsedName(),//NameGenerator.makeNewName(),
|
||||
new ArrayList<>(), new NullToken(), new NullToken()));
|
||||
new ArrayList<>(), new NullToken(), new NullToken()));
|
||||
}
|
||||
return new GenericDeclarationList(generics, new NullToken());
|
||||
}
|
||||
|
||||
@@ -13,14 +13,14 @@ import de.dhbwstuttgart.typeinference.constraints.GenericsResolver;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
public class MethodAssumption extends Assumption{
|
||||
private ClassOrInterface receiver;
|
||||
private RefTypeOrTPHOrWildcardOrGeneric retType;
|
||||
List<? extends RefTypeOrTPHOrWildcardOrGeneric> params;
|
||||
public class MethodAssumption extends Assumption {
|
||||
private final Boolean isInherited;
|
||||
List<? extends RefTypeOrTPHOrWildcardOrGeneric> params;
|
||||
private final ClassOrInterface receiver;
|
||||
private final RefTypeOrTPHOrWildcardOrGeneric retType;
|
||||
|
||||
public MethodAssumption(ClassOrInterface receiver, RefTypeOrTPHOrWildcardOrGeneric retType,
|
||||
List<? extends RefTypeOrTPHOrWildcardOrGeneric> params, TypeScope scope, Boolean isInherited){
|
||||
List<? extends RefTypeOrTPHOrWildcardOrGeneric> params, TypeScope scope, Boolean isInherited) {
|
||||
super(scope);
|
||||
this.receiver = receiver;
|
||||
this.retType = retType;
|
||||
@@ -28,23 +28,16 @@ public class MethodAssumption extends Assumption{
|
||||
this.isInherited = isInherited;
|
||||
}
|
||||
|
||||
/*
|
||||
public RefType getReceiverType() {
|
||||
|
||||
public ClassOrInterface getReceiver() {
|
||||
return receiver;
|
||||
}
|
||||
*/
|
||||
|
||||
public ClassOrInterface getReceiver(){
|
||||
return receiver;
|
||||
}
|
||||
|
||||
public RefTypeOrTPHOrWildcardOrGeneric getReturnType() {
|
||||
return retType;
|
||||
return retType;
|
||||
}
|
||||
|
||||
public List<? extends RefTypeOrTPHOrWildcardOrGeneric> getArgTypes(){
|
||||
return params;
|
||||
|
||||
public List<? extends RefTypeOrTPHOrWildcardOrGeneric> getArgTypes() {
|
||||
return params;
|
||||
}
|
||||
|
||||
public RefTypeOrTPHOrWildcardOrGeneric getReturnType(GenericsResolver resolver) {
|
||||
@@ -53,35 +46,31 @@ public class MethodAssumption extends Assumption{
|
||||
|
||||
public List<RefTypeOrTPHOrWildcardOrGeneric> getArgTypes(GenericsResolver resolver) {
|
||||
List<RefTypeOrTPHOrWildcardOrGeneric> ret = new ArrayList<>();
|
||||
for(RefTypeOrTPHOrWildcardOrGeneric param : params){
|
||||
for (RefTypeOrTPHOrWildcardOrGeneric param : params) {
|
||||
param = resolver.resolve(param);
|
||||
ret.add(param);
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param resolver
|
||||
* @return
|
||||
*/
|
||||
|
||||
public RefTypeOrTPHOrWildcardOrGeneric getReceiverType(GenericsResolver resolver) {
|
||||
List<RefTypeOrTPHOrWildcardOrGeneric> params = new ArrayList<>();
|
||||
for(GenericTypeVar gtv : receiver.getGenerics()){
|
||||
for (GenericTypeVar gtv : receiver.getGenerics()) {
|
||||
//The generics are all converted to TPHs.
|
||||
params.add(resolver.resolve(new GenericRefType(gtv.getName(), new NullToken())));
|
||||
}
|
||||
RefTypeOrTPHOrWildcardOrGeneric receiverType;
|
||||
if(receiver instanceof FunNClass){
|
||||
receiverType = new RefType(new JavaClassName(receiver.getClassName().toString()+"$$"), params, new NullToken()); // new FunN(params);
|
||||
}else{
|
||||
if (receiver instanceof FunNClass) {
|
||||
receiverType = new RefType(new JavaClassName(receiver.getClassName().toString() + "$$"), params, new NullToken()); // new FunN(params);
|
||||
} else {
|
||||
receiverType = new RefType(receiver.getClassName(), params, new NullToken());
|
||||
}
|
||||
|
||||
return receiverType;
|
||||
}
|
||||
|
||||
|
||||
public Boolean isInherited() {
|
||||
return isInherited;
|
||||
return isInherited;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,22 +1,13 @@
|
||||
package de.dhbwstuttgart.typeinference.assumptions;
|
||||
|
||||
import com.google.common.collect.Iterables;
|
||||
import com.google.common.collect.Iterators;
|
||||
import de.dhbwstuttgart.syntaxtree.ClassOrInterface;
|
||||
import de.dhbwstuttgart.syntaxtree.GenericTypeVar;
|
||||
import de.dhbwstuttgart.syntaxtree.Method;
|
||||
import de.dhbwstuttgart.syntaxtree.TypeScope;
|
||||
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Set;
|
||||
import java.util.Stack;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
public class TypeInferenceBlockInformation extends TypeInferenceInformation {
|
||||
private TypeScope methodContext;
|
||||
private ClassOrInterface currentClass;
|
||||
private final TypeScope methodContext;
|
||||
private final ClassOrInterface currentClass;
|
||||
|
||||
public TypeInferenceBlockInformation(Collection<ClassOrInterface> availableClasses,
|
||||
ClassOrInterface currentClass, TypeScope methodContext) {
|
||||
@@ -24,12 +15,15 @@ public class TypeInferenceBlockInformation extends TypeInferenceInformation {
|
||||
this.methodContext = new TypeScopeContainer(currentClass, methodContext);
|
||||
this.currentClass = currentClass;
|
||||
}
|
||||
|
||||
public TypeInferenceBlockInformation(TypeInferenceBlockInformation oldScope, TypeScope newScope) {
|
||||
this(oldScope.getAvailableClasses(), oldScope.currentClass, new TypeScopeContainer(oldScope.methodContext, newScope));
|
||||
}
|
||||
|
||||
public ClassOrInterface getCurrentClass() {
|
||||
return currentClass;
|
||||
}
|
||||
|
||||
public TypeScope getCurrentTypeScope() {
|
||||
return methodContext;
|
||||
}
|
||||
|
||||
@@ -1,20 +1,15 @@
|
||||
package de.dhbwstuttgart.typeinference.assumptions;
|
||||
|
||||
import de.dhbwstuttgart.exceptions.NotImplementedException;
|
||||
import de.dhbwstuttgart.parser.NullToken;
|
||||
import de.dhbwstuttgart.syntaxtree.*;
|
||||
import de.dhbwstuttgart.syntaxtree.statement.ArgumentList;
|
||||
import de.dhbwstuttgart.syntaxtree.ClassOrInterface;
|
||||
import de.dhbwstuttgart.syntaxtree.Field;
|
||||
import de.dhbwstuttgart.syntaxtree.type.GenericRefType;
|
||||
import de.dhbwstuttgart.syntaxtree.type.RefType;
|
||||
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
|
||||
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
|
||||
import de.dhbwstuttgart.typeinference.constraints.Constraint;
|
||||
import de.dhbwstuttgart.typeinference.constraints.Pair;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
/*
|
||||
Anmerkung:
|
||||
@@ -27,25 +22,25 @@ Zweiteres hat den Vorteil, dass bei der Entwicklung leichter Dinge hinzugefügt
|
||||
Die ganze Logik steckt in dieser Klasse.
|
||||
*/
|
||||
public class TypeInferenceInformation {
|
||||
private Collection<ClassOrInterface> classes;
|
||||
private final Collection<ClassOrInterface> classes;
|
||||
|
||||
public TypeInferenceInformation(Collection<ClassOrInterface> availableClasses){
|
||||
public TypeInferenceInformation(Collection<ClassOrInterface> availableClasses) {
|
||||
classes = availableClasses;
|
||||
}
|
||||
|
||||
public RefTypeOrTPHOrWildcardOrGeneric checkGTV(RefTypeOrTPHOrWildcardOrGeneric type){
|
||||
if(type instanceof GenericRefType){
|
||||
public RefTypeOrTPHOrWildcardOrGeneric checkGTV(RefTypeOrTPHOrWildcardOrGeneric type) {
|
||||
if (type instanceof GenericRefType) {
|
||||
return TypePlaceholder.fresh(new NullToken());
|
||||
}else{
|
||||
} else {
|
||||
return type;
|
||||
}
|
||||
}
|
||||
|
||||
public List<FieldAssumption> getFields(String name){
|
||||
public List<FieldAssumption> getFields(String name) {
|
||||
List<FieldAssumption> ret = new ArrayList<>();
|
||||
for(ClassOrInterface cl : classes){
|
||||
for(Field m : cl.getFieldDecl()){
|
||||
if(m.getName().equals(name)){
|
||||
for (ClassOrInterface cl : classes) {
|
||||
for (Field m : cl.getFieldDecl()) {
|
||||
if (m.getName().equals(name)) {
|
||||
|
||||
ret.add(new FieldAssumption(name, cl, m.getType(), new TypeScopeContainer(cl, m)));
|
||||
}
|
||||
|
||||
@@ -12,7 +12,8 @@ import java.util.stream.Collectors;
|
||||
public class TypeScopeContainer implements TypeScope {
|
||||
ArrayList<TypeScope> scopes = new ArrayList<>();
|
||||
Stack<RefTypeOrTPHOrWildcardOrGeneric> types = new Stack<>();
|
||||
public TypeScopeContainer(TypeScope scope1, TypeScope scope2){
|
||||
|
||||
public TypeScopeContainer(TypeScope scope1, TypeScope scope2) {
|
||||
scopes.add(scope1);
|
||||
scopes.add(scope2);
|
||||
types.push(scope1.getReturnType());
|
||||
@@ -22,11 +23,11 @@ public class TypeScopeContainer implements TypeScope {
|
||||
@Override
|
||||
public Iterable<? extends GenericTypeVar> getGenerics() {
|
||||
return Iterables.concat(scopes.stream().
|
||||
map(TypeScope::getGenerics).collect(Collectors.toList()).toArray(new Iterable[0]));
|
||||
map(TypeScope::getGenerics).collect(Collectors.toList()).toArray(new Iterable[0]));
|
||||
}
|
||||
|
||||
@Override
|
||||
public RefTypeOrTPHOrWildcardOrGeneric getReturnType() {
|
||||
return types.peek();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,69 +1,64 @@
|
||||
package de.dhbwstuttgart.typeinference.constraints;
|
||||
|
||||
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
|
||||
public class Constraint<A> extends HashSet<A> {
|
||||
private static final long serialVersionUID = 1L;
|
||||
private Boolean isInherited = false;//wird nur für die Method-Constraints benoetigt
|
||||
|
||||
/*
|
||||
* wird verwendet um bei der Codegenerierung die richtige Methoden - Signatur
|
||||
* auszuwaehlen
|
||||
*/
|
||||
/*private*/ Set<A> methodSignatureConstraint = new HashSet<>();
|
||||
|
||||
private Constraint<A> extendConstraint = null;
|
||||
|
||||
public Constraint() {
|
||||
super();
|
||||
}
|
||||
|
||||
public Constraint(Boolean isInherited) {
|
||||
this.isInherited = isInherited;
|
||||
}
|
||||
|
||||
public Constraint(Boolean isInherited, Constraint<A> extendConstraint, Set<A> methodSignatureConstraint) {
|
||||
this.isInherited = isInherited;
|
||||
this.extendConstraint = extendConstraint;
|
||||
this.methodSignatureConstraint = methodSignatureConstraint;
|
||||
}
|
||||
|
||||
public void setIsInherited(Boolean isInherited) {
|
||||
this.isInherited = isInherited;
|
||||
}
|
||||
|
||||
public Boolean isInherited() {
|
||||
return isInherited;
|
||||
}
|
||||
|
||||
public Constraint<A> getExtendConstraint() {
|
||||
return extendConstraint;
|
||||
}
|
||||
|
||||
public void setExtendConstraint(Constraint<A> c) {
|
||||
extendConstraint = c;
|
||||
}
|
||||
|
||||
public Set<A> getmethodSignatureConstraint() {
|
||||
return methodSignatureConstraint;
|
||||
}
|
||||
|
||||
public void setmethodSignatureConstraint(Set<A> c) {
|
||||
methodSignatureConstraint = c;
|
||||
}
|
||||
private static final long serialVersionUID = 1L;
|
||||
/*
|
||||
* wird verwendet um bei der Codegenerierung die richtige Methoden - Signatur
|
||||
* auszuwaehlen
|
||||
*/
|
||||
/*private*/ Set<A> methodSignatureConstraint = new HashSet<>();
|
||||
private Boolean isInherited = false;//wird nur für die Method-Constraints benoetigt
|
||||
private Constraint<A> extendConstraint = null;
|
||||
|
||||
public Constraint() {
|
||||
super();
|
||||
}
|
||||
|
||||
public Constraint(Boolean isInherited) {
|
||||
this.isInherited = isInherited;
|
||||
}
|
||||
|
||||
public Constraint(Boolean isInherited, Constraint<A> extendConstraint, Set<A> methodSignatureConstraint) {
|
||||
this.isInherited = isInherited;
|
||||
this.extendConstraint = extendConstraint;
|
||||
this.methodSignatureConstraint = methodSignatureConstraint;
|
||||
}
|
||||
|
||||
public void setIsInherited(Boolean isInherited) {
|
||||
this.isInherited = isInherited;
|
||||
}
|
||||
|
||||
public Boolean isInherited() {
|
||||
return isInherited;
|
||||
}
|
||||
|
||||
public Constraint<A> getExtendConstraint() {
|
||||
return extendConstraint;
|
||||
}
|
||||
|
||||
public void setExtendConstraint(Constraint<A> c) {
|
||||
extendConstraint = c;
|
||||
}
|
||||
|
||||
public Set<A> getmethodSignatureConstraint() {
|
||||
return methodSignatureConstraint;
|
||||
}
|
||||
|
||||
public void setmethodSignatureConstraint(Set<A> c) {
|
||||
methodSignatureConstraint = c;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return super.toString() + "\nisInherited = " + isInherited
|
||||
//" + extendsContraint: " + (extendConstraint != null ? extendConstraint.toStringBase() : "null" )
|
||||
+ "\n";
|
||||
}
|
||||
|
||||
public String toStringBase() {
|
||||
return super.toString();
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return super.toString() + "\nisInherited = " + isInherited
|
||||
//" + extendsContraint: " + (extendConstraint != null ? extendConstraint.toStringBase() : "null" )
|
||||
+ "\n" ;
|
||||
}
|
||||
|
||||
public String toStringBase() {
|
||||
return super.toString();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
package de.dhbwstuttgart.typeinference.constraints;
|
||||
|
||||
|
||||
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
|
||||
import de.dhbwstuttgart.typeinference.unify.GuavaSetOperations;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.function.BinaryOperator;
|
||||
@@ -15,7 +13,7 @@ public class ConstraintSet<A> {
|
||||
Constraint<A> undConstraints = new Constraint<>();
|
||||
List<Set<Constraint<A>>> oderConstraints = new ArrayList<>();
|
||||
|
||||
public void addUndConstraint(A p){
|
||||
public void addUndConstraint(A p) {
|
||||
undConstraints.add(p);
|
||||
}
|
||||
|
||||
@@ -23,28 +21,28 @@ public class ConstraintSet<A> {
|
||||
oderConstraints.add(methodConstraints);
|
||||
}
|
||||
|
||||
public void addAllUndConstraint(Constraint<A> allUndConstraints){
|
||||
public void addAllUndConstraint(Constraint<A> allUndConstraints) {
|
||||
undConstraints.addAll(allUndConstraints);
|
||||
}
|
||||
|
||||
public void addAllOderConstraint(List<Set<Constraint<A>>> allOderConstraints){
|
||||
|
||||
public void addAllOderConstraint(List<Set<Constraint<A>>> allOderConstraints) {
|
||||
this.oderConstraints.addAll(allOderConstraints);
|
||||
}
|
||||
|
||||
|
||||
public void addAll(ConstraintSet constraints) {
|
||||
this.addAllUndConstraint(constraints.undConstraints);
|
||||
this.addAllOderConstraint(constraints.oderConstraints);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString(){
|
||||
BinaryOperator<String> b = (x,y) -> x+y;
|
||||
return "\nUND:" + this.undConstraints.toString() + "\n" +
|
||||
"ODER:" + this.oderConstraints.stream().reduce("", (x,y) -> x.toString()+ "\n" +y, b);
|
||||
//cartesianProduct().toString();
|
||||
public String toString() {
|
||||
BinaryOperator<String> b = (x, y) -> x + y;
|
||||
return "\nUND:" + this.undConstraints.toString() + "\n" +
|
||||
"ODER:" + this.oderConstraints.stream().reduce("", (x, y) -> x + "\n" + y, b);
|
||||
//cartesianProduct().toString();
|
||||
}
|
||||
|
||||
public Set<List<Constraint<A>>> cartesianProduct(){
|
||||
public Set<List<Constraint<A>>> cartesianProduct() {
|
||||
Set<Constraint<A>> toAdd = new HashSet<>();
|
||||
toAdd.add(undConstraints);
|
||||
List<Set<Constraint<A>>> allConstraints = new ArrayList<>();
|
||||
@@ -54,7 +52,7 @@ public class ConstraintSet<A> {
|
||||
}
|
||||
|
||||
public <B> ConstraintSet<B> map(Function<? super A, ? extends B> o) {
|
||||
Hashtable<Constraint<A>,Constraint<B>> CSA2CSB = new Hashtable<>();
|
||||
Hashtable<Constraint<A>, Constraint<B>> CSA2CSB = new Hashtable<>();
|
||||
ConstraintSet<B> ret = new ConstraintSet<>();
|
||||
ret.undConstraints = undConstraints.stream().map(o).collect(Collectors.toCollection(Constraint<B>::new));
|
||||
List<Set<Constraint<B>>> newOder = new ArrayList<>();
|
||||
@@ -68,23 +66,23 @@ public class ConstraintSet<A> {
|
||||
CSA2CSB.put(as, newConst);} );
|
||||
}
|
||||
*/
|
||||
|
||||
for(Set<Constraint<A>> oderConstraint : oderConstraints){
|
||||
|
||||
for (Set<Constraint<A>> oderConstraint : oderConstraints) {
|
||||
newOder.add(
|
||||
oderConstraint.parallelStream().map((Constraint<A> as) -> {
|
||||
|
||||
Constraint<B> newConst = as.stream()
|
||||
.map(o)
|
||||
.collect(Collectors.toCollection((as.getExtendConstraint() != null)
|
||||
? () -> new Constraint<B> (as.isInherited(),
|
||||
as.getExtendConstraint().stream().map(o).collect(Collectors.toCollection(Constraint::new)),
|
||||
as.getmethodSignatureConstraint().stream().map(o).collect(Collectors.toCollection(HashSet::new)))
|
||||
: () -> new Constraint<B> (as.isInherited())
|
||||
));
|
||||
|
||||
//CSA2CSB.put(as, newConst);
|
||||
|
||||
return newConst;
|
||||
oderConstraint.parallelStream().map((Constraint<A> as) -> {
|
||||
|
||||
Constraint<B> newConst = as.stream()
|
||||
.map(o)
|
||||
.collect(Collectors.toCollection((as.getExtendConstraint() != null)
|
||||
? () -> new Constraint<B>(as.isInherited(),
|
||||
as.getExtendConstraint().stream().map(o).collect(Collectors.toCollection(Constraint::new)),
|
||||
as.getmethodSignatureConstraint().stream().map(o).collect(Collectors.toCollection(HashSet::new)))
|
||||
: () -> new Constraint<B>(as.isInherited())
|
||||
));
|
||||
|
||||
//CSA2CSB.put(as, newConst);
|
||||
|
||||
return newConst;
|
||||
|
||||
/*
|
||||
Constraint<B> bs = CSA2CSB.get(as);
|
||||
@@ -93,36 +91,36 @@ public class ConstraintSet<A> {
|
||||
}
|
||||
return bs;
|
||||
*/
|
||||
}).collect(Collectors.toSet())
|
||||
}).collect(Collectors.toSet())
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
ret.oderConstraints = newOder;
|
||||
return ret;
|
||||
}
|
||||
|
||||
public void forEach (Consumer<? super A> c) {
|
||||
|
||||
public void forEach(Consumer<? super A> c) {
|
||||
undConstraints.stream().forEach(c);
|
||||
for(Set<Constraint<A>> oderConstraint : oderConstraints){
|
||||
for (Set<Constraint<A>> oderConstraint : oderConstraints) {
|
||||
oderConstraint.parallelStream().forEach((Constraint<A> as) ->
|
||||
as.stream().forEach(c));
|
||||
as.stream().forEach(c));
|
||||
}
|
||||
}
|
||||
|
||||
public Set<A> getAll () {
|
||||
Set<A> ret = new HashSet<>();
|
||||
|
||||
public Set<A> getAll() {
|
||||
Set<A> ret = new HashSet<>();
|
||||
ret.addAll(undConstraints);
|
||||
for(Set<Constraint<A>> oderConstraint : oderConstraints){
|
||||
for (Set<Constraint<A>> oderConstraint : oderConstraints) {
|
||||
oderConstraint.parallelStream().forEach((Constraint<A> as) -> ret.addAll(as));
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
|
||||
public List<Set<Constraint<A>>> getOderConstraints() {
|
||||
return oderConstraints;
|
||||
return oderConstraints;
|
||||
}
|
||||
|
||||
|
||||
public Set<A> getUndConstraints() {
|
||||
return undConstraints;
|
||||
return undConstraints;
|
||||
}
|
||||
}
|
||||
|
||||
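ConstraintSet keeps one set of und-constraints plus a list of oder-constraint alternatives; cartesianProduct() (only partially visible in this hunk) builds every combination that picks one alternative per oder set and always includes the und-constraints. The following self-contained sketch assumes exactly that und/oder semantics and uses plain strings instead of Constraint<UnifyPair>.

    import java.util.*;

    class OderConstraintExpansionDemo {
        public static void main(String[] args) {
            Set<String> und = Set.of("x <. Number");                        // part of every combination
            List<List<Set<String>>> oder = List.of(
                    List.of(Set.of("y =. Integer"), Set.of("y =. Double")), // alternatives of the first oder set
                    List.of(Set.of("z <. List"), Set.of("z <. Set")));      // alternatives of the second oder set

            List<Set<String>> combinations = new ArrayList<>();
            combinations.add(new HashSet<>(und));
            for (List<Set<String>> alternatives : oder) {
                List<Set<String>> next = new ArrayList<>();
                for (Set<String> partial : combinations) {
                    for (Set<String> alternative : alternatives) {
                        Set<String> merged = new HashSet<>(partial);
                        merged.addAll(alternative);
                        next.add(merged);
                    }
                }
                combinations = next;
            }
            combinations.forEach(System.out::println);                      // 2 * 2 = 4 combinations
        }
    }
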
@@ -1,14 +1,11 @@
|
||||
package de.dhbwstuttgart.typeinference.constraints;
|
||||
|
||||
import de.dhbwstuttgart.syntaxtree.GenericTypeVar;
|
||||
import de.dhbwstuttgart.syntaxtree.type.GenericRefType;
|
||||
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
|
||||
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
|
||||
|
||||
/**
|
||||
 * Needed for generics
 * TODO: explanation!
|
||||
*/
|
||||
public interface GenericsResolver {
|
||||
public RefTypeOrTPHOrWildcardOrGeneric resolve(RefTypeOrTPHOrWildcardOrGeneric generic);
|
||||
RefTypeOrTPHOrWildcardOrGeneric resolve(RefTypeOrTPHOrWildcardOrGeneric generic);
|
||||
}
|
||||
|
||||
@@ -1,130 +1,44 @@
|
||||
package de.dhbwstuttgart.typeinference.constraints;
|
||||
import java.io.Serializable;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
|
||||
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
public class Pair implements Serializable
|
||||
{
|
||||
|
||||
public class Pair implements Serializable {
|
||||
public final RefTypeOrTPHOrWildcardOrGeneric TA1;
|
||||
public final RefTypeOrTPHOrWildcardOrGeneric TA2;
|
||||
|
||||
private PairOperator eOperator = PairOperator.SMALLER;
|
||||
private Boolean noUnification = false;
|
||||
|
||||
|
||||
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2 )
|
||||
{
|
||||
|
||||
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2) {
|
||||
this.TA1 = TA1;
|
||||
this.TA2 = TA2;
|
||||
if(TA1 == null || TA2 == null)
|
||||
if (TA1 == null || TA2 == null)
|
||||
throw new NullPointerException();
|
||||
eOperator = PairOperator.SMALLER;
|
||||
}
|
||||
|
||||
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator eOp)
|
||||
{
|
||||
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator eOp) {
|
||||
// Konstruktor
|
||||
this(TA1,TA2);
|
||||
this(TA1, TA2);
|
||||
this.eOperator = eOp;
|
||||
}
|
||||
|
||||
|
||||
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator eOp, Boolean noUnification)
|
||||
{
|
||||
|
||||
|
||||
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator eOp, Boolean noUnification) {
|
||||
// Konstruktor
|
||||
this(TA1,TA2);
|
||||
this(TA1, TA2);
|
||||
this.eOperator = eOp;
|
||||
this.noUnification = noUnification;
|
||||
}
|
||||
|
||||
public String toString()
|
||||
{
|
||||
// otth: Gibt ein Paar als String aus --> zum Debuggen und Vergleichen
|
||||
String strElement1 = "NULL";
|
||||
String strElement2 = "NULL";
|
||||
String Operator = "<.";
|
||||
|
||||
if( TA1 != null )
|
||||
strElement1 = TA1.toString();
|
||||
|
||||
if( TA2 != null )
|
||||
strElement2 = TA2.toString();
|
||||
|
||||
/* PL ausskommentiert 2018-05-24
|
||||
if(OperatorEqual())
|
||||
Operator = "=";
|
||||
if(OperatorSmaller())
|
||||
Operator = "<.";
|
||||
if(OperatorSmallerExtends())
|
||||
Operator = "<?";
|
||||
*/
|
||||
|
||||
return "\n(" + strElement1 + " " + eOperator.toString() + " " + strElement2 + ")";
|
||||
|
||||
/*- Equals: " + bEqual*/
|
||||
}
|
||||
|
||||
/**
|
||||
 * <br/>Author: Jörg Bäuerle
|
||||
* @param obj
|
||||
* @return
|
||||
*/
|
||||
public boolean equals(Object obj)
|
||||
{
|
||||
boolean ret = true;
|
||||
ret &= (obj instanceof Pair);
|
||||
if(!ret)return ret;
|
||||
ret &= ((Pair)obj).TA1.equals(this.TA1);
|
||||
ret &= ((Pair)obj).TA2.equals(this.TA2);
|
||||
return ret;
|
||||
}
|
||||
|
||||
/**
|
||||
* Author: Arne Lüdtke<br/>
|
||||
* Abfrage, ob Operator vom Typ Equal ist.
|
||||
*/
|
||||
public boolean OperatorEqual()
|
||||
{
|
||||
return eOperator == PairOperator.EQUALSDOT;
|
||||
}
|
||||
|
||||
/**
|
||||
* Author: Arne Lüdtke<br/>
|
||||
* Abfrage, ob Operator vom Typ Smaller ist.
|
||||
*/
|
||||
public boolean OperatorSmaller()
|
||||
{
|
||||
return eOperator == PairOperator.SMALLER;
|
||||
}
|
||||
|
||||
/**
|
||||
* Author: Arne Lüdtke<br/>
|
||||
* Abfrage, ob Operator vom Typ SmallerExtends ist.
|
||||
*/
|
||||
public boolean OperatorSmallerExtends()
|
||||
{
|
||||
return eOperator == PairOperator.SMALLERDOTWC;
|
||||
}
|
||||
|
||||
/**
|
||||
* Author: Arne Lüdtke<br/>
|
||||
* Gibt den Operator zurück.
|
||||
*/
|
||||
public PairOperator GetOperator()
|
||||
{
|
||||
return eOperator;
|
||||
}
|
||||
|
||||
public boolean OperatorSmallerDot() {
|
||||
return eOperator == PairOperator.SMALLERDOT;
|
||||
}
|
||||
|
||||
|
||||
static public Map<String, TypePlaceholder> generateTPHMap(ConstraintSet<Pair> constraints) {
|
||||
HashMap<String, TypePlaceholder> ret = new HashMap<>();
|
||||
constraints.map((Pair p) -> {
|
||||
@@ -138,5 +52,82 @@ public class Pair implements Serializable
|
||||
});
|
||||
return ret;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
// otth: Gibt ein Paar als String aus --> zum Debuggen und Vergleichen
|
||||
String strElement1 = "NULL";
|
||||
String strElement2 = "NULL";
|
||||
String Operator = "<.";
|
||||
|
||||
if (TA1 != null)
|
||||
strElement1 = TA1.toString();
|
||||
|
||||
if (TA2 != null)
|
||||
strElement2 = TA2.toString();
|
||||
|
||||
/* PL ausskommentiert 2018-05-24
|
||||
if(OperatorEqual())
|
||||
Operator = "=";
|
||||
if(OperatorSmaller())
|
||||
Operator = "<.";
|
||||
if(OperatorSmallerExtends())
|
||||
Operator = "<?";
|
||||
*/
|
||||
|
||||
return "\n(" + strElement1 + " " + eOperator.toString() + " " + strElement2 + ")";
|
||||
|
||||
/*- Equals: " + bEqual*/
|
||||
}
|
||||
|
||||
/**
|
||||
 * <br/>Author: Jörg Bäuerle
|
||||
*
|
||||
* @param obj
|
||||
* @return
|
||||
*/
|
||||
public boolean equals(Object obj) {
|
||||
boolean ret = true;
|
||||
ret &= (obj instanceof Pair);
|
||||
if (!ret) return ret;
|
||||
ret &= ((Pair) obj).TA1.equals(this.TA1);
|
||||
ret &= ((Pair) obj).TA2.equals(this.TA2);
|
||||
return ret;
|
||||
}
|
||||
|
||||
/**
|
||||
* Author: Arne Lüdtke<br/>
|
||||
* Abfrage, ob Operator vom Typ Equal ist.
|
||||
*/
|
||||
public boolean OperatorEqual() {
|
||||
return eOperator == PairOperator.EQUALSDOT;
|
||||
}
|
||||
|
||||
/**
|
||||
* Author: Arne Lüdtke<br/>
|
||||
* Abfrage, ob Operator vom Typ Smaller ist.
|
||||
*/
|
||||
public boolean OperatorSmaller() {
|
||||
return eOperator == PairOperator.SMALLER;
|
||||
}
|
||||
|
||||
/**
|
||||
* Author: Arne Lüdtke<br/>
|
||||
* Abfrage, ob Operator vom Typ SmallerExtends ist.
|
||||
*/
|
||||
public boolean OperatorSmallerExtends() {
|
||||
return eOperator == PairOperator.SMALLERDOTWC;
|
||||
}
|
||||
|
||||
/**
|
||||
* Author: Arne Lüdtke<br/>
|
||||
* Gibt den Operator zurück.
|
||||
*/
|
||||
public PairOperator GetOperator() {
|
||||
return eOperator;
|
||||
}
|
||||
|
||||
public boolean OperatorSmallerDot() {
|
||||
return eOperator == PairOperator.SMALLERDOT;
|
||||
}
|
||||
}
|
||||
// ino.end
|
||||
|
||||
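A hedged usage fragment, not part of the commit, showing how the constructors and operator predicates above fit together; the RefType, JavaClassName, NullToken and TypePlaceholder calls mirror their uses elsewhere in this diff and would need those imports in scope.

    // hedged sketch (not in the commit): build the constraint "tph <. java.lang.Integer"
    static Pair smallerDotExample() {
        RefTypeOrTPHOrWildcardOrGeneric tph = TypePlaceholder.fresh(new NullToken());
        RefTypeOrTPHOrWildcardOrGeneric integer =
                new RefType(new JavaClassName("java.lang.Integer"), new ArrayList<>(), new NullToken());
        Pair pair = new Pair(tph, integer, PairOperator.SMALLERDOT);
        System.out.println(pair + " OperatorSmallerDot() = " + pair.OperatorSmallerDot()); // true
        return pair;
    }
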
@@ -1,10 +1,9 @@
|
||||
package de.dhbwstuttgart.typeinference.result;
|
||||
|
||||
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
|
||||
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
|
||||
import de.dhbwstuttgart.typeinference.constraints.Pair;
|
||||
|
||||
public class GenericInsertPair {
|
||||
public class GenericInsertPair {
|
||||
public TypePlaceholder TA1;
|
||||
public TypePlaceholder TA2;
|
||||
|
||||
@@ -13,19 +12,13 @@ public class GenericInsertPair {
|
||||
TA2 = superType;
|
||||
}
|
||||
|
||||
public GenericInsertPair(Pair pair) {
|
||||
TA1 = (TypePlaceholder) pair.TA1;
|
||||
TA2 = (TypePlaceholder) pair.TA2;
|
||||
public boolean contains(TypePlaceholder additionalTPH) {
|
||||
if (TA1.equals(additionalTPH)) return true;
|
||||
return TA2.equals(additionalTPH);
|
||||
}
|
||||
|
||||
public boolean contains(TypePlaceholder additionalTPH) {
|
||||
if(TA1.equals(additionalTPH))return true;
|
||||
if(TA2.equals(additionalTPH))return true;
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "GenIns(" + TA1.toString() + " < " + TA2.toString() + ")";
|
||||
return "GenIns(" + TA1.toString() + " < " + TA2.toString() + ")";
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,31 +2,22 @@ package de.dhbwstuttgart.typeinference.result;
|
||||
|
||||
import de.dhbwstuttgart.exceptions.NotImplementedException;
|
||||
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
|
||||
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
|
||||
|
||||
/**
|
||||
* enthaelt alle Paare, die in einem Ergebnis nicht vorkommen koennen
|
||||
* sie sind noetig fuer origPairs in PairTPHsmallerTPH, da hier auch
|
||||
* Paare vorkommen koennen die keine Result sind (z.B. bei FunN$$)
|
||||
* Contains all pairs, that cannot be in a result.
|
||||
* They are required for origPairs in PairTPHsmallerTPH, because there can
|
||||
* be pairs as well, that are no results (e.g. FunN$$)
|
||||
*/
|
||||
public class PairNoResult extends ResultPair<RefTypeOrTPHOrWildcardOrGeneric, RefTypeOrTPHOrWildcardOrGeneric>{
|
||||
//public final TypePlaceholder left;
|
||||
//public final TypePlaceholder right;
|
||||
|
||||
/*
|
||||
* urspruengliches Paar aus diesem dieses Resultpair erzeugt wurde
|
||||
* wichtig fuer generated Generics
|
||||
*/
|
||||
ResultPair origPair;
|
||||
public class PairNoResult extends ResultPair<RefTypeOrTPHOrWildcardOrGeneric, RefTypeOrTPHOrWildcardOrGeneric> {
|
||||
|
||||
public PairNoResult(RefTypeOrTPHOrWildcardOrGeneric left, RefTypeOrTPHOrWildcardOrGeneric right){
|
||||
public PairNoResult(RefTypeOrTPHOrWildcardOrGeneric left, RefTypeOrTPHOrWildcardOrGeneric right) {
|
||||
super(left, right);
|
||||
}
|
||||
|
||||
/* noch nicht implementiert. */
|
||||
@Override
|
||||
public void accept(ResultPairVisitor visitor) {
|
||||
throw new NotImplementedException();
|
||||
throw new NotImplementedException();
|
||||
//visitor.visit(this);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
package de.dhbwstuttgart.typeinference.result;
|
||||
|
||||
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
|
||||
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
|
||||
|
||||
public class PairTPHEqualTPH extends ResultPair<TypePlaceholder, TypePlaceholder> {
|
||||
|
||||
@@ -1,17 +1,16 @@
|
||||
package de.dhbwstuttgart.typeinference.result;
|
||||
|
||||
import de.dhbwstuttgart.syntaxtree.type.RefType;
|
||||
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
|
||||
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
|
||||
|
||||
/**
|
||||
 * Represents A =. RefType
|
||||
*/
|
||||
public class PairTPHequalRefTypeOrWildcardType extends ResultPair{
|
||||
public class PairTPHequalRefTypeOrWildcardType extends ResultPair<TypePlaceholder, RefTypeOrTPHOrWildcardOrGeneric> {
|
||||
public final TypePlaceholder left;
|
||||
public final RefTypeOrTPHOrWildcardOrGeneric right;
|
||||
|
||||
public PairTPHequalRefTypeOrWildcardType(TypePlaceholder left, RefTypeOrTPHOrWildcardOrGeneric right){
|
||||
public PairTPHequalRefTypeOrWildcardType(TypePlaceholder left, RefTypeOrTPHOrWildcardOrGeneric right) {
|
||||
super(left, right);
|
||||
this.left = left;
|
||||
this.right = right;
|
||||
@@ -21,9 +20,9 @@ public class PairTPHequalRefTypeOrWildcardType extends ResultPair{
|
||||
public void accept(ResultPairVisitor visitor) {
|
||||
visitor.visit(this);
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "(" + left.toString() + " = " + right.toString() + ")";
|
||||
return "(" + left.toString() + " = " + right.toString() + ")";
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,34 +6,37 @@ import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
|
||||
/**
|
||||
 * Represents: A <. B
|
||||
*/
|
||||
public class PairTPHsmallerTPH extends ResultPair{
|
||||
public class PairTPHsmallerTPH extends ResultPair<TypePlaceholder, TypePlaceholder> {
|
||||
public final TypePlaceholder left;
|
||||
public final TypePlaceholder right;
|
||||
|
||||
/*
|
||||
* urspruengliches Paar aus diesem dieses Resultpair erzeugt wurde
|
||||
* wichtig fuer generated Generics
|
||||
*/
|
||||
ResultPair origPair;
|
||||
|
||||
public PairTPHsmallerTPH(TypePlaceholder left, TypePlaceholder right){
|
||||
/**
|
||||
* the original pair from which this result pair was generated. Important for generated generics
|
||||
*/
|
||||
ResultPair<? extends RefTypeOrTPHOrWildcardOrGeneric, ? extends RefTypeOrTPHOrWildcardOrGeneric> origPair;
|
||||
|
||||
public PairTPHsmallerTPH(TypePlaceholder left, TypePlaceholder right) {
|
||||
super(left, right);
|
||||
this.left = left;
|
||||
this.right = right;
|
||||
}
|
||||
|
||||
public PairTPHsmallerTPH(TypePlaceholder left, TypePlaceholder right, ResultPair origPair){
|
||||
this(left, right);
|
||||
this.origPair = origPair;
|
||||
public PairTPHsmallerTPH(
|
||||
TypePlaceholder left,
|
||||
TypePlaceholder right,
|
||||
ResultPair<? extends RefTypeOrTPHOrWildcardOrGeneric, ? extends RefTypeOrTPHOrWildcardOrGeneric> origPair
|
||||
) {
|
||||
this(left, right);
|
||||
this.origPair = origPair;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void accept(ResultPairVisitor visitor) {
|
||||
visitor.visit(this);
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "(" + left.toString() + " < " + right.toString() + ")";
|
||||
return "(" + left.toString() + " < " + right.toString() + ")";
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,27 +1,20 @@
|
||||
package de.dhbwstuttgart.typeinference.result;
|
||||
|
||||
import de.dhbwstuttgart.syntaxtree.type.GenericRefType;
|
||||
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
|
||||
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
|
||||
|
||||
import java.util.Set;
|
||||
|
||||
public class ResolvedType{
|
||||
private ResultPair<?, ?> resultPair;
|
||||
|
||||
public class ResolvedType {
|
||||
public final RefTypeOrTPHOrWildcardOrGeneric resolvedType;
|
||||
//public final Set<GenericInsertPair> additionalGenerics;
|
||||
private ResultPair<?, ?> resultPair;
|
||||
|
||||
public ResolvedType(RefTypeOrTPHOrWildcardOrGeneric resolvedType, Set<GenericInsertPair> additionalGenerics){
|
||||
public ResolvedType(RefTypeOrTPHOrWildcardOrGeneric resolvedType) {
|
||||
this.resolvedType = resolvedType;
|
||||
//this.additionalGenerics = additionalGenerics;
|
||||
}
|
||||
|
||||
public void setResultPair(ResultPair<?, ?> resultPair) {
|
||||
this.resultPair = resultPair;
|
||||
}
|
||||
|
||||
public ResultPair<?, ?> getResultPair() {
|
||||
return resultPair;
|
||||
}
|
||||
public ResultPair<?, ?> getResultPair() {
|
||||
return resultPair;
|
||||
}
|
||||
|
||||
public void setResultPair(ResultPair<?, ?> resultPair) {
|
||||
this.resultPair = resultPair;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,17 +5,17 @@ import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
|
||||
/**
|
||||
 * Pairs that make up the unification result
|
||||
*/
|
||||
public abstract class ResultPair<A extends RefTypeOrTPHOrWildcardOrGeneric,B extends RefTypeOrTPHOrWildcardOrGeneric> {
|
||||
public abstract class ResultPair<A extends RefTypeOrTPHOrWildcardOrGeneric, B extends RefTypeOrTPHOrWildcardOrGeneric> {
|
||||
private final A left;
|
||||
private final B right;
|
||||
|
||||
public abstract void accept(ResultPairVisitor visitor);
|
||||
|
||||
public ResultPair(A left, B right){
|
||||
public ResultPair(A left, B right) {
|
||||
this.left = left;
|
||||
this.right = right;
|
||||
}
|
||||
|
||||
public abstract void accept(ResultPairVisitor visitor);
|
||||
|
||||
public A getLeft() {
|
||||
return left;
|
||||
}
|
||||
@@ -23,40 +23,37 @@ public abstract class ResultPair<A extends RefTypeOrTPHOrWildcardOrGeneric,B ext
|
||||
public B getRight() {
|
||||
return right;
|
||||
}
|
||||
|
||||
|
||||
public String toString() {
|
||||
return "(" + left.toString() + ", " + right.toString() + ")";
|
||||
return "(" + left.toString() + ", " + right.toString() + ")";
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
final int prime = 31;
|
||||
int result = 1;
|
||||
result = prime * result + ((left == null) ? 0 : left.getOffset().hashCode());
|
||||
result = prime * result + ((right == null) ? 0 : right.getOffset().hashCode());
|
||||
return result;
|
||||
}
|
||||
@Override
|
||||
public int hashCode() {
|
||||
final int prime = 31;
|
||||
int result = 1;
|
||||
result = prime * result + ((left == null) ? 0 : left.getOffset().hashCode());
|
||||
result = prime * result + ((right == null) ? 0 : right.getOffset().hashCode());
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj)
|
||||
return true;
|
||||
if (obj == null)
|
||||
return false;
|
||||
if (getClass() != obj.getClass())
|
||||
return false;
|
||||
ResultPair<?, ?> other = (ResultPair<?, ?>) obj;
|
||||
if (left == null) {
|
||||
if (other.left != null)
|
||||
return false;
|
||||
} else if (!left.getOffset().equals(other.left.getOffset()))
|
||||
return false;
|
||||
if (right == null) {
|
||||
return other.right == null;
|
||||
} else return right.getOffset().equals(other.right.getOffset());
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj)
|
||||
return true;
|
||||
if (obj == null)
|
||||
return false;
|
||||
if (getClass() != obj.getClass())
|
||||
return false;
|
||||
ResultPair<?,?> other = (ResultPair<?,?>) obj;
|
||||
if (left == null) {
|
||||
if (other.left != null)
|
||||
return false;
|
||||
} else if (!left.getOffset().equals(other.left.getOffset()))
|
||||
return false;
|
||||
if (right == null) {
|
||||
if (other.right != null)
|
||||
return false;
|
||||
} else if (!right.getOffset().equals(other.right.getOffset()))
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -2,9 +2,11 @@ package de.dhbwstuttgart.typeinference.result;
|
||||
|
||||
public interface ResultPairVisitor {
|
||||
void visit(PairTPHsmallerTPH p);
|
||||
|
||||
void visit(PairTPHequalRefTypeOrWildcardType p);
|
||||
|
||||
void visit(PairTPHEqualTPH p);
|
||||
|
||||
|
||||
//bisher nicht umgesetzt
|
||||
//void visit(PairNoResult p);
|
||||
}
|
||||
|
||||
@@ -1,44 +1,43 @@
|
||||
package de.dhbwstuttgart.typeinference.result;
|
||||
|
||||
import de.dhbwstuttgart.exceptions.NotImplementedException;
|
||||
import de.dhbwstuttgart.syntaxtree.type.*;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
|
||||
import de.dhbwstuttgart.exceptions.NotImplementedException;
|
||||
import de.dhbwstuttgart.syntaxtree.type.ExtendsWildcardType;
|
||||
import de.dhbwstuttgart.syntaxtree.type.GenericRefType;
|
||||
import de.dhbwstuttgart.syntaxtree.type.RefType;
|
||||
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
|
||||
import de.dhbwstuttgart.syntaxtree.type.SuperWildcardType;
|
||||
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
|
||||
|
||||
@SuppressWarnings("rawtypes")
|
||||
public class ResultSet {
|
||||
|
||||
public final Set<ResultPair> results;
|
||||
public Set<ResultPair<TypePlaceholder, TypePlaceholder>> genIns;
|
||||
|
||||
public ResultSet(Set<ResultPair> set){
|
||||
public ResultSet(Set<ResultPair> set) {
|
||||
this.results = set;
|
||||
this.genIns = new HashSet<>();
|
||||
results.forEach(x -> { if (x instanceof PairTPHsmallerTPH) { this.genIns.add(x);}} );
|
||||
results.forEach(x -> {
|
||||
if (x instanceof PairTPHsmallerTPH) {
|
||||
this.genIns.add(x);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
public boolean contains(ResultPair toCheck) {
|
||||
return this.results.contains(toCheck);
|
||||
return this.results.contains(toCheck);
|
||||
}
|
||||
|
||||
|
||||
public void remove(ResultPair toCheck) {
|
||||
results.remove(toCheck);
|
||||
results.remove(toCheck);
|
||||
}
|
||||
|
||||
public ResolvedType resolveType(RefTypeOrTPHOrWildcardOrGeneric type) {
|
||||
if(type instanceof TypePlaceholder)
|
||||
return new Resolver(this).resolve((TypePlaceholder)type);
|
||||
if(type instanceof GenericRefType)return new ResolvedType(type, new HashSet<>());
|
||||
if(type instanceof RefType) {
|
||||
if (type instanceof TypePlaceholder)
|
||||
return new Resolver(this).resolve((TypePlaceholder) type);
|
||||
if (type instanceof GenericRefType) return new ResolvedType(type);
|
||||
if (type instanceof RefType) {
|
||||
RelatedTypeWalker related = new RelatedTypeWalker(null, this);
|
||||
type.accept(related);
|
||||
return new ResolvedType(type, related.relatedTPHs);
|
||||
return new ResolvedType(type);
|
||||
} else {
|
||||
throw new NotImplementedException();
|
||||
//return new ResolvedType(type,new HashSet<>());
|
||||
@@ -46,74 +45,73 @@ public class ResultSet {
    }

    public String toString() {
        return results.toString();
        return results.toString();
    }

    @Override
    public boolean equals(Object o) {
        if (o instanceof ResultSet) {
            ResultSet other = (ResultSet)o;
            return this.results.equals(other.results);
        } else {
            return false;
        }
    }
    @Override
    public boolean equals(Object o) {
        if (o instanceof ResultSet other) {
            return this.results.equals(other.results);
        } else {
            return false;
        }
    }

    @Override
    public int hashCode() {
        return results.hashCode();
    }
    @Override
    public int hashCode() {
        return results.hashCode();
    }
}

class Resolver implements ResultSetVisitor {
    private final ResultSet result;
    private final Set<GenericInsertPair> additionalTPHs = new HashSet<>();
    private TypePlaceholder toResolve;
    private RefTypeOrTPHOrWildcardOrGeneric resolved;
    private final Set<GenericInsertPair> additionalTPHs = new HashSet<>();
    private ResultPair<?,?> currentPair;
    private ResultPair<?, ?> currentPair;

    public Resolver(ResultSet resultPairs){
    public Resolver(ResultSet resultPairs) {
        this.result = resultPairs;
    }

    public ResolvedType resolve(TypePlaceholder tph){
    public ResolvedType resolve(TypePlaceholder tph) {
        toResolve = tph;
        resolved = null;
        System.out.println(tph.toString());
        for(ResultPair<?,?> resultPair : result.results) {
            if(resultPair instanceof PairTPHEqualTPH && ((PairTPHEqualTPH) resultPair).getLeft().equals(toResolve)){
                currentPair = resultPair;
        for (ResultPair<?, ?> resultPair : result.results) {
            if (resultPair instanceof PairTPHEqualTPH && ((PairTPHEqualTPH) resultPair).getLeft().equals(toResolve)) {
                currentPair = resultPair;
                return resolve(((PairTPHEqualTPH) resultPair).getRight());
            }
        }
        for(ResultPair<?,?> resultPair : result.results){
            currentPair = resultPair;
        for (ResultPair<?, ?> resultPair : result.results) {
            currentPair = resultPair;
            resultPair.accept(this);
        }
        if(resolved==null){//TPH kommt nicht im Result vor:
        if (resolved == null) {//TPH kommt nicht im Result vor:
            resolved = tph;
        }

        ResolvedType result = new ResolvedType(resolved, additionalTPHs);//resolved;
        ResolvedType result = new ResolvedType(resolved); //resolved;
        result.setResultPair(currentPair);
        return result;
    }

    @Override
    public void visit(PairTPHsmallerTPH p) {
        currentPair = p;
        if(p.left.equals(toResolve)){
        currentPair = p;
        if (p.left.equals(toResolve)) {
            additionalTPHs.add(new GenericInsertPair(p.left, p.right));
            additionalTPHs.addAll(new RelatedTypeWalker(p.right, result).relatedTPHs);
        }
        if(p.right.equals(toResolve))
        if (p.right.equals(toResolve))
            additionalTPHs.addAll(new RelatedTypeWalker(p.left, result).relatedTPHs);
    }

    @Override
    public void visit(PairTPHequalRefTypeOrWildcardType p) {
        currentPair = p;
        if(p.left.equals(toResolve)){
        currentPair = p;
        if (p.left.equals(toResolve)) {
            resolved = p.right;
            RelatedTypeWalker related = new RelatedTypeWalker(null, result);
            p.right.accept(related);
@@ -150,8 +148,7 @@ class Resolver implements ResultSetVisitor {
    public void visit(ExtendsWildcardType extendsWildcardType) {

    }




}

@@ -162,23 +159,23 @@ class Resolver implements ResultSetVisitor {
class TPHResolver implements ResultSetVisitor {

    private final TypePlaceholder tph;
    Set<GenericInsertPair> resolved = new HashSet<>();
    private final ResultSet resultSet;
    Set<GenericInsertPair> resolved = new HashSet<>();

    TPHResolver(TypePlaceholder tph, ResultSet resultSet){
    TPHResolver(TypePlaceholder tph, ResultSet resultSet) {
        this.resultSet = resultSet;
        this.tph = tph;
        for(ResultPair p : resultSet.results){
        for (ResultPair p : resultSet.results) {
            p.accept(this);
        }
        if(resolved.size() == 0){
        if (resolved.isEmpty()) {
            resolved.add(new GenericInsertPair(tph, null));
        }
    }

    @Override
    public void visit(PairTPHsmallerTPH p) {
        if(p.left.equals(tph) || p.right.equals(tph)){
        if (p.left.equals(tph) || p.right.equals(tph)) {
            resolved.add(new GenericInsertPair(p.left, p.right));
        }
    }
@@ -186,10 +183,10 @@ class TPHResolver implements ResultSetVisitor {
    @Override
    public void visit(PairTPHequalRefTypeOrWildcardType p) {
        TypePlaceholder otherSide = null;
        if(p.right.equals(tph)){
        if (p.right.equals(tph)) {
            otherSide = p.left;
        }
        if(otherSide != null){
        if (otherSide != null) {
            Set<ResultPair> newResultSet = new HashSet<>(this.resultSet.results);
            newResultSet.remove(p);
            resolved.addAll(new TPHResolver(otherSide, new ResultSet(newResultSet)).resolved);
@@ -236,29 +233,30 @@ class RelatedTypeWalker implements ResultSetVisitor {

    /**
     * Läuft über das resultSet und speichert alle TPHs, welche mit start in Verbindung stehen
     * @param start - kann null sein, wenn der Walker für einen RefType benutzt wird
     *
     * @param start - kann null sein, wenn der Walker für einen RefType benutzt wird
     * @param resultSet
     */
    RelatedTypeWalker(TypePlaceholder start, ResultSet resultSet){
    RelatedTypeWalker(TypePlaceholder start, ResultSet resultSet) {
        this.toResolve = start;
        this.resultSet = resultSet;
        int resolved = 0;
        do{
        do {
            resolved = relatedTPHs.size();
            for(ResultPair p : resultSet.results){
            for (ResultPair p : resultSet.results) {
                p.accept(this);
                p.accept(this);
            }
        }while(resolved - relatedTPHs.size() > 0);
        } while (resolved - relatedTPHs.size() > 0);
    }

    @Override
    public void visit(PairTPHsmallerTPH p) {
        if(p.getRight().equals(toResolve)){
        if (p.getRight().equals(toResolve)) {
            relatedTPHs.addAll(new TPHResolver(p.right, resultSet).resolved);
            //relatedTPHs.addAll(new RelatedTypeWalker(p.right, resultSet).relatedTPHs);
        }
        if(p.getLeft().equals(toResolve)){
        if (p.getLeft().equals(toResolve)) {
            relatedTPHs.addAll(new TPHResolver(p.left, resultSet).resolved);
            //relatedTPHs.addAll(new RelatedTypeWalker(p.left, resultSet).relatedTPHs);
        }
@@ -266,7 +264,7 @@ class RelatedTypeWalker implements ResultSetVisitor {

    @Override
    public void visit(PairTPHequalRefTypeOrWildcardType p) {
        if(p.getLeft().equals(toResolve)){
        if (p.getLeft().equals(toResolve)) {
            p.getRight().accept(this);
        }
    }
@@ -283,7 +281,7 @@ class RelatedTypeWalker implements ResultSetVisitor {

    @Override
    public void visit(RefType refType) {
        for(RefTypeOrTPHOrWildcardOrGeneric param : refType.getParaList()){
        for (RefTypeOrTPHOrWildcardOrGeneric param : refType.getParaList()) {
            param.accept(this);
        }
    }
@@ -306,4 +304,4 @@ class RelatedTypeWalker implements ResultSetVisitor {
    @Override
    public void visit(GenericRefType genericRefType) {
    }
}
}

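For orientation, a minimal usage sketch of the ResultSet API shown above, not part of the diff: it assumes the JavaCompilerCore classes are on the classpath, and the PairTPHequalRefTypeOrWildcardType constructor order as well as the NullToken package are assumptions; only ResultSet(Set<ResultPair>), resolveType(...), TypePlaceholder.fresh(...) and the RefType(JavaClassName, Token) constructor are taken from the code in this diff.

import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.parser.scope.JavaClassName;
import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.result.*;

import java.util.HashSet;
import java.util.Set;

public class ResultSetUsageSketch {
    public static void main(String[] args) {
        // A placeholder standing in for a type the unifier has inferred.
        TypePlaceholder a = TypePlaceholder.fresh(new NullToken());

        // Assumed constructor order: (placeholder, concrete type it was unified with).
        Set<ResultPair> pairs = new HashSet<>();
        pairs.add(new PairTPHequalRefTypeOrWildcardType(
                a, new RefType(new JavaClassName("java.lang.Integer"), new NullToken())));

        // resolveType dispatches to the Resolver above: for a TypePlaceholder it first looks
        // for a PairTPHEqualTPH chain, then lets the visitor pass set 'resolved', and finally
        // returns the type the placeholder stands for together with its ResultPair.
        ResolvedType resolved = new ResultSet(pairs).resolveType(a);
        System.out.println(resolved);
    }
}

Only PairTPHequalRefTypeOrWildcardType actually sets the resolved type; PairTPHsmallerTPH pairs merely widen the set of related placeholders collected by RelatedTypeWalker.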
@@ -2,7 +2,7 @@ package de.dhbwstuttgart.typeinference.result;

import de.dhbwstuttgart.syntaxtree.type.*;

public interface ResultSetVisitor extends ResultPairVisitor{
public interface ResultSetVisitor extends ResultPairVisitor {

    void visit(RefType refType);


@@ -1,6 +1,5 @@
package de.dhbwstuttgart.typeinference.typeAlgo;

import de.dhbwstuttgart.syntaxtree.StatementVisitor;
import de.dhbwstuttgart.syntaxtree.type.*;
import de.dhbwstuttgart.typeinference.constraints.GenericsResolver;

@@ -9,25 +8,24 @@ import java.util.HashMap;
import java.util.List;

/**
 * Ein GenericsResolver, welcher Generics mit dem selben Namen den selben TPH zuordnet
 * A generics resolver, which assigns generics with the same name to the same TPH
 */
public class GenericsResolverSameName implements GenericsResolver, TypeVisitor<RefTypeOrTPHOrWildcardOrGeneric>{
public class GenericsResolverSameName implements GenericsResolver, TypeVisitor<RefTypeOrTPHOrWildcardOrGeneric> {

    HashMap<String, TypePlaceholder> map = new HashMap<>();

    @Override
    public RefTypeOrTPHOrWildcardOrGeneric resolve(RefTypeOrTPHOrWildcardOrGeneric generic) {
        return generic.acceptTV(this);
        return generic.acceptTV(this);
    }

    @Override
    public RefTypeOrTPHOrWildcardOrGeneric visit(RefType refType) {
        List<RefTypeOrTPHOrWildcardOrGeneric> params = new ArrayList<>();
        for(RefTypeOrTPHOrWildcardOrGeneric param : refType.getParaList()){
        for (RefTypeOrTPHOrWildcardOrGeneric param : refType.getParaList()) {
            params.add(param.acceptTV(this));
        }
        RefType ret = new RefType(refType.getName(), params, refType.getOffset());
        return ret;
        return new RefType(refType.getName(), params, refType.getOffset());
    }

    @Override
@@ -48,7 +46,7 @@ public class GenericsResolverSameName implements GenericsResolver, TypeVisitor<R
    @Override
    public RefTypeOrTPHOrWildcardOrGeneric visit(GenericRefType genericRefType) {
        String name = genericRefType.getParsedName();
        if(!map.containsKey(name)){
        if (!map.containsKey(name)) {
            map.put(name, TypePlaceholder.fresh(genericRefType.getOffset()));
        }
        return map.get(name);

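The class comment above states the contract of GenericsResolverSameName: generics with the same name are mapped to the same TypePlaceholder. A small illustrative sketch, not part of the diff; the GenericRefType(String, Token) constructor and the NullToken import path are assumptions, the caching behaviour follows directly from visit(GenericRefType) above:

import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.syntaxtree.type.GenericRefType;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.typeinference.typeAlgo.GenericsResolverSameName;

public class SameNameResolverSketch {
    public static void main(String[] args) {
        GenericsResolverSameName resolver = new GenericsResolverSameName();

        // Two occurrences of the generic "T" and one of "U".
        RefTypeOrTPHOrWildcardOrGeneric first  = resolver.resolve(new GenericRefType("T", new NullToken()));
        RefTypeOrTPHOrWildcardOrGeneric second = resolver.resolve(new GenericRefType("T", new NullToken()));
        RefTypeOrTPHOrWildcardOrGeneric other  = resolver.resolve(new GenericRefType("U", new NullToken()));

        // Same name -> same TypePlaceholder instance (cached in the HashMap);
        // a different name gets a fresh placeholder.
        System.out.println(first == second); // expected: true
        System.out.println(first == other);  // expected: false
    }
}

Because the HashMap lives in the resolver instance, the "same TPH" guarantee only holds per instance; that is why getResolverInstance() in TYPEStmt further down hands out a fresh resolver for every method call or field access.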
@@ -1,49 +1,48 @@
package de.dhbwstuttgart.typeinference.typeAlgo;

import de.dhbwstuttgart.exceptions.DebugException;
import de.dhbwstuttgart.parser.scope.JavaClassName;
import de.dhbwstuttgart.syntaxtree.*;
import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
import de.dhbwstuttgart.syntaxtree.statement.Statement;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.ClassOrInterface;
import de.dhbwstuttgart.syntaxtree.Constructor;
import de.dhbwstuttgart.syntaxtree.Method;
import de.dhbwstuttgart.syntaxtree.SourceFile;
import de.dhbwstuttgart.typeinference.assumptions.TypeInferenceBlockInformation;
import de.dhbwstuttgart.typeinference.assumptions.TypeInferenceInformation;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.util.BiRelation;
import de.dhbwstuttgart.typeinference.constraints.Pair;

import java.util.*;
import java.util.Collection;

public class TYPE {

    private final Collection<SourceFile> sfs;
    private final TypeInferenceInformation typeInferenceInformation;

    public TYPE(Collection<SourceFile> sourceFiles, Collection<ClassOrInterface> allAvailableClasses){

    public TYPE(Collection<SourceFile> sourceFiles, Collection<ClassOrInterface> allAvailableClasses) {
        sfs = sourceFiles;
        this.typeInferenceInformation = new TypeInferenceInformation(allAvailableClasses);
    }

    public ConstraintSet getConstraints() {
        ConstraintSet ret = new ConstraintSet();
        for(SourceFile sf : sfs)
            for (ClassOrInterface cl : sf.KlassenVektor) {
                ret.addAll(getConstraintsClass(cl ,typeInferenceInformation));
            }
    public ConstraintSet<Pair> getConstraints() {
        ConstraintSet<Pair> ret = new ConstraintSet<>();
        for (SourceFile sf : sfs)
            for (ClassOrInterface cl : sf.KlassenVektor) {
                ret.addAll(getConstraintsClass(cl, typeInferenceInformation));
            }
        return ret;
    }

    private ConstraintSet getConstraintsClass(ClassOrInterface cl, TypeInferenceInformation info) {
        ConstraintSet ret = new ConstraintSet();
        ConstraintSet methConstrains;
        for(Method m : cl.getMethods()){
            ret.addAll(methConstrains = getConstraintsMethod(m,info, cl));
    private ConstraintSet<Pair> getConstraintsClass(ClassOrInterface cl, TypeInferenceInformation info) {
        ConstraintSet<Pair> ret = new ConstraintSet<>();

        for (Method m : cl.getMethods()) {
            ConstraintSet<Pair> methConstrains = getConstraintsMethod(m, info, cl);
            ret.addAll(methConstrains);
            m.constraints.addAll(methConstrains);
        }
        for(Constructor m : cl.getConstructors()){
            ret.addAll(getConstraintsConstructor(m,info, cl));
        for (Constructor m : cl.getConstructors()) {
            ret.addAll(getConstraintsConstructor(m, info, cl));
        }
        if (cl.getfieldInitializations().isPresent()) {
            ret.addAll(getConstraintsConstructor(cl.getfieldInitializations().get(), info, cl));
            ret.addAll(getConstraintsConstructor(cl.getfieldInitializations().get(), info, cl));
        }
        return ret;
    }
@@ -68,20 +67,20 @@ public class TYPE {
        return new TypeInferenceInformation(classes);
    }
    */

    private ConstraintSet getConstraintsMethod(Method m, TypeInferenceInformation info, ClassOrInterface currentClass) {
        if(m.block == null)return new ConstraintSet(); //Abstrakte Methoden generieren keine Constraints

    private ConstraintSet<Pair> getConstraintsMethod(Method m, TypeInferenceInformation info, ClassOrInterface currentClass) {
        if (m.block == null) return new ConstraintSet<>(); //Abstrakte Methoden generieren keine Constraints
        TypeInferenceBlockInformation blockInfo = new TypeInferenceBlockInformation(info.getAvailableClasses(), currentClass, m);
        TYPEStmt methodScope = new TYPEStmt(blockInfo);
        m.block.accept(methodScope);
        return methodScope.getConstraints();
    }

    private ConstraintSet getConstraintsConstructor(Constructor m, TypeInferenceInformation info, ClassOrInterface currentClass) {
    private ConstraintSet<Pair> getConstraintsConstructor(Constructor m, TypeInferenceInformation info, ClassOrInterface currentClass) {
        TypeInferenceBlockInformation blockInfo = new TypeInferenceBlockInformation(info.getAvailableClasses(), currentClass, m);
        TYPEStmt methodScope = new TYPEStmt(blockInfo);
        //for(Statement stmt : m.fieldInitializations)stmt.accept(methodScope);
        ConstraintSet ret = this.getConstraintsMethod(m, info, currentClass);
        ConstraintSet<Pair> ret = this.getConstraintsMethod(m, info, currentClass);
        ret.addAll(methodScope.getConstraints());
        return ret;
    }

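How the TYPE pass above is typically driven, as a brief sketch rather than part of the diff: the TYPE constructor and getConstraints() are taken from the code shown here; the helper name and the assumption that the caller (e.g. JavaTXCompiler) already has the parsed SourceFiles and the set of visible classes are illustrative.

import de.dhbwstuttgart.syntaxtree.ClassOrInterface;
import de.dhbwstuttgart.syntaxtree.SourceFile;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.typeAlgo.TYPE;

import java.util.Collection;

public class ConstraintGenerationSketch {
    // Hypothetical helper: the caller is expected to have parsed SourceFiles already
    // and to know every class visible to them.
    static ConstraintSet<Pair> constraintsFor(Collection<SourceFile> parsedSources,
                                              Collection<ClassOrInterface> allAvailableClasses) {
        // One TYPE instance per inference run; after this change it walks every class,
        // method, constructor and field initializer of the given source files
        // (see getConstraintsClass above) and returns a typed ConstraintSet<Pair>.
        return new TYPE(parsedSources, allAvailableClasses).getConstraints();
    }
}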
@@ -11,43 +11,47 @@ import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
|
||||
import de.dhbwstuttgart.syntaxtree.factory.NameGenerator;
|
||||
import de.dhbwstuttgart.syntaxtree.statement.*;
|
||||
import de.dhbwstuttgart.syntaxtree.type.*;
|
||||
import de.dhbwstuttgart.syntaxtree.type.Void;
|
||||
import de.dhbwstuttgart.typeinference.assumptions.FieldAssumption;
|
||||
import de.dhbwstuttgart.typeinference.assumptions.FunNClass;
|
||||
import de.dhbwstuttgart.typeinference.assumptions.MethodAssumption;
|
||||
import de.dhbwstuttgart.typeinference.assumptions.TypeInferenceBlockInformation;
|
||||
import de.dhbwstuttgart.typeinference.constraints.*;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.ExtendsType;
|
||||
import de.dhbwstuttgart.typeinference.constraints.Constraint;
|
||||
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
|
||||
import de.dhbwstuttgart.typeinference.constraints.GenericsResolver;
|
||||
import de.dhbwstuttgart.typeinference.constraints.Pair;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
|
||||
import de.dhbwstuttgart.util.BiRelation;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
public class TYPEStmt implements StatementVisitor{
|
||||
public class TYPEStmt implements StatementVisitor {
|
||||
|
||||
private final TypeInferenceBlockInformation info;
|
||||
private final ConstraintSet constraintsSet = new ConstraintSet();
|
||||
private final ConstraintSet<Pair> constraintsSet = new ConstraintSet<>();
|
||||
private final RefType number = new RefType(ASTFactory.createClass(Number.class).getClassName(), new NullToken());
|
||||
private final RefType longg = new RefType(ASTFactory.createClass(Long.class).getClassName(), new NullToken());
|
||||
private final RefType integer = new RefType(ASTFactory.createClass(Integer.class).getClassName(), new NullToken());
|
||||
private final RefType shortt = new RefType(ASTFactory.createClass(Short.class).getClassName(), new NullToken());
|
||||
private final RefType bytee = new RefType(ASTFactory.createClass(Byte.class).getClassName(), new NullToken());
|
||||
private final RefType floatt = new RefType(ASTFactory.createClass(Float.class).getClassName(), new NullToken());
|
||||
private final RefType doublee = new RefType(ASTFactory.createClass(Double.class).getClassName(), new NullToken());
|
||||
private final RefType string = new RefType(ASTFactory.createClass(String.class).getClassName(), new NullToken());
|
||||
private final RefType bool = new RefType(ASTFactory.createClass(Boolean.class).getClassName(), new NullToken());
|
||||
|
||||
public TYPEStmt(TypeInferenceBlockInformation info){
|
||||
public TYPEStmt(TypeInferenceBlockInformation info) {
|
||||
this.info = info;
|
||||
}
|
||||
|
||||
public ConstraintSet getConstraints() {
|
||||
return constraintsSet;
|
||||
}
|
||||
|
||||
/**
|
||||
* Erstellt einen neuen GenericResolver
|
||||
* Die Idee dieser Datenstruktur ist es, GTVs einen eindeutigen TPH zuzuweisen.
|
||||
* Bei Methodenaufrufen oder anderen Zugriffen, bei denen alle benutzten GTVs jeweils einen einheitlichen TPH bekommen müssen
|
||||
* kann diese Klasse eingesetzt werden. Wichtig ist, dass hierfür jeweils eine frische Instanz benutzt wird.
|
||||
* @return
|
||||
* Creates a new GenericsResolver
|
||||
* The idea behind this data-structure is to assign a unique TPH to GTVs.
|
||||
* On method calls or other accesses where all used GTVs must get a unique TPH, this class can be used.
|
||||
* It is important to always use a fresh instance for that.
|
||||
*/
|
||||
private static GenericsResolver getResolverInstance(){
|
||||
private static GenericsResolver getResolverInstance() {
|
||||
return new GenericsResolverSameName();
|
||||
}
|
||||
|
||||
@@ -55,10 +59,73 @@ public class TYPEStmt implements StatementVisitor{
|
||||
return null;
|
||||
}
|
||||
|
||||
public static List<MethodAssumption> getMethods(String name, int numArgs, TypeInferenceBlockInformation info) {
|
||||
List<MethodAssumption> ret = new ArrayList<>();
|
||||
//TODO: apply Methoden wieder anfügen. Diese könnten möglicherweise auch in den Assumptions auftauchen (überdenken)
|
||||
if (name.equals("apply")) {
|
||||
List<GenericRefType> funNParams = new ArrayList<>();
|
||||
for (int i = 0; i < numArgs + 1; i++) {
|
||||
//funNParams.add(TypePlaceholder.fresh(new NullToken()));
|
||||
funNParams.add(new GenericRefType(NameGenerator.makeNewName(),
|
||||
new NullToken()));
|
||||
}
|
||||
funNParams.get(funNParams.size() - 1);
|
||||
ret.add(new MethodAssumption(new FunNClass(funNParams), funNParams.get(funNParams.size() - 1), funNParams.subList(0, funNParams.size() - 1),
|
||||
new TypeScope() {
|
||||
@Override
|
||||
public Iterable<? extends GenericTypeVar> getGenerics() {
|
||||
throw new NotImplementedException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public RefTypeOrTPHOrWildcardOrGeneric getReturnType() {
|
||||
throw new NotImplementedException();
|
||||
}
|
||||
}, false));
|
||||
}
|
||||
for (ClassOrInterface cl : info.getAvailableClasses()) {
|
||||
for (Method m : cl.getMethods()) {
|
||||
if (m.getName().equals(name) &&
|
||||
m.getParameterList().getFormalparalist().size() == numArgs) {
|
||||
RefTypeOrTPHOrWildcardOrGeneric retType = m.getReturnType();//info.checkGTV(m.getReturnType());
|
||||
|
||||
ret.add(new MethodAssumption(cl, retType, convertParams(m.getParameterList(), info),
|
||||
createTypeScope(cl, m), m.isInherited));
|
||||
}
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
public static List<MethodAssumption> getMethods(String name, ArgumentList arglist, TypeInferenceBlockInformation info) {
|
||||
return getMethods(name, arglist.getArguments().size(), info);
|
||||
}
|
||||
|
||||
protected static List<RefTypeOrTPHOrWildcardOrGeneric> convertParams(ParameterList parameterList, TypeInferenceBlockInformation info) {
|
||||
//TODO: Hier müssen die Parameter mit den TPHs in den GEnerics des Receivers verknüpft werden
|
||||
/*
|
||||
Beispiel:
|
||||
auto test = new List<String>();
|
||||
test.add("hallo");
|
||||
|
||||
Hier kriegt der Receiver ja den COnstraint TPH REceiver <. List<TPH A>
|
||||
Dann mus bei dem Parameter der COnstraint entstehen: TPH A <. String
|
||||
*/
|
||||
List<RefTypeOrTPHOrWildcardOrGeneric> params = new ArrayList<>();
|
||||
for (FormalParameter fp : parameterList.getFormalparalist()) {
|
||||
params.add(fp.getType()); //info.checkGTV(fp.getType())); //PL 2018-06-22 GTV sollen in Argumenten erhalten bleiben
|
||||
}
|
||||
return params;
|
||||
}
|
||||
|
||||
public ConstraintSet<Pair> getConstraints() {
|
||||
return constraintsSet;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void visit(ArgumentList arglist) {
|
||||
for(int i = 0;i<arglist.getArguments().size();i++){
|
||||
arglist.getArguments().get(i).accept(this);
|
||||
for (int i = 0; i < arglist.getArguments().size(); i++) {
|
||||
arglist.getArguments().get(i).accept(this);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -69,13 +136,12 @@ public class TYPEStmt implements StatementVisitor{
|
||||
lambdaParams.add(tphRetType);
|
||||
//lambdaParams.add(0,tphRetType);
|
||||
constraintsSet.addUndConstraint(
|
||||
new Pair(lambdaExpression.getType(),
|
||||
new RefType(new JavaClassName("Fun"+(lambdaParams.size()-1)+"$$"), lambdaParams, new NullToken()),
|
||||
//new FunN(lambdaParams),
|
||||
PairOperator.EQUALSDOT));
|
||||
new Pair(lambdaExpression.getType(),
|
||||
new RefType(new JavaClassName("Fun" + (lambdaParams.size() - 1) + "$$"), lambdaParams, new NullToken()),
|
||||
PairOperator.EQUALSDOT));
|
||||
constraintsSet.addUndConstraint(
|
||||
new Pair(lambdaExpression.getReturnType(),
|
||||
tphRetType,PairOperator.EQUALSDOT));
|
||||
new Pair(lambdaExpression.getReturnType(),
|
||||
tphRetType, PairOperator.EQUALSDOT));
|
||||
|
||||
//Constraints des Bodys generieren:
|
||||
TYPEStmt lambdaScope = new TYPEStmt(new TypeInferenceBlockInformation(info, lambdaExpression));
|
||||
@@ -88,12 +154,12 @@ public class TYPEStmt implements StatementVisitor{
|
||||
assign.lefSide.accept(this);
|
||||
assign.rightSide.accept(this);
|
||||
constraintsSet.addUndConstraint(new Pair(
|
||||
assign.rightSide.getType(), assign.lefSide.getType(), PairOperator.SMALLERDOT));
|
||||
assign.rightSide.getType(), assign.lefSide.getType(), PairOperator.SMALLERDOT));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void visit(Block block) {
|
||||
for(Statement stmt : block.getStatements()){
|
||||
for (Statement stmt : block.getStatements()) {
|
||||
stmt.accept(this);
|
||||
}
|
||||
}
|
||||
@@ -111,17 +177,17 @@ public class TYPEStmt implements StatementVisitor{
|
||||
@Override
|
||||
public void visit(FieldVar fieldVar) {
|
||||
fieldVar.receiver.accept(this);
|
||||
Set<Constraint> oderConstraints = new HashSet<>();
|
||||
for(FieldAssumption fieldAssumption : info.getFields(fieldVar.fieldVarName)){
|
||||
Constraint constraint = new Constraint();
|
||||
Set<Constraint<Pair>> oderConstraints = new HashSet<>();
|
||||
for (FieldAssumption fieldAssumption : info.getFields(fieldVar.fieldVarName)) {
|
||||
Constraint<Pair> constraint = new Constraint<>();
|
||||
GenericsResolver resolver = getResolverInstance();
|
||||
constraint.add(new Pair(fieldVar.receiver.getType(), fieldAssumption.getReceiverType(resolver), PairOperator.SMALLERDOT)); //PL 2019-12-09: SMALLERDOT eingefuegt, EQUALSDOT entfernt, wenn ds Field privat ist muesste es EQUALSDOT lauten
|
||||
constraint.add(new Pair(
|
||||
fieldVar.getType(), fieldAssumption.getType(resolver), PairOperator.EQUALSDOT));
|
||||
fieldVar.getType(), fieldAssumption.getType(resolver), PairOperator.EQUALSDOT));
|
||||
oderConstraints.add(constraint);
|
||||
}
|
||||
if(oderConstraints.size() == 0)
|
||||
throw new TypeinferenceException("Kein Feld "+fieldVar.fieldVarName+ " gefunden", fieldVar.getOffset());
|
||||
if (oderConstraints.isEmpty())
|
||||
throw new TypeinferenceException("Kein Feld " + fieldVar.fieldVarName + " gefunden", fieldVar.getOffset());
|
||||
constraintsSet.addOderConstraint(oderConstraints);
|
||||
}
|
||||
|
||||
@@ -141,7 +207,7 @@ public class TYPEStmt implements StatementVisitor{
|
||||
ifStmt.then_block.accept(this);
|
||||
//Beide Blöcke müssen den gleichen Supertyp haben, welcher den Rückgabetyp des If-Stmts darstellt
|
||||
constraintsSet.addUndConstraint(new Pair(ifStmt.else_block.getType(), ifStmt.getType(), PairOperator.SMALLERDOT));
|
||||
if(ifStmt.else_block != null){
|
||||
if (ifStmt.else_block != null) {
|
||||
ifStmt.else_block.accept(this);
|
||||
constraintsSet.addUndConstraint(new Pair(ifStmt.else_block.getType(), ifStmt.getType(), PairOperator.SMALLERDOT));
|
||||
}
|
||||
@@ -155,30 +221,30 @@ public class TYPEStmt implements StatementVisitor{
|
||||
|
||||
@Override
|
||||
public void visit(LocalVar localVar) {
|
||||
// Es werden nur bei Feldvariablen Constraints generiert. Lokale Variablen sind eindeutig
|
||||
// Constraints are only generated for field variables. Local variables are unambiguous
|
||||
}
|
||||
|
||||
@Override
|
||||
public void visit(LocalVarDecl localVarDecl) {
|
||||
//Hier ist nichts zu tun. Allen lokalen Variablen bekommen beim parsen schon den korrekten Typ
|
||||
// Nothing to do here. All local variables get their correct type already while being parsed
|
||||
}
|
||||
|
||||
@Override
|
||||
//Es wird in OderConstraints davon ausgegangen dass die Bedingungen für die Typen der Argumente links stehen
|
||||
//und die Typen der Rückgabewerte immer rechts stehen (vgl. JavaTXCompiler)
|
||||
// In oder-constraints it is assumed, that the conditions for the argument types are placed left
|
||||
// and the types of the return values are placed right (see JavaTXCompiler)
|
||||
public void visit(MethodCall methodCall) {
|
||||
|
||||
|
||||
methodCall.receiver.accept(this);
|
||||
//Overloading:
|
||||
Set<Constraint<Pair>> methodConstraints = new HashSet<>();
|
||||
for(MethodAssumption m : this.getMethods(methodCall.name, methodCall.arglist, info)){
|
||||
for (MethodAssumption m : getMethods(methodCall.name, methodCall.arglist, info)) {
|
||||
GenericsResolver resolver = getResolverInstance();
|
||||
Set<Constraint<Pair>> oneMethodConstraints = generateConstraint(methodCall, m, info, resolver);
|
||||
methodConstraints.addAll(oneMethodConstraints);
|
||||
|
||||
|
||||
/* pl 2023-01-20: in generateConstraint bereits umgesetzt
|
||||
Constraint<Pair> extendsOneMethodConstraint = oneMethodConstraint.stream()
|
||||
.map(x -> (x.TA1 instanceof TypePlaceholder &&
|
||||
.map(x -> (x.TA1 instanceof TypePlaceholder &&
|
||||
x.GetOperator() == PairOperator.EQUALSDOT &&
|
||||
!(x.TA2 instanceof TypePlaceholder))
|
||||
? new Pair(x.TA1, new ExtendsWildcardType(x.TA2, x.TA2.getOffset()), PairOperator.EQUALSDOT)
|
||||
@@ -189,8 +255,8 @@ public class TYPEStmt implements StatementVisitor{
|
||||
methodConstraints.add(extendsOneMethodConstraint);
|
||||
*/
|
||||
}
|
||||
if(methodConstraints.size()<1){
|
||||
throw new TypeinferenceException("Methode "+methodCall.name+" ist nicht vorhanden!",methodCall.getOffset());
|
||||
if (methodConstraints.isEmpty()) {
|
||||
throw new TypeinferenceException("Methode " + methodCall.name + " ist nicht vorhanden!", methodCall.getOffset());
|
||||
}
|
||||
constraintsSet.addOderConstraint(methodConstraints);
|
||||
}
|
||||
@@ -198,12 +264,12 @@ public class TYPEStmt implements StatementVisitor{
|
||||
@Override
|
||||
public void visit(NewClass methodCall) {
|
||||
//Overloading:
|
||||
Set<Constraint> methodConstraints = new HashSet<>();
|
||||
for(MethodAssumption m : this.getConstructors(info, (RefType) methodCall.getType(), methodCall.getArgumentList())){
|
||||
Set<Constraint<Pair>> methodConstraints = new HashSet<>();
|
||||
for (MethodAssumption m : this.getConstructors(info, (RefType) methodCall.getType(), methodCall.getArgumentList())) {
|
||||
methodConstraints.add(generateConstructorConstraint(methodCall, m, info, getResolverInstance()));
|
||||
}
|
||||
if(methodConstraints.size()<1){
|
||||
throw new TypeinferenceException("Konstruktor in Klasse "+methodCall.getType().toString()+" ist nicht vorhanden!",methodCall.getOffset());
|
||||
if (methodConstraints.isEmpty()) {
|
||||
throw new TypeinferenceException("Konstruktor in Klasse " + methodCall.getType().toString() + " ist nicht vorhanden!", methodCall.getOffset());
|
||||
}
|
||||
constraintsSet.addOderConstraint(methodConstraints);
|
||||
}
|
||||
@@ -218,132 +284,122 @@ public class TYPEStmt implements StatementVisitor{
|
||||
receiver.expr.accept(this);
|
||||
}
|
||||
|
||||
private final RefType number = new RefType(ASTFactory.createClass(Number.class).getClassName(), new NullToken());
|
||||
private final RefType longg = new RefType(ASTFactory.createClass(Long.class).getClassName(), new NullToken());
|
||||
private final RefType integer = new RefType(ASTFactory.createClass(Integer.class).getClassName(), new NullToken());
|
||||
private final RefType shortt = new RefType(ASTFactory.createClass(Short.class).getClassName(), new NullToken());
|
||||
private final RefType bytee = new RefType(ASTFactory.createClass(Byte.class).getClassName(), new NullToken());
|
||||
private final RefType floatt = new RefType(ASTFactory.createClass(Float.class).getClassName(), new NullToken());
|
||||
private final RefType doublee = new RefType(ASTFactory.createClass(Double.class).getClassName(), new NullToken());
|
||||
private final RefType string = new RefType(ASTFactory.createClass(String.class).getClassName(), new NullToken());
|
||||
private final RefType bool = new RefType(ASTFactory.createClass(Boolean.class).getClassName(), new NullToken());
|
||||
@Override
|
||||
public void visit(UnaryExpr unaryExpr) {
|
||||
if(unaryExpr.operation == UnaryExpr.Operation.POSTDECREMENT ||
|
||||
unaryExpr.operation == UnaryExpr.Operation.POSTINCREMENT ||
|
||||
unaryExpr.operation == UnaryExpr.Operation.PREDECREMENT ||
|
||||
unaryExpr.operation == UnaryExpr.Operation.PREINCREMENT){
|
||||
if (unaryExpr.operation == UnaryExpr.Operation.POSTDECREMENT ||
|
||||
unaryExpr.operation == UnaryExpr.Operation.POSTINCREMENT ||
|
||||
unaryExpr.operation == UnaryExpr.Operation.PREDECREMENT ||
|
||||
unaryExpr.operation == UnaryExpr.Operation.PREINCREMENT) {
|
||||
//@see: https://docs.oracle.com/javase/specs/jls/se7/html/jls-15.html#jls-15.14.2
|
||||
//Expression muss zu Numeric Convertierbar sein. also von Numeric erben
|
||||
// Expression must be convertable to Numeric and also inherit from Numeric
|
||||
constraintsSet.addUndConstraint(new Pair(unaryExpr.expr.getType(), number, PairOperator.SMALLERNEQDOT));
|
||||
//The type of the postfix increment expression is the type of the variable
|
||||
// The type of the postfix increment expression is the type of the variable
|
||||
constraintsSet.addUndConstraint(new Pair(unaryExpr.expr.getType(), unaryExpr.getType(), PairOperator.EQUALSDOT));
|
||||
}else{
|
||||
} else {
|
||||
throw new NotImplementedException();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
//Es wird in OderConstraints davon ausgegangen dass die Bedingungen für die Typen der Argumente links stehen
|
||||
//und die Typen der Rückgabewerte immer rechts stehen (vgl. JavaTXCompiler)
|
||||
// In oder-constraints it is assumed, that the conditions for the argument types are placed left
|
||||
// and the types of the return values are placed right (see JavaTXCompiler)
|
||||
public void visit(BinaryExpr binary) {
|
||||
binary.lexpr.accept(this);
|
||||
binary.rexpr.accept(this);
|
||||
if(binary.operation.equals(BinaryExpr.Operator.DIV) ||
|
||||
binary.operation.equals(BinaryExpr.Operator.MUL)||
|
||||
binary.operation.equals(BinaryExpr.Operator.MOD)||
|
||||
binary.operation.equals(BinaryExpr.Operator.ADD)||
|
||||
binary.operation.equals(BinaryExpr.Operator.SUB)) {
|
||||
binary.lexpr.accept(this);
|
||||
binary.rexpr.accept(this);
|
||||
if (binary.operation.equals(BinaryExpr.Operator.DIV) ||
|
||||
binary.operation.equals(BinaryExpr.Operator.MUL) ||
|
||||
binary.operation.equals(BinaryExpr.Operator.MOD) ||
|
||||
binary.operation.equals(BinaryExpr.Operator.ADD) ||
|
||||
binary.operation.equals(BinaryExpr.Operator.SUB)) {
|
||||
Set<Constraint<Pair>> numericAdditionOrStringConcatenation = new HashSet<>();
|
||||
|
||||
// TODO PL 2018-11-06
|
||||
|
||||
// TODO PL 2018-11-06
|
||||
|
||||
// Auf importierte Typen einschraenken
|
||||
// pruefen, ob Typen richtig bestimmt werden.
|
||||
|
||||
|
||||
|
||||
|
||||
//Zuerst der Fall für Numerische AusdrücPairOpnumericeratorke, das sind Mul, Mod und Div immer:
|
||||
//see: https://docs.oracle.com/javase/specs/jls/se7/html/jls-15.html#jls-15.17
|
||||
//Expression muss zu Numeric Convertierbar sein. also von Numeric erben
|
||||
Constraint<Pair> numeric;
|
||||
//PL eingefuegt 2018-07-17
|
||||
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(bytee.getName())) {
|
||||
numeric = new Constraint<>();
|
||||
numeric.add(new Pair(binary.lexpr.getType(), bytee, PairOperator.SMALLERDOT));
|
||||
numeric.add(new Pair(binary.rexpr.getType(), bytee, PairOperator.SMALLERDOT));
|
||||
numeric.add(new Pair(binary.getType(), integer, PairOperator.EQUALSDOT));
|
||||
if (info.getAvailableClasses().stream().map(ClassOrInterface::getClassName).collect(Collectors.toCollection(HashSet::new)).contains(bytee.getName())) {
|
||||
numeric = new Constraint<>();
|
||||
numeric.add(new Pair(binary.lexpr.getType(), bytee, PairOperator.SMALLERDOT));
|
||||
numeric.add(new Pair(binary.rexpr.getType(), bytee, PairOperator.SMALLERDOT));
|
||||
numeric.add(new Pair(binary.getType(), integer, PairOperator.EQUALSDOT));
|
||||
numericAdditionOrStringConcatenation.add(numeric);
|
||||
}
|
||||
//PL eingefuegt 2018-07-17
|
||||
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(shortt.getName())) {
|
||||
numeric = new Constraint<>();
|
||||
numeric.add(new Pair(binary.lexpr.getType(), shortt, PairOperator.SMALLERDOT));
|
||||
numeric.add(new Pair(binary.rexpr.getType(), shortt, PairOperator.SMALLERDOT));
|
||||
numeric.add(new Pair(binary.getType(), integer, PairOperator.EQUALSDOT));
|
||||
numericAdditionOrStringConcatenation.add(numeric);
|
||||
if (info.getAvailableClasses().stream().map(ClassOrInterface::getClassName).collect(Collectors.toCollection(HashSet::new)).contains(shortt.getName())) {
|
||||
numeric = new Constraint<>();
|
||||
numeric.add(new Pair(binary.lexpr.getType(), shortt, PairOperator.SMALLERDOT));
|
||||
numeric.add(new Pair(binary.rexpr.getType(), shortt, PairOperator.SMALLERDOT));
|
||||
numeric.add(new Pair(binary.getType(), integer, PairOperator.EQUALSDOT));
|
||||
numericAdditionOrStringConcatenation.add(numeric);
|
||||
}
|
||||
//PL eingefuegt 2018-07-17
|
||||
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(integer.getName())) {
|
||||
numeric = new Constraint<>();
|
||||
numeric.add(new Pair(binary.lexpr.getType(), integer, PairOperator.SMALLERDOT));
|
||||
numeric.add(new Pair(binary.rexpr.getType(), integer, PairOperator.SMALLERDOT));
|
||||
numeric.add(new Pair(integer, binary.getType(), PairOperator.EQUALSDOT));
|
||||
numericAdditionOrStringConcatenation.add(numeric);
|
||||
if (info.getAvailableClasses().stream().map(ClassOrInterface::getClassName).collect(Collectors.toCollection(HashSet::new)).contains(integer.getName())) {
|
||||
numeric = new Constraint<>();
|
||||
numeric.add(new Pair(binary.lexpr.getType(), integer, PairOperator.SMALLERDOT));
|
||||
numeric.add(new Pair(binary.rexpr.getType(), integer, PairOperator.SMALLERDOT));
|
||||
numeric.add(new Pair(integer, binary.getType(), PairOperator.EQUALSDOT));
|
||||
numericAdditionOrStringConcatenation.add(numeric);
|
||||
}
|
||||
//PL eingefuegt 2018-07-17
|
||||
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(longg.getName())) {
|
||||
numeric = new Constraint<>();
|
||||
numeric.add(new Pair(binary.lexpr.getType(), longg, PairOperator.SMALLERDOT));
|
||||
numeric.add(new Pair(binary.rexpr.getType(), longg, PairOperator.SMALLERDOT));
|
||||
numeric.add(new Pair(longg, binary.getType(), PairOperator.EQUALSDOT));
|
||||
numericAdditionOrStringConcatenation.add(numeric);
|
||||
if (info.getAvailableClasses().stream().map(ClassOrInterface::getClassName).collect(Collectors.toCollection(HashSet::new)).contains(longg.getName())) {
|
||||
numeric = new Constraint<>();
|
||||
numeric.add(new Pair(binary.lexpr.getType(), longg, PairOperator.SMALLERDOT));
|
||||
numeric.add(new Pair(binary.rexpr.getType(), longg, PairOperator.SMALLERDOT));
|
||||
numeric.add(new Pair(longg, binary.getType(), PairOperator.EQUALSDOT));
|
||||
numericAdditionOrStringConcatenation.add(numeric);
|
||||
}
|
||||
//PL eingefuegt 2018-07-17
|
||||
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(floatt.getName())) {
|
||||
numeric = new Constraint<>();
|
||||
numeric.add(new Pair(binary.lexpr.getType(), floatt, PairOperator.SMALLERDOT));
|
||||
numeric.add(new Pair(binary.rexpr.getType(), floatt, PairOperator.SMALLERDOT));
|
||||
numeric.add(new Pair(floatt, binary.getType(), PairOperator.EQUALSDOT));
|
||||
numericAdditionOrStringConcatenation.add(numeric);
|
||||
if (info.getAvailableClasses().stream().map(ClassOrInterface::getClassName).collect(Collectors.toCollection(HashSet::new)).contains(floatt.getName())) {
|
||||
numeric = new Constraint<>();
|
||||
numeric.add(new Pair(binary.lexpr.getType(), floatt, PairOperator.SMALLERDOT));
|
||||
numeric.add(new Pair(binary.rexpr.getType(), floatt, PairOperator.SMALLERDOT));
|
||||
numeric.add(new Pair(floatt, binary.getType(), PairOperator.EQUALSDOT));
|
||||
numericAdditionOrStringConcatenation.add(numeric);
|
||||
}
|
||||
//PL eingefuegt 2018-07-17
|
||||
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(doublee.getName())) {
|
||||
numeric = new Constraint<>();
|
||||
numeric.add(new Pair(binary.lexpr.getType(), doublee, PairOperator.SMALLERDOT));
|
||||
numeric.add(new Pair(binary.rexpr.getType(), doublee, PairOperator.SMALLERDOT));
|
||||
numeric.add(new Pair(doublee, binary.getType(), PairOperator.EQUALSDOT));
|
||||
numericAdditionOrStringConcatenation.add(numeric);
|
||||
if (info.getAvailableClasses().stream().map(ClassOrInterface::getClassName).collect(Collectors.toCollection(HashSet::new)).contains(doublee.getName())) {
|
||||
numeric = new Constraint<>();
|
||||
numeric.add(new Pair(binary.lexpr.getType(), doublee, PairOperator.SMALLERDOT));
|
||||
numeric.add(new Pair(binary.rexpr.getType(), doublee, PairOperator.SMALLERDOT));
|
||||
numeric.add(new Pair(doublee, binary.getType(), PairOperator.EQUALSDOT));
|
||||
numericAdditionOrStringConcatenation.add(numeric);
|
||||
}
|
||||
/* PL auskommentiert Anfang 2018-07-17
|
||||
/*
|
||||
In Java passiert bei den binären Operatoren eine sogenannte Type Promotion:
|
||||
https://docs.oracle.com/javase/specs/jls/se7/html/jls-5.html#jls-5.6.2
|
||||
Das bedeutet, dass Java die Typen je nach belieben castet, so lange sie nur von Number erben
|
||||
|
||||
|
||||
numeric = new Constraint<>();
|
||||
numeric.add(new Pair(binary.getType(), number, PairOperator.SMALLERDOT));
|
||||
numericAdditionOrStringConcatenation.add(numeric);
|
||||
* PL auskommentiert Ende 2018-07-17 */
|
||||
|
||||
if(binary.operation.equals(BinaryExpr.Operator.ADD)) {
|
||||
|
||||
if (binary.operation.equals(BinaryExpr.Operator.ADD)) {
|
||||
//Dann kann der Ausdruck auch das aneinanderfügen zweier Strings sein: ("a" + "b") oder (1 + 2)
|
||||
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(string.getName())) {
|
||||
Constraint<Pair> stringConcat = new Constraint<>();
|
||||
stringConcat.add(new Pair(binary.lexpr.getType(), string, PairOperator.EQUALSDOT));
|
||||
stringConcat.add(new Pair(binary.rexpr.getType(), string, PairOperator.EQUALSDOT));
|
||||
stringConcat.add(new Pair(string, binary.getType(), PairOperator.EQUALSDOT));
|
||||
numericAdditionOrStringConcatenation.add(stringConcat);
|
||||
}
|
||||
if (info.getAvailableClasses().stream().map(ClassOrInterface::getClassName).collect(Collectors.toCollection(HashSet::new)).contains(string.getName())) {
|
||||
Constraint<Pair> stringConcat = new Constraint<>();
|
||||
stringConcat.add(new Pair(binary.lexpr.getType(), string, PairOperator.EQUALSDOT));
|
||||
stringConcat.add(new Pair(binary.rexpr.getType(), string, PairOperator.EQUALSDOT));
|
||||
stringConcat.add(new Pair(string, binary.getType(), PairOperator.EQUALSDOT));
|
||||
numericAdditionOrStringConcatenation.add(stringConcat);
|
||||
}
|
||||
}
|
||||
if(numericAdditionOrStringConcatenation.size()<1){
|
||||
throw new TypeinferenceException("Kein Typ für " + binary.operation.toString() + " vorhanden", binary.getOffset());
|
||||
if (numericAdditionOrStringConcatenation.isEmpty()) {
|
||||
throw new TypeinferenceException("Kein Typ für " + binary.operation + " vorhanden", binary.getOffset());
|
||||
}
|
||||
constraintsSet.addOderConstraint(numericAdditionOrStringConcatenation);
|
||||
}else if(binary.operation.equals(BinaryExpr.Operator.LESSEQUAL) ||
|
||||
binary.operation.equals(BinaryExpr.Operator.BIGGEREQUAL) ||
|
||||
binary.operation.equals(BinaryExpr.Operator.BIGGERTHAN) ||
|
||||
binary.operation.equals(BinaryExpr.Operator.LESSTHAN)) {
|
||||
} else if (binary.operation.equals(BinaryExpr.Operator.LESSEQUAL) ||
|
||||
binary.operation.equals(BinaryExpr.Operator.BIGGEREQUAL) ||
|
||||
binary.operation.equals(BinaryExpr.Operator.BIGGERTHAN) ||
|
||||
binary.operation.equals(BinaryExpr.Operator.LESSTHAN)) {
|
||||
/* //eingefuegt PL 2018-05-24
|
||||
Set<Constraint<Pair>> numericRelationConcatenation = new HashSet<>();
|
||||
Constraint<Pair> numeric = new Constraint<>();
|
||||
@@ -376,13 +432,13 @@ public class TYPEStmt implements StatementVisitor{
|
||||
numeric.add(new Pair(binary.rexpr.getType(), doublee, PairOperator.SMALLERDOT));
|
||||
numeric.add(new Pair(binary.getType(), bool, PairOperator.SMALLERDOT));
|
||||
numericRelationConcatenation.add(numeric);
|
||||
|
||||
|
||||
//***ACHTUNG: Moeglicherweise oder und und-Contraint falsch
|
||||
constraintsSet.addOderConstraint(numericRelationConcatenation);
|
||||
//***ACHTUNG: Moeglicherweise oder und und-Contraint falsch
|
||||
*/
|
||||
//Testeise eingefuegt PL 2018-05-24
|
||||
//Hier sollte evtl. noch importe angefragt werden PL 2019-05-07
|
||||
//Hier sollte evtl. noch importe angefragt werden PL 2019-05-07
|
||||
constraintsSet.addUndConstraint(new Pair(binary.lexpr.getType(), number, PairOperator.SMALLERNEQDOT));
|
||||
constraintsSet.addUndConstraint(new Pair(binary.rexpr.getType(), number, PairOperator.SMALLERNEQDOT));
|
||||
//Rückgabetyp ist Boolean
|
||||
@@ -393,109 +449,103 @@ public class TYPEStmt implements StatementVisitor{
|
||||
//constraintsSet.addUndConstraint(new Pair(binary.rexpr.getType(), number, PairOperator.SMALLERDOT));
|
||||
//Rückgabetyp ist Boolean
|
||||
//constraintsSet.addUndConstraint(new Pair(bool, binary.getType(), PairOperator.EQUALSDOT));
|
||||
}else if(binary.operation.equals(BinaryExpr.Operator.EQUAL) || binary.operation.equals(BinaryExpr.Operator.NOTEQUAL)){
|
||||
} else if (binary.operation.equals(BinaryExpr.Operator.EQUAL) || binary.operation.equals(BinaryExpr.Operator.NOTEQUAL)) {
|
||||
/*Auszug aus https://docs.oracle.com/javase/specs/jls/se9/html/jls-15.html#jls-15.21
|
||||
The equality operators may be used to compare two operands that are convertible (§5.1.8) to numeric type, or two operands of type boolean or Boolean, or two operands that are each of either reference type or the null type. All other cases result in a compile-time error.
|
||||
*/
|
||||
//Der Equals Operator geht mit fast allen Typen, daher werden hier keine Constraints gesetzt
|
||||
constraintsSet.addUndConstraint(new Pair(bool, binary.getType(), PairOperator.EQUALSDOT));
|
||||
}else{
|
||||
throw new NotImplementedException();
|
||||
} else {
|
||||
throw new NotImplementedException();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void visit(Literal literal) {
|
||||
//Nothing to do here. Literale erzeugen keine Constraints
|
||||
//PL 2018-06-23 Sie haben einen Typ. Der muesste hier eingefuegt werden
|
||||
//wie hier fuer double gezeigt. Im Momment auskommentiert, weil zu wenige Literaltypen
|
||||
//funktionieren
|
||||
if (literal.value instanceof Short) {
|
||||
constraintsSet.addUndConstraint(new Pair(literal.getType(), shortt, PairOperator.EQUALSDOT));
|
||||
return;
|
||||
}
|
||||
if (literal.value instanceof Byte) {
|
||||
constraintsSet.addUndConstraint(new Pair(literal.getType(), bytee, PairOperator.EQUALSDOT));
|
||||
return;
|
||||
}
|
||||
if (literal.value instanceof Float) {
|
||||
constraintsSet.addUndConstraint(new Pair(literal.getType(), floatt, PairOperator.EQUALSDOT));
|
||||
return;
|
||||
}
|
||||
if (literal.value instanceof Double) {
|
||||
constraintsSet.addUndConstraint(new Pair(literal.getType(), doublee, PairOperator.EQUALSDOT));
|
||||
return;
|
||||
}
|
||||
if (literal.value instanceof Long) {
|
||||
constraintsSet.addUndConstraint(new Pair(literal.getType(), longg, PairOperator.EQUALSDOT));
|
||||
return;
|
||||
}
|
||||
if (literal.value instanceof Integer) {
|
||||
//constraintsSet.addUndConstraint(new Pair(literal.getType(),integer, PairOperator.EQUALSDOT));
|
||||
// /*
|
||||
HashSet<JavaClassName> clNames = info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new));
|
||||
Set<Constraint> oderConstraints = new HashSet<>();
|
||||
Constraint constraint = new Constraint();
|
||||
constraint.add(new Pair(literal.getType(), integer, PairOperator.EQUALSDOT));
|
||||
oderConstraints.add(constraint);
|
||||
if (clNames.stream().filter(x -> x.toString().equals("java.lang.Double")).findAny().isPresent()) {
|
||||
constraint = new Constraint();
|
||||
constraint.add(new Pair(literal.getType(), doublee, PairOperator.EQUALSDOT));
|
||||
oderConstraints.add(constraint);
|
||||
}
|
||||
if (clNames.stream().filter(x -> x.toString().equals("java.lang.Long")).findAny().isPresent()) {
|
||||
constraint = new Constraint();
|
||||
constraint.add(new Pair(literal.getType(), longg, PairOperator.EQUALSDOT));
|
||||
oderConstraints.add(constraint);
|
||||
}
|
||||
if (clNames.stream().filter(x -> x.toString().equals("java.lang.Float")).findAny().isPresent()) {
|
||||
constraint = new Constraint();
|
||||
constraint.add(new Pair(literal.getType(), floatt, PairOperator.EQUALSDOT));
|
||||
oderConstraints.add(constraint);
|
||||
}
|
||||
if (clNames.stream().filter(x -> x.toString().equals("java.lang.Short")).findAny().isPresent()) {
|
||||
constraint = new Constraint();
|
||||
constraint.add(new Pair(literal.getType(), shortt, PairOperator.EQUALSDOT));
|
||||
oderConstraints.add(constraint);
|
||||
}
|
||||
if (clNames.stream().filter(x -> x.toString().equals("java.lang.Byte")).findAny().isPresent()) {
|
||||
constraint = new Constraint();
|
||||
constraint.add(new Pair(literal.getType(), bytee, PairOperator.EQUALSDOT));
|
||||
oderConstraints.add(constraint);
|
||||
}
|
||||
constraintsSet.addOderConstraint(oderConstraints);
|
||||
// */
|
||||
return;
|
||||
}
|
||||
if (literal.value instanceof Short) {
|
||||
constraintsSet.addUndConstraint(new Pair(literal.getType(),shortt, PairOperator.EQUALSDOT));
|
||||
return;
|
||||
}
|
||||
if (literal.value instanceof Byte) {
|
||||
constraintsSet.addUndConstraint(new Pair(literal.getType(),bytee, PairOperator.EQUALSDOT));
|
||||
return;
|
||||
}
|
||||
if (literal.value instanceof Float) {
|
||||
constraintsSet.addUndConstraint(new Pair(literal.getType(),floatt, PairOperator.EQUALSDOT));
|
||||
return;
|
||||
}
|
||||
if (literal.value instanceof String) {
|
||||
constraintsSet.addUndConstraint(new Pair(literal.getType(),string, PairOperator.EQUALSDOT));
|
||||
return;
|
||||
}
|
||||
if (literal.value instanceof Boolean) {
|
||||
constraintsSet.addUndConstraint(new Pair(literal.getType(),bool, PairOperator.EQUALSDOT));
|
||||
return;
|
||||
}
|
||||
else {
|
||||
throw new NotImplementedException();
|
||||
}
|
||||
//PL 2018-06-23 Literale haben einen Typ. Der muesste hier eingefuegt werden
|
||||
//wie hier fuer double gezeigt. Im Moment auskommentiert, weil zu wenige Literaltypen
|
||||
//funktionieren
|
||||
if (literal.value instanceof Short) {
|
||||
constraintsSet.addUndConstraint(new Pair(literal.getType(), shortt, PairOperator.EQUALSDOT));
|
||||
return;
|
||||
}
|
||||
if (literal.value instanceof Byte) {
|
||||
constraintsSet.addUndConstraint(new Pair(literal.getType(), bytee, PairOperator.EQUALSDOT));
|
||||
return;
|
||||
}
|
||||
if (literal.value instanceof Float) {
|
||||
constraintsSet.addUndConstraint(new Pair(literal.getType(), floatt, PairOperator.EQUALSDOT));
|
||||
return;
|
||||
}
|
||||
if (literal.value instanceof Double) {
|
||||
constraintsSet.addUndConstraint(new Pair(literal.getType(), doublee, PairOperator.EQUALSDOT));
|
||||
return;
|
||||
}
|
||||
if (literal.value instanceof Long) {
|
||||
constraintsSet.addUndConstraint(new Pair(literal.getType(), longg, PairOperator.EQUALSDOT));
|
||||
return;
|
||||
}
|
||||
if (literal.value instanceof Integer) {
|
||||
HashSet<JavaClassName> clNames = info.getAvailableClasses().stream().map(ClassOrInterface::getClassName).collect(Collectors.toCollection(HashSet::new));
|
||||
Set<Constraint<Pair>> oderConstraints = new HashSet<>();
|
||||
Constraint<Pair> constraint = new Constraint<>();
|
||||
constraint.add(new Pair(literal.getType(), integer, PairOperator.EQUALSDOT));
|
||||
oderConstraints.add(constraint);
|
||||
if (clNames.stream().anyMatch(x -> x.toString().equals("java.lang.Double"))) {
|
||||
constraint = new Constraint<>();
|
||||
constraint.add(new Pair(literal.getType(), doublee, PairOperator.EQUALSDOT));
|
||||
oderConstraints.add(constraint);
|
||||
}
|
||||
if (clNames.stream().anyMatch(x -> x.toString().equals("java.lang.Long"))) {
|
||||
constraint = new Constraint<>();
|
||||
constraint.add(new Pair(literal.getType(), longg, PairOperator.EQUALSDOT));
|
||||
oderConstraints.add(constraint);
|
||||
}
|
||||
if (clNames.stream().anyMatch(x -> x.toString().equals("java.lang.Float"))) {
|
||||
constraint = new Constraint<>();
|
||||
constraint.add(new Pair(literal.getType(), floatt, PairOperator.EQUALSDOT));
|
||||
oderConstraints.add(constraint);
|
||||
}
|
||||
if (clNames.stream().anyMatch(x -> x.toString().equals("java.lang.Short"))) {
|
||||
constraint = new Constraint<>();
|
||||
constraint.add(new Pair(literal.getType(), shortt, PairOperator.EQUALSDOT));
|
||||
oderConstraints.add(constraint);
|
||||
}
|
||||
if (clNames.stream().anyMatch(x -> x.toString().equals("java.lang.Byte"))) {
|
||||
constraint = new Constraint<>();
|
||||
constraint.add(new Pair(literal.getType(), bytee, PairOperator.EQUALSDOT));
|
||||
oderConstraints.add(constraint);
|
||||
}
|
||||
constraintsSet.addOderConstraint(oderConstraints);
|
||||
return;
|
||||
}
|
||||
if (literal.value instanceof Short) {
|
||||
constraintsSet.addUndConstraint(new Pair(literal.getType(), shortt, PairOperator.EQUALSDOT));
|
||||
return;
|
||||
}
|
||||
if (literal.value instanceof Byte) {
|
||||
constraintsSet.addUndConstraint(new Pair(literal.getType(), bytee, PairOperator.EQUALSDOT));
|
||||
return;
|
||||
}
|
||||
if (literal.value instanceof Float) {
|
||||
constraintsSet.addUndConstraint(new Pair(literal.getType(), floatt, PairOperator.EQUALSDOT));
|
||||
return;
|
||||
}
|
||||
if (literal.value instanceof String) {
|
||||
constraintsSet.addUndConstraint(new Pair(literal.getType(), string, PairOperator.EQUALSDOT));
|
||||
return;
|
||||
}
|
||||
if (literal.value instanceof Boolean) {
|
||||
constraintsSet.addUndConstraint(new Pair(literal.getType(), bool, PairOperator.EQUALSDOT));
|
||||
} else {
|
||||
throw new NotImplementedException();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void visit(Return returnExpr) {
|
||||
returnExpr.retexpr.accept(this);
|
||||
constraintsSet.addUndConstraint(new Pair(returnExpr.getType(),info.getCurrentTypeScope().getReturnType(), PairOperator.EQUALSDOT));
|
||||
constraintsSet.addUndConstraint(new Pair(returnExpr.getType(), info.getCurrentTypeScope().getReturnType(), PairOperator.EQUALSDOT));
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -518,36 +568,22 @@ public class TYPEStmt implements StatementVisitor{
|
||||
//Im Falle von this, müssen die Generics in der Klasse als RefTypes behandelt werden.
|
||||
ClassOrInterface currentClass = info.getCurrentClass();
|
||||
List<RefTypeOrTPHOrWildcardOrGeneric> params = new ArrayList<>();
|
||||
for(GenericTypeVar gtv : currentClass.getGenerics()){
|
||||
for (GenericTypeVar gtv : currentClass.getGenerics()) {
|
||||
params.add(new GenericRefType(gtv.getName(), aThis.getOffset()));
|
||||
}
|
||||
RefType thisType = new RefType(currentClass.getClassName(), params, aThis.getOffset());
|
||||
constraintsSet.addUndConstraint(new Pair(
|
||||
aThis.getType(), thisType, PairOperator.EQUALSDOT));
|
||||
}
|
||||
|
||||
private static TypeScope createNullTypeScope() {
|
||||
return new TypeScope() {
|
||||
@Override
|
||||
public Iterable<? extends GenericTypeVar> getGenerics() {
|
||||
return new ArrayList<>();
|
||||
}
|
||||
|
||||
@Override
|
||||
public RefTypeOrTPHOrWildcardOrGeneric getReturnType() {
|
||||
return null;
|
||||
}
|
||||
};
|
||||
aThis.getType(), thisType, PairOperator.EQUALSDOT));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void visit(WhileStmt whileStmt) {
|
||||
RefType booleanType = new RefType(ASTFactory.createClass(java.lang.Boolean.class).getClassName(), new NullToken());
|
||||
//Expression inferieren:
|
||||
// Infer expression:
|
||||
whileStmt.expr.accept(this);
|
||||
//Expression muss boolean sein:
|
||||
// Expression must be boolean:
|
||||
constraintsSet.addUndConstraint(new Pair(whileStmt.expr.getType(), booleanType, PairOperator.EQUALSDOT));
|
||||
//LoopBlock inferieren:
|
||||
// Infer loopBlock:
|
||||
whileStmt.loopBlock.accept(this);
|
||||
}
|
||||
|
||||
@@ -556,11 +592,15 @@ public class TYPEStmt implements StatementVisitor{
|
||||
throw new NotImplementedException();
|
||||
}
|
||||
|
||||
/*
|
||||
METHOD CALL Section:
|
||||
*/
|
||||
|
||||
@Override
|
||||
public void visit(AssignToField assignLeftSide) {
|
||||
//Hier ist kein Code nötig. Es werden keine extra Constraints generiert
|
||||
//HIER muss Code rein PL 2018-10-24
|
||||
assignLeftSide.field.accept(this);
|
||||
//HIER muss Code rein PL 2018-10-24
|
||||
assignLeftSide.field.accept(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -573,63 +613,48 @@ public class TYPEStmt implements StatementVisitor{
        //TODO: No constraints are generated for a super call yet
    }

    /*
     * METHOD CALL section:
     */

    protected Set<Constraint<Pair>> generateConstraint(MethodCall forMethod, MethodAssumption assumption,
            TypeInferenceBlockInformation info, GenericsResolver resolver) {
        Constraint<Pair> methodConstraint, extendsMethodConstraint;
        methodConstraint = new Constraint<>(assumption.isInherited());
        extendsMethodConstraint = new Constraint<>(assumption.isInherited()); // PL 2023-01-24: replaces the duplication in visit(MethodCall)

        ClassOrInterface receiverCl = assumption.getReceiver();
        /*
        List<RefTypeOrTPHOrWildcardOrGeneric> params = new ArrayList<>();
        for (GenericTypeVar gtv : receiverCl.getGenerics()) {
            // All generics are converted to TPHs.
            params.add(resolver.resolve(gtv.getName()));
        }

        RefTypeOrTPHOrWildcardOrGeneric receiverType = new RefType(assumption.getReceiver().getClassName(), params, forMethod.getOffset());
        */

        RefTypeOrTPHOrWildcardOrGeneric receiverType = assumption.getReceiverType(resolver);
        methodConstraint.add(new Pair(forMethod.receiver.getType(), receiverType, PairOperator.EQUALSDOT)); //PL 2020-03-17 SMALLERDOT changed to EQUALSDOT, because all inherited methods are contained in the respective classes.

        //PL 2023-01-24: for that, ? extends receiverType added as well
        extendsMethodConstraint.add(new Pair(forMethod.receiver.getType(), new ExtendsWildcardType(receiverType, receiverType.getOffset()), PairOperator.EQUALSDOT));

        // mutual linking of the two sets of type assumptions
        methodConstraint.setExtendConstraint(extendsMethodConstraint);
        extendsMethodConstraint.setExtendConstraint(methodConstraint);

        // For bytecode generation PL 2020-03-09, currently unused BEGIN
        //methodConstraint.add(new Pair(forMethod.receiverType, retType, PairOperator.EQUALSDOT));
        // For bytecode generation PL 2020-03-09, currently unused END

        methodConstraint.add(new Pair(assumption.getReturnType(resolver), forMethod.getType(), PairOperator.EQUALSDOT));
        extendsMethodConstraint.add(new Pair(assumption.getReturnType(resolver), forMethod.getType(), PairOperator.EQUALSDOT));

        ((TypePlaceholder) forMethod.getType()).setOrCons((byte) -1); // for determining the maximum

        Set<Pair> parameterContraints = generateParameterConstraints(forMethod, assumption, info, resolver);
        methodConstraint.addAll(parameterContraints);
        extendsMethodConstraint.addAll(parameterContraints);

        Set<Pair> methodSignatureConstraint = generatemethodSignatureConstraint(forMethod, assumption, info, resolver);
        System.out.println("methodSignatureConstraint; " + methodSignatureConstraint);
        methodConstraint.setmethodSignatureConstraint(methodSignatureConstraint);
        extendsMethodConstraint.setmethodSignatureConstraint(methodSignatureConstraint);

        Set<Constraint<Pair>> ret = new HashSet<>();
        ret.add(methodConstraint);
        ret.add(extendsMethodConstraint);
@@ -639,107 +664,46 @@ public class TYPEStmt implements StatementVisitor{
    protected Set<Pair> generateParameterConstraints(MethodCall foMethod, MethodAssumption assumption,
            TypeInferenceBlockInformation info, GenericsResolver resolver) {
        Set<Pair> ret = new HashSet<>();
        for (int i = 0; i < foMethod.arglist.getArguments().size(); i++) {
            foMethod.arglist.getArguments().get(i).accept(this);
            RefTypeOrTPHOrWildcardOrGeneric argType = foMethod.arglist.getArguments().get(i).getType();
            RefTypeOrTPHOrWildcardOrGeneric assType = assumption.getArgTypes(resolver).get(i);

            ret.add(new Pair(argType, assType, PairOperator.SMALLERDOT));

            // For bytecode generation PL 2020-03-09, currently unused BEGIN
            // ret.add(new Pair(foMethod.argTypes.get(i), assType, PairOperator.EQUALSDOT));
            // For bytecode generation PL 2020-03-09, currently unused END
        }
        return ret;
    }

    protected Set<Pair> generatemethodSignatureConstraint(MethodCall foMethod, MethodAssumption assumption,
            TypeInferenceBlockInformation info, GenericsResolver resolver) {
        Set<Pair> ret = new HashSet<>();

        for (int i = 0; i < foMethod.arglist.getArguments().size(); i++) {
            // Map MethodCall.signature (argument types) to the argument types of the selected method (assumption.params)
            ret.add(new Pair(foMethod.signature.get(i), assumption.getArgTypes().get(i), PairOperator.EQUALSDOT));
        }

        // Map MethodCall.signature (return type) to the return type of the selected method (assumption.returnType)
        System.out.println(foMethod.name);
        ret.add(new Pair(foMethod.signature.get(foMethod.signature.size() - 1), assumption.getReturnType(), PairOperator.EQUALSDOT));
        return ret;
    }

    public static List<MethodAssumption> getMethods(String name, int numArgs, TypeInferenceBlockInformation info) {
        List<MethodAssumption> ret = new ArrayList<>();
        //TODO: re-attach the apply methods. They could possibly also show up in the assumptions (reconsider)
        if (name.equals("apply")) {
            List<GenericRefType> funNParams = new ArrayList<>();
            for (int i = 0; i < numArgs + 1; i++) {
                //funNParams.add(TypePlaceholder.fresh(new NullToken()));
                funNParams.add(new GenericRefType(NameGenerator.makeNewName(),
                        new NullToken()));
            }
            funNParams.get(funNParams.size() - 1);
            ret.add(new MethodAssumption(new FunNClass(funNParams), funNParams.get(funNParams.size() - 1), funNParams.subList(0, funNParams.size() - 1),
                    new TypeScope() {
                        @Override
                        public Iterable<? extends GenericTypeVar> getGenerics() {
                            throw new NotImplementedException();
                        }

                        @Override
                        public RefTypeOrTPHOrWildcardOrGeneric getReturnType() {
                            throw new NotImplementedException();
                        }
                    }, false));
        }
        for (ClassOrInterface cl : info.getAvailableClasses()) {
            for (Method m : cl.getMethods()) {
                if (m.getName().equals(name) &&
                        m.getParameterList().getFormalparalist().size() == numArgs) {
                    RefTypeOrTPHOrWildcardOrGeneric retType = m.getReturnType(); //info.checkGTV(m.getReturnType());

                    ret.add(new MethodAssumption(cl, retType, convertParams(m.getParameterList(), info),
                            createTypeScope(cl, m), m.isInherited));
                }
            }
        }
        return ret;
    }

    public static List<MethodAssumption> getMethods(String name, ArgumentList arglist, TypeInferenceBlockInformation info) {
        return getMethods(name, arglist.getArguments().size(), info);
    }

    protected static List<RefTypeOrTPHOrWildcardOrGeneric> convertParams(ParameterList parameterList, TypeInferenceBlockInformation info) {
        //TODO: Here the parameters have to be linked with the TPHs in the generics of the receiver
        /*
        Example:
        auto test = new List<String>();
        test.add("hallo");

        Here the receiver gets the constraint TPH Receiver <. List<TPH A>
        Then the parameter must produce the constraint: TPH A <. String
        */
        List<RefTypeOrTPHOrWildcardOrGeneric> params = new ArrayList<>();
        for (FormalParameter fp : parameterList.getFormalparalist()) {
            params.add(fp.getType()); //info.checkGTV(fp.getType())); //PL 2018-06-22 GTVs should be preserved in arguments
        }
        return params;
    }

    public List<MethodAssumption> getConstructors(TypeInferenceBlockInformation info, RefType ofType, ArgumentList argList) {
        List<MethodAssumption> ret = new ArrayList<>();
        for (ClassOrInterface cl : info.getAvailableClasses()) {
            if (cl.getClassName().equals(ofType.getName())) {
                for (Method m : cl.getConstructors()) {
                    if (m.getParameterList().getFormalparalist().size() == argList.getArguments().size()) {
                        ret.add(new MethodAssumption(cl, cl.generateTypeOfThisClass(), convertParams(m.getParameterList(),
                                info), createTypeScope(cl, m), m.isInherited));
                    }
                }
            }
        }
@@ -748,12 +712,12 @@ public class TYPEStmt implements StatementVisitor{
    }

    protected Constraint<Pair> generateConstructorConstraint(NewClass forConstructor, MethodAssumption assumption,
            TypeInferenceBlockInformation info, GenericsResolver resolver) {
        Constraint<Pair> methodConstraint = new Constraint<>();
        //WHAT IS THE POINT OF THIS CONSTRAINT???
        //Isn't it always classname <. classname and therefore redundant?
        methodConstraint.add(new Pair(assumption.getReturnType(resolver), forConstructor.getType(),
                PairOperator.SMALLERDOT));
        //WHAT IS THE POINT OF THIS CONSTRAINT???
        methodConstraint.addAll(generateParameterConstraints(forConstructor, assumption, info, resolver));
        return methodConstraint;
@@ -0,0 +1,51 @@
package de.dhbwstuttgart.typeinference.unify;

import de.dhbwstuttgart.typeinference.unify.model.FunNType;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class DistributeVariance extends VisitUnifyTypeVisitor<Integer> {

    public static int inverseVariance(int variance) {
        // TODO: or should this simply be "return -variance;" ???
        return switch (variance) {
            case 1 -> -1;
            case -1 -> 1;
            default -> 0;
        };
    }

    @Override
    public PlaceholderType visit(PlaceholderType phty, Integer ht) {
        if (ht != 0) {
            if (phty.getVariance() == 0) {
                phty.setVariance(ht);
            }
            //PL 2018-05-17 do not change the original variance
            //else if (phty.getVariance() != ht) {
            //    phty.setVariance(0);
            //}
        }
        return phty;
    }

    public FunNType visit(FunNType funnty, Integer ht) {
        List<UnifyType> param = new ArrayList<>(Arrays.asList(funnty.getTypeParams().get()));
        UnifyType resultType = param.removeLast();
        Integer htInverse = inverseVariance(ht);
        param = param.stream()
                .map(x -> x.accept(this, htInverse))
                .collect(Collectors.toCollection(ArrayList::new));
        param.add(resultType.accept(this, ht));
        return FunNType.getFunNType(new TypeParams(param));
    }

}
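The visitor above pushes a variance annotation into function types: parameter positions receive the inverted variance, the result position keeps the incoming one, and placeholders only pick up a variance if they do not have one yet. A minimal, self-contained sketch of the same idea follows; the record types and names here are hypothetical stand-ins, not the compiler's UnifyType model.

```java
// Standalone illustration of contravariant variance flipping for function types.
import java.util.List;

public class VarianceDemo {
    sealed interface Ty permits Var, Fun {}
    record Var(String name, int variance) implements Ty {}
    record Fun(List<Ty> params, Ty result) implements Ty {}

    static int inverse(int variance) {
        return switch (variance) { case 1 -> -1; case -1 -> 1; default -> 0; };
    }

    // Parameters receive the inverted variance, the result keeps the incoming one,
    // and a variable keeps an already assigned variance.
    static Ty distribute(Ty t, int variance) {
        if (t instanceof Var v) {
            return v.variance() == 0 ? new Var(v.name(), variance) : v;
        }
        Fun f = (Fun) t;
        List<Ty> ps = f.params().stream().map(p -> distribute(p, inverse(variance))).toList();
        return new Fun(ps, distribute(f.result(), variance));
    }

    public static void main(String[] args) {
        Ty fn = new Fun(List.of(new Var("A", 0)), new Var("R", 0));
        System.out.println(distribute(fn, 1)); // A becomes contravariant (-1), R covariant (1)
    }
}
```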
@@ -0,0 +1,15 @@
package de.dhbwstuttgart.typeinference.unify;

import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;

import java.util.HashMap;

public class FreshPlaceholder extends VisitUnifyTypeVisitor<HashMap<PlaceholderType, PlaceholderType>> {

    @Override
    public PlaceholderType visit(PlaceholderType phty, HashMap<PlaceholderType, PlaceholderType> ht) {
        return ht.get(phty);
    }
}
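FreshPlaceholder replaces each placeholder by looking it up in a precomputed old-to-fresh map, which is how a term gets rewritten with fresh copies of its type variables. A rough standalone sketch of that renaming step, with hypothetical types rather than the project's visitor API:

```java
// Rename placeholder leaves in a term via a precomputed old -> fresh map.
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class FreshNamesDemo {
    record Term(String name, List<Term> args) {}

    static Term rename(Term t, Map<String, String> fresh) {
        // Leaves whose name is registered in the map are placeholders; replace them.
        if (t.args().isEmpty() && fresh.containsKey(t.name())) {
            return new Term(fresh.get(t.name()), List.of());
        }
        return new Term(t.name(), t.args().stream().map(a -> rename(a, fresh)).toList());
    }

    public static void main(String[] args) {
        Map<String, String> fresh = new HashMap<>(Map.of("A", "A'"));
        Term listOfA = new Term("List", List.of(new Term("A", List.of())));
        System.out.println(rename(listOfA, fresh)); // List with A replaced by A'
    }
}
```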
@@ -1,23 +1,22 @@
package de.dhbwstuttgart.typeinference.unify;

import com.google.common.collect.Sets;
import de.dhbwstuttgart.typeinference.unify.interfaces.ISetOperations;

import java.util.List;
import java.util.Set;

/**
 * Implements set operations using google guava.
 *
 * @author DH10STF
 */
public class GuavaSetOperations implements ISetOperations {

    @Override
    public <B> Set<List<B>> cartesianProduct(List<? extends Set<? extends B>> sets) {
        // Wraps the call to google guava
        return Sets.cartesianProduct(sets);
    }

}
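For reference, Guava's Sets.cartesianProduct returns a set of lists computed on demand, one list per combination, with the first input set varying slowest. A small usage sketch, assuming the Guava dependency is on the classpath:

```java
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;

import java.util.List;
import java.util.Set;

public class CartesianDemo {
    public static void main(String[] args) {
        // Two factor sets; the product enumerates every combination as a List.
        Set<List<Integer>> product = Sets.cartesianProduct(List.of(
                ImmutableSet.of(1, 2),
                ImmutableSet.of(10, 20)));
        // Prints [[1, 10], [1, 20], [2, 10], [2, 20]]
        System.out.println(product);
    }
}
```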
@@ -1,108 +1,100 @@
package de.dhbwstuttgart.typeinference.unify;

import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify;
import de.dhbwstuttgart.typeinference.unify.model.*;

import java.util.*;
import java.util.stream.Collectors;

/**
 * Implementation of the Martelli-Montanari unification algorithm.
 *
 * @author Florian Steurer
 */
public class MartelliMontanariUnify implements IUnify {

    @Override
    public Optional<Unifier> unify(Set<UnifyType> terms) {
        // Sets with less than 2 terms are trivially unified
        if (terms.size() < 2)
            return Optional.of(Unifier.identity());

        // For the set of terms {t1,...,tn},
        // build a list of equations {(t1 = t2), (t2 = t3), (t3 = t4), ....}
        ArrayList<UnifyPair> termsList = new ArrayList<UnifyPair>();
        Iterator<UnifyType> iter = terms.iterator();
        UnifyType prev = iter.next();
        while (iter.hasNext()) {
            UnifyType next = iter.next();
            termsList.add(new UnifyPair(prev, next, PairOperator.EQUALSDOT));
            prev = next;
        }

        // Start with the identity unifier. Substitutions will be added later.
        Unifier mgu = Unifier.identity();

        // Apply rules while possible
        int idx = 0;
        while (idx < termsList.size()) {
            UnifyPair pair = termsList.get(idx);
            UnifyType rhsType = pair.getRhsType();
            UnifyType lhsType = pair.getLhsType();
            TypeParams rhsTypeParams = rhsType.getTypeParams();
            TypeParams lhsTypeParams = lhsType.getTypeParams();

            // REDUCE - Rule
            if (!(rhsType instanceof PlaceholderType) && !(lhsType instanceof PlaceholderType)) {
                Set<UnifyPair> result = new HashSet<>();

                // f<...> = g<...> with f != g are not unifiable
                if (!rhsType.getName().equals(lhsType.getName()))
                    return Optional.empty(); // conflict
                // f<t1,...,tn> = f<s1,...,sm> are not unifiable
                if (rhsTypeParams.size() != lhsTypeParams.size())
                    return Optional.empty(); // conflict
                // f = g is not unifiable (cannot be f = f because erase rule would have been applied)
                //if(rhsTypeParams.size() == 0)
                //return Optional.empty();

                // Unpack the arguments
                for (int i = 0; i < rhsTypeParams.size(); i++)
                    result.add(new UnifyPair(rhsTypeParams.get(i), lhsTypeParams.get(i), PairOperator.EQUALSDOT));

                termsList.remove(idx);
                termsList.addAll(result);
                continue;
            }

            // DELETE - Rule
            if (pair.getRhsType().equals(pair.getLhsType())) {
                termsList.remove(idx);
                continue;
            }

            // SWAP - Rule
            if (!(lhsType instanceof PlaceholderType) && (rhsType instanceof PlaceholderType)) {
                termsList.remove(idx);
                termsList.add(new UnifyPair(rhsType, lhsType, PairOperator.EQUALSDOT));
                continue;
            }

            // OCCURS-CHECK
            if (pair.getLhsType() instanceof PlaceholderType
                    && pair.getRhsType().getTypeParams().occurs((PlaceholderType) pair.getLhsType()))
                return Optional.empty();

            // SUBST - Rule
            if (lhsType instanceof PlaceholderType) {
                mgu.add((PlaceholderType) lhsType, rhsType);
                //PL 2018-04-01 check whether it is correct that no substitutions need to be passed on.
                termsList = termsList.stream().map(x -> mgu.apply(x)).collect(Collectors.toCollection(ArrayList::new));
                idx = idx + 1 == termsList.size() ? 0 : idx + 1;
                continue;
            }

            idx++;
        }

        return Optional.of(mgu);
    }
}
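The rule loop above (DELETE, SWAP, REDUCE, occurs check, SUBST) is the standard Martelli-Montanari scheme. The following self-contained toy version shows the same rules on a minimal term type, using a simplified work-list formulation rather than the class's indexed loop; Term, Eq, and the substitution map are hypothetical stand-ins for UnifyType, UnifyPair, and Unifier.

```java
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;

public class MiniUnify {
    record Term(String name, List<Term> args, boolean isVar) {
        static Term var(String n) { return new Term(n, List.of(), true); }
        static Term app(String n, Term... as) { return new Term(n, List.of(as), false); }
    }

    record Eq(Term lhs, Term rhs) {}

    static boolean occurs(String var, Term t) {
        if (t.isVar()) return t.name().equals(var);
        return t.args().stream().anyMatch(a -> occurs(var, a));
    }

    // Apply the substitution, following chains of bindings.
    static Term apply(Map<String, Term> subst, Term t) {
        if (t.isVar()) {
            Term bound = subst.get(t.name());
            return bound == null ? t : apply(subst, bound);
        }
        return new Term(t.name(), t.args().stream().map(a -> apply(subst, a)).toList(), false);
    }

    static Optional<Map<String, Term>> unify(Deque<Eq> work) {
        Map<String, Term> mgu = new HashMap<>();
        while (!work.isEmpty()) {
            Eq eq = work.pop();
            Term l = apply(mgu, eq.lhs()), r = apply(mgu, eq.rhs());
            if (l.equals(r)) continue;                                        // DELETE
            if (!l.isVar() && r.isVar()) { work.push(new Eq(r, l)); continue; } // SWAP
            if (l.isVar()) {                                                  // SUBST + occurs check
                if (occurs(l.name(), r)) return Optional.empty();
                mgu.put(l.name(), r);
                continue;
            }
            if (!l.name().equals(r.name()) || l.args().size() != r.args().size())
                return Optional.empty();                                      // clash: f<...> = g<...>
            for (int i = 0; i < l.args().size(); i++)                         // REDUCE
                work.push(new Eq(l.args().get(i), r.args().get(i)));
        }
        return Optional.of(mgu);
    }

    public static void main(String[] args) {
        Deque<Eq> work = new ArrayDeque<>();
        work.push(new Eq(Term.app("List", Term.var("a")), Term.app("List", Term.app("String"))));
        System.out.println(unify(work)); // a is bound to String
    }
}
```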
@@ -11,76 +11,77 @@ import java.util.stream.Collectors;

/**
 * Implementation of match derived from unification algorithm.
 *
 * @author Martin Pluemicke
 */
public class Match implements IMatch {

    @Override
    // the pattern must come first
    //A<X> =. A<Integer> ==> True
    //A<Integer> =. A<X> ==> False
    public Optional<Unifier> match(ArrayList<UnifyPair> termsList) {

        // Start with the identity unifier. Substitutions will be added later.
        Unifier mgu = Unifier.identity();

        // Apply rules while possible
        int idx = 0;
        while (idx < termsList.size()) {
            UnifyPair pair = termsList.get(idx);
            UnifyType rhsType = pair.getRhsType();
            UnifyType lhsType = pair.getLhsType();
            TypeParams rhsTypeParams = rhsType.getTypeParams();
            TypeParams lhsTypeParams = lhsType.getTypeParams();

            // REDUCE - Rule
            if (!(rhsType instanceof PlaceholderType) && !(lhsType instanceof PlaceholderType)) {
                Set<UnifyPair> result = new HashSet<>();

                // f<...> = g<...> with f != g are not unifiable
                if (!rhsType.getName().equals(lhsType.getName()))
                    return Optional.empty(); // conflict
                // f<t1,...,tn> = f<s1,...,sm> are not unifiable
                if (rhsTypeParams.size() != lhsTypeParams.size())
                    return Optional.empty(); // conflict
                // f = g is not unifiable (cannot be f = f because erase rule would have been applied)
                //if(rhsTypeParams.size() == 0)
                //return Optional.empty();

                // Unpack the arguments
                for (int i = 0; i < rhsTypeParams.size(); i++)
                    result.add(new UnifyPair(lhsTypeParams.get(i), rhsTypeParams.get(i), PairOperator.EQUALSDOT));

                termsList.remove(idx);
                termsList.addAll(result);
                continue;
            }

            // DELETE - Rule
            if (pair.getRhsType().equals(pair.getLhsType())) {
                termsList.remove(idx);
                continue;
            }

            // SWAP - Rule
            if (!(lhsType instanceof PlaceholderType) && (rhsType instanceof PlaceholderType)) {
                return Optional.empty(); // conflict
            }

            // OCCURS-CHECK
            //deleted

            // SUBST - Rule
            if (lhsType instanceof PlaceholderType) {
                mgu.add((PlaceholderType) lhsType, rhsType);
                termsList = termsList.stream().map(mgu::applyLeft).collect(Collectors.toCollection(ArrayList::new));
                idx = idx + 1 == termsList.size() ? 0 : idx + 1;
                continue;
            }

            idx++;
        }

        return Optional.of(mgu);
    }
}
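Compared to the unifier above, match is one-sided: only placeholders on the pattern (left) side get bound, SWAP is a conflict instead of a rewrite, and the occurs check is dropped. A compact standalone sketch of that asymmetry, again with hypothetical types rather than the project's classes:

```java
// One-sided matching: only variables in the pattern (left) get bound.
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;

public class MiniMatch {
    record Term(String name, List<Term> args, boolean isVar) {}

    static Optional<Map<String, Term>> match(Term pattern, Term target, Map<String, Term> subst) {
        if (pattern.isVar()) {                                    // SUBST: bind or check consistency
            Term bound = subst.putIfAbsent(pattern.name(), target);
            return (bound == null || bound.equals(target)) ? Optional.of(subst) : Optional.empty();
        }
        if (target.isVar()                                        // variable on the target side is a conflict
                || !pattern.name().equals(target.name())
                || pattern.args().size() != target.args().size()) {
            return Optional.empty();
        }
        for (int i = 0; i < pattern.args().size(); i++) {         // REDUCE argument-wise
            if (match(pattern.args().get(i), target.args().get(i), subst).isEmpty()) return Optional.empty();
        }
        return Optional.of(subst);
    }

    public static void main(String[] args) {
        Term x = new Term("X", List.of(), true);
        Term aOfX = new Term("A", List.of(x), false);
        Term aOfInt = new Term("A", List.of(new Term("Integer", List.of(), false)), false);
        System.out.println(match(aOfX, aOfInt, new HashMap<>())); // X -> Integer
        System.out.println(match(aOfInt, aOfX, new HashMap<>())); // empty: the pattern must come first
    }
}
```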
File diff suppressed because it is too large
@@ -10,90 +10,80 @@ import java.util.Set;
|
||||
import java.util.concurrent.ForkJoinPool;
|
||||
|
||||
public class TypeUnify {
|
||||
|
||||
public static Writer statistics;
|
||||
/**
|
||||
* unify parallel ohne result modell
|
||||
* @param undConstrains
|
||||
* @param oderConstraints
|
||||
* @param fc
|
||||
* @param logFile
|
||||
* @param log
|
||||
* @return
|
||||
*/
|
||||
public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallel resultModel, UnifyTaskModelParallel taskModel) {
|
||||
ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors(), ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true);
|
||||
taskModel.setPool(pool);
|
||||
resultModel.setPool(pool);
|
||||
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObject(logFile, pool), log, resultModel, pool);
|
||||
pool.invoke(unifyTask);
|
||||
Set<Set<UnifyPair>> res = unifyTask.join();
|
||||
return res;
|
||||
}
|
||||
|
||||
/**
|
||||
* unify asynchron mit Rückgabe UnifyResultModel, ohne dass alle results gesammelt sind
|
||||
* @param undConstrains
|
||||
* @param oderConstraints
|
||||
* @param fc
|
||||
* @param logFile
|
||||
* @param log
|
||||
* @return
|
||||
*/
|
||||
public UnifyResultModelParallel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallel resultModel, UnifyTaskModelParallel taskModel) {
|
||||
ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors(), ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true);
|
||||
taskModel.setPool(pool);
|
||||
resultModel.setPool(pool);
|
||||
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObject(logFile, pool), log, resultModel, pool);
|
||||
pool.invoke(unifyTask);
|
||||
return resultModel;
|
||||
}
|
||||
|
||||
/**
|
||||
* unify parallel mit Rückgabe UnifyResultModel nachdem alle results gesammelt sind
|
||||
* @param undConstrains
|
||||
* @param oderConstraints
|
||||
* @param fc
|
||||
* @param logFile
|
||||
* @param log
|
||||
* @return
|
||||
*/
|
||||
public UnifyResultModelParallel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallel resultModel, UnifyTaskModelParallel taskModel) {
|
||||
ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors(), ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true);
|
||||
taskModel.setPool(pool);
|
||||
resultModel.setPool(pool);
|
||||
TypeUnifyTask unifyTask = //new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
|
||||
new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObject(logFile, pool), log, resultModel, pool, statistics);
|
||||
|
||||
pool.invoke(unifyTask);
|
||||
unifyTask.join();
|
||||
public static Writer statistics;
|
||||
|
||||
return resultModel;
|
||||
}
|
||||
|
||||
/*
|
||||
public Set<Set<UnifyPair>> unifySequential(Set<UnifyPair> eq, IFiniteClosure fc, FileWriter logFile, Boolean log) {
|
||||
TypeUnifyTask unifyTask = new TypeUnifyTask(eq, fc, false, logFile, log);
|
||||
Set<Set<UnifyPair>> res = unifyTask.compute();
|
||||
return res;
|
||||
}
|
||||
*/
|
||||
|
||||
/**
|
||||
* unify sequentiell mit oderconstraints
|
||||
* @param undConstrains
|
||||
* @param oderConstraints
|
||||
* @param fc
|
||||
* @param logFile
|
||||
* @param log
|
||||
* @return
|
||||
*/
|
||||
public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallel resultModel, UnifyTaskModelParallel taskModel) {
|
||||
resultModel.setPool(ForkJoinPool.commonPool());
|
||||
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, new WriterActiveObject(logFile, ForkJoinPool.commonPool()), log, resultModel, ForkJoinPool.commonPool());
|
||||
unifyTask.statisticsFile = statistics;
|
||||
Set<Set<UnifyPair>> res = unifyTask.compute();
|
||||
return res;
|
||||
}
|
||||
/**
|
||||
* Unify parallel without result model
|
||||
*/
|
||||
public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallel resultModel, UnifyTaskModelParallel taskModel) {
|
||||
ForkJoinPool pool = new ForkJoinPool(
|
||||
Runtime.getRuntime().availableProcessors(),
|
||||
ForkJoinPool.defaultForkJoinWorkerThreadFactory,
|
||||
null,
|
||||
true
|
||||
);
|
||||
taskModel.setPool(pool);
|
||||
resultModel.setPool(pool);
|
||||
TypeUnifyTask unifyTask = new TypeUnifyTask(
|
||||
undConstrains,
|
||||
oderConstraints,
|
||||
fc,
|
||||
true,
|
||||
new WriterActiveObject(logFile, pool),
|
||||
log,
|
||||
resultModel,
|
||||
pool
|
||||
);
|
||||
pool.invoke(unifyTask);
|
||||
return unifyTask.join();
|
||||
}
|
||||
|
||||
/**
|
||||
* Unify parallel returning UnifyResultModel after gathering all results
|
||||
*/
|
||||
public void unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallel resultModel, UnifyTaskModelParallel taskModel) {
|
||||
ForkJoinPool pool = new ForkJoinPool(
|
||||
Runtime.getRuntime().availableProcessors(),
|
||||
ForkJoinPool.defaultForkJoinWorkerThreadFactory,
|
||||
null,
|
||||
true
|
||||
);
|
||||
taskModel.setPool(pool);
|
||||
resultModel.setPool(pool);
|
||||
TypeUnifyTask unifyTask = new TypeUnifyTask(
|
||||
undConstrains,
|
||||
oderConstraints,
|
||||
fc,
|
||||
true,
|
||||
new WriterActiveObject(logFile, pool),
|
||||
log,
|
||||
resultModel,
|
||||
pool,
|
||||
statistics
|
||||
);
|
||||
|
||||
pool.invoke(unifyTask);
|
||||
unifyTask.join();
|
||||
}
|
||||
|
||||
/**
|
||||
* Unify sequential with oder-constraints
|
||||
*/
|
||||
public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallel resultModel) {
|
||||
resultModel.setPool(ForkJoinPool.commonPool());
|
||||
TypeUnifyTask unifyTask = new TypeUnifyTask(
|
||||
undConstrains,
|
||||
oderConstraints,
|
||||
fc,
|
||||
false,
|
||||
new WriterActiveObject(logFile, ForkJoinPool.commonPool()),
|
||||
log,
|
||||
resultModel,
|
||||
ForkJoinPool.commonPool()
|
||||
);
|
||||
TypeUnifyTask.statisticsFile = statistics;
|
||||
return unifyTask.compute();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
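TypeUnify wires each entry point to a ForkJoinPool and invokes a TypeUnifyTask (a RecursiveTask) on it, forking sub-tasks and joining their results. The skeleton of that pattern, independent of this code base (the summing task is hypothetical, not the compiler's unification task):

```java
// Fork/join skeleton of the pattern used above: build a pool, invoke the root task,
// and let the task fork sub-tasks and join their results.
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;

public class ForkJoinSketch {
    static class SumTask extends RecursiveTask<Long> {
        private final long from, to;
        SumTask(long from, long to) { this.from = from; this.to = to; }

        @Override
        protected Long compute() {
            if (to - from <= 1_000) {                  // small enough: solve sequentially
                long sum = 0;
                for (long i = from; i < to; i++) sum += i;
                return sum;
            }
            long mid = (from + to) / 2;
            SumTask left = new SumTask(from, mid);
            left.fork();                               // like forkOrig.fork() above
            long right = new SumTask(mid, to).compute();
            return right + left.join();                // collect the forked result
        }
    }

    public static void main(String[] args) {
        ForkJoinPool pool = new ForkJoinPool(
                Runtime.getRuntime().availableProcessors(),
                ForkJoinPool.defaultForkJoinWorkerThreadFactory,
                null,
                true);                                 // asyncMode, as in TypeUnify
        System.out.println(pool.invoke(new SumTask(0, 1_000_000))); // 499999500000
    }
}
```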
@@ -4,54 +4,43 @@ import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

import java.io.Writer;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ForkJoinPool;

public class TypeUnify2Task extends TypeUnifyTask {

    Set<Set<UnifyPair>> setToFlatten;
    Set<UnifyPair> methodSignatureConstraintUebergabe;

    //statistics
    TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq,
                   List<Set<Constraint<UnifyPair>>> oderConstraints,
                   Set<UnifyPair> nextSetElement,
                   IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, UnifyResultModelParallel urm,
                   Set<UnifyPair> methodSignatureConstraintUebergabe, ForkJoinPool pool, Writer statistics) {
        this(setToFlatten, eq, oderConstraints, nextSetElement, fc, parallel, logFile, log, urm, methodSignatureConstraintUebergabe, pool);
    }

    public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, UnifyResultModelParallel urm, Set<UnifyPair> methodSignatureConstraintUebergabe, ForkJoinPool pool) {
        super(eq, oderConstraints, fc, parallel, logFile, log, urm, pool);
        this.setToFlatten = setToFlatten;
        this.nextSetElement = nextSetElement;
        this.methodSignatureConstraintUebergabe = methodSignatureConstraintUebergabe;
    }

    Set<UnifyPair> getNextSetElement() {
        return nextSetElement;
    }

    @Override
    protected Set<Set<UnifyPair>> compute() {
        Set<Set<UnifyPair>> res = unify2(setToFlatten, eq, oderConstraintsField, fc, parallel, methodSignatureConstraintUebergabe);
        /*if (isUndefinedPairSetSet(res)) {
            return new HashSet<>(); }
        else
        */
        //writeLog("xxx");
        //noOfThread--;
        return res;
    }

    public void closeLogFile() {
        if (parallel) {
            logFile.close();
        } else {
            logFile.closeNonThreaded();
        }
    }
}
@@ -32,43 +32,34 @@ import java.util.stream.Collectors;
 */
public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {

    public static final String rootDirectory = System.getProperty("user.dir") + "/test/logFiles/";
    static Writer statisticsFile = new NullWriter();
    private static int totalNoOfThreads = 0;

    /*
     * Thread related
     */
    protected UnifyResultModelParallel unifyResultModel;
    protected WriterActiveObject logFile;
    protected ForkJoinPool pool;

    /**
     * The implementation of the standard unify that will be used during the unification
     */
    protected IUnify stdUnify = new MartelliMontanariUnify();

    /**
     * The implementation of the rules that will be used during the unification.
     */
    protected IRuleSet rules;

    protected Set<UnifyPair> eq; //und-constraints

    protected List<Set<Constraint<UnifyPair>>> oderConstraintsField;

    protected IFiniteClosure fc;

    protected boolean parallel;

    //indicates whether a log file should be written to System.getProperty("user.dir")+"/test/logFiles/log"
    Boolean log = true;

    /**
     * Element that was added to the equations of this thread from the nextSet
     */
    Set<UnifyPair> nextSetElement;
    int threadNumber;

    public TypeUnifyTask() {
        rules = new RuleSet();
@@ -77,7 +68,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
//statistics
|
||||
public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, UnifyResultModelParallel urm, ForkJoinPool pool, Writer statisticsFile) {
|
||||
this(eq, oderConstraints, fc, parallel, logFile, log, urm, pool);
|
||||
this.statisticsFile = statisticsFile;
|
||||
TypeUnifyTask.statisticsFile = statisticsFile;
|
||||
}
|
||||
|
||||
public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, UnifyResultModelParallel urm, ForkJoinPool pool) {
|
||||
@@ -89,12 +80,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
this.log = log;
|
||||
this.pool = pool;
|
||||
|
||||
totalnoOfThread++;
|
||||
thNo = totalnoOfThread;
|
||||
writeLog("thNo2 " + thNo);
|
||||
totalNoOfThreads++;
|
||||
threadNumber = totalNoOfThreads;
|
||||
writeLog("thNo2 " + threadNumber);
|
||||
try {
|
||||
if (log) {
|
||||
this.logFile = new WriterActiveObject(new FileWriter(System.getProperty("user.dir") + "/logFiles/" + "Thread_" + thNo), pool);
|
||||
this.logFile = new WriterActiveObject(new FileWriter(System.getProperty("user.dir") + "/logFiles/" + "Thread_" + threadNumber), pool);
|
||||
} else {
|
||||
this.logFile = new WriterActiveObject(new OutputStreamWriter(new NullOutputStream()), pool);
|
||||
}
|
||||
@@ -102,18 +93,18 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
System.err.println("log-File nicht vorhanden");
|
||||
}
|
||||
rules = new RuleSet(logFile);
|
||||
this.urm = urm;
|
||||
this.unifyResultModel = urm;
|
||||
}
|
||||
|
||||
protected Set<Set<UnifyPair>> compute() {
|
||||
Set<UnifyPair> neweq = new HashSet<>(eq);
|
||||
/* 1-elementige Oder-Constraints werden in und-Constraints umgewandelt */
|
||||
oderConstraintsField.stream()
|
||||
.filter(x -> x.size() == 1)
|
||||
.map(y -> y.stream().findFirst().get()).forEach(neweq::addAll);
|
||||
.filter(x -> x.size() == 1)
|
||||
.map(y -> y.stream().findFirst().get()).forEach(neweq::addAll);
|
||||
ArrayList<Set<Constraint<UnifyPair>>> remainingOderconstraints = oderConstraintsField.stream()
|
||||
.filter(x -> x.size() > 1)
|
||||
.collect(Collectors.toCollection(ArrayList::new));
|
||||
.filter(x -> x.size() > 1)
|
||||
.collect(Collectors.toCollection(ArrayList::new));
|
||||
Set<Set<UnifyPair>> res = unify(neweq, remainingOderconstraints, fc, parallel, new HashSet<>());
|
||||
|
||||
if (parallel) {
|
||||
@@ -142,7 +133,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
* ? extends ? extends Theta rausfiltern
|
||||
*/
|
||||
Set<UnifyPair> doubleExt = eq.stream().filter(UnifyPair::wrongWildcard).peek(UnifyPair::setUndefinedPair)
|
||||
.collect(Collectors.toCollection(HashSet::new));
|
||||
.collect(Collectors.toCollection(HashSet::new));
|
||||
if (!doubleExt.isEmpty()) {
|
||||
Set<Set<UnifyPair>> ret = new HashSet<>();
|
||||
ret.add(doubleExt);
|
||||
@@ -154,13 +145,13 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
*/
|
||||
|
||||
Set<UnifyPair> ocurrPairs = eq.stream().filter(x -> {
|
||||
UnifyType lhs, rhs;
|
||||
return (lhs = x.getLhsType()) instanceof PlaceholderType
|
||||
&& !((rhs = x.getRhsType()) instanceof PlaceholderType)
|
||||
&& rhs.getTypeParams().occurs((PlaceholderType) lhs);
|
||||
})
|
||||
.peek(UnifyPair::setUndefinedPair)
|
||||
.collect(Collectors.toCollection(HashSet::new));
|
||||
UnifyType lhs, rhs;
|
||||
return (lhs = x.getLhsType()) instanceof PlaceholderType
|
||||
&& !((rhs = x.getRhsType()) instanceof PlaceholderType)
|
||||
&& rhs.getTypeParams().occurs((PlaceholderType) lhs);
|
||||
})
|
||||
.peek(UnifyPair::setUndefinedPair)
|
||||
.collect(Collectors.toCollection(HashSet::new));
|
||||
writeLog("ocurrPairs: " + ocurrPairs);
|
||||
if (!ocurrPairs.isEmpty()) {
|
||||
Set<Set<UnifyPair>> ret = new HashSet<>();
|
||||
@@ -213,8 +204,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
// Add the set of [a =. Theta | (a=. Theta) in Eq2']
|
||||
//TODO: Occurscheck anwenden als Fehler identifizieren
|
||||
Set<UnifyPair> bufferSet = eq2s.stream()
|
||||
.filter(x -> x.getPairOp() == PairOperator.EQUALSDOT && x.getLhsType() instanceof PlaceholderType)
|
||||
.collect(Collectors.toSet());
|
||||
.filter(x -> x.getPairOp() == PairOperator.EQUALSDOT && x.getLhsType() instanceof PlaceholderType)
|
||||
.collect(Collectors.toSet());
|
||||
|
||||
if (!bufferSet.isEmpty()) { // Do not add empty sets or the cartesian product will always be empty.
|
||||
Set<Set<UnifyPair>> wrap = new HashSet<>();
|
||||
@@ -274,7 +265,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
|
||||
{// sequentiell (Step 6b is included)
|
||||
if (eqPrime.equals(eq) && eqPrimePrime.isEmpty()
|
||||
&& oderConstraints.isEmpty()) { //PL 2017-09-29 //(!eqPrimePrime.isPresent()) auskommentiert und durch
|
||||
&& oderConstraints.isEmpty()) { //PL 2017-09-29 //(!eqPrimePrime.isPresent()) auskommentiert und durch
|
||||
//PL 2017-09-29 dies ersetzt //(!eqPrimePrime.isPresent())
|
||||
//PL 2018-05-18 beide Bedingungen muessen gelten, da eqPrime Veränderungen in allem ausser subst
|
||||
//eqPrimePrime Veraenderungen in subst repraesentieren.
|
||||
@@ -289,18 +280,18 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
|
||||
//Substitutionen in methodcontraintsets werdne ausgeführt
|
||||
eqPrimePrimeSet = eqPrimePrimeSet.stream().map(
|
||||
x -> {
|
||||
Optional<Set<UnifyPair>> help = rules.subst(x);
|
||||
return help.isPresent() ?
|
||||
help.get() :
|
||||
x;
|
||||
}).collect(Collectors.toSet());
|
||||
x -> {
|
||||
Optional<Set<UnifyPair>> help = rules.subst(x);
|
||||
return help.isPresent() ?
|
||||
help.get() :
|
||||
x;
|
||||
}).collect(Collectors.toSet());
|
||||
/*
|
||||
* Ende
|
||||
*/
|
||||
|
||||
|
||||
urm.notify(eqPrimePrimeSet);
|
||||
unifyResultModel.notify(eqPrimePrimeSet);
|
||||
writeStatistics("Result: " + eqPrimePrimeSet);
|
||||
}
|
||||
} else if (eqPrimePrime.isPresent()) {
|
||||
@@ -341,8 +332,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
//oneElems: Alle 1-elementigen Mengen, die nur ein Paar
|
||||
//a <. theta, theta <. a oder a =. theta enthalten
|
||||
Set<Set<UnifyPair>> oneElems = topLevelSets.stream()
|
||||
.filter(x -> x.size() == 1)
|
||||
.map(y -> y.stream().findFirst().get()).collect(Collectors.toSet());
|
||||
.filter(x -> x.size() == 1)
|
||||
.map(y -> y.stream().findFirst().get()).collect(Collectors.toSet());
|
||||
|
||||
//optNextSet: Eine mehrelementige Menge, wenn vorhanden
|
||||
Optional<Set<? extends Set<UnifyPair>>> optNextSet = topLevelSets.stream().filter(x -> x.size() > 1).findAny();
|
||||
@@ -374,15 +365,15 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
}
|
||||
if (sameBase) { //angefuegt PL 2020-02-30
|
||||
Optional<Integer> xi = nextSetasList.stream().map(x -> x.stream().filter(y -> (y.getLhsType() instanceof PlaceholderType && !(y.getRhsType() instanceof PlaceholderType)))
|
||||
.filter(z -> ((PlaceholderType) z.getLhsType()).getVariance() != 0)
|
||||
.map(c -> ((PlaceholderType) c.getLhsType()).getVariance())
|
||||
.reduce((a, b) -> {
|
||||
if (a == b) return a;
|
||||
else return 0;
|
||||
})) //2 kommt insbesondere bei Oder-Constraints vor
|
||||
.filter(d -> d.isPresent())
|
||||
.map(e -> e.get())
|
||||
.findAny();
|
||||
.filter(z -> ((PlaceholderType) z.getLhsType()).getVariance() != 0)
|
||||
.map(c -> ((PlaceholderType) c.getLhsType()).getVariance())
|
||||
.reduce((a, b) -> {
|
||||
if (a == b) return a;
|
||||
else return 0;
|
||||
})) //2 kommt insbesondere bei Oder-Constraints vor
|
||||
.filter(d -> d.isPresent())
|
||||
.map(e -> e.get())
|
||||
.findAny();
|
||||
if (xi.isPresent()) {
|
||||
variance = xi.get();
|
||||
}
|
||||
@@ -396,19 +387,19 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
//Varianz-Bestimmung Oder-Constraints
|
||||
if (oderConstraint) {
|
||||
Optional<Integer> optVariance =
|
||||
nextSetasList
|
||||
.getFirst()
|
||||
.stream()
|
||||
.filter(x -> x.getGroundBasePair().getLhsType() instanceof PlaceholderType &&
|
||||
!(x.getRhsType() instanceof PlaceholderType) &&
|
||||
x.getPairOp() == PairOperator.EQUALSDOT)
|
||||
.map(x ->
|
||||
((PlaceholderType) x.getGroundBasePair().getLhsType()).getVariance())
|
||||
.reduce((n, m) -> {
|
||||
if ((n == 0) && (m == 0)) return 0;
|
||||
else if (n != 0) return n; //es muss mindestens eine Variance != 0 sein
|
||||
else return m;
|
||||
});
|
||||
nextSetasList
|
||||
.getFirst()
|
||||
.stream()
|
||||
.filter(x -> x.getGroundBasePair().getLhsType() instanceof PlaceholderType &&
|
||||
!(x.getRhsType() instanceof PlaceholderType) &&
|
||||
x.getPairOp() == PairOperator.EQUALSDOT)
|
||||
.map(x ->
|
||||
((PlaceholderType) x.getGroundBasePair().getLhsType()).getVariance())
|
||||
.reduce((n, m) -> {
|
||||
if ((n == 0) && (m == 0)) return 0;
|
||||
else if (n != 0) return n; //es muss mindestens eine Variance != 0 sein
|
||||
else return m;
|
||||
});
|
||||
//Fuer Operatorenaufrufe wird variance auf 2 gesetzt.
|
||||
//da kein Receiver existiert also kein x.getGroundBasePair().getLhsType() instanceof PlaceholderType
|
||||
//Bei Varianz = 2 werden alle Elemente des Kartesischen Produkts abgearbeitet
|
||||
@@ -457,7 +448,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
|
||||
newElemsOrig.add(a);
|
||||
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, urm, methodSignatureConstraint, this.pool);
|
||||
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, unifyResultModel, methodSignatureConstraint, this.pool);
|
||||
forkOrig.fork();
|
||||
|
||||
while (!nextSetasList.isEmpty()) {
|
||||
@@ -471,14 +462,14 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
|
||||
newElems.add(nSaL);
|
||||
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, urm, new HashSet<>(methodSignatureConstraint), this.pool);
|
||||
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, unifyResultModel, new HashSet<>(methodSignatureConstraint), this.pool);
|
||||
forks.add(fork);
|
||||
fork.fork();
|
||||
}
|
||||
|
||||
res = forkOrig.join();
|
||||
|
||||
forks.forEach(x -> writeLog("wait: " + x.thNo));
|
||||
forks.forEach(x -> writeLog("wait: " + x.threadNumber));
|
||||
for (TypeUnify2Task fork : forks) {
|
||||
Set<Set<UnifyPair>> fork_res = fork.join();
|
||||
add_res.add(fork_res);
|
||||
@@ -494,7 +485,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
|
||||
newElemsOrig.add(a);
|
||||
|
||||
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, urm, new HashSet<>(methodSignatureConstraint), this.pool);
|
||||
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, unifyResultModel, new HashSet<>(methodSignatureConstraint), this.pool);
|
||||
forkOrig.fork();
|
||||
|
||||
while (!nextSetasList.isEmpty()) {
|
||||
@@ -507,21 +498,21 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
|
||||
newElems.add(nSaL);
|
||||
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, urm, new HashSet<>(methodSignatureConstraint), this.pool);
|
||||
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, unifyResultModel, new HashSet<>(methodSignatureConstraint), this.pool);
|
||||
forks.add(fork);
|
||||
fork.fork();
|
||||
}
|
||||
|
||||
writeLog("wait " + forkOrig.thNo);
|
||||
writeLog("wait " + forkOrig.threadNumber);
|
||||
res = forkOrig.join();
|
||||
|
||||
writeLog("JoinOrig " + Integer.valueOf(forkOrig.thNo).toString());
|
||||
writeLog("JoinOrig " + Integer.valueOf(forkOrig.threadNumber).toString());
|
||||
|
||||
forks.forEach(x -> writeLog("wait: " + x.thNo));
|
||||
forks.forEach(x -> writeLog("wait: " + x.threadNumber));
|
||||
for (TypeUnify2Task fork : forks) {
|
||||
Set<Set<UnifyPair>> fork_res = fork.join();
|
||||
|
||||
writeLog("Join " + Integer.valueOf(fork.thNo).toString());
|
||||
writeLog("Join " + Integer.valueOf(fork.threadNumber).toString());
|
||||
add_res.add(fork_res);
|
||||
if (!isUndefinedPairSetSet(fork_res)) {
|
||||
aParDef.add(fork.getNextSetElement());
|
||||
@@ -536,7 +527,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
|
||||
newElemsOrig.add(a);
|
||||
|
||||
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, urm, new HashSet<>(methodSignatureConstraint), this.pool);
|
||||
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, unifyResultModel, new HashSet<>(methodSignatureConstraint), this.pool);
|
||||
forkOrig.fork();
|
||||
|
||||
while (!nextSetasList.isEmpty()) {
|
||||
@@ -545,21 +536,21 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
|
||||
newElems.add(nSaL);
|
||||
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, urm, methodSignatureConstraint, this.pool);
|
||||
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, unifyResultModel, methodSignatureConstraint, this.pool);
|
||||
forks.add(fork);
|
||||
fork.fork();
|
||||
}
|
||||
|
||||
writeLog("wait " + forkOrig.thNo);
|
||||
writeLog("wait " + forkOrig.threadNumber);
|
||||
res = forkOrig.join();
|
||||
|
||||
writeLog("JoinOrig " + Integer.valueOf(forkOrig.thNo).toString());
|
||||
writeLog("JoinOrig " + Integer.valueOf(forkOrig.threadNumber).toString());
|
||||
|
||||
forks.forEach(x -> writeLog("wait: " + x.thNo));
|
||||
forks.forEach(x -> writeLog("wait: " + x.threadNumber));
|
||||
for (TypeUnify2Task fork : forks) {
|
||||
Set<Set<UnifyPair>> fork_res = fork.join();
|
||||
|
||||
writeLog("Join " + Integer.valueOf(fork.thNo).toString());
|
||||
writeLog("Join " + Integer.valueOf(fork.threadNumber).toString());
|
||||
add_res.add(fork_res);
|
||||
}
|
||||
} else {//parallel = false oder MaxNoOfThreads ist erreicht, sequentiell weiterarbeiten
|
||||
@@ -578,8 +569,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
result = res;
|
||||
} else {
|
||||
if ((isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result))
|
||||
|| (!isUndefinedPairSetSet(res) && !isUndefinedPairSetSet(result))
|
||||
|| result.isEmpty()) {
|
||||
|| (!isUndefinedPairSetSet(res) && !isUndefinedPairSetSet(result))
|
||||
|| result.isEmpty()) {
|
||||
result.addAll(res);
|
||||
}
|
||||
}
|
||||
@@ -591,8 +582,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
result = par_res;
|
||||
} else {
|
||||
if ((isUndefinedPairSetSet(par_res) && isUndefinedPairSetSet(result))
|
||||
|| (!isUndefinedPairSetSet(par_res) && !isUndefinedPairSetSet(result))
|
||||
|| result.isEmpty()) {
|
||||
|| (!isUndefinedPairSetSet(par_res) && !isUndefinedPairSetSet(result))
|
||||
|| result.isEmpty()) {
|
||||
result.addAll(par_res);
|
||||
}
|
||||
}
|
||||
@@ -602,47 +593,43 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
if (isUndefinedPairSetSet(res) && aParDef.isEmpty()) {
|
||||
int nofstred;
|
||||
Set<UnifyPair> abhSubst = res.stream()
|
||||
.map(b ->
|
||||
b.stream()
|
||||
.map(x -> x.getAllSubstitutions())
|
||||
.reduce((y, z) -> {
|
||||
y.addAll(z);
|
||||
return y;
|
||||
}).get())
|
||||
.reduce((y, z) -> {
|
||||
y.addAll(z);
|
||||
return y;
|
||||
}).get();
|
||||
.map(b ->
|
||||
b.stream()
|
||||
.map(x -> x.getAllSubstitutions())
|
||||
.reduce((y, z) -> {
|
||||
y.addAll(z);
|
||||
return y;
|
||||
}).get())
|
||||
.reduce((y, z) -> {
|
||||
y.addAll(z);
|
||||
return y;
|
||||
}).get();
|
||||
abhSubst.addAll(
|
||||
res.stream()
|
||||
.map(b ->
|
||||
b.stream()
|
||||
.map(x -> x.getThisAndAllBases()) //getAllBases durch getThisAndAllBases ersetzt, weil auch im UnifyPair selbst schon ein Fehler liegen kann.
|
||||
.reduce((y, z) -> {
|
||||
y.addAll(z);
|
||||
return y;
|
||||
}).get())
|
||||
res.stream()
|
||||
.map(b ->
|
||||
b.stream()
|
||||
.map(UnifyPair::getThisAndAllBases) //getAllBases durch getThisAndAllBases ersetzt, weil auch im UnifyPair selbst schon ein Fehler liegen kann.
|
||||
.reduce((y, z) -> {
|
||||
y.addAll(z);
|
||||
return y;
|
||||
}).get()
|
||||
}).get())
|
||||
.reduce((y, z) -> {
|
||||
y.addAll(z);
|
||||
return y;
|
||||
}).get()
|
||||
);
|
||||
Set<UnifyPair> b = a;//effective final a
|
||||
Set<UnifyPair> b = a;
|
||||
Set<UnifyPair> durchschnitt = abhSubst.stream()
|
||||
.filter(x -> b.contains(x))
|
||||
.collect(Collectors.toCollection(HashSet::new));
|
||||
int len = nextSetasList.size();
|
||||
.filter(x -> b.contains(x))
|
||||
.collect(Collectors.toCollection(HashSet::new));
|
||||
|
||||
nofstred = nextSetasList.size();
|
||||
|
||||
writeLog("res (undef): " + res.toString() + "\n" +
|
||||
"abhSubst: " + abhSubst.toString() + "\n" +
|
||||
"Durchschnitt: " + durchschnitt.toString() + "\n" +
|
||||
"nextSet: " + nextSet.toString() + "\n" +
|
||||
"nextSetasList: " + nextSetasList.toString() + "\n" +
|
||||
"Number first erased Elements (undef): " + (len - nofstred) + "\n" +
|
||||
"Number second erased Elements (undef): " + (nofstred - nextSetasList.size()) + "\n" +
|
||||
"Number erased Elements (undef): " + (len - nextSetasList.size()));
|
||||
writeLog(
|
||||
"res (undef): " + res + "\n" +
|
||||
"abhSubst: " + abhSubst + "\n" +
|
||||
"Durchschnitt: " + durchschnitt + "\n" +
|
||||
"nextSet: " + nextSet + "\n" +
|
||||
"nextSetasList: " + nextSetasList + "\n"
|
||||
);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
@@ -856,31 +843,31 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
while (eq2sprimeit.hasNext()) {// alle mit Variance != 0 nach vorne schieben
|
||||
UnifyPair up = eq2sprimeit.next();
|
||||
if ((up.getLhsType() instanceof PlaceholderType &&
|
||||
((PlaceholderType) up.getLhsType()).getVariance() == 1 &&
|
||||
!((PlaceholderType) up.getLhsType()).isInnerType()) ||
|
||||
(up.getRhsType() instanceof PlaceholderType &&
|
||||
((PlaceholderType) up.getRhsType()).getVariance() == -1) &&
|
||||
!((PlaceholderType) up.getRhsType()).isInnerType()) {
|
||||
((PlaceholderType) up.getLhsType()).getVariance() == 1 &&
|
||||
!((PlaceholderType) up.getLhsType()).isInnerType()) ||
|
||||
(up.getRhsType() instanceof PlaceholderType &&
|
||||
((PlaceholderType) up.getRhsType()).getVariance() == -1) &&
|
||||
!((PlaceholderType) up.getRhsType()).isInnerType()) {
|
||||
eq2sAsListFst.add(up);
|
||||
eq2s.remove(up);
|
||||
} else if ((up.getLhsType() instanceof PlaceholderType && ((PlaceholderType) up.getLhsType()).getVariance() == 1 && ((PlaceholderType) up.getLhsType()).isInnerType())
|
||||
|| (up.getRhsType() instanceof PlaceholderType && ((PlaceholderType) up.getRhsType()).getVariance() == -1) && ((PlaceholderType) up.getRhsType()).isInnerType()) {
|
||||
|| (up.getRhsType() instanceof PlaceholderType && ((PlaceholderType) up.getRhsType()).getVariance() == -1) && ((PlaceholderType) up.getRhsType()).isInnerType()) {
|
||||
eq2sAsListSnd.add(up);
|
||||
eq2s.remove(up);
|
||||
} else if ((up.getLhsType() instanceof PlaceholderType &&
|
||||
((PlaceholderType) up.getLhsType()).getVariance() == -1 &&
|
||||
!((PlaceholderType) up.getLhsType()).isInnerType()) ||
|
||||
(up.getRhsType() instanceof PlaceholderType &&
|
||||
((PlaceholderType) up.getRhsType()).getVariance() == -1) &&
|
||||
!((PlaceholderType) up.getRhsType()).isInnerType()) {
|
||||
((PlaceholderType) up.getLhsType()).getVariance() == -1 &&
|
||||
!((PlaceholderType) up.getLhsType()).isInnerType()) ||
|
||||
(up.getRhsType() instanceof PlaceholderType &&
|
||||
((PlaceholderType) up.getRhsType()).getVariance() == -1) &&
|
||||
!((PlaceholderType) up.getRhsType()).isInnerType()) {
|
||||
eq2sAsListThird.add(up);
|
||||
eq2s.remove(up);
|
||||
} else if ((up.getLhsType() instanceof PlaceholderType && ((PlaceholderType) up.getLhsType()).getVariance() == -1 && ((PlaceholderType) up.getLhsType()).isInnerType())
|
||||
|| (up.getRhsType() instanceof PlaceholderType && ((PlaceholderType) up.getRhsType()).getVariance() == 1) && ((PlaceholderType) up.getRhsType()).isInnerType()) {
|
||||
|| (up.getRhsType() instanceof PlaceholderType && ((PlaceholderType) up.getRhsType()).getVariance() == 1) && ((PlaceholderType) up.getRhsType()).isInnerType()) {
|
||||
eq2sAsListFourth.add(up);
|
||||
eq2s.remove(up);
|
||||
} else if ((up.getLhsType() instanceof PlaceholderType && ((PlaceholderType) up.getLhsType()).isInnerType())
|
||||
|| (up.getRhsType() instanceof PlaceholderType && ((PlaceholderType) up.getRhsType()).isInnerType())) {
|
||||
|| (up.getRhsType() instanceof PlaceholderType && ((PlaceholderType) up.getRhsType()).isInnerType())) {
|
||||
eq2sAsListBack.add(up);
|
||||
eq2s.remove(up);
|
||||
}
|
||||
@@ -888,14 +875,14 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
//if (eq2sAsListFst.isEmpty())
|
||||
{
|
||||
List<Set<Constraint<UnifyPair>>> oderConstraintsVariance = oderConstraintsOutput.stream() //Alle Elemente rauswerfen, die Variance 0 haben oder keine TPH in LHS oder RHS sind
|
||||
.filter(x -> x.stream()
|
||||
.anyMatch(y ->
|
||||
y.stream().anyMatch(z -> ((z.getLhsType() instanceof PlaceholderType)
|
||||
&& (((PlaceholderType) (z.getLhsType())).getVariance() != 0))
|
||||
|| ((z.getRhsType() instanceof PlaceholderType)
|
||||
&& (((PlaceholderType) (z.getRhsType())).getVariance() != 0))
|
||||
)
|
||||
)).collect(Collectors.toList());
|
||||
.filter(x -> x.stream()
|
||||
.anyMatch(y ->
|
||||
y.stream().anyMatch(z -> ((z.getLhsType() instanceof PlaceholderType)
|
||||
&& (((PlaceholderType) (z.getLhsType())).getVariance() != 0))
|
||||
|| ((z.getRhsType() instanceof PlaceholderType)
|
||||
&& (((PlaceholderType) (z.getRhsType())).getVariance() != 0))
|
||||
)
|
||||
)).collect(Collectors.toList());
|
||||
if (!oderConstraintsVariance.isEmpty()) {
|
||||
Set<Constraint<UnifyPair>> ret = oderConstraintsVariance.get(0);
|
||||
oderConstraintsOutput.remove(ret);
|
||||
@@ -904,15 +891,15 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
|
||||
//Alle wildcard Faelle rausfiltern bei not wildcardable
|
||||
ret = ret.stream().filter(x -> {
|
||||
Optional<UnifyPair> optElem;
|
||||
return !((optElem = x.stream().filter(y -> (y.getLhsType()) instanceof PlaceholderType
|
||||
&& !((PlaceholderType) y.getLhsType()).isWildcardable()
|
||||
&& y.getPairOp() == PairOperator.EQUALSDOT
|
||||
&& !(y.getRhsType() instanceof PlaceholderType))
|
||||
.findAny()).isPresent()
|
||||
&& optElem.get().getRhsType() instanceof ExtendsType);
|
||||
})
|
||||
.collect(Collectors.toSet());
|
||||
Optional<UnifyPair> optElem;
|
||||
return !((optElem = x.stream().filter(y -> (y.getLhsType()) instanceof PlaceholderType
|
||||
&& !((PlaceholderType) y.getLhsType()).isWildcardable()
|
||||
&& y.getPairOp() == PairOperator.EQUALSDOT
|
||||
&& !(y.getRhsType() instanceof PlaceholderType))
|
||||
.findAny()).isPresent()
|
||||
&& optElem.get().getRhsType() instanceof ExtendsType);
|
||||
})
|
||||
.collect(Collectors.toSet());
|
||||
ret.stream().forEach(x -> x.stream().forEach(y -> {
|
||||
Set<UnifyPair> x_new = new HashSet<>(x); //PL 2020-03-18: y selbst darf nicht in die Substitutionen
|
||||
x_new.remove(y);
|
||||
@@ -923,10 +910,10 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
}
|
||||
}
|
||||
|
||||
writeLog("eq2s: " + eq2s.toString() + "\n" +
|
||||
"eq2sAsListFst: " + eq2sAsListFst.toString() + "\n" +
|
||||
"eq2sAsListSnd: " + eq2sAsListSnd.toString() + "\n" +
|
||||
"eq2sAsListBack: " + eq2sAsListBack.toString());
|
||||
writeLog("eq2s: " + eq2s + "\n" +
|
||||
"eq2sAsListFst: " + eq2sAsListFst + "\n" +
|
||||
"eq2sAsListSnd: " + eq2sAsListSnd + "\n" +
|
||||
"eq2sAsListBack: " + eq2sAsListBack);
|
||||
|
||||
eq2sAsList.addAll(eq2sAsListFst);
|
||||
eq2sAsList.addAll(eq2sAsListSnd);
|
||||
@@ -945,15 +932,15 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
|
||||
//Alle wildcard Faelle rausfiltern bei not wildcardable
|
||||
ret = ret.stream().filter(x -> {
|
||||
Optional<UnifyPair> optElem;
|
||||
return !((optElem = x.stream().filter(y -> (y.getLhsType()) instanceof PlaceholderType
|
||||
&& !((PlaceholderType) y.getLhsType()).isWildcardable()
|
||||
&& y.getPairOp() == PairOperator.EQUALSDOT
|
||||
&& !(y.getRhsType() instanceof PlaceholderType))
|
||||
.findAny()).isPresent()
|
||||
&& optElem.get().getRhsType() instanceof ExtendsType);
|
||||
})
|
||||
.collect(Collectors.toSet());
|
||||
Optional<UnifyPair> optElem;
|
||||
return !((optElem = x.stream().filter(y -> (y.getLhsType()) instanceof PlaceholderType
|
||||
&& !((PlaceholderType) y.getLhsType()).isWildcardable()
|
||||
&& y.getPairOp() == PairOperator.EQUALSDOT
|
||||
&& !(y.getRhsType() instanceof PlaceholderType))
|
||||
.findAny()).isPresent()
|
||||
&& optElem.get().getRhsType() instanceof ExtendsType);
|
||||
})
|
||||
.collect(Collectors.toSet());
|
||||
|
||||
ret.stream().forEach(x -> x.stream().forEach(y -> {
|
||||
Set<UnifyPair> x_new = new HashSet<>(x); //PL 2020-03-18: y selbst darf nicht in die Substitutionen
|
||||
@@ -1157,7 +1144,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {

// Filter empty sets or sets that only contain an empty set.
return result.stream().map(x -> x.stream().filter(y -> !y.isEmpty()).collect(Collectors.toCollection(HashSet::new)))
        .filter(x -> !x.isEmpty()).collect(Collectors.toCollection(HashSet::new));
}

//TODO: if Theta' is not in the FC, an error must be produced PL 18-04-20
@@ -1208,12 +1195,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
return aa;
|
||||
};
|
||||
HashMap<PlaceholderType, PlaceholderType> hm = x.getInvolvedPlaceholderTypes().stream()
|
||||
.reduce(new HashMap<>(),
|
||||
(aa, b) -> {
|
||||
aa.put(b, PlaceholderType.freshPlaceholder());
|
||||
return aa;
|
||||
}, combiner);
|
||||
return x.accept(new freshPlaceholder(), hm);
|
||||
.reduce(new HashMap<>(),
|
||||
(aa, b) -> {
|
||||
aa.put(b, PlaceholderType.freshPlaceholder());
|
||||
return aa;
|
||||
}, combiner);
|
||||
return x.accept(new FreshPlaceholder(), hm);
|
||||
}).collect(Collectors.toCollection(HashSet::new));
|
||||
|
||||
IMatch match = new Match();
|
||||
@@ -1228,7 +1215,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
if ((match.match(ml)).isEmpty()) {
|
||||
thetaQs.remove(c);
|
||||
}
|
||||
writeLog("thetaQs von " + c + ": " + thetaQs.toString());
|
||||
writeLog("thetaQs von " + c + ": " + thetaQs);
|
||||
//Set<UnifyType> thetaQs = fc.getChildren(c).stream().collect(Collectors.toCollection(HashSet::new));
|
||||
//thetaQs.add(thetaPrime); //PL 18-02-05 wieder geloescht
|
||||
//PL 2017-10-03: War auskommentiert habe ich wieder einkommentiert,
|
||||
@@ -1252,7 +1239,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
for (TypeParams tp : permuteParams(candidateParams))
|
||||
thetaQPrimes.add(c.setTypeParams(tp));
|
||||
}
|
||||
writeLog("thetaQPrimes von " + c + ": " + thetaQPrimes.toString());
|
||||
writeLog("thetaQPrimes von " + c + ": " + thetaQPrimes);
|
||||
for (UnifyType tqp : thetaQPrimes) {//PL 2020-03-08 umbauen in der Schleife wird nur unifizierbarer Typ gesucht break am Ende
|
||||
Collection<PlaceholderType> tphs = tqp.getInvolvedPlaceholderTypes();
|
||||
Optional<Unifier> opt = stdUnify.unify(tqp, thetaPrime);
|
||||
@@ -1265,9 +1252,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
for (Entry<PlaceholderType, UnifyType> sigma : unifier) {
|
||||
if (!tphs.contains(sigma.getKey())) {//eingefuegt PL 2019-02-02 Bug 127
|
||||
substitutionSet.add(new UnifyPair(sigma.getKey(), sigma.getValue(), PairOperator.EQUALSDOT,
|
||||
//TODO: nochmals ueberlegen ob hier pair.getSubstitution() korrekt ist, oder ob leere Menge hin müsste
|
||||
//alle folgenden New UnifyPair ebenfalls ueberpruefen PL 2018-04-19
|
||||
pair.getSubstitution(), pair));
|
||||
//TODO: nochmals ueberlegen ob hier pair.getSubstitution() korrekt ist, oder ob leere Menge hin müsste
|
||||
//alle folgenden New UnifyPair ebenfalls ueberpruefen PL 2018-04-19
|
||||
pair.getSubstitution(), pair));
|
||||
}
|
||||
}
|
||||
//List<UnifyType> freshTphs = new ArrayList<>(); PL 18-02-06 in die For-Schleife verschoben
|
||||
@@ -1279,14 +1266,14 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
//eingefuegt PL 2018-03-29 Anfang ? ext. theta hinzufuegen
|
||||
if (a.isWildcardable()) {
|
||||
Set<UnifyType> smaller_ext = smaller.stream().filter(x -> !(x instanceof ExtendsType) && !(x instanceof SuperType))
|
||||
.map(x -> {
|
||||
//BinaryOperator<HashMap<PlaceholderType,PlaceholderType>> combiner = (aa,b) -> { aa.putAll(b); return aa;}; //Variablenumbenennung rausgenommen
|
||||
//HashMap<PlaceholderType,PlaceholderType> hm = x.getInvolvedPlaceholderTypes().stream() //Variablen muessen wahrscheinlich erhalten bleiben
|
||||
// .reduce(new HashMap<PlaceholderType,PlaceholderType>(),
|
||||
// (aa, b)-> { aa.put(b,PlaceholderType.freshPlaceholder()); return aa; }, combiner);
|
||||
return new ExtendsType(x);
|
||||
})//.accept(new freshPlaceholder(), hm));}
|
||||
.collect(Collectors.toCollection(HashSet::new));
|
||||
.map(x -> {
|
||||
//BinaryOperator<HashMap<PlaceholderType,PlaceholderType>> combiner = (aa,b) -> { aa.putAll(b); return aa;}; //Variablenumbenennung rausgenommen
|
||||
//HashMap<PlaceholderType,PlaceholderType> hm = x.getInvolvedPlaceholderTypes().stream() //Variablen muessen wahrscheinlich erhalten bleiben
|
||||
// .reduce(new HashMap<PlaceholderType,PlaceholderType>(),
|
||||
// (aa, b)-> { aa.put(b,PlaceholderType.freshPlaceholder()); return aa; }, combiner);
|
||||
return new ExtendsType(x);
|
||||
})//.accept(new freshPlaceholder(), hm));}
|
||||
.collect(Collectors.toCollection(HashSet::new));
|
||||
smaller.addAll(smaller_ext);
|
||||
}
|
||||
//eingefuegt PL 2018-03-29 Ende ? ext. theta hinzufuegen
|
||||
@@ -1305,13 +1292,13 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
UnifyPair up = new UnifyPair(a, theta, PairOperator.EQUALSDOT, pair.getSubstitution(), pair);
|
||||
//TODO PL 2019-01-24: upit.next() ist nicht unbedingt ein PlaceholderType -> Visitor erledigt
|
||||
for (UnifyType unifyType : up.getRhsType().getTypeParams())
|
||||
unifyType.accept(new distributeVariance(), a.getVariance());//((PlaceholderType)upit.next()).setVariance(a.getVariance());
|
||||
unifyType.accept(new DistributeVariance(), a.getVariance());//((PlaceholderType)upit.next()).setVariance(a.getVariance());
|
||||
resultPrime.add(up);
|
||||
} else {
|
||||
UnifyPair up = new UnifyPair(a, theta.setTypeParams(new TypeParams(freshTphs.toArray(new UnifyType[0]))), PairOperator.EQUALSDOT, pair.getSubstitution(), pair);
|
||||
//TODO PL 2019-01-24: upit.next() ist nicht unbedingt ein PlaceholderType -> Visitor erledigt
|
||||
for (UnifyType unifyType : up.getRhsType().getTypeParams())
|
||||
unifyType.accept(new distributeVariance(), a.getVariance()); //((PlaceholderType)upit.next()).setVariance(a.getVariance());
|
||||
unifyType.accept(new DistributeVariance(), a.getVariance()); //((PlaceholderType)upit.next()).setVariance(a.getVariance());
|
||||
resultPrime.add(up);
|
||||
}
|
||||
resultPrime.addAll(substitutionSet);
|
||||
@@ -1323,7 +1310,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
}
|
||||
}
|
||||
}
|
||||
writeLog("result von " + pair + ": " + result.toString());
|
||||
writeLog("result von " + pair + ": " + result);
|
||||
return result;
|
||||
}
|
||||
|
||||
@@ -1337,7 +1324,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
|
||||
PlaceholderType aPrime = PlaceholderType.freshPlaceholder();
|
||||
aPrime.setVariance(a.getVariance());
|
||||
aPrime.disableWildcardtable();
|
||||
aPrime.disableWildcardable();
|
||||
UnifyType extAPrime = new ExtendsType(aPrime);
|
||||
UnifyType thetaPrime = extThetaPrime.getExtendedType();
|
||||
Set<UnifyPair> resultPrime = new HashSet<>();
|
||||
@@ -1363,11 +1350,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
|
||||
PlaceholderType aPrime = PlaceholderType.freshPlaceholder();
|
||||
aPrime.setVariance(a.getVariance());
|
||||
aPrime.disableWildcardtable();
|
||||
aPrime.disableWildcardable();
|
||||
UnifyType supAPrime = new SuperType(aPrime);
|
||||
UnifyType thetaPrime = subThetaPrime.getSuperedType();
|
||||
Set<UnifyPair> resultPrime = new HashSet<>();
|
||||
resultPrime.add(new UnifyPair(thetaPrime, a, PairOperator.SMALLERDOT, pair.getSubstitution(), pair, pair.getfBounded()));
|
||||
resultPrime.add(new UnifyPair(thetaPrime, a, PairOperator.SMALLERDOT, pair.getSubstitution(), pair, pair.getFBounded()));
|
||||
result.add(resultPrime);
|
||||
//writeLog(resultPrime.toString());
|
||||
|
||||
@@ -1399,18 +1386,18 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
//fc.setLogTrue();
|
||||
//writeLog("FBOUNDED: " + pair.getfBounded());
|
||||
//writeLog("Pair: " + pair);
|
||||
Set<UnifyType> greater = fc.greater(theta, pair.getfBounded());
|
||||
Set<UnifyType> greater = fc.greater(theta, pair.getFBounded());
|
||||
//writeLog("GREATER: " + greater + pair + "THETA: " + theta + "FBOUNDED: " + pair.getfBounded() + " ");
|
||||
if (a.isWildcardable()) {
|
||||
Set<UnifyType> greater_ext = greater.stream().filter(x -> !(x instanceof ExtendsType) && !(x instanceof SuperType))
|
||||
.map(x -> {
|
||||
//BinaryOperator<HashMap<PlaceholderType,PlaceholderType>> combiner = (aa,b) -> { aa.putAll(b); return aa;}; //Variablenumbenennung rausgenommen
|
||||
//HashMap<PlaceholderType,PlaceholderType> hm = x.getInvolvedPlaceholderTypes().stream() //Variablen muessen wahrscheinlich erhalten bleiben
|
||||
// .reduce(new HashMap<PlaceholderType,PlaceholderType>(),
|
||||
// (aa, b)-> { aa.put(b,PlaceholderType.freshPlaceholder()); return aa; }, combiner);
|
||||
return new SuperType(x);
|
||||
})//.accept(new freshPlaceholder(), hm));}
|
||||
.collect(Collectors.toCollection(HashSet::new));
|
||||
.map(x -> {
|
||||
//BinaryOperator<HashMap<PlaceholderType,PlaceholderType>> combiner = (aa,b) -> { aa.putAll(b); return aa;}; //Variablenumbenennung rausgenommen
|
||||
//HashMap<PlaceholderType,PlaceholderType> hm = x.getInvolvedPlaceholderTypes().stream() //Variablen muessen wahrscheinlich erhalten bleiben
|
||||
// .reduce(new HashMap<PlaceholderType,PlaceholderType>(),
|
||||
// (aa, b)-> { aa.put(b,PlaceholderType.freshPlaceholder()); return aa; }, combiner);
|
||||
return new SuperType(x);
|
||||
})//.accept(new freshPlaceholder(), hm));}
|
||||
.collect(Collectors.toCollection(HashSet::new));
|
||||
greater.addAll(greater_ext);
|
||||
}
|
||||
//eingefuegt PL 2019-01-03 ENDE
|
||||
@@ -1424,7 +1411,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
for (int i = 0; !allGen && i < freshTphs.length; i++) {
|
||||
freshTphs[i] = PlaceholderType.freshPlaceholder();
|
||||
((PlaceholderType) freshTphs[i]).setVariance(a.getVariance());
|
||||
Set<UnifyType> fBounded = new HashSet<>(pair.getfBounded()); //PL 2019-01-09 new HashSet eingefuegt
|
||||
Set<UnifyType> fBounded = new HashSet<>(pair.getFBounded()); //PL 2019-01-09 new HashSet eingefuegt
|
||||
|
||||
int i_ef = i;
|
||||
BiFunction<Boolean, UnifyType, Boolean> f = (x, y) ->
|
||||
@@ -1473,10 +1460,10 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
PlaceholderType freshTph = PlaceholderType.freshPlaceholder();
|
||||
|
||||
freshTph.setVariance(a.getVariance());
|
||||
freshTph.disableWildcardtable();
|
||||
freshTph.disableWildcardable();
|
||||
resultPrime = new HashSet<>();
|
||||
resultPrime.add(new UnifyPair(a, new ExtendsType(freshTph), PairOperator.EQUALSDOT, pair.getSubstitution(), pair));
|
||||
resultPrime.add(new UnifyPair(theta, freshTph, PairOperator.SMALLERDOT, pair.getSubstitution(), pair, pair.getfBounded()));
|
||||
resultPrime.add(new UnifyPair(theta, freshTph, PairOperator.SMALLERDOT, pair.getSubstitution(), pair, pair.getFBounded()));
|
||||
result.add(resultPrime);
|
||||
//writeLog("resultPrime: " + resultPrime.toString());
|
||||
|
||||
@@ -1528,14 +1515,14 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
void writeLog(String str) {
if (log) {
if (parallel) {
-            logFile.write("Thread no.:" + thNo + "\n"
+            logFile.write("Thread no.:" + threadNumber + "\n"
                    + "parallel:" + parallel + "\n"
                    + str + "\n\n"
            );
} else {
-            logFile.writeNonThreaded("Thread no.:" + thNo + "\n"
+            logFile.writeNonThreaded("Thread no.:" + threadNumber + "\n"
                    + "parallel:" + parallel + "\n"
                    + str + "\n\n"
            );
}
}
@@ -1543,11 +1530,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {

void writeStatistics(String str) {
try {
-        statisticsFile.write("Thread No. " + thNo + ": " + str + "\n");
+        statisticsFile.write("Thread No. " + threadNumber + ": " + str + "\n");
statisticsFile.flush();

} catch (IOException e) {
System.err.println("kein StatisticsFile");
}
}
}
}
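Note on the TypeUnifyTask hunks above: the task forks TypeUnify2Task subtasks, then joins them in order and merges their result sets; the thNo to threadNumber rename only touches the logging. For readers unfamiliar with the pattern, a minimal, self-contained fork/join sketch with hypothetical task and element types (not the project's classes):

import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.RecursiveTask;

// Hypothetical stand-in for TypeUnify2Task: computes a partial result for one partition.
class PartialTask extends RecursiveTask<Set<String>> {
    private final Set<String> partition;

    PartialTask(Set<String> partition) { this.partition = partition; }

    @Override
    protected Set<String> compute() {
        return partition; // the real task would run one unification branch here
    }
}

class ForkJoinSketch {
    // Fork every subtask first, then join them all and collect the partial results,
    // mirroring the forks.add(fork); fork.fork(); ... fork.join() structure above.
    static List<Set<String>> run(List<Set<String>> partitions) {
        List<PartialTask> forks = new ArrayList<>();
        for (Set<String> p : partitions) {
            PartialTask fork = new PartialTask(p);
            forks.add(fork);
            fork.fork();
        }
        List<Set<String>> addRes = new ArrayList<>();
        for (PartialTask fork : forks) {
            addRes.add(fork.join());
        }
        return addRes;
    }
}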
@@ -1,18 +0,0 @@
-package de.dhbwstuttgart.typeinference.unify;
-
-import de.dhbwstuttgart.typeinference.result.ResultSet;
-
-import java.util.List;
-
-public class UnifyResultEvent {
-
-    private List<ResultSet> newTypeResult;
-
-    public UnifyResultEvent(List<ResultSet> newTypeResult) {
-        this.newTypeResult = newTypeResult;
-    }
-
-    public List<ResultSet> getNewTypeResult() {
-        return newTypeResult;
-    }
-}
@@ -1,7 +0,0 @@
-package de.dhbwstuttgart.typeinference.unify;
-
-public interface UnifyResultListener {
-
-    void onNewTypeResultFound(UnifyResultEvent evt);
-
-}
@@ -1,21 +0,0 @@
-package de.dhbwstuttgart.typeinference.unify;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import de.dhbwstuttgart.typeinference.result.ResultSet;
-import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
-
-public class UnifyResultListenerImpl implements UnifyResultListener {
-
-    List<ResultSet> results = new ArrayList<>();
-
-    public synchronized void onNewTypeResultFound(UnifyResultEvent evt) {
-        results.addAll(evt.getNewTypeResult());
-    }
-
-    public List<ResultSet> getResults() {
-        return results;
-    }
-
-}
@@ -1,55 +0,0 @@
-package de.dhbwstuttgart.typeinference.unify;
-
-import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
-import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
-import de.dhbwstuttgart.typeinference.result.ResultSet;
-import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
-import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
-import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
-
-import java.util.*;
-import java.util.stream.Collectors;
-
-public class UnifyResultModel {
-
-    ConstraintSet<de.dhbwstuttgart.typeinference.constraints.Pair> cons;
-
-    IFiniteClosure fc;
-
-    public UnifyResultModel(ConstraintSet<de.dhbwstuttgart.typeinference.constraints.Pair> cons,
-                            IFiniteClosure fc) {
-        this.cons = cons;
-        this.fc = fc;
-    }
-
-    private List<UnifyResultListener> listeners = new ArrayList<>();
-
-    public void addUnifyResultListener(UnifyResultListener listenerToAdd) {
-        listeners.add(listenerToAdd);
-    }
-
-    public void removeUnifyResultListener(UnifyResultListener listenerToRemove) {
-        listeners.remove(listenerToRemove);
-    }
-
-    public void notify(Set<Set<UnifyPair>> eqPrimePrimeSet) {
-        Set<Set<UnifyPair>> eqPrimePrimeSetRet = eqPrimePrimeSet.stream().map(x -> {
-            Optional<Set<UnifyPair>> res = new RuleSet().subst(x.stream().map(y -> {
-                if (y.getPairOp() == PairOperator.SMALLERDOTWC) y.setPairOp(PairOperator.EQUALSDOT);
-                return y; // every pair a <.? b is replaced by a =. b
-            }).collect(Collectors.toCollection(HashSet::new)));
-            if (res.isPresent()) { // if subst returns a result, something was changed
-                return new TypeUnifyTask().applyTypeUnificationRules(res.get(), fc);
-            }
-            else return x; // if nothing was changed, x is returned unchanged
-        }).collect(Collectors.toCollection(HashSet::new));
-        List<ResultSet> newResult = eqPrimePrimeSetRet.stream().map(unifyPairs ->
-                new ResultSet(UnifyTypeFactory.convert(unifyPairs, de.dhbwstuttgart.typeinference.constraints.Pair.generateTPHMap(cons))))
-                .collect(Collectors.toList());
-        UnifyResultEvent evt = new UnifyResultEvent(newResult);
-
-        for (UnifyResultListener listener : listeners) {
-            listener.onNewTypeResultFound(evt);
-        }
-    }
-}
@@ -1,57 +1,54 @@
 package de.dhbwstuttgart.typeinference.unify;

 import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
+import de.dhbwstuttgart.typeinference.TypeInferenceHelper;
 import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
 import de.dhbwstuttgart.typeinference.constraints.Pair;
 import de.dhbwstuttgart.typeinference.result.ResultSet;
 import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
 import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
 import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

-import java.util.*;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
 import java.util.concurrent.ForkJoinPool;
 import java.util.stream.Collectors;

 public class UnifyResultModelParallel {
+    private final ConstraintSet<Pair> cons;
+    private final IFiniteClosure fc;
+    private final List<ResultSet> results = new ArrayList<>();
     private ForkJoinPool pool;
-    private ConstraintSet<Pair> cons;
-    private IFiniteClosure fc;
-    private List<UnifyResultListener> listeners = new ArrayList<>();

-    public UnifyResultModelParallel(ConstraintSet<Pair> cons, IFiniteClosure fc){
+    public UnifyResultModelParallel(ConstraintSet<Pair> cons, IFiniteClosure fc) {
         this.cons = cons;
         this.fc = fc;
     }

-    public void setPool(ForkJoinPool pool){
+    public void setPool(ForkJoinPool pool) {
         this.pool = pool;
     }
-    public void addUnifyResultListener(UnifyResultListener listenerToAdd) {
-        listeners.add(listenerToAdd);
-    }
-    public void removeUnifyResultListener(UnifyResultListener listenerToRemove) {
-        listeners.remove(listenerToRemove);
-    }
-    public void notify(Set<Set<UnifyPair>> eqPrimePrimeSet){
-        pool.execute(()->{
-            Set<Set<UnifyPair>> eqPrimePrimeSetRet = eqPrimePrimeSet.stream().map(x -> {
-                Optional<Set<UnifyPair>> res = new RuleSet().subst(x.stream().map(y -> {
-                    if (y.getPairOp() == PairOperator.SMALLERDOTWC) y.setPairOp(PairOperator.EQUALSDOT);
-                    return y; // every pair a <.? b is replaced by a =. b
-                }).collect(Collectors.toCollection(HashSet::new)));
-                if (res.isPresent()) { // if subst returns a result, something was changed
-                    return new TypeUnifyTask().applyTypeUnificationRules(res.get(), fc);
-                }
-                else return x; // if nothing was changed, x is returned unchanged
-            }).collect(Collectors.toCollection(HashSet::new));
-            List<ResultSet> newResult = eqPrimePrimeSetRet.stream().map(unifyPairs ->
-                    new ResultSet(UnifyTypeFactory.convert(unifyPairs, de.dhbwstuttgart.typeinference.constraints.Pair.generateTPHMap(cons))))
-                    .collect(Collectors.toList());
-            UnifyResultEvent evt = new UnifyResultEvent(newResult);

-            for (UnifyResultListener listener : listeners) {
-                listener.onNewTypeResultFound(evt);
-            }
+    public synchronized void onNewTypeResultFound(List<ResultSet> result) {
+        results.addAll(result);
+    }
+
+    public List<ResultSet> getResults() {
+        return results;
+    }
+
+    public void notify(Set<Set<UnifyPair>> eqPrimePrimeSet) {
+        pool.execute(() -> {
+            Set<Set<UnifyPair>> eqPrimePrimeSetRet =
+                    TypeInferenceHelper.resolveSubstitutions(new HashSet<>(eqPrimePrimeSet), fc);
+
+            List<ResultSet> newResult = eqPrimePrimeSetRet.stream().map(unifyPairs ->
+                    new ResultSet(UnifyTypeFactory.convert(unifyPairs, de.dhbwstuttgart.typeinference.constraints.Pair.generateTPHMap(cons))))
+                    .collect(Collectors.toList());
+
+            // TODO: this is a synchronized call, can we find a way around this?
+            this.onNewTypeResultFound(newResult);
+        });
+    }
 }
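A rough usage sketch of the reworked UnifyResultModelParallel. How cons (ConstraintSet<Pair>) and fc (IFiniteClosure) are built is outside this diff, and the pool handling shown here is an assumption, not taken from the sources:

// Sketch only: cons and fc come from the compiler front end.
ForkJoinPool pool = new ForkJoinPool();
UnifyResultModelParallel resultModel = new UnifyResultModelParallel(cons, fc);
resultModel.setPool(pool);

// Unification tasks publish intermediate solutions; conversion runs asynchronously on the pool.
resultModel.notify(eqPrimePrimeSet);

// Once the pool has quiesced, the accumulated ResultSets can be read back.
pool.awaitQuiescence(1, java.util.concurrent.TimeUnit.MINUTES);
List<ResultSet> results = resultModel.getResults();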
@@ -5,12 +5,12 @@ import java.util.concurrent.ForkJoinPool;
public class UnifyTaskModelParallel {
    private ForkJoinPool pool;

-    public void setPool(ForkJoinPool pool){
+    public void setPool(ForkJoinPool pool) {
        this.pool = pool;
    }

-    public void cancel(){
-        if(this.pool != null) {
+    public void cancel() {
+        if (this.pool != null) {
            this.pool.shutdown();
        }
    }

@@ -0,0 +1,40 @@
+package de.dhbwstuttgart.typeinference.unify;
+
+import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor;
+import de.dhbwstuttgart.typeinference.unify.model.*;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.stream.Collectors;
+
+public class VisitUnifyTypeVisitor<T> implements UnifyTypeVisitor<T> {
+
+    public ReferenceType visit(ReferenceType refty, T ht) {
+        return new ReferenceType(refty.getName(),
+                new TypeParams(
+                        Arrays.stream(refty.getTypeParams().get())
+                                .map(x -> x.accept(this, ht))
+                                .collect(Collectors.toCollection(ArrayList::new))));
+    }
+
+    public PlaceholderType visit(PlaceholderType phty, T ht) {
+        return phty;
+    }
+
+    public FunNType visit(FunNType funnty, T ht) {
+        return FunNType.getFunNType(
+                new TypeParams(
+                        Arrays.stream(funnty.getTypeParams().get())
+                                .map(x -> x.accept(this, ht))
+                                .collect(Collectors.toCollection(ArrayList::new)))
+        );
+    }
+
+    public SuperType visit(SuperType suty, T ht) {
+        return new SuperType(suty.getWildcardedType().accept(this, ht));
+    }
+
+    public ExtendsType visit(ExtendsType extty, T ht) {
+        return new ExtendsType(extty.getWildcardedType().accept(this, ht));
+    }
+}
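VisitUnifyTypeVisitor is the default "rebuild the type and recurse into its parameters" visitor; concrete visitors, such as the FreshPlaceholder and DistributeVariance classes renamed elsewhere in this change, override only the cases they need. A hypothetical subclass as a sketch (not part of this change):

// Hypothetical example: collects every placeholder it visits into the accumulator argument.
public class CollectPlaceholders extends VisitUnifyTypeVisitor<java.util.Set<PlaceholderType>> {

    @Override
    public PlaceholderType visit(PlaceholderType phty, java.util.Set<PlaceholderType> seen) {
        seen.add(phty);  // record the placeholder; the type itself is returned unchanged
        return phty;
    }
}

// Usage: someUnifyType.accept(new CollectPlaceholders(), new java.util.HashSet<>());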
@@ -5,48 +5,54 @@ import java.io.Writer;
|
||||
import java.util.concurrent.ForkJoinPool;
|
||||
|
||||
public class WriterActiveObject {
|
||||
private Writer writer;
|
||||
private ForkJoinPool pool;
|
||||
|
||||
public WriterActiveObject(Writer writer, ForkJoinPool pool){
|
||||
private final Writer writer;
|
||||
private final ForkJoinPool pool;
|
||||
|
||||
public WriterActiveObject(Writer writer, ForkJoinPool pool) {
|
||||
this.writer = writer;
|
||||
this.pool = pool;
|
||||
}
|
||||
|
||||
public void close(){
|
||||
pool.execute(()->{
|
||||
try {
|
||||
writer.close();
|
||||
} catch (IOException e) {
|
||||
System.out.println(e.getMessage());
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public void write(String message){
|
||||
pool.execute(()->{
|
||||
public void write(String message) {
|
||||
pool.execute(() -> {
|
||||
try {
|
||||
writer.write(message);
|
||||
// TODO: do something against this flush(). Writing to disk is slooooooow
|
||||
writer.flush();
|
||||
} catch (IOException e) {
|
||||
// TODO: why is here a random error conversion down to a runtimeException?
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public void writeNonThreaded(String message){
|
||||
public void writeNonThreaded(String message) {
|
||||
try {
|
||||
writer.write(message);
|
||||
// TODO: do something against this flush(). Writing to disk is slooooooow
|
||||
writer.flush();
|
||||
} catch (IOException e) {
|
||||
// TODO: why is here a random error conversion down to a runtimeException?
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
public void closeNonThreaded(){
|
||||
public void close() {
|
||||
pool.execute(() -> {
|
||||
try {
|
||||
writer.close();
|
||||
} catch (IOException e) {
|
||||
System.err.println(e.getMessage());
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public void closeNonThreaded() {
|
||||
try {
|
||||
writer.close();
|
||||
} catch (IOException e) {
|
||||
// TODO: why is here a random error conversion down to a runtimeException?
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
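A minimal usage sketch of the cleaned-up WriterActiveObject; the log path and pool choice are placeholders, not taken from the sources:

static void demo() throws IOException {
    ForkJoinPool pool = ForkJoinPool.commonPool();
    WriterActiveObject logFile = new WriterActiveObject(new FileWriter("logFiles/unify.log"), pool); // placeholder path
    logFile.write("Thread no.: 1\n");    // enqueued on the pool, flushed per message
    logFile.writeNonThreaded("done\n");  // blocking variant used on the non-parallel path
    logFile.close();                     // asynchronous close; closeNonThreaded() blocks instead
}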
@@ -1,54 +0,0 @@
|
||||
package de.dhbwstuttgart.typeinference.unify;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import de.dhbwstuttgart.typeinference.unify.model.FunNType;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
|
||||
|
||||
public class distributeVariance extends visitUnifyTypeVisitor<Integer> {
|
||||
|
||||
public static int inverseVariance(int variance) {
|
||||
Integer ret = 0;
|
||||
if (variance == 1) {
|
||||
ret = -1;
|
||||
}
|
||||
if (variance == -1) {
|
||||
ret = 1;
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public PlaceholderType visit(PlaceholderType phty, Integer ht) {
|
||||
if (ht != 0) {
|
||||
if (phty.getVariance() == 0) {
|
||||
phty.setVariance(ht);
|
||||
}
|
||||
//PL 2018-05-17 urspruengliche Variance nicht veraendern
|
||||
//else if (phty.getVariance() != ht) {
|
||||
// phty.setVariance(0);
|
||||
//}
|
||||
}
|
||||
return phty;
|
||||
}
|
||||
|
||||
public FunNType visit(FunNType funnty, Integer ht) {
|
||||
List<UnifyType> param = new ArrayList<>(funnty.getTypeParams().get().length);
|
||||
param.addAll(Arrays.asList(funnty.getTypeParams().get()));
|
||||
UnifyType resultType = param.remove(param.size()-1);
|
||||
Integer htInverse = inverseVariance(ht);
|
||||
param = param.stream()
|
||||
.map(x -> x.accept(this, htInverse))
|
||||
.collect(Collectors.toCollection(ArrayList::new));
|
||||
param.add(resultType.accept(this, ht));
|
||||
return FunNType.getFunNType(new TypeParams(param));
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
@@ -1,15 +0,0 @@
-package de.dhbwstuttgart.typeinference.unify;
-
-
-import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
-
-import java.util.HashMap;
-
-
-public class freshPlaceholder extends visitUnifyTypeVisitor<HashMap<PlaceholderType,PlaceholderType>> {
-
-    @Override
-    public PlaceholderType visit(PlaceholderType phty, HashMap<PlaceholderType,PlaceholderType> ht) {
-        return ht.get(phty);
-    }
-}
@@ -6,58 +6,71 @@ import java.util.Optional;
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Florian Steurer
|
||||
*/
|
||||
public interface IFiniteClosure {
|
||||
|
||||
public void setLogTrue();
|
||||
/**
|
||||
* Returns all types of the finite closure that are subtypes of the argument.
|
||||
* @return The set of subtypes of the argument.
|
||||
*/
|
||||
public Set<UnifyType> smaller(UnifyType type, Set<UnifyType> fBounded);
|
||||
|
||||
/**
|
||||
* Returns all types of the finite closure that are supertypes of the argument.
|
||||
* @return The set of supertypes of the argument.
|
||||
*/
|
||||
public Set<UnifyType> greater(UnifyType type, Set<UnifyType> fBounded);
|
||||
|
||||
/**
|
||||
* Wo passt Type rein?
|
||||
* @param type
|
||||
* @return
|
||||
*/
|
||||
public Set<UnifyType> grArg(UnifyType type, Set<UnifyType> fBounded);
|
||||
|
||||
/**
|
||||
* Was passt in Type rein?
|
||||
* @param type
|
||||
* @return
|
||||
*/
|
||||
public Set<UnifyType> smArg(UnifyType type, Set<UnifyType> fBounded);
|
||||
|
||||
public Set<UnifyType> grArg(ReferenceType type, Set<UnifyType> fBounded);
|
||||
public Set<UnifyType> smArg(ReferenceType type, Set<UnifyType> fBounded);
|
||||
|
||||
public Set<UnifyType> grArg(ExtendsType type, Set<UnifyType> fBounded);
|
||||
public Set<UnifyType> smArg(ExtendsType type, Set<UnifyType> fBounded);
|
||||
|
||||
public Set<UnifyType> grArg(SuperType type, Set<UnifyType> fBounded);
|
||||
public Set<UnifyType> smArg(SuperType type, Set<UnifyType> fBounded);
|
||||
void setLogTrue();
|
||||
|
||||
public Set<UnifyType> grArg(PlaceholderType type, Set<UnifyType> fBounded);
|
||||
public Set<UnifyType> smArg(PlaceholderType type, Set<UnifyType> fBounded);
|
||||
/**
|
||||
* Returns all types of the finite closure that are subtypes of the argument.
|
||||
*
|
||||
* @return The set of subtypes of the argument.
|
||||
*/
|
||||
Set<UnifyType> smaller(UnifyType type, Set<UnifyType> fBounded);
|
||||
|
||||
public Set<UnifyType> grArg(FunNType type, Set<UnifyType> fBounded);
|
||||
public Set<UnifyType> smArg(FunNType type, Set<UnifyType> fBounded);
|
||||
/**
|
||||
* Returns all types of the finite closure that are supertypes of the argument.
|
||||
*
|
||||
* @return The set of supertypes of the argument.
|
||||
*/
|
||||
Set<UnifyType> greater(UnifyType type, Set<UnifyType> fBounded);
|
||||
|
||||
public Set<UnifyPair> getPairs();
|
||||
public Optional<UnifyType> getLeftHandedType(String typeName);
|
||||
public Set<UnifyType> getAncestors(UnifyType t);
|
||||
public Set<UnifyType> getChildren(UnifyType t);
|
||||
public Set<UnifyType> getAllTypesByName(String typeName);
|
||||
/**
|
||||
* Wo passt Type rein?
|
||||
*
|
||||
* @param type
|
||||
* @return
|
||||
*/
|
||||
Set<UnifyType> grArg(UnifyType type, Set<UnifyType> fBounded);
|
||||
|
||||
public int compare(UnifyType rhsType, UnifyType rhsType2, PairOperator pairop);
|
||||
/**
|
||||
* Was passt in Type rein?
|
||||
*
|
||||
* @param type
|
||||
* @return
|
||||
*/
|
||||
Set<UnifyType> smArg(UnifyType type, Set<UnifyType> fBounded);
|
||||
|
||||
Set<UnifyType> grArg(ReferenceType type, Set<UnifyType> fBounded);
|
||||
|
||||
Set<UnifyType> smArg(ReferenceType type, Set<UnifyType> fBounded);
|
||||
|
||||
Set<UnifyType> grArg(ExtendsType type, Set<UnifyType> fBounded);
|
||||
|
||||
Set<UnifyType> smArg(ExtendsType type, Set<UnifyType> fBounded);
|
||||
|
||||
Set<UnifyType> grArg(SuperType type, Set<UnifyType> fBounded);
|
||||
|
||||
Set<UnifyType> smArg(SuperType type, Set<UnifyType> fBounded);
|
||||
|
||||
Set<UnifyType> grArg(PlaceholderType type, Set<UnifyType> fBounded);
|
||||
|
||||
Set<UnifyType> smArg(PlaceholderType type, Set<UnifyType> fBounded);
|
||||
|
||||
Set<UnifyType> grArg(FunNType type, Set<UnifyType> fBounded);
|
||||
|
||||
Set<UnifyType> smArg(FunNType type, Set<UnifyType> fBounded);
|
||||
|
||||
Set<UnifyPair> getPairs();
|
||||
|
||||
Optional<UnifyType> getLeftHandedType(String typeName);
|
||||
|
||||
Set<UnifyType> getAncestors(UnifyType t);
|
||||
|
||||
Set<UnifyType> getChildren(UnifyType t);
|
||||
|
||||
Set<UnifyType> getAllTypesByName(String typeName);
|
||||
|
||||
int compare(UnifyType rhsType, UnifyType rhsType2, PairOperator pairOp);
|
||||
}
|
||||
|
||||
@@ -8,18 +8,20 @@ import java.util.Optional;

/**
 * Match
 *
 * @author Martin Pluemicke
 * derived from IUnify.java
 */
public interface IMatch {

-    /**
-     * Finds the most general matcher sigma of the set {t1 =. t1',...,tn =. tn'} so that
-     * sigma(t1) = t1' , ... sigma(tn) = tn'.
-     * @param terms The set of terms to be matched
-     * @return An optional of the most general matcher if it exists or an empty optional if there is no matcher.
-     */
-    public Optional<Unifier> match(ArrayList<UnifyPair> termsList);
-
-
-}
+    /**
+     * Finds the most general matcher sigma of the set {t1 =. t1',...,tn =. tn'} so that
+     * sigma(t1) = t1' , ... sigma(tn) = tn'.
+     *
+     * @param termsList The set of terms to be matched
+     * @return An optional of the most general matcher if it exists or an empty optional if there is no matcher.
+     */
+    Optional<Unifier> match(ArrayList<UnifyPair> termsList);
+
+
+}
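For context, this is how a matcher result is consumed in the TypeUnifyTask hunk above (ml is an ArrayList<UnifyPair> assembled by the caller; the surrounding names are illustrative):

IMatch match = new Match();
Optional<Unifier> sigma = match.match(ml);
if (sigma.isEmpty()) {
    thetaQs.remove(c);   // no most general matcher exists: discard the candidate, as above
} else {
    // sigma.get() maps placeholders to the matched types and can be applied to further pairs
}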
@@ -9,95 +9,115 @@ import java.util.Set;
|
||||
|
||||
/**
|
||||
* Contains the inference rules that are applied to the set Eq.
|
||||
*
|
||||
* @author Florian Steurer
|
||||
*/
|
||||
public interface IRuleSet {
|
||||
|
||||
public Optional<UnifyPair> reduceUp(UnifyPair pair);
|
||||
public Optional<UnifyPair> reduceLow(UnifyPair pair);
|
||||
public Optional<UnifyPair> reduceUpLow(UnifyPair pair);
|
||||
public Optional<Set<UnifyPair>> reduceExt(UnifyPair pair, IFiniteClosure fc);
|
||||
public Optional<Set<UnifyPair>> reduceSup(UnifyPair pair, IFiniteClosure fc);
|
||||
public Optional<Set<UnifyPair>> reduceEq(UnifyPair pair);
|
||||
public Optional<Set<UnifyPair>> reduce1(UnifyPair pair, IFiniteClosure fc);
|
||||
public Optional<Set<UnifyPair>> reduce2(UnifyPair pair);
|
||||
|
||||
/*
|
||||
* Missing Reduce-Rules for Wildcards
|
||||
*/
|
||||
public Optional<UnifyPair> reduceWildcardLow(UnifyPair pair);
|
||||
public Optional<UnifyPair> reduceWildcardLowRight(UnifyPair pair);
|
||||
public Optional<UnifyPair> reduceWildcardUp(UnifyPair pair);
|
||||
public Optional<UnifyPair> reduceWildcardUpRight(UnifyPair pair);
|
||||
|
||||
Optional<UnifyPair> reduceUp(UnifyPair pair);
|
||||
|
||||
Optional<UnifyPair> reduceLow(UnifyPair pair);
|
||||
|
||||
Optional<UnifyPair> reduceUpLow(UnifyPair pair);
|
||||
|
||||
Optional<Set<UnifyPair>> reduceExt(UnifyPair pair, IFiniteClosure fc);
|
||||
|
||||
Optional<Set<UnifyPair>> reduceSup(UnifyPair pair, IFiniteClosure fc);
|
||||
|
||||
Optional<Set<UnifyPair>> reduceEq(UnifyPair pair);
|
||||
|
||||
Optional<Set<UnifyPair>> reduce1(UnifyPair pair, IFiniteClosure fc);
|
||||
|
||||
Optional<Set<UnifyPair>> reduce2(UnifyPair pair);
|
||||
|
||||
/*
|
||||
* Missing Reduce-Rules for Wildcards
|
||||
*/
|
||||
Optional<UnifyPair> reduceWildcardLow(UnifyPair pair);
|
||||
|
||||
Optional<UnifyPair> reduceWildcardLowRight(UnifyPair pair);
|
||||
|
||||
Optional<UnifyPair> reduceWildcardUp(UnifyPair pair);
|
||||
|
||||
Optional<UnifyPair> reduceWildcardUpRight(UnifyPair pair);
|
||||
|
||||
/*
|
||||
* vgl. JAVA_BSP/Wildcard6.java
|
||||
public Optional<UnifyPair> reduceWildcardLowUp(UnifyPair pair);
|
||||
public Optional<UnifyPair> reduceWildcardUpLow(UnifyPair pair);
|
||||
public Optional<UnifyPair> reduceWildcardLeft(UnifyPair pair);
|
||||
Optional<UnifyPair> reduceWildcardLowUp(UnifyPair pair);
|
||||
Optional<UnifyPair> reduceWildcardUpLow(UnifyPair pair);
|
||||
Optional<UnifyPair> reduceWildcardLeft(UnifyPair pair);
|
||||
*/
|
||||
|
||||
/*
|
||||
* Additional Rules which replace cases of the cartesian product
|
||||
*/
|
||||
|
||||
/**
|
||||
* Rule that replaces the fourth case of the cartesian product where (a <.? Theta)
|
||||
*/
|
||||
public Optional<UnifyPair> reduceTph(UnifyPair pair);
|
||||
|
||||
/**
|
||||
* Rule that replaces the sixth case of the cartesian product where (? ext Theta <.? a)
|
||||
*/
|
||||
public Optional<Set<UnifyPair>> reduceTphExt(UnifyPair pair);
|
||||
|
||||
/**
|
||||
* Rule that replaces the fourth case of the cartesian product where (? sup Theta <.? a)
|
||||
*/
|
||||
public Optional<Set<UnifyPair>> reduceTphSup(UnifyPair pair);
|
||||
|
||||
/*
|
||||
* FunN Rules
|
||||
*/
|
||||
public Optional<Set<UnifyPair>> reduceFunN(UnifyPair pair);
|
||||
public Optional<Set<UnifyPair>> greaterFunN(UnifyPair pair);
|
||||
public Optional<Set<UnifyPair>> smallerFunN(UnifyPair pair);
|
||||
|
||||
/**
|
||||
* Checks whether the erase1-Rule applies to the pair.
|
||||
* @return True if the pair is erasable, false otherwise.
|
||||
*/
|
||||
public boolean erase1(UnifyPair pair, IFiniteClosure fc);
|
||||
|
||||
/**
|
||||
* Checks whether the erase2-Rule applies to the pair.
|
||||
* @return True if the pair is erasable, false otherwise.
|
||||
*/
|
||||
public boolean erase2(UnifyPair pair, IFiniteClosure fc);
|
||||
|
||||
/**
|
||||
* Checks whether the erase3-Rule applies to the pair.
|
||||
* @return True if the pair is erasable, false otherwise.
|
||||
*/
|
||||
public boolean erase3(UnifyPair pair);
|
||||
|
||||
public Optional<UnifyPair> swap(UnifyPair pair);
|
||||
|
||||
public Optional<UnifyPair> adapt(UnifyPair pair, IFiniteClosure fc);
|
||||
public Optional<UnifyPair> adaptExt(UnifyPair pair, IFiniteClosure fc);
|
||||
public Optional<UnifyPair> adaptSup(UnifyPair pair, IFiniteClosure fc);
|
||||
|
||||
/**
|
||||
* Applies the subst-Rule to a set of pairs (usually Eq').
|
||||
* @param pairs The set of pairs where the subst rule should apply.
|
||||
* @return An optional of the modified set, if there were any substitutions. An empty optional if there were no substitutions.
|
||||
*/
|
||||
public Optional<Set<UnifyPair>> subst(Set<UnifyPair> pairs, List<Set<Constraint<UnifyPair>>> oderConstraints);
|
||||
|
||||
/**
|
||||
* Applies the subst-Rule to a set of pairs (usually Eq').
|
||||
* @param pairs The set of pairs where the subst rule should apply.
|
||||
* @return An optional of the modified set, if there were any substitutions. An empty optional if there were no substitutions.
|
||||
*/
|
||||
public Optional<Set<UnifyPair>> subst(Set<UnifyPair> pairs);
|
||||
}
|
||||
|
||||
/*
|
||||
* Additional Rules which replace cases of the cartesian product
|
||||
*/
|
||||
|
||||
/**
|
||||
* Rule that replaces the fourth case of the cartesian product where (a <.? Theta)
|
||||
*/
|
||||
Optional<UnifyPair> reduceTph(UnifyPair pair);
|
||||
|
||||
/**
|
||||
* Rule that replaces the sixth case of the cartesian product where (? ext Theta <.? a)
|
||||
*/
|
||||
Optional<Set<UnifyPair>> reduceTphExt(UnifyPair pair);
|
||||
|
||||
/**
|
||||
* Rule that replaces the fourth case of the cartesian product where (? sup Theta <.? a)
|
||||
*/
|
||||
Optional<Set<UnifyPair>> reduceTphSup(UnifyPair pair);
|
||||
|
||||
/*
|
||||
* FunN Rules
|
||||
*/
|
||||
Optional<Set<UnifyPair>> reduceFunN(UnifyPair pair);
|
||||
|
||||
Optional<Set<UnifyPair>> greaterFunN(UnifyPair pair);
|
||||
|
||||
Optional<Set<UnifyPair>> smallerFunN(UnifyPair pair);
|
||||
|
||||
/**
|
||||
* Checks whether the erase1-Rule applies to the pair.
|
||||
*
|
||||
* @return True if the pair is erasable, false otherwise.
|
||||
*/
|
||||
boolean erase1(UnifyPair pair, IFiniteClosure fc);
|
||||
|
||||
/**
|
||||
* Checks whether the erase2-Rule applies to the pair.
|
||||
*
|
||||
* @return True if the pair is erasable, false otherwise.
|
||||
*/
|
||||
boolean erase2(UnifyPair pair, IFiniteClosure fc);
|
||||
|
||||
/**
|
||||
* Checks whether the erase3-Rule applies to the pair.
|
||||
*
|
||||
* @return True if the pair is erasable, false otherwise.
|
||||
*/
|
||||
boolean erase3(UnifyPair pair);
|
||||
|
||||
Optional<UnifyPair> swap(UnifyPair pair);
|
||||
|
||||
Optional<UnifyPair> adapt(UnifyPair pair, IFiniteClosure fc);
|
||||
|
||||
Optional<UnifyPair> adaptExt(UnifyPair pair, IFiniteClosure fc);
|
||||
|
||||
Optional<UnifyPair> adaptSup(UnifyPair pair, IFiniteClosure fc);
|
||||
|
||||
/**
|
||||
* Applies the subst-Rule to a set of pairs (usually Eq').
|
||||
*
|
||||
* @param pairs The set of pairs where the subst rule should apply.
|
||||
* @return An optional of the modified set, if there were any substitutions. An empty optional if there were no substitutions.
|
||||
*/
|
||||
Optional<Set<UnifyPair>> subst(Set<UnifyPair> pairs, List<Set<Constraint<UnifyPair>>> oderConstraints);
|
||||
|
||||
/**
|
||||
* Applies the subst-Rule to a set of pairs (usually Eq').
|
||||
*
|
||||
* @param pairs The set of pairs where the subst rule should apply.
|
||||
* @return An optional of the modified set, if there were any substitutions. An empty optional if there were no substitutions.
|
||||
*/
|
||||
Optional<Set<UnifyPair>> subst(Set<UnifyPair> pairs);
|
||||
}
|
||||
|
||||
@@ -5,12 +5,14 @@ import java.util.Set;

/**
 * Contains operations on sets.
 *
 * @author Florian Steurer
 */
public interface ISetOperations {
-    /**
-     * Calculates the cartesian product of the sets.
-     * @return The cartesian product
-     */
-    <B> Set<List<B>> cartesianProduct(List<? extends Set<? extends B>> sets);
+    /**
+     * Calculates the cartesian product of the sets.
+     *
+     * @return The cartesian product
+     */
+    <B> Set<List<B>> cartesianProduct(List<? extends Set<? extends B>> sets);
}
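ISetOperations only declares the cartesian product. As an illustration, a small generic implementation with the same signature; this is a sketch, not the project's actual set-operations class:

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class SimpleSetOperations implements ISetOperations {

    @Override
    public <B> Set<List<B>> cartesianProduct(List<? extends Set<? extends B>> sets) {
        Set<List<B>> result = new HashSet<>();
        result.add(new ArrayList<>());                 // product of zero sets: one empty tuple
        for (Set<? extends B> set : sets) {
            Set<List<B>> next = new HashSet<>();
            for (List<B> tuple : result) {
                for (B element : set) {
                    List<B> extended = new ArrayList<>(tuple);
                    extended.add(element);
                    next.add(extended);
                }
            }
            result = next;
        }
        return result;
    }
}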
@@ -10,26 +10,29 @@ import java.util.stream.Collectors;
|
||||
|
||||
/**
|
||||
* Standard unification algorithm (e.g. Robinson, Paterson-Wegman, Martelli-Montanari)
|
||||
*
|
||||
* @author Florian Steurer
|
||||
*/
|
||||
public interface IUnify {
|
||||
|
||||
/**
|
||||
* Finds the most general unifier sigma of the set {t1 =. t1',...,tn =. tn'} so that
|
||||
* sigma(t1) = sigma(t1') , ... sigma(tn) = sigma(tn').
|
||||
* @param terms The set of terms to be unified
|
||||
* @return An optional of the most general unifier if it exists or an empty optional if there is no unifier.
|
||||
*/
|
||||
public Optional<Unifier> unify(Set<UnifyType> terms);
|
||||
|
||||
/**
|
||||
* Finds the most general unifier sigma of the set {t1 =. t1',...,tn =. tn'} so that
|
||||
* sigma(t1) = sigma(t1') , ... sigma(tn) = sigma(tn').
|
||||
* @param terms The set of terms to be unified
|
||||
* @return An optional of the most general unifier if it exists or an empty optional if there is no unifier.
|
||||
*/
|
||||
default public Optional<Unifier> unify(UnifyType... terms) {
|
||||
return unify(Arrays.stream(terms).collect(Collectors.toSet()));
|
||||
}
|
||||
|
||||
}
|
||||
/**
|
||||
* Finds the most general unifier sigma of the set {t1 =. t1',...,tn =. tn'} so that
|
||||
* sigma(t1) = sigma(t1') , ... sigma(tn) = sigma(tn').
|
||||
*
|
||||
* @param terms The set of terms to be unified
|
||||
* @return An optional of the most general unifier if it exists or an empty optional if there is no unifier.
|
||||
*/
|
||||
Optional<Unifier> unify(Set<UnifyType> terms);
|
||||
|
||||
/**
|
||||
* Finds the most general unifier sigma of the set {t1 =. t1',...,tn =. tn'} so that
|
||||
* sigma(t1) = sigma(t1') , ... sigma(tn) = sigma(tn').
|
||||
*
|
||||
* @param terms The set of terms to be unified
|
||||
* @return An optional of the most general unifier if it exists or an empty optional if there is no unifier.
|
||||
*/
|
||||
default Optional<Unifier> unify(UnifyType... terms) {
|
||||
return unify(Arrays.stream(terms).collect(Collectors.toSet()));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
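The varargs default method simply wraps its arguments into a set, so call sites such as stdUnify.unify(tqp, thetaPrime) in the TypeUnifyTask hunk need no explicit set construction. A sketch of consuming the result, relying only on what the diff itself shows (Unifier iterates over its Entry<PlaceholderType, UnifyType> mappings):

Optional<Unifier> opt = stdUnify.unify(tqp, thetaPrime);
if (opt.isPresent()) {
    Unifier unifier = opt.get();
    for (Entry<PlaceholderType, UnifyType> sigma : unifier) {
        // build substitution pairs from sigma.getKey() / sigma.getValue(), as in the hunk above
    }
}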
@@ -4,14 +4,14 @@ import de.dhbwstuttgart.typeinference.unify.model.*;

public interface UnifyTypeVisitor<T> {

-    public ReferenceType visit(ReferenceType refty, T ht);
-
-    public PlaceholderType visit(PlaceholderType phty, T ht);
-
-    public FunNType visit(FunNType funnty, T ht);
-
-    public SuperType visit(SuperType suty, T ht);
-
-    public ExtendsType visit(ExtendsType extty, T ht);
+    ReferenceType visit(ReferenceType refty, T ht);
+
+    PlaceholderType visit(PlaceholderType phty, T ht);
+
+    FunNType visit(FunNType funnty, T ht);
+
+    SuperType visit(SuperType suty, T ht);
+
+    ExtendsType visit(ExtendsType extty, T ht);

}
@@ -6,88 +6,86 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor;
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
* An extends wildcard type "? extends T".
|
||||
* An extends wildcard type "? extends T".
|
||||
*/
|
||||
public final class ExtendsType extends WildcardType {
|
||||
|
||||
public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) {
|
||||
return visitor.visit(this, ht);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new extends wildcard type.
|
||||
* @param extendedType The extended type e.g. Integer in "? extends Integer"
|
||||
*/
|
||||
public ExtendsType(UnifyType extendedType) {
|
||||
super("? extends " + extendedType.getName(), extendedType);
|
||||
if (extendedType instanceof ExtendsType) {
|
||||
System.out.print("");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The extended type e.g. Integer in "? extends Integer"
|
||||
*/
|
||||
public UnifyType getExtendedType() {
|
||||
return wildcardedType;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the type parameters of the wildcarded type and returns a new extendstype that extends that type.
|
||||
*/
|
||||
@Override
|
||||
public UnifyType setTypeParams(TypeParams newTp) {
|
||||
UnifyType newType = wildcardedType.setTypeParams(newTp);
|
||||
if(newType == wildcardedType)
|
||||
return this; // Reduced the amount of objects created
|
||||
return new ExtendsType(wildcardedType.setTypeParams(newTp));
|
||||
}
|
||||
/**
|
||||
     * Creates a new extends wildcard type.
     *
     * @param extendedType The extended type e.g. Integer in "? extends Integer"
     */
    public ExtendsType(UnifyType extendedType) {
        super("? extends " + extendedType.getName(), extendedType);
        if (extendedType instanceof ExtendsType) {
            System.out.print("");
        }
    }

    @Override
    Set<UnifyType> smArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
        return fc.smArg(this, fBounded);
    }

    public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) {
        return visitor.visit(this, ht);
    }

    @Override
    Set<UnifyType> grArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
        return fc.grArg(this, fBounded);
    }

    /**
     * The extended type e.g. Integer in "? extends Integer"
     */
    public UnifyType getExtendedType() {
        return wildcardedType;
    }

    @Override
    UnifyType apply(Unifier unif) {
        UnifyType newType = wildcardedType.apply(unif);
        if(newType.hashCode() == wildcardedType.hashCode() && newType.equals(wildcardedType))
            return this; // Reduced the amount of objects created
        return new ExtendsType(newType);
    }

    @Override
    public int hashCode() {
        /*
         * It is important that the prime that is added is different to the prime added in hashCode() of SuperType.
         * Otherwise ? extends T and ? super T have the same hashCode() for every Type T.
         */
        return wildcardedType.hashCode() + 229;
    }

    @Override
    public boolean equals(Object obj) {
        if(!(obj instanceof ExtendsType))
            return false;

        if(obj.hashCode() != this.hashCode())
            return false;

        ExtendsType other = (ExtendsType) obj;

        return other.getWildcardedType().equals(wildcardedType);
    }

    @Override
    public String toString() {
        return "? extends " + wildcardedType;
    }

    /**
     * Sets the type parameters of the wildcarded type and returns a new ExtendsType that extends that type.
     */
    @Override
    public UnifyType setTypeParams(TypeParams newTp) {
        UnifyType newType = wildcardedType.setTypeParams(newTp);
        if (newType == wildcardedType)
            return this; // Reduced the amount of objects created
        return new ExtendsType(wildcardedType.setTypeParams(newTp));
    }

}

    @Override
    Set<UnifyType> smArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
        return fc.smArg(this, fBounded);
    }

    @Override
    Set<UnifyType> grArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
        return fc.grArg(this, fBounded);
    }

    @Override
    UnifyType apply(Unifier unifier) {
        UnifyType newType = wildcardedType.apply(unifier);
        if (newType.hashCode() == wildcardedType.hashCode() && newType.equals(wildcardedType))
            return this; // Reduced the amount of objects created
        return new ExtendsType(newType);
    }

    @Override
    public int hashCode() {
        /*
         * It is important that the prime that is added is different to the prime added in hashCode() of SuperType.
         * Otherwise ? extends T and ? super T have the same hashCode() for every Type T.
         */
        return wildcardedType.hashCode() + 229;
    }

    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof ExtendsType other))
            return false;

        if (obj.hashCode() != this.hashCode())
            return false;

        return other.getWildcardedType().equals(wildcardedType);
    }

    @Override
    public String toString() {
        return "? extends " + wildcardedType;
    }

}
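A minimal usage sketch (not part of the commit) for the wildcard model above. It assumes the ReferenceType varargs constructor shown later in this diff and a SuperType(UnifyType) constructor analogous to ExtendsType; the 229 offset in hashCode() is what keeps "? extends T" and "? super T" apart.

import de.dhbwstuttgart.typeinference.unify.model.ExtendsType;
import de.dhbwstuttgart.typeinference.unify.model.ReferenceType;
import de.dhbwstuttgart.typeinference.unify.model.SuperType;

// Sketch: builds "? extends java.lang.Integer" and "? super java.lang.Integer"
// and checks that the two wildcards neither compare equal nor share a hash code.
public class ExtendsTypeSketch {
    public static void main(String[] args) {
        ReferenceType integer = new ReferenceType("java.lang.Integer");
        ExtendsType extendsInteger = new ExtendsType(integer);
        SuperType superInteger = new SuperType(integer); // assumed constructor, mirrors ExtendsType

        System.out.println(extendsInteger);                               // "? extends ..." per toString() above
        System.out.println(extendsInteger.equals(superInteger));          // false: equals() checks instanceof ExtendsType
        System.out.println(extendsInteger.hashCode() != superInteger.hashCode()); // true: different primes added
    }
}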
File diff suppressed because it is too large
@@ -5,98 +5,99 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Set;

/**
 * A real function type in Java.
 *
 * @author Florian Steurer
 */
public class FunNType extends UnifyType {

    public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) {
        return visitor.visit(this, ht);
    }

    /**
     * Creates a FunN-Type with the specified TypeParameters.
     */
    protected FunNType(TypeParams p) {
        super("Fun"+(p.size()-1)+"$$", p);
    }

    /**
     * Creates a FunN-Type with the specified TypeParameters.
     */
    protected FunNType(TypeParams p) {
        super("Fun" + (p.size() - 1) + "$$", p);
    }

    /**
     * Creates a new FunNType.
     * @param tp The parameters of the type.
     * @return A FunNType.
     * @throws IllegalArgumentException is thrown when there are too few type parameters or there are wildcard-types.
     */
    public static FunNType getFunNType(TypeParams tp) throws IllegalArgumentException {
        if(tp.size() == 0)
            throw new IllegalArgumentException("FunNTypes need at least one type parameter");
        for(UnifyType t : tp)
            if(t instanceof WildcardType)
                throw new IllegalArgumentException("Invalid TypeParams for a FunNType: " + tp);
        return new FunNType(tp);
    }

    /**
     * Returns the degree of the function type, e.g. 2 for FunN<Integer, Integer, Integer>.
     */
    public int getN() {
        return typeParams.size()-1;
    }

    @Override
    public UnifyType setTypeParams(TypeParams newTp) {
        if(newTp.hashCode() == typeParams.hashCode() && newTp.equals(typeParams))
            return this;
        return getFunNType(newTp);
    }

    @Override
    Set<UnifyType> smArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
        return fc.smArg(this, fBounded);
    }

    /**
     * Creates a new FunNType.
     *
     * @param tp The parameters of the type.
     * @return A FunNType.
     * @throws IllegalArgumentException is thrown when there are too few type parameters or there are wildcard-types.
     */
    public static FunNType getFunNType(TypeParams tp) throws IllegalArgumentException {
        if (tp.size() == 0)
            throw new IllegalArgumentException("FunNTypes need at least one type parameter");
        for (UnifyType t : tp)
            if (t instanceof WildcardType)
                throw new IllegalArgumentException("Invalid TypeParams for a FunNType: " + tp);
        return new FunNType(tp);
    }

    @Override
    Set<UnifyType> grArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
        return fc.grArg(this, fBounded);
    }

    @Override
    UnifyType apply(Unifier unif) {
        // TODO this bypasses the validation of the type parameters.
        // Wildcard types can be unified into FunNTypes.
        TypeParams newParams = typeParams.apply(unif);
        if(newParams.hashCode() == typeParams.hashCode() && newParams.equals(typeParams))
            return this;

        return new FunNType(newParams);
    }

    public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) {
        return visitor.visit(this, ht);
    }

    @Override
    public Boolean wrongWildcard() {
        return (new ArrayList<UnifyType>(Arrays.asList(getTypeParams()
            .get())).stream().filter(x -> (x instanceof WildcardType)).findFirst().isPresent());
    }

    @Override
    public int hashCode() {
        return 181 + typeParams.hashCode();
    }


    @Override
    public boolean equals(Object obj) {
        if(!(obj instanceof FunNType))
            return false;

        if(obj.hashCode() != this.hashCode())
            return false;

        FunNType other = (FunNType) obj;

    /**
     * Returns the degree of the function type, e.g. 2 for FunN<Integer, Integer, Integer>.
     */
    public int getN() {
        return typeParams.size() - 1;
    }

    @Override
    public UnifyType setTypeParams(TypeParams newTp) {
        if (newTp.hashCode() == typeParams.hashCode() && newTp.equals(typeParams))
            return this;
        return getFunNType(newTp);
    }

    @Override
    Set<UnifyType> smArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
        return fc.smArg(this, fBounded);
    }

    @Override
    Set<UnifyType> grArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
        return fc.grArg(this, fBounded);
    }

    @Override
    UnifyType apply(Unifier unifier) {
        // TODO this bypasses the validation of the type parameters.
        // Wildcard types can be unified into FunNTypes.
        TypeParams newParams = typeParams.apply(unifier);
        if (newParams.hashCode() == typeParams.hashCode() && newParams.equals(typeParams))
            return this;

        return new FunNType(newParams);
    }

    @Override
    public Boolean wrongWildcard() {
        List<UnifyType> typeParamList = new ArrayList<>(Arrays.asList(getTypeParams().get()));
        return typeParamList.stream().anyMatch(x -> (x instanceof WildcardType));
    }

    @Override
    public int hashCode() {
        return 181 + typeParams.hashCode();
    }


    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof FunNType other))
            return false;

        if (obj.hashCode() != this.hashCode())
            return false;

        return other.getTypeParams().equals(typeParams);
    }

        return other.getTypeParams().equals(typeParams);
    }

}
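A short usage sketch (not part of the commit) for the factory above, assuming the TypeParams varargs constructor and the PlaceholderType/ReferenceType constructors shown elsewhere in this diff. getFunNType rejects empty parameter lists and wildcard arguments, so a degree-1 function type needs one argument type plus the return type.

import de.dhbwstuttgart.typeinference.unify.model.FunNType;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.ReferenceType;
import de.dhbwstuttgart.typeinference.unify.model.TypeParams;

// Sketch: a Fun1$$ type String -> a, i.e. getN() == 1.
public class FunNTypeSketch {
    public static void main(String[] args) {
        TypeParams params = new TypeParams(
                new ReferenceType("java.lang.String"),
                new PlaceholderType("a"));
        FunNType fun = FunNType.getFunNType(params);

        System.out.println(fun.getName()); // "Fun1$$" per the constructor above
        System.out.println(fun.getN());    // 1

        // A wildcard argument would be rejected with an IllegalArgumentException,
        // e.g. getFunNType(new TypeParams(new ExtendsType(new ReferenceType("java.lang.Integer")))).
    }
}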
@@ -6,113 +6,114 @@ import java.util.stream.Collectors;

/**
 * A node of a directed graph.
 * @author Florian Steurer
 *
 * @param <T> The type of the content of the node.
 * @author Florian Steurer
 */
class Node<T> {

    /**
     * The content of the node.
     */
    private T content;

    /**
     * The set of predecessors
     */
    private HashSet<Node<T>> predecessors = new HashSet<>();

    /**
     * The set of descendants
     */
    private HashSet<Node<T>> descendants = new HashSet<>();

    /**
     * Creates a node containing the specified content.
     */
    public Node(T content) {
        this.content = content;
    }

    /**
     * Adds a directed edge from this node to the descendant (this -> descendant)
     */
    public void addDescendant(Node<T> descendant) {
        if(descendants.contains(descendant))
            return;

        descendants.add(descendant);
        descendant.addPredecessor(this);
    }

    /**
     * Adds some directed edges from this node to the descendant (this -> descendant)
     */
    public void addAllDescendant(Set<Node<T>> allDescendants) {
        for(Node<T> descendant: allDescendants) {
            addDescendant(descendant);
        }
    }

    /**
     * Adds a directed edge from the predecessor to this node (predecessor -> this)
     */
    public void addPredecessor(Node<T> predecessor) {
        if(predecessors.contains(predecessor))
            return;

        predecessors.add(predecessor);
        predecessor.addDescendant(this);
    }

    /**
     * Adds some directed edges from the predecessor to this node (predecessor -> this)
     */
    public void addAllPredecessor(Set<Node<T>> allPredecessors) {
        for(Node<T> predecessor: allPredecessors) {
            addPredecessor(predecessor);
        }
    }

    /**
     * The content of this node.
     */
    public T getContent() {
        return content;
    }

    /**
     * Returns all predecessors (nodes that have a directed edge to this node)
     */
    public Set<Node<T>> getPredecessors() {
        return predecessors;
    }

    /**
     * Returns all descendants. All nodes M, where there is an edge from this node to the node M.
     * @return
     */
    public Set<Node<T>> getDescendants() {
        return descendants;
    }

    /**
     * Retrieves the content of all descendants.
     */
    public Set<T> getContentOfDescendants() {
        return descendants.stream().map(x -> x.getContent()).collect(Collectors.toSet());
    }

    /**
     * Retrieves the content of all predecessors.
     */
    public Set<T> getContentOfPredecessors() {
        return predecessors.stream().map(x -> x.getContent()).collect(Collectors.toSet());
    }

    @Override
    public String toString() {
        return "Elem: Node(" + content.toString() + ")\nPrec: " + getContentOfPredecessors().toString()
            + "\nDesc: " + getContentOfDescendants().toString() + "\n\n";
    }

    /**
     * The content of the node.
     */
    private final T content;

    /**
     * The set of predecessors
     */
    private final HashSet<Node<T>> predecessors = new HashSet<>();

    /**
     * The set of descendants
     */
    private final HashSet<Node<T>> descendants = new HashSet<>();

    /**
     * Creates a node containing the specified content.
     */
    public Node(T content) {
        this.content = content;
    }

    /**
     * Adds a directed edge from this node to the descendant (this -> descendant)
     */
    public void addDescendant(Node<T> descendant) {
        if (descendants.contains(descendant))
            return;

        descendants.add(descendant);
        descendant.addPredecessor(this);
    }

    /**
     * Adds some directed edges from this node to the descendant (this -> descendant)
     */
    public void addAllDescendant(Set<Node<T>> allDescendants) {
        for (Node<T> descendant : allDescendants) {
            addDescendant(descendant);
        }
    }

    /**
     * Adds a directed edge from the predecessor to this node (predecessor -> this)
     */
    public void addPredecessor(Node<T> predecessor) {
        if (predecessors.contains(predecessor))
            return;

        predecessors.add(predecessor);
        predecessor.addDescendant(this);
    }

    /**
     * Adds some directed edges from the predecessor to this node (predecessor -> this)
     */
    public void addAllPredecessor(Set<Node<T>> allPredecessors) {
        for (Node<T> predecessor : allPredecessors) {
            addPredecessor(predecessor);
        }
    }

    /**
     * @return The content of this node.
     */
    public T getContent() {
        return content;
    }

    /**
     * @return All predecessors (nodes that have a directed edge to this node)
     */
    public Set<Node<T>> getPredecessors() {
        return predecessors;
    }

    /**
     * @return All nodes M, where there is an edge from this node to the node M.
     */
    public Set<Node<T>> getDescendants() {
        return descendants;
    }

    /**
     * @return The content of all descendants.
     */
    public Set<T> getContentOfDescendants() {
        return descendants.stream().map(Node::getContent).collect(Collectors.toSet());
    }

    /**
     * @return The content of all predecessors.
     */
    public Set<T> getContentOfPredecessors() {
        return predecessors.stream().map(Node::getContent).collect(Collectors.toSet());
    }

    @Override
    public String toString() {
        return
            "Elem: Node(" + content.toString() + ")\n" +
            "Prec: " + getContentOfPredecessors().toString() + "\n" +
            "Desc: " + getContentOfDescendants().toString() + "\n\n";
    }
}
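A small sketch (not part of the commit) of the Node API documented above; since Node is package-private, it would have to live in the same package. Adding an edge registers it on both endpoints, so predecessor and descendant sets stay symmetric.

import java.util.Set;

// Sketch: a three-node graph  a -> b -> c.
public class NodeSketch {
    public static void main(String[] args) {
        Node<String> a = new Node<>("a");
        Node<String> b = new Node<>("b");
        Node<String> c = new Node<>("c");

        a.addDescendant(b);   // also adds a as a predecessor of b
        c.addPredecessor(b);  // also adds c as a descendant of b

        Set<String> reachableFromB = b.getContentOfDescendants();  // {"c"}
        Set<String> pointingAtB = b.getContentOfPredecessors();    // {"a"}
        System.out.println(reachableFromB + " " + pointingAtB);
    }
}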
@@ -1,85 +0,0 @@
package de.dhbwstuttgart.typeinference.unify.model;

import java.util.*;

public abstract class OrderingExtend<T> extends com.google.common.collect.Ordering<T> {

    public List<T> maxElements(Iterable<T> iterable) {
        ArrayList<T> ret = new ArrayList<>();
        while (iterable.iterator().hasNext()) {
            Set<T> believe = new HashSet<>();

            T max = max(iterable);
            ret.add(max);

            Iterator<T> it = iterable.iterator();
            while (it.hasNext()) {
                T elem = it.next();
                if (!(compare(max, elem) == 1) && !max.equals(elem)) {
                    believe.add(elem);
                }
            }
            iterable = believe;
        }
        return ret;
    }

    public List<T> minElements(Iterable<T> iterable) {
        ArrayList<T> ret = new ArrayList<>();
        while (iterable.iterator().hasNext()) {
            Set<T> believe = new HashSet<>();

            T min = min(iterable);
            ret.add(min);

            Iterator<T> it = iterable.iterator();
            while (it.hasNext()) {
                T elem = it.next();
                if (!(compare(min, elem) == -1) && !min.equals(elem)) {
                    believe.add(elem);
                }
            }
            iterable = believe;
        }
        return ret;
    }


    public List<T> smallerEqThan(T elem, Iterable<T> iterable) {
        List<T> ret = smallerThan(elem, iterable);
        ret.add(elem);
        return ret;

    }

    public List<T> smallerThan(T elem, Iterable<T> iterable) {
        ArrayList<T> ret = new ArrayList<>();
        Iterator<T> it = iterable.iterator();
        while (it.hasNext()) {
            T itElem = it.next();
            if (!itElem.equals(elem) && compare(elem, itElem) == 1) {
                ret.add(itElem);
            }
        }
        return ret;
    }

    public List<T> greaterEqThan(T elem, Iterable<T> iterable) {
        List<T> ret = greaterThan(elem, iterable);
        ret.add(elem);
        return ret;

    }

    public List<T> greaterThan(T elem, Iterable<T> iterable) {
        ArrayList<T> ret = new ArrayList<>();
        Iterator<T> it = iterable.iterator();
        while (it.hasNext()) {
            T itElem = it.next();
            if (!itElem.equals(elem) && (compare(elem, itElem) == -1)) {
                ret.add(itElem);
            }
        }
        return ret;
    }
}
@@ -1,446 +0,0 @@
package de.dhbwstuttgart.typeinference.unify.model;

import de.dhbwstuttgart.typeinference.unify.Match;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.util.Pair;

import java.io.IOException;
import java.util.*;
import java.util.function.BinaryOperator;
import java.util.stream.Collectors;
import java.util.stream.Stream;



public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {

    protected IFiniteClosure fc;

    public OrderingUnifyPair(IFiniteClosure fc) {
        this.fc = fc;
    }

    /*
     * Compares pairs (a =. Theta) and (a =. Theta')
     * by calling compare(Theta, Theta').
     */
    public int compareEq (UnifyPair left, UnifyPair right) {
        try {
            //if (left.getRhsType() instanceof WildcardType || right.getRhsType() instanceof WildcardType) {//PL 2019-01-12 swapped
            if (((PlaceholderType)left.getLhsType()).isInnerType() && ((PlaceholderType)right.getLhsType()).isInnerType()) {
                return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOTWC);
            }
            else {
                return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOT);
            }}
        catch (ClassCastException e) {
            try {
                ((FiniteClosure)fc).logFile.write("ClassCastException: " + left.toString() + " " + left.getGroundBasePair() + "\n\n");
                ((FiniteClosure)fc).logFile.flush();
            }
            catch (IOException ie) {
            }
            return -99;
        }
    }

    /*
    public int compareEq (UnifyPair left, UnifyPair right) {
        if (left == null || right == null)
            System.out.println("Fehler");
        if (left.getLhsType() instanceof PlaceholderType) {
            return fc.compare(left.getRhsType(), right.getRhsType(), left.getPairOp());
        }
        else {
            return fc.compare(left.getLhsType(), right.getLhsType(), left.getPairOp());
        }
    }
    */

    public Pair<Integer,Set<UnifyPair>> compare (UnifyType left, UnifyType right) {
        UnifyPair up;
        if (left instanceof WildcardType || right instanceof WildcardType) {
            up = new UnifyPair(left, right, PairOperator.SMALLERDOTWC);
            if (((left instanceof ExtendsType)
                && (((ExtendsType)left).getExtendedType().getName().equals("java.util.Vector"))
                && (((ReferenceType)((ExtendsType)left).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType)) ||
                ((right instanceof ExtendsType)
                && (((ExtendsType)right).getExtendedType().getName().equals("java.util.Vector"))
                && (((ReferenceType)((ExtendsType)right).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType)))
            {
                System.out.println("");
            }
            if (((right instanceof SuperType) && (((SuperType)right).getSuperedType().getName().equals("java.lang.Object")))
                ||((left instanceof SuperType) && (((SuperType)left).getSuperedType().getName().equals("java.lang.Object"))))
            {
                System.out.println("");
            }
        }
        else {
            up = new UnifyPair(left, right, PairOperator.SMALLERDOT);
        }
        TypeUnifyTask unifyTask = new TypeUnifyTask();
        HashSet<UnifyPair> hs = new HashSet<>();
        hs.add(up);
        Set<UnifyPair> smallerRes = unifyTask.applyTypeUnificationRules(hs, fc);
        long smallerLen = smallerRes.stream().filter(x -> !(x.getLhsType() instanceof PlaceholderType && x.getRhsType() instanceof PlaceholderType)).count();
        if (smallerLen == 0) return new Pair<>(-1, smallerRes);
        else {
            if (left instanceof WildcardType || right instanceof WildcardType) {
                up = new UnifyPair(right, left, PairOperator.SMALLERDOTWC);
                if (((left instanceof ExtendsType)
                    && (((ExtendsType)left).getExtendedType().getName().equals("java.util.Vector"))
                    && (((ReferenceType)((ExtendsType)left).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType)) ||
                    ((right instanceof ExtendsType)
                    && (((ExtendsType)right).getExtendedType().getName().equals("java.util.Vector"))
                    && (((ReferenceType)((ExtendsType)right).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType)))
                {
                    System.out.println("");
                }
                if (right instanceof SuperType)
                {
                    System.out.println("");
                }
            }
            else {
                up = new UnifyPair(right, left, PairOperator.SMALLERDOT);
            }
            //TypeUnifyTask unifyTask = new TypeUnifyTask();
            hs = new HashSet<>();
            hs.add(up);
            Set<UnifyPair> greaterRes = unifyTask.applyTypeUnificationRules(hs, fc);
            long greaterLen = greaterRes.stream().filter(x -> !(x.getLhsType() instanceof PlaceholderType && x.getRhsType() instanceof PlaceholderType)).count();
            if (greaterLen == 0) return new Pair<>(1, greaterRes);
            else return new Pair<>(0, new HashSet<>());
        }
    }

    /* TODO still needs to be verified PL 2018-03-21
     * (non-Javadoc)
     * leads to errors in Arrays.sort (contract not fulfilled)
     * @see com.google.common.collect.Ordering#compare(java.lang.Object, java.lang.Object)
     */
    public int compare (Set<UnifyPair> leftpara, Set<UnifyPair> rightpara) {

        Set<UnifyPair> left = new HashSet<>(leftpara);
        Set<UnifyPair> right = new HashSet<>(rightpara);

        /*
        //pairop = PairOperator.SMALLERDOTWC;
        List<UnifyType> al = new ArrayList<>();
        PlaceholderType xx = PlaceholderType.freshPlaceholder();
        al.add(xx);
        UnifyType t1 = new ExtendsType(new ReferenceType("Vector", new TypeParams(al)));

        //PlaceholderType yy =new PlaceholderType("yy");
        List<UnifyType> alr = new ArrayList<>();
        UnifyType exx = new ExtendsType(xx);
        alr.add(exx);
        UnifyType t2 = new ExtendsType(new ReferenceType("Vector", new TypeParams(alr)));

        PlaceholderType a = PlaceholderType.freshPlaceholder();
        a.setInnerType(true);
        UnifyPair p1 = new UnifyPair(a, t1, PairOperator.SMALLERDOTWC);
        PlaceholderType b = PlaceholderType.freshPlaceholder();
        b.setInnerType(true);
        UnifyPair p2 = new UnifyPair(b, t2, PairOperator.SMALLERDOTWC);

        List<UnifyType> al3 = new ArrayList<>();
        al3.add(a);

        List<UnifyType> al4 = new ArrayList<>();
        al4.add(b);

        UnifyPair p3 = new UnifyPair(new PlaceholderType("c"), new ReferenceType("Vector", new TypeParams(al3)), PairOperator.EQUALSDOT);
        UnifyPair p4 = new UnifyPair(new PlaceholderType("c"), new ReferenceType("Vector", new TypeParams(al4)), PairOperator.EQUALSDOT);

        right = new HashSet<>();
        right.add(p1);
        right.add(p3);
        left = new HashSet<>();
        left.add(p2);
        left.add(p4);
        */


        Set<UnifyPair> lefteq = left.stream()
            .filter(x -> (x.getLhsType() instanceof PlaceholderType && x.getPairOp() == PairOperator.EQUALSDOT))
            .collect(Collectors.toCollection(HashSet::new));
        Set<UnifyPair> righteq = right.stream()
            .filter(x -> (x.getLhsType() instanceof PlaceholderType && x.getPairOp() == PairOperator.EQUALSDOT))
            .collect(Collectors.toCollection(HashSet::new));
        Set<UnifyPair> leftle = left.stream()
            .filter(x -> ((x.getLhsType() instanceof PlaceholderType || x.getRhsType() instanceof PlaceholderType)
                && x.getPairOp() == PairOperator.SMALLERDOT))
            .collect(Collectors.toCollection(HashSet::new));
        Set<UnifyPair> rightle = right.stream()
            .filter(x -> ((x.getLhsType() instanceof PlaceholderType || x.getRhsType() instanceof PlaceholderType)
                && x.getPairOp() == PairOperator.SMALLERDOT))
            .collect(Collectors.toCollection(HashSet::new));
        Set<UnifyPair> leftlewc = left.stream()
            .filter(x -> ((x.getLhsType() instanceof PlaceholderType || x.getRhsType() instanceof PlaceholderType)
                && x.getPairOp() == PairOperator.SMALLERDOTWC))
            .collect(Collectors.toCollection(HashSet::new));
        Set<UnifyPair> rightlewc = right.stream()
            .filter(x -> ((x.getLhsType() instanceof PlaceholderType || x.getRhsType() instanceof PlaceholderType)
                && x.getPairOp() == PairOperator.SMALLERDOTWC))
            .collect(Collectors.toCollection(HashSet::new));
        //System.out.println(left.toString());
        //Case 2
        //if (lefteq.iterator().next().getLhsType().getName().equals("AJO")) {
        //    System.out.print("");
        //}

        //Oder-Constraint (or-constraint)
        Set<UnifyPair> leftBase = left.stream().map(x -> x.getGroundBasePair()).collect(Collectors.toCollection(HashSet::new));
        Set<UnifyPair> rightBase = right.stream().map(x -> x.getGroundBasePair()).collect(Collectors.toCollection(HashSet::new));

        Set<UnifyPair> lefteqOder = left.stream()
            .filter(x -> { UnifyPair y = x.getGroundBasePair();
                /*try {
                    ((FiniteClosure)fc).logFile.write("leftBase: " + leftBase.toString() +"\n");
                    ((FiniteClosure)fc).logFile.write("rightBase: " + rightBase.toString() +"\n\n");
                    ((FiniteClosure)fc).logFile.write("left: " + left.toString() +"\n");
                    ((FiniteClosure)fc).logFile.write("right: " + right.toString() +"\n\n");
                    ((FiniteClosure)fc).logFile.write("y: " + y.toString() +"\n");
                    ((FiniteClosure)fc).logFile.write("y.getLhsType() : " + y.getLhsType() .toString() +"\n\n");
                    ((FiniteClosure)fc).logFile.write("y.getRhsType(): " + y.getRhsType().toString() +"\n");
                    ((FiniteClosure)fc).logFile.write("x.getPairOp(): " + x.getPairOp().toString() +"\n\n");
                }
                catch (IOException ie) {
                } */
                return (y.getLhsType() instanceof PlaceholderType &&
                    !(y.getRhsType() instanceof PlaceholderType) &&
                    x.getPairOp() == PairOperator.EQUALSDOT);})
            .collect(Collectors.toCollection(HashSet::new));
        left.removeAll(lefteqOder);
        Set<UnifyPair> righteqOder = right.stream()
            .filter(x -> { UnifyPair y = x.getGroundBasePair();
                return (y.getLhsType() instanceof PlaceholderType &&
                    !(y.getRhsType() instanceof PlaceholderType) &&
                    x.getPairOp() == PairOperator.EQUALSDOT);})
            .collect(Collectors.toCollection(HashSet::new));
        right.removeAll(righteqOder);
        Set<UnifyPair> lefteqRet = left.stream()
            .filter(x -> { UnifyPair y = x.getGroundBasePair();
                return (y.getRhsType() instanceof PlaceholderType &&
                    ((PlaceholderType)y.getRhsType()).getOrCons() == (byte)-1);})
            .collect(Collectors.toCollection(HashSet::new));
        left.removeAll(lefteqRet);
        Set<UnifyPair> righteqRet = right.stream()
            .filter(x -> { UnifyPair y = x.getGroundBasePair();
                return (y.getRhsType() instanceof PlaceholderType &&
                    ((PlaceholderType)y.getRhsType()).getOrCons() == (byte)-1);})
            .collect(Collectors.toCollection(HashSet::new));
        right.removeAll(righteqRet);
        Set<UnifyPair> leftleOder = left.stream()
            .filter(x -> (x.getPairOp() == PairOperator.SMALLERDOT))
            .collect(Collectors.toCollection(HashSet::new));
        Set<UnifyPair> rightleOder = right.stream()
            .filter(x -> (x.getPairOp() == PairOperator.SMALLERDOT))
            .collect(Collectors.toCollection(HashSet::new));

        /*
        synchronized(this) {
        try {
            ((FiniteClosure)fc).logFile.write("leftBase: " + leftBase.toString() +"\n");
            ((FiniteClosure)fc).logFile.write("rightBase: " + rightBase.toString() +"\n\n");
            ((FiniteClosure)fc).logFile.write("left: " + left.toString() +"\n");
            ((FiniteClosure)fc).logFile.write("right: " + right.toString() +"\n\n");
            ((FiniteClosure)fc).logFile.write("lefteqOder: " + lefteqOder.toString() +"\n");
            ((FiniteClosure)fc).logFile.write("righteqOder: " + righteqOder.toString() +"\n\n");
            ((FiniteClosure)fc).logFile.write("lefteqRet: " + lefteqRet.toString() +"\n");
            ((FiniteClosure)fc).logFile.write("righteqRet: " + righteqRet.toString() +"\n\n");
            ((FiniteClosure)fc).logFile.write("leftleOder: " + leftleOder.toString() +"\n");
            ((FiniteClosure)fc).logFile.write("rightleOder: " + rightleOder.toString() +"\n\n");
            ((FiniteClosure)fc).logFile.flush();
        }
        catch (IOException ie) {
        }
        }
        */


        Integer compareEq;
        if (lefteqOder.size() == 1 && righteqOder.size() == 1 && lefteqRet.size() == 1 && righteqRet.size() == 1) {
            Match m = new Match();
            if ((compareEq = compareEq(lefteqOder.iterator().next().getGroundBasePair(), righteqOder.iterator().next().getGroundBasePair())) == -1) {
                ArrayList<UnifyPair> matchList =
                    rightleOder.stream().map(x -> {
                        UnifyPair leftElem = leftleOder.stream()
                            .filter(y -> y.getGroundBasePair().getLhsType().equals(x.getGroundBasePair().getLhsType()))
                            .findAny().get();
                        return new UnifyPair(x.getRhsType(), leftElem.getRhsType(), PairOperator.EQUALSDOT);})
                    .collect(Collectors.toCollection(ArrayList::new));
                if (m.match(matchList).isPresent()) {
                    //try { ((FiniteClosure)fc).logFile.write("result1: -1 \n\n"); } catch (IOException ie) {}
                    return -1;
                }
                else {
                    //try { ((FiniteClosure)fc).logFile.write("result1: 0 \n\n"); } catch (IOException ie) {}
                    return 0;
                }
            } else if (compareEq == 1) {
                ArrayList<UnifyPair> matchList =
                    leftleOder.stream().map(x -> {
                        UnifyPair rightElem = rightleOder.stream()
                            .filter(y ->
                                y.getGroundBasePair().getLhsType().equals(x.getGroundBasePair().getLhsType()))
                            .findAny().get();
                        return new UnifyPair(x.getRhsType(), rightElem.getRhsType(), PairOperator.EQUALSDOT);})
                    .collect(Collectors.toCollection(ArrayList::new));
                if (m.match(matchList).isPresent()) {
                    //try { ((FiniteClosure)fc).logFile.write("result2: 1 \n\n"); } catch (IOException ie) {}
                    return 1;
                }
                else {
                    //try { ((FiniteClosure)fc).logFile.write("result2: 0 \n\n"); } catch (IOException ie) {}
                    return 0;
                }
            } else {
                /*
                synchronized(this) {
                try {
                    ((FiniteClosure)fc).logFile.write("leftBase: " + leftBase.toString() +"\n");
                    ((FiniteClosure)fc).logFile.write("rightBase: " + rightBase.toString() +"\n\n");
                    ((FiniteClosure)fc).logFile.write("left: " + left.toString() +"\n");
                    ((FiniteClosure)fc).logFile.write("right: " + right.toString() +"\n\n");
                    ((FiniteClosure)fc).logFile.write("lefteqOder: " + lefteqOder.toString() +"\n");
                    ((FiniteClosure)fc).logFile.write("righteqOder: " + righteqOder.toString() +"\n\n");
                    ((FiniteClosure)fc).logFile.write("lefteqRet: " + lefteqRet.toString() +"\n");
                    ((FiniteClosure)fc).logFile.write("righteqRet: " + righteqRet.toString() +"\n\n");
                    ((FiniteClosure)fc).logFile.write("leftleOder: " + leftleOder.toString() +"\n");
                    ((FiniteClosure)fc).logFile.write("rightleOder: " + rightleOder.toString() +"\n\n");
                    ((FiniteClosure)fc).logFile.write("result3: 0 \n\n");
                    ((FiniteClosure)fc).logFile.flush();
                }
                catch (IOException ie) {
                }
                }
                */
                return 0;
            }
        }


        if (lefteq.size() == 1 && lefteq.iterator().next().getRhsType() instanceof ExtendsType && leftle.size() == 1 && righteq.size() == 0 && rightle.size() == 1) {
            return 1;
        }
        //Case 2
        if (lefteq.size() == 0 && leftle.size() == 1 && righteq.size() == 1 && righteq.iterator().next().getRhsType() instanceof ExtendsType && rightle.size() == 1) {
            return -1;
        }
        //Case 3
        if (lefteq.size() == 1 && lefteq.iterator().next().getRhsType() instanceof SuperType && leftle.size() == 1 && righteq.size() == 0 && rightle.size() == 1) {
            return -1;
        }
        //Case 3
        if (lefteq.size() == 0 && leftle.size() == 1 && righteq.size() == 1 && righteq.iterator().next().getRhsType() instanceof SuperType && rightle.size() == 1) {
            return 1;
        }
        //Case 5
        if (lefteq.size() == 1 && leftle.size() == 0 && righteq.size() == 1 && rightle.size() == 1) {
            return -1;
        }
        //Case 5
        if (lefteq.size() == 1 && leftle.size() == 1 && righteq.size() == 1 && rightle.size() == 0) {
            return 1;
        }
        //Case 5
        if (lefteq.size() == 1 && leftle.size() == 1 && righteq.size() == 1 && rightle.size() == 1) {
            return 0;
        }
        // Only pairs a =. Theta
        if (leftle.size() == 0 && rightle.size() == 0 && leftlewc.size() == 0 && rightlewc.size() ==0) {
            Stream<UnifyPair> lseq = lefteq.stream(); //left.filter(x -> (x.getLhsType() instanceof PlaceholderType && x.getPairOp() == PairOperator.EQUALSDOT));
            Stream<UnifyPair> rseq = righteq.stream(); //right.filter(x -> (x.getLhsType() instanceof PlaceholderType && x.getPairOp() == PairOperator.EQUALSDOT));
            BinaryOperator<HashMap<UnifyType,UnifyPair>> combiner = (x,y) -> { x.putAll(y); return x;};
            HashMap<UnifyType,UnifyPair> hm = rseq.reduce(new HashMap<UnifyType,UnifyPair>(), (x, y)-> { x.put(y.getLhsType(),y); return x; }, combiner);
            lseq = lseq.filter(x -> !(hm.get(x.getLhsType()) == null));//CHECK AGAIN!!!!
            lseq = lseq.filter(x -> !x.equals(hm.get(x.getLhsType()))); //Elements that are equal do not have to be compared
            Optional<Integer> si = lseq.map(x -> compareEq(x, hm.get(x.getLhsType()))).reduce((x,y)-> { if (x == y) return x; else return 0; } );
            if (!si.isPresent()) return 0;
            else return si.get();
        }
        //Case 1 and 4
        if (lefteq.size() >= 1 && righteq.size() >= 1 && (leftlewc.size() > 0 || rightlewc.size() > 0)) {
            if (lefteq.iterator().next().getLhsType().getName().equals("D"))
                System.out.print("");
            //Set<PlaceholderType> varsleft = lefteq.stream().map(x -> (PlaceholderType)x.getLhsType()).collect(Collectors.toCollection(HashSet::new));
            //Set<PlaceholderType> varsright = righteq.stream().map(x -> (PlaceholderType)x.getLhsType()).collect(Collectors.toCollection(HashSet::new));
            //filter the pair a = Theta that was generated by a <. Theta' (only relevant in case 1); other substitutions are filtered out
            lefteq.removeIf(x -> (x.getBasePair()!=null) && !(x.getLhsType().getName().equals(x.getBasePair().getLhsType().getName())
                ||x.getLhsType().getName().equals(x.getBasePair().getRhsType().getName())));//removeIf(x -> !varsright.contains(x.getLhsType()));
            righteq.removeIf(x -> (x.getBasePair()!=null) && !(x.getLhsType().getName().equals(x.getBasePair().getLhsType().getName())
                ||x.getLhsType().getName().equals(x.getBasePair().getRhsType().getName())));//.removeIf(x -> !varsleft.contains(x.getLhsType()));
            UnifyPair lseq = lefteq.iterator().next();
            UnifyPair rseq = righteq.iterator().next();
            if (lseq.getRhsType().getName().equals("Object")) {
                if (rseq.getRhsType().getName().equals("Object")) return 0;
                else return 1;
            }
            else {
                if (rseq.getRhsType().getName().equals("Object")) return -1;
            }
            if (leftlewc.size() == rightlewc.size()) {
                //TODO: for wildcards the wrong compare is called here PL 18-04-20
                Pair<Integer, Set<UnifyPair>> int_Unifier = compare(lseq.getRhsType(), rseq.getRhsType());
                Unifier uni = new Unifier();
                int_Unifier.getValue().get().forEach(x -> uni.add((PlaceholderType) x.getLhsType(), x.getRhsType()));
                if (!lseq.getRhsType().getName().equals(rseq.getRhsType().getName())
                    || leftlewc.size() == 0 || rightlewc.size() == 0) return int_Unifier.getKey();
                else {
                    Set <UnifyPair> lsleuni = leftlewc.stream().map(x -> uni.apply(x)).collect(Collectors.toCollection(HashSet::new));
                    Set <UnifyPair> rsleuni = rightlewc.stream().map(x -> uni.apply(x)).collect(Collectors.toCollection(HashSet::new));
                    BinaryOperator<HashMap<UnifyType,UnifyPair>> combiner = (x,y) -> { x.putAll(y); return x;};

                    HashMap<UnifyType,UnifyPair> hm;
                    Optional<Integer> si;
                    //Case 1
                    if (leftlewc.iterator().next().getLhsType() instanceof PlaceholderType) {
                        hm = rsleuni.stream().reduce(new HashMap<UnifyType,UnifyPair>(), (x, y)-> { x.put(y.getLhsType(),y); return x; }, combiner);
                        Stream<UnifyPair> lslewcstr = lsleuni.stream().filter(x -> !(hm.get(x.getLhsType()) == null));
                        si = lslewcstr.map(x -> fc.compare(x.getRhsType(), hm.get(x.getLhsType()).getRhsType(), PairOperator.SMALLERDOTWC)).reduce((x,y)-> { if (x == y) return x; else return 0; } );
                    }
                    //Case 4
                    else {
                        hm = rsleuni.stream().reduce(new HashMap<UnifyType,UnifyPair>(), (x, y)-> { x.put(y.getRhsType(),y); return x; }, combiner);
                        Stream<UnifyPair> lslewcstr = lsleuni.stream().filter(x -> !(hm.get(x.getRhsType()) == null));
                        si = lslewcstr.map(x -> fc.compare(x.getLhsType(), hm.get(x.getRhsType()).getLhsType(), PairOperator.SMALLERDOTWC)).reduce((x,y)-> { if (x == y) return x; else return 0; } );
                    }
                    if (!si.isPresent()) return 0;
                    else return si.get();
                }
            } else {
                if (leftlewc.size() > 0) {
                    Set<UnifyPair> subst;
                    subst = leftlewc.stream().map(x -> {
                        if (x.getLhsType() instanceof PlaceholderType) {
                            return new UnifyPair(x.getLhsType(), x.getRhsType(), PairOperator.EQUALSDOT);
                        }
                        else {
                            return new UnifyPair(x.getRhsType(), x.getLhsType(), PairOperator.EQUALSDOT);
                        }}).collect(Collectors.toCollection(HashSet::new));
                    Unifier uni = new Unifier();
                    lseq = uni.apply(lseq);
                }
                else {
                    Set<UnifyPair> subst;
                    subst = rightlewc.stream().map(x -> {
                        if (x.getLhsType() instanceof PlaceholderType) {
                            return new UnifyPair(x.getLhsType(), x.getRhsType(), PairOperator.EQUALSDOT);
                        }
                        else {
                            return new UnifyPair(x.getRhsType(), x.getLhsType(), PairOperator.EQUALSDOT);
                        }}).collect(Collectors.toCollection(HashSet::new));
                    Unifier uni = new Unifier();
                    subst.stream().forEach(x -> uni.add((PlaceholderType) x.getLhsType(), x.getRhsType()));
                    rseq = uni.apply(rseq);
                }
                return compareEq(lseq, rseq);
            }
        }
        return 0;
    }
}
@@ -2,48 +2,49 @@ package de.dhbwstuttgart.typeinference.unify.model;

/**
 * Operators of pairs of the unification.
 *
 * @author Florian Steurer
 */
public enum PairOperator {
    /**
     * The smaller operator (T < P) is used to express a subtyping relation between
     * T and P for example in the finite closure. It is necessarily true.
     */
    SMALLER,

    /**
     * The smallerdot operator (T <. P) is used to express a subtyping relation between
     * T and P in a CONSTRAINT during the unification. It is not necessarily true.
     */
    SMALLERDOT,

    /**
     * The smallerneqdot operator for arguments (T <!=. P) is the same as SMALLERDOT without
     * T == P. It is used for operations + / - / * / < / > / ... with the Supertype Number
     */
    SMALLERNEQDOT,

    /**
     * The smallerdot operator for arguments (T <.? P) is used to express that
     * T is an element of smArg(P) (or P is an element of grArg(T)) in a CONSTRAINT
     * during the unification. It is not necessarily true.
     */
    SMALLERDOTWC,

    /**
     * The equalsdot operator (T =. P) is used to express that two types during the unification
     * should be equal. It is not necessarily true.
     */
    EQUALSDOT;

    @Override
    public String toString() {
        switch (this) {
        case SMALLER: return "<";
        case SMALLERDOT: return "<.";
        case SMALLERNEQDOT: return "<!=.";
        case SMALLERDOTWC: return "<.?";
        default: return "=."; // EQUALSDOT
        }
    }
}
    /**
     * The smaller operator (T < P) is used to express a subtyping relation between
     * T and P for example in the finite closure. It is necessarily true.
     */
    SMALLER,

    /**
     * The smallerdot operator (T <. P) is used to express a subtyping relation between
     * T and P in a CONSTRAINT during the unification. It is not necessarily true.
     */
    SMALLERDOT,

    /**
     * The smallerneqdot operator for arguments (T <!=. P) is the same as SMALLERDOT without
     * T == P. It is used for operations + / - / * / < / > / ... with the Supertype Number
     */
    SMALLERNEQDOT,

    /**
     * The smallerdot operator for arguments (T <.? P) is used to express that
     * T is an element of smArg(P) (or P is an element of grArg(T)) in a CONSTRAINT
     * during the unification. It is not necessarily true.
     */
    SMALLERDOTWC,

    /**
     * The equalsdot operator (T =. P) is used to express that two types during the unification
     * should be equal. It is not necessarily true.
     */
    EQUALSDOT;

    @Override
    public String toString() {
        return switch (this) {
            case SMALLER -> "<";
            case SMALLERDOT -> "<.";
            case SMALLERNEQDOT -> "<!=.";
            case SMALLERDOTWC -> "<.?";
            default -> "=."; // EQUALSDOT
        };
    }
}
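A tiny sketch (not part of the commit) of the operator rendering above; the symbols are the ones produced by the switch-based toString() shown in this file.

import de.dhbwstuttgart.typeinference.unify.model.PairOperator;

// Sketch: prints "< <. <!=. <.? =." in declaration order.
public class PairOperatorSketch {
    public static void main(String[] args) {
        StringBuilder sb = new StringBuilder();
        for (PairOperator op : PairOperator.values()) {
            sb.append(op).append(' ');
        }
        System.out.println(sb.toString().trim());
    }
}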
@@ -10,43 +10,39 @@ import java.util.Set;

/**
 * An unbounded placeholder type.
 *
 * @author Florian Steurer
 */
public final class PlaceholderType extends UnifyType{

    /**
     * Static list containing the names of all existing placeholders.
     * Used for generating fresh placeholders.
     */
    public static final ArrayList<String> EXISTING_PLACEHOLDERS = new ArrayList<String>();

    /**
     * Prefix of auto-generated placeholder names.
     */
    protected static String nextName = "gen_";

    /**
     * Random number generator used to generate fresh placeholder name.
     */
    protected static Random rnd = new Random(43558747548978L);

    /**
     * True if this object was auto-generated, false if this object was user-generated.
     */
    private final boolean IsGenerated;


    /**
     * isWildcardable indicates whether a wildcard type may be assigned to this PlaceholderType
     */
    private boolean wildcardable = true;

    /**
     * innerType indicates whether this PlaceholderType is used inside a type constructor
     */
    private boolean innerType = false;

    /**
public final class PlaceholderType extends UnifyType {

    /**
     * Static list containing the names of all existing placeholders.
     * Used for generating fresh placeholders.
     */
    public static final ArrayList<String> EXISTING_PLACEHOLDERS = new ArrayList<String>();

    /**
     * Random number generator used to generate fresh placeholder name.
     */
    private static final Random rnd = new Random(43558747548978L);

    /**
     * True if this object was auto-generated, false if this object was user-generated.
     */
    private final boolean IsGenerated;


    /**
     * Defines, if a WildcardType can be assigned to this PlaceholderType
     */
    private boolean canBeWildcard = true;

    /**
     * Defines, if this type is used inside a type constructor
     */
    private boolean innerType = false;

    /**
     * variance shows the variance of the pair
     * -1: contravariant
     * 1 covariant
@@ -54,153 +50,152 @@ public final class PlaceholderType extends UnifyType{
     * PL 2018-03-21
     */
    private int variance = 0;


    /*
     * For Oder-Constraints:
     * For Oder-Constraints:
     * orCons = 1: Receiver
     * orCons = 0: Argument or no Oder-Constraint
     * orCons = 0: Argument or no Oder-Constraint
     * orCons = -1: RetType
     */
    private byte orCons = 0;

    /**
     * Creates a new placeholder type with the specified name.
     */
    public PlaceholderType(String name) {
        super(name, new TypeParams());
        EXISTING_PLACEHOLDERS.add(name); // Add to list of existing placeholder names
        IsGenerated = false; // This type is user generated
    }

    /**
     * Creates a new placeholder type
     * @param isGenerated true if this placeholder is auto-generated, false if it is user-generated.
     */
    public PlaceholderType(String name, boolean isGenerated) {
        super(name, new TypeParams());
        EXISTING_PLACEHOLDERS.add(name); // Add to list of existing placeholder names
        IsGenerated = isGenerated;
    }

    public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) {
        return visitor.visit(this, ht);
    }

    /**
     * Creates a fresh placeholder type with a name that does so far not exist.
     * A user could later instantiate a type using the same name that is equivalent to this type.
     * @return A fresh placeholder type.
     */
    public synchronized static PlaceholderType freshPlaceholder() {
        String name = nextName + (char) (rnd.nextInt(22) + 97); // Returns random char between 'a' and 'z'
        // Add random chars while the name is in use.
        while(EXISTING_PLACEHOLDERS.contains(name)) {
            name += (char) (rnd.nextInt(22) + 97); // Returns random char between 'a' and 'z'
        }
        return new PlaceholderType(name, true);
    }


    /**
     * True if this placeholder is auto-generated, false if it is user-generated.
     */
    public boolean isGenerated() {
        return IsGenerated;
    }

    public void setVariance(int v) {
        variance = v;
    }

    public int getVariance() {
        return variance;
    }

    public void reversVariance() {
        if (variance == 1) {
            setVariance(-1);
        } else {
            if (variance == -1) {
                setVariance(1);
            }}
    }

    public void setOrCons(byte i) {
        orCons = i;
    }

    public byte getOrCons() {
        return orCons;
    }

    public Boolean isWildcardable() {
        return wildcardable;
    }

    public void disableWildcardtable() {
        wildcardable = false;
    }

    public void enableWildcardtable() {
        wildcardable = true;
    }

    public void setWildcardtable(Boolean wildcardable) {
        this.wildcardable = wildcardable;
    }

    public Boolean isInnerType() {
        return innerType;
    }

    public void setInnerType(Boolean innerType) {
        this.innerType = innerType;
    }

    @Override
    Set<UnifyType> smArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
        return fc.smArg(this, fBounded);
    }

    /**
     * Creates a new placeholder type with the specified name.
     */
    public PlaceholderType(String name) {
        super(name, new TypeParams());
        EXISTING_PLACEHOLDERS.add(name); // Add to list of existing placeholder names
        IsGenerated = false; // This type is user generated
    }

    @Override
    Set<UnifyType> grArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
        return fc.grArg(this, fBounded);
    }

    @Override
    public UnifyType setTypeParams(TypeParams newTp) {
        return this; // Placeholders never have params.
    }

    @Override
    public int hashCode() {
        return typeName.hashCode();
    }

    @Override
    UnifyType apply(Unifier unif) {
        if(unif.hasSubstitute(this)) {
            UnifyType ret = unif.getSubstitute(this);
            //PL 2018-05-17 commenting this out should be correct,
            //variances are already set in JavaTXCompiler.
            //ret.accept(new distributeVariance(), this.getVariance());
            return ret;
        }
        return this;
    }

    @Override
    public boolean equals(Object obj) {
        if(!(obj instanceof PlaceholderType))
            return false;

        return ((PlaceholderType) obj).getName().equals(typeName);
    }

    /**
     * Creates a new placeholder type
     *
     * @param isGenerated true if this placeholder is auto-generated, false if it is user-generated.
     */
    public PlaceholderType(String name, boolean isGenerated) {
        super(name, new TypeParams());
        EXISTING_PLACEHOLDERS.add(name); // Add to list of existing placeholder names
        IsGenerated = isGenerated;
    }

    @Override
    public Collection<PlaceholderType> getInvolvedPlaceholderTypes() {
        ArrayList<PlaceholderType> ret = new ArrayList<>();
        ret.add(this);
        return ret;
    }

    /**
     * Creates a fresh placeholder type with a name that does so far not exist.
     * A user could later instantiate a type using the same name that is equivalent to this type.
     *
     * @return A fresh placeholder type.
     */
    public synchronized static PlaceholderType freshPlaceholder() {
        String name = "gen_" + (char) (rnd.nextInt(22) + 97); // Returns random char between 'a' and 'z'
        // Add random chars while the name is in use.
        while (EXISTING_PLACEHOLDERS.contains(name)) {
            name += (char) (rnd.nextInt(22) + 97); // Returns random char between 'a' and 'z'
        }
        return new PlaceholderType(name, true);
    }

    public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) {
        return visitor.visit(this, ht);
    }

    /**
     * True if this placeholder is auto-generated, false if it is user-generated.
     */
    public boolean isGenerated() {
        return IsGenerated;
    }

    public int getVariance() {
        return variance;
    }

    public void setVariance(int v) {
        variance = v;
    }

    public void reversVariance() {
        if (variance == 1) {
            setVariance(-1);
        } else {
            if (variance == -1) {
                setVariance(1);
            }
        }
    }

    public byte getOrCons() {
        return orCons;
    }

    public void setOrCons(byte i) {
        orCons = i;
    }

    public Boolean isWildcardable() {
        return canBeWildcard;
    }

    public void disableWildcardable() {
        canBeWildcard = false;
    }

    public void enableWildcardable() {
        canBeWildcard = true;
    }

    public Boolean isInnerType() {
        return innerType;
    }

    public void setInnerType(Boolean innerType) {
        this.innerType = innerType;
    }

    @Override
    Set<UnifyType> smArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
        return fc.smArg(this, fBounded);
    }

    @Override
    Set<UnifyType> grArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
        return fc.grArg(this, fBounded);
    }

    @Override
    public UnifyType setTypeParams(TypeParams newTp) {
        throw new RuntimeException("Placeholders never have params");
    }

    @Override
    public int hashCode() {
        return typeName.hashCode();
    }

    @Override
    UnifyType apply(Unifier unifier) {
        if (unifier.hasSubstitute(this)) {
            //PL 2018-05-17 commenting this out should be correct,
            //variances are already set in JavaTXCompiler.
            //ret.accept(new distributeVariance(), this.getVariance());
            return unifier.getSubstitute(this);
        }
        return this;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj instanceof PlaceholderType placeholderObj) {
            return placeholderObj.getName().equals(typeName);
        }

        return false;
    }


    @Override
    public Collection<PlaceholderType> getInvolvedPlaceholderTypes() {
        ArrayList<PlaceholderType> ret = new ArrayList<>();
        ret.add(this);
        return ret;
    }
}
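A sketch (not part of the commit) of how fresh placeholders behave, using only the constructors, freshPlaceholder() and equals() shown above; equality is purely by name, so a user-created placeholder with the same name is equal to a generated one.

import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;

// Sketch: fresh placeholders get unused "gen_..." names and are marked as generated.
public class PlaceholderSketch {
    public static void main(String[] args) {
        PlaceholderType user = new PlaceholderType("T");             // user-generated
        PlaceholderType fresh = PlaceholderType.freshPlaceholder();  // auto-generated, name starts with "gen_"

        System.out.println(user.isGenerated());   // false
        System.out.println(fresh.isGenerated());  // true

        // equals() only compares the name, as defined above:
        System.out.println(user.equals(new PlaceholderType("T")));   // true
    }
}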
@@ -7,93 +7,92 @@ import java.util.Set;

/**
 * A reference type e.g. Integer or List<T>.
 * @author Florian Steurer
 *
 * @author Florian Steurer
 */
public class ReferenceType extends UnifyType {

    /**
     * The buffered hashCode
     */
    private final int hashCode;

    /**
     * Indicates whether this ReferenceType is a generic type variable
     */
    private final boolean genericTypeVar;


    public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) {
        return visitor.visit(this, ht);
    }

    public ReferenceType(String name, Boolean genericTypeVar) {
        super(name, new TypeParams());
        hashCode = 31 + 17 * typeName.hashCode() + 17 * typeParams.hashCode();
        this.genericTypeVar = genericTypeVar;
    }

    public ReferenceType(String name, UnifyType... params) {
        super(name, new TypeParams(params));
        hashCode = 31 + 17 * typeName.hashCode() + 17 * typeParams.hashCode();
        genericTypeVar = false;
    }

    public ReferenceType(String name, TypeParams params) {
        super(name, params);
        hashCode = 31 + 17 * typeName.hashCode() + 17 * typeParams.hashCode();
        genericTypeVar = false;
    }

    /**
     * The buffered hashCode
     */
    private final int hashCode;

    public boolean isGenTypeVar () {
        return genericTypeVar;
    }

    @Override
    Set<UnifyType> smArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
        return fc.smArg(this, fBounded);
    }

    /**
     * Defines, if the reference type is a generic type variable
     */
    private final boolean genericTypeVar;

    @Override
    Set<UnifyType> grArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
        return fc.grArg(this, fBounded);
    }

    public ReferenceType(String name, Boolean genericTypeVar) {
        super(name, new TypeParams());
        hashCode = 31 + 17 * typeName.hashCode() + 17 * typeParams.hashCode();
        this.genericTypeVar = genericTypeVar;
    }

    @Override
    UnifyType apply(Unifier unif) {
        TypeParams newParams = typeParams.apply(unif);

        if(newParams.hashCode() == typeParams.hashCode() && newParams.equals(typeParams))
            return this;

        return new ReferenceType(typeName, newParams);
    }

    @Override
    public UnifyType setTypeParams(TypeParams newTp) {
        if(newTp.hashCode() == typeParams.hashCode() && newTp.equals(typeParams))
            return this; // reduced the amount of objects created
        return new ReferenceType(typeName, newTp);
    }

    @Override
    public int hashCode() {
        return hashCode;
    }

    @Override
    public boolean equals(Object obj) {
        if(!(obj instanceof ReferenceType))
            return false;

        if(obj.hashCode() != this.hashCode())
            return false;

        ReferenceType other = (ReferenceType) obj;

        if(!other.getName().equals(typeName))
            return false;

    public ReferenceType(String name, UnifyType... params) {
        super(name, new TypeParams(params));
        hashCode = 31 + 17 * typeName.hashCode() + 17 * typeParams.hashCode();
        genericTypeVar = false;
    }

        return other.getTypeParams().equals(typeParams);
    }
}

    public ReferenceType(String name, TypeParams params) {
        super(name, params);
        hashCode = 31 + 17 * typeName.hashCode() + 17 * typeParams.hashCode();
        genericTypeVar = false;
    }

    public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) {
        return visitor.visit(this, ht);
    }

    public boolean isGenTypeVar() {
        return genericTypeVar;
    }

    @Override
    Set<UnifyType> smArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
        return fc.smArg(this, fBounded);
    }

    @Override
    Set<UnifyType> grArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
        return fc.grArg(this, fBounded);
    }

    @Override
    UnifyType apply(Unifier unifier) {
        TypeParams newParams = typeParams.apply(unifier);

        if (newParams.hashCode() == typeParams.hashCode() && newParams.equals(typeParams))
            return this;

        return new ReferenceType(typeName, newParams);
    }

    @Override
    public UnifyType setTypeParams(TypeParams newTp) {
        if (newTp.hashCode() == typeParams.hashCode() && newTp.equals(typeParams)) {
            // reduced the amount of objects created by reusing this instance
            return this;
        }
        return new ReferenceType(typeName, newTp);
    }

    @Override
    public int hashCode() {
        return hashCode;
    }

    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof ReferenceType referenceObj))
            return false;

        if (obj.hashCode() != this.hashCode())
            return false;

        if (!referenceObj.getName().equals(typeName))
            return false;

        return referenceObj.getTypeParams().equals(typeParams);
    }
}
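A sketch (not part of the commit) of building a parameterised reference type with the constructors above; setTypeParams hands back the same instance when the parameters are unchanged, which is the object-reuse optimisation this diff keeps.

import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.ReferenceType;
import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;

// Sketch: a List<a> reference type and the reuse behaviour of setTypeParams.
public class ReferenceTypeSketch {
    public static void main(String[] args) {
        ReferenceType list = new ReferenceType("java.util.List", new PlaceholderType("a"));

        // Re-setting identical parameters hands back the same object ...
        UnifyType same = list.setTypeParams(list.getTypeParams());
        System.out.println(same == list); // true

        // ... while different parameters produce a new, non-equal ReferenceType.
        UnifyType other = list.setTypeParams(new TypeParams(new PlaceholderType("b")));
        System.out.println(other.equals(list)); // false
    }
}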
Some files were not shown because too many files have changed in this diff