Compare commits: LSP-Interf...c54e012426 (50 commits)
SHA1
c54e012426
52f6ebcf3c
c80a0c8596
2278fb1b91
32b16cd5fd
fd30c5f63f
8bfd6ae255
ad2dfb13bd
501633a90c
4defa50ca2
d65e90536a
3de7f1aa61
029e40b775
459bfcdd5f
02886c38ea
57ffae0481
d084d74a25
cd15016f61
b0e5eee25c
d1bd285be7
a902fd5bee
ced9fdc9f7
53417bf298
2d4da03f00
f7a13f5faa
8fe80b4396
eb1201ae5e
963ad76593
1eba09e3b0
fc82125d14
dad468368b
fdd4f3aa59
a0c11b60e8
4cddf73e6d
5024a02447
6c2d97b770
426c2916d3
f722a00fbb
32797c9b9f
87f655c85a
613dceae1d
81cac06e16
a47d5bc024
e5916d455a
ebb639e72e
f0a4a51ce6
7442880452
c4dc3b4245
1391206dfe
659bf6b500
independentTest.sh (new executable file, 40 lines added)
@@ -0,0 +1,40 @@
#!/usr/bin/env bash


REPO="https://gitea.hb.dhbw-stuttgart.de/f.holzwarth/JavaCompilerCore.git"
TDIR="./testBuild"

rm -rf "$TDIR" 2>/dev/null
mkdir $TDIR

cd $TDIR
git clone $REPO .
git checkout feat/unify-server
# git checkout dad468368b86bdd5a3d3b2754b17617cee0a9107 # 1:55
# git checkout a0c11b60e8c9d7addcbe0d3a09c9ce2924e9d5c0 # 2:25
# git checkout 4cddf73e6d6c9116d3e1705c4b27a8e7f18d80c3 # 2:27
# git checkout 6c2d97b7703d954e4a42eef3ec374bcf313af75c # 2:13
# git checkout f722a00fbb6e69423d48a890e4a6283471763e64 # 1:35
# git checkout f0a4a51ce65639ce9a9470ff0fdb538fdf9c02cc # 2:19
# git checkout 1391206dfe59263cdb22f93371cfd1dd5465d97f # 1:29

date "+%Y.%m.%d %H:%M:%S"

# mvn clean compile -DskipTests package
## prefix each stderr line with " | "
# exec 2> >(trap "" INT TERM; sed 's/^/ | /' >&2)
# echo -e "\nMatrix test:\n |"
# time java -jar target/JavaTXcompiler-0.1-jar-with-dependencies.jar resources/bytecode/javFiles/Matrix.jav >/dev/null;


mvn clean compile test


echo -e "\nCleanup... "
cd -
rm -rf "$TDIR" 2>/dev/null

echo -e "\nFinished "
date "+%Y.%m.%d %H:%M:%S"
echo -e "\n "
pom.xml (15 lines added)
@@ -44,6 +44,21 @@ http://maven.apache.org/maven-v4_0_0.xsd">
            <artifactId>asm</artifactId>
            <version>9.5</version>
        </dependency>
        <dependency>
            <groupId>org.java-websocket</groupId>
            <artifactId>Java-WebSocket</artifactId>
            <version>1.5.2</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-simple</artifactId>
            <version>1.7.25</version>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-databind</artifactId>
            <version>2.17.2</version>
        </dependency>
    </dependencies>

    <build>
@@ -5,42 +5,61 @@ import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.util.*;
public class ConsoleInterface {
    private static final String directory = System.getProperty("user.dir");

    public static void main(String[] args) throws IOException, ClassNotFoundException {
        List<File> input = new ArrayList<>();
        List<File> classpath = new ArrayList<>();
        String outputPath = null;
        Iterator<String> it = Arrays.asList(args).iterator();
        if(args.length == 0){
            System.out.println("No input files given. Get help with --help");
            System.exit(1);
        }else if(args.length == 1 && args[0].equals("--help")){
            System.out.println("Usage: javatx [OPTION]... [FILE]...\n" +
                    "\t-cp\tSet Classpath\n" +
                    "\t-d\tSet destination directory");
            System.exit(1);
public class ConsoleInterface {
    private static final String directory = System.getProperty("user.dir");

    public static void main(String[] args) throws IOException, ClassNotFoundException {
        List<File> input = new ArrayList<>();
        List<File> classpath = new ArrayList<>();
        String outputPath = null;
        Iterator<String> it = Arrays.asList(args).iterator();
        Optional<Integer> serverPort = Optional.empty();
        Optional<String> unifyServer = Optional.empty();

        if (args.length == 0) {
            System.out.println("No input files given. Get help with --help");
            System.exit(1);
        } else if (args.length == 1 && args[0].equals("--help")) {
            System.out.println("Usage: javatx [OPTION]... [FILE]...\n" +
                    "\t-cp\tSet Classpath\n" +
                    "\t-d\tSet destination directory\n" +
                    "\t[--server-mode <port>]\n" +
                    "\t[--unify-server <url>]\n");
            System.exit(1);
        }
        while (it.hasNext()) {
            String arg = it.next();
            if (arg.equals("-d")) {
                outputPath = it.next();
            } else if (arg.startsWith("-d")) {
                outputPath = arg.substring(2);
            } else if (arg.equals("-cp") || arg.equals("-classpath")) {
                String[] cps = it.next().split(":");
                for (String cp : cps) {
                    classpath.add(new File(cp));
                }
        while(it.hasNext()){
            String arg = it.next();
            if(arg.equals("-d")){
                outputPath = it.next();
            }else if(arg.startsWith("-d")) {
                outputPath = arg.substring(2);
            }else if(arg.equals("-cp") || arg.equals("-classpath")){
                String[] cps = it.next().split(":");
                for(String cp : cps){
                    classpath.add(new File(cp));
                }
            }else{
                input.add(new File(arg));
            }
        }
        JavaTXCompiler compiler = new JavaTXCompiler(input, classpath, outputPath != null ? new File(outputPath) : null);
        //compiler.typeInference();
        compiler.generateBytecode();
    }
            } else if (arg.equals("--server-mode")) {
                serverPort = Optional.of(Integer.parseInt(it.next()));
            } else if (arg.equals("--unify-server")) {
                unifyServer = Optional.of(it.next());
            } else {
                input.add(new File(arg));
            }
        }

        if (serverPort.isPresent()) {
            if (unifyServer.isPresent()) throw new RuntimeException("Cannot use unifyServer when in server mode!");

            JavaTXServer server = new JavaTXServer(serverPort.get());
            server.listen();
        }
        else {
            JavaTXCompiler compiler = new JavaTXCompiler(input, classpath, outputPath != null ? new File(outputPath) : null, unifyServer);
            //compiler.typeInference();
            compiler.generateBytecode();
        }
    }


}
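For orientation, a minimal sketch of how the two new command-line modes could be exercised. The port, URL and file name are placeholder values, and the ConsoleInterface package (here assumed to be de.dhbwstuttgart.core) is not shown in this hunk.

import de.dhbwstuttgart.core.ConsoleInterface;   // package assumed; adjust to the repository layout

public class CliModesSketch {
    public static void main(String[] args) throws Exception {
        // Start the compiler as a unify server (port is illustrative).
        ConsoleInterface.main(new String[] { "--server-mode", "8080" });

        // Alternatively: compile a file and delegate unification to a remote server
        // (URL and file name are illustrative).
        // ConsoleInterface.main(new String[] { "--unify-server", "ws://localhost:8080", "Matrix.jav" });
    }
}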
@@ -5,7 +5,6 @@ import de.dhbwstuttgart.bytecode.Codegen;
import de.dhbwstuttgart.environment.CompilationEnvironment;
import de.dhbwstuttgart.environment.DirectoryClassLoader;
import de.dhbwstuttgart.exceptions.DebugException;
import de.dhbwstuttgart.languageServerInterface.model.LanguageServerTransferObject;
import de.dhbwstuttgart.parser.JavaTXParser;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.parser.scope.GenericsRegistry;
@@ -13,6 +12,7 @@ import de.dhbwstuttgart.parser.SyntaxTreeGenerator.SyntaxTreeGenerator;
import de.dhbwstuttgart.parser.antlr.Java17Parser.SourceFileContext;
import de.dhbwstuttgart.parser.scope.JavaClassName;
import de.dhbwstuttgart.parser.scope.JavaClassRegistry;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.syntaxtree.ClassOrInterface;
import de.dhbwstuttgart.syntaxtree.GenericTypeVar;
import de.dhbwstuttgart.syntaxtree.Method;
@@ -36,10 +36,13 @@ import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.typeAlgo.TYPE;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.RuleSet;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.distributeVariance;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
@@ -62,24 +65,29 @@ import org.apache.commons.io.output.NullOutputStream;

public class JavaTXCompiler {

    // do not use this in any code, that can be executed serverside!
    public static PlaceholderRegistry defaultClientPlaceholderRegistry = new PlaceholderRegistry();

    // public static JavaTXCompiler INSTANCE;
    final CompilationEnvironment environment;
    Boolean resultmodel = true;
    public final Map<File, SourceFile> sourceFiles = new HashMap<>();
    public final Map<File, SourceFile> sourceFiles = new HashMap<>();

    Boolean log = false; //gibt an ob ein Log-File nach System.getProperty("user.dir")+""/logFiles/"" geschrieben werden soll?
    public volatile UnifyTaskModel usedTasks = new UnifyTaskModel();
    public final DirectoryClassLoader classLoader;
    Boolean log = false; //gibt an ob ein Log-File nach System.getProperty("user.dir")+""/logFiles/"" geschrieben werden soll?
    public volatile UnifyTaskModel usedTasks = new UnifyTaskModel();
    public final DirectoryClassLoader classLoader;

    public final List<File> classPath;
    private final File outputPath;

    private final Optional<String> unifyServer;

    public DirectoryClassLoader getClassLoader() {
        return classLoader;
    }


    public JavaTXCompiler(File sourceFile) throws IOException, ClassNotFoundException {
        this(Arrays.asList(sourceFile), List.of(), new File("."));
        this(Arrays.asList(sourceFile), List.of(), new File("."), Optional.empty());
    }

    public JavaTXCompiler(File sourceFile, Boolean log) throws IOException, ClassNotFoundException {
@@ -88,10 +96,18 @@ public class JavaTXCompiler {
    }

    public JavaTXCompiler(List<File> sourceFiles) throws IOException, ClassNotFoundException {
        this(sourceFiles, List.of(), new File("."));
        this(sourceFiles, List.of(), new File("."), Optional.empty());
    }

    public JavaTXCompiler(List<File> sources, List<File> contextPath, File outputPath) throws IOException, ClassNotFoundException {
        this(sources, contextPath, outputPath, Optional.empty());
    }

    public JavaTXCompiler(List<File> sources, List<File> contextPath, File outputPath, Optional<String> unifyServer) throws IOException, ClassNotFoundException {
        // ensure new default placeholder registry for tests
        defaultClientPlaceholderRegistry = new PlaceholderRegistry();

        this.unifyServer = unifyServer;
        var path = new ArrayList<>(contextPath);
        if (contextPath.isEmpty()) {
            // When no contextPaths are given, the working directory is the sources root
@@ -301,13 +317,14 @@ public class JavaTXCompiler {
        Set<Set<UnifyPair>> results = new HashSet<>();
        UnifyResultModel urm = null;
        // urm.addUnifyResultListener(resultListener);
        UnifyContext context = new UnifyContext(logFile, log, true, urm, usedTasks, defaultClientPlaceholderRegistry);
        try {
            logFile = logFile == null ? new FileWriter(new File("log_" + sourceFiles.keySet().iterator().next().getName())) : logFile;
            IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses, logFile, getClassLoader(), this);
            IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses, logFile, getClassLoader(), this, context.placeholderRegistry());
            System.out.println(finiteClosure);
            urm = new UnifyResultModel(cons, finiteClosure);
            urm.addUnifyResultListener(resultListener);
            ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons);
            ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons, context.placeholderRegistry());

            Function<UnifyPair, UnifyPair> distributeInnerVars = x -> {
                UnifyType lhs, rhs;
@@ -321,13 +338,12 @@ public class JavaTXCompiler {
            logFile.write(unifyCons.toString());
            unifyCons = unifyCons.map(distributeInnerVars);
            logFile.write(unifyCons.toString());
            TypeUnify unify = new TypeUnify();
            // Set<Set<UnifyPair>> results = new HashSet<>(); Nach vorne gezogen
            logFile.write("FC:\\" + finiteClosure.toString() + "\n");
            for (SourceFile f : this.sourceFiles.values()) {
                logFile.write(ASTTypePrinter.print(f));
            }
            logFile.flush();
            // logFile.flush();
            Set<PlaceholderType> varianceTPHold;
            Set<PlaceholderType> varianceTPH = new HashSet<>();
            varianceTPH = varianceInheritanceConstraintSet(unifyCons);
@@ -343,7 +359,7 @@ public class JavaTXCompiler {
            List<Set<Constraint<UnifyPair>>> oderConstraints = unifyCons.getOderConstraints()/*
             * .stream().map(x -> { Set<Set<UnifyPair>> ret = new HashSet<>(); for (Constraint<UnifyPair> y : x) { ret.add(new HashSet<>(y)); } return ret; }).collect(Collectors. toCollection(ArrayList::new))
             */;
            unify.unifyAsync(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, urm, usedTasks);
            TypeUnify.unifyAsync(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
        } catch (IOException e) {
            System.err.println("kein LogFile");
        }
@@ -366,13 +382,14 @@ public class JavaTXCompiler {

        final ConstraintSet<Pair> cons = getConstraints(file);
        Set<Set<UnifyPair>> results = new HashSet<>();
        PlaceholderRegistry placeholderRegistry = new PlaceholderRegistry();
        try {
            var logFolder = new File(System.getProperty("user.dir") + "/logFiles/");
            if (log) logFolder.mkdirs();
            Writer logFile = log ? new FileWriter(new File(logFolder, "log_" + sourceFiles.keySet().iterator().next().getName())) : new OutputStreamWriter(new NullOutputStream());
            IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses.stream().toList(), logFile, classLoader, this);
            FiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses.stream().toList(), logFile, classLoader, this, placeholderRegistry);
            System.out.println(finiteClosure);
            ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons);
            ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons, placeholderRegistry);
            System.out.println("xxx1");
            Function<UnifyPair, UnifyPair> distributeInnerVars = x -> {
                UnifyType lhs, rhs;
@@ -388,12 +405,11 @@ public class JavaTXCompiler {
            System.out.println("Unify:" + unifyCons.toString());
            unifyCons = unifyCons.map(distributeInnerVars);
            logFile.write("\nUnify_distributeInnerVars: " + unifyCons.toString());
            TypeUnify unify = new TypeUnify();
            // Set<Set<UnifyPair>> results = new HashSet<>(); Nach vorne gezogen
            logFile.write("FC:\\" + finiteClosure.toString() + "\n");
            logFile.write(ASTTypePrinter.print(sf));
            System.out.println(ASTTypePrinter.print(sf));
            logFile.flush();
            // logFile.flush();
            System.out.println("Unify nach Oder-Constraints-Anpassung:" + unifyCons.toString());
            Set<PlaceholderType> varianceTPHold;
            Set<PlaceholderType> varianceTPH = new HashSet<>();
@@ -411,16 +427,24 @@ public class JavaTXCompiler {
            /*
             * Set<Set<UnifyPair>> ret = new HashSet<>(); for (Constraint<UnifyPair> y : x) { ret.add(new HashSet<>(y)); } return ret; }).collect(Collectors.toCollection(ArrayList::new))
             */;
            if (resultmodel) {

            if (unifyServer.isPresent()) {
                UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
                UnifyContext context = new UnifyContext(logFile, log, true, urm, usedTasks, placeholderRegistry);
                SocketClient socketClient = new SocketClient(unifyServer.get());
                return socketClient.execute(finiteClosure, cons, unifyCons, context);
            }
            else if (resultmodel) {
                /* UnifyResultModel Anfang */
                UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
                UnifyResultListenerImpl li = new UnifyResultListenerImpl();
                urm.addUnifyResultListener(li);
                unify.unifyParallel(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, urm, usedTasks);
                UnifyContext context = new UnifyContext(logFile, log, true, urm, usedTasks, placeholderRegistry);
                TypeUnify.unifyParallel(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
                System.out.println("RESULT Final: " + li.getResults());
                System.out.println("Constraints for Generated Generics: " + " ???");
                logFile.write("RES_FINAL: " + li.getResults().toString() + "\n");
                logFile.flush();
                // logFile.flush();
                return li.getResults();
            }
            /* UnifyResultModel End */
@@ -428,115 +452,37 @@ public class JavaTXCompiler {
            // Set<Set<UnifyPair>> result = unify.unify(unifyCons.getUndConstraints(),
            // oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons,
            // finiteClosure));
            Set<Set<UnifyPair>> result = unify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons, finiteClosure), usedTasks);
            UnifyContext context = new UnifyContext(logFile, log, false, new UnifyResultModel(cons, finiteClosure), usedTasks, placeholderRegistry);
            Set<Set<UnifyPair>> result = TypeUnify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
            System.out.println("RESULT: " + result);
            logFile.write("RES: " + result.toString() + "\n");
            logFile.flush();
            // logFile.flush();
            results.addAll(result);

            results = results.stream().map(x -> {
                Optional<Set<UnifyPair>> res = new RuleSet().subst(x.stream().map(y -> {
                Optional<Set<UnifyPair>> res = new RuleSet(placeholderRegistry).subst(x.stream().map(y -> {
                    if (y.getPairOp() == PairOperator.SMALLERDOTWC)
                        y.setPairOp(PairOperator.EQUALSDOT);
                    return y; // alle Paare a <.? b erden durch a =. b ersetzt
                }).collect(Collectors.toCollection(HashSet::new)));
                if (res.isPresent()) {// wenn subst ein Erg liefert wurde was veraendert
                    return new TypeUnifyTask().applyTypeUnificationRules(res.get(), finiteClosure);
                    return new TypeUnifyTask(context).applyTypeUnificationRules(res.get(), finiteClosure);
                } else
                    return x; // wenn nichts veraendert wurde wird x zurueckgegeben
            }).collect(Collectors.toCollection(HashSet::new));
            System.out.println("RESULT Final: " + results);
            System.out.println("Constraints for Generated Generics: " + " ???");
            logFile.write("RES_FINAL: " + results.toString() + "\n");
            logFile.flush();
            logFile.write("PLACEHOLDERS: " + PlaceholderType.EXISTING_PLACEHOLDERS);
            logFile.flush();
            // logFile.flush();
            logFile.write("PLACEHOLDERS: " + placeholderRegistry);
            // logFile.flush();
            }
        } catch (IOException e) {
            System.err.println("kein LogFile");
        }
        return results.stream().map((unifyPairs -> new ResultSet(UnifyTypeFactory.convert(unifyPairs, Pair.generateTPHMap(cons))))).collect(Collectors.toList());
        return results.stream().map((unifyPairs -> new ResultSet(UnifyTypeFactory.convert(unifyPairs, Pair.generateTPHMap(cons), placeholderRegistry)))).collect(Collectors.toList());
    }

    /**
     * TEMPORARY - Only for Language Server Usage
     */
    public LanguageServerTransferObject getResultSetAndAbstractSyntax(File file) throws IOException, ClassNotFoundException {
        var sf = sourceFiles.get(file);
        if(sf == null){
            sf = sourceFiles.values().stream().findFirst().get();
        }
        Set<ClassOrInterface> allClasses = new HashSet<>();
        allClasses.addAll(getAvailableClasses(sf));
        allClasses.addAll(sf.getClasses());
        var newClasses = CompilationEnvironment.loadDefaultPackageClasses(sf.getPkgName(), file, this).stream().map(ASTFactory::createClass).collect(Collectors.toSet());
        for (var clazz : newClasses) {
            // Don't load classes that get recompiled
            if (sf.getClasses().stream().anyMatch(nf -> nf.getClassName().equals(clazz.getClassName())))
                continue;
            if (allClasses.stream().noneMatch(old -> old.getClassName().equals(clazz.getClassName())))
                allClasses.add(clazz);
        }

        final ConstraintSet<Pair> cons = getConstraints(file);
        Set<Set<UnifyPair>> results = new HashSet<>();
        try {
            Writer logFile = new OutputStreamWriter(new NullOutputStream());
            IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses.stream().toList(), logFile, classLoader, this);
            ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons);
            Function<UnifyPair, UnifyPair> distributeInnerVars = x -> {
                UnifyType lhs, rhs;
                if (((lhs = x.getLhsType()) instanceof PlaceholderType) && ((rhs = x.getRhsType()) instanceof PlaceholderType) && (((PlaceholderType) lhs).isInnerType() || ((PlaceholderType) rhs).isInnerType())) {
                    ((PlaceholderType) lhs).setInnerType(true);
                    ((PlaceholderType) rhs).setInnerType(true);
                }
                return x;

            };


            unifyCons = unifyCons.map(distributeInnerVars);
            TypeUnify unify = new TypeUnify();
            Set<PlaceholderType> varianceTPHold;
            Set<PlaceholderType> varianceTPH = new HashSet<>();
            varianceTPH = varianceInheritanceConstraintSet(unifyCons);

            List<Set<Constraint<UnifyPair>>> oderConstraints = unifyCons.getOderConstraints();

            if (resultmodel) {
                /* UnifyResultModel Anfang */
                UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
                UnifyResultListenerImpl li = new UnifyResultListenerImpl();
                urm.addUnifyResultListener(li);
                unify.unifyParallel(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, urm, usedTasks);
                generateBytecode(sf, li.getResults());
                return new LanguageServerTransferObject(li.getResults(), sf, ASTTypePrinter.print(sf), generatedGenerics);
            }
            /* UnifyResultModel End */
            else {
                Set<Set<UnifyPair>> result = unify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons, finiteClosure), usedTasks);
                results.addAll(result);

                results = results.stream().map(x -> {
                    Optional<Set<UnifyPair>> res = new RuleSet().subst(x.stream().map(y -> {
                        if (y.getPairOp() == PairOperator.SMALLERDOTWC)
                            y.setPairOp(PairOperator.EQUALSDOT);
                        return y; // alle Paare a <.? b erden durch a =. b ersetzt
                    }).collect(Collectors.toCollection(HashSet::new)));
                    if (res.isPresent()) {// wenn subst ein Erg liefert wurde was veraendert
                        return new TypeUnifyTask().applyTypeUnificationRules(res.get(), finiteClosure);
                    } else
                        return x; // wenn nichts veraendert wurde wird x zurueckgegeben
                }).collect(Collectors.toCollection(HashSet::new));
            }
        } catch (ClassNotFoundException e) {
        }
        generateBytecode(sf, results.stream().map((unifyPairs -> new ResultSet(UnifyTypeFactory.convert(unifyPairs, Pair.generateTPHMap(cons))))).collect(Collectors.toList()));
        return new LanguageServerTransferObject(results.stream().map((unifyPairs -> new ResultSet(UnifyTypeFactory.convert(unifyPairs, Pair.generateTPHMap(cons))))).collect(Collectors.toList()), sf, ASTTypePrinter.print(sf), generatedGenerics);
    }



    /**
     * Vererbt alle Variancen bei Paaren (a <. theta) oder (Theta <. a) wenn a eine Variance !=0 hat auf alle Typvariablen in Theta.
     *
@@ -666,10 +612,6 @@ public class JavaTXCompiler {
        }
    }

    /**
     * @param path - output-Directory can be null, then class file output is in the same directory as the parsed source files
     * @return
     */
    public Map<JavaClassName, byte[]> generateBytecode(File sourceFile) throws ClassNotFoundException, IOException {
        var sf = sourceFiles.get(sourceFile);
        if (sf.isGenerated()) return null;
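A small usage sketch for the new four-argument constructor and the bytecode entry point shown above; the source file name and output directory are placeholder values.

import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.parser.scope.JavaClassName;
import java.io.File;
import java.util.List;
import java.util.Map;
import java.util.Optional;

public class GenerateBytecodeSketch {
    public static void main(String[] args) throws Exception {
        File source = new File("Matrix.jav");                    // placeholder input file
        JavaTXCompiler compiler = new JavaTXCompiler(
                List.of(source), List.of(), new File("out"),     // placeholder output directory
                Optional.empty());                               // no remote unify server
        Map<JavaClassName, byte[]> classFiles = compiler.generateBytecode(source);
        System.out.println("Generated " + (classFiles == null ? 0 : classFiles.size()) + " class files");
    }
}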
src/main/java/de/dhbwstuttgart/core/JavaTXServer.java (new file, 31 lines added)
@@ -0,0 +1,31 @@
package de.dhbwstuttgart.core;

import de.dhbwstuttgart.server.SocketServer;

public class JavaTXServer {

    public static boolean isRunning = false;

    final SocketServer socketServer;

    public JavaTXServer(int port) {
        this.socketServer = new SocketServer(port);
        isRunning = true;
    }

    public void listen() {
        socketServer.start();
    }

    public void forceStop() {
        try {
            socketServer.stop();
        }
        catch (InterruptedException exception) {
            System.err.println("Interrupted socketServer: " + exception);
        }
        isRunning = false;
    }


}
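A minimal usage sketch for the new server class; the port value is a placeholder, not taken from the repository.

import de.dhbwstuttgart.core.JavaTXServer;

public class JavaTXServerDemo {
    public static void main(String[] args) {
        // Start the websocket-based unify server (the port is a placeholder value).
        JavaTXServer server = new JavaTXServer(8080);
        server.listen();          // starts the underlying SocketServer

        // ... clients can now connect and send unify requests ...

        server.forceStop();       // stops the SocketServer and resets JavaTXServer.isRunning
    }
}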
@@ -0,0 +1,11 @@
package de.dhbwstuttgart.exceptions;


/**
 * Eine Runtime Exception, die für den Fall genutzt wird, dass eine Unifikation abgebrochen wird.
 * Durch das Werfen einer Exception können Abbrüche auch aus Methodenaufrufen heraus
 * geprüft werden, da zuvor nur ein return X; stattfinden würde.
 */
public class UnifyCancelException extends RuntimeException {

}
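A short sketch of how such a cancellation exception is typically used: throwing it deep inside a unify step lets the whole computation unwind without threading a special return value through every call. The helper method checkCancelled and the cancel flag are illustrative only, not taken from the repository.

import de.dhbwstuttgart.exceptions.UnifyCancelException;

public class CancelSketch {
    // Illustrative helper: abort the current unification when a cancel flag is set.
    static void checkCancelled(boolean cancelRequested) {
        if (cancelRequested) {
            throw new UnifyCancelException();
        }
    }

    public static void main(String[] args) {
        try {
            checkCancelled(true);   // deep inside a unify step
        } catch (UnifyCancelException e) {
            System.out.println("Unification was cancelled.");
        }
    }
}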
@@ -1,277 +0,0 @@
package de.dhbwstuttgart.languageServerInterface;


import de.dhbwstuttgart.bytecode.Codegen;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.environment.ByteArrayClassLoader;
import de.dhbwstuttgart.environment.IByteArrayClassLoader;
import de.dhbwstuttgart.languageServerInterface.model.LanguageServerTransferObject;
import de.dhbwstuttgart.syntaxtree.SourceFile;
import de.dhbwstuttgart.syntaxtree.factory.NameGenerator;
import de.dhbwstuttgart.target.generate.ASTToTargetAST;
import de.dhbwstuttgart.target.generate.GenericsResult;
import de.dhbwstuttgart.target.tree.TargetStructure;
import org.apache.commons.io.FileUtils;

import java.io.*;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * Implementation of an Interface for the Language-Server to get the Resultset and abstract Syntax.
 */
public class LanguageServerInterface {


    public LanguageServerTransferObject getResultSetAndAbastractSyntax(String path, String resetNamesTo) throws IOException, URISyntaxException, ClassNotFoundException {
        NameGenerator.resetTo(resetNamesTo);
        return getResultSetAndAbstractSyntax(path);

    }

    public SourceFile getAst(String path, String resetNamesTo) throws IOException, URISyntaxException, ClassNotFoundException {
        NameGenerator.resetTo(resetNamesTo);
        return getAST(path);
    }


    /**
     * returns the ResultSets, GenericResultSet and the AST
     * You have to give the input as well as the path because of potential locks when the File is currently opened in an IDE.
     * Example: file:///c:/test/main.jav -> file:///c:/test/out/main.class
     *
     * @param path the URI of the File. See Example.
     * @throws IOException
     * @throws ClassNotFoundException
     * @throws URISyntaxException
     */
    public LanguageServerTransferObject getResultSetAndAbstractSyntax(String path) throws IOException, ClassNotFoundException, URISyntaxException {


        System.setOut(new PrintStream(OutputStream.nullOutputStream()));


        URI uri = new URI(path);
        ArrayList<String> pathWithoutName = new ArrayList<>(List.of(uri.getPath().split("/")));
        pathWithoutName.remove(List.of(uri.getPath().split("/")).size() - 1);
        String stringPathWithoutName = "";

        for (String i : pathWithoutName) {
            stringPathWithoutName += "/" + i;
        }

        try {
            FileUtils.cleanDirectory(new File(stringPathWithoutName + "/out"));
        } catch (Exception e) {

        }
        try {
            (new File(stringPathWithoutName + "/out")).mkdirs();
        } catch (Exception e) {

        }
        var test = getLanguageServerTransferObject(uri.getPath().split("/")[uri.getPath().split("/").length - 1], new ByteArrayClassLoader(), new File(stringPathWithoutName).getPath());
        System.setOut(System.out);
        return test;
        // File oldTempSourcefile = new File(stringPathWithoutName + "/out/" + uri.getPath().split("/")[uri.getPath().split("/").length - 1].replace(".jav", ".tmp"));
        // File oldTempOutFile = new File(stringPathWithoutName + "/out/" + uri.getPath().split("/")[uri.getPath().split("/").length - 1].replace(".jav", ".class"));
        // Files.deleteIfExists(oldTempSourcefile.toPath());
        // Files.deleteIfExists(oldTempOutFile.toPath());
        // FileUtils.deleteDirectory(oldTempSourcefile);
        //
        // File tempSourcefile = new File(stringPathWithoutName + "/out/" + uri.getPath().split("/")[uri.getPath().split("/").length - 1].replace(".jav", ".tmp"));
        // File tempOutFile = new File(stringPathWithoutName + "/out/");
        // FileUtils.writeStringToFile(tempSourcefile, input, "UTF-8");
        //
        // //TODO: Enable for multiple Class-Files
        // JavaTXCompiler tx = new JavaTXCompiler(new ArrayList<File>(List.of(tempSourcefile)), new ArrayList<File>(List.of((tempSourcefile))), tempOutFile);
        // var result = tx.getResultSetAndAbstractSyntax(tempSourcefile);
        // System.setOut(System.out);
        // tempSourcefile.delete();
        // tempOutFile.delete();
        // return result;
    }

    /**
     * returns the AST without calculating the result
     * You have to give the input as well as the path because of potential locks when the File is currently opened in an IDE.
     * Example: file:///c:/test/main.jav -> file:///c:/test/out/main.class
     *
     * @param path the URI of the File. See Example.
     * @throws IOException
     * @throws ClassNotFoundException
     * @throws URISyntaxException
     */
    public SourceFile getAST(String path) throws IOException, ClassNotFoundException, URISyntaxException {


        System.setOut(new PrintStream(OutputStream.nullOutputStream()));


        URI uri = new URI(path);
        ArrayList<String> pathWithoutName = new ArrayList<>(List.of(uri.getPath().split("/")));
        pathWithoutName.remove(List.of(uri.getPath().split("/")).size() - 1);
        String stringPathWithoutName = "";

        for (String i : pathWithoutName) {
            stringPathWithoutName += "/" + i;
        }

        try {
            FileUtils.cleanDirectory(new File(stringPathWithoutName + "/out"));
        } catch (Exception e) {

        }
        try {
            (new File(stringPathWithoutName + "/out")).mkdirs();
        } catch (Exception e) {

        }
        var test = getAST(uri.getPath().split("/")[uri.getPath().split("/").length - 1], new File(stringPathWithoutName).getPath());
        System.setOut(System.out);
        return test;
    }

    private static void writeClassFile(String name, byte[] code, Path outputPath) throws IOException {
        Files.createDirectories(outputPath);
        Files.write(outputPath.resolve(name + ".class"), code);
    }

    public static Map<String, ? extends Class<?>> generateClassFiles(IByteArrayClassLoader classLoader, Path path, Path outputPath, String... files) throws IOException, ClassNotFoundException {
        Files.createDirectories(outputPath);
        var filenames = Arrays.stream(files).map(filename -> Path.of(path.toString(), filename).toFile()).toList();
        var compiler = new JavaTXCompiler(filenames, List.of(path.toFile(), outputPath.toFile()), outputPath.toFile());

        var result = new HashMap<String, Class<?>>();
        for (var file : filenames) {
            var resultSet = compiler.typeInference(file);

            var sourceFile = compiler.sourceFiles.get(file);
            var converter = new ASTToTargetAST(compiler, resultSet, sourceFile, classLoader);
            var classes = compiler.sourceFiles.get(file).getClasses();

            result.putAll(classes.stream().map(cli -> {
                try {
                    return generateClass(converter.convert(cli), classLoader, converter, outputPath);
                } catch (IOException exception) {
                    throw new RuntimeException(exception);
                }
            }).collect(Collectors.toMap(Class::getName, Function.identity())));

            converter.generateFunNTypes();

            for (var entry : converter.auxiliaries.entrySet()) {
                writeClassFile(entry.getKey(), entry.getValue(), outputPath);
            }
        }

        for (var entry : compiler.loadedClasses.entrySet()) {
            var name = entry.getKey().toString();
            result.put(name, classLoader.loadClass(Path.of(entry.getValue().classFile().toURI())));
        }

        return result;
    }

    public static Class<?> generateClass(TargetStructure clazz, IByteArrayClassLoader classLoader, ASTToTargetAST converter, Path outputPath) throws IOException {
        Codegen codegen = new Codegen(clazz, converter.compiler, converter);
        var code = codegen.generate();
        writeClassFile(clazz.qualifiedName().getClassName(), code, outputPath);
        return classLoader.loadClass(code);
    }

    public static Map<String, ? extends Class<?>> generateClassFiles(String filename, IByteArrayClassLoader classLoader, String filePath) throws IOException, ClassNotFoundException {
        var file = Path.of(filePath, filename).toFile();
        var compiler = new JavaTXCompiler(List.of(file), List.of(file.getParentFile()), Path.of(filePath + "/out").toFile());
        var resultSet = compiler.typeInference(file);

        var sourceFile = compiler.sourceFiles.get(file);
        var converter = new ASTToTargetAST(compiler, resultSet, sourceFile, classLoader);
        var classes = compiler.sourceFiles.get(file).getClasses();

        var result = classes.stream().map(cli -> {
            try {
                return generateClass(converter.convert(cli), classLoader, converter, Path.of(filePath + "/out/"));
            } catch (IOException exception) {
                throw new RuntimeException(exception);
            }
        }).collect(Collectors.toMap(Class::getName, Function.identity()));

        converter.generateFunNTypes();

        for (var entry : converter.auxiliaries.entrySet()) {
            writeClassFile(entry.getKey(), entry.getValue(), Path.of(filePath + "/out/"));
        }

        return result;
    }

    public static SourceFile getAST(String filename, String filePath) throws IOException, ClassNotFoundException {
        var file = Path.of(filePath, filename).toFile();
        var compiler = new JavaTXCompiler(List.of(file), List.of(file.getParentFile()), Path.of(filePath + "/out").toFile());
        return compiler.sourceFiles.get(file);
    }

    public static LanguageServerTransferObject getLanguageServerTransferObject(String filename, IByteArrayClassLoader classLoader, String filePath) throws IOException, ClassNotFoundException {
        var file = Path.of(filePath, filename).toFile();
        var compiler = new JavaTXCompiler(List.of(file), List.of(file.getParentFile()), Path.of(filePath + "/out").toFile());
        var resultSet = compiler.typeInference(file);

        var sourceFile = compiler.sourceFiles.get(file);
        var converter = new ASTToTargetAST(compiler, resultSet, sourceFile, classLoader);
        compiler.generateBytecode();
        converter.generateFunNTypes();

        var ta = converter.javaGenerics();
        var tb = converter.txGenerics();
        Map<SourceFile, List<GenericsResult>> generics = new HashMap<>();

        ArrayList<GenericsResult> genericsResults = new ArrayList<>();
        genericsResults.addAll(ta);
        genericsResults.addAll(tb);
        generics.put(converter.compiler.sourceFiles.values().stream().findFirst().get(), ta);
        var test = new LanguageServerTransferObject(resultSet, converter.compiler.sourceFiles.values().stream().findFirst().get(), "", generics);
        return test;
    }

    /**
     * generates Bytecode for the given Path of the File.
     * The Generated Bytecode can be found in the same place as the path, except the File lies in an /out/ Directory
     * Example: file:///c:/test/main.jav -> file:///c:/test/out/main.class
     *
     * @param uri the URI of the File. See Example.
     * @throws IOException
     * @throws ClassNotFoundException
     * @throws URISyntaxException
     */
    public void generateBytecode(URI uri) throws IOException, ClassNotFoundException, URISyntaxException {

        System.setOut(new PrintStream(OutputStream.nullOutputStream()));


        File inputDir = new File(uri.getPath());
        File outFile = new File(uri.getPath() + "/out");
        FileUtils.cleanDirectory(outFile);
        String[] allowedEndings = {".jav"};
        ArrayList<File> files = new ArrayList<>();

        try (Stream<Path> stream = Files.walk(Paths.get(inputDir.toURI()))) {
            stream.filter(Files::isRegularFile)
                    .forEach(el -> files.add(el.toFile()));
        }


        List<String> javFiles = files.stream().filter(el -> el.getName().split("\\.").length >= 2 && el.getName().split("\\.")[el.getName().split("\\.").length - 1].contains("jav")).map(el -> el.getPath().replace(inputDir.getPath(), "").substring(1)).toList();
        //TODO: Link between Files
        generateClassFiles(new ByteArrayClassLoader(), Path.of(inputDir.toURI()), Path.of(outFile.toURI()), javFiles.toArray(new String[0]));
        System.setOut(System.out);
    }
}
@@ -1,40 +0,0 @@
package de.dhbwstuttgart.languageServerInterface;



import de.dhbwstuttgart.languageServerInterface.model.CustomParserErrorHandler;
import de.dhbwstuttgart.languageServerInterface.model.ParserError;
import de.dhbwstuttgart.parser.antlr.Java17Lexer;
import de.dhbwstuttgart.parser.antlr.Java17Parser;
import de.dhbwstuttgart.parser.antlr.Java17ParserBaseListener;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.ParseTreeWalker;

import java.util.List;

public class ParserInterface {

    public List<ParserError> getParseErrors(String input){

        CustomParserErrorHandler errorListener = new CustomParserErrorHandler();
        CharStream charStream = CharStreams.fromString(input);

        Java17Lexer lexer = new Java17Lexer(charStream);
        CommonTokenStream tokens = new CommonTokenStream(lexer);

        Java17Parser parser = new Java17Parser(tokens);
        parser.removeErrorListeners();
        parser.addErrorListener(errorListener);


        ParseTree tree = parser.sourceFile();
        ParseTreeWalker walker = new ParseTreeWalker();
        Java17ParserBaseListener listener = new Java17ParserBaseListener();
        walker.walk(listener, tree);

        return errorListener.getErrorMessages();
    }
}
@@ -1,47 +0,0 @@
package de.dhbwstuttgart.languageServerInterface.model;



import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.ATNConfigSet;
import org.antlr.v4.runtime.dfa.DFA;

import java.util.ArrayList;
import java.util.BitSet;
import java.util.List;

public class CustomParserErrorHandler implements ANTLRErrorListener {
    private final List<ParserError> errorMessages = new ArrayList<>();

    @Override
    public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line, int charPositionInLine, String msg, RecognitionException e) {
        int endCharPosition = charPositionInLine;
        if (offendingSymbol instanceof Token) {
            Token offendingToken = (Token) offendingSymbol;
            endCharPosition = charPositionInLine + offendingToken.getText().length();
        }

        ParserError parserError = new ParserError(line, charPositionInLine, endCharPosition, msg);
        errorMessages.add(parserError);

    }

    @Override
    public void reportAmbiguity(Parser parser, DFA dfa, int i, int i1, boolean b, BitSet bitSet, ATNConfigSet atnConfigSet) {

    }

    @Override
    public void reportAttemptingFullContext(Parser parser, DFA dfa, int i, int i1, BitSet bitSet, ATNConfigSet atnConfigSet) {

    }

    @Override
    public void reportContextSensitivity(Parser parser, DFA dfa, int i, int i1, int i2, ATNConfigSet atnConfigSet) {

    }

    public List<ParserError> getErrorMessages() {
        return errorMessages;
    }
}
@@ -1,31 +0,0 @@
package de.dhbwstuttgart.languageServerInterface.model;



import de.dhbwstuttgart.syntaxtree.SourceFile;
import de.dhbwstuttgart.target.generate.GenericsResult;
import de.dhbwstuttgart.typeinference.result.ResultSet;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class LanguageServerTransferObject {
    List<ResultSet> resultSets;
    SourceFile Ast;
    String printedAst;
    Map<SourceFile, List<GenericsResult>> generatedGenerics = new HashMap<>();


    public LanguageServerTransferObject(List<ResultSet> resultSets, SourceFile Ast, String printedAst, Map<SourceFile, List<GenericsResult>> generatedGenerics) {
        this.resultSets = resultSets;
        this.Ast = Ast;
        this.printedAst = printedAst;
        this.generatedGenerics = generatedGenerics;
    }

    public List<ResultSet> getResultSets() {return resultSets;}
    public SourceFile getAst() {return Ast;}
    public String getPrintedAst() {return printedAst;}
    public Map<SourceFile, List<GenericsResult>> getGeneratedGenerics() {return generatedGenerics;}
}
@@ -1,48 +0,0 @@
package de.dhbwstuttgart.languageServerInterface.model;

public class ParserError {

    private int line;
    private int charPositionInLine;
    private int endCharPosition;
    String msg;

    public ParserError(int line, int charPositionInLine, int endCharPosition, String msg) {
        this.line = line;
        this.charPositionInLine = charPositionInLine;
        this. endCharPosition = endCharPosition;
        this.msg = msg;
    }

    public int getEndCharPosition() {
        return endCharPosition;
    }

    public void setEndCharPosition(int endCharPosition) {
        this.endCharPosition = endCharPosition;
    }

    public void setCharPositionInLine(int charPositionInLine) {
        this.charPositionInLine = charPositionInLine;
    }

    public void setLine(int line) {
        this.line = line;
    }

    public void setMsg(String msg) {
        this.msg = msg;
    }

    public int getCharPositionInLine() {
        return charPositionInLine;
    }

    public int getLine() {
        return line;
    }

    public String getMsg() {
        return msg;
    }
}
@@ -1,20 +0,0 @@
package de.dhbwstuttgart.languageServerInterface.model;


import com.google.common.reflect.TypeResolver;
import de.dhbwstuttgart.typeinference.unify.UnifyResultEvent;
import de.dhbwstuttgart.typeinference.unify.UnifyResultListener;

public class ResultSetListener implements UnifyResultListener {

    TypeResolver typeResolver;

    public ResultSetListener(TypeResolver typeResolver){
        this.typeResolver = typeResolver;
    }

    @Override
    public void onNewTypeResultFound(UnifyResultEvent evt) {

    }
}
@@ -1,4 +1,25 @@
package de.dhbwstuttgart.parser;

public record SourceLoc(String file, int line) {
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;

public record SourceLoc(String file, int line) implements ISerializableData {


    @Override
    public SerialMap toSerial(KeyStorage keyStorage) {
        var serialized = new SerialMap();
        serialized.put("file", file);
        serialized.put("line", line);
        return serialized;
    }

    public static SourceLoc fromSerial(SerialMap data) {
        return new SourceLoc(
                data.getValue("file").getOf(String.class),
                data.getValue("line").getOf(Integer.class)
        );
    }

}
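A small sketch of the serialization round trip the new interface enables. It assumes KeyStorage has a no-argument constructor, which is not shown in this diff, and uses placeholder values for the file name and line.

import de.dhbwstuttgart.parser.SourceLoc;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;

public class SourceLocRoundTrip {
    public static void main(String[] args) {
        SourceLoc loc = new SourceLoc("Matrix.jav", 42);         // file name and line are placeholders

        // Serialize into the generic key/value form used by the server packets.
        SerialMap data = loc.toSerial(new KeyStorage());         // assumes a no-arg KeyStorage constructor

        // Rebuild the record on the other side of the websocket.
        SourceLoc copy = SourceLoc.fromSerial(data);
        System.out.println(copy.file() + ":" + copy.line());
    }
}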
@@ -12,6 +12,7 @@ import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.syntaxtree.type.*;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.model.*;

import java.util.*;
@@ -26,16 +27,21 @@ public class FCGenerator {
     *
     * @param availableClasses - Alle geparsten Klassen
     */
    public static Set<UnifyPair> toUnifyFC(JavaTXCompiler compiler, Collection<ClassOrInterface> availableClasses, ClassLoader classLoader) throws ClassNotFoundException {
        return toFC(availableClasses, classLoader).stream().map(t -> UnifyTypeFactory.convert(compiler, t)).collect(Collectors.toSet());
    public static Set<UnifyPair> toUnifyFC(JavaTXCompiler compiler, Collection<ClassOrInterface> availableClasses, ClassLoader classLoader, PlaceholderRegistry placeholderRegistry) throws ClassNotFoundException {
        return toFC(
                availableClasses,
                classLoader,
                placeholderRegistry
        ).stream().map(t -> UnifyTypeFactory.convert(compiler, t, placeholderRegistry))
                .collect(Collectors.toSet());
    }

    public static Set<Pair> toFC(Collection<ClassOrInterface> availableClasses, ClassLoader classLoader) throws ClassNotFoundException {
    public static Set<Pair> toFC(Collection<ClassOrInterface> availableClasses, ClassLoader classLoader, PlaceholderRegistry placeholderRegistry) throws ClassNotFoundException {
        HashSet<Pair> pairs = new HashSet<>();
        //PL 2018-09-18: gtvs vor die for-Schleife gezogen, damit immer die gleichen Typeplaceholder eingesetzt werden.
        HashMap<String, RefTypeOrTPHOrWildcardOrGeneric> gtvs = new HashMap<>();
        for(ClassOrInterface cly : availableClasses){
            List<Pair> newPairs = getSuperTypes(cly, availableClasses, gtvs, classLoader);
            List<Pair> newPairs = getSuperTypes(cly, availableClasses, gtvs, classLoader, placeholderRegistry);
            pairs.addAll(newPairs);

            //For all Functional Interfaces FI: FunN$$<... args auf dem Functional Interface ...> <. FI is added to FC
@@ -75,8 +81,13 @@ public class FCGenerator {
     * @param forType
     * @return
     */
    private static List<Pair> getSuperTypes(ClassOrInterface forType, Collection<ClassOrInterface> availableClasses, ClassLoader classLoader) throws ClassNotFoundException {
        return getSuperTypes(forType, availableClasses, new HashMap<>(), classLoader);
    private static List<Pair> getSuperTypes(
            ClassOrInterface forType,
            Collection<ClassOrInterface> availableClasses,
            ClassLoader classLoader,
            PlaceholderRegistry placeholderRegistry
    ) throws ClassNotFoundException {
        return getSuperTypes(forType, availableClasses, new HashMap<>(), classLoader, placeholderRegistry);
    }

    /**
@@ -87,8 +98,13 @@ public class FCGenerator {
     * @return
     * @throws ClassNotFoundException
     */
    private static List<Pair> getSuperTypes(ClassOrInterface forType, Collection<ClassOrInterface> availableClasses,
            HashMap<String, RefTypeOrTPHOrWildcardOrGeneric> gtvs, ClassLoader classLoader) throws ClassNotFoundException {
    private static List<Pair> getSuperTypes(
            ClassOrInterface forType,
            Collection<ClassOrInterface> availableClasses,
            HashMap<String, RefTypeOrTPHOrWildcardOrGeneric> gtvs,
            ClassLoader classLoader,
            PlaceholderRegistry placeholderRegistry
    ) throws ClassNotFoundException {
        List<RefTypeOrTPHOrWildcardOrGeneric> params = new ArrayList<>();
        //Die GTVs, die in forType hinzukommen:
        HashMap<String, RefTypeOrTPHOrWildcardOrGeneric> newGTVs = new HashMap<>();
@@ -147,7 +163,7 @@ public class FCGenerator {
        if(superClass.getClassName().equals(ASTFactory.createObjectClass().getClassName())){
            superTypes = Arrays.asList(new Pair(ASTFactory.createObjectType(), ASTFactory.createObjectType(), PairOperator.SMALLER));
        }else{
            superTypes = getSuperTypes(superClass, availableClasses, newGTVs, classLoader);
            superTypes = getSuperTypes(superClass, availableClasses, newGTVs, classLoader, placeholderRegistry);
        }

        retList.add(ret);
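A minimal sketch of the updated FCGenerator entry point with the extra PlaceholderRegistry argument. The compiler, class collection and class loader are assumed to come from an existing JavaTXCompiler instance as elsewhere in this diff, and the import for FCGenerator is omitted because its package is not shown here.

import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.syntaxtree.ClassOrInterface;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import java.util.Collection;
import java.util.Set;

public class FiniteClosureSketch {
    // Sketch: build the finite closure pairs for a set of parsed classes,
    // now passing an explicit PlaceholderRegistry instead of relying on global state.
    // (Import for FCGenerator omitted; adjust to its package in this repository.)
    static Set<UnifyPair> buildFC(JavaTXCompiler compiler,
                                  Collection<ClassOrInterface> availableClasses,
                                  ClassLoader classLoader) throws ClassNotFoundException {
        PlaceholderRegistry placeholderRegistry = new PlaceholderRegistry();
        return FCGenerator.toUnifyFC(compiler, availableClasses, classLoader, placeholderRegistry);
    }
}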
src/main/java/de/dhbwstuttgart/server/SocketClient.java (new file, 158 lines added)
@@ -0,0 +1,158 @@
package de.dhbwstuttgart.server;

import com.fasterxml.jackson.core.JsonProcessingException;
import de.dhbwstuttgart.server.packet.IPacket;
import de.dhbwstuttgart.server.packet.IServerToClientPacket;
import de.dhbwstuttgart.server.packet.PacketContainer;
import de.dhbwstuttgart.server.packet.UnifyRequestPacket;
import de.dhbwstuttgart.server.packet.UnifyResultPacket;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import java.net.URI;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.java_websocket.client.WebSocketClient;
import org.java_websocket.enums.ReadyState;
import org.java_websocket.handshake.ServerHandshake;


/**
 * The Client-side of the websocket
 */
public class SocketClient extends WebSocketClient {

    // use a latch to wait until the connection is closed by the remote host
    private final CountDownLatch closeLatch = new CountDownLatch(1);
    // temporarily: The received unify result
    // TODO: replace with uuid and future system, such that responses can be mapped by a uuid to fulfill a Future
    private UnifyResultPacket unifyResultPacket;

    public SocketClient(String url) {
        super(URI.create(url), Map.of(
                "packetProtocolVersion", SocketServer.packetProtocolVersion
        ));
        // make sure, the url is in a valid format
        final String regex = "^wss?://(\\w+(\\.\\w+)?)*:(\\d+)$";
        final Matcher matcher = Pattern.compile(regex).matcher(url);
        if (!matcher.find()) {
            throw new RuntimeException("Provided string \"" + url + "\" is not a valid server URL! Use pattern ws(s?)://<host.name>:<port>");
        }
    }

    public SocketClient(String host, int port, boolean secure) {
        super(URI.create(String.format("%s://%s:%d/", secure ? "wss" : "ws", host, port)));
    }

    /**
     * The main method for connecting, requesting and waiting for the server to unify.
     * This is synchronized to prevent multiple webSockets connections at the moment, but it is not called from any
     * thread except the main thread right now and is not necessary at all, probably. Maybe remove it later
     */
    synchronized public List<ResultSet> execute(
            FiniteClosure finiteClosure,
            ConstraintSet<Pair> constraintSet,
            ConstraintSet<UnifyPair> unifyConstraintSet,
            UnifyContext context
    ) throws JsonProcessingException {

        try {
            // wait for the connection to be set up
            this.connectBlocking();
            // make sure the connection has been established successfully
            if (this.getReadyState() != ReadyState.OPEN) {
                throw new RuntimeException("WebSocket Client could not connect to remote host at " + this.uri);
            }

            // send the unify task request
            UnifyRequestPacket packet = new UnifyRequestPacket(finiteClosure, constraintSet, unifyConstraintSet, context.placeholderRegistry());
            String json = PacketContainer.serialize(packet);
            this.send(json);

            // block the thread, until the connection is closed by the remote host (usually after sending the results)
            this.waitUntilClosed();
            // wait for the connection to fully close
            this.closeBlocking();
        } catch (InterruptedException exception) {
            System.err.println("Server connection interrupted: " + exception);
            this.notifyAll();
            throw new RuntimeException("Aborted server connection", exception);
        }
        catch (Exception exception) {
            throw new RuntimeException("Exception occurred in server connection: ", exception);
        }

        // detect error cases, in which no error was thrown, but also no result was sent back from the server
        if (this.unifyResultPacket == null) {
            throw new RuntimeException("Did not receive server response but closed connection already");
        }

        return unifyResultPacket.getResultSet(context);
    }

    /**
     * Specific client-side implementations to handle incoming packets
     */
    protected void handleReceivedPacket(IPacket packet) {
        if (packet instanceof IServerToClientPacket serverToClientPacket) {

            try {
                serverToClientPacket.onHandle(this.getConnection(), this);
            }
            catch (Exception exception) {
                this.closeLatch.countDown();
                this.close();
                throw exception;
            }

            return;
        }

        System.err.println("Received package of invalid type + " + packet.getClass().getName());
        this.close();
    }

    public void setUnifyResultSets(UnifyResultPacket unifyResultPacket) {
        this.unifyResultPacket = unifyResultPacket;
    }

    @Override
    public void onOpen(ServerHandshake handshakedata) {
        System.out.println("Connected to server with status " + handshakedata.getHttpStatus());
    }

    @Override
    public void onMessage(String message) {
        // System.out.println("received: " + message);
        IPacket packet = PacketContainer.deserialize(message);
        this.handleReceivedPacket(packet);
    }

    @Override
    public void onClose(int code, String reason, boolean remote) {
        System.out.println(
                "Disconnected from server " +
                        "with code " + code + " " +
                        (reason.isEmpty() ? "" : "and reason " + reason + " ") +
                        "(closed by remote: " + remote + ")"
        );
        this.closeLatch.countDown();
    }

    @Override
    public void onError(Exception e) {
        System.out.println("Error: " + e.getMessage());
        e.printStackTrace();
    }

    public void waitUntilClosed() throws InterruptedException {
        closeLatch.await();
    }
}
203
src/main/java/de/dhbwstuttgart/server/SocketServer.java
Normal file
@@ -0,0 +1,203 @@
|
||||
package de.dhbwstuttgart.server;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import de.dhbwstuttgart.server.packet.ErrorPacket;
|
||||
import de.dhbwstuttgart.server.packet.IClientToServerPacket;
|
||||
import de.dhbwstuttgart.server.packet.IPacket;
|
||||
import de.dhbwstuttgart.server.packet.MessagePacket;
|
||||
import de.dhbwstuttgart.server.packet.PacketContainer;
|
||||
import java.net.InetSocketAddress;
|
||||
import java.util.Objects;
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.ScheduledExecutorService;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import org.java_websocket.WebSocket;
|
||||
import org.java_websocket.handshake.ClientHandshake;
|
||||
import org.java_websocket.server.WebSocketServer;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public class SocketServer extends WebSocketServer {
|
||||
|
||||
/**
|
||||
* Increase this whenever a breaking change to the server communication is made.
* This prevents errors when the server version and the client version do not match.
|
||||
*/
|
||||
public static final String packetProtocolVersion = "1";
|
||||
|
||||
public static final Logger log = LoggerFactory.getLogger(SocketServer.class);
|
||||
|
||||
public SocketServer(int port) {
|
||||
super(new InetSocketAddress(port));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onOpen(WebSocket webSocket, ClientHandshake clientHandshake) {
|
||||
String ppv = clientHandshake.getFieldValue("packetProtocolVersion");
|
||||
if (!ppv.equals(packetProtocolVersion)) {
|
||||
try {
|
||||
ErrorPacket errorPacket = ErrorPacket.create(
|
||||
"Mismatch in packet protocol version! Client (you): " + ppv + " and Server (me): " + packetProtocolVersion,
|
||||
true
|
||||
);
|
||||
webSocket.send(PacketContainer.serialize(errorPacket));
|
||||
}
|
||||
catch (JsonProcessingException exception) {
|
||||
System.err.println("Failed to serialize json: " + exception);
|
||||
}
|
||||
webSocket.close(1);
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
SocketData socketData = new SocketData(UUID.randomUUID().toString());
|
||||
webSocket.setAttachment(socketData);
|
||||
System.out.println("New connection: " + socketData.id + " (with ppv " + ppv + ")");
|
||||
|
||||
try {
|
||||
sendMessage(webSocket, "Welcome to the server!");
|
||||
|
||||
// wait 10 seconds for the client to send a task and close the connection if nothing has been received by then
|
||||
try (ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor()) {
|
||||
Runnable task = () -> {
|
||||
// nothing to do if the client has already sent a task or the connection is no longer open
if (webSocket.<SocketData>getAttachment().unhandledTasks.get() != 0 || !webSocket.isOpen()) {
|
||||
return;
|
||||
}
|
||||
sendMessage(webSocket, "No task received after 10 seconds. Closing connection...");
|
||||
webSocket.close();
|
||||
};
|
||||
executor.schedule(task, 10, TimeUnit.SECONDS);
|
||||
executor.shutdown();
|
||||
}
|
||||
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("e: ", e);
|
||||
webSocket.close(1, e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onClose(WebSocket webSocket, int code, String reason, boolean remote) {
|
||||
SocketData socketData = webSocket.getAttachment();
|
||||
System.out.println("Connection closed: " + socketData.id);
|
||||
System.out.println(
|
||||
"Disconnected client " + socketData.id + " " +
|
||||
"with code " + code + " " +
|
||||
(reason.isEmpty() ? "" : "and reason " + reason + " ") +
|
||||
"(closed by client: " + remote + ")"
|
||||
);
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onMessage(WebSocket webSocket, String s) {
|
||||
// System.out.println("Received: " + s.substring(0, 50));
|
||||
IPacket reconstructedPacket = PacketContainer.deserialize(s);
|
||||
try {
|
||||
this.onPacketReceived(webSocket, reconstructedPacket);
|
||||
} catch (JsonProcessingException e) {
|
||||
this.log("Error on processing incoming package: " + e.getMessage(), webSocket);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onError(WebSocket webSocket, Exception e) {
|
||||
log(e.getMessage(), webSocket);
|
||||
webSocket.close();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onStart() {
|
||||
System.out.println("Websocket server started on port " + this.getPort());
|
||||
}
|
||||
|
||||
/**
|
||||
* A shorthand method for sending informational messages to the client
|
||||
*/
|
||||
public void sendMessage(WebSocket webSocket, String text) {
|
||||
try {
|
||||
MessagePacket message = new MessagePacket();
|
||||
message.message = text;
|
||||
webSocket.send(PacketContainer.serialize(message));
|
||||
} catch (Exception e) {
|
||||
System.err.println("Failed to send message: " + text);
|
||||
log.error("e: ", e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A shorthand method for sending error messages to the client
|
||||
*/
|
||||
public void sendError(WebSocket webSocket, String text) {
|
||||
try {
|
||||
ErrorPacket error = new ErrorPacket();
|
||||
error.error = text;
|
||||
webSocket.send(PacketContainer.serialize(error));
|
||||
} catch (Exception e) {
|
||||
System.err.println("Failed to send error: " + text);
|
||||
log.error("e: ", e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The server-side implementation on how to handle certain packets when received
|
||||
*/
|
||||
private void onPacketReceived(WebSocket webSocket, IPacket packet) throws JsonProcessingException {
|
||||
SocketData socketData = webSocket.getAttachment();
|
||||
|
||||
// limit the amount of tasks per connection
|
||||
final int maxTasks = 100;
|
||||
if (socketData.totalTasks.get() >= maxTasks) {
|
||||
sendError(webSocket, "Exceeded the maximum amount of " + maxTasks + " tasks per session");
|
||||
webSocket.close();
|
||||
return;
|
||||
}
|
||||
|
||||
// only allow packets that are meant to be handled by the server
|
||||
if (!(packet instanceof IClientToServerPacket clientToServerPacket)) {
|
||||
sendMessage(webSocket, "The package of type " + packet.getClass().getName() + " is not handled by the server!");
|
||||
return;
|
||||
}
|
||||
|
||||
// update the socket data
|
||||
socketData.unhandledTasks.incrementAndGet();
|
||||
socketData.totalTasks.incrementAndGet();
|
||||
|
||||
// handle the packet asynchronously so the server thread is not blocked
|
||||
CompletableFuture.runAsync(() -> {
|
||||
clientToServerPacket.onHandle(webSocket, this);
|
||||
|
||||
// if the websocket has no unhandled tasks left, close the connection
|
||||
int remainingUnhandledTasks = socketData.unhandledTasks.decrementAndGet();
|
||||
if (remainingUnhandledTasks <= 0) {
|
||||
sendMessage(webSocket, "All requested tasks finished! Closing connection...");
|
||||
webSocket.close();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public void log(String msg, WebSocket webSocket) {
|
||||
SocketData socketData = webSocket == null ? new SocketData("???") : webSocket.getAttachment();
|
||||
System.out.println("["+socketData.id+"] " + msg);
|
||||
}
|
||||
|
||||
/**
|
||||
* The data that is associated server-side with any connected client.
|
||||
* This makes it possible to store information that can be mapped to any existing connection.
|
||||
*/
|
||||
public static class SocketData {
|
||||
|
||||
public final String id;
|
||||
public final AtomicInteger unhandledTasks = new AtomicInteger(0);
|
||||
public final AtomicInteger totalTasks = new AtomicInteger(0);
|
||||
|
||||
public SocketData(String id) {
|
||||
this.id = id;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,30 @@
|
||||
package de.dhbwstuttgart.server.packet;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||
import de.dhbwstuttgart.server.SocketClient;
|
||||
import de.dhbwstuttgart.server.SocketServer;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialUUID;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
|
||||
import org.java_websocket.WebSocket;
|
||||
|
||||
public class DebugPacket implements IClientToServerPacket, IServerToClientPacket {
|
||||
|
||||
public SerialUUID a1;
|
||||
public SerialUUID a2;
|
||||
public SerialMap b1;
|
||||
public SerialMap b2;
|
||||
public SerialList<? extends ISerialNode> c1;
|
||||
public SerialList<? extends ISerialNode> c2;
|
||||
public SerialValue<?> d1;
|
||||
public SerialValue<?> d2;
|
||||
|
||||
@JsonIgnore
|
||||
public void onHandle(WebSocket webSocket, SocketClient socketServer) {}
|
||||
|
||||
@JsonIgnore
|
||||
public void onHandle(WebSocket webSocket, SocketServer socketServer) {}
|
||||
|
||||
}
|
@@ -0,0 +1,41 @@
|
||||
package de.dhbwstuttgart.server.packet;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||
import de.dhbwstuttgart.server.SocketClient;
|
||||
import de.dhbwstuttgart.server.SocketServer;
|
||||
import org.java_websocket.WebSocket;
|
||||
|
||||
/**
|
||||
* A packet to send simple error messages between the client and the server
|
||||
*/
|
||||
public class ErrorPacket implements IClientToServerPacket, IServerToClientPacket {
|
||||
|
||||
/**
|
||||
* The error message that should be logged or output
|
||||
*/
|
||||
public String error;
|
||||
public boolean isFatal;
|
||||
|
||||
@JsonIgnore
|
||||
public static ErrorPacket create(String error, boolean isFatal) {
|
||||
ErrorPacket packet = new ErrorPacket();
|
||||
packet.error = error;
|
||||
packet.isFatal = isFatal;
|
||||
return packet;
|
||||
}
|
||||
|
||||
|
||||
@JsonIgnore
|
||||
public void onHandle(WebSocket webSocket, SocketClient socketClient) {
|
||||
System.err.println("[socket] " + "ErrorPacket: " + this.error);
|
||||
if (this.isFatal) {
|
||||
throw new RuntimeException("Received fatal error from server: " + this.error);
|
||||
}
|
||||
}
|
||||
|
||||
@JsonIgnore
|
||||
public void onHandle(WebSocket webSocket, SocketServer socketServer) {
|
||||
socketServer.log("ErrorPacket: " + this.error, webSocket);
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,12 @@
|
||||
package de.dhbwstuttgart.server.packet;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||
import de.dhbwstuttgart.server.SocketServer;
|
||||
import org.java_websocket.WebSocket;
|
||||
|
||||
public interface IClientToServerPacket extends IPacket {
|
||||
|
||||
@JsonIgnore
|
||||
void onHandle(WebSocket webSocket, SocketServer socketServer);
|
||||
|
||||
}
|
12
src/main/java/de/dhbwstuttgart/server/packet/IPacket.java
Normal file
@@ -0,0 +1,12 @@
|
||||
package de.dhbwstuttgart.server.packet;
|
||||
|
||||
/**
|
||||
* The shared interface for all packets of the client-server connection.
|
||||
* A packet must always:
|
||||
* - Have a default / no-parameter constructor
|
||||
* - Have only serializable public properties (or disable them via jackson annotations)
|
||||
*
|
||||
*/
|
||||
public interface IPacket {
|
||||
|
||||
}
|
@@ -0,0 +1,12 @@
|
||||
package de.dhbwstuttgart.server.packet;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||
import de.dhbwstuttgart.server.SocketClient;
|
||||
import org.java_websocket.WebSocket;
|
||||
|
||||
public interface IServerToClientPacket extends IPacket {
|
||||
|
||||
@JsonIgnore
|
||||
void onHandle(WebSocket webSocket, SocketClient socketClient);
|
||||
|
||||
}
|
@@ -0,0 +1,29 @@
|
||||
package de.dhbwstuttgart.server.packet;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||
import de.dhbwstuttgart.server.SocketClient;
|
||||
import de.dhbwstuttgart.server.SocketServer;
|
||||
import org.java_websocket.WebSocket;
|
||||
|
||||
/**
|
||||
* A fallback packet that is generated if the received JSON could not be mapped to an existing packet type
|
||||
*/
|
||||
public class InvalidPacket implements IClientToServerPacket, IServerToClientPacket {
|
||||
|
||||
/**
|
||||
* If available, the error that caused this packet to be created
|
||||
*/
|
||||
public String error = "<unknown error>";
|
||||
|
||||
|
||||
@JsonIgnore
|
||||
public void onHandle(WebSocket webSocket, SocketClient socketClient) {
|
||||
System.err.println("[socket] " + "InvalidPacket: " + this.error);
|
||||
}
|
||||
|
||||
@JsonIgnore
|
||||
public void onHandle(WebSocket webSocket, SocketServer socketServer) {
|
||||
socketServer.log("InvalidPacket: " + this.error, webSocket);
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,36 @@
|
||||
package de.dhbwstuttgart.server.packet;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||
import de.dhbwstuttgart.server.SocketClient;
|
||||
import de.dhbwstuttgart.server.SocketServer;
|
||||
import org.java_websocket.WebSocket;
|
||||
|
||||
/**
|
||||
* A packet to send simple informational messages between the client and the server
|
||||
*/
|
||||
public class MessagePacket implements IClientToServerPacket, IServerToClientPacket {
|
||||
|
||||
/**
|
||||
* The informational message from the server that should be logged or output
|
||||
*/
|
||||
public String message;
|
||||
|
||||
@JsonIgnore
|
||||
public static MessagePacket create(String message) {
|
||||
MessagePacket packet = new MessagePacket();
|
||||
packet.message = message;
|
||||
return packet;
|
||||
}
|
||||
|
||||
@JsonIgnore
|
||||
public void onHandle(WebSocket webSocket, SocketClient socketClient) {
|
||||
System.err.println("[socket] " + this.message);
|
||||
|
||||
}
|
||||
|
||||
@JsonIgnore
|
||||
public void onHandle(WebSocket webSocket, SocketServer socketServer) {
|
||||
socketServer.log(this.message, webSocket);
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,90 @@
|
||||
package de.dhbwstuttgart.server.packet;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
|
||||
/**
|
||||
* A wrapper for the packet to ensure correct serialization/deserialization and make it possible to detect the matching
|
||||
* packet type for deserialization.
|
||||
*/
|
||||
@JsonInclude(JsonInclude.Include.NON_NULL)
|
||||
public class PacketContainer {
|
||||
|
||||
// The jackson serializer / deserializer tool
|
||||
private static final ObjectMapper objectMapper = new ObjectMapper();
|
||||
|
||||
/*
|
||||
* The available packet types. The one type that is represented in the JSON should always be the ONLY non-null value.
|
||||
* They have to be public (for the moment) to let jackson fill them in while deserializing
|
||||
*/
|
||||
public ErrorPacket errorPacket = null;
|
||||
public MessagePacket messagePacket = null;
|
||||
public InvalidPacket invalidPacket = null;
|
||||
public UnifyRequestPacket unifyRequestPacket = null;
|
||||
public UnifyResultPacket unifyResultPacket = null;
|
||||
public DebugPacket debugPacket = null;
|
||||
|
||||
|
||||
/**
|
||||
* Generate the JSON string for the given packet
|
||||
*
|
||||
* @param packet The packet to serialize
|
||||
* @return The json representation of the packet
|
||||
*/
|
||||
public static String serialize(IPacket packet) throws JsonProcessingException {
|
||||
objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
|
||||
PacketContainer container = new PacketContainer();
|
||||
|
||||
if (packet instanceof ErrorPacket)
|
||||
container.errorPacket = (ErrorPacket) packet;
|
||||
else if (packet instanceof MessagePacket)
|
||||
container.messagePacket = (MessagePacket) packet;
|
||||
else if (packet instanceof UnifyRequestPacket)
|
||||
container.unifyRequestPacket = (UnifyRequestPacket) packet;
|
||||
else if (packet instanceof UnifyResultPacket)
|
||||
container.unifyResultPacket = (UnifyResultPacket) packet;
|
||||
else if (packet instanceof DebugPacket)
|
||||
container.debugPacket = (DebugPacket) packet;
|
||||
// Add new packets here and in the deserialize method
|
||||
|
||||
return objectMapper.writeValueAsString(container);
|
||||
}
|
||||
|
||||
/**
|
||||
* Use the JSON string to generate the matching packet object
|
||||
*
|
||||
* @param json The serialized representation of a packet container
|
||||
* @return The deserialized Packet object
|
||||
*/
|
||||
public static IPacket deserialize(String json) {
|
||||
objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
|
||||
|
||||
try {
|
||||
PacketContainer container = objectMapper.readValue(json, PacketContainer.class);
|
||||
|
||||
if (container.errorPacket != null)
|
||||
return container.errorPacket;
|
||||
if (container.messagePacket != null)
|
||||
return container.messagePacket;
|
||||
if (container.invalidPacket != null)
|
||||
return container.invalidPacket;
|
||||
if (container.unifyRequestPacket != null)
|
||||
return container.unifyRequestPacket;
|
||||
if (container.unifyResultPacket != null)
|
||||
return container.unifyResultPacket;
|
||||
if (container.debugPacket != null)
|
||||
return container.debugPacket;
|
||||
// Add new packets here and in the serialize method
|
||||
|
||||
throw new RuntimeException("Cannot map received json to any known packet class");
|
||||
} catch (Exception e) {
|
||||
System.out.println(e);
|
||||
InvalidPacket packet = new InvalidPacket();
|
||||
packet.error = e.getMessage();
|
||||
return packet;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
@@ -0,0 +1,145 @@
|
||||
package de.dhbwstuttgart.server.packet;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||
import de.dhbwstuttgart.server.SocketServer;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
|
||||
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
|
||||
import de.dhbwstuttgart.typeinference.constraints.Pair;
|
||||
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
|
||||
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
|
||||
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||
import de.dhbwstuttgart.typeinference.unify.UnifyResultListenerImpl;
|
||||
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
|
||||
import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;
|
||||
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
|
||||
import java.io.FileWriter;
|
||||
import java.io.IOException;
|
||||
import java.io.Writer;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.concurrent.ForkJoinPool;
|
||||
import org.java_websocket.WebSocket;
|
||||
|
||||
/**
|
||||
* A packet to send all required data for the unification algorithm to the server and request the unification
|
||||
*/
|
||||
public class UnifyRequestPacket implements IClientToServerPacket {
|
||||
|
||||
public SerialMap finiteClosure;
|
||||
public SerialMap constraintSet;
|
||||
public SerialMap unifyConstraintSet;
|
||||
public SerialMap serialKeyStorage;
|
||||
public SerialValue<?> placeholders;
|
||||
public SerialList<SerialMap> factoryplaceholders;
|
||||
|
||||
@JsonIgnore
|
||||
private KeyStorage keyStorage = new KeyStorage();
|
||||
@JsonIgnore
|
||||
private boolean keyStorageLoaded = false;
|
||||
|
||||
public UnifyRequestPacket() {}
|
||||
|
||||
public UnifyRequestPacket(
|
||||
FiniteClosure finiteClosure,
|
||||
ConstraintSet<Pair> constraintSet,
|
||||
ConstraintSet<UnifyPair> unifyConstraintSet,
|
||||
PlaceholderRegistry placeholderRegistry
|
||||
) {
|
||||
// store constraint sets and finite closure
|
||||
this.finiteClosure = finiteClosure.toSerial(keyStorage);
|
||||
this.constraintSet = constraintSet.toSerial(keyStorage);
|
||||
this.unifyConstraintSet = unifyConstraintSet.toSerial(keyStorage);
|
||||
// store placeholder registry
|
||||
var serialRegistry = placeholderRegistry.toSerial(keyStorage);
|
||||
this.placeholders = serialRegistry.getValue("ph");
|
||||
this.factoryplaceholders = serialRegistry.getList("factoryPh").assertListOfMaps();
|
||||
// store referenced objects separately
|
||||
this.serialKeyStorage = keyStorage.toSerial(keyStorage);
|
||||
}
|
||||
|
||||
|
||||
@JsonIgnore
|
||||
public void loadKeyStorage(UnifyContext context) {
|
||||
if (!keyStorageLoaded) {
|
||||
keyStorageLoaded = true;
|
||||
keyStorage = KeyStorage.fromSerial(this.serialKeyStorage, context);
|
||||
}
|
||||
}
|
||||
|
||||
@JsonIgnore
|
||||
private FiniteClosure retrieveFiniteClosure(UnifyContext context) {
|
||||
this.loadKeyStorage(context);
|
||||
return FiniteClosure.fromSerial(this.finiteClosure, context, keyStorage);
|
||||
}
|
||||
|
||||
@JsonIgnore
|
||||
private ConstraintSet<Pair> retrieveConstraintSet(UnifyContext context) {
|
||||
this.loadKeyStorage(context);
|
||||
return ConstraintSet.fromSerial(this.constraintSet, context, Pair.class, keyStorage);
|
||||
}
|
||||
|
||||
@JsonIgnore
|
||||
private ConstraintSet<UnifyPair> retrieveUnifyConstraintSet(UnifyContext context) {
|
||||
this.loadKeyStorage(context);
|
||||
return ConstraintSet.fromSerial(this.unifyConstraintSet, context, UnifyPair.class, keyStorage);
|
||||
}
|
||||
|
||||
@JsonIgnore
|
||||
public void onHandle(WebSocket webSocket, SocketServer socketServer) {
|
||||
socketServer.sendMessage(webSocket, "You requested a unify! Please wait until the results have been calculated...");
|
||||
System.out.println("Client " + webSocket.<SocketServer.SocketData>getAttachment().id + " requested a unification. Starting now...");
|
||||
|
||||
try {
|
||||
var placeholderRegistry = new PlaceholderRegistry();
|
||||
ArrayList<String> existingPlaceholders = (ArrayList) this.placeholders.getOf(ArrayList.class);
|
||||
existingPlaceholders.forEach(placeholderRegistry::addPlaceholder);
|
||||
|
||||
var unifyContext = new UnifyContext(Writer.nullWriter(), false, true,
|
||||
new UnifyResultModel(new ConstraintSet<>(), new FiniteClosure(new HashSet<>(), null, placeholderRegistry)),
|
||||
new UnifyTaskModel(), ForkJoinPool.commonPool(), placeholderRegistry
|
||||
);
|
||||
|
||||
this.factoryplaceholders.stream()
|
||||
.map(p -> (PlaceholderType)UnifyType.fromSerial(p, unifyContext))
|
||||
.forEach(placeholderRegistry.UnifyTypeFactory_PLACEHOLDERS::add);
|
||||
|
||||
|
||||
// start the unification algorithm from the received data
|
||||
IFiniteClosure finiteClosure = this.retrieveFiniteClosure(unifyContext);
|
||||
ConstraintSet<Pair> constraintSet = this.retrieveConstraintSet(unifyContext);
|
||||
ConstraintSet<UnifyPair> unifyConstraintSet = this.retrieveUnifyConstraintSet(unifyContext);
|
||||
|
||||
var resultModel = new UnifyResultModel(constraintSet, finiteClosure);
|
||||
UnifyResultListenerImpl resultListener = new UnifyResultListenerImpl();
|
||||
resultModel.addUnifyResultListener(resultListener);
|
||||
|
||||
TypeUnify.unifyParallel(
|
||||
unifyConstraintSet.getUndConstraints(),
|
||||
unifyConstraintSet.getOderConstraints(),
|
||||
finiteClosure,
|
||||
unifyContext.newWithResultModel(resultModel)
|
||||
);
|
||||
|
||||
var resultSets = resultListener.getResults();
|
||||
|
||||
System.out.println("Finished unification for client " + webSocket.<SocketServer.SocketData>getAttachment().id);
|
||||
socketServer.sendMessage(webSocket, "Unification finished. Found " + resultSets.size() + " result sets");
|
||||
|
||||
if (webSocket.isOpen()) {
|
||||
UnifyResultPacket resultPacket = UnifyResultPacket.create(resultSets);
|
||||
webSocket.send(PacketContainer.serialize(resultPacket));
|
||||
}
|
||||
} catch (Exception e) {
|
||||
System.err.println(e);
|
||||
SocketServer.log.error("e: ", e);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,43 @@
|
||||
package de.dhbwstuttgart.server.packet;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||
import de.dhbwstuttgart.server.SocketClient;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
|
||||
import de.dhbwstuttgart.typeinference.result.ResultSet;
|
||||
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||
import java.util.List;
|
||||
import org.java_websocket.WebSocket;
|
||||
|
||||
/**
|
||||
* A packet to send all calculated data from the unification algorithm back to the client
|
||||
*/
|
||||
public class UnifyResultPacket implements IServerToClientPacket {
|
||||
|
||||
public SerialList<ISerialNode> results;
|
||||
public SerialMap keyStorage;
|
||||
|
||||
public static UnifyResultPacket create(List<ResultSet> resultSets) {
|
||||
UnifyResultPacket serialized = new UnifyResultPacket();
|
||||
KeyStorage keyStorage = new KeyStorage();
|
||||
serialized.results = SerialList.fromMapped(resultSets, resultSet -> resultSet.toSerial(keyStorage));
|
||||
serialized.keyStorage = keyStorage.toSerial(keyStorage);
|
||||
return serialized;
|
||||
}
|
||||
|
||||
@JsonIgnore
|
||||
public List<ResultSet> getResultSet(UnifyContext context) {
|
||||
return this.results.assertListOfMaps().stream()
|
||||
.map(resultData -> ResultSet.fromSerial(resultData, context)).toList();
|
||||
}
|
||||
|
||||
@JsonIgnore
|
||||
public void onHandle(WebSocket webSocket, SocketClient socketClient) {
|
||||
System.out.println("[socket] Received unify result");
|
||||
socketClient.setUnifyResultSets(this);
|
||||
}
|
||||
|
||||
|
||||
}
|
@@ -0,0 +1,16 @@
|
||||
package de.dhbwstuttgart.server.packet.dataContainers;
|
||||
|
||||
import de.dhbwstuttgart.exceptions.NotImplementedException;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
|
||||
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||
|
||||
public interface ISerializableData {
|
||||
|
||||
public abstract ISerialNode toSerial(KeyStorage keyStorage);
|
||||
|
||||
public static Object fromSerial(SerialMap data, UnifyContext context) {
|
||||
throw new NotImplementedException("Missing implementation of \"fromSerial\" for a serializable element");
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,103 @@
|
||||
package de.dhbwstuttgart.server.packet.dataContainers;
|
||||
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
|
||||
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
|
||||
public class KeyStorage implements ISerializableData {
|
||||
|
||||
/**
|
||||
* Store a unique identifier for every element, so it can be referenced in the json
|
||||
*/
|
||||
protected AtomicInteger identifierCount = new AtomicInteger();
|
||||
/**
|
||||
* Store the serialized element per identifier when serializing
|
||||
*/
|
||||
protected SerialMap serializedElements = new SerialMap();
|
||||
/**
|
||||
* Store the unserialized element per identifier when unserializing
|
||||
*/
|
||||
protected Map<String, ISerializableData> unserializedElements = new HashMap<>();
|
||||
|
||||
|
||||
/**
|
||||
* Generate a new unique identifier for an element
|
||||
*/
|
||||
public String getIdentifier() {
|
||||
return this.identifierCount.incrementAndGet() + "_";
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if the given element identifier belongs to an element that was already serialized
|
||||
*/
|
||||
public boolean isAlreadySerialized(String identifier) {
|
||||
return this.serializedElements.containsKey(identifier);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if the given element identifier belongs to an element that was already unserialized
|
||||
*/
|
||||
public boolean isAlreadyUnserialized(String identifier) {
|
||||
return this.unserializedElements.containsKey(identifier);
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a serialized element to prevent it from being serialized again
|
||||
*/
|
||||
public void putSerialized(String identifier, SerialMap serializedElement) {
|
||||
this.serializedElements.put(identifier, serializedElement);
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve a serialized element
|
||||
*/
|
||||
public SerialMap getSerialized(String identifier) {
|
||||
if (!this.serializedElements.containsKey(identifier)) {
|
||||
throw new RuntimeException("No serialized element of identifier " + identifier + " available to get");
|
||||
}
|
||||
return this.serializedElements.getMap(identifier);
|
||||
}
|
||||
|
||||
/**
|
||||
* Register an unserialized element to prevent it from being unserialized again
|
||||
*/
|
||||
public void putUnserialized(String identifier, ISerializableData element) {
|
||||
this.unserializedElements.put(identifier, element);
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve an unserialized element
|
||||
*/
|
||||
public <T extends ISerializableData> T getUnserialized(String identifier, Class<T> target) {
|
||||
if (!this.unserializedElements.containsKey(identifier)) {
|
||||
throw new RuntimeException("No unserialized element of identifier " + identifier + " available to get");
|
||||
}
|
||||
var element = this.unserializedElements.get(identifier);
|
||||
if (target.isInstance(element)) {
|
||||
return (T) element;
|
||||
}
|
||||
throw new RuntimeException("Failed to get unserialized element from KeyStorage. Expected instance of " +
|
||||
target.getName() + " but found " + element.getClass().getName());
|
||||
}
|
||||
|
||||
@Override
|
||||
public SerialMap toSerial(KeyStorage keyStorage) {
|
||||
SerialMap serialized = new SerialMap();
|
||||
serialized.put("serializedElements", this.serializedElements);
|
||||
return serialized;
|
||||
}
|
||||
|
||||
public static KeyStorage fromSerial(SerialMap data, UnifyContext context) {
|
||||
var serializedConstraintsData = data.getMap("serializedElements");
|
||||
var constraintContext = new KeyStorage();
|
||||
for (var entry : serializedConstraintsData.entrySet()) {
|
||||
if (entry.getValue() instanceof SerialMap valueMap) {
|
||||
constraintContext.putSerialized(entry.getKey(), valueMap);
|
||||
}
|
||||
}
|
||||
return constraintContext;
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,31 @@
|
||||
package de.dhbwstuttgart.server.packet.dataContainers.serialized;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonSubTypes;
|
||||
import com.fasterxml.jackson.annotation.JsonTypeInfo;
|
||||
|
||||
|
||||
/**
|
||||
* Use the following classes for an intermediate serialized tree structure
|
||||
*/
|
||||
@JsonTypeInfo(
|
||||
use = JsonTypeInfo.Id.NAME,
|
||||
include = JsonTypeInfo.As.PROPERTY,
|
||||
property = "_t"
|
||||
)
|
||||
@JsonSubTypes({
|
||||
@JsonSubTypes.Type(value = SerialMap.class, name = "m"),
|
||||
@JsonSubTypes.Type(value = SerialList.class, name = "l"),
|
||||
@JsonSubTypes.Type(value = SerialValue.class, name = "v"),
|
||||
@JsonSubTypes.Type(value = SerialUUID.class, name = "u")
|
||||
})
|
||||
public interface ISerialNode {
|
||||
|
||||
default <T extends ISerialNode> T assertType(Class<T> type) {
|
||||
if (type.isInstance(this)) {
|
||||
return (T) this;
|
||||
}
|
||||
throw new RuntimeException("Expected ISerialNode to be of type " + type.getName()
|
||||
+ " but found " + this.getClass().getName() + " instead");
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,74 @@
|
||||
package de.dhbwstuttgart.server.packet.dataContainers.serialized;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
public class SerialList<I extends ISerialNode> extends ArrayList<I> implements ISerialNode {
|
||||
public SerialList() {}
|
||||
public SerialList(Collection<I> data) {
|
||||
this.addAll(data);
|
||||
}
|
||||
public SerialList(Stream<I> data) {
|
||||
this(data.toList());
|
||||
}
|
||||
public SerialList(I[] data) {
|
||||
this(Arrays.stream(data).toList());
|
||||
}
|
||||
|
||||
@SafeVarargs
|
||||
@JsonIgnore
|
||||
public static <A extends ISerialNode> SerialList<A> from(A ...values) {
SerialList<A> list = new SerialList<>();
|
||||
Collections.addAll(list, values);
|
||||
return list;
|
||||
}
|
||||
@JsonIgnore
|
||||
public static <A,B extends ISerialNode> SerialList<B> fromMapped(Stream<A> data, Function<A,B> mapper) {
|
||||
return new SerialList<>(data.map(mapper).toList());
|
||||
}
|
||||
@JsonIgnore
|
||||
public static <A,B extends ISerialNode> SerialList<B> fromMapped(Collection<A> data, Function<A,B> mapper) {
|
||||
return SerialList.fromMapped(data.stream(), mapper);
|
||||
}
|
||||
@JsonIgnore
|
||||
public static <A,B extends ISerialNode> SerialList<B> fromMapped(A[] data, Function<A,B> mapper) {
|
||||
return SerialList.fromMapped(Arrays.stream(data), mapper);
|
||||
}
|
||||
|
||||
@JsonIgnore
|
||||
public SerialList<SerialMap> assertListOfMaps() {
|
||||
if (this.isEmpty() || this.get(0) instanceof SerialMap) {
|
||||
return (SerialList<SerialMap>) this;
|
||||
}
|
||||
throw new RuntimeException("Required List to contain SerialMap elements but condition failed");
|
||||
}
|
||||
|
||||
@JsonIgnore
|
||||
public SerialList<SerialList<?>> assertListOfLists() {
|
||||
if (this.isEmpty() || this.get(0) instanceof SerialList) {
|
||||
return (SerialList<SerialList<?>>) this;
|
||||
}
|
||||
throw new RuntimeException("Required List to contain SerialList elements but condition failed");
|
||||
}
|
||||
|
||||
@JsonIgnore
|
||||
public SerialList<SerialValue<?>> assertListOfValues() {
|
||||
if (this.isEmpty() || this.get(0) instanceof SerialValue) {
|
||||
return (SerialList<SerialValue<?>>) this;
|
||||
}
|
||||
throw new RuntimeException("Required List to contain SerialValue elements but condition failed");
|
||||
}
|
||||
|
||||
@JsonIgnore
|
||||
public SerialList<SerialUUID> assertListOfUUIDs() {
|
||||
if (this.isEmpty() || this.get(0) instanceof SerialUUID) {
|
||||
return (SerialList<SerialUUID>) this;
|
||||
}
|
||||
throw new RuntimeException("Required List to contain SerialUUID elements but condition failed");
|
||||
}
|
||||
}
|
@@ -0,0 +1,84 @@
|
||||
package de.dhbwstuttgart.server.packet.dataContainers.serialized;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import javax.annotation.Nullable;
|
||||
|
||||
public class SerialMap extends HashMap<String, ISerialNode> implements ISerialNode {
|
||||
|
||||
public SerialMap() {
|
||||
super();
|
||||
}
|
||||
|
||||
public SerialMap(Map<String, ISerialNode> data) {
|
||||
super(data);
|
||||
}
|
||||
|
||||
@JsonIgnore
|
||||
public void put(String key, Boolean value) {
|
||||
this.put(key, new SerialValue<>(value));
|
||||
}
|
||||
|
||||
@JsonIgnore
|
||||
public void put(String key, String value) {
|
||||
this.put(key, new SerialValue<>(value));
|
||||
}
|
||||
|
||||
@JsonIgnore
|
||||
public void put(String key, Number value) {
|
||||
this.put(key, new SerialValue<>(value));
|
||||
}
|
||||
|
||||
@JsonIgnore
|
||||
private <T> T get(String key, Class<T> expectedType) {
|
||||
if (!this.containsKey(key)) {
|
||||
throw new RuntimeException("Missing required value " + key + " in ObjectMap");
|
||||
}
|
||||
var element = this.get(key);
|
||||
if (element != null && element.getClass() != expectedType) {
|
||||
throw new RuntimeException(
|
||||
"Required value " + key + " to be of type " + expectedType.getName() + " but found " + element.getClass().getName()
|
||||
);
|
||||
}
|
||||
return (T)element;
|
||||
}
|
||||
|
||||
@JsonIgnore
|
||||
public SerialList<?> getList(String key) {
|
||||
return this.get(key, SerialList.class);
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@JsonIgnore
|
||||
public SerialList<?> getListOrNull(String key) {
|
||||
return this.containsKey(key) ? this.getList(key) : null;
|
||||
}
|
||||
|
||||
@JsonIgnore
|
||||
public SerialMap getMap(String key) {
|
||||
return this.get(key, SerialMap.class);
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@JsonIgnore
|
||||
public SerialMap getMapOrNull(String key) {
|
||||
return this.containsKey(key) ? this.getMap(key) : null;
|
||||
}
|
||||
|
||||
@JsonIgnore
|
||||
public SerialValue<?> getValue(String key) {
|
||||
return this.get(key, SerialValue.class);
|
||||
}
|
||||
|
||||
@JsonIgnore
|
||||
public SerialUUID getUUID(String key) {
|
||||
return this.get(key, SerialUUID.class);
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@JsonIgnore
|
||||
public SerialUUID getUUIDOrNull(String key) {
|
||||
return this.containsKey(key) ? this.getUUID(key) : null;
|
||||
}
|
||||
}
|
@@ -0,0 +1,13 @@
|
||||
package de.dhbwstuttgart.server.packet.dataContainers.serialized;
|
||||
|
||||
public class SerialUUID implements ISerialNode {
|
||||
|
||||
public String uuid;
|
||||
|
||||
public SerialUUID() {}
|
||||
|
||||
public SerialUUID(String uuid) {
|
||||
this.uuid = uuid;
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,28 @@
|
||||
package de.dhbwstuttgart.server.packet.dataContainers.serialized;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||
|
||||
public class SerialValue<T> implements ISerialNode {
|
||||
public T value;
|
||||
public static final SerialValue<Object> NULL = new SerialValue<>(null);
|
||||
|
||||
public SerialValue() {}
|
||||
|
||||
public SerialValue(T value) {
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
@JsonIgnore
|
||||
public <A> SerialValue<A> assertValueOf(Class<A> targetClass) {
|
||||
if (this.value == null || targetClass.isInstance(this.value)) {
|
||||
return (SerialValue<A>) this;
|
||||
}
|
||||
throw new RuntimeException("Required Value to contain " + targetClass.getName() + " value but condition failed on" +
|
||||
" type " + this.value.getClass().getName());
|
||||
}
|
||||
|
||||
@JsonIgnore
|
||||
public <A> A getOf(Class<A> targetClass) {
|
||||
return this.assertValueOf(targetClass).value;
|
||||
}
|
||||
}
|
@@ -1,5 +1,8 @@
|
||||
package de.dhbwstuttgart.syntaxtree.factory;
|
||||
|
||||
import de.dhbwstuttgart.core.JavaTXCompiler;
|
||||
import de.dhbwstuttgart.core.JavaTXServer;
|
||||
|
||||
public class NameGenerator {
|
||||
|
||||
private static String strNextName = "A";
|
||||
@@ -13,10 +16,6 @@ public class NameGenerator {
|
||||
public static void reset() {
|
||||
strNextName = "A";
|
||||
}
|
||||
|
||||
public static void resetTo(String name) {
|
||||
strNextName = name;
|
||||
}
|
||||
|
||||
/**
|
||||
* Berechnet einen neuen, eindeutigen Namen für eine neue
|
||||
@@ -30,7 +29,11 @@ public class NameGenerator {
|
||||
|
||||
// nächster Name berechnen und in strNextName speichern
|
||||
inc( strNextName.length() - 1 );
|
||||
|
||||
|
||||
if (JavaTXServer.isRunning) {
|
||||
throw new RuntimeException("Using the NameGenerator on a server is not allowed");
|
||||
}
|
||||
JavaTXCompiler.defaultClientPlaceholderRegistry.addPlaceholder(strReturn);
|
||||
return strReturn;
|
||||
}
|
||||
|
||||
|
@@ -1,5 +1,6 @@
|
||||
package de.dhbwstuttgart.syntaxtree.factory;
|
||||
|
||||
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
|
||||
import java.io.Writer;
|
||||
import java.lang.reflect.Modifier;
|
||||
import java.util.*;
|
||||
@@ -31,9 +32,13 @@ import org.antlr.v4.runtime.Token;
|
||||
|
||||
public class UnifyTypeFactory {
|
||||
|
||||
private static ArrayList<PlaceholderType> PLACEHOLDERS = new ArrayList<>();
|
||||
|
||||
public static FiniteClosure generateFC(List<ClassOrInterface> fromClasses, Writer logFile, ClassLoader classLoader, JavaTXCompiler compiler) throws ClassNotFoundException {
|
||||
public static FiniteClosure generateFC(
|
||||
List<ClassOrInterface> fromClasses,
|
||||
Writer logFile,
|
||||
ClassLoader classLoader,
|
||||
JavaTXCompiler compiler,
|
||||
PlaceholderRegistry placeholderRegistry
|
||||
) throws ClassNotFoundException {
|
||||
/*
|
||||
Die transitive Hülle muss funktionieren.
|
||||
Man darf schreiben List<A> extends AL<A>
|
||||
@@ -44,7 +49,7 @@ public class UnifyTypeFactory {
|
||||
Generell dürfen sie immer die gleichen Namen haben.
|
||||
TODO: die transitive Hülle bilden
|
||||
*/
|
||||
return new FiniteClosure(FCGenerator.toUnifyFC(compiler, fromClasses, classLoader), logFile, compiler);
|
||||
return new FiniteClosure(FCGenerator.toUnifyFC(compiler, fromClasses, classLoader, placeholderRegistry), logFile, compiler, placeholderRegistry);
|
||||
}
|
||||
|
||||
public static UnifyPair generateSmallerPair(UnifyType tl, UnifyType tr, SourceLoc location){
|
||||
@@ -67,23 +72,23 @@ public class UnifyTypeFactory {
|
||||
* Convert from
|
||||
* ASTType -> UnifyType
|
||||
*/
|
||||
public static UnifyType convert(JavaTXCompiler compiler, RefTypeOrTPHOrWildcardOrGeneric t, Boolean innerType){
|
||||
public static UnifyType convert(JavaTXCompiler compiler, RefTypeOrTPHOrWildcardOrGeneric t, Boolean innerType, PlaceholderRegistry placeholderRegistry){
|
||||
if (t instanceof GenericRefType){
|
||||
return UnifyTypeFactory.convert(compiler, (GenericRefType)t, innerType);
|
||||
return UnifyTypeFactory.convert(compiler, (GenericRefType)t, innerType, placeholderRegistry);
|
||||
} else if (t instanceof TypePlaceholder){
|
||||
return UnifyTypeFactory.convert(compiler, (TypePlaceholder)t, innerType);
|
||||
return UnifyTypeFactory.convert(compiler, (TypePlaceholder)t, innerType, placeholderRegistry);
|
||||
} else if (t instanceof ExtendsWildcardType){
|
||||
return UnifyTypeFactory.convert(compiler, (ExtendsWildcardType)t, innerType);
|
||||
return UnifyTypeFactory.convert(compiler, (ExtendsWildcardType)t, innerType, placeholderRegistry);
|
||||
} else if (t instanceof SuperWildcardType) {
|
||||
return UnifyTypeFactory.convert(compiler, (SuperWildcardType) t, innerType);
|
||||
return UnifyTypeFactory.convert(compiler, (SuperWildcardType) t, innerType, placeholderRegistry);
|
||||
} else if (t instanceof RefType){
|
||||
return UnifyTypeFactory.convert(compiler, (RefType)t, innerType);
|
||||
return UnifyTypeFactory.convert(compiler, (RefType)t, innerType, placeholderRegistry);
|
||||
}
|
||||
//Es wurde versucht ein Typ umzuwandeln, welcher noch nicht von der Factory abgedeckt ist
|
||||
throw new NotImplementedException("Der Typ "+t+" kann nicht umgewandelt werden");
|
||||
}
|
||||
|
||||
public static UnifyType convert(JavaTXCompiler compiler, RefType t, Boolean innerType){
|
||||
public static UnifyType convert(JavaTXCompiler compiler, RefType t, Boolean innerType, PlaceholderRegistry placeholderRegistry){
|
||||
//Check if it is a FunN Type:
|
||||
Pattern p = Pattern.compile("Fun(\\d+)[$][$]");
|
||||
Matcher m = p.matcher(t.getName().toString());
|
||||
@@ -91,76 +96,76 @@ public class UnifyTypeFactory {
|
||||
if(b){
|
||||
Integer N = Integer.valueOf(m.group(1));
|
||||
if((N + 1) == t.getParaList().size()){
|
||||
return convertFunN(compiler, t.getParaList(), false);
|
||||
return convertFunN(compiler, t.getParaList(), false, placeholderRegistry);
|
||||
}
|
||||
}
|
||||
UnifyType ret;
|
||||
List<UnifyType> params = new ArrayList<>();
|
||||
if (t.getParaList() != null) {
|
||||
for (RefTypeOrTPHOrWildcardOrGeneric pT : t.getParaList()) {
|
||||
params.add(UnifyTypeFactory.convert(compiler, pT, true));
|
||||
params.add(UnifyTypeFactory.convert(compiler, pT, true, placeholderRegistry));
|
||||
}
|
||||
}
|
||||
|
||||
var clazz = compiler.getClass(t.getName());
|
||||
if (clazz != null && clazz.isInterface() && clazz.isFunctionalInterface()) {
|
||||
var method = clazz.getMethods().stream().filter(x -> Modifier.isAbstract(x.modifier)).findFirst().orElseThrow();
|
||||
var methodParams = method.getParameterList().getFormalparalist().stream().map(x -> convert(compiler, x.getType(), true)).toList();
|
||||
var methodParams = method.getParameterList().getFormalparalist().stream().map(x -> convert(compiler, x.getType(), true, placeholderRegistry)).toList();
|
||||
var generics = StreamSupport.stream(clazz.getGenerics().spliterator(), false).map(GenericTypeVar::getName).toList();
|
||||
return new FunInterfaceType(t.getName().toString(), new TypeParams(params), methodParams, convert(compiler, method.getReturnType(), true), generics);
|
||||
return new FunInterfaceType(t.getName().toString(), new TypeParams(params), methodParams, convert(compiler, method.getReturnType(), true, placeholderRegistry), generics);
|
||||
}
|
||||
|
||||
return new ReferenceType(t.getName().toString(),new TypeParams(params));
|
||||
}
|
||||
|
||||
public static UnifyType convertFunN(JavaTXCompiler compiler, List<RefTypeOrTPHOrWildcardOrGeneric> paraList, Boolean innerType){
|
||||
public static UnifyType convertFunN(JavaTXCompiler compiler, List<RefTypeOrTPHOrWildcardOrGeneric> paraList, Boolean innerType, PlaceholderRegistry placeholderRegistry){
|
||||
UnifyType ret;
|
||||
List<UnifyType> params = new ArrayList<>();
|
||||
if(paraList != null && paraList.size() > 0){
|
||||
for(RefTypeOrTPHOrWildcardOrGeneric pT : paraList){
|
||||
params.add(UnifyTypeFactory.convert(compiler, pT, false));
|
||||
params.add(UnifyTypeFactory.convert(compiler, pT, false, placeholderRegistry));
|
||||
}
|
||||
}
|
||||
ret = FunNType.getFunNType(new TypeParams(params));
|
||||
return ret;
|
||||
}
|
||||
|
||||
public static UnifyType convert(JavaTXCompiler compiler, TypePlaceholder tph, Boolean innerType){
|
||||
public static UnifyType convert(JavaTXCompiler compiler, TypePlaceholder tph, Boolean innerType, PlaceholderRegistry placeholderRegistry) {
|
||||
if (tph.getName().equals("AFR")) {
|
||||
System.out.println("XXX"+innerType);
|
||||
}
|
||||
PlaceholderType ntph = new PlaceholderType(tph.getName(), tph.getVariance());
|
||||
PlaceholderType ntph = new PlaceholderType(tph.getName(), tph.getVariance(), placeholderRegistry);
|
||||
ntph.setVariance(tph.getVariance());
|
||||
ntph.setOrCons(tph.getOrCons());
|
||||
ntph.setWildcardtable(tph.getWildcardtable());
|
||||
int in = PLACEHOLDERS.indexOf(ntph);
|
||||
int in = placeholderRegistry.UnifyTypeFactory_PLACEHOLDERS.indexOf(ntph);
|
||||
if (in == -1) {
|
||||
PLACEHOLDERS.add(ntph);
|
||||
placeholderRegistry.UnifyTypeFactory_PLACEHOLDERS.add(ntph);
|
||||
ntph.setInnerType(innerType);
|
||||
return ntph;
|
||||
}
|
||||
else {
|
||||
PlaceholderType oldpht = PLACEHOLDERS.get(in);
|
||||
PlaceholderType oldpht = placeholderRegistry.UnifyTypeFactory_PLACEHOLDERS.get(in);
|
||||
oldpht.setInnerType(oldpht.isInnerType() || innerType);
|
||||
return oldpht;
|
||||
}
|
||||
}
|
||||
|
||||
public static UnifyType convert(JavaTXCompiler compiler, GenericRefType t, Boolean innerType){
|
||||
public static UnifyType convert(JavaTXCompiler compiler, GenericRefType t, Boolean innerType, PlaceholderRegistry placeholderRegistry){
|
||||
return new ReferenceType(t.getParsedName(), true);
|
||||
}
|
||||
|
||||
public static UnifyType convert(JavaTXCompiler compiler, WildcardType t, Boolean innerType){
|
||||
public static UnifyType convert(JavaTXCompiler compiler, WildcardType t, Boolean innerType, PlaceholderRegistry placeholderRegistry){
|
||||
if(t.isExtends())
|
||||
return new ExtendsType(UnifyTypeFactory.convert(compiler, t.getInnerType(), false));
|
||||
return new ExtendsType(UnifyTypeFactory.convert(compiler, t.getInnerType(), false, placeholderRegistry));
|
||||
else if(t.isSuper())
|
||||
return new SuperType(UnifyTypeFactory.convert(compiler, t.getInnerType(), false));
|
||||
return new SuperType(UnifyTypeFactory.convert(compiler, t.getInnerType(), false, placeholderRegistry));
|
||||
else throw new NotImplementedException();
|
||||
}
|
||||
|
||||
|
||||
public static ConstraintSet<UnifyPair> convert(JavaTXCompiler compiler, ConstraintSet<Pair> constraints) {
|
||||
return constraints.map(c -> UnifyTypeFactory.convert(compiler, c));
|
||||
public static ConstraintSet<UnifyPair> convert(JavaTXCompiler compiler, ConstraintSet<Pair> constraints, PlaceholderRegistry placeholderRegistry) {
|
||||
return constraints.map(c -> UnifyTypeFactory.convert(compiler, c, placeholderRegistry));
|
||||
}
|
||||
|
||||
//NEVER USED
|
||||
@@ -171,30 +176,30 @@ public class UnifyTypeFactory {
|
||||
// return unifyPairConstraint;
|
||||
//}
|
||||
|
||||
public static UnifyPair convert(JavaTXCompiler compiler, Pair p) {
|
||||
public static UnifyPair convert(JavaTXCompiler compiler, Pair p, PlaceholderRegistry placeholderRegistry) {
|
||||
UnifyPair ret = null;
|
||||
if(p.GetOperator().equals(PairOperator.SMALLERDOT)) {
|
||||
ret = generateSmallerDotPair(UnifyTypeFactory.convert(compiler, p.TA1, false)
|
||||
, UnifyTypeFactory.convert(compiler, p.TA2, false), p.getLocation());
|
||||
ret = generateSmallerDotPair(UnifyTypeFactory.convert(compiler, p.TA1, false, placeholderRegistry)
|
||||
, UnifyTypeFactory.convert(compiler, p.TA2, false, placeholderRegistry), p.getLocation());
|
||||
//return ret;
|
||||
}else if(p.GetOperator().equals(PairOperator.SMALLERNEQDOT)) {
|
||||
ret = generateSmallNotEqualDotPair(UnifyTypeFactory.convert(compiler, p.TA1, false)
|
||||
, UnifyTypeFactory.convert(compiler, p.TA2, false), p.getLocation());
|
||||
ret = generateSmallNotEqualDotPair(UnifyTypeFactory.convert(compiler, p.TA1, false, placeholderRegistry)
|
||||
, UnifyTypeFactory.convert(compiler, p.TA2, false, placeholderRegistry), p.getLocation());
|
||||
//return ret;
|
||||
}else if(p.GetOperator().equals(PairOperator.EQUALSDOT)) {
|
||||
ret = generateEqualDotPair(UnifyTypeFactory.convert(compiler, p.TA1, false)
|
||||
, UnifyTypeFactory.convert(compiler, p.TA2, false), p.getLocation());
|
||||
ret = generateEqualDotPair(UnifyTypeFactory.convert(compiler, p.TA1, false, placeholderRegistry)
|
||||
, UnifyTypeFactory.convert(compiler, p.TA2, false, placeholderRegistry), p.getLocation());
|
||||
//return ret;
|
||||
}else if(p.GetOperator().equals(PairOperator.SMALLER)){
|
||||
ret = generateSmallerPair(UnifyTypeFactory.convert(compiler, p.TA1, false),
|
||||
UnifyTypeFactory.convert(compiler, p.TA2, false), p.getLocation());
|
||||
ret = generateSmallerPair(UnifyTypeFactory.convert(compiler, p.TA1, false, placeholderRegistry),
|
||||
UnifyTypeFactory.convert(compiler, p.TA2, false, placeholderRegistry), p.getLocation());
|
||||
}else throw new NotImplementedException();
|
||||
UnifyType lhs, rhs;
|
||||
if (((lhs = ret.getLhsType()) instanceof PlaceholderType)
|
||||
&& ((PlaceholderType)lhs).isWildcardable()
|
||||
&& (rhs = ret.getLhsType()) instanceof PlaceholderType) {
|
||||
if (lhs.getName().equals("AQ")) {
|
||||
System.out.println("");
|
||||
// System.out.println("");
|
||||
}
|
||||
((PlaceholderType)rhs).enableWildcardtable();
|
||||
}
|
||||
@@ -203,7 +208,7 @@ public class UnifyTypeFactory {
|
||||
&& ((PlaceholderType)rhs).isWildcardable()
|
||||
&& (lhs = ret.getLhsType()) instanceof PlaceholderType) {
|
||||
if (rhs.getName().equals("AQ")) {
|
||||
System.out.println("");
|
||||
// System.out.println("");
|
||||
}
|
||||
((PlaceholderType)lhs).enableWildcardtable();
|
||||
}
|
||||
@@ -214,16 +219,16 @@ public class UnifyTypeFactory {
|
||||
* Convert from
|
||||
* UnifyType -> ASTType
|
||||
*/
|
||||
public static Set<ResultPair> convert(Set<UnifyPair> unifyPairSet, Map<String,TypePlaceholder> tphs) {
|
||||
public static Set<ResultPair> convert(Set<UnifyPair> unifyPairSet, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
|
||||
return unifyPairSet.stream().map(
|
||||
unifyPair -> convert(unifyPair, tphs))
|
||||
unifyPair -> convert(unifyPair, tphs, placeholderRegistry))
|
||||
.collect(Collectors.toSet());
|
||||
}
|
||||
|
||||
public static ResultPair convert(UnifyPair mp, Map<String,TypePlaceholder> tphs) {
|
||||
public static ResultPair convert(UnifyPair mp, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
|
||||
if (mp == null) { return null;} //kann bei basePairs passieren
|
||||
RefTypeOrTPHOrWildcardOrGeneric tl = UnifyTypeFactory.convert(mp.getLhsType(), tphs);
|
||||
RefTypeOrTPHOrWildcardOrGeneric tr = UnifyTypeFactory.convert(mp.getRhsType(), tphs);
|
||||
RefTypeOrTPHOrWildcardOrGeneric tl = UnifyTypeFactory.convert(mp.getLhsType(), tphs, placeholderRegistry);
|
||||
RefTypeOrTPHOrWildcardOrGeneric tr = UnifyTypeFactory.convert(mp.getRhsType(), tphs, placeholderRegistry);
|
||||
if(tl instanceof TypePlaceholder){
|
||||
if(tr instanceof TypePlaceholder) {
|
||||
|
||||
@@ -232,7 +237,7 @@ public class UnifyTypeFactory {
|
||||
//Einfach ignorieren TODO: Das hier muss ausgebessert werden:
|
||||
//return new PairTPHequalRefTypeOrWildcardType((TypePlaceholder)tl, ASTFactory.createObjectType());
|
||||
}else{
|
||||
return new PairTPHsmallerTPH((TypePlaceholder)tl, (TypePlaceholder)tr, convert(mp.getBasePair(), tphs));
|
||||
return new PairTPHsmallerTPH((TypePlaceholder)tl, (TypePlaceholder)tr, convert(mp.getBasePair(), tphs, placeholderRegistry));
|
||||
}
|
||||
}else if(tr instanceof RefType){
|
||||
return new PairTPHequalRefTypeOrWildcardType((TypePlaceholder)tl, (RefType) tr);
|
||||
@@ -244,51 +249,51 @@ public class UnifyTypeFactory {
|
||||
}else return new PairNoResult(tl, tr);//throw new NotImplementedException();
|
||||
}
|
||||
|
||||
public static RefTypeOrTPHOrWildcardOrGeneric convert(ReferenceType t, Map<String,TypePlaceholder> tphs) {
|
||||
public static RefTypeOrTPHOrWildcardOrGeneric convert(ReferenceType t, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
|
||||
if(JavaClassName.Void.equals(t.getName()))return new Void(new NullToken());
|
||||
if (t.isGenTypeVar()) return new GenericRefType(t.getName(),new NullToken());
|
||||
RefType ret = new RefType(new JavaClassName(t.getName()),convert(t.getTypeParams(), tphs),new NullToken());
|
||||
RefType ret = new RefType(new JavaClassName(t.getName()),convert(t.getTypeParams(), tphs, placeholderRegistry),new NullToken());
|
||||
return ret;
|
||||
}
|
||||
|
||||
public static RefTypeOrTPHOrWildcardOrGeneric convert(FunNType t, Map<String,TypePlaceholder> tphs) {
|
||||
RefType ret = new RefType(new JavaClassName(t.getName()), convert(t.getTypeParams(), tphs), new NullToken());
|
||||
public static RefTypeOrTPHOrWildcardOrGeneric convert(FunNType t, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
|
||||
RefType ret = new RefType(new JavaClassName(t.getName()), convert(t.getTypeParams(), tphs, placeholderRegistry), new NullToken());
|
||||
return ret;
|
||||
}
|
||||
|
||||
public static RefTypeOrTPHOrWildcardOrGeneric convert(SuperType t, Map<String,TypePlaceholder> tphs) {
|
||||
RefTypeOrTPHOrWildcardOrGeneric innerType = convert(t.getSuperedType(), tphs);
|
||||
public static RefTypeOrTPHOrWildcardOrGeneric convert(SuperType t, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
|
||||
RefTypeOrTPHOrWildcardOrGeneric innerType = convert(t.getSuperedType(), tphs, placeholderRegistry);
|
||||
return new SuperWildcardType(innerType, new NullToken());
|
||||
}
|
||||
|
||||
public static RefTypeOrTPHOrWildcardOrGeneric convert(ExtendsType t, Map<String,TypePlaceholder> tphs) {
|
||||
RefTypeOrTPHOrWildcardOrGeneric innerType = convert(t.getExtendedType(), tphs);
|
||||
public static RefTypeOrTPHOrWildcardOrGeneric convert(ExtendsType t, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
|
||||
RefTypeOrTPHOrWildcardOrGeneric innerType = convert(t.getExtendedType(), tphs, placeholderRegistry);
|
||||
return new ExtendsWildcardType(innerType, new NullToken());
|
||||
}
|
||||
|
||||
public static RefTypeOrTPHOrWildcardOrGeneric convert(PlaceholderType t, Map<String,TypePlaceholder> tphs) {
|
||||
public static RefTypeOrTPHOrWildcardOrGeneric convert(PlaceholderType t, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
|
||||
TypePlaceholder ret = tphs.get(t.getName());
|
||||
if(ret == null){ //Dieser TPH wurde vom Unifikationsalgorithmus erstellt
|
||||
ret = TypePlaceholder.fresh(new NullToken());
|
||||
ret = TypePlaceholder.fresh(new NullToken(), placeholderRegistry);
|
||||
tphs.put(t.getName(), ret);
|
||||
}
|
||||
ret.setVariance(t.getVariance());
|
||||
return ret;
|
||||
}
|
||||
|
||||
public static RefTypeOrTPHOrWildcardOrGeneric convert(UnifyType t, Map<String,TypePlaceholder> tphs) {
|
||||
if(t instanceof FunNType)return convert((FunNType) t, tphs);
|
||||
if(t instanceof ReferenceType)return convert((ReferenceType) t, tphs);
|
||||
if(t instanceof SuperType)return convert((SuperType) t, tphs);
|
||||
if(t instanceof ExtendsType)return convert((ExtendsType) t, tphs);
|
||||
if(t instanceof PlaceholderType)return convert((PlaceholderType) t, tphs);
|
||||
public static RefTypeOrTPHOrWildcardOrGeneric convert(UnifyType t, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
|
||||
if(t instanceof FunNType)return convert((FunNType) t, tphs, placeholderRegistry);
|
||||
if(t instanceof ReferenceType)return convert((ReferenceType) t, tphs, placeholderRegistry);
|
||||
if(t instanceof SuperType)return convert((SuperType) t, tphs, placeholderRegistry);
|
||||
if(t instanceof ExtendsType)return convert((ExtendsType) t, tphs, placeholderRegistry);
|
||||
if(t instanceof PlaceholderType)return convert((PlaceholderType) t, tphs, placeholderRegistry);
|
||||
throw new NotImplementedException("Der Typ "+t+" kann nicht umgewandelt werden");
|
||||
}
|
||||
|
||||
private static List<RefTypeOrTPHOrWildcardOrGeneric> convert(TypeParams typeParams, Map<String,TypePlaceholder> tphs) {
|
||||
private static List<RefTypeOrTPHOrWildcardOrGeneric> convert(TypeParams typeParams, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
|
||||
List<RefTypeOrTPHOrWildcardOrGeneric> ret = new ArrayList<>();
|
||||
for(UnifyType uT : typeParams){
|
||||
RefTypeOrTPHOrWildcardOrGeneric toAdd = convert(uT, tphs);
|
||||
RefTypeOrTPHOrWildcardOrGeneric toAdd = convert(uT, tphs, placeholderRegistry);
|
||||
ret.add(toAdd);
|
||||
}
|
||||
return ret;
|
||||
|
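All convert overloads above now carry a placeholderRegistry so that placeholders invented during conversion (the branch where tphs has no entry for the unify name) take their fresh names from a per-run registry instead of the static NameGenerator. The project's PlaceholderRegistry class is not part of this hunk; the sketch below only illustrates the idea with invented names (FreshNameRegistry, register), not the real API.

import java.util.HashSet;
import java.util.Set;

// Hypothetical stand-in for a per-run placeholder registry (not the project's class).
class FreshNameRegistry {
    private final Set<String> used = new HashSet<>();
    private int counter = 0;

    // Hands out a name this registry has never returned or registered before.
    String generateFreshPlaceholderName() {
        String name;
        do {
            name = "TPH" + counter++;
        } while (!used.add(name));
        return name;
    }

    // Records a name that already occurs in the input so it is never reused.
    void register(String name) {
        used.add(name);
    }
}

public class FreshNameDemo {
    public static void main(String[] args) {
        FreshNameRegistry registry = new FreshNameRegistry();
        registry.register("TPH0");                                   // already taken by the program
        System.out.println(registry.generateFreshPlaceholderName()); // TPH1
        System.out.println(registry.generateFreshPlaceholderName()); // TPH2
    }
}

Threading such a registry through every overload keeps name generation local to one compilation/unification run, which a single static counter cannot do once several runs share the same JVM.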
@@ -1,8 +1,13 @@
package de.dhbwstuttgart.syntaxtree.type;

import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.ASTVisitor;
import de.dhbwstuttgart.typeinference.result.ResultSetVisitor;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import org.antlr.v4.runtime.Token;

import java.util.Objects;
@@ -15,7 +20,7 @@ import java.util.Objects;
 *
 */

public class ExtendsWildcardType extends WildcardType{
public class ExtendsWildcardType extends WildcardType implements ISerializableData {

    /**
     * Author: Arne Lüdtke<br/>
@@ -68,4 +73,22 @@ public class ExtendsWildcardType extends WildcardType{
        ExtendsWildcardType that = (ExtendsWildcardType) o;
        return that.innerType.equals(this.innerType);
    }

    @Override
    public SerialMap toSerial(KeyStorage keyStorage) {
        SerialMap serialized = new SerialMap();
        serialized.put("innerType", this.innerType.toSerial(keyStorage));

        // create the wrapper and put this as the object
        var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
        serializedWrapper.put("object", serialized);
        return serializedWrapper;
    }

    public static ExtendsWildcardType fromSerial(SerialMap data, UnifyContext context) {
        return new ExtendsWildcardType(
            RefTypeOrTPHOrWildcardOrGeneric.fromSerial(data.getMap("innerType"), context),
            new NullToken()
        );
    }
}
@@ -1,57 +1,77 @@
|
||||
package de.dhbwstuttgart.syntaxtree.type;
|
||||
|
||||
import de.dhbwstuttgart.parser.NullToken;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
|
||||
import de.dhbwstuttgart.syntaxtree.ASTVisitor;
|
||||
import de.dhbwstuttgart.typeinference.result.ResultSetVisitor;
|
||||
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||
import org.antlr.v4.runtime.Token;
|
||||
|
||||
import java.util.Objects;
|
||||
|
||||
public class GenericRefType extends RefTypeOrTPHOrWildcardOrGeneric
|
||||
{
|
||||
private String name;
|
||||
public class GenericRefType extends RefTypeOrTPHOrWildcardOrGeneric implements ISerializableData {
|
||||
private String name;
|
||||
|
||||
public GenericRefType(String name, Token offset)
|
||||
{
|
||||
super(offset);
|
||||
this.name = name;
|
||||
}
|
||||
public GenericRefType(String name, Token offset) {
|
||||
super(offset);
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public String getParsedName(){
|
||||
return name.toString();
|
||||
}
|
||||
public String getParsedName() {
|
||||
return name.toString();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void accept(ASTVisitor visitor) {
|
||||
visitor.visit(this);
|
||||
}
|
||||
@Override
|
||||
public void accept(ASTVisitor visitor) {
|
||||
visitor.visit(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <A> A acceptTV(TypeVisitor<A> visitor) {
|
||||
return visitor.visit(this);
|
||||
}
|
||||
@Override
|
||||
public <A> A acceptTV(TypeVisitor<A> visitor) {
|
||||
return visitor.visit(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void accept(ResultSetVisitor visitor) {
|
||||
visitor.visit(this);
|
||||
}
|
||||
@Override
|
||||
public void accept(ResultSetVisitor visitor) {
|
||||
visitor.visit(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
GenericRefType that = (GenericRefType) o;
|
||||
return name.equals(that.name);
|
||||
}
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
GenericRefType that = (GenericRefType) o;
|
||||
return name.equals(that.name);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(name);
|
||||
}
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(name);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString()
|
||||
{
|
||||
return "GTV " + this.name;
|
||||
}
|
||||
@Override
|
||||
public String toString() {
|
||||
return "GTV " + this.name;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SerialMap toSerial(KeyStorage keyStorage) {
|
||||
SerialMap serialized = new SerialMap();
|
||||
serialized.put("name", this.name);
|
||||
|
||||
// create the wrapper and put this as the object
|
||||
var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
|
||||
serializedWrapper.put("object", serialized);
|
||||
return serializedWrapper;
|
||||
}
|
||||
|
||||
public static GenericRefType fromSerial(SerialMap data, UnifyContext context) {
|
||||
return new GenericRefType(
|
||||
data.getValue("name").getOf(String.class),
|
||||
new NullToken()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -1,8 +1,15 @@
|
||||
package de.dhbwstuttgart.syntaxtree.type;
|
||||
|
||||
import de.dhbwstuttgart.parser.NullToken;
|
||||
import de.dhbwstuttgart.parser.scope.JavaClassName;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
|
||||
import de.dhbwstuttgart.syntaxtree.ASTVisitor;
|
||||
import de.dhbwstuttgart.typeinference.result.ResultSetVisitor;
|
||||
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||
import org.antlr.v4.runtime.Token;
|
||||
|
||||
import java.util.ArrayList;
|
||||
@@ -11,122 +18,137 @@ import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
|
||||
public class RefType extends RefTypeOrTPHOrWildcardOrGeneric
|
||||
{
|
||||
protected final JavaClassName name;
|
||||
protected final List<RefTypeOrTPHOrWildcardOrGeneric> parameter;
|
||||
/**
|
||||
* Ist primitiveFlag auf true, muss beim Codegen dieser Reftype durch
|
||||
* den primitiven Datentyp ersetzt werden
|
||||
*
|
||||
* Bsp: java.lang.Integer mit Flag wird dann zu [int]
|
||||
*/
|
||||
boolean primitiveFlag = false; // TODO Should be final
|
||||
public class RefType extends RefTypeOrTPHOrWildcardOrGeneric implements ISerializableData {
|
||||
protected final JavaClassName name;
|
||||
protected final List<RefTypeOrTPHOrWildcardOrGeneric> parameter;
|
||||
/**
|
||||
* Ist primitiveFlag auf true, muss beim Codegen dieser Reftype durch
|
||||
* den primitiven Datentyp ersetzt werden
|
||||
* <p>
|
||||
* Bsp: java.lang.Integer mit Flag wird dann zu [int]
|
||||
*/
|
||||
boolean primitiveFlag = false; // TODO Should be final
|
||||
|
||||
public RefType(JavaClassName fullyQualifiedName, Token offset)
|
||||
{
|
||||
this(fullyQualifiedName, new ArrayList<>(), offset);
|
||||
public RefType(JavaClassName fullyQualifiedName, Token offset) {
|
||||
this(fullyQualifiedName, new ArrayList<>(), offset);
|
||||
}
|
||||
|
||||
public boolean isPrimitive() {
|
||||
return primitiveFlag;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
String params = "";
|
||||
if (parameter.size() > 0) {
|
||||
params += "<";
|
||||
Iterator<RefTypeOrTPHOrWildcardOrGeneric> it = parameter.iterator();
|
||||
while (it.hasNext()) {
|
||||
RefTypeOrTPHOrWildcardOrGeneric param = it.next();
|
||||
params += param.toString();
|
||||
if (it.hasNext()) params += ", ";
|
||||
}
|
||||
params += ">";
|
||||
}
|
||||
return this.name.toString() + params;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return this.name.hashCode();//Nur den Name hashen. Sorgt für langsame, aber funktionierende HashMaps
|
||||
}
|
||||
|
||||
public RefType(JavaClassName fullyQualifiedName, List<RefTypeOrTPHOrWildcardOrGeneric> parameter, Token offset) {
|
||||
this(fullyQualifiedName, parameter, offset, false);
|
||||
}
|
||||
|
||||
public RefType(JavaClassName fullyQualifiedName, List<RefTypeOrTPHOrWildcardOrGeneric> parameter, Token offset, boolean primitiveFlag) {
|
||||
super(offset);
|
||||
this.name = (fullyQualifiedName);
|
||||
this.parameter = parameter;
|
||||
this.primitiveFlag = primitiveFlag;
|
||||
}
|
||||
|
||||
public JavaClassName getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public List<RefTypeOrTPHOrWildcardOrGeneric> getParaList() {
|
||||
if (this.parameter == null) return new ArrayList<>();
|
||||
return this.parameter;
|
||||
}
|
||||
|
||||
/**
|
||||
* Author: Jörg Bäuerle<br/>
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public boolean equals(Object obj) {
|
||||
if (!(obj instanceof RefType refObj)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
public boolean isPrimitive() {
|
||||
return primitiveFlag;
|
||||
}
|
||||
if (!Objects.equals(this.name, refObj.name)) return false;
|
||||
boolean ret = true;
|
||||
|
||||
@Override
|
||||
public String toString(){
|
||||
String params = "";
|
||||
if(parameter.size()>0){
|
||||
params += "<";
|
||||
Iterator<RefTypeOrTPHOrWildcardOrGeneric> it = parameter.iterator();
|
||||
while(it.hasNext()){
|
||||
RefTypeOrTPHOrWildcardOrGeneric param = it.next();
|
||||
params += param.toString();
|
||||
if(it.hasNext())params += ", ";
|
||||
}
|
||||
params += ">";
|
||||
//if(!(super.equals(obj))) PL 2020-03-12 muss vll. einkommentiert werden
|
||||
// return false;
|
||||
|
||||
if (parameter == null || parameter.size() == 0) {
|
||||
ret &= (refObj.getParaList() == null || refObj.getParaList().isEmpty());
|
||||
} else {
|
||||
if (refObj.getParaList() == null) {
|
||||
ret = false;
|
||||
} else if (parameter.size() != refObj.getParaList().size()) {
|
||||
ret = false;
|
||||
} else {
|
||||
for (int i = 0; i < parameter.size(); i++) {
|
||||
ret &= parameter.get(i).equals(refObj.getParaList().get(i));
|
||||
}
|
||||
return this.name.toString() + params;
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return this.name.hashCode();//Nur den Name hashen. Sorgt für langsame, aber funktionierende HashMaps
|
||||
}
|
||||
}
|
||||
|
||||
public RefType(JavaClassName fullyQualifiedName, List<RefTypeOrTPHOrWildcardOrGeneric> parameter, Token offset) {
|
||||
this(fullyQualifiedName, parameter, offset, false);
|
||||
}
|
||||
|
||||
public RefType(JavaClassName fullyQualifiedName, List<RefTypeOrTPHOrWildcardOrGeneric> parameter, Token offset, boolean primitiveFlag) {
|
||||
super(offset);
|
||||
this.name = (fullyQualifiedName);
|
||||
this.parameter = parameter;
|
||||
this.primitiveFlag = primitiveFlag;
|
||||
}
|
||||
@Override
|
||||
public void accept(ASTVisitor visitor) {
|
||||
visitor.visit(this);
|
||||
}
|
||||
|
||||
public JavaClassName getName()
|
||||
{
|
||||
return name;
|
||||
}
|
||||
@Override
|
||||
public <A> A acceptTV(TypeVisitor<A> visitor) {
|
||||
return visitor.visit(this);
|
||||
}
|
||||
|
||||
public List<RefTypeOrTPHOrWildcardOrGeneric> getParaList(){
|
||||
if(this.parameter==null)return new ArrayList<>();
|
||||
return this.parameter;
|
||||
}
|
||||
@Override
|
||||
public void accept(ResultSetVisitor visitor) {
|
||||
visitor.visit(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Author: Jörg Bäuerle<br/>
|
||||
* @return
|
||||
*/
|
||||
public boolean equals(Object obj)
|
||||
{
|
||||
if(obj instanceof RefType){
|
||||
if (!Objects.equals(this.name, ((RefType) obj).name)) return false;
|
||||
boolean ret = true;
|
||||
|
||||
//if(!(super.equals(obj))) PL 2020-03-12 muss vll. einkommentiert werden
|
||||
// return false;
|
||||
|
||||
if(parameter==null || parameter.size()==0){
|
||||
ret &= (((RefType)obj).getParaList()==null || ((RefType)obj).getParaList().size()==0);
|
||||
}
|
||||
else{
|
||||
if(((RefType)obj).getParaList()==null){
|
||||
ret = false;
|
||||
}
|
||||
else if(parameter.size() != ((RefType)obj).getParaList().size())
|
||||
{
|
||||
ret = false;
|
||||
}
|
||||
else
|
||||
{
|
||||
for(int i = 0; i<parameter.size(); i++)
|
||||
{
|
||||
ret &= parameter.get(i).equals(((RefType)obj).getParaList().get(i));
|
||||
}
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
else{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public ISerialNode toSerial(KeyStorage keyStorage) {
|
||||
SerialMap serialized = new SerialMap();
|
||||
serialized.put("isPrimitive", this.primitiveFlag);
|
||||
serialized.put("name", this.name.toString());
|
||||
serialized.put("parameters", SerialList.fromMapped(this.parameter, param -> param.toSerial(keyStorage)));
|
||||
|
||||
@Override
|
||||
public void accept(ASTVisitor visitor) {
|
||||
visitor.visit(this);
|
||||
}
|
||||
// create the wrapper and put this as the object
|
||||
var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
|
||||
serializedWrapper.put("object", serialized);
|
||||
return serializedWrapper;
|
||||
}
|
||||
|
||||
@Override
|
||||
public <A> A acceptTV(TypeVisitor<A> visitor) {
|
||||
return visitor.visit(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void accept(ResultSetVisitor visitor) {
|
||||
visitor.visit(this);
|
||||
}
|
||||
public static RefType fromSerial(SerialMap data, UnifyContext context) {
|
||||
return new RefType(
|
||||
new JavaClassName(data.getValue("name").getOf(String.class)),
|
||||
data.getList("parameters").assertListOfMaps().stream()
|
||||
.map(param -> RefTypeOrTPHOrWildcardOrGeneric.fromSerial(param, context))
|
||||
.toList(),
|
||||
new NullToken(),
|
||||
data.getValue("isPrimitive").getOf(Boolean.class)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
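The comment on hashCode above ("Nur den Name hashen ...") deserves a note: hashing only the raw class name keeps hashCode consistent with the parameter-sensitive equals, at the price of extra collisions when many RefTypes share a name. A small self-contained illustration of that trade-off (Ref is an invented stand-in, not the real RefType):

import java.util.List;
import java.util.Objects;

public class NameOnlyHashDemo {

    // Same contract as RefType above: equals compares name and parameters,
    // hashCode deliberately uses only the name.
    static class Ref {
        final String name;
        final List<String> params;

        Ref(String name, List<String> params) {
            this.name = name;
            this.params = params;
        }

        @Override
        public boolean equals(Object o) {
            return o instanceof Ref other
                    && name.equals(other.name)
                    && params.equals(other.params);
        }

        @Override
        public int hashCode() {
            return Objects.hash(name); // coarser than equals, but never inconsistent with it
        }
    }

    public static void main(String[] args) {
        Ref a = new Ref("List", List.of("Integer"));
        Ref b = new Ref("List", List.of("String"));
        System.out.println(a.equals(b));                  // false
        System.out.println(a.hashCode() == b.hashCode()); // true: same bucket, lookups stay correct
    }
}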
@@ -1,11 +1,17 @@
|
||||
package de.dhbwstuttgart.syntaxtree.type;
|
||||
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
|
||||
import de.dhbwstuttgart.syntaxtree.ASTVisitor;
|
||||
import de.dhbwstuttgart.syntaxtree.SyntaxTreeNode;
|
||||
import de.dhbwstuttgart.typeinference.result.ResultSetVisitor;
|
||||
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||
import org.antlr.v4.runtime.Token;
|
||||
|
||||
public abstract class RefTypeOrTPHOrWildcardOrGeneric extends SyntaxTreeNode{
|
||||
public abstract class RefTypeOrTPHOrWildcardOrGeneric extends SyntaxTreeNode implements ISerializableData {
|
||||
public RefTypeOrTPHOrWildcardOrGeneric(Token offset) {
|
||||
super(offset);
|
||||
}
|
||||
@@ -18,5 +24,26 @@ public abstract class RefTypeOrTPHOrWildcardOrGeneric extends SyntaxTreeNode{
|
||||
|
||||
@Override
|
||||
public abstract boolean equals(Object o);
|
||||
|
||||
|
||||
@Override
|
||||
public ISerialNode toSerial(KeyStorage keyStorage) {
|
||||
SerialMap serialized = new SerialMap();
|
||||
serialized.put("type", this.getClass().getSimpleName());
|
||||
// we only insert null for the object and expect the child classes to call this and override the value with themselves
|
||||
serialized.put("object", SerialValue.NULL);
|
||||
return serialized;
|
||||
}
|
||||
|
||||
public static RefTypeOrTPHOrWildcardOrGeneric fromSerial(SerialMap data, UnifyContext context) {
|
||||
String type = data.getValue("type").getOf(String.class);
|
||||
SerialMap object = data.getMap("object");
|
||||
|
||||
if (type.equals(ExtendsWildcardType.class.getSimpleName())) return ExtendsWildcardType.fromSerial(object, context);
|
||||
else if (type.equals(GenericRefType.class.getSimpleName())) return GenericRefType.fromSerial(object, context);
|
||||
else if (type.equals(SuperWildcardType.class.getSimpleName())) return SuperWildcardType.fromSerial(object, context);
|
||||
else if (type.equals(RefType.class.getSimpleName())) return RefType.fromSerial(object, context);
|
||||
else if (type.equals(Void.class.getSimpleName())) return Void.fromSerial(object, context);
|
||||
else if (type.equals(TypePlaceholder.class.getSimpleName())) return TypePlaceholder.fromSerial(object, context);
|
||||
else throw new RuntimeException("Could not unserialize class of unhandled type " + type);
|
||||
}
|
||||
}
|
||||
|
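RefTypeOrTPHOrWildcardOrGeneric defines the serialization envelope every type class in this diff reuses: the base toSerial stores the simple class name under "type" plus a placeholder "object", subclasses call super.toSerial(...) and overwrite "object" with their payload, and the static fromSerial dispatches on the "type" string. A self-contained sketch of the same envelope pattern, using plain java.util.Map and invented class names instead of SerialMap/KeyStorage:

import java.util.HashMap;
import java.util.Map;

abstract class TypeNode {
    // Base envelope: the concrete class is expected to overwrite "object".
    Map<String, Object> toSerial() {
        Map<String, Object> envelope = new HashMap<>();
        envelope.put("type", getClass().getSimpleName());
        envelope.put("object", null);
        return envelope;
    }

    // Reads the type tag and hands the payload to the matching subclass.
    static TypeNode fromSerial(Map<String, Object> data) {
        String type = (String) data.get("type");
        @SuppressWarnings("unchecked")
        Map<String, Object> object = (Map<String, Object>) data.get("object");
        return switch (type) {
            case "NamedType" -> NamedType.fromSerialPayload(object);
            default -> throw new RuntimeException("Unhandled type " + type);
        };
    }
}

class NamedType extends TypeNode {
    final String name;

    NamedType(String name) { this.name = name; }

    @Override
    Map<String, Object> toSerial() {
        Map<String, Object> payload = new HashMap<>();
        payload.put("name", name);
        Map<String, Object> envelope = super.toSerial(); // wrapper from the base class
        envelope.put("object", payload);
        return envelope;
    }

    static NamedType fromSerialPayload(Map<String, Object> payload) {
        return new NamedType((String) payload.get("name"));
    }
}

public class EnvelopeDemo {
    public static void main(String[] args) {
        Map<String, Object> wire = new NamedType("java.lang.Integer").toSerial();
        NamedType back = (NamedType) TypeNode.fromSerial(wire);
        System.out.println(back.name); // java.lang.Integer
    }
}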
@@ -1,9 +1,13 @@
|
||||
package de.dhbwstuttgart.syntaxtree.type;
|
||||
|
||||
|
||||
import de.dhbwstuttgart.parser.NullToken;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
|
||||
import de.dhbwstuttgart.syntaxtree.ASTVisitor;
|
||||
import de.dhbwstuttgart.syntaxtree.GenericTypeVar;
|
||||
import de.dhbwstuttgart.typeinference.result.ResultSetVisitor;
|
||||
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||
import org.antlr.v4.runtime.Token;
|
||||
|
||||
import java.util.Objects;
|
||||
@@ -16,7 +20,7 @@ import java.util.Objects;
|
||||
*
|
||||
*/
|
||||
|
||||
public class SuperWildcardType extends WildcardType{
|
||||
public class SuperWildcardType extends WildcardType implements ISerializableData {
|
||||
|
||||
/**
|
||||
* Author: Arne Lüdtke<br/>
|
||||
@@ -80,4 +84,22 @@ public class SuperWildcardType extends WildcardType{
|
||||
SuperWildcardType that = (SuperWildcardType) o;
|
||||
return that.innerType.equals(this.innerType);
|
||||
}
|
||||
|
||||
@Override
|
||||
public SerialMap toSerial(KeyStorage keyStorage) {
|
||||
SerialMap serialized = new SerialMap();
|
||||
serialized.put("innerType", this.innerType.toSerial(keyStorage));
|
||||
|
||||
// create the wrapper and put this as the object
|
||||
var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
|
||||
serializedWrapper.put("object", serialized);
|
||||
return serializedWrapper;
|
||||
}
|
||||
|
||||
public static SuperWildcardType fromSerial(SerialMap data, UnifyContext context) {
|
||||
return new SuperWildcardType(
|
||||
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(data.getMap("innerType"), context),
|
||||
new NullToken()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@@ -1,9 +1,12 @@
|
||||
package de.dhbwstuttgart.syntaxtree.type;
|
||||
import java.util.Hashtable;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
|
||||
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
|
||||
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||
|
||||
import de.dhbwstuttgart.parser.NullToken;
|
||||
import de.dhbwstuttgart.syntaxtree.ASTVisitor;
|
||||
import de.dhbwstuttgart.syntaxtree.SyntaxTreeNode;
|
||||
import de.dhbwstuttgart.syntaxtree.factory.NameGenerator;
|
||||
import de.dhbwstuttgart.typeinference.result.ResultSetVisitor;
|
||||
import org.antlr.v4.runtime.Token;
|
||||
@@ -16,7 +19,7 @@ import org.antlr.v4.runtime.Token;
|
||||
* @author Jörg Bäuerle
|
||||
* @version $Date: 2013/06/19 12:45:37 $
|
||||
*/
|
||||
public class TypePlaceholder extends RefTypeOrTPHOrWildcardOrGeneric
|
||||
public class TypePlaceholder extends RefTypeOrTPHOrWildcardOrGeneric implements ISerializableData
|
||||
{
|
||||
private final String name;
|
||||
|
||||
@@ -65,7 +68,12 @@ public class TypePlaceholder extends RefTypeOrTPHOrWildcardOrGeneric
|
||||
public static TypePlaceholder fresh(Token position){
|
||||
return new TypePlaceholder(NameGenerator.makeNewName(), position, 0, true);
|
||||
}
|
||||
|
||||
|
||||
public static TypePlaceholder fresh(Token position, PlaceholderRegistry placeholderRegistry){
|
||||
String newName = placeholderRegistry.generateFreshPlaceholderName();
|
||||
return new TypePlaceholder(newName, position, 0, true);
|
||||
}
|
||||
|
||||
public static TypePlaceholder fresh(Token position, int variance, boolean wildcardable){
|
||||
return new TypePlaceholder(NameGenerator.makeNewName(), position, variance, wildcardable);
|
||||
}
|
||||
@@ -139,4 +147,26 @@ public class TypePlaceholder extends RefTypeOrTPHOrWildcardOrGeneric
|
||||
public Boolean getWildcardtable() {
|
||||
return wildcardable;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SerialMap toSerial(KeyStorage keyStorage) {
|
||||
SerialMap serialized = new SerialMap();
|
||||
serialized.put("name", this.name);
|
||||
serialized.put("variance", this.variance);
|
||||
serialized.put("wildcardable", this.wildcardable);
|
||||
|
||||
// create the wrapper and put this as the object
|
||||
var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
|
||||
serializedWrapper.put("object", serialized);
|
||||
return serializedWrapper;
|
||||
}
|
||||
|
||||
public static TypePlaceholder fromSerial(SerialMap data, UnifyContext context) {
|
||||
return new TypePlaceholder(
|
||||
data.getValue("name").getOf(String.class),
|
||||
new NullToken(),
|
||||
data.getValue("variance").getOf(Integer.class),
|
||||
data.getValue("wildcardable").getOf(Boolean.class)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@@ -1,14 +1,32 @@
package de.dhbwstuttgart.syntaxtree.type;

import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import org.antlr.v4.runtime.Token;

import de.dhbwstuttgart.parser.scope.JavaClassName;

public class Void extends RefType
public class Void extends RefType implements ISerializableData
{
    public Void(Token offset) {
        super(JavaClassName.Void, offset);
    }

    @Override
    public ISerialNode toSerial(KeyStorage keyStorage) {
        // create the wrapper and put this as the object
        var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
        serializedWrapper.put("object", new SerialMap());
        return serializedWrapper;
    }

    public static Void fromSerial(SerialMap data, UnifyContext context) {
        return new Void(new NullToken());
    }
}
@@ -9,13 +9,8 @@ import de.dhbwstuttgart.syntaxtree.Method;
import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
import de.dhbwstuttgart.syntaxtree.factory.NameGenerator;
import de.dhbwstuttgart.syntaxtree.type.GenericRefType;
import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;

import org.antlr.v4.runtime.Token;

import javax.swing.text.html.Option;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
@@ -1,77 +1,159 @@
|
||||
package de.dhbwstuttgart.typeinference.constraints;
|
||||
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialUUID;
|
||||
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
import javax.annotation.Nullable;
|
||||
|
||||
public class Constraint<A> extends HashSet<A> {
|
||||
private static final long serialVersionUID = 1L;
|
||||
private Boolean isInherited = false;//wird beides nur für die Method-Constraints benoetigt
|
||||
private Boolean isImplemented = false;
|
||||
|
||||
/*
|
||||
* wird verwendet um bei der Codegenerierung die richtige Methoden - Signatur
|
||||
* auszuwaehlen
|
||||
*/
|
||||
/*private*/ Set<A> methodSignatureConstraint = new HashSet<>();
|
||||
|
||||
private Constraint<A> extendConstraint = null;
|
||||
|
||||
public Constraint() {
|
||||
super();
|
||||
}
|
||||
|
||||
public Constraint(Boolean isInherited, Boolean isImplemented) {
|
||||
this.isInherited = isInherited;
|
||||
this.isImplemented = isImplemented;
|
||||
}
|
||||
|
||||
public Constraint(Boolean isInherited, Boolean isImplemented, Constraint<A> extendConstraint, Set<A> methodSignatureConstraint) {
|
||||
this.isInherited = isInherited;
|
||||
this.isImplemented = isImplemented;
|
||||
this.extendConstraint = extendConstraint;
|
||||
this.methodSignatureConstraint = methodSignatureConstraint;
|
||||
}
|
||||
|
||||
public void setIsInherited(Boolean isInherited) {
|
||||
this.isInherited = isInherited;
|
||||
}
|
||||
|
||||
public Boolean isInherited() {
|
||||
return isInherited;
|
||||
}
|
||||
|
||||
public Boolean isImplemented() {
|
||||
return isImplemented;
|
||||
}
|
||||
|
||||
public Constraint<A> getExtendConstraint() {
|
||||
return extendConstraint;
|
||||
}
|
||||
|
||||
public void setExtendConstraint(Constraint<A> c) {
|
||||
extendConstraint = c;
|
||||
}
|
||||
|
||||
public Set<A> getmethodSignatureConstraint() {
|
||||
return methodSignatureConstraint;
|
||||
}
|
||||
|
||||
public void setmethodSignatureConstraint(Set<A> c) {
|
||||
methodSignatureConstraint = c;
|
||||
}
|
||||
public class Constraint<A extends IConstraintElement> extends HashSet<A> implements ISerializableData {
|
||||
private static final long serialVersionUID = 1L;
|
||||
private Boolean isInherited = false;//wird beides nur für die Method-Constraints benoetigt
|
||||
private Boolean isImplemented = false;
|
||||
|
||||
/*
|
||||
* wird verwendet um bei der Codegenerierung die richtige Methoden - Signatur
|
||||
* auszuwaehlen
|
||||
*/
|
||||
/*private*/ Set<A> methodSignatureConstraint = new HashSet<>();
|
||||
|
||||
private Constraint<A> extendConstraint = null;
|
||||
|
||||
public Constraint() {
|
||||
super();
|
||||
}
|
||||
|
||||
public Constraint(Boolean isInherited, Boolean isImplemented) {
|
||||
this.isInherited = isInherited;
|
||||
this.isImplemented = isImplemented;
|
||||
}
|
||||
|
||||
public Constraint(Boolean isInherited, Boolean isImplemented, Constraint<A> extendConstraint, Set<A> methodSignatureConstraint) {
|
||||
this.isInherited = isInherited;
|
||||
this.isImplemented = isImplemented;
|
||||
this.extendConstraint = extendConstraint;
|
||||
this.methodSignatureConstraint = methodSignatureConstraint;
|
||||
}
|
||||
|
||||
public void setIsInherited(Boolean isInherited) {
|
||||
this.isInherited = isInherited;
|
||||
}
|
||||
|
||||
public Boolean isInherited() {
|
||||
return isInherited;
|
||||
}
|
||||
|
||||
public Boolean isImplemented() {
|
||||
return isImplemented;
|
||||
}
|
||||
|
||||
public Constraint<A> getExtendConstraint() {
|
||||
return extendConstraint;
|
||||
}
|
||||
|
||||
public void setExtendConstraint(Constraint<A> c) {
|
||||
extendConstraint = c;
|
||||
}
|
||||
|
||||
public Set<A> getmethodSignatureConstraint() {
|
||||
return methodSignatureConstraint;
|
||||
}
|
||||
|
||||
public void setmethodSignatureConstraint(Set<A> c) {
|
||||
methodSignatureConstraint = c;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return super.toString() + "\nisInherited = " + isInherited
|
||||
+ " isOveridden = " + isImplemented
|
||||
+ " msc[" + methodSignatureConstraint.size() + "] = " + methodSignatureConstraint
|
||||
//" + extendsContraint: " + (extendConstraint != null ? extendConstraint.toStringBase() : "null" )
|
||||
+ "\n";
|
||||
}
|
||||
|
||||
public String toStringBase() {
|
||||
return super.toString();
|
||||
}
|
||||
|
||||
private String serialUUID = null;
|
||||
|
||||
@Override
|
||||
public SerialUUID toSerial(KeyStorage keyStorage) {
|
||||
final String uuid = serialUUID == null ? keyStorage.getIdentifier() : serialUUID;
|
||||
if (serialUUID == null) serialUUID = uuid;
|
||||
|
||||
if (!keyStorage.isAlreadySerialized(uuid)) {
|
||||
SerialMap serialized = new SerialMap();
|
||||
keyStorage.putSerialized(uuid, serialized);
|
||||
serialized.put("isInherited", isInherited);
|
||||
serialized.put("isImplemented", isImplemented);
|
||||
serialized.put("extendedConstraint", extendConstraint == null ? null :
|
||||
extendConstraint.toSerial(keyStorage));
|
||||
Function<A, ISerialNode> pairMapper = pair -> {
|
||||
if (pair instanceof Pair simplePair) return simplePair.toSerial(keyStorage);
|
||||
if (pair instanceof UnifyPair unifyPair) return unifyPair.toSerial(keyStorage);
|
||||
throw new RuntimeException("No serialization is supported for type " + pair.getClass().getName());
|
||||
};
|
||||
serialized.put("methodSignatureConstraint", methodSignatureConstraint == null ? null :
|
||||
SerialList.fromMapped(methodSignatureConstraint, pairMapper));
|
||||
serialized.put("setElements", SerialList.fromMapped(this, pairMapper));
|
||||
}
|
||||
|
||||
// return only the unique key
|
||||
return new SerialUUID(uuid);
|
||||
}
|
||||
|
||||
public static <T extends IConstraintElement> Constraint<T> fromSerial(SerialUUID serialUUID, UnifyContext context, Class<T> target, KeyStorage keyStorage) {
|
||||
String uuid = serialUUID.uuid;
|
||||
|
||||
if (!keyStorage.isAlreadyUnserialized(uuid)) {
|
||||
Constraint<T> constraint = new Constraint<>();
|
||||
// immediately add the object to the context to prevent infinite recursion
|
||||
keyStorage.putUnserialized(uuid, constraint);
|
||||
|
||||
// retrieve the serialized data und start unserializing it
|
||||
SerialMap data = keyStorage.getSerialized(uuid);
|
||||
constraint.isInherited = data.getValue("isInherited").getOf(Boolean.class);
|
||||
constraint.isImplemented = data.getValue("isImplemented").getOf(Boolean.class);
|
||||
constraint.extendConstraint = Optional.ofNullable(data.getUUIDOrNull("extendedConstraint"))
|
||||
.map(v -> Constraint.fromSerial(v, context, target, keyStorage))
|
||||
.orElse(null);
|
||||
|
||||
// to convert the maps back to elements, we sadly have to do some assumptions about the generic types...
|
||||
Function<ISerialNode, T> pairUnmapper = pairData -> {
|
||||
if (target == Pair.class && pairData instanceof SerialMap pairMap) {
|
||||
return (T) Pair.fromSerial(pairMap, context);
|
||||
}
|
||||
if (target == UnifyPair.class && pairData instanceof SerialUUID pairUUID) {
|
||||
return (T) UnifyPair.fromSerial(pairUUID, context, keyStorage);
|
||||
}
|
||||
throw new RuntimeException("No serialization is supported for target type " + target.getName());
|
||||
};
|
||||
|
||||
constraint.methodSignatureConstraint =
|
||||
Optional.ofNullable(data.getListOrNull("methodSignatureConstraint"))
|
||||
.map(l -> l.stream().map(pairUnmapper).collect(Collectors.toSet()))
|
||||
.orElse(null);
|
||||
constraint.addAll(
|
||||
data.getList("setElements")
|
||||
.stream().map(pairUnmapper).toList());
|
||||
}
|
||||
|
||||
return keyStorage.getUnserialized(uuid, Constraint.class);
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return super.toString() + "\nisInherited = " + isInherited + " isOveridden = " + isImplemented
|
||||
+ methodSignatureConstraint
|
||||
//" + extendsContraint: " + (extendConstraint != null ? extendConstraint.toStringBase() : "null" )
|
||||
+ "\n" ;
|
||||
}
|
||||
|
||||
public String toStringBase() {
|
||||
return super.toString();
|
||||
}
|
||||
|
||||
}
|
||||
|
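Constraint is the one structure in this diff that can reach itself again (via extendConstraint), which is why toSerial returns only a SerialUUID, registers the payload in the KeyStorage before descending into children, and fromSerial likewise registers the still-empty Constraint before recursing. Below is a self-contained sketch of that identity-preserving, cycle-safe pattern; Node and Registry are invented stand-ins, not the project's classes.

import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

// Invented stand-in for the serialized store shared by both directions.
class Registry {
    final Map<String, Map<String, Object>> serialized = new HashMap<>();
    final Map<String, Node> unserialized = new HashMap<>();
}

class Node {
    String value;
    Node next;           // may point back to an earlier node, forming a cycle
    private String uuid; // remembered so the same object always maps to the same id

    Node(String value) { this.value = value; }

    // Returns only the id; the payload is stored in the registry exactly once.
    String toSerial(Registry reg) {
        if (uuid == null) uuid = UUID.randomUUID().toString();
        if (!reg.serialized.containsKey(uuid)) {
            Map<String, Object> payload = new HashMap<>();
            reg.serialized.put(uuid, payload);   // register BEFORE recursing
            payload.put("value", value);
            payload.put("next", next == null ? null : next.toSerial(reg));
        }
        return uuid;
    }

    static Node fromSerial(String uuid, Registry reg) {
        if (!reg.unserialized.containsKey(uuid)) {
            Node node = new Node(null);
            reg.unserialized.put(uuid, node);    // register BEFORE recursing
            Map<String, Object> payload = reg.serialized.get(uuid);
            node.value = (String) payload.get("value");
            String nextId = (String) payload.get("next");
            node.next = nextId == null ? null : fromSerial(nextId, reg);
        }
        return reg.unserialized.get(uuid);
    }
}

public class CycleDemo {
    public static void main(String[] args) {
        Node a = new Node("a");
        Node b = new Node("b");
        a.next = b;
        b.next = a; // cycle

        Registry out = new Registry();
        String rootId = a.toSerial(out);

        Registry in = new Registry();
        in.serialized.putAll(out.serialized);
        Node restored = Node.fromSerial(rootId, in);
        System.out.println(restored.value);                  // a
        System.out.println(restored.next.next == restored);  // true: identity and cycle preserved
    }
}

Without the early put into the registry, the mutual reference would make both directions recurse forever; with it, every object is written and rebuilt exactly once.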
@@ -1,63 +1,80 @@
|
||||
package de.dhbwstuttgart.typeinference.constraints;
|
||||
|
||||
|
||||
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
|
||||
import de.dhbwstuttgart.typeinference.unify.GuavaSetOperations;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
|
||||
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||
|
||||
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
|
||||
import java.util.*;
|
||||
import java.util.function.BinaryOperator;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
public class ConstraintSet<A> {
|
||||
Constraint<A> undConstraints = new Constraint<>();
|
||||
List<Set<Constraint<A>>> oderConstraints = new ArrayList<>();
|
||||
public class ConstraintSet<A extends IConstraintElement> implements ISerializableData {
|
||||
Constraint<A> undConstraints = new Constraint<>();
|
||||
List<Set<Constraint<A>>> oderConstraints = new ArrayList<>();
|
||||
|
||||
public void addUndConstraint(A p){
|
||||
undConstraints.add(p);
|
||||
}
|
||||
public void addUndConstraint(A p) {
|
||||
undConstraints.add(p);
|
||||
}
|
||||
|
||||
public void addOderConstraint(Set<Constraint<A>> methodConstraints) {
|
||||
oderConstraints.add(methodConstraints);
|
||||
}
|
||||
public void addOderConstraint(Set<Constraint<A>> methodConstraints) {
|
||||
oderConstraints.add(methodConstraints);
|
||||
}
|
||||
|
||||
public void addAllUndConstraint(Constraint<A> allUndConstraints){
|
||||
undConstraints.addAll(allUndConstraints);
|
||||
}
|
||||
|
||||
public void addAllOderConstraint(List<Set<Constraint<A>>> allOderConstraints){
|
||||
this.oderConstraints.addAll(allOderConstraints);
|
||||
}
|
||||
|
||||
public void addAll(ConstraintSet constraints) {
|
||||
this.addAllUndConstraint(constraints.undConstraints);
|
||||
this.addAllOderConstraint(constraints.oderConstraints);
|
||||
}
|
||||
public void addAllUndConstraint(Constraint<A> allUndConstraints) {
|
||||
undConstraints.addAll(allUndConstraints);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString(){
|
||||
BinaryOperator<String> b = (x,y) -> x+y;
|
||||
return "\nUND:" + this.undConstraints.toString() + "\n" +
|
||||
"ODER:" + this.oderConstraints.stream().reduce("", (x,y) -> x.toString()+ "\n" +y, b);
|
||||
//cartesianProduct().toString();
|
||||
}
|
||||
public void addAllOderConstraint(List<Set<Constraint<A>>> allOderConstraints) {
|
||||
this.oderConstraints.addAll(allOderConstraints);
|
||||
}
|
||||
|
||||
public Set<List<Constraint<A>>> cartesianProduct(){
|
||||
Set<Constraint<A>> toAdd = new HashSet<>();
|
||||
toAdd.add(undConstraints);
|
||||
List<Set<Constraint<A>>> allConstraints = new ArrayList<>();
|
||||
allConstraints.add(toAdd);
|
||||
allConstraints.addAll(oderConstraints);
|
||||
return new GuavaSetOperations().cartesianProduct(allConstraints);
|
||||
}
|
||||
public void addAll(ConstraintSet constraints) {
|
||||
this.addAllUndConstraint(constraints.undConstraints);
|
||||
this.addAllOderConstraint(constraints.oderConstraints);
|
||||
}
|
||||
|
||||
public <B> ConstraintSet<B> map(Function<? super A, ? extends B> o) {
|
||||
Hashtable<Constraint<A>,Constraint<B>> CSA2CSB = new Hashtable<>();
|
||||
ConstraintSet<B> ret = new ConstraintSet<>();
|
||||
ret.undConstraints = undConstraints.stream().map(o).collect(Collectors.toCollection(Constraint<B>::new));
|
||||
List<Set<Constraint<B>>> newOder = new ArrayList<>();
|
||||
@Override
|
||||
public String toString() {
|
||||
BinaryOperator<String> b = (x, y) -> x + y;
|
||||
return "\nUND:\n" + this.undConstraints.toString() +
|
||||
"ODER:" + this.oderConstraints.stream().reduce("", (x, y) -> x + "\n\t" + y, b) +
|
||||
"\n";
|
||||
//cartesianProduct().toString();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (!(obj instanceof ConstraintSet<?> other)) return false;
|
||||
return Objects.equals(undConstraints, other.undConstraints)
|
||||
&& Objects.equals(oderConstraints, other.oderConstraints);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(undConstraints, oderConstraints);
|
||||
}
|
||||
|
||||
public Set<List<Constraint<A>>> cartesianProduct() {
|
||||
Set<Constraint<A>> toAdd = new HashSet<>();
|
||||
toAdd.add(undConstraints);
|
||||
List<Set<Constraint<A>>> allConstraints = new ArrayList<>();
|
||||
allConstraints.add(toAdd);
|
||||
allConstraints.addAll(oderConstraints);
|
||||
return new GuavaSetOperations().cartesianProduct(allConstraints);
|
||||
}
|
||||
|
||||
public <B extends IConstraintElement> ConstraintSet<B> map(Function<? super A, ? extends B> o) {
|
||||
Hashtable<Constraint<A>, Constraint<B>> CSA2CSB = new Hashtable<>();
|
||||
ConstraintSet<B> ret = new ConstraintSet<>();
|
||||
ret.undConstraints = undConstraints.stream().map(o).collect(Collectors.toCollection(Constraint<B>::new));
|
||||
List<Set<Constraint<B>>> newOder = new ArrayList<>();
|
||||
/*
|
||||
for(Set<Constraint<A>> oderConstraint : oderConstraints){
|
||||
oderConstraint.forEach(as -> {
|
||||
@@ -68,25 +85,25 @@ public class ConstraintSet<A> {
|
||||
CSA2CSB.put(as, newConst);} );
|
||||
}
|
||||
*/
|
||||
|
||||
for(Set<Constraint<A>> oderConstraint : oderConstraints){
|
||||
newOder.add(
|
||||
oderConstraint.stream().map((Constraint<A> as) -> {
|
||||
|
||||
Constraint<B> newConst = as.stream()
|
||||
.map(o)
|
||||
.collect(Collectors.toCollection((
|
||||
() -> new Constraint<B> (as.isInherited(),
|
||||
as.isImplemented(),
|
||||
(as.getExtendConstraint() != null)
|
||||
? as.getExtendConstraint().stream().map(o).collect(Collectors.toCollection(Constraint::new))
|
||||
: null,
|
||||
as.getmethodSignatureConstraint().stream().map(o).collect(Collectors.toCollection(HashSet::new))))
|
||||
));
|
||||
|
||||
//CSA2CSB.put(as, newConst);
|
||||
|
||||
return newConst;
|
||||
|
||||
for (Set<Constraint<A>> oderConstraint : oderConstraints) {
|
||||
newOder.add(
|
||||
oderConstraint.stream().map((Constraint<A> as) -> {
|
||||
|
||||
Constraint<B> newConst = as.stream()
|
||||
.map(o)
|
||||
.collect(Collectors.toCollection((
|
||||
() -> new Constraint<B>(as.isInherited(),
|
||||
as.isImplemented(),
|
||||
(as.getExtendConstraint() != null)
|
||||
? as.getExtendConstraint().stream().map(o).collect(Collectors.toCollection(Constraint::new))
|
||||
: null,
|
||||
as.getmethodSignatureConstraint().stream().map(o).collect(Collectors.toCollection(HashSet::new))))
|
||||
));
|
||||
|
||||
//CSA2CSB.put(as, newConst);
|
||||
|
||||
return newConst;
|
||||
|
||||
/*
|
||||
Constraint<B> bs = CSA2CSB.get(as);
|
||||
@@ -95,36 +112,60 @@ public class ConstraintSet<A> {
|
||||
}
|
||||
return bs;
|
||||
*/
|
||||
}).collect(Collectors.toSet())
|
||||
);
|
||||
}
|
||||
|
||||
ret.oderConstraints = newOder;
|
||||
return ret;
|
||||
}).collect(Collectors.toSet())
|
||||
);
|
||||
}
|
||||
|
||||
public void forEach (Consumer<? super A> c) {
|
||||
undConstraints.stream().forEach(c);
|
||||
for(Set<Constraint<A>> oderConstraint : oderConstraints){
|
||||
oderConstraint.parallelStream().forEach((Constraint<A> as) ->
|
||||
as.stream().forEach(c));
|
||||
}
|
||||
|
||||
ret.oderConstraints = newOder;
|
||||
return ret;
|
||||
}
|
||||
|
||||
public void forEach(Consumer<? super A> c) {
|
||||
undConstraints.stream().forEach(c);
|
||||
for (Set<Constraint<A>> oderConstraint : oderConstraints) {
|
||||
oderConstraint.parallelStream().forEach((Constraint<A> as) ->
|
||||
as.stream().forEach(c));
|
||||
}
|
||||
|
||||
public Set<A> getAll () {
|
||||
Set<A> ret = new HashSet<>();
|
||||
ret.addAll(undConstraints);
|
||||
for(Set<Constraint<A>> oderConstraint : oderConstraints){
|
||||
oderConstraint.parallelStream().forEach((Constraint<A> as) -> ret.addAll(as));
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
public List<Set<Constraint<A>>> getOderConstraints() {
|
||||
return oderConstraints;
|
||||
}
|
||||
|
||||
public Set<A> getUndConstraints() {
|
||||
return undConstraints;
|
||||
}
|
||||
|
||||
public Set<A> getAll() {
|
||||
Set<A> ret = new HashSet<>(undConstraints);
|
||||
for (Set<Constraint<A>> oderConstraint : oderConstraints) {
|
||||
oderConstraint.parallelStream().forEach(ret::addAll);
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
public List<Set<Constraint<A>>> getOderConstraints() {
|
||||
return oderConstraints;
|
||||
}
|
||||
|
||||
public Set<A> getUndConstraints() {
|
||||
return undConstraints;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SerialMap toSerial(KeyStorage keyStorage) {
|
||||
SerialMap serialized = new SerialMap();
|
||||
serialized.put("undConstraints", undConstraints.toSerial(keyStorage));
|
||||
serialized.put("oderConstraints", SerialList.fromMapped(oderConstraints, oderConstraintSet ->
|
||||
SerialList.fromMapped(oderConstraintSet, oderConstraint ->
|
||||
oderConstraint.toSerial(keyStorage))
|
||||
));
|
||||
return serialized;
|
||||
}
|
||||
|
||||
public static <T extends IConstraintElement> ConstraintSet<T> fromSerial(SerialMap data, UnifyContext context, Class<T> target, KeyStorage keyStorage) {
|
||||
ConstraintSet<T> constraintSet = new ConstraintSet<>();
|
||||
|
||||
constraintSet.undConstraints = Constraint.fromSerial(data.getUUID("undConstraints"), context, target, keyStorage);
|
||||
constraintSet.oderConstraints = data.getList("oderConstraints").assertListOfLists().stream()
|
||||
.map(oderConstraintSetData -> oderConstraintSetData.assertListOfUUIDs().stream()
|
||||
.map(oderConstraintData -> Constraint.fromSerial(oderConstraintData, context, target, keyStorage))
|
||||
.collect(Collectors.toSet())
|
||||
).toList();
|
||||
|
||||
return constraintSet;
|
||||
}
|
||||
|
||||
}
|
||||
|
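ConstraintSet above keeps one always-required und-Constraint plus a list of oder-groups of alternative Constraints, and cartesianProduct() (delegated to GuavaSetOperations) enumerates every way of committing to exactly one Constraint per group. A minimal stand-alone version of that enumeration, without Guava and with invented names, to make the combinatorics concrete:

import java.util.ArrayList;
import java.util.List;
import java.util.Set;

public class CartesianDemo {

    // All ways of choosing exactly one element from each group, in group order.
    static <T> List<List<T>> cartesianProduct(List<Set<T>> groups) {
        List<List<T>> result = new ArrayList<>();
        result.add(new ArrayList<>()); // start from a single empty choice
        for (Set<T> group : groups) {
            List<List<T>> extended = new ArrayList<>();
            for (List<T> partial : result) {
                for (T choice : group) {
                    List<T> next = new ArrayList<>(partial);
                    next.add(choice);
                    extended.add(next);
                }
            }
            result = extended;
        }
        return result;
    }

    public static void main(String[] args) {
        // The first group plays the role of the und-constraints (one mandatory entry),
        // the remaining groups are oder-alternatives, e.g. overloaded method signatures.
        List<Set<String>> groups = List.of(
                Set.of("und"),
                Set.of("m(int)", "m(String)"),
                Set.of("n(double)", "n(Object)"));
        cartesianProduct(groups).forEach(System.out::println);
        // 1 * 2 * 2 = 4 combinations are printed.
    }
}

The product grows multiplicatively with the number of oder-groups, which is why the example stays deliberately tiny.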
@@ -0,0 +1,4 @@
package de.dhbwstuttgart.typeinference.constraints;

public interface IConstraintElement {
}
@@ -1,72 +1,70 @@
|
||||
package de.dhbwstuttgart.typeinference.constraints;
|
||||
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
|
||||
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||
import java.io.Serializable;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import de.dhbwstuttgart.parser.NullToken;
|
||||
import de.dhbwstuttgart.parser.SourceLoc;
|
||||
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
|
||||
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
|
||||
import org.antlr.v4.runtime.Token;
|
||||
|
||||
|
||||
public class Pair implements Serializable
|
||||
{
|
||||
public final RefTypeOrTPHOrWildcardOrGeneric TA1;
|
||||
public final RefTypeOrTPHOrWildcardOrGeneric TA2;
|
||||
public class Pair implements Serializable, IConstraintElement, ISerializableData {
|
||||
public final RefTypeOrTPHOrWildcardOrGeneric TA1;
|
||||
public final RefTypeOrTPHOrWildcardOrGeneric TA2;
|
||||
|
||||
private SourceLoc location;
|
||||
private SourceLoc location;
|
||||
|
||||
private PairOperator eOperator = PairOperator.SMALLER;
|
||||
private Boolean noUnification = false;
|
||||
|
||||
private PairOperator eOperator = PairOperator.SMALLER;
|
||||
private Boolean noUnification = false;
|
||||
|
||||
private Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2 )
|
||||
{
|
||||
this.TA1 = TA1;
|
||||
this.TA2 = TA2;
|
||||
if(TA1 == null || TA2 == null)
|
||||
throw new NullPointerException();
|
||||
eOperator = PairOperator.SMALLER;
|
||||
}
|
||||
|
||||
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator eOp)
|
||||
{
|
||||
// Konstruktor
|
||||
this(TA1,TA2);
|
||||
this.eOperator = eOp;
|
||||
}
|
||||
private Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2) {
|
||||
this.TA1 = TA1;
|
||||
this.TA2 = TA2;
|
||||
if (TA1 == null || TA2 == null)
|
||||
throw new NullPointerException();
|
||||
eOperator = PairOperator.SMALLER;
|
||||
}
|
||||
|
||||
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator e0p, SourceLoc location) {
|
||||
this(TA1, TA2, e0p);
|
||||
this.location = location;
|
||||
}
|
||||
|
||||
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator eOp, Boolean noUnification)
|
||||
{
|
||||
// Konstruktor
|
||||
this(TA1,TA2);
|
||||
this.eOperator = eOp;
|
||||
this.noUnification = noUnification;
|
||||
}
|
||||
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator eOp) {
|
||||
// Konstruktor
|
||||
this(TA1, TA2);
|
||||
this.eOperator = eOp;
|
||||
}
|
||||
|
||||
public SourceLoc getLocation() {
|
||||
return this.location;
|
||||
}
|
||||
|
||||
public String toString()
|
||||
{
|
||||
// otth: Gibt ein Paar als String aus --> zum Debuggen und Vergleichen
|
||||
String strElement1 = "NULL";
|
||||
String strElement2 = "NULL";
|
||||
String Operator = "<.";
|
||||
|
||||
if( TA1 != null )
|
||||
strElement1 = TA1.toString();
|
||||
|
||||
if( TA2 != null )
|
||||
strElement2 = TA2.toString();
|
||||
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator e0p, SourceLoc location) {
|
||||
this(TA1, TA2, e0p);
|
||||
this.location = location;
|
||||
}
|
||||
|
||||
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator eOp, Boolean noUnification) {
|
||||
// Konstruktor
|
||||
this(TA1, TA2);
|
||||
this.eOperator = eOp;
|
||||
this.noUnification = noUnification;
|
||||
}
|
||||
|
||||
public SourceLoc getLocation() {
|
||||
return this.location;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
// otth: Gibt ein Paar als String aus --> zum Debuggen und Vergleichen
|
||||
String strElement1 = "NULL";
|
||||
String strElement2 = "NULL";
|
||||
String Operator = "<.";
|
||||
|
||||
if (TA1 != null)
|
||||
strElement1 = TA1.toString();
|
||||
|
||||
if (TA2 != null)
|
||||
strElement2 = TA2.toString();
|
||||
|
||||
/* PL ausskommentiert 2018-05-24
|
||||
if(OperatorEqual())
|
||||
@@ -76,80 +74,104 @@ public class Pair implements Serializable
|
||||
if(OperatorSmallerExtends())
|
||||
Operator = "<?";
|
||||
*/
|
||||
|
||||
return "\n(" + strElement1 + " " + eOperator.toString() + " " + strElement2 + ")";
|
||||
|
||||
/*- Equals: " + bEqual*/
|
||||
}
|
||||
|
||||
/**
|
||||
* <br/>Author: Jörg Bäuerle
|
||||
* @param obj
|
||||
* @return
|
||||
*/
|
||||
public boolean equals(Object obj)
|
||||
{
|
||||
boolean ret = true;
|
||||
ret &= (obj instanceof Pair);
|
||||
if(!ret)return ret;
|
||||
ret &= ((Pair)obj).TA1.equals(this.TA1);
|
||||
ret &= ((Pair)obj).TA2.equals(this.TA2);
|
||||
return ret;
|
||||
}
|
||||
return "\n(P: " + strElement1 + " " + eOperator.toString() + " " + strElement2 + ")";
|
||||
|
||||
/**
|
||||
* Author: Arne Lüdtke<br/>
|
||||
* Abfrage, ob Operator vom Typ Equal ist.
|
||||
*/
|
||||
public boolean OperatorEqual()
|
||||
{
|
||||
return eOperator == PairOperator.EQUALSDOT;
|
||||
}
|
||||
|
||||
/**
|
||||
* Author: Arne Lüdtke<br/>
|
||||
* Abfrage, ob Operator vom Typ Smaller ist.
|
||||
*/
|
||||
public boolean OperatorSmaller()
|
||||
{
|
||||
return eOperator == PairOperator.SMALLER;
|
||||
}
|
||||
|
||||
/**
|
||||
* Author: Arne Lüdtke<br/>
|
||||
* Abfrage, ob Operator vom Typ SmallerExtends ist.
|
||||
*/
|
||||
public boolean OperatorSmallerExtends()
|
||||
{
|
||||
return eOperator == PairOperator.SMALLERDOTWC;
|
||||
}
|
||||
|
||||
/**
|
||||
* Author: Arne Lüdtke<br/>
|
||||
* Gibt den Operator zurück.
|
||||
*/
|
||||
public PairOperator GetOperator()
|
||||
{
|
||||
return eOperator;
|
||||
}
|
||||
/*- Equals: " + bEqual*/
|
||||
}
|
||||
|
||||
public boolean OperatorSmallerDot() {
|
||||
return eOperator == PairOperator.SMALLERDOT;
|
||||
}
|
||||
|
||||
|
||||
static public Map<String, TypePlaceholder> generateTPHMap(ConstraintSet<Pair> constraints) {
|
||||
HashMap<String, TypePlaceholder> ret = new HashMap<>();
|
||||
constraints.map((Pair p) -> {
|
||||
if (p.TA1 instanceof TypePlaceholder) {
|
||||
ret.put(((TypePlaceholder) p.TA1).getName(), (TypePlaceholder) p.TA1);
|
||||
}
|
||||
if (p.TA2 instanceof TypePlaceholder) {
|
||||
ret.put(((TypePlaceholder) p.TA2).getName(), (TypePlaceholder) p.TA2);
|
||||
}
|
||||
return null;
|
||||
});
|
||||
return ret;
|
||||
}
|
||||
/**
|
||||
* <br/>Author: Jörg Bäuerle
|
||||
*
|
||||
* @param obj
|
||||
* @return
|
||||
*/
|
||||
public boolean equals(Object obj) {
|
||||
return (
|
||||
(obj instanceof Pair pairObj) &&
|
||||
pairObj.TA1.equals(this.TA1) &&
|
||||
pairObj.TA2.equals(this.TA2)
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Author: Arne Lüdtke<br/>
|
||||
* Abfrage, ob Operator vom Typ Equal ist.
|
||||
*/
|
||||
public boolean OperatorEqual() {
|
||||
return eOperator == PairOperator.EQUALSDOT;
|
||||
}
|
||||
|
||||
/**
|
||||
* Author: Arne Lüdtke<br/>
|
||||
* Abfrage, ob Operator vom Typ Smaller ist.
|
||||
*/
|
||||
public boolean OperatorSmaller() {
|
||||
return eOperator == PairOperator.SMALLER;
|
||||
}
|
||||
|
||||
/**
|
||||
* Author: Arne Lüdtke<br/>
|
||||
* Abfrage, ob Operator vom Typ SmallerExtends ist.
|
||||
*/
|
||||
public boolean OperatorSmallerExtends() {
|
||||
return eOperator == PairOperator.SMALLERDOTWC;
|
||||
}
|
||||
|
||||
/**
|
||||
* Author: Arne Lüdtke<br/>
|
||||
* Gibt den Operator zurück.
|
||||
*/
|
||||
public PairOperator GetOperator() {
|
||||
return eOperator;
|
||||
}
|
||||
|
||||
public boolean OperatorSmallerDot() {
|
||||
return eOperator == PairOperator.SMALLERDOT;
|
||||
}
|
||||
|
||||
|
||||
static public Map<String, TypePlaceholder> generateTPHMap(ConstraintSet<Pair> constraints) {
|
||||
HashMap<String, TypePlaceholder> ret = new HashMap<>();
|
||||
constraints.map((Pair p) -> {
|
||||
if (p.TA1 instanceof TypePlaceholder) {
|
||||
ret.put(((TypePlaceholder) p.TA1).getName(), (TypePlaceholder) p.TA1);
|
||||
}
|
||||
if (p.TA2 instanceof TypePlaceholder) {
|
||||
ret.put(((TypePlaceholder) p.TA2).getName(), (TypePlaceholder) p.TA2);
|
||||
}
|
||||
return null;
|
||||
});
|
||||
return ret;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SerialMap toSerial(KeyStorage keyStorage) {
|
||||
// because toString() will output TA1 and TA2 recursively, we can ignore potential infinite recursion here too
|
||||
SerialMap serialized = new SerialMap();
|
||||
serialized.put("ta1", this.TA1.toSerial(keyStorage));
|
||||
serialized.put("ta2", this.TA2.toSerial(keyStorage));
|
||||
serialized.put("op", this.eOperator.toString());
|
||||
serialized.put("noUnification", this.noUnification ? 1 : 0);
|
||||
serialized.put("location", this.location == null ? null : this.location.toSerial(keyStorage));
|
||||
return serialized;
|
||||
}
|
||||
|
||||
public static Pair fromSerial(SerialMap data, UnifyContext context) {
|
||||
String op = data.getValue("op").getOf(String.class);
|
||||
SerialMap ta1 = data.getMap("ta1");
|
||||
SerialMap ta2 = data.getMap("ta2");
|
||||
Boolean noUnification = data.getValue("noUnification").getOf(Integer.class) == 1;
|
||||
SerialMap location = data.getMapOrNull("location");
|
||||
|
||||
var pair = new Pair(
|
||||
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(ta1, context),
|
||||
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(ta2, context),
|
||||
PairOperator.fromString(op),
|
||||
noUnification
|
||||
);
|
||||
if (location != null) pair.location = SourceLoc.fromSerial(location);
|
||||
return pair;
|
||||
}
|
||||
}
|
||||
// ino.end
|
||||
|
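One small detail in Pair above: toSerial writes noUnification as the integer 1 or 0 while the other flags in this diff are stored as Booleans, so fromSerial has to read an Integer back and compare it against 1. A tiny self-contained sketch of that asymmetric encoding, using a plain map and invented names:

import java.util.HashMap;
import java.util.Map;

public class FlagEncodingDemo {
    public static void main(String[] args) {
        Map<String, Object> serialized = new HashMap<>();
        boolean noUnification = true;

        // Encode: the flag is stored as an int, mirroring Pair.toSerial above.
        serialized.put("noUnification", noUnification ? 1 : 0);

        // Decode: the reader must expect an Integer, not a Boolean, or the cast fails.
        boolean decoded = ((Integer) serialized.get("noUnification")) == 1;
        System.out.println(decoded); // true
    }
}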
@@ -1,15 +1,19 @@
|
||||
package de.dhbwstuttgart.typeinference.result;
|
||||
|
||||
import de.dhbwstuttgart.exceptions.NotImplementedException;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
|
||||
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
|
||||
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
|
||||
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||
|
||||
/**
|
||||
* enthaelt alle Paare, die in einem Ergebnis nicht vorkommen koennen
|
||||
* sie sind noetig fuer origPairs in PairTPHsmallerTPH, da hier auch
|
||||
* Paare vorkommen koennen die keine Result sind (z.B. bei FunN$$)
|
||||
*/
|
||||
public class PairNoResult extends ResultPair<RefTypeOrTPHOrWildcardOrGeneric, RefTypeOrTPHOrWildcardOrGeneric>{
|
||||
public class PairNoResult extends ResultPair<RefTypeOrTPHOrWildcardOrGeneric, RefTypeOrTPHOrWildcardOrGeneric>
|
||||
implements ISerializableData {
|
||||
//public final TypePlaceholder left;
|
||||
//public final TypePlaceholder right;
|
||||
|
||||
@@ -17,7 +21,7 @@ public class PairNoResult extends ResultPair<RefTypeOrTPHOrWildcardOrGeneric, Re
|
||||
* urspruengliches Paar aus diesem dieses Resultpair erzeugt wurde
|
||||
* wichtig fuer generated Generics
|
||||
*/
|
||||
ResultPair origPair;
|
||||
ResultPair<?,?> origPair;
|
||||
|
||||
public PairNoResult(RefTypeOrTPHOrWildcardOrGeneric left, RefTypeOrTPHOrWildcardOrGeneric right){
|
||||
super(left, right);
|
||||
@@ -29,4 +33,24 @@ public class PairNoResult extends ResultPair<RefTypeOrTPHOrWildcardOrGeneric, Re
|
||||
throw new NotImplementedException();
|
||||
//visitor.visit(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public SerialMap toSerial(KeyStorage keyStorage) {
|
||||
SerialMap serialized = new SerialMap();
|
||||
serialized.put("left", this.getLeft().toSerial(keyStorage));
|
||||
serialized.put("right", this.getRight().toSerial(keyStorage));
|
||||
// create the wrapper and put this as the object
|
||||
var serializedWrapper = super.toSerial(keyStorage);
|
||||
serializedWrapper.put("object", serialized);
|
||||
return serializedWrapper;
|
||||
}
|
||||
|
||||
public static PairNoResult fromSerial2(SerialMap data, UnifyContext context) {
|
||||
SerialMap left = data.getMap("left");
|
||||
SerialMap right = data.getMap("right");
|
||||
return new PairNoResult(
|
||||
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(left, context),
|
||||
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(right, context)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@@ -1,9 +1,13 @@
|
||||
package de.dhbwstuttgart.typeinference.result;
|
||||
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
|
||||
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
|
||||
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
|
||||
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||
|
||||
public class PairTPHEqualTPH extends ResultPair<TypePlaceholder, TypePlaceholder> {
|
||||
public class PairTPHEqualTPH extends ResultPair<TypePlaceholder, TypePlaceholder> implements ISerializableData {
|
||||
public PairTPHEqualTPH(TypePlaceholder tl, TypePlaceholder tr) {
|
||||
super(tl, tr);
|
||||
}
|
||||
@@ -12,4 +16,24 @@ public class PairTPHEqualTPH extends ResultPair<TypePlaceholder, TypePlaceholder
|
||||
public void accept(ResultPairVisitor visitor) {
|
||||
visitor.visit(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public SerialMap toSerial(KeyStorage keyStorage) {
|
||||
SerialMap serialized = new SerialMap();
|
||||
serialized.put("left", this.getLeft().toSerial(keyStorage));
|
||||
serialized.put("right", this.getRight().toSerial(keyStorage));
|
||||
// create the wrapper and put this as the object
|
||||
var serializedWrapper = super.toSerial(keyStorage);
|
||||
serializedWrapper.put("object", serialized);
|
||||
return serializedWrapper;
|
||||
}
|
||||
|
||||
public static PairTPHEqualTPH fromSerial2(SerialMap data, UnifyContext context) {
|
||||
SerialMap left = data.getMap("left");
|
||||
SerialMap right = data.getMap("right");
|
||||
return new PairTPHEqualTPH(
|
||||
(TypePlaceholder) RefTypeOrTPHOrWildcardOrGeneric.fromSerial(left, context),
|
||||
(TypePlaceholder) RefTypeOrTPHOrWildcardOrGeneric.fromSerial(right, context)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@@ -1,13 +1,17 @@
|
||||
package de.dhbwstuttgart.typeinference.result;
|
||||
|
||||
import de.dhbwstuttgart.syntaxtree.type.RefType;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
|
||||
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
|
||||
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
|
||||
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||
|
||||
/**
|
||||
* Steht für A =. RefType
|
||||
*/
|
||||
public class PairTPHequalRefTypeOrWildcardType extends ResultPair{
|
||||
public class PairTPHequalRefTypeOrWildcardType extends ResultPair<TypePlaceholder, RefTypeOrTPHOrWildcardOrGeneric>
|
||||
implements ISerializableData {
|
||||
public final TypePlaceholder left;
|
||||
public final RefTypeOrTPHOrWildcardOrGeneric right;
|
||||
|
||||
@@ -26,4 +30,24 @@ public class PairTPHequalRefTypeOrWildcardType extends ResultPair{
|
||||
public String toString() {
|
||||
return "(" + left.toString() + " = " + right.toString() + ")";
|
||||
}
|
||||
|
||||
@Override
|
||||
public SerialMap toSerial(KeyStorage keyStorage) {
|
||||
SerialMap serialized = new SerialMap();
|
||||
serialized.put("left", this.getLeft().toSerial(keyStorage));
|
||||
serialized.put("right", this.getRight().toSerial(keyStorage));
|
||||
// create the wrapper and put this as the object
|
||||
var serializedWrapper = super.toSerial(keyStorage);
|
||||
serializedWrapper.put("object", serialized);
|
||||
return serializedWrapper;
|
||||
}
|
||||
|
||||
public static PairTPHequalRefTypeOrWildcardType fromSerial2(SerialMap data, UnifyContext context) {
|
||||
SerialMap left = data.getMap("left");
|
||||
SerialMap right = data.getMap("right");
|
||||
return new PairTPHequalRefTypeOrWildcardType(
|
||||
(TypePlaceholder)RefTypeOrTPHOrWildcardOrGeneric.fromSerial(left, context),
|
||||
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(right, context)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@@ -1,12 +1,17 @@
|
||||
package de.dhbwstuttgart.typeinference.result;
|
||||
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
|
||||
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
|
||||
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
|
||||
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||
|
||||
/**
|
||||
* Steht für: A <. B
|
||||
*/
|
||||
public class PairTPHsmallerTPH extends ResultPair{
|
||||
public class PairTPHsmallerTPH extends ResultPair<TypePlaceholder,TypePlaceholder>
|
||||
implements ISerializableData {
|
||||
public final TypePlaceholder left;
|
||||
public final TypePlaceholder right;
|
||||
|
||||
@@ -14,7 +19,7 @@ public class PairTPHsmallerTPH extends ResultPair{
|
||||
* urspruengliches Paar aus diesem dieses Resultpair erzeugt wurde
|
||||
* wichtig fuer generated Generics
|
||||
*/
|
||||
ResultPair origPair;
|
||||
ResultPair<?,?> origPair;
|
||||
|
||||
public PairTPHsmallerTPH(TypePlaceholder left, TypePlaceholder right){
|
||||
super(left, right);
|
||||
@@ -22,7 +27,7 @@ public class PairTPHsmallerTPH extends ResultPair{
|
||||
this.right = right;
|
||||
}
|
||||
|
||||
public PairTPHsmallerTPH(TypePlaceholder left, TypePlaceholder right, ResultPair origPair){
|
||||
public PairTPHsmallerTPH(TypePlaceholder left, TypePlaceholder right, ResultPair<?,?> origPair){
|
||||
this(left, right);
|
||||
this.origPair = origPair;
|
||||
}
|
||||
@@ -36,4 +41,24 @@ public class PairTPHsmallerTPH extends ResultPair{
|
||||
public String toString() {
|
||||
return "(" + left.toString() + " < " + right.toString() + ")";
|
||||
}
|
||||
|
||||
@Override
|
||||
public SerialMap toSerial(KeyStorage keyStorage) {
|
||||
SerialMap serialized = new SerialMap();
|
||||
serialized.put("left", this.getLeft().toSerial(keyStorage));
|
||||
serialized.put("right", this.getRight().toSerial(keyStorage));
|
||||
// create the wrapper and put this as the object
|
||||
var serializedWrapper = super.toSerial(keyStorage);
|
||||
serializedWrapper.put("object", serialized);
|
||||
return serializedWrapper;
|
||||
}
|
||||
|
||||
public static PairTPHsmallerTPH fromSerial2(SerialMap data, UnifyContext context) {
|
||||
SerialMap left = data.getMap("left");
|
||||
SerialMap right = data.getMap("right");
|
||||
return new PairTPHsmallerTPH(
|
||||
(TypePlaceholder) RefTypeOrTPHOrWildcardOrGeneric.fromSerial(left, context),
|
||||
(TypePlaceholder) RefTypeOrTPHOrWildcardOrGeneric.fromSerial(right, context)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@@ -1,11 +1,17 @@
|
||||
package de.dhbwstuttgart.typeinference.result;
|
||||
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
|
||||
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
|
||||
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||
|
||||
/**
|
||||
* Paare, welche das Unifikationsergebnis darstellen
|
||||
*/
|
||||
public abstract class ResultPair<A extends RefTypeOrTPHOrWildcardOrGeneric,B extends RefTypeOrTPHOrWildcardOrGeneric> {
|
||||
public abstract class ResultPair<A extends RefTypeOrTPHOrWildcardOrGeneric,B extends RefTypeOrTPHOrWildcardOrGeneric>
|
||||
implements ISerializableData {
|
||||
private final A left;
|
||||
private final B right;
|
||||
|
||||
@@ -58,5 +64,35 @@ public abstract class ResultPair<A extends RefTypeOrTPHOrWildcardOrGeneric,B ext
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
String type = switch (this) {
case PairNoResult _ -> "pnr";
case PairTPHEqualTPH _ -> "ptet";
case PairTPHsmallerTPH _ -> "ptst";
case PairTPHequalRefTypeOrWildcardType _ -> "ptertwt";
default -> throw new RuntimeException("No type defined for ResultPair of class " + this.getClass().getName());
};
serialized.put("type", type);
// we only insert null for the object and expect the child classes to call this and override the value with themselves
serialized.put("object", SerialValue.NULL);
return serialized;
}

public static <A extends RefTypeOrTPHOrWildcardOrGeneric,B extends RefTypeOrTPHOrWildcardOrGeneric> ResultPair<A,B>
fromSerial(SerialMap data, UnifyContext context) {
String type = data.getValue("type").getOf(String.class);
SerialMap object = data.getMap("object");

return switch (type) {
case "pnr" -> (ResultPair) PairNoResult.fromSerial2(object, context);
case "ptet" -> (ResultPair) PairTPHEqualTPH.fromSerial2(object, context);
case "ptst" -> (ResultPair) PairTPHsmallerTPH.fromSerial2(object, context);
case "ptertwt" -> (ResultPair) PairTPHequalRefTypeOrWildcardType.fromSerial2(object, context);
default -> throw new RuntimeException("Could not unserialize class of unhandled type " + type);
};
}
}
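The base class tags each serialized ResultPair with a short "type" string ("pnr", "ptet", "ptst", "ptertwt") and stores null under "object"; the subclasses call super.toSerial and overwrite "object" with their own payload, and fromSerial dispatches back on the tag. A hypothetical round trip under that scheme, assuming two TypePlaceholders a and b plus a keyStorage/context:

    ResultPair<TypePlaceholder, TypePlaceholder> p = new PairTPHsmallerTPH(a, b);
    SerialMap wire = p.toSerial(keyStorage);                       // {"type": "ptst", "object": {"left": ..., "right": ...}}
    ResultPair<?, ?> back = ResultPair.fromSerial(wire, context);  // dispatches to PairTPHsmallerTPH.fromSerial2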
@@ -1,5 +1,13 @@
|
||||
package de.dhbwstuttgart.typeinference.result;
|
||||
|
||||
import com.google.common.collect.Ordering;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
|
||||
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Comparator;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
|
||||
@@ -10,148 +18,177 @@ import de.dhbwstuttgart.syntaxtree.type.RefType;
|
||||
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
|
||||
import de.dhbwstuttgart.syntaxtree.type.SuperWildcardType;
|
||||
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
@SuppressWarnings("rawtypes")
|
||||
public class ResultSet {
|
||||
public class ResultSet implements ISerializableData {
|
||||
|
||||
public final Set<ResultPair> results;
|
||||
public Set<ResultPair<TypePlaceholder, TypePlaceholder>> genIns;
|
||||
public final Set<ResultPair> results;
|
||||
public Set<ResultPair<TypePlaceholder, TypePlaceholder>> genIns;
|
||||
|
||||
public ResultSet(Set<ResultPair> set){
|
||||
this.results = set;
|
||||
this.genIns = new HashSet<>();
|
||||
results.forEach(x -> { if (x instanceof PairTPHsmallerTPH) { this.genIns.add(x);}} );
|
||||
}
|
||||
|
||||
public boolean contains(ResultPair toCheck) {
|
||||
return this.results.contains(toCheck);
|
||||
}
|
||||
|
||||
public void remove(ResultPair toCheck) {
|
||||
results.remove(toCheck);
|
||||
public ResultSet(Set<ResultPair> set) {
|
||||
this.results = set;
|
||||
this.genIns = new HashSet<>();
|
||||
results.forEach(x -> {
|
||||
if (x instanceof PairTPHsmallerTPH) {
|
||||
this.genIns.add(x);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public boolean contains(ResultPair toCheck) {
|
||||
return this.results.contains(toCheck);
|
||||
}
|
||||
|
||||
public void remove(ResultPair toCheck) {
|
||||
results.remove(toCheck);
|
||||
}
|
||||
|
||||
public ResolvedType resolveType(RefTypeOrTPHOrWildcardOrGeneric type) {
|
||||
if (type instanceof TypePlaceholder)
|
||||
return new Resolver(this).resolve((TypePlaceholder) type);
|
||||
if (type instanceof GenericRefType) return new ResolvedType(type, new HashSet<>());
|
||||
if (type instanceof RefType) {
|
||||
RelatedTypeWalker related = new RelatedTypeWalker(null, this);
|
||||
type.accept(related);
|
||||
return new ResolvedType(type, related.relatedTPHs);
|
||||
} else {
|
||||
throw new NotImplementedException();
|
||||
//return new ResolvedType(type,new HashSet<>());
|
||||
}
|
||||
}
|
||||
|
||||
public ResolvedType resolveType(RefTypeOrTPHOrWildcardOrGeneric type) {
|
||||
if(type instanceof TypePlaceholder)
|
||||
return new Resolver(this).resolve((TypePlaceholder)type);
|
||||
if(type instanceof GenericRefType)return new ResolvedType(type, new HashSet<>());
|
||||
if(type instanceof RefType) {
|
||||
RelatedTypeWalker related = new RelatedTypeWalker(null, this);
|
||||
type.accept(related);
|
||||
return new ResolvedType(type, related.relatedTPHs);
|
||||
} else {
|
||||
throw new NotImplementedException();
|
||||
//return new ResolvedType(type,new HashSet<>());
|
||||
}
|
||||
public String toString() {
|
||||
var results = new ArrayList<>(this.results);
|
||||
results.sort(
|
||||
Comparator
|
||||
.comparingInt((ResultPair o) -> o.getLeft().toString().length())
|
||||
.thenComparing(o -> o.getLeft().toString())
|
||||
.thenComparingInt(o -> o.getRight().toString().length())
|
||||
.thenComparing(o -> o.getRight().toString())
|
||||
);
|
||||
return results.toString();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (o instanceof ResultSet other) {
|
||||
// sort both result lists
|
||||
var thisElements = new ArrayList<>(this.results);
|
||||
thisElements.sort(Ordering.usingToString());
|
||||
var otherElements = new ArrayList<>(other.results);
|
||||
otherElements.sort(Ordering.usingToString());
|
||||
|
||||
return thisElements.equals(otherElements);
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return results.toString();
|
||||
}
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return results.hashCode();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (o instanceof ResultSet) {
|
||||
ResultSet other = (ResultSet)o;
|
||||
return this.results.equals(other.results);
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@Override
|
||||
public SerialMap toSerial(KeyStorage keyStorage) {
|
||||
SerialMap serialized = new SerialMap();
|
||||
serialized.put("results", SerialList.fromMapped(results, result -> result.toSerial(keyStorage)));
|
||||
return serialized;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return results.hashCode();
|
||||
}
|
||||
public static ResultSet fromSerial(SerialMap data, UnifyContext context) {
|
||||
var resultsData = data.getList("results").assertListOfMaps();
|
||||
return new ResultSet(resultsData.stream().map(resultData -> ResultPair.fromSerial(resultData, context)).collect(Collectors.toSet()));
|
||||
}
|
||||
}
|
||||
|
||||
class Resolver implements ResultSetVisitor {
|
||||
private final ResultSet result;
|
||||
private TypePlaceholder toResolve;
|
||||
private RefTypeOrTPHOrWildcardOrGeneric resolved;
|
||||
private final Set<GenericInsertPair> additionalTPHs = new HashSet<>();
|
||||
private ResultPair<?,?> currentPair;
|
||||
private final ResultSet result;
|
||||
private TypePlaceholder toResolve;
|
||||
private RefTypeOrTPHOrWildcardOrGeneric resolved;
|
||||
private final Set<GenericInsertPair> additionalTPHs = new HashSet<>();
|
||||
private ResultPair<?, ?> currentPair;
|
||||
|
||||
public Resolver(ResultSet resultPairs){
|
||||
this.result = resultPairs;
|
||||
public Resolver(ResultSet resultPairs) {
|
||||
this.result = resultPairs;
|
||||
}
|
||||
|
||||
public ResolvedType resolve(TypePlaceholder tph) {
|
||||
toResolve = tph;
|
||||
resolved = null;
|
||||
System.out.println(tph.toString());
|
||||
for (ResultPair<?, ?> resultPair : result.results) {
|
||||
if (resultPair instanceof PairTPHEqualTPH && ((PairTPHEqualTPH) resultPair).getLeft().equals(toResolve)) {
|
||||
currentPair = resultPair;
|
||||
return resolve(((PairTPHEqualTPH) resultPair).getRight());
|
||||
}
|
||||
}
|
||||
for (ResultPair<?, ?> resultPair : result.results) {
|
||||
currentPair = resultPair;
|
||||
resultPair.accept(this);
|
||||
}
|
||||
if (resolved == null) {//TPH kommt nicht im Result vor:
|
||||
resolved = tph;
|
||||
}
|
||||
|
||||
public ResolvedType resolve(TypePlaceholder tph){
|
||||
toResolve = tph;
|
||||
resolved = null;
|
||||
System.out.println(tph.toString());
|
||||
for(ResultPair<?,?> resultPair : result.results) {
|
||||
if(resultPair instanceof PairTPHEqualTPH && ((PairTPHEqualTPH) resultPair).getLeft().equals(toResolve)){
|
||||
currentPair = resultPair;
|
||||
return resolve(((PairTPHEqualTPH) resultPair).getRight());
|
||||
}
|
||||
}
|
||||
for(ResultPair<?,?> resultPair : result.results){
|
||||
currentPair = resultPair;
|
||||
resultPair.accept(this);
|
||||
}
|
||||
if(resolved==null){//TPH kommt nicht im Result vor:
|
||||
resolved = tph;
|
||||
}
|
||||
ResolvedType result = new ResolvedType(resolved, additionalTPHs);//resolved;
|
||||
result.setResultPair(currentPair);
|
||||
return result;
|
||||
}
|
||||
|
||||
ResolvedType result = new ResolvedType(resolved, additionalTPHs);//resolved;
|
||||
result.setResultPair(currentPair);
|
||||
return result;
|
||||
@Override
|
||||
public void visit(PairTPHsmallerTPH p) {
|
||||
currentPair = p;
|
||||
if (p.left.equals(toResolve)) {
|
||||
additionalTPHs.add(new GenericInsertPair(p.left, p.right));
|
||||
additionalTPHs.addAll(new RelatedTypeWalker(p.right, result).relatedTPHs);
|
||||
}
|
||||
if (p.right.equals(toResolve))
|
||||
additionalTPHs.addAll(new RelatedTypeWalker(p.left, result).relatedTPHs);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void visit(PairTPHsmallerTPH p) {
|
||||
currentPair = p;
|
||||
if(p.left.equals(toResolve)){
|
||||
additionalTPHs.add(new GenericInsertPair(p.left, p.right));
|
||||
additionalTPHs.addAll(new RelatedTypeWalker(p.right, result).relatedTPHs);
|
||||
}
|
||||
if(p.right.equals(toResolve))
|
||||
additionalTPHs.addAll(new RelatedTypeWalker(p.left, result).relatedTPHs);
|
||||
@Override
|
||||
public void visit(PairTPHequalRefTypeOrWildcardType p) {
|
||||
currentPair = p;
|
||||
if (p.left.equals(toResolve)) {
|
||||
resolved = p.right;
|
||||
RelatedTypeWalker related = new RelatedTypeWalker(null, result);
|
||||
p.right.accept(related);
|
||||
additionalTPHs.addAll(related.relatedTPHs);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void visit(PairTPHequalRefTypeOrWildcardType p) {
|
||||
currentPair = p;
|
||||
if(p.left.equals(toResolve)){
|
||||
resolved = p.right;
|
||||
RelatedTypeWalker related = new RelatedTypeWalker(null, result);
|
||||
p.right.accept(related);
|
||||
additionalTPHs.addAll(related.relatedTPHs);
|
||||
}
|
||||
}
|
||||
@Override
|
||||
public void visit(PairTPHEqualTPH p) {
|
||||
//Do nothing. Dieser Fall wird in der resolve-Methode abgefangen
|
||||
}
|
||||
|
||||
@Override
|
||||
public void visit(PairTPHEqualTPH p) {
|
||||
//Do nothing. Dieser Fall wird in der resolve-Methode abgefangen
|
||||
}
|
||||
@Override
|
||||
public void visit(RefType refType) {
|
||||
|
||||
@Override
|
||||
public void visit(RefType refType) {
|
||||
}
|
||||
|
||||
}
|
||||
@Override
|
||||
public void visit(GenericRefType genericRefType) {
|
||||
|
||||
@Override
|
||||
public void visit(GenericRefType genericRefType) {
|
||||
}
|
||||
|
||||
}
|
||||
@Override
|
||||
public void visit(SuperWildcardType superWildcardType) {
|
||||
|
||||
@Override
|
||||
public void visit(SuperWildcardType superWildcardType) {
|
||||
}
|
||||
|
||||
}
|
||||
@Override
|
||||
public void visit(TypePlaceholder typePlaceholder) {
|
||||
|
||||
@Override
|
||||
public void visit(TypePlaceholder typePlaceholder) {
|
||||
}
|
||||
|
||||
}
|
||||
@Override
|
||||
public void visit(ExtendsWildcardType extendsWildcardType) {
|
||||
|
||||
@Override
|
||||
public void visit(ExtendsWildcardType extendsWildcardType) {
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
||||
@@ -161,149 +198,150 @@ class Resolver implements ResultSetVisitor {
|
||||
@SuppressWarnings("rawtypes")
|
||||
class TPHResolver implements ResultSetVisitor {
|
||||
|
||||
private final TypePlaceholder tph;
|
||||
Set<GenericInsertPair> resolved = new HashSet<>();
|
||||
private final ResultSet resultSet;
|
||||
private final TypePlaceholder tph;
|
||||
Set<GenericInsertPair> resolved = new HashSet<>();
|
||||
private final ResultSet resultSet;
|
||||
|
||||
TPHResolver(TypePlaceholder tph, ResultSet resultSet){
|
||||
this.resultSet = resultSet;
|
||||
this.tph = tph;
|
||||
for(ResultPair p : resultSet.results){
|
||||
p.accept(this);
|
||||
}
|
||||
if(resolved.size() == 0){
|
||||
resolved.add(new GenericInsertPair(tph, null));
|
||||
}
|
||||
TPHResolver(TypePlaceholder tph, ResultSet resultSet) {
|
||||
this.resultSet = resultSet;
|
||||
this.tph = tph;
|
||||
for (ResultPair p : resultSet.results) {
|
||||
p.accept(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void visit(PairTPHsmallerTPH p) {
|
||||
if(p.left.equals(tph) || p.right.equals(tph)){
|
||||
resolved.add(new GenericInsertPair(p.left, p.right));
|
||||
}
|
||||
if (resolved.size() == 0) {
|
||||
resolved.add(new GenericInsertPair(tph, null));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void visit(PairTPHequalRefTypeOrWildcardType p) {
|
||||
TypePlaceholder otherSide = null;
|
||||
if(p.right.equals(tph)){
|
||||
otherSide = p.left;
|
||||
}
|
||||
if(otherSide != null){
|
||||
Set<ResultPair> newResultSet = new HashSet<>(this.resultSet.results);
|
||||
newResultSet.remove(p);
|
||||
resolved.addAll(new TPHResolver(otherSide, new ResultSet(newResultSet)).resolved);
|
||||
}
|
||||
@Override
|
||||
public void visit(PairTPHsmallerTPH p) {
|
||||
if (p.left.equals(tph) || p.right.equals(tph)) {
|
||||
resolved.add(new GenericInsertPair(p.left, p.right));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void visit(PairTPHEqualTPH p) {
|
||||
//ignorieren. Wird vom Resolver behandelt
|
||||
@Override
|
||||
public void visit(PairTPHequalRefTypeOrWildcardType p) {
|
||||
TypePlaceholder otherSide = null;
|
||||
if (p.right.equals(tph)) {
|
||||
otherSide = p.left;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void visit(RefType refType) {
|
||||
|
||||
if (otherSide != null) {
|
||||
Set<ResultPair> newResultSet = new HashSet<>(this.resultSet.results);
|
||||
newResultSet.remove(p);
|
||||
resolved.addAll(new TPHResolver(otherSide, new ResultSet(newResultSet)).resolved);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void visit(GenericRefType genericRefType) {
|
||||
@Override
|
||||
public void visit(PairTPHEqualTPH p) {
|
||||
//ignorieren. Wird vom Resolver behandelt
|
||||
}
|
||||
|
||||
}
|
||||
@Override
|
||||
public void visit(RefType refType) {
|
||||
|
||||
@Override
|
||||
public void visit(SuperWildcardType superWildcardType) {
|
||||
}
|
||||
|
||||
}
|
||||
@Override
|
||||
public void visit(GenericRefType genericRefType) {
|
||||
|
||||
@Override
|
||||
public void visit(TypePlaceholder typePlaceholder) {
|
||||
}
|
||||
|
||||
}
|
||||
@Override
|
||||
public void visit(SuperWildcardType superWildcardType) {
|
||||
|
||||
@Override
|
||||
public void visit(ExtendsWildcardType extendsWildcardType) {
|
||||
}
|
||||
|
||||
}
|
||||
@Override
|
||||
public void visit(TypePlaceholder typePlaceholder) {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void visit(ExtendsWildcardType extendsWildcardType) {
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("rawtypes")
|
||||
class RelatedTypeWalker implements ResultSetVisitor {
|
||||
|
||||
final Set<GenericInsertPair> relatedTPHs = new HashSet<>();
|
||||
private final TypePlaceholder toResolve;
|
||||
private final ResultSet resultSet;
|
||||
final Set<GenericInsertPair> relatedTPHs = new HashSet<>();
|
||||
private final TypePlaceholder toResolve;
|
||||
private final ResultSet resultSet;
|
||||
|
||||
/**
|
||||
* Läuft über das resultSet und speichert alle TPHs, welche mit start in Verbindung stehen
|
||||
* @param start - kann null sein, wenn der Walker für einen RefType benutzt wird
|
||||
* @param resultSet
|
||||
*/
|
||||
RelatedTypeWalker(TypePlaceholder start, ResultSet resultSet){
|
||||
this.toResolve = start;
|
||||
this.resultSet = resultSet;
|
||||
int resolved = 0;
|
||||
do{
|
||||
resolved = relatedTPHs.size();
|
||||
for(ResultPair p : resultSet.results){
|
||||
p.accept(this);
|
||||
p.accept(this);
|
||||
}
|
||||
}while(resolved - relatedTPHs.size() > 0);
|
||||
}
|
||||
/**
|
||||
* Läuft über das resultSet und speichert alle TPHs, welche mit start in Verbindung stehen
|
||||
*
|
||||
* @param start - kann null sein, wenn der Walker für einen RefType benutzt wird
|
||||
* @param resultSet
|
||||
*/
|
||||
RelatedTypeWalker(TypePlaceholder start, ResultSet resultSet) {
|
||||
this.toResolve = start;
|
||||
this.resultSet = resultSet;
|
||||
int resolved = 0;
|
||||
do {
|
||||
resolved = relatedTPHs.size();
|
||||
for (ResultPair p : resultSet.results) {
|
||||
p.accept(this);
|
||||
p.accept(this);
|
||||
}
|
||||
} while (resolved - relatedTPHs.size() > 0);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void visit(PairTPHsmallerTPH p) {
|
||||
if(p.getRight().equals(toResolve)){
|
||||
relatedTPHs.addAll(new TPHResolver(p.right, resultSet).resolved);
|
||||
//relatedTPHs.addAll(new RelatedTypeWalker(p.right, resultSet).relatedTPHs);
|
||||
}
|
||||
if(p.getLeft().equals(toResolve)){
|
||||
relatedTPHs.addAll(new TPHResolver(p.left, resultSet).resolved);
|
||||
//relatedTPHs.addAll(new RelatedTypeWalker(p.left, resultSet).relatedTPHs);
|
||||
}
|
||||
@Override
|
||||
public void visit(PairTPHsmallerTPH p) {
|
||||
if (p.getRight().equals(toResolve)) {
|
||||
relatedTPHs.addAll(new TPHResolver(p.right, resultSet).resolved);
|
||||
//relatedTPHs.addAll(new RelatedTypeWalker(p.right, resultSet).relatedTPHs);
|
||||
}
|
||||
if (p.getLeft().equals(toResolve)) {
|
||||
relatedTPHs.addAll(new TPHResolver(p.left, resultSet).resolved);
|
||||
//relatedTPHs.addAll(new RelatedTypeWalker(p.left, resultSet).relatedTPHs);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void visit(PairTPHequalRefTypeOrWildcardType p) {
|
||||
if(p.getLeft().equals(toResolve)){
|
||||
p.getRight().accept(this);
|
||||
}
|
||||
@Override
|
||||
public void visit(PairTPHequalRefTypeOrWildcardType p) {
|
||||
if (p.getLeft().equals(toResolve)) {
|
||||
p.getRight().accept(this);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void visit(PairTPHEqualTPH p) {
|
||||
//Kann ignoriert werden. Diese Fälle werden vom Resolver behandelt
|
||||
}
|
||||
@Override
|
||||
public void visit(PairTPHEqualTPH p) {
|
||||
//Kann ignoriert werden. Diese Fälle werden vom Resolver behandelt
|
||||
}
|
||||
|
||||
/*
|
||||
Die folgenden Funktionen fügen alle TPHs an die relatedTPHs an, denen sie begegnen:
|
||||
Das wird verwendet, wenn alle relatedTPHs aus den Parametern eines RefTypes angefügt werden sollen
|
||||
*/
|
||||
|
||||
@Override
|
||||
public void visit(RefType refType) {
|
||||
for(RefTypeOrTPHOrWildcardOrGeneric param : refType.getParaList()){
|
||||
param.accept(this);
|
||||
}
|
||||
@Override
|
||||
public void visit(RefType refType) {
|
||||
for (RefTypeOrTPHOrWildcardOrGeneric param : refType.getParaList()) {
|
||||
param.accept(this);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void visit(SuperWildcardType superWildcardType) {
|
||||
superWildcardType.getInnerType().accept(this);
|
||||
}
|
||||
@Override
|
||||
public void visit(SuperWildcardType superWildcardType) {
|
||||
superWildcardType.getInnerType().accept(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void visit(TypePlaceholder typePlaceholder) {
|
||||
relatedTPHs.addAll(new TPHResolver(typePlaceholder, resultSet).resolved);
|
||||
}
|
||||
@Override
|
||||
public void visit(TypePlaceholder typePlaceholder) {
|
||||
relatedTPHs.addAll(new TPHResolver(typePlaceholder, resultSet).resolved);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void visit(ExtendsWildcardType extendsWildcardType) {
|
||||
extendsWildcardType.getInnerType().accept(this);
|
||||
}
|
||||
@Override
|
||||
public void visit(ExtendsWildcardType extendsWildcardType) {
|
||||
extendsWildcardType.getInnerType().accept(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void visit(GenericRefType genericRefType) {
|
||||
}
|
||||
@Override
|
||||
public void visit(GenericRefType genericRefType) {
|
||||
}
|
||||
}
|
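ResultSet itself serializes as a flat list under "results", one entry per ResultPair, so a complete unification result can be shipped to the server and rebuilt. A short sketch of a hypothetical call site; keyStorage and context would come from the surrounding server code:

    SerialMap wire = resultSet.toSerial(keyStorage);
    ResultSet restored = ResultSet.fromSerial(wire, context);
    // equals() sorts both result lists by toString() before comparing, so a faithful
    // round trip should yield restored.equals(resultSet) == true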
@@ -1,6 +1,7 @@
|
||||
//PL 2018-12-19: Merge checken
|
||||
package de.dhbwstuttgart.typeinference.typeAlgo;
|
||||
|
||||
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
|
||||
import java.util.*;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
|
@@ -0,0 +1,62 @@
|
||||
package de.dhbwstuttgart.typeinference.unify;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.RecursiveTask;
import java.util.concurrent.atomic.AtomicBoolean;

/**
* An intermediate class for the recursive steps of the TypeUnifyTask:
* This allows to cancel parts of the recursion tree, instead of only the whole execution as before. But in
* order for that to work, all cancellable child tasks must be added when they are created
*
* @param <T>
*/
public abstract class CancellableTask<T> extends RecursiveTask<T> {

private final AtomicBoolean executionCancelled = new AtomicBoolean(false);
private final List<CancellableTask<?>> childTasks = new ArrayList<>();
private CancellableTask<?> parentTask = null;

/**
* Set the execution for this task and all its (recursive) children to be cancelled
*/
protected void cancelExecution() {
// is this branch already cancelled? Then do nothing
if (this.executionCancelled.get()) return;
executionCancelled.set(true);
this.cancelChildExecution();
}

public void cancelChildExecution() {
for (var childTask : childTasks) {
// no need to cancel a branch that is already finished
if (!childTask.isDone()) {
childTask.cancelExecution();
}
}
}

protected void cancelSiblingTasks() {
if (this.parentTask != null) {
boolean thisWasCancelledBefore = this.executionCancelled.get();
this.parentTask.cancelChildExecution();
this.executionCancelled.set(thisWasCancelledBefore);
}
}

public Boolean isExecutionCancelled() {
return executionCancelled.get();
}

public void addChildTask(CancellableTask<?> childTask) {
this.childTasks.add(childTask);
childTask.setParentTask(this);
}

private void setParentTask(CancellableTask<?> parentTask) {
this.parentTask = parentTask;
}

}
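Usage is cooperative: a parent task registers every child it forks via addChildTask, each task polls isExecutionCancelled() at its checkpoints, and cancelSiblingTasks lets the first branch that finds a result prune the rest. A minimal hypothetical subclass (not part of the diff) illustrating that contract:

    class SearchTask extends CancellableTask<Integer> {
        private final int depth;
        SearchTask(int depth) { this.depth = depth; }

        @Override
        protected Integer compute() {
            if (isExecutionCancelled()) return null;     // bail out if an ancestor cancelled this branch
            if (depth == 0) {
                cancelSiblingTasks();                    // found a result: prune the other branches
                return 42;
            }
            SearchTask child = new SearchTask(depth - 1);
            addChildTask(child);                         // register so cancellation reaches the child
            child.fork();
            return child.join();
        }
    }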
@@ -0,0 +1,64 @@
|
||||
package de.dhbwstuttgart.typeinference.unify;

import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.RecursiveTask;

public class ConcurrentSetMergeTask<T> extends RecursiveTask<Set<T>> {

public static <E> Set<E> merge(List<Set<E>> list) {
if (list.isEmpty()) {
return new HashSet<>();
}
var task = new ConcurrentSetMergeTask<>(list, 0, list.size());
return task.compute();
}

private static final int LIST_THRESHOLD = 3;
private static final int ELEMENT_THRESHOLD = 1000;

private final List<Set<T>> list;
private final int start;
private final int end;

private ConcurrentSetMergeTask(List<Set<T>> list, int start, int end) {
this.list = list;
this.start = start;
this.end = end;
}

@Override
protected Set<T> compute() {
int size = end - start;

int totalElements = 0;
for (int i = start+1; i < end; i++) {
totalElements += list.get(i).size();
}

System.out.println("ConcurrentSetMerge? -> " + (size <= LIST_THRESHOLD || totalElements < ELEMENT_THRESHOLD ? "true" : "false"));

// size will always be at least one
if (true || size <= LIST_THRESHOLD || totalElements < ELEMENT_THRESHOLD) {
Set<T> result = this.list.get(start);
for (int i = start+1; i < end; i++) {
result.addAll(list.get(i));
}
return result;
} else {
int mid = start + (size / 2);
ConcurrentSetMergeTask<T> leftTask = new ConcurrentSetMergeTask<>(list, start, mid);
ConcurrentSetMergeTask<T> rightTask = new ConcurrentSetMergeTask<>(list, mid, end);

leftTask.fork();
Set<T> rightResult = rightTask.compute();
Set<T> leftResult = leftTask.join();

// Merge results
leftResult.addAll(rightResult);
return leftResult;
}
}
}
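The public entry point is the static merge() helper, which unions a list of sets either sequentially or, once the "true ||" guard above is removed, by splitting the work across fork/join subtasks beyond the thresholds. Note that it reuses the first set in the list as the accumulator rather than copying it. A small hypothetical call:

    List<Set<UnifyPair>> partialResults = new ArrayList<>();   // filled with the child tasks' result sets
    Set<UnifyPair> merged = ConcurrentSetMergeTask.merge(partialResults);
    // merge() adds everything into the first set of the list instead of allocating a new one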
@@ -0,0 +1,84 @@
|
||||
package de.dhbwstuttgart.typeinference.unify;

import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import java.util.ArrayList;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;

/**
* Calculate unique placeholder names
*/
public class PlaceholderRegistry implements ISerializableData {

private final Set<String> existingPlaceholders = ConcurrentHashMap.newKeySet();
private final AtomicInteger placeholderCount = new AtomicInteger();
public ArrayList<PlaceholderType> UnifyTypeFactory_PLACEHOLDERS = new ArrayList<>();

/**
* Add a placeholder into the list of existing ones, as soon as a new PlaceholderType is created
*
* @param placeholderName The placeholder to add
*/
public void addPlaceholder(String placeholderName) {
this.existingPlaceholders.add(placeholderName);
}

/**
* Generate a fresh placeholder name that is unique within this registry
*
* @return The generated name
*/
public String generateFreshPlaceholderName() {
String name;
do {
int pc = placeholderCount.incrementAndGet();
name = getUppercaseTokenFromInt(pc);
}
while (existingPlaceholders.contains(name));
this.addPlaceholder(name);
return name;
}

public PlaceholderRegistry deepClone() {
PlaceholderRegistry pr2 = new PlaceholderRegistry();
this.existingPlaceholders.forEach(pr2::addPlaceholder);
pr2.UnifyTypeFactory_PLACEHOLDERS.addAll(this.UnifyTypeFactory_PLACEHOLDERS);
pr2.placeholderCount.set(this.placeholderCount.get());
return pr2;
}

/**
* Generate a token that consists of uppercase letters by encoding the value i in base 26 (least significant letter first)
*
* @param i The value that will be represented as a token
* @return The generated token
*/
private String getUppercaseTokenFromInt(int i) {
StringBuilder sb = new StringBuilder();
while (i >= 0) {
sb.append((char)(i % 26 + 65));
i = i / 26 - 1;
}
//sb.append(suffix);
return sb.toString();
}

@Override
public String toString() {
return this.existingPlaceholders.toString();
}

@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("ph", new SerialValue<>(new ArrayList<>(this.existingPlaceholders)));
serialized.put("factoryPh", SerialList.fromMapped(this.UnifyTypeFactory_PLACEHOLDERS, t -> t.toSerial(keyStorage)));
return serialized;
}
}
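The registry hands out fresh names by counting upwards and encoding the counter in uppercase letters (least significant letter first), skipping anything already registered. A hypothetical sequence, assuming no names were registered beforehand:

    PlaceholderRegistry registry = new PlaceholderRegistry();
    registry.generateFreshPlaceholderName();   // "B"  (the counter starts at 1)
    registry.generateFreshPlaceholderName();   // "C"
    registry.addPlaceholder("D");              // names created elsewhere are reserved here
    registry.generateFreshPlaceholderName();   // "E"  ("D" is skipped because it is already registered)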
@@ -38,14 +38,17 @@ import org.apache.commons.io.output.NullOutputStream;
|
||||
public class RuleSet implements IRuleSet{
|
||||
|
||||
Writer logFile;
|
||||
final PlaceholderRegistry placeholderRegistry;
|
||||
|
||||
public RuleSet() {
|
||||
public RuleSet(PlaceholderRegistry placeholderRegistry) {
|
||||
super();
|
||||
logFile = new OutputStreamWriter(new NullOutputStream());
|
||||
logFile = OutputStreamWriter.nullWriter();
|
||||
this.placeholderRegistry = placeholderRegistry;
|
||||
}
|
||||
|
||||
RuleSet(Writer logFile) {
|
||||
RuleSet(Writer logFile, PlaceholderRegistry placeholderRegistry) {
|
||||
this.logFile = logFile;
|
||||
this.placeholderRegistry = placeholderRegistry;
|
||||
}
|
||||
|
||||
@Override
|
||||
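With this change a RuleSet no longer mints placeholder names through global state; every instance needs the registry of its unification run. A hypothetical call site, assuming the registry is taken from the surrounding UnifyContext:

    RuleSet rules = new RuleSet(placeholderRegistry);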
@@ -864,7 +867,7 @@ public class RuleSet implements IRuleSet{
|
||||
try {
|
||||
logFile.write("FUNgreater: " + pair + "\n");
|
||||
logFile.write("FUNred: " + result + "\n");
|
||||
logFile.flush();
|
||||
// logFile.flush();
|
||||
}
|
||||
catch (IOException e) {
|
||||
System.out.println("logFile-Error");
|
||||
@@ -939,10 +942,10 @@ public class RuleSet implements IRuleSet{
|
||||
|
||||
UnifyType[] freshPlaceholders = new UnifyType[funNLhsType.getTypeParams().size()];
|
||||
for(int i = 0; i < freshPlaceholders.length-1; i++) {
|
||||
freshPlaceholders[i] = PlaceholderType.freshPlaceholder();
|
||||
freshPlaceholders[i] = PlaceholderType.freshPlaceholder(placeholderRegistry);
|
||||
((PlaceholderType)freshPlaceholders[i]).setVariance(inversVariance);
|
||||
}
|
||||
freshPlaceholders[freshPlaceholders.length-1] = PlaceholderType.freshPlaceholder();
|
||||
freshPlaceholders[freshPlaceholders.length-1] = PlaceholderType.freshPlaceholder(placeholderRegistry);
|
||||
((PlaceholderType)freshPlaceholders[freshPlaceholders.length-1]).setVariance(variance);
|
||||
result.add(new UnifyPair(funNLhsType.getTypeParams().get(funNLhsType.getTypeParams().size()-1), freshPlaceholders[funNLhsType.getTypeParams().size()-1], PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
|
||||
|
||||
@@ -960,7 +963,7 @@ public class RuleSet implements IRuleSet{
|
||||
try {
|
||||
logFile.write("FUNgreater: " + pair + "\n");
|
||||
logFile.write("FUNgreater: " + result + "\n");
|
||||
logFile.flush();
|
||||
// logFile.flush();
|
||||
}
|
||||
catch (IOException e) {
|
||||
System.out.println("lofFile-Error");
|
||||
@@ -988,10 +991,10 @@ public class RuleSet implements IRuleSet{
|
||||
|
||||
UnifyType[] freshPlaceholders = new UnifyType[funNRhsType.getTypeParams().size()];
|
||||
for(int i = 0; i < freshPlaceholders.length-1; i++) {
|
||||
freshPlaceholders[i] = PlaceholderType.freshPlaceholder();
|
||||
freshPlaceholders[i] = PlaceholderType.freshPlaceholder(placeholderRegistry);
|
||||
((PlaceholderType)freshPlaceholders[i]).setVariance(inversVariance);
|
||||
}
|
||||
freshPlaceholders[freshPlaceholders.length-1] = PlaceholderType.freshPlaceholder();
|
||||
freshPlaceholders[freshPlaceholders.length-1] = PlaceholderType.freshPlaceholder(placeholderRegistry);
|
||||
((PlaceholderType)freshPlaceholders[freshPlaceholders.length-1]).setVariance(variance);
|
||||
|
||||
result.add(new UnifyPair(freshPlaceholders[funNRhsType.getTypeParams().size()-1], funNRhsType.getTypeParams().get(funNRhsType.getTypeParams().size()-1), PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
|
||||
@@ -1010,7 +1013,7 @@ public class RuleSet implements IRuleSet{
|
||||
try {
|
||||
logFile.write("FUNgreater: " + pair + "\n");
|
||||
logFile.write("FUNsmaller: " + result + "\n");
|
||||
logFile.flush();
|
||||
// logFile.flush();
|
||||
}
|
||||
catch (IOException e) {
|
||||
System.out.println("lofFile-Error");
|
||||
@@ -1051,7 +1054,7 @@ public class RuleSet implements IRuleSet{
|
||||
if(isGen)
|
||||
result.add(new UnifyPair(rhsType, lhsType, PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
|
||||
else {
|
||||
UnifyType freshTph = PlaceholderType.freshPlaceholder();
|
||||
UnifyType freshTph = PlaceholderType.freshPlaceholder(placeholderRegistry);
|
||||
result.add(new UnifyPair(rhsType, new ExtendsType(freshTph), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
|
||||
result.add(new UnifyPair(extendedType, freshTph, PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
|
||||
}
|
||||
@@ -1079,7 +1082,7 @@ public class RuleSet implements IRuleSet{
|
||||
if(isGen)
|
||||
result.add(new UnifyPair(rhsType, lhsType, PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
|
||||
else {
|
||||
UnifyType freshTph = PlaceholderType.freshPlaceholder();
|
||||
UnifyType freshTph = PlaceholderType.freshPlaceholder(placeholderRegistry);
|
||||
result.add(new UnifyPair(rhsType, new SuperType(freshTph), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
|
||||
Set<UnifyType> fBounded = pair.getfBounded();
|
||||
fBounded.add(lhsType);
|
||||
|
@@ -1,41 +1,41 @@
|
||||
package de.dhbwstuttgart.typeinference.unify;
|
||||
|
||||
import java.io.FileWriter;
|
||||
import de.dhbwstuttgart.util.Logger;
|
||||
import java.io.IOException;
|
||||
import java.io.Writer;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.ForkJoinPool;
|
||||
|
||||
import de.dhbwstuttgart.core.JavaTXCompiler;
|
||||
import de.dhbwstuttgart.typeinference.constraints.Constraint;
|
||||
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
|
||||
import de.dhbwstuttgart.typeinference.constraints.Pair;
|
||||
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
|
||||
|
||||
public class TypeUnify {
|
||||
|
||||
|
||||
private TypeUnify() {}
|
||||
|
||||
private static <T> T joinFuture(CompletableFuture<T> future) {
|
||||
try {
|
||||
return future.get();
|
||||
}
|
||||
catch (InterruptedException | ExecutionException exception) {
|
||||
throw new RuntimeException(exception);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* unify parallel ohne result modell
|
||||
* @param undConstrains
|
||||
* @param oderConstraints
|
||||
* @param fc
|
||||
* @param logFile
|
||||
* @param log
|
||||
* @param cons
|
||||
* @return
|
||||
*/
|
||||
public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
|
||||
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
|
||||
ForkJoinPool pool = new ForkJoinPool();
|
||||
pool.invoke(unifyTask);
|
||||
Set<Set<UnifyPair>> res = unifyTask.join();
|
||||
public static Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
|
||||
ForkJoinPool pool = TypeUnify.createThreadPool();
|
||||
UnifyContext context = unifyContext.newWithParallel(true).newWithExecutor(pool);
|
||||
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, context, 0);
|
||||
Set<Set<UnifyPair>> res = joinFuture(unifyTask.compute());
|
||||
try {
|
||||
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements + "\n");
|
||||
logFile.flush();
|
||||
unifyContext.logFile().write("\nnoShortendElements: " + TypeUnifyTask.noShortendElements + "\n");
|
||||
// logFile.flush();
|
||||
}
|
||||
catch (IOException e) {
|
||||
System.err.println("no log-File");
|
||||
@@ -45,46 +45,32 @@ public class TypeUnify {
|
||||
|
||||
/**
|
||||
* unify asynchron mit Rückgabe UnifyResultModel ohne dass alle results gesammelt sind
|
||||
* @param undConstrains
|
||||
* @param oderConstraints
|
||||
* @param fc
|
||||
* @param logFile
|
||||
* @param log
|
||||
* @param cons
|
||||
* @param ret
|
||||
* @return
|
||||
*/
|
||||
public UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
|
||||
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
|
||||
ForkJoinPool pool = new ForkJoinPool();
|
||||
pool.invoke(unifyTask);
|
||||
return ret;
|
||||
public static UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
|
||||
ForkJoinPool pool = TypeUnify.createThreadPool();
|
||||
UnifyContext context = unifyContext.newWithExecutor(pool);
|
||||
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, context, 0);
|
||||
unifyTask.compute();
|
||||
return unifyContext.resultModel();
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* unify parallel mit Rückgabe UnifyResultModel nachdem alle results gesammelt sind
|
||||
* @param undConstrains
|
||||
* @param oderConstraints
|
||||
* @param fc
|
||||
* @param logFile
|
||||
* @param log
|
||||
* @param cons
|
||||
* @param ret
|
||||
* @return
|
||||
*/
|
||||
public UnifyResultModel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
|
||||
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
|
||||
ForkJoinPool pool = new ForkJoinPool();
|
||||
pool.invoke(unifyTask);
|
||||
Set<Set<UnifyPair>> res = unifyTask.join();
|
||||
public static Set<Set<UnifyPair>> unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
|
||||
ForkJoinPool pool = TypeUnify.createThreadPool();
|
||||
UnifyContext context = unifyContext.newWithParallel(true).newWithExecutor(pool);
|
||||
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, context, 0);
|
||||
var result = joinFuture(unifyTask.compute());
|
||||
|
||||
try {
|
||||
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
|
||||
logFile.flush();
|
||||
unifyContext.logFile().write("\nnoShortendElements: " + TypeUnifyTask.noShortendElements +"\n");
|
||||
// logFile.flush();
|
||||
}
|
||||
catch (IOException e) {
|
||||
System.err.println("no log-File");
|
||||
}
|
||||
return ret;
|
||||
return result;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -97,20 +83,13 @@ public class TypeUnify {
|
||||
|
||||
/**
|
||||
* unify sequentiell mit oderconstraints
|
||||
* @param undConstrains
|
||||
* @param oderConstraints
|
||||
* @param fc
|
||||
* @param logFile
|
||||
* @param log
|
||||
* @param cons
|
||||
* @return
|
||||
*/
|
||||
public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
|
||||
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0, ret, usedTasks);
|
||||
Set<Set<UnifyPair>> res = unifyTask.compute();
|
||||
public static Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
|
||||
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, unifyContext.newWithParallel(false), 0);
|
||||
Set<Set<UnifyPair>> res = joinFuture(unifyTask.compute());
|
||||
try {
|
||||
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
|
||||
logFile.flush();
|
||||
unifyContext.logFile().write("\nnoShortendElements: " + TypeUnifyTask.noShortendElements +"\n");
|
||||
// logFile.flush();
|
||||
}
|
||||
catch (IOException e) {
|
||||
System.err.println("no log-File");
|
||||
@@ -118,4 +97,14 @@ public class TypeUnify {
|
||||
return res;
|
||||
}
|
||||
|
||||
private static ForkJoinPool createThreadPool() {
|
||||
Logger.print("Available processors: " + Runtime.getRuntime().availableProcessors());
|
||||
return new ForkJoinPool(
|
||||
Runtime.getRuntime().availableProcessors(),
|
||||
ForkJoinPool.defaultForkJoinWorkerThreadFactory,
|
||||
null,
|
||||
false
|
||||
);
|
||||
}
|
||||
|
||||
}
|
||||
|
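All TypeUnify entry points are static now and take a UnifyContext instead of the old logFile/log/ret/usedTasks parameter list; the ForkJoinPool is created internally via createThreadPool(). A hypothetical call site, with undConstraints, oderConstraints, fc and context prepared by the caller:

    Set<Set<UnifyPair>> solutions = TypeUnify.unify(undConstraints, oderConstraints, fc, context);
    // or, without blocking until the full result set is collected:
    UnifyResultModel model = TypeUnify.unifyAsync(undConstraints, oderConstraints, fc, context);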
@@ -13,54 +13,52 @@ import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
|
||||
import de.dhbwstuttgart.typeinference.constraints.Pair;
|
||||
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
|
||||
public class TypeUnify2Task extends TypeUnifyTask {
|
||||
|
||||
Set<Set<UnifyPair>> setToFlatten;
|
||||
Set<UnifyPair> methodSignatureConstraintUebergabe;
|
||||
|
||||
public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks, Set<UnifyPair> methodSignatureConstraintUebergabe) {
|
||||
super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
|
||||
this.setToFlatten = setToFlatten;
|
||||
this.nextSetElement = nextSetElement;
|
||||
this.methodSignatureConstraintUebergabe = methodSignatureConstraintUebergabe;
|
||||
}
|
||||
|
||||
Set<UnifyPair> getNextSetElement() {
|
||||
return nextSetElement;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Set<Set<UnifyPair>> compute() {
|
||||
if (one) {
|
||||
System.out.println("two");
|
||||
}
|
||||
one = true;
|
||||
Set<Set<UnifyPair>> res = unify2(setToFlatten, eq, oderConstraintsField, fc, parallel, rekTiefeField, methodSignatureConstraintUebergabe);
|
||||
Set<Set<UnifyPair>> setToFlatten;
|
||||
Set<UnifyPair> methodSignatureConstraintUebergabe;
|
||||
|
||||
public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, UnifyContext context, int rekTiefe, Set<UnifyPair> methodSignatureConstraintUebergabe) {
|
||||
super(eq, oderConstraints, fc, context, rekTiefe);
|
||||
this.setToFlatten = setToFlatten;
|
||||
this.nextSetElement = nextSetElement;
|
||||
this.methodSignatureConstraintUebergabe = methodSignatureConstraintUebergabe;
|
||||
}
|
||||
|
||||
public Set<UnifyPair> getNextSetElement() {
|
||||
return nextSetElement;
|
||||
}
|
||||
|
||||
@Override
|
||||
public CompletableFuture<Set<Set<UnifyPair>>> compute() {
|
||||
if (one) {
|
||||
System.out.println("two");
|
||||
}
|
||||
one = true;
|
||||
CompletableFuture<Set<Set<UnifyPair>>> res =
|
||||
unify2(setToFlatten, eq, oderConstraintsField, fc, context.parallel(), rekTiefeField, methodSignatureConstraintUebergabe);
|
||||
/*if (isUndefinedPairSetSet(res)) {
|
||||
return new HashSet<>(); }
|
||||
else
|
||||
*/
|
||||
//writeLog("xxx");
|
||||
//noOfThread--;
|
||||
synchronized (usedTasks) {
|
||||
if (this.myIsCancelled()) {
|
||||
return new HashSet<>();
|
||||
}
|
||||
else {
|
||||
return res;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void closeLogFile() {
|
||||
*/
|
||||
//writeLog("xxx");
|
||||
//noOfThread--;
|
||||
if (this.myIsCancelled()) {
|
||||
return CompletableFuture.completedFuture(new HashSet<>());
|
||||
} else {
|
||||
return res;
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
logFile.close();
|
||||
}
|
||||
catch (IOException ioE) {
|
||||
System.err.println("no log-File" + thNo);
|
||||
}
|
||||
|
||||
}
|
||||
public void closeLogFile() {
|
||||
|
||||
try {
|
||||
context.logFile().close();
|
||||
} catch (IOException ioE) {
|
||||
System.err.println("no log-File");
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
File diff suppressed because it is too large
@@ -0,0 +1,188 @@
|
||||
package de.dhbwstuttgart.typeinference.unify;
|
||||
|
||||
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
/**
|
||||
* A collection of capsuled (and thus static) functions to split up large algorithms in TypeUnifyTask
|
||||
*/
|
||||
public class TypeUnifyTaskHelper {
|
||||
|
||||
/**
|
||||
* Filter all topLevelSets for those with a single element that contain only one pair:
|
||||
* a <. theta,
|
||||
* theta <. a or
|
||||
* a =. theta
|
||||
*/
|
||||
public static Set<Set<UnifyPair>> getSingleElementSets(ArrayList<Set<? extends Set<UnifyPair>>> topLevelSets) {
|
||||
return topLevelSets.stream()
|
||||
.filter(x -> x.size() == 1)
|
||||
.map(y -> y.stream().findFirst().get()).collect(Collectors.toSet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Varianzbestimmung Anfang
|
||||
* Oderconstraint, wenn entweder kein Basepair oder unterschiedliche Basepairs => oderConstraint = true;
|
||||
* Varianz = 1 => Argumentvariable
|
||||
* Varianz = -1 => Rückgabevariable
|
||||
* Varianz = 0 => unklar
|
||||
* Varianz = 2 => Operatoren oderConstraints
|
||||
*/
|
||||
public static int calculateVariance(List<Set<UnifyPair>> nextSetasList) {
|
||||
Optional<Integer> xi = nextSetasList.stream().map(x -> x.stream().filter(y -> (y.getLhsType() instanceof PlaceholderType && !(y.getRhsType() instanceof PlaceholderType)))
|
||||
.filter(z -> ((PlaceholderType) z.getLhsType()).getVariance() != 0)
|
||||
.map(c -> ((PlaceholderType) c.getLhsType()).getVariance())
|
||||
.reduce((a, b) -> {
|
||||
if (a.intValue() == b.intValue()) return a;
|
||||
else return 0;
|
||||
})) //2 kommt insbesondere bei Oder-Constraints vor
|
||||
.filter(Optional::isPresent)
|
||||
.map(Optional::get)
|
||||
.findAny();
|
||||
|
||||
return xi.orElse(0);
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public static int calculateOderConstraintVariance(List<Set<UnifyPair>> nextSetAsList) {
|
||||
Optional<Integer> optVariance =
|
||||
nextSetAsList
|
||||
.getFirst()
|
||||
.stream()
|
||||
.filter(x -> x.getGroundBasePair().getLhsType() instanceof PlaceholderType &&
|
||||
!(x.getRhsType() instanceof PlaceholderType) &&
|
||||
x.getPairOp() == PairOperator.EQUALSDOT)
|
||||
.map(x ->
|
||||
((PlaceholderType) x.getGroundBasePair().getLhsType()).getVariance())
|
||||
.reduce((n, m) -> (n != 0) ? n : m);
|
||||
|
||||
//For operator calls the variance is set to 2,
//because no receiver exists, i.e. no x.getGroundBasePair().getLhsType() instanceof PlaceholderType.
//With variance = 2 all elements of the Cartesian product are processed.
|
||||
return optVariance.orElse(2);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Find the first occurrence (if any) of a UnifyPair with operator EQUALSDOT while having
|
||||
* one side equal to its base pair counterpart
|
||||
*/
|
||||
public static Optional<UnifyPair> findEqualityConstrainedUnifyPair(Set<UnifyPair> nextSetElement) {
|
||||
return nextSetElement.stream().filter(x ->
|
||||
x.getPairOp()
|
||||
.equals(PairOperator.EQUALSDOT))
|
||||
.filter(x -> //Sicherstellen, dass bei a = ty a auch wirklich die gesuchte Typvariable ist
|
||||
x.getLhsType()
|
||||
.equals(x.getBasePair().getLhsType()) ||
|
||||
x.getLhsType()
|
||||
.equals(x.getBasePair().getRhsType())
|
||||
).findFirst();
|
||||
}
|
||||
|
||||
/**
|
||||
 * Find all unifyPairs that associate the identified type variable of origPair with a concrete type. That means:
|
||||
* If "a = type" is in origPair, then we get all UnifyPairs that contain either "a < typeA" or "typeB < a"
|
||||
*/
|
||||
public static Set<UnifyPair> findConstraintsWithSameTVAssociation(UnifyPair origPair, Set<Set<UnifyPair>> singleElementSets) {
|
||||
UnifyType tyVar = origPair.getLhsType();
|
||||
if (!(tyVar instanceof PlaceholderType)) {
|
||||
tyVar = origPair.getRhsType();
|
||||
}
|
||||
|
||||
UnifyType tyVarEF = tyVar;
|
||||
return singleElementSets.stream()
|
||||
.map(xx ->
|
||||
xx.iterator().next())
|
||||
.filter(x ->
|
||||
(x.getLhsType().equals(tyVarEF) && !(x.getRhsType() instanceof PlaceholderType))
|
||||
||
|
||||
(x.getRhsType().equals(tyVarEF) && !(x.getLhsType() instanceof PlaceholderType))
|
||||
)
|
||||
.collect(Collectors.toCollection(HashSet::new));
|
||||
}
|
||||
|
||||
/**
 * Checks whether all elements of the first next-set share the same (non-null) base pair.
 */
|
||||
public static boolean doesFirstNextSetHasSameBase(List<Set<UnifyPair>> nextSetAsList) {
|
||||
if (nextSetAsList.isEmpty()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
UnifyPair firstBasePair = null;
|
||||
|
||||
for (var unifyPair : nextSetAsList.getFirst().stream().toList()) {
|
||||
var basePair = unifyPair.getBasePair();
|
||||
|
||||
// if any base pair is null, there is NOT always the same base!
|
||||
if (basePair == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (firstBasePair == null) {
|
||||
firstBasePair = basePair;
|
||||
}
|
||||
else if (!basePair.equals(firstBasePair)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts data from every element in the nested lists of results. What data depends on the given
|
||||
* extractor function
|
||||
*/
|
||||
public static Set<UnifyPair> collectFromThreadResult (
|
||||
Set<Set<UnifyPair>> currentThreadResult,
|
||||
Function<UnifyPair, Set<UnifyPair>> extractor
|
||||
) {
|
||||
return currentThreadResult.stream()
|
||||
.map(b ->
|
||||
b.stream()
|
||||
.map(extractor)
|
||||
.reduce((y, z) -> {
|
||||
y.addAll(z);
|
||||
return y;
|
||||
})
|
||||
.orElse(new HashSet<>()))
|
||||
.reduce((y, z) -> {
|
||||
y.addAll(z);
|
||||
return y;
|
||||
})
|
||||
.orElse(new HashSet<>());
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract a list of PlaceholderTypes from a set of pairs, such that each resulting element:
|
||||
* - Is the LHS of a pair
|
||||
* - Is a PlaceholderType
|
||||
* - has a basePair Side that is a PlaceholderType with the same name
|
||||
*/
|
||||
public static List<PlaceholderType> extractMatchingPlaceholderTypes(Set<UnifyPair> pairs) {
|
||||
return pairs.stream()
|
||||
.filter(x -> {
|
||||
UnifyType lhs = x.getLhsType();
|
||||
UnifyType baseLhs = x.getBasePair().getLhsType();
|
||||
UnifyType baseRhs = x.getBasePair().getRhsType();
|
||||
return (lhs instanceof PlaceholderType) &&
|
||||
((baseLhs instanceof PlaceholderType && lhs.getName().equals(baseLhs.getName())) ||
|
||||
(baseRhs instanceof PlaceholderType && lhs.getName().equals(baseRhs.getName())));
|
||||
})
|
||||
.map(x -> (PlaceholderType) x.getLhsType())
|
||||
.collect(Collectors.toCollection(ArrayList::new));
|
||||
}
|
||||
|
||||
}
|
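The helpers above are pure functions over collections, so their behaviour can be shown in isolation. The following is a minimal, self-contained sketch of the reduction pattern used in collectFromThreadResult, rewritten with plain String sets instead of UnifyPair so it compiles and runs on its own; the class and variable names are illustrative and not part of the compiler.

import java.util.HashSet;
import java.util.Set;
import java.util.function.Function;

public class CollectFromResultSketch {

    // Mirrors collectFromThreadResult: apply an extractor to every element of every
    // inner set and union all extracted sets into a single result set.
    static Set<String> collect(Set<Set<String>> threadResult, Function<String, Set<String>> extractor) {
        return threadResult.stream()
                .map(inner -> inner.stream()
                        .map(extractor)
                        .reduce((y, z) -> { y.addAll(z); return y; })
                        .orElse(new HashSet<>()))
                .reduce((y, z) -> { y.addAll(z); return y; })
                .orElse(new HashSet<>());
    }

    public static void main(String[] args) {
        Set<Set<String>> threadResult = Set.of(Set.of("a", "b"), Set.of("c"));
        // extractor that wraps each (upper-cased) element into a fresh mutable singleton set
        Set<String> flat = collect(threadResult, s -> new HashSet<>(Set.of(s.toUpperCase())));
        System.out.println(flat); // e.g. [A, B, C]
    }
}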
@@ -0,0 +1,70 @@
|
||||
package de.dhbwstuttgart.typeinference.unify;
|
||||
|
||||
import java.io.Writer;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.ForkJoinPool;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
|
||||
public record UnifyContext(
|
||||
// main log file of a unification
|
||||
Writer logFile,
|
||||
// if logs should be made
|
||||
Boolean log,
|
||||
// if the unify algorithm should run in parallel
|
||||
Boolean parallel,
|
||||
// the model for storing calculated results
|
||||
UnifyResultModel resultModel,
|
||||
// the executor used for thread management in parallel execution
|
||||
ExecutorService executor,
|
||||
// a generator for new placeholders in this unify context
|
||||
PlaceholderRegistry placeholderRegistry,
|
||||
// a control structure to cancel the unification early
|
||||
UnifyTaskModel usedTasks
|
||||
) {
|
||||
|
||||
public UnifyContext(
|
||||
Writer logFile,
|
||||
Boolean log,
|
||||
Boolean parallel,
|
||||
UnifyResultModel resultModel,
|
||||
UnifyTaskModel usedTasks,
|
||||
ExecutorService executor,
|
||||
PlaceholderRegistry placeholderRegistry
|
||||
) {
|
||||
this(logFile, log, parallel, resultModel, executor, placeholderRegistry, usedTasks);
|
||||
}
|
||||
|
||||
public UnifyContext(
|
||||
Writer logFile,
|
||||
Boolean log,
|
||||
Boolean parallel,
|
||||
UnifyResultModel resultModel,
|
||||
UnifyTaskModel usedTasks,
|
||||
PlaceholderRegistry placeholderRegistry
|
||||
) {
|
||||
this(logFile, log, parallel, resultModel, usedTasks, ForkJoinPool.commonPool(), placeholderRegistry);
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
 * Shortcuts for creating a similar context with some properties changed. Combined with the final record
 * components, this causes the UnifyContext to be handled essentially as an immutable value object.
|
||||
*/
|
||||
|
||||
public UnifyContext newWithLogFile(Writer logFile) {
|
||||
return new UnifyContext(logFile, log, parallel, resultModel, executor, placeholderRegistry, usedTasks);
|
||||
}
|
||||
|
||||
public UnifyContext newWithParallel(boolean parallel) {
|
||||
return new UnifyContext(logFile, log, parallel, resultModel, executor, placeholderRegistry, usedTasks);
|
||||
}
|
||||
|
||||
public UnifyContext newWithExecutor(ExecutorService executor) {
|
||||
return new UnifyContext(logFile, log, parallel, resultModel, executor, placeholderRegistry, usedTasks);
|
||||
}
|
||||
|
||||
public UnifyContext newWithResultModel(UnifyResultModel resultModel) {
|
||||
return new UnifyContext(logFile, log, parallel, resultModel, executor, placeholderRegistry, usedTasks);
|
||||
}
|
||||
|
||||
}
|
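The newWith... methods above never mutate the record; each one builds a fresh UnifyContext that shares all other components. Below is a standalone sketch of that copy-on-change idiom on a deliberately tiny record, so the effect is easy to verify; the record and field names are illustrative only.

// Standalone sketch of the "copy with one property changed" idiom used by UnifyContext.
public record SketchContext(boolean log, boolean parallel) {

    // returns a new instance instead of mutating this one
    public SketchContext newWithParallel(boolean parallel) {
        return new SketchContext(log, parallel);
    }

    public static void main(String[] args) {
        SketchContext base = new SketchContext(true, false);
        SketchContext par = base.newWithParallel(true);
        System.out.println(base.parallel()); // false - the original is unchanged
        System.out.println(par.parallel());  // true
    }
}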
@@ -36,19 +36,19 @@ public class UnifyResultModel {
|
||||
listeners.remove(listenerToRemove);
|
||||
}
|
||||
|
||||
public void notify(Set<Set<UnifyPair>> eqPrimePrimeSet) {
|
||||
public void notify(Set<Set<UnifyPair>> eqPrimePrimeSet, UnifyContext context) {
|
||||
Set<Set<UnifyPair>> eqPrimePrimeSetRet = eqPrimePrimeSet.stream().map(x -> {
|
||||
Optional<Set<UnifyPair>> res = new RuleSet().subst(x.stream().map(y -> {
|
||||
Optional<Set<UnifyPair>> res = new RuleSet(context.placeholderRegistry()).subst(x.stream().map(y -> {
|
||||
if (y.getPairOp() == PairOperator.SMALLERDOTWC) y.setPairOp(PairOperator.EQUALSDOT);
|
||||
return y; //alle Paare a <.? b erden durch a =. b ersetzt
|
||||
}).collect(Collectors.toCollection(HashSet::new)));
|
||||
if (res.isPresent()) {//wenn subst ein Erg liefert wurde was veraendert
|
||||
return new TypeUnifyTask().applyTypeUnificationRules(res.get(), fc);
|
||||
return new TypeUnifyTask(context).applyTypeUnificationRules(res.get(), fc);
|
||||
}
|
||||
else return x; //wenn nichts veraendert wurde wird x zurueckgegeben
|
||||
}).collect(Collectors.toCollection(HashSet::new));
|
||||
List<ResultSet> newResult = eqPrimePrimeSetRet.stream().map(unifyPairs ->
|
||||
new ResultSet(UnifyTypeFactory.convert(unifyPairs, de.dhbwstuttgart.typeinference.constraints.Pair.generateTPHMap(cons))))
|
||||
new ResultSet(UnifyTypeFactory.convert(unifyPairs, de.dhbwstuttgart.typeinference.constraints.Pair.generateTPHMap(cons), context.placeholderRegistry())))
|
||||
.collect(Collectors.toList());
|
||||
UnifyResultEvent evt = new UnifyResultEvent(newResult);
|
||||
|
||||
|
@@ -0,0 +1,228 @@
|
||||
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
|
||||
|
||||
import de.dhbwstuttgart.exceptions.UnifyCancelException;
|
||||
import de.dhbwstuttgart.typeinference.constraints.Constraint;
|
||||
import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
|
||||
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
|
||||
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
|
||||
import de.dhbwstuttgart.util.Tuple;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
public class Variance0Case extends VarianceCase {
|
||||
|
||||
protected final int variance = 0;
|
||||
|
||||
protected Variance0Case(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
|
||||
super(isOderConstraint, typeUnifyTask, context);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void selectNextData(
|
||||
TypeUnifyTask typeUnifyTask,
|
||||
List<Set<UnifyPair>> nextSetAsList,
|
||||
Optional<UnifyPair> optOrigPair
|
||||
|
||||
) {
|
||||
//wenn a <. theta dann ist ein maximales Element sehr wahrscheinlich
|
||||
//wenn theta <. a dann ist ein minimales Element sehr wahrscheinlich
|
||||
if (!this.isOderConstraint && optOrigPair != null && optOrigPair.isPresent()) {
|
||||
if (optOrigPair.get().getBasePair().getLhsType() instanceof PlaceholderType) {
|
||||
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
|
||||
} else {
|
||||
a = typeUnifyTask.oup.min(nextSetAsList.iterator());
|
||||
}
|
||||
nextSetAsList.remove(a);
|
||||
} else if (this.isOderConstraint) {
|
||||
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
|
||||
nextSetAsList.remove(a);
|
||||
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
|
||||
} else {
|
||||
a = nextSetAsList.removeFirst();
|
||||
}
|
||||
|
||||
|
||||
Set<UnifyPair> finalA = a;
|
||||
if (!this.isOderConstraint && optOrigPair != null && optOrigPair.isPresent()) {
|
||||
if (optOrigPair.get().getBasePair().getLhsType() instanceof PlaceholderType) {
|
||||
nextSetasListRest = typeUnifyTask.oup.maxElements(
|
||||
nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != 1).toList()
|
||||
);
|
||||
} else {
|
||||
nextSetasListRest = typeUnifyTask.oup.minElements(
|
||||
nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != -1).toList()
|
||||
);
|
||||
}
|
||||
nextSetAsList.remove(a);
|
||||
} else if (this.isOderConstraint) {
|
||||
nextSetasListRest = typeUnifyTask.oup.maxElements(
|
||||
nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != 1).toList()
|
||||
);
|
||||
} else {
|
||||
for (int i = 0; i < Math.min(nextSetAsList.size(), 5); i++) {
|
||||
nextSetasListRest.add(nextSetAsList.removeFirst());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> computeParallel(
|
||||
Set<Set<UnifyPair>> elems,
|
||||
Set<UnifyPair> eq,
|
||||
List<Set<Constraint<UnifyPair>>> oderConstraints,
|
||||
IFiniteClosure fc,
|
||||
int rekTiefe,
|
||||
Set<UnifyPair> methodSignatureConstraint,
|
||||
List<Set<UnifyPair>> nextSetAsList,
|
||||
Set<UnifyPair> sameEqSet,
|
||||
Set<Set<UnifyPair>> result,
|
||||
Set<Set<UnifyPair>> aParDef
|
||||
) {
|
||||
CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> resultValues = CompletableFuture.completedFuture(new Tuple<>(
|
||||
new HashSet<>(), new HashSet<>()
|
||||
));
|
||||
|
||||
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
|
||||
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
|
||||
newElemsOrig.add(a);
|
||||
|
||||
/* FORK ANFANG */
|
||||
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
|
||||
typeUnifyTask.addChildTask(forkOrig);
|
||||
// schedule compute() on another thread
|
||||
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
|
||||
resultValues = resultValues.thenCombine(forkOrigFuture,
|
||||
(prevResults, currentThreadResult) -> {
|
||||
forkOrig.writeLog("final Orig 0");
|
||||
forkOrig.closeLogFile();
|
||||
return new Tuple<>(currentThreadResult, prevResults.getSecond());
|
||||
});
|
||||
|
||||
//forks.add(forkOrig);
|
||||
if (typeUnifyTask.myIsCancelled()) {
|
||||
throw new UnifyCancelException();
|
||||
}
|
||||
/* FORK ENDE */
|
||||
|
||||
writeLog("a in " + variance + " " + a);
|
||||
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
|
||||
|
||||
while (!nextSetasListRest.isEmpty()) {
|
||||
Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
|
||||
nextSetAsList.remove(nSaL);
|
||||
writeLog("0 RM" + nSaL.toString());
|
||||
|
||||
if (!this.isOderConstraint) {
|
||||
//ueberpruefung ob zu a =. ty \in nSaL in sameEqSet ein Widerspruch besteht
|
||||
if (!sameEqSet.isEmpty() && !typeUnifyTask.checkNoContradiction(nSaL, sameEqSet, result)) {
|
||||
TypeUnifyTask.noShortendElements++;
|
||||
continue;
|
||||
}
|
||||
} else {
|
||||
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) nSaL).getExtendConstraint());
|
||||
}
|
||||
Set<UnifyPair> newEq = new HashSet<>(eq);
|
||||
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
|
||||
newElems.add(nSaL);
|
||||
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
|
||||
typeUnifyTask.addChildTask(fork);
|
||||
// schedule compute() on another thread
|
||||
CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
|
||||
resultValues = resultValues.thenCombine(forkFuture,
|
||||
(prevResults, fork_res) -> {
|
||||
if (typeUnifyTask.myIsCancelled()) {
|
||||
throw new UnifyCancelException();
|
||||
}
|
||||
writeLog("fork_res: " + fork_res.toString());
|
||||
writeLog(Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString());
|
||||
prevResults.getSecond().add(fork_res);
|
||||
if (!typeUnifyTask.isUndefinedPairSetSet(fork_res)) {
|
||||
aParDef.add(fork.getNextSetElement());
|
||||
}
|
||||
fork.writeLog("final 0");
|
||||
fork.closeLogFile();
|
||||
return prevResults;
|
||||
}
|
||||
);
|
||||
|
||||
if (typeUnifyTask.myIsCancelled()) {
|
||||
throw new UnifyCancelException();
|
||||
}
|
||||
}
|
||||
|
||||
return resultValues;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void applyComputedResults(
|
||||
Set<Set<UnifyPair>> result,
|
||||
Set<Set<UnifyPair>> currentThreadResult,
|
||||
Set<UnifyPair> compResult,
|
||||
Set<UnifyPair> compRes
|
||||
) {
|
||||
writeLog("RES var=0 ADD:" + result.toString() + " " + currentThreadResult.toString());
|
||||
result.addAll(currentThreadResult);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean eraseInvalidSets(
|
||||
int rekTiefe,
|
||||
Set<Set<UnifyPair>> aParDef,
|
||||
List<Set<UnifyPair>> nextSetAsList
|
||||
) {
|
||||
if (!this.isOderConstraint) {
|
||||
return true;
|
||||
} else {
|
||||
|
||||
nextSetAsList.removeAll(nextSetasListOderConstraints);
|
||||
nextSetasListOderConstraints = new ArrayList<>();
|
||||
writeLog("Removed: " + nextSetasListOderConstraints);
|
||||
final List<Set<UnifyPair>> smallerSetasList = typeUnifyTask.oup.smallerThan(a, nextSetAsList);
|
||||
List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
|
||||
.filter(x -> !((Constraint<UnifyPair>) x).isInherited())
|
||||
.collect(Collectors.toCollection(ArrayList::new));
|
||||
final List<Set<UnifyPair>> notErased = new ArrayList<>();
|
||||
notInherited.forEach(x -> notErased.addAll(typeUnifyTask.oup.smallerEqThan(x, smallerSetasList)));
|
||||
List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList);
|
||||
erased.removeAll(notErased);
|
||||
nextSetAsList.removeAll(erased);
|
||||
writeLog("Removed: " + erased);
|
||||
writeLog("Not Removed: " + nextSetAsList);
|
||||
|
||||
for (Set<UnifyPair> aPar : aParDef) {
|
||||
|
||||
nextSetAsList.removeAll(nextSetasListOderConstraints);
|
||||
nextSetasListOderConstraints = new ArrayList<>();
|
||||
writeLog("Removed: " + nextSetasListOderConstraints);
|
||||
smallerSetasList.clear();
|
||||
smallerSetasList.addAll(typeUnifyTask.oup.smallerThan(aPar, nextSetAsList));
|
||||
notInherited = smallerSetasList.stream()
|
||||
.filter(x -> !((Constraint<UnifyPair>) x).isInherited())
|
||||
.collect(Collectors.toCollection(ArrayList::new));
|
||||
notErased.clear();
|
||||
notInherited.forEach(x -> notErased.addAll(typeUnifyTask.oup.smallerEqThan(x, smallerSetasList)));
|
||||
erased = new ArrayList<>(smallerSetasList);
|
||||
erased.removeAll(notErased);
|
||||
nextSetAsList.removeAll(erased);
|
||||
writeLog("Removed: " + erased);
|
||||
writeLog("Not Removed: " + nextSetAsList);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
}
|
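computeParallel above schedules every fork with CompletableFuture.supplyAsync(...) on the context's executor, flattens the nested future of compute() with thenCompose, and folds each fork's result into an accumulator future with thenCombine. The sketch below reproduces that chaining with simple integer tasks on an explicit executor; it is an illustration of the pattern, not the compiler's actual task code, and all names are hypothetical.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class ForkCombineSketch {

    public static void main(String[] args) {
        ExecutorService executor = Executors.newFixedThreadPool(4);

        // start with an already completed "accumulator" future, as computeParallel does
        CompletableFuture<List<Integer>> acc = CompletableFuture.completedFuture(new ArrayList<>());

        for (int i = 1; i <= 5; i++) {
            int task = i;
            // schedule the work on the executor; in the original, compute() itself returns a
            // future, which is why an additional thenCompose(f -> f) is needed there
            CompletableFuture<Integer> fork = CompletableFuture.supplyAsync(() -> task * task, executor);

            // fold each fork's result into the accumulator as soon as both are available
            acc = acc.thenCombine(fork, (results, value) -> {
                results.add(value);
                return results;
            });
        }

        System.out.println(acc.join()); // [1, 4, 9, 16, 25] - folded in the order the forks were chained
        executor.shutdown();
    }
}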
@@ -0,0 +1,214 @@
|
||||
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
|
||||
|
||||
import de.dhbwstuttgart.exceptions.UnifyCancelException;
|
||||
import de.dhbwstuttgart.typeinference.constraints.Constraint;
|
||||
import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
|
||||
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
|
||||
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
|
||||
import de.dhbwstuttgart.util.Tuple;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
public class Variance1Case extends VarianceCase {
|
||||
|
||||
protected final int variance = 1;
|
||||
|
||||
protected Variance1Case(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
|
||||
super(isOderConstraint, typeUnifyTask, context);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void selectNextData(
|
||||
TypeUnifyTask typeUnifyTask,
|
||||
List<Set<UnifyPair>> nextSetAsList,
|
||||
Optional<UnifyPair> optOrigPair
|
||||
) {
|
||||
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
|
||||
writeLog("Max: a in " + variance + " " + a);
|
||||
nextSetAsList.remove(a);
|
||||
if (this.isOderConstraint) {
|
||||
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
|
||||
}
|
||||
writeLog("nextSetasListOderConstraints 1: " + nextSetasListOderConstraints);
|
||||
|
||||
//Alle maximale Elemente in nextSetasListRest bestimmen
|
||||
//nur für diese wird parallele Berechnung angestossen.
|
||||
Set<UnifyPair> finalA = a;
|
||||
nextSetasListRest = typeUnifyTask.oup.maxElements(
|
||||
nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != 1).toList()
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> computeParallel(
|
||||
Set<Set<UnifyPair>> elems,
|
||||
Set<UnifyPair> eq,
|
||||
List<Set<Constraint<UnifyPair>>> oderConstraints,
|
||||
IFiniteClosure fc,
|
||||
int rekTiefe,
|
||||
Set<UnifyPair> methodSignatureConstraint,
|
||||
List<Set<UnifyPair>> nextSetAsList,
|
||||
Set<UnifyPair> sameEqSet,
|
||||
Set<Set<UnifyPair>> result,
|
||||
Set<Set<UnifyPair>> aParDef
|
||||
) {
|
||||
CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> resultValues = CompletableFuture.completedFuture(new Tuple<>(
|
||||
new HashSet<>(), new HashSet<>()
|
||||
));
|
||||
|
||||
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
|
||||
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
|
||||
newElemsOrig.add(a);
|
||||
|
||||
/* FORK ANFANG */
|
||||
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, methodSignatureConstraint);
|
||||
typeUnifyTask.addChildTask(forkOrig);
|
||||
// schedule compute() on another thread
|
||||
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
|
||||
resultValues = resultValues.thenCombine(forkOrigFuture,
|
||||
(prevResults, currentThreadResult) -> {
|
||||
forkOrig.writeLog("final Orig 1");
|
||||
forkOrig.closeLogFile();
|
||||
return new Tuple<>(currentThreadResult, prevResults.getSecond());
|
||||
});
|
||||
|
||||
//forks.add(forkOrig);
|
||||
if (typeUnifyTask.myIsCancelled()) {
|
||||
throw new UnifyCancelException();
|
||||
}
|
||||
/* FORK ENDE */
|
||||
|
||||
writeLog("a in " + variance + " " + a);
|
||||
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
|
||||
while (!nextSetasListRest.isEmpty()) {
|
||||
Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
|
||||
nextSetAsList.remove(nSaL);
|
||||
writeLog("1 RM" + nSaL.toString());
|
||||
|
||||
if (!this.isOderConstraint) {
|
||||
//ueberpruefung ob zu a =. ty \in nSaL in sameEqSet ein Widerspruch besteht
|
||||
if (!sameEqSet.isEmpty() && !typeUnifyTask.checkNoContradiction(nSaL, sameEqSet, result)) {
|
||||
TypeUnifyTask.noShortendElements++;
|
||||
continue;
|
||||
}
|
||||
} else {
|
||||
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) nSaL).getExtendConstraint());
|
||||
}
|
||||
Set<UnifyPair> newEq = new HashSet<>(eq);
|
||||
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
|
||||
newElems.add(nSaL);
|
||||
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
|
||||
typeUnifyTask.addChildTask(fork);
|
||||
// schedule compute() on another thread
|
||||
CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
|
||||
resultValues = resultValues.thenCombine(forkFuture,
|
||||
(prevResults, fork_res) -> {
|
||||
if (typeUnifyTask.myIsCancelled()) {
|
||||
throw new UnifyCancelException();
|
||||
}
|
||||
writeLog("fork_res: " + fork_res.toString());
|
||||
writeLog(Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString());
|
||||
prevResults.getSecond().add(fork_res);
|
||||
if (!typeUnifyTask.isUndefinedPairSetSet(fork_res)) {
|
||||
aParDef.add(fork.getNextSetElement());
|
||||
}
|
||||
fork.writeLog("final 1");
|
||||
fork.closeLogFile();
|
||||
return prevResults;
|
||||
}
|
||||
);
|
||||
|
||||
if (typeUnifyTask.myIsCancelled()) {
|
||||
throw new UnifyCancelException();
|
||||
}
|
||||
}
|
||||
|
||||
return resultValues;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void applyComputedResults(
|
||||
Set<Set<UnifyPair>> result,
|
||||
Set<Set<UnifyPair>> currentThreadResult,
|
||||
Set<UnifyPair> compResult,
|
||||
Set<UnifyPair> compRes
|
||||
) {
|
||||
int resOfCompare = typeUnifyTask.oup.compare(compResult, compRes);
|
||||
if (resOfCompare == -1) {
|
||||
writeLog("Geloescht result: " + result);
|
||||
result.clear();
|
||||
result.addAll(currentThreadResult);
|
||||
}
|
||||
else if (resOfCompare == 0) {
|
||||
result.addAll(currentThreadResult);
|
||||
}
|
||||
else if (resOfCompare == 1) {
|
||||
writeLog("Geloescht currentThreadResult: " + currentThreadResult);
|
||||
//result = result;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean eraseInvalidSets(
|
||||
int rekTiefe,
|
||||
Set<Set<UnifyPair>> aParDef,
|
||||
List<Set<UnifyPair>> nextSetAsList
|
||||
) {
|
||||
// System.out.println("");
|
||||
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
|
||||
writeLog("aParDef: " + aParDef.toString());
|
||||
aParDef.add(a);
|
||||
Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator();
|
||||
if (this.isOderConstraint) {
|
||||
nextSetAsList.removeAll(nextSetasListOderConstraints);
|
||||
nextSetasListOderConstraints = new ArrayList<>();
|
||||
writeLog("Removed: " + nextSetasListOderConstraints);
|
||||
while (aParDefIt.hasNext()) {
|
||||
Set<UnifyPair> a_new = aParDefIt.next();
|
||||
List<Set<UnifyPair>> smallerSetasList = typeUnifyTask.oup.smallerThan(a_new, nextSetAsList);
|
||||
writeLog("smallerSetasList: " + smallerSetasList);
|
||||
List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
|
||||
.filter(x -> !((Constraint<UnifyPair>) x).isInherited() && !((Constraint<UnifyPair>) x).isImplemented())
|
||||
.collect(Collectors.toCollection(ArrayList::new));
|
||||
writeLog("notInherited: " + notInherited + "\n");
|
||||
List<Set<UnifyPair>> notErased = new ArrayList<>();
|
||||
notInherited.forEach(x -> {
|
||||
notErased.addAll(typeUnifyTask.oup.smallerEqThan(x, smallerSetasList));
|
||||
});
|
||||
List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList);
|
||||
writeLog("notErased: " + notErased + "\n");
|
||||
erased.removeAll(notErased);
|
||||
nextSetAsList.removeAll(erased);
|
||||
|
||||
writeLog("Removed: " + erased);
|
||||
|
||||
writeLog("Not Removed: " + nextSetAsList);
|
||||
|
||||
}
|
||||
} else {
|
||||
while (aParDefIt.hasNext()) {
|
||||
//nextSetasListIt = nextSetasList.iterator(); Sollte eingefuegt werden PL 2020-04-28
|
||||
Set<UnifyPair> a_new = aParDefIt.next();
|
||||
List<Set<UnifyPair>> erased = typeUnifyTask.oup.smallerEqThan(a_new, nextSetAsList);
|
||||
nextSetAsList.removeAll(erased);
|
||||
|
||||
writeLog("Removed: " + erased);
|
||||
|
||||
writeLog("Not Removed: " + nextSetAsList);
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
}
|
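selectNextData for variance 1 picks a maximal element via oup.max and then restricts the remaining work to the maximal elements that are not greater than it. As a standalone illustration of "maximal elements under a partial order", the sketch below computes them for integers ordered by divisibility; the predicate-based interface is an assumption for the example and not the compiler's OrderingUnifyPair API.

import java.util.ArrayList;
import java.util.List;
import java.util.function.BiPredicate;

public class MaxElementsSketch {

    // returns all elements that have no strictly greater element in the list,
    // for a partial order given as a "strictly less than" predicate
    static List<Integer> maxElements(List<Integer> elems, BiPredicate<Integer, Integer> lessThan) {
        List<Integer> result = new ArrayList<>();
        for (Integer candidate : elems) {
            boolean dominated = elems.stream()
                    .anyMatch(other -> !other.equals(candidate) && lessThan.test(candidate, other));
            if (!dominated) {
                result.add(candidate);
            }
        }
        return result;
    }

    public static void main(String[] args) {
        // partial order: a < b iff a properly divides b
        BiPredicate<Integer, Integer> divides = (a, b) -> !a.equals(b) && b % a == 0;
        System.out.println(maxElements(List.of(2, 3, 4, 6, 8), divides)); // [6, 8]
    }
}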
@@ -0,0 +1,137 @@
|
||||
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
|
||||
|
||||
import de.dhbwstuttgart.exceptions.UnifyCancelException;
|
||||
import de.dhbwstuttgart.typeinference.constraints.Constraint;
|
||||
import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
|
||||
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
|
||||
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
|
||||
import de.dhbwstuttgart.util.Tuple;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
|
||||
public class Variance2Case extends VarianceCase {
|
||||
|
||||
protected final int variance = 2;
|
||||
|
||||
protected Variance2Case(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
|
||||
super(isOderConstraint, typeUnifyTask, context);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void selectNextData(
|
||||
TypeUnifyTask typeUnifyTask,
|
||||
List<Set<UnifyPair>> nextSetAsList,
|
||||
Optional<UnifyPair> optOrigPair
|
||||
|
||||
) {
|
||||
a = nextSetAsList.removeFirst();
|
||||
//Fuer alle Elemente wird parallele Berechnung angestossen.
|
||||
nextSetasListRest = new ArrayList<>(nextSetAsList);
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> computeParallel(
|
||||
Set<Set<UnifyPair>> elems,
|
||||
Set<UnifyPair> eq,
|
||||
List<Set<Constraint<UnifyPair>>> oderConstraints,
|
||||
IFiniteClosure fc,
|
||||
int rekTiefe,
|
||||
Set<UnifyPair> methodSignatureConstraint,
|
||||
List<Set<UnifyPair>> nextSetAsList,
|
||||
Set<UnifyPair> sameEqSet,
|
||||
Set<Set<UnifyPair>> result,
|
||||
Set<Set<UnifyPair>> aParDef
|
||||
) {
|
||||
CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> resultValuesFuture;
|
||||
|
||||
writeLog("var2einstieg");
|
||||
Set<TypeUnify2Task> forks = new HashSet<>();
|
||||
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
|
||||
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
|
||||
newElemsOrig.add(a);
|
||||
|
||||
/* FORK ANFANG */
|
||||
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
|
||||
typeUnifyTask.addChildTask(forkOrig);
|
||||
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
|
||||
resultValuesFuture = forkOrigFuture.thenApply((currentThreadResult) -> {
|
||||
forkOrig.writeLog("final Orig 2");
|
||||
forkOrig.closeLogFile();
|
||||
return new Tuple<>(currentThreadResult, new HashSet<>());
|
||||
});
|
||||
if (typeUnifyTask.myIsCancelled()) {
|
||||
throw new UnifyCancelException();
|
||||
}
|
||||
/* FORK ENDE */
|
||||
|
||||
writeLog("a in " + variance + " " + a);
|
||||
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
|
||||
|
||||
//Fuer parallele Berechnung der Oder-Contraints wird methodSignature kopiert
|
||||
//und jeweils die methodSignature von a bzw. nSaL wieder gelöscht, wenn es keine Lösung ist.
|
||||
Set<UnifyPair> methodSignatureConstraintForParallel = new HashSet<>(methodSignatureConstraint);
|
||||
Set<UnifyPair> nSaL = a;
|
||||
|
||||
while (!nextSetasListRest.isEmpty()) {
|
||||
methodSignatureConstraintForParallel.removeAll(((Constraint<UnifyPair>) nSaL).getmethodSignatureConstraint());
|
||||
nSaL = nextSetasListRest.removeFirst();
|
||||
nextSetAsList.remove(nSaL); //PL einkommentiert 20-02-03
|
||||
methodSignatureConstraintForParallel.addAll(((Constraint<UnifyPair>) nSaL).getmethodSignatureConstraint());
|
||||
Set<UnifyPair> newEq = new HashSet<>(eq);
|
||||
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
|
||||
newElems.add(nSaL);
|
||||
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraintForParallel));
|
||||
typeUnifyTask.addChildTask(fork);
|
||||
CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
|
||||
resultValuesFuture = resultValuesFuture.thenCombine(forkFuture, (resultValues, fork_res) -> {
|
||||
if (typeUnifyTask.myIsCancelled()) {
|
||||
throw new UnifyCancelException();
|
||||
}
|
||||
resultValues.getSecond().add(fork_res);
|
||||
fork.writeLog("final 2");
|
||||
fork.closeLogFile();
|
||||
return resultValues;
|
||||
});
|
||||
|
||||
if (typeUnifyTask.myIsCancelled()) {
|
||||
throw new UnifyCancelException();
|
||||
}
|
||||
}
|
||||
|
||||
if (typeUnifyTask.myIsCancelled()) {
|
||||
throw new UnifyCancelException();
|
||||
}
|
||||
|
||||
return resultValuesFuture;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void applyComputedResults(
|
||||
Set<Set<UnifyPair>> result,
|
||||
Set<Set<UnifyPair>> currentThreadResult,
|
||||
Set<UnifyPair> compResult,
|
||||
Set<UnifyPair> compRes
|
||||
) {
|
||||
// Nothing
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean eraseInvalidSets(
|
||||
int rekTiefe,
|
||||
Set<Set<UnifyPair>> aParDef,
|
||||
List<Set<UnifyPair>> nextSetAsList
|
||||
) {
|
||||
// Nothing
|
||||
return false;
|
||||
}
|
||||
|
||||
}
|
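In the variance-2 case every element of the Cartesian product is forked, and the results are only collected, not compared. The sketch below shows one conventional way to gather all forked results at once with CompletableFuture.allOf; it is an alternative, standalone illustration of that "fork everything, collect everything" idea, not the thenCombine chain the class above actually uses.

import java.util.List;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.stream.Collectors;

public class ForkAllSketch {

    public static void main(String[] args) {
        ExecutorService executor = Executors.newFixedThreadPool(4);

        List<String> elements = List.of("e1", "e2", "e3", "e4");

        // one fork per element, as in the variance-2 case
        List<CompletableFuture<String>> forks = elements.stream()
                .map(e -> CompletableFuture.supplyAsync(() -> e + "-solved", executor))
                .collect(Collectors.toList());

        // wait for all forks, then collect every individual result
        Set<String> results = CompletableFuture.allOf(forks.toArray(new CompletableFuture[0]))
                .thenApply(ignored -> forks.stream()
                        .map(CompletableFuture::join) // does not block here: all forks are already done
                        .collect(Collectors.toSet()))
                .join();

        System.out.println(results);
        executor.shutdown();
    }
}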
@@ -0,0 +1,112 @@
|
||||
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
|
||||
|
||||
import de.dhbwstuttgart.typeinference.constraints.Constraint;
|
||||
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
|
||||
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
|
||||
import de.dhbwstuttgart.util.Logger;
|
||||
import de.dhbwstuttgart.util.Tuple;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
|
||||
public abstract class VarianceCase {
|
||||
|
||||
public static VarianceCase createFromVariance(int variance, boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
|
||||
return switch (variance) {
|
||||
case 0 -> new Variance0Case(isOderConstraint, typeUnifyTask, context);
|
||||
case 1 -> new Variance1Case(isOderConstraint, typeUnifyTask, context);
|
||||
case -1 -> new VarianceM1Case(isOderConstraint, typeUnifyTask, context);
|
||||
case 2 -> new Variance2Case(isOderConstraint, typeUnifyTask, context);
|
||||
default -> throw new RuntimeException("Invalid variance: " + variance);
|
||||
};
|
||||
}
|
||||
|
||||
protected final boolean isOderConstraint;
|
||||
protected final TypeUnifyTask typeUnifyTask;
|
||||
protected final UnifyContext context;
|
||||
|
||||
/**
|
||||
 * The current case
|
||||
*/
|
||||
public Set<UnifyPair> a;
|
||||
|
||||
/**
|
||||
 * List of cases for parallel processing.
 * Contains elements that are not in relation to the current case stored in the
 * variable a. These have to be processed in any case,
 * which is why their computation is started in parallel.
|
||||
*/
|
||||
public List<Set<UnifyPair>> nextSetasListRest = new ArrayList<>();
|
||||
|
||||
/**
|
||||
 * List of cases whose receiver contains "? extends" (or does not, respectively).
 * As a rule this is exactly one element.
 * This element is later removed from nextSetasList once the respective other element
 * has led to success.
|
||||
*/
|
||||
public List<Set<UnifyPair>> nextSetasListOderConstraints = new ArrayList<>();
|
||||
|
||||
|
||||
protected VarianceCase(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
|
||||
this.isOderConstraint = isOderConstraint;
|
||||
this.typeUnifyTask = typeUnifyTask;
|
||||
this.context = context;
|
||||
}
|
||||
|
||||
/**
|
||||
* Selects values for the next iteration in the run method:
|
||||
 * - a : The element selected to be processed in the current iteration
|
||||
* - nextSetAsList: The list of cases that have no relation to the selected a and will have to be worked on
|
||||
* - nextSetasListOderConstraints: The list of cases of which the receiver contains "? extends", typically one element
|
||||
*/
|
||||
public abstract void selectNextData(
|
||||
TypeUnifyTask typeUnifyTask,
|
||||
List<Set<UnifyPair>> nextSetAsList,
|
||||
Optional<UnifyPair> optOrigPair
|
||||
);
|
||||
|
||||
/**
 * Starts the computation for the selected element a and, in parallel, for all elements in
 * nextSetasListRest; returns a future holding the result of the original fork together with
 * the results of the additional forks.
 */
|
||||
public abstract CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> computeParallel(
|
||||
Set<Set<UnifyPair>> elems,
|
||||
Set<UnifyPair> eq,
|
||||
List<Set<Constraint<UnifyPair>>> oderConstraints,
|
||||
IFiniteClosure fc,
|
||||
int rekTiefe,
|
||||
Set<UnifyPair> methodSignatureConstraint,
|
||||
List<Set<UnifyPair>> nextSetAsList,
|
||||
Set<UnifyPair> sameEqSet,
|
||||
Set<Set<UnifyPair>> result,
|
||||
Set<Set<UnifyPair>> aParDef
|
||||
);
|
||||
|
||||
/**
 * Merges the result of the current thread into the overall result, depending on the
 * variance-specific comparison of the two candidate results.
 */
|
||||
public abstract void applyComputedResults(
|
||||
Set<Set<UnifyPair>> result,
|
||||
Set<Set<UnifyPair>> currentThreadResult,
|
||||
Set<UnifyPair> compResult,
|
||||
Set<UnifyPair> compRes
|
||||
);
|
||||
|
||||
/**
|
||||
*
|
||||
* @return If the current iteration should be broken out of
|
||||
*/
|
||||
public abstract boolean eraseInvalidSets(
|
||||
int rekTiefe,
|
||||
Set<Set<UnifyPair>> aParDef,
|
||||
List<Set<UnifyPair>> nextSetAsList
|
||||
);
|
||||
|
||||
protected void writeLog(String s) {
|
||||
typeUnifyTask.writeLog(s);
|
||||
}
|
||||
|
||||
}
|
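createFromVariance maps the previously computed variance to one concrete strategy object, so the large variance-dependent branches of TypeUnifyTask become four small classes. A minimal standalone sketch of that switch-based factory, with hypothetical stand-in classes instead of the real VarianceCase subclasses:

public class VarianceFactorySketch {

    // minimal stand-ins for the four variance strategies
    abstract static class Strategy { abstract String name(); }
    static class Invariant     extends Strategy { String name() { return "variance 0";  } }
    static class Covariant     extends Strategy { String name() { return "variance 1";  } }
    static class Contravariant extends Strategy { String name() { return "variance -1"; } }
    static class OperatorCase  extends Strategy { String name() { return "variance 2";  } }

    // switch-based factory, mirroring VarianceCase.createFromVariance
    static Strategy create(int variance) {
        return switch (variance) {
            case 0  -> new Invariant();
            case 1  -> new Covariant();
            case -1 -> new Contravariant();
            case 2  -> new OperatorCase();
            default -> throw new IllegalArgumentException("Invalid variance: " + variance);
        };
    }

    public static void main(String[] args) {
        System.out.println(create(2).name()); // variance 2
    }
}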
@@ -0,0 +1,231 @@
|
||||
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
|
||||
|
||||
import de.dhbwstuttgart.exceptions.UnifyCancelException;
|
||||
import de.dhbwstuttgart.typeinference.constraints.Constraint;
|
||||
import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
|
||||
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
|
||||
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
|
||||
import de.dhbwstuttgart.util.Tuple;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
public class VarianceM1Case extends VarianceCase {
|
||||
|
||||
protected final int variance = -1;
|
||||
|
||||
protected VarianceM1Case(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
|
||||
super(isOderConstraint, typeUnifyTask, context);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void selectNextData(
|
||||
TypeUnifyTask typeUnifyTask,
|
||||
List<Set<UnifyPair>> nextSetAsList,
|
||||
Optional<UnifyPair> optOrigPair
|
||||
) {
|
||||
a = typeUnifyTask.oup.min(nextSetAsList.iterator());
|
||||
writeLog("Min: a in " + variance + " " + a);
|
||||
if (this.isOderConstraint) {
|
||||
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
|
||||
}
|
||||
writeLog("nextSetasListOderConstraints -1: " + nextSetasListOderConstraints);
|
||||
nextSetAsList.remove(a);
|
||||
|
||||
//Alle minimalen Elemente in nextSetasListRest bestimmen
|
||||
//nur für diese wird parallele Berechnung angestossen.
|
||||
Set<UnifyPair> finalA = a;
|
||||
nextSetasListRest = typeUnifyTask.oup.minElements(
|
||||
nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != -1).toList()
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> computeParallel(
|
||||
Set<Set<UnifyPair>> elems,
|
||||
Set<UnifyPair> eq,
|
||||
List<Set<Constraint<UnifyPair>>> oderConstraints,
|
||||
IFiniteClosure fc,
|
||||
int rekTiefe,
|
||||
Set<UnifyPair> methodSignatureConstraint,
|
||||
List<Set<UnifyPair>> nextSetAsList,
|
||||
Set<UnifyPair> sameEqSet,
|
||||
Set<Set<UnifyPair>> result,
|
||||
Set<Set<UnifyPair>> aParDef
|
||||
) {
|
||||
CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> resultValues = CompletableFuture.completedFuture(new Tuple<>(
|
||||
new HashSet<>(), new HashSet<>()
|
||||
));
|
||||
|
||||
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
|
||||
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
|
||||
newElemsOrig.add(a);
|
||||
|
||||
/* FORK ANFANG */
|
||||
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, methodSignatureConstraint);
|
||||
typeUnifyTask.addChildTask(forkOrig);
|
||||
// schedule compute() on another thread
|
||||
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
|
||||
resultValues = resultValues.thenCombine(forkOrigFuture,
|
||||
(prevResults, currentThreadResult) -> {
|
||||
forkOrig.writeLog("final Orig -1");
|
||||
forkOrig.closeLogFile();
|
||||
return new Tuple<>(currentThreadResult, prevResults.getSecond());
|
||||
});
|
||||
|
||||
//forks.add(forkOrig);
|
||||
if (typeUnifyTask.myIsCancelled()) {
|
||||
throw new UnifyCancelException();
|
||||
}
|
||||
/* FORK ENDE */
|
||||
|
||||
writeLog("a in " + variance + " " + a);
|
||||
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
|
||||
|
||||
while (!nextSetasListRest.isEmpty()) {
|
||||
Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
|
||||
nextSetAsList.remove(nSaL);
|
||||
writeLog("-1 RM" + nSaL.toString());
|
||||
|
||||
if (!this.isOderConstraint) {
|
||||
//ueberpruefung ob zu a =. ty \in nSaL in sameEqSet ein Widerspruch besteht
|
||||
if (!sameEqSet.isEmpty() && !typeUnifyTask.checkNoContradiction(nSaL, sameEqSet, result)) {
|
||||
TypeUnifyTask.noShortendElements++;
|
||||
continue;
|
||||
}
|
||||
} else {
|
||||
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) nSaL).getExtendConstraint());
|
||||
}
|
||||
Set<UnifyPair> newEq = new HashSet<>(eq);
|
||||
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
|
||||
newElems.add(nSaL);
|
||||
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
|
||||
typeUnifyTask.addChildTask(fork);
|
||||
// schedule compute() on another thread
|
||||
CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
|
||||
resultValues = resultValues.thenCombine(forkFuture,
|
||||
(prevResults, fork_res) -> {
|
||||
if (typeUnifyTask.myIsCancelled()) {
|
||||
throw new UnifyCancelException();
|
||||
}
|
||||
writeLog("fork_res: " + fork_res.toString());
|
||||
writeLog(Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString());
|
||||
prevResults.getSecond().add(fork_res);
|
||||
if (!typeUnifyTask.isUndefinedPairSetSet(fork_res)) {
|
||||
aParDef.add(fork.getNextSetElement());
|
||||
}
|
||||
fork.writeLog("final -1");
|
||||
fork.closeLogFile();
|
||||
return prevResults;
|
||||
}
|
||||
);
|
||||
|
||||
if (typeUnifyTask.myIsCancelled()) {
|
||||
throw new UnifyCancelException();
|
||||
}
|
||||
}
|
||||
|
||||
return resultValues;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void applyComputedResults(
|
||||
Set<Set<UnifyPair>> result,
|
||||
Set<Set<UnifyPair>> currentThreadResult,
|
||||
Set<UnifyPair> compResult,
|
||||
Set<UnifyPair> compRes
|
||||
) {
|
||||
int resOfCompare = typeUnifyTask.oup.compare(compResult, compRes);
|
||||
if (resOfCompare == 1) {
|
||||
writeLog("Geloescht result: " + result);
|
||||
result.clear();
|
||||
result.addAll(currentThreadResult);
|
||||
} else if (resOfCompare == 0) {
|
||||
result.addAll(currentThreadResult);
|
||||
} else if (resOfCompare == -1) {
|
||||
writeLog("Geloescht currentThreadResult: " + currentThreadResult);
|
||||
//result = result;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean eraseInvalidSets(
|
||||
int rekTiefe,
|
||||
Set<Set<UnifyPair>> aParDef,
|
||||
List<Set<UnifyPair>> nextSetAsList
|
||||
) {
|
||||
|
||||
// System.out.println("");
|
||||
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
|
||||
writeLog("aParDef: " + aParDef.toString());
|
||||
aParDef.add(a);
|
||||
Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator();
|
||||
if (this.isOderConstraint) {
|
||||
nextSetAsList.removeAll(nextSetasListOderConstraints);
|
||||
writeLog("Removed: " + nextSetasListOderConstraints);
|
||||
nextSetasListOderConstraints = new ArrayList<>();
|
||||
while (aParDefIt.hasNext()) {
|
||||
Set<UnifyPair> a_new = aParDefIt.next();
|
||||
List<Set<UnifyPair>> greaterSetasList = typeUnifyTask.oup.greaterThan(a_new, nextSetAsList);
|
||||
|
||||
//a_new muss hingefuegt werden, wenn es nicht vererbt ist, dann wird es spaeter wieder geloescht
|
||||
if (!((Constraint<UnifyPair>) a_new).isInherited()) {
|
||||
greaterSetasList.add(a_new);
|
||||
}
|
||||
List<Set<UnifyPair>> notInherited = greaterSetasList.stream()
|
||||
.filter(x -> !((Constraint<UnifyPair>) x).isInherited())
|
||||
.collect(Collectors.toCollection(ArrayList::new));
|
||||
List<Set<UnifyPair>> notErased = new ArrayList<>();
|
||||
|
||||
//Wenn x nicht vererbt ist, beginnt beim naechstgroesseren Element die naechste Ueberladung
|
||||
notInherited.forEach(x -> {
|
||||
notErased.addAll(typeUnifyTask.oup.greaterEqThan(x, greaterSetasList));
|
||||
});
|
||||
|
||||
//das kleineste Element ist das Element von dem a_new geerbt hat
|
||||
//muss deshalb geloescht werden
|
||||
Iterator<Set<UnifyPair>> notErasedIt = notErased.iterator();
|
||||
if (notErasedIt.hasNext()) {
|
||||
Set<UnifyPair> min = typeUnifyTask.oup.min(notErasedIt);
|
||||
notErased.remove(min);
|
||||
notErased.remove(((Constraint<UnifyPair>) min).getExtendConstraint());
|
||||
}
|
||||
|
||||
List<Set<UnifyPair>> erased = new ArrayList<>(greaterSetasList);
|
||||
erased.removeAll(notErased);
|
||||
nextSetAsList.removeAll(erased);
|
||||
|
||||
writeLog("Removed: " + erased);
|
||||
|
||||
writeLog("Not Removed: " + nextSetAsList);
|
||||
|
||||
}
|
||||
} else {
|
||||
while (aParDefIt.hasNext()) {
|
||||
//nextSetasListIt = nextSetasList.iterator(); Sollte eingefuegt werden PL 2020-04-28
|
||||
Set<UnifyPair> a_new = aParDefIt.next();
|
||||
List<Set<UnifyPair>> erased = typeUnifyTask.oup.greaterEqThan(a_new, nextSetAsList);
|
||||
|
||||
nextSetAsList.removeAll(erased);
|
||||
|
||||
writeLog("Removed: " + erased);
|
||||
|
||||
writeLog("Not Removed: " + nextSetAsList);
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
}
|
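applyComputedResults in Variance1Case and VarianceM1Case makes a three-way decision: depending on the sign of the comparison between the existing and the freshly computed result, the existing result is replaced, merged with the new one, or kept as is. The sketch below reproduces that decision with plain integer sets; the sign convention and the names are illustrative only, since the real comparison comes from the project's ordering on unify-pair sets.

import java.util.HashSet;
import java.util.Set;

public class MergeByComparisonSketch {

    // compare == 1: the new result wins, compare == 0: merge, compare == -1: keep the old result
    static void merge(Set<Integer> result, Set<Integer> newResult, int compare) {
        if (compare == 1) {
            result.clear();
            result.addAll(newResult);
        } else if (compare == 0) {
            result.addAll(newResult);
        }
        // compare == -1: nothing to do, the existing result is already the preferred one
    }

    public static void main(String[] args) {
        Set<Integer> result = new HashSet<>(Set.of(1, 2));
        merge(result, Set.of(3), 0);  // equal quality: union
        System.out.println(result);   // [1, 2, 3] (element order may vary)
        merge(result, Set.of(9), 1);  // new result is strictly better: replace
        System.out.println(result);   // [9]
    }
}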
@@ -1,6 +1,7 @@
|
||||
package de.dhbwstuttgart.typeinference.unify.interfaces;
|
||||
|
||||
import java.util.List;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
|
||||
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
|
||||
@@ -18,9 +19,8 @@ import org.antlr.v4.runtime.Token;
|
||||
*
|
||||
* @author Florian Steurer
|
||||
*/
|
||||
public interface IFiniteClosure {
|
||||
public interface IFiniteClosure extends ISerializableData {
|
||||
|
||||
public void setLogTrue();
|
||||
/**
|
||||
* Returns all types of the finite closure that are subtypes of the argument.
|
||||
* @return The set of subtypes of the argument.
|
||||
@@ -74,5 +74,5 @@ public interface IFiniteClosure {
|
||||
public Set<UnifyType> getChildren(UnifyType t);
|
||||
public Set<UnifyType> getAllTypesByName(String typeName);
|
||||
|
||||
public int compare(UnifyType rhsType, UnifyType rhsType2, PairOperator pairop);
|
||||
public int compare(UnifyType rhsType, UnifyType rhsType2, PairOperator pairop, UnifyContext context);
|
||||
}
|
||||
|
@@ -1,8 +1,9 @@
|
||||
package de.dhbwstuttgart.typeinference.unify.model;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
|
||||
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||
import java.util.Set;
|
||||
|
||||
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
|
||||
@@ -11,7 +12,7 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor;
|
||||
/**
|
||||
* An extends wildcard type "? extends T".
|
||||
*/
|
||||
public final class ExtendsType extends WildcardType {
|
||||
public final class ExtendsType extends WildcardType implements ISerializableData {
|
||||
|
||||
public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) {
|
||||
return visitor.visit(this, ht);
|
||||
@@ -92,5 +93,21 @@ public final class ExtendsType extends WildcardType {
|
||||
return "? extends " + wildcardedType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SerialMap toSerial(KeyStorage keyStorage) {
|
||||
SerialMap serialized = new SerialMap();
|
||||
serialized.put("wildcardedType", this.wildcardedType.toSerial(keyStorage));
|
||||
|
||||
// create the wrapper and put this as the object
|
||||
var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
|
||||
serializedWrapper.put("object", serialized);
|
||||
return serializedWrapper;
|
||||
}
|
||||
|
||||
public static ExtendsType fromSerial(SerialMap data, UnifyContext context) {
|
||||
return new ExtendsType(
|
||||
UnifyType.fromSerial(data.getMap("wildcardedType"), context)
|
||||
);
|
||||
}
|
||||
|
||||
}
|
||||
|
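The toSerial/fromSerial pair above follows a wrapper pattern: the superclass produces the outer serial map, and the subtype stores its own fields under an "object" entry so they survive a round trip. SerialMap and KeyStorage are project-internal types, so the standalone sketch below uses a plain java.util.Map and a hypothetical record to show the same round-trip idea; it is not the compiler's actual serialization code.

import java.util.HashMap;
import java.util.Map;

public class SerialRoundTripSketch {

    // a simplified "? extends T" wildcard carrying only the wrapped type's name
    record ExtendsSketch(String wildcardedType) {

        // mirrors the toSerial pattern: a wrapper map with a type tag plus an "object" entry
        Map<String, Object> toSerial() {
            Map<String, Object> object = new HashMap<>();
            object.put("wildcardedType", wildcardedType);

            Map<String, Object> wrapper = new HashMap<>();
            wrapper.put("type", "ExtendsSketch"); // stands in for the tag written by the superclass
            wrapper.put("object", object);
            return wrapper;
        }

        @SuppressWarnings("unchecked")
        static ExtendsSketch fromSerial(Map<String, Object> wrapper) {
            Map<String, Object> object = (Map<String, Object>) wrapper.get("object");
            return new ExtendsSketch((String) object.get("wildcardedType"));
        }
    }

    public static void main(String[] args) {
        ExtendsSketch original = new ExtendsSketch("java.lang.Number");
        ExtendsSketch restored = ExtendsSketch.fromSerial(original.toSerial());
        System.out.println(restored); // ExtendsSketch[wildcardedType=java.lang.Number]
    }
}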
@@ -1,16 +1,20 @@
|
||||
package de.dhbwstuttgart.typeinference.unify.model;
|
||||
|
||||
import java.io.FileWriter;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
|
||||
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
|
||||
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
|
||||
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||
import java.io.IOException;
|
||||
import java.io.Writer;
|
||||
import java.lang.reflect.Modifier;
|
||||
import java.sql.Array;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Hashtable;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.function.BiFunction;
|
||||
@@ -18,40 +22,28 @@ import java.util.function.BinaryOperator;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import com.google.common.collect.Ordering;
|
||||
|
||||
//PL 18-02-05/18-04-05 Unifier durch Matcher ersetzt
|
||||
//muss greater noch ersetzt werden ja erledigt 18--04-05
|
||||
import de.dhbwstuttgart.core.JavaTXCompiler;
|
||||
import de.dhbwstuttgart.parser.SourceLoc;
|
||||
import de.dhbwstuttgart.parser.scope.JavaClassName;
|
||||
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
|
||||
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
|
||||
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
|
||||
import de.dhbwstuttgart.typeinference.unify.MartelliMontanariUnify;
|
||||
|
||||
import de.dhbwstuttgart.typeinference.unify.Match;
|
||||
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
|
||||
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
|
||||
import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify;
|
||||
import de.dhbwstuttgart.util.Pair;
|
||||
import org.antlr.v4.runtime.Token;
|
||||
import org.apache.commons.io.output.NullWriter;
|
||||
|
||||
/**
|
||||
* The finite closure for the type unification
|
||||
* @author Florian Steurer
|
||||
*/
|
||||
public class FiniteClosure //extends Ordering<UnifyType> //entfernt PL 2018-12-11
|
||||
implements IFiniteClosure {
|
||||
public class FiniteClosure implements IFiniteClosure, ISerializableData {
|
||||
|
||||
final JavaTXCompiler compiler;
|
||||
final PlaceholderRegistry placeholderRegistry;
|
||||
|
||||
Writer logFile;
|
||||
static Boolean log = false;
|
||||
public void setLogTrue() {
|
||||
log = true;
|
||||
}
|
||||
|
||||
/**
|
||||
* A map that maps every type to the node in the inheritance graph that contains that type.
|
||||
*/
|
||||
@@ -81,8 +73,9 @@ implements IFiniteClosure {
|
||||
/**
|
||||
* Creates a new instance using the inheritance tree defined in the pairs.
|
||||
*/
|
||||
public FiniteClosure(Set<UnifyPair> pairs, Writer logFile, JavaTXCompiler compiler) {
|
||||
public FiniteClosure(Set<UnifyPair> pairs, Writer logFile, JavaTXCompiler compiler, PlaceholderRegistry placeholderRegistry) {
|
||||
this.compiler = compiler;
|
||||
this.placeholderRegistry = placeholderRegistry;
|
||||
this.logFile = logFile;
|
||||
this.pairs = new HashSet<>(pairs);
|
||||
inheritanceGraph = new HashMap<UnifyType, Node<UnifyType>>();
|
||||
@@ -141,13 +134,13 @@ implements IFiniteClosure {
|
||||
}
|
||||
}
|
||||
|
||||
public FiniteClosure(Set<UnifyPair> constraints, Writer writer) {
this(constraints, writer, null);
public FiniteClosure(Set<UnifyPair> constraints, Writer writer, PlaceholderRegistry placeholderRegistry) {
this(constraints, writer, null, placeholderRegistry);
}

void testSmaller() {
UnifyType tq1, tq2, tq3;
tq1 = new ExtendsType(PlaceholderType.freshPlaceholder());
tq1 = new ExtendsType(PlaceholderType.freshPlaceholder(placeholderRegistry));
List<UnifyType> l1 = new ArrayList<>();
List<UnifyType> l2 = new ArrayList<>();
l1.add(tq1);

@@ -207,7 +200,7 @@ implements IFiniteClosure {
result.add(new Pair<>(t, fBounded));
}
catch (StackOverflowError e) {
System.out.println("");
// System.out.println("");
}

// if C<...> <* C<...> then ... (third case in definition of <*)

@@ -698,10 +691,10 @@ implements IFiniteClosure {
}
*/

public int compare (UnifyType left, UnifyType right, PairOperator pairop) {
public int compare (UnifyType left, UnifyType right, PairOperator pairop, UnifyContext context) {
try {logFile.write("left: "+ left + " right: " + right + " pairop: " + pairop +"\n");} catch (IOException ie) {}
if (left.getName().equals("Matrix") || right.getName().equals("Matrix"))
System.out.println("");
// if (left.getName().equals("Matrix") || right.getName().equals("Matrix"))
// System.out.println("");
/*
pairop = PairOperator.SMALLERDOTWC;
List<UnifyType> al = new ArrayList<>();

@@ -752,7 +745,7 @@ implements IFiniteClosure {
}
}
UnifyPair up = new UnifyPair(left, right, pairop);
TypeUnifyTask unifyTask = new TypeUnifyTask();
TypeUnifyTask unifyTask = new TypeUnifyTask(context);
HashSet<UnifyPair> hs = new HashSet<>();
hs.add(up);
Set<UnifyPair> smallerRes = unifyTask.applyTypeUnificationRules(hs, this);

@@ -760,7 +753,7 @@ implements IFiniteClosure {
//if (left.getName().equals("Vector") || right.getName().equals("AbstractList"))
{try {
logFile.write("\nsmallerRes: " + smallerRes);//"smallerHash: " + greaterHash.toString());
logFile.flush();
// logFile.flush();
}
catch (IOException e) {
System.err.println("no LogFile");}}

@@ -774,7 +767,7 @@ implements IFiniteClosure {
long smallerLen = smallerRes.stream().filter(delFun).count();
try {
logFile.write("\nsmallerLen: " + smallerLen +"\n");
logFile.flush();
// logFile.flush();
}
catch (IOException e) {
System.err.println("no LogFile");}

@@ -789,7 +782,7 @@ implements IFiniteClosure {
//if (left.getName().equals("Vector") || right.getName().equals("AbstractList"))
{try {
logFile.write("\ngreaterRes: " + greaterRes);//"smallerHash: " + greaterHash.toString());
logFile.flush();
// logFile.flush();
}
catch (IOException e) {
System.err.println("no LogFile");}}

@@ -803,4 +796,18 @@ implements IFiniteClosure {
}
}
}

public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("pairs", SerialList.fromMapped(this.pairs, unifyPair -> unifyPair.toSerial(keyStorage)));
return serialized;
}

public static FiniteClosure fromSerial(SerialMap data, UnifyContext context, KeyStorage keyStorage) {
var pairList = data.getList("pairs").assertListOfUUIDs();
Set<UnifyPair> pairs = pairList.stream()
.map(pairData -> UnifyPair.fromSerial(pairData, context, keyStorage)).collect(Collectors.toSet());
return new FiniteClosure(pairs, context.logFile(), context.placeholderRegistry());
}

}
@@ -1,14 +1,17 @@
package de.dhbwstuttgart.typeinference.unify.model;

import de.dhbwstuttgart.parser.scope.JavaClassName;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;

import java.lang.reflect.Modifier;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class FunInterfaceType extends ReferenceType {
public class FunInterfaceType extends ReferenceType implements ISerializableData {
final List<UnifyType> intfArgTypes;
final UnifyType intfReturnType;
final List<String> generics;

@@ -46,4 +49,29 @@ public class FunInterfaceType extends ReferenceType {

return args;
}

@Override
public SerialMap toSerial(KeyStorage keyStorage) {
var serializedWrapper = super.toSerial(keyStorage);
SerialMap serialized = serializedWrapper.getMap("object");

serialized.put("intfArgTypes", SerialList.fromMapped(intfArgTypes, u -> u.toSerial(keyStorage)));
serialized.put("intfReturnType", intfReturnType.toSerial(keyStorage));
serialized.put("generics", SerialList.fromMapped(generics, SerialValue::new));

return serializedWrapper;
}

public static FunInterfaceType fromSerial(SerialMap data, UnifyContext context) {
var name = data.getValue("name").getOf(String.class);
var params = data.getList("params").assertListOfMaps().stream().map(
paramData -> UnifyType.fromSerial(paramData, context)).toList();
var intfArgTypes = data.getList("intfArgTypes").assertListOfMaps().stream().map(
argTypeData -> UnifyType.fromSerial(argTypeData, context)).toList();
var intfReturnType = UnifyType.fromSerial(data.getMap("intfReturnType"), context);
var generics = data.getList("generics").assertListOfValues().stream().map(
generic -> generic.getOf(String.class)).toList();

return new FunInterfaceType(name, new TypeParams(params), intfArgTypes, intfReturnType, generics);
}
}
@@ -1,8 +1,13 @@
package de.dhbwstuttgart.typeinference.unify.model;

import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Set;

import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;

@@ -12,7 +17,7 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor;
* A real function type in java.
* @author Florian Steurer
*/
public class FunNType extends UnifyType {
public class FunNType extends UnifyType implements ISerializableData {

public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) {
return visitor.visit(this, ht);

@@ -99,5 +104,22 @@ public class FunNType extends UnifyType {

return other.getTypeParams().equals(typeParams);
}

@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("params", SerialList.fromMapped(this.typeParams.get(), param -> param.toSerial(keyStorage)));

// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}

public static FunNType fromSerial(SerialMap data, UnifyContext context) {
List<UnifyType> params = data.getList("params").assertListOfMaps().stream().map(
paramData -> UnifyType.fromSerial(paramData, context)).toList();
return new FunNType(new TypeParams(params));
}

}
@@ -1,5 +1,6 @@
package de.dhbwstuttgart.typeinference.unify.model;

import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;

@@ -26,9 +27,11 @@ import de.dhbwstuttgart.util.Pair;
public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {

protected IFiniteClosure fc;

public OrderingUnifyPair(IFiniteClosure fc) {
protected UnifyContext context;

public OrderingUnifyPair(IFiniteClosure fc, UnifyContext context) {
this.fc = fc;
this.context = context;
}

/*

@@ -39,15 +42,15 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
try {
//if (left.getRhsType() instanceof WildcardType || right.getRhsType() instanceof WildcardType) {//PL 2019-01-12 ausgetauscht
if (((PlaceholderType)left.getLhsType()).isInnerType() && ((PlaceholderType)right.getLhsType()).isInnerType()) {
return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOTWC);
return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOTWC, context);
}
else {
return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOT);
return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOT, context);
}}
catch (ClassCastException e) {
try {
((FiniteClosure)fc).logFile.write("ClassCastException: " + left.toString() + " " + left.getGroundBasePair() + "\n\n");
((FiniteClosure)fc).logFile.flush();
// ((FiniteClosure)fc).logFile.flush();
}
catch (IOException ie) {
}

@@ -79,18 +82,18 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
&& (((ExtendsType)right).getExtendedType().getName().equals("java.util.Vector"))
&& (((ReferenceType)((ExtendsType)right).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType)))
{
System.out.println("");
// System.out.println("");
}
if (((right instanceof SuperType) && (((SuperType)right).getSuperedType().getName().equals("java.lang.Object")))
||((left instanceof SuperType) && (((SuperType)left).getSuperedType().getName().equals("java.lang.Object"))))
{
System.out.println("");
// System.out.println("");
}
}
else {
up = new UnifyPair(left, right, PairOperator.SMALLERDOT);
}
TypeUnifyTask unifyTask = new TypeUnifyTask();
TypeUnifyTask unifyTask = new TypeUnifyTask(context);
HashSet<UnifyPair> hs = new HashSet<>();
hs.add(up);
Set<UnifyPair> smallerRes = unifyTask.applyTypeUnificationRules(hs, fc);

@@ -106,11 +109,11 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
&& (((ExtendsType)right).getExtendedType().getName().equals("java.util.Vector"))
&& (((ReferenceType)((ExtendsType)right).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType)))
{
System.out.println("");
// System.out.println("");
}
if (right instanceof SuperType)
{
System.out.println("");
// System.out.println("");
}
}
else {

@@ -411,13 +414,13 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
if (leftlewc.iterator().next().getLhsType() instanceof PlaceholderType) {
hm = rsleuni.stream().reduce(new HashMap<UnifyType,UnifyPair>(), (x, y)-> { x.put(y.getLhsType(),y); return x; }, combiner);
Stream<UnifyPair> lslewcstr = lsleuni.stream().filter(x -> !(hm.get(x.getLhsType()) == null));
si = lslewcstr.map(x -> fc.compare(x.getRhsType(), hm.get(x.getLhsType()).getRhsType(), PairOperator.SMALLERDOTWC)).reduce((x,y)-> { if (x == y) return x; else return 0; } );
si = lslewcstr.map(x -> fc.compare(x.getRhsType(), hm.get(x.getLhsType()).getRhsType(), PairOperator.SMALLERDOTWC, context)).reduce((x,y)-> { if (x == y) return x; else return 0; } );
}
//4. Fall
else {
hm = rsleuni.stream().reduce(new HashMap<UnifyType,UnifyPair>(), (x, y)-> { x.put(y.getRhsType(),y); return x; }, combiner);
Stream<UnifyPair> lslewcstr = lsleuni.stream().filter(x -> !(hm.get(x.getRhsType()) == null));
si = lslewcstr.map(x -> fc.compare(x.getLhsType(), hm.get(x.getRhsType()).getLhsType(), PairOperator.SMALLERDOTWC)).reduce((x,y)-> { if (x == y) return x; else return 0; } );
si = lslewcstr.map(x -> fc.compare(x.getLhsType(), hm.get(x.getRhsType()).getLhsType(), PairOperator.SMALLERDOTWC, context)).reduce((x,y)-> { if (x == y) return x; else return 0; } );
}
if (!si.isPresent()) return 0;
else return si.get();
@@ -46,4 +46,15 @@ public enum PairOperator {
default: return "=."; // EQUALSDOT
}
}

public static PairOperator fromString(String op) {
switch (op) {
case "<": return SMALLER;
case "<.": return SMALLERDOT;
case "<!=.": return SMALLERNEQDOT;
case "<.?": return SMALLERDOTWC;
case "=.": return EQUALSDOT;
default: throw new RuntimeException("Unknown PairOperator: " + op);
}
}
}
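The new fromString method is the inverse of the symbol mapping shown above, which the serialization code needs in order to turn a stored operator string back into an enum constant. A minimal usage sketch, assuming PairOperator sits in de.dhbwstuttgart.typeinference.unify.model like the other unify model classes in this diff:

import de.dhbwstuttgart.typeinference.unify.model.PairOperator;

public class PairOperatorRoundTrip {
    public static void main(String[] args) {
        // Symbols taken from the switch in fromString above.
        PairOperator smallerDot = PairOperator.fromString("<.");
        PairOperator equalsDot = PairOperator.fromString("=.");
        System.out.println(smallerDot + " " + equalsDot);

        // Anything else fails fast instead of silently returning null.
        try {
            PairOperator.fromString("<<");
        } catch (RuntimeException e) {
            System.out.println(e.getMessage()); // Unknown PairOperator: <<
        }
    }
}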
@@ -1,119 +1,96 @@
package de.dhbwstuttgart.typeinference.unify.model;

import java.io.File;
import java.io.FileWriter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Random;
import java.util.Set;

import de.dhbwstuttgart.typeinference.unify.distributeVariance;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Set;

/**
* An unbounded placeholder type.
* @author Florian Steurer
*/
public final class PlaceholderType extends UnifyType{

/**
* Static list containing the names of all existing placeholders.
* Used for generating fresh placeholders.
*/
public static final ArrayList<String> EXISTING_PLACEHOLDERS = new ArrayList<String>();

/**
* Prefix of auto-generated placeholder names.
*/
protected static String nextName = "gen_";

/**
* Random number generator used to generate fresh placeholder name.
*/
protected static Random rnd = new Random(43558747548978L);

/**
* True if this object was auto-generated, false if this object was user-generated.
*/
private final boolean IsGenerated;

/**
* isWildcardable gibt an, ob ein Wildcardtyp dem PlaceholderType zugeordnet werden darf
* isWildcardable gibt an, ob ein Wildcardtyp dem PlaceholderType zugeordnet werden darf
*/
private boolean wildcardable = true;

private boolean wildcardable = true;

/**
* is innerType gibt an, ob der Type des PlaceholderType innerhalb eines Typkonstruktorsverwendet wird
*/
private boolean innerType = false;

private boolean innerType = false;

/**
* variance shows the variance of the pair
* 1: contravariant
* -1 covariant
* 0 invariant
* PL 2018-03-21
*/
private int variance = 0;

/*
* Fuer Oder-Constraints:
* orCons = 1: Receiver
* orCons = 0: Argument oder kein Oder-Constraint
* orCons = -1: RetType
*/
private byte orCons = 0;

* variance shows the variance of the pair
* 1: contravariant
* -1 covariant
* 0 invariant
* PL 2018-03-21
*/
private int variance = 0;

/*
* Fuer Oder-Constraints:
* orCons = 1: Receiver
* orCons = 0: Argument oder kein Oder-Constraint
* orCons = -1: RetType
*/
private byte orCons = 0;

/**
* Creates a new placeholder type with the specified name.
*/
public PlaceholderType(String name) {
public PlaceholderType(String name, PlaceholderRegistry placeholderRegistry) {
super(name, new TypeParams());
EXISTING_PLACEHOLDERS.add(name); // Add to list of existing placeholder names
placeholderRegistry.addPlaceholder(name); // Add to list of existing placeholder names
IsGenerated = false; // This type is user generated
}

public PlaceholderType(String name, int variance) {

public PlaceholderType(String name, int variance, PlaceholderRegistry placeholderRegistry) {
super(name, new TypeParams());
EXISTING_PLACEHOLDERS.add(name); // Add to list of existing placeholder names
placeholderRegistry.addPlaceholder(name); // Add to list of existing placeholder names
IsGenerated = false; // This type is user generated
this.variance = variance;
}

/**
* Creates a new placeholdertype
* Creates a new placeholdertype
* @param isGenerated true if this placeholder is auto-generated, false if it is user-generated.
*/
protected PlaceholderType(String name, boolean isGenerated) {
protected PlaceholderType(String name, boolean isGenerated, PlaceholderRegistry placeholderRegistry) {
super(name, new TypeParams());
EXISTING_PLACEHOLDERS.add(name); // Add to list of existing placeholder names
placeholderRegistry.addPlaceholder(name); // Add to list of existing placeholder names
IsGenerated = isGenerated;
}

public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) {
return visitor.visit(this, ht);
}

/**
* Creates a fresh placeholder type with a name that does so far not exist.
* Creates a fresh placeholder type with a name that does so far not exist from the chars A-Z.
* A user could later instantiate a type using the same name that is equivalent to this type.
* @return A fresh placeholder type.
*/
public synchronized static PlaceholderType freshPlaceholder() {
String name = nextName + (char) (rnd.nextInt(22) + 97); // Returns random char between 'a' and 'z'
// Add random chars while the name is in use.
while(EXISTING_PLACEHOLDERS.contains(name)) {
name += (char) (rnd.nextInt(22) + 97); // Returns random char between 'a' and 'z'
}
return new PlaceholderType(name, true);
public static PlaceholderType freshPlaceholder(PlaceholderRegistry placeholderRegistry) {
var name = placeholderRegistry.generateFreshPlaceholderName();
return new PlaceholderType(name, true, placeholderRegistry);
}

/**
* True if this placeholder is auto-generated, false if it is user-generated.
*/

@@ -124,51 +101,51 @@ public final class PlaceholderType extends UnifyType{
public void setVariance(int v) {
variance = v;
}

public int getVariance() {
return variance;
}

public void reversVariance() {
if (variance == 1) {
setVariance(-1);
} else {
if (variance == -1) {
setVariance(1);
}}
if (variance == -1) {
setVariance(1);
}}
}

public void setOrCons(byte i) {
orCons = i;
}

public byte getOrCons() {
return orCons;
}

public Boolean isWildcardable() {
return wildcardable;
}
public void disableWildcardtable() {
wildcardable = false;
}

public void enableWildcardtable() {
wildcardable = true;
}

public void setWildcardtable(Boolean wildcardable) {
this.wildcardable = wildcardable;
}

public Boolean isInnerType() {
return innerType;
}

public void setInnerType(Boolean innerType) {
this.innerType = innerType;
}

@Override
Set<UnifyType> smArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
return fc.smArg(this, fBounded);

@@ -178,17 +155,17 @@ public final class PlaceholderType extends UnifyType{
Set<UnifyType> grArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
return fc.grArg(this, fBounded);
}

@Override
public UnifyType setTypeParams(TypeParams newTp) {
return this; // Placeholders never have params.
}

@Override
public int hashCode() {
return typeName.hashCode();
}

@Override
UnifyType apply(Unifier unif) {
if(unif.hasSubstitute(this)) {

@@ -200,15 +177,15 @@ public final class PlaceholderType extends UnifyType{
}
return this;
}

@Override
public boolean equals(Object obj) {
if(!(obj instanceof PlaceholderType))
return false;

return ((PlaceholderType) obj).getName().equals(typeName);
}

@Override
public Collection<PlaceholderType> getInvolvedPlaceholderTypes() {

@@ -216,4 +193,39 @@ public final class PlaceholderType extends UnifyType{
ret.add(this);
return ret;
}

@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("name", this.typeName);
// Placeholders never make use of the typeParams
serialized.put("isGenerated", IsGenerated);
serialized.put("wildcardable", wildcardable);
serialized.put("isInnerType", innerType);
serialized.put("variance", variance);
serialized.put("orCons", orCons);

// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}

public static PlaceholderType fromSerial(SerialMap data, UnifyContext context) {
var name = data.getValue("name").getOf(String.class);
var isGenerated = data.getValue("isGenerated").getOf(Boolean.class);
var wildcardable = data.getValue("wildcardable").getOf(Boolean.class);
var isInnerType = data.getValue("isInnerType").getOf(Boolean.class);
var variance = data.getValue("variance").getOf(Integer.class);
var orCons = data.getValue("orCons").getOf(Number.class).byteValue();

var placeholderType = new PlaceholderType(name, isGenerated, context.placeholderRegistry());
placeholderType.setWildcardtable(wildcardable);
placeholderType.setInnerType(isInnerType);
placeholderType.setVariance(variance);
placeholderType.setOrCons(orCons);

return placeholderType;
}
}
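The PlaceholderType rewrite replaces the static EXISTING_PLACEHOLDERS list and the shared Random with a PlaceholderRegistry that is passed in explicitly (usually via UnifyContext). A minimal caller-side sketch, under the assumption that PlaceholderRegistry has a public no-argument constructor, which this diff does not show:

import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;

public class FreshPlaceholderSketch {
    public static void main(String[] args) {
        // Assumption: a registry can be created standalone; in the compiler it normally
        // comes out of the UnifyContext of the current unification run.
        PlaceholderRegistry registry = new PlaceholderRegistry();

        // Each run gets its own name pool instead of the old static EXISTING_PLACEHOLDERS list,
        // so parallel unification runs cannot collide on generated names.
        PlaceholderType a = PlaceholderType.freshPlaceholder(registry);
        PlaceholderType b = PlaceholderType.freshPlaceholder(registry);

        System.out.println(a.getName() + " / " + b.getName()); // two distinct generated names
    }
}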
@@ -1,6 +1,10 @@
package de.dhbwstuttgart.typeinference.unify.model;

import java.util.HashMap;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.Set;

import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;

@@ -11,45 +15,51 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor;
* @author Florian Steurer
*
*/
public class ReferenceType extends UnifyType {

public class ReferenceType extends UnifyType implements ISerializableData {

/**
* The buffered hashCode
*/
private final int hashCode;

/**
* gibt an, ob der ReferenceType eine generische Typvariable ist
*/
private final boolean genericTypeVar;

public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) {
return visitor.visit(this, ht);
}

public ReferenceType(String name, Boolean genericTypeVar) {
super(name, new TypeParams());
hashCode = 31 + 17 * typeName.hashCode() + 17 * typeParams.hashCode();
this.genericTypeVar = genericTypeVar;
}

public ReferenceType(String name, UnifyType... params) {
super(name, new TypeParams(params));
hashCode = 31 + 17 * typeName.hashCode() + 17 * typeParams.hashCode();
genericTypeVar = false;
}

public ReferenceType(String name, TypeParams params) {
super(name, params);
hashCode = 31 + 17 * typeName.hashCode() + 17 * typeParams.hashCode();
genericTypeVar = false;
}

public boolean isGenTypeVar () {
private ReferenceType(String name, TypeParams params, boolean genericTypeVar) {
super(name, params);
hashCode = 31 + 17 * typeName.hashCode() + 17 * typeParams.hashCode();
this.genericTypeVar = genericTypeVar;
}

public boolean isGenTypeVar() {
return genericTypeVar;
}

@Override
Set<UnifyType> smArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
return fc.smArg(this, fBounded);

@@ -63,38 +73,59 @@ public class ReferenceType extends UnifyType {
@Override
UnifyType apply(Unifier unif) {
TypeParams newParams = typeParams.apply(unif);

if(newParams.hashCode() == typeParams.hashCode() && newParams.equals(typeParams))

if (newParams.hashCode() == typeParams.hashCode() && newParams.equals(typeParams))
return this;

return new ReferenceType(typeName, newParams);
}

@Override
public UnifyType setTypeParams(TypeParams newTp) {
if(newTp.hashCode() == typeParams.hashCode() && newTp.equals(typeParams))
if (newTp.hashCode() == typeParams.hashCode() && newTp.equals(typeParams))
return this; // reduced the amount of objects created
return new ReferenceType(typeName, newTp);
}

@Override
public int hashCode() {
return hashCode;
}

@Override
public boolean equals(Object obj) {
if(!(obj instanceof ReferenceType))
if (!(obj instanceof ReferenceType))
return false;

if(obj.hashCode() != this.hashCode())

if (obj.hashCode() != this.hashCode())
return false;

ReferenceType other = (ReferenceType) obj;

if(!other.getName().equals(typeName))

if (!other.getName().equals(typeName))
return false;

return other.getTypeParams().equals(typeParams);
}
}

@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("name", this.typeName);
serialized.put("params", SerialList.fromMapped(this.typeParams.get(), param -> param.toSerial(keyStorage)));
serialized.put("isGenericTypeVar", this.genericTypeVar);

// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}

public static ReferenceType fromSerial(SerialMap data, UnifyContext context) {
var name = data.getValue("name").getOf(String.class);
var params = data.getList("params").assertListOfMaps().stream().map(
paramData -> UnifyType.fromSerial(paramData, context)).toList();
var isGenericTypeVar = data.getValue("isGenericTypeVar").getOf(Boolean.class);
return new ReferenceType(name, new TypeParams(params), isGenericTypeVar);
}
}
@@ -1,6 +1,9 @@
package de.dhbwstuttgart.typeinference.unify.model;

import java.util.HashMap;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.Set;

import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;

@@ -85,4 +88,26 @@ public final class SuperType extends WildcardType {
SuperType other = (SuperType) obj;
return other.getSuperedType().equals(wildcardedType);
}

@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("wildcardedType", this.wildcardedType.toSerial(keyStorage));
serialized.put("params", SerialList.fromMapped(this.typeParams.get(), param -> param.toSerial(keyStorage)));

// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}

public static SuperType fromSerial(SerialMap data, UnifyContext context) {
var params = data.getList("params").assertListOfMaps().stream().map(
paramData -> UnifyType.fromSerial(paramData, context)).toList();
var superType = new SuperType(
UnifyType.fromSerial(data.getMap("wildcardedType"), context)
);
superType.setTypeParams(new TypeParams(params));
return superType;
}
}
@@ -174,9 +174,9 @@ public final class TypeParams implements Iterable<UnifyType>{

@Override
public String toString() {
String res = "";
StringBuilder res = new StringBuilder();
for(UnifyType t : typeParams)
res += t + ",";
res.append(t).append(",");
return "<" + res.substring(0, res.length()-1) + ">";
}
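The hunk above only swaps string concatenation for a StringBuilder; the trailing-comma trimming via substring stays. A hedged alternative sketch using java.util.StringJoiner, which supplies the angle brackets and separators without any length arithmetic (illustrative only, with a plain String list standing in for the typeParams field):

import java.util.List;
import java.util.StringJoiner;

public class TypeParamsJoinSketch {
    public static void main(String[] args) {
        List<String> typeParams = List.of("T", "? extends Number", "String");

        // StringJoiner handles the "<", ">" and the commas, so no trimming is needed.
        StringJoiner res = new StringJoiner(",", "<", ">");
        for (String t : typeParams)
            res.add(t);

        System.out.println(res); // <T,? extends Number,String>
    }
}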
@@ -50,8 +50,7 @@ public class Unifier implements Function<UnifyType, UnifyType>, Iterable<Entry<P
Unifier tempU = new Unifier(source, target);
// Every new substitution must be applied to previously added substitutions
// otherwise the unifier needs to be applied multiple times to unify two terms
for(PlaceholderType pt : substitutions.keySet())
substitutions.put(pt, substitutions.get(pt).apply(tempU));
substitutions.replaceAll((pt, ut) -> ut.apply(tempU));
substitutions.put(source, target);
}

@@ -93,8 +92,7 @@ public class Unifier implements Function<UnifyType, UnifyType>, Iterable<Entry<P
// }
//}
if (!(p.getLhsType().equals(newLhs)) || !(p.getRhsType().equals(newRhs))) {//Die Anwendung von this hat was veraendert PL 2018-04-01
Set<UnifyPair> suniUnifyPair = new HashSet<>();
suniUnifyPair.addAll(thisAsPair.getAllSubstitutions());
Set<UnifyPair> suniUnifyPair = new HashSet<>(thisAsPair.getAllSubstitutions());
suniUnifyPair.add(thisAsPair);
if (p.getLhsType() instanceof PlaceholderType //&& newLhs instanceof PlaceholderType entfernt PL 2018-04-13
&& p.getPairOp() == PairOperator.EQUALSDOT) {

@@ -172,13 +170,13 @@ public class Unifier implements Function<UnifyType, UnifyType>, Iterable<Entry<P

@Override
public String toString() {
String result = "{ ";
StringBuilder result = new StringBuilder("{ ");
for(Entry<PlaceholderType, UnifyType> entry : substitutions.entrySet())
result += "(" + entry.getKey() + " -> " + entry.getValue() + "), ";
result.append("(").append(entry.getKey()).append(" -> ").append(entry.getValue()).append("), ");
if(!substitutions.isEmpty())
result = result.substring(0, result.length()-2);
result += " }";
return result;
result = new StringBuilder(result.substring(0, result.length() - 2));
result.append(" }");
return result.toString();
}

@Override
@@ -1,23 +1,26 @@
package de.dhbwstuttgart.typeinference.unify.model;

import com.google.common.collect.ObjectArrays;
import de.dhbwstuttgart.parser.SourceLoc;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import org.antlr.v4.runtime.Token;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialUUID;
import de.dhbwstuttgart.typeinference.constraints.IConstraintElement;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

/**
* A pair which contains two types and an operator, e.q. (Integer <. a).
* @author Florian Steurer
*/
public class UnifyPair {
public class UnifyPair implements IConstraintElement, ISerializableData {

private SourceLoc location;

@@ -151,8 +154,7 @@ public class UnifyPair {
}

public Set<UnifyPair> getAllSubstitutions () {
Set<UnifyPair> ret = new HashSet<>();
ret.addAll(new ArrayList<>(getSubstitution()));
Set<UnifyPair> ret = new HashSet<>(new ArrayList<>(getSubstitution()));
if (basePair != null) {
ret.addAll(new ArrayList<>(basePair.getAllSubstitutions()));
}

@@ -243,16 +245,16 @@ public class UnifyPair {
public String toString() {
String ret = "";
if (lhs instanceof PlaceholderType) {
ret = new Integer(((PlaceholderType)lhs).getVariance()).toString() + " "
ret = Integer.valueOf(((PlaceholderType)lhs).getVariance()).toString() + " "
+ "WC: " + ((PlaceholderType)lhs).isWildcardable()
+ ", IT: " + ((PlaceholderType)lhs).isInnerType();
}
if (rhs instanceof PlaceholderType) {
ret = ret + ", " + new Integer(((PlaceholderType)rhs).getVariance()).toString() + " "
ret = ret + ", " + Integer.valueOf(((PlaceholderType)rhs).getVariance()).toString() + " "
+ "WC: " + ((PlaceholderType)rhs).isWildcardable()
+ ", IT: " + ((PlaceholderType)rhs).isInnerType();
}
var res = "(" + lhs + " " + pairOp + " " + rhs + ", " + ret + ")"; //+ ", [" + getfBounded().toString()+ "])";
var res = "(UP: " + lhs + " " + pairOp + " " + rhs + ", " + ret + ")"; //+ ", [" + getfBounded().toString()+ "])";
var location = this.getLocation();
if (location != null) {
res += "@" + location.line() + " in " + location.file();

@@ -269,6 +271,58 @@ public class UnifyPair {
return ret;
}
*/
private String serialUUID = null;

public SerialUUID toSerial(KeyStorage keyStorage) {
final String uuid = serialUUID == null ? keyStorage.getIdentifier() : serialUUID;
if (serialUUID == null) serialUUID = uuid;

if (!keyStorage.isAlreadySerialized(uuid)) {
SerialMap serialized = new SerialMap();
keyStorage.putSerialized(uuid, serialized);
serialized.put("lhs", this.lhs.toSerial(keyStorage));
serialized.put("rhs", this.rhs.toSerial(keyStorage));
serialized.put("op", this.pairOp.toString());
serialized.put("basePair", this.basePair == null ? null : this.basePair.toSerial(keyStorage));
serialized.put("location", this.location == null ? null : this.location.toSerial(keyStorage));
serialized.put("substitution", SerialList.fromMapped(this.substitution, unifyPair -> unifyPair.toSerial(keyStorage)));
serialized.put("fBounded", SerialList.fromMapped(this.fBounded, fbounded -> fbounded.toSerial(keyStorage)));
}
return new SerialUUID(uuid);
}

public static UnifyPair fromSerial(SerialUUID serialUUID, UnifyContext context, KeyStorage keyStorage) {
String uuid = serialUUID.uuid;
if (!keyStorage.isAlreadyUnserialized(uuid)) {
SerialMap data = keyStorage.getSerialized(uuid);
SerialMap lhsData = data.getMap("lhs");
SerialMap rhsData = data.getMap("rhs");
String opData = data.getValue("op").getOf(String.class);

UnifyPair pair = new UnifyPair(
UnifyType.fromSerial(lhsData, context),
UnifyType.fromSerial(rhsData, context),
PairOperator.fromString(opData)
);
// put the object into the storage before unserializing basePair recursively
keyStorage.putUnserialized(uuid, pair);

SerialList<SerialUUID> substitutionData = data.getList("substitution").assertListOfUUIDs();
SerialList<SerialMap> fBoundedData = data.getList("fBounded").assertListOfMaps();
SerialUUID basePairData = data.getUUIDOrNull("basePair");
SerialMap locationData = data.getMapOrNull("location");

pair.substitution = substitutionData.stream().map(substData -> UnifyPair.fromSerial(substData, context, keyStorage)).collect(Collectors.toSet());
pair.fBounded = fBoundedData.stream().map(fBoundData -> UnifyType.fromSerial(fBoundData, context)).collect(Collectors.toSet());
if (basePairData != null) {
pair.basePair = UnifyPair.fromSerial(basePairData, context, keyStorage);
}
if (locationData != null) {
pair.location = SourceLoc.fromSerial(locationData);
}
}
return keyStorage.getUnserialized(uuid, UnifyPair.class);
}
}
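UnifyPair serializes to a SerialUUID while its actual fields go into the shared KeyStorage exactly once, so pairs that are reachable from many substitutions or basePair chains are written a single time and reference cycles terminate. The sketch below illustrates the same register-before-recursing pattern with plain JDK maps; SerialMap and KeyStorage are project-internal containers and are not reproduced here:

import java.util.IdentityHashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.UUID;

// Illustration of the dedup pattern used by UnifyPair.toSerial/fromSerial above:
// each object is written into a shared store exactly once and is referenced
// everywhere else by its UUID, which also keeps basePair cycles from recursing forever.
public class UuidDedupSketch {
    static final Map<Object, String> ids = new IdentityHashMap<>();
    static final Map<String, Map<String, Object>> store = new LinkedHashMap<>();

    static class Node { String name; Node base; Node(String name) { this.name = name; } }

    static String serialize(Node n) {
        String uuid = ids.computeIfAbsent(n, k -> UUID.randomUUID().toString());
        if (!store.containsKey(uuid)) {
            Map<String, Object> data = new LinkedHashMap<>();
            store.put(uuid, data);                 // register before recursing, like putSerialized
            data.put("name", n.name);
            data.put("base", n.base == null ? null : serialize(n.base));
        }
        return uuid;
    }

    public static void main(String[] args) {
        Node a = new Node("a");
        Node b = new Node("b");
        a.base = b;
        b.base = a;                                // a cycle, like mutually referencing basePairs
        serialize(a);
        System.out.println(store.size() + " entries"); // 2: each node stored once
    }
}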
@@ -1,12 +1,14 @@
package de.dhbwstuttgart.typeinference.unify.model;

import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Set;

import de.dhbwstuttgart.syntaxtree.StatementVisitor;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor;

@@ -14,7 +16,7 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor;
* Represents a java type.
* @author Florian Steurer
*/
public abstract class UnifyType {
public abstract class UnifyType implements ISerializableData {

/**
* The name of the type e.q. "Integer", "? extends Integer" or "List" for (List<T>)

@@ -29,7 +31,7 @@ public abstract class UnifyType {
/**
* Creates a new instance
* @param name Name of the type (e.q. List for List<T>, Integer or ? extends Integer)
* @param typeParams Parameters of the type (e.q. <T> for List<T>)
* @param p Parameters of the type (e.q. <T> for List<T>)
*/
protected UnifyType(String name, TypeParams p) {
typeName = name;

@@ -87,20 +89,18 @@ public abstract class UnifyType {

@Override
public String toString() {
String params = "";
StringBuilder params = new StringBuilder();
if(typeParams.size() != 0) {
for(UnifyType param : typeParams)
params += param.toString() + ",";
params = "<" + params.substring(0, params.length()-1) + ">";
params.append(param.toString()).append(",");
params = new StringBuilder("<" + params.substring(0, params.length() - 1) + ">");
}

return typeName + params;
}

public Collection<PlaceholderType> getInvolvedPlaceholderTypes() {
ArrayList<PlaceholderType> ret = new ArrayList<>();
ret.addAll(typeParams.getInvolvedPlaceholderTypes());
return ret;
return new ArrayList<>(typeParams.getInvolvedPlaceholderTypes());
}

public Boolean wrongWildcard() {//default

@@ -114,7 +114,40 @@ public abstract class UnifyType {

@Override
public boolean equals(Object obj) {
if(obj == null)return false;
if(obj == null) return false;
return this.toString().equals(obj.toString());
}
}

public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
String type = switch (this) {
case FunInterfaceType _ -> "funi";
case ReferenceType _ -> "ref";
case ExtendsType _ -> "ext";
case SuperType _ -> "sup";
case FunNType _ -> "funn";
case PlaceholderType _ -> "tph";
default -> throw new RuntimeException("No type defined for UnifyType of class " + this.getClass().getName());
};

serialized.put("type", type);
// we only insert null for the object and expect the child classes to call this and override the value with themselves
serialized.put("object", SerialValue.NULL);
return serialized;
}

public static UnifyType fromSerial(SerialMap data, UnifyContext context) {
var type = data.getValue("type").getOf(String.class);
var object = data.getMap("object");

return switch (type) {
case "funi" -> FunInterfaceType.fromSerial(object, context);
case "ref" -> ReferenceType.fromSerial(object, context);
case "ext" -> ExtendsType.fromSerial(object, context);
case "sup" -> SuperType.fromSerial(object, context);
case "funn" -> FunNType.fromSerial(object, context);
case "tph" -> PlaceholderType.fromSerial(object, context);
default -> throw new RuntimeException("Could not unserialize class of unhandled type " + type);
};
}
}
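UnifyType.toSerial wraps every concrete type in a "type"/"object" envelope and fromSerial dispatches on the tag; note that the FunInterfaceType case precedes ReferenceType, which it extends. A self-contained sketch of the same envelope idea with plain JDK maps and two hypothetical record types, since the real SerialMap/SerialValue API is not reproduced here:

import java.util.LinkedHashMap;
import java.util.Map;

// The tags mirror the ones in the switch above; SerialMap is replaced by a plain Map.
public class TypeTagEnvelopeSketch {
    sealed interface Ty permits Ref, Tph {}
    record Ref(String name) implements Ty {}
    record Tph(String name) implements Ty {}

    static Map<String, Object> toSerial(Ty t) {
        Map<String, Object> wrapper = new LinkedHashMap<>();
        wrapper.put("type", t instanceof Ref ? "ref" : "tph");
        Map<String, Object> object = new LinkedHashMap<>();
        object.put("name", t instanceof Ref r ? r.name() : ((Tph) t).name());
        wrapper.put("object", object);                // the subtype fills in the "object" slot
        return wrapper;
    }

    @SuppressWarnings("unchecked")
    static Ty fromSerial(Map<String, Object> wrapper) {
        Map<String, Object> object = (Map<String, Object>) wrapper.get("object");
        String name = (String) object.get("name");
        return switch ((String) wrapper.get("type")) {  // dispatch on the type tag
            case "ref" -> new Ref(name);
            case "tph" -> new Tph(name);
            default -> throw new RuntimeException("unhandled type tag");
        };
    }

    public static void main(String[] args) {
        System.out.println(fromSerial(toSerial(new Ref("java.lang.Integer"))));
    }
}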
@@ -1,5 +1,6 @@
package de.dhbwstuttgart.typeinference.unify.model;

import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import java.util.ArrayList;
import java.util.Collection;

@@ -7,7 +8,7 @@ import java.util.Collection;
* A wildcard type that is either a ExtendsType or a SuperType.
* @author Florian Steurer
*/
public abstract class WildcardType extends UnifyType {
public abstract class WildcardType extends UnifyType implements ISerializableData {

/**
* The wildcarded type, e.q. Integer for ? extends Integer. Never a wildcard type itself.

@@ -65,8 +66,7 @@ public abstract class WildcardType extends UnifyType {

@Override
public Collection<PlaceholderType> getInvolvedPlaceholderTypes() {
ArrayList<PlaceholderType> ret = new ArrayList<>();
ret.addAll(wildcardedType.getInvolvedPlaceholderTypes());
ArrayList<PlaceholderType> ret = new ArrayList<>(wildcardedType.getInvolvedPlaceholderTypes());
return ret;
}
}
9
src/main/java/de/dhbwstuttgart/util/Logger.java
Normal file

@@ -0,0 +1,9 @@
package de.dhbwstuttgart.util;

public class Logger {

public static void print(String s) {
System.out.println(s);
}

}
@@ -3,37 +3,26 @@ package de.dhbwstuttgart.util;
import java.util.Objects;
import java.util.Optional;

public class Pair<T, T1> {
private final T key;
private final T1 value;
public class Pair<T, T1> extends Tuple<T, T1> {

public Pair(T a, T1 b) {
this.value = b;
this.key = a;
}
public Pair(T a, T1 b) {
super(a, b);
}

public Optional<T1> getValue() {
return Optional.of(value);
}
public Optional<T1> getValue() {
return Optional.of(second);
}

public T getKey() {
return key;
}

public String toString() {
return "(" + key.toString() + "," + value.toString() + ")\n";
}
public T getKey() {
return first;
}

@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Pair<?, ?> pair = (Pair<?, ?>) o;
return Objects.equals(key, pair.key) && Objects.equals(value, pair.value);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Pair<?, ?> pair = (Pair<?, ?>) o;
return Objects.equals(first, pair.first) && Objects.equals(second, pair.second);
}

@Override
public int hashCode() {
return Objects.hash(key, value);
}
}
38
src/main/java/de/dhbwstuttgart/util/Tuple.java
Normal file

@@ -0,0 +1,38 @@
package de.dhbwstuttgart.util;

import java.util.Objects;

public class Tuple<T1, T2> {
protected final T1 first;
protected final T2 second;

public Tuple(T1 a, T2 b) {
this.second = b;
this.first = a;
}

public T1 getFirst() {
return first;
}

public T2 getSecond() {
return second;
}

public String toString() {
return "(" + first.toString() + "," + second.toString() + ")\n";
}

@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Tuple<?, ?> pair = (Tuple<?, ?>) o;
return Objects.equals(first, pair.first) && Objects.equals(second, pair.second);
}

@Override
public int hashCode() {
return Objects.hash(first, second);
}
}
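Pair now inherits its state from the new Tuple base class and only keeps the Optional-wrapping getValue and the getKey accessor for existing callers. A small usage sketch against the two classes exactly as they appear in this diff:

import de.dhbwstuttgart.util.Pair;
import de.dhbwstuttgart.util.Tuple;

public class PairTupleSketch {
    public static void main(String[] args) {
        Pair<String, Integer> p = new Pair<>("x", 1);

        // The old accessors still work (getValue() stays wrapped in Optional)...
        System.out.println(p.getKey() + " -> " + p.getValue().orElseThrow());

        // ...and the Tuple base class adds the plain first/second view.
        Tuple<String, Integer> t = p;
        System.out.println(t.getFirst() + " / " + t.getSecond());

        // equals/hashCode compare by contents, but a Pair and a Tuple with the same
        // contents are still distinct because getClass() is part of equals.
        System.out.println(p.equals(new Pair<>("x", 1)));   // true
        System.out.println(p.equals(new Tuple<>("x", 1)));  // false
    }
}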
@@ -303,7 +303,7 @@ public class TestComplete {
var result = mul.invoke(instanceOfClass_s1, instanceOfClass_s2);
System.out.println(instanceOfClass_s1.toString() + " * " + instanceOfClass_s2.toString() + " = " + result.toString());

assertEquals(result, 8);
assertEquals(8, result);
}

@Test
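The TestComplete changes from here on all swap the arguments into JUnit's expected-first order, so a failure message reads "expected:<...> but was:<...>" with the values in the right places. A minimal JUnit 4 sketch of the convention; the result variable is only a stand-in for the reflective invoke calls used in these tests:

import static org.junit.Assert.assertEquals;

import org.junit.Test;

public class ArgumentOrderSketch {
    @Test
    public void expectedComesFirst() {
        Object result = 7; // stand-in for the value returned by a reflective invoke(...)
        // assertEquals(expected, actual): on failure JUnit reports "expected:<8> but was:<7>",
        // which only reads correctly when the known value is the first argument.
        assertEquals(8, ((Integer) result) + 1);
    }
}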
@@ -444,8 +444,8 @@ public class TestComplete {
var O = Arrays.stream(typeParaTypes).filter(t -> t.equals(paraTypes[0])).findFirst().get();
var N2 = Arrays.stream(typeParaTypes).filter(t -> t.equals(paraTypes[1])).findFirst().get();
assertEquals(N, N2);
assertEquals(N.getBounds()[0], Object.class);
assertEquals(O.getBounds()[0], Object.class);
assertEquals(Object.class, N.getBounds()[0]);
assertEquals(Object.class, O.getBounds()[0]);

var m2 = tph4.getDeclaredMethod("m2", Object.class);

@@ -500,8 +500,8 @@ public class TestComplete {
var P = Arrays.stream(typeParaTypes).filter(t -> t.equals(paraTypes[0])).findFirst().get();
var O = Arrays.stream(typeParaTypes).filter(t -> t.equals(paraTypes[1])).findFirst().get();

assertEquals(P.getBounds()[0], Object.class);
assertEquals(O.getBounds()[0], Object.class);
assertEquals(Object.class, P.getBounds()[0]);
assertEquals(Object.class, O.getBounds()[0]);

var m2 = tph5.getDeclaredMethod("m2", Object.class);

@@ -605,7 +605,7 @@ public class TestComplete {
var fac1 = classFiles.get("Fac1");
var instance = fac1.getDeclaredConstructor().newInstance();
var fac = fac1.getDeclaredMethod("fac", Integer.class);
assertEquals(fac.invoke(instance, 10), 3628800);
assertEquals(3628800, fac.invoke(instance, 10));
}

@Test

@@ -676,16 +676,16 @@ public class TestComplete {
var r2 = ctor.newInstance(10, 20f);
var r3 = ctor.newInstance(10, r1);

assertEquals(swtch.invoke(instance, r1), 30);
assertEquals(swtch.invoke(instance, r2), 20);
assertEquals(swtch.invoke(instance, r3), 40);
assertEquals(swtch.invoke(instance, 50), 50);
assertEquals(30, swtch.invoke(instance, r1));
assertEquals(20, swtch.invoke(instance, r2));
assertEquals(40, swtch.invoke(instance, r3));
assertEquals(50, swtch.invoke(instance, 50));
try {
assertEquals(swtch.invoke(instance, "Some string"), 0);
assertEquals(0, swtch.invoke(instance, "Some string"));
fail("No assertion thrown!");
} catch (InvocationTargetException exception) {
assertTrue(exception.getCause() instanceof IllegalArgumentException);
assertEquals(exception.getCause().getMessage(), "Unhandled case value");
assertEquals("Unhandled case value", exception.getCause().getMessage());
}
}

@@ -702,7 +702,7 @@ public class TestComplete {
var r1 = ctor.newInstance("asd", "asb");

assertEquals(swtch.invoke(instance, r1), "asd");
assertEquals("asd", swtch.invoke(instance, r1));
}

@Test

@@ -753,8 +753,8 @@ public class TestComplete {
var r2 = ctor.newInstance(1, clazzNumber.getConstructor().newInstance());

assertEquals(swtch.invoke(instance, r1), "Second Element is a Text");
assertEquals(swtch.invoke(instance, r2), "Second Element is a Number");
assertEquals("Second Element is a Text", swtch.invoke(instance, r1));
assertEquals("Second Element is a Number", swtch.invoke(instance, r2));
}
@Test
public void testSwitchListRecord() throws Exception {

@@ -776,10 +776,10 @@ public class TestComplete {

var listWithMoreThanOneElement = constructorLinkedElem.newInstance(5, constructorLinkedElem.newInstance(1, constructorElem.newInstance(5)));
assertEquals(swtch.invoke(instance, listWithMoreThanOneElement), 11);
assertEquals(11, swtch.invoke(instance, listWithMoreThanOneElement));

var listWithOneElement = constructorLinkedElem.newInstance(5, constructorElem.newInstance(5));
assertEquals(swtch.invoke(instance, listWithOneElement), 10);
assertEquals(10, swtch.invoke(instance, listWithOneElement));
}

@Test

@@ -795,9 +795,9 @@ public class TestComplete {
var r2 = ctor.newInstance(20);
var r3 = ctor.newInstance(30);

assertEquals(m.invoke(instance, r1), 1);
assertEquals(m.invoke(instance, r2), 2);
assertEquals(m.invoke(instance, r3), 3);
assertEquals(1, m.invoke(instance, r1));
assertEquals(2, m.invoke(instance, r2));
assertEquals(3, m.invoke(instance, r3));
}

@Test

@@ -820,10 +820,10 @@ public class TestComplete {

var listWithMoreThanOneElement = constructorLinkedElem.newInstance(5, constructorLinkedElem.newInstance(1, constructorElem.newInstance(5)));
assertEquals(swtch.invoke(instance, listWithMoreThanOneElement), 5);
assertEquals(5, swtch.invoke(instance, listWithMoreThanOneElement));

var listWithOneElement = constructorLinkedElem.newInstance(5, constructorElem.newInstance(5));
assertEquals(swtch.invoke(instance, listWithOneElement), 5);
assertEquals(5, swtch.invoke(instance, listWithOneElement));
}

@@ -843,11 +843,11 @@ public class TestComplete {
var instance = clazz.getDeclaredConstructor().newInstance();
var main = clazz.getDeclaredMethod("main", String.class);

assertEquals(main.invoke(instance, "AaAaAa"), 1);
assertEquals(main.invoke(instance, "AaAaBB"), 2);
assertEquals(main.invoke(instance, "test"), 3);
assertEquals(main.invoke(instance, "TEST"), 3);
assertEquals(main.invoke(instance, "awawa"), 4);
assertEquals(1, main.invoke(instance, "AaAaAa"));
assertEquals(2, main.invoke(instance, "AaAaBB"));
assertEquals(3, main.invoke(instance, "test"));
assertEquals(3, main.invoke(instance, "TEST"));
assertEquals(4, main.invoke(instance, "awawa"));
}

@Test

@@ -872,15 +872,15 @@ public class TestComplete {
var ptInt = rec.getDeclaredConstructor(Number.class, Number.class).newInstance(1, 2);
var ptFlt = rec.getDeclaredConstructor(Number.class, Number.class).newInstance(1f, 2f);

assertEquals(m1.invoke(instance, ptInt, ptFlt), 1);
assertEquals(m1.invoke(instance, ptInt, ptInt), 2);
assertEquals(m1.invoke(instance, ptFlt, ptInt), 3);
assertEquals(m1.invoke(instance, ptFlt, ptFlt), 4);
assertEquals(1, m1.invoke(instance, ptInt, ptFlt));
assertEquals(2, m1.invoke(instance, ptInt, ptInt));
assertEquals(3, m1.invoke(instance, ptFlt, ptInt));
assertEquals(4, m1.invoke(instance, ptFlt, ptFlt));

assertEquals(m2.invoke(instance, ptInt), 5);
assertEquals(m2.invoke(instance, ptFlt), 6);
assertEquals(5, m2.invoke(instance, ptInt));
assertEquals(6, m2.invoke(instance, ptFlt));

assertEquals(m3.invoke(instance, 10), 10);
assertEquals(10, m3.invoke(instance, 10));
}

@Ignore("Not implemented")

@@ -896,13 +896,13 @@ public class TestComplete {
var r2 = ctor.newInstance(r1);

var m = clazz.getDeclaredMethod("m", rec, rec);
assertEquals(m.invoke(instance, r2, r1), 1);
assertEquals(1, m.invoke(instance, r2, r1));

var r3 = ctor.newInstance(2f);
var r4 = ctor.newInstance(r3);
assertEquals(m.invoke(instance, r4, r3), 2);
assertEquals(2, m.invoke(instance, r4, r3));

assertEquals(m.invoke(instance, r1, r1), 3);
assertEquals(3, m.invoke(instance, r1, r1));
}

@Ignore("Not implemented")

@@ -923,8 +923,8 @@ public class TestComplete {
var x = rctor.newInstance(rctor.newInstance(0, rctor.newInstance(0, r2ctor.newInstance(0))), r2ctor.newInstance(0));
var y = rctor.newInstance(r2ctor.newInstance(0), r2ctor.newInstance(0));

assertEquals(m.invoke(instance, x, y), null);
assertEquals(m.invoke(instance, y, y), null);
assertNull(m.invoke(instance, x, y));
assertNull(m.invoke(instance, y, y));

}

@@ -963,8 +963,8 @@ public class TestComplete {
var x = rctor.newInstance(10);
var d = rctor.newInstance(20.0);

assertEquals(m.invoke(instance, x, 0), 50);
assertEquals(m.invoke(instance, d, 0), 40.0);
assertEquals(50, m.invoke(instance, x, 0));
assertEquals(40.0, m.invoke(instance, d, 0));
}

@Test

@@ -979,7 +979,7 @@ public class TestComplete {
var classFiles = generateClassFiles(new ByteArrayClassLoader(), "Static.jav");
var clazz = classFiles.get("Static");
var m = clazz.getDeclaredMethod("m");
assertEquals(m.invoke(null), 50);
assertEquals(50, m.invoke(null));
}

@Test

@@ -988,7 +988,7 @@ public class TestComplete {
var clazz = classFiles.get("For");
var instance = clazz.getDeclaredConstructor().newInstance();
var m = clazz.getDeclaredMethod("m", Integer.class);
assertEquals(m.invoke(instance, 10), 60);
assertEquals(60, m.invoke(instance, 10));
}

@Test

@@ -997,7 +997,7 @@ public class TestComplete {
var clazz = classFiles.get("ForEach");
var instance = clazz.getDeclaredConstructor().newInstance();
var m = clazz.getDeclaredMethod("m");
assertEquals(m.invoke(instance), 6);
assertEquals(6, m.invoke(instance));
}

@Test

@@ -1015,7 +1015,7 @@ public class TestComplete {
var clazz = classFiles.get("FunctionalInterface");
var instance = clazz.getDeclaredConstructor().newInstance();
var m = clazz.getDeclaredMethod("m");
assertEquals(m.invoke(instance), 200);
assertEquals(200, m.invoke(instance));
}

@Test

@@ -1024,7 +1024,7 @@ public class TestComplete {
var clazz = classFiles.get("Chain");
var instance = clazz.getDeclaredConstructor().newInstance();
var m = clazz.getDeclaredMethod("m");
assertEquals(m.invoke(instance), 5);
assertEquals(5, m.invoke(instance));
}

@Test

@@ -1077,9 +1077,9 @@ public class TestComplete {
var instance = clazz.getDeclaredConstructor().newInstance();

assertNull(clazz.getDeclaredMethod("m").invoke(instance));
assertEquals(clazz.getDeclaredMethod("m2").invoke(instance), 'C');
assertEquals(clazz.getDeclaredMethod("m3").invoke(instance), 10L);
assertEquals(clazz.getDeclaredMethod("m4").invoke(instance), 10.5F);
assertEquals('C', clazz.getDeclaredMethod("m2").invoke(instance));
assertEquals(10L, clazz.getDeclaredMethod("m3").invoke(instance));
assertEquals(10.5F, clazz.getDeclaredMethod("m4").invoke(instance));
}

@Test

@@ -1087,7 +1087,7 @@ public class TestComplete {
var classFiles = generateClassFiles(new ByteArrayClassLoader(), "OLConstructor.jav");
var clazz = classFiles.get("Child");
var instance = clazz.getDeclaredConstructor().newInstance();
assertEquals(clazz.getSuperclass().getDeclaredField("x").get(instance), 3);
assertEquals(3, clazz.getSuperclass().getDeclaredField("x").get(instance));
}

@Test

@@ -1095,11 +1095,11 @@ public class TestComplete {
var classFiles = generateClassFiles(new ByteArrayClassLoader(), "Op1.jav");
var clazz = classFiles.get("Op1");
var instance = clazz.getDeclaredConstructor().newInstance();
assertEquals(clazz.getDeclaredMethod("not").invoke(instance), true);
assertEquals(clazz.getDeclaredMethod("or").invoke(instance), 10 | 20);
assertEquals(clazz.getDeclaredMethod("and").invoke(instance), 10 & 20);
assertEquals(clazz.getDeclaredMethod("xor").invoke(instance), 10 ^ 20);
assertEquals(clazz.getDeclaredMethod("mod").invoke(instance), 10 % 2);
assertEquals(true, clazz.getDeclaredMethod("not").invoke(instance));
assertEquals(10 | 20, clazz.getDeclaredMethod("or").invoke(instance));
assertEquals(10 & 20, clazz.getDeclaredMethod("and").invoke(instance));
assertEquals(10 ^ 20, clazz.getDeclaredMethod("xor").invoke(instance));
assertEquals(10 % 2, clazz.getDeclaredMethod("mod").invoke(instance));
}

@Ignore("Not implemented")

@@ -1116,18 +1116,18 @@ public class TestComplete {
var clazzPublic = classFiles.get("Access");
var clazzDefault = classFiles.get("AccessDefault");

assertEquals(clazzPublic.getModifiers(), Modifier.PUBLIC);
assertEquals(clazzDefault.getModifiers(), 0);
assertEquals(Modifier.PUBLIC, clazzPublic.getModifiers());
assertEquals(0, clazzDefault.getModifiers());

assertEquals(clazzPublic.getDeclaredMethod("mPublic").getModifiers(), Modifier.PUBLIC);
assertEquals(clazzPublic.getDeclaredMethod("mProtected").getModifiers(), Modifier.PROTECTED);
assertEquals(clazzPublic.getDeclaredMethod("mDefault").getModifiers(), 0);
assertEquals(clazzPublic.getDeclaredMethod("mPrivate").getModifiers(), Modifier.PRIVATE);
assertEquals(Modifier.PUBLIC, clazzPublic.getDeclaredMethod("mPublic").getModifiers());
assertEquals(Modifier.PROTECTED, clazzPublic.getDeclaredMethod("mProtected").getModifiers());
assertEquals(0, clazzPublic.getDeclaredMethod("mDefault").getModifiers());
assertEquals(Modifier.PRIVATE, clazzPublic.getDeclaredMethod("mPrivate").getModifiers());

assertEquals(clazzPublic.getDeclaredField("fPublic").getModifiers(), Modifier.PUBLIC);
assertEquals(clazzPublic.getDeclaredField("fProtected").getModifiers(), Modifier.PROTECTED);
assertEquals(clazzPublic.getDeclaredField("fDefault").getModifiers(), 0);
assertEquals(clazzPublic.getDeclaredField("fPrivate").getModifiers(), Modifier.PRIVATE);
assertEquals(Modifier.PUBLIC, clazzPublic.getDeclaredField("fPublic").getModifiers());
assertEquals(Modifier.PROTECTED, clazzPublic.getDeclaredField("fProtected").getModifiers());
assertEquals(0, clazzPublic.getDeclaredField("fDefault").getModifiers());
assertEquals(Modifier.PRIVATE, clazzPublic.getDeclaredField("fPrivate").getModifiers());
}

@Test

@@ -1165,7 +1165,7 @@ public class TestComplete {
var classFiles = generateClassFiles(new ByteArrayClassLoader(), "Ternary.jav");
var clazz = classFiles.get("Ternary");
var instance = clazz.getDeclaredConstructor().newInstance();
assertEquals(clazz.getDeclaredMethod("main", Integer.class).invoke(instance, 5), "small");
assertEquals("small", clazz.getDeclaredMethod("main", Integer.class).invoke(instance, 5));
}

@Test

@@ -1173,8 +1173,8 @@ public class TestComplete {
var classFiles = generateClassFiles(new ByteArrayClassLoader(), "While.jav");
var clazz = classFiles.get("While");
var instance = clazz.getDeclaredConstructor().newInstance();
assertEquals(clazz.getDeclaredMethod("m", Integer.class).invoke(instance, 5), 5);
assertEquals(clazz.getDeclaredMethod("m2").invoke(instance), 10);
assertEquals(5, clazz.getDeclaredMethod("m", Integer.class).invoke(instance, 5));
assertEquals(10, clazz.getDeclaredMethod("m2").invoke(instance));
}

@Test

@@ -1190,7 +1190,7 @@ public class TestComplete {
var clazz = classFiles.get("Assign");
var instance = clazz.getDeclaredConstructor().newInstance();
var m = clazz.getDeclaredMethod("m");
assertEquals(m.invoke(instance), 20);
assertEquals(20, m.invoke(instance));
}
|
||||
|
||||
@Test
|
||||
@@ -1247,9 +1247,9 @@ public class TestComplete {
|
||||
var classFiles = generateClassFiles(new ByteArrayClassLoader(), "Bug295.jav");
|
||||
var clazz = classFiles.get("Bug295");
|
||||
var instance = clazz.getDeclaredConstructor(Integer.class, Integer.class, Integer.class).newInstance(1, 2, 3);
|
||||
assertEquals(clazz.getDeclaredField("a").get(instance), 1);
|
||||
assertEquals(clazz.getDeclaredField("b").get(instance), 2);
|
||||
assertEquals(clazz.getDeclaredField("c").get(instance), 3);
|
||||
assertEquals(1, clazz.getDeclaredField("a").get(instance));
|
||||
assertEquals(2, clazz.getDeclaredField("b").get(instance));
|
||||
assertEquals(3, clazz.getDeclaredField("c").get(instance));
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -1279,7 +1279,7 @@ public class TestComplete {
|
||||
var classFiles = generateClassFiles(new ByteArrayClassLoader(), "Bug300.jav");
|
||||
var clazz = classFiles.get("Bug300");
|
||||
var instance = clazz.getDeclaredConstructor().newInstance();
|
||||
assertEquals(clazz.getDeclaredMethod("m").invoke(instance), "Base");
|
||||
assertEquals("Base", clazz.getDeclaredMethod("m").invoke(instance));
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -1324,7 +1324,7 @@ public class TestComplete {
|
||||
var classFiles = generateClassFiles(new ByteArrayClassLoader(), "Bug310.jav");
|
||||
var clazz = classFiles.get("Bug310");
|
||||
var instance = clazz.getDeclaredConstructor().newInstance();
|
||||
assertEquals(clazz.getDeclaredMethod("toString").invoke(instance), "3");
|
||||
assertEquals("3", clazz.getDeclaredMethod("toString").invoke(instance));
|
||||
}
|
||||
|
||||
@Test
|
||||
|
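Note on the hunks above: JUnit's Assert.assertEquals takes the expected value first and the actual value second. With the arguments reversed the assertions still pass whenever the values match, but a failing run labels "expected" and "was" the wrong way round. A minimal, self-contained illustration of the convention; the class, method and values below are purely hypothetical and not part of this repository:

import static org.junit.Assert.assertEquals;

import org.junit.Test;

public class AssertOrderExample {

    // hypothetical method under test, not taken from JavaCompilerCore
    static int square(int x) {
        return x * x;
    }

    @Test
    public void expectedFirst() {
        // expected first, actual second: a failure would read "expected:<25> but was:<...>"
        assertEquals(25, square(5));
    }

    @Test
    public void reversedArguments() {
        // still compiles and still passes while the values match,
        // but on failure the message would report the two values the wrong way round
        assertEquals(square(5), 25);
    }
}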
@@ -1,8 +1,10 @@
package finiteClosure;

+import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.parser.SyntaxTreeGenerator.FCGenerator;
import de.dhbwstuttgart.syntaxtree.ClassOrInterface;
import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
+import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import org.junit.Test;

import java.util.ArrayList;
@@ -14,14 +16,14 @@ public class SuperInterfacesTest {
    public void test() throws ClassNotFoundException {
        Collection<ClassOrInterface> classes = new ArrayList<>();
        classes.add(ASTFactory.createClass(TestClass.class));
-       System.out.println(FCGenerator.toFC(classes, ClassLoader.getSystemClassLoader()));
+       System.out.println(FCGenerator.toFC(classes, ClassLoader.getSystemClassLoader(), JavaTXCompiler.defaultClientPlaceholderRegistry));
    }

    @Test
    public void testGeneric() throws ClassNotFoundException {
        Collection<ClassOrInterface> classes = new ArrayList<>();
        classes.add(ASTFactory.createClass(TestClassGeneric.class));
-       System.out.println(FCGenerator.toFC(classes, ClassLoader.getSystemClassLoader()));
+       System.out.println(FCGenerator.toFC(classes, ClassLoader.getSystemClassLoader(), JavaTXCompiler.defaultClientPlaceholderRegistry));
    }
}
@@ -1,51 +0,0 @@
package languageServerInterfaceTest;

import de.dhbwstuttgart.core.ConsoleInterface;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.syntaxtree.factory.NameGenerator;
import de.dhbwstuttgart.syntaxtree.visual.ASTPrinter;
import org.junit.Ignore;
import org.junit.Test;
import de.dhbwstuttgart.languageServerInterface.LanguageServerInterface;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.List;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

public class LangaugeServerInterfaceTest {
    @Test
    @Ignore
    public void consoleInterfaceTest() throws IOException, ClassNotFoundException, URISyntaxException {


        LanguageServerInterface languageServerInterface = new LanguageServerInterface();
        var resp = languageServerInterface.getResultSetAndAbstractSyntax("/home/ruben/code/JavaCompilerCore/src/test/java/languageServerInterfaceTest/test.jav");

        System.out.println("\n-----------------------------------------\n");
        System.out.println(ASTPrinter.print(resp.getAst()));
        System.out.println("\n-----------------------------------------\n");

        LanguageServerInterface languageServerInterface2 = new LanguageServerInterface();
        var ast = languageServerInterface2.getAst("/home/ruben/code/JavaCompilerCore/src/test/java/languageServerInterfaceTest/test.jav", "N");

        System.out.println("\n-----------------------------------------\n");
        System.out.println(ASTPrinter.print(ast));
        System.out.println("\n-----------------------------------------\n");
        System.out.println("");
    }

    @Test
    @Ignore
    public void testBytecodeGen() throws IOException, ClassNotFoundException, URISyntaxException {

        // TODO: delete the folder and file if they already exist
        LanguageServerInterface languageServerInterface = new LanguageServerInterface();
        languageServerInterface.generateBytecode(new URI("c%3A/Users/ruben/Neuer%20Ordner%20%282%29/LSP-Vortrag/images/"));
    }
}
@@ -1,5 +0,0 @@
public class t{
    public mofus(){
        return 1;
    }
}
101 src/test/java/server/PacketExampleData.java Normal file
@@ -0,0 +1,101 @@
package server;

import de.dhbwstuttgart.parser.SourceLoc;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;
import de.dhbwstuttgart.typeinference.unify.model.ExtendsType;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.FunInterfaceType;
import de.dhbwstuttgart.typeinference.unify.model.FunNType;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.ReferenceType;
import de.dhbwstuttgart.typeinference.unify.model.SuperType;
import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import java.io.Writer;
import java.util.HashSet;
import java.util.List;
import java.util.concurrent.ForkJoinPool;

class PacketExampleData {

    static ConstraintSet<UnifyPair> getExampleUnifyPairConstraintSet(
            UnifyContext unifyContext, String packagePath, int numUndCons, int numOderCons
    ) {
        var constraintSet = new ConstraintSet<UnifyPair>();
        for (int i = 0; i < numUndCons; i++) {
            constraintSet.addUndConstraint(
                    getExampleUnifyPair(unifyContext, packagePath+"undCons.")
            );
        }
        for (int i = 0; i < numOderCons; i++) {
            var oderConstraint = new HashSet<Constraint<UnifyPair>>();
            getExampleUnifyPairConstraint(unifyContext, packagePath+"oderCons.", i == 0);
            constraintSet.addOderConstraint(oderConstraint);
        }
        return constraintSet;
    }

    static Constraint<UnifyPair> getExampleUnifyPairConstraint(UnifyContext unifyContext, String packagePath, boolean withExtends) {
        return new Constraint<>(
                !withExtends, true,
                withExtends ? getExampleUnifyPairConstraint(unifyContext, packagePath+"extendConstraint.", false) : null,
                withExtends ? new HashSet<>(List.of(
                        getExampleUnifyPair(unifyContext, packagePath+"methodSignatureConstraint.zero."),
                        getExampleUnifyPair(unifyContext, packagePath+"methodSignatureConstraint.one.")
                )) : null
        );
    }

    static UnifyPair getExampleUnifyPair(UnifyContext unifyContext, String packagePath) {
        return new UnifyPair(
                new ReferenceType(packagePath + "something", false),
                new ExtendsType(
                        new SuperType(
                                new FunInterfaceType("lambda" + unifyContext.placeholderRegistry().generateFreshPlaceholderName(),
                                        new TypeParams(),
                                        List.of( // intfArgTypes
                                                FunNType.getFunNType(new TypeParams(
                                                        List.of(
                                                                PlaceholderType.freshPlaceholder(unifyContext.placeholderRegistry()),
                                                                PlaceholderType.freshPlaceholder(unifyContext.placeholderRegistry())
                                                        )
                                                ))
                                        ),
                                        PlaceholderType.freshPlaceholder(unifyContext.placeholderRegistry()), // intfReturnType
                                        List.of( // generics
                                                "ZA",
                                                "ZB",
                                                unifyContext.placeholderRegistry().generateFreshPlaceholderName()
                                        )
                                )
                        )
                ),
                PairOperator.SMALLERDOT,
                new SourceLoc("test.jav", 10)
        );
    }

    static UnifyContext createTestContext() {
        var placeholderRegistry = new PlaceholderRegistry();
        var nullWriter = Writer.nullWriter();
        return new UnifyContext(nullWriter, false, true,
                new UnifyResultModel(
                        new ConstraintSet<>(),
                        new FiniteClosure(
                                new HashSet<>(),
                                nullWriter,
                                placeholderRegistry)),
                new UnifyTaskModel(),
                ForkJoinPool.commonPool(),
                placeholderRegistry
        );
    }

}
110 src/test/java/server/PacketTest.java Normal file
@@ -0,0 +1,110 @@
package server;

import com.fasterxml.jackson.core.JsonProcessingException;
import de.dhbwstuttgart.environment.ByteArrayClassLoader;
import de.dhbwstuttgart.parser.SourceLoc;
import de.dhbwstuttgart.server.packet.DebugPacket;
import de.dhbwstuttgart.server.packet.IPacket;
import de.dhbwstuttgart.server.packet.InvalidPacket;
import de.dhbwstuttgart.server.packet.PacketContainer;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialUUID;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import java.io.Writer;
import java.util.HashSet;
import java.util.concurrent.ForkJoinPool;
import java.util.function.BiFunction;
import org.junit.Test;
import static org.junit.Assert.*;

public class PacketTest {

    @Test
    public void serializeUnifyPair() throws JsonProcessingException {
        UnifyContext unifyContext = PacketExampleData.createTestContext();

        var original = PacketExampleData.getExampleUnifyPair(unifyContext, "de.test.");
        var reconstruction = serializeAndDeserialize(original, unifyContext,
                (o,k) -> UnifyPair.fromSerial((SerialUUID) o,unifyContext,k));

        assertEquals(original.getClass(), reconstruction.getClass());
        assertEquals(original.toString(), reconstruction.toString());
        assertEquals(original, reconstruction);
    }

    @Test
    public void serializeUnifyPairConstraint() throws JsonProcessingException {
        UnifyContext unifyContext = PacketExampleData.createTestContext();

        var original = PacketExampleData.getExampleUnifyPairConstraint(unifyContext, "de.", true);
        var reconstruction = serializeAndDeserialize(original, unifyContext,
                (o,k) -> Constraint.fromSerial((SerialUUID) o, unifyContext, UnifyPair.class, k));

        assertEquals(original.getClass(), reconstruction.getClass());
        assertEquals(original.toString(), reconstruction.toString());
        assertEquals(original, reconstruction);
    }

    @Test
    public void serializeUnifyPairConstraintSet() throws JsonProcessingException {
        UnifyContext unifyContext = PacketExampleData.createTestContext();

        var original = PacketExampleData.getExampleUnifyPairConstraintSet(unifyContext, "de.", 1, 2);
        var reconstruction = serializeAndDeserialize(original, unifyContext,
                (o,k) -> ConstraintSet.fromSerial((SerialMap) o, unifyContext, UnifyPair.class, k));

        assertEquals(original.getClass(), reconstruction.getClass());
        assertEquals(original.toString(), reconstruction.toString());
        assertEquals(original, reconstruction);
    }


    /**
     * Helper method for serializing an ISerialNode into JSON, then deserializing it
     */
    private <T extends ISerialNode, R extends ISerializableData> R serializeAndDeserialize(
            ISerializableData object, UnifyContext unifyContext, BiFunction<T, KeyStorage, R> fromSerial
    ) throws JsonProcessingException {

        DebugPacket packet = new DebugPacket();
        KeyStorage keyStorage = new KeyStorage();
        var serializedObject = object.toSerial(keyStorage);
        if (serializedObject instanceof SerialUUID sObject) packet.a1 = sObject;
        if (serializedObject instanceof SerialMap sObject) packet.b1 = sObject;
        if (serializedObject instanceof SerialList<?> sObject) packet.c1 = sObject;
        if (serializedObject instanceof SerialValue<?> sObject) packet.d1 = sObject;
        packet.b2 = keyStorage.toSerial(keyStorage);

        DebugPacket reconstructedPacket = serializeAndDeserializePacket(packet);

        KeyStorage reconstructedKeyStorage = KeyStorage.fromSerial(reconstructedPacket.b2, unifyContext);
        ISerialNode reconstructedData = null;
        if (serializedObject instanceof SerialUUID) reconstructedData = packet.a1;
        if (serializedObject instanceof SerialMap) reconstructedData = packet.b1;
        if (serializedObject instanceof SerialList<?>) reconstructedData = packet.c1;
        if (serializedObject instanceof SerialValue<?>) reconstructedData = packet.d1;

        assertNotNull(reconstructedData);
        return fromSerial.apply( (T) reconstructedData, reconstructedKeyStorage);
    }

    private <T extends IPacket> T serializeAndDeserializePacket(T packet) throws JsonProcessingException {
        String json = PacketContainer.serialize(packet);
        IPacket reconstructedPacket = PacketContainer.deserialize(json);
        assertNotNull(reconstructedPacket);
        assertSame(packet.getClass(), reconstructedPacket.getClass());
        return (T) reconstructedPacket;
    }
}
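The tests above round-trip compiler data structures through the project's PacketContainer JSON layer, which sits on jackson-databind (hence the JsonProcessingException signatures). For orientation, the same serialize–deserialize–compare shape with plain Jackson looks like the following sketch; the Payload class is hypothetical, and only ObjectMapper, writeValueAsString and readValue are standard Jackson API:

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;

public class JacksonRoundTripExample {

    // hypothetical value object; public fields and the implicit no-arg constructor
    // are enough for Jackson's default (de)serialization
    public static class Payload {
        public String name;
        public int count;
    }

    public static void main(String[] args) throws JsonProcessingException {
        ObjectMapper mapper = new ObjectMapper();

        Payload original = new Payload();
        original.name = "undCons";
        original.count = 2;

        // serialize to JSON, then rebuild a fresh instance from that JSON
        String json = mapper.writeValueAsString(original);
        Payload reconstruction = mapper.readValue(json, Payload.class);

        // same kind of check as in the tests above: the round trip must preserve the data
        if (!original.name.equals(reconstruction.name) || original.count != reconstruction.count) {
            throw new AssertionError("round trip changed the payload: " + json);
        }
    }
}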
141 src/test/java/server/ServerTest.java Normal file
@@ -0,0 +1,141 @@
package server;

import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.core.JavaTXServer;
import de.dhbwstuttgart.environment.CompilationEnvironment;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.syntaxtree.ClassOrInterface;
import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.UnifyResultListenerImpl;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import java.io.File;
import java.io.IOException;
import java.io.Writer;
import java.nio.file.Path;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.stream.Collectors;
import org.junit.Ignore;
import org.junit.Test;
import targetast.TestCodegen;
import static org.junit.Assert.*;

@Ignore("Server tests create huge overhead, so they are ignored until required")
public class ServerTest {

    @Test
    public void checkServer_Scalar() throws IOException, ClassNotFoundException {
        compareLocalAndServerResult("Scalar.jav");
    }

    @Test
    public void checkServer_Matrix() throws IOException, ClassNotFoundException {
        compareLocalAndServerResult("Matrix.jav");
    }

    protected void compareLocalAndServerResult(final String filename) throws IOException, ClassNotFoundException {
        File file = Path.of(TestCodegen.path.toString(), filename).toFile();

        // get information from compiler
        JavaTXCompiler compiler = new JavaTXCompiler(List.of(file));

        // NOW: simulate the call to method typeInference. Once via server and once locally
        // if everything works, they should neither interfere with each other, nor differ in their result

        // get the values from the compiler
        PlaceholderRegistry placeholderRegistry = JavaTXCompiler.defaultClientPlaceholderRegistry; //new PlaceholderRegistry();
        ConstraintSet<Pair> cons = compiler.getConstraints(file);
        ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(compiler, cons, placeholderRegistry);
        unifyCons = unifyCons.map(ServerTest::distributeInnerVars);

        FiniteClosure finiteClosure = UnifyTypeFactory.generateFC(
                ServerTest.getAllClasses(compiler, file).stream().toList(),
                Writer.nullWriter(),
                compiler.classLoader,
                compiler,
                placeholderRegistry
        );
        UnifyTaskModel usedTasks = new UnifyTaskModel();



        // create the server
        JavaTXServer server = new JavaTXServer(5000);
        try (ExecutorService executor = Executors.newSingleThreadExecutor()) {
            // run the server in a separate thread
            executor.submit(server::listen);
        }

        // run the unification on the server
        PlaceholderRegistry prCopy = JavaTXCompiler.defaultClientPlaceholderRegistry.deepClone();
        UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
        UnifyContext context = new UnifyContext(Writer.nullWriter(), false, true, urm, usedTasks, prCopy);
        SocketClient socketClient = new SocketClient("ws://localhost:5000");
        List<ResultSet> serverResult = socketClient.execute(finiteClosure, cons, unifyCons, context);

        // close the server
        server.forceStop();

        // run the unification on the client (do this second, because it changes the initial placeholder registry)
        UnifyResultListenerImpl li = new UnifyResultListenerImpl();
        urm.addUnifyResultListener(li);
        TypeUnify.unifyParallel(unifyCons.getUndConstraints(), unifyCons.getOderConstraints(), finiteClosure, context);
        List<ResultSet> clientResult = li.getResults();


        // create the bytecode from both results
        var sf = compiler.sourceFiles.get(file);
        var serverBytecode = compiler.generateBytecode(sf, serverResult);
        var localBytecode = compiler.generateBytecode(sf, clientResult);

        // test if the generated code is the same
        for (var serverEntry : serverBytecode.entrySet()) {
            var serverBytes = serverEntry.getValue();
            var localBytes = localBytecode.get(serverEntry.getKey());
            assertArrayEquals(serverBytes, localBytes);
        }
    }


    protected static UnifyPair distributeInnerVars(UnifyPair x) {
        UnifyType lhs, rhs;
        if (((lhs = x.getLhsType()) instanceof PlaceholderType) && ((rhs = x.getRhsType()) instanceof PlaceholderType) && (((PlaceholderType) lhs).isInnerType() || ((PlaceholderType) rhs).isInnerType())) {
            ((PlaceholderType) lhs).setInnerType(true);
            ((PlaceholderType) rhs).setInnerType(true);
        }
        return x;
    }

    protected static Set<ClassOrInterface> getAllClasses(JavaTXCompiler compiler, File file)
            throws ClassNotFoundException, IOException
    {
        var sf = compiler.sourceFiles.get(file);
        Set<ClassOrInterface> allClasses = new HashSet<>();
        allClasses.addAll(compiler.getAvailableClasses(sf));
        allClasses.addAll(sf.getClasses());
        var newClasses = CompilationEnvironment.loadDefaultPackageClasses(sf.getPkgName(), file, compiler).stream().map(ASTFactory::createClass).collect(Collectors.toSet());
        for (var clazz : newClasses) {
            // Don't load classes that get recompiled
            if (sf.getClasses().stream().anyMatch(nf -> nf.getClassName().equals(clazz.getClassName())))
                continue;
            if (allClasses.stream().noneMatch(old -> old.getClassName().equals(clazz.getClassName())))
                allClasses.add(clazz);
        }
        return allClasses;
    }
}
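ServerTest submits the blocking server loop to a single-thread executor and later calls forceStop() so the loop can end. Reduced to standard JDK types, the start-in-background/stop pattern can be sketched as follows; blockingServerLoop and forceStop below are hypothetical stand-ins for JavaTXServer.listen() and JavaTXServer.forceStop(), not the project's actual implementation:

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class BackgroundServerPatternExample {

    private static final CountDownLatch stopSignal = new CountDownLatch(1);

    // stand-in for a blocking call like JavaTXServer.listen(): returns only once stopped
    static void blockingServerLoop() {
        try {
            stopSignal.await();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }

    // stand-in for JavaTXServer.forceStop(): unblocks the server loop
    static void forceStop() {
        stopSignal.countDown();
    }

    public static void main(String[] args) throws InterruptedException {
        ExecutorService executor = Executors.newSingleThreadExecutor();
        try {
            executor.submit(BackgroundServerPatternExample::blockingServerLoop);

            // ... client work against the running server would happen here ...

        } finally {
            forceStop();                                     // let the blocking loop return
            executor.shutdown();                             // stop accepting new tasks
            executor.awaitTermination(10, TimeUnit.SECONDS); // wait for the loop to finish
        }
    }
}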