Compare commits


48 Commits

All 48 commits were authored by Fabian Holzwarth. Each entry lists the commit SHA1, message, and date:

c54e012426 feat: introduce task cancelling 2025-06-30 22:20:52 +02:00
52f6ebcf3c feat: select multiple elements for variance 0 2025-06-30 16:43:39 +02:00
c80a0c8596 feat: fix error by reintroducing name generator and add server tests 2025-06-30 16:42:20 +02:00
2278fb1b91 feat: undo removing NameGenerator to fix errors in ast generation 2025-06-30 12:46:41 +02:00
32b16cd5fd feat: replace concurrent modification with correct function call 2025-06-30 11:49:53 +02:00
fd30c5f63f feat: prevent reusing the placeholder registry in tests 2025-06-29 16:04:54 +02:00
8bfd6ae255 feat: remove redundant lambda functions and Set-resizings 2025-06-28 14:48:43 +02:00
ad2dfb13bd feat: speedup toString methods by using a StringBuilder instead of String concatenation 2025-06-28 14:30:12 +02:00
501633a90c feat: fix test with null methodSignatureConstraint 2025-06-28 14:06:34 +02:00
4defa50ca2 feat: added version check on connecting 2025-06-25 19:48:29 +02:00
d65e90536a feat: replace NameGenerator with instance of PlaceholderRegistry to prevent duplicates 2025-06-25 19:15:28 +02:00
3de7f1aa61 fix: try generating new placeholders only in current placeholderRegistry context to prevent duplicates 2025-06-25 17:38:56 +02:00
029e40b775 feat: make packets directional and self handling 2025-06-25 17:35:49 +02:00
459bfcdd5f feat: added tests for client-server communication 2025-06-23 16:13:43 +02:00
02886c38ea feat: fixed error in object serialization 2025-06-23 16:13:21 +02:00
57ffae0481 fix: fixed some serialization and deserialization issues 2025-06-22 15:11:49 +02:00
d084d74a25 feat: fixed mismatch in PairOperator serialization 2025-06-22 10:10:32 +02:00
cd15016f61 feat: allow subclasses when asserting values 2025-06-21 13:44:29 +02:00
b0e5eee25c feat: rename Object... to Serial... and move into separate classes 2025-06-21 13:40:24 +02:00
d1bd285be7 fix: replace reflection class check with simple string check 2025-06-21 13:23:01 +02:00
a902fd5bee feat: replaced HashMaps with better type safety structure 2025-06-21 12:58:45 +02:00
ced9fdc9f7 fix: non serialized constraitnContext 2025-06-20 19:09:33 +02:00
53417bf298 feat: implement serialization and adjust packets to correct data types 2025-06-20 18:53:25 +02:00
2d4da03f00 feat: implementing client-server model 2025-06-18 19:58:23 +02:00
f7a13f5faa feat: turn UnifyContext into a record 2025-06-18 18:26:44 +02:00
8fe80b4396 feat: move static placeholder generation into object 2025-06-18 17:47:29 +02:00
eb1201ae5e feat: apply future-based approach to inner cartesian loop 2025-06-09 16:49:45 +02:00
963ad76593 feat: make cartesian loop computation Future-based 2025-06-09 15:30:04 +02:00
1eba09e3b0 feat: change cartesian while loop into recursive 2025-06-09 15:16:09 +02:00
fc82125d14 feat: change TypeUnifyTask to use future-based logic 2025-06-09 14:53:37 +02:00
dad468368b feat: make functions unify and unify2 future-based 2025-06-09 13:14:44 +02:00
fdd4f3aa59 feat: implement variance-dependent calculation as Future based 2025-06-09 12:59:23 +02:00
a0c11b60e8 Remove unnecessary parameter and fix some parallelization 2025-06-07 16:11:34 +02:00
4cddf73e6d feat: small fixes for correct parameters 2025-06-07 14:38:18 +02:00
5024a02447 feat: implement unify context and prepare variance code capsulation 2025-06-07 11:53:32 +02:00
6c2d97b770 chore: code cleanup 2025-05-26 15:49:01 +02:00
426c2916d3 feat: remove unnecessary synchronized blocks 2025-05-26 14:40:17 +02:00
f722a00fbb feat: use the current thread for computation as well 2025-05-25 15:55:07 +02:00
32797c9b9f feat: cleanup more cartesian product code 2025-05-24 12:43:42 +02:00
87f655c85a feat: isolate constraint-filtering for one tv from computeCartesianRecursive 2025-05-23 16:10:37 +02:00
613dceae1d feat: added Logger class, remove empty println start cleanup of computeCartesianRecursive 2025-05-23 14:12:25 +02:00
81cac06e16 feat: add tool for merging many hash sets in parallel 2025-05-23 14:11:52 +02:00
a47d5bc024 feat: slightly improved placeholder name generation 2025-05-23 14:04:48 +02:00
e5916d455a feat: format and merge results in parallel 2025-05-19 17:05:18 +02:00
ebb639e72e feat: remove log flushes 2025-05-18 16:29:19 +02:00
f0a4a51ce6 feat: replace thread counter with thread pool 2025-05-18 15:40:31 +02:00
7442880452 feat: limit placeholder generation to uppercase chars 2025-05-18 13:24:29 +02:00
c4dc3b4245 feat: replace random based placeholder generation with deterministic approach 2025-05-18 12:41:56 +02:00
85 changed files with 6867 additions and 3584 deletions

independentTest.sh (new executable file, 40 lines added)

@@ -0,0 +1,40 @@
#!/usr/bin/env bash
REPO="https://gitea.hb.dhbw-stuttgart.de/f.holzwarth/JavaCompilerCore.git"
TDIR="./testBuild"
rm -rf "$TDIR" 2>/dev/null
mkdir $TDIR
cd $TDIR
git clone $REPO .
git checkout feat/unify-server
# git checkout dad468368b86bdd5a3d3b2754b17617cee0a9107 # 1:55
# git checkout a0c11b60e8c9d7addcbe0d3a09c9ce2924e9d5c0 # 2:25
# git checkout 4cddf73e6d6c9116d3e1705c4b27a8e7f18d80c3 # 2:27
# git checkout 6c2d97b7703d954e4a42eef3ec374bcf313af75c # 2:13
# git checkout f722a00fbb6e69423d48a890e4a6283471763e64 # 1:35
# git checkout f0a4a51ce65639ce9a9470ff0fdb538fdf9c02cc # 2:19
# git checkout 1391206dfe59263cdb22f93371cfd1dd5465d97f # 1:29
date "+%Y.%m.%d %H:%M:%S"
# mvn clean compile -DskipTests package
## prefix each stderr line with " | "
# exec 2> >(trap "" INT TERM; sed 's/^/ | /' >&2)
# echo -e "\nMatrix test:\n |"
# time java -jar target/JavaTXcompiler-0.1-jar-with-dependencies.jar resources/bytecode/javFiles/Matrix.jav >/dev/null;
mvn clean compile test
echo -e "\nCleanup... "
cd -
rm -rf "$TDIR" 2>/dev/null
echo -e "\nFinished "
date "+%Y.%m.%d %H:%M:%S"
echo -e "\n "

pom.xml (15 lines added)

@@ -44,6 +44,21 @@ http://maven.apache.org/maven-v4_0_0.xsd">
         <artifactId>asm</artifactId>
         <version>9.5</version>
     </dependency>
+    <dependency>
+        <groupId>org.java-websocket</groupId>
+        <artifactId>Java-WebSocket</artifactId>
+        <version>1.5.2</version>
+    </dependency>
+    <dependency>
+        <groupId>org.slf4j</groupId>
+        <artifactId>slf4j-simple</artifactId>
+        <version>1.7.25</version>
+    </dependency>
+    <dependency>
+        <groupId>com.fasterxml.jackson.core</groupId>
+        <artifactId>jackson-databind</artifactId>
+        <version>2.17.2</version>
+    </dependency>
 </dependencies>
 <build>


@@ -5,42 +5,61 @@ import java.io.IOException;
 import java.io.OutputStream;
 import java.io.PrintStream;
 import java.util.*;
 
 public class ConsoleInterface {
     private static final String directory = System.getProperty("user.dir");
 
     public static void main(String[] args) throws IOException, ClassNotFoundException {
         List<File> input = new ArrayList<>();
         List<File> classpath = new ArrayList<>();
         String outputPath = null;
         Iterator<String> it = Arrays.asList(args).iterator();
+        Optional<Integer> serverPort = Optional.empty();
+        Optional<String> unifyServer = Optional.empty();
 
         if (args.length == 0) {
             System.out.println("No input files given. Get help with --help");
             System.exit(1);
         } else if (args.length == 1 && args[0].equals("--help")) {
             System.out.println("Usage: javatx [OPTION]... [FILE]...\n" +
                     "\t-cp\tSet Classpath\n" +
-                    "\t-d\tSet destination directory");
+                    "\t-d\tSet destination directory\n" +
+                    "\t[--server-mode <port>]\n" +
+                    "\t[--unify-server <url>]\n");
             System.exit(1);
         }
         while (it.hasNext()) {
             String arg = it.next();
             if (arg.equals("-d")) {
                 outputPath = it.next();
             } else if (arg.startsWith("-d")) {
                 outputPath = arg.substring(2);
             } else if (arg.equals("-cp") || arg.equals("-classpath")) {
                 String[] cps = it.next().split(":");
                 for (String cp : cps) {
                     classpath.add(new File(cp));
                 }
+            } else if (arg.equals("--server-mode")) {
+                serverPort = Optional.of(Integer.parseInt(it.next()));
+            } else if (arg.equals("--unify-server")) {
+                unifyServer = Optional.of(it.next());
             } else {
                 input.add(new File(arg));
             }
         }
 
-        JavaTXCompiler compiler = new JavaTXCompiler(input, classpath, outputPath != null ? new File(outputPath) : null);
-        //compiler.typeInference();
-        compiler.generateBytecode();
+        if (serverPort.isPresent()) {
+            if (unifyServer.isPresent()) throw new RuntimeException("Cannot use unifyServer when in server mode!");
+
+            JavaTXServer server = new JavaTXServer(serverPort.get());
+            server.listen();
+        }
+        else {
+            JavaTXCompiler compiler = new JavaTXCompiler(input, classpath, outputPath != null ? new File(outputPath) : null, unifyServer);
+            //compiler.typeInference();
+            compiler.generateBytecode();
+        }
     }
 }
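
The two new flags map directly onto the server classes added in this branch: --server-mode <port> starts a JavaTXServer that waits for unify requests, while --unify-server <url> is handed through to the JavaTXCompiler constructor so that unification is delegated to a remote server. The following sketch shows the same two paths called programmatically; the port, URL, and file name are illustrative and not taken from this change set, and JavaTXCompiler is assumed to live in de.dhbwstuttgart.core alongside JavaTXServer.

import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.core.JavaTXServer;
import java.io.File;
import java.util.List;
import java.util.Optional;

public class ServerModeSketch {

    // Roughly equivalent to "javatx --server-mode 8080": act as a unify server only.
    static void runAsServer() {
        JavaTXServer server = new JavaTXServer(8080);
        server.listen();
    }

    // Roughly equivalent to "javatx --unify-server ws://localhost:8080 -d out Matrix.jav":
    // compile locally, but have the unification performed by the remote server.
    static void runAsClient() throws Exception {
        JavaTXCompiler compiler = new JavaTXCompiler(
                List.of(new File("Matrix.jav")),     // source files (hypothetical)
                List.of(),                           // classpath
                new File("out"),                     // output directory
                Optional.of("ws://localhost:8080")); // unify server URL
        compiler.generateBytecode();
    }
}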


@@ -12,6 +12,7 @@ import de.dhbwstuttgart.parser.SyntaxTreeGenerator.SyntaxTreeGenerator;
 import de.dhbwstuttgart.parser.antlr.Java17Parser.SourceFileContext;
 import de.dhbwstuttgart.parser.scope.JavaClassName;
 import de.dhbwstuttgart.parser.scope.JavaClassRegistry;
+import de.dhbwstuttgart.server.SocketClient;
 import de.dhbwstuttgart.syntaxtree.ClassOrInterface;
 import de.dhbwstuttgart.syntaxtree.GenericTypeVar;
 import de.dhbwstuttgart.syntaxtree.Method;
@@ -35,10 +36,13 @@ import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
 import de.dhbwstuttgart.typeinference.constraints.Pair;
 import de.dhbwstuttgart.typeinference.result.ResultSet;
 import de.dhbwstuttgart.typeinference.typeAlgo.TYPE;
+import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
 import de.dhbwstuttgart.typeinference.unify.RuleSet;
 import de.dhbwstuttgart.typeinference.unify.TypeUnify;
+import de.dhbwstuttgart.typeinference.unify.UnifyContext;
 import de.dhbwstuttgart.typeinference.unify.distributeVariance;
 import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
+import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
 import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
 import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
 import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
@@ -61,24 +65,29 @@ import org.apache.commons.io.output.NullOutputStream;
 public class JavaTXCompiler {
 
+    // do not use this in any code, that can be executed serverside!
+    public static PlaceholderRegistry defaultClientPlaceholderRegistry = new PlaceholderRegistry();
+
     // public static JavaTXCompiler INSTANCE;
     final CompilationEnvironment environment;
     Boolean resultmodel = true;
     public final Map<File, SourceFile> sourceFiles = new HashMap<>();
     Boolean log = false; //gibt an ob ein Log-File nach System.getProperty("user.dir")+""/logFiles/"" geschrieben werden soll?
     public volatile UnifyTaskModel usedTasks = new UnifyTaskModel();
     public final DirectoryClassLoader classLoader;
     public final List<File> classPath;
     private final File outputPath;
+    private final Optional<String> unifyServer;
 
     public DirectoryClassLoader getClassLoader() {
         return classLoader;
     }
 
     public JavaTXCompiler(File sourceFile) throws IOException, ClassNotFoundException {
-        this(Arrays.asList(sourceFile), List.of(), new File("."));
+        this(Arrays.asList(sourceFile), List.of(), new File("."), Optional.empty());
     }
 
     public JavaTXCompiler(File sourceFile, Boolean log) throws IOException, ClassNotFoundException {
@@ -87,10 +96,18 @@
     }
 
     public JavaTXCompiler(List<File> sourceFiles) throws IOException, ClassNotFoundException {
-        this(sourceFiles, List.of(), new File("."));
+        this(sourceFiles, List.of(), new File("."), Optional.empty());
     }
 
     public JavaTXCompiler(List<File> sources, List<File> contextPath, File outputPath) throws IOException, ClassNotFoundException {
+        this(sources, contextPath, outputPath, Optional.empty());
+    }
+
+    public JavaTXCompiler(List<File> sources, List<File> contextPath, File outputPath, Optional<String> unifyServer) throws IOException, ClassNotFoundException {
+        // ensure new default placeholder registry for tests
+        defaultClientPlaceholderRegistry = new PlaceholderRegistry();
+        this.unifyServer = unifyServer;
         var path = new ArrayList<>(contextPath);
         if (contextPath.isEmpty()) {
             // When no contextPaths are given, the working directory is the sources root
@@ -300,13 +317,14 @@
         Set<Set<UnifyPair>> results = new HashSet<>();
         UnifyResultModel urm = null;
         // urm.addUnifyResultListener(resultListener);
+        UnifyContext context = new UnifyContext(logFile, log, true, urm, usedTasks, defaultClientPlaceholderRegistry);
         try {
             logFile = logFile == null ? new FileWriter(new File("log_" + sourceFiles.keySet().iterator().next().getName())) : logFile;
-            IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses, logFile, getClassLoader(), this);
+            IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses, logFile, getClassLoader(), this, context.placeholderRegistry());
             System.out.println(finiteClosure);
             urm = new UnifyResultModel(cons, finiteClosure);
             urm.addUnifyResultListener(resultListener);
-            ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons);
+            ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons, context.placeholderRegistry());
 
             Function<UnifyPair, UnifyPair> distributeInnerVars = x -> {
                 UnifyType lhs, rhs;
@@ -320,13 +338,12 @@
             logFile.write(unifyCons.toString());
             unifyCons = unifyCons.map(distributeInnerVars);
             logFile.write(unifyCons.toString());
-            TypeUnify unify = new TypeUnify();
             // Set<Set<UnifyPair>> results = new HashSet<>(); Nach vorne gezogen
             logFile.write("FC:\\" + finiteClosure.toString() + "\n");
             for (SourceFile f : this.sourceFiles.values()) {
                 logFile.write(ASTTypePrinter.print(f));
             }
-            logFile.flush();
+            // logFile.flush();
             Set<PlaceholderType> varianceTPHold;
             Set<PlaceholderType> varianceTPH = new HashSet<>();
             varianceTPH = varianceInheritanceConstraintSet(unifyCons);
@@ -342,7 +359,7 @@
             List<Set<Constraint<UnifyPair>>> oderConstraints = unifyCons.getOderConstraints()/*
              * .stream().map(x -> { Set<Set<UnifyPair>> ret = new HashSet<>(); for (Constraint<UnifyPair> y : x) { ret.add(new HashSet<>(y)); } return ret; }).collect(Collectors. toCollection(ArrayList::new))
              */;
-            unify.unifyAsync(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, urm, usedTasks);
+            TypeUnify.unifyAsync(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
         } catch (IOException e) {
             System.err.println("kein LogFile");
         }
@@ -365,13 +382,14 @@
         final ConstraintSet<Pair> cons = getConstraints(file);
         Set<Set<UnifyPair>> results = new HashSet<>();
+        PlaceholderRegistry placeholderRegistry = new PlaceholderRegistry();
         try {
             var logFolder = new File(System.getProperty("user.dir") + "/logFiles/");
             if (log) logFolder.mkdirs();
             Writer logFile = log ? new FileWriter(new File(logFolder, "log_" + sourceFiles.keySet().iterator().next().getName())) : new OutputStreamWriter(new NullOutputStream());
-            IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses.stream().toList(), logFile, classLoader, this);
+            FiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses.stream().toList(), logFile, classLoader, this, placeholderRegistry);
             System.out.println(finiteClosure);
-            ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons);
+            ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons, placeholderRegistry);
             System.out.println("xxx1");
             Function<UnifyPair, UnifyPair> distributeInnerVars = x -> {
                 UnifyType lhs, rhs;
@@ -387,12 +405,11 @@
             System.out.println("Unify:" + unifyCons.toString());
             unifyCons = unifyCons.map(distributeInnerVars);
             logFile.write("\nUnify_distributeInnerVars: " + unifyCons.toString());
-            TypeUnify unify = new TypeUnify();
             // Set<Set<UnifyPair>> results = new HashSet<>(); Nach vorne gezogen
             logFile.write("FC:\\" + finiteClosure.toString() + "\n");
             logFile.write(ASTTypePrinter.print(sf));
             System.out.println(ASTTypePrinter.print(sf));
-            logFile.flush();
+            // logFile.flush();
             System.out.println("Unify nach Oder-Constraints-Anpassung:" + unifyCons.toString());
             Set<PlaceholderType> varianceTPHold;
             Set<PlaceholderType> varianceTPH = new HashSet<>();
@@ -410,16 +427,24 @@
             /*
              * Set<Set<UnifyPair>> ret = new HashSet<>(); for (Constraint<UnifyPair> y : x) { ret.add(new HashSet<>(y)); } return ret; }).collect(Collectors.toCollection(ArrayList::new))
              */;
-            if (resultmodel) {
+            if (unifyServer.isPresent()) {
+                UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
+                UnifyContext context = new UnifyContext(logFile, log, true, urm, usedTasks, placeholderRegistry);
+                SocketClient socketClient = new SocketClient(unifyServer.get());
+                return socketClient.execute(finiteClosure, cons, unifyCons, context);
+            }
+            else if (resultmodel) {
                 /* UnifyResultModel Anfang */
                 UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
                 UnifyResultListenerImpl li = new UnifyResultListenerImpl();
                 urm.addUnifyResultListener(li);
-                unify.unifyParallel(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, urm, usedTasks);
+                UnifyContext context = new UnifyContext(logFile, log, true, urm, usedTasks, placeholderRegistry);
+                TypeUnify.unifyParallel(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
                 System.out.println("RESULT Final: " + li.getResults());
                 System.out.println("Constraints for Generated Generics: " + " ???");
                 logFile.write("RES_FINAL: " + li.getResults().toString() + "\n");
-                logFile.flush();
+                // logFile.flush();
                 return li.getResults();
             }
             /* UnifyResultModel End */
@@ -427,34 +452,35 @@
                 // Set<Set<UnifyPair>> result = unify.unify(unifyCons.getUndConstraints(),
                 // oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons,
                 // finiteClosure));
-                Set<Set<UnifyPair>> result = unify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons, finiteClosure), usedTasks);
+                UnifyContext context = new UnifyContext(logFile, log, false, new UnifyResultModel(cons, finiteClosure), usedTasks, placeholderRegistry);
+                Set<Set<UnifyPair>> result = TypeUnify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
                 System.out.println("RESULT: " + result);
                 logFile.write("RES: " + result.toString() + "\n");
-                logFile.flush();
+                // logFile.flush();
                 results.addAll(result);
 
                 results = results.stream().map(x -> {
-                    Optional<Set<UnifyPair>> res = new RuleSet().subst(x.stream().map(y -> {
+                    Optional<Set<UnifyPair>> res = new RuleSet(placeholderRegistry).subst(x.stream().map(y -> {
                         if (y.getPairOp() == PairOperator.SMALLERDOTWC)
                             y.setPairOp(PairOperator.EQUALSDOT);
                         return y; // alle Paare a <.? b erden durch a =. b ersetzt
                     }).collect(Collectors.toCollection(HashSet::new)));
                     if (res.isPresent()) {// wenn subst ein Erg liefert wurde was veraendert
-                        return new TypeUnifyTask().applyTypeUnificationRules(res.get(), finiteClosure);
+                        return new TypeUnifyTask(context).applyTypeUnificationRules(res.get(), finiteClosure);
                     } else
                         return x; // wenn nichts veraendert wurde wird x zurueckgegeben
                 }).collect(Collectors.toCollection(HashSet::new));
                 System.out.println("RESULT Final: " + results);
                 System.out.println("Constraints for Generated Generics: " + " ???");
                 logFile.write("RES_FINAL: " + results.toString() + "\n");
-                logFile.flush();
-                logFile.write("PLACEHOLDERS: " + PlaceholderType.EXISTING_PLACEHOLDERS);
-                logFile.flush();
+                // logFile.flush();
+                logFile.write("PLACEHOLDERS: " + placeholderRegistry);
+                // logFile.flush();
             }
         } catch (IOException e) {
             System.err.println("kein LogFile");
         }
-        return results.stream().map((unifyPairs -> new ResultSet(UnifyTypeFactory.convert(unifyPairs, Pair.generateTPHMap(cons))))).collect(Collectors.toList());
+        return results.stream().map((unifyPairs -> new ResultSet(UnifyTypeFactory.convert(unifyPairs, Pair.generateTPHMap(cons), placeholderRegistry)))).collect(Collectors.toList());
     }
 
     /**
@@ -586,10 +612,6 @@
         }
     }
 
-    /**
-     * @param path - output-Directory can be null, then class file output is in the same directory as the parsed source files
-     * @return
-     */
     public Map<JavaClassName, byte[]> generateBytecode(File sourceFile) throws ClassNotFoundException, IOException {
         var sf = sourceFiles.get(sourceFile);
         if (sf.isGenerated()) return null;


@@ -0,0 +1,31 @@
package de.dhbwstuttgart.core;
import de.dhbwstuttgart.server.SocketServer;
public class JavaTXServer {
public static boolean isRunning = false;
final SocketServer socketServer;
public JavaTXServer(int port) {
this.socketServer = new SocketServer(port);
isRunning = true;
}
public void listen() {
socketServer.start();
}
public void forceStop() {
try {
socketServer.stop();
}
catch (InterruptedException exception) {
System.err.println("Interrupted socketServer: " + exception);
}
isRunning = false;
}
}


@@ -0,0 +1,11 @@
package de.dhbwstuttgart.exceptions;
/**
* Eine Runtime Exception, die für den Fall genutzt wird, dass eine Unifikation abgebrochen wird.
* Durch das Werfen einer Exception können Abbrüche auch aus Methodenaufrufen heraus
* geprüft werden, da zuvor nur ein return X; stattfinden würde.
*/
public class UnifyCancelException extends RuntimeException {
}


@@ -1,4 +1,25 @@
 package de.dhbwstuttgart.parser;
 
-public record SourceLoc(String file, int line) {
+import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
+import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
+import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
+
+public record SourceLoc(String file, int line) implements ISerializableData {
+
+    @Override
+    public SerialMap toSerial(KeyStorage keyStorage) {
+        var serialized = new SerialMap();
+        serialized.put("file", file);
+        serialized.put("line", line);
+        return serialized;
+    }
+
+    public static SourceLoc fromSerial(SerialMap data) {
+        return new SourceLoc(
+                data.getValue("file").getOf(String.class),
+                data.getValue("line").getOf(Integer.class)
+        );
+    }
 }
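
SourceLoc is the smallest type that gains the toSerial/fromSerial pair in this branch, so it illustrates the serialization contract compactly: toSerial writes the record components into a SerialMap under fixed keys, and fromSerial reads them back. A minimal round-trip sketch; the file name and line number are made up:

import de.dhbwstuttgart.parser.SourceLoc;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;

public class SourceLocRoundTrip {
    public static void main(String[] args) {
        SourceLoc loc = new SourceLoc("Matrix.jav", 42);
        SerialMap serial = loc.toSerial(new KeyStorage()); // keys "file" and "line"
        SourceLoc restored = SourceLoc.fromSerial(serial);
        System.out.println(loc.equals(restored));           // expected: true (records compare by components)
    }
}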


@@ -12,6 +12,7 @@ import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
 import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
 import de.dhbwstuttgart.syntaxtree.type.*;
 import de.dhbwstuttgart.typeinference.constraints.Pair;
+import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
 import de.dhbwstuttgart.typeinference.unify.model.*;
 
 import java.util.*;
@@ -26,16 +27,21 @@
      *
      * @param availableClasses - Alle geparsten Klassen
      */
-    public static Set<UnifyPair> toUnifyFC(JavaTXCompiler compiler, Collection<ClassOrInterface> availableClasses, ClassLoader classLoader) throws ClassNotFoundException {
-        return toFC(availableClasses, classLoader).stream().map(t -> UnifyTypeFactory.convert(compiler, t)).collect(Collectors.toSet());
+    public static Set<UnifyPair> toUnifyFC(JavaTXCompiler compiler, Collection<ClassOrInterface> availableClasses, ClassLoader classLoader, PlaceholderRegistry placeholderRegistry) throws ClassNotFoundException {
+        return toFC(
+                availableClasses,
+                classLoader,
+                placeholderRegistry
+        ).stream().map(t -> UnifyTypeFactory.convert(compiler, t, placeholderRegistry))
+                .collect(Collectors.toSet());
     }
 
-    public static Set<Pair> toFC(Collection<ClassOrInterface> availableClasses, ClassLoader classLoader) throws ClassNotFoundException {
+    public static Set<Pair> toFC(Collection<ClassOrInterface> availableClasses, ClassLoader classLoader, PlaceholderRegistry placeholderRegistry) throws ClassNotFoundException {
         HashSet<Pair> pairs = new HashSet<>();
         //PL 2018-09-18: gtvs vor die for-Schleife gezogen, damit immer die gleichen Typeplaceholder eingesetzt werden.
         HashMap<String, RefTypeOrTPHOrWildcardOrGeneric> gtvs = new HashMap<>();
         for(ClassOrInterface cly : availableClasses){
-            List<Pair> newPairs = getSuperTypes(cly, availableClasses, gtvs, classLoader);
+            List<Pair> newPairs = getSuperTypes(cly, availableClasses, gtvs, classLoader, placeholderRegistry);
             pairs.addAll(newPairs);
 
             //For all Functional Interfaces FI: FunN$$<... args auf dem Functional Interface ...> <. FI is added to FC
@@ -75,8 +81,13 @@
      * @param forType
      * @return
      */
-    private static List<Pair> getSuperTypes(ClassOrInterface forType, Collection<ClassOrInterface> availableClasses, ClassLoader classLoader) throws ClassNotFoundException {
-        return getSuperTypes(forType, availableClasses, new HashMap<>(), classLoader);
+    private static List<Pair> getSuperTypes(
+            ClassOrInterface forType,
+            Collection<ClassOrInterface> availableClasses,
+            ClassLoader classLoader,
+            PlaceholderRegistry placeholderRegistry
+    ) throws ClassNotFoundException {
+        return getSuperTypes(forType, availableClasses, new HashMap<>(), classLoader, placeholderRegistry);
     }
 
     /**
@@ -87,8 +98,13 @@
      * @return
      * @throws ClassNotFoundException
      */
-    private static List<Pair> getSuperTypes(ClassOrInterface forType, Collection<ClassOrInterface> availableClasses,
-            HashMap<String, RefTypeOrTPHOrWildcardOrGeneric> gtvs, ClassLoader classLoader) throws ClassNotFoundException {
+    private static List<Pair> getSuperTypes(
+            ClassOrInterface forType,
+            Collection<ClassOrInterface> availableClasses,
+            HashMap<String, RefTypeOrTPHOrWildcardOrGeneric> gtvs,
+            ClassLoader classLoader,
+            PlaceholderRegistry placeholderRegistry
+    ) throws ClassNotFoundException {
         List<RefTypeOrTPHOrWildcardOrGeneric> params = new ArrayList<>();
         //Die GTVs, die in forType hinzukommen:
         HashMap<String, RefTypeOrTPHOrWildcardOrGeneric> newGTVs = new HashMap<>();
@@ -147,7 +163,7 @@
             if(superClass.getClassName().equals(ASTFactory.createObjectClass().getClassName())){
                 superTypes = Arrays.asList(new Pair(ASTFactory.createObjectType(), ASTFactory.createObjectType(), PairOperator.SMALLER));
             }else{
-                superTypes = getSuperTypes(superClass, availableClasses, newGTVs, classLoader);
+                superTypes = getSuperTypes(superClass, availableClasses, newGTVs, classLoader, placeholderRegistry);
             }
 
             retList.add(ret);


@@ -0,0 +1,158 @@
package de.dhbwstuttgart.server;
import com.fasterxml.jackson.core.JsonProcessingException;
import de.dhbwstuttgart.server.packet.IPacket;
import de.dhbwstuttgart.server.packet.IServerToClientPacket;
import de.dhbwstuttgart.server.packet.PacketContainer;
import de.dhbwstuttgart.server.packet.UnifyRequestPacket;
import de.dhbwstuttgart.server.packet.UnifyResultPacket;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import java.net.URI;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.java_websocket.client.WebSocketClient;
import org.java_websocket.enums.ReadyState;
import org.java_websocket.handshake.ServerHandshake;
/**
* The Client-side of the websocket
*/
public class SocketClient extends WebSocketClient {
// use a latch to wait until the connection is closed by the remote host
private final CountDownLatch closeLatch = new CountDownLatch(1);
// temporarily: The received unify result
// TODO: replace with uuid and future system, such that responses can be mapped by a uuid to fulfill a Future
private UnifyResultPacket unifyResultPacket;
public SocketClient(String url) {
super(URI.create(url), Map.of(
"packetProtocolVersion", SocketServer.packetProtocolVersion
));
// make sure, the url is in a valid format
final String regex = "^wss?://(\\w+(\\.\\w+)?)*:(\\d+)$";
final Matcher matcher = Pattern.compile(regex).matcher(url);
if (!matcher.find()) {
throw new RuntimeException("Provided string \"" + url + "\" is not a valid server URL! Use pattern ws(s?)://<host.name>:<port>");
}
}
public SocketClient(String host, int port, boolean secure) {
super(URI.create(String.format("%s://%s:%d/", secure ? "wss" : "ws", host, port)));
}
/**
* The main method for connecting, requesting and waiting for the server to unify.
* This is synchronized to prevent multiple webSockets connections at the moment, but it is not called from any
* thread except the main thread right now and is not necessary at all, probably. Maybe remove it later
*/
synchronized public List<ResultSet> execute(
FiniteClosure finiteClosure,
ConstraintSet<Pair> constraintSet,
ConstraintSet<UnifyPair> unifyConstraintSet,
UnifyContext context
) throws JsonProcessingException {
try {
// wait for the connection to be set up
this.connectBlocking();
// make sure the connection has been established successfully
if (this.getReadyState() != ReadyState.OPEN) {
throw new RuntimeException("WebSocket Client could not connect to remote host at " + this.uri);
}
// send the unify task request
UnifyRequestPacket packet = new UnifyRequestPacket(finiteClosure, constraintSet, unifyConstraintSet, context.placeholderRegistry());
String json = PacketContainer.serialize(packet);
this.send(json);
// block the thread, until the connection is closed by the remote host (usually after sending the results)
this.waitUntilClosed();
// wait for the connection to fully close
this.closeBlocking();
} catch (InterruptedException exception) {
System.err.println("Server connection interrupted: " + exception);
this.notifyAll();
throw new RuntimeException("Aborted server connection", exception);
}
catch (Exception exception) {
throw new RuntimeException("Exception occurred in server connection: ", exception);
}
// detect error cases, in which no error was thrown, but also no result was sent back from the server
if (this.unifyResultPacket == null) {
throw new RuntimeException("Did not receive server response but closed connection already");
}
return unifyResultPacket.getResultSet(context);
}
/**
* Specific client-side implementations to handle incoming packets
*/
protected void handleReceivedPacket(IPacket packet) {
if (packet instanceof IServerToClientPacket serverToClientPacket) {
try {
serverToClientPacket.onHandle(this.getConnection(), this);
}
catch (Exception exception) {
this.closeLatch.countDown();
this.close();
throw exception;
}
return;
}
System.err.println("Received package of invalid type + " + packet.getClass().getName());
this.close();
}
public void setUnifyResultSets(UnifyResultPacket unifyResultPacket) {
this.unifyResultPacket = unifyResultPacket;
}
@Override
public void onOpen(ServerHandshake handshakedata) {
System.out.println("Connected to server with status " + handshakedata.getHttpStatus());
}
@Override
public void onMessage(String message) {
// System.out.println("received: " + message);
IPacket packet = PacketContainer.deserialize(message);
this.handleReceivedPacket(packet);
}
@Override
public void onClose(int code, String reason, boolean remote) {
System.out.println(
"Disconnected from server " +
"with code " + code + " " +
(reason.isEmpty() ? "" : "and reason " + reason + " ") +
"(closed by remote: " + remote + ")"
);
this.closeLatch.countDown();
}
@Override
public void onError(Exception e) {
System.out.println("Error: " + e.getMessage());
e.printStackTrace();
}
public void waitUntilClosed() throws InterruptedException {
closeLatch.await();
}
}
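
SocketClient.execute bundles the whole client side of the protocol into one call: connect, send a UnifyRequestPacket, block until the server closes the connection, and return the result sets carried by the received UnifyResultPacket. A minimal sketch of a call site, assuming the four arguments are produced by the type-inference pipeline as in the JavaTXCompiler changes above and that a unify server is reachable at the (illustrative) URL:

import com.fasterxml.jackson.core.JsonProcessingException;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import java.util.List;

public class RemoteUnifySketch {
    static List<ResultSet> unifyRemotely(FiniteClosure finiteClosure,
                                         ConstraintSet<Pair> cons,
                                         ConstraintSet<UnifyPair> unifyCons,
                                         UnifyContext context) throws JsonProcessingException {
        SocketClient client = new SocketClient("ws://localhost:8080");
        // Blocks until the server has sent a UnifyResultPacket and closed the connection.
        return client.execute(finiteClosure, cons, unifyCons, context);
    }
}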


@@ -0,0 +1,203 @@
package de.dhbwstuttgart.server;
import com.fasterxml.jackson.core.JsonProcessingException;
import de.dhbwstuttgart.server.packet.ErrorPacket;
import de.dhbwstuttgart.server.packet.IClientToServerPacket;
import de.dhbwstuttgart.server.packet.IPacket;
import de.dhbwstuttgart.server.packet.MessagePacket;
import de.dhbwstuttgart.server.packet.PacketContainer;
import java.net.InetSocketAddress;
import java.util.Objects;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.java_websocket.WebSocket;
import org.java_websocket.handshake.ClientHandshake;
import org.java_websocket.server.WebSocketServer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class SocketServer extends WebSocketServer {
/**
* Increase this every time a breaking change to the server communication is done.
* This will prevent errors when server version and client version do not match.
*/
public static final String packetProtocolVersion = "1";
public static final Logger log = LoggerFactory.getLogger(SocketServer.class);
public SocketServer(int port) {
super(new InetSocketAddress(port));
}
@Override
public void onOpen(WebSocket webSocket, ClientHandshake clientHandshake) {
String ppv = clientHandshake.getFieldValue("packetProtocolVersion");
if (!ppv.equals(packetProtocolVersion)) {
try {
ErrorPacket errorPacket = ErrorPacket.create(
"Mismatch in packet protocol version! Client (you): " + ppv + " and Server (me): " + packetProtocolVersion,
true
);
webSocket.send(PacketContainer.serialize(errorPacket));
}
catch (JsonProcessingException exception) {
System.err.println("Failed to serialize json: " + exception);
}
webSocket.close(1);
return;
}
SocketData socketData = new SocketData(UUID.randomUUID().toString());
webSocket.setAttachment(socketData);
System.out.println("New connection: " + socketData.id + " (with ppv " + ppv + ")");
try {
sendMessage(webSocket, "Welcome to the server!");
// wait 10 seconds for the client to send a task and close the connection if nothing has been received until then
try (ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor()) {
Runnable task = () -> {
if (webSocket.<SocketData>getAttachment().unhandledTasks.get() == 0 || !webSocket.isOpen()) {
return;
}
sendMessage(webSocket, "No task received after 10 seconds. Closing connection...");
webSocket.close();
};
executor.schedule(task, 10, TimeUnit.SECONDS);
executor.shutdown();
}
// and finally, when your program wants to exit
} catch (Exception e) {
log.error("e: ", e);
webSocket.close(1, e.getMessage());
}
}
@Override
public void onClose(WebSocket webSocket, int code, String reason, boolean remote) {
SocketData socketData = webSocket.getAttachment();
System.out.println("Connection closed: " + socketData.id);
System.out.println(
"Disconnected client " + socketData.id + " " +
"with code " + code + " " +
(reason.isEmpty() ? "" : "and reason " + reason + " ") +
"(closed by client: " + remote + ")"
);
}
@Override
public void onMessage(WebSocket webSocket, String s) {
// System.out.println("Received: " + s.substring(0, 50));
IPacket reconstructedPacket = PacketContainer.deserialize(s);
try {
this.onPacketReceived(webSocket, reconstructedPacket);
} catch (JsonProcessingException e) {
this.log("Error on processing incoming package: " + e.getMessage(), webSocket);
}
}
@Override
public void onError(WebSocket webSocket, Exception e) {
log(e.getMessage(), webSocket);
webSocket.close();
}
@Override
public void onStart() {
System.out.println("Websocket server started on port " + this.getPort());
}
/**
* A shorthand method for sending informational messages to the client
*/
public void sendMessage(WebSocket webSocket, String text) {
try {
MessagePacket message = new MessagePacket();
message.message = text;
webSocket.send(PacketContainer.serialize(message));
} catch (Exception e) {
System.err.println("Failed to send message: " + text);
log.error("e: ", e);
}
}
/**
* A shorthand method for sending error messages to the client
*/
public void sendError(WebSocket webSocket, String text) {
try {
ErrorPacket error = new ErrorPacket();
error.error = text;
webSocket.send(PacketContainer.serialize(error));
} catch (Exception e) {
System.err.println("Failed to send error: " + text);
log.error("e: ", e);
}
}
/**
* The server-side implementation on how to handle certain packets when received
*/
private void onPacketReceived(WebSocket webSocket, IPacket packet) throws JsonProcessingException {
SocketData socketData = webSocket.getAttachment();
// limit the amount of tasks per connection
final int maxTasks = 100;
if (socketData.totalTasks.get() >= maxTasks) {
sendError(webSocket, "Exceeded the maximum amount of " + maxTasks + " tasks per session");
webSocket.close();
return;
}
// only allow packets, that are meant to be handled by the server
if (!(packet instanceof IClientToServerPacket clientToServerPacket)) {
sendMessage(webSocket, "The package of type " + packet.getClass().getName() + " is not handled by the server!");
return;
}
// update the socket data
socketData.unhandledTasks.incrementAndGet();
socketData.totalTasks.incrementAndGet();
// add the packet to the queue, so it can be started by the worker
CompletableFuture.runAsync(() -> {
clientToServerPacket.onHandle(webSocket, this);
// if the websocket has 0 unhandled Tasks, close the connection
int remainingUnhandledTasks = socketData.unhandledTasks.decrementAndGet();
if (remainingUnhandledTasks <= 0) {
sendMessage(webSocket, "All requested tasks finished! Closing connection...");
webSocket.close();
}
});
}
public void log(String msg, WebSocket webSocket) {
SocketData socketData = webSocket == null ? new SocketData("???") : webSocket.getAttachment();
System.out.println("["+socketData.id+"] " + msg);
}
/**
* The data that is associated server-side with any connected client.
* This makes it possible to store information that can be mapped to any existing connection.
*/
public static class SocketData {
public final String id;
public final AtomicInteger unhandledTasks = new AtomicInteger(0);
public final AtomicInteger totalTasks = new AtomicInteger(0);
public SocketData(String id) {
this.id = id;
}
}
}


@@ -0,0 +1,30 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.SocketServer;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialUUID;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import org.java_websocket.WebSocket;
public class DebugPacket implements IClientToServerPacket, IServerToClientPacket {
public SerialUUID a1;
public SerialUUID a2;
public SerialMap b1;
public SerialMap b2;
public SerialList<? extends ISerialNode> c1;
public SerialList<? extends ISerialNode> c2;
public SerialValue<?> d1;
public SerialValue<?> d2;
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketClient socketServer) {}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketServer socketServer) {}
}


@@ -0,0 +1,41 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.SocketServer;
import org.java_websocket.WebSocket;
/**
* A packet to send simple error messages between the client and the server
*/
public class ErrorPacket implements IClientToServerPacket, IServerToClientPacket {
/**
* The error message from the server, that should be logged or outputted
*/
public String error;
public boolean isFatal;
@JsonIgnore
public static ErrorPacket create(String error, boolean isFatal) {
ErrorPacket packet = new ErrorPacket();
packet.error = error;
packet.isFatal = isFatal;
return packet;
}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketClient socketClient) {
System.err.println("[socket] " + "ErrorPacket: " + this.error);
if (this.isFatal) {
throw new RuntimeException("Received fatal error from server: " + this.error);
}
}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketServer socketServer) {
socketServer.log("ErrorPacket: " + this.error, webSocket);
}
}


@@ -0,0 +1,12 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketServer;
import org.java_websocket.WebSocket;
public interface IClientToServerPacket extends IPacket {
@JsonIgnore
void onHandle(WebSocket webSocket, SocketServer socketServer);
}


@@ -0,0 +1,12 @@
package de.dhbwstuttgart.server.packet;
/**
* The shared interface for all packets of the client-server connection.
* A packet must always:
* - Have a default / no-parameter constructor
* - Have only serializable public properties (or disable them via jackson annotations)
*
*/
public interface IPacket {
}
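
The contract above (no-argument constructor, public jackson-serializable fields, handler methods marked @JsonIgnore) is what every packet in this change set follows. The class below is a hypothetical example only, not part of the diff, to illustrate that contract; a real packet would additionally need its own field in PacketContainer, as noted there.

package de.dhbwstuttgart.server.packet;

import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.SocketServer;
import org.java_websocket.WebSocket;

// Hypothetical packet type, shown only to illustrate the IPacket contract.
public class PingPacket implements IClientToServerPacket, IServerToClientPacket {

    // public, jackson-serializable state
    public long sentAtMillis;

    // jackson needs the no-argument constructor
    public PingPacket() {}

    @JsonIgnore
    public void onHandle(WebSocket webSocket, SocketServer socketServer) {
        socketServer.sendMessage(webSocket, "pong");
    }

    @JsonIgnore
    public void onHandle(WebSocket webSocket, SocketClient socketClient) {
        System.out.println("[socket] pong after " + (System.currentTimeMillis() - sentAtMillis) + " ms");
    }
}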


@@ -0,0 +1,12 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import org.java_websocket.WebSocket;
public interface IServerToClientPacket extends IPacket {
@JsonIgnore
void onHandle(WebSocket webSocket, SocketClient socketClient);
}


@@ -0,0 +1,29 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.SocketServer;
import org.java_websocket.WebSocket;
/**
* A fallback packet that is generated if the received json could not be mapped to an existing packet type
*/
public class InvalidPacket implements IClientToServerPacket, IServerToClientPacket {
/**
* If available, the error that caused this package to appear
*/
public String error = "<unknown error>";
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketClient socketClient) {
System.err.println("[socket] " + "InvalidPacket: " + this.error);
}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketServer socketServer) {
socketServer.log("InvalidPacket: " + this.error, webSocket);
}
}


@@ -0,0 +1,36 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.SocketServer;
import org.java_websocket.WebSocket;
/**
* A packet to send simple informational messages between the client and the server
*/
public class MessagePacket implements IClientToServerPacket, IServerToClientPacket {
/**
* The informational message from the server, that should be logged or outputted
*/
public String message;
@JsonIgnore
public static MessagePacket create(String message) {
MessagePacket packet = new MessagePacket();
packet.message = message;
return packet;
}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketClient socketClient) {
System.err.println("[socket] " + this.message);
}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketServer socketServer) {
socketServer.log(this.message, webSocket);
}
}


@@ -0,0 +1,90 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
* A wrapper for the packet to ensure correct serialization/deserialization and make it possible to detect the matching
* packet type for deserialization.
*/
@JsonInclude(JsonInclude.Include.NON_NULL)
public class PacketContainer {
// The jackson serializer / deserializer tool
private static final ObjectMapper objectMapper = new ObjectMapper();
/*
* The available packet types. The one type that is represented in the JSON should always be the ONLY non-null value.
* They have to be public (for the moment) to let jackson fill them in while deserializing
*/
public ErrorPacket errorPacket = null;
public MessagePacket messagePacket = null;
public InvalidPacket invalidPacket = null;
public UnifyRequestPacket unifyRequestPacket = null;
public UnifyResultPacket unifyResultPacket = null;
public DebugPacket debugPacket = null;
/**
* Generate the JSON string for the given packet
*
* @param packet The packet to serialize
* @return The json representation of the packet
*/
public static String serialize(IPacket packet) throws JsonProcessingException {
objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
PacketContainer container = new PacketContainer();
if (packet instanceof ErrorPacket)
container.errorPacket = (ErrorPacket) packet;
else if (packet instanceof MessagePacket)
container.messagePacket = (MessagePacket) packet;
else if (packet instanceof UnifyRequestPacket)
container.unifyRequestPacket = (UnifyRequestPacket) packet;
else if (packet instanceof UnifyResultPacket)
container.unifyResultPacket = (UnifyResultPacket) packet;
else if (packet instanceof DebugPacket)
container.debugPacket = (DebugPacket) packet;
// Add new packets here and in the deserialize method
return objectMapper.writeValueAsString(container);
}
/**
* Use the JSON string to generate the matching packet object
*
* @param json The serialized representation of a packet container
* @return The deserialized Packet object
*/
public static IPacket deserialize(String json) {
objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
try {
PacketContainer container = objectMapper.readValue(json, PacketContainer.class);
if (container.errorPacket != null)
return container.errorPacket;
if (container.messagePacket != null)
return container.messagePacket;
if (container.invalidPacket != null)
return container.invalidPacket;
if (container.unifyRequestPacket != null)
return container.unifyRequestPacket;
if (container.unifyResultPacket != null)
return container.unifyResultPacket;
if (container.debugPacket != null)
return container.debugPacket;
// Add new packets here and in the serialize method
throw new RuntimeException("Cannot map received json to any known packet class");
} catch (Exception e) {
System.out.println(e);
InvalidPacket packet = new InvalidPacket();
packet.error = e.getMessage();
return packet;
}
}
}
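
Since every packet travels wrapped in a PacketContainer, the JSON on the wire has exactly one non-null field, whose name identifies the packet type; deserialize uses that field to pick the target class. A small round-trip sketch using the MessagePacket factory shown earlier (the printed JSON is only indicative of the expected shape, not output captured from the project):

package de.dhbwstuttgart.server.packet;

import com.fasterxml.jackson.core.JsonProcessingException;

public class PacketRoundTrip {
    public static void main(String[] args) throws JsonProcessingException {
        MessagePacket message = MessagePacket.create("Welcome to the server!");
        String json = PacketContainer.serialize(message);
        // e.g. {"messagePacket":{"message":"Welcome to the server!"}}
        System.out.println(json);

        IPacket received = PacketContainer.deserialize(json);
        System.out.println(received instanceof MessagePacket); // expected: true
    }
}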


@@ -0,0 +1,145 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketServer;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.UnifyResultListenerImpl;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.concurrent.ForkJoinPool;
import org.java_websocket.WebSocket;
/**
* A packet to send all required data for the unification algorithm to the server and request the unification
*/
public class UnifyRequestPacket implements IClientToServerPacket {
public SerialMap finiteClosure;
public SerialMap constraintSet;
public SerialMap unifyConstraintSet;
public SerialMap serialKeyStorage;
public SerialValue<?> placeholders;
public SerialList<SerialMap> factoryplaceholders;
@JsonIgnore
private KeyStorage keyStorage = new KeyStorage();
@JsonIgnore
private boolean keyStorageLoaded = false;
public UnifyRequestPacket() {}
public UnifyRequestPacket(
FiniteClosure finiteClosure,
ConstraintSet<Pair> constraintSet,
ConstraintSet<UnifyPair> unifyConstraintSet,
PlaceholderRegistry placeholderRegistry
) {
// store constraint sets and finite closure
this.finiteClosure = finiteClosure.toSerial(keyStorage);
this.constraintSet = constraintSet.toSerial(keyStorage);
this.unifyConstraintSet = unifyConstraintSet.toSerial(keyStorage);
// store placeholder registry
var serialRegistry = placeholderRegistry.toSerial(keyStorage);
this.placeholders = serialRegistry.getValue("ph");
this.factoryplaceholders = serialRegistry.getList("factoryPh").assertListOfMaps();
// store referenced objects separately
this.serialKeyStorage = keyStorage.toSerial(keyStorage);
}
@JsonIgnore
public void loadKeyStorage(UnifyContext context) {
if (!keyStorageLoaded) {
keyStorageLoaded = true;
keyStorage = KeyStorage.fromSerial(this.serialKeyStorage, context);
}
}
@JsonIgnore
private FiniteClosure retrieveFiniteClosure(UnifyContext context) {
this.loadKeyStorage(context);
return FiniteClosure.fromSerial(this.finiteClosure, context, keyStorage);
}
@JsonIgnore
private ConstraintSet<Pair> retrieveConstraintSet(UnifyContext context) {
this.loadKeyStorage(context);
return ConstraintSet.fromSerial(this.constraintSet, context, Pair.class, keyStorage);
}
@JsonIgnore
private ConstraintSet<UnifyPair> retrieveUnifyConstraintSet(UnifyContext context) {
this.loadKeyStorage(context);
return ConstraintSet.fromSerial(this.unifyConstraintSet, context, UnifyPair.class, keyStorage);
}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketServer socketServer) {
socketServer.sendMessage(webSocket, "You requested a unify! Please wait until I calculated everything...");
System.out.println("Client " + webSocket.<SocketServer.SocketData>getAttachment().id + " requested a unification. Starting now...");
try {
var placeholderRegistry = new PlaceholderRegistry();
ArrayList<String> existingPlaceholders = (ArrayList) this.placeholders.getOf(ArrayList.class);
existingPlaceholders.forEach(placeholderRegistry::addPlaceholder);
var unifyContext = new UnifyContext(Writer.nullWriter(), false, true,
new UnifyResultModel(new ConstraintSet<>(), new FiniteClosure(new HashSet<>(), null, placeholderRegistry)),
new UnifyTaskModel(), ForkJoinPool.commonPool(), placeholderRegistry
);
this.factoryplaceholders.stream()
.map(p -> (PlaceholderType)UnifyType.fromSerial(p, unifyContext))
.forEach(placeholderRegistry.UnifyTypeFactory_PLACEHOLDERS::add);
// start the unification algorithm from the received data
IFiniteClosure finiteClosure = this.retrieveFiniteClosure(unifyContext);
ConstraintSet<Pair> constraintSet = this.retrieveConstraintSet(unifyContext);
ConstraintSet<UnifyPair> unifyConstraintSet = this.retrieveUnifyConstraintSet(unifyContext);
var resultModel = new UnifyResultModel(constraintSet, finiteClosure);
UnifyResultListenerImpl resultListener = new UnifyResultListenerImpl();
resultModel.addUnifyResultListener(resultListener);
TypeUnify.unifyParallel(
unifyConstraintSet.getUndConstraints(),
unifyConstraintSet.getOderConstraints(),
finiteClosure,
unifyContext.newWithResultModel(resultModel)
);
var resultSets = resultListener.getResults();
System.out.println("Finished unification for client " + webSocket.<SocketServer.SocketData>getAttachment().id);
socketServer.sendMessage(webSocket, "Unification finished. Found " + resultSets.size() + " result sets");
if (webSocket.isOpen()) {
UnifyResultPacket resultPacket = UnifyResultPacket.create(resultSets);
webSocket.send(PacketContainer.serialize(resultPacket));
}
} catch (Exception e) {
System.err.println(e);
SocketServer.log.error("e: ", e);
}
}
}
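A sketch of the client side of this exchange, assuming the caller already holds a FiniteClosure, both constraint sets and a PlaceholderRegistry, plus an open org.java_websocket connection named clientWebSocket; all variable names here are illustrative, not taken from the diff.
try {
    // Build the request from local data and ship it as a serialized PacketContainer,
    // mirroring how the UnifyResultPacket is sent back in onHandle above.
    UnifyRequestPacket request = new UnifyRequestPacket(
            finiteClosure, constraintSet, unifyConstraintSet, placeholderRegistry);
    clientWebSocket.send(PacketContainer.serialize(request));
} catch (Exception e) {
    e.printStackTrace();
}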

View File

@@ -0,0 +1,43 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.List;
import org.java_websocket.WebSocket;
/**
* A packet to send all calculated data from the unification algorithm back to the client
*/
public class UnifyResultPacket implements IServerToClientPacket {
public SerialList<ISerialNode> results;
public SerialMap keyStorage;
public static UnifyResultPacket create(List<ResultSet> resultSets) {
UnifyResultPacket serialized = new UnifyResultPacket();
KeyStorage keyStorage = new KeyStorage();
serialized.results = SerialList.fromMapped(resultSets, resultSet -> resultSet.toSerial(keyStorage));
serialized.keyStorage = keyStorage.toSerial(keyStorage);
return serialized;
}
@JsonIgnore
public List<ResultSet> getResultSet(UnifyContext context) {
return this.results.assertListOfMaps().stream()
.map(resultData -> ResultSet.fromSerial(resultData, context)).toList();
}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketClient socketClient) {
System.out.println("[socket] Received unify result");
socketClient.setUnifyResultSets(this);
}
}
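On the receiving client the stored packet can later be turned back into AST-level result sets; resultPacket and unifyContext stand for the packet handed to setUnifyResultSets and a client-side UnifyContext, both assumed to exist.
// Rebuild the ResultSet objects the server computed and report how many arrived.
List<ResultSet> resultSets = resultPacket.getResultSet(unifyContext);
System.out.println("Server returned " + resultSets.size() + " result sets");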

View File

@@ -0,0 +1,16 @@
package de.dhbwstuttgart.server.packet.dataContainers;
import de.dhbwstuttgart.exceptions.NotImplementedException;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
public interface ISerializableData {
public abstract ISerialNode toSerial(KeyStorage keyStorage);
public static Object fromSerial(SerialMap data, UnifyContext context) {
throw new NotImplementedException("Missing implementation of \"fromSerial\" for a serializable element");
}
}
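The interface only fixes the serializing direction; each implementor is expected to shadow the static fromSerial with its own signature, since the default above merely throws. A hypothetical implementor (not part of the diff) might look like this:
public class LabelledThing implements ISerializableData {
    public String label;
    @Override
    public ISerialNode toSerial(KeyStorage keyStorage) {
        SerialMap serialized = new SerialMap();
        serialized.put("label", this.label);
        return serialized;
    }
    // Shadows the interface method; resolved statically as LabelledThing.fromSerial(...)
    public static LabelledThing fromSerial(SerialMap data, UnifyContext context) {
        LabelledThing thing = new LabelledThing();
        thing.label = data.getValue("label").getOf(String.class);
        return thing;
    }
}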

View File

@@ -0,0 +1,103 @@
package de.dhbwstuttgart.server.packet.dataContainers;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
public class KeyStorage implements ISerializableData {
/**
* Store a unique identifier for every element, so it can be referenced in the json
*/
protected AtomicInteger identifierCount = new AtomicInteger();
/**
* Store the serialized element per identifier when serializing
*/
protected SerialMap serializedElements = new SerialMap();
/**
* Store the unserialized element per identifier when unserializing
*/
protected Map<String, ISerializableData> unserializedElements = new HashMap<>();
/**
* Generate a new unique identifier for a serializable element
*/
public String getIdentifier() {
return this.identifierCount.incrementAndGet() + "_";
}
/**
* Checks if the given element identifier belongs to an element that was already serialized
*/
public boolean isAlreadySerialized(String identifier) {
return this.serializedElements.containsKey(identifier);
}
/**
* Checks if the given element identifier belongs to an element that was already unserialized
*/
public boolean isAlreadyUnserialized(String identifier) {
return this.unserializedElements.containsKey(identifier);
}
/**
* Register a serialized element to prevent it from being serialized again
*/
public void putSerialized(String identifier, SerialMap serializedElement) {
this.serializedElements.put(identifier, serializedElement);
}
/**
* Retrieve a serialized element
*/
public SerialMap getSerialized(String identifier) {
if (!this.serializedElements.containsKey(identifier)) {
throw new RuntimeException("No serialized element of identifier " + identifier + " available to get");
}
return this.serializedElements.getMap(identifier);
}
/**
* Register an unserialized element to prevent it from being unserialized again
*/
public void putUnserialized(String identifier, ISerializableData element) {
this.unserializedElements.put(identifier, element);
}
/**
* Retrieve an unserialized element
*/
public <T extends ISerializableData> T getUnserialized(String identifier, Class<T> target) {
if (!this.unserializedElements.containsKey(identifier)) {
throw new RuntimeException("No unserialized element of identifier " + identifier + " available to get");
}
var element = this.unserializedElements.get(identifier);
if (target.isInstance(element)) {
return (T) element;
}
throw new RuntimeException("Failed to get unserialized element from KeyStorage. Expected instance of " +
target.getName() + " but found " + element.getClass().getName());
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("serializedElements", this.serializedElements);
return serialized;
}
public static KeyStorage fromSerial(SerialMap data, UnifyContext context) {
var serializedConstraintsData = data.getMap("serializedElements");
var constraintContext = new KeyStorage();
for (var entry : serializedConstraintsData.entrySet()) {
if (entry.getValue() instanceof SerialMap valueMap) {
constraintContext.putSerialized(entry.getKey(), valueMap);
}
}
return constraintContext;
}
}
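A sketch of the intended deduplication pattern inside a hypothetical implementor whose instances may be referenced from several places; the identifier and payload fields are assumptions, while the KeyStorage calls match the API above.
// Serialize the element at most once, register it under its identifier, and let
// every other occurrence embed only a SerialUUID pointing into the KeyStorage.
public ISerialNode toSerial(KeyStorage keyStorage) {
    if (!keyStorage.isAlreadySerialized(this.identifier)) {
        SerialMap serialized = new SerialMap();
        serialized.put("payload", this.payload);              // hypothetical String field
        keyStorage.putSerialized(this.identifier, serialized);
    }
    return new SerialUUID(this.identifier);
}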

View File

@@ -0,0 +1,31 @@
package de.dhbwstuttgart.server.packet.dataContainers.serialized;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
/**
* Use the following classes for an intermediate serialized tree structure
*/
@JsonTypeInfo(
use = JsonTypeInfo.Id.NAME,
include = JsonTypeInfo.As.PROPERTY,
property = "_t"
)
@JsonSubTypes({
@JsonSubTypes.Type(value = SerialMap.class, name = "m"),
@JsonSubTypes.Type(value = SerialList.class, name = "l"),
@JsonSubTypes.Type(value = SerialValue.class, name = "v"),
@JsonSubTypes.Type(value = SerialUUID.class, name = "u")
})
public interface ISerialNode {
default <T extends ISerialNode> T assertType(Class<T> type) {
if (type.isInstance(this)) {
return (T) this;
}
throw new RuntimeException("Expected ISerialNode to be of type " + type.getName()
+ " but found " + this.getClass().getName() + " instead");
}
}
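Jackson writes the short _t discriminator into every serialized node, so a whole tree can be read back polymorphically; assertType then narrows a node wherever a specific shape is expected. A short usage sketch, with node standing for any previously deserialized ISerialNode:
// Narrow the untyped node to the concrete type expected at this position,
// failing loudly if the serialized tree has an unexpected shape.
SerialMap object = node.assertType(SerialMap.class);
SerialList<?> parameters = object.getList("parameters");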

View File

@@ -0,0 +1,74 @@
package de.dhbwstuttgart.server.packet.dataContainers.serialized;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.function.Function;
import java.util.stream.Stream;
public class SerialList<I extends ISerialNode> extends ArrayList<I> implements ISerialNode {
public SerialList() {}
public SerialList(Collection<I> data) {
this.addAll(data);
}
public SerialList(Stream<I> data) {
this(data.toList());
}
public SerialList(I[] data) {
this(Arrays.stream(data).toList());
}
@SafeVarargs
@JsonIgnore
public static <A extends ISerialNode> ArrayList<A> from(A ...values) {
ArrayList<A> list = new SerialList<>();
Collections.addAll(list, values);
return list;
}
@JsonIgnore
public static <A,B extends ISerialNode> SerialList<B> fromMapped(Stream<A> data, Function<A,B> mapper) {
return new SerialList<>(data.map(mapper).toList());
}
@JsonIgnore
public static <A,B extends ISerialNode> SerialList<B> fromMapped(Collection<A> data, Function<A,B> mapper) {
return SerialList.fromMapped(data.stream(), mapper);
}
@JsonIgnore
public static <A,B extends ISerialNode> SerialList<B> fromMapped(A[] data, Function<A,B> mapper) {
return SerialList.fromMapped(Arrays.stream(data), mapper);
}
@JsonIgnore
public SerialList<SerialMap> assertListOfMaps() {
if (this.isEmpty() || this.get(0) instanceof SerialMap) {
return (SerialList<SerialMap>) this;
}
throw new RuntimeException("Required List to contain SerialMap elements but condition failed");
}
@JsonIgnore
public SerialList<SerialList<?>> assertListOfLists() {
if (this.isEmpty() || this.get(0) instanceof SerialList) {
return (SerialList<SerialList<?>>) this;
}
throw new RuntimeException("Required List to contain SerialList elements but condition failed");
}
@JsonIgnore
public SerialList<SerialValue<?>> assertListOfValues() {
if (this.isEmpty() || this.get(0) instanceof SerialValue) {
return (SerialList<SerialValue<?>>) this;
}
throw new RuntimeException("Required List to contain SerialValue elements but condition failed");
}
@JsonIgnore
public SerialList<SerialUUID> assertListOfUUIDs() {
if (this.isEmpty() || this.get(0) instanceof SerialUUID) {
return (SerialList<SerialUUID>) this;
}
throw new RuntimeException("Required List to contain SerialUUID elements but condition failed");
}
}
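A usage sketch for the factory and assert helpers; pairs is assumed to be a collection whose elements implement ISerializableData and serialize to a SerialMap, and someMap a previously deserialized SerialMap holding such a list under a hypothetical key.
// Map a collection into a SerialList of SerialMaps for transport.
SerialList<SerialMap> serializedPairs = SerialList.fromMapped(
        pairs, pair -> pair.toSerial(keyStorage).assertType(SerialMap.class));
// After deserialization the element type is only known at runtime again.
SerialList<?> raw = someMap.getList("pairs");
SerialList<SerialMap> restored = raw.assertListOfMaps();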

View File

@@ -0,0 +1,84 @@
package de.dhbwstuttgart.server.packet.dataContainers.serialized;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.HashMap;
import java.util.Map;
import javax.annotation.Nullable;
public class SerialMap extends HashMap<String, ISerialNode> implements ISerialNode {
public SerialMap() {
super();
}
public SerialMap(Map<String, ISerialNode> data) {
super(data);
}
@JsonIgnore
public void put(String key, Boolean value) {
this.put(key, new SerialValue<>(value));
}
@JsonIgnore
public void put(String key, String value) {
this.put(key, new SerialValue<>(value));
}
@JsonIgnore
public void put(String key, Number value) {
this.put(key, new SerialValue<>(value));
}
@JsonIgnore
private <T> T get(String key, Class<T> expectedType) {
if (!this.containsKey(key)) {
throw new RuntimeException("Missing required value " + key + " in ObjectMap");
}
var element = this.get(key);
if (element != null && element.getClass() != expectedType) {
throw new RuntimeException(
"Required value " + key + " to be of type " + expectedType.getName() + " but found " + element.getClass().getName()
);
}
return (T)element;
}
@JsonIgnore
public SerialList<?> getList(String key) {
return this.get(key, SerialList.class);
}
@Nullable
@JsonIgnore
public SerialList<?> getListOrNull(String key) {
return this.containsKey(key) ? this.getList(key) : null;
}
@JsonIgnore
public SerialMap getMap(String key) {
return this.get(key, SerialMap.class);
}
@Nullable
@JsonIgnore
public SerialMap getMapOrNull(String key) {
return this.containsKey(key) ? this.getMap(key) : null;
}
@JsonIgnore
public SerialValue<?> getValue(String key) {
return this.get(key, SerialValue.class);
}
@JsonIgnore
public SerialUUID getUUID(String key) {
return this.get(key, SerialUUID.class);
}
@Nullable
@JsonIgnore
public SerialUUID getUUIDOrNull(String key) {
return this.containsKey(key) ? this.getUUID(key) : null;
}
}
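A short sketch of the typed accessors: the plain getters throw when a key is missing or the stored node has the wrong type, while the ...OrNull variants tolerate absent keys.
SerialMap map = new SerialMap();
map.put("name", "java.util.List");   // wrapped into a SerialValue<String>
map.put("variance", 1);              // wrapped into a SerialValue<Number>
map.put("isPrimitive", false);       // wrapped into a SerialValue<Boolean>
String name = map.getValue("name").getOf(String.class);
SerialMap object = map.getMapOrNull("object");   // missing key: null, no exception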

View File

@@ -0,0 +1,13 @@
package de.dhbwstuttgart.server.packet.dataContainers.serialized;
public class SerialUUID implements ISerialNode {
public String uuid;
public SerialUUID() {}
public SerialUUID(String uuid) {
this.uuid = uuid;
}
}

View File

@@ -0,0 +1,28 @@
package de.dhbwstuttgart.server.packet.dataContainers.serialized;
import com.fasterxml.jackson.annotation.JsonIgnore;
public class SerialValue<T> implements ISerialNode {
public T value;
public static final SerialValue<Object> NULL = new SerialValue<>(null);
public SerialValue() {}
public SerialValue(T value) {
this.value = value;
}
@JsonIgnore
public <A> SerialValue<A> assertValueOf(Class<A> targetClass) {
if (this.value == null || targetClass.isInstance(this.value)) {
return (SerialValue<A>) this;
}
throw new RuntimeException("Required Value to contain " + targetClass.getName() + " value but condition failed on" +
" type " + this.value.getClass().getName());
}
@JsonIgnore
public <A> A getOf(Class<A> targetClass) {
return this.assertValueOf(targetClass).value;
}
}
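assertValueOf deliberately lets null pass the check, which is what makes the shared SerialValue.NULL constant usable in place of any typed value; a short sketch:
SerialValue<?> count = new SerialValue<>(42);
Integer n = count.getOf(Integer.class);                 // the wrapped value is an Integer
String missing = SerialValue.NULL.getOf(String.class);  // null passes the type check too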

View File

@@ -1,5 +1,8 @@
package de.dhbwstuttgart.syntaxtree.factory;
+import de.dhbwstuttgart.core.JavaTXCompiler;
+import de.dhbwstuttgart.core.JavaTXServer;
public class NameGenerator {
private static String strNextName = "A";
@@ -26,7 +29,11 @@ public class NameGenerator {
// nächster Name berechnen und in strNextName speichern
inc( strNextName.length() - 1 );
+if (JavaTXServer.isRunning) {
+throw new RuntimeException("Using the NameGenerator on a server is not allowed");
+}
+JavaTXCompiler.defaultClientPlaceholderRegistry.addPlaceholder(strReturn);
return strReturn;
}

View File

@@ -1,5 +1,6 @@
package de.dhbwstuttgart.syntaxtree.factory; package de.dhbwstuttgart.syntaxtree.factory;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import java.io.Writer; import java.io.Writer;
import java.lang.reflect.Modifier; import java.lang.reflect.Modifier;
import java.util.*; import java.util.*;
@@ -31,9 +32,13 @@ import org.antlr.v4.runtime.Token;
public class UnifyTypeFactory { public class UnifyTypeFactory {
private static ArrayList<PlaceholderType> PLACEHOLDERS = new ArrayList<>(); public static FiniteClosure generateFC(
List<ClassOrInterface> fromClasses,
public static FiniteClosure generateFC(List<ClassOrInterface> fromClasses, Writer logFile, ClassLoader classLoader, JavaTXCompiler compiler) throws ClassNotFoundException { Writer logFile,
ClassLoader classLoader,
JavaTXCompiler compiler,
PlaceholderRegistry placeholderRegistry
) throws ClassNotFoundException {
/* /*
Die transitive Hülle muss funktionieren. Die transitive Hülle muss funktionieren.
Man darf schreiben List<A> extends AL<A> Man darf schreiben List<A> extends AL<A>
@@ -44,7 +49,7 @@ public class UnifyTypeFactory {
Generell dürfen sie immer die gleichen Namen haben. Generell dürfen sie immer die gleichen Namen haben.
TODO: die transitive Hülle bilden TODO: die transitive Hülle bilden
*/ */
return new FiniteClosure(FCGenerator.toUnifyFC(compiler, fromClasses, classLoader), logFile, compiler); return new FiniteClosure(FCGenerator.toUnifyFC(compiler, fromClasses, classLoader, placeholderRegistry), logFile, compiler, placeholderRegistry);
} }
public static UnifyPair generateSmallerPair(UnifyType tl, UnifyType tr, SourceLoc location){ public static UnifyPair generateSmallerPair(UnifyType tl, UnifyType tr, SourceLoc location){
@@ -67,23 +72,23 @@ public class UnifyTypeFactory {
* Convert from * Convert from
* ASTType -> UnifyType * ASTType -> UnifyType
*/ */
public static UnifyType convert(JavaTXCompiler compiler, RefTypeOrTPHOrWildcardOrGeneric t, Boolean innerType){ public static UnifyType convert(JavaTXCompiler compiler, RefTypeOrTPHOrWildcardOrGeneric t, Boolean innerType, PlaceholderRegistry placeholderRegistry){
if (t instanceof GenericRefType){ if (t instanceof GenericRefType){
return UnifyTypeFactory.convert(compiler, (GenericRefType)t, innerType); return UnifyTypeFactory.convert(compiler, (GenericRefType)t, innerType, placeholderRegistry);
} else if (t instanceof TypePlaceholder){ } else if (t instanceof TypePlaceholder){
return UnifyTypeFactory.convert(compiler, (TypePlaceholder)t, innerType); return UnifyTypeFactory.convert(compiler, (TypePlaceholder)t, innerType, placeholderRegistry);
} else if (t instanceof ExtendsWildcardType){ } else if (t instanceof ExtendsWildcardType){
return UnifyTypeFactory.convert(compiler, (ExtendsWildcardType)t, innerType); return UnifyTypeFactory.convert(compiler, (ExtendsWildcardType)t, innerType, placeholderRegistry);
} else if (t instanceof SuperWildcardType) { } else if (t instanceof SuperWildcardType) {
return UnifyTypeFactory.convert(compiler, (SuperWildcardType) t, innerType); return UnifyTypeFactory.convert(compiler, (SuperWildcardType) t, innerType, placeholderRegistry);
} else if (t instanceof RefType){ } else if (t instanceof RefType){
return UnifyTypeFactory.convert(compiler, (RefType)t, innerType); return UnifyTypeFactory.convert(compiler, (RefType)t, innerType, placeholderRegistry);
} }
//Es wurde versucht ein Typ umzuwandeln, welcher noch nicht von der Factory abgedeckt ist //Es wurde versucht ein Typ umzuwandeln, welcher noch nicht von der Factory abgedeckt ist
throw new NotImplementedException("Der Typ "+t+" kann nicht umgewandelt werden"); throw new NotImplementedException("Der Typ "+t+" kann nicht umgewandelt werden");
} }
public static UnifyType convert(JavaTXCompiler compiler, RefType t, Boolean innerType){ public static UnifyType convert(JavaTXCompiler compiler, RefType t, Boolean innerType, PlaceholderRegistry placeholderRegistry){
//Check if it is a FunN Type: //Check if it is a FunN Type:
Pattern p = Pattern.compile("Fun(\\d+)[$][$]"); Pattern p = Pattern.compile("Fun(\\d+)[$][$]");
Matcher m = p.matcher(t.getName().toString()); Matcher m = p.matcher(t.getName().toString());
@@ -91,76 +96,76 @@ public class UnifyTypeFactory {
if(b){ if(b){
Integer N = Integer.valueOf(m.group(1)); Integer N = Integer.valueOf(m.group(1));
if((N + 1) == t.getParaList().size()){ if((N + 1) == t.getParaList().size()){
return convertFunN(compiler, t.getParaList(), false); return convertFunN(compiler, t.getParaList(), false, placeholderRegistry);
} }
} }
UnifyType ret; UnifyType ret;
List<UnifyType> params = new ArrayList<>(); List<UnifyType> params = new ArrayList<>();
if (t.getParaList() != null) { if (t.getParaList() != null) {
for (RefTypeOrTPHOrWildcardOrGeneric pT : t.getParaList()) { for (RefTypeOrTPHOrWildcardOrGeneric pT : t.getParaList()) {
params.add(UnifyTypeFactory.convert(compiler, pT, true)); params.add(UnifyTypeFactory.convert(compiler, pT, true, placeholderRegistry));
} }
} }
var clazz = compiler.getClass(t.getName()); var clazz = compiler.getClass(t.getName());
if (clazz != null && clazz.isInterface() && clazz.isFunctionalInterface()) { if (clazz != null && clazz.isInterface() && clazz.isFunctionalInterface()) {
var method = clazz.getMethods().stream().filter(x -> Modifier.isAbstract(x.modifier)).findFirst().orElseThrow(); var method = clazz.getMethods().stream().filter(x -> Modifier.isAbstract(x.modifier)).findFirst().orElseThrow();
var methodParams = method.getParameterList().getFormalparalist().stream().map(x -> convert(compiler, x.getType(), true)).toList(); var methodParams = method.getParameterList().getFormalparalist().stream().map(x -> convert(compiler, x.getType(), true, placeholderRegistry)).toList();
var generics = StreamSupport.stream(clazz.getGenerics().spliterator(), false).map(GenericTypeVar::getName).toList(); var generics = StreamSupport.stream(clazz.getGenerics().spliterator(), false).map(GenericTypeVar::getName).toList();
return new FunInterfaceType(t.getName().toString(), new TypeParams(params), methodParams, convert(compiler, method.getReturnType(), true), generics); return new FunInterfaceType(t.getName().toString(), new TypeParams(params), methodParams, convert(compiler, method.getReturnType(), true, placeholderRegistry), generics);
} }
return new ReferenceType(t.getName().toString(),new TypeParams(params)); return new ReferenceType(t.getName().toString(),new TypeParams(params));
} }
public static UnifyType convertFunN(JavaTXCompiler compiler, List<RefTypeOrTPHOrWildcardOrGeneric> paraList, Boolean innerType){ public static UnifyType convertFunN(JavaTXCompiler compiler, List<RefTypeOrTPHOrWildcardOrGeneric> paraList, Boolean innerType, PlaceholderRegistry placeholderRegistry){
UnifyType ret; UnifyType ret;
List<UnifyType> params = new ArrayList<>(); List<UnifyType> params = new ArrayList<>();
if(paraList != null && paraList.size() > 0){ if(paraList != null && paraList.size() > 0){
for(RefTypeOrTPHOrWildcardOrGeneric pT : paraList){ for(RefTypeOrTPHOrWildcardOrGeneric pT : paraList){
params.add(UnifyTypeFactory.convert(compiler, pT, false)); params.add(UnifyTypeFactory.convert(compiler, pT, false, placeholderRegistry));
} }
} }
ret = FunNType.getFunNType(new TypeParams(params)); ret = FunNType.getFunNType(new TypeParams(params));
return ret; return ret;
} }
public static UnifyType convert(JavaTXCompiler compiler, TypePlaceholder tph, Boolean innerType){ public static UnifyType convert(JavaTXCompiler compiler, TypePlaceholder tph, Boolean innerType, PlaceholderRegistry placeholderRegistry) {
if (tph.getName().equals("AFR")) { if (tph.getName().equals("AFR")) {
System.out.println("XXX"+innerType); System.out.println("XXX"+innerType);
} }
PlaceholderType ntph = new PlaceholderType(tph.getName(), tph.getVariance()); PlaceholderType ntph = new PlaceholderType(tph.getName(), tph.getVariance(), placeholderRegistry);
ntph.setVariance(tph.getVariance()); ntph.setVariance(tph.getVariance());
ntph.setOrCons(tph.getOrCons()); ntph.setOrCons(tph.getOrCons());
ntph.setWildcardtable(tph.getWildcardtable()); ntph.setWildcardtable(tph.getWildcardtable());
int in = PLACEHOLDERS.indexOf(ntph); int in = placeholderRegistry.UnifyTypeFactory_PLACEHOLDERS.indexOf(ntph);
if (in == -1) { if (in == -1) {
PLACEHOLDERS.add(ntph); placeholderRegistry.UnifyTypeFactory_PLACEHOLDERS.add(ntph);
ntph.setInnerType(innerType); ntph.setInnerType(innerType);
return ntph; return ntph;
} }
else { else {
PlaceholderType oldpht = PLACEHOLDERS.get(in); PlaceholderType oldpht = placeholderRegistry.UnifyTypeFactory_PLACEHOLDERS.get(in);
oldpht.setInnerType(oldpht.isInnerType() || innerType); oldpht.setInnerType(oldpht.isInnerType() || innerType);
return oldpht; return oldpht;
} }
} }
public static UnifyType convert(JavaTXCompiler compiler, GenericRefType t, Boolean innerType){ public static UnifyType convert(JavaTXCompiler compiler, GenericRefType t, Boolean innerType, PlaceholderRegistry placeholderRegistry){
return new ReferenceType(t.getParsedName(), true); return new ReferenceType(t.getParsedName(), true);
} }
public static UnifyType convert(JavaTXCompiler compiler, WildcardType t, Boolean innerType){ public static UnifyType convert(JavaTXCompiler compiler, WildcardType t, Boolean innerType, PlaceholderRegistry placeholderRegistry){
if(t.isExtends()) if(t.isExtends())
return new ExtendsType(UnifyTypeFactory.convert(compiler, t.getInnerType(), false)); return new ExtendsType(UnifyTypeFactory.convert(compiler, t.getInnerType(), false, placeholderRegistry));
else if(t.isSuper()) else if(t.isSuper())
return new SuperType(UnifyTypeFactory.convert(compiler, t.getInnerType(), false)); return new SuperType(UnifyTypeFactory.convert(compiler, t.getInnerType(), false, placeholderRegistry));
else throw new NotImplementedException(); else throw new NotImplementedException();
} }
public static ConstraintSet<UnifyPair> convert(JavaTXCompiler compiler, ConstraintSet<Pair> constraints) { public static ConstraintSet<UnifyPair> convert(JavaTXCompiler compiler, ConstraintSet<Pair> constraints, PlaceholderRegistry placeholderRegistry) {
return constraints.map(c -> UnifyTypeFactory.convert(compiler, c)); return constraints.map(c -> UnifyTypeFactory.convert(compiler, c, placeholderRegistry));
} }
//NEVER USED //NEVER USED
@@ -171,30 +176,30 @@ public class UnifyTypeFactory {
// return unifyPairConstraint; // return unifyPairConstraint;
//} //}
public static UnifyPair convert(JavaTXCompiler compiler, Pair p) { public static UnifyPair convert(JavaTXCompiler compiler, Pair p, PlaceholderRegistry placeholderRegistry) {
UnifyPair ret = null; UnifyPair ret = null;
if(p.GetOperator().equals(PairOperator.SMALLERDOT)) { if(p.GetOperator().equals(PairOperator.SMALLERDOT)) {
ret = generateSmallerDotPair(UnifyTypeFactory.convert(compiler, p.TA1, false) ret = generateSmallerDotPair(UnifyTypeFactory.convert(compiler, p.TA1, false, placeholderRegistry)
, UnifyTypeFactory.convert(compiler, p.TA2, false), p.getLocation()); , UnifyTypeFactory.convert(compiler, p.TA2, false, placeholderRegistry), p.getLocation());
//return ret; //return ret;
}else if(p.GetOperator().equals(PairOperator.SMALLERNEQDOT)) { }else if(p.GetOperator().equals(PairOperator.SMALLERNEQDOT)) {
ret = generateSmallNotEqualDotPair(UnifyTypeFactory.convert(compiler, p.TA1, false) ret = generateSmallNotEqualDotPair(UnifyTypeFactory.convert(compiler, p.TA1, false, placeholderRegistry)
, UnifyTypeFactory.convert(compiler, p.TA2, false), p.getLocation()); , UnifyTypeFactory.convert(compiler, p.TA2, false, placeholderRegistry), p.getLocation());
//return ret; //return ret;
}else if(p.GetOperator().equals(PairOperator.EQUALSDOT)) { }else if(p.GetOperator().equals(PairOperator.EQUALSDOT)) {
ret = generateEqualDotPair(UnifyTypeFactory.convert(compiler, p.TA1, false) ret = generateEqualDotPair(UnifyTypeFactory.convert(compiler, p.TA1, false, placeholderRegistry)
, UnifyTypeFactory.convert(compiler, p.TA2, false), p.getLocation()); , UnifyTypeFactory.convert(compiler, p.TA2, false, placeholderRegistry), p.getLocation());
//return ret; //return ret;
}else if(p.GetOperator().equals(PairOperator.SMALLER)){ }else if(p.GetOperator().equals(PairOperator.SMALLER)){
ret = generateSmallerPair(UnifyTypeFactory.convert(compiler, p.TA1, false), ret = generateSmallerPair(UnifyTypeFactory.convert(compiler, p.TA1, false, placeholderRegistry),
UnifyTypeFactory.convert(compiler, p.TA2, false), p.getLocation()); UnifyTypeFactory.convert(compiler, p.TA2, false, placeholderRegistry), p.getLocation());
}else throw new NotImplementedException(); }else throw new NotImplementedException();
UnifyType lhs, rhs; UnifyType lhs, rhs;
if (((lhs = ret.getLhsType()) instanceof PlaceholderType) if (((lhs = ret.getLhsType()) instanceof PlaceholderType)
&& ((PlaceholderType)lhs).isWildcardable() && ((PlaceholderType)lhs).isWildcardable()
&& (rhs = ret.getLhsType()) instanceof PlaceholderType) { && (rhs = ret.getLhsType()) instanceof PlaceholderType) {
if (lhs.getName().equals("AQ")) { if (lhs.getName().equals("AQ")) {
System.out.println(""); // System.out.println("");
} }
((PlaceholderType)rhs).enableWildcardtable(); ((PlaceholderType)rhs).enableWildcardtable();
} }
@@ -203,7 +208,7 @@ public class UnifyTypeFactory {
&& ((PlaceholderType)rhs).isWildcardable() && ((PlaceholderType)rhs).isWildcardable()
&& (lhs = ret.getLhsType()) instanceof PlaceholderType) { && (lhs = ret.getLhsType()) instanceof PlaceholderType) {
if (rhs.getName().equals("AQ")) { if (rhs.getName().equals("AQ")) {
System.out.println(""); // System.out.println("");
} }
((PlaceholderType)lhs).enableWildcardtable(); ((PlaceholderType)lhs).enableWildcardtable();
} }
@@ -214,16 +219,16 @@ public class UnifyTypeFactory {
* Convert from * Convert from
* UnifyType -> ASTType * UnifyType -> ASTType
*/ */
public static Set<ResultPair> convert(Set<UnifyPair> unifyPairSet, Map<String,TypePlaceholder> tphs) { public static Set<ResultPair> convert(Set<UnifyPair> unifyPairSet, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
return unifyPairSet.stream().map( return unifyPairSet.stream().map(
unifyPair -> convert(unifyPair, tphs)) unifyPair -> convert(unifyPair, tphs, placeholderRegistry))
.collect(Collectors.toSet()); .collect(Collectors.toSet());
} }
public static ResultPair convert(UnifyPair mp, Map<String,TypePlaceholder> tphs) { public static ResultPair convert(UnifyPair mp, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
if (mp == null) { return null;} //kann bei basePairs passieren if (mp == null) { return null;} //kann bei basePairs passieren
RefTypeOrTPHOrWildcardOrGeneric tl = UnifyTypeFactory.convert(mp.getLhsType(), tphs); RefTypeOrTPHOrWildcardOrGeneric tl = UnifyTypeFactory.convert(mp.getLhsType(), tphs, placeholderRegistry);
RefTypeOrTPHOrWildcardOrGeneric tr = UnifyTypeFactory.convert(mp.getRhsType(), tphs); RefTypeOrTPHOrWildcardOrGeneric tr = UnifyTypeFactory.convert(mp.getRhsType(), tphs, placeholderRegistry);
if(tl instanceof TypePlaceholder){ if(tl instanceof TypePlaceholder){
if(tr instanceof TypePlaceholder) { if(tr instanceof TypePlaceholder) {
@@ -232,7 +237,7 @@ public class UnifyTypeFactory {
//Einfach ignorieren TODO: Das hier muss ausgebessert werden: //Einfach ignorieren TODO: Das hier muss ausgebessert werden:
//return new PairTPHequalRefTypeOrWildcardType((TypePlaceholder)tl, ASTFactory.createObjectType()); //return new PairTPHequalRefTypeOrWildcardType((TypePlaceholder)tl, ASTFactory.createObjectType());
}else{ }else{
return new PairTPHsmallerTPH((TypePlaceholder)tl, (TypePlaceholder)tr, convert(mp.getBasePair(), tphs)); return new PairTPHsmallerTPH((TypePlaceholder)tl, (TypePlaceholder)tr, convert(mp.getBasePair(), tphs, placeholderRegistry));
} }
}else if(tr instanceof RefType){ }else if(tr instanceof RefType){
return new PairTPHequalRefTypeOrWildcardType((TypePlaceholder)tl, (RefType) tr); return new PairTPHequalRefTypeOrWildcardType((TypePlaceholder)tl, (RefType) tr);
@@ -244,51 +249,51 @@ public class UnifyTypeFactory {
}else return new PairNoResult(tl, tr);//throw new NotImplementedException(); }else return new PairNoResult(tl, tr);//throw new NotImplementedException();
} }
public static RefTypeOrTPHOrWildcardOrGeneric convert(ReferenceType t, Map<String,TypePlaceholder> tphs) { public static RefTypeOrTPHOrWildcardOrGeneric convert(ReferenceType t, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
if(JavaClassName.Void.equals(t.getName()))return new Void(new NullToken()); if(JavaClassName.Void.equals(t.getName()))return new Void(new NullToken());
if (t.isGenTypeVar()) return new GenericRefType(t.getName(),new NullToken()); if (t.isGenTypeVar()) return new GenericRefType(t.getName(),new NullToken());
RefType ret = new RefType(new JavaClassName(t.getName()),convert(t.getTypeParams(), tphs),new NullToken()); RefType ret = new RefType(new JavaClassName(t.getName()),convert(t.getTypeParams(), tphs, placeholderRegistry),new NullToken());
return ret; return ret;
} }
public static RefTypeOrTPHOrWildcardOrGeneric convert(FunNType t, Map<String,TypePlaceholder> tphs) { public static RefTypeOrTPHOrWildcardOrGeneric convert(FunNType t, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
RefType ret = new RefType(new JavaClassName(t.getName()), convert(t.getTypeParams(), tphs), new NullToken()); RefType ret = new RefType(new JavaClassName(t.getName()), convert(t.getTypeParams(), tphs, placeholderRegistry), new NullToken());
return ret; return ret;
} }
public static RefTypeOrTPHOrWildcardOrGeneric convert(SuperType t, Map<String,TypePlaceholder> tphs) { public static RefTypeOrTPHOrWildcardOrGeneric convert(SuperType t, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
RefTypeOrTPHOrWildcardOrGeneric innerType = convert(t.getSuperedType(), tphs); RefTypeOrTPHOrWildcardOrGeneric innerType = convert(t.getSuperedType(), tphs, placeholderRegistry);
return new SuperWildcardType(innerType, new NullToken()); return new SuperWildcardType(innerType, new NullToken());
} }
public static RefTypeOrTPHOrWildcardOrGeneric convert(ExtendsType t, Map<String,TypePlaceholder> tphs) { public static RefTypeOrTPHOrWildcardOrGeneric convert(ExtendsType t, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
RefTypeOrTPHOrWildcardOrGeneric innerType = convert(t.getExtendedType(), tphs); RefTypeOrTPHOrWildcardOrGeneric innerType = convert(t.getExtendedType(), tphs, placeholderRegistry);
return new ExtendsWildcardType(innerType, new NullToken()); return new ExtendsWildcardType(innerType, new NullToken());
} }
public static RefTypeOrTPHOrWildcardOrGeneric convert(PlaceholderType t, Map<String,TypePlaceholder> tphs) { public static RefTypeOrTPHOrWildcardOrGeneric convert(PlaceholderType t, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
TypePlaceholder ret = tphs.get(t.getName()); TypePlaceholder ret = tphs.get(t.getName());
if(ret == null){ //Dieser TPH wurde vom Unifikationsalgorithmus erstellt if(ret == null){ //Dieser TPH wurde vom Unifikationsalgorithmus erstellt
ret = TypePlaceholder.fresh(new NullToken()); ret = TypePlaceholder.fresh(new NullToken(), placeholderRegistry);
tphs.put(t.getName(), ret); tphs.put(t.getName(), ret);
} }
ret.setVariance(t.getVariance()); ret.setVariance(t.getVariance());
return ret; return ret;
} }
public static RefTypeOrTPHOrWildcardOrGeneric convert(UnifyType t, Map<String,TypePlaceholder> tphs) { public static RefTypeOrTPHOrWildcardOrGeneric convert(UnifyType t, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
if(t instanceof FunNType)return convert((FunNType) t, tphs); if(t instanceof FunNType)return convert((FunNType) t, tphs, placeholderRegistry);
if(t instanceof ReferenceType)return convert((ReferenceType) t, tphs); if(t instanceof ReferenceType)return convert((ReferenceType) t, tphs, placeholderRegistry);
if(t instanceof SuperType)return convert((SuperType) t, tphs); if(t instanceof SuperType)return convert((SuperType) t, tphs, placeholderRegistry);
if(t instanceof ExtendsType)return convert((ExtendsType) t, tphs); if(t instanceof ExtendsType)return convert((ExtendsType) t, tphs, placeholderRegistry);
if(t instanceof PlaceholderType)return convert((PlaceholderType) t, tphs); if(t instanceof PlaceholderType)return convert((PlaceholderType) t, tphs, placeholderRegistry);
throw new NotImplementedException("Der Typ "+t+" kann nicht umgewandelt werden"); throw new NotImplementedException("Der Typ "+t+" kann nicht umgewandelt werden");
} }
private static List<RefTypeOrTPHOrWildcardOrGeneric> convert(TypeParams typeParams, Map<String,TypePlaceholder> tphs) { private static List<RefTypeOrTPHOrWildcardOrGeneric> convert(TypeParams typeParams, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
List<RefTypeOrTPHOrWildcardOrGeneric> ret = new ArrayList<>(); List<RefTypeOrTPHOrWildcardOrGeneric> ret = new ArrayList<>();
for(UnifyType uT : typeParams){ for(UnifyType uT : typeParams){
RefTypeOrTPHOrWildcardOrGeneric toAdd = convert(uT, tphs); RefTypeOrTPHOrWildcardOrGeneric toAdd = convert(uT, tphs, placeholderRegistry);
ret.add(toAdd); ret.add(toAdd);
} }
return ret; return ret;

View File

@@ -1,8 +1,13 @@
package de.dhbwstuttgart.syntaxtree.type;
+import de.dhbwstuttgart.parser.NullToken;
+import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
+import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
+import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.ASTVisitor;
import de.dhbwstuttgart.typeinference.result.ResultSetVisitor;
+import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import org.antlr.v4.runtime.Token;
import java.util.Objects;
@@ -15,7 +20,7 @@ import java.util.Objects;
*
*/
-public class ExtendsWildcardType extends WildcardType{
+public class ExtendsWildcardType extends WildcardType implements ISerializableData {
/**
* Author: Arne Lüdtke<br/>
@@ -68,4 +73,22 @@ public class ExtendsWildcardType extends WildcardType{
ExtendsWildcardType that = (ExtendsWildcardType) o;
return that.innerType.equals(this.innerType);
}
+@Override
+public SerialMap toSerial(KeyStorage keyStorage) {
+SerialMap serialized = new SerialMap();
+serialized.put("innerType", this.innerType.toSerial(keyStorage));
+// create the wrapper and put this as the object
+var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
+serializedWrapper.put("object", serialized);
+return serializedWrapper;
+}
+public static ExtendsWildcardType fromSerial(SerialMap data, UnifyContext context) {
+return new ExtendsWildcardType(
+RefTypeOrTPHOrWildcardOrGeneric.fromSerial(data.getMap("innerType"), context),
+new NullToken()
+);
+}
}

View File

@@ -1,57 +1,77 @@
package de.dhbwstuttgart.syntaxtree.type;
+import de.dhbwstuttgart.parser.NullToken;
+import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
+import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
+import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.ASTVisitor;
import de.dhbwstuttgart.typeinference.result.ResultSetVisitor;
+import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import org.antlr.v4.runtime.Token;
import java.util.Objects;
-public class GenericRefType extends RefTypeOrTPHOrWildcardOrGeneric
+public class GenericRefType extends RefTypeOrTPHOrWildcardOrGeneric implements ISerializableData {
private String name;
public GenericRefType(String name, Token offset) {
super(offset);
this.name = name;
}
public String getParsedName() {
return name.toString();
}
@Override
public void accept(ASTVisitor visitor) {
visitor.visit(this);
}
@Override
public <A> A acceptTV(TypeVisitor<A> visitor) {
return visitor.visit(this);
}
@Override
public void accept(ResultSetVisitor visitor) {
visitor.visit(this);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
GenericRefType that = (GenericRefType) o;
return name.equals(that.name);
}
@Override
public int hashCode() {
return Objects.hash(name);
}
@Override
public String toString() {
return "GTV " + this.name;
}
+@Override
+public SerialMap toSerial(KeyStorage keyStorage) {
+SerialMap serialized = new SerialMap();
+serialized.put("name", this.name);
+// create the wrapper and put this as the object
+var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
+serializedWrapper.put("object", serialized);
+return serializedWrapper;
+}
+public static GenericRefType fromSerial(SerialMap data, UnifyContext context) {
+return new GenericRefType(
+data.getValue("name").getOf(String.class),
+new NullToken()
+);
+}
}

View File

@@ -1,8 +1,15 @@
package de.dhbwstuttgart.syntaxtree.type; package de.dhbwstuttgart.syntaxtree.type;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.parser.scope.JavaClassName; import de.dhbwstuttgart.parser.scope.JavaClassName;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.ASTVisitor; import de.dhbwstuttgart.syntaxtree.ASTVisitor;
import de.dhbwstuttgart.typeinference.result.ResultSetVisitor; import de.dhbwstuttgart.typeinference.result.ResultSetVisitor;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import org.antlr.v4.runtime.Token; import org.antlr.v4.runtime.Token;
import java.util.ArrayList; import java.util.ArrayList;
@@ -11,122 +18,137 @@ import java.util.List;
import java.util.Objects; import java.util.Objects;
public class RefType extends RefTypeOrTPHOrWildcardOrGeneric public class RefType extends RefTypeOrTPHOrWildcardOrGeneric implements ISerializableData {
{ protected final JavaClassName name;
protected final JavaClassName name; protected final List<RefTypeOrTPHOrWildcardOrGeneric> parameter;
protected final List<RefTypeOrTPHOrWildcardOrGeneric> parameter; /**
/** * Ist primitiveFlag auf true, muss beim Codegen dieser Reftype durch
* Ist primitiveFlag auf true, muss beim Codegen dieser Reftype durch * den primitiven Datentyp ersetzt werden
* den primitiven Datentyp ersetzt werden * <p>
* * Bsp: java.lang.Integer mit Flag wird dann zu [int]
* Bsp: java.lang.Integer mit Flag wird dann zu [int] */
*/ boolean primitiveFlag = false; // TODO Should be final
boolean primitiveFlag = false; // TODO Should be final
public RefType(JavaClassName fullyQualifiedName, Token offset) public RefType(JavaClassName fullyQualifiedName, Token offset) {
{ this(fullyQualifiedName, new ArrayList<>(), offset);
this(fullyQualifiedName, new ArrayList<>(), offset); }
public boolean isPrimitive() {
return primitiveFlag;
}
@Override
public String toString() {
String params = "";
if (parameter.size() > 0) {
params += "<";
Iterator<RefTypeOrTPHOrWildcardOrGeneric> it = parameter.iterator();
while (it.hasNext()) {
RefTypeOrTPHOrWildcardOrGeneric param = it.next();
params += param.toString();
if (it.hasNext()) params += ", ";
}
params += ">";
}
return this.name.toString() + params;
}
@Override
public int hashCode() {
return this.name.hashCode();//Nur den Name hashen. Sorgt für langsame, aber funktionierende HashMaps
}
public RefType(JavaClassName fullyQualifiedName, List<RefTypeOrTPHOrWildcardOrGeneric> parameter, Token offset) {
this(fullyQualifiedName, parameter, offset, false);
}
public RefType(JavaClassName fullyQualifiedName, List<RefTypeOrTPHOrWildcardOrGeneric> parameter, Token offset, boolean primitiveFlag) {
super(offset);
this.name = (fullyQualifiedName);
this.parameter = parameter;
this.primitiveFlag = primitiveFlag;
}
public JavaClassName getName() {
return name;
}
public List<RefTypeOrTPHOrWildcardOrGeneric> getParaList() {
if (this.parameter == null) return new ArrayList<>();
return this.parameter;
}
/**
* Author: Jrg Buerle<br/>
*
* @return
*/
public boolean equals(Object obj) {
if (!(obj instanceof RefType refObj)) {
return false;
} }
public boolean isPrimitive() { if (!Objects.equals(this.name, refObj.name)) return false;
return primitiveFlag; boolean ret = true;
}
@Override //if(!(super.equals(obj))) PL 2020-03-12 muss vll. einkommentiert werden
public String toString(){ // return false;
String params = "";
if(parameter.size()>0){ if (parameter == null || parameter.size() == 0) {
params += "<"; ret &= (refObj.getParaList() == null || refObj.getParaList().isEmpty());
Iterator<RefTypeOrTPHOrWildcardOrGeneric> it = parameter.iterator(); } else {
while(it.hasNext()){ if (refObj.getParaList() == null) {
RefTypeOrTPHOrWildcardOrGeneric param = it.next(); ret = false;
params += param.toString(); } else if (parameter.size() != refObj.getParaList().size()) {
if(it.hasNext())params += ", "; ret = false;
} } else {
params += ">"; for (int i = 0; i < parameter.size(); i++) {
ret &= parameter.get(i).equals(refObj.getParaList().get(i));
} }
return this.name.toString() + params; }
} }
return ret;
@Override }
public int hashCode() {
return this.name.hashCode();//Nur den Name hashen. Sorgt für langsame, aber funktionierende HashMaps
}
public RefType(JavaClassName fullyQualifiedName, List<RefTypeOrTPHOrWildcardOrGeneric> parameter, Token offset) {
this(fullyQualifiedName, parameter, offset, false);
}
public RefType(JavaClassName fullyQualifiedName, List<RefTypeOrTPHOrWildcardOrGeneric> parameter, Token offset, boolean primitiveFlag) { @Override
super(offset); public void accept(ASTVisitor visitor) {
this.name = (fullyQualifiedName); visitor.visit(this);
this.parameter = parameter; }
this.primitiveFlag = primitiveFlag;
}
public JavaClassName getName() @Override
{ public <A> A acceptTV(TypeVisitor<A> visitor) {
return name; return visitor.visit(this);
} }
public List<RefTypeOrTPHOrWildcardOrGeneric> getParaList(){ @Override
if(this.parameter==null)return new ArrayList<>(); public void accept(ResultSetVisitor visitor) {
return this.parameter; visitor.visit(this);
} }
/** @Override
* Author: Jrg Buerle<br/> public ISerialNode toSerial(KeyStorage keyStorage) {
* @return SerialMap serialized = new SerialMap();
*/ serialized.put("isPrimitive", this.primitiveFlag);
public boolean equals(Object obj) serialized.put("name", this.name.toString());
{ serialized.put("parameters", SerialList.fromMapped(this.parameter, param -> param.toSerial(keyStorage)));
if(obj instanceof RefType){
if (!Objects.equals(this.name, ((RefType) obj).name)) return false;
boolean ret = true;
//if(!(super.equals(obj))) PL 2020-03-12 muss vll. einkommentiert werden
// return false;
if(parameter==null || parameter.size()==0){
ret &= (((RefType)obj).getParaList()==null || ((RefType)obj).getParaList().size()==0);
}
else{
if(((RefType)obj).getParaList()==null){
ret = false;
}
else if(parameter.size() != ((RefType)obj).getParaList().size())
{
ret = false;
}
else
{
for(int i = 0; i<parameter.size(); i++)
{
ret &= parameter.get(i).equals(((RefType)obj).getParaList().get(i));
}
}
}
return ret;
}
else{
return false;
}
}
@Override // create the wrapper and put this as the object
public void accept(ASTVisitor visitor) { var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
visitor.visit(this); serializedWrapper.put("object", serialized);
} return serializedWrapper;
}
@Override public static RefType fromSerial(SerialMap data, UnifyContext context) {
public <A> A acceptTV(TypeVisitor<A> visitor) { return new RefType(
return visitor.visit(this); new JavaClassName(data.getValue("name").getOf(String.class)),
} data.getList("parameters").assertListOfMaps().stream()
.map(param -> RefTypeOrTPHOrWildcardOrGeneric.fromSerial(param, context))
@Override .toList(),
public void accept(ResultSetVisitor visitor) { new NullToken(),
visitor.visit(this); data.getValue("isPrimitive").getOf(Boolean.class)
} );
}
} }

View File

@@ -1,11 +1,17 @@
package de.dhbwstuttgart.syntaxtree.type;
+import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
+import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
+import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
+import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
+import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import de.dhbwstuttgart.syntaxtree.ASTVisitor;
import de.dhbwstuttgart.syntaxtree.SyntaxTreeNode;
import de.dhbwstuttgart.typeinference.result.ResultSetVisitor;
+import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import org.antlr.v4.runtime.Token;
-public abstract class RefTypeOrTPHOrWildcardOrGeneric extends SyntaxTreeNode{
+public abstract class RefTypeOrTPHOrWildcardOrGeneric extends SyntaxTreeNode implements ISerializableData {
public RefTypeOrTPHOrWildcardOrGeneric(Token offset) {
super(offset);
}
@@ -18,5 +24,26 @@ public abstract class RefTypeOrTPHOrWildcardOrGeneric extends SyntaxTreeNode{
@Override
public abstract boolean equals(Object o);
+@Override
+public ISerialNode toSerial(KeyStorage keyStorage) {
+SerialMap serialized = new SerialMap();
+serialized.put("type", this.getClass().getSimpleName());
+// we only insert null for the object and expect the child classes to call this and override the value with themselves
+serialized.put("object", SerialValue.NULL);
+return serialized;
+}
+public static RefTypeOrTPHOrWildcardOrGeneric fromSerial(SerialMap data, UnifyContext context) {
+String type = data.getValue("type").getOf(String.class);
+SerialMap object = data.getMap("object");
+if (type.equals(ExtendsWildcardType.class.getSimpleName())) return ExtendsWildcardType.fromSerial(object, context);
+else if (type.equals(GenericRefType.class.getSimpleName())) return GenericRefType.fromSerial(object, context);
+else if (type.equals(SuperWildcardType.class.getSimpleName())) return SuperWildcardType.fromSerial(object, context);
+else if (type.equals(RefType.class.getSimpleName())) return RefType.fromSerial(object, context);
+else if (type.equals(Void.class.getSimpleName())) return Void.fromSerial(object, context);
+else if (type.equals(TypePlaceholder.class.getSimpleName())) return TypePlaceholder.fromSerial(object, context);
+else throw new RuntimeException("Could not unserialize class of unhandled type " + type);
+}
}

View File

@@ -1,9 +1,13 @@
package de.dhbwstuttgart.syntaxtree.type;
+import de.dhbwstuttgart.parser.NullToken;
+import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
+import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
+import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.ASTVisitor;
-import de.dhbwstuttgart.syntaxtree.GenericTypeVar;
import de.dhbwstuttgart.typeinference.result.ResultSetVisitor;
+import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import org.antlr.v4.runtime.Token;
import java.util.Objects;
@@ -16,7 +20,7 @@ import java.util.Objects;
*
*/
-public class SuperWildcardType extends WildcardType{
+public class SuperWildcardType extends WildcardType implements ISerializableData {
/**
* Author: Arne Lüdtke<br/>
@@ -80,4 +84,22 @@ public class SuperWildcardType extends WildcardType{
SuperWildcardType that = (SuperWildcardType) o;
return that.innerType.equals(this.innerType);
}
+@Override
+public SerialMap toSerial(KeyStorage keyStorage) {
+SerialMap serialized = new SerialMap();
+serialized.put("innerType", this.innerType.toSerial(keyStorage));
+// create the wrapper and put this as the object
+var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
+serializedWrapper.put("object", serialized);
+return serializedWrapper;
+}
+public static SuperWildcardType fromSerial(SerialMap data, UnifyContext context) {
+return new SuperWildcardType(
+RefTypeOrTPHOrWildcardOrGeneric.fromSerial(data.getMap("innerType"), context),
+new NullToken()
+);
+}
}

View File

@@ -1,9 +1,12 @@
package de.dhbwstuttgart.syntaxtree.type; package de.dhbwstuttgart.syntaxtree.type;
import java.util.Hashtable; import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.parser.NullToken; import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.syntaxtree.ASTVisitor; import de.dhbwstuttgart.syntaxtree.ASTVisitor;
import de.dhbwstuttgart.syntaxtree.SyntaxTreeNode;
import de.dhbwstuttgart.syntaxtree.factory.NameGenerator; import de.dhbwstuttgart.syntaxtree.factory.NameGenerator;
import de.dhbwstuttgart.typeinference.result.ResultSetVisitor; import de.dhbwstuttgart.typeinference.result.ResultSetVisitor;
import org.antlr.v4.runtime.Token; import org.antlr.v4.runtime.Token;
@@ -16,7 +19,7 @@ import org.antlr.v4.runtime.Token;
* @author J�rg B�uerle * @author J�rg B�uerle
* @version $Date: 2013/06/19 12:45:37 $ * @version $Date: 2013/06/19 12:45:37 $
*/ */
public class TypePlaceholder extends RefTypeOrTPHOrWildcardOrGeneric public class TypePlaceholder extends RefTypeOrTPHOrWildcardOrGeneric implements ISerializableData
{ {
private final String name; private final String name;
@@ -65,7 +68,12 @@ public class TypePlaceholder extends RefTypeOrTPHOrWildcardOrGeneric
public static TypePlaceholder fresh(Token position){ public static TypePlaceholder fresh(Token position){
return new TypePlaceholder(NameGenerator.makeNewName(), position, 0, true); return new TypePlaceholder(NameGenerator.makeNewName(), position, 0, true);
} }
public static TypePlaceholder fresh(Token position, PlaceholderRegistry placeholderRegistry){
String newName = placeholderRegistry.generateFreshPlaceholderName();
return new TypePlaceholder(newName, position, 0, true);
}
public static TypePlaceholder fresh(Token position, int variance, boolean wildcardable){ public static TypePlaceholder fresh(Token position, int variance, boolean wildcardable){
return new TypePlaceholder(NameGenerator.makeNewName(), position, variance, wildcardable); return new TypePlaceholder(NameGenerator.makeNewName(), position, variance, wildcardable);
} }
@@ -139,4 +147,26 @@ public class TypePlaceholder extends RefTypeOrTPHOrWildcardOrGeneric
public Boolean getWildcardtable() { public Boolean getWildcardtable() {
return wildcardable; return wildcardable;
} }
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("name", this.name);
serialized.put("variance", this.variance);
serialized.put("wildcardable", this.wildcardable);
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static TypePlaceholder fromSerial(SerialMap data, UnifyContext context) {
return new TypePlaceholder(
data.getValue("name").getOf(String.class),
new NullToken(),
data.getValue("variance").getOf(Integer.class),
data.getValue("wildcardable").getOf(Boolean.class)
);
}
} }
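A minimal usage sketch (not taken from this changeset): the new fresh(Token, PlaceholderRegistry) overload draws names from a per-run registry instead of the static NameGenerator, so two unification runs cannot hand out the same placeholder name. The no-argument PlaceholderRegistry constructor is an assumption; the other calls appear in the diff above.

    PlaceholderRegistry registry = new PlaceholderRegistry(); // constructor assumed
    TypePlaceholder a = TypePlaceholder.fresh(new NullToken(), registry);
    TypePlaceholder b = TypePlaceholder.fresh(new NullToken(), registry);
    // both names come from the same registry, so they are guaranteed to differ
    assert !a.getName().equals(b.getName());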

View File

@@ -1,14 +1,32 @@
package de.dhbwstuttgart.syntaxtree.type; package de.dhbwstuttgart.syntaxtree.type;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import org.antlr.v4.runtime.Token; import org.antlr.v4.runtime.Token;
import de.dhbwstuttgart.parser.scope.JavaClassName; import de.dhbwstuttgart.parser.scope.JavaClassName;
public class Void extends RefType public class Void extends RefType implements ISerializableData
{ {
public Void(Token offset) { public Void(Token offset) {
super(JavaClassName.Void, offset); super(JavaClassName.Void, offset);
} }
@Override
public ISerialNode toSerial(KeyStorage keyStorage) {
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
serializedWrapper.put("object", new SerialMap());
return serializedWrapper;
}
public static Void fromSerial(SerialMap data, UnifyContext context) {
return new Void(new NullToken());
}
} }

View File

@@ -9,13 +9,8 @@ import de.dhbwstuttgart.syntaxtree.Method;
import de.dhbwstuttgart.syntaxtree.factory.ASTFactory; import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
import de.dhbwstuttgart.syntaxtree.factory.NameGenerator; import de.dhbwstuttgart.syntaxtree.factory.NameGenerator;
import de.dhbwstuttgart.syntaxtree.type.GenericRefType; import de.dhbwstuttgart.syntaxtree.type.GenericRefType;
import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import org.antlr.v4.runtime.Token;
import javax.swing.text.html.Option;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Optional; import java.util.Optional;

View File

@@ -1,77 +1,159 @@
package de.dhbwstuttgart.typeinference.constraints; package de.dhbwstuttgart.typeinference.constraints;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialUUID;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair; import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import java.util.Collection; import java.util.ArrayList;
import java.util.HashSet; import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set; import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import javax.annotation.Nullable;
public class Constraint<A> extends HashSet<A> { public class Constraint<A extends IConstraintElement> extends HashSet<A> implements ISerializableData {
private static final long serialVersionUID = 1L; private static final long serialVersionUID = 1L;
private Boolean isInherited = false;//wird beides nur für die Method-Constraints benoetigt private Boolean isInherited = false;//wird beides nur für die Method-Constraints benoetigt
private Boolean isImplemented = false; private Boolean isImplemented = false;
/* /*
* wird verwendet um bei der Codegenerierung die richtige Methoden - Signatur * wird verwendet um bei der Codegenerierung die richtige Methoden - Signatur
* auszuwaehlen * auszuwaehlen
*/ */
/*private*/ Set<A> methodSignatureConstraint = new HashSet<>(); /*private*/ Set<A> methodSignatureConstraint = new HashSet<>();
private Constraint<A> extendConstraint = null; private Constraint<A> extendConstraint = null;
public Constraint() { public Constraint() {
super(); super();
} }
public Constraint(Boolean isInherited, Boolean isImplemented) { public Constraint(Boolean isInherited, Boolean isImplemented) {
this.isInherited = isInherited; this.isInherited = isInherited;
this.isImplemented = isImplemented; this.isImplemented = isImplemented;
} }
public Constraint(Boolean isInherited, Boolean isImplemented, Constraint<A> extendConstraint, Set<A> methodSignatureConstraint) { public Constraint(Boolean isInherited, Boolean isImplemented, Constraint<A> extendConstraint, Set<A> methodSignatureConstraint) {
this.isInherited = isInherited; this.isInherited = isInherited;
this.isImplemented = isImplemented; this.isImplemented = isImplemented;
this.extendConstraint = extendConstraint; this.extendConstraint = extendConstraint;
this.methodSignatureConstraint = methodSignatureConstraint; this.methodSignatureConstraint = methodSignatureConstraint;
} }
public void setIsInherited(Boolean isInherited) { public void setIsInherited(Boolean isInherited) {
this.isInherited = isInherited; this.isInherited = isInherited;
} }
public Boolean isInherited() { public Boolean isInherited() {
return isInherited; return isInherited;
} }
public Boolean isImplemented() { public Boolean isImplemented() {
return isImplemented; return isImplemented;
} }
public Constraint<A> getExtendConstraint() { public Constraint<A> getExtendConstraint() {
return extendConstraint; return extendConstraint;
} }
public void setExtendConstraint(Constraint<A> c) { public void setExtendConstraint(Constraint<A> c) {
extendConstraint = c; extendConstraint = c;
} }
public Set<A> getmethodSignatureConstraint() { public Set<A> getmethodSignatureConstraint() {
return methodSignatureConstraint; return methodSignatureConstraint;
} }
public void setmethodSignatureConstraint(Set<A> c) { public void setmethodSignatureConstraint(Set<A> c) {
methodSignatureConstraint = c; methodSignatureConstraint = c;
} }
public String toString() {
return super.toString() + "\nisInherited = " + isInherited
+ " isOveridden = " + isImplemented
+ " msc[" + methodSignatureConstraint.size() + "] = " + methodSignatureConstraint
//" + extendsContraint: " + (extendConstraint != null ? extendConstraint.toStringBase() : "null" )
+ "\n";
}
public String toStringBase() {
return super.toString();
}
private String serialUUID = null;
@Override
public SerialUUID toSerial(KeyStorage keyStorage) {
final String uuid = serialUUID == null ? keyStorage.getIdentifier() : serialUUID;
if (serialUUID == null) serialUUID = uuid;
if (!keyStorage.isAlreadySerialized(uuid)) {
SerialMap serialized = new SerialMap();
keyStorage.putSerialized(uuid, serialized);
serialized.put("isInherited", isInherited);
serialized.put("isImplemented", isImplemented);
serialized.put("extendedConstraint", extendConstraint == null ? null :
extendConstraint.toSerial(keyStorage));
Function<A, ISerialNode> pairMapper = pair -> {
if (pair instanceof Pair simplePair) return simplePair.toSerial(keyStorage);
if (pair instanceof UnifyPair unifyPair) return unifyPair.toSerial(keyStorage);
throw new RuntimeException("No serialization is supported for type " + pair.getClass().getName());
};
serialized.put("methodSignatureConstraint", methodSignatureConstraint == null ? null :
SerialList.fromMapped(methodSignatureConstraint, pairMapper));
serialized.put("setElements", SerialList.fromMapped(this, pairMapper));
}
// return only the unique key
return new SerialUUID(uuid);
}
public static <T extends IConstraintElement> Constraint<T> fromSerial(SerialUUID serialUUID, UnifyContext context, Class<T> target, KeyStorage keyStorage) {
String uuid = serialUUID.uuid;
if (!keyStorage.isAlreadyUnserialized(uuid)) {
Constraint<T> constraint = new Constraint<>();
// immediately add the object to the context to prevent infinite recursion
keyStorage.putUnserialized(uuid, constraint);
// retrieve the serialized data and start unserializing it
SerialMap data = keyStorage.getSerialized(uuid);
constraint.isInherited = data.getValue("isInherited").getOf(Boolean.class);
constraint.isImplemented = data.getValue("isImplemented").getOf(Boolean.class);
constraint.extendConstraint = Optional.ofNullable(data.getUUIDOrNull("extendedConstraint"))
.map(v -> Constraint.fromSerial(v, context, target, keyStorage))
.orElse(null);
// to convert the maps back to elements, we sadly have to make some assumptions about the generic types...
Function<ISerialNode, T> pairUnmapper = pairData -> {
if (target == Pair.class && pairData instanceof SerialMap pairMap) {
return (T) Pair.fromSerial(pairMap, context);
}
if (target == UnifyPair.class && pairData instanceof SerialUUID pairUUID) {
return (T) UnifyPair.fromSerial(pairUUID, context, keyStorage);
}
throw new RuntimeException("No serialization is supported for target type " + target.getName());
};
constraint.methodSignatureConstraint =
Optional.ofNullable(data.getListOrNull("methodSignatureConstraint"))
.map(l -> l.stream().map(pairUnmapper).collect(Collectors.toSet()))
.orElse(null);
constraint.addAll(
data.getList("setElements")
.stream().map(pairUnmapper).toList());
}
return keyStorage.getUnserialized(uuid, Constraint.class);
}
public String toString() {
return super.toString() + "\nisInherited = " + isInherited + " isOveridden = " + isImplemented
+ methodSignatureConstraint
//" + extendsContraint: " + (extendConstraint != null ? extendConstraint.toStringBase() : "null" )
+ "\n" ;
}
public String toStringBase() {
return super.toString();
}
} }
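Constraint instances can be shared between oder-constraints, so toSerial writes the payload into the KeyStorage under a UUID exactly once and returns only that key, and fromSerial registers the empty constraint before filling it to survive cyclic extendConstraint references. A round-trip sketch, assuming a KeyStorage and UnifyContext are obtained the same way the packet-handling code does (the helper name is made up):

    static Constraint<UnifyPair> roundTrip(Constraint<UnifyPair> constraint,
                                           KeyStorage keyStorage, UnifyContext context) {
        SerialUUID key = constraint.toSerial(keyStorage);      // payload written once, only the key is returned
        SerialUUID sameKey = constraint.toSerial(keyStorage);  // repeated calls reuse the cached entry
        // the target class decides how the set elements are rebuilt (UnifyPair here, Pair elsewhere)
        return Constraint.fromSerial(key, context, UnifyPair.class, keyStorage);
    }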

View File

@@ -1,63 +1,80 @@
package de.dhbwstuttgart.typeinference.constraints; package de.dhbwstuttgart.typeinference.constraints;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder; import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.GuavaSetOperations; import de.dhbwstuttgart.typeinference.unify.GuavaSetOperations;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair; import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import java.util.*; import java.util.*;
import java.util.function.BinaryOperator; import java.util.function.BinaryOperator;
import java.util.function.Consumer; import java.util.function.Consumer;
import java.util.function.Function; import java.util.function.Function;
import java.util.stream.Collectors; import java.util.stream.Collectors;
public class ConstraintSet<A> { public class ConstraintSet<A extends IConstraintElement> implements ISerializableData {
Constraint<A> undConstraints = new Constraint<>(); Constraint<A> undConstraints = new Constraint<>();
List<Set<Constraint<A>>> oderConstraints = new ArrayList<>(); List<Set<Constraint<A>>> oderConstraints = new ArrayList<>();
public void addUndConstraint(A p){ public void addUndConstraint(A p) {
undConstraints.add(p); undConstraints.add(p);
} }
public void addOderConstraint(Set<Constraint<A>> methodConstraints) { public void addOderConstraint(Set<Constraint<A>> methodConstraints) {
oderConstraints.add(methodConstraints); oderConstraints.add(methodConstraints);
} }
public void addAllUndConstraint(Constraint<A> allUndConstraints){ public void addAllUndConstraint(Constraint<A> allUndConstraints) {
undConstraints.addAll(allUndConstraints); undConstraints.addAll(allUndConstraints);
} }
public void addAllOderConstraint(List<Set<Constraint<A>>> allOderConstraints){
this.oderConstraints.addAll(allOderConstraints);
}
public void addAll(ConstraintSet constraints) {
this.addAllUndConstraint(constraints.undConstraints);
this.addAllOderConstraint(constraints.oderConstraints);
}
@Override public void addAllOderConstraint(List<Set<Constraint<A>>> allOderConstraints) {
public String toString(){ this.oderConstraints.addAll(allOderConstraints);
BinaryOperator<String> b = (x,y) -> x+y; }
return "\nUND:" + this.undConstraints.toString() + "\n" +
"ODER:" + this.oderConstraints.stream().reduce("", (x,y) -> x.toString()+ "\n" +y, b);
//cartesianProduct().toString();
}
public Set<List<Constraint<A>>> cartesianProduct(){ public void addAll(ConstraintSet constraints) {
Set<Constraint<A>> toAdd = new HashSet<>(); this.addAllUndConstraint(constraints.undConstraints);
toAdd.add(undConstraints); this.addAllOderConstraint(constraints.oderConstraints);
List<Set<Constraint<A>>> allConstraints = new ArrayList<>(); }
allConstraints.add(toAdd);
allConstraints.addAll(oderConstraints);
return new GuavaSetOperations().cartesianProduct(allConstraints);
}
public <B> ConstraintSet<B> map(Function<? super A, ? extends B> o) { @Override
Hashtable<Constraint<A>,Constraint<B>> CSA2CSB = new Hashtable<>(); public String toString() {
ConstraintSet<B> ret = new ConstraintSet<>(); BinaryOperator<String> b = (x, y) -> x + y;
ret.undConstraints = undConstraints.stream().map(o).collect(Collectors.toCollection(Constraint<B>::new)); return "\nUND:\n" + this.undConstraints.toString() +
List<Set<Constraint<B>>> newOder = new ArrayList<>(); "ODER:" + this.oderConstraints.stream().reduce("", (x, y) -> x + "\n\t" + y, b) +
"\n";
//cartesianProduct().toString();
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof ConstraintSet<?> other)) return false;
return Objects.equals(undConstraints, other.undConstraints)
&& Objects.equals(oderConstraints, other.oderConstraints);
}
@Override
public int hashCode() {
return Objects.hash(undConstraints, oderConstraints);
}
public Set<List<Constraint<A>>> cartesianProduct() {
Set<Constraint<A>> toAdd = new HashSet<>();
toAdd.add(undConstraints);
List<Set<Constraint<A>>> allConstraints = new ArrayList<>();
allConstraints.add(toAdd);
allConstraints.addAll(oderConstraints);
return new GuavaSetOperations().cartesianProduct(allConstraints);
}
public <B extends IConstraintElement> ConstraintSet<B> map(Function<? super A, ? extends B> o) {
Hashtable<Constraint<A>, Constraint<B>> CSA2CSB = new Hashtable<>();
ConstraintSet<B> ret = new ConstraintSet<>();
ret.undConstraints = undConstraints.stream().map(o).collect(Collectors.toCollection(Constraint<B>::new));
List<Set<Constraint<B>>> newOder = new ArrayList<>();
/* /*
for(Set<Constraint<A>> oderConstraint : oderConstraints){ for(Set<Constraint<A>> oderConstraint : oderConstraints){
oderConstraint.forEach(as -> { oderConstraint.forEach(as -> {
@@ -68,25 +85,25 @@ public class ConstraintSet<A> {
CSA2CSB.put(as, newConst);} ); CSA2CSB.put(as, newConst);} );
} }
*/ */
for(Set<Constraint<A>> oderConstraint : oderConstraints){ for (Set<Constraint<A>> oderConstraint : oderConstraints) {
newOder.add( newOder.add(
oderConstraint.stream().map((Constraint<A> as) -> { oderConstraint.stream().map((Constraint<A> as) -> {
Constraint<B> newConst = as.stream() Constraint<B> newConst = as.stream()
.map(o) .map(o)
.collect(Collectors.toCollection(( .collect(Collectors.toCollection((
() -> new Constraint<B> (as.isInherited(), () -> new Constraint<B>(as.isInherited(),
as.isImplemented(), as.isImplemented(),
(as.getExtendConstraint() != null) (as.getExtendConstraint() != null)
? as.getExtendConstraint().stream().map(o).collect(Collectors.toCollection(Constraint::new)) ? as.getExtendConstraint().stream().map(o).collect(Collectors.toCollection(Constraint::new))
: null, : null,
as.getmethodSignatureConstraint().stream().map(o).collect(Collectors.toCollection(HashSet::new)))) as.getmethodSignatureConstraint().stream().map(o).collect(Collectors.toCollection(HashSet::new))))
)); ));
//CSA2CSB.put(as, newConst); //CSA2CSB.put(as, newConst);
return newConst; return newConst;
/* /*
Constraint<B> bs = CSA2CSB.get(as); Constraint<B> bs = CSA2CSB.get(as);
@@ -95,36 +112,60 @@ public class ConstraintSet<A> {
} }
return bs; return bs;
*/ */
}).collect(Collectors.toSet()) }).collect(Collectors.toSet())
); );
}
ret.oderConstraints = newOder;
return ret;
} }
public void forEach (Consumer<? super A> c) { ret.oderConstraints = newOder;
undConstraints.stream().forEach(c); return ret;
for(Set<Constraint<A>> oderConstraint : oderConstraints){ }
oderConstraint.parallelStream().forEach((Constraint<A> as) ->
as.stream().forEach(c)); public void forEach(Consumer<? super A> c) {
} undConstraints.stream().forEach(c);
for (Set<Constraint<A>> oderConstraint : oderConstraints) {
oderConstraint.parallelStream().forEach((Constraint<A> as) ->
as.stream().forEach(c));
} }
}
public Set<A> getAll () {
Set<A> ret = new HashSet<>(); public Set<A> getAll() {
ret.addAll(undConstraints); Set<A> ret = new HashSet<>(undConstraints);
for(Set<Constraint<A>> oderConstraint : oderConstraints){ for (Set<Constraint<A>> oderConstraint : oderConstraints) {
oderConstraint.parallelStream().forEach((Constraint<A> as) -> ret.addAll(as)); oderConstraint.parallelStream().forEach(ret::addAll);
}
return ret;
}
public List<Set<Constraint<A>>> getOderConstraints() {
return oderConstraints;
}
public Set<A> getUndConstraints() {
return undConstraints;
} }
return ret;
}
public List<Set<Constraint<A>>> getOderConstraints() {
return oderConstraints;
}
public Set<A> getUndConstraints() {
return undConstraints;
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("undConstraints", undConstraints.toSerial(keyStorage));
serialized.put("oderConstraints", SerialList.fromMapped(oderConstraints, oderConstraintSet ->
SerialList.fromMapped(oderConstraintSet, oderConstraint ->
oderConstraint.toSerial(keyStorage))
));
return serialized;
}
public static <T extends IConstraintElement> ConstraintSet<T> fromSerial(SerialMap data, UnifyContext context, Class<T> target, KeyStorage keyStorage) {
ConstraintSet<T> constraintSet = new ConstraintSet<>();
constraintSet.undConstraints = Constraint.fromSerial(data.getUUID("undConstraints"), context, target, keyStorage);
constraintSet.oderConstraints = data.getList("oderConstraints").assertListOfLists().stream()
.map(oderConstraintSetData -> oderConstraintSetData.assertListOfUUIDs().stream()
.map(oderConstraintData -> Constraint.fromSerial(oderConstraintData, context, target, keyStorage))
.collect(Collectors.toSet())
).toList();
return constraintSet;
}
} }

View File

@@ -0,0 +1,4 @@
package de.dhbwstuttgart.typeinference.constraints;
public interface IConstraintElement {
}

View File

@@ -1,72 +1,70 @@
package de.dhbwstuttgart.typeinference.constraints; package de.dhbwstuttgart.typeinference.constraints;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.io.Serializable; import java.io.Serializable;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.parser.SourceLoc; import de.dhbwstuttgart.parser.SourceLoc;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric; import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder; import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator; import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import org.antlr.v4.runtime.Token;
public class Pair implements Serializable public class Pair implements Serializable, IConstraintElement, ISerializableData {
{ public final RefTypeOrTPHOrWildcardOrGeneric TA1;
public final RefTypeOrTPHOrWildcardOrGeneric TA1; public final RefTypeOrTPHOrWildcardOrGeneric TA2;
public final RefTypeOrTPHOrWildcardOrGeneric TA2;
private SourceLoc location; private SourceLoc location;
private PairOperator eOperator = PairOperator.SMALLER; private PairOperator eOperator = PairOperator.SMALLER;
private Boolean noUnification = false; private Boolean noUnification = false;
private Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2 )
{
this.TA1 = TA1;
this.TA2 = TA2;
if(TA1 == null || TA2 == null)
throw new NullPointerException();
eOperator = PairOperator.SMALLER;
}
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator eOp) private Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2) {
{ this.TA1 = TA1;
// Konstruktor this.TA2 = TA2;
this(TA1,TA2); if (TA1 == null || TA2 == null)
this.eOperator = eOp; throw new NullPointerException();
} eOperator = PairOperator.SMALLER;
}
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator e0p, SourceLoc location) { public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator eOp) {
this(TA1, TA2, e0p); // Konstruktor
this.location = location; this(TA1, TA2);
} this.eOperator = eOp;
}
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator eOp, Boolean noUnification)
{
// Konstruktor
this(TA1,TA2);
this.eOperator = eOp;
this.noUnification = noUnification;
}
public SourceLoc getLocation() { public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator e0p, SourceLoc location) {
return this.location; this(TA1, TA2, e0p);
} this.location = location;
}
public String toString()
{ public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator eOp, Boolean noUnification) {
// otth: Gibt ein Paar als String aus --> zum Debuggen und Vergleichen // Konstruktor
String strElement1 = "NULL"; this(TA1, TA2);
String strElement2 = "NULL"; this.eOperator = eOp;
String Operator = "<."; this.noUnification = noUnification;
}
if( TA1 != null )
strElement1 = TA1.toString(); public SourceLoc getLocation() {
return this.location;
if( TA2 != null ) }
strElement2 = TA2.toString();
public String toString() {
// otth: Gibt ein Paar als String aus --> zum Debuggen und Vergleichen
String strElement1 = "NULL";
String strElement2 = "NULL";
String Operator = "<.";
if (TA1 != null)
strElement1 = TA1.toString();
if (TA2 != null)
strElement2 = TA2.toString();
/* PL ausskommentiert 2018-05-24 /* PL ausskommentiert 2018-05-24
if(OperatorEqual()) if(OperatorEqual())
@@ -76,80 +74,104 @@ public class Pair implements Serializable
if(OperatorSmallerExtends()) if(OperatorSmallerExtends())
Operator = "<?"; Operator = "<?";
*/ */
return "\n(" + strElement1 + " " + eOperator.toString() + " " + strElement2 + ")";
/*- Equals: " + bEqual*/
}
/** return "\n(P: " + strElement1 + " " + eOperator.toString() + " " + strElement2 + ")";
* <br/>Author: Jörg Bäuerle
* @param obj
* @return
*/
public boolean equals(Object obj)
{
boolean ret = true;
ret &= (obj instanceof Pair);
if(!ret)return ret;
ret &= ((Pair)obj).TA1.equals(this.TA1);
ret &= ((Pair)obj).TA2.equals(this.TA2);
return ret;
}
/** /*- Equals: " + bEqual*/
* Author: Arne Lüdtke<br/> }
* Abfrage, ob Operator vom Typ Equal ist.
*/
public boolean OperatorEqual()
{
return eOperator == PairOperator.EQUALSDOT;
}
/**
* Author: Arne Lüdtke<br/>
* Abfrage, ob Operator vom Typ Smaller ist.
*/
public boolean OperatorSmaller()
{
return eOperator == PairOperator.SMALLER;
}
/**
* Author: Arne Lüdtke<br/>
* Abfrage, ob Operator vom Typ SmallerExtends ist.
*/
public boolean OperatorSmallerExtends()
{
return eOperator == PairOperator.SMALLERDOTWC;
}
/**
* Author: Arne Lüdtke<br/>
* Gibt den Operator zurück.
*/
public PairOperator GetOperator()
{
return eOperator;
}
public boolean OperatorSmallerDot() { /**
return eOperator == PairOperator.SMALLERDOT; * <br/>Author: Jörg Bäuerle
} *
* @param obj
* @return
static public Map<String, TypePlaceholder> generateTPHMap(ConstraintSet<Pair> constraints) { */
HashMap<String, TypePlaceholder> ret = new HashMap<>(); public boolean equals(Object obj) {
constraints.map((Pair p) -> { return (
if (p.TA1 instanceof TypePlaceholder) { (obj instanceof Pair pairObj) &&
ret.put(((TypePlaceholder) p.TA1).getName(), (TypePlaceholder) p.TA1); pairObj.TA1.equals(this.TA1) &&
} pairObj.TA2.equals(this.TA2)
if (p.TA2 instanceof TypePlaceholder) { );
ret.put(((TypePlaceholder) p.TA2).getName(), (TypePlaceholder) p.TA2); }
}
return null; /**
}); * Author: Arne Lüdtke<br/>
return ret; * Abfrage, ob Operator vom Typ Equal ist.
} */
public boolean OperatorEqual() {
return eOperator == PairOperator.EQUALSDOT;
}
/**
* Author: Arne Lüdtke<br/>
* Abfrage, ob Operator vom Typ Smaller ist.
*/
public boolean OperatorSmaller() {
return eOperator == PairOperator.SMALLER;
}
/**
* Author: Arne Lüdtke<br/>
* Abfrage, ob Operator vom Typ SmallerExtends ist.
*/
public boolean OperatorSmallerExtends() {
return eOperator == PairOperator.SMALLERDOTWC;
}
/**
* Author: Arne Lüdtke<br/>
* Gibt den Operator zurück.
*/
public PairOperator GetOperator() {
return eOperator;
}
public boolean OperatorSmallerDot() {
return eOperator == PairOperator.SMALLERDOT;
}
static public Map<String, TypePlaceholder> generateTPHMap(ConstraintSet<Pair> constraints) {
HashMap<String, TypePlaceholder> ret = new HashMap<>();
constraints.map((Pair p) -> {
if (p.TA1 instanceof TypePlaceholder) {
ret.put(((TypePlaceholder) p.TA1).getName(), (TypePlaceholder) p.TA1);
}
if (p.TA2 instanceof TypePlaceholder) {
ret.put(((TypePlaceholder) p.TA2).getName(), (TypePlaceholder) p.TA2);
}
return null;
});
return ret;
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
// because toString() will output TA1 and TA2 recursively, we can ignore potential infinite recursion here too
SerialMap serialized = new SerialMap();
serialized.put("ta1", this.TA1.toSerial(keyStorage));
serialized.put("ta2", this.TA2.toSerial(keyStorage));
serialized.put("op", this.eOperator.toString());
serialized.put("noUnification", this.noUnification ? 1 : 0);
serialized.put("location", this.location == null ? null : this.location.toSerial(keyStorage));
return serialized;
}
public static Pair fromSerial(SerialMap data, UnifyContext context) {
String op = data.getValue("op").getOf(String.class);
SerialMap ta1 = data.getMap("ta1");
SerialMap ta2 = data.getMap("ta2");
Boolean noUnification = data.getValue("noUnification").getOf(Integer.class) == 1;
SerialMap location = data.getMapOrNull("location");
var pair = new Pair(
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(ta1, context),
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(ta2, context),
PairOperator.fromString(op),
noUnification
);
if (location != null) pair.location = SourceLoc.fromSerial(location);
return pair;
}
} }
// ino.end // ino.end

View File

@@ -1,15 +1,19 @@
package de.dhbwstuttgart.typeinference.result; package de.dhbwstuttgart.typeinference.result;
import de.dhbwstuttgart.exceptions.NotImplementedException; import de.dhbwstuttgart.exceptions.NotImplementedException;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric; import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder; import de.dhbwstuttgart.typeinference.unify.UnifyContext;
/** /**
* enthaelt alle Paare, die in einem Ergebnis nicht vorkommen koennen * enthaelt alle Paare, die in einem Ergebnis nicht vorkommen koennen
* sie sind noetig fuer origPairs in PairTPHsmallerTPH, da hier auch * sie sind noetig fuer origPairs in PairTPHsmallerTPH, da hier auch
* Paare vorkommen koennen die keine Result sind (z.B. bei FunN$$) * Paare vorkommen koennen die keine Result sind (z.B. bei FunN$$)
*/ */
public class PairNoResult extends ResultPair<RefTypeOrTPHOrWildcardOrGeneric, RefTypeOrTPHOrWildcardOrGeneric>{ public class PairNoResult extends ResultPair<RefTypeOrTPHOrWildcardOrGeneric, RefTypeOrTPHOrWildcardOrGeneric>
implements ISerializableData {
//public final TypePlaceholder left; //public final TypePlaceholder left;
//public final TypePlaceholder right; //public final TypePlaceholder right;
@@ -17,7 +21,7 @@ public class PairNoResult extends ResultPair<RefTypeOrTPHOrWildcardOrGeneric, Re
* urspruengliches Paar aus diesem dieses Resultpair erzeugt wurde * urspruengliches Paar aus diesem dieses Resultpair erzeugt wurde
* wichtig fuer generated Generics * wichtig fuer generated Generics
*/ */
ResultPair origPair; ResultPair<?,?> origPair;
public PairNoResult(RefTypeOrTPHOrWildcardOrGeneric left, RefTypeOrTPHOrWildcardOrGeneric right){ public PairNoResult(RefTypeOrTPHOrWildcardOrGeneric left, RefTypeOrTPHOrWildcardOrGeneric right){
super(left, right); super(left, right);
@@ -29,4 +33,24 @@ public class PairNoResult extends ResultPair<RefTypeOrTPHOrWildcardOrGeneric, Re
throw new NotImplementedException(); throw new NotImplementedException();
//visitor.visit(this); //visitor.visit(this);
} }
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("left", this.getLeft().toSerial(keyStorage));
serialized.put("right", this.getRight().toSerial(keyStorage));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static PairNoResult fromSerial2(SerialMap data, UnifyContext context) {
SerialMap left = data.getMap("left");
SerialMap right = data.getMap("right");
return new PairNoResult(
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(left, context),
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(right, context)
);
}
} }

View File

@@ -1,9 +1,13 @@
package de.dhbwstuttgart.typeinference.result; package de.dhbwstuttgart.typeinference.result;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric; import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder; import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
public class PairTPHEqualTPH extends ResultPair<TypePlaceholder, TypePlaceholder> { public class PairTPHEqualTPH extends ResultPair<TypePlaceholder, TypePlaceholder> implements ISerializableData {
public PairTPHEqualTPH(TypePlaceholder tl, TypePlaceholder tr) { public PairTPHEqualTPH(TypePlaceholder tl, TypePlaceholder tr) {
super(tl, tr); super(tl, tr);
} }
@@ -12,4 +16,24 @@ public class PairTPHEqualTPH extends ResultPair<TypePlaceholder, TypePlaceholder
public void accept(ResultPairVisitor visitor) { public void accept(ResultPairVisitor visitor) {
visitor.visit(this); visitor.visit(this);
} }
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("left", this.getLeft().toSerial(keyStorage));
serialized.put("right", this.getRight().toSerial(keyStorage));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static PairTPHEqualTPH fromSerial2(SerialMap data, UnifyContext context) {
SerialMap left = data.getMap("left");
SerialMap right = data.getMap("right");
return new PairTPHEqualTPH(
(TypePlaceholder) RefTypeOrTPHOrWildcardOrGeneric.fromSerial(left, context),
(TypePlaceholder) RefTypeOrTPHOrWildcardOrGeneric.fromSerial(right, context)
);
}
} }

View File

@@ -1,13 +1,17 @@
package de.dhbwstuttgart.typeinference.result; package de.dhbwstuttgart.typeinference.result;
import de.dhbwstuttgart.syntaxtree.type.RefType; import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric; import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder; import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
/** /**
* Steht für A =. RefType * Steht für A =. RefType
*/ */
public class PairTPHequalRefTypeOrWildcardType extends ResultPair{ public class PairTPHequalRefTypeOrWildcardType extends ResultPair<TypePlaceholder, RefTypeOrTPHOrWildcardOrGeneric>
implements ISerializableData {
public final TypePlaceholder left; public final TypePlaceholder left;
public final RefTypeOrTPHOrWildcardOrGeneric right; public final RefTypeOrTPHOrWildcardOrGeneric right;
@@ -26,4 +30,24 @@ public class PairTPHequalRefTypeOrWildcardType extends ResultPair{
public String toString() { public String toString() {
return "(" + left.toString() + " = " + right.toString() + ")"; return "(" + left.toString() + " = " + right.toString() + ")";
} }
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("left", this.getLeft().toSerial(keyStorage));
serialized.put("right", this.getRight().toSerial(keyStorage));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static PairTPHequalRefTypeOrWildcardType fromSerial2(SerialMap data, UnifyContext context) {
SerialMap left = data.getMap("left");
SerialMap right = data.getMap("right");
return new PairTPHequalRefTypeOrWildcardType(
(TypePlaceholder)RefTypeOrTPHOrWildcardOrGeneric.fromSerial(left, context),
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(right, context)
);
}
} }

View File

@@ -1,12 +1,17 @@
package de.dhbwstuttgart.typeinference.result; package de.dhbwstuttgart.typeinference.result;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric; import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder; import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
/** /**
* Steht für: A <. B * Steht für: A <. B
*/ */
public class PairTPHsmallerTPH extends ResultPair{ public class PairTPHsmallerTPH extends ResultPair<TypePlaceholder,TypePlaceholder>
implements ISerializableData {
public final TypePlaceholder left; public final TypePlaceholder left;
public final TypePlaceholder right; public final TypePlaceholder right;
@@ -14,7 +19,7 @@ public class PairTPHsmallerTPH extends ResultPair{
* urspruengliches Paar aus diesem dieses Resultpair erzeugt wurde * urspruengliches Paar aus diesem dieses Resultpair erzeugt wurde
* wichtig fuer generated Generics * wichtig fuer generated Generics
*/ */
ResultPair origPair; ResultPair<?,?> origPair;
public PairTPHsmallerTPH(TypePlaceholder left, TypePlaceholder right){ public PairTPHsmallerTPH(TypePlaceholder left, TypePlaceholder right){
super(left, right); super(left, right);
@@ -22,7 +27,7 @@ public class PairTPHsmallerTPH extends ResultPair{
this.right = right; this.right = right;
} }
public PairTPHsmallerTPH(TypePlaceholder left, TypePlaceholder right, ResultPair origPair){ public PairTPHsmallerTPH(TypePlaceholder left, TypePlaceholder right, ResultPair<?,?> origPair){
this(left, right); this(left, right);
this.origPair = origPair; this.origPair = origPair;
} }
@@ -36,4 +41,24 @@ public class PairTPHsmallerTPH extends ResultPair{
public String toString() { public String toString() {
return "(" + left.toString() + " < " + right.toString() + ")"; return "(" + left.toString() + " < " + right.toString() + ")";
} }
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("left", this.getLeft().toSerial(keyStorage));
serialized.put("right", this.getRight().toSerial(keyStorage));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static PairTPHsmallerTPH fromSerial2(SerialMap data, UnifyContext context) {
SerialMap left = data.getMap("left");
SerialMap right = data.getMap("right");
return new PairTPHsmallerTPH(
(TypePlaceholder) RefTypeOrTPHOrWildcardOrGeneric.fromSerial(left, context),
(TypePlaceholder) RefTypeOrTPHOrWildcardOrGeneric.fromSerial(right, context)
);
}
} }

View File

@@ -1,11 +1,17 @@
package de.dhbwstuttgart.typeinference.result; package de.dhbwstuttgart.typeinference.result;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric; import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
/** /**
* Paare, welche das Unifikationsergebnis darstellen * Paare, welche das Unifikationsergebnis darstellen
*/ */
public abstract class ResultPair<A extends RefTypeOrTPHOrWildcardOrGeneric,B extends RefTypeOrTPHOrWildcardOrGeneric> { public abstract class ResultPair<A extends RefTypeOrTPHOrWildcardOrGeneric,B extends RefTypeOrTPHOrWildcardOrGeneric>
implements ISerializableData {
private final A left; private final A left;
private final B right; private final B right;
@@ -58,5 +64,35 @@ public abstract class ResultPair<A extends RefTypeOrTPHOrWildcardOrGeneric,B ext
return false; return false;
return true; return true;
} }
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
String type = switch (this) {
case PairNoResult _ -> "pnr";
case PairTPHEqualTPH _ -> "ptet";
case PairTPHsmallerTPH _ -> "ptst";
case PairTPHequalRefTypeOrWildcardType _ -> "ptertwt";
default -> throw new RuntimeException("No type defined for ResultPair of class " + this.getClass().getName());
};
serialized.put("type", type);
// we only insert null for the object and expect the subclasses to call this method and then replace the value with their own payload
serialized.put("object", SerialValue.NULL);
return serialized;
}
public static <A extends RefTypeOrTPHOrWildcardOrGeneric,B extends RefTypeOrTPHOrWildcardOrGeneric> ResultPair<A,B>
fromSerial(SerialMap data, UnifyContext context) {
String type = data.getValue("type").getOf(String.class);
SerialMap object = data.getMap("object");
return switch (type) {
case "pnr" -> (ResultPair) PairNoResult.fromSerial2(object, context);
case "ptet" -> (ResultPair) PairTPHEqualTPH.fromSerial2(object, context);
case "ptst" -> (ResultPair) PairTPHsmallerTPH.fromSerial2(object, context);
case "ptertwt" -> (ResultPair) PairTPHequalRefTypeOrWildcardType.fromSerial2(object, context);
default -> throw new RuntimeException("Could not unserialize class of unhandled type " + type);
};
}
} }
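The base class writes a short type tag ("pnr", "ptet", "ptst", "ptertwt") next to the subclass payload, so a single entry point can rebuild the correct subclass. Sketch of the round trip (the helper name is made up; KeyStorage and UnifyContext come from the surrounding serialization code):

    static ResultPair<?, ?> roundTrip(PairTPHsmallerTPH pair, KeyStorage keyStorage, UnifyContext context) {
        SerialMap data = pair.toSerial(keyStorage);   // contains type = "ptst" and the payload under "object"
        return ResultPair.fromSerial(data, context);  // dispatches to PairTPHsmallerTPH.fromSerial2
    }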

View File

@@ -1,5 +1,13 @@
package de.dhbwstuttgart.typeinference.result; package de.dhbwstuttgart.typeinference.result;
import com.google.common.collect.Ordering;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashSet; import java.util.HashSet;
import java.util.Set; import java.util.Set;
@@ -10,148 +18,177 @@ import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric; import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.SuperWildcardType; import de.dhbwstuttgart.syntaxtree.type.SuperWildcardType;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder; import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import java.util.stream.Collectors;
@SuppressWarnings("rawtypes") @SuppressWarnings("rawtypes")
public class ResultSet { public class ResultSet implements ISerializableData {
public final Set<ResultPair> results; public final Set<ResultPair> results;
public Set<ResultPair<TypePlaceholder, TypePlaceholder>> genIns; public Set<ResultPair<TypePlaceholder, TypePlaceholder>> genIns;
public ResultSet(Set<ResultPair> set){ public ResultSet(Set<ResultPair> set) {
this.results = set; this.results = set;
this.genIns = new HashSet<>(); this.genIns = new HashSet<>();
results.forEach(x -> { if (x instanceof PairTPHsmallerTPH) { this.genIns.add(x);}} ); results.forEach(x -> {
} if (x instanceof PairTPHsmallerTPH) {
this.genIns.add(x);
public boolean contains(ResultPair toCheck) { }
return this.results.contains(toCheck); });
} }
public void remove(ResultPair toCheck) { public boolean contains(ResultPair toCheck) {
results.remove(toCheck); return this.results.contains(toCheck);
}
public void remove(ResultPair toCheck) {
results.remove(toCheck);
}
public ResolvedType resolveType(RefTypeOrTPHOrWildcardOrGeneric type) {
if (type instanceof TypePlaceholder)
return new Resolver(this).resolve((TypePlaceholder) type);
if (type instanceof GenericRefType) return new ResolvedType(type, new HashSet<>());
if (type instanceof RefType) {
RelatedTypeWalker related = new RelatedTypeWalker(null, this);
type.accept(related);
return new ResolvedType(type, related.relatedTPHs);
} else {
throw new NotImplementedException();
//return new ResolvedType(type,new HashSet<>());
} }
}
public ResolvedType resolveType(RefTypeOrTPHOrWildcardOrGeneric type) { public String toString() {
if(type instanceof TypePlaceholder) var results = new ArrayList<>(this.results);
return new Resolver(this).resolve((TypePlaceholder)type); results.sort(
if(type instanceof GenericRefType)return new ResolvedType(type, new HashSet<>()); Comparator
if(type instanceof RefType) { .comparingInt((ResultPair o) -> o.getLeft().toString().length())
RelatedTypeWalker related = new RelatedTypeWalker(null, this); .thenComparing(o -> o.getLeft().toString())
type.accept(related); .thenComparingInt(o -> o.getRight().toString().length())
return new ResolvedType(type, related.relatedTPHs); .thenComparing(o -> o.getRight().toString())
} else { );
throw new NotImplementedException(); return results.toString();
//return new ResolvedType(type,new HashSet<>()); }
}
@Override
public boolean equals(Object o) {
if (o instanceof ResultSet other) {
// sort both result lists
var thisElements = new ArrayList<>(this.results);
thisElements.sort(Ordering.usingToString());
var otherElements = new ArrayList<>(other.results);
otherElements.sort(Ordering.usingToString());
return thisElements.equals(otherElements);
} else {
return false;
} }
}
public String toString() { @Override
return results.toString(); public int hashCode() {
} return results.hashCode();
}
@Override @Override
public boolean equals(Object o) { public SerialMap toSerial(KeyStorage keyStorage) {
if (o instanceof ResultSet) { SerialMap serialized = new SerialMap();
ResultSet other = (ResultSet)o; serialized.put("results", SerialList.fromMapped(results, result -> result.toSerial(keyStorage)));
return this.results.equals(other.results); return serialized;
} else { }
return false;
}
}
@Override public static ResultSet fromSerial(SerialMap data, UnifyContext context) {
public int hashCode() { var resultsData = data.getList("results").assertListOfMaps();
return results.hashCode(); return new ResultSet(resultsData.stream().map(resultData -> ResultPair.fromSerial(resultData, context)).collect(Collectors.toSet()));
} }
} }
class Resolver implements ResultSetVisitor { class Resolver implements ResultSetVisitor {
private final ResultSet result; private final ResultSet result;
private TypePlaceholder toResolve; private TypePlaceholder toResolve;
private RefTypeOrTPHOrWildcardOrGeneric resolved; private RefTypeOrTPHOrWildcardOrGeneric resolved;
private final Set<GenericInsertPair> additionalTPHs = new HashSet<>(); private final Set<GenericInsertPair> additionalTPHs = new HashSet<>();
private ResultPair<?,?> currentPair; private ResultPair<?, ?> currentPair;
public Resolver(ResultSet resultPairs){ public Resolver(ResultSet resultPairs) {
this.result = resultPairs; this.result = resultPairs;
}
public ResolvedType resolve(TypePlaceholder tph) {
toResolve = tph;
resolved = null;
System.out.println(tph.toString());
for (ResultPair<?, ?> resultPair : result.results) {
if (resultPair instanceof PairTPHEqualTPH && ((PairTPHEqualTPH) resultPair).getLeft().equals(toResolve)) {
currentPair = resultPair;
return resolve(((PairTPHEqualTPH) resultPair).getRight());
}
}
for (ResultPair<?, ?> resultPair : result.results) {
currentPair = resultPair;
resultPair.accept(this);
}
if (resolved == null) {//TPH kommt nicht im Result vor:
resolved = tph;
} }
public ResolvedType resolve(TypePlaceholder tph){ ResolvedType result = new ResolvedType(resolved, additionalTPHs);//resolved;
toResolve = tph; result.setResultPair(currentPair);
resolved = null; return result;
System.out.println(tph.toString()); }
for(ResultPair<?,?> resultPair : result.results) {
if(resultPair instanceof PairTPHEqualTPH && ((PairTPHEqualTPH) resultPair).getLeft().equals(toResolve)){
currentPair = resultPair;
return resolve(((PairTPHEqualTPH) resultPair).getRight());
}
}
for(ResultPair<?,?> resultPair : result.results){
currentPair = resultPair;
resultPair.accept(this);
}
if(resolved==null){//TPH kommt nicht im Result vor:
resolved = tph;
}
ResolvedType result = new ResolvedType(resolved, additionalTPHs);//resolved; @Override
result.setResultPair(currentPair); public void visit(PairTPHsmallerTPH p) {
return result; currentPair = p;
if (p.left.equals(toResolve)) {
additionalTPHs.add(new GenericInsertPair(p.left, p.right));
additionalTPHs.addAll(new RelatedTypeWalker(p.right, result).relatedTPHs);
} }
if (p.right.equals(toResolve))
additionalTPHs.addAll(new RelatedTypeWalker(p.left, result).relatedTPHs);
}
@Override @Override
public void visit(PairTPHsmallerTPH p) { public void visit(PairTPHequalRefTypeOrWildcardType p) {
currentPair = p; currentPair = p;
if(p.left.equals(toResolve)){ if (p.left.equals(toResolve)) {
additionalTPHs.add(new GenericInsertPair(p.left, p.right)); resolved = p.right;
additionalTPHs.addAll(new RelatedTypeWalker(p.right, result).relatedTPHs); RelatedTypeWalker related = new RelatedTypeWalker(null, result);
} p.right.accept(related);
if(p.right.equals(toResolve)) additionalTPHs.addAll(related.relatedTPHs);
additionalTPHs.addAll(new RelatedTypeWalker(p.left, result).relatedTPHs);
} }
}
@Override @Override
public void visit(PairTPHequalRefTypeOrWildcardType p) { public void visit(PairTPHEqualTPH p) {
currentPair = p; //Do nothing. Dieser Fall wird in der resolve-Methode abgefangen
if(p.left.equals(toResolve)){ }
resolved = p.right;
RelatedTypeWalker related = new RelatedTypeWalker(null, result);
p.right.accept(related);
additionalTPHs.addAll(related.relatedTPHs);
}
}
@Override @Override
public void visit(PairTPHEqualTPH p) { public void visit(RefType refType) {
//Do nothing. Dieser Fall wird in der resolve-Methode abgefangen
}
@Override }
public void visit(RefType refType) {
} @Override
public void visit(GenericRefType genericRefType) {
@Override }
public void visit(GenericRefType genericRefType) {
} @Override
public void visit(SuperWildcardType superWildcardType) {
@Override }
public void visit(SuperWildcardType superWildcardType) {
} @Override
public void visit(TypePlaceholder typePlaceholder) {
@Override }
public void visit(TypePlaceholder typePlaceholder) {
} @Override
public void visit(ExtendsWildcardType extendsWildcardType) {
@Override }
public void visit(ExtendsWildcardType extendsWildcardType) {
}
} }
@@ -161,149 +198,150 @@ class Resolver implements ResultSetVisitor {
@SuppressWarnings("rawtypes") @SuppressWarnings("rawtypes")
class TPHResolver implements ResultSetVisitor { class TPHResolver implements ResultSetVisitor {
private final TypePlaceholder tph; private final TypePlaceholder tph;
Set<GenericInsertPair> resolved = new HashSet<>(); Set<GenericInsertPair> resolved = new HashSet<>();
private final ResultSet resultSet; private final ResultSet resultSet;
TPHResolver(TypePlaceholder tph, ResultSet resultSet){ TPHResolver(TypePlaceholder tph, ResultSet resultSet) {
this.resultSet = resultSet; this.resultSet = resultSet;
this.tph = tph; this.tph = tph;
for(ResultPair p : resultSet.results){ for (ResultPair p : resultSet.results) {
p.accept(this); p.accept(this);
}
if(resolved.size() == 0){
resolved.add(new GenericInsertPair(tph, null));
}
} }
if (resolved.size() == 0) {
@Override resolved.add(new GenericInsertPair(tph, null));
public void visit(PairTPHsmallerTPH p) {
if(p.left.equals(tph) || p.right.equals(tph)){
resolved.add(new GenericInsertPair(p.left, p.right));
}
} }
}
@Override @Override
public void visit(PairTPHequalRefTypeOrWildcardType p) { public void visit(PairTPHsmallerTPH p) {
TypePlaceholder otherSide = null; if (p.left.equals(tph) || p.right.equals(tph)) {
if(p.right.equals(tph)){ resolved.add(new GenericInsertPair(p.left, p.right));
otherSide = p.left;
}
if(otherSide != null){
Set<ResultPair> newResultSet = new HashSet<>(this.resultSet.results);
newResultSet.remove(p);
resolved.addAll(new TPHResolver(otherSide, new ResultSet(newResultSet)).resolved);
}
} }
}
@Override @Override
public void visit(PairTPHEqualTPH p) { public void visit(PairTPHequalRefTypeOrWildcardType p) {
//ignorieren. Wird vom Resolver behandelt TypePlaceholder otherSide = null;
if (p.right.equals(tph)) {
otherSide = p.left;
} }
if (otherSide != null) {
@Override Set<ResultPair> newResultSet = new HashSet<>(this.resultSet.results);
public void visit(RefType refType) { newResultSet.remove(p);
resolved.addAll(new TPHResolver(otherSide, new ResultSet(newResultSet)).resolved);
} }
}
@Override @Override
public void visit(GenericRefType genericRefType) { public void visit(PairTPHEqualTPH p) {
//ignorieren. Wird vom Resolver behandelt
}
} @Override
public void visit(RefType refType) {
@Override }
public void visit(SuperWildcardType superWildcardType) {
} @Override
public void visit(GenericRefType genericRefType) {
@Override }
public void visit(TypePlaceholder typePlaceholder) {
} @Override
public void visit(SuperWildcardType superWildcardType) {
@Override }
public void visit(ExtendsWildcardType extendsWildcardType) {
} @Override
public void visit(TypePlaceholder typePlaceholder) {
}
@Override
public void visit(ExtendsWildcardType extendsWildcardType) {
}
} }
@SuppressWarnings("rawtypes") @SuppressWarnings("rawtypes")
class RelatedTypeWalker implements ResultSetVisitor { class RelatedTypeWalker implements ResultSetVisitor {
final Set<GenericInsertPair> relatedTPHs = new HashSet<>(); final Set<GenericInsertPair> relatedTPHs = new HashSet<>();
private final TypePlaceholder toResolve; private final TypePlaceholder toResolve;
private final ResultSet resultSet; private final ResultSet resultSet;
/** /**
* Läuft über das resultSet und speichert alle TPHs, welche mit start in Verbindung stehen * Läuft über das resultSet und speichert alle TPHs, welche mit start in Verbindung stehen
* @param start - kann null sein, wenn der Walker für einen RefType benutzt wird *
* @param resultSet * @param start - kann null sein, wenn der Walker für einen RefType benutzt wird
*/ * @param resultSet
RelatedTypeWalker(TypePlaceholder start, ResultSet resultSet){ */
this.toResolve = start; RelatedTypeWalker(TypePlaceholder start, ResultSet resultSet) {
this.resultSet = resultSet; this.toResolve = start;
int resolved = 0; this.resultSet = resultSet;
do{ int resolved = 0;
resolved = relatedTPHs.size(); do {
for(ResultPair p : resultSet.results){ resolved = relatedTPHs.size();
p.accept(this); for (ResultPair p : resultSet.results) {
p.accept(this); p.accept(this);
} p.accept(this);
}while(resolved - relatedTPHs.size() > 0); }
} } while (resolved - relatedTPHs.size() > 0);
}
@Override @Override
public void visit(PairTPHsmallerTPH p) { public void visit(PairTPHsmallerTPH p) {
if(p.getRight().equals(toResolve)){ if (p.getRight().equals(toResolve)) {
relatedTPHs.addAll(new TPHResolver(p.right, resultSet).resolved); relatedTPHs.addAll(new TPHResolver(p.right, resultSet).resolved);
//relatedTPHs.addAll(new RelatedTypeWalker(p.right, resultSet).relatedTPHs); //relatedTPHs.addAll(new RelatedTypeWalker(p.right, resultSet).relatedTPHs);
}
if(p.getLeft().equals(toResolve)){
relatedTPHs.addAll(new TPHResolver(p.left, resultSet).resolved);
//relatedTPHs.addAll(new RelatedTypeWalker(p.left, resultSet).relatedTPHs);
}
} }
if (p.getLeft().equals(toResolve)) {
relatedTPHs.addAll(new TPHResolver(p.left, resultSet).resolved);
//relatedTPHs.addAll(new RelatedTypeWalker(p.left, resultSet).relatedTPHs);
}
}
@Override @Override
public void visit(PairTPHequalRefTypeOrWildcardType p) { public void visit(PairTPHequalRefTypeOrWildcardType p) {
if(p.getLeft().equals(toResolve)){ if (p.getLeft().equals(toResolve)) {
p.getRight().accept(this); p.getRight().accept(this);
}
} }
}
@Override @Override
public void visit(PairTPHEqualTPH p) { public void visit(PairTPHEqualTPH p) {
//Kann ignoriert werden. Diese Fälle werden vom Resolver behandelt //Kann ignoriert werden. Diese Fälle werden vom Resolver behandelt
} }
/* /*
Die folgenden Funktionen fügen alle TPHs an die relatedTPHs an, denen sie begegnen: Die folgenden Funktionen fügen alle TPHs an die relatedTPHs an, denen sie begegnen:
Das wird verwendet, wenn alle relatedTPHs aus den Parametern eines RefTypes angefügt werden sollen Das wird verwendet, wenn alle relatedTPHs aus den Parametern eines RefTypes angefügt werden sollen
*/ */
@Override @Override
public void visit(RefType refType) { public void visit(RefType refType) {
for(RefTypeOrTPHOrWildcardOrGeneric param : refType.getParaList()){ for (RefTypeOrTPHOrWildcardOrGeneric param : refType.getParaList()) {
param.accept(this); param.accept(this);
}
} }
}
@Override @Override
public void visit(SuperWildcardType superWildcardType) { public void visit(SuperWildcardType superWildcardType) {
superWildcardType.getInnerType().accept(this); superWildcardType.getInnerType().accept(this);
} }
@Override @Override
public void visit(TypePlaceholder typePlaceholder) { public void visit(TypePlaceholder typePlaceholder) {
relatedTPHs.addAll(new TPHResolver(typePlaceholder, resultSet).resolved); relatedTPHs.addAll(new TPHResolver(typePlaceholder, resultSet).resolved);
} }
@Override @Override
public void visit(ExtendsWildcardType extendsWildcardType) { public void visit(ExtendsWildcardType extendsWildcardType) {
extendsWildcardType.getInnerType().accept(this); extendsWildcardType.getInnerType().accept(this);
} }
@Override @Override
public void visit(GenericRefType genericRefType) { public void visit(GenericRefType genericRefType) {
} }
} }

View File

@@ -1,6 +1,7 @@
//PL 2018-12-19: Merge chekcen //PL 2018-12-19: Merge chekcen
package de.dhbwstuttgart.typeinference.typeAlgo; package de.dhbwstuttgart.typeinference.typeAlgo;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import java.util.*; import java.util.*;
import java.util.stream.Collectors; import java.util.stream.Collectors;

View File

@@ -0,0 +1,62 @@
package de.dhbwstuttgart.typeinference.unify;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.RecursiveTask;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* An intermediate class for the recursive steps of the TypeUnifyTask:
* It allows cancelling parts of the recursion tree instead of only the whole execution, as before. For
* that to work, all cancellable child tasks must be registered with their parent task when they are created.
*
* @param <T>
*/
public abstract class CancellableTask<T> extends RecursiveTask<T> {
private final AtomicBoolean executionCancelled = new AtomicBoolean(false);
private final List<CancellableTask<?>> childTasks = new ArrayList<>();
private CancellableTask<?> parentTask = null;
/**
* Set the execution for this task and all its (recursive) children to be cancelled
*/
protected void cancelExecution() {
// is this branch already cancelled? Then do nothing
if (this.executionCancelled.get()) return;
executionCancelled.set(true);
this.cancelChildExecution();
}
public void cancelChildExecution() {
for (var childTask : childTasks) {
// no need to cancel a branch that is already finished
if (!childTask.isDone()) {
childTask.cancelExecution();
}
}
}
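/**
* Cancel all sibling branches of this task (via the parent) while leaving this task's own
* cancellation flag unchanged, so the calling branch can still deliver its result.
*/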
protected void cancelSiblingTasks() {
if (this.parentTask != null) {
boolean thisWasCancelledBefore = this.executionCancelled.get();
this.parentTask.cancelChildExecution();
this.executionCancelled.set(thisWasCancelledBefore);
}
}
public Boolean isExecutionCancelled() {
return executionCancelled.get();
}
public void addChildTask(CancellableTask<?> childTask) {
this.childTasks.add(childTask);
childTask.setParentTask(this);
}
private void setParentTask(CancellableTask<?> parentTask) {
this.parentTask = parentTask;
}
}
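As an illustration of the intended wiring (this sketch is not part of the changeset; SearchTask and its logic are made up): a parent registers every fork via addChildTask, and a branch that has what it needs can prune its siblings via cancelSiblingTasks.
// Hypothetical subclass, for illustration only.
class SearchTask extends CancellableTask<Integer> {
    private final int depth;
    SearchTask(int depth) { this.depth = depth; }
    @Override
    protected Integer compute() {
        // Cooperative cancellation: bail out early if this branch was cancelled.
        if (isExecutionCancelled() || depth == 0) return 0;
        SearchTask child = new SearchTask(depth - 1);
        addChildTask(child);      // register the fork so cancellation can reach it
        child.fork();
        if (depth == 1) {
            cancelSiblingTasks(); // prune the other branches of the parent
        }
        return child.join() + 1;
    }
}
Invoked as new ForkJoinPool().invoke(new SearchTask(3)); the point is only to show where addChildTask and cancelSiblingTasks sit in a RecursiveTask-style compute().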

View File

@@ -0,0 +1,64 @@
package de.dhbwstuttgart.typeinference.unify;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.RecursiveTask;
public class ConcurrentSetMergeTask<T> extends RecursiveTask<Set<T>> {
public static <E> Set<E> merge(List<Set<E>> list) {
if (list.isEmpty()) {
return new HashSet<>();
}
var task = new ConcurrentSetMergeTask<>(list, 0, list.size());
return task.compute();
}
private static final int LIST_THRESHOLD = 3;
private static final int ELEMENT_THRESHOLD = 1000;
private final List<Set<T>> list;
private final int start;
private final int end;
private ConcurrentSetMergeTask(List<Set<T>> list, int start, int end) {
this.list = list;
this.start = start;
this.end = end;
}
@Override
protected Set<T> compute() {
int size = end - start;
int totalElements = 0;
for (int i = start+1; i < end; i++) {
totalElements += list.get(i).size();
}
// size will always be at least one
if (size <= LIST_THRESHOLD || totalElements < ELEMENT_THRESHOLD) {
Set<T> result = this.list.get(start);
for (int i = start+1; i < end; i++) {
result.addAll(list.get(i));
}
return result;
} else {
int mid = start + (size / 2);
ConcurrentSetMergeTask<T> leftTask = new ConcurrentSetMergeTask<>(list, start, mid);
ConcurrentSetMergeTask<T> rightTask = new ConcurrentSetMergeTask<>(list, mid, end);
leftTask.fork();
Set<T> rightResult = rightTask.compute();
Set<T> leftResult = leftTask.join();
// Merge results
leftResult.addAll(rightResult);
return leftResult;
}
}
}
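A small call-site sketch (the input sets are invented, not from the changeset) to show the contract of the static entry point; note that the first set in the list is reused as the accumulator and is therefore mutated.
// Illustrative only.
List<Set<String>> partialResults = new ArrayList<>(List.of(
        new HashSet<>(Set.of("A", "B")),
        new HashSet<>(Set.of("B", "C")),
        new HashSet<>(Set.of("D"))));
Set<String> merged = ConcurrentSetMergeTask.merge(partialResults);
// merged now contains A, B, C, D and is the same object as partialResults.get(0)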

View File

@@ -0,0 +1,84 @@
package de.dhbwstuttgart.typeinference.unify;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import java.util.ArrayList;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Generates placeholder names that are unique within one unification context and keeps track of all names already in use
*/
public class PlaceholderRegistry implements ISerializableData {
private final Set<String> existingPlaceholders = ConcurrentHashMap.newKeySet();
private final AtomicInteger placeholderCount = new AtomicInteger();
public ArrayList<PlaceholderType> UnifyTypeFactory_PLACEHOLDERS = new ArrayList<>();
/**
* Add a placeholder name to the set of existing ones; called whenever a new PlaceholderType is created
*
* @param placeholderName The placeholder to add
*/
public void addPlaceholder(String placeholderName) {
this.existingPlaceholders.add(placeholderName);
}
/**
* Generate a fresh placeholder name that is unique within this registry
*
* @return The generated name
*/
public String generateFreshPlaceholderName() {
String name;
do {
int pc = placeholderCount.incrementAndGet();
name = getUppercaseTokenFromInt(pc);
}
while (existingPlaceholders.contains(name));
this.addPlaceholder(name);
return name;
}
public PlaceholderRegistry deepClone() {
PlaceholderRegistry pr2 = new PlaceholderRegistry();
this.existingPlaceholders.forEach(pr2::addPlaceholder);
pr2.UnifyTypeFactory_PLACEHOLDERS.addAll(this.UnifyTypeFactory_PLACEHOLDERS);
pr2.placeholderCount.set(this.placeholderCount.get());
return pr2;
}
/**
* Generate a token consisting of uppercase letters that encodes the value i (bijective base 26, least significant letter first)
*
* @param i The value that will be represented as a token
* @return The generated token
*/
private String getUppercaseTokenFromInt(int i) {
StringBuilder sb = new StringBuilder();
while (i >= 0) {
sb.append((char)(i % 26 + 65));
i = i / 26 - 1;
}
//sb.append(suffix);
return sb.toString();
}
@Override
public String toString() {
return this.existingPlaceholders.toString();
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("ph", new SerialValue<>(new ArrayList<>(this.existingPlaceholders)));
serialized.put("factoryPh", SerialList.fromMapped(this.UnifyTypeFactory_PLACEHOLDERS, t -> t.toSerial(keyStorage)));
return serialized;
}
}
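The name generator is a bijective base-26 scheme over uppercase letters (least significant letter first); a short sketch of the expected behaviour, assuming a fresh registry (this is not a test from the changeset):
PlaceholderRegistry registry = new PlaceholderRegistry();
String first = registry.generateFreshPlaceholderName();  // "B": the counter starts at 1
registry.addPlaceholder("C");                             // pretend "C" is already in use
String next = registry.generateFreshPlaceholderName();   // "D": "C" is skipped as taken
// Internally, getUppercaseTokenFromInt maps 0 -> "A", 25 -> "Z", 26 -> "AA", 27 -> "BA", ...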

View File

@@ -38,14 +38,17 @@ import org.apache.commons.io.output.NullOutputStream;
public class RuleSet implements IRuleSet{ public class RuleSet implements IRuleSet{
Writer logFile; Writer logFile;
final PlaceholderRegistry placeholderRegistry;
public RuleSet() { public RuleSet(PlaceholderRegistry placeholderRegistry) {
super(); super();
logFile = new OutputStreamWriter(new NullOutputStream()); logFile = OutputStreamWriter.nullWriter();
this.placeholderRegistry = placeholderRegistry;
} }
RuleSet(Writer logFile) { RuleSet(Writer logFile, PlaceholderRegistry placeholderRegistry) {
this.logFile = logFile; this.logFile = logFile;
this.placeholderRegistry = placeholderRegistry;
} }
@Override @Override
@@ -864,7 +867,7 @@ public class RuleSet implements IRuleSet{
try { try {
logFile.write("FUNgreater: " + pair + "\n"); logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNred: " + result + "\n"); logFile.write("FUNred: " + result + "\n");
logFile.flush(); // logFile.flush();
} }
catch (IOException e) { catch (IOException e) {
System.out.println("logFile-Error"); System.out.println("logFile-Error");
@@ -939,10 +942,10 @@ public class RuleSet implements IRuleSet{
UnifyType[] freshPlaceholders = new UnifyType[funNLhsType.getTypeParams().size()]; UnifyType[] freshPlaceholders = new UnifyType[funNLhsType.getTypeParams().size()];
for(int i = 0; i < freshPlaceholders.length-1; i++) { for(int i = 0; i < freshPlaceholders.length-1; i++) {
freshPlaceholders[i] = PlaceholderType.freshPlaceholder(); freshPlaceholders[i] = PlaceholderType.freshPlaceholder(placeholderRegistry);
((PlaceholderType)freshPlaceholders[i]).setVariance(inversVariance); ((PlaceholderType)freshPlaceholders[i]).setVariance(inversVariance);
} }
freshPlaceholders[freshPlaceholders.length-1] = PlaceholderType.freshPlaceholder(); freshPlaceholders[freshPlaceholders.length-1] = PlaceholderType.freshPlaceholder(placeholderRegistry);
((PlaceholderType)freshPlaceholders[freshPlaceholders.length-1]).setVariance(variance); ((PlaceholderType)freshPlaceholders[freshPlaceholders.length-1]).setVariance(variance);
result.add(new UnifyPair(funNLhsType.getTypeParams().get(funNLhsType.getTypeParams().size()-1), freshPlaceholders[funNLhsType.getTypeParams().size()-1], PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair())); result.add(new UnifyPair(funNLhsType.getTypeParams().get(funNLhsType.getTypeParams().size()-1), freshPlaceholders[funNLhsType.getTypeParams().size()-1], PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
@@ -960,7 +963,7 @@ public class RuleSet implements IRuleSet{
try { try {
logFile.write("FUNgreater: " + pair + "\n"); logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNgreater: " + result + "\n"); logFile.write("FUNgreater: " + result + "\n");
logFile.flush(); // logFile.flush();
} }
catch (IOException e) { catch (IOException e) {
System.out.println("lofFile-Error"); System.out.println("lofFile-Error");
@@ -988,10 +991,10 @@ public class RuleSet implements IRuleSet{
UnifyType[] freshPlaceholders = new UnifyType[funNRhsType.getTypeParams().size()]; UnifyType[] freshPlaceholders = new UnifyType[funNRhsType.getTypeParams().size()];
for(int i = 0; i < freshPlaceholders.length-1; i++) { for(int i = 0; i < freshPlaceholders.length-1; i++) {
freshPlaceholders[i] = PlaceholderType.freshPlaceholder(); freshPlaceholders[i] = PlaceholderType.freshPlaceholder(placeholderRegistry);
((PlaceholderType)freshPlaceholders[i]).setVariance(inversVariance); ((PlaceholderType)freshPlaceholders[i]).setVariance(inversVariance);
} }
freshPlaceholders[freshPlaceholders.length-1] = PlaceholderType.freshPlaceholder(); freshPlaceholders[freshPlaceholders.length-1] = PlaceholderType.freshPlaceholder(placeholderRegistry);
((PlaceholderType)freshPlaceholders[freshPlaceholders.length-1]).setVariance(variance); ((PlaceholderType)freshPlaceholders[freshPlaceholders.length-1]).setVariance(variance);
result.add(new UnifyPair(freshPlaceholders[funNRhsType.getTypeParams().size()-1], funNRhsType.getTypeParams().get(funNRhsType.getTypeParams().size()-1), PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair())); result.add(new UnifyPair(freshPlaceholders[funNRhsType.getTypeParams().size()-1], funNRhsType.getTypeParams().get(funNRhsType.getTypeParams().size()-1), PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
@@ -1010,7 +1013,7 @@ public class RuleSet implements IRuleSet{
try { try {
logFile.write("FUNgreater: " + pair + "\n"); logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNsmaller: " + result + "\n"); logFile.write("FUNsmaller: " + result + "\n");
logFile.flush(); // logFile.flush();
} }
catch (IOException e) { catch (IOException e) {
System.out.println("lofFile-Error"); System.out.println("lofFile-Error");
@@ -1051,7 +1054,7 @@ public class RuleSet implements IRuleSet{
if(isGen) if(isGen)
result.add(new UnifyPair(rhsType, lhsType, PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair())); result.add(new UnifyPair(rhsType, lhsType, PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
else { else {
UnifyType freshTph = PlaceholderType.freshPlaceholder(); UnifyType freshTph = PlaceholderType.freshPlaceholder(placeholderRegistry);
result.add(new UnifyPair(rhsType, new ExtendsType(freshTph), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair())); result.add(new UnifyPair(rhsType, new ExtendsType(freshTph), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
result.add(new UnifyPair(extendedType, freshTph, PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair())); result.add(new UnifyPair(extendedType, freshTph, PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
} }
@@ -1079,7 +1082,7 @@ public class RuleSet implements IRuleSet{
if(isGen) if(isGen)
result.add(new UnifyPair(rhsType, lhsType, PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair())); result.add(new UnifyPair(rhsType, lhsType, PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
else { else {
UnifyType freshTph = PlaceholderType.freshPlaceholder(); UnifyType freshTph = PlaceholderType.freshPlaceholder(placeholderRegistry);
result.add(new UnifyPair(rhsType, new SuperType(freshTph), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair())); result.add(new UnifyPair(rhsType, new SuperType(freshTph), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
Set<UnifyType> fBounded = pair.getfBounded(); Set<UnifyType> fBounded = pair.getfBounded();
fBounded.add(lhsType); fBounded.add(lhsType);

View File

@@ -1,41 +1,41 @@
package de.dhbwstuttgart.typeinference.unify; package de.dhbwstuttgart.typeinference.unify;
import java.io.FileWriter; import de.dhbwstuttgart.util.Logger;
import java.io.IOException; import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ForkJoinPool; import java.util.concurrent.ForkJoinPool;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.typeinference.constraints.Constraint; import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure; import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair; import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
public class TypeUnify { public class TypeUnify {
private TypeUnify() {}
private static <T> T joinFuture(CompletableFuture<T> future) {
try {
return future.get();
}
catch (InterruptedException | ExecutionException exception) {
throw new RuntimeException(exception);
}
}
/** /**
* unify parallel ohne result modell * unify parallel ohne result modell
* @param undConstrains
* @param oderConstraints
* @param fc
* @param logFile
* @param log
* @param cons
* @return
*/ */
public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) { public static Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks); ForkJoinPool pool = TypeUnify.createThreadPool();
ForkJoinPool pool = new ForkJoinPool(); UnifyContext context = unifyContext.newWithParallel(true).newWithExecutor(pool);
pool.invoke(unifyTask); TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, context, 0);
Set<Set<UnifyPair>> res = unifyTask.join(); Set<Set<UnifyPair>> res = joinFuture(unifyTask.compute());
try { try {
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements + "\n"); unifyContext.logFile().write("\nnoShortendElements: " + TypeUnifyTask.noShortendElements + "\n");
logFile.flush(); // logFile.flush();
} }
catch (IOException e) { catch (IOException e) {
System.err.println("no log-File"); System.err.println("no log-File");
@@ -45,46 +45,32 @@ public class TypeUnify {
/** /**
* unify asynchron mit Rückgabe UnifyResultModel ohne dass alle results gesammelt sind * unify asynchron mit Rückgabe UnifyResultModel ohne dass alle results gesammelt sind
* @param undConstrains
* @param oderConstraints
* @param fc
* @param logFile
* @param log
* @param cons
* @param ret
* @return
*/ */
public UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) { public static UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks); ForkJoinPool pool = TypeUnify.createThreadPool();
ForkJoinPool pool = new ForkJoinPool(); UnifyContext context = unifyContext.newWithExecutor(pool);
pool.invoke(unifyTask); TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, context, 0);
return ret; unifyTask.compute();
return unifyContext.resultModel();
} }
/** /**
* unify parallel mit Rückgabe UnifyResultModel nachdem alle results gesammelt sind * unify parallel mit Rückgabe UnifyResultModel nachdem alle results gesammelt sind
* @param undConstrains
* @param oderConstraints
* @param fc
* @param logFile
* @param log
* @param cons
* @param ret
* @return
*/ */
public UnifyResultModel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) { public static Set<Set<UnifyPair>> unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks); ForkJoinPool pool = TypeUnify.createThreadPool();
ForkJoinPool pool = new ForkJoinPool(); UnifyContext context = unifyContext.newWithParallel(true).newWithExecutor(pool);
pool.invoke(unifyTask); TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, context, 0);
Set<Set<UnifyPair>> res = unifyTask.join(); var result = joinFuture(unifyTask.compute());
try { try {
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n"); unifyContext.logFile().write("\nnoShortendElements: " + TypeUnifyTask.noShortendElements +"\n");
logFile.flush(); // logFile.flush();
} }
catch (IOException e) { catch (IOException e) {
System.err.println("no log-File"); System.err.println("no log-File");
} }
return ret; return result;
} }
/* /*
@@ -97,20 +83,13 @@ public class TypeUnify {
/** /**
* unify sequentiell mit oderconstraints * unify sequentiell mit oderconstraints
* @param undConstrains
* @param oderConstraints
* @param fc
* @param logFile
* @param log
* @param cons
* @return
*/ */
public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) { public static Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0, ret, usedTasks); TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, unifyContext.newWithParallel(false), 0);
Set<Set<UnifyPair>> res = unifyTask.compute(); Set<Set<UnifyPair>> res = joinFuture(unifyTask.compute());
try { try {
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n"); unifyContext.logFile().write("\nnoShortendElements: " + TypeUnifyTask.noShortendElements +"\n");
logFile.flush(); // logFile.flush();
} }
catch (IOException e) { catch (IOException e) {
System.err.println("no log-File"); System.err.println("no log-File");
@@ -118,4 +97,14 @@ public class TypeUnify {
return res; return res;
} }
private static ForkJoinPool createThreadPool() {
Logger.print("Available processors: " + Runtime.getRuntime().availableProcessors());
return new ForkJoinPool(
Runtime.getRuntime().availableProcessors(),
ForkJoinPool.defaultForkJoinWorkerThreadFactory,
null,
false
);
}
} }
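For orientation, a hedged sketch of how the refactored static entry point might now be driven; undConstraints, oderConstraints, fc, resultModel and usedTasks are assumed to exist at the call site, and the UnifyContext constructor used here is the one introduced later in this changeset.
// Call-site sketch only; not code from this changeset.
UnifyContext context = new UnifyContext(
        Writer.nullWriter(),        // logFile
        false,                      // log
        false,                      // parallel; unify() switches this on itself
        resultModel,                // UnifyResultModel, assumed to exist
        usedTasks,                  // UnifyTaskModel, assumed to exist
        new PlaceholderRegistry()); // fresh placeholder names for this unification
Set<Set<UnifyPair>> solutions = TypeUnify.unify(undConstraints, oderConstraints, fc, context);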

View File

@@ -13,54 +13,52 @@ import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair; import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure; import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair; import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import java.util.concurrent.CompletableFuture;
public class TypeUnify2Task extends TypeUnifyTask { public class TypeUnify2Task extends TypeUnifyTask {
Set<Set<UnifyPair>> setToFlatten;
Set<UnifyPair> methodSignatureConstraintUebergabe;
public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks, Set<UnifyPair> methodSignatureConstraintUebergabe) { Set<Set<UnifyPair>> setToFlatten;
super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, usedTasks); Set<UnifyPair> methodSignatureConstraintUebergabe;
this.setToFlatten = setToFlatten;
this.nextSetElement = nextSetElement; public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, UnifyContext context, int rekTiefe, Set<UnifyPair> methodSignatureConstraintUebergabe) {
this.methodSignatureConstraintUebergabe = methodSignatureConstraintUebergabe; super(eq, oderConstraints, fc, context, rekTiefe);
} this.setToFlatten = setToFlatten;
this.nextSetElement = nextSetElement;
Set<UnifyPair> getNextSetElement() { this.methodSignatureConstraintUebergabe = methodSignatureConstraintUebergabe;
return nextSetElement; }
}
public Set<UnifyPair> getNextSetElement() {
@Override return nextSetElement;
protected Set<Set<UnifyPair>> compute() { }
if (one) {
System.out.println("two"); @Override
} public CompletableFuture<Set<Set<UnifyPair>>> compute() {
one = true; if (one) {
Set<Set<UnifyPair>> res = unify2(setToFlatten, eq, oderConstraintsField, fc, parallel, rekTiefeField, methodSignatureConstraintUebergabe); System.out.println("two");
}
one = true;
CompletableFuture<Set<Set<UnifyPair>>> res =
unify2(setToFlatten, eq, oderConstraintsField, fc, context.parallel(), rekTiefeField, methodSignatureConstraintUebergabe);
/*if (isUndefinedPairSetSet(res)) { /*if (isUndefinedPairSetSet(res)) {
return new HashSet<>(); } return new HashSet<>(); }
else else
*/ */
//writeLog("xxx"); //writeLog("xxx");
//noOfThread--; //noOfThread--;
synchronized (usedTasks) { if (this.myIsCancelled()) {
if (this.myIsCancelled()) { return CompletableFuture.completedFuture(new HashSet<>());
return new HashSet<>(); } else {
} return res;
else { }
return res; }
}
}
}
public void closeLogFile() {
try { public void closeLogFile() {
logFile.close();
} try {
catch (IOException ioE) { context.logFile().close();
System.err.println("no log-File" + thNo); } catch (IOException ioE) {
} System.err.println("no log-File");
}
}
}
} }

View File

@@ -0,0 +1,188 @@
package de.dhbwstuttgart.typeinference.unify;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
* A collection of encapsulated (and therefore static) helper functions used to split up the large algorithms in TypeUnifyTask
*/
public class TypeUnifyTaskHelper {
/**
* Filter all topLevelSets for those consisting of a single element that contains only one pair:
* a <. theta,
* theta <. a or
* a =. theta
*/
public static Set<Set<UnifyPair>> getSingleElementSets(ArrayList<Set<? extends Set<UnifyPair>>> topLevelSets) {
return topLevelSets.stream()
.filter(x -> x.size() == 1)
.map(y -> y.stream().findFirst().get()).collect(Collectors.toSet());
}
/**
* Variance determination (start)
* Oder-constraint if there is either no base pair or the base pairs differ => oderConstraint = true;
* variance = 1  => argument variable
* variance = -1 => return variable
* variance = 0  => unclear
* variance = 2  => operator Oder-constraints
*/
public static int calculateVariance(List<Set<UnifyPair>> nextSetasList) {
Optional<Integer> xi = nextSetasList.stream().map(x -> x.stream().filter(y -> (y.getLhsType() instanceof PlaceholderType && !(y.getRhsType() instanceof PlaceholderType)))
.filter(z -> ((PlaceholderType) z.getLhsType()).getVariance() != 0)
.map(c -> ((PlaceholderType) c.getLhsType()).getVariance())
.reduce((a, b) -> {
if (a.intValue() == b.intValue()) return a;
else return 0;
})) //2 kommt insbesondere bei Oder-Constraints vor
.filter(Optional::isPresent)
.map(Optional::get)
.findAny();
return xi.orElse(0);
}
/**
* Determine the variance for an Oder-constraint: inspect the ground base pairs of the first
* alternative and fall back to 2 (operator call without receiver) if no variance can be derived.
*/
public static int calculateOderConstraintVariance(List<Set<UnifyPair>> nextSetAsList) {
Optional<Integer> optVariance =
nextSetAsList
.getFirst()
.stream()
.filter(x -> x.getGroundBasePair().getLhsType() instanceof PlaceholderType &&
!(x.getRhsType() instanceof PlaceholderType) &&
x.getPairOp() == PairOperator.EQUALSDOT)
.map(x ->
((PlaceholderType) x.getGroundBasePair().getLhsType()).getVariance())
.reduce((n, m) -> (n != 0) ? n : m);
//For operator calls the variance is set to 2,
//because no receiver exists, i.e. there is no x.getGroundBasePair().getLhsType() instanceof PlaceholderType
//With variance = 2, all elements of the cartesian product are processed
return optVariance.orElse(2);
}
/**
* Find the first occurrence (if any) of a UnifyPair with operator EQUALSDOT while having
* one side equal to its base pair counterpart
*/
public static Optional<UnifyPair> findEqualityConstrainedUnifyPair(Set<UnifyPair> nextSetElement) {
return nextSetElement.stream().filter(x ->
x.getPairOp()
.equals(PairOperator.EQUALSDOT))
.filter(x -> //Sicherstellen, dass bei a = ty a auch wirklich die gesuchte Typvariable ist
x.getLhsType()
.equals(x.getBasePair().getLhsType()) ||
x.getLhsType()
.equals(x.getBasePair().getRhsType())
).findFirst();
}
/**
* Find all unifyPairs that associate the type variable identified in origPair with any concrete type. That means:
* If "a = type" is in origPair, then we get all UnifyPairs that contain either "a < typeA" or "typeB < a"
*/
public static Set<UnifyPair> findConstraintsWithSameTVAssociation(UnifyPair origPair, Set<Set<UnifyPair>> singleElementSets) {
UnifyType tyVar = origPair.getLhsType();
if (!(tyVar instanceof PlaceholderType)) {
tyVar = origPair.getRhsType();
}
UnifyType tyVarEF = tyVar;
return singleElementSets.stream()
.map(xx ->
xx.iterator().next())
.filter(x ->
(x.getLhsType().equals(tyVarEF) && !(x.getRhsType() instanceof PlaceholderType))
||
(x.getRhsType().equals(tyVarEF) && !(x.getLhsType() instanceof PlaceholderType))
)
.collect(Collectors.toCollection(HashSet::new));
}
/**
* Check whether every pair in the first element of nextSetAsList has the same (non-null) base pair.
*/
public static boolean doesFirstNextSetHasSameBase(List<Set<UnifyPair>> nextSetAsList) {
if (nextSetAsList.isEmpty()) {
return false;
}
UnifyPair firstBasePair = null;
for (var unifyPair : nextSetAsList.getFirst().stream().toList()) {
var basePair = unifyPair.getBasePair();
// if any base pair is null, there is NOT always the same base!
if (basePair == null) {
return false;
}
if (firstBasePair == null) {
firstBasePair = basePair;
}
else if (!basePair.equals(firstBasePair)) {
return false;
}
}
return true;
}
/**
* Extracts data from every element in the nested result sets and unions it; which data is extracted
* is determined by the given extractor function
*/
public static Set<UnifyPair> collectFromThreadResult (
Set<Set<UnifyPair>> currentThreadResult,
Function<UnifyPair, Set<UnifyPair>> extractor
) {
return currentThreadResult.stream()
.map(b ->
b.stream()
.map(extractor)
.reduce((y, z) -> {
y.addAll(z);
return y;
})
.orElse(new HashSet<>()))
.reduce((y, z) -> {
y.addAll(z);
return y;
})
.orElse(new HashSet<>());
}
/**
* Extract a list of PlaceholderTypes from a set of pairs, such that each resulting element:
* - is the LHS of a pair,
* - is a PlaceholderType, and
* - has a base pair side that is a PlaceholderType with the same name
*/
public static List<PlaceholderType> extractMatchingPlaceholderTypes(Set<UnifyPair> pairs) {
return pairs.stream()
.filter(x -> {
UnifyType lhs = x.getLhsType();
UnifyType baseLhs = x.getBasePair().getLhsType();
UnifyType baseRhs = x.getBasePair().getRhsType();
return (lhs instanceof PlaceholderType) &&
((baseLhs instanceof PlaceholderType && lhs.getName().equals(baseLhs.getName())) ||
(baseRhs instanceof PlaceholderType && lhs.getName().equals(baseRhs.getName())));
})
.map(x -> (PlaceholderType) x.getLhsType())
.collect(Collectors.toCollection(ArrayList::new));
}
}
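collectFromThreadResult is a two-level flatten-and-union; the reduce-based formulation above mutates the intermediate sets via addAll, but the net effect is the same as the following flatMap sketch (hypothetical helper, not part of the changeset):
// Equivalent reading of collectFromThreadResult, for illustration only.
public static Set<UnifyPair> collectFromThreadResultFlat(
        Set<Set<UnifyPair>> currentThreadResult,
        Function<UnifyPair, Set<UnifyPair>> extractor) {
    return currentThreadResult.stream()
            .flatMap(Set::stream)   // every UnifyPair in every inner set
            .map(extractor)         // extract a set of pairs from each one
            .flatMap(Set::stream)   // union all extracted sets
            .collect(Collectors.toCollection(HashSet::new));
}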

View File

@@ -0,0 +1,70 @@
package de.dhbwstuttgart.typeinference.unify;
import java.io.Writer;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.atomic.AtomicInteger;
public record UnifyContext(
// main log file of a unification
Writer logFile,
// if logs should be made
Boolean log,
// if the unify algorithm should run in parallel
Boolean parallel,
// the model for storing calculated results
UnifyResultModel resultModel,
// the executor used for thread management in parallel execution
ExecutorService executor,
// a generator for new placeholders in this unify context
PlaceholderRegistry placeholderRegistry,
// a control structure to cancel the unification early
UnifyTaskModel usedTasks
) {
public UnifyContext(
Writer logFile,
Boolean log,
Boolean parallel,
UnifyResultModel resultModel,
UnifyTaskModel usedTasks,
ExecutorService executor,
PlaceholderRegistry placeholderRegistry
) {
this(logFile, log, parallel, resultModel, executor, placeholderRegistry, usedTasks);
}
public UnifyContext(
Writer logFile,
Boolean log,
Boolean parallel,
UnifyResultModel resultModel,
UnifyTaskModel usedTasks,
PlaceholderRegistry placeholderRegistry
) {
this(logFile, log, parallel, resultModel, usedTasks, ForkJoinPool.commonPool(), placeholderRegistry);
}
/*
* Shortcuts for creating a similar context with some properties changed. Combined with the record's
* final components, this means a UnifyContext is effectively handled as an immutable value object.
*/
public UnifyContext newWithLogFile(Writer logFile) {
return new UnifyContext(logFile, log, parallel, resultModel, executor, placeholderRegistry, usedTasks);
}
public UnifyContext newWithParallel(boolean parallel) {
return new UnifyContext(logFile, log, parallel, resultModel, executor, placeholderRegistry, usedTasks);
}
public UnifyContext newWithExecutor(ExecutorService executor) {
return new UnifyContext(logFile, log, parallel, resultModel, executor, placeholderRegistry, usedTasks);
}
public UnifyContext newWithResultModel(UnifyResultModel resultModel) {
return new UnifyContext(logFile, log, parallel, resultModel, executor, placeholderRegistry, usedTasks);
}
}
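Since UnifyContext is a record with final components, the newWith... helpers follow a copy-on-write pattern; a brief usage sketch (baseContext is assumed to exist):
// Derive specialised contexts without mutating the original one.
UnifyContext parallelCtx = baseContext
        .newWithParallel(true)
        .newWithExecutor(ForkJoinPool.commonPool());
UnifyContext quietCtx = baseContext.newWithLogFile(Writer.nullWriter());
// baseContext itself is unchanged, so sequential callers keep their settings.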

View File

@@ -36,19 +36,19 @@ public class UnifyResultModel {
listeners.remove(listenerToRemove); listeners.remove(listenerToRemove);
} }
public void notify(Set<Set<UnifyPair>> eqPrimePrimeSet) { public void notify(Set<Set<UnifyPair>> eqPrimePrimeSet, UnifyContext context) {
Set<Set<UnifyPair>> eqPrimePrimeSetRet = eqPrimePrimeSet.stream().map(x -> { Set<Set<UnifyPair>> eqPrimePrimeSetRet = eqPrimePrimeSet.stream().map(x -> {
Optional<Set<UnifyPair>> res = new RuleSet().subst(x.stream().map(y -> { Optional<Set<UnifyPair>> res = new RuleSet(context.placeholderRegistry()).subst(x.stream().map(y -> {
if (y.getPairOp() == PairOperator.SMALLERDOTWC) y.setPairOp(PairOperator.EQUALSDOT); if (y.getPairOp() == PairOperator.SMALLERDOTWC) y.setPairOp(PairOperator.EQUALSDOT);
return y; //alle Paare a <.? b erden durch a =. b ersetzt return y; //alle Paare a <.? b erden durch a =. b ersetzt
}).collect(Collectors.toCollection(HashSet::new))); }).collect(Collectors.toCollection(HashSet::new)));
if (res.isPresent()) {//wenn subst ein Erg liefert wurde was veraendert if (res.isPresent()) {//wenn subst ein Erg liefert wurde was veraendert
return new TypeUnifyTask().applyTypeUnificationRules(res.get(), fc); return new TypeUnifyTask(context).applyTypeUnificationRules(res.get(), fc);
} }
else return x; //wenn nichts veraendert wurde wird x zurueckgegeben else return x; //wenn nichts veraendert wurde wird x zurueckgegeben
}).collect(Collectors.toCollection(HashSet::new)); }).collect(Collectors.toCollection(HashSet::new));
List<ResultSet> newResult = eqPrimePrimeSetRet.stream().map(unifyPairs -> List<ResultSet> newResult = eqPrimePrimeSetRet.stream().map(unifyPairs ->
new ResultSet(UnifyTypeFactory.convert(unifyPairs, de.dhbwstuttgart.typeinference.constraints.Pair.generateTPHMap(cons)))) new ResultSet(UnifyTypeFactory.convert(unifyPairs, de.dhbwstuttgart.typeinference.constraints.Pair.generateTPHMap(cons), context.placeholderRegistry())))
.collect(Collectors.toList()); .collect(Collectors.toList());
UnifyResultEvent evt = new UnifyResultEvent(newResult); UnifyResultEvent evt = new UnifyResultEvent(newResult);

View File

@@ -0,0 +1,228 @@
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
import de.dhbwstuttgart.exceptions.UnifyCancelException;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.util.Tuple;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;
public class Variance0Case extends VarianceCase {
protected final int variance = 0;
protected Variance0Case(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
super(isOderConstraint, typeUnifyTask, context);
}
@Override
public void selectNextData(
TypeUnifyTask typeUnifyTask,
List<Set<UnifyPair>> nextSetAsList,
Optional<UnifyPair> optOrigPair
) {
//wenn a <. theta dann ist ein maximales Element sehr wahrscheinlich
//wenn theta <. a dann ist ein minimales Element sehr wahrscheinlich
if (!this.isOderConstraint && optOrigPair != null && optOrigPair.isPresent()) {
if (optOrigPair.get().getBasePair().getLhsType() instanceof PlaceholderType) {
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
} else {
a = typeUnifyTask.oup.min(nextSetAsList.iterator());
}
nextSetAsList.remove(a);
} else if (this.isOderConstraint) {
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
nextSetAsList.remove(a);
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
} else {
a = nextSetAsList.removeFirst();
}
Set<UnifyPair> finalA = a;
if (!this.isOderConstraint && optOrigPair != null && optOrigPair.isPresent()) {
if (optOrigPair.get().getBasePair().getLhsType() instanceof PlaceholderType) {
nextSetasListRest = typeUnifyTask.oup.maxElements(
nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != 1).toList()
);
} else {
nextSetasListRest = typeUnifyTask.oup.minElements(
nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != -1).toList()
);
}
nextSetAsList.remove(a);
} else if (this.isOderConstraint) {
nextSetasListRest = typeUnifyTask.oup.maxElements(
nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != 1).toList()
);
} else {
for (int i = 0; i < Math.min(nextSetAsList.size(), 5); i++) {
nextSetasListRest.add(nextSetAsList.removeFirst());
}
}
}
@Override
public CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> computeParallel(
Set<Set<UnifyPair>> elems,
Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
IFiniteClosure fc,
int rekTiefe,
Set<UnifyPair> methodSignatureConstraint,
List<Set<UnifyPair>> nextSetAsList,
Set<UnifyPair> sameEqSet,
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> aParDef
) {
CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> resultValues = CompletableFuture.completedFuture(new Tuple<>(
new HashSet<>(), new HashSet<>()
));
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
newElemsOrig.add(a);
/* FORK ANFANG */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
typeUnifyTask.addChildTask(forkOrig);
// schedule compute() on another thread
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
resultValues = resultValues.thenCombine(forkOrigFuture,
(prevResults, currentThreadResult) -> {
forkOrig.writeLog("final Orig 0");
forkOrig.closeLogFile();
return new Tuple<>(currentThreadResult, prevResults.getSecond());
});
//forks.add(forkOrig);
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
}
/* FORK ENDE */
writeLog("a in " + variance + " " + a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
while (!nextSetasListRest.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
nextSetAsList.remove(nSaL);
writeLog("0 RM" + nSaL.toString());
if (!this.isOderConstraint) {
//ueberpruefung ob zu a =. ty \in nSaL in sameEqSet ein Widerspruch besteht
if (!sameEqSet.isEmpty() && !typeUnifyTask.checkNoContradiction(nSaL, sameEqSet, result)) {
TypeUnifyTask.noShortendElements++;
continue;
}
} else {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) nSaL).getExtendConstraint());
}
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
typeUnifyTask.addChildTask(fork);
// schedule compute() on another thread
CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
resultValues = resultValues.thenCombine(forkFuture,
(prevResults, fork_res) -> {
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
}
writeLog("fork_res: " + fork_res.toString());
writeLog(Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString());
prevResults.getSecond().add(fork_res);
if (!typeUnifyTask.isUndefinedPairSetSet(fork_res)) {
aParDef.add(fork.getNextSetElement());
}
fork.writeLog("final 0");
fork.closeLogFile();
return prevResults;
}
);
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
}
}
return resultValues;
}
@Override
public void applyComputedResults(
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> currentThreadResult,
Set<UnifyPair> compResult,
Set<UnifyPair> compRes
) {
writeLog("RES var=0 ADD:" + result.toString() + " " + currentThreadResult.toString());
result.addAll(currentThreadResult);
}
@Override
public boolean eraseInvalidSets(
int rekTiefe,
Set<Set<UnifyPair>> aParDef,
List<Set<UnifyPair>> nextSetAsList
) {
if (!this.isOderConstraint) {
return true;
} else {
nextSetAsList.removeAll(nextSetasListOderConstraints);
nextSetasListOderConstraints = new ArrayList<>();
writeLog("Removed: " + nextSetasListOderConstraints);
final List<Set<UnifyPair>> smallerSetasList = typeUnifyTask.oup.smallerThan(a, nextSetAsList);
List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
.filter(x -> !((Constraint<UnifyPair>) x).isInherited())
.collect(Collectors.toCollection(ArrayList::new));
final List<Set<UnifyPair>> notErased = new ArrayList<>();
notInherited.forEach(x -> notErased.addAll(typeUnifyTask.oup.smallerEqThan(x, smallerSetasList)));
List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList);
erased.removeAll(notErased);
nextSetAsList.removeAll(erased);
writeLog("Removed: " + erased);
writeLog("Not Removed: " + nextSetAsList);
for (Set<UnifyPair> aPar : aParDef) {
nextSetAsList.removeAll(nextSetasListOderConstraints);
nextSetasListOderConstraints = new ArrayList<>();
writeLog("Removed: " + nextSetasListOderConstraints);
smallerSetasList.clear();
smallerSetasList.addAll(typeUnifyTask.oup.smallerThan(aPar, nextSetAsList));
notInherited = smallerSetasList.stream()
.filter(x -> !((Constraint<UnifyPair>) x).isInherited())
.collect(Collectors.toCollection(ArrayList::new));
notErased.clear();
notInherited.forEach(x -> notErased.addAll(typeUnifyTask.oup.smallerEqThan(x, smallerSetasList)));
erased = new ArrayList<>(smallerSetasList);
erased.removeAll(notErased);
nextSetAsList.removeAll(erased);
writeLog("Removed: " + erased);
writeLog("Not Removed: " + nextSetAsList);
}
}
return false;
}
}
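The fork pattern used in computeParallel (and repeated in the other variance cases) is: schedule compute() on the context executor, flatten the nested future with thenCompose, and fold each fork's result into an accumulating future with thenCombine. Stripped of the unification details, the shape is roughly the following (generic names, illustrative only):
// Generic shape of the supplyAsync / thenCompose / thenCombine pattern used above.
ExecutorService executor = ForkJoinPool.commonPool();
CompletableFuture<List<Integer>> acc = CompletableFuture.completedFuture(new ArrayList<>());
for (int i = 0; i < 3; i++) {
    final int n = i;
    // compute() itself returns a future, so supplyAsync yields a nested future; thenCompose flattens it.
    CompletableFuture<Integer> forkFuture = CompletableFuture
            .supplyAsync(() -> CompletableFuture.completedFuture(n * n), executor)
            .thenCompose(f -> f);
    acc = acc.thenCombine(forkFuture, (results, value) -> {
        results.add(value); // fold this fork's result into the accumulator
        return results;
    });
}
List<Integer> all = acc.join(); // [0, 1, 4] once every fork has completed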

View File

@@ -0,0 +1,214 @@
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
import de.dhbwstuttgart.exceptions.UnifyCancelException;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.util.Tuple;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
public class Variance1Case extends VarianceCase {
protected final int variance = 1;
protected Variance1Case(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
super(isOderConstraint, typeUnifyTask, context);
}
@Override
public void selectNextData(
TypeUnifyTask typeUnifyTask,
List<Set<UnifyPair>> nextSetAsList,
Optional<UnifyPair> optOrigPair
) {
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
writeLog("Max: a in " + variance + " " + a);
nextSetAsList.remove(a);
if (this.isOderConstraint) {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
}
writeLog("nextSetasListOderConstraints 1: " + nextSetasListOderConstraints);
//Alle maximale Elemente in nextSetasListRest bestimmen
//nur für diese wird parallele Berechnung angestossen.
Set<UnifyPair> finalA = a;
nextSetasListRest = typeUnifyTask.oup.maxElements(
nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != 1).toList()
);
}
@Override
public CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> computeParallel(
Set<Set<UnifyPair>> elems,
Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
IFiniteClosure fc,
int rekTiefe,
Set<UnifyPair> methodSignatureConstraint,
List<Set<UnifyPair>> nextSetAsList,
Set<UnifyPair> sameEqSet,
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> aParDef
) {
CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> resultValues = CompletableFuture.completedFuture(new Tuple<>(
new HashSet<>(), new HashSet<>()
));
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
newElemsOrig.add(a);
/* FORK ANFANG */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, methodSignatureConstraint);
typeUnifyTask.addChildTask(forkOrig);
// schedule compute() on another thread
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
resultValues = resultValues.thenCombine(forkOrigFuture,
(prevResults, currentThreadResult) -> {
forkOrig.writeLog("final Orig 1");
forkOrig.closeLogFile();
return new Tuple<>(currentThreadResult, prevResults.getSecond());
});
//forks.add(forkOrig);
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
}
/* FORK ENDE */
writeLog("a in " + variance + " " + a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
while (!nextSetasListRest.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
nextSetAsList.remove(nSaL);
writeLog("1 RM" + nSaL.toString());
if (!this.isOderConstraint) {
//ueberpruefung ob zu a =. ty \in nSaL in sameEqSet ein Widerspruch besteht
if (!sameEqSet.isEmpty() && !typeUnifyTask.checkNoContradiction(nSaL, sameEqSet, result)) {
TypeUnifyTask.noShortendElements++;
continue;
}
} else {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) nSaL).getExtendConstraint());
}
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
typeUnifyTask.addChildTask(fork);
// schedule compute() on another thread
CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
resultValues = resultValues.thenCombine(forkFuture,
(prevResults, fork_res) -> {
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
}
writeLog("fork_res: " + fork_res.toString());
writeLog(Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString());
prevResults.getSecond().add(fork_res);
if (!typeUnifyTask.isUndefinedPairSetSet(fork_res)) {
aParDef.add(fork.getNextSetElement());
}
fork.writeLog("final 1");
fork.closeLogFile();
return prevResults;
}
);
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
}
}
return resultValues;
}
@Override
public void applyComputedResults(
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> currentThreadResult,
Set<UnifyPair> compResult,
Set<UnifyPair> compRes
) {
int resOfCompare = typeUnifyTask.oup.compare(compResult, compRes);
if (resOfCompare == -1) {
writeLog("Geloescht result: " + result);
result.clear();
result.addAll(currentThreadResult);
}
else if (resOfCompare == 0) {
result.addAll(currentThreadResult);
}
else if (resOfCompare == 1) {
writeLog("Geloescht currentThreadResult: " + currentThreadResult);
//result = result;
}
}
@Override
public boolean eraseInvalidSets(
int rekTiefe,
Set<Set<UnifyPair>> aParDef,
List<Set<UnifyPair>> nextSetAsList
) {
// System.out.println("");
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
writeLog("aParDef: " + aParDef.toString());
aParDef.add(a);
Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator();
if (this.isOderConstraint) {
nextSetAsList.removeAll(nextSetasListOderConstraints);
nextSetasListOderConstraints = new ArrayList<>();
writeLog("Removed: " + nextSetasListOderConstraints);
while (aParDefIt.hasNext()) {
Set<UnifyPair> a_new = aParDefIt.next();
List<Set<UnifyPair>> smallerSetasList = typeUnifyTask.oup.smallerThan(a_new, nextSetAsList);
writeLog("smallerSetasList: " + smallerSetasList);
List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
.filter(x -> !((Constraint<UnifyPair>) x).isInherited() && !((Constraint<UnifyPair>) x).isImplemented())
.collect(Collectors.toCollection(ArrayList::new));
writeLog("notInherited: " + notInherited + "\n");
List<Set<UnifyPair>> notErased = new ArrayList<>();
notInherited.forEach(x -> {
notErased.addAll(typeUnifyTask.oup.smallerEqThan(x, smallerSetasList));
});
List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList);
writeLog("notErased: " + notErased + "\n");
erased.removeAll(notErased);
nextSetAsList.removeAll(erased);
writeLog("Removed: " + erased);
writeLog("Not Removed: " + nextSetAsList);
}
} else {
while (aParDefIt.hasNext()) {
//nextSetasListIt = nextSetasList.iterator(); Sollte eingefuegt werden PL 2020-04-28
Set<UnifyPair> a_new = aParDefIt.next();
List<Set<UnifyPair>> erased = typeUnifyTask.oup.smallerEqThan(a_new, nextSetAsList);
nextSetAsList.removeAll(erased);
writeLog("Removed: " + erased);
writeLog("Not Removed: " + nextSetAsList);
}
}
return false;
}
}

View File

@@ -0,0 +1,137 @@
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
import de.dhbwstuttgart.exceptions.UnifyCancelException;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.util.Tuple;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
public class Variance2Case extends VarianceCase {
protected final int variance = 2;
protected Variance2Case(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
super(isOderConstraint, typeUnifyTask, context);
}
@Override
public void selectNextData(
TypeUnifyTask typeUnifyTask,
List<Set<UnifyPair>> nextSetAsList,
Optional<UnifyPair> optOrigPair
) {
a = nextSetAsList.removeFirst();
//Fuer alle Elemente wird parallele Berechnung angestossen.
nextSetasListRest = new ArrayList<>(nextSetAsList);
}
@Override
public CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> computeParallel(
Set<Set<UnifyPair>> elems,
Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
IFiniteClosure fc,
int rekTiefe,
Set<UnifyPair> methodSignatureConstraint,
List<Set<UnifyPair>> nextSetAsList,
Set<UnifyPair> sameEqSet,
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> aParDef
) {
CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> resultValuesFuture;
writeLog("var2einstieg");
Set<TypeUnify2Task> forks = new HashSet<>();
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
newElemsOrig.add(a);
/* FORK ANFANG */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
typeUnifyTask.addChildTask(forkOrig);
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
resultValuesFuture = forkOrigFuture.thenApply((currentThreadResult) -> {
forkOrig.writeLog("final Orig 2");
forkOrig.closeLogFile();
return new Tuple<>(currentThreadResult, new HashSet<>());
});
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
}
/* FORK ENDE */
writeLog("a in " + variance + " " + a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
//Fuer parallele Berechnung der Oder-Contraints wird methodSignature kopiert
//und jeweils die methodSignature von a bzw. nSaL wieder gelöscht, wenn es keine Lösung ist.
Set<UnifyPair> methodSignatureConstraintForParallel = new HashSet<>(methodSignatureConstraint);
Set<UnifyPair> nSaL = a;
while (!nextSetasListRest.isEmpty()) {
methodSignatureConstraintForParallel.removeAll(((Constraint<UnifyPair>) nSaL).getmethodSignatureConstraint());
nSaL = nextSetasListRest.removeFirst();
nextSetAsList.remove(nSaL); //PL einkommentiert 20-02-03
methodSignatureConstraintForParallel.addAll(((Constraint<UnifyPair>) nSaL).getmethodSignatureConstraint());
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraintForParallel));
typeUnifyTask.addChildTask(fork);
CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
resultValuesFuture = resultValuesFuture.thenCombine(forkFuture, (resultValues, fork_res) -> {
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
}
resultValues.getSecond().add(fork_res);
fork.writeLog("final 2");
fork.closeLogFile();
return resultValues;
});
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
}
}
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
}
return resultValuesFuture;
}
@Override
public void applyComputedResults(
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> currentThreadResult,
Set<UnifyPair> compResult,
Set<UnifyPair> compRes
) {
// Nothing
}
@Override
public boolean eraseInvalidSets(
int rekTiefe,
Set<Set<UnifyPair>> aParDef,
List<Set<UnifyPair>> nextSetAsList
) {
// Nothing
return false;
}
}

View File

@@ -0,0 +1,112 @@
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.util.Logger;
import de.dhbwstuttgart.util.Tuple;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
public abstract class VarianceCase {
public static VarianceCase createFromVariance(int variance, boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
return switch (variance) {
case 0 -> new Variance0Case(isOderConstraint, typeUnifyTask, context);
case 1 -> new Variance1Case(isOderConstraint, typeUnifyTask, context);
case -1 -> new VarianceM1Case(isOderConstraint, typeUnifyTask, context);
case 2 -> new Variance2Case(isOderConstraint, typeUnifyTask, context);
default -> throw new RuntimeException("Invalid variance: " + variance);
};
}
protected final boolean isOderConstraint;
protected final TypeUnifyTask typeUnifyTask;
protected final UnifyContext context;
/**
* The current case (the element selected for this iteration)
*/
public Set<UnifyPair> a;
/**
* List of cases for parallel processing.
* Contains the elements that are not related to the current case stored in the variable a.
* These have to be processed in any event, which is why their computation is started in parallel.
*/
public List<Set<UnifyPair>> nextSetasListRest = new ArrayList<>();
/**
* List of cases whose receiver contains "? extends" or, respectively, does not contain it.
* As a rule this is exactly one element.
* This element is later removed from nextSetasList once the respective other element has led to success.
* gefuehrt hat.
*/
public List<Set<UnifyPair>> nextSetasListOderConstraints = new ArrayList<>();
protected VarianceCase(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
this.isOderConstraint = isOderConstraint;
this.typeUnifyTask = typeUnifyTask;
this.context = context;
}
/**
* Selects values for the next iteration in the run method:
* - a: the element selected for this iteration
* - nextSetasListRest: the cases that have no relation to the selected a and will have to be worked on
* - nextSetasListOderConstraints: the cases whose receiver contains "? extends", typically one element
*/
public abstract void selectNextData(
TypeUnifyTask typeUnifyTask,
List<Set<UnifyPair>> nextSetAsList,
Optional<UnifyPair> optOrigPair
);
/**
* Start the computation for the selected case a and, in parallel, for every case in nextSetasListRest.
* The returned future yields the result of the a-branch together with the results of the parallel forks.
*/
public abstract CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> computeParallel(
Set<Set<UnifyPair>> elems,
Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
IFiniteClosure fc,
int rekTiefe,
Set<UnifyPair> methodSignatureConstraint,
List<Set<UnifyPair>> nextSetAsList,
Set<UnifyPair> sameEqSet,
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> aParDef
);
/**
* Merge currentThreadResult into the overall result, using the variance-specific comparison of
* compResult and compRes to decide whether to keep, replace or extend the existing result.
*/
public abstract void applyComputedResults(
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> currentThreadResult,
Set<UnifyPair> compResult,
Set<UnifyPair> compRes
);
/**
* Remove from nextSetAsList those cases that are made redundant by the results computed so far.
*
* @return If the current iteration should be broken out of
*/
public abstract boolean eraseInvalidSets(
int rekTiefe,
Set<Set<UnifyPair>> aParDef,
List<Set<UnifyPair>> nextSetAsList
);
protected void writeLog(String s) {
typeUnifyTask.writeLog(s);
}
}
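A sketch of how the factory is presumably wired into the cartesian-product step of TypeUnifyTask (the surrounding variable names are assumptions, not taken from this diff):
// Dispatch sketch; variable names around the call are assumed.
int variance = isOderConstraint
        ? TypeUnifyTaskHelper.calculateOderConstraintVariance(nextSetAsList)
        : TypeUnifyTaskHelper.calculateVariance(nextSetAsList);
VarianceCase varianceCase = VarianceCase.createFromVariance(variance, isOderConstraint, this, context);
varianceCase.selectNextData(this, nextSetAsList, optOrigPair);
// afterwards varianceCase.a holds the selected case and varianceCase.computeParallel(...) can be scheduled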

View File

@@ -0,0 +1,231 @@
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
import de.dhbwstuttgart.exceptions.UnifyCancelException;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.util.Tuple;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
public class VarianceM1Case extends VarianceCase {
protected final int variance = -1;
protected VarianceM1Case(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
super(isOderConstraint, typeUnifyTask, context);
}
@Override
public void selectNextData(
TypeUnifyTask typeUnifyTask,
List<Set<UnifyPair>> nextSetAsList,
Optional<UnifyPair> optOrigPair
) {
a = typeUnifyTask.oup.min(nextSetAsList.iterator());
writeLog("Min: a in " + variance + " " + a);
if (this.isOderConstraint) {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
}
writeLog("nextSetasListOderConstraints -1: " + nextSetasListOderConstraints);
nextSetAsList.remove(a);
//Determine all minimal elements in nextSetasListRest;
//parallel computation is only started for these.
Set<UnifyPair> finalA = a;
nextSetasListRest = typeUnifyTask.oup.minElements(
nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != -1).toList()
);
}
@Override
public CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> computeParallel(
Set<Set<UnifyPair>> elems,
Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
IFiniteClosure fc,
int rekTiefe,
Set<UnifyPair> methodSignatureConstraint,
List<Set<UnifyPair>> nextSetAsList,
Set<UnifyPair> sameEqSet,
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> aParDef
) {
CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> resultValues = CompletableFuture.completedFuture(new Tuple<>(
new HashSet<>(), new HashSet<>()
));
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
newElemsOrig.add(a);
/* FORK ANFANG */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, methodSignatureConstraint);
typeUnifyTask.addChildTask(forkOrig);
// schedule compute() on another thread
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
resultValues = resultValues.thenCombine(forkOrigFuture,
(prevResults, currentThreadResult) -> {
forkOrig.writeLog("final Orig -1");
forkOrig.closeLogFile();
return new Tuple<>(currentThreadResult, prevResults.getSecond());
});
//forks.add(forkOrig);
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
}
/* FORK ENDE */
writeLog("a in " + variance + " " + a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
while (!nextSetasListRest.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
nextSetAsList.remove(nSaL);
writeLog("-1 RM" + nSaL.toString());
if (!this.isOderConstraint) {
//check whether there is a contradiction in sameEqSet for a =. ty \in nSaL
if (!sameEqSet.isEmpty() && !typeUnifyTask.checkNoContradiction(nSaL, sameEqSet, result)) {
TypeUnifyTask.noShortendElements++;
continue;
}
} else {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) nSaL).getExtendConstraint());
}
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
typeUnifyTask.addChildTask(fork);
// schedule compute() on another thread
CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
resultValues = resultValues.thenCombine(forkFuture,
(prevResults, fork_res) -> {
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
}
writeLog("fork_res: " + fork_res.toString());
writeLog(String.valueOf(typeUnifyTask.isUndefinedPairSetSet(fork_res)));
prevResults.getSecond().add(fork_res);
if (!typeUnifyTask.isUndefinedPairSetSet(fork_res)) {
aParDef.add(fork.getNextSetElement());
}
fork.writeLog("final -1");
fork.closeLogFile();
return prevResults;
}
);
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
}
}
return resultValues;
}
@Override
public void applyComputedResults(
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> currentThreadResult,
Set<UnifyPair> compResult,
Set<UnifyPair> compRes
) {
int resOfCompare = typeUnifyTask.oup.compare(compResult, compRes);
if (resOfCompare == 1) {
writeLog("Geloescht result: " + result);
result.clear();
result.addAll(currentThreadResult);
} else if (resOfCompare == 0) {
result.addAll(currentThreadResult);
} else if (resOfCompare == -1) {
writeLog("Geloescht currentThreadResult: " + currentThreadResult);
//result = result;
}
}
@Override
public boolean eraseInvalidSets(
int rekTiefe,
Set<Set<UnifyPair>> aParDef,
List<Set<UnifyPair>> nextSetAsList
) {
// System.out.println("");
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
writeLog("aParDef: " + aParDef.toString());
aParDef.add(a);
Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator();
if (this.isOderConstraint) {
nextSetAsList.removeAll(nextSetasListOderConstraints);
writeLog("Removed: " + nextSetasListOderConstraints);
nextSetasListOderConstraints = new ArrayList<>();
while (aParDefIt.hasNext()) {
Set<UnifyPair> a_new = aParDefIt.next();
List<Set<UnifyPair>> greaterSetasList = typeUnifyTask.oup.greaterThan(a_new, nextSetAsList);
//a_new must be added if it is not inherited; it will be removed again later
if (!((Constraint<UnifyPair>) a_new).isInherited()) {
greaterSetasList.add(a_new);
}
List<Set<UnifyPair>> notInherited = greaterSetasList.stream()
.filter(x -> !((Constraint<UnifyPair>) x).isInherited())
.collect(Collectors.toCollection(ArrayList::new));
List<Set<UnifyPair>> notErased = new ArrayList<>();
//If x is not inherited, the next overload starts at the next-greater element
notInherited.forEach(x -> {
notErased.addAll(typeUnifyTask.oup.greaterEqThan(x, greaterSetasList));
});
//the smallest element is the one that a_new inherited from
//and must therefore be removed
Iterator<Set<UnifyPair>> notErasedIt = notErased.iterator();
if (notErasedIt.hasNext()) {
Set<UnifyPair> min = typeUnifyTask.oup.min(notErasedIt);
notErased.remove(min);
notErased.remove(((Constraint<UnifyPair>) min).getExtendConstraint());
}
List<Set<UnifyPair>> erased = new ArrayList<>(greaterSetasList);
erased.removeAll(notErased);
nextSetAsList.removeAll(erased);
writeLog("Removed: " + erased);
writeLog("Not Removed: " + nextSetAsList);
}
} else {
while (aParDefIt.hasNext()) {
//nextSetasListIt = nextSetasList.iterator(); Should be inserted PL 2020-04-28
Set<UnifyPair> a_new = aParDefIt.next();
List<Set<UnifyPair>> erased = typeUnifyTask.oup.greaterEqThan(a_new, nextSetAsList);
nextSetAsList.removeAll(erased);
writeLog("Removed: " + erased);
writeLog("Not Removed: " + nextSetAsList);
}
}
return false;
}
}
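
The future-based pattern in computeParallel (supplyAsync on the context executor, thenCompose to unwrap the nested future, thenCombine to fold each fork into the accumulated result) can be reproduced in isolation. The following is a self-contained sketch with simplified types and a local executor; it illustrates the pattern and is not code from the repository.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

// Standalone sketch of the fork/combine pattern used in computeParallel:
// each fork runs on a shared executor and its result is folded into an
// accumulator future via thenCombine.
public class ForkCombineSketch {
    public static void main(String[] args) throws Exception {
        ExecutorService executor = Executors.newFixedThreadPool(4);
        CompletableFuture<List<String>> acc =
                CompletableFuture.completedFuture(new ArrayList<>());
        for (String input : List.of("a", "b", "c")) {
            CompletableFuture<String> fork =
                    CompletableFuture.supplyAsync(() -> input.toUpperCase(), executor);
            // Fold each fork result into the accumulated list, mirroring
            // resultValues.thenCombine(forkFuture, ...) above.
            acc = acc.thenCombine(fork, (results, res) -> { results.add(res); return results; });
        }
        System.out.println(acc.get()); // prints [A, B, C]
        executor.shutdown();
    }
}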

View File

@@ -1,6 +1,7 @@
package de.dhbwstuttgart.typeinference.unify.interfaces; package de.dhbwstuttgart.typeinference.unify.interfaces;
import java.util.List; import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.Optional; import java.util.Optional;
import java.util.Set; import java.util.Set;
@@ -18,9 +19,8 @@ import org.antlr.v4.runtime.Token;
* *
* @author Florian Steurer * @author Florian Steurer
*/ */
public interface IFiniteClosure { public interface IFiniteClosure extends ISerializableData {
public void setLogTrue();
/** /**
* Returns all types of the finite closure that are subtypes of the argument. * Returns all types of the finite closure that are subtypes of the argument.
* @return The set of subtypes of the argument. * @return The set of subtypes of the argument.
@@ -74,5 +74,5 @@ public interface IFiniteClosure {
public Set<UnifyType> getChildren(UnifyType t); public Set<UnifyType> getChildren(UnifyType t);
public Set<UnifyType> getAllTypesByName(String typeName); public Set<UnifyType> getAllTypesByName(String typeName);
public int compare(UnifyType rhsType, UnifyType rhsType2, PairOperator pairop); public int compare(UnifyType rhsType, UnifyType rhsType2, PairOperator pairop, UnifyContext context);
} }

View File

@@ -1,8 +1,9 @@
package de.dhbwstuttgart.typeinference.unify.model; package de.dhbwstuttgart.typeinference.unify.model;
import java.util.ArrayList; import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import java.util.Collection; import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import java.util.HashMap; import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.Set; import java.util.Set;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure; import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
@@ -11,7 +12,7 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor;
/** /**
* An extends wildcard type "? extends T". * An extends wildcard type "? extends T".
*/ */
public final class ExtendsType extends WildcardType { public final class ExtendsType extends WildcardType implements ISerializableData {
public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) { public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) {
return visitor.visit(this, ht); return visitor.visit(this, ht);
@@ -92,5 +93,21 @@ public final class ExtendsType extends WildcardType {
return "? extends " + wildcardedType; return "? extends " + wildcardedType;
} }
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("wildcardedType", this.wildcardedType.toSerial(keyStorage));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static ExtendsType fromSerial(SerialMap data, UnifyContext context) {
return new ExtendsType(
UnifyType.fromSerial(data.getMap("wildcardedType"), context)
);
}
} }

View File

@@ -1,16 +1,20 @@
package de.dhbwstuttgart.typeinference.unify.model; package de.dhbwstuttgart.typeinference.unify.model;
import java.io.FileWriter; import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.io.IOException; import java.io.IOException;
import java.io.Writer; import java.io.Writer;
import java.lang.reflect.Modifier;
import java.sql.Array;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.HashMap; import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.Hashtable; import java.util.Hashtable;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.Optional; import java.util.Optional;
import java.util.Set; import java.util.Set;
import java.util.function.BiFunction; import java.util.function.BiFunction;
@@ -18,40 +22,28 @@ import java.util.function.BinaryOperator;
import java.util.function.Predicate; import java.util.function.Predicate;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import com.google.common.collect.Ordering;
//PL 18-02-05/18-04-05 Unifier durch Matcher ersetzt //PL 18-02-05/18-04-05 Unifier durch Matcher ersetzt
//muss greater noch ersetzt werden ja erledigt 18--04-05 //muss greater noch ersetzt werden ja erledigt 18--04-05
import de.dhbwstuttgart.core.JavaTXCompiler; import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.parser.SourceLoc; import de.dhbwstuttgart.parser.SourceLoc;
import de.dhbwstuttgart.parser.scope.JavaClassName;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.unify.MartelliMontanariUnify;
import de.dhbwstuttgart.typeinference.unify.Match; import de.dhbwstuttgart.typeinference.unify.Match;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask; import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure; import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify;
import de.dhbwstuttgart.util.Pair; import de.dhbwstuttgart.util.Pair;
import org.antlr.v4.runtime.Token; import org.antlr.v4.runtime.Token;
import org.apache.commons.io.output.NullWriter;
/** /**
* The finite closure for the type unification * The finite closure for the type unification
* @author Florian Steurer * @author Florian Steurer
*/ */
public class FiniteClosure //extends Ordering<UnifyType> //entfernt PL 2018-12-11 public class FiniteClosure implements IFiniteClosure, ISerializableData {
implements IFiniteClosure {
final JavaTXCompiler compiler; final JavaTXCompiler compiler;
final PlaceholderRegistry placeholderRegistry;
Writer logFile; Writer logFile;
static Boolean log = false;
public void setLogTrue() {
log = true;
}
/** /**
* A map that maps every type to the node in the inheritance graph that contains that type. * A map that maps every type to the node in the inheritance graph that contains that type.
*/ */
@@ -81,8 +73,9 @@ implements IFiniteClosure {
/** /**
* Creates a new instance using the inheritance tree defined in the pairs. * Creates a new instance using the inheritance tree defined in the pairs.
*/ */
public FiniteClosure(Set<UnifyPair> pairs, Writer logFile, JavaTXCompiler compiler) { public FiniteClosure(Set<UnifyPair> pairs, Writer logFile, JavaTXCompiler compiler, PlaceholderRegistry placeholderRegistry) {
this.compiler = compiler; this.compiler = compiler;
this.placeholderRegistry = placeholderRegistry;
this.logFile = logFile; this.logFile = logFile;
this.pairs = new HashSet<>(pairs); this.pairs = new HashSet<>(pairs);
inheritanceGraph = new HashMap<UnifyType, Node<UnifyType>>(); inheritanceGraph = new HashMap<UnifyType, Node<UnifyType>>();
@@ -141,13 +134,13 @@ implements IFiniteClosure {
} }
} }
public FiniteClosure(Set<UnifyPair> constraints, Writer writer) { public FiniteClosure(Set<UnifyPair> constraints, Writer writer, PlaceholderRegistry placeholderRegistry) {
this(constraints, writer, null); this(constraints, writer, null, placeholderRegistry);
} }
void testSmaller() { void testSmaller() {
UnifyType tq1, tq2, tq3; UnifyType tq1, tq2, tq3;
tq1 = new ExtendsType(PlaceholderType.freshPlaceholder()); tq1 = new ExtendsType(PlaceholderType.freshPlaceholder(placeholderRegistry));
List<UnifyType> l1 = new ArrayList<>(); List<UnifyType> l1 = new ArrayList<>();
List<UnifyType> l2 = new ArrayList<>(); List<UnifyType> l2 = new ArrayList<>();
l1.add(tq1); l1.add(tq1);
@@ -207,7 +200,7 @@ implements IFiniteClosure {
result.add(new Pair<>(t, fBounded)); result.add(new Pair<>(t, fBounded));
} }
catch (StackOverflowError e) { catch (StackOverflowError e) {
System.out.println(""); // System.out.println("");
} }
// if C<...> <* C<...> then ... (third case in definition of <*) // if C<...> <* C<...> then ... (third case in definition of <*)
@@ -698,10 +691,10 @@ implements IFiniteClosure {
} }
*/ */
public int compare (UnifyType left, UnifyType right, PairOperator pairop) { public int compare (UnifyType left, UnifyType right, PairOperator pairop, UnifyContext context) {
try {logFile.write("left: "+ left + " right: " + right + " pairop: " + pairop +"\n");} catch (IOException ie) {} try {logFile.write("left: "+ left + " right: " + right + " pairop: " + pairop +"\n");} catch (IOException ie) {}
if (left.getName().equals("Matrix") || right.getName().equals("Matrix")) // if (left.getName().equals("Matrix") || right.getName().equals("Matrix"))
System.out.println(""); // System.out.println("");
/* /*
pairop = PairOperator.SMALLERDOTWC; pairop = PairOperator.SMALLERDOTWC;
List<UnifyType> al = new ArrayList<>(); List<UnifyType> al = new ArrayList<>();
@@ -752,7 +745,7 @@ implements IFiniteClosure {
} }
} }
UnifyPair up = new UnifyPair(left, right, pairop); UnifyPair up = new UnifyPair(left, right, pairop);
TypeUnifyTask unifyTask = new TypeUnifyTask(); TypeUnifyTask unifyTask = new TypeUnifyTask(context);
HashSet<UnifyPair> hs = new HashSet<>(); HashSet<UnifyPair> hs = new HashSet<>();
hs.add(up); hs.add(up);
Set<UnifyPair> smallerRes = unifyTask.applyTypeUnificationRules(hs, this); Set<UnifyPair> smallerRes = unifyTask.applyTypeUnificationRules(hs, this);
@@ -760,7 +753,7 @@ implements IFiniteClosure {
//if (left.getName().equals("Vector") || right.getName().equals("AbstractList")) //if (left.getName().equals("Vector") || right.getName().equals("AbstractList"))
{try { {try {
logFile.write("\nsmallerRes: " + smallerRes);//"smallerHash: " + greaterHash.toString()); logFile.write("\nsmallerRes: " + smallerRes);//"smallerHash: " + greaterHash.toString());
logFile.flush(); // logFile.flush();
} }
catch (IOException e) { catch (IOException e) {
System.err.println("no LogFile");}} System.err.println("no LogFile");}}
@@ -774,7 +767,7 @@ implements IFiniteClosure {
long smallerLen = smallerRes.stream().filter(delFun).count(); long smallerLen = smallerRes.stream().filter(delFun).count();
try { try {
logFile.write("\nsmallerLen: " + smallerLen +"\n"); logFile.write("\nsmallerLen: " + smallerLen +"\n");
logFile.flush(); // logFile.flush();
} }
catch (IOException e) { catch (IOException e) {
System.err.println("no LogFile");} System.err.println("no LogFile");}
@@ -789,7 +782,7 @@ implements IFiniteClosure {
//if (left.getName().equals("Vector") || right.getName().equals("AbstractList")) //if (left.getName().equals("Vector") || right.getName().equals("AbstractList"))
{try { {try {
logFile.write("\ngreaterRes: " + greaterRes);//"smallerHash: " + greaterHash.toString()); logFile.write("\ngreaterRes: " + greaterRes);//"smallerHash: " + greaterHash.toString());
logFile.flush(); // logFile.flush();
} }
catch (IOException e) { catch (IOException e) {
System.err.println("no LogFile");}} System.err.println("no LogFile");}}
@@ -803,4 +796,18 @@ implements IFiniteClosure {
} }
} }
} }
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("pairs", SerialList.fromMapped(this.pairs, unifyPair -> unifyPair.toSerial(keyStorage)));
return serialized;
}
public static FiniteClosure fromSerial(SerialMap data, UnifyContext context, KeyStorage keyStorage) {
var pairList = data.getList("pairs").assertListOfUUIDs();
Set<UnifyPair> pairs = pairList.stream()
.map(pairData -> UnifyPair.fromSerial(pairData, context, keyStorage)).collect(Collectors.toSet());
return new FiniteClosure(pairs, context.logFile(), context.placeholderRegistry());
}
} }
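
As the hunks above show, FiniteClosure now carries the PlaceholderRegistry of the current run, and compare(...) takes the UnifyContext so that the internally created TypeUnifyTask is bound to the right run. A hedged usage sketch; the variables pairs, logWriter, compiler, leftType, rightType and context are placeholders, not repository code.

// Sketch only -- not part of this changeset.
FiniteClosure fc = new FiniteClosure(pairs, logWriter, compiler, context.placeholderRegistry());
int cmp = fc.compare(leftType, rightType, PairOperator.SMALLERDOT, context);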

View File

@@ -1,14 +1,17 @@
package de.dhbwstuttgart.typeinference.unify.model; package de.dhbwstuttgart.typeinference.unify.model;
import de.dhbwstuttgart.parser.scope.JavaClassName; import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory; import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import java.lang.reflect.Modifier; import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.List; import java.util.List;
public class FunInterfaceType extends ReferenceType { public class FunInterfaceType extends ReferenceType implements ISerializableData {
final List<UnifyType> intfArgTypes; final List<UnifyType> intfArgTypes;
final UnifyType intfReturnType; final UnifyType intfReturnType;
final List<String> generics; final List<String> generics;
@@ -46,4 +49,29 @@ public class FunInterfaceType extends ReferenceType {
return args; return args;
} }
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
var serializedWrapper = super.toSerial(keyStorage);
SerialMap serialized = serializedWrapper.getMap("object");
serialized.put("intfArgTypes", SerialList.fromMapped(intfArgTypes, u -> u.toSerial(keyStorage)));
serialized.put("intfReturnType", intfReturnType.toSerial(keyStorage));
serialized.put("generics", SerialList.fromMapped(generics, SerialValue::new));
return serializedWrapper;
}
public static FunInterfaceType fromSerial(SerialMap data, UnifyContext context) {
var name = data.getValue("name").getOf(String.class);
var params = data.getList("params").assertListOfMaps().stream().map(
paramData -> UnifyType.fromSerial(paramData, context)).toList();
var intfArgTypes = data.getList("intfArgTypes").assertListOfMaps().stream().map(
argTypeData -> UnifyType.fromSerial(argTypeData, context)).toList();
var intfReturnType = UnifyType.fromSerial(data.getMap("intfReturnType"), context);
var generics = data.getList("generics").assertListOfValues().stream().map(
generic -> generic.getOf(String.class)).toList();
return new FunInterfaceType(name, new TypeParams(params), intfArgTypes, intfReturnType, generics);
}
} }

View File

@@ -1,8 +1,13 @@
package de.dhbwstuttgart.typeinference.unify.model; package de.dhbwstuttgart.typeinference.unify.model;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.HashMap; import java.util.List;
import java.util.Set; import java.util.Set;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure; import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
@@ -12,7 +17,7 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor;
* A real function type in java. * A real function type in java.
* @author Florian Steurer * @author Florian Steurer
*/ */
public class FunNType extends UnifyType { public class FunNType extends UnifyType implements ISerializableData {
public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) { public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) {
return visitor.visit(this, ht); return visitor.visit(this, ht);
@@ -99,5 +104,22 @@ public class FunNType extends UnifyType {
return other.getTypeParams().equals(typeParams); return other.getTypeParams().equals(typeParams);
} }
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("params", SerialList.fromMapped(this.typeParams.get(), param -> param.toSerial(keyStorage)));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static FunNType fromSerial(SerialMap data, UnifyContext context) {
List<UnifyType> params = data.getList("params").assertListOfMaps().stream().map(
paramData -> UnifyType.fromSerial(paramData, context)).toList();
return new FunNType(new TypeParams(params));
}
} }

View File

@@ -1,5 +1,6 @@
package de.dhbwstuttgart.typeinference.unify.model; package de.dhbwstuttgart.typeinference.unify.model;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
@@ -26,9 +27,11 @@ import de.dhbwstuttgart.util.Pair;
public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> { public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
protected IFiniteClosure fc; protected IFiniteClosure fc;
protected UnifyContext context;
public OrderingUnifyPair(IFiniteClosure fc) {
public OrderingUnifyPair(IFiniteClosure fc, UnifyContext context) {
this.fc = fc; this.fc = fc;
this.context = context;
} }
/* /*
@@ -39,15 +42,15 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
try { try {
//if (left.getRhsType() instanceof WildcardType || right.getRhsType() instanceof WildcardType) {//PL 2019-01-12 ausgetauscht //if (left.getRhsType() instanceof WildcardType || right.getRhsType() instanceof WildcardType) {//PL 2019-01-12 ausgetauscht
if (((PlaceholderType)left.getLhsType()).isInnerType() && ((PlaceholderType)right.getLhsType()).isInnerType()) { if (((PlaceholderType)left.getLhsType()).isInnerType() && ((PlaceholderType)right.getLhsType()).isInnerType()) {
return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOTWC); return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOTWC, context);
} }
else { else {
return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOT); return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOT, context);
}} }}
catch (ClassCastException e) { catch (ClassCastException e) {
try { try {
((FiniteClosure)fc).logFile.write("ClassCastException: " + left.toString() + " " + left.getGroundBasePair() + "\n\n"); ((FiniteClosure)fc).logFile.write("ClassCastException: " + left.toString() + " " + left.getGroundBasePair() + "\n\n");
((FiniteClosure)fc).logFile.flush(); // ((FiniteClosure)fc).logFile.flush();
} }
catch (IOException ie) { catch (IOException ie) {
} }
@@ -79,18 +82,18 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
&& (((ExtendsType)right).getExtendedType().getName().equals("java.util.Vector")) && (((ExtendsType)right).getExtendedType().getName().equals("java.util.Vector"))
&& (((ReferenceType)((ExtendsType)right).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType))) && (((ReferenceType)((ExtendsType)right).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType)))
{ {
System.out.println(""); // System.out.println("");
} }
if (((right instanceof SuperType) && (((SuperType)right).getSuperedType().getName().equals("java.lang.Object"))) if (((right instanceof SuperType) && (((SuperType)right).getSuperedType().getName().equals("java.lang.Object")))
||((left instanceof SuperType) && (((SuperType)left).getSuperedType().getName().equals("java.lang.Object")))) ||((left instanceof SuperType) && (((SuperType)left).getSuperedType().getName().equals("java.lang.Object"))))
{ {
System.out.println(""); // System.out.println("");
} }
} }
else { else {
up = new UnifyPair(left, right, PairOperator.SMALLERDOT); up = new UnifyPair(left, right, PairOperator.SMALLERDOT);
} }
TypeUnifyTask unifyTask = new TypeUnifyTask(); TypeUnifyTask unifyTask = new TypeUnifyTask(context);
HashSet<UnifyPair> hs = new HashSet<>(); HashSet<UnifyPair> hs = new HashSet<>();
hs.add(up); hs.add(up);
Set<UnifyPair> smallerRes = unifyTask.applyTypeUnificationRules(hs, fc); Set<UnifyPair> smallerRes = unifyTask.applyTypeUnificationRules(hs, fc);
@@ -106,11 +109,11 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
&& (((ExtendsType)right).getExtendedType().getName().equals("java.util.Vector")) && (((ExtendsType)right).getExtendedType().getName().equals("java.util.Vector"))
&& (((ReferenceType)((ExtendsType)right).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType))) && (((ReferenceType)((ExtendsType)right).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType)))
{ {
System.out.println(""); // System.out.println("");
} }
if (right instanceof SuperType) if (right instanceof SuperType)
{ {
System.out.println(""); // System.out.println("");
} }
} }
else { else {
@@ -411,13 +414,13 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
if (leftlewc.iterator().next().getLhsType() instanceof PlaceholderType) { if (leftlewc.iterator().next().getLhsType() instanceof PlaceholderType) {
hm = rsleuni.stream().reduce(new HashMap<UnifyType,UnifyPair>(), (x, y)-> { x.put(y.getLhsType(),y); return x; }, combiner); hm = rsleuni.stream().reduce(new HashMap<UnifyType,UnifyPair>(), (x, y)-> { x.put(y.getLhsType(),y); return x; }, combiner);
Stream<UnifyPair> lslewcstr = lsleuni.stream().filter(x -> !(hm.get(x.getLhsType()) == null)); Stream<UnifyPair> lslewcstr = lsleuni.stream().filter(x -> !(hm.get(x.getLhsType()) == null));
si = lslewcstr.map(x -> fc.compare(x.getRhsType(), hm.get(x.getLhsType()).getRhsType(), PairOperator.SMALLERDOTWC)).reduce((x,y)-> { if (x == y) return x; else return 0; } ); si = lslewcstr.map(x -> fc.compare(x.getRhsType(), hm.get(x.getLhsType()).getRhsType(), PairOperator.SMALLERDOTWC, context)).reduce((x,y)-> { if (x == y) return x; else return 0; } );
} }
//4. Fall //4. Fall
else { else {
hm = rsleuni.stream().reduce(new HashMap<UnifyType,UnifyPair>(), (x, y)-> { x.put(y.getRhsType(),y); return x; }, combiner); hm = rsleuni.stream().reduce(new HashMap<UnifyType,UnifyPair>(), (x, y)-> { x.put(y.getRhsType(),y); return x; }, combiner);
Stream<UnifyPair> lslewcstr = lsleuni.stream().filter(x -> !(hm.get(x.getRhsType()) == null)); Stream<UnifyPair> lslewcstr = lsleuni.stream().filter(x -> !(hm.get(x.getRhsType()) == null));
si = lslewcstr.map(x -> fc.compare(x.getLhsType(), hm.get(x.getRhsType()).getLhsType(), PairOperator.SMALLERDOTWC)).reduce((x,y)-> { if (x == y) return x; else return 0; } ); si = lslewcstr.map(x -> fc.compare(x.getLhsType(), hm.get(x.getRhsType()).getLhsType(), PairOperator.SMALLERDOTWC, context)).reduce((x,y)-> { if (x == y) return x; else return 0; } );
} }
if (!si.isPresent()) return 0; if (!si.isPresent()) return 0;
else return si.get(); else return si.get();

View File

@@ -46,4 +46,15 @@ public enum PairOperator {
default: return "=."; // EQUALSDOT default: return "=."; // EQUALSDOT
} }
} }
public static PairOperator fromString(String op) {
switch (op) {
case "<": return SMALLER;
case "<.": return SMALLERDOT;
case "<!=.": return SMALLERNEQDOT;
case "<.?": return SMALLERDOTWC;
case "=.": return EQUALSDOT;
default: throw new RuntimeException("Unknown PairOperator: " + op);
}
}
} }
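
The new fromString is the counterpart used when deserializing UnifyPair (the "op" field further below is written with pairOp.toString()). A hedged round-trip sketch, assuming toString() emits the same symbols that fromString() parses:

// Sketch only -- not part of this changeset.
PairOperator op = PairOperator.SMALLERDOT;
String symbol = op.toString();                     // expected "<."
PairOperator restored = PairOperator.fromString(symbol);
assert restored == op;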

View File

@@ -1,119 +1,96 @@
package de.dhbwstuttgart.typeinference.unify.model; package de.dhbwstuttgart.typeinference.unify.model;
import java.io.File; import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import java.io.FileWriter; import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import java.util.ArrayList; import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import java.util.Collection; import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Random;
import java.util.Set;
import de.dhbwstuttgart.typeinference.unify.distributeVariance;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure; import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor; import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Set;
/** /**
* An unbounded placeholder type. * An unbounded placeholder type.
* @author Florian Steurer * @author Florian Steurer
*/ */
public final class PlaceholderType extends UnifyType{ public final class PlaceholderType extends UnifyType{
/**
* Static list containing the names of all existing placeholders.
* Used for generating fresh placeholders.
*/
public static final ArrayList<String> EXISTING_PLACEHOLDERS = new ArrayList<String>();
/**
* Prefix of auto-generated placeholder names.
*/
protected static String nextName = "gen_";
/**
* Random number generator used to generate fresh placeholder name.
*/
protected static Random rnd = new Random(43558747548978L);
/** /**
* True if this object was auto-generated, false if this object was user-generated. * True if this object was auto-generated, false if this object was user-generated.
*/ */
private final boolean IsGenerated; private final boolean IsGenerated;
/** /**
* isWildcardable gibt an, ob ein Wildcardtyp dem PlaceholderType zugeordnet werden darf * isWildcardable gibt an, ob ein Wildcardtyp dem PlaceholderType zugeordnet werden darf
*/ */
private boolean wildcardable = true; private boolean wildcardable = true;
/** /**
* is innerType gibt an, ob der Type des PlaceholderType innerhalb eines Typkonstruktorsverwendet wird * is innerType gibt an, ob der Type des PlaceholderType innerhalb eines Typkonstruktorsverwendet wird
*/ */
private boolean innerType = false; private boolean innerType = false;
/** /**
* variance shows the variance of the pair * variance shows the variance of the pair
* 1: contravariant * 1: contravariant
* -1 covariant * -1 covariant
* 0 invariant * 0 invariant
* PL 2018-03-21 * PL 2018-03-21
*/ */
private int variance = 0; private int variance = 0;
/* /*
* Fuer Oder-Constraints: * Fuer Oder-Constraints:
* orCons = 1: Receiver * orCons = 1: Receiver
* orCons = 0: Argument oder kein Oder-Constraint * orCons = 0: Argument oder kein Oder-Constraint
* orCons = -1: RetType * orCons = -1: RetType
*/ */
private byte orCons = 0; private byte orCons = 0;
/** /**
* Creates a new placeholder type with the specified name. * Creates a new placeholder type with the specified name.
*/ */
public PlaceholderType(String name) { public PlaceholderType(String name, PlaceholderRegistry placeholderRegistry) {
super(name, new TypeParams()); super(name, new TypeParams());
EXISTING_PLACEHOLDERS.add(name); // Add to list of existing placeholder names placeholderRegistry.addPlaceholder(name); // Add to list of existing placeholder names
IsGenerated = false; // This type is user generated IsGenerated = false; // This type is user generated
} }
public PlaceholderType(String name, int variance) { public PlaceholderType(String name, int variance, PlaceholderRegistry placeholderRegistry) {
super(name, new TypeParams()); super(name, new TypeParams());
EXISTING_PLACEHOLDERS.add(name); // Add to list of existing placeholder names placeholderRegistry.addPlaceholder(name); // Add to list of existing placeholder names
IsGenerated = false; // This type is user generated IsGenerated = false; // This type is user generated
this.variance = variance; this.variance = variance;
} }
/** /**
* Creates a new placeholdertype * Creates a new placeholdertype
* @param isGenerated true if this placeholder is auto-generated, false if it is user-generated. * @param isGenerated true if this placeholder is auto-generated, false if it is user-generated.
*/ */
protected PlaceholderType(String name, boolean isGenerated) { protected PlaceholderType(String name, boolean isGenerated, PlaceholderRegistry placeholderRegistry) {
super(name, new TypeParams()); super(name, new TypeParams());
EXISTING_PLACEHOLDERS.add(name); // Add to list of existing placeholder names placeholderRegistry.addPlaceholder(name); // Add to list of existing placeholder names
IsGenerated = isGenerated; IsGenerated = isGenerated;
} }
public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) { public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) {
return visitor.visit(this, ht); return visitor.visit(this, ht);
} }
/** /**
* Creates a fresh placeholder type with a name that does so far not exist. * Creates a fresh placeholder type with a name that does so far not exist from the chars A-Z.
* A user could later instantiate a type using the same name that is equivalent to this type. * A user could later instantiate a type using the same name that is equivalent to this type.
* @return A fresh placeholder type. * @return A fresh placeholder type.
*/ */
public synchronized static PlaceholderType freshPlaceholder() { public static PlaceholderType freshPlaceholder(PlaceholderRegistry placeholderRegistry) {
String name = nextName + (char) (rnd.nextInt(22) + 97); // Returns random char between 'a' and 'z' var name = placeholderRegistry.generateFreshPlaceholderName();
// Add random chars while the name is in use. return new PlaceholderType(name, true, placeholderRegistry);
while(EXISTING_PLACEHOLDERS.contains(name)) {
name += (char) (rnd.nextInt(22) + 97); // Returns random char between 'a' and 'z'
}
return new PlaceholderType(name, true);
} }
/** /**
* True if this placeholder is auto-generated, false if it is user-generated. * True if this placeholder is auto-generated, false if it is user-generated.
*/ */
@@ -124,51 +101,51 @@ public final class PlaceholderType extends UnifyType{
public void setVariance(int v) { public void setVariance(int v) {
variance = v; variance = v;
} }
public int getVariance() { public int getVariance() {
return variance; return variance;
} }
public void reversVariance() { public void reversVariance() {
if (variance == 1) { if (variance == 1) {
setVariance(-1); setVariance(-1);
} else { } else {
if (variance == -1) { if (variance == -1) {
setVariance(1); setVariance(1);
}} }}
} }
public void setOrCons(byte i) { public void setOrCons(byte i) {
orCons = i; orCons = i;
} }
public byte getOrCons() { public byte getOrCons() {
return orCons; return orCons;
} }
public Boolean isWildcardable() { public Boolean isWildcardable() {
return wildcardable; return wildcardable;
} }
public void disableWildcardtable() { public void disableWildcardtable() {
wildcardable = false; wildcardable = false;
} }
public void enableWildcardtable() { public void enableWildcardtable() {
wildcardable = true; wildcardable = true;
} }
public void setWildcardtable(Boolean wildcardable) { public void setWildcardtable(Boolean wildcardable) {
this.wildcardable = wildcardable; this.wildcardable = wildcardable;
} }
public Boolean isInnerType() { public Boolean isInnerType() {
return innerType; return innerType;
} }
public void setInnerType(Boolean innerType) { public void setInnerType(Boolean innerType) {
this.innerType = innerType; this.innerType = innerType;
} }
@Override @Override
Set<UnifyType> smArg(IFiniteClosure fc, Set<UnifyType> fBounded) { Set<UnifyType> smArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
return fc.smArg(this, fBounded); return fc.smArg(this, fBounded);
@@ -178,17 +155,17 @@ public final class PlaceholderType extends UnifyType{
Set<UnifyType> grArg(IFiniteClosure fc, Set<UnifyType> fBounded) { Set<UnifyType> grArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
return fc.grArg(this, fBounded); return fc.grArg(this, fBounded);
} }
@Override @Override
public UnifyType setTypeParams(TypeParams newTp) { public UnifyType setTypeParams(TypeParams newTp) {
return this; // Placeholders never have params. return this; // Placeholders never have params.
} }
@Override @Override
public int hashCode() { public int hashCode() {
return typeName.hashCode(); return typeName.hashCode();
} }
@Override @Override
UnifyType apply(Unifier unif) { UnifyType apply(Unifier unif) {
if(unif.hasSubstitute(this)) { if(unif.hasSubstitute(this)) {
@@ -200,15 +177,15 @@ public final class PlaceholderType extends UnifyType{
} }
return this; return this;
} }
@Override @Override
public boolean equals(Object obj) { public boolean equals(Object obj) {
if(!(obj instanceof PlaceholderType)) if(!(obj instanceof PlaceholderType))
return false; return false;
return ((PlaceholderType) obj).getName().equals(typeName); return ((PlaceholderType) obj).getName().equals(typeName);
} }
@Override @Override
public Collection<PlaceholderType> getInvolvedPlaceholderTypes() { public Collection<PlaceholderType> getInvolvedPlaceholderTypes() {
@@ -216,4 +193,39 @@ public final class PlaceholderType extends UnifyType{
ret.add(this); ret.add(this);
return ret; return ret;
} }
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("name", this.typeName);
// Placeholders never make use of the typeParams
serialized.put("isGenerated", IsGenerated);
serialized.put("wildcardable", wildcardable);
serialized.put("isInnerType", innerType);
serialized.put("variance", variance);
serialized.put("orCons", orCons);
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static PlaceholderType fromSerial(SerialMap data, UnifyContext context) {
var name = data.getValue("name").getOf(String.class);
var isGenerated = data.getValue("isGenerated").getOf(Boolean.class);
var wildcardable = data.getValue("wildcardable").getOf(Boolean.class);
var isInnerType = data.getValue("isInnerType").getOf(Boolean.class);
var variance = data.getValue("variance").getOf(Integer.class);
var orCons = data.getValue("orCons").getOf(Number.class).byteValue();
var placeholderType = new PlaceholderType(name, isGenerated, context.placeholderRegistry());
placeholderType.setWildcardtable(wildcardable);
placeholderType.setInnerType(isInnerType);
placeholderType.setVariance(variance);
placeholderType.setOrCons(orCons);
return placeholderType;
}
} }
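
With the static EXISTING_PLACEHOLDERS list and the random-name loop removed, fresh placeholders are drawn from the per-run PlaceholderRegistry. A hedged sketch; context is assumed to be the current UnifyContext.

// Sketch only -- not part of this changeset.
PlaceholderRegistry registry = context.placeholderRegistry();
PlaceholderType fresh = PlaceholderType.freshPlaceholder(registry);
// The generated name is unique within this registry, which is what prevents
// duplicate placeholders when client and server runs are kept separate.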

View File

@@ -1,6 +1,10 @@
package de.dhbwstuttgart.typeinference.unify.model; package de.dhbwstuttgart.typeinference.unify.model;
import java.util.HashMap; import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.Set; import java.util.Set;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure; import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
@@ -11,45 +15,51 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor;
* @author Florian Steurer * @author Florian Steurer
* *
*/ */
public class ReferenceType extends UnifyType { public class ReferenceType extends UnifyType implements ISerializableData {
/** /**
* The buffered hashCode * The buffered hashCode
*/ */
private final int hashCode; private final int hashCode;
/** /**
* gibt an, ob der ReferenceType eine generische Typvariable ist * gibt an, ob der ReferenceType eine generische Typvariable ist
*/ */
private final boolean genericTypeVar; private final boolean genericTypeVar;
public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) { public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) {
return visitor.visit(this, ht); return visitor.visit(this, ht);
} }
public ReferenceType(String name, Boolean genericTypeVar) { public ReferenceType(String name, Boolean genericTypeVar) {
super(name, new TypeParams()); super(name, new TypeParams());
hashCode = 31 + 17 * typeName.hashCode() + 17 * typeParams.hashCode(); hashCode = 31 + 17 * typeName.hashCode() + 17 * typeParams.hashCode();
this.genericTypeVar = genericTypeVar; this.genericTypeVar = genericTypeVar;
} }
public ReferenceType(String name, UnifyType... params) { public ReferenceType(String name, UnifyType... params) {
super(name, new TypeParams(params)); super(name, new TypeParams(params));
hashCode = 31 + 17 * typeName.hashCode() + 17 * typeParams.hashCode(); hashCode = 31 + 17 * typeName.hashCode() + 17 * typeParams.hashCode();
genericTypeVar = false; genericTypeVar = false;
} }
public ReferenceType(String name, TypeParams params) { public ReferenceType(String name, TypeParams params) {
super(name, params); super(name, params);
hashCode = 31 + 17 * typeName.hashCode() + 17 * typeParams.hashCode(); hashCode = 31 + 17 * typeName.hashCode() + 17 * typeParams.hashCode();
genericTypeVar = false; genericTypeVar = false;
} }
public boolean isGenTypeVar () { private ReferenceType(String name, TypeParams params, boolean genericTypeVar) {
super(name, params);
hashCode = 31 + 17 * typeName.hashCode() + 17 * typeParams.hashCode();
this.genericTypeVar = genericTypeVar;
}
public boolean isGenTypeVar() {
return genericTypeVar; return genericTypeVar;
} }
@Override @Override
Set<UnifyType> smArg(IFiniteClosure fc, Set<UnifyType> fBounded) { Set<UnifyType> smArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
return fc.smArg(this, fBounded); return fc.smArg(this, fBounded);
@@ -63,38 +73,59 @@ public class ReferenceType extends UnifyType {
@Override @Override
UnifyType apply(Unifier unif) { UnifyType apply(Unifier unif) {
TypeParams newParams = typeParams.apply(unif); TypeParams newParams = typeParams.apply(unif);
if(newParams.hashCode() == typeParams.hashCode() && newParams.equals(typeParams)) if (newParams.hashCode() == typeParams.hashCode() && newParams.equals(typeParams))
return this; return this;
return new ReferenceType(typeName, newParams); return new ReferenceType(typeName, newParams);
} }
@Override @Override
public UnifyType setTypeParams(TypeParams newTp) { public UnifyType setTypeParams(TypeParams newTp) {
if(newTp.hashCode() == typeParams.hashCode() && newTp.equals(typeParams)) if (newTp.hashCode() == typeParams.hashCode() && newTp.equals(typeParams))
return this; // reduced the amount of objects created return this; // reduced the amount of objects created
return new ReferenceType(typeName, newTp); return new ReferenceType(typeName, newTp);
} }
@Override @Override
public int hashCode() { public int hashCode() {
return hashCode; return hashCode;
} }
@Override @Override
public boolean equals(Object obj) { public boolean equals(Object obj) {
if(!(obj instanceof ReferenceType)) if (!(obj instanceof ReferenceType))
return false; return false;
if(obj.hashCode() != this.hashCode()) if (obj.hashCode() != this.hashCode())
return false; return false;
ReferenceType other = (ReferenceType) obj; ReferenceType other = (ReferenceType) obj;
if(!other.getName().equals(typeName)) if (!other.getName().equals(typeName))
return false; return false;
return other.getTypeParams().equals(typeParams); return other.getTypeParams().equals(typeParams);
} }
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("name", this.typeName);
serialized.put("params", SerialList.fromMapped(this.typeParams.get(), param -> param.toSerial(keyStorage)));
serialized.put("isGenericTypeVar", this.genericTypeVar);
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static ReferenceType fromSerial(SerialMap data, UnifyContext context) {
var name = data.getValue("name").getOf(String.class);
var params = data.getList("params").assertListOfMaps().stream().map(
paramData -> UnifyType.fromSerial(paramData, context)).toList();
var isGenericTypeVar = data.getValue("isGenericTypeVar").getOf(Boolean.class);
return new ReferenceType(name, new TypeParams(params), isGenericTypeVar);
}
}

View File

@@ -1,6 +1,9 @@
package de.dhbwstuttgart.typeinference.unify.model; package de.dhbwstuttgart.typeinference.unify.model;
import java.util.HashMap; import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.Set; import java.util.Set;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure; import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
@@ -85,4 +88,26 @@ public final class SuperType extends WildcardType {
SuperType other = (SuperType) obj; SuperType other = (SuperType) obj;
return other.getSuperedType().equals(wildcardedType); return other.getSuperedType().equals(wildcardedType);
} }
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("wildcardedType", this.wildcardedType.toSerial(keyStorage));
serialized.put("params", SerialList.fromMapped(this.typeParams.get(), param -> param.toSerial(keyStorage)));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static SuperType fromSerial(SerialMap data, UnifyContext context) {
var params = data.getList("params").assertListOfMaps().stream().map(
paramData -> UnifyType.fromSerial(paramData, context)).toList();
var superType = new SuperType(
UnifyType.fromSerial(data.getMap("wildcardedType"), context)
);
superType.setTypeParams(new TypeParams(params));
return superType;
}
} }

View File

@@ -174,9 +174,9 @@ public final class TypeParams implements Iterable<UnifyType>{
@Override @Override
public String toString() { public String toString() {
String res = ""; StringBuilder res = new StringBuilder();
for(UnifyType t : typeParams) for(UnifyType t : typeParams)
res += t + ","; res.append(t).append(",");
return "<" + res.substring(0, res.length()-1) + ">"; return "<" + res.substring(0, res.length()-1) + ">";
} }

View File

@@ -50,8 +50,7 @@ public class Unifier implements Function<UnifyType, UnifyType>, Iterable<Entry<P
Unifier tempU = new Unifier(source, target); Unifier tempU = new Unifier(source, target);
// Every new substitution must be applied to previously added substitutions // Every new substitution must be applied to previously added substitutions
// otherwise the unifier needs to be applied multiple times to unify two terms // otherwise the unifier needs to be applied multiple times to unify two terms
for(PlaceholderType pt : substitutions.keySet()) substitutions.replaceAll((pt, ut) -> ut.apply(tempU));
substitutions.put(pt, substitutions.get(pt).apply(tempU));
substitutions.put(source, target); substitutions.put(source, target);
} }
@@ -93,8 +92,7 @@ public class Unifier implements Function<UnifyType, UnifyType>, Iterable<Entry<P
// } // }
//} //}
if (!(p.getLhsType().equals(newLhs)) || !(p.getRhsType().equals(newRhs))) {//Die Anwendung von this hat was veraendert PL 2018-04-01 if (!(p.getLhsType().equals(newLhs)) || !(p.getRhsType().equals(newRhs))) {//Die Anwendung von this hat was veraendert PL 2018-04-01
Set<UnifyPair> suniUnifyPair = new HashSet<>(); Set<UnifyPair> suniUnifyPair = new HashSet<>(thisAsPair.getAllSubstitutions());
suniUnifyPair.addAll(thisAsPair.getAllSubstitutions());
suniUnifyPair.add(thisAsPair); suniUnifyPair.add(thisAsPair);
if (p.getLhsType() instanceof PlaceholderType //&& newLhs instanceof PlaceholderType entfernt PL 2018-04-13 if (p.getLhsType() instanceof PlaceholderType //&& newLhs instanceof PlaceholderType entfernt PL 2018-04-13
&& p.getPairOp() == PairOperator.EQUALSDOT) { && p.getPairOp() == PairOperator.EQUALSDOT) {
@@ -172,13 +170,13 @@ public class Unifier implements Function<UnifyType, UnifyType>, Iterable<Entry<P
@Override @Override
public String toString() { public String toString() {
String result = "{ "; StringBuilder result = new StringBuilder("{ ");
for(Entry<PlaceholderType, UnifyType> entry : substitutions.entrySet()) for(Entry<PlaceholderType, UnifyType> entry : substitutions.entrySet())
result += "(" + entry.getKey() + " -> " + entry.getValue() + "), "; result.append("(").append(entry.getKey()).append(" -> ").append(entry.getValue()).append("), ");
if(!substitutions.isEmpty()) if(!substitutions.isEmpty())
result = result.substring(0, result.length()-2); result = new StringBuilder(result.substring(0, result.length() - 2));
result += " }"; result.append(" }");
return result; return result.toString();
} }
@Override @Override

View File

@@ -1,23 +1,26 @@
package de.dhbwstuttgart.typeinference.unify.model; package de.dhbwstuttgart.typeinference.unify.model;
import com.google.common.collect.ObjectArrays;
import de.dhbwstuttgart.parser.SourceLoc; import de.dhbwstuttgart.parser.SourceLoc;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder; import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import org.antlr.v4.runtime.Token; import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialUUID;
import de.dhbwstuttgart.typeinference.constraints.IConstraintElement;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Optional;
import java.util.Set; import java.util.Set;
import java.util.stream.Collectors;
/** /**
* A pair which contains two types and an operator, e.q. (Integer <. a). * A pair which contains two types and an operator, e.q. (Integer <. a).
* @author Florian Steurer * @author Florian Steurer
*/ */
public class UnifyPair { public class UnifyPair implements IConstraintElement, ISerializableData {
private SourceLoc location; private SourceLoc location;
@@ -151,8 +154,7 @@ public class UnifyPair {
} }
public Set<UnifyPair> getAllSubstitutions () { public Set<UnifyPair> getAllSubstitutions () {
Set<UnifyPair> ret = new HashSet<>(); Set<UnifyPair> ret = new HashSet<>(new ArrayList<>(getSubstitution()));
ret.addAll(new ArrayList<>(getSubstitution()));
if (basePair != null) { if (basePair != null) {
ret.addAll(new ArrayList<>(basePair.getAllSubstitutions())); ret.addAll(new ArrayList<>(basePair.getAllSubstitutions()));
} }
@@ -243,16 +245,16 @@ public class UnifyPair {
public String toString() { public String toString() {
String ret = ""; String ret = "";
if (lhs instanceof PlaceholderType) { if (lhs instanceof PlaceholderType) {
ret = new Integer(((PlaceholderType)lhs).getVariance()).toString() + " " ret = Integer.valueOf(((PlaceholderType)lhs).getVariance()).toString() + " "
+ "WC: " + ((PlaceholderType)lhs).isWildcardable() + "WC: " + ((PlaceholderType)lhs).isWildcardable()
+ ", IT: " + ((PlaceholderType)lhs).isInnerType(); + ", IT: " + ((PlaceholderType)lhs).isInnerType();
} }
if (rhs instanceof PlaceholderType) { if (rhs instanceof PlaceholderType) {
ret = ret + ", " + new Integer(((PlaceholderType)rhs).getVariance()).toString() + " " ret = ret + ", " + Integer.valueOf(((PlaceholderType)rhs).getVariance()).toString() + " "
+ "WC: " + ((PlaceholderType)rhs).isWildcardable() + "WC: " + ((PlaceholderType)rhs).isWildcardable()
+ ", IT: " + ((PlaceholderType)rhs).isInnerType(); + ", IT: " + ((PlaceholderType)rhs).isInnerType();
} }
var res = "(" + lhs + " " + pairOp + " " + rhs + ", " + ret + ")"; //+ ", [" + getfBounded().toString()+ "])"; var res = "(UP: " + lhs + " " + pairOp + " " + rhs + ", " + ret + ")"; //+ ", [" + getfBounded().toString()+ "])";
var location = this.getLocation(); var location = this.getLocation();
if (location != null) { if (location != null) {
res += "@" + location.line() + " in " + location.file(); res += "@" + location.line() + " in " + location.file();
@@ -269,6 +271,58 @@ public class UnifyPair {
return ret; return ret;
} }
*/ */
private String serialUUID = null;
public SerialUUID toSerial(KeyStorage keyStorage) {
final String uuid = serialUUID == null ? keyStorage.getIdentifier() : serialUUID;
if (serialUUID == null) serialUUID = uuid;
if (!keyStorage.isAlreadySerialized(uuid)) {
SerialMap serialized = new SerialMap();
keyStorage.putSerialized(uuid, serialized);
serialized.put("lhs", this.lhs.toSerial(keyStorage));
serialized.put("rhs", this.rhs.toSerial(keyStorage));
serialized.put("op", this.pairOp.toString());
serialized.put("basePair", this.basePair == null ? null : this.basePair.toSerial(keyStorage));
serialized.put("location", this.location == null ? null : this.location.toSerial(keyStorage));
serialized.put("substitution", SerialList.fromMapped(this.substitution, unifyPair -> unifyPair.toSerial(keyStorage)));
serialized.put("fBounded", SerialList.fromMapped(this.fBounded, fbounded -> fbounded.toSerial(keyStorage)));
}
return new SerialUUID(uuid);
}
public static UnifyPair fromSerial(SerialUUID serialUUID, UnifyContext context, KeyStorage keyStorage) {
String uuid = serialUUID.uuid;
if (!keyStorage.isAlreadyUnserialized(uuid)) {
SerialMap data = keyStorage.getSerialized(uuid);
SerialMap lhsData = data.getMap("lhs");
SerialMap rhsData = data.getMap("rhs");
String opData = data.getValue("op").getOf(String.class);
UnifyPair pair = new UnifyPair(
UnifyType.fromSerial(lhsData, context),
UnifyType.fromSerial(rhsData, context),
PairOperator.fromString(opData)
);
// put the object into the storage before unserializing basePair recursively
keyStorage.putUnserialized(uuid, pair);
SerialList<SerialUUID> substitutionData = data.getList("substitution").assertListOfUUIDs();
SerialList<SerialMap> fBoundedData = data.getList("fBounded").assertListOfMaps();
SerialUUID basePairData = data.getUUIDOrNull("basePair");
SerialMap locationData = data.getMapOrNull("location");
pair.substitution = substitutionData.stream().map(substData -> UnifyPair.fromSerial(substData, context, keyStorage)).collect(Collectors.toSet());
pair.fBounded = fBoundedData.stream().map(fBoundData -> UnifyType.fromSerial(fBoundData, context)).collect(Collectors.toSet());
if (basePairData != null) {
pair.basePair = UnifyPair.fromSerial(basePairData, context, keyStorage);
}
if (locationData != null) {
pair.location = SourceLoc.fromSerial(locationData);
}
}
return keyStorage.getUnserialized(uuid, UnifyPair.class);
}
}
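The serialization above is UUID-based so that shared sub-structures and cycles (a pair can reach itself again through basePair or its substitution set) are written exactly once: toSerial registers the map under its UUID before descending into the fields, and fromSerial registers the half-built pair before resolving basePair. A minimal round-trip sketch, using only calls that appear in this diff; the concrete types and the reuse of a single KeyStorage on both sides are illustrative assumptions:

KeyStorage storage = new KeyStorage();
UnifyContext context = PacketExampleData.createTestContext(); // test helper defined further down in this diff

UnifyPair pair = new UnifyPair(
        new ReferenceType("java.lang.Integer", false),
        new ReferenceType("java.lang.Number", false),
        PairOperator.SMALLERDOT);

SerialUUID id = pair.toSerial(storage);     // first call: serializes the pair and registers its UUID
SerialUUID sameId = pair.toSerial(storage); // second call: returns the cached UUID, nothing is re-serialized

// the receiving side normally rebuilds the KeyStorage from the packet (see PacketTest below);
// reusing the same storage here keeps the sketch short
UnifyPair copy = UnifyPair.fromSerial(id, context, storage);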

View File

@@ -1,12 +1,14 @@
package de.dhbwstuttgart.typeinference.unify.model;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Set;
import de.dhbwstuttgart.syntaxtree.StatementVisitor;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor;
@@ -14,7 +16,7 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor;
 * Represents a java type.
 * @author Florian Steurer
 */
-public abstract class UnifyType {
+public abstract class UnifyType implements ISerializableData {
/**
 * The name of the type, e.g. "Integer", "? extends Integer" or "List" for (List<T>)
@@ -29,7 +31,7 @@ public abstract class UnifyType {
/**
 * Creates a new instance
 * @param name Name of the type (e.g. List for List<T>, Integer or ? extends Integer)
- * @param typeParams Parameters of the type (e.g. <T> for List<T>)
+ * @param p Parameters of the type (e.g. <T> for List<T>)
 */
protected UnifyType(String name, TypeParams p) {
typeName = name;
@@ -87,20 +89,18 @@ public abstract class UnifyType {
@Override
public String toString() {
-String params = "";
+StringBuilder params = new StringBuilder();
if(typeParams.size() != 0) {
for(UnifyType param : typeParams)
-params += param.toString() + ",";
-params = "<" + params.substring(0, params.length()-1) + ">";
+params.append(param.toString()).append(",");
+params = new StringBuilder("<" + params.substring(0, params.length() - 1) + ">");
}
return typeName + params;
}
public Collection<PlaceholderType> getInvolvedPlaceholderTypes() {
-ArrayList<PlaceholderType> ret = new ArrayList<>();
-ret.addAll(typeParams.getInvolvedPlaceholderTypes());
-return ret;
+return new ArrayList<>(typeParams.getInvolvedPlaceholderTypes());
}
public Boolean wrongWildcard() { //default
@@ -114,7 +114,40 @@ public abstract class UnifyType {
@Override
public boolean equals(Object obj) {
-if(obj == null)return false;
+if(obj == null) return false;
return this.toString().equals(obj.toString());
}
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
String type = switch (this) {
case FunInterfaceType _ -> "funi";
case ReferenceType _ -> "ref";
case ExtendsType _ -> "ext";
case SuperType _ -> "sup";
case FunNType _ -> "funn";
case PlaceholderType _ -> "tph";
default -> throw new RuntimeException("No type defined for UnifyType of class " + this.getClass().getName());
};
serialized.put("type", type);
// we only insert null for the object and expect the child classes to call this and override the value with themselves
serialized.put("object", SerialValue.NULL);
return serialized;
}
public static UnifyType fromSerial(SerialMap data, UnifyContext context) {
var type = data.getValue("type").getOf(String.class);
var object = data.getMap("object");
return switch (type) {
case "funi" -> FunInterfaceType.fromSerial(object, context);
case "ref" -> ReferenceType.fromSerial(object, context);
case "ext" -> ExtendsType.fromSerial(object, context);
case "sup" -> SuperType.fromSerial(object, context);
case "funn" -> FunNType.fromSerial(object, context);
case "tph" -> PlaceholderType.fromSerial(object, context);
default -> throw new RuntimeException("Could not unserialize class of unhandled type " + type);
};
}
}
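toSerial above writes only the type tag plus a NULL placeholder under "object", and the inline comment expects every concrete subclass to overwrite that placeholder with its own payload. A sketch of what such an override could look like; the getName() accessor and the payload layout are assumptions for illustration, not taken from this diff:

// hypothetical override in a concrete subclass such as ReferenceType
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
    SerialMap serialized = super.toSerial(keyStorage); // writes "type" = "ref" and "object" = NULL
    SerialMap object = new SerialMap();
    object.put("name", this.getName());                // assumed accessor for the typeName field
    serialized.put("object", object);                  // replace the NULL placeholder with the real payload
    return serialized;
}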

View File

@@ -1,5 +1,6 @@
package de.dhbwstuttgart.typeinference.unify.model;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import java.util.ArrayList;
import java.util.Collection;
@@ -7,7 +8,7 @@ import java.util.Collection;
 * A wildcard type that is either an ExtendsType or a SuperType.
 * @author Florian Steurer
 */
-public abstract class WildcardType extends UnifyType {
+public abstract class WildcardType extends UnifyType implements ISerializableData {
/**
 * The wildcarded type, e.g. Integer for ? extends Integer. Never a wildcard type itself.
@@ -65,8 +66,7 @@ public abstract class WildcardType extends UnifyType {
@Override
public Collection<PlaceholderType> getInvolvedPlaceholderTypes() {
-ArrayList<PlaceholderType> ret = new ArrayList<>();
-ret.addAll(wildcardedType.getInvolvedPlaceholderTypes());
+ArrayList<PlaceholderType> ret = new ArrayList<>(wildcardedType.getInvolvedPlaceholderTypes());
return ret;
}
}

View File

@@ -0,0 +1,9 @@
package de.dhbwstuttgart.util;
public class Logger {
public static void print(String s) {
System.out.println(s);
}
}

View File

@@ -3,37 +3,26 @@ package de.dhbwstuttgart.util;
import java.util.Objects;
import java.util.Optional;
-public class Pair<T, T1> {
+public class Pair<T, T1> extends Tuple<T, T1> {
-private final T key;
-private final T1 value;
public Pair(T a, T1 b) {
-this.value = b;
-this.key = a;
+super(a, b);
}
public Optional<T1> getValue() {
-return Optional.of(value);
+return Optional.of(second);
}
public T getKey() {
-return key;
+return first;
}
-public String toString() {
-return "(" + key.toString() + "," + value.toString() + ")\n";
-}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Pair<?, ?> pair = (Pair<?, ?>) o;
-return Objects.equals(key, pair.key) && Objects.equals(value, pair.value);
+return Objects.equals(first, pair.first) && Objects.equals(second, pair.second);
}
-@Override
-public int hashCode() {
-return Objects.hash(key, value);
-}
}

View File

@@ -0,0 +1,38 @@
package de.dhbwstuttgart.util;
import java.util.Objects;
public class Tuple<T1, T2> {
protected final T1 first;
protected final T2 second;
public Tuple(T1 a, T2 b) {
this.second = b;
this.first = a;
}
public T1 getFirst() {
return first;
}
public T2 getSecond() {
return second;
}
public String toString() {
return "(" + first.toString() + "," + second.toString() + ")\n";
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Tuple<?, ?> pair = (Tuple<?, ?>) o;
return Objects.equals(first, pair.first) && Objects.equals(second, pair.second);
}
@Override
public int hashCode() {
return Objects.hash(first, second);
}
}
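Pair (previous file) now keeps its state in the inherited first/second fields and no longer defines toString or hashCode of its own. A short usage sketch:

Tuple<String, Integer> t = new Tuple<>("x", 1);
Pair<String, Integer> p = new Pair<>("x", 1);

t.getFirst();                // "x"
p.getKey();                  // "x" (reads the inherited 'first' slot)
p.getValue().orElseThrow();  // 1   (Pair keeps its Optional-based accessor)
t.equals(p);                 // false: equals() requires the exact same runtime class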

View File

@@ -1,8 +1,10 @@
package finiteClosure;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.parser.SyntaxTreeGenerator.FCGenerator;
import de.dhbwstuttgart.syntaxtree.ClassOrInterface;
import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import org.junit.Test;
import java.util.ArrayList;
@@ -14,14 +16,14 @@ public class SuperInterfacesTest {
public void test() throws ClassNotFoundException {
Collection<ClassOrInterface> classes = new ArrayList<>();
classes.add(ASTFactory.createClass(TestClass.class));
-System.out.println(FCGenerator.toFC(classes, ClassLoader.getSystemClassLoader()));
+System.out.println(FCGenerator.toFC(classes, ClassLoader.getSystemClassLoader(), JavaTXCompiler.defaultClientPlaceholderRegistry));
}
@Test
public void testGeneric() throws ClassNotFoundException {
Collection<ClassOrInterface> classes = new ArrayList<>();
classes.add(ASTFactory.createClass(TestClassGeneric.class));
-System.out.println(FCGenerator.toFC(classes, ClassLoader.getSystemClassLoader()));
+System.out.println(FCGenerator.toFC(classes, ClassLoader.getSystemClassLoader(), JavaTXCompiler.defaultClientPlaceholderRegistry));
}
}

View File

@@ -0,0 +1,101 @@
package server;
import de.dhbwstuttgart.parser.SourceLoc;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;
import de.dhbwstuttgart.typeinference.unify.model.ExtendsType;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.FunInterfaceType;
import de.dhbwstuttgart.typeinference.unify.model.FunNType;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.ReferenceType;
import de.dhbwstuttgart.typeinference.unify.model.SuperType;
import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import java.io.Writer;
import java.util.HashSet;
import java.util.List;
import java.util.concurrent.ForkJoinPool;
class PacketExampleData {
static ConstraintSet<UnifyPair> getExampleUnifyPairConstraintSet(
UnifyContext unifyContext, String packagePath, int numUndCons, int numOderCons
) {
var constraintSet = new ConstraintSet<UnifyPair>();
for (int i = 0; i < numUndCons; i++) {
constraintSet.addUndConstraint(
getExampleUnifyPair(unifyContext, packagePath+"undCons.")
);
}
for (int i = 0; i < numOderCons; i++) {
var oderConstraint = new HashSet<Constraint<UnifyPair>>();
oderConstraint.add(getExampleUnifyPairConstraint(unifyContext, packagePath+"oderCons.", i == 0));
constraintSet.addOderConstraint(oderConstraint);
}
return constraintSet;
}
static Constraint<UnifyPair> getExampleUnifyPairConstraint(UnifyContext unifyContext, String packagePath, boolean withExtends) {
return new Constraint<>(
!withExtends, true,
withExtends ? getExampleUnifyPairConstraint(unifyContext, packagePath+"extendConstraint.", false) : null,
withExtends ? new HashSet<>(List.of(
getExampleUnifyPair(unifyContext, packagePath+"methodSignatureConstraint.zero."),
getExampleUnifyPair(unifyContext, packagePath+"methodSignatureConstraint.one.")
)) : null
);
}
static UnifyPair getExampleUnifyPair(UnifyContext unifyContext, String packagePath) {
return new UnifyPair(
new ReferenceType(packagePath + "something", false),
new ExtendsType(
new SuperType(
new FunInterfaceType("lambda" + unifyContext.placeholderRegistry().generateFreshPlaceholderName(),
new TypeParams(),
List.of( // intfArgTypes
FunNType.getFunNType(new TypeParams(
List.of(
PlaceholderType.freshPlaceholder(unifyContext.placeholderRegistry()),
PlaceholderType.freshPlaceholder(unifyContext.placeholderRegistry())
)
))
),
PlaceholderType.freshPlaceholder(unifyContext.placeholderRegistry()), // intfReturnType
List.of( // generics
"ZA",
"ZB",
unifyContext.placeholderRegistry().generateFreshPlaceholderName()
)
)
)
),
PairOperator.SMALLERDOT,
new SourceLoc("test.jav", 10)
);
}
static UnifyContext createTestContext() {
var placeholderRegistry = new PlaceholderRegistry();
var nullWriter = Writer.nullWriter();
return new UnifyContext(nullWriter, false, true,
new UnifyResultModel(
new ConstraintSet<>(),
new FiniteClosure(
new HashSet<>(),
nullWriter,
placeholderRegistry)),
new UnifyTaskModel(),
ForkJoinPool.commonPool(),
placeholderRegistry
);
}
}

View File

@@ -0,0 +1,110 @@
package server;
import com.fasterxml.jackson.core.JsonProcessingException;
import de.dhbwstuttgart.environment.ByteArrayClassLoader;
import de.dhbwstuttgart.parser.SourceLoc;
import de.dhbwstuttgart.server.packet.DebugPacket;
import de.dhbwstuttgart.server.packet.IPacket;
import de.dhbwstuttgart.server.packet.InvalidPacket;
import de.dhbwstuttgart.server.packet.PacketContainer;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialUUID;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import java.io.Writer;
import java.util.HashSet;
import java.util.concurrent.ForkJoinPool;
import java.util.function.BiFunction;
import org.junit.Test;
import static org.junit.Assert.*;
public class PacketTest {
@Test
public void serializeUnifyPair() throws JsonProcessingException {
UnifyContext unifyContext = PacketExampleData.createTestContext();
var original = PacketExampleData.getExampleUnifyPair(unifyContext, "de.test.");
var reconstruction = serializeAndDeserialize(original, unifyContext,
(o,k) -> UnifyPair.fromSerial((SerialUUID) o,unifyContext,k));
assertEquals(original.getClass(), reconstruction.getClass());
assertEquals(original.toString(), reconstruction.toString());
assertEquals(original, reconstruction);
}
@Test
public void serializeUnifyPairConstraint() throws JsonProcessingException {
UnifyContext unifyContext = PacketExampleData.createTestContext();
var original = PacketExampleData.getExampleUnifyPairConstraint(unifyContext, "de.", true);
var reconstruction = serializeAndDeserialize(original, unifyContext,
(o,k) -> Constraint.fromSerial((SerialUUID) o, unifyContext, UnifyPair.class, k));
assertEquals(original.getClass(), reconstruction.getClass());
assertEquals(original.toString(), reconstruction.toString());
assertEquals(original, reconstruction);
}
@Test
public void serializeUnifyPairConstraintSet() throws JsonProcessingException {
UnifyContext unifyContext = PacketExampleData.createTestContext();
var original = PacketExampleData.getExampleUnifyPairConstraintSet(unifyContext, "de.", 1, 2);
var reconstruction = serializeAndDeserialize(original, unifyContext,
(o,k) -> ConstraintSet.fromSerial((SerialMap) o, unifyContext, UnifyPair.class, k));
assertEquals(original.getClass(), reconstruction.getClass());
assertEquals(original.toString(), reconstruction.toString());
assertEquals(original, reconstruction);
}
/**
* Helper method for serializing an ISerialNode into JSON, then deserializing it
*/
private <T extends ISerialNode, R extends ISerializableData> R serializeAndDeserialize(
ISerializableData object, UnifyContext unifyContext, BiFunction<T, KeyStorage, R> fromSerial
) throws JsonProcessingException {
DebugPacket packet = new DebugPacket();
KeyStorage keyStorage = new KeyStorage();
var serializedObject = object.toSerial(keyStorage);
if (serializedObject instanceof SerialUUID sObject) packet.a1 = sObject;
if (serializedObject instanceof SerialMap sObject) packet.b1 = sObject;
if (serializedObject instanceof SerialList<?> sObject) packet.c1 = sObject;
if (serializedObject instanceof SerialValue<?> sObject) packet.d1 = sObject;
packet.b2 = keyStorage.toSerial(keyStorage);
DebugPacket reconstructedPacket = serializeAndDeserializePacket(packet);
KeyStorage reconstructedKeyStorage = KeyStorage.fromSerial(reconstructedPacket.b2, unifyContext);
ISerialNode reconstructedData = null;
if (serializedObject instanceof SerialUUID) reconstructedData = reconstructedPacket.a1;
if (serializedObject instanceof SerialMap) reconstructedData = reconstructedPacket.b1;
if (serializedObject instanceof SerialList<?>) reconstructedData = reconstructedPacket.c1;
if (serializedObject instanceof SerialValue<?>) reconstructedData = reconstructedPacket.d1;
assertNotNull(reconstructedData);
return fromSerial.apply( (T) reconstructedData, reconstructedKeyStorage);
}
private <T extends IPacket> T serializeAndDeserializePacket(T packet) throws JsonProcessingException {
String json = PacketContainer.serialize(packet);
IPacket reconstructedPacket = PacketContainer.deserialize(json);
assertNotNull(reconstructedPacket);
assertSame(packet.getClass(), reconstructedPacket.getClass());
return (T) reconstructedPacket;
}
}

View File

@@ -0,0 +1,141 @@
package server;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.core.JavaTXServer;
import de.dhbwstuttgart.environment.CompilationEnvironment;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.syntaxtree.ClassOrInterface;
import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.UnifyResultListenerImpl;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import java.io.File;
import java.io.IOException;
import java.io.Writer;
import java.nio.file.Path;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.stream.Collectors;
import org.junit.Ignore;
import org.junit.Test;
import targetast.TestCodegen;
import static org.junit.Assert.*;
@Ignore("Server tests create huge overhead, so they are ignored until required")
public class ServerTest {
@Test
public void checkServer_Scalar() throws IOException, ClassNotFoundException {
compareLocalAndServerResult("Scalar.jav");
}
@Test
public void checkServer_Matrix() throws IOException, ClassNotFoundException {
compareLocalAndServerResult("Matrix.jav");
}
protected void compareLocalAndServerResult(final String filename) throws IOException, ClassNotFoundException {
File file = Path.of(TestCodegen.path.toString(), filename).toFile();
// get information from compiler
JavaTXCompiler compiler = new JavaTXCompiler(List.of(file));
// NOW: simulate the call to method typeInference. Once via server and once locally
// if everything works, they should neither interfere with each other, nor differ in their result
// get the values from the compiler
PlaceholderRegistry placeholderRegistry = JavaTXCompiler.defaultClientPlaceholderRegistry; //new PlaceholderRegistry();
ConstraintSet<Pair> cons = compiler.getConstraints(file);
ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(compiler, cons, placeholderRegistry);
unifyCons = unifyCons.map(ServerTest::distributeInnerVars);
FiniteClosure finiteClosure = UnifyTypeFactory.generateFC(
ServerTest.getAllClasses(compiler, file).stream().toList(),
Writer.nullWriter(),
compiler.classLoader,
compiler,
placeholderRegistry
);
UnifyTaskModel usedTasks = new UnifyTaskModel();
// create the server
JavaTXServer server = new JavaTXServer(5000);
// run the server in a separate thread; keep the executor open until the test is done,
// since try-with-resources would block here until listen() returns
ExecutorService executor = Executors.newSingleThreadExecutor();
executor.submit(server::listen);
// run the unification on the server
PlaceholderRegistry prCopy = JavaTXCompiler.defaultClientPlaceholderRegistry.deepClone();
UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
UnifyContext context = new UnifyContext(Writer.nullWriter(), false, true, urm, usedTasks, prCopy);
SocketClient socketClient = new SocketClient("ws://localhost:5000");
List<ResultSet> serverResult = socketClient.execute(finiteClosure, cons, unifyCons, context);
// close the server and release the listener thread
server.forceStop();
executor.shutdownNow();
// run the unification on the client (do this second, because it changes the initial placeholder registry)
UnifyResultListenerImpl li = new UnifyResultListenerImpl();
urm.addUnifyResultListener(li);
TypeUnify.unifyParallel(unifyCons.getUndConstraints(), unifyCons.getOderConstraints(), finiteClosure, context);
List<ResultSet> clientResult = li.getResults();
// create the bytecode from both results
var sf = compiler.sourceFiles.get(file);
var serverBytecode = compiler.generateBytecode(sf, serverResult);
var localBytecode = compiler.generateBytecode(sf, clientResult);
// test if the generated code is the same
for (var serverEntry : serverBytecode.entrySet()) {
var serverBytes = serverEntry.getValue();
var localBytes = localBytecode.get(serverEntry.getKey());
assertArrayEquals(serverBytes, localBytes);
}
}
protected static UnifyPair distributeInnerVars(UnifyPair x) {
UnifyType lhs, rhs;
if (((lhs = x.getLhsType()) instanceof PlaceholderType) && ((rhs = x.getRhsType()) instanceof PlaceholderType) && (((PlaceholderType) lhs).isInnerType() || ((PlaceholderType) rhs).isInnerType())) {
((PlaceholderType) lhs).setInnerType(true);
((PlaceholderType) rhs).setInnerType(true);
}
return x;
}
protected static Set<ClassOrInterface> getAllClasses(JavaTXCompiler compiler, File file)
throws ClassNotFoundException, IOException
{
var sf = compiler.sourceFiles.get(file);
Set<ClassOrInterface> allClasses = new HashSet<>();
allClasses.addAll(compiler.getAvailableClasses(sf));
allClasses.addAll(sf.getClasses());
var newClasses = CompilationEnvironment.loadDefaultPackageClasses(sf.getPkgName(), file, compiler).stream().map(ASTFactory::createClass).collect(Collectors.toSet());
for (var clazz : newClasses) {
// Don't load classes that get recompiled
if (sf.getClasses().stream().anyMatch(nf -> nf.getClassName().equals(clazz.getClassName())))
continue;
if (allClasses.stream().noneMatch(old -> old.getClassName().equals(clazz.getClassName())))
allClasses.add(clazz);
}
return allClasses;
}
}