39 Commits

Author SHA1 Message Date
Fabian Holzwarth
1af31e4513 feat: update parallelization for 0variance 2025-08-04 13:41:17 +02:00
Fabian Holzwarth
5b06f0a249 Merge branch 'feat/unify-server' into feat/unify-server-0variance 2025-07-21 16:27:55 +02:00
Fabian Holzwarth
3d99f282f5 feat: cleanup code 2025-07-21 16:12:56 +02:00
Fabian Holzwarth
512b10542e feat: adjusted parallelization 2025-07-21 15:41:04 +02:00
Fabian Holzwarth
3b1185d9d0 feat: parallelize 0-variance cases 2025-07-20 15:06:50 +02:00
Fabian Holzwarth
303c91dc87 chore: making classnames more expressive and cleanup some structures 2025-07-19 12:50:30 +02:00
Fabian Holzwarth
603a8b176a feat: implement partially cancellable tasks 2025-07-16 11:16:30 +02:00
Fabian Holzwarth
f396189a4b feat: add timestamp to server output 2025-07-12 13:44:18 +02:00
Fabian Holzwarth
e7f4a94908 feat: fixed old condition on server timeout 2025-07-12 13:22:54 +02:00
Fabian Holzwarth
ce49a4b9a5 feat: reduce temporary objects and repeated loops 2025-07-12 13:14:55 +02:00
Fabian Holzwarth
03b3692724 feat: update indepenedentest 2025-07-11 12:50:27 +02:00
Fabian Holzwarth
f0022d2b6f feat: use presized hashMaps to reduce resizing 2025-07-11 11:22:06 +02:00
Fabian Holzwarth
b1015cfa82 feat: update logging and add success-level 2025-07-07 16:19:04 +02:00
Fabian Holzwarth
b63a27a0a0 feat: improve server by assigning configured thread pools 2025-07-07 15:59:46 +02:00
Fabian Holzwarth
3b0a53d3c4 feat: add cross dependency, fix: socket closing and error messages 2025-07-07 15:19:56 +02:00
Fabian Holzwarth
50dbbf5f86 feat: implement generalized socket client, server logger and cleanup code 2025-07-07 14:01:27 +02:00
Fabian Holzwarth
130c491ac0 feat: more Boxing replacements 2025-07-06 15:49:59 +02:00
Fabian Holzwarth
9f9b264ac4 feat: replace unnecessary boxing with primitives 2025-07-06 15:18:51 +02:00
Fabian Holzwarth
1393db05c2 feat: implement lazy evaluation for logger outputs 2025-07-06 13:37:47 +02:00
Fabian Holzwarth
93e1a8787c feat: do not create a new context, if nothing changes 2025-07-05 11:43:10 +02:00
Fabian Holzwarth
0129d7540f feat: use perMessageDeflate compression in websocket and use logger for message output 2025-07-05 11:16:06 +02:00
Fabian Holzwarth
7ea8337aee feat: remove unused logging library 2025-07-05 11:15:33 +02:00
Fabian Holzwarth
28458d405f feat: ignore server test 2025-07-02 15:47:55 +02:00
Fabian Holzwarth
1b905cb3e2 feat: implement loggers for the rest of the compiler 2025-07-01 23:06:09 +02:00
Fabian Holzwarth
d02c3583e9 feat: implement new logger into type inference code 2025-07-01 22:16:29 +02:00
Fabian Holzwarth
ca98e83fd2 feat: added logger 2025-07-01 21:21:39 +02:00
Fabian Holzwarth
c80a0c8596 feat: fix error by reintroducing name generator and add server tests 2025-06-30 16:42:20 +02:00
Fabian Holzwarth
2278fb1b91 feat: undo removing NameGenerator to fix errors in ast generation 2025-06-30 12:46:41 +02:00
Fabian Holzwarth
32b16cd5fd feat: replace concurrent modification with correct function call 2025-06-30 11:49:53 +02:00
Fabian Holzwarth
fd30c5f63f feat: prevent reusing the placeholder registry in tests 2025-06-29 16:04:54 +02:00
Fabian Holzwarth
8bfd6ae255 feat: remove redundant lambda functions and Set-resizings 2025-06-28 14:48:43 +02:00
Fabian Holzwarth
ad2dfb13bd feat: speedup toString methods by using a StringBuilder instead of String concatenation 2025-06-28 14:30:12 +02:00
Fabian Holzwarth
501633a90c feat: fix test with null methodSignatureConstraint 2025-06-28 14:06:34 +02:00
Fabian Holzwarth
4defa50ca2 feat: added version check on connecting 2025-06-25 19:48:29 +02:00
Fabian Holzwarth
d65e90536a feat: replace NameGenerator with instance of PlaceholderRegistry to prevent duplicates 2025-06-25 19:15:28 +02:00
Fabian Holzwarth
3de7f1aa61 fix: try generating new placeholders only in current placeholderRegistry context to prevent duplicates 2025-06-25 17:38:56 +02:00
Fabian Holzwarth
029e40b775 feat: make packets directional and self handling 2025-06-25 17:35:49 +02:00
Fabian Holzwarth
459bfcdd5f feat: added tests for client-server communication 2025-06-23 16:13:43 +02:00
Fabian Holzwarth
02886c38ea feat: fixed error in object serialization 2025-06-23 16:13:21 +02:00
92 changed files with 3138 additions and 1367 deletions

View File

@@ -10,6 +10,7 @@ mkdir $TDIR
cd $TDIR cd $TDIR
git clone $REPO . git clone $REPO .
git checkout feat/unify-server git checkout feat/unify-server
# git checkout 93e1a8787cd94c73f4538f6a348f58613893a584
# git checkout dad468368b86bdd5a3d3b2754b17617cee0a9107 # 1:55 # git checkout dad468368b86bdd5a3d3b2754b17617cee0a9107 # 1:55
# git checkout a0c11b60e8c9d7addcbe0d3a09c9ce2924e9d5c0 # 2:25 # git checkout a0c11b60e8c9d7addcbe0d3a09c9ce2924e9d5c0 # 2:25
# git checkout 4cddf73e6d6c9116d3e1705c4b27a8e7f18d80c3 # 2:27 # git checkout 4cddf73e6d6c9116d3e1705c4b27a8e7f18d80c3 # 2:27
@@ -19,15 +20,14 @@ git checkout feat/unify-server
# git checkout 1391206dfe59263cdb22f93371cfd1dd5465d97f # 1:29 # git checkout 1391206dfe59263cdb22f93371cfd1dd5465d97f # 1:29
date "+%Y.%m.%d %H:%M:%S" date "+%Y.%m.%d %H:%M:%S"
# sed -i -e 's/source>21/source>23/g' pom.xml
# mvn clean compile -DskipTests package # sed -i -e 's/target>21/target>23/g' pom.xml
## prefix each stderr line with " | "
# exec 2> >(trap "" INT TERM; sed 's/^/ | /' >&2)
# echo -e "\nMatrix test:\n |"
# time java -jar target/JavaTXcompiler-0.1-jar-with-dependencies.jar resources/bytecode/javFiles/Matrix.jav >/dev/null;
mvn clean compile test mvn clean compile -DskipTests package
time java -jar target/JavaTXcompiler-0.1-jar-with-dependencies.jar resources/bytecode/javFiles/Matrix.jav;
# mvn clean compile test
echo -e "\nCleanup... " echo -e "\nCleanup... "

View File

@@ -59,6 +59,11 @@ http://maven.apache.org/maven-v4_0_0.xsd">
<artifactId>jackson-databind</artifactId> <artifactId>jackson-databind</artifactId>
<version>2.17.2</version> <version>2.17.2</version>
</dependency> </dependency>
<dependency>
<groupId>com.diogonunes</groupId>
<artifactId>JColor</artifactId>
<version>5.5.1</version>
</dependency>
</dependencies> </dependencies>
<build> <build>
@@ -153,4 +158,4 @@ http://maven.apache.org/maven-v4_0_0.xsd">
<url>file:///${project.basedir}/maven-repository/</url> <url>file:///${project.basedir}/maven-repository/</url>
</repository> </repository>
</distributionManagement> </distributionManagement>
</project> </project>
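
The JColor dependency added above presumably backs the colored log-level prefixes used by the new loggers further down (see the coloredPrefix in ServerTaskLogger). The following is an illustration only, a minimal sketch of JColor 5.x usage; the project's actual prefix formatting is not part of this excerpt, and the prefix texts and attribute choices are assumptions.

import static com.diogonunes.jcolor.Ansi.colorize;
import static com.diogonunes.jcolor.Attribute.*;

// Illustration only: building colored log-level prefixes with JColor 5.x.
// The prefix texts and attribute choices are assumptions, not the project's code.
public class ColorPrefixDemo {
    public static void main(String[] args) {
        String error = colorize("[ERROR]", RED_TEXT(), BOLD());
        String success = colorize("[SUCCESS]", GREEN_TEXT());
        System.out.println(error + " could not connect to unify server");
        System.out.println(success + " Example.class file generated");
    }
}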

View File

@@ -0,0 +1,51 @@
class C1 {
C1 self() {
return this;
}
}
class C2 {
C2 self() {
return this;
}
}
class Example {
untypedMethod(var) {
return var.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self();
}
}

View File

@@ -0,0 +1,43 @@
import java.lang.Integer;
import java.lang.Boolean;
import java.util.Queue;
import java.util.Vector;
import java.util.List;
import java.util.ArrayDeque;
class Pos {
public Integer x;
public Integer y;
public Pos(Integer x, Integer y) {
this.x = x;
this.y = y;
}
}
class GridSearch {
Pos search(Vector<Vector<Boolean>> grid) {
var w = grid.size();
var h = grid.getFirst().size();
// keep a queue on which cells to check
var cellQueue = new ArrayDeque<Pos>();
cellQueue.add(new Pos(0,0));
while (!cellQueue.isEmpty()) {
var pos = cellQueue.poll();
// if the target was found: return the position
var value = grid.get(pos.x).get(pos.y);
if (value) {
return pos;
}
// keep searching on neighboring tiles
if (pos.x < w-1) cellQueue.add(new Pos(pos.x + 1, pos.y));
if (pos.y < h-1) cellQueue.add(new Pos(pos.x, pos.y + 1));
}
return (Pos)null;
}
}

View File

@@ -0,0 +1,42 @@
import java.util.List;
import java.util.AbstractList;
import java.util.Vector;
import java.lang.Integer;
class Pixel {
public color;
}
class Mask {
mask;
Mask(mask) {
this.mask = mask;
}
apply(pixels) {
var w = mask.size();
var h = mask.get(0).size();
var imgW = pixels.size();
var imgH = pixels.get(0).size();
for (var x = 0; x < imgW - w; x++) {
for (var y = 0; y < imgH - h; y++) {
var total = 0;
for (var xd = 0; xd < w; xd++) {
for (var yd = 0; yd < h; yd++) {
var p = pixels.get(x + xd).get(y + yd);
var m = mask.get(xd).get(yd);
total = total + (p.color * m);
}
}
pixels.get(x).get(y).color = total;
}
}
return pixels;
}
}

View File

@@ -0,0 +1,39 @@
import java.lang.Integer;
import java.lang.Boolean;
import java.util.ArrayList;
import java.util.HashMap;
public class PascalsTriangle {
create(n) {
var rows = new ArrayList<ArrayList<Integer>>();
var evens = new ArrayList<ArrayList<Boolean>>();
if (n <= 0) return rows;
// first row
rows.add(new ArrayList<Integer>(1));
evens.add(new ArrayList<Boolean>(false));
for (int y = 1; y < n; y++) {
var row = new ArrayList<Integer>();
var evensRow = new ArrayList<Boolean>();
row.add(1);
evensRow.add(false);
for (int x = 1; x < y-1; x++) {
int tl = rows.getLast().get(x-1);
int tr = rows.getLast().get(x);
row.add(tl + tr);
evensRow.add(((tl + tr) % 2) == 1);
}
row.add(1);
rows.add(row);
evensRow.add(false);
evens.add(evensRow);
}
return rows;
}
}

View File

@@ -0,0 +1,17 @@
import java.util.List;
import java.lang.Integer;
//import java.util.Collection;
public class Merge2 {
public merge(a, b) {
a.addAll(b);
return a;
}
public sort(in){
var firstHalf = in.subList(1,2);
return merge(sort(firstHalf), sort(in));
}
}

View File

@@ -8,6 +8,7 @@ import de.dhbwstuttgart.target.generate.ASTToTargetAST;
import de.dhbwstuttgart.target.tree.*; import de.dhbwstuttgart.target.tree.*;
import de.dhbwstuttgart.target.tree.expression.*; import de.dhbwstuttgart.target.tree.expression.*;
import de.dhbwstuttgart.target.tree.type.*; import de.dhbwstuttgart.target.tree.type.*;
import de.dhbwstuttgart.util.Logger;
import org.objectweb.asm.*; import org.objectweb.asm.*;
import java.lang.invoke.*; import java.lang.invoke.*;
@@ -19,6 +20,8 @@ import static de.dhbwstuttgart.target.tree.expression.TargetBinaryOp.*;
import static de.dhbwstuttgart.target.tree.expression.TargetLiteral.*; import static de.dhbwstuttgart.target.tree.expression.TargetLiteral.*;
public class Codegen { public class Codegen {
public static Logger logger = new Logger("codegen");
private final TargetStructure clazz; private final TargetStructure clazz;
private final ClassWriter cw; private final ClassWriter cw;
public final String className; public final String className;
@@ -1317,7 +1320,7 @@ public class Codegen {
types.add(Type.getObjectType(guard.inner().type().getInternalName())); types.add(Type.getObjectType(guard.inner().type().getInternalName()));
// TODO Same here we need to evaluate constant; // TODO Same here we need to evaluate constant;
} else { } else {
System.out.println(label); logger.info(label);
throw new NotImplementedException(); throw new NotImplementedException();
} }
} }

View File

@@ -120,7 +120,7 @@ public class FunNGenerator {
superFunNMethodDescriptor.append(")V"); superFunNMethodDescriptor.append(")V");
} }
System.out.println(superFunNMethodSignature); Codegen.logger.info(superFunNMethodSignature);
ClassWriter classWriter = new ClassWriter(0); ClassWriter classWriter = new ClassWriter(0);
MethodVisitor methodVisitor; MethodVisitor methodVisitor;

View File

@@ -1,13 +1,19 @@
package de.dhbwstuttgart.core; package de.dhbwstuttgart.core;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.util.Logger;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.util.*; import java.util.*;
public class ConsoleInterface { public class ConsoleInterface {
private static final String directory = System.getProperty("user.dir");
/**
* Leave the argument configurations here for the rest of the code to read
*/
public static Logger.LogLevel logLevel = Logger.LogLevel.ERROR;
public static boolean writeLogFiles = false;
public static Optional<String> unifyServerUrl = Optional.empty();
public static void main(String[] args) throws IOException, ClassNotFoundException { public static void main(String[] args) throws IOException, ClassNotFoundException {
List<File> input = new ArrayList<>(); List<File> input = new ArrayList<>();
@@ -15,7 +21,6 @@ public class ConsoleInterface {
String outputPath = null; String outputPath = null;
Iterator<String> it = Arrays.asList(args).iterator(); Iterator<String> it = Arrays.asList(args).iterator();
Optional<Integer> serverPort = Optional.empty(); Optional<Integer> serverPort = Optional.empty();
Optional<String> unifyServer = Optional.empty();
if (args.length == 0) { if (args.length == 0) {
System.out.println("No input files given. Get help with --help"); System.out.println("No input files given. Get help with --help");
@@ -25,7 +30,9 @@ public class ConsoleInterface {
"\t-cp\tSet Classpath\n" + "\t-cp\tSet Classpath\n" +
"\t-d\tSet destination directory\n" + "\t-d\tSet destination directory\n" +
"\t[--server-mode <port>]\n" + "\t[--server-mode <port>]\n" +
"\t[--unify-server <url>]\n"); "\t[--unify-server <url>]\n" +
"\t[--write-logs]\n" +
"\t[-v|-vv-|-vvv]");
System.exit(1); System.exit(1);
} }
while (it.hasNext()) { while (it.hasNext()) {
@@ -42,22 +49,31 @@ public class ConsoleInterface {
} else if (arg.equals("--server-mode")) { } else if (arg.equals("--server-mode")) {
serverPort = Optional.of(Integer.parseInt(it.next())); serverPort = Optional.of(Integer.parseInt(it.next()));
} else if (arg.equals("--unify-server")) { } else if (arg.equals("--unify-server")) {
unifyServer = Optional.of(it.next()); unifyServerUrl = Optional.of(it.next());
} else if (arg.equals("--write-logs")) {
ConsoleInterface.writeLogFiles = true;
} else if (arg.startsWith("-v")) {
logLevel = switch (arg) {
case "-v" -> Logger.LogLevel.WARNING;
case "-vv" -> Logger.LogLevel.INFO;
case "-vvv" -> Logger.LogLevel.DEBUG;
default -> throw new IllegalArgumentException("Argument " + arg + " is not a valid verbosity level");
};
} else { } else {
input.add(new File(arg)); input.add(new File(arg));
} }
} }
if (serverPort.isPresent()) { if (serverPort.isPresent()) {
if (unifyServer.isPresent()) throw new RuntimeException("Cannot use unifyServer when in server mode!"); if (unifyServerUrl.isPresent()) throw new RuntimeException("Cannot use unifyServer when in server mode!");
JavaTXServer server = new JavaTXServer(serverPort.get());
JavaTXServer server = new JavaTXServer(); server.listen();
server.listen(serverPort.get());
} }
else { else {
JavaTXCompiler compiler = new JavaTXCompiler(input, classpath, outputPath != null ? new File(outputPath) : null, unifyServer); JavaTXCompiler compiler = new JavaTXCompiler(input, classpath, outputPath != null ? new File(outputPath) : null);
//compiler.typeInference(); //compiler.typeInference();
compiler.generateBytecode(); compiler.generateBytecode();
SocketClient.closeIfOpen();
} }
} }

View File

@@ -13,6 +13,10 @@ import de.dhbwstuttgart.parser.antlr.Java17Parser.SourceFileContext;
import de.dhbwstuttgart.parser.scope.JavaClassName; import de.dhbwstuttgart.parser.scope.JavaClassName;
import de.dhbwstuttgart.parser.scope.JavaClassRegistry; import de.dhbwstuttgart.parser.scope.JavaClassRegistry;
import de.dhbwstuttgart.server.SocketClient; import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.packet.SetAutoclosePacket;
import de.dhbwstuttgart.server.packet.UnifyRequestPacket;
import de.dhbwstuttgart.server.packet.UnifyResultPacket;
import de.dhbwstuttgart.syntaxtree.ClassOrInterface; import de.dhbwstuttgart.syntaxtree.ClassOrInterface;
import de.dhbwstuttgart.syntaxtree.GenericTypeVar; import de.dhbwstuttgart.syntaxtree.GenericTypeVar;
import de.dhbwstuttgart.syntaxtree.Method; import de.dhbwstuttgart.syntaxtree.Method;
@@ -20,6 +24,7 @@ import de.dhbwstuttgart.syntaxtree.ParameterList;
import de.dhbwstuttgart.syntaxtree.SourceFile; import de.dhbwstuttgart.syntaxtree.SourceFile;
import de.dhbwstuttgart.syntaxtree.GenericDeclarationList; import de.dhbwstuttgart.syntaxtree.GenericDeclarationList;
import de.dhbwstuttgart.syntaxtree.factory.ASTFactory; import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
import de.dhbwstuttgart.syntaxtree.factory.NameGenerator;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory; import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.syntaxtree.type.ExtendsWildcardType; import de.dhbwstuttgart.syntaxtree.type.ExtendsWildcardType;
import de.dhbwstuttgart.syntaxtree.type.GenericRefType; import de.dhbwstuttgart.syntaxtree.type.GenericRefType;
@@ -53,6 +58,7 @@ import de.dhbwstuttgart.typeinference.unify.UnifyResultListenerImpl;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel; import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel; import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;
import de.dhbwstuttgart.util.Logger;
import java.io.*; import java.io.*;
import java.lang.reflect.Modifier; import java.lang.reflect.Modifier;
import java.nio.file.Path; import java.nio.file.Path;
@@ -65,43 +71,39 @@ import org.apache.commons.io.output.NullOutputStream;
public class JavaTXCompiler { public class JavaTXCompiler {
// do not use this in any code, that can be executed serverside!
public static PlaceholderRegistry defaultClientPlaceholderRegistry = new PlaceholderRegistry();
public static Logger defaultLogger = new Logger();
// public static JavaTXCompiler INSTANCE; // public static JavaTXCompiler INSTANCE;
final CompilationEnvironment environment; final CompilationEnvironment environment;
Boolean resultmodel = true; Boolean resultmodel = true;
public final Map<File, SourceFile> sourceFiles = new HashMap<>(); public final Map<File, SourceFile> sourceFiles = new HashMap<>();
Boolean log = false; //gibt an ob ein Log-File nach System.getProperty("user.dir")+""/logFiles/"" geschrieben werden soll?
public volatile UnifyTaskModel usedTasks = new UnifyTaskModel(); public volatile UnifyTaskModel usedTasks = new UnifyTaskModel();
public final DirectoryClassLoader classLoader; public final DirectoryClassLoader classLoader;
public final List<File> classPath; public final List<File> classPath;
private final File outputPath; private final File outputPath;
private final Optional<String> unifyServer;
public DirectoryClassLoader getClassLoader() { public DirectoryClassLoader getClassLoader() {
return classLoader; return classLoader;
} }
public JavaTXCompiler(File sourceFile) throws IOException, ClassNotFoundException {
this(Arrays.asList(sourceFile), List.of(), new File("."), Optional.empty());
}
public JavaTXCompiler(File sourceFile, Boolean log) throws IOException, ClassNotFoundException { public JavaTXCompiler(File sourceFile) throws IOException, ClassNotFoundException {
this(sourceFile); this(Collections.singletonList(sourceFile), List.of(), new File("."));
this.log = log;
} }
public JavaTXCompiler(List<File> sourceFiles) throws IOException, ClassNotFoundException { public JavaTXCompiler(List<File> sourceFiles) throws IOException, ClassNotFoundException {
this(sourceFiles, List.of(), new File("."), Optional.empty()); this(sourceFiles, List.of(), new File("."));
} }
public JavaTXCompiler(List<File> sources, List<File> contextPath, File outputPath) throws IOException, ClassNotFoundException { public JavaTXCompiler(List<File> sources, List<File> contextPath, File outputPath) throws IOException, ClassNotFoundException {
this(sources, contextPath, outputPath, Optional.empty()); // ensure new default placeholder registry for tests
} defaultClientPlaceholderRegistry = new PlaceholderRegistry();
NameGenerator.reset();
ASTToTargetAST.OBJECT = ASTFactory.createObjectType();
public JavaTXCompiler(List<File> sources, List<File> contextPath, File outputPath, Optional<String> unifyServer) throws IOException, ClassNotFoundException {
this.unifyServer = unifyServer;
var path = new ArrayList<>(contextPath); var path = new ArrayList<>(contextPath);
if (contextPath.isEmpty()) { if (contextPath.isEmpty()) {
// When no contextPaths are given, the working directory is the sources root // When no contextPaths are given, the working directory is the sources root
@@ -311,52 +313,51 @@ public class JavaTXCompiler {
Set<Set<UnifyPair>> results = new HashSet<>(); Set<Set<UnifyPair>> results = new HashSet<>();
UnifyResultModel urm = null; UnifyResultModel urm = null;
// urm.addUnifyResultListener(resultListener); // urm.addUnifyResultListener(resultListener);
UnifyContext context = new UnifyContext(logFile, log, true, urm, usedTasks, new PlaceholderRegistry()); logFile = logFile == null ? new FileWriter("log_" + sourceFiles.keySet().iterator().next().getName()) : logFile;
try { Logger logger = new Logger(logFile, "TypeInferenceAsync");
logFile = logFile == null ? new FileWriter(new File("log_" + sourceFiles.keySet().iterator().next().getName())) : logFile; UnifyContext context = new UnifyContext(logger, true, urm, usedTasks, defaultClientPlaceholderRegistry);
IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses, logFile, getClassLoader(), this, context.placeholderRegistry());
System.out.println(finiteClosure);
urm = new UnifyResultModel(cons, finiteClosure);
urm.addUnifyResultListener(resultListener);
ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons, context.placeholderRegistry());
Function<UnifyPair, UnifyPair> distributeInnerVars = x -> { IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses, logger, getClassLoader(), this, context.placeholderRegistry());
UnifyType lhs, rhs; logger.info(finiteClosure.toString());
if (((lhs = x.getLhsType()) instanceof PlaceholderType) && ((rhs = x.getRhsType()) instanceof PlaceholderType) && (((PlaceholderType) lhs).isInnerType() || ((PlaceholderType) rhs).isInnerType())) { urm = new UnifyResultModel(cons, finiteClosure);
((PlaceholderType) lhs).setInnerType(true); urm.addUnifyResultListener(resultListener);
((PlaceholderType) rhs).setInnerType(true); ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons, context.placeholderRegistry());
}
return x;
}; Function<UnifyPair, UnifyPair> distributeInnerVars = x -> {
logFile.write(unifyCons.toString()); UnifyType lhs, rhs;
unifyCons = unifyCons.map(distributeInnerVars); if (((lhs = x.getLhsType()) instanceof PlaceholderType) && ((rhs = x.getRhsType()) instanceof PlaceholderType) && (((PlaceholderType) lhs).isInnerType() || ((PlaceholderType) rhs).isInnerType())) {
logFile.write(unifyCons.toString()); ((PlaceholderType) lhs).setInnerType(true);
// Set<Set<UnifyPair>> results = new HashSet<>(); Nach vorne gezogen ((PlaceholderType) rhs).setInnerType(true);
logFile.write("FC:\\" + finiteClosure.toString() + "\n");
for (SourceFile f : this.sourceFiles.values()) {
logFile.write(ASTTypePrinter.print(f));
} }
// logFile.flush(); return x;
Set<PlaceholderType> varianceTPHold;
Set<PlaceholderType> varianceTPH = new HashSet<>();
varianceTPH = varianceInheritanceConstraintSet(unifyCons);
/* };
* PL 2018-11-07 wird in varianceInheritanceConstraintSet erledigt do { //PL 2018-11-05 Huellenbildung Variance auf alle TPHs der Terme auf der jeweiligen //anderen Seite übertragen varianceTPHold = new HashSet<>(varianceTPH); varianceTPH = varianceInheritanceConstraintSet(unifyCons); unifyCons.map( y -> { if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType() instanceof PlaceholderType)) { if (((PlaceholderType)y.getLhsType()).getVariance() != 0 && logger.debug(unifyCons.toString());
* ((PlaceholderType)y.getRhsType()).getVariance() == 0) { ((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType( )).getVariance()); } if (((PlaceholderType)y.getLhsType()).getVariance() == 0 && ((PlaceholderType)y.getRhsType()).getVariance() != 0) { ((PlaceholderType)y.getLhsType()).setVariance(((PlaceholderType)y.getRhsType( )).getVariance()); } } return y; } ); } while (!varianceTPHold.equals(varianceTPH)); unifyCons = unifyCons.map(distributeInnerVars);
*/ logger.debug(unifyCons.toString());
// Set<Set<UnifyPair>> results = new HashSet<>(); Nach vorne gezogen
// Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure, logger.debug("FC:\\" + finiteClosure.toString() + "\n");
// logFile, log); for (SourceFile f : this.sourceFiles.values()) {
// Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure); logger.debug(ASTTypePrinter.print(f));
List<Set<Constraint<UnifyPair>>> oderConstraints = unifyCons.getOderConstraints()/*
* .stream().map(x -> { Set<Set<UnifyPair>> ret = new HashSet<>(); for (Constraint<UnifyPair> y : x) { ret.add(new HashSet<>(y)); } return ret; }).collect(Collectors. toCollection(ArrayList::new))
*/;
TypeUnify.unifyAsync(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
} catch (IOException e) {
System.err.println("kein LogFile");
} }
// logFile.flush();
Set<PlaceholderType> varianceTPHold;
Set<PlaceholderType> varianceTPH = new HashSet<>();
varianceTPH = varianceInheritanceConstraintSet(unifyCons);
/*
* PL 2018-11-07 wird in varianceInheritanceConstraintSet erledigt do { //PL 2018-11-05 Huellenbildung Variance auf alle TPHs der Terme auf der jeweiligen //anderen Seite übertragen varianceTPHold = new HashSet<>(varianceTPH); varianceTPH = varianceInheritanceConstraintSet(unifyCons); unifyCons.map( y -> { if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType() instanceof PlaceholderType)) { if (((PlaceholderType)y.getLhsType()).getVariance() != 0 &&
* ((PlaceholderType)y.getRhsType()).getVariance() == 0) { ((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType( )).getVariance()); } if (((PlaceholderType)y.getLhsType()).getVariance() == 0 && ((PlaceholderType)y.getRhsType()).getVariance() != 0) { ((PlaceholderType)y.getLhsType()).setVariance(((PlaceholderType)y.getRhsType( )).getVariance()); } } return y; } ); } while (!varianceTPHold.equals(varianceTPH));
*/
// Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure,
// logFile, log);
// Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
List<Set<Constraint<UnifyPair>>> oderConstraints = unifyCons.getOderConstraints()/*
* .stream().map(x -> { Set<Set<UnifyPair>> ret = new HashSet<>(); for (Constraint<UnifyPair> y : x) { ret.add(new HashSet<>(y)); } return ret; }).collect(Collectors. toCollection(ArrayList::new))
*/;
TypeUnify.unifyAsync(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
return urm; return urm;
} }
@@ -377,103 +378,106 @@ public class JavaTXCompiler {
final ConstraintSet<Pair> cons = getConstraints(file); final ConstraintSet<Pair> cons = getConstraints(file);
Set<Set<UnifyPair>> results = new HashSet<>(); Set<Set<UnifyPair>> results = new HashSet<>();
PlaceholderRegistry placeholderRegistry = new PlaceholderRegistry(); PlaceholderRegistry placeholderRegistry = new PlaceholderRegistry();
try {
var logFolder = new File(System.getProperty("user.dir") + "/logFiles/");
if (log) logFolder.mkdirs();
Writer logFile = log ? new FileWriter(new File(logFolder, "log_" + sourceFiles.keySet().iterator().next().getName())) : new OutputStreamWriter(new NullOutputStream());
FiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses.stream().toList(), logFile, classLoader, this, placeholderRegistry);
System.out.println(finiteClosure);
ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons, placeholderRegistry);
System.out.println("xxx1");
Function<UnifyPair, UnifyPair> distributeInnerVars = x -> {
UnifyType lhs, rhs;
if (((lhs = x.getLhsType()) instanceof PlaceholderType) && ((rhs = x.getRhsType()) instanceof PlaceholderType) && (((PlaceholderType) lhs).isInnerType() || ((PlaceholderType) rhs).isInnerType())) {
((PlaceholderType) lhs).setInnerType(true);
((PlaceholderType) rhs).setInnerType(true);
}
return x;
}; var logFolder = new File(System.getProperty("user.dir") + "/logFiles/");
if (ConsoleInterface.writeLogFiles && !logFolder.mkdirs()) throw new RuntimeException("Could not create directory for log files: " + logFolder);
Writer logFile = ConsoleInterface.writeLogFiles ? new FileWriter(new File(logFolder, "log_" + sourceFiles.keySet().iterator().next().getName())) : new OutputStreamWriter(new NullOutputStream());
Logger logger = new Logger(logFile, "TypeInference");
logFile.write("Unify:" + unifyCons.toString()); FiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses.stream().toList(), logger, classLoader, this, placeholderRegistry);
System.out.println("Unify:" + unifyCons.toString()); logger.info(finiteClosure.toString());
unifyCons = unifyCons.map(distributeInnerVars); ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons, placeholderRegistry);
logFile.write("\nUnify_distributeInnerVars: " + unifyCons.toString()); logger.info("xxx1");
// Set<Set<UnifyPair>> results = new HashSet<>(); Nach vorne gezogen Function<UnifyPair, UnifyPair> distributeInnerVars = x -> {
logFile.write("FC:\\" + finiteClosure.toString() + "\n"); UnifyType lhs, rhs;
logFile.write(ASTTypePrinter.print(sf)); if (((lhs = x.getLhsType()) instanceof PlaceholderType) && ((rhs = x.getRhsType()) instanceof PlaceholderType) && (((PlaceholderType) lhs).isInnerType() || ((PlaceholderType) rhs).isInnerType())) {
System.out.println(ASTTypePrinter.print(sf)); ((PlaceholderType) lhs).setInnerType(true);
// logFile.flush(); ((PlaceholderType) rhs).setInnerType(true);
System.out.println("Unify nach Oder-Constraints-Anpassung:" + unifyCons.toString());
Set<PlaceholderType> varianceTPHold;
Set<PlaceholderType> varianceTPH = new HashSet<>();
varianceTPH = varianceInheritanceConstraintSet(unifyCons);
/*
* PL 2018-11-07 wird in varianceInheritanceConstraintSet erledigt do { //PL 2018-11-05 Huellenbildung Variance auf alle TPHs der Terme auf der jeweiligen //anderen Seite übertragen varianceTPHold = new HashSet<>(varianceTPH); varianceTPH = varianceInheritanceConstraintSet(unifyCons); unifyCons.map( y -> { if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType() instanceof PlaceholderType)) { if (((PlaceholderType)y.getLhsType()).getVariance() != 0 &&
* ((PlaceholderType)y.getRhsType()).getVariance() == 0) { ((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType( )).getVariance()); } if (((PlaceholderType)y.getLhsType()).getVariance() == 0 && ((PlaceholderType)y.getRhsType()).getVariance() != 0) { ((PlaceholderType)y.getLhsType()).setVariance(((PlaceholderType)y.getRhsType( )).getVariance()); } } return y; } ); } while (!varianceTPHold.equals(varianceTPH));
*/
// Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure,
// logFile, log);
// Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
List<Set<Constraint<UnifyPair>>> oderConstraints = unifyCons.getOderConstraints()// .stream().map(x -> {
/*
* Set<Set<UnifyPair>> ret = new HashSet<>(); for (Constraint<UnifyPair> y : x) { ret.add(new HashSet<>(y)); } return ret; }).collect(Collectors.toCollection(ArrayList::new))
*/;
if (unifyServer.isPresent()) {
UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
UnifyContext context = new UnifyContext(logFile, log, true, urm, usedTasks, placeholderRegistry);
var socketClient = new SocketClient(unifyServer.get());
return socketClient.execute(finiteClosure, cons, unifyCons, context);
}
else if (resultmodel) {
/* UnifyResultModel Anfang */
UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
UnifyResultListenerImpl li = new UnifyResultListenerImpl();
urm.addUnifyResultListener(li);
UnifyContext context = new UnifyContext(logFile, log, true, urm, usedTasks, placeholderRegistry);
TypeUnify.unifyParallel(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
System.out.println("RESULT Final: " + li.getResults());
System.out.println("Constraints for Generated Generics: " + " ???");
logFile.write("RES_FINAL: " + li.getResults().toString() + "\n");
// logFile.flush();
return li.getResults();
} }
/* UnifyResultModel End */ return x;
else {
// Set<Set<UnifyPair>> result = unify.unify(unifyCons.getUndConstraints(),
// oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons,
// finiteClosure));
UnifyContext context = new UnifyContext(logFile, log, false, new UnifyResultModel(cons, finiteClosure), usedTasks, placeholderRegistry);
Set<Set<UnifyPair>> result = TypeUnify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
System.out.println("RESULT: " + result);
logFile.write("RES: " + result.toString() + "\n");
// logFile.flush();
results.addAll(result);
results = results.stream().map(x -> { };
Optional<Set<UnifyPair>> res = new RuleSet(placeholderRegistry).subst(x.stream().map(y -> {
if (y.getPairOp() == PairOperator.SMALLERDOTWC) logger.debug("Unify:" + unifyCons.toString());
y.setPairOp(PairOperator.EQUALSDOT); logger.info("Unify:" + unifyCons.toString());
return y; // alle Paare a <.? b erden durch a =. b ersetzt unifyCons = unifyCons.map(distributeInnerVars);
}).collect(Collectors.toCollection(HashSet::new))); logger.debug("\nUnify_distributeInnerVars: " + unifyCons.toString());
if (res.isPresent()) {// wenn subst ein Erg liefert wurde was veraendert // Set<Set<UnifyPair>> results = new HashSet<>(); Nach vorne gezogen
return new TypeUnifyTask(context).applyTypeUnificationRules(res.get(), finiteClosure); logger.debug("FC:\\" + finiteClosure.toString() + "\n");
} else logger.debug(ASTTypePrinter.print(sf));
return x; // wenn nichts veraendert wurde wird x zurueckgegeben logger.info(ASTTypePrinter.print(sf));
}).collect(Collectors.toCollection(HashSet::new)); // logFile.flush();
System.out.println("RESULT Final: " + results); logger.info("Unify nach Oder-Constraints-Anpassung:" + unifyCons.toString());
System.out.println("Constraints for Generated Generics: " + " ???"); Set<PlaceholderType> varianceTPHold;
logFile.write("RES_FINAL: " + results.toString() + "\n"); Set<PlaceholderType> varianceTPH = new HashSet<>();
// logFile.flush(); varianceTPH = varianceInheritanceConstraintSet(unifyCons);
logFile.write("PLACEHOLDERS: " + placeholderRegistry);
// logFile.flush(); /*
} * PL 2018-11-07 wird in varianceInheritanceConstraintSet erledigt do { //PL 2018-11-05 Huellenbildung Variance auf alle TPHs der Terme auf der jeweiligen //anderen Seite übertragen varianceTPHold = new HashSet<>(varianceTPH); varianceTPH = varianceInheritanceConstraintSet(unifyCons); unifyCons.map( y -> { if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType() instanceof PlaceholderType)) { if (((PlaceholderType)y.getLhsType()).getVariance() != 0 &&
} catch (IOException e) { * ((PlaceholderType)y.getRhsType()).getVariance() == 0) { ((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType( )).getVariance()); } if (((PlaceholderType)y.getLhsType()).getVariance() == 0 && ((PlaceholderType)y.getRhsType()).getVariance() != 0) { ((PlaceholderType)y.getLhsType()).setVariance(((PlaceholderType)y.getRhsType( )).getVariance()); } } return y; } ); } while (!varianceTPHold.equals(varianceTPH));
System.err.println("kein LogFile"); */
// Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure,
// logFile, log);
// Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
List<Set<Constraint<UnifyPair>>> oderConstraints = unifyCons.getOderConstraints()// .stream().map(x -> {
/*
* Set<Set<UnifyPair>> ret = new HashSet<>(); for (Constraint<UnifyPair> y : x) { ret.add(new HashSet<>(y)); } return ret; }).collect(Collectors.toCollection(ArrayList::new))
*/;
if (ConsoleInterface.unifyServerUrl.isPresent()) {
UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
UnifyContext context = new UnifyContext(logger, true, urm, usedTasks, placeholderRegistry);
SocketFuture<UnifyResultPacket> future = SocketClient.execute(
UnifyRequestPacket.create(finiteClosure, cons, unifyCons, context.placeholderRegistry())
);
SocketClient.execute(SetAutoclosePacket.create());
return future.get().getResultSet(context);
} }
else if (resultmodel) {
/* UnifyResultModel Anfang */
UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
UnifyResultListenerImpl li = new UnifyResultListenerImpl();
urm.addUnifyResultListener(li);
UnifyContext context = new UnifyContext(logger, true, urm, usedTasks, placeholderRegistry);
TypeUnify.unifyParallel(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
logger.info("RESULT Final: " + li.getResults());
logger.info("Constraints for Generated Generics: " + " ???");
logFile.write("RES_FINAL: " + li.getResults().toString() + "\n");
// logFile.flush();
return li.getResults();
}
/* UnifyResultModel End */
else {
// Set<Set<UnifyPair>> result = unify.unify(unifyCons.getUndConstraints(),
// oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons,
// finiteClosure));
UnifyContext context = new UnifyContext(logger, false, new UnifyResultModel(cons, finiteClosure), usedTasks, placeholderRegistry);
Set<Set<UnifyPair>> result = TypeUnify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
logger.info("RESULT: " + result);
logFile.write("RES: " + result.toString() + "\n");
// logFile.flush();
results.addAll(result);
results = results.stream().map(x -> {
Optional<Set<UnifyPair>> res = new RuleSet(placeholderRegistry).subst(x.stream().map(y -> {
if (y.getPairOp() == PairOperator.SMALLERDOTWC)
y.setPairOp(PairOperator.EQUALSDOT);
return y; // alle Paare a <.? b erden durch a =. b ersetzt
}).collect(Collectors.toCollection(HashSet::new)));
if (res.isPresent()) {// wenn subst ein Erg liefert wurde was veraendert
return new TypeUnifyTask(context).applyTypeUnificationRules(res.get(), finiteClosure);
} else
return x; // wenn nichts veraendert wurde wird x zurueckgegeben
}).collect(Collectors.toCollection(HashSet::new));
logger.info("RESULT Final: " + results);
logger.info("Constraints for Generated Generics: " + " ???");
logger.debug("RES_FINAL: " + results.toString() + "\n");
// logFile.flush();
logger.debug("PLACEHOLDERS: " + placeholderRegistry);
// logFile.flush();
}
return results.stream().map((unifyPairs -> new ResultSet(UnifyTypeFactory.convert(unifyPairs, Pair.generateTPHMap(cons), placeholderRegistry)))).collect(Collectors.toList()); return results.stream().map((unifyPairs -> new ResultSet(UnifyTypeFactory.convert(unifyPairs, Pair.generateTPHMap(cons), placeholderRegistry)))).collect(Collectors.toList());
} }
@@ -660,15 +664,15 @@ public class JavaTXCompiler {
FileOutputStream output; FileOutputStream output;
for (JavaClassName name : classFiles.keySet()) { for (JavaClassName name : classFiles.keySet()) {
byte[] bytecode = classFiles.get(name); byte[] bytecode = classFiles.get(name);
System.out.println("generating " + name + ".class file ..."); defaultLogger.info("generating " + name + ".class file ...");
var subPath = preserveHierarchy ? path : Path.of(path.toString(), name.getPackageName().split("\\.")).toFile(); var subPath = preserveHierarchy ? path : Path.of(path.toString(), name.getPackageName().split("\\.")).toFile();
File outputFile = new File(subPath, name.getClassName() + ".class"); File outputFile = new File(subPath, name.getClassName() + ".class");
outputFile.getAbsoluteFile().getParentFile().mkdirs(); outputFile.getAbsoluteFile().getParentFile().mkdirs();
System.out.println(outputFile); defaultLogger.info(outputFile.toString());
output = new FileOutputStream(outputFile); output = new FileOutputStream(outputFile);
output.write(bytecode); output.write(bytecode);
output.close(); output.close();
System.out.println(name + ".class file generated"); defaultLogger.success(name + ".class file generated");
} }
} }
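
The SocketFuture returned by SocketClient.execute(...) above (and completed later via completeResponseFuture in the SocketClient changes below) is not itself shown in this diff. Below is a minimal latch-based sketch of such a response future, reconstructed from its visible call sites (futureId, isDone(), accept(...), get()); everything beyond those names is an assumption, not the project's implementation.

import java.util.UUID;
import java.util.concurrent.CountDownLatch;

// Minimal sketch of a blocking response future in the spirit of SocketFuture:
// registered under an id when a request packet is sent, completed when the
// matching response packet arrives. Names and details beyond the observed
// call sites (futureId, isDone, accept, get) are assumptions.
public class ResponseFutureSketch<T> {
    public final String futureId = UUID.randomUUID().toString();
    private final Class<T> responseType;
    private final CountDownLatch latch = new CountDownLatch(1);
    private volatile T response;

    public ResponseFutureSketch(Class<T> responseType) {
        this.responseType = responseType;
    }

    public boolean isDone() {
        return latch.getCount() == 0;
    }

    // Called by the packet handler; refuses packets of the wrong type.
    public boolean accept(Object packet) {
        if (!responseType.isInstance(packet)) return false;
        response = responseType.cast(packet);
        latch.countDown();
        return true;
    }

    // Blocks the calling thread until the response has been delivered.
    public T get() {
        try {
            latch.await();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new RuntimeException("Interrupted while waiting for the server response", e);
        }
        return response;
    }
}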

View File

@@ -4,13 +4,27 @@ import de.dhbwstuttgart.server.SocketServer;
public class JavaTXServer { public class JavaTXServer {
public void listen(int port) { public static boolean isRunning = false;
final SocketServer socketServer;
public JavaTXServer(int port) {
this.socketServer = new SocketServer(port);
}
public void listen() {
isRunning = true;
socketServer.start();
}
public void forceStop() {
try { try {
SocketServer socketServer = new SocketServer(port); socketServer.stop();
socketServer.start();
} catch (Exception e) {
e.printStackTrace();
} }
catch (InterruptedException exception) {
System.err.println("Interrupted socketServer: " + exception);
}
isRunning = false;
} }

View File

@@ -7,6 +7,7 @@ import de.dhbwstuttgart.parser.antlr.Java17Parser;
import de.dhbwstuttgart.parser.scope.JavaClassRegistry; import de.dhbwstuttgart.parser.scope.JavaClassRegistry;
import de.dhbwstuttgart.syntaxtree.SourceFile; import de.dhbwstuttgart.syntaxtree.SourceFile;
import de.dhbwstuttgart.util.Logger;
import org.antlr.v4.runtime.CharStream; import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CharStreams; import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream; import org.antlr.v4.runtime.CommonTokenStream;
@@ -17,6 +18,9 @@ import java.util.ArrayList;
import java.util.List; import java.util.List;
public class JavaTXParser { public class JavaTXParser {
public static Logger logger = new Logger("Parser");
public static Java17Parser.SourceFileContext parse(File source) throws IOException, java.lang.ClassNotFoundException { public static Java17Parser.SourceFileContext parse(File source) throws IOException, java.lang.ClassNotFoundException {
InputStream stream = new FileInputStream(source); InputStream stream = new FileInputStream(source);
// DEPRECATED: ANTLRInputStream input = new ANTLRInputStream(stream); // DEPRECATED: ANTLRInputStream input = new ANTLRInputStream(stream);

View File

@@ -28,15 +28,20 @@ public class FCGenerator {
* @param availableClasses - Alle geparsten Klassen * @param availableClasses - Alle geparsten Klassen
*/ */
public static Set<UnifyPair> toUnifyFC(JavaTXCompiler compiler, Collection<ClassOrInterface> availableClasses, ClassLoader classLoader, PlaceholderRegistry placeholderRegistry) throws ClassNotFoundException { public static Set<UnifyPair> toUnifyFC(JavaTXCompiler compiler, Collection<ClassOrInterface> availableClasses, ClassLoader classLoader, PlaceholderRegistry placeholderRegistry) throws ClassNotFoundException {
return toFC(availableClasses, classLoader).stream().map(t -> UnifyTypeFactory.convert(compiler, t, placeholderRegistry)).collect(Collectors.toSet()); return toFC(
availableClasses,
classLoader,
placeholderRegistry
).stream().map(t -> UnifyTypeFactory.convert(compiler, t, placeholderRegistry))
.collect(Collectors.toSet());
} }
public static Set<Pair> toFC(Collection<ClassOrInterface> availableClasses, ClassLoader classLoader) throws ClassNotFoundException { public static Set<Pair> toFC(Collection<ClassOrInterface> availableClasses, ClassLoader classLoader, PlaceholderRegistry placeholderRegistry) throws ClassNotFoundException {
HashSet<Pair> pairs = new HashSet<>(); HashSet<Pair> pairs = new HashSet<>();
//PL 2018-09-18: gtvs vor die for-Schleife gezogen, damit immer die gleichen Typeplaceholder eingesetzt werden. //PL 2018-09-18: gtvs vor die for-Schleife gezogen, damit immer die gleichen Typeplaceholder eingesetzt werden.
HashMap<String, RefTypeOrTPHOrWildcardOrGeneric> gtvs = new HashMap<>(); HashMap<String, RefTypeOrTPHOrWildcardOrGeneric> gtvs = new HashMap<>();
for(ClassOrInterface cly : availableClasses){ for(ClassOrInterface cly : availableClasses){
List<Pair> newPairs = getSuperTypes(cly, availableClasses, gtvs, classLoader); List<Pair> newPairs = getSuperTypes(cly, availableClasses, gtvs, classLoader, placeholderRegistry);
pairs.addAll(newPairs); pairs.addAll(newPairs);
//For all Functional Interfaces FI: FunN$$<... args auf dem Functional Interface ...> <. FI is added to FC //For all Functional Interfaces FI: FunN$$<... args auf dem Functional Interface ...> <. FI is added to FC
@@ -76,8 +81,13 @@ public class FCGenerator {
* @param forType * @param forType
* @return * @return
*/ */
private static List<Pair> getSuperTypes(ClassOrInterface forType, Collection<ClassOrInterface> availableClasses, ClassLoader classLoader) throws ClassNotFoundException { private static List<Pair> getSuperTypes(
return getSuperTypes(forType, availableClasses, new HashMap<>(), classLoader); ClassOrInterface forType,
Collection<ClassOrInterface> availableClasses,
ClassLoader classLoader,
PlaceholderRegistry placeholderRegistry
) throws ClassNotFoundException {
return getSuperTypes(forType, availableClasses, new HashMap<>(), classLoader, placeholderRegistry);
} }
/** /**
@@ -88,8 +98,13 @@ public class FCGenerator {
* @return * @return
* @throws ClassNotFoundException * @throws ClassNotFoundException
*/ */
private static List<Pair> getSuperTypes(ClassOrInterface forType, Collection<ClassOrInterface> availableClasses, private static List<Pair> getSuperTypes(
HashMap<String, RefTypeOrTPHOrWildcardOrGeneric> gtvs, ClassLoader classLoader) throws ClassNotFoundException { ClassOrInterface forType,
Collection<ClassOrInterface> availableClasses,
HashMap<String, RefTypeOrTPHOrWildcardOrGeneric> gtvs,
ClassLoader classLoader,
PlaceholderRegistry placeholderRegistry
) throws ClassNotFoundException {
List<RefTypeOrTPHOrWildcardOrGeneric> params = new ArrayList<>(); List<RefTypeOrTPHOrWildcardOrGeneric> params = new ArrayList<>();
//Die GTVs, die in forType hinzukommen: //Die GTVs, die in forType hinzukommen:
HashMap<String, RefTypeOrTPHOrWildcardOrGeneric> newGTVs = new HashMap<>(); HashMap<String, RefTypeOrTPHOrWildcardOrGeneric> newGTVs = new HashMap<>();
@@ -148,7 +163,7 @@ public class FCGenerator {
if(superClass.getClassName().equals(ASTFactory.createObjectClass().getClassName())){ if(superClass.getClassName().equals(ASTFactory.createObjectClass().getClassName())){
superTypes = Arrays.asList(new Pair(ASTFactory.createObjectType(), ASTFactory.createObjectType(), PairOperator.SMALLER)); superTypes = Arrays.asList(new Pair(ASTFactory.createObjectType(), ASTFactory.createObjectType(), PairOperator.SMALLER));
}else{ }else{
superTypes = getSuperTypes(superClass, availableClasses, newGTVs, classLoader); superTypes = getSuperTypes(superClass, availableClasses, newGTVs, classLoader, placeholderRegistry);
} }
retList.add(ret); retList.add(ret);

View File

@@ -1,5 +1,6 @@
package de.dhbwstuttgart.parser.SyntaxTreeGenerator; package de.dhbwstuttgart.parser.SyntaxTreeGenerator;
import de.dhbwstuttgart.parser.JavaTXParser;
import java.lang.reflect.Modifier; import java.lang.reflect.Modifier;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
@@ -259,7 +260,7 @@ public class StatementGenerator {
ret.setStatement(); ret.setStatement();
return ret; return ret;
default: default:
System.out.println(stmt.getClass()); JavaTXParser.logger.info(stmt.getClass());
throw new NotImplementedException(); throw new NotImplementedException();
} }
} }

View File

@@ -74,7 +74,7 @@ public class TypeGenerator {
throw new NotImplementedException(); throw new NotImplementedException();
} }
} else if (!typeContext.LBRACK().isEmpty()) { // ArrayType über eckige Klammer prüfen } else if (!typeContext.LBRACK().isEmpty()) { // ArrayType über eckige Klammer prüfen
// System.out.println(unannTypeContext.getText()); // JavaTXParser.logger.info(unannTypeContext.getText());
throw new NotImplementedException(); throw new NotImplementedException();
} }
/* /*

View File

@@ -0,0 +1,41 @@
package de.dhbwstuttgart.server;
import de.dhbwstuttgart.util.Logger;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import org.java_websocket.WebSocket;
public class ServerTaskLogger extends Logger {
private final WebSocket webSocket;
private final SocketServer socketServer;
private final LogLevel customLogLevel;
public ServerTaskLogger(WebSocket webSocket, SocketServer socketServer, LogLevel customLogLevel) {
this.webSocket = webSocket;
this.socketServer = socketServer;
this.customLogLevel = customLogLevel;
}
@Override
public boolean isLogLevelActive(LogLevel logLevel) {
return logLevel.isHigherOrEqualTo(customLogLevel);
}
@Override
protected void print(String s, LogLevel logLevel) {
String coloredPrefix = this.getPrefix(logLevel);
if (logLevel.isHigherOrEqualTo(LogLevel.ERROR)) {
socketServer.sendError(webSocket, coloredPrefix + s, false);
}
else {
socketServer.sendMessage(webSocket, coloredPrefix + s);
}
}
@Override
protected void write(String s) {
// under no circumstances write anything to a file
}
}
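
The Logger base class that ServerTaskLogger extends is not included in this excerpt; the commit list only mentions lazy evaluation of logger outputs and an added success level. Below is a minimal sketch of how a level-gated logger can defer message construction behind a Supplier, assuming an ordered LogLevel similar to the one used above; all names are illustrative, not the project's actual Logger API.

import java.util.function.Supplier;

// Sketch of lazy, level-gated logging: the Supplier is only evaluated when the
// level passes the threshold, so expensive toString() calls on filtered-out
// messages are skipped entirely. LogLevel ordering and method names are
// assumptions, not the project's actual Logger API.
public class LazyLoggerSketch {
    enum LogLevel {
        DEBUG, INFO, SUCCESS, WARNING, ERROR;
        boolean isHigherOrEqualTo(LogLevel other) { return ordinal() >= other.ordinal(); }
    }

    private final String prefix;
    private final LogLevel threshold;

    public LazyLoggerSketch(String prefix, LogLevel threshold) {
        this.prefix = prefix;
        this.threshold = threshold;
    }

    public boolean isLogLevelActive(LogLevel level) {
        return level.isHigherOrEqualTo(threshold);
    }

    public void debug(Supplier<String> message) { log(LogLevel.DEBUG, message); }
    public void info(Supplier<String> message)  { log(LogLevel.INFO, message); }
    public void success(String message)         { log(LogLevel.SUCCESS, () -> message); }

    private void log(LogLevel level, Supplier<String> message) {
        if (!isLogLevelActive(level)) return; // message is never built for inactive levels
        System.out.println("[" + prefix + "/" + level + "] " + message.get());
    }

    public static void main(String[] args) {
        LazyLoggerSketch logger = new LazyLoggerSketch("demo", LogLevel.INFO);
        logger.debug(() -> "finite closure: " + expensiveDump()); // filtered out, never computed
        logger.success("Example.class file generated");
    }

    private static String expensiveDump() {
        return "a large constraint set rendered as text"; // stand-in for a costly toString()
    }
}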

View File

@@ -1,25 +1,25 @@
package de.dhbwstuttgart.server; package de.dhbwstuttgart.server;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import de.dhbwstuttgart.server.packet.ErrorPacket; import de.dhbwstuttgart.core.ConsoleInterface;
import de.dhbwstuttgart.server.packet.IClientToServerPacket;
import de.dhbwstuttgart.server.packet.IPacket; import de.dhbwstuttgart.server.packet.IPacket;
import de.dhbwstuttgart.server.packet.InvalidPacket; import de.dhbwstuttgart.server.packet.IServerToClientPacket;
import de.dhbwstuttgart.server.packet.MessagePacket;
import de.dhbwstuttgart.server.packet.PacketContainer; import de.dhbwstuttgart.server.packet.PacketContainer;
import de.dhbwstuttgart.server.packet.UnifyRequestPacket; import de.dhbwstuttgart.server.packet.UnifyRequestPacket;
import de.dhbwstuttgart.server.packet.UnifyResultPacket; import de.dhbwstuttgart.server.packet.UnifyResultPacket;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet; import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair; import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.result.ResultSet; import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.UnifyContext; import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure; import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair; import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import java.io.FileWriter; import de.dhbwstuttgart.util.Logger;
import java.net.URI; import java.net.URI;
import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CountDownLatch; import java.util.concurrent.CountDownLatch;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
@@ -33,36 +33,34 @@ import org.java_websocket.handshake.ServerHandshake;
*/ */
public class SocketClient extends WebSocketClient { public class SocketClient extends WebSocketClient {
// use a latch to wait until the connection is closed by the remote host public static Logger logger = new Logger("SocketClient");
private final CountDownLatch closeLatch = new CountDownLatch(1);
// temporarily: The received unify result packet
private UnifyResultPacket unifyResultPacket = null;
public SocketClient(String url) { /**
super(URI.create(url)); * The singleton object
// make sure, the url is in a valid format */
private static SocketClient socketClient = null;
/**
* List of futures that are still waiting to be fulfilled
*/
private final Map<String, SocketFuture<?>> responseFutures = new HashMap<>();
private SocketClient(String url) {
super(
URI.create(url), // target url
//SocketServer.perMessageDeflateDraft, // enable compression
Map.of( // headers
"packetProtocolVersion", SocketServer.packetProtocolVersion
)
);
// make sure the url is in a valid format
final String regex = "^wss?://(\\w+(\\.\\w+)?)*:(\\d+)$"; final String regex = "^wss?://(\\w+(\\.\\w+)?)*:(\\d+)$";
final Matcher matcher = Pattern.compile(regex).matcher(url); final Matcher matcher = Pattern.compile(regex).matcher(url);
if (!matcher.find()) { if (!matcher.find()) {
throw new RuntimeException("Provided string \"" + url + "\" is not a valid server URL! Use pattern ws(s?)://<host.name>:<port>"); throw new RuntimeException("Provided string \"" + url + "\" is not a valid server URL! Use pattern ws(s?)://<host.name>:<port>");
} }
}
public SocketClient(String host, int port, boolean secure) {
super(URI.create(String.format("%s://%s:%d/", secure ? "wss" : "ws", host, port)));
}
/**
* The main method for connecting, requesting and waiting for the server to unify.
* This is synchronized to prevent multiple webSockets connections at the moment, but it is not called from any
* thread except the main thread right now and is not necessary at all, probably. Maybe remove it later
*/
synchronized public List<ResultSet> execute(
FiniteClosure finiteClosure,
ConstraintSet<Pair> constraintSet,
ConstraintSet<UnifyPair> unifyConstraintSet,
UnifyContext context
) throws JsonProcessingException {
try { try {
// wait for the connection to be set up // wait for the connection to be set up
this.connectBlocking(); this.connectBlocking();
@@ -70,83 +68,135 @@ public class SocketClient extends WebSocketClient {
if (this.getReadyState() != ReadyState.OPEN) { if (this.getReadyState() != ReadyState.OPEN) {
throw new RuntimeException("WebSocket Client could not connect to remote host at " + this.uri); throw new RuntimeException("WebSocket Client could not connect to remote host at " + this.uri);
} }
// send the unify task request
UnifyRequestPacket packet = new UnifyRequestPacket(finiteClosure, constraintSet, unifyConstraintSet);
String json = PacketContainer.serialize(packet);
try (FileWriter w = new FileWriter("./log.json")) {
w.write(json);
}
this.send(json);
// block the thread, until the connection is closed by the remote host (usually after sending the results)
this.waitUntilClosed();
// wait for the connection to fully close
this.closeBlocking();
} catch (InterruptedException exception) { } catch (InterruptedException exception) {
System.err.println("Server connection interrupted: " + exception); throw new RuntimeException(exception);
this.notifyAll();
throw new RuntimeException("Aborted server connection", exception);
}
catch (Exception exception) {
throw new RuntimeException("Exception occurred in server unify: ", exception);
} }
// detect error cases, in which no error was thrown, but also no result was sent back from the server // add a shutdown hook to close the connection when the process ends or is stopped by a SIGINT signal
if (this.unifyResultPacket == null) { Runtime.getRuntime().addShutdownHook(new Thread(this::close));
throw new RuntimeException("Did not receive server response but closed connection already"); }
}
return unifyResultPacket.getResultSet(context); private SocketClient(String host, int port, boolean secure) throws InterruptedException {
this(String.format("%s://%s:%d/", secure ? "wss" : "ws", host, port));
} }
/** /**
* Specific client-side implementations to handle incoming packets * Singleton access method, creates one if none is available
*
* @return The one and only socketClient
*/
private static SocketClient initializeClient() {
if (socketClient == null) {
socketClient = new SocketClient(ConsoleInterface.unifyServerUrl.get());
}
return socketClient;
}
/**
* Send a packet to the server (connection will be created, if none is found) and return a future
* for the response packet
*/
synchronized public static <T extends IServerToClientPacket> SocketFuture<T> execute(IClientToServerPacket<T> packet) {
SocketClient client = initializeClient();
/*
* Create a future that will be associated with the packet and eventually completed
*/
SocketFuture<T> future = packet.getFuture();
if (!future.isDone()) {
client.responseFutures.put(future.futureId, future);
}
/*
* Establish connection, if not already done.
* Serialize the packet and send it to the server.
* Return the future to be handled by the caller.
*/
try {
String json = PacketContainer.serialize(packet);
client.send(json);
} catch (Exception exception) {
logger.exception(exception);
throw new RuntimeException("Exception occurred in server connection: ", exception);
}
return future;
}
/**
* Shortcut for waiting and retrieving the response immediately
*
* @param packet The packet to send
* @param <T> The type of response packet to await
* @return The response packet, once it is received
*/
public static <T extends IServerToClientPacket> T executeAndGet(IClientToServerPacket<T> packet) {
return SocketClient.execute(packet).get();
}
/**
* Specific client-side implementations to handle incoming packets
*/ */
protected void handleReceivedPacket(IPacket packet) { protected void handleReceivedPacket(IPacket packet) {
if (packet instanceof InvalidPacket) { if (!(packet instanceof IServerToClientPacket serverToClientPacket)) {
System.err.println("[socket] " + ((InvalidPacket) packet).error); System.err.println("Received package of invalid type + " + packet.getClass().getName());
} else if (packet instanceof MessagePacket) { this.close();
System.out.println("[socket] " + ((MessagePacket) packet).message); return;
} else if (packet instanceof ErrorPacket) { }
System.err.println("[socket] " + ((ErrorPacket) packet).error);
} else if (packet instanceof UnifyResultPacket) { serverToClientPacket.onHandle(this.getConnection(), this);
System.out.println("[socket] Received unify result"); }
unifyResultPacket = (UnifyResultPacket) packet;
/**
* Complete a registered future, so it can be handled by whoever executed the creator task
*
* @param id The associated id for this future
* @param trigger The object triggering the completion
*/
public void completeResponseFuture(String id, IServerToClientPacket trigger) {
SocketFuture<?> future = this.responseFutures.remove(id);
if (future == null) return;
if (!future.accept(trigger)) {
throw new RuntimeException("Packet " + trigger.getClass().getName() + " tried to complete future, but was not allowed to");
}
}
public static void closeIfOpen() {
if (socketClient != null && socketClient.isOpen()) {
socketClient.close();
} }
} }
@Override @Override
public void onOpen(ServerHandshake handshakedata) { public void onOpen(ServerHandshake handshakedata) {
System.out.println("Connected to server with status " + handshakedata.getHttpStatus()); logger.success("Connected to server with status " + handshakedata.getHttpStatus());
} }
@Override @Override
public void onMessage(String message) { public void onMessage(String message) {
// System.out.println("received: " + message); // logger.info("received: " + message);
IPacket packet = PacketContainer.deserialize(message); IPacket packet = PacketContainer.deserialize(message);
this.handleReceivedPacket(packet); this.handleReceivedPacket(packet);
} }
@Override @Override
public void onClose(int code, String reason, boolean remote) { public void onClose(int code, String reason, boolean remote) {
System.out.println( logger.info(
"Disconnected from server " + "Disconnected from server " +
"with code " + code + " " + "with code " + code + " " +
(reason.isEmpty() ? "" : "and reason " + reason + " ") + (reason.isEmpty() ? "" : "and reason " + reason + " ") +
"(closed by remote: " + remote + ")" "(closed by remote: " + remote + ")"
); );
this.closeLatch.countDown();
if (!this.responseFutures.isEmpty()) {
throw new RuntimeException("Server closed before all required tasks were answered");
}
} }
@Override @Override
public void onError(Exception e) { public void onError(Exception e) {
System.out.println("Error: " + e.getMessage()); logger.exception(e);
e.printStackTrace(); throw new RuntimeException(e);
} }
public void waitUntilClosed() throws InterruptedException {
closeLatch.await();
}
} }
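A minimal usage sketch of the future-based client API introduced above. SomeRequestPacket and SomeResponsePacket are hypothetical placeholder types for any IClientToServerPacket / IServerToClientPacket pair; the calls themselves (execute, executeAndGet, SocketFuture.get) are the ones shown in this diff.

    // Hypothetical illustration only: SomeRequestPacket / SomeResponsePacket stand in for
    // any IClientToServerPacket<T> / IServerToClientPacket pair registered in PacketContainer.
    SomeRequestPacket request = SomeRequestPacket.create();

    // Asynchronous style: register the request, keep working, collect the answer later.
    SocketFuture<SomeResponsePacket> future = SocketClient.execute(request);
    // ... other work ...
    SomeResponsePacket response = future.get();

    // Blocking shortcut, equivalent to execute(request).get():
    SomeResponsePacket direct = SocketClient.executeAndGet(request);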

View File

@@ -0,0 +1,48 @@
package de.dhbwstuttgart.server;
import de.dhbwstuttgart.server.packet.IServerToClientPacket;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
public class SocketFuture<T extends IServerToClientPacket> extends CompletableFuture<T> {
public final String futureId = UUID.randomUUID().toString();
public final List<Class<T>> allowedTriggers;
public SocketFuture(List<Class<T>> allowedTriggers) {
this.allowedTriggers = allowedTriggers;
}
public boolean accept(IServerToClientPacket trigger) {
if (this.allowedTriggers.contains(trigger.getClass())) {
this.complete((T)trigger);
return true;
}
return false;
}
@Override
public T get() {
try {
return super.get();
}
catch (InterruptedException | ExecutionException exception) {
throw new RuntimeException(exception);
}
}
/**
* Special case where the future is immediately fulfilled without a response packet, similar to
* <code>CompletableFuture.completedFuture()</code> but without a value
*/
public static <R extends IServerToClientPacket> SocketFuture<R> completedFuture() {
SocketFuture<R> dummyFuture = new SocketFuture<>(new ArrayList<>(0));
dummyFuture.complete(null);
return dummyFuture;
}
}
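A short sketch of how SocketFuture is driven. Assumption: the trigger packets are constructed locally here purely for illustration; in the real flow they arrive from the server and are routed through SocketClient.completeResponseFuture.

    // Assumed imports: java.util.List and the packet classes from de.dhbwstuttgart.server.packet.
    SocketFuture<UnifyResultPacket> future = new SocketFuture<>(List.of(UnifyResultPacket.class));

    boolean rejected = future.accept(new MessagePacket());     // false: wrong trigger type, future stays open
    boolean accepted = future.accept(new UnifyResultPacket()); // true: completes the future
    UnifyResultPacket result = future.get();                   // returns immediately now

    // Packets that expect no answer use the pre-completed dummy future:
    SocketFuture<IServerToClientPacket> done = SocketFuture.completedFuture();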

View File

@@ -1,80 +1,119 @@
package de.dhbwstuttgart.server; package de.dhbwstuttgart.server;
import com.fasterxml.jackson.core.JsonProcessingException;
import de.dhbwstuttgart.server.packet.ErrorPacket;
import de.dhbwstuttgart.server.packet.IClientToServerPacket;
import de.dhbwstuttgart.server.packet.IPacket; import de.dhbwstuttgart.server.packet.IPacket;
import de.dhbwstuttgart.server.packet.MessagePacket; import de.dhbwstuttgart.server.packet.MessagePacket;
import de.dhbwstuttgart.server.packet.PacketContainer; import de.dhbwstuttgart.server.packet.PacketContainer;
import de.dhbwstuttgart.server.packet.UnifyRequestPacket; import de.dhbwstuttgart.util.Logger;
import de.dhbwstuttgart.server.packet.UnifyResultPacket;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.UnifyResultListenerImpl;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import java.io.Writer;
import java.net.InetSocketAddress; import java.net.InetSocketAddress;
import java.util.HashSet; import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.UUID; import java.util.UUID;
import java.util.concurrent.ExecutorService; import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors; import java.util.concurrent.Executors;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.java_websocket.WebSocket; import org.java_websocket.WebSocket;
import org.java_websocket.drafts.Draft;
import org.java_websocket.drafts.Draft_6455;
import org.java_websocket.extensions.permessage_deflate.PerMessageDeflateExtension;
import org.java_websocket.handshake.ClientHandshake; import org.java_websocket.handshake.ClientHandshake;
import org.java_websocket.server.WebSocketServer; import org.java_websocket.server.WebSocketServer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class SocketServer extends WebSocketServer { public class SocketServer extends WebSocketServer {
private static final Logger log = LoggerFactory.getLogger(SocketServer.class); public static Logger logger = new Logger("SocketServer");
public static final int maxTasksPerSession = 100;
private static boolean serverRunning = false;
/**
* Increase this every time a breaking change is made to the server communication.
* This will prevent errors when the server version and client version do not match.
*/
public static final String packetProtocolVersion = "1";
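For the check in onOpen below to pass, the client has to present the same version during the WebSocket handshake. A hedged sketch of one way to do that with the java_websocket client API (the URL, the anonymous subclass and the header name are illustrative; the repository's SocketClient may set the header differently):

    // Assumed imports: java.net.URI, java.util.Map, org.java_websocket.client.WebSocketClient,
    // org.java_websocket.handshake.ServerHandshake.
    static void connectWithVersion() throws InterruptedException {
        WebSocketClient client = new WebSocketClient(
                URI.create("ws://localhost:8080/"),
                Map.of("packetProtocolVersion", SocketServer.packetProtocolVersion)) {
            @Override public void onOpen(ServerHandshake handshake) {}
            @Override public void onMessage(String message) {}
            @Override public void onClose(int code, String reason, boolean remote) {}
            @Override public void onError(Exception ex) {}
        };
        // the handshake header is read server-side via clientHandshake.getFieldValue("packetProtocolVersion")
        client.connectBlocking();
    }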
// create an executor for tasks that always keeps at least one thread alive
private final ThreadPoolExecutor taskExecutor = new ThreadPoolExecutor(1, Integer.MAX_VALUE, 60L, TimeUnit.SECONDS, new SynchronousQueue<>());
// create an executor for scheduling timeouts
private final ScheduledExecutorService timeoutExecutor = Executors.newSingleThreadScheduledExecutor();
public SocketServer(int port) { public SocketServer(int port) {
super(new InetSocketAddress(port)); super(new InetSocketAddress(port));
this.setConnectionLostTimeout(30);
serverRunning = true;
// add a shutdown hook to close all connections when the process ends or is stopped by a SIGINT signal
Runtime.getRuntime().addShutdownHook(new Thread(this::onShutdown));
}
public static boolean isServerRunning() {
return serverRunning;
}
private void onShutdown() {
serverRunning = false;
try {
for (var webSocket : this.getConnections()) {
this.sendError(webSocket, "Sorry, i am shutting down. You are now on your own, good Luck!", true);
webSocket.close();
}
this.stop();
taskExecutor.shutdown();
timeoutExecutor.shutdown();
} catch (InterruptedException exception) {
// we are shutting down anyway
}
} }
@Override @Override
public void onOpen(WebSocket webSocket, ClientHandshake clientHandshake) { public void onOpen(WebSocket webSocket, ClientHandshake clientHandshake) {
System.out.println("New connection: " + webSocket.getResourceDescriptor()); String ppv = clientHandshake.getFieldValue("packetProtocolVersion");
webSocket.setAttachment(new SocketData(UUID.randomUUID().toString())); if (!ppv.equals(packetProtocolVersion)) {
this.sendError(webSocket,
"Mismatch in packet protocol version! Client (you): \"" + ppv + "\" and Server (me): \"" + packetProtocolVersion + "\"",
true
);
webSocket.close(1);
return;
}
SocketData socketData = new SocketData(webSocket);
logger.info("New connection: " + socketData.id + " (with ppv " + ppv + ")");
try { try {
sendMessage(webSocket, "Welcome to the server!"); sendMessage(webSocket, "Welcome to the server!");
// wait 10 seconds for the client to send a task and close the connection, if nothing has been received until then // wait 10 seconds for the client to send a task and close the connection if nothing has been received until then
ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor(); final int secondsUntilTimeout = 10;
Runnable task = () -> { timeoutExecutor.schedule(() -> {
if (webSocket.<SocketData>getAttachment().hasSentTask || !webSocket.isOpen()) { if (webSocket.<SocketData>getAttachment().totalTasks.get() > 0 || !webSocket.isOpen()) {
return; return;
} }
sendMessage(webSocket, "No task received after 10 seconds. Closing connection..."); sendMessage(webSocket, "No task received after " + secondsUntilTimeout + " seconds. Closing connection...");
webSocket.close(); webSocket.close();
}; },
executor.schedule(task, 10, TimeUnit.SECONDS); secondsUntilTimeout,
executor.shutdown(); TimeUnit.SECONDS
);
// and finally, when your program wants to exit // and finally, when your program wants to exit
} catch (Exception e) { } catch (Exception e) {
log.error("e: ", e); logger.exception(e);
webSocket.close(1, e.getMessage()); webSocket.close(1, e.getMessage());
} }
} }
@Override @Override
public void onClose(WebSocket webSocket, int code, String reason, boolean remote) { public void onClose(WebSocket webSocket, int code, String reason, boolean remote) {
System.out.println("Connection closed: " + webSocket.getResourceDescriptor()); SocketData socketData = webSocket.getAttachment();
System.out.println( logger.info("Connection closed: " + socketData.id);
"Disconnected client " + webSocket.getResourceDescriptor() + " " + logger.info(
"Disconnected client " + socketData.id + " " +
"with code " + code + " " + "with code " + code + " " +
(reason.isEmpty() ? "" : "and reason " + reason + " ") + (reason.isEmpty() ? "" : "and reason " + reason + " ") +
"(closed by client: " + remote + ")" "(closed by client: " + remote + ")"
@@ -84,19 +123,28 @@ public class SocketServer extends WebSocketServer {
@Override @Override
public void onMessage(WebSocket webSocket, String s) { public void onMessage(WebSocket webSocket, String s) {
// System.out.println("Received: " + s.substring(0, 50)); // logger.info("Received: " + s.substring(0, 50));
IPacket reconstructedPacket = PacketContainer.deserialize(s); IPacket reconstructedPacket = PacketContainer.deserialize(s);
this.onPacketReceived(webSocket, reconstructedPacket); try {
this.onPacketReceived(webSocket, reconstructedPacket);
} catch (JsonProcessingException e) {
logger.exception(e);
this.log(webSocket, "Error on processing incoming package: " + e.getMessage());
}
} }
@Override @Override
public void onError(WebSocket webSocket, Exception e) { public void onError(WebSocket webSocket, Exception e) {
webSocket.close(); if (webSocket != null) {
log(webSocket, e.getMessage());
webSocket.close();
}
logger.exception(e);
} }
@Override @Override
public void onStart() { public void onStart() {
System.out.println("Websocket server started"); logger.success("Websocket server started on port " + this.getPort());
} }
/** /**
@@ -104,81 +152,84 @@ public class SocketServer extends WebSocketServer {
*/ */
public void sendMessage(WebSocket webSocket, String text) { public void sendMessage(WebSocket webSocket, String text) {
try { try {
MessagePacket message = new MessagePacket(); MessagePacket message = MessagePacket.create(text);
message.message = text;
webSocket.send(PacketContainer.serialize(message)); webSocket.send(PacketContainer.serialize(message));
} catch (Exception e) { } catch (Exception e) {
System.err.println("Failed to send message: " + text); System.err.println("Failed to send message: " + text);
System.err.println(e); logger.exception(e);
} }
} }
/** /**
* The server-side implementation on how to handle certain packets when received * A shorthand method for sending error messages to the client
*/ */
private void onPacketReceived(WebSocket webSocket, IPacket packet) { public void sendError(WebSocket webSocket, String text, boolean isFatal) {
if (packet instanceof UnifyRequestPacket unifyRequestPacket) { try {
sendMessage(webSocket, "You requested a unify! Please wait until I calculated everything..."); ErrorPacket error = ErrorPacket.create(text, isFatal);
System.out.println("Client " + webSocket.<SocketData>getAttachment().id + " requested a unification. Starting now..."); webSocket.send(PacketContainer.serialize(error));
webSocket.<SocketData>getAttachment().hasSentTask = true; } catch (Exception e) {
logger.exception(e);
try { log(webSocket, "Failed to send error: " + text);
var placeholderRegistry = new PlaceholderRegistry();
var unifyContext = new UnifyContext(Writer.nullWriter(), false, true,
new UnifyResultModel(new ConstraintSet<>(), new FiniteClosure(new HashSet<>(), null, placeholderRegistry)),
new UnifyTaskModel(), ForkJoinPool.commonPool(), placeholderRegistry
);
// start the unification algorithm from the received data
IFiniteClosure finiteClosure = unifyRequestPacket.retrieveFiniteClosure(unifyContext);
ConstraintSet<Pair> constraintSet = unifyRequestPacket.retrieveConstraintSet(unifyContext);
ConstraintSet<UnifyPair> unifyConstraintSet = unifyRequestPacket.retrieveUnifyConstraintSet(unifyContext);
var resultModel = new UnifyResultModel(constraintSet, finiteClosure);
UnifyResultListenerImpl resultListener = new UnifyResultListenerImpl();
resultModel.addUnifyResultListener(resultListener);
TypeUnify.unifyParallel(
unifyConstraintSet.getUndConstraints(),
unifyConstraintSet.getOderConstraints(),
finiteClosure,
unifyContext.newWithResultModel(resultModel)
);
var resultSets = resultListener.getResults();
System.out.println("Finished unification for client " + webSocket.<SocketData>getAttachment().id);
sendMessage(webSocket, "Unification finished. Found " + resultSets.size() + " result sets");
if (webSocket.isOpen()) {
UnifyResultPacket resultPacket = UnifyResultPacket.create(resultSets);
webSocket.send(PacketContainer.serialize(resultPacket));
}
} catch (Exception e) {
System.err.println(e);
log.error("e: ", e);
}
webSocket.close();
} else {
sendMessage(webSocket, "The package of type " + packet.getClass().getName() + " is not handled by the server!");
} }
} }
/**
* The server-side implementation on how to handle certain packets when received
*/
private void onPacketReceived(WebSocket webSocket, IPacket packet) throws JsonProcessingException {
SocketData socketData = webSocket.getAttachment();
// limit the number of tasks per connection
if (socketData.totalTasks.get() >= maxTasksPerSession) {
sendError(webSocket, "Exceeded the maximum amount of " + maxTasksPerSession + " tasks per session", true);
webSocket.close();
return;
}
// only allow packets that are meant to be handled by the server
if (!(packet instanceof IClientToServerPacket<?> clientToServerPacket)) {
sendMessage(webSocket, "The package of type " + packet.getClass().getName() + " is not handled by the server!");
return;
}
// update the socket data
socketData.unhandledTasks.incrementAndGet();
socketData.totalTasks.incrementAndGet();
// add the packet to the queue so it can be started by the worker
CompletableFuture.runAsync(() -> {
clientToServerPacket.onHandle(webSocket, this);
int remainingUnhandledTasks = socketData.unhandledTasks.decrementAndGet();
if (socketData.closeIfNoTasksLeft) {
// if the websocket has 0 unhandled Tasks, close the connection
if (remainingUnhandledTasks <= 0) {
sendMessage(webSocket, "All requested tasks finished! Closing connection...");
webSocket.close();
}
}
}, taskExecutor);
}
public void log(WebSocket webSocket, String msg) {
String socketId = (webSocket == null) ? "???" : webSocket.<SocketData>getAttachment().id;
logger.info("[" + socketId + "] " + msg);
}
/** /**
* The data that is associated server-side with any connected client. * The data that is associated server-side with any connected client.
* This makes it possible to store information that can be mapped to any existing connection. * This makes it possible to store information that can be mapped to any existing connection.
*/ */
static class SocketData { public static class SocketData {
public final String id; public final String id;
// used for the timeout of 10 seconds, until an unused open connection is automatically closed public final AtomicInteger unhandledTasks = new AtomicInteger(0);
public boolean hasSentTask = false; public final AtomicInteger totalTasks = new AtomicInteger(0);
public boolean closeIfNoTasksLeft = false;
public SocketData(String id) { public SocketData(WebSocket webSocket) {
this.id = id; this.id = UUID.randomUUID().toString();
webSocket.setAttachment(this);
} }
} }
} }
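The per-connection bookkeeping in onPacketReceived boils down to two atomic counters plus an autoclose flag. A standalone, simplified sketch of that pattern (not the repository code; the Runnable parameters stand in for the packet handler and for webSocket.close()):

    import java.util.concurrent.CompletableFuture;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.atomic.AtomicInteger;

    class TaskBookkeepingSketch {
        final AtomicInteger unhandledTasks = new AtomicInteger(0);
        final AtomicInteger totalTasks = new AtomicInteger(0);
        volatile boolean closeIfNoTasksLeft = false;

        final ExecutorService taskExecutor = Executors.newCachedThreadPool();

        void submit(Runnable handler, Runnable closeConnection) {
            // every accepted packet bumps both counters ...
            unhandledTasks.incrementAndGet();
            totalTasks.incrementAndGet();
            CompletableFuture.runAsync(() -> {
                handler.run();
                // ... every finished handler decrements the unhandled counter ...
                int remaining = unhandledTasks.decrementAndGet();
                // ... and the connection is closed once autoclose was requested and nothing is left running
                if (closeIfNoTasksLeft && remaining <= 0) {
                    closeConnection.run();
                }
            }, taskExecutor);
        }
    }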

View File

@@ -0,0 +1,35 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.SocketServer;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialUUID;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import org.java_websocket.WebSocket;
public class DebugPacket implements IClientToServerPacket.Void, IServerToClientPacket {
public SerialUUID a1;
public SerialUUID a2;
public SerialMap b1;
public SerialMap b2;
public SerialList<? extends ISerialNode> c1;
public SerialList<? extends ISerialNode> c2;
public SerialValue<?> d1;
public SerialValue<?> d2;
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketClient socketClient) {}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketServer socketServer) {}
@JsonIgnore
public SocketFuture<IServerToClientPacket> getFuture() {
return SocketFuture.completedFuture();
}
}

View File

@@ -1,14 +1,36 @@
package de.dhbwstuttgart.server.packet; package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.SocketServer;
import org.java_websocket.WebSocket;
/** /**
* A packet to send simple error messages between the client and the server * A packet to send simple error messages between the client and the server
*/ */
public class ErrorPacket implements IPacket { public class ErrorPacket implements IServerToClientPacket {
/** /**
* The error endpoint for messages from the server, that should be logged out outputted * The error endpoint for messages from the server that should be logged as errors and may abort the process
*/ */
public String error; public String error;
public boolean isFatal;
@JsonIgnore
public static ErrorPacket create(String error, boolean isFatal) {
ErrorPacket packet = new ErrorPacket();
packet.error = error;
packet.isFatal = isFatal;
return packet;
}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketClient socketClient) {
SocketClient.logger.exception(new RuntimeException(this.error));
if (this.isFatal) {
socketClient.close(1, "Received fatal error from server");
}
}
} }

View File

@@ -0,0 +1,26 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.SocketServer;
import org.java_websocket.WebSocket;
/**
* A packet that will be sent to the server. Use the <code>Void</code> sub-interface for packets without a response
*
* @param <T> The response packet that will fulfill the future.
*/
public interface IClientToServerPacket<T extends IServerToClientPacket> extends IPacket {
@JsonIgnore
void onHandle(WebSocket webSocket, SocketServer socketServer);
@JsonIgnore
SocketFuture<T> getFuture();
/**
* Special case where the packet remains unanswered by the server
*/
interface Void extends IClientToServerPacket<IServerToClientPacket> {}
}
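A hedged sketch of the minimal shape a fire-and-forget packet takes under this interface. PingPacket and its note field are invented for illustration; a real packet would additionally have to be registered in PacketContainer's serialize and deserialize methods, as the comments there point out.

    // Hypothetical IClientToServerPacket.Void implementation: default constructor,
    // serializable public fields, a server-side handler and a pre-completed future.
    public class PingPacket implements IClientToServerPacket.Void {
        public String note = "ping";

        @JsonIgnore
        public void onHandle(WebSocket webSocket, SocketServer socketServer) {
            socketServer.log(webSocket, "Ping received: " + this.note);
        }

        @JsonIgnore
        public SocketFuture<IServerToClientPacket> getFuture() {
            return SocketFuture.completedFuture();
        }
    }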

View File

@@ -1,18 +1,12 @@
package de.dhbwstuttgart.server.packet; package de.dhbwstuttgart.server.packet;
/** /**
* The shared interface for all packets of the server connection. * The shared interface for all packets of the client-server connection.
* A packet must always: * A packet must always:
* - Have a default / no-parameter constructor * - Have a default / no-parameter constructor
* - Have only serializable public properties (or disable them via jackson annotations) * - Have only serializable public properties (or disable them via jackson annotations)
* A packet should have, for easy usage and consistency:
* - a static create() method
* *
*/ */
public interface IPacket { public interface IPacket {
interface IDataContainer<T> {
T toObject();
}
} }

View File

@@ -0,0 +1,12 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import org.java_websocket.WebSocket;
public interface IServerToClientPacket extends IPacket {
@JsonIgnore
void onHandle(WebSocket webSocket, SocketClient socketClient);
}

View File

@@ -1,13 +1,35 @@
package de.dhbwstuttgart.server.packet; package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.SocketServer;
import org.java_websocket.WebSocket;
/** /**
* A fallback packet that is generated, if the received json could not be mapped to an existing package * A fallback packet that is generated if the received JSON could not be mapped to an existing packet class
*/ */
public class InvalidPacket implements IPacket { public class InvalidPacket implements IClientToServerPacket.Void, IServerToClientPacket {
/** /**
* If available, the error that caused this package to appear * If available, the error that caused this package to appear
*/ */
public String error = "<unknown error>"; public String error = "<unknown error>";
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketClient socketClient) {
SocketClient.logger.error("InvalidPacket: " + this.error);
}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketServer socketServer) {
socketServer.log(webSocket, "InvalidPacket: " + this.error);
}
@JsonIgnore
public SocketFuture<IServerToClientPacket> getFuture() {
return SocketFuture.completedFuture();
}
} }

View File

@@ -1,13 +1,40 @@
package de.dhbwstuttgart.server.packet; package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.SocketServer;
import org.java_websocket.WebSocket;
/** /**
* A packet to send simple informational messages between the client and the server * A packet to send simple informational messages between the client and the server
*/ */
public class MessagePacket implements IPacket { public class MessagePacket implements IClientToServerPacket.Void, IServerToClientPacket {
/** /**
* The informational message from the server, that should be logged out outputted * The informational message from the server that should be logged or output
*/ */
public String message; public String message;
@JsonIgnore
public static MessagePacket create(String message) {
MessagePacket packet = new MessagePacket();
packet.message = message;
return packet;
}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketClient socketClient) {
SocketClient.logger.info("SocketMessage: " + this.message);
}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketServer socketServer) {
socketServer.log(webSocket, this.message);
}
@JsonIgnore
public SocketFuture<IServerToClientPacket> getFuture() {
return SocketFuture.completedFuture();
}
} }

View File

@@ -3,6 +3,7 @@ package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import de.dhbwstuttgart.util.Logger;
/** /**
* A wrapper for the packet to ensure correct serialization/deserialization and make it possible to detect the matching * A wrapper for the packet to ensure correct serialization/deserialization and make it possible to detect the matching
@@ -23,13 +24,15 @@ public class PacketContainer {
public InvalidPacket invalidPacket = null; public InvalidPacket invalidPacket = null;
public UnifyRequestPacket unifyRequestPacket = null; public UnifyRequestPacket unifyRequestPacket = null;
public UnifyResultPacket unifyResultPacket = null; public UnifyResultPacket unifyResultPacket = null;
public DebugPacket debugPacket = null;
public SetAutoclosePacket setAutoclosePacket = null;
/** /**
* Generate the JSON string for the given packet * Generate the JSON string for the given packet
* *
* @param packet The packet to serialize * @param packet The packet to serialize
* @return The json representation of the packet * @return The JSON representation of the packet
*/ */
public static String serialize(IPacket packet) throws JsonProcessingException { public static String serialize(IPacket packet) throws JsonProcessingException {
objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
@@ -43,7 +46,13 @@ public class PacketContainer {
container.unifyRequestPacket = (UnifyRequestPacket) packet; container.unifyRequestPacket = (UnifyRequestPacket) packet;
else if (packet instanceof UnifyResultPacket) else if (packet instanceof UnifyResultPacket)
container.unifyResultPacket = (UnifyResultPacket) packet; container.unifyResultPacket = (UnifyResultPacket) packet;
else if (packet instanceof DebugPacket)
container.debugPacket = (DebugPacket) packet;
else if (packet instanceof SetAutoclosePacket)
container.setAutoclosePacket = (SetAutoclosePacket) packet;
// Add new packets here and in the deserialize method // Add new packets here and in the deserialize method
else
throw new RuntimeException("Cannot map packet to any known packet class");
return objectMapper.writeValueAsString(container); return objectMapper.writeValueAsString(container);
} }
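A small round-trip sketch using only the methods visible in this diff (assumed to run inside a method that declares throws JsonProcessingException):

    // Wrap a MessagePacket in the container, then recover it from the JSON string.
    String json = PacketContainer.serialize(MessagePacket.create("hello server"));
    IPacket roundTripped = PacketContainer.deserialize(json); // falls back to InvalidPacket on errors
    if (roundTripped instanceof MessagePacket message) {
        System.out.println(message.message); // "hello server"
    }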
@@ -70,10 +79,15 @@ public class PacketContainer {
return container.unifyRequestPacket; return container.unifyRequestPacket;
if (container.unifyResultPacket != null) if (container.unifyResultPacket != null)
return container.unifyResultPacket; return container.unifyResultPacket;
if (container.debugPacket != null)
return container.debugPacket;
if (container.setAutoclosePacket != null)
return container.setAutoclosePacket;
// Add new packets here and in the serialize method // Add new packets here and in the serialize method
throw new RuntimeException("Cannot map received json to any known packet class"); throw new RuntimeException("Cannot map received json to any known packet class");
} catch (Exception e) { } catch (Exception e) {
(new Logger()).exception(e);
InvalidPacket packet = new InvalidPacket(); InvalidPacket packet = new InvalidPacket();
packet.error = e.getMessage(); packet.error = e.getMessage();
return packet; return packet;

View File

@@ -0,0 +1,32 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.SocketServer;
import org.java_websocket.WebSocket;
/**
* Normally, a connection stays open until either the client or the server process ends.
* Send this packet to inform the server that the connection can be closed once all tasks are done
*/
public class SetAutoclosePacket implements IClientToServerPacket.Void {
public int dummyProperty = 1;
@JsonIgnore
public static SetAutoclosePacket create() {
return new SetAutoclosePacket();
}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketServer socketServer) {
webSocket.<SocketServer.SocketData>getAttachment().closeIfNoTasksLeft = true;
socketServer.log(webSocket, "Marked connection as autoclose");
}
@JsonIgnore
public SocketFuture<IServerToClientPacket> getFuture() {
return SocketFuture.completedFuture();
}
}
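A one-line usage sketch, assuming the client-side SocketClient.execute entry point shown earlier is the intended way to send it:

    // Tell the server it may close this connection as soon as all queued tasks are answered.
    SocketClient.execute(SetAutoclosePacket.create());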

View File

@@ -1,41 +1,72 @@
package de.dhbwstuttgart.server.packet; package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.core.ConsoleInterface;
import de.dhbwstuttgart.server.ServerTaskLogger;
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.SocketServer;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage; import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap; import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet; import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair; import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
import de.dhbwstuttgart.typeinference.unify.UnifyContext; import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.UnifyResultListenerImpl;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure; import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure; import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair; import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import de.dhbwstuttgart.util.Logger;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.concurrent.ForkJoinPool;
import org.java_websocket.WebSocket;
/** /**
* A packet to send all required data for the unification algorithm to the server and request the unification * A packet to send all required data for the unification algorithm to the server and request the unification
*/ */
public class UnifyRequestPacket implements IPacket { public class UnifyRequestPacket implements IClientToServerPacket<UnifyResultPacket> {
public SerialMap finiteClosure; public SerialMap finiteClosure;
public SerialMap constraintSet; public SerialMap constraintSet;
public SerialMap unifyConstraintSet; public SerialMap unifyConstraintSet;
public SerialMap serialKeyStorage; public SerialMap serialKeyStorage;
public SerialValue<?> placeholders;
public SerialList<SerialMap> factoryplaceholders;
public String futureId;
public int logLevel;
@JsonIgnore @JsonIgnore
private KeyStorage keyStorage = new KeyStorage(); private KeyStorage keyStorage = new KeyStorage();
@JsonIgnore @JsonIgnore
private boolean keyStorageLoaded = false; private boolean keyStorageLoaded = false;
public UnifyRequestPacket() {} public static UnifyRequestPacket create(
public UnifyRequestPacket(
FiniteClosure finiteClosure, FiniteClosure finiteClosure,
ConstraintSet<Pair> constraintSet, ConstraintSet<Pair> constraintSet,
ConstraintSet<UnifyPair> unifyConstraintSet ConstraintSet<UnifyPair> unifyConstraintSet,
PlaceholderRegistry placeholderRegistry
) { ) {
this.finiteClosure = finiteClosure.toSerial(keyStorage); UnifyRequestPacket packet = new UnifyRequestPacket();
this.constraintSet = constraintSet.toSerial(keyStorage); // store constraint and finite closure
this.unifyConstraintSet = unifyConstraintSet.toSerial(keyStorage); packet.finiteClosure = finiteClosure.toSerial(packet.keyStorage);
this.serialKeyStorage = keyStorage.toSerial(keyStorage); packet.constraintSet = constraintSet.toSerial(packet.keyStorage);
packet.unifyConstraintSet = unifyConstraintSet.toSerial(packet.keyStorage);
// store placeholder registry
var serialRegistry = placeholderRegistry.toSerial(packet.keyStorage);
packet.placeholders = serialRegistry.getValue("ph");
packet.factoryplaceholders = serialRegistry.getList("factoryPh").assertListOfMaps();
// store referenced objects separately
packet.serialKeyStorage = packet.keyStorage.toSerial(packet.keyStorage);
packet.logLevel = ConsoleInterface.logLevel.getValue();
return packet;
} }
@@ -48,21 +79,86 @@ public class UnifyRequestPacket implements IPacket {
} }
@JsonIgnore @JsonIgnore
public FiniteClosure retrieveFiniteClosure(UnifyContext context) { private FiniteClosure retrieveFiniteClosure(UnifyContext context) {
this.loadKeyStorage(context); this.loadKeyStorage(context);
return FiniteClosure.fromSerial(this.finiteClosure, context, keyStorage); return FiniteClosure.fromSerial(this.finiteClosure, context, keyStorage);
} }
@JsonIgnore @JsonIgnore
public ConstraintSet<Pair> retrieveConstraintSet(UnifyContext context) { private ConstraintSet<Pair> retrieveConstraintSet(UnifyContext context) {
this.loadKeyStorage(context); this.loadKeyStorage(context);
return ConstraintSet.fromSerial(this.constraintSet, context, Pair.class, keyStorage); return ConstraintSet.fromSerial(this.constraintSet, context, Pair.class, keyStorage);
} }
@JsonIgnore @JsonIgnore
public ConstraintSet<UnifyPair> retrieveUnifyConstraintSet(UnifyContext context) { private ConstraintSet<UnifyPair> retrieveUnifyConstraintSet(UnifyContext context) {
this.loadKeyStorage(context); this.loadKeyStorage(context);
return ConstraintSet.fromSerial(this.unifyConstraintSet, context, UnifyPair.class, keyStorage); return ConstraintSet.fromSerial(this.unifyConstraintSet, context, UnifyPair.class, keyStorage);
} }
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketServer socketServer) {
socketServer.sendMessage(webSocket, "You requested a unify! Please wait until I calculated everything...");
socketServer.log(webSocket, "Client requested a unification. Starting now...");
try {
var placeholderRegistry = new PlaceholderRegistry();
ArrayList<String> existingPlaceholders = (ArrayList) this.placeholders.getOf(ArrayList.class);
existingPlaceholders.forEach(placeholderRegistry::addPlaceholder);
Logger logger = new ServerTaskLogger(
webSocket,
socketServer,
Logger.LogLevel.fromValue(
Math.max(this.logLevel, Logger.LogLevel.INFO.getValue())
)
);
var unifyContext = new UnifyContext(logger, true,
new UnifyResultModel(new ConstraintSet<>(), new FiniteClosure(new HashSet<>(), logger, placeholderRegistry)),
new UnifyTaskModel(), ForkJoinPool.commonPool(), placeholderRegistry
);
this.factoryplaceholders.stream()
.map(p -> (PlaceholderType)UnifyType.fromSerial(p, unifyContext))
.forEach(placeholderRegistry.UnifyTypeFactory_PLACEHOLDERS::add);
// start the unification algorithm from the received data
IFiniteClosure finiteClosure = this.retrieveFiniteClosure(unifyContext);
ConstraintSet<Pair> constraintSet = this.retrieveConstraintSet(unifyContext);
ConstraintSet<UnifyPair> unifyConstraintSet = this.retrieveUnifyConstraintSet(unifyContext);
var resultModel = new UnifyResultModel(constraintSet, finiteClosure);
UnifyResultListenerImpl resultListener = new UnifyResultListenerImpl();
resultModel.addUnifyResultListener(resultListener);
TypeUnify.unifyParallel(
unifyConstraintSet.getUndConstraints(),
unifyConstraintSet.getOderConstraints(),
finiteClosure,
unifyContext.newWithResultModel(resultModel)
);
var resultSets = resultListener.getResults();
socketServer.log(webSocket, "Finished unification");
socketServer.sendMessage(webSocket, "Unification finished. Found " + resultSets.size() + " result sets");
if (webSocket.isOpen()) {
UnifyResultPacket resultPacket = UnifyResultPacket.create(resultSets, futureId);
webSocket.send(PacketContainer.serialize(resultPacket));
}
} catch (Exception e) {
SocketServer.logger.exception(e);
socketServer.log(webSocket, e.getMessage());
}
}
@JsonIgnore
public SocketFuture<UnifyResultPacket> getFuture() {
var future = new SocketFuture<>(List.of(UnifyResultPacket.class));
futureId = future.futureId;
return future;
}
} }
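Putting the pieces together on the client side, a hedged sketch of how a unify request might be issued through this packet. The finiteClosure, constraintSet, unifyConstraintSet, placeholderRegistry and context variables are assumed to exist in the caller, and getResultSet(context) is the accessor the previous SocketClient code used on UnifyResultPacket.

    // Build the request, send it via the future-based client API and unpack the results.
    UnifyRequestPacket request = UnifyRequestPacket.create(
            finiteClosure, constraintSet, unifyConstraintSet, placeholderRegistry);
    UnifyResultPacket response = SocketClient.executeAndGet(request);
    List<ResultSet> resultSets = response.getResultSet(context);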

View File

@@ -1,6 +1,7 @@
package de.dhbwstuttgart.server.packet; package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage; import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode; import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList; import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
@@ -8,20 +9,23 @@ import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.result.ResultSet; import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.UnifyContext; import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.List; import java.util.List;
import org.java_websocket.WebSocket;
/** /**
* A packet to send all calculated data from the unification algorithm back to the client * A packet to send all calculated data from the unification algorithm back to the client
*/ */
public class UnifyResultPacket implements IPacket { public class UnifyResultPacket implements IServerToClientPacket {
public SerialList<ISerialNode> results; public SerialList<ISerialNode> results;
public SerialMap keyStorage; public SerialMap keyStorage;
public String futureId;
public static UnifyResultPacket create(List<ResultSet> resultSets) { public static UnifyResultPacket create(List<ResultSet> resultSets, String futureId) {
UnifyResultPacket serialized = new UnifyResultPacket(); UnifyResultPacket serialized = new UnifyResultPacket();
KeyStorage keyStorage = new KeyStorage(); KeyStorage keyStorage = new KeyStorage();
serialized.results = SerialList.fromMapped(resultSets, resultSet -> resultSet.toSerial(keyStorage)); serialized.results = SerialList.fromMapped(resultSets, resultSet -> resultSet.toSerial(keyStorage));
serialized.keyStorage = keyStorage.toSerial(keyStorage); serialized.keyStorage = keyStorage.toSerial(keyStorage);
serialized.futureId = futureId;
return serialized; return serialized;
} }
@@ -31,4 +35,11 @@ public class UnifyResultPacket implements IPacket {
.map(resultData -> ResultSet.fromSerial(resultData, context)).toList(); .map(resultData -> ResultSet.fromSerial(resultData, context)).toList();
} }
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketClient socketClient) {
SocketClient.logger.info("Received unify result");
socketClient.completeResponseFuture(futureId, this);
}
} }

View File

@@ -4,13 +4,14 @@ import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext; import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
public class KeyStorage implements ISerializableData { public class KeyStorage implements ISerializableData {
/** /**
* Store a unique identifier for every element, so it can be referenced in the json * Store a unique identifier for every element, so it can be referenced in the json
*/ */
protected Map<ISerializableData, String> identifiers = new HashMap<>(); protected AtomicInteger identifierCount = new AtomicInteger();
/** /**
* Store the serialized element per identifier when serializing * Store the serialized element per identifier when serializing
*/ */
@@ -24,10 +25,8 @@ public class KeyStorage implements ISerializableData {
/** /**
* Retrieve or generate a new identifier for a constraint * Retrieve or generate a new identifier for a constraint
*/ */
public String getIdentifier(ISerializableData element) { public String getIdentifier() {
final String identifier = this.identifiers.getOrDefault(element, "_" + identifiers.size()); return this.identifierCount.incrementAndGet() + "_";
this.identifiers.putIfAbsent(element, identifier);
return identifier;
} }
/** /**

View File

@@ -10,7 +10,7 @@ import com.fasterxml.jackson.annotation.JsonTypeInfo;
@JsonTypeInfo( @JsonTypeInfo(
use = JsonTypeInfo.Id.NAME, use = JsonTypeInfo.Id.NAME,
include = JsonTypeInfo.As.PROPERTY, include = JsonTypeInfo.As.PROPERTY,
property = "type" property = "_t"
) )
@JsonSubTypes({ @JsonSubTypes({
@JsonSubTypes.Type(value = SerialMap.class, name = "m"), @JsonSubTypes.Type(value = SerialMap.class, name = "m"),

View File

@@ -0,0 +1,8 @@
package de.dhbwstuttgart.syntaxtree;
import de.dhbwstuttgart.util.Logger;
public class SyntaxTree {
public static Logger logger = new Logger("SyntaxTree");
}

View File

@@ -1,5 +1,8 @@
package de.dhbwstuttgart.syntaxtree.factory; package de.dhbwstuttgart.syntaxtree.factory;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.core.JavaTXServer;
public class NameGenerator { public class NameGenerator {
private static String strNextName = "A"; private static String strNextName = "A";
@@ -26,7 +29,11 @@ public class NameGenerator {
// compute the next name and store it in strNextName // compute the next name and store it in strNextName
inc( strNextName.length() - 1 ); inc( strNextName.length() - 1 );
if (JavaTXServer.isRunning) {
throw new RuntimeException("Using the NameGenerator on a server is not allowed");
}
JavaTXCompiler.defaultClientPlaceholderRegistry.addPlaceholder(strReturn);
return strReturn; return strReturn;
} }

View File

@@ -1,6 +1,8 @@
package de.dhbwstuttgart.syntaxtree.factory; package de.dhbwstuttgart.syntaxtree.factory;
import de.dhbwstuttgart.syntaxtree.SyntaxTree;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry; import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.util.Logger;
import java.io.Writer; import java.io.Writer;
import java.lang.reflect.Modifier; import java.lang.reflect.Modifier;
import java.util.*; import java.util.*;
@@ -34,7 +36,7 @@ public class UnifyTypeFactory {
public static FiniteClosure generateFC( public static FiniteClosure generateFC(
List<ClassOrInterface> fromClasses, List<ClassOrInterface> fromClasses,
Writer logFile, Logger logger,
ClassLoader classLoader, ClassLoader classLoader,
JavaTXCompiler compiler, JavaTXCompiler compiler,
PlaceholderRegistry placeholderRegistry PlaceholderRegistry placeholderRegistry
@@ -49,7 +51,7 @@ public class UnifyTypeFactory {
In general, they may always have the same names. In general, they may always have the same names.
TODO: compute the transitive closure TODO: compute the transitive closure
*/ */
return new FiniteClosure(FCGenerator.toUnifyFC(compiler, fromClasses, classLoader, placeholderRegistry), logFile, compiler, placeholderRegistry); return new FiniteClosure(FCGenerator.toUnifyFC(compiler, fromClasses, classLoader, placeholderRegistry), logger, compiler, placeholderRegistry);
} }
public static UnifyPair generateSmallerPair(UnifyType tl, UnifyType tr, SourceLoc location){ public static UnifyPair generateSmallerPair(UnifyType tl, UnifyType tr, SourceLoc location){
@@ -132,7 +134,7 @@ public class UnifyTypeFactory {
public static UnifyType convert(JavaTXCompiler compiler, TypePlaceholder tph, Boolean innerType, PlaceholderRegistry placeholderRegistry) { public static UnifyType convert(JavaTXCompiler compiler, TypePlaceholder tph, Boolean innerType, PlaceholderRegistry placeholderRegistry) {
if (tph.getName().equals("AFR")) { if (tph.getName().equals("AFR")) {
System.out.println("XXX"+innerType); SyntaxTree.logger.info("XXX"+innerType);
} }
PlaceholderType ntph = new PlaceholderType(tph.getName(), tph.getVariance(), placeholderRegistry); PlaceholderType ntph = new PlaceholderType(tph.getName(), tph.getVariance(), placeholderRegistry);
ntph.setVariance(tph.getVariance()); ntph.setVariance(tph.getVariance());
@@ -199,7 +201,7 @@ public class UnifyTypeFactory {
&& ((PlaceholderType)lhs).isWildcardable() && ((PlaceholderType)lhs).isWildcardable()
&& (rhs = ret.getLhsType()) instanceof PlaceholderType) { && (rhs = ret.getLhsType()) instanceof PlaceholderType) {
if (lhs.getName().equals("AQ")) { if (lhs.getName().equals("AQ")) {
// System.out.println(""); // SyntaxTree.logger.info("");
} }
((PlaceholderType)rhs).enableWildcardtable(); ((PlaceholderType)rhs).enableWildcardtable();
} }
@@ -208,7 +210,7 @@ public class UnifyTypeFactory {
&& ((PlaceholderType)rhs).isWildcardable() && ((PlaceholderType)rhs).isWildcardable()
&& (lhs = ret.getLhsType()) instanceof PlaceholderType) { && (lhs = ret.getLhsType()) instanceof PlaceholderType) {
if (rhs.getName().equals("AQ")) { if (rhs.getName().equals("AQ")) {
// System.out.println(""); // SyntaxTree.logger.info("");
} }
((PlaceholderType)lhs).enableWildcardtable(); ((PlaceholderType)lhs).enableWildcardtable();
} }
@@ -274,7 +276,7 @@ public class UnifyTypeFactory {
public static RefTypeOrTPHOrWildcardOrGeneric convert(PlaceholderType t, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) { public static RefTypeOrTPHOrWildcardOrGeneric convert(PlaceholderType t, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
TypePlaceholder ret = tphs.get(t.getName()); TypePlaceholder ret = tphs.get(t.getName());
if(ret == null){ //Dieser TPH wurde vom Unifikationsalgorithmus erstellt if(ret == null){ //Dieser TPH wurde vom Unifikationsalgorithmus erstellt
ret = TypePlaceholder.fresh(new NullToken()); ret = TypePlaceholder.fresh(new NullToken(), placeholderRegistry);
tphs.put(t.getName(), ret); tphs.put(t.getName(), ret);
} }
ret.setVariance(t.getVariance()); ret.setVariance(t.getVariance());

View File

@@ -2,6 +2,7 @@ package de.dhbwstuttgart.syntaxtree.type;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData; import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage; import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap; import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.UnifyContext; import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.parser.NullToken; import de.dhbwstuttgart.parser.NullToken;
@@ -67,7 +68,12 @@ public class TypePlaceholder extends RefTypeOrTPHOrWildcardOrGeneric implements
public static TypePlaceholder fresh(Token position){ public static TypePlaceholder fresh(Token position){
return new TypePlaceholder(NameGenerator.makeNewName(), position, 0, true); return new TypePlaceholder(NameGenerator.makeNewName(), position, 0, true);
} }
public static TypePlaceholder fresh(Token position, PlaceholderRegistry placeholderRegistry){
String newName = placeholderRegistry.generateFreshPlaceholderName();
return new TypePlaceholder(newName, position, 0, true);
}
public static TypePlaceholder fresh(Token position, int variance, boolean wildcardable){ public static TypePlaceholder fresh(Token position, int variance, boolean wildcardable){
return new TypePlaceholder(NameGenerator.makeNewName(), position, variance, wildcardable); return new TypePlaceholder(NameGenerator.makeNewName(), position, variance, wildcardable);
} }

View File

@@ -0,0 +1,8 @@
package de.dhbwstuttgart.target;
import de.dhbwstuttgart.util.Logger;
public class Target {
public static Logger logger = new Logger("Target");
}

View File

@@ -12,6 +12,7 @@ import de.dhbwstuttgart.syntaxtree.Record;
import de.dhbwstuttgart.syntaxtree.factory.ASTFactory; import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
import de.dhbwstuttgart.syntaxtree.statement.*; import de.dhbwstuttgart.syntaxtree.statement.*;
import de.dhbwstuttgart.syntaxtree.type.*; import de.dhbwstuttgart.syntaxtree.type.*;
import de.dhbwstuttgart.target.Target;
import de.dhbwstuttgart.target.tree.*; import de.dhbwstuttgart.target.tree.*;
import de.dhbwstuttgart.target.tree.expression.*; import de.dhbwstuttgart.target.tree.expression.*;
import de.dhbwstuttgart.target.tree.type.*; import de.dhbwstuttgart.target.tree.type.*;
@@ -337,10 +338,10 @@ public class ASTToTargetAST {
var result = r0.stream().map(l -> l.stream().toList()).toList(); var result = r0.stream().map(l -> l.stream().toList()).toList();
System.out.println("============== OUTPUT =============="); Target.logger.info("============== OUTPUT ==============");
for (var l : result) { for (var l : result) {
for (var m : l) System.out.println(m.name() + " " + m.getSignature()); for (var m : l) Target.logger.info(m.name() + " " + m.getSignature());
System.out.println(); Target.logger.info("");
} }
return result; return result;
} }

View File

@@ -1,10 +1,12 @@
package de.dhbwstuttgart.target.generate; package de.dhbwstuttgart.target.generate;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.parser.NullToken; import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.syntaxtree.*; import de.dhbwstuttgart.syntaxtree.*;
import de.dhbwstuttgart.syntaxtree.statement.*; import de.dhbwstuttgart.syntaxtree.statement.*;
import de.dhbwstuttgart.syntaxtree.type.*; import de.dhbwstuttgart.syntaxtree.type.*;
import de.dhbwstuttgart.syntaxtree.type.Void; import de.dhbwstuttgart.syntaxtree.type.Void;
import de.dhbwstuttgart.target.Target;
import de.dhbwstuttgart.target.tree.type.TargetGenericType; import de.dhbwstuttgart.target.tree.type.TargetGenericType;
import de.dhbwstuttgart.target.tree.type.TargetType; import de.dhbwstuttgart.target.tree.type.TargetType;
import de.dhbwstuttgart.typeinference.result.PairTPHEqualTPH; import de.dhbwstuttgart.typeinference.result.PairTPHEqualTPH;
@@ -138,17 +140,17 @@ public abstract class GenerateGenerics {
this.astToTargetAST = astToTargetAST; this.astToTargetAST = astToTargetAST;
for (var constraint : constraints.results) { for (var constraint : constraints.results) {
if (constraint instanceof PairTPHsmallerTPH p) { if (constraint instanceof PairTPHsmallerTPH p) {
System.out.println(p.left + " " + p.left.getVariance()); Target.logger.info(p.left + " " + p.left.getVariance());
simplifiedConstraints.add(new PairLT(new TPH(p.left), new TPH(p.right))); simplifiedConstraints.add(new PairLT(new TPH(p.left), new TPH(p.right)));
} else if (constraint instanceof PairTPHEqualTPH p) { } else if (constraint instanceof PairTPHEqualTPH p) {
equality.put(p.getLeft(), p.getRight()); equality.put(p.getLeft(), p.getRight());
} else if (constraint instanceof PairTPHequalRefTypeOrWildcardType p) { } else if (constraint instanceof PairTPHequalRefTypeOrWildcardType p) {
System.out.println(p.left + " = " + p.right); Target.logger.info(p.left + " = " + p.right);
concreteTypes.put(new TPH(p.left), p.right); concreteTypes.put(new TPH(p.left), p.right);
} }
} }
System.out.println("Simplified constraints: " + simplifiedConstraints); Target.logger.info("Simplified constraints: " + simplifiedConstraints);
} }
/*public record GenericsState(Map<TPH, RefTypeOrTPHOrWildcardOrGeneric> concreteTypes, Map<TypePlaceholder, TypePlaceholder> equality) {} /*public record GenericsState(Map<TPH, RefTypeOrTPHOrWildcardOrGeneric> concreteTypes, Map<TypePlaceholder, TypePlaceholder> equality) {}
@@ -248,7 +250,7 @@ public abstract class GenerateGenerics {
equality.put(entry.getKey(), to); equality.put(entry.getKey(), to);
} }
} }
System.out.println(from + " -> " + to + " " + from.getVariance()); Target.logger.info(from + " -> " + to + " " + from.getVariance());
//from.setVariance(to.getVariance()); //from.setVariance(to.getVariance());
equality.put(from, to); equality.put(from, to);
referenced.remove(new TPH(from)); referenced.remove(new TPH(from));
@@ -317,7 +319,7 @@ public abstract class GenerateGenerics {
Set<TPH> T2s = new HashSet<>(); Set<TPH> T2s = new HashSet<>();
findTphs(superType, T2s); findTphs(superType, T2s);
System.out.println("T1s: " + T1s + " T2s: " + T2s); Target.logger.info("T1s: " + T1s + " T2s: " + T2s);
//Ende //Ende
superType = methodCall.receiverType; superType = methodCall.receiverType;
@@ -332,7 +334,7 @@ public abstract class GenerateGenerics {
var optMethod = astToTargetAST.findMethod(owner, methodCall.name, methodCall.signatureArguments().stream().map(astToTargetAST::convert).toList()); var optMethod = astToTargetAST.findMethod(owner, methodCall.name, methodCall.signatureArguments().stream().map(astToTargetAST::convert).toList());
if (optMethod.isEmpty()) return; if (optMethod.isEmpty()) return;
var method2 = optMethod.get(); var method2 = optMethod.get();
System.out.println("In: " + method.getName() + " Method: " + method2.getName()); Target.logger.info("In: " + method.getName() + " Method: " + method2.getName());
var generics = family(owner, method2); var generics = family(owner, method2);
// transitive and // transitive and
@@ -365,7 +367,7 @@ public abstract class GenerateGenerics {
if (!T1s.contains(R1) || !T2s.contains(R2)) continue; if (!T1s.contains(R1) || !T2s.contains(R2)) continue;
var newPair = new PairLT(R1, R2); var newPair = new PairLT(R1, R2);
System.out.println("New pair: " + newPair); Target.logger.info("New pair: " + newPair);
newPairs.add(newPair); newPairs.add(newPair);
if (!containsRelation(result, newPair)) if (!containsRelation(result, newPair))
@@ -567,7 +569,7 @@ public abstract class GenerateGenerics {
Set<Pair> generics(ClassOrInterface owner, Method method) { Set<Pair> generics(ClassOrInterface owner, Method method) {
if (computedGenericsOfMethods.containsKey(method)) { if (computedGenericsOfMethods.containsKey(method)) {
var cached = computedGenericsOfMethods.get(method); var cached = computedGenericsOfMethods.get(method);
System.out.println("Cached " + method.getName() + ": " + cached); Target.logger.info("Cached " + method.getName() + ": " + cached);
return cached; return cached;
} }
@@ -596,7 +598,7 @@ public abstract class GenerateGenerics {
normalize(result, classGenerics, usedTphs); normalize(result, classGenerics, usedTphs);
System.out.println(this.getClass().getSimpleName() + " " + method.name + ": " + result); Target.logger.info(this.getClass().getSimpleName() + " " + method.name + ": " + result);
return result; return result;
} }
@@ -675,7 +677,7 @@ public abstract class GenerateGenerics {
normalize(javaResult, null, referencedByClass); normalize(javaResult, null, referencedByClass);
System.out.println(this.getClass().getSimpleName() + " Class " + classOrInterface.getClassName().getClassName() + ": " + javaResult); Target.logger.info(this.getClass().getSimpleName() + " Class " + classOrInterface.getClassName().getClassName() + ": " + javaResult);
return javaResult; return javaResult;
} }
@@ -726,7 +728,7 @@ public abstract class GenerateGenerics {
if (!added) break; if (!added) break;
} }
System.out.println(chain + " " + chain.stream().map(e -> e.resolve().getVariance()).toList()); Target.logger.info(chain + " " + chain.stream().map(e -> e.resolve().getVariance()).toList());
var variance = chain.get(0).resolve().getVariance(); var variance = chain.get(0).resolve().getVariance();
if (variance != 1) continue; if (variance != 1) continue;
var index = 0; var index = 0;
@@ -764,7 +766,7 @@ public abstract class GenerateGenerics {
} }
for (var pair : elementsToAddToEquality) { for (var pair : elementsToAddToEquality) {
System.out.println(pair); Target.logger.info(pair);
addToEquality(pair.left, pair.right, referenced); addToEquality(pair.left, pair.right, referenced);
} }
} }
@@ -917,11 +919,11 @@ public abstract class GenerateGenerics {
} }
} }
if (infima.size() > 1) { if (infima.size() > 1) {
System.out.println(infima); Target.logger.info(infima);
for (var pair : infima) { for (var pair : infima) {
var returnTypes = findTypeVariables(method.getReturnType()); var returnTypes = findTypeVariables(method.getReturnType());
var chain = findConnectionToReturnType(returnTypes, input, new HashSet<>(), pair.left); var chain = findConnectionToReturnType(returnTypes, input, new HashSet<>(), pair.left);
System.out.println("Find: " + pair.left + " " + chain); Target.logger.info("Find: " + pair.left + " " + chain);
chain.remove(pair.left); chain.remove(pair.left);
if (chain.size() > 0) { if (chain.size() > 0) {
for (var tph : chain) for (var tph : chain)
@@ -959,8 +961,8 @@ public abstract class GenerateGenerics {
} }
} }
newTph.setVariance(variance); newTph.setVariance(variance);
System.out.println(infima + " " + infima.stream().map(i -> i.right.resolve().getVariance()).toList()); Target.logger.info(infima + " " + infima.stream().map(i -> i.right.resolve().getVariance()).toList());
System.out.println("Infima new TPH " + newTph + " variance " + variance); Target.logger.info("Infima new TPH " + newTph + " variance " + variance);
//referenced.add(newTph); //referenced.add(newTph);
addToPairs(input, new PairLT(left, new TPH(newTph))); addToPairs(input, new PairLT(left, new TPH(newTph)));
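The hunks above replace direct System.out.println calls with Target.logger.info(...) (and, further down, new Logger("Resolver") and context.logger().info(...)). The Logger class itself is not part of this diff; the following is only a minimal sketch, assuming a named logger that prefixes each message with a timestamp and level and that also accepts a Supplier so expensive messages are built lazily:

import java.time.LocalTime;
import java.util.function.Supplier;

// Sketch only: the real de.dhbwstuttgart.util.Logger may differ.
public class LoggerSketch {
    private final String name;
    private final boolean enabled;

    public LoggerSketch(String name) {
        this(name, true);
    }

    public LoggerSketch(String name, boolean enabled) {
        this.name = name;
        this.enabled = enabled;
    }

    public void info(Object message) {
        if (enabled) System.out.println("[" + LocalTime.now() + "] [" + name + "] INFO  " + message);
    }

    // Lazy variant: the message string is only built if the logger is enabled.
    public void info(Supplier<String> message) {
        if (enabled) info((Object) message.get());
    }

    public void error(Object message) {
        if (enabled) System.err.println("[" + LocalTime.now() + "] [" + name + "] ERROR " + message);
    }
}

A call like Target.logger.info("Simplified constraints: " + simplifiedConstraints) from the hunk above would then print one timestamped, prefixed line instead of a bare println.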


@@ -1,6 +1,5 @@
package de.dhbwstuttgart.target.generate; package de.dhbwstuttgart.target.generate;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.exceptions.DebugException; import de.dhbwstuttgart.exceptions.DebugException;
import de.dhbwstuttgart.exceptions.NotImplementedException; import de.dhbwstuttgart.exceptions.NotImplementedException;
import de.dhbwstuttgart.parser.SyntaxTreeGenerator.AssignToLocal; import de.dhbwstuttgart.parser.SyntaxTreeGenerator.AssignToLocal;
@@ -8,6 +7,7 @@ import de.dhbwstuttgart.parser.scope.JavaClassName;
import de.dhbwstuttgart.syntaxtree.*; import de.dhbwstuttgart.syntaxtree.*;
import de.dhbwstuttgart.syntaxtree.statement.*; import de.dhbwstuttgart.syntaxtree.statement.*;
import de.dhbwstuttgart.syntaxtree.type.*; import de.dhbwstuttgart.syntaxtree.type.*;
import de.dhbwstuttgart.target.Target;
import de.dhbwstuttgart.target.tree.MethodParameter; import de.dhbwstuttgart.target.tree.MethodParameter;
import de.dhbwstuttgart.target.tree.TargetMethod; import de.dhbwstuttgart.target.tree.TargetMethod;
import de.dhbwstuttgart.target.tree.expression.*; import de.dhbwstuttgart.target.tree.expression.*;
@@ -15,6 +15,7 @@ import de.dhbwstuttgart.target.tree.type.*;
import java.lang.reflect.Modifier; import java.lang.reflect.Modifier;
import java.util.*; import java.util.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import java.util.stream.StreamSupport; import java.util.stream.StreamSupport;
@@ -120,7 +121,7 @@ public class StatementToTargetExpression implements ASTVisitor {
@Override @Override
public void visit(BoolExpression bool) { public void visit(BoolExpression bool) {
System.out.println("BoolExpression"); Target.logger.info("BoolExpression");
} }
@Override @Override
@@ -218,6 +219,22 @@ public class StatementToTargetExpression implements ASTVisitor {
if (methodCall.receiver instanceof ExpressionReceiver expressionReceiver && expressionReceiver.expr instanceof This) { if (methodCall.receiver instanceof ExpressionReceiver expressionReceiver && expressionReceiver.expr instanceof This) {
if (receiverClass == null) throw new DebugException("Class " + receiverName + " does not exist!"); if (receiverClass == null) throw new DebugException("Class " + receiverName + " does not exist!");
var thisMethod = converter.findMethod(receiverClass, methodCall.name, signature); var thisMethod = converter.findMethod(receiverClass, methodCall.name, signature);
if (thisMethod.isEmpty()) {
Target.logger.error("Expected: " + receiverClass.getClassName() + "." + methodCall.name + "(" +
signature.stream().map(TargetType::toSignature).collect(Collectors.joining())+ ")" );
AtomicBoolean hasM = new AtomicBoolean(false);
receiverClass.getMethods().forEach(m -> {
if (Objects.equals(m.getName(), methodCall.name)) {
hasM.set(true);
Target.logger.error("But only has: " + m.name + "(" +
m.getParameterList().getFormalparalist().stream().map(t -> t.getType().toString()).collect(Collectors.joining())+ ")" );
}
});
if (!hasM.get())
Target.logger.error("But does not contain method at all");
}
ClassOrInterface finalReceiverClass = receiverClass; ClassOrInterface finalReceiverClass = receiverClass;
foundMethod = thisMethod.orElseGet(() -> findMethod(finalReceiverClass.getSuperClass().getName(), methodCall.name, signature).orElseThrow()); foundMethod = thisMethod.orElseGet(() -> findMethod(finalReceiverClass.getSuperClass().getName(), methodCall.name, signature).orElseThrow());
} else if (!isFunNType) { } else if (!isFunNType) {
@@ -234,7 +251,7 @@ public class StatementToTargetExpression implements ASTVisitor {
isInterface = receiverClass.isInterface(); isInterface = receiverClass.isInterface();
} }
System.out.println(argList); Target.logger.info(argList);
result = new TargetMethodCall(converter.convert(methodCall.getType()), returnType, argList, converter.convert(methodCall.receiver), methodCall.getArgumentList().getArguments().stream().map(converter::convert).toList(), receiverType, methodCall.name, isStatic, isInterface, isPrivate); result = new TargetMethodCall(converter.convert(methodCall.getType()), returnType, argList, converter.convert(methodCall.receiver), methodCall.getArgumentList().getArguments().stream().map(converter::convert).toList(), receiverType, methodCall.name, isStatic, isInterface, isPrivate);
} }


@@ -9,13 +9,8 @@ import de.dhbwstuttgart.syntaxtree.Method;
import de.dhbwstuttgart.syntaxtree.factory.ASTFactory; import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
import de.dhbwstuttgart.syntaxtree.factory.NameGenerator; import de.dhbwstuttgart.syntaxtree.factory.NameGenerator;
import de.dhbwstuttgart.syntaxtree.type.GenericRefType; import de.dhbwstuttgart.syntaxtree.type.GenericRefType;
import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import org.antlr.v4.runtime.Token;
import javax.swing.text.html.Option;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Optional; import java.util.Optional;


@@ -19,11 +19,11 @@ public class MethodAssumption extends Assumption{
private ClassOrInterface receiver; private ClassOrInterface receiver;
private RefTypeOrTPHOrWildcardOrGeneric retType; private RefTypeOrTPHOrWildcardOrGeneric retType;
List<? extends RefTypeOrTPHOrWildcardOrGeneric> params; List<? extends RefTypeOrTPHOrWildcardOrGeneric> params;
private final Boolean isInherited; private final boolean isInherited;
private final Boolean isOverridden; private final boolean isOverridden;
public MethodAssumption(ClassOrInterface receiver, RefTypeOrTPHOrWildcardOrGeneric retType, public MethodAssumption(ClassOrInterface receiver, RefTypeOrTPHOrWildcardOrGeneric retType,
List<? extends RefTypeOrTPHOrWildcardOrGeneric> params, TypeScope scope, Boolean isInherited, Boolean isOverridden){ List<? extends RefTypeOrTPHOrWildcardOrGeneric> params, TypeScope scope, boolean isInherited, boolean isOverridden){
super(scope); super(scope);
this.receiver = receiver; this.receiver = receiver;
this.retType = retType; this.retType = retType;
@@ -73,11 +73,11 @@ public class MethodAssumption extends Assumption{
return TYPEStmt.getReceiverType(receiver, resolver); return TYPEStmt.getReceiverType(receiver, resolver);
} }
public Boolean isInherited() { public boolean isInherited() {
return isInherited; return isInherited;
} }
public Boolean isOverridden() { public boolean isOverridden() {
return isOverridden; return isOverridden;
} }
} }


@@ -18,11 +18,12 @@ import java.util.Optional;
import java.util.Set; import java.util.Set;
import java.util.function.Function; import java.util.function.Function;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import javax.annotation.Nullable;
public class Constraint<A extends IConstraintElement> extends HashSet<A> implements ISerializableData { public class Constraint<A extends IConstraintElement> extends HashSet<A> implements ISerializableData {
private static final long serialVersionUID = 1L; private static final long serialVersionUID = 1L;
private Boolean isInherited = false;//wird beides nur für die Method-Constraints benoetigt private boolean isInherited = false;//wird beides nur für die Method-Constraints benoetigt
private Boolean isImplemented = false; private boolean isImplemented = false;
/* /*
* wird verwendet um bei der Codegenerierung die richtige Methoden - Signatur * wird verwendet um bei der Codegenerierung die richtige Methoden - Signatur
@@ -36,27 +37,31 @@ public class Constraint<A extends IConstraintElement> extends HashSet<A> impleme
super(); super();
} }
public Constraint(Boolean isInherited, Boolean isImplemented) { public Constraint(int initialCapacity) {
super(initialCapacity);
}
public Constraint(boolean isInherited, boolean isImplemented) {
this.isInherited = isInherited; this.isInherited = isInherited;
this.isImplemented = isImplemented; this.isImplemented = isImplemented;
} }
public Constraint(Boolean isInherited, Boolean isImplemented, Constraint<A> extendConstraint, Set<A> methodSignatureConstraint) { public Constraint(boolean isInherited, boolean isImplemented, Constraint<A> extendConstraint, Set<A> methodSignatureConstraint) {
this.isInherited = isInherited; this.isInherited = isInherited;
this.isImplemented = isImplemented; this.isImplemented = isImplemented;
this.extendConstraint = extendConstraint; this.extendConstraint = extendConstraint;
this.methodSignatureConstraint = methodSignatureConstraint; this.methodSignatureConstraint = methodSignatureConstraint;
} }
public void setIsInherited(Boolean isInherited) { public void setIsInherited(boolean isInherited) {
this.isInherited = isInherited; this.isInherited = isInherited;
} }
public Boolean isInherited() { public boolean isInherited() {
return isInherited; return isInherited;
} }
public Boolean isImplemented() { public boolean isImplemented() {
return isImplemented; return isImplemented;
} }
@@ -76,9 +81,18 @@ public class Constraint<A extends IConstraintElement> extends HashSet<A> impleme
methodSignatureConstraint = c; methodSignatureConstraint = c;
} }
public <B extends IConstraintElement> Constraint<B> createdMapped(Function<A,B> mapper) {
Constraint<B> result = new Constraint<>(this.size());
for (A element : this) {
result.add(mapper.apply(element));
}
return result;
}
public String toString() { public String toString() {
return super.toString() + "\nisInherited = " + isInherited + " isOveridden = " + isImplemented return super.toString() + "\nisInherited = " + isInherited
+ methodSignatureConstraint + " isOveridden = " + isImplemented
+ " msc[" + methodSignatureConstraint.size() + "] = " + methodSignatureConstraint
//" + extendsContraint: " + (extendConstraint != null ? extendConstraint.toStringBase() : "null" ) //" + extendsContraint: " + (extendConstraint != null ? extendConstraint.toStringBase() : "null" )
+ "\n"; + "\n";
} }
@@ -87,21 +101,27 @@ public class Constraint<A extends IConstraintElement> extends HashSet<A> impleme
return super.toString(); return super.toString();
} }
private String serialUUID = null;
@Override @Override
public SerialUUID toSerial(KeyStorage keyStorage) { public SerialUUID toSerial(KeyStorage keyStorage) {
final String uuid = keyStorage.getIdentifier(this); final String uuid = serialUUID == null ? keyStorage.getIdentifier() : serialUUID;
if (serialUUID == null) serialUUID = uuid;
if (!keyStorage.isAlreadySerialized(uuid)) { if (!keyStorage.isAlreadySerialized(uuid)) {
SerialMap serialized = new SerialMap(); SerialMap serialized = new SerialMap();
keyStorage.putSerialized(uuid, serialized); keyStorage.putSerialized(uuid, serialized);
serialized.put("isInherited", isInherited); serialized.put("isInherited", isInherited);
serialized.put("extendedConstraint", extendConstraint == null ? null : extendConstraint.toSerial(keyStorage)); serialized.put("isImplemented", isImplemented);
serialized.put("extendedConstraint", extendConstraint == null ? null :
extendConstraint.toSerial(keyStorage));
Function<A, ISerialNode> pairMapper = pair -> { Function<A, ISerialNode> pairMapper = pair -> {
if (pair instanceof Pair simplePair) return simplePair.toSerial(keyStorage); if (pair instanceof Pair simplePair) return simplePair.toSerial(keyStorage);
if (pair instanceof UnifyPair unifyPair) return unifyPair.toSerial(keyStorage); if (pair instanceof UnifyPair unifyPair) return unifyPair.toSerial(keyStorage);
throw new RuntimeException("No serialization is supported for type " + pair.getClass().getName()); throw new RuntimeException("No serialization is supported for type " + pair.getClass().getName());
}; };
serialized.put("methodSignatureConstraint", SerialList.fromMapped(methodSignatureConstraint, pairMapper)); serialized.put("methodSignatureConstraint", methodSignatureConstraint == null ? null :
SerialList.fromMapped(methodSignatureConstraint, pairMapper));
serialized.put("setElements", SerialList.fromMapped(this, pairMapper)); serialized.put("setElements", SerialList.fromMapped(this, pairMapper));
} }
@@ -120,6 +140,7 @@ public class Constraint<A extends IConstraintElement> extends HashSet<A> impleme
// retrieve the serialized data und start unserializing it // retrieve the serialized data und start unserializing it
SerialMap data = keyStorage.getSerialized(uuid); SerialMap data = keyStorage.getSerialized(uuid);
constraint.isInherited = data.getValue("isInherited").getOf(Boolean.class); constraint.isInherited = data.getValue("isInherited").getOf(Boolean.class);
constraint.isImplemented = data.getValue("isImplemented").getOf(Boolean.class);
constraint.extendConstraint = Optional.ofNullable(data.getUUIDOrNull("extendedConstraint")) constraint.extendConstraint = Optional.ofNullable(data.getUUIDOrNull("extendedConstraint"))
.map(v -> Constraint.fromSerial(v, context, target, keyStorage)) .map(v -> Constraint.fromSerial(v, context, target, keyStorage))
.orElse(null); .orElse(null);
@@ -135,11 +156,13 @@ public class Constraint<A extends IConstraintElement> extends HashSet<A> impleme
throw new RuntimeException("No serialization is supported for target type " + target.getName()); throw new RuntimeException("No serialization is supported for target type " + target.getName());
}; };
constraint.methodSignatureConstraint = data.getList("methodSignatureConstraint") constraint.methodSignatureConstraint =
.stream().map(pairUnmapper).collect(Collectors.toSet()); Optional.ofNullable(data.getListOrNull("methodSignatureConstraint"))
.map(l -> l.stream().map(pairUnmapper).collect(Collectors.toSet()))
.orElse(null);
constraint.addAll( constraint.addAll(
data.getList("setElements") data.getList("setElements")
.stream().map(pairUnmapper).collect(Collectors.toSet())); .stream().map(pairUnmapper).toList());
} }
return keyStorage.getUnserialized(uuid, Constraint.class); return keyStorage.getUnserialized(uuid, Constraint.class);


@@ -43,11 +43,24 @@ public class ConstraintSet<A extends IConstraintElement> implements ISerializabl
@Override @Override
public String toString() { public String toString() {
BinaryOperator<String> b = (x, y) -> x + y; BinaryOperator<String> b = (x, y) -> x + y;
return "\nUND:" + this.undConstraints.toString() + "\n" + return "\nUND:\n" + this.undConstraints.toString() +
"ODER:" + this.oderConstraints.stream().reduce("", (x, y) -> x.toString() + "\n" + y, b); "ODER:" + this.oderConstraints.stream().reduce("", (x, y) -> x + "\n\t" + y, b) +
"\n";
//cartesianProduct().toString(); //cartesianProduct().toString();
} }
@Override
public boolean equals(Object obj) {
if (!(obj instanceof ConstraintSet<?> other)) return false;
return Objects.equals(undConstraints, other.undConstraints)
&& Objects.equals(oderConstraints, other.oderConstraints);
}
@Override
public int hashCode() {
return Objects.hash(undConstraints, oderConstraints);
}
public Set<List<Constraint<A>>> cartesianProduct() { public Set<List<Constraint<A>>> cartesianProduct() {
Set<Constraint<A>> toAdd = new HashSet<>(); Set<Constraint<A>> toAdd = new HashSet<>();
toAdd.add(undConstraints); toAdd.add(undConstraints);
@@ -116,10 +129,9 @@ public class ConstraintSet<A extends IConstraintElement> implements ISerializabl
} }
public Set<A> getAll() { public Set<A> getAll() {
Set<A> ret = new HashSet<>(); Set<A> ret = new HashSet<>(undConstraints);
ret.addAll(undConstraints);
for (Set<Constraint<A>> oderConstraint : oderConstraints) { for (Set<Constraint<A>> oderConstraint : oderConstraints) {
oderConstraint.parallelStream().forEach((Constraint<A> as) -> ret.addAll(as)); oderConstraint.parallelStream().forEach(ret::addAll);
} }
return ret; return ret;
} }


@@ -21,7 +21,7 @@ public class Pair implements Serializable, IConstraintElement, ISerializableData
private SourceLoc location; private SourceLoc location;
private PairOperator eOperator = PairOperator.SMALLER; private PairOperator eOperator = PairOperator.SMALLER;
private Boolean noUnification = false; private boolean noUnification = false;
private Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2) { private Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2) {
@@ -43,7 +43,7 @@ public class Pair implements Serializable, IConstraintElement, ISerializableData
this.location = location; this.location = location;
} }
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator eOp, Boolean noUnification) { public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator eOp, boolean noUnification) {
// Konstruktor // Konstruktor
this(TA1, TA2); this(TA1, TA2);
this.eOperator = eOp; this.eOperator = eOp;
@@ -75,7 +75,7 @@ public class Pair implements Serializable, IConstraintElement, ISerializableData
Operator = "<?"; Operator = "<?";
*/ */
return "\n(" + strElement1 + " " + eOperator.toString() + " " + strElement2 + ")"; return "\n(P: " + strElement1 + " " + eOperator.toString() + " " + strElement2 + ")";
/*- Equals: " + bEqual*/ /*- Equals: " + bEqual*/
} }
@@ -161,7 +161,7 @@ public class Pair implements Serializable, IConstraintElement, ISerializableData
String op = data.getValue("op").getOf(String.class); String op = data.getValue("op").getOf(String.class);
SerialMap ta1 = data.getMap("ta1"); SerialMap ta1 = data.getMap("ta1");
SerialMap ta2 = data.getMap("ta2"); SerialMap ta2 = data.getMap("ta2");
Boolean noUnification = data.getValue("noUnification").getOf(Integer.class) == 1; boolean noUnification = data.getValue("noUnification").getOf(Integer.class) == 1;
SerialMap location = data.getMapOrNull("location"); SerialMap location = data.getMapOrNull("location");
var pair = new Pair( var pair = new Pair(


@@ -38,7 +38,7 @@ implements ISerializableData {
public SerialMap toSerial(KeyStorage keyStorage) { public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap(); SerialMap serialized = new SerialMap();
serialized.put("left", this.getLeft().toSerial(keyStorage)); serialized.put("left", this.getLeft().toSerial(keyStorage));
serialized.put("right", this.getLeft().toSerial(keyStorage)); serialized.put("right", this.getRight().toSerial(keyStorage));
// create the wrapper and put this as the object // create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage); var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized); serializedWrapper.put("object", serialized);


@@ -21,19 +21,19 @@ public class PairTPHEqualTPH extends ResultPair<TypePlaceholder, TypePlaceholder
public SerialMap toSerial(KeyStorage keyStorage) { public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap(); SerialMap serialized = new SerialMap();
serialized.put("left", this.getLeft().toSerial(keyStorage)); serialized.put("left", this.getLeft().toSerial(keyStorage));
serialized.put("right", this.getLeft().toSerial(keyStorage)); serialized.put("right", this.getRight().toSerial(keyStorage));
// create the wrapper and put this as the object // create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage); var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized); serializedWrapper.put("object", serialized);
return serializedWrapper; return serializedWrapper;
} }
public static PairNoResult fromSerial2(SerialMap data, UnifyContext context) { public static PairTPHEqualTPH fromSerial2(SerialMap data, UnifyContext context) {
SerialMap left = data.getMap("left"); SerialMap left = data.getMap("left");
SerialMap right = data.getMap("right"); SerialMap right = data.getMap("right");
return new PairNoResult( return new PairTPHEqualTPH(
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(left, context), (TypePlaceholder) RefTypeOrTPHOrWildcardOrGeneric.fromSerial(left, context),
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(right, context) (TypePlaceholder) RefTypeOrTPHOrWildcardOrGeneric.fromSerial(right, context)
); );
} }
} }


@@ -33,20 +33,20 @@ implements ISerializableData {
@Override @Override
public SerialMap toSerial(KeyStorage keyStorage) { public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();; SerialMap serialized = new SerialMap();
serialized.put("left", this.getLeft().toSerial(keyStorage)); serialized.put("left", this.getLeft().toSerial(keyStorage));
serialized.put("right", this.getLeft().toSerial(keyStorage)); serialized.put("right", this.getRight().toSerial(keyStorage));
// create the wrapper and put this as the object // create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage); var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized); serializedWrapper.put("object", serialized);
return serializedWrapper; return serializedWrapper;
} }
public static PairNoResult fromSerial2(SerialMap data, UnifyContext context) { public static PairTPHequalRefTypeOrWildcardType fromSerial2(SerialMap data, UnifyContext context) {
SerialMap left = data.getMap("left"); SerialMap left = data.getMap("left");
SerialMap right = data.getMap("right"); SerialMap right = data.getMap("right");
return new PairNoResult( return new PairTPHequalRefTypeOrWildcardType(
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(left, context), (TypePlaceholder)RefTypeOrTPHOrWildcardOrGeneric.fromSerial(left, context),
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(right, context) RefTypeOrTPHOrWildcardOrGeneric.fromSerial(right, context)
); );
} }


@@ -46,19 +46,19 @@ public class PairTPHsmallerTPH extends ResultPair<TypePlaceholder,TypePlaceholde
public SerialMap toSerial(KeyStorage keyStorage) { public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap(); SerialMap serialized = new SerialMap();
serialized.put("left", this.getLeft().toSerial(keyStorage)); serialized.put("left", this.getLeft().toSerial(keyStorage));
serialized.put("right", this.getLeft().toSerial(keyStorage)); serialized.put("right", this.getRight().toSerial(keyStorage));
// create the wrapper and put this as the object // create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage); var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized); serializedWrapper.put("object", serialized);
return serializedWrapper; return serializedWrapper;
} }
public static PairNoResult fromSerial2(SerialMap data, UnifyContext context) { public static PairTPHsmallerTPH fromSerial2(SerialMap data, UnifyContext context) {
SerialMap left = data.getMap("left"); SerialMap left = data.getMap("left");
SerialMap right = data.getMap("right"); SerialMap right = data.getMap("right");
return new PairNoResult( return new PairTPHsmallerTPH(
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(left, context), (TypePlaceholder) RefTypeOrTPHOrWildcardOrGeneric.fromSerial(left, context),
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(right, context) (TypePlaceholder) RefTypeOrTPHOrWildcardOrGeneric.fromSerial(right, context)
); );
} }
} }


@@ -69,7 +69,14 @@ implements ISerializableData {
@Override @Override
public SerialMap toSerial(KeyStorage keyStorage) { public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap(); SerialMap serialized = new SerialMap();
serialized.put("type", this.getClass().getSimpleName()); String type = switch (this) {
case PairNoResult _ -> "pnr";
case PairTPHEqualTPH _ -> "ptet";
case PairTPHsmallerTPH _ -> "ptst";
case PairTPHequalRefTypeOrWildcardType _ -> "ptertwt";
default -> throw new RuntimeException("No type defined for ResultPair of class " + this.getClass().getName());
};
serialized.put("type", type);
// we only insert null for the object and expect the child classes to call this and override the value with themselves // we only insert null for the object and expect the child classes to call this and override the value with themselves
serialized.put("object", SerialValue.NULL); serialized.put("object", SerialValue.NULL);
return serialized; return serialized;
@@ -80,10 +87,12 @@ implements ISerializableData {
String type = data.getValue("type").getOf(String.class); String type = data.getValue("type").getOf(String.class);
SerialMap object = data.getMap("object"); SerialMap object = data.getMap("object");
if (type.equals(PairNoResult.class.getSimpleName())) return (ResultPair)PairNoResult.fromSerial2(object, context); return switch (type) {
else if (type.equals(PairTPHEqualTPH.class.getSimpleName())) return (ResultPair)PairTPHEqualTPH.fromSerial2(object, context); case "pnr" -> (ResultPair) PairNoResult.fromSerial2(object, context);
else if (type.equals(PairTPHsmallerTPH.class.getSimpleName())) return (ResultPair)PairTPHsmallerTPH.fromSerial2(object, context); case "ptet" -> (ResultPair) PairTPHEqualTPH.fromSerial2(object, context);
else if (type.equals(PairTPHequalRefTypeOrWildcardType.class.getSimpleName())) return (ResultPair)PairTPHequalRefTypeOrWildcardType.fromSerial2(object, context); case "ptst" -> (ResultPair) PairTPHsmallerTPH.fromSerial2(object, context);
else throw new RuntimeException("Could not unserialize class of unhandled type " + type); case "ptertwt" -> (ResultPair) PairTPHequalRefTypeOrWildcardType.fromSerial2(object, context);
default -> throw new RuntimeException("Could not unserialize class of unhandled type " + type);
};
} }
} }


@@ -1,10 +1,15 @@
package de.dhbwstuttgart.typeinference.result; package de.dhbwstuttgart.typeinference.result;
import com.google.common.collect.Ordering;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData; import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage; import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList; import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap; import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTaskHelper;
import de.dhbwstuttgart.typeinference.unify.UnifyContext; import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.util.Logger;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashSet; import java.util.HashSet;
import java.util.Set; import java.util.Set;
@@ -25,7 +30,7 @@ public class ResultSet implements ISerializableData {
public ResultSet(Set<ResultPair> set) { public ResultSet(Set<ResultPair> set) {
this.results = set; this.results = set;
this.genIns = new HashSet<>(); this.genIns = TypeUnifyTaskHelper.getPresizedHashSet(results.size());
results.forEach(x -> { results.forEach(x -> {
if (x instanceof PairTPHsmallerTPH) { if (x instanceof PairTPHsmallerTPH) {
this.genIns.add(x); this.genIns.add(x);
@@ -56,14 +61,27 @@ public class ResultSet implements ISerializableData {
} }
public String toString() { public String toString() {
var results = new ArrayList<>(this.results);
results.sort(
Comparator
.comparingInt((ResultPair o) -> o.getLeft().toString().length())
.thenComparing(o -> o.getLeft().toString())
.thenComparingInt(o -> o.getRight().toString().length())
.thenComparing(o -> o.getRight().toString())
);
return results.toString(); return results.toString();
} }
@Override @Override
public boolean equals(Object o) { public boolean equals(Object o) {
if (o instanceof ResultSet) { if (o instanceof ResultSet other) {
ResultSet other = (ResultSet) o; // sort both result lists
return this.results.equals(other.results); var thisElements = new ArrayList<>(this.results);
thisElements.sort(Ordering.usingToString());
var otherElements = new ArrayList<>(other.results);
otherElements.sort(Ordering.usingToString());
return thisElements.equals(otherElements);
} else { } else {
return false; return false;
} }
@@ -94,6 +112,8 @@ class Resolver implements ResultSetVisitor {
private final Set<GenericInsertPair> additionalTPHs = new HashSet<>(); private final Set<GenericInsertPair> additionalTPHs = new HashSet<>();
private ResultPair<?, ?> currentPair; private ResultPair<?, ?> currentPair;
public static Logger logger = new Logger("Resolver");
public Resolver(ResultSet resultPairs) { public Resolver(ResultSet resultPairs) {
this.result = resultPairs; this.result = resultPairs;
} }
@@ -101,7 +121,7 @@ class Resolver implements ResultSetVisitor {
public ResolvedType resolve(TypePlaceholder tph) { public ResolvedType resolve(TypePlaceholder tph) {
toResolve = tph; toResolve = tph;
resolved = null; resolved = null;
System.out.println(tph.toString()); logger.info(tph.toString());
for (ResultPair<?, ?> resultPair : result.results) { for (ResultPair<?, ?> resultPair : result.results) {
if (resultPair instanceof PairTPHEqualTPH && ((PairTPHEqualTPH) resultPair).getLeft().equals(toResolve)) { if (resultPair instanceof PairTPHEqualTPH && ((PairTPHEqualTPH) resultPair).getLeft().equals(toResolve)) {
currentPair = resultPair; currentPair = resultPair;


@@ -14,6 +14,7 @@ import de.dhbwstuttgart.typeinference.assumptions.TypeInferenceBlockInformation;
import de.dhbwstuttgart.typeinference.assumptions.TypeInferenceInformation; import de.dhbwstuttgart.typeinference.assumptions.TypeInferenceInformation;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet; import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair; import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTaskHelper;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator; import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.util.BiRelation; import de.dhbwstuttgart.util.BiRelation;
import org.antlr.v4.runtime.Token; import org.antlr.v4.runtime.Token;
@@ -33,7 +34,7 @@ public class TYPE {
public ConstraintSet getConstraints() { public ConstraintSet getConstraints() {
ConstraintSet ret = new ConstraintSet(); ConstraintSet ret = new ConstraintSet();
for (ClassOrInterface cl : sf.KlassenVektor) { for (ClassOrInterface cl : sf.KlassenVektor) {
var allClasses = new HashSet<ClassOrInterface>(); Set<ClassOrInterface> allClasses = TypeUnifyTaskHelper.getPresizedHashSet(allAvailableClasses.size() + sf.availableClasses.size());
allClasses.addAll(allAvailableClasses); allClasses.addAll(allAvailableClasses);
allClasses.addAll(sf.availableClasses); allClasses.addAll(sf.availableClasses);
ret.addAll(getConstraintsClass(cl, new TypeInferenceInformation(allClasses))); ret.addAll(getConstraintsClass(cl, new TypeInferenceInformation(allClasses)));
@@ -68,7 +69,7 @@ public class TYPE {
for(SourceFile sourceFile : sfs){ for(SourceFile sourceFile : sfs){
for(JavaClassName importName : sourceFile.imports){ for(JavaClassName importName : sourceFile.imports){
System.out.println(importName); context.logger().info(importName);
try { try {
classes.add(ASTFactory.createClass(classLoader.loadClass(importName.toString()))); classes.add(ASTFactory.createClass(classLoader.loadClass(importName.toString())));
} catch (ClassNotFoundException e) { } catch (ClassNotFoundException e) {
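Several hunks in this change (ResultSet, TYPE, TYPEStmt) swap new HashSet<>() for TypeUnifyTaskHelper.getPresizedHashSet(n) so the set is allocated large enough up front and never rehashes while being filled. The helper's body is not shown in this diff; a minimal sketch, assuming it derives the initial capacity from the expected element count and the default load factor of 0.75:

import java.util.HashSet;

// Sketch only: the real TypeUnifyTaskHelper.getPresizedHashSet may differ.
public final class PresizedSets {

    // Returns a HashSet that can hold expectedSize elements without resizing.
    public static <T> HashSet<T> getPresizedHashSet(int expectedSize) {
        // A HashSet resizes once size exceeds capacity * loadFactor (0.75 by default),
        // so request expectedSize / 0.75, rounded up, with a small lower bound.
        int initialCapacity = (int) Math.ceil(expectedSize / 0.75);
        return new HashSet<>(Math.max(initialCapacity, 16));
    }
}

Used the way the call sites above use it, e.g. Set<ClassOrInterface> allClasses = PresizedSets.getPresizedHashSet(allAvailableClasses.size() + sf.availableClasses.size()).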


@@ -1,6 +1,8 @@
//PL 2018-12-19: Merge chekcen //PL 2018-12-19: Merge chekcen
package de.dhbwstuttgart.typeinference.typeAlgo; package de.dhbwstuttgart.typeinference.typeAlgo;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTaskHelper;
import java.util.*; import java.util.*;
import java.util.stream.Collectors; import java.util.stream.Collectors;
@@ -116,17 +118,18 @@ public class TYPEStmt implements StatementVisitor {
@Override @Override
public void visit(FieldVar fieldVar) { public void visit(FieldVar fieldVar) {
fieldVar.receiver.accept(this); fieldVar.receiver.accept(this);
Set<Constraint> oderConstraints = new HashSet<>(); List<FieldAssumption> fieldAssumptions = info.getFields(fieldVar.fieldVarName);
Set<Constraint> oderConstraints = TypeUnifyTaskHelper.getPresizedHashSet(fieldAssumptions.size());
for (FieldAssumption fieldAssumption : info.getFields(fieldVar.fieldVarName)) { for (FieldAssumption fieldAssumption : fieldAssumptions) {
Constraint constraint = new Constraint(); Constraint constraint = new Constraint();
GenericsResolver resolver = getResolverInstance(); GenericsResolver resolver = getResolverInstance();
constraint.add(new Pair(fieldVar.receiver.getType(), fieldAssumption.getReceiverType(resolver), PairOperator.SMALLERDOT, loc(fieldVar.getOffset()))); // PL 2019-12-09: SMALLERDOT eingefuegt, EQUALSDOT entfernt, wenn ds Field privat ist muesste es EQUALSDOT lauten constraint.add(new Pair(fieldVar.receiver.getType(), fieldAssumption.getReceiverType(resolver), PairOperator.SMALLERDOT, loc(fieldVar.getOffset()))); // PL 2019-12-09: SMALLERDOT eingefuegt, EQUALSDOT entfernt, wenn ds Field privat ist muesste es EQUALSDOT lauten
constraint.add(new Pair(fieldVar.getType(), fieldAssumption.getType(resolver), PairOperator.EQUALSDOT, loc(fieldVar.getOffset()))); constraint.add(new Pair(fieldVar.getType(), fieldAssumption.getType(resolver), PairOperator.EQUALSDOT, loc(fieldVar.getOffset())));
oderConstraints.add(constraint); oderConstraints.add(constraint);
} }
if (oderConstraints.size() == 0) if (oderConstraints.isEmpty())
throw new TypeinferenceException("Kein Feld " + fieldVar.fieldVarName + " gefunden", fieldVar.getOffset()); throw new TypeinferenceException("Kein Feld " + fieldVar.fieldVarName + " gefunden", fieldVar.getOffset());
constraintsSet.addOderConstraint(oderConstraints); constraintsSet.addOderConstraint(oderConstraints);
} }
@@ -141,7 +144,7 @@ public class TYPEStmt implements StatementVisitor {
@Override @Override
public void visit(ForEachStmt forEachStmt) { public void visit(ForEachStmt forEachStmt) {
var iterableType = new RefType(ASTFactory.createClass(java.lang.Iterable.class).getClassName(), Arrays.asList(new ExtendsWildcardType(forEachStmt.statement.getType(), new NullToken())), new NullToken()); var iterableType = new RefType(ASTFactory.createClass(java.lang.Iterable.class).getClassName(), List.of(new ExtendsWildcardType(forEachStmt.statement.getType(), new NullToken())), new NullToken());
constraintsSet.addUndConstraint(new Pair(forEachStmt.expression.getType(), iterableType, PairOperator.SMALLERDOT, loc(forEachStmt.getOffset()))); constraintsSet.addUndConstraint(new Pair(forEachStmt.expression.getType(), iterableType, PairOperator.SMALLERDOT, loc(forEachStmt.getOffset())));
forEachStmt.statement.accept(this); forEachStmt.statement.accept(this);
forEachStmt.expression.accept(this); forEachStmt.expression.accept(this);
@@ -189,7 +192,7 @@ public class TYPEStmt implements StatementVisitor {
methodCall.receiver.accept(this); methodCall.receiver.accept(this);
// Overloading: // Overloading:
Set<Constraint<Pair>> methodConstraints = new HashSet<>(); Set<Constraint<Pair>> methodConstraints = new HashSet<>();
for (MethodAssumption m : this.getMethods(methodCall.name, methodCall.arglist, info)) { for (MethodAssumption m : TYPEStmt.getMethods(methodCall.name, methodCall.arglist, info)) {
GenericsResolver resolver = getResolverInstance(); GenericsResolver resolver = getResolverInstance();
Set<Constraint<Pair>> oneMethodConstraints = generateConstraint(methodCall, m, info, resolver); Set<Constraint<Pair>> oneMethodConstraints = generateConstraint(methodCall, m, info, resolver);
methodConstraints.addAll(oneMethodConstraints); methodConstraints.addAll(oneMethodConstraints);
@@ -199,7 +202,7 @@ public class TYPEStmt implements StatementVisitor {
* oneMethodConstraint.setExtendConstraint(extendsOneMethodConstraint); extendsOneMethodConstraint.setExtendConstraint(oneMethodConstraint); methodConstraints.add(extendsOneMethodConstraint); * oneMethodConstraint.setExtendConstraint(extendsOneMethodConstraint); extendsOneMethodConstraint.setExtendConstraint(oneMethodConstraint); methodConstraints.add(extendsOneMethodConstraint);
*/ */
} }
if (methodConstraints.size() < 1) { if (methodConstraints.isEmpty()) {
throw new TypeinferenceException("Methode " + methodCall.name + " ist nicht vorhanden!", methodCall.getOffset()); throw new TypeinferenceException("Methode " + methodCall.name + " ist nicht vorhanden!", methodCall.getOffset());
} }
constraintsSet.addOderConstraint(methodConstraints); constraintsSet.addOderConstraint(methodConstraints);
@@ -212,7 +215,7 @@ public class TYPEStmt implements StatementVisitor {
for (MethodAssumption m : this.getConstructors(info, (RefType) methodCall.getType(), methodCall.getArgumentList())) { for (MethodAssumption m : this.getConstructors(info, (RefType) methodCall.getType(), methodCall.getArgumentList())) {
methodConstraints.add(generateConstructorConstraint(methodCall, m, info, getResolverInstance())); methodConstraints.add(generateConstructorConstraint(methodCall, m, info, getResolverInstance()));
} }
if (methodConstraints.size() < 1) { if (methodConstraints.isEmpty()) {
throw new TypeinferenceException("Konstruktor in Klasse " + methodCall.getType().toString() + " ist nicht vorhanden!", methodCall.getOffset()); throw new TypeinferenceException("Konstruktor in Klasse " + methodCall.getType().toString() + " ist nicht vorhanden!", methodCall.getOffset());
} }
constraintsSet.addOderConstraint(methodConstraints); constraintsSet.addOderConstraint(methodConstraints);
@@ -282,8 +285,13 @@ public class TYPEStmt implements StatementVisitor {
// see: https://docs.oracle.com/javase/specs/jls/se7/html/jls-15.html#jls-15.17 // see: https://docs.oracle.com/javase/specs/jls/se7/html/jls-15.html#jls-15.17
// Expression muss zu Numeric Convertierbar sein. also von Numeric erben // Expression muss zu Numeric Convertierbar sein. also von Numeric erben
Constraint<Pair> numeric; Constraint<Pair> numeric;
HashSet<JavaClassName> classNames = TypeUnifyTaskHelper.getPresizedHashSet(info.getAvailableClasses().size());
for (var classEl : info.getAvailableClasses()) {
classNames.add(classEl.getClassName());
}
// PL eingefuegt 2018-07-17 // PL eingefuegt 2018-07-17
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(bytee.getName())) { if (classNames.contains(bytee.getName())) {
numeric = new Constraint<>(); numeric = new Constraint<>();
numeric.add(new Pair(binary.lexpr.getType(), bytee, PairOperator.SMALLERDOT, loc(binary.getOffset()))); numeric.add(new Pair(binary.lexpr.getType(), bytee, PairOperator.SMALLERDOT, loc(binary.getOffset())));
numeric.add(new Pair(binary.rexpr.getType(), bytee, PairOperator.SMALLERDOT, loc(binary.getOffset()))); numeric.add(new Pair(binary.rexpr.getType(), bytee, PairOperator.SMALLERDOT, loc(binary.getOffset())));
@@ -291,7 +299,7 @@ public class TYPEStmt implements StatementVisitor {
numericAdditionOrStringConcatenation.add(numeric); numericAdditionOrStringConcatenation.add(numeric);
} }
// PL eingefuegt 2018-07-17 // PL eingefuegt 2018-07-17
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(shortt.getName())) { if (classNames.contains(shortt.getName())) {
numeric = new Constraint<>(); numeric = new Constraint<>();
numeric.add(new Pair(binary.lexpr.getType(), shortt, PairOperator.SMALLERDOT, loc(binary.getOffset()))); numeric.add(new Pair(binary.lexpr.getType(), shortt, PairOperator.SMALLERDOT, loc(binary.getOffset())));
numeric.add(new Pair(binary.rexpr.getType(), shortt, PairOperator.SMALLERDOT, loc(binary.getOffset()))); numeric.add(new Pair(binary.rexpr.getType(), shortt, PairOperator.SMALLERDOT, loc(binary.getOffset())));
@@ -299,7 +307,7 @@ public class TYPEStmt implements StatementVisitor {
numericAdditionOrStringConcatenation.add(numeric); numericAdditionOrStringConcatenation.add(numeric);
} }
// PL eingefuegt 2018-07-17 // PL eingefuegt 2018-07-17
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(integer.getName())) { if (classNames.contains(integer.getName())) {
numeric = new Constraint<>(); numeric = new Constraint<>();
numeric.add(new Pair(binary.lexpr.getType(), integer, PairOperator.SMALLERDOT, loc(binary.getOffset()))); numeric.add(new Pair(binary.lexpr.getType(), integer, PairOperator.SMALLERDOT, loc(binary.getOffset())));
numeric.add(new Pair(binary.rexpr.getType(), integer, PairOperator.SMALLERDOT, loc(binary.getOffset()))); numeric.add(new Pair(binary.rexpr.getType(), integer, PairOperator.SMALLERDOT, loc(binary.getOffset())));
@@ -307,7 +315,7 @@ public class TYPEStmt implements StatementVisitor {
numericAdditionOrStringConcatenation.add(numeric); numericAdditionOrStringConcatenation.add(numeric);
} }
// PL eingefuegt 2018-07-17 // PL eingefuegt 2018-07-17
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(longg.getName())) { if (classNames.contains(longg.getName())) {
numeric = new Constraint<>(); numeric = new Constraint<>();
numeric.add(new Pair(binary.lexpr.getType(), longg, PairOperator.SMALLERDOT, loc(binary.getOffset()))); numeric.add(new Pair(binary.lexpr.getType(), longg, PairOperator.SMALLERDOT, loc(binary.getOffset())));
numeric.add(new Pair(binary.rexpr.getType(), longg, PairOperator.SMALLERDOT, loc(binary.getOffset()))); numeric.add(new Pair(binary.rexpr.getType(), longg, PairOperator.SMALLERDOT, loc(binary.getOffset())));
@@ -315,7 +323,7 @@ public class TYPEStmt implements StatementVisitor {
numericAdditionOrStringConcatenation.add(numeric); numericAdditionOrStringConcatenation.add(numeric);
} }
// PL eingefuegt 2018-07-17 // PL eingefuegt 2018-07-17
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(floatt.getName())) { if (classNames.contains(floatt.getName())) {
numeric = new Constraint<>(); numeric = new Constraint<>();
numeric.add(new Pair(binary.lexpr.getType(), floatt, PairOperator.SMALLERDOT, loc(binary.getOffset()))); numeric.add(new Pair(binary.lexpr.getType(), floatt, PairOperator.SMALLERDOT, loc(binary.getOffset())));
numeric.add(new Pair(binary.rexpr.getType(), floatt, PairOperator.SMALLERDOT, loc(binary.getOffset()))); numeric.add(new Pair(binary.rexpr.getType(), floatt, PairOperator.SMALLERDOT, loc(binary.getOffset())));
@@ -323,7 +331,7 @@ public class TYPEStmt implements StatementVisitor {
numericAdditionOrStringConcatenation.add(numeric); numericAdditionOrStringConcatenation.add(numeric);
} }
// PL eingefuegt 2018-07-17 // PL eingefuegt 2018-07-17
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(doublee.getName())) { if (classNames.contains(doublee.getName())) {
numeric = new Constraint<>(); numeric = new Constraint<>();
numeric.add(new Pair(binary.lexpr.getType(), doublee, PairOperator.SMALLERDOT, loc(binary.getOffset()))); numeric.add(new Pair(binary.lexpr.getType(), doublee, PairOperator.SMALLERDOT, loc(binary.getOffset())));
numeric.add(new Pair(binary.rexpr.getType(), doublee, PairOperator.SMALLERDOT, loc(binary.getOffset()))); numeric.add(new Pair(binary.rexpr.getType(), doublee, PairOperator.SMALLERDOT, loc(binary.getOffset())));
@@ -338,7 +346,7 @@ public class TYPEStmt implements StatementVisitor {
if (binary.operation.equals(BinaryExpr.Operator.ADD)) { if (binary.operation.equals(BinaryExpr.Operator.ADD)) {
// Dann kann der Ausdruck auch das aneinanderfügen zweier Strings sein: ("a" + "b") oder (1 + 2) // Dann kann der Ausdruck auch das aneinanderfügen zweier Strings sein: ("a" + "b") oder (1 + 2)
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(string.getName())) { if (classNames.contains(string.getName())) {
Constraint<Pair> stringConcat = new Constraint<>(); Constraint<Pair> stringConcat = new Constraint<>();
stringConcat.add(new Pair(binary.lexpr.getType(), string, PairOperator.EQUALSDOT, loc(binary.getOffset()))); stringConcat.add(new Pair(binary.lexpr.getType(), string, PairOperator.EQUALSDOT, loc(binary.getOffset())));
stringConcat.add(new Pair(binary.rexpr.getType(), string, PairOperator.EQUALSDOT, loc(binary.getOffset()))); stringConcat.add(new Pair(binary.rexpr.getType(), string, PairOperator.EQUALSDOT, loc(binary.getOffset())));
@@ -346,7 +354,7 @@ public class TYPEStmt implements StatementVisitor {
numericAdditionOrStringConcatenation.add(stringConcat); numericAdditionOrStringConcatenation.add(stringConcat);
} }
} }
if (numericAdditionOrStringConcatenation.size() < 1) { if (numericAdditionOrStringConcatenation.isEmpty()) {
throw new TypeinferenceException("Kein Typ für " + binary.operation.toString() + " vorhanden", binary.getOffset()); throw new TypeinferenceException("Kein Typ für " + binary.operation.toString() + " vorhanden", binary.getOffset());
} }
constraintsSet.addOderConstraint(numericAdditionOrStringConcatenation); constraintsSet.addOderConstraint(numericAdditionOrStringConcatenation);
@@ -693,8 +701,8 @@ public class TYPEStmt implements StatementVisitor {
Set<Pair> methodSignatureConstraint = generatemethodSignatureConstraint(forMethod, assumption, info, resolver); Set<Pair> methodSignatureConstraint = generatemethodSignatureConstraint(forMethod, assumption, info, resolver);
//System.out.println("methodSignatureConstraint: " + methodSignatureConstraint); //context.logger().info("methodSignatureConstraint: " + methodSignatureConstraint);
//System.out.println("methodConstraint: " + methodConstraint); //context.logger().info("methodConstraint: " + methodConstraint);
methodConstraint.setmethodSignatureConstraint(methodSignatureConstraint); methodConstraint.setmethodSignatureConstraint(methodSignatureConstraint);
extendsMethodConstraint.setmethodSignatureConstraint(methodSignatureConstraint); extendsMethodConstraint.setmethodSignatureConstraint(methodSignatureConstraint);
@@ -732,7 +740,7 @@ public class TYPEStmt implements StatementVisitor {
} }
// Zuordnung von MethodCall.signature(ReturnType) zu dem ReturnType der ausgewaehlten Methode (assumption.returnType) // Zuordnung von MethodCall.signature(ReturnType) zu dem ReturnType der ausgewaehlten Methode (assumption.returnType)
ret.add(new Pair(foMethod.signature.get(foMethod.signature.size() - 1), assumption.getReturnType(), PairOperator.EQUALSDOT)); ret.add(new Pair(foMethod.signature.getLast(), assumption.getReturnType(), PairOperator.EQUALSDOT));
return ret; return ret;
} }
@@ -745,8 +753,8 @@ public class TYPEStmt implements StatementVisitor {
// funNParams.add(TypePlaceholder.fresh(new NullToken())); // funNParams.add(TypePlaceholder.fresh(new NullToken()));
funNParams.add(new GenericRefType(NameGenerator.makeNewName(), new NullToken())); funNParams.add(new GenericRefType(NameGenerator.makeNewName(), new NullToken()));
} }
funNParams.get(funNParams.size() - 1); funNParams.getLast();
ret.add(new MethodAssumption(new FunNClass(funNParams), funNParams.get(funNParams.size() - 1), funNParams.subList(0, funNParams.size() - 1), new TypeScope() { ret.add(new MethodAssumption(new FunNClass(funNParams), funNParams.getLast(), funNParams.subList(0, funNParams.size() - 1), new TypeScope() {
@Override @Override
public Iterable<? extends GenericTypeVar> getGenerics() { public Iterable<? extends GenericTypeVar> getGenerics() {
throw new NotImplementedException(); throw new NotImplementedException();
@@ -841,7 +849,7 @@ public class TYPEStmt implements StatementVisitor {
for (var child : switchStmt.getBlocks()) { for (var child : switchStmt.getBlocks()) {
for (var label : child.getLabels()) { for (var label : child.getLabels()) {
if (label.getPattern() == null) { if (label.getPattern() == null) {
//System.out.println("DefaultCase"); //context.logger().info("DefaultCase");
} else { } else {
constraintsSet.addUndConstraint( constraintsSet.addUndConstraint(
new Pair( new Pair(
@@ -882,13 +890,9 @@ public class TYPEStmt implements StatementVisitor {
child.getLabels().forEach(el -> { child.getLabels().forEach(el -> {
if (el.getType() instanceof RefType) { if (el.getType() instanceof RefType) {
var recType = el; if (el.getPattern() instanceof RecordPattern pattern) {
recursivelyAddRecordConstraints(pattern);
if (el.getPattern() instanceof RecordPattern) { }
var pattern = (RecordPattern) recType.getPattern();
recursivelyAddRecordConstraints(pattern);
}
} }
}); });
@@ -904,13 +908,13 @@ public class TYPEStmt implements StatementVisitor {
var allClasses = info.getAvailableClasses(); var allClasses = info.getAvailableClasses();
var interestingClasses = allClasses.stream().filter(as -> as.getClassName().equals(((RefType) pattern.getType()).getName())).toList(); var interestingClasses = allClasses.stream().filter(as -> as.getClassName().equals(((RefType) pattern.getType()).getName())).toList();
var constructors = interestingClasses.get(0).getConstructors(); var constructors = interestingClasses.getFirst().getConstructors();
int counter = 0; int counter = 0;
for (var subPattern : pattern.getSubPattern()) { for (var subPattern : pattern.getSubPattern()) {
for (Constructor con : constructors) { for (Constructor con : constructors) {
//System.out.println("----------------------\n" + subPattern.getType() + " | " + con.getParameterList().getParameterAt(counter).getType() + "\n----------------------\n"); //context.logger().info("----------------------\n" + subPattern.getType() + " | " + con.getParameterList().getParameterAt(counter).getType() + "\n----------------------\n");
constraintsSet.addUndConstraint(new Pair(subPattern.getType(), con.getParameterList().getParameterAt(counter).getType(), PairOperator.SMALLERDOT, loc(con.getParameterList().getParameterAt(counter).getOffset()))); constraintsSet.addUndConstraint(new Pair(subPattern.getType(), con.getParameterList().getParameterAt(counter).getType(), PairOperator.SMALLERDOT, loc(con.getParameterList().getParameterAt(counter).getOffset())));
} }
if (subPattern instanceof RecordPattern) recursivelyAddRecordConstraints((RecordPattern) subPattern); if (subPattern instanceof RecordPattern) recursivelyAddRecordConstraints((RecordPattern) subPattern);


@@ -0,0 +1,91 @@
package de.dhbwstuttgart.typeinference.unify;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.RecursiveTask;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* An intermediate class for the recursive steps of the TypeUnifyTask:
* This allows canceling parts of the recursion tree, instead of only the whole execution as before. But in
* order for that to work, all cancellable child tasks must be added when they are created
*
* @param <T>
*/
public abstract class CancellableTask<T> extends RecursiveTask<T> {
private final AtomicBoolean executionCancelled = new AtomicBoolean(false);
private final List<CancellableTask<?>> childTasks = new LinkedList<>();
private CancellableTask<?> parentTask = null;
/**
* Set the execution for this task and all its (recursive) children to be canceled
*/
protected void cancelExecution() {
// is this branch already canceled? Then do nothing
if (this.executionCancelled.getAndSet(true)) return;
this.cancelChildExecution();
}
private void cancelChildExecution() {
synchronized (this.childTasks) {
for (var childTask : childTasks) {
// no need to cancel a branch that is already finished
if (!childTask.isDone()) {
childTask.cancelExecution();
}
}
}
}
private void cancelChildExecutionAfter(CancellableTask<?> checkpointTask) {
boolean reachedCheckpoint = false;
int i = 0;
for (var childTask : childTasks) {
if (!reachedCheckpoint) {
reachedCheckpoint = childTask == checkpointTask;
}
else {
// no need to cancel a branch that is already finished
if (!childTask.isDone()) {
childTask.cancelExecution();
}
i++;
}
}
System.out.println("Skipped " + i + " younger siblings");
}
protected void cancelSiblingTasks() {
if (this.parentTask != null) {
boolean thisWasCancelledBefore = this.executionCancelled.get();
this.parentTask.cancelChildExecution();
this.executionCancelled.set(thisWasCancelledBefore);
}
}
public void cancelYoungerSiblingTasks() {
if (this.parentTask != null) {
this.parentTask.cancelChildExecutionAfter(this);
}
}
public Boolean isExecutionCancelled() {
return executionCancelled.get();
}
public void addChildTask(CancellableTask<?> childTask) {
this.childTasks.add(childTask);
childTask.setParentTask(this);
if (this.executionCancelled.get()) {
childTask.executionCancelled.set(true);
}
}
private void setParentTask(CancellableTask<?> parentTask) {
this.parentTask = parentTask;
}
}
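How a concrete task is meant to plug into this cancellation tree is not shown in the diff. A minimal, hypothetical subclass (names invented for illustration, assumed to live in the same package so the protected members are visible) registers its children via addChildTask before forking them and checks isExecutionCancelled() to bail out early:

// Hypothetical example, not part of the compiler sources.
class SearchBranchTask extends CancellableTask<Integer> {
    private final int depth;

    SearchBranchTask(int depth) {
        this.depth = depth;
    }

    @Override
    protected Integer compute() {
        // A cancelled branch stops recursing and contributes nothing.
        if (isExecutionCancelled()) return 0;
        if (depth == 0) {
            // Pretend this leaf is good enough: prune the siblings created after it.
            cancelYoungerSiblingTasks();
            return 1;
        }
        SearchBranchTask left = new SearchBranchTask(depth - 1);
        SearchBranchTask right = new SearchBranchTask(depth - 1);
        // Registering the children is what makes them reachable for cancellation.
        addChildTask(left);
        addChildTask(right);
        left.fork();
        right.fork();
        return left.join() + right.join();
    }
}

Submitting new SearchBranchTask(3) to a ForkJoinPool then behaves like a normal RecursiveTask, except that a node can cut off whole subtrees via cancelExecution(), cancelSiblingTasks() or cancelYoungerSiblingTasks().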


@@ -37,9 +37,6 @@ public class ConcurrentSetMergeTask<T> extends RecursiveTask<Set<T>> {
totalElements += list.get(i).size(); totalElements += list.get(i).size();
} }
System.out.println("ConcurrentSetMerge? -> " + (size <= LIST_THRESHOLD || totalElements < ELEMENT_THRESHOLD ? "true" : "false"));
// size will always be at least one // size will always be at least one
if (true || size <= LIST_THRESHOLD || totalElements < ELEMENT_THRESHOLD) { if (true || size <= LIST_THRESHOLD || totalElements < ELEMENT_THRESHOLD) {
Set<T> result = this.list.get(start); Set<T> result = this.list.get(start);


@@ -94,8 +94,8 @@ public class MartelliMontanariUnify implements IUnify {
// SUBST - Rule // SUBST - Rule
if(lhsType instanceof PlaceholderType) { if(lhsType instanceof PlaceholderType) {
mgu.add((PlaceholderType) lhsType, rhsType); mgu.add((PlaceholderType) lhsType, rhsType);
//PL 2018-04-01 nach checken, ob es richtig ist, dass keine Substitutionen uebergeben werden muessen. //PL 2018-04-01 nach checken, ob es richtig ist, dass keine Substitutionen uebergeben werden muessen.
termsList = termsList.stream().map(x -> mgu.apply(x)).collect(Collectors.toCollection(ArrayList::new)); termsList.replaceAll(mgu::apply);
idx = idx+1 == termsList.size() ? 0 : idx+1; idx = idx+1 == termsList.size() ? 0 : idx+1;
continue; continue;
} }
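
The change above swaps a stream-map-collect round trip for List.replaceAll, which rewrites the list in place instead of allocating a second ArrayList. A small standalone illustration of the equivalence (types simplified to String; the unifier is just a stand-in function, not the project's Unifier):

import java.util.ArrayList;
import java.util.List;
import java.util.function.UnaryOperator;
import java.util.stream.Collectors;

class ReplaceAllDemo {
    public static void main(String[] args) {
        UnaryOperator<String> mgu = s -> s.replace("a", "T");   // stand-in for mgu.apply
        List<String> termsList = new ArrayList<>(List.of("a < b", "c = a"));

        // Old style: builds a second ArrayList and reassigns the reference.
        List<String> copied = termsList.stream().map(mgu).collect(Collectors.toCollection(ArrayList::new));

        // New style: mutates the existing list, no temporary collection.
        termsList.replaceAll(mgu);

        System.out.println(copied.equals(termsList));   // true: same elements, fewer allocations
    }
}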

View File

@@ -1,5 +1,10 @@
package de.dhbwstuttgart.typeinference.unify; package de.dhbwstuttgart.typeinference.unify;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType; import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Set; import java.util.Set;
@@ -9,11 +14,10 @@ import java.util.concurrent.atomic.AtomicInteger;
/** /**
* Calculate unique placeholder names * Calculate unique placeholder names
*/ */
public class PlaceholderRegistry { public class PlaceholderRegistry implements ISerializableData {
private final Set<String> existingPlaceholders = ConcurrentHashMap.newKeySet(); private final Set<String> existingPlaceholders = ConcurrentHashMap.newKeySet();
private final AtomicInteger placeholderCount = new AtomicInteger(); private final AtomicInteger placeholderCount = new AtomicInteger();
public ArrayList<PlaceholderType> UnifyTypeFactory_PLACEHOLDERS = new ArrayList<>(); public ArrayList<PlaceholderType> UnifyTypeFactory_PLACEHOLDERS = new ArrayList<>();
/** /**
@@ -37,9 +41,18 @@ public class PlaceholderRegistry {
name = getUppercaseTokenFromInt(pc); name = getUppercaseTokenFromInt(pc);
} }
while (existingPlaceholders.contains(name)); while (existingPlaceholders.contains(name));
this.addPlaceholder(name);
return name; return name;
} }
public PlaceholderRegistry deepClone() {
PlaceholderRegistry pr2 = new PlaceholderRegistry();
this.existingPlaceholders.forEach(pr2::addPlaceholder);
pr2.UnifyTypeFactory_PLACEHOLDERS.addAll(this.UnifyTypeFactory_PLACEHOLDERS);
pr2.placeholderCount.set(this.placeholderCount.get());
return pr2;
}
/** /**
* Generate a token that consists of uppercase letters and contains the given prefix and suffix from the value i * Generate a token that consists of uppercase letters and contains the given prefix and suffix from the value i
* *
@@ -49,7 +62,7 @@ public class PlaceholderRegistry {
private String getUppercaseTokenFromInt(int i) { private String getUppercaseTokenFromInt(int i) {
StringBuilder sb = new StringBuilder(); StringBuilder sb = new StringBuilder();
while (i >= 0) { while (i >= 0) {
sb.append((char)(i % 26 + 97)); sb.append((char)(i % 26 + 65));
i = i / 26 - 1; i = i / 26 - 1;
} }
//sb.append(suffix); //sb.append(suffix);
@@ -60,4 +73,12 @@ public class PlaceholderRegistry {
public String toString() { public String toString() {
return this.existingPlaceholders.toString(); return this.existingPlaceholders.toString();
} }
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("ph", new SerialValue<>(new ArrayList<>(this.existingPlaceholders)));
serialized.put("factoryPh", SerialList.fromMapped(this.UnifyTypeFactory_PLACEHOLDERS, t -> t.toSerial(keyStorage)));
return serialized;
}
} }
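
The one-character change from 97 ('a') to 65 ('A') makes getUppercaseTokenFromInt emit the uppercase letters its documentation promises. A self-contained sketch of the same loop with a few sample values (note that digits are appended least-significant first, so the result is not reversed):

class TokenDemo {
    // Mirrors the loop in PlaceholderRegistry.getUppercaseTokenFromInt (sketch, not the original class).
    static String tokenFromInt(int i) {
        StringBuilder sb = new StringBuilder();
        while (i >= 0) {
            sb.append((char) (i % 26 + 65));   // 65 == 'A', so tokens are uppercase
            i = i / 26 - 1;                    // shift to the next base-26 digit
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        System.out.println(tokenFromInt(0));   // A
        System.out.println(tokenFromInt(25));  // Z
        System.out.println(tokenFromInt(26));  // AA
        System.out.println(tokenFromInt(27));  // BA  (least significant letter first)
    }
}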

View File

@@ -1,10 +1,12 @@
package de.dhbwstuttgart.typeinference.unify; package de.dhbwstuttgart.typeinference.unify;
import de.dhbwstuttgart.util.Logger;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.LinkedList; import java.util.LinkedList;
import java.util.List; import java.util.List;
import java.util.ListIterator;
import java.util.Optional; import java.util.Optional;
import java.util.Queue; import java.util.Queue;
import java.util.Set; import java.util.Set;
@@ -12,24 +14,16 @@ import java.util.Stack;
import java.util.function.Function; import java.util.function.Function;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.exceptions.DebugException; import de.dhbwstuttgart.exceptions.DebugException;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.syntaxtree.type.ExtendsWildcardType;
import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure; import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet; import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
import de.dhbwstuttgart.typeinference.unify.model.*; import de.dhbwstuttgart.typeinference.unify.model.*;
import de.dhbwstuttgart.typeinference.constraints.Constraint; import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.distributeVariance;
import java.io.FileWriter;
import java.io.IOException; import java.io.IOException;
import java.io.Writer; import java.io.Writer;
import java.io.OutputStreamWriter; import java.io.OutputStreamWriter;
import org.apache.commons.io.output.NullOutputStream;
/** /**
* Implementation of the type inference rules. * Implementation of the type inference rules.
* @author Florian Steurer * @author Florian Steurer
@@ -37,17 +31,17 @@ import org.apache.commons.io.output.NullOutputStream;
*/ */
public class RuleSet implements IRuleSet{ public class RuleSet implements IRuleSet{
Writer logFile; Logger logger;
final PlaceholderRegistry placeholderRegistry; final PlaceholderRegistry placeholderRegistry;
public RuleSet(PlaceholderRegistry placeholderRegistry) { public RuleSet(PlaceholderRegistry placeholderRegistry) {
super(); super();
logFile = OutputStreamWriter.nullWriter(); logger = Logger.NULL_LOGGER;
this.placeholderRegistry = placeholderRegistry; this.placeholderRegistry = placeholderRegistry;
} }
RuleSet(Writer logFile, PlaceholderRegistry placeholderRegistry) { RuleSet(Logger logger, PlaceholderRegistry placeholderRegistry) {
this.logFile = logFile; this.logger = logger;
this.placeholderRegistry = placeholderRegistry; this.placeholderRegistry = placeholderRegistry;
} }
@@ -300,8 +294,8 @@ public class RuleSet implements IRuleSet{
if(dFromFc == null || !dFromFc.getTypeParams().arePlaceholders() || dFromFc.getTypeParams().size() != cFromFc.getTypeParams().size()) if(dFromFc == null || !dFromFc.getTypeParams().arePlaceholders() || dFromFc.getTypeParams().size() != cFromFc.getTypeParams().size())
return Optional.empty(); return Optional.empty();
//System.out.println("cFromFc: " + cFromFc); //context.logger().info("cFromFc: " + cFromFc);
//System.out.println("dFromFc: " + dFromFc); //context.logger().info("dFromFc: " + dFromFc);
int[] pi = pi(cFromFc.getTypeParams(), dFromFc.getTypeParams()); int[] pi = pi(cFromFc.getTypeParams(), dFromFc.getTypeParams());
if(pi.length == 0) if(pi.length == 0)
@@ -510,17 +504,17 @@ public class RuleSet implements IRuleSet{
TypeParams typeDParams = typeD.getTypeParams(); TypeParams typeDParams = typeD.getTypeParams();
TypeParams typeDgenParams = typeDgen.getTypeParams(); TypeParams typeDgenParams = typeDgen.getTypeParams();
//System.out.println("Pair: " +pair); //context.logger().info("Pair: " +pair);
//System.out.println("typeD: " +typeD); //context.logger().info("typeD: " +typeD);
//System.out.println("typeDParams: " +typeDParams); //context.logger().info("typeDParams: " +typeDParams);
//System.out.println("typeDgen: " +typeD); //context.logger().info("typeDgen: " +typeD);
//System.out.println("typeDgenParams: " +typeDgenParams); //context.logger().info("typeDgenParams: " +typeDgenParams);
Unifier unif = Unifier.identity(); Unifier unif = Unifier.identity();
for(int i = 0; i < typeDParams.size(); i++) { for(int i = 0; i < typeDParams.size(); i++) {
//System.out.println("ADAPT" +typeDgenParams); //context.logger().info("ADAPT" +typeDgenParams);
if (typeDgenParams.get(i) instanceof PlaceholderType) if (typeDgenParams.get(i) instanceof PlaceholderType)
unif.add((PlaceholderType) typeDgenParams.get(i), typeDParams.get(i)); unif.add((PlaceholderType) typeDgenParams.get(i), typeDParams.get(i));
else System.out.println("ERROR"); else logger.exception(new Exception("ERROR in adapt rule: cannot add non placeholder type"));
} }
return Optional.of(new UnifyPair(unif.apply(newLhs), typeDs, PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair())); return Optional.of(new UnifyPair(unif.apply(newLhs), typeDs, PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
} }
@@ -655,15 +649,17 @@ public class RuleSet implements IRuleSet{
@Override @Override
public Optional<Set<UnifyPair>> subst(Set<UnifyPair> pairs, List<Set<Constraint<UnifyPair>>> oderConstraints) { public Optional<Set<UnifyPair>> subst(Set<UnifyPair> pairs, List<Set<Constraint<UnifyPair>>> oderConstraints) {
HashMap<UnifyType, Integer> typeMap = new HashMap<>(); // Statistically, typeMap will fill up quickly and resize multiple times. To reduce this, we start with a higher capacity
HashMap<UnifyType, Integer> typeMap = new HashMap<>(200);
Stack<UnifyType> occuringTypes = new Stack<>(); Stack<UnifyType> occuringTypes = new Stack<>();
occuringTypes.ensureCapacity(pairs.size() * 3);
for(UnifyPair pair : pairs) { for(UnifyPair pair : pairs) {
occuringTypes.push(pair.getLhsType()); occuringTypes.push(pair.getLhsType());
occuringTypes.push(pair.getRhsType()); occuringTypes.push(pair.getRhsType());
} }
while(!occuringTypes.isEmpty()) { while(!occuringTypes.isEmpty()) {
UnifyType t1 = occuringTypes.pop(); UnifyType t1 = occuringTypes.pop();
if(!typeMap.containsKey(t1)) if(!typeMap.containsKey(t1))
@@ -675,12 +671,12 @@ public class RuleSet implements IRuleSet{
if(t1 instanceof SuperType) if(t1 instanceof SuperType)
occuringTypes.push(((SuperType) t1).getSuperedType()); occuringTypes.push(((SuperType) t1).getSuperedType());
else else
t1.getTypeParams().forEach(x -> occuringTypes.push(x)); t1.getTypeParams().forEach(occuringTypes::push);
} }
Queue<UnifyPair> result1 = new LinkedList<UnifyPair>(pairs); LinkedList<UnifyPair> result1 = new LinkedList<UnifyPair>(pairs);
ArrayList<UnifyPair> result = new ArrayList<UnifyPair>(); ArrayList<UnifyPair> result = new ArrayList<UnifyPair>();
boolean applied = false; boolean applied = false;
while(!result1.isEmpty()) { while(!result1.isEmpty()) {
UnifyPair pair = result1.poll(); UnifyPair pair = result1.poll();
PlaceholderType lhsType = null; PlaceholderType lhsType = null;
@@ -698,19 +694,30 @@ public class RuleSet implements IRuleSet{
&& !((rhsType instanceof WildcardType) && ((WildcardType)rhsType).getWildcardedType().equals(lhsType))) //PL eigefuegt 2018-02-18 && !((rhsType instanceof WildcardType) && ((WildcardType)rhsType).getWildcardedType().equals(lhsType))) //PL eigefuegt 2018-02-18
{ {
Unifier uni = new Unifier(lhsType, rhsType); Unifier uni = new Unifier(lhsType, rhsType);
result = result.stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(ArrayList::new)); // apply unifier to result and result1 in place
result1 = result1.stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(LinkedList::new)); result.replaceAll(p -> uni.apply(pair, p));
ListIterator<UnifyPair> result1Iterator = result1.listIterator();
while (result1Iterator.hasNext()) {
UnifyPair x = result1Iterator.next();
result1Iterator.set(uni.apply(pair, x));
}
Function<? super Constraint<UnifyPair>,? extends Constraint<UnifyPair>> applyUni = b -> b.stream().map( Function<? super Constraint<UnifyPair>,? extends Constraint<UnifyPair>> applyUni = b -> b.stream().map(
x -> uni.apply(pair,x)).collect(Collectors.toCollection((b.getExtendConstraint() != null) x -> uni.apply(pair,x)).collect(Collectors.toCollection((b.getExtendConstraint() != null)
? () -> new Constraint<UnifyPair>( ? () -> new Constraint<UnifyPair>(
b.isInherited(), b.isInherited(),
b.isImplemented(), b.isImplemented(),
b.getExtendConstraint().stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(Constraint::new)), b.getExtendConstraint().createdMapped(x -> uni.apply(pair,x)),
b.getmethodSignatureConstraint().stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(HashSet::new))) b.getmethodSignatureConstraint().stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(HashSet::new)))
: () -> new Constraint<UnifyPair>(b.isInherited(), b.isImplemented()) : () -> new Constraint<UnifyPair>(b.isInherited(), b.isImplemented())
)); ));
oderConstraints.replaceAll(oc -> oc.stream().map(applyUni).collect(Collectors.toCollection(HashSet::new))); oderConstraints.replaceAll(oc -> {
HashSet<Constraint<UnifyPair>> mapped = new HashSet<>(oc.size());
for (var element : oc) {
mapped.add(applyUni.apply(element));
}
return mapped;
});
/* /*
oderConstraints = oderConstraints.stream().map( oderConstraints = oderConstraints.stream().map(
a -> a.stream().map(applyUni a -> a.stream().map(applyUni
@@ -864,14 +871,11 @@ public class RuleSet implements IRuleSet{
UnifyType r = x.getRhsType(); UnifyType r = x.getRhsType();
if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); } if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
} ); } );
try {
logFile.write("FUNgreater: " + pair + "\n"); logger.debug(() -> "FUNgreater: " + pair);
logFile.write("FUNred: " + result + "\n"); logger.debug(() -> "FUNred: " + result);
// logFile.flush();
}
catch (IOException e) {
System.out.println("logFile-Error");
}
return Optional.of(result); return Optional.of(result);
} }
@@ -937,8 +941,8 @@ public class RuleSet implements IRuleSet{
Set<UnifyPair> result = new HashSet<UnifyPair>(); Set<UnifyPair> result = new HashSet<UnifyPair>();
Integer variance = ((PlaceholderType)rhsType).getVariance(); int variance = ((PlaceholderType)rhsType).getVariance();
Integer inversVariance = distributeVariance.inverseVariance(variance); int inversVariance = distributeVariance.inverseVariance(variance);
UnifyType[] freshPlaceholders = new UnifyType[funNLhsType.getTypeParams().size()]; UnifyType[] freshPlaceholders = new UnifyType[funNLhsType.getTypeParams().size()];
for(int i = 0; i < freshPlaceholders.length-1; i++) { for(int i = 0; i < freshPlaceholders.length-1; i++) {
@@ -956,18 +960,14 @@ public class RuleSet implements IRuleSet{
result.add(new UnifyPair(rhsType, funNLhsType.setTypeParams(new TypeParams(freshPlaceholders)), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair())); result.add(new UnifyPair(rhsType, funNLhsType.setTypeParams(new TypeParams(freshPlaceholders)), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
result.stream().forEach(x -> { UnifyType l = x.getLhsType(); result.stream().forEach(x -> { UnifyType l = x.getLhsType();
if (l instanceof PlaceholderType) { ((PlaceholderType)l).disableWildcardtable(); } if (l instanceof PlaceholderType) { ((PlaceholderType)l).disableWildcardtable(); }
UnifyType r = x.getRhsType(); UnifyType r = x.getRhsType();
if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); } if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
} ); } );
try {
logFile.write("FUNgreater: " + pair + "\n"); logger.debug(() -> "FUNgreater: " + pair);
logFile.write("FUNgreater: " + result + "\n"); logger.debug(() -> "FUNgreater: " + result);
// logFile.flush();
}
catch (IOException e) {
System.out.println("lofFile-Error");
}
return Optional.of(result); return Optional.of(result);
} }
@@ -986,8 +986,8 @@ public class RuleSet implements IRuleSet{
Set<UnifyPair> result = new HashSet<UnifyPair>(); Set<UnifyPair> result = new HashSet<UnifyPair>();
Integer variance = ((PlaceholderType)lhsType).getVariance(); int variance = ((PlaceholderType)lhsType).getVariance();
Integer inversVariance = distributeVariance.inverseVariance(variance); int inversVariance = distributeVariance.inverseVariance(variance);
UnifyType[] freshPlaceholders = new UnifyType[funNRhsType.getTypeParams().size()]; UnifyType[] freshPlaceholders = new UnifyType[funNRhsType.getTypeParams().size()];
for(int i = 0; i < freshPlaceholders.length-1; i++) { for(int i = 0; i < freshPlaceholders.length-1; i++) {
@@ -1006,18 +1006,15 @@ public class RuleSet implements IRuleSet{
result.add(new UnifyPair(lhsType, funNRhsType.setTypeParams(new TypeParams(freshPlaceholders)), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair())); result.add(new UnifyPair(lhsType, funNRhsType.setTypeParams(new TypeParams(freshPlaceholders)), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
result.stream().forEach(x -> { UnifyType l = x.getLhsType(); result.stream().forEach(x -> { UnifyType l = x.getLhsType();
if (l instanceof PlaceholderType) { ((PlaceholderType)l).disableWildcardtable(); } if (l instanceof PlaceholderType) { ((PlaceholderType)l).disableWildcardtable(); }
UnifyType r = x.getRhsType(); UnifyType r = x.getRhsType();
if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); } if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
} ); } );
try {
logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNsmaller: " + result + "\n"); logger.debug(() -> "FUNgreater: " + pair);
// logFile.flush(); logger.debug(() -> "FUNsmaller: " + result);
}
catch (IOException e) {
System.out.println("lofFile-Error");
}
return Optional.of(result); return Optional.of(result);
} }
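
Throughout this file the try/catch logFile.write blocks become logger.debug(() -> ...) calls, so the message string is only built when the debug level is actually enabled. The real de.dhbwstuttgart.util.Logger API is not shown in this diff; a minimal sketch of the lazy-evaluation idea it appears to rely on, under that assumption:

import java.util.function.Supplier;

// Minimal sketch of a lazily evaluating debug method; the project's Logger API may differ.
class LazyLogger {
    private final boolean debugEnabled;
    LazyLogger(boolean debugEnabled) { this.debugEnabled = debugEnabled; }

    void debug(Supplier<String> message) {
        // The supplier is only invoked when the level is active, so expensive
        // toString() calls on large constraint sets are skipped otherwise.
        if (debugEnabled) System.out.println(message.get());
    }
    // Usage: logger.debug(() -> "FUNred: " + result);  // result.toString() only runs when needed
}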

View File

@@ -1,10 +1,7 @@
package de.dhbwstuttgart.typeinference.unify; package de.dhbwstuttgart.typeinference.unify;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.util.Logger; import de.dhbwstuttgart.util.Logger;
import java.io.IOException; import java.io.IOException;
import java.io.Writer;
import java.lang.reflect.Type;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletableFuture;
@@ -32,17 +29,12 @@ public class TypeUnify {
* unify parallel ohne result modell * unify parallel ohne result modell
*/ */
public static Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) { public static Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
ForkJoinPool pool = TypeUnify.createThreadPool(); ForkJoinPool pool = TypeUnify.createThreadPool(unifyContext.logger());
UnifyContext context = unifyContext.newWithParallel(true).newWithExecutor(pool); UnifyContext context = unifyContext.newWithParallel(true).newWithExecutor(pool);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, context, 0); TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, context, 0);
Set<Set<UnifyPair>> res = joinFuture(unifyTask.compute()); Set<Set<UnifyPair>> res = joinFuture(unifyTask.compute());
try {
unifyContext.logFile().write("\nnoShortendElements: " + TypeUnifyTask.noShortendElements + "\n"); unifyContext.logger().debug("\nnoShortendElements: " + TypeUnifyTask.noShortendElements + "\n");
// logFile.flush();
}
catch (IOException e) {
System.err.println("no log-File");
}
return res; return res;
} }
@@ -50,28 +42,23 @@ public class TypeUnify {
* unify asynchron mit Rückgabe UnifyResultModel ohne dass alle results gesammelt sind * unify asynchron mit Rückgabe UnifyResultModel ohne dass alle results gesammelt sind
*/ */
public static UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) { public static UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
ForkJoinPool pool = TypeUnify.createThreadPool(); ForkJoinPool pool = TypeUnify.createThreadPool(unifyContext.logger());
UnifyContext context = unifyContext.newWithExecutor(pool); UnifyContext context = unifyContext.newWithExecutor(pool);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, context, 0); TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, context, 0);
unifyTask.compute(); unifyTask.compute();
return unifyContext.resultModel(); return unifyContext.resultModel();
} }
/** /**
* unify parallel mit Rückgabe UnifyResultModel nachdem alle results gesammelt sind * unify parallel mit Rückgabe UnifyResultModel nachdem alle results gesammelt sind
*/ */
public static Set<Set<UnifyPair>> unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) { public static Set<Set<UnifyPair>> unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
ForkJoinPool pool = TypeUnify.createThreadPool(); ForkJoinPool pool = TypeUnify.createThreadPool(unifyContext.logger());
UnifyContext context = unifyContext.newWithParallel(true).newWithExecutor(pool); UnifyContext context = unifyContext.newWithParallel(true).newWithExecutor(pool);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, context, 0); TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, context, 0);
var result = joinFuture(unifyTask.compute()); var result = joinFuture(unifyTask.compute());
try {
unifyContext.logFile().write("\nnoShortendElements: " + TypeUnifyTask.noShortendElements +"\n"); unifyContext.logger().debug("\nnoShortendElements: " + TypeUnifyTask.noShortendElements + "\n");
// logFile.flush();
}
catch (IOException e) {
System.err.println("no log-File");
}
return result; return result;
} }
@@ -89,18 +76,12 @@ public class TypeUnify {
public static Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) { public static Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, unifyContext.newWithParallel(false), 0); TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, unifyContext.newWithParallel(false), 0);
Set<Set<UnifyPair>> res = joinFuture(unifyTask.compute()); Set<Set<UnifyPair>> res = joinFuture(unifyTask.compute());
try { unifyContext.logger().debug("\nnoShortendElements: " + TypeUnifyTask.noShortendElements +"\n");
unifyContext.logFile().write("\nnoShortendElements: " + TypeUnifyTask.noShortendElements +"\n");
// logFile.flush();
}
catch (IOException e) {
System.err.println("no log-File");
}
return res; return res;
} }
private static ForkJoinPool createThreadPool() { private static ForkJoinPool createThreadPool(Logger logger) {
Logger.print("Available processors: " + Runtime.getRuntime().availableProcessors()); logger.info("Available processors: " + Runtime.getRuntime().availableProcessors());
return new ForkJoinPool( return new ForkJoinPool(
Runtime.getRuntime().availableProcessors(), Runtime.getRuntime().availableProcessors(),
ForkJoinPool.defaultForkJoinWorkerThreadFactory, ForkJoinPool.defaultForkJoinWorkerThreadFactory,
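
createThreadPool now takes the Logger instead of a static print; the remaining constructor arguments are cut off in this excerpt and are not reproduced here. For reference only, the standard four-argument ForkJoinPool constructor looks like this (the handler and asyncMode values below are placeholders, not necessarily what the project passes):

import java.util.concurrent.ForkJoinPool;

class PoolSketch {
    static ForkJoinPool createThreadPool() {
        // (parallelism, thread factory, uncaught-exception handler, asyncMode)
        return new ForkJoinPool(
                Runtime.getRuntime().availableProcessors(),
                ForkJoinPool.defaultForkJoinWorkerThreadFactory,
                null,    // placeholder: no dedicated UncaughtExceptionHandler
                false);  // placeholder: LIFO work queues (the default)
    }
}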

View File

@@ -34,7 +34,7 @@ public class TypeUnify2Task extends TypeUnifyTask {
@Override @Override
public CompletableFuture<Set<Set<UnifyPair>>> compute() { public CompletableFuture<Set<Set<UnifyPair>>> compute() {
if (one) { if (one) {
System.out.println("two"); context.logger().info("two");
} }
one = true; one = true;
CompletableFuture<Set<Set<UnifyPair>>> res = CompletableFuture<Set<Set<UnifyPair>>> res =
@@ -45,7 +45,7 @@ public class TypeUnify2Task extends TypeUnifyTask {
*/ */
//writeLog("xxx"); //writeLog("xxx");
//noOfThread--; //noOfThread--;
if (this.myIsCancelled()) { if (this.isExecutionCancelled()) {
return CompletableFuture.completedFuture(new HashSet<>()); return CompletableFuture.completedFuture(new HashSet<>());
} else { } else {
return res; return res;
@@ -53,12 +53,6 @@ public class TypeUnify2Task extends TypeUnifyTask {
} }
public void closeLogFile() { public void closeLogFile() {
context.logger().close();
try {
context.logFile().close();
} catch (IOException ioE) {
System.err.println("no log-File");
}
} }
} }

View File

@@ -3,6 +3,7 @@ package de.dhbwstuttgart.typeinference.unify;
import de.dhbwstuttgart.exceptions.TypeinferenceException; import de.dhbwstuttgart.exceptions.TypeinferenceException;
import de.dhbwstuttgart.parser.NullToken; import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.server.ServerTaskLogger;
import de.dhbwstuttgart.typeinference.constraints.Constraint; import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.cartesianproduct.VarianceCase; import de.dhbwstuttgart.typeinference.unify.cartesianproduct.VarianceCase;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure; import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
@@ -26,11 +27,7 @@ import de.dhbwstuttgart.typeinference.unify.model.WildcardType;
import de.dhbwstuttgart.util.Logger; import de.dhbwstuttgart.util.Logger;
import de.dhbwstuttgart.util.Pair; import de.dhbwstuttgart.util.Pair;
import de.dhbwstuttgart.util.Tuple; import de.dhbwstuttgart.util.Tuple;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Serial; import java.io.Serial;
import java.io.Writer;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collection; import java.util.Collection;
@@ -45,13 +42,10 @@ import java.util.Optional;
import java.util.Set; import java.util.Set;
import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.RecursiveTask; import java.util.concurrent.RecursiveTask;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.BiFunction; import java.util.function.BiFunction;
import java.util.function.BinaryOperator; import java.util.function.BinaryOperator;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import org.apache.commons.io.output.NullOutputStream;
/** /**
@@ -59,14 +53,14 @@ import org.apache.commons.io.output.NullOutputStream;
* *
* @author Florian Steurer * @author Florian Steurer
*/ */
public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<UnifyPair>>>> { public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<UnifyPair>>>> {
@Serial @Serial
private static final long serialVersionUID = 1L; private static final long serialVersionUID = 1L;
private static int i = 0; private static int i = 0;
private final boolean printtag = false; private final boolean printtag = false;
final UnifyContext context; public final UnifyContext context;
/** /**
* Element, das aus dem nextSet den Gleichunen dieses Threads hinzugefuegt wurde * Element, das aus dem nextSet den Gleichunen dieses Threads hinzugefuegt wurde
@@ -108,21 +102,19 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
int rekTiefeField; int rekTiefeField;
Integer nOfUnify = 0; int nOfUnify = 0;
Integer noUndefPair = 0; int noUndefPair = 0;
Integer noAllErasedElements = 0; int noAllErasedElements = 0;
// some statistics for local output (they will not make sense when executed on the server) // some statistics for local output (they will not make sense when executed on the server)
public static int noBacktracking; public static int noBacktracking;
public static int noShortendElements; public static int noShortendElements;
public static int noou = 0; public static int noou = 0;
Boolean myIsCanceled = false;
public TypeUnifyTask(UnifyContext context) { public TypeUnifyTask(UnifyContext context) {
this.context = context.newWithLogFile(new OutputStreamWriter(NullOutputStream.INSTANCE)); this.context = context.newWithLogger(Logger.NULL_LOGGER);
rules = new RuleSet(context.placeholderRegistry()); rules = new RuleSet(context.placeholderRegistry());
} }
@@ -157,24 +149,19 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
this.fc = fc; this.fc = fc;
this.oup = new OrderingUnifyPair(fc, context); this.oup = new OrderingUnifyPair(fc, context);
Writer logFileWriter = OutputStreamWriter.nullWriter(); this.context = (context.logger() instanceof ServerTaskLogger) ? context : context.newWithLogger(
if (context.log()) { Logger.forFile(
try { System.getProperty("user.dir") + "/logFiles/" + "Thread",
logFileWriter = new FileWriter(System.getProperty("user.dir") + "/logFiles/" + "Thread"); "Unify"
logFileWriter.write(""); )
} catch (IOException e) { );
System.err.println("log-File nicht vorhanden");
}
}
this.context = context.newWithLogFile(logFileWriter);
/*Abbruchtest /*Abbruchtest
if (thNo > 10) { if (thNo > 10) {
System.out.println("cancel"); context.logger().info("cancel");
usedTasks.cancel(); usedTasks.cancel();
writeLog(nOfUnify.toString() + "cancel"); writeLog(nOfUnify.toString() + "cancel");
System.out.println("cancel"); context.logger().info("cancel");
try { try {
logFile.write("Abbruch"); logFile.write("Abbruch");
} }
@@ -183,7 +170,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
} }
} }
*/ */
rules = new RuleSet(context.logFile(), context.placeholderRegistry()); rules = new RuleSet(context.logger(), context.placeholderRegistry());
this.rekTiefeField = rekTiefe; this.rekTiefeField = rekTiefe;
context.usedTasks().add(this); context.usedTasks().add(this);
} }
@@ -220,17 +207,10 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
} }
} }
*/ */
void myCancel(Boolean b) {
myIsCanceled = true;
}
public boolean myIsCancelled() {
return myIsCanceled;
}
public CompletableFuture<Set<Set<UnifyPair>>> compute() { public CompletableFuture<Set<Set<UnifyPair>>> compute() {
if (one) { if (one) {
System.out.println("two"); context.logger().info("two");
} }
one = true; one = true;
Set<UnifyPair> neweq = new HashSet<>(eq); Set<UnifyPair> neweq = new HashSet<>(eq);
@@ -247,11 +227,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
.collect(Collectors.toCollection(ArrayList::new)); .collect(Collectors.toCollection(ArrayList::new));
var unifyFuture = unify(neweq, remainingOderconstraints, fc, context.parallel(), rekTiefeField, methodSignatureConstraint); var unifyFuture = unify(neweq, remainingOderconstraints, fc, context.parallel(), rekTiefeField, methodSignatureConstraint);
return unifyFuture.thenApply(res -> { return unifyFuture.thenApply(res -> {
try { context.logger().close();
context.logFile().close();
} catch (IOException ioE) {
System.err.println("no log-File");
}
if (isUndefinedPairSetSet(res)) { if (isUndefinedPairSetSet(res)) {
//fuer debug-Zwecke //fuer debug-Zwecke
ArrayList<ArrayList<UnifyPair>> al = res.stream() ArrayList<ArrayList<UnifyPair>> al = res.stream()
@@ -260,7 +236,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
throw new TypeinferenceException("Unresolved constraints: " + res, new NullToken()); //return new HashSet<>(); throw new TypeinferenceException("Unresolved constraints: " + res, new NullToken()); //return new HashSet<>();
} }
if (this.myIsCancelled()) { if (this.isExecutionCancelled()) {
return new HashSet<>(); return new HashSet<>();
} }
@@ -292,19 +268,19 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
// ).collect(Collectors.toCollection(HashSet::new)); // ).collect(Collectors.toCollection(HashSet::new));
//writeLog(nOfUnify.toString() + " AA: " + aas.toString()); //writeLog(nOfUnify.toString() + " AA: " + aas.toString());
//if (aas.isEmpty()) { //if (aas.isEmpty()) {
// System.out.println(""); // context.logger().info("");
//} //}
//.collect(Collectors.toCollection(HashSet::new))); //.collect(Collectors.toCollection(HashSet::new)));
if (this.myIsCancelled()) { if (this.isExecutionCancelled()) {
return CompletableFuture.completedFuture(new HashSet<>()); return CompletableFuture.completedFuture(new HashSet<>());
} }
rekTiefe++; rekTiefe++;
nOfUnify++; nOfUnify++;
writeLog(nOfUnify.toString() + " Unifikation: " + eq.toString()); context.logger().debug(() -> nOfUnify + " Unifikation: " + eq.toString());
writeLog(nOfUnify.toString() + " Oderconstraints: " + oderConstraints.toString()); context.logger().debug(() -> nOfUnify + " Oderconstraints: " + oderConstraints.toString());
/* /*
* Variancen auf alle Gleichungen vererben * Variancen auf alle Gleichungen vererben
@@ -315,10 +291,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
/* /*
* ? extends ? extends Theta rausfiltern * ? extends ? extends Theta rausfiltern
*/ */
Set<UnifyPair> doubleExt = eq.stream().filter(UnifyPair::wrongWildcard).map(x -> { Set<UnifyPair> doubleExt = eq.stream().filter(UnifyPair::wrongWildcard).peek(UnifyPair::setUndefinedPair)
x.setUndefinedPair();
return x;
})
.collect(Collectors.toCollection(HashSet::new)); .collect(Collectors.toCollection(HashSet::new));
if (!doubleExt.isEmpty()) { if (!doubleExt.isEmpty()) {
Set<Set<UnifyPair>> ret = new HashSet<>(); Set<Set<UnifyPair>> ret = new HashSet<>();
@@ -329,15 +302,10 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
/* /*
* Occurs-Check durchfuehren * Occurs-Check durchfuehren
*/ */
Set<UnifyPair> ocurrPairs = eq.stream().filter(x -> { Set<UnifyPair> ocurrPairs = TypeUnifyTaskHelper.occursCheck(eq);
UnifyType lhs, rhs;
return (lhs = x.getLhsType()) instanceof PlaceholderType Set<UnifyPair> finalOcurrPairs = ocurrPairs;
&& !((rhs = x.getRhsType()) instanceof PlaceholderType) context.logger().debug(() -> "ocurrPairs: " + finalOcurrPairs);
&& rhs.getTypeParams().occurs((PlaceholderType) lhs);
})
.peek(UnifyPair::setUndefinedPair)
.collect(Collectors.toCollection(HashSet::new));
writeLog("ocurrPairs: " + ocurrPairs);
if (!ocurrPairs.isEmpty()) { if (!ocurrPairs.isEmpty()) {
Set<Set<UnifyPair>> ret = new HashSet<>(); Set<Set<UnifyPair>> ret = new HashSet<>();
ret.add(ocurrPairs); ret.add(ocurrPairs);
@@ -358,15 +326,9 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
/* In commit dfd91b5f8b7fca1cb5f302eec4b0ba3330271c9b eingefuegt ANFANG */ /* In commit dfd91b5f8b7fca1cb5f302eec4b0ba3330271c9b eingefuegt ANFANG */
Set<UnifyPair> occurcheck = new HashSet<>(eq0); Set<UnifyPair> occurcheck = new HashSet<>(eq0);
occurcheck.removeAll(eq0Prime); occurcheck.removeAll(eq0Prime);
ocurrPairs = occurcheck.stream().filter(x -> { ocurrPairs = TypeUnifyTaskHelper.occursCheck(occurcheck);
UnifyType lhs, rhs; Set<UnifyPair> finalOcurrPairs1 = ocurrPairs;
return (lhs = x.getLhsType()) instanceof PlaceholderType context.logger().debug(() -> "ocurrPairs: " + finalOcurrPairs1);
&& !((rhs = x.getRhsType()) instanceof PlaceholderType)
&& rhs.getTypeParams().occurs((PlaceholderType) lhs);
})
.peek(UnifyPair::setUndefinedPair)
.collect(Collectors.toCollection(HashSet::new));
writeLog("ocurrPairs: " + ocurrPairs);
if (!ocurrPairs.isEmpty()) { if (!ocurrPairs.isEmpty()) {
Set<Set<UnifyPair>> ret = new HashSet<>(); Set<Set<UnifyPair>> ret = new HashSet<>();
ret.add(ocurrPairs); ret.add(ocurrPairs);
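
Both occurs-check sites now delegate to TypeUnifyTaskHelper.occursCheck. Its body is not part of this diff, but the inline code removed above shows what it has to do; a sketch reconstructed from that removed code, assuming TypeUnifyTask's imports (java.util.*, java.util.stream.Collectors) and with the placement and exact signature being assumptions:

// Sketch of the extracted helper, reconstructed from the inlined code removed above.
static Set<UnifyPair> occursCheck(Set<UnifyPair> pairs) {
    return pairs.stream().filter(x -> {
        UnifyType lhs = x.getLhsType();
        UnifyType rhs = x.getRhsType();
        // a =. T fails the occurs check if the placeholder a appears inside T
        return lhs instanceof PlaceholderType
                && !(rhs instanceof PlaceholderType)
                && rhs.getTypeParams().occurs((PlaceholderType) lhs);
    })
    .peek(UnifyPair::setUndefinedPair)               // mark offending pairs as undefined
    .collect(Collectors.toCollection(HashSet::new));
}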
@@ -378,8 +340,8 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
eq0.forEach(UnifyPair::disableCondWildcards); eq0.forEach(UnifyPair::disableCondWildcards);
writeLog(nOfUnify.toString() + " Unifikation nach applyTypeUnificationRules: " + eq.toString()); context.logger().debug(() ->nOfUnify + " Unifikation nach applyTypeUnificationRules: " + eq.toString());
writeLog(nOfUnify.toString() + " Oderconstraints nach applyTypeUnificationRules: " + oderConstraints.toString()); context.logger().debug(() -> nOfUnify + " Oderconstraints nach applyTypeUnificationRules: " + oderConstraints.toString());
/* /*
* Step 2 and 3: Create a subset eq1s of pairs where both sides are TPH and eq2s of the other pairs * Step 2 and 3: Create a subset eq1s of pairs where both sides are TPH and eq2s of the other pairs
@@ -399,7 +361,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
// cartesian product of the sets created by pattern matching. // cartesian product of the sets created by pattern matching.
List<Set<? extends Set<UnifyPair>>> topLevelSets = new ArrayList<>(); List<Set<? extends Set<UnifyPair>>> topLevelSets = new ArrayList<>();
//System.out.println(eq2s); //context.logger().info(eq2s);
if (!eq1s.isEmpty()) { // Do not add empty sets or the cartesian product will always be empty. if (!eq1s.isEmpty()) { // Do not add empty sets or the cartesian product will always be empty.
Set<Set<UnifyPair>> wrap = new HashSet<>(); Set<Set<UnifyPair>> wrap = new HashSet<>();
@@ -423,7 +385,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
// Sets that originate from pair pattern matching // Sets that originate from pair pattern matching
// Sets of the "second level" // Sets of the "second level"
Set<UnifyPair> undefinedPairs = new HashSet<>(); Set<UnifyPair> undefinedPairs = new HashSet<>();
if (printtag) System.out.println("eq2s " + eq2s); if (printtag) context.logger().info("eq2s " + eq2s);
//writeLog("BufferSet: " + bufferSet.toString()+"\n"); //writeLog("BufferSet: " + bufferSet.toString()+"\n");
List<Set<Constraint<UnifyPair>>> oderConstraintsOutput = new ArrayList<>();//new ArrayList<>(oderConstraints); List<Set<Constraint<UnifyPair>>> oderConstraintsOutput = new ArrayList<>();//new ArrayList<>(oderConstraints);
Set<Set<Set<? extends Set<UnifyPair>>>> secondLevelSets = calculatePairSets(eq2s, oderConstraints, fc, undefinedPairs, oderConstraintsOutput); Set<Set<Set<? extends Set<UnifyPair>>>> secondLevelSets = calculatePairSets(eq2s, oderConstraints, fc, undefinedPairs, oderConstraintsOutput);
@@ -431,21 +393,21 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
//nicht ausgewertet Faculty Beispiel im 1. Schritt //nicht ausgewertet Faculty Beispiel im 1. Schritt
//PL 2017-10-03 geloest, muesste noch mit FCs mit kleineren //PL 2017-10-03 geloest, muesste noch mit FCs mit kleineren
//Typen getestet werden. //Typen getestet werden.
writeLog(nOfUnify.toString() + " Oderconstraints2: " + oderConstraintsOutput.toString()); context.logger().debug(() -> nOfUnify + " Oderconstraints2: " + oderConstraintsOutput.toString());
if (printtag) System.out.println("secondLevelSets:" + secondLevelSets); if (printtag) context.logger().info("secondLevelSets:" + secondLevelSets);
// If pairs occured that did not match one of the cartesian product cases, // If pairs occured that did not match one of the cartesian product cases,
// those pairs are contradictory and the unification is impossible. // those pairs are contradictory and the unification is impossible.
if (!undefinedPairs.isEmpty()) { if (!undefinedPairs.isEmpty()) {
noUndefPair++; noUndefPair++;
for (UnifyPair up : undefinedPairs) { for (UnifyPair up : undefinedPairs) {
writeLog(noUndefPair.toString() + " UndefinedPairs; " + up); context.logger().debug(() -> noUndefPair + " UndefinedPairs; " + up);
writeLog("BasePair; " + up.getBasePair()); context.logger().debug(() -> "BasePair; " + up.getBasePair());
} }
Set<Set<UnifyPair>> error = new HashSet<>(); Set<Set<UnifyPair>> error = new HashSet<>();
undefinedPairs = undefinedPairs.stream().peek(UnifyPair::setUndefinedPair) undefinedPairs = undefinedPairs.stream().peek(UnifyPair::setUndefinedPair)
.collect(Collectors.toCollection(HashSet::new)); .collect(Collectors.toCollection(HashSet::new));
error.add(undefinedPairs); error.add(undefinedPairs);
undefinedPairs.forEach(x -> writeLog("AllSubst: " + x.getAllSubstitutions().toString())); undefinedPairs.forEach(x -> context.logger().debug(() -> "AllSubst: " + x.getAllSubstitutions().toString()));
return CompletableFuture.completedFuture(error); return CompletableFuture.completedFuture(error);
} }
@@ -455,16 +417,16 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
// Alternative: Sub cartesian products of the second level (pattern matched) sets // Alternative: Sub cartesian products of the second level (pattern matched) sets
// "the big (x)" // "the big (x)"
/* for(Set<Set<Set<UnifyPair>>> secondLevelSet : secondLevelSets) { /* for(Set<Set<Set<UnifyPair>>> secondLevelSet : secondLevelSets) {
//System.out.println("secondLevelSet "+secondLevelSet.size()); //context.logger().info("secondLevelSet "+secondLevelSet.size());
List<Set<Set<UnifyPair>>> secondLevelSetList = new ArrayList<>(secondLevelSet); List<Set<Set<UnifyPair>>> secondLevelSetList = new ArrayList<>(secondLevelSet);
Set<List<Set<UnifyPair>>> cartResult = setOps.cartesianProduct(secondLevelSetList); Set<List<Set<UnifyPair>>> cartResult = setOps.cartesianProduct(secondLevelSetList);
//System.out.println("CardResult: "+cartResult.size()); //context.logger().info("CardResult: "+cartResult.size());
// Flatten and add to top level sets // Flatten and add to top level sets
Set<Set<UnifyPair>> flat = new HashSet<>(); Set<Set<UnifyPair>> flat = new HashSet<>();
int j = 0; int j = 0;
for(List<Set<UnifyPair>> s : cartResult) { for(List<Set<UnifyPair>> s : cartResult) {
j++; j++;
//System.out.println("s from CardResult: "+cartResult.size() + " " + j); //context.logger().info("s from CardResult: "+cartResult.size() + " " + j);
Set<UnifyPair> flat1 = new HashSet<>(); Set<UnifyPair> flat1 = new HashSet<>();
for(Set<UnifyPair> s1 : s) for(Set<UnifyPair> s1 : s)
flat1.addAll(s1); flat1.addAll(s1);
@@ -478,8 +440,8 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
for (Set<Set<? extends Set<UnifyPair>>> secondLevelSet : secondLevelSets) { for (Set<Set<? extends Set<UnifyPair>>> secondLevelSet : secondLevelSets) {
topLevelSets.addAll(secondLevelSet); topLevelSets.addAll(secondLevelSet);
} }
//System.out.println(topLevelSets); //context.logger().info(topLevelSets);
//System.out.println(); //context.logger().info();
//Aufruf von computeCartesianRecursive ANFANG //Aufruf von computeCartesianRecursive ANFANG
@@ -501,7 +463,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
// .collect(Collectors.toCollection(HashSet::new)); // .collect(Collectors.toCollection(HashSet::new));
//Muss auskommentiert werden, wenn computeCartesianRecursive ENDE //Muss auskommentiert werden, wenn computeCartesianRecursive ENDE
if (this.myIsCancelled()) { if (this.isExecutionCancelled()) {
return CompletableFuture.completedFuture(new HashSet<>()); return CompletableFuture.completedFuture(new HashSet<>());
} }
@@ -520,18 +482,18 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
* Step 5: Substitution * Step 5: Substitution
*/ */
//writeLog("vor Subst: " + eqPrime); //writeLog("vor Subst: " + eqPrime);
writeLog("vor Subst: " + oderConstraints); context.logger().debug(() -> "vor Subst: " + oderConstraints);
String ocString = oderConstraints.toString(); String ocString = oderConstraints.toString();
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints); List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
Optional<Set<UnifyPair>> eqPrimePrime = rules.subst(eqPrime, newOderConstraints); Optional<Set<UnifyPair>> eqPrimePrime = rules.subst(eqPrime, newOderConstraints);
Set<Set<UnifyPair>> unifyres1 = null; Set<Set<UnifyPair>> unifyres1 = null;
Set<Set<UnifyPair>> unifyres2 = null; Set<Set<UnifyPair>> unifyres2 = null;
if (!ocString.equals(newOderConstraints.toString())) if (!ocString.equals(newOderConstraints.toString()))
writeLog("nach Subst: " + newOderConstraints); context.logger().debug(() -> "nach Subst: " + newOderConstraints);
{// sequentiell (Step 6b is included) {// sequentiell (Step 6b is included)
if (printtag) System.out.println("nextStep: " + eqPrimePrime); if (printtag) context.logger().info("nextStep: " + eqPrimePrime);
if (eqPrime.equals(eq) && eqPrimePrime.isEmpty() if (eqPrime.equals(eq) && eqPrimePrime.isEmpty()
&& oderConstraints.isEmpty()) { //PL 2017-09-29 //(!eqPrimePrime.isPresent()) auskommentiert und durch && oderConstraints.isEmpty()) { //PL 2017-09-29 //(!eqPrimePrime.isPresent()) auskommentiert und durch
//PL 2017-09-29 dies ersetzt //(!eqPrimePrime.isPresent()) //PL 2017-09-29 dies ersetzt //(!eqPrimePrime.isPresent())
@@ -550,12 +512,12 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
return eqPrimePrimeSet; return eqPrimePrimeSet;
}); });
if (finalresult && isSolvedForm(eqPrime)) { if (finalresult && isSolvedForm(eqPrime)) {
writeLog("eqPrime:" + eqPrime.toString() + "\n"); context.logger().debug(() -> "eqPrime:" + eqPrime.toString() + "\n");
/* methodconstraintsets werden zum Ergebnis hinzugefuegt /* methodconstraintsets werden zum Ergebnis hinzugefuegt
* Anfang * Anfang
*/ */
//System.out.println("methodSignatureConstraint Return: " + methodSignatureConstraint + "\n"); //context.logger().info("methodSignatureConstraint Return: " + methodSignatureConstraint + "\n");
eqPrimePrimeSetFuture = eqPrimePrimeSetFuture.thenApply(eqPrimePrimeSet -> { eqPrimePrimeSetFuture = eqPrimePrimeSetFuture.thenApply(eqPrimePrimeSet -> {
eqPrimePrimeSet.forEach(x -> x.addAll(methodSignatureConstraint)); eqPrimePrimeSet.forEach(x -> x.addAll(methodSignatureConstraint));
@@ -606,7 +568,8 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
return eqPrimePrimeSetFuture.thenApply(eqPrimePrimeSet -> { return eqPrimePrimeSetFuture.thenApply(eqPrimePrimeSet -> {
eqPrimePrimeSet = eqPrimePrimeSet.stream().filter(x -> isSolvedForm(x) || this.isUndefinedPairSet(x)).collect(Collectors.toCollection(HashSet::new)); eqPrimePrimeSet = eqPrimePrimeSet.stream().filter(x -> isSolvedForm(x) || this.isUndefinedPairSet(x)).collect(Collectors.toCollection(HashSet::new));
if (!eqPrimePrimeSet.isEmpty() && !isUndefinedPairSetSet(eqPrimePrimeSet)) { if (!eqPrimePrimeSet.isEmpty() && !isUndefinedPairSetSet(eqPrimePrimeSet)) {
writeLog("Result1 " + eqPrimePrimeSet); Set<Set<UnifyPair>> finalEqPrimePrimeSet = eqPrimePrimeSet;
context.logger().debug(() -> "Result1 " + finalEqPrimePrimeSet);
} }
return eqPrimePrimeSet; return eqPrimePrimeSet;
}); });
@@ -678,7 +641,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
} else { } else {
//Varianz-Bestimmung Oder-Constraints //Varianz-Bestimmung Oder-Constraints
if (printtag) { if (printtag) {
System.out.println("nextSetasList " + nextSetAsList); context.logger().info("nextSetasList " + nextSetAsList);
} }
variance = TypeUnifyTaskHelper.calculateOderConstraintVariance(nextSetAsList); variance = TypeUnifyTaskHelper.calculateOderConstraintVariance(nextSetAsList);
} }
@@ -694,14 +657,16 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
*/ */
Set<UnifyPair> sameEqSet = new HashSet<>(); Set<UnifyPair> sameEqSet = new HashSet<>();
//optOrigPair enthaelt ggf. das Paar a = ty \in nextSet //optOrigPair enthaelt ggf. das Paar a = ty \in nextSet
Optional<UnifyPair> optOrigPair = Optional.empty(); Optional<UnifyPair> optOrigPair;
if (!oderConstraint) { if (!oderConstraint) {
optOrigPair = TypeUnifyTaskHelper.findEqualityConstrainedUnifyPair(nextSetElement); optOrigPair = TypeUnifyTaskHelper.findEqualityConstrainedUnifyPair(nextSetElement);
writeLog("optOrigPair: " + optOrigPair); context.logger().debug(() -> "optOrigPair: " + optOrigPair);
if (optOrigPair.isPresent()) { if (optOrigPair.isPresent()) {
sameEqSet = TypeUnifyTaskHelper.findConstraintsWithSameTVAssociation(optOrigPair.get(), singleElementSets); sameEqSet = TypeUnifyTaskHelper.findConstraintsWithSameTVAssociation(optOrigPair.get(), singleElementSets);
} }
} else {
optOrigPair = Optional.empty();
} }
@@ -711,7 +676,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
return resultFuture.thenApply(result -> { return resultFuture.thenApply(result -> {
//2020-02-02: if (variance ==2) Hier Aufruf von filterOverriding einfuegen //2020-02-02: if (variance ==2) Hier Aufruf von filterOverriding einfuegen
writeLog("Return computeCR: " + result.toString()); context.logger().debug(() ->"Return computeCR: " + result.toString());
return result; return result;
}); });
} }
@@ -738,23 +703,23 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
VarianceCase varianceCase = VarianceCase.createFromVariance(variance, oderConstraint, this, context); VarianceCase varianceCase = VarianceCase.createFromVariance(variance, oderConstraint, this, context);
writeLog("nextSet: " + nextSet.toString()); context.logger().debug(() -> "nextSet: " + nextSet.toString());
writeLog("nextSetasList: " + prevNextSetAsList.toString()); context.logger().debug(() -> "nextSetasList: " + prevNextSetAsList.toString());
varianceCase.selectNextData(this, prevNextSetAsList, optOrigPair); varianceCase.selectNextData(this, prevNextSetAsList, optOrigPair);
if (oderConstraint) {//Methodconstraints werden abgespeichert für die Bytecodegenerierung von Methodenaufrufen if (oderConstraint) {//Methodconstraints werden abgespeichert für die Bytecodegenerierung von Methodenaufrufen
methodSignatureConstraint.addAll(((Constraint<UnifyPair>) varianceCase.a).getmethodSignatureConstraint()); methodSignatureConstraint.addAll(((Constraint<UnifyPair>) varianceCase.a).getmethodSignatureConstraint());
writeLog("ERSTELLUNG methodSignatureConstraint: " + methodSignatureConstraint); context.logger().debug(() -> "ERSTELLUNG methodSignatureConstraint: " + methodSignatureConstraint);
//System.out.println("ERSTELLUNG methodSignatureConstraint: " +noOfThread+" "+methodSignatureConstraint); //context.logger().info("ERSTELLUNG methodSignatureConstraint: " +noOfThread+" "+methodSignatureConstraint);
//System.out.println("a: " +a); //context.logger().info("a: " +a);
//System.out.println("eq: " +eq); //context.logger().info("eq: " +eq);
//System.out.println(); //context.logger().info();
} }
i++; i++;
Set<Set<UnifyPair>> elems = new HashSet<>(singleElementSets); Set<Set<UnifyPair>> elems = new HashSet<>(singleElementSets);
writeLog("a1: " + rekTiefe + " " + "variance: " + variance + " " + varianceCase.a.toString() + "\n"); context.logger().debug(() -> "a1: " + rekTiefe + " " + "variance: " + variance + " " + varianceCase.a.toString() + "\n");
Set<Set<UnifyPair>> aParDef = new HashSet<>(); Set<Set<UnifyPair>> aParDef = new HashSet<>();
@@ -764,7 +729,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
if (!oderConstraint && !sameEqSet.isEmpty() && !checkNoContradiction(varianceCase.a, sameEqSet, prevResult)) { if (!oderConstraint && !sameEqSet.isEmpty() && !checkNoContradiction(varianceCase.a, sameEqSet, prevResult)) {
noShortendElements++; noShortendElements++;
// continue // continue
return this.innerCartesianLoop(variance, rekTiefe, oderConstraint, parallel, prevResult, null, nextSet, return this.innerCartesianLoop(variance, rekTiefe, false, parallel, prevResult, null, nextSet,
prevNextSetAsList, optOrigPair, methodSignatureConstraint, singleElementSets, sameEqSet, oderConstraints); prevNextSetAsList, optOrigPair, methodSignatureConstraint, singleElementSets, sameEqSet, oderConstraints);
} }
@@ -775,7 +740,8 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
// Ergebnisvariable für die parallele Verabeitung: Tupel aus // Ergebnisvariable für die parallele Verabeitung: Tupel aus
// - forkOrig result : currentThreadResult (frueher "res") // - forkOrig result : currentThreadResult (frueher "res")
// - fork results : forkResults (frueher "add_res") // - fork results : forkResults (frueher "add_res")
CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> parallelResultDataFuture; CompletableFuture<VarianceCase.ComputationResults> parallelResultDataFuture;
if (parallel) { if (parallel) {
parallelResultDataFuture = varianceCase.computeParallel( parallelResultDataFuture = varianceCase.computeParallel(
@@ -786,13 +752,16 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
// same as variance = 0 // same as variance = 0
elems.add(varianceCase.a); //PL 2019-01-16 muss das wirklich hin steht schon in Zeile 859 ja braucht man siehe Zeile 859 elems.add(varianceCase.a); //PL 2019-01-16 muss das wirklich hin steht schon in Zeile 859 ja braucht man siehe Zeile 859
parallelResultDataFuture = this.unify2(elems, eq, oderConstraints, fc, false, rekTiefe, new HashSet<>(methodSignatureConstraint)) parallelResultDataFuture = this.unify2(elems, eq, oderConstraints, fc, false, rekTiefe, new HashSet<>(methodSignatureConstraint))
.thenApply(currentThreadResult -> new Tuple<>(currentThreadResult, new HashSet<>())); .thenApply(VarianceCase.ComputationResults::new);
} }
if (this.isExecutionCancelled()) {
return CompletableFuture.completedFuture(new HashSet<>());
}
return parallelResultDataFuture.thenCompose(parallelResultData -> { return parallelResultDataFuture.thenCompose(parallelResultData -> {
Set<Set<UnifyPair>> currentThreadResult = parallelResultData.getFirst(); Set<Set<UnifyPair>> currentThreadResult = parallelResultData.mainResult;
Set<Set<Set<UnifyPair>>> forkResults = parallelResultData.getSecond(); Set<Set<Set<UnifyPair>>> forkResults = parallelResultData.forkResults;
Set<Set<UnifyPair>> result = prevResult; Set<Set<UnifyPair>> result = prevResult;
List<Set<UnifyPair>> nextSetAsList = prevNextSetAsList; List<Set<UnifyPair>> nextSetAsList = prevNextSetAsList;
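
The generic Tuple that carried the parallel results is replaced by the named container VarianceCase.ComputationResults, accessed as mainResult/forkResults and constructible from just the main result (see the method reference a few lines above). The class itself is not in this excerpt; a sketch consistent with those usages, where the field types and the two-argument constructor are assumptions:

// Sketch of the results container implied by the usages above; not the repository's actual definition.
public static class ComputationResults {
    final Set<Set<UnifyPair>> mainResult;          // result of the current task (formerly "res")
    final Set<Set<Set<UnifyPair>>> forkResults;    // results of forked siblings (formerly "add_res")

    ComputationResults(Set<Set<UnifyPair>> mainResult) {
        this(mainResult, new HashSet<>());
    }

    ComputationResults(Set<Set<UnifyPair>> mainResult, Set<Set<Set<UnifyPair>>> forkResults) {
        this.mainResult = mainResult;
        this.forkResults = forkResults;
    }
}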
@@ -801,7 +770,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
//Ab hier alle parallele Berechnungen wieder zusammengeführt. //Ab hier alle parallele Berechnungen wieder zusammengeführt.
if (oderConstraint) {//Wenn weiteres Element nextSetasList genommen wird, muss die vorherige methodsignatur geloescht werden if (oderConstraint) {//Wenn weiteres Element nextSetasList genommen wird, muss die vorherige methodsignatur geloescht werden
methodSignatureConstraint.removeAll(((Constraint<UnifyPair>) varianceCase.a).getmethodSignatureConstraint()); methodSignatureConstraint.removeAll(((Constraint<UnifyPair>) varianceCase.a).getmethodSignatureConstraint());
//System.out.println("REMOVE: " +methodSignatureConstraint); //context.logger().info("REMOVE: " +methodSignatureConstraint);
} }
if (!isUndefinedPairSetSet(currentThreadResult) && isUndefinedPairSetSet(result)) { if (!isUndefinedPairSetSet(currentThreadResult) && isUndefinedPairSetSet(result)) {
//wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen //wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen
@@ -819,18 +788,16 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
//Alle Variablen bestimmen die nicht hinzugefügt wurden in a //Alle Variablen bestimmen die nicht hinzugefügt wurden in a
//PL 2018-12-28: Hier gab es eine ClassCastException, war nicht reproduzierbar //PL 2018-12-28: Hier gab es eine ClassCastException, war nicht reproduzierbar
// System.out.println(""); // context.logger().info("");
List<PlaceholderType> vars_a = TypeUnifyTaskHelper.extractMatchingPlaceholderTypes(varianceCase.a); List<PlaceholderType> vars_a = TypeUnifyTaskHelper.extractMatchingPlaceholderTypes(varianceCase.a);
Set<UnifyPair> fstElemRes = currentThreadResult.iterator().next(); Set<UnifyPair> fstElemRes = currentThreadResult.iterator().next();
Set<UnifyPair> compRes = fstElemRes.stream().filter(x -> vars_a.contains(((PlaceholderType) x.getLhsType()))).collect(Collectors.toCollection(HashSet::new)); Set<UnifyPair> compRes = fstElemRes.stream().filter(x -> vars_a.contains(((PlaceholderType) x.getLhsType()))).collect(Collectors.toCollection(HashSet::new));
//Alle Variablen bestimmen die nicht hinzugefügt wurden in a_last //Alle Variablen bestimmen die nicht hinzugefügt wurden in a_last
//System.out.println(a_last); //context.logger().info(a_last);
try {//PL eingefuegt 2019-03-06 da bei map mmer wieder Nullpointer kamen try {//PL eingefuegt 2019-03-06 da bei map mmer wieder Nullpointer kamen
a_last.forEach(x -> { a_last.forEach(x -> context.logger().debug(() -> "a_last_elem:" + x + " basepair: " + x.getBasePair()));//PL 2019-05-13 ins try hinzugefuegt Nullpointer-Exception ist in der Zeile aufgetaucht.
writeLog("a_last_elem:" + x + " basepair: " + x.getBasePair());
});//PL 2019-05-13 ins try hinzugefuegt Nullpointer-Exception ist in der Zeile aufgetaucht.
List<PlaceholderType> varsLast_a = TypeUnifyTaskHelper.extractMatchingPlaceholderTypes(a_last); List<PlaceholderType> varsLast_a = TypeUnifyTaskHelper.extractMatchingPlaceholderTypes(a_last);
//[(java.util.Vector<java.lang.Integer> <. gen_aq, , 1), (CEK =. ? extends gen_aq, 1)] KANN VORKOMMEN //[(java.util.Vector<java.lang.Integer> <. gen_aq, , 1), (CEK =. ? extends gen_aq, 1)] KANN VORKOMMEN
//erstes Element genügt, da vars immer auf die gleichen Elemente zugeordnet werden muessen //erstes Element genügt, da vars immer auf die gleichen Elemente zugeordnet werden muessen
@@ -839,11 +806,12 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
varianceCase.applyComputedResults(result, currentThreadResult, compResult, compRes); varianceCase.applyComputedResults(result, currentThreadResult, compResult, compRes);
} catch (NullPointerException e) { } catch (NullPointerException e) {
writeLog("NullPointerException: " + a_last.toString()); context.logger().debug(() -> "NullPointerException: " + a_last.toString());
} }
} else { } else {
//alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden //alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden
writeLog("RES Fst: result: " + result.toString() + " currentThreadResult: " + currentThreadResult.toString()); Set<Set<UnifyPair>> finalResult = result;
context.logger().debug(() -> "RES Fst: result: " + finalResult.toString() + " currentThreadResult: " + currentThreadResult.toString());
result.addAll(currentThreadResult); result.addAll(currentThreadResult);
} }
} }
@@ -861,14 +829,15 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
//wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen //wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen
result = par_res; result = par_res;
if (!par_res.isEmpty() && par_res.iterator().next() instanceof WildcardType) { if (!par_res.isEmpty() && par_res.iterator().next() instanceof WildcardType) {
// System.out.println(); // context.logger().info();
} }
} else { } else {
if ((isUndefinedPairSetSet(par_res) && isUndefinedPairSetSet(result)) if ((isUndefinedPairSetSet(par_res) && isUndefinedPairSetSet(result))
|| (!isUndefinedPairSetSet(par_res) && !isUndefinedPairSetSet(result)) || (!isUndefinedPairSetSet(par_res) && !isUndefinedPairSetSet(result))
|| result.isEmpty()) { || result.isEmpty()) {
//alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden //alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden
writeLog("RES var1 ADD:" + result.toString() + " " + par_res.toString()); Set<Set<UnifyPair>> finalResult1 = result;
context.logger().debug(() ->"RES var1 ADD:" + finalResult1.toString() + " " + par_res.toString());
result.addAll(par_res); result.addAll(par_res);
} }
} }
@@ -878,24 +847,26 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
/* auskommentiert um alle Max und min Betrachtung auszuschalten ANFANG */ /* auskommentiert um alle Max und min Betrachtung auszuschalten ANFANG */
if (!result.isEmpty() && (!isUndefinedPairSetSet(currentThreadResult) || !aParDef.isEmpty())) { if (!result.isEmpty() && (!isUndefinedPairSetSet(currentThreadResult) || !aParDef.isEmpty())) {
/*
if (nextSetAsList.iterator().hasNext() if (nextSetAsList.iterator().hasNext()
&& nextSetAsList.getFirst().stream().anyMatch(x -> x.getLhsType().getName().equals("B")) && nextSetAsList.getFirst().stream().anyMatch(x -> x.getLhsType().getName().equals("B"))
&& nextSetAsList.size() > 1) { && nextSetAsList.size() > 1) {
// System.out.print(""); // System.out.print("");
} }
*/
// Iterator<Set<UnifyPair>> nextSetasListIt = new ArrayList<>(nextSetAsList).iterator(); // Iterator<Set<UnifyPair>> nextSetasListIt = new ArrayList<>(nextSetAsList).iterator();
boolean shouldBreak = varianceCase.eraseInvalidSets(rekTiefe, aParDef, nextSetAsList); boolean shouldBreak = varianceCase.eraseInvalidSets(rekTiefe, aParDef, nextSetAsList);
if (shouldBreak) { if (shouldBreak) {
// this.cancelYoungerSiblingTasks();
return CompletableFuture.completedFuture(result); return CompletableFuture.completedFuture(result);
} }
writeLog("a: " + rekTiefe + " variance: " + variance + varianceCase.a.toString()); context.logger().debug(() -> "a: " + rekTiefe + " variance: " + variance + varianceCase.a.toString());
} }
/* auskommentiert um alle Max und min Betrachtung auszuschalten ENDE */ /* auskommentiert um alle Max und min Betrachtung auszuschalten ENDE */
if (isUndefinedPairSetSet(currentThreadResult) && aParDef.isEmpty()) { if (isUndefinedPairSetSet(currentThreadResult) && aParDef.isEmpty()) {
int nofstred = 0;
Set<UnifyPair> abhSubst = TypeUnifyTaskHelper.collectFromThreadResult(currentThreadResult, UnifyPair::getAllSubstitutions); Set<UnifyPair> abhSubst = TypeUnifyTaskHelper.collectFromThreadResult(currentThreadResult, UnifyPair::getAllSubstitutions);
abhSubst.addAll( abhSubst.addAll(
TypeUnifyTaskHelper.collectFromThreadResult(currentThreadResult, UnifyPair::getThisAndAllBases) TypeUnifyTaskHelper.collectFromThreadResult(currentThreadResult, UnifyPair::getThisAndAllBases)
@@ -925,9 +896,10 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
*/ */
if (currentThreadResult.size() > 1) { if (currentThreadResult.size() > 1) {
// System.out.println(); // context.logger().info();
} }
writeLog("nextSetasList vor filter-Aufruf: " + nextSetAsList); List<Set<UnifyPair>> finalNextSetAsList = nextSetAsList;
context.logger().debug(() -> "nextSetasList vor filter-Aufruf: " + finalNextSetAsList);
if (!oderConstraint) {//PL 2023-02-08 eingefuegt: Bei oderconstraints sind Subststitutionen nicht als Substitutionen in idesem Sinne zu sehen if (!oderConstraint) {//PL 2023-02-08 eingefuegt: Bei oderconstraints sind Subststitutionen nicht als Substitutionen in idesem Sinne zu sehen
nextSetAsList = nextSetAsList.stream().filter(x -> { nextSetAsList = nextSetAsList.stream().filter(x -> {
//Boolean ret = false; //Boolean ret = false;
@@ -938,24 +910,29 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
})//.filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y)) //fuer testzwecke auskommentiert um nofstred zu bestimmen PL 2018-10-10 })//.filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y)) //fuer testzwecke auskommentiert um nofstred zu bestimmen PL 2018-10-10
.collect(Collectors.toCollection(ArrayList::new)); .collect(Collectors.toCollection(ArrayList::new));
} }
writeLog("nextSetasList nach filter-Aufruf: " + nextSetAsList);
nofstred = nextSetAsList.size(); if (context.logger().isLogLevelActive(Logger.LogLevel.DEBUG)) {
//NOCH NICHT korrekt PL 2018-10-12 List<Set<UnifyPair>> finalNextSetAsList1 = nextSetAsList;
//nextSetasList = nextSetasList.stream().filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y)) context.logger().debug(() -> "nextSetasList nach filter-Aufruf: " + finalNextSetAsList1);
// .collect(Collectors.toCollection(ArrayList::new)); int nofstred = nextSetAsList.size();
writeLog("currentThreadResult (undef): " + currentThreadResult.toString()); //NOCH NICHT korrekt PL 2018-10-12
writeLog("abhSubst: " + abhSubst.toString()); //nextSetasList = nextSetasList.stream().filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y))
writeLog("a2: " + rekTiefe + " " + varianceCase.a.toString()); // .collect(Collectors.toCollection(ArrayList::new));
writeLog("Durchschnitt: " + durchschnitt.toString()); context.logger().debug("currentThreadResult (undef): " + currentThreadResult.toString());
writeLog("nextSet: " + nextSet.toString()); context.logger().debug("abhSubst: " + abhSubst.toString());
writeLog("nextSetasList: " + nextSetAsList.toString()); context.logger().debug("a2: " + rekTiefe + " " + varianceCase.a.toString());
writeLog("Number first erased Elements (undef): " + (len - nofstred)); context.logger().debug("Durchschnitt: " + durchschnitt.toString());
writeLog("Number second erased Elements (undef): " + (nofstred - nextSetAsList.size())); context.logger().debug("nextSet: " + nextSet.toString());
writeLog("Number erased Elements (undef): " + (len - nextSetAsList.size())); context.logger().debug("nextSetasList: " + nextSetAsList.toString());
noAllErasedElements += (len - nextSetAsList.size()); context.logger().debug("Number first erased Elements (undef): " + (len - nofstred));
writeLog("Number of all erased Elements (undef): " + noAllErasedElements.toString()); context.logger().debug("Number second erased Elements (undef): " + (nofstred - nextSetAsList.size()));
writeLog("Number of Backtracking: " + noBacktracking++); context.logger().debug("Number erased Elements (undef): " + (len - nextSetAsList.size()));
// System.out.println(""); noAllErasedElements += (len - nextSetAsList.size());
context.logger().debug("Number of all erased Elements (undef): " + noAllErasedElements);
context.logger().debug("Number of Backtracking: " + noBacktracking++);
}
// context.logger().info("");
} }
//if (nextSetasList.size() == 0 && isUndefinedPairSetSet(result) && nextSet.size() > 1) { //if (nextSetasList.size() == 0 && isUndefinedPairSetSet(result) && nextSet.size() > 1) {
// return result; // return result;
@@ -964,7 +941,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
// result.removeIf(y -> isUndefinedPairSet(y)); // result.removeIf(y -> isUndefinedPairSet(y));
//} //}
//else result.stream().filter(y -> !isUndefinedPairSet(y)); //else result.stream().filter(y -> !isUndefinedPairSet(y));
writeLog("currentThreadResult: " + currentThreadResult.toString()); context.logger().debug(() -> "currentThreadResult: " + currentThreadResult.toString());
return this.innerCartesianLoop(variance, rekTiefe, oderConstraint, parallel, result, varianceCase.a, nextSet, return this.innerCartesianLoop(variance, rekTiefe, oderConstraint, parallel, result, varianceCase.a, nextSet,
nextSetAsList, optOrigPair, methodSignatureConstraint, singleElementSets, sameEqSet, oderConstraints); nextSetAsList, optOrigPair, methodSignatureConstraint, singleElementSets, sameEqSet, oderConstraints);
@@ -981,7 +958,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
* the error constraints. Error constraints are added * the error constraints. Error constraints are added
* @result contradiction of (a = ty) in sameEqSet * @result contradiction of (a = ty) in sameEqSet
*/ */
public Boolean checkNoContradiction(Set<UnifyPair> a, Set<UnifyPair> sameEqSet, Set<Set<UnifyPair>> result) { public boolean checkNoContradiction(Set<UnifyPair> a, Set<UnifyPair> sameEqSet, Set<Set<UnifyPair>> result) {
//optAPair enthaelt ggf. das Paar a = ty' \in a //optAPair enthaelt ggf. das Paar a = ty' \in a
//unterscheidet sich von optOrigPair, da dort a = ty //unterscheidet sich von optOrigPair, da dort a = ty
@@ -1001,7 +978,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
UnifyPair aPair = optAPair.get(); UnifyPair aPair = optAPair.get();
//writeLog("optOrigPair: " + optOrigPair + " " + "aPair: " + aPair+ " " + "aPair.basePair(): " + aPair.getBasePair()); //writeLog("optOrigPair: " + optOrigPair + " " + "aPair: " + aPair+ " " + "aPair.basePair(): " + aPair.getBasePair());
writeLog("checkA: " + aPair + "sameEqSet: " + sameEqSet); context.logger().debug(() ->"checkA: " + aPair + "sameEqSet: " + sameEqSet);
for (UnifyPair sameEq : sameEqSet) { for (UnifyPair sameEq : sameEqSet) {
if (sameEq.getLhsType() instanceof PlaceholderType) { if (sameEq.getLhsType() instanceof PlaceholderType) {
Set<UnifyPair> localEq = new HashSet<>(); Set<UnifyPair> localEq = new HashSet<>();
@@ -1022,7 +999,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
if (result.isEmpty() || isUndefinedPairSetSet(result)) { if (result.isEmpty() || isUndefinedPairSetSet(result)) {
result.addAll(localRes); result.addAll(localRes);
} }
writeLog("FALSE: " + aPair + "sameEqSet: " + sameEqSet); context.logger().debug(() ->"FALSE: " + aPair + "sameEqSet: " + sameEqSet);
return false; return false;
} }
} else { } else {
@@ -1044,12 +1021,12 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
if (result.isEmpty() || isUndefinedPairSetSet(result)) { if (result.isEmpty() || isUndefinedPairSetSet(result)) {
result.addAll(localRes); result.addAll(localRes);
} }
writeLog("FALSE: " + aPair + "sameEqSet: " + sameEqSet); context.logger().debug(() ->"FALSE: " + aPair + "sameEqSet: " + sameEqSet);
return false; return false;
} }
} }
} }
writeLog("TRUE: " + aPair + "sameEqSet: " + sameEqSet); context.logger().debug(() ->"TRUE: " + aPair + "sameEqSet: " + sameEqSet);
return true; return true;
} }
return true; return true;
@@ -1135,7 +1112,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
// Through application of the rules, every pair should have one of the above forms. // Through application of the rules, every pair should have one of the above forms.
// Pairs that do not have one of the aboves form are contradictory. // Pairs that do not have one of the aboves form are contradictory.
else { else {
writeLog("Second erase:" + checkPair); context.logger().debug(() ->"Second erase:" + checkPair);
return false; return false;
} }
//*/ //*/
@@ -1334,7 +1311,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
* (as in case 1 where sigma is added to the innermost set). * (as in case 1 where sigma is added to the innermost set).
*/ */
protected Set<Set<Set<? extends Set<UnifyPair>>>> calculatePairSets(Set<UnifyPair> eq2s, List<Set<Constraint<UnifyPair>>> oderConstraintsInput, IFiniteClosure fc, Set<UnifyPair> undefined, List<Set<Constraint<UnifyPair>>> oderConstraintsOutput) { protected Set<Set<Set<? extends Set<UnifyPair>>>> calculatePairSets(Set<UnifyPair> eq2s, List<Set<Constraint<UnifyPair>>> oderConstraintsInput, IFiniteClosure fc, Set<UnifyPair> undefined, List<Set<Constraint<UnifyPair>>> oderConstraintsOutput) {
writeLog("eq2s: " + eq2s.toString()); context.logger().debug(() ->"eq2s: " + eq2s.toString());
oderConstraintsOutput.addAll(oderConstraintsInput); oderConstraintsOutput.addAll(oderConstraintsInput);
List<Set<Set<? extends Set<UnifyPair>>>> result = new ArrayList<>(9); List<Set<Set<? extends Set<UnifyPair>>>> result = new ArrayList<>(9);
@@ -1421,10 +1398,12 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
} }
} }
writeLog("eq2s: " + eq2s); if (context.logger().isLogLevelActive(Logger.LogLevel.DEBUG)) {
writeLog("eq2sAsListFst: " + eq2sAsListFst); context.logger().debug("eq2s: " + eq2s);
writeLog("eq2sAsListSnd: " + eq2sAsListSnd); context.logger().debug("eq2sAsListFst: " + eq2sAsListFst);
writeLog("eq2sAsListBack: " + eq2sAsListBack); context.logger().debug("eq2sAsListSnd: " + eq2sAsListSnd);
context.logger().debug("eq2sAsListBack: " + eq2sAsListBack);
}
eq2sAsList.addAll(eq2sAsListFst); eq2sAsList.addAll(eq2sAsListFst);
eq2sAsList.addAll(eq2sAsListSnd); eq2sAsList.addAll(eq2sAsListSnd);
@@ -1437,7 +1416,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
if (!oderConstraintsOutput.isEmpty()) { if (!oderConstraintsOutput.isEmpty()) {
Set<Constraint<UnifyPair>> ret = oderConstraintsOutput.removeFirst(); Set<Constraint<UnifyPair>> ret = oderConstraintsOutput.removeFirst();
//if (ret.iterator().next().iterator().next().getLhsType().getName().equals("M")) //if (ret.iterator().next().iterator().next().getLhsType().getName().equals("M"))
// System.out.println("M"); // context.logger().info("M");
//Set<UnifyPair> retFlat = new HashSet<>(); //Set<UnifyPair> retFlat = new HashSet<>();
//ret.stream().forEach(x -> retFlat.addAll(x)); //ret.stream().forEach(x -> retFlat.addAll(x));
@@ -1477,7 +1456,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
// Case 1: (a <. Theta') // Case 1: (a <. Theta')
if (((pairOp == PairOperator.SMALLERDOT) || (pairOp == PairOperator.SMALLERNEQDOT)) && lhsType instanceof PlaceholderType) { if (((pairOp == PairOperator.SMALLERDOT) || (pairOp == PairOperator.SMALLERNEQDOT)) && lhsType instanceof PlaceholderType) {
//System.out.println(pair); //context.logger().info(pair);
if (first) { //writeLog(pair.toString()+"\n"); if (first) { //writeLog(pair.toString()+"\n");
Set<Set<UnifyPair>> x1 = new HashSet<>(); Set<Set<UnifyPair>> x1 = new HashSet<>();
if (pair.getRhsType().getName().equals("void")) { if (pair.getRhsType().getName().equals("void")) {
@@ -1499,7 +1478,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
x1.remove(remElem); x1.remove(remElem);
} }
/* ZU LOESCHEN ANFANG /* ZU LOESCHEN ANFANG
//System.out.println(x1); //context.logger().info(x1);
Set<UnifyPair> sameEqSet = eq2sAsList.stream() Set<UnifyPair> sameEqSet = eq2sAsList.stream()
.filter(x -> ((x.getLhsType().equals(lhsType) || x.getRhsType().equals(lhsType)) && !x.equals(pair))) .filter(x -> ((x.getLhsType().equals(lhsType) || x.getRhsType().equals(lhsType)) && !x.equals(pair)))
.collect(Collectors.toCollection(HashSet::new)); .collect(Collectors.toCollection(HashSet::new));
@@ -1717,11 +1696,11 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
// && ((ReferenceType)thetaPrime).getTypeParams().iterator().next() instanceof PlaceholderType) //.getName().equals("java.util.Vector")) // && ((ReferenceType)thetaPrime).getTypeParams().iterator().next() instanceof PlaceholderType) //.getName().equals("java.util.Vector"))
// && ((ReferenceType)((ReferenceType)thetaPrime).getTypeParams().iterator().next()).getTypeParams().iterator().next().getName().equals("java.lang.Integer")) { // && ((ReferenceType)((ReferenceType)thetaPrime).getTypeParams().iterator().next()).getTypeParams().iterator().next().getName().equals("java.lang.Integer")) {
// { // {
// System.out.println(""); // context.logger().info("");
//} //}
Set<UnifyType> cs = fc.getAllTypesByName(thetaPrime.getName());//cs= [java.util.Vector<NP>, java.util.Vector<java.util.Vector<java.lang.Integer>>, ????java.util.Vector<gen_hv>???] Set<UnifyType> cs = fc.getAllTypesByName(thetaPrime.getName());//cs= [java.util.Vector<NP>, java.util.Vector<java.util.Vector<java.lang.Integer>>, ????java.util.Vector<gen_hv>???]
writeLog("cs: " + cs.toString()); context.logger().debug(() ->"cs: " + cs.toString());
//PL 18-02-06 entfernt, kommt durch unify wieder rein //PL 18-02-06 entfernt, kommt durch unify wieder rein
//cs.add(thetaPrime); //cs.add(thetaPrime);
//PL 18-02-06 entfernt //PL 18-02-06 entfernt
@@ -1732,8 +1711,9 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
aa.putAll(b); aa.putAll(b);
return aa; return aa;
}; };
HashMap<PlaceholderType, PlaceholderType> hm = x.getInvolvedPlaceholderTypes().stream() var involvedPlaceholderTypes = x.getInvolvedPlaceholderTypes();
.reduce(new HashMap<PlaceholderType, PlaceholderType>(), HashMap<PlaceholderType, PlaceholderType> hm = involvedPlaceholderTypes.stream()
.reduce(TypeUnifyTaskHelper.getPresizedHashMap(involvedPlaceholderTypes.size()),
(aa, b) -> { (aa, b) -> {
aa.put(b, PlaceholderType.freshPlaceholder(context.placeholderRegistry())); aa.put(b, PlaceholderType.freshPlaceholder(context.placeholderRegistry()));
return aa; return aa;
@@ -1753,7 +1733,8 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
if ((match.match(ml)).isEmpty()) { if ((match.match(ml)).isEmpty()) {
thetaQs.remove(c); thetaQs.remove(c);
} }
writeLog("thetaQs von " + c + ": " + thetaQs.toString()); Set<UnifyType> finalThetaQs = thetaQs;
context.logger().debug(() ->"thetaQs von " + c + ": " + finalThetaQs.toString());
//Set<UnifyType> thetaQs = fc.getChildren(c).stream().collect(Collectors.toCollection(HashSet::new)); //Set<UnifyType> thetaQs = fc.getChildren(c).stream().collect(Collectors.toCollection(HashSet::new));
//thetaQs.add(thetaPrime); //PL 18-02-05 wieder geloescht //thetaQs.add(thetaPrime); //PL 18-02-05 wieder geloescht
//PL 2017-10-03: War auskommentiert habe ich wieder einkommentiert, //PL 2017-10-03: War auskommentiert habe ich wieder einkommentiert,
@@ -1777,7 +1758,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
for (TypeParams tp : permuteParams(candidateParams)) for (TypeParams tp : permuteParams(candidateParams))
thetaQPrimes.add(c.setTypeParams(tp)); thetaQPrimes.add(c.setTypeParams(tp));
} }
writeLog("thetaQPrimes von " + c + ": " + thetaQPrimes.toString()); context.logger().debug(() ->"thetaQPrimes von " + c + ": " + thetaQPrimes.toString());
for (UnifyType tqp : thetaQPrimes) {//PL 2020-03-08 umbauen in der Schleife wird nur unifizierbarer Typ gesucht break am Ende for (UnifyType tqp : thetaQPrimes) {//PL 2020-03-08 umbauen in der Schleife wird nur unifizierbarer Typ gesucht break am Ende
Collection<PlaceholderType> tphs = tqp.getInvolvedPlaceholderTypes(); Collection<PlaceholderType> tphs = tqp.getInvolvedPlaceholderTypes();
Optional<Unifier> opt = stdUnify.unify(tqp, thetaPrime); Optional<Unifier> opt = stdUnify.unify(tqp, thetaPrime);
@@ -1850,7 +1831,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
} }
} }
} }
writeLog("result von " + pair + ": " + result); context.logger().debug(() ->"result von " + pair + ": " + result);
return result; return result;
} }
@@ -1961,7 +1942,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
return ((match.match(termList).isPresent()) || x); return ((match.match(termList).isPresent()) || x);
}; };
//if (parai.getName().equals("java.lang.Integer")) { //if (parai.getName().equals("java.lang.Integer")) {
// System.out.println(""); // context.logger().info("");
//} //}
BinaryOperator<Boolean> bo = (x, y) -> (x || y); BinaryOperator<Boolean> bo = (x, y) -> (x || y);
if (fBounded.stream().reduce(false, f, bo)) { if (fBounded.stream().reduce(false, f, bo)) {
@@ -2051,22 +2032,4 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
permuteParams(candidates, idx + 1, result, current); permuteParams(candidates, idx + 1, result, current);
} }
} }
public void writeLog(String str) {
if (context.log() && finalresult) {
synchronized (context.logFile()) {
try {
/*
logFile.write("Thread no.:" + thNo + "\n");
logFile.write("noOfThread:" + noOfThread + "\n");
logFile.write("parallel:" + parallel + "\n");
*/
context.logFile().write(str + "\n\n");
// logFile.flush();
} catch (IOException e) {
System.err.println("kein LogFile");
}
}
}
}
} }
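The writeLog removal above is only workable because of the supplier-based logger calls that replace it throughout this diff (context.logger().debug(() -> ...)). The sketch below is an editorial illustration of that pattern, not the project's actual Logger class: with a Supplier overload, the message string is built only when the DEBUG level is active, so expensive toString() calls on large unify sets are skipped in normal runs.

import java.util.function.Supplier;

// Illustrative only: a logger with an eager and a lazy debug() overload.
class LazyLoggerSketch {
    enum LogLevel { DEBUG, INFO }

    private final LogLevel threshold;

    LazyLoggerSketch(LogLevel threshold) {
        this.threshold = threshold;
    }

    boolean isLogLevelActive(LogLevel level) {
        // DEBUG is only active when the configured threshold is DEBUG itself.
        return level.ordinal() >= threshold.ordinal();
    }

    // Eager variant: the caller has already paid for the string concatenation.
    void debug(String message) {
        if (isLogLevelActive(LogLevel.DEBUG)) System.out.println(message);
    }

    // Lazy variant: the supplier only runs when DEBUG is enabled.
    void debug(Supplier<String> message) {
        if (isLogLevelActive(LogLevel.DEBUG)) System.out.println(message.get());
    }

    public static void main(String[] args) {
        LazyLoggerSketch log = new LazyLoggerSketch(LogLevel.INFO);
        // The supplier is never invoked at INFO level, so expensiveDump() never runs.
        log.debug(() -> "never built: " + expensiveDump());
    }

    static String expensiveDump() {
        throw new IllegalStateException("would only run at DEBUG level");
    }
}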

View File

@@ -5,6 +5,7 @@ import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair; import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType; import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Optional; import java.util.Optional;
@@ -42,11 +43,11 @@ public class TypeUnifyTaskHelper {
.filter(z -> ((PlaceholderType) z.getLhsType()).getVariance() != 0) .filter(z -> ((PlaceholderType) z.getLhsType()).getVariance() != 0)
.map(c -> ((PlaceholderType) c.getLhsType()).getVariance()) .map(c -> ((PlaceholderType) c.getLhsType()).getVariance())
.reduce((a, b) -> { .reduce((a, b) -> {
if (a == b) return a; if (a.intValue() == b.intValue()) return a;
else return 0; else return 0;
})) //2 kommt insbesondere bei Oder-Constraints vor })) //2 kommt insbesondere bei Oder-Constraints vor
.filter(d -> d.isPresent()) .filter(Optional::isPresent)
.map(e -> e.get()) .map(Optional::get)
.findAny(); .findAny();
return xi.orElse(0); return xi.orElse(0);
@@ -185,4 +186,36 @@ public class TypeUnifyTaskHelper {
.collect(Collectors.toCollection(ArrayList::new)); .collect(Collectors.toCollection(ArrayList::new));
} }
public static Set<UnifyPair> occursCheck(final Set<UnifyPair> eq) {
Set<UnifyPair> ocurrPairs = new HashSet<>(eq.size());
for (UnifyPair x : eq) {
UnifyType lhs = x.getLhsType();
UnifyType rhs = x.getRhsType();
if (lhs instanceof PlaceholderType lhsPlaceholder &&
!(rhs instanceof PlaceholderType) &&
rhs.getTypeParams().occurs(lhsPlaceholder))
{
x.setUndefinedPair();
ocurrPairs.add(x);
}
}
return ocurrPairs;
}
public static <T> HashSet<T> getPresizedHashSet(int minElements) {
if (minElements < 16) return new HashSet<>();
// HashSet and HashMap will resize at 75% load, so we account for that by multiplying by 1.5
int n = (int)(minElements * 1.5);
return new HashSet<>(n);
}
public static <S,T> HashMap<S,T> getPresizedHashMap(int minElements) {
if (minElements < 16) return new HashMap<>();
// HashSet and HashMap will resize at 75% load, so we account for that by multiplying by 1.5
int n = (int)(minElements * 1.5);
return new HashMap<>(n);
}
} }
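As context for the presizing helpers above: with Java's default load factor of 0.75, a HashMap or HashSet rehashes once it holds more than capacity × 0.75 entries, so storing n entries without a rehash needs an initial capacity of at least n / 0.75 ≈ 1.34 × n; the factor 1.5 used here is that bound rounded up. A standalone, editorial illustration (not project code):

import java.util.HashMap;

// Standalone illustration of the rule behind getPresizedHashMap:
// requesting a capacity of 1.5 * n keeps n entries under the 0.75 threshold.
public class PresizeDemo {
    public static void main(String[] args) {
        int n = 1000;
        // Requested capacity 1500; HashMap rounds this up to the next power of
        // two (2048), whose resize threshold 2048 * 0.75 = 1536 exceeds n,
        // so inserting all n entries never triggers a rehash.
        HashMap<Integer, Integer> map = new HashMap<>((int) (n * 1.5));
        for (int i = 0; i < n; i++) {
            map.put(i, i);
        }
        System.out.println("entries stored without rehash: " + map.size());
    }
}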

View File

@@ -1,17 +1,15 @@
package de.dhbwstuttgart.typeinference.unify; package de.dhbwstuttgart.typeinference.unify;
import de.dhbwstuttgart.util.Logger;
import java.io.Writer; import java.io.Writer;
import java.util.concurrent.ExecutorService; import java.util.concurrent.ExecutorService;
import java.util.concurrent.ForkJoinPool; import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.atomic.AtomicInteger;
public record UnifyContext( public record UnifyContext(
// main log file of a unification // main logger of a unification
Writer logFile, Logger logger,
// if logs should be made
Boolean log,
// if the unify algorithm should run in parallel // if the unify algorithm should run in parallel
Boolean parallel, boolean parallel,
// the model for storing calculated results // the model for storing calculated results
UnifyResultModel resultModel, UnifyResultModel resultModel,
// the executor used for thread management in parallel execution // the executor used for thread management in parallel execution
@@ -23,26 +21,24 @@ public record UnifyContext(
) { ) {
public UnifyContext( public UnifyContext(
Writer logFile, Logger logger,
Boolean log, boolean parallel,
Boolean parallel,
UnifyResultModel resultModel, UnifyResultModel resultModel,
UnifyTaskModel usedTasks, UnifyTaskModel usedTasks,
ExecutorService executor, ExecutorService executor,
PlaceholderRegistry placeholderRegistry PlaceholderRegistry placeholderRegistry
) { ) {
this(logFile, log, parallel, resultModel, executor, placeholderRegistry, usedTasks); this(logger, parallel, resultModel, executor, placeholderRegistry, usedTasks);
} }
public UnifyContext( public UnifyContext(
Writer logFile, Logger logger,
Boolean log, boolean parallel,
Boolean parallel,
UnifyResultModel resultModel, UnifyResultModel resultModel,
UnifyTaskModel usedTasks, UnifyTaskModel usedTasks,
PlaceholderRegistry placeholderRegistry PlaceholderRegistry placeholderRegistry
) { ) {
this(logFile, log, parallel, resultModel, usedTasks, ForkJoinPool.commonPool(), placeholderRegistry); this(logger, parallel, resultModel, usedTasks, ForkJoinPool.commonPool(), placeholderRegistry);
} }
@@ -51,20 +47,21 @@ public record UnifyContext(
* causes the UnifyContext to be essentially handled as a * causes the UnifyContext to be essentially handled as a
*/ */
public UnifyContext newWithLogFile(Writer logFile) { public UnifyContext newWithLogger(Logger logger) {
return new UnifyContext(logFile, log, parallel, resultModel, executor, placeholderRegistry, usedTasks); return new UnifyContext(logger, parallel, resultModel, executor, placeholderRegistry, usedTasks);
} }
public UnifyContext newWithParallel(boolean parallel) { public UnifyContext newWithParallel(boolean parallel) {
return new UnifyContext(logFile, log, parallel, resultModel, executor, placeholderRegistry, usedTasks); if (this.parallel == parallel) return this;
return new UnifyContext(logger, parallel, resultModel, executor, placeholderRegistry, usedTasks);
} }
public UnifyContext newWithExecutor(ExecutorService executor) { public UnifyContext newWithExecutor(ExecutorService executor) {
return new UnifyContext(logFile, log, parallel, resultModel, executor, placeholderRegistry, usedTasks); return new UnifyContext(logger, parallel, resultModel, executor, placeholderRegistry, usedTasks);
} }
public UnifyContext newWithResultModel(UnifyResultModel resultModel) { public UnifyContext newWithResultModel(UnifyResultModel resultModel) {
return new UnifyContext(logFile, log, parallel, resultModel, executor, placeholderRegistry, usedTasks); return new UnifyContext(logger, parallel, resultModel, executor, placeholderRegistry, usedTasks);
} }
} }
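The record above follows an immutable copy-on-change style: each newWith* method returns a new UnifyContext with one field swapped, and newWithParallel now returns this when the flag would not change, avoiding a pointless allocation. A reduced, editorial sketch of the same idea (the names here are illustrative, not the project's API):

// Editorial sketch of the copy-on-change pattern used by UnifyContext.
record SettingsSketch(boolean parallel, int threads) {

    // Return the same instance when nothing would change.
    SettingsSketch withParallel(boolean parallel) {
        if (this.parallel == parallel) return this;
        return new SettingsSketch(parallel, threads);
    }

    public static void main(String[] args) {
        SettingsSketch s = new SettingsSketch(true, 8);
        System.out.println(s.withParallel(true) == s);        // true: no copy made
        System.out.println(s.withParallel(false).parallel()); // false: modified copy
    }
}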

View File

@@ -12,7 +12,7 @@ public class UnifyTaskModel {
public synchronized void cancel() { public synchronized void cancel() {
for(TypeUnifyTask t : usedTasks) { for(TypeUnifyTask t : usedTasks) {
t.myCancel(true); t.cancelExecution();
} }
} }
} }

View File

@@ -1,6 +1,5 @@
package de.dhbwstuttgart.typeinference.unify.cartesianproduct; package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
import de.dhbwstuttgart.exceptions.UnifyCancelException;
import de.dhbwstuttgart.typeinference.constraints.Constraint; import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task; import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask; import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
@@ -17,11 +16,11 @@ import java.util.Set;
import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors; import java.util.stream.Collectors;
public class Variance1Case extends VarianceCase { public class ContravarianceCase extends VarianceCase {
protected final int variance = 1; protected final int variance = 1;
protected Variance1Case(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) { protected ContravarianceCase(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
super(isOderConstraint, typeUnifyTask, context); super(isOderConstraint, typeUnifyTask, context);
} }
@@ -32,12 +31,12 @@ public class Variance1Case extends VarianceCase {
Optional<UnifyPair> optOrigPair Optional<UnifyPair> optOrigPair
) { ) {
a = typeUnifyTask.oup.max(nextSetAsList.iterator()); a = typeUnifyTask.oup.max(nextSetAsList.iterator());
writeLog("Max: a in " + variance + " " + a); context.logger().debug("Max: a in " + variance + " " + a);
nextSetAsList.remove(a); nextSetAsList.remove(a);
if (this.isOderConstraint) { if (this.isOderConstraint) {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint()); nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
} }
writeLog("nextSetasListOderConstraints 1: " + nextSetasListOderConstraints); context.logger().debug(() -> "nextSetasListOderConstraints 1: " + nextSetasListOderConstraints);
//Alle maximale Elemente in nextSetasListRest bestimmen //Alle maximale Elemente in nextSetasListRest bestimmen
//nur für diese wird parallele Berechnung angestossen. //nur für diese wird parallele Berechnung angestossen.
@@ -49,7 +48,7 @@ public class Variance1Case extends VarianceCase {
@Override @Override
public CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> computeParallel( public CompletableFuture<ComputationResults> computeParallel(
Set<Set<UnifyPair>> elems, Set<Set<UnifyPair>> elems,
Set<UnifyPair> eq, Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints, List<Set<Constraint<UnifyPair>>> oderConstraints,
@@ -61,10 +60,6 @@ public class Variance1Case extends VarianceCase {
Set<Set<UnifyPair>> result, Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> aParDef Set<Set<UnifyPair>> aParDef
) { ) {
CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> resultValues = CompletableFuture.completedFuture(new Tuple<>(
new HashSet<>(), new HashSet<>()
));
Set<UnifyPair> newEqOrig = new HashSet<>(eq); Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems); Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints); List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
@@ -72,27 +67,28 @@ public class Variance1Case extends VarianceCase {
/* FORK ANFANG */ /* FORK ANFANG */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, methodSignatureConstraint); TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, methodSignatureConstraint);
typeUnifyTask.addChildTask(forkOrig);
// schedule compute() on another thread // schedule compute() on another thread
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f); CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
resultValues = resultValues.thenCombine(forkOrigFuture, CompletableFuture<ComputationResults> resultValues = forkOrigFuture.thenApply(
(prevResults, currentThreadResult) -> { (currentThreadResult) -> {
forkOrig.writeLog("final Orig 1"); forkOrig.context.logger().debug("final Orig 1");
forkOrig.closeLogFile(); forkOrig.closeLogFile();
return new Tuple<>(currentThreadResult, prevResults.getSecond()); return new ComputationResults(currentThreadResult);
}); });
//forks.add(forkOrig); //forks.add(forkOrig);
if (typeUnifyTask.myIsCancelled()) { if (typeUnifyTask.isExecutionCancelled()) {
throw new UnifyCancelException(); return CompletableFuture.completedFuture(new ComputationResults());
} }
/* FORK ENDE */ /* FORK ENDE */
writeLog("a in " + variance + " " + a); context.logger().debug("a in " + variance + " " + a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString()); context.logger().debug("nextSetasListRest: " + nextSetasListRest.toString());
while (!nextSetasListRest.isEmpty()) { while (!nextSetasListRest.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasListRest.removeFirst(); Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
nextSetAsList.remove(nSaL); nextSetAsList.remove(nSaL);
writeLog("1 RM" + nSaL.toString()); context.logger().debug("1 RM" + nSaL.toString());
if (!this.isOderConstraint) { if (!this.isOderConstraint) {
//ueberpruefung ob zu a =. ty \in nSaL in sameEqSet ein Widerspruch besteht //ueberpruefung ob zu a =. ty \in nSaL in sameEqSet ein Widerspruch besteht
@@ -108,27 +104,28 @@ public class Variance1Case extends VarianceCase {
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints); List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL); newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint)); TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
typeUnifyTask.addChildTask(fork);
// schedule compute() on another thread // schedule compute() on another thread
CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f); CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
resultValues = resultValues.thenCombine(forkFuture, resultValues = resultValues.thenCombine(forkFuture,
(prevResults, fork_res) -> { (prevResults, fork_res) -> {
if (typeUnifyTask.myIsCancelled()) { if (typeUnifyTask.isExecutionCancelled()) {
throw new UnifyCancelException(); return new ComputationResults();
} }
writeLog("fork_res: " + fork_res.toString()); context.logger().debug("fork_res: " + fork_res.toString());
writeLog(Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString()); context.logger().debug(Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString());
prevResults.getSecond().add(fork_res); prevResults.addForkResult(fork_res);
if (!typeUnifyTask.isUndefinedPairSetSet(fork_res)) { if (!typeUnifyTask.isUndefinedPairSetSet(fork_res)) {
aParDef.add(fork.getNextSetElement()); aParDef.add(fork.getNextSetElement());
} }
fork.writeLog("final 1"); fork.context.logger().debug("final 1");
fork.closeLogFile(); fork.closeLogFile();
return prevResults; return prevResults;
} }
); );
if (typeUnifyTask.myIsCancelled()) { if (typeUnifyTask.isExecutionCancelled()) {
throw new UnifyCancelException(); return CompletableFuture.completedFuture(new ComputationResults());
} }
} }
@@ -144,7 +141,7 @@ public class Variance1Case extends VarianceCase {
) { ) {
int resOfCompare = typeUnifyTask.oup.compare(compResult, compRes); int resOfCompare = typeUnifyTask.oup.compare(compResult, compRes);
if (resOfCompare == -1) { if (resOfCompare == -1) {
writeLog("Geloescht result: " + result); context.logger().debug("Geloescht result: " + result);
result.clear(); result.clear();
result.addAll(currentThreadResult); result.addAll(currentThreadResult);
} }
@@ -152,7 +149,7 @@ public class Variance1Case extends VarianceCase {
result.addAll(currentThreadResult); result.addAll(currentThreadResult);
} }
else if (resOfCompare == 1) { else if (resOfCompare == 1) {
writeLog("Geloescht currentThreadResult: " + currentThreadResult); context.logger().debug("Geloescht currentThreadResult: " + currentThreadResult);
//result = result; //result = result;
} }
} }
@@ -163,35 +160,35 @@ public class Variance1Case extends VarianceCase {
Set<Set<UnifyPair>> aParDef, Set<Set<UnifyPair>> aParDef,
List<Set<UnifyPair>> nextSetAsList List<Set<UnifyPair>> nextSetAsList
) { ) {
// System.out.println(""); // context.logger().info("");
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString()); context.logger().debug("a: " + rekTiefe + " variance: " + variance + a.toString());
writeLog("aParDef: " + aParDef.toString()); context.logger().debug("aParDef: " + aParDef.toString());
aParDef.add(a); aParDef.add(a);
Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator(); Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator();
if (this.isOderConstraint) { if (this.isOderConstraint) {
nextSetAsList.removeAll(nextSetasListOderConstraints); nextSetAsList.removeAll(nextSetasListOderConstraints);
nextSetasListOderConstraints = new ArrayList<>(); nextSetasListOderConstraints = new ArrayList<>();
writeLog("Removed: " + nextSetasListOderConstraints); context.logger().debug("Removed: " + nextSetasListOderConstraints);
while (aParDefIt.hasNext()) { while (aParDefIt.hasNext()) {
Set<UnifyPair> a_new = aParDefIt.next(); Set<UnifyPair> a_new = aParDefIt.next();
List<Set<UnifyPair>> smallerSetasList = typeUnifyTask.oup.smallerThan(a_new, nextSetAsList); List<Set<UnifyPair>> smallerSetasList = typeUnifyTask.oup.smallerThan(a_new, nextSetAsList);
writeLog("smallerSetasList: " + smallerSetasList); context.logger().debug("smallerSetasList: " + smallerSetasList);
List<Set<UnifyPair>> notInherited = smallerSetasList.stream() List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
.filter(x -> !((Constraint<UnifyPair>) x).isInherited() && !((Constraint<UnifyPair>) x).isImplemented()) .filter(x -> !((Constraint<UnifyPair>) x).isInherited() && !((Constraint<UnifyPair>) x).isImplemented())
.collect(Collectors.toCollection(ArrayList::new)); .collect(Collectors.toCollection(ArrayList::new));
writeLog("notInherited: " + notInherited + "\n"); context.logger().debug("notInherited: " + notInherited + "\n");
List<Set<UnifyPair>> notErased = new ArrayList<>(); List<Set<UnifyPair>> notErased = new ArrayList<>();
notInherited.forEach(x -> { notInherited.forEach(x -> {
notErased.addAll(typeUnifyTask.oup.smallerEqThan(x, smallerSetasList)); notErased.addAll(typeUnifyTask.oup.smallerEqThan(x, smallerSetasList));
}); });
List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList); List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList);
writeLog("notErased: " + notErased + "\n"); context.logger().debug("notErased: " + notErased + "\n");
erased.removeAll(notErased); erased.removeAll(notErased);
nextSetAsList.removeAll(erased); nextSetAsList.removeAll(erased);
writeLog("Removed: " + erased); context.logger().debug("Removed: " + erased);
writeLog("Not Removed: " + nextSetAsList); context.logger().debug("Not Removed: " + nextSetAsList);
} }
} else { } else {
@@ -201,9 +198,9 @@ public class Variance1Case extends VarianceCase {
List<Set<UnifyPair>> erased = typeUnifyTask.oup.smallerEqThan(a_new, nextSetAsList); List<Set<UnifyPair>> erased = typeUnifyTask.oup.smallerEqThan(a_new, nextSetAsList);
nextSetAsList.removeAll(erased); nextSetAsList.removeAll(erased);
writeLog("Removed: " + erased); context.logger().debug("Removed: " + erased);
writeLog("Not Removed: " + nextSetAsList); context.logger().debug("Not Removed: " + nextSetAsList);
} }
} }
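A pattern worth noting in the diff above: cancellation no longer throws UnifyCancelException out of the future pipeline; instead, isExecutionCancelled() short-circuits by returning an empty or partial result, so downstream thenCombine stages complete normally rather than exceptionally. The standalone sketch below illustrates that style of cooperative cancellation; the flag, result type, and method names are editorial assumptions, not the project's API.

import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicBoolean;

// Editorial sketch: a cancelled step hands back an empty result instead of
// throwing, so already-chained stages still run and can merge partial results.
class CooperativeCancelSketch {
    static final AtomicBoolean cancelled = new AtomicBoolean(false);

    static CompletableFuture<Set<String>> compute(String item) {
        if (cancelled.get()) {
            // Short-circuit without completing the future exceptionally.
            return CompletableFuture.completedFuture(new HashSet<>());
        }
        Set<String> result = new HashSet<>();
        result.add(item);
        return CompletableFuture.completedFuture(result);
    }

    public static void main(String[] args) {
        CompletableFuture<Set<String>> combined =
                compute("left").thenCombine(compute("right"), (a, b) -> {
                    a.addAll(b);
                    return a;
                });
        System.out.println(combined.join()); // [left, right], or fewer once cancelled is set
    }
}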

View File

@@ -1,6 +1,5 @@
package de.dhbwstuttgart.typeinference.unify.cartesianproduct; package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
import de.dhbwstuttgart.exceptions.UnifyCancelException;
import de.dhbwstuttgart.typeinference.constraints.Constraint; import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task; import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask; import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
@@ -17,11 +16,11 @@ import java.util.Set;
import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors; import java.util.stream.Collectors;
public class VarianceM1Case extends VarianceCase { public class CovarianceCase extends VarianceCase {
protected final int variance = -1; protected final int variance = -1;
protected VarianceM1Case(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) { protected CovarianceCase(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
super(isOderConstraint, typeUnifyTask, context); super(isOderConstraint, typeUnifyTask, context);
} }
@@ -32,11 +31,11 @@ public class VarianceM1Case extends VarianceCase {
Optional<UnifyPair> optOrigPair Optional<UnifyPair> optOrigPair
) { ) {
a = typeUnifyTask.oup.min(nextSetAsList.iterator()); a = typeUnifyTask.oup.min(nextSetAsList.iterator());
writeLog("Min: a in " + variance + " " + a); context.logger().debug(() -> "Min: a in " + variance + " " + a);
if (this.isOderConstraint) { if (this.isOderConstraint) {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint()); nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
} }
writeLog("nextSetasListOderConstraints -1: " + nextSetasListOderConstraints); context.logger().debug(() -> "nextSetasListOderConstraints -1: " + nextSetasListOderConstraints);
nextSetAsList.remove(a); nextSetAsList.remove(a);
//Alle minimalen Elemente in nextSetasListRest bestimmen //Alle minimalen Elemente in nextSetasListRest bestimmen
@@ -49,7 +48,7 @@ public class VarianceM1Case extends VarianceCase {
@Override @Override
public CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> computeParallel( public CompletableFuture<ComputationResults> computeParallel(
Set<Set<UnifyPair>> elems, Set<Set<UnifyPair>> elems,
Set<UnifyPair> eq, Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints, List<Set<Constraint<UnifyPair>>> oderConstraints,
@@ -61,10 +60,6 @@ public class VarianceM1Case extends VarianceCase {
Set<Set<UnifyPair>> result, Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> aParDef Set<Set<UnifyPair>> aParDef
) { ) {
CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> resultValues = CompletableFuture.completedFuture(new Tuple<>(
new HashSet<>(), new HashSet<>()
));
Set<UnifyPair> newEqOrig = new HashSet<>(eq); Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems); Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints); List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
@@ -72,28 +67,29 @@ public class VarianceM1Case extends VarianceCase {
/* FORK ANFANG */ /* FORK ANFANG */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, methodSignatureConstraint); TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, methodSignatureConstraint);
typeUnifyTask.addChildTask(forkOrig);
// schedule compute() on another thread // schedule compute() on another thread
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f); CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
resultValues = resultValues.thenCombine(forkOrigFuture, CompletableFuture<ComputationResults> resultValues = forkOrigFuture.thenApply(
(prevResults, currentThreadResult) -> { (currentThreadResult) -> {
forkOrig.writeLog("final Orig -1"); forkOrig.context.logger().debug("final Orig -1");
forkOrig.closeLogFile(); forkOrig.closeLogFile();
return new Tuple<>(currentThreadResult, prevResults.getSecond()); return new ComputationResults(currentThreadResult);
}); });
//forks.add(forkOrig); //forks.add(forkOrig);
if (typeUnifyTask.myIsCancelled()) { if (typeUnifyTask.isExecutionCancelled()) {
throw new UnifyCancelException(); return resultValues;
} }
/* FORK ENDE */ /* FORK ENDE */
writeLog("a in " + variance + " " + a); context.logger().debug(() -> "a in " + variance + " " + a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString()); context.logger().debug(() -> "nextSetasListRest: " + nextSetasListRest.toString());
while (!nextSetasListRest.isEmpty()) { while (!nextSetasListRest.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasListRest.removeFirst(); Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
nextSetAsList.remove(nSaL); nextSetAsList.remove(nSaL);
writeLog("-1 RM" + nSaL.toString()); context.logger().debug(() -> "-1 RM" + nSaL.toString());
if (!this.isOderConstraint) { if (!this.isOderConstraint) {
//ueberpruefung ob zu a =. ty \in nSaL in sameEqSet ein Widerspruch besteht //ueberpruefung ob zu a =. ty \in nSaL in sameEqSet ein Widerspruch besteht
@@ -109,27 +105,28 @@ public class VarianceM1Case extends VarianceCase {
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints); List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL); newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint)); TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
typeUnifyTask.addChildTask(fork);
// schedule compute() on another thread // schedule compute() on another thread
CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f); CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
resultValues = resultValues.thenCombine(forkFuture, resultValues = resultValues.thenCombine(forkFuture,
(prevResults, fork_res) -> { (prevResults, fork_res) -> {
if (typeUnifyTask.myIsCancelled()) { if (typeUnifyTask.isExecutionCancelled()) {
throw new UnifyCancelException(); return prevResults;
} }
writeLog("fork_res: " + fork_res.toString()); context.logger().debug(() -> "fork_res: " + fork_res.toString());
writeLog(Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString()); context.logger().debug(() -> Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString());
prevResults.getSecond().add(fork_res); prevResults.addForkResult(fork_res);
if (!typeUnifyTask.isUndefinedPairSetSet(fork_res)) { if (!typeUnifyTask.isUndefinedPairSetSet(fork_res)) {
aParDef.add(fork.getNextSetElement()); aParDef.add(fork.getNextSetElement());
} }
fork.writeLog("final -1"); fork.context.logger().debug("final -1");
fork.closeLogFile(); fork.closeLogFile();
return prevResults; return prevResults;
} }
); );
if (typeUnifyTask.myIsCancelled()) { if (typeUnifyTask.isExecutionCancelled()) {
throw new UnifyCancelException(); return resultValues;
} }
} }
@@ -146,13 +143,13 @@ public class VarianceM1Case extends VarianceCase {
) { ) {
int resOfCompare = typeUnifyTask.oup.compare(compResult, compRes); int resOfCompare = typeUnifyTask.oup.compare(compResult, compRes);
if (resOfCompare == 1) { if (resOfCompare == 1) {
writeLog("Geloescht result: " + result); context.logger().debug(() -> "Geloescht result: " + result);
result.clear(); result.clear();
result.addAll(currentThreadResult); result.addAll(currentThreadResult);
} else if (resOfCompare == 0) { } else if (resOfCompare == 0) {
result.addAll(currentThreadResult); result.addAll(currentThreadResult);
} else if (resOfCompare == -1) { } else if (resOfCompare == -1) {
writeLog("Geloescht currentThreadResult: " + currentThreadResult); context.logger().debug(() -> "Geloescht currentThreadResult: " + currentThreadResult);
//result = result; //result = result;
} }
} }
@@ -164,14 +161,14 @@ public class VarianceM1Case extends VarianceCase {
List<Set<UnifyPair>> nextSetAsList List<Set<UnifyPair>> nextSetAsList
) { ) {
// System.out.println(""); // context.logger().info("");
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString()); context.logger().debug(() -> "a: " + rekTiefe + " variance: " + variance + a.toString());
writeLog("aParDef: " + aParDef.toString()); context.logger().debug(() -> "aParDef: " + aParDef.toString());
aParDef.add(a); aParDef.add(a);
Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator(); Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator();
if (this.isOderConstraint) { if (this.isOderConstraint) {
nextSetAsList.removeAll(nextSetasListOderConstraints); nextSetAsList.removeAll(nextSetasListOderConstraints);
writeLog("Removed: " + nextSetasListOderConstraints); context.logger().debug(() -> "Removed: " + nextSetasListOderConstraints);
nextSetasListOderConstraints = new ArrayList<>(); nextSetasListOderConstraints = new ArrayList<>();
while (aParDefIt.hasNext()) { while (aParDefIt.hasNext()) {
Set<UnifyPair> a_new = aParDefIt.next(); Set<UnifyPair> a_new = aParDefIt.next();
@@ -204,9 +201,9 @@ public class VarianceM1Case extends VarianceCase {
erased.removeAll(notErased); erased.removeAll(notErased);
nextSetAsList.removeAll(erased); nextSetAsList.removeAll(erased);
writeLog("Removed: " + erased); context.logger().debug(() -> "Removed: " + erased);
writeLog("Not Removed: " + nextSetAsList); context.logger().debug(() -> "Not Removed: " + nextSetAsList);
} }
} else { } else {
@@ -217,9 +214,9 @@ public class VarianceM1Case extends VarianceCase {
nextSetAsList.removeAll(erased); nextSetAsList.removeAll(erased);
writeLog("Removed: " + erased); context.logger().debug(() -> "Removed: " + erased);
writeLog("Not Removed: " + nextSetAsList); context.logger().debug(() -> "Not Removed: " + nextSetAsList);
} }
} }
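Throughout these variance cases, the former Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>> pair is replaced by a dedicated ComputationResults value carrying the current thread's result plus the collected fork results. The sketch below shows a plausible shape for such a holder; it is an editorial reconstruction based only on the calls visible in this diff (mainResult, forkResults, addForkResult), not the actual class.

import java.util.HashSet;
import java.util.Set;

// Editorial reconstruction of a small result holder like ComputationResults:
// one main result set plus the results gathered from forked tasks.
class ComputationResultsSketch<P> {
    final Set<Set<P>> mainResult;
    final Set<Set<Set<P>>> forkResults = new HashSet<>();

    // Empty holder, e.g. when the computation was cancelled early.
    ComputationResultsSketch() {
        this.mainResult = new HashSet<>();
    }

    // Holder for the result computed on the current thread.
    ComputationResultsSketch(Set<Set<P>> mainResult) {
        this.mainResult = mainResult;
    }

    // Collect one forked task's result for later merging.
    void addForkResult(Set<Set<P>> forkResult) {
        forkResults.add(forkResult);
    }
}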

View File

@@ -1,6 +1,5 @@
package de.dhbwstuttgart.typeinference.unify.cartesianproduct; package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
import de.dhbwstuttgart.exceptions.UnifyCancelException;
import de.dhbwstuttgart.typeinference.constraints.Constraint; import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task; import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask; import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
@@ -15,11 +14,12 @@ import java.util.Optional;
import java.util.Set; import java.util.Set;
import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletableFuture;
public class Variance2Case extends VarianceCase { public class InvarianceOrConstraintCase extends VarianceCase {
// either for invariance or for oderConstraints
protected final int variance = 2; protected final int variance = 2;
protected Variance2Case(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) { protected InvarianceOrConstraintCase(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
super(isOderConstraint, typeUnifyTask, context); super(isOderConstraint, typeUnifyTask, context);
} }
@@ -37,7 +37,7 @@ public class Variance2Case extends VarianceCase {
@Override @Override
public CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> computeParallel( public CompletableFuture<ComputationResults> computeParallel(
Set<Set<UnifyPair>> elems, Set<Set<UnifyPair>> elems,
Set<UnifyPair> eq, Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints, List<Set<Constraint<UnifyPair>>> oderConstraints,
@@ -49,9 +49,7 @@ public class Variance2Case extends VarianceCase {
Set<Set<UnifyPair>> result, Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> aParDef Set<Set<UnifyPair>> aParDef
) { ) {
CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> resultValuesFuture; context.logger().debug("var2einstieg");
writeLog("var2einstieg");
Set<TypeUnify2Task> forks = new HashSet<>(); Set<TypeUnify2Task> forks = new HashSet<>();
Set<UnifyPair> newEqOrig = new HashSet<>(eq); Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems); Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
@@ -60,19 +58,21 @@ public class Variance2Case extends VarianceCase {
/* FORK ANFANG */ /* FORK ANFANG */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint)); TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
typeUnifyTask.addChildTask(forkOrig);
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f); CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
resultValuesFuture = forkOrigFuture.thenApply((currentThreadResult) -> { CompletableFuture<ComputationResults> resultValues = forkOrigFuture.thenApply((currentThreadResult) -> {
forkOrig.writeLog("final Orig 2"); forkOrig.context.logger().debug("final Orig 2");
forkOrig.closeLogFile(); forkOrig.closeLogFile();
return new Tuple<>(currentThreadResult, new HashSet<>()); return new ComputationResults(currentThreadResult);
}); });
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException(); if (typeUnifyTask.isExecutionCancelled()) {
return resultValues;
} }
/* FORK ENDE */ /* FORK ENDE */
writeLog("a in " + variance + " " + a); context.logger().debug(() -> "a in " + variance + " " + a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString()); context.logger().debug(() -> "nextSetasListRest: " + nextSetasListRest.toString());
//Fuer parallele Berechnung der Oder-Contraints wird methodSignature kopiert //Fuer parallele Berechnung der Oder-Contraints wird methodSignature kopiert
//und jeweils die methodSignature von a bzw. nSaL wieder gelöscht, wenn es keine Lösung ist. //und jeweils die methodSignature von a bzw. nSaL wieder gelöscht, wenn es keine Lösung ist.
@@ -89,27 +89,24 @@ public class Variance2Case extends VarianceCase {
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints); List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL); newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraintForParallel)); TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraintForParallel));
typeUnifyTask.addChildTask(fork);
CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f); CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
resultValuesFuture = resultValuesFuture.thenCombine(forkFuture, (resultValues, fork_res) -> { resultValues = resultValues.thenCombine(forkFuture, (prevResults, fork_res) -> {
if (typeUnifyTask.myIsCancelled()) { if (typeUnifyTask.isExecutionCancelled()) {
throw new UnifyCancelException(); return prevResults;
} }
resultValues.getSecond().add(fork_res); prevResults.addForkResult(fork_res);
fork.writeLog("final 2"); fork.context.logger().debug("final 2");
fork.closeLogFile(); fork.closeLogFile();
return resultValues; return prevResults;
}); });
if (typeUnifyTask.myIsCancelled()) { if (typeUnifyTask.isExecutionCancelled()) {
throw new UnifyCancelException(); return resultValues;
} }
} }
if (typeUnifyTask.myIsCancelled()) { return resultValues;
throw new UnifyCancelException();
}
return resultValuesFuture;
} }
@Override @Override
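The change above swaps the throwing cancellation check (myIsCancelled() plus UnifyCancelException) for isExecutionCancelled(), which stops scheduling further forks and hands back whatever has been combined so far. A minimal sketch of that "keep the partial result instead of aborting" pattern, with a plain AtomicBoolean standing in for isExecutionCancelled() and a toy workload (names and workload are illustrative, not the project's API):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.CompletableFuture;
    import java.util.concurrent.atomic.AtomicBoolean;

    class PartialCancellationSketch {
        // Stand-in for isExecutionCancelled(); the real flag lives in TypeUnifyTask.
        static final AtomicBoolean cancelled = new AtomicBoolean(false);

        static CompletableFuture<List<Integer>> fanOut(List<Integer> inputs) {
            CompletableFuture<List<Integer>> acc =
                    CompletableFuture.completedFuture(new ArrayList<Integer>());
            for (Integer input : inputs) {
                CompletableFuture<Integer> fork =
                        CompletableFuture.supplyAsync(() -> input * input);
                acc = acc.thenCombine(fork, (results, forkResult) -> {
                    if (cancelled.get()) {
                        return results;      // keep what we have, do not merge further
                    }
                    results.add(forkResult);
                    return results;
                });
                if (cancelled.get()) {
                    return acc;              // stop scheduling additional forks
                }
            }
            return acc;
        }
    }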

View File

@@ -0,0 +1,241 @@
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
import de.dhbwstuttgart.exceptions.UnifyCancelException;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.util.Tuple;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;
public class UnknownVarianceCase extends VarianceCase {
protected final int variance = 0;
protected final AtomicBoolean shouldBreak = new AtomicBoolean(false);
protected UnknownVarianceCase(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
super(isOderConstraint, typeUnifyTask, context);
}
@Override
public void selectNextData(
TypeUnifyTask typeUnifyTask,
List<Set<UnifyPair>> nextSetAsList,
Optional<UnifyPair> optOrigPair
) {
//wenn a <. theta dann ist ein maximales Element sehr wahrscheinlich
//wenn theta <. a dann ist ein minimales Element sehr wahrscheinlich
if (!this.isOderConstraint && optOrigPair != null && optOrigPair.isPresent()) {
if (optOrigPair.get().getBasePair().getLhsType() instanceof PlaceholderType) {
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
} else {
a = typeUnifyTask.oup.min(nextSetAsList.iterator());
}
nextSetAsList.remove(a);
} else if (this.isOderConstraint) {
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
nextSetAsList.remove(a);
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
} else {
a = nextSetAsList.removeFirst();
}
Set<UnifyPair> finalA = a;
if (!this.isOderConstraint && optOrigPair != null && optOrigPair.isPresent()) {
if (optOrigPair.get().getBasePair().getLhsType() instanceof PlaceholderType) {
nextSetasListRest = typeUnifyTask.oup.maxElements(
nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != 1).toList()
);
} else {
nextSetasListRest = typeUnifyTask.oup.minElements(
nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != -1).toList()
);
}
} else if (this.isOderConstraint) {
nextSetasListRest = typeUnifyTask.oup.maxElements(
nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != 1).toList()
);
} else {
nextSetasListRest = (nextSetAsList.size() > 5) ? nextSetAsList.subList(0, 5) : nextSetAsList;
}
nextSetAsList.removeAll(nextSetasListRest);
// */
}
@Override
public CompletableFuture<ComputationResults> computeParallel(
Set<Set<UnifyPair>> elems,
Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
IFiniteClosure fc,
int rekTiefe,
Set<UnifyPair> methodSignatureConstraint,
List<Set<UnifyPair>> nextSetAsList,
Set<UnifyPair> sameEqSet,
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> aParDef
) {
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
newElemsOrig.add(a);
Set<UnifyPair> newMethodSignatureConstraintOrig = new HashSet<>(methodSignatureConstraint);
if (isOderConstraint) {
methodSignatureConstraint.addAll(((Constraint<UnifyPair>) a).getmethodSignatureConstraint());
}
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, newMethodSignatureConstraintOrig);
typeUnifyTask.addChildTask(forkOrig);
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
CompletableFuture<ComputationResults> resultValues = forkOrigFuture.thenApply(
(currentThreadResult) -> {
forkOrig.context.logger().debug("final Orig 0");
forkOrig.closeLogFile();
return new ComputationResults(currentThreadResult);
});
int i = 0;
Set<Set<UnifyPair>>[] additionalResults = new HashSet[nextSetasListRest.size()];
Constraint<UnifyPair>[] extendConstraints = new Constraint[nextSetasListRest.size()];
while (!nextSetasListRest.isEmpty()) {
final int finalI = i++;
Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
context.logger().debug(() -> "0 RM" + nSaL.toString());
if (this.isOderConstraint) {
Constraint<UnifyPair> extendConstraint = ((Constraint<UnifyPair>) nSaL).getExtendConstraint();
extendConstraints[finalI] = extendConstraint;
}
else if (!sameEqSet.isEmpty() && !typeUnifyTask.checkNoContradiction(nSaL, sameEqSet, result)) {
TypeUnifyTask.noShortendElements++;
continue;
}
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
Set<UnifyPair> newMethodSignatureConstraint = new HashSet<>(methodSignatureConstraint);
if (isOderConstraint) {
methodSignatureConstraint.addAll(((Constraint<UnifyPair>) nSaL).getmethodSignatureConstraint());
}
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, newMethodSignatureConstraint);
typeUnifyTask.addChildTask(fork);
// schedule compute() on another thread
CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
resultValues = resultValues.thenCombine(forkFuture, (compResult, forkResult) -> {
additionalResults[finalI] = forkResult;
context.logger().error("finalI: " + finalI);
return compResult;
});
}
int finalI1 = i;
return resultValues.thenCompose(compResult -> {
var oldResult = compResult.mainResult;
for (int e = 0; e < finalI1; e++) {
Set<Set<UnifyPair>> currentResult = additionalResults[e];
boolean oldResultInvalid = typeUnifyTask.isUndefinedPairSetSet(oldResult);
boolean currentResultInvalid = typeUnifyTask.isUndefinedPairSetSet(currentResult);
if (!oldResult.isEmpty() && !oldResultInvalid) {
boolean shouldBreak = this.eraseInvalidSets(rekTiefe, new HashSet<>(), nextSetAsList);
if (shouldBreak) {
return CompletableFuture.completedFuture(compResult);
}
}
if (this.isOderConstraint) {
nextSetasListOderConstraints.add(extendConstraints[e]);
}
if (!currentResultInvalid && oldResultInvalid) {
//wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen
oldResult = currentResult;
} else if (oldResultInvalid == currentResultInvalid || oldResult.isEmpty()) {
//alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden
Set<Set<UnifyPair>> finalOldResult = oldResult;
context.logger().debug(() -> "RES var1 ADD:" + finalOldResult.toString() + " " + currentResult.toString());
oldResult.addAll(currentResult);
}
}
compResult.mainResult = oldResult;
return CompletableFuture.completedFuture(compResult);
});
}
@Override
public void applyComputedResults(
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> currentThreadResult,
Set<UnifyPair> compResult,
Set<UnifyPair> compRes
) {
context.logger().debug("RES var=0 ADD:" + result.toString() + " " + currentThreadResult.toString());
result.addAll(currentThreadResult);
}
@Override
public boolean eraseInvalidSets(
int rekTiefe,
Set<Set<UnifyPair>> aParDef,
List<Set<UnifyPair>> nextSetAsList
) {
if (!this.isOderConstraint) {
return true;
} else {
nextSetAsList.removeAll(nextSetasListOderConstraints);
nextSetasListOderConstraints = new ArrayList<>();
context.logger().debug("Removed: " + nextSetasListOderConstraints);
List<Set<UnifyPair>> smallerSetasList = typeUnifyTask.oup.smallerThan(a, nextSetAsList);
List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
.filter(x -> !((Constraint<UnifyPair>) x).isInherited())
.collect(Collectors.toCollection(ArrayList::new));
List<Set<UnifyPair>> notErased = new ArrayList<>();
notInherited.forEach(x -> notErased.addAll(typeUnifyTask.oup.smallerEqThan(x, smallerSetasList)));
List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList);
erased.removeAll(notErased);
nextSetAsList.removeAll(erased);
context.logger().debug("Removed: " + erased);
context.logger().debug("Not Removed: " + nextSetAsList);
for (Set<UnifyPair> aPar : aParDef) {
smallerSetasList.clear();
smallerSetasList.addAll(typeUnifyTask.oup.smallerThan(aPar, nextSetAsList));
notInherited = smallerSetasList.stream()
.filter(x -> !((Constraint<UnifyPair>) x).isInherited())
.collect(Collectors.toCollection(ArrayList::new));
notErased.clear();
notInherited.forEach(x -> notErased.addAll(typeUnifyTask.oup.smallerEqThan(x, smallerSetasList)));
erased = new ArrayList<>(smallerSetasList);
erased.removeAll(notErased);
nextSetAsList.removeAll(erased);
context.logger().debug("Removed: " + erased);
context.logger().debug("Not Removed: " + nextSetAsList);
}
}
return false;
}
}
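The new UnknownVarianceCase forks one TypeUnify2Task per remaining alternative, parks each fork's outcome in a slot of additionalResults via thenCombine, and merges everything once in the final thenCompose. A reduced sketch of the same fan-out / fan-in shape with strings instead of UnifyPair sets (all names are illustrative):

    import java.util.List;
    import java.util.concurrent.CompletableFuture;

    class FanOutFanInSketch {
        static CompletableFuture<String> compute(List<String> alternatives) {
            String[] slots = new String[alternatives.size()];
            CompletableFuture<String> main =
                    CompletableFuture.supplyAsync(() -> "main-result");
            for (int i = 0; i < alternatives.size(); i++) {
                final int slot = i;
                String alt = alternatives.get(i);
                CompletableFuture<String> fork =
                        CompletableFuture.supplyAsync(() -> "result-of-" + alt);
                // thenCombine only records the fork result; merging happens once, at the end.
                main = main.thenCombine(fork, (current, forkResult) -> {
                    slots[slot] = forkResult;
                    return current;
                });
            }
            return main.thenCompose(current -> {
                StringBuilder merged = new StringBuilder(current);
                for (String s : slots) {
                    if (s != null) merged.append(" | ").append(s);
                }
                return CompletableFuture.completedFuture(merged.toString());
            });
        }
    }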

View File

@@ -1,112 +0,0 @@
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.util.Tuple;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;
public class Variance0Case extends VarianceCase {
protected final int variance = 0;
protected Variance0Case(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
super(isOderConstraint, typeUnifyTask, context);
}
@Override
public void selectNextData(
TypeUnifyTask typeUnifyTask,
List<Set<UnifyPair>> nextSetAsList,
Optional<UnifyPair> optOrigPair
) {
//wenn a <. theta dann ist ein maximales Element sehr wahrscheinlich
//wenn theta <. a dann ist ein minimales Element sehr wahrscheinlich
if (!this.isOderConstraint && optOrigPair != null && optOrigPair.isPresent()) {
if (optOrigPair.get().getBasePair().getLhsType() instanceof PlaceholderType) {
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
} else {
a = typeUnifyTask.oup.min(nextSetAsList.iterator());
}
nextSetAsList.remove(a);
} else if (this.isOderConstraint) {
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
nextSetAsList.remove(a);
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
} else {
a = nextSetAsList.removeFirst();
}
}
@Override
public CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> computeParallel(
Set<Set<UnifyPair>> elems,
Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
IFiniteClosure fc,
int rekTiefe,
Set<UnifyPair> methodSignatureConstraint,
List<Set<UnifyPair>> nextSetAsList,
Set<UnifyPair> sameEqSet,
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> aParDef
) {
elems.add(a); //PL 2019-01-16 muss das wirklich hin steht schon in Zeile 859 ja braucht man siehe Zeile 859
return typeUnifyTask.unify2(elems, eq, oderConstraints, fc, context.parallel(), rekTiefe, new HashSet<>(methodSignatureConstraint)).thenApply(
unify2Result -> new Tuple<>(unify2Result, new HashSet<>())
);
}
@Override
public void applyComputedResults(
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> currentThreadResult,
Set<UnifyPair> compResult,
Set<UnifyPair> compRes
) {
writeLog("RES var=1 ADD:" + result.toString() + " " + currentThreadResult.toString());
result.addAll(currentThreadResult);
}
@Override
public boolean eraseInvalidSets(
int rekTiefe,
Set<Set<UnifyPair>> aParDef,
List<Set<UnifyPair>> nextSetAsList
) {
if (!this.isOderConstraint) {
return true;
} else {
nextSetAsList.removeAll(nextSetasListOderConstraints);
nextSetasListOderConstraints = new ArrayList<>();
writeLog("Removed: " + nextSetasListOderConstraints);
List<Set<UnifyPair>> smallerSetasList = typeUnifyTask.oup.smallerThan(a, nextSetAsList);
List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
.filter(x -> !((Constraint<UnifyPair>) x).isInherited())
.collect(Collectors.toCollection(ArrayList::new));
List<Set<UnifyPair>> notErased = new ArrayList<>();
notInherited.forEach(x -> notErased.addAll(typeUnifyTask.oup.smallerEqThan(x, smallerSetasList)));
List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList);
erased.removeAll(notErased);
nextSetAsList.removeAll(erased);
writeLog("Removed: " + erased);
writeLog("Not Removed: " + nextSetAsList);
}
return false;
}
}
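For comparison, the removed Variance0Case ran the 0-variance alternative inline: it added a to elems, delegated to unify2 and merely adapted the resulting future into the old Tuple shape, so no extra forks were created. A compact sketch of that older "delegate and adapt" shape with placeholder types (Outcome and the string sets are stand-ins, not the project's classes):

    import java.util.HashSet;
    import java.util.Set;
    import java.util.concurrent.CompletableFuture;

    class InlineDelegationSketch {
        // Stand-in for the old Tuple<result, forkResults> return shape.
        record Outcome(Set<String> result, Set<Set<String>> forkResults) {}

        // Placeholder for TypeUnifyTask.unify2(...), which already returns a future.
        static CompletableFuture<Set<String>> unify2(Set<String> elems) {
            return CompletableFuture.supplyAsync(() -> new HashSet<>(elems));
        }

        // Old 0-variance shape: add the chosen alternative, delegate, adapt the result type.
        static CompletableFuture<Outcome> computeSequential(Set<String> elems, String a) {
            Set<String> withA = new HashSet<>(elems);
            withA.add(a);
            return unify2(withA).thenApply(res -> new Outcome(res, new HashSet<>()));
        }
    }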

View File

@@ -5,9 +5,9 @@ import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.UnifyContext; import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure; import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair; import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.util.Logger;
import de.dhbwstuttgart.util.Tuple; import de.dhbwstuttgart.util.Tuple;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Optional; import java.util.Optional;
import java.util.Set; import java.util.Set;
@@ -17,11 +17,11 @@ public abstract class VarianceCase {
public static VarianceCase createFromVariance(int variance, boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) { public static VarianceCase createFromVariance(int variance, boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
return switch (variance) { return switch (variance) {
case 0 -> new Variance0Case(isOderConstraint, typeUnifyTask, context); case 0 -> new UnknownVarianceCase(isOderConstraint, typeUnifyTask, context);
case 1 -> new Variance1Case(isOderConstraint, typeUnifyTask, context); case 1 -> new ContravarianceCase(isOderConstraint, typeUnifyTask, context);
case -1 -> new VarianceM1Case(isOderConstraint, typeUnifyTask, context); case -1 -> new CovarianceCase(isOderConstraint, typeUnifyTask, context);
case 2 -> new Variance2Case(isOderConstraint, typeUnifyTask, context); case 2 -> new InvarianceOrConstraintCase(isOderConstraint, typeUnifyTask, context);
default -> throw new RuntimeException("Invalid variance: " + variance); default -> throw new RuntimeException("Invalid variance: " + variance);
}; };
} }
@@ -72,7 +72,7 @@ public abstract class VarianceCase {
/** /**
* *
*/ */
public abstract CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> computeParallel( public abstract CompletableFuture<ComputationResults> computeParallel(
Set<Set<UnifyPair>> elems, Set<Set<UnifyPair>> elems,
Set<UnifyPair> eq, Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints, List<Set<Constraint<UnifyPair>>> oderConstraints,
@@ -105,8 +105,28 @@ public abstract class VarianceCase {
List<Set<UnifyPair>> nextSetAsList List<Set<UnifyPair>> nextSetAsList
); );
protected void writeLog(String s) { /**
typeUnifyTask.writeLog(s); * Wrapper class for the parallel computation results
} */
public static class ComputationResults {
public Set<Set<UnifyPair>> mainResult;
public Set<Set<Set<UnifyPair>>> forkResults;
public ComputationResults() {
this(new HashSet<>(), new HashSet<>());
}
public ComputationResults(Set<Set<UnifyPair>> mainResult) {
this(mainResult, new HashSet<>());
}
public ComputationResults(Set<Set<UnifyPair>> mainResult, Set<Set<Set<UnifyPair>>> forkResults) {
this.mainResult = mainResult;
this.forkResults = forkResults;
}
void addForkResult(Set<Set<UnifyPair>> forkResult) {
forkResults.add(forkResult);
}
}
} }
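ComputationResults names the two halves that were previously hidden in a Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>: the result of the current thread and the results contributed by forked tasks. A generic toy version of the same idea, showing how a caller might build and read it (the record and its methods are a sketch, not the class declared above):

    import java.util.HashSet;
    import java.util.Set;

    // One named main result plus any number of fork results, instead of an unnamed tuple.
    record Results<T>(T mainResult, Set<T> forkResults) {
        static <T> Results<T> of(T main) {
            return new Results<>(main, new HashSet<>());
        }
        Results<T> withFork(T forkResult) {
            forkResults.add(forkResult);
            return this;
        }
    }

    class ResultsDemo {
        public static void main(String[] args) {
            Results<String> r = Results.of("main").withFork("fork-1").withFork("fork-2");
            System.out.println(r.mainResult() + " + " + r.forkResults());
        }
    }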

View File

@@ -12,15 +12,12 @@ import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
public class distributeVariance extends visitUnifyTypeVisitor<Integer> { public class distributeVariance extends visitUnifyTypeVisitor<Integer> {
public static int inverseVariance(int variance) { public static int inverseVariance(int variance) {
Integer ret = 0; return switch (variance) {
if (variance == 1) { case 1 -> -1;
ret = -1; case -1 -> 1;
} default -> 0;
if (variance == -1) { };
ret = 1;
}
return ret;
} }
@@ -42,7 +39,7 @@ public class distributeVariance extends visitUnifyTypeVisitor<Integer> {
List<UnifyType> param = new ArrayList<>(funnty.getTypeParams().get().length); List<UnifyType> param = new ArrayList<>(funnty.getTypeParams().get().length);
param.addAll(Arrays.asList(funnty.getTypeParams().get())); param.addAll(Arrays.asList(funnty.getTypeParams().get()));
UnifyType resultType = param.remove(param.size()-1); UnifyType resultType = param.remove(param.size()-1);
Integer htInverse = inverseVariance(ht); int htInverse = inverseVariance(ht);
param = param.stream() param = param.stream()
.map(x -> x.accept(this, htInverse)) .map(x -> x.accept(this, htInverse))
.collect(Collectors.toCollection(ArrayList::new)); .collect(Collectors.toCollection(ArrayList::new));
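The rewritten inverseVariance is a total, side-effect-free mapping: covariant (1) and contravariant (-1) flip, every other value collapses to 0. In the hunk above it is used to visit the parameter types of a FunN type with the inverted variance after the result type has been split off. A quick standalone check of the mapping:

    class InverseVarianceCheck {
        static int inverseVariance(int variance) {
            return switch (variance) {
                case 1 -> -1;
                case -1 -> 1;
                default -> 0;
            };
        }

        public static void main(String[] args) {
            System.out.println(inverseVariance(1));   // -1
            System.out.println(inverseVariance(-1));  // 1
            System.out.println(inverseVariance(0));   // 0
            System.out.println(inverseVariance(2));   // 0
        }
    }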

View File

@@ -1,7 +1,7 @@
package de.dhbwstuttgart.typeinference.unify.model; package de.dhbwstuttgart.typeinference.unify.model;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage; import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap; import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext; import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.Set; import java.util.Set;
@@ -12,7 +12,7 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor;
/** /**
* An extends wildcard type "? extends T". * An extends wildcard type "? extends T".
*/ */
public final class ExtendsType extends WildcardType { public final class ExtendsType extends WildcardType implements ISerializableData {
public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) { public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) {
return visitor.visit(this, ht); return visitor.visit(this, ht);
@@ -24,9 +24,6 @@ public final class ExtendsType extends WildcardType {
*/ */
public ExtendsType(UnifyType extendedType) { public ExtendsType(UnifyType extendedType) {
super("? extends " + extendedType.getName(), extendedType); super("? extends " + extendedType.getName(), extendedType);
if (extendedType instanceof ExtendsType) {
System.out.print("");
}
} }
/** /**

View File

@@ -5,7 +5,9 @@ import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList; import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap; import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry; import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTaskHelper;
import de.dhbwstuttgart.typeinference.unify.UnifyContext; import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.util.Logger;
import java.io.IOException; import java.io.IOException;
import java.io.Writer; import java.io.Writer;
import java.util.ArrayList; import java.util.ArrayList;
@@ -42,7 +44,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
final JavaTXCompiler compiler; final JavaTXCompiler compiler;
final PlaceholderRegistry placeholderRegistry; final PlaceholderRegistry placeholderRegistry;
Writer logFile; Logger logger;
/** /**
* A map that maps every type to the node in the inheritance graph that contains that type. * A map that maps every type to the node in the inheritance graph that contains that type.
@@ -73,10 +75,10 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
/** /**
* Creates a new instance using the inheritance tree defined in the pairs. * Creates a new instance using the inheritance tree defined in the pairs.
*/ */
public FiniteClosure(Set<UnifyPair> pairs, Writer logFile, JavaTXCompiler compiler, PlaceholderRegistry placeholderRegistry) { public FiniteClosure(Set<UnifyPair> pairs, Logger logger, JavaTXCompiler compiler, PlaceholderRegistry placeholderRegistry) {
this.compiler = compiler; this.compiler = compiler;
this.placeholderRegistry = placeholderRegistry; this.placeholderRegistry = placeholderRegistry;
this.logFile = logFile; this.logger = logger;
this.pairs = new HashSet<>(pairs); this.pairs = new HashSet<>(pairs);
inheritanceGraph = new HashMap<UnifyType, Node<UnifyType>>(); inheritanceGraph = new HashMap<UnifyType, Node<UnifyType>>();
@@ -125,7 +127,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
} }
// Build the alternative representation with strings as keys // Build the alternative representation with strings as keys
strInheritanceGraph = new HashMap<>(); strInheritanceGraph = TypeUnifyTaskHelper.getPresizedHashMap(inheritanceGraph.size());
for(UnifyType key : inheritanceGraph.keySet()) { for(UnifyType key : inheritanceGraph.keySet()) {
if(!strInheritanceGraph.containsKey(key.getName())) if(!strInheritanceGraph.containsKey(key.getName()))
strInheritanceGraph.put(key.getName(), new HashSet<>()); strInheritanceGraph.put(key.getName(), new HashSet<>());
@@ -134,8 +136,8 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
} }
} }
public FiniteClosure(Set<UnifyPair> constraints, Writer writer, PlaceholderRegistry placeholderRegistry) { public FiniteClosure(Set<UnifyPair> constraints, Logger logger, PlaceholderRegistry placeholderRegistry) {
this(constraints, writer, null, placeholderRegistry); this(constraints, logger, null, placeholderRegistry);
} }
void testSmaller() { void testSmaller() {
@@ -159,7 +161,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
Set<UnifyType> ret; Set<UnifyType> ret;
if ((ret = smallerHash.get(new hashKeyType(type))) != null) { if ((ret = smallerHash.get(new hashKeyType(type))) != null) {
//System.out.println(greaterHash); //context.logger().info(greaterHash);
return new HashSet<>(ret); return new HashSet<>(ret);
} }
@@ -200,7 +202,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
result.add(new Pair<>(t, fBounded)); result.add(new Pair<>(t, fBounded));
} }
catch (StackOverflowError e) { catch (StackOverflowError e) {
// System.out.println(""); // context.logger().info("");
} }
// if C<...> <* C<...> then ... (third case in definition of <*) // if C<...> <* C<...> then ... (third case in definition of <*)
@@ -237,9 +239,9 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
result.add(new Pair<>(theta1.apply(sigma), fBounded)); result.add(new Pair<>(theta1.apply(sigma), fBounded));
} }
} }
HashSet<UnifyType> resut = result.stream().map(x -> x.getKey()).collect(Collectors.toCollection(HashSet::new)); HashSet<UnifyType> resut = result.stream().map(Pair::getKey).collect(Collectors.toCollection(HashSet::new));
if(resut.equals(types.stream().map(x -> x.getKey()).collect(Collectors.toCollection(HashSet::new)))) if(resut.equals(types.stream().map(Pair::getKey).collect(Collectors.toCollection(HashSet::new))))
return resut; return resut;
return computeSmaller(result); return computeSmaller(result);
} }
@@ -271,9 +273,9 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
@Override @Override
//Eingefuegt PL 2018-05-24 F-Bounded Problematik //Eingefuegt PL 2018-05-24 F-Bounded Problematik
public Set<UnifyType> greater(UnifyType type, Set<UnifyType> fBounded, SourceLoc location) { public Set<UnifyType> greater(UnifyType type, Set<UnifyType> fBounded, SourceLoc location) {
Set<UnifyType> ret; Set<UnifyType> ret = greaterHash.get(new hashKeyType(type));
if ((ret = greaterHash.get(new hashKeyType(type))) != null) { if (ret != null) {
//System.out.println(greaterHash); //context.logger().info(greaterHash);
return new HashSet<>(ret); return new HashSet<>(ret);
} }
@@ -323,7 +325,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
Set<UnifyType> fBoundedNew = new HashSet<>(fBounded); Set<UnifyType> fBoundedNew = new HashSet<>(fBounded);
fBoundedNew.add(theta1); fBoundedNew.add(theta1);
Set<UnifyType> theta2Set = candidate.getContentOfPredecessors(); Set<UnifyType> theta2Set = candidate.getContentOfPredecessors();
//System.out.println(""); //context.logger().info("");
for(UnifyType theta2 : theta2Set) { for(UnifyType theta2 : theta2Set) {
result.add(theta2.apply(sigma)); result.add(theta2.apply(sigma));
PairResultFBounded.add(new Pair<>(theta2.apply(sigma), fBoundedNew)); PairResultFBounded.add(new Pair<>(theta2.apply(sigma), fBoundedNew));
@@ -357,7 +359,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
return ((match.match(termList).isPresent()) || x); return ((match.match(termList).isPresent()) || x);
}; };
//if (parai.getName().equals("java.lang.Integer")) { //if (parai.getName().equals("java.lang.Integer")) {
// System.out.println(""); // context.logger().info("");
//} //}
BinaryOperator<Boolean> bo = (a,b) -> (a || b); BinaryOperator<Boolean> bo = (a,b) -> (a || b);
if (lfBounded.stream().reduce(false,f,bo)) { if (lfBounded.stream().reduce(false,f,bo)) {
@@ -370,7 +372,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
} }
} }
permuteParams(paramCandidates).forEach(x -> result.add(t.setTypeParams(x))); permuteParams(paramCandidates).forEach(x -> result.add(t.setTypeParams(x)));
//System.out.println(""); //context.logger().info("");
} }
} }
@@ -430,7 +432,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
return ((match.match(termList).isPresent()) || x); return ((match.match(termList).isPresent()) || x);
}; };
if (parai.getName().equals("java.lang.Integer")) { if (parai.getName().equals("java.lang.Integer")) {
System.out.println(""); context.logger().info("");
} }
BinaryOperator<Boolean> bo = (a,b) -> (a || b); BinaryOperator<Boolean> bo = (a,b) -> (a || b);
if (fBounded.stream().reduce(false,f,bo)) continue; //F-Bounded Endlosrekursion if (fBounded.stream().reduce(false,f,bo)) continue; //F-Bounded Endlosrekursion
@@ -469,7 +471,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
} }
HashSet<UnifyType> resut = result.stream().map(x -> x.getKey()).collect(Collectors.toCollection(HashSet::new)); HashSet<UnifyType> resut = result.stream().map(x -> x.getKey()).collect(Collectors.toCollection(HashSet::new));
System.out.println(resut); context.logger().info(resut);
if(resut.equals(types.stream().map(x -> x.getKey()).collect(Collectors.toCollection(HashSet::new)))) if(resut.equals(types.stream().map(x -> x.getKey()).collect(Collectors.toCollection(HashSet::new))))
return resut; return resut;
return computeGreater(result); return computeGreater(result);
@@ -505,35 +507,42 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
public Set<UnifyType> grArg(ReferenceType type, Set<UnifyType> fBounded) { public Set<UnifyType> grArg(ReferenceType type, Set<UnifyType> fBounded) {
Set<UnifyType> result = new HashSet<UnifyType>(); Set<UnifyType> result = new HashSet<UnifyType>();
result.add(type); result.add(type);
smaller(type, fBounded).forEach(x -> result.add(new SuperType(x))); smaller(type, fBounded).forEach(x -> result.add(new SuperType(x)));
greater(type,fBounded).forEach(x -> result.add(new ExtendsType(x))); greater(type,fBounded).forEach(x -> result.add(new ExtendsType(x)));
return result; return result;
} }
@Override @Override
public Set<UnifyType> grArg(FunNType type, Set<UnifyType> fBounded) { public Set<UnifyType> grArg(FunNType type, Set<UnifyType> fBounded) {
Set<UnifyType> result = new HashSet<UnifyType>(); Set<UnifyType> smaller = smaller(type, fBounded);
Set<UnifyType> greater = greater(type, fBounded);
Set<UnifyType> result = new HashSet<UnifyType>((int)((1 + smaller.size() + greater.size()) * 1.5));
result.add(type); result.add(type);
smaller(type, fBounded).forEach(x -> result.add(new SuperType(x))); smaller.forEach(x -> result.add(new SuperType(x)));
greater(type, fBounded).forEach(x -> result.add(new ExtendsType(x))); greater.forEach(x -> result.add(new ExtendsType(x)));
return result; return result;
} }
@Override @Override
public Set<UnifyType> grArg(ExtendsType type, Set<UnifyType> fBounded) { public Set<UnifyType> grArg(ExtendsType type, Set<UnifyType> fBounded) {
Set<UnifyType> result = new HashSet<UnifyType>();
result.add(type);
UnifyType t = type.getExtendedType(); UnifyType t = type.getExtendedType();
greater(t, fBounded).forEach(x -> result.add(new ExtendsType(x))); Set<UnifyType> greater = greater(t, fBounded);
Set<UnifyType> result = new HashSet<UnifyType>((int)((1 + greater.size()) * 1.5));
result.add(type);
greater.forEach(x -> result.add(new ExtendsType(x)));
return result; return result;
} }
@Override @Override
public Set<UnifyType> grArg(SuperType type, Set<UnifyType> fBounded) { public Set<UnifyType> grArg(SuperType type, Set<UnifyType> fBounded) {
Set<UnifyType> result = new HashSet<UnifyType>();
result.add(type);
UnifyType t = type.getSuperedType(); UnifyType t = type.getSuperedType();
smaller(t, fBounded).forEach(x -> result.add(new SuperType(x))); Set<UnifyType> smaller = smaller(t, fBounded);
Set<UnifyType> result = TypeUnifyTaskHelper.getPresizedHashSet(1 + smaller.size());
result.add(type);
smaller.forEach(x -> result.add(new SuperType(x)));
return result; return result;
} }
@@ -565,29 +574,33 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
@Override @Override
public Set<UnifyType> smArg(ExtendsType type, Set<UnifyType> fBounded) { public Set<UnifyType> smArg(ExtendsType type, Set<UnifyType> fBounded) {
Set<UnifyType> result = new HashSet<UnifyType>();
result.add(type);
UnifyType t = type.getExtendedType(); UnifyType t = type.getExtendedType();
result.add(t); Set<UnifyType> smaller = smaller(t, fBounded);
smaller(t, fBounded).forEach(x -> {
result.add(new ExtendsType(x)); Set<UnifyType> result = TypeUnifyTaskHelper.getPresizedHashSet(2 * (1 + smaller.size()));
result.add(x); result.add(type);
}); result.add(t);
smaller.forEach(x -> {
result.add(new ExtendsType(x));
result.add(x);
});
return result; return result;
} }
@Override @Override
public Set<UnifyType> smArg(SuperType type, Set<UnifyType> fBounded) { public Set<UnifyType> smArg(SuperType type, Set<UnifyType> fBounded) {
Set<UnifyType> result = new HashSet<UnifyType>();
result.add(type);
UnifyType t = type.getSuperedType(); UnifyType t = type.getSuperedType();
result.add(t); Set<UnifyType> greater = greater(t, fBounded);
//*** ACHTUNG das koennte FALSCH sein PL 2018-05-23 evtl. HashSet durch smArg durchschleifen
greater(t, fBounded).forEach(x -> { Set<UnifyType> result = TypeUnifyTaskHelper.getPresizedHashSet(2 * (1 + greater.size()));
result.add(new SuperType(x)); result.add(type);
result.add(x); result.add(t);
}); //*** ACHTUNG das koennte FALSCH sein PL 2018-05-23 evtl. HashSet durch smArg durchschleifen
greater.forEach(x -> {
result.add(new SuperType(x));
result.add(x);
});
return result; return result;
} }
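The grArg/smArg rewrites compute the smaller/greater sets first so that the result set can be allocated at its final size and never rehashes while being filled. The diff only calls TypeUnifyTaskHelper.getPresizedHashMap/getPresizedHashSet; a plausible implementation, assuming the default load factor of 0.75 (a sketch, not the project's actual helper):

    import java.util.HashMap;
    import java.util.HashSet;

    final class PresizedCollections {
        // Capacity chosen so that `expected` entries fit without triggering a resize
        // at the default load factor of 0.75.
        static <K, V> HashMap<K, V> getPresizedHashMap(int expected) {
            return new HashMap<>((int) (expected / 0.75f) + 1);
        }

        static <T> HashSet<T> getPresizedHashSet(int expected) {
            return new HashSet<>((int) (expected / 0.75f) + 1);
        }
    }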
@@ -602,7 +615,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
public Set<UnifyType> getAllTypesByName(String typeName) { public Set<UnifyType> getAllTypesByName(String typeName) {
if(!strInheritanceGraph.containsKey(typeName)) if(!strInheritanceGraph.containsKey(typeName))
return new HashSet<>(); return new HashSet<>();
return strInheritanceGraph.get(typeName).stream().map(x -> x.getContent()).collect(Collectors.toCollection(HashSet::new)); return strInheritanceGraph.get(typeName).stream().map(Node::getContent).collect(Collectors.toCollection(HashSet::new));
} }
@Override @Override
@@ -692,9 +705,9 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
*/ */
public int compare (UnifyType left, UnifyType right, PairOperator pairop, UnifyContext context) { public int compare (UnifyType left, UnifyType right, PairOperator pairop, UnifyContext context) {
try {logFile.write("left: "+ left + " right: " + right + " pairop: " + pairop +"\n");} catch (IOException ie) {} logger.debug(() -> "left: "+ left + " right: " + right + " pairop: " + pairop +"\n");
// if (left.getName().equals("Matrix") || right.getName().equals("Matrix")) // if (left.getName().equals("Matrix") || right.getName().equals("Matrix"))
// System.out.println(""); // context.logger().info("");
/* /*
pairop = PairOperator.SMALLERDOTWC; pairop = PairOperator.SMALLERDOTWC;
List<UnifyType> al = new ArrayList<>(); List<UnifyType> al = new ArrayList<>();
@@ -751,13 +764,8 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
Set<UnifyPair> smallerRes = unifyTask.applyTypeUnificationRules(hs, this); Set<UnifyPair> smallerRes = unifyTask.applyTypeUnificationRules(hs, this);
//if (left.getName().equals("Vector") || right.getName().equals("AbstractList")) //if (left.getName().equals("Vector") || right.getName().equals("AbstractList"))
{try { logger.debug(() -> "\nsmallerRes: " + smallerRes);//"smallerHash: " + greaterHash.toString());
logFile.write("\nsmallerRes: " + smallerRes);//"smallerHash: " + greaterHash.toString());
// logFile.flush();
}
catch (IOException e) {
System.err.println("no LogFile");}}
//Gleichungen der Form a <./=. Theta oder Theta <./=. a oder a <./=. b sind ok. //Gleichungen der Form a <./=. Theta oder Theta <./=. a oder a <./=. b sind ok.
Predicate<UnifyPair> delFun = x -> !((x.getLhsType() instanceof PlaceholderType || Predicate<UnifyPair> delFun = x -> !((x.getLhsType() instanceof PlaceholderType ||
x.getRhsType() instanceof PlaceholderType) x.getRhsType() instanceof PlaceholderType)
@@ -765,12 +773,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
((WildcardType)x.getLhsType()).getWildcardedType().equals(x.getRhsType())) ((WildcardType)x.getLhsType()).getWildcardedType().equals(x.getRhsType()))
); );
long smallerLen = smallerRes.stream().filter(delFun).count(); long smallerLen = smallerRes.stream().filter(delFun).count();
try { logger.debug(() -> "\nsmallerLen: " + smallerLen +"\n");
logFile.write("\nsmallerLen: " + smallerLen +"\n");
// logFile.flush();
}
catch (IOException e) {
System.err.println("no LogFile");}
if (smallerLen == 0) return -1; if (smallerLen == 0) return -1;
else { else {
up = new UnifyPair(right, left, pairop); up = new UnifyPair(right, left, pairop);
@@ -780,13 +783,8 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
Set<UnifyPair> greaterRes = unifyTask.applyTypeUnificationRules(hs, this); Set<UnifyPair> greaterRes = unifyTask.applyTypeUnificationRules(hs, this);
//if (left.getName().equals("Vector") || right.getName().equals("AbstractList")) //if (left.getName().equals("Vector") || right.getName().equals("AbstractList"))
{try { logger.debug(() -> "\ngreaterRes: " + greaterRes);//"smallerHash: " + greaterHash.toString());
logFile.write("\ngreaterRes: " + greaterRes);//"smallerHash: " + greaterHash.toString());
// logFile.flush();
}
catch (IOException e) {
System.err.println("no LogFile");}}
//Gleichungen der Form a <./=. Theta oder Theta <./=. a oder a <./=. b sind ok. //Gleichungen der Form a <./=. Theta oder Theta <./=. a oder a <./=. b sind ok.
long greaterLen = greaterRes.stream().filter(delFun).count(); long greaterLen = greaterRes.stream().filter(delFun).count();
if (greaterLen == 0) return 1; if (greaterLen == 0) return 1;
@@ -800,17 +798,6 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
public SerialMap toSerial(KeyStorage keyStorage) { public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap(); SerialMap serialized = new SerialMap();
serialized.put("pairs", SerialList.fromMapped(this.pairs, unifyPair -> unifyPair.toSerial(keyStorage))); serialized.put("pairs", SerialList.fromMapped(this.pairs, unifyPair -> unifyPair.toSerial(keyStorage)));
/*/
if (serialized != null) {
throw new RuntimeException(
"Check both: \n"
+ "-> " + this.pairs.toArray()[51] + "\n"
+ "-> " + this.pairs.toArray()[65] + "\n"
);
}
//*/
return serialized; return serialized;
} }
@@ -818,7 +805,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
var pairList = data.getList("pairs").assertListOfUUIDs(); var pairList = data.getList("pairs").assertListOfUUIDs();
Set<UnifyPair> pairs = pairList.stream() Set<UnifyPair> pairs = pairList.stream()
.map(pairData -> UnifyPair.fromSerial(pairData, context, keyStorage)).collect(Collectors.toSet()); .map(pairData -> UnifyPair.fromSerial(pairData, context, keyStorage)).collect(Collectors.toSet());
return new FiniteClosure(pairs, context.logFile(), context.placeholderRegistry()); return new FiniteClosure(pairs, context.logger(), context.placeholderRegistry());
} }
} }
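Throughout FiniteClosure the Writer-based logFile.write(...) calls, each wrapped in its own try/catch, become logger.debug(...), and the hot paths pass a Supplier so the message string is only built when debug output is actually enabled. The project's de.dhbwstuttgart.util.Logger is not shown in this diff; a minimal sketch of such a lazily evaluating logger:

    import java.util.function.Supplier;

    // Stand-in for a logger with lazy message construction; the real Logger will differ.
    class LazyLogger {
        private final boolean debugEnabled;

        LazyLogger(boolean debugEnabled) {
            this.debugEnabled = debugEnabled;
        }

        void debug(Supplier<String> message) {
            if (debugEnabled) {
                System.out.println(message.get());   // the string is built only on demand
            }
        }

        void debug(String message) {
            if (debugEnabled) {
                System.out.println(message);
            }
        }
    }

    class LazyLoggerDemo {
        public static void main(String[] args) {
            LazyLogger logger = new LazyLogger(false);
            // The concatenation below is never evaluated because debug is disabled.
            logger.debug(() -> "smallerRes: " + java.util.List.of(1, 2, 3));
        }
    }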

View File

@@ -1,14 +1,17 @@
package de.dhbwstuttgart.typeinference.unify.model; package de.dhbwstuttgart.typeinference.unify.model;
import de.dhbwstuttgart.parser.scope.JavaClassName; import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory; import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import java.lang.reflect.Modifier; import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.List; import java.util.List;
public class FunInterfaceType extends ReferenceType { public class FunInterfaceType extends ReferenceType implements ISerializableData {
final List<UnifyType> intfArgTypes; final List<UnifyType> intfArgTypes;
final UnifyType intfReturnType; final UnifyType intfReturnType;
final List<String> generics; final List<String> generics;
@@ -46,4 +49,29 @@ public class FunInterfaceType extends ReferenceType {
return args; return args;
} }
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
var serializedWrapper = super.toSerial(keyStorage);
SerialMap serialized = serializedWrapper.getMap("object");
serialized.put("intfArgTypes", SerialList.fromMapped(intfArgTypes, u -> u.toSerial(keyStorage)));
serialized.put("intfReturnType", intfReturnType.toSerial(keyStorage));
serialized.put("generics", SerialList.fromMapped(generics, SerialValue::new));
return serializedWrapper;
}
public static FunInterfaceType fromSerial(SerialMap data, UnifyContext context) {
var name = data.getValue("name").getOf(String.class);
var params = data.getList("params").assertListOfMaps().stream().map(
paramData -> UnifyType.fromSerial(paramData, context)).toList();
var intfArgTypes = data.getList("intfArgTypes").assertListOfMaps().stream().map(
argTypeData -> UnifyType.fromSerial(argTypeData, context)).toList();
var intfReturnType = UnifyType.fromSerial(data.getMap("intfReturnType"), context);
var generics = data.getList("generics").assertListOfValues().stream().map(
generic -> generic.getOf(String.class)).toList();
return new FunInterfaceType(name, new TypeParams(params), intfArgTypes, intfReturnType, generics);
}
} }
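FunInterfaceType serializes by delegating to the superclass, then enriching the nested "object" map with intfArgTypes, intfReturnType and generics; fromSerial reads the same keys back. A toy version of that delegate-and-extend pattern using plain maps (the types here are stand-ins, not the SerialMap/KeyStorage API):

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    class SerialSketch {
        // Base type stores its own fields under a nested "object" map.
        static Map<String, Object> baseToSerial(String name, List<String> params) {
            Map<String, Object> object = new HashMap<>();
            object.put("name", name);
            object.put("params", params);
            Map<String, Object> wrapper = new HashMap<>();
            wrapper.put("object", object);
            return wrapper;
        }

        // Subtype delegates to the base serialization and adds its extra fields.
        @SuppressWarnings("unchecked")
        static Map<String, Object> funInterfaceToSerial(String name, List<String> params,
                                                        List<String> argTypes, String returnType) {
            Map<String, Object> wrapper = baseToSerial(name, params);
            Map<String, Object> object = (Map<String, Object>) wrapper.get("object");
            object.put("intfArgTypes", argTypes);
            object.put("intfReturnType", returnType);
            return wrapper;
        }
    }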

View File

@@ -1,5 +1,6 @@
package de.dhbwstuttgart.typeinference.unify.model; package de.dhbwstuttgart.typeinference.unify.model;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage; import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList; import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap; import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
@@ -16,7 +17,7 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor;
* A real function type in java. * A real function type in java.
* @author Florian Steurer * @author Florian Steurer
*/ */
public class FunNType extends UnifyType { public class FunNType extends UnifyType implements ISerializableData {
public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) { public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) {
return visitor.visit(this, ht); return visitor.visit(this, ht);
@@ -80,7 +81,7 @@ public class FunNType extends UnifyType {
} }
@Override @Override
public Boolean wrongWildcard() { public boolean wrongWildcard() {
return (new ArrayList<UnifyType>(Arrays.asList(getTypeParams() return (new ArrayList<UnifyType>(Arrays.asList(getTypeParams()
.get())).stream().filter(x -> (x instanceof WildcardType)).findFirst().isPresent()); .get())).stream().filter(x -> (x instanceof WildcardType)).findFirst().isPresent());
} }
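wrongWildcard() now returns the primitive boolean instead of a boxed Boolean. The filter(...).findFirst().isPresent() chain kept from the old version could, with the same semantics, be written as anyMatch; a self-contained sketch of the equivalent check (the nested interfaces stand in for the real type hierarchy, and this is a possible further simplification rather than the code in the diff):

    import java.util.Arrays;

    class WildcardCheckSketch {
        interface UnifyType {}
        interface WildcardType extends UnifyType {}

        // Equivalent to filter(x -> x instanceof WildcardType).findFirst().isPresent()
        static boolean wrongWildcard(UnifyType[] typeParams) {
            return Arrays.stream(typeParams).anyMatch(x -> x instanceof WildcardType);
        }

        public static void main(String[] args) {
            UnifyType plain = new UnifyType() {};
            UnifyType wildcard = new WildcardType() {};
            System.out.println(wrongWildcard(new UnifyType[]{plain}));            // false
            System.out.println(wrongWildcard(new UnifyType[]{plain, wildcard}));  // true
        }
    }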

View File

@@ -48,12 +48,7 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOT, context); return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOT, context);
}} }}
catch (ClassCastException e) { catch (ClassCastException e) {
try { ((FiniteClosure)fc).logger.debug(() -> "ClassCastException: " + left.toString() + " " + left.getGroundBasePair() + "\n\n");
((FiniteClosure)fc).logFile.write("ClassCastException: " + left.toString() + " " + left.getGroundBasePair() + "\n\n");
// ((FiniteClosure)fc).logFile.flush();
}
catch (IOException ie) {
}
return -99; return -99;
} }
} }
@@ -61,7 +56,7 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
/* /*
public int compareEq (UnifyPair left, UnifyPair right) { public int compareEq (UnifyPair left, UnifyPair right) {
if (left == null || right == null) if (left == null || right == null)
System.out.println("Fehler"); context.logger().info("Fehler");
if (left.getLhsType() instanceof PlaceholderType) { if (left.getLhsType() instanceof PlaceholderType) {
return fc.compare(left.getRhsType(), right.getRhsType(), left.getPairOp()); return fc.compare(left.getRhsType(), right.getRhsType(), left.getPairOp());
} }
@@ -82,12 +77,12 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
&& (((ExtendsType)right).getExtendedType().getName().equals("java.util.Vector")) && (((ExtendsType)right).getExtendedType().getName().equals("java.util.Vector"))
&& (((ReferenceType)((ExtendsType)right).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType))) && (((ReferenceType)((ExtendsType)right).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType)))
{ {
// System.out.println(""); // context.logger().info("");
} }
if (((right instanceof SuperType) && (((SuperType)right).getSuperedType().getName().equals("java.lang.Object"))) if (((right instanceof SuperType) && (((SuperType)right).getSuperedType().getName().equals("java.lang.Object")))
||((left instanceof SuperType) && (((SuperType)left).getSuperedType().getName().equals("java.lang.Object")))) ||((left instanceof SuperType) && (((SuperType)left).getSuperedType().getName().equals("java.lang.Object"))))
{ {
// System.out.println(""); // context.logger().info("");
} }
} }
else { else {
@@ -109,11 +104,11 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
&& (((ExtendsType)right).getExtendedType().getName().equals("java.util.Vector")) && (((ExtendsType)right).getExtendedType().getName().equals("java.util.Vector"))
&& (((ReferenceType)((ExtendsType)right).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType))) && (((ReferenceType)((ExtendsType)right).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType)))
{ {
// System.out.println(""); // context.logger().info("");
} }
if (right instanceof SuperType) if (right instanceof SuperType)
{ {
// System.out.println(""); // context.logger().info("");
} }
} }
else { else {
@@ -175,85 +170,81 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
left.add(p2); left.add(p2);
left.add(p4); left.add(p4);
*/ */
Set<UnifyPair> lefteq = left.stream() Set<UnifyPair> lefteq = new HashSet<>();
.filter(x -> (x.getLhsType() instanceof PlaceholderType && x.getPairOp() == PairOperator.EQUALSDOT)) Set<UnifyPair> leftle = new HashSet<>();
.collect(Collectors.toCollection(HashSet::new)); Set<UnifyPair> leftlewc = new HashSet<>();
Set<UnifyPair> righteq = right.stream() Set<UnifyPair> lefteqOder = new HashSet<>();
.filter(x -> (x.getLhsType() instanceof PlaceholderType && x.getPairOp() == PairOperator.EQUALSDOT)) Set<UnifyPair> lefteqRet = new HashSet<>();
.collect(Collectors.toCollection(HashSet::new)); Set<UnifyPair> leftleOder = new HashSet<>();
Set<UnifyPair> leftle = left.stream() for (var x : left) {
.filter(x -> ((x.getLhsType() instanceof PlaceholderType || x.getRhsType() instanceof PlaceholderType) boolean isLeftPlaceholder = x.getLhsType() instanceof PlaceholderType;
&& x.getPairOp() == PairOperator.SMALLERDOT)) boolean isRightPlaceholder = x.getRhsType() instanceof PlaceholderType;
.collect(Collectors.toCollection(HashSet::new)); boolean hasPlaceholder = isLeftPlaceholder || isRightPlaceholder;
Set<UnifyPair> rightle = right.stream()
.filter(x -> ((x.getLhsType() instanceof PlaceholderType || x.getRhsType() instanceof PlaceholderType) if (isLeftPlaceholder && x.getPairOp() == PairOperator.EQUALSDOT) lefteq.add(x);
&& x.getPairOp() == PairOperator.SMALLERDOT)) if (hasPlaceholder && x.getPairOp() == PairOperator.SMALLERDOT) leftle.add(x);
.collect(Collectors.toCollection(HashSet::new)); if (hasPlaceholder && x.getPairOp() == PairOperator.SMALLERDOTWC) leftlewc.add(x);
Set<UnifyPair> leftlewc = left.stream()
.filter(x -> ((x.getLhsType() instanceof PlaceholderType || x.getRhsType() instanceof PlaceholderType) UnifyPair y = x.getGroundBasePair();
&& x.getPairOp() == PairOperator.SMALLERDOTWC)) boolean isBasePairLeftPlaceholder = y.getLhsType() instanceof PlaceholderType;
.collect(Collectors.toCollection(HashSet::new)); boolean isBasePairRightPlaceholder = y.getRhsType() instanceof PlaceholderType;
Set<UnifyPair> rightlewc = right.stream() if (isBasePairLeftPlaceholder && !isBasePairRightPlaceholder && x.getPairOp() == PairOperator.EQUALSDOT) {
.filter(x -> ((x.getLhsType() instanceof PlaceholderType || x.getRhsType() instanceof PlaceholderType) lefteqOder.add(x);
&& x.getPairOp() == PairOperator.SMALLERDOTWC)) }
.collect(Collectors.toCollection(HashSet::new)); else if (isBasePairRightPlaceholder && ((PlaceholderType)y.getRhsType()).getOrCons() == (byte)-1) {
//System.out.println(left.toString()); lefteqRet.add(x);
}
else if (x.getPairOp() == PairOperator.SMALLERDOT) {
leftleOder.add(x);
}
}
Set<UnifyPair> righteq = new HashSet<>();
Set<UnifyPair> rightle = new HashSet<>();
Set<UnifyPair> rightlewc = new HashSet<>();
Set<UnifyPair> righteqOder = new HashSet<>();
Set<UnifyPair> righteqRet = new HashSet<>();
Set<UnifyPair> rightleOder = new HashSet<>();
for (var x : right) {
boolean isLeftPlaceholder = x.getLhsType() instanceof PlaceholderType;
boolean isRightPlaceholder = x.getRhsType() instanceof PlaceholderType;
boolean hasPlaceholder = isLeftPlaceholder || isRightPlaceholder;
if (isLeftPlaceholder && x.getPairOp() == PairOperator.EQUALSDOT) righteq.add(x);
if (hasPlaceholder && x.getPairOp() == PairOperator.SMALLERDOT) rightle.add(x);
if (hasPlaceholder && x.getPairOp() == PairOperator.SMALLERDOTWC) rightlewc.add(x);
UnifyPair y = x.getGroundBasePair();
boolean isBasePairLeftPlaceholder = y.getLhsType() instanceof PlaceholderType;
boolean isBasePairRightPlaceholder = y.getRhsType() instanceof PlaceholderType;
if (isBasePairLeftPlaceholder && !isBasePairRightPlaceholder && x.getPairOp() == PairOperator.EQUALSDOT) {
righteqOder.add(x);
}
else if (isBasePairRightPlaceholder && ((PlaceholderType)y.getRhsType()).getOrCons() == (byte)-1) {
righteqRet.add(x);
}
else if (x.getPairOp() == PairOperator.SMALLERDOT) {
rightleOder.add(x);
}
}
//context.logger().info(left.toString());
//Fall 2 //Fall 2
//if (lefteq.iterator().next().getLhsType().getName().equals("AJO")) { //if (lefteq.iterator().next().getLhsType().getName().equals("AJO")) {
// System.out.print(""); // System.out.print("");
//} //}
//ODER-CONSTRAINT //ODER-CONSTRAINT
Set<UnifyPair> leftBase = left.stream().map(x -> x.getGroundBasePair()).collect(Collectors.toCollection(HashSet::new)); // Set<UnifyPair> leftBase = left.stream().map(x -> x.getGroundBasePair()).collect(Collectors.toCollection(HashSet::new));
Set<UnifyPair> rightBase = right.stream().map(x -> x.getGroundBasePair()).collect(Collectors.toCollection(HashSet::new)); // Set<UnifyPair> rightBase = right.stream().map(x -> x.getGroundBasePair()).collect(Collectors.toCollection(HashSet::new));
Set<UnifyPair> lefteqOder = left.stream()
.filter(x -> { UnifyPair y = x.getGroundBasePair();
/*try {
((FiniteClosure)fc).logFile.write("leftBase: " + leftBase.toString() +"\n");
((FiniteClosure)fc).logFile.write("rightBase: " + rightBase.toString() +"\n\n");
((FiniteClosure)fc).logFile.write("left: " + left.toString() +"\n");
((FiniteClosure)fc).logFile.write("right: " + right.toString() +"\n\n");
((FiniteClosure)fc).logFile.write("y: " + y.toString() +"\n");
((FiniteClosure)fc).logFile.write("y.getLhsType() : " + y.getLhsType() .toString() +"\n\n");
((FiniteClosure)fc).logFile.write("y.getRhsType(): " + y.getRhsType().toString() +"\n");
((FiniteClosure)fc).logFile.write("x.getPairOp(): " + x.getPairOp().toString() +"\n\n");
}
catch (IOException ie) {
} */
return (y.getLhsType() instanceof PlaceholderType &&
!(y.getRhsType() instanceof PlaceholderType) &&
x.getPairOp() == PairOperator.EQUALSDOT);})
.collect(Collectors.toCollection(HashSet::new));
left.removeAll(lefteqOder); left.removeAll(lefteqOder);
Set<UnifyPair> righteqOder = right.stream()
.filter(x -> { UnifyPair y = x.getGroundBasePair();
return (y.getLhsType() instanceof PlaceholderType &&
!(y.getRhsType() instanceof PlaceholderType) &&
x.getPairOp() == PairOperator.EQUALSDOT);})
.collect(Collectors.toCollection(HashSet::new));
right.removeAll(righteqOder);
Set<UnifyPair> lefteqRet = left.stream()
.filter(x -> { UnifyPair y = x.getGroundBasePair();
return (y.getRhsType() instanceof PlaceholderType &&
((PlaceholderType)y.getRhsType()).getOrCons() == (byte)-1);})
.collect(Collectors.toCollection(HashSet::new));
left.removeAll(lefteqRet); left.removeAll(lefteqRet);
Set<UnifyPair> righteqRet = right.stream() right.removeAll(righteqOder);
.filter(x -> { UnifyPair y = x.getGroundBasePair();
return (y.getRhsType() instanceof PlaceholderType &&
((PlaceholderType)y.getRhsType()).getOrCons() == (byte)-1);})
.collect(Collectors.toCollection(HashSet::new));
right.removeAll(righteqRet); right.removeAll(righteqRet);
Set<UnifyPair> leftleOder = left.stream()
.filter(x -> (x.getPairOp() == PairOperator.SMALLERDOT))
.collect(Collectors.toCollection(HashSet::new));
Set<UnifyPair> rightleOder = right.stream()
.filter(x -> (x.getPairOp() == PairOperator.SMALLERDOT))
.collect(Collectors.toCollection(HashSet::new));
/* /*
synchronized(this) { synchronized(this) {
try { try {
@@ -275,89 +266,73 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
*/ */
Integer compareEq; int compareEq;
if (lefteqOder.size() == 1 && righteqOder.size() == 1 && lefteqRet.size() == 1 && righteqRet.size() == 1) { if (lefteqOder.size() == 1 && righteqOder.size() == 1 && lefteqRet.size() == 1 && righteqRet.size() == 1) {
Match m = new Match(); Match m = new Match();
if ((compareEq = compareEq(lefteqOder.iterator().next().getGroundBasePair(), righteqOder.iterator().next().getGroundBasePair())) == -1) { compareEq = compareEq(lefteqOder.iterator().next().getGroundBasePair(), righteqOder.iterator().next().getGroundBasePair());
ArrayList<UnifyPair> matchList =
rightleOder.stream().map(x -> { if (compareEq == -1) {
UnifyPair leftElem = leftleOder.stream() ArrayList<UnifyPair> matchList =
rightleOder.stream().map(x -> {
UnifyPair leftElem = leftleOder.stream()
.filter(y -> y.getGroundBasePair().getLhsType().equals(x.getGroundBasePair().getLhsType())) .filter(y -> y.getGroundBasePair().getLhsType().equals(x.getGroundBasePair().getLhsType()))
.findAny().get(); .findAny().orElseThrow();
return new UnifyPair(x.getRhsType(), leftElem.getRhsType(), PairOperator.EQUALSDOT);}) return new UnifyPair(x.getRhsType(), leftElem.getRhsType(), PairOperator.EQUALSDOT);}
.collect(Collectors.toCollection(ArrayList::new)); )
if (m.match(matchList).isPresent()) { .collect(Collectors.toCollection(ArrayList::new));
//try { ((FiniteClosure)fc).logFile.write("result1: -1 \n\n"); } catch (IOException ie) {}
return -1; return (m.match(matchList).isPresent()) ? -1 : 0;
}
else {
//try { ((FiniteClosure)fc).logFile.write("result1: 0 \n\n"); } catch (IOException ie) {}
return 0;
}
} else if (compareEq == 1) { } else if (compareEq == 1) {
ArrayList<UnifyPair> matchList = ArrayList<UnifyPair> matchList =
leftleOder.stream().map(x -> { leftleOder.stream().map(x -> {
UnifyPair rightElem = rightleOder.stream() UnifyPair rightElem = rightleOder.stream()
.filter(y -> .filter(y -> y.getGroundBasePair().getLhsType().equals(x.getGroundBasePair().getLhsType()))
y.getGroundBasePair().getLhsType().equals(x.getGroundBasePair().getLhsType())) .findAny().orElseThrow();
.findAny().get(); return new UnifyPair(x.getRhsType(), rightElem.getRhsType(), PairOperator.EQUALSDOT);}
return new UnifyPair(x.getRhsType(), rightElem.getRhsType(), PairOperator.EQUALSDOT);}) )
.collect(Collectors.toCollection(ArrayList::new)); .collect(Collectors.toCollection(ArrayList::new));
if (m.match(matchList).isPresent()) { return (m.match(matchList).isPresent()) ? 1 : 0;
//try { ((FiniteClosure)fc).logFile.write("result2: 1 \n\n"); } catch (IOException ie) {}
return 1;
}
else {
//try { ((FiniteClosure)fc).logFile.write("result2: 0 \n\n"); } catch (IOException ie) {}
return 0;
}
} else { } else {
/* /*
synchronized(this) { ((FiniteClosure)fc).logFile.write("leftBase: " + leftBase.toString() +"\n");
try { ((FiniteClosure)fc).logFile.write("rightBase: " + rightBase.toString() +"\n\n");
((FiniteClosure)fc).logFile.write("leftBase: " + leftBase.toString() +"\n"); ((FiniteClosure)fc).logFile.write("left: " + left.toString() +"\n");
((FiniteClosure)fc).logFile.write("rightBase: " + rightBase.toString() +"\n\n"); ((FiniteClosure)fc).logFile.write("right: " + right.toString() +"\n\n");
((FiniteClosure)fc).logFile.write("left: " + left.toString() +"\n"); ((FiniteClosure)fc).logFile.write("lefteqOder: " + lefteqOder.toString() +"\n");
((FiniteClosure)fc).logFile.write("right: " + right.toString() +"\n\n"); ((FiniteClosure)fc).logFile.write("righteqOder: " + righteqOder.toString() +"\n\n");
((FiniteClosure)fc).logFile.write("lefteqOder: " + lefteqOder.toString() +"\n"); ((FiniteClosure)fc).logFile.write("lefteqRet: " + lefteqRet.toString() +"\n");
((FiniteClosure)fc).logFile.write("righteqOder: " + righteqOder.toString() +"\n\n"); ((FiniteClosure)fc).logFile.write("righteqRet: " + righteqRet.toString() +"\n\n");
((FiniteClosure)fc).logFile.write("lefteqRet: " + lefteqRet.toString() +"\n"); ((FiniteClosure)fc).logFile.write("leftleOder: " + leftleOder.toString() +"\n");
((FiniteClosure)fc).logFile.write("righteqRet: " + righteqRet.toString() +"\n\n"); ((FiniteClosure)fc).logFile.write("rightleOder: " + rightleOder.toString() +"\n\n");
((FiniteClosure)fc).logFile.write("leftleOder: " + leftleOder.toString() +"\n"); ((FiniteClosure)fc).logFile.write("result3: 0 \n\n");
((FiniteClosure)fc).logFile.write("rightleOder: " + rightleOder.toString() +"\n\n"); ((FiniteClosure)fc).logFile.flush();
((FiniteClosure)fc).logFile.write("result3: 0 \n\n");
((FiniteClosure)fc).logFile.flush();
}
catch (IOException ie) {
}
}
*/ */
return 0; return 0;
} }
} }
if (lefteq.size() == 1 && lefteq.iterator().next().getRhsType() instanceof ExtendsType && leftle.size() == 1 && righteq.size() == 0 && rightle.size() == 1) { if (lefteq.size() == 1 && lefteq.iterator().next().getRhsType() instanceof ExtendsType && leftle.size() == 1 && righteq.isEmpty() && rightle.size() == 1) {
return 1; return 1;
} }
//Fall 2 //Fall 2
if (lefteq.size() == 0 && leftle.size() == 1 && righteq.size() == 1 && righteq.iterator().next().getRhsType() instanceof ExtendsType && rightle.size() == 1) { if (lefteq.isEmpty() && leftle.size() == 1 && righteq.size() == 1 && righteq.iterator().next().getRhsType() instanceof ExtendsType && rightle.size() == 1) {
return -1; return -1;
} }
//Fall 3 //Fall 3
if (lefteq.size() == 1 && lefteq.iterator().next().getRhsType() instanceof SuperType && leftle.size() == 1 && righteq.size() == 0 && rightle.size() == 1) { if (lefteq.size() == 1 && lefteq.iterator().next().getRhsType() instanceof SuperType && leftle.size() == 1 && righteq.isEmpty() && rightle.size() == 1) {
return -1; return -1;
} }
//Fall 3 //Fall 3
if (lefteq.size() == 0 && leftle.size() == 1 && righteq.size() == 1 && righteq.iterator().next().getRhsType() instanceof SuperType && rightle.size() == 1) { if (lefteq.isEmpty() && leftle.size() == 1 && righteq.size() == 1 && righteq.iterator().next().getRhsType() instanceof SuperType && rightle.size() == 1) {
return 1; return 1;
} }
//Fall 5 //Fall 5
if (lefteq.size() == 1 && leftle.size() == 0 && righteq.size() == 1 && rightle.size() == 1) { if (lefteq.size() == 1 && leftle.isEmpty() && righteq.size() == 1 && rightle.size() == 1) {
return -1; return -1;
} }
//Fall 5 //Fall 5
if (lefteq.size() == 1 && leftle.size() == 1 && righteq.size() == 1 && rightle.size() == 0) { if (lefteq.size() == 1 && leftle.size() == 1 && righteq.size() == 1 && rightle.isEmpty()) {
return 1; return 1;
} }
//Fall 5 //Fall 5
@@ -365,7 +340,7 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
return 0; return 0;
} }
// Nur Paare a =. Theta // Nur Paare a =. Theta
if (leftle.size() == 0 && rightle.size() == 0 && leftlewc.size() == 0 && rightlewc.size() ==0) { if (leftle.isEmpty() && rightle.isEmpty() && leftlewc.isEmpty() && rightlewc.isEmpty()) {
Stream<UnifyPair> lseq = lefteq.stream(); //left.filter(x -> (x.getLhsType() instanceof PlaceholderType && x.getPairOp() == PairOperator.EQUALSDOT)); Stream<UnifyPair> lseq = lefteq.stream(); //left.filter(x -> (x.getLhsType() instanceof PlaceholderType && x.getPairOp() == PairOperator.EQUALSDOT));
Stream<UnifyPair> rseq = righteq.stream(); //right.filter(x -> (x.getLhsType() instanceof PlaceholderType && x.getPairOp() == PairOperator.EQUALSDOT)); Stream<UnifyPair> rseq = righteq.stream(); //right.filter(x -> (x.getLhsType() instanceof PlaceholderType && x.getPairOp() == PairOperator.EQUALSDOT));
BinaryOperator<HashMap<UnifyType,UnifyPair>> combiner = (x,y) -> { x.putAll(y); return x;}; BinaryOperator<HashMap<UnifyType,UnifyPair>> combiner = (x,y) -> { x.putAll(y); return x;};
@@ -373,13 +348,10 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
lseq = lseq.filter(x -> !(hm.get(x.getLhsType()) == null));//NOCHMALS UEBERPRUEFEN!!!! lseq = lseq.filter(x -> !(hm.get(x.getLhsType()) == null));//NOCHMALS UEBERPRUEFEN!!!!
lseq = lseq.filter(x -> !x.equals(hm.get(x.getLhsType()))); //Elemente die gleich sind muessen nicht verglichen werden lseq = lseq.filter(x -> !x.equals(hm.get(x.getLhsType()))); //Elemente die gleich sind muessen nicht verglichen werden
Optional<Integer> si = lseq.map(x -> compareEq(x, hm.get(x.getLhsType()))).reduce((x,y)-> { if (x == y) return x; else return 0; } ); Optional<Integer> si = lseq.map(x -> compareEq(x, hm.get(x.getLhsType()))).reduce((x,y)-> { if (x == y) return x; else return 0; } );
if (!si.isPresent()) return 0; return si.orElse(0);
else return si.get();
} }
//Fall 1 und 4 //Fall 1 und 4
if (lefteq.size() >= 1 && righteq.size() >= 1 && (leftlewc.size() > 0 || rightlewc.size() > 0)) { if (!lefteq.isEmpty() && !righteq.isEmpty() && (!leftlewc.isEmpty() || !rightlewc.isEmpty())) {
if (lefteq.iterator().next().getLhsType().getName().equals("D"))
System.out.print("");
//Set<PlaceholderType> varsleft = lefteq.stream().map(x -> (PlaceholderType)x.getLhsType()).collect(Collectors.toCollection(HashSet::new)); //Set<PlaceholderType> varsleft = lefteq.stream().map(x -> (PlaceholderType)x.getLhsType()).collect(Collectors.toCollection(HashSet::new));
//Set<PlaceholderType> varsright = righteq.stream().map(x -> (PlaceholderType)x.getLhsType()).collect(Collectors.toCollection(HashSet::new)); //Set<PlaceholderType> varsright = righteq.stream().map(x -> (PlaceholderType)x.getLhsType()).collect(Collectors.toCollection(HashSet::new));
//filtern des Paares a = Theta, das durch a <. Thata' generiert wurde (nur im Fall 1 relevant) andere Substitutioen werden rausgefiltert //filtern des Paares a = Theta, das durch a <. Thata' generiert wurde (nur im Fall 1 relevant) andere Substitutioen werden rausgefiltert
@@ -400,12 +372,12 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
//TODO: Hier wird bei Wildcards nicht das richtige compare aufgerufen PL 18-04-20 //TODO: Hier wird bei Wildcards nicht das richtige compare aufgerufen PL 18-04-20
Pair<Integer, Set<UnifyPair>> int_Unifier = compare(lseq.getRhsType(), rseq.getRhsType()); Pair<Integer, Set<UnifyPair>> int_Unifier = compare(lseq.getRhsType(), rseq.getRhsType());
Unifier uni = new Unifier(); Unifier uni = new Unifier();
int_Unifier.getValue().get().forEach(x -> uni.add((PlaceholderType) x.getLhsType(), x.getRhsType())); int_Unifier.getValue().orElseThrow().forEach(x -> uni.add((PlaceholderType) x.getLhsType(), x.getRhsType()));
if (!lseq.getRhsType().getName().equals(rseq.getRhsType().getName()) if (!lseq.getRhsType().getName().equals(rseq.getRhsType().getName())
|| leftlewc.size() == 0 || rightlewc.size() == 0) return int_Unifier.getKey(); || leftlewc.isEmpty() || rightlewc.isEmpty()) return int_Unifier.getKey();
else { else {
Set <UnifyPair> lsleuni = leftlewc.stream().map(x -> uni.apply(x)).collect(Collectors.toCollection(HashSet::new)); Set <UnifyPair> lsleuni = leftlewc.stream().map(uni::apply).collect(Collectors.toCollection(HashSet::new));
Set <UnifyPair> rsleuni = rightlewc.stream().map(x -> uni.apply(x)).collect(Collectors.toCollection(HashSet::new)); Set <UnifyPair> rsleuni = rightlewc.stream().map(uni::apply).collect(Collectors.toCollection(HashSet::new));
BinaryOperator<HashMap<UnifyType,UnifyPair>> combiner = (x,y) -> { x.putAll(y); return x;}; BinaryOperator<HashMap<UnifyType,UnifyPair>> combiner = (x,y) -> { x.putAll(y); return x;};
HashMap<UnifyType,UnifyPair> hm; HashMap<UnifyType,UnifyPair> hm;
@@ -422,11 +394,11 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
Stream<UnifyPair> lslewcstr = lsleuni.stream().filter(x -> !(hm.get(x.getRhsType()) == null)); Stream<UnifyPair> lslewcstr = lsleuni.stream().filter(x -> !(hm.get(x.getRhsType()) == null));
si = lslewcstr.map(x -> fc.compare(x.getLhsType(), hm.get(x.getRhsType()).getLhsType(), PairOperator.SMALLERDOTWC, context)).reduce((x,y)-> { if (x == y) return x; else return 0; } ); si = lslewcstr.map(x -> fc.compare(x.getLhsType(), hm.get(x.getRhsType()).getLhsType(), PairOperator.SMALLERDOTWC, context)).reduce((x,y)-> { if (x == y) return x; else return 0; } );
} }
if (!si.isPresent()) return 0; return si.orElse(0);
else return si.get();
} }
} else { } else {
if (leftlewc.size() > 0) { if (!leftlewc.isEmpty()) {
/*
Set<UnifyPair> subst; Set<UnifyPair> subst;
subst = leftlewc.stream().map(x -> { subst = leftlewc.stream().map(x -> {
if (x.getLhsType() instanceof PlaceholderType) { if (x.getLhsType() instanceof PlaceholderType) {
@@ -435,6 +407,7 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
else { else {
return new UnifyPair(x.getRhsType(), x.getLhsType(), PairOperator.EQUALSDOT); return new UnifyPair(x.getRhsType(), x.getLhsType(), PairOperator.EQUALSDOT);
}}).collect(Collectors.toCollection(HashSet::new)); }}).collect(Collectors.toCollection(HashSet::new));
*/
Unifier uni = new Unifier(); Unifier uni = new Unifier();
lseq = uni.apply(lseq); lseq = uni.apply(lseq);
} }


@@ -134,15 +134,15 @@ public final class PlaceholderType extends UnifyType{
wildcardable = true; wildcardable = true;
} }
public void setWildcardtable(Boolean wildcardable) { public void setWildcardtable(boolean wildcardable) {
this.wildcardable = wildcardable; this.wildcardable = wildcardable;
} }
public Boolean isInnerType() { public boolean isInnerType() {
return innerType; return innerType;
} }
public void setInnerType(Boolean innerType) { public void setInnerType(boolean innerType) {
this.innerType = innerType; this.innerType = innerType;
} }
@@ -201,6 +201,7 @@ public final class PlaceholderType extends UnifyType{
serialized.put("name", this.typeName); serialized.put("name", this.typeName);
// Placeholders never make use of the typeParams // Placeholders never make use of the typeParams
serialized.put("isGenerated", IsGenerated); serialized.put("isGenerated", IsGenerated);
serialized.put("wildcardable", wildcardable);
serialized.put("isInnerType", innerType); serialized.put("isInnerType", innerType);
serialized.put("variance", variance); serialized.put("variance", variance);
serialized.put("orCons", orCons); serialized.put("orCons", orCons);
@@ -214,11 +215,13 @@ public final class PlaceholderType extends UnifyType{
public static PlaceholderType fromSerial(SerialMap data, UnifyContext context) { public static PlaceholderType fromSerial(SerialMap data, UnifyContext context) {
var name = data.getValue("name").getOf(String.class); var name = data.getValue("name").getOf(String.class);
var isGenerated = data.getValue("isGenerated").getOf(Boolean.class); var isGenerated = data.getValue("isGenerated").getOf(Boolean.class);
var wildcardable = data.getValue("wildcardable").getOf(Boolean.class);
var isInnerType = data.getValue("isInnerType").getOf(Boolean.class); var isInnerType = data.getValue("isInnerType").getOf(Boolean.class);
var variance = data.getValue("variance").getOf(Integer.class); var variance = data.getValue("variance").getOf(Integer.class);
var orCons = data.getValue("orCons").getOf(Number.class).byteValue(); var orCons = data.getValue("orCons").getOf(Number.class).byteValue();
var placeholderType = new PlaceholderType(name, isGenerated, context.placeholderRegistry()); var placeholderType = new PlaceholderType(name, isGenerated, context.placeholderRegistry());
placeholderType.setWildcardtable(wildcardable);
placeholderType.setInnerType(isInnerType); placeholderType.setInnerType(isInnerType);
placeholderType.setVariance(variance); placeholderType.setVariance(variance);
placeholderType.setOrCons(orCons); placeholderType.setOrCons(orCons);
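The hunk above adds the previously missing wildcardable flag to PlaceholderType serialization. A minimal round-trip sketch of what this fixes, written in the style of the PacketTest cases further down; it assumes the createTestContext() helper from PacketExampleData and that toSerial returns the wrapper map built by UnifyType.toSerial (as the SuperType implementation elsewhere in this diff suggests):

@Test
public void serializeWildcardableFlag() {
    UnifyContext unifyContext = PacketExampleData.createTestContext();
    PlaceholderType original = PlaceholderType.freshPlaceholder(unifyContext.placeholderRegistry());
    original.setWildcardtable(true);
    // serialize and restore through the generic UnifyType entry point
    SerialMap wrapper = original.toSerial(new KeyStorage());
    PlaceholderType restored = (PlaceholderType) UnifyType.fromSerial(wrapper, unifyContext);
    // without the new "wildcardable" entry this flag was silently dropped
    assertTrue(restored.isWildcardable());
}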


@@ -1,5 +1,6 @@
package de.dhbwstuttgart.typeinference.unify.model; package de.dhbwstuttgart.typeinference.unify.model;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage; import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList; import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap; import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
@@ -14,7 +15,7 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor;
* @author Florian Steurer * @author Florian Steurer
* *
*/ */
public class ReferenceType extends UnifyType { public class ReferenceType extends UnifyType implements ISerializableData {
/** /**
* The buffered hashCode * The buffered hashCode
@@ -31,7 +32,7 @@ public class ReferenceType extends UnifyType {
return visitor.visit(this, ht); return visitor.visit(this, ht);
} }
public ReferenceType(String name, Boolean genericTypeVar) { public ReferenceType(String name, boolean genericTypeVar) {
super(name, new TypeParams()); super(name, new TypeParams());
hashCode = 31 + 17 * typeName.hashCode() + 17 * typeParams.hashCode(); hashCode = 31 + 17 * typeName.hashCode() + 17 * typeParams.hashCode();
this.genericTypeVar = genericTypeVar; this.genericTypeVar = genericTypeVar;


@@ -1,6 +1,7 @@
package de.dhbwstuttgart.typeinference.unify.model; package de.dhbwstuttgart.typeinference.unify.model;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage; import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap; import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext; import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.Set; import java.util.Set;
@@ -92,6 +93,7 @@ public final class SuperType extends WildcardType {
public SerialMap toSerial(KeyStorage keyStorage) { public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap(); SerialMap serialized = new SerialMap();
serialized.put("wildcardedType", this.wildcardedType.toSerial(keyStorage)); serialized.put("wildcardedType", this.wildcardedType.toSerial(keyStorage));
serialized.put("params", SerialList.fromMapped(this.typeParams.get(), param -> param.toSerial(keyStorage)));
// create the wrapper and put this as the object // create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage); var serializedWrapper = super.toSerial(keyStorage);
@@ -100,8 +102,12 @@ public final class SuperType extends WildcardType {
} }
public static SuperType fromSerial(SerialMap data, UnifyContext context) { public static SuperType fromSerial(SerialMap data, UnifyContext context) {
return new SuperType( var params = data.getList("params").assertListOfMaps().stream().map(
paramData -> UnifyType.fromSerial(paramData, context)).toList();
var superType = new SuperType(
UnifyType.fromSerial(data.getMap("wildcardedType"), context) UnifyType.fromSerial(data.getMap("wildcardedType"), context)
); );
superType.setTypeParams(new TypeParams(params));
return superType;
} }
} }


@@ -174,9 +174,9 @@ public final class TypeParams implements Iterable<UnifyType>{
@Override @Override
public String toString() { public String toString() {
String res = ""; StringBuilder res = new StringBuilder();
for(UnifyType t : typeParams) for(UnifyType t : typeParams)
res += t + ","; res.append(t).append(",");
return "<" + res.substring(0, res.length()-1) + ">"; return "<" + res.substring(0, res.length()-1) + ">";
} }


@@ -50,8 +50,7 @@ public class Unifier implements Function<UnifyType, UnifyType>, Iterable<Entry<P
Unifier tempU = new Unifier(source, target); Unifier tempU = new Unifier(source, target);
// Every new substitution must be applied to previously added substitutions // Every new substitution must be applied to previously added substitutions
// otherwise the unifier needs to be applied multiple times to unify two terms // otherwise the unifier needs to be applied multiple times to unify two terms
for(PlaceholderType pt : substitutions.keySet()) substitutions.replaceAll((pt, ut) -> ut.apply(tempU));
substitutions.put(pt, substitutions.get(pt).apply(tempU));
substitutions.put(source, target); substitutions.put(source, target);
} }
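The replaceAll above keeps the invariant stated in the comment: every substitution already in the map is composed with the new one before it is added, so the unifier only needs to be applied once. A minimal, self-contained illustration of that composition step, using plain strings instead of the project's UnifyType machinery:

import java.util.LinkedHashMap;
import java.util.Map;

// Illustration only: the "types" are strings and substitution is plain text replacement.
class SubstitutionCompositionSketch {
    public static void main(String[] args) {
        Map<String, String> substitutions = new LinkedHashMap<>();
        substitutions.put("a", "List<b>"); // existing substitution a -> List<b>

        // new substitution b -> Integer: first fold it into every existing image ...
        String source = "b", target = "Integer";
        substitutions.replaceAll((key, image) -> image.replace(source, target));
        // ... then record it, mirroring the replaceAll(...) followed by put(source, target)
        substitutions.put(source, target);

        System.out.println(substitutions); // {a=List<Integer>, b=Integer}
    }
}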
@@ -93,8 +92,7 @@ public class Unifier implements Function<UnifyType, UnifyType>, Iterable<Entry<P
// } // }
//} //}
if (!(p.getLhsType().equals(newLhs)) || !(p.getRhsType().equals(newRhs))) {//Die Anwendung von this hat was veraendert PL 2018-04-01 if (!(p.getLhsType().equals(newLhs)) || !(p.getRhsType().equals(newRhs))) {//Die Anwendung von this hat was veraendert PL 2018-04-01
Set<UnifyPair> suniUnifyPair = new HashSet<>(); Set<UnifyPair> suniUnifyPair = new HashSet<>(thisAsPair.getAllSubstitutions());
suniUnifyPair.addAll(thisAsPair.getAllSubstitutions());
suniUnifyPair.add(thisAsPair); suniUnifyPair.add(thisAsPair);
if (p.getLhsType() instanceof PlaceholderType //&& newLhs instanceof PlaceholderType entfernt PL 2018-04-13 if (p.getLhsType() instanceof PlaceholderType //&& newLhs instanceof PlaceholderType entfernt PL 2018-04-13
&& p.getPairOp() == PairOperator.EQUALSDOT) { && p.getPairOp() == PairOperator.EQUALSDOT) {
@@ -172,13 +170,13 @@ public class Unifier implements Function<UnifyType, UnifyType>, Iterable<Entry<P
@Override @Override
public String toString() { public String toString() {
String result = "{ "; StringBuilder result = new StringBuilder("{ ");
for(Entry<PlaceholderType, UnifyType> entry : substitutions.entrySet()) for(Entry<PlaceholderType, UnifyType> entry : substitutions.entrySet())
result += "(" + entry.getKey() + " -> " + entry.getValue() + "), "; result.append("(").append(entry.getKey()).append(" -> ").append(entry.getValue()).append("), ");
if(!substitutions.isEmpty()) if(!substitutions.isEmpty())
result = result.substring(0, result.length()-2); result = new StringBuilder(result.substring(0, result.length() - 2));
result += " }"; result.append(" }");
return result; return result.toString();
} }
@Override @Override


@@ -154,8 +154,7 @@ public class UnifyPair implements IConstraintElement, ISerializableData {
} }
public Set<UnifyPair> getAllSubstitutions () { public Set<UnifyPair> getAllSubstitutions () {
Set<UnifyPair> ret = new HashSet<>(); Set<UnifyPair> ret = new HashSet<>(new ArrayList<>(getSubstitution()));
ret.addAll(new ArrayList<>(getSubstitution()));
if (basePair != null) { if (basePair != null) {
ret.addAll(new ArrayList<>(basePair.getAllSubstitutions())); ret.addAll(new ArrayList<>(basePair.getAllSubstitutions()));
} }
@@ -202,7 +201,7 @@ public class UnifyPair implements IConstraintElement, ISerializableData {
} }
public Boolean wrongWildcard() { public boolean wrongWildcard() {
return lhs.wrongWildcard() || rhs.wrongWildcard(); return lhs.wrongWildcard() || rhs.wrongWildcard();
} }
@@ -255,7 +254,7 @@ public class UnifyPair implements IConstraintElement, ISerializableData {
+ "WC: " + ((PlaceholderType)rhs).isWildcardable() + "WC: " + ((PlaceholderType)rhs).isWildcardable()
+ ", IT: " + ((PlaceholderType)rhs).isInnerType(); + ", IT: " + ((PlaceholderType)rhs).isInnerType();
} }
var res = "(" + lhs + " " + pairOp + " " + rhs + ", " + ret + ")"; //+ ", [" + getfBounded().toString()+ "])"; var res = "(UP: " + lhs + " " + pairOp + " " + rhs + ", " + ret + ")"; //+ ", [" + getfBounded().toString()+ "])";
var location = this.getLocation(); var location = this.getLocation();
if (location != null) { if (location != null) {
res += "@" + location.line() + " in " + location.file(); res += "@" + location.line() + " in " + location.file();
@@ -272,9 +271,12 @@ public class UnifyPair implements IConstraintElement, ISerializableData {
return ret; return ret;
} }
*/ */
private String serialUUID = null;
public SerialUUID toSerial(KeyStorage keyStorage) { public SerialUUID toSerial(KeyStorage keyStorage) {
String uuid = keyStorage.getIdentifier(this); final String uuid = serialUUID == null ? keyStorage.getIdentifier() : serialUUID;
if (serialUUID == null) serialUUID = uuid;
if (!keyStorage.isAlreadySerialized(uuid)) { if (!keyStorage.isAlreadySerialized(uuid)) {
SerialMap serialized = new SerialMap(); SerialMap serialized = new SerialMap();
keyStorage.putSerialized(uuid, serialized); keyStorage.putSerialized(uuid, serialized);


@@ -89,23 +89,21 @@ public abstract class UnifyType implements ISerializableData {
@Override @Override
public String toString() { public String toString() {
String params = ""; StringBuilder params = new StringBuilder();
if(typeParams.size() != 0) { if(typeParams.size() != 0) {
for(UnifyType param : typeParams) for(UnifyType param : typeParams)
params += param.toString() + ","; params.append(param.toString()).append(",");
params = "<" + params.substring(0, params.length()-1) + ">"; params = new StringBuilder("<" + params.substring(0, params.length() - 1) + ">");
} }
return typeName + params; return typeName + params;
} }
public Collection<PlaceholderType> getInvolvedPlaceholderTypes() { public Collection<PlaceholderType> getInvolvedPlaceholderTypes() {
ArrayList<PlaceholderType> ret = new ArrayList<>(); return new ArrayList<>(typeParams.getInvolvedPlaceholderTypes());
ret.addAll(typeParams.getInvolvedPlaceholderTypes());
return ret;
} }
public Boolean wrongWildcard() {//default public boolean wrongWildcard() {//default
return false; return false;
} }
@@ -116,13 +114,23 @@ public abstract class UnifyType implements ISerializableData {
@Override @Override
public boolean equals(Object obj) { public boolean equals(Object obj) {
if(obj == null)return false; if(obj == null) return false;
return this.toString().equals(obj.toString()); return this.toString().equals(obj.toString());
} }
public SerialMap toSerial(KeyStorage keyStorage) { public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap(); SerialMap serialized = new SerialMap();
serialized.put("type", this.getClass().toString()); String type = switch (this) {
case FunInterfaceType _ -> "funi";
case ReferenceType _ -> "ref";
case ExtendsType _ -> "ext";
case SuperType _ -> "sup";
case FunNType _ -> "funn";
case PlaceholderType _ -> "tph";
default -> throw new RuntimeException("No type defined for UnifyType of class " + this.getClass().getName());
};
serialized.put("type", type);
// we only insert null for the object and expect the child classes to call this and override the value with themselves // we only insert null for the object and expect the child classes to call this and override the value with themselves
serialized.put("object", SerialValue.NULL); serialized.put("object", SerialValue.NULL);
return serialized; return serialized;
@@ -132,23 +140,14 @@ public abstract class UnifyType implements ISerializableData {
var type = data.getValue("type").getOf(String.class); var type = data.getValue("type").getOf(String.class);
var object = data.getMap("object"); var object = data.getMap("object");
if (type.equals(ReferenceType.class.toString())) { return switch (type) {
return ReferenceType.fromSerial(object, context); case "funi" -> FunInterfaceType.fromSerial(object, context);
} case "ref" -> ReferenceType.fromSerial(object, context);
else if (type.equals(ExtendsType.class.toString())) { case "ext" -> ExtendsType.fromSerial(object, context);
return ExtendsType.fromSerial(object, context); case "sup" -> SuperType.fromSerial(object, context);
} case "funn" -> FunNType.fromSerial(object, context);
else if (type.equals(SuperType.class.toString())) { case "tph" -> PlaceholderType.fromSerial(object, context);
return SuperType.fromSerial(object, context); default -> throw new RuntimeException("Could not unserialize class of unhandled type " + type);
} };
else if (type.equals(FunNType.class.toString())) {
return FunNType.fromSerial(object, context);
}
else if (type.equals(PlaceholderType.class.toString())) {
return PlaceholderType.fromSerial(object, context);
}
else {
throw new RuntimeException("Could not unserialize class of unhandled type " + type);
}
} }
} }


@@ -1,5 +1,6 @@
package de.dhbwstuttgart.typeinference.unify.model; package de.dhbwstuttgart.typeinference.unify.model;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
@@ -7,7 +8,7 @@ import java.util.Collection;
* A wildcard type that is either a ExtendsType or a SuperType. * A wildcard type that is either a ExtendsType or a SuperType.
* @author Florian Steurer * @author Florian Steurer
*/ */
public abstract class WildcardType extends UnifyType { public abstract class WildcardType extends UnifyType implements ISerializableData {
/** /**
* The wildcarded type, e.q. Integer for ? extends Integer. Never a wildcard type itself. * The wildcarded type, e.q. Integer for ? extends Integer. Never a wildcard type itself.
@@ -41,7 +42,7 @@ public abstract class WildcardType extends UnifyType {
} }
@Override @Override
public Boolean wrongWildcard () {//This is an error public boolean wrongWildcard () {//This is an error
return (wildcardedType instanceof WildcardType); return (wildcardedType instanceof WildcardType);
} }
@@ -65,8 +66,7 @@ public abstract class WildcardType extends UnifyType {
@Override @Override
public Collection<PlaceholderType> getInvolvedPlaceholderTypes() { public Collection<PlaceholderType> getInvolvedPlaceholderTypes() {
ArrayList<PlaceholderType> ret = new ArrayList<>(); ArrayList<PlaceholderType> ret = new ArrayList<>(wildcardedType.getInvolvedPlaceholderTypes());
ret.addAll(wildcardedType.getInvolvedPlaceholderTypes());
return ret; return ret;
} }
} }


@@ -1,9 +1,338 @@
package de.dhbwstuttgart.util; package de.dhbwstuttgart.util;
import com.diogonunes.jcolor.Attribute;
import de.dhbwstuttgart.core.ConsoleInterface;
import de.dhbwstuttgart.server.SocketServer;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.nio.file.Path;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Arrays;
import java.util.Objects;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import static com.diogonunes.jcolor.Ansi.colorize;
public class Logger { public class Logger {
public static void print(String s) { public static final Logger NULL_LOGGER = new NullLogger();
System.out.println(s); private static final DateTimeFormatter timeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
protected final Writer writer;
protected final String prefix;
public Logger() {
this(null, "");
} }
} public Logger(String prefix) {
this(null, prefix);
}
public Logger(Writer writer, String prefix) {
this.prefix = (Objects.equals(prefix, "")) ? "" : "["+prefix+"] ";
this.writer = writer;
}
/**
* Create a new logger object from the path provided
*
* @param filePath The path to the output file. Recommended file extension ".log"
* @return The Logger object for this output file
*/
public static Logger forFile(String filePath, String prefix) {
File file = Path.of(filePath).toFile();
try {
Writer fileWriter = new FileWriter(file);
return new Logger(fileWriter, prefix);
}
catch (IOException exception) {
throw new RuntimeException("Failed to created Logger for file " + filePath, exception);
}
}
/**
* Create a new logger object that inherits the writer of the given logger object
*
* @param logger The old logger object that will provide the writer
* @return The new Logger object that uses the inherited writer and the new prefix
*/
public static Logger inherit(Logger logger, String newPrefix) {
return new Logger(logger.writer, newPrefix);
}
/**
* Tint the prefix in the color of the logLevel
* @param logLevel The logLevel to set the tint to
* @return The tinted string (using ANSI sequences)
*/
protected String getPrefix(LogLevel logLevel) {
String fullPrefix = prefix + "[" + logLevel + "] ";
return switch (logLevel) {
case DEBUG -> colorize(fullPrefix, Attribute.BRIGHT_MAGENTA_TEXT());
case INFO -> colorize(fullPrefix, Attribute.BLUE_TEXT());
case WARNING -> colorize(fullPrefix, Attribute.YELLOW_TEXT());
case ERROR -> colorize(fullPrefix, Attribute.RED_TEXT());
case SUCCESS -> colorize(fullPrefix, Attribute.GREEN_TEXT());
};
}
/**
* Print text to the output or error stream, depending on the logLevel
* @param s The string to print
* @param logLevel If logLevel is ERROR, print to stderr; otherwise print to stdout
*/
protected void print(String s, LogLevel logLevel) {
String coloredPrefix = this.getPrefix(logLevel);
// if we are running the server, prepend the timestamp
if(SocketServer.isServerRunning()) {
String timestamp = LocalDateTime.now().format(timeFormatter);
coloredPrefix = "[" + timestamp + "] " + coloredPrefix;
}
// output to the correct output-stream
if (logLevel.getValue() == LogLevel.ERROR.getValue()) {
System.err.println(coloredPrefix + s);
}
else {
System.out.println(coloredPrefix + s);
}
}
public boolean isLogLevelActive(LogLevel logLevel) {
return logLevel.isHigherOrEqualTo(ConsoleInterface.logLevel);
}
/**
* Write text to the attached writer if there is any
* @param s The string to print
*/
protected void write(String s) {
if (writer != null && ConsoleInterface.writeLogFiles) {
// writing to file should only be done when necessary
synchronized (writer) {
try {
writer.write(s);
}
catch (IOException exception) {
throw new RuntimeException("Failed writing to file", exception);
}
}
}
}
public String findLogCaller() {
StackTraceElement[] stackTrace = Thread.currentThread().getStackTrace();
final String thisFileName = stackTrace[0].getFileName();
int i = 0;
StackTraceElement currentElement = stackTrace[i];
while (++i < stackTrace.length) {
currentElement = stackTrace[i];
if (!Objects.equals(thisFileName, currentElement.getFileName())) {
break;
}
}
return ".(" + currentElement.getFileName() + ":" + currentElement.getLineNumber() + ")";
}
/**
* Base method for logging a string value. Should mostly be used by the Logger internal functions that
* abstract the logLevel away from the parameters
*
* @hidden Only relevant for the Logger and very special cases with dynamic logLevel
* @param s The text to log
* @param logLevel The logLevel on which the text should be logged
*/
public void log(String s, LogLevel logLevel) {
if (isLogLevelActive(logLevel)) {
// prepend the call to the logger instance
// s = findLogCaller() + "\n" + s;
this.print(s, logLevel);
this.write(s);
}
}
public void log(Supplier<String> supp, LogLevel logLevel) {
if (isLogLevelActive(logLevel)) {
this.log(supp.get(), logLevel);
}
}
public void log(Object obj, LogLevel logLevel) {
if (isLogLevelActive(logLevel)) {
this.log(obj.toString(), logLevel);
}
}
/**
* Output a debug message
* @param s The string to log
*/
public void debug(String s) {
this.log(s, LogLevel.DEBUG);
}
public void debug(Object o) {
this.log(o, LogLevel.DEBUG);
}
public void debug(Supplier<String> supp) {
this.log(supp, LogLevel.DEBUG);
}
/**
* Output an info message
* @param s The string to log
*/
public void info(String s) {
this.log(s, LogLevel.INFO);
}
public void info(Object o) {
this.log(o, LogLevel.INFO);
}
public void info(Supplier<String> supp) {
this.log(supp, LogLevel.INFO);
}
/**
* Output a warning message
* @param s The string to log
*/
public void warn(String s) {
this.log(s, LogLevel.WARNING);
}
public void warn(Object o) {
this.log(o, LogLevel.WARNING);
}
public void warn(Supplier<String> supp) {
this.log(supp, LogLevel.WARNING);
}
/**
* Output an error message
* @param s The string to log
*/
public void error(String s) {
this.log(s, LogLevel.ERROR);
}
public void error(Object o) {
this.log(o, LogLevel.ERROR);
}
public void error(Supplier<String> supp) {
this.log(supp, LogLevel.ERROR);
}
/**
* Output a success message
* @param s The string to log
*/
public void success(String s) {
this.log(s, LogLevel.SUCCESS);
}
public void success(Object o) {
this.log(o, LogLevel.SUCCESS);
}
public void success(Supplier<String> supp) {
this.log(supp, LogLevel.SUCCESS);
}
/**
* Special logging function that prints a throwable object and all of its recursive causes (including stacktrace)
* as an error
*
* @param throwable The Throwable object to output
*/
public void exception(Throwable throwable) {
// Format the exception output
String s = "Exception: " + throwable.getMessage() + "\n" +
Arrays.stream(throwable.getStackTrace()).map(stackTraceElement ->
" | " + stackTraceElement.toString()
).collect(Collectors.joining("\n"));
// if there is a cause printed afterward, announce it with the print of the exception
if (throwable.getCause() != null) {
s += "\n\nCaused by: ";
}
// print the exception
this.error(s);
// print the cause recursively
if (throwable.getCause() != null) {
this.exception(throwable.getCause());
}
}
public void close() {
if (this.writer != null) {
try {
this.writer.close();
}
catch (IOException exception) {
throw new RuntimeException("Failed to close a loggers writer. Was it maybe already closed? ", exception);
}
}
}
/**
* An enum representing the different log levels as integers:
* <ul>
* <li>DEBUG: highly specific output only for debugging</li>
* <li>INFO: informational output about the current state of the program</li>
* <li>WARNING: warnings about potential issues or an unexpected state</li>
* <li>ERROR: invalid states, errors and exceptions</li>
* <li>SUCCESS: successfully executed key steps of the program</li>
* </ul>
*/
public enum LogLevel {
/** Highly specific output only for debugging */
DEBUG(0),
/** Informational output about the current state of the program */
INFO(1),
/** Warnings about potential issues or an unexpected state */
WARNING(2),
/** Invalid states, errors and exceptions */
ERROR(3),
/** Successfully executed key steps of the program */
SUCCESS(4);
private final int value;
LogLevel(final int newValue) {
value = newValue;
}
public boolean isHigherOrEqualTo(LogLevel other) {
return this.value >= other.value;
}
public int getValue() {
return value;
}
public static LogLevel fromValue(int value) {
return switch (value) {
case 0 -> LogLevel.DEBUG;
case 1 -> LogLevel.INFO;
case 2 -> LogLevel.WARNING;
case 3 -> LogLevel.ERROR;
case 4 -> LogLevel.SUCCESS;
default -> throw new RuntimeException("Invalid log level value: " + value);
};
}
}
/**
* A special case of logger that will never output anything
*/
private static class NullLogger extends Logger {
@Override
public void log(String s, LogLevel logLevel) {
// Do nothing. Yay
}
}
}
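A short usage sketch of the Logger added above. It assumes ConsoleInterface.logLevel and ConsoleInterface.writeLogFiles are configured elsewhere, and expensiveToString() / riskyOperation() are placeholder helpers, not part of the diff:

// file-backed logger with a prefix; the path is made up for illustration
Logger serverLog = Logger.forFile("logs/unify-server.log", "Server");
serverLog.info("listening on port 5000");
serverLog.success("unification finished");

// child logger sharing the same writer under a different prefix
Logger taskLog = Logger.inherit(serverLog, "Task-1");
// lazy message: the supplier is only evaluated if DEBUG is active
taskLog.debug(() -> "constraint set: " + expensiveToString());

try {
    riskyOperation();
} catch (Exception e) {
    taskLog.exception(e); // prints the exception and all of its causes with stack traces
}

// components that should stay silent can receive the shared null object
Logger quiet = Logger.NULL_LOGGER;

serverLog.close();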


@@ -18,7 +18,7 @@ public class TestTypeDeployment {
public void testTypeDeployment() throws Exception { public void testTypeDeployment() throws Exception {
var path = Path.of(System.getProperty("user.dir"), "/resources/bytecode/javFiles/Cycle.jav"); var path = Path.of(System.getProperty("user.dir"), "/resources/bytecode/javFiles/Cycle.jav");
var file = path.toFile(); var file = path.toFile();
var compiler = new JavaTXCompiler(file, false); var compiler = new JavaTXCompiler(file);
var parsedSource = compiler.sourceFiles.get(file); var parsedSource = compiler.sourceFiles.get(file);
var tiResults = compiler.typeInference(file); var tiResults = compiler.typeInference(file);
Set<TypeInsert> tips = new HashSet<>(); Set<TypeInsert> tips = new HashSet<>();


@@ -1,8 +1,10 @@
package finiteClosure; package finiteClosure;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.parser.SyntaxTreeGenerator.FCGenerator; import de.dhbwstuttgart.parser.SyntaxTreeGenerator.FCGenerator;
import de.dhbwstuttgart.syntaxtree.ClassOrInterface; import de.dhbwstuttgart.syntaxtree.ClassOrInterface;
import de.dhbwstuttgart.syntaxtree.factory.ASTFactory; import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import org.junit.Test; import org.junit.Test;
import java.util.ArrayList; import java.util.ArrayList;
@@ -14,14 +16,14 @@ public class SuperInterfacesTest {
public void test() throws ClassNotFoundException { public void test() throws ClassNotFoundException {
Collection<ClassOrInterface> classes = new ArrayList<>(); Collection<ClassOrInterface> classes = new ArrayList<>();
classes.add(ASTFactory.createClass(TestClass.class)); classes.add(ASTFactory.createClass(TestClass.class));
System.out.println(FCGenerator.toFC(classes, ClassLoader.getSystemClassLoader())); System.out.println(FCGenerator.toFC(classes, ClassLoader.getSystemClassLoader(), JavaTXCompiler.defaultClientPlaceholderRegistry));
} }
@Test @Test
public void testGeneric() throws ClassNotFoundException { public void testGeneric() throws ClassNotFoundException {
Collection<ClassOrInterface> classes = new ArrayList<>(); Collection<ClassOrInterface> classes = new ArrayList<>();
classes.add(ASTFactory.createClass(TestClassGeneric.class)); classes.add(ASTFactory.createClass(TestClassGeneric.class));
System.out.println(FCGenerator.toFC(classes, ClassLoader.getSystemClassLoader())); System.out.println(FCGenerator.toFC(classes, ClassLoader.getSystemClassLoader(), JavaTXCompiler.defaultClientPlaceholderRegistry));
} }
} }


@@ -0,0 +1,101 @@
package server;
import de.dhbwstuttgart.parser.SourceLoc;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;
import de.dhbwstuttgart.typeinference.unify.model.ExtendsType;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.FunInterfaceType;
import de.dhbwstuttgart.typeinference.unify.model.FunNType;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.ReferenceType;
import de.dhbwstuttgart.typeinference.unify.model.SuperType;
import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import de.dhbwstuttgart.util.Logger;
import java.io.Writer;
import java.util.HashSet;
import java.util.List;
import java.util.concurrent.ForkJoinPool;
class PacketExampleData {
static ConstraintSet<UnifyPair> getExampleUnifyPairConstraintSet(
UnifyContext unifyContext, String packagePath, int numUndCons, int numOderCons
) {
var constraintSet = new ConstraintSet<UnifyPair>();
for (int i = 0; i < numUndCons; i++) {
constraintSet.addUndConstraint(
getExampleUnifyPair(unifyContext, packagePath+"undCons.")
);
}
for (int i = 0; i < numOderCons; i++) {
var oderConstraint = new HashSet<Constraint<UnifyPair>>();
getExampleUnifyPairConstraint(unifyContext, packagePath+"oderCons.", i == 0);
constraintSet.addOderConstraint(oderConstraint);
}
return constraintSet;
}
static Constraint<UnifyPair> getExampleUnifyPairConstraint(UnifyContext unifyContext, String packagePath, boolean withExtends) {
return new Constraint<>(
!withExtends, true,
withExtends ? getExampleUnifyPairConstraint(unifyContext, packagePath+"extendConstraint.", false) : null,
withExtends ? new HashSet<>(List.of(
getExampleUnifyPair(unifyContext, packagePath+"methodSignatureConstraint.zero."),
getExampleUnifyPair(unifyContext, packagePath+"methodSignatureConstraint.one.")
)) : null
);
}
static UnifyPair getExampleUnifyPair(UnifyContext unifyContext, String packagePath) {
return new UnifyPair(
new ReferenceType(packagePath + "something", false),
new ExtendsType(
new SuperType(
new FunInterfaceType("lambda" + unifyContext.placeholderRegistry().generateFreshPlaceholderName(),
new TypeParams(),
List.of( // intfArgTypes
FunNType.getFunNType(new TypeParams(
List.of(
PlaceholderType.freshPlaceholder(unifyContext.placeholderRegistry()),
PlaceholderType.freshPlaceholder(unifyContext.placeholderRegistry())
)
))
),
PlaceholderType.freshPlaceholder(unifyContext.placeholderRegistry()), // intfReturnType
List.of( // generics
"ZA",
"ZB",
unifyContext.placeholderRegistry().generateFreshPlaceholderName()
)
)
)
),
PairOperator.SMALLERDOT,
new SourceLoc("test.jav", 10)
);
}
static UnifyContext createTestContext() {
var placeholderRegistry = new PlaceholderRegistry();
return new UnifyContext(Logger.NULL_LOGGER, true,
new UnifyResultModel(
new ConstraintSet<>(),
new FiniteClosure(
new HashSet<>(),
Logger.NULL_LOGGER,
placeholderRegistry)),
new UnifyTaskModel(),
ForkJoinPool.commonPool(),
placeholderRegistry
);
}
}


@@ -0,0 +1,110 @@
package server;
import com.fasterxml.jackson.core.JsonProcessingException;
import de.dhbwstuttgart.environment.ByteArrayClassLoader;
import de.dhbwstuttgart.parser.SourceLoc;
import de.dhbwstuttgart.server.packet.DebugPacket;
import de.dhbwstuttgart.server.packet.IPacket;
import de.dhbwstuttgart.server.packet.InvalidPacket;
import de.dhbwstuttgart.server.packet.PacketContainer;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialUUID;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import java.io.Writer;
import java.util.HashSet;
import java.util.concurrent.ForkJoinPool;
import java.util.function.BiFunction;
import org.junit.Test;
import static org.junit.Assert.*;
public class PacketTest {
@Test
public void serializeUnifyPair() throws JsonProcessingException {
UnifyContext unifyContext = PacketExampleData.createTestContext();
var original = PacketExampleData.getExampleUnifyPair(unifyContext, "de.test.");
var reconstruction = serializeAndDeserialize(original, unifyContext,
(o,k) -> UnifyPair.fromSerial((SerialUUID) o,unifyContext,k));
assertEquals(original.getClass(), reconstruction.getClass());
assertEquals(original.toString(), reconstruction.toString());
assertEquals(original, reconstruction);
}
@Test
public void serializeUnifyPairConstraint() throws JsonProcessingException {
UnifyContext unifyContext = PacketExampleData.createTestContext();
var original = PacketExampleData.getExampleUnifyPairConstraint(unifyContext, "de.", true);
var reconstruction = serializeAndDeserialize(original, unifyContext,
(o,k) -> Constraint.fromSerial((SerialUUID) o, unifyContext, UnifyPair.class, k));
assertEquals(original.getClass(), reconstruction.getClass());
assertEquals(original.toString(), reconstruction.toString());
assertEquals(original, reconstruction);
}
@Test
public void serializeUnifyPairConstraintSet() throws JsonProcessingException {
UnifyContext unifyContext = PacketExampleData.createTestContext();
var original = PacketExampleData.getExampleUnifyPairConstraintSet(unifyContext, "de.", 1, 2);
var reconstruction = serializeAndDeserialize(original, unifyContext,
(o,k) -> ConstraintSet.fromSerial((SerialMap) o, unifyContext, UnifyPair.class, k));
assertEquals(original.getClass(), reconstruction.getClass());
assertEquals(original.toString(), reconstruction.toString());
assertEquals(original, reconstruction);
}
/**
* Helper method for serializing an ISerialNode into JSON, then deserializing it
*/
private <T extends ISerialNode, R extends ISerializableData> R serializeAndDeserialize(
ISerializableData object, UnifyContext unifyContext, BiFunction<T, KeyStorage, R> fromSerial
) throws JsonProcessingException {
DebugPacket packet = new DebugPacket();
KeyStorage keyStorage = new KeyStorage();
var serializedObject = object.toSerial(keyStorage);
if (serializedObject instanceof SerialUUID sObject) packet.a1 = sObject;
if (serializedObject instanceof SerialMap sObject) packet.b1 = sObject;
if (serializedObject instanceof SerialList<?> sObject) packet.c1 = sObject;
if (serializedObject instanceof SerialValue<?> sObject) packet.d1 = sObject;
packet.b2 = keyStorage.toSerial(keyStorage);
DebugPacket reconstructedPacket = serializeAndDeserializePacket(packet);
KeyStorage reconstructedKeyStorage = KeyStorage.fromSerial(reconstructedPacket.b2, unifyContext);
ISerialNode reconstructedData = null;
if (serializedObject instanceof SerialUUID) reconstructedData = packet.a1;
if (serializedObject instanceof SerialMap) reconstructedData = packet.b1;
if (serializedObject instanceof SerialList<?>) reconstructedData = packet.c1;
if (serializedObject instanceof SerialValue<?>) reconstructedData = packet.d1;
assertNotNull(reconstructedData);
return fromSerial.apply( (T) reconstructedData, reconstructedKeyStorage);
}
private <T extends IPacket> T serializeAndDeserializePacket(T packet) throws JsonProcessingException {
String json = PacketContainer.serialize(packet);
IPacket reconstructedPacket = PacketContainer.deserialize(json);
assertNotNull(reconstructedPacket);
assertSame(packet.getClass(), reconstructedPacket.getClass());
return (T) reconstructedPacket;
}
}


@@ -0,0 +1,152 @@
package server;
import de.dhbwstuttgart.core.ConsoleInterface;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.core.JavaTXServer;
import de.dhbwstuttgart.environment.CompilationEnvironment;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.packet.SetAutoclosePacket;
import de.dhbwstuttgart.server.packet.UnifyRequestPacket;
import de.dhbwstuttgart.syntaxtree.ClassOrInterface;
import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.UnifyResultListenerImpl;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import de.dhbwstuttgart.util.Logger;
import java.io.File;
import java.io.IOException;
import java.io.Writer;
import java.nio.file.Path;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.stream.Collectors;
import org.junit.Ignore;
import org.junit.Test;
import targetast.TestCodegen;
import static org.junit.Assert.*;
@Ignore("Server tests create huge overhead, so they are ignored until required")
public class ServerTest {
public ServerTest() {
ConsoleInterface.unifyServerUrl = Optional.of("ws://localhost:5000");
}
@Test
public void checkServer_Scalar() throws IOException, ClassNotFoundException {
compareLocalAndServerResult("Scalar.jav");
}
@Test
public void checkServer_Matrix() throws IOException, ClassNotFoundException {
compareLocalAndServerResult("Matrix.jav");
}
protected void compareLocalAndServerResult(final String filename) throws IOException, ClassNotFoundException {
File file = Path.of(TestCodegen.path.toString(), filename).toFile();
// get information from the compiler
JavaTXCompiler compiler = new JavaTXCompiler(List.of(file));
// NOW: simulate the call to method typeInference. Once via server and once locally
// if everything works, they should neither interfere with each other nor differ in their result
// get the values from the compiler
PlaceholderRegistry placeholderRegistry = JavaTXCompiler.defaultClientPlaceholderRegistry; //new PlaceholderRegistry();
ConstraintSet<Pair> cons = compiler.getConstraints(file);
ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(compiler, cons, placeholderRegistry);
unifyCons = unifyCons.map(ServerTest::distributeInnerVars);
FiniteClosure finiteClosure = UnifyTypeFactory.generateFC(
ServerTest.getAllClasses(compiler, file).stream().toList(),
Logger.NULL_LOGGER,
compiler.classLoader,
compiler,
placeholderRegistry
);
UnifyTaskModel usedTasks = new UnifyTaskModel();
// create the server
JavaTXServer server = new JavaTXServer(5000);
try (ExecutorService executor = Executors.newSingleThreadExecutor()) {
// run the server in a separate thread
executor.submit(server::listen);
}
// run the unification on the server
PlaceholderRegistry prCopy = JavaTXCompiler.defaultClientPlaceholderRegistry.deepClone();
UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
UnifyContext context = new UnifyContext(Logger.NULL_LOGGER, true, urm, usedTasks, prCopy);
SocketClient.execute(SetAutoclosePacket.create());
List<ResultSet> serverResult = SocketClient.executeAndGet(
UnifyRequestPacket.create(finiteClosure, cons, unifyCons, context.placeholderRegistry())
).getResultSet(context);
// close the server
server.forceStop();
// run the unification on the client (do this second, because it changes the initial placeholder registry)
UnifyResultListenerImpl li = new UnifyResultListenerImpl();
urm.addUnifyResultListener(li);
TypeUnify.unifyParallel(unifyCons.getUndConstraints(), unifyCons.getOderConstraints(), finiteClosure, context);
List<ResultSet> clientResult = li.getResults();
// create the bytecode from both results
var sf = compiler.sourceFiles.get(file);
var serverBytecode = compiler.generateBytecode(sf, serverResult);
var localBytecode = compiler.generateBytecode(sf, clientResult);
// test if the generated code is the same
for (var serverEntry : serverBytecode.entrySet()) {
var serverBytes = serverEntry.getValue();
var localBytes = localBytecode.get(serverEntry.getKey());
assertArrayEquals(serverBytes, localBytes);
}
}
protected static UnifyPair distributeInnerVars(UnifyPair x) {
UnifyType lhs, rhs;
if (((lhs = x.getLhsType()) instanceof PlaceholderType) && ((rhs = x.getRhsType()) instanceof PlaceholderType) && (((PlaceholderType) lhs).isInnerType() || ((PlaceholderType) rhs).isInnerType())) {
((PlaceholderType) lhs).setInnerType(true);
((PlaceholderType) rhs).setInnerType(true);
}
return x;
}
protected static Set<ClassOrInterface> getAllClasses(JavaTXCompiler compiler, File file)
throws ClassNotFoundException, IOException
{
var sf = compiler.sourceFiles.get(file);
Set<ClassOrInterface> allClasses = new HashSet<>();
allClasses.addAll(compiler.getAvailableClasses(sf));
allClasses.addAll(sf.getClasses());
var newClasses = CompilationEnvironment.loadDefaultPackageClasses(sf.getPkgName(), file, compiler).stream().map(ASTFactory::createClass).collect(Collectors.toSet());
for (var clazz : newClasses) {
// Don't load classes that get recompiled
if (sf.getClasses().stream().anyMatch(nf -> nf.getClassName().equals(clazz.getClassName())))
continue;
if (allClasses.stream().noneMatch(old -> old.getClassName().equals(clazz.getClassName())))
allClasses.add(clazz);
}
return allClasses;
}
}

testOut/test.java

@@ -0,0 +1,22 @@
import java.util.Vector;
import java.util.List;
class Main {
public static void main(String[] args) {
var grid = new Vector<Vector<Boolean>>(List.of(
new Vector<Boolean>(List.of(false, false, false)),
new Vector<Boolean>(List.of(false, false, false)),
new Vector<Boolean>(List.of(false, true, false))
));
BFS img = new BFS();
Pos pos = img.search(grid);
System.out.println("Found at: x=" + pos.x + " | y=" + pos.y);
}
}