30 Commits

Author SHA1 Message Date
Fabian Holzwarth  1af31e4513  feat: update parallelization for 0variance  2025-08-04 13:41:17 +02:00
Fabian Holzwarth  5b06f0a249  Merge branch 'feat/unify-server' into feat/unify-server-0variance  2025-07-21 16:27:55 +02:00
Fabian Holzwarth  3d99f282f5  feat: cleanup code  2025-07-21 16:12:56 +02:00
Fabian Holzwarth  512b10542e  feat: adjusted parallelization  2025-07-21 15:41:04 +02:00
Fabian Holzwarth  3b1185d9d0  feat: paralellize 0-variance cases  2025-07-20 15:06:50 +02:00
Fabian Holzwarth  303c91dc87  chore: making classnames more expressive and cleanup some structures  2025-07-19 12:50:30 +02:00
Fabian Holzwarth  603a8b176a  feat: implement partically cancellable tasks  2025-07-16 11:16:30 +02:00
Fabian Holzwarth  f396189a4b  feat: add timestamp to server output  2025-07-12 13:44:18 +02:00
Fabian Holzwarth  e7f4a94908  feat: fixed old condition on server timeout  2025-07-12 13:22:54 +02:00
Fabian Holzwarth  ce49a4b9a5  feat: reduce temporary objects and repeated loops  2025-07-12 13:14:55 +02:00
Fabian Holzwarth  03b3692724  feat: update indepenedentest  2025-07-11 12:50:27 +02:00
Fabian Holzwarth  f0022d2b6f  feat: use presized hashMaps to reduce resizing  2025-07-11 11:22:06 +02:00
Fabian Holzwarth  b1015cfa82  feat: update logging and add success-level  2025-07-07 16:19:04 +02:00
Fabian Holzwarth  b63a27a0a0  feat: improve server by assinging configured thread pools  2025-07-07 15:59:46 +02:00
Fabian Holzwarth  3b0a53d3c4  feat: add cross dependency, fix: socket closing and error messages  2025-07-07 15:19:56 +02:00
Fabian Holzwarth  50dbbf5f86  feat: implement generalized socket client, server logger and cleanup code  2025-07-07 14:01:27 +02:00
Fabian Holzwarth  130c491ac0  feat: more Boxing replacements  2025-07-06 15:49:59 +02:00
Fabian Holzwarth  9f9b264ac4  feat: replace unnecessary boxing with primitives  2025-07-06 15:18:51 +02:00
Fabian Holzwarth  1393db05c2  feat: implement lazy evaluation for logger outputs  2025-07-06 13:37:47 +02:00
Fabian Holzwarth  93e1a8787c  feat: do not create a new context, if nothing changes  2025-07-05 11:43:10 +02:00
Fabian Holzwarth  0129d7540f  feat use perMessagDeflate compression in websocket and use logger for message outpute  2025-07-05 11:16:06 +02:00
Fabian Holzwarth  7ea8337aee  feat: remove unused logging library  2025-07-05 11:15:33 +02:00
Fabian Holzwarth  28458d405f  feat: ignore server test  2025-07-02 15:47:55 +02:00
Fabian Holzwarth  1b905cb3e2  feat: implement loggers for the rest of the compiler  2025-07-01 23:06:09 +02:00
Fabian Holzwarth  d02c3583e9  feat: implement new logger into type inference code  2025-07-01 22:16:29 +02:00
Fabian Holzwarth  ca98e83fd2  feat: added logger  2025-07-01 21:21:39 +02:00
Fabian Holzwarth  c80a0c8596  feat: fix error by reintroducing name generator and add server tests  2025-06-30 16:42:20 +02:00
Fabian Holzwarth  2278fb1b91  feat: undo removing NameGenerator to fix errors in ast generation  2025-06-30 12:46:41 +02:00
Fabian Holzwarth  32b16cd5fd  feat: replace concurrent modification with correct function call  2025-06-30 11:49:53 +02:00
Fabian Holzwarth  fd30c5f63f  feat: prevent reusing the placeholder registry in tests  2025-06-29 16:04:54 +02:00
81 changed files with 2573 additions and 1244 deletions

View File

@@ -10,6 +10,7 @@ mkdir $TDIR
cd $TDIR
git clone $REPO .
git checkout feat/unify-server
# git checkout 93e1a8787cd94c73f4538f6a348f58613893a584
# git checkout dad468368b86bdd5a3d3b2754b17617cee0a9107 # 1:55
# git checkout a0c11b60e8c9d7addcbe0d3a09c9ce2924e9d5c0 # 2:25
# git checkout 4cddf73e6d6c9116d3e1705c4b27a8e7f18d80c3 # 2:27
@@ -19,15 +20,14 @@ git checkout feat/unify-server
# git checkout 1391206dfe59263cdb22f93371cfd1dd5465d97f # 1:29
date "+%Y.%m.%d %H:%M:%S"
# mvn clean compile -DskipTests package
## prefix each stderr line with " | "
# exec 2> >(trap "" INT TERM; sed 's/^/ | /' >&2)
# echo -e "\nMatrix test:\n |"
# time java -jar target/JavaTXcompiler-0.1-jar-with-dependencies.jar resources/bytecode/javFiles/Matrix.jav >/dev/null;
# sed -i -e 's/source>21/source>23/g' pom.xml
# sed -i -e 's/target>21/target>23/g' pom.xml
mvn clean compile test
mvn clean compile -DskipTests package
time java -jar target/JavaTXcompiler-0.1-jar-with-dependencies.jar resources/bytecode/javFiles/Matrix.jav;
# mvn clean compile test
echo -e "\nCleanup... "

View File

@@ -59,6 +59,11 @@ http://maven.apache.org/maven-v4_0_0.xsd">
<artifactId>jackson-databind</artifactId>
<version>2.17.2</version>
</dependency>
<dependency>
<groupId>com.diogonunes</groupId>
<artifactId>JColor</artifactId>
<version>5.5.1</version>
</dependency>
</dependencies>
<build>
@@ -153,4 +158,4 @@ http://maven.apache.org/maven-v4_0_0.xsd">
<url>file:///${project.basedir}/maven-repository/</url>
</repository>
</distributionManagement>
</project>
</project>
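
Note on the new JColor dependency: it presumably supplies the ANSI colors for the log-level prefixes used by the new Logger (ServerTaskLogger below builds a "coloredPrefix" via getPrefix). A minimal, hypothetical sketch of that idea against JColor's public API; the Attribute choices and the prefix format are assumptions, not the actual Logger code:

import com.diogonunes.jcolor.Ansi;
import com.diogonunes.jcolor.Attribute;

class ColoredPrefixSketch {
    // Hypothetical helper: color a log-level tag the way a console logger might.
    static String coloredPrefix(String level) {
        Attribute color = switch (level) {
            case "ERROR"   -> Attribute.RED_TEXT();
            case "WARNING" -> Attribute.YELLOW_TEXT();
            case "SUCCESS" -> Attribute.GREEN_TEXT();
            default        -> Attribute.CYAN_TEXT();
        };
        return Ansi.colorize("[" + level + "] ", color);
    }

    public static void main(String[] args) {
        System.out.println(coloredPrefix("SUCCESS") + "Websocket server started on port 8080");
    }
}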

View File

@@ -0,0 +1,51 @@
class C1 {
C1 self() {
return this;
}
}
class C2 {
C2 self() {
return this;
}
}
class Example {
untypedMethod(var) {
return var.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self();
}
}

View File

@@ -0,0 +1,43 @@
import java.lang.Integer;
import java.lang.Boolean;
import java.util.Queue;
import java.util.Vector;
import java.util.List;
import java.util.ArrayDeque;

class Pos {
    public Integer x;
    public Integer y;

    public Pos(Integer x, Integer y) {
        this.x = x;
        this.y = y;
    }
}

class GridSearch {
    Pos search(Vector<Vector<Boolean>> grid) {
        var w = grid.size();
        var h = grid.getFirst().size();
        // keep a queue on which cells to check
        var cellQueue = new ArrayDeque<Pos>();
        cellQueue.add(new Pos(0,0));
        while (!cellQueue.isEmpty()) {
            var pos = cellQueue.poll();
            // if the target was found: return the position
            var value = grid.get(pos.x).get(pos.y);
            if (value) {
                return pos;
            }
            // keep searching on neighboring tiles
            if (pos.x < w-1) cellQueue.add(new Pos(pos.x + 1, pos.y));
            if (pos.y < h-1) cellQueue.add(new Pos(pos.x, pos.y + 1));
        }
        return (Pos)null;
    }
}

View File

@@ -0,0 +1,42 @@
import java.util.List;
import java.util.AbstractList;
import java.util.Vector;
import java.lang.Integer;

class Pixel {
    public color;
}

class Mask {
    mask;

    Mask(mask) {
        this.mask = mask;
    }

    apply(pixels) {
        var w = mask.size();
        var h = mask.get(0).size();
        var imgW = pixels.size();
        var imgH = pixels.get(0).size();
        for (var x = 0; x < imgW - w; x++) {
            for (var y = 0; y < imgH - h; y++) {
                var total = 0;
                for (var xd = 0; xd < w; xd++) {
                    for (var yd = 0; yd < h; yd++) {
                        var p = pixels.get(x + xd).get(y + yd);
                        var m = mask.get(xd).get(yd);
                        total = total + (p.color * m);
                    }
                }
                pixels.get(x).get(y).color = total;
            }
        }
        return pixels;
    }
}

View File

@@ -0,0 +1,39 @@
import java.lang.Integer;
import java.lang.Boolean;
import java.util.ArrayList;
import java.util.HashMap;

public class PascalsTriangle {
    create(n) {
        var rows = new ArrayList<ArrayList<Integer>>();
        var evens = new ArrayList<ArrayList<Boolean>>();
        if (n <= 0) return rows;
        // first row
        rows.add(new ArrayList<Integer>(1));
        evens.add(new ArrayList<Boolean>(false));
        for (int y = 1; y < n; y++) {
            var row = new ArrayList<Integer>();
            var evensRow = new ArrayList<Boolean>();
            row.add(1);
            evensRow.add(false);
            for (int x = 1; x < y-1; x++) {
                int tl = rows.getLast().get(x-1);
                int tr = rows.getLast().get(x);
                row.add(tl + tr);
                evensRow.add(((tl + tr) % 2) == 1);
            }
            row.add(1);
            rows.add(row);
            evensRow.add(false);
            evens.add(evensRow);
        }
        return rows;
    }
}

View File

@@ -0,0 +1,17 @@
import java.util.List;
import java.lang.Integer;
//import java.util.Collection;

public class Merge2 {
    public merge(a, b) {
        a.addAll(b);
        return a;
    }

    public sort(in){
        var firstHalf = in.subList(1,2);
        return merge(sort(firstHalf), sort(in));
    }
}

View File

@@ -8,6 +8,7 @@ import de.dhbwstuttgart.target.generate.ASTToTargetAST;
import de.dhbwstuttgart.target.tree.*;
import de.dhbwstuttgart.target.tree.expression.*;
import de.dhbwstuttgart.target.tree.type.*;
import de.dhbwstuttgart.util.Logger;
import org.objectweb.asm.*;
import java.lang.invoke.*;
@@ -19,6 +20,8 @@ import static de.dhbwstuttgart.target.tree.expression.TargetBinaryOp.*;
import static de.dhbwstuttgart.target.tree.expression.TargetLiteral.*;
public class Codegen {
public static Logger logger = new Logger("codegen");
private final TargetStructure clazz;
private final ClassWriter cw;
public final String className;
@@ -1317,7 +1320,7 @@ public class Codegen {
types.add(Type.getObjectType(guard.inner().type().getInternalName()));
// TODO Same here we need to evaluate constant;
} else {
System.out.println(label);
logger.info(label);
throw new NotImplementedException();
}
}
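
The pattern rolled out here and in the following files: each subsystem gets a named Logger and the System.out calls become level methods. A small consumer-side sketch using only the constructor and methods visible in the diffs; the lazy Supplier form hinted at in the comment is an assumption based on the "lazy evaluation for logger outputs" commit:

import de.dhbwstuttgart.util.Logger;

class LoggerUsageSketch {
    // One named logger per subsystem, as in Codegen, JavaTXParser and SocketServer.
    static final Logger logger = new Logger("example");

    void run() {
        logger.info("starting pass");              // replaces System.out.println(...)
        logger.debug("intermediate state: ...");   // only emitted at the highest verbosity (-vvv)
        // Assumed lazy form: the message would only be built if the level is active, e.g.
        // logger.debug(() -> expensiveDump());
        logger.success("pass finished");           // the new success level from commit b1015cfa82
    }
}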

View File

@@ -120,7 +120,7 @@ public class FunNGenerator {
superFunNMethodDescriptor.append(")V");
}
System.out.println(superFunNMethodSignature);
Codegen.logger.info(superFunNMethodSignature);
ClassWriter classWriter = new ClassWriter(0);
MethodVisitor methodVisitor;

View File

@@ -1,13 +1,19 @@
package de.dhbwstuttgart.core;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.util.Logger;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.util.*;
public class ConsoleInterface {
private static final String directory = System.getProperty("user.dir");
/**
* Leave the argument configurations here for the rest of the code to read
*/
public static Logger.LogLevel logLevel = Logger.LogLevel.ERROR;
public static boolean writeLogFiles = false;
public static Optional<String> unifyServerUrl = Optional.empty();
public static void main(String[] args) throws IOException, ClassNotFoundException {
List<File> input = new ArrayList<>();
@@ -15,7 +21,6 @@ public class ConsoleInterface {
String outputPath = null;
Iterator<String> it = Arrays.asList(args).iterator();
Optional<Integer> serverPort = Optional.empty();
Optional<String> unifyServer = Optional.empty();
if (args.length == 0) {
System.out.println("No input files given. Get help with --help");
@@ -25,7 +30,9 @@ public class ConsoleInterface {
"\t-cp\tSet Classpath\n" +
"\t-d\tSet destination directory\n" +
"\t[--server-mode <port>]\n" +
"\t[--unify-server <url>]\n");
"\t[--unify-server <url>]\n" +
"\t[--write-logs]\n" +
"\t[-v|-vv-|-vvv]");
System.exit(1);
}
while (it.hasNext()) {
@@ -42,22 +49,31 @@ public class ConsoleInterface {
} else if (arg.equals("--server-mode")) {
serverPort = Optional.of(Integer.parseInt(it.next()));
} else if (arg.equals("--unify-server")) {
unifyServer = Optional.of(it.next());
unifyServerUrl = Optional.of(it.next());
} else if (arg.equals("--write-logs")) {
ConsoleInterface.writeLogFiles = true;
} else if (arg.startsWith("-v")) {
logLevel = switch (arg) {
case "-v" -> Logger.LogLevel.WARNING;
case "-vv" -> Logger.LogLevel.INFO;
case "-vvv" -> Logger.LogLevel.DEBUG;
default -> throw new IllegalArgumentException("Argument " + arg + " is not a valid verbosity level");
};
} else {
input.add(new File(arg));
}
}
if (serverPort.isPresent()) {
if (unifyServer.isPresent()) throw new RuntimeException("Cannot use unifyServer when in server mode!");
JavaTXServer server = new JavaTXServer();
server.listen(serverPort.get());
if (unifyServerUrl.isPresent()) throw new RuntimeException("Cannot use unifyServer when in server mode!");
JavaTXServer server = new JavaTXServer(serverPort.get());
server.listen();
}
else {
JavaTXCompiler compiler = new JavaTXCompiler(input, classpath, outputPath != null ? new File(outputPath) : null, unifyServer);
JavaTXCompiler compiler = new JavaTXCompiler(input, classpath, outputPath != null ? new File(outputPath) : null);
//compiler.typeInference();
compiler.generateBytecode();
SocketClient.closeIfOpen();
}
}
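
For reference, the non-server branch of the new main method reduces to the following; the calls are taken directly from the diff (three-argument constructor, generateBytecode, SocketClient.closeIfOpen), only the file names are illustrative:

import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.server.SocketClient;
import java.io.File;
import java.util.List;

class CompileSketch {
    public static void main(String[] args) throws Exception {
        // Illustrative input; any .jav file such as resources/bytecode/javFiles/Matrix.jav works.
        List<File> input = List.of(new File("resources/bytecode/javFiles/Matrix.jav"));
        JavaTXCompiler compiler = new JavaTXCompiler(input, List.of(), new File("out"));
        compiler.generateBytecode();
        // Close the connection to the unify server, if --unify-server opened one.
        SocketClient.closeIfOpen();
    }
}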

View File

@@ -13,6 +13,10 @@ import de.dhbwstuttgart.parser.antlr.Java17Parser.SourceFileContext;
import de.dhbwstuttgart.parser.scope.JavaClassName;
import de.dhbwstuttgart.parser.scope.JavaClassRegistry;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.packet.SetAutoclosePacket;
import de.dhbwstuttgart.server.packet.UnifyRequestPacket;
import de.dhbwstuttgart.server.packet.UnifyResultPacket;
import de.dhbwstuttgart.syntaxtree.ClassOrInterface;
import de.dhbwstuttgart.syntaxtree.GenericTypeVar;
import de.dhbwstuttgart.syntaxtree.Method;
@@ -20,6 +24,7 @@ import de.dhbwstuttgart.syntaxtree.ParameterList;
import de.dhbwstuttgart.syntaxtree.SourceFile;
import de.dhbwstuttgart.syntaxtree.GenericDeclarationList;
import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
import de.dhbwstuttgart.syntaxtree.factory.NameGenerator;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.syntaxtree.type.ExtendsWildcardType;
import de.dhbwstuttgart.syntaxtree.type.GenericRefType;
@@ -53,6 +58,7 @@ import de.dhbwstuttgart.typeinference.unify.UnifyResultListenerImpl;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;
import de.dhbwstuttgart.util.Logger;
import java.io.*;
import java.lang.reflect.Modifier;
import java.nio.file.Path;
@@ -65,43 +71,39 @@ import org.apache.commons.io.output.NullOutputStream;
public class JavaTXCompiler {
// do not use this in any code, that can be executed serverside!
public static PlaceholderRegistry defaultClientPlaceholderRegistry = new PlaceholderRegistry();
public static Logger defaultLogger = new Logger();
// public static JavaTXCompiler INSTANCE;
final CompilationEnvironment environment;
Boolean resultmodel = true;
public final Map<File, SourceFile> sourceFiles = new HashMap<>();
Boolean log = false; //gibt an ob ein Log-File nach System.getProperty("user.dir")+""/logFiles/"" geschrieben werden soll?
public volatile UnifyTaskModel usedTasks = new UnifyTaskModel();
public final DirectoryClassLoader classLoader;
public final List<File> classPath;
private final File outputPath;
private final Optional<String> unifyServer;
public DirectoryClassLoader getClassLoader() {
return classLoader;
}
public JavaTXCompiler(File sourceFile) throws IOException, ClassNotFoundException {
this(Arrays.asList(sourceFile), List.of(), new File("."), Optional.empty());
}
public JavaTXCompiler(File sourceFile, Boolean log) throws IOException, ClassNotFoundException {
this(sourceFile);
this.log = log;
this(Collections.singletonList(sourceFile), List.of(), new File("."));
}
public JavaTXCompiler(List<File> sourceFiles) throws IOException, ClassNotFoundException {
this(sourceFiles, List.of(), new File("."), Optional.empty());
this(sourceFiles, List.of(), new File("."));
}
public JavaTXCompiler(List<File> sources, List<File> contextPath, File outputPath) throws IOException, ClassNotFoundException {
this(sources, contextPath, outputPath, Optional.empty());
}
// ensure new default placeholder registry for tests
defaultClientPlaceholderRegistry = new PlaceholderRegistry();
NameGenerator.reset();
ASTToTargetAST.OBJECT = ASTFactory.createObjectType();
public JavaTXCompiler(List<File> sources, List<File> contextPath, File outputPath, Optional<String> unifyServer) throws IOException, ClassNotFoundException {
this.unifyServer = unifyServer;
var path = new ArrayList<>(contextPath);
if (contextPath.isEmpty()) {
// When no contextPaths are given, the working directory is the sources root
@@ -311,52 +313,51 @@ public class JavaTXCompiler {
Set<Set<UnifyPair>> results = new HashSet<>();
UnifyResultModel urm = null;
// urm.addUnifyResultListener(resultListener);
UnifyContext context = new UnifyContext(logFile, log, true, urm, usedTasks, PlaceholderRegistry.defaultRegistry);
try {
logFile = logFile == null ? new FileWriter(new File("log_" + sourceFiles.keySet().iterator().next().getName())) : logFile;
IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses, logFile, getClassLoader(), this, context.placeholderRegistry());
System.out.println(finiteClosure);
urm = new UnifyResultModel(cons, finiteClosure);
urm.addUnifyResultListener(resultListener);
ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons, context.placeholderRegistry());
logFile = logFile == null ? new FileWriter("log_" + sourceFiles.keySet().iterator().next().getName()) : logFile;
Logger logger = new Logger(logFile, "TypeInferenceAsync");
UnifyContext context = new UnifyContext(logger, true, urm, usedTasks, defaultClientPlaceholderRegistry);
Function<UnifyPair, UnifyPair> distributeInnerVars = x -> {
UnifyType lhs, rhs;
if (((lhs = x.getLhsType()) instanceof PlaceholderType) && ((rhs = x.getRhsType()) instanceof PlaceholderType) && (((PlaceholderType) lhs).isInnerType() || ((PlaceholderType) rhs).isInnerType())) {
((PlaceholderType) lhs).setInnerType(true);
((PlaceholderType) rhs).setInnerType(true);
}
return x;
IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses, logger, getClassLoader(), this, context.placeholderRegistry());
logger.info(finiteClosure.toString());
urm = new UnifyResultModel(cons, finiteClosure);
urm.addUnifyResultListener(resultListener);
ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons, context.placeholderRegistry());
};
logFile.write(unifyCons.toString());
unifyCons = unifyCons.map(distributeInnerVars);
logFile.write(unifyCons.toString());
// Set<Set<UnifyPair>> results = new HashSet<>(); Nach vorne gezogen
logFile.write("FC:\\" + finiteClosure.toString() + "\n");
for (SourceFile f : this.sourceFiles.values()) {
logFile.write(ASTTypePrinter.print(f));
Function<UnifyPair, UnifyPair> distributeInnerVars = x -> {
UnifyType lhs, rhs;
if (((lhs = x.getLhsType()) instanceof PlaceholderType) && ((rhs = x.getRhsType()) instanceof PlaceholderType) && (((PlaceholderType) lhs).isInnerType() || ((PlaceholderType) rhs).isInnerType())) {
((PlaceholderType) lhs).setInnerType(true);
((PlaceholderType) rhs).setInnerType(true);
}
// logFile.flush();
Set<PlaceholderType> varianceTPHold;
Set<PlaceholderType> varianceTPH = new HashSet<>();
varianceTPH = varianceInheritanceConstraintSet(unifyCons);
return x;
/*
* PL 2018-11-07 wird in varianceInheritanceConstraintSet erledigt do { //PL 2018-11-05 Huellenbildung Variance auf alle TPHs der Terme auf der jeweiligen //anderen Seite übertragen varianceTPHold = new HashSet<>(varianceTPH); varianceTPH = varianceInheritanceConstraintSet(unifyCons); unifyCons.map( y -> { if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType() instanceof PlaceholderType)) { if (((PlaceholderType)y.getLhsType()).getVariance() != 0 &&
* ((PlaceholderType)y.getRhsType()).getVariance() == 0) { ((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType( )).getVariance()); } if (((PlaceholderType)y.getLhsType()).getVariance() == 0 && ((PlaceholderType)y.getRhsType()).getVariance() != 0) { ((PlaceholderType)y.getLhsType()).setVariance(((PlaceholderType)y.getRhsType( )).getVariance()); } } return y; } ); } while (!varianceTPHold.equals(varianceTPH));
*/
// Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure,
// logFile, log);
// Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
List<Set<Constraint<UnifyPair>>> oderConstraints = unifyCons.getOderConstraints()/*
* .stream().map(x -> { Set<Set<UnifyPair>> ret = new HashSet<>(); for (Constraint<UnifyPair> y : x) { ret.add(new HashSet<>(y)); } return ret; }).collect(Collectors. toCollection(ArrayList::new))
*/;
TypeUnify.unifyAsync(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
} catch (IOException e) {
System.err.println("kein LogFile");
};
logger.debug(unifyCons.toString());
unifyCons = unifyCons.map(distributeInnerVars);
logger.debug(unifyCons.toString());
// Set<Set<UnifyPair>> results = new HashSet<>(); Nach vorne gezogen
logger.debug("FC:\\" + finiteClosure.toString() + "\n");
for (SourceFile f : this.sourceFiles.values()) {
logger.debug(ASTTypePrinter.print(f));
}
// logFile.flush();
Set<PlaceholderType> varianceTPHold;
Set<PlaceholderType> varianceTPH = new HashSet<>();
varianceTPH = varianceInheritanceConstraintSet(unifyCons);
/*
* PL 2018-11-07 wird in varianceInheritanceConstraintSet erledigt do { //PL 2018-11-05 Huellenbildung Variance auf alle TPHs der Terme auf der jeweiligen //anderen Seite übertragen varianceTPHold = new HashSet<>(varianceTPH); varianceTPH = varianceInheritanceConstraintSet(unifyCons); unifyCons.map( y -> { if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType() instanceof PlaceholderType)) { if (((PlaceholderType)y.getLhsType()).getVariance() != 0 &&
* ((PlaceholderType)y.getRhsType()).getVariance() == 0) { ((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType( )).getVariance()); } if (((PlaceholderType)y.getLhsType()).getVariance() == 0 && ((PlaceholderType)y.getRhsType()).getVariance() != 0) { ((PlaceholderType)y.getLhsType()).setVariance(((PlaceholderType)y.getRhsType( )).getVariance()); } } return y; } ); } while (!varianceTPHold.equals(varianceTPH));
*/
// Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure,
// logFile, log);
// Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
List<Set<Constraint<UnifyPair>>> oderConstraints = unifyCons.getOderConstraints()/*
* .stream().map(x -> { Set<Set<UnifyPair>> ret = new HashSet<>(); for (Constraint<UnifyPair> y : x) { ret.add(new HashSet<>(y)); } return ret; }).collect(Collectors. toCollection(ArrayList::new))
*/;
TypeUnify.unifyAsync(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
return urm;
}
@@ -377,103 +378,106 @@ public class JavaTXCompiler {
final ConstraintSet<Pair> cons = getConstraints(file);
Set<Set<UnifyPair>> results = new HashSet<>();
PlaceholderRegistry placeholderRegistry = new PlaceholderRegistry();
try {
var logFolder = new File(System.getProperty("user.dir") + "/logFiles/");
if (log) logFolder.mkdirs();
Writer logFile = log ? new FileWriter(new File(logFolder, "log_" + sourceFiles.keySet().iterator().next().getName())) : new OutputStreamWriter(new NullOutputStream());
FiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses.stream().toList(), logFile, classLoader, this, placeholderRegistry);
System.out.println(finiteClosure);
ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons, placeholderRegistry);
System.out.println("xxx1");
Function<UnifyPair, UnifyPair> distributeInnerVars = x -> {
UnifyType lhs, rhs;
if (((lhs = x.getLhsType()) instanceof PlaceholderType) && ((rhs = x.getRhsType()) instanceof PlaceholderType) && (((PlaceholderType) lhs).isInnerType() || ((PlaceholderType) rhs).isInnerType())) {
((PlaceholderType) lhs).setInnerType(true);
((PlaceholderType) rhs).setInnerType(true);
}
return x;
};
var logFolder = new File(System.getProperty("user.dir") + "/logFiles/");
if (ConsoleInterface.writeLogFiles && !logFolder.exists() && !logFolder.mkdirs()) throw new RuntimeException("Could not create directory for log files: " + logFolder);
Writer logFile = ConsoleInterface.writeLogFiles ? new FileWriter(new File(logFolder, "log_" + sourceFiles.keySet().iterator().next().getName())) : new OutputStreamWriter(new NullOutputStream());
Logger logger = new Logger(logFile, "TypeInference");
logFile.write("Unify:" + unifyCons.toString());
System.out.println("Unify:" + unifyCons.toString());
unifyCons = unifyCons.map(distributeInnerVars);
logFile.write("\nUnify_distributeInnerVars: " + unifyCons.toString());
// Set<Set<UnifyPair>> results = new HashSet<>(); Nach vorne gezogen
logFile.write("FC:\\" + finiteClosure.toString() + "\n");
logFile.write(ASTTypePrinter.print(sf));
System.out.println(ASTTypePrinter.print(sf));
// logFile.flush();
System.out.println("Unify nach Oder-Constraints-Anpassung:" + unifyCons.toString());
Set<PlaceholderType> varianceTPHold;
Set<PlaceholderType> varianceTPH = new HashSet<>();
varianceTPH = varianceInheritanceConstraintSet(unifyCons);
/*
* PL 2018-11-07 wird in varianceInheritanceConstraintSet erledigt do { //PL 2018-11-05 Huellenbildung Variance auf alle TPHs der Terme auf der jeweiligen //anderen Seite übertragen varianceTPHold = new HashSet<>(varianceTPH); varianceTPH = varianceInheritanceConstraintSet(unifyCons); unifyCons.map( y -> { if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType() instanceof PlaceholderType)) { if (((PlaceholderType)y.getLhsType()).getVariance() != 0 &&
* ((PlaceholderType)y.getRhsType()).getVariance() == 0) { ((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType( )).getVariance()); } if (((PlaceholderType)y.getLhsType()).getVariance() == 0 && ((PlaceholderType)y.getRhsType()).getVariance() != 0) { ((PlaceholderType)y.getLhsType()).setVariance(((PlaceholderType)y.getRhsType( )).getVariance()); } } return y; } ); } while (!varianceTPHold.equals(varianceTPH));
*/
// Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure,
// logFile, log);
// Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
List<Set<Constraint<UnifyPair>>> oderConstraints = unifyCons.getOderConstraints()// .stream().map(x -> {
/*
* Set<Set<UnifyPair>> ret = new HashSet<>(); for (Constraint<UnifyPair> y : x) { ret.add(new HashSet<>(y)); } return ret; }).collect(Collectors.toCollection(ArrayList::new))
*/;
if (unifyServer.isPresent()) {
UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
UnifyContext context = new UnifyContext(logFile, log, true, urm, usedTasks, placeholderRegistry);
SocketClient socketClient = new SocketClient(unifyServer.get());
return socketClient.execute(finiteClosure, cons, unifyCons, context);
}
else if (resultmodel) {
/* UnifyResultModel Anfang */
UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
UnifyResultListenerImpl li = new UnifyResultListenerImpl();
urm.addUnifyResultListener(li);
UnifyContext context = new UnifyContext(logFile, log, true, urm, usedTasks, placeholderRegistry);
TypeUnify.unifyParallel(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
System.out.println("RESULT Final: " + li.getResults());
System.out.println("Constraints for Generated Generics: " + " ???");
logFile.write("RES_FINAL: " + li.getResults().toString() + "\n");
// logFile.flush();
return li.getResults();
FiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses.stream().toList(), logger, classLoader, this, placeholderRegistry);
logger.info(finiteClosure.toString());
ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons, placeholderRegistry);
logger.info("xxx1");
Function<UnifyPair, UnifyPair> distributeInnerVars = x -> {
UnifyType lhs, rhs;
if (((lhs = x.getLhsType()) instanceof PlaceholderType) && ((rhs = x.getRhsType()) instanceof PlaceholderType) && (((PlaceholderType) lhs).isInnerType() || ((PlaceholderType) rhs).isInnerType())) {
((PlaceholderType) lhs).setInnerType(true);
((PlaceholderType) rhs).setInnerType(true);
}
/* UnifyResultModel End */
else {
// Set<Set<UnifyPair>> result = unify.unify(unifyCons.getUndConstraints(),
// oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons,
// finiteClosure));
UnifyContext context = new UnifyContext(logFile, log, false, new UnifyResultModel(cons, finiteClosure), usedTasks, placeholderRegistry);
Set<Set<UnifyPair>> result = TypeUnify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
System.out.println("RESULT: " + result);
logFile.write("RES: " + result.toString() + "\n");
// logFile.flush();
results.addAll(result);
return x;
results = results.stream().map(x -> {
Optional<Set<UnifyPair>> res = new RuleSet(placeholderRegistry).subst(x.stream().map(y -> {
if (y.getPairOp() == PairOperator.SMALLERDOTWC)
y.setPairOp(PairOperator.EQUALSDOT);
return y; // alle Paare a <.? b erden durch a =. b ersetzt
}).collect(Collectors.toCollection(HashSet::new)));
if (res.isPresent()) {// wenn subst ein Erg liefert wurde was veraendert
return new TypeUnifyTask(context).applyTypeUnificationRules(res.get(), finiteClosure);
} else
return x; // wenn nichts veraendert wurde wird x zurueckgegeben
}).collect(Collectors.toCollection(HashSet::new));
System.out.println("RESULT Final: " + results);
System.out.println("Constraints for Generated Generics: " + " ???");
logFile.write("RES_FINAL: " + results.toString() + "\n");
// logFile.flush();
logFile.write("PLACEHOLDERS: " + placeholderRegistry);
// logFile.flush();
}
} catch (IOException e) {
System.err.println("kein LogFile");
};
logger.debug("Unify:" + unifyCons.toString());
logger.info("Unify:" + unifyCons.toString());
unifyCons = unifyCons.map(distributeInnerVars);
logger.debug("\nUnify_distributeInnerVars: " + unifyCons.toString());
// Set<Set<UnifyPair>> results = new HashSet<>(); Nach vorne gezogen
logger.debug("FC:\\" + finiteClosure.toString() + "\n");
logger.debug(ASTTypePrinter.print(sf));
logger.info(ASTTypePrinter.print(sf));
// logFile.flush();
logger.info("Unify nach Oder-Constraints-Anpassung:" + unifyCons.toString());
Set<PlaceholderType> varianceTPHold;
Set<PlaceholderType> varianceTPH = new HashSet<>();
varianceTPH = varianceInheritanceConstraintSet(unifyCons);
/*
* PL 2018-11-07 wird in varianceInheritanceConstraintSet erledigt do { //PL 2018-11-05 Huellenbildung Variance auf alle TPHs der Terme auf der jeweiligen //anderen Seite übertragen varianceTPHold = new HashSet<>(varianceTPH); varianceTPH = varianceInheritanceConstraintSet(unifyCons); unifyCons.map( y -> { if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType() instanceof PlaceholderType)) { if (((PlaceholderType)y.getLhsType()).getVariance() != 0 &&
* ((PlaceholderType)y.getRhsType()).getVariance() == 0) { ((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType( )).getVariance()); } if (((PlaceholderType)y.getLhsType()).getVariance() == 0 && ((PlaceholderType)y.getRhsType()).getVariance() != 0) { ((PlaceholderType)y.getLhsType()).setVariance(((PlaceholderType)y.getRhsType( )).getVariance()); } } return y; } ); } while (!varianceTPHold.equals(varianceTPH));
*/
// Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure,
// logFile, log);
// Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
List<Set<Constraint<UnifyPair>>> oderConstraints = unifyCons.getOderConstraints()// .stream().map(x -> {
/*
* Set<Set<UnifyPair>> ret = new HashSet<>(); for (Constraint<UnifyPair> y : x) { ret.add(new HashSet<>(y)); } return ret; }).collect(Collectors.toCollection(ArrayList::new))
*/;
if (ConsoleInterface.unifyServerUrl.isPresent()) {
UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
UnifyContext context = new UnifyContext(logger, true, urm, usedTasks, placeholderRegistry);
SocketFuture<UnifyResultPacket> future = SocketClient.execute(
UnifyRequestPacket.create(finiteClosure, cons, unifyCons, context.placeholderRegistry())
);
SocketClient.execute(SetAutoclosePacket.create());
return future.get().getResultSet(context);
}
else if (resultmodel) {
/* UnifyResultModel Anfang */
UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
UnifyResultListenerImpl li = new UnifyResultListenerImpl();
urm.addUnifyResultListener(li);
UnifyContext context = new UnifyContext(logger, true, urm, usedTasks, placeholderRegistry);
TypeUnify.unifyParallel(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
logger.info("RESULT Final: " + li.getResults());
logger.info("Constraints for Generated Generics: " + " ???");
logFile.write("RES_FINAL: " + li.getResults().toString() + "\n");
// logFile.flush();
return li.getResults();
}
/* UnifyResultModel End */
else {
// Set<Set<UnifyPair>> result = unify.unify(unifyCons.getUndConstraints(),
// oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons,
// finiteClosure));
UnifyContext context = new UnifyContext(logger, false, new UnifyResultModel(cons, finiteClosure), usedTasks, placeholderRegistry);
Set<Set<UnifyPair>> result = TypeUnify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
logger.info("RESULT: " + result);
logFile.write("RES: " + result.toString() + "\n");
// logFile.flush();
results.addAll(result);
results = results.stream().map(x -> {
Optional<Set<UnifyPair>> res = new RuleSet(placeholderRegistry).subst(x.stream().map(y -> {
if (y.getPairOp() == PairOperator.SMALLERDOTWC)
y.setPairOp(PairOperator.EQUALSDOT);
return y; // alle Paare a <.? b erden durch a =. b ersetzt
}).collect(Collectors.toCollection(HashSet::new)));
if (res.isPresent()) {// wenn subst ein Erg liefert wurde was veraendert
return new TypeUnifyTask(context).applyTypeUnificationRules(res.get(), finiteClosure);
} else
return x; // wenn nichts veraendert wurde wird x zurueckgegeben
}).collect(Collectors.toCollection(HashSet::new));
logger.info("RESULT Final: " + results);
logger.info("Constraints for Generated Generics: " + " ???");
logger.debug("RES_FINAL: " + results.toString() + "\n");
// logFile.flush();
logger.debug("PLACEHOLDERS: " + placeholderRegistry);
// logFile.flush();
}
return results.stream().map((unifyPairs -> new ResultSet(UnifyTypeFactory.convert(unifyPairs, Pair.generateTPHMap(cons), placeholderRegistry)))).collect(Collectors.toList());
}
@@ -660,15 +664,15 @@ public class JavaTXCompiler {
FileOutputStream output;
for (JavaClassName name : classFiles.keySet()) {
byte[] bytecode = classFiles.get(name);
System.out.println("generating " + name + ".class file ...");
defaultLogger.info("generating " + name + ".class file ...");
var subPath = preserveHierarchy ? path : Path.of(path.toString(), name.getPackageName().split("\\.")).toFile();
File outputFile = new File(subPath, name.getClassName() + ".class");
outputFile.getAbsoluteFile().getParentFile().mkdirs();
System.out.println(outputFile);
defaultLogger.info(outputFile.toString());
output = new FileOutputStream(outputFile);
output.write(bytecode);
output.close();
System.out.println(name + ".class file generated");
defaultLogger.success(name + ".class file generated");
}
}
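
The new remote branch of typeInference, condensed with comments; these are the diff's own calls (the surrounding locals finiteClosure, cons, unifyCons and context are set up earlier in the method):

// Send the whole unification problem to the configured unify server ...
SocketFuture<UnifyResultPacket> future = SocketClient.execute(
        UnifyRequestPacket.create(finiteClosure, cons, unifyCons, context.placeholderRegistry()));
// ... tell the server it may close the connection once all submitted tasks are answered ...
SocketClient.execute(SetAutoclosePacket.create());
// ... then block until the UnifyResultPacket arrives and convert it back into ResultSets.
return future.get().getResultSet(context);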

View File

@@ -4,13 +4,27 @@ import de.dhbwstuttgart.server.SocketServer;
public class JavaTXServer {
public void listen(int port) {
public static boolean isRunning = false;
final SocketServer socketServer;
public JavaTXServer(int port) {
this.socketServer = new SocketServer(port);
}
public void listen() {
isRunning = true;
socketServer.start();
}
public void forceStop() {
try {
SocketServer socketServer = new SocketServer(port);
socketServer.start();
} catch (Exception e) {
e.printStackTrace();
socketServer.stop();
}
catch (InterruptedException exception) {
System.err.println("Interrupted socketServer: " + exception);
}
isRunning = false;
}
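
Matching the ConsoleInterface change, starting the unify server now looks like this (port value illustrative; constructor and methods as shown in the two diffs):

// Started from ConsoleInterface when --server-mode <port> is given.
JavaTXServer server = new JavaTXServer(8080);
server.listen();        // sets isRunning and starts the underlying SocketServer
// ...
// server.forceStop();  // stops the SocketServer and resets isRunning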

View File

@@ -7,6 +7,7 @@ import de.dhbwstuttgart.parser.antlr.Java17Parser;
import de.dhbwstuttgart.parser.scope.JavaClassRegistry;
import de.dhbwstuttgart.syntaxtree.SourceFile;
import de.dhbwstuttgart.util.Logger;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
@@ -17,6 +18,9 @@ import java.util.ArrayList;
import java.util.List;
public class JavaTXParser {
public static Logger logger = new Logger("Parser");
public static Java17Parser.SourceFileContext parse(File source) throws IOException, java.lang.ClassNotFoundException {
InputStream stream = new FileInputStream(source);
// DEPRECATED: ANTLRInputStream input = new ANTLRInputStream(stream);

View File

@@ -111,7 +111,7 @@ public class FCGenerator {
//Generics mit gleichem Namen müssen den selben TPH bekommen
for(GenericTypeVar gtv : forType.getGenerics()){
if(!gtvs.containsKey(gtv.getName())){
TypePlaceholder replacePlaceholder = TypePlaceholder.fresh(new NullToken(), placeholderRegistry);
TypePlaceholder replacePlaceholder = TypePlaceholder.fresh(new NullToken());
gtvs.put(gtv.getName(), replacePlaceholder);
newGTVs.put(gtv.getName(), replacePlaceholder);
}

View File

@@ -1,5 +1,6 @@
package de.dhbwstuttgart.parser.SyntaxTreeGenerator;
import de.dhbwstuttgart.parser.JavaTXParser;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
@@ -259,7 +260,7 @@ public class StatementGenerator {
ret.setStatement();
return ret;
default:
System.out.println(stmt.getClass());
JavaTXParser.logger.info(stmt.getClass());
throw new NotImplementedException();
}
}

View File

@@ -74,7 +74,7 @@ public class TypeGenerator {
throw new NotImplementedException();
}
} else if (!typeContext.LBRACK().isEmpty()) { // ArrayType über eckige Klammer prüfen
// System.out.println(unannTypeContext.getText());
// JavaTXParser.logger.info(unannTypeContext.getText());
throw new NotImplementedException();
}
/*

View File

@@ -0,0 +1,41 @@
package de.dhbwstuttgart.server;

import de.dhbwstuttgart.util.Logger;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import org.java_websocket.WebSocket;

public class ServerTaskLogger extends Logger {
    private final WebSocket webSocket;
    private final SocketServer socketServer;
    private final LogLevel customLogLevel;

    public ServerTaskLogger(WebSocket webSocket, SocketServer socketServer, LogLevel customLogLevel) {
        this.webSocket = webSocket;
        this.socketServer = socketServer;
        this.customLogLevel = customLogLevel;
    }

    @Override
    public boolean isLogLevelActive(LogLevel logLevel) {
        return logLevel.isHigherOrEqualTo(customLogLevel);
    }

    @Override
    protected void print(String s, LogLevel logLevel) {
        String coloredPrefix = this.getPrefix(logLevel);
        if (logLevel.isHigherOrEqualTo(LogLevel.ERROR)) {
            socketServer.sendError(webSocket, coloredPrefix + s, false);
        }
        else {
            socketServer.sendMessage(webSocket, coloredPrefix + s);
        }
    }

    @Override
    protected void write(String s) {
        // under no circumstances write anything to a file
    }
}
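
Presumably the server hands one of these to each running unify task so the client sees the remote log output live; a hypothetical usage sketch, using only the constructor and Logger methods shown above (where the LogLevel comes from is an assumption):

// Hypothetical handler-side code; webSocket and socketServer come from the packet handler.
Logger taskLogger = new ServerTaskLogger(webSocket, socketServer, Logger.LogLevel.INFO);
taskLogger.info("unify task accepted");     // below ERROR: forwarded via socketServer.sendMessage(...)
taskLogger.success("unify task finished");  // also routed through print(...); write(...) stays a no-op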

View File

@@ -1,6 +1,8 @@
package de.dhbwstuttgart.server;
import com.fasterxml.jackson.core.JsonProcessingException;
import de.dhbwstuttgart.core.ConsoleInterface;
import de.dhbwstuttgart.server.packet.IClientToServerPacket;
import de.dhbwstuttgart.server.packet.IPacket;
import de.dhbwstuttgart.server.packet.IServerToClientPacket;
import de.dhbwstuttgart.server.packet.PacketContainer;
@@ -12,10 +14,12 @@ import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.util.Logger;
import java.net.URI;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CountDownLatch;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -29,39 +33,33 @@ import org.java_websocket.handshake.ServerHandshake;
*/
public class SocketClient extends WebSocketClient {
// use a latch to wait until the connection is closed by the remote host
private final CountDownLatch closeLatch = new CountDownLatch(1);
// temporarily: The received unify result
// TODO: replace with uuid and future system, such that responses can be mapped by a uuid to fulfill a Future
private UnifyResultPacket unifyResultPacket;
public static Logger logger = new Logger("SocketClient");
public SocketClient(String url) {
super(URI.create(url), Map.of(
"packetProtocolVersion", SocketServer.packetProtocolVersion
));
// make sure, the url is in a valid format
/**
* The singleton object
*/
private static SocketClient socketClient = null;
/**
* List of futures that are still waiting to be fulfilled
*/
private final Map<String, SocketFuture<?>> responseFutures = new HashMap<>();
private SocketClient(String url) {
super(
URI.create(url), // target url
//SocketServer.perMessageDeflateDraft, // enable compression
Map.of( // headers
"packetProtocolVersion", SocketServer.packetProtocolVersion
)
);
// make sure the url is in a valid format
final String regex = "^wss?://(\\w+(\\.\\w+)?)*:(\\d+)$";
final Matcher matcher = Pattern.compile(regex).matcher(url);
if (!matcher.find()) {
throw new RuntimeException("Provided string \"" + url + "\" is not a valid server URL! Use pattern ws(s?)://<host.name>:<port>");
}
}
public SocketClient(String host, int port, boolean secure) {
super(URI.create(String.format("%s://%s:%d/", secure ? "wss" : "ws", host, port)));
}
/**
* The main method for connecting, requesting and waiting for the server to unify.
* This is synchronized to prevent multiple webSockets connections at the moment, but it is not called from any
* thread except the main thread right now and is not necessary at all, probably. Maybe remove it later
*/
synchronized public List<ResultSet> execute(
FiniteClosure finiteClosure,
ConstraintSet<Pair> constraintSet,
ConstraintSet<UnifyPair> unifyConstraintSet,
UnifyContext context
) throws JsonProcessingException {
try {
// wait for the connection to be set up
@@ -70,89 +68,135 @@ public class SocketClient extends WebSocketClient {
if (this.getReadyState() != ReadyState.OPEN) {
throw new RuntimeException("WebSocket Client could not connect to remote host at " + this.uri);
}
// send the unify task request
UnifyRequestPacket packet = new UnifyRequestPacket(finiteClosure, constraintSet, unifyConstraintSet, context.placeholderRegistry());
String json = PacketContainer.serialize(packet);
this.send(json);
// block the thread, until the connection is closed by the remote host (usually after sending the results)
this.waitUntilClosed();
// wait for the connection to fully close
this.closeBlocking();
} catch (InterruptedException exception) {
System.err.println("Server connection interrupted: " + exception);
this.notifyAll();
throw new RuntimeException("Aborted server connection", exception);
}
catch (Exception exception) {
throw new RuntimeException("Exception occurred in server connection: ", exception);
throw new RuntimeException(exception);
}
// detect error cases, in which no error was thrown, but also no result was sent back from the server
if (this.unifyResultPacket == null) {
throw new RuntimeException("Did not receive server response but closed connection already");
}
// add a shutdown hook to close the connection when the process ends or is stopped by a SIGINT signal
Runtime.getRuntime().addShutdownHook(new Thread(this::close));
}
return unifyResultPacket.getResultSet(context);
private SocketClient(String host, int port, boolean secure) throws InterruptedException {
this(String.format("%s://%s:%d/", secure ? "wss" : "ws", host, port));
}
/**
* Specific client-side implementations to handle incoming packets
* Singleton access method, creates one if none is available
*
* @return The one and only socketClient
*/
private static SocketClient initializeClient() {
if (socketClient == null) {
socketClient = new SocketClient(ConsoleInterface.unifyServerUrl.get());
}
return socketClient;
}
/**
* Send a packet to the server (connection will be created, if none is found) and return a future
* for the response packet
*/
synchronized public static <T extends IServerToClientPacket> SocketFuture<T> execute(IClientToServerPacket<T> packet) {
SocketClient client = initializeClient();
/*
* Create a future that will be associated with the packet and eventually completed
*/
SocketFuture<T> future = packet.getFuture();
if (!future.isDone()) {
client.responseFutures.put(future.futureId, future);
}
/*
* Establish connection, if not already done.
* Serialize the packet and send it to the server.
* Return the future to be handled by the caller.
*/
try {
String json = PacketContainer.serialize(packet);
client.send(json);
} catch (Exception exception) {
logger.exception(exception);
throw new RuntimeException("Exception occurred in server connection: ", exception);
}
return future;
}
/**
* Shortcut for waiting and retrieving the response immediately
*
* @param packet The packet to send
* @param <T> The type of response packet to await
* @return The response packet, once it is received
*/
public static <T extends IServerToClientPacket> T executeAndGet(IClientToServerPacket<T> packet) {
return SocketClient.execute(packet).get();
}
/**
* Specific client-side implementations to handle incoming packets
*/
protected void handleReceivedPacket(IPacket packet) {
if (packet instanceof IServerToClientPacket serverToClientPacket) {
try {
serverToClientPacket.onHandle(this.getConnection(), this);
}
catch (Exception exception) {
this.closeLatch.countDown();
this.close();
throw exception;
}
if (!(packet instanceof IServerToClientPacket serverToClientPacket)) {
System.err.println("Received package of invalid type + " + packet.getClass().getName());
this.close();
return;
}
System.err.println("Received package of invalid type + " + packet.getClass().getName());
this.close();
serverToClientPacket.onHandle(this.getConnection(), this);
}
public void setUnifyResultSets(UnifyResultPacket unifyResultPacket) {
this.unifyResultPacket = unifyResultPacket;
/**
* Complete a registered future, so it can be handled by whoever executed the creator task
*
* @param id The associated id for this future
* @param trigger The object triggering the completion
*/
public void completeResponseFuture(String id, IServerToClientPacket trigger) {
SocketFuture<?> future = this.responseFutures.remove(id);
if (future == null) return;
if (!future.accept(trigger)) {
throw new RuntimeException("Packet " + trigger.getClass().getName() + " tried to complete future, but was not allowed to");
}
}
public static void closeIfOpen() {
if (socketClient != null && socketClient.isOpen()) {
socketClient.close();
}
}
@Override
public void onOpen(ServerHandshake handshakedata) {
System.out.println("Connected to server with status " + handshakedata.getHttpStatus());
logger.success("Connected to server with status " + handshakedata.getHttpStatus());
}
@Override
public void onMessage(String message) {
// System.out.println("received: " + message);
// logger.info("received: " + message);
IPacket packet = PacketContainer.deserialize(message);
this.handleReceivedPacket(packet);
}
@Override
public void onClose(int code, String reason, boolean remote) {
System.out.println(
logger.info(
"Disconnected from server " +
"with code " + code + " " +
(reason.isEmpty() ? "" : "and reason " + reason + " ") +
"(closed by remote: " + remote + ")"
);
this.closeLatch.countDown();
if (!this.responseFutures.isEmpty()) {
throw new RuntimeException("Server closed before all required tasks were answered");
}
}
@Override
public void onError(Exception e) {
System.out.println("Error: " + e.getMessage());
e.printStackTrace();
logger.exception(e);
throw new RuntimeException(e);
}
public void waitUntilClosed() throws InterruptedException {
closeLatch.await();
}
}
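
Seen from calling code, the rewritten client works as a singleton request/response pipe: execute() registers a SocketFuture under its futureId and sends the packet, completeResponseFuture() resolves it when the matching response arrives, and executeAndGet() is the blocking shortcut. A sketch using only the static methods above, with the concrete packets from JavaTXCompiler's remote path:

// Blocking shortcut: send the request and wait for the typed response packet.
UnifyResultPacket result = SocketClient.executeAndGet(
        UnifyRequestPacket.create(finiteClosure, cons, unifyCons, placeholderRegistry));

// Non-blocking variant: keep the future, do other work, resolve it later.
SocketFuture<UnifyResultPacket> future = SocketClient.execute(
        UnifyRequestPacket.create(finiteClosure, cons, unifyCons, placeholderRegistry));
// ... other work ...
UnifyResultPacket later = future.get();

// Once everything is submitted, allow the server to close and drop the connection.
SocketClient.execute(SetAutoclosePacket.create());
SocketClient.closeIfOpen();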

View File

@@ -0,0 +1,48 @@
package de.dhbwstuttgart.server;

import de.dhbwstuttgart.server.packet.IServerToClientPacket;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;

public class SocketFuture<T extends IServerToClientPacket> extends CompletableFuture<T> {
    public final String futureId = UUID.randomUUID().toString();
    public final List<Class<T>> allowedTriggers;

    public SocketFuture(List<Class<T>> allowedTriggers) {
        this.allowedTriggers = allowedTriggers;
    }

    public boolean accept(IServerToClientPacket trigger) {
        if (this.allowedTriggers.contains(trigger.getClass())) {
            this.complete((T)trigger);
            return true;
        }
        return false;
    }

    @Override
    public T get() {
        try {
            return super.get();
        }
        catch (InterruptedException | ExecutionException exception) {
            throw new RuntimeException(exception);
        }
    }

    /**
     * Special case where the future is immediately fulfilled without a response package similar to
     * <code>CompletableFuture.completedFuture()</code> but without a value
     */
    public static <R extends IServerToClientPacket> SocketFuture<R> completedFuture() {
        SocketFuture<R> dummyFuture = new SocketFuture<>(new ArrayList<>(0));
        dummyFuture.complete(null);
        return dummyFuture;
    }
}
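
How a future gets matched to its response: the request packet's getFuture() names the expected response class in allowedTriggers, and SocketClient.completeResponseFuture() later calls accept() with the received packet. A hypothetical sketch of that wiring; only the constructor, accept and completedFuture come from the class above, the List.of call is an assumption about how UnifyRequestPacket does it:

// In a request packet that expects a UnifyResultPacket back (hypothetical wiring):
public SocketFuture<UnifyResultPacket> getFuture() {
    return new SocketFuture<>(List.of(UnifyResultPacket.class));
}

// Requests without a response (e.g. DebugPacket below) instead return the pre-completed dummy:
// return SocketFuture.completedFuture();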

View File

@@ -6,76 +6,104 @@ import de.dhbwstuttgart.server.packet.IClientToServerPacket;
import de.dhbwstuttgart.server.packet.IPacket;
import de.dhbwstuttgart.server.packet.MessagePacket;
import de.dhbwstuttgart.server.packet.PacketContainer;
import de.dhbwstuttgart.util.Logger;
import java.net.InetSocketAddress;
import java.util.Objects;
import java.util.Collections;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.java_websocket.WebSocket;
import org.java_websocket.drafts.Draft;
import org.java_websocket.drafts.Draft_6455;
import org.java_websocket.extensions.permessage_deflate.PerMessageDeflateExtension;
import org.java_websocket.handshake.ClientHandshake;
import org.java_websocket.server.WebSocketServer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class SocketServer extends WebSocketServer {
public static Logger logger = new Logger("SocketServer");
public static final int maxTasksPerSession = 100;
private static boolean serverRunning = false;
/**
* Increase this every time a breaking change to the server communication is done.
* This will prevent errors when server version and client version do not match.
* This will prevent errors when the server version and client version do not match.
*/
public static final String packetProtocolVersion = "1";
public static final Logger log = LoggerFactory.getLogger(SocketServer.class);
// create an executor for tasks that will always keep at least one task around
private final ThreadPoolExecutor taskExecutor = new ThreadPoolExecutor(1, Integer.MAX_VALUE,60L, TimeUnit.SECONDS, new SynchronousQueue<>());
// create an executor for scheduling timeouts
private final ScheduledExecutorService timeoutExecutor = Executors.newSingleThreadScheduledExecutor();
public SocketServer(int port) {
super(new InetSocketAddress(port));
this.setConnectionLostTimeout(30);
serverRunning = true;
// add a shutdown hook to close all connections when the process ends or is stopped by a SIGINT signal
Runtime.getRuntime().addShutdownHook(new Thread(this::onShutdown));
}
public static boolean isServerRunning() {
return serverRunning;
}
private void onShutdown() {
serverRunning = false;
try {
for (var webSocket : this.getConnections()) {
this.sendError(webSocket, "Sorry, i am shutting down. You are now on your own, good Luck!", true);
webSocket.close();
}
this.stop();
taskExecutor.shutdown();
timeoutExecutor.shutdown();
} catch (InterruptedException exception) {
// we are shutting down anyway
}
}
@Override
public void onOpen(WebSocket webSocket, ClientHandshake clientHandshake) {
String ppv = clientHandshake.getFieldValue("packetProtocolVersion");
if (!ppv.equals(packetProtocolVersion)) {
try {
ErrorPacket errorPacket = ErrorPacket.create(
"Mismatch in packet protocol version! Client (you): " + ppv + " and Server (me): " + packetProtocolVersion,
true
);
webSocket.send(PacketContainer.serialize(errorPacket));
}
catch (JsonProcessingException exception) {
System.err.println("Failed to serialize json: " + exception);
}
this.sendError(webSocket,
"Mismatch in packet protocol version! Client (you): \"" + ppv + "\" and Server (me): \"" + packetProtocolVersion + "\"",
true
);
webSocket.close(1);
return;
}
SocketData socketData = new SocketData(UUID.randomUUID().toString());
webSocket.setAttachment(socketData);
System.out.println("New connection: " + socketData.id + " (with ppv " + ppv + ")");
SocketData socketData = new SocketData(webSocket);
logger.info("New connection: " + socketData.id + " (with ppv " + ppv + ")");
try {
sendMessage(webSocket, "Welcome to the server!");
// wait 10 seconds for the client to send a task and close the connection if nothing has been received until then
try (ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor()) {
Runnable task = () -> {
if (webSocket.<SocketData>getAttachment().unhandledTasks.get() == 0 || !webSocket.isOpen()) {
final int secondsUntilTimeout = 10;
timeoutExecutor.schedule(() -> {
if (webSocket.<SocketData>getAttachment().totalTasks.get() > 0 || !webSocket.isOpen()) {
return;
}
sendMessage(webSocket, "No task received after 10 seconds. Closing connection...");
sendMessage(webSocket, "No task received after " + secondsUntilTimeout + " seconds. Closing connection...");
webSocket.close();
};
executor.schedule(task, 10, TimeUnit.SECONDS);
executor.shutdown();
}
},
secondsUntilTimeout,
TimeUnit.SECONDS
);
// and finally, when your program wants to exit
} catch (Exception e) {
log.error("e: ", e);
logger.exception(e);
webSocket.close(1, e.getMessage());
}
}
@@ -83,8 +111,8 @@ public class SocketServer extends WebSocketServer {
@Override
public void onClose(WebSocket webSocket, int code, String reason, boolean remote) {
SocketData socketData = webSocket.getAttachment();
System.out.println("Connection closed: " + socketData.id);
System.out.println(
logger.info("Connection closed: " + socketData.id);
logger.info(
"Disconnected client " + socketData.id + " " +
"with code " + code + " " +
(reason.isEmpty() ? "" : "and reason " + reason + " ") +
@@ -95,24 +123,28 @@ public class SocketServer extends WebSocketServer {
@Override
public void onMessage(WebSocket webSocket, String s) {
// System.out.println("Received: " + s.substring(0, 50));
// logger.info("Received: " + s.substring(0, 50));
IPacket reconstructedPacket = PacketContainer.deserialize(s);
try {
this.onPacketReceived(webSocket, reconstructedPacket);
} catch (JsonProcessingException e) {
this.log("Error on processing incoming package: " + e.getMessage(), webSocket);
logger.exception(e);
this.log(webSocket, "Error on processing incoming package: " + e.getMessage());
}
}
@Override
public void onError(WebSocket webSocket, Exception e) {
log(e.getMessage(), webSocket);
webSocket.close();
if (webSocket != null) {
log(webSocket, e.getMessage());
webSocket.close();
}
logger.exception(e);
}
@Override
public void onStart() {
System.out.println("Websocket server started on port " + this.getPort());
logger.success("Websocket server started on port " + this.getPort());
}
/**
@@ -120,45 +152,42 @@ public class SocketServer extends WebSocketServer {
*/
public void sendMessage(WebSocket webSocket, String text) {
try {
MessagePacket message = new MessagePacket();
message.message = text;
MessagePacket message = MessagePacket.create(text);
webSocket.send(PacketContainer.serialize(message));
} catch (Exception e) {
System.err.println("Failed to send message: " + text);
log.error("e: ", e);
logger.exception(e);
}
}
/**
* A shorthand method for sending error messages to the client
*/
public void sendError(WebSocket webSocket, String text) {
public void sendError(WebSocket webSocket, String text, boolean isFatal) {
try {
ErrorPacket error = new ErrorPacket();
error.error = text;
ErrorPacket error = ErrorPacket.create(text, isFatal);
webSocket.send(PacketContainer.serialize(error));
} catch (Exception e) {
System.err.println("Failed to send error: " + text);
log.error("e: ", e);
logger.exception(e);
log(webSocket, "Failed to send error: " + text);
}
}
/**
* The server-side implementation on how to handle certain packets when received
* The server-side implementation on how to handle certain packets when received
*/
private void onPacketReceived(WebSocket webSocket, IPacket packet) throws JsonProcessingException {
SocketData socketData = webSocket.getAttachment();
// limit the amount of tasks per connection
final int maxTasks = 100;
if (socketData.totalTasks.get() >= maxTasks) {
sendError(webSocket, "Exceeded the maximum amount of " + maxTasks + " tasks per session");
// limit the number of tasks per connection
if (socketData.totalTasks.get() >= maxTasksPerSession) {
sendError(webSocket, "Exceeded the maximum amount of " + maxTasksPerSession + " tasks per session", true);
webSocket.close();
return;
}
// only allow packets, that are meant to be handled by the server
if (!(packet instanceof IClientToServerPacket clientToServerPacket)) {
// only allow packets that are meant to be handled by the server
if (!(packet instanceof IClientToServerPacket<?> clientToServerPacket)) {
sendMessage(webSocket, "The package of type " + packet.getClass().getName() + " is not handled by the server!");
return;
}
@@ -167,22 +196,24 @@ public class SocketServer extends WebSocketServer {
socketData.unhandledTasks.incrementAndGet();
socketData.totalTasks.incrementAndGet();
// add the packet to the queue, so it can be started by the worker
// add the packet to the queue so it can be started by the worker
CompletableFuture.runAsync(() -> {
clientToServerPacket.onHandle(webSocket, this);
// if the websocket has 0 unhandled Tasks, close the connection
int remainingUnhandledTasks = socketData.unhandledTasks.decrementAndGet();
if (remainingUnhandledTasks <= 0) {
sendMessage(webSocket, "All requested tasks finished! Closing connection...");
webSocket.close();
if (socketData.closeIfNoTasksLeft) {
// if the websocket has 0 unhandled Tasks, close the connection
if (remainingUnhandledTasks <= 0) {
sendMessage(webSocket, "All requested tasks finished! Closing connection...");
webSocket.close();
}
}
});
}, taskExecutor);
}
public void log(String msg, WebSocket webSocket) {
SocketData socketData = webSocket == null ? new SocketData("???") : webSocket.getAttachment();
System.out.println("["+socketData.id+"] " + msg);
public void log(WebSocket webSocket, String msg) {
String socketId = (webSocket == null) ? "???" : webSocket.<SocketData>getAttachment().id;
logger.info("[" + socketId + "] " + msg);
}
/**
@@ -194,10 +225,11 @@ public class SocketServer extends WebSocketServer {
public final String id;
public final AtomicInteger unhandledTasks = new AtomicInteger(0);
public final AtomicInteger totalTasks = new AtomicInteger(0);
public boolean closeIfNoTasksLeft = false;
public SocketData(String id) {
this.id = id;
public SocketData(WebSocket webSocket) {
this.id = UUID.randomUUID().toString();
webSocket.setAttachment(this);
}
}
}
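The reworked SocketData generates its own UUID and attaches itself to the connection, so the server only has to construct it when a connection opens; log(...) then reads the attachment back via webSocket.<SocketData>getAttachment(). A minimal sketch of that wiring, assuming the shape of onOpen (its body is not part of this diff):

    @Override
    public void onOpen(WebSocket webSocket, ClientHandshake handshake) {
        // Constructing SocketData is enough: the constructor assigns a random UUID
        // and calls webSocket.setAttachment(this) itself.
        new SocketData(webSocket);
        log(webSocket, "Connection opened");
    }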

View File

@@ -2,6 +2,7 @@ package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.SocketServer;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
@@ -10,7 +11,7 @@ import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialUUID;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import org.java_websocket.WebSocket;
public class DebugPacket implements IClientToServerPacket, IServerToClientPacket {
public class DebugPacket implements IClientToServerPacket.Void, IServerToClientPacket {
public SerialUUID a1;
public SerialUUID a2;
@@ -22,9 +23,13 @@ public class DebugPacket implements IClientToServerPacket, IServerToClientPacket
public SerialValue<?> d2;
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketClient socketServer) {}
public void onHandle(WebSocket webSocket, SocketClient socketClient) {}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketServer socketServer) {}
@JsonIgnore
public SocketFuture<IServerToClientPacket> getFuture() {
return SocketFuture.completedFuture();
}
}

View File

@@ -2,16 +2,17 @@ package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.SocketServer;
import org.java_websocket.WebSocket;
/**
* A packet to send simple error messages between the client and the server
*/
public class ErrorPacket implements IClientToServerPacket, IServerToClientPacket {
public class ErrorPacket implements IServerToClientPacket {
/**
* The error endpoint for messages from the server, that should be logged out outputted
* The error endpoint for messages from the server that should be logged out as errors and possibly abort the process
*/
public String error;
public boolean isFatal;
@@ -27,15 +28,9 @@ public class ErrorPacket implements IClientToServerPacket, IServerToClientPacket
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketClient socketClient) {
System.err.println("[socket] " + "ErrorPacket: " + this.error);
SocketClient.logger.exception(new RuntimeException(this.error));
if (this.isFatal) {
throw new RuntimeException("Received fatal error from server: " + this.error);
socketClient.close(1, "Received fatal error from server");
}
}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketServer socketServer) {
socketServer.log("ErrorPacket: " + this.error, webSocket);
}
}
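With the added isFatal flag the server can distinguish recoverable from fatal errors: a fatal ErrorPacket makes the client close the connection instead of merely logging it. Server-side usage, based on the sendError signature shown above:

    // Non-fatal: the client logs the error and keeps the connection open.
    sendError(webSocket, "Could not parse optional metadata", false);
    // Fatal: the client logs the error and closes the connection.
    sendError(webSocket, "Exceeded the maximum amount of tasks per session", true);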

View File

@@ -1,12 +1,26 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.SocketServer;
import org.java_websocket.WebSocket;
public interface IClientToServerPacket extends IPacket {
/**
* A packet that will be sent to the server. Use the <code>Void</code> sub-interface for packets without a response
*
* @param <T> The response packet that will fulfill the future.
*/
public interface IClientToServerPacket<T extends IServerToClientPacket> extends IPacket {
@JsonIgnore
void onHandle(WebSocket webSocket, SocketServer socketServer);
@JsonIgnore
SocketFuture<T> getFuture();
/**
* Special case, where the packet will remain unanswered by the server
*/
interface Void extends IClientToServerPacket<IServerToClientPacket> {}
}
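The interface is now generic over the expected response type, and every client-to-server packet also supplies the SocketFuture the client will wait on; packets the server never answers implement the Void sub-interface. A hypothetical packet (PingPacket does not exist in the repository) sketching both obligations; as the PacketContainer diff further down notes, a real packet would additionally have to be registered in serialize and deserialize:

    // Hypothetical example, not a class from this repository.
    public class PingPacket implements IClientToServerPacket.Void {

        @JsonIgnore
        public void onHandle(WebSocket webSocket, SocketServer socketServer) {
            // executed on the server's taskExecutor once the packet is dequeued
            socketServer.log(webSocket, "Ping received");
        }

        @JsonIgnore
        public SocketFuture<IServerToClientPacket> getFuture() {
            // Void packets never receive an answer, so return an already-completed future
            return SocketFuture.completedFuture();
        }
    }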

View File

@@ -2,13 +2,14 @@ package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.SocketServer;
import org.java_websocket.WebSocket;
/**
* A fallback packet that is generated, if the received json could not be mapped to an existing package
* A fallback packet that is generated if the received JSON could not be mapped to an existing package
*/
public class InvalidPacket implements IClientToServerPacket, IServerToClientPacket {
public class InvalidPacket implements IClientToServerPacket.Void, IServerToClientPacket {
/**
* If available, the error that caused this package to appear
@@ -18,12 +19,17 @@ public class InvalidPacket implements IClientToServerPacket, IServerToClientPack
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketClient socketClient) {
System.err.println("[socket] " + "InvalidPacket: " + this.error);
SocketClient.logger.error("InvalidPacket: " + this.error);
}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketServer socketServer) {
socketServer.log("InvalidPacket: " + this.error, webSocket);
socketServer.log(webSocket, "InvalidPacket: " + this.error);
}
@JsonIgnore
public SocketFuture<IServerToClientPacket> getFuture() {
return SocketFuture.completedFuture();
}
}

View File

@@ -2,16 +2,17 @@ package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.SocketServer;
import org.java_websocket.WebSocket;
/**
* A packet to send simple informational messages between the client and the server
*/
public class MessagePacket implements IClientToServerPacket, IServerToClientPacket {
public class MessagePacket implements IClientToServerPacket.Void, IServerToClientPacket {
/**
* The informational message from the server, that should be logged out outputted
* The informational message from the server that should be logged or outputted
*/
public String message;
@@ -24,13 +25,16 @@ public class MessagePacket implements IClientToServerPacket, IServerToClientPack
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketClient socketClient) {
System.err.println("[socket] " + this.message);
SocketClient.logger.info("SocketMessage: " + this.message);
}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketServer socketServer) {
socketServer.log(this.message, webSocket);
socketServer.log(webSocket, this.message);
}
@JsonIgnore
public SocketFuture<IServerToClientPacket> getFuture() {
return SocketFuture.completedFuture();
}
}

View File

@@ -3,6 +3,7 @@ package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import de.dhbwstuttgart.util.Logger;
/**
* A wrapper for the packet to ensure correct serialization/deserialization and make it possible to detect the matching
@@ -24,13 +25,14 @@ public class PacketContainer {
public UnifyRequestPacket unifyRequestPacket = null;
public UnifyResultPacket unifyResultPacket = null;
public DebugPacket debugPacket = null;
public SetAutoclosePacket setAutoclosePacket = null;
/**
* Generate the JSON string for the given packet
*
* @param packet The packet to serialize
* @return The json representation of the packet
* @return The JSON representation of the packet
*/
public static String serialize(IPacket packet) throws JsonProcessingException {
objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
@@ -46,7 +48,11 @@ public class PacketContainer {
container.unifyResultPacket = (UnifyResultPacket) packet;
else if (packet instanceof DebugPacket)
container.debugPacket = (DebugPacket) packet;
else if (packet instanceof SetAutoclosePacket)
container.setAutoclosePacket = (SetAutoclosePacket) packet;
// Add new packets here and in the deserialize method
else
throw new RuntimeException("Cannot map packet to any known packet class");
return objectMapper.writeValueAsString(container);
}
@@ -75,11 +81,13 @@ public class PacketContainer {
return container.unifyResultPacket;
if (container.debugPacket != null)
return container.debugPacket;
if (container.setAutoclosePacket != null)
return container.setAutoclosePacket;
// Add new packets here and in the serialize method
throw new RuntimeException("Cannot map received json to any known packet class");
} catch (Exception e) {
System.out.println(e);
(new Logger()).exception(e);
InvalidPacket packet = new InvalidPacket();
packet.error = e.getMessage();
return packet;
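serialize wraps the packet into a container with one nullable field per packet type, and deserialize reverses the mapping, falling back to an InvalidPacket when nothing matches. A small round-trip sketch using the factory methods visible in this diff:

    // Round trip through the container (runs against the project classes shown above).
    public class PacketRoundTripDemo {
        public static void main(String[] args) throws JsonProcessingException {
            MessagePacket outgoing = MessagePacket.create("hello server");
            String json = PacketContainer.serialize(outgoing);

            IPacket incoming = PacketContainer.deserialize(json);   // never throws; falls back to InvalidPacket
            if (incoming instanceof MessagePacket message) {
                System.out.println(message.message);                // "hello server"
            }
        }
    }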

View File

@@ -0,0 +1,32 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.SocketServer;
import org.java_websocket.WebSocket;
/**
* Normally, a connection stays open until either the client or the server process ends.
* Send this packet to inform the server that the connection can be closed once all tasks are done
*/
public class SetAutoclosePacket implements IClientToServerPacket.Void {
public int dummyProperty = 1;
@JsonIgnore
public static SetAutoclosePacket create() {
return new SetAutoclosePacket();
}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketServer socketServer) {
webSocket.<SocketServer.SocketData>getAttachment().closeIfNoTasksLeft = true;
socketServer.log(webSocket, "Marked connection as autoclose");
}
@JsonIgnore
public SocketFuture<IServerToClientPacket> getFuture() {
return SocketFuture.completedFuture();
}
}
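Until this packet arrives, the server now keeps a connection open even when its task counter drops to zero; a client that wants the old auto-closing behaviour opts back in explicitly. A minimal client-side sketch (how SocketClient exposes its connection is not shown in this diff, so the raw WebSocket is used and JsonProcessingException handling is omitted):

    // Ask the server to close the connection once all outstanding tasks are handled.
    webSocket.send(PacketContainer.serialize(SetAutoclosePacket.create()));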

View File

@@ -1,6 +1,9 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.core.ConsoleInterface;
import de.dhbwstuttgart.server.ServerTaskLogger;
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.SocketServer;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
@@ -19,18 +22,17 @@ import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import de.dhbwstuttgart.util.Logger;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.concurrent.ForkJoinPool;
import org.java_websocket.WebSocket;
/**
* A packet to send all required data for the unification algorithm to the server and request the unification
*/
public class UnifyRequestPacket implements IClientToServerPacket {
public class UnifyRequestPacket implements IClientToServerPacket<UnifyResultPacket> {
public SerialMap finiteClosure;
public SerialMap constraintSet;
@@ -38,30 +40,33 @@ public class UnifyRequestPacket implements IClientToServerPacket {
public SerialMap serialKeyStorage;
public SerialValue<?> placeholders;
public SerialList<SerialMap> factoryplaceholders;
public String futureId;
public int logLevel;
@JsonIgnore
private KeyStorage keyStorage = new KeyStorage();
@JsonIgnore
private boolean keyStorageLoaded = false;
public UnifyRequestPacket() {}
public UnifyRequestPacket(
public static UnifyRequestPacket create(
FiniteClosure finiteClosure,
ConstraintSet<Pair> constraintSet,
ConstraintSet<UnifyPair> unifyConstraintSet,
PlaceholderRegistry placeholderRegistry
) {
// store contraint and finite closure
this.finiteClosure = finiteClosure.toSerial(keyStorage);
this.constraintSet = constraintSet.toSerial(keyStorage);
this.unifyConstraintSet = unifyConstraintSet.toSerial(keyStorage);
UnifyRequestPacket packet = new UnifyRequestPacket();
// store constraint and finite closure
packet.finiteClosure = finiteClosure.toSerial(packet.keyStorage);
packet.constraintSet = constraintSet.toSerial(packet.keyStorage);
packet.unifyConstraintSet = unifyConstraintSet.toSerial(packet.keyStorage);
// store placeholder registry
var serialRegistry = placeholderRegistry.toSerial(keyStorage);
this.placeholders = serialRegistry.getValue("ph");
this.factoryplaceholders = serialRegistry.getList("factoryPh").assertListOfMaps();
var serialRegistry = placeholderRegistry.toSerial(packet.keyStorage);
packet.placeholders = serialRegistry.getValue("ph");
packet.factoryplaceholders = serialRegistry.getList("factoryPh").assertListOfMaps();
// store referenced objects separately
this.serialKeyStorage = keyStorage.toSerial(keyStorage);
packet.serialKeyStorage = packet.keyStorage.toSerial(packet.keyStorage);
packet.logLevel = ConsoleInterface.logLevel.getValue();
return packet;
}
@@ -94,15 +99,22 @@ public class UnifyRequestPacket implements IClientToServerPacket {
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketServer socketServer) {
socketServer.sendMessage(webSocket, "You requested a unify! Please wait until I calculated everything...");
System.out.println("Client " + webSocket.<SocketServer.SocketData>getAttachment().id + " requested a unification. Starting now...");
socketServer.log(webSocket, "Client requested a unification. Starting now...");
try {
var placeholderRegistry = new PlaceholderRegistry();
ArrayList<String> existingPlaceholders = (ArrayList) this.placeholders.getOf(ArrayList.class);
existingPlaceholders.forEach(placeholderRegistry::addPlaceholder);
var unifyContext = new UnifyContext(Writer.nullWriter(), false, true,
new UnifyResultModel(new ConstraintSet<>(), new FiniteClosure(new HashSet<>(), null, placeholderRegistry)),
Logger logger = new ServerTaskLogger(
webSocket,
socketServer,
Logger.LogLevel.fromValue(
Math.max(this.logLevel, Logger.LogLevel.INFO.getValue())
)
);
var unifyContext = new UnifyContext(logger, true,
new UnifyResultModel(new ConstraintSet<>(), new FiniteClosure(new HashSet<>(), logger, placeholderRegistry)),
new UnifyTaskModel(), ForkJoinPool.commonPool(), placeholderRegistry
);
@@ -129,17 +141,24 @@ public class UnifyRequestPacket implements IClientToServerPacket {
var resultSets = resultListener.getResults();
System.out.println("Finished unification for client " + webSocket.<SocketServer.SocketData>getAttachment().id);
socketServer.log(webSocket, "Finished unification");
socketServer.sendMessage(webSocket, "Unification finished. Found " + resultSets.size() + " result sets");
if (webSocket.isOpen()) {
UnifyResultPacket resultPacket = UnifyResultPacket.create(resultSets);
UnifyResultPacket resultPacket = UnifyResultPacket.create(resultSets, futureId);
webSocket.send(PacketContainer.serialize(resultPacket));
}
} catch (Exception e) {
System.err.println(e);
SocketServer.log.error("e: ", e);
SocketServer.logger.exception(e);
socketServer.log(webSocket, e.getMessage());
}
}
@JsonIgnore
public SocketFuture<UnifyResultPacket> getFuture() {
var future = new SocketFuture<>(List.of(UnifyResultPacket.class));
futureId = future.futureId;
return future;
}
}
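The unify request is now future-based: getFuture() creates a SocketFuture keyed by futureId, the id travels with the request, and the server echoes it back inside the UnifyResultPacket so the client can complete the matching future via completeResponseFuture. A hedged client-side sketch; how the future is registered with the SocketClient and awaited (a CompletableFuture-style get()) is an assumption, since that part is not shown in this diff:

    // Assumed client-side flow; SocketFuture's await API is an assumption,
    // and JsonProcessingException handling is omitted.
    UnifyRequestPacket request = UnifyRequestPacket.create(
            finiteClosure, constraintSet, unifyConstraintSet, placeholderRegistry);

    SocketFuture<UnifyResultPacket> future = request.getFuture();   // sets request.futureId
    webSocket.send(PacketContainer.serialize(request));

    UnifyResultPacket result = future.get();   // completed by completeResponseFuture(futureId, ...)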

View File

@@ -18,12 +18,14 @@ public class UnifyResultPacket implements IServerToClientPacket {
public SerialList<ISerialNode> results;
public SerialMap keyStorage;
public String futureId;
public static UnifyResultPacket create(List<ResultSet> resultSets) {
public static UnifyResultPacket create(List<ResultSet> resultSets, String futureId) {
UnifyResultPacket serialized = new UnifyResultPacket();
KeyStorage keyStorage = new KeyStorage();
serialized.results = SerialList.fromMapped(resultSets, resultSet -> resultSet.toSerial(keyStorage));
serialized.keyStorage = keyStorage.toSerial(keyStorage);
serialized.futureId = futureId;
return serialized;
}
@@ -35,8 +37,8 @@ public class UnifyResultPacket implements IServerToClientPacket {
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketClient socketClient) {
System.out.println("[socket] Received unify result");
socketClient.setUnifyResultSets(this);
SocketClient.logger.info("Received unify result");
socketClient.completeResponseFuture(futureId, this);
}

View File

@@ -0,0 +1,8 @@
package de.dhbwstuttgart.syntaxtree;
import de.dhbwstuttgart.util.Logger;
public class SyntaxTree {
public static Logger logger = new Logger("SyntaxTree");
}
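These small holder classes give each subsystem a named Logger instance. Judging from the calls in this diff, the Logger offers at least info, success, error and exception plus a LogLevel enum; anything beyond those calls would be an assumption:

    Logger logger = new Logger("SyntaxTree");
    logger.info("parsing started");
    logger.success("parsing finished");
    logger.error("unexpected node");
    logger.exception(new RuntimeException("boom"));   // logs the throwable

    Logger.LogLevel level = Logger.LogLevel.fromValue(Logger.LogLevel.INFO.getValue());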

View File

@@ -0,0 +1,98 @@
package de.dhbwstuttgart.syntaxtree.factory;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.core.JavaTXServer;
public class NameGenerator {
private static String strNextName = "A";
/**
* Setzt den zu Beginn der Typinferenz auf "A" zurueck.
* Dies ist bei JUnit-Test noetig
* <code>TypePlaceholder</code>. <br>Author: Martin Pluemicke
* @return void
*/
public static void reset() {
strNextName = "A";
}
/**
* Berechnet einen neuen, eindeutigen Namen für eine neue
* <code>TypePlaceholder</code>. <br>Author: Jörg Bäuerle
* @return Der Name
*/
public static String makeNewName()
{
// otth: Funktion berechnet einen neuen Namen anhand eines alten gespeicherten
String strReturn = strNextName;
// nächster Name berechnen und in strNextName speichern
inc( strNextName.length() - 1 );
if (JavaTXServer.isRunning) {
throw new RuntimeException("Using the NameGenerator on a server is not allowed");
}
JavaTXCompiler.defaultClientPlaceholderRegistry.addPlaceholder(strReturn);
return strReturn;
}
/**
* Hilfsfunktion zur Berechnung eines neuen Namens
* <br>Author: Jörg Bäuerle
* @param i
*/
private static void inc(int i)
{
// otth: Hilfsfunktion zur Berechnung eines neuen Namens
// otth: Erhöhung des Buchstabens an der Stelle i im String strNextName
// otth: Nach Überlauf: rekursiver Aufruf
// falls i = -1 --> neuer Buchstabe vorne anfügen
if ( i == -1 )
{
strNextName = "A" + strNextName;
return;
}
char cBuchstabe = (char)(strNextName.charAt( i ));
cBuchstabe++;
if ( cBuchstabe - 65 > 25 )
{
// aktuelle Stelle: auf A zuruecksetzen
manipulate( i, 'A' );
// vorherige Stelle erhöhen
inc( i - 1 );
}
else
{
// aktueller Buchstabe ändern
manipulate( i, cBuchstabe );
}
}
/**
* Hilfsfunktion zur Berechnung eines neuen Namens.
* <br>Author: Jörg Bäuerle
* @param nStelle
* @param nWert
*/
private static void manipulate( int nStelle, char nWert )
{
// otth: Hilfsfunktion zur Berechnung eines neuen Namens
// otth: Ersetzt im String 'strNextName' an der Position 'nStelle' den Buchstaben durch 'nWert'
String strTemp = "";
for( int i = 0; i < strNextName.length(); i++)
{
if ( i == nStelle )
strTemp = strTemp + nWert;
else
strTemp = strTemp + strNextName.charAt( i );
}
strNextName = strTemp;
}
}
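The generator produces the spreadsheet-column sequence A, B, ..., Z, AA, AB, ... by incrementing the last character and prepending an 'A' on overflow. A self-contained re-implementation of just that increment (illustrative only, not the project class) which prints the first names:

    // Standalone illustration of the naming sequence; not the repository's NameGenerator.
    public class NameSequenceDemo {
        public static void main(String[] args) {
            StringBuilder name = new StringBuilder("A");
            for (int n = 0; n < 30; n++) {
                System.out.print(name + " ");              // A B C ... Z AA AB AC AD
                int i = name.length() - 1;
                while (i >= 0 && name.charAt(i) == 'Z') {
                    name.setCharAt(i, 'A');                // overflow: reset this position
                    i--;
                }
                if (i < 0) name.insert(0, 'A');            // every position overflowed: grow by one
                else name.setCharAt(i, (char) (name.charAt(i) + 1));
            }
            System.out.println();
        }
    }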

View File

@@ -1,6 +1,8 @@
package de.dhbwstuttgart.syntaxtree.factory;
import de.dhbwstuttgart.syntaxtree.SyntaxTree;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.util.Logger;
import java.io.Writer;
import java.lang.reflect.Modifier;
import java.util.*;
@@ -34,7 +36,7 @@ public class UnifyTypeFactory {
public static FiniteClosure generateFC(
List<ClassOrInterface> fromClasses,
Writer logFile,
Logger logger,
ClassLoader classLoader,
JavaTXCompiler compiler,
PlaceholderRegistry placeholderRegistry
@@ -49,7 +51,7 @@ public class UnifyTypeFactory {
Generell dürfen sie immer die gleichen Namen haben.
TODO: die transitive Hülle bilden
*/
return new FiniteClosure(FCGenerator.toUnifyFC(compiler, fromClasses, classLoader, placeholderRegistry), logFile, compiler, placeholderRegistry);
return new FiniteClosure(FCGenerator.toUnifyFC(compiler, fromClasses, classLoader, placeholderRegistry), logger, compiler, placeholderRegistry);
}
public static UnifyPair generateSmallerPair(UnifyType tl, UnifyType tr, SourceLoc location){
@@ -132,7 +134,7 @@ public class UnifyTypeFactory {
public static UnifyType convert(JavaTXCompiler compiler, TypePlaceholder tph, Boolean innerType, PlaceholderRegistry placeholderRegistry) {
if (tph.getName().equals("AFR")) {
System.out.println("XXX"+innerType);
SyntaxTree.logger.info("XXX"+innerType);
}
PlaceholderType ntph = new PlaceholderType(tph.getName(), tph.getVariance(), placeholderRegistry);
ntph.setVariance(tph.getVariance());
@@ -199,7 +201,7 @@ public class UnifyTypeFactory {
&& ((PlaceholderType)lhs).isWildcardable()
&& (rhs = ret.getLhsType()) instanceof PlaceholderType) {
if (lhs.getName().equals("AQ")) {
// System.out.println("");
// SyntaxTree.logger.info("");
}
((PlaceholderType)rhs).enableWildcardtable();
}
@@ -208,7 +210,7 @@ public class UnifyTypeFactory {
&& ((PlaceholderType)rhs).isWildcardable()
&& (lhs = ret.getLhsType()) instanceof PlaceholderType) {
if (rhs.getName().equals("AQ")) {
// System.out.println("");
// SyntaxTree.logger.info("");
}
((PlaceholderType)lhs).enableWildcardtable();
}

View File

@@ -7,6 +7,7 @@ import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.syntaxtree.ASTVisitor;
import de.dhbwstuttgart.syntaxtree.factory.NameGenerator;
import de.dhbwstuttgart.typeinference.result.ResultSetVisitor;
import org.antlr.v4.runtime.Token;
@@ -65,20 +66,16 @@ public class TypePlaceholder extends RefTypeOrTPHOrWildcardOrGeneric implements
* @return
*/
public static TypePlaceholder fresh(Token position){
return fresh(position, 0, true);
return new TypePlaceholder(NameGenerator.makeNewName(), position, 0, true);
}
public static TypePlaceholder fresh(Token position, PlaceholderRegistry placeholderRegistry){
return fresh(position, 0, true, placeholderRegistry);
}
public static TypePlaceholder fresh(Token position, PlaceholderRegistry placeholderRegistry){
String newName = placeholderRegistry.generateFreshPlaceholderName();
return new TypePlaceholder(newName, position, 0, true);
}
public static TypePlaceholder fresh(Token position, int variance, boolean wildcardable){
return fresh(position, variance, wildcardable, PlaceholderRegistry.defaultRegistry);
}
public static TypePlaceholder fresh(Token position, int variance, boolean wildcardable, PlaceholderRegistry placeholderRegistry){
String newName = placeholderRegistry.generateFreshPlaceholderName();
return new TypePlaceholder(newName, position, variance, wildcardable);
return new TypePlaceholder(NameGenerator.makeNewName(), position, variance, wildcardable);
}
public static RefTypeOrTPHOrWildcardOrGeneric of(String name) {
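After this change there are two entry points for fresh placeholders: the NameGenerator-backed variants for the classic client-side path and the registry-backed variant for code that carries its own PlaceholderRegistry, e.g. on the server. Illustrative calls only (NullToken is the project's "no source position" token):

    TypePlaceholder a = TypePlaceholder.fresh(new NullToken());                       // name from NameGenerator
    TypePlaceholder b = TypePlaceholder.fresh(new NullToken(), placeholderRegistry);  // name from the given registry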

View File

@@ -0,0 +1,8 @@
package de.dhbwstuttgart.target;
import de.dhbwstuttgart.util.Logger;
public class Target {
public static Logger logger = new Logger("Target");
}

View File

@@ -12,6 +12,7 @@ import de.dhbwstuttgart.syntaxtree.Record;
import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
import de.dhbwstuttgart.syntaxtree.statement.*;
import de.dhbwstuttgart.syntaxtree.type.*;
import de.dhbwstuttgart.target.Target;
import de.dhbwstuttgart.target.tree.*;
import de.dhbwstuttgart.target.tree.expression.*;
import de.dhbwstuttgart.target.tree.type.*;
@@ -337,10 +338,10 @@ public class ASTToTargetAST {
var result = r0.stream().map(l -> l.stream().toList()).toList();
System.out.println("============== OUTPUT ==============");
Target.logger.info("============== OUTPUT ==============");
for (var l : result) {
for (var m : l) System.out.println(m.name() + " " + m.getSignature());
System.out.println();
for (var m : l) Target.logger.info(m.name() + " " + m.getSignature());
Target.logger.info("");
}
return result;
}

View File

@@ -1,10 +1,12 @@
package de.dhbwstuttgart.target.generate;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.syntaxtree.*;
import de.dhbwstuttgart.syntaxtree.statement.*;
import de.dhbwstuttgart.syntaxtree.type.*;
import de.dhbwstuttgart.syntaxtree.type.Void;
import de.dhbwstuttgart.target.Target;
import de.dhbwstuttgart.target.tree.type.TargetGenericType;
import de.dhbwstuttgart.target.tree.type.TargetType;
import de.dhbwstuttgart.typeinference.result.PairTPHEqualTPH;
@@ -138,17 +140,17 @@ public abstract class GenerateGenerics {
this.astToTargetAST = astToTargetAST;
for (var constraint : constraints.results) {
if (constraint instanceof PairTPHsmallerTPH p) {
System.out.println(p.left + " " + p.left.getVariance());
Target.logger.info(p.left + " " + p.left.getVariance());
simplifiedConstraints.add(new PairLT(new TPH(p.left), new TPH(p.right)));
} else if (constraint instanceof PairTPHEqualTPH p) {
equality.put(p.getLeft(), p.getRight());
} else if (constraint instanceof PairTPHequalRefTypeOrWildcardType p) {
System.out.println(p.left + " = " + p.right);
Target.logger.info(p.left + " = " + p.right);
concreteTypes.put(new TPH(p.left), p.right);
}
}
System.out.println("Simplified constraints: " + simplifiedConstraints);
Target.logger.info("Simplified constraints: " + simplifiedConstraints);
}
/*public record GenericsState(Map<TPH, RefTypeOrTPHOrWildcardOrGeneric> concreteTypes, Map<TypePlaceholder, TypePlaceholder> equality) {}
@@ -248,7 +250,7 @@ public abstract class GenerateGenerics {
equality.put(entry.getKey(), to);
}
}
System.out.println(from + " -> " + to + " " + from.getVariance());
Target.logger.info(from + " -> " + to + " " + from.getVariance());
//from.setVariance(to.getVariance());
equality.put(from, to);
referenced.remove(new TPH(from));
@@ -317,7 +319,7 @@ public abstract class GenerateGenerics {
Set<TPH> T2s = new HashSet<>();
findTphs(superType, T2s);
System.out.println("T1s: " + T1s + " T2s: " + T2s);
Target.logger.info("T1s: " + T1s + " T2s: " + T2s);
//Ende
superType = methodCall.receiverType;
@@ -332,7 +334,7 @@ public abstract class GenerateGenerics {
var optMethod = astToTargetAST.findMethod(owner, methodCall.name, methodCall.signatureArguments().stream().map(astToTargetAST::convert).toList());
if (optMethod.isEmpty()) return;
var method2 = optMethod.get();
System.out.println("In: " + method.getName() + " Method: " + method2.getName());
Target.logger.info("In: " + method.getName() + " Method: " + method2.getName());
var generics = family(owner, method2);
// transitive and
@@ -365,7 +367,7 @@ public abstract class GenerateGenerics {
if (!T1s.contains(R1) || !T2s.contains(R2)) continue;
var newPair = new PairLT(R1, R2);
System.out.println("New pair: " + newPair);
Target.logger.info("New pair: " + newPair);
newPairs.add(newPair);
if (!containsRelation(result, newPair))
@@ -567,7 +569,7 @@ public abstract class GenerateGenerics {
Set<Pair> generics(ClassOrInterface owner, Method method) {
if (computedGenericsOfMethods.containsKey(method)) {
var cached = computedGenericsOfMethods.get(method);
System.out.println("Cached " + method.getName() + ": " + cached);
Target.logger.info("Cached " + method.getName() + ": " + cached);
return cached;
}
@@ -596,7 +598,7 @@ public abstract class GenerateGenerics {
normalize(result, classGenerics, usedTphs);
System.out.println(this.getClass().getSimpleName() + " " + method.name + ": " + result);
Target.logger.info(this.getClass().getSimpleName() + " " + method.name + ": " + result);
return result;
}
@@ -675,7 +677,7 @@ public abstract class GenerateGenerics {
normalize(javaResult, null, referencedByClass);
System.out.println(this.getClass().getSimpleName() + " Class " + classOrInterface.getClassName().getClassName() + ": " + javaResult);
Target.logger.info(this.getClass().getSimpleName() + " Class " + classOrInterface.getClassName().getClassName() + ": " + javaResult);
return javaResult;
}
@@ -726,7 +728,7 @@ public abstract class GenerateGenerics {
if (!added) break;
}
System.out.println(chain + " " + chain.stream().map(e -> e.resolve().getVariance()).toList());
Target.logger.info(chain + " " + chain.stream().map(e -> e.resolve().getVariance()).toList());
var variance = chain.get(0).resolve().getVariance();
if (variance != 1) continue;
var index = 0;
@@ -764,7 +766,7 @@ public abstract class GenerateGenerics {
}
for (var pair : elementsToAddToEquality) {
System.out.println(pair);
Target.logger.info(pair);
addToEquality(pair.left, pair.right, referenced);
}
}
@@ -917,11 +919,11 @@ public abstract class GenerateGenerics {
}
}
if (infima.size() > 1) {
System.out.println(infima);
Target.logger.info(infima);
for (var pair : infima) {
var returnTypes = findTypeVariables(method.getReturnType());
var chain = findConnectionToReturnType(returnTypes, input, new HashSet<>(), pair.left);
System.out.println("Find: " + pair.left + " " + chain);
Target.logger.info("Find: " + pair.left + " " + chain);
chain.remove(pair.left);
if (chain.size() > 0) {
for (var tph : chain)
@@ -959,8 +961,8 @@ public abstract class GenerateGenerics {
}
}
newTph.setVariance(variance);
System.out.println(infima + " " + infima.stream().map(i -> i.right.resolve().getVariance()).toList());
System.out.println("Infima new TPH " + newTph + " variance " + variance);
Target.logger.info(infima + " " + infima.stream().map(i -> i.right.resolve().getVariance()).toList());
Target.logger.info("Infima new TPH " + newTph + " variance " + variance);
//referenced.add(newTph);
addToPairs(input, new PairLT(left, new TPH(newTph)));

View File

@@ -1,6 +1,5 @@
package de.dhbwstuttgart.target.generate;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.exceptions.DebugException;
import de.dhbwstuttgart.exceptions.NotImplementedException;
import de.dhbwstuttgart.parser.SyntaxTreeGenerator.AssignToLocal;
@@ -8,6 +7,7 @@ import de.dhbwstuttgart.parser.scope.JavaClassName;
import de.dhbwstuttgart.syntaxtree.*;
import de.dhbwstuttgart.syntaxtree.statement.*;
import de.dhbwstuttgart.syntaxtree.type.*;
import de.dhbwstuttgart.target.Target;
import de.dhbwstuttgart.target.tree.MethodParameter;
import de.dhbwstuttgart.target.tree.TargetMethod;
import de.dhbwstuttgart.target.tree.expression.*;
@@ -15,6 +15,7 @@ import de.dhbwstuttgart.target.tree.type.*;
import java.lang.reflect.Modifier;
import java.util.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
@@ -120,7 +121,7 @@ public class StatementToTargetExpression implements ASTVisitor {
@Override
public void visit(BoolExpression bool) {
System.out.println("BoolExpression");
Target.logger.info("BoolExpression");
}
@Override
@@ -218,6 +219,22 @@ public class StatementToTargetExpression implements ASTVisitor {
if (methodCall.receiver instanceof ExpressionReceiver expressionReceiver && expressionReceiver.expr instanceof This) {
if (receiverClass == null) throw new DebugException("Class " + receiverName + " does not exist!");
var thisMethod = converter.findMethod(receiverClass, methodCall.name, signature);
if (thisMethod.isEmpty()) {
Target.logger.error("Expected: " + receiverClass.getClassName() + "." + methodCall.name + "(" +
signature.stream().map(TargetType::toSignature).collect(Collectors.joining())+ ")" );
AtomicBoolean hasM = new AtomicBoolean(false);
receiverClass.getMethods().forEach(m -> {
if (Objects.equals(m.getName(), methodCall.name)) {
hasM.set(true);
Target.logger.error("But only has: " + m.name + "(" +
m.getParameterList().getFormalparalist().stream().map(t -> t.getType().toString()).collect(Collectors.joining())+ ")" );
}
});
if (!hasM.get())
Target.logger.error("But does not contain method at all");
}
ClassOrInterface finalReceiverClass = receiverClass;
foundMethod = thisMethod.orElseGet(() -> findMethod(finalReceiverClass.getSuperClass().getName(), methodCall.name, signature).orElseThrow());
} else if (!isFunNType) {
@@ -234,7 +251,7 @@ public class StatementToTargetExpression implements ASTVisitor {
isInterface = receiverClass.isInterface();
}
System.out.println(argList);
Target.logger.info(argList);
result = new TargetMethodCall(converter.convert(methodCall.getType()), returnType, argList, converter.convert(methodCall.receiver), methodCall.getArgumentList().getArguments().stream().map(converter::convert).toList(), receiverType, methodCall.name, isStatic, isInterface, isPrivate);
}

View File

@@ -7,6 +7,7 @@ import de.dhbwstuttgart.syntaxtree.GenericDeclarationList;
import de.dhbwstuttgart.syntaxtree.GenericTypeVar;
import de.dhbwstuttgart.syntaxtree.Method;
import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
import de.dhbwstuttgart.syntaxtree.factory.NameGenerator;
import de.dhbwstuttgart.syntaxtree.type.GenericRefType;

View File

@@ -19,11 +19,11 @@ public class MethodAssumption extends Assumption{
private ClassOrInterface receiver;
private RefTypeOrTPHOrWildcardOrGeneric retType;
List<? extends RefTypeOrTPHOrWildcardOrGeneric> params;
private final Boolean isInherited;
private final Boolean isOverridden;
private final boolean isInherited;
private final boolean isOverridden;
public MethodAssumption(ClassOrInterface receiver, RefTypeOrTPHOrWildcardOrGeneric retType,
List<? extends RefTypeOrTPHOrWildcardOrGeneric> params, TypeScope scope, Boolean isInherited, Boolean isOverridden){
List<? extends RefTypeOrTPHOrWildcardOrGeneric> params, TypeScope scope, boolean isInherited, boolean isOverridden){
super(scope);
this.receiver = receiver;
this.retType = retType;
@@ -73,11 +73,11 @@ public class MethodAssumption extends Assumption{
return TYPEStmt.getReceiverType(receiver, resolver);
}
public Boolean isInherited() {
public boolean isInherited() {
return isInherited;
}
public Boolean isOverridden() {
public boolean isOverridden() {
return isOverridden;
}
}

View File

@@ -22,8 +22,8 @@ import javax.annotation.Nullable;
public class Constraint<A extends IConstraintElement> extends HashSet<A> implements ISerializableData {
private static final long serialVersionUID = 1L;
private Boolean isInherited = false;//wird beides nur für die Method-Constraints benoetigt
private Boolean isImplemented = false;
private boolean isInherited = false;//wird beides nur für die Method-Constraints benoetigt
private boolean isImplemented = false;
/*
* wird verwendet um bei der Codegenerierung die richtige Methoden - Signatur
@@ -37,27 +37,31 @@ public class Constraint<A extends IConstraintElement> extends HashSet<A> impleme
super();
}
public Constraint(Boolean isInherited, Boolean isImplemented) {
public Constraint(int initialCapacity) {
super(initialCapacity);
}
public Constraint(boolean isInherited, boolean isImplemented) {
this.isInherited = isInherited;
this.isImplemented = isImplemented;
}
public Constraint(Boolean isInherited, Boolean isImplemented, Constraint<A> extendConstraint, Set<A> methodSignatureConstraint) {
public Constraint(boolean isInherited, boolean isImplemented, Constraint<A> extendConstraint, Set<A> methodSignatureConstraint) {
this.isInherited = isInherited;
this.isImplemented = isImplemented;
this.extendConstraint = extendConstraint;
this.methodSignatureConstraint = methodSignatureConstraint;
}
public void setIsInherited(Boolean isInherited) {
public void setIsInherited(boolean isInherited) {
this.isInherited = isInherited;
}
public Boolean isInherited() {
public boolean isInherited() {
return isInherited;
}
public Boolean isImplemented() {
public boolean isImplemented() {
return isImplemented;
}
@@ -77,6 +81,14 @@ public class Constraint<A extends IConstraintElement> extends HashSet<A> impleme
methodSignatureConstraint = c;
}
public <B extends IConstraintElement> Constraint<B> createdMapped(Function<A,B> mapper) {
Constraint<B> result = new Constraint<>(this.size());
for (A element : this) {
result.add(mapper.apply(element));
}
return result;
}
public String toString() {
return super.toString() + "\nisInherited = " + isInherited
+ " isOveridden = " + isImplemented

View File

@@ -21,7 +21,7 @@ public class Pair implements Serializable, IConstraintElement, ISerializableData
private SourceLoc location;
private PairOperator eOperator = PairOperator.SMALLER;
private Boolean noUnification = false;
private boolean noUnification = false;
private Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2) {
@@ -43,7 +43,7 @@ public class Pair implements Serializable, IConstraintElement, ISerializableData
this.location = location;
}
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator eOp, Boolean noUnification) {
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator eOp, boolean noUnification) {
// Konstruktor
this(TA1, TA2);
this.eOperator = eOp;
@@ -161,7 +161,7 @@ public class Pair implements Serializable, IConstraintElement, ISerializableData
String op = data.getValue("op").getOf(String.class);
SerialMap ta1 = data.getMap("ta1");
SerialMap ta2 = data.getMap("ta2");
Boolean noUnification = data.getValue("noUnification").getOf(Integer.class) == 1;
boolean noUnification = data.getValue("noUnification").getOf(Integer.class) == 1;
SerialMap location = data.getMapOrNull("location");
var pair = new Pair(

View File

@@ -38,7 +38,7 @@ implements ISerializableData {
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("left", this.getLeft().toSerial(keyStorage));
serialized.put("right", this.getLeft().toSerial(keyStorage));
serialized.put("right", this.getRight().toSerial(keyStorage));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);

View File

@@ -21,7 +21,7 @@ public class PairTPHEqualTPH extends ResultPair<TypePlaceholder, TypePlaceholder
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("left", this.getLeft().toSerial(keyStorage));
serialized.put("right", this.getLeft().toSerial(keyStorage));
serialized.put("right", this.getRight().toSerial(keyStorage));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);

View File

@@ -46,7 +46,7 @@ public class PairTPHsmallerTPH extends ResultPair<TypePlaceholder,TypePlaceholde
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("left", this.getLeft().toSerial(keyStorage));
serialized.put("right", this.getLeft().toSerial(keyStorage));
serialized.put("right", this.getRight().toSerial(keyStorage));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);

View File

@@ -5,8 +5,11 @@ import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTaskHelper;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.util.Logger;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Set;
@@ -27,7 +30,7 @@ public class ResultSet implements ISerializableData {
public ResultSet(Set<ResultPair> set) {
this.results = set;
this.genIns = new HashSet<>();
this.genIns = TypeUnifyTaskHelper.getPresizedHashSet(results.size());
results.forEach(x -> {
if (x instanceof PairTPHsmallerTPH) {
this.genIns.add(x);
@@ -59,15 +62,19 @@ public class ResultSet implements ISerializableData {
public String toString() {
var results = new ArrayList<>(this.results);
results.sort(Ordering.usingToString());
results.sort(
Comparator
.comparingInt((ResultPair o) -> o.getLeft().toString().length())
.thenComparing(o -> o.getLeft().toString())
.thenComparingInt(o -> o.getRight().toString().length())
.thenComparing(o -> o.getRight().toString())
);
return results.toString();
}
@Override
public boolean equals(Object o) {
if (o instanceof ResultSet) {
ResultSet other = (ResultSet) o;
if (o instanceof ResultSet other) {
// sort both result lists
var thisElements = new ArrayList<>(this.results);
thisElements.sort(Ordering.usingToString());
@@ -105,6 +112,8 @@ class Resolver implements ResultSetVisitor {
private final Set<GenericInsertPair> additionalTPHs = new HashSet<>();
private ResultPair<?, ?> currentPair;
public static Logger logger = new Logger("Resolver");
public Resolver(ResultSet resultPairs) {
this.result = resultPairs;
}
@@ -112,7 +121,7 @@ class Resolver implements ResultSetVisitor {
public ResolvedType resolve(TypePlaceholder tph) {
toResolve = tph;
resolved = null;
System.out.println(tph.toString());
logger.info(tph.toString());
for (ResultPair<?, ?> resultPair : result.results) {
if (resultPair instanceof PairTPHEqualTPH && ((PairTPHEqualTPH) resultPair).getLeft().equals(toResolve)) {
currentPair = resultPair;
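Several call sites now take their HashSets from TypeUnifyTaskHelper.getPresizedHashSet(expectedSize) so the set never rehashes while being filled. The helper's body is not part of this diff; a plausible sketch, assuming it only converts the expected element count into an initial capacity for the default 0.75 load factor:

    // Assumed shape of the helper; the actual implementation is not shown in this diff.
    public static <T> HashSet<T> getPresizedHashSet(int expectedSize) {
        // capacity chosen so that expectedSize elements stay below the 0.75 load factor
        return new HashSet<>((int) (expectedSize / 0.75f) + 1, 0.75f);
    }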

View File

@@ -14,6 +14,7 @@ import de.dhbwstuttgart.typeinference.assumptions.TypeInferenceBlockInformation;
import de.dhbwstuttgart.typeinference.assumptions.TypeInferenceInformation;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTaskHelper;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.util.BiRelation;
import org.antlr.v4.runtime.Token;
@@ -33,7 +34,7 @@ public class TYPE {
public ConstraintSet getConstraints() {
ConstraintSet ret = new ConstraintSet();
for (ClassOrInterface cl : sf.KlassenVektor) {
var allClasses = new HashSet<ClassOrInterface>();
Set<ClassOrInterface> allClasses = TypeUnifyTaskHelper.getPresizedHashSet(allAvailableClasses.size() + sf.availableClasses.size());
allClasses.addAll(allAvailableClasses);
allClasses.addAll(sf.availableClasses);
ret.addAll(getConstraintsClass(cl, new TypeInferenceInformation(allClasses)));
@@ -68,7 +69,7 @@ public class TYPE {
for(SourceFile sourceFile : sfs){
for(JavaClassName importName : sourceFile.imports){
System.out.println(importName);
context.logger().info(importName);
try {
classes.add(ASTFactory.createClass(classLoader.loadClass(importName.toString())));
} catch (ClassNotFoundException e) {

View File

@@ -2,6 +2,7 @@
package de.dhbwstuttgart.typeinference.typeAlgo;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTaskHelper;
import java.util.*;
import java.util.stream.Collectors;
@@ -13,6 +14,7 @@ import de.dhbwstuttgart.parser.SyntaxTreeGenerator.AssignToLocal;
import de.dhbwstuttgart.parser.scope.JavaClassName;
import de.dhbwstuttgart.syntaxtree.*;
import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
import de.dhbwstuttgart.syntaxtree.factory.NameGenerator;
import de.dhbwstuttgart.syntaxtree.statement.*;
import de.dhbwstuttgart.syntaxtree.type.ExtendsWildcardType;
import de.dhbwstuttgart.syntaxtree.type.GenericRefType;
@@ -116,17 +118,18 @@ public class TYPEStmt implements StatementVisitor {
@Override
public void visit(FieldVar fieldVar) {
fieldVar.receiver.accept(this);
Set<Constraint> oderConstraints = new HashSet<>();
List<FieldAssumption> fieldAssumptions = info.getFields(fieldVar.fieldVarName);
Set<Constraint> oderConstraints = TypeUnifyTaskHelper.getPresizedHashSet(fieldAssumptions.size());
for (FieldAssumption fieldAssumption : info.getFields(fieldVar.fieldVarName)) {
for (FieldAssumption fieldAssumption : fieldAssumptions) {
Constraint constraint = new Constraint();
GenericsResolver resolver = getResolverInstance();
constraint.add(new Pair(fieldVar.receiver.getType(), fieldAssumption.getReceiverType(resolver), PairOperator.SMALLERDOT, loc(fieldVar.getOffset()))); // PL 2019-12-09: SMALLERDOT eingefuegt, EQUALSDOT entfernt, wenn ds Field privat ist muesste es EQUALSDOT lauten
constraint.add(new Pair(fieldVar.getType(), fieldAssumption.getType(resolver), PairOperator.EQUALSDOT, loc(fieldVar.getOffset())));
oderConstraints.add(constraint);
}
if (oderConstraints.size() == 0)
if (oderConstraints.isEmpty())
throw new TypeinferenceException("Kein Feld " + fieldVar.fieldVarName + " gefunden", fieldVar.getOffset());
constraintsSet.addOderConstraint(oderConstraints);
}
@@ -141,7 +144,7 @@ public class TYPEStmt implements StatementVisitor {
@Override
public void visit(ForEachStmt forEachStmt) {
var iterableType = new RefType(ASTFactory.createClass(java.lang.Iterable.class).getClassName(), Arrays.asList(new ExtendsWildcardType(forEachStmt.statement.getType(), new NullToken())), new NullToken());
var iterableType = new RefType(ASTFactory.createClass(java.lang.Iterable.class).getClassName(), List.of(new ExtendsWildcardType(forEachStmt.statement.getType(), new NullToken())), new NullToken());
constraintsSet.addUndConstraint(new Pair(forEachStmt.expression.getType(), iterableType, PairOperator.SMALLERDOT, loc(forEachStmt.getOffset())));
forEachStmt.statement.accept(this);
forEachStmt.expression.accept(this);
@@ -189,7 +192,7 @@ public class TYPEStmt implements StatementVisitor {
methodCall.receiver.accept(this);
// Overloading:
Set<Constraint<Pair>> methodConstraints = new HashSet<>();
for (MethodAssumption m : this.getMethods(methodCall.name, methodCall.arglist, info)) {
for (MethodAssumption m : TYPEStmt.getMethods(methodCall.name, methodCall.arglist, info)) {
GenericsResolver resolver = getResolverInstance();
Set<Constraint<Pair>> oneMethodConstraints = generateConstraint(methodCall, m, info, resolver);
methodConstraints.addAll(oneMethodConstraints);
@@ -199,7 +202,7 @@ public class TYPEStmt implements StatementVisitor {
* oneMethodConstraint.setExtendConstraint(extendsOneMethodConstraint); extendsOneMethodConstraint.setExtendConstraint(oneMethodConstraint); methodConstraints.add(extendsOneMethodConstraint);
*/
}
if (methodConstraints.size() < 1) {
if (methodConstraints.isEmpty()) {
throw new TypeinferenceException("Methode " + methodCall.name + " ist nicht vorhanden!", methodCall.getOffset());
}
constraintsSet.addOderConstraint(methodConstraints);
@@ -212,7 +215,7 @@ public class TYPEStmt implements StatementVisitor {
for (MethodAssumption m : this.getConstructors(info, (RefType) methodCall.getType(), methodCall.getArgumentList())) {
methodConstraints.add(generateConstructorConstraint(methodCall, m, info, getResolverInstance()));
}
if (methodConstraints.size() < 1) {
if (methodConstraints.isEmpty()) {
throw new TypeinferenceException("Konstruktor in Klasse " + methodCall.getType().toString() + " ist nicht vorhanden!", methodCall.getOffset());
}
constraintsSet.addOderConstraint(methodConstraints);
@@ -282,8 +285,13 @@ public class TYPEStmt implements StatementVisitor {
// see: https://docs.oracle.com/javase/specs/jls/se7/html/jls-15.html#jls-15.17
// Expression muss zu Numeric Convertierbar sein. also von Numeric erben
Constraint<Pair> numeric;
HashSet<JavaClassName> classNames = TypeUnifyTaskHelper.getPresizedHashSet(info.getAvailableClasses().size());
for (var classEl : info.getAvailableClasses()) {
classNames.add(classEl.getClassName());
}
// PL eingefuegt 2018-07-17
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(bytee.getName())) {
if (classNames.contains(bytee.getName())) {
numeric = new Constraint<>();
numeric.add(new Pair(binary.lexpr.getType(), bytee, PairOperator.SMALLERDOT, loc(binary.getOffset())));
numeric.add(new Pair(binary.rexpr.getType(), bytee, PairOperator.SMALLERDOT, loc(binary.getOffset())));
@@ -291,7 +299,7 @@ public class TYPEStmt implements StatementVisitor {
numericAdditionOrStringConcatenation.add(numeric);
}
// PL eingefuegt 2018-07-17
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(shortt.getName())) {
if (classNames.contains(shortt.getName())) {
numeric = new Constraint<>();
numeric.add(new Pair(binary.lexpr.getType(), shortt, PairOperator.SMALLERDOT, loc(binary.getOffset())));
numeric.add(new Pair(binary.rexpr.getType(), shortt, PairOperator.SMALLERDOT, loc(binary.getOffset())));
@@ -299,7 +307,7 @@ public class TYPEStmt implements StatementVisitor {
numericAdditionOrStringConcatenation.add(numeric);
}
// PL eingefuegt 2018-07-17
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(integer.getName())) {
if (classNames.contains(integer.getName())) {
numeric = new Constraint<>();
numeric.add(new Pair(binary.lexpr.getType(), integer, PairOperator.SMALLERDOT, loc(binary.getOffset())));
numeric.add(new Pair(binary.rexpr.getType(), integer, PairOperator.SMALLERDOT, loc(binary.getOffset())));
@@ -307,7 +315,7 @@ public class TYPEStmt implements StatementVisitor {
numericAdditionOrStringConcatenation.add(numeric);
}
// PL eingefuegt 2018-07-17
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(longg.getName())) {
if (classNames.contains(longg.getName())) {
numeric = new Constraint<>();
numeric.add(new Pair(binary.lexpr.getType(), longg, PairOperator.SMALLERDOT, loc(binary.getOffset())));
numeric.add(new Pair(binary.rexpr.getType(), longg, PairOperator.SMALLERDOT, loc(binary.getOffset())));
@@ -315,7 +323,7 @@ public class TYPEStmt implements StatementVisitor {
numericAdditionOrStringConcatenation.add(numeric);
}
// PL eingefuegt 2018-07-17
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(floatt.getName())) {
if (classNames.contains(floatt.getName())) {
numeric = new Constraint<>();
numeric.add(new Pair(binary.lexpr.getType(), floatt, PairOperator.SMALLERDOT, loc(binary.getOffset())));
numeric.add(new Pair(binary.rexpr.getType(), floatt, PairOperator.SMALLERDOT, loc(binary.getOffset())));
@@ -323,7 +331,7 @@ public class TYPEStmt implements StatementVisitor {
numericAdditionOrStringConcatenation.add(numeric);
}
// PL eingefuegt 2018-07-17
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(doublee.getName())) {
if (classNames.contains(doublee.getName())) {
numeric = new Constraint<>();
numeric.add(new Pair(binary.lexpr.getType(), doublee, PairOperator.SMALLERDOT, loc(binary.getOffset())));
numeric.add(new Pair(binary.rexpr.getType(), doublee, PairOperator.SMALLERDOT, loc(binary.getOffset())));
@@ -338,7 +346,7 @@ public class TYPEStmt implements StatementVisitor {
if (binary.operation.equals(BinaryExpr.Operator.ADD)) {
// Dann kann der Ausdruck auch das aneinanderfügen zweier Strings sein: ("a" + "b") oder (1 + 2)
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(string.getName())) {
if (classNames.contains(string.getName())) {
Constraint<Pair> stringConcat = new Constraint<>();
stringConcat.add(new Pair(binary.lexpr.getType(), string, PairOperator.EQUALSDOT, loc(binary.getOffset())));
stringConcat.add(new Pair(binary.rexpr.getType(), string, PairOperator.EQUALSDOT, loc(binary.getOffset())));
@@ -346,7 +354,7 @@ public class TYPEStmt implements StatementVisitor {
numericAdditionOrStringConcatenation.add(stringConcat);
}
}
if (numericAdditionOrStringConcatenation.size() < 1) {
if (numericAdditionOrStringConcatenation.isEmpty()) {
throw new TypeinferenceException("Kein Typ für " + binary.operation.toString() + " vorhanden", binary.getOffset());
}
constraintsSet.addOderConstraint(numericAdditionOrStringConcatenation);
@@ -693,8 +701,8 @@ public class TYPEStmt implements StatementVisitor {
Set<Pair> methodSignatureConstraint = generatemethodSignatureConstraint(forMethod, assumption, info, resolver);
//System.out.println("methodSignatureConstraint: " + methodSignatureConstraint);
//System.out.println("methodConstraint: " + methodConstraint);
//context.logger().info("methodSignatureConstraint: " + methodSignatureConstraint);
//context.logger().info("methodConstraint: " + methodConstraint);
methodConstraint.setmethodSignatureConstraint(methodSignatureConstraint);
extendsMethodConstraint.setmethodSignatureConstraint(methodSignatureConstraint);
@@ -732,7 +740,7 @@ public class TYPEStmt implements StatementVisitor {
}
// Zuordnung von MethodCall.signature(ReturnType) zu dem ReturnType der ausgewaehlten Methode (assumption.returnType)
ret.add(new Pair(foMethod.signature.get(foMethod.signature.size() - 1), assumption.getReturnType(), PairOperator.EQUALSDOT));
ret.add(new Pair(foMethod.signature.getLast(), assumption.getReturnType(), PairOperator.EQUALSDOT));
return ret;
}
@@ -743,10 +751,10 @@ public class TYPEStmt implements StatementVisitor {
List<GenericRefType> funNParams = new ArrayList<>();
for (int i = 0; i < numArgs + 1; i++) {
// funNParams.add(TypePlaceholder.fresh(new NullToken()));
funNParams.add(new GenericRefType(PlaceholderRegistry.defaultRegistry.generateFreshPlaceholderName(), new NullToken()));
funNParams.add(new GenericRefType(NameGenerator.makeNewName(), new NullToken()));
}
funNParams.get(funNParams.size() - 1);
ret.add(new MethodAssumption(new FunNClass(funNParams), funNParams.get(funNParams.size() - 1), funNParams.subList(0, funNParams.size() - 1), new TypeScope() {
funNParams.getLast();
ret.add(new MethodAssumption(new FunNClass(funNParams), funNParams.getLast(), funNParams.subList(0, funNParams.size() - 1), new TypeScope() {
@Override
public Iterable<? extends GenericTypeVar> getGenerics() {
throw new NotImplementedException();
@@ -841,7 +849,7 @@ public class TYPEStmt implements StatementVisitor {
for (var child : switchStmt.getBlocks()) {
for (var label : child.getLabels()) {
if (label.getPattern() == null) {
//System.out.println("DefaultCase");
//context.logger().info("DefaultCase");
} else {
constraintsSet.addUndConstraint(
new Pair(
@@ -882,13 +890,9 @@ public class TYPEStmt implements StatementVisitor {
child.getLabels().forEach(el -> {
if (el.getType() instanceof RefType) {
var recType = el;
if (el.getPattern() instanceof RecordPattern) {
var pattern = (RecordPattern) recType.getPattern();
recursivelyAddRecordConstraints(pattern);
}
if (el.getPattern() instanceof RecordPattern pattern) {
recursivelyAddRecordConstraints(pattern);
}
}
});
@@ -904,13 +908,13 @@ public class TYPEStmt implements StatementVisitor {
var allClasses = info.getAvailableClasses();
var interestingClasses = allClasses.stream().filter(as -> as.getClassName().equals(((RefType) pattern.getType()).getName())).toList();
var constructors = interestingClasses.get(0).getConstructors();
var constructors = interestingClasses.getFirst().getConstructors();
int counter = 0;
for (var subPattern : pattern.getSubPattern()) {
for (Constructor con : constructors) {
//System.out.println("----------------------\n" + subPattern.getType() + " | " + con.getParameterList().getParameterAt(counter).getType() + "\n----------------------\n");
//context.logger().info("----------------------\n" + subPattern.getType() + " | " + con.getParameterList().getParameterAt(counter).getType() + "\n----------------------\n");
constraintsSet.addUndConstraint(new Pair(subPattern.getType(), con.getParameterList().getParameterAt(counter).getType(), PairOperator.SMALLERDOT, loc(con.getParameterList().getParameterAt(counter).getOffset())));
}
if (subPattern instanceof RecordPattern) recursivelyAddRecordConstraints((RecordPattern) subPattern);
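The replacements of get(0) and get(size() - 1) with getFirst() and getLast() rely on the SequencedCollection interface from Java 21, so this file assumes a Java 21+ toolchain. For reference:

    // Java 21+: equivalent accesses on a non-empty List.
    List<String> list = new ArrayList<>(List.of("a", "b", "c"));
    String first = list.getFirst();   // same as list.get(0)
    String last  = list.getLast();    // same as list.get(list.size() - 1)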

View File

@@ -0,0 +1,91 @@
package de.dhbwstuttgart.typeinference.unify;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.RecursiveTask;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* An intermediate class for the recursive steps of the TypeUnifyTask:
* This allows canceling parts of the recursion tree, instead of only the whole execution as before. But in
* order for that to work, all cancellable child tasks must be added when they are created
*
* @param <T>
*/
public abstract class CancellableTask<T> extends RecursiveTask<T> {
private final AtomicBoolean executionCancelled = new AtomicBoolean(false);
private final List<CancellableTask<?>> childTasks = new LinkedList<>();
private CancellableTask<?> parentTask = null;
/**
* Mark this task and all of its (recursive) children as canceled
*/
protected void cancelExecution() {
// is this branch already canceled? Then do nothing
if (this.executionCancelled.getAndSet(true)) return;
this.cancelChildExecution();
}
private void cancelChildExecution() {
synchronized (this.childTasks) {
for (var childTask : childTasks) {
// no need to cancel a branch that is already finished
if (!childTask.isDone()) {
childTask.cancelExecution();
}
}
}
}
private void cancelChildExecutionAfter(CancellableTask<?> checkpointTask) {
boolean reachedCheckpoint = false;
int i = 0;
for (var childTask : childTasks) {
if (!reachedCheckpoint) {
reachedCheckpoint = childTask == checkpointTask;
}
else {
// no need to cancel a branch that is already finished
if (!childTask.isDone()) {
childTask.cancelExecution();
}
i++;
}
}
System.out.println("Skipped " + i + " younger siblings");
}
protected void cancelSiblingTasks() {
if (this.parentTask != null) {
boolean thisWasCancelledBefore = this.executionCancelled.get();
this.parentTask.cancelChildExecution();
this.executionCancelled.set(thisWasCancelledBefore);
}
}
public void cancelYoungerSiblingTasks() {
if (this.parentTask != null) {
this.parentTask.cancelChildExecutionAfter(this);
}
}
public boolean isExecutionCancelled() {
return executionCancelled.get();
}
public void addChildTask(CancellableTask<?> childTask) {
this.childTasks.add(childTask);
childTask.setParentTask(this);
if (this.executionCancelled.get()) {
childTask.executionCancelled.set(true);
}
}
private void setParentTask(CancellableTask<?> parentTask) {
this.parentTask = parentTask;
}
}
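
The class itself only wires up the parent/child bookkeeping; a subtree only becomes cancellable if every forked child is registered via addChildTask. A minimal usage sketch under that assumption; DemoCountTask and its workload are invented for illustration and would have to live in the same package as CancellableTask (de.dhbwstuttgart.typeinference.unify):

// Illustrative usage sketch, not part of the commit.
import java.util.concurrent.ForkJoinPool;

class DemoCountTask extends CancellableTask<Integer> {
    private final int depth;

    DemoCountTask(int depth) { this.depth = depth; }

    @Override
    protected Integer compute() {
        // Cooperative cancellation: bail out early if this branch was canceled.
        if (isExecutionCancelled()) return 0;
        if (depth == 0) return 1;
        DemoCountTask left = new DemoCountTask(depth - 1);
        DemoCountTask right = new DemoCountTask(depth - 1);
        // Registering the children is what lets cancelExecution() reach the whole subtree.
        addChildTask(left);
        addChildTask(right);
        left.fork();
        return right.compute() + left.join();
    }

    public static void main(String[] args) {
        System.out.println(new ForkJoinPool().invoke(new DemoCountTask(8)));
    }
}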

View File

@@ -37,9 +37,6 @@ public class ConcurrentSetMergeTask<T> extends RecursiveTask<Set<T>> {
totalElements += list.get(i).size();
}
System.out.println("ConcurrentSetMerge? -> " + (size <= LIST_THRESHOLD || totalElements < ELEMENT_THRESHOLD ? "true" : "false"));
// size will always be at least one
if (true || size <= LIST_THRESHOLD || totalElements < ELEMENT_THRESHOLD) {
Set<T> result = this.list.get(start);

View File

@@ -94,8 +94,8 @@ public class MartelliMontanariUnify implements IUnify {
// SUBST - Rule
if(lhsType instanceof PlaceholderType) {
mgu.add((PlaceholderType) lhsType, rhsType);
//PL 2018-04-01 nach checken, ob es richtig ist, dass keine Substitutionen uebergeben werden muessen.
termsList = termsList.stream().map(x -> mgu.apply(x)).collect(Collectors.toCollection(ArrayList::new));
//PL 2018-04-01 nach checken, ob es richtig ist, dass keine Substitutionen uebergeben werden muessen.
termsList.replaceAll(mgu::apply);
idx = idx+1 == termsList.size() ? 0 : idx+1;
continue;
}
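
The replaced line above is a small performance cleanup: instead of streaming the term list into a brand-new ArrayList after each substitution step, List.replaceAll rewrites the elements of the existing list in place. A self-contained comparison with a toy mapping function; the strings are placeholders for unify terms:

import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;

public class ReplaceAllSketch {
    public static void main(String[] args) {
        List<String> terms = new ArrayList<>(List.of("a", "b", "c"));

        // Old style: every substitution step builds a brand-new ArrayList.
        List<String> copied = terms.stream()
                .map(String::toUpperCase)
                .collect(Collectors.toCollection(ArrayList::new));

        // New style: rewrites the elements of the existing list, no reallocation.
        terms.replaceAll(String::toUpperCase);

        System.out.println(copied + " " + terms);
    }
}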

View File

@@ -16,10 +16,6 @@ import java.util.concurrent.atomic.AtomicInteger;
*/
public class PlaceholderRegistry implements ISerializableData {
// The default registry should only ever be used outside on the Client-Side and NEVER on the Server-Side!
// This includes at least the server-handled packets and the unification algorithm
public static PlaceholderRegistry defaultRegistry = new PlaceholderRegistry();
private final Set<String> existingPlaceholders = ConcurrentHashMap.newKeySet();
private final AtomicInteger placeholderCount = new AtomicInteger();
public ArrayList<PlaceholderType> UnifyTypeFactory_PLACEHOLDERS = new ArrayList<>();
@@ -49,6 +45,14 @@ public class PlaceholderRegistry implements ISerializableData {
return name;
}
public PlaceholderRegistry deepClone() {
PlaceholderRegistry pr2 = new PlaceholderRegistry();
this.existingPlaceholders.forEach(pr2::addPlaceholder);
pr2.UnifyTypeFactory_PLACEHOLDERS.addAll(this.UnifyTypeFactory_PLACEHOLDERS);
pr2.placeholderCount.set(this.placeholderCount.get());
return pr2;
}
/**
* Generate a token that consists of uppercase letters and contains the given prefix and suffix from the value i
*

View File

@@ -1,10 +1,12 @@
package de.dhbwstuttgart.typeinference.unify;
import de.dhbwstuttgart.util.Logger;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Optional;
import java.util.Queue;
import java.util.Set;
@@ -12,24 +14,16 @@ import java.util.Stack;
import java.util.function.Function;
import java.util.stream.Collectors;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.exceptions.DebugException;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.syntaxtree.type.ExtendsWildcardType;
import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
import de.dhbwstuttgart.typeinference.unify.model.*;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.distributeVariance;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.io.OutputStreamWriter;
import org.apache.commons.io.output.NullOutputStream;
/**
* Implementation of the type inference rules.
* @author Florian Steurer
@@ -37,17 +31,17 @@ import org.apache.commons.io.output.NullOutputStream;
*/
public class RuleSet implements IRuleSet{
Writer logFile;
Logger logger;
final PlaceholderRegistry placeholderRegistry;
public RuleSet(PlaceholderRegistry placeholderRegistry) {
super();
logFile = OutputStreamWriter.nullWriter();
logger = Logger.NULL_LOGGER;
this.placeholderRegistry = placeholderRegistry;
}
RuleSet(Writer logFile, PlaceholderRegistry placeholderRegistry) {
this.logFile = logFile;
RuleSet(Logger logger, PlaceholderRegistry placeholderRegistry) {
this.logger = logger;
this.placeholderRegistry = placeholderRegistry;
}
@@ -300,8 +294,8 @@ public class RuleSet implements IRuleSet{
if(dFromFc == null || !dFromFc.getTypeParams().arePlaceholders() || dFromFc.getTypeParams().size() != cFromFc.getTypeParams().size())
return Optional.empty();
//System.out.println("cFromFc: " + cFromFc);
//System.out.println("dFromFc: " + dFromFc);
//context.logger().info("cFromFc: " + cFromFc);
//context.logger().info("dFromFc: " + dFromFc);
int[] pi = pi(cFromFc.getTypeParams(), dFromFc.getTypeParams());
if(pi.length == 0)
@@ -510,17 +504,17 @@ public class RuleSet implements IRuleSet{
TypeParams typeDParams = typeD.getTypeParams();
TypeParams typeDgenParams = typeDgen.getTypeParams();
//System.out.println("Pair: " +pair);
//System.out.println("typeD: " +typeD);
//System.out.println("typeDParams: " +typeDParams);
//System.out.println("typeDgen: " +typeD);
//System.out.println("typeDgenParams: " +typeDgenParams);
//context.logger().info("Pair: " +pair);
//context.logger().info("typeD: " +typeD);
//context.logger().info("typeDParams: " +typeDParams);
//context.logger().info("typeDgen: " +typeD);
//context.logger().info("typeDgenParams: " +typeDgenParams);
Unifier unif = Unifier.identity();
for(int i = 0; i < typeDParams.size(); i++) {
//System.out.println("ADAPT" +typeDgenParams);
//context.logger().info("ADAPT" +typeDgenParams);
if (typeDgenParams.get(i) instanceof PlaceholderType)
unif.add((PlaceholderType) typeDgenParams.get(i), typeDParams.get(i));
else System.out.println("ERROR");
unif.add((PlaceholderType) typeDgenParams.get(i), typeDParams.get(i));
else logger.exception(new Exception("ERROR in adapt rule: cannot add non placeholder type"));
}
return Optional.of(new UnifyPair(unif.apply(newLhs), typeDs, PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
}
@@ -655,15 +649,17 @@ public class RuleSet implements IRuleSet{
@Override
public Optional<Set<UnifyPair>> subst(Set<UnifyPair> pairs, List<Set<Constraint<UnifyPair>>> oderConstraints) {
HashMap<UnifyType, Integer> typeMap = new HashMap<>();
// In practice, typeMap fills up quickly and would otherwise be resized several times, so we start with a larger initial capacity
HashMap<UnifyType, Integer> typeMap = new HashMap<>(200);
Stack<UnifyType> occuringTypes = new Stack<>();
occuringTypes.ensureCapacity(pairs.size() * 3);
for(UnifyPair pair : pairs) {
occuringTypes.push(pair.getLhsType());
occuringTypes.push(pair.getRhsType());
}
while(!occuringTypes.isEmpty()) {
UnifyType t1 = occuringTypes.pop();
if(!typeMap.containsKey(t1))
@@ -675,12 +671,12 @@ public class RuleSet implements IRuleSet{
if(t1 instanceof SuperType)
occuringTypes.push(((SuperType) t1).getSuperedType());
else
t1.getTypeParams().forEach(x -> occuringTypes.push(x));
t1.getTypeParams().forEach(occuringTypes::push);
}
Queue<UnifyPair> result1 = new LinkedList<UnifyPair>(pairs);
LinkedList<UnifyPair> result1 = new LinkedList<UnifyPair>(pairs);
ArrayList<UnifyPair> result = new ArrayList<UnifyPair>();
boolean applied = false;
while(!result1.isEmpty()) {
UnifyPair pair = result1.poll();
PlaceholderType lhsType = null;
@@ -698,19 +694,30 @@ public class RuleSet implements IRuleSet{
&& !((rhsType instanceof WildcardType) && ((WildcardType)rhsType).getWildcardedType().equals(lhsType))) //PL eigefuegt 2018-02-18
{
Unifier uni = new Unifier(lhsType, rhsType);
result = result.stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(ArrayList::new));
result1 = result1.stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(LinkedList::new));
// apply unifier to result and result1 in place
result.replaceAll(p -> uni.apply(pair, p));
ListIterator<UnifyPair> result1Iterator = result1.listIterator();
while (result1Iterator.hasNext()) {
UnifyPair x = result1Iterator.next();
result1Iterator.set(uni.apply(pair, x));
}
Function<? super Constraint<UnifyPair>,? extends Constraint<UnifyPair>> applyUni = b -> b.stream().map(
x -> uni.apply(pair,x)).collect(Collectors.toCollection((b.getExtendConstraint() != null)
? () -> new Constraint<UnifyPair>(
b.isInherited(),
b.isImplemented(),
b.getExtendConstraint().stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(Constraint::new)),
b.getExtendConstraint().createdMapped(x -> uni.apply(pair,x)),
b.getmethodSignatureConstraint().stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(HashSet::new)))
: () -> new Constraint<UnifyPair>(b.isInherited(), b.isImplemented())
));
oderConstraints.replaceAll(oc -> oc.stream().map(applyUni).collect(Collectors.toCollection(HashSet::new)));
oderConstraints.replaceAll(oc -> {
HashSet<Constraint<UnifyPair>> mapped = new HashSet<>(oc.size());
for (var element : oc) {
mapped.add(applyUni.apply(element));
}
return mapped;
});
/*
oderConstraints = oderConstraints.stream().map(
a -> a.stream().map(applyUni
@@ -864,14 +871,11 @@ public class RuleSet implements IRuleSet{
UnifyType r = x.getRhsType();
if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
} );
try {
logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNred: " + result + "\n");
// logFile.flush();
}
catch (IOException e) {
System.out.println("logFile-Error");
}
logger.debug(() -> "FUNgreater: " + pair);
logger.debug(() -> "FUNred: " + result);
return Optional.of(result);
}
@@ -937,8 +941,8 @@ public class RuleSet implements IRuleSet{
Set<UnifyPair> result = new HashSet<UnifyPair>();
Integer variance = ((PlaceholderType)rhsType).getVariance();
Integer inversVariance = distributeVariance.inverseVariance(variance);
int variance = ((PlaceholderType)rhsType).getVariance();
int inversVariance = distributeVariance.inverseVariance(variance);
UnifyType[] freshPlaceholders = new UnifyType[funNLhsType.getTypeParams().size()];
for(int i = 0; i < freshPlaceholders.length-1; i++) {
@@ -956,18 +960,14 @@ public class RuleSet implements IRuleSet{
result.add(new UnifyPair(rhsType, funNLhsType.setTypeParams(new TypeParams(freshPlaceholders)), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
result.stream().forEach(x -> { UnifyType l = x.getLhsType();
if (l instanceof PlaceholderType) { ((PlaceholderType)l).disableWildcardtable(); }
if (l instanceof PlaceholderType) { ((PlaceholderType)l).disableWildcardtable(); }
UnifyType r = x.getRhsType();
if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
} );
try {
logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNgreater: " + result + "\n");
// logFile.flush();
}
catch (IOException e) {
System.out.println("lofFile-Error");
}
logger.debug(() -> "FUNgreater: " + pair);
logger.debug(() -> "FUNgreater: " + result);
return Optional.of(result);
}
@@ -986,8 +986,8 @@ public class RuleSet implements IRuleSet{
Set<UnifyPair> result = new HashSet<UnifyPair>();
Integer variance = ((PlaceholderType)lhsType).getVariance();
Integer inversVariance = distributeVariance.inverseVariance(variance);
int variance = ((PlaceholderType)lhsType).getVariance();
int inversVariance = distributeVariance.inverseVariance(variance);
UnifyType[] freshPlaceholders = new UnifyType[funNRhsType.getTypeParams().size()];
for(int i = 0; i < freshPlaceholders.length-1; i++) {
@@ -1006,18 +1006,15 @@ public class RuleSet implements IRuleSet{
result.add(new UnifyPair(lhsType, funNRhsType.setTypeParams(new TypeParams(freshPlaceholders)), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
result.stream().forEach(x -> { UnifyType l = x.getLhsType();
if (l instanceof PlaceholderType) { ((PlaceholderType)l).disableWildcardtable(); }
if (l instanceof PlaceholderType) { ((PlaceholderType)l).disableWildcardtable(); }
UnifyType r = x.getRhsType();
if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
} );
try {
logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNsmaller: " + result + "\n");
// logFile.flush();
}
catch (IOException e) {
System.out.println("lofFile-Error");
}
logger.debug(() -> "FUNgreater: " + pair);
logger.debug(() -> "FUNsmaller: " + result);
return Optional.of(result);
}
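
Throughout this file the eager logFile.write(...) calls become logger.debug(() -> ...) with a supplier, so the often large string concatenations are only evaluated when debug output is actually enabled. A simplified stand-in for that pattern; this LazyLoggerSketch is not the project's de.dhbwstuttgart.util.Logger, only an illustration of why the lambda form is cheaper when logging is off:

import java.util.function.Supplier;

// Simplified stand-in, NOT the project's Logger class.
public class LazyLoggerSketch {
    private final boolean debugEnabled;

    public LazyLoggerSketch(boolean debugEnabled) { this.debugEnabled = debugEnabled; }

    public void debug(Supplier<String> message) {
        // The string inside the supplier is only built when debug output is enabled.
        if (debugEnabled) System.out.println("[DEBUG] " + message.get());
    }

    public static void main(String[] args) {
        LazyLoggerSketch logger = new LazyLoggerSketch(false);
        // This concatenation is never evaluated, because debug output is off.
        logger.debug(() -> "FUNgreater: " + java.util.List.of(1, 2, 3));
    }
}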

View File

@@ -29,17 +29,12 @@ public class TypeUnify {
* unify parallel ohne result modell
*/
public static Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
ForkJoinPool pool = TypeUnify.createThreadPool();
ForkJoinPool pool = TypeUnify.createThreadPool(unifyContext.logger());
UnifyContext context = unifyContext.newWithParallel(true).newWithExecutor(pool);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, context, 0);
Set<Set<UnifyPair>> res = joinFuture(unifyTask.compute());
try {
unifyContext.logFile().write("\nnoShortendElements: " + TypeUnifyTask.noShortendElements + "\n");
// logFile.flush();
}
catch (IOException e) {
System.err.println("no log-File");
}
unifyContext.logger().debug("\nnoShortendElements: " + TypeUnifyTask.noShortendElements + "\n");
return res;
}
@@ -47,7 +42,7 @@ public class TypeUnify {
* unify asynchron mit Rückgabe UnifyResultModel ohne dass alle results gesammelt sind
*/
public static UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
ForkJoinPool pool = TypeUnify.createThreadPool();
ForkJoinPool pool = TypeUnify.createThreadPool(unifyContext.logger());
UnifyContext context = unifyContext.newWithExecutor(pool);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, context, 0);
unifyTask.compute();
@@ -58,18 +53,12 @@ public class TypeUnify {
* unify parallel mit Rückgabe UnifyResultModel nachdem alle results gesammelt sind
*/
public static Set<Set<UnifyPair>> unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
ForkJoinPool pool = TypeUnify.createThreadPool();
ForkJoinPool pool = TypeUnify.createThreadPool(unifyContext.logger());
UnifyContext context = unifyContext.newWithParallel(true).newWithExecutor(pool);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, context, 0);
var result = joinFuture(unifyTask.compute());
try {
unifyContext.logFile().write("\nnoShortendElements: " + TypeUnifyTask.noShortendElements +"\n");
// logFile.flush();
}
catch (IOException e) {
System.err.println("no log-File");
}
unifyContext.logger().debug("\nnoShortendElements: " + TypeUnifyTask.noShortendElements + "\n");
return result;
}
@@ -87,18 +76,12 @@ public class TypeUnify {
public static Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, unifyContext.newWithParallel(false), 0);
Set<Set<UnifyPair>> res = joinFuture(unifyTask.compute());
try {
unifyContext.logFile().write("\nnoShortendElements: " + TypeUnifyTask.noShortendElements +"\n");
// logFile.flush();
}
catch (IOException e) {
System.err.println("no log-File");
}
unifyContext.logger().debug("\nnoShortendElements: " + TypeUnifyTask.noShortendElements +"\n");
return res;
}
private static ForkJoinPool createThreadPool() {
Logger.print("Available processors: " + Runtime.getRuntime().availableProcessors());
private static ForkJoinPool createThreadPool(Logger logger) {
logger.info("Available processors: " + Runtime.getRuntime().availableProcessors());
return new ForkJoinPool(
Runtime.getRuntime().availableProcessors(),
ForkJoinPool.defaultForkJoinWorkerThreadFactory,
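
createThreadPool sizes the work-stealing pool to the number of available processors. A rough, generic sketch of constructing such a ForkJoinPool; the uncaught-exception handler and the asyncMode flag below are generic placeholders, not necessarily the values this project passes:

import java.util.concurrent.ForkJoinPool;

public class PoolSketch {
    public static void main(String[] args) {
        int parallelism = Runtime.getRuntime().availableProcessors();
        ForkJoinPool pool = new ForkJoinPool(
                parallelism,
                ForkJoinPool.defaultForkJoinWorkerThreadFactory,
                (thread, throwable) -> throwable.printStackTrace(), // placeholder uncaught-exception handler
                false);                                             // asyncMode off: LIFO task order
        System.out.println("parallelism = " + pool.getParallelism());
        pool.shutdown();
    }
}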

View File

@@ -34,7 +34,7 @@ public class TypeUnify2Task extends TypeUnifyTask {
@Override
public CompletableFuture<Set<Set<UnifyPair>>> compute() {
if (one) {
System.out.println("two");
context.logger().info("two");
}
one = true;
CompletableFuture<Set<Set<UnifyPair>>> res =
@@ -45,7 +45,7 @@ public class TypeUnify2Task extends TypeUnifyTask {
*/
//writeLog("xxx");
//noOfThread--;
if (this.myIsCancelled()) {
if (this.isExecutionCancelled()) {
return CompletableFuture.completedFuture(new HashSet<>());
} else {
return res;
@@ -53,12 +53,6 @@ public class TypeUnify2Task extends TypeUnifyTask {
}
public void closeLogFile() {
try {
context.logFile().close();
} catch (IOException ioE) {
System.err.println("no log-File");
}
context.logger().close();
}
}

View File

@@ -3,6 +3,7 @@ package de.dhbwstuttgart.typeinference.unify;
import de.dhbwstuttgart.exceptions.TypeinferenceException;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.server.ServerTaskLogger;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.cartesianproduct.VarianceCase;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
@@ -26,11 +27,7 @@ import de.dhbwstuttgart.typeinference.unify.model.WildcardType;
import de.dhbwstuttgart.util.Logger;
import de.dhbwstuttgart.util.Pair;
import de.dhbwstuttgart.util.Tuple;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Serial;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -45,13 +42,10 @@ import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.RecursiveTask;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.BiFunction;
import java.util.function.BinaryOperator;
import java.util.stream.Collectors;
import org.apache.commons.io.output.NullOutputStream;
/**
@@ -59,14 +53,14 @@ import org.apache.commons.io.output.NullOutputStream;
*
* @author Florian Steurer
*/
public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<UnifyPair>>>> {
public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<UnifyPair>>>> {
@Serial
private static final long serialVersionUID = 1L;
private static int i = 0;
private final boolean printtag = false;
final UnifyContext context;
public final UnifyContext context;
/**
* Element, das aus dem nextSet den Gleichunen dieses Threads hinzugefuegt wurde
@@ -108,21 +102,19 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
int rekTiefeField;
Integer nOfUnify = 0;
int nOfUnify = 0;
Integer noUndefPair = 0;
int noUndefPair = 0;
Integer noAllErasedElements = 0;
int noAllErasedElements = 0;
// some statistics for local output (they will not make sense when executed on the server)
public static int noBacktracking;
public static int noShortendElements;
public static int noou = 0;
Boolean myIsCanceled = false;
public TypeUnifyTask(UnifyContext context) {
this.context = context.newWithLogFile(new OutputStreamWriter(NullOutputStream.INSTANCE));
this.context = context.newWithLogger(Logger.NULL_LOGGER);
rules = new RuleSet(context.placeholderRegistry());
}
@@ -157,24 +149,19 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
this.fc = fc;
this.oup = new OrderingUnifyPair(fc, context);
Writer logFileWriter = OutputStreamWriter.nullWriter();
if (context.log()) {
try {
logFileWriter = new FileWriter(System.getProperty("user.dir") + "/logFiles/" + "Thread");
logFileWriter.write("");
} catch (IOException e) {
System.err.println("log-File nicht vorhanden");
}
}
this.context = context.newWithLogFile(logFileWriter);
this.context = (context.logger() instanceof ServerTaskLogger) ? context : context.newWithLogger(
Logger.forFile(
System.getProperty("user.dir") + "/logFiles/" + "Thread",
"Unify"
)
);
/*Abbruchtest
if (thNo > 10) {
System.out.println("cancel");
context.logger().info("cancel");
usedTasks.cancel();
writeLog(nOfUnify.toString() + "cancel");
System.out.println("cancel");
context.logger().info("cancel");
try {
logFile.write("Abbruch");
}
@@ -183,7 +170,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
}
}
*/
rules = new RuleSet(context.logFile(), context.placeholderRegistry());
rules = new RuleSet(context.logger(), context.placeholderRegistry());
this.rekTiefeField = rekTiefe;
context.usedTasks().add(this);
}
@@ -220,17 +207,10 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
}
}
*/
void myCancel(Boolean b) {
myIsCanceled = true;
}
public boolean myIsCancelled() {
return myIsCanceled;
}
public CompletableFuture<Set<Set<UnifyPair>>> compute() {
if (one) {
System.out.println("two");
context.logger().info("two");
}
one = true;
Set<UnifyPair> neweq = new HashSet<>(eq);
@@ -247,11 +227,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
.collect(Collectors.toCollection(ArrayList::new));
var unifyFuture = unify(neweq, remainingOderconstraints, fc, context.parallel(), rekTiefeField, methodSignatureConstraint);
return unifyFuture.thenApply(res -> {
try {
context.logFile().close();
} catch (IOException ioE) {
System.err.println("no log-File");
}
context.logger().close();
if (isUndefinedPairSetSet(res)) {
//fuer debug-Zwecke
ArrayList<ArrayList<UnifyPair>> al = res.stream()
@@ -260,7 +236,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
throw new TypeinferenceException("Unresolved constraints: " + res, new NullToken()); //return new HashSet<>();
}
if (this.myIsCancelled()) {
if (this.isExecutionCancelled()) {
return new HashSet<>();
}
@@ -292,19 +268,19 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
// ).collect(Collectors.toCollection(HashSet::new));
//writeLog(nOfUnify.toString() + " AA: " + aas.toString());
//if (aas.isEmpty()) {
// System.out.println("");
// context.logger().info("");
//}
//.collect(Collectors.toCollection(HashSet::new)));
if (this.myIsCancelled()) {
if (this.isExecutionCancelled()) {
return CompletableFuture.completedFuture(new HashSet<>());
}
rekTiefe++;
nOfUnify++;
writeLog(nOfUnify.toString() + " Unifikation: " + eq.toString());
writeLog(nOfUnify.toString() + " Oderconstraints: " + oderConstraints.toString());
context.logger().debug(() -> nOfUnify + " Unifikation: " + eq.toString());
context.logger().debug(() -> nOfUnify + " Oderconstraints: " + oderConstraints.toString());
/*
* Variancen auf alle Gleichungen vererben
@@ -326,15 +302,10 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
/*
* Occurs-Check durchfuehren
*/
Set<UnifyPair> ocurrPairs = eq.stream().filter(x -> {
UnifyType lhs, rhs;
return (lhs = x.getLhsType()) instanceof PlaceholderType
&& !((rhs = x.getRhsType()) instanceof PlaceholderType)
&& rhs.getTypeParams().occurs((PlaceholderType) lhs);
})
.peek(UnifyPair::setUndefinedPair)
.collect(Collectors.toCollection(HashSet::new));
writeLog("ocurrPairs: " + ocurrPairs);
Set<UnifyPair> ocurrPairs = TypeUnifyTaskHelper.occursCheck(eq);
Set<UnifyPair> finalOcurrPairs = ocurrPairs;
context.logger().debug(() -> "ocurrPairs: " + finalOcurrPairs);
if (!ocurrPairs.isEmpty()) {
Set<Set<UnifyPair>> ret = new HashSet<>();
ret.add(ocurrPairs);
@@ -355,15 +326,9 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
/* In commit dfd91b5f8b7fca1cb5f302eec4b0ba3330271c9b eingefuegt ANFANG */
Set<UnifyPair> occurcheck = new HashSet<>(eq0);
occurcheck.removeAll(eq0Prime);
ocurrPairs = occurcheck.stream().filter(x -> {
UnifyType lhs, rhs;
return (lhs = x.getLhsType()) instanceof PlaceholderType
&& !((rhs = x.getRhsType()) instanceof PlaceholderType)
&& rhs.getTypeParams().occurs((PlaceholderType) lhs);
})
.peek(UnifyPair::setUndefinedPair)
.collect(Collectors.toCollection(HashSet::new));
writeLog("ocurrPairs: " + ocurrPairs);
ocurrPairs = TypeUnifyTaskHelper.occursCheck(occurcheck);
Set<UnifyPair> finalOcurrPairs1 = ocurrPairs;
context.logger().debug(() -> "ocurrPairs: " + finalOcurrPairs1);
if (!ocurrPairs.isEmpty()) {
Set<Set<UnifyPair>> ret = new HashSet<>();
ret.add(ocurrPairs);
@@ -375,8 +340,8 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
eq0.forEach(UnifyPair::disableCondWildcards);
writeLog(nOfUnify.toString() + " Unifikation nach applyTypeUnificationRules: " + eq.toString());
writeLog(nOfUnify.toString() + " Oderconstraints nach applyTypeUnificationRules: " + oderConstraints.toString());
context.logger().debug(() ->nOfUnify + " Unifikation nach applyTypeUnificationRules: " + eq.toString());
context.logger().debug(() -> nOfUnify + " Oderconstraints nach applyTypeUnificationRules: " + oderConstraints.toString());
/*
* Step 2 and 3: Create a subset eq1s of pairs where both sides are TPH and eq2s of the other pairs
@@ -396,7 +361,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
// cartesian product of the sets created by pattern matching.
List<Set<? extends Set<UnifyPair>>> topLevelSets = new ArrayList<>();
//System.out.println(eq2s);
//context.logger().info(eq2s);
if (!eq1s.isEmpty()) { // Do not add empty sets or the cartesian product will always be empty.
Set<Set<UnifyPair>> wrap = new HashSet<>();
@@ -420,7 +385,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
// Sets that originate from pair pattern matching
// Sets of the "second level"
Set<UnifyPair> undefinedPairs = new HashSet<>();
if (printtag) System.out.println("eq2s " + eq2s);
if (printtag) context.logger().info("eq2s " + eq2s);
//writeLog("BufferSet: " + bufferSet.toString()+"\n");
List<Set<Constraint<UnifyPair>>> oderConstraintsOutput = new ArrayList<>();//new ArrayList<>(oderConstraints);
Set<Set<Set<? extends Set<UnifyPair>>>> secondLevelSets = calculatePairSets(eq2s, oderConstraints, fc, undefinedPairs, oderConstraintsOutput);
@@ -428,21 +393,21 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
//nicht ausgewertet Faculty Beispiel im 1. Schritt
//PL 2017-10-03 geloest, muesste noch mit FCs mit kleineren
//Typen getestet werden.
writeLog(nOfUnify.toString() + " Oderconstraints2: " + oderConstraintsOutput.toString());
if (printtag) System.out.println("secondLevelSets:" + secondLevelSets);
context.logger().debug(() -> nOfUnify + " Oderconstraints2: " + oderConstraintsOutput.toString());
if (printtag) context.logger().info("secondLevelSets:" + secondLevelSets);
// If pairs occured that did not match one of the cartesian product cases,
// those pairs are contradictory and the unification is impossible.
if (!undefinedPairs.isEmpty()) {
noUndefPair++;
for (UnifyPair up : undefinedPairs) {
writeLog(noUndefPair.toString() + " UndefinedPairs; " + up);
writeLog("BasePair; " + up.getBasePair());
context.logger().debug(() -> noUndefPair + " UndefinedPairs; " + up);
context.logger().debug(() -> "BasePair; " + up.getBasePair());
}
Set<Set<UnifyPair>> error = new HashSet<>();
undefinedPairs = undefinedPairs.stream().peek(UnifyPair::setUndefinedPair)
.collect(Collectors.toCollection(HashSet::new));
error.add(undefinedPairs);
undefinedPairs.forEach(x -> writeLog("AllSubst: " + x.getAllSubstitutions().toString()));
undefinedPairs.forEach(x -> context.logger().debug(() -> "AllSubst: " + x.getAllSubstitutions().toString()));
return CompletableFuture.completedFuture(error);
}
@@ -452,16 +417,16 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
// Alternative: Sub cartesian products of the second level (pattern matched) sets
// "the big (x)"
/* for(Set<Set<Set<UnifyPair>>> secondLevelSet : secondLevelSets) {
//System.out.println("secondLevelSet "+secondLevelSet.size());
//context.logger().info("secondLevelSet "+secondLevelSet.size());
List<Set<Set<UnifyPair>>> secondLevelSetList = new ArrayList<>(secondLevelSet);
Set<List<Set<UnifyPair>>> cartResult = setOps.cartesianProduct(secondLevelSetList);
//System.out.println("CardResult: "+cartResult.size());
//context.logger().info("CardResult: "+cartResult.size());
// Flatten and add to top level sets
Set<Set<UnifyPair>> flat = new HashSet<>();
int j = 0;
for(List<Set<UnifyPair>> s : cartResult) {
j++;
//System.out.println("s from CardResult: "+cartResult.size() + " " + j);
//context.logger().info("s from CardResult: "+cartResult.size() + " " + j);
Set<UnifyPair> flat1 = new HashSet<>();
for(Set<UnifyPair> s1 : s)
flat1.addAll(s1);
@@ -475,8 +440,8 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
for (Set<Set<? extends Set<UnifyPair>>> secondLevelSet : secondLevelSets) {
topLevelSets.addAll(secondLevelSet);
}
//System.out.println(topLevelSets);
//System.out.println();
//context.logger().info(topLevelSets);
//context.logger().info();
//Aufruf von computeCartesianRecursive ANFANG
@@ -498,7 +463,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
// .collect(Collectors.toCollection(HashSet::new));
//Muss auskommentiert werden, wenn computeCartesianRecursive ENDE
if (this.myIsCancelled()) {
if (this.isExecutionCancelled()) {
return CompletableFuture.completedFuture(new HashSet<>());
}
@@ -517,18 +482,18 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
* Step 5: Substitution
*/
//writeLog("vor Subst: " + eqPrime);
writeLog("vor Subst: " + oderConstraints);
context.logger().debug(() -> "vor Subst: " + oderConstraints);
String ocString = oderConstraints.toString();
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
Optional<Set<UnifyPair>> eqPrimePrime = rules.subst(eqPrime, newOderConstraints);
Set<Set<UnifyPair>> unifyres1 = null;
Set<Set<UnifyPair>> unifyres2 = null;
if (!ocString.equals(newOderConstraints.toString()))
writeLog("nach Subst: " + newOderConstraints);
context.logger().debug(() -> "nach Subst: " + newOderConstraints);
{// sequentiell (Step 6b is included)
if (printtag) System.out.println("nextStep: " + eqPrimePrime);
if (printtag) context.logger().info("nextStep: " + eqPrimePrime);
if (eqPrime.equals(eq) && eqPrimePrime.isEmpty()
&& oderConstraints.isEmpty()) { //PL 2017-09-29 //(!eqPrimePrime.isPresent()) auskommentiert und durch
//PL 2017-09-29 dies ersetzt //(!eqPrimePrime.isPresent())
@@ -547,12 +512,12 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
return eqPrimePrimeSet;
});
if (finalresult && isSolvedForm(eqPrime)) {
writeLog("eqPrime:" + eqPrime.toString() + "\n");
context.logger().debug(() -> "eqPrime:" + eqPrime.toString() + "\n");
/* methodconstraintsets werden zum Ergebnis hinzugefuegt
* Anfang
*/
//System.out.println("methodSignatureConstraint Return: " + methodSignatureConstraint + "\n");
//context.logger().info("methodSignatureConstraint Return: " + methodSignatureConstraint + "\n");
eqPrimePrimeSetFuture = eqPrimePrimeSetFuture.thenApply(eqPrimePrimeSet -> {
eqPrimePrimeSet.forEach(x -> x.addAll(methodSignatureConstraint));
@@ -603,7 +568,8 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
return eqPrimePrimeSetFuture.thenApply(eqPrimePrimeSet -> {
eqPrimePrimeSet = eqPrimePrimeSet.stream().filter(x -> isSolvedForm(x) || this.isUndefinedPairSet(x)).collect(Collectors.toCollection(HashSet::new));
if (!eqPrimePrimeSet.isEmpty() && !isUndefinedPairSetSet(eqPrimePrimeSet)) {
writeLog("Result1 " + eqPrimePrimeSet);
Set<Set<UnifyPair>> finalEqPrimePrimeSet = eqPrimePrimeSet;
context.logger().debug(() -> "Result1 " + finalEqPrimePrimeSet);
}
return eqPrimePrimeSet;
});
@@ -675,7 +641,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
} else {
//Varianz-Bestimmung Oder-Constraints
if (printtag) {
System.out.println("nextSetasList " + nextSetAsList);
context.logger().info("nextSetasList " + nextSetAsList);
}
variance = TypeUnifyTaskHelper.calculateOderConstraintVariance(nextSetAsList);
}
@@ -691,14 +657,16 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
*/
Set<UnifyPair> sameEqSet = new HashSet<>();
//optOrigPair enthaelt ggf. das Paar a = ty \in nextSet
Optional<UnifyPair> optOrigPair = Optional.empty();
Optional<UnifyPair> optOrigPair;
if (!oderConstraint) {
optOrigPair = TypeUnifyTaskHelper.findEqualityConstrainedUnifyPair(nextSetElement);
writeLog("optOrigPair: " + optOrigPair);
context.logger().debug(() -> "optOrigPair: " + optOrigPair);
if (optOrigPair.isPresent()) {
sameEqSet = TypeUnifyTaskHelper.findConstraintsWithSameTVAssociation(optOrigPair.get(), singleElementSets);
}
} else {
optOrigPair = Optional.empty();
}
@@ -708,7 +676,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
return resultFuture.thenApply(result -> {
//2020-02-02: if (variance ==2) Hier Aufruf von filterOverriding einfuegen
writeLog("Return computeCR: " + result.toString());
context.logger().debug(() ->"Return computeCR: " + result.toString());
return result;
});
}
@@ -735,23 +703,23 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
VarianceCase varianceCase = VarianceCase.createFromVariance(variance, oderConstraint, this, context);
writeLog("nextSet: " + nextSet.toString());
writeLog("nextSetasList: " + prevNextSetAsList.toString());
context.logger().debug(() -> "nextSet: " + nextSet.toString());
context.logger().debug(() -> "nextSetasList: " + prevNextSetAsList.toString());
varianceCase.selectNextData(this, prevNextSetAsList, optOrigPair);
if (oderConstraint) {//Methodconstraints werden abgespeichert für die Bytecodegenerierung von Methodenaufrufen
methodSignatureConstraint.addAll(((Constraint<UnifyPair>) varianceCase.a).getmethodSignatureConstraint());
writeLog("ERSTELLUNG methodSignatureConstraint: " + methodSignatureConstraint);
//System.out.println("ERSTELLUNG methodSignatureConstraint: " +noOfThread+" "+methodSignatureConstraint);
//System.out.println("a: " +a);
//System.out.println("eq: " +eq);
//System.out.println();
context.logger().debug(() -> "ERSTELLUNG methodSignatureConstraint: " + methodSignatureConstraint);
//context.logger().info("ERSTELLUNG methodSignatureConstraint: " +noOfThread+" "+methodSignatureConstraint);
//context.logger().info("a: " +a);
//context.logger().info("eq: " +eq);
//context.logger().info();
}
i++;
Set<Set<UnifyPair>> elems = new HashSet<>(singleElementSets);
writeLog("a1: " + rekTiefe + " " + "variance: " + variance + " " + varianceCase.a.toString() + "\n");
context.logger().debug(() -> "a1: " + rekTiefe + " " + "variance: " + variance + " " + varianceCase.a.toString() + "\n");
Set<Set<UnifyPair>> aParDef = new HashSet<>();
@@ -772,7 +740,8 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
// Ergebnisvariable für die parallele Verabeitung: Tupel aus
// - forkOrig result : currentThreadResult (frueher "res")
// - fork results : forkResults (frueher "add_res")
CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> parallelResultDataFuture;
CompletableFuture<VarianceCase.ComputationResults> parallelResultDataFuture;
if (parallel) {
parallelResultDataFuture = varianceCase.computeParallel(
@@ -783,13 +752,16 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
// same as variance = 0
elems.add(varianceCase.a); //PL 2019-01-16 muss das wirklich hin steht schon in Zeile 859 ja braucht man siehe Zeile 859
parallelResultDataFuture = this.unify2(elems, eq, oderConstraints, fc, false, rekTiefe, new HashSet<>(methodSignatureConstraint))
.thenApply(currentThreadResult -> new Tuple<>(currentThreadResult, new HashSet<>()));
.thenApply(VarianceCase.ComputationResults::new);
}
if (this.isExecutionCancelled()) {
return CompletableFuture.completedFuture(new HashSet<>());
}
return parallelResultDataFuture.thenCompose(parallelResultData -> {
Set<Set<UnifyPair>> currentThreadResult = parallelResultData.getFirst();
Set<Set<Set<UnifyPair>>> forkResults = parallelResultData.getSecond();
Set<Set<UnifyPair>> currentThreadResult = parallelResultData.mainResult;
Set<Set<Set<UnifyPair>>> forkResults = parallelResultData.forkResults;
Set<Set<UnifyPair>> result = prevResult;
List<Set<UnifyPair>> nextSetAsList = prevNextSetAsList;
@@ -798,7 +770,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
//Ab hier alle parallele Berechnungen wieder zusammengeführt.
if (oderConstraint) {//Wenn weiteres Element nextSetasList genommen wird, muss die vorherige methodsignatur geloescht werden
methodSignatureConstraint.removeAll(((Constraint<UnifyPair>) varianceCase.a).getmethodSignatureConstraint());
//System.out.println("REMOVE: " +methodSignatureConstraint);
//context.logger().info("REMOVE: " +methodSignatureConstraint);
}
if (!isUndefinedPairSetSet(currentThreadResult) && isUndefinedPairSetSet(result)) {
//wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen
@@ -816,16 +788,16 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
//Alle Variablen bestimmen die nicht hinzugefügt wurden in a
//PL 2018-12-28: Hier gab es eine ClassCastException, war nicht reproduzierbar
// System.out.println("");
// context.logger().info("");
List<PlaceholderType> vars_a = TypeUnifyTaskHelper.extractMatchingPlaceholderTypes(varianceCase.a);
Set<UnifyPair> fstElemRes = currentThreadResult.iterator().next();
Set<UnifyPair> compRes = fstElemRes.stream().filter(x -> vars_a.contains(((PlaceholderType) x.getLhsType()))).collect(Collectors.toCollection(HashSet::new));
//Alle Variablen bestimmen die nicht hinzugefügt wurden in a_last
//System.out.println(a_last);
//context.logger().info(a_last);
try {//PL eingefuegt 2019-03-06 da bei map mmer wieder Nullpointer kamen
a_last.forEach(x -> writeLog("a_last_elem:" + x + " basepair: " + x.getBasePair()));//PL 2019-05-13 ins try hinzugefuegt Nullpointer-Exception ist in der Zeile aufgetaucht.
a_last.forEach(x -> context.logger().debug(() -> "a_last_elem:" + x + " basepair: " + x.getBasePair()));//PL 2019-05-13 ins try hinzugefuegt Nullpointer-Exception ist in der Zeile aufgetaucht.
List<PlaceholderType> varsLast_a = TypeUnifyTaskHelper.extractMatchingPlaceholderTypes(a_last);
//[(java.util.Vector<java.lang.Integer> <. gen_aq, , 1), (CEK =. ? extends gen_aq, 1)] KANN VORKOMMEN
//erstes Element genügt, da vars immer auf die gleichen Elemente zugeordnet werden muessen
@@ -834,11 +806,12 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
varianceCase.applyComputedResults(result, currentThreadResult, compResult, compRes);
} catch (NullPointerException e) {
writeLog("NullPointerException: " + a_last.toString());
context.logger().debug(() -> "NullPointerException: " + a_last.toString());
}
} else {
//alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden
writeLog("RES Fst: result: " + result.toString() + " currentThreadResult: " + currentThreadResult.toString());
Set<Set<UnifyPair>> finalResult = result;
context.logger().debug(() -> "RES Fst: result: " + finalResult.toString() + " currentThreadResult: " + currentThreadResult.toString());
result.addAll(currentThreadResult);
}
}
@@ -856,14 +829,15 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
//wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen
result = par_res;
if (!par_res.isEmpty() && par_res.iterator().next() instanceof WildcardType) {
// System.out.println();
// context.logger().info();
}
} else {
if ((isUndefinedPairSetSet(par_res) && isUndefinedPairSetSet(result))
|| (!isUndefinedPairSetSet(par_res) && !isUndefinedPairSetSet(result))
|| result.isEmpty()) {
//alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden
writeLog("RES var1 ADD:" + result.toString() + " " + par_res.toString());
Set<Set<UnifyPair>> finalResult1 = result;
context.logger().debug(() ->"RES var1 ADD:" + finalResult1.toString() + " " + par_res.toString());
result.addAll(par_res);
}
}
@@ -884,15 +858,15 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
// Iterator<Set<UnifyPair>> nextSetasListIt = new ArrayList<>(nextSetAsList).iterator();
boolean shouldBreak = varianceCase.eraseInvalidSets(rekTiefe, aParDef, nextSetAsList);
if (shouldBreak) {
// this.cancelYoungerSiblingTasks();
return CompletableFuture.completedFuture(result);
}
writeLog("a: " + rekTiefe + " variance: " + variance + varianceCase.a.toString());
context.logger().debug(() -> "a: " + rekTiefe + " variance: " + variance + varianceCase.a.toString());
}
/* auskommentiert um alle Max und min Betrachtung auszuschalten ENDE */
if (isUndefinedPairSetSet(currentThreadResult) && aParDef.isEmpty()) {
int nofstred = 0;
Set<UnifyPair> abhSubst = TypeUnifyTaskHelper.collectFromThreadResult(currentThreadResult, UnifyPair::getAllSubstitutions);
abhSubst.addAll(
TypeUnifyTaskHelper.collectFromThreadResult(currentThreadResult, UnifyPair::getThisAndAllBases)
@@ -922,9 +896,10 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
*/
if (currentThreadResult.size() > 1) {
// System.out.println();
// context.logger().info();
}
writeLog("nextSetasList vor filter-Aufruf: " + nextSetAsList);
List<Set<UnifyPair>> finalNextSetAsList = nextSetAsList;
context.logger().debug(() -> "nextSetasList vor filter-Aufruf: " + finalNextSetAsList);
if (!oderConstraint) {//PL 2023-02-08 eingefuegt: Bei oderconstraints sind Subststitutionen nicht als Substitutionen in idesem Sinne zu sehen
nextSetAsList = nextSetAsList.stream().filter(x -> {
//Boolean ret = false;
@@ -935,24 +910,29 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
})//.filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y)) //fuer testzwecke auskommentiert um nofstred zu bestimmen PL 2018-10-10
.collect(Collectors.toCollection(ArrayList::new));
}
writeLog("nextSetasList nach filter-Aufruf: " + nextSetAsList);
nofstred = nextSetAsList.size();
//NOCH NICHT korrekt PL 2018-10-12
//nextSetasList = nextSetasList.stream().filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y))
// .collect(Collectors.toCollection(ArrayList::new));
writeLog("currentThreadResult (undef): " + currentThreadResult.toString());
writeLog("abhSubst: " + abhSubst.toString());
writeLog("a2: " + rekTiefe + " " + varianceCase.a.toString());
writeLog("Durchschnitt: " + durchschnitt.toString());
writeLog("nextSet: " + nextSet.toString());
writeLog("nextSetasList: " + nextSetAsList.toString());
writeLog("Number first erased Elements (undef): " + (len - nofstred));
writeLog("Number second erased Elements (undef): " + (nofstred - nextSetAsList.size()));
writeLog("Number erased Elements (undef): " + (len - nextSetAsList.size()));
noAllErasedElements += (len - nextSetAsList.size());
writeLog("Number of all erased Elements (undef): " + noAllErasedElements.toString());
writeLog("Number of Backtracking: " + noBacktracking++);
// System.out.println("");
if (context.logger().isLogLevelActive(Logger.LogLevel.DEBUG)) {
List<Set<UnifyPair>> finalNextSetAsList1 = nextSetAsList;
context.logger().debug(() -> "nextSetasList nach filter-Aufruf: " + finalNextSetAsList1);
int nofstred = nextSetAsList.size();
//NOCH NICHT korrekt PL 2018-10-12
//nextSetasList = nextSetasList.stream().filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y))
// .collect(Collectors.toCollection(ArrayList::new));
context.logger().debug("currentThreadResult (undef): " + currentThreadResult.toString());
context.logger().debug("abhSubst: " + abhSubst.toString());
context.logger().debug("a2: " + rekTiefe + " " + varianceCase.a.toString());
context.logger().debug("Durchschnitt: " + durchschnitt.toString());
context.logger().debug("nextSet: " + nextSet.toString());
context.logger().debug("nextSetasList: " + nextSetAsList.toString());
context.logger().debug("Number first erased Elements (undef): " + (len - nofstred));
context.logger().debug("Number second erased Elements (undef): " + (nofstred - nextSetAsList.size()));
context.logger().debug("Number erased Elements (undef): " + (len - nextSetAsList.size()));
noAllErasedElements += (len - nextSetAsList.size());
context.logger().debug("Number of all erased Elements (undef): " + noAllErasedElements);
context.logger().debug("Number of Backtracking: " + noBacktracking++);
}
// context.logger().info("");
}
//if (nextSetasList.size() == 0 && isUndefinedPairSetSet(result) && nextSet.size() > 1) {
// return result;
@@ -961,7 +941,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
// result.removeIf(y -> isUndefinedPairSet(y));
//}
//else result.stream().filter(y -> !isUndefinedPairSet(y));
writeLog("currentThreadResult: " + currentThreadResult.toString());
context.logger().debug(() -> "currentThreadResult: " + currentThreadResult.toString());
return this.innerCartesianLoop(variance, rekTiefe, oderConstraint, parallel, result, varianceCase.a, nextSet,
nextSetAsList, optOrigPair, methodSignatureConstraint, singleElementSets, sameEqSet, oderConstraints);
@@ -978,7 +958,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
* the error constraints. Error constraints are added
* @result contradiction of (a = ty) in sameEqSet
*/
public Boolean checkNoContradiction(Set<UnifyPair> a, Set<UnifyPair> sameEqSet, Set<Set<UnifyPair>> result) {
public boolean checkNoContradiction(Set<UnifyPair> a, Set<UnifyPair> sameEqSet, Set<Set<UnifyPair>> result) {
//optAPair enthaelt ggf. das Paar a = ty' \in a
//unterscheidet sich von optOrigPair, da dort a = ty
@@ -998,7 +978,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
UnifyPair aPair = optAPair.get();
//writeLog("optOrigPair: " + optOrigPair + " " + "aPair: " + aPair+ " " + "aPair.basePair(): " + aPair.getBasePair());
writeLog("checkA: " + aPair + "sameEqSet: " + sameEqSet);
context.logger().debug(() ->"checkA: " + aPair + "sameEqSet: " + sameEqSet);
for (UnifyPair sameEq : sameEqSet) {
if (sameEq.getLhsType() instanceof PlaceholderType) {
Set<UnifyPair> localEq = new HashSet<>();
@@ -1019,7 +999,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
if (result.isEmpty() || isUndefinedPairSetSet(result)) {
result.addAll(localRes);
}
writeLog("FALSE: " + aPair + "sameEqSet: " + sameEqSet);
context.logger().debug(() ->"FALSE: " + aPair + "sameEqSet: " + sameEqSet);
return false;
}
} else {
@@ -1041,12 +1021,12 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
if (result.isEmpty() || isUndefinedPairSetSet(result)) {
result.addAll(localRes);
}
writeLog("FALSE: " + aPair + "sameEqSet: " + sameEqSet);
context.logger().debug(() ->"FALSE: " + aPair + "sameEqSet: " + sameEqSet);
return false;
}
}
}
writeLog("TRUE: " + aPair + "sameEqSet: " + sameEqSet);
context.logger().debug(() ->"TRUE: " + aPair + "sameEqSet: " + sameEqSet);
return true;
}
return true;
@@ -1132,7 +1112,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
// Through application of the rules, every pair should have one of the above forms.
// Pairs that do not have one of the aboves form are contradictory.
else {
writeLog("Second erase:" + checkPair);
context.logger().debug(() ->"Second erase:" + checkPair);
return false;
}
//*/
@@ -1331,7 +1311,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
* (as in case 1 where sigma is added to the innermost set).
*/
protected Set<Set<Set<? extends Set<UnifyPair>>>> calculatePairSets(Set<UnifyPair> eq2s, List<Set<Constraint<UnifyPair>>> oderConstraintsInput, IFiniteClosure fc, Set<UnifyPair> undefined, List<Set<Constraint<UnifyPair>>> oderConstraintsOutput) {
writeLog("eq2s: " + eq2s.toString());
context.logger().debug(() ->"eq2s: " + eq2s.toString());
oderConstraintsOutput.addAll(oderConstraintsInput);
List<Set<Set<? extends Set<UnifyPair>>>> result = new ArrayList<>(9);
@@ -1384,15 +1364,13 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
{
List<Set<Constraint<UnifyPair>>> oderConstraintsVariance = oderConstraintsOutput.stream() //Alle Elemente rauswerfen, die Variance 0 haben oder keine TPH in LHS oder RHS sind
.filter(x -> x.stream()
.anyMatch(y ->
y.stream().anyMatch(z ->
( (z.getLhsType() instanceof PlaceholderType)
&& (((PlaceholderType) (z.getLhsType())).getVariance() != 0))
||
( (z.getRhsType() instanceof PlaceholderType)
&& (((PlaceholderType) (z.getRhsType())).getVariance() != 0))
)
)).toList();
.filter(y ->
y.stream().filter(z -> ((z.getLhsType() instanceof PlaceholderType)
&& (((PlaceholderType) (z.getLhsType())).getVariance() != 0))
|| ((z.getRhsType() instanceof PlaceholderType)
&& (((PlaceholderType) (z.getRhsType())).getVariance() != 0))
).findFirst().isPresent()
).findFirst().isPresent()).collect(Collectors.toList());
if (!oderConstraintsVariance.isEmpty()) {
Set<Constraint<UnifyPair>> ret = oderConstraintsVariance.getFirst();
oderConstraintsOutput.remove(ret);
@@ -1420,10 +1398,12 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
}
}
writeLog("eq2s: " + eq2s);
writeLog("eq2sAsListFst: " + eq2sAsListFst);
writeLog("eq2sAsListSnd: " + eq2sAsListSnd);
writeLog("eq2sAsListBack: " + eq2sAsListBack);
if (context.logger().isLogLevelActive(Logger.LogLevel.DEBUG)) {
context.logger().debug("eq2s: " + eq2s);
context.logger().debug("eq2sAsListFst: " + eq2sAsListFst);
context.logger().debug("eq2sAsListSnd: " + eq2sAsListSnd);
context.logger().debug("eq2sAsListBack: " + eq2sAsListBack);
}
eq2sAsList.addAll(eq2sAsListFst);
eq2sAsList.addAll(eq2sAsListSnd);
@@ -1436,7 +1416,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
if (!oderConstraintsOutput.isEmpty()) {
Set<Constraint<UnifyPair>> ret = oderConstraintsOutput.removeFirst();
//if (ret.iterator().next().iterator().next().getLhsType().getName().equals("M"))
// System.out.println("M");
// context.logger().info("M");
//Set<UnifyPair> retFlat = new HashSet<>();
//ret.stream().forEach(x -> retFlat.addAll(x));
@@ -1476,7 +1456,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
// Case 1: (a <. Theta')
if (((pairOp == PairOperator.SMALLERDOT) || (pairOp == PairOperator.SMALLERNEQDOT)) && lhsType instanceof PlaceholderType) {
//System.out.println(pair);
//context.logger().info(pair);
if (first) { //writeLog(pair.toString()+"\n");
Set<Set<UnifyPair>> x1 = new HashSet<>();
if (pair.getRhsType().getName().equals("void")) {
@@ -1498,7 +1478,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
x1.remove(remElem);
}
/* ZU LOESCHEN ANFANG
//System.out.println(x1);
//context.logger().info(x1);
Set<UnifyPair> sameEqSet = eq2sAsList.stream()
.filter(x -> ((x.getLhsType().equals(lhsType) || x.getRhsType().equals(lhsType)) && !x.equals(pair)))
.collect(Collectors.toCollection(HashSet::new));
@@ -1716,11 +1696,11 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
// && ((ReferenceType)thetaPrime).getTypeParams().iterator().next() instanceof PlaceholderType) //.getName().equals("java.util.Vector"))
// && ((ReferenceType)((ReferenceType)thetaPrime).getTypeParams().iterator().next()).getTypeParams().iterator().next().getName().equals("java.lang.Integer")) {
// {
// System.out.println("");
// context.logger().info("");
//}
Set<UnifyType> cs = fc.getAllTypesByName(thetaPrime.getName());//cs= [java.util.Vector<NP>, java.util.Vector<java.util.Vector<java.lang.Integer>>, ????java.util.Vector<gen_hv>???]
writeLog("cs: " + cs.toString());
context.logger().debug(() ->"cs: " + cs.toString());
//PL 18-02-06 entfernt, kommt durch unify wieder rein
//cs.add(thetaPrime);
//PL 18-02-06 entfernt
@@ -1731,8 +1711,9 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
aa.putAll(b);
return aa;
};
HashMap<PlaceholderType, PlaceholderType> hm = x.getInvolvedPlaceholderTypes().stream()
.reduce(new HashMap<PlaceholderType, PlaceholderType>(),
var involvedPlaceholderTypes = x.getInvolvedPlaceholderTypes();
HashMap<PlaceholderType, PlaceholderType> hm = involvedPlaceholderTypes.stream()
.reduce(TypeUnifyTaskHelper.getPresizedHashMap(involvedPlaceholderTypes.size()),
(aa, b) -> {
aa.put(b, PlaceholderType.freshPlaceholder(context.placeholderRegistry()));
return aa;
@@ -1752,7 +1733,8 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
if ((match.match(ml)).isEmpty()) {
thetaQs.remove(c);
}
writeLog("thetaQs von " + c + ": " + thetaQs.toString());
Set<UnifyType> finalThetaQs = thetaQs;
context.logger().debug(() ->"thetaQs von " + c + ": " + finalThetaQs.toString());
//Set<UnifyType> thetaQs = fc.getChildren(c).stream().collect(Collectors.toCollection(HashSet::new));
//thetaQs.add(thetaPrime); //PL 18-02-05 wieder geloescht
//PL 2017-10-03: War auskommentiert habe ich wieder einkommentiert,
@@ -1776,7 +1758,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
for (TypeParams tp : permuteParams(candidateParams))
thetaQPrimes.add(c.setTypeParams(tp));
}
writeLog("thetaQPrimes von " + c + ": " + thetaQPrimes.toString());
context.logger().debug(() ->"thetaQPrimes von " + c + ": " + thetaQPrimes.toString());
for (UnifyType tqp : thetaQPrimes) {//PL 2020-03-08 umbauen in der Schleife wird nur unifizierbarer Typ gesucht break am Ende
Collection<PlaceholderType> tphs = tqp.getInvolvedPlaceholderTypes();
Optional<Unifier> opt = stdUnify.unify(tqp, thetaPrime);
@@ -1803,7 +1785,13 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
//eingefuegt PL 2018-03-29 Anfang ? ext. theta hinzufuegen
if (a.isWildcardable()) {
Set<UnifyType> smaller_ext = smaller.stream().filter(x -> !(x instanceof ExtendsType) && !(x instanceof SuperType))
.map(ExtendsType::new)//.accept(new freshPlaceholder(), hm));}
.map(x -> {
//BinaryOperator<HashMap<PlaceholderType,PlaceholderType>> combiner = (aa,b) -> { aa.putAll(b); return aa;}; //Variablenumbenennung rausgenommen
//HashMap<PlaceholderType,PlaceholderType> hm = x.getInvolvedPlaceholderTypes().stream() //Variablen muessen wahrscheinlich erhalten bleiben
// .reduce(new HashMap<PlaceholderType,PlaceholderType>(),
// (aa, b)-> { aa.put(b,PlaceholderType.freshPlaceholder()); return aa; }, combiner);
return new ExtendsType(x);
})//.accept(new freshPlaceholder(), hm));}
.collect(Collectors.toCollection(HashSet::new));
smaller.addAll(smaller_ext);
}
@@ -1843,7 +1831,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
}
}
}
writeLog("result von " + pair + ": " + result);
context.logger().debug(() ->"result von " + pair + ": " + result);
return result;
}
@@ -1923,7 +1911,13 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
//writeLog("GREATER: " + greater + pair + "THETA: " + theta + "FBOUNDED: " + pair.getfBounded() + " ");
if (a.isWildcardable()) {
Set<UnifyType> greater_ext = greater.stream().filter(x -> !(x instanceof ExtendsType) && !(x instanceof SuperType))
.map(SuperType::new)//.accept(new freshPlaceholder(), hm));}
.map(x -> {
//BinaryOperator<HashMap<PlaceholderType,PlaceholderType>> combiner = (aa,b) -> { aa.putAll(b); return aa;}; //variable renaming removed
//HashMap<PlaceholderType,PlaceholderType> hm = x.getInvolvedPlaceholderTypes().stream() //variables probably need to be preserved
// .reduce(new HashMap<PlaceholderType,PlaceholderType>(),
// (aa, b)-> { aa.put(b,PlaceholderType.freshPlaceholder()); return aa; }, combiner);
return new SuperType(x);
})//.accept(new freshPlaceholder(), hm));}
.collect(Collectors.toCollection(HashSet::new));
greater.addAll(greater_ext);
}
@@ -1948,7 +1942,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
return ((match.match(termList).isPresent()) || x);
};
//if (parai.getName().equals("java.lang.Integer")) {
// System.out.println("");
// context.logger().info("");
//}
BinaryOperator<Boolean> bo = (x, y) -> (x || y);
if (fBounded.stream().reduce(false, f, bo)) {
@@ -2038,22 +2032,4 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
permuteParams(candidates, idx + 1, result, current);
}
}
public void writeLog(String str) {
if (context.log() && finalresult) {
synchronized (context.logFile()) {
try {
/*
logFile.write("Thread no.:" + thNo + "\n");
logFile.write("noOfThread:" + noOfThread + "\n");
logFile.write("parallel:" + parallel + "\n");
*/
context.logFile().write(str + "\n\n");
// logFile.flush();
} catch (IOException e) {
System.err.println("kein LogFile");
}
}
}
}
}

View File

@@ -5,6 +5,7 @@ import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
@@ -185,4 +186,36 @@ public class TypeUnifyTaskHelper {
.collect(Collectors.toCollection(ArrayList::new));
}
public static Set<UnifyPair> occursCheck(final Set<UnifyPair> eq) {
Set<UnifyPair> ocurrPairs = new HashSet<>(eq.size());
for (UnifyPair x : eq) {
UnifyType lhs = x.getLhsType();
UnifyType rhs = x.getRhsType();
if (lhs instanceof PlaceholderType lhsPlaceholder &&
!(rhs instanceof PlaceholderType) &&
rhs.getTypeParams().occurs(lhsPlaceholder))
{
x.setUndefinedPair();
ocurrPairs.add(x);
}
}
return ocurrPairs;
}
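
The occurs check above marks a pair as undefined when the placeholder on the left-hand side also appears among the type parameters of the non-placeholder right-hand side. A minimal standalone sketch of that rule, using plain strings instead of the project's UnifyPair/PlaceholderType classes and checking only one parameter level (the real getTypeParams().occurs(...) presumably walks the whole parameter structure; all names here are illustrative):

import java.util.List;

public class OccursCheckSketch {
    // a pair "a =. C<params>" is flagged when the placeholder a occurs in params
    static boolean occursFails(String lhsPlaceholder, List<String> rhsParams) {
        return rhsParams.contains(lhsPlaceholder);
    }

    public static void main(String[] args) {
        System.out.println(occursFails("a", List.of("a", "b"))); // true: a =. C<a,b> is cyclic
        System.out.println(occursFails("a", List.of("b")));      // false: a =. C<b> is fine
    }
}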
public static <T> HashSet<T> getPresizedHashSet(int minElements) {
if (minElements < 16) return new HashSet<>();
// HashSet and HashMap will resize at 75% load, so we account for that by multiplying by 1.5
int n = (int)(minElements * 1.5);
return new HashSet<>(n);
}
public static <S,T> HashMap<S,T> getPresizedHashMap(int minElements) {
if (minElements < 16) return new HashMap<>();
// HashSet and HashMap will resize at 75% load, so we account for that by multiplying by 1.5
int n = (int)(minElements * 1.5);
return new HashMap<>(n);
}
}
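
The two presizing helpers above avoid repeated rehashing: java.util.HashMap and HashSet grow once the number of entries exceeds capacity times the default load factor of 0.75, so requesting roughly 1.5 times the expected element count keeps the collection at a single allocation. A hedged standalone sketch of the same idea (not the project's helper; the 0.75-based formula below is the tighter textbook bound, the diff's factor of 1.5 is a slightly larger but equally safe choice):

import java.util.HashMap;
import java.util.Map;

public class PresizeSketch {
    // returns a map whose initial capacity is large enough that `expected`
    // insertions never trigger a resize
    static <K, V> Map<K, V> presized(int expected) {
        if (expected < 16) return new HashMap<>();            // default capacity already suffices
        return new HashMap<>((int) Math.ceil(expected / 0.75));
    }

    public static void main(String[] args) {
        Map<String, Integer> m = presized(1_000);
        for (int i = 0; i < 1_000; i++) m.put("k" + i, i);    // no intermediate rehashing
        System.out.println(m.size());                         // 1000
    }
}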

View File

@@ -1,17 +1,15 @@
package de.dhbwstuttgart.typeinference.unify;
import de.dhbwstuttgart.util.Logger;
import java.io.Writer;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.atomic.AtomicInteger;
public record UnifyContext(
// main log file of a unification
Writer logFile,
// if logs should be made
Boolean log,
// main logger of a unification
Logger logger,
// if the unify algorithm should run in parallel
Boolean parallel,
boolean parallel,
// the model for storing calculated results
UnifyResultModel resultModel,
// the executor used for thread management in parallel execution
@@ -23,26 +21,24 @@ public record UnifyContext(
) {
public UnifyContext(
Writer logFile,
Boolean log,
Boolean parallel,
Logger logger,
boolean parallel,
UnifyResultModel resultModel,
UnifyTaskModel usedTasks,
ExecutorService executor,
PlaceholderRegistry placeholderRegistry
) {
this(logFile, log, parallel, resultModel, executor, placeholderRegistry, usedTasks);
this(logger, parallel, resultModel, executor, placeholderRegistry, usedTasks);
}
public UnifyContext(
Writer logFile,
Boolean log,
Boolean parallel,
Logger logger,
boolean parallel,
UnifyResultModel resultModel,
UnifyTaskModel usedTasks,
PlaceholderRegistry placeholderRegistry
) {
this(logFile, log, parallel, resultModel, usedTasks, ForkJoinPool.commonPool(), placeholderRegistry);
this(logger, parallel, resultModel, usedTasks, ForkJoinPool.commonPool(), placeholderRegistry);
}
@@ -51,20 +47,21 @@ public record UnifyContext(
* causes the UnifyContext to be essentially handled as a
*/
public UnifyContext newWithLogFile(Writer logFile) {
return new UnifyContext(logFile, log, parallel, resultModel, executor, placeholderRegistry, usedTasks);
public UnifyContext newWithLogger(Logger logger) {
return new UnifyContext(logger, parallel, resultModel, executor, placeholderRegistry, usedTasks);
}
public UnifyContext newWithParallel(boolean parallel) {
return new UnifyContext(logFile, log, parallel, resultModel, executor, placeholderRegistry, usedTasks);
if (this.parallel == parallel) return this;
return new UnifyContext(logger, parallel, resultModel, executor, placeholderRegistry, usedTasks);
}
public UnifyContext newWithExecutor(ExecutorService executor) {
return new UnifyContext(logFile, log, parallel, resultModel, executor, placeholderRegistry, usedTasks);
return new UnifyContext(logger, parallel, resultModel, executor, placeholderRegistry, usedTasks);
}
public UnifyContext newWithResultModel(UnifyResultModel resultModel) {
return new UnifyContext(logFile, log, parallel, resultModel, executor, placeholderRegistry, usedTasks);
return new UnifyContext(logger, parallel, resultModel, executor, placeholderRegistry, usedTasks);
}
}
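
Replacing the raw Writer plus log flag with a Logger whose debug() accepts a Supplier means the message string is only assembled when the debug level is actually enabled, which matters inside the hot unification loops. The following is a minimal sketch of such a lazy logger; the real de.dhbwstuttgart.util.Logger is not shown in this diff, so its exact API (levels, method names) is assumed here:

import java.util.function.Supplier;

public class LazyLoggerSketch {
    enum Level { DEBUG, INFO, ERROR }

    static class Logger {
        private final Level threshold;
        Logger(Level threshold) { this.threshold = threshold; }

        // eager variant: the caller builds the message even if it is discarded
        void debug(String msg) { log(Level.DEBUG, msg); }

        // lazy variant: the Supplier is only evaluated when DEBUG is enabled,
        // so expensive toString()/concatenation work is skipped otherwise
        void debug(Supplier<String> msg) {
            if (enabled(Level.DEBUG)) log(Level.DEBUG, msg.get());
        }

        private boolean enabled(Level lvl) { return threshold.ordinal() <= lvl.ordinal(); }

        private void log(Level lvl, String msg) {
            if (enabled(lvl)) System.out.println("[" + lvl + "] " + msg);
        }
    }

    public static void main(String[] args) {
        Logger logger = new Logger(Level.INFO);
        logger.debug(() -> "huge: " + "x".repeat(1_000_000)); // supplier never runs at INFO level
        logger.debug("huge: " + "x".repeat(1_000_000));       // string is built, then thrown away
    }
}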

View File

@@ -12,7 +12,7 @@ public class UnifyTaskModel {
public synchronized void cancel() {
for(TypeUnifyTask t : usedTasks) {
t.myCancel(true);
t.cancelExecution();
}
}
}
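
The switch from myCancel(true) to cancelExecution() matches the pattern visible in the variance cases below: instead of throwing UnifyCancelException, the tasks poll isExecutionCancelled() and return an empty result early. A hedged sketch of that cooperative-cancellation idea with invented names (the real TypeUnifyTask flag handling is not shown in this excerpt):

import java.util.concurrent.atomic.AtomicBoolean;

public class CooperativeCancelSketch {
    static class Task {
        private final AtomicBoolean cancelled = new AtomicBoolean(false);

        void cancelExecution()         { cancelled.set(true); }   // called from the task model
        boolean isExecutionCancelled() { return cancelled.get(); }

        long compute() {
            long sum = 0;
            for (int i = 0; i < 1_000_000; i++) {
                // poll the flag and bail out instead of throwing a cancel exception
                if (isExecutionCancelled()) return sum;
                sum += i;
            }
            return sum;
        }
    }

    public static void main(String[] args) {
        Task t = new Task();
        t.cancelExecution();
        System.out.println(t.compute()); // 0: the loop returns on the first check
    }
}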

View File

@@ -1,6 +1,5 @@
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
import de.dhbwstuttgart.exceptions.UnifyCancelException;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
@@ -17,11 +16,11 @@ import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
public class Variance1Case extends VarianceCase {
public class ContravarianceCase extends VarianceCase {
protected final int variance = 1;
protected Variance1Case(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
protected ContravarianceCase(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
super(isOderConstraint, typeUnifyTask, context);
}
@@ -32,12 +31,12 @@ public class Variance1Case extends VarianceCase {
Optional<UnifyPair> optOrigPair
) {
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
writeLog("Max: a in " + variance + " " + a);
context.logger().debug("Max: a in " + variance + " " + a);
nextSetAsList.remove(a);
if (this.isOderConstraint) {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
}
writeLog("nextSetasListOderConstraints 1: " + nextSetasListOderConstraints);
context.logger().debug(() -> "nextSetasListOderConstraints 1: " + nextSetasListOderConstraints);
//Determine all maximal elements in nextSetasListRest
//parallel computation is only started for these.
@@ -49,7 +48,7 @@ public class Variance1Case extends VarianceCase {
@Override
public CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> computeParallel(
public CompletableFuture<ComputationResults> computeParallel(
Set<Set<UnifyPair>> elems,
Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
@@ -61,10 +60,6 @@ public class Variance1Case extends VarianceCase {
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> aParDef
) {
CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> resultValues = CompletableFuture.completedFuture(new Tuple<>(
new HashSet<>(), new HashSet<>()
));
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
@@ -72,27 +67,28 @@ public class Variance1Case extends VarianceCase {
/* FORK START */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, methodSignatureConstraint);
typeUnifyTask.addChildTask(forkOrig);
// schedule compute() on another thread
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
resultValues = resultValues.thenCombine(forkOrigFuture,
(prevResults, currentThreadResult) -> {
forkOrig.writeLog("final Orig 1");
CompletableFuture<ComputationResults> resultValues = forkOrigFuture.thenApply(
(currentThreadResult) -> {
forkOrig.context.logger().debug("final Orig 1");
forkOrig.closeLogFile();
return new Tuple<>(currentThreadResult, prevResults.getSecond());
return new ComputationResults(currentThreadResult);
});
//forks.add(forkOrig);
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
if (typeUnifyTask.isExecutionCancelled()) {
return CompletableFuture.completedFuture(new ComputationResults());
}
/* FORK END */
writeLog("a in " + variance + " " + a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
context.logger().debug("a in " + variance + " " + a);
context.logger().debug("nextSetasListRest: " + nextSetasListRest.toString());
while (!nextSetasListRest.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
nextSetAsList.remove(nSaL);
writeLog("1 RM" + nSaL.toString());
context.logger().debug("1 RM" + nSaL.toString());
if (!this.isOderConstraint) {
//check whether there is a contradiction in sameEqSet for a =. ty \in nSaL
@@ -108,27 +104,28 @@ public class Variance1Case extends VarianceCase {
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
typeUnifyTask.addChildTask(fork);
// schedule compute() on another thread
CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
resultValues = resultValues.thenCombine(forkFuture,
(prevResults, fork_res) -> {
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
if (typeUnifyTask.isExecutionCancelled()) {
return new ComputationResults();
}
writeLog("fork_res: " + fork_res.toString());
writeLog(Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString());
prevResults.getSecond().add(fork_res);
context.logger().debug("fork_res: " + fork_res.toString());
context.logger().debug(Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString());
prevResults.addForkResult(fork_res);
if (!typeUnifyTask.isUndefinedPairSetSet(fork_res)) {
aParDef.add(fork.getNextSetElement());
}
fork.writeLog("final 1");
fork.context.logger().debug("final 1");
fork.closeLogFile();
return prevResults;
}
);
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
if (typeUnifyTask.isExecutionCancelled()) {
return CompletableFuture.completedFuture(new ComputationResults());
}
}
@@ -144,7 +141,7 @@ public class Variance1Case extends VarianceCase {
) {
int resOfCompare = typeUnifyTask.oup.compare(compResult, compRes);
if (resOfCompare == -1) {
writeLog("Geloescht result: " + result);
context.logger().debug("Geloescht result: " + result);
result.clear();
result.addAll(currentThreadResult);
}
@@ -152,7 +149,7 @@ public class Variance1Case extends VarianceCase {
result.addAll(currentThreadResult);
}
else if (resOfCompare == 1) {
writeLog("Geloescht currentThreadResult: " + currentThreadResult);
context.logger().debug("Geloescht currentThreadResult: " + currentThreadResult);
//result = result;
}
}
@@ -163,35 +160,35 @@ public class Variance1Case extends VarianceCase {
Set<Set<UnifyPair>> aParDef,
List<Set<UnifyPair>> nextSetAsList
) {
// System.out.println("");
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
writeLog("aParDef: " + aParDef.toString());
// context.logger().info("");
context.logger().debug("a: " + rekTiefe + " variance: " + variance + a.toString());
context.logger().debug("aParDef: " + aParDef.toString());
aParDef.add(a);
Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator();
if (this.isOderConstraint) {
nextSetAsList.removeAll(nextSetasListOderConstraints);
nextSetasListOderConstraints = new ArrayList<>();
writeLog("Removed: " + nextSetasListOderConstraints);
context.logger().debug("Removed: " + nextSetasListOderConstraints);
while (aParDefIt.hasNext()) {
Set<UnifyPair> a_new = aParDefIt.next();
List<Set<UnifyPair>> smallerSetasList = typeUnifyTask.oup.smallerThan(a_new, nextSetAsList);
writeLog("smallerSetasList: " + smallerSetasList);
context.logger().debug("smallerSetasList: " + smallerSetasList);
List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
.filter(x -> !((Constraint<UnifyPair>) x).isInherited() && !((Constraint<UnifyPair>) x).isImplemented())
.collect(Collectors.toCollection(ArrayList::new));
writeLog("notInherited: " + notInherited + "\n");
context.logger().debug("notInherited: " + notInherited + "\n");
List<Set<UnifyPair>> notErased = new ArrayList<>();
notInherited.forEach(x -> {
notErased.addAll(typeUnifyTask.oup.smallerEqThan(x, smallerSetasList));
});
List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList);
writeLog("notErased: " + notErased + "\n");
context.logger().debug("notErased: " + notErased + "\n");
erased.removeAll(notErased);
nextSetAsList.removeAll(erased);
writeLog("Removed: " + erased);
context.logger().debug("Removed: " + erased);
writeLog("Not Removed: " + nextSetAsList);
context.logger().debug("Not Removed: " + nextSetAsList);
}
} else {
@@ -201,9 +198,9 @@ public class Variance1Case extends VarianceCase {
List<Set<UnifyPair>> erased = typeUnifyTask.oup.smallerEqThan(a_new, nextSetAsList);
nextSetAsList.removeAll(erased);
writeLog("Removed: " + erased);
context.logger().debug("Removed: " + erased);
writeLog("Not Removed: " + nextSetAsList);
context.logger().debug("Not Removed: " + nextSetAsList);
}
}
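
computeParallel above launches each candidate in its own TypeUnify2Task via CompletableFuture.supplyAsync(...).thenCompose(...) and folds every fork result back into an accumulator with thenCombine. The following self-contained sketch shows that fork-and-accumulate shape with plain integers standing in for the unification result sets (all names are illustrative; none of this is the project's API):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class ForkCombineSketch {
    public static void main(String[] args) {
        ExecutorService executor = Executors.newFixedThreadPool(4);

        // the "orig" fork provides the main result
        CompletableFuture<List<Integer>> resultValues =
                CompletableFuture.supplyAsync(() -> compute(0), executor)
                        .thenApply(main -> {
                            List<Integer> acc = new ArrayList<>();
                            acc.add(main);
                            return acc;
                        });

        // each further fork result is folded into the accumulator as it completes
        for (int i = 1; i <= 3; i++) {
            final int n = i;
            CompletableFuture<Integer> fork = CompletableFuture.supplyAsync(() -> compute(n), executor);
            resultValues = resultValues.thenCombine(fork, (acc, forkRes) -> {
                acc.add(forkRes);   // mirrors prevResults.addForkResult(fork_res)
                return acc;
            });
        }

        System.out.println(resultValues.join()); // [0, 10, 20, 30]
        executor.shutdown();
    }

    static int compute(int n) { return n * 10; }
}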

View File

@@ -1,6 +1,5 @@
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
import de.dhbwstuttgart.exceptions.UnifyCancelException;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
@@ -17,11 +16,11 @@ import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
public class VarianceM1Case extends VarianceCase {
public class CovarianceCase extends VarianceCase {
protected final int variance = -1;
protected VarianceM1Case(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
protected CovarianceCase(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
super(isOderConstraint, typeUnifyTask, context);
}
@@ -32,11 +31,11 @@ public class VarianceM1Case extends VarianceCase {
Optional<UnifyPair> optOrigPair
) {
a = typeUnifyTask.oup.min(nextSetAsList.iterator());
writeLog("Min: a in " + variance + " " + a);
context.logger().debug(() -> "Min: a in " + variance + " " + a);
if (this.isOderConstraint) {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
}
writeLog("nextSetasListOderConstraints -1: " + nextSetasListOderConstraints);
context.logger().debug(() -> "nextSetasListOderConstraints -1: " + nextSetasListOderConstraints);
nextSetAsList.remove(a);
//Determine all minimal elements in nextSetasListRest
@@ -49,7 +48,7 @@ public class VarianceM1Case extends VarianceCase {
@Override
public CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> computeParallel(
public CompletableFuture<ComputationResults> computeParallel(
Set<Set<UnifyPair>> elems,
Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
@@ -61,10 +60,6 @@ public class VarianceM1Case extends VarianceCase {
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> aParDef
) {
CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> resultValues = CompletableFuture.completedFuture(new Tuple<>(
new HashSet<>(), new HashSet<>()
));
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
@@ -72,28 +67,29 @@ public class VarianceM1Case extends VarianceCase {
/* FORK START */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, methodSignatureConstraint);
typeUnifyTask.addChildTask(forkOrig);
// schedule compute() on another thread
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
resultValues = resultValues.thenCombine(forkOrigFuture,
(prevResults, currentThreadResult) -> {
forkOrig.writeLog("final Orig -1");
CompletableFuture<ComputationResults> resultValues = forkOrigFuture.thenApply(
(currentThreadResult) -> {
forkOrig.context.logger().debug("final Orig -1");
forkOrig.closeLogFile();
return new Tuple<>(currentThreadResult, prevResults.getSecond());
return new ComputationResults(currentThreadResult);
});
//forks.add(forkOrig);
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
if (typeUnifyTask.isExecutionCancelled()) {
return resultValues;
}
/* FORK END */
writeLog("a in " + variance + " " + a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
context.logger().debug(() -> "a in " + variance + " " + a);
context.logger().debug(() -> "nextSetasListRest: " + nextSetasListRest.toString());
while (!nextSetasListRest.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
nextSetAsList.remove(nSaL);
writeLog("-1 RM" + nSaL.toString());
context.logger().debug(() -> "-1 RM" + nSaL.toString());
if (!this.isOderConstraint) {
//check whether there is a contradiction in sameEqSet for a =. ty \in nSaL
@@ -109,27 +105,28 @@ public class VarianceM1Case extends VarianceCase {
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
typeUnifyTask.addChildTask(fork);
// schedule compute() on another thread
CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
resultValues = resultValues.thenCombine(forkFuture,
(prevResults, fork_res) -> {
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
if (typeUnifyTask.isExecutionCancelled()) {
return prevResults;
}
writeLog("fork_res: " + fork_res.toString());
writeLog(Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString());
prevResults.getSecond().add(fork_res);
context.logger().debug(() -> "fork_res: " + fork_res.toString());
context.logger().debug(() -> Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString());
prevResults.addForkResult(fork_res);
if (!typeUnifyTask.isUndefinedPairSetSet(fork_res)) {
aParDef.add(fork.getNextSetElement());
}
fork.writeLog("final -1");
fork.context.logger().debug("final -1");
fork.closeLogFile();
return prevResults;
}
);
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
if (typeUnifyTask.isExecutionCancelled()) {
return resultValues;
}
}
@@ -146,13 +143,13 @@ public class VarianceM1Case extends VarianceCase {
) {
int resOfCompare = typeUnifyTask.oup.compare(compResult, compRes);
if (resOfCompare == 1) {
writeLog("Geloescht result: " + result);
context.logger().debug(() -> "Geloescht result: " + result);
result.clear();
result.addAll(currentThreadResult);
} else if (resOfCompare == 0) {
result.addAll(currentThreadResult);
} else if (resOfCompare == -1) {
writeLog("Geloescht currentThreadResult: " + currentThreadResult);
context.logger().debug(() -> "Geloescht currentThreadResult: " + currentThreadResult);
//result = result;
}
}
@@ -164,14 +161,14 @@ public class VarianceM1Case extends VarianceCase {
List<Set<UnifyPair>> nextSetAsList
) {
// System.out.println("");
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
writeLog("aParDef: " + aParDef.toString());
// context.logger().info("");
context.logger().debug(() -> "a: " + rekTiefe + " variance: " + variance + a.toString());
context.logger().debug(() -> "aParDef: " + aParDef.toString());
aParDef.add(a);
Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator();
if (this.isOderConstraint) {
nextSetAsList.removeAll(nextSetasListOderConstraints);
writeLog("Removed: " + nextSetasListOderConstraints);
context.logger().debug(() -> "Removed: " + nextSetasListOderConstraints);
nextSetasListOderConstraints = new ArrayList<>();
while (aParDefIt.hasNext()) {
Set<UnifyPair> a_new = aParDefIt.next();
@@ -204,9 +201,9 @@ public class VarianceM1Case extends VarianceCase {
erased.removeAll(notErased);
nextSetAsList.removeAll(erased);
writeLog("Removed: " + erased);
context.logger().debug(() -> "Removed: " + erased);
writeLog("Not Removed: " + nextSetAsList);
context.logger().debug(() -> "Not Removed: " + nextSetAsList);
}
} else {
@@ -217,9 +214,9 @@ public class VarianceM1Case extends VarianceCase {
nextSetAsList.removeAll(erased);
writeLog("Removed: " + erased);
context.logger().debug(() -> "Removed: " + erased);
writeLog("Not Removed: " + nextSetAsList);
context.logger().debug(() -> "Not Removed: " + nextSetAsList);
}
}

View File

@@ -1,6 +1,5 @@
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
import de.dhbwstuttgart.exceptions.UnifyCancelException;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
@@ -15,11 +14,12 @@ import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
public class Variance2Case extends VarianceCase {
public class InvarianceOrConstraintCase extends VarianceCase {
// either for invariance or for oderConstraints
protected final int variance = 2;
protected Variance2Case(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
protected InvarianceOrConstraintCase(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
super(isOderConstraint, typeUnifyTask, context);
}
@@ -37,7 +37,7 @@ public class Variance2Case extends VarianceCase {
@Override
public CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> computeParallel(
public CompletableFuture<ComputationResults> computeParallel(
Set<Set<UnifyPair>> elems,
Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
@@ -49,9 +49,7 @@ public class Variance2Case extends VarianceCase {
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> aParDef
) {
CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> resultValuesFuture;
writeLog("var2einstieg");
context.logger().debug("var2einstieg");
Set<TypeUnify2Task> forks = new HashSet<>();
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
@@ -60,19 +58,21 @@ public class Variance2Case extends VarianceCase {
/* FORK START */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
typeUnifyTask.addChildTask(forkOrig);
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
resultValuesFuture = forkOrigFuture.thenApply((currentThreadResult) -> {
forkOrig.writeLog("final Orig 2");
CompletableFuture<ComputationResults> resultValues = forkOrigFuture.thenApply((currentThreadResult) -> {
forkOrig.context.logger().debug("final Orig 2");
forkOrig.closeLogFile();
return new Tuple<>(currentThreadResult, new HashSet<>());
return new ComputationResults(currentThreadResult);
});
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
if (typeUnifyTask.isExecutionCancelled()) {
return resultValues;
}
/* FORK END */
writeLog("a in " + variance + " " + a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
context.logger().debug(() -> "a in " + variance + " " + a);
context.logger().debug(() -> "nextSetasListRest: " + nextSetasListRest.toString());
//For the parallel computation of the or-constraints, methodSignature is copied
//and the methodSignature of a or nSaL, respectively, is deleted again if it is not a solution.
@@ -89,27 +89,24 @@ public class Variance2Case extends VarianceCase {
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraintForParallel));
typeUnifyTask.addChildTask(fork);
CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
resultValuesFuture = resultValuesFuture.thenCombine(forkFuture, (resultValues, fork_res) -> {
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
resultValues = resultValues.thenCombine(forkFuture, (prevResults, fork_res) -> {
if (typeUnifyTask.isExecutionCancelled()) {
return prevResults;
}
resultValues.getSecond().add(fork_res);
fork.writeLog("final 2");
prevResults.addForkResult(fork_res);
fork.context.logger().debug("final 2");
fork.closeLogFile();
return resultValues;
return prevResults;
});
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
if (typeUnifyTask.isExecutionCancelled()) {
return resultValues;
}
}
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
}
return resultValuesFuture;
return resultValues;
}
@Override

View File

@@ -0,0 +1,241 @@
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
import de.dhbwstuttgart.exceptions.UnifyCancelException;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.util.Tuple;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;
public class UnknownVarianceCase extends VarianceCase {
protected final int variance = 0;
protected final AtomicBoolean shouldBreak = new AtomicBoolean(false);
protected UnknownVarianceCase(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
super(isOderConstraint, typeUnifyTask, context);
}
@Override
public void selectNextData(
TypeUnifyTask typeUnifyTask,
List<Set<UnifyPair>> nextSetAsList,
Optional<UnifyPair> optOrigPair
) {
//if a <. theta then a maximal element is very likely
//if theta <. a then a minimal element is very likely
if (!this.isOderConstraint && optOrigPair != null && optOrigPair.isPresent()) {
if (optOrigPair.get().getBasePair().getLhsType() instanceof PlaceholderType) {
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
} else {
a = typeUnifyTask.oup.min(nextSetAsList.iterator());
}
nextSetAsList.remove(a);
} else if (this.isOderConstraint) {
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
nextSetAsList.remove(a);
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
} else {
a = nextSetAsList.removeFirst();
}
Set<UnifyPair> finalA = a;
if (!this.isOderConstraint && optOrigPair != null && optOrigPair.isPresent()) {
if (optOrigPair.get().getBasePair().getLhsType() instanceof PlaceholderType) {
nextSetasListRest = typeUnifyTask.oup.maxElements(
nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != 1).toList()
);
} else {
nextSetasListRest = typeUnifyTask.oup.minElements(
nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != -1).toList()
);
}
} else if (this.isOderConstraint) {
nextSetasListRest = typeUnifyTask.oup.maxElements(
nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != 1).toList()
);
} else {
nextSetasListRest = (nextSetAsList.size() > 5) ? nextSetAsList.subList(0, 5) : nextSetAsList;
}
nextSetAsList.removeAll(nextSetasListRest);
// */
}
@Override
public CompletableFuture<ComputationResults> computeParallel(
Set<Set<UnifyPair>> elems,
Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
IFiniteClosure fc,
int rekTiefe,
Set<UnifyPair> methodSignatureConstraint,
List<Set<UnifyPair>> nextSetAsList,
Set<UnifyPair> sameEqSet,
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> aParDef
) {
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
newElemsOrig.add(a);
Set<UnifyPair> newMethodSignatureConstraintOrig = new HashSet<>(methodSignatureConstraint);
if (isOderConstraint) {
methodSignatureConstraint.addAll(((Constraint<UnifyPair>) a).getmethodSignatureConstraint());
}
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, newMethodSignatureConstraintOrig);
typeUnifyTask.addChildTask(forkOrig);
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
CompletableFuture<ComputationResults> resultValues = forkOrigFuture.thenApply(
(currentThreadResult) -> {
forkOrig.context.logger().debug("final Orig 0");
forkOrig.closeLogFile();
return new ComputationResults(currentThreadResult);
});
int i = 0;
Set<Set<UnifyPair>>[] additionalResults = new HashSet[nextSetasListRest.size()];
Constraint<UnifyPair>[] extendConstraints = new Constraint[nextSetasListRest.size()];
while (!nextSetasListRest.isEmpty()) {
final int finalI = i++;
Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
context.logger().debug(() -> "0 RM" + nSaL.toString());
if (this.isOderConstraint) {
Constraint<UnifyPair> extendConstraint = ((Constraint<UnifyPair>) nSaL).getExtendConstraint();
extendConstraints[finalI] = extendConstraint;
}
else if (!sameEqSet.isEmpty() && !typeUnifyTask.checkNoContradiction(nSaL, sameEqSet, result)) {
TypeUnifyTask.noShortendElements++;
continue;
}
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
Set<UnifyPair> newMethodSignatureConstraint = new HashSet<>(methodSignatureConstraint);
if (isOderConstraint) {
methodSignatureConstraint.addAll(((Constraint<UnifyPair>) nSaL).getmethodSignatureConstraint());
}
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, newMethodSignatureConstraint);
typeUnifyTask.addChildTask(fork);
// schedule compute() on another thread
CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
resultValues = resultValues.thenCombine(forkFuture, (compResult, forkResult) -> {
additionalResults[finalI] = forkResult;
context.logger().error("finalI: " + finalI);
return compResult;
});
}
int finalI1 = i;
return resultValues.thenCompose(compResult -> {
var oldResult = compResult.mainResult;
for (int e = 0; e < finalI1; e++) {
Set<Set<UnifyPair>> currentResult = additionalResults[e];
boolean oldResultInvalid = typeUnifyTask.isUndefinedPairSetSet(oldResult);
boolean currentResultInvalid = typeUnifyTask.isUndefinedPairSetSet(currentResult);
if (!oldResult.isEmpty() && !oldResultInvalid) {
boolean shouldBreak = this.eraseInvalidSets(rekTiefe, new HashSet<>(), nextSetAsList);
if (shouldBreak) {
return CompletableFuture.completedFuture(compResult);
}
}
if (this.isOderConstraint) {
nextSetasListOderConstraints.add(extendConstraints[e]);
}
if (!currentResultInvalid && oldResultInvalid) {
//if a correct result was found, delete all error cases
oldResult = currentResult;
} else if (oldResultInvalid == currentResultInvalid || oldResult.isEmpty()) {
//add all error cases and all correct results, respectively
Set<Set<UnifyPair>> finalOldResult = oldResult;
context.logger().debug(() -> "RES var1 ADD:" + finalOldResult.toString() + " " + currentResult.toString());
oldResult.addAll(currentResult);
}
}
compResult.mainResult = oldResult;
return CompletableFuture.completedFuture(compResult);
});
}
@Override
public void applyComputedResults(
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> currentThreadResult,
Set<UnifyPair> compResult,
Set<UnifyPair> compRes
) {
context.logger().debug("RES var=0 ADD:" + result.toString() + " " + currentThreadResult.toString());
result.addAll(currentThreadResult);
}
@Override
public boolean eraseInvalidSets(
int rekTiefe,
Set<Set<UnifyPair>> aParDef,
List<Set<UnifyPair>> nextSetAsList
) {
if (!this.isOderConstraint) {
return true;
} else {
nextSetAsList.removeAll(nextSetasListOderConstraints);
nextSetasListOderConstraints = new ArrayList<>();
context.logger().debug("Removed: " + nextSetasListOderConstraints);
List<Set<UnifyPair>> smallerSetasList = typeUnifyTask.oup.smallerThan(a, nextSetAsList);
List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
.filter(x -> !((Constraint<UnifyPair>) x).isInherited())
.collect(Collectors.toCollection(ArrayList::new));
List<Set<UnifyPair>> notErased = new ArrayList<>();
notInherited.forEach(x -> notErased.addAll(typeUnifyTask.oup.smallerEqThan(x, smallerSetasList)));
List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList);
erased.removeAll(notErased);
nextSetAsList.removeAll(erased);
context.logger().debug("Removed: " + erased);
context.logger().debug("Not Removed: " + nextSetAsList);
for (Set<UnifyPair> aPar : aParDef) {
smallerSetasList.clear();
smallerSetasList.addAll(typeUnifyTask.oup.smallerThan(aPar, nextSetAsList));
notInherited = smallerSetasList.stream()
.filter(x -> !((Constraint<UnifyPair>) x).isInherited())
.collect(Collectors.toCollection(ArrayList::new));
notErased.clear();
notInherited.forEach(x -> notErased.addAll(typeUnifyTask.oup.smallerEqThan(x, smallerSetasList)));
erased = new ArrayList<>(smallerSetasList);
erased.removeAll(notErased);
nextSetAsList.removeAll(erased);
context.logger().debug("Removed: " + erased);
context.logger().debug("Not Removed: " + nextSetAsList);
}
}
return false;
}
}
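
The thenCompose block above merges the fork results into the main result with a simple preference: an undefined (error) result is replaced by the first defined one, results of equal status are unioned, and a defined result is never overwritten by an undefined one. A standalone sketch of that merge rule, with a stand-in isUndefined predicate replacing typeUnifyTask.isUndefinedPairSetSet (illustrative only):

import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class MergeResultsSketch {
    // stand-in for isUndefinedPairSetSet: here an "undefined" result simply contains "ERROR"
    static boolean isUndefined(Set<String> result) { return result.contains("ERROR"); }

    static Set<String> merge(Set<String> oldResult, List<Set<String>> forkResults) {
        for (Set<String> current : forkResults) {
            boolean oldInvalid = isUndefined(oldResult);
            boolean currentInvalid = isUndefined(current);
            if (!currentInvalid && oldInvalid) {
                oldResult = new HashSet<>(current);    // first valid result replaces the error cases
            } else if (oldInvalid == currentInvalid || oldResult.isEmpty()) {
                oldResult.addAll(current);             // equal status: keep the union
            }                                          // valid old, invalid current: drop current
        }
        return oldResult;
    }

    public static void main(String[] args) {
        Set<String> main = new HashSet<>(Set.of("ERROR"));
        System.out.println(merge(main, List.of(Set.of("a"), Set.of("b")))); // union of the valid forks
    }
}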

View File

@@ -1,112 +0,0 @@
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.util.Tuple;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;
public class Variance0Case extends VarianceCase {
protected final int variance = 0;
protected Variance0Case(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
super(isOderConstraint, typeUnifyTask, context);
}
@Override
public void selectNextData(
TypeUnifyTask typeUnifyTask,
List<Set<UnifyPair>> nextSetAsList,
Optional<UnifyPair> optOrigPair
) {
//if a <. theta then a maximal element is very likely
//if theta <. a then a minimal element is very likely
if (!this.isOderConstraint && optOrigPair != null && optOrigPair.isPresent()) {
if (optOrigPair.get().getBasePair().getLhsType() instanceof PlaceholderType) {
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
} else {
a = typeUnifyTask.oup.min(nextSetAsList.iterator());
}
nextSetAsList.remove(a);
} else if (this.isOderConstraint) {
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
nextSetAsList.remove(a);
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
} else {
a = nextSetAsList.removeFirst();
}
}
@Override
public CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> computeParallel(
Set<Set<UnifyPair>> elems,
Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
IFiniteClosure fc,
int rekTiefe,
Set<UnifyPair> methodSignatureConstraint,
List<Set<UnifyPair>> nextSetAsList,
Set<UnifyPair> sameEqSet,
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> aParDef
) {
elems.add(a); //PL 2019-01-16 does this really need to go here? it is already in line 859; yes, it is needed, see line 859
return typeUnifyTask.unify2(elems, eq, oderConstraints, fc, context.parallel(), rekTiefe, new HashSet<>(methodSignatureConstraint)).thenApply(
unify2Result -> new Tuple<>(unify2Result, new HashSet<>())
);
}
@Override
public void applyComputedResults(
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> currentThreadResult,
Set<UnifyPair> compResult,
Set<UnifyPair> compRes
) {
writeLog("RES var=1 ADD:" + result.toString() + " " + currentThreadResult.toString());
result.addAll(currentThreadResult);
}
@Override
public boolean eraseInvalidSets(
int rekTiefe,
Set<Set<UnifyPair>> aParDef,
List<Set<UnifyPair>> nextSetAsList
) {
if (!this.isOderConstraint) {
return true;
} else {
nextSetAsList.removeAll(nextSetasListOderConstraints);
nextSetasListOderConstraints = new ArrayList<>();
writeLog("Removed: " + nextSetasListOderConstraints);
List<Set<UnifyPair>> smallerSetasList = typeUnifyTask.oup.smallerThan(a, nextSetAsList);
List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
.filter(x -> !((Constraint<UnifyPair>) x).isInherited())
.collect(Collectors.toCollection(ArrayList::new));
List<Set<UnifyPair>> notErased = new ArrayList<>();
notInherited.forEach(x -> notErased.addAll(typeUnifyTask.oup.smallerEqThan(x, smallerSetasList)));
List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList);
erased.removeAll(notErased);
nextSetAsList.removeAll(erased);
writeLog("Removed: " + erased);
writeLog("Not Removed: " + nextSetAsList);
}
return false;
}
}

View File

@@ -5,9 +5,9 @@ import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.util.Logger;
import de.dhbwstuttgart.util.Tuple;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
@@ -17,11 +17,11 @@ public abstract class VarianceCase {
public static VarianceCase createFromVariance(int variance, boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
return switch (variance) {
case 0 -> new Variance0Case(isOderConstraint, typeUnifyTask, context);
case 1 -> new Variance1Case(isOderConstraint, typeUnifyTask, context);
case -1 -> new VarianceM1Case(isOderConstraint, typeUnifyTask, context);
case 2 -> new Variance2Case(isOderConstraint, typeUnifyTask, context);
default -> throw new RuntimeException("Invalid variance: " + variance);
case 0 -> new UnknownVarianceCase(isOderConstraint, typeUnifyTask, context);
case 1 -> new ContravarianceCase(isOderConstraint, typeUnifyTask, context);
case -1 -> new CovarianceCase(isOderConstraint, typeUnifyTask, context);
case 2 -> new InvarianceOrConstraintCase(isOderConstraint, typeUnifyTask, context);
default -> throw new RuntimeException("Invalid variance: " + variance);
};
}
@@ -72,7 +72,7 @@ public abstract class VarianceCase {
/**
*
*/
public abstract CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> computeParallel(
public abstract CompletableFuture<ComputationResults> computeParallel(
Set<Set<UnifyPair>> elems,
Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
@@ -105,8 +105,28 @@ public abstract class VarianceCase {
List<Set<UnifyPair>> nextSetAsList
);
protected void writeLog(String s) {
typeUnifyTask.writeLog(s);
}
/**
* Wrapper class for the parallel computation results
*/
public static class ComputationResults {
public Set<Set<UnifyPair>> mainResult;
public Set<Set<Set<UnifyPair>>> forkResults;
public ComputationResults() {
this(new HashSet<>(), new HashSet<>());
}
public ComputationResults(Set<Set<UnifyPair>> mainResult) {
this(mainResult, new HashSet<>());
}
public ComputationResults(Set<Set<UnifyPair>> mainResult, Set<Set<Set<UnifyPair>>> forkResults) {
this.mainResult = mainResult;
this.forkResults = forkResults;
}
void addForkResult(Set<Set<UnifyPair>> forkResult) {
forkResults.add(forkResult);
}
}
}
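
ComputationResults replaces the earlier Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>> with named fields, so the thenCombine callbacks can say mainResult and addForkResult instead of getFirst()/getSecond(). A tiny usage sketch with String standing in for the unification sets (illustrative only, not the project's types):

import java.util.HashSet;
import java.util.Set;

public class ComputationResultsSketch {
    // same shape as the wrapper above, specialised to String for brevity
    static class Results {
        Set<String> mainResult  = new HashSet<>();
        Set<String> forkResults = new HashSet<>();

        void addForkResult(String forkResult) { forkResults.add(forkResult); }
    }

    public static void main(String[] args) {
        Results r = new Results();
        r.mainResult.add("orig result");
        r.addForkResult("fork 1");
        r.addForkResult("fork 2");
        // named fields document intent better than a positional tuple
        System.out.println(r.mainResult + " / " + r.forkResults);
    }
}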

View File

@@ -12,15 +12,12 @@ import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
public class distributeVariance extends visitUnifyTypeVisitor<Integer> {
public static int inverseVariance(int variance) {
Integer ret = 0;
if (variance == 1) {
ret = -1;
}
if (variance == -1) {
ret = 1;
}
return ret;
public static int inverseVariance(int variance) {
return switch (variance) {
case 1 -> -1;
case -1 -> 1;
default -> 0;
};
}
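
The rewritten inverseVariance keeps the old contract, now stated as one exhaustive switch expression: 1 and -1 swap, everything else maps to 0. A quick standalone check of that mapping:

public class InverseVarianceSketch {
    static int inverseVariance(int variance) {
        return switch (variance) {
            case 1 -> -1;
            case -1 -> 1;
            default -> 0;
        };
    }

    public static void main(String[] args) {
        // prints: -1 -> 1, 0 -> 0, 1 -> -1, 2 -> 0
        for (int v : new int[]{-1, 0, 1, 2}) {
            System.out.println(v + " -> " + inverseVariance(v));
        }
    }
}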
@@ -42,7 +39,7 @@ public class distributeVariance extends visitUnifyTypeVisitor<Integer> {
List<UnifyType> param = new ArrayList<>(funnty.getTypeParams().get().length);
param.addAll(Arrays.asList(funnty.getTypeParams().get()));
UnifyType resultType = param.remove(param.size()-1);
Integer htInverse = inverseVariance(ht);
int htInverse = inverseVariance(ht);
param = param.stream()
.map(x -> x.accept(this, htInverse))
.collect(Collectors.toCollection(ArrayList::new));

View File

@@ -24,9 +24,6 @@ public final class ExtendsType extends WildcardType implements ISerializableData
*/
public ExtendsType(UnifyType extendedType) {
super("? extends " + extendedType.getName(), extendedType);
if (extendedType instanceof ExtendsType) {
System.out.print("");
}
}
/**

View File

@@ -5,7 +5,9 @@ import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTaskHelper;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.util.Logger;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
@@ -42,7 +44,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
final JavaTXCompiler compiler;
final PlaceholderRegistry placeholderRegistry;
Writer logFile;
Logger logger;
/**
* A map that maps every type to the node in the inheritance graph that contains that type.
@@ -73,10 +75,10 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
/**
* Creates a new instance using the inheritance tree defined in the pairs.
*/
public FiniteClosure(Set<UnifyPair> pairs, Writer logFile, JavaTXCompiler compiler, PlaceholderRegistry placeholderRegistry) {
public FiniteClosure(Set<UnifyPair> pairs, Logger logger, JavaTXCompiler compiler, PlaceholderRegistry placeholderRegistry) {
this.compiler = compiler;
this.placeholderRegistry = placeholderRegistry;
this.logFile = logFile;
this.logger = logger;
this.pairs = new HashSet<>(pairs);
inheritanceGraph = new HashMap<UnifyType, Node<UnifyType>>();
@@ -125,7 +127,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
}
// Build the alternative representation with strings as keys
strInheritanceGraph = new HashMap<>();
strInheritanceGraph = TypeUnifyTaskHelper.getPresizedHashMap(inheritanceGraph.size());
for(UnifyType key : inheritanceGraph.keySet()) {
if(!strInheritanceGraph.containsKey(key.getName()))
strInheritanceGraph.put(key.getName(), new HashSet<>());
@@ -134,8 +136,8 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
}
}
public FiniteClosure(Set<UnifyPair> constraints, Writer writer, PlaceholderRegistry placeholderRegistry) {
this(constraints, writer, null, placeholderRegistry);
public FiniteClosure(Set<UnifyPair> constraints, Logger logger, PlaceholderRegistry placeholderRegistry) {
this(constraints, logger, null, placeholderRegistry);
}
void testSmaller() {
@@ -159,7 +161,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
Set<UnifyType> ret;
if ((ret = smallerHash.get(new hashKeyType(type))) != null) {
//System.out.println(greaterHash);
//context.logger().info(greaterHash);
return new HashSet<>(ret);
}
@@ -200,7 +202,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
result.add(new Pair<>(t, fBounded));
}
catch (StackOverflowError e) {
// System.out.println("");
// context.logger().info("");
}
// if C<...> <* C<...> then ... (third case in definition of <*)
@@ -237,9 +239,9 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
result.add(new Pair<>(theta1.apply(sigma), fBounded));
}
}
HashSet<UnifyType> resut = result.stream().map(x -> x.getKey()).collect(Collectors.toCollection(HashSet::new));
if(resut.equals(types.stream().map(x -> x.getKey()).collect(Collectors.toCollection(HashSet::new))))
HashSet<UnifyType> resut = result.stream().map(Pair::getKey).collect(Collectors.toCollection(HashSet::new));
if(resut.equals(types.stream().map(Pair::getKey).collect(Collectors.toCollection(HashSet::new))))
return resut;
return computeSmaller(result);
}
@@ -271,9 +273,9 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
@Override
//Eingefuegt PL 2018-05-24 F-Bounded Problematik
public Set<UnifyType> greater(UnifyType type, Set<UnifyType> fBounded, SourceLoc location) {
Set<UnifyType> ret;
if ((ret = greaterHash.get(new hashKeyType(type))) != null) {
//System.out.println(greaterHash);
Set<UnifyType> ret = greaterHash.get(new hashKeyType(type));
if (ret != null) {
//context.logger().info(greaterHash);
return new HashSet<>(ret);
}
@@ -323,7 +325,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
Set<UnifyType> fBoundedNew = new HashSet<>(fBounded);
fBoundedNew.add(theta1);
Set<UnifyType> theta2Set = candidate.getContentOfPredecessors();
//System.out.println("");
//context.logger().info("");
for(UnifyType theta2 : theta2Set) {
result.add(theta2.apply(sigma));
PairResultFBounded.add(new Pair<>(theta2.apply(sigma), fBoundedNew));
@@ -357,7 +359,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
return ((match.match(termList).isPresent()) || x);
};
//if (parai.getName().equals("java.lang.Integer")) {
// System.out.println("");
// context.logger().info("");
//}
BinaryOperator<Boolean> bo = (a,b) -> (a || b);
if (lfBounded.stream().reduce(false,f,bo)) {
@@ -370,7 +372,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
}
}
permuteParams(paramCandidates).forEach(x -> result.add(t.setTypeParams(x)));
//System.out.println("");
//context.logger().info("");
}
}
@@ -430,7 +432,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
return ((match.match(termList).isPresent()) || x);
};
if (parai.getName().equals("java.lang.Integer")) {
System.out.println("");
context.logger().info("");
}
BinaryOperator<Boolean> bo = (a,b) -> (a || b);
if (fBounded.stream().reduce(false,f,bo)) continue; //F-Bounded Endlosrekursion
@@ -469,7 +471,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
}
HashSet<UnifyType> resut = result.stream().map(x -> x.getKey()).collect(Collectors.toCollection(HashSet::new));
System.out.println(resut);
context.logger().info(resut);
if(resut.equals(types.stream().map(x -> x.getKey()).collect(Collectors.toCollection(HashSet::new))))
return resut;
return computeGreater(result);
@@ -505,35 +507,42 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
public Set<UnifyType> grArg(ReferenceType type, Set<UnifyType> fBounded) {
Set<UnifyType> result = new HashSet<UnifyType>();
result.add(type);
smaller(type, fBounded).forEach(x -> result.add(new SuperType(x)));
greater(type,fBounded).forEach(x -> result.add(new ExtendsType(x)));
smaller(type, fBounded).forEach(x -> result.add(new SuperType(x)));
greater(type,fBounded).forEach(x -> result.add(new ExtendsType(x)));
return result;
}
@Override
public Set<UnifyType> grArg(FunNType type, Set<UnifyType> fBounded) {
Set<UnifyType> result = new HashSet<UnifyType>();
Set<UnifyType> smaller = smaller(type, fBounded);
Set<UnifyType> greater = greater(type, fBounded);
Set<UnifyType> result = new HashSet<UnifyType>((int)((1 + smaller.size() + greater.size()) * 1.5));
result.add(type);
smaller(type, fBounded).forEach(x -> result.add(new SuperType(x)));
greater(type, fBounded).forEach(x -> result.add(new ExtendsType(x)));
smaller.forEach(x -> result.add(new SuperType(x)));
greater.forEach(x -> result.add(new ExtendsType(x)));
return result;
}
@Override
public Set<UnifyType> grArg(ExtendsType type, Set<UnifyType> fBounded) {
Set<UnifyType> result = new HashSet<UnifyType>();
result.add(type);
UnifyType t = type.getExtendedType();
greater(t, fBounded).forEach(x -> result.add(new ExtendsType(x)));
Set<UnifyType> greater = greater(t, fBounded);
Set<UnifyType> result = new HashSet<UnifyType>((int)((1 + greater.size()) * 1.5));
result.add(type);
greater.forEach(x -> result.add(new ExtendsType(x)));
return result;
}
@Override
public Set<UnifyType> grArg(SuperType type, Set<UnifyType> fBounded) {
Set<UnifyType> result = new HashSet<UnifyType>();
result.add(type);
UnifyType t = type.getSuperedType();
smaller(t, fBounded).forEach(x -> result.add(new SuperType(x)));
Set<UnifyType> smaller = smaller(t, fBounded);
Set<UnifyType> result = TypeUnifyTaskHelper.getPresizedHashSet(1 + smaller.size());
result.add(type);
smaller.forEach(x -> result.add(new SuperType(x)));
return result;
}
@@ -565,29 +574,33 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
@Override
public Set<UnifyType> smArg(ExtendsType type, Set<UnifyType> fBounded) {
Set<UnifyType> result = new HashSet<UnifyType>();
result.add(type);
UnifyType t = type.getExtendedType();
result.add(t);
smaller(t, fBounded).forEach(x -> {
result.add(new ExtendsType(x));
result.add(x);
});
Set<UnifyType> smaller = smaller(t, fBounded);
Set<UnifyType> result = TypeUnifyTaskHelper.getPresizedHashSet(2 * (1 + smaller.size()));
result.add(type);
result.add(t);
smaller.forEach(x -> {
result.add(new ExtendsType(x));
result.add(x);
});
return result;
}
@Override
public Set<UnifyType> smArg(SuperType type, Set<UnifyType> fBounded) {
Set<UnifyType> result = new HashSet<UnifyType>();
result.add(type);
UnifyType t = type.getSuperedType();
result.add(t);
//*** WARNING this could be WRONG PL 2018-05-23 possibly pass the HashSet through smArg
greater(t, fBounded).forEach(x -> {
result.add(new SuperType(x));
result.add(x);
});
Set<UnifyType> greater = greater(t, fBounded);
Set<UnifyType> result = TypeUnifyTaskHelper.getPresizedHashSet(2 * (1 + greater.size()));
result.add(type);
result.add(t);
//*** WARNING this could be WRONG PL 2018-05-23 possibly pass the HashSet through smArg
greater.forEach(x -> {
result.add(new SuperType(x));
result.add(x);
});
return result;
}
@@ -602,7 +615,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
public Set<UnifyType> getAllTypesByName(String typeName) {
if(!strInheritanceGraph.containsKey(typeName))
return new HashSet<>();
return strInheritanceGraph.get(typeName).stream().map(x -> x.getContent()).collect(Collectors.toCollection(HashSet::new));
return strInheritanceGraph.get(typeName).stream().map(Node::getContent).collect(Collectors.toCollection(HashSet::new));
}
@Override
@@ -692,9 +705,9 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
*/
public int compare (UnifyType left, UnifyType right, PairOperator pairop, UnifyContext context) {
try {logFile.write("left: "+ left + " right: " + right + " pairop: " + pairop +"\n");} catch (IOException ie) {}
logger.debug(() -> "left: "+ left + " right: " + right + " pairop: " + pairop +"\n");
// if (left.getName().equals("Matrix") || right.getName().equals("Matrix"))
// System.out.println("");
// context.logger().info("");
/*
pairop = PairOperator.SMALLERDOTWC;
List<UnifyType> al = new ArrayList<>();
@@ -751,13 +764,8 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
Set<UnifyPair> smallerRes = unifyTask.applyTypeUnificationRules(hs, this);
//if (left.getName().equals("Vector") || right.getName().equals("AbstractList"))
{try {
logFile.write("\nsmallerRes: " + smallerRes);//"smallerHash: " + greaterHash.toString());
// logFile.flush();
}
catch (IOException e) {
System.err.println("no LogFile");}}
logger.debug(() -> "\nsmallerRes: " + smallerRes);//"smallerHash: " + greaterHash.toString());
//Equations of the form a <./=. Theta or Theta <./=. a or a <./=. b are ok.
Predicate<UnifyPair> delFun = x -> !((x.getLhsType() instanceof PlaceholderType ||
x.getRhsType() instanceof PlaceholderType)
@@ -765,12 +773,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
((WildcardType)x.getLhsType()).getWildcardedType().equals(x.getRhsType()))
);
long smallerLen = smallerRes.stream().filter(delFun).count();
try {
logFile.write("\nsmallerLen: " + smallerLen +"\n");
// logFile.flush();
}
catch (IOException e) {
System.err.println("no LogFile");}
logger.debug(() -> "\nsmallerLen: " + smallerLen +"\n");
if (smallerLen == 0) return -1;
else {
up = new UnifyPair(right, left, pairop);
@@ -780,13 +783,8 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
Set<UnifyPair> greaterRes = unifyTask.applyTypeUnificationRules(hs, this);
//if (left.getName().equals("Vector") || right.getName().equals("AbstractList"))
{try {
logFile.write("\ngreaterRes: " + greaterRes);//"smallerHash: " + greaterHash.toString());
// logFile.flush();
}
catch (IOException e) {
System.err.println("no LogFile");}}
logger.debug(() -> "\ngreaterRes: " + greaterRes);//"smallerHash: " + greaterHash.toString());
//Equations of the form a <./=. Theta or Theta <./=. a or a <./=. b are ok.
long greaterLen = greaterRes.stream().filter(delFun).count();
if (greaterLen == 0) return 1;
@@ -807,7 +805,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
var pairList = data.getList("pairs").assertListOfUUIDs();
Set<UnifyPair> pairs = pairList.stream()
.map(pairData -> UnifyPair.fromSerial(pairData, context, keyStorage)).collect(Collectors.toSet());
return new FiniteClosure(pairs, context.logFile(), context.placeholderRegistry());
return new FiniteClosure(pairs, context.logger(), context.placeholderRegistry());
}
}

View File

@@ -81,7 +81,7 @@ public class FunNType extends UnifyType implements ISerializableData {
}
@Override
public Boolean wrongWildcard() {
public boolean wrongWildcard() {
return (new ArrayList<UnifyType>(Arrays.asList(getTypeParams()
.get())).stream().filter(x -> (x instanceof WildcardType)).findFirst().isPresent());
}

View File

@@ -48,12 +48,7 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOT, context);
}}
catch (ClassCastException e) {
try {
((FiniteClosure)fc).logFile.write("ClassCastException: " + left.toString() + " " + left.getGroundBasePair() + "\n\n");
// ((FiniteClosure)fc).logFile.flush();
}
catch (IOException ie) {
}
((FiniteClosure)fc).logger.debug(() -> "ClassCastException: " + left.toString() + " " + left.getGroundBasePair() + "\n\n");
return -99;
}
}
@@ -61,7 +56,7 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
/*
public int compareEq (UnifyPair left, UnifyPair right) {
if (left == null || right == null)
System.out.println("Fehler");
context.logger().info("Fehler");
if (left.getLhsType() instanceof PlaceholderType) {
return fc.compare(left.getRhsType(), right.getRhsType(), left.getPairOp());
}
@@ -82,12 +77,12 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
&& (((ExtendsType)right).getExtendedType().getName().equals("java.util.Vector"))
&& (((ReferenceType)((ExtendsType)right).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType)))
{
// System.out.println("");
// context.logger().info("");
}
if (((right instanceof SuperType) && (((SuperType)right).getSuperedType().getName().equals("java.lang.Object")))
||((left instanceof SuperType) && (((SuperType)left).getSuperedType().getName().equals("java.lang.Object"))))
{
// System.out.println("");
// context.logger().info("");
}
}
else {
@@ -109,11 +104,11 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
&& (((ExtendsType)right).getExtendedType().getName().equals("java.util.Vector"))
&& (((ReferenceType)((ExtendsType)right).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType)))
{
// System.out.println("");
// context.logger().info("");
}
if (right instanceof SuperType)
{
// System.out.println("");
// context.logger().info("");
}
}
else {
@@ -175,85 +170,81 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
left.add(p2);
left.add(p4);
*/
Set<UnifyPair> lefteq = left.stream()
.filter(x -> (x.getLhsType() instanceof PlaceholderType && x.getPairOp() == PairOperator.EQUALSDOT))
.collect(Collectors.toCollection(HashSet::new));
Set<UnifyPair> righteq = right.stream()
.filter(x -> (x.getLhsType() instanceof PlaceholderType && x.getPairOp() == PairOperator.EQUALSDOT))
.collect(Collectors.toCollection(HashSet::new));
Set<UnifyPair> leftle = left.stream()
.filter(x -> ((x.getLhsType() instanceof PlaceholderType || x.getRhsType() instanceof PlaceholderType)
&& x.getPairOp() == PairOperator.SMALLERDOT))
.collect(Collectors.toCollection(HashSet::new));
Set<UnifyPair> rightle = right.stream()
.filter(x -> ((x.getLhsType() instanceof PlaceholderType || x.getRhsType() instanceof PlaceholderType)
&& x.getPairOp() == PairOperator.SMALLERDOT))
.collect(Collectors.toCollection(HashSet::new));
Set<UnifyPair> leftlewc = left.stream()
.filter(x -> ((x.getLhsType() instanceof PlaceholderType || x.getRhsType() instanceof PlaceholderType)
&& x.getPairOp() == PairOperator.SMALLERDOTWC))
.collect(Collectors.toCollection(HashSet::new));
Set<UnifyPair> rightlewc = right.stream()
.filter(x -> ((x.getLhsType() instanceof PlaceholderType || x.getRhsType() instanceof PlaceholderType)
&& x.getPairOp() == PairOperator.SMALLERDOTWC))
.collect(Collectors.toCollection(HashSet::new));
//System.out.println(left.toString());
Set<UnifyPair> lefteq = new HashSet<>();
Set<UnifyPair> leftle = new HashSet<>();
Set<UnifyPair> leftlewc = new HashSet<>();
Set<UnifyPair> lefteqOder = new HashSet<>();
Set<UnifyPair> lefteqRet = new HashSet<>();
Set<UnifyPair> leftleOder = new HashSet<>();
for (var x : left) {
boolean isLeftPlaceholder = x.getLhsType() instanceof PlaceholderType;
boolean isRightPlaceholder = x.getRhsType() instanceof PlaceholderType;
boolean hasPlaceholder = isLeftPlaceholder || isRightPlaceholder;
if (isLeftPlaceholder && x.getPairOp() == PairOperator.EQUALSDOT) lefteq.add(x);
if (hasPlaceholder && x.getPairOp() == PairOperator.SMALLERDOT) leftle.add(x);
if (hasPlaceholder && x.getPairOp() == PairOperator.SMALLERDOTWC) leftlewc.add(x);
UnifyPair y = x.getGroundBasePair();
boolean isBasePairLeftPlaceholder = y.getLhsType() instanceof PlaceholderType;
boolean isBasePairRightPlaceholder = y.getRhsType() instanceof PlaceholderType;
if (isBasePairLeftPlaceholder && !isBasePairRightPlaceholder && x.getPairOp() == PairOperator.EQUALSDOT) {
lefteqOder.add(x);
}
else if (isBasePairRightPlaceholder && ((PlaceholderType)y.getRhsType()).getOrCons() == (byte)-1) {
lefteqRet.add(x);
}
else if (x.getPairOp() == PairOperator.SMALLERDOT) {
leftleOder.add(x);
}
}
Set<UnifyPair> righteq = new HashSet<>();
Set<UnifyPair> rightle = new HashSet<>();
Set<UnifyPair> rightlewc = new HashSet<>();
Set<UnifyPair> righteqOder = new HashSet<>();
Set<UnifyPair> righteqRet = new HashSet<>();
Set<UnifyPair> rightleOder = new HashSet<>();
for (var x : right) {
boolean isLeftPlaceholder = x.getLhsType() instanceof PlaceholderType;
boolean isRightPlaceholder = x.getRhsType() instanceof PlaceholderType;
boolean hasPlaceholder = isLeftPlaceholder || isRightPlaceholder;
if (isLeftPlaceholder && x.getPairOp() == PairOperator.EQUALSDOT) righteq.add(x);
if (hasPlaceholder && x.getPairOp() == PairOperator.SMALLERDOT) rightle.add(x);
if (hasPlaceholder && x.getPairOp() == PairOperator.SMALLERDOTWC) rightlewc.add(x);
UnifyPair y = x.getGroundBasePair();
boolean isBasePairLeftPlaceholder = y.getLhsType() instanceof PlaceholderType;
boolean isBasePairRightPlaceholder = y.getRhsType() instanceof PlaceholderType;
if (isBasePairLeftPlaceholder && !isBasePairRightPlaceholder && x.getPairOp() == PairOperator.EQUALSDOT) {
righteqOder.add(x);
}
else if (isBasePairRightPlaceholder && ((PlaceholderType)y.getRhsType()).getOrCons() == (byte)-1) {
righteqRet.add(x);
}
else if (x.getPairOp() == PairOperator.SMALLERDOT) {
rightleOder.add(x);
}
}
//context.logger().info(left.toString());
//Case 2
//if (lefteq.iterator().next().getLhsType().getName().equals("AJO")) {
// System.out.print("");
//}
//ODER-CONSTRAINT
Set<UnifyPair> leftBase = left.stream().map(x -> x.getGroundBasePair()).collect(Collectors.toCollection(HashSet::new));
Set<UnifyPair> rightBase = right.stream().map(x -> x.getGroundBasePair()).collect(Collectors.toCollection(HashSet::new));
// Set<UnifyPair> leftBase = left.stream().map(x -> x.getGroundBasePair()).collect(Collectors.toCollection(HashSet::new));
// Set<UnifyPair> rightBase = right.stream().map(x -> x.getGroundBasePair()).collect(Collectors.toCollection(HashSet::new));
Set<UnifyPair> lefteqOder = left.stream()
.filter(x -> { UnifyPair y = x.getGroundBasePair();
/*try {
((FiniteClosure)fc).logFile.write("leftBase: " + leftBase.toString() +"\n");
((FiniteClosure)fc).logFile.write("rightBase: " + rightBase.toString() +"\n\n");
((FiniteClosure)fc).logFile.write("left: " + left.toString() +"\n");
((FiniteClosure)fc).logFile.write("right: " + right.toString() +"\n\n");
((FiniteClosure)fc).logFile.write("y: " + y.toString() +"\n");
((FiniteClosure)fc).logFile.write("y.getLhsType() : " + y.getLhsType() .toString() +"\n\n");
((FiniteClosure)fc).logFile.write("y.getRhsType(): " + y.getRhsType().toString() +"\n");
((FiniteClosure)fc).logFile.write("x.getPairOp(): " + x.getPairOp().toString() +"\n\n");
}
catch (IOException ie) {
} */
return (y.getLhsType() instanceof PlaceholderType &&
!(y.getRhsType() instanceof PlaceholderType) &&
x.getPairOp() == PairOperator.EQUALSDOT);})
.collect(Collectors.toCollection(HashSet::new));
left.removeAll(lefteqOder);
Set<UnifyPair> righteqOder = right.stream()
.filter(x -> { UnifyPair y = x.getGroundBasePair();
return (y.getLhsType() instanceof PlaceholderType &&
!(y.getRhsType() instanceof PlaceholderType) &&
x.getPairOp() == PairOperator.EQUALSDOT);})
.collect(Collectors.toCollection(HashSet::new));
right.removeAll(righteqOder);
Set<UnifyPair> lefteqRet = left.stream()
.filter(x -> { UnifyPair y = x.getGroundBasePair();
return (y.getRhsType() instanceof PlaceholderType &&
((PlaceholderType)y.getRhsType()).getOrCons() == (byte)-1);})
.collect(Collectors.toCollection(HashSet::new));
left.removeAll(lefteqRet);
Set<UnifyPair> righteqRet = right.stream()
.filter(x -> { UnifyPair y = x.getGroundBasePair();
return (y.getRhsType() instanceof PlaceholderType &&
((PlaceholderType)y.getRhsType()).getOrCons() == (byte)-1);})
.collect(Collectors.toCollection(HashSet::new));
right.removeAll(righteqOder);
right.removeAll(righteqRet);
Set<UnifyPair> leftleOder = left.stream()
.filter(x -> (x.getPairOp() == PairOperator.SMALLERDOT))
.collect(Collectors.toCollection(HashSet::new));
Set<UnifyPair> rightleOder = right.stream()
.filter(x -> (x.getPairOp() == PairOperator.SMALLERDOT))
.collect(Collectors.toCollection(HashSet::new));
/*
synchronized(this) {
try {
@@ -275,89 +266,73 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
*/
Integer compareEq;
int compareEq;
if (lefteqOder.size() == 1 && righteqOder.size() == 1 && lefteqRet.size() == 1 && righteqRet.size() == 1) {
Match m = new Match();
if ((compareEq = compareEq(lefteqOder.iterator().next().getGroundBasePair(), righteqOder.iterator().next().getGroundBasePair())) == -1) {
ArrayList<UnifyPair> matchList =
rightleOder.stream().map(x -> {
UnifyPair leftElem = leftleOder.stream()
compareEq = compareEq(lefteqOder.iterator().next().getGroundBasePair(), righteqOder.iterator().next().getGroundBasePair());
if (compareEq == -1) {
ArrayList<UnifyPair> matchList =
rightleOder.stream().map(x -> {
UnifyPair leftElem = leftleOder.stream()
.filter(y -> y.getGroundBasePair().getLhsType().equals(x.getGroundBasePair().getLhsType()))
.findAny().get();
return new UnifyPair(x.getRhsType(), leftElem.getRhsType(), PairOperator.EQUALSDOT);})
.collect(Collectors.toCollection(ArrayList::new));
if (m.match(matchList).isPresent()) {
//try { ((FiniteClosure)fc).logFile.write("result1: -1 \n\n"); } catch (IOException ie) {}
return -1;
}
else {
//try { ((FiniteClosure)fc).logFile.write("result1: 0 \n\n"); } catch (IOException ie) {}
return 0;
}
.findAny().orElseThrow();
return new UnifyPair(x.getRhsType(), leftElem.getRhsType(), PairOperator.EQUALSDOT);}
)
.collect(Collectors.toCollection(ArrayList::new));
return (m.match(matchList).isPresent()) ? -1 : 0;
} else if (compareEq == 1) {
ArrayList<UnifyPair> matchList =
leftleOder.stream().map(x -> {
ArrayList<UnifyPair> matchList =
leftleOder.stream().map(x -> {
UnifyPair rightElem = rightleOder.stream()
.filter(y ->
y.getGroundBasePair().getLhsType().equals(x.getGroundBasePair().getLhsType()))
.findAny().get();
return new UnifyPair(x.getRhsType(), rightElem.getRhsType(), PairOperator.EQUALSDOT);})
.collect(Collectors.toCollection(ArrayList::new));
if (m.match(matchList).isPresent()) {
//try { ((FiniteClosure)fc).logFile.write("result2: 1 \n\n"); } catch (IOException ie) {}
return 1;
}
else {
//try { ((FiniteClosure)fc).logFile.write("result2: 0 \n\n"); } catch (IOException ie) {}
return 0;
}
.filter(y -> y.getGroundBasePair().getLhsType().equals(x.getGroundBasePair().getLhsType()))
.findAny().orElseThrow();
return new UnifyPair(x.getRhsType(), rightElem.getRhsType(), PairOperator.EQUALSDOT);}
)
.collect(Collectors.toCollection(ArrayList::new));
return (m.match(matchList).isPresent()) ? 1 : 0;
} else {
/*
synchronized(this) {
try {
((FiniteClosure)fc).logFile.write("leftBase: " + leftBase.toString() +"\n");
((FiniteClosure)fc).logFile.write("rightBase: " + rightBase.toString() +"\n\n");
((FiniteClosure)fc).logFile.write("left: " + left.toString() +"\n");
((FiniteClosure)fc).logFile.write("right: " + right.toString() +"\n\n");
((FiniteClosure)fc).logFile.write("lefteqOder: " + lefteqOder.toString() +"\n");
((FiniteClosure)fc).logFile.write("righteqOder: " + righteqOder.toString() +"\n\n");
((FiniteClosure)fc).logFile.write("lefteqRet: " + lefteqRet.toString() +"\n");
((FiniteClosure)fc).logFile.write("righteqRet: " + righteqRet.toString() +"\n\n");
((FiniteClosure)fc).logFile.write("leftleOder: " + leftleOder.toString() +"\n");
((FiniteClosure)fc).logFile.write("rightleOder: " + rightleOder.toString() +"\n\n");
((FiniteClosure)fc).logFile.write("result3: 0 \n\n");
((FiniteClosure)fc).logFile.flush();
}
catch (IOException ie) {
}
}
((FiniteClosure)fc).logFile.write("leftBase: " + leftBase.toString() +"\n");
((FiniteClosure)fc).logFile.write("rightBase: " + rightBase.toString() +"\n\n");
((FiniteClosure)fc).logFile.write("left: " + left.toString() +"\n");
((FiniteClosure)fc).logFile.write("right: " + right.toString() +"\n\n");
((FiniteClosure)fc).logFile.write("lefteqOder: " + lefteqOder.toString() +"\n");
((FiniteClosure)fc).logFile.write("righteqOder: " + righteqOder.toString() +"\n\n");
((FiniteClosure)fc).logFile.write("lefteqRet: " + lefteqRet.toString() +"\n");
((FiniteClosure)fc).logFile.write("righteqRet: " + righteqRet.toString() +"\n\n");
((FiniteClosure)fc).logFile.write("leftleOder: " + leftleOder.toString() +"\n");
((FiniteClosure)fc).logFile.write("rightleOder: " + rightleOder.toString() +"\n\n");
((FiniteClosure)fc).logFile.write("result3: 0 \n\n");
((FiniteClosure)fc).logFile.flush();
*/
return 0;
}
}
if (lefteq.size() == 1 && lefteq.iterator().next().getRhsType() instanceof ExtendsType && leftle.size() == 1 && righteq.size() == 0 && rightle.size() == 1) {
if (lefteq.size() == 1 && lefteq.iterator().next().getRhsType() instanceof ExtendsType && leftle.size() == 1 && righteq.isEmpty() && rightle.size() == 1) {
return 1;
}
//Case 2
if (lefteq.size() == 0 && leftle.size() == 1 && righteq.size() == 1 && righteq.iterator().next().getRhsType() instanceof ExtendsType && rightle.size() == 1) {
if (lefteq.isEmpty() && leftle.size() == 1 && righteq.size() == 1 && righteq.iterator().next().getRhsType() instanceof ExtendsType && rightle.size() == 1) {
return -1;
}
//Case 3
if (lefteq.size() == 1 && lefteq.iterator().next().getRhsType() instanceof SuperType && leftle.size() == 1 && righteq.size() == 0 && rightle.size() == 1) {
if (lefteq.size() == 1 && lefteq.iterator().next().getRhsType() instanceof SuperType && leftle.size() == 1 && righteq.isEmpty() && rightle.size() == 1) {
return -1;
}
//Case 3
if (lefteq.size() == 0 && leftle.size() == 1 && righteq.size() == 1 && righteq.iterator().next().getRhsType() instanceof SuperType && rightle.size() == 1) {
if (lefteq.isEmpty() && leftle.size() == 1 && righteq.size() == 1 && righteq.iterator().next().getRhsType() instanceof SuperType && rightle.size() == 1) {
return 1;
}
//Case 5
if (lefteq.size() == 1 && leftle.size() == 0 && righteq.size() == 1 && rightle.size() == 1) {
if (lefteq.size() == 1 && leftle.isEmpty() && righteq.size() == 1 && rightle.size() == 1) {
return -1;
}
//Case 5
if (lefteq.size() == 1 && leftle.size() == 1 && righteq.size() == 1 && rightle.size() == 0) {
if (lefteq.size() == 1 && leftle.size() == 1 && righteq.size() == 1 && rightle.isEmpty()) {
return 1;
}
//Case 5
@@ -365,7 +340,7 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
return 0;
}
// Only pairs a =. Theta
if (leftle.size() == 0 && rightle.size() == 0 && leftlewc.size() == 0 && rightlewc.size() ==0) {
if (leftle.isEmpty() && rightle.isEmpty() && leftlewc.isEmpty() && rightlewc.isEmpty()) {
Stream<UnifyPair> lseq = lefteq.stream(); //left.filter(x -> (x.getLhsType() instanceof PlaceholderType && x.getPairOp() == PairOperator.EQUALSDOT));
Stream<UnifyPair> rseq = righteq.stream(); //right.filter(x -> (x.getLhsType() instanceof PlaceholderType && x.getPairOp() == PairOperator.EQUALSDOT));
BinaryOperator<HashMap<UnifyType,UnifyPair>> combiner = (x,y) -> { x.putAll(y); return x;};
@@ -373,13 +348,10 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
lseq = lseq.filter(x -> !(hm.get(x.getLhsType()) == null));//CHECK THIS AGAIN!!!!
lseq = lseq.filter(x -> !x.equals(hm.get(x.getLhsType()))); //elements that are equal do not need to be compared
Optional<Integer> si = lseq.map(x -> compareEq(x, hm.get(x.getLhsType()))).reduce((x,y)-> { if (x == y) return x; else return 0; } );
if (!si.isPresent()) return 0;
else return si.get();
return si.orElse(0);
}
//Case 1 and 4
if (lefteq.size() >= 1 && righteq.size() >= 1 && (leftlewc.size() > 0 || rightlewc.size() > 0)) {
if (lefteq.iterator().next().getLhsType().getName().equals("D"))
System.out.print("");
if (!lefteq.isEmpty() && !righteq.isEmpty() && (!leftlewc.isEmpty() || !rightlewc.isEmpty())) {
//Set<PlaceholderType> varsleft = lefteq.stream().map(x -> (PlaceholderType)x.getLhsType()).collect(Collectors.toCollection(HashSet::new));
//Set<PlaceholderType> varsright = righteq.stream().map(x -> (PlaceholderType)x.getLhsType()).collect(Collectors.toCollection(HashSet::new));
//filter out the pair a = Theta that was generated from a <. Theta' (only relevant in case 1); other substitutions are filtered out
@@ -400,12 +372,12 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
//TODO: For wildcards, the wrong compare is called here PL 18-04-20
Pair<Integer, Set<UnifyPair>> int_Unifier = compare(lseq.getRhsType(), rseq.getRhsType());
Unifier uni = new Unifier();
int_Unifier.getValue().get().forEach(x -> uni.add((PlaceholderType) x.getLhsType(), x.getRhsType()));
if (!lseq.getRhsType().getName().equals(rseq.getRhsType().getName())
|| leftlewc.size() == 0 || rightlewc.size() == 0) return int_Unifier.getKey();
int_Unifier.getValue().orElseThrow().forEach(x -> uni.add((PlaceholderType) x.getLhsType(), x.getRhsType()));
if (!lseq.getRhsType().getName().equals(rseq.getRhsType().getName())
|| leftlewc.isEmpty() || rightlewc.isEmpty()) return int_Unifier.getKey();
else {
Set <UnifyPair> lsleuni = leftlewc.stream().map(x -> uni.apply(x)).collect(Collectors.toCollection(HashSet::new));
Set <UnifyPair> rsleuni = rightlewc.stream().map(x -> uni.apply(x)).collect(Collectors.toCollection(HashSet::new));
Set <UnifyPair> lsleuni = leftlewc.stream().map(uni::apply).collect(Collectors.toCollection(HashSet::new));
Set <UnifyPair> rsleuni = rightlewc.stream().map(uni::apply).collect(Collectors.toCollection(HashSet::new));
BinaryOperator<HashMap<UnifyType,UnifyPair>> combiner = (x,y) -> { x.putAll(y); return x;};
HashMap<UnifyType,UnifyPair> hm;
@@ -422,11 +394,11 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
Stream<UnifyPair> lslewcstr = lsleuni.stream().filter(x -> !(hm.get(x.getRhsType()) == null));
si = lslewcstr.map(x -> fc.compare(x.getLhsType(), hm.get(x.getRhsType()).getLhsType(), PairOperator.SMALLERDOTWC, context)).reduce((x,y)-> { if (x == y) return x; else return 0; } );
}
if (!si.isPresent()) return 0;
else return si.get();
return si.orElse(0);
}
} else {
if (leftlewc.size() > 0) {
if (!leftlewc.isEmpty()) {
/*
Set<UnifyPair> subst;
subst = leftlewc.stream().map(x -> {
if (x.getLhsType() instanceof PlaceholderType) {
@@ -435,6 +407,7 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
else {
return new UnifyPair(x.getRhsType(), x.getLhsType(), PairOperator.EQUALSDOT);
}}).collect(Collectors.toCollection(HashSet::new));
*/
Unifier uni = new Unifier();
lseq = uni.apply(lseq);
}
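
The refactor above replaces the many per-bucket stream().filter(...) passes over left and right with one loop per set that drops each UnifyPair into all of its buckets in a single pass. A minimal, self-contained sketch of that classification pattern, using plain integers and made-up predicates instead of the JavaTX types:

import java.util.HashSet;
import java.util.List;
import java.util.Set;

class SinglePassClassification {
    public static void main(String[] args) {
        List<Integer> input = List.of(1, 2, 3, 4, 5, 6);

        // one pass fills every bucket, instead of re-filtering the collection once per bucket
        Set<Integer> even = new HashSet<>();
        Set<Integer> odd = new HashSet<>();
        Set<Integer> divisibleByThree = new HashSet<>();
        for (int x : input) {
            if (x % 2 == 0) even.add(x); else odd.add(x);
            if (x % 3 == 0) divisibleByThree.add(x);
        }
        System.out.println(even + " " + odd + " " + divisibleByThree);
    }
}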

View File

@@ -134,15 +134,15 @@ public final class PlaceholderType extends UnifyType{
wildcardable = true;
}
public void setWildcardtable(Boolean wildcardable) {
public void setWildcardtable(boolean wildcardable) {
this.wildcardable = wildcardable;
}
public Boolean isInnerType() {
public boolean isInnerType() {
return innerType;
}
public void setInnerType(Boolean innerType) {
public void setInnerType(boolean innerType) {
this.innerType = innerType;
}

View File

@@ -32,7 +32,7 @@ public class ReferenceType extends UnifyType implements ISerializableData {
return visitor.visit(this, ht);
}
public ReferenceType(String name, Boolean genericTypeVar) {
public ReferenceType(String name, boolean genericTypeVar) {
super(name, new TypeParams());
hashCode = 31 + 17 * typeName.hashCode() + 17 * typeParams.hashCode();
this.genericTypeVar = genericTypeVar;

View File

@@ -50,8 +50,7 @@ public class Unifier implements Function<UnifyType, UnifyType>, Iterable<Entry<P
Unifier tempU = new Unifier(source, target);
// Every new substitution must be applied to previously added substitutions
// otherwise the unifier needs to be applied multiple times to unify two terms
for(PlaceholderType pt : substitutions.keySet())
substitutions.put(pt, substitutions.get(pt).apply(tempU));
substitutions.replaceAll((pt, ut) -> ut.apply(tempU));
substitutions.put(source, target);
}
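
The Unifier.add hunk above swaps the keySet loop for Map.replaceAll, so every previously stored target type has the new substitution applied to it before the new mapping is inserted. A minimal sketch of that composition step on a plain String map (hypothetical names, not the JavaTX Unifier API):

import java.util.HashMap;
import java.util.Map;

class SubstitutionComposition {
    public static void main(String[] args) {
        // existing substitution: a -> List<b>
        Map<String, String> substitutions = new HashMap<>();
        substitutions.put("a", "List<b>");

        // the new substitution b -> Integer must also be applied to earlier targets,
        // otherwise applying the unifier once would still leave a free 'b'
        String source = "b";
        String target = "Integer";
        substitutions.replaceAll((variable, type) -> type.replace(source, target));
        substitutions.put(source, target);

        System.out.println(substitutions); // {a=List<Integer>, b=Integer}
    }
}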

View File

@@ -201,7 +201,7 @@ public class UnifyPair implements IConstraintElement, ISerializableData {
}
public Boolean wrongWildcard() {
public boolean wrongWildcard() {
return lhs.wrongWildcard() || rhs.wrongWildcard();
}

View File

@@ -103,7 +103,7 @@ public abstract class UnifyType implements ISerializableData {
return new ArrayList<>(typeParams.getInvolvedPlaceholderTypes());
}
public Boolean wrongWildcard() {//default
public boolean wrongWildcard() {//default
return false;
}

View File

@@ -42,7 +42,7 @@ public abstract class WildcardType extends UnifyType implements ISerializableDat
}
@Override
public Boolean wrongWildcard () {//This is an error
public boolean wrongWildcard () {//This is an error
return (wildcardedType instanceof WildcardType);
}

View File

@@ -1,9 +1,338 @@
package de.dhbwstuttgart.util;
import com.diogonunes.jcolor.Attribute;
import de.dhbwstuttgart.core.ConsoleInterface;
import de.dhbwstuttgart.server.SocketServer;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.nio.file.Path;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Arrays;
import java.util.Objects;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import static com.diogonunes.jcolor.Ansi.colorize;
public class Logger {
public static void print(String s) {
System.out.println(s);
public static final Logger NULL_LOGGER = new NullLogger();
private static final DateTimeFormatter timeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
protected final Writer writer;
protected final String prefix;
public Logger() {
this(null, "");
}
}
public Logger(String prefix) {
this(null, prefix);
}
public Logger(Writer writer, String prefix) {
this.prefix = (Objects.equals(prefix, "")) ? "" : "["+prefix+"] ";
this.writer = writer;
}
/**
* Create a new logger object from the path provided
*
* @param filePath The path to the output file. Recommended file extension ".log"
* @param prefix The prefix that is prepended to every message of this logger
* @return The Logger object for this output file
*/
public static Logger forFile(String filePath, String prefix) {
File file = Path.of(filePath).toFile();
try {
Writer fileWriter = new FileWriter(file);
return new Logger(fileWriter, prefix);
}
catch (IOException exception) {
throw new RuntimeException("Failed to created Logger for file " + filePath, exception);
}
}
/**
* Create a new logger object that inherits the writer of the given logger object
*
* @param logger The old logger object that will provide the writer
* @param newPrefix The prefix for the new logger object
* @return The new logger object that reuses the given logger's writer
*/
public static Logger inherit(Logger logger, String newPrefix) {
return new Logger(logger.writer, newPrefix);
}
/**
* Tint the prefix in the color of the logLevel
* @param logLevel The logLevel to set the tint to
* @return The tinted string (using ANSI sequences)
*/
protected String getPrefix(LogLevel logLevel) {
String fullPrefix = prefix + "[" + logLevel + "] ";
return switch (logLevel) {
case DEBUG -> colorize(fullPrefix, Attribute.BRIGHT_MAGENTA_TEXT());
case INFO -> colorize(fullPrefix, Attribute.BLUE_TEXT());
case WARNING -> colorize(fullPrefix, Attribute.YELLOW_TEXT());
case ERROR -> colorize(fullPrefix, Attribute.RED_TEXT());
case SUCCESS -> colorize(fullPrefix, Attribute.GREEN_TEXT());
};
}
/**
* Print text to the output or error stream, depending on the logLevel
* @param s The string to print
* @param logLevel If this is ERROR, the text is printed to stderr; otherwise to stdout
*/
protected void print(String s, LogLevel logLevel) {
String coloredPrefix = this.getPrefix(logLevel);
// if we are running the server, prepend the timestamp
if(SocketServer.isServerRunning()) {
String timestamp = LocalDateTime.now().format(timeFormatter);
coloredPrefix = "[" + timestamp + "] " + coloredPrefix;
}
// output to the correct output-stream
if (logLevel == LogLevel.ERROR) {
System.err.println(coloredPrefix + s);
}
else {
System.out.println(coloredPrefix + s);
}
}
public boolean isLogLevelActive(LogLevel logLevel) {
return logLevel.isHigherOrEqualTo(ConsoleInterface.logLevel);
}
/**
* Write text to the attached writer if there is any
* @param s The string to print
*/
protected void write(String s) {
if (writer != null && ConsoleInterface.writeLogFiles) {
// writing to file should only be done when necessary
synchronized (writer) {
try {
writer.write(s);
}
catch (IOException exception) {
throw new RuntimeException("Failed writing to file", exception);
}
}
}
}
public String findLogCaller() {
StackTraceElement[] stackTrace = Thread.currentThread().getStackTrace();
// index 0 is Thread.getStackTrace itself, index 1 is this method inside the Logger file
final String thisFileName = stackTrace[1].getFileName();
int i = 1;
StackTraceElement currentElement = stackTrace[i];
while (++i < stackTrace.length) {
currentElement = stackTrace[i];
if (!Objects.equals(thisFileName, currentElement.getFileName())) {
break;
}
}
return ".(" + currentElement.getFileName() + ":" + currentElement.getLineNumber() + ")";
}
/**
* Base method for logging a string value. Intended mainly for the Logger-internal helpers that
* hide the logLevel parameter from their callers
*
* @hidden Only relevant for the Logger and very special cases with dynamic logLevel
* @param s The text to log
* @param logLevel The logLevel on which the text should be logged
*/
public void log(String s, LogLevel logLevel) {
if (isLogLevelActive(logLevel)) {
// prepend the call to the logger instance
// s = findLogCaller() + "\n" + s;
this.print(s, logLevel);
this.write(s);
}
}
public void log(Supplier<String> supp, LogLevel logLevel) {
if (isLogLevelActive(logLevel)) {
this.log(supp.get(), logLevel);
}
}
public void log(Object obj, LogLevel logLevel) {
if (isLogLevelActive(logLevel)) {
this.log(obj.toString(), logLevel);
}
}
/**
* Output a debug message
* @param s The string to log
*/
public void debug(String s) {
this.log(s, LogLevel.DEBUG);
}
public void debug(Object o) {
this.log(o, LogLevel.DEBUG);
}
public void debug(Supplier<String> supp) {
this.log(supp, LogLevel.DEBUG);
}
/**
* Output an info message
* @param s The string to log
*/
public void info(String s) {
this.log(s, LogLevel.INFO);
}
public void info(Object o) {
this.log(o, LogLevel.INFO);
}
public void info(Supplier<String> supp) {
this.log(supp, LogLevel.INFO);
}
/**
* Output a warning message
* @param s The string to log
*/
public void warn(String s) {
this.log(s, LogLevel.WARNING);
}
public void warn(Object o) {
this.log(o, LogLevel.WARNING);
}
public void warn(Supplier<String> supp) {
this.log(supp, LogLevel.WARNING);
}
/**
* Output an error message
* @param s The string to log
*/
public void error(String s) {
this.log(s, LogLevel.ERROR);
}
public void error(Object o) {
this.log(o, LogLevel.ERROR);
}
public void error(Supplier<String> supp) {
this.log(supp, LogLevel.ERROR);
}
/**
* Output a success message
* @param s The string to log
*/
public void success(String s) {
this.log(s, LogLevel.SUCCESS);
}
public void success(Object o) {
this.log(o, LogLevel.SUCCESS);
}
public void success(Supplier<String> supp) {
this.log(supp, LogLevel.SUCCESS);
}
/**
* Special logging function that prints a throwable object and all of its recursive causes (including stacktrace)
* as an error
*
* @param throwable The Throwable object to output
*/
public void exception(Throwable throwable) {
// Format the exception output
String s = "Exception: " + throwable.getMessage() + "\n" +
Arrays.stream(throwable.getStackTrace()).map(stackTraceElement ->
" | " + stackTraceElement.toString()
).collect(Collectors.joining("\n"));
// if a cause will be printed afterwards, announce it as part of this exception's output
if (throwable.getCause() != null) {
s += "\n\nCaused by: ";
}
// print the exception
this.error(s);
// print the cause recursively
if (throwable.getCause() != null) {
this.exception(throwable.getCause());
}
}
public void close() {
if (this.writer != null) {
try {
this.writer.close();
}
catch (IOException exception) {
throw new RuntimeException("Failed to close a loggers writer. Was it maybe already closed? ", exception);
}
}
}
/**
* An enum representing the different log levels as integers:
* <ul>
* <li>DEBUG: highly specific output only for debugging</li>
* <li>INFO: informational output about the current state of the program</li>
* <li>WARNING: warnings about potential issues or an unexpected state</li>
* <li>ERROR: invalid states, errors and exceptions</li>
* <li>SUCCESS: successfully executed key steps of the program</li>
* </ul>
*/
public enum LogLevel {
/** Highly specific output only for debugging */
DEBUG(0),
/** Informational output about the current state of the program */
INFO(1),
/** Warnings about potential issues or an unexpected state */
WARNING(2),
/** Invalid states, errors and exceptions */
ERROR(3),
/** Successfully executed key steps of the program */
SUCCESS(4);
private final int value;
LogLevel(final int newValue) {
value = newValue;
}
public boolean isHigherOrEqualTo(LogLevel other) {
return this.value >= other.value;
}
public int getValue() {
return value;
}
public static LogLevel fromValue(int value) {
return switch (value) {
case 0 -> LogLevel.DEBUG;
case 1 -> LogLevel.INFO;
case 2 -> LogLevel.WARNING;
case 3 -> LogLevel.ERROR;
case 4 -> LogLevel.SUCCESS;
default -> throw new RuntimeException("Invalid log level value: " + value);
};
}
}
/**
* A special case of logger that will never output anything
*/
private static class NullLogger extends Logger {
@Override
public void log(String s, LogLevel logLevel) {
// Do nothing. Yay
}
}
}
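
Not part of the diff: a short usage sketch of the Logger API added above, assuming the class is available as de.dhbwstuttgart.util.Logger; the file path and prefixes are purely illustrative, and whether anything is printed or written still depends on ConsoleInterface.logLevel and ConsoleInterface.writeLogFiles.

import de.dhbwstuttgart.util.Logger;

class LoggerUsageSketch {
    public static void main(String[] args) {
        // file-backed logger with a prefix; the prefix is tinted per log level on the console
        Logger unifyLogger = Logger.forFile("unify.log", "Unify");
        unifyLogger.info("starting unification");

        // child logger that reuses the same writer under a different prefix
        Logger taskLogger = Logger.inherit(unifyLogger, "Task-1");
        taskLogger.debug(() -> "expensive state dump: " + System.nanoTime());

        // a logger that swallows everything, e.g. for tests
        Logger.NULL_LOGGER.error("never printed");

        unifyLogger.close();
    }
}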

View File

@@ -18,7 +18,7 @@ public class TestTypeDeployment {
public void testTypeDeployment() throws Exception {
var path = Path.of(System.getProperty("user.dir"), "/resources/bytecode/javFiles/Cycle.jav");
var file = path.toFile();
var compiler = new JavaTXCompiler(file, false);
var compiler = new JavaTXCompiler(file);
var parsedSource = compiler.sourceFiles.get(file);
var tiResults = compiler.typeInference(file);
Set<TypeInsert> tips = new HashSet<>();

View File

@@ -1,5 +1,6 @@
package finiteClosure;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.parser.SyntaxTreeGenerator.FCGenerator;
import de.dhbwstuttgart.syntaxtree.ClassOrInterface;
import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
@@ -15,14 +16,14 @@ public class SuperInterfacesTest {
public void test() throws ClassNotFoundException {
Collection<ClassOrInterface> classes = new ArrayList<>();
classes.add(ASTFactory.createClass(TestClass.class));
System.out.println(FCGenerator.toFC(classes, ClassLoader.getSystemClassLoader(), new PlaceholderRegistry()));
System.out.println(FCGenerator.toFC(classes, ClassLoader.getSystemClassLoader(), JavaTXCompiler.defaultClientPlaceholderRegistry));
}
@Test
public void testGeneric() throws ClassNotFoundException {
Collection<ClassOrInterface> classes = new ArrayList<>();
classes.add(ASTFactory.createClass(TestClassGeneric.class));
System.out.println(FCGenerator.toFC(classes, ClassLoader.getSystemClassLoader(), new PlaceholderRegistry()));
System.out.println(FCGenerator.toFC(classes, ClassLoader.getSystemClassLoader(), JavaTXCompiler.defaultClientPlaceholderRegistry));
}
}

View File

@@ -18,6 +18,7 @@ import de.dhbwstuttgart.typeinference.unify.model.SuperType;
import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import de.dhbwstuttgart.util.Logger;
import java.io.Writer;
import java.util.HashSet;
import java.util.List;
@@ -84,13 +85,12 @@ class PacketExampleData {
static UnifyContext createTestContext() {
var placeholderRegistry = new PlaceholderRegistry();
var nullWriter = Writer.nullWriter();
return new UnifyContext(nullWriter, false, true,
return new UnifyContext(Logger.NULL_LOGGER, true,
new UnifyResultModel(
new ConstraintSet<>(),
new FiniteClosure(
new HashSet<>(),
nullWriter,
Logger.NULL_LOGGER,
placeholderRegistry)),
new UnifyTaskModel(),
ForkJoinPool.commonPool(),

View File

@@ -0,0 +1,152 @@
package server;
import de.dhbwstuttgart.core.ConsoleInterface;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.core.JavaTXServer;
import de.dhbwstuttgart.environment.CompilationEnvironment;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.packet.SetAutoclosePacket;
import de.dhbwstuttgart.server.packet.UnifyRequestPacket;
import de.dhbwstuttgart.syntaxtree.ClassOrInterface;
import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.UnifyResultListenerImpl;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import de.dhbwstuttgart.util.Logger;
import java.io.File;
import java.io.IOException;
import java.io.Writer;
import java.nio.file.Path;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.stream.Collectors;
import org.junit.Ignore;
import org.junit.Test;
import targetast.TestCodegen;
import static org.junit.Assert.*;
@Ignore("Server tests create huge overhead, so they are ignored until required")
public class ServerTest {
public ServerTest() {
ConsoleInterface.unifyServerUrl = Optional.of("ws://localhost:5000");
}
@Test
public void checkServer_Scalar() throws IOException, ClassNotFoundException {
compareLocalAndServerResult("Scalar.jav");
}
@Test
public void checkServer_Matrix() throws IOException, ClassNotFoundException {
compareLocalAndServerResult("Matrix.jav");
}
protected void compareLocalAndServerResult(final String filename) throws IOException, ClassNotFoundException {
File file = Path.of(TestCodegen.path.toString(), filename).toFile();
// get information from the compiler
JavaTXCompiler compiler = new JavaTXCompiler(List.of(file));
// NOW: simulate the call to method typeInference. Once via server and once locally
// if everything works, they should neither interfere with each other nor differ in their result
// get the values from the compiler
PlaceholderRegistry placeholderRegistry = JavaTXCompiler.defaultClientPlaceholderRegistry; //new PlaceholderRegistry();
ConstraintSet<Pair> cons = compiler.getConstraints(file);
ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(compiler, cons, placeholderRegistry);
unifyCons = unifyCons.map(ServerTest::distributeInnerVars);
FiniteClosure finiteClosure = UnifyTypeFactory.generateFC(
ServerTest.getAllClasses(compiler, file).stream().toList(),
Logger.NULL_LOGGER,
compiler.classLoader,
compiler,
placeholderRegistry
);
UnifyTaskModel usedTasks = new UnifyTaskModel();
// create the server
JavaTXServer server = new JavaTXServer(5000);
// run the server in a separate thread; do not use try-with-resources here,
// because ExecutorService.close() would block until the listen task terminates
ExecutorService executor = Executors.newSingleThreadExecutor();
executor.submit(server::listen);
// run the unification on the server
PlaceholderRegistry prCopy = JavaTXCompiler.defaultClientPlaceholderRegistry.deepClone();
UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
UnifyContext context = new UnifyContext(Logger.NULL_LOGGER, true, urm, usedTasks, prCopy);
SocketClient.execute(SetAutoclosePacket.create());
List<ResultSet> serverResult = SocketClient.executeAndGet(
UnifyRequestPacket.create(finiteClosure, cons, unifyCons, context.placeholderRegistry())
).getResultSet(context);
// close the server and release the executor thread
server.forceStop();
executor.shutdownNow();
// run the unification on the client (do this second, because it changes the initial placeholder registry)
UnifyResultListenerImpl li = new UnifyResultListenerImpl();
urm.addUnifyResultListener(li);
TypeUnify.unifyParallel(unifyCons.getUndConstraints(), unifyCons.getOderConstraints(), finiteClosure, context);
List<ResultSet> clientResult = li.getResults();
// create the bytecode from both results
var sf = compiler.sourceFiles.get(file);
var serverBytecode = compiler.generateBytecode(sf, serverResult);
var localBytecode = compiler.generateBytecode(sf, clientResult);
// test if the generated code is the same
for (var serverEntry : serverBytecode.entrySet()) {
var serverBytes = serverEntry.getValue();
var localBytes = localBytecode.get(serverEntry.getKey());
assertArrayEquals(serverBytes, localBytes);
}
}
protected static UnifyPair distributeInnerVars(UnifyPair x) {
UnifyType lhs, rhs;
if (((lhs = x.getLhsType()) instanceof PlaceholderType) && ((rhs = x.getRhsType()) instanceof PlaceholderType) && (((PlaceholderType) lhs).isInnerType() || ((PlaceholderType) rhs).isInnerType())) {
((PlaceholderType) lhs).setInnerType(true);
((PlaceholderType) rhs).setInnerType(true);
}
return x;
}
protected static Set<ClassOrInterface> getAllClasses(JavaTXCompiler compiler, File file)
throws ClassNotFoundException, IOException
{
var sf = compiler.sourceFiles.get(file);
Set<ClassOrInterface> allClasses = new HashSet<>();
allClasses.addAll(compiler.getAvailableClasses(sf));
allClasses.addAll(sf.getClasses());
var newClasses = CompilationEnvironment.loadDefaultPackageClasses(sf.getPkgName(), file, compiler).stream().map(ASTFactory::createClass).collect(Collectors.toSet());
for (var clazz : newClasses) {
// Don't load classes that get recompiled
if (sf.getClasses().stream().anyMatch(nf -> nf.getClassName().equals(clazz.getClassName())))
continue;
if (allClasses.stream().noneMatch(old -> old.getClassName().equals(clazz.getClassName())))
allClasses.add(clazz);
}
return allClasses;
}
}
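
A minimal sketch of the background-thread lifecycle the test relies on: the listen loop is submitted to a single-thread executor and the executor is shut down once the work is done, rather than using try-with-resources (whose close() would block until the task terminates). JavaTXServer is replaced here by a hypothetical stop flag.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

class BackgroundServerSketch {
    public static void main(String[] args) throws InterruptedException {
        AtomicBoolean running = new AtomicBoolean(true);

        ExecutorService executor = Executors.newSingleThreadExecutor();
        // stand-in for server::listen: blocks until asked to stop
        executor.submit(() -> {
            while (running.get()) {
                Thread.onSpinWait();
            }
        });

        // ... client work would run here, concurrently with the "server" ...

        running.set(false); // stand-in for server.forceStop()
        executor.shutdown();
        executor.awaitTermination(5, TimeUnit.SECONDS);
    }
}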

testOut/test.java Normal file
View File

@@ -0,0 +1,22 @@
import java.util.Vector;
import java.util.List;
class Main {
public static void main(String[] args) {
var grid = new Vector<Vector<Boolean>>(List.of(
new Vector<Boolean>(List.of(false, false, false)),
new Vector<Boolean>(List.of(false, false, false)),
new Vector<Boolean>(List.of(false, true, false))
));
BFS img = new BFS();
Pos pos = img.search(grid);
System.out.println("Found at: x=" + pos.x + " | y=" + pos.y);
}
}