68 Commits

Author SHA1 Message Date
Fabian Holzwarth
3d99f282f5 feat: cleanup code 2025-07-21 16:12:56 +02:00
Fabian Holzwarth
303c91dc87 chore: making classnames more expressive and cleanup some structures 2025-07-19 12:50:30 +02:00
Fabian Holzwarth
603a8b176a feat: implement partially cancellable tasks 2025-07-16 11:16:30 +02:00
Fabian Holzwarth
f396189a4b feat: add timestamp to server output 2025-07-12 13:44:18 +02:00
Fabian Holzwarth
e7f4a94908 feat: fixed old condition on server timeout 2025-07-12 13:22:54 +02:00
Fabian Holzwarth
ce49a4b9a5 feat: reduce temporary objects and repeated loops 2025-07-12 13:14:55 +02:00
Fabian Holzwarth
03b3692724 feat: update independentTest 2025-07-11 12:50:27 +02:00
Fabian Holzwarth
f0022d2b6f feat: use presized hashMaps to reduce resizing 2025-07-11 11:22:06 +02:00
Fabian Holzwarth
b1015cfa82 feat: update logging and add success-level 2025-07-07 16:19:04 +02:00
Fabian Holzwarth
b63a27a0a0 feat: improve server by assigning configured thread pools 2025-07-07 15:59:46 +02:00
Fabian Holzwarth
3b0a53d3c4 feat: add cross dependency, fix: socket closing and error messages 2025-07-07 15:19:56 +02:00
Fabian Holzwarth
50dbbf5f86 feat: implement generalized socket client, server logger and cleanup code 2025-07-07 14:01:27 +02:00
Fabian Holzwarth
130c491ac0 feat: more Boxing replacements 2025-07-06 15:49:59 +02:00
Fabian Holzwarth
9f9b264ac4 feat: replace unnecessary boxing with primitives 2025-07-06 15:18:51 +02:00
Fabian Holzwarth
1393db05c2 feat: implement lazy evaluation for logger outputs 2025-07-06 13:37:47 +02:00
Fabian Holzwarth
93e1a8787c feat: do not create a new context, if nothing changes 2025-07-05 11:43:10 +02:00
Fabian Holzwarth
0129d7540f feat: use perMessageDeflate compression in websocket and use logger for message output 2025-07-05 11:16:06 +02:00
Fabian Holzwarth
7ea8337aee feat: remove unused logging library 2025-07-05 11:15:33 +02:00
Fabian Holzwarth
28458d405f feat: ignore server test 2025-07-02 15:47:55 +02:00
Fabian Holzwarth
1b905cb3e2 feat: implement loggers for the rest of the compiler 2025-07-01 23:06:09 +02:00
Fabian Holzwarth
d02c3583e9 feat: implement new logger into type inference code 2025-07-01 22:16:29 +02:00
Fabian Holzwarth
ca98e83fd2 feat: added logger 2025-07-01 21:21:39 +02:00
Fabian Holzwarth
c80a0c8596 feat: fix error by reintroducing name generator and add server tests 2025-06-30 16:42:20 +02:00
Fabian Holzwarth
2278fb1b91 feat: undo removing NameGenerator to fix errors in ast generation 2025-06-30 12:46:41 +02:00
Fabian Holzwarth
32b16cd5fd feat: replace concurrent modification with correct function call 2025-06-30 11:49:53 +02:00
Fabian Holzwarth
fd30c5f63f feat: prevent reusing the placeholder registry in tests 2025-06-29 16:04:54 +02:00
Fabian Holzwarth
8bfd6ae255 feat: remove redundant lambda functions and Set-resizings 2025-06-28 14:48:43 +02:00
Fabian Holzwarth
ad2dfb13bd feat: speedup toString methods by using a StringBuilder instead of String concatenation 2025-06-28 14:30:12 +02:00
Fabian Holzwarth
501633a90c feat: fix test with null methodSignatureConstraint 2025-06-28 14:06:34 +02:00
Fabian Holzwarth
4defa50ca2 feat: added version check on connecting 2025-06-25 19:48:29 +02:00
Fabian Holzwarth
d65e90536a feat: replace NameGenerator with instance of PlaceholderRegistry to prevent duplicates 2025-06-25 19:15:28 +02:00
Fabian Holzwarth
3de7f1aa61 fix: try generating new placeholders only in current placeholderRegistry context to prevent duplicates 2025-06-25 17:38:56 +02:00
Fabian Holzwarth
029e40b775 feat: make packets directional and self handling 2025-06-25 17:35:49 +02:00
Fabian Holzwarth
459bfcdd5f feat: added tests for client-server communication 2025-06-23 16:13:43 +02:00
Fabian Holzwarth
02886c38ea feat: fixed error in object serialization 2025-06-23 16:13:21 +02:00
Fabian Holzwarth
57ffae0481 fix: fixed some serialization and deserialization issues 2025-06-22 15:11:49 +02:00
Fabian Holzwarth
d084d74a25 feat: fixed mismatch in PairOperator serialization 2025-06-22 10:10:32 +02:00
Fabian Holzwarth
cd15016f61 feat: allow subclasses when asserting values 2025-06-21 13:44:29 +02:00
Fabian Holzwarth
b0e5eee25c feat: rename Object... to Serial... and move into separate classes 2025-06-21 13:40:24 +02:00
Fabian Holzwarth
d1bd285be7 fix: replace reflection class check with simple string check 2025-06-21 13:23:01 +02:00
Fabian Holzwarth
a902fd5bee feat: replaced HashMaps with better type safety structure 2025-06-21 12:58:45 +02:00
Fabian Holzwarth
ced9fdc9f7 fix: non-serialized constraintContext 2025-06-20 19:09:33 +02:00
Fabian Holzwarth
53417bf298 feat: implement serialization and adjust packets to correct data types 2025-06-20 18:53:25 +02:00
Fabian Holzwarth
2d4da03f00 feat: implementing client-server model 2025-06-18 19:58:23 +02:00
Fabian Holzwarth
f7a13f5faa feat: turn UnifyContext into a record 2025-06-18 18:26:44 +02:00
Fabian Holzwarth
8fe80b4396 feat: move static placeholder generation into object 2025-06-18 17:47:29 +02:00
Fabian Holzwarth
eb1201ae5e feat: apply future-based approach to inner cartesian loop 2025-06-09 16:49:45 +02:00
Fabian Holzwarth
963ad76593 feat: make cartesian loop computation Future-based 2025-06-09 15:30:04 +02:00
Fabian Holzwarth
1eba09e3b0 feat: change cartesian while loop into recursive 2025-06-09 15:16:09 +02:00
Fabian Holzwarth
fc82125d14 feat: change TypeUnifyTask to use future-based logic 2025-06-09 14:53:37 +02:00
Fabian Holzwarth
dad468368b feat: make functions unify and unify2 future-based 2025-06-09 13:14:44 +02:00
Fabian Holzwarth
fdd4f3aa59 feat: implement variance-dependent calculation as Future based 2025-06-09 12:59:23 +02:00
Fabian Holzwarth
a0c11b60e8 Remove unnecessary parameter and fix some parallelization 2025-06-07 16:11:34 +02:00
Fabian Holzwarth
4cddf73e6d feat: small fixes for correct parameters 2025-06-07 14:38:18 +02:00
Fabian Holzwarth
5024a02447 feat: implement unify context and prepare variance code capsulation 2025-06-07 11:53:32 +02:00
Fabian Holzwarth
6c2d97b770 chore: code cleanup 2025-05-26 15:49:01 +02:00
Fabian Holzwarth
426c2916d3 feat: remove unnecessary synchronized blocks 2025-05-26 14:40:17 +02:00
Fabian Holzwarth
f722a00fbb feat: use the current thread for computation as well 2025-05-25 15:55:07 +02:00
Fabian Holzwarth
32797c9b9f feat: cleanup more cartesian product code 2025-05-24 12:43:42 +02:00
Fabian Holzwarth
87f655c85a feat: isolate constraint-filtering for one tv from computeCartesianRecursive 2025-05-23 16:10:37 +02:00
Fabian Holzwarth
613dceae1d feat: added Logger class, remove empty println start cleanup of computeCartesianRecursive 2025-05-23 14:12:25 +02:00
Fabian Holzwarth
81cac06e16 feat: add tool for merging many hash sets in parallel 2025-05-23 14:11:52 +02:00
Fabian Holzwarth
a47d5bc024 feat: slightly improved placeholder name generation 2025-05-23 14:04:48 +02:00
Fabian Holzwarth
e5916d455a feat: format and merge results in parallel 2025-05-19 17:05:18 +02:00
Fabian Holzwarth
ebb639e72e feat: remove log flushes 2025-05-18 16:29:19 +02:00
Fabian Holzwarth
f0a4a51ce6 feat: replace thread counter with thread pool 2025-05-18 15:40:31 +02:00
Fabian Holzwarth
7442880452 feat: limit placeholder generation to uppercase chars 2025-05-18 13:24:29 +02:00
Fabian Holzwarth
c4dc3b4245 feat: replace random based placeholder generation with deterministic approach 2025-05-18 12:41:56 +02:00
104 changed files with 7834 additions and 4069 deletions

independentTest.sh Executable file
View File

@@ -0,0 +1,40 @@
#!/usr/bin/env bash
# Clones the repository into a scratch directory, builds it and times one compiler run,
# independent of the local working copy.
REPO="https://gitea.hb.dhbw-stuttgart.de/f.holzwarth/JavaCompilerCore.git"
TDIR="./testBuild"
rm -rf "$TDIR" 2>/dev/null
mkdir "$TDIR"
cd "$TDIR" || exit 1
git clone "$REPO" .
git checkout feat/unify-server
# Alternative commits to check out for comparison:
# git checkout 93e1a8787cd94c73f4538f6a348f58613893a584
# git checkout dad468368b86bdd5a3d3b2754b17617cee0a9107 # 1:55
# git checkout a0c11b60e8c9d7addcbe0d3a09c9ce2924e9d5c0 # 2:25
# git checkout 4cddf73e6d6c9116d3e1705c4b27a8e7f18d80c3 # 2:27
# git checkout 6c2d97b7703d954e4a42eef3ec374bcf313af75c # 2:13
# git checkout f722a00fbb6e69423d48a890e4a6283471763e64 # 1:35
# git checkout f0a4a51ce65639ce9a9470ff0fdb538fdf9c02cc # 2:19
# git checkout 1391206dfe59263cdb22f93371cfd1dd5465d97f # 1:29
date "+%Y.%m.%d %H:%M:%S"
# sed -i -e 's/source>21/source>23/g' pom.xml
# sed -i -e 's/target>21/target>23/g' pom.xml
mvn clean compile -DskipTests package
# Time a single compiler run on a sample source file
time java -jar target/JavaTXcompiler-0.1-jar-with-dependencies.jar resources/bytecode/javFiles/Matrix.jav
# mvn clean compile test
echo -e "\nCleanup... "
cd - || exit 1
rm -rf "$TDIR" 2>/dev/null
echo -e "\nFinished "
date "+%Y.%m.%d %H:%M:%S"
echo -e "\n "

pom.xml
View File

@@ -44,6 +44,26 @@ http://maven.apache.org/maven-v4_0_0.xsd">
<artifactId>asm</artifactId>
<version>9.5</version>
</dependency>
<dependency>
<groupId>org.java-websocket</groupId>
<artifactId>Java-WebSocket</artifactId>
<version>1.5.2</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-simple</artifactId>
<version>1.7.25</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.17.2</version>
</dependency>
<dependency>
<groupId>com.diogonunes</groupId>
<artifactId>JColor</artifactId>
<version>5.5.1</version>
</dependency>
</dependencies>
<build>
@@ -138,4 +158,4 @@ http://maven.apache.org/maven-v4_0_0.xsd">
<url>file:///${project.basedir}/maven-repository/</url>
</repository>
</distributionManagement>
</project>

View File

@@ -8,6 +8,7 @@ import de.dhbwstuttgart.target.generate.ASTToTargetAST;
import de.dhbwstuttgart.target.tree.*;
import de.dhbwstuttgart.target.tree.expression.*;
import de.dhbwstuttgart.target.tree.type.*;
import de.dhbwstuttgart.util.Logger;
import org.objectweb.asm.*;
import java.lang.invoke.*;
@@ -19,6 +20,8 @@ import static de.dhbwstuttgart.target.tree.expression.TargetBinaryOp.*;
import static de.dhbwstuttgart.target.tree.expression.TargetLiteral.*;
public class Codegen {
public static Logger logger = new Logger("codegen");
private final TargetStructure clazz;
private final ClassWriter cw;
public final String className;
@@ -1317,7 +1320,7 @@ public class Codegen {
types.add(Type.getObjectType(guard.inner().type().getInternalName()));
// TODO Same here we need to evaluate constant;
} else {
System.out.println(label);
logger.info(label);
throw new NotImplementedException();
}
}

View File

@@ -120,7 +120,7 @@ public class FunNGenerator {
superFunNMethodDescriptor.append(")V");
}
System.out.println(superFunNMethodSignature);
Codegen.logger.info(superFunNMethodSignature);
ClassWriter classWriter = new ClassWriter(0);
MethodVisitor methodVisitor;

View File

@@ -1,46 +1,81 @@
package de.dhbwstuttgart.core;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.util.Logger;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.util.*;
public class ConsoleInterface {
private static final String directory = System.getProperty("user.dir");
public static void main(String[] args) throws IOException, ClassNotFoundException {
List<File> input = new ArrayList<>();
List<File> classpath = new ArrayList<>();
String outputPath = null;
Iterator<String> it = Arrays.asList(args).iterator();
if(args.length == 0){
System.out.println("No input files given. Get help with --help");
System.exit(1);
}else if(args.length == 1 && args[0].equals("--help")){
System.out.println("Usage: javatx [OPTION]... [FILE]...\n" +
"\t-cp\tSet Classpath\n" +
"\t-d\tSet destination directory");
System.exit(1);
public class ConsoleInterface {
/**
* Leave the argument configurations here for the rest of the code to read
*/
public static Logger.LogLevel logLevel = Logger.LogLevel.ERROR;
public static boolean writeLogFiles = false;
public static Optional<String> unifyServerUrl = Optional.empty();
public static void main(String[] args) throws IOException, ClassNotFoundException {
List<File> input = new ArrayList<>();
List<File> classpath = new ArrayList<>();
String outputPath = null;
Iterator<String> it = Arrays.asList(args).iterator();
Optional<Integer> serverPort = Optional.empty();
if (args.length == 0) {
System.out.println("No input files given. Get help with --help");
System.exit(1);
} else if (args.length == 1 && args[0].equals("--help")) {
System.out.println("Usage: javatx [OPTION]... [FILE]...\n" +
"\t-cp\tSet Classpath\n" +
"\t-d\tSet destination directory\n" +
"\t[--server-mode <port>]\n" +
"\t[--unify-server <url>]\n" +
"\t[--write-logs]\n" +
"\t[-v|-vv-|-vvv]");
System.exit(1);
}
while (it.hasNext()) {
String arg = it.next();
if (arg.equals("-d")) {
outputPath = it.next();
} else if (arg.startsWith("-d")) {
outputPath = arg.substring(2);
} else if (arg.equals("-cp") || arg.equals("-classpath")) {
String[] cps = it.next().split(":");
for (String cp : cps) {
classpath.add(new File(cp));
}
while(it.hasNext()){
String arg = it.next();
if(arg.equals("-d")){
outputPath = it.next();
}else if(arg.startsWith("-d")) {
outputPath = arg.substring(2);
}else if(arg.equals("-cp") || arg.equals("-classpath")){
String[] cps = it.next().split(":");
for(String cp : cps){
classpath.add(new File(cp));
}
}else{
input.add(new File(arg));
}
}
JavaTXCompiler compiler = new JavaTXCompiler(input, classpath, outputPath != null ? new File(outputPath) : null);
//compiler.typeInference();
compiler.generateBytecode();
}
} else if (arg.equals("--server-mode")) {
serverPort = Optional.of(Integer.parseInt(it.next()));
} else if (arg.equals("--unify-server")) {
unifyServerUrl = Optional.of(it.next());
} else if (arg.equals("--write-logs")) {
ConsoleInterface.writeLogFiles = true;
} else if (arg.startsWith("-v")) {
logLevel = switch (arg) {
case "-v" -> Logger.LogLevel.WARNING;
case "-vv" -> Logger.LogLevel.INFO;
case "-vvv" -> Logger.LogLevel.DEBUG;
default -> throw new IllegalArgumentException("Argument " + arg + " is not a valid verbosity level");
};
} else {
input.add(new File(arg));
}
}
if (serverPort.isPresent()) {
if (unifyServerUrl.isPresent()) throw new RuntimeException("Cannot use unifyServer when in server mode!");
JavaTXServer server = new JavaTXServer(serverPort.get());
server.listen();
}
else {
JavaTXCompiler compiler = new JavaTXCompiler(input, classpath, outputPath != null ? new File(outputPath) : null);
//compiler.typeInference();
compiler.generateBytecode();
SocketClient.closeIfOpen();
}
}
}

View File

@@ -12,6 +12,11 @@ import de.dhbwstuttgart.parser.SyntaxTreeGenerator.SyntaxTreeGenerator;
import de.dhbwstuttgart.parser.antlr.Java17Parser.SourceFileContext;
import de.dhbwstuttgart.parser.scope.JavaClassName;
import de.dhbwstuttgart.parser.scope.JavaClassRegistry;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.packet.SetAutoclosePacket;
import de.dhbwstuttgart.server.packet.UnifyRequestPacket;
import de.dhbwstuttgart.server.packet.UnifyResultPacket;
import de.dhbwstuttgart.syntaxtree.ClassOrInterface;
import de.dhbwstuttgart.syntaxtree.GenericTypeVar;
import de.dhbwstuttgart.syntaxtree.Method;
@@ -35,10 +40,13 @@ import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.typeAlgo.TYPE;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.RuleSet;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.distributeVariance;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
@@ -49,6 +57,7 @@ import de.dhbwstuttgart.typeinference.unify.UnifyResultListenerImpl;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;
import de.dhbwstuttgart.util.Logger;
import java.io.*;
import java.lang.reflect.Modifier;
import java.nio.file.Path;
@@ -61,14 +70,17 @@ import org.apache.commons.io.output.NullOutputStream;
public class JavaTXCompiler {
// do not use this in any code that can be executed server-side!
public static PlaceholderRegistry defaultClientPlaceholderRegistry = new PlaceholderRegistry();
public static Logger defaultLogger = new Logger();
// public static JavaTXCompiler INSTANCE;
final CompilationEnvironment environment;
Boolean resultmodel = true;
public final Map<File, SourceFile> sourceFiles = new HashMap<>();
public final Map<File, SourceFile> sourceFiles = new HashMap<>();
Boolean log = false; // indicates whether a log file should be written to System.getProperty("user.dir") + "/logFiles/"
public volatile UnifyTaskModel usedTasks = new UnifyTaskModel();
public final DirectoryClassLoader classLoader;
public volatile UnifyTaskModel usedTasks = new UnifyTaskModel();
public final DirectoryClassLoader classLoader;
public final List<File> classPath;
private final File outputPath;
@@ -76,14 +88,9 @@ public class JavaTXCompiler {
public DirectoryClassLoader getClassLoader() {
return classLoader;
}
public JavaTXCompiler(File sourceFile) throws IOException, ClassNotFoundException {
this(Arrays.asList(sourceFile), List.of(), new File("."));
}
public JavaTXCompiler(File sourceFile, Boolean log) throws IOException, ClassNotFoundException {
this(sourceFile);
this.log = log;
public JavaTXCompiler(File sourceFile) throws IOException, ClassNotFoundException {
this(Collections.singletonList(sourceFile), List.of(), new File("."));
}
public JavaTXCompiler(List<File> sourceFiles) throws IOException, ClassNotFoundException {
@@ -91,6 +98,9 @@ public class JavaTXCompiler {
}
public JavaTXCompiler(List<File> sources, List<File> contextPath, File outputPath) throws IOException, ClassNotFoundException {
// ensure new default placeholder registry for tests
defaultClientPlaceholderRegistry = new PlaceholderRegistry();
var path = new ArrayList<>(contextPath);
if (contextPath.isEmpty()) {
// When no contextPaths are given, the working directory is the sources root
@@ -300,52 +310,51 @@ public class JavaTXCompiler {
Set<Set<UnifyPair>> results = new HashSet<>();
UnifyResultModel urm = null;
// urm.addUnifyResultListener(resultListener);
try {
logFile = logFile == null ? new FileWriter(new File("log_" + sourceFiles.keySet().iterator().next().getName())) : logFile;
IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses, logFile, getClassLoader(), this);
System.out.println(finiteClosure);
urm = new UnifyResultModel(cons, finiteClosure);
urm.addUnifyResultListener(resultListener);
ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons);
logFile = logFile == null ? new FileWriter("log_" + sourceFiles.keySet().iterator().next().getName()) : logFile;
Logger logger = new Logger(logFile, "TypeInferenceAsync");
UnifyContext context = new UnifyContext(logger, true, urm, usedTasks, defaultClientPlaceholderRegistry);
Function<UnifyPair, UnifyPair> distributeInnerVars = x -> {
UnifyType lhs, rhs;
if (((lhs = x.getLhsType()) instanceof PlaceholderType) && ((rhs = x.getRhsType()) instanceof PlaceholderType) && (((PlaceholderType) lhs).isInnerType() || ((PlaceholderType) rhs).isInnerType())) {
((PlaceholderType) lhs).setInnerType(true);
((PlaceholderType) rhs).setInnerType(true);
}
return x;
IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses, logger, getClassLoader(), this, context.placeholderRegistry());
logger.info(finiteClosure.toString());
urm = new UnifyResultModel(cons, finiteClosure);
urm.addUnifyResultListener(resultListener);
ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons, context.placeholderRegistry());
};
logFile.write(unifyCons.toString());
unifyCons = unifyCons.map(distributeInnerVars);
logFile.write(unifyCons.toString());
TypeUnify unify = new TypeUnify();
// Set<Set<UnifyPair>> results = new HashSet<>(); moved to the front
logFile.write("FC:\\" + finiteClosure.toString() + "\n");
for (SourceFile f : this.sourceFiles.values()) {
logFile.write(ASTTypePrinter.print(f));
Function<UnifyPair, UnifyPair> distributeInnerVars = x -> {
UnifyType lhs, rhs;
if (((lhs = x.getLhsType()) instanceof PlaceholderType) && ((rhs = x.getRhsType()) instanceof PlaceholderType) && (((PlaceholderType) lhs).isInnerType() || ((PlaceholderType) rhs).isInnerType())) {
((PlaceholderType) lhs).setInnerType(true);
((PlaceholderType) rhs).setInnerType(true);
}
logFile.flush();
Set<PlaceholderType> varianceTPHold;
Set<PlaceholderType> varianceTPH = new HashSet<>();
varianceTPH = varianceInheritanceConstraintSet(unifyCons);
return x;
/*
* PL 2018-11-07 wird in varianceInheritanceConstraintSet erledigt do { //PL 2018-11-05 Huellenbildung Variance auf alle TPHs der Terme auf der jeweiligen //anderen Seite übertragen varianceTPHold = new HashSet<>(varianceTPH); varianceTPH = varianceInheritanceConstraintSet(unifyCons); unifyCons.map( y -> { if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType() instanceof PlaceholderType)) { if (((PlaceholderType)y.getLhsType()).getVariance() != 0 &&
* ((PlaceholderType)y.getRhsType()).getVariance() == 0) { ((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType( )).getVariance()); } if (((PlaceholderType)y.getLhsType()).getVariance() == 0 && ((PlaceholderType)y.getRhsType()).getVariance() != 0) { ((PlaceholderType)y.getLhsType()).setVariance(((PlaceholderType)y.getRhsType( )).getVariance()); } } return y; } ); } while (!varianceTPHold.equals(varianceTPH));
*/
// Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure,
// logFile, log);
// Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
List<Set<Constraint<UnifyPair>>> oderConstraints = unifyCons.getOderConstraints()/*
* .stream().map(x -> { Set<Set<UnifyPair>> ret = new HashSet<>(); for (Constraint<UnifyPair> y : x) { ret.add(new HashSet<>(y)); } return ret; }).collect(Collectors. toCollection(ArrayList::new))
*/;
unify.unifyAsync(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, urm, usedTasks);
} catch (IOException e) {
System.err.println("kein LogFile");
};
logger.debug(unifyCons.toString());
unifyCons = unifyCons.map(distributeInnerVars);
logger.debug(unifyCons.toString());
// Set<Set<UnifyPair>> results = new HashSet<>(); moved to the front
logger.debug("FC:\\" + finiteClosure.toString() + "\n");
for (SourceFile f : this.sourceFiles.values()) {
logger.debug(ASTTypePrinter.print(f));
}
// logFile.flush();
Set<PlaceholderType> varianceTPHold;
Set<PlaceholderType> varianceTPH = new HashSet<>();
varianceTPH = varianceInheritanceConstraintSet(unifyCons);
/*
* PL 2018-11-07 wird in varianceInheritanceConstraintSet erledigt do { //PL 2018-11-05 Huellenbildung Variance auf alle TPHs der Terme auf der jeweiligen //anderen Seite übertragen varianceTPHold = new HashSet<>(varianceTPH); varianceTPH = varianceInheritanceConstraintSet(unifyCons); unifyCons.map( y -> { if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType() instanceof PlaceholderType)) { if (((PlaceholderType)y.getLhsType()).getVariance() != 0 &&
* ((PlaceholderType)y.getRhsType()).getVariance() == 0) { ((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType( )).getVariance()); } if (((PlaceholderType)y.getLhsType()).getVariance() == 0 && ((PlaceholderType)y.getRhsType()).getVariance() != 0) { ((PlaceholderType)y.getLhsType()).setVariance(((PlaceholderType)y.getRhsType( )).getVariance()); } } return y; } ); } while (!varianceTPHold.equals(varianceTPH));
*/
// Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure,
// logFile, log);
// Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
List<Set<Constraint<UnifyPair>>> oderConstraints = unifyCons.getOderConstraints()/*
* .stream().map(x -> { Set<Set<UnifyPair>> ret = new HashSet<>(); for (Constraint<UnifyPair> y : x) { ret.add(new HashSet<>(y)); } return ret; }).collect(Collectors. toCollection(ArrayList::new))
*/;
TypeUnify.unifyAsync(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
return urm;
}
@@ -365,96 +374,108 @@ public class JavaTXCompiler {
final ConstraintSet<Pair> cons = getConstraints(file);
Set<Set<UnifyPair>> results = new HashSet<>();
try {
var logFolder = new File(System.getProperty("user.dir") + "/logFiles/");
if (log) logFolder.mkdirs();
Writer logFile = log ? new FileWriter(new File(logFolder, "log_" + sourceFiles.keySet().iterator().next().getName())) : new OutputStreamWriter(new NullOutputStream());
IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses.stream().toList(), logFile, classLoader, this);
System.out.println(finiteClosure);
ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons);
System.out.println("xxx1");
Function<UnifyPair, UnifyPair> distributeInnerVars = x -> {
UnifyType lhs, rhs;
if (((lhs = x.getLhsType()) instanceof PlaceholderType) && ((rhs = x.getRhsType()) instanceof PlaceholderType) && (((PlaceholderType) lhs).isInnerType() || ((PlaceholderType) rhs).isInnerType())) {
((PlaceholderType) lhs).setInnerType(true);
((PlaceholderType) rhs).setInnerType(true);
}
return x;
PlaceholderRegistry placeholderRegistry = new PlaceholderRegistry();
};
var logFolder = new File(System.getProperty("user.dir") + "/logFiles/");
if (ConsoleInterface.writeLogFiles && !logFolder.exists() && !logFolder.mkdirs()) throw new RuntimeException("Could not create directory for log files: " + logFolder);
Writer logFile = ConsoleInterface.writeLogFiles ? new FileWriter(new File(logFolder, "log_" + sourceFiles.keySet().iterator().next().getName())) : new OutputStreamWriter(new NullOutputStream());
Logger logger = new Logger(logFile, "TypeInference");
logFile.write("Unify:" + unifyCons.toString());
System.out.println("Unify:" + unifyCons.toString());
unifyCons = unifyCons.map(distributeInnerVars);
logFile.write("\nUnify_distributeInnerVars: " + unifyCons.toString());
TypeUnify unify = new TypeUnify();
// Set<Set<UnifyPair>> results = new HashSet<>(); moved to the front
logFile.write("FC:\\" + finiteClosure.toString() + "\n");
logFile.write(ASTTypePrinter.print(sf));
System.out.println(ASTTypePrinter.print(sf));
logFile.flush();
System.out.println("Unify nach Oder-Constraints-Anpassung:" + unifyCons.toString());
Set<PlaceholderType> varianceTPHold;
Set<PlaceholderType> varianceTPH = new HashSet<>();
varianceTPH = varianceInheritanceConstraintSet(unifyCons);
/*
* PL 2018-11-07 wird in varianceInheritanceConstraintSet erledigt do { //PL 2018-11-05 Huellenbildung Variance auf alle TPHs der Terme auf der jeweiligen //anderen Seite übertragen varianceTPHold = new HashSet<>(varianceTPH); varianceTPH = varianceInheritanceConstraintSet(unifyCons); unifyCons.map( y -> { if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType() instanceof PlaceholderType)) { if (((PlaceholderType)y.getLhsType()).getVariance() != 0 &&
* ((PlaceholderType)y.getRhsType()).getVariance() == 0) { ((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType( )).getVariance()); } if (((PlaceholderType)y.getLhsType()).getVariance() == 0 && ((PlaceholderType)y.getRhsType()).getVariance() != 0) { ((PlaceholderType)y.getLhsType()).setVariance(((PlaceholderType)y.getRhsType( )).getVariance()); } } return y; } ); } while (!varianceTPHold.equals(varianceTPH));
*/
// Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure,
// logFile, log);
// Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
List<Set<Constraint<UnifyPair>>> oderConstraints = unifyCons.getOderConstraints()// .stream().map(x -> {
/*
* Set<Set<UnifyPair>> ret = new HashSet<>(); for (Constraint<UnifyPair> y : x) { ret.add(new HashSet<>(y)); } return ret; }).collect(Collectors.toCollection(ArrayList::new))
*/;
if (resultmodel) {
/* UnifyResultModel Anfang */
UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
UnifyResultListenerImpl li = new UnifyResultListenerImpl();
urm.addUnifyResultListener(li);
unify.unifyParallel(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, urm, usedTasks);
System.out.println("RESULT Final: " + li.getResults());
System.out.println("Constraints for Generated Generics: " + " ???");
logFile.write("RES_FINAL: " + li.getResults().toString() + "\n");
logFile.flush();
return li.getResults();
FiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses.stream().toList(), logger, classLoader, this, placeholderRegistry);
logger.info(finiteClosure.toString());
ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons, placeholderRegistry);
logger.info("xxx1");
Function<UnifyPair, UnifyPair> distributeInnerVars = x -> {
UnifyType lhs, rhs;
if (((lhs = x.getLhsType()) instanceof PlaceholderType) && ((rhs = x.getRhsType()) instanceof PlaceholderType) && (((PlaceholderType) lhs).isInnerType() || ((PlaceholderType) rhs).isInnerType())) {
((PlaceholderType) lhs).setInnerType(true);
((PlaceholderType) rhs).setInnerType(true);
}
/* UnifyResultModel End */
else {
// Set<Set<UnifyPair>> result = unify.unify(unifyCons.getUndConstraints(),
// oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons,
// finiteClosure));
Set<Set<UnifyPair>> result = unify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons, finiteClosure), usedTasks);
System.out.println("RESULT: " + result);
logFile.write("RES: " + result.toString() + "\n");
logFile.flush();
results.addAll(result);
return x;
results = results.stream().map(x -> {
Optional<Set<UnifyPair>> res = new RuleSet().subst(x.stream().map(y -> {
if (y.getPairOp() == PairOperator.SMALLERDOTWC)
y.setPairOp(PairOperator.EQUALSDOT);
return y; // all pairs a <.? b are replaced by a =. b
}).collect(Collectors.toCollection(HashSet::new)));
if (res.isPresent()) { // if subst returns a result, something was changed
return new TypeUnifyTask().applyTypeUnificationRules(res.get(), finiteClosure);
} else
return x; // if nothing was changed, x is returned unmodified
}).collect(Collectors.toCollection(HashSet::new));
System.out.println("RESULT Final: " + results);
System.out.println("Constraints for Generated Generics: " + " ???");
logFile.write("RES_FINAL: " + results.toString() + "\n");
logFile.flush();
logFile.write("PLACEHOLDERS: " + PlaceholderType.EXISTING_PLACEHOLDERS);
logFile.flush();
}
} catch (IOException e) {
System.err.println("kein LogFile");
};
logger.debug("Unify:" + unifyCons.toString());
logger.info("Unify:" + unifyCons.toString());
unifyCons = unifyCons.map(distributeInnerVars);
logger.debug("\nUnify_distributeInnerVars: " + unifyCons.toString());
// Set<Set<UnifyPair>> results = new HashSet<>(); moved to the front
logger.debug("FC:\\" + finiteClosure.toString() + "\n");
logger.debug(ASTTypePrinter.print(sf));
logger.info(ASTTypePrinter.print(sf));
// logFile.flush();
logger.info("Unify nach Oder-Constraints-Anpassung:" + unifyCons.toString());
Set<PlaceholderType> varianceTPHold;
Set<PlaceholderType> varianceTPH = new HashSet<>();
varianceTPH = varianceInheritanceConstraintSet(unifyCons);
/*
* PL 2018-11-07 wird in varianceInheritanceConstraintSet erledigt do { //PL 2018-11-05 Huellenbildung Variance auf alle TPHs der Terme auf der jeweiligen //anderen Seite übertragen varianceTPHold = new HashSet<>(varianceTPH); varianceTPH = varianceInheritanceConstraintSet(unifyCons); unifyCons.map( y -> { if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType() instanceof PlaceholderType)) { if (((PlaceholderType)y.getLhsType()).getVariance() != 0 &&
* ((PlaceholderType)y.getRhsType()).getVariance() == 0) { ((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType( )).getVariance()); } if (((PlaceholderType)y.getLhsType()).getVariance() == 0 && ((PlaceholderType)y.getRhsType()).getVariance() != 0) { ((PlaceholderType)y.getLhsType()).setVariance(((PlaceholderType)y.getRhsType( )).getVariance()); } } return y; } ); } while (!varianceTPHold.equals(varianceTPH));
*/
// Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure,
// logFile, log);
// Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
List<Set<Constraint<UnifyPair>>> oderConstraints = unifyCons.getOderConstraints()// .stream().map(x -> {
/*
* Set<Set<UnifyPair>> ret = new HashSet<>(); for (Constraint<UnifyPair> y : x) { ret.add(new HashSet<>(y)); } return ret; }).collect(Collectors.toCollection(ArrayList::new))
*/;
if (ConsoleInterface.unifyServerUrl.isPresent()) {
UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
UnifyContext context = new UnifyContext(logger, true, urm, usedTasks, placeholderRegistry);
SocketFuture<UnifyResultPacket> future = SocketClient.execute(
UnifyRequestPacket.create(finiteClosure, cons, unifyCons, context.placeholderRegistry())
);
SocketClient.execute(SetAutoclosePacket.create());
return future.get().getResultSet(context);
}
return results.stream().map((unifyPairs -> new ResultSet(UnifyTypeFactory.convert(unifyPairs, Pair.generateTPHMap(cons))))).collect(Collectors.toList());
else if (resultmodel) {
/* UnifyResultModel Anfang */
UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
UnifyResultListenerImpl li = new UnifyResultListenerImpl();
urm.addUnifyResultListener(li);
UnifyContext context = new UnifyContext(logger, true, urm, usedTasks, placeholderRegistry);
TypeUnify.unifyParallel(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
logger.info("RESULT Final: " + li.getResults());
logger.info("Constraints for Generated Generics: " + " ???");
logFile.write("RES_FINAL: " + li.getResults().toString() + "\n");
// logFile.flush();
return li.getResults();
}
/* UnifyResultModel End */
else {
// Set<Set<UnifyPair>> result = unify.unify(unifyCons.getUndConstraints(),
// oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons,
// finiteClosure));
UnifyContext context = new UnifyContext(logger, false, new UnifyResultModel(cons, finiteClosure), usedTasks, placeholderRegistry);
Set<Set<UnifyPair>> result = TypeUnify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
logger.info("RESULT: " + result);
logFile.write("RES: " + result.toString() + "\n");
// logFile.flush();
results.addAll(result);
results = results.stream().map(x -> {
Optional<Set<UnifyPair>> res = new RuleSet(placeholderRegistry).subst(x.stream().map(y -> {
if (y.getPairOp() == PairOperator.SMALLERDOTWC)
y.setPairOp(PairOperator.EQUALSDOT);
return y; // all pairs a <.? b are replaced by a =. b
}).collect(Collectors.toCollection(HashSet::new)));
if (res.isPresent()) { // if subst returns a result, something was changed
return new TypeUnifyTask(context).applyTypeUnificationRules(res.get(), finiteClosure);
} else
return x; // if nothing was changed, x is returned unmodified
}).collect(Collectors.toCollection(HashSet::new));
logger.info("RESULT Final: " + results);
logger.info("Constraints for Generated Generics: " + " ???");
logger.debug("RES_FINAL: " + results.toString() + "\n");
// logFile.flush();
logger.debug("PLACEHOLDERS: " + placeholderRegistry);
// logFile.flush();
}
return results.stream().map((unifyPairs -> new ResultSet(UnifyTypeFactory.convert(unifyPairs, Pair.generateTPHMap(cons), placeholderRegistry)))).collect(Collectors.toList());
}
/**
@@ -586,10 +607,6 @@ public class JavaTXCompiler {
}
}
/**
* @param path - output-Directory can be null, then class file output is in the same directory as the parsed source files
* @return
*/
public Map<JavaClassName, byte[]> generateBytecode(File sourceFile) throws ClassNotFoundException, IOException {
var sf = sourceFiles.get(sourceFile);
if (sf.isGenerated()) return null;
@@ -644,15 +661,15 @@ public class JavaTXCompiler {
FileOutputStream output;
for (JavaClassName name : classFiles.keySet()) {
byte[] bytecode = classFiles.get(name);
System.out.println("generating " + name + ".class file ...");
defaultLogger.info("generating " + name + ".class file ...");
var subPath = preserveHierarchy ? path : Path.of(path.toString(), name.getPackageName().split("\\.")).toFile();
File outputFile = new File(subPath, name.getClassName() + ".class");
outputFile.getAbsoluteFile().getParentFile().mkdirs();
System.out.println(outputFile);
defaultLogger.info(outputFile.toString());
output = new FileOutputStream(outputFile);
output.write(bytecode);
output.close();
System.out.println(name + ".class file generated");
defaultLogger.success(name + ".class file generated");
}
}

View File

@@ -0,0 +1,31 @@
package de.dhbwstuttgart.core;
import de.dhbwstuttgart.server.SocketServer;
public class JavaTXServer {
public static boolean isRunning = false;
final SocketServer socketServer;
public JavaTXServer(int port) {
this.socketServer = new SocketServer(port);
}
public void listen() {
isRunning = true;
socketServer.start();
}
public void forceStop() {
try {
socketServer.stop();
}
catch (InterruptedException exception) {
System.err.println("Interrupted socketServer: " + exception);
}
isRunning = false;
}
}
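
For reference, the --server-mode path in ConsoleInterface reduces to the following two calls; the port value here is only an example.

// Matches the --server-mode branch in ConsoleInterface; 8080 is an arbitrary example port.
JavaTXServer server = new JavaTXServer(8080);
server.listen(); // starts the underlying SocketServer and begins accepting unify requests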

View File

@@ -0,0 +1,11 @@
package de.dhbwstuttgart.exceptions;
/**
* A RuntimeException used when a unification is cancelled.
* Throwing an exception allows a cancellation to be detected from within method calls as well,
* where previously only a return X; would have taken place.
*/
public class UnifyCancelException extends RuntimeException {
}
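
A minimal sketch of how such a cancellation might be signalled from deep inside a computation; the cancelled flag below is only a stand-in for whatever signal the real tasks use.

import de.dhbwstuttgart.exceptions.UnifyCancelException;

class CancellableWorker {
    private volatile boolean cancelled = false;   // hypothetical cancellation signal

    void cancel() { cancelled = true; }

    void step() {
        if (cancelled) {
            // unwinds arbitrarily deep call chains without threading a return value through every method
            throw new UnifyCancelException();
        }
        // ... actual unification work ...
    }
}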

View File

@@ -7,6 +7,7 @@ import de.dhbwstuttgart.parser.antlr.Java17Parser;
import de.dhbwstuttgart.parser.scope.JavaClassRegistry;
import de.dhbwstuttgart.syntaxtree.SourceFile;
import de.dhbwstuttgart.util.Logger;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
@@ -17,6 +18,9 @@ import java.util.ArrayList;
import java.util.List;
public class JavaTXParser {
public static Logger logger = new Logger("Parser");
public static Java17Parser.SourceFileContext parse(File source) throws IOException, java.lang.ClassNotFoundException {
InputStream stream = new FileInputStream(source);
// DEPRECATED: ANTLRInputStream input = new ANTLRInputStream(stream);

View File

@@ -1,4 +1,25 @@
package de.dhbwstuttgart.parser;
public record SourceLoc(String file, int line) {
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
public record SourceLoc(String file, int line) implements ISerializableData {
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
var serialized = new SerialMap();
serialized.put("file", file);
serialized.put("line", line);
return serialized;
}
public static SourceLoc fromSerial(SerialMap data) {
return new SourceLoc(
data.getValue("file").getOf(String.class),
data.getValue("line").getOf(Integer.class)
);
}
}
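
A quick round-trip sketch of the serialization contract above, assuming KeyStorage has a no-arg constructor:

SourceLoc loc = new SourceLoc("Matrix.jav", 42);
SerialMap serial = loc.toSerial(new KeyStorage());   // holds "file" -> "Matrix.jav" and "line" -> 42
SourceLoc restored = SourceLoc.fromSerial(serial);   // value-equal to loc again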

View File

@@ -12,6 +12,7 @@ import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.syntaxtree.type.*;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.model.*;
import java.util.*;
@@ -26,16 +27,21 @@ public class FCGenerator {
*
* @param availableClasses - all parsed classes
*/
public static Set<UnifyPair> toUnifyFC(JavaTXCompiler compiler, Collection<ClassOrInterface> availableClasses, ClassLoader classLoader) throws ClassNotFoundException {
return toFC(availableClasses, classLoader).stream().map(t -> UnifyTypeFactory.convert(compiler, t)).collect(Collectors.toSet());
public static Set<UnifyPair> toUnifyFC(JavaTXCompiler compiler, Collection<ClassOrInterface> availableClasses, ClassLoader classLoader, PlaceholderRegistry placeholderRegistry) throws ClassNotFoundException {
return toFC(
availableClasses,
classLoader,
placeholderRegistry
).stream().map(t -> UnifyTypeFactory.convert(compiler, t, placeholderRegistry))
.collect(Collectors.toSet());
}
public static Set<Pair> toFC(Collection<ClassOrInterface> availableClasses, ClassLoader classLoader) throws ClassNotFoundException {
public static Set<Pair> toFC(Collection<ClassOrInterface> availableClasses, ClassLoader classLoader, PlaceholderRegistry placeholderRegistry) throws ClassNotFoundException {
HashSet<Pair> pairs = new HashSet<>();
//PL 2018-09-18: gtvs moved in front of the for loop so that the same type placeholders are always used.
HashMap<String, RefTypeOrTPHOrWildcardOrGeneric> gtvs = new HashMap<>();
for(ClassOrInterface cly : availableClasses){
List<Pair> newPairs = getSuperTypes(cly, availableClasses, gtvs, classLoader);
List<Pair> newPairs = getSuperTypes(cly, availableClasses, gtvs, classLoader, placeholderRegistry);
pairs.addAll(newPairs);
//For all Functional Interfaces FI: FunN$$<... args of the Functional Interface ...> <. FI is added to FC
@@ -75,8 +81,13 @@ public class FCGenerator {
* @param forType
* @return
*/
private static List<Pair> getSuperTypes(ClassOrInterface forType, Collection<ClassOrInterface> availableClasses, ClassLoader classLoader) throws ClassNotFoundException {
return getSuperTypes(forType, availableClasses, new HashMap<>(), classLoader);
private static List<Pair> getSuperTypes(
ClassOrInterface forType,
Collection<ClassOrInterface> availableClasses,
ClassLoader classLoader,
PlaceholderRegistry placeholderRegistry
) throws ClassNotFoundException {
return getSuperTypes(forType, availableClasses, new HashMap<>(), classLoader, placeholderRegistry);
}
/**
@@ -87,8 +98,13 @@ public class FCGenerator {
* @return
* @throws ClassNotFoundException
*/
private static List<Pair> getSuperTypes(ClassOrInterface forType, Collection<ClassOrInterface> availableClasses,
HashMap<String, RefTypeOrTPHOrWildcardOrGeneric> gtvs, ClassLoader classLoader) throws ClassNotFoundException {
private static List<Pair> getSuperTypes(
ClassOrInterface forType,
Collection<ClassOrInterface> availableClasses,
HashMap<String, RefTypeOrTPHOrWildcardOrGeneric> gtvs,
ClassLoader classLoader,
PlaceholderRegistry placeholderRegistry
) throws ClassNotFoundException {
List<RefTypeOrTPHOrWildcardOrGeneric> params = new ArrayList<>();
//The GTVs that are added in forType:
HashMap<String, RefTypeOrTPHOrWildcardOrGeneric> newGTVs = new HashMap<>();
@@ -147,7 +163,7 @@ public class FCGenerator {
if(superClass.getClassName().equals(ASTFactory.createObjectClass().getClassName())){
superTypes = Arrays.asList(new Pair(ASTFactory.createObjectType(), ASTFactory.createObjectType(), PairOperator.SMALLER));
}else{
superTypes = getSuperTypes(superClass, availableClasses, newGTVs, classLoader);
superTypes = getSuperTypes(superClass, availableClasses, newGTVs, classLoader, placeholderRegistry);
}
retList.add(ret);

View File

@@ -1,5 +1,6 @@
package de.dhbwstuttgart.parser.SyntaxTreeGenerator;
import de.dhbwstuttgart.parser.JavaTXParser;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
@@ -259,7 +260,7 @@ public class StatementGenerator {
ret.setStatement();
return ret;
default:
System.out.println(stmt.getClass());
JavaTXParser.logger.info(stmt.getClass());
throw new NotImplementedException();
}
}

View File

@@ -74,7 +74,7 @@ public class TypeGenerator {
throw new NotImplementedException();
}
} else if (!typeContext.LBRACK().isEmpty()) { // ArrayType über eckige Klammer prüfen
// System.out.println(unannTypeContext.getText());
// JavaTXParser.logger.info(unannTypeContext.getText());
throw new NotImplementedException();
}
/*

View File

@@ -0,0 +1,41 @@
package de.dhbwstuttgart.server;
import de.dhbwstuttgart.util.Logger;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import org.java_websocket.WebSocket;
public class ServerTaskLogger extends Logger {
private final WebSocket webSocket;
private final SocketServer socketServer;
private final LogLevel customLogLevel;
public ServerTaskLogger(WebSocket webSocket, SocketServer socketServer, LogLevel customLogLevel) {
this.webSocket = webSocket;
this.socketServer = socketServer;
this.customLogLevel = customLogLevel;
}
@Override
public boolean isLogLevelActive(LogLevel logLevel) {
return logLevel.isHigherOrEqualTo(customLogLevel);
}
@Override
protected void print(String s, LogLevel logLevel) {
String coloredPrefix = this.getPrefix(logLevel);
if (logLevel.isHigherOrEqualTo(LogLevel.ERROR)) {
socketServer.sendError(webSocket, coloredPrefix + s, false);
}
else {
socketServer.sendMessage(webSocket, coloredPrefix + s);
}
}
@Override
protected void write(String s) {
// under no circumstances write anything to a file
}
}
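
Sketch of the intended use, assuming webSocket and socketServer are in scope of a packet handler; the client-requested log level decides what gets forwarded.

// Forwards INFO and above to the requesting client instead of the server console.
ServerTaskLogger taskLogger = new ServerTaskLogger(webSocket, socketServer, Logger.LogLevel.INFO);
taskLogger.info("starting unify task");   // delivered to the client as a message packet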

View File

@@ -0,0 +1,202 @@
package de.dhbwstuttgart.server;
import com.fasterxml.jackson.core.JsonProcessingException;
import de.dhbwstuttgart.core.ConsoleInterface;
import de.dhbwstuttgart.server.packet.IClientToServerPacket;
import de.dhbwstuttgart.server.packet.IPacket;
import de.dhbwstuttgart.server.packet.IServerToClientPacket;
import de.dhbwstuttgart.server.packet.PacketContainer;
import de.dhbwstuttgart.server.packet.UnifyRequestPacket;
import de.dhbwstuttgart.server.packet.UnifyResultPacket;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.util.Logger;
import java.net.URI;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CountDownLatch;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.java_websocket.client.WebSocketClient;
import org.java_websocket.enums.ReadyState;
import org.java_websocket.handshake.ServerHandshake;
/**
* The Client-side of the websocket
*/
public class SocketClient extends WebSocketClient {
public static Logger logger = new Logger("SocketClient");
/**
* The singleton object
*/
private static SocketClient socketClient = null;
/**
* List of futures that are still waiting to be fulfilled
*/
private final Map<String, SocketFuture<?>> responseFutures = new HashMap<>();
private SocketClient(String url) {
super(
URI.create(url), // target url
//SocketServer.perMessageDeflateDraft, // enable compression
Map.of( // headers
"packetProtocolVersion", SocketServer.packetProtocolVersion
)
);
// make sure the url is in a valid format
final String regex = "^wss?://(\\w+(\\.\\w+)?)*:(\\d+)$";
final Matcher matcher = Pattern.compile(regex).matcher(url);
if (!matcher.find()) {
throw new RuntimeException("Provided string \"" + url + "\" is not a valid server URL! Use pattern ws(s?)://<host.name>:<port>");
}
try {
// wait for the connection to be set up
this.connectBlocking();
// make sure the connection has been established successfully
if (this.getReadyState() != ReadyState.OPEN) {
throw new RuntimeException("WebSocket Client could not connect to remote host at " + this.uri);
}
} catch (InterruptedException exception) {
throw new RuntimeException(exception);
}
// add a shutdown hook to close the connection when the process ends or is stopped by a SIGINT signal
Runtime.getRuntime().addShutdownHook(new Thread(this::close));
}
private SocketClient(String host, int port, boolean secure) throws InterruptedException {
this(String.format("%s://%s:%d/", secure ? "wss" : "ws", host, port));
}
/**
* Singleton access method, creates one if none is available
*
* @return The one and only socketClient
*/
private static SocketClient initializeClient() {
if (socketClient == null) {
socketClient = new SocketClient(ConsoleInterface.unifyServerUrl.get());
}
return socketClient;
}
/**
* Send a packet to the server (connection will be created, if none is found) and return a future
* for the response packet
*/
synchronized public static <T extends IServerToClientPacket> SocketFuture<T> execute(IClientToServerPacket<T> packet) {
SocketClient client = initializeClient();
/*
* Create a future that will be associated with the packet and eventually completed
*/
SocketFuture<T> future = packet.getFuture();
if (!future.isDone()) {
client.responseFutures.put(future.futureId, future);
}
/*
* Establish connection, if not already done.
* Serialize the packet and send it to the server.
* Return the future to be handled by the caller.
*/
try {
String json = PacketContainer.serialize(packet);
client.send(json);
} catch (Exception exception) {
logger.exception(exception);
throw new RuntimeException("Exception occurred in server connection: ", exception);
}
return future;
}
/**
* Shortcut for waiting and retrieving the response immediately
*
* @param packet The packet to send
* @param <T> The type of response packet to await
* @return The response packet, once it is received
*/
public static <T extends IServerToClientPacket> T executeAndGet(IClientToServerPacket<T> packet) {
return SocketClient.execute(packet).get();
}
/**
* Specific client-side implementations to handle incoming packets
*/
protected void handleReceivedPacket(IPacket packet) {
if (!(packet instanceof IServerToClientPacket serverToClientPacket)) {
System.err.println("Received package of invalid type + " + packet.getClass().getName());
this.close();
return;
}
serverToClientPacket.onHandle(this.getConnection(), this);
}
/**
* Complete a registered future, so it can be handled by whoever executed the creator task
*
* @param id The associated id for this future
* @param trigger The object triggering the completion
*/
public void completeResponseFuture(String id, IServerToClientPacket trigger) {
SocketFuture<?> future = this.responseFutures.remove(id);
if (future == null) return;
if (!future.accept(trigger)) {
throw new RuntimeException("Packet " + trigger.getClass().getName() + " tried to complete future, but was not allowed to");
}
}
public static void closeIfOpen() {
if (socketClient != null && socketClient.isOpen()) {
socketClient.close();
}
}
@Override
public void onOpen(ServerHandshake handshakedata) {
logger.success("Connected to server with status " + handshakedata.getHttpStatus());
}
@Override
public void onMessage(String message) {
// logger.info("received: " + message);
IPacket packet = PacketContainer.deserialize(message);
this.handleReceivedPacket(packet);
}
@Override
public void onClose(int code, String reason, boolean remote) {
logger.info(
"Disconnected from server " +
"with code " + code + " " +
(reason.isEmpty() ? "" : "and reason " + reason + " ") +
"(closed by remote: " + remote + ")"
);
if (!this.responseFutures.isEmpty()) {
throw new RuntimeException("Server closed before all required tasks were answered");
}
}
@Override
public void onError(Exception e) {
logger.exception(e);
throw new RuntimeException(e);
}
}
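
The client-side call pattern, as used in JavaTXCompiler's typeInference when --unify-server is set; the request packet is assumed to have been built already.

SocketFuture<UnifyResultPacket> future = SocketClient.execute(requestPacket); // requestPacket: an already-built UnifyRequestPacket
SocketClient.execute(SetAutoclosePacket.create());  // allow the server to close once all tasks are done
UnifyResultPacket result = future.get();            // blocks until the response packet arrives
SocketClient.closeIfOpen();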

View File

@@ -0,0 +1,48 @@
package de.dhbwstuttgart.server;
import de.dhbwstuttgart.server.packet.IServerToClientPacket;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
public class SocketFuture<T extends IServerToClientPacket> extends CompletableFuture<T> {
public final String futureId = UUID.randomUUID().toString();
public final List<Class<T>> allowedTriggers;
public SocketFuture(List<Class<T>> allowedTriggers) {
this.allowedTriggers = allowedTriggers;
}
public boolean accept(IServerToClientPacket trigger) {
if (this.allowedTriggers.contains(trigger.getClass())) {
this.complete((T)trigger);
return true;
}
return false;
}
@Override
public T get() {
try {
return super.get();
}
catch (InterruptedException | ExecutionException exception) {
throw new RuntimeException(exception);
}
}
/**
* Special case where the future is immediately fulfilled without a response packet, similar to
* <code>CompletableFuture.completedFuture()</code> but without a value
*/
public static <R extends IServerToClientPacket> SocketFuture<R> completedFuture() {
SocketFuture<R> dummyFuture = new SocketFuture<>(new ArrayList<>(0));
dummyFuture.complete(null);
return dummyFuture;
}
}
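
How the type gate in accept() is meant to work, sketched with UnifyResultPacket as the expected trigger; incoming stands in for whatever packet the socket just delivered.

SocketFuture<UnifyResultPacket> future = new SocketFuture<>(List.of(UnifyResultPacket.class));
boolean completed = future.accept(incoming);   // true (and the future completes) only if incoming is a UnifyResultPacket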

View File

@@ -0,0 +1,235 @@
package de.dhbwstuttgart.server;
import com.fasterxml.jackson.core.JsonProcessingException;
import de.dhbwstuttgart.server.packet.ErrorPacket;
import de.dhbwstuttgart.server.packet.IClientToServerPacket;
import de.dhbwstuttgart.server.packet.IPacket;
import de.dhbwstuttgart.server.packet.MessagePacket;
import de.dhbwstuttgart.server.packet.PacketContainer;
import de.dhbwstuttgart.util.Logger;
import java.net.InetSocketAddress;
import java.util.Collections;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.java_websocket.WebSocket;
import org.java_websocket.drafts.Draft;
import org.java_websocket.drafts.Draft_6455;
import org.java_websocket.extensions.permessage_deflate.PerMessageDeflateExtension;
import org.java_websocket.handshake.ClientHandshake;
import org.java_websocket.server.WebSocketServer;
public class SocketServer extends WebSocketServer {
public static Logger logger = new Logger("SocketServer");
public static final int maxTasksPerSession = 100;
private static boolean serverRunning = false;
/**
* Increase this every time a breaking change to the server communication is made.
* This will prevent errors when the server version and client version do not match.
*/
public static final String packetProtocolVersion = "1";
// create an executor for tasks that will always keep at least one task around
private final ThreadPoolExecutor taskExecutor = new ThreadPoolExecutor(1, Integer.MAX_VALUE, 60L, TimeUnit.SECONDS, new SynchronousQueue<>());
// create an executor for scheduling timeouts
private final ScheduledExecutorService timeoutExecutor = Executors.newSingleThreadScheduledExecutor();
public SocketServer(int port) {
super(new InetSocketAddress(port));
this.setConnectionLostTimeout(30);
serverRunning = true;
// add a shutdown hook to close all connections when the process ends or is stopped by a SIGINT signal
Runtime.getRuntime().addShutdownHook(new Thread(this::onShutdown));
}
public static boolean isServerRunning() {
return serverRunning;
}
private void onShutdown() {
serverRunning = false;
try {
for (var webSocket : this.getConnections()) {
this.sendError(webSocket, "Sorry, i am shutting down. You are now on your own, good Luck!", true);
webSocket.close();
}
this.stop();
taskExecutor.shutdown();
timeoutExecutor.shutdown();
} catch (InterruptedException exception) {
// we are shutting down anyway
}
}
@Override
public void onOpen(WebSocket webSocket, ClientHandshake clientHandshake) {
String ppv = clientHandshake.getFieldValue("packetProtocolVersion");
if (!ppv.equals(packetProtocolVersion)) {
this.sendError(webSocket,
"Mismatch in packet protocol version! Client (you): \"" + ppv + "\" and Server (me): \"" + packetProtocolVersion + "\"",
true
);
webSocket.close(1);
return;
}
SocketData socketData = new SocketData(webSocket);
logger.info("New connection: " + socketData.id + " (with ppv " + ppv + ")");
try {
sendMessage(webSocket, "Welcome to the server!");
// wait 10 seconds for the client to send a task and close the connection if nothing has been received by then
final int secondsUntilTimeout = 10;
timeoutExecutor.schedule(() -> {
if (webSocket.<SocketData>getAttachment().totalTasks.get() > 0 || !webSocket.isOpen()) {
return;
}
sendMessage(webSocket, "No task received after " + secondsUntilTimeout + " seconds. Closing connection...");
webSocket.close();
},
secondsUntilTimeout,
TimeUnit.SECONDS
);
// and finally, when your program wants to exit
} catch (Exception e) {
logger.exception(e);
webSocket.close(1, e.getMessage());
}
}
@Override
public void onClose(WebSocket webSocket, int code, String reason, boolean remote) {
SocketData socketData = webSocket.getAttachment();
logger.info("Connection closed: " + socketData.id);
logger.info(
"Disconnected client " + socketData.id + " " +
"with code " + code + " " +
(reason.isEmpty() ? "" : "and reason " + reason + " ") +
"(closed by client: " + remote + ")"
);
}
@Override
public void onMessage(WebSocket webSocket, String s) {
// logger.info("Received: " + s.substring(0, 50));
IPacket reconstructedPacket = PacketContainer.deserialize(s);
try {
this.onPacketReceived(webSocket, reconstructedPacket);
} catch (JsonProcessingException e) {
logger.exception(e);
this.log(webSocket, "Error on processing incoming package: " + e.getMessage());
}
}
@Override
public void onError(WebSocket webSocket, Exception e) {
if (webSocket != null) {
log(webSocket, e.getMessage());
webSocket.close();
}
logger.exception(e);
}
@Override
public void onStart() {
logger.success("Websocket server started on port " + this.getPort());
}
/**
* A shorthand method for sending informational messages to the client
*/
public void sendMessage(WebSocket webSocket, String text) {
try {
MessagePacket message = MessagePacket.create(text);
webSocket.send(PacketContainer.serialize(message));
} catch (Exception e) {
System.err.println("Failed to send message: " + text);
logger.exception(e);
}
}
/**
* A shorthand method for sending error messages to the client
*/
public void sendError(WebSocket webSocket, String text, boolean isFatal) {
try {
ErrorPacket error = ErrorPacket.create(text, isFatal);
webSocket.send(PacketContainer.serialize(error));
} catch (Exception e) {
logger.exception(e);
log(webSocket, "Failed to send error: " + text);
}
}
/**
* The server-side implementation of how received packets are handled
*/
private void onPacketReceived(WebSocket webSocket, IPacket packet) throws JsonProcessingException {
SocketData socketData = webSocket.getAttachment();
// limit the number of tasks per connection
if (socketData.totalTasks.get() >= maxTasksPerSession) {
sendError(webSocket, "Exceeded the maximum amount of " + maxTasksPerSession + " tasks per session", true);
webSocket.close();
return;
}
// only allow packets that are meant to be handled by the server
if (!(packet instanceof IClientToServerPacket<?> clientToServerPacket)) {
sendMessage(webSocket, "The package of type " + packet.getClass().getName() + " is not handled by the server!");
return;
}
// update the socket data
socketData.unhandledTasks.incrementAndGet();
socketData.totalTasks.incrementAndGet();
// hand the packet to the task executor so it is processed asynchronously
CompletableFuture.runAsync(() -> {
clientToServerPacket.onHandle(webSocket, this);
int remainingUnhandledTasks = socketData.unhandledTasks.decrementAndGet();
if (socketData.closeIfNoTasksLeft) {
// if the websocket has no unhandled tasks left, close the connection
if (remainingUnhandledTasks <= 0) {
sendMessage(webSocket, "All requested tasks finished! Closing connection...");
webSocket.close();
}
}
}, taskExecutor);
}
public void log(WebSocket webSocket, String msg) {
String socketId = (webSocket == null) ? "???" : webSocket.<SocketData>getAttachment().id;
logger.info("[" + socketId + "] " + msg);
}
/**
* The data that is associated server-side with any connected client.
* This makes it possible to store information that can be mapped to any existing connection.
*/
public static class SocketData {
public final String id;
public final AtomicInteger unhandledTasks = new AtomicInteger(0);
public final AtomicInteger totalTasks = new AtomicInteger(0);
public boolean closeIfNoTasksLeft = false;
public SocketData(WebSocket webSocket) {
this.id = UUID.randomUUID().toString();
webSocket.setAttachment(this);
}
}
}

View File

@@ -0,0 +1,35 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.SocketServer;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialUUID;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import org.java_websocket.WebSocket;
public class DebugPacket implements IClientToServerPacket.Void, IServerToClientPacket {
public SerialUUID a1;
public SerialUUID a2;
public SerialMap b1;
public SerialMap b2;
public SerialList<? extends ISerialNode> c1;
public SerialList<? extends ISerialNode> c2;
public SerialValue<?> d1;
public SerialValue<?> d2;
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketClient socketClient) {}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketServer socketServer) {}
@JsonIgnore
public SocketFuture<IServerToClientPacket> getFuture() {
return SocketFuture.completedFuture();
}
}

View File

@@ -0,0 +1,36 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.SocketServer;
import org.java_websocket.WebSocket;
/**
* A packet to send simple error messages between the client and the server
*/
public class ErrorPacket implements IServerToClientPacket {
/**
* The error message from the server that should be logged as an error and may abort the process
*/
public String error;
public boolean isFatal;
@JsonIgnore
public static ErrorPacket create(String error, boolean isFatal) {
ErrorPacket packet = new ErrorPacket();
packet.error = error;
packet.isFatal = isFatal;
return packet;
}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketClient socketClient) {
SocketClient.logger.exception(new RuntimeException(this.error));
if (this.isFatal) {
socketClient.close(1, "Received fatal error from server");
}
}
}

View File

@@ -0,0 +1,26 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.SocketServer;
import org.java_websocket.WebSocket;
/**
* A packet that is sent to the server. Use the <code>Void</code> sub-interface for packets that expect no response
*
* @param <T> The response packet that will fulfill the future.
*/
public interface IClientToServerPacket<T extends IServerToClientPacket> extends IPacket {
@JsonIgnore
void onHandle(WebSocket webSocket, SocketServer socketServer);
@JsonIgnore
SocketFuture<T> getFuture();
/**
* Special case where the packet remains unanswered by the server
*/
interface Void extends IClientToServerPacket<IServerToClientPacket> {}
}
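/**
 * Illustrative sketch (not part of the original commit): a minimal packet that expects no answer
 * implements the Void sub-interface above. The name PingPacket is hypothetical; a real packet would
 * additionally have to be registered in PacketContainer.
 */
class PingPacket implements IClientToServerPacket.Void {
    @JsonIgnore
    public void onHandle(WebSocket webSocket, SocketServer socketServer) {
        // server-side handling: just log that the ping arrived
        socketServer.log(webSocket, "ping received");
    }
    @JsonIgnore
    public SocketFuture<IServerToClientPacket> getFuture() {
        // no response is expected, so hand back an already completed future
        return SocketFuture.completedFuture();
    }
}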

View File

@@ -0,0 +1,12 @@
package de.dhbwstuttgart.server.packet;
/**
* The shared interface for all packets of the client-server connection.
* A packet must always:
* - Have a default / no-parameter constructor
* - Have only serializable public properties (or exclude them via Jackson annotations)
*
*/
public interface IPacket {
}

View File

@@ -0,0 +1,12 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import org.java_websocket.WebSocket;
public interface IServerToClientPacket extends IPacket {
@JsonIgnore
void onHandle(WebSocket webSocket, SocketClient socketClient);
}

View File

@@ -0,0 +1,35 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.SocketServer;
import org.java_websocket.WebSocket;
/**
* A fallback packet that is generated if the received JSON could not be mapped to an existing packet type
*/
public class InvalidPacket implements IClientToServerPacket.Void, IServerToClientPacket {
/**
* If available, the error that caused this packet to be created
*/
public String error = "<unknown error>";
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketClient socketClient) {
SocketClient.logger.error("InvalidPacket: " + this.error);
}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketServer socketServer) {
socketServer.log(webSocket, "InvalidPacket: " + this.error);
}
@JsonIgnore
public SocketFuture<IServerToClientPacket> getFuture() {
return SocketFuture.completedFuture();
}
}

View File

@@ -0,0 +1,40 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.SocketServer;
import org.java_websocket.WebSocket;
/**
* A packet to send simple informational messages between the client and the server
*/
public class MessagePacket implements IClientToServerPacket.Void, IServerToClientPacket {
/**
* The informational message that should be logged or output
*/
public String message;
@JsonIgnore
public static MessagePacket create(String message) {
MessagePacket packet = new MessagePacket();
packet.message = message;
return packet;
}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketClient socketClient) {
SocketClient.logger.info("SocketMessage: " + this.message);
}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketServer socketServer) {
socketServer.log(webSocket, this.message);
}
@JsonIgnore
public SocketFuture<IServerToClientPacket> getFuture() {
return SocketFuture.completedFuture();
}
}

View File

@@ -0,0 +1,98 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import de.dhbwstuttgart.util.Logger;
/**
* A wrapper for the packet to ensure correct serialization/deserialization and make it possible to detect the matching
* packet type for deserialization.
*/
@JsonInclude(JsonInclude.Include.NON_NULL)
public class PacketContainer {
// The jackson serializer / deserializer tool
private static final ObjectMapper objectMapper = new ObjectMapper();
/*
* The available packet types. The one type that is represented in the JSON should always be the ONLY non-null value.
* They have to be public (for the moment) to let jackson fill them in while deserializing
*/
public ErrorPacket errorPacket = null;
public MessagePacket messagePacket = null;
public InvalidPacket invalidPacket = null;
public UnifyRequestPacket unifyRequestPacket = null;
public UnifyResultPacket unifyResultPacket = null;
public DebugPacket debugPacket = null;
public SetAutoclosePacket setAutoclosePacket = null;
/**
* Generate the JSON string for the given packet
*
* @param packet The packet to serialize
* @return The JSON representation of the packet
*/
public static String serialize(IPacket packet) throws JsonProcessingException {
objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
PacketContainer container = new PacketContainer();
if (packet instanceof ErrorPacket)
container.errorPacket = (ErrorPacket) packet;
else if (packet instanceof MessagePacket)
container.messagePacket = (MessagePacket) packet;
else if (packet instanceof UnifyRequestPacket)
container.unifyRequestPacket = (UnifyRequestPacket) packet;
else if (packet instanceof UnifyResultPacket)
container.unifyResultPacket = (UnifyResultPacket) packet;
else if (packet instanceof DebugPacket)
container.debugPacket = (DebugPacket) packet;
else if (packet instanceof SetAutoclosePacket)
container.setAutoclosePacket = (SetAutoclosePacket) packet;
// Add new packets here and in the deserialize method
else
throw new RuntimeException("Cannot map packet to any known packet class");
return objectMapper.writeValueAsString(container);
}
/**
* Use the JSON string to generate the matching packet object
*
* @param json The serialized representation of a packet container
* @return The deserialized Packet object
*/
public static IPacket deserialize(String json) {
objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
try {
PacketContainer container = objectMapper.readValue(json, PacketContainer.class);
if (container.errorPacket != null)
return container.errorPacket;
if (container.messagePacket != null)
return container.messagePacket;
if (container.invalidPacket != null)
return container.invalidPacket;
if (container.unifyRequestPacket != null)
return container.unifyRequestPacket;
if (container.unifyResultPacket != null)
return container.unifyResultPacket;
if (container.debugPacket != null)
return container.debugPacket;
if (container.setAutoclosePacket != null)
return container.setAutoclosePacket;
// Add new packets here and in the serialize method
throw new RuntimeException("Cannot map received json to any known packet class");
} catch (Exception e) {
(new Logger()).exception(e);
InvalidPacket packet = new InvalidPacket();
packet.error = e.getMessage();
return packet;
}
}
}
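/**
 * Illustrative sketch (not part of the original commit): a round trip through the two static methods
 * above. The class and its main method are hypothetical and only demonstrate the API.
 */
class PacketContainerRoundTripExample {
    public static void main(String[] args) throws JsonProcessingException {
        // wrap a MessagePacket and turn it into its JSON representation ...
        String json = PacketContainer.serialize(MessagePacket.create("hello"));
        // ... and map the JSON back onto the concrete packet type
        IPacket packet = PacketContainer.deserialize(json);
        System.out.println("round trip ok: " + (packet instanceof MessagePacket));
    }
}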

View File

@@ -0,0 +1,32 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.SocketServer;
import org.java_websocket.WebSocket;
/**
* Normally, a connection stays open until either the client or the server process ends.
* Send this packet to inform the server that the connection can be closed once all tasks are done
*/
public class SetAutoclosePacket implements IClientToServerPacket.Void {
public int dummyProperty = 1;
@JsonIgnore
public static SetAutoclosePacket create() {
return new SetAutoclosePacket();
}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketServer socketServer) {
webSocket.<SocketServer.SocketData>getAttachment().closeIfNoTasksLeft = true;
socketServer.log(webSocket, "Marked connection as autoclose");
}
@JsonIgnore
public SocketFuture<IServerToClientPacket> getFuture() {
return SocketFuture.completedFuture();
}
}
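/*
 * Illustrative usage (not part of the original commit): a client enables autoclose by sending this
 * packet over its open connection, e.g. (exception handling omitted)
 *
 *   webSocket.send(PacketContainer.serialize(SetAutoclosePacket.create()));
 *
 * Only WebSocket#send(String) from org.java_websocket and the PacketContainer class of this package
 * are assumed here; the concrete client-side send helper is not shown in this file.
 */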

View File

@@ -0,0 +1,164 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.core.ConsoleInterface;
import de.dhbwstuttgart.server.ServerTaskLogger;
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.SocketServer;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.UnifyResultListenerImpl;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import de.dhbwstuttgart.util.Logger;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.concurrent.ForkJoinPool;
import org.java_websocket.WebSocket;
/**
* A packet to send all required data for the unification algorithm to the server and request the unification
*/
public class UnifyRequestPacket implements IClientToServerPacket<UnifyResultPacket> {
public SerialMap finiteClosure;
public SerialMap constraintSet;
public SerialMap unifyConstraintSet;
public SerialMap serialKeyStorage;
public SerialValue<?> placeholders;
public SerialList<SerialMap> factoryplaceholders;
public String futureId;
public int logLevel;
@JsonIgnore
private KeyStorage keyStorage = new KeyStorage();
@JsonIgnore
private boolean keyStorageLoaded = false;
public static UnifyRequestPacket create(
FiniteClosure finiteClosure,
ConstraintSet<Pair> constraintSet,
ConstraintSet<UnifyPair> unifyConstraintSet,
PlaceholderRegistry placeholderRegistry
) {
UnifyRequestPacket packet = new UnifyRequestPacket();
// store constraint and finite closure
packet.finiteClosure = finiteClosure.toSerial(packet.keyStorage);
packet.constraintSet = constraintSet.toSerial(packet.keyStorage);
packet.unifyConstraintSet = unifyConstraintSet.toSerial(packet.keyStorage);
// store placeholder registry
var serialRegistry = placeholderRegistry.toSerial(packet.keyStorage);
packet.placeholders = serialRegistry.getValue("ph");
packet.factoryplaceholders = serialRegistry.getList("factoryPh").assertListOfMaps();
// store referenced objects separately
packet.serialKeyStorage = packet.keyStorage.toSerial(packet.keyStorage);
packet.logLevel = ConsoleInterface.logLevel.getValue();
return packet;
}
@JsonIgnore
public void loadKeyStorage(UnifyContext context) {
if (!keyStorageLoaded) {
keyStorageLoaded = true;
keyStorage = KeyStorage.fromSerial(this.serialKeyStorage, context);
}
}
@JsonIgnore
private FiniteClosure retrieveFiniteClosure(UnifyContext context) {
this.loadKeyStorage(context);
return FiniteClosure.fromSerial(this.finiteClosure, context, keyStorage);
}
@JsonIgnore
private ConstraintSet<Pair> retrieveConstraintSet(UnifyContext context) {
this.loadKeyStorage(context);
return ConstraintSet.fromSerial(this.constraintSet, context, Pair.class, keyStorage);
}
@JsonIgnore
private ConstraintSet<UnifyPair> retrieveUnifyConstraintSet(UnifyContext context) {
this.loadKeyStorage(context);
return ConstraintSet.fromSerial(this.unifyConstraintSet, context, UnifyPair.class, keyStorage);
}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketServer socketServer) {
socketServer.sendMessage(webSocket, "You requested a unify! Please wait until I calculated everything...");
socketServer.log(webSocket, "Client requested a unification. Starting now...");
try {
var placeholderRegistry = new PlaceholderRegistry();
ArrayList<String> existingPlaceholders = (ArrayList) this.placeholders.getOf(ArrayList.class);
existingPlaceholders.forEach(placeholderRegistry::addPlaceholder);
Logger logger = new ServerTaskLogger(
webSocket,
socketServer,
Logger.LogLevel.fromValue(
Math.max(this.logLevel, Logger.LogLevel.INFO.getValue())
)
);
var unifyContext = new UnifyContext(logger, true,
new UnifyResultModel(new ConstraintSet<>(), new FiniteClosure(new HashSet<>(), logger, placeholderRegistry)),
new UnifyTaskModel(), ForkJoinPool.commonPool(), placeholderRegistry
);
this.factoryplaceholders.stream()
.map(p -> (PlaceholderType)UnifyType.fromSerial(p, unifyContext))
.forEach(placeholderRegistry.UnifyTypeFactory_PLACEHOLDERS::add);
// start the unification algorithm from the received data
IFiniteClosure finiteClosure = this.retrieveFiniteClosure(unifyContext);
ConstraintSet<Pair> constraintSet = this.retrieveConstraintSet(unifyContext);
ConstraintSet<UnifyPair> unifyConstraintSet = this.retrieveUnifyConstraintSet(unifyContext);
var resultModel = new UnifyResultModel(constraintSet, finiteClosure);
UnifyResultListenerImpl resultListener = new UnifyResultListenerImpl();
resultModel.addUnifyResultListener(resultListener);
TypeUnify.unifyParallel(
unifyConstraintSet.getUndConstraints(),
unifyConstraintSet.getOderConstraints(),
finiteClosure,
unifyContext.newWithResultModel(resultModel)
);
var resultSets = resultListener.getResults();
socketServer.log(webSocket, "Finished unification");
socketServer.sendMessage(webSocket, "Unification finished. Found " + resultSets.size() + " result sets");
if (webSocket.isOpen()) {
UnifyResultPacket resultPacket = UnifyResultPacket.create(resultSets, futureId);
webSocket.send(PacketContainer.serialize(resultPacket));
}
} catch (Exception e) {
SocketServer.logger.exception(e);
socketServer.log(webSocket, e.getMessage());
}
}
@JsonIgnore
public SocketFuture<UnifyResultPacket> getFuture() {
var future = new SocketFuture<>(List.of(UnifyResultPacket.class));
futureId = future.futureId;
return future;
}
}

View File

@@ -0,0 +1,45 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.List;
import org.java_websocket.WebSocket;
/**
* A packet to send all calculated data from the unification algorithm back to the client
*/
public class UnifyResultPacket implements IServerToClientPacket {
public SerialList<ISerialNode> results;
public SerialMap keyStorage;
public String futureId;
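// Mirrors the futureId generated in UnifyRequestPacket.getFuture(); onHandle below hands it to
// socketClient.completeResponseFuture(...) so the client can resolve the matching SocketFuture.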
public static UnifyResultPacket create(List<ResultSet> resultSets, String futureId) {
UnifyResultPacket serialized = new UnifyResultPacket();
KeyStorage keyStorage = new KeyStorage();
serialized.results = SerialList.fromMapped(resultSets, resultSet -> resultSet.toSerial(keyStorage));
serialized.keyStorage = keyStorage.toSerial(keyStorage);
serialized.futureId = futureId;
return serialized;
}
@JsonIgnore
public List<ResultSet> getResultSet(UnifyContext context) {
return this.results.assertListOfMaps().stream()
.map(resultData -> ResultSet.fromSerial(resultData, context)).toList();
}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketClient socketClient) {
SocketClient.logger.info("Received unify result");
socketClient.completeResponseFuture(futureId, this);
}
}

View File

@@ -0,0 +1,16 @@
package de.dhbwstuttgart.server.packet.dataContainers;
import de.dhbwstuttgart.exceptions.NotImplementedException;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
public interface ISerializableData {
public abstract ISerialNode toSerial(KeyStorage keyStorage);
public static Object fromSerial(SerialMap data, UnifyContext context) {
throw new NotImplementedException("Missing implementation of \"fromSerial\" for a serializable element");
}
}
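/*
 * Illustrative contract (not part of the original commit): an implementing class pairs toSerial with
 * its own static fromSerial of the same shape, e.g. for a hypothetical class Example:
 *
 *   public SerialMap toSerial(KeyStorage keyStorage) { ... write the fields into a SerialMap ... }
 *   public static Example fromSerial(SerialMap data, UnifyContext context) { ... rebuild the object ... }
 *
 * The static method declared above only documents that expected signature; static interface methods
 * are not inherited, so each class (e.g. GenericRefType further below) declares its own fromSerial.
 */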

View File

@@ -0,0 +1,103 @@
package de.dhbwstuttgart.server.packet.dataContainers;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
public class KeyStorage implements ISerializableData {
/**
* Counter used to generate a unique identifier for every element, so elements can be referenced in the JSON
*/
protected AtomicInteger identifierCount = new AtomicInteger();
/**
* Store the serialized element per identifier when serializing
*/
protected SerialMap serializedElements = new SerialMap();
/**
* Store the unserialized element per identifier when unserializing
*/
protected Map<String, ISerializableData> unserializedElements = new HashMap<>();
/**
* Generate a new unique identifier for an element
*/
public String getIdentifier() {
return this.identifierCount.incrementAndGet() + "_";
}
/**
* Checks if the given element identifier belongs to an element that was already serialized
*/
public boolean isAlreadySerialized(String identifier) {
return this.serializedElements.containsKey(identifier);
}
/**
* Checks if the given element identifier belongs to an element that was already unserialized
*/
public boolean isAlreadyUnserialized(String identifier) {
return this.unserializedElements.containsKey(identifier);
}
/**
* Register a serialized element to prevent it from being serialized again
*/
public void putSerialized(String identifier, SerialMap serializedElement) {
this.serializedElements.put(identifier, serializedElement);
}
/**
* Retrieve a serialized element
*/
public SerialMap getSerialized(String identifier) {
if (!this.serializedElements.containsKey(identifier)) {
throw new RuntimeException("No serialized element of identifier " + identifier + " available to get");
}
return this.serializedElements.getMap(identifier);
}
/**
* Register an unserialized element to prevent it from being unserialized again
*/
public void putUnserialized(String identifier, ISerializableData element) {
this.unserializedElements.put(identifier, element);
}
/**
* Retrieve an unserialized element
*/
public <T extends ISerializableData> T getUnserialized(String identifier, Class<T> target) {
if (!this.unserializedElements.containsKey(identifier)) {
throw new RuntimeException("No unserialized element of identifier " + identifier + " available to get");
}
var element = this.unserializedElements.get(identifier);
if (target.isInstance(element)) {
return (T) element;
}
throw new RuntimeException("Failed to get unserialized element from KeyStorage. Expected instance of " +
target.getName() + " but found " + element.getClass().getName());
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("serializedElements", this.serializedElements);
return serialized;
}
public static KeyStorage fromSerial(SerialMap data, UnifyContext context) {
var serializedConstraintsData = data.getMap("serializedElements");
var constraintContext = new KeyStorage();
for (var entry : serializedConstraintsData.entrySet()) {
if (entry.getValue() instanceof SerialMap valueMap) {
constraintContext.putSerialized(entry.getKey(), valueMap);
}
}
return constraintContext;
}
}
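/*
 * Illustrative usage (not part of the original commit): a toSerial implementation can use this
 * storage to serialize a shared element only once and reference it by identifier afterwards:
 *
 *   String id = ...;                                   // stable identifier of the shared element
 *   if (!keyStorage.isAlreadySerialized(id)) {
 *       keyStorage.putSerialized(id, sharedElement.toSerial(keyStorage).assertType(SerialMap.class));
 *   }
 *   serialized.put("ref", id);                         // store only the reference
 *
 * The identifier and field names are placeholders; the concrete serializers in this commit decide
 * how references are stored.
 */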

View File

@@ -0,0 +1,31 @@
package de.dhbwstuttgart.server.packet.dataContainers.serialized;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
/**
* Use the following classes for an intermediate serialized tree structure
*/
@JsonTypeInfo(
use = JsonTypeInfo.Id.NAME,
include = JsonTypeInfo.As.PROPERTY,
property = "_t"
)
@JsonSubTypes({
@JsonSubTypes.Type(value = SerialMap.class, name = "m"),
@JsonSubTypes.Type(value = SerialList.class, name = "l"),
@JsonSubTypes.Type(value = SerialValue.class, name = "v"),
@JsonSubTypes.Type(value = SerialUUID.class, name = "u")
})
public interface ISerialNode {
default <T extends ISerialNode> T assertType(Class<T> type) {
if (type.isInstance(this)) {
return (T) this;
}
throw new RuntimeException("Expected ISerialNode to be of type " + type.getName()
+ " but found " + this.getClass().getName() + " instead");
}
}
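/*
 * Illustrative usage (not part of the original commit): assertType lets callers narrow a node
 * reference without an explicit cast, e.g.
 *
 *   ISerialNode node = ...;
 *   SerialMap map = node.assertType(SerialMap.class);   // throws if node is not a SerialMap
 *
 * The "_t" property declared above is the type tag Jackson writes and reads to pick the matching
 * concrete subtype during (de)serialization.
 */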

View File

@@ -0,0 +1,74 @@
package de.dhbwstuttgart.server.packet.dataContainers.serialized;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.function.Function;
import java.util.stream.Stream;
public class SerialList<I extends ISerialNode> extends ArrayList<I> implements ISerialNode {
public SerialList() {}
public SerialList(Collection<I> data) {
this.addAll(data);
}
public SerialList(Stream<I> data) {
this(data.toList());
}
public SerialList(I[] data) {
this(Arrays.stream(data).toList());
}
@SafeVarargs
@JsonIgnore
public static <A extends ISerialNode> ArrayList<A> from(A ...values) {
ArrayList<A> list = new SerialList<>();
Collections.addAll(list, values);
return list;
}
@JsonIgnore
public static <A,B extends ISerialNode> SerialList<B> fromMapped(Stream<A> data, Function<A,B> mapper) {
return new SerialList<>(data.map(mapper).toList());
}
@JsonIgnore
public static <A,B extends ISerialNode> SerialList<B> fromMapped(Collection<A> data, Function<A,B> mapper) {
return SerialList.fromMapped(data.stream(), mapper);
}
@JsonIgnore
public static <A,B extends ISerialNode> SerialList<B> fromMapped(A[] data, Function<A,B> mapper) {
return SerialList.fromMapped(Arrays.stream(data), mapper);
}
@JsonIgnore
public SerialList<SerialMap> assertListOfMaps() {
if (this.isEmpty() || this.get(0) instanceof SerialMap) {
return (SerialList<SerialMap>) this;
}
throw new RuntimeException("Required List to contain SerialMap elements but condition failed");
}
@JsonIgnore
public SerialList<SerialList<?>> assertListOfLists() {
if (this.isEmpty() || this.get(0) instanceof SerialList) {
return (SerialList<SerialList<?>>) this;
}
throw new RuntimeException("Required List to contain SerialList elements but condition failed");
}
@JsonIgnore
public SerialList<SerialValue<?>> assertListOfValues() {
if (this.isEmpty() || this.get(0) instanceof SerialValue) {
return (SerialList<SerialValue<?>>) this;
}
throw new RuntimeException("Required List to contain SerialValue elements but condition failed");
}
@JsonIgnore
public SerialList<SerialUUID> assertListOfUUIDs() {
if (this.isEmpty() || this.get(0) instanceof SerialUUID) {
return (SerialList<SerialUUID>) this;
}
throw new RuntimeException("Required List to contain SerialUUID elements but condition failed");
}
}
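/*
 * Illustrative usage (not part of the original commit): fromMapped converts an arbitrary collection
 * into a SerialList by applying a mapper to every element, e.g.
 *
 *   SerialList<SerialValue<?>> values =
 *       SerialList.fromMapped(List.of("a", "b"), s -> new SerialValue<>(s));
 *
 * java.util.List is not imported in this file; the snippet only illustrates the call shape.
 */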

View File

@@ -0,0 +1,84 @@
package de.dhbwstuttgart.server.packet.dataContainers.serialized;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.HashMap;
import java.util.Map;
import javax.annotation.Nullable;
public class SerialMap extends HashMap<String, ISerialNode> implements ISerialNode {
public SerialMap() {
super();
}
public SerialMap(Map<String, ISerialNode> data) {
super(data);
}
@JsonIgnore
public void put(String key, Boolean value) {
this.put(key, new SerialValue<>(value));
}
@JsonIgnore
public void put(String key, String value) {
this.put(key, new SerialValue<>(value));
}
@JsonIgnore
public void put(String key, Number value) {
this.put(key, new SerialValue<>(value));
}
@JsonIgnore
private <T> T get(String key, Class<T> expectedType) {
if (!this.containsKey(key)) {
throw new RuntimeException("Missing required value " + key + " in ObjectMap");
}
var element = this.get(key);
if (element != null && element.getClass() != expectedType) {
throw new RuntimeException(
"Required value " + key + " to be of type " + expectedType.getName() + " but found " + element.getClass().getName()
);
}
return (T)element;
}
@JsonIgnore
public SerialList<?> getList(String key) {
return this.get(key, SerialList.class);
}
@Nullable
@JsonIgnore
public SerialList<?> getListOrNull(String key) {
return this.containsKey(key) ? this.getList(key) : null;
}
@JsonIgnore
public SerialMap getMap(String key) {
return this.get(key, SerialMap.class);
}
@Nullable
@JsonIgnore
public SerialMap getMapOrNull(String key) {
return this.containsKey(key) ? this.getMap(key) : null;
}
@JsonIgnore
public SerialValue<?> getValue(String key) {
return this.get(key, SerialValue.class);
}
@JsonIgnore
public SerialUUID getUUID(String key) {
return this.get(key, SerialUUID.class);
}
@Nullable
@JsonIgnore
public SerialUUID getUUIDOrNull(String key) {
return this.containsKey(key) ? this.getUUID(key) : null;
}
}
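/**
 * Illustrative sketch (not part of the original commit): the typed getters above fail fast when a key
 * is missing or holds an unexpected node type, so callers can read values without manual casts. Class
 * name and values are hypothetical.
 */
class SerialMapUsageExample {
    public static void main(String[] args) {
        SerialMap map = new SerialMap();
        map.put("name", "Example");   // stored as a SerialValue<String>
        map.put("count", 3);          // stored as a SerialValue<Number>
        String name = map.getValue("name").getOf(String.class);
        System.out.println(name + " / " + map.getValue("count").value);
    }
}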

View File

@@ -0,0 +1,13 @@
package de.dhbwstuttgart.server.packet.dataContainers.serialized;
public class SerialUUID implements ISerialNode {
public String uuid;
public SerialUUID() {}
public SerialUUID(String uuid) {
this.uuid = uuid;
}
}

View File

@@ -0,0 +1,28 @@
package de.dhbwstuttgart.server.packet.dataContainers.serialized;
import com.fasterxml.jackson.annotation.JsonIgnore;
public class SerialValue<T> implements ISerialNode {
public T value;
public static final SerialValue<Object> NULL = new SerialValue<>(null);
public SerialValue() {}
public SerialValue(T value) {
this.value = value;
}
@JsonIgnore
public <A> SerialValue<A> assertValueOf(Class<A> targetClass) {
if (this.value == null || targetClass.isInstance(this.value)) {
return (SerialValue<A>) this;
}
throw new RuntimeException("Required Value to contain " + targetClass.getName() + " value but condition failed on" +
" type " + this.value.getClass().getName());
}
@JsonIgnore
public <A> A getOf(Class<A> targetClass) {
return this.assertValueOf(targetClass).value;
}
}

View File

@@ -0,0 +1,8 @@
package de.dhbwstuttgart.syntaxtree;
import de.dhbwstuttgart.util.Logger;
public class SyntaxTree {
public static Logger logger = new Logger("SyntaxTree");
}

View File

@@ -1,5 +1,8 @@
package de.dhbwstuttgart.syntaxtree.factory;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.core.JavaTXServer;
public class NameGenerator {
private static String strNextName = "A";
@@ -26,7 +29,11 @@ public class NameGenerator {
// compute the next name and store it in strNextName
inc( strNextName.length() - 1 );
if (JavaTXServer.isRunning) {
throw new RuntimeException("Using the NameGenerator on a server is not allowed");
}
JavaTXCompiler.defaultClientPlaceholderRegistry.addPlaceholder(strReturn);
return strReturn;
}

View File

@@ -1,5 +1,8 @@
package de.dhbwstuttgart.syntaxtree.factory;
import de.dhbwstuttgart.syntaxtree.SyntaxTree;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.util.Logger;
import java.io.Writer;
import java.lang.reflect.Modifier;
import java.util.*;
@@ -31,9 +34,13 @@ import org.antlr.v4.runtime.Token;
public class UnifyTypeFactory {
private static ArrayList<PlaceholderType> PLACEHOLDERS = new ArrayList<>();
public static FiniteClosure generateFC(List<ClassOrInterface> fromClasses, Writer logFile, ClassLoader classLoader, JavaTXCompiler compiler) throws ClassNotFoundException {
public static FiniteClosure generateFC(
List<ClassOrInterface> fromClasses,
Logger logger,
ClassLoader classLoader,
JavaTXCompiler compiler,
PlaceholderRegistry placeholderRegistry
) throws ClassNotFoundException {
/*
The transitive closure must work.
One may write List<A> extends AL<A>
@@ -44,7 +51,7 @@ public class UnifyTypeFactory {
In general, they may always have the same names.
TODO: build the transitive closure
*/
return new FiniteClosure(FCGenerator.toUnifyFC(compiler, fromClasses, classLoader), logFile, compiler);
return new FiniteClosure(FCGenerator.toUnifyFC(compiler, fromClasses, classLoader, placeholderRegistry), logger, compiler, placeholderRegistry);
}
public static UnifyPair generateSmallerPair(UnifyType tl, UnifyType tr, SourceLoc location){
@@ -67,23 +74,23 @@ public class UnifyTypeFactory {
* Convert from
* ASTType -> UnifyType
*/
public static UnifyType convert(JavaTXCompiler compiler, RefTypeOrTPHOrWildcardOrGeneric t, Boolean innerType){
public static UnifyType convert(JavaTXCompiler compiler, RefTypeOrTPHOrWildcardOrGeneric t, Boolean innerType, PlaceholderRegistry placeholderRegistry){
if (t instanceof GenericRefType){
return UnifyTypeFactory.convert(compiler, (GenericRefType)t, innerType);
return UnifyTypeFactory.convert(compiler, (GenericRefType)t, innerType, placeholderRegistry);
} else if (t instanceof TypePlaceholder){
return UnifyTypeFactory.convert(compiler, (TypePlaceholder)t, innerType);
return UnifyTypeFactory.convert(compiler, (TypePlaceholder)t, innerType, placeholderRegistry);
} else if (t instanceof ExtendsWildcardType){
return UnifyTypeFactory.convert(compiler, (ExtendsWildcardType)t, innerType);
return UnifyTypeFactory.convert(compiler, (ExtendsWildcardType)t, innerType, placeholderRegistry);
} else if (t instanceof SuperWildcardType) {
return UnifyTypeFactory.convert(compiler, (SuperWildcardType) t, innerType);
return UnifyTypeFactory.convert(compiler, (SuperWildcardType) t, innerType, placeholderRegistry);
} else if (t instanceof RefType){
return UnifyTypeFactory.convert(compiler, (RefType)t, innerType);
return UnifyTypeFactory.convert(compiler, (RefType)t, innerType, placeholderRegistry);
}
// An attempt was made to convert a type that is not yet covered by the factory
throw new NotImplementedException("Der Typ "+t+" kann nicht umgewandelt werden");
}
public static UnifyType convert(JavaTXCompiler compiler, RefType t, Boolean innerType){
public static UnifyType convert(JavaTXCompiler compiler, RefType t, Boolean innerType, PlaceholderRegistry placeholderRegistry){
//Check if it is a FunN Type:
Pattern p = Pattern.compile("Fun(\\d+)[$][$]");
Matcher m = p.matcher(t.getName().toString());
@@ -91,76 +98,76 @@ public class UnifyTypeFactory {
if(b){
Integer N = Integer.valueOf(m.group(1));
if((N + 1) == t.getParaList().size()){
return convertFunN(compiler, t.getParaList(), false);
return convertFunN(compiler, t.getParaList(), false, placeholderRegistry);
}
}
UnifyType ret;
List<UnifyType> params = new ArrayList<>();
if (t.getParaList() != null) {
for (RefTypeOrTPHOrWildcardOrGeneric pT : t.getParaList()) {
params.add(UnifyTypeFactory.convert(compiler, pT, true));
params.add(UnifyTypeFactory.convert(compiler, pT, true, placeholderRegistry));
}
}
var clazz = compiler.getClass(t.getName());
if (clazz != null && clazz.isInterface() && clazz.isFunctionalInterface()) {
var method = clazz.getMethods().stream().filter(x -> Modifier.isAbstract(x.modifier)).findFirst().orElseThrow();
var methodParams = method.getParameterList().getFormalparalist().stream().map(x -> convert(compiler, x.getType(), true)).toList();
var methodParams = method.getParameterList().getFormalparalist().stream().map(x -> convert(compiler, x.getType(), true, placeholderRegistry)).toList();
var generics = StreamSupport.stream(clazz.getGenerics().spliterator(), false).map(GenericTypeVar::getName).toList();
return new FunInterfaceType(t.getName().toString(), new TypeParams(params), methodParams, convert(compiler, method.getReturnType(), true), generics);
return new FunInterfaceType(t.getName().toString(), new TypeParams(params), methodParams, convert(compiler, method.getReturnType(), true, placeholderRegistry), generics);
}
return new ReferenceType(t.getName().toString(),new TypeParams(params));
}
public static UnifyType convertFunN(JavaTXCompiler compiler, List<RefTypeOrTPHOrWildcardOrGeneric> paraList, Boolean innerType){
public static UnifyType convertFunN(JavaTXCompiler compiler, List<RefTypeOrTPHOrWildcardOrGeneric> paraList, Boolean innerType, PlaceholderRegistry placeholderRegistry){
UnifyType ret;
List<UnifyType> params = new ArrayList<>();
if(paraList != null && paraList.size() > 0){
for(RefTypeOrTPHOrWildcardOrGeneric pT : paraList){
params.add(UnifyTypeFactory.convert(compiler, pT, false));
params.add(UnifyTypeFactory.convert(compiler, pT, false, placeholderRegistry));
}
}
ret = FunNType.getFunNType(new TypeParams(params));
return ret;
}
public static UnifyType convert(JavaTXCompiler compiler, TypePlaceholder tph, Boolean innerType){
public static UnifyType convert(JavaTXCompiler compiler, TypePlaceholder tph, Boolean innerType, PlaceholderRegistry placeholderRegistry) {
if (tph.getName().equals("AFR")) {
System.out.println("XXX"+innerType);
SyntaxTree.logger.info("XXX"+innerType);
}
PlaceholderType ntph = new PlaceholderType(tph.getName(), tph.getVariance());
PlaceholderType ntph = new PlaceholderType(tph.getName(), tph.getVariance(), placeholderRegistry);
ntph.setVariance(tph.getVariance());
ntph.setOrCons(tph.getOrCons());
ntph.setWildcardtable(tph.getWildcardtable());
int in = PLACEHOLDERS.indexOf(ntph);
int in = placeholderRegistry.UnifyTypeFactory_PLACEHOLDERS.indexOf(ntph);
if (in == -1) {
PLACEHOLDERS.add(ntph);
placeholderRegistry.UnifyTypeFactory_PLACEHOLDERS.add(ntph);
ntph.setInnerType(innerType);
return ntph;
}
else {
PlaceholderType oldpht = PLACEHOLDERS.get(in);
PlaceholderType oldpht = placeholderRegistry.UnifyTypeFactory_PLACEHOLDERS.get(in);
oldpht.setInnerType(oldpht.isInnerType() || innerType);
return oldpht;
}
}
public static UnifyType convert(JavaTXCompiler compiler, GenericRefType t, Boolean innerType){
public static UnifyType convert(JavaTXCompiler compiler, GenericRefType t, Boolean innerType, PlaceholderRegistry placeholderRegistry){
return new ReferenceType(t.getParsedName(), true);
}
public static UnifyType convert(JavaTXCompiler compiler, WildcardType t, Boolean innerType){
public static UnifyType convert(JavaTXCompiler compiler, WildcardType t, Boolean innerType, PlaceholderRegistry placeholderRegistry){
if(t.isExtends())
return new ExtendsType(UnifyTypeFactory.convert(compiler, t.getInnerType(), false));
return new ExtendsType(UnifyTypeFactory.convert(compiler, t.getInnerType(), false, placeholderRegistry));
else if(t.isSuper())
return new SuperType(UnifyTypeFactory.convert(compiler, t.getInnerType(), false));
return new SuperType(UnifyTypeFactory.convert(compiler, t.getInnerType(), false, placeholderRegistry));
else throw new NotImplementedException();
}
public static ConstraintSet<UnifyPair> convert(JavaTXCompiler compiler, ConstraintSet<Pair> constraints) {
return constraints.map(c -> UnifyTypeFactory.convert(compiler, c));
public static ConstraintSet<UnifyPair> convert(JavaTXCompiler compiler, ConstraintSet<Pair> constraints, PlaceholderRegistry placeholderRegistry) {
return constraints.map(c -> UnifyTypeFactory.convert(compiler, c, placeholderRegistry));
}
//NEVER USED
@@ -171,30 +178,30 @@ public class UnifyTypeFactory {
// return unifyPairConstraint;
//}
public static UnifyPair convert(JavaTXCompiler compiler, Pair p) {
public static UnifyPair convert(JavaTXCompiler compiler, Pair p, PlaceholderRegistry placeholderRegistry) {
UnifyPair ret = null;
if(p.GetOperator().equals(PairOperator.SMALLERDOT)) {
ret = generateSmallerDotPair(UnifyTypeFactory.convert(compiler, p.TA1, false)
, UnifyTypeFactory.convert(compiler, p.TA2, false), p.getLocation());
ret = generateSmallerDotPair(UnifyTypeFactory.convert(compiler, p.TA1, false, placeholderRegistry)
, UnifyTypeFactory.convert(compiler, p.TA2, false, placeholderRegistry), p.getLocation());
//return ret;
}else if(p.GetOperator().equals(PairOperator.SMALLERNEQDOT)) {
ret = generateSmallNotEqualDotPair(UnifyTypeFactory.convert(compiler, p.TA1, false)
, UnifyTypeFactory.convert(compiler, p.TA2, false), p.getLocation());
ret = generateSmallNotEqualDotPair(UnifyTypeFactory.convert(compiler, p.TA1, false, placeholderRegistry)
, UnifyTypeFactory.convert(compiler, p.TA2, false, placeholderRegistry), p.getLocation());
//return ret;
}else if(p.GetOperator().equals(PairOperator.EQUALSDOT)) {
ret = generateEqualDotPair(UnifyTypeFactory.convert(compiler, p.TA1, false)
, UnifyTypeFactory.convert(compiler, p.TA2, false), p.getLocation());
ret = generateEqualDotPair(UnifyTypeFactory.convert(compiler, p.TA1, false, placeholderRegistry)
, UnifyTypeFactory.convert(compiler, p.TA2, false, placeholderRegistry), p.getLocation());
//return ret;
}else if(p.GetOperator().equals(PairOperator.SMALLER)){
ret = generateSmallerPair(UnifyTypeFactory.convert(compiler, p.TA1, false),
UnifyTypeFactory.convert(compiler, p.TA2, false), p.getLocation());
ret = generateSmallerPair(UnifyTypeFactory.convert(compiler, p.TA1, false, placeholderRegistry),
UnifyTypeFactory.convert(compiler, p.TA2, false, placeholderRegistry), p.getLocation());
}else throw new NotImplementedException();
UnifyType lhs, rhs;
if (((lhs = ret.getLhsType()) instanceof PlaceholderType)
&& ((PlaceholderType)lhs).isWildcardable()
&& (rhs = ret.getLhsType()) instanceof PlaceholderType) {
if (lhs.getName().equals("AQ")) {
System.out.println("");
// SyntaxTree.logger.info("");
}
((PlaceholderType)rhs).enableWildcardtable();
}
@@ -203,7 +210,7 @@ public class UnifyTypeFactory {
&& ((PlaceholderType)rhs).isWildcardable()
&& (lhs = ret.getLhsType()) instanceof PlaceholderType) {
if (rhs.getName().equals("AQ")) {
System.out.println("");
// SyntaxTree.logger.info("");
}
((PlaceholderType)lhs).enableWildcardtable();
}
@@ -214,16 +221,16 @@ public class UnifyTypeFactory {
* Convert from
* UnifyType -> ASTType
*/
public static Set<ResultPair> convert(Set<UnifyPair> unifyPairSet, Map<String,TypePlaceholder> tphs) {
public static Set<ResultPair> convert(Set<UnifyPair> unifyPairSet, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
return unifyPairSet.stream().map(
unifyPair -> convert(unifyPair, tphs))
unifyPair -> convert(unifyPair, tphs, placeholderRegistry))
.collect(Collectors.toSet());
}
public static ResultPair convert(UnifyPair mp, Map<String,TypePlaceholder> tphs) {
public static ResultPair convert(UnifyPair mp, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
if (mp == null) { return null;} // can happen with basePairs
RefTypeOrTPHOrWildcardOrGeneric tl = UnifyTypeFactory.convert(mp.getLhsType(), tphs);
RefTypeOrTPHOrWildcardOrGeneric tr = UnifyTypeFactory.convert(mp.getRhsType(), tphs);
RefTypeOrTPHOrWildcardOrGeneric tl = UnifyTypeFactory.convert(mp.getLhsType(), tphs, placeholderRegistry);
RefTypeOrTPHOrWildcardOrGeneric tr = UnifyTypeFactory.convert(mp.getRhsType(), tphs, placeholderRegistry);
if(tl instanceof TypePlaceholder){
if(tr instanceof TypePlaceholder) {
@@ -232,7 +239,7 @@ public class UnifyTypeFactory {
// Simply ignore it. TODO: this needs to be fixed:
//return new PairTPHequalRefTypeOrWildcardType((TypePlaceholder)tl, ASTFactory.createObjectType());
}else{
return new PairTPHsmallerTPH((TypePlaceholder)tl, (TypePlaceholder)tr, convert(mp.getBasePair(), tphs));
return new PairTPHsmallerTPH((TypePlaceholder)tl, (TypePlaceholder)tr, convert(mp.getBasePair(), tphs, placeholderRegistry));
}
}else if(tr instanceof RefType){
return new PairTPHequalRefTypeOrWildcardType((TypePlaceholder)tl, (RefType) tr);
@@ -244,51 +251,51 @@ public class UnifyTypeFactory {
}else return new PairNoResult(tl, tr);//throw new NotImplementedException();
}
public static RefTypeOrTPHOrWildcardOrGeneric convert(ReferenceType t, Map<String,TypePlaceholder> tphs) {
public static RefTypeOrTPHOrWildcardOrGeneric convert(ReferenceType t, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
if(JavaClassName.Void.equals(t.getName()))return new Void(new NullToken());
if (t.isGenTypeVar()) return new GenericRefType(t.getName(),new NullToken());
RefType ret = new RefType(new JavaClassName(t.getName()),convert(t.getTypeParams(), tphs),new NullToken());
RefType ret = new RefType(new JavaClassName(t.getName()),convert(t.getTypeParams(), tphs, placeholderRegistry),new NullToken());
return ret;
}
public static RefTypeOrTPHOrWildcardOrGeneric convert(FunNType t, Map<String,TypePlaceholder> tphs) {
RefType ret = new RefType(new JavaClassName(t.getName()), convert(t.getTypeParams(), tphs), new NullToken());
public static RefTypeOrTPHOrWildcardOrGeneric convert(FunNType t, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
RefType ret = new RefType(new JavaClassName(t.getName()), convert(t.getTypeParams(), tphs, placeholderRegistry), new NullToken());
return ret;
}
public static RefTypeOrTPHOrWildcardOrGeneric convert(SuperType t, Map<String,TypePlaceholder> tphs) {
RefTypeOrTPHOrWildcardOrGeneric innerType = convert(t.getSuperedType(), tphs);
public static RefTypeOrTPHOrWildcardOrGeneric convert(SuperType t, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
RefTypeOrTPHOrWildcardOrGeneric innerType = convert(t.getSuperedType(), tphs, placeholderRegistry);
return new SuperWildcardType(innerType, new NullToken());
}
public static RefTypeOrTPHOrWildcardOrGeneric convert(ExtendsType t, Map<String,TypePlaceholder> tphs) {
RefTypeOrTPHOrWildcardOrGeneric innerType = convert(t.getExtendedType(), tphs);
public static RefTypeOrTPHOrWildcardOrGeneric convert(ExtendsType t, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
RefTypeOrTPHOrWildcardOrGeneric innerType = convert(t.getExtendedType(), tphs, placeholderRegistry);
return new ExtendsWildcardType(innerType, new NullToken());
}
public static RefTypeOrTPHOrWildcardOrGeneric convert(PlaceholderType t, Map<String,TypePlaceholder> tphs) {
public static RefTypeOrTPHOrWildcardOrGeneric convert(PlaceholderType t, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
TypePlaceholder ret = tphs.get(t.getName());
if(ret == null){ // this TPH was created by the unification algorithm
ret = TypePlaceholder.fresh(new NullToken());
ret = TypePlaceholder.fresh(new NullToken(), placeholderRegistry);
tphs.put(t.getName(), ret);
}
ret.setVariance(t.getVariance());
return ret;
}
public static RefTypeOrTPHOrWildcardOrGeneric convert(UnifyType t, Map<String,TypePlaceholder> tphs) {
if(t instanceof FunNType)return convert((FunNType) t, tphs);
if(t instanceof ReferenceType)return convert((ReferenceType) t, tphs);
if(t instanceof SuperType)return convert((SuperType) t, tphs);
if(t instanceof ExtendsType)return convert((ExtendsType) t, tphs);
if(t instanceof PlaceholderType)return convert((PlaceholderType) t, tphs);
public static RefTypeOrTPHOrWildcardOrGeneric convert(UnifyType t, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
if(t instanceof FunNType)return convert((FunNType) t, tphs, placeholderRegistry);
if(t instanceof ReferenceType)return convert((ReferenceType) t, tphs, placeholderRegistry);
if(t instanceof SuperType)return convert((SuperType) t, tphs, placeholderRegistry);
if(t instanceof ExtendsType)return convert((ExtendsType) t, tphs, placeholderRegistry);
if(t instanceof PlaceholderType)return convert((PlaceholderType) t, tphs, placeholderRegistry);
throw new NotImplementedException("Der Typ "+t+" kann nicht umgewandelt werden");
}
private static List<RefTypeOrTPHOrWildcardOrGeneric> convert(TypeParams typeParams, Map<String,TypePlaceholder> tphs) {
private static List<RefTypeOrTPHOrWildcardOrGeneric> convert(TypeParams typeParams, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
List<RefTypeOrTPHOrWildcardOrGeneric> ret = new ArrayList<>();
for(UnifyType uT : typeParams){
RefTypeOrTPHOrWildcardOrGeneric toAdd = convert(uT, tphs);
RefTypeOrTPHOrWildcardOrGeneric toAdd = convert(uT, tphs, placeholderRegistry);
ret.add(toAdd);
}
return ret;

View File

@@ -1,8 +1,13 @@
package de.dhbwstuttgart.syntaxtree.type;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.ASTVisitor;
import de.dhbwstuttgart.typeinference.result.ResultSetVisitor;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import org.antlr.v4.runtime.Token;
import java.util.Objects;
@@ -15,7 +20,7 @@ import java.util.Objects;
*
*/
public class ExtendsWildcardType extends WildcardType{
public class ExtendsWildcardType extends WildcardType implements ISerializableData {
/**
* Author: Arne Lüdtke<br/>
@@ -68,4 +73,22 @@ public class ExtendsWildcardType extends WildcardType{
ExtendsWildcardType that = (ExtendsWildcardType) o;
return that.innerType.equals(this.innerType);
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("innerType", this.innerType.toSerial(keyStorage));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static ExtendsWildcardType fromSerial(SerialMap data, UnifyContext context) {
return new ExtendsWildcardType(
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(data.getMap("innerType"), context),
new NullToken()
);
}
}

View File

@@ -1,57 +1,77 @@
package de.dhbwstuttgart.syntaxtree.type;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.ASTVisitor;
import de.dhbwstuttgart.typeinference.result.ResultSetVisitor;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import org.antlr.v4.runtime.Token;
import java.util.Objects;
public class GenericRefType extends RefTypeOrTPHOrWildcardOrGeneric
{
private String name;
public class GenericRefType extends RefTypeOrTPHOrWildcardOrGeneric implements ISerializableData {
private String name;
public GenericRefType(String name, Token offset)
{
super(offset);
this.name = name;
}
public GenericRefType(String name, Token offset) {
super(offset);
this.name = name;
}
public String getParsedName(){
return name.toString();
}
public String getParsedName() {
return name.toString();
}
@Override
public void accept(ASTVisitor visitor) {
visitor.visit(this);
}
@Override
public void accept(ASTVisitor visitor) {
visitor.visit(this);
}
@Override
public <A> A acceptTV(TypeVisitor<A> visitor) {
return visitor.visit(this);
}
@Override
public <A> A acceptTV(TypeVisitor<A> visitor) {
return visitor.visit(this);
}
@Override
public void accept(ResultSetVisitor visitor) {
visitor.visit(this);
}
@Override
public void accept(ResultSetVisitor visitor) {
visitor.visit(this);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
GenericRefType that = (GenericRefType) o;
return name.equals(that.name);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
GenericRefType that = (GenericRefType) o;
return name.equals(that.name);
}
@Override
public int hashCode() {
return Objects.hash(name);
}
@Override
public int hashCode() {
return Objects.hash(name);
}
@Override
public String toString()
{
return "GTV " + this.name;
}
@Override
public String toString() {
return "GTV " + this.name;
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("name", this.name);
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static GenericRefType fromSerial(SerialMap data, UnifyContext context) {
return new GenericRefType(
data.getValue("name").getOf(String.class),
new NullToken()
);
}
}

View File

@@ -1,8 +1,15 @@
package de.dhbwstuttgart.syntaxtree.type;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.parser.scope.JavaClassName;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.ASTVisitor;
import de.dhbwstuttgart.typeinference.result.ResultSetVisitor;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import org.antlr.v4.runtime.Token;
import java.util.ArrayList;
@@ -11,122 +18,137 @@ import java.util.List;
import java.util.Objects;
public class RefType extends RefTypeOrTPHOrWildcardOrGeneric
{
protected final JavaClassName name;
protected final List<RefTypeOrTPHOrWildcardOrGeneric> parameter;
/**
* If primitiveFlag is true, this RefType must be replaced by the
* primitive data type during code generation
*
* Example: java.lang.Integer with the flag then becomes [int]
*/
boolean primitiveFlag = false; // TODO Should be final
public class RefType extends RefTypeOrTPHOrWildcardOrGeneric implements ISerializableData {
protected final JavaClassName name;
protected final List<RefTypeOrTPHOrWildcardOrGeneric> parameter;
/**
* If primitiveFlag is true, this RefType must be replaced by the
* primitive data type during code generation
* <p>
* Example: java.lang.Integer with the flag then becomes [int]
*/
boolean primitiveFlag = false; // TODO Should be final
public RefType(JavaClassName fullyQualifiedName, Token offset)
{
this(fullyQualifiedName, new ArrayList<>(), offset);
public RefType(JavaClassName fullyQualifiedName, Token offset) {
this(fullyQualifiedName, new ArrayList<>(), offset);
}
public boolean isPrimitive() {
return primitiveFlag;
}
@Override
public String toString() {
String params = "";
if (parameter.size() > 0) {
params += "<";
Iterator<RefTypeOrTPHOrWildcardOrGeneric> it = parameter.iterator();
while (it.hasNext()) {
RefTypeOrTPHOrWildcardOrGeneric param = it.next();
params += param.toString();
if (it.hasNext()) params += ", ";
}
params += ">";
}
return this.name.toString() + params;
}
@Override
public int hashCode() {
return this.name.hashCode(); // hash only the name; results in slow but working HashMaps
}
public RefType(JavaClassName fullyQualifiedName, List<RefTypeOrTPHOrWildcardOrGeneric> parameter, Token offset) {
this(fullyQualifiedName, parameter, offset, false);
}
public RefType(JavaClassName fullyQualifiedName, List<RefTypeOrTPHOrWildcardOrGeneric> parameter, Token offset, boolean primitiveFlag) {
super(offset);
this.name = (fullyQualifiedName);
this.parameter = parameter;
this.primitiveFlag = primitiveFlag;
}
public JavaClassName getName() {
return name;
}
public List<RefTypeOrTPHOrWildcardOrGeneric> getParaList() {
if (this.parameter == null) return new ArrayList<>();
return this.parameter;
}
/**
* Author: Jörg Bäuerle<br/>
*
* @return
*/
public boolean equals(Object obj) {
if (!(obj instanceof RefType refObj)) {
return false;
}
public boolean isPrimitive() {
return primitiveFlag;
}
if (!Objects.equals(this.name, refObj.name)) return false;
boolean ret = true;
@Override
public String toString(){
String params = "";
if(parameter.size()>0){
params += "<";
Iterator<RefTypeOrTPHOrWildcardOrGeneric> it = parameter.iterator();
while(it.hasNext()){
RefTypeOrTPHOrWildcardOrGeneric param = it.next();
params += param.toString();
if(it.hasNext())params += ", ";
}
params += ">";
//if(!(super.equals(obj))) PL 2020-03-12 may need to be commented back in
// return false;
if (parameter == null || parameter.size() == 0) {
ret &= (refObj.getParaList() == null || refObj.getParaList().isEmpty());
} else {
if (refObj.getParaList() == null) {
ret = false;
} else if (parameter.size() != refObj.getParaList().size()) {
ret = false;
} else {
for (int i = 0; i < parameter.size(); i++) {
ret &= parameter.get(i).equals(refObj.getParaList().get(i));
}
return this.name.toString() + params;
}
}
return ret;
@Override
public int hashCode() {
return this.name.hashCode(); // hash only the name; makes for slow but working HashMaps
}
}
public RefType(JavaClassName fullyQualifiedName, List<RefTypeOrTPHOrWildcardOrGeneric> parameter, Token offset) {
this(fullyQualifiedName, parameter, offset, false);
}
public RefType(JavaClassName fullyQualifiedName, List<RefTypeOrTPHOrWildcardOrGeneric> parameter, Token offset, boolean primitiveFlag) {
super(offset);
this.name = (fullyQualifiedName);
this.parameter = parameter;
this.primitiveFlag = primitiveFlag;
}
@Override
public void accept(ASTVisitor visitor) {
visitor.visit(this);
}
public JavaClassName getName()
{
return name;
}
@Override
public <A> A acceptTV(TypeVisitor<A> visitor) {
return visitor.visit(this);
}
public List<RefTypeOrTPHOrWildcardOrGeneric> getParaList(){
if(this.parameter==null)return new ArrayList<>();
return this.parameter;
}
@Override
public void accept(ResultSetVisitor visitor) {
visitor.visit(this);
}
/**
* Author: Jörg Bäuerle<br/>
* @return
*/
public boolean equals(Object obj)
{
if(obj instanceof RefType){
if (!Objects.equals(this.name, ((RefType) obj).name)) return false;
boolean ret = true;
//if(!(super.equals(obj))) PL 2020-03-12 may need to be commented back in
// return false;
if(parameter==null || parameter.size()==0){
ret &= (((RefType)obj).getParaList()==null || ((RefType)obj).getParaList().size()==0);
}
else{
if(((RefType)obj).getParaList()==null){
ret = false;
}
else if(parameter.size() != ((RefType)obj).getParaList().size())
{
ret = false;
}
else
{
for(int i = 0; i<parameter.size(); i++)
{
ret &= parameter.get(i).equals(((RefType)obj).getParaList().get(i));
}
}
}
return ret;
}
else{
return false;
}
}
@Override
public ISerialNode toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("isPrimitive", this.primitiveFlag);
serialized.put("name", this.name.toString());
serialized.put("parameters", SerialList.fromMapped(this.parameter, param -> param.toSerial(keyStorage)));
@Override
public void accept(ASTVisitor visitor) {
visitor.visit(this);
}
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
@Override
public <A> A acceptTV(TypeVisitor<A> visitor) {
return visitor.visit(this);
}
@Override
public void accept(ResultSetVisitor visitor) {
visitor.visit(this);
}
public static RefType fromSerial(SerialMap data, UnifyContext context) {
return new RefType(
new JavaClassName(data.getValue("name").getOf(String.class)),
data.getList("parameters").assertListOfMaps().stream()
.map(param -> RefTypeOrTPHOrWildcardOrGeneric.fromSerial(param, context))
.toList(),
new NullToken(),
data.getValue("isPrimitive").getOf(Boolean.class)
);
}
}

View File

@@ -1,11 +1,17 @@
package de.dhbwstuttgart.syntaxtree.type;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import de.dhbwstuttgart.syntaxtree.ASTVisitor;
import de.dhbwstuttgart.syntaxtree.SyntaxTreeNode;
import de.dhbwstuttgart.typeinference.result.ResultSetVisitor;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import org.antlr.v4.runtime.Token;
public abstract class RefTypeOrTPHOrWildcardOrGeneric extends SyntaxTreeNode{
public abstract class RefTypeOrTPHOrWildcardOrGeneric extends SyntaxTreeNode implements ISerializableData {
public RefTypeOrTPHOrWildcardOrGeneric(Token offset) {
super(offset);
}
@@ -18,5 +24,26 @@ public abstract class RefTypeOrTPHOrWildcardOrGeneric extends SyntaxTreeNode{
@Override
public abstract boolean equals(Object o);
@Override
public ISerialNode toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("type", this.getClass().getSimpleName());
// we only insert null for the object; subclasses are expected to call this and then replace the value with their own payload
serialized.put("object", SerialValue.NULL);
return serialized;
}
public static RefTypeOrTPHOrWildcardOrGeneric fromSerial(SerialMap data, UnifyContext context) {
String type = data.getValue("type").getOf(String.class);
SerialMap object = data.getMap("object");
if (type.equals(ExtendsWildcardType.class.getSimpleName())) return ExtendsWildcardType.fromSerial(object, context);
else if (type.equals(GenericRefType.class.getSimpleName())) return GenericRefType.fromSerial(object, context);
else if (type.equals(SuperWildcardType.class.getSimpleName())) return SuperWildcardType.fromSerial(object, context);
else if (type.equals(RefType.class.getSimpleName())) return RefType.fromSerial(object, context);
else if (type.equals(Void.class.getSimpleName())) return Void.fromSerial(object, context);
else if (type.equals(TypePlaceholder.class.getSimpleName())) return TypePlaceholder.fromSerial(object, context);
else throw new RuntimeException("Could not unserialize class of unhandled type " + type);
}
}
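
The "type"/"object" wrapper above lets each subclass reuse the superclass envelope, while the static fromSerial dispatches back to the matching subclass by its simple class name. A minimal round-trip sketch, assuming purely for illustration that KeyStorage and UnifyContext provide no-arg constructors (their construction is not part of this changeset):

// sketch only: KeyStorage/UnifyContext construction is assumed, not shown in this diff
KeyStorage keyStorage = new KeyStorage();
UnifyContext context = new UnifyContext();
RefTypeOrTPHOrWildcardOrGeneric original =
        new RefType(new JavaClassName("java.lang.Integer"), new NullToken());
// the superclass wrapper yields { "type": "RefType", "object": { ... } }
SerialMap serialized = original.toSerial(keyStorage).assertType(SerialMap.class);
// fromSerial reads "type" and hands the "object" payload to RefType.fromSerial
RefTypeOrTPHOrWildcardOrGeneric restored = RefTypeOrTPHOrWildcardOrGeneric.fromSerial(serialized, context);
assert restored.equals(original);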

View File

@@ -1,9 +1,13 @@
package de.dhbwstuttgart.syntaxtree.type;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.ASTVisitor;
import de.dhbwstuttgart.syntaxtree.GenericTypeVar;
import de.dhbwstuttgart.typeinference.result.ResultSetVisitor;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import org.antlr.v4.runtime.Token;
import java.util.Objects;
@@ -16,7 +20,7 @@ import java.util.Objects;
*
*/
public class SuperWildcardType extends WildcardType{
public class SuperWildcardType extends WildcardType implements ISerializableData {
/**
* Author: Arne Lüdtke<br/>
@@ -80,4 +84,22 @@ public class SuperWildcardType extends WildcardType{
SuperWildcardType that = (SuperWildcardType) o;
return that.innerType.equals(this.innerType);
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("innerType", this.innerType.toSerial(keyStorage));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static SuperWildcardType fromSerial(SerialMap data, UnifyContext context) {
return new SuperWildcardType(
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(data.getMap("innerType"), context),
new NullToken()
);
}
}

View File

@@ -1,9 +1,12 @@
package de.dhbwstuttgart.syntaxtree.type;
import java.util.Hashtable;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.syntaxtree.ASTVisitor;
import de.dhbwstuttgart.syntaxtree.SyntaxTreeNode;
import de.dhbwstuttgart.syntaxtree.factory.NameGenerator;
import de.dhbwstuttgart.typeinference.result.ResultSetVisitor;
import org.antlr.v4.runtime.Token;
@@ -16,7 +19,7 @@ import org.antlr.v4.runtime.Token;
* @author Jörg Bäuerle
* @version $Date: 2013/06/19 12:45:37 $
*/
public class TypePlaceholder extends RefTypeOrTPHOrWildcardOrGeneric
public class TypePlaceholder extends RefTypeOrTPHOrWildcardOrGeneric implements ISerializableData
{
private final String name;
@@ -65,7 +68,12 @@ public class TypePlaceholder extends RefTypeOrTPHOrWildcardOrGeneric
public static TypePlaceholder fresh(Token position){
return new TypePlaceholder(NameGenerator.makeNewName(), position, 0, true);
}
public static TypePlaceholder fresh(Token position, PlaceholderRegistry placeholderRegistry){
String newName = placeholderRegistry.generateFreshPlaceholderName();
return new TypePlaceholder(newName, position, 0, true);
}
public static TypePlaceholder fresh(Token position, int variance, boolean wildcardable){
return new TypePlaceholder(NameGenerator.makeNewName(), position, variance, wildcardable);
}
@@ -139,4 +147,26 @@ public class TypePlaceholder extends RefTypeOrTPHOrWildcardOrGeneric
public Boolean getWildcardtable() {
return wildcardable;
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("name", this.name);
serialized.put("variance", this.variance);
serialized.put("wildcardable", this.wildcardable);
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static TypePlaceholder fromSerial(SerialMap data, UnifyContext context) {
return new TypePlaceholder(
data.getValue("name").getOf(String.class),
new NullToken(),
data.getValue("variance").getOf(Integer.class),
data.getValue("wildcardable").getOf(Boolean.class)
);
}
}
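
The new fresh(Token, PlaceholderRegistry) overload draws placeholder names from an explicit registry instead of the static NameGenerator, so unification runs that share one registry cannot hand out the same name twice. A minimal sketch, assuming for illustration that PlaceholderRegistry has a no-arg constructor (not shown here):

// sketch only: PlaceholderRegistry construction is assumed, not shown in this diff
PlaceholderRegistry registry = new PlaceholderRegistry();
// both placeholders draw their names from the same registry, so the names differ
TypePlaceholder a = TypePlaceholder.fresh(new NullToken(), registry);
TypePlaceholder b = TypePlaceholder.fresh(new NullToken(), registry);
assert !a.getName().equals(b.getName());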

View File

@@ -1,14 +1,32 @@
package de.dhbwstuttgart.syntaxtree.type;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import org.antlr.v4.runtime.Token;
import de.dhbwstuttgart.parser.scope.JavaClassName;
public class Void extends RefType
public class Void extends RefType implements ISerializableData
{
public Void(Token offset) {
super(JavaClassName.Void, offset);
}
@Override
public ISerialNode toSerial(KeyStorage keyStorage) {
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
serializedWrapper.put("object", new SerialMap());
return serializedWrapper;
}
public static Void fromSerial(SerialMap data, UnifyContext context) {
return new Void(new NullToken());
}
}

View File

@@ -0,0 +1,8 @@
package de.dhbwstuttgart.target;
import de.dhbwstuttgart.util.Logger;
public class Target {
public static Logger logger = new Logger("Target");
}
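
Target only holds the shared Logger instance that the following hunks substitute for the former System.out.println calls in the code-generation classes; usage is a plain call on the static field, for example:

Target.logger.info("============== OUTPUT ==============");
Target.logger.info("generated " + someMethodName);  // someMethodName is an illustrative variable only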

View File

@@ -12,6 +12,7 @@ import de.dhbwstuttgart.syntaxtree.Record;
import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
import de.dhbwstuttgart.syntaxtree.statement.*;
import de.dhbwstuttgart.syntaxtree.type.*;
import de.dhbwstuttgart.target.Target;
import de.dhbwstuttgart.target.tree.*;
import de.dhbwstuttgart.target.tree.expression.*;
import de.dhbwstuttgart.target.tree.type.*;
@@ -337,10 +338,10 @@ public class ASTToTargetAST {
var result = r0.stream().map(l -> l.stream().toList()).toList();
System.out.println("============== OUTPUT ==============");
Target.logger.info("============== OUTPUT ==============");
for (var l : result) {
for (var m : l) System.out.println(m.name() + " " + m.getSignature());
System.out.println();
for (var m : l) Target.logger.info(m.name() + " " + m.getSignature());
Target.logger.info("");
}
return result;
}

View File

@@ -1,10 +1,12 @@
package de.dhbwstuttgart.target.generate;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.syntaxtree.*;
import de.dhbwstuttgart.syntaxtree.statement.*;
import de.dhbwstuttgart.syntaxtree.type.*;
import de.dhbwstuttgart.syntaxtree.type.Void;
import de.dhbwstuttgart.target.Target;
import de.dhbwstuttgart.target.tree.type.TargetGenericType;
import de.dhbwstuttgart.target.tree.type.TargetType;
import de.dhbwstuttgart.typeinference.result.PairTPHEqualTPH;
@@ -138,17 +140,17 @@ public abstract class GenerateGenerics {
this.astToTargetAST = astToTargetAST;
for (var constraint : constraints.results) {
if (constraint instanceof PairTPHsmallerTPH p) {
System.out.println(p.left + " " + p.left.getVariance());
Target.logger.info(p.left + " " + p.left.getVariance());
simplifiedConstraints.add(new PairLT(new TPH(p.left), new TPH(p.right)));
} else if (constraint instanceof PairTPHEqualTPH p) {
equality.put(p.getLeft(), p.getRight());
} else if (constraint instanceof PairTPHequalRefTypeOrWildcardType p) {
System.out.println(p.left + " = " + p.right);
Target.logger.info(p.left + " = " + p.right);
concreteTypes.put(new TPH(p.left), p.right);
}
}
System.out.println("Simplified constraints: " + simplifiedConstraints);
Target.logger.info("Simplified constraints: " + simplifiedConstraints);
}
/*public record GenericsState(Map<TPH, RefTypeOrTPHOrWildcardOrGeneric> concreteTypes, Map<TypePlaceholder, TypePlaceholder> equality) {}
@@ -248,7 +250,7 @@ public abstract class GenerateGenerics {
equality.put(entry.getKey(), to);
}
}
System.out.println(from + " -> " + to + " " + from.getVariance());
Target.logger.info(from + " -> " + to + " " + from.getVariance());
//from.setVariance(to.getVariance());
equality.put(from, to);
referenced.remove(new TPH(from));
@@ -317,7 +319,7 @@ public abstract class GenerateGenerics {
Set<TPH> T2s = new HashSet<>();
findTphs(superType, T2s);
System.out.println("T1s: " + T1s + " T2s: " + T2s);
Target.logger.info("T1s: " + T1s + " T2s: " + T2s);
//Ende
superType = methodCall.receiverType;
@@ -332,7 +334,7 @@ public abstract class GenerateGenerics {
var optMethod = astToTargetAST.findMethod(owner, methodCall.name, methodCall.signatureArguments().stream().map(astToTargetAST::convert).toList());
if (optMethod.isEmpty()) return;
var method2 = optMethod.get();
System.out.println("In: " + method.getName() + " Method: " + method2.getName());
Target.logger.info("In: " + method.getName() + " Method: " + method2.getName());
var generics = family(owner, method2);
// transitive and
@@ -365,7 +367,7 @@ public abstract class GenerateGenerics {
if (!T1s.contains(R1) || !T2s.contains(R2)) continue;
var newPair = new PairLT(R1, R2);
System.out.println("New pair: " + newPair);
Target.logger.info("New pair: " + newPair);
newPairs.add(newPair);
if (!containsRelation(result, newPair))
@@ -567,7 +569,7 @@ public abstract class GenerateGenerics {
Set<Pair> generics(ClassOrInterface owner, Method method) {
if (computedGenericsOfMethods.containsKey(method)) {
var cached = computedGenericsOfMethods.get(method);
System.out.println("Cached " + method.getName() + ": " + cached);
Target.logger.info("Cached " + method.getName() + ": " + cached);
return cached;
}
@@ -596,7 +598,7 @@ public abstract class GenerateGenerics {
normalize(result, classGenerics, usedTphs);
System.out.println(this.getClass().getSimpleName() + " " + method.name + ": " + result);
Target.logger.info(this.getClass().getSimpleName() + " " + method.name + ": " + result);
return result;
}
@@ -675,7 +677,7 @@ public abstract class GenerateGenerics {
normalize(javaResult, null, referencedByClass);
System.out.println(this.getClass().getSimpleName() + " Class " + classOrInterface.getClassName().getClassName() + ": " + javaResult);
Target.logger.info(this.getClass().getSimpleName() + " Class " + classOrInterface.getClassName().getClassName() + ": " + javaResult);
return javaResult;
}
@@ -726,7 +728,7 @@ public abstract class GenerateGenerics {
if (!added) break;
}
System.out.println(chain + " " + chain.stream().map(e -> e.resolve().getVariance()).toList());
Target.logger.info(chain + " " + chain.stream().map(e -> e.resolve().getVariance()).toList());
var variance = chain.get(0).resolve().getVariance();
if (variance != 1) continue;
var index = 0;
@@ -764,7 +766,7 @@ public abstract class GenerateGenerics {
}
for (var pair : elementsToAddToEquality) {
System.out.println(pair);
Target.logger.info(pair);
addToEquality(pair.left, pair.right, referenced);
}
}
@@ -917,11 +919,11 @@ public abstract class GenerateGenerics {
}
}
if (infima.size() > 1) {
System.out.println(infima);
Target.logger.info(infima);
for (var pair : infima) {
var returnTypes = findTypeVariables(method.getReturnType());
var chain = findConnectionToReturnType(returnTypes, input, new HashSet<>(), pair.left);
System.out.println("Find: " + pair.left + " " + chain);
Target.logger.info("Find: " + pair.left + " " + chain);
chain.remove(pair.left);
if (chain.size() > 0) {
for (var tph : chain)
@@ -959,8 +961,8 @@ public abstract class GenerateGenerics {
}
}
newTph.setVariance(variance);
System.out.println(infima + " " + infima.stream().map(i -> i.right.resolve().getVariance()).toList());
System.out.println("Infima new TPH " + newTph + " variance " + variance);
Target.logger.info(infima + " " + infima.stream().map(i -> i.right.resolve().getVariance()).toList());
Target.logger.info("Infima new TPH " + newTph + " variance " + variance);
//referenced.add(newTph);
addToPairs(input, new PairLT(left, new TPH(newTph)));

View File

@@ -8,6 +8,7 @@ import de.dhbwstuttgart.parser.scope.JavaClassName;
import de.dhbwstuttgart.syntaxtree.*;
import de.dhbwstuttgart.syntaxtree.statement.*;
import de.dhbwstuttgart.syntaxtree.type.*;
import de.dhbwstuttgart.target.Target;
import de.dhbwstuttgart.target.tree.MethodParameter;
import de.dhbwstuttgart.target.tree.TargetMethod;
import de.dhbwstuttgart.target.tree.expression.*;
@@ -120,7 +121,7 @@ public class StatementToTargetExpression implements ASTVisitor {
@Override
public void visit(BoolExpression bool) {
System.out.println("BoolExpression");
Target.logger.info("BoolExpression");
}
@Override
@@ -234,7 +235,7 @@ public class StatementToTargetExpression implements ASTVisitor {
isInterface = receiverClass.isInterface();
}
System.out.println(argList);
Target.logger.info(argList);
result = new TargetMethodCall(converter.convert(methodCall.getType()), returnType, argList, converter.convert(methodCall.receiver), methodCall.getArgumentList().getArguments().stream().map(converter::convert).toList(), receiverType, methodCall.name, isStatic, isInterface, isPrivate);
}

View File

@@ -9,13 +9,8 @@ import de.dhbwstuttgart.syntaxtree.Method;
import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
import de.dhbwstuttgart.syntaxtree.factory.NameGenerator;
import de.dhbwstuttgart.syntaxtree.type.GenericRefType;
import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import org.antlr.v4.runtime.Token;
import javax.swing.text.html.Option;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;

View File

@@ -19,11 +19,11 @@ public class MethodAssumption extends Assumption{
private ClassOrInterface receiver;
private RefTypeOrTPHOrWildcardOrGeneric retType;
List<? extends RefTypeOrTPHOrWildcardOrGeneric> params;
private final Boolean isInherited;
private final Boolean isOverridden;
private final boolean isInherited;
private final boolean isOverridden;
public MethodAssumption(ClassOrInterface receiver, RefTypeOrTPHOrWildcardOrGeneric retType,
List<? extends RefTypeOrTPHOrWildcardOrGeneric> params, TypeScope scope, Boolean isInherited, Boolean isOverridden){
List<? extends RefTypeOrTPHOrWildcardOrGeneric> params, TypeScope scope, boolean isInherited, boolean isOverridden){
super(scope);
this.receiver = receiver;
this.retType = retType;
@@ -73,11 +73,11 @@ public class MethodAssumption extends Assumption{
return TYPEStmt.getReceiverType(receiver, resolver);
}
public Boolean isInherited() {
public boolean isInherited() {
return isInherited;
}
public Boolean isOverridden() {
public boolean isOverridden() {
return isOverridden;
}
}

View File

@@ -1,77 +1,171 @@
package de.dhbwstuttgart.typeinference.constraints;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialUUID;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import java.util.Collection;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import javax.annotation.Nullable;
public class Constraint<A> extends HashSet<A> {
private static final long serialVersionUID = 1L;
private Boolean isInherited = false;// both are only needed for the method constraints
private Boolean isImplemented = false;
/*
* used during code generation to select the correct method signature
*/
/*private*/ Set<A> methodSignatureConstraint = new HashSet<>();
private Constraint<A> extendConstraint = null;
public Constraint() {
super();
}
public Constraint(Boolean isInherited, Boolean isImplemented) {
this.isInherited = isInherited;
this.isImplemented = isImplemented;
}
public Constraint(Boolean isInherited, Boolean isImplemented, Constraint<A> extendConstraint, Set<A> methodSignatureConstraint) {
this.isInherited = isInherited;
this.isImplemented = isImplemented;
this.extendConstraint = extendConstraint;
this.methodSignatureConstraint = methodSignatureConstraint;
}
public void setIsInherited(Boolean isInherited) {
this.isInherited = isInherited;
}
public Boolean isInherited() {
return isInherited;
}
public Boolean isImplemented() {
return isImplemented;
}
public Constraint<A> getExtendConstraint() {
return extendConstraint;
}
public void setExtendConstraint(Constraint<A> c) {
extendConstraint = c;
}
public Set<A> getmethodSignatureConstraint() {
return methodSignatureConstraint;
}
public void setmethodSignatureConstraint(Set<A> c) {
methodSignatureConstraint = c;
}
public class Constraint<A extends IConstraintElement> extends HashSet<A> implements ISerializableData {
private static final long serialVersionUID = 1L;
private boolean isInherited = false;// both are only needed for the method constraints
private boolean isImplemented = false;
/*
* used during code generation to select the correct method signature
*/
/*private*/ Set<A> methodSignatureConstraint = new HashSet<>();
private Constraint<A> extendConstraint = null;
public Constraint() {
super();
}
public Constraint(int initialCapacity) {
super(initialCapacity);
}
public Constraint(boolean isInherited, boolean isImplemented) {
this.isInherited = isInherited;
this.isImplemented = isImplemented;
}
public Constraint(boolean isInherited, boolean isImplemented, Constraint<A> extendConstraint, Set<A> methodSignatureConstraint) {
this.isInherited = isInherited;
this.isImplemented = isImplemented;
this.extendConstraint = extendConstraint;
this.methodSignatureConstraint = methodSignatureConstraint;
}
public void setIsInherited(boolean isInherited) {
this.isInherited = isInherited;
}
public boolean isInherited() {
return isInherited;
}
public boolean isImplemented() {
return isImplemented;
}
public Constraint<A> getExtendConstraint() {
return extendConstraint;
}
public void setExtendConstraint(Constraint<A> c) {
extendConstraint = c;
}
public Set<A> getmethodSignatureConstraint() {
return methodSignatureConstraint;
}
public void setmethodSignatureConstraint(Set<A> c) {
methodSignatureConstraint = c;
}
public <B extends IConstraintElement> Constraint<B> createdMapped(Function<A,B> mapper) {
Constraint<B> result = new Constraint<>(this.size());
for (A element : this) {
result.add(mapper.apply(element));
}
return result;
}
public String toString() {
return super.toString() + "\nisInherited = " + isInherited
+ " isOverridden = " + isImplemented
+ " msc[" + methodSignatureConstraint.size() + "] = " + methodSignatureConstraint
//" + extendsContraint: " + (extendConstraint != null ? extendConstraint.toStringBase() : "null" )
+ "\n";
}
public String toStringBase() {
return super.toString();
}
private String serialUUID = null;
@Override
public SerialUUID toSerial(KeyStorage keyStorage) {
final String uuid = serialUUID == null ? keyStorage.getIdentifier() : serialUUID;
if (serialUUID == null) serialUUID = uuid;
if (!keyStorage.isAlreadySerialized(uuid)) {
SerialMap serialized = new SerialMap();
keyStorage.putSerialized(uuid, serialized);
serialized.put("isInherited", isInherited);
serialized.put("isImplemented", isImplemented);
serialized.put("extendedConstraint", extendConstraint == null ? null :
extendConstraint.toSerial(keyStorage));
Function<A, ISerialNode> pairMapper = pair -> {
if (pair instanceof Pair simplePair) return simplePair.toSerial(keyStorage);
if (pair instanceof UnifyPair unifyPair) return unifyPair.toSerial(keyStorage);
throw new RuntimeException("No serialization is supported for type " + pair.getClass().getName());
};
serialized.put("methodSignatureConstraint", methodSignatureConstraint == null ? null :
SerialList.fromMapped(methodSignatureConstraint, pairMapper));
serialized.put("setElements", SerialList.fromMapped(this, pairMapper));
}
// return only the unique key
return new SerialUUID(uuid);
}
public static <T extends IConstraintElement> Constraint<T> fromSerial(SerialUUID serialUUID, UnifyContext context, Class<T> target, KeyStorage keyStorage) {
String uuid = serialUUID.uuid;
if (!keyStorage.isAlreadyUnserialized(uuid)) {
Constraint<T> constraint = new Constraint<>();
// immediately add the object to the context to prevent infinite recursion
keyStorage.putUnserialized(uuid, constraint);
// retrieve the serialized data and start unserializing it
SerialMap data = keyStorage.getSerialized(uuid);
constraint.isInherited = data.getValue("isInherited").getOf(Boolean.class);
constraint.isImplemented = data.getValue("isImplemented").getOf(Boolean.class);
constraint.extendConstraint = Optional.ofNullable(data.getUUIDOrNull("extendedConstraint"))
.map(v -> Constraint.fromSerial(v, context, target, keyStorage))
.orElse(null);
// to convert the maps back to elements, we unfortunately have to make some assumptions about the generic types...
Function<ISerialNode, T> pairUnmapper = pairData -> {
if (target == Pair.class && pairData instanceof SerialMap pairMap) {
return (T) Pair.fromSerial(pairMap, context);
}
if (target == UnifyPair.class && pairData instanceof SerialUUID pairUUID) {
return (T) UnifyPair.fromSerial(pairUUID, context, keyStorage);
}
throw new RuntimeException("No serialization is supported for target type " + target.getName());
};
constraint.methodSignatureConstraint =
Optional.ofNullable(data.getListOrNull("methodSignatureConstraint"))
.map(l -> l.stream().map(pairUnmapper).collect(Collectors.toSet()))
.orElse(null);
constraint.addAll(
data.getList("setElements")
.stream().map(pairUnmapper).toList());
}
return keyStorage.getUnserialized(uuid, Constraint.class);
}
public String toString() {
return super.toString() + "\nisInherited = " + isInherited + " isOverridden = " + isImplemented
+ methodSignatureConstraint
//" + extendsContraint: " + (extendConstraint != null ? extendConstraint.toStringBase() : "null" )
+ "\n" ;
}
public String toStringBase() {
return super.toString();
}
}
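
toSerial above asks the KeyStorage for a stable identifier, stores the full constraint payload once under it, and from then on only emits the SerialUUID reference; this deduplicates repeated constraints and keeps the extendConstraint back-reference from recursing forever. A minimal sketch of that behaviour, again assuming a no-arg KeyStorage constructor for illustration (not shown in this changeset):

// sketch only: KeyStorage construction is assumed, not shown in this diff
KeyStorage keyStorage = new KeyStorage();
RefTypeOrTPHOrWildcardOrGeneric integer = new RefType(new JavaClassName("java.lang.Integer"), new NullToken());
RefTypeOrTPHOrWildcardOrGeneric number = new RefType(new JavaClassName("java.lang.Number"), new NullToken());
Constraint<Pair> constraint = new Constraint<>(false, false);
constraint.add(new Pair(integer, number, PairOperator.SMALLER));
// the first call stores the payload in the KeyStorage, the second only returns the existing reference
SerialUUID first = constraint.toSerial(keyStorage);
SerialUUID second = constraint.toSerial(keyStorage);
assert first.uuid.equals(second.uuid);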

View File

@@ -1,63 +1,80 @@
package de.dhbwstuttgart.typeinference.constraints;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.GuavaSetOperations;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import java.util.*;
import java.util.function.BinaryOperator;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;
public class ConstraintSet<A> {
Constraint<A> undConstraints = new Constraint<>();
List<Set<Constraint<A>>> oderConstraints = new ArrayList<>();
public class ConstraintSet<A extends IConstraintElement> implements ISerializableData {
Constraint<A> undConstraints = new Constraint<>();
List<Set<Constraint<A>>> oderConstraints = new ArrayList<>();
public void addUndConstraint(A p){
undConstraints.add(p);
}
public void addUndConstraint(A p) {
undConstraints.add(p);
}
public void addOderConstraint(Set<Constraint<A>> methodConstraints) {
oderConstraints.add(methodConstraints);
}
public void addOderConstraint(Set<Constraint<A>> methodConstraints) {
oderConstraints.add(methodConstraints);
}
public void addAllUndConstraint(Constraint<A> allUndConstraints){
undConstraints.addAll(allUndConstraints);
}
public void addAllOderConstraint(List<Set<Constraint<A>>> allOderConstraints){
this.oderConstraints.addAll(allOderConstraints);
}
public void addAll(ConstraintSet constraints) {
this.addAllUndConstraint(constraints.undConstraints);
this.addAllOderConstraint(constraints.oderConstraints);
}
public void addAllUndConstraint(Constraint<A> allUndConstraints) {
undConstraints.addAll(allUndConstraints);
}
@Override
public String toString(){
BinaryOperator<String> b = (x,y) -> x+y;
return "\nUND:" + this.undConstraints.toString() + "\n" +
"ODER:" + this.oderConstraints.stream().reduce("", (x,y) -> x.toString()+ "\n" +y, b);
//cartesianProduct().toString();
}
public void addAllOderConstraint(List<Set<Constraint<A>>> allOderConstraints) {
this.oderConstraints.addAll(allOderConstraints);
}
public Set<List<Constraint<A>>> cartesianProduct(){
Set<Constraint<A>> toAdd = new HashSet<>();
toAdd.add(undConstraints);
List<Set<Constraint<A>>> allConstraints = new ArrayList<>();
allConstraints.add(toAdd);
allConstraints.addAll(oderConstraints);
return new GuavaSetOperations().cartesianProduct(allConstraints);
}
public void addAll(ConstraintSet constraints) {
this.addAllUndConstraint(constraints.undConstraints);
this.addAllOderConstraint(constraints.oderConstraints);
}
public <B> ConstraintSet<B> map(Function<? super A, ? extends B> o) {
Hashtable<Constraint<A>,Constraint<B>> CSA2CSB = new Hashtable<>();
ConstraintSet<B> ret = new ConstraintSet<>();
ret.undConstraints = undConstraints.stream().map(o).collect(Collectors.toCollection(Constraint<B>::new));
List<Set<Constraint<B>>> newOder = new ArrayList<>();
@Override
public String toString() {
BinaryOperator<String> b = (x, y) -> x + y;
return "\nUND:\n" + this.undConstraints.toString() +
"ODER:" + this.oderConstraints.stream().reduce("", (x, y) -> x + "\n\t" + y, b) +
"\n";
//cartesianProduct().toString();
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof ConstraintSet<?> other)) return false;
return Objects.equals(undConstraints, other.undConstraints)
&& Objects.equals(oderConstraints, other.oderConstraints);
}
@Override
public int hashCode() {
return Objects.hash(undConstraints, oderConstraints);
}
public Set<List<Constraint<A>>> cartesianProduct() {
Set<Constraint<A>> toAdd = new HashSet<>();
toAdd.add(undConstraints);
List<Set<Constraint<A>>> allConstraints = new ArrayList<>();
allConstraints.add(toAdd);
allConstraints.addAll(oderConstraints);
return new GuavaSetOperations().cartesianProduct(allConstraints);
}
public <B extends IConstraintElement> ConstraintSet<B> map(Function<? super A, ? extends B> o) {
Hashtable<Constraint<A>, Constraint<B>> CSA2CSB = new Hashtable<>();
ConstraintSet<B> ret = new ConstraintSet<>();
ret.undConstraints = undConstraints.stream().map(o).collect(Collectors.toCollection(Constraint<B>::new));
List<Set<Constraint<B>>> newOder = new ArrayList<>();
/*
for(Set<Constraint<A>> oderConstraint : oderConstraints){
oderConstraint.forEach(as -> {
@@ -68,25 +85,25 @@ public class ConstraintSet<A> {
CSA2CSB.put(as, newConst);} );
}
*/
for(Set<Constraint<A>> oderConstraint : oderConstraints){
newOder.add(
oderConstraint.stream().map((Constraint<A> as) -> {
Constraint<B> newConst = as.stream()
.map(o)
.collect(Collectors.toCollection((
() -> new Constraint<B> (as.isInherited(),
as.isImplemented(),
(as.getExtendConstraint() != null)
? as.getExtendConstraint().stream().map(o).collect(Collectors.toCollection(Constraint::new))
: null,
as.getmethodSignatureConstraint().stream().map(o).collect(Collectors.toCollection(HashSet::new))))
));
//CSA2CSB.put(as, newConst);
return newConst;
for (Set<Constraint<A>> oderConstraint : oderConstraints) {
newOder.add(
oderConstraint.stream().map((Constraint<A> as) -> {
Constraint<B> newConst = as.stream()
.map(o)
.collect(Collectors.toCollection((
() -> new Constraint<B>(as.isInherited(),
as.isImplemented(),
(as.getExtendConstraint() != null)
? as.getExtendConstraint().stream().map(o).collect(Collectors.toCollection(Constraint::new))
: null,
as.getmethodSignatureConstraint().stream().map(o).collect(Collectors.toCollection(HashSet::new))))
));
//CSA2CSB.put(as, newConst);
return newConst;
/*
Constraint<B> bs = CSA2CSB.get(as);
@@ -95,36 +112,60 @@ public class ConstraintSet<A> {
}
return bs;
*/
}).collect(Collectors.toSet())
);
}
ret.oderConstraints = newOder;
return ret;
}).collect(Collectors.toSet())
);
}
public void forEach (Consumer<? super A> c) {
undConstraints.stream().forEach(c);
for(Set<Constraint<A>> oderConstraint : oderConstraints){
oderConstraint.parallelStream().forEach((Constraint<A> as) ->
as.stream().forEach(c));
}
ret.oderConstraints = newOder;
return ret;
}
public void forEach(Consumer<? super A> c) {
undConstraints.stream().forEach(c);
for (Set<Constraint<A>> oderConstraint : oderConstraints) {
oderConstraint.parallelStream().forEach((Constraint<A> as) ->
as.stream().forEach(c));
}
public Set<A> getAll () {
Set<A> ret = new HashSet<>();
ret.addAll(undConstraints);
for(Set<Constraint<A>> oderConstraint : oderConstraints){
oderConstraint.parallelStream().forEach((Constraint<A> as) -> ret.addAll(as));
}
return ret;
}
public List<Set<Constraint<A>>> getOderConstraints() {
return oderConstraints;
}
public Set<A> getUndConstraints() {
return undConstraints;
}
public Set<A> getAll() {
Set<A> ret = new HashSet<>(undConstraints);
for (Set<Constraint<A>> oderConstraint : oderConstraints) {
oderConstraint.parallelStream().forEach(ret::addAll);
}
return ret;
}
public List<Set<Constraint<A>>> getOderConstraints() {
return oderConstraints;
}
public Set<A> getUndConstraints() {
return undConstraints;
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("undConstraints", undConstraints.toSerial(keyStorage));
serialized.put("oderConstraints", SerialList.fromMapped(oderConstraints, oderConstraintSet ->
SerialList.fromMapped(oderConstraintSet, oderConstraint ->
oderConstraint.toSerial(keyStorage))
));
return serialized;
}
public static <T extends IConstraintElement> ConstraintSet<T> fromSerial(SerialMap data, UnifyContext context, Class<T> target, KeyStorage keyStorage) {
ConstraintSet<T> constraintSet = new ConstraintSet<>();
constraintSet.undConstraints = Constraint.fromSerial(data.getUUID("undConstraints"), context, target, keyStorage);
constraintSet.oderConstraints = data.getList("oderConstraints").assertListOfLists().stream()
.map(oderConstraintSetData -> oderConstraintSetData.assertListOfUUIDs().stream()
.map(oderConstraintData -> Constraint.fromSerial(oderConstraintData, context, target, keyStorage))
.collect(Collectors.toSet())
).toList();
return constraintSet;
}
}
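
ConstraintSet serializes its undConstraints and oderConstraints as UUID references, so the receiving side needs the element class (to pick the right unmapper) and the same KeyStorage (to resolve the references). A hedged round-trip sketch under the same no-arg constructor assumptions as above:

// sketch only: KeyStorage/UnifyContext construction is assumed, not shown in this diff
KeyStorage keyStorage = new KeyStorage();
UnifyContext context = new UnifyContext();
ConstraintSet<Pair> set = new ConstraintSet<>();
set.addUndConstraint(new Pair(
        new RefType(new JavaClassName("java.lang.Integer"), new NullToken()),
        new RefType(new JavaClassName("java.lang.Number"), new NullToken()),
        PairOperator.SMALLER));
SerialMap data = set.toSerial(keyStorage);
// deserialization resolves the UUID references against the same KeyStorage
ConstraintSet<Pair> restored = ConstraintSet.fromSerial(data, context, Pair.class, keyStorage);
assert restored.equals(set);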

View File

@@ -0,0 +1,4 @@
package de.dhbwstuttgart.typeinference.constraints;
public interface IConstraintElement {
}

View File

@@ -1,72 +1,70 @@
package de.dhbwstuttgart.typeinference.constraints;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.parser.SourceLoc;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import org.antlr.v4.runtime.Token;
public class Pair implements Serializable
{
public final RefTypeOrTPHOrWildcardOrGeneric TA1;
public final RefTypeOrTPHOrWildcardOrGeneric TA2;
public class Pair implements Serializable, IConstraintElement, ISerializableData {
public final RefTypeOrTPHOrWildcardOrGeneric TA1;
public final RefTypeOrTPHOrWildcardOrGeneric TA2;
private SourceLoc location;
private SourceLoc location;
private PairOperator eOperator = PairOperator.SMALLER;
private Boolean noUnification = false;
private PairOperator eOperator = PairOperator.SMALLER;
private boolean noUnification = false;
private Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2 )
{
this.TA1 = TA1;
this.TA2 = TA2;
if(TA1 == null || TA2 == null)
throw new NullPointerException();
eOperator = PairOperator.SMALLER;
}
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator eOp)
{
// constructor
this(TA1,TA2);
this.eOperator = eOp;
}
private Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2) {
this.TA1 = TA1;
this.TA2 = TA2;
if (TA1 == null || TA2 == null)
throw new NullPointerException();
eOperator = PairOperator.SMALLER;
}
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator e0p, SourceLoc location) {
this(TA1, TA2, e0p);
this.location = location;
}
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator eOp, Boolean noUnification)
{
// constructor
this(TA1,TA2);
this.eOperator = eOp;
this.noUnification = noUnification;
}
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator eOp) {
// constructor
this(TA1, TA2);
this.eOperator = eOp;
}
public SourceLoc getLocation() {
return this.location;
}
public String toString()
{
// otth: prints a pair as a string --> for debugging and comparison
String strElement1 = "NULL";
String strElement2 = "NULL";
String Operator = "<.";
if( TA1 != null )
strElement1 = TA1.toString();
if( TA2 != null )
strElement2 = TA2.toString();
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator e0p, SourceLoc location) {
this(TA1, TA2, e0p);
this.location = location;
}
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator eOp, boolean noUnification) {
// constructor
this(TA1, TA2);
this.eOperator = eOp;
this.noUnification = noUnification;
}
public SourceLoc getLocation() {
return this.location;
}
public String toString() {
// otth: prints a pair as a string --> for debugging and comparison
String strElement1 = "NULL";
String strElement2 = "NULL";
String Operator = "<.";
if (TA1 != null)
strElement1 = TA1.toString();
if (TA2 != null)
strElement2 = TA2.toString();
/* PL commented out 2018-05-24
if(OperatorEqual())
@@ -76,80 +74,104 @@ public class Pair implements Serializable
if(OperatorSmallerExtends())
Operator = "<?";
*/
return "\n(" + strElement1 + " " + eOperator.toString() + " " + strElement2 + ")";
/*- Equals: " + bEqual*/
}
/**
* <br/>Author: Jörg Bäuerle
* @param obj
* @return
*/
public boolean equals(Object obj)
{
boolean ret = true;
ret &= (obj instanceof Pair);
if(!ret)return ret;
ret &= ((Pair)obj).TA1.equals(this.TA1);
ret &= ((Pair)obj).TA2.equals(this.TA2);
return ret;
}
return "\n(P: " + strElement1 + " " + eOperator.toString() + " " + strElement2 + ")";
/**
* Author: Arne Lüdtke<br/>
* Checks whether the operator is of type Equal.
*/
public boolean OperatorEqual()
{
return eOperator == PairOperator.EQUALSDOT;
}
/**
* Author: Arne Lüdtke<br/>
* Checks whether the operator is of type Smaller.
*/
public boolean OperatorSmaller()
{
return eOperator == PairOperator.SMALLER;
}
/**
* Author: Arne Lüdtke<br/>
* Checks whether the operator is of type SmallerExtends.
*/
public boolean OperatorSmallerExtends()
{
return eOperator == PairOperator.SMALLERDOTWC;
}
/**
* Author: Arne Lüdtke<br/>
* Returns the operator.
*/
public PairOperator GetOperator()
{
return eOperator;
}
/*- Equals: " + bEqual*/
}
public boolean OperatorSmallerDot() {
return eOperator == PairOperator.SMALLERDOT;
}
static public Map<String, TypePlaceholder> generateTPHMap(ConstraintSet<Pair> constraints) {
HashMap<String, TypePlaceholder> ret = new HashMap<>();
constraints.map((Pair p) -> {
if (p.TA1 instanceof TypePlaceholder) {
ret.put(((TypePlaceholder) p.TA1).getName(), (TypePlaceholder) p.TA1);
}
if (p.TA2 instanceof TypePlaceholder) {
ret.put(((TypePlaceholder) p.TA2).getName(), (TypePlaceholder) p.TA2);
}
return null;
});
return ret;
}
/**
* <br/>Author: Jörg Bäuerle
*
* @param obj
* @return
*/
public boolean equals(Object obj) {
return (
(obj instanceof Pair pairObj) &&
pairObj.TA1.equals(this.TA1) &&
pairObj.TA2.equals(this.TA2)
);
}
/**
* Author: Arne Lüdtke<br/>
* Checks whether the operator is of type Equal.
*/
public boolean OperatorEqual() {
return eOperator == PairOperator.EQUALSDOT;
}
/**
* Author: Arne Lüdtke<br/>
* Checks whether the operator is of type Smaller.
*/
public boolean OperatorSmaller() {
return eOperator == PairOperator.SMALLER;
}
/**
* Author: Arne Lüdtke<br/>
* Checks whether the operator is of type SmallerExtends.
*/
public boolean OperatorSmallerExtends() {
return eOperator == PairOperator.SMALLERDOTWC;
}
/**
* Author: Arne Lüdtke<br/>
* Returns the operator.
*/
public PairOperator GetOperator() {
return eOperator;
}
public boolean OperatorSmallerDot() {
return eOperator == PairOperator.SMALLERDOT;
}
static public Map<String, TypePlaceholder> generateTPHMap(ConstraintSet<Pair> constraints) {
HashMap<String, TypePlaceholder> ret = new HashMap<>();
constraints.map((Pair p) -> {
if (p.TA1 instanceof TypePlaceholder) {
ret.put(((TypePlaceholder) p.TA1).getName(), (TypePlaceholder) p.TA1);
}
if (p.TA2 instanceof TypePlaceholder) {
ret.put(((TypePlaceholder) p.TA2).getName(), (TypePlaceholder) p.TA2);
}
return null;
});
return ret;
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
// because toString() will output TA1 and TA2 recursively, we can ignore potential infinite recursion here too
SerialMap serialized = new SerialMap();
serialized.put("ta1", this.TA1.toSerial(keyStorage));
serialized.put("ta2", this.TA2.toSerial(keyStorage));
serialized.put("op", this.eOperator.toString());
serialized.put("noUnification", this.noUnification ? 1 : 0);
serialized.put("location", this.location == null ? null : this.location.toSerial(keyStorage));
return serialized;
}
public static Pair fromSerial(SerialMap data, UnifyContext context) {
String op = data.getValue("op").getOf(String.class);
SerialMap ta1 = data.getMap("ta1");
SerialMap ta2 = data.getMap("ta2");
boolean noUnification = data.getValue("noUnification").getOf(Integer.class) == 1;
SerialMap location = data.getMapOrNull("location");
var pair = new Pair(
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(ta1, context),
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(ta2, context),
PairOperator.fromString(op),
noUnification
);
if (location != null) pair.location = SourceLoc.fromSerial(location);
return pair;
}
}
// ino.end

View File

@@ -1,15 +1,19 @@
package de.dhbwstuttgart.typeinference.result;
import de.dhbwstuttgart.exceptions.NotImplementedException;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
/**
* contains all pairs that cannot occur in a result;
* they are needed for origPairs in PairTPHsmallerTPH, since pairs
* that are not a result can occur there as well (e.g. with FunN$$)
*/
public class PairNoResult extends ResultPair<RefTypeOrTPHOrWildcardOrGeneric, RefTypeOrTPHOrWildcardOrGeneric>{
public class PairNoResult extends ResultPair<RefTypeOrTPHOrWildcardOrGeneric, RefTypeOrTPHOrWildcardOrGeneric>
implements ISerializableData {
//public final TypePlaceholder left;
//public final TypePlaceholder right;
@@ -17,7 +21,7 @@ public class PairNoResult extends ResultPair<RefTypeOrTPHOrWildcardOrGeneric, Re
* the original pair from which this ResultPair was created;
* important for generated generics
*/
ResultPair origPair;
ResultPair<?,?> origPair;
public PairNoResult(RefTypeOrTPHOrWildcardOrGeneric left, RefTypeOrTPHOrWildcardOrGeneric right){
super(left, right);
@@ -29,4 +33,24 @@ public class PairNoResult extends ResultPair<RefTypeOrTPHOrWildcardOrGeneric, Re
throw new NotImplementedException();
//visitor.visit(this);
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("left", this.getLeft().toSerial(keyStorage));
serialized.put("right", this.getRight().toSerial(keyStorage));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static PairNoResult fromSerial2(SerialMap data, UnifyContext context) {
SerialMap left = data.getMap("left");
SerialMap right = data.getMap("right");
return new PairNoResult(
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(left, context),
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(right, context)
);
}
}

View File

@@ -1,9 +1,13 @@
package de.dhbwstuttgart.typeinference.result;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
public class PairTPHEqualTPH extends ResultPair<TypePlaceholder, TypePlaceholder> {
public class PairTPHEqualTPH extends ResultPair<TypePlaceholder, TypePlaceholder> implements ISerializableData {
public PairTPHEqualTPH(TypePlaceholder tl, TypePlaceholder tr) {
super(tl, tr);
}
@@ -12,4 +16,24 @@ public class PairTPHEqualTPH extends ResultPair<TypePlaceholder, TypePlaceholder
public void accept(ResultPairVisitor visitor) {
visitor.visit(this);
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("left", this.getLeft().toSerial(keyStorage));
serialized.put("right", this.getRight().toSerial(keyStorage));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static PairTPHEqualTPH fromSerial2(SerialMap data, UnifyContext context) {
SerialMap left = data.getMap("left");
SerialMap right = data.getMap("right");
return new PairTPHEqualTPH(
(TypePlaceholder) RefTypeOrTPHOrWildcardOrGeneric.fromSerial(left, context),
(TypePlaceholder) RefTypeOrTPHOrWildcardOrGeneric.fromSerial(right, context)
);
}
}

View File

@@ -1,13 +1,17 @@
package de.dhbwstuttgart.typeinference.result;
import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
/**
* Represents A =. RefType
*/
public class PairTPHequalRefTypeOrWildcardType extends ResultPair{
public class PairTPHequalRefTypeOrWildcardType extends ResultPair<TypePlaceholder, RefTypeOrTPHOrWildcardOrGeneric>
implements ISerializableData {
public final TypePlaceholder left;
public final RefTypeOrTPHOrWildcardOrGeneric right;
@@ -26,4 +30,24 @@ public class PairTPHequalRefTypeOrWildcardType extends ResultPair{
public String toString() {
return "(" + left.toString() + " = " + right.toString() + ")";
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("left", this.getLeft().toSerial(keyStorage));
serialized.put("right", this.getRight().toSerial(keyStorage));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static PairTPHequalRefTypeOrWildcardType fromSerial2(SerialMap data, UnifyContext context) {
SerialMap left = data.getMap("left");
SerialMap right = data.getMap("right");
return new PairTPHequalRefTypeOrWildcardType(
(TypePlaceholder)RefTypeOrTPHOrWildcardOrGeneric.fromSerial(left, context),
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(right, context)
);
}
}

View File

@@ -1,12 +1,17 @@
package de.dhbwstuttgart.typeinference.result;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
/**
* Represents: A <. B
*/
public class PairTPHsmallerTPH extends ResultPair{
public class PairTPHsmallerTPH extends ResultPair<TypePlaceholder,TypePlaceholder>
implements ISerializableData {
public final TypePlaceholder left;
public final TypePlaceholder right;
@@ -14,7 +19,7 @@ public class PairTPHsmallerTPH extends ResultPair{
* the original pair from which this ResultPair was created;
* important for generated generics
*/
ResultPair origPair;
ResultPair<?,?> origPair;
public PairTPHsmallerTPH(TypePlaceholder left, TypePlaceholder right){
super(left, right);
@@ -22,7 +27,7 @@ public class PairTPHsmallerTPH extends ResultPair{
this.right = right;
}
public PairTPHsmallerTPH(TypePlaceholder left, TypePlaceholder right, ResultPair origPair){
public PairTPHsmallerTPH(TypePlaceholder left, TypePlaceholder right, ResultPair<?,?> origPair){
this(left, right);
this.origPair = origPair;
}
@@ -36,4 +41,24 @@ public class PairTPHsmallerTPH extends ResultPair{
public String toString() {
return "(" + left.toString() + " < " + right.toString() + ")";
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("left", this.getLeft().toSerial(keyStorage));
serialized.put("right", this.getRight().toSerial(keyStorage));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static PairTPHsmallerTPH fromSerial2(SerialMap data, UnifyContext context) {
SerialMap left = data.getMap("left");
SerialMap right = data.getMap("right");
return new PairTPHsmallerTPH(
(TypePlaceholder) RefTypeOrTPHOrWildcardOrGeneric.fromSerial(left, context),
(TypePlaceholder) RefTypeOrTPHOrWildcardOrGeneric.fromSerial(right, context)
);
}
}

View File

@@ -1,11 +1,17 @@
package de.dhbwstuttgart.typeinference.result;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
/**
* Pairs that represent the unification result
*/
public abstract class ResultPair<A extends RefTypeOrTPHOrWildcardOrGeneric,B extends RefTypeOrTPHOrWildcardOrGeneric> {
public abstract class ResultPair<A extends RefTypeOrTPHOrWildcardOrGeneric,B extends RefTypeOrTPHOrWildcardOrGeneric>
implements ISerializableData {
private final A left;
private final B right;
@@ -58,5 +64,35 @@ public abstract class ResultPair<A extends RefTypeOrTPHOrWildcardOrGeneric,B ext
return false;
return true;
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
String type = switch (this) {
case PairNoResult _ -> "pnr";
case PairTPHEqualTPH _ -> "ptet";
case PairTPHsmallerTPH _ -> "ptst";
case PairTPHequalRefTypeOrWildcardType _ -> "ptertwt";
default -> throw new RuntimeException("No type defined for ResultPair of class " + this.getClass().getName());
};
serialized.put("type", type);
// we only insert null for the object; subclasses are expected to call this and then replace the value with their own payload
serialized.put("object", SerialValue.NULL);
return serialized;
}
public static <A extends RefTypeOrTPHOrWildcardOrGeneric,B extends RefTypeOrTPHOrWildcardOrGeneric> ResultPair<A,B>
fromSerial(SerialMap data, UnifyContext context) {
String type = data.getValue("type").getOf(String.class);
SerialMap object = data.getMap("object");
return switch (type) {
case "pnr" -> (ResultPair) PairNoResult.fromSerial2(object, context);
case "ptet" -> (ResultPair) PairTPHEqualTPH.fromSerial2(object, context);
case "ptst" -> (ResultPair) PairTPHsmallerTPH.fromSerial2(object, context);
case "ptertwt" -> (ResultPair) PairTPHequalRefTypeOrWildcardType.fromSerial2(object, context);
default -> throw new RuntimeException("Could not unserialize class of unhandled type " + type);
};
}
}
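
ResultPair uses the same wrapper idea as RefTypeOrTPHOrWildcardOrGeneric, only with the short type codes above ("pnr", "ptet", "ptst", "ptertwt") instead of class names. A minimal dispatch sketch under the same no-arg constructor assumptions as the earlier sketches:

// sketch only: KeyStorage/UnifyContext construction is assumed, not shown in this diff
KeyStorage keyStorage = new KeyStorage();
UnifyContext context = new UnifyContext();
ResultPair<TypePlaceholder, RefTypeOrTPHOrWildcardOrGeneric> pair =
        new PairTPHequalRefTypeOrWildcardType(
                TypePlaceholder.fresh(new NullToken()),
                new RefType(new JavaClassName("java.lang.String"), new NullToken()));
SerialMap data = pair.toSerial(keyStorage);  // serialized as { "type": "ptertwt", "object": { ... } }
ResultPair<?, ?> restored = ResultPair.fromSerial(data, context);
assert restored.equals(pair);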

View File

@@ -1,5 +1,15 @@
package de.dhbwstuttgart.typeinference.result;
import com.google.common.collect.Ordering;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTaskHelper;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.util.Logger;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Set;
@@ -10,148 +20,179 @@ import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.SuperWildcardType;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import java.util.stream.Collectors;
@SuppressWarnings("rawtypes")
public class ResultSet {
public class ResultSet implements ISerializableData {
public final Set<ResultPair> results;
public Set<ResultPair<TypePlaceholder, TypePlaceholder>> genIns;
public final Set<ResultPair> results;
public Set<ResultPair<TypePlaceholder, TypePlaceholder>> genIns;
public ResultSet(Set<ResultPair> set){
this.results = set;
this.genIns = new HashSet<>();
results.forEach(x -> { if (x instanceof PairTPHsmallerTPH) { this.genIns.add(x);}} );
}
public boolean contains(ResultPair toCheck) {
return this.results.contains(toCheck);
}
public void remove(ResultPair toCheck) {
results.remove(toCheck);
public ResultSet(Set<ResultPair> set) {
this.results = set;
this.genIns = TypeUnifyTaskHelper.getPresizedHashSet(results.size());
results.forEach(x -> {
if (x instanceof PairTPHsmallerTPH) {
this.genIns.add(x);
}
});
}
public boolean contains(ResultPair toCheck) {
return this.results.contains(toCheck);
}
public void remove(ResultPair toCheck) {
results.remove(toCheck);
}
public ResolvedType resolveType(RefTypeOrTPHOrWildcardOrGeneric type) {
if (type instanceof TypePlaceholder)
return new Resolver(this).resolve((TypePlaceholder) type);
if (type instanceof GenericRefType) return new ResolvedType(type, new HashSet<>());
if (type instanceof RefType) {
RelatedTypeWalker related = new RelatedTypeWalker(null, this);
type.accept(related);
return new ResolvedType(type, related.relatedTPHs);
} else {
throw new NotImplementedException();
//return new ResolvedType(type,new HashSet<>());
}
}
public ResolvedType resolveType(RefTypeOrTPHOrWildcardOrGeneric type) {
if(type instanceof TypePlaceholder)
return new Resolver(this).resolve((TypePlaceholder)type);
if(type instanceof GenericRefType)return new ResolvedType(type, new HashSet<>());
if(type instanceof RefType) {
RelatedTypeWalker related = new RelatedTypeWalker(null, this);
type.accept(related);
return new ResolvedType(type, related.relatedTPHs);
} else {
throw new NotImplementedException();
//return new ResolvedType(type,new HashSet<>());
}
public String toString() {
var results = new ArrayList<>(this.results);
results.sort(
Comparator
.comparingInt((ResultPair o) -> o.getLeft().toString().length())
.thenComparing(o -> o.getLeft().toString())
.thenComparingInt(o -> o.getRight().toString().length())
.thenComparing(o -> o.getRight().toString())
);
return results.toString();
}
@Override
public boolean equals(Object o) {
if (o instanceof ResultSet other) {
// sort both result lists
var thisElements = new ArrayList<>(this.results);
thisElements.sort(Ordering.usingToString());
var otherElements = new ArrayList<>(other.results);
otherElements.sort(Ordering.usingToString());
return thisElements.equals(otherElements);
} else {
return false;
}
}
public String toString() {
return results.toString();
}
@Override
public int hashCode() {
return results.hashCode();
}
@Override
public boolean equals(Object o) {
if (o instanceof ResultSet) {
ResultSet other = (ResultSet)o;
return this.results.equals(other.results);
} else {
return false;
}
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("results", SerialList.fromMapped(results, result -> result.toSerial(keyStorage)));
return serialized;
}
@Override
public int hashCode() {
return results.hashCode();
}
public static ResultSet fromSerial(SerialMap data, UnifyContext context) {
var resultsData = data.getList("results").assertListOfMaps();
return new ResultSet(resultsData.stream().map(resultData -> ResultPair.fromSerial(resultData, context)).collect(Collectors.toSet()));
}
}
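A hedged sketch of the intended round trip; keyStorage and context are assumed to be provided by the surrounding server code:
// Hypothetical round trip through the serialization added above.
static ResultSet roundTrip(ResultSet resultSet, KeyStorage keyStorage, UnifyContext context) {
    SerialMap data = resultSet.toSerial(keyStorage);
    return ResultSet.fromSerial(data, context); // expected to equal resultSet if every pair type round-trips
}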
class Resolver implements ResultSetVisitor {
private final ResultSet result;
private TypePlaceholder toResolve;
private RefTypeOrTPHOrWildcardOrGeneric resolved;
private final Set<GenericInsertPair> additionalTPHs = new HashSet<>();
private ResultPair<?,?> currentPair;
private final ResultSet result;
private TypePlaceholder toResolve;
private RefTypeOrTPHOrWildcardOrGeneric resolved;
private final Set<GenericInsertPair> additionalTPHs = new HashSet<>();
private ResultPair<?, ?> currentPair;
public Resolver(ResultSet resultPairs){
this.result = resultPairs;
public static Logger logger = new Logger("Resolver");
public Resolver(ResultSet resultPairs) {
this.result = resultPairs;
}
public ResolvedType resolve(TypePlaceholder tph) {
toResolve = tph;
resolved = null;
logger.info(tph.toString());
for (ResultPair<?, ?> resultPair : result.results) {
if (resultPair instanceof PairTPHEqualTPH && ((PairTPHEqualTPH) resultPair).getLeft().equals(toResolve)) {
currentPair = resultPair;
return resolve(((PairTPHEqualTPH) resultPair).getRight());
}
}
for (ResultPair<?, ?> resultPair : result.results) {
currentPair = resultPair;
resultPair.accept(this);
}
if (resolved == null) {//TPH kommt nicht im Result vor:
resolved = tph;
}
public ResolvedType resolve(TypePlaceholder tph){
toResolve = tph;
resolved = null;
System.out.println(tph.toString());
for(ResultPair<?,?> resultPair : result.results) {
if(resultPair instanceof PairTPHEqualTPH && ((PairTPHEqualTPH) resultPair).getLeft().equals(toResolve)){
currentPair = resultPair;
return resolve(((PairTPHEqualTPH) resultPair).getRight());
}
}
for(ResultPair<?,?> resultPair : result.results){
currentPair = resultPair;
resultPair.accept(this);
}
if(resolved==null){//TPH kommt nicht im Result vor:
resolved = tph;
}
ResolvedType result = new ResolvedType(resolved, additionalTPHs);//resolved;
result.setResultPair(currentPair);
return result;
}
ResolvedType result = new ResolvedType(resolved, additionalTPHs);//resolved;
result.setResultPair(currentPair);
return result;
@Override
public void visit(PairTPHsmallerTPH p) {
currentPair = p;
if (p.left.equals(toResolve)) {
additionalTPHs.add(new GenericInsertPair(p.left, p.right));
additionalTPHs.addAll(new RelatedTypeWalker(p.right, result).relatedTPHs);
}
if (p.right.equals(toResolve))
additionalTPHs.addAll(new RelatedTypeWalker(p.left, result).relatedTPHs);
}
@Override
public void visit(PairTPHsmallerTPH p) {
currentPair = p;
if(p.left.equals(toResolve)){
additionalTPHs.add(new GenericInsertPair(p.left, p.right));
additionalTPHs.addAll(new RelatedTypeWalker(p.right, result).relatedTPHs);
}
if(p.right.equals(toResolve))
additionalTPHs.addAll(new RelatedTypeWalker(p.left, result).relatedTPHs);
@Override
public void visit(PairTPHequalRefTypeOrWildcardType p) {
currentPair = p;
if (p.left.equals(toResolve)) {
resolved = p.right;
RelatedTypeWalker related = new RelatedTypeWalker(null, result);
p.right.accept(related);
additionalTPHs.addAll(related.relatedTPHs);
}
}
@Override
public void visit(PairTPHequalRefTypeOrWildcardType p) {
currentPair = p;
if(p.left.equals(toResolve)){
resolved = p.right;
RelatedTypeWalker related = new RelatedTypeWalker(null, result);
p.right.accept(related);
additionalTPHs.addAll(related.relatedTPHs);
}
}
@Override
public void visit(PairTPHEqualTPH p) {
//Do nothing. Dieser Fall wird in der resolve-Methode abgefangen
}
@Override
public void visit(PairTPHEqualTPH p) {
//Do nothing. Dieser Fall wird in der resolve-Methode abgefangen
}
@Override
public void visit(RefType refType) {
@Override
public void visit(RefType refType) {
}
}
@Override
public void visit(GenericRefType genericRefType) {
@Override
public void visit(GenericRefType genericRefType) {
}
}
@Override
public void visit(SuperWildcardType superWildcardType) {
@Override
public void visit(SuperWildcardType superWildcardType) {
}
}
@Override
public void visit(TypePlaceholder typePlaceholder) {
@Override
public void visit(TypePlaceholder typePlaceholder) {
}
}
@Override
public void visit(ExtendsWildcardType extendsWildcardType) {
@Override
public void visit(ExtendsWildcardType extendsWildcardType) {
}
}
}
@@ -161,149 +202,150 @@ class Resolver implements ResultSetVisitor {
@SuppressWarnings("rawtypes")
class TPHResolver implements ResultSetVisitor {
private final TypePlaceholder tph;
Set<GenericInsertPair> resolved = new HashSet<>();
private final ResultSet resultSet;
private final TypePlaceholder tph;
Set<GenericInsertPair> resolved = new HashSet<>();
private final ResultSet resultSet;
TPHResolver(TypePlaceholder tph, ResultSet resultSet){
this.resultSet = resultSet;
this.tph = tph;
for(ResultPair p : resultSet.results){
p.accept(this);
}
if(resolved.size() == 0){
resolved.add(new GenericInsertPair(tph, null));
}
TPHResolver(TypePlaceholder tph, ResultSet resultSet) {
this.resultSet = resultSet;
this.tph = tph;
for (ResultPair p : resultSet.results) {
p.accept(this);
}
@Override
public void visit(PairTPHsmallerTPH p) {
if(p.left.equals(tph) || p.right.equals(tph)){
resolved.add(new GenericInsertPair(p.left, p.right));
}
if (resolved.size() == 0) {
resolved.add(new GenericInsertPair(tph, null));
}
}
@Override
public void visit(PairTPHequalRefTypeOrWildcardType p) {
TypePlaceholder otherSide = null;
if(p.right.equals(tph)){
otherSide = p.left;
}
if(otherSide != null){
Set<ResultPair> newResultSet = new HashSet<>(this.resultSet.results);
newResultSet.remove(p);
resolved.addAll(new TPHResolver(otherSide, new ResultSet(newResultSet)).resolved);
}
@Override
public void visit(PairTPHsmallerTPH p) {
if (p.left.equals(tph) || p.right.equals(tph)) {
resolved.add(new GenericInsertPair(p.left, p.right));
}
}
@Override
public void visit(PairTPHEqualTPH p) {
//ignorieren. Wird vom Resolver behandelt
@Override
public void visit(PairTPHequalRefTypeOrWildcardType p) {
TypePlaceholder otherSide = null;
if (p.right.equals(tph)) {
otherSide = p.left;
}
@Override
public void visit(RefType refType) {
if (otherSide != null) {
Set<ResultPair> newResultSet = new HashSet<>(this.resultSet.results);
newResultSet.remove(p);
resolved.addAll(new TPHResolver(otherSide, new ResultSet(newResultSet)).resolved);
}
}
@Override
public void visit(GenericRefType genericRefType) {
@Override
public void visit(PairTPHEqualTPH p) {
//ignorieren. Wird vom Resolver behandelt
}
}
@Override
public void visit(RefType refType) {
@Override
public void visit(SuperWildcardType superWildcardType) {
}
}
@Override
public void visit(GenericRefType genericRefType) {
@Override
public void visit(TypePlaceholder typePlaceholder) {
}
}
@Override
public void visit(SuperWildcardType superWildcardType) {
@Override
public void visit(ExtendsWildcardType extendsWildcardType) {
}
}
@Override
public void visit(TypePlaceholder typePlaceholder) {
}
@Override
public void visit(ExtendsWildcardType extendsWildcardType) {
}
}
@SuppressWarnings("rawtypes")
class RelatedTypeWalker implements ResultSetVisitor {
final Set<GenericInsertPair> relatedTPHs = new HashSet<>();
private final TypePlaceholder toResolve;
private final ResultSet resultSet;
final Set<GenericInsertPair> relatedTPHs = new HashSet<>();
private final TypePlaceholder toResolve;
private final ResultSet resultSet;
/**
* Läuft über das resultSet und speichert alle TPHs, welche mit start in Verbindung stehen
* @param start - kann null sein, wenn der Walker für einen RefType benutzt wird
* @param resultSet
*/
RelatedTypeWalker(TypePlaceholder start, ResultSet resultSet){
this.toResolve = start;
this.resultSet = resultSet;
int resolved = 0;
do{
resolved = relatedTPHs.size();
for(ResultPair p : resultSet.results){
p.accept(this);
p.accept(this);
}
}while(resolved - relatedTPHs.size() > 0);
}
/**
* Läuft über das resultSet und speichert alle TPHs, welche mit start in Verbindung stehen
*
* @param start - kann null sein, wenn der Walker für einen RefType benutzt wird
* @param resultSet
*/
RelatedTypeWalker(TypePlaceholder start, ResultSet resultSet) {
this.toResolve = start;
this.resultSet = resultSet;
int resolved = 0;
do {
resolved = relatedTPHs.size();
for (ResultPair p : resultSet.results) {
p.accept(this);
p.accept(this);
}
} while (resolved - relatedTPHs.size() > 0);
}
@Override
public void visit(PairTPHsmallerTPH p) {
if(p.getRight().equals(toResolve)){
relatedTPHs.addAll(new TPHResolver(p.right, resultSet).resolved);
//relatedTPHs.addAll(new RelatedTypeWalker(p.right, resultSet).relatedTPHs);
}
if(p.getLeft().equals(toResolve)){
relatedTPHs.addAll(new TPHResolver(p.left, resultSet).resolved);
//relatedTPHs.addAll(new RelatedTypeWalker(p.left, resultSet).relatedTPHs);
}
@Override
public void visit(PairTPHsmallerTPH p) {
if (p.getRight().equals(toResolve)) {
relatedTPHs.addAll(new TPHResolver(p.right, resultSet).resolved);
//relatedTPHs.addAll(new RelatedTypeWalker(p.right, resultSet).relatedTPHs);
}
if (p.getLeft().equals(toResolve)) {
relatedTPHs.addAll(new TPHResolver(p.left, resultSet).resolved);
//relatedTPHs.addAll(new RelatedTypeWalker(p.left, resultSet).relatedTPHs);
}
}
@Override
public void visit(PairTPHequalRefTypeOrWildcardType p) {
if(p.getLeft().equals(toResolve)){
p.getRight().accept(this);
}
@Override
public void visit(PairTPHequalRefTypeOrWildcardType p) {
if (p.getLeft().equals(toResolve)) {
p.getRight().accept(this);
}
}
@Override
public void visit(PairTPHEqualTPH p) {
//Kann ignoriert werden. Diese Fälle werden vom Resolver behandelt
}
@Override
public void visit(PairTPHEqualTPH p) {
//Kann ignoriert werden. Diese Fälle werden vom Resolver behandelt
}
/*
Die folgenden Funktionen fügen alle TPHs an die relatedTPHs an, denen sie begegnen:
Das wird verwendet, wenn alle relatedTPHs aus den Parametern eines RefTypes angefügt werden sollen
*/
@Override
public void visit(RefType refType) {
for(RefTypeOrTPHOrWildcardOrGeneric param : refType.getParaList()){
param.accept(this);
}
@Override
public void visit(RefType refType) {
for (RefTypeOrTPHOrWildcardOrGeneric param : refType.getParaList()) {
param.accept(this);
}
}
@Override
public void visit(SuperWildcardType superWildcardType) {
superWildcardType.getInnerType().accept(this);
}
@Override
public void visit(SuperWildcardType superWildcardType) {
superWildcardType.getInnerType().accept(this);
}
@Override
public void visit(TypePlaceholder typePlaceholder) {
relatedTPHs.addAll(new TPHResolver(typePlaceholder, resultSet).resolved);
}
@Override
public void visit(TypePlaceholder typePlaceholder) {
relatedTPHs.addAll(new TPHResolver(typePlaceholder, resultSet).resolved);
}
@Override
public void visit(ExtendsWildcardType extendsWildcardType) {
extendsWildcardType.getInnerType().accept(this);
}
@Override
public void visit(ExtendsWildcardType extendsWildcardType) {
extendsWildcardType.getInnerType().accept(this);
}
@Override
public void visit(GenericRefType genericRefType) {
}
@Override
public void visit(GenericRefType genericRefType) {
}
}

View File

@@ -14,6 +14,7 @@ import de.dhbwstuttgart.typeinference.assumptions.TypeInferenceBlockInformation;
import de.dhbwstuttgart.typeinference.assumptions.TypeInferenceInformation;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTaskHelper;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.util.BiRelation;
import org.antlr.v4.runtime.Token;
@@ -33,7 +34,7 @@ public class TYPE {
public ConstraintSet getConstraints() {
ConstraintSet ret = new ConstraintSet();
for (ClassOrInterface cl : sf.KlassenVektor) {
var allClasses = new HashSet<ClassOrInterface>();
Set<ClassOrInterface> allClasses = TypeUnifyTaskHelper.getPresizedHashSet(allAvailableClasses.size() + sf.availableClasses.size());
allClasses.addAll(allAvailableClasses);
allClasses.addAll(sf.availableClasses);
ret.addAll(getConstraintsClass(cl, new TypeInferenceInformation(allClasses)));
@@ -68,7 +69,7 @@ public class TYPE {
for(SourceFile sourceFile : sfs){
for(JavaClassName importName : sourceFile.imports){
System.out.println(importName);
context.logger().info(importName);
try {
classes.add(ASTFactory.createClass(classLoader.loadClass(importName.toString())));
} catch (ClassNotFoundException e) {

View File

@@ -1,6 +1,8 @@
//PL 2018-12-19: Merge chekcen
package de.dhbwstuttgart.typeinference.typeAlgo;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTaskHelper;
import java.util.*;
import java.util.stream.Collectors;
@@ -116,17 +118,18 @@ public class TYPEStmt implements StatementVisitor {
@Override
public void visit(FieldVar fieldVar) {
fieldVar.receiver.accept(this);
Set<Constraint> oderConstraints = new HashSet<>();
List<FieldAssumption> fieldAssumptions = info.getFields(fieldVar.fieldVarName);
Set<Constraint> oderConstraints = TypeUnifyTaskHelper.getPresizedHashSet(fieldAssumptions.size());
for (FieldAssumption fieldAssumption : info.getFields(fieldVar.fieldVarName)) {
for (FieldAssumption fieldAssumption : fieldAssumptions) {
Constraint constraint = new Constraint();
GenericsResolver resolver = getResolverInstance();
constraint.add(new Pair(fieldVar.receiver.getType(), fieldAssumption.getReceiverType(resolver), PairOperator.SMALLERDOT, loc(fieldVar.getOffset()))); // PL 2019-12-09: SMALLERDOT eingefuegt, EQUALSDOT entfernt, wenn ds Field privat ist muesste es EQUALSDOT lauten
constraint.add(new Pair(fieldVar.getType(), fieldAssumption.getType(resolver), PairOperator.EQUALSDOT, loc(fieldVar.getOffset())));
oderConstraints.add(constraint);
}
if (oderConstraints.size() == 0)
if (oderConstraints.isEmpty())
throw new TypeinferenceException("Kein Feld " + fieldVar.fieldVarName + " gefunden", fieldVar.getOffset());
constraintsSet.addOderConstraint(oderConstraints);
}
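The getPresizedHashSet helper used here is not shown in this excerpt; a plausible minimal sketch, assuming it merely accounts for the default load factor of 0.75:
// Hypothetical helper: a HashSet with enough initial capacity to hold expectedSize
// elements without rehashing (the HashMap/HashSet default load factor is 0.75).
public static <T> HashSet<T> getPresizedHashSet(int expectedSize) {
    return new HashSet<>((int) (expectedSize / 0.75f) + 1);
}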
@@ -141,7 +144,7 @@ public class TYPEStmt implements StatementVisitor {
@Override
public void visit(ForEachStmt forEachStmt) {
var iterableType = new RefType(ASTFactory.createClass(java.lang.Iterable.class).getClassName(), Arrays.asList(new ExtendsWildcardType(forEachStmt.statement.getType(), new NullToken())), new NullToken());
var iterableType = new RefType(ASTFactory.createClass(java.lang.Iterable.class).getClassName(), List.of(new ExtendsWildcardType(forEachStmt.statement.getType(), new NullToken())), new NullToken());
constraintsSet.addUndConstraint(new Pair(forEachStmt.expression.getType(), iterableType, PairOperator.SMALLERDOT, loc(forEachStmt.getOffset())));
forEachStmt.statement.accept(this);
forEachStmt.expression.accept(this);
@@ -189,7 +192,7 @@ public class TYPEStmt implements StatementVisitor {
methodCall.receiver.accept(this);
// Overloading:
Set<Constraint<Pair>> methodConstraints = new HashSet<>();
for (MethodAssumption m : this.getMethods(methodCall.name, methodCall.arglist, info)) {
for (MethodAssumption m : TYPEStmt.getMethods(methodCall.name, methodCall.arglist, info)) {
GenericsResolver resolver = getResolverInstance();
Set<Constraint<Pair>> oneMethodConstraints = generateConstraint(methodCall, m, info, resolver);
methodConstraints.addAll(oneMethodConstraints);
@@ -199,7 +202,7 @@ public class TYPEStmt implements StatementVisitor {
* oneMethodConstraint.setExtendConstraint(extendsOneMethodConstraint); extendsOneMethodConstraint.setExtendConstraint(oneMethodConstraint); methodConstraints.add(extendsOneMethodConstraint);
*/
}
if (methodConstraints.size() < 1) {
if (methodConstraints.isEmpty()) {
throw new TypeinferenceException("Methode " + methodCall.name + " ist nicht vorhanden!", methodCall.getOffset());
}
constraintsSet.addOderConstraint(methodConstraints);
@@ -212,7 +215,7 @@ public class TYPEStmt implements StatementVisitor {
for (MethodAssumption m : this.getConstructors(info, (RefType) methodCall.getType(), methodCall.getArgumentList())) {
methodConstraints.add(generateConstructorConstraint(methodCall, m, info, getResolverInstance()));
}
if (methodConstraints.size() < 1) {
if (methodConstraints.isEmpty()) {
throw new TypeinferenceException("Konstruktor in Klasse " + methodCall.getType().toString() + " ist nicht vorhanden!", methodCall.getOffset());
}
constraintsSet.addOderConstraint(methodConstraints);
@@ -282,8 +285,13 @@ public class TYPEStmt implements StatementVisitor {
// see: https://docs.oracle.com/javase/specs/jls/se7/html/jls-15.html#jls-15.17
// Expression muss zu Numeric Convertierbar sein. also von Numeric erben
Constraint<Pair> numeric;
HashSet<JavaClassName> classNames = TypeUnifyTaskHelper.getPresizedHashSet(info.getAvailableClasses().size());
for (var classEl : info.getAvailableClasses()) {
classNames.add(classEl.getClassName());
}
// PL eingefuegt 2018-07-17
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(bytee.getName())) {
if (classNames.contains(bytee.getName())) {
numeric = new Constraint<>();
numeric.add(new Pair(binary.lexpr.getType(), bytee, PairOperator.SMALLERDOT, loc(binary.getOffset())));
numeric.add(new Pair(binary.rexpr.getType(), bytee, PairOperator.SMALLERDOT, loc(binary.getOffset())));
@@ -291,7 +299,7 @@ public class TYPEStmt implements StatementVisitor {
numericAdditionOrStringConcatenation.add(numeric);
}
// PL eingefuegt 2018-07-17
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(shortt.getName())) {
if (classNames.contains(shortt.getName())) {
numeric = new Constraint<>();
numeric.add(new Pair(binary.lexpr.getType(), shortt, PairOperator.SMALLERDOT, loc(binary.getOffset())));
numeric.add(new Pair(binary.rexpr.getType(), shortt, PairOperator.SMALLERDOT, loc(binary.getOffset())));
@@ -299,7 +307,7 @@ public class TYPEStmt implements StatementVisitor {
numericAdditionOrStringConcatenation.add(numeric);
}
// PL eingefuegt 2018-07-17
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(integer.getName())) {
if (classNames.contains(integer.getName())) {
numeric = new Constraint<>();
numeric.add(new Pair(binary.lexpr.getType(), integer, PairOperator.SMALLERDOT, loc(binary.getOffset())));
numeric.add(new Pair(binary.rexpr.getType(), integer, PairOperator.SMALLERDOT, loc(binary.getOffset())));
@@ -307,7 +315,7 @@ public class TYPEStmt implements StatementVisitor {
numericAdditionOrStringConcatenation.add(numeric);
}
// PL eingefuegt 2018-07-17
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(longg.getName())) {
if (classNames.contains(longg.getName())) {
numeric = new Constraint<>();
numeric.add(new Pair(binary.lexpr.getType(), longg, PairOperator.SMALLERDOT, loc(binary.getOffset())));
numeric.add(new Pair(binary.rexpr.getType(), longg, PairOperator.SMALLERDOT, loc(binary.getOffset())));
@@ -315,7 +323,7 @@ public class TYPEStmt implements StatementVisitor {
numericAdditionOrStringConcatenation.add(numeric);
}
// PL eingefuegt 2018-07-17
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(floatt.getName())) {
if (classNames.contains(floatt.getName())) {
numeric = new Constraint<>();
numeric.add(new Pair(binary.lexpr.getType(), floatt, PairOperator.SMALLERDOT, loc(binary.getOffset())));
numeric.add(new Pair(binary.rexpr.getType(), floatt, PairOperator.SMALLERDOT, loc(binary.getOffset())));
@@ -323,7 +331,7 @@ public class TYPEStmt implements StatementVisitor {
numericAdditionOrStringConcatenation.add(numeric);
}
// PL eingefuegt 2018-07-17
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(doublee.getName())) {
if (classNames.contains(doublee.getName())) {
numeric = new Constraint<>();
numeric.add(new Pair(binary.lexpr.getType(), doublee, PairOperator.SMALLERDOT, loc(binary.getOffset())));
numeric.add(new Pair(binary.rexpr.getType(), doublee, PairOperator.SMALLERDOT, loc(binary.getOffset())));
@@ -338,7 +346,7 @@ public class TYPEStmt implements StatementVisitor {
if (binary.operation.equals(BinaryExpr.Operator.ADD)) {
// Dann kann der Ausdruck auch das aneinanderfügen zweier Strings sein: ("a" + "b") oder (1 + 2)
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(string.getName())) {
if (classNames.contains(string.getName())) {
Constraint<Pair> stringConcat = new Constraint<>();
stringConcat.add(new Pair(binary.lexpr.getType(), string, PairOperator.EQUALSDOT, loc(binary.getOffset())));
stringConcat.add(new Pair(binary.rexpr.getType(), string, PairOperator.EQUALSDOT, loc(binary.getOffset())));
@@ -346,7 +354,7 @@ public class TYPEStmt implements StatementVisitor {
numericAdditionOrStringConcatenation.add(stringConcat);
}
}
if (numericAdditionOrStringConcatenation.size() < 1) {
if (numericAdditionOrStringConcatenation.isEmpty()) {
throw new TypeinferenceException("Kein Typ für " + binary.operation.toString() + " vorhanden", binary.getOffset());
}
constraintsSet.addOderConstraint(numericAdditionOrStringConcatenation);
@@ -693,8 +701,8 @@ public class TYPEStmt implements StatementVisitor {
Set<Pair> methodSignatureConstraint = generatemethodSignatureConstraint(forMethod, assumption, info, resolver);
//System.out.println("methodSignatureConstraint: " + methodSignatureConstraint);
//System.out.println("methodConstraint: " + methodConstraint);
//context.logger().info("methodSignatureConstraint: " + methodSignatureConstraint);
//context.logger().info("methodConstraint: " + methodConstraint);
methodConstraint.setmethodSignatureConstraint(methodSignatureConstraint);
extendsMethodConstraint.setmethodSignatureConstraint(methodSignatureConstraint);
@@ -732,7 +740,7 @@ public class TYPEStmt implements StatementVisitor {
}
// Zuordnung von MethodCall.signature(ReturnType) zu dem ReturnType der ausgewaehlten Methode (assumption.returnType)
ret.add(new Pair(foMethod.signature.get(foMethod.signature.size() - 1), assumption.getReturnType(), PairOperator.EQUALSDOT));
ret.add(new Pair(foMethod.signature.getLast(), assumption.getReturnType(), PairOperator.EQUALSDOT));
return ret;
}
@@ -745,8 +753,8 @@ public class TYPEStmt implements StatementVisitor {
// funNParams.add(TypePlaceholder.fresh(new NullToken()));
funNParams.add(new GenericRefType(NameGenerator.makeNewName(), new NullToken()));
}
funNParams.get(funNParams.size() - 1);
ret.add(new MethodAssumption(new FunNClass(funNParams), funNParams.get(funNParams.size() - 1), funNParams.subList(0, funNParams.size() - 1), new TypeScope() {
funNParams.getLast();
ret.add(new MethodAssumption(new FunNClass(funNParams), funNParams.getLast(), funNParams.subList(0, funNParams.size() - 1), new TypeScope() {
@Override
public Iterable<? extends GenericTypeVar> getGenerics() {
throw new NotImplementedException();
@@ -841,7 +849,7 @@ public class TYPEStmt implements StatementVisitor {
for (var child : switchStmt.getBlocks()) {
for (var label : child.getLabels()) {
if (label.getPattern() == null) {
//System.out.println("DefaultCase");
//context.logger().info("DefaultCase");
} else {
constraintsSet.addUndConstraint(
new Pair(
@@ -882,13 +890,9 @@ public class TYPEStmt implements StatementVisitor {
child.getLabels().forEach(el -> {
if (el.getType() instanceof RefType) {
var recType = el;
if (el.getPattern() instanceof RecordPattern) {
var pattern = (RecordPattern) recType.getPattern();
recursivelyAddRecordConstraints(pattern);
}
if (el.getPattern() instanceof RecordPattern pattern) {
recursivelyAddRecordConstraints(pattern);
}
}
});
@@ -904,13 +908,13 @@ public class TYPEStmt implements StatementVisitor {
var allClasses = info.getAvailableClasses();
var interestingClasses = allClasses.stream().filter(as -> as.getClassName().equals(((RefType) pattern.getType()).getName())).toList();
var constructors = interestingClasses.get(0).getConstructors();
var constructors = interestingClasses.getFirst().getConstructors();
int counter = 0;
for (var subPattern : pattern.getSubPattern()) {
for (Constructor con : constructors) {
//System.out.println("----------------------\n" + subPattern.getType() + " | " + con.getParameterList().getParameterAt(counter).getType() + "\n----------------------\n");
//context.logger().info("----------------------\n" + subPattern.getType() + " | " + con.getParameterList().getParameterAt(counter).getType() + "\n----------------------\n");
constraintsSet.addUndConstraint(new Pair(subPattern.getType(), con.getParameterList().getParameterAt(counter).getType(), PairOperator.SMALLERDOT, loc(con.getParameterList().getParameterAt(counter).getOffset())));
}
if (subPattern instanceof RecordPattern) recursivelyAddRecordConstraints((RecordPattern) subPattern);

View File

@@ -0,0 +1,91 @@
package de.dhbwstuttgart.typeinference.unify;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.RecursiveTask;
import java.util.concurrent.atomic.AtomicBoolean;
/**
 * An intermediate class for the recursive steps of the TypeUnifyTask:
 * it allows cancelling parts of the recursion tree instead of only the whole execution as before. For this
 * to work, every cancellable child task must be registered with its parent (via addChildTask) when it is created.
 *
 * @param <T> the result type of the task
*/
public abstract class CancellableTask<T> extends RecursiveTask<T> {
private final AtomicBoolean executionCancelled = new AtomicBoolean(false);
private final List<CancellableTask<?>> childTasks = new LinkedList<>();
private CancellableTask<?> parentTask = null;
/**
* Set the execution for this task and all its (recursive) children to be canceled
*/
protected void cancelExecution() {
// is this branch already canceled? Then do nothing
if (this.executionCancelled.getAndSet(true)) return;
this.cancelChildExecution();
}
private void cancelChildExecution() {
synchronized (this.childTasks) {
for (var childTask : childTasks) {
// no need to cancel a branch that is already finished
if (!childTask.isDone()) {
childTask.cancelExecution();
}
}
}
}
private void cancelChildExecutionAfter(CancellableTask<?> checkpointTask) {
boolean reachedCheckpoint = false;
int i = 0;
for (var childTask : childTasks) {
if (!reachedCheckpoint) {
reachedCheckpoint = childTask == checkpointTask;
}
else {
// no need to cancel a branch that is already finished
if (!childTask.isDone()) {
childTask.cancelExecution();
}
i++;
}
}
System.out.println("Skipped " + i + " younger siblings");
}
protected void cancelSiblingTasks() {
if (this.parentTask != null) {
boolean thisWasCancelledBefore = this.executionCancelled.get();
this.parentTask.cancelChildExecution();
this.executionCancelled.set(thisWasCancelledBefore);
}
}
public void cancelYoungerSiblingTasks() {
if (this.parentTask != null) {
this.parentTask.cancelChildExecutionAfter(this);
}
}
public Boolean isExecutionCancelled() {
return executionCancelled.get();
}
public void addChildTask(CancellableTask<?> childTask) {
this.childTasks.add(childTask);
childTask.setParentTask(this);
if (this.executionCancelled.get()) {
childTask.executionCancelled.set(true);
}
}
private void setParentTask(CancellableTask<?> parentTask) {
this.parentTask = parentTask;
}
}
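A minimal usage sketch; SearchTask is hypothetical, only addChildTask, isExecutionCancelled and cancelYoungerSiblingTasks come from the class above:
class SearchTask extends CancellableTask<Integer> {
    private final int depth;
    SearchTask(int depth) { this.depth = depth; }

    @Override
    protected Integer compute() {
        if (isExecutionCancelled() || depth == 0) return 0; // pruned branch or leaf
        SearchTask child = new SearchTask(depth - 1);
        addChildTask(child);   // register the child so that cancellation can reach it
        child.fork();
        // once this branch has produced a satisfying result, the remaining siblings could be pruned:
        // cancelYoungerSiblingTasks();
        return 1 + child.join();
    }
}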

View File

@@ -0,0 +1,61 @@
package de.dhbwstuttgart.typeinference.unify;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.RecursiveTask;
public class ConcurrentSetMergeTask<T> extends RecursiveTask<Set<T>> {
public static <E> Set<E> merge(List<Set<E>> list) {
if (list.isEmpty()) {
return new HashSet<>();
}
var task = new ConcurrentSetMergeTask<>(list, 0, list.size());
return task.compute();
}
private static final int LIST_THRESHOLD = 3;
private static final int ELEMENT_THRESHOLD = 1000;
private final List<Set<T>> list;
private final int start;
private final int end;
private ConcurrentSetMergeTask(List<Set<T>> list, int start, int end) {
this.list = list;
this.start = start;
this.end = end;
}
@Override
protected Set<T> compute() {
int size = end - start;
int totalElements = 0;
for (int i = start+1; i < end; i++) {
totalElements += list.get(i).size();
}
// size will always be at least one
// NOTE: the "true ||" below currently forces the sequential branch, so the fork/join split is effectively disabled
if (true || size <= LIST_THRESHOLD || totalElements < ELEMENT_THRESHOLD) {
Set<T> result = this.list.get(start);
for (int i = start+1; i < end; i++) {
result.addAll(list.get(i));
}
return result;
} else {
int mid = start + (size / 2);
ConcurrentSetMergeTask<T> leftTask = new ConcurrentSetMergeTask<>(list, start, mid);
ConcurrentSetMergeTask<T> rightTask = new ConcurrentSetMergeTask<>(list, mid, end);
leftTask.fork();
Set<T> rightResult = rightTask.compute();
Set<T> leftResult = leftTask.join();
// Merge results
leftResult.addAll(rightResult);
return leftResult;
}
}
}
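An illustrative sketch of the entry point; note that merge accumulates into the first set of the list, which is therefore mutated rather than copied:
static Set<Integer> mergeDemo() {
    List<Set<Integer>> parts = List.of(
            new HashSet<>(Set.of(1, 2)),
            new HashSet<>(Set.of(2, 3)),
            new HashSet<>(Set.of(4)));
    return ConcurrentSetMergeTask.merge(parts); // contains 1, 2, 3, 4 and is the same object as parts.get(0)
}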

View File

@@ -94,8 +94,8 @@ public class MartelliMontanariUnify implements IUnify {
// SUBST - Rule
if(lhsType instanceof PlaceholderType) {
mgu.add((PlaceholderType) lhsType, rhsType);
//PL 2018-04-01 nach checken, ob es richtig ist, dass keine Substitutionen uebergeben werden muessen.
termsList = termsList.stream().map(x -> mgu.apply(x)).collect(Collectors.toCollection(ArrayList::new));
//PL 2018-04-01 nach checken, ob es richtig ist, dass keine Substitutionen uebergeben werden muessen.
termsList.replaceAll(mgu::apply);
idx = idx+1 == termsList.size() ? 0 : idx+1;
continue;
}

View File

@@ -0,0 +1,84 @@
package de.dhbwstuttgart.typeinference.unify;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import java.util.ArrayList;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Calculate unique placeholder names
*/
public class PlaceholderRegistry implements ISerializableData {
private final Set<String> existingPlaceholders = ConcurrentHashMap.newKeySet();
private final AtomicInteger placeholderCount = new AtomicInteger();
public ArrayList<PlaceholderType> UnifyTypeFactory_PLACEHOLDERS = new ArrayList<>();
/**
* Add a placeholder into the list of existing ones, as soon as a new PlaceholderType is created
*
* @param placeholderName The placeholder to add
*/
public void addPlaceholder(String placeholderName) {
this.existingPlaceholders.add(placeholderName);
}
/**
 * Generate a fresh placeholder name that is unique within this registry
*
* @return The generated name
*/
public String generateFreshPlaceholderName() {
String name;
do {
int pc = placeholderCount.incrementAndGet();
name = getUppercaseTokenFromInt(pc);
}
while (existingPlaceholders.contains(name));
this.addPlaceholder(name);
return name;
}
public PlaceholderRegistry deepClone() {
PlaceholderRegistry pr2 = new PlaceholderRegistry();
this.existingPlaceholders.forEach(pr2::addPlaceholder);
pr2.UnifyTypeFactory_PLACEHOLDERS.addAll(this.UnifyTypeFactory_PLACEHOLDERS);
pr2.placeholderCount.set(this.placeholderCount.get());
return pr2;
}
/**
 * Generate a token of uppercase letters that encodes the value i (base 26, least significant letter first)
*
* @param i The value that will be represented as a token
* @return The generated token
*/
private String getUppercaseTokenFromInt(int i) {
StringBuilder sb = new StringBuilder();
while (i >= 0) {
sb.append((char)(i % 26 + 65));
i = i / 26 - 1;
}
//sb.append(suffix);
return sb.toString();
}
@Override
public String toString() {
return this.existingPlaceholders.toString();
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("ph", new SerialValue<>(new ArrayList<>(this.existingPlaceholders)));
serialized.put("factoryPh", SerialList.fromMapped(this.UnifyTypeFactory_PLACEHOLDERS, t -> t.toSerial(keyStorage)));
return serialized;
}
}
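For illustration, the token encoding is base 26 with the least significant letter first, and fresh names skip anything already registered:
// getUppercaseTokenFromInt: 0 -> "A", 25 -> "Z", 26 -> "AA", 27 -> "BA", 52 -> "AB"
static String freshNameDemo() {
    PlaceholderRegistry registry = new PlaceholderRegistry();
    registry.addPlaceholder("B");                   // pretend "B" is already taken
    return registry.generateFreshPlaceholderName(); // the counter starts at "B", so this skips to "C"
}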

View File

@@ -1,10 +1,12 @@
package de.dhbwstuttgart.typeinference.unify;
import de.dhbwstuttgart.util.Logger;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Optional;
import java.util.Queue;
import java.util.Set;
@@ -12,24 +14,16 @@ import java.util.Stack;
import java.util.function.Function;
import java.util.stream.Collectors;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.exceptions.DebugException;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.syntaxtree.type.ExtendsWildcardType;
import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
import de.dhbwstuttgart.typeinference.unify.model.*;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.distributeVariance;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.io.OutputStreamWriter;
import org.apache.commons.io.output.NullOutputStream;
/**
* Implementation of the type inference rules.
* @author Florian Steurer
@@ -37,15 +31,18 @@ import org.apache.commons.io.output.NullOutputStream;
*/
public class RuleSet implements IRuleSet{
Writer logFile;
Logger logger;
final PlaceholderRegistry placeholderRegistry;
public RuleSet() {
public RuleSet(PlaceholderRegistry placeholderRegistry) {
super();
logFile = new OutputStreamWriter(new NullOutputStream());
logger = Logger.NULL_LOGGER;
this.placeholderRegistry = placeholderRegistry;
}
RuleSet(Writer logFile) {
this.logFile = logFile;
RuleSet(Logger logger, PlaceholderRegistry placeholderRegistry) {
this.logger = logger;
this.placeholderRegistry = placeholderRegistry;
}
@Override
@@ -297,8 +294,8 @@ public class RuleSet implements IRuleSet{
if(dFromFc == null || !dFromFc.getTypeParams().arePlaceholders() || dFromFc.getTypeParams().size() != cFromFc.getTypeParams().size())
return Optional.empty();
//System.out.println("cFromFc: " + cFromFc);
//System.out.println("dFromFc: " + dFromFc);
//context.logger().info("cFromFc: " + cFromFc);
//context.logger().info("dFromFc: " + dFromFc);
int[] pi = pi(cFromFc.getTypeParams(), dFromFc.getTypeParams());
if(pi.length == 0)
@@ -507,17 +504,17 @@ public class RuleSet implements IRuleSet{
TypeParams typeDParams = typeD.getTypeParams();
TypeParams typeDgenParams = typeDgen.getTypeParams();
//System.out.println("Pair: " +pair);
//System.out.println("typeD: " +typeD);
//System.out.println("typeDParams: " +typeDParams);
//System.out.println("typeDgen: " +typeD);
//System.out.println("typeDgenParams: " +typeDgenParams);
//context.logger().info("Pair: " +pair);
//context.logger().info("typeD: " +typeD);
//context.logger().info("typeDParams: " +typeDParams);
//context.logger().info("typeDgen: " +typeD);
//context.logger().info("typeDgenParams: " +typeDgenParams);
Unifier unif = Unifier.identity();
for(int i = 0; i < typeDParams.size(); i++) {
//System.out.println("ADAPT" +typeDgenParams);
//context.logger().info("ADAPT" +typeDgenParams);
if (typeDgenParams.get(i) instanceof PlaceholderType)
unif.add((PlaceholderType) typeDgenParams.get(i), typeDParams.get(i));
else System.out.println("ERROR");
unif.add((PlaceholderType) typeDgenParams.get(i), typeDParams.get(i));
else logger.exception(new Exception("ERROR in adapt rule: cannot add non placeholder type"));
}
return Optional.of(new UnifyPair(unif.apply(newLhs), typeDs, PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
}
@@ -652,15 +649,17 @@ public class RuleSet implements IRuleSet{
@Override
public Optional<Set<UnifyPair>> subst(Set<UnifyPair> pairs, List<Set<Constraint<UnifyPair>>> oderConstraints) {
HashMap<UnifyType, Integer> typeMap = new HashMap<>();
// Statistically, typeMap will fill up quickly and resize multiple times. To reduce this, we start with a higher capacity
HashMap<UnifyType, Integer> typeMap = new HashMap<>(200);
Stack<UnifyType> occuringTypes = new Stack<>();
occuringTypes.ensureCapacity(pairs.size() * 3);
for(UnifyPair pair : pairs) {
occuringTypes.push(pair.getLhsType());
occuringTypes.push(pair.getRhsType());
}
while(!occuringTypes.isEmpty()) {
UnifyType t1 = occuringTypes.pop();
if(!typeMap.containsKey(t1))
@@ -672,12 +671,12 @@ public class RuleSet implements IRuleSet{
if(t1 instanceof SuperType)
occuringTypes.push(((SuperType) t1).getSuperedType());
else
t1.getTypeParams().forEach(x -> occuringTypes.push(x));
t1.getTypeParams().forEach(occuringTypes::push);
}
Queue<UnifyPair> result1 = new LinkedList<UnifyPair>(pairs);
LinkedList<UnifyPair> result1 = new LinkedList<UnifyPair>(pairs);
ArrayList<UnifyPair> result = new ArrayList<UnifyPair>();
boolean applied = false;
while(!result1.isEmpty()) {
UnifyPair pair = result1.poll();
PlaceholderType lhsType = null;
@@ -695,19 +694,30 @@ public class RuleSet implements IRuleSet{
&& !((rhsType instanceof WildcardType) && ((WildcardType)rhsType).getWildcardedType().equals(lhsType))) //PL eigefuegt 2018-02-18
{
Unifier uni = new Unifier(lhsType, rhsType);
result = result.stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(ArrayList::new));
result1 = result1.stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(LinkedList::new));
// apply unifier to result and result1 in place
result.replaceAll(p -> uni.apply(pair, p));
ListIterator<UnifyPair> result1Iterator = result1.listIterator();
while (result1Iterator.hasNext()) {
UnifyPair x = result1Iterator.next();
result1Iterator.set(uni.apply(pair, x));
}
Function<? super Constraint<UnifyPair>,? extends Constraint<UnifyPair>> applyUni = b -> b.stream().map(
x -> uni.apply(pair,x)).collect(Collectors.toCollection((b.getExtendConstraint() != null)
? () -> new Constraint<UnifyPair>(
b.isInherited(),
b.isImplemented(),
b.getExtendConstraint().stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(Constraint::new)),
b.getExtendConstraint().createdMapped(x -> uni.apply(pair,x)),
b.getmethodSignatureConstraint().stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(HashSet::new)))
: () -> new Constraint<UnifyPair>(b.isInherited(), b.isImplemented())
));
oderConstraints.replaceAll(oc -> oc.stream().map(applyUni).collect(Collectors.toCollection(HashSet::new)));
oderConstraints.replaceAll(oc -> {
HashSet<Constraint<UnifyPair>> mapped = new HashSet<>(oc.size());
for (var element : oc) {
mapped.add(applyUni.apply(element));
}
return mapped;
});
/*
oderConstraints = oderConstraints.stream().map(
a -> a.stream().map(applyUni
@@ -861,14 +871,11 @@ public class RuleSet implements IRuleSet{
UnifyType r = x.getRhsType();
if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
} );
try {
logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNred: " + result + "\n");
logFile.flush();
}
catch (IOException e) {
System.out.println("logFile-Error");
}
logger.debug(() -> "FUNgreater: " + pair);
logger.debug(() -> "FUNred: " + result);
return Optional.of(result);
}
@@ -934,15 +941,15 @@ public class RuleSet implements IRuleSet{
Set<UnifyPair> result = new HashSet<UnifyPair>();
Integer variance = ((PlaceholderType)rhsType).getVariance();
Integer inversVariance = distributeVariance.inverseVariance(variance);
int variance = ((PlaceholderType)rhsType).getVariance();
int inversVariance = distributeVariance.inverseVariance(variance);
UnifyType[] freshPlaceholders = new UnifyType[funNLhsType.getTypeParams().size()];
for(int i = 0; i < freshPlaceholders.length-1; i++) {
freshPlaceholders[i] = PlaceholderType.freshPlaceholder();
freshPlaceholders[i] = PlaceholderType.freshPlaceholder(placeholderRegistry);
((PlaceholderType)freshPlaceholders[i]).setVariance(inversVariance);
}
freshPlaceholders[freshPlaceholders.length-1] = PlaceholderType.freshPlaceholder();
freshPlaceholders[freshPlaceholders.length-1] = PlaceholderType.freshPlaceholder(placeholderRegistry);
((PlaceholderType)freshPlaceholders[freshPlaceholders.length-1]).setVariance(variance);
result.add(new UnifyPair(funNLhsType.getTypeParams().get(funNLhsType.getTypeParams().size()-1), freshPlaceholders[funNLhsType.getTypeParams().size()-1], PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
@@ -953,18 +960,14 @@ public class RuleSet implements IRuleSet{
result.add(new UnifyPair(rhsType, funNLhsType.setTypeParams(new TypeParams(freshPlaceholders)), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
result.stream().forEach(x -> { UnifyType l = x.getLhsType();
if (l instanceof PlaceholderType) { ((PlaceholderType)l).disableWildcardtable(); }
if (l instanceof PlaceholderType) { ((PlaceholderType)l).disableWildcardtable(); }
UnifyType r = x.getRhsType();
if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
} );
try {
logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNgreater: " + result + "\n");
logFile.flush();
}
catch (IOException e) {
System.out.println("lofFile-Error");
}
logger.debug(() -> "FUNgreater: " + pair);
logger.debug(() -> "FUNgreater: " + result);
return Optional.of(result);
}
@@ -983,15 +986,15 @@ public class RuleSet implements IRuleSet{
Set<UnifyPair> result = new HashSet<UnifyPair>();
Integer variance = ((PlaceholderType)lhsType).getVariance();
Integer inversVariance = distributeVariance.inverseVariance(variance);
int variance = ((PlaceholderType)lhsType).getVariance();
int inversVariance = distributeVariance.inverseVariance(variance);
UnifyType[] freshPlaceholders = new UnifyType[funNRhsType.getTypeParams().size()];
for(int i = 0; i < freshPlaceholders.length-1; i++) {
freshPlaceholders[i] = PlaceholderType.freshPlaceholder();
freshPlaceholders[i] = PlaceholderType.freshPlaceholder(placeholderRegistry);
((PlaceholderType)freshPlaceholders[i]).setVariance(inversVariance);
}
freshPlaceholders[freshPlaceholders.length-1] = PlaceholderType.freshPlaceholder();
freshPlaceholders[freshPlaceholders.length-1] = PlaceholderType.freshPlaceholder(placeholderRegistry);
((PlaceholderType)freshPlaceholders[freshPlaceholders.length-1]).setVariance(variance);
result.add(new UnifyPair(freshPlaceholders[funNRhsType.getTypeParams().size()-1], funNRhsType.getTypeParams().get(funNRhsType.getTypeParams().size()-1), PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
@@ -1003,18 +1006,15 @@ public class RuleSet implements IRuleSet{
result.add(new UnifyPair(lhsType, funNRhsType.setTypeParams(new TypeParams(freshPlaceholders)), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
result.stream().forEach(x -> { UnifyType l = x.getLhsType();
if (l instanceof PlaceholderType) { ((PlaceholderType)l).disableWildcardtable(); }
if (l instanceof PlaceholderType) { ((PlaceholderType)l).disableWildcardtable(); }
UnifyType r = x.getRhsType();
if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
} );
try {
logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNsmaller: " + result + "\n");
logFile.flush();
}
catch (IOException e) {
System.out.println("lofFile-Error");
}
logger.debug(() -> "FUNgreater: " + pair);
logger.debug(() -> "FUNsmaller: " + result);
return Optional.of(result);
}
@@ -1051,7 +1051,7 @@ public class RuleSet implements IRuleSet{
if(isGen)
result.add(new UnifyPair(rhsType, lhsType, PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
else {
UnifyType freshTph = PlaceholderType.freshPlaceholder();
UnifyType freshTph = PlaceholderType.freshPlaceholder(placeholderRegistry);
result.add(new UnifyPair(rhsType, new ExtendsType(freshTph), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
result.add(new UnifyPair(extendedType, freshTph, PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
}
@@ -1079,7 +1079,7 @@ public class RuleSet implements IRuleSet{
if(isGen)
result.add(new UnifyPair(rhsType, lhsType, PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
else {
UnifyType freshTph = PlaceholderType.freshPlaceholder();
UnifyType freshTph = PlaceholderType.freshPlaceholder(placeholderRegistry);
result.add(new UnifyPair(rhsType, new SuperType(freshTph), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
Set<UnifyType> fBounded = pair.getfBounded();
fBounded.add(lhsType);

View File

@@ -1,90 +1,65 @@
package de.dhbwstuttgart.typeinference.unify;
import java.io.FileWriter;
import de.dhbwstuttgart.util.Logger;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ForkJoinPool;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
public class TypeUnify {
private TypeUnify() {}
private static <T> T joinFuture(CompletableFuture<T> future) {
try {
return future.get();
}
catch (InterruptedException | ExecutionException exception) {
throw new RuntimeException(exception);
}
}
/**
* unify parallel ohne result modell
* @param undConstrains
* @param oderConstraints
* @param fc
* @param logFile
* @param log
* @param cons
* @return
*/
public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
ForkJoinPool pool = new ForkJoinPool();
pool.invoke(unifyTask);
Set<Set<UnifyPair>> res = unifyTask.join();
try {
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements + "\n");
logFile.flush();
}
catch (IOException e) {
System.err.println("no log-File");
}
public static Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
ForkJoinPool pool = TypeUnify.createThreadPool(unifyContext.logger());
UnifyContext context = unifyContext.newWithParallel(true).newWithExecutor(pool);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, context, 0);
Set<Set<UnifyPair>> res = joinFuture(unifyTask.compute());
unifyContext.logger().debug("\nnoShortendElements: " + TypeUnifyTask.noShortendElements + "\n");
return res;
}
/**
* unify asynchron mit Rückgabe UnifyResultModel ohne dass alle results gesammelt sind
* @param undConstrains
* @param oderConstraints
* @param fc
* @param logFile
* @param log
* @param cons
* @param ret
* @return
*/
public UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
ForkJoinPool pool = new ForkJoinPool();
pool.invoke(unifyTask);
return ret;
public static UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
ForkJoinPool pool = TypeUnify.createThreadPool(unifyContext.logger());
UnifyContext context = unifyContext.newWithExecutor(pool);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, context, 0);
unifyTask.compute();
return unifyContext.resultModel();
}
/**
* unify parallel mit Rückgabe UnifyResultModel nachdem alle results gesammelt sind
* @param undConstrains
* @param oderConstraints
* @param fc
* @param logFile
* @param log
* @param cons
* @param ret
* @return
*/
public UnifyResultModel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
ForkJoinPool pool = new ForkJoinPool();
pool.invoke(unifyTask);
Set<Set<UnifyPair>> res = unifyTask.join();
try {
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
logFile.flush();
}
catch (IOException e) {
System.err.println("no log-File");
}
return ret;
public static Set<Set<UnifyPair>> unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
ForkJoinPool pool = TypeUnify.createThreadPool(unifyContext.logger());
UnifyContext context = unifyContext.newWithParallel(true).newWithExecutor(pool);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, context, 0);
var result = joinFuture(unifyTask.compute());
unifyContext.logger().debug("\nnoShortendElements: " + TypeUnifyTask.noShortendElements + "\n");
return result;
}
/*
@@ -97,25 +72,22 @@ public class TypeUnify {
/**
* unify sequentiell mit oderconstraints
* @param undConstrains
* @param oderConstraints
* @param fc
* @param logFile
* @param log
* @param cons
* @return
*/
public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0, ret, usedTasks);
Set<Set<UnifyPair>> res = unifyTask.compute();
try {
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
logFile.flush();
}
catch (IOException e) {
System.err.println("no log-File");
}
public static Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, unifyContext.newWithParallel(false), 0);
Set<Set<UnifyPair>> res = joinFuture(unifyTask.compute());
unifyContext.logger().debug("\nnoShortendElements: " + TypeUnifyTask.noShortendElements +"\n");
return res;
}
private static ForkJoinPool createThreadPool(Logger logger) {
logger.info("Available processors: " + Runtime.getRuntime().availableProcessors());
return new ForkJoinPool(
Runtime.getRuntime().availableProcessors(),
ForkJoinPool.defaultForkJoinWorkerThreadFactory,
null,
false
);
}
}

View File

@@ -13,54 +13,46 @@ import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import java.util.concurrent.CompletableFuture;
public class TypeUnify2Task extends TypeUnifyTask {
Set<Set<UnifyPair>> setToFlatten;
Set<UnifyPair> methodSignatureConstraintUebergabe;
public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks, Set<UnifyPair> methodSignatureConstraintUebergabe) {
super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
this.setToFlatten = setToFlatten;
this.nextSetElement = nextSetElement;
this.methodSignatureConstraintUebergabe = methodSignatureConstraintUebergabe;
}
Set<UnifyPair> getNextSetElement() {
return nextSetElement;
}
@Override
protected Set<Set<UnifyPair>> compute() {
if (one) {
System.out.println("two");
}
one = true;
Set<Set<UnifyPair>> res = unify2(setToFlatten, eq, oderConstraintsField, fc, parallel, rekTiefeField, methodSignatureConstraintUebergabe);
Set<Set<UnifyPair>> setToFlatten;
Set<UnifyPair> methodSignatureConstraintUebergabe;
public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, UnifyContext context, int rekTiefe, Set<UnifyPair> methodSignatureConstraintUebergabe) {
super(eq, oderConstraints, fc, context, rekTiefe);
this.setToFlatten = setToFlatten;
this.nextSetElement = nextSetElement;
this.methodSignatureConstraintUebergabe = methodSignatureConstraintUebergabe;
}
public Set<UnifyPair> getNextSetElement() {
return nextSetElement;
}
@Override
public CompletableFuture<Set<Set<UnifyPair>>> compute() {
if (one) {
context.logger().info("two");
}
one = true;
CompletableFuture<Set<Set<UnifyPair>>> res =
unify2(setToFlatten, eq, oderConstraintsField, fc, context.parallel(), rekTiefeField, methodSignatureConstraintUebergabe);
/*if (isUndefinedPairSetSet(res)) {
return new HashSet<>(); }
else
*/
//writeLog("xxx");
//noOfThread--;
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
else {
return res;
}
}
}
public void closeLogFile() {
*/
//writeLog("xxx");
//noOfThread--;
if (this.isExecutionCancelled()) {
return CompletableFuture.completedFuture(new HashSet<>());
} else {
return res;
}
}
try {
logFile.close();
}
catch (IOException ioE) {
System.err.println("no log-File" + thNo);
}
}
public void closeLogFile() {
context.logger().close();
}
}

View File

@@ -0,0 +1,221 @@
package de.dhbwstuttgart.typeinference.unify;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
* A collection of encapsulated (and thus static) helper functions used to split up large algorithms in TypeUnifyTask
*/
public class TypeUnifyTaskHelper {
/**
* Filter all topLevelSets for those with a single element that contains only one pair:
* a <. theta,
* theta <. a or
* a =. theta
*/
public static Set<Set<UnifyPair>> getSingleElementSets(ArrayList<Set<? extends Set<UnifyPair>>> topLevelSets) {
return topLevelSets.stream()
.filter(x -> x.size() == 1)
.map(y -> y.stream().findFirst().get()).collect(Collectors.toSet());
}
/**
* Variance determination (start)
* Oder-constraint if there is either no base pair or the base pairs differ => oderConstraint = true;
* variance = 1 => argument variable
* variance = -1 => return variable
* variance = 0 => unclear
* variance = 2 => operator oder-constraints
*/
public static int calculateVariance(List<Set<UnifyPair>> nextSetasList) {
Optional<Integer> xi = nextSetasList.stream().map(x -> x.stream().filter(y -> (y.getLhsType() instanceof PlaceholderType && !(y.getRhsType() instanceof PlaceholderType)))
.filter(z -> ((PlaceholderType) z.getLhsType()).getVariance() != 0)
.map(c -> ((PlaceholderType) c.getLhsType()).getVariance())
.reduce((a, b) -> {
if (a.intValue() == b.intValue()) return a;
else return 0;
})) //2 occurs in particular for oder-constraints
.filter(Optional::isPresent)
.map(Optional::get)
.findAny();
return xi.orElse(0);
}
/**
* Determines the variance for an oder-constraint from the first element of nextSetAsList; defaults to 2 if no variance can be derived.
*/
public static int calculateOderConstraintVariance(List<Set<UnifyPair>> nextSetAsList) {
Optional<Integer> optVariance =
nextSetAsList
.getFirst()
.stream()
.filter(x -> x.getGroundBasePair().getLhsType() instanceof PlaceholderType &&
!(x.getRhsType() instanceof PlaceholderType) &&
x.getPairOp() == PairOperator.EQUALSDOT)
.map(x ->
((PlaceholderType) x.getGroundBasePair().getLhsType()).getVariance())
.reduce((n, m) -> (n != 0) ? n : m);
//For operator calls, variance is set to 2,
//because no receiver exists, i.e. no x.getGroundBasePair().getLhsType() instanceof PlaceholderType.
//With variance = 2, all elements of the cartesian product are processed.
return optVariance.orElse(2);
}
/**
* Find the first occurrence (if any) of a UnifyPair with operator EQUALSDOT while having
* one side equal to its base pair counterpart
*/
public static Optional<UnifyPair> findEqualityConstrainedUnifyPair(Set<UnifyPair> nextSetElement) {
return nextSetElement.stream().filter(x ->
x.getPairOp()
.equals(PairOperator.EQUALSDOT))
.filter(x -> //ensure that for a = ty, a really is the type variable we are looking for
x.getLhsType()
.equals(x.getBasePair().getLhsType()) ||
x.getLhsType()
.equals(x.getBasePair().getRhsType())
).findFirst();
}
/**
* Find all unifyPairs that associate the identified type variable of origPair with any concrete type. That means:
* If "a = type" is in origPair, then we get all UnifyPairs that contain either "a < typeA" or "typeB < a"
*/
public static Set<UnifyPair> findConstraintsWithSameTVAssociation(UnifyPair origPair, Set<Set<UnifyPair>> singleElementSets) {
UnifyType tyVar = origPair.getLhsType();
if (!(tyVar instanceof PlaceholderType)) {
tyVar = origPair.getRhsType();
}
UnifyType tyVarEF = tyVar;
return singleElementSets.stream()
.map(xx ->
xx.iterator().next())
.filter(x ->
(x.getLhsType().equals(tyVarEF) && !(x.getRhsType() instanceof PlaceholderType))
||
(x.getRhsType().equals(tyVarEF) && !(x.getLhsType() instanceof PlaceholderType))
)
.collect(Collectors.toCollection(HashSet::new));
}
/**
* Checks whether all pairs in the first element of nextSetAsList share the same, non-null base pair.
*/
public static boolean doesFirstNextSetHasSameBase(List<Set<UnifyPair>> nextSetAsList) {
if (nextSetAsList.isEmpty()) {
return false;
}
UnifyPair firstBasePair = null;
for (var unifyPair : nextSetAsList.getFirst().stream().toList()) {
var basePair = unifyPair.getBasePair();
// if any base pair is null, there is NOT always the same base!
if (basePair == null) {
return false;
}
if (firstBasePair == null) {
firstBasePair = basePair;
}
else if (!basePair.equals(firstBasePair)) {
return false;
}
}
return true;
}
/**
* Extracts data from every element in the nested result sets; which data is extracted depends on the given
* extractor function.
*/
public static Set<UnifyPair> collectFromThreadResult (
Set<Set<UnifyPair>> currentThreadResult,
Function<UnifyPair, Set<UnifyPair>> extractor
) {
return currentThreadResult.stream()
.map(b ->
b.stream()
.map(extractor)
.reduce((y, z) -> {
y.addAll(z);
return y;
})
.orElse(new HashSet<>()))
.reduce((y, z) -> {
y.addAll(z);
return y;
})
.orElse(new HashSet<>());
}
/**
* Extract a list of PlaceholderTypes from a set of pairs, such that each resulting element:
* - Is the LHS of a pair
* - Is a PlaceholderType
* - Has a base pair side that is a PlaceholderType with the same name
*/
public static List<PlaceholderType> extractMatchingPlaceholderTypes(Set<UnifyPair> pairs) {
return pairs.stream()
.filter(x -> {
UnifyType lhs = x.getLhsType();
UnifyType baseLhs = x.getBasePair().getLhsType();
UnifyType baseRhs = x.getBasePair().getRhsType();
return (lhs instanceof PlaceholderType) &&
((baseLhs instanceof PlaceholderType && lhs.getName().equals(baseLhs.getName())) ||
(baseRhs instanceof PlaceholderType && lhs.getName().equals(baseRhs.getName())));
})
.map(x -> (PlaceholderType) x.getLhsType())
.collect(Collectors.toCollection(ArrayList::new));
}
public static Set<UnifyPair> occursCheck(final Set<UnifyPair> eq) {
Set<UnifyPair> ocurrPairs = new HashSet<>(eq.size());
for (UnifyPair x : eq) {
UnifyType lhs = x.getLhsType();
UnifyType rhs = x.getRhsType();
if (lhs instanceof PlaceholderType lhsPlaceholder &&
!(rhs instanceof PlaceholderType) &&
rhs.getTypeParams().occurs(lhsPlaceholder))
{
x.setUndefinedPair();
ocurrPairs.add(x);
}
}
return ocurrPairs;
}
public static <T> HashSet<T> getPresizedHashSet(int minElements) {
if (minElements < 16) return new HashSet<>();
// HashSet and HashMap will resize at 75% load, so we account for that by multiplying by 1.5
int n = (int)(minElements * 1.5);
return new HashSet<>(n);
}
public static <S,T> HashMap<S,T> getPresizedHashMap(int minElements) {
if (minElements < 16) return new HashMap<>();
// HashSet and HashMap will resize at 75% load, so we account for that by multiplying by 1.5
int n = (int)(minElements * 1.5);
return new HashMap<>(n);
}
}
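
The two presizing helpers above follow from the fact that HashSet and HashMap rehash once they exceed a load factor of 0.75: to hold n entries without rehashing, the initial capacity must be at least n / 0.75 ≈ 1.34 * n, and the factor 1.5 adds a little headroom. A minimal, self-contained sketch of the effect (illustrative only, not part of this changeset; the class name PresizeSketch is hypothetical):

import java.util.HashSet;

public class PresizeSketch {
    public static void main(String[] args) {
        int expected = 1000;

        // Default capacity (16): the set rehashes several times while growing to 1000 entries.
        HashSet<Integer> plain = new HashSet<>();

        // Presized to 1500: 1000 entries stay below the 0.75 load threshold, so no rehash occurs.
        HashSet<Integer> presized = new HashSet<>((int) (expected * 1.5));

        for (int i = 0; i < expected; i++) {
            plain.add(i);
            presized.add(i);
        }
        System.out.println(plain.size() + " / " + presized.size()); // 1000 / 1000
    }
}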

View File

@@ -0,0 +1,67 @@
package de.dhbwstuttgart.typeinference.unify;
import de.dhbwstuttgart.util.Logger;
import java.io.Writer;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ForkJoinPool;
public record UnifyContext(
// main logger of a unification
Logger logger,
// if the unify algorithm should run in parallel
boolean parallel,
// the model for storing calculated results
UnifyResultModel resultModel,
// the executor used for thread management in parallel execution
ExecutorService executor,
// a generator for new placeholders in this unify context
PlaceholderRegistry placeholderRegistry,
// a control structure to cancel the unification early
UnifyTaskModel usedTasks
) {
public UnifyContext(
Logger logger,
boolean parallel,
UnifyResultModel resultModel,
UnifyTaskModel usedTasks,
ExecutorService executor,
PlaceholderRegistry placeholderRegistry
) {
this(logger, parallel, resultModel, executor, placeholderRegistry, usedTasks);
}
public UnifyContext(
Logger logger,
boolean parallel,
UnifyResultModel resultModel,
UnifyTaskModel usedTasks,
PlaceholderRegistry placeholderRegistry
) {
this(logger, parallel, resultModel, usedTasks, ForkJoinPool.commonPool(), placeholderRegistry);
}
/*
* Shortcuts for creating a similar context with some properties changed. Combined with the final record components,
* this causes the UnifyContext to be handled essentially as an immutable value object.
*/
public UnifyContext newWithLogger(Logger logger) {
return new UnifyContext(logger, parallel, resultModel, executor, placeholderRegistry, usedTasks);
}
public UnifyContext newWithParallel(boolean parallel) {
if (this.parallel == parallel) return this;
return new UnifyContext(logger, parallel, resultModel, executor, placeholderRegistry, usedTasks);
}
public UnifyContext newWithExecutor(ExecutorService executor) {
return new UnifyContext(logger, parallel, resultModel, executor, placeholderRegistry, usedTasks);
}
public UnifyContext newWithResultModel(UnifyResultModel resultModel) {
return new UnifyContext(logger, parallel, resultModel, executor, placeholderRegistry, usedTasks);
}
}
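
UnifyContext is an immutable record; the newWith* methods return modified copies instead of mutating shared state, so sub-tasks can derive their own context cheaply. A hypothetical usage sketch (deriveChildContext and childLogger are illustrative names, not from this changeset):

// A minimal sketch, assuming the collaborators from this changeset are passed in.
static UnifyContext deriveChildContext(UnifyContext root, Logger childLogger) {
    // Only the logger changes; executor, result model, placeholder registry and
    // task model are shared with the parent context.
    UnifyContext child = root.newWithLogger(childLogger);

    // newWithParallel returns the same instance when the flag does not change (see above).
    assert root.newWithParallel(root.parallel()) == root;

    return child;
}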

View File

@@ -36,19 +36,19 @@ public class UnifyResultModel {
listeners.remove(listenerToRemove);
}
public void notify(Set<Set<UnifyPair>> eqPrimePrimeSet) {
public void notify(Set<Set<UnifyPair>> eqPrimePrimeSet, UnifyContext context) {
Set<Set<UnifyPair>> eqPrimePrimeSetRet = eqPrimePrimeSet.stream().map(x -> {
Optional<Set<UnifyPair>> res = new RuleSet().subst(x.stream().map(y -> {
Optional<Set<UnifyPair>> res = new RuleSet(context.placeholderRegistry()).subst(x.stream().map(y -> {
if (y.getPairOp() == PairOperator.SMALLERDOTWC) y.setPairOp(PairOperator.EQUALSDOT);
return y; //all pairs a <.? b are replaced by a =. b
}).collect(Collectors.toCollection(HashSet::new)));
if (res.isPresent()) {//if subst returns a result, something was changed
return new TypeUnifyTask().applyTypeUnificationRules(res.get(), fc);
return new TypeUnifyTask(context).applyTypeUnificationRules(res.get(), fc);
}
else return x; //if nothing was changed, x is returned
}).collect(Collectors.toCollection(HashSet::new));
List<ResultSet> newResult = eqPrimePrimeSetRet.stream().map(unifyPairs ->
new ResultSet(UnifyTypeFactory.convert(unifyPairs, de.dhbwstuttgart.typeinference.constraints.Pair.generateTPHMap(cons))))
new ResultSet(UnifyTypeFactory.convert(unifyPairs, de.dhbwstuttgart.typeinference.constraints.Pair.generateTPHMap(cons), context.placeholderRegistry())))
.collect(Collectors.toList());
UnifyResultEvent evt = new UnifyResultEvent(newResult);

View File

@@ -12,7 +12,7 @@ public class UnifyTaskModel {
public synchronized void cancel() {
for(TypeUnifyTask t : usedTasks) {
t.myCancel(true);
t.cancelExecution();
}
}
}

View File

@@ -0,0 +1,209 @@
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.util.Tuple;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
public class ContravarianceCase extends VarianceCase {
protected final int variance = 1;
protected ContravarianceCase(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
super(isOderConstraint, typeUnifyTask, context);
}
@Override
public void selectNextData(
TypeUnifyTask typeUnifyTask,
List<Set<UnifyPair>> nextSetAsList,
Optional<UnifyPair> optOrigPair
) {
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
context.logger().debug("Max: a in " + variance + " " + a);
nextSetAsList.remove(a);
if (this.isOderConstraint) {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
}
context.logger().debug(() -> "nextSetasListOderConstraints 1: " + nextSetasListOderConstraints);
//Determine all maximal elements in nextSetasListRest;
//parallel computation is only started for these.
Set<UnifyPair> finalA = a;
nextSetasListRest = typeUnifyTask.oup.maxElements(
nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != 1).toList()
);
}
@Override
public CompletableFuture<ComputationResults> computeParallel(
Set<Set<UnifyPair>> elems,
Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
IFiniteClosure fc,
int rekTiefe,
Set<UnifyPair> methodSignatureConstraint,
List<Set<UnifyPair>> nextSetAsList,
Set<UnifyPair> sameEqSet,
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> aParDef
) {
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
newElemsOrig.add(a);
/* FORK START */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, methodSignatureConstraint);
typeUnifyTask.addChildTask(forkOrig);
// schedule compute() on another thread
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
CompletableFuture<ComputationResults> resultValues = forkOrigFuture.thenApply(
(currentThreadResult) -> {
forkOrig.context.logger().debug("final Orig 1");
forkOrig.closeLogFile();
return new ComputationResults(currentThreadResult);
});
//forks.add(forkOrig);
if (typeUnifyTask.isExecutionCancelled()) {
return CompletableFuture.completedFuture(new ComputationResults());
}
/* FORK END */
context.logger().debug("a in " + variance + " " + a);
context.logger().debug("nextSetasListRest: " + nextSetasListRest.toString());
while (!nextSetasListRest.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
nextSetAsList.remove(nSaL);
context.logger().debug("1 RM" + nSaL.toString());
if (!this.isOderConstraint) {
//check whether a =. ty in nSaL contradicts sameEqSet
if (!sameEqSet.isEmpty() && !typeUnifyTask.checkNoContradiction(nSaL, sameEqSet, result)) {
TypeUnifyTask.noShortendElements++;
continue;
}
} else {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) nSaL).getExtendConstraint());
}
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
typeUnifyTask.addChildTask(fork);
// schedule compute() on another thread
CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
resultValues = resultValues.thenCombine(forkFuture,
(prevResults, fork_res) -> {
if (typeUnifyTask.isExecutionCancelled()) {
return new ComputationResults();
}
context.logger().debug("fork_res: " + fork_res.toString());
context.logger().debug(Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString());
prevResults.addForkResult(fork_res);
if (!typeUnifyTask.isUndefinedPairSetSet(fork_res)) {
aParDef.add(fork.getNextSetElement());
}
fork.context.logger().debug("final 1");
fork.closeLogFile();
return prevResults;
}
);
if (typeUnifyTask.isExecutionCancelled()) {
return CompletableFuture.completedFuture(new ComputationResults());
}
}
return resultValues;
}
@Override
public void applyComputedResults(
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> currentThreadResult,
Set<UnifyPair> compResult,
Set<UnifyPair> compRes
) {
int resOfCompare = typeUnifyTask.oup.compare(compResult, compRes);
if (resOfCompare == -1) {
context.logger().debug("Geloescht result: " + result);
result.clear();
result.addAll(currentThreadResult);
}
else if (resOfCompare == 0) {
result.addAll(currentThreadResult);
}
else if (resOfCompare == 1) {
context.logger().debug("Geloescht currentThreadResult: " + currentThreadResult);
//result = result;
}
}
@Override
public boolean eraseInvalidSets(
int rekTiefe,
Set<Set<UnifyPair>> aParDef,
List<Set<UnifyPair>> nextSetAsList
) {
// context.logger().info("");
context.logger().debug("a: " + rekTiefe + " variance: " + variance + a.toString());
context.logger().debug("aParDef: " + aParDef.toString());
aParDef.add(a);
Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator();
if (this.isOderConstraint) {
nextSetAsList.removeAll(nextSetasListOderConstraints);
nextSetasListOderConstraints = new ArrayList<>();
context.logger().debug("Removed: " + nextSetasListOderConstraints);
while (aParDefIt.hasNext()) {
Set<UnifyPair> a_new = aParDefIt.next();
List<Set<UnifyPair>> smallerSetasList = typeUnifyTask.oup.smallerThan(a_new, nextSetAsList);
context.logger().debug("smallerSetasList: " + smallerSetasList);
List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
.filter(x -> !((Constraint<UnifyPair>) x).isInherited() && !((Constraint<UnifyPair>) x).isImplemented())
.collect(Collectors.toCollection(ArrayList::new));
context.logger().debug("notInherited: " + notInherited + "\n");
List<Set<UnifyPair>> notErased = new ArrayList<>();
notInherited.forEach(x -> {
notErased.addAll(typeUnifyTask.oup.smallerEqThan(x, smallerSetasList));
});
List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList);
context.logger().debug("notErased: " + notErased + "\n");
erased.removeAll(notErased);
nextSetAsList.removeAll(erased);
context.logger().debug("Removed: " + erased);
context.logger().debug("Not Removed: " + nextSetAsList);
}
} else {
while (aParDefIt.hasNext()) {
//nextSetasListIt = nextSetasList.iterator(); should be added here, PL 2020-04-28
Set<UnifyPair> a_new = aParDefIt.next();
List<Set<UnifyPair>> erased = typeUnifyTask.oup.smallerEqThan(a_new, nextSetAsList);
nextSetAsList.removeAll(erased);
context.logger().debug("Removed: " + erased);
context.logger().debug("Not Removed: " + nextSetAsList);
}
}
return false;
}
}
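
The forking code above repeats one CompletableFuture pattern: compute() itself returns a future, so each fork is scheduled on the context's executor with supplyAsync, the nested future is flattened with thenCompose(f -> f), and the individual fork results are folded into a single accumulator future with thenCombine. A self-contained sketch of that pattern with plain integers (illustrative only, not project code):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class ForkCombineSketch {
    // Stands in for TypeUnify2Task.compute(), which already returns a future.
    static CompletableFuture<Integer> compute(int value) {
        return CompletableFuture.completedFuture(value * value);
    }

    public static void main(String[] args) {
        ExecutorService executor = Executors.newFixedThreadPool(4);

        // Schedule the "orig" fork and start the accumulator future with its result.
        CompletableFuture<List<Integer>> results =
                CompletableFuture.supplyAsync(() -> compute(1), executor)
                        .thenCompose(f -> f) // flatten the nested future
                        .thenApply(first -> {
                            List<Integer> acc = new ArrayList<>();
                            acc.add(first);
                            return acc;
                        });

        // Fold every further fork into the accumulator, one thenCombine per fork.
        for (int i = 2; i <= 4; i++) {
            int value = i;
            CompletableFuture<Integer> fork =
                    CompletableFuture.supplyAsync(() -> compute(value), executor)
                            .thenCompose(f -> f);
            results = results.thenCombine(fork, (acc, res) -> {
                acc.add(res);
                return acc;
            });
        }

        System.out.println(results.join()); // [1, 4, 9, 16]
        executor.shutdown();
    }
}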

View File

@@ -0,0 +1,226 @@
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.util.Tuple;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
public class CovarianceCase extends VarianceCase {
protected final int variance = -1;
protected CovarianceCase(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
super(isOderConstraint, typeUnifyTask, context);
}
@Override
public void selectNextData(
TypeUnifyTask typeUnifyTask,
List<Set<UnifyPair>> nextSetAsList,
Optional<UnifyPair> optOrigPair
) {
a = typeUnifyTask.oup.min(nextSetAsList.iterator());
context.logger().debug(() -> "Min: a in " + variance + " " + a);
if (this.isOderConstraint) {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
}
context.logger().debug(() -> "nextSetasListOderConstraints -1: " + nextSetasListOderConstraints);
nextSetAsList.remove(a);
//Determine all minimal elements in nextSetasListRest;
//parallel computation is only started for these.
Set<UnifyPair> finalA = a;
nextSetasListRest = typeUnifyTask.oup.minElements(
nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != -1).toList()
);
}
@Override
public CompletableFuture<ComputationResults> computeParallel(
Set<Set<UnifyPair>> elems,
Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
IFiniteClosure fc,
int rekTiefe,
Set<UnifyPair> methodSignatureConstraint,
List<Set<UnifyPair>> nextSetAsList,
Set<UnifyPair> sameEqSet,
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> aParDef
) {
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
newElemsOrig.add(a);
/* FORK START */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, methodSignatureConstraint);
typeUnifyTask.addChildTask(forkOrig);
// schedule compute() on another thread
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
CompletableFuture<ComputationResults> resultValues = forkOrigFuture.thenApply(
(currentThreadResult) -> {
forkOrig.context.logger().debug("final Orig -1");
forkOrig.closeLogFile();
return new ComputationResults(currentThreadResult);
});
//forks.add(forkOrig);
if (typeUnifyTask.isExecutionCancelled()) {
return resultValues;
}
/* FORK END */
context.logger().debug(() -> "a in " + variance + " " + a);
context.logger().debug(() -> "nextSetasListRest: " + nextSetasListRest.toString());
while (!nextSetasListRest.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
nextSetAsList.remove(nSaL);
context.logger().debug(() -> "-1 RM" + nSaL.toString());
if (!this.isOderConstraint) {
//check whether a =. ty in nSaL contradicts sameEqSet
if (!sameEqSet.isEmpty() && !typeUnifyTask.checkNoContradiction(nSaL, sameEqSet, result)) {
TypeUnifyTask.noShortendElements++;
continue;
}
} else {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) nSaL).getExtendConstraint());
}
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
typeUnifyTask.addChildTask(fork);
// schedule compute() on another thread
CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
resultValues = resultValues.thenCombine(forkFuture,
(prevResults, fork_res) -> {
if (typeUnifyTask.isExecutionCancelled()) {
return prevResults;
}
context.logger().debug(() -> "fork_res: " + fork_res.toString());
context.logger().debug(() -> Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString());
prevResults.addForkResult(fork_res);
if (!typeUnifyTask.isUndefinedPairSetSet(fork_res)) {
aParDef.add(fork.getNextSetElement());
}
fork.context.logger().debug("final -1");
fork.closeLogFile();
return prevResults;
}
);
if (typeUnifyTask.isExecutionCancelled()) {
return resultValues;
}
}
return resultValues;
}
@Override
public void applyComputedResults(
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> currentThreadResult,
Set<UnifyPair> compResult,
Set<UnifyPair> compRes
) {
int resOfCompare = typeUnifyTask.oup.compare(compResult, compRes);
if (resOfCompare == 1) {
context.logger().debug(() -> "Geloescht result: " + result);
result.clear();
result.addAll(currentThreadResult);
} else if (resOfCompare == 0) {
result.addAll(currentThreadResult);
} else if (resOfCompare == -1) {
context.logger().debug(() -> "Geloescht currentThreadResult: " + currentThreadResult);
//result = result;
}
}
@Override
public boolean eraseInvalidSets(
int rekTiefe,
Set<Set<UnifyPair>> aParDef,
List<Set<UnifyPair>> nextSetAsList
) {
// context.logger().info("");
context.logger().debug(() -> "a: " + rekTiefe + " variance: " + variance + a.toString());
context.logger().debug(() -> "aParDef: " + aParDef.toString());
aParDef.add(a);
Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator();
if (this.isOderConstraint) {
nextSetAsList.removeAll(nextSetasListOderConstraints);
context.logger().debug(() -> "Removed: " + nextSetasListOderConstraints);
nextSetasListOderConstraints = new ArrayList<>();
while (aParDefIt.hasNext()) {
Set<UnifyPair> a_new = aParDefIt.next();
List<Set<UnifyPair>> greaterSetasList = typeUnifyTask.oup.greaterThan(a_new, nextSetAsList);
//a_new must be added if it is not inherited; it will be removed again later
if (!((Constraint<UnifyPair>) a_new).isInherited()) {
greaterSetasList.add(a_new);
}
List<Set<UnifyPair>> notInherited = greaterSetasList.stream()
.filter(x -> !((Constraint<UnifyPair>) x).isInherited())
.collect(Collectors.toCollection(ArrayList::new));
List<Set<UnifyPair>> notErased = new ArrayList<>();
//If x is not inherited, the next overload starts at the next greater element
notInherited.forEach(x -> {
notErased.addAll(typeUnifyTask.oup.greaterEqThan(x, greaterSetasList));
});
//the smallest element is the one that a_new inherited from
//and must therefore be removed
Iterator<Set<UnifyPair>> notErasedIt = notErased.iterator();
if (notErasedIt.hasNext()) {
Set<UnifyPair> min = typeUnifyTask.oup.min(notErasedIt);
notErased.remove(min);
notErased.remove(((Constraint<UnifyPair>) min).getExtendConstraint());
}
List<Set<UnifyPair>> erased = new ArrayList<>(greaterSetasList);
erased.removeAll(notErased);
nextSetAsList.removeAll(erased);
context.logger().debug(() -> "Removed: " + erased);
context.logger().debug(() -> "Not Removed: " + nextSetAsList);
}
} else {
while (aParDefIt.hasNext()) {
//nextSetasListIt = nextSetasList.iterator(); should be added here, PL 2020-04-28
Set<UnifyPair> a_new = aParDefIt.next();
List<Set<UnifyPair>> erased = typeUnifyTask.oup.greaterEqThan(a_new, nextSetAsList);
nextSetAsList.removeAll(erased);
context.logger().debug(() -> "Removed: " + erased);
context.logger().debug(() -> "Not Removed: " + nextSetAsList);
}
}
return false;
}
}

View File

@@ -0,0 +1,132 @@
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.util.Tuple;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
public class InvarianceOrConstraintCase extends VarianceCase {
// either for invariance or for oderConstraints
protected final int variance = 2;
protected InvarianceOrConstraintCase(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
super(isOderConstraint, typeUnifyTask, context);
}
@Override
public void selectNextData(
TypeUnifyTask typeUnifyTask,
List<Set<UnifyPair>> nextSetAsList,
Optional<UnifyPair> optOrigPair
) {
a = nextSetAsList.removeFirst();
//Parallel computation is started for all elements.
nextSetasListRest = new ArrayList<>(nextSetAsList);
}
@Override
public CompletableFuture<ComputationResults> computeParallel(
Set<Set<UnifyPair>> elems,
Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
IFiniteClosure fc,
int rekTiefe,
Set<UnifyPair> methodSignatureConstraint,
List<Set<UnifyPair>> nextSetAsList,
Set<UnifyPair> sameEqSet,
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> aParDef
) {
context.logger().debug("var2einstieg");
Set<TypeUnify2Task> forks = new HashSet<>();
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
newElemsOrig.add(a);
/* FORK START */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
typeUnifyTask.addChildTask(forkOrig);
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
CompletableFuture<ComputationResults> resultValues = forkOrigFuture.thenApply((currentThreadResult) -> {
forkOrig.context.logger().debug("final Orig 2");
forkOrig.closeLogFile();
return new ComputationResults(currentThreadResult);
});
if (typeUnifyTask.isExecutionCancelled()) {
return resultValues;
}
/* FORK END */
context.logger().debug(() -> "a in " + variance + " " + a);
context.logger().debug(() -> "nextSetasListRest: " + nextSetasListRest.toString());
//For the parallel computation of the oder-constraints, methodSignature is copied
//and the methodSignature of a or nSaL is removed again if it does not lead to a solution.
Set<UnifyPair> methodSignatureConstraintForParallel = new HashSet<>(methodSignatureConstraint);
Set<UnifyPair> nSaL = a;
while (!nextSetasListRest.isEmpty()) {
methodSignatureConstraintForParallel.removeAll(((Constraint<UnifyPair>) nSaL).getmethodSignatureConstraint());
nSaL = nextSetasListRest.removeFirst();
nextSetAsList.remove(nSaL); //PL commented back in 20-02-03
methodSignatureConstraintForParallel.addAll(((Constraint<UnifyPair>) nSaL).getmethodSignatureConstraint());
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraintForParallel));
typeUnifyTask.addChildTask(fork);
CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
resultValues = resultValues.thenCombine(forkFuture, (prevResults, fork_res) -> {
if (typeUnifyTask.isExecutionCancelled()) {
return prevResults;
}
prevResults.addForkResult(fork_res);
fork.context.logger().debug("final 2");
fork.closeLogFile();
return prevResults;
});
if (typeUnifyTask.isExecutionCancelled()) {
return resultValues;
}
}
return resultValues;
}
@Override
public void applyComputedResults(
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> currentThreadResult,
Set<UnifyPair> compResult,
Set<UnifyPair> compRes
) {
// Nothing
}
@Override
public boolean eraseInvalidSets(
int rekTiefe,
Set<Set<UnifyPair>> aParDef,
List<Set<UnifyPair>> nextSetAsList
) {
// Nothing
return false;
}
}

View File

@@ -0,0 +1,110 @@
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.util.Tuple;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
public class UnknownVarianceCase extends VarianceCase {
protected final int variance = 0;
protected UnknownVarianceCase(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
super(isOderConstraint, typeUnifyTask, context);
}
@Override
public void selectNextData(
TypeUnifyTask typeUnifyTask,
List<Set<UnifyPair>> nextSetAsList,
Optional<UnifyPair> optOrigPair
) {
//if a <. theta then a maximal element is very likely
//if theta <. a then a minimal element is very likely
if (!this.isOderConstraint && optOrigPair != null && optOrigPair.isPresent()) {
if (optOrigPair.get().getBasePair().getLhsType() instanceof PlaceholderType) {
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
} else {
a = typeUnifyTask.oup.min(nextSetAsList.iterator());
}
nextSetAsList.remove(a);
} else if (this.isOderConstraint) {
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
nextSetAsList.remove(a);
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
} else {
a = nextSetAsList.removeFirst();
}
}
@Override
public CompletableFuture<ComputationResults> computeParallel(
Set<Set<UnifyPair>> elems,
Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
IFiniteClosure fc,
int rekTiefe,
Set<UnifyPair> methodSignatureConstraint,
List<Set<UnifyPair>> nextSetAsList,
Set<UnifyPair> sameEqSet,
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> aParDef
) {
elems.add(a);
return typeUnifyTask.unify2(elems, eq, oderConstraints, fc, context.parallel(), rekTiefe, new HashSet<>(methodSignatureConstraint))
.thenApply(ComputationResults::new);
}
@Override
public void applyComputedResults(
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> currentThreadResult,
Set<UnifyPair> compResult,
Set<UnifyPair> compRes
) {
context.logger().debug("RES var=0 ADD:" + result.toString() + " " + currentThreadResult.toString());
result.addAll(currentThreadResult);
}
@Override
public boolean eraseInvalidSets(
int rekTiefe,
Set<Set<UnifyPair>> aParDef,
List<Set<UnifyPair>> nextSetAsList
) {
if (!this.isOderConstraint) {
return true;
} else {
nextSetAsList.removeAll(nextSetasListOderConstraints);
nextSetasListOderConstraints = new ArrayList<>();
context.logger().debug("Removed: " + nextSetasListOderConstraints);
List<Set<UnifyPair>> smallerSetasList = typeUnifyTask.oup.smallerThan(a, nextSetAsList);
List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
.filter(x -> !((Constraint<UnifyPair>) x).isInherited())
.collect(Collectors.toCollection(ArrayList::new));
List<Set<UnifyPair>> notErased = new ArrayList<>();
notInherited.forEach(x -> notErased.addAll(typeUnifyTask.oup.smallerEqThan(x, smallerSetasList)));
List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList);
erased.removeAll(notErased);
nextSetAsList.removeAll(erased);
context.logger().debug("Removed: " + erased);
context.logger().debug("Not Removed: " + nextSetAsList);
}
return false;
}
}

View File

@@ -0,0 +1,132 @@
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.util.Tuple;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
public abstract class VarianceCase {
public static VarianceCase createFromVariance(int variance, boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
return switch (variance) {
case 0 -> new UnknownVarianceCase(isOderConstraint, typeUnifyTask, context);
case 1 -> new ContravarianceCase(isOderConstraint, typeUnifyTask, context);
case -1 -> new CovarianceCase(isOderConstraint, typeUnifyTask, context);
case 2 -> new InvarianceOrConstraintCase(isOderConstraint, typeUnifyTask, context);
default -> throw new RuntimeException("Invalid variance: " + variance);
};
}
protected final boolean isOderConstraint;
protected final TypeUnifyTask typeUnifyTask;
protected final UnifyContext context;
/**
* The current case
*/
public Set<UnifyPair> a;
/**
* List of cases for parallel processing.
* Contains the elements that are not in relation to the current case stored in
* the variable a. These must be processed in any case,
* which is why their computation is started in parallel.
*/
public List<Set<UnifyPair>> nextSetasListRest = new ArrayList<>();
/**
* List of cases in which the receiver does or does not contain "? extends".
* As a rule this is exactly one element.
* This element is later removed from nextSetasList if the respective other element has
* led to success.
*/
public List<Set<UnifyPair>> nextSetasListOderConstraints = new ArrayList<>();
protected VarianceCase(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
this.isOderConstraint = isOderConstraint;
this.typeUnifyTask = typeUnifyTask;
this.context = context;
}
/**
* Selects values for the next iteration in the run method:
* - a : the element selected as the current case
* - nextSetasListRest: the list of cases that have no relation to the selected a and will have to be worked on
* - nextSetasListOderConstraints: The list of cases of which the receiver contains "? extends", typically one element
*/
public abstract void selectNextData(
TypeUnifyTask typeUnifyTask,
List<Set<UnifyPair>> nextSetAsList,
Optional<UnifyPair> optOrigPair
);
/**
* Computes the unification for the selected element a and, in parallel, for the unrelated elements in nextSetasListRest; returns a future with the combined results.
*/
public abstract CompletableFuture<ComputationResults> computeParallel(
Set<Set<UnifyPair>> elems,
Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
IFiniteClosure fc,
int rekTiefe,
Set<UnifyPair> methodSignatureConstraint,
List<Set<UnifyPair>> nextSetAsList,
Set<UnifyPair> sameEqSet,
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> aParDef
);
/**
* Merges the results of the current computation into the overall result set, based on the variance-specific comparison of compResult and compRes.
*/
public abstract void applyComputedResults(
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> currentThreadResult,
Set<UnifyPair> compResult,
Set<UnifyPair> compRes
);
/**
*
* @return whether the current iteration should be broken out of
*/
public abstract boolean eraseInvalidSets(
int rekTiefe,
Set<Set<UnifyPair>> aParDef,
List<Set<UnifyPair>> nextSetAsList
);
/**
* Wrapper class for the parallel computation results
*/
public static class ComputationResults {
public Set<Set<UnifyPair>> mainResult;
public Set<Set<Set<UnifyPair>>> forkResults;
public ComputationResults() {
this(new HashSet<>(), new HashSet<>());
}
public ComputationResults(Set<Set<UnifyPair>> mainResult) {
this(mainResult, new HashSet<>());
}
public ComputationResults(Set<Set<UnifyPair>> mainResult, Set<Set<Set<UnifyPair>>> forkResults) {
this.mainResult = mainResult;
this.forkResults = forkResults;
}
void addForkResult(Set<Set<UnifyPair>> forkResult) {
forkResults.add(forkResult);
}
}
}
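
A caller obtains the concrete strategy from the variance value (computed via TypeUnifyTaskHelper) and then drives it through selectNextData and computeParallel. The following is a simplified, hypothetical sketch of such a call site, not the actual code in TypeUnifyTask; processCase and all of its parameters are assumed to come from the surrounding unify step:

// A minimal sketch, assuming it runs inside TypeUnifyTask.
CompletableFuture<Set<Set<UnifyPair>>> processCase(
        int variance, boolean isOderConstraint, TypeUnifyTask task, UnifyContext context,
        List<Set<UnifyPair>> nextSetAsList, Optional<UnifyPair> optOrigPair,
        Set<Set<UnifyPair>> elems, Set<UnifyPair> eq,
        List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, int rekTiefe,
        Set<UnifyPair> methodSignatureConstraint, Set<UnifyPair> sameEqSet,
        Set<Set<UnifyPair>> result, Set<Set<UnifyPair>> aParDef) {

    // Pick the strategy object for this variance value (0, 1, -1 or 2).
    VarianceCase varianceCase =
            VarianceCase.createFromVariance(variance, isOderConstraint, task, context);

    // Select the current case a (and the unrelated rest for parallel processing).
    varianceCase.selectNextData(task, nextSetAsList, optOrigPair);

    // Fork the parallel computations and hand back the combined main result.
    return varianceCase
            .computeParallel(elems, eq, oderConstraints, fc, rekTiefe, methodSignatureConstraint,
                    nextSetAsList, sameEqSet, result, aParDef)
            .thenApply(results -> results.mainResult);
}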

View File

@@ -12,15 +12,12 @@ import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
public class distributeVariance extends visitUnifyTypeVisitor<Integer> {
public static int inverseVariance(int variance) {
Integer ret = 0;
if (variance == 1) {
ret = -1;
}
if (variance == -1) {
ret = 1;
}
return ret;
public static int inverseVariance(int variance) {
return switch (variance) {
case 1 -> -1;
case -1 -> 1;
default -> 0;
};
}
@@ -42,7 +39,7 @@ public class distributeVariance extends visitUnifyTypeVisitor<Integer> {
List<UnifyType> param = new ArrayList<>(funnty.getTypeParams().get().length);
param.addAll(Arrays.asList(funnty.getTypeParams().get()));
UnifyType resultType = param.remove(param.size()-1);
Integer htInverse = inverseVariance(ht);
int htInverse = inverseVariance(ht);
param = param.stream()
.map(x -> x.accept(this, htInverse))
.collect(Collectors.toCollection(ArrayList::new));

View File

@@ -1,6 +1,7 @@
package de.dhbwstuttgart.typeinference.unify.interfaces;
import java.util.List;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.Optional;
import java.util.Set;
@@ -18,9 +19,8 @@ import org.antlr.v4.runtime.Token;
*
* @author Florian Steurer
*/
public interface IFiniteClosure {
public interface IFiniteClosure extends ISerializableData {
public void setLogTrue();
/**
* Returns all types of the finite closure that are subtypes of the argument.
* @return The set of subtypes of the argument.
@@ -74,5 +74,5 @@ public interface IFiniteClosure {
public Set<UnifyType> getChildren(UnifyType t);
public Set<UnifyType> getAllTypesByName(String typeName);
public int compare(UnifyType rhsType, UnifyType rhsType2, PairOperator pairop);
public int compare(UnifyType rhsType, UnifyType rhsType2, PairOperator pairop, UnifyContext context);
}

View File

@@ -1,8 +1,9 @@
package de.dhbwstuttgart.typeinference.unify.model;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.Set;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
@@ -11,7 +12,7 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor;
/**
* An extends wildcard type "? extends T".
*/
public final class ExtendsType extends WildcardType {
public final class ExtendsType extends WildcardType implements ISerializableData {
public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) {
return visitor.visit(this, ht);
@@ -23,9 +24,6 @@ public final class ExtendsType extends WildcardType {
*/
public ExtendsType(UnifyType extendedType) {
super("? extends " + extendedType.getName(), extendedType);
if (extendedType instanceof ExtendsType) {
System.out.print("");
}
}
/**
@@ -92,5 +90,21 @@ public final class ExtendsType extends WildcardType {
return "? extends " + wildcardedType;
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("wildcardedType", this.wildcardedType.toSerial(keyStorage));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static ExtendsType fromSerial(SerialMap data, UnifyContext context) {
return new ExtendsType(
UnifyType.fromSerial(data.getMap("wildcardedType"), context)
);
}
}

View File

@@ -1,16 +1,22 @@
package de.dhbwstuttgart.typeinference.unify.model;
import java.io.FileWriter;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTaskHelper;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.util.Logger;
import java.io.IOException;
import java.io.Writer;
import java.lang.reflect.Modifier;
import java.sql.Array;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.BiFunction;
@@ -18,40 +24,28 @@ import java.util.function.BinaryOperator;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import com.google.common.collect.Ordering;
//PL 18-02-05/18-04-05 Unifier replaced by Matcher
//greater still needs to be replaced - yes, done 18-04-05
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.parser.SourceLoc;
import de.dhbwstuttgart.parser.scope.JavaClassName;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.unify.MartelliMontanariUnify;
import de.dhbwstuttgart.typeinference.unify.Match;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify;
import de.dhbwstuttgart.util.Pair;
import org.antlr.v4.runtime.Token;
import org.apache.commons.io.output.NullWriter;
/**
* The finite closure for the type unification
* @author Florian Steurer
*/
public class FiniteClosure //extends Ordering<UnifyType> //removed PL 2018-12-11
implements IFiniteClosure {
public class FiniteClosure implements IFiniteClosure, ISerializableData {
final JavaTXCompiler compiler;
final PlaceholderRegistry placeholderRegistry;
Logger logger;
Writer logFile;
static Boolean log = false;
public void setLogTrue() {
log = true;
}
/**
* A map that maps every type to the node in the inheritance graph that contains that type.
*/
@@ -81,9 +75,10 @@ implements IFiniteClosure {
/**
* Creates a new instance using the inheritance tree defined in the pairs.
*/
public FiniteClosure(Set<UnifyPair> pairs, Writer logFile, JavaTXCompiler compiler) {
public FiniteClosure(Set<UnifyPair> pairs, Logger logger, JavaTXCompiler compiler, PlaceholderRegistry placeholderRegistry) {
this.compiler = compiler;
this.logFile = logFile;
this.placeholderRegistry = placeholderRegistry;
this.logger = logger;
this.pairs = new HashSet<>(pairs);
inheritanceGraph = new HashMap<UnifyType, Node<UnifyType>>();
@@ -132,7 +127,7 @@ implements IFiniteClosure {
}
// Build the alternative representation with strings as keys
strInheritanceGraph = new HashMap<>();
strInheritanceGraph = TypeUnifyTaskHelper.getPresizedHashMap(inheritanceGraph.size());
for(UnifyType key : inheritanceGraph.keySet()) {
if(!strInheritanceGraph.containsKey(key.getName()))
strInheritanceGraph.put(key.getName(), new HashSet<>());
@@ -141,13 +136,13 @@ implements IFiniteClosure {
}
}
public FiniteClosure(Set<UnifyPair> constraints, Writer writer) {
this(constraints, writer, null);
public FiniteClosure(Set<UnifyPair> constraints, Logger logger, PlaceholderRegistry placeholderRegistry) {
this(constraints, logger, null, placeholderRegistry);
}
void testSmaller() {
UnifyType tq1, tq2, tq3;
tq1 = new ExtendsType(PlaceholderType.freshPlaceholder());
tq1 = new ExtendsType(PlaceholderType.freshPlaceholder(placeholderRegistry));
List<UnifyType> l1 = new ArrayList<>();
List<UnifyType> l2 = new ArrayList<>();
l1.add(tq1);
@@ -166,7 +161,7 @@ implements IFiniteClosure {
Set<UnifyType> ret;
if ((ret = smallerHash.get(new hashKeyType(type))) != null) {
//System.out.println(greaterHash);
//context.logger().info(greaterHash);
return new HashSet<>(ret);
}
@@ -207,7 +202,7 @@ implements IFiniteClosure {
result.add(new Pair<>(t, fBounded));
}
catch (StackOverflowError e) {
System.out.println("");
// context.logger().info("");
}
// if C<...> <* C<...> then ... (third case in definition of <*)
@@ -244,9 +239,9 @@ implements IFiniteClosure {
result.add(new Pair<>(theta1.apply(sigma), fBounded));
}
}
HashSet<UnifyType> resut = result.stream().map(x -> x.getKey()).collect(Collectors.toCollection(HashSet::new));
if(resut.equals(types.stream().map(x -> x.getKey()).collect(Collectors.toCollection(HashSet::new))))
HashSet<UnifyType> resut = result.stream().map(Pair::getKey).collect(Collectors.toCollection(HashSet::new));
if(resut.equals(types.stream().map(Pair::getKey).collect(Collectors.toCollection(HashSet::new))))
return resut;
return computeSmaller(result);
}
@@ -278,9 +273,9 @@ implements IFiniteClosure {
@Override
//Inserted PL 2018-05-24, F-bounded problem
public Set<UnifyType> greater(UnifyType type, Set<UnifyType> fBounded, SourceLoc location) {
Set<UnifyType> ret;
if ((ret = greaterHash.get(new hashKeyType(type))) != null) {
//System.out.println(greaterHash);
Set<UnifyType> ret = greaterHash.get(new hashKeyType(type));
if (ret != null) {
//context.logger().info(greaterHash);
return new HashSet<>(ret);
}
@@ -330,7 +325,7 @@ implements IFiniteClosure {
Set<UnifyType> fBoundedNew = new HashSet<>(fBounded);
fBoundedNew.add(theta1);
Set<UnifyType> theta2Set = candidate.getContentOfPredecessors();
//System.out.println("");
//context.logger().info("");
for(UnifyType theta2 : theta2Set) {
result.add(theta2.apply(sigma));
PairResultFBounded.add(new Pair<>(theta2.apply(sigma), fBoundedNew));
@@ -364,7 +359,7 @@ implements IFiniteClosure {
return ((match.match(termList).isPresent()) || x);
};
//if (parai.getName().equals("java.lang.Integer")) {
// System.out.println("");
// context.logger().info("");
//}
BinaryOperator<Boolean> bo = (a,b) -> (a || b);
if (lfBounded.stream().reduce(false,f,bo)) {
@@ -377,7 +372,7 @@ implements IFiniteClosure {
}
}
permuteParams(paramCandidates).forEach(x -> result.add(t.setTypeParams(x)));
//System.out.println("");
//context.logger().info("");
}
}
@@ -437,7 +432,7 @@ implements IFiniteClosure {
return ((match.match(termList).isPresent()) || x);
};
if (parai.getName().equals("java.lang.Integer")) {
System.out.println("");
context.logger().info("");
}
BinaryOperator<Boolean> bo = (a,b) -> (a || b);
if (fBounded.stream().reduce(false,f,bo)) continue; //F-bounded infinite recursion
@@ -476,7 +471,7 @@ implements IFiniteClosure {
}
HashSet<UnifyType> resut = result.stream().map(x -> x.getKey()).collect(Collectors.toCollection(HashSet::new));
System.out.println(resut);
context.logger().info(resut);
if(resut.equals(types.stream().map(x -> x.getKey()).collect(Collectors.toCollection(HashSet::new))))
return resut;
return computeGreater(result);
@@ -512,35 +507,42 @@ implements IFiniteClosure {
public Set<UnifyType> grArg(ReferenceType type, Set<UnifyType> fBounded) {
Set<UnifyType> result = new HashSet<UnifyType>();
result.add(type);
smaller(type, fBounded).forEach(x -> result.add(new SuperType(x)));
greater(type,fBounded).forEach(x -> result.add(new ExtendsType(x)));
smaller(type, fBounded).forEach(x -> result.add(new SuperType(x)));
greater(type,fBounded).forEach(x -> result.add(new ExtendsType(x)));
return result;
}
@Override
public Set<UnifyType> grArg(FunNType type, Set<UnifyType> fBounded) {
Set<UnifyType> result = new HashSet<UnifyType>();
Set<UnifyType> smaller = smaller(type, fBounded);
Set<UnifyType> greater = greater(type, fBounded);
Set<UnifyType> result = new HashSet<UnifyType>((int)((1 + smaller.size() + greater.size()) * 1.5));
result.add(type);
smaller(type, fBounded).forEach(x -> result.add(new SuperType(x)));
greater(type, fBounded).forEach(x -> result.add(new ExtendsType(x)));
smaller.forEach(x -> result.add(new SuperType(x)));
greater.forEach(x -> result.add(new ExtendsType(x)));
return result;
}
@Override
public Set<UnifyType> grArg(ExtendsType type, Set<UnifyType> fBounded) {
Set<UnifyType> result = new HashSet<UnifyType>();
result.add(type);
UnifyType t = type.getExtendedType();
greater(t, fBounded).forEach(x -> result.add(new ExtendsType(x)));
Set<UnifyType> greater = greater(t, fBounded);
Set<UnifyType> result = new HashSet<UnifyType>((int)((1 + greater.size()) * 1.5));
result.add(type);
greater.forEach(x -> result.add(new ExtendsType(x)));
return result;
}
@Override
public Set<UnifyType> grArg(SuperType type, Set<UnifyType> fBounded) {
Set<UnifyType> result = new HashSet<UnifyType>();
result.add(type);
UnifyType t = type.getSuperedType();
smaller(t, fBounded).forEach(x -> result.add(new SuperType(x)));
Set<UnifyType> smaller = smaller(t, fBounded);
Set<UnifyType> result = TypeUnifyTaskHelper.getPresizedHashSet(1 + smaller.size());
result.add(type);
smaller.forEach(x -> result.add(new SuperType(x)));
return result;
}
@@ -572,29 +574,33 @@ implements IFiniteClosure {
@Override
public Set<UnifyType> smArg(ExtendsType type, Set<UnifyType> fBounded) {
Set<UnifyType> result = new HashSet<UnifyType>();
result.add(type);
UnifyType t = type.getExtendedType();
result.add(t);
smaller(t, fBounded).forEach(x -> {
result.add(new ExtendsType(x));
result.add(x);
});
Set<UnifyType> smaller = smaller(t, fBounded);
Set<UnifyType> result = TypeUnifyTaskHelper.getPresizedHashSet(2 * (1 + smaller.size()));
result.add(type);
result.add(t);
smaller.forEach(x -> {
result.add(new ExtendsType(x));
result.add(x);
});
return result;
}
@Override
public Set<UnifyType> smArg(SuperType type, Set<UnifyType> fBounded) {
Set<UnifyType> result = new HashSet<UnifyType>();
result.add(type);
UnifyType t = type.getSuperedType();
result.add(t);
//*** CAUTION: this could be WRONG, PL 2018-05-23; possibly pass the HashSet through smArg instead
greater(t, fBounded).forEach(x -> {
result.add(new SuperType(x));
result.add(x);
});
Set<UnifyType> greater = greater(t, fBounded);
Set<UnifyType> result = TypeUnifyTaskHelper.getPresizedHashSet(2 * (1 + greater.size()));
result.add(type);
result.add(t);
//*** CAUTION: this could be WRONG, PL 2018-05-23; possibly pass the HashSet through smArg instead
greater.forEach(x -> {
result.add(new SuperType(x));
result.add(x);
});
return result;
}
@@ -609,7 +615,7 @@ implements IFiniteClosure {
public Set<UnifyType> getAllTypesByName(String typeName) {
if(!strInheritanceGraph.containsKey(typeName))
return new HashSet<>();
return strInheritanceGraph.get(typeName).stream().map(x -> x.getContent()).collect(Collectors.toCollection(HashSet::new));
return strInheritanceGraph.get(typeName).stream().map(Node::getContent).collect(Collectors.toCollection(HashSet::new));
}
@Override
@@ -698,10 +704,10 @@ implements IFiniteClosure {
}
*/
public int compare (UnifyType left, UnifyType right, PairOperator pairop) {
try {logFile.write("left: "+ left + " right: " + right + " pairop: " + pairop +"\n");} catch (IOException ie) {}
if (left.getName().equals("Matrix") || right.getName().equals("Matrix"))
System.out.println("");
public int compare (UnifyType left, UnifyType right, PairOperator pairop, UnifyContext context) {
logger.debug(() -> "left: "+ left + " right: " + right + " pairop: " + pairop +"\n");
// if (left.getName().equals("Matrix") || right.getName().equals("Matrix"))
// context.logger().info("");
/*
pairop = PairOperator.SMALLERDOTWC;
List<UnifyType> al = new ArrayList<>();
@@ -752,19 +758,14 @@ implements IFiniteClosure {
}
}
UnifyPair up = new UnifyPair(left, right, pairop);
TypeUnifyTask unifyTask = new TypeUnifyTask();
TypeUnifyTask unifyTask = new TypeUnifyTask(context);
HashSet<UnifyPair> hs = new HashSet<>();
hs.add(up);
Set<UnifyPair> smallerRes = unifyTask.applyTypeUnificationRules(hs, this);
//if (left.getName().equals("Vector") || right.getName().equals("AbstractList"))
{try {
logFile.write("\nsmallerRes: " + smallerRes);//"smallerHash: " + greaterHash.toString());
logFile.flush();
}
catch (IOException e) {
System.err.println("no LogFile");}}
logger.debug(() -> "\nsmallerRes: " + smallerRes);//"smallerHash: " + greaterHash.toString());
//Equations of the form a <./=. Theta or Theta <./=. a or a <./=. b are ok.
Predicate<UnifyPair> delFun = x -> !((x.getLhsType() instanceof PlaceholderType ||
x.getRhsType() instanceof PlaceholderType)
@@ -772,12 +773,7 @@ implements IFiniteClosure {
((WildcardType)x.getLhsType()).getWildcardedType().equals(x.getRhsType()))
);
long smallerLen = smallerRes.stream().filter(delFun).count();
try {
logFile.write("\nsmallerLen: " + smallerLen +"\n");
logFile.flush();
}
catch (IOException e) {
System.err.println("no LogFile");}
logger.debug(() -> "\nsmallerLen: " + smallerLen +"\n");
if (smallerLen == 0) return -1;
else {
up = new UnifyPair(right, left, pairop);
@@ -787,13 +783,8 @@ implements IFiniteClosure {
Set<UnifyPair> greaterRes = unifyTask.applyTypeUnificationRules(hs, this);
//if (left.getName().equals("Vector") || right.getName().equals("AbstractList"))
{try {
logFile.write("\ngreaterRes: " + greaterRes);//"smallerHash: " + greaterHash.toString());
logFile.flush();
}
catch (IOException e) {
System.err.println("no LogFile");}}
logger.debug(() -> "\ngreaterRes: " + greaterRes);//"smallerHash: " + greaterHash.toString());
//Equations of the form a <./=. Theta or Theta <./=. a or a <./=. b are ok.
long greaterLen = greaterRes.stream().filter(delFun).count();
if (greaterLen == 0) return 1;
@@ -803,4 +794,18 @@ implements IFiniteClosure {
}
}
}
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("pairs", SerialList.fromMapped(this.pairs, unifyPair -> unifyPair.toSerial(keyStorage)));
return serialized;
}
public static FiniteClosure fromSerial(SerialMap data, UnifyContext context, KeyStorage keyStorage) {
var pairList = data.getList("pairs").assertListOfUUIDs();
Set<UnifyPair> pairs = pairList.stream()
.map(pairData -> UnifyPair.fromSerial(pairData, context, keyStorage)).collect(Collectors.toSet());
return new FiniteClosure(pairs, context.logger(), context.placeholderRegistry());
}
}
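
Most logging changes in this file replace eager logFile.write(...) calls with logger.debug(() -> ...), so the message string is only built when the debug level is actually enabled. A small self-contained sketch of that supplier-based pattern (the Logger below is a stand-in, not de.dhbwstuttgart.util.Logger):

import java.util.function.Supplier;

public class LazyLoggerSketch {
    enum Level { DEBUG, INFO }

    static class Logger {
        private final Level threshold;
        Logger(Level threshold) { this.threshold = threshold; }

        // The supplier is only evaluated when the message is actually emitted,
        // so expensive string building is skipped at higher log levels.
        void debug(Supplier<String> message) {
            if (threshold == Level.DEBUG) {
                System.out.println("[DEBUG] " + message.get());
            }
        }
    }

    public static void main(String[] args) {
        Logger quiet = new Logger(Level.INFO);
        quiet.debug(() -> "smallerRes: " + expensiveToString()); // lambda never invoked

        Logger verbose = new Logger(Level.DEBUG);
        verbose.debug(() -> "smallerLen: " + 0); // printed
    }

    static String expensiveToString() {
        return new java.util.HashSet<>(java.util.List.of(1, 2, 3)).toString();
    }
}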

View File

@@ -1,14 +1,17 @@
package de.dhbwstuttgart.typeinference.unify.model;
import de.dhbwstuttgart.parser.scope.JavaClassName;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import java.lang.reflect.Modifier;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class FunInterfaceType extends ReferenceType {
public class FunInterfaceType extends ReferenceType implements ISerializableData {
final List<UnifyType> intfArgTypes;
final UnifyType intfReturnType;
final List<String> generics;
@@ -46,4 +49,29 @@ public class FunInterfaceType extends ReferenceType {
return args;
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
var serializedWrapper = super.toSerial(keyStorage);
SerialMap serialized = serializedWrapper.getMap("object");
serialized.put("intfArgTypes", SerialList.fromMapped(intfArgTypes, u -> u.toSerial(keyStorage)));
serialized.put("intfReturnType", intfReturnType.toSerial(keyStorage));
serialized.put("generics", SerialList.fromMapped(generics, SerialValue::new));
return serializedWrapper;
}
public static FunInterfaceType fromSerial(SerialMap data, UnifyContext context) {
var name = data.getValue("name").getOf(String.class);
var params = data.getList("params").assertListOfMaps().stream().map(
paramData -> UnifyType.fromSerial(paramData, context)).toList();
var intfArgTypes = data.getList("intfArgTypes").assertListOfMaps().stream().map(
argTypeData -> UnifyType.fromSerial(argTypeData, context)).toList();
var intfReturnType = UnifyType.fromSerial(data.getMap("intfReturnType"), context);
var generics = data.getList("generics").assertListOfValues().stream().map(
generic -> generic.getOf(String.class)).toList();
return new FunInterfaceType(name, new TypeParams(params), intfArgTypes, intfReturnType, generics);
}
}

View File

@@ -1,8 +1,13 @@
package de.dhbwstuttgart.typeinference.unify.model;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Set;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
@@ -12,7 +17,7 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor;
* A real function type in java.
* @author Florian Steurer
*/
public class FunNType extends UnifyType {
public class FunNType extends UnifyType implements ISerializableData {
public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) {
return visitor.visit(this, ht);
@@ -76,7 +81,7 @@ public class FunNType extends UnifyType {
}
@Override
public Boolean wrongWildcard() {
public boolean wrongWildcard() {
return (new ArrayList<UnifyType>(Arrays.asList(getTypeParams()
.get())).stream().filter(x -> (x instanceof WildcardType)).findFirst().isPresent());
}
@@ -99,5 +104,22 @@ public class FunNType extends UnifyType {
return other.getTypeParams().equals(typeParams);
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("params", SerialList.fromMapped(this.typeParams.get(), param -> param.toSerial(keyStorage)));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static FunNType fromSerial(SerialMap data, UnifyContext context) {
List<UnifyType> params = data.getList("params").assertListOfMaps().stream().map(
paramData -> UnifyType.fromSerial(paramData, context)).toList();
return new FunNType(new TypeParams(params));
}
}

View File

@@ -1,5 +1,6 @@
package de.dhbwstuttgart.typeinference.unify.model;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
@@ -26,9 +27,11 @@ import de.dhbwstuttgart.util.Pair;
public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
protected IFiniteClosure fc;
public OrderingUnifyPair(IFiniteClosure fc) {
protected UnifyContext context;
public OrderingUnifyPair(IFiniteClosure fc, UnifyContext context) {
this.fc = fc;
this.context = context;
}
/*
@@ -39,18 +42,13 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
try {
//if (left.getRhsType() instanceof WildcardType || right.getRhsType() instanceof WildcardType) {//PL 2019-01-12 swapped out
if (((PlaceholderType)left.getLhsType()).isInnerType() && ((PlaceholderType)right.getLhsType()).isInnerType()) {
return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOTWC);
return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOTWC, context);
}
else {
return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOT);
return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOT, context);
}}
catch (ClassCastException e) {
try {
((FiniteClosure)fc).logFile.write("ClassCastException: " + left.toString() + " " + left.getGroundBasePair() + "\n\n");
((FiniteClosure)fc).logFile.flush();
}
catch (IOException ie) {
}
((FiniteClosure)fc).logger.debug(() -> "ClassCastException: " + left.toString() + " " + left.getGroundBasePair() + "\n\n");
return -99;
}
}
@@ -58,7 +56,7 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
/*
public int compareEq (UnifyPair left, UnifyPair right) {
if (left == null || right == null)
System.out.println("Fehler");
context.logger().info("Fehler");
if (left.getLhsType() instanceof PlaceholderType) {
return fc.compare(left.getRhsType(), right.getRhsType(), left.getPairOp());
}
@@ -79,18 +77,18 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
&& (((ExtendsType)right).getExtendedType().getName().equals("java.util.Vector"))
&& (((ReferenceType)((ExtendsType)right).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType)))
{
System.out.println("");
// context.logger().info("");
}
if (((right instanceof SuperType) && (((SuperType)right).getSuperedType().getName().equals("java.lang.Object")))
||((left instanceof SuperType) && (((SuperType)left).getSuperedType().getName().equals("java.lang.Object"))))
{
System.out.println("");
// context.logger().info("");
}
}
else {
up = new UnifyPair(left, right, PairOperator.SMALLERDOT);
}
TypeUnifyTask unifyTask = new TypeUnifyTask();
TypeUnifyTask unifyTask = new TypeUnifyTask(context);
HashSet<UnifyPair> hs = new HashSet<>();
hs.add(up);
Set<UnifyPair> smallerRes = unifyTask.applyTypeUnificationRules(hs, fc);
@@ -106,11 +104,11 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
&& (((ExtendsType)right).getExtendedType().getName().equals("java.util.Vector"))
&& (((ReferenceType)((ExtendsType)right).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType)))
{
System.out.println("");
// context.logger().info("");
}
if (right instanceof SuperType)
{
System.out.println("");
// context.logger().info("");
}
}
else {
@@ -172,85 +170,81 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
left.add(p2);
left.add(p4);
*/
Set<UnifyPair> lefteq = left.stream()
.filter(x -> (x.getLhsType() instanceof PlaceholderType && x.getPairOp() == PairOperator.EQUALSDOT))
.collect(Collectors.toCollection(HashSet::new));
Set<UnifyPair> righteq = right.stream()
.filter(x -> (x.getLhsType() instanceof PlaceholderType && x.getPairOp() == PairOperator.EQUALSDOT))
.collect(Collectors.toCollection(HashSet::new));
Set<UnifyPair> leftle = left.stream()
.filter(x -> ((x.getLhsType() instanceof PlaceholderType || x.getRhsType() instanceof PlaceholderType)
&& x.getPairOp() == PairOperator.SMALLERDOT))
.collect(Collectors.toCollection(HashSet::new));
Set<UnifyPair> rightle = right.stream()
.filter(x -> ((x.getLhsType() instanceof PlaceholderType || x.getRhsType() instanceof PlaceholderType)
&& x.getPairOp() == PairOperator.SMALLERDOT))
.collect(Collectors.toCollection(HashSet::new));
Set<UnifyPair> leftlewc = left.stream()
.filter(x -> ((x.getLhsType() instanceof PlaceholderType || x.getRhsType() instanceof PlaceholderType)
&& x.getPairOp() == PairOperator.SMALLERDOTWC))
.collect(Collectors.toCollection(HashSet::new));
Set<UnifyPair> rightlewc = right.stream()
.filter(x -> ((x.getLhsType() instanceof PlaceholderType || x.getRhsType() instanceof PlaceholderType)
&& x.getPairOp() == PairOperator.SMALLERDOTWC))
.collect(Collectors.toCollection(HashSet::new));
//System.out.println(left.toString());
Set<UnifyPair> lefteq = new HashSet<>();
Set<UnifyPair> leftle = new HashSet<>();
Set<UnifyPair> leftlewc = new HashSet<>();
Set<UnifyPair> lefteqOder = new HashSet<>();
Set<UnifyPair> lefteqRet = new HashSet<>();
Set<UnifyPair> leftleOder = new HashSet<>();
for (var x : left) {
boolean isLeftPlaceholder = x.getLhsType() instanceof PlaceholderType;
boolean isRightPlaceholder = x.getRhsType() instanceof PlaceholderType;
boolean hasPlaceholder = isLeftPlaceholder || isRightPlaceholder;
if (isLeftPlaceholder && x.getPairOp() == PairOperator.EQUALSDOT) lefteq.add(x);
if (hasPlaceholder && x.getPairOp() == PairOperator.SMALLERDOT) leftle.add(x);
if (hasPlaceholder && x.getPairOp() == PairOperator.SMALLERDOTWC) leftlewc.add(x);
UnifyPair y = x.getGroundBasePair();
boolean isBasePairLeftPlaceholder = y.getLhsType() instanceof PlaceholderType;
boolean isBasePairRightPlaceholder = y.getRhsType() instanceof PlaceholderType;
if (isBasePairLeftPlaceholder && !isBasePairRightPlaceholder && x.getPairOp() == PairOperator.EQUALSDOT) {
lefteqOder.add(x);
}
else if (isBasePairRightPlaceholder && ((PlaceholderType)y.getRhsType()).getOrCons() == (byte)-1) {
lefteqRet.add(x);
}
else if (x.getPairOp() == PairOperator.SMALLERDOT) {
leftleOder.add(x);
}
}
Set<UnifyPair> righteq = new HashSet<>();
Set<UnifyPair> rightle = new HashSet<>();
Set<UnifyPair> rightlewc = new HashSet<>();
Set<UnifyPair> righteqOder = new HashSet<>();
Set<UnifyPair> righteqRet = new HashSet<>();
Set<UnifyPair> rightleOder = new HashSet<>();
for (var x : right) {
boolean isLeftPlaceholder = x.getLhsType() instanceof PlaceholderType;
boolean isRightPlaceholder = x.getRhsType() instanceof PlaceholderType;
boolean hasPlaceholder = isLeftPlaceholder || isRightPlaceholder;
if (isLeftPlaceholder && x.getPairOp() == PairOperator.EQUALSDOT) righteq.add(x);
if (hasPlaceholder && x.getPairOp() == PairOperator.SMALLERDOT) rightle.add(x);
if (hasPlaceholder && x.getPairOp() == PairOperator.SMALLERDOTWC) rightlewc.add(x);
UnifyPair y = x.getGroundBasePair();
boolean isBasePairLeftPlaceholder = y.getLhsType() instanceof PlaceholderType;
boolean isBasePairRightPlaceholder = y.getRhsType() instanceof PlaceholderType;
if (isBasePairLeftPlaceholder && !isBasePairRightPlaceholder && x.getPairOp() == PairOperator.EQUALSDOT) {
righteqOder.add(x);
}
else if (isBasePairRightPlaceholder && ((PlaceholderType)y.getRhsType()).getOrCons() == (byte)-1) {
righteqRet.add(x);
}
else if (x.getPairOp() == PairOperator.SMALLERDOT) {
rightleOder.add(x);
}
}
//context.logger().info(left.toString());
//Case 2
//if (lefteq.iterator().next().getLhsType().getName().equals("AJO")) {
// System.out.print("");
//}
//ODER-CONSTRAINT
Set<UnifyPair> leftBase = left.stream().map(x -> x.getGroundBasePair()).collect(Collectors.toCollection(HashSet::new));
Set<UnifyPair> rightBase = right.stream().map(x -> x.getGroundBasePair()).collect(Collectors.toCollection(HashSet::new));
// Set<UnifyPair> leftBase = left.stream().map(x -> x.getGroundBasePair()).collect(Collectors.toCollection(HashSet::new));
// Set<UnifyPair> rightBase = right.stream().map(x -> x.getGroundBasePair()).collect(Collectors.toCollection(HashSet::new));
Set<UnifyPair> lefteqOder = left.stream()
.filter(x -> { UnifyPair y = x.getGroundBasePair();
/*try {
((FiniteClosure)fc).logFile.write("leftBase: " + leftBase.toString() +"\n");
((FiniteClosure)fc).logFile.write("rightBase: " + rightBase.toString() +"\n\n");
((FiniteClosure)fc).logFile.write("left: " + left.toString() +"\n");
((FiniteClosure)fc).logFile.write("right: " + right.toString() +"\n\n");
((FiniteClosure)fc).logFile.write("y: " + y.toString() +"\n");
((FiniteClosure)fc).logFile.write("y.getLhsType() : " + y.getLhsType() .toString() +"\n\n");
((FiniteClosure)fc).logFile.write("y.getRhsType(): " + y.getRhsType().toString() +"\n");
((FiniteClosure)fc).logFile.write("x.getPairOp(): " + x.getPairOp().toString() +"\n\n");
}
catch (IOException ie) {
} */
return (y.getLhsType() instanceof PlaceholderType &&
!(y.getRhsType() instanceof PlaceholderType) &&
x.getPairOp() == PairOperator.EQUALSDOT);})
.collect(Collectors.toCollection(HashSet::new));
left.removeAll(lefteqOder);
Set<UnifyPair> righteqOder = right.stream()
.filter(x -> { UnifyPair y = x.getGroundBasePair();
return (y.getLhsType() instanceof PlaceholderType &&
!(y.getRhsType() instanceof PlaceholderType) &&
x.getPairOp() == PairOperator.EQUALSDOT);})
.collect(Collectors.toCollection(HashSet::new));
right.removeAll(righteqOder);
Set<UnifyPair> lefteqRet = left.stream()
.filter(x -> { UnifyPair y = x.getGroundBasePair();
return (y.getRhsType() instanceof PlaceholderType &&
((PlaceholderType)y.getRhsType()).getOrCons() == (byte)-1);})
.collect(Collectors.toCollection(HashSet::new));
left.removeAll(lefteqRet);
Set<UnifyPair> righteqRet = right.stream()
.filter(x -> { UnifyPair y = x.getGroundBasePair();
return (y.getRhsType() instanceof PlaceholderType &&
((PlaceholderType)y.getRhsType()).getOrCons() == (byte)-1);})
.collect(Collectors.toCollection(HashSet::new));
right.removeAll(righteqOder);
right.removeAll(righteqRet);
Set<UnifyPair> leftleOder = left.stream()
.filter(x -> (x.getPairOp() == PairOperator.SMALLERDOT))
.collect(Collectors.toCollection(HashSet::new));
Set<UnifyPair> rightleOder = right.stream()
.filter(x -> (x.getPairOp() == PairOperator.SMALLERDOT))
.collect(Collectors.toCollection(HashSet::new));
/*
synchronized(this) {
try {
@@ -272,89 +266,73 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
*/
Integer compareEq;
int compareEq;
if (lefteqOder.size() == 1 && righteqOder.size() == 1 && lefteqRet.size() == 1 && righteqRet.size() == 1) {
Match m = new Match();
if ((compareEq = compareEq(lefteqOder.iterator().next().getGroundBasePair(), righteqOder.iterator().next().getGroundBasePair())) == -1) {
ArrayList<UnifyPair> matchList =
rightleOder.stream().map(x -> {
UnifyPair leftElem = leftleOder.stream()
compareEq = compareEq(lefteqOder.iterator().next().getGroundBasePair(), righteqOder.iterator().next().getGroundBasePair());
if (compareEq == -1) {
ArrayList<UnifyPair> matchList =
rightleOder.stream().map(x -> {
UnifyPair leftElem = leftleOder.stream()
.filter(y -> y.getGroundBasePair().getLhsType().equals(x.getGroundBasePair().getLhsType()))
.findAny().get();
return new UnifyPair(x.getRhsType(), leftElem.getRhsType(), PairOperator.EQUALSDOT);})
.collect(Collectors.toCollection(ArrayList::new));
if (m.match(matchList).isPresent()) {
//try { ((FiniteClosure)fc).logFile.write("result1: -1 \n\n"); } catch (IOException ie) {}
return -1;
}
else {
//try { ((FiniteClosure)fc).logFile.write("result1: 0 \n\n"); } catch (IOException ie) {}
return 0;
}
.findAny().orElseThrow();
return new UnifyPair(x.getRhsType(), leftElem.getRhsType(), PairOperator.EQUALSDOT);}
)
.collect(Collectors.toCollection(ArrayList::new));
return (m.match(matchList).isPresent()) ? -1 : 0;
} else if (compareEq == 1) {
ArrayList<UnifyPair> matchList =
leftleOder.stream().map(x -> {
ArrayList<UnifyPair> matchList =
leftleOder.stream().map(x -> {
UnifyPair rightElem = rightleOder.stream()
.filter(y ->
y.getGroundBasePair().getLhsType().equals(x.getGroundBasePair().getLhsType()))
.findAny().get();
return new UnifyPair(x.getRhsType(), rightElem.getRhsType(), PairOperator.EQUALSDOT);})
.collect(Collectors.toCollection(ArrayList::new));
if (m.match(matchList).isPresent()) {
//try { ((FiniteClosure)fc).logFile.write("result2: 1 \n\n"); } catch (IOException ie) {}
return 1;
}
else {
//try { ((FiniteClosure)fc).logFile.write("result2: 0 \n\n"); } catch (IOException ie) {}
return 0;
}
.filter(y -> y.getGroundBasePair().getLhsType().equals(x.getGroundBasePair().getLhsType()))
.findAny().orElseThrow();
return new UnifyPair(x.getRhsType(), rightElem.getRhsType(), PairOperator.EQUALSDOT);}
)
.collect(Collectors.toCollection(ArrayList::new));
return (m.match(matchList).isPresent()) ? 1 : 0;
} else {
/*
synchronized(this) {
try {
((FiniteClosure)fc).logFile.write("leftBase: " + leftBase.toString() +"\n");
((FiniteClosure)fc).logFile.write("rightBase: " + rightBase.toString() +"\n\n");
((FiniteClosure)fc).logFile.write("left: " + left.toString() +"\n");
((FiniteClosure)fc).logFile.write("right: " + right.toString() +"\n\n");
((FiniteClosure)fc).logFile.write("lefteqOder: " + lefteqOder.toString() +"\n");
((FiniteClosure)fc).logFile.write("righteqOder: " + righteqOder.toString() +"\n\n");
((FiniteClosure)fc).logFile.write("lefteqRet: " + lefteqRet.toString() +"\n");
((FiniteClosure)fc).logFile.write("righteqRet: " + righteqRet.toString() +"\n\n");
((FiniteClosure)fc).logFile.write("leftleOder: " + leftleOder.toString() +"\n");
((FiniteClosure)fc).logFile.write("rightleOder: " + rightleOder.toString() +"\n\n");
((FiniteClosure)fc).logFile.write("result3: 0 \n\n");
((FiniteClosure)fc).logFile.flush();
}
catch (IOException ie) {
}
}
((FiniteClosure)fc).logFile.write("leftBase: " + leftBase.toString() +"\n");
((FiniteClosure)fc).logFile.write("rightBase: " + rightBase.toString() +"\n\n");
((FiniteClosure)fc).logFile.write("left: " + left.toString() +"\n");
((FiniteClosure)fc).logFile.write("right: " + right.toString() +"\n\n");
((FiniteClosure)fc).logFile.write("lefteqOder: " + lefteqOder.toString() +"\n");
((FiniteClosure)fc).logFile.write("righteqOder: " + righteqOder.toString() +"\n\n");
((FiniteClosure)fc).logFile.write("lefteqRet: " + lefteqRet.toString() +"\n");
((FiniteClosure)fc).logFile.write("righteqRet: " + righteqRet.toString() +"\n\n");
((FiniteClosure)fc).logFile.write("leftleOder: " + leftleOder.toString() +"\n");
((FiniteClosure)fc).logFile.write("rightleOder: " + rightleOder.toString() +"\n\n");
((FiniteClosure)fc).logFile.write("result3: 0 \n\n");
((FiniteClosure)fc).logFile.flush();
*/
return 0;
}
}
if (lefteq.size() == 1 && lefteq.iterator().next().getRhsType() instanceof ExtendsType && leftle.size() == 1 && righteq.size() == 0 && rightle.size() == 1) {
if (lefteq.size() == 1 && lefteq.iterator().next().getRhsType() instanceof ExtendsType && leftle.size() == 1 && righteq.isEmpty() && rightle.size() == 1) {
return 1;
}
//Case 2
if (lefteq.size() == 0 && leftle.size() == 1 && righteq.size() == 1 && righteq.iterator().next().getRhsType() instanceof ExtendsType && rightle.size() == 1) {
if (lefteq.isEmpty() && leftle.size() == 1 && righteq.size() == 1 && righteq.iterator().next().getRhsType() instanceof ExtendsType && rightle.size() == 1) {
return -1;
}
//Case 3
if (lefteq.size() == 1 && lefteq.iterator().next().getRhsType() instanceof SuperType && leftle.size() == 1 && righteq.size() == 0 && rightle.size() == 1) {
if (lefteq.size() == 1 && lefteq.iterator().next().getRhsType() instanceof SuperType && leftle.size() == 1 && righteq.isEmpty() && rightle.size() == 1) {
return -1;
}
//Case 3
if (lefteq.size() == 0 && leftle.size() == 1 && righteq.size() == 1 && righteq.iterator().next().getRhsType() instanceof SuperType && rightle.size() == 1) {
if (lefteq.isEmpty() && leftle.size() == 1 && righteq.size() == 1 && righteq.iterator().next().getRhsType() instanceof SuperType && rightle.size() == 1) {
return 1;
}
//Case 5
if (lefteq.size() == 1 && leftle.size() == 0 && righteq.size() == 1 && rightle.size() == 1) {
if (lefteq.size() == 1 && leftle.isEmpty() && righteq.size() == 1 && rightle.size() == 1) {
return -1;
}
//Case 5
if (lefteq.size() == 1 && leftle.size() == 1 && righteq.size() == 1 && rightle.size() == 0) {
if (lefteq.size() == 1 && leftle.size() == 1 && righteq.size() == 1 && rightle.isEmpty()) {
return 1;
}
//Case 5
@@ -362,7 +340,7 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
return 0;
}
// Only pairs a =. Theta
if (leftle.size() == 0 && rightle.size() == 0 && leftlewc.size() == 0 && rightlewc.size() ==0) {
if (leftle.isEmpty() && rightle.isEmpty() && leftlewc.isEmpty() && rightlewc.isEmpty()) {
Stream<UnifyPair> lseq = lefteq.stream(); //left.filter(x -> (x.getLhsType() instanceof PlaceholderType && x.getPairOp() == PairOperator.EQUALSDOT));
Stream<UnifyPair> rseq = righteq.stream(); //right.filter(x -> (x.getLhsType() instanceof PlaceholderType && x.getPairOp() == PairOperator.EQUALSDOT));
BinaryOperator<HashMap<UnifyType,UnifyPair>> combiner = (x,y) -> { x.putAll(y); return x;};
@@ -370,13 +348,10 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
lseq = lseq.filter(x -> !(hm.get(x.getLhsType()) == null));//CHECK AGAIN!!!!
lseq = lseq.filter(x -> !x.equals(hm.get(x.getLhsType()))); //Elements that are equal do not need to be compared
Optional<Integer> si = lseq.map(x -> compareEq(x, hm.get(x.getLhsType()))).reduce((x,y)-> { if (x == y) return x; else return 0; } );
if (!si.isPresent()) return 0;
else return si.get();
return si.orElse(0);
}
//Cases 1 and 4
if (lefteq.size() >= 1 && righteq.size() >= 1 && (leftlewc.size() > 0 || rightlewc.size() > 0)) {
if (lefteq.iterator().next().getLhsType().getName().equals("D"))
System.out.print("");
if (!lefteq.isEmpty() && !righteq.isEmpty() && (!leftlewc.isEmpty() || !rightlewc.isEmpty())) {
//Set<PlaceholderType> varsleft = lefteq.stream().map(x -> (PlaceholderType)x.getLhsType()).collect(Collectors.toCollection(HashSet::new));
//Set<PlaceholderType> varsright = righteq.stream().map(x -> (PlaceholderType)x.getLhsType()).collect(Collectors.toCollection(HashSet::new));
//filter out the pair a = Theta that was generated by a <. Theta' (only relevant in case 1); other substitutions are filtered out
@@ -397,12 +372,12 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
//TODO: For wildcards, the correct compare is not called here PL 18-04-20
Pair<Integer, Set<UnifyPair>> int_Unifier = compare(lseq.getRhsType(), rseq.getRhsType());
Unifier uni = new Unifier();
int_Unifier.getValue().get().forEach(x -> uni.add((PlaceholderType) x.getLhsType(), x.getRhsType()));
if (!lseq.getRhsType().getName().equals(rseq.getRhsType().getName())
|| leftlewc.size() == 0 || rightlewc.size() == 0) return int_Unifier.getKey();
int_Unifier.getValue().orElseThrow().forEach(x -> uni.add((PlaceholderType) x.getLhsType(), x.getRhsType()));
if (!lseq.getRhsType().getName().equals(rseq.getRhsType().getName())
|| leftlewc.isEmpty() || rightlewc.isEmpty()) return int_Unifier.getKey();
else {
Set <UnifyPair> lsleuni = leftlewc.stream().map(x -> uni.apply(x)).collect(Collectors.toCollection(HashSet::new));
Set <UnifyPair> rsleuni = rightlewc.stream().map(x -> uni.apply(x)).collect(Collectors.toCollection(HashSet::new));
Set <UnifyPair> lsleuni = leftlewc.stream().map(uni::apply).collect(Collectors.toCollection(HashSet::new));
Set <UnifyPair> rsleuni = rightlewc.stream().map(uni::apply).collect(Collectors.toCollection(HashSet::new));
BinaryOperator<HashMap<UnifyType,UnifyPair>> combiner = (x,y) -> { x.putAll(y); return x;};
HashMap<UnifyType,UnifyPair> hm;
@@ -411,19 +386,19 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
if (leftlewc.iterator().next().getLhsType() instanceof PlaceholderType) {
hm = rsleuni.stream().reduce(new HashMap<UnifyType,UnifyPair>(), (x, y)-> { x.put(y.getLhsType(),y); return x; }, combiner);
Stream<UnifyPair> lslewcstr = lsleuni.stream().filter(x -> !(hm.get(x.getLhsType()) == null));
si = lslewcstr.map(x -> fc.compare(x.getRhsType(), hm.get(x.getLhsType()).getRhsType(), PairOperator.SMALLERDOTWC)).reduce((x,y)-> { if (x == y) return x; else return 0; } );
si = lslewcstr.map(x -> fc.compare(x.getRhsType(), hm.get(x.getLhsType()).getRhsType(), PairOperator.SMALLERDOTWC, context)).reduce((x,y)-> { if (x == y) return x; else return 0; } );
}
//Case 4
else {
hm = rsleuni.stream().reduce(new HashMap<UnifyType,UnifyPair>(), (x, y)-> { x.put(y.getRhsType(),y); return x; }, combiner);
Stream<UnifyPair> lslewcstr = lsleuni.stream().filter(x -> !(hm.get(x.getRhsType()) == null));
si = lslewcstr.map(x -> fc.compare(x.getLhsType(), hm.get(x.getRhsType()).getLhsType(), PairOperator.SMALLERDOTWC)).reduce((x,y)-> { if (x == y) return x; else return 0; } );
si = lslewcstr.map(x -> fc.compare(x.getLhsType(), hm.get(x.getRhsType()).getLhsType(), PairOperator.SMALLERDOTWC, context)).reduce((x,y)-> { if (x == y) return x; else return 0; } );
}
if (!si.isPresent()) return 0;
else return si.get();
return si.orElse(0);
}
} else {
if (leftlewc.size() > 0) {
if (!leftlewc.isEmpty()) {
/*
Set<UnifyPair> subst;
subst = leftlewc.stream().map(x -> {
if (x.getLhsType() instanceof PlaceholderType) {
@@ -432,6 +407,7 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
else {
return new UnifyPair(x.getRhsType(), x.getLhsType(), PairOperator.EQUALSDOT);
}}).collect(Collectors.toCollection(HashSet::new));
*/
Unifier uni = new Unifier();
lseq = uni.apply(lseq);
}
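Editor's note: the rewritten compare() above classifies the left and right pair sets with one loop per set instead of six separate stream pipelines. A standalone sketch of that single-pass bucketing pattern, using a hypothetical Item record rather than the project's UnifyPair:

import java.util.HashSet;
import java.util.List;
import java.util.Set;

// Standalone sketch (hypothetical Item type, not project code): classify a
// collection into several buckets in one pass instead of running one stream
// filter per bucket.
record Item(String op, boolean hasPlaceholder) {}

class BucketDemo {
    public static void main(String[] args) {
        List<Item> input = List.of(
                new Item("EQUALSDOT", true),
                new Item("SMALLERDOT", true),
                new Item("SMALLERDOTWC", false));

        Set<Item> eq = new HashSet<>();
        Set<Item> le = new HashSet<>();
        Set<Item> lewc = new HashSet<>();

        for (Item x : input) {   // one iteration feeds all buckets
            if (x.hasPlaceholder() && x.op().equals("EQUALSDOT")) eq.add(x);
            if (x.hasPlaceholder() && x.op().equals("SMALLERDOT")) le.add(x);
            if (x.hasPlaceholder() && x.op().equals("SMALLERDOTWC")) lewc.add(x);
        }
        System.out.println(eq.size() + " / " + le.size() + " / " + lewc.size());
    }
}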

View File

@@ -46,4 +46,15 @@ public enum PairOperator {
default: return "=."; // EQUALSDOT
}
}
public static PairOperator fromString(String op) {
switch (op) {
case "<": return SMALLER;
case "<.": return SMALLERDOT;
case "<!=.": return SMALLERNEQDOT;
case "<.?": return SMALLERDOTWC;
case "=.": return EQUALSDOT;
default: throw new RuntimeException("Unknown PairOperator: " + op);
}
}
}
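Editor's note: fromString above is intended as the inverse of the operator's symbol form (the toString switch ends with "=." for EQUALSDOT). A small round-trip sketch; it assumes each constant's toString output matches the cases listed in fromString, which is not verified here:

// Hypothetical round-trip check; assumed to sit in the same package as
// PairOperator, so no import is needed.
class PairOperatorRoundTrip {
    public static void main(String[] args) {
        for (String symbol : new String[] { "<", "<.", "<!=.", "<.?", "=." }) {
            PairOperator op = PairOperator.fromString(symbol);
            System.out.println(symbol + " -> " + op);   // relies on the enum's toString
        }
    }
}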

View File

@@ -1,119 +1,96 @@
package de.dhbwstuttgart.typeinference.unify.model;
import java.io.File;
import java.io.FileWriter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Random;
import java.util.Set;
import de.dhbwstuttgart.typeinference.unify.distributeVariance;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Set;
/**
* An unbounded placeholder type.
* @author Florian Steurer
*/
public final class PlaceholderType extends UnifyType{
/**
* Static list containing the names of all existing placeholders.
* Used for generating fresh placeholders.
*/
public static final ArrayList<String> EXISTING_PLACEHOLDERS = new ArrayList<String>();
/**
* Prefix of auto-generated placeholder names.
*/
protected static String nextName = "gen_";
/**
* Random number generator used to generate fresh placeholder name.
*/
protected static Random rnd = new Random(43558747548978L);
/**
* True if this object was auto-generated, false if this object was user-generated.
*/
private final boolean IsGenerated;
/**
* isWildcardable specifies whether a wildcard type may be assigned to the PlaceholderType
*/
private boolean wildcardable = true;
/**
* innerType specifies whether the type of the PlaceholderType is used inside a type constructor
*/
private boolean innerType = false;
/**
* variance shows the variance of the pair
* 1: contravariant
* -1: covariant
* 0: invariant
* PL 2018-03-21
*/
private int variance = 0;
/*
* For or-constraints:
* orCons = 1: receiver
* orCons = 0: argument or no or-constraint
* orCons = -1: return type
*/
private byte orCons = 0;
/**
* Creates a new placeholder type with the specified name.
*/
public PlaceholderType(String name) {
public PlaceholderType(String name, PlaceholderRegistry placeholderRegistry) {
super(name, new TypeParams());
EXISTING_PLACEHOLDERS.add(name); // Add to list of existing placeholder names
placeholderRegistry.addPlaceholder(name); // Add to list of existing placeholder names
IsGenerated = false; // This type is user generated
}
public PlaceholderType(String name, int variance) {
public PlaceholderType(String name, int variance, PlaceholderRegistry placeholderRegistry) {
super(name, new TypeParams());
EXISTING_PLACEHOLDERS.add(name); // Add to list of existing placeholder names
placeholderRegistry.addPlaceholder(name); // Add to list of existing placeholder names
IsGenerated = false; // This type is user generated
this.variance = variance;
}
/**
* Creates a new placeholder type
* @param isGenerated true if this placeholder is auto-generated, false if it is user-generated.
*/
protected PlaceholderType(String name, boolean isGenerated) {
protected PlaceholderType(String name, boolean isGenerated, PlaceholderRegistry placeholderRegistry) {
super(name, new TypeParams());
EXISTING_PLACEHOLDERS.add(name); // Add to list of existing placeholder names
placeholderRegistry.addPlaceholder(name); // Add to list of existing placeholder names
IsGenerated = isGenerated;
}
public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) {
return visitor.visit(this, ht);
}
/**
* Creates a fresh placeholder type with a name that does not exist so far.
* Creates a fresh placeholder type with a name that does not exist so far, built from the characters A-Z.
* A user could later instantiate a type using the same name that is equivalent to this type.
* @return A fresh placeholder type.
*/
public synchronized static PlaceholderType freshPlaceholder() {
String name = nextName + (char) (rnd.nextInt(22) + 97); // Returns random char between 'a' and 'z'
// Add random chars while the name is in use.
while(EXISTING_PLACEHOLDERS.contains(name)) {
name += (char) (rnd.nextInt(22) + 97); // Returns random char between 'a' and 'z'
}
return new PlaceholderType(name, true);
public static PlaceholderType freshPlaceholder(PlaceholderRegistry placeholderRegistry) {
var name = placeholderRegistry.generateFreshPlaceholderName();
return new PlaceholderType(name, true, placeholderRegistry);
}
/**
* True if this placeholder is auto-generated, false if it is user-generated.
*/
@@ -124,51 +101,51 @@ public final class PlaceholderType extends UnifyType{
public void setVariance(int v) {
variance = v;
}
public int getVariance() {
return variance;
}
public void reversVariance() {
if (variance == 1) {
setVariance(-1);
} else {
if (variance == -1) {
setVariance(1);
}}
if (variance == -1) {
setVariance(1);
}}
}
public void setOrCons(byte i) {
orCons = i;
}
public byte getOrCons() {
return orCons;
}
public Boolean isWildcardable() {
return wildcardable;
}
public void disableWildcardtable() {
wildcardable = false;
}
public void enableWildcardtable() {
wildcardable = true;
}
public void setWildcardtable(Boolean wildcardable) {
public void setWildcardtable(boolean wildcardable) {
this.wildcardable = wildcardable;
}
public Boolean isInnerType() {
public boolean isInnerType() {
return innerType;
}
public void setInnerType(Boolean innerType) {
public void setInnerType(boolean innerType) {
this.innerType = innerType;
}
@Override
Set<UnifyType> smArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
return fc.smArg(this, fBounded);
@@ -178,17 +155,17 @@ public final class PlaceholderType extends UnifyType{
Set<UnifyType> grArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
return fc.grArg(this, fBounded);
}
@Override
public UnifyType setTypeParams(TypeParams newTp) {
return this; // Placeholders never have params.
}
@Override
public int hashCode() {
return typeName.hashCode();
}
@Override
UnifyType apply(Unifier unif) {
if(unif.hasSubstitute(this)) {
@@ -200,15 +177,15 @@ public final class PlaceholderType extends UnifyType{
}
return this;
}
@Override
public boolean equals(Object obj) {
if(!(obj instanceof PlaceholderType))
return false;
return ((PlaceholderType) obj).getName().equals(typeName);
}
@Override
public Collection<PlaceholderType> getInvolvedPlaceholderTypes() {
@@ -216,4 +193,39 @@ public final class PlaceholderType extends UnifyType{
ret.add(this);
return ret;
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("name", this.typeName);
// Placeholders never make use of the typeParams
serialized.put("isGenerated", IsGenerated);
serialized.put("wildcardable", wildcardable);
serialized.put("isInnerType", innerType);
serialized.put("variance", variance);
serialized.put("orCons", orCons);
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static PlaceholderType fromSerial(SerialMap data, UnifyContext context) {
var name = data.getValue("name").getOf(String.class);
var isGenerated = data.getValue("isGenerated").getOf(Boolean.class);
var wildcardable = data.getValue("wildcardable").getOf(Boolean.class);
var isInnerType = data.getValue("isInnerType").getOf(Boolean.class);
var variance = data.getValue("variance").getOf(Integer.class);
var orCons = data.getValue("orCons").getOf(Number.class).byteValue();
var placeholderType = new PlaceholderType(name, isGenerated, context.placeholderRegistry());
placeholderType.setWildcardtable(wildcardable);
placeholderType.setInnerType(isInnerType);
placeholderType.setVariance(variance);
placeholderType.setOrCons(orCons);
return placeholderType;
}
}
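Editor's note: freshPlaceholder now reserves names through the shared PlaceholderRegistry instead of the old static EXISTING_PLACEHOLDERS list, so concurrently running unification contexts cannot hand out the same generated name twice. A standalone sketch of such a registry (hypothetical FreshNameRegistry, not the project's PlaceholderRegistry):

import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;

// Standalone sketch of a registry-backed fresh-name generator; it only
// illustrates the idea of centralizing name reservation in one object.
class FreshNameRegistry {
    private final Set<String> used = ConcurrentHashMap.newKeySet();
    private final AtomicLong counter = new AtomicLong();

    void addPlaceholder(String name) {
        used.add(name);
    }

    String generateFreshPlaceholderName() {
        String name;
        do {
            name = "gen_" + counter.incrementAndGet();
        } while (!used.add(name));   // add() is atomic, so concurrent callers never share a name
        return name;
    }
}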

View File

@@ -1,6 +1,10 @@
package de.dhbwstuttgart.typeinference.unify.model;
import java.util.HashMap;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.Set;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
@@ -11,45 +15,51 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor;
* @author Florian Steurer
*
*/
public class ReferenceType extends UnifyType {
public class ReferenceType extends UnifyType implements ISerializableData {
/**
* The buffered hashCode
*/
private final int hashCode;
/**
* gibt an, ob der ReferenceType eine generische Typvariable ist
*/
private final boolean genericTypeVar;
public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) {
return visitor.visit(this, ht);
}
public ReferenceType(String name, Boolean genericTypeVar) {
public ReferenceType(String name, boolean genericTypeVar) {
super(name, new TypeParams());
hashCode = 31 + 17 * typeName.hashCode() + 17 * typeParams.hashCode();
this.genericTypeVar = genericTypeVar;
}
public ReferenceType(String name, UnifyType... params) {
super(name, new TypeParams(params));
hashCode = 31 + 17 * typeName.hashCode() + 17 * typeParams.hashCode();
genericTypeVar = false;
}
public ReferenceType(String name, TypeParams params) {
super(name, params);
hashCode = 31 + 17 * typeName.hashCode() + 17 * typeParams.hashCode();
genericTypeVar = false;
}
public boolean isGenTypeVar () {
private ReferenceType(String name, TypeParams params, boolean genericTypeVar) {
super(name, params);
hashCode = 31 + 17 * typeName.hashCode() + 17 * typeParams.hashCode();
this.genericTypeVar = genericTypeVar;
}
public boolean isGenTypeVar() {
return genericTypeVar;
}
@Override
Set<UnifyType> smArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
return fc.smArg(this, fBounded);
@@ -63,38 +73,59 @@ public class ReferenceType extends UnifyType {
@Override
UnifyType apply(Unifier unif) {
TypeParams newParams = typeParams.apply(unif);
if(newParams.hashCode() == typeParams.hashCode() && newParams.equals(typeParams))
if (newParams.hashCode() == typeParams.hashCode() && newParams.equals(typeParams))
return this;
return new ReferenceType(typeName, newParams);
}
@Override
public UnifyType setTypeParams(TypeParams newTp) {
if(newTp.hashCode() == typeParams.hashCode() && newTp.equals(typeParams))
if (newTp.hashCode() == typeParams.hashCode() && newTp.equals(typeParams))
return this; // reduced the amount of objects created
return new ReferenceType(typeName, newTp);
}
@Override
public int hashCode() {
return hashCode;
}
@Override
public boolean equals(Object obj) {
if(!(obj instanceof ReferenceType))
if (!(obj instanceof ReferenceType))
return false;
if(obj.hashCode() != this.hashCode())
if (obj.hashCode() != this.hashCode())
return false;
ReferenceType other = (ReferenceType) obj;
if(!other.getName().equals(typeName))
if (!other.getName().equals(typeName))
return false;
return other.getTypeParams().equals(typeParams);
}
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("name", this.typeName);
serialized.put("params", SerialList.fromMapped(this.typeParams.get(), param -> param.toSerial(keyStorage)));
serialized.put("isGenericTypeVar", this.genericTypeVar);
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static ReferenceType fromSerial(SerialMap data, UnifyContext context) {
var name = data.getValue("name").getOf(String.class);
var params = data.getList("params").assertListOfMaps().stream().map(
paramData -> UnifyType.fromSerial(paramData, context)).toList();
var isGenericTypeVar = data.getValue("isGenericTypeVar").getOf(Boolean.class);
return new ReferenceType(name, new TypeParams(params), isGenericTypeVar);
}
}

View File

@@ -1,6 +1,9 @@
package de.dhbwstuttgart.typeinference.unify.model;
import java.util.HashMap;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.Set;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
@@ -85,4 +88,26 @@ public final class SuperType extends WildcardType {
SuperType other = (SuperType) obj;
return other.getSuperedType().equals(wildcardedType);
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("wildcardedType", this.wildcardedType.toSerial(keyStorage));
serialized.put("params", SerialList.fromMapped(this.typeParams.get(), param -> param.toSerial(keyStorage)));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static SuperType fromSerial(SerialMap data, UnifyContext context) {
var params = data.getList("params").assertListOfMaps().stream().map(
paramData -> UnifyType.fromSerial(paramData, context)).toList();
var superType = new SuperType(
UnifyType.fromSerial(data.getMap("wildcardedType"), context)
);
superType.setTypeParams(new TypeParams(params));
return superType;
}
}

View File

@@ -174,9 +174,9 @@ public final class TypeParams implements Iterable<UnifyType>{
@Override
public String toString() {
String res = "";
StringBuilder res = new StringBuilder();
for(UnifyType t : typeParams)
res += t + ",";
res.append(t).append(",");
return "<" + res.substring(0, res.length()-1) + ">";
}
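Editor's note: the StringBuilder rewrite above avoids allocating a new String on every loop iteration. An alternative with the same output, shown only as a standalone sketch and not what the commit uses, is Collectors.joining, which also removes the manual substring trimming:

import java.util.List;
import java.util.stream.Collectors;

// Standalone sketch: the same "<a,b,c>" formatting via Collectors.joining.
class JoinDemo {
    public static void main(String[] args) {
        List<String> typeParams = List.of("T", "String", "Integer");   // stand-ins for UnifyType names
        String res = typeParams.stream()
                .map(Object::toString)
                .collect(Collectors.joining(",", "<", ">"));
        System.out.println(res);   // prints <T,String,Integer>
    }
}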

View File

@@ -50,8 +50,7 @@ public class Unifier implements Function<UnifyType, UnifyType>, Iterable<Entry<P
Unifier tempU = new Unifier(source, target);
// Every new substitution must be applied to previously added substitutions
// otherwise the unifier needs to be applied multiple times to unify two terms
for(PlaceholderType pt : substitutions.keySet())
substitutions.put(pt, substitutions.get(pt).apply(tempU));
substitutions.replaceAll((pt, ut) -> ut.apply(tempU));
substitutions.put(source, target);
}
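Editor's note: the comment above is the key invariant of Unifier.add: every existing right-hand side is rewritten with the new substitution before it is stored, so applying the unifier once is enough. A standalone sketch with plain strings (hypothetical SubstitutionDemo, not the project's Unifier):

import java.util.HashMap;
import java.util.Map;

// Standalone sketch of why Unifier.add rewrites earlier substitutions:
// composing them eagerly means one application of the map suffices.
class SubstitutionDemo {
    public static void main(String[] args) {
        Map<String, String> subst = new HashMap<>();
        subst.put("a", "List<b>");

        // adding b -> Integer: first apply it to every existing right-hand side ...
        String source = "b";
        String target = "Integer";
        subst.replaceAll((lhs, rhs) -> rhs.replace(source, target));
        // ... then record the new mapping itself
        subst.put(source, target);

        // both entries are now ground: a -> List<Integer>, b -> Integer
        System.out.println(subst);
    }
}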
@@ -93,8 +92,7 @@ public class Unifier implements Function<UnifyType, UnifyType>, Iterable<Entry<P
// }
//}
if (!(p.getLhsType().equals(newLhs)) || !(p.getRhsType().equals(newRhs))) {//Die Anwendung von this hat was veraendert PL 2018-04-01
Set<UnifyPair> suniUnifyPair = new HashSet<>();
suniUnifyPair.addAll(thisAsPair.getAllSubstitutions());
Set<UnifyPair> suniUnifyPair = new HashSet<>(thisAsPair.getAllSubstitutions());
suniUnifyPair.add(thisAsPair);
if (p.getLhsType() instanceof PlaceholderType //&& newLhs instanceof PlaceholderType entfernt PL 2018-04-13
&& p.getPairOp() == PairOperator.EQUALSDOT) {
@@ -172,13 +170,13 @@ public class Unifier implements Function<UnifyType, UnifyType>, Iterable<Entry<P
@Override
public String toString() {
String result = "{ ";
StringBuilder result = new StringBuilder("{ ");
for(Entry<PlaceholderType, UnifyType> entry : substitutions.entrySet())
result += "(" + entry.getKey() + " -> " + entry.getValue() + "), ";
result.append("(").append(entry.getKey()).append(" -> ").append(entry.getValue()).append("), ");
if(!substitutions.isEmpty())
result = result.substring(0, result.length()-2);
result += " }";
return result;
result = new StringBuilder(result.substring(0, result.length() - 2));
result.append(" }");
return result.toString();
}
@Override

View File

@@ -1,23 +1,26 @@
package de.dhbwstuttgart.typeinference.unify.model;
import com.google.common.collect.ObjectArrays;
import de.dhbwstuttgart.parser.SourceLoc;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import org.antlr.v4.runtime.Token;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialUUID;
import de.dhbwstuttgart.typeinference.constraints.IConstraintElement;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
/**
* A pair which contains two types and an operator, e.g. (Integer <. a).
* @author Florian Steurer
*/
public class UnifyPair {
public class UnifyPair implements IConstraintElement, ISerializableData {
private SourceLoc location;
@@ -151,8 +154,7 @@ public class UnifyPair {
}
public Set<UnifyPair> getAllSubstitutions () {
Set<UnifyPair> ret = new HashSet<>();
ret.addAll(new ArrayList<>(getSubstitution()));
Set<UnifyPair> ret = new HashSet<>(new ArrayList<>(getSubstitution()));
if (basePair != null) {
ret.addAll(new ArrayList<>(basePair.getAllSubstitutions()));
}
@@ -199,7 +201,7 @@ public class UnifyPair {
}
public Boolean wrongWildcard() {
public boolean wrongWildcard() {
return lhs.wrongWildcard() || rhs.wrongWildcard();
}
@@ -243,16 +245,16 @@ public class UnifyPair {
public String toString() {
String ret = "";
if (lhs instanceof PlaceholderType) {
ret = new Integer(((PlaceholderType)lhs).getVariance()).toString() + " "
ret = Integer.valueOf(((PlaceholderType)lhs).getVariance()).toString() + " "
+ "WC: " + ((PlaceholderType)lhs).isWildcardable()
+ ", IT: " + ((PlaceholderType)lhs).isInnerType();
}
if (rhs instanceof PlaceholderType) {
ret = ret + ", " + new Integer(((PlaceholderType)rhs).getVariance()).toString() + " "
ret = ret + ", " + Integer.valueOf(((PlaceholderType)rhs).getVariance()).toString() + " "
+ "WC: " + ((PlaceholderType)rhs).isWildcardable()
+ ", IT: " + ((PlaceholderType)rhs).isInnerType();
}
var res = "(" + lhs + " " + pairOp + " " + rhs + ", " + ret + ")"; //+ ", [" + getfBounded().toString()+ "])";
var res = "(UP: " + lhs + " " + pairOp + " " + rhs + ", " + ret + ")"; //+ ", [" + getfBounded().toString()+ "])";
var location = this.getLocation();
if (location != null) {
res += "@" + location.line() + " in " + location.file();
@@ -269,6 +271,58 @@ public class UnifyPair {
return ret;
}
*/
private String serialUUID = null;
public SerialUUID toSerial(KeyStorage keyStorage) {
final String uuid = serialUUID == null ? keyStorage.getIdentifier() : serialUUID;
if (serialUUID == null) serialUUID = uuid;
if (!keyStorage.isAlreadySerialized(uuid)) {
SerialMap serialized = new SerialMap();
keyStorage.putSerialized(uuid, serialized);
serialized.put("lhs", this.lhs.toSerial(keyStorage));
serialized.put("rhs", this.rhs.toSerial(keyStorage));
serialized.put("op", this.pairOp.toString());
serialized.put("basePair", this.basePair == null ? null : this.basePair.toSerial(keyStorage));
serialized.put("location", this.location == null ? null : this.location.toSerial(keyStorage));
serialized.put("substitution", SerialList.fromMapped(this.substitution, unifyPair -> unifyPair.toSerial(keyStorage)));
serialized.put("fBounded", SerialList.fromMapped(this.fBounded, fbounded -> fbounded.toSerial(keyStorage)));
}
return new SerialUUID(uuid);
}
public static UnifyPair fromSerial(SerialUUID serialUUID, UnifyContext context, KeyStorage keyStorage) {
String uuid = serialUUID.uuid;
if (!keyStorage.isAlreadyUnserialized(uuid)) {
SerialMap data = keyStorage.getSerialized(uuid);
SerialMap lhsData = data.getMap("lhs");
SerialMap rhsData = data.getMap("rhs");
String opData = data.getValue("op").getOf(String.class);
UnifyPair pair = new UnifyPair(
UnifyType.fromSerial(lhsData, context),
UnifyType.fromSerial(rhsData, context),
PairOperator.fromString(opData)
);
// put the object into the storage before unserializing basePair recursively
keyStorage.putUnserialized(uuid, pair);
SerialList<SerialUUID> substitutionData = data.getList("substitution").assertListOfUUIDs();
SerialList<SerialMap> fBoundedData = data.getList("fBounded").assertListOfMaps();
SerialUUID basePairData = data.getUUIDOrNull("basePair");
SerialMap locationData = data.getMapOrNull("location");
pair.substitution = substitutionData.stream().map(substData -> UnifyPair.fromSerial(substData, context, keyStorage)).collect(Collectors.toSet());
pair.fBounded = fBoundedData.stream().map(fBoundData -> UnifyType.fromSerial(fBoundData, context)).collect(Collectors.toSet());
if (basePairData != null) {
pair.basePair = UnifyPair.fromSerial(basePairData, context, keyStorage);
}
if (locationData != null) {
pair.location = SourceLoc.fromSerial(locationData);
}
}
return keyStorage.getUnserialized(uuid, UnifyPair.class);
}
}
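Editor's note: toSerial registers the pair in the KeyStorage under its UUID before descending into substitution and basePair, and fromSerial does the same on the way back, which is what keeps shared or mutually referencing pairs from being serialized twice or recursing forever. A standalone sketch of that register-before-recursing pattern with hypothetical Node and Store types:

import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.Map;
import java.util.UUID;

// Standalone sketch of serialize-by-reference with cycle protection: an object
// is registered under its id *before* its children are visited.
class Node {
    String name;
    Node child;   // may point back to an ancestor
}

class Store {
    private final Map<Node, String> ids = new IdentityHashMap<>();
    private final Map<String, Map<String, Object>> serialized = new HashMap<>();

    String toSerial(Node n) {
        String id = ids.computeIfAbsent(n, k -> UUID.randomUUID().toString());
        if (!serialized.containsKey(id)) {
            Map<String, Object> data = new HashMap<>();
            serialized.put(id, data);                                       // register first ...
            data.put("name", n.name);
            data.put("child", n.child == null ? null : toSerial(n.child));  // ... then recurse safely
        }
        return id;   // callers store only the reference
    }
}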

View File

@@ -1,12 +1,14 @@
package de.dhbwstuttgart.typeinference.unify.model;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Set;
import de.dhbwstuttgart.syntaxtree.StatementVisitor;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor;
@@ -14,7 +16,7 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor;
* Represents a java type.
* @author Florian Steurer
*/
public abstract class UnifyType {
public abstract class UnifyType implements ISerializableData {
/**
* The name of the type, e.g. "Integer", "? extends Integer" or "List" for (List<T>)
@@ -29,7 +31,7 @@ public abstract class UnifyType {
/**
* Creates a new instance
* @param name Name of the type (e.g. List for List<T>, Integer or ? extends Integer)
* @param typeParams Parameters of the type (e.g. <T> for List<T>)
* @param p Parameters of the type (e.g. <T> for List<T>)
*/
protected UnifyType(String name, TypeParams p) {
typeName = name;
@@ -87,23 +89,21 @@ public abstract class UnifyType {
@Override
public String toString() {
String params = "";
StringBuilder params = new StringBuilder();
if(typeParams.size() != 0) {
for(UnifyType param : typeParams)
params += param.toString() + ",";
params = "<" + params.substring(0, params.length()-1) + ">";
params.append(param.toString()).append(",");
params = new StringBuilder("<" + params.substring(0, params.length() - 1) + ">");
}
return typeName + params;
}
public Collection<PlaceholderType> getInvolvedPlaceholderTypes() {
ArrayList<PlaceholderType> ret = new ArrayList<>();
ret.addAll(typeParams.getInvolvedPlaceholderTypes());
return ret;
return new ArrayList<>(typeParams.getInvolvedPlaceholderTypes());
}
public Boolean wrongWildcard() {//default
public boolean wrongWildcard() {//default
return false;
}
@@ -114,7 +114,40 @@ public abstract class UnifyType {
@Override
public boolean equals(Object obj) {
if(obj == null)return false;
if(obj == null) return false;
return this.toString().equals(obj.toString());
}
}
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
String type = switch (this) {
case FunInterfaceType _ -> "funi";
case ReferenceType _ -> "ref";
case ExtendsType _ -> "ext";
case SuperType _ -> "sup";
case FunNType _ -> "funn";
case PlaceholderType _ -> "tph";
default -> throw new RuntimeException("No type defined for UnifyType of class " + this.getClass().getName());
};
serialized.put("type", type);
// we only insert null for the object and expect the child classes to call this and override the value with themselves
serialized.put("object", SerialValue.NULL);
return serialized;
}
public static UnifyType fromSerial(SerialMap data, UnifyContext context) {
var type = data.getValue("type").getOf(String.class);
var object = data.getMap("object");
return switch (type) {
case "funi" -> FunInterfaceType.fromSerial(object, context);
case "ref" -> ReferenceType.fromSerial(object, context);
case "ext" -> ExtendsType.fromSerial(object, context);
case "sup" -> SuperType.fromSerial(object, context);
case "funn" -> FunNType.fromSerial(object, context);
case "tph" -> PlaceholderType.fromSerial(object, context);
default -> throw new RuntimeException("Could not unserialize class of unhandled type " + type);
};
}
}
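Editor's note: every UnifyType is written as an envelope of the form {type: tag, object: subclass fields}; the base class stamps the tag, each subclass overrides toSerial to fill in the object map, and fromSerial dispatches on the tag. A minimal standalone sketch of the same tagged-envelope idea with plain maps (hypothetical EnvelopeDemo, not project code):

import java.util.HashMap;
import java.util.Map;

// Standalone sketch of the {"type": tag, "object": payload} envelope used above.
class EnvelopeDemo {
    static Map<String, Object> toSerial(String tag, Map<String, Object> payload) {
        Map<String, Object> envelope = new HashMap<>();
        envelope.put("type", tag);          // which subclass wrote this
        envelope.put("object", payload);    // that subclass's own fields
        return envelope;
    }

    static String describe(Map<String, Object> envelope) {
        String tag = (String) envelope.get("type");
        return switch (tag) {               // mirrors UnifyType.fromSerial's dispatch
            case "ref" -> "reference type";
            case "tph" -> "placeholder type";
            default -> throw new IllegalArgumentException("unknown tag " + tag);
        };
    }

    public static void main(String[] args) {
        Map<String, Object> payload = new HashMap<>();
        payload.put("name", "Integer");
        System.out.println(describe(toSerial("ref", payload)));
    }
}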

View File

@@ -1,5 +1,6 @@
package de.dhbwstuttgart.typeinference.unify.model;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import java.util.ArrayList;
import java.util.Collection;
@@ -7,7 +8,7 @@ import java.util.Collection;
* A wildcard type that is either an ExtendsType or a SuperType.
* @author Florian Steurer
*/
public abstract class WildcardType extends UnifyType {
public abstract class WildcardType extends UnifyType implements ISerializableData {
/**
* The wildcarded type, e.g. Integer for ? extends Integer. Never a wildcard type itself.
@@ -41,7 +42,7 @@ public abstract class WildcardType extends UnifyType {
}
@Override
public Boolean wrongWildcard () {//This is an error
public boolean wrongWildcard () {//This is an error
return (wildcardedType instanceof WildcardType);
}
@@ -65,8 +66,7 @@ public abstract class WildcardType extends UnifyType {
@Override
public Collection<PlaceholderType> getInvolvedPlaceholderTypes() {
ArrayList<PlaceholderType> ret = new ArrayList<>();
ret.addAll(wildcardedType.getInvolvedPlaceholderTypes());
ArrayList<PlaceholderType> ret = new ArrayList<>(wildcardedType.getInvolvedPlaceholderTypes());
return ret;
}
}

View File

@@ -0,0 +1,322 @@
package de.dhbwstuttgart.util;
import com.diogonunes.jcolor.Attribute;
import de.dhbwstuttgart.core.ConsoleInterface;
import de.dhbwstuttgart.server.SocketServer;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.nio.file.Path;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Arrays;
import java.util.Objects;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import static com.diogonunes.jcolor.Ansi.colorize;
public class Logger {
public static final Logger NULL_LOGGER = new NullLogger();
private static final DateTimeFormatter timeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
protected final Writer writer;
protected final String prefix;
public Logger() {
this(null, "");
}
public Logger(String prefix) {
this(null, prefix);
}
public Logger(Writer writer, String prefix) {
this.prefix = (Objects.equals(prefix, "")) ? "" : "["+prefix+"] ";
this.writer = writer;
}
/**
* Create a new logger object from the path provided
*
* @param filePath The path to the output file. Recommended file extension ".log"
* @return The Logger object for this output file
*/
public static Logger forFile(String filePath, String prefix) {
File file = Path.of(filePath).toFile();
try {
Writer fileWriter = new FileWriter(file);
return new Logger(fileWriter, prefix);
}
catch (IOException exception) {
throw new RuntimeException("Failed to create Logger for file " + filePath, exception);
}
}
/**
* Create a new logger object that inherits the writer of the given logger object
*
* @param logger The old logger object that will provide the writer
* @param newPrefix The prefix for the new logger object
* @return The new logger object that uses the given prefix and the inherited writer
*/
public static Logger inherit(Logger logger, String newPrefix) {
return new Logger(logger.writer, newPrefix);
}
/**
* Tint the prefix in the color of the logLevel
* @param logLevel The logLevel to set the tint to
* @return The tinted string (using ANSI sequences)
*/
protected String getPrefix(LogLevel logLevel) {
String fullPrefix = prefix + "[" + logLevel + "] ";
return switch (logLevel) {
case DEBUG -> colorize(fullPrefix, Attribute.BRIGHT_MAGENTA_TEXT());
case INFO -> colorize(fullPrefix, Attribute.BLUE_TEXT());
case WARNING -> colorize(fullPrefix, Attribute.YELLOW_TEXT());
case ERROR -> colorize(fullPrefix, Attribute.RED_TEXT());
case SUCCESS -> colorize(fullPrefix, Attribute.GREEN_TEXT());
};
}
/**
* Print text to the output or error stream, depending on the logLevel
* @param s The string to print
* @param logLevel If logLevel == ERROR, print to stderr; otherwise print to stdout
*/
protected void print(String s, LogLevel logLevel) {
String coloredPrefix = this.getPrefix(logLevel);
// if we are running the server, prepend the timestamp
if(SocketServer.isServerRunning()) {
String timestamp = LocalDateTime.now().format(timeFormatter);
coloredPrefix = "[" + timestamp + "] " + coloredPrefix;
}
// output to the correct output-stream
if (logLevel.getValue() == LogLevel.ERROR.getValue()) {
System.err.println(coloredPrefix + s);
}
else {
System.out.println(coloredPrefix + s);
}
}
public boolean isLogLevelActive(LogLevel logLevel) {
return logLevel.isHigherOrEqualTo(ConsoleInterface.logLevel);
}
/**
* Write text to the attached writer if there is any
* @param s The string to print
*/
protected void write(String s) {
if (writer != null && ConsoleInterface.writeLogFiles) {
// writing to file should only be done when necessary
synchronized (writer) {
try {
writer.write(s);
}
catch (IOException exception) {
throw new RuntimeException("Failed writing to file", exception);
}
}
}
}
/**
* Base method for logging a string value. Should mostly be used by the Logger internal functions that
* abstract the logLevel away from the parameters
*
* @hidden Only relevant for the Logger and very special cases with dynamic logLevel
* @param s The text to log
* @param logLevel The logLevel on which the text should be logged
*/
public void log(String s, LogLevel logLevel) {
if (isLogLevelActive(logLevel)) {
this.print(s, logLevel);
this.write(s);
}
}
public void log(Supplier<String> supp, LogLevel logLevel) {
if (isLogLevelActive(logLevel)) {
this.log(supp.get(), logLevel);
}
}
public void log(Object obj, LogLevel logLevel) {
if (isLogLevelActive(logLevel)) {
this.log(obj.toString(), logLevel);
}
}
/**
* Output a debug message
* @param s The string to log
*/
public void debug(String s) {
this.log(s, LogLevel.DEBUG);
}
public void debug(Object o) {
this.log(o, LogLevel.DEBUG);
}
public void debug(Supplier<String> supp) {
this.log(supp, LogLevel.DEBUG);
}
/**
* Output an info message
* @param s The string to log
*/
public void info(String s) {
this.log(s, LogLevel.INFO);
}
public void info(Object o) {
this.log(o, LogLevel.INFO);
}
public void info(Supplier<String> supp) {
this.log(supp, LogLevel.INFO);
}
/**
* Output a warning message
* @param s The string to log
*/
public void warn(String s) {
this.log(s, LogLevel.WARNING);
}
public void warn(Object o) {
this.log(o, LogLevel.WARNING);
}
public void warn(Supplier<String> supp) {
this.log(supp, LogLevel.WARNING);
}
/**
* Output an error message
* @param s The string to log
*/
public void error(String s) {
this.log(s, LogLevel.ERROR);
}
public void error(Object o) {
this.log(o, LogLevel.ERROR);
}
public void error(Supplier<String> supp) {
this.log(supp, LogLevel.ERROR);
}
/**
* Output a success message
* @param s The string to log
*/
public void success(String s) {
this.log(s, LogLevel.SUCCESS);
}
public void success(Object o) {
this.log(o, LogLevel.SUCCESS);
}
public void success(Supplier<String> supp) {
this.log(supp, LogLevel.SUCCESS);
}
/**
 * Special logging function that prints a throwable and all of its nested causes
 * (including stack traces) as errors
*
* @param throwable The Throwable object to output
*/
public void exception(Throwable throwable) {
// Format the exception output
String s = "Exception: " + throwable.getMessage() + "\n" +
Arrays.stream(throwable.getStackTrace()).map(stackTraceElement ->
" | " + stackTraceElement.toString()
).collect(Collectors.joining("\n"));
// if a cause will be printed afterwards, announce it at the end of this exception's output
if (throwable.getCause() != null) {
s += "\n\nCaused by: ";
}
// print the exception
this.error(s);
// print the cause recursively
if (throwable.getCause() != null) {
this.exception(throwable.getCause());
}
}
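/**
 * Usage sketch (illustrative only, not part of the original class): logging a caught
 * exception whose cause chain is unwound recursively by exception(), each cause being
 * announced with "Caused by:".
 */
private void exceptionLoggingSketch() {
    try {
        throw new IllegalStateException("outer failure",
                new IllegalArgumentException("inner cause"));
    }
    catch (IllegalStateException e) {
        // prints the outer exception and its stack trace, then recurses into the cause
        this.exception(e);
    }
}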
public void close() {
if (this.writer != null) {
try {
this.writer.close();
}
catch (IOException exception) {
throw new RuntimeException("Failed to close a loggers writer. Was it maybe already closed? ", exception);
}
}
}
/**
* An enum representing the different log levels as integers:
* <ul>
* <li>DEBUG: highly specific output only for debugging</li>
* <li>INFO: informational output about the current state of the program</li>
* <li>WARNING: warnings about potential issues or an unexpected state</li>
* <li>ERROR: invalid states, errors and exceptions</li>
* <li>SUCCESS: successfully executed key steps of the program</li>
* </ul>
*/
public enum LogLevel {
/** Highly specific output only for debugging */
DEBUG(0),
/** Informational output about the current state of the program */
INFO(1),
/** Warnings about potential issues or an unexpected state */
WARNING(2),
/** Invalid states, errors and exceptions */
ERROR(3),
/** Successfully executed key steps of the program */
SUCCESS(4);
private final int value;
LogLevel(final int newValue) {
value = newValue;
}
public boolean isHigherOrEqualTo(LogLevel other) {
return this.value >= other.value;
}
public int getValue() {
return value;
}
public static LogLevel fromValue(int value) {
return switch (value) {
case 0 -> LogLevel.DEBUG;
case 1 -> LogLevel.INFO;
case 2 -> LogLevel.WARNING;
case 3 -> LogLevel.ERROR;
case 4 -> LogLevel.SUCCESS;
default -> throw new RuntimeException("Invalid log level value: " + value);
};
}
}
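/**
 * Usage sketch (illustrative only, not part of the original class): the integer values
 * behind LogLevel drive filtering. With the active level at WARNING (2), DEBUG (0) is
 * suppressed while ERROR (3) and SUCCESS (4) pass isHigherOrEqualTo().
 */
private static void logLevelSketch() {
    LogLevel active = LogLevel.WARNING;
    boolean debugPasses = LogLevel.DEBUG.isHigherOrEqualTo(active);  // false: 0 < 2
    boolean errorPasses = LogLevel.ERROR.isHigherOrEqualTo(active);  // true:  3 >= 2
    LogLevel decoded = LogLevel.fromValue(4);                        // SUCCESS
}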
/**
* A special case of logger that will never output anything
*/
private static class NullLogger extends Logger {
@Override
public void log(String s, LogLevel logLevel) {
// Do nothing. Yay
}
}
}


@@ -3,37 +3,26 @@ package de.dhbwstuttgart.util;
 import java.util.Objects;
 import java.util.Optional;
-public class Pair<T, T1> {
-    private final T key;
-    private final T1 value;
+public class Pair<T, T1> extends Tuple<T, T1> {
-    public Pair(T a, T1 b) {
-        this.value = b;
-        this.key = a;
-    }
+    public Pair(T a, T1 b) {
+        super(a, b);
+    }
-    public Optional<T1> getValue() {
-        return Optional.of(value);
-    }
+    public Optional<T1> getValue() {
+        return Optional.of(second);
+    }
-    public T getKey() {
-        return key;
-    }
-    public String toString() {
-        return "(" + key.toString() + "," + value.toString() + ")\n";
-    }
+    public T getKey() {
+        return first;
+    }
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        Pair<?, ?> pair = (Pair<?, ?>) o;
-        return Objects.equals(key, pair.key) && Objects.equals(value, pair.value);
-    }
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        Pair<?, ?> pair = (Pair<?, ?>) o;
+        return Objects.equals(first, pair.first) && Objects.equals(second, pair.second);
+    }
-    @Override
-    public int hashCode() {
-        return Objects.hash(key, value);
-    }
 }


@@ -0,0 +1,38 @@
package de.dhbwstuttgart.util;
import java.util.Objects;
public class Tuple<T1, T2> {
protected final T1 first;
protected final T2 second;
public Tuple(T1 a, T2 b) {
this.second = b;
this.first = a;
}
public T1 getFirst() {
return first;
}
public T2 getSecond() {
return second;
}
public String toString() {
return "(" + first.toString() + "," + second.toString() + ")\n";
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Tuple<?, ?> pair = (Tuple<?, ?>) o;
return Objects.equals(first, pair.first) && Objects.equals(second, pair.second);
}
@Override
public int hashCode() {
return Objects.hash(first, second);
}
}
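A minimal usage sketch for the refactored Pair/Tuple classes (illustrative only; the demo class name is made up, but the imports match the package shown above):

import de.dhbwstuttgart.util.Pair;
import de.dhbwstuttgart.util.Tuple;

public class TupleDemo {
    public static void main(String[] args) {
        Tuple<String, Integer> t = new Tuple<>("answer", 42);
        Pair<String, Integer> p = new Pair<>("answer", 42);
        System.out.println(t.getFirst() + " -> " + t.getSecond());          // answer -> 42
        System.out.println(p.getKey() + " -> " + p.getValue().orElse(-1));  // answer -> 42
        System.out.println(t.equals(new Tuple<>("answer", 42)));            // true: equals compares first and second
    }
}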


@@ -18,7 +18,7 @@ public class TestTypeDeployment {
     public void testTypeDeployment() throws Exception {
         var path = Path.of(System.getProperty("user.dir"), "/resources/bytecode/javFiles/Cycle.jav");
         var file = path.toFile();
-        var compiler = new JavaTXCompiler(file, false);
+        var compiler = new JavaTXCompiler(file);
         var parsedSource = compiler.sourceFiles.get(file);
         var tiResults = compiler.typeInference(file);
         Set<TypeInsert> tips = new HashSet<>();

Some files were not shown because too many files have changed in this diff.