33 Commits

Author SHA1 Message Date
Fabian Holzwarth
57ffae0481 fix: fixed some serialization and deserialization issues 2025-06-22 15:11:49 +02:00
Fabian Holzwarth
d084d74a25 feat: fixed mismatch in PairOperator serialization 2025-06-22 10:10:32 +02:00
Fabian Holzwarth
cd15016f61 feat: allow subclasses when asserting values 2025-06-21 13:44:29 +02:00
Fabian Holzwarth
b0e5eee25c feat: rename Object... to Serial... and move into separate classes 2025-06-21 13:40:24 +02:00
Fabian Holzwarth
d1bd285be7 fix: replace reflection class check with simple string check 2025-06-21 13:23:01 +02:00
Fabian Holzwarth
a902fd5bee feat: replaced HashMaps with better type safety structure 2025-06-21 12:58:45 +02:00
Fabian Holzwarth
ced9fdc9f7 fix: non serialized constraitnContext 2025-06-20 19:09:33 +02:00
Fabian Holzwarth
53417bf298 feat: implement serialization and adjust packets to correct data types 2025-06-20 18:53:25 +02:00
Fabian Holzwarth
2d4da03f00 feat: implementing client-server model 2025-06-18 19:58:23 +02:00
Fabian Holzwarth
f7a13f5faa feat: turn UnifyContext into a record 2025-06-18 18:26:44 +02:00
Fabian Holzwarth
8fe80b4396 feat: move static placeholder generation into object 2025-06-18 17:47:29 +02:00
Fabian Holzwarth
eb1201ae5e feat: apply future-based approach to inner cartesian loop 2025-06-09 16:49:45 +02:00
Fabian Holzwarth
963ad76593 feat: make cartesian loop computation Future-based 2025-06-09 15:30:04 +02:00
Fabian Holzwarth
1eba09e3b0 feat: change cartesian while loop into recursive 2025-06-09 15:16:09 +02:00
Fabian Holzwarth
fc82125d14 feat: change TypeUnifyTask to use future-based logic 2025-06-09 14:53:37 +02:00
Fabian Holzwarth
dad468368b feat: make functions unify and unify2 future-based 2025-06-09 13:14:44 +02:00
Fabian Holzwarth
fdd4f3aa59 feat: implement variance-dependent calculation as Future based 2025-06-09 12:59:23 +02:00
Fabian Holzwarth
a0c11b60e8 Remove unnecessary parameter and fix some parallelization 2025-06-07 16:11:34 +02:00
Fabian Holzwarth
4cddf73e6d feat: small fixes for correct parameters 2025-06-07 14:38:18 +02:00
Fabian Holzwarth
5024a02447 feat: implement unify context and prepare variance code capsulation 2025-06-07 11:53:32 +02:00
Fabian Holzwarth
6c2d97b770 chore: code cleanup 2025-05-26 15:49:01 +02:00
Fabian Holzwarth
426c2916d3 feat: remove unnecessary synchronized blocks 2025-05-26 14:40:17 +02:00
Fabian Holzwarth
f722a00fbb feat: use the current thread for computation as well 2025-05-25 15:55:07 +02:00
Fabian Holzwarth
32797c9b9f feat: cleanup more cartesian product code 2025-05-24 12:43:42 +02:00
Fabian Holzwarth
87f655c85a feat: isolate constraint-filtering for one tv from computeCartesianRecursive 2025-05-23 16:10:37 +02:00
Fabian Holzwarth
613dceae1d feat: added Logger class, remove empty println start cleanup of computeCartesianRecursive 2025-05-23 14:12:25 +02:00
Fabian Holzwarth
81cac06e16 feat: add tool for merging many hash sets in parallel 2025-05-23 14:11:52 +02:00
Fabian Holzwarth
a47d5bc024 feat: slightly improved placeholder name generation 2025-05-23 14:04:48 +02:00
Fabian Holzwarth
e5916d455a feat: format and merge results in parallel 2025-05-19 17:05:18 +02:00
Fabian Holzwarth
ebb639e72e feat: remove log flushes 2025-05-18 16:29:19 +02:00
Fabian Holzwarth
f0a4a51ce6 feat: replace thread counter with thread pool 2025-05-18 15:40:31 +02:00
Fabian Holzwarth
7442880452 feat: limit placeholder generation to uppercase chars 2025-05-18 13:24:29 +02:00
Fabian Holzwarth
c4dc3b4245 feat: replace random based placeholder generation with deterministic approach 2025-05-18 12:41:56 +02:00
70 changed files with 5906 additions and 3532 deletions

40
independentTest.sh Executable file
View File

@@ -0,0 +1,40 @@
#!/usr/bin/env bash
REPO="https://gitea.hb.dhbw-stuttgart.de/f.holzwarth/JavaCompilerCore.git"
TDIR="./testBuild"
rm -rf "$TDIR" 2>/dev/null
mkdir $TDIR
cd $TDIR
git clone $REPO .
git checkout feat/unify-server
# git checkout dad468368b86bdd5a3d3b2754b17617cee0a9107 # 1:55
# git checkout a0c11b60e8c9d7addcbe0d3a09c9ce2924e9d5c0 # 2:25
# git checkout 4cddf73e6d6c9116d3e1705c4b27a8e7f18d80c3 # 2:27
# git checkout 6c2d97b7703d954e4a42eef3ec374bcf313af75c # 2:13
# git checkout f722a00fbb6e69423d48a890e4a6283471763e64 # 1:35
# git checkout f0a4a51ce65639ce9a9470ff0fdb538fdf9c02cc # 2:19
# git checkout 1391206dfe59263cdb22f93371cfd1dd5465d97f # 1:29
date "+%Y.%m.%d %H:%M:%S"
# mvn clean compile -DskipTests package
## prefix each stderr line with " | "
# exec 2> >(trap "" INT TERM; sed 's/^/ | /' >&2)
# echo -e "\nMatrix test:\n |"
# time java -jar target/JavaTXcompiler-0.1-jar-with-dependencies.jar resources/bytecode/javFiles/Matrix.jav >/dev/null;
mvn clean compile test
echo -e "\nCleanup... "
cd -
rm -rf "$TDIR" 2>/dev/null
echo -e "\nFinished "
date "+%Y.%m.%d %H:%M:%S"
echo -e "\n "

15
pom.xml
View File

@@ -44,6 +44,21 @@ http://maven.apache.org/maven-v4_0_0.xsd">
<artifactId>asm</artifactId>
<version>9.5</version>
</dependency>
<dependency>
<groupId>org.java-websocket</groupId>
<artifactId>Java-WebSocket</artifactId>
<version>1.5.2</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-simple</artifactId>
<version>1.7.25</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.17.2</version>
</dependency>
</dependencies>
<build>

View File

@@ -5,42 +5,61 @@ import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.util.*;
public class ConsoleInterface {
private static final String directory = System.getProperty("user.dir");
public static void main(String[] args) throws IOException, ClassNotFoundException {
List<File> input = new ArrayList<>();
List<File> classpath = new ArrayList<>();
String outputPath = null;
Iterator<String> it = Arrays.asList(args).iterator();
if(args.length == 0){
System.out.println("No input files given. Get help with --help");
System.exit(1);
}else if(args.length == 1 && args[0].equals("--help")){
System.out.println("Usage: javatx [OPTION]... [FILE]...\n" +
"\t-cp\tSet Classpath\n" +
"\t-d\tSet destination directory");
System.exit(1);
public class ConsoleInterface {
private static final String directory = System.getProperty("user.dir");
public static void main(String[] args) throws IOException, ClassNotFoundException {
List<File> input = new ArrayList<>();
List<File> classpath = new ArrayList<>();
String outputPath = null;
Iterator<String> it = Arrays.asList(args).iterator();
Optional<Integer> serverPort = Optional.empty();
Optional<String> unifyServer = Optional.empty();
if (args.length == 0) {
System.out.println("No input files given. Get help with --help");
System.exit(1);
} else if (args.length == 1 && args[0].equals("--help")) {
System.out.println("Usage: javatx [OPTION]... [FILE]...\n" +
"\t-cp\tSet Classpath\n" +
"\t-d\tSet destination directory\n" +
"\t[--server-mode <port>]\n" +
"\t[--unify-server <url>]\n");
System.exit(1);
}
while (it.hasNext()) {
String arg = it.next();
if (arg.equals("-d")) {
outputPath = it.next();
} else if (arg.startsWith("-d")) {
outputPath = arg.substring(2);
} else if (arg.equals("-cp") || arg.equals("-classpath")) {
String[] cps = it.next().split(":");
for (String cp : cps) {
classpath.add(new File(cp));
}
while(it.hasNext()){
String arg = it.next();
if(arg.equals("-d")){
outputPath = it.next();
}else if(arg.startsWith("-d")) {
outputPath = arg.substring(2);
}else if(arg.equals("-cp") || arg.equals("-classpath")){
String[] cps = it.next().split(":");
for(String cp : cps){
classpath.add(new File(cp));
}
}else{
input.add(new File(arg));
}
}
JavaTXCompiler compiler = new JavaTXCompiler(input, classpath, outputPath != null ? new File(outputPath) : null);
//compiler.typeInference();
compiler.generateBytecode();
}
} else if (arg.equals("--server-mode")) {
serverPort = Optional.of(Integer.parseInt(it.next()));
} else if (arg.equals("--unify-server")) {
unifyServer = Optional.of(it.next());
} else {
input.add(new File(arg));
}
}
if (serverPort.isPresent()) {
if (unifyServer.isPresent()) throw new RuntimeException("Cannot use unifyServer when in server mode!");
JavaTXServer server = new JavaTXServer();
server.listen(serverPort.get());
}
else {
JavaTXCompiler compiler = new JavaTXCompiler(input, classpath, outputPath != null ? new File(outputPath) : null, unifyServer);
//compiler.typeInference();
compiler.generateBytecode();
}
}
}

View File

@@ -12,6 +12,7 @@ import de.dhbwstuttgart.parser.SyntaxTreeGenerator.SyntaxTreeGenerator;
import de.dhbwstuttgart.parser.antlr.Java17Parser.SourceFileContext;
import de.dhbwstuttgart.parser.scope.JavaClassName;
import de.dhbwstuttgart.parser.scope.JavaClassRegistry;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.syntaxtree.ClassOrInterface;
import de.dhbwstuttgart.syntaxtree.GenericTypeVar;
import de.dhbwstuttgart.syntaxtree.Method;
@@ -35,10 +36,13 @@ import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.typeAlgo.TYPE;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.RuleSet;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.distributeVariance;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
@@ -64,21 +68,23 @@ public class JavaTXCompiler {
// public static JavaTXCompiler INSTANCE;
final CompilationEnvironment environment;
Boolean resultmodel = true;
public final Map<File, SourceFile> sourceFiles = new HashMap<>();
public final Map<File, SourceFile> sourceFiles = new HashMap<>();
Boolean log = false; //gibt an ob ein Log-File nach System.getProperty("user.dir")+""/logFiles/"" geschrieben werden soll?
public volatile UnifyTaskModel usedTasks = new UnifyTaskModel();
public final DirectoryClassLoader classLoader;
Boolean log = false; //gibt an ob ein Log-File nach System.getProperty("user.dir")+""/logFiles/"" geschrieben werden soll?
public volatile UnifyTaskModel usedTasks = new UnifyTaskModel();
public final DirectoryClassLoader classLoader;
public final List<File> classPath;
private final File outputPath;
private final Optional<String> unifyServer;
public DirectoryClassLoader getClassLoader() {
return classLoader;
}
public JavaTXCompiler(File sourceFile) throws IOException, ClassNotFoundException {
this(Arrays.asList(sourceFile), List.of(), new File("."));
this(Arrays.asList(sourceFile), List.of(), new File("."), Optional.empty());
}
public JavaTXCompiler(File sourceFile, Boolean log) throws IOException, ClassNotFoundException {
@@ -87,10 +93,15 @@ public class JavaTXCompiler {
}
public JavaTXCompiler(List<File> sourceFiles) throws IOException, ClassNotFoundException {
this(sourceFiles, List.of(), new File("."));
this(sourceFiles, List.of(), new File("."), Optional.empty());
}
public JavaTXCompiler(List<File> sources, List<File> contextPath, File outputPath) throws IOException, ClassNotFoundException {
this(sources, contextPath, outputPath, Optional.empty());
}
public JavaTXCompiler(List<File> sources, List<File> contextPath, File outputPath, Optional<String> unifyServer) throws IOException, ClassNotFoundException {
this.unifyServer = unifyServer;
var path = new ArrayList<>(contextPath);
if (contextPath.isEmpty()) {
// When no contextPaths are given, the working directory is the sources root
@@ -300,13 +311,14 @@ public class JavaTXCompiler {
Set<Set<UnifyPair>> results = new HashSet<>();
UnifyResultModel urm = null;
// urm.addUnifyResultListener(resultListener);
UnifyContext context = new UnifyContext(logFile, log, true, urm, usedTasks, new PlaceholderRegistry());
try {
logFile = logFile == null ? new FileWriter(new File("log_" + sourceFiles.keySet().iterator().next().getName())) : logFile;
IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses, logFile, getClassLoader(), this);
IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses, logFile, getClassLoader(), this, context.placeholderRegistry());
System.out.println(finiteClosure);
urm = new UnifyResultModel(cons, finiteClosure);
urm.addUnifyResultListener(resultListener);
ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons);
ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons, context.placeholderRegistry());
Function<UnifyPair, UnifyPair> distributeInnerVars = x -> {
UnifyType lhs, rhs;
@@ -320,13 +332,12 @@ public class JavaTXCompiler {
logFile.write(unifyCons.toString());
unifyCons = unifyCons.map(distributeInnerVars);
logFile.write(unifyCons.toString());
TypeUnify unify = new TypeUnify();
// Set<Set<UnifyPair>> results = new HashSet<>(); Nach vorne gezogen
logFile.write("FC:\\" + finiteClosure.toString() + "\n");
for (SourceFile f : this.sourceFiles.values()) {
logFile.write(ASTTypePrinter.print(f));
}
logFile.flush();
// logFile.flush();
Set<PlaceholderType> varianceTPHold;
Set<PlaceholderType> varianceTPH = new HashSet<>();
varianceTPH = varianceInheritanceConstraintSet(unifyCons);
@@ -342,7 +353,7 @@ public class JavaTXCompiler {
List<Set<Constraint<UnifyPair>>> oderConstraints = unifyCons.getOderConstraints()/*
* .stream().map(x -> { Set<Set<UnifyPair>> ret = new HashSet<>(); for (Constraint<UnifyPair> y : x) { ret.add(new HashSet<>(y)); } return ret; }).collect(Collectors. toCollection(ArrayList::new))
*/;
unify.unifyAsync(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, urm, usedTasks);
TypeUnify.unifyAsync(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
} catch (IOException e) {
System.err.println("kein LogFile");
}
@@ -365,13 +376,14 @@ public class JavaTXCompiler {
final ConstraintSet<Pair> cons = getConstraints(file);
Set<Set<UnifyPair>> results = new HashSet<>();
PlaceholderRegistry placeholderRegistry = new PlaceholderRegistry();
try {
var logFolder = new File(System.getProperty("user.dir") + "/logFiles/");
if (log) logFolder.mkdirs();
Writer logFile = log ? new FileWriter(new File(logFolder, "log_" + sourceFiles.keySet().iterator().next().getName())) : new OutputStreamWriter(new NullOutputStream());
IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses.stream().toList(), logFile, classLoader, this);
FiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses.stream().toList(), logFile, classLoader, this, placeholderRegistry);
System.out.println(finiteClosure);
ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons);
ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons, placeholderRegistry);
System.out.println("xxx1");
Function<UnifyPair, UnifyPair> distributeInnerVars = x -> {
UnifyType lhs, rhs;
@@ -387,12 +399,11 @@ public class JavaTXCompiler {
System.out.println("Unify:" + unifyCons.toString());
unifyCons = unifyCons.map(distributeInnerVars);
logFile.write("\nUnify_distributeInnerVars: " + unifyCons.toString());
TypeUnify unify = new TypeUnify();
// Set<Set<UnifyPair>> results = new HashSet<>(); Nach vorne gezogen
logFile.write("FC:\\" + finiteClosure.toString() + "\n");
logFile.write(ASTTypePrinter.print(sf));
System.out.println(ASTTypePrinter.print(sf));
logFile.flush();
// logFile.flush();
System.out.println("Unify nach Oder-Constraints-Anpassung:" + unifyCons.toString());
Set<PlaceholderType> varianceTPHold;
Set<PlaceholderType> varianceTPH = new HashSet<>();
@@ -410,16 +421,24 @@ public class JavaTXCompiler {
/*
* Set<Set<UnifyPair>> ret = new HashSet<>(); for (Constraint<UnifyPair> y : x) { ret.add(new HashSet<>(y)); } return ret; }).collect(Collectors.toCollection(ArrayList::new))
*/;
if (resultmodel) {
if (unifyServer.isPresent()) {
UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
UnifyContext context = new UnifyContext(logFile, log, true, urm, usedTasks, placeholderRegistry);
var socketClient = new SocketClient(unifyServer.get());
return socketClient.execute(finiteClosure, cons, unifyCons, context);
}
else if (resultmodel) {
/* UnifyResultModel Anfang */
UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
UnifyResultListenerImpl li = new UnifyResultListenerImpl();
urm.addUnifyResultListener(li);
unify.unifyParallel(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, urm, usedTasks);
UnifyContext context = new UnifyContext(logFile, log, true, urm, usedTasks, placeholderRegistry);
TypeUnify.unifyParallel(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
System.out.println("RESULT Final: " + li.getResults());
System.out.println("Constraints for Generated Generics: " + " ???");
logFile.write("RES_FINAL: " + li.getResults().toString() + "\n");
logFile.flush();
// logFile.flush();
return li.getResults();
}
/* UnifyResultModel End */
@@ -427,34 +446,35 @@ public class JavaTXCompiler {
// Set<Set<UnifyPair>> result = unify.unify(unifyCons.getUndConstraints(),
// oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons,
// finiteClosure));
Set<Set<UnifyPair>> result = unify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons, finiteClosure), usedTasks);
UnifyContext context = new UnifyContext(logFile, log, false, new UnifyResultModel(cons, finiteClosure), usedTasks, placeholderRegistry);
Set<Set<UnifyPair>> result = TypeUnify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
System.out.println("RESULT: " + result);
logFile.write("RES: " + result.toString() + "\n");
logFile.flush();
// logFile.flush();
results.addAll(result);
results = results.stream().map(x -> {
Optional<Set<UnifyPair>> res = new RuleSet().subst(x.stream().map(y -> {
Optional<Set<UnifyPair>> res = new RuleSet(placeholderRegistry).subst(x.stream().map(y -> {
if (y.getPairOp() == PairOperator.SMALLERDOTWC)
y.setPairOp(PairOperator.EQUALSDOT);
return y; // alle Paare a <.? b erden durch a =. b ersetzt
}).collect(Collectors.toCollection(HashSet::new)));
if (res.isPresent()) {// wenn subst ein Erg liefert wurde was veraendert
return new TypeUnifyTask().applyTypeUnificationRules(res.get(), finiteClosure);
return new TypeUnifyTask(context).applyTypeUnificationRules(res.get(), finiteClosure);
} else
return x; // wenn nichts veraendert wurde wird x zurueckgegeben
}).collect(Collectors.toCollection(HashSet::new));
System.out.println("RESULT Final: " + results);
System.out.println("Constraints for Generated Generics: " + " ???");
logFile.write("RES_FINAL: " + results.toString() + "\n");
logFile.flush();
logFile.write("PLACEHOLDERS: " + PlaceholderType.EXISTING_PLACEHOLDERS);
logFile.flush();
// logFile.flush();
logFile.write("PLACEHOLDERS: " + placeholderRegistry);
// logFile.flush();
}
} catch (IOException e) {
System.err.println("kein LogFile");
}
return results.stream().map((unifyPairs -> new ResultSet(UnifyTypeFactory.convert(unifyPairs, Pair.generateTPHMap(cons))))).collect(Collectors.toList());
return results.stream().map((unifyPairs -> new ResultSet(UnifyTypeFactory.convert(unifyPairs, Pair.generateTPHMap(cons), placeholderRegistry)))).collect(Collectors.toList());
}
/**
@@ -586,10 +606,6 @@ public class JavaTXCompiler {
}
}
/**
* @param path - output-Directory can be null, then class file output is in the same directory as the parsed source files
* @return
*/
public Map<JavaClassName, byte[]> generateBytecode(File sourceFile) throws ClassNotFoundException, IOException {
var sf = sourceFiles.get(sourceFile);
if (sf.isGenerated()) return null;

View File

@@ -0,0 +1,17 @@
package de.dhbwstuttgart.core;
import de.dhbwstuttgart.server.SocketServer;
public class JavaTXServer {
public void listen(int port) {
try {
SocketServer socketServer = new SocketServer(port);
socketServer.start();
} catch (Exception e) {
e.printStackTrace();
}
}
}

View File

@@ -0,0 +1,11 @@
package de.dhbwstuttgart.exceptions;
/**
* A runtime exception that is used when a unification run is cancelled.
* Throwing an exception allows cancellations to be detected from within method calls as well,
* where previously only a return X; would have taken place.
*/
public class UnifyCancelException extends RuntimeException {
}

View File

@@ -1,4 +1,25 @@
package de.dhbwstuttgart.parser;
public record SourceLoc(String file, int line) {
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
public record SourceLoc(String file, int line) implements ISerializableData {
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
var serialized = new SerialMap();
serialized.put("file", file);
serialized.put("line", line);
return serialized;
}
public static SourceLoc fromSerial(SerialMap data) {
return new SourceLoc(
data.getValue("file").getOf(String.class),
data.getValue("line").getOf(Integer.class)
);
}
}
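A minimal round-trip sketch for the record above (illustrative only, not part of this changeset; it only assumes the SerialMap and KeyStorage classes added in this branch):
// illustrative sketch: serialize a SourceLoc and read it back
SourceLoc loc = new SourceLoc("Matrix.jav", 42);
SerialMap data = loc.toSerial(new KeyStorage());   // {"file": "Matrix.jav", "line": 42}
SourceLoc restored = SourceLoc.fromSerial(data);   // equal to the original record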

View File

@@ -12,6 +12,7 @@ import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.syntaxtree.type.*;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.model.*;
import java.util.*;
@@ -26,8 +27,8 @@ public class FCGenerator {
*
* @param availableClasses - Alle geparsten Klassen
*/
public static Set<UnifyPair> toUnifyFC(JavaTXCompiler compiler, Collection<ClassOrInterface> availableClasses, ClassLoader classLoader) throws ClassNotFoundException {
return toFC(availableClasses, classLoader).stream().map(t -> UnifyTypeFactory.convert(compiler, t)).collect(Collectors.toSet());
public static Set<UnifyPair> toUnifyFC(JavaTXCompiler compiler, Collection<ClassOrInterface> availableClasses, ClassLoader classLoader, PlaceholderRegistry placeholderRegistry) throws ClassNotFoundException {
return toFC(availableClasses, classLoader).stream().map(t -> UnifyTypeFactory.convert(compiler, t, placeholderRegistry)).collect(Collectors.toSet());
}
public static Set<Pair> toFC(Collection<ClassOrInterface> availableClasses, ClassLoader classLoader) throws ClassNotFoundException {

View File

@@ -0,0 +1,152 @@
package de.dhbwstuttgart.server;
import com.fasterxml.jackson.core.JsonProcessingException;
import de.dhbwstuttgart.server.packet.ErrorPacket;
import de.dhbwstuttgart.server.packet.IPacket;
import de.dhbwstuttgart.server.packet.InvalidPacket;
import de.dhbwstuttgart.server.packet.MessagePacket;
import de.dhbwstuttgart.server.packet.PacketContainer;
import de.dhbwstuttgart.server.packet.UnifyRequestPacket;
import de.dhbwstuttgart.server.packet.UnifyResultPacket;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import java.io.FileWriter;
import java.net.URI;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.java_websocket.client.WebSocketClient;
import org.java_websocket.enums.ReadyState;
import org.java_websocket.handshake.ServerHandshake;
/**
* The Client-side of the websocket
*/
public class SocketClient extends WebSocketClient {
// use a latch to wait until the connection is closed by the remote host
private final CountDownLatch closeLatch = new CountDownLatch(1);
// temporarily: The received unify result packet
private UnifyResultPacket unifyResultPacket = null;
public SocketClient(String url) {
super(URI.create(url));
// make sure the url is in a valid format
final String regex = "^wss?://(\\w+(\\.\\w+)?)*:(\\d+)$";
final Matcher matcher = Pattern.compile(regex).matcher(url);
if (!matcher.find()) {
throw new RuntimeException("Provided string \"" + url + "\" is not a valid server URL! Use pattern ws(s?)://<host.name>:<port>");
}
}
public SocketClient(String host, int port, boolean secure) {
super(URI.create(String.format("%s://%s:%d/", secure ? "wss" : "ws", host, port)));
}
/**
* The main method for connecting, requesting and waiting for the server to unify.
* This is synchronized to prevent multiple concurrent webSocket connections at the moment; it is currently only
* called from the main thread, so the synchronization is probably unnecessary and may be removed later.
*/
synchronized public List<ResultSet> execute(
FiniteClosure finiteClosure,
ConstraintSet<Pair> constraintSet,
ConstraintSet<UnifyPair> unifyConstraintSet,
UnifyContext context
) throws JsonProcessingException {
try {
// wait for the connection to be set up
this.connectBlocking();
// make sure the connection has been established successfully
if (this.getReadyState() != ReadyState.OPEN) {
throw new RuntimeException("WebSocket Client could not connect to remote host at " + this.uri);
}
// send the unify task request
UnifyRequestPacket packet = new UnifyRequestPacket(finiteClosure, constraintSet, unifyConstraintSet);
String json = PacketContainer.serialize(packet);
try (FileWriter w = new FileWriter("./log.json")) {
w.write(json);
}
this.send(json);
// block the thread, until the connection is closed by the remote host (usually after sending the results)
this.waitUntilClosed();
// wait for the connection to fully close
this.closeBlocking();
} catch (InterruptedException exception) {
System.err.println("Server connection interrupted: " + exception);
this.notifyAll();
throw new RuntimeException("Aborted server connection", exception);
}
catch (Exception exception) {
throw new RuntimeException("Exception occurred in server unify: ", exception);
}
// detect error cases, in which no error was thrown, but also no result was sent back from the server
if (this.unifyResultPacket == null) {
throw new RuntimeException("Did not receive server response but closed connection already");
}
return unifyResultPacket.getResultSet(context);
}
/**
* Specific client-side implementations to handle incoming packets
*/
protected void handleReceivedPacket(IPacket packet) {
if (packet instanceof InvalidPacket) {
System.err.println("[socket] " + ((InvalidPacket) packet).error);
} else if (packet instanceof MessagePacket) {
System.out.println("[socket] " + ((MessagePacket) packet).message);
} else if (packet instanceof ErrorPacket) {
System.err.println("[socket] " + ((ErrorPacket) packet).error);
} else if (packet instanceof UnifyResultPacket) {
System.out.println("[socket] Received unify result");
unifyResultPacket = (UnifyResultPacket) packet;
}
}
@Override
public void onOpen(ServerHandshake handshakedata) {
System.out.println("Connected to server with status " + handshakedata.getHttpStatus());
}
@Override
public void onMessage(String message) {
// System.out.println("received: " + message);
IPacket packet = PacketContainer.deserialize(message);
this.handleReceivedPacket(packet);
}
@Override
public void onClose(int code, String reason, boolean remote) {
System.out.println(
"Disconnected from server " +
"with code " + code + " " +
(reason.isEmpty() ? "" : "and reason " + reason + " ") +
"(closed by remote: " + remote + ")"
);
this.closeLatch.countDown();
}
@Override
public void onError(Exception e) {
System.out.println("Error: " + e.getMessage());
e.printStackTrace();
}
public void waitUntilClosed() throws InterruptedException {
closeLatch.await();
}
}

View File

@@ -0,0 +1,184 @@
package de.dhbwstuttgart.server;
import de.dhbwstuttgart.server.packet.IPacket;
import de.dhbwstuttgart.server.packet.MessagePacket;
import de.dhbwstuttgart.server.packet.PacketContainer;
import de.dhbwstuttgart.server.packet.UnifyRequestPacket;
import de.dhbwstuttgart.server.packet.UnifyResultPacket;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.UnifyResultListenerImpl;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import java.io.Writer;
import java.net.InetSocketAddress;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import org.java_websocket.WebSocket;
import org.java_websocket.handshake.ClientHandshake;
import org.java_websocket.server.WebSocketServer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class SocketServer extends WebSocketServer {
private static final Logger log = LoggerFactory.getLogger(SocketServer.class);
public SocketServer(int port) {
super(new InetSocketAddress(port));
}
@Override
public void onOpen(WebSocket webSocket, ClientHandshake clientHandshake) {
System.out.println("New connection: " + webSocket.getResourceDescriptor());
webSocket.setAttachment(new SocketData(UUID.randomUUID().toString()));
try {
sendMessage(webSocket, "Welcome to the server!");
// wait 10 seconds for the client to send a task, and close the connection if nothing has been received by then
ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
Runnable task = () -> {
if (webSocket.<SocketData>getAttachment().hasSentTask || !webSocket.isOpen()) {
return;
}
sendMessage(webSocket, "No task received after 10 seconds. Closing connection...");
webSocket.close();
};
executor.schedule(task, 10, TimeUnit.SECONDS);
executor.shutdown();
// and finally, when your program wants to exit
} catch (Exception e) {
log.error("e: ", e);
webSocket.close(1, e.getMessage());
}
}
@Override
public void onClose(WebSocket webSocket, int code, String reason, boolean remote) {
System.out.println("Connection closed: " + webSocket.getResourceDescriptor());
System.out.println(
"Disconnected client " + webSocket.getResourceDescriptor() + " " +
"with code " + code + " " +
(reason.isEmpty() ? "" : "and reason " + reason + " ") +
"(closed by client: " + remote + ")"
);
}
@Override
public void onMessage(WebSocket webSocket, String s) {
// System.out.println("Received: " + s.substring(0, 50));
IPacket reconstructedPacket = PacketContainer.deserialize(s);
this.onPacketReceived(webSocket, reconstructedPacket);
}
@Override
public void onError(WebSocket webSocket, Exception e) {
webSocket.close();
}
@Override
public void onStart() {
System.out.println("Websocket server started");
}
/**
* A shorthand method for sending informational messages to the client
*/
public void sendMessage(WebSocket webSocket, String text) {
try {
MessagePacket message = new MessagePacket();
message.message = text;
webSocket.send(PacketContainer.serialize(message));
} catch (Exception e) {
System.err.println("Failed to send message: " + text);
System.err.println(e);
}
}
/**
* The server-side implementation on how to handle certain packets when received
*/
private void onPacketReceived(WebSocket webSocket, IPacket packet) {
if (packet instanceof UnifyRequestPacket unifyRequestPacket) {
sendMessage(webSocket, "You requested a unify! Please wait until I calculated everything...");
System.out.println("Client " + webSocket.<SocketData>getAttachment().id + " requested a unification. Starting now...");
webSocket.<SocketData>getAttachment().hasSentTask = true;
try {
var placeholderRegistry = new PlaceholderRegistry();
var unifyContext = new UnifyContext(Writer.nullWriter(), false, true,
new UnifyResultModel(new ConstraintSet<>(), new FiniteClosure(new HashSet<>(), null, placeholderRegistry)),
new UnifyTaskModel(), ForkJoinPool.commonPool(), placeholderRegistry
);
// start the unification algorithm from the received data
IFiniteClosure finiteClosure = unifyRequestPacket.retrieveFiniteClosure(unifyContext);
ConstraintSet<Pair> constraintSet = unifyRequestPacket.retrieveConstraintSet(unifyContext);
ConstraintSet<UnifyPair> unifyConstraintSet = unifyRequestPacket.retrieveUnifyConstraintSet(unifyContext);
var resultModel = new UnifyResultModel(constraintSet, finiteClosure);
UnifyResultListenerImpl resultListener = new UnifyResultListenerImpl();
resultModel.addUnifyResultListener(resultListener);
TypeUnify.unifyParallel(
unifyConstraintSet.getUndConstraints(),
unifyConstraintSet.getOderConstraints(),
finiteClosure,
unifyContext.newWithResultModel(resultModel)
);
var resultSets = resultListener.getResults();
System.out.println("Finished unification for client " + webSocket.<SocketData>getAttachment().id);
sendMessage(webSocket, "Unification finished. Found " + resultSets.size() + " result sets");
if (webSocket.isOpen()) {
UnifyResultPacket resultPacket = UnifyResultPacket.create(resultSets);
webSocket.send(PacketContainer.serialize(resultPacket));
}
} catch (Exception e) {
System.err.println(e);
log.error("e: ", e);
}
webSocket.close();
} else {
sendMessage(webSocket, "The packet of type " + packet.getClass().getName() + " is not handled by the server!");
}
}
/**
* The data that is associated server-side with any connected client.
* This makes it possible to store information that can be mapped to any existing connection.
*/
static class SocketData {
public final String id;
// used for the timeout of 10 seconds, until an unused open connection is automatically closed
public boolean hasSentTask = false;
public SocketData(String id) {
this.id = id;
}
}
}

View File

@@ -0,0 +1,14 @@
package de.dhbwstuttgart.server.packet;
/**
* A packet to send simple error messages between the client and the server
*/
public class ErrorPacket implements IPacket {
/**
* The error message from the server that should be logged or output
*/
public String error;
}

View File

@@ -0,0 +1,18 @@
package de.dhbwstuttgart.server.packet;
/**
* The shared interface for all packets of the server connection.
* A packet must always:
* - Have a default / no-parameter constructor
* - Have only serializable public properties (or disable them via jackson annotations)
* A packet should have, for easy usage and consistency:
* - a static create() method
*
*/
public interface IPacket {
interface IDataContainer<T> {
T toObject();
}
}
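A hypothetical packet that follows the conventions above (the name PingPacket and its field are invented for illustration and are not part of this changeset); a real packet would additionally have to be registered in PacketContainer:
// hypothetical example packet, for illustration only
public class PingPacket implements IPacket {
    // serializable public property
    public long timestamp;
    // default / no-parameter constructor for jackson
    public PingPacket() {}
    // static create() method for easy usage and consistency
    public static PingPacket create() {
        PingPacket packet = new PingPacket();
        packet.timestamp = System.currentTimeMillis();
        return packet;
    }
}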

View File

@@ -0,0 +1,13 @@
package de.dhbwstuttgart.server.packet;
/**
* A fallback packet that is generated if the received json could not be mapped to an existing packet
*/
public class InvalidPacket implements IPacket {
/**
* If available, the error that caused this packet to appear
*/
public String error = "<unknown error>";
}

View File

@@ -0,0 +1,13 @@
package de.dhbwstuttgart.server.packet;
/**
* A packet to send simple informational messages between the client and the server
*/
public class MessagePacket implements IPacket {
/**
* The informational message from the server that should be logged or output
*/
public String message;
}

View File

@@ -0,0 +1,84 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
* A wrapper for the packet to ensure correct serialization/deserialization and make it possible to detect the matching
* packet type for deserialization.
*/
@JsonInclude(JsonInclude.Include.NON_NULL)
public class PacketContainer {
// The jackson serializer / deserializer tool
private static final ObjectMapper objectMapper = new ObjectMapper();
/*
* The available packet types. The one type that is represented in the JSON should always be the ONLY non-null value.
* They have to be public (for the moment) to let jackson fill them in while deserializing
*/
public ErrorPacket errorPacket = null;
public MessagePacket messagePacket = null;
public InvalidPacket invalidPacket = null;
public UnifyRequestPacket unifyRequestPacket = null;
public UnifyResultPacket unifyResultPacket = null;
/**
* Generate the JSON string for the given packet
*
* @param packet The packet to serialize
* @return The json representation of the packet
*/
public static String serialize(IPacket packet) throws JsonProcessingException {
objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
PacketContainer container = new PacketContainer();
if (packet instanceof ErrorPacket)
container.errorPacket = (ErrorPacket) packet;
else if (packet instanceof MessagePacket)
container.messagePacket = (MessagePacket) packet;
else if (packet instanceof UnifyRequestPacket)
container.unifyRequestPacket = (UnifyRequestPacket) packet;
else if (packet instanceof UnifyResultPacket)
container.unifyResultPacket = (UnifyResultPacket) packet;
// Add new packets here and in the deserialize method
return objectMapper.writeValueAsString(container);
}
/**
* Use the JSON string to generate the matching packet object
*
* @param json The serialized representation of a packet container
* @return The deserialized Packet object
*/
public static IPacket deserialize(String json) {
objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
try {
PacketContainer container = objectMapper.readValue(json, PacketContainer.class);
if (container.errorPacket != null)
return container.errorPacket;
if (container.messagePacket != null)
return container.messagePacket;
if (container.invalidPacket != null)
return container.invalidPacket;
if (container.unifyRequestPacket != null)
return container.unifyRequestPacket;
if (container.unifyResultPacket != null)
return container.unifyResultPacket;
// Add new packets here and in the serialize method
throw new RuntimeException("Cannot map received json to any known packet class");
} catch (Exception e) {
InvalidPacket packet = new InvalidPacket();
packet.error = e.getMessage();
return packet;
}
}
}
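A minimal sketch of the intended round trip through the container (illustrative only; it uses the MessagePacket class from this changeset, and serialize declares JsonProcessingException, which the caller has to handle):
// illustrative round trip, not part of this changeset
MessagePacket message = new MessagePacket();
message.message = "Welcome to the server!";
String json = PacketContainer.serialize(message);      // wraps the packet under its container field
IPacket restored = PacketContainer.deserialize(json);
if (restored instanceof MessagePacket m) {
    System.out.println(m.message);                      // prints the original text again
}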

View File

@@ -0,0 +1,68 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
/**
* A packet to send all required data for the unification algorithm to the server and request the unification
*/
public class UnifyRequestPacket implements IPacket {
public SerialMap finiteClosure;
public SerialMap constraintSet;
public SerialMap unifyConstraintSet;
public SerialMap serialKeyStorage;
@JsonIgnore
private KeyStorage keyStorage = new KeyStorage();
@JsonIgnore
private boolean keyStorageLoaded = false;
public UnifyRequestPacket() {}
public UnifyRequestPacket(
FiniteClosure finiteClosure,
ConstraintSet<Pair> constraintSet,
ConstraintSet<UnifyPair> unifyConstraintSet
) {
this.finiteClosure = finiteClosure.toSerial(keyStorage);
this.constraintSet = constraintSet.toSerial(keyStorage);
this.unifyConstraintSet = unifyConstraintSet.toSerial(keyStorage);
this.serialKeyStorage = keyStorage.toSerial(keyStorage);
}
@JsonIgnore
public void loadKeyStorage(UnifyContext context) {
if (!keyStorageLoaded) {
keyStorageLoaded = true;
keyStorage = KeyStorage.fromSerial(this.serialKeyStorage, context);
}
}
@JsonIgnore
public FiniteClosure retrieveFiniteClosure(UnifyContext context) {
this.loadKeyStorage(context);
return FiniteClosure.fromSerial(this.finiteClosure, context, keyStorage);
}
@JsonIgnore
public ConstraintSet<Pair> retrieveConstraintSet(UnifyContext context) {
this.loadKeyStorage(context);
return ConstraintSet.fromSerial(this.constraintSet, context, Pair.class, keyStorage);
}
@JsonIgnore
public ConstraintSet<UnifyPair> retrieveUnifyConstraintSet(UnifyContext context) {
this.loadKeyStorage(context);
return ConstraintSet.fromSerial(this.unifyConstraintSet, context, UnifyPair.class, keyStorage);
}
}

View File

@@ -0,0 +1,34 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.List;
/**
* A packet to send all calculated data from the unification algorithm back to the client
*/
public class UnifyResultPacket implements IPacket {
public SerialList<ISerialNode> results;
public SerialMap keyStorage;
public static UnifyResultPacket create(List<ResultSet> resultSets) {
UnifyResultPacket serialized = new UnifyResultPacket();
KeyStorage keyStorage = new KeyStorage();
serialized.results = SerialList.fromMapped(resultSets, resultSet -> resultSet.toSerial(keyStorage));
serialized.keyStorage = keyStorage.toSerial(keyStorage);
return serialized;
}
@JsonIgnore
public List<ResultSet> getResultSet(UnifyContext context) {
return this.results.assertListOfMaps().stream()
.map(resultData -> ResultSet.fromSerial(resultData, context)).toList();
}
}

View File

@@ -0,0 +1,16 @@
package de.dhbwstuttgart.server.packet.dataContainers;
import de.dhbwstuttgart.exceptions.NotImplementedException;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
public interface ISerializableData {
public abstract ISerialNode toSerial(KeyStorage keyStorage);
public static Object fromSerial(SerialMap data, UnifyContext context) {
throw new NotImplementedException("Missing implementation of \"fromSerial\" for a serializable element");
}
}

View File

@@ -0,0 +1,104 @@
package de.dhbwstuttgart.server.packet.dataContainers;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.HashMap;
import java.util.Map;
public class KeyStorage implements ISerializableData {
/**
* Store a unique identifier for every element, so it can be referenced in the json
*/
protected Map<ISerializableData, String> identifiers = new HashMap<>();
/**
* Store the serialized element per identifier when serializing
*/
protected SerialMap serializedElements = new SerialMap();
/**
* Store the unserialized element per identifier when unserializing
*/
protected Map<String, ISerializableData> unserializedElements = new HashMap<>();
/**
* Retrieve or generate a new identifier for an element
*/
public String getIdentifier(ISerializableData element) {
final String identifier = this.identifiers.getOrDefault(element, "_" + identifiers.size());
this.identifiers.putIfAbsent(element, identifier);
return identifier;
}
/**
* Checks if the given element identifier belongs to an element that was already serialized
*/
public boolean isAlreadySerialized(String identifier) {
return this.serializedElements.containsKey(identifier);
}
/**
* Checks if the given element identifier belongs to an element that was already unserialized
*/
public boolean isAlreadyUnserialized(String identifier) {
return this.unserializedElements.containsKey(identifier);
}
/**
* Register a serialized element to prevent it from being serialized again
*/
public void putSerialized(String identifier, SerialMap serializedElement) {
this.serializedElements.put(identifier, serializedElement);
}
/**
* Retrieve a serialized element
*/
public SerialMap getSerialized(String identifier) {
if (!this.serializedElements.containsKey(identifier)) {
throw new RuntimeException("No serialized element of identifier " + identifier + " available to get");
}
return this.serializedElements.getMap(identifier);
}
/**
* Register an unserialized element to prevent it from being unserialized again
*/
public void putUnserialized(String identifier, ISerializableData element) {
this.unserializedElements.put(identifier, element);
}
/**
* Retrieve an unserialized element
*/
public <T extends ISerializableData> T getUnserialized(String identifier, Class<T> target) {
if (!this.unserializedElements.containsKey(identifier)) {
throw new RuntimeException("No unserialized element of identifier " + identifier + " available to get");
}
var element = this.unserializedElements.get(identifier);
if (target.isInstance(element)) {
return (T) element;
}
throw new RuntimeException("Failed to get unserialized element from KeyStorage. Expected instance of " +
target.getName() + " but found " + element.getClass().getName());
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("serializedElements", this.serializedElements);
return serialized;
}
public static KeyStorage fromSerial(SerialMap data, UnifyContext context) {
var serializedConstraintsData = data.getMap("serializedElements");
var constraintContext = new KeyStorage();
for (var entry : serializedConstraintsData.entrySet()) {
if (entry.getValue() instanceof SerialMap valueMap) {
constraintContext.putSerialized(entry.getKey(), valueMap);
}
}
return constraintContext;
}
}
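A minimal sketch of how the storage is meant to deduplicate elements during serialization (illustrative only; the SourceLoc record from this changeset stands in for any ISerializableData element):
// illustrative sketch, not part of this changeset
KeyStorage keyStorage = new KeyStorage();
SourceLoc loc = new SourceLoc("Matrix.jav", 1);
String id = keyStorage.getIdentifier(loc);                    // stable identifier, e.g. "_0"
if (!keyStorage.isAlreadySerialized(id)) {
    keyStorage.putSerialized(id, loc.toSerial(keyStorage));   // serialize the element only once
}
SerialMap stored = keyStorage.getSerialized(id);              // later references resolve via the identifier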

View File

@@ -0,0 +1,31 @@
package de.dhbwstuttgart.server.packet.dataContainers.serialized;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
/**
* Use the following classes for an intermediate serialized tree structure
*/
@JsonTypeInfo(
use = JsonTypeInfo.Id.NAME,
include = JsonTypeInfo.As.PROPERTY,
property = "type"
)
@JsonSubTypes({
@JsonSubTypes.Type(value = SerialMap.class, name = "m"),
@JsonSubTypes.Type(value = SerialList.class, name = "l"),
@JsonSubTypes.Type(value = SerialValue.class, name = "v"),
@JsonSubTypes.Type(value = SerialUUID.class, name = "u")
})
public interface ISerialNode {
default <T extends ISerialNode> T assertType(Class<T> type) {
if (type.isInstance(this)) {
return (T) this;
}
throw new RuntimeException("Expected ISerialNode to be of type " + type.getName()
+ " but found " + this.getClass().getName() + " instead");
}
}

View File

@@ -0,0 +1,74 @@
package de.dhbwstuttgart.server.packet.dataContainers.serialized;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.function.Function;
import java.util.stream.Stream;
public class SerialList<I extends ISerialNode> extends ArrayList<I> implements ISerialNode {
public SerialList() {}
public SerialList(Collection<I> data) {
this.addAll(data);
}
public SerialList(Stream<I> data) {
this(data.toList());
}
public SerialList(I[] data) {
this(Arrays.stream(data).toList());
}
@SafeVarargs
@JsonIgnore
public static <A extends ISerialNode> ArrayList<A> from(A ...values) {
ArrayList<A> list = new SerialList<>();
Collections.addAll(list, values);
return list;
}
@JsonIgnore
public static <A,B extends ISerialNode> SerialList<B> fromMapped(Stream<A> data, Function<A,B> mapper) {
return new SerialList<>(data.map(mapper).toList());
}
@JsonIgnore
public static <A,B extends ISerialNode> SerialList<B> fromMapped(Collection<A> data, Function<A,B> mapper) {
return SerialList.fromMapped(data.stream(), mapper);
}
@JsonIgnore
public static <A,B extends ISerialNode> SerialList<B> fromMapped(A[] data, Function<A,B> mapper) {
return SerialList.fromMapped(Arrays.stream(data), mapper);
}
@JsonIgnore
public SerialList<SerialMap> assertListOfMaps() {
if (this.isEmpty() || this.get(0) instanceof SerialMap) {
return (SerialList<SerialMap>) this;
}
throw new RuntimeException("Required List to contain SerialMap elements but condition failed");
}
@JsonIgnore
public SerialList<SerialList<?>> assertListOfLists() {
if (this.isEmpty() || this.get(0) instanceof SerialList) {
return (SerialList<SerialList<?>>) this;
}
throw new RuntimeException("Required List to contain SerialList elements but condition failed");
}
@JsonIgnore
public SerialList<SerialValue<?>> assertListOfValues() {
if (this.isEmpty() || this.get(0) instanceof SerialValue) {
return (SerialList<SerialValue<?>>) this;
}
throw new RuntimeException("Required List to contain SerialValue elements but condition failed");
}
@JsonIgnore
public SerialList<SerialUUID> assertListOfUUIDs() {
if (this.isEmpty() || this.get(0) instanceof SerialUUID) {
return (SerialList<SerialUUID>) this;
}
throw new RuntimeException("Required List to contain SerialUUID elements but condition failed");
}
}
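A small sketch of the fromMapped helper (illustrative only; SourceLoc from this changeset stands in for any object whose toSerial produces a SerialMap):
// illustrative sketch, not part of this changeset
KeyStorage keyStorage = new KeyStorage();
List<SourceLoc> locations = List.of(new SourceLoc("A.jav", 1), new SourceLoc("B.jav", 2));
SerialList<SerialMap> serial = SerialList.fromMapped(locations, loc -> loc.toSerial(keyStorage));
// when reading back, the assert* helpers narrow the element type again
for (SerialMap entry : serial.assertListOfMaps()) {
    System.out.println(SourceLoc.fromSerial(entry));
}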

View File

@@ -0,0 +1,84 @@
package de.dhbwstuttgart.server.packet.dataContainers.serialized;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.HashMap;
import java.util.Map;
import javax.annotation.Nullable;
public class SerialMap extends HashMap<String, ISerialNode> implements ISerialNode {
public SerialMap() {
super();
}
public SerialMap(Map<String, ISerialNode> data) {
super(data);
}
@JsonIgnore
public void put(String key, Boolean value) {
this.put(key, new SerialValue<>(value));
}
@JsonIgnore
public void put(String key, String value) {
this.put(key, new SerialValue<>(value));
}
@JsonIgnore
public void put(String key, Number value) {
this.put(key, new SerialValue<>(value));
}
@JsonIgnore
private <T> T get(String key, Class<T> expectedType) {
if (!this.containsKey(key)) {
throw new RuntimeException("Missing required value " + key + " in SerialMap");
}
var element = this.get(key);
if (element != null && element.getClass() != expectedType) {
throw new RuntimeException(
"Required value " + key + " to be of type " + expectedType.getName() + " but found " + element.getClass().getName()
);
}
return (T)element;
}
@JsonIgnore
public SerialList<?> getList(String key) {
return this.get(key, SerialList.class);
}
@Nullable
@JsonIgnore
public SerialList<?> getListOrNull(String key) {
return this.containsKey(key) ? this.getList(key) : null;
}
@JsonIgnore
public SerialMap getMap(String key) {
return this.get(key, SerialMap.class);
}
@Nullable
@JsonIgnore
public SerialMap getMapOrNull(String key) {
return this.containsKey(key) ? this.getMap(key) : null;
}
@JsonIgnore
public SerialValue<?> getValue(String key) {
return this.get(key, SerialValue.class);
}
@JsonIgnore
public SerialUUID getUUID(String key) {
return this.get(key, SerialUUID.class);
}
@Nullable
@JsonIgnore
public SerialUUID getUUIDOrNull(String key) {
return this.containsKey(key) ? this.getUUID(key) : null;
}
}
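A minimal sketch of the typed put/get API above (illustrative only):
// illustrative sketch, not part of this changeset
SerialMap map = new SerialMap();
map.put("file", "Matrix.jav");                             // stored as SerialValue<String>
map.put("line", 42);                                       // stored as SerialValue<Number>
String file = map.getValue("file").getOf(String.class);
int line = map.getValue("line").getOf(Integer.class);      // getOf checks the runtime type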

View File

@@ -0,0 +1,13 @@
package de.dhbwstuttgart.server.packet.dataContainers.serialized;
public class SerialUUID implements ISerialNode {
public String uuid;
public SerialUUID() {}
public SerialUUID(String uuid) {
this.uuid = uuid;
}
}

View File

@@ -0,0 +1,28 @@
package de.dhbwstuttgart.server.packet.dataContainers.serialized;
import com.fasterxml.jackson.annotation.JsonIgnore;
public class SerialValue<T> implements ISerialNode {
public T value;
public static final SerialValue<Object> NULL = new SerialValue<>(null);
public SerialValue() {}
public SerialValue(T value) {
this.value = value;
}
@JsonIgnore
public <A> SerialValue<A> assertValueOf(Class<A> targetClass) {
if (this.value == null || targetClass.isInstance(this.value)) {
return (SerialValue<A>) this;
}
throw new RuntimeException("Required Value to contain " + targetClass.getName() + " value but condition failed on" +
" type " + this.value.getClass().getName());
}
@JsonIgnore
public <A> A getOf(Class<A> targetClass) {
return this.assertValueOf(targetClass).value;
}
}

View File

@@ -1,5 +1,6 @@
package de.dhbwstuttgart.syntaxtree.factory;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import java.io.Writer;
import java.lang.reflect.Modifier;
import java.util.*;
@@ -31,9 +32,13 @@ import org.antlr.v4.runtime.Token;
public class UnifyTypeFactory {
private static ArrayList<PlaceholderType> PLACEHOLDERS = new ArrayList<>();
public static FiniteClosure generateFC(List<ClassOrInterface> fromClasses, Writer logFile, ClassLoader classLoader, JavaTXCompiler compiler) throws ClassNotFoundException {
public static FiniteClosure generateFC(
List<ClassOrInterface> fromClasses,
Writer logFile,
ClassLoader classLoader,
JavaTXCompiler compiler,
PlaceholderRegistry placeholderRegistry
) throws ClassNotFoundException {
/*
Die transitive Hülle muss funktionieren.
Man darf schreiben List<A> extends AL<A>
@@ -44,7 +49,7 @@ public class UnifyTypeFactory {
Generell dürfen sie immer die gleichen Namen haben.
TODO: die transitive Hülle bilden
*/
return new FiniteClosure(FCGenerator.toUnifyFC(compiler, fromClasses, classLoader), logFile, compiler);
return new FiniteClosure(FCGenerator.toUnifyFC(compiler, fromClasses, classLoader, placeholderRegistry), logFile, compiler, placeholderRegistry);
}
public static UnifyPair generateSmallerPair(UnifyType tl, UnifyType tr, SourceLoc location){
@@ -67,23 +72,23 @@ public class UnifyTypeFactory {
* Convert from
* ASTType -> UnifyType
*/
public static UnifyType convert(JavaTXCompiler compiler, RefTypeOrTPHOrWildcardOrGeneric t, Boolean innerType){
public static UnifyType convert(JavaTXCompiler compiler, RefTypeOrTPHOrWildcardOrGeneric t, Boolean innerType, PlaceholderRegistry placeholderRegistry){
if (t instanceof GenericRefType){
return UnifyTypeFactory.convert(compiler, (GenericRefType)t, innerType);
return UnifyTypeFactory.convert(compiler, (GenericRefType)t, innerType, placeholderRegistry);
} else if (t instanceof TypePlaceholder){
return UnifyTypeFactory.convert(compiler, (TypePlaceholder)t, innerType);
return UnifyTypeFactory.convert(compiler, (TypePlaceholder)t, innerType, placeholderRegistry);
} else if (t instanceof ExtendsWildcardType){
return UnifyTypeFactory.convert(compiler, (ExtendsWildcardType)t, innerType);
return UnifyTypeFactory.convert(compiler, (ExtendsWildcardType)t, innerType, placeholderRegistry);
} else if (t instanceof SuperWildcardType) {
return UnifyTypeFactory.convert(compiler, (SuperWildcardType) t, innerType);
return UnifyTypeFactory.convert(compiler, (SuperWildcardType) t, innerType, placeholderRegistry);
} else if (t instanceof RefType){
return UnifyTypeFactory.convert(compiler, (RefType)t, innerType);
return UnifyTypeFactory.convert(compiler, (RefType)t, innerType, placeholderRegistry);
}
//Es wurde versucht ein Typ umzuwandeln, welcher noch nicht von der Factory abgedeckt ist
throw new NotImplementedException("Der Typ "+t+" kann nicht umgewandelt werden");
}
public static UnifyType convert(JavaTXCompiler compiler, RefType t, Boolean innerType){
public static UnifyType convert(JavaTXCompiler compiler, RefType t, Boolean innerType, PlaceholderRegistry placeholderRegistry){
//Check if it is a FunN Type:
Pattern p = Pattern.compile("Fun(\\d+)[$][$]");
Matcher m = p.matcher(t.getName().toString());
@@ -91,76 +96,76 @@ public class UnifyTypeFactory {
if(b){
Integer N = Integer.valueOf(m.group(1));
if((N + 1) == t.getParaList().size()){
return convertFunN(compiler, t.getParaList(), false);
return convertFunN(compiler, t.getParaList(), false, placeholderRegistry);
}
}
UnifyType ret;
List<UnifyType> params = new ArrayList<>();
if (t.getParaList() != null) {
for (RefTypeOrTPHOrWildcardOrGeneric pT : t.getParaList()) {
params.add(UnifyTypeFactory.convert(compiler, pT, true));
params.add(UnifyTypeFactory.convert(compiler, pT, true, placeholderRegistry));
}
}
var clazz = compiler.getClass(t.getName());
if (clazz != null && clazz.isInterface() && clazz.isFunctionalInterface()) {
var method = clazz.getMethods().stream().filter(x -> Modifier.isAbstract(x.modifier)).findFirst().orElseThrow();
var methodParams = method.getParameterList().getFormalparalist().stream().map(x -> convert(compiler, x.getType(), true)).toList();
var methodParams = method.getParameterList().getFormalparalist().stream().map(x -> convert(compiler, x.getType(), true, placeholderRegistry)).toList();
var generics = StreamSupport.stream(clazz.getGenerics().spliterator(), false).map(GenericTypeVar::getName).toList();
return new FunInterfaceType(t.getName().toString(), new TypeParams(params), methodParams, convert(compiler, method.getReturnType(), true), generics);
return new FunInterfaceType(t.getName().toString(), new TypeParams(params), methodParams, convert(compiler, method.getReturnType(), true, placeholderRegistry), generics);
}
return new ReferenceType(t.getName().toString(),new TypeParams(params));
}
public static UnifyType convertFunN(JavaTXCompiler compiler, List<RefTypeOrTPHOrWildcardOrGeneric> paraList, Boolean innerType){
public static UnifyType convertFunN(JavaTXCompiler compiler, List<RefTypeOrTPHOrWildcardOrGeneric> paraList, Boolean innerType, PlaceholderRegistry placeholderRegistry){
UnifyType ret;
List<UnifyType> params = new ArrayList<>();
if(paraList != null && paraList.size() > 0){
for(RefTypeOrTPHOrWildcardOrGeneric pT : paraList){
params.add(UnifyTypeFactory.convert(compiler, pT, false));
params.add(UnifyTypeFactory.convert(compiler, pT, false, placeholderRegistry));
}
}
ret = FunNType.getFunNType(new TypeParams(params));
return ret;
}
public static UnifyType convert(JavaTXCompiler compiler, TypePlaceholder tph, Boolean innerType){
public static UnifyType convert(JavaTXCompiler compiler, TypePlaceholder tph, Boolean innerType, PlaceholderRegistry placeholderRegistry) {
if (tph.getName().equals("AFR")) {
System.out.println("XXX"+innerType);
}
PlaceholderType ntph = new PlaceholderType(tph.getName(), tph.getVariance());
PlaceholderType ntph = new PlaceholderType(tph.getName(), tph.getVariance(), placeholderRegistry);
ntph.setVariance(tph.getVariance());
ntph.setOrCons(tph.getOrCons());
ntph.setWildcardtable(tph.getWildcardtable());
int in = PLACEHOLDERS.indexOf(ntph);
int in = placeholderRegistry.UnifyTypeFactory_PLACEHOLDERS.indexOf(ntph);
if (in == -1) {
PLACEHOLDERS.add(ntph);
placeholderRegistry.UnifyTypeFactory_PLACEHOLDERS.add(ntph);
ntph.setInnerType(innerType);
return ntph;
}
else {
PlaceholderType oldpht = PLACEHOLDERS.get(in);
PlaceholderType oldpht = placeholderRegistry.UnifyTypeFactory_PLACEHOLDERS.get(in);
oldpht.setInnerType(oldpht.isInnerType() || innerType);
return oldpht;
}
}
public static UnifyType convert(JavaTXCompiler compiler, GenericRefType t, Boolean innerType){
public static UnifyType convert(JavaTXCompiler compiler, GenericRefType t, Boolean innerType, PlaceholderRegistry placeholderRegistry){
return new ReferenceType(t.getParsedName(), true);
}
public static UnifyType convert(JavaTXCompiler compiler, WildcardType t, Boolean innerType){
public static UnifyType convert(JavaTXCompiler compiler, WildcardType t, Boolean innerType, PlaceholderRegistry placeholderRegistry){
if(t.isExtends())
return new ExtendsType(UnifyTypeFactory.convert(compiler, t.getInnerType(), false));
return new ExtendsType(UnifyTypeFactory.convert(compiler, t.getInnerType(), false, placeholderRegistry));
else if(t.isSuper())
return new SuperType(UnifyTypeFactory.convert(compiler, t.getInnerType(), false));
return new SuperType(UnifyTypeFactory.convert(compiler, t.getInnerType(), false, placeholderRegistry));
else throw new NotImplementedException();
}
public static ConstraintSet<UnifyPair> convert(JavaTXCompiler compiler, ConstraintSet<Pair> constraints) {
return constraints.map(c -> UnifyTypeFactory.convert(compiler, c));
public static ConstraintSet<UnifyPair> convert(JavaTXCompiler compiler, ConstraintSet<Pair> constraints, PlaceholderRegistry placeholderRegistry) {
return constraints.map(c -> UnifyTypeFactory.convert(compiler, c, placeholderRegistry));
}
//NEVER USED
@@ -171,30 +176,30 @@ public class UnifyTypeFactory {
// return unifyPairConstraint;
//}
public static UnifyPair convert(JavaTXCompiler compiler, Pair p) {
public static UnifyPair convert(JavaTXCompiler compiler, Pair p, PlaceholderRegistry placeholderRegistry) {
UnifyPair ret = null;
if(p.GetOperator().equals(PairOperator.SMALLERDOT)) {
ret = generateSmallerDotPair(UnifyTypeFactory.convert(compiler, p.TA1, false)
, UnifyTypeFactory.convert(compiler, p.TA2, false), p.getLocation());
ret = generateSmallerDotPair(UnifyTypeFactory.convert(compiler, p.TA1, false, placeholderRegistry)
, UnifyTypeFactory.convert(compiler, p.TA2, false, placeholderRegistry), p.getLocation());
//return ret;
}else if(p.GetOperator().equals(PairOperator.SMALLERNEQDOT)) {
ret = generateSmallNotEqualDotPair(UnifyTypeFactory.convert(compiler, p.TA1, false)
, UnifyTypeFactory.convert(compiler, p.TA2, false), p.getLocation());
ret = generateSmallNotEqualDotPair(UnifyTypeFactory.convert(compiler, p.TA1, false, placeholderRegistry)
, UnifyTypeFactory.convert(compiler, p.TA2, false, placeholderRegistry), p.getLocation());
//return ret;
}else if(p.GetOperator().equals(PairOperator.EQUALSDOT)) {
ret = generateEqualDotPair(UnifyTypeFactory.convert(compiler, p.TA1, false)
, UnifyTypeFactory.convert(compiler, p.TA2, false), p.getLocation());
ret = generateEqualDotPair(UnifyTypeFactory.convert(compiler, p.TA1, false, placeholderRegistry)
, UnifyTypeFactory.convert(compiler, p.TA2, false, placeholderRegistry), p.getLocation());
//return ret;
}else if(p.GetOperator().equals(PairOperator.SMALLER)){
ret = generateSmallerPair(UnifyTypeFactory.convert(compiler, p.TA1, false),
UnifyTypeFactory.convert(compiler, p.TA2, false), p.getLocation());
ret = generateSmallerPair(UnifyTypeFactory.convert(compiler, p.TA1, false, placeholderRegistry),
UnifyTypeFactory.convert(compiler, p.TA2, false, placeholderRegistry), p.getLocation());
}else throw new NotImplementedException();
UnifyType lhs, rhs;
if (((lhs = ret.getLhsType()) instanceof PlaceholderType)
&& ((PlaceholderType)lhs).isWildcardable()
&& (rhs = ret.getRhsType()) instanceof PlaceholderType) {
if (lhs.getName().equals("AQ")) {
System.out.println("");
// System.out.println("");
}
((PlaceholderType)rhs).enableWildcardtable();
}
@@ -203,7 +208,7 @@ public class UnifyTypeFactory {
&& ((PlaceholderType)rhs).isWildcardable()
&& (lhs = ret.getLhsType()) instanceof PlaceholderType) {
if (rhs.getName().equals("AQ")) {
System.out.println("");
// System.out.println("");
}
((PlaceholderType)lhs).enableWildcardtable();
}
@@ -214,16 +219,16 @@ public class UnifyTypeFactory {
* Convert from
* UnifyType -> ASTType
*/
public static Set<ResultPair> convert(Set<UnifyPair> unifyPairSet, Map<String,TypePlaceholder> tphs) {
public static Set<ResultPair> convert(Set<UnifyPair> unifyPairSet, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
return unifyPairSet.stream().map(
unifyPair -> convert(unifyPair, tphs))
unifyPair -> convert(unifyPair, tphs, placeholderRegistry))
.collect(Collectors.toSet());
}
public static ResultPair convert(UnifyPair mp, Map<String,TypePlaceholder> tphs) {
public static ResultPair convert(UnifyPair mp, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
if (mp == null) { return null;} //can happen with basePairs
RefTypeOrTPHOrWildcardOrGeneric tl = UnifyTypeFactory.convert(mp.getLhsType(), tphs);
RefTypeOrTPHOrWildcardOrGeneric tr = UnifyTypeFactory.convert(mp.getRhsType(), tphs);
RefTypeOrTPHOrWildcardOrGeneric tl = UnifyTypeFactory.convert(mp.getLhsType(), tphs, placeholderRegistry);
RefTypeOrTPHOrWildcardOrGeneric tr = UnifyTypeFactory.convert(mp.getRhsType(), tphs, placeholderRegistry);
if(tl instanceof TypePlaceholder){
if(tr instanceof TypePlaceholder) {
@@ -232,7 +237,7 @@ public class UnifyTypeFactory {
//Simply ignore it. TODO: this needs to be fixed:
//return new PairTPHequalRefTypeOrWildcardType((TypePlaceholder)tl, ASTFactory.createObjectType());
}else{
return new PairTPHsmallerTPH((TypePlaceholder)tl, (TypePlaceholder)tr, convert(mp.getBasePair(), tphs));
return new PairTPHsmallerTPH((TypePlaceholder)tl, (TypePlaceholder)tr, convert(mp.getBasePair(), tphs, placeholderRegistry));
}
}else if(tr instanceof RefType){
return new PairTPHequalRefTypeOrWildcardType((TypePlaceholder)tl, (RefType) tr);
@@ -244,29 +249,29 @@ public class UnifyTypeFactory {
}else return new PairNoResult(tl, tr);//throw new NotImplementedException();
}
public static RefTypeOrTPHOrWildcardOrGeneric convert(ReferenceType t, Map<String,TypePlaceholder> tphs) {
public static RefTypeOrTPHOrWildcardOrGeneric convert(ReferenceType t, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
if(JavaClassName.Void.equals(t.getName()))return new Void(new NullToken());
if (t.isGenTypeVar()) return new GenericRefType(t.getName(),new NullToken());
RefType ret = new RefType(new JavaClassName(t.getName()),convert(t.getTypeParams(), tphs),new NullToken());
RefType ret = new RefType(new JavaClassName(t.getName()),convert(t.getTypeParams(), tphs, placeholderRegistry),new NullToken());
return ret;
}
public static RefTypeOrTPHOrWildcardOrGeneric convert(FunNType t, Map<String,TypePlaceholder> tphs) {
RefType ret = new RefType(new JavaClassName(t.getName()), convert(t.getTypeParams(), tphs), new NullToken());
public static RefTypeOrTPHOrWildcardOrGeneric convert(FunNType t, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
RefType ret = new RefType(new JavaClassName(t.getName()), convert(t.getTypeParams(), tphs, placeholderRegistry), new NullToken());
return ret;
}
public static RefTypeOrTPHOrWildcardOrGeneric convert(SuperType t, Map<String,TypePlaceholder> tphs) {
RefTypeOrTPHOrWildcardOrGeneric innerType = convert(t.getSuperedType(), tphs);
public static RefTypeOrTPHOrWildcardOrGeneric convert(SuperType t, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
RefTypeOrTPHOrWildcardOrGeneric innerType = convert(t.getSuperedType(), tphs, placeholderRegistry);
return new SuperWildcardType(innerType, new NullToken());
}
public static RefTypeOrTPHOrWildcardOrGeneric convert(ExtendsType t, Map<String,TypePlaceholder> tphs) {
RefTypeOrTPHOrWildcardOrGeneric innerType = convert(t.getExtendedType(), tphs);
public static RefTypeOrTPHOrWildcardOrGeneric convert(ExtendsType t, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
RefTypeOrTPHOrWildcardOrGeneric innerType = convert(t.getExtendedType(), tphs, placeholderRegistry);
return new ExtendsWildcardType(innerType, new NullToken());
}
public static RefTypeOrTPHOrWildcardOrGeneric convert(PlaceholderType t, Map<String,TypePlaceholder> tphs) {
public static RefTypeOrTPHOrWildcardOrGeneric convert(PlaceholderType t, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
TypePlaceholder ret = tphs.get(t.getName());
if(ret == null){ //This TPH was created by the unification algorithm
ret = TypePlaceholder.fresh(new NullToken());
@@ -276,19 +281,19 @@ public class UnifyTypeFactory {
return ret;
}
public static RefTypeOrTPHOrWildcardOrGeneric convert(UnifyType t, Map<String,TypePlaceholder> tphs) {
if(t instanceof FunNType)return convert((FunNType) t, tphs);
if(t instanceof ReferenceType)return convert((ReferenceType) t, tphs);
if(t instanceof SuperType)return convert((SuperType) t, tphs);
if(t instanceof ExtendsType)return convert((ExtendsType) t, tphs);
if(t instanceof PlaceholderType)return convert((PlaceholderType) t, tphs);
public static RefTypeOrTPHOrWildcardOrGeneric convert(UnifyType t, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
if(t instanceof FunNType)return convert((FunNType) t, tphs, placeholderRegistry);
if(t instanceof ReferenceType)return convert((ReferenceType) t, tphs, placeholderRegistry);
if(t instanceof SuperType)return convert((SuperType) t, tphs, placeholderRegistry);
if(t instanceof ExtendsType)return convert((ExtendsType) t, tphs, placeholderRegistry);
if(t instanceof PlaceholderType)return convert((PlaceholderType) t, tphs, placeholderRegistry);
throw new NotImplementedException("Der Typ "+t+" kann nicht umgewandelt werden");
}
private static List<RefTypeOrTPHOrWildcardOrGeneric> convert(TypeParams typeParams, Map<String,TypePlaceholder> tphs) {
private static List<RefTypeOrTPHOrWildcardOrGeneric> convert(TypeParams typeParams, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
List<RefTypeOrTPHOrWildcardOrGeneric> ret = new ArrayList<>();
for(UnifyType uT : typeParams){
RefTypeOrTPHOrWildcardOrGeneric toAdd = convert(uT, tphs);
RefTypeOrTPHOrWildcardOrGeneric toAdd = convert(uT, tphs, placeholderRegistry);
ret.add(toAdd);
}
return ret;
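/*
 * Minimal usage sketch for the reworked factory: the former static PLACEHOLDERS list
 * now lives in a PlaceholderRegistry instance that is threaded through every convert(...)
 * call. The no-arg PlaceholderRegistry constructor and the astConstraints variable are
 * assumptions; the convert signature is the one introduced above.
 */
PlaceholderRegistry registry = new PlaceholderRegistry(); // assumed constructor
ConstraintSet<UnifyPair> unifyConstraints =
        UnifyTypeFactory.convert(compiler, astConstraints, registry); // astConstraints: a ConstraintSet<Pair> built elsewhere
// The same registry instance also has to be handed to the FiniteClosure factory method at
// the top of this file, so placeholder names stay unique across conversion and closure.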


@@ -1,8 +1,13 @@
package de.dhbwstuttgart.syntaxtree.type;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.ASTVisitor;
import de.dhbwstuttgart.typeinference.result.ResultSetVisitor;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import org.antlr.v4.runtime.Token;
import java.util.Objects;
@@ -15,7 +20,7 @@ import java.util.Objects;
*
*/
public class ExtendsWildcardType extends WildcardType{
public class ExtendsWildcardType extends WildcardType implements ISerializableData {
/**
* Author: Arne Lüdtke<br/>
@@ -68,4 +73,22 @@ public class ExtendsWildcardType extends WildcardType{
ExtendsWildcardType that = (ExtendsWildcardType) o;
return that.innerType.equals(this.innerType);
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("innerType", this.innerType.toSerial(keyStorage));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static ExtendsWildcardType fromSerial(SerialMap data, UnifyContext context) {
return new ExtendsWildcardType(
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(data.getMap("innerType"), context),
new NullToken()
);
}
}


@@ -1,57 +1,77 @@
package de.dhbwstuttgart.syntaxtree.type;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.ASTVisitor;
import de.dhbwstuttgart.typeinference.result.ResultSetVisitor;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import org.antlr.v4.runtime.Token;
import java.util.Objects;
public class GenericRefType extends RefTypeOrTPHOrWildcardOrGeneric
{
private String name;
public class GenericRefType extends RefTypeOrTPHOrWildcardOrGeneric implements ISerializableData {
private String name;
public GenericRefType(String name, Token offset)
{
super(offset);
this.name = name;
}
public GenericRefType(String name, Token offset) {
super(offset);
this.name = name;
}
public String getParsedName(){
return name.toString();
}
public String getParsedName() {
return name.toString();
}
@Override
public void accept(ASTVisitor visitor) {
visitor.visit(this);
}
@Override
public void accept(ASTVisitor visitor) {
visitor.visit(this);
}
@Override
public <A> A acceptTV(TypeVisitor<A> visitor) {
return visitor.visit(this);
}
@Override
public <A> A acceptTV(TypeVisitor<A> visitor) {
return visitor.visit(this);
}
@Override
public void accept(ResultSetVisitor visitor) {
visitor.visit(this);
}
@Override
public void accept(ResultSetVisitor visitor) {
visitor.visit(this);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
GenericRefType that = (GenericRefType) o;
return name.equals(that.name);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
GenericRefType that = (GenericRefType) o;
return name.equals(that.name);
}
@Override
public int hashCode() {
return Objects.hash(name);
}
@Override
public int hashCode() {
return Objects.hash(name);
}
@Override
public String toString()
{
return "GTV " + this.name;
}
@Override
public String toString() {
return "GTV " + this.name;
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("name", this.name);
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static GenericRefType fromSerial(SerialMap data, UnifyContext context) {
return new GenericRefType(
data.getValue("name").getOf(String.class),
new NullToken()
);
}
}


@@ -1,8 +1,15 @@
package de.dhbwstuttgart.syntaxtree.type;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.parser.scope.JavaClassName;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.ASTVisitor;
import de.dhbwstuttgart.typeinference.result.ResultSetVisitor;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import org.antlr.v4.runtime.Token;
import java.util.ArrayList;
@@ -11,122 +18,137 @@ import java.util.List;
import java.util.Objects;
public class RefType extends RefTypeOrTPHOrWildcardOrGeneric
{
protected final JavaClassName name;
protected final List<RefTypeOrTPHOrWildcardOrGeneric> parameter;
/**
* If primitiveFlag is true, this RefType has to be replaced by the
* corresponding primitive data type during code generation
*
* Example: java.lang.Integer with the flag set then becomes [int]
*/
boolean primitiveFlag = false; // TODO Should be final
public class RefType extends RefTypeOrTPHOrWildcardOrGeneric implements ISerializableData {
protected final JavaClassName name;
protected final List<RefTypeOrTPHOrWildcardOrGeneric> parameter;
/**
* If primitiveFlag is true, this RefType has to be replaced by the
* corresponding primitive data type during code generation
* <p>
* Example: java.lang.Integer with the flag set then becomes [int]
*/
boolean primitiveFlag = false; // TODO Should be final
public RefType(JavaClassName fullyQualifiedName, Token offset)
{
this(fullyQualifiedName, new ArrayList<>(), offset);
public RefType(JavaClassName fullyQualifiedName, Token offset) {
this(fullyQualifiedName, new ArrayList<>(), offset);
}
public boolean isPrimitive() {
return primitiveFlag;
}
@Override
public String toString() {
String params = "";
if (parameter.size() > 0) {
params += "<";
Iterator<RefTypeOrTPHOrWildcardOrGeneric> it = parameter.iterator();
while (it.hasNext()) {
RefTypeOrTPHOrWildcardOrGeneric param = it.next();
params += param.toString();
if (it.hasNext()) params += ", ";
}
params += ">";
}
return this.name.toString() + params;
}
@Override
public int hashCode() {
return this.name.hashCode();//Hash only the name. Makes for slow but working HashMaps
}
public RefType(JavaClassName fullyQualifiedName, List<RefTypeOrTPHOrWildcardOrGeneric> parameter, Token offset) {
this(fullyQualifiedName, parameter, offset, false);
}
public RefType(JavaClassName fullyQualifiedName, List<RefTypeOrTPHOrWildcardOrGeneric> parameter, Token offset, boolean primitiveFlag) {
super(offset);
this.name = (fullyQualifiedName);
this.parameter = parameter;
this.primitiveFlag = primitiveFlag;
}
public JavaClassName getName() {
return name;
}
public List<RefTypeOrTPHOrWildcardOrGeneric> getParaList() {
if (this.parameter == null) return new ArrayList<>();
return this.parameter;
}
/**
* Author: Jörg Bäuerle<br/>
*
* @return
*/
public boolean equals(Object obj) {
if (!(obj instanceof RefType refObj)) {
return false;
}
public boolean isPrimitive() {
return primitiveFlag;
}
if (!Objects.equals(this.name, refObj.name)) return false;
boolean ret = true;
@Override
public String toString(){
String params = "";
if(parameter.size()>0){
params += "<";
Iterator<RefTypeOrTPHOrWildcardOrGeneric> it = parameter.iterator();
while(it.hasNext()){
RefTypeOrTPHOrWildcardOrGeneric param = it.next();
params += param.toString();
if(it.hasNext())params += ", ";
}
params += ">";
//if(!(super.equals(obj))) PL 2020-03-12 might need to be commented back in
// return false;
if (parameter == null || parameter.size() == 0) {
ret &= (refObj.getParaList() == null || refObj.getParaList().isEmpty());
} else {
if (refObj.getParaList() == null) {
ret = false;
} else if (parameter.size() != refObj.getParaList().size()) {
ret = false;
} else {
for (int i = 0; i < parameter.size(); i++) {
ret &= parameter.get(i).equals(refObj.getParaList().get(i));
}
return this.name.toString() + params;
}
}
return ret;
@Override
public int hashCode() {
return this.name.hashCode();//Hash only the name. Makes for slow but working HashMaps
}
}
public RefType(JavaClassName fullyQualifiedName, List<RefTypeOrTPHOrWildcardOrGeneric> parameter, Token offset) {
this(fullyQualifiedName, parameter, offset, false);
}
public RefType(JavaClassName fullyQualifiedName, List<RefTypeOrTPHOrWildcardOrGeneric> parameter, Token offset, boolean primitiveFlag) {
super(offset);
this.name = (fullyQualifiedName);
this.parameter = parameter;
this.primitiveFlag = primitiveFlag;
}
@Override
public void accept(ASTVisitor visitor) {
visitor.visit(this);
}
public JavaClassName getName()
{
return name;
}
@Override
public <A> A acceptTV(TypeVisitor<A> visitor) {
return visitor.visit(this);
}
public List<RefTypeOrTPHOrWildcardOrGeneric> getParaList(){
if(this.parameter==null)return new ArrayList<>();
return this.parameter;
}
@Override
public void accept(ResultSetVisitor visitor) {
visitor.visit(this);
}
/**
* Author: Jörg Bäuerle<br/>
* @return
*/
public boolean equals(Object obj)
{
if(obj instanceof RefType){
if (!Objects.equals(this.name, ((RefType) obj).name)) return false;
boolean ret = true;
//if(!(super.equals(obj))) PL 2020-03-12 might need to be commented back in
// return false;
if(parameter==null || parameter.size()==0){
ret &= (((RefType)obj).getParaList()==null || ((RefType)obj).getParaList().size()==0);
}
else{
if(((RefType)obj).getParaList()==null){
ret = false;
}
else if(parameter.size() != ((RefType)obj).getParaList().size())
{
ret = false;
}
else
{
for(int i = 0; i<parameter.size(); i++)
{
ret &= parameter.get(i).equals(((RefType)obj).getParaList().get(i));
}
}
}
return ret;
}
else{
return false;
}
}
@Override
public ISerialNode toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("isPrimitive", this.primitiveFlag);
serialized.put("name", this.name.toString());
serialized.put("parameters", SerialList.fromMapped(this.parameter, param -> param.toSerial(keyStorage)));
@Override
public void accept(ASTVisitor visitor) {
visitor.visit(this);
}
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
@Override
public <A> A acceptTV(TypeVisitor<A> visitor) {
return visitor.visit(this);
}
@Override
public void accept(ResultSetVisitor visitor) {
visitor.visit(this);
}
public static RefType fromSerial(SerialMap data, UnifyContext context) {
return new RefType(
new JavaClassName(data.getValue("name").getOf(String.class)),
data.getList("parameters").assertListOfMaps().stream()
.map(param -> RefTypeOrTPHOrWildcardOrGeneric.fromSerial(param, context))
.toList(),
new NullToken(),
data.getValue("isPrimitive").getOf(Boolean.class)
);
}
}


@@ -1,11 +1,17 @@
package de.dhbwstuttgart.syntaxtree.type;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import de.dhbwstuttgart.syntaxtree.ASTVisitor;
import de.dhbwstuttgart.syntaxtree.SyntaxTreeNode;
import de.dhbwstuttgart.typeinference.result.ResultSetVisitor;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import org.antlr.v4.runtime.Token;
public abstract class RefTypeOrTPHOrWildcardOrGeneric extends SyntaxTreeNode{
public abstract class RefTypeOrTPHOrWildcardOrGeneric extends SyntaxTreeNode implements ISerializableData {
public RefTypeOrTPHOrWildcardOrGeneric(Token offset) {
super(offset);
}
@@ -18,5 +24,26 @@ public abstract class RefTypeOrTPHOrWildcardOrGeneric extends SyntaxTreeNode{
@Override
public abstract boolean equals(Object o);
@Override
public ISerialNode toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("type", this.getClass().getSimpleName());
// we only insert null for the object and expect the child classes to call this and override the value with themselves
serialized.put("object", SerialValue.NULL);
return serialized;
}
public static RefTypeOrTPHOrWildcardOrGeneric fromSerial(SerialMap data, UnifyContext context) {
String type = data.getValue("type").getOf(String.class);
SerialMap object = data.getMap("object");
if (type.equals(ExtendsWildcardType.class.getSimpleName())) return ExtendsWildcardType.fromSerial(object, context);
else if (type.equals(GenericRefType.class.getSimpleName())) return GenericRefType.fromSerial(object, context);
else if (type.equals(SuperWildcardType.class.getSimpleName())) return SuperWildcardType.fromSerial(object, context);
else if (type.equals(RefType.class.getSimpleName())) return RefType.fromSerial(object, context);
else if (type.equals(Void.class.getSimpleName())) return Void.fromSerial(object, context);
else if (type.equals(TypePlaceholder.class.getSimpleName())) return TypePlaceholder.fromSerial(object, context);
else throw new RuntimeException("Could not unserialize class of unhandled type " + type);
}
}
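/*
 * Round-trip sketch for the type/object wrapper above: toSerial() stores the subclass
 * payload under "object" next to a "type" discriminator, and the static fromSerial()
 * dispatches on that discriminator. keyStorage and context are assumed to be provided
 * by the surrounding packet code.
 */
ExtendsWildcardType original = new ExtendsWildcardType(
        new RefType(new JavaClassName("java.lang.Integer"), new NullToken()), new NullToken());
SerialMap wrapper = original.toSerial(keyStorage); // {"type": "ExtendsWildcardType", "object": {"innerType": ...}}
RefTypeOrTPHOrWildcardOrGeneric restored =
        RefTypeOrTPHOrWildcardOrGeneric.fromSerial(wrapper, context); // picks ExtendsWildcardType.fromSerial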


@@ -1,9 +1,13 @@
package de.dhbwstuttgart.syntaxtree.type;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.ASTVisitor;
import de.dhbwstuttgart.syntaxtree.GenericTypeVar;
import de.dhbwstuttgart.typeinference.result.ResultSetVisitor;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import org.antlr.v4.runtime.Token;
import java.util.Objects;
@@ -16,7 +20,7 @@ import java.util.Objects;
*
*/
public class SuperWildcardType extends WildcardType{
public class SuperWildcardType extends WildcardType implements ISerializableData {
/**
* Author: Arne Lüdtke<br/>
@@ -80,4 +84,22 @@ public class SuperWildcardType extends WildcardType{
SuperWildcardType that = (SuperWildcardType) o;
return that.innerType.equals(this.innerType);
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("innerType", this.innerType.toSerial(keyStorage));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static SuperWildcardType fromSerial(SerialMap data, UnifyContext context) {
return new SuperWildcardType(
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(data.getMap("innerType"), context),
new NullToken()
);
}
}

View File

@@ -1,9 +1,11 @@
package de.dhbwstuttgart.syntaxtree.type;
import java.util.Hashtable;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.syntaxtree.ASTVisitor;
import de.dhbwstuttgart.syntaxtree.SyntaxTreeNode;
import de.dhbwstuttgart.syntaxtree.factory.NameGenerator;
import de.dhbwstuttgart.typeinference.result.ResultSetVisitor;
import org.antlr.v4.runtime.Token;
@@ -16,7 +18,7 @@ import org.antlr.v4.runtime.Token;
* @author Jörg Bäuerle
* @version $Date: 2013/06/19 12:45:37 $
*/
public class TypePlaceholder extends RefTypeOrTPHOrWildcardOrGeneric
public class TypePlaceholder extends RefTypeOrTPHOrWildcardOrGeneric implements ISerializableData
{
private final String name;
@@ -139,4 +141,26 @@ public class TypePlaceholder extends RefTypeOrTPHOrWildcardOrGeneric
public Boolean getWildcardtable() {
return wildcardable;
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("name", this.name);
serialized.put("variance", this.variance);
serialized.put("wildcardable", this.wildcardable);
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static TypePlaceholder fromSerial(SerialMap data, UnifyContext context) {
return new TypePlaceholder(
data.getValue("name").getOf(String.class),
new NullToken(),
data.getValue("variance").getOf(Integer.class),
data.getValue("wildcardable").getOf(Boolean.class)
);
}
}


@@ -1,14 +1,32 @@
package de.dhbwstuttgart.syntaxtree.type;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import org.antlr.v4.runtime.Token;
import de.dhbwstuttgart.parser.scope.JavaClassName;
public class Void extends RefType
public class Void extends RefType implements ISerializableData
{
public Void(Token offset) {
super(JavaClassName.Void, offset);
}
@Override
public ISerialNode toSerial(KeyStorage keyStorage) {
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
serializedWrapper.put("object", new SerialMap());
return serializedWrapper;
}
public static Void fromSerial(SerialMap data, UnifyContext context) {
return new Void(new NullToken());
}
}

View File

@@ -1,77 +1,148 @@
package de.dhbwstuttgart.typeinference.constraints;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialUUID;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import java.util.Collection;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
public class Constraint<A> extends HashSet<A> {
private static final long serialVersionUID = 1L;
private Boolean isInherited = false;//both are only needed for the method constraints
private Boolean isImplemented = false;
/*
* used during code generation to select the correct method signature
*/
/*private*/ Set<A> methodSignatureConstraint = new HashSet<>();
private Constraint<A> extendConstraint = null;
public Constraint() {
super();
}
public Constraint(Boolean isInherited, Boolean isImplemented) {
this.isInherited = isInherited;
this.isImplemented = isImplemented;
}
public Constraint(Boolean isInherited, Boolean isImplemented, Constraint<A> extendConstraint, Set<A> methodSignatureConstraint) {
this.isInherited = isInherited;
this.isImplemented = isImplemented;
this.extendConstraint = extendConstraint;
this.methodSignatureConstraint = methodSignatureConstraint;
}
public void setIsInherited(Boolean isInherited) {
this.isInherited = isInherited;
}
public Boolean isInherited() {
return isInherited;
}
public Boolean isImplemented() {
return isImplemented;
}
public Constraint<A> getExtendConstraint() {
return extendConstraint;
}
public void setExtendConstraint(Constraint<A> c) {
extendConstraint = c;
}
public Set<A> getmethodSignatureConstraint() {
return methodSignatureConstraint;
}
public void setmethodSignatureConstraint(Set<A> c) {
methodSignatureConstraint = c;
}
public class Constraint<A extends IConstraintElement> extends HashSet<A> implements ISerializableData {
private static final long serialVersionUID = 1L;
private Boolean isInherited = false;//both are only needed for the method constraints
private Boolean isImplemented = false;
/*
* used during code generation to select the correct method signature
*/
/*private*/ Set<A> methodSignatureConstraint = new HashSet<>();
private Constraint<A> extendConstraint = null;
public Constraint() {
super();
}
public Constraint(Boolean isInherited, Boolean isImplemented) {
this.isInherited = isInherited;
this.isImplemented = isImplemented;
}
public Constraint(Boolean isInherited, Boolean isImplemented, Constraint<A> extendConstraint, Set<A> methodSignatureConstraint) {
this.isInherited = isInherited;
this.isImplemented = isImplemented;
this.extendConstraint = extendConstraint;
this.methodSignatureConstraint = methodSignatureConstraint;
}
public void setIsInherited(Boolean isInherited) {
this.isInherited = isInherited;
}
public Boolean isInherited() {
return isInherited;
}
public Boolean isImplemented() {
return isImplemented;
}
public Constraint<A> getExtendConstraint() {
return extendConstraint;
}
public void setExtendConstraint(Constraint<A> c) {
extendConstraint = c;
}
public Set<A> getmethodSignatureConstraint() {
return methodSignatureConstraint;
}
public void setmethodSignatureConstraint(Set<A> c) {
methodSignatureConstraint = c;
}
public String toString() {
return super.toString() + "\nisInherited = " + isInherited + " isOveridden = " + isImplemented
+ methodSignatureConstraint
//" + extendsContraint: " + (extendConstraint != null ? extendConstraint.toStringBase() : "null" )
+ "\n";
}
public String toStringBase() {
return super.toString();
}
@Override
public SerialUUID toSerial(KeyStorage keyStorage) {
final String uuid = keyStorage.getIdentifier(this);
if (!keyStorage.isAlreadySerialized(uuid)) {
SerialMap serialized = new SerialMap();
keyStorage.putSerialized(uuid, serialized);
serialized.put("isInherited", isInherited);
serialized.put("extendedConstraint", extendConstraint == null ? null : extendConstraint.toSerial(keyStorage));
Function<A, ISerialNode> pairMapper = pair -> {
if (pair instanceof Pair simplePair) return simplePair.toSerial(keyStorage);
if (pair instanceof UnifyPair unifyPair) return unifyPair.toSerial(keyStorage);
throw new RuntimeException("No serialization is supported for type " + pair.getClass().getName());
};
serialized.put("methodSignatureConstraint", SerialList.fromMapped(methodSignatureConstraint, pairMapper));
serialized.put("setElements", SerialList.fromMapped(this, pairMapper));
}
// return only the unique key
return new SerialUUID(uuid);
}
public static <T extends IConstraintElement> Constraint<T> fromSerial(SerialUUID serialUUID, UnifyContext context, Class<T> target, KeyStorage keyStorage) {
String uuid = serialUUID.uuid;
if (!keyStorage.isAlreadyUnserialized(uuid)) {
Constraint<T> constraint = new Constraint<>();
// immediately add the object to the context to prevent infinite recursion
keyStorage.putUnserialized(uuid, constraint);
// retrieve the serialized data and start unserializing it
SerialMap data = keyStorage.getSerialized(uuid);
constraint.isInherited = data.getValue("isInherited").getOf(Boolean.class);
constraint.extendConstraint = Optional.ofNullable(data.getUUIDOrNull("extendedConstraint"))
.map(v -> Constraint.fromSerial(v, context, target, keyStorage))
.orElse(null);
// to convert the maps back to elements, we sadly have to do some assumptions about the generic types...
Function<ISerialNode, T> pairUnmapper = pairData -> {
if (target == Pair.class && pairData instanceof SerialMap pairMap) {
return (T) Pair.fromSerial(pairMap, context);
}
if (target == UnifyPair.class && pairData instanceof SerialUUID pairUUID) {
return (T) UnifyPair.fromSerial(pairUUID, context, keyStorage);
}
throw new RuntimeException("No serialization is supported for target type " + target.getName());
};
constraint.methodSignatureConstraint = data.getList("methodSignatureConstraint")
.stream().map(pairUnmapper).collect(Collectors.toSet());
constraint.addAll(
data.getList("setElements")
.stream().map(pairUnmapper).collect(Collectors.toSet()));
}
return keyStorage.getUnserialized(uuid, Constraint.class);
}
public String toString() {
return super.toString() + "\nisInherited = " + isInherited + " isOveridden = " + isImplemented
+ methodSignatureConstraint
//" + extendsContraint: " + (extendConstraint != null ? extendConstraint.toStringBase() : "null" )
+ "\n" ;
}
public String toStringBase() {
return super.toString();
}
}
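/*
 * Sketch of the reference-style (de)serialization above: a Constraint is written into the
 * KeyStorage exactly once under its identifier, and every further occurrence only embeds
 * the SerialUUID handle, so constraints that reference each other via extendConstraint do
 * not recurse forever. keyStorage and context are assumed to come from the enclosing
 * packet handling.
 */
Constraint<Pair> constraint = new Constraint<>();
constraint.setExtendConstraint(constraint); // deliberately cyclic reference
SerialUUID handle = constraint.toSerial(keyStorage); // the nested call hits isAlreadySerialized and stops
Constraint<Pair> restored = Constraint.fromSerial(handle, context, Pair.class, keyStorage);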


@@ -1,63 +1,67 @@
package de.dhbwstuttgart.typeinference.constraints;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.GuavaSetOperations;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import java.util.*;
import java.util.function.BinaryOperator;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;
public class ConstraintSet<A> {
Constraint<A> undConstraints = new Constraint<>();
List<Set<Constraint<A>>> oderConstraints = new ArrayList<>();
public class ConstraintSet<A extends IConstraintElement> implements ISerializableData {
Constraint<A> undConstraints = new Constraint<>();
List<Set<Constraint<A>>> oderConstraints = new ArrayList<>();
public void addUndConstraint(A p){
undConstraints.add(p);
}
public void addUndConstraint(A p) {
undConstraints.add(p);
}
public void addOderConstraint(Set<Constraint<A>> methodConstraints) {
oderConstraints.add(methodConstraints);
}
public void addOderConstraint(Set<Constraint<A>> methodConstraints) {
oderConstraints.add(methodConstraints);
}
public void addAllUndConstraint(Constraint<A> allUndConstraints){
undConstraints.addAll(allUndConstraints);
}
public void addAllOderConstraint(List<Set<Constraint<A>>> allOderConstraints){
this.oderConstraints.addAll(allOderConstraints);
}
public void addAll(ConstraintSet constraints) {
this.addAllUndConstraint(constraints.undConstraints);
this.addAllOderConstraint(constraints.oderConstraints);
}
public void addAllUndConstraint(Constraint<A> allUndConstraints) {
undConstraints.addAll(allUndConstraints);
}
@Override
public String toString(){
BinaryOperator<String> b = (x,y) -> x+y;
return "\nUND:" + this.undConstraints.toString() + "\n" +
"ODER:" + this.oderConstraints.stream().reduce("", (x,y) -> x.toString()+ "\n" +y, b);
//cartesianProduct().toString();
}
public void addAllOderConstraint(List<Set<Constraint<A>>> allOderConstraints) {
this.oderConstraints.addAll(allOderConstraints);
}
public Set<List<Constraint<A>>> cartesianProduct(){
Set<Constraint<A>> toAdd = new HashSet<>();
toAdd.add(undConstraints);
List<Set<Constraint<A>>> allConstraints = new ArrayList<>();
allConstraints.add(toAdd);
allConstraints.addAll(oderConstraints);
return new GuavaSetOperations().cartesianProduct(allConstraints);
}
public void addAll(ConstraintSet constraints) {
this.addAllUndConstraint(constraints.undConstraints);
this.addAllOderConstraint(constraints.oderConstraints);
}
public <B> ConstraintSet<B> map(Function<? super A, ? extends B> o) {
Hashtable<Constraint<A>,Constraint<B>> CSA2CSB = new Hashtable<>();
ConstraintSet<B> ret = new ConstraintSet<>();
ret.undConstraints = undConstraints.stream().map(o).collect(Collectors.toCollection(Constraint<B>::new));
List<Set<Constraint<B>>> newOder = new ArrayList<>();
@Override
public String toString() {
BinaryOperator<String> b = (x, y) -> x + y;
return "\nUND:" + this.undConstraints.toString() + "\n" +
"ODER:" + this.oderConstraints.stream().reduce("", (x, y) -> x.toString() + "\n" + y, b);
//cartesianProduct().toString();
}
public Set<List<Constraint<A>>> cartesianProduct() {
Set<Constraint<A>> toAdd = new HashSet<>();
toAdd.add(undConstraints);
List<Set<Constraint<A>>> allConstraints = new ArrayList<>();
allConstraints.add(toAdd);
allConstraints.addAll(oderConstraints);
return new GuavaSetOperations().cartesianProduct(allConstraints);
}
public <B extends IConstraintElement> ConstraintSet<B> map(Function<? super A, ? extends B> o) {
Hashtable<Constraint<A>, Constraint<B>> CSA2CSB = new Hashtable<>();
ConstraintSet<B> ret = new ConstraintSet<>();
ret.undConstraints = undConstraints.stream().map(o).collect(Collectors.toCollection(Constraint<B>::new));
List<Set<Constraint<B>>> newOder = new ArrayList<>();
/*
for(Set<Constraint<A>> oderConstraint : oderConstraints){
oderConstraint.forEach(as -> {
@@ -68,25 +72,25 @@ public class ConstraintSet<A> {
CSA2CSB.put(as, newConst);} );
}
*/
for(Set<Constraint<A>> oderConstraint : oderConstraints){
newOder.add(
oderConstraint.stream().map((Constraint<A> as) -> {
Constraint<B> newConst = as.stream()
.map(o)
.collect(Collectors.toCollection((
() -> new Constraint<B> (as.isInherited(),
as.isImplemented(),
(as.getExtendConstraint() != null)
? as.getExtendConstraint().stream().map(o).collect(Collectors.toCollection(Constraint::new))
: null,
as.getmethodSignatureConstraint().stream().map(o).collect(Collectors.toCollection(HashSet::new))))
));
//CSA2CSB.put(as, newConst);
return newConst;
for (Set<Constraint<A>> oderConstraint : oderConstraints) {
newOder.add(
oderConstraint.stream().map((Constraint<A> as) -> {
Constraint<B> newConst = as.stream()
.map(o)
.collect(Collectors.toCollection((
() -> new Constraint<B>(as.isInherited(),
as.isImplemented(),
(as.getExtendConstraint() != null)
? as.getExtendConstraint().stream().map(o).collect(Collectors.toCollection(Constraint::new))
: null,
as.getmethodSignatureConstraint().stream().map(o).collect(Collectors.toCollection(HashSet::new))))
));
//CSA2CSB.put(as, newConst);
return newConst;
/*
Constraint<B> bs = CSA2CSB.get(as);
@@ -95,36 +99,61 @@ public class ConstraintSet<A> {
}
return bs;
*/
}).collect(Collectors.toSet())
);
}
ret.oderConstraints = newOder;
return ret;
}).collect(Collectors.toSet())
);
}
public void forEach (Consumer<? super A> c) {
undConstraints.stream().forEach(c);
for(Set<Constraint<A>> oderConstraint : oderConstraints){
oderConstraint.parallelStream().forEach((Constraint<A> as) ->
as.stream().forEach(c));
}
ret.oderConstraints = newOder;
return ret;
}
public void forEach(Consumer<? super A> c) {
undConstraints.stream().forEach(c);
for (Set<Constraint<A>> oderConstraint : oderConstraints) {
oderConstraint.parallelStream().forEach((Constraint<A> as) ->
as.stream().forEach(c));
}
public Set<A> getAll () {
Set<A> ret = new HashSet<>();
ret.addAll(undConstraints);
for(Set<Constraint<A>> oderConstraint : oderConstraints){
oderConstraint.parallelStream().forEach((Constraint<A> as) -> ret.addAll(as));
}
return ret;
}
public List<Set<Constraint<A>>> getOderConstraints() {
return oderConstraints;
}
public Set<A> getUndConstraints() {
return undConstraints;
}
public Set<A> getAll() {
Set<A> ret = new HashSet<>();
ret.addAll(undConstraints);
for (Set<Constraint<A>> oderConstraint : oderConstraints) {
oderConstraint.parallelStream().forEach((Constraint<A> as) -> ret.addAll(as));
}
return ret;
}
public List<Set<Constraint<A>>> getOderConstraints() {
return oderConstraints;
}
public Set<A> getUndConstraints() {
return undConstraints;
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("undConstraints", undConstraints.toSerial(keyStorage));
serialized.put("oderConstraints", SerialList.fromMapped(oderConstraints, oderConstraintSet ->
SerialList.fromMapped(oderConstraintSet, oderConstraint ->
oderConstraint.toSerial(keyStorage))
));
return serialized;
}
public static <T extends IConstraintElement> ConstraintSet<T> fromSerial(SerialMap data, UnifyContext context, Class<T> target, KeyStorage keyStorage) {
ConstraintSet<T> constraintSet = new ConstraintSet<>();
constraintSet.undConstraints = Constraint.fromSerial(data.getUUID("undConstraints"), context, target, keyStorage);
constraintSet.oderConstraints = data.getList("oderConstraints").assertListOfLists().stream()
.map(oderConstraintSetData -> oderConstraintSetData.assertListOfUUIDs().stream()
.map(oderConstraintData -> Constraint.fromSerial(oderConstraintData, context, target, keyStorage))
.collect(Collectors.toSet())
).toList();
return constraintSet;
}
}
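/*
 * Round-trip sketch for the ConstraintSet serialization above, roughly how the constraints
 * would be shipped between client and server. somePair, keyStorage and context are
 * placeholders for objects built by the surrounding code.
 */
ConstraintSet<Pair> constraints = new ConstraintSet<>();
constraints.addUndConstraint(somePair);
SerialMap data = constraints.toSerial(keyStorage); // undConstraints as a UUID handle, oderConstraints as nested lists
ConstraintSet<Pair> restored = ConstraintSet.fromSerial(data, context, Pair.class, keyStorage);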


@@ -0,0 +1,4 @@
package de.dhbwstuttgart.typeinference.constraints;
public interface IConstraintElement {
}


@@ -1,72 +1,70 @@
package de.dhbwstuttgart.typeinference.constraints;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.parser.SourceLoc;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import org.antlr.v4.runtime.Token;
public class Pair implements Serializable
{
public final RefTypeOrTPHOrWildcardOrGeneric TA1;
public final RefTypeOrTPHOrWildcardOrGeneric TA2;
public class Pair implements Serializable, IConstraintElement, ISerializableData {
public final RefTypeOrTPHOrWildcardOrGeneric TA1;
public final RefTypeOrTPHOrWildcardOrGeneric TA2;
private SourceLoc location;
private SourceLoc location;
private PairOperator eOperator = PairOperator.SMALLER;
private Boolean noUnification = false;
private PairOperator eOperator = PairOperator.SMALLER;
private Boolean noUnification = false;
private Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2 )
{
this.TA1 = TA1;
this.TA2 = TA2;
if(TA1 == null || TA2 == null)
throw new NullPointerException();
eOperator = PairOperator.SMALLER;
}
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator eOp)
{
// constructor
this(TA1,TA2);
this.eOperator = eOp;
}
private Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2) {
this.TA1 = TA1;
this.TA2 = TA2;
if (TA1 == null || TA2 == null)
throw new NullPointerException();
eOperator = PairOperator.SMALLER;
}
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator e0p, SourceLoc location) {
this(TA1, TA2, e0p);
this.location = location;
}
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator eOp, Boolean noUnification)
{
// constructor
this(TA1,TA2);
this.eOperator = eOp;
this.noUnification = noUnification;
}
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator eOp) {
// constructor
this(TA1, TA2);
this.eOperator = eOp;
}
public SourceLoc getLocation() {
return this.location;
}
public String toString()
{
// otth: prints a pair as a string --> for debugging and comparison
String strElement1 = "NULL";
String strElement2 = "NULL";
String Operator = "<.";
if( TA1 != null )
strElement1 = TA1.toString();
if( TA2 != null )
strElement2 = TA2.toString();
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator e0p, SourceLoc location) {
this(TA1, TA2, e0p);
this.location = location;
}
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator eOp, Boolean noUnification) {
// constructor
this(TA1, TA2);
this.eOperator = eOp;
this.noUnification = noUnification;
}
public SourceLoc getLocation() {
return this.location;
}
public String toString() {
// otth: prints a pair as a string --> for debugging and comparison
String strElement1 = "NULL";
String strElement2 = "NULL";
String Operator = "<.";
if (TA1 != null)
strElement1 = TA1.toString();
if (TA2 != null)
strElement2 = TA2.toString();
/* PL commented out 2018-05-24
if(OperatorEqual())
@@ -76,80 +74,104 @@ public class Pair implements Serializable
if(OperatorSmallerExtends())
Operator = "<?";
*/
return "\n(" + strElement1 + " " + eOperator.toString() + " " + strElement2 + ")";
/*- Equals: " + bEqual*/
}
/**
* <br/>Author: Jörg Bäuerle
* @param obj
* @return
*/
public boolean equals(Object obj)
{
boolean ret = true;
ret &= (obj instanceof Pair);
if(!ret)return ret;
ret &= ((Pair)obj).TA1.equals(this.TA1);
ret &= ((Pair)obj).TA2.equals(this.TA2);
return ret;
}
return "\n(" + strElement1 + " " + eOperator.toString() + " " + strElement2 + ")";
/**
* Author: Arne Lüdtke<br/>
* Checks whether the operator is of type Equal.
*/
public boolean OperatorEqual()
{
return eOperator == PairOperator.EQUALSDOT;
}
/**
* Author: Arne Lüdtke<br/>
* Checks whether the operator is of type Smaller.
*/
public boolean OperatorSmaller()
{
return eOperator == PairOperator.SMALLER;
}
/**
* Author: Arne Lüdtke<br/>
* Checks whether the operator is of type SmallerExtends.
*/
public boolean OperatorSmallerExtends()
{
return eOperator == PairOperator.SMALLERDOTWC;
}
/**
* Author: Arne Lüdtke<br/>
* Returns the operator.
*/
public PairOperator GetOperator()
{
return eOperator;
}
/*- Equals: " + bEqual*/
}
public boolean OperatorSmallerDot() {
return eOperator == PairOperator.SMALLERDOT;
}
static public Map<String, TypePlaceholder> generateTPHMap(ConstraintSet<Pair> constraints) {
HashMap<String, TypePlaceholder> ret = new HashMap<>();
constraints.map((Pair p) -> {
if (p.TA1 instanceof TypePlaceholder) {
ret.put(((TypePlaceholder) p.TA1).getName(), (TypePlaceholder) p.TA1);
}
if (p.TA2 instanceof TypePlaceholder) {
ret.put(((TypePlaceholder) p.TA2).getName(), (TypePlaceholder) p.TA2);
}
return null;
});
return ret;
}
/**
* <br/>Author: Jörg Bäuerle
*
* @param obj
* @return
*/
public boolean equals(Object obj) {
return (
(obj instanceof Pair pairObj) &&
pairObj.TA1.equals(this.TA1) &&
pairObj.TA2.equals(this.TA2)
);
}
/**
* Author: Arne Lüdtke<br/>
* Checks whether the operator is of type Equal.
*/
public boolean OperatorEqual() {
return eOperator == PairOperator.EQUALSDOT;
}
/**
* Author: Arne Lüdtke<br/>
* Checks whether the operator is of type Smaller.
*/
public boolean OperatorSmaller() {
return eOperator == PairOperator.SMALLER;
}
/**
* Author: Arne Lüdtke<br/>
* Checks whether the operator is of type SmallerExtends.
*/
public boolean OperatorSmallerExtends() {
return eOperator == PairOperator.SMALLERDOTWC;
}
/**
* Author: Arne Lüdtke<br/>
* Returns the operator.
*/
public PairOperator GetOperator() {
return eOperator;
}
public boolean OperatorSmallerDot() {
return eOperator == PairOperator.SMALLERDOT;
}
static public Map<String, TypePlaceholder> generateTPHMap(ConstraintSet<Pair> constraints) {
HashMap<String, TypePlaceholder> ret = new HashMap<>();
constraints.map((Pair p) -> {
if (p.TA1 instanceof TypePlaceholder) {
ret.put(((TypePlaceholder) p.TA1).getName(), (TypePlaceholder) p.TA1);
}
if (p.TA2 instanceof TypePlaceholder) {
ret.put(((TypePlaceholder) p.TA2).getName(), (TypePlaceholder) p.TA2);
}
return null;
});
return ret;
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
// because toString() will output TA1 and TA2 recursively, we can ignore potential infinite recursion here too
SerialMap serialized = new SerialMap();
serialized.put("ta1", this.TA1.toSerial(keyStorage));
serialized.put("ta2", this.TA2.toSerial(keyStorage));
serialized.put("op", this.eOperator.toString());
serialized.put("noUnification", this.noUnification ? 1 : 0);
serialized.put("location", this.location == null ? null : this.location.toSerial(keyStorage));
return serialized;
}
public static Pair fromSerial(SerialMap data, UnifyContext context) {
String op = data.getValue("op").getOf(String.class);
SerialMap ta1 = data.getMap("ta1");
SerialMap ta2 = data.getMap("ta2");
Boolean noUnification = data.getValue("noUnification").getOf(Integer.class) == 1;
SerialMap location = data.getMapOrNull("location");
var pair = new Pair(
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(ta1, context),
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(ta2, context),
PairOperator.fromString(op),
noUnification
);
if (location != null) pair.location = SourceLoc.fromSerial(location);
return pair;
}
}
// ino.end
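/*
 * Small round-trip sketch for the flat Pair layout above (keys ta1, ta2, op, noUnification,
 * location). leftType and rightType stand for any two types built elsewhere; keyStorage and
 * context come from the surrounding code.
 */
Pair p = new Pair(leftType, rightType, PairOperator.SMALLERDOT);
SerialMap data = p.toSerial(keyStorage);
Pair restored = Pair.fromSerial(data, context); // the operator is restored via PairOperator.fromString(...)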


@@ -1,15 +1,19 @@
package de.dhbwstuttgart.typeinference.result;
import de.dhbwstuttgart.exceptions.NotImplementedException;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
/**
* contains all pairs that cannot occur in a result.
* They are needed for origPairs in PairTPHsmallerTPH, because pairs
* can occur there that are not results (e.g. with FunN$$)
*/
public class PairNoResult extends ResultPair<RefTypeOrTPHOrWildcardOrGeneric, RefTypeOrTPHOrWildcardOrGeneric>{
public class PairNoResult extends ResultPair<RefTypeOrTPHOrWildcardOrGeneric, RefTypeOrTPHOrWildcardOrGeneric>
implements ISerializableData {
//public final TypePlaceholder left;
//public final TypePlaceholder right;
@@ -17,7 +21,7 @@ public class PairNoResult extends ResultPair<RefTypeOrTPHOrWildcardOrGeneric, Re
* original pair from which this result pair was created,
* important for generated generics
*/
ResultPair origPair;
ResultPair<?,?> origPair;
public PairNoResult(RefTypeOrTPHOrWildcardOrGeneric left, RefTypeOrTPHOrWildcardOrGeneric right){
super(left, right);
@@ -29,4 +33,24 @@ public class PairNoResult extends ResultPair<RefTypeOrTPHOrWildcardOrGeneric, Re
throw new NotImplementedException();
//visitor.visit(this);
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("left", this.getLeft().toSerial(keyStorage));
serialized.put("right", this.getLeft().toSerial(keyStorage));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static PairNoResult fromSerial2(SerialMap data, UnifyContext context) {
SerialMap left = data.getMap("left");
SerialMap right = data.getMap("right");
return new PairNoResult(
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(left, context),
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(right, context)
);
}
}


@@ -1,9 +1,13 @@
package de.dhbwstuttgart.typeinference.result;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
public class PairTPHEqualTPH extends ResultPair<TypePlaceholder, TypePlaceholder> {
public class PairTPHEqualTPH extends ResultPair<TypePlaceholder, TypePlaceholder> implements ISerializableData {
public PairTPHEqualTPH(TypePlaceholder tl, TypePlaceholder tr) {
super(tl, tr);
}
@@ -12,4 +16,24 @@ public class PairTPHEqualTPH extends ResultPair<TypePlaceholder, TypePlaceholder
public void accept(ResultPairVisitor visitor) {
visitor.visit(this);
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("left", this.getLeft().toSerial(keyStorage));
serialized.put("right", this.getLeft().toSerial(keyStorage));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static PairTPHEqualTPH fromSerial2(SerialMap data, UnifyContext context) {
SerialMap left = data.getMap("left");
SerialMap right = data.getMap("right");
// both sides of a PairTPHEqualTPH are type placeholders, so the deserialized types are cast accordingly
return new PairTPHEqualTPH(
(TypePlaceholder) RefTypeOrTPHOrWildcardOrGeneric.fromSerial(left, context),
(TypePlaceholder) RefTypeOrTPHOrWildcardOrGeneric.fromSerial(right, context)
);
}
}

View File

@@ -1,13 +1,17 @@
package de.dhbwstuttgart.typeinference.result;
import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
/**
* Represents A =. RefType
*/
public class PairTPHequalRefTypeOrWildcardType extends ResultPair{
public class PairTPHequalRefTypeOrWildcardType extends ResultPair<TypePlaceholder, RefTypeOrTPHOrWildcardOrGeneric>
implements ISerializableData {
public final TypePlaceholder left;
public final RefTypeOrTPHOrWildcardOrGeneric right;
@@ -26,4 +30,24 @@ public class PairTPHequalRefTypeOrWildcardType extends ResultPair{
public String toString() {
return "(" + left.toString() + " = " + right.toString() + ")";
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("left", this.getLeft().toSerial(keyStorage));
serialized.put("right", this.getRight().toSerial(keyStorage));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static PairTPHequalRefTypeOrWildcardType fromSerial2(SerialMap data, UnifyContext context) {
SerialMap left = data.getMap("left");
SerialMap right = data.getMap("right");
// the left side of this pair is a type placeholder, so the deserialized type is cast accordingly
return new PairTPHequalRefTypeOrWildcardType(
(TypePlaceholder) RefTypeOrTPHOrWildcardOrGeneric.fromSerial(left, context),
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(right, context)
);
}
}

View File

@@ -1,12 +1,17 @@
package de.dhbwstuttgart.typeinference.result;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
/**
* Represents: A <. B
*/
public class PairTPHsmallerTPH extends ResultPair{
public class PairTPHsmallerTPH extends ResultPair<TypePlaceholder,TypePlaceholder>
implements ISerializableData {
public final TypePlaceholder left;
public final TypePlaceholder right;
@@ -14,7 +19,7 @@ public class PairTPHsmallerTPH extends ResultPair{
* urspruengliches Paar aus diesem dieses Resultpair erzeugt wurde
* wichtig fuer generated Generics
*/
ResultPair origPair;
ResultPair<?,?> origPair;
public PairTPHsmallerTPH(TypePlaceholder left, TypePlaceholder right){
super(left, right);
@@ -22,7 +27,7 @@ public class PairTPHsmallerTPH extends ResultPair{
this.right = right;
}
public PairTPHsmallerTPH(TypePlaceholder left, TypePlaceholder right, ResultPair origPair){
public PairTPHsmallerTPH(TypePlaceholder left, TypePlaceholder right, ResultPair<?,?> origPair){
this(left, right);
this.origPair = origPair;
}
@@ -36,4 +41,24 @@ public class PairTPHsmallerTPH extends ResultPair{
public String toString() {
return "(" + left.toString() + " < " + right.toString() + ")";
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("left", this.getLeft().toSerial(keyStorage));
serialized.put("right", this.getLeft().toSerial(keyStorage));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static PairTPHsmallerTPH fromSerial2(SerialMap data, UnifyContext context) {
SerialMap left = data.getMap("left");
SerialMap right = data.getMap("right");
// both sides of a PairTPHsmallerTPH are type placeholders, so the deserialized types are cast accordingly
return new PairTPHsmallerTPH(
(TypePlaceholder) RefTypeOrTPHOrWildcardOrGeneric.fromSerial(left, context),
(TypePlaceholder) RefTypeOrTPHOrWildcardOrGeneric.fromSerial(right, context)
);
}
}

View File

@@ -1,11 +1,17 @@
package de.dhbwstuttgart.typeinference.result;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
/**
* Pairs that represent the unification result
*/
public abstract class ResultPair<A extends RefTypeOrTPHOrWildcardOrGeneric,B extends RefTypeOrTPHOrWildcardOrGeneric> {
public abstract class ResultPair<A extends RefTypeOrTPHOrWildcardOrGeneric,B extends RefTypeOrTPHOrWildcardOrGeneric>
implements ISerializableData {
private final A left;
private final B right;
@@ -58,5 +64,26 @@ public abstract class ResultPair<A extends RefTypeOrTPHOrWildcardOrGeneric,B ext
return false;
return true;
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("type", this.getClass().getSimpleName());
// we only insert null for the object and expect the child classes to call this and override the value with themselves
serialized.put("object", SerialValue.NULL);
return serialized;
}
public static <A extends RefTypeOrTPHOrWildcardOrGeneric,B extends RefTypeOrTPHOrWildcardOrGeneric> ResultPair<A,B>
fromSerial(SerialMap data, UnifyContext context) {
String type = data.getValue("type").getOf(String.class);
SerialMap object = data.getMap("object");
if (type.equals(PairNoResult.class.getSimpleName())) return (ResultPair)PairNoResult.fromSerial2(object, context);
else if (type.equals(PairTPHEqualTPH.class.getSimpleName())) return (ResultPair)PairTPHEqualTPH.fromSerial2(object, context);
else if (type.equals(PairTPHsmallerTPH.class.getSimpleName())) return (ResultPair)PairTPHsmallerTPH.fromSerial2(object, context);
else if (type.equals(PairTPHequalRefTypeOrWildcardType.class.getSimpleName())) return (ResultPair)PairTPHequalRefTypeOrWildcardType.fromSerial2(object, context);
else throw new RuntimeException("Could not unserialize class of unhandled type " + type);
}
}
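
To illustrate the type/object wrapper contract above, a minimal round-trip sketch; tphA, tphB, keyStorage and context stand for existing objects (two TypePlaceholders, a KeyStorage and a UnifyContext) that are assumed and not constructed here:

ResultPair<TypePlaceholder, TypePlaceholder> pair = new PairTPHsmallerTPH(tphA, tphB);
SerialMap data = pair.toSerial(keyStorage);                 // subclass fills "object", ResultPair adds "type"
ResultPair<?, ?> restored = ResultPair.fromSerial(data, context); // dispatches on "type" back to PairTPHsmallerTPH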

View File

@@ -1,5 +1,10 @@
package de.dhbwstuttgart.typeinference.result;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.HashSet;
import java.util.Set;
@@ -10,148 +15,164 @@ import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.SuperWildcardType;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import java.util.stream.Collectors;
@SuppressWarnings("rawtypes")
public class ResultSet {
public class ResultSet implements ISerializableData {
public final Set<ResultPair> results;
public Set<ResultPair<TypePlaceholder, TypePlaceholder>> genIns;
public final Set<ResultPair> results;
public Set<ResultPair<TypePlaceholder, TypePlaceholder>> genIns;
public ResultSet(Set<ResultPair> set){
this.results = set;
this.genIns = new HashSet<>();
results.forEach(x -> { if (x instanceof PairTPHsmallerTPH) { this.genIns.add(x);}} );
}
public boolean contains(ResultPair toCheck) {
return this.results.contains(toCheck);
}
public void remove(ResultPair toCheck) {
results.remove(toCheck);
public ResultSet(Set<ResultPair> set) {
this.results = set;
this.genIns = new HashSet<>();
results.forEach(x -> {
if (x instanceof PairTPHsmallerTPH) {
this.genIns.add(x);
}
});
}
public boolean contains(ResultPair toCheck) {
return this.results.contains(toCheck);
}
public void remove(ResultPair toCheck) {
results.remove(toCheck);
}
public ResolvedType resolveType(RefTypeOrTPHOrWildcardOrGeneric type) {
if (type instanceof TypePlaceholder)
return new Resolver(this).resolve((TypePlaceholder) type);
if (type instanceof GenericRefType) return new ResolvedType(type, new HashSet<>());
if (type instanceof RefType) {
RelatedTypeWalker related = new RelatedTypeWalker(null, this);
type.accept(related);
return new ResolvedType(type, related.relatedTPHs);
} else {
throw new NotImplementedException();
//return new ResolvedType(type,new HashSet<>());
}
}
public ResolvedType resolveType(RefTypeOrTPHOrWildcardOrGeneric type) {
if(type instanceof TypePlaceholder)
return new Resolver(this).resolve((TypePlaceholder)type);
if(type instanceof GenericRefType)return new ResolvedType(type, new HashSet<>());
if(type instanceof RefType) {
RelatedTypeWalker related = new RelatedTypeWalker(null, this);
type.accept(related);
return new ResolvedType(type, related.relatedTPHs);
} else {
throw new NotImplementedException();
//return new ResolvedType(type,new HashSet<>());
}
public String toString() {
return results.toString();
}
@Override
public boolean equals(Object o) {
if (o instanceof ResultSet) {
ResultSet other = (ResultSet) o;
return this.results.equals(other.results);
} else {
return false;
}
}
public String toString() {
return results.toString();
}
@Override
public int hashCode() {
return results.hashCode();
}
@Override
public boolean equals(Object o) {
if (o instanceof ResultSet) {
ResultSet other = (ResultSet)o;
return this.results.equals(other.results);
} else {
return false;
}
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("results", SerialList.fromMapped(results, result -> result.toSerial(keyStorage)));
return serialized;
}
@Override
public int hashCode() {
return results.hashCode();
}
public static ResultSet fromSerial(SerialMap data, UnifyContext context) {
var resultsData = data.getList("results").assertListOfMaps();
return new ResultSet(resultsData.stream().map(resultData -> ResultPair.fromSerial(resultData, context)).collect(Collectors.toSet()));
}
}
class Resolver implements ResultSetVisitor {
private final ResultSet result;
private TypePlaceholder toResolve;
private RefTypeOrTPHOrWildcardOrGeneric resolved;
private final Set<GenericInsertPair> additionalTPHs = new HashSet<>();
private ResultPair<?,?> currentPair;
private final ResultSet result;
private TypePlaceholder toResolve;
private RefTypeOrTPHOrWildcardOrGeneric resolved;
private final Set<GenericInsertPair> additionalTPHs = new HashSet<>();
private ResultPair<?, ?> currentPair;
public Resolver(ResultSet resultPairs){
this.result = resultPairs;
public Resolver(ResultSet resultPairs) {
this.result = resultPairs;
}
public ResolvedType resolve(TypePlaceholder tph) {
toResolve = tph;
resolved = null;
System.out.println(tph.toString());
for (ResultPair<?, ?> resultPair : result.results) {
if (resultPair instanceof PairTPHEqualTPH && ((PairTPHEqualTPH) resultPair).getLeft().equals(toResolve)) {
currentPair = resultPair;
return resolve(((PairTPHEqualTPH) resultPair).getRight());
}
}
for (ResultPair<?, ?> resultPair : result.results) {
currentPair = resultPair;
resultPair.accept(this);
}
if (resolved == null) {//TPH kommt nicht im Result vor:
resolved = tph;
}
public ResolvedType resolve(TypePlaceholder tph){
toResolve = tph;
resolved = null;
System.out.println(tph.toString());
for(ResultPair<?,?> resultPair : result.results) {
if(resultPair instanceof PairTPHEqualTPH && ((PairTPHEqualTPH) resultPair).getLeft().equals(toResolve)){
currentPair = resultPair;
return resolve(((PairTPHEqualTPH) resultPair).getRight());
}
}
for(ResultPair<?,?> resultPair : result.results){
currentPair = resultPair;
resultPair.accept(this);
}
if(resolved==null){//TPH kommt nicht im Result vor:
resolved = tph;
}
ResolvedType result = new ResolvedType(resolved, additionalTPHs);//resolved;
result.setResultPair(currentPair);
return result;
}
ResolvedType result = new ResolvedType(resolved, additionalTPHs);//resolved;
result.setResultPair(currentPair);
return result;
@Override
public void visit(PairTPHsmallerTPH p) {
currentPair = p;
if (p.left.equals(toResolve)) {
additionalTPHs.add(new GenericInsertPair(p.left, p.right));
additionalTPHs.addAll(new RelatedTypeWalker(p.right, result).relatedTPHs);
}
if (p.right.equals(toResolve))
additionalTPHs.addAll(new RelatedTypeWalker(p.left, result).relatedTPHs);
}
@Override
public void visit(PairTPHsmallerTPH p) {
currentPair = p;
if(p.left.equals(toResolve)){
additionalTPHs.add(new GenericInsertPair(p.left, p.right));
additionalTPHs.addAll(new RelatedTypeWalker(p.right, result).relatedTPHs);
}
if(p.right.equals(toResolve))
additionalTPHs.addAll(new RelatedTypeWalker(p.left, result).relatedTPHs);
@Override
public void visit(PairTPHequalRefTypeOrWildcardType p) {
currentPair = p;
if (p.left.equals(toResolve)) {
resolved = p.right;
RelatedTypeWalker related = new RelatedTypeWalker(null, result);
p.right.accept(related);
additionalTPHs.addAll(related.relatedTPHs);
}
}
@Override
public void visit(PairTPHequalRefTypeOrWildcardType p) {
currentPair = p;
if(p.left.equals(toResolve)){
resolved = p.right;
RelatedTypeWalker related = new RelatedTypeWalker(null, result);
p.right.accept(related);
additionalTPHs.addAll(related.relatedTPHs);
}
}
@Override
public void visit(PairTPHEqualTPH p) {
//Do nothing. Dieser Fall wird in der resolve-Methode abgefangen
}
@Override
public void visit(PairTPHEqualTPH p) {
//Do nothing. Dieser Fall wird in der resolve-Methode abgefangen
}
@Override
public void visit(RefType refType) {
@Override
public void visit(RefType refType) {
}
}
@Override
public void visit(GenericRefType genericRefType) {
@Override
public void visit(GenericRefType genericRefType) {
}
}
@Override
public void visit(SuperWildcardType superWildcardType) {
@Override
public void visit(SuperWildcardType superWildcardType) {
}
}
@Override
public void visit(TypePlaceholder typePlaceholder) {
@Override
public void visit(TypePlaceholder typePlaceholder) {
}
}
@Override
public void visit(ExtendsWildcardType extendsWildcardType) {
@Override
public void visit(ExtendsWildcardType extendsWildcardType) {
}
}
}
@@ -161,149 +182,150 @@ class Resolver implements ResultSetVisitor {
@SuppressWarnings("rawtypes")
class TPHResolver implements ResultSetVisitor {
private final TypePlaceholder tph;
Set<GenericInsertPair> resolved = new HashSet<>();
private final ResultSet resultSet;
private final TypePlaceholder tph;
Set<GenericInsertPair> resolved = new HashSet<>();
private final ResultSet resultSet;
TPHResolver(TypePlaceholder tph, ResultSet resultSet){
this.resultSet = resultSet;
this.tph = tph;
for(ResultPair p : resultSet.results){
p.accept(this);
}
if(resolved.size() == 0){
resolved.add(new GenericInsertPair(tph, null));
}
TPHResolver(TypePlaceholder tph, ResultSet resultSet) {
this.resultSet = resultSet;
this.tph = tph;
for (ResultPair p : resultSet.results) {
p.accept(this);
}
@Override
public void visit(PairTPHsmallerTPH p) {
if(p.left.equals(tph) || p.right.equals(tph)){
resolved.add(new GenericInsertPair(p.left, p.right));
}
if (resolved.size() == 0) {
resolved.add(new GenericInsertPair(tph, null));
}
}
@Override
public void visit(PairTPHequalRefTypeOrWildcardType p) {
TypePlaceholder otherSide = null;
if(p.right.equals(tph)){
otherSide = p.left;
}
if(otherSide != null){
Set<ResultPair> newResultSet = new HashSet<>(this.resultSet.results);
newResultSet.remove(p);
resolved.addAll(new TPHResolver(otherSide, new ResultSet(newResultSet)).resolved);
}
@Override
public void visit(PairTPHsmallerTPH p) {
if (p.left.equals(tph) || p.right.equals(tph)) {
resolved.add(new GenericInsertPair(p.left, p.right));
}
}
@Override
public void visit(PairTPHEqualTPH p) {
//ignorieren. Wird vom Resolver behandelt
@Override
public void visit(PairTPHequalRefTypeOrWildcardType p) {
TypePlaceholder otherSide = null;
if (p.right.equals(tph)) {
otherSide = p.left;
}
@Override
public void visit(RefType refType) {
if (otherSide != null) {
Set<ResultPair> newResultSet = new HashSet<>(this.resultSet.results);
newResultSet.remove(p);
resolved.addAll(new TPHResolver(otherSide, new ResultSet(newResultSet)).resolved);
}
}
@Override
public void visit(GenericRefType genericRefType) {
@Override
public void visit(PairTPHEqualTPH p) {
//ignorieren. Wird vom Resolver behandelt
}
}
@Override
public void visit(RefType refType) {
@Override
public void visit(SuperWildcardType superWildcardType) {
}
}
@Override
public void visit(GenericRefType genericRefType) {
@Override
public void visit(TypePlaceholder typePlaceholder) {
}
}
@Override
public void visit(SuperWildcardType superWildcardType) {
@Override
public void visit(ExtendsWildcardType extendsWildcardType) {
}
}
@Override
public void visit(TypePlaceholder typePlaceholder) {
}
@Override
public void visit(ExtendsWildcardType extendsWildcardType) {
}
}
@SuppressWarnings("rawtypes")
class RelatedTypeWalker implements ResultSetVisitor {
final Set<GenericInsertPair> relatedTPHs = new HashSet<>();
private final TypePlaceholder toResolve;
private final ResultSet resultSet;
final Set<GenericInsertPair> relatedTPHs = new HashSet<>();
private final TypePlaceholder toResolve;
private final ResultSet resultSet;
/**
* Läuft über das resultSet und speichert alle TPHs, welche mit start in Verbindung stehen
* @param start - kann null sein, wenn der Walker für einen RefType benutzt wird
* @param resultSet
*/
RelatedTypeWalker(TypePlaceholder start, ResultSet resultSet){
this.toResolve = start;
this.resultSet = resultSet;
int resolved = 0;
do{
resolved = relatedTPHs.size();
for(ResultPair p : resultSet.results){
p.accept(this);
p.accept(this);
}
}while(resolved - relatedTPHs.size() > 0);
}
/**
* Läuft über das resultSet und speichert alle TPHs, welche mit start in Verbindung stehen
*
* @param start - kann null sein, wenn der Walker für einen RefType benutzt wird
* @param resultSet
*/
RelatedTypeWalker(TypePlaceholder start, ResultSet resultSet) {
this.toResolve = start;
this.resultSet = resultSet;
int resolved = 0;
do {
resolved = relatedTPHs.size();
for (ResultPair p : resultSet.results) {
p.accept(this);
p.accept(this);
}
} while (resolved - relatedTPHs.size() > 0);
}
@Override
public void visit(PairTPHsmallerTPH p) {
if(p.getRight().equals(toResolve)){
relatedTPHs.addAll(new TPHResolver(p.right, resultSet).resolved);
//relatedTPHs.addAll(new RelatedTypeWalker(p.right, resultSet).relatedTPHs);
}
if(p.getLeft().equals(toResolve)){
relatedTPHs.addAll(new TPHResolver(p.left, resultSet).resolved);
//relatedTPHs.addAll(new RelatedTypeWalker(p.left, resultSet).relatedTPHs);
}
@Override
public void visit(PairTPHsmallerTPH p) {
if (p.getRight().equals(toResolve)) {
relatedTPHs.addAll(new TPHResolver(p.right, resultSet).resolved);
//relatedTPHs.addAll(new RelatedTypeWalker(p.right, resultSet).relatedTPHs);
}
if (p.getLeft().equals(toResolve)) {
relatedTPHs.addAll(new TPHResolver(p.left, resultSet).resolved);
//relatedTPHs.addAll(new RelatedTypeWalker(p.left, resultSet).relatedTPHs);
}
}
@Override
public void visit(PairTPHequalRefTypeOrWildcardType p) {
if(p.getLeft().equals(toResolve)){
p.getRight().accept(this);
}
@Override
public void visit(PairTPHequalRefTypeOrWildcardType p) {
if (p.getLeft().equals(toResolve)) {
p.getRight().accept(this);
}
}
@Override
public void visit(PairTPHEqualTPH p) {
//Kann ignoriert werden. Diese Fälle werden vom Resolver behandelt
}
@Override
public void visit(PairTPHEqualTPH p) {
//Kann ignoriert werden. Diese Fälle werden vom Resolver behandelt
}
/*
Die folgenden Funktionen fügen alle TPHs an die relatedTPHs an, denen sie begegnen:
Das wird verwendet, wenn alle relatedTPHs aus den Parametern eines RefTypes angefügt werden sollen
*/
@Override
public void visit(RefType refType) {
for(RefTypeOrTPHOrWildcardOrGeneric param : refType.getParaList()){
param.accept(this);
}
@Override
public void visit(RefType refType) {
for (RefTypeOrTPHOrWildcardOrGeneric param : refType.getParaList()) {
param.accept(this);
}
}
@Override
public void visit(SuperWildcardType superWildcardType) {
superWildcardType.getInnerType().accept(this);
}
@Override
public void visit(SuperWildcardType superWildcardType) {
superWildcardType.getInnerType().accept(this);
}
@Override
public void visit(TypePlaceholder typePlaceholder) {
relatedTPHs.addAll(new TPHResolver(typePlaceholder, resultSet).resolved);
}
@Override
public void visit(TypePlaceholder typePlaceholder) {
relatedTPHs.addAll(new TPHResolver(typePlaceholder, resultSet).resolved);
}
@Override
public void visit(ExtendsWildcardType extendsWildcardType) {
extendsWildcardType.getInnerType().accept(this);
}
@Override
public void visit(ExtendsWildcardType extendsWildcardType) {
extendsWildcardType.getInnerType().accept(this);
}
@Override
public void visit(GenericRefType genericRefType) {
}
@Override
public void visit(GenericRefType genericRefType) {
}
}

View File

@@ -0,0 +1,64 @@
package de.dhbwstuttgart.typeinference.unify;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.RecursiveTask;
public class ConcurrentSetMergeTask<T> extends RecursiveTask<Set<T>> {
public static <E> Set<E> merge(List<Set<E>> list) {
if (list.isEmpty()) {
return new HashSet<>();
}
var task = new ConcurrentSetMergeTask<>(list, 0, list.size());
return task.compute();
}
private static final int LIST_THRESHOLD = 3;
private static final int ELEMENT_THRESHOLD = 1000;
private final List<Set<T>> list;
private final int start;
private final int end;
private ConcurrentSetMergeTask(List<Set<T>> list, int start, int end) {
this.list = list;
this.start = start;
this.end = end;
}
@Override
protected Set<T> compute() {
int size = end - start;
int totalElements = 0;
for (int i = start+1; i < end; i++) {
totalElements += list.get(i).size();
}
System.out.println("ConcurrentSetMerge? -> " + (size <= LIST_THRESHOLD || totalElements < ELEMENT_THRESHOLD ? "true" : "false"));
// size will always be at least one
if (true || size <= LIST_THRESHOLD || totalElements < ELEMENT_THRESHOLD) {
Set<T> result = this.list.get(start);
for (int i = start+1; i < end; i++) {
result.addAll(list.get(i));
}
return result;
} else {
int mid = start + (size / 2);
ConcurrentSetMergeTask<T> leftTask = new ConcurrentSetMergeTask<>(list, start, mid);
ConcurrentSetMergeTask<T> rightTask = new ConcurrentSetMergeTask<>(list, mid, end);
leftTask.fork();
Set<T> rightResult = rightTask.compute();
Set<T> leftResult = leftTask.join();
// Merge results
leftResult.addAll(rightResult);
return leftResult;
}
}
}
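
A minimal usage sketch of the merge helper; the element type and values are arbitrary examples, and the input sets must be mutable because the first set is reused as the merge target:

List<Set<Integer>> parts = new ArrayList<>();
parts.add(new HashSet<>(Set.of(1, 2)));
parts.add(new HashSet<>(Set.of(2, 3)));
parts.add(new HashSet<>(Set.of(4)));
Set<Integer> merged = ConcurrentSetMergeTask.merge(parts); // {1, 2, 3, 4}; parts.get(0) is mutated in place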

View File

@@ -0,0 +1,63 @@
package de.dhbwstuttgart.typeinference.unify;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import java.util.ArrayList;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Calculate unique placeholder names
*/
public class PlaceholderRegistry {
private final Set<String> existingPlaceholders = ConcurrentHashMap.newKeySet();
private final AtomicInteger placeholderCount = new AtomicInteger();
public ArrayList<PlaceholderType> UnifyTypeFactory_PLACEHOLDERS = new ArrayList<>();
/**
* Add a placeholder into the list of existing ones, as soon as a new PlaceholderType is created
*
* @param placeholderName The placeholder to add
*/
public void addPlaceholder(String placeholderName) {
this.existingPlaceholders.add(placeholderName);
}
/**
* Generate a fresh placeholder name that is unique within this context
*
* @return The generated name
*/
public String generateFreshPlaceholderName() {
String name;
do {
int pc = placeholderCount.incrementAndGet();
name = getUppercaseTokenFromInt(pc);
}
while (existingPlaceholders.contains(name));
return name;
}
/**
* Generate a token consisting of uppercase letters (a base-26 representation of the value i)
*
* @param i The value that will be represented as a token
* @return The generated token
*/
private String getUppercaseTokenFromInt(int i) {
StringBuilder sb = new StringBuilder();
while (i >= 0) {
sb.append((char) ('A' + i % 26));
i = i / 26 - 1;
}
return sb.toString();
}
@Override
public String toString() {
return this.existingPlaceholders.toString();
}
}
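
A short sketch of how fresh names are drawn from the registry; the concrete output depends on the counter state, and registering the name manually is only needed when no PlaceholderType is created for it:

PlaceholderRegistry registry = new PlaceholderRegistry();
registry.addPlaceholder("B");                            // pretend "B" is already in use elsewhere
String fresh = registry.generateFreshPlaceholderName();  // counts up in base 26 and skips "B"
registry.addPlaceholder(fresh);                          // reserve the new name so it is not handed out again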

View File

@@ -38,14 +38,17 @@ import org.apache.commons.io.output.NullOutputStream;
public class RuleSet implements IRuleSet{
Writer logFile;
final PlaceholderRegistry placeholderRegistry;
public RuleSet() {
public RuleSet(PlaceholderRegistry placeholderRegistry) {
super();
logFile = new OutputStreamWriter(new NullOutputStream());
logFile = OutputStreamWriter.nullWriter();
this.placeholderRegistry = placeholderRegistry;
}
RuleSet(Writer logFile) {
RuleSet(Writer logFile, PlaceholderRegistry placeholderRegistry) {
this.logFile = logFile;
this.placeholderRegistry = placeholderRegistry;
}
@Override
@@ -864,7 +867,7 @@ public class RuleSet implements IRuleSet{
try {
logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNred: " + result + "\n");
logFile.flush();
// logFile.flush();
}
catch (IOException e) {
System.out.println("logFile-Error");
@@ -939,10 +942,10 @@ public class RuleSet implements IRuleSet{
UnifyType[] freshPlaceholders = new UnifyType[funNLhsType.getTypeParams().size()];
for(int i = 0; i < freshPlaceholders.length-1; i++) {
freshPlaceholders[i] = PlaceholderType.freshPlaceholder();
freshPlaceholders[i] = PlaceholderType.freshPlaceholder(placeholderRegistry);
((PlaceholderType)freshPlaceholders[i]).setVariance(inversVariance);
}
freshPlaceholders[freshPlaceholders.length-1] = PlaceholderType.freshPlaceholder();
freshPlaceholders[freshPlaceholders.length-1] = PlaceholderType.freshPlaceholder(placeholderRegistry);
((PlaceholderType)freshPlaceholders[freshPlaceholders.length-1]).setVariance(variance);
result.add(new UnifyPair(funNLhsType.getTypeParams().get(funNLhsType.getTypeParams().size()-1), freshPlaceholders[funNLhsType.getTypeParams().size()-1], PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
@@ -960,7 +963,7 @@ public class RuleSet implements IRuleSet{
try {
logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNgreater: " + result + "\n");
logFile.flush();
// logFile.flush();
}
catch (IOException e) {
System.out.println("lofFile-Error");
@@ -988,10 +991,10 @@ public class RuleSet implements IRuleSet{
UnifyType[] freshPlaceholders = new UnifyType[funNRhsType.getTypeParams().size()];
for(int i = 0; i < freshPlaceholders.length-1; i++) {
freshPlaceholders[i] = PlaceholderType.freshPlaceholder();
freshPlaceholders[i] = PlaceholderType.freshPlaceholder(placeholderRegistry);
((PlaceholderType)freshPlaceholders[i]).setVariance(inversVariance);
}
freshPlaceholders[freshPlaceholders.length-1] = PlaceholderType.freshPlaceholder();
freshPlaceholders[freshPlaceholders.length-1] = PlaceholderType.freshPlaceholder(placeholderRegistry);
((PlaceholderType)freshPlaceholders[freshPlaceholders.length-1]).setVariance(variance);
result.add(new UnifyPair(freshPlaceholders[funNRhsType.getTypeParams().size()-1], funNRhsType.getTypeParams().get(funNRhsType.getTypeParams().size()-1), PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
@@ -1010,7 +1013,7 @@ public class RuleSet implements IRuleSet{
try {
logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNsmaller: " + result + "\n");
logFile.flush();
// logFile.flush();
}
catch (IOException e) {
System.out.println("lofFile-Error");
@@ -1051,7 +1054,7 @@ public class RuleSet implements IRuleSet{
if(isGen)
result.add(new UnifyPair(rhsType, lhsType, PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
else {
UnifyType freshTph = PlaceholderType.freshPlaceholder();
UnifyType freshTph = PlaceholderType.freshPlaceholder(placeholderRegistry);
result.add(new UnifyPair(rhsType, new ExtendsType(freshTph), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
result.add(new UnifyPair(extendedType, freshTph, PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
}
@@ -1079,7 +1082,7 @@ public class RuleSet implements IRuleSet{
if(isGen)
result.add(new UnifyPair(rhsType, lhsType, PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
else {
UnifyType freshTph = PlaceholderType.freshPlaceholder();
UnifyType freshTph = PlaceholderType.freshPlaceholder(placeholderRegistry);
result.add(new UnifyPair(rhsType, new SuperType(freshTph), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
Set<UnifyType> fBounded = pair.getfBounded();
fBounded.add(lhsType);

View File

@@ -1,41 +1,44 @@
package de.dhbwstuttgart.typeinference.unify;
import java.io.FileWriter;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.util.Logger;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.lang.reflect.Type;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ForkJoinPool;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
public class TypeUnify {
private TypeUnify() {}
private static <T> T joinFuture(CompletableFuture<T> future) {
try {
return future.get();
}
catch (InterruptedException | ExecutionException exception) {
throw new RuntimeException(exception);
}
}
/**
* unify parallel ohne result modell
* @param undConstrains
* @param oderConstraints
* @param fc
* @param logFile
* @param log
* @param cons
* @return
*/
public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
ForkJoinPool pool = new ForkJoinPool();
pool.invoke(unifyTask);
Set<Set<UnifyPair>> res = unifyTask.join();
public static Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
ForkJoinPool pool = TypeUnify.createThreadPool();
UnifyContext context = unifyContext.newWithParallel(true).newWithExecutor(pool);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, context, 0);
Set<Set<UnifyPair>> res = joinFuture(unifyTask.compute());
try {
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements + "\n");
logFile.flush();
unifyContext.logFile().write("\nnoShortendElements: " + TypeUnifyTask.noShortendElements + "\n");
// logFile.flush();
}
catch (IOException e) {
System.err.println("no log-File");
@@ -45,46 +48,31 @@ public class TypeUnify {
/**
* unify asynchron mit Rückgabe UnifyResultModel ohne dass alle results gesammelt sind
* @param undConstrains
* @param oderConstraints
* @param fc
* @param logFile
* @param log
* @param cons
* @param ret
* @return
*/
public UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
ForkJoinPool pool = new ForkJoinPool();
pool.invoke(unifyTask);
return ret;
public static UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
ForkJoinPool pool = TypeUnify.createThreadPool();
UnifyContext context = unifyContext.newWithExecutor(pool);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, context, 0);
unifyTask.compute();
return unifyContext.resultModel();
}
/**
* unify parallel mit Rückgabe UnifyResultModel nachdem alle results gesammelt sind
* @param undConstrains
* @param oderConstraints
* @param fc
* @param logFile
* @param log
* @param cons
* @param ret
* @return
*/
public UnifyResultModel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
ForkJoinPool pool = new ForkJoinPool();
pool.invoke(unifyTask);
Set<Set<UnifyPair>> res = unifyTask.join();
public static Set<Set<UnifyPair>> unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
ForkJoinPool pool = TypeUnify.createThreadPool();
UnifyContext context = unifyContext.newWithParallel(true).newWithExecutor(pool);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, context, 0);
var result = joinFuture(unifyTask.compute());
try {
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
logFile.flush();
unifyContext.logFile().write("\nnoShortendElements: " + TypeUnifyTask.noShortendElements +"\n");
// logFile.flush();
}
catch (IOException e) {
System.err.println("no log-File");
}
return ret;
return result;
}
/*
@@ -97,20 +85,13 @@ public class TypeUnify {
/**
* unify sequentiell mit oderconstraints
* @param undConstrains
* @param oderConstraints
* @param fc
* @param logFile
* @param log
* @param cons
* @return
*/
public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0, ret, usedTasks);
Set<Set<UnifyPair>> res = unifyTask.compute();
public static Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, unifyContext.newWithParallel(false), 0);
Set<Set<UnifyPair>> res = joinFuture(unifyTask.compute());
try {
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
logFile.flush();
unifyContext.logFile().write("\nnoShortendElements: " + TypeUnifyTask.noShortendElements +"\n");
// logFile.flush();
}
catch (IOException e) {
System.err.println("no log-File");
@@ -118,4 +99,14 @@ public class TypeUnify {
return res;
}
private static ForkJoinPool createThreadPool() {
Logger.print("Available processors: " + Runtime.getRuntime().availableProcessors());
return new ForkJoinPool(
Runtime.getRuntime().availableProcessors(),
ForkJoinPool.defaultForkJoinWorkerThreadFactory,
null,
false
);
}
}
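
A usage sketch contrasting the blocking and the asynchronous entry points; undConstraints, oderConstraints, fc and context stand for values the caller already has and are not defined here:

// blocking: joins the CompletableFuture internally and returns the result sets
Set<Set<UnifyPair>> results = TypeUnify.unify(undConstraints, oderConstraints, fc, context);
// asynchronous: returns immediately, results arrive later via the UnifyResultModel listeners
UnifyResultModel model = TypeUnify.unifyAsync(undConstraints, oderConstraints, fc, context);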

View File

@@ -13,54 +13,52 @@ import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import java.util.concurrent.CompletableFuture;
public class TypeUnify2Task extends TypeUnifyTask {
Set<Set<UnifyPair>> setToFlatten;
Set<UnifyPair> methodSignatureConstraintUebergabe;
public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks, Set<UnifyPair> methodSignatureConstraintUebergabe) {
super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
this.setToFlatten = setToFlatten;
this.nextSetElement = nextSetElement;
this.methodSignatureConstraintUebergabe = methodSignatureConstraintUebergabe;
}
Set<UnifyPair> getNextSetElement() {
return nextSetElement;
}
@Override
protected Set<Set<UnifyPair>> compute() {
if (one) {
System.out.println("two");
}
one = true;
Set<Set<UnifyPair>> res = unify2(setToFlatten, eq, oderConstraintsField, fc, parallel, rekTiefeField, methodSignatureConstraintUebergabe);
Set<Set<UnifyPair>> setToFlatten;
Set<UnifyPair> methodSignatureConstraintUebergabe;
public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, UnifyContext context, int rekTiefe, Set<UnifyPair> methodSignatureConstraintUebergabe) {
super(eq, oderConstraints, fc, context, rekTiefe);
this.setToFlatten = setToFlatten;
this.nextSetElement = nextSetElement;
this.methodSignatureConstraintUebergabe = methodSignatureConstraintUebergabe;
}
public Set<UnifyPair> getNextSetElement() {
return nextSetElement;
}
@Override
public CompletableFuture<Set<Set<UnifyPair>>> compute() {
if (one) {
System.out.println("two");
}
one = true;
CompletableFuture<Set<Set<UnifyPair>>> res =
unify2(setToFlatten, eq, oderConstraintsField, fc, context.parallel(), rekTiefeField, methodSignatureConstraintUebergabe);
/*if (isUndefinedPairSetSet(res)) {
return new HashSet<>(); }
else
*/
//writeLog("xxx");
//noOfThread--;
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
else {
return res;
}
}
}
public void closeLogFile() {
*/
//writeLog("xxx");
//noOfThread--;
if (this.myIsCancelled()) {
return CompletableFuture.completedFuture(new HashSet<>());
} else {
return res;
}
}
try {
logFile.close();
}
catch (IOException ioE) {
System.err.println("no log-File" + thNo);
}
}
public void closeLogFile() {
try {
context.logFile().close();
} catch (IOException ioE) {
System.err.println("no log-File");
}
}
}

View File

@@ -0,0 +1,188 @@
package de.dhbwstuttgart.typeinference.unify;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
* A collection of encapsulated (and therefore static) helper functions that split up large algorithms in TypeUnifyTask
*/
public class TypeUnifyTaskHelper {
/**
* Filter all topLevelSets for those with a single element that contain only one pair:
* a <. theta,
* theta <. a or
* a =. theta
*/
public static Set<Set<UnifyPair>> getSingleElementSets(ArrayList<Set<? extends Set<UnifyPair>>> topLevelSets) {
return topLevelSets.stream()
.filter(x -> x.size() == 1)
.map(y -> y.stream().findFirst().get()).collect(Collectors.toSet());
}
/**
* Variance determination (start).
* It is an or-constraint if there is either no base pair or the base pairs differ => oderConstraint = true;
* variance = 1  => argument variable
* variance = -1 => return variable
* variance = 0  => unclear
* variance = 2  => operator or-constraints
*/
public static int calculateVariance(List<Set<UnifyPair>> nextSetasList) {
Optional<Integer> xi = nextSetasList.stream().map(x -> x.stream().filter(y -> (y.getLhsType() instanceof PlaceholderType && !(y.getRhsType() instanceof PlaceholderType)))
.filter(z -> ((PlaceholderType) z.getLhsType()).getVariance() != 0)
.map(c -> ((PlaceholderType) c.getLhsType()).getVariance())
.reduce((a, b) -> {
if (a == b) return a;
else return 0;
})) //2 kommt insbesondere bei Oder-Constraints vor
.filter(d -> d.isPresent())
.map(e -> e.get())
.findAny();
return xi.orElse(0);
}
/**
* Determine the variance from the first or-constraint in the list; falls back to 2 (operator call) if no receiver placeholder is found.
*/
public static int calculateOderConstraintVariance(List<Set<UnifyPair>> nextSetAsList) {
Optional<Integer> optVariance =
nextSetAsList
.getFirst()
.stream()
.filter(x -> x.getGroundBasePair().getLhsType() instanceof PlaceholderType &&
!(x.getRhsType() instanceof PlaceholderType) &&
x.getPairOp() == PairOperator.EQUALSDOT)
.map(x ->
((PlaceholderType) x.getGroundBasePair().getLhsType()).getVariance())
.reduce((n, m) -> (n != 0) ? n : m);
// For operator calls the variance is set to 2,
// because no receiver exists, i.e. no x.getGroundBasePair().getLhsType() instanceof PlaceholderType.
// With variance = 2 all elements of the cartesian product are processed.
return optVariance.orElse(2);
}
/**
* Find the first occurrence (if any) of a UnifyPair with operator EQUALSDOT while having
* one side equal to its base pair counterpart
*/
public static Optional<UnifyPair> findEqualityConstrainedUnifyPair(Set<UnifyPair> nextSetElement) {
return nextSetElement.stream().filter(x ->
x.getPairOp()
.equals(PairOperator.EQUALSDOT))
.filter(x -> // make sure that for a = ty, a really is the type variable we are looking for
x.getLhsType()
.equals(x.getBasePair().getLhsType()) ||
x.getLhsType()
.equals(x.getBasePair().getRhsType())
).findFirst();
}
/**
* Find all unifyPairs, that associate the identified type variable of origPair with any concrete type. That means:
* If "a = type" is in origPair, then we get all UnifyPairs that contain either "a < typeA" or "typeB < a"
*/
public static Set<UnifyPair> findConstraintsWithSameTVAssociation(UnifyPair origPair, Set<Set<UnifyPair>> singleElementSets) {
UnifyType tyVar = origPair.getLhsType();
if (!(tyVar instanceof PlaceholderType)) {
tyVar = origPair.getRhsType();
}
UnifyType tyVarEF = tyVar;
return singleElementSets.stream()
.map(xx ->
xx.iterator().next())
.filter(x ->
(x.getLhsType().equals(tyVarEF) && !(x.getRhsType() instanceof PlaceholderType))
||
(x.getRhsType().equals(tyVarEF) && !(x.getLhsType() instanceof PlaceholderType))
)
.collect(Collectors.toCollection(HashSet::new));
}
/**
* Check whether all pairs in the first element of nextSetAsList share the same (non-null) base pair
*/
public static boolean doesFirstNextSetHasSameBase(List<Set<UnifyPair>> nextSetAsList) {
if (nextSetAsList.isEmpty()) {
return false;
}
UnifyPair firstBasePair = null;
for (var unifyPair : nextSetAsList.getFirst().stream().toList()) {
var basePair = unifyPair.getBasePair();
// if any base pair is null, there is NOT always the same base!
if (basePair == null) {
return false;
}
if (firstBasePair == null) {
firstBasePair = basePair;
}
else if (!basePair.equals(firstBasePair)) {
return false;
}
}
return true;
}
/**
* Extracts data from every element in the nested result sets; which data is extracted is determined
* by the given extractor function
*/
public static Set<UnifyPair> collectFromThreadResult (
Set<Set<UnifyPair>> currentThreadResult,
Function<UnifyPair, Set<UnifyPair>> extractor
) {
return currentThreadResult.stream()
.map(b ->
b.stream()
.map(extractor)
.reduce((y, z) -> {
y.addAll(z);
return y;
})
.orElse(new HashSet<>()))
.reduce((y, z) -> {
y.addAll(z);
return y;
})
.orElse(new HashSet<>());
}
/**
* Extract a list of PlaceholderTypes from a set of pairs, such that each resulting element:
* - Is the LHS of a pair
* - Is a PlaceholderType
* - has a basePair Side that is a PlaceholderType with the same name
*/
public static List<PlaceholderType> extractMatchingPlaceholderTypes(Set<UnifyPair> pairs) {
return pairs.stream()
.filter(x -> {
UnifyType lhs = x.getLhsType();
UnifyType baseLhs = x.getBasePair().getLhsType();
UnifyType baseRhs = x.getBasePair().getRhsType();
return (lhs instanceof PlaceholderType) &&
((baseLhs instanceof PlaceholderType && lhs.getName().equals(baseLhs.getName())) ||
(baseRhs instanceof PlaceholderType && lhs.getName().equals(baseRhs.getName())));
})
.map(x -> (PlaceholderType) x.getLhsType())
.collect(Collectors.toCollection(ArrayList::new));
}
}
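
As an illustration of the extractor-based helper, a hedged sketch; it assumes UnifyPair#getSubstitution() returns a Set<UnifyPair>, as its usages elsewhere in this change suggest:

// given a Set<Set<UnifyPair>> currentThreadResult obtained from a unify2 call:
Set<UnifyPair> allSubstitutions =
TypeUnifyTaskHelper.collectFromThreadResult(currentThreadResult, UnifyPair::getSubstitution);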

View File

@@ -0,0 +1,70 @@
package de.dhbwstuttgart.typeinference.unify;
import java.io.Writer;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.atomic.AtomicInteger;
public record UnifyContext(
// main log file of a unification
Writer logFile,
// if logs should be made
Boolean log,
// if the unify algorithm should run in parallel
Boolean parallel,
// the model for storing calculated results
UnifyResultModel resultModel,
// the executor used for thread management in parallel execution
ExecutorService executor,
// a generator for new placeholders in this unify context
PlaceholderRegistry placeholderRegistry,
// a control structure to cancel the unification early
UnifyTaskModel usedTasks
) {
public UnifyContext(
Writer logFile,
Boolean log,
Boolean parallel,
UnifyResultModel resultModel,
UnifyTaskModel usedTasks,
ExecutorService executor,
PlaceholderRegistry placeholderRegistry
) {
this(logFile, log, parallel, resultModel, executor, placeholderRegistry, usedTasks);
}
public UnifyContext(
Writer logFile,
Boolean log,
Boolean parallel,
UnifyResultModel resultModel,
UnifyTaskModel usedTasks,
PlaceholderRegistry placeholderRegistry
) {
this(logFile, log, parallel, resultModel, usedTasks, ForkJoinPool.commonPool(), placeholderRegistry);
}
/*
* Shortcuts for creating a similar context with some properties changed. Combined with the final record
* components, this causes the UnifyContext to be handled essentially as an immutable value object.
*/
public UnifyContext newWithLogFile(Writer logFile) {
return new UnifyContext(logFile, log, parallel, resultModel, executor, placeholderRegistry, usedTasks);
}
public UnifyContext newWithParallel(boolean parallel) {
return new UnifyContext(logFile, log, parallel, resultModel, executor, placeholderRegistry, usedTasks);
}
public UnifyContext newWithExecutor(ExecutorService executor) {
return new UnifyContext(logFile, log, parallel, resultModel, executor, placeholderRegistry, usedTasks);
}
public UnifyContext newWithResultModel(UnifyResultModel resultModel) {
return new UnifyContext(logFile, log, parallel, resultModel, executor, placeholderRegistry, usedTasks);
}
}
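
A minimal sketch of the copy-on-change usage; logFile, resultModel, usedTasks and placeholderRegistry stand for previously created objects and are assumed here:

UnifyContext base = new UnifyContext(logFile, false, false, resultModel, usedTasks, placeholderRegistry);
UnifyContext parallelCtx = base.newWithParallel(true).newWithExecutor(ForkJoinPool.commonPool());
// 'base' is left untouched; every newWith... call returns a fresh record instance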

View File

@@ -36,19 +36,19 @@ public class UnifyResultModel {
listeners.remove(listenerToRemove);
}
public void notify(Set<Set<UnifyPair>> eqPrimePrimeSet) {
public void notify(Set<Set<UnifyPair>> eqPrimePrimeSet, UnifyContext context) {
Set<Set<UnifyPair>> eqPrimePrimeSetRet = eqPrimePrimeSet.stream().map(x -> {
Optional<Set<UnifyPair>> res = new RuleSet().subst(x.stream().map(y -> {
Optional<Set<UnifyPair>> res = new RuleSet(context.placeholderRegistry()).subst(x.stream().map(y -> {
if (y.getPairOp() == PairOperator.SMALLERDOTWC) y.setPairOp(PairOperator.EQUALSDOT);
return y; //alle Paare a <.? b erden durch a =. b ersetzt
}).collect(Collectors.toCollection(HashSet::new)));
if (res.isPresent()) {//wenn subst ein Erg liefert wurde was veraendert
return new TypeUnifyTask().applyTypeUnificationRules(res.get(), fc);
return new TypeUnifyTask(context).applyTypeUnificationRules(res.get(), fc);
}
else return x; //wenn nichts veraendert wurde wird x zurueckgegeben
}).collect(Collectors.toCollection(HashSet::new));
List<ResultSet> newResult = eqPrimePrimeSetRet.stream().map(unifyPairs ->
new ResultSet(UnifyTypeFactory.convert(unifyPairs, de.dhbwstuttgart.typeinference.constraints.Pair.generateTPHMap(cons))))
new ResultSet(UnifyTypeFactory.convert(unifyPairs, de.dhbwstuttgart.typeinference.constraints.Pair.generateTPHMap(cons), context.placeholderRegistry())))
.collect(Collectors.toList());
UnifyResultEvent evt = new UnifyResultEvent(newResult);

View File

@@ -0,0 +1,112 @@
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.util.Tuple;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;
public class Variance0Case extends VarianceCase {
protected final int variance = 0;
protected Variance0Case(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
super(isOderConstraint, typeUnifyTask, context);
}
@Override
public void selectNextData(
TypeUnifyTask typeUnifyTask,
List<Set<UnifyPair>> nextSetAsList,
Optional<UnifyPair> optOrigPair
) {
// if a <. theta, a maximal element is very likely
// if theta <. a, a minimal element is very likely
if (!this.isOderConstraint && optOrigPair != null && optOrigPair.isPresent()) {
if (optOrigPair.get().getBasePair().getLhsType() instanceof PlaceholderType) {
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
} else {
a = typeUnifyTask.oup.min(nextSetAsList.iterator());
}
nextSetAsList.remove(a);
} else if (this.isOderConstraint) {
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
nextSetAsList.remove(a);
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
} else {
a = nextSetAsList.removeFirst();
}
}
@Override
public CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> computeParallel(
Set<Set<UnifyPair>> elems,
Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
IFiniteClosure fc,
int rekTiefe,
Set<UnifyPair> methodSignatureConstraint,
List<Set<UnifyPair>> nextSetAsList,
Set<UnifyPair> sameEqSet,
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> aParDef
) {
elems.add(a); //PL 2019-01-16 muss das wirklich hin steht schon in Zeile 859 ja braucht man siehe Zeile 859
return typeUnifyTask.unify2(elems, eq, oderConstraints, fc, context.parallel(), rekTiefe, new HashSet<>(methodSignatureConstraint)).thenApply(
unify2Result -> new Tuple<>(unify2Result, new HashSet<>())
);
}
@Override
public void applyComputedResults(
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> currentThreadResult,
Set<UnifyPair> compResult,
Set<UnifyPair> compRes
) {
writeLog("RES var=1 ADD:" + result.toString() + " " + currentThreadResult.toString());
result.addAll(currentThreadResult);
}
@Override
public boolean eraseInvalidSets(
int rekTiefe,
Set<Set<UnifyPair>> aParDef,
List<Set<UnifyPair>> nextSetAsList
) {
if (!this.isOderConstraint) {
return true;
} else {
nextSetAsList.removeAll(nextSetasListOderConstraints);
writeLog("Removed: " + nextSetasListOderConstraints);
nextSetasListOderConstraints = new ArrayList<>();
List<Set<UnifyPair>> smallerSetasList = typeUnifyTask.oup.smallerThan(a, nextSetAsList);
List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
.filter(x -> !((Constraint<UnifyPair>) x).isInherited())
.collect(Collectors.toCollection(ArrayList::new));
List<Set<UnifyPair>> notErased = new ArrayList<>();
notInherited.forEach(x -> notErased.addAll(typeUnifyTask.oup.smallerEqThan(x, smallerSetasList)));
List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList);
erased.removeAll(notErased);
nextSetAsList.removeAll(erased);
writeLog("Removed: " + erased);
writeLog("Not Removed: " + nextSetAsList);
}
return false;
}
}

View File

@@ -0,0 +1,212 @@
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
import de.dhbwstuttgart.exceptions.UnifyCancelException;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.util.Tuple;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
public class Variance1Case extends VarianceCase {
protected final int variance = 1;
protected Variance1Case(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
super(isOderConstraint, typeUnifyTask, context);
}
@Override
public void selectNextData(
TypeUnifyTask typeUnifyTask,
List<Set<UnifyPair>> nextSetAsList,
Optional<UnifyPair> optOrigPair
) {
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
writeLog("Max: a in " + variance + " " + a);
nextSetAsList.remove(a);
if (this.isOderConstraint) {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
}
writeLog("nextSetasListOderConstraints 1: " + nextSetasListOderConstraints);
// determine all maximal elements in nextSetasListRest;
// parallel computation is only started for these
Set<UnifyPair> finalA = a;
nextSetasListRest = typeUnifyTask.oup.maxElements(
nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != 1).toList()
);
}
@Override
public CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> computeParallel(
Set<Set<UnifyPair>> elems,
Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
IFiniteClosure fc,
int rekTiefe,
Set<UnifyPair> methodSignatureConstraint,
List<Set<UnifyPair>> nextSetAsList,
Set<UnifyPair> sameEqSet,
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> aParDef
) {
CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> resultValues = CompletableFuture.completedFuture(new Tuple<>(
new HashSet<>(), new HashSet<>()
));
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
newElemsOrig.add(a);
/* FORK ANFANG */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, methodSignatureConstraint);
// schedule compute() on another thread
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
resultValues = resultValues.thenCombine(forkOrigFuture,
(prevResults, currentThreadResult) -> {
forkOrig.writeLog("final Orig 1");
forkOrig.closeLogFile();
return new Tuple<>(currentThreadResult, prevResults.getSecond());
});
//forks.add(forkOrig);
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
}
/* FORK ENDE */
writeLog("a in " + variance + " " + a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
while (!nextSetasListRest.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
nextSetAsList.remove(nSaL);
writeLog("1 RM" + nSaL.toString());
if (!this.isOderConstraint) {
// check whether a =. ty \in nSaL contradicts sameEqSet
if (!sameEqSet.isEmpty() && !typeUnifyTask.checkNoContradiction(nSaL, sameEqSet, result)) {
TypeUnifyTask.noShortendElements++;
continue;
}
} else {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) nSaL).getExtendConstraint());
}
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
// schedule compute() on another thread
CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
resultValues = resultValues.thenCombine(forkFuture,
(prevResults, fork_res) -> {
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
}
writeLog("fork_res: " + fork_res.toString());
writeLog(Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString());
prevResults.getSecond().add(fork_res);
if (!typeUnifyTask.isUndefinedPairSetSet(fork_res)) {
aParDef.add(fork.getNextSetElement());
}
fork.writeLog("final 1");
fork.closeLogFile();
return prevResults;
}
);
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
}
}
return resultValues;
}
@Override
public void applyComputedResults(
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> currentThreadResult,
Set<UnifyPair> compResult,
Set<UnifyPair> compRes
) {
int resOfCompare = typeUnifyTask.oup.compare(compResult, compRes);
if (resOfCompare == -1) {
writeLog("Geloescht result: " + result);
result.clear();
result.addAll(currentThreadResult);
}
else if (resOfCompare == 0) {
result.addAll(currentThreadResult);
}
else if (resOfCompare == 1) {
writeLog("Geloescht currentThreadResult: " + currentThreadResult);
//result stays unchanged
}
}
@Override
public boolean eraseInvalidSets(
int rekTiefe,
Set<Set<UnifyPair>> aParDef,
List<Set<UnifyPair>> nextSetAsList
) {
// System.out.println("");
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
writeLog("aParDef: " + aParDef.toString());
aParDef.add(a);
Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator();
if (this.isOderConstraint) {
nextSetAsList.removeAll(nextSetasListOderConstraints);
writeLog("Removed: " + nextSetasListOderConstraints);
nextSetasListOderConstraints = new ArrayList<>();
while (aParDefIt.hasNext()) {
Set<UnifyPair> a_new = aParDefIt.next();
List<Set<UnifyPair>> smallerSetasList = typeUnifyTask.oup.smallerThan(a_new, nextSetAsList);
writeLog("smallerSetasList: " + smallerSetasList);
List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
.filter(x -> !((Constraint<UnifyPair>) x).isInherited() && !((Constraint<UnifyPair>) x).isImplemented())
.collect(Collectors.toCollection(ArrayList::new));
writeLog("notInherited: " + notInherited + "\n");
List<Set<UnifyPair>> notErased = new ArrayList<>();
notInherited.forEach(x -> {
notErased.addAll(typeUnifyTask.oup.smallerEqThan(x, smallerSetasList));
});
List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList);
writeLog("notErased: " + notErased + "\n");
erased.removeAll(notErased);
nextSetAsList.removeAll(erased);
writeLog("Removed: " + erased);
writeLog("Not Removed: " + nextSetAsList);
}
} else {
while (aParDefIt.hasNext()) {
//nextSetasListIt = nextSetasList.iterator(); should be added, PL 2020-04-28
Set<UnifyPair> a_new = aParDefIt.next();
List<Set<UnifyPair>> erased = typeUnifyTask.oup.smallerEqThan(a_new, nextSetAsList);
nextSetAsList.removeAll(erased);
writeLog("Removed: " + erased);
writeLog("Not Removed: " + nextSetAsList);
}
}
return false;
}
}

View File

@@ -0,0 +1,135 @@
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
import de.dhbwstuttgart.exceptions.UnifyCancelException;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.util.Tuple;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
public class Variance2Case extends VarianceCase {
protected final int variance = 2;
protected Variance2Case(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
super(isOderConstraint, typeUnifyTask, context);
}
@Override
public void selectNextData(
TypeUnifyTask typeUnifyTask,
List<Set<UnifyPair>> nextSetAsList,
Optional<UnifyPair> optOrigPair
) {
a = nextSetAsList.removeFirst();
//Parallel computation is triggered for all elements.
nextSetasListRest = new ArrayList<>(nextSetAsList);
}
@Override
public CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> computeParallel(
Set<Set<UnifyPair>> elems,
Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
IFiniteClosure fc,
int rekTiefe,
Set<UnifyPair> methodSignatureConstraint,
List<Set<UnifyPair>> nextSetAsList,
Set<UnifyPair> sameEqSet,
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> aParDef
) {
CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> resultValuesFuture;
writeLog("var2einstieg");
Set<TypeUnify2Task> forks = new HashSet<>();
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
newElemsOrig.add(a);
/* FORK ANFANG */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
resultValuesFuture = forkOrigFuture.thenApply((currentThreadResult) -> {
forkOrig.writeLog("final Orig 2");
forkOrig.closeLogFile();
return new Tuple<>(currentThreadResult, new HashSet<>());
});
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
}
/* FORK ENDE */
writeLog("a in " + variance + " " + a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
//For the parallel computation of the Or-constraints, the methodSignature is copied
//and the methodSignature of a resp. nSaL is removed again whenever it is not a solution.
Set<UnifyPair> methodSignatureConstraintForParallel = new HashSet<>(methodSignatureConstraint);
Set<UnifyPair> nSaL = a;
while (!nextSetasListRest.isEmpty()) {
methodSignatureConstraintForParallel.removeAll(((Constraint<UnifyPair>) nSaL).getmethodSignatureConstraint());
nSaL = nextSetasListRest.removeFirst();
nextSetAsList.remove(nSaL); //re-enabled by PL 20-02-03
methodSignatureConstraintForParallel.addAll(((Constraint<UnifyPair>) nSaL).getmethodSignatureConstraint());
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraintForParallel));
CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
resultValuesFuture = resultValuesFuture.thenCombine(forkFuture, (resultValues, fork_res) -> {
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
}
resultValues.getSecond().add(fork_res);
fork.writeLog("final 2");
fork.closeLogFile();
return resultValues;
});
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
}
}
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
}
return resultValuesFuture;
}
@Override
public void applyComputedResults(
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> currentThreadResult,
Set<UnifyPair> compResult,
Set<UnifyPair> compRes
) {
// Nothing
}
@Override
public boolean eraseInvalidSets(
int rekTiefe,
Set<Set<UnifyPair>> aParDef,
List<Set<UnifyPair>> nextSetAsList
) {
// Nothing
return false;
}
}

View File

@@ -0,0 +1,112 @@
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.util.Logger;
import de.dhbwstuttgart.util.Tuple;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
public abstract class VarianceCase {
public static VarianceCase createFromVariance(int variance, boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
return switch (variance) {
case 0 -> new Variance0Case(isOderConstraint, typeUnifyTask, context);
case 1 -> new Variance1Case(isOderConstraint, typeUnifyTask, context);
case -1 -> new VarianceM1Case(isOderConstraint, typeUnifyTask, context);
case 2 -> new Variance2Case(isOderConstraint, typeUnifyTask, context);
default -> throw new RuntimeException("Invalid variance: " + variance);
};
}
protected final boolean isOderConstraint;
protected final TypeUnifyTask typeUnifyTask;
protected final UnifyContext context;
/**
* The current case being processed
*/
public Set<UnifyPair> a;
/**
* List of cases for parallel processing.
* Contains the elements that are not related (in the ordering) to the current
* case stored in a. These must be processed in any case, so their computation
* is triggered in parallel.
*/
public List<Set<UnifyPair>> nextSetasListRest = new ArrayList<>();
/**
* List of cases whose receiver does or does not contain "? extends".
* Usually this is exactly one element.
* That element is later removed from nextSetasList once the respective other
* element has led to a solution.
*/
public List<Set<UnifyPair>> nextSetasListOderConstraints = new ArrayList<>();
protected VarianceCase(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
this.isOderConstraint = isOderConstraint;
this.typeUnifyTask = typeUnifyTask;
this.context = context;
}
/**
* Selects the values for the next iteration in the run method:
* - a: the element to process next
* - nextSetasListRest: the cases that are unrelated to the selected a and still have to be processed (in parallel)
* - nextSetasListOderConstraints: the cases whose receiver contains "? extends", typically exactly one element
*/
public abstract void selectNextData(
TypeUnifyTask typeUnifyTask,
List<Set<UnifyPair>> nextSetAsList,
Optional<UnifyPair> optOrigPair
);
/**
* Schedules the computation for a and for every element of nextSetasListRest on the
* executor and combines them into one future of (result of a, results of the parallel forks).
*/
public abstract CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> computeParallel(
Set<Set<UnifyPair>> elems,
Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
IFiniteClosure fc,
int rekTiefe,
Set<UnifyPair> methodSignatureConstraint,
List<Set<UnifyPair>> nextSetAsList,
Set<UnifyPair> sameEqSet,
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> aParDef
);
/**
* Merges currentThreadResult into result, depending on how compResult and compRes
* compare in the variance-specific ordering.
*/
public abstract void applyComputedResults(
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> currentThreadResult,
Set<UnifyPair> compResult,
Set<UnifyPair> compRes
);
/**
* Removes the cases from nextSetAsList that the already computed results make redundant.
* @return whether the current iteration should be broken out of
*/
public abstract boolean eraseInvalidSets(
int rekTiefe,
Set<Set<UnifyPair>> aParDef,
List<Set<UnifyPair>> nextSetAsList
);
protected void writeLog(String s) {
typeUnifyTask.writeLog(s);
}
}
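
The four subclasses plug into the cartesian-product handling of TypeUnifyTask as a strategy object. A minimal sketch of the intended call sequence, not the actual loop in TypeUnifyTask; the surrounding state (variance, isOderConstraint, elems, eq, oderConstraints, fc, rekTiefe, methodSignatureConstraint, sameEqSet, result, aParDef, optOrigPair, compResult, compRes) is assumed to be provided by the caller:

VarianceCase varianceCase = VarianceCase.createFromVariance(variance, isOderConstraint, typeUnifyTask, context);
while (!nextSetAsList.isEmpty()) {
    // pick the next case a and the unrelated cases that are forked in parallel
    varianceCase.selectNextData(typeUnifyTask, nextSetAsList, optOrigPair);
    // fork a and every element of nextSetasListRest, then wait for the combined results
    Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>> res = varianceCase.computeParallel(
            elems, eq, oderConstraints, fc, rekTiefe, methodSignatureConstraint,
            nextSetAsList, sameEqSet, result, aParDef).join();
    Set<Set<UnifyPair>> currentThreadResult = res.getFirst();
    // merge into the overall result according to the variance-specific ordering
    varianceCase.applyComputedResults(result, currentThreadResult, compResult, compRes);
    // drop the cases that the computed results have made redundant
    if (varianceCase.eraseInvalidSets(rekTiefe, aParDef, nextSetAsList)) break;
}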

View File

@@ -0,0 +1,229 @@
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
import de.dhbwstuttgart.exceptions.UnifyCancelException;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.util.Tuple;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
public class VarianceM1Case extends VarianceCase {
protected final int variance = -1;
protected VarianceM1Case(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
super(isOderConstraint, typeUnifyTask, context);
}
@Override
public void selectNextData(
TypeUnifyTask typeUnifyTask,
List<Set<UnifyPair>> nextSetAsList,
Optional<UnifyPair> optOrigPair
) {
a = typeUnifyTask.oup.min(nextSetAsList.iterator());
writeLog("Min: a in " + variance + " " + a);
if (this.isOderConstraint) {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
}
writeLog("nextSetasListOderConstraints -1: " + nextSetasListOderConstraints);
nextSetAsList.remove(a);
//Determine all minimal elements for nextSetasListRest;
//parallel computation is only triggered for these elements.
Set<UnifyPair> finalA = a;
nextSetasListRest = typeUnifyTask.oup.minElements(
nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != -1).toList()
);
}
@Override
public CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> computeParallel(
Set<Set<UnifyPair>> elems,
Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
IFiniteClosure fc,
int rekTiefe,
Set<UnifyPair> methodSignatureConstraint,
List<Set<UnifyPair>> nextSetAsList,
Set<UnifyPair> sameEqSet,
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> aParDef
) {
CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> resultValues = CompletableFuture.completedFuture(new Tuple<>(
new HashSet<>(), new HashSet<>()
));
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
newElemsOrig.add(a);
/* FORK ANFANG */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, methodSignatureConstraint);
// schedule compute() on another thread
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
resultValues = resultValues.thenCombine(forkOrigFuture,
(prevResults, currentThreadResult) -> {
forkOrig.writeLog("final Orig -1");
forkOrig.closeLogFile();
return new Tuple<>(currentThreadResult, prevResults.getSecond());
});
//forks.add(forkOrig);
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
}
/* FORK ENDE */
writeLog("a in " + variance + " " + a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
while (!nextSetasListRest.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
nextSetAsList.remove(nSaL);
writeLog("-1 RM" + nSaL.toString());
if (!this.isOderConstraint) {
//check whether a =. ty \in nSaL contradicts sameEqSet
if (!sameEqSet.isEmpty() && !typeUnifyTask.checkNoContradiction(nSaL, sameEqSet, result)) {
TypeUnifyTask.noShortendElements++;
continue;
}
} else {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) nSaL).getExtendConstraint());
}
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
// schedule compute() on another thread
CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
resultValues = resultValues.thenCombine(forkFuture,
(prevResults, fork_res) -> {
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
}
writeLog("fork_res: " + fork_res.toString());
writeLog(Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString());
prevResults.getSecond().add(fork_res);
if (!typeUnifyTask.isUndefinedPairSetSet(fork_res)) {
aParDef.add(fork.getNextSetElement());
}
fork.writeLog("final -1");
fork.closeLogFile();
return prevResults;
}
);
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
}
}
return resultValues;
}
@Override
public void applyComputedResults(
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> currentThreadResult,
Set<UnifyPair> compResult,
Set<UnifyPair> compRes
) {
int resOfCompare = typeUnifyTask.oup.compare(compResult, compRes);
if (resOfCompare == 1) {
writeLog("Geloescht result: " + result);
result.clear();
result.addAll(currentThreadResult);
} else if (resOfCompare == 0) {
result.addAll(currentThreadResult);
} else if (resOfCompare == -1) {
writeLog("Geloescht currentThreadResult: " + currentThreadResult);
//result stays unchanged
}
}
@Override
public boolean eraseInvalidSets(
int rekTiefe,
Set<Set<UnifyPair>> aParDef,
List<Set<UnifyPair>> nextSetAsList
) {
// System.out.println("");
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
writeLog("aParDef: " + aParDef.toString());
aParDef.add(a);
Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator();
if (this.isOderConstraint) {
nextSetAsList.removeAll(nextSetasListOderConstraints);
writeLog("Removed: " + nextSetasListOderConstraints);
nextSetasListOderConstraints = new ArrayList<>();
while (aParDefIt.hasNext()) {
Set<UnifyPair> a_new = aParDefIt.next();
List<Set<UnifyPair>> greaterSetasList = typeUnifyTask.oup.greaterThan(a_new, nextSetAsList);
//a_new has to be added if it is not inherited; it is removed again later
if (!((Constraint<UnifyPair>) a_new).isInherited()) {
greaterSetasList.add(a_new);
}
List<Set<UnifyPair>> notInherited = greaterSetasList.stream()
.filter(x -> !((Constraint<UnifyPair>) x).isInherited())
.collect(Collectors.toCollection(ArrayList::new));
List<Set<UnifyPair>> notErased = new ArrayList<>();
//If x is not inherited, the next overload starts at the next greater element
notInherited.forEach(x -> {
notErased.addAll(typeUnifyTask.oup.greaterEqThan(x, greaterSetasList));
});
//the smallest element is the one that a_new inherited from
//and therefore has to be removed
Iterator<Set<UnifyPair>> notErasedIt = notErased.iterator();
if (notErasedIt.hasNext()) {
Set<UnifyPair> min = typeUnifyTask.oup.min(notErasedIt);
notErased.remove(min);
notErased.remove(((Constraint<UnifyPair>) min).getExtendConstraint());
}
List<Set<UnifyPair>> erased = new ArrayList<>(greaterSetasList);
erased.removeAll(notErased);
nextSetAsList.removeAll(erased);
writeLog("Removed: " + erased);
writeLog("Not Removed: " + nextSetAsList);
}
} else {
while (aParDefIt.hasNext()) {
//nextSetasListIt = nextSetasList.iterator(); should be added, PL 2020-04-28
Set<UnifyPair> a_new = aParDefIt.next();
List<Set<UnifyPair>> erased = typeUnifyTask.oup.greaterEqThan(a_new, nextSetAsList);
nextSetAsList.removeAll(erased);
writeLog("Removed: " + erased);
writeLog("Not Removed: " + nextSetAsList);
}
}
return false;
}
}

View File

@@ -1,6 +1,7 @@
package de.dhbwstuttgart.typeinference.unify.interfaces;
import java.util.List;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.Optional;
import java.util.Set;
@@ -18,9 +19,8 @@ import org.antlr.v4.runtime.Token;
*
* @author Florian Steurer
*/
public interface IFiniteClosure {
public interface IFiniteClosure extends ISerializableData {
public void setLogTrue();
/**
* Returns all types of the finite closure that are subtypes of the argument.
* @return The set of subtypes of the argument.
@@ -74,5 +74,5 @@ public interface IFiniteClosure {
public Set<UnifyType> getChildren(UnifyType t);
public Set<UnifyType> getAllTypesByName(String typeName);
public int compare(UnifyType rhsType, UnifyType rhsType2, PairOperator pairop);
public int compare(UnifyType rhsType, UnifyType rhsType2, PairOperator pairop, UnifyContext context);
}

View File

@@ -1,8 +1,9 @@
package de.dhbwstuttgart.typeinference.unify.model;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.Set;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
@@ -92,5 +93,21 @@ public final class ExtendsType extends WildcardType {
return "? extends " + wildcardedType;
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("wildcardedType", this.wildcardedType.toSerial(keyStorage));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static ExtendsType fromSerial(SerialMap data, UnifyContext context) {
return new ExtendsType(
UnifyType.fromSerial(data.getMap("wildcardedType"), context)
);
}
}

View File

@@ -1,16 +1,20 @@
package de.dhbwstuttgart.typeinference.unify.model;
import java.io.FileWriter;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.io.IOException;
import java.io.Writer;
import java.lang.reflect.Modifier;
import java.sql.Array;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.BiFunction;
@@ -18,40 +22,28 @@ import java.util.function.BinaryOperator;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import com.google.common.collect.Ordering;
//PL 18-02-05/18-04-05 Unifier replaced by Matcher
//greater still has to be replaced -- yes, done 18-04-05
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.parser.SourceLoc;
import de.dhbwstuttgart.parser.scope.JavaClassName;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.unify.MartelliMontanariUnify;
import de.dhbwstuttgart.typeinference.unify.Match;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify;
import de.dhbwstuttgart.util.Pair;
import org.antlr.v4.runtime.Token;
import org.apache.commons.io.output.NullWriter;
/**
* The finite closure for the type unification
* @author Florian Steurer
*/
public class FiniteClosure //extends Ordering<UnifyType> //entfernt PL 2018-12-11
implements IFiniteClosure {
public class FiniteClosure implements IFiniteClosure, ISerializableData {
final JavaTXCompiler compiler;
final PlaceholderRegistry placeholderRegistry;
Writer logFile;
static Boolean log = false;
public void setLogTrue() {
log = true;
}
/**
* A map that maps every type to the node in the inheritance graph that contains that type.
*/
@@ -81,8 +73,9 @@ implements IFiniteClosure {
/**
* Creates a new instance using the inheritance tree defined in the pairs.
*/
public FiniteClosure(Set<UnifyPair> pairs, Writer logFile, JavaTXCompiler compiler) {
public FiniteClosure(Set<UnifyPair> pairs, Writer logFile, JavaTXCompiler compiler, PlaceholderRegistry placeholderRegistry) {
this.compiler = compiler;
this.placeholderRegistry = placeholderRegistry;
this.logFile = logFile;
this.pairs = new HashSet<>(pairs);
inheritanceGraph = new HashMap<UnifyType, Node<UnifyType>>();
@@ -141,13 +134,13 @@ implements IFiniteClosure {
}
}
public FiniteClosure(Set<UnifyPair> constraints, Writer writer) {
this(constraints, writer, null);
public FiniteClosure(Set<UnifyPair> constraints, Writer writer, PlaceholderRegistry placeholderRegistry) {
this(constraints, writer, null, placeholderRegistry);
}
void testSmaller() {
UnifyType tq1, tq2, tq3;
tq1 = new ExtendsType(PlaceholderType.freshPlaceholder());
tq1 = new ExtendsType(PlaceholderType.freshPlaceholder(placeholderRegistry));
List<UnifyType> l1 = new ArrayList<>();
List<UnifyType> l2 = new ArrayList<>();
l1.add(tq1);
@@ -207,7 +200,7 @@ implements IFiniteClosure {
result.add(new Pair<>(t, fBounded));
}
catch (StackOverflowError e) {
System.out.println("");
// System.out.println("");
}
// if C<...> <* C<...> then ... (third case in definition of <*)
@@ -698,10 +691,10 @@ implements IFiniteClosure {
}
*/
public int compare (UnifyType left, UnifyType right, PairOperator pairop) {
public int compare (UnifyType left, UnifyType right, PairOperator pairop, UnifyContext context) {
try {logFile.write("left: "+ left + " right: " + right + " pairop: " + pairop +"\n");} catch (IOException ie) {}
if (left.getName().equals("Matrix") || right.getName().equals("Matrix"))
System.out.println("");
// if (left.getName().equals("Matrix") || right.getName().equals("Matrix"))
// System.out.println("");
/*
pairop = PairOperator.SMALLERDOTWC;
List<UnifyType> al = new ArrayList<>();
@@ -752,7 +745,7 @@ implements IFiniteClosure {
}
}
UnifyPair up = new UnifyPair(left, right, pairop);
TypeUnifyTask unifyTask = new TypeUnifyTask();
TypeUnifyTask unifyTask = new TypeUnifyTask(context);
HashSet<UnifyPair> hs = new HashSet<>();
hs.add(up);
Set<UnifyPair> smallerRes = unifyTask.applyTypeUnificationRules(hs, this);
@@ -760,7 +753,7 @@ implements IFiniteClosure {
//if (left.getName().equals("Vector") || right.getName().equals("AbstractList"))
{try {
logFile.write("\nsmallerRes: " + smallerRes);//"smallerHash: " + greaterHash.toString());
logFile.flush();
// logFile.flush();
}
catch (IOException e) {
System.err.println("no LogFile");}}
@@ -774,7 +767,7 @@ implements IFiniteClosure {
long smallerLen = smallerRes.stream().filter(delFun).count();
try {
logFile.write("\nsmallerLen: " + smallerLen +"\n");
logFile.flush();
// logFile.flush();
}
catch (IOException e) {
System.err.println("no LogFile");}
@@ -789,7 +782,7 @@ implements IFiniteClosure {
//if (left.getName().equals("Vector") || right.getName().equals("AbstractList"))
{try {
logFile.write("\ngreaterRes: " + greaterRes);//"smallerHash: " + greaterHash.toString());
logFile.flush();
// logFile.flush();
}
catch (IOException e) {
System.err.println("no LogFile");}}
@@ -803,4 +796,29 @@ implements IFiniteClosure {
}
}
}
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("pairs", SerialList.fromMapped(this.pairs, unifyPair -> unifyPair.toSerial(keyStorage)));
/*/
if (serialized != null) {
throw new RuntimeException(
"Check both: \n"
+ "-> " + this.pairs.toArray()[51] + "\n"
+ "-> " + this.pairs.toArray()[65] + "\n"
);
}
//*/
return serialized;
}
public static FiniteClosure fromSerial(SerialMap data, UnifyContext context, KeyStorage keyStorage) {
var pairList = data.getList("pairs").assertListOfUUIDs();
Set<UnifyPair> pairs = pairList.stream()
.map(pairData -> UnifyPair.fromSerial(pairData, context, keyStorage)).collect(Collectors.toSet());
return new FiniteClosure(pairs, context.logFile(), context.placeholderRegistry());
}
}

View File

@@ -1,8 +1,12 @@
package de.dhbwstuttgart.typeinference.unify.model;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Set;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
@@ -99,5 +103,22 @@ public class FunNType extends UnifyType {
return other.getTypeParams().equals(typeParams);
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("params", SerialList.fromMapped(this.typeParams.get(), param -> param.toSerial(keyStorage)));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static FunNType fromSerial(SerialMap data, UnifyContext context) {
List<UnifyType> params = data.getList("params").assertListOfMaps().stream().map(
paramData -> UnifyType.fromSerial(paramData, context)).toList();
return new FunNType(new TypeParams(params));
}
}

View File

@@ -1,5 +1,6 @@
package de.dhbwstuttgart.typeinference.unify.model;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
@@ -26,9 +27,11 @@ import de.dhbwstuttgart.util.Pair;
public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
protected IFiniteClosure fc;
public OrderingUnifyPair(IFiniteClosure fc) {
protected UnifyContext context;
public OrderingUnifyPair(IFiniteClosure fc, UnifyContext context) {
this.fc = fc;
this.context = context;
}
/*
@@ -39,15 +42,15 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
try {
//if (left.getRhsType() instanceof WildcardType || right.getRhsType() instanceof WildcardType) {//PL 2019-01-12 ausgetauscht
if (((PlaceholderType)left.getLhsType()).isInnerType() && ((PlaceholderType)right.getLhsType()).isInnerType()) {
return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOTWC);
return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOTWC, context);
}
else {
return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOT);
return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOT, context);
}}
catch (ClassCastException e) {
try {
((FiniteClosure)fc).logFile.write("ClassCastException: " + left.toString() + " " + left.getGroundBasePair() + "\n\n");
((FiniteClosure)fc).logFile.flush();
// ((FiniteClosure)fc).logFile.flush();
}
catch (IOException ie) {
}
@@ -79,18 +82,18 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
&& (((ExtendsType)right).getExtendedType().getName().equals("java.util.Vector"))
&& (((ReferenceType)((ExtendsType)right).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType)))
{
System.out.println("");
// System.out.println("");
}
if (((right instanceof SuperType) && (((SuperType)right).getSuperedType().getName().equals("java.lang.Object")))
||((left instanceof SuperType) && (((SuperType)left).getSuperedType().getName().equals("java.lang.Object"))))
{
System.out.println("");
// System.out.println("");
}
}
else {
up = new UnifyPair(left, right, PairOperator.SMALLERDOT);
}
TypeUnifyTask unifyTask = new TypeUnifyTask();
TypeUnifyTask unifyTask = new TypeUnifyTask(context);
HashSet<UnifyPair> hs = new HashSet<>();
hs.add(up);
Set<UnifyPair> smallerRes = unifyTask.applyTypeUnificationRules(hs, fc);
@@ -106,11 +109,11 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
&& (((ExtendsType)right).getExtendedType().getName().equals("java.util.Vector"))
&& (((ReferenceType)((ExtendsType)right).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType)))
{
System.out.println("");
// System.out.println("");
}
if (right instanceof SuperType)
{
System.out.println("");
// System.out.println("");
}
}
else {
@@ -411,13 +414,13 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
if (leftlewc.iterator().next().getLhsType() instanceof PlaceholderType) {
hm = rsleuni.stream().reduce(new HashMap<UnifyType,UnifyPair>(), (x, y)-> { x.put(y.getLhsType(),y); return x; }, combiner);
Stream<UnifyPair> lslewcstr = lsleuni.stream().filter(x -> !(hm.get(x.getLhsType()) == null));
si = lslewcstr.map(x -> fc.compare(x.getRhsType(), hm.get(x.getLhsType()).getRhsType(), PairOperator.SMALLERDOTWC)).reduce((x,y)-> { if (x == y) return x; else return 0; } );
si = lslewcstr.map(x -> fc.compare(x.getRhsType(), hm.get(x.getLhsType()).getRhsType(), PairOperator.SMALLERDOTWC, context)).reduce((x,y)-> { if (x == y) return x; else return 0; } );
}
//4. Fall
else {
hm = rsleuni.stream().reduce(new HashMap<UnifyType,UnifyPair>(), (x, y)-> { x.put(y.getRhsType(),y); return x; }, combiner);
Stream<UnifyPair> lslewcstr = lsleuni.stream().filter(x -> !(hm.get(x.getRhsType()) == null));
si = lslewcstr.map(x -> fc.compare(x.getLhsType(), hm.get(x.getRhsType()).getLhsType(), PairOperator.SMALLERDOTWC)).reduce((x,y)-> { if (x == y) return x; else return 0; } );
si = lslewcstr.map(x -> fc.compare(x.getLhsType(), hm.get(x.getRhsType()).getLhsType(), PairOperator.SMALLERDOTWC, context)).reduce((x,y)-> { if (x == y) return x; else return 0; } );
}
if (!si.isPresent()) return 0;
else return si.get();

View File

@@ -46,4 +46,15 @@ public enum PairOperator {
default: return "=."; // EQUALSDOT
}
}
public static PairOperator fromString(String op) {
switch (op) {
case "<": return SMALLER;
case "<.": return SMALLERDOT;
case "<!=.": return SMALLERNEQDOT;
case "<.?": return SMALLERDOTWC;
case "=.": return EQUALSDOT;
default: throw new RuntimeException("Unknown PairOperator: " + op);
}
}
}
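
Since the UnifyPair serialization further down stores the operator as pairOp.toString() and parses it back with fromString, the two methods are expected to be inverses of each other. A small sanity check illustrating that assumption:

// sanity check, assuming toString() yields the same symbols fromString() parses
assert PairOperator.fromString(PairOperator.SMALLERDOT.toString()) == PairOperator.SMALLERDOT; // "<."
assert PairOperator.fromString("=.") == PairOperator.EQUALSDOT;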

View File

@@ -1,119 +1,96 @@
package de.dhbwstuttgart.typeinference.unify.model;
import java.io.File;
import java.io.FileWriter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Random;
import java.util.Set;
import de.dhbwstuttgart.typeinference.unify.distributeVariance;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Set;
/**
* An unbounded placeholder type.
* @author Florian Steurer
*/
public final class PlaceholderType extends UnifyType{
/**
* Static list containing the names of all existing placeholders.
* Used for generating fresh placeholders.
*/
public static final ArrayList<String> EXISTING_PLACEHOLDERS = new ArrayList<String>();
/**
* Prefix of auto-generated placeholder names.
*/
protected static String nextName = "gen_";
/**
* Random number generator used to generate fresh placeholder name.
*/
protected static Random rnd = new Random(43558747548978L);
/**
* True if this object was auto-generated, false if this object was user-generated.
*/
private final boolean IsGenerated;
/**
* isWildcardable indicates whether a wildcard type may be assigned to this PlaceholderType
*/
private boolean wildcardable = true;
/**
* innerType indicates whether this PlaceholderType is used inside a type constructor
*/
private boolean innerType = false;
/**
* variance shows the variance of the pair
* 1: contravariant
* -1 covariant
* 0 invariant
* PL 2018-03-21
*/
private int variance = 0;
/*
* For Or-constraints:
* orCons = 1: receiver
* orCons = 0: argument or no Or-constraint
* orCons = -1: return type
*/
private byte orCons = 0;
/**
* Creates a new placeholder type with the specified name.
*/
public PlaceholderType(String name) {
public PlaceholderType(String name, PlaceholderRegistry placeholderRegistry) {
super(name, new TypeParams());
EXISTING_PLACEHOLDERS.add(name); // Add to list of existing placeholder names
placeholderRegistry.addPlaceholder(name); // Add to list of existing placeholder names
IsGenerated = false; // This type is user generated
}
public PlaceholderType(String name, int variance) {
public PlaceholderType(String name, int variance, PlaceholderRegistry placeholderRegistry) {
super(name, new TypeParams());
EXISTING_PLACEHOLDERS.add(name); // Add to list of existing placeholder names
placeholderRegistry.addPlaceholder(name); // Add to list of existing placeholder names
IsGenerated = false; // This type is user generated
this.variance = variance;
}
/**
* Creates a new placeholder type
* @param isGenerated true if this placeholder is auto-generated, false if it is user-generated.
*/
protected PlaceholderType(String name, boolean isGenerated) {
protected PlaceholderType(String name, boolean isGenerated, PlaceholderRegistry placeholderRegistry) {
super(name, new TypeParams());
EXISTING_PLACEHOLDERS.add(name); // Add to list of existing placeholder names
placeholderRegistry.addPlaceholder(name); // Add to list of existing placeholder names
IsGenerated = isGenerated;
}
public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) {
return visitor.visit(this, ht);
}
/**
* Creates a fresh placeholder type with a name that does so far not exist.
* Creates a fresh placeholder type with a name that does so far not exist from the chars A-Z.
* A user could later instantiate a type using the same name that is equivalent to this type.
* @return A fresh placeholder type.
*/
public synchronized static PlaceholderType freshPlaceholder() {
String name = nextName + (char) (rnd.nextInt(22) + 97); // Returns random char between 'a' and 'z'
// Add random chars while the name is in use.
while(EXISTING_PLACEHOLDERS.contains(name)) {
name += (char) (rnd.nextInt(22) + 97); // Returns random char between 'a' and 'z'
}
return new PlaceholderType(name, true);
public static PlaceholderType freshPlaceholder(PlaceholderRegistry placeholderRegistry) {
var name = placeholderRegistry.generateFreshPlaceholderName();
return new PlaceholderType(name, true, placeholderRegistry);
}
/**
* True if this placeholder is auto-generated, false if it is user-generated.
*/
@@ -124,51 +101,51 @@ public final class PlaceholderType extends UnifyType{
public void setVariance(int v) {
variance = v;
}
public int getVariance() {
return variance;
}
public void reversVariance() {
if (variance == 1) {
setVariance(-1);
} else {
if (variance == -1) {
setVariance(1);
}}
if (variance == -1) {
setVariance(1);
}}
}
public void setOrCons(byte i) {
orCons = i;
}
public byte getOrCons() {
return orCons;
}
public Boolean isWildcardable() {
return wildcardable;
}
public void disableWildcardtable() {
wildcardable = false;
}
public void enableWildcardtable() {
wildcardable = true;
}
public void setWildcardtable(Boolean wildcardable) {
this.wildcardable = wildcardable;
}
public Boolean isInnerType() {
return innerType;
}
public void setInnerType(Boolean innerType) {
this.innerType = innerType;
}
@Override
Set<UnifyType> smArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
return fc.smArg(this, fBounded);
@@ -178,17 +155,17 @@ public final class PlaceholderType extends UnifyType{
Set<UnifyType> grArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
return fc.grArg(this, fBounded);
}
@Override
public UnifyType setTypeParams(TypeParams newTp) {
return this; // Placeholders never have params.
}
@Override
public int hashCode() {
return typeName.hashCode();
}
@Override
UnifyType apply(Unifier unif) {
if(unif.hasSubstitute(this)) {
@@ -200,15 +177,15 @@ public final class PlaceholderType extends UnifyType{
}
return this;
}
@Override
public boolean equals(Object obj) {
if(!(obj instanceof PlaceholderType))
return false;
return ((PlaceholderType) obj).getName().equals(typeName);
}
@Override
public Collection<PlaceholderType> getInvolvedPlaceholderTypes() {
@@ -216,4 +193,36 @@ public final class PlaceholderType extends UnifyType{
ret.add(this);
return ret;
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("name", this.typeName);
// Placeholders never make use of the typeParams
serialized.put("isGenerated", IsGenerated);
serialized.put("isInnerType", innerType);
serialized.put("variance", variance);
serialized.put("orCons", orCons);
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static PlaceholderType fromSerial(SerialMap data, UnifyContext context) {
var name = data.getValue("name").getOf(String.class);
var isGenerated = data.getValue("isGenerated").getOf(Boolean.class);
var isInnerType = data.getValue("isInnerType").getOf(Boolean.class);
var variance = data.getValue("variance").getOf(Integer.class);
var orCons = data.getValue("orCons").getOf(Number.class).byteValue();
var placeholderType = new PlaceholderType(name, isGenerated, context.placeholderRegistry());
placeholderType.setInnerType(isInnerType);
placeholderType.setVariance(variance);
placeholderType.setOrCons(orCons);
return placeholderType;
}
}
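
Placeholder creation now goes through the per-run PlaceholderRegistry instead of the former static name list and Random generator. A short usage sketch, assuming the registry is taken from the current UnifyContext:

PlaceholderRegistry registry = context.placeholderRegistry();
PlaceholderType fresh = PlaceholderType.freshPlaceholder(registry); // generated name, registered in 'registry'
PlaceholderType user = new PlaceholderType("T", registry);          // user-defined name, also registered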

View File

@@ -1,6 +1,9 @@
package de.dhbwstuttgart.typeinference.unify.model;
import java.util.HashMap;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.Set;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
@@ -12,44 +15,50 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor;
*
*/
public class ReferenceType extends UnifyType {
/**
* The buffered hashCode
*/
private final int hashCode;
/**
* indicates whether this ReferenceType is a generic type variable
*/
private final boolean genericTypeVar;
public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) {
return visitor.visit(this, ht);
}
public ReferenceType(String name, Boolean genericTypeVar) {
super(name, new TypeParams());
hashCode = 31 + 17 * typeName.hashCode() + 17 * typeParams.hashCode();
this.genericTypeVar = genericTypeVar;
}
public ReferenceType(String name, UnifyType... params) {
super(name, new TypeParams(params));
hashCode = 31 + 17 * typeName.hashCode() + 17 * typeParams.hashCode();
genericTypeVar = false;
}
public ReferenceType(String name, TypeParams params) {
super(name, params);
hashCode = 31 + 17 * typeName.hashCode() + 17 * typeParams.hashCode();
genericTypeVar = false;
}
public boolean isGenTypeVar () {
private ReferenceType(String name, TypeParams params, boolean genericTypeVar) {
super(name, params);
hashCode = 31 + 17 * typeName.hashCode() + 17 * typeParams.hashCode();
this.genericTypeVar = genericTypeVar;
}
public boolean isGenTypeVar() {
return genericTypeVar;
}
@Override
Set<UnifyType> smArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
return fc.smArg(this, fBounded);
@@ -63,38 +72,59 @@ public class ReferenceType extends UnifyType {
@Override
UnifyType apply(Unifier unif) {
TypeParams newParams = typeParams.apply(unif);
if(newParams.hashCode() == typeParams.hashCode() && newParams.equals(typeParams))
if (newParams.hashCode() == typeParams.hashCode() && newParams.equals(typeParams))
return this;
return new ReferenceType(typeName, newParams);
}
@Override
public UnifyType setTypeParams(TypeParams newTp) {
if(newTp.hashCode() == typeParams.hashCode() && newTp.equals(typeParams))
if (newTp.hashCode() == typeParams.hashCode() && newTp.equals(typeParams))
return this; // reduced the amount of objects created
return new ReferenceType(typeName, newTp);
}
@Override
public int hashCode() {
return hashCode;
}
@Override
public boolean equals(Object obj) {
if(!(obj instanceof ReferenceType))
if (!(obj instanceof ReferenceType))
return false;
if(obj.hashCode() != this.hashCode())
if (obj.hashCode() != this.hashCode())
return false;
ReferenceType other = (ReferenceType) obj;
if(!other.getName().equals(typeName))
if (!other.getName().equals(typeName))
return false;
return other.getTypeParams().equals(typeParams);
}
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("name", this.typeName);
serialized.put("params", SerialList.fromMapped(this.typeParams.get(), param -> param.toSerial(keyStorage)));
serialized.put("isGenericTypeVar", this.genericTypeVar);
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static ReferenceType fromSerial(SerialMap data, UnifyContext context) {
var name = data.getValue("name").getOf(String.class);
var params = data.getList("params").assertListOfMaps().stream().map(
paramData -> UnifyType.fromSerial(paramData, context)).toList();
var isGenericTypeVar = data.getValue("isGenericTypeVar").getOf(Boolean.class);
return new ReferenceType(name, new TypeParams(params), isGenericTypeVar);
}
}

View File

@@ -1,6 +1,8 @@
package de.dhbwstuttgart.typeinference.unify.model;
import java.util.HashMap;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.Set;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
@@ -85,4 +87,21 @@ public final class SuperType extends WildcardType {
SuperType other = (SuperType) obj;
return other.getSuperedType().equals(wildcardedType);
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("wildcardedType", this.wildcardedType.toSerial(keyStorage));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static SuperType fromSerial(SerialMap data, UnifyContext context) {
return new SuperType(
UnifyType.fromSerial(data.getMap("wildcardedType"), context)
);
}
}

View File

@@ -1,23 +1,26 @@
package de.dhbwstuttgart.typeinference.unify.model;
import com.google.common.collect.ObjectArrays;
import de.dhbwstuttgart.parser.SourceLoc;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import org.antlr.v4.runtime.Token;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialUUID;
import de.dhbwstuttgart.typeinference.constraints.IConstraintElement;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
/**
* A pair which contains two types and an operator, e.g. (Integer <. a).
* @author Florian Steurer
*/
public class UnifyPair {
public class UnifyPair implements IConstraintElement, ISerializableData {
private SourceLoc location;
@@ -243,12 +246,12 @@ public class UnifyPair {
public String toString() {
String ret = "";
if (lhs instanceof PlaceholderType) {
ret = new Integer(((PlaceholderType)lhs).getVariance()).toString() + " "
ret = Integer.valueOf(((PlaceholderType)lhs).getVariance()).toString() + " "
+ "WC: " + ((PlaceholderType)lhs).isWildcardable()
+ ", IT: " + ((PlaceholderType)lhs).isInnerType();
}
if (rhs instanceof PlaceholderType) {
ret = ret + ", " + new Integer(((PlaceholderType)rhs).getVariance()).toString() + " "
ret = ret + ", " + Integer.valueOf(((PlaceholderType)rhs).getVariance()).toString() + " "
+ "WC: " + ((PlaceholderType)rhs).isWildcardable()
+ ", IT: " + ((PlaceholderType)rhs).isInnerType();
}
@@ -269,6 +272,55 @@ public class UnifyPair {
return ret;
}
*/
public SerialUUID toSerial(KeyStorage keyStorage) {
String uuid = keyStorage.getIdentifier(this);
if (!keyStorage.isAlreadySerialized(uuid)) {
SerialMap serialized = new SerialMap();
keyStorage.putSerialized(uuid, serialized);
serialized.put("lhs", this.lhs.toSerial(keyStorage));
serialized.put("rhs", this.rhs.toSerial(keyStorage));
serialized.put("op", this.pairOp.toString());
serialized.put("basePair", this.basePair == null ? null : this.basePair.toSerial(keyStorage));
serialized.put("location", this.location == null ? null : this.location.toSerial(keyStorage));
serialized.put("substitution", SerialList.fromMapped(this.substitution, unifyPair -> unifyPair.toSerial(keyStorage)));
serialized.put("fBounded", SerialList.fromMapped(this.fBounded, fbounded -> fbounded.toSerial(keyStorage)));
}
return new SerialUUID(uuid);
}
public static UnifyPair fromSerial(SerialUUID serialUUID, UnifyContext context, KeyStorage keyStorage) {
String uuid = serialUUID.uuid;
if (!keyStorage.isAlreadyUnserialized(uuid)) {
SerialMap data = keyStorage.getSerialized(uuid);
SerialMap lhsData = data.getMap("lhs");
SerialMap rhsData = data.getMap("rhs");
String opData = data.getValue("op").getOf(String.class);
UnifyPair pair = new UnifyPair(
UnifyType.fromSerial(lhsData, context),
UnifyType.fromSerial(rhsData, context),
PairOperator.fromString(opData)
);
// put the object into the storage before unserializing basePair recursively
keyStorage.putUnserialized(uuid, pair);
SerialList<SerialUUID> substitutionData = data.getList("substitution").assertListOfUUIDs();
SerialList<SerialMap> fBoundedData = data.getList("fBounded").assertListOfMaps();
SerialUUID basePairData = data.getUUIDOrNull("basePair");
SerialMap locationData = data.getMapOrNull("location");
pair.substitution = substitutionData.stream().map(substData -> UnifyPair.fromSerial(substData, context, keyStorage)).collect(Collectors.toSet());
pair.fBounded = fBoundedData.stream().map(fBoundData -> UnifyType.fromSerial(fBoundData, context)).collect(Collectors.toSet());
if (basePairData != null) {
pair.basePair = UnifyPair.fromSerial(basePairData, context, keyStorage);
}
if (locationData != null) {
pair.location = SourceLoc.fromSerial(locationData);
}
}
return keyStorage.getUnserialized(uuid, UnifyPair.class);
}
}
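
The KeyStorage indirection means a UnifyPair that is referenced several times (e.g. as a basePair or inside a substitution set) is written out once and every further reference only carries its uuid; registering the pair via putUnserialized before recursing into basePair is what keeps cyclic references from looping. A minimal sketch of that behaviour, assuming a no-arg KeyStorage constructor and an existing pair and context:

KeyStorage keyStorage = new KeyStorage();          // assumed constructor
SerialUUID first = somePair.toSerial(keyStorage);  // serializes the pair and registers it
SerialUUID again = somePair.toSerial(keyStorage);  // only returns the already registered uuid
// first.uuid equals again.uuid; deserialization resolves both to the same instance
UnifyPair restored = UnifyPair.fromSerial(first, context, keyStorage);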

View File

@@ -1,12 +1,14 @@
package de.dhbwstuttgart.typeinference.unify.model;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Set;
import de.dhbwstuttgart.syntaxtree.StatementVisitor;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor;
@@ -14,7 +16,7 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor;
* Represents a java type.
* @author Florian Steurer
*/
public abstract class UnifyType {
public abstract class UnifyType implements ISerializableData {
/**
* The name of the type, e.g. "Integer", "? extends Integer" or "List" for (List<T>)
@@ -29,7 +31,7 @@ public abstract class UnifyType {
/**
* Creates a new instance
* @param name Name of the type (e.g. List for List<T>, Integer or ? extends Integer)
* @param typeParams Parameters of the type (e.g. <T> for List<T>)
* @param p Parameters of the type (e.g. <T> for List<T>)
*/
protected UnifyType(String name, TypeParams p) {
typeName = name;
@@ -117,4 +119,36 @@ public abstract class UnifyType {
if(obj == null)return false;
return this.toString().equals(obj.toString());
}
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("type", this.getClass().toString());
// insert null for the object here; subclasses call this to build the wrapper and then overwrite "object" with their own serialized fields
serialized.put("object", SerialValue.NULL);
return serialized;
}
public static UnifyType fromSerial(SerialMap data, UnifyContext context) {
var type = data.getValue("type").getOf(String.class);
var object = data.getMap("object");
if (type.equals(ReferenceType.class.toString())) {
return ReferenceType.fromSerial(object, context);
}
else if (type.equals(ExtendsType.class.toString())) {
return ExtendsType.fromSerial(object, context);
}
else if (type.equals(SuperType.class.toString())) {
return SuperType.fromSerial(object, context);
}
else if (type.equals(FunNType.class.toString())) {
return FunNType.fromSerial(object, context);
}
else if (type.equals(PlaceholderType.class.toString())) {
return PlaceholderType.fromSerial(object, context);
}
else {
throw new RuntimeException("Could not unserialize class of unhandled type " + type);
}
}
}
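
Every concrete UnifyType follows the same wrapper pattern: super.toSerial writes the class name under "type" and a null placeholder under "object", and the subclass then replaces "object" with its own fields, while fromSerial dispatches on "type" and throws for unknown classes. A hypothetical additional subtype would therefore look like this (SomeNewType and someField are illustration only, not part of the code base):

@Override
public SerialMap toSerial(KeyStorage keyStorage) {
    SerialMap serialized = new SerialMap();
    serialized.put("someField", this.someField);         // hypothetical field of SomeNewType
    var serializedWrapper = super.toSerial(keyStorage);  // puts "type" and a null "object"
    serializedWrapper.put("object", serialized);
    return serializedWrapper;
}
// ...and UnifyType.fromSerial would need a matching branch:
// else if (type.equals(SomeNewType.class.toString())) { return SomeNewType.fromSerial(object, context); }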

View File

@@ -0,0 +1,9 @@
package de.dhbwstuttgart.util;
public class Logger {
public static void print(String s) {
System.out.println(s);
}
}

View File

@@ -3,37 +3,26 @@ package de.dhbwstuttgart.util;
import java.util.Objects;
import java.util.Optional;
public class Pair<T, T1> {
private final T key;
private final T1 value;
public class Pair<T, T1> extends Tuple<T, T1> {
public Pair(T a, T1 b) {
this.value = b;
this.key = a;
}
public Pair(T a, T1 b) {
super(a, b);
}
public Optional<T1> getValue() {
return Optional.of(value);
}
public Optional<T1> getValue() {
return Optional.of(second);
}
public T getKey() {
return key;
}
public String toString() {
return "(" + key.toString() + "," + value.toString() + ")\n";
}
public T getKey() {
return first;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Pair<?, ?> pair = (Pair<?, ?>) o;
return Objects.equals(key, pair.key) && Objects.equals(value, pair.value);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Pair<?, ?> pair = (Pair<?, ?>) o;
return Objects.equals(first, pair.first) && Objects.equals(second, pair.second);
}
@Override
public int hashCode() {
return Objects.hash(key, value);
}
}

View File

@@ -0,0 +1,38 @@
package de.dhbwstuttgart.util;
import java.util.Objects;
public class Tuple<T1, T2> {
protected final T1 first;
protected final T2 second;
public Tuple(T1 a, T2 b) {
this.second = b;
this.first = a;
}
public T1 getFirst() {
return first;
}
public T2 getSecond() {
return second;
}
public String toString() {
return "(" + first.toString() + "," + second.toString() + ")\n";
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Tuple<?, ?> pair = (Tuple<?, ?>) o;
return Objects.equals(first, pair.first) && Objects.equals(second, pair.second);
}
@Override
public int hashCode() {
return Objects.hash(first, second);
}
}