Compare commits

..

72 Commits

Author SHA1 Message Date
Fabian Holzwarth 1af31e4513 feat: update parallelization for 0variance 2025-08-04 13:41:17 +02:00
Fabian Holzwarth 5b06f0a249 Merge branch 'feat/unify-server' into feat/unify-server-0variance 2025-07-21 16:27:55 +02:00
Fabian Holzwarth 3d99f282f5 feat: cleanup code 2025-07-21 16:12:56 +02:00
Fabian Holzwarth 512b10542e feat: adjusted parallelization 2025-07-21 15:41:04 +02:00
Fabian Holzwarth 3b1185d9d0 feat: paralellize 0-variance cases 2025-07-20 15:06:50 +02:00
Fabian Holzwarth 303c91dc87 chore: making classnames more expressive and cleanup some structures 2025-07-19 12:50:30 +02:00
Fabian Holzwarth 603a8b176a feat: implement partically cancellable tasks 2025-07-16 11:16:30 +02:00
Fabian Holzwarth f396189a4b feat: add timestamp to server output 2025-07-12 13:44:18 +02:00
Fabian Holzwarth e7f4a94908 feat: fixed old condition on server timeout 2025-07-12 13:22:54 +02:00
Fabian Holzwarth ce49a4b9a5 feat: reduce temporary objects and repeated loops 2025-07-12 13:14:55 +02:00
Fabian Holzwarth 03b3692724 feat: update indepenedentest 2025-07-11 12:50:27 +02:00
Fabian Holzwarth f0022d2b6f feat: use presized hashMaps to reduce resizing 2025-07-11 11:22:06 +02:00
Fabian Holzwarth b1015cfa82 feat: update logging and add success-level 2025-07-07 16:19:04 +02:00
Fabian Holzwarth b63a27a0a0 feat: improve server by assinging configured thread pools 2025-07-07 15:59:46 +02:00
Fabian Holzwarth 3b0a53d3c4 feat: add cross dependency, fix: socket closing and error messages 2025-07-07 15:19:56 +02:00
Fabian Holzwarth 50dbbf5f86 feat: implement generalized socket client, server logger and cleanup code 2025-07-07 14:01:27 +02:00
Fabian Holzwarth 130c491ac0 feat: more Boxing replacements 2025-07-06 15:49:59 +02:00
Fabian Holzwarth 9f9b264ac4 feat: replace unnecessary boxing with primitives 2025-07-06 15:18:51 +02:00
Fabian Holzwarth 1393db05c2 feat: implement lazy evaluation for logger outputs 2025-07-06 13:37:47 +02:00
Fabian Holzwarth 93e1a8787c feat: do not create a new context, if nothing changes 2025-07-05 11:43:10 +02:00
Fabian Holzwarth 0129d7540f feat use perMessagDeflate compression in websocket and use logger for message outpute 2025-07-05 11:16:06 +02:00
Fabian Holzwarth 7ea8337aee feat: remove unused logging library 2025-07-05 11:15:33 +02:00
Fabian Holzwarth 28458d405f feat: ignore server test 2025-07-02 15:47:55 +02:00
Fabian Holzwarth 1b905cb3e2 feat: implement loggers for the rest of the compiler 2025-07-01 23:06:09 +02:00
Fabian Holzwarth d02c3583e9 feat: implement new logger into type inference code 2025-07-01 22:16:29 +02:00
Fabian Holzwarth ca98e83fd2 feat: added logger 2025-07-01 21:21:39 +02:00
Fabian Holzwarth c80a0c8596 feat: fix error by reintroducing name generator and add server tests 2025-06-30 16:42:20 +02:00
Fabian Holzwarth 2278fb1b91 feat: undo removing NameGenerator to fix errors in ast generation 2025-06-30 12:46:41 +02:00
Fabian Holzwarth 32b16cd5fd feat: replace concurrent modification with correct function call 2025-06-30 11:49:53 +02:00
Fabian Holzwarth fd30c5f63f feat: prevent reusing the placeholder registry in tests 2025-06-29 16:04:54 +02:00
Fabian Holzwarth 8bfd6ae255 feat: remove redundant lambda functions and Set-resizings 2025-06-28 14:48:43 +02:00
Fabian Holzwarth ad2dfb13bd feat: speedup toString methods by using a StringBuilder instead of String concatenation 2025-06-28 14:30:12 +02:00
Fabian Holzwarth 501633a90c feat: fix test with null methodSignatureConstraint 2025-06-28 14:06:34 +02:00
Fabian Holzwarth 4defa50ca2 feat: added version check on connecting 2025-06-25 19:48:29 +02:00
Fabian Holzwarth d65e90536a feat: replace NameGenerator with instance of PlaceholderRegistry to prevent duplicates 2025-06-25 19:15:28 +02:00
Fabian Holzwarth 3de7f1aa61 fix: try generating new placeholders only in current placeholderRegistry context to prevent duplicates 2025-06-25 17:38:56 +02:00
Fabian Holzwarth 029e40b775 feat: make packets directional and self handling 2025-06-25 17:35:49 +02:00
Fabian Holzwarth 459bfcdd5f feat: added tests for client-server communication 2025-06-23 16:13:43 +02:00
Fabian Holzwarth 02886c38ea feat: fixed error in object serialization 2025-06-23 16:13:21 +02:00
Fabian Holzwarth 57ffae0481 fix: fixed some serialization and deserialization issues 2025-06-22 15:11:49 +02:00
Fabian Holzwarth d084d74a25 feat: fixed mismatch in PairOperator serialization 2025-06-22 10:10:32 +02:00
Fabian Holzwarth cd15016f61 feat: allow subclasses when asserting values 2025-06-21 13:44:29 +02:00
Fabian Holzwarth b0e5eee25c feat: rename Object... to Serial... and move into separate classes 2025-06-21 13:40:24 +02:00
Fabian Holzwarth d1bd285be7 fix: replace reflection class check with simple string check 2025-06-21 13:23:01 +02:00
Fabian Holzwarth a902fd5bee feat: replaced HashMaps with better type safety structure 2025-06-21 12:58:45 +02:00
Fabian Holzwarth ced9fdc9f7 fix: non serialized constraitnContext 2025-06-20 19:09:33 +02:00
Fabian Holzwarth 53417bf298 feat: implement serialization and adjust packets to correct data types 2025-06-20 18:53:25 +02:00
Fabian Holzwarth 2d4da03f00 feat: implementing client-server model 2025-06-18 19:58:23 +02:00
Fabian Holzwarth f7a13f5faa feat: turn UnifyContext into a record 2025-06-18 18:26:44 +02:00
Fabian Holzwarth 8fe80b4396 feat: move static placeholder generation into object 2025-06-18 17:47:29 +02:00
Fabian Holzwarth eb1201ae5e feat: apply future-based approach to inner cartesian loop 2025-06-09 16:49:45 +02:00
Fabian Holzwarth 963ad76593 feat: make cartesian loop computation Future-based 2025-06-09 15:30:04 +02:00
Fabian Holzwarth 1eba09e3b0 feat: change cartesian while loop into recursive 2025-06-09 15:16:09 +02:00
Fabian Holzwarth fc82125d14 feat: change TypeUnifyTask to use future-based logic 2025-06-09 14:53:37 +02:00
Fabian Holzwarth dad468368b feat: make functions unify and unify2 future-based 2025-06-09 13:14:44 +02:00
Fabian Holzwarth fdd4f3aa59 feat: implement variance-dependent calculation as Future based 2025-06-09 12:59:23 +02:00
Fabian Holzwarth a0c11b60e8 Remove unnecessary parameter and fix some parallelization 2025-06-07 16:11:34 +02:00
Fabian Holzwarth 4cddf73e6d feat: small fixes for correct parameters 2025-06-07 14:38:18 +02:00
Fabian Holzwarth 5024a02447 feat: implement unify context and prepare variance code capsulation 2025-06-07 11:53:32 +02:00
Fabian Holzwarth 6c2d97b770 chore: code cleanup 2025-05-26 15:49:01 +02:00
Fabian Holzwarth 426c2916d3 feat: remove unnecessary synchronized blocks 2025-05-26 14:40:17 +02:00
Fabian Holzwarth f722a00fbb feat: use the current thread for computation as well 2025-05-25 15:55:07 +02:00
Fabian Holzwarth 32797c9b9f feat: cleanup more cartesian product code 2025-05-24 12:43:42 +02:00
Fabian Holzwarth 87f655c85a feat: isolate constraint-filtering for one tv from computeCartesianRecursive 2025-05-23 16:10:37 +02:00
Fabian Holzwarth 613dceae1d feat: added Logger class, remove empty println start cleanup of computeCartesianRecursive 2025-05-23 14:12:25 +02:00
Fabian Holzwarth 81cac06e16 feat: add tool for merging many hash sets in parallel 2025-05-23 14:11:52 +02:00
Fabian Holzwarth a47d5bc024 feat: slightly improved placeholder name generation 2025-05-23 14:04:48 +02:00
Fabian Holzwarth e5916d455a feat: format and merge results in parallel 2025-05-19 17:05:18 +02:00
Fabian Holzwarth ebb639e72e feat: remove log flushes 2025-05-18 16:29:19 +02:00
Fabian Holzwarth f0a4a51ce6 feat: replace thread counter with thread pool 2025-05-18 15:40:31 +02:00
Fabian Holzwarth 7442880452 feat: limit placeholder generation to uppercase chars 2025-05-18 13:24:29 +02:00
Fabian Holzwarth c4dc3b4245 feat: replace random based placeholder generation with deterministic approach 2025-05-18 12:41:56 +02:00
144 changed files with 9140 additions and 5240 deletions

View File

@@ -15,7 +15,7 @@ jobs:
uses: actions/setup-java@v4
with:
distribution: 'temurin'
java-version: '24'
java-version: '23'
cache: 'maven'
- name: Compile project
run: |

40 independentTest.sh Executable file
View File

@@ -0,0 +1,40 @@
#!/usr/bin/env bash
REPO="https://gitea.hb.dhbw-stuttgart.de/f.holzwarth/JavaCompilerCore.git"
TDIR="./testBuild"
rm -rf "$TDIR" 2>/dev/null
mkdir $TDIR
cd $TDIR
git clone $REPO .
git checkout feat/unify-server
# git checkout 93e1a8787cd94c73f4538f6a348f58613893a584
# git checkout dad468368b86bdd5a3d3b2754b17617cee0a9107 # 1:55
# git checkout a0c11b60e8c9d7addcbe0d3a09c9ce2924e9d5c0 # 2:25
# git checkout 4cddf73e6d6c9116d3e1705c4b27a8e7f18d80c3 # 2:27
# git checkout 6c2d97b7703d954e4a42eef3ec374bcf313af75c # 2:13
# git checkout f722a00fbb6e69423d48a890e4a6283471763e64 # 1:35
# git checkout f0a4a51ce65639ce9a9470ff0fdb538fdf9c02cc # 2:19
# git checkout 1391206dfe59263cdb22f93371cfd1dd5465d97f # 1:29
date "+%Y.%m.%d %H:%M:%S"
# sed -i -e 's/source>21/source>23/g' pom.xml
# sed -i -e 's/target>21/target>23/g' pom.xml
mvn clean compile -DskipTests package
time java -jar target/JavaTXcompiler-0.1-jar-with-dependencies.jar resources/bytecode/javFiles/Matrix.jav;
# mvn clean compile test
echo -e "\nCleanup... "
cd -
rm -rf "$TDIR" 2>/dev/null
echo -e "\nFinished "
date "+%Y.%m.%d %H:%M:%S"
echo -e "\n "

50 pom.xml
View File

@@ -12,37 +12,57 @@ http://maven.apache.org/maven-v4_0_0.xsd">
<url>http://maven.apache.org</url>
<dependencies>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<version>5.13.2</version>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.13.2</version>
<scope>test</scope>
</dependency>
<!-- https://mvnrepository.com/artifact/org.antlr/antlr4 -->
<dependency>
<groupId>org.antlr</groupId>
<artifactId>antlr4</artifactId>
<version>4.13.2</version>
<version>4.11.1</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>2.19.0</version>
<version>2.16.1</version>
</dependency>
<dependency>
<groupId>io.github.classgraph</groupId>
<artifactId>classgraph</artifactId>
<version>4.8.180</version>
<version>4.8.172</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>33.4.8-jre</version>
<version>33.2.0-jre</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.ow2.asm/asm -->
<dependency>
<groupId>org.ow2.asm</groupId>
<artifactId>asm</artifactId>
<version>9.8</version>
<version>9.5</version>
</dependency>
<dependency>
<groupId>org.java-websocket</groupId>
<artifactId>Java-WebSocket</artifactId>
<version>1.5.2</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-simple</artifactId>
<version>1.7.25</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.17.2</version>
</dependency>
<dependency>
<groupId>com.diogonunes</groupId>
<artifactId>JColor</artifactId>
<version>5.5.1</version>
</dependency>
</dependencies>
@@ -51,17 +71,17 @@ http://maven.apache.org/maven-v4_0_0.xsd">
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.14.0</version>
<version>3.11.0</version>
<configuration>
<compilerArgs>--enable-preview</compilerArgs>
<source>24</source>
<target>24</target>
<source>23</source>
<target>23</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>3.5.3</version>
<version>3.1.0</version>
<configuration>
<redirectTestOutputToFile>true</redirectTestOutputToFile>
<reportsDirectory>${project.build.directory}/test-reports</reportsDirectory>
@@ -77,7 +97,7 @@ http://maven.apache.org/maven-v4_0_0.xsd">
<plugin>
<groupId>org.antlr</groupId>
<artifactId>antlr4-maven-plugin</artifactId>
<version>4.13.2</version>
<version>4.11.1</version>
<executions>
<execution>
<id>antlr</id>
@@ -90,7 +110,7 @@ http://maven.apache.org/maven-v4_0_0.xsd">
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>3.4.2</version>
<version>3.3.0</version>
<configuration>
<archive>
<manifest>
@@ -138,4 +158,4 @@ http://maven.apache.org/maven-v4_0_0.xsd">
<url>file:///${project.basedir}/maven-repository/</url>
</repository>
</distributionManagement>
</project>
</project>

View File

@@ -0,0 +1,51 @@
class C1 {
C1 self() {
return this;
}
}
class C2 {
C2 self() {
return this;
}
}
class Example {
untypedMethod(var) {
return var.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self()
.self().self().self().self();
}
}

View File

@@ -0,0 +1,43 @@
import java.lang.Integer;
import java.lang.Boolean;
import java.util.Queue;
import java.util.Vector;
import java.util.List;
import java.util.ArrayDeque;
class Pos {
public Integer x;
public Integer y;
public Pos(Integer x, Integer y) {
this.x = x;
this.y = y;
}
}
class GridSearch {
Pos search(Vector<Vector<Boolean>> grid) {
var w = grid.size();
var h = grid.getFirst().size();
// keep a queue on which cells to check
var cellQueue = new ArrayDeque<Pos>();
cellQueue.add(new Pos(0,0));
while (!cellQueue.isEmpty()) {
var pos = cellQueue.poll();
// if the target was found: return the position
var value = grid.get(pos.x).get(pos.y);
if (value) {
return pos;
}
// keep searching on neighboring tiles
if (pos.x < w-1) cellQueue.add(new Pos(pos.x + 1, pos.y));
if (pos.y < h-1) cellQueue.add(new Pos(pos.x, pos.y + 1));
}
return (Pos)null;
}
}

View File

@@ -0,0 +1,42 @@
import java.util.List;
import java.util.AbstractList;
import java.util.Vector;
import java.lang.Integer;
class Pixel {
public color;
}
class Mask {
mask;
Mask(mask) {
this.mask = mask;
}
apply(pixels) {
var w = mask.size();
var h = mask.get(0).size();
var imgW = pixels.size();
var imgH = pixels.get(0).size();
for (var x = 0; x < imgW - w; x++) {
for (var y = 0; y < imgH - h; y++) {
var total = 0;
for (var xd = 0; xd < w; xd++) {
for (var yd = 0; yd < h; yd++) {
var p = pixels.get(x + xd).get(y + yd);
var m = mask.get(xd).get(yd);
total = total + (p.color * m);
}
}
pixels.get(x).get(y).color = total;
}
}
return pixels;
}
}

View File

@@ -0,0 +1,39 @@
import java.lang.Integer;
import java.lang.Boolean;
import java.util.ArrayList;
import java.util.HashMap;
public class PascalsTriangle {
create(n) {
var rows = new ArrayList<ArrayList<Integer>>();
var evens = new ArrayList<ArrayList<Boolean>>();
if (n <= 0) return rows;
// first row
rows.add(new ArrayList<Integer>(1));
evens.add(new ArrayList<Boolean>(false));
for (int y = 1; y < n; y++) {
var row = new ArrayList<Integer>();
var evensRow = new ArrayList<Boolean>();
row.add(1);
evensRow.add(false);
for (int x = 1; x < y-1; x++) {
int tl = rows.getLast().get(x-1);
int tr = rows.getLast().get(x);
row.add(tl + tr);
evensRow.add(((tl + tr) % 2) == 1);
}
row.add(1);
rows.add(row);
evensRow.add(false);
evens.add(evensRow);
}
return rows;
}
}

View File

@@ -1,22 +0,0 @@
import java.lang.String;
public class Bug363 {
uncurry (f){
return x -> f.apply(x);
}
uncurry (f){
return (x, y) -> f.apply(x).apply(y);
}
uncurry (f){
return (x, y, z) -> f.apply(x).apply(y).apply(z);
}
public test(){
var f = x -> y -> z -> x + y + z;
var g = uncurry(f);
return g.apply("A", "B", "C"); // Outputs: 6
}
}

View File

@@ -1,8 +0,0 @@
import java.lang.String;
public class Bug364{
public main(){
var f = x -> y -> z -> x + y + z;
return f.apply("A").apply("B").apply("C");
}
}

View File

@@ -1,21 +0,0 @@
import java.lang.String;
import java.lang.Object;
public class Bug365{
swap(f){
return x -> y -> f.apply(y).apply(x);
}
swap(Fun1$$<String, Fun1$$<String, Fun1$$<String, Object>>> f){
return x -> y -> z -> f.apply(z).apply(y).apply(x);
}
public ex1() {
var func = x -> y -> z -> x + y + z;
return func.apply("A").apply("B").apply("C");
}
public ex2() {
var func = x -> y -> z -> x + y + z;
return swap(func).apply("A").apply("B").apply("C");
}
}

View File

@@ -1,12 +0,0 @@
import java.lang.Integer;
public class Bug366 {
public static lambda() {
return (a, b) -> a + b;
}
public static test() {
var l = lambda();
return l.apply(10, 20);
}
}

View File

@@ -1,10 +0,0 @@
import java.lang.Boolean;
public class Bug371 {
static m1(x, y) { return x || y; }
static m2(x, y) { return x && y; }
public static test() {
return m2(m1(true, false), true);
}
}

View File

@@ -0,0 +1,16 @@
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.lang.String;
import java.util.stream.Stream;
import java.util.function.Function;
import java.util.function.Predicate;
import java.lang.Integer;
class BugXXX {
public main() {
List<Integer> i = new ArrayList<>(List.of(1,2,3,4,5,6,7,8,9,10));
Optional<Integer> tmp = i.stream().filter(x -> x == 5).map(x -> x*2).findFirst();
return tmp;
}
}

View File

@@ -0,0 +1,17 @@
import java.util.List;
import java.lang.Integer;
//import java.util.Collection;
public class Merge2 {
public merge(a, b) {
a.addAll(b);
return a;
}
public sort(in){
var firstHalf = in.subList(1,2);
return merge(sort(firstHalf), sort(in));
}
}

View File

@@ -8,18 +8,20 @@ import de.dhbwstuttgart.target.generate.ASTToTargetAST;
import de.dhbwstuttgart.target.tree.*;
import de.dhbwstuttgart.target.tree.expression.*;
import de.dhbwstuttgart.target.tree.type.*;
import de.dhbwstuttgart.util.Logger;
import org.objectweb.asm.*;
import java.lang.invoke.*;
import java.lang.reflect.Modifier;
import java.util.*;
import java.util.stream.Collectors;
import static org.objectweb.asm.Opcodes.*;
import static de.dhbwstuttgart.target.tree.expression.TargetBinaryOp.*;
import static de.dhbwstuttgart.target.tree.expression.TargetLiteral.*;
public class Codegen {
public static Logger logger = new Logger("codegen");
private final TargetStructure clazz;
private final ClassWriter cw;
public final String className;
@@ -85,16 +87,14 @@ public class Codegen {
int localCounter;
MethodVisitor mv;
TargetType returnType;
boolean isStatic = false;
Stack<BreakEnv> breakStack = new Stack<>();
Stack<Integer> switchResultValue = new Stack<>();
State(TargetType returnType, MethodVisitor mv, int localCounter, boolean isStatic) {
State(TargetType returnType, MethodVisitor mv, int localCounter) {
this.returnType = returnType;
this.mv = mv;
this.localCounter = localCounter;
this.isStatic = isStatic;
}
void enterScope() {
@@ -760,16 +760,6 @@ public class Codegen {
}
}
private static TargetType removeGenerics(TargetType param) {
return switch (param) {
case null -> null;
case TargetFunNType funNType -> new TargetFunNType(funNType.name(), funNType.funNParams(), List.of(), funNType.returnArguments());
case TargetRefType refType -> new TargetRefType(refType.name());
case TargetGenericType targetGenericType -> TargetType.Object;
default -> param;
};
}
private void generateLambdaExpression(State state, TargetLambdaExpression lambda) {
var mv = state.mv;
@@ -781,8 +771,7 @@ public class Codegen {
var parameters = new ArrayList<>(lambda.captures());
parameters.addAll(signature.parameters());
parameters = parameters.stream().map(param -> param.withType(removeGenerics(param.pattern().type()))).collect(Collectors.toCollection(ArrayList::new));
var implSignature = new TargetMethod.Signature(Set.of(), parameters, removeGenerics(lambda.signature().returnType()));
var implSignature = new TargetMethod.Signature(Set.of(), parameters, lambda.signature().returnType());
TargetMethod impl;
if (lambdas.containsKey(lambda)) {
@@ -790,22 +779,21 @@ public class Codegen {
} else {
var name = "lambda$" + lambdaCounter++;
impl = new TargetMethod(state.isStatic ? ACC_STATIC : 0, name, lambda.block(), implSignature, null);
generateMethod(impl, state);
impl = new TargetMethod(0, name, lambda.block(), implSignature, null);
generateMethod(impl);
lambdas.put(lambda, impl);
}
var mt = MethodType.methodType(CallSite.class, MethodHandles.Lookup.class, String.class, MethodType.class, MethodType.class, MethodHandle.class, MethodType.class);
var bootstrap = new Handle(H_INVOKESTATIC, "java/lang/invoke/LambdaMetafactory", "metafactory", mt.toMethodDescriptorString(), false);
var handle = new Handle(state.isStatic ? H_INVOKESTATIC : H_INVOKEVIRTUAL, clazz.getName(), impl.name(), implSignature.getDescriptor(), false);
var handle = new Handle(H_INVOKEVIRTUAL, clazz.getName(), impl.name(), implSignature.getDescriptor(), false);
var params = new ArrayList<TargetType>();
if(!state.isStatic) params.add(new TargetRefType(clazz.qualifiedName().getClassName()));
params.add(new TargetRefType(clazz.qualifiedName().getClassName()));
params.addAll(lambda.captures().stream().map(mp -> mp.pattern().type()).toList());
if (!state.isStatic)
mv.visitVarInsn(ALOAD, 0);
mv.visitVarInsn(ALOAD, 0);
for (var index = 0; index < lambda.captures().size(); index++) {
var capture = lambda.captures().get(index);
var pattern = (TargetTypePattern) capture.pattern();
@@ -1332,7 +1320,7 @@ public class Codegen {
types.add(Type.getObjectType(guard.inner().type().getInternalName()));
// TODO Same here we need to evaluate constant;
} else {
System.out.println(label);
logger.info(label);
throw new NotImplementedException();
}
}
@@ -1533,7 +1521,7 @@ public class Codegen {
MethodVisitor mv = cw.visitMethod(ACC_PUBLIC | ACC_STATIC, "<clinit>", "()V", null, null);
mv.visitCode();
var state = new State(null, mv, 0, true);
var state = new State(null, mv, 0);
generate(state, constructor.block());
mv.visitInsn(RETURN);
@@ -1547,7 +1535,7 @@ public class Codegen {
mv.visitAttribute(new JavaTXSignatureAttribute(constructor.getTXSignature()));
mv.visitCode();
var state = new State(null, mv, 1, false);
var state = new State(null, mv, 1);
for (var param : constructor.parameters()) {
var pattern = param.pattern();
if (pattern instanceof TargetTypePattern tp)
@@ -1593,11 +1581,8 @@ public class Codegen {
}
}
}
private void generateMethod(TargetMethod method) {
generateMethod(method, null);
}
private void generateMethod(TargetMethod method, State parent) {
private void generateMethod(TargetMethod method) {
var access = method.access();
if (method.block() == null)
access |= ACC_ABSTRACT;
@@ -1612,10 +1597,7 @@ public class Codegen {
if (method.block() != null) {
mv.visitCode();
var state = new State(method.signature().returnType(), mv, method.isStatic() ? 0 : 1, method.isStatic());
if (parent != null) {
state.scope.parent = parent.scope;
}
var state = new State(method.signature().returnType(), mv, method.isStatic() ? 0 : 1);
var offset = 1;
for (var param : method.signature().parameters()) {
state.createVariable(param.pattern().name(), param.pattern().type());
@@ -1625,8 +1607,6 @@ public class Codegen {
bindLocalVariables(state, cp, offset);
offset++;
}
//if (parent != null) System.out.println("parent: " + parent.scope.locals.keySet());
//System.out.println(state.scope.locals.keySet());
generate(state, method.block());
if (method.signature().returnType() == null)
mv.visitInsn(RETURN);
@@ -1733,7 +1713,7 @@ public class Codegen {
// Generate wrapper method
var mv = cw2.visitMethod(ACC_PUBLIC, toMethod.name, toDescriptor, null, null);
var state = new State(null, mv, 0, false);
var state = new State(null, mv, 0);
mv.visitVarInsn(ALOAD, 0);
mv.visitFieldInsn(GETFIELD, className, "wrapped", pair.from.toDescriptor());
@@ -1765,7 +1745,7 @@ public class Codegen {
converter.classLoader.findClass(className);
} catch (ClassNotFoundException e) {
try {
converter.classLoader.loadClass(className, bytes);
converter.classLoader.loadClass(bytes);
} catch (LinkageError ignored) {}
}
}

View File

@@ -39,10 +39,6 @@ public class FunNGenerator {
public final List<TargetType> inParams;
public final List<TargetType> realParams;
public GenericParameters(TargetFunNType funNType) {
this(funNType.funNParams(), funNType.returnArguments());
}
public GenericParameters(List<TargetType> params, int numReturns) {
this.realParams = params;
this.inParams = flattenTypeParams(params);
@@ -124,7 +120,7 @@ public class FunNGenerator {
superFunNMethodDescriptor.append(")V");
}
System.out.println(superFunNMethodSignature);
Codegen.logger.info(superFunNMethodSignature);
ClassWriter classWriter = new ClassWriter(0);
MethodVisitor methodVisitor;

View File

@@ -1,46 +1,81 @@
package de.dhbwstuttgart.core;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.util.Logger;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.util.*;
public class ConsoleInterface {
private static final String directory = System.getProperty("user.dir");
public static void main(String[] args) throws IOException, ClassNotFoundException {
List<File> input = new ArrayList<>();
List<File> classpath = new ArrayList<>();
String outputPath = null;
Iterator<String> it = Arrays.asList(args).iterator();
if(args.length == 0){
System.out.println("No input files given. Get help with --help");
System.exit(1);
}else if(args.length == 1 && args[0].equals("--help")){
System.out.println("Usage: javatx [OPTION]... [FILE]...\n" +
"\t-cp\tSet Classpath\n" +
"\t-d\tSet destination directory");
System.exit(1);
public class ConsoleInterface {
/**
* Leave the argument configurations here for the rest of the code to read
*/
public static Logger.LogLevel logLevel = Logger.LogLevel.ERROR;
public static boolean writeLogFiles = false;
public static Optional<String> unifyServerUrl = Optional.empty();
public static void main(String[] args) throws IOException, ClassNotFoundException {
List<File> input = new ArrayList<>();
List<File> classpath = new ArrayList<>();
String outputPath = null;
Iterator<String> it = Arrays.asList(args).iterator();
Optional<Integer> serverPort = Optional.empty();
if (args.length == 0) {
System.out.println("No input files given. Get help with --help");
System.exit(1);
} else if (args.length == 1 && args[0].equals("--help")) {
System.out.println("Usage: javatx [OPTION]... [FILE]...\n" +
"\t-cp\tSet Classpath\n" +
"\t-d\tSet destination directory\n" +
"\t[--server-mode <port>]\n" +
"\t[--unify-server <url>]\n" +
"\t[--write-logs]\n" +
"\t[-v|-vv-|-vvv]");
System.exit(1);
}
while (it.hasNext()) {
String arg = it.next();
if (arg.equals("-d")) {
outputPath = it.next();
} else if (arg.startsWith("-d")) {
outputPath = arg.substring(2);
} else if (arg.equals("-cp") || arg.equals("-classpath")) {
String[] cps = it.next().split(":");
for (String cp : cps) {
classpath.add(new File(cp));
}
while(it.hasNext()){
String arg = it.next();
if(arg.equals("-d")){
outputPath = it.next();
}else if(arg.startsWith("-d")) {
outputPath = arg.substring(2);
}else if(arg.equals("-cp") || arg.equals("-classpath")){
String[] cps = it.next().split(":");
for(String cp : cps){
classpath.add(new File(cp));
}
}else{
input.add(new File(arg));
}
}
JavaTXCompiler compiler = new JavaTXCompiler(input, classpath, outputPath != null ? new File(outputPath) : null);
//compiler.typeInference();
compiler.generateBytecode();
}
} else if (arg.equals("--server-mode")) {
serverPort = Optional.of(Integer.parseInt(it.next()));
} else if (arg.equals("--unify-server")) {
unifyServerUrl = Optional.of(it.next());
} else if (arg.equals("--write-logs")) {
ConsoleInterface.writeLogFiles = true;
} else if (arg.startsWith("-v")) {
logLevel = switch (arg) {
case "-v" -> Logger.LogLevel.WARNING;
case "-vv" -> Logger.LogLevel.INFO;
case "-vvv" -> Logger.LogLevel.DEBUG;
default -> throw new IllegalArgumentException("Argument " + arg + " is not a valid verbosity level");
};
} else {
input.add(new File(arg));
}
}
if (serverPort.isPresent()) {
if (unifyServerUrl.isPresent()) throw new RuntimeException("Cannot use unifyServer when in server mode!");
JavaTXServer server = new JavaTXServer(serverPort.get());
server.listen();
}
else {
JavaTXCompiler compiler = new JavaTXCompiler(input, classpath, outputPath != null ? new File(outputPath) : null);
//compiler.typeInference();
compiler.generateBytecode();
SocketClient.closeIfOpen();
}
}
}

View File

@@ -12,6 +12,11 @@ import de.dhbwstuttgart.parser.SyntaxTreeGenerator.SyntaxTreeGenerator;
import de.dhbwstuttgart.parser.antlr.Java17Parser.SourceFileContext;
import de.dhbwstuttgart.parser.scope.JavaClassName;
import de.dhbwstuttgart.parser.scope.JavaClassRegistry;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.packet.SetAutoclosePacket;
import de.dhbwstuttgart.server.packet.UnifyRequestPacket;
import de.dhbwstuttgart.server.packet.UnifyResultPacket;
import de.dhbwstuttgart.syntaxtree.ClassOrInterface;
import de.dhbwstuttgart.syntaxtree.GenericTypeVar;
import de.dhbwstuttgart.syntaxtree.Method;
@@ -19,6 +24,7 @@ import de.dhbwstuttgart.syntaxtree.ParameterList;
import de.dhbwstuttgart.syntaxtree.SourceFile;
import de.dhbwstuttgart.syntaxtree.GenericDeclarationList;
import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
import de.dhbwstuttgart.syntaxtree.factory.NameGenerator;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.syntaxtree.type.ExtendsWildcardType;
import de.dhbwstuttgart.syntaxtree.type.GenericRefType;
@@ -35,10 +41,13 @@ import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.typeAlgo.TYPE;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.RuleSet;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.distributeVariance;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
@@ -49,6 +58,7 @@ import de.dhbwstuttgart.typeinference.unify.UnifyResultListenerImpl;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;
import de.dhbwstuttgart.util.Logger;
import java.io.*;
import java.lang.reflect.Modifier;
import java.nio.file.Path;
@@ -61,14 +71,17 @@ import org.apache.commons.io.output.NullOutputStream;
public class JavaTXCompiler {
// do not use this in any code, that can be executed serverside!
public static PlaceholderRegistry defaultClientPlaceholderRegistry = new PlaceholderRegistry();
public static Logger defaultLogger = new Logger();
// public static JavaTXCompiler INSTANCE;
final CompilationEnvironment environment;
Boolean resultmodel = true;
public final Map<File, SourceFile> sourceFiles = new HashMap<>();
public final Map<File, SourceFile> sourceFiles = new HashMap<>();
Boolean log = false; //gibt an ob ein Log-File nach System.getProperty("user.dir")+""/logFiles/"" geschrieben werden soll?
public volatile UnifyTaskModel usedTasks = new UnifyTaskModel();
public final DirectoryClassLoader classLoader;
public volatile UnifyTaskModel usedTasks = new UnifyTaskModel();
public final DirectoryClassLoader classLoader;
public final List<File> classPath;
private final File outputPath;
@@ -76,14 +89,9 @@ public class JavaTXCompiler {
public DirectoryClassLoader getClassLoader() {
return classLoader;
}
public JavaTXCompiler(File sourceFile) throws IOException, ClassNotFoundException {
this(Arrays.asList(sourceFile), List.of(), new File("."));
}
public JavaTXCompiler(File sourceFile, Boolean log) throws IOException, ClassNotFoundException {
this(sourceFile);
this.log = log;
public JavaTXCompiler(File sourceFile) throws IOException, ClassNotFoundException {
this(Collections.singletonList(sourceFile), List.of(), new File("."));
}
public JavaTXCompiler(List<File> sourceFiles) throws IOException, ClassNotFoundException {
@@ -91,6 +99,11 @@ public class JavaTXCompiler {
}
public JavaTXCompiler(List<File> sources, List<File> contextPath, File outputPath) throws IOException, ClassNotFoundException {
// ensure new default placeholder registry for tests
defaultClientPlaceholderRegistry = new PlaceholderRegistry();
NameGenerator.reset();
ASTToTargetAST.OBJECT = ASTFactory.createObjectType();
var path = new ArrayList<>(contextPath);
if (contextPath.isEmpty()) {
// When no contextPaths are given, the working directory is the sources root
@@ -300,52 +313,51 @@ public class JavaTXCompiler {
Set<Set<UnifyPair>> results = new HashSet<>();
UnifyResultModel urm = null;
// urm.addUnifyResultListener(resultListener);
try {
logFile = logFile == null ? new FileWriter(new File("log_" + sourceFiles.keySet().iterator().next().getName())) : logFile;
IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses, logFile, getClassLoader(), this);
System.out.println(finiteClosure);
urm = new UnifyResultModel(cons, finiteClosure);
urm.addUnifyResultListener(resultListener);
ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons);
logFile = logFile == null ? new FileWriter("log_" + sourceFiles.keySet().iterator().next().getName()) : logFile;
Logger logger = new Logger(logFile, "TypeInferenceAsync");
UnifyContext context = new UnifyContext(logger, true, urm, usedTasks, defaultClientPlaceholderRegistry);
Function<UnifyPair, UnifyPair> distributeInnerVars = x -> {
UnifyType lhs, rhs;
if (((lhs = x.getLhsType()) instanceof PlaceholderType) && ((rhs = x.getRhsType()) instanceof PlaceholderType) && (((PlaceholderType) lhs).isInnerType() || ((PlaceholderType) rhs).isInnerType())) {
((PlaceholderType) lhs).setInnerType(true);
((PlaceholderType) rhs).setInnerType(true);
}
return x;
IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses, logger, getClassLoader(), this, context.placeholderRegistry());
logger.info(finiteClosure.toString());
urm = new UnifyResultModel(cons, finiteClosure);
urm.addUnifyResultListener(resultListener);
ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons, context.placeholderRegistry());
};
logFile.write(unifyCons.toString());
unifyCons = unifyCons.map(distributeInnerVars);
logFile.write(unifyCons.toString());
TypeUnify unify = new TypeUnify();
// Set<Set<UnifyPair>> results = new HashSet<>(); Nach vorne gezogen
logFile.write("FC:\\" + finiteClosure.toString() + "\n");
for (SourceFile f : this.sourceFiles.values()) {
logFile.write(ASTTypePrinter.print(f));
Function<UnifyPair, UnifyPair> distributeInnerVars = x -> {
UnifyType lhs, rhs;
if (((lhs = x.getLhsType()) instanceof PlaceholderType) && ((rhs = x.getRhsType()) instanceof PlaceholderType) && (((PlaceholderType) lhs).isInnerType() || ((PlaceholderType) rhs).isInnerType())) {
((PlaceholderType) lhs).setInnerType(true);
((PlaceholderType) rhs).setInnerType(true);
}
logFile.flush();
Set<PlaceholderType> varianceTPHold;
Set<PlaceholderType> varianceTPH = new HashSet<>();
varianceTPH = varianceInheritanceConstraintSet(unifyCons);
return x;
/*
* PL 2018-11-07 wird in varianceInheritanceConstraintSet erledigt do { //PL 2018-11-05 Huellenbildung Variance auf alle TPHs der Terme auf der jeweiligen //anderen Seite übertragen varianceTPHold = new HashSet<>(varianceTPH); varianceTPH = varianceInheritanceConstraintSet(unifyCons); unifyCons.map( y -> { if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType() instanceof PlaceholderType)) { if (((PlaceholderType)y.getLhsType()).getVariance() != 0 &&
* ((PlaceholderType)y.getRhsType()).getVariance() == 0) { ((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType( )).getVariance()); } if (((PlaceholderType)y.getLhsType()).getVariance() == 0 && ((PlaceholderType)y.getRhsType()).getVariance() != 0) { ((PlaceholderType)y.getLhsType()).setVariance(((PlaceholderType)y.getRhsType( )).getVariance()); } } return y; } ); } while (!varianceTPHold.equals(varianceTPH));
*/
// Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure,
// logFile, log);
// Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
List<Set<Constraint<UnifyPair>>> oderConstraints = unifyCons.getOderConstraints()/*
* .stream().map(x -> { Set<Set<UnifyPair>> ret = new HashSet<>(); for (Constraint<UnifyPair> y : x) { ret.add(new HashSet<>(y)); } return ret; }).collect(Collectors. toCollection(ArrayList::new))
*/;
unify.unifyAsync(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, urm, usedTasks);
} catch (IOException e) {
System.err.println("kein LogFile");
};
logger.debug(unifyCons.toString());
unifyCons = unifyCons.map(distributeInnerVars);
logger.debug(unifyCons.toString());
// Set<Set<UnifyPair>> results = new HashSet<>(); Nach vorne gezogen
logger.debug("FC:\\" + finiteClosure.toString() + "\n");
for (SourceFile f : this.sourceFiles.values()) {
logger.debug(ASTTypePrinter.print(f));
}
// logFile.flush();
Set<PlaceholderType> varianceTPHold;
Set<PlaceholderType> varianceTPH = new HashSet<>();
varianceTPH = varianceInheritanceConstraintSet(unifyCons);
/*
* PL 2018-11-07 wird in varianceInheritanceConstraintSet erledigt do { //PL 2018-11-05 Huellenbildung Variance auf alle TPHs der Terme auf der jeweiligen //anderen Seite übertragen varianceTPHold = new HashSet<>(varianceTPH); varianceTPH = varianceInheritanceConstraintSet(unifyCons); unifyCons.map( y -> { if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType() instanceof PlaceholderType)) { if (((PlaceholderType)y.getLhsType()).getVariance() != 0 &&
* ((PlaceholderType)y.getRhsType()).getVariance() == 0) { ((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType( )).getVariance()); } if (((PlaceholderType)y.getLhsType()).getVariance() == 0 && ((PlaceholderType)y.getRhsType()).getVariance() != 0) { ((PlaceholderType)y.getLhsType()).setVariance(((PlaceholderType)y.getRhsType( )).getVariance()); } } return y; } ); } while (!varianceTPHold.equals(varianceTPH));
*/
// Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure,
// logFile, log);
// Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
List<Set<Constraint<UnifyPair>>> oderConstraints = unifyCons.getOderConstraints()/*
* .stream().map(x -> { Set<Set<UnifyPair>> ret = new HashSet<>(); for (Constraint<UnifyPair> y : x) { ret.add(new HashSet<>(y)); } return ret; }).collect(Collectors. toCollection(ArrayList::new))
*/;
TypeUnify.unifyAsync(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
return urm;
}
@@ -365,108 +377,108 @@ public class JavaTXCompiler {
final ConstraintSet<Pair> cons = getConstraints(file);
Set<Set<UnifyPair>> results = new HashSet<>();
try {
var logFolder = new File(System.getProperty("user.dir") + "/logFiles/");
if (log) logFolder.mkdirs();
Writer logFile = log ? new FileWriter(new File(logFolder, "log_" + sourceFiles.keySet().iterator().next().getName())) : new OutputStreamWriter(new NullOutputStream());
IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses.stream().toList(), logFile, classLoader, this);
System.out.println(finiteClosure);
ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons);
System.out.println("xxx1");
Function<UnifyPair, UnifyPair> distributeInnerVars = x -> {
UnifyType lhs, rhs;
if (((lhs = x.getLhsType()) instanceof PlaceholderType) && ((rhs = x.getRhsType()) instanceof PlaceholderType) && (((PlaceholderType) lhs).isInnerType() || ((PlaceholderType) rhs).isInnerType())) {
((PlaceholderType) lhs).setInnerType(true);
((PlaceholderType) rhs).setInnerType(true);
}
return x;
PlaceholderRegistry placeholderRegistry = new PlaceholderRegistry();
};
var logFolder = new File(System.getProperty("user.dir") + "/logFiles/");
if (ConsoleInterface.writeLogFiles && !logFolder.mkdirs()) throw new RuntimeException("Could not creat directoy for log files: " + logFolder);
Writer logFile = ConsoleInterface.writeLogFiles ? new FileWriter(new File(logFolder, "log_" + sourceFiles.keySet().iterator().next().getName())) : new OutputStreamWriter(new NullOutputStream());
Logger logger = new Logger(logFile, "TypeInference");
logFile.write("Unify:" + unifyCons.toString());
System.out.println("Unify:" + unifyCons.toString());
unifyCons = unifyCons.map(distributeInnerVars);
logFile.write("\nUnify_distributeInnerVars: " + unifyCons.toString());
TypeUnify unify = new TypeUnify();
// Set<Set<UnifyPair>> results = new HashSet<>(); Nach vorne gezogen
logFile.write("FC:\\" + finiteClosure.toString() + "\n");
logFile.write(ASTTypePrinter.print(sf));
System.out.println(ASTTypePrinter.print(sf));
logFile.flush();
List<UnifyPair> andConstraintsSorted = unifyCons.getUndConstraints().stream()
.sorted(Comparator.comparing(UnifyPair::getPairOp).thenComparing(UnifyPair::getLhsType, Comparator.comparing(UnifyType::getName)))
.collect(Collectors.toList());
System.out.println(andConstraintsSorted);
Set<PlaceholderType> varianceTPHold;
Set<PlaceholderType> varianceTPH = new HashSet<>();
varianceTPH = varianceInheritanceConstraintSet(unifyCons);
/*
* PL 2018-11-07 wird in varianceInheritanceConstraintSet erledigt do { //PL 2018-11-05 Huellenbildung Variance auf alle TPHs der Terme auf der jeweiligen //anderen Seite übertragen varianceTPHold = new HashSet<>(varianceTPH); varianceTPH = varianceInheritanceConstraintSet(unifyCons); unifyCons.map( y -> { if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType() instanceof PlaceholderType)) { if (((PlaceholderType)y.getLhsType()).getVariance() != 0 &&
* ((PlaceholderType)y.getRhsType()).getVariance() == 0) { ((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType( )).getVariance()); } if (((PlaceholderType)y.getLhsType()).getVariance() == 0 && ((PlaceholderType)y.getRhsType()).getVariance() != 0) { ((PlaceholderType)y.getLhsType()).setVariance(((PlaceholderType)y.getRhsType( )).getVariance()); } } return y; } ); } while (!varianceTPHold.equals(varianceTPH));
*/
// Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure,
// logFile, log);
// Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
List<Set<Constraint<UnifyPair>>> oderConstraints = unifyCons.getOderConstraints()// .stream().map(x -> {
/*
* Set<Set<UnifyPair>> ret = new HashSet<>(); for (Constraint<UnifyPair> y : x) { ret.add(new HashSet<>(y)); } return ret; }).collect(Collectors.toCollection(ArrayList::new))
*/;
if (resultmodel) {
/* UnifyResultModel Anfang */
UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
UnifyResultListenerImpl li = new UnifyResultListenerImpl();
urm.addUnifyResultListener(li);
unify.unifyParallel(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, urm, usedTasks);
//System.out.println("RESULT Final: " + li.getResults());
var finalResults = li.getResults().stream().sorted().toList();
int i = 0;
System.out.println("RESULT Final: ");
for (var result : finalResults){
System.out.println("Result: " + i++);
System.out.println(result.getSortedResults());
}
System.out.println("Constraints for Generated Generics: " + " ???");
logFile.write("RES_FINAL: " + li.getResults().toString() + "\n");
logFile.flush();
return li.getResults();
FiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses.stream().toList(), logger, classLoader, this, placeholderRegistry);
logger.info(finiteClosure.toString());
ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons, placeholderRegistry);
logger.info("xxx1");
Function<UnifyPair, UnifyPair> distributeInnerVars = x -> {
UnifyType lhs, rhs;
if (((lhs = x.getLhsType()) instanceof PlaceholderType) && ((rhs = x.getRhsType()) instanceof PlaceholderType) && (((PlaceholderType) lhs).isInnerType() || ((PlaceholderType) rhs).isInnerType())) {
((PlaceholderType) lhs).setInnerType(true);
((PlaceholderType) rhs).setInnerType(true);
}
/* UnifyResultModel End */
else {
// Set<Set<UnifyPair>> result = unify.unify(unifyCons.getUndConstraints(),
// oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons,
// finiteClosure));
Set<Set<UnifyPair>> result = unify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons, finiteClosure), usedTasks);
System.out.println("RESULT: " + result);
logFile.write("RES: " + result.toString() + "\n");
logFile.flush();
results.addAll(result);
return x;
results = results.stream().map(x -> {
Optional<Set<UnifyPair>> res = new RuleSet().subst(x.stream().map(y -> {
if (y.getPairOp() == PairOperator.SMALLERDOTWC)
y.setPairOp(PairOperator.EQUALSDOT);
return y; // alle Paare a <.? b erden durch a =. b ersetzt
}).collect(Collectors.toCollection(HashSet::new)));
if (res.isPresent()) {// wenn subst ein Erg liefert wurde was veraendert
return new TypeUnifyTask().applyTypeUnificationRules(res.get(), finiteClosure);
} else
return x; // wenn nichts veraendert wurde wird x zurueckgegeben
}).collect(Collectors.toCollection(HashSet::new));
System.out.println("RESULT Final: " + results);
System.out.println("Constraints for Generated Generics: " + " ???");
logFile.write("RES_FINAL: " + results.toString() + "\n");
logFile.flush();
logFile.write("PLACEHOLDERS: " + PlaceholderType.EXISTING_PLACEHOLDERS);
logFile.flush();
}
} catch (IOException e) {
System.err.println("kein LogFile");
};
logger.debug("Unify:" + unifyCons.toString());
logger.info("Unify:" + unifyCons.toString());
unifyCons = unifyCons.map(distributeInnerVars);
logger.debug("\nUnify_distributeInnerVars: " + unifyCons.toString());
// Set<Set<UnifyPair>> results = new HashSet<>(); Nach vorne gezogen
logger.debug("FC:\\" + finiteClosure.toString() + "\n");
logger.debug(ASTTypePrinter.print(sf));
logger.info(ASTTypePrinter.print(sf));
// logFile.flush();
logger.info("Unify nach Oder-Constraints-Anpassung:" + unifyCons.toString());
Set<PlaceholderType> varianceTPHold;
Set<PlaceholderType> varianceTPH = new HashSet<>();
varianceTPH = varianceInheritanceConstraintSet(unifyCons);
/*
* PL 2018-11-07 wird in varianceInheritanceConstraintSet erledigt do { //PL 2018-11-05 Huellenbildung Variance auf alle TPHs der Terme auf der jeweiligen //anderen Seite übertragen varianceTPHold = new HashSet<>(varianceTPH); varianceTPH = varianceInheritanceConstraintSet(unifyCons); unifyCons.map( y -> { if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType() instanceof PlaceholderType)) { if (((PlaceholderType)y.getLhsType()).getVariance() != 0 &&
* ((PlaceholderType)y.getRhsType()).getVariance() == 0) { ((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType( )).getVariance()); } if (((PlaceholderType)y.getLhsType()).getVariance() == 0 && ((PlaceholderType)y.getRhsType()).getVariance() != 0) { ((PlaceholderType)y.getLhsType()).setVariance(((PlaceholderType)y.getRhsType( )).getVariance()); } } return y; } ); } while (!varianceTPHold.equals(varianceTPH));
*/
// Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure,
// logFile, log);
// Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
List<Set<Constraint<UnifyPair>>> oderConstraints = unifyCons.getOderConstraints()// .stream().map(x -> {
/*
* Set<Set<UnifyPair>> ret = new HashSet<>(); for (Constraint<UnifyPair> y : x) { ret.add(new HashSet<>(y)); } return ret; }).collect(Collectors.toCollection(ArrayList::new))
*/;
if (ConsoleInterface.unifyServerUrl.isPresent()) {
UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
UnifyContext context = new UnifyContext(logger, true, urm, usedTasks, placeholderRegistry);
SocketFuture<UnifyResultPacket> future = SocketClient.execute(
UnifyRequestPacket.create(finiteClosure, cons, unifyCons, context.placeholderRegistry())
);
SocketClient.execute(SetAutoclosePacket.create());
return future.get().getResultSet(context);
}
return results.stream().map((unifyPairs -> new ResultSet(UnifyTypeFactory.convert(unifyPairs, Pair.generateTPHMap(cons))))).collect(Collectors.toList());
else if (resultmodel) {
/* UnifyResultModel Anfang */
UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
UnifyResultListenerImpl li = new UnifyResultListenerImpl();
urm.addUnifyResultListener(li);
UnifyContext context = new UnifyContext(logger, true, urm, usedTasks, placeholderRegistry);
TypeUnify.unifyParallel(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
logger.info("RESULT Final: " + li.getResults());
logger.info("Constraints for Generated Generics: " + " ???");
logFile.write("RES_FINAL: " + li.getResults().toString() + "\n");
// logFile.flush();
return li.getResults();
}
/* UnifyResultModel End */
else {
// Set<Set<UnifyPair>> result = unify.unify(unifyCons.getUndConstraints(),
// oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons,
// finiteClosure));
UnifyContext context = new UnifyContext(logger, false, new UnifyResultModel(cons, finiteClosure), usedTasks, placeholderRegistry);
Set<Set<UnifyPair>> result = TypeUnify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
logger.info("RESULT: " + result);
logFile.write("RES: " + result.toString() + "\n");
// logFile.flush();
results.addAll(result);
results = results.stream().map(x -> {
Optional<Set<UnifyPair>> res = new RuleSet(placeholderRegistry).subst(x.stream().map(y -> {
if (y.getPairOp() == PairOperator.SMALLERDOTWC)
y.setPairOp(PairOperator.EQUALSDOT);
return y; // alle Paare a <.? b erden durch a =. b ersetzt
}).collect(Collectors.toCollection(HashSet::new)));
if (res.isPresent()) {// wenn subst ein Erg liefert wurde was veraendert
return new TypeUnifyTask(context).applyTypeUnificationRules(res.get(), finiteClosure);
} else
return x; // wenn nichts veraendert wurde wird x zurueckgegeben
}).collect(Collectors.toCollection(HashSet::new));
logger.info("RESULT Final: " + results);
logger.info("Constraints for Generated Generics: " + " ???");
logger.debug("RES_FINAL: " + results.toString() + "\n");
// logFile.flush();
logger.debug("PLACEHOLDERS: " + placeholderRegistry);
// logFile.flush();
}
return results.stream().map((unifyPairs -> new ResultSet(UnifyTypeFactory.convert(unifyPairs, Pair.generateTPHMap(cons), placeholderRegistry)))).collect(Collectors.toList());
}
/**
@@ -598,10 +610,6 @@ public class JavaTXCompiler {
}
}
/**
* @param path - output-Directory can be null, then class file output is in the same directory as the parsed source files
* @return
*/
public Map<JavaClassName, byte[]> generateBytecode(File sourceFile) throws ClassNotFoundException, IOException {
var sf = sourceFiles.get(sourceFile);
if (sf.isGenerated()) return null;
@@ -643,12 +651,12 @@ public class JavaTXCompiler {
var codegen = new Codegen(converter.convert(clazz), this, converter);
var code = codegen.generate();
generatedClasses.put(clazz.getClassName(), code);
converter.auxiliaries.forEach((name, source) -> {
generatedClasses.put(new JavaClassName(name), source);
});
}
generatedGenerics.put(sf, converter.javaGenerics());
converter.generateFunNTypes();
converter.auxiliaries.forEach((name, source) -> {
generatedClasses.put(new JavaClassName(name), source);
});
return generatedClasses;
}
@@ -656,15 +664,15 @@ public class JavaTXCompiler {
FileOutputStream output;
for (JavaClassName name : classFiles.keySet()) {
byte[] bytecode = classFiles.get(name);
System.out.println("generating " + name + ".class file ...");
defaultLogger.info("generating " + name + ".class file ...");
var subPath = preserveHierarchy ? path : Path.of(path.toString(), name.getPackageName().split("\\.")).toFile();
File outputFile = new File(subPath, name.getClassName() + ".class");
outputFile.getAbsoluteFile().getParentFile().mkdirs();
System.out.println(outputFile);
defaultLogger.info(outputFile.toString());
output = new FileOutputStream(outputFile);
output.write(bytecode);
output.close();
System.out.println(name + ".class file generated");
defaultLogger.success(name + ".class file generated");
}
}

View File

@@ -0,0 +1,31 @@
package de.dhbwstuttgart.core;
import de.dhbwstuttgart.server.SocketServer;
public class JavaTXServer {
public static boolean isRunning = false;
final SocketServer socketServer;
public JavaTXServer(int port) {
this.socketServer = new SocketServer(port);
}
public void listen() {
isRunning = true;
socketServer.start();
}
public void forceStop() {
try {
socketServer.stop();
}
catch (InterruptedException exception) {
System.err.println("Interrupted socketServer: " + exception);
}
isRunning = false;
}
}

View File

@@ -0,0 +1,13 @@
package de.dhbwstuttgart.environment;
public class ByteArrayClassLoader extends ClassLoader implements IByteArrayClassLoader {
@Override
public Class _defineClass(String name, byte[] code, int i, int length) throws ClassFormatError {
return defineClass(name, code, i, length);
}
@Override
public Class<?> findClass(String name) throws ClassNotFoundException {
return super.findClass(name);
}
}

View File

@@ -6,22 +6,18 @@ import java.nio.file.Path;
public interface IByteArrayClassLoader {
Class<?> loadClass(String path) throws ClassNotFoundException;
Class loadClass(String path) throws ClassNotFoundException;
default Class<?> loadClass(byte[] code) {
return this.loadClass(null, code);
default Class loadClass(byte[] code) {
return this._defineClass(null, code, 0, code.length);
}
default Class<?> loadClass(String name, byte[] code) {
return this._defineClass(name, code, 0, code.length);
}
default Class<?> loadClass(Path path) throws IOException {
default Class loadClass(Path path) throws IOException {
var code = Files.readAllBytes(path);
return this._defineClass(null, code, 0, code.length);
}
public Class<?> findClass(String name) throws ClassNotFoundException;
Class<?> _defineClass(String name, byte[] code, int i, int length) throws ClassFormatError;
Class _defineClass(String name, byte[] code, int i, int length) throws ClassFormatError;
}

View File

@@ -0,0 +1,11 @@
package de.dhbwstuttgart.exceptions;
/**
* Eine Runtime Exception, die für den Fall genutzt wird, dass eine Unifikation abgebrochen wird.
* Durch das Werfen einer Exception können Abbrüche auch aus Methodenaufrufen heraus
* geprüft werden, da zuvor nur ein return X; stattfinden würde.
*/
public class UnifyCancelException extends RuntimeException {
}
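
The German Javadoc above says that this runtime exception is thrown when a unification run is aborted, so that an abort can also be detected from inside nested method calls instead of relying on a plain "return X;". The following is only a rough, hypothetical sketch of that pattern; the interrupt check, method names, and result type are assumptions and are not taken from this diff.

import de.dhbwstuttgart.exceptions.UnifyCancelException;
import java.util.HashSet;
import java.util.Set;

class CancellationSketch {

    // Deep inside a helper call: abort by throwing instead of returning a sentinel value.
    static void computeStep(Set<String> results) {
        if (Thread.currentThread().isInterrupted()) {
            throw new UnifyCancelException();   // assumption: cancellation is signalled via thread interruption
        }
        results.add("partial result");          // placeholder for the real unification work
    }

    // At the task's top level a single catch converts the abort into an empty result.
    static Set<String> computeAll() {
        Set<String> results = new HashSet<>();
        try {
            computeStep(results);
            return results;
        } catch (UnifyCancelException cancelled) {
            return new HashSet<>();
        }
    }
}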

View File

@@ -7,6 +7,7 @@ import de.dhbwstuttgart.parser.antlr.Java17Parser;
import de.dhbwstuttgart.parser.scope.JavaClassRegistry;
import de.dhbwstuttgart.syntaxtree.SourceFile;
import de.dhbwstuttgart.util.Logger;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
@@ -17,6 +18,9 @@ import java.util.ArrayList;
import java.util.List;
public class JavaTXParser {
public static Logger logger = new Logger("Parser");
public static Java17Parser.SourceFileContext parse(File source) throws IOException, java.lang.ClassNotFoundException {
InputStream stream = new FileInputStream(source);
// DEPRECATED: ANTLRInputStream input = new ANTLRInputStream(stream);

View File

@@ -1,4 +1,25 @@
package de.dhbwstuttgart.parser;
public record SourceLoc(String file, int line) {
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
public record SourceLoc(String file, int line) implements ISerializableData {
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
var serialized = new SerialMap();
serialized.put("file", file);
serialized.put("line", line);
return serialized;
}
public static SourceLoc fromSerial(SerialMap data) {
return new SourceLoc(
data.getValue("file").getOf(String.class),
data.getValue("line").getOf(Integer.class)
);
}
}
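
For orientation only, a minimal round-trip sketch for the SourceLoc serialization added above. It uses only the toSerial/fromSerial methods and the "file"/"line" keys shown in this diff; the no-argument KeyStorage constructor is an assumption that the diff does not confirm.

import de.dhbwstuttgart.parser.SourceLoc;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;

class SourceLocRoundTripSketch {
    static SourceLoc copyViaSerial(SourceLoc original) {
        // toSerial stores the record components under the keys "file" and "line"
        SerialMap serial = original.toSerial(new KeyStorage());  // assumption: KeyStorage has a no-arg constructor
        // fromSerial reads the same keys back and rebuilds the record
        return SourceLoc.fromSerial(serial);
    }
}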

View File

@@ -12,6 +12,7 @@ import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.syntaxtree.type.*;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.model.*;
import java.util.*;
@@ -26,16 +27,21 @@ public class FCGenerator {
*
* @param availableClasses - All parsed classes
*/
public static Set<UnifyPair> toUnifyFC(JavaTXCompiler compiler, Collection<ClassOrInterface> availableClasses, ClassLoader classLoader) throws ClassNotFoundException {
return toFC(availableClasses, classLoader).stream().map(t -> UnifyTypeFactory.convert(compiler, t)).collect(Collectors.toSet());
public static Set<UnifyPair> toUnifyFC(JavaTXCompiler compiler, Collection<ClassOrInterface> availableClasses, ClassLoader classLoader, PlaceholderRegistry placeholderRegistry) throws ClassNotFoundException {
return toFC(
availableClasses,
classLoader,
placeholderRegistry
).stream().map(t -> UnifyTypeFactory.convert(compiler, t, placeholderRegistry))
.collect(Collectors.toSet());
}
public static Set<Pair> toFC(Collection<ClassOrInterface> availableClasses, ClassLoader classLoader) throws ClassNotFoundException {
public static Set<Pair> toFC(Collection<ClassOrInterface> availableClasses, ClassLoader classLoader, PlaceholderRegistry placeholderRegistry) throws ClassNotFoundException {
HashSet<Pair> pairs = new HashSet<>();
//PL 2018-09-18: gtvs moved in front of the for loop so that the same type placeholders are always used.
HashMap<String, RefTypeOrTPHOrWildcardOrGeneric> gtvs = new HashMap<>();
for(ClassOrInterface cly : availableClasses){
List<Pair> newPairs = getSuperTypes(cly, availableClasses, gtvs, classLoader);
List<Pair> newPairs = getSuperTypes(cly, availableClasses, gtvs, classLoader, placeholderRegistry);
pairs.addAll(newPairs);
//For all Functional Interfaces FI: FunN$$<... args of the Functional Interface ...> <. FI is added to FC
@@ -75,8 +81,13 @@ public class FCGenerator {
* @param forType
* @return
*/
private static List<Pair> getSuperTypes(ClassOrInterface forType, Collection<ClassOrInterface> availableClasses, ClassLoader classLoader) throws ClassNotFoundException {
return getSuperTypes(forType, availableClasses, new HashMap<>(), classLoader);
private static List<Pair> getSuperTypes(
ClassOrInterface forType,
Collection<ClassOrInterface> availableClasses,
ClassLoader classLoader,
PlaceholderRegistry placeholderRegistry
) throws ClassNotFoundException {
return getSuperTypes(forType, availableClasses, new HashMap<>(), classLoader, placeholderRegistry);
}
/**
@@ -87,8 +98,13 @@ public class FCGenerator {
* @return
* @throws ClassNotFoundException
*/
private static List<Pair> getSuperTypes(ClassOrInterface forType, Collection<ClassOrInterface> availableClasses,
HashMap<String, RefTypeOrTPHOrWildcardOrGeneric> gtvs, ClassLoader classLoader) throws ClassNotFoundException {
private static List<Pair> getSuperTypes(
ClassOrInterface forType,
Collection<ClassOrInterface> availableClasses,
HashMap<String, RefTypeOrTPHOrWildcardOrGeneric> gtvs,
ClassLoader classLoader,
PlaceholderRegistry placeholderRegistry
) throws ClassNotFoundException {
List<RefTypeOrTPHOrWildcardOrGeneric> params = new ArrayList<>();
//The GTVs that are added in forType:
HashMap<String, RefTypeOrTPHOrWildcardOrGeneric> newGTVs = new HashMap<>();
@@ -147,7 +163,7 @@ public class FCGenerator {
if(superClass.getClassName().equals(ASTFactory.createObjectClass().getClassName())){
superTypes = Arrays.asList(new Pair(ASTFactory.createObjectType(), ASTFactory.createObjectType(), PairOperator.SMALLER));
}else{
superTypes = getSuperTypes(superClass, availableClasses, newGTVs, classLoader);
superTypes = getSuperTypes(superClass, availableClasses, newGTVs, classLoader, placeholderRegistry);
}
retList.add(ret);

View File

@@ -1,5 +1,6 @@
package de.dhbwstuttgart.parser.SyntaxTreeGenerator;
import de.dhbwstuttgart.parser.JavaTXParser;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
@@ -259,7 +260,7 @@ public class StatementGenerator {
ret.setStatement();
return ret;
default:
System.out.println(stmt.getClass());
JavaTXParser.logger.info(stmt.getClass());
throw new NotImplementedException();
}
}
@@ -1099,9 +1100,9 @@ public class StatementGenerator {
block = lambdaGenerator.convert(expression.lambdaBody().block(), true);
}
List<RefTypeOrTPHOrWildcardOrGeneric> funNParams = new ArrayList<>();
funNParams.add(TypePlaceholder.fresh(expression.getStart(), -1, false));// ret-Type
funNParams.add(TypePlaceholder.fresh(expression.getStart()));// ret-Type
params.getFormalparalist().forEach(formalParameter -> // append a TPH for each parameter:
funNParams.add(TypePlaceholder.fresh(expression.getStart(), 1, false)));
funNParams.add(TypePlaceholder.fresh(expression.getStart())));
RefTypeOrTPHOrWildcardOrGeneric lambdaType = TypePlaceholder.fresh(expression.getStart());
// RefType lambdaType = new
// RefType(reg.getName("Fun"+params.getFormalparalist().size()),

View File

@@ -74,7 +74,7 @@ public class TypeGenerator {
throw new NotImplementedException();
}
} else if (!typeContext.LBRACK().isEmpty()) { // check for ArrayType via the square brackets
// System.out.println(unannTypeContext.getText());
// JavaTXParser.logger.info(unannTypeContext.getText());
throw new NotImplementedException();
}
/*

View File

@@ -7,7 +7,7 @@ import java.util.*;
/**
* Stores the classes for a specific project scope
*/
public class JavaClassRegistry{
public class JavaClassRegistry {
final Map<JavaClassName, Integer> existingClasses = new HashMap<>();
public JavaClassRegistry(Map<String, Integer> initialNames) {
@@ -22,10 +22,6 @@ public class JavaClassRegistry{
}
}
public Set<JavaClassName> getAllClassNames(){
return existingClasses.keySet();
}
public void addName(String className, int numberOfGenerics) {
existingClasses.put(new JavaClassName(className), numberOfGenerics);
}

View File

@@ -0,0 +1,41 @@
package de.dhbwstuttgart.server;
import de.dhbwstuttgart.util.Logger;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import org.java_websocket.WebSocket;
public class ServerTaskLogger extends Logger {
private final WebSocket webSocket;
private final SocketServer socketServer;
private final LogLevel customLogLevel;
public ServerTaskLogger(WebSocket webSocket, SocketServer socketServer, LogLevel customLogLevel) {
this.webSocket = webSocket;
this.socketServer = socketServer;
this.customLogLevel = customLogLevel;
}
@Override
public boolean isLogLevelActive(LogLevel logLevel) {
return logLevel.isHigherOrEqualTo(customLogLevel);
}
@Override
protected void print(String s, LogLevel logLevel) {
String coloredPrefix = this.getPrefix(logLevel);
if (logLevel.isHigherOrEqualTo(LogLevel.ERROR)) {
socketServer.sendError(webSocket, coloredPrefix + s, false);
}
else {
socketServer.sendMessage(webSocket, coloredPrefix + s);
}
}
@Override
protected void write(String s) {
// under no circumstances write anything to a file
}
}

View File

@@ -0,0 +1,202 @@
package de.dhbwstuttgart.server;
import com.fasterxml.jackson.core.JsonProcessingException;
import de.dhbwstuttgart.core.ConsoleInterface;
import de.dhbwstuttgart.server.packet.IClientToServerPacket;
import de.dhbwstuttgart.server.packet.IPacket;
import de.dhbwstuttgart.server.packet.IServerToClientPacket;
import de.dhbwstuttgart.server.packet.PacketContainer;
import de.dhbwstuttgart.server.packet.UnifyRequestPacket;
import de.dhbwstuttgart.server.packet.UnifyResultPacket;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.util.Logger;
import java.net.URI;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CountDownLatch;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.java_websocket.client.WebSocketClient;
import org.java_websocket.enums.ReadyState;
import org.java_websocket.handshake.ServerHandshake;
/**
* The Client-side of the websocket
*/
public class SocketClient extends WebSocketClient {
public static Logger logger = new Logger("SocketClient");
/**
* The singleton object
*/
private static SocketClient socketClient = null;
/**
* List of futures that are still waiting to be fulfilled
*/
private final Map<String, SocketFuture<?>> responseFutures = new HashMap<>();
private SocketClient(String url) {
super(
URI.create(url), // target url
//SocketServer.perMessageDeflateDraft, // enable compression
Map.of( // headers
"packetProtocolVersion", SocketServer.packetProtocolVersion
)
);
// make sure the url is in a valid format
final String regex = "^wss?://(\\w+(\\.\\w+)?)*:(\\d+)$";
final Matcher matcher = Pattern.compile(regex).matcher(url);
if (!matcher.find()) {
throw new RuntimeException("Provided string \"" + url + "\" is not a valid server URL! Use pattern ws(s?)://<host.name>:<port>");
}
try {
// wait for the connection to be set up
this.connectBlocking();
// make sure the connection has been established successfully
if (this.getReadyState() != ReadyState.OPEN) {
throw new RuntimeException("WebSocket Client could not connect to remote host at " + this.uri);
}
} catch (InterruptedException exception) {
throw new RuntimeException(exception);
}
// add a shutdown hook to close the connection when the process ends or is stopped by a SIGINT signal
Runtime.getRuntime().addShutdownHook(new Thread(this::close));
}
private SocketClient(String host, int port, boolean secure) throws InterruptedException {
this(String.format("%s://%s:%d/", secure ? "wss" : "ws", host, port));
}
/**
* Singleton access method, creates one if none is available
*
* @return The one and only socketClient
*/
private static SocketClient initializeClient() {
if (socketClient == null) {
socketClient = new SocketClient(ConsoleInterface.unifyServerUrl.get());
}
return socketClient;
}
/**
* Send a packet to the server (connection will be created, if none is found) and return a future
* for the response packet
*/
synchronized public static <T extends IServerToClientPacket> SocketFuture<T> execute(IClientToServerPacket<T> packet) {
SocketClient client = initializeClient();
/*
* Create a future that will be associated with the packet and eventually completed
*/
SocketFuture<T> future = packet.getFuture();
if (!future.isDone()) {
client.responseFutures.put(future.futureId, future);
}
/*
* Establish connection, if not already done.
* Serialize the packet and send it to the server.
* Return the future to be handled by the caller.
*/
try {
String json = PacketContainer.serialize(packet);
client.send(json);
} catch (Exception exception) {
logger.exception(exception);
throw new RuntimeException("Exception occurred in server connection: ", exception);
}
return future;
}
/**
* Shortcut for waiting and retrieving the response immediately
*
* @param packet The packet to send
* @param <T> The type of response packet to await
* @return The response packet, once it is received
*/
public static <T extends IServerToClientPacket> T executeAndGet(IClientToServerPacket<T> packet) {
return SocketClient.execute(packet).get();
}
/**
* Specific client-side implementations to handle incoming packets
*/
protected void handleReceivedPacket(IPacket packet) {
if (!(packet instanceof IServerToClientPacket serverToClientPacket)) {
System.err.println("Received package of invalid type + " + packet.getClass().getName());
this.close();
return;
}
serverToClientPacket.onHandle(this.getConnection(), this);
}
/**
* Complete a registered future, so it can be handled by whoever executed the creator task
*
* @param id The associated id for this future
* @param trigger The object triggering the completion
*/
public void completeResponseFuture(String id, IServerToClientPacket trigger) {
SocketFuture<?> future = this.responseFutures.remove(id);
if (future == null) return;
if (!future.accept(trigger)) {
throw new RuntimeException("Packet " + trigger.getClass().getName() + " tried to complete future, but was not allowed to");
}
}
public static void closeIfOpen() {
if (socketClient != null && socketClient.isOpen()) {
socketClient.close();
}
}
@Override
public void onOpen(ServerHandshake handshakedata) {
logger.success("Connected to server with status " + handshakedata.getHttpStatus());
}
@Override
public void onMessage(String message) {
// logger.info("received: " + message);
IPacket packet = PacketContainer.deserialize(message);
this.handleReceivedPacket(packet);
}
@Override
public void onClose(int code, String reason, boolean remote) {
logger.info(
"Disconnected from server " +
"with code " + code + " " +
(reason.isEmpty() ? "" : "and reason " + reason + " ") +
"(closed by remote: " + remote + ")"
);
if (!this.responseFutures.isEmpty()) {
throw new RuntimeException("Server closed before all required tasks were answered");
}
}
@Override
public void onError(Exception e) {
logger.exception(e);
throw new RuntimeException(e);
}
}
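Taken together with the packet classes later in this diff, the client-side flow could be used roughly as in the following sketch; all inputs are assumed to be prepared elsewhere in the compiler, and the wrapper class and method are hypothetical:
// Build a UnifyRequestPacket, send it via the singleton client, and wait for the result.
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.packet.UnifyRequestPacket;
import de.dhbwstuttgart.server.packet.UnifyResultPacket;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import java.util.List;

public class RemoteUnifySketch {
    static List<ResultSet> requestUnification(FiniteClosure fc,
                                              ConstraintSet<Pair> cs,
                                              ConstraintSet<UnifyPair> unifyCs,
                                              PlaceholderRegistry registry,
                                              UnifyContext context) {
        UnifyRequestPacket request = UnifyRequestPacket.create(fc, cs, unifyCs, registry);
        // executeAndGet(...) serializes the packet, sends it over the websocket and
        // blocks until the matching UnifyResultPacket completes the SocketFuture
        UnifyResultPacket response = SocketClient.executeAndGet(request);
        return response.getResultSet(context);
    }
}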

View File

@@ -0,0 +1,48 @@
package de.dhbwstuttgart.server;
import de.dhbwstuttgart.server.packet.IServerToClientPacket;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
public class SocketFuture<T extends IServerToClientPacket> extends CompletableFuture<T> {
public final String futureId = UUID.randomUUID().toString();
public final List<Class<T>> allowedTriggers;
public SocketFuture(List<Class<T>> allowedTriggers) {
this.allowedTriggers = allowedTriggers;
}
public boolean accept(IServerToClientPacket trigger) {
if (this.allowedTriggers.contains(trigger.getClass())) {
this.complete((T)trigger);
return true;
}
return false;
}
@Override
public T get() {
try {
return super.get();
}
catch (InterruptedException | ExecutionException exception) {
throw new RuntimeException(exception);
}
}
/**
* Special case where the future is immediately fulfilled without a response packet, similar to
* <code>CompletableFuture.completedFuture()</code> but without a value
*/
public static <R extends IServerToClientPacket> SocketFuture<R> completedFuture() {
SocketFuture<R> dummyFuture = new SocketFuture<>(new ArrayList<>(0));
dummyFuture.complete(null);
return dummyFuture;
}
}
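A minimal sketch of the trigger gating above: only packets whose class appears in allowedTriggers complete the future, everything else is rejected. The packet instances below are created purely for illustration:
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.packet.MessagePacket;
import de.dhbwstuttgart.server.packet.UnifyResultPacket;
import java.util.List;

public class SocketFutureSketch {
    static void demo() {
        SocketFuture<UnifyResultPacket> future = new SocketFuture<>(List.of(UnifyResultPacket.class));
        boolean rejected = future.accept(MessagePacket.create("ignored"));  // false: not an allowed trigger
        boolean accepted = future.accept(new UnifyResultPacket());          // true: completes the future
        assert !rejected && accepted && future.isDone();
    }
}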

View File

@@ -0,0 +1,235 @@
package de.dhbwstuttgart.server;
import com.fasterxml.jackson.core.JsonProcessingException;
import de.dhbwstuttgart.server.packet.ErrorPacket;
import de.dhbwstuttgart.server.packet.IClientToServerPacket;
import de.dhbwstuttgart.server.packet.IPacket;
import de.dhbwstuttgart.server.packet.MessagePacket;
import de.dhbwstuttgart.server.packet.PacketContainer;
import de.dhbwstuttgart.util.Logger;
import java.net.InetSocketAddress;
import java.util.Collections;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.java_websocket.WebSocket;
import org.java_websocket.drafts.Draft;
import org.java_websocket.drafts.Draft_6455;
import org.java_websocket.extensions.permessage_deflate.PerMessageDeflateExtension;
import org.java_websocket.handshake.ClientHandshake;
import org.java_websocket.server.WebSocketServer;
public class SocketServer extends WebSocketServer {
public static Logger logger = new Logger("SocketServer");
public static final int maxTasksPerSession = 100;
private static boolean serverRunning = false;
/**
* Increase this every time a breaking change to the server communication is done.
* This will prevent errors when the server version and client version do not match.
*/
public static final String packetProtocolVersion = "1";
// create an executor for tasks that will always keep at least one task around
private final ThreadPoolExecutor taskExecutor = new ThreadPoolExecutor(1, Integer.MAX_VALUE,60L, TimeUnit.SECONDS, new SynchronousQueue<>());
// create an executor for scheduling timeouts
private final ScheduledExecutorService timeoutExecutor = Executors.newSingleThreadScheduledExecutor();
public SocketServer(int port) {
super(new InetSocketAddress(port));
this.setConnectionLostTimeout(30);
serverRunning = true;
// add a shutdown hook to close all connections when the process ends or is stopped by a SIGINT signal
Runtime.getRuntime().addShutdownHook(new Thread(this::onShutdown));
}
public static boolean isServerRunning() {
return serverRunning;
}
private void onShutdown() {
serverRunning = false;
try {
for (var webSocket : this.getConnections()) {
this.sendError(webSocket, "Sorry, I am shutting down. You are now on your own, good luck!", true);
webSocket.close();
}
this.stop();
taskExecutor.shutdown();
timeoutExecutor.shutdown();
} catch (InterruptedException exception) {
// we are shutting down anyway
}
}
@Override
public void onOpen(WebSocket webSocket, ClientHandshake clientHandshake) {
String ppv = clientHandshake.getFieldValue("packetProtocolVersion");
if (!ppv.equals(packetProtocolVersion)) {
this.sendError(webSocket,
"Mismatch in packet protocol version! Client (you): \"" + ppv + "\" and Server (me): \"" + packetProtocolVersion + "\"",
true
);
webSocket.close(1);
return;
}
SocketData socketData = new SocketData(webSocket);
logger.info("New connection: " + socketData.id + " (with ppv " + ppv + ")");
try {
sendMessage(webSocket, "Welcome to the server!");
// wait 10 seconds for the client to send a task and close the connection if nothing has been received by then
final int secondsUntilTimeout = 10;
timeoutExecutor.schedule(() -> {
if (webSocket.<SocketData>getAttachment().totalTasks.get() > 0 || !webSocket.isOpen()) {
return;
}
sendMessage(webSocket, "No task received after " + secondsUntilTimeout + " seconds. Closing connection...");
webSocket.close();
},
secondsUntilTimeout,
TimeUnit.SECONDS
);
// and finally, when your program wants to exit
} catch (Exception e) {
logger.exception(e);
webSocket.close(1, e.getMessage());
}
}
@Override
public void onClose(WebSocket webSocket, int code, String reason, boolean remote) {
SocketData socketData = webSocket.getAttachment();
logger.info("Connection closed: " + socketData.id);
logger.info(
"Disconnected client " + socketData.id + " " +
"with code " + code + " " +
(reason.isEmpty() ? "" : "and reason " + reason + " ") +
"(closed by client: " + remote + ")"
);
}
@Override
public void onMessage(WebSocket webSocket, String s) {
// logger.info("Received: " + s.substring(0, 50));
IPacket reconstructedPacket = PacketContainer.deserialize(s);
try {
this.onPacketReceived(webSocket, reconstructedPacket);
} catch (JsonProcessingException e) {
logger.exception(e);
this.log(webSocket, "Error on processing incoming package: " + e.getMessage());
}
}
@Override
public void onError(WebSocket webSocket, Exception e) {
if (webSocket != null) {
log(webSocket, e.getMessage());
webSocket.close();
}
logger.exception(e);
}
@Override
public void onStart() {
logger.success("Websocket server started on port " + this.getPort());
}
/**
* A shorthand method for sending informational messages to the client
*/
public void sendMessage(WebSocket webSocket, String text) {
try {
MessagePacket message = MessagePacket.create(text);
webSocket.send(PacketContainer.serialize(message));
} catch (Exception e) {
System.err.println("Failed to send message: " + text);
logger.exception(e);
}
}
/**
* A shorthand method for sending error messages to the client
*/
public void sendError(WebSocket webSocket, String text, boolean isFatal) {
try {
ErrorPacket error = ErrorPacket.create(text, isFatal);
webSocket.send(PacketContainer.serialize(error));
} catch (Exception e) {
logger.exception(e);
log(webSocket, "Failed to send error: " + text);
}
}
/**
* The server-side implementation on how to handle certain packets when received
*/
private void onPacketReceived(WebSocket webSocket, IPacket packet) throws JsonProcessingException {
SocketData socketData = webSocket.getAttachment();
// limit the number of tasks per connection
if (socketData.totalTasks.get() >= maxTasksPerSession) {
sendError(webSocket, "Exceeded the maximum amount of " + maxTasksPerSession + " tasks per session", true);
webSocket.close();
return;
}
// only allow packets that are meant to be handled by the server
if (!(packet instanceof IClientToServerPacket<?> clientToServerPacket)) {
sendMessage(webSocket, "The package of type " + packet.getClass().getName() + " is not handled by the server!");
return;
}
// update the socket data
socketData.unhandledTasks.incrementAndGet();
socketData.totalTasks.incrementAndGet();
// add the packet to the queue so it can be started by the worker
CompletableFuture.runAsync(() -> {
clientToServerPacket.onHandle(webSocket, this);
int remainingUnhandledTasks = socketData.unhandledTasks.decrementAndGet();
if (socketData.closeIfNoTasksLeft) {
// if the websocket has 0 unhandled Tasks, close the connection
if (remainingUnhandledTasks <= 0) {
sendMessage(webSocket, "All requested tasks finished! Closing connection...");
webSocket.close();
}
}
}, taskExecutor);
}
public void log(WebSocket webSocket, String msg) {
String socketId = (webSocket == null) ? "???" : webSocket.<SocketData>getAttachment().id;
logger.info("[" + socketId + "] " + msg);
}
/**
* The data that is associated server-side with any connected client.
* This makes it possible to store information that can be mapped to any existing connection.
*/
public static class SocketData {
public final String id;
public final AtomicInteger unhandledTasks = new AtomicInteger(0);
public final AtomicInteger totalTasks = new AtomicInteger(0);
public boolean closeIfNoTasksLeft = false;
public SocketData(WebSocket webSocket) {
this.id = UUID.randomUUID().toString();
webSocket.setAttachment(this);
}
}
}
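A hedged startup sketch; start() is inherited from the java_websocket WebSocketServer base class and the port number is illustrative:
import de.dhbwstuttgart.server.SocketServer;

public class ServerStartSketch {
    public static void main(String[] args) {
        SocketServer server = new SocketServer(8080);   // hypothetical port
        server.start();                                 // onStart() logs the listening port on success
    }
}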

View File

@@ -0,0 +1,35 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.SocketServer;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialUUID;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import org.java_websocket.WebSocket;
public class DebugPacket implements IClientToServerPacket.Void, IServerToClientPacket {
public SerialUUID a1;
public SerialUUID a2;
public SerialMap b1;
public SerialMap b2;
public SerialList<? extends ISerialNode> c1;
public SerialList<? extends ISerialNode> c2;
public SerialValue<?> d1;
public SerialValue<?> d2;
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketClient socketClient) {}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketServer socketServer) {}
@JsonIgnore
public SocketFuture<IServerToClientPacket> getFuture() {
return SocketFuture.completedFuture();
}
}

View File

@@ -0,0 +1,36 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.SocketServer;
import org.java_websocket.WebSocket;
/**
* A packet to send simple error messages between the client and the server
*/
public class ErrorPacket implements IServerToClientPacket {
/**
* The error message from the server that should be logged as an error and may abort the process
*/
public String error;
public boolean isFatal;
@JsonIgnore
public static ErrorPacket create(String error, boolean isFatal) {
ErrorPacket packet = new ErrorPacket();
packet.error = error;
packet.isFatal = isFatal;
return packet;
}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketClient socketClient) {
SocketClient.logger.exception(new RuntimeException(this.error));
if (this.isFatal) {
socketClient.close(1, "Received fatal error from server");
}
}
}

View File

@@ -0,0 +1,26 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.SocketServer;
import org.java_websocket.WebSocket;
/**
* A packet that will be sent to the server. Use the <code>Void</code> sub-interface for packets without a response.
*
* @param <T> The response packet that will fulfill the future.
*/
public interface IClientToServerPacket<T extends IServerToClientPacket> extends IPacket {
@JsonIgnore
void onHandle(WebSocket webSocket, SocketServer socketServer);
@JsonIgnore
SocketFuture<T> getFuture();
/**
* Special case, where the packet will remain unanswered by the server
*/
interface Void extends IClientToServerPacket<IServerToClientPacket> {}
}

View File

@@ -0,0 +1,12 @@
package de.dhbwstuttgart.server.packet;
/**
* The shared interface for all packets of the client-server connection.
* A packet must always:
* - Have a default / no-parameter constructor
* - Have only serializable public properties (or disable them via jackson annotations)
*
*/
public interface IPacket {
}

View File

@@ -0,0 +1,12 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import org.java_websocket.WebSocket;
public interface IServerToClientPacket extends IPacket {
@JsonIgnore
void onHandle(WebSocket webSocket, SocketClient socketClient);
}

View File

@@ -0,0 +1,35 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.SocketServer;
import org.java_websocket.WebSocket;
/**
* A fallback packet that is generated if the received JSON could not be mapped to an existing packet type
*/
public class InvalidPacket implements IClientToServerPacket.Void, IServerToClientPacket {
/**
* If available, the error that caused this packet to appear
*/
public String error = "<unknown error>";
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketClient socketClient) {
SocketClient.logger.error("InvalidPacket: " + this.error);
}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketServer socketServer) {
socketServer.log(webSocket, "InvalidPacket: " + this.error);
}
@JsonIgnore
public SocketFuture<IServerToClientPacket> getFuture() {
return SocketFuture.completedFuture();
}
}

View File

@@ -0,0 +1,40 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.SocketServer;
import org.java_websocket.WebSocket;
/**
* A packet to send simple informational messages between the client and the server
*/
public class MessagePacket implements IClientToServerPacket.Void, IServerToClientPacket {
/**
* The informational message from the server that should be logged or output
*/
public String message;
@JsonIgnore
public static MessagePacket create(String message) {
MessagePacket packet = new MessagePacket();
packet.message = message;
return packet;
}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketClient socketClient) {
SocketClient.logger.info("SocketMessage: " + this.message);
}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketServer socketServer) {
socketServer.log(webSocket, this.message);
}
@JsonIgnore
public SocketFuture<IServerToClientPacket> getFuture() {
return SocketFuture.completedFuture();
}
}

View File

@@ -0,0 +1,98 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import de.dhbwstuttgart.util.Logger;
/**
* A wrapper for the packet to ensure correct serialization/deserialization and make it possible to detect the matching
* packet type for deserialization.
*/
@JsonInclude(JsonInclude.Include.NON_NULL)
public class PacketContainer {
// The jackson serializer / deserializer tool
private static final ObjectMapper objectMapper = new ObjectMapper();
/*
* The available packet types. The one type that is represented in the JSON should always be the ONLY non-null value.
* They have to be public (for the moment) to let jackson fill them in while deserializing
*/
public ErrorPacket errorPacket = null;
public MessagePacket messagePacket = null;
public InvalidPacket invalidPacket = null;
public UnifyRequestPacket unifyRequestPacket = null;
public UnifyResultPacket unifyResultPacket = null;
public DebugPacket debugPacket = null;
public SetAutoclosePacket setAutoclosePacket = null;
/**
* Generate the JSON string for the given packet
*
* @param packet The packet to serialize
* @return The JSON representation of the packet
*/
public static String serialize(IPacket packet) throws JsonProcessingException {
objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
PacketContainer container = new PacketContainer();
if (packet instanceof ErrorPacket)
container.errorPacket = (ErrorPacket) packet;
else if (packet instanceof MessagePacket)
container.messagePacket = (MessagePacket) packet;
else if (packet instanceof UnifyRequestPacket)
container.unifyRequestPacket = (UnifyRequestPacket) packet;
else if (packet instanceof UnifyResultPacket)
container.unifyResultPacket = (UnifyResultPacket) packet;
else if (packet instanceof DebugPacket)
container.debugPacket = (DebugPacket) packet;
else if (packet instanceof SetAutoclosePacket)
container.setAutoclosePacket = (SetAutoclosePacket) packet;
// Add new packets here and in the deserialize method
else
throw new RuntimeException("Cannot map packet to any known packet class");
return objectMapper.writeValueAsString(container);
}
/**
* Use the JSON string to generate the matching packet object
*
* @param json The serialized representation of a packet container
* @return The deserialized Packet object
*/
public static IPacket deserialize(String json) {
objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
try {
PacketContainer container = objectMapper.readValue(json, PacketContainer.class);
if (container.errorPacket != null)
return container.errorPacket;
if (container.messagePacket != null)
return container.messagePacket;
if (container.invalidPacket != null)
return container.invalidPacket;
if (container.unifyRequestPacket != null)
return container.unifyRequestPacket;
if (container.unifyResultPacket != null)
return container.unifyResultPacket;
if (container.debugPacket != null)
return container.debugPacket;
if (container.setAutoclosePacket != null)
return container.setAutoclosePacket;
// Add new packets here and in the serialize method
throw new RuntimeException("Cannot map received json to any known packet class");
} catch (Exception e) {
(new Logger()).exception(e);
InvalidPacket packet = new InvalidPacket();
packet.error = e.getMessage();
return packet;
}
}
}
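A small round-trip sketch of the container above; the message text is arbitrary:
import com.fasterxml.jackson.core.JsonProcessingException;
import de.dhbwstuttgart.server.packet.IPacket;
import de.dhbwstuttgart.server.packet.MessagePacket;
import de.dhbwstuttgart.server.packet.PacketContainer;

public class PacketContainerSketch {
    static void roundTrip() throws JsonProcessingException {
        MessagePacket outgoing = MessagePacket.create("hello from the sketch");
        String json = PacketContainer.serialize(outgoing);     // wraps it in a container with one non-null field
        IPacket incoming = PacketContainer.deserialize(json);  // picks that non-null field again
        assert incoming instanceof MessagePacket;
    }
}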

View File

@@ -0,0 +1,32 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.SocketServer;
import org.java_websocket.WebSocket;
/**
* Normally, a connection stays open until either the client or the server process ends.
* Send this packet to inform the server that the connection can be closed once all tasks are done
*/
public class SetAutoclosePacket implements IClientToServerPacket.Void {
public int dummyProperty = 1;
@JsonIgnore
public static SetAutoclosePacket create() {
return new SetAutoclosePacket();
}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketServer socketServer) {
webSocket.<SocketServer.SocketData>getAttachment().closeIfNoTasksLeft = true;
socketServer.log(webSocket, "Marked connection as autoclose");
}
@JsonIgnore
public SocketFuture<IServerToClientPacket> getFuture() {
return SocketFuture.completedFuture();
}
}

View File

@@ -0,0 +1,164 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.core.ConsoleInterface;
import de.dhbwstuttgart.server.ServerTaskLogger;
import de.dhbwstuttgart.server.SocketFuture;
import de.dhbwstuttgart.server.SocketServer;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.UnifyResultListenerImpl;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import de.dhbwstuttgart.util.Logger;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.concurrent.ForkJoinPool;
import org.java_websocket.WebSocket;
/**
* A packet to send all required data for the unification algorithm to the server and request the unification
*/
public class UnifyRequestPacket implements IClientToServerPacket<UnifyResultPacket> {
public SerialMap finiteClosure;
public SerialMap constraintSet;
public SerialMap unifyConstraintSet;
public SerialMap serialKeyStorage;
public SerialValue<?> placeholders;
public SerialList<SerialMap> factoryplaceholders;
public String futureId;
public int logLevel;
@JsonIgnore
private KeyStorage keyStorage = new KeyStorage();
@JsonIgnore
private boolean keyStorageLoaded = false;
public static UnifyRequestPacket create(
FiniteClosure finiteClosure,
ConstraintSet<Pair> constraintSet,
ConstraintSet<UnifyPair> unifyConstraintSet,
PlaceholderRegistry placeholderRegistry
) {
UnifyRequestPacket packet = new UnifyRequestPacket();
// store constraint and finite closure
packet.finiteClosure = finiteClosure.toSerial(packet.keyStorage);
packet.constraintSet = constraintSet.toSerial(packet.keyStorage);
packet.unifyConstraintSet = unifyConstraintSet.toSerial(packet.keyStorage);
// store placeholder registry
var serialRegistry = placeholderRegistry.toSerial(packet.keyStorage);
packet.placeholders = serialRegistry.getValue("ph");
packet.factoryplaceholders = serialRegistry.getList("factoryPh").assertListOfMaps();
// store referenced objects separately
packet.serialKeyStorage = packet.keyStorage.toSerial(packet.keyStorage);
packet.logLevel = ConsoleInterface.logLevel.getValue();
return packet;
}
@JsonIgnore
public void loadKeyStorage(UnifyContext context) {
if (!keyStorageLoaded) {
keyStorageLoaded = true;
keyStorage = KeyStorage.fromSerial(this.serialKeyStorage, context);
}
}
@JsonIgnore
private FiniteClosure retrieveFiniteClosure(UnifyContext context) {
this.loadKeyStorage(context);
return FiniteClosure.fromSerial(this.finiteClosure, context, keyStorage);
}
@JsonIgnore
private ConstraintSet<Pair> retrieveConstraintSet(UnifyContext context) {
this.loadKeyStorage(context);
return ConstraintSet.fromSerial(this.constraintSet, context, Pair.class, keyStorage);
}
@JsonIgnore
private ConstraintSet<UnifyPair> retrieveUnifyConstraintSet(UnifyContext context) {
this.loadKeyStorage(context);
return ConstraintSet.fromSerial(this.unifyConstraintSet, context, UnifyPair.class, keyStorage);
}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketServer socketServer) {
socketServer.sendMessage(webSocket, "You requested a unify! Please wait until I calculated everything...");
socketServer.log(webSocket, "Client requested a unification. Starting now...");
try {
var placeholderRegistry = new PlaceholderRegistry();
ArrayList<String> existingPlaceholders = (ArrayList) this.placeholders.getOf(ArrayList.class);
existingPlaceholders.forEach(placeholderRegistry::addPlaceholder);
Logger logger = new ServerTaskLogger(
webSocket,
socketServer,
Logger.LogLevel.fromValue(
Math.max(this.logLevel, Logger.LogLevel.INFO.getValue())
)
);
var unifyContext = new UnifyContext(logger, true,
new UnifyResultModel(new ConstraintSet<>(), new FiniteClosure(new HashSet<>(), logger, placeholderRegistry)),
new UnifyTaskModel(), ForkJoinPool.commonPool(), placeholderRegistry
);
this.factoryplaceholders.stream()
.map(p -> (PlaceholderType)UnifyType.fromSerial(p, unifyContext))
.forEach(placeholderRegistry.UnifyTypeFactory_PLACEHOLDERS::add);
// start the unification algorithm from the received data
IFiniteClosure finiteClosure = this.retrieveFiniteClosure(unifyContext);
ConstraintSet<Pair> constraintSet = this.retrieveConstraintSet(unifyContext);
ConstraintSet<UnifyPair> unifyConstraintSet = this.retrieveUnifyConstraintSet(unifyContext);
var resultModel = new UnifyResultModel(constraintSet, finiteClosure);
UnifyResultListenerImpl resultListener = new UnifyResultListenerImpl();
resultModel.addUnifyResultListener(resultListener);
TypeUnify.unifyParallel(
unifyConstraintSet.getUndConstraints(),
unifyConstraintSet.getOderConstraints(),
finiteClosure,
unifyContext.newWithResultModel(resultModel)
);
var resultSets = resultListener.getResults();
socketServer.log(webSocket, "Finished unification");
socketServer.sendMessage(webSocket, "Unification finished. Found " + resultSets.size() + " result sets");
if (webSocket.isOpen()) {
UnifyResultPacket resultPacket = UnifyResultPacket.create(resultSets, futureId);
webSocket.send(PacketContainer.serialize(resultPacket));
}
} catch (Exception e) {
SocketServer.logger.exception(e);
socketServer.log(webSocket, e.getMessage());
}
}
@JsonIgnore
public SocketFuture<UnifyResultPacket> getFuture() {
var future = new SocketFuture<>(List.of(UnifyResultPacket.class));
futureId = future.futureId;
return future;
}
}

View File

@@ -0,0 +1,45 @@
package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.dhbwstuttgart.server.SocketClient;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.List;
import org.java_websocket.WebSocket;
/**
* A packet to send all calculated data from the unification algorithm back to the client
*/
public class UnifyResultPacket implements IServerToClientPacket {
public SerialList<ISerialNode> results;
public SerialMap keyStorage;
public String futureId;
public static UnifyResultPacket create(List<ResultSet> resultSets, String futureId) {
UnifyResultPacket serialized = new UnifyResultPacket();
KeyStorage keyStorage = new KeyStorage();
serialized.results = SerialList.fromMapped(resultSets, resultSet -> resultSet.toSerial(keyStorage));
serialized.keyStorage = keyStorage.toSerial(keyStorage);
serialized.futureId = futureId;
return serialized;
}
@JsonIgnore
public List<ResultSet> getResultSet(UnifyContext context) {
return this.results.assertListOfMaps().stream()
.map(resultData -> ResultSet.fromSerial(resultData, context)).toList();
}
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketClient socketClient) {
SocketClient.logger.info("Received unify result");
socketClient.completeResponseFuture(futureId, this);
}
}

View File

@@ -0,0 +1,16 @@
package de.dhbwstuttgart.server.packet.dataContainers;
import de.dhbwstuttgart.exceptions.NotImplementedException;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
public interface ISerializableData {
public abstract ISerialNode toSerial(KeyStorage keyStorage);
public static Object fromSerial(SerialMap data, UnifyContext context) {
throw new NotImplementedException("Missing implementation of \"fromSerial\" for a serializable element");
}
}

View File

@@ -0,0 +1,103 @@
package de.dhbwstuttgart.server.packet.dataContainers;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
public class KeyStorage implements ISerializableData {
/**
* Store a unique identifier for every element, so it can be referenced in the json
*/
protected AtomicInteger identifierCount = new AtomicInteger();
/**
* Store the serialized element per identifier when serializing
*/
protected SerialMap serializedElements = new SerialMap();
/**
* Store the unserialized element per identifier when unserializing
*/
protected Map<String, ISerializableData> unserializedElements = new HashMap<>();
/**
* Generate a new unique identifier for an element
*/
public String getIdentifier() {
return this.identifierCount.incrementAndGet() + "_";
}
/**
* Checks if the given element identifier belongs to an element that was already serialized
*/
public boolean isAlreadySerialized(String identifier) {
return this.serializedElements.containsKey(identifier);
}
/**
* Checks if the given element identifier belongs to an element that was already unserialized
*/
public boolean isAlreadyUnserialized(String identifier) {
return this.unserializedElements.containsKey(identifier);
}
/**
* Register a serialized element to prevent it from being serialized again
*/
public void putSerialized(String identifier, SerialMap serializedElement) {
this.serializedElements.put(identifier, serializedElement);
}
/**
* Retrieve a serialized element
*/
public SerialMap getSerialized(String identifier) {
if (!this.serializedElements.containsKey(identifier)) {
throw new RuntimeException("No serialized element of identifier " + identifier + " available to get");
}
return this.serializedElements.getMap(identifier);
}
/**
* Register an unserialized element to prevent it from being unserialized again
*/
public void putUnserialized(String identifier, ISerializableData element) {
this.unserializedElements.put(identifier, element);
}
/**
* Retrieve an unserialized element
*/
public <T extends ISerializableData> T getUnserialized(String identifier, Class<T> target) {
if (!this.unserializedElements.containsKey(identifier)) {
throw new RuntimeException("No unserialized element of identifier " + identifier + " available to get");
}
var element = this.unserializedElements.get(identifier);
if (target.isInstance(element)) {
return (T) element;
}
throw new RuntimeException("Failed to get unserialized element from KeyStorage. Expected instance of " +
target.getName() + " but found " + element.getClass().getName());
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("serializedElements", this.serializedElements);
return serialized;
}
public static KeyStorage fromSerial(SerialMap data, UnifyContext context) {
var serializedConstraintsData = data.getMap("serializedElements");
var constraintContext = new KeyStorage();
for (var entry : serializedConstraintsData.entrySet()) {
if (entry.getValue() instanceof SerialMap valueMap) {
constraintContext.putSerialized(entry.getKey(), valueMap);
}
}
return constraintContext;
}
}
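The exact reference convention is project-specific, but a minimal sketch of the intended deduplication pattern could look like this; serializeOnce is a hypothetical helper, not part of the diff:
// Store an element's serialized body only once under its identifier;
// later references can then carry just the identifier.
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;

public class KeyStorageSketch {
    static String serializeOnce(KeyStorage keys, String identifier, SerialMap body) {
        if (!keys.isAlreadySerialized(identifier)) {
            keys.putSerialized(identifier, body);   // first occurrence: store the full body
        }
        return identifier;                          // subsequent occurrences reuse the id
    }
}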

View File

@@ -0,0 +1,31 @@
package de.dhbwstuttgart.server.packet.dataContainers.serialized;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
/**
* Use the following classes for an intermediate serialized tree structure
*/
@JsonTypeInfo(
use = JsonTypeInfo.Id.NAME,
include = JsonTypeInfo.As.PROPERTY,
property = "_t"
)
@JsonSubTypes({
@JsonSubTypes.Type(value = SerialMap.class, name = "m"),
@JsonSubTypes.Type(value = SerialList.class, name = "l"),
@JsonSubTypes.Type(value = SerialValue.class, name = "v"),
@JsonSubTypes.Type(value = SerialUUID.class, name = "u")
})
public interface ISerialNode {
default <T extends ISerialNode> T assertType(Class<T> type) {
if (type.isInstance(this)) {
return (T) this;
}
throw new RuntimeException("Expected ISerialNode to be of type " + type.getName()
+ " but found " + this.getClass().getName() + " instead");
}
}

View File

@@ -0,0 +1,74 @@
package de.dhbwstuttgart.server.packet.dataContainers.serialized;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.function.Function;
import java.util.stream.Stream;
public class SerialList<I extends ISerialNode> extends ArrayList<I> implements ISerialNode {
public SerialList() {}
public SerialList(Collection<I> data) {
this.addAll(data);
}
public SerialList(Stream<I> data) {
this(data.toList());
}
public SerialList(I[] data) {
this(Arrays.stream(data).toList());
}
@SafeVarargs
@JsonIgnore
public static <A extends ISerialNode> ArrayList<A> from(A ...values) {
ArrayList<A> list = new SerialList<>();
Collections.addAll(list, values);
return list;
}
@JsonIgnore
public static <A,B extends ISerialNode> SerialList<B> fromMapped(Stream<A> data, Function<A,B> mapper) {
return new SerialList<>(data.map(mapper).toList());
}
@JsonIgnore
public static <A,B extends ISerialNode> SerialList<B> fromMapped(Collection<A> data, Function<A,B> mapper) {
return SerialList.fromMapped(data.stream(), mapper);
}
@JsonIgnore
public static <A,B extends ISerialNode> SerialList<B> fromMapped(A[] data, Function<A,B> mapper) {
return SerialList.fromMapped(Arrays.stream(data), mapper);
}
@JsonIgnore
public SerialList<SerialMap> assertListOfMaps() {
if (this.isEmpty() || this.get(0) instanceof SerialMap) {
return (SerialList<SerialMap>) this;
}
throw new RuntimeException("Required List to contain SerialMap elements but condition failed");
}
@JsonIgnore
public SerialList<SerialList<?>> assertListOfLists() {
if (this.isEmpty() || this.get(0) instanceof SerialList) {
return (SerialList<SerialList<?>>) this;
}
throw new RuntimeException("Required List to contain SerialList elements but condition failed");
}
@JsonIgnore
public SerialList<SerialValue<?>> assertListOfValues() {
if (this.isEmpty() || this.get(0) instanceof SerialValue) {
return (SerialList<SerialValue<?>>) this;
}
throw new RuntimeException("Required List to contain SerialValue elements but condition failed");
}
@JsonIgnore
public SerialList<SerialUUID> assertListOfUUIDs() {
if (this.isEmpty() || this.get(0) instanceof SerialUUID) {
return (SerialList<SerialUUID>) this;
}
throw new RuntimeException("Required List to contain SerialUUID elements but condition failed");
}
}
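A sketch combining fromMapped(...) and assertListOfMaps() with the SourceLoc record from earlier in this diff; file names and line numbers are arbitrary:
import de.dhbwstuttgart.parser.SourceLoc;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import java.util.List;

public class SerialListSketch {
    static SerialList<SerialMap> demo() {
        KeyStorage keys = new KeyStorage();
        List<SourceLoc> locations = List.of(new SourceLoc("A.jav", 1), new SourceLoc("B.jav", 2));
        SerialList<SerialMap> serial = SerialList.fromMapped(locations, loc -> loc.toSerial(keys));
        return serial.assertListOfMaps();   // passes: every element is a SerialMap
    }
}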

View File

@@ -0,0 +1,84 @@
package de.dhbwstuttgart.server.packet.dataContainers.serialized;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.HashMap;
import java.util.Map;
import javax.annotation.Nullable;
public class SerialMap extends HashMap<String, ISerialNode> implements ISerialNode {
public SerialMap() {
super();
}
public SerialMap(Map<String, ISerialNode> data) {
super(data);
}
@JsonIgnore
public void put(String key, Boolean value) {
this.put(key, new SerialValue<>(value));
}
@JsonIgnore
public void put(String key, String value) {
this.put(key, new SerialValue<>(value));
}
@JsonIgnore
public void put(String key, Number value) {
this.put(key, new SerialValue<>(value));
}
@JsonIgnore
private <T> T get(String key, Class<T> expectedType) {
if (!this.containsKey(key)) {
throw new RuntimeException("Missing required value " + key + " in ObjectMap");
}
var element = this.get(key);
if (element != null && element.getClass() != expectedType) {
throw new RuntimeException(
"Required value " + key + " to be of type " + expectedType.getName() + " but found " + element.getClass().getName()
);
}
return (T)element;
}
@JsonIgnore
public SerialList<?> getList(String key) {
return this.get(key, SerialList.class);
}
@Nullable
@JsonIgnore
public SerialList<?> getListOrNull(String key) {
return this.containsKey(key) ? this.getList(key) : null;
}
@JsonIgnore
public SerialMap getMap(String key) {
return this.get(key, SerialMap.class);
}
@Nullable
@JsonIgnore
public SerialMap getMapOrNull(String key) {
return this.containsKey(key) ? this.getMap(key) : null;
}
@JsonIgnore
public SerialValue<?> getValue(String key) {
return this.get(key, SerialValue.class);
}
@JsonIgnore
public SerialUUID getUUID(String key) {
return this.get(key, SerialUUID.class);
}
@Nullable
@JsonIgnore
public SerialUUID getUUIDOrNull(String key) {
return this.containsKey(key) ? this.getUUID(key) : null;
}
}
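A minimal sketch of the typed put/get accessors above; keys and values are illustrative:
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;

public class SerialMapSketch {
    static void demo() {
        SerialMap map = new SerialMap();
        map.put("name", "Example");                       // wrapped as SerialValue<String>
        map.put("count", 3);                              // wrapped as SerialValue<Number>
        String name = map.getValue("name").getOf(String.class);
        Integer count = map.getValue("count").getOf(Integer.class);
        SerialMap missing = map.getMapOrNull("absent");   // null instead of an exception
        assert "Example".equals(name) && count == 3 && missing == null;
    }
}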

View File

@@ -0,0 +1,13 @@
package de.dhbwstuttgart.server.packet.dataContainers.serialized;
public class SerialUUID implements ISerialNode {
public String uuid;
public SerialUUID() {}
public SerialUUID(String uuid) {
this.uuid = uuid;
}
}

View File

@@ -0,0 +1,28 @@
package de.dhbwstuttgart.server.packet.dataContainers.serialized;
import com.fasterxml.jackson.annotation.JsonIgnore;
public class SerialValue<T> implements ISerialNode {
public T value;
public static final SerialValue<Object> NULL = new SerialValue<>(null);
public SerialValue() {}
public SerialValue(T value) {
this.value = value;
}
@JsonIgnore
public <A> SerialValue<A> assertValueOf(Class<A> targetClass) {
if (this.value == null || targetClass.isInstance(this.value)) {
return (SerialValue<A>) this;
}
throw new RuntimeException("Required Value to contain " + targetClass.getName() + " value but condition failed on" +
" type " + this.value.getClass().getName());
}
@JsonIgnore
public <A> A getOf(Class<A> targetClass) {
return this.assertValueOf(targetClass).value;
}
}
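A tiny sketch of the type-checked unwrap above:
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;

public class SerialValueSketch {
    static void demo() {
        SerialValue<?> value = new SerialValue<>("hello");
        String ok = value.getOf(String.class);       // succeeds, the value is a String
        // value.getOf(Integer.class) would throw the RuntimeException described above
        assert "hello".equals(ok);
    }
}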

View File

@@ -0,0 +1,8 @@
package de.dhbwstuttgart.syntaxtree;
import de.dhbwstuttgart.util.Logger;
public class SyntaxTree {
public static Logger logger = new Logger("SyntaxTree");
}

View File

@@ -1,5 +1,8 @@
package de.dhbwstuttgart.syntaxtree.factory;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.core.JavaTXServer;
public class NameGenerator {
private static String strNextName = "A";
@@ -26,7 +29,11 @@ public class NameGenerator {
// compute the next name and store it in strNextName
inc( strNextName.length() - 1 );
if (JavaTXServer.isRunning) {
throw new RuntimeException("Using the NameGenerator on a server is not allowed");
}
JavaTXCompiler.defaultClientPlaceholderRegistry.addPlaceholder(strReturn);
return strReturn;
}

View File

@@ -1,5 +1,8 @@
package de.dhbwstuttgart.syntaxtree.factory;
import de.dhbwstuttgart.syntaxtree.SyntaxTree;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.util.Logger;
import java.io.Writer;
import java.lang.reflect.Modifier;
import java.util.*;
@@ -31,9 +34,13 @@ import org.antlr.v4.runtime.Token;
public class UnifyTypeFactory {
private static ArrayList<PlaceholderType> PLACEHOLDERS = new ArrayList<>();
public static FiniteClosure generateFC(List<ClassOrInterface> fromClasses, Writer logFile, ClassLoader classLoader, JavaTXCompiler compiler) throws ClassNotFoundException {
public static FiniteClosure generateFC(
List<ClassOrInterface> fromClasses,
Logger logger,
ClassLoader classLoader,
JavaTXCompiler compiler,
PlaceholderRegistry placeholderRegistry
) throws ClassNotFoundException {
/*
The transitive closure has to work.
It must be possible to write List<A> extends AL<A>
@@ -44,7 +51,7 @@ public class UnifyTypeFactory {
In general, they may always have the same names.
TODO: build the transitive closure
*/
return new FiniteClosure(FCGenerator.toUnifyFC(compiler, fromClasses, classLoader), logFile, compiler);
return new FiniteClosure(FCGenerator.toUnifyFC(compiler, fromClasses, classLoader, placeholderRegistry), logger, compiler, placeholderRegistry);
}
public static UnifyPair generateSmallerPair(UnifyType tl, UnifyType tr, SourceLoc location){
@@ -67,23 +74,23 @@ public class UnifyTypeFactory {
* Convert from
* ASTType -> UnifyType
*/
public static UnifyType convert(JavaTXCompiler compiler, RefTypeOrTPHOrWildcardOrGeneric t, Boolean innerType){
public static UnifyType convert(JavaTXCompiler compiler, RefTypeOrTPHOrWildcardOrGeneric t, Boolean innerType, PlaceholderRegistry placeholderRegistry){
if (t instanceof GenericRefType){
return UnifyTypeFactory.convert(compiler, (GenericRefType)t, innerType);
return UnifyTypeFactory.convert(compiler, (GenericRefType)t, innerType, placeholderRegistry);
} else if (t instanceof TypePlaceholder){
return UnifyTypeFactory.convert(compiler, (TypePlaceholder)t, innerType);
return UnifyTypeFactory.convert(compiler, (TypePlaceholder)t, innerType, placeholderRegistry);
} else if (t instanceof ExtendsWildcardType){
return UnifyTypeFactory.convert(compiler, (ExtendsWildcardType)t, innerType);
return UnifyTypeFactory.convert(compiler, (ExtendsWildcardType)t, innerType, placeholderRegistry);
} else if (t instanceof SuperWildcardType) {
return UnifyTypeFactory.convert(compiler, (SuperWildcardType) t, innerType);
return UnifyTypeFactory.convert(compiler, (SuperWildcardType) t, innerType, placeholderRegistry);
} else if (t instanceof RefType){
return UnifyTypeFactory.convert(compiler, (RefType)t, innerType);
return UnifyTypeFactory.convert(compiler, (RefType)t, innerType, placeholderRegistry);
}
//An attempt was made to convert a type that is not yet covered by the factory
throw new NotImplementedException("Der Typ "+t+" kann nicht umgewandelt werden");
}
public static UnifyType convert(JavaTXCompiler compiler, RefType t, Boolean innerType){
public static UnifyType convert(JavaTXCompiler compiler, RefType t, Boolean innerType, PlaceholderRegistry placeholderRegistry){
//Check if it is a FunN Type:
Pattern p = Pattern.compile("Fun(\\d+)[$][$]");
Matcher m = p.matcher(t.getName().toString());
@@ -91,76 +98,76 @@ public class UnifyTypeFactory {
if(b){
Integer N = Integer.valueOf(m.group(1));
if((N + 1) == t.getParaList().size()){
return convertFunN(compiler, t.getParaList(), false);
return convertFunN(compiler, t.getParaList(), false, placeholderRegistry);
}
}
UnifyType ret;
List<UnifyType> params = new ArrayList<>();
if (t.getParaList() != null) {
for (RefTypeOrTPHOrWildcardOrGeneric pT : t.getParaList()) {
params.add(UnifyTypeFactory.convert(compiler, pT, true));
params.add(UnifyTypeFactory.convert(compiler, pT, true, placeholderRegistry));
}
}
var clazz = compiler.getClass(t.getName());
if (clazz != null && clazz.isInterface() && clazz.isFunctionalInterface()) {
var method = clazz.getMethods().stream().filter(x -> Modifier.isAbstract(x.modifier)).findFirst().orElseThrow();
var methodParams = method.getParameterList().getFormalparalist().stream().map(x -> convert(compiler, x.getType(), true)).toList();
var methodParams = method.getParameterList().getFormalparalist().stream().map(x -> convert(compiler, x.getType(), true, placeholderRegistry)).toList();
var generics = StreamSupport.stream(clazz.getGenerics().spliterator(), false).map(GenericTypeVar::getName).toList();
return new FunInterfaceType(t.getName().toString(), new TypeParams(params), methodParams, convert(compiler, method.getReturnType(), true), generics);
return new FunInterfaceType(t.getName().toString(), new TypeParams(params), methodParams, convert(compiler, method.getReturnType(), true, placeholderRegistry), generics);
}
return new ReferenceType(t.getName().toString(),new TypeParams(params));
}
public static UnifyType convertFunN(JavaTXCompiler compiler, List<RefTypeOrTPHOrWildcardOrGeneric> paraList, Boolean innerType){
public static UnifyType convertFunN(JavaTXCompiler compiler, List<RefTypeOrTPHOrWildcardOrGeneric> paraList, Boolean innerType, PlaceholderRegistry placeholderRegistry){
UnifyType ret;
List<UnifyType> params = new ArrayList<>();
if(paraList != null && paraList.size() > 0){
for(RefTypeOrTPHOrWildcardOrGeneric pT : paraList){
params.add(UnifyTypeFactory.convert(compiler, pT, false));
params.add(UnifyTypeFactory.convert(compiler, pT, false, placeholderRegistry));
}
}
ret = FunNType.getFunNType(new TypeParams(params));
return ret;
}
public static UnifyType convert(JavaTXCompiler compiler, TypePlaceholder tph, Boolean innerType){
public static UnifyType convert(JavaTXCompiler compiler, TypePlaceholder tph, Boolean innerType, PlaceholderRegistry placeholderRegistry) {
if (tph.getName().equals("AFR")) {
System.out.println("XXX"+innerType);
SyntaxTree.logger.info("XXX"+innerType);
}
PlaceholderType ntph = new PlaceholderType(tph.getName(), tph.getVariance());
PlaceholderType ntph = new PlaceholderType(tph.getName(), tph.getVariance(), placeholderRegistry);
ntph.setVariance(tph.getVariance());
ntph.setOrCons(tph.getOrCons());
ntph.setWildcardtable(tph.getWildcardtable());
int in = PLACEHOLDERS.indexOf(ntph);
int in = placeholderRegistry.UnifyTypeFactory_PLACEHOLDERS.indexOf(ntph);
if (in == -1) {
PLACEHOLDERS.add(ntph);
placeholderRegistry.UnifyTypeFactory_PLACEHOLDERS.add(ntph);
ntph.setInnerType(innerType);
return ntph;
}
else {
PlaceholderType oldpht = PLACEHOLDERS.get(in);
PlaceholderType oldpht = placeholderRegistry.UnifyTypeFactory_PLACEHOLDERS.get(in);
oldpht.setInnerType(oldpht.isInnerType() || innerType);
return oldpht;
}
}
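
Editor's note on the hunk above: the former static PLACEHOLDERS list is replaced by a per-run PlaceholderRegistry, so parallel unification runs (for example on the server) no longer share mutable global state. The following is a minimal, self-contained Java sketch of that register-or-merge idea; PlaceholderSketch and PlaceholderRegistrySketch are simplified stand-ins, not the project's actual PlaceholderType/PlaceholderRegistry classes.

import java.util.ArrayList;
import java.util.List;

// Hypothetical, simplified stand-ins; the real PlaceholderRegistry/PlaceholderType differ.
class PlaceholderSketch {
    final String name;
    boolean innerType;
    PlaceholderSketch(String name, boolean innerType) { this.name = name; this.innerType = innerType; }
}

class PlaceholderRegistrySketch {
    // per-run list replacing the former static UnifyTypeFactory.PLACEHOLDERS
    private final List<PlaceholderSketch> placeholders = new ArrayList<>();

    // register-or-merge, mirroring the indexOf/add/merge logic in the hunk above
    PlaceholderSketch intern(String name, boolean innerType) {
        for (PlaceholderSketch p : placeholders) {
            if (p.name.equals(name)) {
                p.innerType = p.innerType || innerType; // keep the "stronger" flag
                return p;
            }
        }
        PlaceholderSketch fresh = new PlaceholderSketch(name, innerType);
        placeholders.add(fresh);
        return fresh;
    }

    public static void main(String[] args) {
        PlaceholderRegistrySketch runA = new PlaceholderRegistrySketch();
        PlaceholderRegistrySketch runB = new PlaceholderRegistrySketch();
        runA.intern("T", false);
        runA.intern("T", true);   // merged into the existing entry, innerType becomes true
        System.out.println(runA.intern("T", false).innerType); // true
        System.out.println(runB.intern("T", false).innerType); // false -> runs stay isolated
    }
}
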
public static UnifyType convert(JavaTXCompiler compiler, GenericRefType t, Boolean innerType){
public static UnifyType convert(JavaTXCompiler compiler, GenericRefType t, Boolean innerType, PlaceholderRegistry placeholderRegistry){
return new ReferenceType(t.getParsedName(), true);
}
public static UnifyType convert(JavaTXCompiler compiler, WildcardType t, Boolean innerType){
public static UnifyType convert(JavaTXCompiler compiler, WildcardType t, Boolean innerType, PlaceholderRegistry placeholderRegistry){
if(t.isExtends())
return new ExtendsType(UnifyTypeFactory.convert(compiler, t.getInnerType(), false));
return new ExtendsType(UnifyTypeFactory.convert(compiler, t.getInnerType(), false, placeholderRegistry));
else if(t.isSuper())
return new SuperType(UnifyTypeFactory.convert(compiler, t.getInnerType(), false));
return new SuperType(UnifyTypeFactory.convert(compiler, t.getInnerType(), false, placeholderRegistry));
else throw new NotImplementedException();
}
public static ConstraintSet<UnifyPair> convert(JavaTXCompiler compiler, ConstraintSet<Pair> constraints) {
return constraints.map(c -> UnifyTypeFactory.convert(compiler, c));
public static ConstraintSet<UnifyPair> convert(JavaTXCompiler compiler, ConstraintSet<Pair> constraints, PlaceholderRegistry placeholderRegistry) {
return constraints.map(c -> UnifyTypeFactory.convert(compiler, c, placeholderRegistry));
}
//NEVER USED
@@ -171,30 +178,30 @@ public class UnifyTypeFactory {
// return unifyPairConstraint;
//}
public static UnifyPair convert(JavaTXCompiler compiler, Pair p) {
public static UnifyPair convert(JavaTXCompiler compiler, Pair p, PlaceholderRegistry placeholderRegistry) {
UnifyPair ret = null;
if(p.GetOperator().equals(PairOperator.SMALLERDOT)) {
ret = generateSmallerDotPair(UnifyTypeFactory.convert(compiler, p.TA1, false)
, UnifyTypeFactory.convert(compiler, p.TA2, false), p.getLocation());
ret = generateSmallerDotPair(UnifyTypeFactory.convert(compiler, p.TA1, false, placeholderRegistry)
, UnifyTypeFactory.convert(compiler, p.TA2, false, placeholderRegistry), p.getLocation());
//return ret;
}else if(p.GetOperator().equals(PairOperator.SMALLERNEQDOT)) {
ret = generateSmallNotEqualDotPair(UnifyTypeFactory.convert(compiler, p.TA1, false)
, UnifyTypeFactory.convert(compiler, p.TA2, false), p.getLocation());
ret = generateSmallNotEqualDotPair(UnifyTypeFactory.convert(compiler, p.TA1, false, placeholderRegistry)
, UnifyTypeFactory.convert(compiler, p.TA2, false, placeholderRegistry), p.getLocation());
//return ret;
}else if(p.GetOperator().equals(PairOperator.EQUALSDOT)) {
ret = generateEqualDotPair(UnifyTypeFactory.convert(compiler, p.TA1, false)
, UnifyTypeFactory.convert(compiler, p.TA2, false), p.getLocation());
ret = generateEqualDotPair(UnifyTypeFactory.convert(compiler, p.TA1, false, placeholderRegistry)
, UnifyTypeFactory.convert(compiler, p.TA2, false, placeholderRegistry), p.getLocation());
//return ret;
}else if(p.GetOperator().equals(PairOperator.SMALLER)){
ret = generateSmallerPair(UnifyTypeFactory.convert(compiler, p.TA1, false),
UnifyTypeFactory.convert(compiler, p.TA2, false), p.getLocation());
ret = generateSmallerPair(UnifyTypeFactory.convert(compiler, p.TA1, false, placeholderRegistry),
UnifyTypeFactory.convert(compiler, p.TA2, false, placeholderRegistry), p.getLocation());
}else throw new NotImplementedException();
UnifyType lhs, rhs;
if (((lhs = ret.getLhsType()) instanceof PlaceholderType)
&& ((PlaceholderType)lhs).isWildcardable()
&& (rhs = ret.getRhsType()) instanceof PlaceholderType) {
if (lhs.getName().equals("AQ")) {
System.out.println("");
// SyntaxTree.logger.info("");
}
((PlaceholderType)rhs).enableWildcardtable();
}
@@ -203,7 +210,7 @@ public class UnifyTypeFactory {
&& ((PlaceholderType)rhs).isWildcardable()
&& (lhs = ret.getLhsType()) instanceof PlaceholderType) {
if (rhs.getName().equals("AQ")) {
System.out.println("");
// SyntaxTree.logger.info("");
}
((PlaceholderType)lhs).enableWildcardtable();
}
@@ -214,16 +221,16 @@ public class UnifyTypeFactory {
* Convert from
* UnifyType -> ASTType
*/
public static Set<ResultPair> convert(Set<UnifyPair> unifyPairSet, Map<String,TypePlaceholder> tphs) {
public static Set<ResultPair> convert(Set<UnifyPair> unifyPairSet, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
return unifyPairSet.stream().map(
unifyPair -> convert(unifyPair, tphs))
unifyPair -> convert(unifyPair, tphs, placeholderRegistry))
.collect(Collectors.toSet());
}
public static ResultPair convert(UnifyPair mp, Map<String,TypePlaceholder> tphs) {
public static ResultPair convert(UnifyPair mp, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
if (mp == null) { return null;} //can happen for basePairs
RefTypeOrTPHOrWildcardOrGeneric tl = UnifyTypeFactory.convert(mp.getLhsType(), tphs);
RefTypeOrTPHOrWildcardOrGeneric tr = UnifyTypeFactory.convert(mp.getRhsType(), tphs);
RefTypeOrTPHOrWildcardOrGeneric tl = UnifyTypeFactory.convert(mp.getLhsType(), tphs, placeholderRegistry);
RefTypeOrTPHOrWildcardOrGeneric tr = UnifyTypeFactory.convert(mp.getRhsType(), tphs, placeholderRegistry);
if(tl instanceof TypePlaceholder){
if(tr instanceof TypePlaceholder) {
@@ -232,7 +239,7 @@ public class UnifyTypeFactory {
//Simply ignore it. TODO: This needs to be fixed:
//return new PairTPHequalRefTypeOrWildcardType((TypePlaceholder)tl, ASTFactory.createObjectType());
}else{
return new PairTPHsmallerTPH((TypePlaceholder)tl, (TypePlaceholder)tr, convert(mp.getBasePair(), tphs));
return new PairTPHsmallerTPH((TypePlaceholder)tl, (TypePlaceholder)tr, convert(mp.getBasePair(), tphs, placeholderRegistry));
}
}else if(tr instanceof RefType){
return new PairTPHequalRefTypeOrWildcardType((TypePlaceholder)tl, (RefType) tr);
@@ -244,51 +251,51 @@ public class UnifyTypeFactory {
}else return new PairNoResult(tl, tr);//throw new NotImplementedException();
}
public static RefTypeOrTPHOrWildcardOrGeneric convert(ReferenceType t, Map<String,TypePlaceholder> tphs) {
public static RefTypeOrTPHOrWildcardOrGeneric convert(ReferenceType t, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
if(JavaClassName.Void.equals(t.getName()))return new Void(new NullToken());
if (t.isGenTypeVar()) return new GenericRefType(t.getName(),new NullToken());
RefType ret = new RefType(new JavaClassName(t.getName()),convert(t.getTypeParams(), tphs),new NullToken());
RefType ret = new RefType(new JavaClassName(t.getName()),convert(t.getTypeParams(), tphs, placeholderRegistry),new NullToken());
return ret;
}
public static RefTypeOrTPHOrWildcardOrGeneric convert(FunNType t, Map<String,TypePlaceholder> tphs) {
RefType ret = new RefType(new JavaClassName(t.getName()), convert(t.getTypeParams(), tphs), new NullToken());
public static RefTypeOrTPHOrWildcardOrGeneric convert(FunNType t, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
RefType ret = new RefType(new JavaClassName(t.getName()), convert(t.getTypeParams(), tphs, placeholderRegistry), new NullToken());
return ret;
}
public static RefTypeOrTPHOrWildcardOrGeneric convert(SuperType t, Map<String,TypePlaceholder> tphs) {
RefTypeOrTPHOrWildcardOrGeneric innerType = convert(t.getSuperedType(), tphs);
public static RefTypeOrTPHOrWildcardOrGeneric convert(SuperType t, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
RefTypeOrTPHOrWildcardOrGeneric innerType = convert(t.getSuperedType(), tphs, placeholderRegistry);
return new SuperWildcardType(innerType, new NullToken());
}
public static RefTypeOrTPHOrWildcardOrGeneric convert(ExtendsType t, Map<String,TypePlaceholder> tphs) {
RefTypeOrTPHOrWildcardOrGeneric innerType = convert(t.getExtendedType(), tphs);
public static RefTypeOrTPHOrWildcardOrGeneric convert(ExtendsType t, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
RefTypeOrTPHOrWildcardOrGeneric innerType = convert(t.getExtendedType(), tphs, placeholderRegistry);
return new ExtendsWildcardType(innerType, new NullToken());
}
public static RefTypeOrTPHOrWildcardOrGeneric convert(PlaceholderType t, Map<String,TypePlaceholder> tphs) {
public static RefTypeOrTPHOrWildcardOrGeneric convert(PlaceholderType t, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
TypePlaceholder ret = tphs.get(t.getName());
if(ret == null){ //This TPH was created by the unification algorithm
ret = TypePlaceholder.fresh(new NullToken());
ret = TypePlaceholder.fresh(new NullToken(), placeholderRegistry);
tphs.put(t.getName(), ret);
}
ret.setVariance(t.getVariance());
return ret;
}
public static RefTypeOrTPHOrWildcardOrGeneric convert(UnifyType t, Map<String,TypePlaceholder> tphs) {
if(t instanceof FunNType)return convert((FunNType) t, tphs);
if(t instanceof ReferenceType)return convert((ReferenceType) t, tphs);
if(t instanceof SuperType)return convert((SuperType) t, tphs);
if(t instanceof ExtendsType)return convert((ExtendsType) t, tphs);
if(t instanceof PlaceholderType)return convert((PlaceholderType) t, tphs);
public static RefTypeOrTPHOrWildcardOrGeneric convert(UnifyType t, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
if(t instanceof FunNType)return convert((FunNType) t, tphs, placeholderRegistry);
if(t instanceof ReferenceType)return convert((ReferenceType) t, tphs, placeholderRegistry);
if(t instanceof SuperType)return convert((SuperType) t, tphs, placeholderRegistry);
if(t instanceof ExtendsType)return convert((ExtendsType) t, tphs, placeholderRegistry);
if(t instanceof PlaceholderType)return convert((PlaceholderType) t, tphs, placeholderRegistry);
throw new NotImplementedException("Der Typ "+t+" kann nicht umgewandelt werden");
}
private static List<RefTypeOrTPHOrWildcardOrGeneric> convert(TypeParams typeParams, Map<String,TypePlaceholder> tphs) {
private static List<RefTypeOrTPHOrWildcardOrGeneric> convert(TypeParams typeParams, Map<String,TypePlaceholder> tphs, PlaceholderRegistry placeholderRegistry) {
List<RefTypeOrTPHOrWildcardOrGeneric> ret = new ArrayList<>();
for(UnifyType uT : typeParams){
RefTypeOrTPHOrWildcardOrGeneric toAdd = convert(uT, tphs);
RefTypeOrTPHOrWildcardOrGeneric toAdd = convert(uT, tphs, placeholderRegistry);
ret.add(toAdd);
}
return ret;

View File

@@ -1,8 +1,13 @@
package de.dhbwstuttgart.syntaxtree.type;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.ASTVisitor;
import de.dhbwstuttgart.typeinference.result.ResultSetVisitor;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import org.antlr.v4.runtime.Token;
import java.util.Objects;
@@ -15,7 +20,7 @@ import java.util.Objects;
*
*/
public class ExtendsWildcardType extends WildcardType{
public class ExtendsWildcardType extends WildcardType implements ISerializableData {
/**
* Author: Arne Lüdtke<br/>
@@ -68,4 +73,22 @@ public class ExtendsWildcardType extends WildcardType{
ExtendsWildcardType that = (ExtendsWildcardType) o;
return that.innerType.equals(this.innerType);
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("innerType", this.innerType.toSerial(keyStorage));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static ExtendsWildcardType fromSerial(SerialMap data, UnifyContext context) {
return new ExtendsWildcardType(
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(data.getMap("innerType"), context),
new NullToken()
);
}
}

View File

@@ -1,57 +1,77 @@
package de.dhbwstuttgart.syntaxtree.type;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.ASTVisitor;
import de.dhbwstuttgart.typeinference.result.ResultSetVisitor;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import org.antlr.v4.runtime.Token;
import java.util.Objects;
public class GenericRefType extends RefTypeOrTPHOrWildcardOrGeneric
{
private String name;
public class GenericRefType extends RefTypeOrTPHOrWildcardOrGeneric implements ISerializableData {
private String name;
public GenericRefType(String name, Token offset)
{
super(offset);
this.name = name;
}
public GenericRefType(String name, Token offset) {
super(offset);
this.name = name;
}
public String getParsedName(){
return name.toString();
}
public String getParsedName() {
return name.toString();
}
@Override
public void accept(ASTVisitor visitor) {
visitor.visit(this);
}
@Override
public void accept(ASTVisitor visitor) {
visitor.visit(this);
}
@Override
public <A> A acceptTV(TypeVisitor<A> visitor) {
return visitor.visit(this);
}
@Override
public <A> A acceptTV(TypeVisitor<A> visitor) {
return visitor.visit(this);
}
@Override
public void accept(ResultSetVisitor visitor) {
visitor.visit(this);
}
@Override
public void accept(ResultSetVisitor visitor) {
visitor.visit(this);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
GenericRefType that = (GenericRefType) o;
return name.equals(that.name);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
GenericRefType that = (GenericRefType) o;
return name.equals(that.name);
}
@Override
public int hashCode() {
return Objects.hash(name);
}
@Override
public int hashCode() {
return Objects.hash(name);
}
@Override
public String toString()
{
return "GTV " + this.name;
}
@Override
public String toString() {
return "GTV " + this.name;
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("name", this.name);
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static GenericRefType fromSerial(SerialMap data, UnifyContext context) {
return new GenericRefType(
data.getValue("name").getOf(String.class),
new NullToken()
);
}
}

View File

@@ -1,8 +1,15 @@
package de.dhbwstuttgart.syntaxtree.type;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.parser.scope.JavaClassName;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.ASTVisitor;
import de.dhbwstuttgart.typeinference.result.ResultSetVisitor;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import org.antlr.v4.runtime.Token;
import java.util.ArrayList;
@@ -11,122 +18,137 @@ import java.util.List;
import java.util.Objects;
public class RefType extends RefTypeOrTPHOrWildcardOrGeneric
{
protected final JavaClassName name;
protected final List<RefTypeOrTPHOrWildcardOrGeneric> parameter;
/**
* If primitiveFlag is true, this RefType must be replaced by the
* primitive data type during code generation
*
* Example: java.lang.Integer with the flag set then becomes [int]
*/
boolean primitiveFlag = false; // TODO Should be final
public class RefType extends RefTypeOrTPHOrWildcardOrGeneric implements ISerializableData {
protected final JavaClassName name;
protected final List<RefTypeOrTPHOrWildcardOrGeneric> parameter;
/**
* If primitiveFlag is true, this RefType must be replaced by the
* primitive data type during code generation
* <p>
* Example: java.lang.Integer with the flag set then becomes [int]
*/
boolean primitiveFlag = false; // TODO Should be final
public RefType(JavaClassName fullyQualifiedName, Token offset)
{
this(fullyQualifiedName, new ArrayList<>(), offset);
public RefType(JavaClassName fullyQualifiedName, Token offset) {
this(fullyQualifiedName, new ArrayList<>(), offset);
}
public boolean isPrimitive() {
return primitiveFlag;
}
@Override
public String toString() {
String params = "";
if (parameter.size() > 0) {
params += "<";
Iterator<RefTypeOrTPHOrWildcardOrGeneric> it = parameter.iterator();
while (it.hasNext()) {
RefTypeOrTPHOrWildcardOrGeneric param = it.next();
params += param.toString();
if (it.hasNext()) params += ", ";
}
params += ">";
}
return this.name.toString() + params;
}
@Override
public int hashCode() {
return this.name.hashCode();//Only hash the name. Leads to slow but working HashMaps
}
public RefType(JavaClassName fullyQualifiedName, List<RefTypeOrTPHOrWildcardOrGeneric> parameter, Token offset) {
this(fullyQualifiedName, parameter, offset, false);
}
public RefType(JavaClassName fullyQualifiedName, List<RefTypeOrTPHOrWildcardOrGeneric> parameter, Token offset, boolean primitiveFlag) {
super(offset);
this.name = (fullyQualifiedName);
this.parameter = parameter;
this.primitiveFlag = primitiveFlag;
}
public JavaClassName getName() {
return name;
}
public List<RefTypeOrTPHOrWildcardOrGeneric> getParaList() {
if (this.parameter == null) return new ArrayList<>();
return this.parameter;
}
/**
* Author: Jörg Bäuerle<br/>
*
* @return
*/
public boolean equals(Object obj) {
if (!(obj instanceof RefType refObj)) {
return false;
}
public boolean isPrimitive() {
return primitiveFlag;
}
if (!Objects.equals(this.name, refObj.name)) return false;
boolean ret = true;
@Override
public String toString(){
String params = "";
if(parameter.size()>0){
params += "<";
Iterator<RefTypeOrTPHOrWildcardOrGeneric> it = parameter.iterator();
while(it.hasNext()){
RefTypeOrTPHOrWildcardOrGeneric param = it.next();
params += param.toString();
if(it.hasNext())params += ", ";
}
params += ">";
//if(!(super.equals(obj))) PL 2020-03-12 may need to be commented back in
// return false;
if (parameter == null || parameter.size() == 0) {
ret &= (refObj.getParaList() == null || refObj.getParaList().isEmpty());
} else {
if (refObj.getParaList() == null) {
ret = false;
} else if (parameter.size() != refObj.getParaList().size()) {
ret = false;
} else {
for (int i = 0; i < parameter.size(); i++) {
ret &= parameter.get(i).equals(refObj.getParaList().get(i));
}
return this.name.toString() + params;
}
}
return ret;
@Override
public int hashCode() {
return this.name.hashCode();//Only hash the name. Leads to slow but working HashMaps
}
}
public RefType(JavaClassName fullyQualifiedName, List<RefTypeOrTPHOrWildcardOrGeneric> parameter, Token offset) {
this(fullyQualifiedName, parameter, offset, false);
}
public RefType(JavaClassName fullyQualifiedName, List<RefTypeOrTPHOrWildcardOrGeneric> parameter, Token offset, boolean primitiveFlag) {
super(offset);
this.name = (fullyQualifiedName);
this.parameter = parameter;
this.primitiveFlag = primitiveFlag;
}
@Override
public void accept(ASTVisitor visitor) {
visitor.visit(this);
}
public JavaClassName getName()
{
return name;
}
@Override
public <A> A acceptTV(TypeVisitor<A> visitor) {
return visitor.visit(this);
}
public List<RefTypeOrTPHOrWildcardOrGeneric> getParaList(){
if(this.parameter==null)return new ArrayList<>();
return this.parameter;
}
@Override
public void accept(ResultSetVisitor visitor) {
visitor.visit(this);
}
/**
* Author: Jörg Bäuerle<br/>
* @return
*/
public boolean equals(Object obj)
{
if(obj instanceof RefType){
if (!Objects.equals(this.name, ((RefType) obj).name)) return false;
boolean ret = true;
//if(!(super.equals(obj))) PL 2020-03-12 may need to be commented back in
// return false;
if(parameter==null || parameter.size()==0){
ret &= (((RefType)obj).getParaList()==null || ((RefType)obj).getParaList().size()==0);
}
else{
if(((RefType)obj).getParaList()==null){
ret = false;
}
else if(parameter.size() != ((RefType)obj).getParaList().size())
{
ret = false;
}
else
{
for(int i = 0; i<parameter.size(); i++)
{
ret &= parameter.get(i).equals(((RefType)obj).getParaList().get(i));
}
}
}
return ret;
}
else{
return false;
}
}
@Override
public ISerialNode toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("isPrimitive", this.primitiveFlag);
serialized.put("name", this.name.toString());
serialized.put("parameters", SerialList.fromMapped(this.parameter, param -> param.toSerial(keyStorage)));
@Override
public void accept(ASTVisitor visitor) {
visitor.visit(this);
}
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
@Override
public <A> A acceptTV(TypeVisitor<A> visitor) {
return visitor.visit(this);
}
@Override
public void accept(ResultSetVisitor visitor) {
visitor.visit(this);
}
public static RefType fromSerial(SerialMap data, UnifyContext context) {
return new RefType(
new JavaClassName(data.getValue("name").getOf(String.class)),
data.getList("parameters").assertListOfMaps().stream()
.map(param -> RefTypeOrTPHOrWildcardOrGeneric.fromSerial(param, context))
.toList(),
new NullToken(),
data.getValue("isPrimitive").getOf(Boolean.class)
);
}
}

View File

@@ -1,11 +1,17 @@
package de.dhbwstuttgart.syntaxtree.type;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import de.dhbwstuttgart.syntaxtree.ASTVisitor;
import de.dhbwstuttgart.syntaxtree.SyntaxTreeNode;
import de.dhbwstuttgart.typeinference.result.ResultSetVisitor;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import org.antlr.v4.runtime.Token;
public abstract class RefTypeOrTPHOrWildcardOrGeneric extends SyntaxTreeNode{
public abstract class RefTypeOrTPHOrWildcardOrGeneric extends SyntaxTreeNode implements ISerializableData {
public RefTypeOrTPHOrWildcardOrGeneric(Token offset) {
super(offset);
}
@@ -18,5 +24,26 @@ public abstract class RefTypeOrTPHOrWildcardOrGeneric extends SyntaxTreeNode{
@Override
public abstract boolean equals(Object o);
@Override
public ISerialNode toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("type", this.getClass().getSimpleName());
// we only insert null for the object and expect subclasses to call this method and replace the value with their own payload
serialized.put("object", SerialValue.NULL);
return serialized;
}
public static RefTypeOrTPHOrWildcardOrGeneric fromSerial(SerialMap data, UnifyContext context) {
String type = data.getValue("type").getOf(String.class);
SerialMap object = data.getMap("object");
if (type.equals(ExtendsWildcardType.class.getSimpleName())) return ExtendsWildcardType.fromSerial(object, context);
else if (type.equals(GenericRefType.class.getSimpleName())) return GenericRefType.fromSerial(object, context);
else if (type.equals(SuperWildcardType.class.getSimpleName())) return SuperWildcardType.fromSerial(object, context);
else if (type.equals(RefType.class.getSimpleName())) return RefType.fromSerial(object, context);
else if (type.equals(Void.class.getSimpleName())) return Void.fromSerial(object, context);
else if (type.equals(TypePlaceholder.class.getSimpleName())) return TypePlaceholder.fromSerial(object, context);
else throw new RuntimeException("Could not unserialize class of unhandled type " + type);
}
}
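
The base-class toSerial/fromSerial above implements a tagged-wrapper scheme: the abstract type writes {"type": <simple class name>, "object": null}, each subclass replaces "object" with its own payload, and fromSerial dispatches on the tag. Below is a minimal sketch of that pattern using plain java.util.Map in place of the project's SerialMap/KeyStorage types, which are only assumed here.

import java.util.HashMap;
import java.util.Map;

// Hypothetical sketch of the type-tag + payload wrapper used above; SerialMap is replaced by Map.
abstract class TypeNode {
    Map<String, Object> toSerial() {
        Map<String, Object> wrapper = new HashMap<>();
        wrapper.put("type", this.getClass().getSimpleName()); // tag written by the base class
        wrapper.put("object", null);                          // payload filled in by subclasses
        return wrapper;
    }

    static TypeNode fromSerial(Map<String, Object> data) {
        String type = (String) data.get("type");
        @SuppressWarnings("unchecked")
        Map<String, Object> object = (Map<String, Object>) data.get("object");
        if (type.equals("GenericNode")) return GenericNode.fromPayload(object);
        throw new RuntimeException("Could not deserialize unhandled type " + type);
    }
}

class GenericNode extends TypeNode {
    final String name;
    GenericNode(String name) { this.name = name; }

    @Override
    Map<String, Object> toSerial() {
        Map<String, Object> payload = new HashMap<>();
        payload.put("name", name);
        Map<String, Object> wrapper = super.toSerial(); // reuse the base-class wrapper
        wrapper.put("object", payload);
        return wrapper;
    }

    static GenericNode fromPayload(Map<String, Object> payload) {
        return new GenericNode((String) payload.get("name"));
    }

    public static void main(String[] args) {
        Map<String, Object> serial = new GenericNode("T").toSerial();
        GenericNode roundTripped = (GenericNode) TypeNode.fromSerial(serial);
        System.out.println(roundTripped.name); // T
    }
}
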

View File

@@ -1,9 +1,13 @@
package de.dhbwstuttgart.syntaxtree.type;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.ASTVisitor;
import de.dhbwstuttgart.syntaxtree.GenericTypeVar;
import de.dhbwstuttgart.typeinference.result.ResultSetVisitor;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import org.antlr.v4.runtime.Token;
import java.util.Objects;
@@ -16,7 +20,7 @@ import java.util.Objects;
*
*/
public class SuperWildcardType extends WildcardType{
public class SuperWildcardType extends WildcardType implements ISerializableData {
/**
* Author: Arne Lüdtke<br/>
@@ -80,4 +84,22 @@ public class SuperWildcardType extends WildcardType{
SuperWildcardType that = (SuperWildcardType) o;
return that.innerType.equals(this.innerType);
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("innerType", this.innerType.toSerial(keyStorage));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static SuperWildcardType fromSerial(SerialMap data, UnifyContext context) {
return new SuperWildcardType(
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(data.getMap("innerType"), context),
new NullToken()
);
}
}

View File

@@ -1,9 +1,12 @@
package de.dhbwstuttgart.syntaxtree.type;
import java.util.Hashtable;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.syntaxtree.ASTVisitor;
import de.dhbwstuttgart.syntaxtree.SyntaxTreeNode;
import de.dhbwstuttgart.syntaxtree.factory.NameGenerator;
import de.dhbwstuttgart.typeinference.result.ResultSetVisitor;
import org.antlr.v4.runtime.Token;
@@ -16,7 +19,7 @@ import org.antlr.v4.runtime.Token;
* @author Jörg Bäuerle
* @version $Date: 2013/06/19 12:45:37 $
*/
public class TypePlaceholder extends RefTypeOrTPHOrWildcardOrGeneric
public class TypePlaceholder extends RefTypeOrTPHOrWildcardOrGeneric implements ISerializableData
{
private final String name;
@@ -65,7 +68,12 @@ public class TypePlaceholder extends RefTypeOrTPHOrWildcardOrGeneric
public static TypePlaceholder fresh(Token position){
return new TypePlaceholder(NameGenerator.makeNewName(), position, 0, true);
}
public static TypePlaceholder fresh(Token position, PlaceholderRegistry placeholderRegistry){
String newName = placeholderRegistry.generateFreshPlaceholderName();
return new TypePlaceholder(newName, position, 0, true);
}
public static TypePlaceholder fresh(Token position, int variance, boolean wildcardable){
return new TypePlaceholder(NameGenerator.makeNewName(), position, variance, wildcardable);
}
@@ -139,4 +147,26 @@ public class TypePlaceholder extends RefTypeOrTPHOrWildcardOrGeneric
public Boolean getWildcardtable() {
return wildcardable;
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("name", this.name);
serialized.put("variance", this.variance);
serialized.put("wildcardable", this.wildcardable);
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static TypePlaceholder fromSerial(SerialMap data, UnifyContext context) {
return new TypePlaceholder(
data.getValue("name").getOf(String.class),
new NullToken(),
data.getValue("variance").getOf(Integer.class),
data.getValue("wildcardable").getOf(Boolean.class)
);
}
}
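
TypePlaceholder.fresh(Token, PlaceholderRegistry) above asks the registry for the next free name instead of the static NameGenerator. Assuming generateFreshPlaceholderName is essentially a per-registry, thread-safe counter-to-name mapping (an assumption not confirmed by this diff), the core idea can be sketched as follows.

import java.util.concurrent.atomic.AtomicInteger;

// Hypothetical sketch: per-run fresh-name generation instead of a process-wide NameGenerator.
class FreshNameSketch {
    private final AtomicInteger counter = new AtomicInteger();

    // e.g. TPH0, TPH1, ... unique within this registry instance and safe under parallel use;
    // the real naming scheme may differ (assumption)
    String generateFreshPlaceholderName() {
        return "TPH" + counter.getAndIncrement();
    }

    public static void main(String[] args) {
        FreshNameSketch registry = new FreshNameSketch();
        System.out.println(registry.generateFreshPlaceholderName()); // TPH0
        System.out.println(registry.generateFreshPlaceholderName()); // TPH1
        // a second registry starts over, so parallel runs no longer collide through shared static state
        System.out.println(new FreshNameSketch().generateFreshPlaceholderName()); // TPH0
    }
}
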

View File

@@ -1,14 +1,32 @@
package de.dhbwstuttgart.syntaxtree.type;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import org.antlr.v4.runtime.Token;
import de.dhbwstuttgart.parser.scope.JavaClassName;
public class Void extends RefType
public class Void extends RefType implements ISerializableData
{
public Void(Token offset) {
super(JavaClassName.Void, offset);
}
@Override
public ISerialNode toSerial(KeyStorage keyStorage) {
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
serializedWrapper.put("object", new SerialMap());
return serializedWrapper;
}
public static Void fromSerial(SerialMap data, UnifyContext context) {
return new Void(new NullToken());
}
}

View File

@@ -0,0 +1,8 @@
package de.dhbwstuttgart.target;
import de.dhbwstuttgart.util.Logger;
public class Target {
public static Logger logger = new Logger("Target");
}

View File

@@ -2,6 +2,7 @@ package de.dhbwstuttgart.target.generate;
import de.dhbwstuttgart.bytecode.FunNGenerator;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.environment.ByteArrayClassLoader;
import de.dhbwstuttgart.environment.IByteArrayClassLoader;
import de.dhbwstuttgart.exceptions.DebugException;
import de.dhbwstuttgart.parser.NullToken;
@@ -11,8 +12,7 @@ import de.dhbwstuttgart.syntaxtree.Record;
import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
import de.dhbwstuttgart.syntaxtree.statement.*;
import de.dhbwstuttgart.syntaxtree.type.*;
import de.dhbwstuttgart.syntaxtree.visual.ASTPrinter;
import de.dhbwstuttgart.syntaxtree.visual.OutputGenerator;
import de.dhbwstuttgart.target.Target;
import de.dhbwstuttgart.target.tree.*;
import de.dhbwstuttgart.target.tree.expression.*;
import de.dhbwstuttgart.target.tree.type.*;
@@ -67,11 +67,11 @@ public class ASTToTargetAST {
public IByteArrayClassLoader classLoader;
protected SourceFile sourceFile;
public ASTToTargetAST(List<ResultSet> resultSets, IByteArrayClassLoader classLoader) {
this(null, resultSets, classLoader);
public ASTToTargetAST(List<ResultSet> resultSets) {
this(null, resultSets);
}
public ASTToTargetAST(JavaTXCompiler compiler, List<ResultSet> resultSets, IByteArrayClassLoader classLoader) {
this(compiler, resultSets, null, classLoader);
public ASTToTargetAST(JavaTXCompiler compiler, List<ResultSet> resultSets) {
this(compiler, resultSets, null, new ByteArrayClassLoader());
}
public ASTToTargetAST(JavaTXCompiler compiler, List<ResultSet> resultSets, SourceFile sourceFile, IByteArrayClassLoader classLoader) {
@@ -338,10 +338,10 @@ public class ASTToTargetAST {
var result = r0.stream().map(l -> l.stream().toList()).toList();
System.out.println("============== OUTPUT ==============");
Target.logger.info("============== OUTPUT ==============");
for (var l : result) {
for (var m : l) System.out.println(m.name() + " " + m.getSignature());
System.out.println();
for (var m : l) Target.logger.info(m.name() + " " + m.getSignature());
Target.logger.info("");
}
return result;
}
@@ -782,15 +782,7 @@ public class ASTToTargetAST {
return TargetFunNType.fromParams(params, filteredParams, gep.getReturnType() != null ? 1 : 0);
}
private FunNGenerator.GenericParameters convertToParameters(TargetFunNType input) {
return null;
}
private boolean isSubtype(TargetType test, TargetType other) {
if (other.equals(TargetType.Object)) return true;
if (test instanceof TargetFunNType tfun && other instanceof TargetFunNType ofun)
return isSubtype(new FunNGenerator.GenericParameters(tfun), new FunNGenerator.GenericParameters(ofun));
var testClass = compiler.getClass(new JavaClassName(test.name()));
var otherClass = compiler.getClass(new JavaClassName(other.name()));
if (testClass == null) return false;
@@ -827,6 +819,14 @@ public class ASTToTargetAST {
.toList();
var code = FunNGenerator.generateSpecializedBytecode(gep, superInterfaces);
try {
classLoader.findClass(entry.getKey());
} catch (ClassNotFoundException e) {
try {
classLoader.loadClass(code);
} catch (LinkageError ignored) {}
}
auxiliaries.put(entry.getKey(), code);
}
}
@@ -857,7 +857,7 @@ public class ASTToTargetAST {
classLoader.findClass(superClassName);
} catch (ClassNotFoundException e) {
try {
classLoader.loadClass(superClassName, code);
classLoader.loadClass(code);
} catch (LinkageError ignored) {}
}
auxiliaries.put(superClassName, code);
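
The try/catch around classLoader.findClass/loadClass above defines each piece of generated auxiliary bytecode only if the class loader does not already know the class, and tolerates a racing duplicate definition by swallowing the LinkageError. A compilable sketch of that guard with a tiny byte-array class loader; the project's IByteArrayClassLoader API is only assumed here.

// Hypothetical sketch of the "define generated bytecode only once" guard used above.
class ByteArrayClassLoaderSketch extends ClassLoader {

    // defines a class from raw bytecode; defining the same name twice would raise a LinkageError
    Class<?> define(String name, byte[] code) {
        return defineClass(name, code, 0, code.length);
    }

    // only define the class if this loader (or a parent) cannot resolve it yet,
    // and tolerate a concurrent definition via the LinkageError catch
    void defineIfAbsent(String name, byte[] code) {
        try {
            loadClass(name);               // already known -> nothing to do
        } catch (ClassNotFoundException e) {
            try {
                define(name, code);
            } catch (LinkageError ignored) {
                // someone else defined it in the meantime; keep the existing definition
            }
        }
    }
}
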

View File

@@ -1,10 +1,12 @@
package de.dhbwstuttgart.target.generate;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.syntaxtree.*;
import de.dhbwstuttgart.syntaxtree.statement.*;
import de.dhbwstuttgart.syntaxtree.type.*;
import de.dhbwstuttgart.syntaxtree.type.Void;
import de.dhbwstuttgart.target.Target;
import de.dhbwstuttgart.target.tree.type.TargetGenericType;
import de.dhbwstuttgart.target.tree.type.TargetType;
import de.dhbwstuttgart.typeinference.result.PairTPHEqualTPH;
@@ -138,17 +140,17 @@ public abstract class GenerateGenerics {
this.astToTargetAST = astToTargetAST;
for (var constraint : constraints.results) {
if (constraint instanceof PairTPHsmallerTPH p) {
System.out.println(p.left + " " + p.left.getVariance());
Target.logger.info(p.left + " " + p.left.getVariance());
simplifiedConstraints.add(new PairLT(new TPH(p.left), new TPH(p.right)));
} else if (constraint instanceof PairTPHEqualTPH p) {
equality.put(p.getLeft(), p.getRight());
} else if (constraint instanceof PairTPHequalRefTypeOrWildcardType p) {
System.out.println(p.left + " = " + p.right);
Target.logger.info(p.left + " = " + p.right);
concreteTypes.put(new TPH(p.left), p.right);
}
}
System.out.println("Simplified constraints: " + simplifiedConstraints);
Target.logger.info("Simplified constraints: " + simplifiedConstraints);
}
/*public record GenericsState(Map<TPH, RefTypeOrTPHOrWildcardOrGeneric> concreteTypes, Map<TypePlaceholder, TypePlaceholder> equality) {}
@@ -248,7 +250,7 @@ public abstract class GenerateGenerics {
equality.put(entry.getKey(), to);
}
}
System.out.println(from + " -> " + to + " " + from.getVariance());
Target.logger.info(from + " -> " + to + " " + from.getVariance());
//from.setVariance(to.getVariance());
equality.put(from, to);
referenced.remove(new TPH(from));
@@ -317,7 +319,7 @@ public abstract class GenerateGenerics {
Set<TPH> T2s = new HashSet<>();
findTphs(superType, T2s);
System.out.println("T1s: " + T1s + " T2s: " + T2s);
Target.logger.info("T1s: " + T1s + " T2s: " + T2s);
//Ende
superType = methodCall.receiverType;
@@ -332,7 +334,7 @@ public abstract class GenerateGenerics {
var optMethod = astToTargetAST.findMethod(owner, methodCall.name, methodCall.signatureArguments().stream().map(astToTargetAST::convert).toList());
if (optMethod.isEmpty()) return;
var method2 = optMethod.get();
System.out.println("In: " + method.getName() + " Method: " + method2.getName());
Target.logger.info("In: " + method.getName() + " Method: " + method2.getName());
var generics = family(owner, method2);
// transitive and
@@ -365,7 +367,7 @@ public abstract class GenerateGenerics {
if (!T1s.contains(R1) || !T2s.contains(R2)) continue;
var newPair = new PairLT(R1, R2);
System.out.println("New pair: " + newPair);
Target.logger.info("New pair: " + newPair);
newPairs.add(newPair);
if (!containsRelation(result, newPair))
@@ -567,7 +569,7 @@ public abstract class GenerateGenerics {
Set<Pair> generics(ClassOrInterface owner, Method method) {
if (computedGenericsOfMethods.containsKey(method)) {
var cached = computedGenericsOfMethods.get(method);
System.out.println("Cached " + method.getName() + ": " + cached);
Target.logger.info("Cached " + method.getName() + ": " + cached);
return cached;
}
@@ -596,7 +598,7 @@ public abstract class GenerateGenerics {
normalize(result, classGenerics, usedTphs);
System.out.println(this.getClass().getSimpleName() + " " + method.name + ": " + result);
Target.logger.info(this.getClass().getSimpleName() + " " + method.name + ": " + result);
return result;
}
@@ -675,7 +677,7 @@ public abstract class GenerateGenerics {
normalize(javaResult, null, referencedByClass);
System.out.println(this.getClass().getSimpleName() + " Class " + classOrInterface.getClassName().getClassName() + ": " + javaResult);
Target.logger.info(this.getClass().getSimpleName() + " Class " + classOrInterface.getClassName().getClassName() + ": " + javaResult);
return javaResult;
}
@@ -726,7 +728,7 @@ public abstract class GenerateGenerics {
if (!added) break;
}
System.out.println(chain + " " + chain.stream().map(e -> e.resolve().getVariance()).toList());
Target.logger.info(chain + " " + chain.stream().map(e -> e.resolve().getVariance()).toList());
var variance = chain.get(0).resolve().getVariance();
if (variance != 1) continue;
var index = 0;
@@ -764,7 +766,7 @@ public abstract class GenerateGenerics {
}
for (var pair : elementsToAddToEquality) {
System.out.println(pair);
Target.logger.info(pair);
addToEquality(pair.left, pair.right, referenced);
}
}
@@ -917,11 +919,11 @@ public abstract class GenerateGenerics {
}
}
if (infima.size() > 1) {
System.out.println(infima);
Target.logger.info(infima);
for (var pair : infima) {
var returnTypes = findTypeVariables(method.getReturnType());
var chain = findConnectionToReturnType(returnTypes, input, new HashSet<>(), pair.left);
System.out.println("Find: " + pair.left + " " + chain);
Target.logger.info("Find: " + pair.left + " " + chain);
chain.remove(pair.left);
if (chain.size() > 0) {
for (var tph : chain)
@@ -959,8 +961,8 @@ public abstract class GenerateGenerics {
}
}
newTph.setVariance(variance);
System.out.println(infima + " " + infima.stream().map(i -> i.right.resolve().getVariance()).toList());
System.out.println("Infima new TPH " + newTph + " variance " + variance);
Target.logger.info(infima + " " + infima.stream().map(i -> i.right.resolve().getVariance()).toList());
Target.logger.info("Infima new TPH " + newTph + " variance " + variance);
//referenced.add(newTph);
addToPairs(input, new PairLT(left, new TPH(newTph)));

View File

@@ -1,6 +1,5 @@
package de.dhbwstuttgart.target.generate;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.exceptions.DebugException;
import de.dhbwstuttgart.exceptions.NotImplementedException;
import de.dhbwstuttgart.parser.SyntaxTreeGenerator.AssignToLocal;
@@ -8,14 +7,15 @@ import de.dhbwstuttgart.parser.scope.JavaClassName;
import de.dhbwstuttgart.syntaxtree.*;
import de.dhbwstuttgart.syntaxtree.statement.*;
import de.dhbwstuttgart.syntaxtree.type.*;
import de.dhbwstuttgart.target.Target;
import de.dhbwstuttgart.target.tree.MethodParameter;
import de.dhbwstuttgart.target.tree.TargetMethod;
import de.dhbwstuttgart.target.tree.expression.*;
import de.dhbwstuttgart.target.tree.type.*;
import java.lang.reflect.Modifier;
import java.sql.Array;
import java.util.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
@@ -33,71 +33,54 @@ public class StatementToTargetExpression implements ASTVisitor {
throw new NotImplementedException();
}
private class LambdaCaptureFinder extends TracingStatementVisitor {
// TODO The same mechanism is implemented in Codegen, maybe use it from there?
final Stack<Set<String>> localVariables = new Stack<>();
private final List<MethodParameter> parameters;
private final List<MethodParameter> captures;
LambdaCaptureFinder(List<MethodParameter> parameters, List<MethodParameter> captures) {
localVariables.push(new HashSet<>());
this.parameters = parameters;
this.captures = captures;
}
boolean hasLocalVar(String name) {
for (var localVariables : this.localVariables) {
if (localVariables.contains(name))
return true;
}
return false;
}
@Override
public void visit(Block block) {
localVariables.push(new HashSet<>());
super.visit(block);
localVariables.pop();
}
@Override
public void visit(LocalVar localVar) {
super.visit(localVar);
var capture = new MethodParameter(new TargetTypePattern(converter.convert(localVar.getType()), localVar.name));
if (!hasLocalVar(localVar.name) && !parameters.contains(capture) && !captures.contains(capture))
captures.add(capture);
}
@Override
public void visit(LocalVarDecl varDecl) {
var localVariables = this.localVariables.peek();
localVariables.add(varDecl.getName());
}
@Override
public void visit(LambdaExpression lambda) {
var newCaptures = new ArrayList<MethodParameter>();
var captureFinder = new LambdaCaptureFinder(createParameters(lambda), newCaptures);
lambda.methodBody.accept(captureFinder);
newCaptures.removeAll(parameters);
captures.addAll(newCaptures);
}
}
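
The extracted LambdaCaptureFinder above keeps a stack of per-block local-variable sets; any name that is read but is neither a parameter nor declared in an enclosing block must be captured from the surrounding method. A stripped-down sketch of that bookkeeping, independent of the AST visitor classes used here:

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

// Hypothetical sketch of the capture-finding idea: track declared locals per block,
// and treat any other used name (that is not a parameter) as a captured variable.
class CaptureFinderSketch {
    private final Deque<Set<String>> scopes = new ArrayDeque<>();
    private final Set<String> parameters;
    private final Set<String> captures = new LinkedHashSet<>();

    CaptureFinderSketch(Set<String> parameters) {
        this.parameters = parameters;
        scopes.push(new HashSet<>()); // outermost lambda-body scope
    }

    void enterBlock() { scopes.push(new HashSet<>()); }
    void exitBlock()  { scopes.pop(); }

    void declareLocal(String name) { scopes.peek().add(name); }

    void useVariable(String name) {
        boolean isLocal = scopes.stream().anyMatch(scope -> scope.contains(name));
        if (!isLocal && !parameters.contains(name)) captures.add(name);
    }

    public static void main(String[] args) {
        // pseudo-walk of:  (x) -> { int y = 0; use(x); use(y); use(z); }
        CaptureFinderSketch finder = new CaptureFinderSketch(Set.of("x"));
        finder.declareLocal("y");
        finder.useVariable("x"); // parameter, not captured
        finder.useVariable("y"); // local, not captured
        finder.useVariable("z"); // unknown -> captured from the enclosing method
        System.out.println(List.copyOf(finder.captures)); // [z]
    }
}
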
private List<MethodParameter> createParameters(LambdaExpression lambda) {
return StreamSupport.stream(lambda.params.spliterator(), false)
.map(p -> (FormalParameter) p)
.map(p -> new MethodParameter(new TargetTypePattern(converter.convert(p.getType()), p.getName())))
.toList();
}
@Override
public void visit(LambdaExpression lambdaExpression) {
var parameters = createParameters(lambdaExpression);
var parameters = StreamSupport.stream(lambdaExpression.params.spliterator(), false)
.map(p -> (FormalParameter) p)
.map(p -> new MethodParameter(new TargetTypePattern(converter.convert(p.getType()), p.getName())))
.toList();
List<MethodParameter> captures = new ArrayList<>();
var visitor = new LambdaCaptureFinder(parameters, captures);
lambdaExpression.methodBody.accept(visitor);
lambdaExpression.methodBody.accept(new TracingStatementVisitor() {
// TODO The same mechanism is implemented in Codegen, maybe use it from there?
final Stack<Set<String>> localVariables = new Stack<>();
{
localVariables.push(new HashSet<>());
}
boolean hasLocalVar(String name) {
for (var localVariables : this.localVariables) {
if (localVariables.contains(name))
return true;
}
return false;
}
@Override
public void visit(Block block) {
localVariables.push(new HashSet<>());
super.visit(block);
localVariables.pop();
}
@Override
public void visit(LocalVar localVar) {
super.visit(localVar);
var capture = new MethodParameter(new TargetTypePattern(converter.convert(localVar.getType()), localVar.name));
if (!hasLocalVar(localVar.name) && !parameters.contains(capture) && !captures.contains(capture))
captures.add(capture);
}
@Override
public void visit(LocalVarDecl varDecl) {
var localVariables = this.localVariables.peek();
localVariables.add(varDecl.getName());
}
@Override
public void visit(LambdaExpression lambda) {
} // Don't look at lambda expressions
});
TargetMethod.Signature signature = new TargetMethod.Signature(Set.of(), parameters, converter.convert(lambdaExpression.getReturnType()));
var tpe = converter.convert(lambdaExpression.getType());
@@ -138,10 +121,7 @@ public class StatementToTargetExpression implements ASTVisitor {
@Override
public void visit(BoolExpression bool) {
result = switch(bool.operation) {
case OR -> new TargetBinaryOp.Or(converter.convert(bool.getType()), converter.convert(bool.lexpr), converter.convert(bool.rexpr));
case AND -> new TargetBinaryOp.And(converter.convert(bool.getType()), converter.convert(bool.lexpr), converter.convert(bool.rexpr));
};
Target.logger.info("BoolExpression");
}
@Override
@@ -239,6 +219,22 @@ public class StatementToTargetExpression implements ASTVisitor {
if (methodCall.receiver instanceof ExpressionReceiver expressionReceiver && expressionReceiver.expr instanceof This) {
if (receiverClass == null) throw new DebugException("Class " + receiverName + " does not exist!");
var thisMethod = converter.findMethod(receiverClass, methodCall.name, signature);
if (thisMethod.isEmpty()) {
Target.logger.error("Expected: " + receiverClass.getClassName() + "." + methodCall.name + "(" +
signature.stream().map(TargetType::toSignature).collect(Collectors.joining())+ ")" );
AtomicBoolean hasM = new AtomicBoolean(false);
receiverClass.getMethods().forEach(m -> {
if (Objects.equals(m.getName(), methodCall.name)) {
hasM.set(true);
Target.logger.error("But only has: " + m.name + "(" +
m.getParameterList().getFormalparalist().stream().map(t -> t.getType().toString()).collect(Collectors.joining())+ ")" );
}
});
if (!hasM.get())
Target.logger.error("But does not contain method at all");
}
ClassOrInterface finalReceiverClass = receiverClass;
foundMethod = thisMethod.orElseGet(() -> findMethod(finalReceiverClass.getSuperClass().getName(), methodCall.name, signature).orElseThrow());
} else if (!isFunNType) {
@@ -255,7 +251,7 @@ public class StatementToTargetExpression implements ASTVisitor {
isInterface = receiverClass.isInterface();
}
System.out.println(argList);
Target.logger.info(argList);
result = new TargetMethodCall(converter.convert(methodCall.getType()), returnType, argList, converter.convert(methodCall.receiver), methodCall.getArgumentList().getArguments().stream().map(converter::convert).toList(), receiverType, methodCall.name, isStatic, isInterface, isPrivate);
}

View File

@@ -9,13 +9,8 @@ import de.dhbwstuttgart.syntaxtree.Method;
import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
import de.dhbwstuttgart.syntaxtree.factory.NameGenerator;
import de.dhbwstuttgart.syntaxtree.type.GenericRefType;
import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import org.antlr.v4.runtime.Token;
import javax.swing.text.html.Option;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;

View File

@@ -19,11 +19,11 @@ public class MethodAssumption extends Assumption{
private ClassOrInterface receiver;
private RefTypeOrTPHOrWildcardOrGeneric retType;
List<? extends RefTypeOrTPHOrWildcardOrGeneric> params;
private final Boolean isInherited;
private final Boolean isOverridden;
private final boolean isInherited;
private final boolean isOverridden;
public MethodAssumption(ClassOrInterface receiver, RefTypeOrTPHOrWildcardOrGeneric retType,
List<? extends RefTypeOrTPHOrWildcardOrGeneric> params, TypeScope scope, Boolean isInherited, Boolean isOverridden){
List<? extends RefTypeOrTPHOrWildcardOrGeneric> params, TypeScope scope, boolean isInherited, boolean isOverridden){
super(scope);
this.receiver = receiver;
this.retType = retType;
@@ -73,11 +73,11 @@ public class MethodAssumption extends Assumption{
return TYPEStmt.getReceiverType(receiver, resolver);
}
public Boolean isInherited() {
public boolean isInherited() {
return isInherited;
}
public Boolean isOverridden() {
public boolean isOverridden() {
return isOverridden;
}
}

View File

@@ -1,81 +1,171 @@
package de.dhbwstuttgart.typeinference.constraints;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.ISerialNode;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialUUID;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import java.util.Collection;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import javax.annotation.Nullable;
public class Constraint<A> extends HashSet<A> implements Comparable<Constraint<A>> {
private static final long serialVersionUID = 1L;
private Boolean isInherited = false;//both are only needed for the method constraints
private Boolean isImplemented = false;
/*
* used during code generation to select the correct method signature
*/
/*private*/ Set<A> methodSignatureConstraint = new HashSet<>();
private Constraint<A> extendConstraint = null;
public Constraint() {
super();
}
public Constraint(Boolean isInherited, Boolean isImplemented) {
this.isInherited = isInherited;
this.isImplemented = isImplemented;
}
public Constraint(Boolean isInherited, Boolean isImplemented, Constraint<A> extendConstraint, Set<A> methodSignatureConstraint) {
this.isInherited = isInherited;
this.isImplemented = isImplemented;
this.extendConstraint = extendConstraint;
this.methodSignatureConstraint = methodSignatureConstraint;
}
public void setIsInherited(Boolean isInherited) {
this.isInherited = isInherited;
}
public Boolean isInherited() {
return isInherited;
}
public Boolean isImplemented() {
return isImplemented;
}
public Constraint<A> getExtendConstraint() {
return extendConstraint;
}
public void setExtendConstraint(Constraint<A> c) {
extendConstraint = c;
}
public Set<A> getmethodSignatureConstraint() {
return methodSignatureConstraint;
}
public void setmethodSignatureConstraint(Set<A> c) {
methodSignatureConstraint = c;
}
public class Constraint<A extends IConstraintElement> extends HashSet<A> implements ISerializableData {
private static final long serialVersionUID = 1L;
private boolean isInherited = false;//both are only needed for the method constraints
private boolean isImplemented = false;
public String toString() {
return super.toString() + "\nisInherited = " + isInherited + " isOveridden = " + isImplemented
+ methodSignatureConstraint
//" + extendsContraint: " + (extendConstraint != null ? extendConstraint.toStringBase() : "null" )
+ "\n" ;
}
public String toStringBase() {
return super.toString();
}
/*
* used during code generation to select the correct method signature
*/
/*private*/ Set<A> methodSignatureConstraint = new HashSet<>();
private Constraint<A> extendConstraint = null;
public Constraint() {
super();
}
public Constraint(int initialCapacity) {
super(initialCapacity);
}
public Constraint(boolean isInherited, boolean isImplemented) {
this.isInherited = isInherited;
this.isImplemented = isImplemented;
}
public Constraint(boolean isInherited, boolean isImplemented, Constraint<A> extendConstraint, Set<A> methodSignatureConstraint) {
this.isInherited = isInherited;
this.isImplemented = isImplemented;
this.extendConstraint = extendConstraint;
this.methodSignatureConstraint = methodSignatureConstraint;
}
public void setIsInherited(boolean isInherited) {
this.isInherited = isInherited;
}
public boolean isInherited() {
return isInherited;
}
public boolean isImplemented() {
return isImplemented;
}
public Constraint<A> getExtendConstraint() {
return extendConstraint;
}
public void setExtendConstraint(Constraint<A> c) {
extendConstraint = c;
}
public Set<A> getmethodSignatureConstraint() {
return methodSignatureConstraint;
}
public void setmethodSignatureConstraint(Set<A> c) {
methodSignatureConstraint = c;
}
public <B extends IConstraintElement> Constraint<B> createdMapped(Function<A,B> mapper) {
Constraint<B> result = new Constraint<>(this.size());
for (A element : this) {
result.add(mapper.apply(element));
}
return result;
}
public String toString() {
return super.toString() + "\nisInherited = " + isInherited
+ " isOveridden = " + isImplemented
+ " msc[" + methodSignatureConstraint.size() + "] = " + methodSignatureConstraint
//" + extendsContraint: " + (extendConstraint != null ? extendConstraint.toStringBase() : "null" )
+ "\n";
}
public String toStringBase() {
return super.toString();
}
private String serialUUID = null;
@Override
public SerialUUID toSerial(KeyStorage keyStorage) {
final String uuid = serialUUID == null ? keyStorage.getIdentifier() : serialUUID;
if (serialUUID == null) serialUUID = uuid;
if (!keyStorage.isAlreadySerialized(uuid)) {
SerialMap serialized = new SerialMap();
keyStorage.putSerialized(uuid, serialized);
serialized.put("isInherited", isInherited);
serialized.put("isImplemented", isImplemented);
serialized.put("extendedConstraint", extendConstraint == null ? null :
extendConstraint.toSerial(keyStorage));
Function<A, ISerialNode> pairMapper = pair -> {
if (pair instanceof Pair simplePair) return simplePair.toSerial(keyStorage);
if (pair instanceof UnifyPair unifyPair) return unifyPair.toSerial(keyStorage);
throw new RuntimeException("No serialization is supported for type " + pair.getClass().getName());
};
serialized.put("methodSignatureConstraint", methodSignatureConstraint == null ? null :
SerialList.fromMapped(methodSignatureConstraint, pairMapper));
serialized.put("setElements", SerialList.fromMapped(this, pairMapper));
}
// return only the unique key
return new SerialUUID(uuid);
}
public static <T extends IConstraintElement> Constraint<T> fromSerial(SerialUUID serialUUID, UnifyContext context, Class<T> target, KeyStorage keyStorage) {
String uuid = serialUUID.uuid;
if (!keyStorage.isAlreadyUnserialized(uuid)) {
Constraint<T> constraint = new Constraint<>();
// immediately add the object to the context to prevent infinite recursion
keyStorage.putUnserialized(uuid, constraint);
// retrieve the serialized data and start deserializing it
SerialMap data = keyStorage.getSerialized(uuid);
constraint.isInherited = data.getValue("isInherited").getOf(Boolean.class);
constraint.isImplemented = data.getValue("isImplemented").getOf(Boolean.class);
constraint.extendConstraint = Optional.ofNullable(data.getUUIDOrNull("extendedConstraint"))
.map(v -> Constraint.fromSerial(v, context, target, keyStorage))
.orElse(null);
// to convert the maps back to elements, we unfortunately have to make some assumptions about the generic types...
Function<ISerialNode, T> pairUnmapper = pairData -> {
if (target == Pair.class && pairData instanceof SerialMap pairMap) {
return (T) Pair.fromSerial(pairMap, context);
}
if (target == UnifyPair.class && pairData instanceof SerialUUID pairUUID) {
return (T) UnifyPair.fromSerial(pairUUID, context, keyStorage);
}
throw new RuntimeException("No serialization is supported for target type " + target.getName());
};
constraint.methodSignatureConstraint =
Optional.ofNullable(data.getListOrNull("methodSignatureConstraint"))
.map(l -> l.stream().map(pairUnmapper).collect(Collectors.toSet()))
.orElse(null);
constraint.addAll(
data.getList("setElements")
.stream().map(pairUnmapper).toList());
}
return keyStorage.getUnserialized(uuid, Constraint.class);
}
@Override
public int compareTo(Constraint<A> o) {
return this.toString().compareTo(o.toString());
}
}
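
Constraint.toSerial above returns only a SerialUUID and parks the payload in the KeyStorage, which both deduplicates shared constraints and breaks reference cycles (extendConstraint can point back at an already-serialized constraint); fromSerial registers the half-built object before filling its fields for the same reason. A minimal sketch of that reference-table mechanism, using hypothetical names rather than the project's KeyStorage/SerialUUID API:

import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

// Hypothetical sketch of UUID-keyed (de)serialization with a shared reference table,
// loosely mirroring the KeyStorage/SerialUUID mechanism used above.
class RefTableSketch {
    final Map<Object, String> idsByObject = new HashMap<>();
    final Map<String, Map<String, Object>> payloadsById = new HashMap<>();
    final Map<String, Node> deserializedById = new HashMap<>();

    static class Node {
        String name;
        Node next; // may form a cycle

        String toSerial(RefTableSketch table) {
            String id = table.idsByObject.get(this);
            if (id != null) return id;               // already serialized -> return only the reference
            id = UUID.randomUUID().toString();
            table.idsByObject.put(this, id);
            Map<String, Object> payload = new HashMap<>();
            table.payloadsById.put(id, payload);     // park the payload before recursing (breaks cycles)
            payload.put("name", name);
            payload.put("next", next == null ? null : next.toSerial(table));
            return id;
        }

        static Node fromSerial(String id, RefTableSketch table) {
            Node existing = table.deserializedById.get(id);
            if (existing != null) return existing;   // cycle or shared reference -> reuse
            Node node = new Node();
            table.deserializedById.put(id, node);    // register before filling the fields
            Map<String, Object> payload = table.payloadsById.get(id);
            node.name = (String) payload.get("name");
            String nextId = (String) payload.get("next");
            node.next = nextId == null ? null : fromSerial(nextId, table);
            return node;
        }
    }

    public static void main(String[] args) {
        RefTableSketch table = new RefTableSketch();
        Node a = new Node(); a.name = "a";
        Node b = new Node(); b.name = "b";
        a.next = b; b.next = a;                      // cycle
        String idOfA = a.toSerial(table);
        Node restored = Node.fromSerial(idOfA, table);
        System.out.println(restored.next.next == restored); // true -> cycle preserved
    }
}
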

View File

@@ -1,63 +1,80 @@
package de.dhbwstuttgart.typeinference.constraints;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.GuavaSetOperations;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import java.util.*;
import java.util.function.BinaryOperator;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;
public class ConstraintSet<A> {
Constraint<A> undConstraints = new Constraint<>();
List<Set<Constraint<A>>> oderConstraints = new ArrayList<>();
public class ConstraintSet<A extends IConstraintElement> implements ISerializableData {
Constraint<A> undConstraints = new Constraint<>();
List<Set<Constraint<A>>> oderConstraints = new ArrayList<>();
public void addUndConstraint(A p){
undConstraints.add(p);
}
public void addUndConstraint(A p) {
undConstraints.add(p);
}
public void addOderConstraint(Set<Constraint<A>> methodConstraints) {
oderConstraints.add(methodConstraints);
}
public void addOderConstraint(Set<Constraint<A>> methodConstraints) {
oderConstraints.add(methodConstraints);
}
public void addAllUndConstraint(Constraint<A> allUndConstraints){
undConstraints.addAll(allUndConstraints);
}
public void addAllOderConstraint(List<Set<Constraint<A>>> allOderConstraints){
this.oderConstraints.addAll(allOderConstraints);
}
public void addAll(ConstraintSet constraints) {
this.addAllUndConstraint(constraints.undConstraints);
this.addAllOderConstraint(constraints.oderConstraints);
}
public void addAllUndConstraint(Constraint<A> allUndConstraints) {
undConstraints.addAll(allUndConstraints);
}
@Override
public String toString(){
BinaryOperator<String> b = (x,y) -> x+y;
return "\nUND:" + this.undConstraints.toString() + "\n" +
"ODER:" + this.oderConstraints.stream().reduce("", (x,y) -> x.toString()+ "\n" +y, b);
//cartesianProduct().toString();
}
public void addAllOderConstraint(List<Set<Constraint<A>>> allOderConstraints) {
this.oderConstraints.addAll(allOderConstraints);
}
public Set<List<Constraint<A>>> cartesianProduct(){
Set<Constraint<A>> toAdd = new HashSet<>();
toAdd.add(undConstraints);
List<Set<Constraint<A>>> allConstraints = new ArrayList<>();
allConstraints.add(toAdd);
allConstraints.addAll(oderConstraints);
return new GuavaSetOperations().cartesianProduct(allConstraints);
}
public void addAll(ConstraintSet constraints) {
this.addAllUndConstraint(constraints.undConstraints);
this.addAllOderConstraint(constraints.oderConstraints);
}
public <B> ConstraintSet<B> map(Function<? super A, ? extends B> o) {
Hashtable<Constraint<A>,Constraint<B>> CSA2CSB = new Hashtable<>();
ConstraintSet<B> ret = new ConstraintSet<>();
ret.undConstraints = undConstraints.stream().map(o).collect(Collectors.toCollection(Constraint<B>::new));
List<Set<Constraint<B>>> newOder = new ArrayList<>();
@Override
public String toString() {
BinaryOperator<String> b = (x, y) -> x + y;
return "\nUND:\n" + this.undConstraints.toString() +
"ODER:" + this.oderConstraints.stream().reduce("", (x, y) -> x + "\n\t" + y, b) +
"\n";
//cartesianProduct().toString();
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof ConstraintSet<?> other)) return false;
return Objects.equals(undConstraints, other.undConstraints)
&& Objects.equals(oderConstraints, other.oderConstraints);
}
@Override
public int hashCode() {
return Objects.hash(undConstraints, oderConstraints);
}
public Set<List<Constraint<A>>> cartesianProduct() {
Set<Constraint<A>> toAdd = new HashSet<>();
toAdd.add(undConstraints);
List<Set<Constraint<A>>> allConstraints = new ArrayList<>();
allConstraints.add(toAdd);
allConstraints.addAll(oderConstraints);
return new GuavaSetOperations().cartesianProduct(allConstraints);
}
public <B extends IConstraintElement> ConstraintSet<B> map(Function<? super A, ? extends B> o) {
Hashtable<Constraint<A>, Constraint<B>> CSA2CSB = new Hashtable<>();
ConstraintSet<B> ret = new ConstraintSet<>();
ret.undConstraints = undConstraints.stream().map(o).collect(Collectors.toCollection(Constraint<B>::new));
List<Set<Constraint<B>>> newOder = new ArrayList<>();
/*
for(Set<Constraint<A>> oderConstraint : oderConstraints){
oderConstraint.forEach(as -> {
@@ -68,25 +85,25 @@ public class ConstraintSet<A> {
CSA2CSB.put(as, newConst);} );
}
*/
for(Set<Constraint<A>> oderConstraint : oderConstraints){
newOder.add(
oderConstraint.stream().map((Constraint<A> as) -> {
Constraint<B> newConst = as.stream()
.map(o)
.collect(Collectors.toCollection((
() -> new Constraint<B> (as.isInherited(),
as.isImplemented(),
(as.getExtendConstraint() != null)
? as.getExtendConstraint().stream().map(o).collect(Collectors.toCollection(Constraint::new))
: null,
as.getmethodSignatureConstraint().stream().map(o).collect(Collectors.toCollection(HashSet::new))))
));
//CSA2CSB.put(as, newConst);
return newConst;
for (Set<Constraint<A>> oderConstraint : oderConstraints) {
newOder.add(
oderConstraint.stream().map((Constraint<A> as) -> {
Constraint<B> newConst = as.stream()
.map(o)
.collect(Collectors.toCollection((
() -> new Constraint<B>(as.isInherited(),
as.isImplemented(),
(as.getExtendConstraint() != null)
? as.getExtendConstraint().stream().map(o).collect(Collectors.toCollection(Constraint::new))
: null,
as.getmethodSignatureConstraint().stream().map(o).collect(Collectors.toCollection(HashSet::new))))
));
//CSA2CSB.put(as, newConst);
return newConst;
/*
Constraint<B> bs = CSA2CSB.get(as);
@@ -95,36 +112,60 @@ public class ConstraintSet<A> {
}
return bs;
*/
}).collect(Collectors.toSet())
);
}
ret.oderConstraints = newOder;
return ret;
}).collect(Collectors.toSet())
);
}
public void forEach (Consumer<? super A> c) {
undConstraints.stream().forEach(c);
for(Set<Constraint<A>> oderConstraint : oderConstraints){
oderConstraint.parallelStream().forEach((Constraint<A> as) ->
as.stream().forEach(c));
}
ret.oderConstraints = newOder;
return ret;
}
public void forEach(Consumer<? super A> c) {
undConstraints.stream().forEach(c);
for (Set<Constraint<A>> oderConstraint : oderConstraints) {
oderConstraint.parallelStream().forEach((Constraint<A> as) ->
as.stream().forEach(c));
}
public Set<A> getAll () {
Set<A> ret = new HashSet<>();
ret.addAll(undConstraints);
for(Set<Constraint<A>> oderConstraint : oderConstraints){
oderConstraint.parallelStream().forEach((Constraint<A> as) -> ret.addAll(as));
}
return ret;
}
public List<Set<Constraint<A>>> getOderConstraints() {
return oderConstraints;
}
public Set<A> getUndConstraints() {
return undConstraints;
}
public Set<A> getAll() {
Set<A> ret = new HashSet<>(undConstraints);
for (Set<Constraint<A>> oderConstraint : oderConstraints) {
oderConstraint.parallelStream().forEach(ret::addAll);
}
return ret;
}
public List<Set<Constraint<A>>> getOderConstraints() {
return oderConstraints;
}
public Set<A> getUndConstraints() {
return undConstraints;
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("undConstraints", undConstraints.toSerial(keyStorage));
serialized.put("oderConstraints", SerialList.fromMapped(oderConstraints, oderConstraintSet ->
SerialList.fromMapped(oderConstraintSet, oderConstraint ->
oderConstraint.toSerial(keyStorage))
));
return serialized;
}
public static <T extends IConstraintElement> ConstraintSet<T> fromSerial(SerialMap data, UnifyContext context, Class<T> target, KeyStorage keyStorage) {
ConstraintSet<T> constraintSet = new ConstraintSet<>();
constraintSet.undConstraints = Constraint.fromSerial(data.getUUID("undConstraints"), context, target, keyStorage);
constraintSet.oderConstraints = data.getList("oderConstraints").assertListOfLists().stream()
.map(oderConstraintSetData -> oderConstraintSetData.assertListOfUUIDs().stream()
.map(oderConstraintData -> Constraint.fromSerial(oderConstraintData, context, target, keyStorage))
.collect(Collectors.toSet())
).toList();
return constraintSet;
}
}
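To make the und/oder semantics of ConstraintSet concrete: all undConstraints must hold, while each entry of oderConstraints is a set of alternatives from which exactly one Constraint is chosen; cartesianProduct() enumerates those choices. A small sketch, assuming Constraint behaves as a Set of its elements (as the addAll/stream usage above suggests) and using a hypothetical Demo record only to satisfy the IConstraintElement bound:

// hypothetical element type, just for illustration
record Demo(String name) implements IConstraintElement {}

ConstraintSet<Demo> cs = new ConstraintSet<>();
cs.addUndConstraint(new Demo("always"));

Constraint<Demo> overloadA = new Constraint<>();
overloadA.add(new Demo("A"));
Constraint<Demo> overloadB = new Constraint<>();
overloadB.add(new Demo("B"));
cs.addOderConstraint(Set.of(overloadA, overloadB));

// two combinations: [ {always}, {A} ] and [ {always}, {B} ]
Set<List<Constraint<Demo>>> combinations = cs.cartesianProduct();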

View File

@@ -0,0 +1,4 @@
package de.dhbwstuttgart.typeinference.constraints;
public interface IConstraintElement {
}

View File

@@ -1,72 +1,70 @@
package de.dhbwstuttgart.typeinference.constraints;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.parser.SourceLoc;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import org.antlr.v4.runtime.Token;
public class Pair implements Serializable
{
public final RefTypeOrTPHOrWildcardOrGeneric TA1;
public final RefTypeOrTPHOrWildcardOrGeneric TA2;
public class Pair implements Serializable, IConstraintElement, ISerializableData {
public final RefTypeOrTPHOrWildcardOrGeneric TA1;
public final RefTypeOrTPHOrWildcardOrGeneric TA2;
private SourceLoc location;
private SourceLoc location;
private PairOperator eOperator = PairOperator.SMALLER;
private Boolean noUnification = false;
private PairOperator eOperator = PairOperator.SMALLER;
private boolean noUnification = false;
private Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2 )
{
this.TA1 = TA1;
this.TA2 = TA2;
if(TA1 == null || TA2 == null)
throw new NullPointerException();
eOperator = PairOperator.SMALLER;
}
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator eOp)
{
// constructor
this(TA1,TA2);
this.eOperator = eOp;
}
private Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2) {
this.TA1 = TA1;
this.TA2 = TA2;
if (TA1 == null || TA2 == null)
throw new NullPointerException();
eOperator = PairOperator.SMALLER;
}
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator e0p, SourceLoc location) {
this(TA1, TA2, e0p);
this.location = location;
}
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator eOp, Boolean noUnification)
{
// constructor
this(TA1,TA2);
this.eOperator = eOp;
this.noUnification = noUnification;
}
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator eOp) {
// constructor
this(TA1, TA2);
this.eOperator = eOp;
}
public SourceLoc getLocation() {
return this.location;
}
public String toString()
{
// otth: prints a pair as a string --> for debugging and comparison
String strElement1 = "NULL";
String strElement2 = "NULL";
String Operator = "<.";
if( TA1 != null )
strElement1 = TA1.toString();
if( TA2 != null )
strElement2 = TA2.toString();
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator e0p, SourceLoc location) {
this(TA1, TA2, e0p);
this.location = location;
}
public Pair(RefTypeOrTPHOrWildcardOrGeneric TA1, RefTypeOrTPHOrWildcardOrGeneric TA2, PairOperator eOp, boolean noUnification) {
// constructor
this(TA1, TA2);
this.eOperator = eOp;
this.noUnification = noUnification;
}
public SourceLoc getLocation() {
return this.location;
}
public String toString() {
// otth: prints a pair as a string --> for debugging and comparison
String strElement1 = "NULL";
String strElement2 = "NULL";
String Operator = "<.";
if (TA1 != null)
strElement1 = TA1.toString();
if (TA2 != null)
strElement2 = TA2.toString();
/* PL commented out 2018-05-24
if(OperatorEqual())
@@ -76,80 +74,104 @@ public class Pair implements Serializable
if(OperatorSmallerExtends())
Operator = "<?";
*/
return "\n(" + strElement1 + " " + eOperator.toString() + " " + strElement2 + ")";
/*- Equals: " + bEqual*/
}
/**
* <br/>Author: Jörg Bäuerle
* @param obj
* @return
*/
public boolean equals(Object obj)
{
boolean ret = true;
ret &= (obj instanceof Pair);
if(!ret)return ret;
ret &= ((Pair)obj).TA1.equals(this.TA1);
ret &= ((Pair)obj).TA2.equals(this.TA2);
return ret;
}
return "\n(P: " + strElement1 + " " + eOperator.toString() + " " + strElement2 + ")";
/**
* Author: Arne Lüdtke<br/>
* Checks whether the operator is of type Equal.
*/
public boolean OperatorEqual()
{
return eOperator == PairOperator.EQUALSDOT;
}
/**
* Author: Arne Lüdtke<br/>
* Checks whether the operator is of type Smaller.
*/
public boolean OperatorSmaller()
{
return eOperator == PairOperator.SMALLER;
}
/**
* Author: Arne Lüdtke<br/>
* Checks whether the operator is of type SmallerExtends.
*/
public boolean OperatorSmallerExtends()
{
return eOperator == PairOperator.SMALLERDOTWC;
}
/**
* Author: Arne Lüdtke<br/>
* Returns the operator.
*/
public PairOperator GetOperator()
{
return eOperator;
}
/*- Equals: " + bEqual*/
}
public boolean OperatorSmallerDot() {
return eOperator == PairOperator.SMALLERDOT;
}
static public Map<String, TypePlaceholder> generateTPHMap(ConstraintSet<Pair> constraints) {
HashMap<String, TypePlaceholder> ret = new HashMap<>();
constraints.map((Pair p) -> {
if (p.TA1 instanceof TypePlaceholder) {
ret.put(((TypePlaceholder) p.TA1).getName(), (TypePlaceholder) p.TA1);
}
if (p.TA2 instanceof TypePlaceholder) {
ret.put(((TypePlaceholder) p.TA2).getName(), (TypePlaceholder) p.TA2);
}
return null;
});
return ret;
}
/**
* <br/>Author: Jörg Bäuerle
*
* @param obj
* @return
*/
public boolean equals(Object obj) {
return (
(obj instanceof Pair pairObj) &&
pairObj.TA1.equals(this.TA1) &&
pairObj.TA2.equals(this.TA2)
);
}
/**
* Author: Arne Lüdtke<br/>
* Checks whether the operator is of type Equal.
*/
public boolean OperatorEqual() {
return eOperator == PairOperator.EQUALSDOT;
}
/**
* Author: Arne Lüdtke<br/>
* Checks whether the operator is of type Smaller.
*/
public boolean OperatorSmaller() {
return eOperator == PairOperator.SMALLER;
}
/**
* Author: Arne Lüdtke<br/>
* Checks whether the operator is of type SmallerExtends.
*/
public boolean OperatorSmallerExtends() {
return eOperator == PairOperator.SMALLERDOTWC;
}
/**
* Author: Arne Lüdtke<br/>
* Returns the operator.
*/
public PairOperator GetOperator() {
return eOperator;
}
public boolean OperatorSmallerDot() {
return eOperator == PairOperator.SMALLERDOT;
}
static public Map<String, TypePlaceholder> generateTPHMap(ConstraintSet<Pair> constraints) {
HashMap<String, TypePlaceholder> ret = new HashMap<>();
constraints.map((Pair p) -> {
if (p.TA1 instanceof TypePlaceholder) {
ret.put(((TypePlaceholder) p.TA1).getName(), (TypePlaceholder) p.TA1);
}
if (p.TA2 instanceof TypePlaceholder) {
ret.put(((TypePlaceholder) p.TA2).getName(), (TypePlaceholder) p.TA2);
}
return null;
});
return ret;
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
// toString() already recurses into TA1 and TA2 without cycle protection, so just like there we can ignore potential infinite recursion here
SerialMap serialized = new SerialMap();
serialized.put("ta1", this.TA1.toSerial(keyStorage));
serialized.put("ta2", this.TA2.toSerial(keyStorage));
serialized.put("op", this.eOperator.toString());
serialized.put("noUnification", this.noUnification ? 1 : 0);
serialized.put("location", this.location == null ? null : this.location.toSerial(keyStorage));
return serialized;
}
public static Pair fromSerial(SerialMap data, UnifyContext context) {
String op = data.getValue("op").getOf(String.class);
SerialMap ta1 = data.getMap("ta1");
SerialMap ta2 = data.getMap("ta2");
boolean noUnification = data.getValue("noUnification").getOf(Integer.class) == 1;
SerialMap location = data.getMapOrNull("location");
var pair = new Pair(
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(ta1, context),
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(ta2, context),
PairOperator.fromString(op),
noUnification
);
if (location != null) pair.location = SourceLoc.fromSerial(location);
return pair;
}
}
// ino.end
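For orientation, Pair.toSerial above produces a flat map along these lines (key names taken from the code; the nested ta1/ta2 maps depend on the respective type's own toSerial, and the operator value shown here is only assumed to be what PairOperator.toString() prints):

{ "ta1": { ... }, "ta2": { ... }, "op": "SMALLERDOT", "noUnification": 0, "location": null }

Note that the boolean noUnification is encoded as the integer 0/1, which is why fromSerial reads it back via getOf(Integer.class) == 1.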

View File

@@ -1,15 +1,19 @@
package de.dhbwstuttgart.typeinference.result;
import de.dhbwstuttgart.exceptions.NotImplementedException;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
/**
* contains all pairs that cannot occur in a result;
* they are needed for origPairs in PairTPHsmallerTPH, because pairs
* that are not results (e.g. for FunN$$) can also occur there
*/
public class PairNoResult extends ResultPair<RefTypeOrTPHOrWildcardOrGeneric, RefTypeOrTPHOrWildcardOrGeneric>{
public class PairNoResult extends ResultPair<RefTypeOrTPHOrWildcardOrGeneric, RefTypeOrTPHOrWildcardOrGeneric>
implements ISerializableData {
//public final TypePlaceholder left;
//public final TypePlaceholder right;
@@ -17,7 +21,7 @@ public class PairNoResult extends ResultPair<RefTypeOrTPHOrWildcardOrGeneric, Re
* the original pair from which this ResultPair was created;
* important for generated generics
*/
ResultPair origPair;
ResultPair<?,?> origPair;
public PairNoResult(RefTypeOrTPHOrWildcardOrGeneric left, RefTypeOrTPHOrWildcardOrGeneric right){
super(left, right);
@@ -29,4 +33,24 @@ public class PairNoResult extends ResultPair<RefTypeOrTPHOrWildcardOrGeneric, Re
throw new NotImplementedException();
//visitor.visit(this);
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("left", this.getLeft().toSerial(keyStorage));
serialized.put("right", this.getRight().toSerial(keyStorage));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static PairNoResult fromSerial2(SerialMap data, UnifyContext context) {
SerialMap left = data.getMap("left");
SerialMap right = data.getMap("right");
return new PairNoResult(
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(left, context),
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(right, context)
);
}
}

View File

@@ -1,9 +1,13 @@
package de.dhbwstuttgart.typeinference.result;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
public class PairTPHEqualTPH extends ResultPair<TypePlaceholder, TypePlaceholder> {
public class PairTPHEqualTPH extends ResultPair<TypePlaceholder, TypePlaceholder> implements ISerializableData {
public PairTPHEqualTPH(TypePlaceholder tl, TypePlaceholder tr) {
super(tl, tr);
}
@@ -12,4 +16,24 @@ public class PairTPHEqualTPH extends ResultPair<TypePlaceholder, TypePlaceholder
public void accept(ResultPairVisitor visitor) {
visitor.visit(this);
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("left", this.getLeft().toSerial(keyStorage));
serialized.put("right", this.getRight().toSerial(keyStorage));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static PairTPHEqualTPH fromSerial2(SerialMap data, UnifyContext context) {
SerialMap left = data.getMap("left");
SerialMap right = data.getMap("right");
return new PairTPHEqualTPH(
(TypePlaceholder) RefTypeOrTPHOrWildcardOrGeneric.fromSerial(left, context),
(TypePlaceholder) RefTypeOrTPHOrWildcardOrGeneric.fromSerial(right, context)
);
}
}

View File

@@ -1,13 +1,17 @@
package de.dhbwstuttgart.typeinference.result;
import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
/**
* Stands for A =. RefType
*/
public class PairTPHequalRefTypeOrWildcardType extends ResultPair{
public class PairTPHequalRefTypeOrWildcardType extends ResultPair<TypePlaceholder, RefTypeOrTPHOrWildcardOrGeneric>
implements ISerializableData {
public final TypePlaceholder left;
public final RefTypeOrTPHOrWildcardOrGeneric right;
@@ -26,4 +30,24 @@ public class PairTPHequalRefTypeOrWildcardType extends ResultPair{
public String toString() {
return "(" + left.toString() + " = " + right.toString() + ")";
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("left", this.getLeft().toSerial(keyStorage));
serialized.put("right", this.getRight().toSerial(keyStorage));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static PairTPHequalRefTypeOrWildcardType fromSerial2(SerialMap data, UnifyContext context) {
SerialMap left = data.getMap("left");
SerialMap right = data.getMap("right");
return new PairTPHequalRefTypeOrWildcardType(
(TypePlaceholder)RefTypeOrTPHOrWildcardOrGeneric.fromSerial(left, context),
RefTypeOrTPHOrWildcardOrGeneric.fromSerial(right, context)
);
}
}

View File

@@ -1,12 +1,17 @@
package de.dhbwstuttgart.typeinference.result;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
/**
* Stands for: A <. B
*/
public class PairTPHsmallerTPH extends ResultPair{
public class PairTPHsmallerTPH extends ResultPair<TypePlaceholder,TypePlaceholder>
implements ISerializableData {
public final TypePlaceholder left;
public final TypePlaceholder right;
@@ -14,7 +19,7 @@ public class PairTPHsmallerTPH extends ResultPair{
* the original pair from which this ResultPair was created;
* important for generated generics
*/
ResultPair origPair;
ResultPair<?,?> origPair;
public PairTPHsmallerTPH(TypePlaceholder left, TypePlaceholder right){
super(left, right);
@@ -22,7 +27,7 @@ public class PairTPHsmallerTPH extends ResultPair{
this.right = right;
}
public PairTPHsmallerTPH(TypePlaceholder left, TypePlaceholder right, ResultPair origPair){
public PairTPHsmallerTPH(TypePlaceholder left, TypePlaceholder right, ResultPair<?,?> origPair){
this(left, right);
this.origPair = origPair;
}
@@ -36,4 +41,24 @@ public class PairTPHsmallerTPH extends ResultPair{
public String toString() {
return "(" + left.toString() + " < " + right.toString() + ")";
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("left", this.getLeft().toSerial(keyStorage));
serialized.put("right", this.getRight().toSerial(keyStorage));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static PairTPHsmallerTPH fromSerial2(SerialMap data, UnifyContext context) {
SerialMap left = data.getMap("left");
SerialMap right = data.getMap("right");
return new PairTPHsmallerTPH(
(TypePlaceholder) RefTypeOrTPHOrWildcardOrGeneric.fromSerial(left, context),
(TypePlaceholder) RefTypeOrTPHOrWildcardOrGeneric.fromSerial(right, context)
);
}
}

View File

@@ -1,11 +1,17 @@
package de.dhbwstuttgart.typeinference.result;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
/**
* Pairs that represent the unification result
*/
public abstract class ResultPair<A extends RefTypeOrTPHOrWildcardOrGeneric, B extends RefTypeOrTPHOrWildcardOrGeneric> implements Comparable<ResultPair<A,B>> {
public abstract class ResultPair<A extends RefTypeOrTPHOrWildcardOrGeneric,B extends RefTypeOrTPHOrWildcardOrGeneric>
implements ISerializableData {
private final A left;
private final B right;
@@ -59,13 +65,34 @@ public abstract class ResultPair<A extends RefTypeOrTPHOrWildcardOrGeneric, B ex
return true;
}
@Override
public int compareTo(ResultPair<A, B> o) {
if (o == null) {
return 1; // this is greater than null
}
return o.left.toString().compareTo(this.left.toString());
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
String type = switch (this) {
case PairNoResult _ -> "pnr";
case PairTPHEqualTPH _ -> "ptet";
case PairTPHsmallerTPH _ -> "ptst";
case PairTPHequalRefTypeOrWildcardType _ -> "ptertwt";
default -> throw new RuntimeException("No type defined for ResultPair of class " + this.getClass().getName());
};
serialized.put("type", type);
// we only insert null for the object here; each subclass is expected to call this method and replace the value with its own serialized data
serialized.put("object", SerialValue.NULL);
return serialized;
}
public static <A extends RefTypeOrTPHOrWildcardOrGeneric,B extends RefTypeOrTPHOrWildcardOrGeneric> ResultPair<A,B>
fromSerial(SerialMap data, UnifyContext context) {
String type = data.getValue("type").getOf(String.class);
SerialMap object = data.getMap("object");
return switch (type) {
case "pnr" -> (ResultPair) PairNoResult.fromSerial2(object, context);
case "ptet" -> (ResultPair) PairTPHEqualTPH.fromSerial2(object, context);
case "ptst" -> (ResultPair) PairTPHsmallerTPH.fromSerial2(object, context);
case "ptertwt" -> (ResultPair) PairTPHequalRefTypeOrWildcardType.fromSerial2(object, context);
default -> throw new RuntimeException("Could not unserialize class of unhandled type " + type);
};
}
}
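Taken together, ResultPair.toSerial and the subclass overrides produce a type-tagged wrapper that fromSerial dispatches on. Roughly (tag strings from the switch above; the contents of left/right depend on the serialized type representation):

{ "type": "ptst", "object": { "left": { ...serialized TypePlaceholder... }, "right": { ...serialized TypePlaceholder... } } }

The base class writes the tag and a null placeholder for "object"; each subclass calls super.toSerial(keyStorage) and then overwrites "object" with its own left/right map, so adding a new ResultPair subclass requires both a new tag in toSerial and a new case in fromSerial.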

View File

@@ -1,8 +1,16 @@
package de.dhbwstuttgart.typeinference.result;
import java.util.Collections;
import com.google.common.collect.Ordering;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTaskHelper;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.util.Logger;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import de.dhbwstuttgart.exceptions.NotImplementedException;
@@ -12,167 +20,179 @@ import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.SuperWildcardType;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import java.util.stream.Collectors;
@SuppressWarnings("rawtypes")
public class ResultSet implements Comparable<ResultSet>{
public class ResultSet implements ISerializableData {
public final Set<ResultPair> results;
public Set<ResultPair<TypePlaceholder, TypePlaceholder>> genIns;
public final Set<ResultPair> results;
public Set<ResultPair<TypePlaceholder, TypePlaceholder>> genIns;
public ResultSet(Set<ResultPair> set){
this.results = set;
this.genIns = new HashSet<>();
results.forEach(x -> { if (x instanceof PairTPHsmallerTPH) { this.genIns.add(x);}} );
public ResultSet(Set<ResultPair> set) {
this.results = set;
this.genIns = TypeUnifyTaskHelper.getPresizedHashSet(results.size());
results.forEach(x -> {
if (x instanceof PairTPHsmallerTPH) {
this.genIns.add(x);
}
});
}
public boolean contains(ResultPair toCheck) {
return this.results.contains(toCheck);
}
public void remove(ResultPair toCheck) {
results.remove(toCheck);
}
public ResolvedType resolveType(RefTypeOrTPHOrWildcardOrGeneric type) {
if (type instanceof TypePlaceholder)
return new Resolver(this).resolve((TypePlaceholder) type);
if (type instanceof GenericRefType) return new ResolvedType(type, new HashSet<>());
if (type instanceof RefType) {
RelatedTypeWalker related = new RelatedTypeWalker(null, this);
type.accept(related);
return new ResolvedType(type, related.relatedTPHs);
} else {
throw new NotImplementedException();
//return new ResolvedType(type,new HashSet<>());
}
}
public List<ResultPair> getSortedResults() {
return results.stream().sorted().toList();
}
public boolean contains(ResultPair toCheck) {
return this.results.contains(toCheck);
}
public void remove(ResultPair toCheck) {
results.remove(toCheck);
public String toString() {
var results = new ArrayList<>(this.results);
results.sort(
Comparator
.comparingInt((ResultPair o) -> o.getLeft().toString().length())
.thenComparing(o -> o.getLeft().toString())
.thenComparingInt(o -> o.getRight().toString().length())
.thenComparing(o -> o.getRight().toString())
);
return results.toString();
}
@Override
public boolean equals(Object o) {
if (o instanceof ResultSet other) {
// sort both result lists
var thisElements = new ArrayList<>(this.results);
thisElements.sort(Ordering.usingToString());
var otherElements = new ArrayList<>(other.results);
otherElements.sort(Ordering.usingToString());
return thisElements.equals(otherElements);
} else {
return false;
}
}
public ResolvedType resolveType(RefTypeOrTPHOrWildcardOrGeneric type) {
if(type instanceof TypePlaceholder)
return new Resolver(this).resolve((TypePlaceholder)type);
if(type instanceof GenericRefType)return new ResolvedType(type, new HashSet<>());
if(type instanceof RefType) {
RelatedTypeWalker related = new RelatedTypeWalker(null, this);
type.accept(related);
return new ResolvedType(type, related.relatedTPHs);
} else {
throw new NotImplementedException();
//return new ResolvedType(type,new HashSet<>());
}
}
@Override
public int hashCode() {
return results.hashCode();
}
public String toString() {
return results.toString();
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("results", SerialList.fromMapped(results, result -> result.toSerial(keyStorage)));
return serialized;
}
@Override
public boolean equals(Object o) {
if (o instanceof ResultSet) {
ResultSet other = (ResultSet)o;
return this.results.equals(other.results);
} else {
return false;
}
}
@Override
public int hashCode() {
return results.hashCode();
}
@Override
public int compareTo(ResultSet o) {
List<ResultPair> thisSorted = this.getSortedResults();
List<ResultPair> otherSorted = o.getSortedResults();
int sizeCompare = Integer.compare(thisSorted.size(), otherSorted.size());
if (sizeCompare != 0) return sizeCompare;
for (int i = 0; i < thisSorted.size(); i++) {
int cmp = thisSorted.get(i).compareTo(otherSorted.get(i));
if (cmp != 0) return cmp;
}
return 0;
}
public static ResultSet fromSerial(SerialMap data, UnifyContext context) {
var resultsData = data.getList("results").assertListOfMaps();
return new ResultSet(resultsData.stream().map(resultData -> ResultPair.fromSerial(resultData, context)).collect(Collectors.toSet()));
}
}
class Resolver implements ResultSetVisitor {
private final ResultSet result;
private TypePlaceholder toResolve;
private RefTypeOrTPHOrWildcardOrGeneric resolved;
private final Set<GenericInsertPair> additionalTPHs = new HashSet<>();
private ResultPair<?,?> currentPair;
private final ResultSet result;
private TypePlaceholder toResolve;
private RefTypeOrTPHOrWildcardOrGeneric resolved;
private final Set<GenericInsertPair> additionalTPHs = new HashSet<>();
private ResultPair<?, ?> currentPair;
public Resolver(ResultSet resultPairs){
this.result = resultPairs;
public static Logger logger = new Logger("Resolver");
public Resolver(ResultSet resultPairs) {
this.result = resultPairs;
}
public ResolvedType resolve(TypePlaceholder tph) {
toResolve = tph;
resolved = null;
logger.info(tph.toString());
for (ResultPair<?, ?> resultPair : result.results) {
if (resultPair instanceof PairTPHEqualTPH && ((PairTPHEqualTPH) resultPair).getLeft().equals(toResolve)) {
currentPair = resultPair;
return resolve(((PairTPHEqualTPH) resultPair).getRight());
}
}
for (ResultPair<?, ?> resultPair : result.results) {
currentPair = resultPair;
resultPair.accept(this);
}
if (resolved == null) {//the TPH does not occur in the result:
resolved = tph;
}
public ResolvedType resolve(TypePlaceholder tph){
toResolve = tph;
resolved = null;
System.out.println(tph.toString());
for(ResultPair<?,?> resultPair : result.results) {
if(resultPair instanceof PairTPHEqualTPH && ((PairTPHEqualTPH) resultPair).getLeft().equals(toResolve)){
currentPair = resultPair;
return resolve(((PairTPHEqualTPH) resultPair).getRight());
}
}
for(ResultPair<?,?> resultPair : result.results){
currentPair = resultPair;
resultPair.accept(this);
}
if(resolved==null){//the TPH does not occur in the result:
resolved = tph;
}
ResolvedType result = new ResolvedType(resolved, additionalTPHs);//resolved;
result.setResultPair(currentPair);
return result;
}
ResolvedType result = new ResolvedType(resolved, additionalTPHs);//resolved;
result.setResultPair(currentPair);
return result;
@Override
public void visit(PairTPHsmallerTPH p) {
currentPair = p;
if (p.left.equals(toResolve)) {
additionalTPHs.add(new GenericInsertPair(p.left, p.right));
additionalTPHs.addAll(new RelatedTypeWalker(p.right, result).relatedTPHs);
}
if (p.right.equals(toResolve))
additionalTPHs.addAll(new RelatedTypeWalker(p.left, result).relatedTPHs);
}
@Override
public void visit(PairTPHsmallerTPH p) {
currentPair = p;
if(p.left.equals(toResolve)){
additionalTPHs.add(new GenericInsertPair(p.left, p.right));
additionalTPHs.addAll(new RelatedTypeWalker(p.right, result).relatedTPHs);
}
if(p.right.equals(toResolve))
additionalTPHs.addAll(new RelatedTypeWalker(p.left, result).relatedTPHs);
@Override
public void visit(PairTPHequalRefTypeOrWildcardType p) {
currentPair = p;
if (p.left.equals(toResolve)) {
resolved = p.right;
RelatedTypeWalker related = new RelatedTypeWalker(null, result);
p.right.accept(related);
additionalTPHs.addAll(related.relatedTPHs);
}
}
@Override
public void visit(PairTPHequalRefTypeOrWildcardType p) {
currentPair = p;
if(p.left.equals(toResolve)){
resolved = p.right;
RelatedTypeWalker related = new RelatedTypeWalker(null, result);
p.right.accept(related);
additionalTPHs.addAll(related.relatedTPHs);
}
}
@Override
public void visit(PairTPHEqualTPH p) {
//Do nothing. This case is handled in the resolve method
}
@Override
public void visit(PairTPHEqualTPH p) {
//Do nothing. This case is handled in the resolve method
}
@Override
public void visit(RefType refType) {
@Override
public void visit(RefType refType) {
}
}
@Override
public void visit(GenericRefType genericRefType) {
@Override
public void visit(GenericRefType genericRefType) {
}
}
@Override
public void visit(SuperWildcardType superWildcardType) {
@Override
public void visit(SuperWildcardType superWildcardType) {
}
}
@Override
public void visit(TypePlaceholder typePlaceholder) {
@Override
public void visit(TypePlaceholder typePlaceholder) {
}
}
@Override
public void visit(ExtendsWildcardType extendsWildcardType) {
@Override
public void visit(ExtendsWildcardType extendsWildcardType) {
}
}
}
@@ -182,149 +202,150 @@ class Resolver implements ResultSetVisitor {
@SuppressWarnings("rawtypes")
class TPHResolver implements ResultSetVisitor {
private final TypePlaceholder tph;
Set<GenericInsertPair> resolved = new HashSet<>();
private final ResultSet resultSet;
private final TypePlaceholder tph;
Set<GenericInsertPair> resolved = new HashSet<>();
private final ResultSet resultSet;
TPHResolver(TypePlaceholder tph, ResultSet resultSet){
this.resultSet = resultSet;
this.tph = tph;
for(ResultPair p : resultSet.results){
p.accept(this);
}
if(resolved.size() == 0){
resolved.add(new GenericInsertPair(tph, null));
}
TPHResolver(TypePlaceholder tph, ResultSet resultSet) {
this.resultSet = resultSet;
this.tph = tph;
for (ResultPair p : resultSet.results) {
p.accept(this);
}
@Override
public void visit(PairTPHsmallerTPH p) {
if(p.left.equals(tph) || p.right.equals(tph)){
resolved.add(new GenericInsertPair(p.left, p.right));
}
if (resolved.size() == 0) {
resolved.add(new GenericInsertPair(tph, null));
}
}
@Override
public void visit(PairTPHequalRefTypeOrWildcardType p) {
TypePlaceholder otherSide = null;
if(p.right.equals(tph)){
otherSide = p.left;
}
if(otherSide != null){
Set<ResultPair> newResultSet = new HashSet<>(this.resultSet.results);
newResultSet.remove(p);
resolved.addAll(new TPHResolver(otherSide, new ResultSet(newResultSet)).resolved);
}
@Override
public void visit(PairTPHsmallerTPH p) {
if (p.left.equals(tph) || p.right.equals(tph)) {
resolved.add(new GenericInsertPair(p.left, p.right));
}
}
@Override
public void visit(PairTPHEqualTPH p) {
//ignore. This is handled by the Resolver
@Override
public void visit(PairTPHequalRefTypeOrWildcardType p) {
TypePlaceholder otherSide = null;
if (p.right.equals(tph)) {
otherSide = p.left;
}
@Override
public void visit(RefType refType) {
if (otherSide != null) {
Set<ResultPair> newResultSet = new HashSet<>(this.resultSet.results);
newResultSet.remove(p);
resolved.addAll(new TPHResolver(otherSide, new ResultSet(newResultSet)).resolved);
}
}
@Override
public void visit(GenericRefType genericRefType) {
@Override
public void visit(PairTPHEqualTPH p) {
//ignore. This is handled by the Resolver
}
}
@Override
public void visit(RefType refType) {
@Override
public void visit(SuperWildcardType superWildcardType) {
}
}
@Override
public void visit(GenericRefType genericRefType) {
@Override
public void visit(TypePlaceholder typePlaceholder) {
}
}
@Override
public void visit(SuperWildcardType superWildcardType) {
@Override
public void visit(ExtendsWildcardType extendsWildcardType) {
}
}
@Override
public void visit(TypePlaceholder typePlaceholder) {
}
@Override
public void visit(ExtendsWildcardType extendsWildcardType) {
}
}
@SuppressWarnings("rawtypes")
class RelatedTypeWalker implements ResultSetVisitor {
final Set<GenericInsertPair> relatedTPHs = new HashSet<>();
private final TypePlaceholder toResolve;
private final ResultSet resultSet;
final Set<GenericInsertPair> relatedTPHs = new HashSet<>();
private final TypePlaceholder toResolve;
private final ResultSet resultSet;
/**
* Walks over the resultSet and collects all TPHs that are related to start
* @param start - may be null when the walker is used for a RefType
* @param resultSet
*/
RelatedTypeWalker(TypePlaceholder start, ResultSet resultSet){
this.toResolve = start;
this.resultSet = resultSet;
int resolved = 0;
do{
resolved = relatedTPHs.size();
for(ResultPair p : resultSet.results){
p.accept(this);
p.accept(this);
}
}while(resolved - relatedTPHs.size() > 0);
}
/**
* Walks over the resultSet and collects all TPHs that are related to start
*
* @param start - may be null when the walker is used for a RefType
* @param resultSet
*/
RelatedTypeWalker(TypePlaceholder start, ResultSet resultSet) {
this.toResolve = start;
this.resultSet = resultSet;
int resolved = 0;
do {
resolved = relatedTPHs.size();
for (ResultPair p : resultSet.results) {
p.accept(this);
p.accept(this);
}
} while (resolved - relatedTPHs.size() > 0);
}
@Override
public void visit(PairTPHsmallerTPH p) {
if(p.getRight().equals(toResolve)){
relatedTPHs.addAll(new TPHResolver(p.right, resultSet).resolved);
//relatedTPHs.addAll(new RelatedTypeWalker(p.right, resultSet).relatedTPHs);
}
if(p.getLeft().equals(toResolve)){
relatedTPHs.addAll(new TPHResolver(p.left, resultSet).resolved);
//relatedTPHs.addAll(new RelatedTypeWalker(p.left, resultSet).relatedTPHs);
}
@Override
public void visit(PairTPHsmallerTPH p) {
if (p.getRight().equals(toResolve)) {
relatedTPHs.addAll(new TPHResolver(p.right, resultSet).resolved);
//relatedTPHs.addAll(new RelatedTypeWalker(p.right, resultSet).relatedTPHs);
}
if (p.getLeft().equals(toResolve)) {
relatedTPHs.addAll(new TPHResolver(p.left, resultSet).resolved);
//relatedTPHs.addAll(new RelatedTypeWalker(p.left, resultSet).relatedTPHs);
}
}
@Override
public void visit(PairTPHequalRefTypeOrWildcardType p) {
if(p.getLeft().equals(toResolve)){
p.getRight().accept(this);
}
@Override
public void visit(PairTPHequalRefTypeOrWildcardType p) {
if (p.getLeft().equals(toResolve)) {
p.getRight().accept(this);
}
}
@Override
public void visit(PairTPHEqualTPH p) {
//Can be ignored. These cases are handled by the Resolver
}
@Override
public void visit(PairTPHEqualTPH p) {
//Can be ignored. These cases are handled by the Resolver
}
/*
The following visit methods append every TPH they encounter to relatedTPHs:
this is used when all relatedTPHs from the parameters of a RefType should be collected
*/
@Override
public void visit(RefType refType) {
for(RefTypeOrTPHOrWildcardOrGeneric param : refType.getParaList()){
param.accept(this);
}
@Override
public void visit(RefType refType) {
for (RefTypeOrTPHOrWildcardOrGeneric param : refType.getParaList()) {
param.accept(this);
}
}
@Override
public void visit(SuperWildcardType superWildcardType) {
superWildcardType.getInnerType().accept(this);
}
@Override
public void visit(SuperWildcardType superWildcardType) {
superWildcardType.getInnerType().accept(this);
}
@Override
public void visit(TypePlaceholder typePlaceholder) {
relatedTPHs.addAll(new TPHResolver(typePlaceholder, resultSet).resolved);
}
@Override
public void visit(TypePlaceholder typePlaceholder) {
relatedTPHs.addAll(new TPHResolver(typePlaceholder, resultSet).resolved);
}
@Override
public void visit(ExtendsWildcardType extendsWildcardType) {
extendsWildcardType.getInnerType().accept(this);
}
@Override
public void visit(ExtendsWildcardType extendsWildcardType) {
extendsWildcardType.getInnerType().accept(this);
}
@Override
public void visit(GenericRefType genericRefType) {
}
@Override
public void visit(GenericRefType genericRefType) {
}
}

View File

@@ -14,6 +14,7 @@ import de.dhbwstuttgart.typeinference.assumptions.TypeInferenceBlockInformation;
import de.dhbwstuttgart.typeinference.assumptions.TypeInferenceInformation;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTaskHelper;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.util.BiRelation;
import org.antlr.v4.runtime.Token;
@@ -33,7 +34,7 @@ public class TYPE {
public ConstraintSet getConstraints() {
ConstraintSet ret = new ConstraintSet();
for (ClassOrInterface cl : sf.KlassenVektor) {
var allClasses = new HashSet<ClassOrInterface>();
Set<ClassOrInterface> allClasses = TypeUnifyTaskHelper.getPresizedHashSet(allAvailableClasses.size() + sf.availableClasses.size());
allClasses.addAll(allAvailableClasses);
allClasses.addAll(sf.availableClasses);
ret.addAll(getConstraintsClass(cl, new TypeInferenceInformation(allClasses)));
@@ -68,7 +69,7 @@ public class TYPE {
for(SourceFile sourceFile : sfs){
for(JavaClassName importName : sourceFile.imports){
System.out.println(importName);
context.logger().info(importName);
try {
classes.add(ASTFactory.createClass(classLoader.loadClass(importName.toString())));
} catch (ClassNotFoundException e) {

View File

@@ -1,6 +1,8 @@
//PL 2018-12-19: check merge
package de.dhbwstuttgart.typeinference.typeAlgo;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTaskHelper;
import java.util.*;
import java.util.stream.Collectors;
@@ -73,7 +75,7 @@ public class TYPEStmt implements StatementVisitor {
@Override
public void visit(LambdaExpression lambdaExpression) {
TypePlaceholder tphRetType = TypePlaceholder.fresh(new NullToken(), -1, false);
TypePlaceholder tphRetType = TypePlaceholder.fresh(new NullToken());
List<RefTypeOrTPHOrWildcardOrGeneric> lambdaParams = lambdaExpression.params.getFormalparalist().stream().map((formalParameter -> formalParameter.getType())).collect(Collectors.toList());
lambdaParams.add(tphRetType);
// lambdaParams.add(0,tphRetType);
@@ -116,17 +118,18 @@ public class TYPEStmt implements StatementVisitor {
@Override
public void visit(FieldVar fieldVar) {
fieldVar.receiver.accept(this);
Set<Constraint> oderConstraints = new HashSet<>();
List<FieldAssumption> fieldAssumptions = info.getFields(fieldVar.fieldVarName);
Set<Constraint> oderConstraints = TypeUnifyTaskHelper.getPresizedHashSet(fieldAssumptions.size());
for (FieldAssumption fieldAssumption : info.getFields(fieldVar.fieldVarName)) {
for (FieldAssumption fieldAssumption : fieldAssumptions) {
Constraint constraint = new Constraint();
GenericsResolver resolver = getResolverInstance();
constraint.add(new Pair(fieldVar.receiver.getType(), fieldAssumption.getReceiverType(resolver), PairOperator.SMALLERDOT, loc(fieldVar.getOffset()))); // PL 2019-12-09: SMALLERDOT inserted, EQUALSDOT removed; if the field is private it would have to be EQUALSDOT
constraint.add(new Pair(fieldVar.getType(), fieldAssumption.getType(resolver), PairOperator.EQUALSDOT, loc(fieldVar.getOffset())));
oderConstraints.add(constraint);
}
if (oderConstraints.size() == 0)
if (oderConstraints.isEmpty())
throw new TypeinferenceException("Kein Feld " + fieldVar.fieldVarName + " gefunden", fieldVar.getOffset());
constraintsSet.addOderConstraint(oderConstraints);
}
@@ -141,7 +144,7 @@ public class TYPEStmt implements StatementVisitor {
@Override
public void visit(ForEachStmt forEachStmt) {
var iterableType = new RefType(ASTFactory.createClass(java.lang.Iterable.class).getClassName(), Arrays.asList(new ExtendsWildcardType(forEachStmt.statement.getType(), new NullToken())), new NullToken());
var iterableType = new RefType(ASTFactory.createClass(java.lang.Iterable.class).getClassName(), List.of(new ExtendsWildcardType(forEachStmt.statement.getType(), new NullToken())), new NullToken());
constraintsSet.addUndConstraint(new Pair(forEachStmt.expression.getType(), iterableType, PairOperator.SMALLERDOT, loc(forEachStmt.getOffset())));
forEachStmt.statement.accept(this);
forEachStmt.expression.accept(this);
@@ -189,7 +192,7 @@ public class TYPEStmt implements StatementVisitor {
methodCall.receiver.accept(this);
// Overloading:
Set<Constraint<Pair>> methodConstraints = new HashSet<>();
for (MethodAssumption m : this.getMethods(methodCall.name, methodCall.arglist, info)) {
for (MethodAssumption m : TYPEStmt.getMethods(methodCall.name, methodCall.arglist, info)) {
GenericsResolver resolver = getResolverInstance();
Set<Constraint<Pair>> oneMethodConstraints = generateConstraint(methodCall, m, info, resolver);
methodConstraints.addAll(oneMethodConstraints);
@@ -199,7 +202,7 @@ public class TYPEStmt implements StatementVisitor {
* oneMethodConstraint.setExtendConstraint(extendsOneMethodConstraint); extendsOneMethodConstraint.setExtendConstraint(oneMethodConstraint); methodConstraints.add(extendsOneMethodConstraint);
*/
}
if (methodConstraints.size() < 1) {
if (methodConstraints.isEmpty()) {
throw new TypeinferenceException("Methode " + methodCall.name + " ist nicht vorhanden!", methodCall.getOffset());
}
constraintsSet.addOderConstraint(methodConstraints);
@@ -212,7 +215,7 @@ public class TYPEStmt implements StatementVisitor {
for (MethodAssumption m : this.getConstructors(info, (RefType) methodCall.getType(), methodCall.getArgumentList())) {
methodConstraints.add(generateConstructorConstraint(methodCall, m, info, getResolverInstance()));
}
if (methodConstraints.size() < 1) {
if (methodConstraints.isEmpty()) {
throw new TypeinferenceException("Konstruktor in Klasse " + methodCall.getType().toString() + " ist nicht vorhanden!", methodCall.getOffset());
}
constraintsSet.addOderConstraint(methodConstraints);
@@ -282,8 +285,13 @@ public class TYPEStmt implements StatementVisitor {
// see: https://docs.oracle.com/javase/specs/jls/se7/html/jls-15.html#jls-15.17
// the expression must be convertible to Numeric, i.e. it must inherit from Numeric
Constraint<Pair> numeric;
HashSet<JavaClassName> classNames = TypeUnifyTaskHelper.getPresizedHashSet(info.getAvailableClasses().size());
for (var classEl : info.getAvailableClasses()) {
classNames.add(classEl.getClassName());
}
// PL added 2018-07-17
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(bytee.getName())) {
if (classNames.contains(bytee.getName())) {
numeric = new Constraint<>();
numeric.add(new Pair(binary.lexpr.getType(), bytee, PairOperator.SMALLERDOT, loc(binary.getOffset())));
numeric.add(new Pair(binary.rexpr.getType(), bytee, PairOperator.SMALLERDOT, loc(binary.getOffset())));
@@ -291,7 +299,7 @@ public class TYPEStmt implements StatementVisitor {
numericAdditionOrStringConcatenation.add(numeric);
}
// PL added 2018-07-17
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(shortt.getName())) {
if (classNames.contains(shortt.getName())) {
numeric = new Constraint<>();
numeric.add(new Pair(binary.lexpr.getType(), shortt, PairOperator.SMALLERDOT, loc(binary.getOffset())));
numeric.add(new Pair(binary.rexpr.getType(), shortt, PairOperator.SMALLERDOT, loc(binary.getOffset())));
@@ -299,7 +307,7 @@ public class TYPEStmt implements StatementVisitor {
numericAdditionOrStringConcatenation.add(numeric);
}
// PL added 2018-07-17
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(integer.getName())) {
if (classNames.contains(integer.getName())) {
numeric = new Constraint<>();
numeric.add(new Pair(binary.lexpr.getType(), integer, PairOperator.SMALLERDOT, loc(binary.getOffset())));
numeric.add(new Pair(binary.rexpr.getType(), integer, PairOperator.SMALLERDOT, loc(binary.getOffset())));
@@ -307,7 +315,7 @@ public class TYPEStmt implements StatementVisitor {
numericAdditionOrStringConcatenation.add(numeric);
}
// PL added 2018-07-17
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(longg.getName())) {
if (classNames.contains(longg.getName())) {
numeric = new Constraint<>();
numeric.add(new Pair(binary.lexpr.getType(), longg, PairOperator.SMALLERDOT, loc(binary.getOffset())));
numeric.add(new Pair(binary.rexpr.getType(), longg, PairOperator.SMALLERDOT, loc(binary.getOffset())));
@@ -315,7 +323,7 @@ public class TYPEStmt implements StatementVisitor {
numericAdditionOrStringConcatenation.add(numeric);
}
// PL added 2018-07-17
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(floatt.getName())) {
if (classNames.contains(floatt.getName())) {
numeric = new Constraint<>();
numeric.add(new Pair(binary.lexpr.getType(), floatt, PairOperator.SMALLERDOT, loc(binary.getOffset())));
numeric.add(new Pair(binary.rexpr.getType(), floatt, PairOperator.SMALLERDOT, loc(binary.getOffset())));
@@ -323,7 +331,7 @@ public class TYPEStmt implements StatementVisitor {
numericAdditionOrStringConcatenation.add(numeric);
}
// PL added 2018-07-17
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(doublee.getName())) {
if (classNames.contains(doublee.getName())) {
numeric = new Constraint<>();
numeric.add(new Pair(binary.lexpr.getType(), doublee, PairOperator.SMALLERDOT, loc(binary.getOffset())));
numeric.add(new Pair(binary.rexpr.getType(), doublee, PairOperator.SMALLERDOT, loc(binary.getOffset())));
@@ -338,7 +346,7 @@ public class TYPEStmt implements StatementVisitor {
if (binary.operation.equals(BinaryExpr.Operator.ADD)) {
// then the expression can also be the concatenation of two strings: ("a" + "b") or (1 + 2)
if (info.getAvailableClasses().stream().map(x -> x.getClassName()).collect(Collectors.toCollection(HashSet::new)).contains(string.getName())) {
if (classNames.contains(string.getName())) {
Constraint<Pair> stringConcat = new Constraint<>();
stringConcat.add(new Pair(binary.lexpr.getType(), string, PairOperator.EQUALSDOT, loc(binary.getOffset())));
stringConcat.add(new Pair(binary.rexpr.getType(), string, PairOperator.EQUALSDOT, loc(binary.getOffset())));
@@ -346,7 +354,7 @@ public class TYPEStmt implements StatementVisitor {
numericAdditionOrStringConcatenation.add(stringConcat);
}
}
if (numericAdditionOrStringConcatenation.size() < 1) {
if (numericAdditionOrStringConcatenation.isEmpty()) {
throw new TypeinferenceException("Kein Typ für " + binary.operation.toString() + " vorhanden", binary.getOffset());
}
constraintsSet.addOderConstraint(numericAdditionOrStringConcatenation);
@@ -635,7 +643,6 @@ public class TYPEStmt implements StatementVisitor {
params.add(resolver.resolve(new GenericRefType(gtv.getName(), new NullToken())));
}
RefTypeOrTPHOrWildcardOrGeneric receiverType;
if (receiver instanceof FunNClass) {
receiverType = new RefType(new JavaClassName(receiver.getClassName().toString() + "$$"), params, new NullToken()); // new FunN(params);
} else {
@@ -694,8 +701,8 @@ public class TYPEStmt implements StatementVisitor {
Set<Pair> methodSignatureConstraint = generatemethodSignatureConstraint(forMethod, assumption, info, resolver);
//System.out.println("methodSignatureConstraint: " + methodSignatureConstraint);
//System.out.println("methodConstraint: " + methodConstraint);
//context.logger().info("methodSignatureConstraint: " + methodSignatureConstraint);
//context.logger().info("methodConstraint: " + methodConstraint);
methodConstraint.setmethodSignatureConstraint(methodSignatureConstraint);
extendsMethodConstraint.setmethodSignatureConstraint(methodSignatureConstraint);
@@ -733,7 +740,7 @@ public class TYPEStmt implements StatementVisitor {
}
// maps MethodCall.signature(ReturnType) to the return type of the selected method (assumption.returnType)
ret.add(new Pair(foMethod.signature.get(foMethod.signature.size() - 1), assumption.getReturnType(), PairOperator.EQUALSDOT));
ret.add(new Pair(foMethod.signature.getLast(), assumption.getReturnType(), PairOperator.EQUALSDOT));
return ret;
}
@@ -746,8 +753,8 @@ public class TYPEStmt implements StatementVisitor {
// funNParams.add(TypePlaceholder.fresh(new NullToken()));
funNParams.add(new GenericRefType(NameGenerator.makeNewName(), new NullToken()));
}
funNParams.get(funNParams.size() - 1);
ret.add(new MethodAssumption(new FunNClass(funNParams), funNParams.get(funNParams.size() - 1), funNParams.subList(0, funNParams.size() - 1), new TypeScope() {
funNParams.getLast();
ret.add(new MethodAssumption(new FunNClass(funNParams), funNParams.getLast(), funNParams.subList(0, funNParams.size() - 1), new TypeScope() {
@Override
public Iterable<? extends GenericTypeVar> getGenerics() {
throw new NotImplementedException();
@@ -842,7 +849,7 @@ public class TYPEStmt implements StatementVisitor {
for (var child : switchStmt.getBlocks()) {
for (var label : child.getLabels()) {
if (label.getPattern() == null) {
//System.out.println("DefaultCase");
//context.logger().info("DefaultCase");
} else {
constraintsSet.addUndConstraint(
new Pair(
@@ -883,13 +890,9 @@ public class TYPEStmt implements StatementVisitor {
child.getLabels().forEach(el -> {
if (el.getType() instanceof RefType) {
var recType = el;
if (el.getPattern() instanceof RecordPattern) {
var pattern = (RecordPattern) recType.getPattern();
recursivelyAddRecordConstraints(pattern);
}
if (el.getPattern() instanceof RecordPattern pattern) {
recursivelyAddRecordConstraints(pattern);
}
}
});
@@ -905,13 +908,13 @@ public class TYPEStmt implements StatementVisitor {
var allClasses = info.getAvailableClasses();
var interestingClasses = allClasses.stream().filter(as -> as.getClassName().equals(((RefType) pattern.getType()).getName())).toList();
var constructors = interestingClasses.get(0).getConstructors();
var constructors = interestingClasses.getFirst().getConstructors();
int counter = 0;
for (var subPattern : pattern.getSubPattern()) {
for (Constructor con : constructors) {
//System.out.println("----------------------\n" + subPattern.getType() + " | " + con.getParameterList().getParameterAt(counter).getType() + "\n----------------------\n");
//context.logger().info("----------------------\n" + subPattern.getType() + " | " + con.getParameterList().getParameterAt(counter).getType() + "\n----------------------\n");
constraintsSet.addUndConstraint(new Pair(subPattern.getType(), con.getParameterList().getParameterAt(counter).getType(), PairOperator.SMALLERDOT, loc(con.getParameterList().getParameterAt(counter).getOffset())));
}
if (subPattern instanceof RecordPattern) recursivelyAddRecordConstraints((RecordPattern) subPattern);
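Several hunks above (in TYPE and TYPEStmt) replace plain new HashSet<>() with TypeUnifyTaskHelper.getPresizedHashSet(n). The helper's implementation is not part of this diff; a typical version, given here only as an assumption about what it likely does, chooses an initial capacity large enough that n insertions never trigger a rehash:

import java.util.HashSet;

// hypothetical sketch; the real TypeUnifyTaskHelper.getPresizedHashSet may differ
public final class PresizedHashSetSketch {
    public static <T> HashSet<T> getPresizedHashSet(int expectedSize) {
        // HashSet resizes once size > capacity * loadFactor (0.75 by default),
        // so request a capacity of at least expectedSize / 0.75
        return new HashSet<>((int) (expectedSize / 0.75f) + 1);
    }
}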

View File

@@ -0,0 +1,91 @@
package de.dhbwstuttgart.typeinference.unify;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.RecursiveTask;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* An intermediate class for the recursive steps of the TypeUnifyTask:
* This allows canceling parts of the recursion tree, instead of only the whole execution as before. But in
* order for that to work, all cancellable child tasks must be added when they are created
*
* @param <T>
*/
public abstract class CancellableTask<T> extends RecursiveTask<T> {
private final AtomicBoolean executionCancelled = new AtomicBoolean(false);
private final List<CancellableTask<?>> childTasks = new LinkedList<>();
private CancellableTask<?> parentTask = null;
/**
 * Mark the execution of this task and all of its (recursive) children as cancelled
*/
protected void cancelExecution() {
// is this branch already canceled? Then do nothing
if (this.executionCancelled.getAndSet(true)) return;
this.cancelChildExecution();
}
private void cancelChildExecution() {
synchronized (this.childTasks) {
for (var childTask : childTasks) {
// no need to cancel a branch that is already finished
if (!childTask.isDone()) {
childTask.cancelExecution();
}
}
}
}
private void cancelChildExecutionAfter(CancellableTask<?> checkpointTask) {
boolean reachedCheckpoint = false;
int i = 0;
for (var childTask : childTasks) {
if (!reachedCheckpoint) {
reachedCheckpoint = childTask == checkpointTask;
}
else {
// no need to cancel a branch that is already finished
if (!childTask.isDone()) {
childTask.cancelExecution();
}
i++;
}
}
System.out.println("Skipped " + i + " younger siblings");
}
protected void cancelSiblingTasks() {
if (this.parentTask != null) {
boolean thisWasCancelledBefore = this.executionCancelled.get();
this.parentTask.cancelChildExecution();
this.executionCancelled.set(thisWasCancelledBefore);
}
}
public void cancelYoungerSiblingTasks() {
if (this.parentTask != null) {
this.parentTask.cancelChildExecutionAfter(this);
}
}
public Boolean isExecutionCancelled() {
return executionCancelled.get();
}
public void addChildTask(CancellableTask<?> childTask) {
this.childTasks.add(childTask);
childTask.setParentTask(this);
if (this.executionCancelled.get()) {
childTask.executionCancelled.set(true);
}
}
private void setParentTask(CancellableTask<?> parentTask) {
this.parentTask = parentTask;
}
}
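
A minimal sketch of how a subclass is expected to cooperate with CancellableTask (not part of the diff; SearchBranchTask, its depth logic and the usage line are made up): each forked child is registered via addChildTask so that cancelExecution() reaches the whole subtree, and compute() polls isExecutionCancelled() to stop early.

import de.dhbwstuttgart.typeinference.unify.CancellableTask;

class SearchBranchTask extends CancellableTask<Integer> {
    private final int depth;

    SearchBranchTask(int depth) { this.depth = depth; }

    @Override
    protected Integer compute() {
        // cooperative cancellation: a cancelled branch returns a neutral result immediately
        if (isExecutionCancelled() || depth == 0) return 0;

        SearchBranchTask child = new SearchBranchTask(depth - 1);
        addChildTask(child);   // register before forking so cancellation can reach the child
        child.fork();
        return 1 + child.join();
    }
}

// usage: new java.util.concurrent.ForkJoinPool().invoke(new SearchBranchTask(5));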

View File

@@ -0,0 +1,61 @@
package de.dhbwstuttgart.typeinference.unify;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.RecursiveTask;
public class ConcurrentSetMergeTask<T> extends RecursiveTask<Set<T>> {
public static <E> Set<E> merge(List<Set<E>> list) {
if (list.isEmpty()) {
return new HashSet<>();
}
var task = new ConcurrentSetMergeTask<>(list, 0, list.size());
return task.compute();
}
private static final int LIST_THRESHOLD = 3;
private static final int ELEMENT_THRESHOLD = 1000;
private final List<Set<T>> list;
private final int start;
private final int end;
private ConcurrentSetMergeTask(List<Set<T>> list, int start, int end) {
this.list = list;
this.start = start;
this.end = end;
}
@Override
protected Set<T> compute() {
int size = end - start;
int totalElements = 0;
for (int i = start+1; i < end; i++) {
totalElements += list.get(i).size();
}
// size will always be at least one
if (true || size <= LIST_THRESHOLD || totalElements < ELEMENT_THRESHOLD) {
Set<T> result = this.list.get(start);
for (int i = start+1; i < end; i++) {
result.addAll(list.get(i));
}
return result;
} else {
int mid = start + (size / 2);
ConcurrentSetMergeTask<T> leftTask = new ConcurrentSetMergeTask<>(list, start, mid);
ConcurrentSetMergeTask<T> rightTask = new ConcurrentSetMergeTask<>(list, mid, end);
leftTask.fork();
Set<T> rightResult = rightTask.compute();
Set<T> leftResult = leftTask.join();
// Merge results
leftResult.addAll(rightResult);
return leftResult;
}
}
}
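
Usage goes through the static merge(...) entry point, which folds everything into the first set of the list (that set is therefore mutated in place). Note also that, as committed, the "true || ..." guard in compute() always takes the sequential branch, so the fork/join split below it is effectively disabled. A small usage sketch with made-up data:

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import de.dhbwstuttgart.typeinference.unify.ConcurrentSetMergeTask;

class MergeSketch {
    public static void main(String[] args) {
        List<Set<String>> parts = new ArrayList<>();
        parts.add(new HashSet<>(List.of("a", "b")));
        parts.add(new HashSet<>(List.of("b", "c")));
        parts.add(new HashSet<>(List.of("d")));

        Set<String> merged = ConcurrentSetMergeTask.merge(parts);
        System.out.println(merged);                 // [a, b, c, d] in some order
        System.out.println(merged == parts.get(0)); // true: the first set was reused
    }
}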

View File

@@ -94,8 +94,8 @@ public class MartelliMontanariUnify implements IUnify {
// SUBST - Rule
if(lhsType instanceof PlaceholderType) {
mgu.add((PlaceholderType) lhsType, rhsType);
//PL 2018-04-01: check later whether it is correct that no substitutions need to be passed here.
termsList = termsList.stream().map(x -> mgu.apply(x)).collect(Collectors.toCollection(ArrayList::new));
//PL 2018-04-01: check later whether it is correct that no substitutions need to be passed here.
termsList.replaceAll(mgu::apply);
idx = idx+1 == termsList.size() ? 0 : idx+1;
continue;
}
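
The same micro-optimisation appears in several hunks of this comparison (here and again in RuleSet.subst below): instead of streaming every element into a freshly allocated collection, List.replaceAll applies the mapping in place. A self-contained sketch with made-up data:

import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;

class ReplaceAllSketch {
    public static void main(String[] args) {
        List<String> terms = new ArrayList<>(List.of("a", "b", "c"));

        // old style: allocates a new list on every substitution step
        List<String> copy = terms.stream().map(String::toUpperCase)
                .collect(Collectors.toCollection(ArrayList::new));

        // new style: mutate the existing list in place, no extra allocation
        terms.replaceAll(String::toUpperCase);

        System.out.println(copy + " " + terms);   // [A, B, C] [A, B, C]
    }
}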

View File

@@ -0,0 +1,84 @@
package de.dhbwstuttgart.typeinference.unify;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import java.util.ArrayList;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Calculate unique placeholder names
*/
public class PlaceholderRegistry implements ISerializableData {
private final Set<String> existingPlaceholders = ConcurrentHashMap.newKeySet();
private final AtomicInteger placeholderCount = new AtomicInteger();
public ArrayList<PlaceholderType> UnifyTypeFactory_PLACEHOLDERS = new ArrayList<>();
/**
 * Add a placeholder to the set of existing ones as soon as a new PlaceholderType is created
*
* @param placeholderName The placeholder to add
*/
public void addPlaceholder(String placeholderName) {
this.existingPlaceholders.add(placeholderName);
}
/**
 * Generate a fresh placeholder name that is unique within this registry
*
* @return The generated name
*/
public String generateFreshPlaceholderName() {
String name;
do {
int pc = placeholderCount.incrementAndGet();
name = getUppercaseTokenFromInt(pc);
}
while (existingPlaceholders.contains(name));
this.addPlaceholder(name);
return name;
}
public PlaceholderRegistry deepClone() {
PlaceholderRegistry pr2 = new PlaceholderRegistry();
this.existingPlaceholders.forEach(pr2::addPlaceholder);
pr2.UnifyTypeFactory_PLACEHOLDERS.addAll(this.UnifyTypeFactory_PLACEHOLDERS);
pr2.placeholderCount.set(this.placeholderCount.get());
return pr2;
}
/**
 * Generate a token that consists only of uppercase letters, derived from the value i (a base-26 encoding)
*
* @param i The value that will be represented as a token
* @return The generated token
*/
private String getUppercaseTokenFromInt(int i) {
StringBuilder sb = new StringBuilder();
while (i >= 0) {
sb.append((char)(i % 26 + 65));
i = i / 26 - 1;
}
//sb.append(suffix);
return sb.toString();
}
@Override
public String toString() {
return this.existingPlaceholders.toString();
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("ph", new SerialValue<>(new ArrayList<>(this.existingPlaceholders)));
serialized.put("factoryPh", SerialList.fromMapped(this.UnifyTypeFactory_PLACEHOLDERS, t -> t.toSerial(keyStorage)));
return serialized;
}
}
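
A small sketch of the naming behaviour (not part of the diff): getUppercaseTokenFromInt is a base-26 encoding that maps 1 to "B", 2 to "C", ... and 26 to "AA", and generateFreshPlaceholderName skips any name that has already been registered.

import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;

class PlaceholderRegistrySketch {
    public static void main(String[] args) {
        PlaceholderRegistry registry = new PlaceholderRegistry();
        registry.addPlaceholder("B");   // pretend "B" is already used somewhere

        // counter value 1 would yield "B", which is taken, so the loop moves on to "C"
        String first = registry.generateFreshPlaceholderName();   // "C"
        String second = registry.generateFreshPlaceholderName();  // "D"

        // the deep clone copies the known names and the counter,
        // so it will not hand out "C" or "D" again either
        PlaceholderRegistry copy = registry.deepClone();
        System.out.println(first + " " + second + " " + copy);
    }
}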

View File

@@ -1,10 +1,12 @@
package de.dhbwstuttgart.typeinference.unify;
import de.dhbwstuttgart.util.Logger;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Optional;
import java.util.Queue;
import java.util.Set;
@@ -12,24 +14,16 @@ import java.util.Stack;
import java.util.function.Function;
import java.util.stream.Collectors;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.exceptions.DebugException;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.syntaxtree.type.ExtendsWildcardType;
import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
import de.dhbwstuttgart.typeinference.unify.model.*;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.distributeVariance;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.io.OutputStreamWriter;
import org.apache.commons.io.output.NullOutputStream;
/**
* Implementation of the type inference rules.
* @author Florian Steurer
@@ -37,15 +31,18 @@ import org.apache.commons.io.output.NullOutputStream;
*/
public class RuleSet implements IRuleSet{
Writer logFile;
Logger logger;
final PlaceholderRegistry placeholderRegistry;
public RuleSet() {
public RuleSet(PlaceholderRegistry placeholderRegistry) {
super();
logFile = new OutputStreamWriter(new NullOutputStream());
logger = Logger.NULL_LOGGER;
this.placeholderRegistry = placeholderRegistry;
}
RuleSet(Writer logFile) {
this.logFile = logFile;
RuleSet(Logger logger, PlaceholderRegistry placeholderRegistry) {
this.logger = logger;
this.placeholderRegistry = placeholderRegistry;
}
@Override
@@ -297,8 +294,8 @@ public class RuleSet implements IRuleSet{
if(dFromFc == null || !dFromFc.getTypeParams().arePlaceholders() || dFromFc.getTypeParams().size() != cFromFc.getTypeParams().size())
return Optional.empty();
//System.out.println("cFromFc: " + cFromFc);
//System.out.println("dFromFc: " + dFromFc);
//context.logger().info("cFromFc: " + cFromFc);
//context.logger().info("dFromFc: " + dFromFc);
int[] pi = pi(cFromFc.getTypeParams(), dFromFc.getTypeParams());
if(pi.length == 0)
@@ -507,17 +504,17 @@ public class RuleSet implements IRuleSet{
TypeParams typeDParams = typeD.getTypeParams();
TypeParams typeDgenParams = typeDgen.getTypeParams();
//System.out.println("Pair: " +pair);
//System.out.println("typeD: " +typeD);
//System.out.println("typeDParams: " +typeDParams);
//System.out.println("typeDgen: " +typeD);
//System.out.println("typeDgenParams: " +typeDgenParams);
//context.logger().info("Pair: " +pair);
//context.logger().info("typeD: " +typeD);
//context.logger().info("typeDParams: " +typeDParams);
//context.logger().info("typeDgen: " +typeD);
//context.logger().info("typeDgenParams: " +typeDgenParams);
Unifier unif = Unifier.identity();
for(int i = 0; i < typeDParams.size(); i++) {
//System.out.println("ADAPT" +typeDgenParams);
//context.logger().info("ADAPT" +typeDgenParams);
if (typeDgenParams.get(i) instanceof PlaceholderType)
unif.add((PlaceholderType) typeDgenParams.get(i), typeDParams.get(i));
else System.out.println("ERROR");
unif.add((PlaceholderType) typeDgenParams.get(i), typeDParams.get(i));
else logger.exception(new Exception("ERROR in adapt rule: cannot add non placeholder type"));
}
return Optional.of(new UnifyPair(unif.apply(newLhs), typeDs, PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
}
@@ -652,15 +649,17 @@ public class RuleSet implements IRuleSet{
@Override
public Optional<Set<UnifyPair>> subst(Set<UnifyPair> pairs, List<Set<Constraint<UnifyPair>>> oderConstraints) {
HashMap<UnifyType, Integer> typeMap = new HashMap<>();
// Statistically, typeMap will fill up quickly and resize multiple times. To reduce this, we start with a higher capacity
HashMap<UnifyType, Integer> typeMap = new HashMap<>(200);
Stack<UnifyType> occuringTypes = new Stack<>();
occuringTypes.ensureCapacity(pairs.size() * 3);
for(UnifyPair pair : pairs) {
occuringTypes.push(pair.getLhsType());
occuringTypes.push(pair.getRhsType());
}
while(!occuringTypes.isEmpty()) {
UnifyType t1 = occuringTypes.pop();
if(!typeMap.containsKey(t1))
@@ -672,12 +671,12 @@ public class RuleSet implements IRuleSet{
if(t1 instanceof SuperType)
occuringTypes.push(((SuperType) t1).getSuperedType());
else
t1.getTypeParams().forEach(x -> occuringTypes.push(x));
t1.getTypeParams().forEach(occuringTypes::push);
}
Queue<UnifyPair> result1 = new LinkedList<UnifyPair>(pairs);
LinkedList<UnifyPair> result1 = new LinkedList<UnifyPair>(pairs);
ArrayList<UnifyPair> result = new ArrayList<UnifyPair>();
boolean applied = false;
while(!result1.isEmpty()) {
UnifyPair pair = result1.poll();
PlaceholderType lhsType = null;
@@ -695,19 +694,30 @@ public class RuleSet implements IRuleSet{
&& !((rhsType instanceof WildcardType) && ((WildcardType)rhsType).getWildcardedType().equals(lhsType))) //PL eigefuegt 2018-02-18
{
Unifier uni = new Unifier(lhsType, rhsType);
result = result.stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(ArrayList::new));
result1 = result1.stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(LinkedList::new));
// apply unifier to result and result1 in place
result.replaceAll(p -> uni.apply(pair, p));
ListIterator<UnifyPair> result1Iterator = result1.listIterator();
while (result1Iterator.hasNext()) {
UnifyPair x = result1Iterator.next();
result1Iterator.set(uni.apply(pair, x));
}
Function<? super Constraint<UnifyPair>,? extends Constraint<UnifyPair>> applyUni = b -> b.stream().map(
x -> uni.apply(pair,x)).collect(Collectors.toCollection((b.getExtendConstraint() != null)
? () -> new Constraint<UnifyPair>(
b.isInherited(),
b.isImplemented(),
b.getExtendConstraint().stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(Constraint::new)),
b.getExtendConstraint().createdMapped(x -> uni.apply(pair,x)),
b.getmethodSignatureConstraint().stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(HashSet::new)))
: () -> new Constraint<UnifyPair>(b.isInherited(), b.isImplemented())
));
oderConstraints.replaceAll(oc -> oc.stream().map(applyUni).collect(Collectors.toCollection(HashSet::new)));
oderConstraints.replaceAll(oc -> {
HashSet<Constraint<UnifyPair>> mapped = new HashSet<>(oc.size());
for (var element : oc) {
mapped.add(applyUni.apply(element));
}
return mapped;
});
/*
oderConstraints = oderConstraints.stream().map(
a -> a.stream().map(applyUni
@@ -861,14 +871,11 @@ public class RuleSet implements IRuleSet{
UnifyType r = x.getRhsType();
if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
} );
try {
logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNred: " + result + "\n");
logFile.flush();
}
catch (IOException e) {
System.out.println("logFile-Error");
}
logger.debug(() -> "FUNgreater: " + pair);
logger.debug(() -> "FUNred: " + result);
return Optional.of(result);
}
@@ -934,15 +941,15 @@ public class RuleSet implements IRuleSet{
Set<UnifyPair> result = new HashSet<UnifyPair>();
Integer variance = ((PlaceholderType)rhsType).getVariance();
Integer inversVariance = distributeVariance.inverseVariance(variance);
int variance = ((PlaceholderType)rhsType).getVariance();
int inversVariance = distributeVariance.inverseVariance(variance);
UnifyType[] freshPlaceholders = new UnifyType[funNLhsType.getTypeParams().size()];
for(int i = 0; i < freshPlaceholders.length-1; i++) {
freshPlaceholders[i] = PlaceholderType.freshPlaceholder();
freshPlaceholders[i] = PlaceholderType.freshPlaceholder(placeholderRegistry);
((PlaceholderType)freshPlaceholders[i]).setVariance(inversVariance);
}
freshPlaceholders[freshPlaceholders.length-1] = PlaceholderType.freshPlaceholder();
freshPlaceholders[freshPlaceholders.length-1] = PlaceholderType.freshPlaceholder(placeholderRegistry);
((PlaceholderType)freshPlaceholders[freshPlaceholders.length-1]).setVariance(variance);
result.add(new UnifyPair(funNLhsType.getTypeParams().get(funNLhsType.getTypeParams().size()-1), freshPlaceholders[funNLhsType.getTypeParams().size()-1], PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
@@ -953,18 +960,14 @@ public class RuleSet implements IRuleSet{
result.add(new UnifyPair(rhsType, funNLhsType.setTypeParams(new TypeParams(freshPlaceholders)), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
result.stream().forEach(x -> { UnifyType l = x.getLhsType();
if (l instanceof PlaceholderType) { ((PlaceholderType)l).disableWildcardtable(); }
if (l instanceof PlaceholderType) { ((PlaceholderType)l).disableWildcardtable(); }
UnifyType r = x.getRhsType();
if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
} );
try {
logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNgreater: " + result + "\n");
logFile.flush();
}
catch (IOException e) {
System.out.println("lofFile-Error");
}
logger.debug(() -> "FUNgreater: " + pair);
logger.debug(() -> "FUNgreater: " + result);
return Optional.of(result);
}
@@ -983,15 +986,15 @@ public class RuleSet implements IRuleSet{
Set<UnifyPair> result = new HashSet<UnifyPair>();
Integer variance = ((PlaceholderType)lhsType).getVariance();
Integer inversVariance = distributeVariance.inverseVariance(variance);
int variance = ((PlaceholderType)lhsType).getVariance();
int inversVariance = distributeVariance.inverseVariance(variance);
UnifyType[] freshPlaceholders = new UnifyType[funNRhsType.getTypeParams().size()];
for(int i = 0; i < freshPlaceholders.length-1; i++) {
freshPlaceholders[i] = PlaceholderType.freshPlaceholder();
freshPlaceholders[i] = PlaceholderType.freshPlaceholder(placeholderRegistry);
((PlaceholderType)freshPlaceholders[i]).setVariance(inversVariance);
}
freshPlaceholders[freshPlaceholders.length-1] = PlaceholderType.freshPlaceholder();
freshPlaceholders[freshPlaceholders.length-1] = PlaceholderType.freshPlaceholder(placeholderRegistry);
((PlaceholderType)freshPlaceholders[freshPlaceholders.length-1]).setVariance(variance);
result.add(new UnifyPair(freshPlaceholders[funNRhsType.getTypeParams().size()-1], funNRhsType.getTypeParams().get(funNRhsType.getTypeParams().size()-1), PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
@@ -1003,18 +1006,15 @@ public class RuleSet implements IRuleSet{
result.add(new UnifyPair(lhsType, funNRhsType.setTypeParams(new TypeParams(freshPlaceholders)), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
result.stream().forEach(x -> { UnifyType l = x.getLhsType();
if (l instanceof PlaceholderType) { ((PlaceholderType)l).disableWildcardtable(); }
if (l instanceof PlaceholderType) { ((PlaceholderType)l).disableWildcardtable(); }
UnifyType r = x.getRhsType();
if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
} );
try {
logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNsmaller: " + result + "\n");
logFile.flush();
}
catch (IOException e) {
System.out.println("lofFile-Error");
}
logger.debug(() -> "FUNgreater: " + pair);
logger.debug(() -> "FUNsmaller: " + result);
return Optional.of(result);
}
@@ -1051,7 +1051,7 @@ public class RuleSet implements IRuleSet{
if(isGen)
result.add(new UnifyPair(rhsType, lhsType, PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
else {
UnifyType freshTph = PlaceholderType.freshPlaceholder();
UnifyType freshTph = PlaceholderType.freshPlaceholder(placeholderRegistry);
result.add(new UnifyPair(rhsType, new ExtendsType(freshTph), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
result.add(new UnifyPair(extendedType, freshTph, PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
}
@@ -1079,7 +1079,7 @@ public class RuleSet implements IRuleSet{
if(isGen)
result.add(new UnifyPair(rhsType, lhsType, PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
else {
UnifyType freshTph = PlaceholderType.freshPlaceholder();
UnifyType freshTph = PlaceholderType.freshPlaceholder(placeholderRegistry);
result.add(new UnifyPair(rhsType, new SuperType(freshTph), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
Set<UnifyType> fBounded = pair.getfBounded();
fBounded.add(lhsType);
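
The logFile writes in this file are replaced by logger.debug(() -> ...) calls: the message is passed as a Supplier, so the string concatenation only happens when debug output is actually enabled. The project's de.dhbwstuttgart.util.Logger is not shown in this comparison, so the class below is only an illustrative stand-in for the lazy-evaluation idea, not its real API.

import java.util.function.Supplier;

class LazyLoggerSketch {
    private final boolean debugEnabled;

    LazyLoggerSketch(boolean debugEnabled) { this.debugEnabled = debugEnabled; }

    // eager variant: the caller always pays for building the message
    void debug(String message) {
        if (debugEnabled) System.out.println(message);
    }

    // lazy variant: the message is only built when debug output is enabled
    void debug(Supplier<String> message) {
        if (debugEnabled) System.out.println(message.get());
    }

    public static void main(String[] args) {
        LazyLoggerSketch log = new LazyLoggerSketch(false);
        log.debug(() -> "FUNgreater: " + expensiveToString());   // expensiveToString() is never called
    }

    private static String expensiveToString() { return "..."; }
}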

View File

@@ -1,90 +1,65 @@
package de.dhbwstuttgart.typeinference.unify;
import java.io.FileWriter;
import de.dhbwstuttgart.util.Logger;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ForkJoinPool;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
public class TypeUnify {
private TypeUnify() {}
private static <T> T joinFuture(CompletableFuture<T> future) {
try {
return future.get();
}
catch (InterruptedException | ExecutionException exception) {
throw new RuntimeException(exception);
}
}
/**
* parallel unify without a result model
* @param undConstrains
* @param oderConstraints
* @param fc
* @param logFile
* @param log
* @param cons
* @return
*/
public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
ForkJoinPool pool = new ForkJoinPool();
pool.invoke(unifyTask);
Set<Set<UnifyPair>> res = unifyTask.join();
try {
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements + "\n");
logFile.flush();
}
catch (IOException e) {
System.err.println("no log-File");
}
public static Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
ForkJoinPool pool = TypeUnify.createThreadPool(unifyContext.logger());
UnifyContext context = unifyContext.newWithParallel(true).newWithExecutor(pool);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, context, 0);
Set<Set<UnifyPair>> res = joinFuture(unifyTask.compute());
unifyContext.logger().debug("\nnoShortendElements: " + TypeUnifyTask.noShortendElements + "\n");
return res;
}
/**
* asynchronous unify that returns the UnifyResultModel without waiting for all results to be collected
* @param undConstrains
* @param oderConstraints
* @param fc
* @param logFile
* @param log
* @param cons
* @param ret
* @return
*/
public UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
ForkJoinPool pool = new ForkJoinPool();
pool.invoke(unifyTask);
return ret;
public static UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
ForkJoinPool pool = TypeUnify.createThreadPool(unifyContext.logger());
UnifyContext context = unifyContext.newWithExecutor(pool);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, context, 0);
unifyTask.compute();
return unifyContext.resultModel();
}
/**
* parallel unify that returns after all results have been collected
* @param undConstrains
* @param oderConstraints
* @param fc
* @param logFile
* @param log
* @param cons
* @param ret
* @return
*/
public UnifyResultModel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
ForkJoinPool pool = new ForkJoinPool();
pool.invoke(unifyTask);
Set<Set<UnifyPair>> res = unifyTask.join();
try {
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
logFile.flush();
}
catch (IOException e) {
System.err.println("no log-File");
}
return ret;
public static Set<Set<UnifyPair>> unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
ForkJoinPool pool = TypeUnify.createThreadPool(unifyContext.logger());
UnifyContext context = unifyContext.newWithParallel(true).newWithExecutor(pool);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, context, 0);
var result = joinFuture(unifyTask.compute());
unifyContext.logger().debug("\nnoShortendElements: " + TypeUnifyTask.noShortendElements + "\n");
return result;
}
/*
@@ -97,25 +72,22 @@ public class TypeUnify {
/**
* sequential unify with oder-constraints
* @param undConstrains
* @param oderConstraints
* @param fc
* @param logFile
* @param log
* @param cons
* @return
*/
public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0, ret, usedTasks);
Set<Set<UnifyPair>> res = unifyTask.compute();
try {
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
logFile.flush();
}
catch (IOException e) {
System.err.println("no log-File");
}
public static Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, unifyContext.newWithParallel(false), 0);
Set<Set<UnifyPair>> res = joinFuture(unifyTask.compute());
unifyContext.logger().debug("\nnoShortendElements: " + TypeUnifyTask.noShortendElements +"\n");
return res;
}
private static ForkJoinPool createThreadPool(Logger logger) {
logger.info("Available processors: " + Runtime.getRuntime().availableProcessors());
return new ForkJoinPool(
Runtime.getRuntime().availableProcessors(),
ForkJoinPool.defaultForkJoinWorkerThreadFactory,
null,
false
);
}
}
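
With the Writer, log flag and result model folded into UnifyContext, a call site now looks roughly like the sketch below. The constraint sets, finite closure, result model, task model and placeholder registry come from elsewhere in the compiler, and the import paths are inferred from the packages shown above, so treat this as an assumption-laden illustration rather than working code.

import java.util.List;
import java.util.Set;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.util.Logger;

class TypeUnifyCallSketch {
    static Set<Set<UnifyPair>> run(Set<UnifyPair> undConstraints,
                                   List<Set<Constraint<UnifyPair>>> oderConstraints,
                                   IFiniteClosure fc,
                                   UnifyResultModel resultModel,
                                   UnifyTaskModel usedTasks,
                                   PlaceholderRegistry placeholderRegistry) {
        // bundle logger, flags and shared state once instead of passing them separately
        UnifyContext ctx = new UnifyContext(Logger.NULL_LOGGER, true, resultModel, usedTasks, placeholderRegistry);
        // unify() swaps in its own ForkJoinPool and parallel flag via the newWith... copies
        return TypeUnify.unify(undConstraints, oderConstraints, fc, ctx);
    }
}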

View File

@@ -13,54 +13,46 @@ import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import java.util.concurrent.CompletableFuture;
public class TypeUnify2Task extends TypeUnifyTask {
Set<Set<UnifyPair>> setToFlatten;
Set<UnifyPair> methodSignatureConstraintUebergabe;
public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks, Set<UnifyPair> methodSignatureConstraintUebergabe) {
super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
this.setToFlatten = setToFlatten;
this.nextSetElement = nextSetElement;
this.methodSignatureConstraintUebergabe = methodSignatureConstraintUebergabe;
}
Set<UnifyPair> getNextSetElement() {
return nextSetElement;
}
@Override
protected Set<Set<UnifyPair>> compute() {
if (one) {
System.out.println("two");
}
one = true;
Set<Set<UnifyPair>> res = unify2(setToFlatten, eq, oderConstraintsField, fc, parallel, rekTiefeField, methodSignatureConstraintUebergabe);
Set<Set<UnifyPair>> setToFlatten;
Set<UnifyPair> methodSignatureConstraintUebergabe;
public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, UnifyContext context, int rekTiefe, Set<UnifyPair> methodSignatureConstraintUebergabe) {
super(eq, oderConstraints, fc, context, rekTiefe);
this.setToFlatten = setToFlatten;
this.nextSetElement = nextSetElement;
this.methodSignatureConstraintUebergabe = methodSignatureConstraintUebergabe;
}
public Set<UnifyPair> getNextSetElement() {
return nextSetElement;
}
@Override
public CompletableFuture<Set<Set<UnifyPair>>> compute() {
if (one) {
context.logger().info("two");
}
one = true;
CompletableFuture<Set<Set<UnifyPair>>> res =
unify2(setToFlatten, eq, oderConstraintsField, fc, context.parallel(), rekTiefeField, methodSignatureConstraintUebergabe);
/*if (isUndefinedPairSetSet(res)) {
return new HashSet<>(); }
else
*/
//writeLog("xxx");
//noOfThread--;
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
else {
return res;
}
}
}
public void closeLogFile() {
*/
//writeLog("xxx");
//noOfThread--;
if (this.isExecutionCancelled()) {
return CompletableFuture.completedFuture(new HashSet<>());
} else {
return res;
}
}
try {
logFile.close();
}
catch (IOException ioE) {
System.err.println("no log-File" + thNo);
}
}
public void closeLogFile() {
context.logger().close();
}
}

View File

@@ -0,0 +1,221 @@
package de.dhbwstuttgart.typeinference.unify;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
* A collection of encapsulated (and therefore static) helper functions that split up the large algorithms in TypeUnifyTask
*/
public class TypeUnifyTaskHelper {
/**
* Filter all topLevelSets for those with a single element that contain only one pair:
* a <. theta,
* theta <. a or
* a =. theta
*/
public static Set<Set<UnifyPair>> getSingleElementSets(ArrayList<Set<? extends Set<UnifyPair>>> topLevelSets) {
return topLevelSets.stream()
.filter(x -> x.size() == 1)
.map(y -> y.stream().findFirst().get()).collect(Collectors.toSet());
}
/**
* Variance determination (start)
* oder-constraint if there is either no base pair or the base pairs differ => oderConstraint = true;
* variance = 1  => argument variable
* variance = -1 => return variable
* variance = 0  => unclear
* variance = 2  => operator oder-constraints
*/
public static int calculateVariance(List<Set<UnifyPair>> nextSetasList) {
Optional<Integer> xi = nextSetasList.stream().map(x -> x.stream().filter(y -> (y.getLhsType() instanceof PlaceholderType && !(y.getRhsType() instanceof PlaceholderType)))
.filter(z -> ((PlaceholderType) z.getLhsType()).getVariance() != 0)
.map(c -> ((PlaceholderType) c.getLhsType()).getVariance())
.reduce((a, b) -> {
if (a.intValue() == b.intValue()) return a;
else return 0;
})) //2 kommt insbesondere bei Oder-Constraints vor
.filter(Optional::isPresent)
.map(Optional::get)
.findAny();
return xi.orElse(0);
}
/**
* Determine the variance for oder-constraints from the ground base pairs of the first alternative; defaults to 2 (operator constraints).
*/
public static int calculateOderConstraintVariance(List<Set<UnifyPair>> nextSetAsList) {
Optional<Integer> optVariance =
nextSetAsList
.getFirst()
.stream()
.filter(x -> x.getGroundBasePair().getLhsType() instanceof PlaceholderType &&
!(x.getRhsType() instanceof PlaceholderType) &&
x.getPairOp() == PairOperator.EQUALSDOT)
.map(x ->
((PlaceholderType) x.getGroundBasePair().getLhsType()).getVariance())
.reduce((n, m) -> (n != 0) ? n : m);
//For operator calls the variance is set to 2,
//because no receiver exists, i.e. no x.getGroundBasePair().getLhsType() instanceof PlaceholderType.
//With variance = 2 all elements of the cartesian product are processed.
return optVariance.orElse(2);
}
/**
* Find the first UnifyPair (if any) with operator EQUALSDOT whose left-hand side
* equals one side of its base pair
*/
public static Optional<UnifyPair> findEqualityConstrainedUnifyPair(Set<UnifyPair> nextSetElement) {
return nextSetElement.stream().filter(x ->
x.getPairOp()
.equals(PairOperator.EQUALSDOT))
.filter(x -> //make sure that for a = ty, a really is the type variable we are looking for
x.getLhsType()
.equals(x.getBasePair().getLhsType()) ||
x.getLhsType()
.equals(x.getBasePair().getRhsType())
).findFirst();
}
/**
* Find all UnifyPairs that associate the identified type variable of origPair with any concrete type. That means:
* If "a = type" is in origPair, then we get all UnifyPairs that contain either "a < typeA" or "typeB < a"
*/
public static Set<UnifyPair> findConstraintsWithSameTVAssociation(UnifyPair origPair, Set<Set<UnifyPair>> singleElementSets) {
UnifyType tyVar = origPair.getLhsType();
if (!(tyVar instanceof PlaceholderType)) {
tyVar = origPair.getRhsType();
}
UnifyType tyVarEF = tyVar;
return singleElementSets.stream()
.map(xx ->
xx.iterator().next())
.filter(x ->
(x.getLhsType().equals(tyVarEF) && !(x.getRhsType() instanceof PlaceholderType))
||
(x.getRhsType().equals(tyVarEF) && !(x.getLhsType() instanceof PlaceholderType))
)
.collect(Collectors.toCollection(HashSet::new));
}
/**
* Check whether all pairs in the first next-set share one and the same (non-null) base pair.
*/
public static boolean doesFirstNextSetHasSameBase(List<Set<UnifyPair>> nextSetAsList) {
if (nextSetAsList.isEmpty()) {
return false;
}
UnifyPair firstBasePair = null;
for (var unifyPair : nextSetAsList.getFirst().stream().toList()) {
var basePair = unifyPair.getBasePair();
// if any base pair is null, there is NOT always the same base!
if (basePair == null) {
return false;
}
if (firstBasePair == null) {
firstBasePair = basePair;
}
else if (!basePair.equals(firstBasePair)) {
return false;
}
}
return true;
}
/**
* Extract data from every element in the nested result sets; which data is extracted depends on the given
* extractor function
*/
public static Set<UnifyPair> collectFromThreadResult (
Set<Set<UnifyPair>> currentThreadResult,
Function<UnifyPair, Set<UnifyPair>> extractor
) {
return currentThreadResult.stream()
.map(b ->
b.stream()
.map(extractor)
.reduce((y, z) -> {
y.addAll(z);
return y;
})
.orElse(new HashSet<>()))
.reduce((y, z) -> {
y.addAll(z);
return y;
})
.orElse(new HashSet<>());
}
/**
* Extract a list of PlaceholderTypes from a set of pairs, such that each resulting element:
* - is the LHS of a pair
* - is a PlaceholderType
* - has a base pair side that is a PlaceholderType with the same name
*/
public static List<PlaceholderType> extractMatchingPlaceholderTypes(Set<UnifyPair> pairs) {
return pairs.stream()
.filter(x -> {
UnifyType lhs = x.getLhsType();
UnifyType baseLhs = x.getBasePair().getLhsType();
UnifyType baseRhs = x.getBasePair().getRhsType();
return (lhs instanceof PlaceholderType) &&
((baseLhs instanceof PlaceholderType && lhs.getName().equals(baseLhs.getName())) ||
(baseRhs instanceof PlaceholderType && lhs.getName().equals(baseRhs.getName())));
})
.map(x -> (PlaceholderType) x.getLhsType())
.collect(Collectors.toCollection(ArrayList::new));
}
public static Set<UnifyPair> occursCheck(final Set<UnifyPair> eq) {
Set<UnifyPair> ocurrPairs = new HashSet<>(eq.size());
for (UnifyPair x : eq) {
UnifyType lhs = x.getLhsType();
UnifyType rhs = x.getRhsType();
if (lhs instanceof PlaceholderType lhsPlaceholder &&
!(rhs instanceof PlaceholderType) &&
rhs.getTypeParams().occurs(lhsPlaceholder))
{
x.setUndefinedPair();
ocurrPairs.add(x);
}
}
return ocurrPairs;
}
public static <T> HashSet<T> getPresizedHashSet(int minElements) {
if (minElements < 16) return new HashSet<>();
// HashSet and HashMap will resize at 75% load, so we account for that by multiplying with 1.5
int n = (int)(minElements * 1.5);
return new HashSet<>(n);
}
public static <S,T> HashMap<S,T> getPresizedHashMap(int minElements) {
if (minElements < 16) return new HashMap<>();
// HashSet and HashMap will resize at 75% load, so we account for that by multiplying with 1.5
int n = (int)(minElements * 1.5);
return new HashMap<>(n);
}
}
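
The sizing helpers at the end encode the usual HashMap/HashSet rule of thumb: with the default load factor of 0.75 a table is resized once it holds more than capacity * 0.75 entries, so pre-sizing with roughly elements * 1.5 (slightly more than elements / 0.75) avoids rehashing while the collection fills up. A tiny sketch of what a call expands to; the numbers are arbitrary.

import java.util.HashMap;
import java.util.HashSet;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTaskHelper;

class PresizedSketch {
    public static void main(String[] args) {
        int expected = 1000;

        // via the helpers: initial capacity 1500 instead of the default 16
        HashSet<String> set = TypeUnifyTaskHelper.getPresizedHashSet(expected);
        HashMap<String, Integer> map = TypeUnifyTaskHelper.getPresizedHashMap(expected);

        // equivalent hand-written form
        HashMap<String, Integer> manual = new HashMap<>((int) (expected * 1.5));
        System.out.println(set.size() + " " + map.size() + " " + manual.size()); // all empty, just presized
    }
}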

View File

@@ -0,0 +1,67 @@
package de.dhbwstuttgart.typeinference.unify;
import de.dhbwstuttgart.util.Logger;
import java.io.Writer;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ForkJoinPool;
public record UnifyContext(
// main logger of a unification
Logger logger,
// whether the unify algorithm should run in parallel
boolean parallel,
// the model for storing calculated results
UnifyResultModel resultModel,
// the executor used for thread management in parallel execution
ExecutorService executor,
// a generator for new placeholders in this unify context
PlaceholderRegistry placeholderRegistry,
// a control structure to cancel the unification early
UnifyTaskModel usedTasks
) {
public UnifyContext(
Logger logger,
boolean parallel,
UnifyResultModel resultModel,
UnifyTaskModel usedTasks,
ExecutorService executor,
PlaceholderRegistry placeholderRegistry
) {
this(logger, parallel, resultModel, executor, placeholderRegistry, usedTasks);
}
public UnifyContext(
Logger logger,
boolean parallel,
UnifyResultModel resultModel,
UnifyTaskModel usedTasks,
PlaceholderRegistry placeholderRegistry
) {
this(logger, parallel, resultModel, usedTasks, ForkJoinPool.commonPool(), placeholderRegistry);
}
/*
* Shortcuts for creating a similar context with some properties changed. Combined with the final record components,
* this causes the UnifyContext to be handled essentially as an immutable (copy-on-write) value.
*/
public UnifyContext newWithLogger(Logger logger) {
return new UnifyContext(logger, parallel, resultModel, executor, placeholderRegistry, usedTasks);
}
public UnifyContext newWithParallel(boolean parallel) {
if (this.parallel == parallel) return this;
return new UnifyContext(logger, parallel, resultModel, executor, placeholderRegistry, usedTasks);
}
public UnifyContext newWithExecutor(ExecutorService executor) {
return new UnifyContext(logger, parallel, resultModel, executor, placeholderRegistry, usedTasks);
}
public UnifyContext newWithResultModel(UnifyResultModel resultModel) {
return new UnifyContext(logger, parallel, resultModel, executor, placeholderRegistry, usedTasks);
}
}
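
Because every component of the record is final, each newWith... call leaves the original context untouched and returns a modified copy (or this itself when nothing changes, as in newWithParallel). A short sketch of the intended copy-on-write usage; forSequentialRun and ctx are made-up names.

import java.util.concurrent.ForkJoinPool;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;

class UnifyContextSketch {
    static UnifyContext forSequentialRun(UnifyContext ctx) {
        // ctx itself is never mutated; each step yields a (possibly new) context
        UnifyContext sequential = ctx.newWithParallel(false);        // same instance if already sequential
        return sequential.newWithExecutor(ForkJoinPool.commonPool());
    }
}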

View File

@@ -36,19 +36,19 @@ public class UnifyResultModel {
listeners.remove(listenerToRemove);
}
public void notify(Set<Set<UnifyPair>> eqPrimePrimeSet) {
public void notify(Set<Set<UnifyPair>> eqPrimePrimeSet, UnifyContext context) {
Set<Set<UnifyPair>> eqPrimePrimeSetRet = eqPrimePrimeSet.stream().map(x -> {
Optional<Set<UnifyPair>> res = new RuleSet().subst(x.stream().map(y -> {
Optional<Set<UnifyPair>> res = new RuleSet(context.placeholderRegistry()).subst(x.stream().map(y -> {
if (y.getPairOp() == PairOperator.SMALLERDOTWC) y.setPairOp(PairOperator.EQUALSDOT);
return y; //all pairs a <.? b are replaced by a =. b
}).collect(Collectors.toCollection(HashSet::new)));
if (res.isPresent()) {//if subst returns a result, something was changed
return new TypeUnifyTask().applyTypeUnificationRules(res.get(), fc);
return new TypeUnifyTask(context).applyTypeUnificationRules(res.get(), fc);
}
else return x; //if nothing was changed, x is returned
}).collect(Collectors.toCollection(HashSet::new));
List<ResultSet> newResult = eqPrimePrimeSetRet.stream().map(unifyPairs ->
new ResultSet(UnifyTypeFactory.convert(unifyPairs, de.dhbwstuttgart.typeinference.constraints.Pair.generateTPHMap(cons))))
new ResultSet(UnifyTypeFactory.convert(unifyPairs, de.dhbwstuttgart.typeinference.constraints.Pair.generateTPHMap(cons), context.placeholderRegistry())))
.collect(Collectors.toList());
UnifyResultEvent evt = new UnifyResultEvent(newResult);

View File

@@ -12,7 +12,7 @@ public class UnifyTaskModel {
public synchronized void cancel() {
for(TypeUnifyTask t : usedTasks) {
t.myCancel(true);
t.cancelExecution();
}
}
}

View File

@@ -0,0 +1,209 @@
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.util.Tuple;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
public class ContravarianceCase extends VarianceCase {
protected final int variance = 1;
protected ContravarianceCase(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
super(isOderConstraint, typeUnifyTask, context);
}
@Override
public void selectNextData(
TypeUnifyTask typeUnifyTask,
List<Set<UnifyPair>> nextSetAsList,
Optional<UnifyPair> optOrigPair
) {
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
context.logger().debug("Max: a in " + variance + " " + a);
nextSetAsList.remove(a);
if (this.isOderConstraint) {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
}
context.logger().debug(() -> "nextSetasListOderConstraints 1: " + nextSetasListOderConstraints);
//Determine all maximal elements in nextSetasListRest
//parallel computation is only started for these.
Set<UnifyPair> finalA = a;
nextSetasListRest = typeUnifyTask.oup.maxElements(
nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != 1).toList()
);
}
@Override
public CompletableFuture<ComputationResults> computeParallel(
Set<Set<UnifyPair>> elems,
Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
IFiniteClosure fc,
int rekTiefe,
Set<UnifyPair> methodSignatureConstraint,
List<Set<UnifyPair>> nextSetAsList,
Set<UnifyPair> sameEqSet,
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> aParDef
) {
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
newElemsOrig.add(a);
/* FORK ANFANG */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, methodSignatureConstraint);
typeUnifyTask.addChildTask(forkOrig);
// schedule compute() on another thread
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
CompletableFuture<ComputationResults> resultValues = forkOrigFuture.thenApply(
(currentThreadResult) -> {
forkOrig.context.logger().debug("final Orig 1");
forkOrig.closeLogFile();
return new ComputationResults(currentThreadResult);
});
//forks.add(forkOrig);
if (typeUnifyTask.isExecutionCancelled()) {
return CompletableFuture.completedFuture(new ComputationResults());
}
/* FORK ENDE */
context.logger().debug("a in " + variance + " " + a);
context.logger().debug("nextSetasListRest: " + nextSetasListRest.toString());
while (!nextSetasListRest.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
nextSetAsList.remove(nSaL);
context.logger().debug("1 RM" + nSaL.toString());
if (!this.isOderConstraint) {
//check whether a =. ty \in nSaL contradicts sameEqSet
if (!sameEqSet.isEmpty() && !typeUnifyTask.checkNoContradiction(nSaL, sameEqSet, result)) {
TypeUnifyTask.noShortendElements++;
continue;
}
} else {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) nSaL).getExtendConstraint());
}
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
typeUnifyTask.addChildTask(fork);
// schedule compute() on another thread
CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
resultValues = resultValues.thenCombine(forkFuture,
(prevResults, fork_res) -> {
if (typeUnifyTask.isExecutionCancelled()) {
return new ComputationResults();
}
context.logger().debug("fork_res: " + fork_res.toString());
context.logger().debug(Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString());
prevResults.addForkResult(fork_res);
if (!typeUnifyTask.isUndefinedPairSetSet(fork_res)) {
aParDef.add(fork.getNextSetElement());
}
fork.context.logger().debug("final 1");
fork.closeLogFile();
return prevResults;
}
);
if (typeUnifyTask.isExecutionCancelled()) {
return CompletableFuture.completedFuture(new ComputationResults());
}
}
return resultValues;
}
@Override
public void applyComputedResults(
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> currentThreadResult,
Set<UnifyPair> compResult,
Set<UnifyPair> compRes
) {
int resOfCompare = typeUnifyTask.oup.compare(compResult, compRes);
if (resOfCompare == -1) {
context.logger().debug("Geloescht result: " + result);
result.clear();
result.addAll(currentThreadResult);
}
else if (resOfCompare == 0) {
result.addAll(currentThreadResult);
}
else if (resOfCompare == 1) {
context.logger().debug("Geloescht currentThreadResult: " + currentThreadResult);
//result = result;
}
}
@Override
public boolean eraseInvalidSets(
int rekTiefe,
Set<Set<UnifyPair>> aParDef,
List<Set<UnifyPair>> nextSetAsList
) {
// context.logger().info("");
context.logger().debug("a: " + rekTiefe + " variance: " + variance + a.toString());
context.logger().debug("aParDef: " + aParDef.toString());
aParDef.add(a);
Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator();
if (this.isOderConstraint) {
nextSetAsList.removeAll(nextSetasListOderConstraints);
nextSetasListOderConstraints = new ArrayList<>();
context.logger().debug("Removed: " + nextSetasListOderConstraints);
while (aParDefIt.hasNext()) {
Set<UnifyPair> a_new = aParDefIt.next();
List<Set<UnifyPair>> smallerSetasList = typeUnifyTask.oup.smallerThan(a_new, nextSetAsList);
context.logger().debug("smallerSetasList: " + smallerSetasList);
List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
.filter(x -> !((Constraint<UnifyPair>) x).isInherited() && !((Constraint<UnifyPair>) x).isImplemented())
.collect(Collectors.toCollection(ArrayList::new));
context.logger().debug("notInherited: " + notInherited + "\n");
List<Set<UnifyPair>> notErased = new ArrayList<>();
notInherited.forEach(x -> {
notErased.addAll(typeUnifyTask.oup.smallerEqThan(x, smallerSetasList));
});
List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList);
context.logger().debug("notErased: " + notErased + "\n");
erased.removeAll(notErased);
nextSetAsList.removeAll(erased);
context.logger().debug("Removed: " + erased);
context.logger().debug("Not Removed: " + nextSetAsList);
}
} else {
while (aParDefIt.hasNext()) {
//nextSetasListIt = nextSetasList.iterator(); should be added PL 2020-04-28
Set<UnifyPair> a_new = aParDefIt.next();
List<Set<UnifyPair>> erased = typeUnifyTask.oup.smallerEqThan(a_new, nextSetAsList);
nextSetAsList.removeAll(erased);
context.logger().debug("Removed: " + erased);
context.logger().debug("Not Removed: " + nextSetAsList);
}
}
return false;
}
}
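
The parallelisation pattern in computeParallel is identical across the variance cases: each fork's compute() itself returns a CompletableFuture, so it is scheduled with supplyAsync(...).thenCompose(f -> f), and the branch results are folded into the accumulated result with thenCombine. The sketch below distils that pattern with plain string sets instead of TypeUnify2Task; everything in it is made up for illustration.

import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ForkJoinPool;

class ForkCombineSketch {
    // stand-in for TypeUnify2Task.compute(), which itself returns a future
    static CompletableFuture<Set<String>> branch(String value) {
        return CompletableFuture.completedFuture(Set.of(value));
    }

    public static void main(String[] args) {
        ExecutorService executor = ForkJoinPool.commonPool();

        // supplyAsync yields a CompletableFuture<CompletableFuture<...>>, thenCompose flattens it
        CompletableFuture<Set<String>> acc =
                CompletableFuture.supplyAsync(() -> branch("orig"), executor).thenCompose(f -> f);

        for (String next : new String[]{"a", "b"}) {
            CompletableFuture<Set<String>> fork =
                    CompletableFuture.supplyAsync(() -> branch(next), executor).thenCompose(f -> f);
            // fold each fork's result into the accumulated result, like resultValues.thenCombine(forkFuture, ...)
            acc = acc.thenCombine(fork, (prev, forkRes) -> {
                Set<String> merged = new HashSet<>(prev);
                merged.addAll(forkRes);
                return merged;
            });
        }

        System.out.println(acc.join());   // [orig, a, b] in some order
    }
}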

View File

@@ -0,0 +1,226 @@
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.util.Tuple;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
public class CovarianceCase extends VarianceCase {
protected final int variance = -1;
protected CovarianceCase(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
super(isOderConstraint, typeUnifyTask, context);
}
@Override
public void selectNextData(
TypeUnifyTask typeUnifyTask,
List<Set<UnifyPair>> nextSetAsList,
Optional<UnifyPair> optOrigPair
) {
a = typeUnifyTask.oup.min(nextSetAsList.iterator());
context.logger().debug(() -> "Min: a in " + variance + " " + a);
if (this.isOderConstraint) {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
}
context.logger().debug(() -> "nextSetasListOderConstraints -1: " + nextSetasListOderConstraints);
nextSetAsList.remove(a);
//Determine all minimal elements in nextSetasListRest
//parallel computation is only started for these.
Set<UnifyPair> finalA = a;
nextSetasListRest = typeUnifyTask.oup.minElements(
nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != -1).toList()
);
}
@Override
public CompletableFuture<ComputationResults> computeParallel(
Set<Set<UnifyPair>> elems,
Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
IFiniteClosure fc,
int rekTiefe,
Set<UnifyPair> methodSignatureConstraint,
List<Set<UnifyPair>> nextSetAsList,
Set<UnifyPair> sameEqSet,
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> aParDef
) {
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
newElemsOrig.add(a);
/* FORK ANFANG */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, methodSignatureConstraint);
typeUnifyTask.addChildTask(forkOrig);
// schedule compute() on another thread
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
CompletableFuture<ComputationResults> resultValues = forkOrigFuture.thenApply(
(currentThreadResult) -> {
forkOrig.context.logger().debug("final Orig -1");
forkOrig.closeLogFile();
return new ComputationResults(currentThreadResult);
});
//forks.add(forkOrig);
if (typeUnifyTask.isExecutionCancelled()) {
return resultValues;
}
/* FORK ENDE */
context.logger().debug(() -> "a in " + variance + " " + a);
context.logger().debug(() -> "nextSetasListRest: " + nextSetasListRest.toString());
while (!nextSetasListRest.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
nextSetAsList.remove(nSaL);
context.logger().debug(() -> "-1 RM" + nSaL.toString());
if (!this.isOderConstraint) {
//check whether a =. ty \in nSaL contradicts sameEqSet
if (!sameEqSet.isEmpty() && !typeUnifyTask.checkNoContradiction(nSaL, sameEqSet, result)) {
TypeUnifyTask.noShortendElements++;
continue;
}
} else {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) nSaL).getExtendConstraint());
}
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
typeUnifyTask.addChildTask(fork);
// schedule compute() on another thread
CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
resultValues = resultValues.thenCombine(forkFuture,
(prevResults, fork_res) -> {
if (typeUnifyTask.isExecutionCancelled()) {
return prevResults;
}
context.logger().debug(() -> "fork_res: " + fork_res.toString());
context.logger().debug(() -> Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString());
prevResults.addForkResult(fork_res);
if (!typeUnifyTask.isUndefinedPairSetSet(fork_res)) {
aParDef.add(fork.getNextSetElement());
}
fork.context.logger().debug("final -1");
fork.closeLogFile();
return prevResults;
}
);
if (typeUnifyTask.isExecutionCancelled()) {
return resultValues;
}
}
return resultValues;
}
@Override
public void applyComputedResults(
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> currentThreadResult,
Set<UnifyPair> compResult,
Set<UnifyPair> compRes
) {
int resOfCompare = typeUnifyTask.oup.compare(compResult, compRes);
if (resOfCompare == 1) {
context.logger().debug(() -> "Geloescht result: " + result);
result.clear();
result.addAll(currentThreadResult);
} else if (resOfCompare == 0) {
result.addAll(currentThreadResult);
} else if (resOfCompare == -1) {
context.logger().debug(() -> "Geloescht currentThreadResult: " + currentThreadResult);
//result = result;
}
}
@Override
public boolean eraseInvalidSets(
int rekTiefe,
Set<Set<UnifyPair>> aParDef,
List<Set<UnifyPair>> nextSetAsList
) {
// context.logger().info("");
context.logger().debug(() -> "a: " + rekTiefe + " variance: " + variance + a.toString());
context.logger().debug(() -> "aParDef: " + aParDef.toString());
aParDef.add(a);
Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator();
if (this.isOderConstraint) {
nextSetAsList.removeAll(nextSetasListOderConstraints);
context.logger().debug(() -> "Removed: " + nextSetasListOderConstraints);
nextSetasListOderConstraints = new ArrayList<>();
while (aParDefIt.hasNext()) {
Set<UnifyPair> a_new = aParDefIt.next();
List<Set<UnifyPair>> greaterSetasList = typeUnifyTask.oup.greaterThan(a_new, nextSetAsList);
//a_new must be added if it is not inherited; it will then be removed again later
if (!((Constraint<UnifyPair>) a_new).isInherited()) {
greaterSetasList.add(a_new);
}
List<Set<UnifyPair>> notInherited = greaterSetasList.stream()
.filter(x -> !((Constraint<UnifyPair>) x).isInherited())
.collect(Collectors.toCollection(ArrayList::new));
List<Set<UnifyPair>> notErased = new ArrayList<>();
//If x is not inherited, the next overload starts at the next-greater element
notInherited.forEach(x -> {
notErased.addAll(typeUnifyTask.oup.greaterEqThan(x, greaterSetasList));
});
//the smallest element is the one that a_new inherited from
//and must therefore be removed
Iterator<Set<UnifyPair>> notErasedIt = notErased.iterator();
if (notErasedIt.hasNext()) {
Set<UnifyPair> min = typeUnifyTask.oup.min(notErasedIt);
notErased.remove(min);
notErased.remove(((Constraint<UnifyPair>) min).getExtendConstraint());
}
List<Set<UnifyPair>> erased = new ArrayList<>(greaterSetasList);
erased.removeAll(notErased);
nextSetAsList.removeAll(erased);
context.logger().debug(() -> "Removed: " + erased);
context.logger().debug(() -> "Not Removed: " + nextSetAsList);
}
} else {
while (aParDefIt.hasNext()) {
//nextSetasListIt = nextSetasList.iterator(); should be added PL 2020-04-28
Set<UnifyPair> a_new = aParDefIt.next();
List<Set<UnifyPair>> erased = typeUnifyTask.oup.greaterEqThan(a_new, nextSetAsList);
nextSetAsList.removeAll(erased);
context.logger().debug(() -> "Removed: " + erased);
context.logger().debug(() -> "Not Removed: " + nextSetAsList);
}
}
return false;
}
}

View File

@@ -0,0 +1,132 @@
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.util.Tuple;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
public class InvarianceOrConstraintCase extends VarianceCase {
// either for invariance or for oderConstraints
protected final int variance = 2;
protected InvarianceOrConstraintCase(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
super(isOderConstraint, typeUnifyTask, context);
}
@Override
public void selectNextData(
TypeUnifyTask typeUnifyTask,
List<Set<UnifyPair>> nextSetAsList,
Optional<UnifyPair> optOrigPair
) {
a = nextSetAsList.removeFirst();
//Parallel computation is started for all elements.
nextSetasListRest = new ArrayList<>(nextSetAsList);
}
@Override
public CompletableFuture<ComputationResults> computeParallel(
Set<Set<UnifyPair>> elems,
Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
IFiniteClosure fc,
int rekTiefe,
Set<UnifyPair> methodSignatureConstraint,
List<Set<UnifyPair>> nextSetAsList,
Set<UnifyPair> sameEqSet,
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> aParDef
) {
context.logger().debug("var2einstieg");
Set<TypeUnify2Task> forks = new HashSet<>();
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
newElemsOrig.add(a);
/* FORK ANFANG */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
typeUnifyTask.addChildTask(forkOrig);
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
CompletableFuture<ComputationResults> resultValues = forkOrigFuture.thenApply((currentThreadResult) -> {
forkOrig.context.logger().debug("final Orig 2");
forkOrig.closeLogFile();
return new ComputationResults(currentThreadResult);
});
if (typeUnifyTask.isExecutionCancelled()) {
return resultValues;
}
/* FORK ENDE */
context.logger().debug(() -> "a in " + variance + " " + a);
context.logger().debug(() -> "nextSetasListRest: " + nextSetasListRest.toString());
//For parallel computation of the oder-constraints, methodSignature is copied
//and the methodSignature of a resp. nSaL is removed again whenever it is not a solution.
Set<UnifyPair> methodSignatureConstraintForParallel = new HashSet<>(methodSignatureConstraint);
Set<UnifyPair> nSaL = a;
while (!nextSetasListRest.isEmpty()) {
methodSignatureConstraintForParallel.removeAll(((Constraint<UnifyPair>) nSaL).getmethodSignatureConstraint());
nSaL = nextSetasListRest.removeFirst();
nextSetAsList.remove(nSaL); //re-enabled by PL 20-02-03
methodSignatureConstraintForParallel.addAll(((Constraint<UnifyPair>) nSaL).getmethodSignatureConstraint());
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraintForParallel));
typeUnifyTask.addChildTask(fork);
CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
resultValues = resultValues.thenCombine(forkFuture, (prevResults, fork_res) -> {
if (typeUnifyTask.isExecutionCancelled()) {
return prevResults;
}
prevResults.addForkResult(fork_res);
fork.context.logger().debug("final 2");
fork.closeLogFile();
return prevResults;
});
if (typeUnifyTask.isExecutionCancelled()) {
return resultValues;
}
}
return resultValues;
}
@Override
public void applyComputedResults(
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> currentThreadResult,
Set<UnifyPair> compResult,
Set<UnifyPair> compRes
) {
// Nothing
}
@Override
public boolean eraseInvalidSets(
int rekTiefe,
Set<Set<UnifyPair>> aParDef,
List<Set<UnifyPair>> nextSetAsList
) {
// Nothing
return false;
}
}
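
computeParallel above follows a recurring pattern: one future for the original branch, then one thenCombine per additional fork, so every fork result is folded into a single CompletableFuture. The real code additionally flattens with thenCompose(f -> f) because compute() itself returns a future; the following self-contained sketch skips that detail and uses toy values instead of the compiler's types:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.CompletableFuture;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    public class ForkCombineSketch {
        // Stand-in for TypeUnify2Task.compute(): pretends to solve one case.
        static int compute(int caseId) {
            return caseId * 10;
        }

        public static void main(String[] args) {
            ExecutorService executor = Executors.newFixedThreadPool(4);

            // Future for the "original" branch, wrapped into an accumulator.
            CompletableFuture<List<Integer>> resultValues =
                    CompletableFuture.supplyAsync(() -> compute(0), executor)
                            .thenApply(first -> new ArrayList<>(List.of(first)));

            // One additional fork per remaining case, folded in via thenCombine.
            for (int caseId = 1; caseId <= 3; caseId++) {
                final int id = caseId;
                CompletableFuture<Integer> forkFuture =
                        CompletableFuture.supplyAsync(() -> compute(id), executor);
                resultValues = resultValues.thenCombine(forkFuture, (acc, forkResult) -> {
                    acc.add(forkResult);
                    return acc;
                });
            }

            System.out.println(resultValues.join()); // [0, 10, 20, 30]
            executor.shutdown();
        }
    }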


@@ -0,0 +1,241 @@
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
import de.dhbwstuttgart.exceptions.UnifyCancelException;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.util.Tuple;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;
public class UnknownVarianceCase extends VarianceCase {
protected final int variance = 0;
protected final AtomicBoolean shouldBreak = new AtomicBoolean(false);
protected UnknownVarianceCase(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
super(isOderConstraint, typeUnifyTask, context);
}
@Override
public void selectNextData(
TypeUnifyTask typeUnifyTask,
List<Set<UnifyPair>> nextSetAsList,
Optional<UnifyPair> optOrigPair
) {
//if a <. theta, then a maximal element is very likely
//if theta <. a, then a minimal element is very likely
if (!this.isOderConstraint && optOrigPair != null && optOrigPair.isPresent()) {
if (optOrigPair.get().getBasePair().getLhsType() instanceof PlaceholderType) {
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
} else {
a = typeUnifyTask.oup.min(nextSetAsList.iterator());
}
nextSetAsList.remove(a);
} else if (this.isOderConstraint) {
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
nextSetAsList.remove(a);
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
} else {
a = nextSetAsList.removeFirst();
}
Set<UnifyPair> finalA = a;
if (!this.isOderConstraint && optOrigPair != null && optOrigPair.isPresent()) {
if (optOrigPair.get().getBasePair().getLhsType() instanceof PlaceholderType) {
nextSetasListRest = typeUnifyTask.oup.maxElements(
nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != 1).toList()
);
} else {
nextSetasListRest = typeUnifyTask.oup.minElements(
nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != -1).toList()
);
}
} else if (this.isOderConstraint) {
nextSetasListRest = typeUnifyTask.oup.maxElements(
nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != 1).toList()
);
} else {
// defensive copy, so that the removeAll below does not also empty nextSetasListRest
nextSetasListRest = new ArrayList<>((nextSetAsList.size() > 5) ? nextSetAsList.subList(0, 5) : nextSetAsList);
}
nextSetAsList.removeAll(nextSetasListRest);
// */
}
@Override
public CompletableFuture<ComputationResults> computeParallel(
Set<Set<UnifyPair>> elems,
Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
IFiniteClosure fc,
int rekTiefe,
Set<UnifyPair> methodSignatureConstraint,
List<Set<UnifyPair>> nextSetAsList,
Set<UnifyPair> sameEqSet,
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> aParDef
) {
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
newElemsOrig.add(a);
Set<UnifyPair> newMethodSignatureConstraintOrig = new HashSet<>(methodSignatureConstraint);
if (isOderConstraint) {
methodSignatureConstraint.addAll(((Constraint<UnifyPair>) a).getmethodSignatureConstraint());
}
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, newMethodSignatureConstraintOrig);
typeUnifyTask.addChildTask(forkOrig);
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
CompletableFuture<ComputationResults> resultValues = forkOrigFuture.thenApply(
(currentThreadResult) -> {
forkOrig.context.logger().debug("final Orig 0");
forkOrig.closeLogFile();
return new ComputationResults(currentThreadResult);
});
int i = 0;
Set<Set<UnifyPair>>[] additionalResults = new HashSet[nextSetasListRest.size()];
Constraint<UnifyPair>[] extendConstraints = new Constraint[nextSetasListRest.size()];
while (!nextSetasListRest.isEmpty()) {
final int finalI = i++;
Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
context.logger().debug(() -> "0 RM" + nSaL.toString());
if (this.isOderConstraint) {
Constraint<UnifyPair> extendConstraint = ((Constraint<UnifyPair>) nSaL).getExtendConstraint();
extendConstraints[finalI] = extendConstraint;
}
else if (!sameEqSet.isEmpty() && !typeUnifyTask.checkNoContradiction(nSaL, sameEqSet, result)) {
TypeUnifyTask.noShortendElements++;
continue;
}
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
Set<UnifyPair> newMethodSignatureConstraint = new HashSet<>(methodSignatureConstraint);
if (isOderConstraint) {
methodSignatureConstraint.addAll(((Constraint<UnifyPair>) nSaL).getmethodSignatureConstraint());
}
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, newMethodSignatureConstraint);
typeUnifyTask.addChildTask(fork);
// schedule compute() on another thread
CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
resultValues = resultValues.thenCombine(forkFuture, (compResult, forkResult) -> {
additionalResults[finalI] = forkResult;
context.logger().debug(() -> "finalI: " + finalI);
return compResult;
});
}
int finalI1 = i;
return resultValues.thenCompose(compResult -> {
var oldResult = compResult.mainResult;
for (int e = 0; e < finalI1; e++) {
Set<Set<UnifyPair>> currentResult = additionalResults[e];
boolean oldResultInvalid = typeUnifyTask.isUndefinedPairSetSet(oldResult);
boolean currentResultInvalid = typeUnifyTask.isUndefinedPairSetSet(currentResult);
if (!oldResult.isEmpty() && !oldResultInvalid) {
boolean shouldBreak = this.eraseInvalidSets(rekTiefe, new HashSet<>(), nextSetAsList);
if (shouldBreak) {
return CompletableFuture.completedFuture(compResult);
}
}
if (this.isOderConstraint) {
nextSetasListOderConstraints.add(extendConstraints[e]);
}
if (!currentResultInvalid && oldResultInvalid) {
//if a correct result was found, discard all error cases
oldResult = currentResult;
} else if (oldResultInvalid == currentResultInvalid || oldResult.isEmpty()) {
//add all error cases and all correct results, respectively
Set<Set<UnifyPair>> finalOldResult = oldResult;
context.logger().debug(() -> "RES var1 ADD:" + finalOldResult.toString() + " " + currentResult.toString());
oldResult.addAll(currentResult);
}
}
compResult.mainResult = oldResult;
return CompletableFuture.completedFuture(compResult);
});
}
@Override
public void applyComputedResults(
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> currentThreadResult,
Set<UnifyPair> compResult,
Set<UnifyPair> compRes
) {
context.logger().debug("RES var=0 ADD:" + result.toString() + " " + currentThreadResult.toString());
result.addAll(currentThreadResult);
}
@Override
public boolean eraseInvalidSets(
int rekTiefe,
Set<Set<UnifyPair>> aParDef,
List<Set<UnifyPair>> nextSetAsList
) {
if (!this.isOderConstraint) {
return true;
} else {
nextSetAsList.removeAll(nextSetasListOderConstraints);
context.logger().debug("Removed: " + nextSetasListOderConstraints);
nextSetasListOderConstraints = new ArrayList<>();
List<Set<UnifyPair>> smallerSetasList = typeUnifyTask.oup.smallerThan(a, nextSetAsList);
List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
.filter(x -> !((Constraint<UnifyPair>) x).isInherited())
.collect(Collectors.toCollection(ArrayList::new));
List<Set<UnifyPair>> notErased = new ArrayList<>();
notInherited.forEach(x -> notErased.addAll(typeUnifyTask.oup.smallerEqThan(x, smallerSetasList)));
List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList);
erased.removeAll(notErased);
nextSetAsList.removeAll(erased);
context.logger().debug("Removed: " + erased);
context.logger().debug("Not Removed: " + nextSetAsList);
for (Set<UnifyPair> aPar : aParDef) {
smallerSetasList.clear();
smallerSetasList.addAll(typeUnifyTask.oup.smallerThan(aPar, nextSetAsList));
notInherited = smallerSetasList.stream()
.filter(x -> !((Constraint<UnifyPair>) x).isInherited())
.collect(Collectors.toCollection(ArrayList::new));
notErased.clear();
notInherited.forEach(x -> notErased.addAll(typeUnifyTask.oup.smallerEqThan(x, smallerSetasList)));
erased = new ArrayList<>(smallerSetasList);
erased.removeAll(notErased);
nextSetAsList.removeAll(erased);
context.logger().debug("Removed: " + erased);
context.logger().debug("Not Removed: " + nextSetAsList);
}
}
return false;
}
}
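
The thenCompose block above merges the fork results by a simple priority rule: a valid (non-"undefined") result replaces an invalid one, and results of equal validity are unioned. A small, self-contained sketch of that merge rule; the isUndefined predicate here is a hypothetical stand-in for TypeUnifyTask.isUndefinedPairSetSet:

    import java.util.HashSet;
    import java.util.Set;
    import java.util.function.Predicate;

    public class ResultMergeSketch {
        // Hypothetical stand-in for isUndefinedPairSetSet: a result counts as
        // "undefined" if it contains the marker string "ERROR".
        static final Predicate<Set<String>> IS_UNDEFINED = r -> r.contains("ERROR");

        static Set<String> merge(Set<String> oldResult, Set<String> currentResult) {
            boolean oldInvalid = IS_UNDEFINED.test(oldResult);
            boolean currentInvalid = IS_UNDEFINED.test(currentResult);
            if (!currentInvalid && oldInvalid) {
                // A correct result was found: drop the accumulated error cases.
                return new HashSet<>(currentResult);
            }
            if (oldInvalid == currentInvalid || oldResult.isEmpty()) {
                // Same validity (or nothing collected yet): keep both.
                Set<String> merged = new HashSet<>(oldResult);
                merged.addAll(currentResult);
                return merged;
            }
            // Old result is valid, current one is not: keep the old result.
            return oldResult;
        }

        public static void main(String[] args) {
            System.out.println(merge(Set.of("ERROR"), Set.of("solutionA")));     // [solutionA]
            System.out.println(merge(Set.of("solutionA"), Set.of("solutionB"))); // both solutions
        }
    }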


@@ -0,0 +1,132 @@
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.util.Tuple;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
public abstract class VarianceCase {
public static VarianceCase createFromVariance(int variance, boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
return switch (variance) {
case 0 -> new UnknownVarianceCase(isOderConstraint, typeUnifyTask, context);
case 1 -> new ContravarianceCase(isOderConstraint, typeUnifyTask, context);
case -1 -> new CovarianceCase(isOderConstraint, typeUnifyTask, context);
case 2 -> new InvarianceOrConstraintCase(isOderConstraint, typeUnifyTask, context);
default -> throw new RuntimeException("Invalid variance: " + variance);
};
}
protected final boolean isOderConstraint;
protected final TypeUnifyTask typeUnifyTask;
protected final UnifyContext context;
/**
* The currently selected case.
*/
public Set<UnifyPair> a;
/**
* List of cases for parallel processing.
* Contains the elements that are not in relation to the current case stored in
* the variable a. These have to be processed in any case, which is why their
* computation is started in parallel.
*/
public List<Set<UnifyPair>> nextSetasListRest = new ArrayList<>();
/**
* List of cases whose receiver contains "? extends" (or does not contain it, respectively).
* Usually this is exactly one element.
* This element is later removed from nextSetasList once the respective other element has
* led to a successful result.
*/
public List<Set<UnifyPair>> nextSetasListOderConstraints = new ArrayList<>();
protected VarianceCase(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
this.isOderConstraint = isOderConstraint;
this.typeUnifyTask = typeUnifyTask;
this.context = context;
}
/**
* Selects the values for the next iteration in the run method:
* - a: the case to be processed in the current iteration
* - nextSetasListRest: the cases that are not in relation to the selected a and still have to be worked on
* - nextSetasListOderConstraints: the cases whose receiver contains "? extends", typically a single element
*/
public abstract void selectNextData(
TypeUnifyTask typeUnifyTask,
List<Set<UnifyPair>> nextSetAsList,
Optional<UnifyPair> optOrigPair
);
/**
* Starts the computation for the selected case a and, in parallel, for the remaining cases in
* nextSetasListRest, and combines all fork futures into a single future result.
*/
public abstract CompletableFuture<ComputationResults> computeParallel(
Set<Set<UnifyPair>> elems,
Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
IFiniteClosure fc,
int rekTiefe,
Set<UnifyPair> methodSignatureConstraint,
List<Set<UnifyPair>> nextSetAsList,
Set<UnifyPair> sameEqSet,
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> aParDef
);
/**
* Merges the results computed by the current thread into the overall result set.
*/
public abstract void applyComputedResults(
Set<Set<UnifyPair>> result,
Set<Set<UnifyPair>> currentThreadResult,
Set<UnifyPair> compResult,
Set<UnifyPair> compRes
);
/**
* Removes candidate sets from nextSetAsList that have become invalid once a result was found.
*
* @return whether the current iteration should be broken out of
*/
public abstract boolean eraseInvalidSets(
int rekTiefe,
Set<Set<UnifyPair>> aParDef,
List<Set<UnifyPair>> nextSetAsList
);
/**
* Wrapper class for the parallel computation results
*/
public static class ComputationResults {
public Set<Set<UnifyPair>> mainResult;
public Set<Set<Set<UnifyPair>>> forkResults;
public ComputationResults() {
this(new HashSet<>(), new HashSet<>());
}
public ComputationResults(Set<Set<UnifyPair>> mainResult) {
this(mainResult, new HashSet<>());
}
public ComputationResults(Set<Set<UnifyPair>> mainResult, Set<Set<Set<UnifyPair>>> forkResults) {
this.mainResult = mainResult;
this.forkResults = forkResults;
}
void addForkResult(Set<Set<UnifyPair>> forkResult) {
forkResults.add(forkResult);
}
}
}
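
ComputationResults is a plain accumulator: the main branch's result plus the result sets of the parallel forks. A short usage sketch; because addForkResult is package-private, the sketch lives in the same package, and the final fold into mainResult is one plausible way to consume the accumulator, not code quoted from the compiler:

    package de.dhbwstuttgart.typeinference.unify.cartesianproduct;

    import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
    import java.util.HashSet;
    import java.util.Set;

    public class ComputationResultsSketch {
        public static void main(String[] args) {
            // Result of the branch computed on the current chain of futures.
            Set<Set<UnifyPair>> mainResult = new HashSet<>();
            VarianceCase.ComputationResults results = new VarianceCase.ComputationResults(mainResult);

            // Each parallel fork contributes its own result set.
            results.addForkResult(new HashSet<>());
            results.addForkResult(new HashSet<>());

            // Fold every fork result into the main result.
            for (Set<Set<UnifyPair>> forkResult : results.forkResults) {
                results.mainResult.addAll(forkResult);
            }
            System.out.println(results.mainResult.size()); // 0 here, since all sets are empty
        }
    }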


@@ -12,15 +12,12 @@ import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
public class distributeVariance extends visitUnifyTypeVisitor<Integer> {
- public static int inverseVariance(int variance) {
- Integer ret = 0;
- if (variance == 1) {
- ret = -1;
- }
- if (variance == -1) {
- ret = 1;
- }
- return ret;
+ public static int inverseVariance(int variance) {
+ return switch (variance) {
+ case 1 -> -1;
+ case -1 -> 1;
+ default -> 0;
+ };
}
@@ -42,7 +39,7 @@ public class distributeVariance extends visitUnifyTypeVisitor<Integer> {
List<UnifyType> param = new ArrayList<>(funnty.getTypeParams().get().length);
param.addAll(Arrays.asList(funnty.getTypeParams().get()));
UnifyType resultType = param.remove(param.size()-1);
- Integer htInverse = inverseVariance(ht);
+ int htInverse = inverseVariance(ht);
param = param.stream()
.map(x -> x.accept(this, htInverse))
.collect(Collectors.toCollection(ArrayList::new));
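
The change above replaces the if-chain with an equivalent switch expression (Java 14+); the mapping itself is unchanged. A quick stand-alone check, copying the new method body:

    public class InverseVarianceCheck {
        // Same mapping as the refactored distributeVariance.inverseVariance.
        static int inverseVariance(int variance) {
            return switch (variance) {
                case 1 -> -1;
                case -1 -> 1;
                default -> 0;
            };
        }

        public static void main(String[] args) {
            System.out.println(inverseVariance(1));  // -1
            System.out.println(inverseVariance(-1)); // 1
            System.out.println(inverseVariance(0));  // 0 (and 0 for any other value)
        }
    }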


@@ -1,6 +1,7 @@
package de.dhbwstuttgart.typeinference.unify.interfaces;
import java.util.List;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.Optional;
import java.util.Set;
@@ -18,9 +19,8 @@ import org.antlr.v4.runtime.Token;
*
* @author Florian Steurer
*/
- public interface IFiniteClosure {
+ public interface IFiniteClosure extends ISerializableData {
public void setLogTrue();
/**
* Returns all types of the finite closure that are subtypes of the argument.
* @return The set of subtypes of the argument.
@@ -74,5 +74,5 @@ public interface IFiniteClosure {
public Set<UnifyType> getChildren(UnifyType t);
public Set<UnifyType> getAllTypesByName(String typeName);
- public int compare(UnifyType rhsType, UnifyType rhsType2, PairOperator pairop);
+ public int compare(UnifyType rhsType, UnifyType rhsType2, PairOperator pairop, UnifyContext context);
}


@@ -1,8 +1,9 @@
package de.dhbwstuttgart.typeinference.unify.model;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.Set;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
@@ -11,7 +12,7 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor;
/**
* An extends wildcard type "? extends T".
*/
- public final class ExtendsType extends WildcardType {
+ public final class ExtendsType extends WildcardType implements ISerializableData {
public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) {
return visitor.visit(this, ht);
@@ -23,9 +24,6 @@ public final class ExtendsType extends WildcardType {
*/
public ExtendsType(UnifyType extendedType) {
super("? extends " + extendedType.getName(), extendedType);
- if (extendedType instanceof ExtendsType) {
- System.out.print("");
- }
}
/**
@@ -92,5 +90,21 @@ public final class ExtendsType extends WildcardType {
return "? extends " + wildcardedType;
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
SerialMap serialized = new SerialMap();
serialized.put("wildcardedType", this.wildcardedType.toSerial(keyStorage));
// create the wrapper and put this as the object
var serializedWrapper = super.toSerial(keyStorage).assertType(SerialMap.class);
serializedWrapper.put("object", serialized);
return serializedWrapper;
}
public static ExtendsType fromSerial(SerialMap data, UnifyContext context) {
return new ExtendsType(
UnifyType.fromSerial(data.getMap("wildcardedType"), context)
);
}
}
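
toSerial above follows a wrapper convention: the superclass call produces the outer SerialMap, and the subclass stores its own fields in a nested map under the key "object". The SerialMap/KeyStorage API is project-specific, so the following illustration rebuilds only the layering idea with plain java.util maps:

    import java.util.HashMap;
    import java.util.Map;

    public class SerialWrapperSketch {
        static class Base {
            // Plays the role of the superclass toSerial: writes the outer wrapper.
            Map<String, Object> toSerial() {
                Map<String, Object> wrapper = new HashMap<>();
                wrapper.put("type", getClass().getSimpleName());
                return wrapper;
            }
        }

        static class ExtendsLike extends Base {
            final String wildcardedType;
            ExtendsLike(String wildcardedType) { this.wildcardedType = wildcardedType; }

            @Override
            Map<String, Object> toSerial() {
                // Subclass payload goes into a nested map under "object".
                Map<String, Object> payload = new HashMap<>();
                payload.put("wildcardedType", wildcardedType);
                Map<String, Object> wrapper = super.toSerial();
                wrapper.put("object", payload);
                return wrapper;
            }
        }

        public static void main(String[] args) {
            System.out.println(new ExtendsLike("Number").toSerial());
            // e.g. {type=ExtendsLike, object={wildcardedType=Number}}
        }
    }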


@@ -1,14 +1,17 @@
package de.dhbwstuttgart.typeinference.unify.model;
import de.dhbwstuttgart.parser.scope.JavaClassName;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.server.packet.dataContainers.ISerializableData;
import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialValue;
import java.lang.reflect.Modifier;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
- public class FunInterfaceType extends ReferenceType {
+ public class FunInterfaceType extends ReferenceType implements ISerializableData {
final List<UnifyType> intfArgTypes;
final UnifyType intfReturnType;
final List<String> generics;
@@ -46,4 +49,29 @@ public class FunInterfaceType extends ReferenceType {
return args;
}
@Override
public SerialMap toSerial(KeyStorage keyStorage) {
var serializedWrapper = super.toSerial(keyStorage);
SerialMap serialized = serializedWrapper.getMap("object");
serialized.put("intfArgTypes", SerialList.fromMapped(intfArgTypes, u -> u.toSerial(keyStorage)));
serialized.put("intfReturnType", intfReturnType.toSerial(keyStorage));
serialized.put("generics", SerialList.fromMapped(generics, SerialValue::new));
return serializedWrapper;
}
public static FunInterfaceType fromSerial(SerialMap data, UnifyContext context) {
var name = data.getValue("name").getOf(String.class);
var params = data.getList("params").assertListOfMaps().stream().map(
paramData -> UnifyType.fromSerial(paramData, context)).toList();
var intfArgTypes = data.getList("intfArgTypes").assertListOfMaps().stream().map(
argTypeData -> UnifyType.fromSerial(argTypeData, context)).toList();
var intfReturnType = UnifyType.fromSerial(data.getMap("intfReturnType"), context);
var generics = data.getList("generics").assertListOfValues().stream().map(
generic -> generic.getOf(String.class)).toList();
return new FunInterfaceType(name, new TypeParams(params), intfArgTypes, intfReturnType, generics);
}
}

Some files were not shown because too many files have changed in this diff.