forked from i21017/JavaCompilerCore

Compare commits: 6 commits (76f589dd33 ... c54e012426)
Author | SHA1 | Date
---|---|---
 | c54e012426 | 
 | 52f6ebcf3c | 
 | c80a0c8596 | 
 | 2278fb1b91 | 
 | 32b16cd5fd | 
 | fd30c5f63f | 
@@ -51,8 +51,8 @@ public class ConsoleInterface {
         if (serverPort.isPresent()) {
             if (unifyServer.isPresent()) throw new RuntimeException("Cannot use unifyServer when in server mode!");
 
-            JavaTXServer server = new JavaTXServer();
-            server.listen(serverPort.get());
+            JavaTXServer server = new JavaTXServer(serverPort.get());
+            server.listen();
         }
         else {
             JavaTXCompiler compiler = new JavaTXCompiler(input, classpath, outputPath != null ? new File(outputPath) : null, unifyServer);
@@ -65,6 +65,9 @@ import org.apache.commons.io.output.NullOutputStream;
 
 public class JavaTXCompiler {
 
+    // do not use this in any code, that can be executed serverside!
+    public static PlaceholderRegistry defaultClientPlaceholderRegistry = new PlaceholderRegistry();
+
     // public static JavaTXCompiler INSTANCE;
     final CompilationEnvironment environment;
     Boolean resultmodel = true;
@@ -101,6 +104,9 @@ public class JavaTXCompiler {
     }
 
     public JavaTXCompiler(List<File> sources, List<File> contextPath, File outputPath, Optional<String> unifyServer) throws IOException, ClassNotFoundException {
+        // ensure new default placeholder registry for tests
+        defaultClientPlaceholderRegistry = new PlaceholderRegistry();
+
         this.unifyServer = unifyServer;
         var path = new ArrayList<>(contextPath);
         if (contextPath.isEmpty()) {
@@ -311,7 +317,7 @@ public class JavaTXCompiler {
         Set<Set<UnifyPair>> results = new HashSet<>();
         UnifyResultModel urm = null;
         // urm.addUnifyResultListener(resultListener);
-        UnifyContext context = new UnifyContext(logFile, log, true, urm, usedTasks, PlaceholderRegistry.defaultRegistry);
+        UnifyContext context = new UnifyContext(logFile, log, true, urm, usedTasks, defaultClientPlaceholderRegistry);
         try {
             logFile = logFile == null ? new FileWriter(new File("log_" + sourceFiles.keySet().iterator().next().getName())) : logFile;
             IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses, logFile, getClassLoader(), this, context.placeholderRegistry());
@@ -4,13 +4,27 @@ import de.dhbwstuttgart.server.SocketServer;
 
 public class JavaTXServer {
 
-    public void listen(int port) {
+    public static boolean isRunning = false;
+
+    final SocketServer socketServer;
+
+    public JavaTXServer(int port) {
+        this.socketServer = new SocketServer(port);
+        isRunning = true;
+    }
+
+    public void listen() {
+        socketServer.start();
+    }
+
+    public void forceStop() {
         try {
-            SocketServer socketServer = new SocketServer(port);
-            socketServer.start();
-        } catch (Exception e) {
-            e.printStackTrace();
+            socketServer.stop();
         }
+        catch (InterruptedException exception) {
+            System.err.println("Interrupted socketServer: " + exception);
+        }
+        isRunning = false;
     }
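Reading aid for the refactor above (an illustration, not taken from the diff; the port value is only an example): the server is now constructed with its port, started with the argument-free listen(), and shut down through forceStop(), which is how ConsoleInterface and the new ServerTest below drive it. A minimal sketch:

    JavaTXServer server = new JavaTXServer(5000);   // port comes from the serverPort option in ConsoleInterface
    server.listen();                                // starts the underlying SocketServer
    // ... later, e.g. from a test ...
    server.forceStop();                             // stops the SocketServer and resets isRunning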
@@ -111,7 +111,7 @@ public class FCGenerator {
         //Generics mit gleichem Namen müssen den selben TPH bekommen
         for(GenericTypeVar gtv : forType.getGenerics()){
             if(!gtvs.containsKey(gtv.getName())){
-                TypePlaceholder replacePlaceholder = TypePlaceholder.fresh(new NullToken(), placeholderRegistry);
+                TypePlaceholder replacePlaceholder = TypePlaceholder.fresh(new NullToken());
                 gtvs.put(gtv.getName(), replacePlaceholder);
                 newGTVs.put(gtv.getName(), replacePlaceholder);
             }
@@ -0,0 +1,98 @@
+package de.dhbwstuttgart.syntaxtree.factory;
+
+import de.dhbwstuttgart.core.JavaTXCompiler;
+import de.dhbwstuttgart.core.JavaTXServer;
+
+public class NameGenerator {
+
+    private static String strNextName = "A";
+
+    /**
+     * Setzt den zu Beginn der Typinferenz auf "A" zurueck.
+     * Dies ist bei JUnit-Test noetig
+     * <code>TypePlaceholder</code>. <br>Author: Martin Pluemicke
+     * @return void
+     */
+    public static void reset() {
+        strNextName = "A";
+    }
+
+    /**
+     * Berechnet einen neuen, eindeutigen Namen für eine neue
+     * <code>TypePlaceholder</code>. <br>Author: Jörg Bäuerle
+     * @return Der Name
+     */
+    public static String makeNewName()
+    {
+        // otth: Funktion berechnet einen neuen Namen anhand eines alten gespeicherten
+        String strReturn = strNextName;
+
+        // nächster Name berechnen und in strNextName speichern
+        inc( strNextName.length() - 1 );
+
+        if (JavaTXServer.isRunning) {
+            throw new RuntimeException("Using the NameGenerator on a server is not allowed");
+        }
+        JavaTXCompiler.defaultClientPlaceholderRegistry.addPlaceholder(strReturn);
+        return strReturn;
+    }
+
+    /**
+     * Hilfsfunktion zur Berechnung eines neuen Namens
+     * <br>Author: Jörg Bäuerle
+     * @param i
+     */
+    private static void inc(int i)
+    {
+        // otth: Hilfsfunktion zur Berechnung eines neuen Namens
+        // otth: Erhöhung des Buchstabens an der Stelle i im String strNextName
+        // otth: Nach Überlauf: rekursiver Aufruf
+
+        // falls i = -1 --> neuer Buchstabe vorne anfügen
+        if ( i == -1 )
+        {
+            strNextName = "A" + strNextName;
+            return;
+        }
+
+        char cBuchstabe = (char)(strNextName.charAt( i ));
+        cBuchstabe++;
+        if ( cBuchstabe - 65 > 25 )
+        {
+            // aktuelle Stelle: auf A zuruecksetzen
+            manipulate( i, 'A' );
+
+            // vorherige Stelle erhöhen
+            inc( i - 1 );
+        }
+        else
+        {
+            // aktueller Buchstabe ändern
+            manipulate( i, cBuchstabe );
+        }
+
+    }
+
+    /**
+     * Hilfsfunktion zur Berechnung eines neuen Namens.
+     * <br>Author: Jörg Bäuerle
+     * @param nStelle
+     * @param nWert
+     */
+    private static void manipulate( int nStelle, char nWert )
+    {
+        // otth: Hilfsfunktion zur Berechnung eines neuen Namens
+        // otth: Ersetzt im String 'strNextName' an der Position 'nStelle' den Buchstaben durch 'nWert'
+
+        String strTemp = "";
+        for( int i = 0; i < strNextName.length(); i++)
+        {
+            if ( i == nStelle )
+                strTemp = strTemp + nWert;
+            else
+                strTemp = strTemp + strNextName.charAt( i );
+        }
+        strNextName = strTemp;
+    }
+
+}
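For context (an illustration, not part of the diff): inc() and manipulate() implement a base-26 counter over the letters A-Z, so successive makeNewName() calls yield A, B, ..., Z, AA, AB, and so on, and every returned name is also recorded in JavaTXCompiler.defaultClientPlaceholderRegistry. The carry logic, re-implemented as a self-contained sketch (NameSequenceDemo is hypothetical and not part of the repository):

    // Standalone demo of the same carry logic; it does not touch the compiler classes.
    public class NameSequenceDemo {
        public static void main(String[] args) {
            String name = "A";
            for (int i = 0; i < 28; i++) {
                System.out.print(name + " ");   // prints: A B ... Z AA AB
                name = next(name);
            }
        }

        // increment the last letter, resetting and carrying left on overflow, like inc()/manipulate()
        static String next(String s) {
            StringBuilder sb = new StringBuilder(s);
            for (int i = s.length() - 1; i >= 0; i--) {
                char c = (char) (sb.charAt(i) + 1);
                if (c <= 'Z') {
                    sb.setCharAt(i, c);
                    return sb.toString();
                }
                sb.setCharAt(i, 'A');           // overflow at this position: reset and carry
            }
            return "A" + sb;                    // carried past the first letter, e.g. "Z" -> "AA"
        }
    }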
@@ -7,6 +7,7 @@ import de.dhbwstuttgart.typeinference.unify.UnifyContext;
 
 import de.dhbwstuttgart.parser.NullToken;
 import de.dhbwstuttgart.syntaxtree.ASTVisitor;
+import de.dhbwstuttgart.syntaxtree.factory.NameGenerator;
 import de.dhbwstuttgart.typeinference.result.ResultSetVisitor;
 import org.antlr.v4.runtime.Token;
 
@@ -65,20 +66,16 @@ public class TypePlaceholder extends RefTypeOrTPHOrWildcardOrGeneric implements
      * @return
      */
     public static TypePlaceholder fresh(Token position){
-        return fresh(position, 0, true);
+        return new TypePlaceholder(NameGenerator.makeNewName(), position, 0, true);
     }
 
-    public static TypePlaceholder fresh(Token position, PlaceholderRegistry placeholderRegistry){
-        return fresh(position, 0, true, placeholderRegistry);
-    }
+    public static TypePlaceholder fresh(Token position, PlaceholderRegistry placeholderRegistry){
+        String newName = placeholderRegistry.generateFreshPlaceholderName();
+        return new TypePlaceholder(newName, position, 0, true);
+    }
 
     public static TypePlaceholder fresh(Token position, int variance, boolean wildcardable){
-        return fresh(position, variance, wildcardable, PlaceholderRegistry.defaultRegistry);
-    }
-
-    public static TypePlaceholder fresh(Token position, int variance, boolean wildcardable, PlaceholderRegistry placeholderRegistry){
-        String newName = placeholderRegistry.generateFreshPlaceholderName();
-        return new TypePlaceholder(newName, position, variance, wildcardable);
+        return new TypePlaceholder(NameGenerator.makeNewName(), position, variance, wildcardable);
     }
 
     public static RefTypeOrTPHOrWildcardOrGeneric of(String name) {
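For orientation (not taken from the diff): after this change the fresh(...) overloads split along the client/server boundary; the registry-free variants draw their names from the client-only NameGenerator, while server-side code keeps passing an explicit PlaceholderRegistry. A minimal sketch, reusing only calls that appear in this compare view (NullToken as a dummy position, as in FCGenerator):

    // client side: the name comes from NameGenerator and lands in the default client registry
    TypePlaceholder clientTph = TypePlaceholder.fresh(new NullToken());

    // server side: the name comes from the explicitly supplied registry instead
    PlaceholderRegistry registry = new PlaceholderRegistry();
    TypePlaceholder serverTph = TypePlaceholder.fresh(new NullToken(), registry);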
@@ -7,6 +7,7 @@ import de.dhbwstuttgart.syntaxtree.GenericDeclarationList;
 import de.dhbwstuttgart.syntaxtree.GenericTypeVar;
 import de.dhbwstuttgart.syntaxtree.Method;
 import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
+import de.dhbwstuttgart.syntaxtree.factory.NameGenerator;
 import de.dhbwstuttgart.syntaxtree.type.GenericRefType;
 
 
@@ -38,7 +38,7 @@ implements ISerializableData {
     public SerialMap toSerial(KeyStorage keyStorage) {
         SerialMap serialized = new SerialMap();
         serialized.put("left", this.getLeft().toSerial(keyStorage));
-        serialized.put("right", this.getLeft().toSerial(keyStorage));
+        serialized.put("right", this.getRight().toSerial(keyStorage));
         // create the wrapper and put this as the object
         var serializedWrapper = super.toSerial(keyStorage);
         serializedWrapper.put("object", serialized);
@@ -21,7 +21,7 @@ public class PairTPHEqualTPH extends ResultPair<TypePlaceholder, TypePlaceholder
     public SerialMap toSerial(KeyStorage keyStorage) {
         SerialMap serialized = new SerialMap();
         serialized.put("left", this.getLeft().toSerial(keyStorage));
-        serialized.put("right", this.getLeft().toSerial(keyStorage));
+        serialized.put("right", this.getRight().toSerial(keyStorage));
         // create the wrapper and put this as the object
         var serializedWrapper = super.toSerial(keyStorage);
         serializedWrapper.put("object", serialized);
@@ -46,7 +46,7 @@ public class PairTPHsmallerTPH extends ResultPair<TypePlaceholder,TypePlaceholde
     public SerialMap toSerial(KeyStorage keyStorage) {
         SerialMap serialized = new SerialMap();
         serialized.put("left", this.getLeft().toSerial(keyStorage));
-        serialized.put("right", this.getLeft().toSerial(keyStorage));
+        serialized.put("right", this.getRight().toSerial(keyStorage));
         // create the wrapper and put this as the object
         var serializedWrapper = super.toSerial(keyStorage);
         serializedWrapper.put("object", serialized);
@@ -7,6 +7,7 @@ import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
 import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
 import de.dhbwstuttgart.typeinference.unify.UnifyContext;
 import java.util.ArrayList;
+import java.util.Comparator;
 import java.util.HashSet;
 import java.util.Set;
 
@@ -59,15 +60,19 @@ public class ResultSet implements ISerializableData {
 
     public String toString() {
         var results = new ArrayList<>(this.results);
-        results.sort(Ordering.usingToString());
+        results.sort(
+                Comparator
+                        .comparingInt((ResultPair o) -> o.getLeft().toString().length())
+                        .thenComparing(o -> o.getLeft().toString())
+                        .thenComparingInt(o -> o.getRight().toString().length())
+                        .thenComparing(o -> o.getRight().toString())
+        );
         return results.toString();
     }
 
     @Override
     public boolean equals(Object o) {
-        if (o instanceof ResultSet) {
-            ResultSet other = (ResultSet) o;
-
+        if (o instanceof ResultSet other) {
+            // sort both result lists
             var thisElements = new ArrayList<>(this.results);
             thisElements.sort(Ordering.usingToString());
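Side note (not part of the diff): the new comparator makes toString() deterministic by ordering pairs first by the length of their left side, then lexicographically, then the same way for the right side, instead of relying on plain toString() ordering. The same chaining pattern on ordinary strings, as a self-contained sketch (SortDemo is hypothetical):

    import java.util.ArrayList;
    import java.util.Comparator;
    import java.util.List;

    public class SortDemo {
        public static void main(String[] args) {
            List<String> names = new ArrayList<>(List.of("AB", "B", "AA", "A"));
            Comparator<String> byLengthThenAlpha =
                    Comparator.comparingInt(String::length)              // shorter strings first
                              .thenComparing(Comparator.naturalOrder()); // then alphabetical
            names.sort(byLengthThenAlpha);
            System.out.println(names);   // [A, B, AA, AB]
        }
    }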
@@ -13,6 +13,7 @@ import de.dhbwstuttgart.parser.SyntaxTreeGenerator.AssignToLocal;
 import de.dhbwstuttgart.parser.scope.JavaClassName;
 import de.dhbwstuttgart.syntaxtree.*;
 import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
+import de.dhbwstuttgart.syntaxtree.factory.NameGenerator;
 import de.dhbwstuttgart.syntaxtree.statement.*;
 import de.dhbwstuttgart.syntaxtree.type.ExtendsWildcardType;
 import de.dhbwstuttgart.syntaxtree.type.GenericRefType;
@@ -743,7 +744,7 @@ public class TYPEStmt implements StatementVisitor {
         List<GenericRefType> funNParams = new ArrayList<>();
         for (int i = 0; i < numArgs + 1; i++) {
             // funNParams.add(TypePlaceholder.fresh(new NullToken()));
-            funNParams.add(new GenericRefType(PlaceholderRegistry.defaultRegistry.generateFreshPlaceholderName(), new NullToken()));
+            funNParams.add(new GenericRefType(NameGenerator.makeNewName(), new NullToken()));
         }
         funNParams.get(funNParams.size() - 1);
         ret.add(new MethodAssumption(new FunNClass(funNParams), funNParams.get(funNParams.size() - 1), funNParams.subList(0, funNParams.size() - 1), new TypeScope() {
@@ -0,0 +1,62 @@
+package de.dhbwstuttgart.typeinference.unify;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.ConcurrentLinkedQueue;
+import java.util.concurrent.RecursiveTask;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+/**
+ * An intermediate class for the recursive steps of the TypeUnifyTask:
+ * This allows to cancel parts of the recursion tree, instead of only the whole execution as before. But in
+ * order for that to work, all cancellable child tasks must be added when they are created
+ *
+ * @param <T>
+ */
+public abstract class CancellableTask<T> extends RecursiveTask<T> {
+
+    private final AtomicBoolean executionCancelled = new AtomicBoolean(false);
+    private final List<CancellableTask<?>> childTasks = new ArrayList<>();
+    private CancellableTask<?> parentTask = null;
+
+    /**
+     * Set the execution for this task and all its (recursive) children to be cancelled
+     */
+    protected void cancelExecution() {
+        // is this branch already cancelled? Then do nothing
+        if (this.executionCancelled.get()) return;
+        executionCancelled.set(true);
+        this.cancelChildExecution();
+    }
+
+    public void cancelChildExecution() {
+        for (var childTask : childTasks) {
+            // no need to cancel a branch that is already finished
+            if (!childTask.isDone()) {
+                childTask.cancelExecution();
+            }
+        }
+    }
+
+    protected void cancelSiblingTasks() {
+        if (this.parentTask != null) {
+            boolean thisWasCancelledBefore = this.executionCancelled.get();
+            this.parentTask.cancelChildExecution();
+            this.executionCancelled.set(thisWasCancelledBefore);
+        }
+    }
+
+    public Boolean isExecutionCancelled() {
+        return executionCancelled.get();
+    }
+
+    public void addChildTask(CancellableTask<?> childTask) {
+        this.childTasks.add(childTask);
+        childTask.setParentTask(this);
+    }
+
+    private void setParentTask(CancellableTask<?> parentTask) {
+        this.parentTask = parentTask;
+    }
+
+}
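For orientation (not part of the diff): a parent task registers each fork through addChildTask(), cancellation then spreads down the registered subtree, and every task is expected to poll isExecutionCancelled() cooperatively, which is what TypeUnifyTask and the VarianceCase classes below do. A minimal sketch against the class above (DemoTask is hypothetical and not part of the repository):

    class DemoTask extends CancellableTask<Integer> {
        private final int depth;
        DemoTask(int depth) { this.depth = depth; }

        @Override
        protected Integer compute() {
            if (isExecutionCancelled()) return 0;   // cooperative check, analogous to myIsCancelled()
            if (depth == 0) return 1;
            DemoTask child = new DemoTask(depth - 1);
            addChildTask(child);                    // register the fork so cancellation can reach it
            child.fork();
            return 1 + child.join();
        }
    }

    // usage: new java.util.concurrent.ForkJoinPool().invoke(new DemoTask(3));
    // calling cancelChildExecution() on the root before the forks finish marks
    // every registered descendant as cancelled.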
@@ -16,10 +16,6 @@ import java.util.concurrent.atomic.AtomicInteger;
  */
 public class PlaceholderRegistry implements ISerializableData {
 
-    // The default registry should only ever be used outside on the Client-Side and NEVER on the Server-Side!
-    // This includes at least the server-handled packets and the unification algorithm
-    public static PlaceholderRegistry defaultRegistry = new PlaceholderRegistry();
-
     private final Set<String> existingPlaceholders = ConcurrentHashMap.newKeySet();
     private final AtomicInteger placeholderCount = new AtomicInteger();
     public ArrayList<PlaceholderType> UnifyTypeFactory_PLACEHOLDERS = new ArrayList<>();
@@ -49,6 +45,14 @@ public class PlaceholderRegistry implements ISerializableData {
         return name;
     }
 
+    public PlaceholderRegistry deepClone() {
+        PlaceholderRegistry pr2 = new PlaceholderRegistry();
+        this.existingPlaceholders.forEach(pr2::addPlaceholder);
+        pr2.UnifyTypeFactory_PLACEHOLDERS.addAll(this.UnifyTypeFactory_PLACEHOLDERS);
+        pr2.placeholderCount.set(this.placeholderCount.get());
+        return pr2;
+    }
+
     /**
      * Generate a token that consists of uppercase letters and contains the given prefix and suffix from the value i
      *
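Side note (not from the diff): deepClone() gives the server path its own registry, so placeholder names added afterwards on one side do not leak into the other; ServerTest below relies on exactly that. A minimal sketch using only methods shown in this compare view:

    PlaceholderRegistry original = new PlaceholderRegistry();
    original.addPlaceholder("A");

    PlaceholderRegistry copy = original.deepClone();
    copy.addPlaceholder("B");   // recorded only in the copy; 'original' still contains just "A"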
@@ -59,7 +59,7 @@ import org.apache.commons.io.output.NullOutputStream;
  *
  * @author Florian Steurer
  */
-public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<UnifyPair>>>> {
+public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<UnifyPair>>>> {
 
     @Serial
     private static final long serialVersionUID = 1L;
@@ -259,9 +259,6 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
                     .collect(Collectors.toCollection(ArrayList::new));
             throw new TypeinferenceException("Unresolved constraints: " + res, new NullToken()); //return new HashSet<>();
         }
-        else {
-            System.out.println(res);
-        }
 
         if (this.myIsCancelled()) {
             return new HashSet<>();
@@ -855,6 +852,11 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
 
         if (parallel) {
             for (Set<Set<UnifyPair>> par_res : forkResults) {
+                if (variance == 0 && (!result.isEmpty() && (!isUndefinedPairSetSet(currentThreadResult)))) {
+                    this.cancelChildExecution();
+                    return CompletableFuture.completedFuture(result);
+                }
+
                 if (!isUndefinedPairSetSet(par_res) && isUndefinedPairSetSet(result)) {
                     //wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen
                     result = par_res;
@@ -887,6 +889,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
                 // Iterator<Set<UnifyPair>> nextSetasListIt = new ArrayList<>(nextSetAsList).iterator();
                 boolean shouldBreak = varianceCase.eraseInvalidSets(rekTiefe, aParDef, nextSetAsList);
                 if (shouldBreak) {
+                    this.cancelChildExecution();
                     return CompletableFuture.completedFuture(result);
                 }
 
@@ -1387,15 +1390,13 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
         {
             List<Set<Constraint<UnifyPair>>> oderConstraintsVariance = oderConstraintsOutput.stream() //Alle Elemente rauswerfen, die Variance 0 haben oder keine TPH in LHS oder RHS sind
                     .filter(x -> x.stream()
-                            .anyMatch(y ->
-                                    y.stream().anyMatch(z ->
-                                            ( (z.getLhsType() instanceof PlaceholderType)
-                                                    && (((PlaceholderType) (z.getLhsType())).getVariance() != 0))
-                                            ||
-                                            ( (z.getRhsType() instanceof PlaceholderType)
-                                                    && (((PlaceholderType) (z.getRhsType())).getVariance() != 0))
-                                    )
-                            )).toList();
+                            .filter(y ->
+                                    y.stream().filter(z -> ((z.getLhsType() instanceof PlaceholderType)
+                                            && (((PlaceholderType) (z.getLhsType())).getVariance() != 0))
+                                            || ((z.getRhsType() instanceof PlaceholderType)
+                                            && (((PlaceholderType) (z.getRhsType())).getVariance() != 0))
+                                    ).findFirst().isPresent()
+                            ).findFirst().isPresent()).collect(Collectors.toList());
             if (!oderConstraintsVariance.isEmpty()) {
                 Set<Constraint<UnifyPair>> ret = oderConstraintsVariance.getFirst();
                 oderConstraintsOutput.remove(ret);
@@ -1806,7 +1807,13 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
                     //eingefuegt PL 2018-03-29 Anfang ? ext. theta hinzufuegen
                     if (a.isWildcardable()) {
                         Set<UnifyType> smaller_ext = smaller.stream().filter(x -> !(x instanceof ExtendsType) && !(x instanceof SuperType))
-                                .map(ExtendsType::new)//.accept(new freshPlaceholder(), hm));}
+                                .map(x -> {
+                                    //BinaryOperator<HashMap<PlaceholderType,PlaceholderType>> combiner = (aa,b) -> { aa.putAll(b); return aa;}; //Variablenumbenennung rausgenommen
+                                    //HashMap<PlaceholderType,PlaceholderType> hm = x.getInvolvedPlaceholderTypes().stream() //Variablen muessen wahrscheinlich erhalten bleiben
+                                    //        .reduce(new HashMap<PlaceholderType,PlaceholderType>(),
+                                    //                (aa, b)-> { aa.put(b,PlaceholderType.freshPlaceholder()); return aa; }, combiner);
+                                    return new ExtendsType(x);
+                                })//.accept(new freshPlaceholder(), hm));}
                                 .collect(Collectors.toCollection(HashSet::new));
                         smaller.addAll(smaller_ext);
                     }
@@ -1926,7 +1933,13 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
                     //writeLog("GREATER: " + greater + pair + "THETA: " + theta + "FBOUNDED: " + pair.getfBounded() + " ");
                     if (a.isWildcardable()) {
                         Set<UnifyType> greater_ext = greater.stream().filter(x -> !(x instanceof ExtendsType) && !(x instanceof SuperType))
-                                .map(SuperType::new)//.accept(new freshPlaceholder(), hm));}
+                                .map(x -> {
+                                    //BinaryOperator<HashMap<PlaceholderType,PlaceholderType>> combiner = (aa,b) -> { aa.putAll(b); return aa;}; //Variablenumbenennung rausgenommen
+                                    //HashMap<PlaceholderType,PlaceholderType> hm = x.getInvolvedPlaceholderTypes().stream() //Variablen muessen wahrscheinlich erhalten bleiben
+                                    //        .reduce(new HashMap<PlaceholderType,PlaceholderType>(),
+                                    //                (aa, b)-> { aa.put(b,PlaceholderType.freshPlaceholder()); return aa; }, combiner);
+                                    return new SuperType(x);
+                                })//.accept(new freshPlaceholder(), hm));}
                                 .collect(Collectors.toCollection(HashSet::new));
                         greater.addAll(greater_ext);
                     }
@@ -99,6 +99,7 @@ public class Variance0Case extends VarianceCase {
 
         /* FORK ANFANG */
         TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
+        typeUnifyTask.addChildTask(forkOrig);
         // schedule compute() on another thread
         CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
         resultValues = resultValues.thenCombine(forkOrigFuture,
@@ -136,6 +137,7 @@ public class Variance0Case extends VarianceCase {
             List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
             newElems.add(nSaL);
             TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
+            typeUnifyTask.addChildTask(fork);
             // schedule compute() on another thread
             CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
             resultValues = resultValues.thenCombine(forkFuture,
@@ -200,6 +202,7 @@ public class Variance0Case extends VarianceCase {
         writeLog("Not Removed: " + nextSetAsList);
 
         for (Set<UnifyPair> aPar : aParDef) {
 
             nextSetAsList.removeAll(nextSetasListOderConstraints);
             nextSetasListOderConstraints = new ArrayList<>();
             writeLog("Removed: " + nextSetasListOderConstraints);
@@ -72,6 +72,7 @@ public class Variance1Case extends VarianceCase {
 
         /* FORK ANFANG */
         TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, methodSignatureConstraint);
+        typeUnifyTask.addChildTask(forkOrig);
         // schedule compute() on another thread
         CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
         resultValues = resultValues.thenCombine(forkOrigFuture,
@@ -108,6 +109,7 @@ public class Variance1Case extends VarianceCase {
             List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
             newElems.add(nSaL);
             TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
+            typeUnifyTask.addChildTask(fork);
             // schedule compute() on another thread
             CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
             resultValues = resultValues.thenCombine(forkFuture,
@@ -60,6 +60,7 @@ public class Variance2Case extends VarianceCase {
 
         /* FORK ANFANG */
         TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
+        typeUnifyTask.addChildTask(forkOrig);
         CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
         resultValuesFuture = forkOrigFuture.thenApply((currentThreadResult) -> {
             forkOrig.writeLog("final Orig 2");
@@ -89,6 +90,7 @@ public class Variance2Case extends VarianceCase {
             List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
             newElems.add(nSaL);
             TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraintForParallel));
+            typeUnifyTask.addChildTask(fork);
             CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
             resultValuesFuture = resultValuesFuture.thenCombine(forkFuture, (resultValues, fork_res) -> {
                 if (typeUnifyTask.myIsCancelled()) {
@@ -72,6 +72,7 @@ public class VarianceM1Case extends VarianceCase {
 
         /* FORK ANFANG */
         TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, methodSignatureConstraint);
+        typeUnifyTask.addChildTask(forkOrig);
         // schedule compute() on another thread
         CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
         resultValues = resultValues.thenCombine(forkOrigFuture,
@@ -109,6 +110,7 @@ public class VarianceM1Case extends VarianceCase {
             List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
             newElems.add(nSaL);
             TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
+            typeUnifyTask.addChildTask(fork);
             // schedule compute() on another thread
             CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
             resultValues = resultValues.thenCombine(forkFuture,
@@ -50,8 +50,7 @@ public class Unifier implements Function<UnifyType, UnifyType>, Iterable<Entry<P
         Unifier tempU = new Unifier(source, target);
         // Every new substitution must be applied to previously added substitutions
         // otherwise the unifier needs to be applied multiple times to unify two terms
-        for(PlaceholderType pt : substitutions.keySet())
-            substitutions.put(pt, substitutions.get(pt).apply(tempU));
+        substitutions.replaceAll((pt, ut) -> ut.apply(tempU));
         substitutions.put(source, target);
     }
 
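Side note (not part of the diff): Map.replaceAll applies the remapping function to every entry in place, so it has the same effect as the removed keySet loop; a self-contained sketch of the equivalence (ReplaceAllDemo is hypothetical):

    import java.util.HashMap;
    import java.util.Map;

    public class ReplaceAllDemo {
        public static void main(String[] args) {
            Map<String, String> substitutions = new HashMap<>(Map.of("a", "x", "b", "y"));

            // old style: iterate the key set and put each rewritten value back
            for (String key : substitutions.keySet())
                substitutions.put(key, substitutions.get(key).toUpperCase());

            // new style: one call with the same effect
            substitutions.replaceAll((key, value) -> value.toLowerCase());

            System.out.println(substitutions);   // values are back to "x" and "y"
        }
    }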
@@ -1,5 +1,6 @@
 package finiteClosure;
 
+import de.dhbwstuttgart.core.JavaTXCompiler;
 import de.dhbwstuttgart.parser.SyntaxTreeGenerator.FCGenerator;
 import de.dhbwstuttgart.syntaxtree.ClassOrInterface;
 import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
@@ -15,14 +16,14 @@ public class SuperInterfacesTest {
     public void test() throws ClassNotFoundException {
         Collection<ClassOrInterface> classes = new ArrayList<>();
         classes.add(ASTFactory.createClass(TestClass.class));
-        System.out.println(FCGenerator.toFC(classes, ClassLoader.getSystemClassLoader(), new PlaceholderRegistry()));
+        System.out.println(FCGenerator.toFC(classes, ClassLoader.getSystemClassLoader(), JavaTXCompiler.defaultClientPlaceholderRegistry));
     }
 
     @Test
     public void testGeneric() throws ClassNotFoundException {
         Collection<ClassOrInterface> classes = new ArrayList<>();
         classes.add(ASTFactory.createClass(TestClassGeneric.class));
-        System.out.println(FCGenerator.toFC(classes, ClassLoader.getSystemClassLoader(), new PlaceholderRegistry()));
+        System.out.println(FCGenerator.toFC(classes, ClassLoader.getSystemClassLoader(), JavaTXCompiler.defaultClientPlaceholderRegistry));
     }
 }
src/test/java/server/ServerTest.java (new file, 141 lines)
@@ -0,0 +1,141 @@
+package server;
+
+import de.dhbwstuttgart.core.JavaTXCompiler;
+import de.dhbwstuttgart.core.JavaTXServer;
+import de.dhbwstuttgart.environment.CompilationEnvironment;
+import de.dhbwstuttgart.server.SocketClient;
+import de.dhbwstuttgart.syntaxtree.ClassOrInterface;
+import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
+import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
+import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
+import de.dhbwstuttgart.typeinference.constraints.Pair;
+import de.dhbwstuttgart.typeinference.result.ResultSet;
+import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
+import de.dhbwstuttgart.typeinference.unify.TypeUnify;
+import de.dhbwstuttgart.typeinference.unify.UnifyContext;
+import de.dhbwstuttgart.typeinference.unify.UnifyResultListenerImpl;
+import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
+import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;
+import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
+import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
+import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
+import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
+import java.io.File;
+import java.io.IOException;
+import java.io.Writer;
+import java.nio.file.Path;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.stream.Collectors;
+import org.junit.Ignore;
+import org.junit.Test;
+import targetast.TestCodegen;
+import static org.junit.Assert.*;
+
+@Ignore("Server tests create huge overhead, so they are ignored until required")
+public class ServerTest {
+
+    @Test
+    public void checkServer_Scalar() throws IOException, ClassNotFoundException {
+        compareLocalAndServerResult("Scalar.jav");
+    }
+
+    @Test
+    public void checkServer_Matrix() throws IOException, ClassNotFoundException {
+        compareLocalAndServerResult("Matrix.jav");
+    }
+
+    protected void compareLocalAndServerResult(final String filename) throws IOException, ClassNotFoundException {
+        File file = Path.of(TestCodegen.path.toString(), filename).toFile();
+
+        // get information from compiler
+        JavaTXCompiler compiler = new JavaTXCompiler(List.of(file));
+
+        // NOW: simulate the call to method typeInference. Once via server and once locally
+        // if everything works, they should neither interfere with each other, nor differ in their result
+
+        // get the values from the compiler
+        PlaceholderRegistry placeholderRegistry = JavaTXCompiler.defaultClientPlaceholderRegistry; //new PlaceholderRegistry();
+        ConstraintSet<Pair> cons = compiler.getConstraints(file);
+        ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(compiler, cons, placeholderRegistry);
+        unifyCons = unifyCons.map(ServerTest::distributeInnerVars);
+
+        FiniteClosure finiteClosure = UnifyTypeFactory.generateFC(
+                ServerTest.getAllClasses(compiler, file).stream().toList(),
+                Writer.nullWriter(),
+                compiler.classLoader,
+                compiler,
+                placeholderRegistry
+        );
+        UnifyTaskModel usedTasks = new UnifyTaskModel();
+
+
+        // create the server
+        JavaTXServer server = new JavaTXServer(5000);
+        try (ExecutorService executor = Executors.newSingleThreadExecutor()) {
+            // run the server in a separate thread
+            executor.submit(server::listen);
+        }
+
+        // run the unification on the server
+        PlaceholderRegistry prCopy = JavaTXCompiler.defaultClientPlaceholderRegistry.deepClone();
+        UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
+        UnifyContext context = new UnifyContext(Writer.nullWriter(), false, true, urm, usedTasks, prCopy);
+        SocketClient socketClient = new SocketClient("ws://localhost:5000");
+        List<ResultSet> serverResult = socketClient.execute(finiteClosure, cons, unifyCons, context);
+
+        // close the server
+        server.forceStop();
+
+        // run the unification on the client (do this second, because it changes the initial placeholder registry)
+        UnifyResultListenerImpl li = new UnifyResultListenerImpl();
+        urm.addUnifyResultListener(li);
+        TypeUnify.unifyParallel(unifyCons.getUndConstraints(), unifyCons.getOderConstraints(), finiteClosure, context);
+        List<ResultSet> clientResult = li.getResults();
+
+
+        // create the bytecode from both results
+        var sf = compiler.sourceFiles.get(file);
+        var serverBytecode = compiler.generateBytecode(sf, serverResult);
+        var localBytecode = compiler.generateBytecode(sf, clientResult);
+
+        // test if the generated code is the same
+        for (var serverEntry : serverBytecode.entrySet()) {
+            var serverBytes = serverEntry.getValue();
+            var localBytes = localBytecode.get(serverEntry.getKey());
+            assertArrayEquals(serverBytes, localBytes);
+        }
+    }
+
+
+    protected static UnifyPair distributeInnerVars(UnifyPair x) {
+        UnifyType lhs, rhs;
+        if (((lhs = x.getLhsType()) instanceof PlaceholderType) && ((rhs = x.getRhsType()) instanceof PlaceholderType) && (((PlaceholderType) lhs).isInnerType() || ((PlaceholderType) rhs).isInnerType())) {
+            ((PlaceholderType) lhs).setInnerType(true);
+            ((PlaceholderType) rhs).setInnerType(true);
+        }
+        return x;
+    }
+
+    protected static Set<ClassOrInterface> getAllClasses(JavaTXCompiler compiler, File file)
+            throws ClassNotFoundException, IOException
+    {
+        var sf = compiler.sourceFiles.get(file);
+        Set<ClassOrInterface> allClasses = new HashSet<>();
+        allClasses.addAll(compiler.getAvailableClasses(sf));
+        allClasses.addAll(sf.getClasses());
+        var newClasses = CompilationEnvironment.loadDefaultPackageClasses(sf.getPkgName(), file, compiler).stream().map(ASTFactory::createClass).collect(Collectors.toSet());
+        for (var clazz : newClasses) {
+            // Don't load classes that get recompiled
+            if (sf.getClasses().stream().anyMatch(nf -> nf.getClassName().equals(clazz.getClassName())))
+                continue;
+            if (allClasses.stream().noneMatch(old -> old.getClassName().equals(clazz.getClassName())))
+                allClasses.add(clazz);
+        }
+        return allClasses;
+    }
+}