forked from i21017/JavaCompilerCore
Compare commits
8 Commits
9ec32970ee ... e8335715fb
| Author | SHA1 | Date |
|---|---|---|
| | e8335715fb | |
| | e574174962 | |
| | 6df99fcacb | |
| | d3fbaea8c7 | |
| | 3415c250a3 | |
| | 1b905cb3e2 | |
| | d02c3583e9 | |
| | ca98e83fd2 | |
pom.xml (5 changed lines)
```
@@ -59,6 +59,11 @@ http://maven.apache.org/maven-v4_0_0.xsd">
<artifactId>jackson-databind</artifactId>
<version>2.17.2</version>
</dependency>
<dependency>
<groupId>com.diogonunes</groupId>
<artifactId>JColor</artifactId>
<version>5.5.1</version>
</dependency>
</dependencies>

<build>
```
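The new JColor dependency suggests colored console output for the logging introduced in this change set. A minimal usage sketch, assuming the standard JColor 5.x API (this snippet is not part of the diff):

```java
import com.diogonunes.jcolor.Attribute;
import static com.diogonunes.jcolor.Ansi.colorize;

public class ColorDemo {
    public static void main(String[] args) {
        // JColor wraps text in ANSI escape codes for terminal colors.
        System.out.println(colorize("ERROR: something failed", Attribute.RED_TEXT(), Attribute.BOLD()));
        System.out.println(colorize("INFO: all good", Attribute.GREEN_TEXT()));
    }
}
```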
```
@@ -8,6 +8,7 @@ import de.dhbwstuttgart.target.generate.ASTToTargetAST;
import de.dhbwstuttgart.target.tree.*;
import de.dhbwstuttgart.target.tree.expression.*;
import de.dhbwstuttgart.target.tree.type.*;
import de.dhbwstuttgart.util.Logger;
import org.objectweb.asm.*;

import java.lang.invoke.*;
@@ -19,6 +20,8 @@ import static de.dhbwstuttgart.target.tree.expression.TargetBinaryOp.*;
import static de.dhbwstuttgart.target.tree.expression.TargetLiteral.*;

public class Codegen {
public static Logger logger = new Logger("codegen");

private final TargetStructure clazz;
private final ClassWriter cw;
public final String className;
@@ -1317,7 +1320,7 @@ public class Codegen {
types.add(Type.getObjectType(guard.inner().type().getInternalName()));
// TODO Same here we need to evaluate constant;
} else {
System.out.println(label);
logger.info(label);
throw new NotImplementedException();
}
}
```
```
@@ -120,7 +120,7 @@ public class FunNGenerator {
superFunNMethodDescriptor.append(")V");
}

System.out.println(superFunNMethodSignature);
Codegen.logger.info(superFunNMethodSignature);

ClassWriter classWriter = new ClassWriter(0);
MethodVisitor methodVisitor;
```
```
@@ -1,13 +1,14 @@
package de.dhbwstuttgart.core;

import de.dhbwstuttgart.util.Logger;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.util.*;

public class ConsoleInterface {
private static final String directory = System.getProperty("user.dir");

public static Logger.LogLevel logLevel = Logger.LogLevel.ERROR;
public static boolean writeLogFiles = false;

public static void main(String[] args) throws IOException, ClassNotFoundException {
List<File> input = new ArrayList<>();
@@ -25,7 +26,9 @@ public class ConsoleInterface {
"\t-cp\tSet Classpath\n" +
"\t-d\tSet destination directory\n" +
"\t[--server-mode <port>]\n" +
"\t[--unify-server <url>]\n");
"\t[--unify-server <url>]\n" +
"\t[--write-logs]\n" +
"\t[-v|-vv-|-vvv]");
System.exit(1);
}
while (it.hasNext()) {
@@ -43,6 +46,15 @@ public class ConsoleInterface {
serverPort = Optional.of(Integer.parseInt(it.next()));
} else if (arg.equals("--unify-server")) {
unifyServer = Optional.of(it.next());
} else if (arg.equals("--write-logs")) {
ConsoleInterface.writeLogFiles = true;
} else if (arg.startsWith("-v")) {
logLevel = switch (arg) {
case "-v" -> Logger.LogLevel.WARNING;
case "-vv" -> Logger.LogLevel.INFO;
case "-vvv" -> Logger.LogLevel.DEBUG;
default -> throw new IllegalArgumentException("Argument " + arg + " is not a valid verbosity level");
};
} else {
input.add(new File(arg));
}
```
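The new -v/-vv/-vvv flags set a global log level consumed by de.dhbwstuttgart.util.Logger, whose implementation is not included in this diff. A minimal sketch of how such a level-filtered logger could look; the method set and filtering logic here are assumptions, not the project's actual class:

```java
package de.dhbwstuttgart.util;

import de.dhbwstuttgart.core.ConsoleInterface;

// Hypothetical sketch of a level-filtered logger; the real Logger in this repository may differ.
public class Logger {
    public enum LogLevel { ERROR, WARNING, INFO, DEBUG }

    private final String name;

    public Logger(String name) { this.name = name; }

    private void log(LogLevel level, String msg) {
        // Only print messages at or below the globally configured verbosity (set via -v/-vv/-vvv).
        if (level.ordinal() <= ConsoleInterface.logLevel.ordinal()) {
            System.out.println("[" + name + "] " + level + ": " + msg);
        }
    }

    public void error(String msg)   { log(LogLevel.ERROR, msg); }
    public void warning(String msg) { log(LogLevel.WARNING, msg); }
    public void info(String msg)    { log(LogLevel.INFO, msg); }
    public void debug(String msg)   { log(LogLevel.DEBUG, msg); }
}
```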
```
@@ -53,6 +53,7 @@ import de.dhbwstuttgart.typeinference.unify.UnifyResultListenerImpl;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;

import de.dhbwstuttgart.util.Logger;
import java.io.*;
import java.lang.reflect.Modifier;
import java.nio.file.Path;
@@ -67,13 +68,13 @@ public class JavaTXCompiler {

// do not use this in any code, that can be executed serverside!
public static PlaceholderRegistry defaultClientPlaceholderRegistry = new PlaceholderRegistry();
public static Logger defaultLogger = new Logger();

// public static JavaTXCompiler INSTANCE;
final CompilationEnvironment environment;
Boolean resultmodel = true;
public final Map<File, SourceFile> sourceFiles = new HashMap<>();

Boolean log = false; //gibt an ob ein Log-File nach System.getProperty("user.dir")+""/logFiles/"" geschrieben werden soll?
public volatile UnifyTaskModel usedTasks = new UnifyTaskModel();
public final DirectoryClassLoader classLoader;

@@ -90,11 +91,6 @@ public class JavaTXCompiler {
this(Arrays.asList(sourceFile), List.of(), new File("."), Optional.empty());
}

public JavaTXCompiler(File sourceFile, Boolean log) throws IOException, ClassNotFoundException {
this(sourceFile);
this.log = log;
}

public JavaTXCompiler(List<File> sourceFiles) throws IOException, ClassNotFoundException {
this(sourceFiles, List.of(), new File("."), Optional.empty());
}
```
```
@@ -317,52 +313,51 @@ public class JavaTXCompiler {
Set<Set<UnifyPair>> results = new HashSet<>();
UnifyResultModel urm = null;
// urm.addUnifyResultListener(resultListener);
UnifyContext context = new UnifyContext(logFile, log, true, urm, usedTasks, defaultClientPlaceholderRegistry);
try {
logFile = logFile == null ? new FileWriter(new File("log_" + sourceFiles.keySet().iterator().next().getName())) : logFile;
IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses, logFile, getClassLoader(), this, context.placeholderRegistry());
System.out.println(finiteClosure);
urm = new UnifyResultModel(cons, finiteClosure);
urm.addUnifyResultListener(resultListener);
ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons, context.placeholderRegistry());
logFile = logFile == null ? new FileWriter("log_" + sourceFiles.keySet().iterator().next().getName()) : logFile;
Logger logger = new Logger(logFile, "TypeInferenceAsync");
UnifyContext context = new UnifyContext(logger, true, urm, usedTasks, defaultClientPlaceholderRegistry);

Function<UnifyPair, UnifyPair> distributeInnerVars = x -> {
UnifyType lhs, rhs;
if (((lhs = x.getLhsType()) instanceof PlaceholderType) && ((rhs = x.getRhsType()) instanceof PlaceholderType) && (((PlaceholderType) lhs).isInnerType() || ((PlaceholderType) rhs).isInnerType())) {
((PlaceholderType) lhs).setInnerType(true);
((PlaceholderType) rhs).setInnerType(true);
}
return x;
IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses, logger, getClassLoader(), this, context.placeholderRegistry());
logger.info(finiteClosure.toString());
urm = new UnifyResultModel(cons, finiteClosure);
urm.addUnifyResultListener(resultListener);
ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons, context.placeholderRegistry());

};
logFile.write(unifyCons.toString());
unifyCons = unifyCons.map(distributeInnerVars);
logFile.write(unifyCons.toString());
// Set<Set<UnifyPair>> results = new HashSet<>(); Nach vorne gezogen
logFile.write("FC:\\" + finiteClosure.toString() + "\n");
for (SourceFile f : this.sourceFiles.values()) {
logFile.write(ASTTypePrinter.print(f));
Function<UnifyPair, UnifyPair> distributeInnerVars = x -> {
UnifyType lhs, rhs;
if (((lhs = x.getLhsType()) instanceof PlaceholderType) && ((rhs = x.getRhsType()) instanceof PlaceholderType) && (((PlaceholderType) lhs).isInnerType() || ((PlaceholderType) rhs).isInnerType())) {
((PlaceholderType) lhs).setInnerType(true);
((PlaceholderType) rhs).setInnerType(true);
}
// logFile.flush();
Set<PlaceholderType> varianceTPHold;
Set<PlaceholderType> varianceTPH = new HashSet<>();
varianceTPH = varianceInheritanceConstraintSet(unifyCons);
return x;

/*
* PL 2018-11-07 wird in varianceInheritanceConstraintSet erledigt do { //PL 2018-11-05 Huellenbildung Variance auf alle TPHs der Terme auf der jeweiligen //anderen Seite übertragen varianceTPHold = new HashSet<>(varianceTPH); varianceTPH = varianceInheritanceConstraintSet(unifyCons); unifyCons.map( y -> { if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType() instanceof PlaceholderType)) { if (((PlaceholderType)y.getLhsType()).getVariance() != 0 &&
* ((PlaceholderType)y.getRhsType()).getVariance() == 0) { ((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType( )).getVariance()); } if (((PlaceholderType)y.getLhsType()).getVariance() == 0 && ((PlaceholderType)y.getRhsType()).getVariance() != 0) { ((PlaceholderType)y.getLhsType()).setVariance(((PlaceholderType)y.getRhsType( )).getVariance()); } } return y; } ); } while (!varianceTPHold.equals(varianceTPH));
*/

// Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure,
// logFile, log);
// Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
List<Set<Constraint<UnifyPair>>> oderConstraints = unifyCons.getOderConstraints()/*
* .stream().map(x -> { Set<Set<UnifyPair>> ret = new HashSet<>(); for (Constraint<UnifyPair> y : x) { ret.add(new HashSet<>(y)); } return ret; }).collect(Collectors. toCollection(ArrayList::new))
*/;
TypeUnify.unifyAsync(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
} catch (IOException e) {
System.err.println("kein LogFile");
};
logger.debug(unifyCons.toString());
unifyCons = unifyCons.map(distributeInnerVars);
logger.debug(unifyCons.toString());
// Set<Set<UnifyPair>> results = new HashSet<>(); Nach vorne gezogen
logger.debug("FC:\\" + finiteClosure.toString() + "\n");
for (SourceFile f : this.sourceFiles.values()) {
logger.debug(ASTTypePrinter.print(f));
}
// logFile.flush();
Set<PlaceholderType> varianceTPHold;
Set<PlaceholderType> varianceTPH = new HashSet<>();
varianceTPH = varianceInheritanceConstraintSet(unifyCons);

/*
* PL 2018-11-07 wird in varianceInheritanceConstraintSet erledigt do { //PL 2018-11-05 Huellenbildung Variance auf alle TPHs der Terme auf der jeweiligen //anderen Seite übertragen varianceTPHold = new HashSet<>(varianceTPH); varianceTPH = varianceInheritanceConstraintSet(unifyCons); unifyCons.map( y -> { if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType() instanceof PlaceholderType)) { if (((PlaceholderType)y.getLhsType()).getVariance() != 0 &&
* ((PlaceholderType)y.getRhsType()).getVariance() == 0) { ((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType( )).getVariance()); } if (((PlaceholderType)y.getLhsType()).getVariance() == 0 && ((PlaceholderType)y.getRhsType()).getVariance() != 0) { ((PlaceholderType)y.getLhsType()).setVariance(((PlaceholderType)y.getRhsType( )).getVariance()); } } return y; } ); } while (!varianceTPHold.equals(varianceTPH));
*/

// Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure,
// logFile, log);
// Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
List<Set<Constraint<UnifyPair>>> oderConstraints = unifyCons.getOderConstraints()/*
* .stream().map(x -> { Set<Set<UnifyPair>> ret = new HashSet<>(); for (Constraint<UnifyPair> y : x) { ret.add(new HashSet<>(y)); } return ret; }).collect(Collectors. toCollection(ArrayList::new))
*/;
TypeUnify.unifyAsync(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);

return urm;
}
```
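The recurring pattern in this and the following hunk is that raw logFile.write(...) calls are replaced by a Logger constructed over the existing Writer (new Logger(logFile, "TypeInferenceAsync")) and handed to UnifyContext. The constructor call is visible in the diff, but the Logger internals are not; a sketch of how such a wrapper might forward messages, with the method bodies being assumptions:

```java
import java.io.IOException;
import java.io.Writer;

// Hypothetical sketch: a logger that tags messages and forwards them to a Writer.
// The real de.dhbwstuttgart.util.Logger exposes a Logger(Writer, String) constructor in the diff,
// but its implementation is not shown there.
public class WriterBackedLogger {
    private final Writer out;
    private final String tag;

    public WriterBackedLogger(Writer out, String tag) {
        this.out = out;
        this.tag = tag;
    }

    public void debug(String msg) { write("DEBUG", msg); }
    public void info(String msg)  { write("INFO", msg); }

    private void write(String level, String msg) {
        try {
            out.write("[" + tag + "] " + level + ": " + msg + "\n");
        } catch (IOException e) {
            // Assumption: logging failures must never break type inference, so they are only reported.
            System.err.println("could not write log file: " + e.getMessage());
        }
    }
}
```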
```
@@ -383,103 +378,103 @@ public class JavaTXCompiler {
final ConstraintSet<Pair> cons = getConstraints(file);
Set<Set<UnifyPair>> results = new HashSet<>();
PlaceholderRegistry placeholderRegistry = new PlaceholderRegistry();
try {
var logFolder = new File(System.getProperty("user.dir") + "/logFiles/");
if (log) logFolder.mkdirs();
Writer logFile = log ? new FileWriter(new File(logFolder, "log_" + sourceFiles.keySet().iterator().next().getName())) : new OutputStreamWriter(new NullOutputStream());
FiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses.stream().toList(), logFile, classLoader, this, placeholderRegistry);
System.out.println(finiteClosure);
ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons, placeholderRegistry);
System.out.println("xxx1");
Function<UnifyPair, UnifyPair> distributeInnerVars = x -> {
UnifyType lhs, rhs;
if (((lhs = x.getLhsType()) instanceof PlaceholderType) && ((rhs = x.getRhsType()) instanceof PlaceholderType) && (((PlaceholderType) lhs).isInnerType() || ((PlaceholderType) rhs).isInnerType())) {
((PlaceholderType) lhs).setInnerType(true);
((PlaceholderType) rhs).setInnerType(true);
}
return x;

};
var logFolder = new File(System.getProperty("user.dir") + "/logFiles/");
if (ConsoleInterface.writeLogFiles && !logFolder.mkdirs()) throw new RuntimeException("Could not creat directoy for log files: " + logFolder);
Writer logFile = ConsoleInterface.writeLogFiles ? new FileWriter(new File(logFolder, "log_" + sourceFiles.keySet().iterator().next().getName())) : new OutputStreamWriter(new NullOutputStream());
Logger logger = new Logger(logFile, "TypeInference");

logFile.write("Unify:" + unifyCons.toString());
System.out.println("Unify:" + unifyCons.toString());
unifyCons = unifyCons.map(distributeInnerVars);
logFile.write("\nUnify_distributeInnerVars: " + unifyCons.toString());
// Set<Set<UnifyPair>> results = new HashSet<>(); Nach vorne gezogen
logFile.write("FC:\\" + finiteClosure.toString() + "\n");
logFile.write(ASTTypePrinter.print(sf));
System.out.println(ASTTypePrinter.print(sf));
// logFile.flush();
System.out.println("Unify nach Oder-Constraints-Anpassung:" + unifyCons.toString());
Set<PlaceholderType> varianceTPHold;
Set<PlaceholderType> varianceTPH = new HashSet<>();
varianceTPH = varianceInheritanceConstraintSet(unifyCons);

/*
* PL 2018-11-07 wird in varianceInheritanceConstraintSet erledigt do { //PL 2018-11-05 Huellenbildung Variance auf alle TPHs der Terme auf der jeweiligen //anderen Seite übertragen varianceTPHold = new HashSet<>(varianceTPH); varianceTPH = varianceInheritanceConstraintSet(unifyCons); unifyCons.map( y -> { if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType() instanceof PlaceholderType)) { if (((PlaceholderType)y.getLhsType()).getVariance() != 0 &&
* ((PlaceholderType)y.getRhsType()).getVariance() == 0) { ((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType( )).getVariance()); } if (((PlaceholderType)y.getLhsType()).getVariance() == 0 && ((PlaceholderType)y.getRhsType()).getVariance() != 0) { ((PlaceholderType)y.getLhsType()).setVariance(((PlaceholderType)y.getRhsType( )).getVariance()); } } return y; } ); } while (!varianceTPHold.equals(varianceTPH));
*/

// Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure,
// logFile, log);
// Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
List<Set<Constraint<UnifyPair>>> oderConstraints = unifyCons.getOderConstraints()// .stream().map(x -> {
/*
* Set<Set<UnifyPair>> ret = new HashSet<>(); for (Constraint<UnifyPair> y : x) { ret.add(new HashSet<>(y)); } return ret; }).collect(Collectors.toCollection(ArrayList::new))
*/;

if (unifyServer.isPresent()) {
UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
UnifyContext context = new UnifyContext(logFile, log, true, urm, usedTasks, placeholderRegistry);
SocketClient socketClient = new SocketClient(unifyServer.get());
return socketClient.execute(finiteClosure, cons, unifyCons, context);
}
else if (resultmodel) {
/* UnifyResultModel Anfang */
UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
UnifyResultListenerImpl li = new UnifyResultListenerImpl();
urm.addUnifyResultListener(li);
UnifyContext context = new UnifyContext(logFile, log, true, urm, usedTasks, placeholderRegistry);
TypeUnify.unifyParallel(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
System.out.println("RESULT Final: " + li.getResults());
System.out.println("Constraints for Generated Generics: " + " ???");
logFile.write("RES_FINAL: " + li.getResults().toString() + "\n");
// logFile.flush();
return li.getResults();
FiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses.stream().toList(), logger, classLoader, this, placeholderRegistry);
logger.info(finiteClosure.toString());
ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(this, cons, placeholderRegistry);
logger.info("xxx1");
Function<UnifyPair, UnifyPair> distributeInnerVars = x -> {
UnifyType lhs, rhs;
if (((lhs = x.getLhsType()) instanceof PlaceholderType) && ((rhs = x.getRhsType()) instanceof PlaceholderType) && (((PlaceholderType) lhs).isInnerType() || ((PlaceholderType) rhs).isInnerType())) {
((PlaceholderType) lhs).setInnerType(true);
((PlaceholderType) rhs).setInnerType(true);
}
/* UnifyResultModel End */
else {
// Set<Set<UnifyPair>> result = unify.unify(unifyCons.getUndConstraints(),
// oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons,
// finiteClosure));
UnifyContext context = new UnifyContext(logFile, log, false, new UnifyResultModel(cons, finiteClosure), usedTasks, placeholderRegistry);
Set<Set<UnifyPair>> result = TypeUnify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
System.out.println("RESULT: " + result);
logFile.write("RES: " + result.toString() + "\n");
// logFile.flush();
results.addAll(result);
return x;

results = results.stream().map(x -> {
Optional<Set<UnifyPair>> res = new RuleSet(placeholderRegistry).subst(x.stream().map(y -> {
if (y.getPairOp() == PairOperator.SMALLERDOTWC)
y.setPairOp(PairOperator.EQUALSDOT);
return y; // alle Paare a <.? b erden durch a =. b ersetzt
}).collect(Collectors.toCollection(HashSet::new)));
if (res.isPresent()) {// wenn subst ein Erg liefert wurde was veraendert
return new TypeUnifyTask(context).applyTypeUnificationRules(res.get(), finiteClosure);
} else
return x; // wenn nichts veraendert wurde wird x zurueckgegeben
}).collect(Collectors.toCollection(HashSet::new));
System.out.println("RESULT Final: " + results);
System.out.println("Constraints for Generated Generics: " + " ???");
logFile.write("RES_FINAL: " + results.toString() + "\n");
// logFile.flush();
logFile.write("PLACEHOLDERS: " + placeholderRegistry);
// logFile.flush();
}
} catch (IOException e) {
System.err.println("kein LogFile");
};

logger.debug("Unify:" + unifyCons.toString());
logger.info("Unify:" + unifyCons.toString());
unifyCons = unifyCons.map(distributeInnerVars);
logger.debug("\nUnify_distributeInnerVars: " + unifyCons.toString());
// Set<Set<UnifyPair>> results = new HashSet<>(); Nach vorne gezogen
logger.debug("FC:\\" + finiteClosure.toString() + "\n");
logger.debug(ASTTypePrinter.print(sf));
logger.info(ASTTypePrinter.print(sf));
// logFile.flush();
logger.info("Unify nach Oder-Constraints-Anpassung:" + unifyCons.toString());
Set<PlaceholderType> varianceTPHold;
Set<PlaceholderType> varianceTPH = new HashSet<>();
varianceTPH = varianceInheritanceConstraintSet(unifyCons);

/*
* PL 2018-11-07 wird in varianceInheritanceConstraintSet erledigt do { //PL 2018-11-05 Huellenbildung Variance auf alle TPHs der Terme auf der jeweiligen //anderen Seite übertragen varianceTPHold = new HashSet<>(varianceTPH); varianceTPH = varianceInheritanceConstraintSet(unifyCons); unifyCons.map( y -> { if ((y.getLhsType() instanceof PlaceholderType) && (y.getRhsType() instanceof PlaceholderType)) { if (((PlaceholderType)y.getLhsType()).getVariance() != 0 &&
* ((PlaceholderType)y.getRhsType()).getVariance() == 0) { ((PlaceholderType)y.getRhsType()).setVariance(((PlaceholderType)y.getLhsType( )).getVariance()); } if (((PlaceholderType)y.getLhsType()).getVariance() == 0 && ((PlaceholderType)y.getRhsType()).getVariance() != 0) { ((PlaceholderType)y.getLhsType()).setVariance(((PlaceholderType)y.getRhsType( )).getVariance()); } } return y; } ); } while (!varianceTPHold.equals(varianceTPH));
*/

// Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure,
// logFile, log);
// Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
List<Set<Constraint<UnifyPair>>> oderConstraints = unifyCons.getOderConstraints()// .stream().map(x -> {
/*
* Set<Set<UnifyPair>> ret = new HashSet<>(); for (Constraint<UnifyPair> y : x) { ret.add(new HashSet<>(y)); } return ret; }).collect(Collectors.toCollection(ArrayList::new))
*/;

if (unifyServer.isPresent()) {
UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
UnifyContext context = new UnifyContext(logger, true, urm, usedTasks, placeholderRegistry);
SocketClient socketClient = new SocketClient(unifyServer.get());
return socketClient.execute(finiteClosure, cons, unifyCons, context);
}
else if (resultmodel) {
/* UnifyResultModel Anfang */
UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
UnifyResultListenerImpl li = new UnifyResultListenerImpl();
urm.addUnifyResultListener(li);
UnifyContext context = new UnifyContext(logger, true, urm, usedTasks, placeholderRegistry);
TypeUnify.unifyParallel(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
logger.info("RESULT Final: " + li.getResults());
logger.info("Constraints for Generated Generics: " + " ???");
logFile.write("RES_FINAL: " + li.getResults().toString() + "\n");
// logFile.flush();
return li.getResults();
}
/* UnifyResultModel End */
else {
// Set<Set<UnifyPair>> result = unify.unify(unifyCons.getUndConstraints(),
// oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons,
// finiteClosure));
UnifyContext context = new UnifyContext(logger, false, new UnifyResultModel(cons, finiteClosure), usedTasks, placeholderRegistry);
Set<Set<UnifyPair>> result = TypeUnify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
logger.info("RESULT: " + result);
logFile.write("RES: " + result.toString() + "\n");
// logFile.flush();
results.addAll(result);

results = results.stream().map(x -> {
Optional<Set<UnifyPair>> res = new RuleSet(placeholderRegistry).subst(x.stream().map(y -> {
if (y.getPairOp() == PairOperator.SMALLERDOTWC)
y.setPairOp(PairOperator.EQUALSDOT);
return y; // alle Paare a <.? b erden durch a =. b ersetzt
}).collect(Collectors.toCollection(HashSet::new)));
if (res.isPresent()) {// wenn subst ein Erg liefert wurde was veraendert
return new TypeUnifyTask(context).applyTypeUnificationRules(res.get(), finiteClosure);
} else
return x; // wenn nichts veraendert wurde wird x zurueckgegeben
}).collect(Collectors.toCollection(HashSet::new));
logger.info("RESULT Final: " + results);
logger.info("Constraints for Generated Generics: " + " ???");
logger.debug("RES_FINAL: " + results.toString() + "\n");
// logFile.flush();
logger.debug("PLACEHOLDERS: " + placeholderRegistry);
// logFile.flush();
}

return results.stream().map((unifyPairs -> new ResultSet(UnifyTypeFactory.convert(unifyPairs, Pair.generateTPHMap(cons), placeholderRegistry)))).collect(Collectors.toList());
}
```
```
@@ -666,15 +661,15 @@ public class JavaTXCompiler {
FileOutputStream output;
for (JavaClassName name : classFiles.keySet()) {
byte[] bytecode = classFiles.get(name);
System.out.println("generating " + name + ".class file ...");
defaultLogger.info("generating " + name + ".class file ...");
var subPath = preserveHierarchy ? path : Path.of(path.toString(), name.getPackageName().split("\\.")).toFile();
File outputFile = new File(subPath, name.getClassName() + ".class");
outputFile.getAbsoluteFile().getParentFile().mkdirs();
System.out.println(outputFile);
defaultLogger.info(outputFile.toString());
output = new FileOutputStream(outputFile);
output.write(bytecode);
output.close();
System.out.println(name + ".class file generated");
defaultLogger.info(name + ".class file generated");
}
}
```
```
@@ -7,6 +7,7 @@ import de.dhbwstuttgart.parser.antlr.Java17Parser;
import de.dhbwstuttgart.parser.scope.JavaClassRegistry;
import de.dhbwstuttgart.syntaxtree.SourceFile;

import de.dhbwstuttgart.util.Logger;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
@@ -17,6 +18,9 @@ import java.util.ArrayList;
import java.util.List;

public class JavaTXParser {

public static Logger logger = new Logger("Parser");

public static Java17Parser.SourceFileContext parse(File source) throws IOException, java.lang.ClassNotFoundException {
InputStream stream = new FileInputStream(source);
// DEPRECATED: ANTLRInputStream input = new ANTLRInputStream(stream);
```
```
@@ -1,5 +1,6 @@
package de.dhbwstuttgart.parser.SyntaxTreeGenerator;

import de.dhbwstuttgart.parser.JavaTXParser;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
@@ -259,7 +260,7 @@ public class StatementGenerator {
ret.setStatement();
return ret;
default:
System.out.println(stmt.getClass());
JavaTXParser.logger.info(stmt.getClass());
throw new NotImplementedException();
}
}
```
```
@@ -74,7 +74,7 @@ public class TypeGenerator {
throw new NotImplementedException();
}
} else if (!typeContext.LBRACK().isEmpty()) { // ArrayType über eckige Klammer prüfen
// System.out.println(unannTypeContext.getText());
// JavaTXParser.logger.info(unannTypeContext.getText());
throw new NotImplementedException();
}
/*
```
```
@@ -12,6 +12,7 @@ import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.util.Logger;
import java.net.URI;
import java.util.HashMap;
import java.util.List;
@@ -29,6 +30,8 @@ import org.java_websocket.handshake.ServerHandshake;
*/
public class SocketClient extends WebSocketClient {

public static Logger logger = new Logger("SocketClient");

// use a latch to wait until the connection is closed by the remote host
private final CountDownLatch closeLatch = new CountDownLatch(1);
// temporarily: The received unify result
@@ -125,19 +128,19 @@ public class SocketClient extends WebSocketClient {

@Override
public void onOpen(ServerHandshake handshakedata) {
System.out.println("Connected to server with status " + handshakedata.getHttpStatus());
logger.info("Connected to server with status " + handshakedata.getHttpStatus());
}

@Override
public void onMessage(String message) {
// System.out.println("received: " + message);
// logger.info("received: " + message);
IPacket packet = PacketContainer.deserialize(message);
this.handleReceivedPacket(packet);
}

@Override
public void onClose(int code, String reason, boolean remote) {
System.out.println(
logger.info(
"Disconnected from server " +
"with code " + code + " " +
(reason.isEmpty() ? "" : "and reason " + reason + " ") +
@@ -148,7 +151,7 @@ public class SocketClient extends WebSocketClient {

@Override
public void onError(Exception e) {
System.out.println("Error: " + e.getMessage());
logger.error("Error: " + e.getMessage());
e.printStackTrace();
}
```
```
@@ -1,11 +1,13 @@
package de.dhbwstuttgart.server;

import com.fasterxml.jackson.core.JsonProcessingException;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.server.packet.ErrorPacket;
import de.dhbwstuttgart.server.packet.IClientToServerPacket;
import de.dhbwstuttgart.server.packet.IPacket;
import de.dhbwstuttgart.server.packet.MessagePacket;
import de.dhbwstuttgart.server.packet.PacketContainer;
import de.dhbwstuttgart.util.Logger;
import java.net.InetSocketAddress;
import java.util.Objects;
import java.util.UUID;
@@ -17,19 +19,17 @@ import java.util.concurrent.atomic.AtomicInteger;
import org.java_websocket.WebSocket;
import org.java_websocket.handshake.ClientHandshake;
import org.java_websocket.server.WebSocketServer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class SocketServer extends WebSocketServer {

public static Logger logger = new Logger("SocketServer");

/**
* Increase this every time a breaking change to the server communication is done.
* This will prevent errors when server version and client version do not match.
*/
public static final String packetProtocolVersion = "1";

public static final Logger log = LoggerFactory.getLogger(SocketServer.class);

public SocketServer(int port) {
super(new InetSocketAddress(port));
}
@@ -55,7 +55,7 @@ public class SocketServer extends WebSocketServer {

SocketData socketData = new SocketData(UUID.randomUUID().toString());
webSocket.setAttachment(socketData);
System.out.println("New connection: " + socketData.id + " (with ppv " + ppv + ")");
logger.info("New connection: " + socketData.id + " (with ppv " + ppv + ")");

try {
sendMessage(webSocket, "Welcome to the server!");
@@ -75,7 +75,7 @@ public class SocketServer extends WebSocketServer {

// and finally, when your program wants to exit
} catch (Exception e) {
log.error("e: ", e);
logger.exception(e);
webSocket.close(1, e.getMessage());
}
}
@@ -83,8 +83,8 @@ public class SocketServer extends WebSocketServer {
@Override
public void onClose(WebSocket webSocket, int code, String reason, boolean remote) {
SocketData socketData = webSocket.getAttachment();
System.out.println("Connection closed: " + socketData.id);
System.out.println(
logger.info("Connection closed: " + socketData.id);
logger.info(
"Disconnected client " + socketData.id + " " +
"with code " + code + " " +
(reason.isEmpty() ? "" : "and reason " + reason + " ") +
@@ -95,24 +95,26 @@ public class SocketServer extends WebSocketServer {

@Override
public void onMessage(WebSocket webSocket, String s) {
// System.out.println("Received: " + s.substring(0, 50));
// logger.info("Received: " + s.substring(0, 50));
IPacket reconstructedPacket = PacketContainer.deserialize(s);
try {
this.onPacketReceived(webSocket, reconstructedPacket);
} catch (JsonProcessingException e) {
logger.exception(e);
this.log("Error on processing incoming package: " + e.getMessage(), webSocket);
}
}

@Override
public void onError(WebSocket webSocket, Exception e) {
logger.exception(e);
log(e.getMessage(), webSocket);
webSocket.close();
}

@Override
public void onStart() {
System.out.println("Websocket server started on port " + this.getPort());
logger.info("Websocket server started on port " + this.getPort());
}

/**
@@ -125,7 +127,7 @@ public class SocketServer extends WebSocketServer {
webSocket.send(PacketContainer.serialize(message));
} catch (Exception e) {
System.err.println("Failed to send message: " + text);
log.error("e: ", e);
logger.exception(e);
}
}

@@ -138,8 +140,8 @@ public class SocketServer extends WebSocketServer {
error.error = text;
webSocket.send(PacketContainer.serialize(error));
} catch (Exception e) {
System.err.println("Failed to send error: " + text);
log.error("e: ", e);
logger.exception(e);
log("Failed to send error: " + text, webSocket);
}
}

@@ -182,7 +184,7 @@ public class SocketServer extends WebSocketServer {

public void log(String msg, WebSocket webSocket) {
SocketData socketData = webSocket == null ? new SocketData("???") : webSocket.getAttachment();
System.out.println("["+socketData.id+"] " + msg);
logger.info("["+socketData.id+"] " + msg);
}

/**
```
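In this file the slf4j logger (log.error("e: ", e)) is dropped in favour of the project's own Logger, which the diff shows being called as logger.exception(e). Its implementation is not part of the change; a plausible sketch of such a convenience method, given as an assumption rather than the project's code:

```java
// Hypothetical sketch: how a Logger.exception(Throwable) helper could be implemented.
// The diff only shows this method being called, not its body.
public class LoggerExceptionSketch {
    public void error(String msg) {
        System.err.println("ERROR: " + msg);
    }

    public void exception(Throwable e) {
        // Log the exception message, then each stack frame, at error level.
        error(String.valueOf(e));
        for (StackTraceElement frame : e.getStackTrace()) {
            error("    at " + frame);
        }
    }
}
```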
```
@@ -3,6 +3,7 @@ package de.dhbwstuttgart.server.packet;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import de.dhbwstuttgart.util.Logger;

/**
* A wrapper for the packet to ensure correct serialization/deserialization and make it possible to detect the matching
@@ -79,7 +80,7 @@ public class PacketContainer {

throw new RuntimeException("Cannot map received json to any known packet class");
} catch (Exception e) {
System.out.println(e);
(new Logger()).exception(e);
InvalidPacket packet = new InvalidPacket();
packet.error = e.getMessage();
return packet;
```
```
@@ -19,6 +19,7 @@ import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import de.dhbwstuttgart.util.Logger;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
@@ -94,15 +95,15 @@ public class UnifyRequestPacket implements IClientToServerPacket {
@JsonIgnore
public void onHandle(WebSocket webSocket, SocketServer socketServer) {
socketServer.sendMessage(webSocket, "You requested a unify! Please wait until I calculated everything...");
System.out.println("Client " + webSocket.<SocketServer.SocketData>getAttachment().id + " requested a unification. Starting now...");
SocketServer.logger.info("Client " + webSocket.<SocketServer.SocketData>getAttachment().id + " requested a unification. Starting now...");

try {
var placeholderRegistry = new PlaceholderRegistry();
ArrayList<String> existingPlaceholders = (ArrayList) this.placeholders.getOf(ArrayList.class);
existingPlaceholders.forEach(placeholderRegistry::addPlaceholder);

var unifyContext = new UnifyContext(Writer.nullWriter(), false, true,
new UnifyResultModel(new ConstraintSet<>(), new FiniteClosure(new HashSet<>(), null, placeholderRegistry)),
var unifyContext = new UnifyContext(Logger.NULL_LOGGER, true,
new UnifyResultModel(new ConstraintSet<>(), new FiniteClosure(new HashSet<>(), Logger.NULL_LOGGER, placeholderRegistry)),
new UnifyTaskModel(), ForkJoinPool.commonPool(), placeholderRegistry
);

@@ -129,7 +130,7 @@ public class UnifyRequestPacket implements IClientToServerPacket {

var resultSets = resultListener.getResults();

System.out.println("Finished unification for client " + webSocket.<SocketServer.SocketData>getAttachment().id);
SocketServer.logger.info("Finished unification for client " + webSocket.<SocketServer.SocketData>getAttachment().id);
socketServer.sendMessage(webSocket, "Unification finished. Found " + resultSets.size() + " result sets");

if (webSocket.isOpen()) {
@@ -137,8 +138,8 @@ public class UnifyRequestPacket implements IClientToServerPacket {
webSocket.send(PacketContainer.serialize(resultPacket));
}
} catch (Exception e) {
System.err.println(e);
SocketServer.log.error("e: ", e);
SocketServer.logger.exception(e);
socketServer.log(e.getMessage(), webSocket);
}
}
```
```
@@ -35,7 +35,7 @@ public class UnifyResultPacket implements IServerToClientPacket {

@JsonIgnore
public void onHandle(WebSocket webSocket, SocketClient socketClient) {
System.out.println("[socket] Received unify result");
SocketClient.logger.info("[socket] Received unify result");
socketClient.setUnifyResultSets(this);
}
```
```
@@ -0,0 +1,8 @@
package de.dhbwstuttgart.syntaxtree;

import de.dhbwstuttgart.util.Logger;

public class SyntaxTree {

public static Logger logger = new Logger("SyntaxTree");
}
```
```
@@ -1,6 +1,8 @@
package de.dhbwstuttgart.syntaxtree.factory;

import de.dhbwstuttgart.syntaxtree.SyntaxTree;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.util.Logger;
import java.io.Writer;
import java.lang.reflect.Modifier;
import java.util.*;
@@ -34,7 +36,7 @@ public class UnifyTypeFactory {

public static FiniteClosure generateFC(
List<ClassOrInterface> fromClasses,
Writer logFile,
Logger logger,
ClassLoader classLoader,
JavaTXCompiler compiler,
PlaceholderRegistry placeholderRegistry
@@ -49,7 +51,7 @@ public class UnifyTypeFactory {
Generell dürfen sie immer die gleichen Namen haben.
TODO: die transitive Hülle bilden
*/
return new FiniteClosure(FCGenerator.toUnifyFC(compiler, fromClasses, classLoader, placeholderRegistry), logFile, compiler, placeholderRegistry);
return new FiniteClosure(FCGenerator.toUnifyFC(compiler, fromClasses, classLoader, placeholderRegistry), logger, compiler, placeholderRegistry);
}

public static UnifyPair generateSmallerPair(UnifyType tl, UnifyType tr, SourceLoc location){
@@ -132,7 +134,7 @@ public class UnifyTypeFactory {

public static UnifyType convert(JavaTXCompiler compiler, TypePlaceholder tph, Boolean innerType, PlaceholderRegistry placeholderRegistry) {
if (tph.getName().equals("AFR")) {
System.out.println("XXX"+innerType);
SyntaxTree.logger.info("XXX"+innerType);
}
PlaceholderType ntph = new PlaceholderType(tph.getName(), tph.getVariance(), placeholderRegistry);
ntph.setVariance(tph.getVariance());
@@ -199,7 +201,7 @@ public class UnifyTypeFactory {
&& ((PlaceholderType)lhs).isWildcardable()
&& (rhs = ret.getLhsType()) instanceof PlaceholderType) {
if (lhs.getName().equals("AQ")) {
// System.out.println("");
// SyntaxTree.logger.info("");
}
((PlaceholderType)rhs).enableWildcardtable();
}
@@ -208,7 +210,7 @@ public class UnifyTypeFactory {
&& ((PlaceholderType)rhs).isWildcardable()
&& (lhs = ret.getLhsType()) instanceof PlaceholderType) {
if (rhs.getName().equals("AQ")) {
// System.out.println("");
// SyntaxTree.logger.info("");
}
((PlaceholderType)lhs).enableWildcardtable();
}
```
src/main/java/de/dhbwstuttgart/target/Target.java (new file, 8 changed lines)
```
@@ -0,0 +1,8 @@
package de.dhbwstuttgart.target;

import de.dhbwstuttgart.util.Logger;

public class Target {
public static Logger logger = new Logger("Target");

}
```
```
@@ -12,6 +12,7 @@ import de.dhbwstuttgart.syntaxtree.Record;
import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
import de.dhbwstuttgart.syntaxtree.statement.*;
import de.dhbwstuttgart.syntaxtree.type.*;
import de.dhbwstuttgart.target.Target;
import de.dhbwstuttgart.target.tree.*;
import de.dhbwstuttgart.target.tree.expression.*;
import de.dhbwstuttgart.target.tree.type.*;
@@ -337,10 +338,10 @@ public class ASTToTargetAST {

var result = r0.stream().map(l -> l.stream().toList()).toList();

System.out.println("============== OUTPUT ==============");
Target.logger.info("============== OUTPUT ==============");
for (var l : result) {
for (var m : l) System.out.println(m.name() + " " + m.getSignature());
System.out.println();
for (var m : l) Target.logger.info(m.name() + " " + m.getSignature());
Target.logger.info("");
}
return result;
}
```
```
@@ -1,10 +1,12 @@
package de.dhbwstuttgart.target.generate;

import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.syntaxtree.*;
import de.dhbwstuttgart.syntaxtree.statement.*;
import de.dhbwstuttgart.syntaxtree.type.*;
import de.dhbwstuttgart.syntaxtree.type.Void;
import de.dhbwstuttgart.target.Target;
import de.dhbwstuttgart.target.tree.type.TargetGenericType;
import de.dhbwstuttgart.target.tree.type.TargetType;
import de.dhbwstuttgart.typeinference.result.PairTPHEqualTPH;
@@ -138,17 +140,17 @@ public abstract class GenerateGenerics {
this.astToTargetAST = astToTargetAST;
for (var constraint : constraints.results) {
if (constraint instanceof PairTPHsmallerTPH p) {
System.out.println(p.left + " " + p.left.getVariance());
Target.logger.info(p.left + " " + p.left.getVariance());
simplifiedConstraints.add(new PairLT(new TPH(p.left), new TPH(p.right)));
} else if (constraint instanceof PairTPHEqualTPH p) {
equality.put(p.getLeft(), p.getRight());
} else if (constraint instanceof PairTPHequalRefTypeOrWildcardType p) {
System.out.println(p.left + " = " + p.right);
Target.logger.info(p.left + " = " + p.right);
concreteTypes.put(new TPH(p.left), p.right);
}
}

System.out.println("Simplified constraints: " + simplifiedConstraints);
Target.logger.info("Simplified constraints: " + simplifiedConstraints);
}

/*public record GenericsState(Map<TPH, RefTypeOrTPHOrWildcardOrGeneric> concreteTypes, Map<TypePlaceholder, TypePlaceholder> equality) {}
@@ -248,7 +250,7 @@ public abstract class GenerateGenerics {
equality.put(entry.getKey(), to);
}
}
System.out.println(from + " -> " + to + " " + from.getVariance());
Target.logger.info(from + " -> " + to + " " + from.getVariance());
//from.setVariance(to.getVariance());
equality.put(from, to);
referenced.remove(new TPH(from));
@@ -317,7 +319,7 @@ public abstract class GenerateGenerics {
Set<TPH> T2s = new HashSet<>();
findTphs(superType, T2s);

System.out.println("T1s: " + T1s + " T2s: " + T2s);
Target.logger.info("T1s: " + T1s + " T2s: " + T2s);
//Ende

superType = methodCall.receiverType;
@@ -332,7 +334,7 @@ public abstract class GenerateGenerics {
var optMethod = astToTargetAST.findMethod(owner, methodCall.name, methodCall.signatureArguments().stream().map(astToTargetAST::convert).toList());
if (optMethod.isEmpty()) return;
var method2 = optMethod.get();
System.out.println("In: " + method.getName() + " Method: " + method2.getName());
Target.logger.info("In: " + method.getName() + " Method: " + method2.getName());
var generics = family(owner, method2);

// transitive and
@@ -365,7 +367,7 @@ public abstract class GenerateGenerics {
if (!T1s.contains(R1) || !T2s.contains(R2)) continue;

var newPair = new PairLT(R1, R2);
System.out.println("New pair: " + newPair);
Target.logger.info("New pair: " + newPair);
newPairs.add(newPair);

if (!containsRelation(result, newPair))
@@ -567,7 +569,7 @@ public abstract class GenerateGenerics {
Set<Pair> generics(ClassOrInterface owner, Method method) {
if (computedGenericsOfMethods.containsKey(method)) {
var cached = computedGenericsOfMethods.get(method);
System.out.println("Cached " + method.getName() + ": " + cached);
Target.logger.info("Cached " + method.getName() + ": " + cached);
return cached;
}

@@ -596,7 +598,7 @@ public abstract class GenerateGenerics {

normalize(result, classGenerics, usedTphs);

System.out.println(this.getClass().getSimpleName() + " " + method.name + ": " + result);
Target.logger.info(this.getClass().getSimpleName() + " " + method.name + ": " + result);
return result;
}

@@ -675,7 +677,7 @@ public abstract class GenerateGenerics {

normalize(javaResult, null, referencedByClass);

System.out.println(this.getClass().getSimpleName() + " Class " + classOrInterface.getClassName().getClassName() + ": " + javaResult);
Target.logger.info(this.getClass().getSimpleName() + " Class " + classOrInterface.getClassName().getClassName() + ": " + javaResult);
return javaResult;
}

@@ -726,7 +728,7 @@ public abstract class GenerateGenerics {
if (!added) break;
}

System.out.println(chain + " " + chain.stream().map(e -> e.resolve().getVariance()).toList());
Target.logger.info(chain + " " + chain.stream().map(e -> e.resolve().getVariance()).toList());
var variance = chain.get(0).resolve().getVariance();
if (variance != 1) continue;
var index = 0;
@@ -764,7 +766,7 @@ public abstract class GenerateGenerics {
}

for (var pair : elementsToAddToEquality) {
System.out.println(pair);
Target.logger.info(pair);
addToEquality(pair.left, pair.right, referenced);
}
}
@@ -917,11 +919,11 @@ public abstract class GenerateGenerics {
}
}
if (infima.size() > 1) {
System.out.println(infima);
Target.logger.info(infima);
for (var pair : infima) {
var returnTypes = findTypeVariables(method.getReturnType());
var chain = findConnectionToReturnType(returnTypes, input, new HashSet<>(), pair.left);
System.out.println("Find: " + pair.left + " " + chain);
Target.logger.info("Find: " + pair.left + " " + chain);
chain.remove(pair.left);
if (chain.size() > 0) {
for (var tph : chain)
@@ -959,8 +961,8 @@ public abstract class GenerateGenerics {
}
}
newTph.setVariance(variance);
System.out.println(infima + " " + infima.stream().map(i -> i.right.resolve().getVariance()).toList());
System.out.println("Infima new TPH " + newTph + " variance " + variance);
Target.logger.info(infima + " " + infima.stream().map(i -> i.right.resolve().getVariance()).toList());
Target.logger.info("Infima new TPH " + newTph + " variance " + variance);

//referenced.add(newTph);
addToPairs(input, new PairLT(left, new TPH(newTph)));
```
```
@@ -8,6 +8,7 @@ import de.dhbwstuttgart.parser.scope.JavaClassName;
import de.dhbwstuttgart.syntaxtree.*;
import de.dhbwstuttgart.syntaxtree.statement.*;
import de.dhbwstuttgart.syntaxtree.type.*;
import de.dhbwstuttgart.target.Target;
import de.dhbwstuttgart.target.tree.MethodParameter;
import de.dhbwstuttgart.target.tree.TargetMethod;
import de.dhbwstuttgart.target.tree.expression.*;
@@ -120,7 +121,7 @@ public class StatementToTargetExpression implements ASTVisitor {

@Override
public void visit(BoolExpression bool) {
System.out.println("BoolExpression");
Target.logger.info("BoolExpression");
}

@Override
@@ -234,7 +235,7 @@ public class StatementToTargetExpression implements ASTVisitor {
isInterface = receiverClass.isInterface();
}

System.out.println(argList);
Target.logger.info(argList);
result = new TargetMethodCall(converter.convert(methodCall.getType()), returnType, argList, converter.convert(methodCall.receiver), methodCall.getArgumentList().getArguments().stream().map(converter::convert).toList(), receiverType, methodCall.name, isStatic, isInterface, isPrivate);
}
```
```
@@ -6,6 +6,7 @@ import de.dhbwstuttgart.server.packet.dataContainers.KeyStorage;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.util.Logger;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashSet;
@@ -110,6 +111,8 @@ class Resolver implements ResultSetVisitor {
private final Set<GenericInsertPair> additionalTPHs = new HashSet<>();
private ResultPair<?, ?> currentPair;

public static Logger logger = new Logger("Resolver");

public Resolver(ResultSet resultPairs) {
this.result = resultPairs;
}
@@ -117,7 +120,7 @@ class Resolver implements ResultSetVisitor {
public ResolvedType resolve(TypePlaceholder tph) {
toResolve = tph;
resolved = null;
System.out.println(tph.toString());
logger.info(tph.toString());
for (ResultPair<?, ?> resultPair : result.results) {
if (resultPair instanceof PairTPHEqualTPH && ((PairTPHEqualTPH) resultPair).getLeft().equals(toResolve)) {
currentPair = resultPair;
```
```
@@ -68,7 +68,7 @@ public class TYPE {

for(SourceFile sourceFile : sfs){
for(JavaClassName importName : sourceFile.imports){
System.out.println(importName);
context.logger().info(importName);
try {
classes.add(ASTFactory.createClass(classLoader.loadClass(importName.toString())));
} catch (ClassNotFoundException e) {
```
```
@@ -694,8 +694,8 @@ public class TYPEStmt implements StatementVisitor {

Set<Pair> methodSignatureConstraint = generatemethodSignatureConstraint(forMethod, assumption, info, resolver);

//System.out.println("methodSignatureConstraint: " + methodSignatureConstraint);
//System.out.println("methodConstraint: " + methodConstraint);
//context.logger().info("methodSignatureConstraint: " + methodSignatureConstraint);
//context.logger().info("methodConstraint: " + methodConstraint);

methodConstraint.setmethodSignatureConstraint(methodSignatureConstraint);
extendsMethodConstraint.setmethodSignatureConstraint(methodSignatureConstraint);
@@ -842,7 +842,7 @@ public class TYPEStmt implements StatementVisitor {
for (var child : switchStmt.getBlocks()) {
for (var label : child.getLabels()) {
if (label.getPattern() == null) {
//System.out.println("DefaultCase");
//context.logger().info("DefaultCase");
} else {
constraintsSet.addUndConstraint(
new Pair(
@@ -911,7 +911,7 @@ public class TYPEStmt implements StatementVisitor {

for (var subPattern : pattern.getSubPattern()) {
for (Constructor con : constructors) {
//System.out.println("----------------------\n" + subPattern.getType() + " | " + con.getParameterList().getParameterAt(counter).getType() + "\n----------------------\n");
//context.logger().info("----------------------\n" + subPattern.getType() + " | " + con.getParameterList().getParameterAt(counter).getType() + "\n----------------------\n");
constraintsSet.addUndConstraint(new Pair(subPattern.getType(), con.getParameterList().getParameterAt(counter).getType(), PairOperator.SMALLERDOT, loc(con.getParameterList().getParameterAt(counter).getOffset())));
}
if (subPattern instanceof RecordPattern) recursivelyAddRecordConstraints((RecordPattern) subPattern);
```
```
@@ -37,9 +37,6 @@ public class ConcurrentSetMergeTask<T> extends RecursiveTask<Set<T>> {
totalElements += list.get(i).size();
}

System.out.println("ConcurrentSetMerge? -> " + (size <= LIST_THRESHOLD || totalElements < ELEMENT_THRESHOLD ? "true" : "false"));


// size will always be at least one
if (true || size <= LIST_THRESHOLD || totalElements < ELEMENT_THRESHOLD) {
Set<T> result = this.list.get(start);
```
@@ -1,5 +1,6 @@
package de.dhbwstuttgart.typeinference.unify;

import de.dhbwstuttgart.util.Logger;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
@@ -12,24 +13,16 @@ import java.util.Stack;
import java.util.function.Function;
import java.util.stream.Collectors;

import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.exceptions.DebugException;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.syntaxtree.type.ExtendsWildcardType;
import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
import de.dhbwstuttgart.typeinference.unify.model.*;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.distributeVariance;

import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.io.OutputStreamWriter;

import org.apache.commons.io.output.NullOutputStream;

/**
* Implementation of the type inference rules.
* @author Florian Steurer
@@ -37,17 +30,17 @@ import org.apache.commons.io.output.NullOutputStream;
*/
public class RuleSet implements IRuleSet{

Writer logFile;
Logger logger;
final PlaceholderRegistry placeholderRegistry;

public RuleSet(PlaceholderRegistry placeholderRegistry) {
super();
logFile = OutputStreamWriter.nullWriter();
logger = Logger.NULL_LOGGER;
this.placeholderRegistry = placeholderRegistry;
}

RuleSet(Writer logFile, PlaceholderRegistry placeholderRegistry) {
this.logFile = logFile;
RuleSet(Logger logger, PlaceholderRegistry placeholderRegistry) {
this.logger = logger;
this.placeholderRegistry = placeholderRegistry;
}

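Note: the new de.dhbwstuttgart.util.Logger class referenced throughout this comparison is not itself shown in any hunk. The sketch below only reconstructs the API surface implied by the call sites in this diff (a named constructor, a LogLevel enum, NULL_LOGGER, forFile, info/debug/exception/close); the real class in the repository may be structured differently, and presumably also honours ConsoleInterface.logLevel and the --write-logs flag.

```java
// Sketch only -- reconstructed from the call sites in this diff, not the actual class.
package de.dhbwstuttgart.util;

import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;

public class Logger {
    public enum LogLevel { ERROR, WARNING, INFO, DEBUG }

    // Silent logger, used where the old code handed around OutputStreamWriter.nullWriter().
    public static final Logger NULL_LOGGER = new Logger("null") {
        @Override
        protected void write(LogLevel level, String message) { /* discard everything */ }
    };

    private final String name;
    private Writer out; // optional log file; null means console only

    public Logger(String name) {
        this.name = name;
    }

    // Assumed factory behind the per-thread log files created in TypeUnifyTask.
    public static Logger forFile(String path, String name) {
        Logger logger = new Logger(name);
        try {
            logger.out = new FileWriter(path);
        } catch (IOException e) {
            System.err.println("could not open log file " + path + ", logging to console only");
        }
        return logger;
    }

    public void info(String message)  { write(LogLevel.INFO, message); }
    public void debug(String message) { write(LogLevel.DEBUG, message); }
    public void exception(Exception e) { write(LogLevel.ERROR, e.toString()); }

    // A real implementation would presumably filter by ConsoleInterface.logLevel here.
    protected void write(LogLevel level, String message) {
        String line = "[" + name + "][" + level + "] " + message;
        System.out.println(line);
        if (out != null) {
            try {
                out.write(line + System.lineSeparator());
            } catch (IOException e) {
                System.err.println("log write failed: " + e.getMessage());
            }
        }
    }

    public void close() {
        if (out == null) return;
        try {
            out.close();
        } catch (IOException e) {
            // nothing sensible to do at shutdown
        }
    }
}
```
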
@@ -300,8 +293,8 @@ public class RuleSet implements IRuleSet{

if(dFromFc == null || !dFromFc.getTypeParams().arePlaceholders() || dFromFc.getTypeParams().size() != cFromFc.getTypeParams().size())
return Optional.empty();
//System.out.println("cFromFc: " + cFromFc);
//System.out.println("dFromFc: " + dFromFc);
//context.logger().info("cFromFc: " + cFromFc);
//context.logger().info("dFromFc: " + dFromFc);
int[] pi = pi(cFromFc.getTypeParams(), dFromFc.getTypeParams());

if(pi.length == 0)
@@ -510,17 +503,17 @@ public class RuleSet implements IRuleSet{
TypeParams typeDParams = typeD.getTypeParams();
TypeParams typeDgenParams = typeDgen.getTypeParams();

//System.out.println("Pair: " +pair);
//System.out.println("typeD: " +typeD);
//System.out.println("typeDParams: " +typeDParams);
//System.out.println("typeDgen: " +typeD);
//System.out.println("typeDgenParams: " +typeDgenParams);
//context.logger().info("Pair: " +pair);
//context.logger().info("typeD: " +typeD);
//context.logger().info("typeDParams: " +typeDParams);
//context.logger().info("typeDgen: " +typeD);
//context.logger().info("typeDgenParams: " +typeDgenParams);
Unifier unif = Unifier.identity();
for(int i = 0; i < typeDParams.size(); i++) {
//System.out.println("ADAPT" +typeDgenParams);
//context.logger().info("ADAPT" +typeDgenParams);
if (typeDgenParams.get(i) instanceof PlaceholderType)
unif.add((PlaceholderType) typeDgenParams.get(i), typeDParams.get(i));
else System.out.println("ERROR");
unif.add((PlaceholderType) typeDgenParams.get(i), typeDParams.get(i));
else logger.exception(new Exception("ERROR in adapt rule: cannot add non placeholder type"));
}
return Optional.of(new UnifyPair(unif.apply(newLhs), typeDs, PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
}

@@ -864,14 +857,11 @@ public class RuleSet implements IRuleSet{
|
||||
UnifyType r = x.getRhsType();
|
||||
if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
|
||||
} );
|
||||
try {
|
||||
logFile.write("FUNgreater: " + pair + "\n");
|
||||
logFile.write("FUNred: " + result + "\n");
|
||||
// logFile.flush();
|
||||
}
|
||||
catch (IOException e) {
|
||||
System.out.println("logFile-Error");
|
||||
}
|
||||
|
||||
logger.debug("FUNgreater: " + pair);
|
||||
logger.debug("FUNred: " + result);
|
||||
|
||||
|
||||
return Optional.of(result);
|
||||
}
|
||||
|
||||
@@ -960,14 +950,10 @@ public class RuleSet implements IRuleSet{
|
||||
UnifyType r = x.getRhsType();
|
||||
if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
|
||||
} );
|
||||
try {
|
||||
logFile.write("FUNgreater: " + pair + "\n");
|
||||
logFile.write("FUNgreater: " + result + "\n");
|
||||
// logFile.flush();
|
||||
}
|
||||
catch (IOException e) {
|
||||
System.out.println("lofFile-Error");
|
||||
}
|
||||
|
||||
logger.debug("FUNgreater: " + pair);
|
||||
logger.debug("FUNgreater: " + result);
|
||||
|
||||
return Optional.of(result);
|
||||
}
|
||||
|
||||
@@ -1010,14 +996,11 @@ public class RuleSet implements IRuleSet{
|
||||
UnifyType r = x.getRhsType();
|
||||
if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
|
||||
} );
|
||||
try {
|
||||
logFile.write("FUNgreater: " + pair + "\n");
|
||||
logFile.write("FUNsmaller: " + result + "\n");
|
||||
// logFile.flush();
|
||||
}
|
||||
catch (IOException e) {
|
||||
System.out.println("lofFile-Error");
|
||||
}
|
||||
|
||||
|
||||
logger.debug("FUNgreater: " + pair);
|
||||
logger.debug("FUNsmaller: " + result);
|
||||
|
||||
return Optional.of(result);
|
||||
}
|
||||
|
||||
|
||||
@@ -29,17 +29,12 @@ public class TypeUnify {
* unify parallel ohne result modell
*/
public static Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
ForkJoinPool pool = TypeUnify.createThreadPool();
ForkJoinPool pool = TypeUnify.createThreadPool(unifyContext.logger());
UnifyContext context = unifyContext.newWithParallel(true).newWithExecutor(pool);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, context, 0);
Set<Set<UnifyPair>> res = joinFuture(unifyTask.compute());
try {
unifyContext.logFile().write("\nnoShortendElements: " + TypeUnifyTask.noShortendElements + "\n");
// logFile.flush();
}
catch (IOException e) {
System.err.println("no log-File");
}

unifyContext.logger().debug("\nnoShortendElements: " + TypeUnifyTask.noShortendElements + "\n");
return res;
}

@@ -47,7 +42,7 @@ public class TypeUnify {
|
||||
* unify asynchron mit Rückgabe UnifyResultModel ohne dass alle results gesammelt sind
|
||||
*/
|
||||
public static UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
|
||||
ForkJoinPool pool = TypeUnify.createThreadPool();
|
||||
ForkJoinPool pool = TypeUnify.createThreadPool(unifyContext.logger());
|
||||
UnifyContext context = unifyContext.newWithExecutor(pool);
|
||||
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, context, 0);
|
||||
unifyTask.compute();
|
||||
@@ -58,18 +53,12 @@ public class TypeUnify {
|
||||
* unify parallel mit Rückgabe UnifyResultModel nachdem alle results gesammelt sind
|
||||
*/
|
||||
public static Set<Set<UnifyPair>> unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
|
||||
ForkJoinPool pool = TypeUnify.createThreadPool();
|
||||
ForkJoinPool pool = TypeUnify.createThreadPool(unifyContext.logger());
|
||||
UnifyContext context = unifyContext.newWithParallel(true).newWithExecutor(pool);
|
||||
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, context, 0);
|
||||
var result = joinFuture(unifyTask.compute());
|
||||
|
||||
try {
|
||||
unifyContext.logFile().write("\nnoShortendElements: " + TypeUnifyTask.noShortendElements +"\n");
|
||||
// logFile.flush();
|
||||
}
|
||||
catch (IOException e) {
|
||||
System.err.println("no log-File");
|
||||
}
|
||||
unifyContext.logger().debug("\nnoShortendElements: " + TypeUnifyTask.noShortendElements + "\n");
|
||||
return result;
|
||||
}
|
||||
|
||||
@@ -87,18 +76,12 @@ public class TypeUnify {
|
||||
public static Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
|
||||
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, unifyContext.newWithParallel(false), 0);
|
||||
Set<Set<UnifyPair>> res = joinFuture(unifyTask.compute());
|
||||
try {
|
||||
unifyContext.logFile().write("\nnoShortendElements: " + TypeUnifyTask.noShortendElements +"\n");
|
||||
// logFile.flush();
|
||||
}
|
||||
catch (IOException e) {
|
||||
System.err.println("no log-File");
|
||||
}
|
||||
unifyContext.logger().debug("\nnoShortendElements: " + TypeUnifyTask.noShortendElements +"\n");
|
||||
return res;
|
||||
}
|
||||
|
||||
private static ForkJoinPool createThreadPool() {
Logger.print("Available processors: " + Runtime.getRuntime().availableProcessors());
private static ForkJoinPool createThreadPool(Logger logger) {
logger.info("Available processors: " + Runtime.getRuntime().availableProcessors());
return new ForkJoinPool(
Runtime.getRuntime().availableProcessors(),
ForkJoinPool.defaultForkJoinWorkerThreadFactory,

@@ -34,7 +34,7 @@ public class TypeUnify2Task extends TypeUnifyTask {
|
||||
@Override
|
||||
public CompletableFuture<Set<Set<UnifyPair>>> compute() {
|
||||
if (one) {
|
||||
System.out.println("two");
|
||||
context.logger().info("two");
|
||||
}
|
||||
one = true;
|
||||
CompletableFuture<Set<Set<UnifyPair>>> res =
|
||||
@@ -53,12 +53,6 @@ public class TypeUnify2Task extends TypeUnifyTask {
}

public void closeLogFile() {

try {
context.logFile().close();
} catch (IOException ioE) {
System.err.println("no log-File");
}

context.logger().close();
}
}

@@ -45,9 +45,7 @@ import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.Future;
|
||||
import java.util.concurrent.RecursiveTask;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.function.BiFunction;
|
||||
import java.util.function.BinaryOperator;
|
||||
import java.util.stream.Collectors;
|
||||
@@ -66,7 +64,7 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
private static int i = 0;
|
||||
private final boolean printtag = false;
|
||||
|
||||
final UnifyContext context;
|
||||
public final UnifyContext context;
|
||||
|
||||
/**
|
||||
* Element, das aus dem nextSet den Gleichunen dieses Threads hinzugefuegt wurde
|
||||
@@ -122,7 +120,7 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
Boolean myIsCanceled = false;
|
||||
|
||||
public TypeUnifyTask(UnifyContext context) {
|
||||
this.context = context.newWithLogFile(new OutputStreamWriter(NullOutputStream.INSTANCE));
|
||||
this.context = context.newWithLogger(Logger.NULL_LOGGER);
|
||||
rules = new RuleSet(context.placeholderRegistry());
|
||||
}
|
||||
|
||||
@@ -157,24 +155,19 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
this.fc = fc;
this.oup = new OrderingUnifyPair(fc, context);

Writer logFileWriter = OutputStreamWriter.nullWriter();
if (context.log()) {
try {
logFileWriter = new FileWriter(System.getProperty("user.dir") + "/logFiles/" + "Thread");
logFileWriter.write("");
} catch (IOException e) {
System.err.println("log-File nicht vorhanden");
}
}
this.context = context.newWithLogFile(logFileWriter);

this.context = context.newWithLogger(
Logger.forFile(
System.getProperty("user.dir") + "/logFiles/" + "Thread",
"Unify"
)
);

/*Abbruchtest
|
||||
if (thNo > 10) {
|
||||
System.out.println("cancel");
|
||||
context.logger().info("cancel");
|
||||
usedTasks.cancel();
|
||||
writeLog(nOfUnify.toString() + "cancel");
|
||||
System.out.println("cancel");
|
||||
context.logger().info("cancel");
|
||||
try {
|
||||
logFile.write("Abbruch");
|
||||
}
|
||||
@@ -183,7 +176,7 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
}
|
||||
}
|
||||
*/
|
||||
rules = new RuleSet(context.logFile(), context.placeholderRegistry());
|
||||
rules = new RuleSet(context.logger(), context.placeholderRegistry());
|
||||
this.rekTiefeField = rekTiefe;
|
||||
context.usedTasks().add(this);
|
||||
}
|
||||
@@ -230,7 +223,7 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
|
||||
public CompletableFuture<Set<Set<UnifyPair>>> compute() {
|
||||
if (one) {
|
||||
System.out.println("two");
|
||||
context.logger().info("two");
|
||||
}
|
||||
one = true;
|
||||
Set<UnifyPair> neweq = new HashSet<>(eq);
|
||||
@@ -247,11 +240,7 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
.collect(Collectors.toCollection(ArrayList::new));
|
||||
var unifyFuture = unify(neweq, remainingOderconstraints, fc, context.parallel(), rekTiefeField, methodSignatureConstraint);
|
||||
return unifyFuture.thenApply(res -> {
|
||||
try {
|
||||
context.logFile().close();
|
||||
} catch (IOException ioE) {
|
||||
System.err.println("no log-File");
|
||||
}
|
||||
context.logger().close();
|
||||
if (isUndefinedPairSetSet(res)) {
|
||||
//fuer debug-Zwecke
|
||||
ArrayList<ArrayList<UnifyPair>> al = res.stream()
|
||||
@@ -292,7 +281,7 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
// ).collect(Collectors.toCollection(HashSet::new));
|
||||
//writeLog(nOfUnify.toString() + " AA: " + aas.toString());
|
||||
//if (aas.isEmpty()) {
|
||||
// System.out.println("");
|
||||
// context.logger().info("");
|
||||
//}
|
||||
|
||||
//.collect(Collectors.toCollection(HashSet::new)));
|
||||
@@ -303,8 +292,8 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
|
||||
rekTiefe++;
|
||||
nOfUnify++;
|
||||
writeLog(nOfUnify.toString() + " Unifikation: " + eq.toString());
|
||||
writeLog(nOfUnify.toString() + " Oderconstraints: " + oderConstraints.toString());
|
||||
context.logger().debug(nOfUnify.toString() + " Unifikation: " + eq.toString());
|
||||
context.logger().debug(nOfUnify.toString() + " Oderconstraints: " + oderConstraints.toString());
|
||||
|
||||
/*
|
||||
* Variancen auf alle Gleichungen vererben
|
||||
@@ -334,7 +323,7 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
})
|
||||
.peek(UnifyPair::setUndefinedPair)
|
||||
.collect(Collectors.toCollection(HashSet::new));
|
||||
writeLog("ocurrPairs: " + ocurrPairs);
|
||||
context.logger().debug("ocurrPairs: " + ocurrPairs);
|
||||
if (!ocurrPairs.isEmpty()) {
|
||||
Set<Set<UnifyPair>> ret = new HashSet<>();
|
||||
ret.add(ocurrPairs);
|
||||
@@ -363,7 +352,7 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
})
|
||||
.peek(UnifyPair::setUndefinedPair)
|
||||
.collect(Collectors.toCollection(HashSet::new));
|
||||
writeLog("ocurrPairs: " + ocurrPairs);
|
||||
context.logger().debug("ocurrPairs: " + ocurrPairs);
|
||||
if (!ocurrPairs.isEmpty()) {
|
||||
Set<Set<UnifyPair>> ret = new HashSet<>();
|
||||
ret.add(ocurrPairs);
|
||||
@@ -375,8 +364,8 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
|
||||
eq0.forEach(UnifyPair::disableCondWildcards);
|
||||
|
||||
writeLog(nOfUnify.toString() + " Unifikation nach applyTypeUnificationRules: " + eq.toString());
|
||||
writeLog(nOfUnify.toString() + " Oderconstraints nach applyTypeUnificationRules: " + oderConstraints.toString());
|
||||
context.logger().debug(nOfUnify.toString() + " Unifikation nach applyTypeUnificationRules: " + eq.toString());
|
||||
context.logger().debug(nOfUnify.toString() + " Oderconstraints nach applyTypeUnificationRules: " + oderConstraints.toString());
|
||||
|
||||
/*
|
||||
* Step 2 and 3: Create a subset eq1s of pairs where both sides are TPH and eq2s of the other pairs
|
||||
@@ -396,7 +385,7 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
// cartesian product of the sets created by pattern matching.
|
||||
List<Set<? extends Set<UnifyPair>>> topLevelSets = new ArrayList<>();
|
||||
|
||||
//System.out.println(eq2s);
|
||||
//context.logger().info(eq2s);
|
||||
|
||||
if (!eq1s.isEmpty()) { // Do not add empty sets or the cartesian product will always be empty.
|
||||
Set<Set<UnifyPair>> wrap = new HashSet<>();
|
||||
@@ -420,7 +409,7 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
// Sets that originate from pair pattern matching
|
||||
// Sets of the "second level"
|
||||
Set<UnifyPair> undefinedPairs = new HashSet<>();
|
||||
if (printtag) System.out.println("eq2s " + eq2s);
|
||||
if (printtag) context.logger().info("eq2s " + eq2s);
|
||||
//writeLog("BufferSet: " + bufferSet.toString()+"\n");
|
||||
List<Set<Constraint<UnifyPair>>> oderConstraintsOutput = new ArrayList<>();//new ArrayList<>(oderConstraints);
|
||||
Set<Set<Set<? extends Set<UnifyPair>>>> secondLevelSets = calculatePairSets(eq2s, oderConstraints, fc, undefinedPairs, oderConstraintsOutput);
|
||||
@@ -428,21 +417,21 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
//nicht ausgewertet Faculty Beispiel im 1. Schritt
|
||||
//PL 2017-10-03 geloest, muesste noch mit FCs mit kleineren
|
||||
//Typen getestet werden.
|
||||
writeLog(nOfUnify.toString() + " Oderconstraints2: " + oderConstraintsOutput.toString());
|
||||
if (printtag) System.out.println("secondLevelSets:" + secondLevelSets);
|
||||
context.logger().debug(nOfUnify.toString() + " Oderconstraints2: " + oderConstraintsOutput.toString());
|
||||
if (printtag) context.logger().info("secondLevelSets:" + secondLevelSets);
|
||||
// If pairs occured that did not match one of the cartesian product cases,
|
||||
// those pairs are contradictory and the unification is impossible.
|
||||
if (!undefinedPairs.isEmpty()) {
|
||||
noUndefPair++;
|
||||
for (UnifyPair up : undefinedPairs) {
|
||||
writeLog(noUndefPair.toString() + " UndefinedPairs; " + up);
|
||||
writeLog("BasePair; " + up.getBasePair());
|
||||
context.logger().debug(noUndefPair.toString() + " UndefinedPairs; " + up);
|
||||
context.logger().debug("BasePair; " + up.getBasePair());
|
||||
}
|
||||
Set<Set<UnifyPair>> error = new HashSet<>();
|
||||
undefinedPairs = undefinedPairs.stream().peek(UnifyPair::setUndefinedPair)
|
||||
.collect(Collectors.toCollection(HashSet::new));
|
||||
error.add(undefinedPairs);
|
||||
undefinedPairs.forEach(x -> writeLog("AllSubst: " + x.getAllSubstitutions().toString()));
|
||||
undefinedPairs.forEach(x -> context.logger().debug("AllSubst: " + x.getAllSubstitutions().toString()));
|
||||
return CompletableFuture.completedFuture(error);
|
||||
}
|
||||
|
||||
@@ -452,16 +441,16 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
// Alternative: Sub cartesian products of the second level (pattern matched) sets
|
||||
// "the big (x)"
|
||||
/* for(Set<Set<Set<UnifyPair>>> secondLevelSet : secondLevelSets) {
|
||||
//System.out.println("secondLevelSet "+secondLevelSet.size());
|
||||
//context.logger().info("secondLevelSet "+secondLevelSet.size());
|
||||
List<Set<Set<UnifyPair>>> secondLevelSetList = new ArrayList<>(secondLevelSet);
|
||||
Set<List<Set<UnifyPair>>> cartResult = setOps.cartesianProduct(secondLevelSetList);
|
||||
//System.out.println("CardResult: "+cartResult.size());
|
||||
//context.logger().info("CardResult: "+cartResult.size());
|
||||
// Flatten and add to top level sets
|
||||
Set<Set<UnifyPair>> flat = new HashSet<>();
|
||||
int j = 0;
|
||||
for(List<Set<UnifyPair>> s : cartResult) {
|
||||
j++;
|
||||
//System.out.println("s from CardResult: "+cartResult.size() + " " + j);
|
||||
//context.logger().info("s from CardResult: "+cartResult.size() + " " + j);
|
||||
Set<UnifyPair> flat1 = new HashSet<>();
|
||||
for(Set<UnifyPair> s1 : s)
|
||||
flat1.addAll(s1);
|
||||
@@ -475,8 +464,8 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
for (Set<Set<? extends Set<UnifyPair>>> secondLevelSet : secondLevelSets) {
|
||||
topLevelSets.addAll(secondLevelSet);
|
||||
}
|
||||
//System.out.println(topLevelSets);
|
||||
//System.out.println();
|
||||
//context.logger().info(topLevelSets);
|
||||
//context.logger().info();
|
||||
|
||||
|
||||
//Aufruf von computeCartesianRecursive ANFANG
|
||||
@@ -517,18 +506,18 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
* Step 5: Substitution
|
||||
*/
|
||||
//writeLog("vor Subst: " + eqPrime);
|
||||
writeLog("vor Subst: " + oderConstraints);
|
||||
context.logger().debug("vor Subst: " + oderConstraints);
|
||||
String ocString = oderConstraints.toString();
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
|
||||
Optional<Set<UnifyPair>> eqPrimePrime = rules.subst(eqPrime, newOderConstraints);
|
||||
Set<Set<UnifyPair>> unifyres1 = null;
|
||||
Set<Set<UnifyPair>> unifyres2 = null;
|
||||
if (!ocString.equals(newOderConstraints.toString()))
|
||||
writeLog("nach Subst: " + newOderConstraints);
|
||||
context.logger().debug("nach Subst: " + newOderConstraints);
|
||||
|
||||
|
||||
{// sequentiell (Step 6b is included)
|
||||
if (printtag) System.out.println("nextStep: " + eqPrimePrime);
|
||||
if (printtag) context.logger().info("nextStep: " + eqPrimePrime);
|
||||
if (eqPrime.equals(eq) && eqPrimePrime.isEmpty()
|
||||
&& oderConstraints.isEmpty()) { //PL 2017-09-29 //(!eqPrimePrime.isPresent()) auskommentiert und durch
|
||||
//PL 2017-09-29 dies ersetzt //(!eqPrimePrime.isPresent())
|
||||
@@ -547,12 +536,12 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
return eqPrimePrimeSet;
|
||||
});
|
||||
if (finalresult && isSolvedForm(eqPrime)) {
|
||||
writeLog("eqPrime:" + eqPrime.toString() + "\n");
|
||||
context.logger().debug("eqPrime:" + eqPrime.toString() + "\n");
|
||||
|
||||
/* methodconstraintsets werden zum Ergebnis hinzugefuegt
|
||||
* Anfang
|
||||
*/
|
||||
//System.out.println("methodSignatureConstraint Return: " + methodSignatureConstraint + "\n");
|
||||
//context.logger().info("methodSignatureConstraint Return: " + methodSignatureConstraint + "\n");
|
||||
eqPrimePrimeSetFuture = eqPrimePrimeSetFuture.thenApply(eqPrimePrimeSet -> {
|
||||
eqPrimePrimeSet.forEach(x -> x.addAll(methodSignatureConstraint));
|
||||
|
||||
@@ -603,7 +592,7 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
return eqPrimePrimeSetFuture.thenApply(eqPrimePrimeSet -> {
|
||||
eqPrimePrimeSet = eqPrimePrimeSet.stream().filter(x -> isSolvedForm(x) || this.isUndefinedPairSet(x)).collect(Collectors.toCollection(HashSet::new));
|
||||
if (!eqPrimePrimeSet.isEmpty() && !isUndefinedPairSetSet(eqPrimePrimeSet)) {
|
||||
writeLog("Result1 " + eqPrimePrimeSet);
|
||||
context.logger().debug("Result1 " + eqPrimePrimeSet);
|
||||
}
|
||||
return eqPrimePrimeSet;
|
||||
});
|
||||
@@ -675,7 +664,7 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
} else {
|
||||
//Varianz-Bestimmung Oder-Constraints
|
||||
if (printtag) {
|
||||
System.out.println("nextSetasList " + nextSetAsList);
|
||||
context.logger().info("nextSetasList " + nextSetAsList);
|
||||
}
|
||||
variance = TypeUnifyTaskHelper.calculateOderConstraintVariance(nextSetAsList);
|
||||
}
|
||||
@@ -694,7 +683,7 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
Optional<UnifyPair> optOrigPair = Optional.empty();
|
||||
if (!oderConstraint) {
|
||||
optOrigPair = TypeUnifyTaskHelper.findEqualityConstrainedUnifyPair(nextSetElement);
|
||||
writeLog("optOrigPair: " + optOrigPair);
|
||||
context.logger().debug("optOrigPair: " + optOrigPair);
|
||||
|
||||
if (optOrigPair.isPresent()) {
|
||||
sameEqSet = TypeUnifyTaskHelper.findConstraintsWithSameTVAssociation(optOrigPair.get(), singleElementSets);
|
||||
@@ -708,7 +697,7 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
|
||||
return resultFuture.thenApply(result -> {
|
||||
//2020-02-02: if (variance ==2) Hier Aufruf von filterOverriding einfuegen
|
||||
writeLog("Return computeCR: " + result.toString());
|
||||
context.logger().debug("Return computeCR: " + result.toString());
|
||||
return result;
|
||||
});
|
||||
}
|
||||
@@ -735,23 +724,23 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
|
||||
VarianceCase varianceCase = VarianceCase.createFromVariance(variance, oderConstraint, this, context);
|
||||
|
||||
writeLog("nextSet: " + nextSet.toString());
|
||||
writeLog("nextSetasList: " + prevNextSetAsList.toString());
|
||||
context.logger().debug("nextSet: " + nextSet.toString());
|
||||
context.logger().debug("nextSetasList: " + prevNextSetAsList.toString());
|
||||
|
||||
varianceCase.selectNextData(this, prevNextSetAsList, optOrigPair);
|
||||
|
||||
if (oderConstraint) {//Methodconstraints werden abgespeichert für die Bytecodegenerierung von Methodenaufrufen
|
||||
methodSignatureConstraint.addAll(((Constraint<UnifyPair>) varianceCase.a).getmethodSignatureConstraint());
|
||||
writeLog("ERSTELLUNG methodSignatureConstraint: " + methodSignatureConstraint);
|
||||
//System.out.println("ERSTELLUNG methodSignatureConstraint: " +noOfThread+" "+methodSignatureConstraint);
|
||||
//System.out.println("a: " +a);
|
||||
//System.out.println("eq: " +eq);
|
||||
//System.out.println();
|
||||
context.logger().debug("ERSTELLUNG methodSignatureConstraint: " + methodSignatureConstraint);
|
||||
//context.logger().info("ERSTELLUNG methodSignatureConstraint: " +noOfThread+" "+methodSignatureConstraint);
|
||||
//context.logger().info("a: " +a);
|
||||
//context.logger().info("eq: " +eq);
|
||||
//context.logger().info();
|
||||
}
|
||||
|
||||
i++;
|
||||
Set<Set<UnifyPair>> elems = new HashSet<>(singleElementSets);
|
||||
writeLog("a1: " + rekTiefe + " " + "variance: " + variance + " " + varianceCase.a.toString() + "\n");
|
||||
context.logger().debug("a1: " + rekTiefe + " " + "variance: " + variance + " " + varianceCase.a.toString() + "\n");
|
||||
|
||||
Set<Set<UnifyPair>> aParDef = new HashSet<>();
|
||||
|
||||
@@ -798,7 +787,7 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
//Ab hier alle parallele Berechnungen wieder zusammengeführt.
|
||||
if (oderConstraint) {//Wenn weiteres Element nextSetasList genommen wird, muss die vorherige methodsignatur geloescht werden
|
||||
methodSignatureConstraint.removeAll(((Constraint<UnifyPair>) varianceCase.a).getmethodSignatureConstraint());
|
||||
//System.out.println("REMOVE: " +methodSignatureConstraint);
|
||||
//context.logger().info("REMOVE: " +methodSignatureConstraint);
|
||||
}
|
||||
if (!isUndefinedPairSetSet(currentThreadResult) && isUndefinedPairSetSet(result)) {
|
||||
//wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen
|
||||
@@ -816,16 +805,16 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
|
||||
//Alle Variablen bestimmen die nicht hinzugefügt wurden in a
|
||||
//PL 2018-12-28: Hier gab es eine ClassCastException, war nicht reproduzierbar
|
||||
// System.out.println("");
|
||||
// context.logger().info("");
|
||||
List<PlaceholderType> vars_a = TypeUnifyTaskHelper.extractMatchingPlaceholderTypes(varianceCase.a);
|
||||
Set<UnifyPair> fstElemRes = currentThreadResult.iterator().next();
|
||||
Set<UnifyPair> compRes = fstElemRes.stream().filter(x -> vars_a.contains(((PlaceholderType) x.getLhsType()))).collect(Collectors.toCollection(HashSet::new));
|
||||
|
||||
//Alle Variablen bestimmen die nicht hinzugefügt wurden in a_last
|
||||
//System.out.println(a_last);
|
||||
//context.logger().info(a_last);
|
||||
|
||||
try {//PL eingefuegt 2019-03-06 da bei map mmer wieder Nullpointer kamen
|
||||
a_last.forEach(x -> writeLog("a_last_elem:" + x + " basepair: " + x.getBasePair()));//PL 2019-05-13 ins try hinzugefuegt Nullpointer-Exception ist in der Zeile aufgetaucht.
|
||||
a_last.forEach(x -> context.logger().debug("a_last_elem:" + x + " basepair: " + x.getBasePair()));//PL 2019-05-13 ins try hinzugefuegt Nullpointer-Exception ist in der Zeile aufgetaucht.
|
||||
List<PlaceholderType> varsLast_a = TypeUnifyTaskHelper.extractMatchingPlaceholderTypes(a_last);
|
||||
//[(java.util.Vector<java.lang.Integer> <. gen_aq, , 1), (CEK =. ? extends gen_aq, 1)] KANN VORKOMMEN
|
||||
//erstes Element genügt, da vars immer auf die gleichen Elemente zugeordnet werden muessen
|
||||
@@ -834,11 +823,11 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
|
||||
varianceCase.applyComputedResults(result, currentThreadResult, compResult, compRes);
|
||||
} catch (NullPointerException e) {
|
||||
writeLog("NullPointerException: " + a_last.toString());
|
||||
context.logger().debug("NullPointerException: " + a_last.toString());
|
||||
}
|
||||
} else {
|
||||
//alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden
|
||||
writeLog("RES Fst: result: " + result.toString() + " currentThreadResult: " + currentThreadResult.toString());
|
||||
context.logger().debug("RES Fst: result: " + result.toString() + " currentThreadResult: " + currentThreadResult.toString());
|
||||
result.addAll(currentThreadResult);
|
||||
}
|
||||
}
|
||||
@@ -861,14 +850,14 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
//wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen
|
||||
result = par_res;
|
||||
if (!par_res.isEmpty() && par_res.iterator().next() instanceof WildcardType) {
|
||||
// System.out.println();
|
||||
// context.logger().info();
|
||||
}
|
||||
} else {
|
||||
if ((isUndefinedPairSetSet(par_res) && isUndefinedPairSetSet(result))
|
||||
|| (!isUndefinedPairSetSet(par_res) && !isUndefinedPairSetSet(result))
|
||||
|| result.isEmpty()) {
|
||||
//alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden
|
||||
writeLog("RES var1 ADD:" + result.toString() + " " + par_res.toString());
|
||||
context.logger().debug("RES var1 ADD:" + result.toString() + " " + par_res.toString());
|
||||
result.addAll(par_res);
|
||||
}
|
||||
}
|
||||
@@ -889,11 +878,11 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
// Iterator<Set<UnifyPair>> nextSetasListIt = new ArrayList<>(nextSetAsList).iterator();
|
||||
boolean shouldBreak = varianceCase.eraseInvalidSets(rekTiefe, aParDef, nextSetAsList);
|
||||
if (shouldBreak) {
|
||||
this.cancelChildExecution();
|
||||
//this.cancelChildExecution();
|
||||
return CompletableFuture.completedFuture(result);
|
||||
}
|
||||
|
||||
writeLog("a: " + rekTiefe + " variance: " + variance + varianceCase.a.toString());
|
||||
context.logger().debug("a: " + rekTiefe + " variance: " + variance + varianceCase.a.toString());
|
||||
}
|
||||
/* auskommentiert um alle Max und min Betrachtung auszuschalten ENDE */
|
||||
|
||||
@@ -928,9 +917,9 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
*/
|
||||
|
||||
if (currentThreadResult.size() > 1) {
|
||||
// System.out.println();
|
||||
// context.logger().info();
|
||||
}
|
||||
writeLog("nextSetasList vor filter-Aufruf: " + nextSetAsList);
|
||||
context.logger().debug("nextSetasList vor filter-Aufruf: " + nextSetAsList);
|
||||
if (!oderConstraint) {//PL 2023-02-08 eingefuegt: Bei oderconstraints sind Subststitutionen nicht als Substitutionen in idesem Sinne zu sehen
|
||||
nextSetAsList = nextSetAsList.stream().filter(x -> {
|
||||
//Boolean ret = false;
|
||||
@@ -941,24 +930,24 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
})//.filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y)) //fuer testzwecke auskommentiert um nofstred zu bestimmen PL 2018-10-10
|
||||
.collect(Collectors.toCollection(ArrayList::new));
|
||||
}
|
||||
writeLog("nextSetasList nach filter-Aufruf: " + nextSetAsList);
|
||||
context.logger().debug("nextSetasList nach filter-Aufruf: " + nextSetAsList);
|
||||
nofstred = nextSetAsList.size();
|
||||
//NOCH NICHT korrekt PL 2018-10-12
|
||||
//nextSetasList = nextSetasList.stream().filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y))
|
||||
// .collect(Collectors.toCollection(ArrayList::new));
|
||||
writeLog("currentThreadResult (undef): " + currentThreadResult.toString());
|
||||
writeLog("abhSubst: " + abhSubst.toString());
|
||||
writeLog("a2: " + rekTiefe + " " + varianceCase.a.toString());
|
||||
writeLog("Durchschnitt: " + durchschnitt.toString());
|
||||
writeLog("nextSet: " + nextSet.toString());
|
||||
writeLog("nextSetasList: " + nextSetAsList.toString());
|
||||
writeLog("Number first erased Elements (undef): " + (len - nofstred));
|
||||
writeLog("Number second erased Elements (undef): " + (nofstred - nextSetAsList.size()));
|
||||
writeLog("Number erased Elements (undef): " + (len - nextSetAsList.size()));
|
||||
context.logger().debug("currentThreadResult (undef): " + currentThreadResult.toString());
|
||||
context.logger().debug("abhSubst: " + abhSubst.toString());
|
||||
context.logger().debug("a2: " + rekTiefe + " " + varianceCase.a.toString());
|
||||
context.logger().debug("Durchschnitt: " + durchschnitt.toString());
|
||||
context.logger().debug("nextSet: " + nextSet.toString());
|
||||
context.logger().debug("nextSetasList: " + nextSetAsList.toString());
|
||||
context.logger().debug("Number first erased Elements (undef): " + (len - nofstred));
|
||||
context.logger().debug("Number second erased Elements (undef): " + (nofstred - nextSetAsList.size()));
|
||||
context.logger().debug("Number erased Elements (undef): " + (len - nextSetAsList.size()));
|
||||
noAllErasedElements += (len - nextSetAsList.size());
|
||||
writeLog("Number of all erased Elements (undef): " + noAllErasedElements.toString());
|
||||
writeLog("Number of Backtracking: " + noBacktracking++);
|
||||
// System.out.println("");
|
||||
context.logger().debug("Number of all erased Elements (undef): " + noAllErasedElements.toString());
|
||||
context.logger().debug("Number of Backtracking: " + noBacktracking++);
|
||||
// context.logger().info("");
|
||||
}
|
||||
//if (nextSetasList.size() == 0 && isUndefinedPairSetSet(result) && nextSet.size() > 1) {
|
||||
// return result;
|
||||
@@ -967,7 +956,7 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
// result.removeIf(y -> isUndefinedPairSet(y));
|
||||
//}
|
||||
//else result.stream().filter(y -> !isUndefinedPairSet(y));
|
||||
writeLog("currentThreadResult: " + currentThreadResult.toString());
|
||||
context.logger().debug("currentThreadResult: " + currentThreadResult.toString());
|
||||
|
||||
return this.innerCartesianLoop(variance, rekTiefe, oderConstraint, parallel, result, varianceCase.a, nextSet,
|
||||
nextSetAsList, optOrigPair, methodSignatureConstraint, singleElementSets, sameEqSet, oderConstraints);
|
||||
@@ -1004,7 +993,7 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
UnifyPair aPair = optAPair.get();
|
||||
//writeLog("optOrigPair: " + optOrigPair + " " + "aPair: " + aPair+ " " + "aPair.basePair(): " + aPair.getBasePair());
|
||||
|
||||
writeLog("checkA: " + aPair + "sameEqSet: " + sameEqSet);
|
||||
context.logger().debug("checkA: " + aPair + "sameEqSet: " + sameEqSet);
|
||||
for (UnifyPair sameEq : sameEqSet) {
|
||||
if (sameEq.getLhsType() instanceof PlaceholderType) {
|
||||
Set<UnifyPair> localEq = new HashSet<>();
|
||||
@@ -1025,7 +1014,7 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
if (result.isEmpty() || isUndefinedPairSetSet(result)) {
|
||||
result.addAll(localRes);
|
||||
}
|
||||
writeLog("FALSE: " + aPair + "sameEqSet: " + sameEqSet);
|
||||
context.logger().debug("FALSE: " + aPair + "sameEqSet: " + sameEqSet);
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
@@ -1047,12 +1036,12 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
if (result.isEmpty() || isUndefinedPairSetSet(result)) {
|
||||
result.addAll(localRes);
|
||||
}
|
||||
writeLog("FALSE: " + aPair + "sameEqSet: " + sameEqSet);
|
||||
context.logger().debug("FALSE: " + aPair + "sameEqSet: " + sameEqSet);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
writeLog("TRUE: " + aPair + "sameEqSet: " + sameEqSet);
|
||||
context.logger().debug("TRUE: " + aPair + "sameEqSet: " + sameEqSet);
|
||||
return true;
|
||||
}
|
||||
return true;
|
||||
@@ -1138,7 +1127,7 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
// Through application of the rules, every pair should have one of the above forms.
|
||||
// Pairs that do not have one of the aboves form are contradictory.
|
||||
else {
|
||||
writeLog("Second erase:" + checkPair);
|
||||
context.logger().debug("Second erase:" + checkPair);
|
||||
return false;
|
||||
}
|
||||
//*/
|
||||
@@ -1337,7 +1326,7 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
* (as in case 1 where sigma is added to the innermost set).
|
||||
*/
|
||||
protected Set<Set<Set<? extends Set<UnifyPair>>>> calculatePairSets(Set<UnifyPair> eq2s, List<Set<Constraint<UnifyPair>>> oderConstraintsInput, IFiniteClosure fc, Set<UnifyPair> undefined, List<Set<Constraint<UnifyPair>>> oderConstraintsOutput) {
|
||||
writeLog("eq2s: " + eq2s.toString());
|
||||
context.logger().debug("eq2s: " + eq2s.toString());
|
||||
oderConstraintsOutput.addAll(oderConstraintsInput);
|
||||
List<Set<Set<? extends Set<UnifyPair>>>> result = new ArrayList<>(9);
|
||||
|
||||
@@ -1424,10 +1413,10 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
}
|
||||
}
|
||||
|
||||
writeLog("eq2s: " + eq2s);
|
||||
writeLog("eq2sAsListFst: " + eq2sAsListFst);
|
||||
writeLog("eq2sAsListSnd: " + eq2sAsListSnd);
|
||||
writeLog("eq2sAsListBack: " + eq2sAsListBack);
|
||||
context.logger().debug("eq2s: " + eq2s);
|
||||
context.logger().debug("eq2sAsListFst: " + eq2sAsListFst);
|
||||
context.logger().debug("eq2sAsListSnd: " + eq2sAsListSnd);
|
||||
context.logger().debug("eq2sAsListBack: " + eq2sAsListBack);
|
||||
|
||||
eq2sAsList.addAll(eq2sAsListFst);
|
||||
eq2sAsList.addAll(eq2sAsListSnd);
|
||||
@@ -1440,7 +1429,7 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
if (!oderConstraintsOutput.isEmpty()) {
|
||||
Set<Constraint<UnifyPair>> ret = oderConstraintsOutput.removeFirst();
|
||||
//if (ret.iterator().next().iterator().next().getLhsType().getName().equals("M"))
|
||||
// System.out.println("M");
|
||||
// context.logger().info("M");
|
||||
//Set<UnifyPair> retFlat = new HashSet<>();
|
||||
//ret.stream().forEach(x -> retFlat.addAll(x));
|
||||
|
||||
@@ -1480,7 +1469,7 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
|
||||
// Case 1: (a <. Theta')
|
||||
if (((pairOp == PairOperator.SMALLERDOT) || (pairOp == PairOperator.SMALLERNEQDOT)) && lhsType instanceof PlaceholderType) {
|
||||
//System.out.println(pair);
|
||||
//context.logger().info(pair);
|
||||
if (first) { //writeLog(pair.toString()+"\n");
|
||||
Set<Set<UnifyPair>> x1 = new HashSet<>();
|
||||
if (pair.getRhsType().getName().equals("void")) {
|
||||
@@ -1502,7 +1491,7 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
x1.remove(remElem);
|
||||
}
|
||||
/* ZU LOESCHEN ANFANG
|
||||
//System.out.println(x1);
|
||||
//context.logger().info(x1);
|
||||
Set<UnifyPair> sameEqSet = eq2sAsList.stream()
|
||||
.filter(x -> ((x.getLhsType().equals(lhsType) || x.getRhsType().equals(lhsType)) && !x.equals(pair)))
|
||||
.collect(Collectors.toCollection(HashSet::new));
|
||||
@@ -1720,11 +1709,11 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
// && ((ReferenceType)thetaPrime).getTypeParams().iterator().next() instanceof PlaceholderType) //.getName().equals("java.util.Vector"))
|
||||
// && ((ReferenceType)((ReferenceType)thetaPrime).getTypeParams().iterator().next()).getTypeParams().iterator().next().getName().equals("java.lang.Integer")) {
|
||||
// {
|
||||
// System.out.println("");
|
||||
// context.logger().info("");
|
||||
//}
|
||||
Set<UnifyType> cs = fc.getAllTypesByName(thetaPrime.getName());//cs= [java.util.Vector<NP>, java.util.Vector<java.util.Vector<java.lang.Integer>>, ????java.util.Vector<gen_hv>???]
|
||||
|
||||
writeLog("cs: " + cs.toString());
|
||||
context.logger().debug("cs: " + cs.toString());
|
||||
//PL 18-02-06 entfernt, kommt durch unify wieder rein
|
||||
//cs.add(thetaPrime);
|
||||
//PL 18-02-06 entfernt
|
||||
@@ -1756,7 +1745,7 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
if ((match.match(ml)).isEmpty()) {
|
||||
thetaQs.remove(c);
|
||||
}
|
||||
writeLog("thetaQs von " + c + ": " + thetaQs.toString());
|
||||
context.logger().debug("thetaQs von " + c + ": " + thetaQs.toString());
|
||||
//Set<UnifyType> thetaQs = fc.getChildren(c).stream().collect(Collectors.toCollection(HashSet::new));
|
||||
//thetaQs.add(thetaPrime); //PL 18-02-05 wieder geloescht
|
||||
//PL 2017-10-03: War auskommentiert habe ich wieder einkommentiert,
|
||||
@@ -1780,7 +1769,7 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
for (TypeParams tp : permuteParams(candidateParams))
|
||||
thetaQPrimes.add(c.setTypeParams(tp));
|
||||
}
|
||||
writeLog("thetaQPrimes von " + c + ": " + thetaQPrimes.toString());
|
||||
context.logger().debug("thetaQPrimes von " + c + ": " + thetaQPrimes.toString());
|
||||
for (UnifyType tqp : thetaQPrimes) {//PL 2020-03-08 umbauen in der Schleife wird nur unifizierbarer Typ gesucht break am Ende
|
||||
Collection<PlaceholderType> tphs = tqp.getInvolvedPlaceholderTypes();
|
||||
Optional<Unifier> opt = stdUnify.unify(tqp, thetaPrime);
|
||||
@@ -1853,7 +1842,7 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
}
|
||||
}
|
||||
}
|
||||
writeLog("result von " + pair + ": " + result);
|
||||
context.logger().debug("result von " + pair + ": " + result);
|
||||
return result;
|
||||
}
|
||||
|
||||
@@ -1964,7 +1953,7 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
|
||||
return ((match.match(termList).isPresent()) || x);
|
||||
};
|
||||
//if (parai.getName().equals("java.lang.Integer")) {
|
||||
// System.out.println("");
|
||||
// context.logger().info("");
|
||||
//}
|
||||
BinaryOperator<Boolean> bo = (x, y) -> (x || y);
|
||||
if (fBounded.stream().reduce(false, f, bo)) {
|
||||
@@ -2054,22 +2043,4 @@ public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<Uni
permuteParams(candidates, idx + 1, result, current);
}
}

public void writeLog(String str) {
if (context.log() && finalresult) {
synchronized (context.logFile()) {
try {
/*
logFile.write("Thread no.:" + thNo + "\n");
logFile.write("noOfThread:" + noOfThread + "\n");
logFile.write("parallel:" + parallel + "\n");
*/
context.logFile().write(str + "\n\n");
// logFile.flush();
} catch (IOException e) {
System.err.println("kein LogFile");
}
}
}
}
}

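For orientation: the writeLog helper removed above wrote synchronized, exception-guarded lines to context.logFile(). After this change the call sites use context.logger().debug(...) directly, so the file handling lives inside the Logger. A hypothetical shim, if one were still wanted, could look like this (sketch, not part of the diff; the old context.log() flag no longer exists):

```java
// Hypothetical replacement shim for the removed writeLog (illustration only).
private void writeLog(String str) {
    if (finalresult) {                 // same guard the removed method used (minus context.log())
        context.logger().debug(str);   // the Logger decides whether/where to write and handles IO errors
    }
}
```
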
@@ -1,15 +1,13 @@
package de.dhbwstuttgart.typeinference.unify;

import de.dhbwstuttgart.util.Logger;
import java.io.Writer;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.atomic.AtomicInteger;

public record UnifyContext(
// main log file of a unification
Writer logFile,
// if logs should be made
Boolean log,
// main logger of a unification
Logger logger,
// if the unify algorithm should run in parallel
Boolean parallel,
// the model for storing calculated results
@@ -23,26 +21,24 @@ public record UnifyContext(
) {

public UnifyContext(
Writer logFile,
Boolean log,
Logger logger,
Boolean parallel,
UnifyResultModel resultModel,
UnifyTaskModel usedTasks,
ExecutorService executor,
PlaceholderRegistry placeholderRegistry
) {
this(logFile, log, parallel, resultModel, executor, placeholderRegistry, usedTasks);
this(logger, parallel, resultModel, executor, placeholderRegistry, usedTasks);
}

public UnifyContext(
Writer logFile,
Boolean log,
Logger logger,
Boolean parallel,
UnifyResultModel resultModel,
UnifyTaskModel usedTasks,
PlaceholderRegistry placeholderRegistry
) {
this(logFile, log, parallel, resultModel, usedTasks, ForkJoinPool.commonPool(), placeholderRegistry);
this(logger, parallel, resultModel, usedTasks, ForkJoinPool.commonPool(), placeholderRegistry);
}

@@ -51,20 +47,20 @@ public record UnifyContext(
* causes the UnifyContext to be essentially handled as a
*/

public UnifyContext newWithLogFile(Writer logFile) {
return new UnifyContext(logFile, log, parallel, resultModel, executor, placeholderRegistry, usedTasks);
public UnifyContext newWithLogger(Logger logger) {
return new UnifyContext(logger, parallel, resultModel, executor, placeholderRegistry, usedTasks);
}

public UnifyContext newWithParallel(boolean parallel) {
return new UnifyContext(logFile, log, parallel, resultModel, executor, placeholderRegistry, usedTasks);
return new UnifyContext(logger, parallel, resultModel, executor, placeholderRegistry, usedTasks);
}

public UnifyContext newWithExecutor(ExecutorService executor) {
return new UnifyContext(logFile, log, parallel, resultModel, executor, placeholderRegistry, usedTasks);
return new UnifyContext(logger, parallel, resultModel, executor, placeholderRegistry, usedTasks);
}

public UnifyContext newWithResultModel(UnifyResultModel resultModel) {
return new UnifyContext(logFile, log, parallel, resultModel, executor, placeholderRegistry, usedTasks);
return new UnifyContext(logger, parallel, resultModel, executor, placeholderRegistry, usedTasks);
}

}

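Hedged usage sketch: with UnifyContext now carrying a Logger instead of the logFile/log pair, wiring up a unification could look roughly like the example below. The component order follows the newWith* bodies above (logger, parallel, resultModel, executor, placeholderRegistry, usedTasks); the log path and the helper class name are illustrative, not taken from the diff.

```java
// Illustrative only: assembles a UnifyContext using the Logger-based API introduced in this diff.
package de.dhbwstuttgart.typeinference.unify;

import de.dhbwstuttgart.util.Logger;
import java.util.concurrent.ForkJoinPool;

public class UnifyContextExample {

    static UnifyContext buildContext(UnifyResultModel resultModel,
                                     UnifyTaskModel usedTasks,
                                     PlaceholderRegistry placeholderRegistry,
                                     boolean writeLogs) {
        // Either a file-backed logger or the silent NULL_LOGGER (as in the RuleSet default constructor).
        Logger logger = writeLogs
                ? Logger.forFile(System.getProperty("user.dir") + "/logFiles/Thread", "Unify")
                : Logger.NULL_LOGGER;

        // Canonical component order as inferred from the newWith* methods above.
        UnifyContext context = new UnifyContext(
                logger, true, resultModel, ForkJoinPool.commonPool(), placeholderRegistry, usedTasks);

        // The newWith* helpers are copy methods: same context, one component swapped.
        return context.newWithParallel(false);
    }
}
```
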
@@ -104,7 +104,7 @@ public class Variance0Case extends VarianceCase {
|
||||
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
|
||||
resultValues = resultValues.thenCombine(forkOrigFuture,
|
||||
(prevResults, currentThreadResult) -> {
|
||||
forkOrig.writeLog("final Orig 0");
|
||||
forkOrig.context.logger().debug("final Orig 0");
|
||||
forkOrig.closeLogFile();
|
||||
return new Tuple<>(currentThreadResult, prevResults.getSecond());
|
||||
});
|
||||
@@ -115,13 +115,13 @@ public class Variance0Case extends VarianceCase {
|
||||
}
|
||||
/* FORK ENDE */
|
||||
|
||||
writeLog("a in " + variance + " " + a);
|
||||
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
|
||||
context.logger().debug("a in " + variance + " " + a);
|
||||
context.logger().debug("nextSetasListRest: " + nextSetasListRest.toString());
|
||||
|
||||
while (!nextSetasListRest.isEmpty()) {
|
||||
Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
|
||||
nextSetAsList.remove(nSaL);
|
||||
writeLog("0 RM" + nSaL.toString());
|
||||
context.logger().debug("0 RM" + nSaL.toString());
|
||||
|
||||
if (!this.isOderConstraint) {
|
||||
//ueberpruefung ob zu a =. ty \in nSaL in sameEqSet ein Widerspruch besteht
|
||||
@@ -130,7 +130,8 @@ public class Variance0Case extends VarianceCase {
|
||||
continue;
|
||||
}
|
||||
} else {
|
||||
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) nSaL).getExtendConstraint());
|
||||
Constraint<UnifyPair> extendConstraint = ((Constraint<UnifyPair>) nSaL).getExtendConstraint();
|
||||
nextSetasListOderConstraints.add(extendConstraint);
|
||||
}
|
||||
Set<UnifyPair> newEq = new HashSet<>(eq);
|
||||
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
|
||||
@@ -145,13 +146,13 @@ public class Variance0Case extends VarianceCase {
|
||||
if (typeUnifyTask.myIsCancelled()) {
|
||||
throw new UnifyCancelException();
|
||||
}
|
||||
writeLog("fork_res: " + fork_res.toString());
|
||||
writeLog(Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString());
|
||||
context.logger().debug("fork_res: " + fork_res.toString());
|
||||
context.logger().debug(Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString());
|
||||
prevResults.getSecond().add(fork_res);
|
||||
if (!typeUnifyTask.isUndefinedPairSetSet(fork_res)) {
|
||||
aParDef.add(fork.getNextSetElement());
|
||||
}
|
||||
fork.writeLog("final 0");
|
||||
fork.context.logger().debug("final 0");
|
||||
fork.closeLogFile();
|
||||
return prevResults;
|
||||
}
|
||||
@@ -172,7 +173,7 @@ public class Variance0Case extends VarianceCase {
|
||||
Set<UnifyPair> compResult,
|
||||
Set<UnifyPair> compRes
|
||||
) {
|
||||
writeLog("RES var=0 ADD:" + result.toString() + " " + currentThreadResult.toString());
|
||||
context.logger().debug("RES var=0 ADD:" + result.toString() + " " + currentThreadResult.toString());
|
||||
result.addAll(currentThreadResult);
|
||||
}
|
||||
|
||||
@@ -188,8 +189,8 @@ public class Variance0Case extends VarianceCase {
|
||||
|
||||
nextSetAsList.removeAll(nextSetasListOderConstraints);
|
||||
nextSetasListOderConstraints = new ArrayList<>();
|
||||
writeLog("Removed: " + nextSetasListOderConstraints);
|
||||
final List<Set<UnifyPair>> smallerSetasList = typeUnifyTask.oup.smallerThan(a, nextSetAsList);
|
||||
context.logger().debug("Removed: " + nextSetasListOderConstraints);
|
||||
List<Set<UnifyPair>> smallerSetasList = typeUnifyTask.oup.smallerThan(a, nextSetAsList);
|
||||
List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
|
||||
.filter(x -> !((Constraint<UnifyPair>) x).isInherited())
|
||||
.collect(Collectors.toCollection(ArrayList::new));
|
||||
@@ -198,14 +199,14 @@ public class Variance0Case extends VarianceCase {
|
||||
List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList);
|
||||
erased.removeAll(notErased);
|
||||
nextSetAsList.removeAll(erased);
|
||||
writeLog("Removed: " + erased);
|
||||
writeLog("Not Removed: " + nextSetAsList);
|
||||
context.logger().debug("Removed: " + erased);
|
||||
context.logger().debug("Not Removed: " + nextSetAsList);
|
||||
|
||||
for (Set<UnifyPair> aPar : aParDef) {
|
||||
|
||||
nextSetAsList.removeAll(nextSetasListOderConstraints);
|
||||
nextSetasListOderConstraints = new ArrayList<>();
|
||||
writeLog("Removed: " + nextSetasListOderConstraints);
|
||||
context.logger().debug("Removed: " + nextSetasListOderConstraints);
|
||||
smallerSetasList.clear();
|
||||
smallerSetasList.addAll(typeUnifyTask.oup.smallerThan(aPar, nextSetAsList));
|
||||
notInherited = smallerSetasList.stream()
|
||||
@@ -216,8 +217,8 @@ public class Variance0Case extends VarianceCase {
|
||||
erased = new ArrayList<>(smallerSetasList);
|
||||
erased.removeAll(notErased);
|
||||
nextSetAsList.removeAll(erased);
|
||||
writeLog("Removed: " + erased);
|
||||
writeLog("Not Removed: " + nextSetAsList);
|
||||
context.logger().debug("Removed: " + erased);
|
||||
context.logger().debug("Not Removed: " + nextSetAsList);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -32,12 +32,12 @@ public class Variance1Case extends VarianceCase {
|
||||
Optional<UnifyPair> optOrigPair
|
||||
) {
|
||||
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
|
||||
writeLog("Max: a in " + variance + " " + a);
|
||||
context.logger().debug("Max: a in " + variance + " " + a);
|
||||
nextSetAsList.remove(a);
|
||||
if (this.isOderConstraint) {
|
||||
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
|
||||
}
|
||||
writeLog("nextSetasListOderConstraints 1: " + nextSetasListOderConstraints);
|
||||
context.logger().debug("nextSetasListOderConstraints 1: " + nextSetasListOderConstraints);
|
||||
|
||||
//Alle maximale Elemente in nextSetasListRest bestimmen
|
||||
//nur für diese wird parallele Berechnung angestossen.
|
||||
@@ -77,7 +77,7 @@ public class Variance1Case extends VarianceCase {
|
||||
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
|
||||
resultValues = resultValues.thenCombine(forkOrigFuture,
|
||||
(prevResults, currentThreadResult) -> {
|
||||
forkOrig.writeLog("final Orig 1");
|
||||
forkOrig.context.logger().debug("final Orig 1");
|
||||
forkOrig.closeLogFile();
|
||||
return new Tuple<>(currentThreadResult, prevResults.getSecond());
|
||||
});
|
||||
@@ -88,12 +88,12 @@ public class Variance1Case extends VarianceCase {
|
||||
}
|
||||
/* FORK ENDE */
|
||||
|
||||
writeLog("a in " + variance + " " + a);
|
||||
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
|
||||
context.logger().debug("a in " + variance + " " + a);
|
||||
context.logger().debug("nextSetasListRest: " + nextSetasListRest.toString());
|
||||
while (!nextSetasListRest.isEmpty()) {
|
||||
Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
|
||||
nextSetAsList.remove(nSaL);
|
||||
writeLog("1 RM" + nSaL.toString());
|
||||
context.logger().debug("1 RM" + nSaL.toString());
|
||||
|
||||
if (!this.isOderConstraint) {
|
||||
//ueberpruefung ob zu a =. ty \in nSaL in sameEqSet ein Widerspruch besteht
|
||||
@@ -117,13 +117,13 @@ public class Variance1Case extends VarianceCase {
|
||||
if (typeUnifyTask.myIsCancelled()) {
|
||||
throw new UnifyCancelException();
|
||||
}
|
||||
writeLog("fork_res: " + fork_res.toString());
|
||||
writeLog(Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString());
|
||||
context.logger().debug("fork_res: " + fork_res.toString());
|
||||
context.logger().debug(Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString());
|
||||
prevResults.getSecond().add(fork_res);
|
||||
if (!typeUnifyTask.isUndefinedPairSetSet(fork_res)) {
|
||||
aParDef.add(fork.getNextSetElement());
|
||||
}
|
||||
fork.writeLog("final 1");
|
||||
fork.context.logger().debug("final 1");
|
||||
fork.closeLogFile();
|
||||
return prevResults;
|
||||
}
|
||||
@@ -146,7 +146,7 @@ public class Variance1Case extends VarianceCase {
) {
int resOfCompare = typeUnifyTask.oup.compare(compResult, compRes);
if (resOfCompare == -1) {
writeLog("Geloescht result: " + result);
context.logger().debug("Geloescht result: " + result);
result.clear();
result.addAll(currentThreadResult);
}
@@ -154,7 +154,7 @@ public class Variance1Case extends VarianceCase {
result.addAll(currentThreadResult);
}
else if (resOfCompare == 1) {
writeLog("Geloescht currentThreadResult: " + currentThreadResult);
context.logger().debug("Geloescht currentThreadResult: " + currentThreadResult);
//result = result;
}
}
@@ -165,35 +165,35 @@ public class Variance1Case extends VarianceCase {
Set<Set<UnifyPair>> aParDef,
List<Set<UnifyPair>> nextSetAsList
) {
// System.out.println("");
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
writeLog("aParDef: " + aParDef.toString());
// context.logger().info("");
context.logger().debug("a: " + rekTiefe + " variance: " + variance + a.toString());
context.logger().debug("aParDef: " + aParDef.toString());
aParDef.add(a);
Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator();
if (this.isOderConstraint) {
nextSetAsList.removeAll(nextSetasListOderConstraints);
nextSetasListOderConstraints = new ArrayList<>();
writeLog("Removed: " + nextSetasListOderConstraints);
context.logger().debug("Removed: " + nextSetasListOderConstraints);
while (aParDefIt.hasNext()) {
Set<UnifyPair> a_new = aParDefIt.next();
List<Set<UnifyPair>> smallerSetasList = typeUnifyTask.oup.smallerThan(a_new, nextSetAsList);
writeLog("smallerSetasList: " + smallerSetasList);
context.logger().debug("smallerSetasList: " + smallerSetasList);
List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
.filter(x -> !((Constraint<UnifyPair>) x).isInherited() && !((Constraint<UnifyPair>) x).isImplemented())
.collect(Collectors.toCollection(ArrayList::new));
writeLog("notInherited: " + notInherited + "\n");
context.logger().debug("notInherited: " + notInherited + "\n");
List<Set<UnifyPair>> notErased = new ArrayList<>();
notInherited.forEach(x -> {
notErased.addAll(typeUnifyTask.oup.smallerEqThan(x, smallerSetasList));
});
List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList);
writeLog("notErased: " + notErased + "\n");
context.logger().debug("notErased: " + notErased + "\n");
erased.removeAll(notErased);
nextSetAsList.removeAll(erased);

writeLog("Removed: " + erased);
context.logger().debug("Removed: " + erased);

writeLog("Not Removed: " + nextSetAsList);
context.logger().debug("Not Removed: " + nextSetAsList);

}
} else {
@@ -203,9 +203,9 @@ public class Variance1Case extends VarianceCase {
List<Set<UnifyPair>> erased = typeUnifyTask.oup.smallerEqThan(a_new, nextSetAsList);
nextSetAsList.removeAll(erased);

writeLog("Removed: " + erased);
context.logger().debug("Removed: " + erased);

writeLog("Not Removed: " + nextSetAsList);
context.logger().debug("Not Removed: " + nextSetAsList);
}
}

@@ -51,7 +51,7 @@ public class Variance2Case extends VarianceCase {
) {
CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> resultValuesFuture;

writeLog("var2einstieg");
context.logger().debug("var2einstieg");
Set<TypeUnify2Task> forks = new HashSet<>();
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
@@ -63,7 +63,7 @@ public class Variance2Case extends VarianceCase {
typeUnifyTask.addChildTask(forkOrig);
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
resultValuesFuture = forkOrigFuture.thenApply((currentThreadResult) -> {
forkOrig.writeLog("final Orig 2");
forkOrig.context.logger().debug("final Orig 2");
forkOrig.closeLogFile();
return new Tuple<>(currentThreadResult, new HashSet<>());
});
@@ -72,8 +72,8 @@ public class Variance2Case extends VarianceCase {
}
/* FORK ENDE */

writeLog("a in " + variance + " " + a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
context.logger().debug("a in " + variance + " " + a);
context.logger().debug("nextSetasListRest: " + nextSetasListRest.toString());

//Fuer parallele Berechnung der Oder-Contraints wird methodSignature kopiert
//und jeweils die methodSignature von a bzw. nSaL wieder gelöscht, wenn es keine Lösung ist.
@@ -97,7 +97,7 @@ public class Variance2Case extends VarianceCase {
throw new UnifyCancelException();
}
resultValues.getSecond().add(fork_res);
fork.writeLog("final 2");
fork.context.logger().debug("final 2");
fork.closeLogFile();
return resultValues;
});

@@ -104,9 +104,4 @@ public abstract class VarianceCase {
Set<Set<UnifyPair>> aParDef,
List<Set<UnifyPair>> nextSetAsList
);

protected void writeLog(String s) {
typeUnifyTask.writeLog(s);
}

}

@@ -32,11 +32,11 @@ public class VarianceM1Case extends VarianceCase {
Optional<UnifyPair> optOrigPair
) {
a = typeUnifyTask.oup.min(nextSetAsList.iterator());
writeLog("Min: a in " + variance + " " + a);
context.logger().debug("Min: a in " + variance + " " + a);
if (this.isOderConstraint) {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
}
writeLog("nextSetasListOderConstraints -1: " + nextSetasListOderConstraints);
context.logger().debug("nextSetasListOderConstraints -1: " + nextSetasListOderConstraints);
nextSetAsList.remove(a);

//Alle minimalen Elemente in nextSetasListRest bestimmen
@@ -77,7 +77,7 @@ public class VarianceM1Case extends VarianceCase {
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
resultValues = resultValues.thenCombine(forkOrigFuture,
(prevResults, currentThreadResult) -> {
forkOrig.writeLog("final Orig -1");
forkOrig.context.logger().debug("final Orig -1");
forkOrig.closeLogFile();
return new Tuple<>(currentThreadResult, prevResults.getSecond());
});
@@ -88,13 +88,13 @@ public class VarianceM1Case extends VarianceCase {
}
/* FORK ENDE */

writeLog("a in " + variance + " " + a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
context.logger().debug("a in " + variance + " " + a);
context.logger().debug("nextSetasListRest: " + nextSetasListRest.toString());

while (!nextSetasListRest.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
nextSetAsList.remove(nSaL);
writeLog("-1 RM" + nSaL.toString());
context.logger().debug("-1 RM" + nSaL.toString());

if (!this.isOderConstraint) {
//ueberpruefung ob zu a =. ty \in nSaL in sameEqSet ein Widerspruch besteht
@@ -118,13 +118,13 @@ public class VarianceM1Case extends VarianceCase {
if (typeUnifyTask.myIsCancelled()) {
throw new UnifyCancelException();
}
writeLog("fork_res: " + fork_res.toString());
writeLog(Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString());
context.logger().debug("fork_res: " + fork_res.toString());
context.logger().debug(Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString());
prevResults.getSecond().add(fork_res);
if (!typeUnifyTask.isUndefinedPairSetSet(fork_res)) {
aParDef.add(fork.getNextSetElement());
}
fork.writeLog("final -1");
fork.context.logger().debug("final -1");
fork.closeLogFile();
return prevResults;
}
@@ -148,13 +148,13 @@ public class VarianceM1Case extends VarianceCase {
) {
int resOfCompare = typeUnifyTask.oup.compare(compResult, compRes);
if (resOfCompare == 1) {
writeLog("Geloescht result: " + result);
context.logger().debug("Geloescht result: " + result);
result.clear();
result.addAll(currentThreadResult);
} else if (resOfCompare == 0) {
result.addAll(currentThreadResult);
} else if (resOfCompare == -1) {
writeLog("Geloescht currentThreadResult: " + currentThreadResult);
context.logger().debug("Geloescht currentThreadResult: " + currentThreadResult);
//result = result;
}
}
@@ -166,14 +166,14 @@ public class VarianceM1Case extends VarianceCase {
List<Set<UnifyPair>> nextSetAsList
) {

// System.out.println("");
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
writeLog("aParDef: " + aParDef.toString());
// context.logger().info("");
context.logger().debug("a: " + rekTiefe + " variance: " + variance + a.toString());
context.logger().debug("aParDef: " + aParDef.toString());
aParDef.add(a);
Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator();
if (this.isOderConstraint) {
nextSetAsList.removeAll(nextSetasListOderConstraints);
writeLog("Removed: " + nextSetasListOderConstraints);
context.logger().debug("Removed: " + nextSetasListOderConstraints);
nextSetasListOderConstraints = new ArrayList<>();
while (aParDefIt.hasNext()) {
Set<UnifyPair> a_new = aParDefIt.next();
@@ -206,9 +206,9 @@ public class VarianceM1Case extends VarianceCase {
erased.removeAll(notErased);
nextSetAsList.removeAll(erased);

writeLog("Removed: " + erased);
context.logger().debug("Removed: " + erased);

writeLog("Not Removed: " + nextSetAsList);
context.logger().debug("Not Removed: " + nextSetAsList);

}
} else {
@@ -219,9 +219,9 @@ public class VarianceM1Case extends VarianceCase {

nextSetAsList.removeAll(erased);

writeLog("Removed: " + erased);
context.logger().debug("Removed: " + erased);

writeLog("Not Removed: " + nextSetAsList);
context.logger().debug("Not Removed: " + nextSetAsList);
}
}

@@ -24,9 +24,6 @@ public final class ExtendsType extends WildcardType implements ISerializableData
*/
public ExtendsType(UnifyType extendedType) {
super("? extends " + extendedType.getName(), extendedType);
if (extendedType instanceof ExtendsType) {
System.out.print("");
}
}

/**

@@ -6,6 +6,7 @@ import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialList;
import de.dhbwstuttgart.server.packet.dataContainers.serialized.SerialMap;
import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
import de.dhbwstuttgart.util.Logger;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
@@ -42,7 +43,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
final JavaTXCompiler compiler;
final PlaceholderRegistry placeholderRegistry;

Writer logFile;
Logger logger;

/**
* A map that maps every type to the node in the inheritance graph that contains that type.
@@ -73,10 +74,10 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
/**
* Creates a new instance using the inheritance tree defined in the pairs.
*/
public FiniteClosure(Set<UnifyPair> pairs, Writer logFile, JavaTXCompiler compiler, PlaceholderRegistry placeholderRegistry) {
public FiniteClosure(Set<UnifyPair> pairs, Logger logger, JavaTXCompiler compiler, PlaceholderRegistry placeholderRegistry) {
this.compiler = compiler;
this.placeholderRegistry = placeholderRegistry;
this.logFile = logFile;
this.logger = logger;
this.pairs = new HashSet<>(pairs);
inheritanceGraph = new HashMap<UnifyType, Node<UnifyType>>();

@@ -134,8 +135,8 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
}
}

public FiniteClosure(Set<UnifyPair> constraints, Writer writer, PlaceholderRegistry placeholderRegistry) {
this(constraints, writer, null, placeholderRegistry);
public FiniteClosure(Set<UnifyPair> constraints, Logger logger, PlaceholderRegistry placeholderRegistry) {
this(constraints, logger, null, placeholderRegistry);
}

void testSmaller() {
@@ -159,7 +160,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {

Set<UnifyType> ret;
if ((ret = smallerHash.get(new hashKeyType(type))) != null) {
//System.out.println(greaterHash);
//context.logger().info(greaterHash);
return new HashSet<>(ret);
}

@@ -200,7 +201,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
result.add(new Pair<>(t, fBounded));
}
catch (StackOverflowError e) {
// System.out.println("");
// context.logger().info("");
}

// if C<...> <* C<...> then ... (third case in definition of <*)
@@ -273,7 +274,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
public Set<UnifyType> greater(UnifyType type, Set<UnifyType> fBounded, SourceLoc location) {
Set<UnifyType> ret;
if ((ret = greaterHash.get(new hashKeyType(type))) != null) {
//System.out.println(greaterHash);
//context.logger().info(greaterHash);
return new HashSet<>(ret);
}

@@ -323,7 +324,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
Set<UnifyType> fBoundedNew = new HashSet<>(fBounded);
fBoundedNew.add(theta1);
Set<UnifyType> theta2Set = candidate.getContentOfPredecessors();
//System.out.println("");
//context.logger().info("");
for(UnifyType theta2 : theta2Set) {
result.add(theta2.apply(sigma));
PairResultFBounded.add(new Pair<>(theta2.apply(sigma), fBoundedNew));
@@ -357,7 +358,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
return ((match.match(termList).isPresent()) || x);
};
//if (parai.getName().equals("java.lang.Integer")) {
// System.out.println("");
// context.logger().info("");
//}
BinaryOperator<Boolean> bo = (a,b) -> (a || b);
if (lfBounded.stream().reduce(false,f,bo)) {
@@ -370,7 +371,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
}
}
permuteParams(paramCandidates).forEach(x -> result.add(t.setTypeParams(x)));
//System.out.println("");
//context.logger().info("");
}
}

@@ -430,7 +431,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
return ((match.match(termList).isPresent()) || x);
};
if (parai.getName().equals("java.lang.Integer")) {
System.out.println("");
context.logger().info("");
}
BinaryOperator<Boolean> bo = (a,b) -> (a || b);
if (fBounded.stream().reduce(false,f,bo)) continue; //F-Bounded Endlosrekursion
@@ -469,7 +470,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
}

HashSet<UnifyType> resut = result.stream().map(x -> x.getKey()).collect(Collectors.toCollection(HashSet::new));
System.out.println(resut);
context.logger().info(resut);
if(resut.equals(types.stream().map(x -> x.getKey()).collect(Collectors.toCollection(HashSet::new))))
return resut;
return computeGreater(result);
@@ -692,9 +693,9 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
*/

public int compare (UnifyType left, UnifyType right, PairOperator pairop, UnifyContext context) {
try {logFile.write("left: "+ left + " right: " + right + " pairop: " + pairop +"\n");} catch (IOException ie) {}
logger.debug("left: "+ left + " right: " + right + " pairop: " + pairop +"\n");
// if (left.getName().equals("Matrix") || right.getName().equals("Matrix"))
// System.out.println("");
// context.logger().info("");
/*
pairop = PairOperator.SMALLERDOTWC;
List<UnifyType> al = new ArrayList<>();
@@ -751,13 +752,8 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
Set<UnifyPair> smallerRes = unifyTask.applyTypeUnificationRules(hs, this);

//if (left.getName().equals("Vector") || right.getName().equals("AbstractList"))
{try {
logFile.write("\nsmallerRes: " + smallerRes);//"smallerHash: " + greaterHash.toString());
// logFile.flush();
}
catch (IOException e) {
System.err.println("no LogFile");}}

logger.debug("\nsmallerRes: " + smallerRes);//"smallerHash: " + greaterHash.toString());

//Gleichungen der Form a <./=. Theta oder Theta <./=. a oder a <./=. b sind ok.
Predicate<UnifyPair> delFun = x -> !((x.getLhsType() instanceof PlaceholderType ||
x.getRhsType() instanceof PlaceholderType)
@@ -765,12 +761,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
((WildcardType)x.getLhsType()).getWildcardedType().equals(x.getRhsType()))
);
long smallerLen = smallerRes.stream().filter(delFun).count();
try {
logFile.write("\nsmallerLen: " + smallerLen +"\n");
// logFile.flush();
}
catch (IOException e) {
System.err.println("no LogFile");}
logger.debug("\nsmallerLen: " + smallerLen +"\n");
if (smallerLen == 0) return -1;
else {
up = new UnifyPair(right, left, pairop);
@@ -780,13 +771,8 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
Set<UnifyPair> greaterRes = unifyTask.applyTypeUnificationRules(hs, this);

//if (left.getName().equals("Vector") || right.getName().equals("AbstractList"))
{try {
logFile.write("\ngreaterRes: " + greaterRes);//"smallerHash: " + greaterHash.toString());
// logFile.flush();
}
catch (IOException e) {
System.err.println("no LogFile");}}

logger.debug("\ngreaterRes: " + greaterRes);//"smallerHash: " + greaterHash.toString());

//Gleichungen der Form a <./=. Theta oder Theta <./=. a oder a <./=. b sind ok.
long greaterLen = greaterRes.stream().filter(delFun).count();
if (greaterLen == 0) return 1;
@@ -807,7 +793,7 @@ public class FiniteClosure implements IFiniteClosure, ISerializableData {
var pairList = data.getList("pairs").assertListOfUUIDs();
Set<UnifyPair> pairs = pairList.stream()
.map(pairData -> UnifyPair.fromSerial(pairData, context, keyStorage)).collect(Collectors.toSet());
return new FiniteClosure(pairs, context.logFile(), context.placeholderRegistry());
return new FiniteClosure(pairs, context.logger(), context.placeholderRegistry());
}

}

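Both FiniteClosure constructors now take a Logger in place of the raw Writer, so the class no longer has to wrap each log statement in its own try/catch around logFile.write. A small construction sketch under the new three-argument signature (the class name and the empty pair set are placeholders; Logger.NULL_LOGGER and PlaceholderRegistry are used the same way by the tests further down in this diff):

import de.dhbwstuttgart.typeinference.unify.PlaceholderRegistry;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.util.Logger;
import java.util.HashSet;
import java.util.Set;

class FiniteClosureConstructionSketch {
    // builds a FiniteClosure with no inheritance pairs and logging disabled
    static FiniteClosure silentClosure() {
        Set<UnifyPair> pairs = new HashSet<>();
        PlaceholderRegistry registry = new PlaceholderRegistry();
        return new FiniteClosure(pairs, Logger.NULL_LOGGER, registry);
    }
}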
@@ -48,12 +48,7 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOT, context);
}}
catch (ClassCastException e) {
try {
((FiniteClosure)fc).logFile.write("ClassCastException: " + left.toString() + " " + left.getGroundBasePair() + "\n\n");
// ((FiniteClosure)fc).logFile.flush();
}
catch (IOException ie) {
}
((FiniteClosure)fc).logger.debug("ClassCastException: " + left.toString() + " " + left.getGroundBasePair() + "\n\n");
return -99;
}
}
@@ -61,7 +56,7 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
/*
public int compareEq (UnifyPair left, UnifyPair right) {
if (left == null || right == null)
System.out.println("Fehler");
context.logger().info("Fehler");
if (left.getLhsType() instanceof PlaceholderType) {
return fc.compare(left.getRhsType(), right.getRhsType(), left.getPairOp());
}
@@ -82,12 +77,12 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
&& (((ExtendsType)right).getExtendedType().getName().equals("java.util.Vector"))
&& (((ReferenceType)((ExtendsType)right).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType)))
{
// System.out.println("");
// context.logger().info("");
}
if (((right instanceof SuperType) && (((SuperType)right).getSuperedType().getName().equals("java.lang.Object")))
||((left instanceof SuperType) && (((SuperType)left).getSuperedType().getName().equals("java.lang.Object"))))
{
// System.out.println("");
// context.logger().info("");
}
}
else {
@@ -109,11 +104,11 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
&& (((ExtendsType)right).getExtendedType().getName().equals("java.util.Vector"))
&& (((ReferenceType)((ExtendsType)right).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType)))
{
// System.out.println("");
// context.logger().info("");
}
if (right instanceof SuperType)
{
// System.out.println("");
// context.logger().info("");
}
}
else {
@@ -199,7 +194,7 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
.filter(x -> ((x.getLhsType() instanceof PlaceholderType || x.getRhsType() instanceof PlaceholderType)
&& x.getPairOp() == PairOperator.SMALLERDOTWC))
.collect(Collectors.toCollection(HashSet::new));
//System.out.println(left.toString());
//context.logger().info(left.toString());
//Fall 2
//if (lefteq.iterator().next().getLhsType().getName().equals("AJO")) {
// System.out.print("");
@@ -378,8 +373,6 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
}
//Fall 1 und 4
if (lefteq.size() >= 1 && righteq.size() >= 1 && (leftlewc.size() > 0 || rightlewc.size() > 0)) {
if (lefteq.iterator().next().getLhsType().getName().equals("D"))
System.out.print("");
//Set<PlaceholderType> varsleft = lefteq.stream().map(x -> (PlaceholderType)x.getLhsType()).collect(Collectors.toCollection(HashSet::new));
//Set<PlaceholderType> varsright = righteq.stream().map(x -> (PlaceholderType)x.getLhsType()).collect(Collectors.toCollection(HashSet::new));
//filtern des Paares a = Theta, das durch a <. Thata' generiert wurde (nur im Fall 1 relevant) andere Substitutioen werden rausgefiltert

@@ -1,9 +1,238 @@
package de.dhbwstuttgart.util;

import com.diogonunes.jcolor.Attribute;
import de.dhbwstuttgart.core.ConsoleInterface;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Objects;
import java.util.stream.Collectors;

import static com.diogonunes.jcolor.Ansi.colorize;

public class Logger {

public static void print(String s) {
System.out.println(s);
protected final Writer writer;
protected final String prefix;

public static Logger NULL_LOGGER = new NullLogger();

public Logger() {
this(null, "");
}

}
public Logger(String prefix) {
this(null, prefix);
}

public Logger(Writer writer, String prefix) {
this.prefix = (Objects.equals(prefix, "")) ? "" : "["+prefix+"] ";
this.writer = writer;
}

/**
* Create a new logger object from the path provided
*
* @param filePath The path to the output file. Recommended file extension ".log"
* @return The Logger object for this output file
*/
public static Logger forFile(String filePath, String prefix) {
File file = Path.of(filePath).toFile();
try {
Writer fileWriter = new FileWriter(file);
return new Logger(fileWriter, prefix);
}
catch (IOException exception) {
throw new RuntimeException("Failed to created Logger for file " + filePath, exception);
|
||||
}
}

/**
* Create a new logger object that inherits the writer of the given logger object
*
* @param logger The old logger object, that will provide the writer
* @param newPrefix The prefix for the new logger object
* @return The new logger object
*/
public static Logger inherit(Logger logger, String newPrefix) {
return new Logger(logger.writer, newPrefix);
}

/**
* Tint the prefix in the color of the logLevel
* @param logLevel The logLevel to set the tint to
* @return The tinted string (using ANSI sequences)
*/
protected String getPrefix(LogLevel logLevel) {
String fullPrefix = prefix + "[" + logLevel + "] ";
return switch (logLevel) {
case DEBUG -> colorize(fullPrefix, Attribute.BRIGHT_MAGENTA_TEXT());
case INFO -> colorize(fullPrefix, Attribute.BLUE_TEXT());
case WARNING -> colorize(fullPrefix, Attribute.YELLOW_TEXT());
case ERROR -> colorize(fullPrefix, Attribute.RED_TEXT());
};
}

/**
* Print text to the output or error stream, depending on the logLevel
* @param s The string to print
* @param logLevel If logLevel is ERROR, print to stderr; otherwise print to stdout
*/
protected void print(String s, LogLevel logLevel) {
String coloredPrefix = this.getPrefix(logLevel);
if (logLevel.value > LogLevel.WARNING.value) {
System.err.println(coloredPrefix + s);
}
else {
System.out.println(coloredPrefix + s);
}
}

/**
* Write text to the attached writer, if there is any
* @param s The string to print
*/
protected void write(String s) {
if (writer != null && ConsoleInterface.writeLogFiles) {
// writing to file should only be done when necessary
synchronized (writer) {
try {
writer.write(s);
}
catch (IOException exception) {
throw new RuntimeException("Failed writing to file", exception);
}
}
}
}

/**
* Base method for logging a string value. Should mostly be used by the Logger internal functions that
* abstract the logLevel away from the parameters
*
* @hidden Only relevant for the Logger and very special cases with dynamic logLevel
* @param s The text to log
* @param logLevel The logLevel on which the text should be logged
*/
public void log(String s, LogLevel logLevel) {
if (logLevel.value >= ConsoleInterface.logLevel.value) {
this.print(s, logLevel);
this.write(s);
}
}

/**
* Replaces the old FileWriter.write() call
* @param s The string to log
*/
public void debug(String s) {
this.log(s, LogLevel.DEBUG);
}
public void debug(Object o) {
this.debug(o.toString());
}

/**
* Replaces the old System.out.println() call
* @param s The string to log
*/
public void info(String s) {
this.log(s, LogLevel.INFO);
}
public void info(Object o) {
this.info(o.toString());
}

/**
* Output a warning message
* @param s The string to log
*/
public void warn(String s) {
this.log(s, LogLevel.WARNING);
}
public void warn(Object o) {
this.warn(o.toString());
}

/**
* Output an error message
* @param s The string to log
*/
public void error(String s) {
this.log(s, LogLevel.ERROR);
}
public void error(Object o) {
this.error(o.toString());
}

/**
* Special logging function that prints a throwable object and all of its recursive causes (including stacktrace)
* as an error
*
* @param throwable The Throwable object to output
*/
public void exception(Throwable throwable) {
// Format the exception output
String s = "Exception: " + throwable.getMessage() + "\n" +
Arrays.stream(throwable.getStackTrace()).map(stackTraceElement ->
" | " + stackTraceElement.toString()
).collect(Collectors.joining("\n"));

// if there will be a cause printed afterward, announce it with the print of the exception
if (throwable.getCause() != null) {
s += "\n\nCaused by: ";
}

// print the exception
this.error(s);

// print the cause recursively
if (throwable.getCause() != null) {
this.exception(throwable.getCause());
}
}

public void close() {
if (this.writer != null) {
try {
this.writer.close();
}
catch (IOException exception) {
throw new RuntimeException("Failed to close a loggers writer. Was it maybe already closed? ", exception);
}
}
}

/**
* An enum representing the different log levels as integers
*/
public enum LogLevel {
// all the output
DEBUG(0),
// a lot of info about the process
INFO(1),
// warnings and up
WARNING(2),
// only errors and exceptions
ERROR(3);

private final int value;
LogLevel(final int newValue) {
value = newValue;
}
}

/**
* A special case of logger that will never output anything
*/
private static class NullLogger extends Logger {
@Override
public void log(String s, LogLevel logLevel) {
// do nothing. Yay
}
}
}

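For orientation, a small usage sketch of the Logger API introduced above (the class name and messages are made up; the calls themselves — the prefix constructor, forFile, the debug/info/warn/error overloads, NULL_LOGGER, close, and the ConsoleInterface.logLevel / ConsoleInterface.writeLogFiles switches — are the ones defined in this diff):

import de.dhbwstuttgart.core.ConsoleInterface;
import de.dhbwstuttgart.util.Logger;

class LoggerUsageSketch {
    public static void main(String[] args) {
        // lower the global threshold so every level is printed
        ConsoleInterface.logLevel = Logger.LogLevel.DEBUG;

        // console-only logger with a colored "[example]" prefix
        Logger logger = new Logger("example");
        logger.debug("replaces the old logFile.write(...) calls");
        logger.info("replaces the old System.out.println(...) calls");
        logger.warn("a warning message");
        logger.error("an error message");

        // file-backed logger; entries are only written out when writeLogFiles is enabled
        ConsoleInterface.writeLogFiles = true;
        Logger fileLogger = Logger.forFile("unify.log", "unify");
        fileLogger.info("also written to unify.log");
        fileLogger.close();

        // tests pass a logger that swallows everything
        Logger silent = Logger.NULL_LOGGER;
        silent.error("never shown");
    }
}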
@@ -18,7 +18,7 @@ public class TestTypeDeployment {
public void testTypeDeployment() throws Exception {
var path = Path.of(System.getProperty("user.dir"), "/resources/bytecode/javFiles/Cycle.jav");
var file = path.toFile();
var compiler = new JavaTXCompiler(file, false);
var compiler = new JavaTXCompiler(file);
var parsedSource = compiler.sourceFiles.get(file);
var tiResults = compiler.typeInference(file);
Set<TypeInsert> tips = new HashSet<>();

@@ -18,6 +18,7 @@ import de.dhbwstuttgart.typeinference.unify.model.SuperType;
import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import de.dhbwstuttgart.util.Logger;
import java.io.Writer;
import java.util.HashSet;
import java.util.List;
@@ -84,13 +85,12 @@ class PacketExampleData {

static UnifyContext createTestContext() {
var placeholderRegistry = new PlaceholderRegistry();
var nullWriter = Writer.nullWriter();
return new UnifyContext(nullWriter, false, true,
return new UnifyContext(Logger.NULL_LOGGER, true,
new UnifyResultModel(
new ConstraintSet<>(),
new FiniteClosure(
new HashSet<>(),
nullWriter,
Logger.NULL_LOGGER,
placeholderRegistry)),
new UnifyTaskModel(),
ForkJoinPool.commonPool(),

@@ -20,6 +20,7 @@ import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import de.dhbwstuttgart.util.Logger;
import java.io.File;
import java.io.IOException;
import java.io.Writer;
@@ -65,7 +66,7 @@ public class ServerTest {

FiniteClosure finiteClosure = UnifyTypeFactory.generateFC(
ServerTest.getAllClasses(compiler, file).stream().toList(),
Writer.nullWriter(),
Logger.NULL_LOGGER,
compiler.classLoader,
compiler,
placeholderRegistry
@@ -84,7 +85,7 @@ public class ServerTest {
// run the unification on the server
PlaceholderRegistry prCopy = JavaTXCompiler.defaultClientPlaceholderRegistry.deepClone();
UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
UnifyContext context = new UnifyContext(Writer.nullWriter(), false, true, urm, usedTasks, prCopy);
UnifyContext context = new UnifyContext(Logger.NULL_LOGGER, true, urm, usedTasks, prCopy);
SocketClient socketClient = new SocketClient("ws://localhost:5000");
List<ResultSet> serverResult = socketClient.execute(finiteClosure, cons, unifyCons, context);