forked from i21017/JavaCompilerCore

Compare commits: feat/unify ... feat/unify (3 commits)

  3d99f282f5
  303c91dc87
  603a8b176a
CancellableTask.java (new file):

@@ -0,0 +1,91 @@
+package de.dhbwstuttgart.typeinference.unify;
+
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.concurrent.ConcurrentLinkedQueue;
+import java.util.concurrent.RecursiveTask;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+/**
+ * An intermediate class for the recursive steps of the TypeUnifyTask:
+ * This allows canceling parts of the recursion tree, instead of only the whole execution as before. But in
+ * order for that to work, all cancellable child tasks must be added when they are created
+ *
+ * @param <T>
+ */
+public abstract class CancellableTask<T> extends RecursiveTask<T> {
+
+    private final AtomicBoolean executionCancelled = new AtomicBoolean(false);
+    private final List<CancellableTask<?>> childTasks = new LinkedList<>();
+    private CancellableTask<?> parentTask = null;
+
+    /**
+     * Set the execution for this task and all its (recursive) children to be canceled
+     */
+    protected void cancelExecution() {
+        // is this branch already canceled? Then do nothing
+        if (this.executionCancelled.getAndSet(true)) return;
+        this.cancelChildExecution();
+    }
+
+    private void cancelChildExecution() {
+        synchronized (this.childTasks) {
+            for (var childTask : childTasks) {
+                // no need to cancel a branch that is already finished
+                if (!childTask.isDone()) {
+                    childTask.cancelExecution();
+                }
+            }
+        }
+    }
+
+    private void cancelChildExecutionAfter(CancellableTask<?> checkpointTask) {
+        boolean reachedCheckpoint = false;
+        int i = 0;
+        for (var childTask : childTasks) {
+            if (!reachedCheckpoint) {
+                reachedCheckpoint = childTask == checkpointTask;
+            }
+            else {
+                // no need to cancel a branch that is already finished
+                if (!childTask.isDone()) {
+                    childTask.cancelExecution();
+                }
+                i++;
+            }
+        }
+        System.out.println("Skipped " + i + " younger siblings");
+    }
+
+    protected void cancelSiblingTasks() {
+        if (this.parentTask != null) {
+            boolean thisWasCancelledBefore = this.executionCancelled.get();
+            this.parentTask.cancelChildExecution();
+            this.executionCancelled.set(thisWasCancelledBefore);
+        }
+    }
+
+    public void cancelYoungerSiblingTasks() {
+        if (this.parentTask != null) {
+            this.parentTask.cancelChildExecutionAfter(this);
+        }
+    }
+
+    public Boolean isExecutionCancelled() {
+        return executionCancelled.get();
+    }
+
+    public void addChildTask(CancellableTask<?> childTask) {
+        this.childTasks.add(childTask);
+        childTask.setParentTask(this);
+        if (this.executionCancelled.get()) {
+            childTask.executionCancelled.set(true);
+        }
+    }
+
+    private void setParentTask(CancellableTask<?> parentTask) {
+        this.parentTask = parentTask;
+    }
+}
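Not part of the commit, purely as a reading aid: a minimal, hypothetical subclass sketch showing the contract the Javadoc above describes. Every child is registered through addChildTask() before it forks, so cancelExecution() can reach the entire subtree, and compute() checks the flag cooperatively.

// Hypothetical usage sketch (assumes CancellableTask from above is on the classpath).
import de.dhbwstuttgart.typeinference.unify.CancellableTask;
import java.util.concurrent.ForkJoinPool;

class CountDownTask extends CancellableTask<Integer> {
    private final int depth;

    CountDownTask(int depth) { this.depth = depth; }

    @Override
    protected Integer compute() {
        // cooperative cancellation: check the flag before doing more work
        if (isExecutionCancelled() || depth == 0) return 0;
        CountDownTask child = new CountDownTask(depth - 1);
        addChildTask(child); // register before forking, as the Javadoc requires
        child.fork();
        return 1 + child.join();
    }
}

// new ForkJoinPool().invoke(new CountDownTask(5)) yields 5; calling cancelExecution()
// on the root (or any subtree root) makes the remaining recursion return early.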
TypeUnify2Task.java:

@@ -45,7 +45,7 @@ public class TypeUnify2Task extends TypeUnifyTask {
         */
        //writeLog("xxx");
        //noOfThread--;
-       if (this.myIsCancelled()) {
+       if (this.isExecutionCancelled()) {
            return CompletableFuture.completedFuture(new HashSet<>());
        } else {
            return res;
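This hunk shows the cancellation idiom that recurs throughout the commit: instead of throwing UnifyCancelException, a cancelled task completes normally with an empty result set. A tiny standalone illustration (names are illustrative, not from the commit) of why that composes better in a CompletableFuture chain: an exception would complete every dependent stage exceptionally, while an empty set lets downstream thenApply/thenCombine stages keep merging results.

import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicBoolean;

public class CancelDemo {
    static final AtomicBoolean cancelled = new AtomicBoolean(false);

    // returns an empty set when cancelled, instead of throwing
    static CompletableFuture<Set<String>> step() {
        if (cancelled.get()) {
            return CompletableFuture.completedFuture(new HashSet<>());
        }
        return CompletableFuture.completedFuture(Set.of("a", "b"));
    }

    public static void main(String[] args) {
        cancelled.set(true);
        // the chain still completes normally, just with an empty result
        System.out.println(step().thenApply(Set::size).join()); // prints 0
    }
}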
TypeUnifyTask.java:

@@ -27,11 +27,7 @@ import de.dhbwstuttgart.typeinference.unify.model.WildcardType;
 import de.dhbwstuttgart.util.Logger;
 import de.dhbwstuttgart.util.Pair;
 import de.dhbwstuttgart.util.Tuple;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.OutputStreamWriter;
 import java.io.Serial;
-import java.io.Writer;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -47,12 +43,9 @@ import java.util.Set;
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.RecursiveTask;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicInteger;
 import java.util.function.BiFunction;
 import java.util.function.BinaryOperator;
 import java.util.stream.Collectors;
-import org.apache.commons.io.output.NullOutputStream;


 /**
@@ -60,7 +53,7 @@ import org.apache.commons.io.output.NullOutputStream;
  *
  * @author Florian Steurer
  */
-public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<UnifyPair>>>> {
+public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<UnifyPair>>>> {

    @Serial
    private static final long serialVersionUID = 1L;
@@ -120,8 +113,6 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
    public static int noShortendElements;
    public static int noou = 0;

-   boolean myIsCanceled = false;
-
    public TypeUnifyTask(UnifyContext context) {
        this.context = context.newWithLogger(Logger.NULL_LOGGER);
        rules = new RuleSet(context.placeholderRegistry());
@@ -216,13 +207,6 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
        }
    }
    */
-   void myCancel(boolean b) {
-       myIsCanceled = true;
-   }
-
-   public boolean myIsCancelled() {
-       return myIsCanceled;
-   }

    public CompletableFuture<Set<Set<UnifyPair>>> compute() {
        if (one) {
@@ -252,7 +236,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
            throw new TypeinferenceException("Unresolved constraints: " + res, new NullToken()); //return new HashSet<>();
        }

-       if (this.myIsCancelled()) {
+       if (this.isExecutionCancelled()) {
            return new HashSet<>();
        }

@@ -289,7 +273,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify

        //.collect(Collectors.toCollection(HashSet::new)));

-       if (this.myIsCancelled()) {
+       if (this.isExecutionCancelled()) {
            return CompletableFuture.completedFuture(new HashSet<>());
        }

@@ -479,7 +463,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
        // .collect(Collectors.toCollection(HashSet::new));
        //Muss auskommentiert werden, wenn computeCartesianRecursive ENDE

-       if (this.myIsCancelled()) {
+       if (this.isExecutionCancelled()) {
            return CompletableFuture.completedFuture(new HashSet<>());
        }

@@ -756,7 +740,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
        // Ergebnisvariable für die parallele Verabeitung: Tupel aus
        // - forkOrig result : currentThreadResult (frueher "res")
        // - fork results : forkResults (frueher "add_res")
-       CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> parallelResultDataFuture;
+       CompletableFuture<VarianceCase.ComputationResults> parallelResultDataFuture;


        if (parallel) {
@@ -768,13 +752,16 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
            // same as variance = 0
            elems.add(varianceCase.a); //PL 2019-01-16 muss das wirklich hin steht schon in Zeile 859 ja braucht man siehe Zeile 859
            parallelResultDataFuture = this.unify2(elems, eq, oderConstraints, fc, false, rekTiefe, new HashSet<>(methodSignatureConstraint))
-               .thenApply(currentThreadResult -> new Tuple<>(currentThreadResult, new HashSet<>()));
+               .thenApply(VarianceCase.ComputationResults::new);
        }

+       if (this.isExecutionCancelled()) {
+           return CompletableFuture.completedFuture(new HashSet<>());
+       }
+
        return parallelResultDataFuture.thenCompose(parallelResultData -> {
-           Set<Set<UnifyPair>> currentThreadResult = parallelResultData.getFirst();
-           Set<Set<Set<UnifyPair>>> forkResults = parallelResultData.getSecond();
+           Set<Set<UnifyPair>> currentThreadResult = parallelResultData.mainResult;
+           Set<Set<Set<UnifyPair>>> forkResults = parallelResultData.forkResults;

            Set<Set<UnifyPair>> result = prevResult;
            List<Set<UnifyPair>> nextSetAsList = prevNextSetAsList;
@@ -871,6 +858,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
            // Iterator<Set<UnifyPair>> nextSetasListIt = new ArrayList<>(nextSetAsList).iterator();
            boolean shouldBreak = varianceCase.eraseInvalidSets(rekTiefe, aParDef, nextSetAsList);
            if (shouldBreak) {
+               // this.cancelYoungerSiblingTasks();
                return CompletableFuture.completedFuture(result);
            }

UnifyTaskModel.java:

@@ -12,7 +12,7 @@ public class UnifyTaskModel {

    public synchronized void cancel() {
        for(TypeUnifyTask t : usedTasks) {
-           t.myCancel(true);
+           t.cancelExecution();
        }
    }
 }
Variance1Case.java → ContravarianceCase.java:

@@ -1,6 +1,5 @@
 package de.dhbwstuttgart.typeinference.unify.cartesianproduct;

-import de.dhbwstuttgart.exceptions.UnifyCancelException;
 import de.dhbwstuttgart.typeinference.constraints.Constraint;
 import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
 import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
@@ -17,11 +16,11 @@ import java.util.Set;
 import java.util.concurrent.CompletableFuture;
 import java.util.stream.Collectors;

-public class Variance1Case extends VarianceCase {
+public class ContravarianceCase extends VarianceCase {

    protected final int variance = 1;

-   protected Variance1Case(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
+   protected ContravarianceCase(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
        super(isOderConstraint, typeUnifyTask, context);
    }

@@ -37,7 +36,7 @@ public class Variance1Case extends VarianceCase {
        if (this.isOderConstraint) {
            nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
        }
-       context.logger().debug("nextSetasListOderConstraints 1: " + nextSetasListOderConstraints);
+       context.logger().debug(() -> "nextSetasListOderConstraints 1: " + nextSetasListOderConstraints);

        //Alle maximale Elemente in nextSetasListRest bestimmen
        //nur für diese wird parallele Berechnung angestossen.
@@ -49,7 +48,7 @@ public class Variance1Case extends VarianceCase {


    @Override
-   public CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> computeParallel(
+   public CompletableFuture<ComputationResults> computeParallel(
            Set<Set<UnifyPair>> elems,
            Set<UnifyPair> eq,
            List<Set<Constraint<UnifyPair>>> oderConstraints,
@@ -61,10 +60,6 @@ public class Variance1Case extends VarianceCase {
            Set<Set<UnifyPair>> result,
            Set<Set<UnifyPair>> aParDef
    ) {
-       CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> resultValues = CompletableFuture.completedFuture(new Tuple<>(
-               new HashSet<>(), new HashSet<>()
-       ));
-
        Set<UnifyPair> newEqOrig = new HashSet<>(eq);
        Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
        List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
@@ -72,18 +67,19 @@ public class Variance1Case extends VarianceCase {

        /* FORK ANFANG */
        TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, methodSignatureConstraint);
+       typeUnifyTask.addChildTask(forkOrig);
        // schedule compute() on another thread
        CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
-       resultValues = resultValues.thenCombine(forkOrigFuture,
-               (prevResults, currentThreadResult) -> {
+       CompletableFuture<ComputationResults> resultValues = forkOrigFuture.thenApply(
+               (currentThreadResult) -> {
                    forkOrig.context.logger().debug("final Orig 1");
                    forkOrig.closeLogFile();
-                   return new Tuple<>(currentThreadResult, prevResults.getSecond());
+                   return new ComputationResults(currentThreadResult);
                });

        //forks.add(forkOrig);
-       if (typeUnifyTask.myIsCancelled()) {
-           throw new UnifyCancelException();
+       if (typeUnifyTask.isExecutionCancelled()) {
+           return CompletableFuture.completedFuture(new ComputationResults());
        }
        /* FORK ENDE */

@@ -108,16 +104,17 @@ public class Variance1Case extends VarianceCase {
            List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
            newElems.add(nSaL);
            TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
+           typeUnifyTask.addChildTask(fork);
            // schedule compute() on another thread
            CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
            resultValues = resultValues.thenCombine(forkFuture,
                    (prevResults, fork_res) -> {
-                       if (typeUnifyTask.myIsCancelled()) {
-                           throw new UnifyCancelException();
+                       if (typeUnifyTask.isExecutionCancelled()) {
+                           return new ComputationResults();
                        }
                        context.logger().debug("fork_res: " + fork_res.toString());
                        context.logger().debug(Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString());
-                       prevResults.getSecond().add(fork_res);
+                       prevResults.addForkResult(fork_res);
                        if (!typeUnifyTask.isUndefinedPairSetSet(fork_res)) {
                            aParDef.add(fork.getNextSetElement());
                        }
@@ -127,8 +124,8 @@ public class Variance1Case extends VarianceCase {
                    }
            );

-           if (typeUnifyTask.myIsCancelled()) {
-               throw new UnifyCancelException();
+           if (typeUnifyTask.isExecutionCancelled()) {
+               return CompletableFuture.completedFuture(new ComputationResults());
            }
        }

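The fork scheduling line repeated in the hunks above, CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f), deserves a note: compute() itself already returns a CompletableFuture, so supplyAsync produces a nested CompletableFuture<CompletableFuture<...>>, and thenCompose(f -> f) flattens it back to one level. A self-contained sketch of just that pattern (all names here are illustrative, not from the commit):

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class FlattenDemo {
    // like TypeUnify2Task.compute(), this returns a future rather than a plain value
    static CompletableFuture<String> compute() {
        return CompletableFuture.completedFuture("result");
    }

    public static void main(String[] args) {
        ExecutorService executor = Executors.newFixedThreadPool(2);
        // supplyAsync wraps the returned future in another future...
        CompletableFuture<CompletableFuture<String>> nested =
                CompletableFuture.supplyAsync(FlattenDemo::compute, executor);
        // ...and thenCompose(f -> f) flattens the nesting away
        CompletableFuture<String> flat = nested.thenCompose(f -> f);
        System.out.println(flat.join()); // prints "result"
        executor.shutdown();
    }
}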
VarianceM1Case.java → CovarianceCase.java:

@@ -1,6 +1,5 @@
 package de.dhbwstuttgart.typeinference.unify.cartesianproduct;

-import de.dhbwstuttgart.exceptions.UnifyCancelException;
 import de.dhbwstuttgart.typeinference.constraints.Constraint;
 import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
 import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
@@ -17,11 +16,11 @@ import java.util.Set;
 import java.util.concurrent.CompletableFuture;
 import java.util.stream.Collectors;

-public class VarianceM1Case extends VarianceCase {
+public class CovarianceCase extends VarianceCase {

    protected final int variance = -1;

-   protected VarianceM1Case(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
+   protected CovarianceCase(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
        super(isOderConstraint, typeUnifyTask, context);
    }

@@ -49,7 +48,7 @@ public class VarianceM1Case extends VarianceCase {


    @Override
-   public CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> computeParallel(
+   public CompletableFuture<ComputationResults> computeParallel(
            Set<Set<UnifyPair>> elems,
            Set<UnifyPair> eq,
            List<Set<Constraint<UnifyPair>>> oderConstraints,
@@ -61,10 +60,6 @@ public class VarianceM1Case extends VarianceCase {
            Set<Set<UnifyPair>> result,
            Set<Set<UnifyPair>> aParDef
    ) {
-       CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> resultValues = CompletableFuture.completedFuture(new Tuple<>(
-               new HashSet<>(), new HashSet<>()
-       ));
-
        Set<UnifyPair> newEqOrig = new HashSet<>(eq);
        Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
        List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
@@ -72,18 +67,19 @@ public class VarianceM1Case extends VarianceCase {

        /* FORK ANFANG */
        TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, methodSignatureConstraint);
+       typeUnifyTask.addChildTask(forkOrig);
        // schedule compute() on another thread
        CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
-       resultValues = resultValues.thenCombine(forkOrigFuture,
-               (prevResults, currentThreadResult) -> {
+       CompletableFuture<ComputationResults> resultValues = forkOrigFuture.thenApply(
+               (currentThreadResult) -> {
                    forkOrig.context.logger().debug("final Orig -1");
                    forkOrig.closeLogFile();
-                   return new Tuple<>(currentThreadResult, prevResults.getSecond());
+                   return new ComputationResults(currentThreadResult);
                });

        //forks.add(forkOrig);
-       if (typeUnifyTask.myIsCancelled()) {
-           throw new UnifyCancelException();
+       if (typeUnifyTask.isExecutionCancelled()) {
+           return resultValues;
        }
        /* FORK ENDE */

@@ -109,16 +105,17 @@ public class VarianceM1Case extends VarianceCase {
            List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
            newElems.add(nSaL);
            TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
+           typeUnifyTask.addChildTask(fork);
            // schedule compute() on another thread
            CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
            resultValues = resultValues.thenCombine(forkFuture,
                    (prevResults, fork_res) -> {
-                       if (typeUnifyTask.myIsCancelled()) {
-                           throw new UnifyCancelException();
+                       if (typeUnifyTask.isExecutionCancelled()) {
+                           return prevResults;
                        }
                        context.logger().debug(() -> "fork_res: " + fork_res.toString());
                        context.logger().debug(() -> Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString());
-                       prevResults.getSecond().add(fork_res);
+                       prevResults.addForkResult(fork_res);
                        if (!typeUnifyTask.isUndefinedPairSetSet(fork_res)) {
                            aParDef.add(fork.getNextSetElement());
                        }
@@ -128,8 +125,8 @@ public class VarianceM1Case extends VarianceCase {
                    }
            );

-           if (typeUnifyTask.myIsCancelled()) {
-               throw new UnifyCancelException();
+           if (typeUnifyTask.isExecutionCancelled()) {
+               return resultValues;
            }
        }

Variance2Case.java → InvarianceOrConstraintCase.java:

@@ -1,6 +1,5 @@
 package de.dhbwstuttgart.typeinference.unify.cartesianproduct;

-import de.dhbwstuttgart.exceptions.UnifyCancelException;
 import de.dhbwstuttgart.typeinference.constraints.Constraint;
 import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
 import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
@@ -15,11 +14,12 @@ import java.util.Optional;
 import java.util.Set;
 import java.util.concurrent.CompletableFuture;

-public class Variance2Case extends VarianceCase {
+public class InvarianceOrConstraintCase extends VarianceCase {

+   // either for invariance or for oderConstraints
    protected final int variance = 2;

-   protected Variance2Case(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
+   protected InvarianceOrConstraintCase(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
        super(isOderConstraint, typeUnifyTask, context);
    }

@@ -37,7 +37,7 @@ public class Variance2Case extends VarianceCase {


    @Override
-   public CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> computeParallel(
+   public CompletableFuture<ComputationResults> computeParallel(
            Set<Set<UnifyPair>> elems,
            Set<UnifyPair> eq,
            List<Set<Constraint<UnifyPair>>> oderConstraints,
@@ -49,8 +49,6 @@ public class Variance2Case extends VarianceCase {
            Set<Set<UnifyPair>> result,
            Set<Set<UnifyPair>> aParDef
    ) {
-       CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> resultValuesFuture;
-
        context.logger().debug("var2einstieg");
        Set<TypeUnify2Task> forks = new HashSet<>();
        Set<UnifyPair> newEqOrig = new HashSet<>(eq);
@@ -60,14 +58,16 @@ public class Variance2Case extends VarianceCase {

        /* FORK ANFANG */
        TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
+       typeUnifyTask.addChildTask(forkOrig);
        CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
-       resultValuesFuture = forkOrigFuture.thenApply((currentThreadResult) -> {
+       CompletableFuture<ComputationResults> resultValues = forkOrigFuture.thenApply((currentThreadResult) -> {
            forkOrig.context.logger().debug("final Orig 2");
            forkOrig.closeLogFile();
-           return new Tuple<>(currentThreadResult, new HashSet<>());
+           return new ComputationResults(currentThreadResult);
        });
-       if (typeUnifyTask.myIsCancelled()) {
-           throw new UnifyCancelException();
+
+       if (typeUnifyTask.isExecutionCancelled()) {
+           return resultValues;
        }
        /* FORK ENDE */

@@ -89,27 +89,24 @@ public class Variance2Case extends VarianceCase {
            List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
            newElems.add(nSaL);
            TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraintForParallel));
+           typeUnifyTask.addChildTask(fork);
            CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
-           resultValuesFuture = resultValuesFuture.thenCombine(forkFuture, (resultValues, fork_res) -> {
-               if (typeUnifyTask.myIsCancelled()) {
-                   throw new UnifyCancelException();
+           resultValues = resultValues.thenCombine(forkFuture, (prevResults, fork_res) -> {
+               if (typeUnifyTask.isExecutionCancelled()) {
+                   return prevResults;
                }
-               resultValues.getSecond().add(fork_res);
+               prevResults.addForkResult(fork_res);
                fork.context.logger().debug("final 2");
                fork.closeLogFile();
-               return resultValues;
+               return prevResults;
            });

-           if (typeUnifyTask.myIsCancelled()) {
-               throw new UnifyCancelException();
+           if (typeUnifyTask.isExecutionCancelled()) {
+               return resultValues;
            }
        }

-       if (typeUnifyTask.myIsCancelled()) {
-           throw new UnifyCancelException();
-       }
-
-       return resultValuesFuture;
+       return resultValues;
    }

    @Override
Variance0Case.java → UnknownVarianceCase.java:

@@ -13,14 +13,13 @@ import java.util.List;
 import java.util.Optional;
 import java.util.Set;
 import java.util.concurrent.CompletableFuture;
-import java.util.concurrent.ExecutionException;
 import java.util.stream.Collectors;

-public class Variance0Case extends VarianceCase {
+public class UnknownVarianceCase extends VarianceCase {

    protected final int variance = 0;

-   protected Variance0Case(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
+   protected UnknownVarianceCase(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
        super(isOderConstraint, typeUnifyTask, context);
    }

@@ -50,7 +49,7 @@ public class Variance0Case extends VarianceCase {
    }

    @Override
-   public CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> computeParallel(
+   public CompletableFuture<ComputationResults> computeParallel(
            Set<Set<UnifyPair>> elems,
            Set<UnifyPair> eq,
            List<Set<Constraint<UnifyPair>>> oderConstraints,
@@ -62,10 +61,9 @@ public class Variance0Case extends VarianceCase {
            Set<Set<UnifyPair>> result,
            Set<Set<UnifyPair>> aParDef
    ) {
-       elems.add(a); //PL 2019-01-16 muss das wirklich hin steht schon in Zeile 859 ja braucht man siehe Zeile 859
-       return typeUnifyTask.unify2(elems, eq, oderConstraints, fc, context.parallel(), rekTiefe, new HashSet<>(methodSignatureConstraint)).thenApply(
-               unify2Result -> new Tuple<>(unify2Result, new HashSet<>())
-       );
+       elems.add(a);
+       return typeUnifyTask.unify2(elems, eq, oderConstraints, fc, context.parallel(), rekTiefe, new HashSet<>(methodSignatureConstraint))
+               .thenApply(ComputationResults::new);
    }

    @Override
@@ -75,7 +73,7 @@ public class Variance0Case extends VarianceCase {
            Set<UnifyPair> compResult,
            Set<UnifyPair> compRes
    ) {
-       context.logger().debug("RES var=1 ADD:" + result.toString() + " " + currentThreadResult.toString());
+       context.logger().debug("RES var=0 ADD:" + result.toString() + " " + currentThreadResult.toString());
        result.addAll(currentThreadResult);
    }

VarianceCase.java:

@@ -5,9 +5,9 @@ import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
 import de.dhbwstuttgart.typeinference.unify.UnifyContext;
 import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
 import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
-import de.dhbwstuttgart.util.Logger;
 import de.dhbwstuttgart.util.Tuple;
 import java.util.ArrayList;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Optional;
 import java.util.Set;
@@ -17,11 +17,11 @@ public abstract class VarianceCase {

    public static VarianceCase createFromVariance(int variance, boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
        return switch (variance) {
-           case 0 -> new Variance0Case(isOderConstraint, typeUnifyTask, context);
-           case 1 -> new Variance1Case(isOderConstraint, typeUnifyTask, context);
-           case -1 -> new VarianceM1Case(isOderConstraint, typeUnifyTask, context);
-           case 2 -> new Variance2Case(isOderConstraint, typeUnifyTask, context);
+           case 0 -> new UnknownVarianceCase(isOderConstraint, typeUnifyTask, context);
+           case 1 -> new ContravarianceCase(isOderConstraint, typeUnifyTask, context);
+           case -1 -> new CovarianceCase(isOderConstraint, typeUnifyTask, context);
+           case 2 -> new InvarianceOrConstraintCase(isOderConstraint, typeUnifyTask, context);
            default -> throw new RuntimeException("Invalid variance: " + variance);
        };
    }

@@ -72,7 +72,7 @@ public abstract class VarianceCase {
    /**
     *
     */
-   public abstract CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> computeParallel(
+   public abstract CompletableFuture<ComputationResults> computeParallel(
            Set<Set<UnifyPair>> elems,
            Set<UnifyPair> eq,
            List<Set<Constraint<UnifyPair>>> oderConstraints,
@@ -104,4 +104,29 @@ public abstract class VarianceCase {
            Set<Set<UnifyPair>> aParDef,
            List<Set<UnifyPair>> nextSetAsList
    );
+
+   /**
+    * Wrapper class for the parallel computation results
+    */
+   public static class ComputationResults {
+       public Set<Set<UnifyPair>> mainResult;
+       public Set<Set<Set<UnifyPair>>> forkResults;
+
+       public ComputationResults() {
+           this(new HashSet<>(), new HashSet<>());
+       }
+
+       public ComputationResults(Set<Set<UnifyPair>> mainResult) {
+           this(mainResult, new HashSet<>());
+       }
+
+       public ComputationResults(Set<Set<UnifyPair>> mainResult, Set<Set<Set<UnifyPair>>> forkResults) {
+           this.mainResult = mainResult;
+           this.forkResults = forkResults;
+       }
+
+       void addForkResult(Set<Set<UnifyPair>> forkResult) {
+           forkResults.add(forkResult);
+       }
+   }
 }
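ComputationResults replaces the earlier Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>: the forkOrig future seeds it via thenApply, and each further fork is folded in via thenCombine plus addForkResult. A simplified, self-contained sketch of that accumulation (Integer stands in for the real result sets; names are illustrative, not from the commit):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;

public class CombineDemo {
    // simplified stand-in for ComputationResults: one main result, many fork results
    static class Results {
        final int mainResult;
        final List<Integer> forkResults = new ArrayList<>();
        Results(int mainResult) { this.mainResult = mainResult; }
    }

    public static void main(String[] args) {
        // the forkOrig future seeds the accumulator, as in computeParallel above
        CompletableFuture<Results> acc =
                CompletableFuture.completedFuture(new Results(42));

        // each fork future is folded in with thenCombine, mirroring addForkResult(fork_res)
        for (int i = 0; i < 3; i++) {
            CompletableFuture<Integer> forkFuture = CompletableFuture.completedFuture(i);
            acc = acc.thenCombine(forkFuture, (prev, forkRes) -> {
                prev.forkResults.add(forkRes);
                return prev;
            });
        }

        Results r = acc.join();
        System.out.println(r.mainResult + " " + r.forkResults); // prints "42 [0, 1, 2]"
    }
}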
UnifyPair.java:

@@ -106,11 +106,7 @@ public class UnifyPair implements IConstraintElement, ISerializableData {

    public SourceLoc getLocation() {
        if (location != null) return location;
-       else if (basePair != null) {
-           SourceLoc baseLocation = basePair.getLocation();
-           location = baseLocation;
-           return baseLocation;
-       }
+       else if (basePair != null) return basePair.getLocation();
        return null;
    }

Logger.java:

@@ -19,12 +19,12 @@ import static com.diogonunes.jcolor.Ansi.colorize;

 public class Logger {

+   public static final Logger NULL_LOGGER = new NullLogger();
+   private static final DateTimeFormatter timeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
+
    protected final Writer writer;
    protected final String prefix;

-   public static Logger NULL_LOGGER = new NullLogger();
-   private static DateTimeFormatter timeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
-
    public Logger() {
        this(null, "");
    }
@@ -58,7 +58,7 @@ public class Logger {
    /**
     * Create a new logger object that inherits the writer of the given logger object
     *
-    * @param logger The old logger object, that will provide the writer
+    * @param logger The old logger object that will provide the writer
     * @return The new prefix for the new logger object
     */
    public static Logger inherit(Logger logger, String newPrefix) {
@@ -96,7 +96,7 @@ public class Logger {
    }

    // output to the correct output-stream
-   if (logLevel.isHigherOrEqualTo(LogLevel.ERROR)) {
+   if (logLevel.getValue() == LogLevel.ERROR.getValue()) {
        System.out.println(coloredPrefix + s);
    }
    else {
@@ -236,7 +236,7 @@ public class Logger {
            " | " + stackTraceElement.toString()
        ).collect(Collectors.joining("\n"));

-       // if there will be a cause printed afterward, announce it with the print of the exception
+       // if there is a cause printed afterward, announce it with the print of the exception
        if (throwable.getCause() != null) {
            s += "\n\nCaused by: ";
        }