forked from i21017/JavaCompilerCore
Compare commits
2 Commits
feat/unify
...
c54e012426
Author | SHA1 | Date | |
---|---|---|---|
|
c54e012426 | ||
|
52f6ebcf3c |
@@ -0,0 +1,62 @@
|
||||
package de.dhbwstuttgart.typeinference.unify;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.ConcurrentLinkedQueue;
|
||||
import java.util.concurrent.RecursiveTask;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
|
||||
/**
 * An intermediate class for the recursive steps of the TypeUnifyTask:
 * This allows to cancel parts of the recursion tree, instead of only the whole execution as before. But in
 * order for that to work, all cancellable child tasks must be added when they are created
 *
 * @param <T> the result type of this task's {@link #compute()} method
 */
public abstract class CancellableTask<T> extends RecursiveTask<T> {

    /** Cancellation flag for this branch of the recursion tree. */
    private final AtomicBoolean executionCancelled = new AtomicBoolean(false);
    /**
     * Child tasks spawned by this task. A {@link ConcurrentLinkedQueue} (not an ArrayList)
     * is required here: children run on other threads, so {@link #addChildTask(CancellableTask)}
     * may be called while another thread iterates this collection in
     * {@link #cancelChildExecution()} — a plain list would risk a
     * ConcurrentModificationException.
     */
    private final ConcurrentLinkedQueue<CancellableTask<?>> childTasks = new ConcurrentLinkedQueue<>();
    /** The task that spawned this one; {@code null} for the root of the recursion tree. */
    private CancellableTask<?> parentTask = null;

    /**
     * Set the execution for this task and all its (recursive) children to be cancelled
     */
    protected void cancelExecution() {
        // compareAndSet makes the "already cancelled?" check and the flag update atomic:
        // exactly one caller wins, and an already-cancelled branch is not descended twice
        if (!executionCancelled.compareAndSet(false, true)) return;
        this.cancelChildExecution();
    }

    /**
     * Cancels all (recursive) children of this task, without touching this task's own flag.
     */
    public void cancelChildExecution() {
        for (var childTask : childTasks) {
            // no need to cancel a branch that is already finished
            if (!childTask.isDone()) {
                childTask.cancelExecution();
            }
        }
    }

    /**
     * Cancels all sibling branches of this task. The parent cancels ALL of its children,
     * including this task, so this task's own flag is saved beforehand and restored
     * afterwards. NOTE(review): a concurrent external cancellation of this task arriving
     * between save and restore would be overwritten — acceptable under the current usage,
     * but worth confirming.
     */
    protected void cancelSiblingTasks() {
        if (this.parentTask != null) {
            boolean thisWasCancelledBefore = this.executionCancelled.get();
            this.parentTask.cancelChildExecution();
            this.executionCancelled.set(thisWasCancelledBefore);
        }
    }

    /**
     * @return whether this branch of the recursion tree has been cancelled
     *         (boxed {@code Boolean} kept for interface compatibility with existing callers)
     */
    public Boolean isExecutionCancelled() {
        return executionCancelled.get();
    }

    /**
     * Registers a child task so cancellation propagates to it, and links the child
     * back to this task so the child can cancel its siblings.
     *
     * @param childTask the newly created child task
     */
    public void addChildTask(CancellableTask<?> childTask) {
        this.childTasks.add(childTask);
        childTask.setParentTask(this);
    }

    private void setParentTask(CancellableTask<?> parentTask) {
        this.parentTask = parentTask;
    }

}
|
@@ -59,7 +59,7 @@ import org.apache.commons.io.output.NullOutputStream;
|
||||
*
|
||||
* @author Florian Steurer
|
||||
*/
|
||||
public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<UnifyPair>>>> {
|
||||
public class TypeUnifyTask extends CancellableTask<CompletableFuture<Set<Set<UnifyPair>>>> {
|
||||
|
||||
@Serial
|
||||
private static final long serialVersionUID = 1L;
|
||||
@@ -852,6 +852,11 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
|
||||
|
||||
if (parallel) {
|
||||
for (Set<Set<UnifyPair>> par_res : forkResults) {
|
||||
if (variance == 0 && (!result.isEmpty() && (!isUndefinedPairSetSet(currentThreadResult)))) {
|
||||
this.cancelChildExecution();
|
||||
return CompletableFuture.completedFuture(result);
|
||||
}
|
||||
|
||||
if (!isUndefinedPairSetSet(par_res) && isUndefinedPairSetSet(result)) {
|
||||
//wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen
|
||||
result = par_res;
|
||||
@@ -884,6 +889,7 @@ public class TypeUnifyTask extends RecursiveTask<CompletableFuture<Set<Set<Unify
|
||||
// Iterator<Set<UnifyPair>> nextSetasListIt = new ArrayList<>(nextSetAsList).iterator();
|
||||
boolean shouldBreak = varianceCase.eraseInvalidSets(rekTiefe, aParDef, nextSetAsList);
|
||||
if (shouldBreak) {
|
||||
this.cancelChildExecution();
|
||||
return CompletableFuture.completedFuture(result);
|
||||
}
|
||||
|
||||
|
@@ -1,6 +1,8 @@
|
||||
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
|
||||
|
||||
import de.dhbwstuttgart.exceptions.UnifyCancelException;
|
||||
import de.dhbwstuttgart.typeinference.constraints.Constraint;
|
||||
import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
|
||||
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
|
||||
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
|
||||
@@ -9,6 +11,7 @@ import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
|
||||
import de.dhbwstuttgart.util.Tuple;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
@@ -47,6 +50,29 @@ public class Variance0Case extends VarianceCase {
|
||||
} else {
|
||||
a = nextSetAsList.removeFirst();
|
||||
}
|
||||
|
||||
|
||||
Set<UnifyPair> finalA = a;
|
||||
if (!this.isOderConstraint && optOrigPair != null && optOrigPair.isPresent()) {
|
||||
if (optOrigPair.get().getBasePair().getLhsType() instanceof PlaceholderType) {
|
||||
nextSetasListRest = typeUnifyTask.oup.maxElements(
|
||||
nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != 1).toList()
|
||||
);
|
||||
} else {
|
||||
nextSetasListRest = typeUnifyTask.oup.minElements(
|
||||
nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != -1).toList()
|
||||
);
|
||||
}
|
||||
nextSetAsList.remove(a);
|
||||
} else if (this.isOderConstraint) {
|
||||
nextSetasListRest = typeUnifyTask.oup.maxElements(
|
||||
nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != 1).toList()
|
||||
);
|
||||
} else {
|
||||
for (int i = 0; i < Math.min(nextSetAsList.size(), 5); i++) {
|
||||
nextSetasListRest.add(nextSetAsList.removeFirst());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -62,10 +88,81 @@ public class Variance0Case extends VarianceCase {
|
||||
Set<Set<UnifyPair>> result,
|
||||
Set<Set<UnifyPair>> aParDef
|
||||
) {
|
||||
elems.add(a); //PL 2019-01-16 muss das wirklich hin steht schon in Zeile 859 ja braucht man siehe Zeile 859
|
||||
return typeUnifyTask.unify2(elems, eq, oderConstraints, fc, context.parallel(), rekTiefe, new HashSet<>(methodSignatureConstraint)).thenApply(
|
||||
unify2Result -> new Tuple<>(unify2Result, new HashSet<>())
|
||||
);
|
||||
CompletableFuture<Tuple<Set<Set<UnifyPair>>, Set<Set<Set<UnifyPair>>>>> resultValues = CompletableFuture.completedFuture(new Tuple<>(
|
||||
new HashSet<>(), new HashSet<>()
|
||||
));
|
||||
|
||||
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
|
||||
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
|
||||
newElemsOrig.add(a);
|
||||
|
||||
/* FORK ANFANG */
|
||||
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
|
||||
typeUnifyTask.addChildTask(forkOrig);
|
||||
// schedule compute() on another thread
|
||||
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
|
||||
resultValues = resultValues.thenCombine(forkOrigFuture,
|
||||
(prevResults, currentThreadResult) -> {
|
||||
forkOrig.writeLog("final Orig 0");
|
||||
forkOrig.closeLogFile();
|
||||
return new Tuple<>(currentThreadResult, prevResults.getSecond());
|
||||
});
|
||||
|
||||
//forks.add(forkOrig);
|
||||
if (typeUnifyTask.myIsCancelled()) {
|
||||
throw new UnifyCancelException();
|
||||
}
|
||||
/* FORK ENDE */
|
||||
|
||||
writeLog("a in " + variance + " " + a);
|
||||
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
|
||||
|
||||
while (!nextSetasListRest.isEmpty()) {
|
||||
Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
|
||||
nextSetAsList.remove(nSaL);
|
||||
writeLog("0 RM" + nSaL.toString());
|
||||
|
||||
if (!this.isOderConstraint) {
|
||||
//ueberpruefung ob zu a =. ty \in nSaL in sameEqSet ein Widerspruch besteht
|
||||
if (!sameEqSet.isEmpty() && !typeUnifyTask.checkNoContradiction(nSaL, sameEqSet, result)) {
|
||||
TypeUnifyTask.noShortendElements++;
|
||||
continue;
|
||||
}
|
||||
} else {
|
||||
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) nSaL).getExtendConstraint());
|
||||
}
|
||||
Set<UnifyPair> newEq = new HashSet<>(eq);
|
||||
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
|
||||
newElems.add(nSaL);
|
||||
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
|
||||
typeUnifyTask.addChildTask(fork);
|
||||
// schedule compute() on another thread
|
||||
CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
|
||||
resultValues = resultValues.thenCombine(forkFuture,
|
||||
(prevResults, fork_res) -> {
|
||||
if (typeUnifyTask.myIsCancelled()) {
|
||||
throw new UnifyCancelException();
|
||||
}
|
||||
writeLog("fork_res: " + fork_res.toString());
|
||||
writeLog(Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString());
|
||||
prevResults.getSecond().add(fork_res);
|
||||
if (!typeUnifyTask.isUndefinedPairSetSet(fork_res)) {
|
||||
aParDef.add(fork.getNextSetElement());
|
||||
}
|
||||
fork.writeLog("final 0");
|
||||
fork.closeLogFile();
|
||||
return prevResults;
|
||||
}
|
||||
);
|
||||
|
||||
if (typeUnifyTask.myIsCancelled()) {
|
||||
throw new UnifyCancelException();
|
||||
}
|
||||
}
|
||||
|
||||
return resultValues;
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -75,7 +172,7 @@ public class Variance0Case extends VarianceCase {
|
||||
Set<UnifyPair> compResult,
|
||||
Set<UnifyPair> compRes
|
||||
) {
|
||||
writeLog("RES var=1 ADD:" + result.toString() + " " + currentThreadResult.toString());
|
||||
writeLog("RES var=0 ADD:" + result.toString() + " " + currentThreadResult.toString());
|
||||
result.addAll(currentThreadResult);
|
||||
}
|
||||
|
||||
@@ -88,24 +185,43 @@ public class Variance0Case extends VarianceCase {
|
||||
if (!this.isOderConstraint) {
|
||||
return true;
|
||||
} else {
|
||||
|
||||
nextSetAsList.removeAll(nextSetasListOderConstraints);
|
||||
nextSetasListOderConstraints = new ArrayList<>();
|
||||
writeLog("Removed: " + nextSetasListOderConstraints);
|
||||
List<Set<UnifyPair>> smallerSetasList = typeUnifyTask.oup.smallerThan(a, nextSetAsList);
|
||||
final List<Set<UnifyPair>> smallerSetasList = typeUnifyTask.oup.smallerThan(a, nextSetAsList);
|
||||
List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
|
||||
.filter(x -> !((Constraint<UnifyPair>) x).isInherited())
|
||||
.collect(Collectors.toCollection(ArrayList::new));
|
||||
List<Set<UnifyPair>> notErased = new ArrayList<>();
|
||||
final List<Set<UnifyPair>> notErased = new ArrayList<>();
|
||||
notInherited.forEach(x -> notErased.addAll(typeUnifyTask.oup.smallerEqThan(x, smallerSetasList)));
|
||||
List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList);
|
||||
erased.removeAll(notErased);
|
||||
nextSetAsList.removeAll(erased);
|
||||
|
||||
writeLog("Removed: " + erased);
|
||||
|
||||
writeLog("Not Removed: " + nextSetAsList);
|
||||
|
||||
for (Set<UnifyPair> aPar : aParDef) {
|
||||
|
||||
nextSetAsList.removeAll(nextSetasListOderConstraints);
|
||||
nextSetasListOderConstraints = new ArrayList<>();
|
||||
writeLog("Removed: " + nextSetasListOderConstraints);
|
||||
smallerSetasList.clear();
|
||||
smallerSetasList.addAll(typeUnifyTask.oup.smallerThan(aPar, nextSetAsList));
|
||||
notInherited = smallerSetasList.stream()
|
||||
.filter(x -> !((Constraint<UnifyPair>) x).isInherited())
|
||||
.collect(Collectors.toCollection(ArrayList::new));
|
||||
notErased.clear();
|
||||
notInherited.forEach(x -> notErased.addAll(typeUnifyTask.oup.smallerEqThan(x, smallerSetasList)));
|
||||
erased = new ArrayList<>(smallerSetasList);
|
||||
erased.removeAll(notErased);
|
||||
nextSetAsList.removeAll(erased);
|
||||
writeLog("Removed: " + erased);
|
||||
writeLog("Not Removed: " + nextSetAsList);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@@ -72,6 +72,7 @@ public class Variance1Case extends VarianceCase {
|
||||
|
||||
/* FORK ANFANG */
|
||||
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, methodSignatureConstraint);
|
||||
typeUnifyTask.addChildTask(forkOrig);
|
||||
// schedule compute() on another thread
|
||||
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
|
||||
resultValues = resultValues.thenCombine(forkOrigFuture,
|
||||
@@ -108,6 +109,7 @@ public class Variance1Case extends VarianceCase {
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
|
||||
newElems.add(nSaL);
|
||||
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
|
||||
typeUnifyTask.addChildTask(fork);
|
||||
// schedule compute() on another thread
|
||||
CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
|
||||
resultValues = resultValues.thenCombine(forkFuture,
|
||||
|
@@ -60,6 +60,7 @@ public class Variance2Case extends VarianceCase {
|
||||
|
||||
/* FORK ANFANG */
|
||||
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
|
||||
typeUnifyTask.addChildTask(forkOrig);
|
||||
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
|
||||
resultValuesFuture = forkOrigFuture.thenApply((currentThreadResult) -> {
|
||||
forkOrig.writeLog("final Orig 2");
|
||||
@@ -89,6 +90,7 @@ public class Variance2Case extends VarianceCase {
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
|
||||
newElems.add(nSaL);
|
||||
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraintForParallel));
|
||||
typeUnifyTask.addChildTask(fork);
|
||||
CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
|
||||
resultValuesFuture = resultValuesFuture.thenCombine(forkFuture, (resultValues, fork_res) -> {
|
||||
if (typeUnifyTask.myIsCancelled()) {
|
||||
|
@@ -72,6 +72,7 @@ public class VarianceM1Case extends VarianceCase {
|
||||
|
||||
/* FORK ANFANG */
|
||||
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, methodSignatureConstraint);
|
||||
typeUnifyTask.addChildTask(forkOrig);
|
||||
// schedule compute() on another thread
|
||||
CompletableFuture<Set<Set<UnifyPair>>> forkOrigFuture = CompletableFuture.supplyAsync(forkOrig::compute, context.executor()).thenCompose(f -> f);
|
||||
resultValues = resultValues.thenCombine(forkOrigFuture,
|
||||
@@ -109,6 +110,7 @@ public class VarianceM1Case extends VarianceCase {
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
|
||||
newElems.add(nSaL);
|
||||
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
|
||||
typeUnifyTask.addChildTask(fork);
|
||||
// schedule compute() on another thread
|
||||
CompletableFuture<Set<Set<UnifyPair>>> forkFuture = CompletableFuture.supplyAsync(fork::compute, context.executor()).thenCompose(f -> f);
|
||||
resultValues = resultValues.thenCombine(forkFuture,
|
||||
|
@@ -30,10 +30,12 @@ import java.util.Set;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.stream.Collectors;
|
||||
import org.junit.Ignore;
|
||||
import org.junit.Test;
|
||||
import targetast.TestCodegen;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
@Ignore("Server tests create huge overhead, so they are ignored until required")
|
||||
public class ServerTest {
|
||||
|
||||
@Test
|
||||
|
Reference in New Issue
Block a user