From 70b28bbfb06b5613da47467de7567d2f8b1e9b56 Mon Sep 17 00:00:00 2001
From: NoName11234 <47484268+NoName11234@users.noreply.github.com>
Date: Fri, 15 Mar 2024 14:54:14 +0100
Subject: [PATCH] removed debug variables

---
 .../typeinference/unify/TypeUnify.java        | 41 ++-------
 .../typeinference/unify/TypeUnify2Task.java   | 20 ++---
 .../typeinference/unify/TypeUnifyTask.java    | 83 +++++--------
 3 files changed, 30 insertions(+), 114 deletions(-)

diff --git a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java
index c2b52ca45..f43b87923 100644
--- a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java
+++ b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java
@@ -1,6 +1,5 @@
 package de.dhbwstuttgart.typeinference.unify;
 
-import java.io.IOException;
 import java.io.Writer;
 import java.util.List;
 import java.util.Set;
@@ -20,23 +19,15 @@ public class TypeUnify {
      * @param fc
      * @param logFile
      * @param log
-     * @param cons
      * @return
      */
     public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallel resultModel, UnifyTaskModelParallel taskModel) {
         ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors(), ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true);
         taskModel.setPool(pool);
         resultModel.setPool(pool);
-        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObject(logFile, pool), log, 0, resultModel, pool);
+        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObject(logFile, pool), log, resultModel, pool);
         pool.invoke(unifyTask);
         Set<Set<UnifyPair>> res = unifyTask.join();
-        try {
-            logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements + "\n");
-            logFile.flush();
-        }
-        catch (IOException e) {
-            System.err.println("no log-File");
-        }
         return res;
     }
 
@@ -47,15 +38,13 @@ public class TypeUnify {
      * @param fc
      * @param logFile
      * @param log
-     * @param cons
-     * @param ret
      * @return
      */
     public UnifyResultModelParallel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallel resultModel, UnifyTaskModelParallel taskModel) {
         ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors(), ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true);
         taskModel.setPool(pool);
         resultModel.setPool(pool);
-        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObject(logFile, pool), log, 0, resultModel, pool);
+        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObject(logFile, pool), log, resultModel, pool);
         pool.invoke(unifyTask);
         return resultModel;
     }
@@ -67,8 +56,6 @@ public class TypeUnify {
      * @param fc
      * @param logFile
      * @param log
-     * @param cons
-     * @param ret
      * @return
      */
     public UnifyResultModelParallel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallel resultModel, UnifyTaskModelParallel taskModel) {
         ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors(), ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true);
         taskModel.setPool(pool);
         resultModel.setPool(pool);
         TypeUnifyTask unifyTask = //new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
-                new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObject(logFile, pool), log, 0, resultModel, pool, statistics);
+                new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObject(logFile, pool), log, resultModel, pool, statistics);
         pool.invoke(unifyTask);
-        Set<Set<UnifyPair>> res = unifyTask.join();
-        try {
-            logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
-            logFile.flush();
-            unifyTask.statisticsFile.write("Backtracking: " + unifyTask.noBacktracking);
-            unifyTask.statisticsFile.write("\nLoops: " + unifyTask.noLoop);
-        }
-        catch (IOException e) {
-            System.err.println("no log-File");
-        }
+        unifyTask.join();
+
         return resultModel;
     }
 
@@ -107,21 +86,13 @@ public class TypeUnify {
      * @param fc
      * @param logFile
      * @param log
-     * @param cons
      * @return
      */
     public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallel resultModel, UnifyTaskModelParallel taskModel) {
         resultModel.setPool(ForkJoinPool.commonPool());
-        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, new WriterActiveObject(logFile, ForkJoinPool.commonPool()), log, 0, resultModel, ForkJoinPool.commonPool());
+        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, new WriterActiveObject(logFile, ForkJoinPool.commonPool()), log, resultModel, ForkJoinPool.commonPool());
         unifyTask.statisticsFile = statistics;
         Set<Set<UnifyPair>> res = unifyTask.compute();
-        try {
-            logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
-            logFile.flush();
-        }
-        catch (IOException e) {
-            System.err.println("no log-File");
-        }
         return res;
     }
 
diff --git a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java
index 2546f8e1b..f2b10525c 100644
--- a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java
+++ b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java
@@ -1,17 +1,11 @@
 package de.dhbwstuttgart.typeinference.unify;
 
-import java.io.FileWriter;
-import java.io.IOException;
 import java.io.Writer;
-import java.util.ArrayList;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.ForkJoinPool;
 
 import de.dhbwstuttgart.typeinference.constraints.Constraint;
-import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
-import de.dhbwstuttgart.typeinference.constraints.Pair;
 import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
 import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
 
@@ -24,14 +18,14 @@ public class TypeUnify2Task extends TypeUnifyTask {
 
     TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq,
             List<Set<Constraint<UnifyPair>>> oderConstraints,
             Set<UnifyPair> nextSetElement,
-            IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, int rekTiefe, UnifyResultModelParallel urm,
+            IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, UnifyResultModelParallel urm,
             Set<UnifyPair> methodSignatureConstraintUebergabe, ForkJoinPool pool, Writer statistics) {
-        this(setToFlatten, eq, oderConstraints, nextSetElement, fc, parallel, logFile, log, rekTiefe, urm, methodSignatureConstraintUebergabe, pool );
+        this(setToFlatten, eq, oderConstraints, nextSetElement, fc, parallel, logFile, log, urm, methodSignatureConstraintUebergabe, pool );
     }
 
-    public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, int rekTiefe, UnifyResultModelParallel urm, Set<UnifyPair> methodSignatureConstraintUebergabe, ForkJoinPool pool) {
-        super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, pool);
+    public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, UnifyResultModelParallel urm, Set<UnifyPair> methodSignatureConstraintUebergabe, ForkJoinPool pool) {
+        super(eq, oderConstraints, fc, parallel, logFile, log, urm, pool);
         this.setToFlatten = setToFlatten;
         this.nextSetElement = nextSetElement;
         this.methodSignatureConstraintUebergabe = methodSignatureConstraintUebergabe;
@@ -43,11 +37,7 @@ public class TypeUnify2Task extends TypeUnifyTask {
 
     @Override
     protected Set<Set<UnifyPair>> compute() {
-        if (one) {
-            System.out.println("two");
-        }
-        one = true;
-        Set<Set<UnifyPair>> res = unify2(setToFlatten, eq, oderConstraintsField, fc, parallel, rekTiefeField, methodSignatureConstraintUebergabe);
+        Set<Set<UnifyPair>> res = unify2(setToFlatten, eq, oderConstraintsField, fc, parallel, methodSignatureConstraintUebergabe);
         /*if (isUndefinedPairSetSet(res)) {
             return new HashSet<>(); }
             else
diff --git a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
index 7973a053b..53c22ae02 100644
--- a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
+++ b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
@@ -29,8 +29,6 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.IMatch;
 import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
 import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify;
 import de.dhbwstuttgart.typeinference.unify.model.ExtendsType;
-import de.dhbwstuttgart.typeinference.unify.model.FunNType;
-import de.dhbwstuttgart.typeinference.unify.model.OrderingExtend;
 import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
 import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
 import de.dhbwstuttgart.typeinference.unify.model.ReferenceType;
@@ -39,9 +37,6 @@ import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
 import de.dhbwstuttgart.typeinference.unify.model.Unifier;
 import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
 import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
-import de.dhbwstuttgart.typeinference.unify.model.WildcardType;
-import de.dhbwstuttgart.util.Pair;
-import de.dhbwstuttgart.typeinference.unify.model.OrderingUnifyPair;
 
 import org.apache.commons.io.output.NullWriter;
 import java.io.FileWriter;
@@ -69,7 +64,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
     protected UnifyResultModelParallel urm;
     private static int totalnoOfThread = 0;
     int thNo;
-    protected boolean one = false;
 
     public static final String rootDirectory = System.getProperty("user.dir")+"/test/logFiles/";
     protected WriterActiveObject logFile;
@@ -91,24 +85,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 
     protected IFiniteClosure fc;
 
-    protected OrderingExtend<Set<UnifyPair>> oup;
-
     protected boolean parallel;
 
-    int rekTiefeField;
-
-    Integer nOfUnify = 0;
-
-    Integer noUndefPair = 0;
-
-    Integer noAllErasedElements = 0;
-
-    static int noBacktracking;
-
-    static int noLoop;
-
-    static Integer noShortendElements = 0;
-
     static Writer statisticsFile = new NullWriter();
 
     public TypeUnifyTask() {
@@ -130,11 +108,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
      */
     //statistics
-    public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, int rekTiefe, UnifyResultModelParallel urm, ForkJoinPool pool, Writer statisticsFile) {
-        this(eq,oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, pool);
+    public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, UnifyResultModelParallel urm, ForkJoinPool pool, Writer statisticsFile) {
+        this(eq,oderConstraints, fc, parallel, logFile, log, urm, pool);
         this.statisticsFile = statisticsFile;
     }
 
-    public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, int rekTiefe, UnifyResultModelParallel urm, ForkJoinPool pool) {
+    public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, UnifyResultModelParallel urm, ForkJoinPool pool) {
         this.eq = eq;
         //this.oderConstraints = oderConstraints.stream().map(x -> x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
         this.oderConstraintsField = oderConstraints; /*.stream().map(x -> {
@@ -148,7 +126,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
         //x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
 
         this.fc = fc;
-        this.oup = new OrderingUnifyPair(fc);
         this.parallel = parallel;
         this.logFile = logFile;
         this.log = log;
@@ -183,7 +160,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
         }
         */
         rules = new RuleSet(logFile);
-        this.rekTiefeField = rekTiefe;
         this.urm = urm;
 
     }
@@ -228,7 +204,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
         ArrayList<Set<Constraint<UnifyPair>>> remainingOderconstraints = oderConstraintsField.stream()
                 .filter(x -> x.size()>1)
                 .collect(Collectors.toCollection(ArrayList::new));
-        Set<Set<UnifyPair>> res = unify(neweq, remainingOderconstraints, fc, parallel, rekTiefeField, new HashSet<>());
+        Set<Set<UnifyPair>> res = unify(neweq, remainingOderconstraints, fc, parallel, new HashSet<>());
 
         if(parallel){
             logFile.close();
@@ -264,7 +240,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
      * @param fc The finite closure
      * @return The set of all principal type unifiers
      */
-    protected Set<Set<UnifyPair>> unify(final Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe, Set<UnifyPair> methodSignatureConstraint) {
+    protected Set<Set<UnifyPair>> unify(final Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Set<UnifyPair> methodSignatureConstraint) {
         //Set<UnifyPair> aas = eq.stream().filter(x -> x.getLhsType().getName().equals("AA") //&& x.getPairOp().equals(PairOperator.SMALLERDOT)
         //  ).collect(Collectors.toCollection(HashSet::new));
         //writeLog(nOfUnify.toString() + " AA: " + aas.toString());
         //if (aas.isEmpty()) {
         //}
         //.collect(Collectors.toCollection(HashSet::new)));
-
-        rekTiefe++;
-        nOfUnify++;
-        writeLog(nOfUnify + " Unifikation: " + eq.toString());
-        writeLog(nOfUnify.toString() + " Oderconstraints: " + oderConstraints.toString());
 
         /*
          * Variancen auf alle Gleichungen vererben
          */
@@ -329,9 +300,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
         } while (eqSubst.isPresent());
 
         eq0.forEach(UnifyPair::disableCondWildcards);
-
-        writeLog(nOfUnify.toString() + " Unifikation nach applyTypeUnificationRules: " + eq + "\n"
-                + nOfUnify + " Oderconstraints nach applyTypeUnificationRules: " + oderConstraints);
 
         /*
          * Step 2 and 3: Create a subset eq1s of pairs where both sides are TPH and eq2s of the other pairs
@@ -382,15 +350,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
             //nicht ausgewertet Faculty  Beispiel im 1. Schritt
             //PL 2017-10-03 geloest, muesste noch mit FCs mit kleineren
             //Typen getestet werden.
-            writeLog(nOfUnify.toString() + " Oderconstraints2: " + oderConstraintsOutput);
         // If pairs occured that did not match one of the cartesian product cases,
         // those pairs are contradictory and the unification is impossible.
         if(!undefinedPairs.isEmpty()) {
-            noUndefPair++;
-            for (UnifyPair up : undefinedPairs) {
-                writeLog(noUndefPair.toString() + " UndefinedPairs; " + up + "\n"
-                        + "BasePair; " + up.getBasePair());
-            }
             Set<Set<UnifyPair>> error = new HashSet<>();
             undefinedPairs = undefinedPairs.stream().peek(UnifyPair::setUndefinedPair).collect(Collectors.toCollection(HashSet::new));
             error.add(undefinedPairs);
@@ -433,12 +395,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 
         //Aufruf von computeCartesianRecursive ANFANG
         //writeLog("topLevelSets: " + topLevelSets.toString());
-        return computeCartesianRecursive(new ArrayList<>(topLevelSets), eq, oderConstraintsOutput, fc, parallel, rekTiefe, methodSignatureConstraint);
+        return computeCartesianRecursive(new ArrayList<>(topLevelSets), eq, oderConstraintsOutput, fc, parallel, methodSignatureConstraint);
 
     }
 
 
-    Set<Set<UnifyPair>> unify2(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe, Set<UnifyPair> methodSignatureConstraint) {
+    Set<Set<UnifyPair>> unify2(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Set<UnifyPair> methodSignatureConstraint) {
         //Aufruf von computeCartesianRecursive ENDE
 
         //keine Ahnung woher das kommt
@@ -535,12 +497,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
             }
         }
         else if(eqPrimePrime.isPresent()) {
-            Set<Set<UnifyPair>> unifyres = unify(eqPrimePrime.get(), newOderConstraints, fc, parallel, rekTiefe, methodSignatureConstraint);
+            Set<Set<UnifyPair>> unifyres = unify(eqPrimePrime.get(), newOderConstraints, fc, parallel, methodSignatureConstraint);
 
             eqPrimePrimeSet.addAll(unifyres);
         }
         else {
-            Set<Set<UnifyPair>> unifyres = unify(eqPrime, newOderConstraints, fc, parallel, rekTiefe, methodSignatureConstraint);
+            Set<Set<UnifyPair>> unifyres = unify(eqPrime, newOderConstraints, fc, parallel, methodSignatureConstraint);
 
             eqPrimePrimeSet.addAll(unifyres);
 
@@ -579,10 +541,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
      * @param oderConstraints Remaining or-constraints
      * @param fc The finite closure
      * @param parallel If the algorithm should be parallelized run
-     * @param rekTiefe Deep of recursive calls
     * @return The set of all principal type unifiers
      */
-    Set<Set<UnifyPair>> computeCartesianRecursive(ArrayList<Set<? extends Set<UnifyPair>>> topLevelSets, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe, Set<UnifyPair> methodSignatureConstraint) {
+    Set<Set<UnifyPair>> computeCartesianRecursive(ArrayList<Set<? extends Set<UnifyPair>>> topLevelSets, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Set<UnifyPair> methodSignatureConstraint) {
         //oneElems: Alle 1-elementigen Mengen, die nur ein Paar
         //a <. theta, theta <. a oder a =. theta enthalten
 
@@ -598,7 +559,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
         Optional<Set<? extends Set<UnifyPair>>> optNextSet = topLevelSets.stream().filter(x -> x.size()>1).findAny();
 
         if (optNextSet.isEmpty()) {//Alle Elemente sind 1-elementig
-            return unify2(oneElems, eq, oderConstraints, fc, parallel, rekTiefe, methodSignatureConstraint);
+            return unify2(oneElems, eq, oderConstraints, fc, parallel, methodSignatureConstraint);
         }
 
         Set<? extends Set<UnifyPair>> nextSet = optNextSet.get();
@@ -784,7 +745,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
             }
 
             Set<Set<UnifyPair>> elems = new HashSet<>(oneElems);
-            writeLog("a1: " + rekTiefe + " "+ "variance: "+ variance + " " + a.toString()+ "\n");
+            writeLog("variance: "+ variance + " " + a.toString()+ "\n");
 
             //Ergebnisvariable für den aktuelle Thread
             Set<Set<UnifyPair>> res;
@@ -817,7 +778,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                 newElemsOrig.add(a);
 
                 /* FORK ANFANG */
-                TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, methodSignatureConstraint, this.pool);
+                TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, urm, methodSignatureConstraint, this.pool);
                 //forks.add(forkOrig);
                 forkOrig.fork();
 
@@ -840,7 +801,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                     Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
                     List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
                     newElems.add(nSaL);
-                    TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, new HashSet<>(methodSignatureConstraint), this.pool);
+                    TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, urm, new HashSet<>(methodSignatureConstraint), this.pool);
                     forks.add(fork);
                     fork.fork();
                 }
@@ -886,7 +847,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                 newElemsOrig.add(a);
 
                 /* FORK ANFANG */
-                TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, new HashSet<>(methodSignatureConstraint), this.pool);
+                TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, urm, new HashSet<>(methodSignatureConstraint), this.pool);
                 //forks.add(forkOrig);
                 forkOrig.fork();
 
@@ -908,7 +869,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                     Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
                     List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
                     newElems.add(nSaL);
-                    TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, new HashSet<>(methodSignatureConstraint), this.pool);
+                    TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, urm, new HashSet<>(methodSignatureConstraint), this.pool);
                     forks.add(fork);
                     fork.fork();
                 }
@@ -955,7 +916,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                 newElemsOrig.add(a);
 
                 /* FORK ANFANG */
-                TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, new HashSet<>(methodSignatureConstraint), this.pool);
+                TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, urm, new HashSet<>(methodSignatureConstraint), this.pool);
                 //forks.add(forkOrig);
                 forkOrig.fork();
                 /* FORK ENDE */
@@ -970,7 +931,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                     Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
                     List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
                     newElems.add(nSaL);
-                    TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, methodSignatureConstraint, this.pool);
+                    TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, urm, methodSignatureConstraint, this.pool);
                     forks.add(fork);
                     fork.fork();
                 }
@@ -1004,7 +965,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                 //noOfThread++;
             } else {//parallel = false oder MaxNoOfThreads ist erreicht, sequentiell weiterarbeiten
                 elems.add(a); //PL 2019-01-16 muss das wirklich hin steht schon in Zeile 859 ja braucht man siehe Zeile 859
-                res = unify2(elems, eq, oderConstraints, fc, parallel, rekTiefe, new HashSet<>(methodSignatureConstraint));
+                res = unify2(elems, eq, oderConstraints, fc, parallel, new HashSet<>(methodSignatureConstraint));
             }}}
 
             //Ab hier alle parallele Berechnungen wieder zusammengeführt.
@@ -1317,17 +1278,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                     //        .collect(Collectors.toCollection(ArrayList::new));
                     writeLog("res (undef): " + res.toString() + "\n" +
                             "abhSubst: " + abhSubst.toString() + "\n" +
-                            "a2: " + rekTiefe + " " + a.toString() + "\n" +
                             "Durchschnitt: " + durchschnitt.toString() + "\n" +
                             "nextSet: " + nextSet.toString() + "\n" +
                             "nextSetasList: " + nextSetasList.toString() + "\n" +
                             "Number first erased Elements (undef): " + (len - nofstred) + "\n" +
                             "Number second erased Elements (undef): " + (nofstred- nextSetasList.size()) + "\n" +
                             "Number erased Elements (undef): " + (len - nextSetasList.size()));
-                    noAllErasedElements = noAllErasedElements + (len - nextSetasList.size());
-                    writeLog("Number of all erased Elements (undef): " + noAllErasedElements.toString());
-                    noBacktracking++;
-                    writeLog("Number of Backtracking: " + noBacktracking);
                     //writeStatistics("Number of erased elements: " + (len - nextSetasList.size()));
                     //writeStatistics("Number of Backtracking: " + noBacktracking);
                     //System.out.println("");
@@ -1342,7 +1298,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
             //else result.stream().filter(y -> !isUndefinedPairSet(y));
             writeLog("res: " + res.toString());
             //writeStatistics(" End Number of Elements (" + rekTiefe + "): " + nextSetasList.size());
-            noLoop++;
             //writeStatistics("Number of Loops: " + noLoop);
         }
         //2020-02-02: if (variance ==2) Hier Aufruf von filterOverriding einfuegen