From 72908f6fb4960926f385d89b0762f9e578b034b6 Mon Sep 17 00:00:00 2001
From: NoName11234 <47484268+NoName11234@users.noreply.github.com>
Date: Thu, 25 Apr 2024 19:08:26 +0200
Subject: [PATCH] removed variance from typeunifytask

---
 .../typeinference/unify/TypeUnifyTask.java | 404 ++----------------
 1 file changed, 27 insertions(+), 377 deletions(-)

diff --git a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
index ff9e6cd..5368471 100644
--- a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
+++ b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
@@ -689,15 +689,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
             System.out.print("");
         }
         */
-        Set<Set<UnifyPair>> result = new HashSet<>();
-        int variance = 0;
-
-        /* Varianzbestimmung Anfang
-         * Oderconstraint, wenn entweder kein Basepair oder unterschiedliche Basepairs => oderConstraint = true;
-         * Varianz = 1 => Argumentvariable
-         * Varianz = -1 => Rückgabevariable
-         * Varianz = 0 => unklar
-         * Varianz = 2 => Operatoren oderConstraints */
+        Set<Set<UnifyPair>> result = new HashSet<>();
+
         ArrayList<UnifyPair> zeroNextElem = new ArrayList<>(nextSetasList.get(0));
         UnifyPair fstBasePair = zeroNextElem.remove(0).getBasePair();
         Boolean oderConstraint = false;
@@ -715,9 +708,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                 .filter(d -> d.isPresent())
                 .map(e -> e.get())
                 .findAny();
-            if (xi.isPresent()) {
-                variance = xi.get();
-            }
         }
         else {
             oderConstraint = true;
@@ -726,29 +716,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
         else {
             oderConstraint = true;
         }
-
-        //Varianz-Bestimmung Oder-Constraints
-        if (oderConstraint) {
-            if (printtag) System.out.println("nextSetasList " + nextSetasList);
-            Optional<Integer> optVariance =
-                    nextSetasList.iterator()
-                    .next()
-                    .stream()
-                    .filter(x -> x.getGroundBasePair().getLhsType() instanceof PlaceholderType &&
-                            ! (x.getRhsType() instanceof PlaceholderType) &&
-                            x.getPairOp() == PairOperator.EQUALSDOT)
-                    .map(x ->
-                        ((PlaceholderType)x.getGroundBasePair().getLhsType()).getVariance())
-                    .reduce((n,m) -> { if ((n == 0) && (m==0)) return 0;
-                                       else if (n !=0) return n; //es muss mindestens eine Variance != 0 sein
-                                       else return m;
-                    });
-            //Fuer Operatorenaufrufe wird variance auf 2 gesetzt.
-            //da kein Receiver existiert also kein x.getGroundBasePair().getLhsType() instanceof PlaceholderType
-            //Bei Varianz = 2 werden alle Elemente des Kartesischen Produkts abgearbeitet
-            variance = optVariance.isPresent() ? optVariance.get() : 2;
-        }
-        /* Varianzbestimmung Ende */
         //writeLog("nextSetasList: " + nextSetasList.toString());
         Set<UnifyPair> nextSetElem = nextSetasList.get(0);
@@ -815,80 +782,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
         writeLog("nextSet: " + nextSet.toString());
         writeLog("nextSetasList: " + nextSetasList.toString());
-
-        /* staistics Nextvar an Hand Varianzbestimmung auskommentieren Anfang
-        if (variance == 1) {
-            a = oup.max(nextSetasList.iterator());
-            nextSetasList.remove(a);
-            if (oderConstraint) {
-                nextSetasListOderConstraints.add(((Constraint<UnifyPair>)a).getExtendConstraint());
-            }
-            writeLog("nextSetasListOderConstraints 1: " + nextSetasListOderConstraints);
-            nextSetasListRest = new ArrayList<>(nextSetasList);
-            Iterator<Set<UnifyPair>> nextSetasListItRest = new ArrayList<Set<UnifyPair>>(nextSetasListRest).iterator();
-            while (nextSetasListItRest.hasNext()) {
-                Set<UnifyPair> a_next = nextSetasListItRest.next();
-                if (//a.equals(a_next) ||
-                    (oup.compare(a, a_next) == 1)) {
-                    nextSetasListRest.remove(a_next);
-                }
-            }
-
-            //Alle maximale Elemente in nextSetasListRest bestimmen
-            //nur für diese wird parallele Berechnung angestossen.
-            nextSetasListRest = oup.maxElements(nextSetasListRest);
-        }
-        else if (variance == -1) {
-            a = oup.min(nextSetasList.iterator());
-            writeLog("Min: a in " + variance + " "+ a);
-            if (oderConstraint) {
-                nextSetasListOderConstraints.add(((Constraint<UnifyPair>)a).getExtendConstraint());
-            }
-            writeLog("nextSetasListOderConstraints -1: " + nextSetasListOderConstraints);
-            nextSetasList.remove(a);
-            nextSetasListRest = new ArrayList<>(nextSetasList);
-            Iterator<Set<UnifyPair>> nextSetasListItRest = new ArrayList<Set<UnifyPair>>(nextSetasListRest).iterator();
-            while (nextSetasListItRest.hasNext()) {
-                Set<UnifyPair> a_next = nextSetasListItRest.next();
-                if (//a.equals(a_next) ||
-                    (oup.compare(a, a_next) == -1)) {
-                    nextSetasListRest.remove(a_next);
-                }
-            }
-            //Alle minimalen Elemente in nextSetasListRest bestimmen
-            //nur für diese wird parallele Berechnung angestossen.
-            nextSetasListRest = oup.minElements(nextSetasListRest);
-        }
-        else if (variance == 2) {
-            a = nextSetasList.remove(0);
-
-            //Fuer alle Elemente wird parallele Berechnung angestossen.
-            nextSetasListRest = new ArrayList<>(nextSetasList);
-        }
-        else if (variance == 0) {
-            //wenn a <. theta dann ist ein maximales Element sehr wahrscheinlich
-            //wenn theta <. a dann ist ein minimales Element sehr wahrscheinlich
-            if (!oderConstraint && optOrigPair != null && optOrigPair.isPresent()) {
-                if (optOrigPair.get().getBasePair().getLhsType() instanceof PlaceholderType) {
-                    a = oup.max(nextSetasList.iterator());
-                }
-                else {
-                    a = oup.min(nextSetasList.iterator());
-                }
-                nextSetasList.remove(a);
-            }
-            else {
-                if (oderConstraint) {
-                    a = oup.max(nextSetasList.iterator());
-                    nextSetasList.remove(a);
-                    nextSetasListOderConstraints.add(((Constraint<UnifyPair>)a).getExtendConstraint());
-                }
-                else {
-                    a = nextSetasList.remove(0);
-                }
-            }
-        }
-        Nextvar an Hand Varianzbestimmung auskommentieren Ende */
+        a = nextSetasList.remove(0);

         //statisticsList
         //writeStatistics(a.toString());
@@ -899,7 +793,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
             i++;
             Set<Set<UnifyPair>> elems = new HashSet<Set<UnifyPair>>(oneElems);
-            writeLog("a1: " + rekTiefe + " "+ "variance: "+ variance + " " + a.toString()+ "\n");
             //Ergebnisvariable für den aktuelle Thread
             Set<Set<UnifyPair>> res = new HashSet<>();
@@ -924,292 +817,49 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
             /* Wenn parallel gearbeitet wird, wird je nach Varianz ein neuer Thread
              * gestartet, der parallel weiterarbeitet.
              */
-            if(parallel && (variance == 1) && noOfThread <= MaxNoOfThreads) {
+            if(parallel){
                 Set<TypeUnify2Task> forks = new HashSet<>();
                 Set<UnifyPair> newEqOrig = new HashSet<>(eq);
                 Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
                 List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
                 newElemsOrig.add(a);
-
-                /* FORK ANFANG */
                 TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, methodSignatureConstraint);
-                //forks.add(forkOrig);
-                synchronized(usedTasks) {
-                    if (this.myIsCancelled()) {
-                        return new HashSet<>();
-                    }
-                    forkOrig.fork();
-                }
-                /* FORK ENDE */
-
-                synchronized (this) {
-                    writeLog("a in " + variance + " "+ a);
-                    writeLog("nextSetasListRest: " + nextSetasListRest.toString());
-                }
+                //Überprüfen, ob das Set bereits berechnet wurde
+                forkOrig.fork();
+
                 while (!nextSetasList.isEmpty()) {
-                    Set<UnifyPair> nSaL = nextSetasList.remove(0);
-                    synchronized (this) { //nextSetasList.remove(nSaL);
-                        writeLog("1 RM" + nSaL.toString());
-                    }
-
-                    if (!oderConstraint) {
-
-                        /* statistics sameEq wird nicht betrachtet ANGFANG
-                        //ueberpruefung ob zu a =. ty \in nSaL in sameEqSet ein Widerspruch besteht
-                        if (!sameEqSet.isEmpty() && !checkNoContradiction(nSaL, sameEqSet, result)) {
-                            nSaL = null;
-                            noShortendElements++;
-                            continue;
-                        }
-                        statistics sameEq wird nicht betrachtet ENDE */
-                    }
-                    else {
-                        nextSetasListOderConstraints.add(((Constraint<UnifyPair>)nSaL).getExtendConstraint());
+                    Set<UnifyPair> nSaL = nextSetasList.removeFirst();
+
+                    if (oderConstraint) {
+                        nextSetasListOderConstraints.add(((Constraint<UnifyPair>) nSaL).getExtendConstraint());
                     }
+
                     Set<UnifyPair> newEq = new HashSet<>(eq);
                     Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
                     List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
                     newElems.add(nSaL);
                     TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint));
                     forks.add(fork);
-                    synchronized(usedTasks) {
-                        if (this.myIsCancelled()) {
-                            return new HashSet<>();
-                        }
-                        fork.fork();
-                    }
+                    //Überprüfen, ob das Set bereits berechnet wurde
+                    fork.fork();
                 }
-                //res = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
-
-                /* FORK ANFANG */
-                synchronized (this) {
-                    writeLog("wait "+ forkOrig.thNo);
-                    noOfThread--;
-                    res = forkOrig.join();
-                    synchronized (usedTasks) {
-                        if (this.myIsCancelled()) {
-                            return new HashSet<>();
-                        }
-                    }
-                    //noOfThread++;
-                    forkOrig.writeLog("final Orig 1");
-                    forkOrig.closeLogFile();
-                    //Set<Set<UnifyPair>> fork_res = forkOrig.join();
-                    writeLog("JoinOrig " + new Integer(forkOrig.thNo).toString());
-                    //noOfThread--; an das Ende von compute verschoben
-                    //add_res.add(fork_res);
-                };
-                /* FORK ENDE */
-
+
+                res = forkOrig.join();
+
                 forks.forEach(x -> writeLog("wait: " + x.thNo));
-                for(TypeUnify2Task fork : forks) {
-                    synchronized (this) {
-                        noOfThread--;
-                        Set<Set<UnifyPair>> fork_res = fork.join();
-                        synchronized (usedTasks) {
-                            if (this.myIsCancelled()) {
-                                return new HashSet<>();
-                            }
-                        }
-                        //noOfThread++;
-                        writeLog("Join " + new Integer(fork.thNo).toString());
-                        //noOfThread--; an das Ende von compute verschoben
-                        writeLog("fork_res: " + fork_res.toString());
-                        writeLog(new Boolean((isUndefinedPairSetSet(fork_res))).toString());
-                        add_res.add(fork_res);
-                        if (!isUndefinedPairSetSet(fork_res)) {
-                            aParDef.add(fork.getNextSetElement());
-                        }
-                        fork.writeLog("final 1");
-                        fork.closeLogFile();
-                    };
-                }
-                //noOfThread++;
-            } else {
-                if(parallel && (variance == -1) && noOfThread <= MaxNoOfThreads) {
-                    Set<TypeUnify2Task> forks = new HashSet<>();
-                    Set<UnifyPair> newEqOrig = new HashSet<>(eq);
-                    Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
-                    List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
-                    newElemsOrig.add(a);
-
-                    /* FORK ANFANG */
-                    TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint));
-                    //forks.add(forkOrig);
-                    synchronized(usedTasks) {
-                        if (this.myIsCancelled()) {
-                            return new HashSet<>();
-                        }
-                        forkOrig.fork();
-                    }
-                    /* FORK ENDE */
-
-                    synchronized (this) {
-                        writeLog("a in " + variance + " "+ a);
-                        writeLog("nextSetasListRest: " + nextSetasListRest.toString());
-                    }
-                    while (!nextSetasList.isEmpty()) {
-                        Set<UnifyPair> nSaL = nextSetasList.remove(0);
-                        synchronized (this) { //nextSetasList.remove(nSaL);
-                            writeLog("-1 RM" + nSaL.toString());
-                        }
-
-                        if (!oderConstraint) {
-                            /* statistics sameEq wird nicht betrachtet ANGFANG
-                            //ueberpruefung ob zu a =. ty \in nSaL in sameEqSet ein Widerspruch besteht
-                            if (!sameEqSet.isEmpty() && !checkNoContradiction(nSaL, sameEqSet, result)) {
-                                nSaL = null;
-                                noShortendElements++;
-                                continue;
-                            }
-                            statistics sameEq wird nicht betrachtet ENDE */
-                        }
-                        else {
-                            nextSetasListOderConstraints.add(((Constraint<UnifyPair>)nSaL).getExtendConstraint());
-                        }
-                        Set<UnifyPair> newEq = new HashSet<>(eq);
-                        Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
-                        List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
-                        newElems.add(nSaL);
-                        TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint));
-                        forks.add(fork);
-                        synchronized(usedTasks) {
-                            if (this.myIsCancelled()) {
-                                return new HashSet<>();
-                            }
-                            fork.fork();
-                        }
-                    }
-                    //res = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
-
-                    /* FORK ANFANG */
-                    synchronized (this) {
-                        writeLog("wait "+ forkOrig.thNo);
-                        noOfThread--;
-                        res = forkOrig.join();
-                        synchronized (usedTasks) {
-                            if (this.myIsCancelled()) {
-                                return new HashSet<>();
-                            }
-                        }
-                        //noOfThread++;
-                        forkOrig.writeLog("final Orig -1");
-                        forkOrig.closeLogFile();
-                        //Set<Set<UnifyPair>> fork_res = forkOrig.join();
-                        writeLog("JoinOrig " + new Integer(forkOrig.thNo).toString());
-                        //noOfThread--; an das Ende von compute verschoben
-                        //add_res.add(fork_res);
-                    };
-                    /* FORK ENDE */
-
-                    forks.forEach(x -> writeLog("wait: " + x.thNo));
-                    for(TypeUnify2Task fork : forks) {
-                        synchronized (this) {
-                            noOfThread--;
-                            Set<Set<UnifyPair>> fork_res = fork.join();
-                            synchronized (usedTasks) {
-                                if (this.myIsCancelled()) {
-                                    return new HashSet<>();
-                                }
-                            }
-                            //noOfThread++;
-                            writeLog("Join " + new Integer(fork.thNo).toString());
-                            //noOfThread--; an das Ende von compute verschoben
-                            writeLog("fork_res: " + fork_res.toString());
-                            writeLog(new Boolean((isUndefinedPairSetSet(fork_res))).toString());
-                            add_res.add(fork_res);
-                            if (!isUndefinedPairSetSet(fork_res)) {
-                                aParDef.add(fork.getNextSetElement());
-                            }
-                            fork.writeLog("final -1");
-                            fork.closeLogFile();
-                        };
-                    }
-                    //noOfThread++;
-                } else {
-                    if(parallel && (variance == 2) && noOfThread <= MaxNoOfThreads) {
-                        writeLog("var2einstieg");
-                        Set<TypeUnify2Task> forks = new HashSet<>();
-                        Set<UnifyPair> newEqOrig = new HashSet<>(eq);
-                        Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
-                        List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
-                        newElemsOrig.add(a);
-
-                        /* FORK ANFANG */
-                        TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint));
-                        //forks.add(forkOrig);
-                        synchronized(usedTasks) {
-                            if (this.myIsCancelled()) {
-                                return new HashSet<>();
-                            }
-                            forkOrig.fork();
-                        }
-                        /* FORK ENDE */
-
-                        synchronized (this) {
-                            writeLog("a in " + variance + " "+ a);
-                            writeLog("nextSetasListRest: " + nextSetasListRest.toString());
-                        }
-                        while (!nextSetasList.isEmpty()) {
-                            Set<UnifyPair> nSaL = nextSetasList.remove(0);
-                            //nextSetasList.remove(nSaL); //PL einkommentiert 20-02-03
-                            Set<UnifyPair> newEq = new HashSet<>(eq);
-                            Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
-                            List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
-                            newElems.add(nSaL);
-                            TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, methodSignatureConstraint);
-                            forks.add(fork);
-                            synchronized(usedTasks) {
-                                if (this.myIsCancelled()) {
-                                    return new HashSet<>();
-                                }
-                                fork.fork();
-                            }
-                        }
-                        //res = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
-
-                        /* FORK ANFANG */
-                        synchronized (this) {
-                            writeLog("wait "+ forkOrig.thNo);
-                            noOfThread--;
-                            res = forkOrig.join();
-                            synchronized (usedTasks) {
-                                if (this.myIsCancelled()) {
-                                    return new HashSet<>();
-                                }
-                            }
-                            //noOfThread++;
-                            forkOrig.writeLog("final Orig 2");
-                            forkOrig.closeLogFile();
-                            //Set<Set<UnifyPair>> fork_res = forkOrig.join();
-                            writeLog("JoinOrig " + new Integer(forkOrig.thNo).toString());
-                            //noOfThread--; an das Ende von compute verschoben
-                            //add_res.add(fork_res); //vermutlich falsch
-                        };
-                        /* FORK ENDE */
-                        forks.forEach(x -> writeLog("wait: " + x.thNo));
-                        for(TypeUnify2Task fork : forks) {
-                            synchronized (this) {
-                                noOfThread--;
-                                Set<Set<UnifyPair>> fork_res = fork.join();
-                                synchronized (usedTasks) {
-                                    if (this.myIsCancelled()) {
-                                        return new HashSet<>();
-                                    }
-                                }
-                                //noOfThread++;
-                                writeLog("Join " + new Integer(fork.thNo).toString());
-                                //noOfThread--; an das Ende von compute verschoben
-                                add_res.add(fork_res);
-                                fork.writeLog("final 2");
-                                fork.closeLogFile();
-                            };
-                        }
-                        //noOfThread++;
-                    } else {//parallel = false oder MaxNoOfThreads ist erreicht, sequentiell weiterarbeiten
-                        elems.add(a); //PL 2019-01-16 muss das wirklich hin steht schon in Zeile 859 ja braucht man siehe Zeile 859
+                for (TypeUnify2Task fork : forks) {
+                    Set<Set<UnifyPair>> fork_res = new HashSet<>();
+                    fork_res = fork.join();
+                    add_res.add(fork_res);
+                    if (!isUndefinedPairSetSet(fork_res)) {
+                        aParDef.add(fork.getNextSetElement());
+                    }
+                }
+            }else{
+                elems.add(a);
+                //Überprüfen ob das Set bereits berechnet wurde
                 res = unify2(elems, eq, oderConstraints, fc, parallel, rekTiefe, new HashSet<>(methodSignatureConstraint));
-            }}}
+            }
             //Ab hier alle parallele Berechnungen wieder zusammengeführt.
             //if (hilf == 1)
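
The simplified parallel branch above uses the standard java.util.concurrent fork/join pattern: fork one TypeUnify2Task for the chosen set a, fork one further task per remaining candidate set, then join the first task and collect the results of the others into add_res. What follows is a minimal, self-contained sketch of that pattern only; CandidateTask, unifyAll and the String payloads are hypothetical illustration names and are not the classes or signatures of this project.

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;

// Hypothetical stand-in for TypeUnify2Task: unifies one candidate set and
// returns a result set; the real work is only simulated here.
class CandidateTask extends RecursiveTask<Set<String>> {
    private final String candidate;

    CandidateTask(String candidate) {
        this.candidate = candidate;
    }

    @Override
    protected Set<String> compute() {
        // Placeholder for the actual unification of this candidate.
        return Set.of("result of " + candidate);
    }
}

public class ForkJoinSketch {

    // Mirrors the patched control flow: fork the first candidate, fork the
    // rest in a loop, join the first task, then join and collect the others.
    static List<Set<String>> unifyAll(Deque<String> nextSetasList) {
        CandidateTask forkOrig = new CandidateTask(nextSetasList.removeFirst());
        forkOrig.fork();

        List<CandidateTask> forks = new ArrayList<>();
        while (!nextSetasList.isEmpty()) {
            CandidateTask fork = new CandidateTask(nextSetasList.removeFirst());
            forks.add(fork);
            fork.fork();
        }

        List<Set<String>> addRes = new ArrayList<>();
        addRes.add(forkOrig.join());
        for (CandidateTask fork : forks) {
            addRes.add(fork.join());
        }
        return addRes;
    }

    public static void main(String[] args) {
        Deque<String> candidates = new ArrayDeque<>(List.of("a", "b", "c"));
        // Run inside a ForkJoinPool so fork()/join() execute on pool workers.
        List<Set<String>> results =
                ForkJoinPool.commonPool().invoke(new RecursiveTask<List<Set<String>>>() {
                    @Override
                    protected List<Set<String>> compute() {
                        return unifyAll(candidates);
                    }
                });
        results.forEach(System.out::println);
    }
}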