From 10096cbaebc15e34f432069d788abb69a31b4a20 Mon Sep 17 00:00:00 2001
From: NoName11234 <47484268+NoName11234@users.noreply.github.com>
Date: Thu, 25 Apr 2024 16:03:18 +0200
Subject: [PATCH] initial commit
---
.gitignore | 31 +
Makefile | 3 +
README_aktuelle_Branches | 11 +
pom.xml | 126 +
.../exceptions/DebugException.java | 7 +
.../typeinference/constraints/Constraint.java | 69 +
.../constraints/ConstraintSet.java | 126 +
.../typeinference/constraints/Pair.java | 13 +
.../unify/GuavaSetOperations.java | 23 +
.../unify/MartelliMontanariUnify.java | 108 +
.../typeinference/unify/Match.java | 92 +
.../typeinference/unify/RuleSet.java | 1050 +++++++
.../typeinference/unify/TypeUnify.java | 125 +
.../typeinference/unify/TypeUnify2Task.java | 76 +
.../typeinference/unify/TypeUnifyTask.java | 2641 +++++++++++++++++
.../unify/Unifikationsalgorithmus.java | 11 +
.../typeinference/unify/UnifyResultEvent.java | 18 +
.../unify/UnifyResultListener.java | 7 +
.../unify/UnifyResultListenerImpl.java | 21 +
.../typeinference/unify/UnifyResultModel.java | 41 +
.../typeinference/unify/UnifyTaskModel.java | 18 +
.../unify/distributeVariance.java | 54 +
.../typeinference/unify/freshPlaceholder.java | 15 +
.../unify/interfaces/IFiniteClosure.java | 68 +
.../unify/interfaces/IMatch.java | 29 +
.../unify/interfaces/IRuleSet.java | 103 +
.../unify/interfaces/ISetOperations.java | 16 +
.../unify/interfaces/IUnify.java | 35 +
.../unify/interfaces/UnifyTypeVisitor.java | 23 +
.../unify/model/ExtendsType.java | 96 +
.../unify/model/FiniteClosure.java | 772 +++++
.../typeinference/unify/model/FunNType.java | 103 +
.../typeinference/unify/model/Node.java | 118 +
.../unify/model/OrderingExtend.java | 89 +
.../unify/model/OrderingUnifyPair.java | 457 +++
.../unify/model/PairOperator.java | 49 +
.../unify/model/PlaceholderType.java | 211 ++
.../unify/model/ReferenceType.java | 100 +
.../typeinference/unify/model/SuperType.java | 88 +
.../typeinference/unify/model/TypeParams.java | 191 ++
.../typeinference/unify/model/Unifier.java | 189 ++
.../typeinference/unify/model/UnifyPair.java | 247 ++
.../typeinference/unify/model/UnifyType.java | 119 +
.../unify/model/WildcardType.java | 72 +
.../unify/model/hashKeyType.java | 25 +
.../unify/visitUnifyTypeVisitor.java | 47 +
.../de/dhbwstuttgart/util/BiRelation.java | 19 +
src/main/java/de/dhbwstuttgart/util/Pair.java | 25 +
src/test/java/UnifyTest.java | 85 +
49 files changed, 8062 insertions(+)
create mode 100644 .gitignore
create mode 100644 Makefile
create mode 100644 README_aktuelle_Branches
create mode 100644 pom.xml
create mode 100644 src/main/java/de/dhbwstuttgart/exceptions/DebugException.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/constraints/Constraint.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/constraints/ConstraintSet.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/constraints/Pair.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/GuavaSetOperations.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/MartelliMontanariUnify.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/Match.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/RuleSet.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/Unifikationsalgorithmus.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/UnifyResultEvent.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/UnifyResultListener.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/UnifyResultListenerImpl.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/UnifyResultModel.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/UnifyTaskModel.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/distributeVariance.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/freshPlaceholder.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/interfaces/IFiniteClosure.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/interfaces/IMatch.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/interfaces/IRuleSet.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/interfaces/ISetOperations.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/interfaces/IUnify.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/interfaces/UnifyTypeVisitor.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/model/ExtendsType.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/model/FiniteClosure.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/model/FunNType.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/model/Node.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/model/OrderingExtend.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/model/OrderingUnifyPair.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/model/PairOperator.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/model/PlaceholderType.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/model/ReferenceType.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/model/SuperType.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/model/TypeParams.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/model/Unifier.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/model/UnifyPair.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/model/UnifyType.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/model/WildcardType.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/model/hashKeyType.java
create mode 100644 src/main/java/de/dhbwstuttgart/typeinference/unify/visitUnifyTypeVisitor.java
create mode 100644 src/main/java/de/dhbwstuttgart/util/BiRelation.java
create mode 100644 src/main/java/de/dhbwstuttgart/util/Pair.java
create mode 100644 src/test/java/UnifyTest.java
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..2d0d611
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,31 @@
+CVS
+bin
+*.class
+*.log
+
+# Mobile Tools for Java (J2ME)
+.mtj.tmp/
+
+# Package Files #
+*.jar
+*.war
+*.ear
+
+# IDEs
+.classpath
+*.iml
+.idea/
+/target/
+.DS_Store
+.project
+.settings/
+/target/
+
+#
+manually/
+
+logFiles/**
+!logFiles/.gitkeep
+
+src/main/java/de/dhbwstuttgart/parser/antlr/
+src/main/java/de/dhbwstuttgart/sat/asp/parser/antlr/
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..e73556a
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,3 @@
+NoOptParallel:
+ mvn -DskipTests package
+ cp target/JavaTXcompiler-0.1-jar-with-dependencies.jar target/JavaTXcompiler-0.1-jar-with-dependencies_NoOptParallel.jar
diff --git a/README_aktuelle_Branches b/README_aktuelle_Branches
new file mode 100644
index 0000000..5e9fc63
--- /dev/null
+++ b/README_aktuelle_Branches
@@ -0,0 +1,11 @@
+Stand: 24.5.21
+ bigRefactoring: Master-Brach
+ targetBytecode: Neuer Codegenerator mit generated generics Daniel
+ bigRefactoringUnifyComment: Dokumentation Unify, Martin
+ bytecodeGenericsSecond: Generated Generics, Ali, Martin
+ inferWildcards, Wildcards, Till
+ master, derzeit nicht genutzt
+ plugin, eigemntlicher Branch fuer Plugin-Basis, derzeit nicht aktuelle (aktuelle Version in simplifyRes
+ simplifyRes, Basis fuer Plugin, sollte auf Plugin gemerged werden, noch keine Packages, Michael
+ strucTypesNew, Struturelle Typen, alte Basis, arbeite derzeit niemand
+
diff --git a/pom.xml b/pom.xml
new file mode 100644
index 0000000..1470115
--- /dev/null
+++ b/pom.xml
@@ -0,0 +1,126 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <groupId>de.dhbwstuttgart</groupId>
+    <artifactId>JavaTXcompiler</artifactId>
+    <packaging>jar</packaging>
+    <version>0.1</version>
+    <name>JavaTXcompiler</name>
+    <url>http://maven.apache.org</url>
+    <dependencies>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <version>4.11</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.antlr</groupId>
+            <artifactId>antlr4</artifactId>
+            <version>4.11.1</version>
+        </dependency>
+        <dependency>
+            <groupId>commons-io</groupId>
+            <artifactId>commons-io</artifactId>
+            <version>2.6</version>
+        </dependency>
+        <dependency>
+            <groupId>com.google.guava</groupId>
+            <artifactId>guava</artifactId>
+            <version>22.0</version>
+        </dependency>
+        <dependency>
+            <groupId>org.reflections</groupId>
+            <artifactId>reflections</artifactId>
+            <version>0.9.11</version>
+        </dependency>
+        <dependency>
+            <groupId>org.ow2.asm</groupId>
+            <artifactId>asm</artifactId>
+            <version>7.0</version>
+        </dependency>
+    </dependencies>
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <version>3.8.0</version>
+                <configuration>
+                    <compilerArgs>--enable-preview</compilerArgs>
+                    <release>21</release>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.antlr</groupId>
+                <artifactId>antlr4-maven-plugin</artifactId>
+                <version>4.11.1</version>
+                <executions>
+                    <execution>
+                        <id>antlr</id>
+                        <goals>
+                            <goal>antlr4</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-jar-plugin</artifactId>
+                <configuration>
+                    <archive>
+                        <manifest>
+                            <mainClass>de.dhbwstuttgart.core.ConsoleInterface</mainClass>
+                        </manifest>
+                    </archive>
+                </configuration>
+            </plugin>
+            <plugin>
+                <artifactId>maven-assembly-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>single</goal>
+                        </goals>
+                    </execution>
+                </executions>
+                <configuration>
+                    <archive>
+                        <manifest>
+                            <mainClass>de.dhbwstuttgart.core.ConsoleInterface</mainClass>
+                        </manifest>
+                    </archive>
+                    <descriptorRefs>
+                        <descriptorRef>jar-with-dependencies</descriptorRef>
+                    </descriptorRefs>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
+    <distributionManagement>
+        <repository>
+            <id>maven-repository</id>
+            <url>file:///${project.basedir}/target</url>
+        </repository>
+    </distributionManagement>
+    <properties>
+        <maven.compiler.source>19</maven.compiler.source>
+        <maven.compiler.target>19</maven.compiler.target>
+        <mainClass>de.dhbwstuttgart.core.ConsoleInterface</mainClass>
+    </properties>
+    <repositories>
+        <repository>
+            <id>maven-repository</id>
+            <name>MyCo Internal Repository</name>
+            <url>file:///${project.basedir}/maven-repository/</url>
+        </repository>
+    </repositories>
+</project>
diff --git a/src/main/java/de/dhbwstuttgart/exceptions/DebugException.java b/src/main/java/de/dhbwstuttgart/exceptions/DebugException.java
new file mode 100644
index 0000000..559b7f1
--- /dev/null
+++ b/src/main/java/de/dhbwstuttgart/exceptions/DebugException.java
@@ -0,0 +1,7 @@
+package de.dhbwstuttgart.exceptions;
+
+public class DebugException extends RuntimeException{
+ public DebugException(String message){
+ System.err.println(message);
+ }
+}
diff --git a/src/main/java/de/dhbwstuttgart/typeinference/constraints/Constraint.java b/src/main/java/de/dhbwstuttgart/typeinference/constraints/Constraint.java
new file mode 100644
index 0000000..ffac994
--- /dev/null
+++ b/src/main/java/de/dhbwstuttgart/typeinference/constraints/Constraint.java
@@ -0,0 +1,69 @@
+package de.dhbwstuttgart.typeinference.constraints;
+
+import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
+
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Set;
+
+public class Constraint<A> extends HashSet<A> {
+ private static final long serialVersionUID = 1L;
+ private Boolean isInherited = false;//wird nur für die Method-Constraints benoetigt
+
+ /*
+ * wird verwendet um bei der Codegenerierung die richtige Methoden - Signatur
+ * auszuwaehlen
+ */
+    /*private*/ Set<A> methodSignatureConstraint = new HashSet<>();
+
+    private Constraint<A> extendConstraint = null;
+
+ public Constraint() {
+ super();
+ }
+
+ public Constraint(Boolean isInherited) {
+ this.isInherited = isInherited;
+ }
+
+    public Constraint(Boolean isInherited, Constraint<A> extendConstraint, Set<A> methodSignatureConstraint) {
+ this.isInherited = isInherited;
+ this.extendConstraint = extendConstraint;
+ this.methodSignatureConstraint = methodSignatureConstraint;
+ }
+
+ public void setIsInherited(Boolean isInherited) {
+ this.isInherited = isInherited;
+ }
+
+ public Boolean isInherited() {
+ return isInherited;
+ }
+
+    public Constraint<A> getExtendConstraint() {
+        return extendConstraint;
+    }
+
+    public void setExtendConstraint(Constraint<A> c) {
+        extendConstraint = c;
+    }
+
+    public Set<A> getmethodSignatureConstraint() {
+        return methodSignatureConstraint;
+    }
+
+    public void setmethodSignatureConstraint(Set<A> c) {
+        methodSignatureConstraint = c;
+    }
+
+ public String toString() {
+ return super.toString() + "\nisInherited = " + isInherited
+ //" + extendsContraint: " + (extendConstraint != null ? extendConstraint.toStringBase() : "null" )
+ + "\n" ;
+ }
+
+ public String toStringBase() {
+ return super.toString();
+ }
+
+}
diff --git a/src/main/java/de/dhbwstuttgart/typeinference/constraints/ConstraintSet.java b/src/main/java/de/dhbwstuttgart/typeinference/constraints/ConstraintSet.java
new file mode 100644
index 0000000..c88c12a
--- /dev/null
+++ b/src/main/java/de/dhbwstuttgart/typeinference/constraints/ConstraintSet.java
@@ -0,0 +1,126 @@
+package de.dhbwstuttgart.typeinference.constraints;
+
+
+import de.dhbwstuttgart.typeinference.unify.GuavaSetOperations;
+
+import java.util.*;
+import java.util.function.BinaryOperator;
+import java.util.function.Consumer;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
+public class ConstraintSet<A> {
+    Constraint<A> undConstraints = new Constraint<>();
+    List<Set<Constraint<A>>> oderConstraints = new ArrayList<>();
+
+ public void addUndConstraint(A p){
+ undConstraints.add(p);
+ }
+
+    public void addOderConstraint(Set<Constraint<A>> methodConstraints) {
+        oderConstraints.add(methodConstraints);
+    }
+
+    public void addAllUndConstraint(Constraint<A> allUndConstraints){
+        undConstraints.addAll(allUndConstraints);
+    }
+
+    public void addAllOderConstraint(List<Set<Constraint<A>>> allOderConstraints){
+        this.oderConstraints.addAll(allOderConstraints);
+    }
+
+    public void addAll(ConstraintSet<A> constraints) {
+        this.addAllUndConstraint(constraints.undConstraints);
+        this.addAllOderConstraint(constraints.oderConstraints);
+    }
+
+ @Override
+ public String toString(){
+        BinaryOperator<String> b = (x,y) -> x+y;
+ return "\nUND:" + this.undConstraints.toString() + "\n" +
+ "ODER:" + this.oderConstraints.stream().reduce("", (x,y) -> x.toString()+ "\n" +y, b);
+ //cartesianProduct().toString();
+ }
+
+    public Set<List<Constraint<A>>> cartesianProduct(){
+        Set<Constraint<A>> toAdd = new HashSet<>();
+        toAdd.add(undConstraints);
+        List<Set<Constraint<A>>> allConstraints = new ArrayList<>();
+        allConstraints.add(toAdd);
+        allConstraints.addAll(oderConstraints);
+        return new GuavaSetOperations().cartesianProduct(allConstraints);
+    }
+
+    public <B> ConstraintSet<B> map(Function<? super A, ? extends B> o) {
+        Hashtable<Constraint<A>,Constraint<B>> CSA2CSB = new Hashtable<>();
+        ConstraintSet<B> ret = new ConstraintSet<>();
+        ret.undConstraints = undConstraints.stream().map(o).collect(Collectors.toCollection(Constraint::new));
+        List<Set<Constraint<B>>> newOder = new ArrayList<>();
+        /*
+        for(Set<Constraint<A>> oderConstraint : oderConstraints){
+            oderConstraint.forEach(as -> {
+                Constraint<B> newConst = as.stream()
+                                           .map(o)
+                                           .collect(Collectors.toCollection(
+                                                () -> new Constraint<B>(as.isInherited())));
+                CSA2CSB.put(as, newConst);} );
+        }
+        */
+
+        for(Set<Constraint<A>> oderConstraint : oderConstraints){
+            newOder.add(
+                oderConstraint.parallelStream().map((Constraint<A> as) -> {
+
+                    Constraint<B> newConst = as.stream()
+                                               .map(o)
+                                               .collect(Collectors.toCollection((as.getExtendConstraint() != null)
+                                                   ? () -> new Constraint<B>(as.isInherited(),
+                                                              as.getExtendConstraint().stream().map(o).collect(Collectors.toCollection(Constraint::new)),
+                                                              as.getmethodSignatureConstraint().stream().map(o).collect(Collectors.toCollection(HashSet::new)))
+                                                   : () -> new Constraint<B>(as.isInherited())
+                                                   ));
+
+                    //CSA2CSB.put(as, newConst);
+
+                    return newConst;
+
+                    /*
+                    Constraint<B> bs = CSA2CSB.get(as);
+                    if (as.getExtendConstraint() != null) {
+                        bs.setExtendConstraint(CSA2CSB.get(as.getExtendConstraint()));
+                    }
+                    return bs;
+                    */
+                }).collect(Collectors.toSet())
+                );
+        }
+
+        ret.oderConstraints = newOder;
+        return ret;
+    }
+
+    public void forEach (Consumer<? super A> c) {
+        undConstraints.stream().forEach(c);
+        for(Set<Constraint<A>> oderConstraint : oderConstraints){
+            oderConstraint.parallelStream().forEach((Constraint<A> as) ->
+                as.stream().forEach(c));
+        }
+    }
+
+    public Set<A> getAll () {
+        Set<A> ret = new HashSet<>();
+        ret.addAll(undConstraints);
+        for(Set<Constraint<A>> oderConstraint : oderConstraints){
+            oderConstraint.parallelStream().forEach((Constraint<A> as) -> ret.addAll(as));
+        }
+        return ret;
+    }
+
+    public List<Set<Constraint<A>>> getOderConstraints() {
+        return oderConstraints;
+    }
+
+    public Set<A> getUndConstraints() {
+        return undConstraints;
+    }
+}
diff --git a/src/main/java/de/dhbwstuttgart/typeinference/constraints/Pair.java b/src/main/java/de/dhbwstuttgart/typeinference/constraints/Pair.java
new file mode 100644
index 0000000..cbb9899
--- /dev/null
+++ b/src/main/java/de/dhbwstuttgart/typeinference/constraints/Pair.java
@@ -0,0 +1,13 @@
+package de.dhbwstuttgart.typeinference.constraints;
+import java.io.Serializable;
+import java.util.HashMap;
+import java.util.Map;
+
+import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
+
+
+public class Pair implements Serializable
+{
+
+}
+// ino.end
diff --git a/src/main/java/de/dhbwstuttgart/typeinference/unify/GuavaSetOperations.java b/src/main/java/de/dhbwstuttgart/typeinference/unify/GuavaSetOperations.java
new file mode 100644
index 0000000..19e7c48
--- /dev/null
+++ b/src/main/java/de/dhbwstuttgart/typeinference/unify/GuavaSetOperations.java
@@ -0,0 +1,23 @@
+package de.dhbwstuttgart.typeinference.unify;
+
+import java.util.List;
+import java.util.Set;
+
+import com.google.common.collect.Sets;
+
+import de.dhbwstuttgart.typeinference.unify.interfaces.ISetOperations;
+
+/**
+ * Implements set operations using google guava.
+ * @author DH10STF
+ *
+ */
+public class GuavaSetOperations implements ISetOperations {
+
+ @Override
+    public <B> Set<List<B>> cartesianProduct(List<? extends Set<? extends B>> sets) {
+ // Wraps the call to google guava
+ return Sets.cartesianProduct(sets);
+ }
+
+}
diff --git a/src/main/java/de/dhbwstuttgart/typeinference/unify/MartelliMontanariUnify.java b/src/main/java/de/dhbwstuttgart/typeinference/unify/MartelliMontanariUnify.java
new file mode 100644
index 0000000..2fceb24
--- /dev/null
+++ b/src/main/java/de/dhbwstuttgart/typeinference/unify/MartelliMontanariUnify.java
@@ -0,0 +1,108 @@
+package de.dhbwstuttgart.typeinference.unify;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Optional;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify;
+import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
+import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
+import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
+import de.dhbwstuttgart.typeinference.unify.model.Unifier;
+import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
+import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
+
+/**
+ * Implementation of the Martelli-Montanari unification algorithm.
+ * @author Florian Steurer
+ */
+public class MartelliMontanariUnify implements IUnify {
+
+ @Override
+    public Optional<Unifier> unify(Set<UnifyType> terms) {
+ // Sets with less than 2 terms are trivially unified
+ if(terms.size() < 2)
+ return Optional.of(Unifier.identity());
+
+ // For the the set of terms {t1,...,tn},
+ // build a list of equations {(t1 = t2), (t2 = t3), (t3 = t4), ....}
+        ArrayList<UnifyPair> termsList = new ArrayList<UnifyPair>();
+        Iterator<UnifyType> iter = terms.iterator();
+ UnifyType prev = iter.next();
+ while(iter.hasNext()) {
+ UnifyType next = iter.next();
+ termsList.add(new UnifyPair(prev, next, PairOperator.EQUALSDOT));
+ prev = next;
+ }
+
+ // Start with the identity unifier. Substitutions will be added later.
+ Unifier mgu = Unifier.identity();
+
+ // Apply rules while possible
+ int idx = 0;
+ while(idx < termsList.size()) {
+ UnifyPair pair = termsList.get(idx);
+ UnifyType rhsType = pair.getRhsType();
+ UnifyType lhsType = pair.getLhsType();
+ TypeParams rhsTypeParams = rhsType.getTypeParams();
+ TypeParams lhsTypeParams = lhsType.getTypeParams();
+
+ // REDUCE - Rule
+ if(!(rhsType instanceof PlaceholderType) && !(lhsType instanceof PlaceholderType)) {
+                Set<UnifyPair> result = new HashSet<>();
+
+ // f<...> = g<...> with f != g are not unifiable
+ if(!rhsType.getName().equals(lhsType.getName()))
+ return Optional.empty(); // conflict
+ // f = f are not unifiable
+ if(rhsTypeParams.size() != lhsTypeParams.size())
+ return Optional.empty(); // conflict
+ // f = g is not unifiable (cannot be f = f because erase rule would have been applied)
+ //if(rhsTypeParams.size() == 0)
+ //return Optional.empty();
+
+ // Unpack the arguments
+ for(int i = 0; i < rhsTypeParams.size(); i++)
+ result.add(new UnifyPair(rhsTypeParams.get(i), lhsTypeParams.get(i), PairOperator.EQUALSDOT));
+
+ termsList.remove(idx);
+ termsList.addAll(result);
+ continue;
+ }
+
+ // DELETE - Rule
+ if(pair.getRhsType().equals(pair.getLhsType())) {
+ termsList.remove(idx);
+ continue;
+ }
+
+ // SWAP - Rule
+ if(!(lhsType instanceof PlaceholderType) && (rhsType instanceof PlaceholderType)) {
+ termsList.remove(idx);
+ termsList.add(new UnifyPair(rhsType, lhsType, PairOperator.EQUALSDOT));
+ continue;
+ }
+
+ // OCCURS-CHECK
+ if(pair.getLhsType() instanceof PlaceholderType
+ && pair.getRhsType().getTypeParams().occurs((PlaceholderType) pair.getLhsType()))
+ return Optional.empty();
+
+ // SUBST - Rule
+ if(lhsType instanceof PlaceholderType) {
+ mgu.add((PlaceholderType) lhsType, rhsType);
+ //PL 2018-04-01 nach checken, ob es richtig ist, dass keine Substitutionen uebergeben werden muessen.
+ termsList = termsList.stream().map(x -> mgu.apply(x)).collect(Collectors.toCollection(ArrayList::new));
+ idx = idx+1 == termsList.size() ? 0 : idx+1;
+ continue;
+ }
+
+ idx++;
+ }
+
+ return Optional.of(mgu);
+ }
+}
diff --git a/src/main/java/de/dhbwstuttgart/typeinference/unify/Match.java b/src/main/java/de/dhbwstuttgart/typeinference/unify/Match.java
new file mode 100644
index 0000000..9140661
--- /dev/null
+++ b/src/main/java/de/dhbwstuttgart/typeinference/unify/Match.java
@@ -0,0 +1,92 @@
+package de.dhbwstuttgart.typeinference.unify;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Optional;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import de.dhbwstuttgart.typeinference.unify.interfaces.IMatch;
+import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
+import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
+import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
+import de.dhbwstuttgart.typeinference.unify.model.Unifier;
+import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
+import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
+
+/**
+ * Implementation of match derived from unification algorithm.
+ * @author Martin Pluemicke
+ */
+public class Match implements IMatch {
+
+ @Override
+ //vorne muss das Pattern stehen
+ //A =. A ==> True
+ //A =. A ==> False
+    public Optional<Unifier> match(ArrayList<UnifyPair> termsList) {
+
+ // Start with the identity unifier. Substitutions will be added later.
+ Unifier mgu = Unifier.identity();
+
+ // Apply rules while possible
+ int idx = 0;
+ while(idx < termsList.size()) {
+ UnifyPair pair = termsList.get(idx);
+ UnifyType rhsType = pair.getRhsType();
+ UnifyType lhsType = pair.getLhsType();
+ TypeParams rhsTypeParams = rhsType.getTypeParams();
+ TypeParams lhsTypeParams = lhsType.getTypeParams();
+
+ // REDUCE - Rule
+ if(!(rhsType instanceof PlaceholderType) && !(lhsType instanceof PlaceholderType)) {
+                Set<UnifyPair> result = new HashSet<>();
+
+ // f<...> = g<...> with f != g are not unifiable
+ if(!rhsType.getName().equals(lhsType.getName()))
+ return Optional.empty(); // conflict
+ // f = f are not unifiable
+ if(rhsTypeParams.size() != lhsTypeParams.size())
+ return Optional.empty(); // conflict
+ // f = g is not unifiable (cannot be f = f because erase rule would have been applied)
+ //if(rhsTypeParams.size() == 0)
+ //return Optional.empty();
+
+ // Unpack the arguments
+ for(int i = 0; i < rhsTypeParams.size(); i++)
+ result.add(new UnifyPair(lhsTypeParams.get(i), rhsTypeParams.get(i), PairOperator.EQUALSDOT));
+
+ termsList.remove(idx);
+ termsList.addAll(result);
+ continue;
+ }
+
+ // DELETE - Rule
+ if(pair.getRhsType().equals(pair.getLhsType())) {
+ termsList.remove(idx);
+ continue;
+ }
+
+ // SWAP - Rule
+ if(!(lhsType instanceof PlaceholderType) && (rhsType instanceof PlaceholderType)) {
+ return Optional.empty(); // conflict
+ }
+
+ // OCCURS-CHECK
+ //deleted
+
+ // SUBST - Rule
+ if(lhsType instanceof PlaceholderType) {
+ mgu.add((PlaceholderType) lhsType, rhsType);
+ termsList = termsList.stream().map(mgu::applyleft).collect(Collectors.toCollection(ArrayList::new));
+ idx = idx+1 == termsList.size() ? 0 : idx+1;
+ continue;
+ }
+
+ idx++;
+ }
+
+ return Optional.of(mgu);
+ }
+}
diff --git a/src/main/java/de/dhbwstuttgart/typeinference/unify/RuleSet.java b/src/main/java/de/dhbwstuttgart/typeinference/unify/RuleSet.java
new file mode 100644
index 0000000..3355975
--- /dev/null
+++ b/src/main/java/de/dhbwstuttgart/typeinference/unify/RuleSet.java
@@ -0,0 +1,1050 @@
+package de.dhbwstuttgart.typeinference.unify;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Optional;
+import java.util.Queue;
+import java.util.Set;
+import java.util.Stack;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
+import de.dhbwstuttgart.exceptions.DebugException;
+import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
+import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
+import de.dhbwstuttgart.typeinference.unify.model.ExtendsType;
+import de.dhbwstuttgart.typeinference.unify.model.FunNType;
+import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
+import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
+import de.dhbwstuttgart.typeinference.unify.model.ReferenceType;
+import de.dhbwstuttgart.typeinference.unify.model.SuperType;
+import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
+import de.dhbwstuttgart.typeinference.unify.model.Unifier;
+import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
+import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
+import de.dhbwstuttgart.typeinference.unify.model.WildcardType;
+import de.dhbwstuttgart.typeinference.constraints.Constraint;
+import de.dhbwstuttgart.typeinference.unify.distributeVariance;
+
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.Writer;
+import java.io.OutputStreamWriter;
+
+import org.apache.commons.io.output.NullOutputStream;
+
+/**
+ * Implementation of the type inference rules.
+ * @author Florian Steurer
+ *
+ */
+public class RuleSet implements IRuleSet{
+
+ Writer logFile;
+
+ public RuleSet() {
+ super();
+ logFile = new OutputStreamWriter(new NullOutputStream());
+ }
+
+ RuleSet(Writer logFile) {
+ this.logFile = logFile;
+ }
+
+ @Override
+    public Optional<UnifyPair> reduceUp(UnifyPair pair) {
+ // Check if reduce up is applicable
+ if(pair.getPairOp() != PairOperator.SMALLERDOT)
+ return Optional.empty();
+
+ UnifyType rhsType = pair.getRhsType();
+ if(!(rhsType instanceof SuperType))
+ return Optional.empty();
+
+ UnifyType lhsType = pair.getLhsType();
+ if(!(lhsType instanceof ReferenceType) && !(lhsType instanceof PlaceholderType))
+ return Optional.empty();
+
+ // Rule is applicable, unpack the SuperType
+ return Optional.of(new UnifyPair(lhsType, ((SuperType) rhsType).getSuperedType(), PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
+ }
+
+ @Override
+    public Optional<UnifyPair> reduceLow(UnifyPair pair) {
+ // Check if rule is applicable
+ if(pair.getPairOp() != PairOperator.SMALLERDOT)
+ return Optional.empty();
+
+ UnifyType lhsType = pair.getLhsType();
+ if(!(lhsType instanceof ExtendsType))
+ return Optional.empty();
+
+ UnifyType rhsType = pair.getRhsType();
+ if(!(rhsType instanceof ReferenceType) && !(rhsType instanceof PlaceholderType))
+ return Optional.empty();
+
+ // Rule is applicable, unpack the ExtendsType
+ return Optional.of(new UnifyPair(((ExtendsType) lhsType).getExtendedType(), rhsType, PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
+ }
+
+ @Override
+    public Optional<UnifyPair> reduceUpLow(UnifyPair pair) {
+ // Check if rule is applicable
+ if(pair.getPairOp() != PairOperator.SMALLERDOT)
+ return Optional.empty();
+
+ UnifyType lhsType = pair.getLhsType();
+ if(!(lhsType instanceof ExtendsType))
+ return Optional.empty();
+
+ UnifyType rhsType = pair.getRhsType();
+ if(!(rhsType instanceof SuperType))
+ return Optional.empty();
+
+ // Rule is applicable, unpack both sides
+ return Optional.of(new UnifyPair(((ExtendsType) lhsType).getExtendedType(),((SuperType) rhsType).getSuperedType(), PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
+ }
+
+ @Override
+    public Optional<Set<UnifyPair>> reduceExt(UnifyPair pair, IFiniteClosure fc) {
+ if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
+ return Optional.empty();
+
+ UnifyType x = pair.getLhsType();
+ UnifyType sTypeX;
+
+ if(x instanceof ReferenceType)
+ sTypeX = x;
+ else if(x instanceof ExtendsType)
+ sTypeX = ((ExtendsType) x).getExtendedType();
+ else
+ return Optional.empty();
+
+ UnifyType extY = pair.getRhsType();
+
+ if(!(extY instanceof ExtendsType))
+ return Optional.empty();
+
+ if(x.getTypeParams().empty() || extY.getTypeParams().size() != x.getTypeParams().size())
+ return Optional.empty();
+
+ UnifyType xFromFc = fc.getLeftHandedType(sTypeX.getName()).orElse(null);
+
+ if(xFromFc == null || !xFromFc.getTypeParams().arePlaceholders())
+ return Optional.empty();
+
+ if(x instanceof ExtendsType)
+ xFromFc = new ExtendsType(xFromFc);
+
+ UnifyType extYFromFc = fc.grArg(xFromFc, new HashSet<>()).stream().filter(t -> t.getName().equals(extY.getName())).filter(t -> t.getTypeParams().arePlaceholders()).findAny().orElse(null);
+
+ if(extYFromFc == null || extYFromFc.getTypeParams() != xFromFc.getTypeParams())
+ return Optional.empty();
+
+ TypeParams extYParams = extY.getTypeParams();
+ TypeParams xParams = x.getTypeParams();
+
+ int[] pi = pi(xParams, extYParams);
+
+ if(pi.length == 0)
+ return Optional.empty();
+
+        Set<UnifyPair> result = new HashSet<>();
+
+ for(int rhsIdx = 0; rhsIdx < extYParams.size(); rhsIdx++)
+ result.add(new UnifyPair(xParams.get(pi[rhsIdx]), extYParams.get(rhsIdx), PairOperator.SMALLERDOTWC, pair.getSubstitution(), pair.getBasePair()));
+
+ return Optional.of(result);
+ }
+
+ @Override
+    public Optional<Set<UnifyPair>> reduceSup(UnifyPair pair, IFiniteClosure fc) {
+ if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
+ return Optional.empty();
+
+ UnifyType x = pair.getLhsType();
+ UnifyType sTypeX;
+
+ if(x instanceof ReferenceType)
+ sTypeX = x;
+ else if(x instanceof SuperType)
+ sTypeX = ((SuperType) x).getSuperedType();
+ else
+ return Optional.empty();
+
+ UnifyType supY = pair.getRhsType();
+
+ if(!(supY instanceof SuperType))
+ return Optional.empty();
+
+ if(x.getTypeParams().empty() || supY.getTypeParams().size() != x.getTypeParams().size())
+ return Optional.empty();
+
+ UnifyType xFromFc = fc.getLeftHandedType(sTypeX.getName()).orElse(null);
+
+ if(xFromFc == null || !xFromFc.getTypeParams().arePlaceholders())
+ return Optional.empty();
+
+ if(x instanceof SuperType)
+ xFromFc = new SuperType(xFromFc);
+
+ UnifyType supYFromFc = fc.grArg(xFromFc, new HashSet<>()).stream().filter(t -> t.getName().equals(supY.getName())).filter(t -> t.getTypeParams().arePlaceholders()).findAny().orElse(null);
+
+ if(supYFromFc == null || supYFromFc.getTypeParams() != xFromFc.getTypeParams())
+ return Optional.empty();
+
+ TypeParams supYParams = supY.getTypeParams();
+ TypeParams xParams = x.getTypeParams();
+        Set<UnifyPair> result = new HashSet<>();
+
+ int[] pi = pi(xParams, supYParams);
+
+ if(pi.length == 0)
+ return Optional.empty();
+
+ for(int rhsIdx = 0; rhsIdx < supYParams.size(); rhsIdx++)
+ result.add(new UnifyPair(supYParams.get(rhsIdx), xParams.get(pi[rhsIdx]), PairOperator.SMALLERDOTWC, pair.getSubstitution(), pair.getBasePair()));
+
+ return Optional.of(result);
+ }
+
+ @Override
+    public Optional<Set<UnifyPair>> reduceEq(UnifyPair pair) {
+ if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
+ return Optional.empty();
+
+ UnifyType lhsType = pair.getLhsType();
+ if(lhsType instanceof PlaceholderType || lhsType.getTypeParams().empty())
+ return Optional.empty();
+
+ UnifyType rhsType = pair.getRhsType();
+
+ if(!rhsType.getName().equals(lhsType.getName()))
+ return Optional.empty();
+
+ if(rhsType instanceof PlaceholderType || lhsType instanceof PlaceholderType || rhsType.getTypeParams().empty())
+ return Optional.empty();
+
+ if(rhsType.getTypeParams().size() != lhsType.getTypeParams().size())
+ return Optional.empty();
+
+ // Keine Permutation wie im Paper nötig
+ Set result = new HashSet<>();
+ TypeParams lhsTypeParams = lhsType.getTypeParams();
+ TypeParams rhsTypeParams = rhsType.getTypeParams();
+
+ for(int i = 0; i < lhsTypeParams.size(); i++)
+ result.add(new UnifyPair(lhsTypeParams.get(i), rhsTypeParams.get(i), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
+
+ return Optional.of(result);
+ }
+
+ @Override
+ public Optional> reduce1(UnifyPair pair, IFiniteClosure fc) {
+ if(pair.getPairOp() != PairOperator.SMALLERDOT)
+ return Optional.empty();
+
+ UnifyType c = pair.getLhsType();
+ if(!(c instanceof ReferenceType))
+ return Optional.empty();
+
+ UnifyType d = pair.getRhsType();
+ if(!(d instanceof ReferenceType))
+ return Optional.empty();
+
+ ReferenceType lhsSType = (ReferenceType) c;
+ ReferenceType rhsSType = (ReferenceType) d;
+
+ //try {
+ // logFile.write("PAIR Rules: " + pair + "\n");
+ // logFile.flush();
+ //}
+ //catch (IOException e) { }
+
+ if(lhsSType.getTypeParams().empty() || lhsSType.getTypeParams().size() != rhsSType.getTypeParams().size())
+ return Optional.empty();
+
+ UnifyType cFromFc = fc.getLeftHandedType(c.getName()).orElse(null);
+ //2018-02-23: liefert Vector>: Das kann nicht sein.
+
+ //NOCHMAL UEBERPRUEFEN
+ //PL 18-02-09 Eingfuegt Anfang
+ //C und D koennen auch gleich sein.
+ if (c.getName().equals(d.getName())) {
+ Set result = new HashSet<>();
+ TypeParams rhsTypeParams = d.getTypeParams();
+ TypeParams lhsTypeParams = c.getTypeParams();
+ for(int rhsIdx = 0; rhsIdx < c.getTypeParams().size(); rhsIdx++)
+ result.add(new UnifyPair(lhsTypeParams.get(rhsIdx), rhsTypeParams.get(rhsIdx), PairOperator.SMALLERDOTWC, pair.getSubstitution(), pair.getBasePair()));
+
+ return Optional.of(result);
+ }
+ //PL 18-02-09 Eingfuegt ENDE
+
+ //try {
+ // logFile.write("cFromFc: " + cFromFc);
+ // logFile.flush();
+ //}
+ //catch (IOException e) { }
+
+ if(cFromFc == null || !cFromFc.getTypeParams().arePlaceholders())
+ return Optional.empty();
+
+ UnifyType dFromFc = fc.getAncestors(cFromFc).stream().filter(x -> x.getName().equals(d.getName())).findAny().orElse(null);
+
+ //try {
+ // logFile.write("cFromFc: " + cFromFc);
+ // logFile.flush();
+ //}
+ //catch (IOException e) { }
+
+ if(dFromFc == null || !dFromFc.getTypeParams().arePlaceholders() || dFromFc.getTypeParams().size() != cFromFc.getTypeParams().size())
+ return Optional.empty();
+ //System.out.println("cFromFc: " + cFromFc);
+ //System.out.println("dFromFc: " + dFromFc);
+ int[] pi = pi(cFromFc.getTypeParams(), dFromFc.getTypeParams());
+
+ if(pi.length == 0)
+ return Optional.empty();
+
+ TypeParams rhsTypeParams = d.getTypeParams();
+ TypeParams lhsTypeParams = c.getTypeParams();
+ Set result = new HashSet<>();
+
+ for(int rhsIdx = 0; rhsIdx < rhsTypeParams.size(); rhsIdx++)
+ result.add(new UnifyPair(lhsTypeParams.get(pi[rhsIdx]), rhsTypeParams.get(rhsIdx), PairOperator.SMALLERDOTWC, pair.getSubstitution(), pair.getBasePair()));
+
+ return Optional.of(result);
+ }
+
+ @Override
+ public Optional> reduce2(UnifyPair pair) {
+ if(pair.getPairOp() != PairOperator.EQUALSDOT)
+ return Optional.empty();
+
+ UnifyType lhsType = pair.getLhsType();
+ ReferenceType lhsSType;
+ UnifyType rhsType = pair.getRhsType();
+ ReferenceType rhsSType;
+
+ if ((lhsType instanceof ReferenceType) && (rhsType instanceof ReferenceType)) {
+ lhsSType = (ReferenceType) lhsType;
+ rhsSType = (ReferenceType) rhsType;
+ }
+ else if (((lhsType instanceof ExtendsType) && (rhsType instanceof ExtendsType))
+ || ((lhsType instanceof SuperType) && (rhsType instanceof SuperType))) {
+ UnifyType lhsSTypeRaw = ((WildcardType) lhsType).getWildcardedType();
+ UnifyType rhsSTypeRaw = ((WildcardType) rhsType).getWildcardedType();
+ if ((lhsSTypeRaw instanceof ReferenceType) && (rhsSTypeRaw instanceof ReferenceType)) {
+ lhsSType = (ReferenceType) lhsSTypeRaw;
+ rhsSType = (ReferenceType) rhsSTypeRaw;
+ }
+ else
+ return Optional.empty();
+ }
+ else
+ return Optional.empty();
+
+ if(lhsSType.getTypeParams().empty())
+ return Optional.empty();
+
+ /* PL 2018-01-22 in obere Teil integriert
+ UnifyType rhsType = pair.getRhsType();
+ ReferenceType rhsSType;
+
+ if(rhsType instanceof ReferenceType)
+ rhsSType = (ReferenceType) rhsType;
+ else if(rhsType instanceof WildcardType) {
+ UnifyType rhsSTypeRaw = ((WildcardType) rhsType).getWildcardedType();
+ if(rhsSTypeRaw instanceof ReferenceType)
+ rhsSType = (ReferenceType) rhsSTypeRaw;
+ else
+ return Optional.empty();
+ }
+ else
+ return Optional.empty();
+ */
+
+ if(!rhsSType.getName().equals(lhsSType.getName()))
+ return Optional.empty();
+
+ if(!(lhsSType.getTypeParams().size()==rhsSType.getTypeParams().size()))throw new DebugException("Fehler in Unifizierung"+ " " + lhsSType.toString() + " " + rhsSType.toString());
+ //if(rhsSType.getTypeParams().size() != lhsSType.getTypeParams().size())
+ // return Optional.empty();
+
+ Set result = new HashSet<>();
+
+ TypeParams rhsTypeParams = rhsSType.getTypeParams();
+ TypeParams lhsTypeParams = lhsSType.getTypeParams();
+ for(int i = 0; i < rhsTypeParams.size(); i++)
+ result.add(new UnifyPair(lhsTypeParams.get(i), rhsTypeParams.get(i), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
+
+ return Optional.of(result);
+ }
+
+ @Override
+ public boolean erase1(UnifyPair pair, IFiniteClosure fc) {
+ if((pair.getPairOp() != PairOperator.SMALLERDOT) && (pair.getPairOp() != PairOperator.SMALLERNEQDOT))
+ return false;
+
+ UnifyType lhsType = pair.getLhsType();
+ UnifyType rhsType = pair.getRhsType();
+
+ /*
+ * ty <. ? extends ty' is wrong
+ */
+ if (rhsType instanceof ExtendsType) {
+ return false;
+ }
+
+ /*
+ * ? super ty <. ty' is wrong
+ * except Ty' = Object or ty' = ? super Object
+ */
+ if ((lhsType instanceof SuperType) &&
+ (!(rhsType.equals(new ReferenceType("java.lang.Object", false)))) &&
+ !(rhsType.equals(new SuperType (new ReferenceType("java.lang.Object", false))))) {
+ return false;
+ }
+
+ /*
+ * ? extends ty <. ty' is equivalent to ty < ty'
+ */
+ if (lhsType instanceof ExtendsType) {
+ lhsType = ((WildcardType)lhsType).getWildcardedType();
+ }
+
+ /*
+ * ty <. ? super ty' ist equivalent to ty <. ty'
+ */
+ if (rhsType instanceof SuperType) {
+ rhsType = ((WildcardType)rhsType).getWildcardedType();
+ }
+
+ /*
+ * SMALLERNEQDOT => type must not be equal
+ */
+ if (pair.getPairOp() == PairOperator.SMALLERNEQDOT && lhsType.equals(rhsType)){
+ return false;
+ }
+
+ if(!(lhsType instanceof ReferenceType) && !(lhsType instanceof PlaceholderType))
+ return false;
+
+
+ if(!(rhsType instanceof ReferenceType) && !(rhsType instanceof PlaceholderType))
+ return false;
+
+ return fc.greater(lhsType, new HashSet<>()).contains(rhsType);
+ }
+
+ @Override
+ public boolean erase2(UnifyPair pair, IFiniteClosure fc) {
+ if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
+ return false;
+
+ UnifyType lhsType = pair.getLhsType();
+ UnifyType rhsType = pair.getRhsType();
+
+ return fc.grArg(lhsType, new HashSet<>()).contains(rhsType);
+ }
+
+ @Override
+ public boolean erase3(UnifyPair pair) {
+ if(pair.getPairOp() != PairOperator.EQUALSDOT)
+ return false;
+
+ return pair.getLhsType().equals(pair.getRhsType());
+ }
+
+ @Override
+ public Optional swap(UnifyPair pair) {
+ if(pair.getPairOp() != PairOperator.EQUALSDOT)
+ return Optional.empty();
+
+ if(pair.getLhsType() instanceof PlaceholderType)
+ return Optional.empty();
+
+ if(!(pair.getRhsType() instanceof PlaceholderType))
+ return Optional.empty();
+
+ return Optional.of(new UnifyPair(pair.getRhsType(), pair.getLhsType(), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
+ }
+
+ @Override
+ public Optional adapt(UnifyPair pair, IFiniteClosure fc) {
+ if(pair.getPairOp() != PairOperator.SMALLERDOT)
+ return Optional.empty();
+
+ UnifyType typeD = pair.getLhsType();
+ if(!(typeD instanceof ReferenceType))
+ return Optional.empty();
+
+ UnifyType typeDs = pair.getRhsType();
+ if(!(typeDs instanceof ReferenceType))
+ return Optional.empty();
+
+ /*if(typeD.getTypeParams().size() == 0 || typeDs.getTypeParams().size() == 0)
+ return Optional.empty();*/
+
+ if(typeD.getName().equals(typeDs.getName()))
+ return Optional.empty();
+
+
+ Optional opt = fc.getLeftHandedType(typeD.getName());
+ if(!opt.isPresent())
+ return Optional.empty();
+
+ // The generic Version of Type D (D)
+ UnifyType typeDgen = opt.get();
+
+ // Actually greater+ because the types are ensured to have different names
+ Set greater = fc.getAncestors(typeDgen);
+ opt = greater.stream().filter(x -> x.getName().equals(typeDs.getName())).findAny();
+
+ if(!opt.isPresent())
+ return Optional.empty();
+
+ UnifyType newLhs = opt.get();
+
+ TypeParams typeDParams = typeD.getTypeParams();
+ TypeParams typeDgenParams = typeDgen.getTypeParams();
+
+ //System.out.println("Pair: " +pair);
+ //System.out.println("typeD: " +typeD);
+ //System.out.println("typeDParams: " +typeDParams);
+ //System.out.println("typeDgen: " +typeD);
+ //System.out.println("typeDgenParams: " +typeDgenParams);
+ Unifier unif = Unifier.identity();
+ for(int i = 0; i < typeDParams.size(); i++) {
+ //System.out.println("ADAPT" +typeDgenParams);
+ if (typeDgenParams.get(i) instanceof PlaceholderType)
+ unif.add((PlaceholderType) typeDgenParams.get(i), typeDParams.get(i));
+ else System.out.println("ERROR");
+ }
+ return Optional.of(new UnifyPair(unif.apply(newLhs), typeDs, PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
+ }
+
+ @Override
+ public Optional adaptExt(UnifyPair pair, IFiniteClosure fc) {
+ if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
+ return Optional.empty();
+
+ UnifyType typeD = pair.getLhsType();
+ if(!(typeD instanceof ReferenceType) && !(typeD instanceof ExtendsType))
+ return Optional.empty();
+
+ UnifyType typeExtDs = pair.getRhsType();
+ if(!(typeExtDs instanceof ExtendsType))
+ return Optional.empty();
+
+ if(typeD.getTypeParams().size() == 0 || typeExtDs.getTypeParams().size() == 0)
+ return Optional.empty();
+
+ UnifyType typeDgen;
+ if(typeD instanceof ReferenceType)
+ typeDgen = fc.getLeftHandedType(typeD.getName()).orElse(null);
+ else {
+ Optional opt = fc.getLeftHandedType(((ExtendsType) typeD).getExtendedType().getName());
+ typeDgen = opt.isPresent() ? new ExtendsType(opt.get()) : null;
+ }
+
+ if(typeDgen == null)
+ return Optional.empty();
+
+ Set grArg = fc.grArg(typeDgen, new HashSet<>());
+
+ Optional opt = grArg.stream().filter(x -> x.getName().equals(typeExtDs.getName())).findAny();
+
+ if(!opt.isPresent())
+ return Optional.empty();
+
+ UnifyType newLhs = ((ExtendsType) opt.get()).getExtendedType();
+
+ TypeParams typeDParams = typeD.getTypeParams();
+ TypeParams typeDgenParams = typeDgen.getTypeParams();
+
+ Unifier unif = new Unifier((PlaceholderType) typeDgenParams.get(0), typeDParams.get(0));
+ for(int i = 1; i < typeDParams.size(); i++)
+ unif.add((PlaceholderType) typeDgenParams.get(i), typeDParams.get(i));
+
+ return Optional.of(new UnifyPair(unif.apply(newLhs), typeExtDs, PairOperator.SMALLERDOTWC, pair.getSubstitution(), pair.getBasePair()));
+ }
+
+ @Override
+ public Optional adaptSup(UnifyPair pair, IFiniteClosure fc) {
+ if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
+ return Optional.empty();
+
+ UnifyType typeDs = pair.getLhsType();
+ if(!(typeDs instanceof ReferenceType) && !(typeDs instanceof SuperType))
+ return Optional.empty();
+
+ UnifyType typeSupD = pair.getRhsType();
+ if(!(typeSupD instanceof SuperType))
+ return Optional.empty();
+
+ if(typeDs.getTypeParams().size() == 0 || typeSupD.getTypeParams().size() == 0)
+ return Optional.empty();
+
+
+ Optional opt = fc.getLeftHandedType(((SuperType) typeSupD).getSuperedType().getName());
+
+ if(!opt.isPresent())
+ return Optional.empty();
+
+ UnifyType typeDgen = opt.get();
+ UnifyType typeSupDgen = new SuperType(typeDgen);
+
+ // Use of smArg instead of grArg because
+ // a in grArg(b) => b in smArg(a)
+ Set smArg = fc.smArg(typeSupDgen, new HashSet<>());
+ opt = smArg.stream().filter(x -> x.getName().equals(typeDs.getName())).findAny();
+
+ if(!opt.isPresent())
+ return Optional.empty();
+
+ // New RHS
+ UnifyType newRhs = null;
+ if(typeDs instanceof ReferenceType)
+ newRhs = new ExtendsType(typeDs);
+ else
+ newRhs = new ExtendsType(((SuperType) typeDs).getSuperedType());
+
+ // New LHS
+ UnifyType newLhs = opt.get();
+ TypeParams typeDParams = typeSupD.getTypeParams();
+ TypeParams typeSupDsgenParams = typeSupDgen.getTypeParams();
+
+ Unifier unif = new Unifier((PlaceholderType) typeSupDsgenParams.get(0), typeDParams.get(0));
+ for(int i = 1; i < typeDParams.size(); i++)
+ unif.add((PlaceholderType) typeSupDsgenParams.get(i), typeDParams.get(i));
+
+ return Optional.of(new UnifyPair(unif.apply(newLhs), newRhs, PairOperator.SMALLERDOTWC, pair.getSubstitution(), pair.getBasePair()));
+ }
+
+ /**
+ * Finds the permutation pi of the type arguments of two types based on the finite closure
+ * @param cArgs The type which arguments are permuted
+ * @param dArgs The other type
+ * @return An array containing the values of pi for every type argument of C or an empty array if the search failed.
+ */
+ private int[] pi(TypeParams cArgs, TypeParams dArgs) {
+ if(!(cArgs.size()==dArgs.size()))throw new DebugException("Fehler in Unifizierung");
+
+ int[] permutation = new int[dArgs.size()];
+
+ boolean succ = true;
+ for (int dArgIdx = 0; dArgIdx < dArgs.size() && succ; dArgIdx++) {
+ UnifyType dArg = dArgs.get(dArgIdx);
+ succ = false;
+ for (int pi = 0; pi < cArgs.size(); pi++)
+ if (cArgs.get(pi).getName().equals(dArg.getName())) {
+ permutation[dArgIdx] = pi;
+ succ = true;
+ break;
+ }
+ }
+
+ return succ ? permutation : new int[0];
+ }
+
+ public Optional> subst(Set pairs) {
+ return subst(pairs, new ArrayList<>());
+ }
+
+ @Override
+ public Optional> subst(Set pairs, List>> oderConstraints) {
+ HashMap typeMap = new HashMap<>();
+
+ Stack occuringTypes = new Stack<>();
+
+ for(UnifyPair pair : pairs) {
+ occuringTypes.push(pair.getLhsType());
+ occuringTypes.push(pair.getRhsType());
+ }
+
+ while(!occuringTypes.isEmpty()) {
+ UnifyType t1 = occuringTypes.pop();
+ if(!typeMap.containsKey(t1))
+ typeMap.put(t1, 0);
+ typeMap.put(t1, typeMap.get(t1)+1);
+
+ if(t1 instanceof ExtendsType)
+ occuringTypes.push(((ExtendsType) t1).getExtendedType());
+ if(t1 instanceof SuperType)
+ occuringTypes.push(((SuperType) t1).getSuperedType());
+ else
+ t1.getTypeParams().forEach(x -> occuringTypes.push(x));
+ }
+ Queue result1 = new LinkedList(pairs);
+ ArrayList result = new ArrayList();
+ boolean applied = false;
+
+ while(!result1.isEmpty()) {
+ UnifyPair pair = result1.poll();
+ PlaceholderType lhsType = null;
+ UnifyType rhsType;
+
+ if(pair.getPairOp() == PairOperator.EQUALSDOT
+ && pair.getLhsType() instanceof PlaceholderType)
+ lhsType = (PlaceholderType) pair.getLhsType();
+ rhsType = pair.getRhsType(); //PL eingefuegt 2017-09-29 statt !((rhsType = pair.getRhsType()) instanceof PlaceholderType)
+
+ if(lhsType != null
+ //&& !((rhsType = pair.getRhsType()) instanceof PlaceholderType) //PL geloescht am 2017-09-29 Begründung: auch Typvariablen muessen ersetzt werden.
+ && typeMap.get(lhsType) > 1 // The type occurs in more pairs in the set than just the recent pair.
+ && !rhsType.getTypeParams().occurs(lhsType)
+ && !((rhsType instanceof WildcardType) && ((WildcardType)rhsType).getWildcardedType().equals(lhsType))) //PL eigefuegt 2018-02-18
+ {
+ Unifier uni = new Unifier(lhsType, rhsType);
+ result = result.stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(ArrayList::new));
+ result1 = result1.stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(LinkedList::new));
+
+ Function super Constraint,? extends Constraint> applyUni = b -> b.stream().map(
+ x -> uni.apply(pair,x)).collect(Collectors.toCollection((b.getExtendConstraint() != null)
+ ? () -> new Constraint(
+ b.isInherited(),
+ b.getExtendConstraint().stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(Constraint::new)),
+ b.getmethodSignatureConstraint().stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(HashSet::new)))
+ : () -> new Constraint(b.isInherited())
+ ));
+ oderConstraints.replaceAll(oc -> oc.stream().map(applyUni).collect(Collectors.toCollection(HashSet::new)));
+ /*
+ oderConstraints = oderConstraints.stream().map(
+ a -> a.stream().map(applyUni
+ //b -> b.stream().map(
+ // x -> uni.apply(pair,x)).collect(Collectors.toCollection(HashSet::new) )
+ ).collect(Collectors.toCollection(HashSet::new))
+ ).collect(Collectors.toList(ArrayList::new));
+ }
+ */
+ applied = true;
+ }
+ result.add(pair);
+ }
+
+ return applied ? Optional.of(new HashSet<>(result)) : Optional.empty();
+ }
+
+ @Override
+ public Optional reduceWildcardLow(UnifyPair pair) {
+ if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
+ return Optional.empty();
+
+ UnifyType lhsType = pair.getLhsType();
+ UnifyType rhsType = pair.getRhsType();
+ if(!(lhsType instanceof ExtendsType) || !(rhsType instanceof ExtendsType))
+ return Optional.empty();
+
+ return Optional.of(new UnifyPair(((ExtendsType) lhsType).getExtendedType(), ((ExtendsType) rhsType).getExtendedType(), PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
+ }
+
+ @Override
+ public Optional reduceWildcardLowRight(UnifyPair pair) {
+ if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
+ return Optional.empty();
+
+ UnifyType lhsType = pair.getLhsType();
+ UnifyType rhsType = pair.getRhsType();
+ if(!(lhsType instanceof ReferenceType) || !(rhsType instanceof ExtendsType))
+ return Optional.empty();
+
+ return Optional.of(new UnifyPair(lhsType, ((ExtendsType) rhsType).getExtendedType(), PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
+ }
+
+ @Override
+ public Optional reduceWildcardUp(UnifyPair pair) {
+ if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
+ return Optional.empty();
+
+ UnifyType lhsType = pair.getLhsType();
+ UnifyType rhsType = pair.getRhsType();
+ if(!(lhsType instanceof SuperType) || !(rhsType instanceof SuperType))
+ return Optional.empty();
+
+ return Optional.of(new UnifyPair(((SuperType) rhsType).getSuperedType(), ((SuperType) lhsType).getSuperedType(), PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
+ }
+
+ @Override
+ public Optional reduceWildcardUpRight(UnifyPair pair) {
+ if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
+ return Optional.empty();
+
+ UnifyType lhsType = pair.getLhsType();
+ UnifyType rhsType = pair.getRhsType();
+ if(!(lhsType instanceof ReferenceType) || !(rhsType instanceof SuperType))
+ return Optional.empty();
+
+ return Optional.of(new UnifyPair(((SuperType) rhsType).getSuperedType(), lhsType, PairOperator.SMALLERDOTWC, pair.getSubstitution(), pair.getBasePair()));
+ }
+
+	/* PL 2018-03-06: the methods below are commented out because they are presumably wrong;
+	 * cf. JAVA_BSP/Wildcard6.java
+ @Override
+ public Optional reduceWildcardLowUp(UnifyPair pair) {
+ if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
+ return Optional.empty();
+
+ UnifyType lhsType = pair.getLhsType();
+ UnifyType rhsType = pair.getRhsType();
+ if(!(lhsType instanceof ExtendsType) || !(rhsType instanceof SuperType))
+ return Optional.empty();
+
+ return Optional.of(new UnifyPair(((ExtendsType) lhsType).getExtendedType(), ((SuperType) rhsType).getSuperedType(), PairOperator.EQUALSDOT));
+ }
+
+ @Override
+ public Optional reduceWildcardUpLow(UnifyPair pair) {
+ if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
+ return Optional.empty();
+
+ UnifyType lhsType = pair.getLhsType();
+ UnifyType rhsType = pair.getRhsType();
+ if(!(lhsType instanceof SuperType) || !(rhsType instanceof ExtendsType))
+ return Optional.empty();
+
+ return Optional.of(new UnifyPair(((SuperType) lhsType).getSuperedType(), ((ExtendsType) rhsType).getExtendedType(), PairOperator.EQUALSDOT));
+ }
+
+
+ @Override
+ public Optional reduceWildcardLeft(UnifyPair pair) {
+ if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
+ return Optional.empty();
+
+ UnifyType rhsType = pair.getRhsType();
+ if(!(rhsType instanceof ReferenceType))
+ return Optional.empty();
+
+ UnifyType lhsType = pair.getLhsType();
+
+ if(lhsType instanceof WildcardType)
+ return Optional.of(new UnifyPair(((WildcardType) lhsType).getWildcardedType(), rhsType, PairOperator.EQUALSDOT));
+
+ return Optional.empty();
+ }
+ */
+ @Override
+ public Optional> reduceFunN(UnifyPair pair) {
+ if((pair.getPairOp() != PairOperator.SMALLERDOT)
+ && (pair.getPairOp() != PairOperator.EQUALSDOT)) //PL 2017-10-03 hinzugefuegt
+ //da Regel auch fuer EQUALSDOT anwendbar
+ //TODO: fuer allen anderen Relationen noch pruefen
+ return Optional.empty();
+
+ UnifyType lhsType = pair.getLhsType();
+ UnifyType rhsType = pair.getRhsType();
+
+ if(!(lhsType instanceof FunNType) || !(rhsType instanceof FunNType))
+ return Optional.empty();
+
+ FunNType funNLhsType = (FunNType) lhsType;
+ FunNType funNRhsType = (FunNType) rhsType;
+
+ if(funNLhsType.getN() != funNRhsType.getN())
+ return Optional.empty();
+
+ Set result = new HashSet();
+ if (pair.getPairOp() == PairOperator.SMALLERDOT) {
+ result.add(new UnifyPair(funNLhsType.getTypeParams().get(funNLhsType.getTypeParams().size()-1), funNRhsType.getTypeParams().get(funNRhsType.getTypeParams().size()-1), PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
+ for(int i = 0; i < funNLhsType.getTypeParams().size()-1; i++) {
+ result.add(new UnifyPair(funNRhsType.getTypeParams().get(i), funNLhsType.getTypeParams().get(i), PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
+ }
+ }
+ else {// pair.getPairOp() == PairOperator.EQUALDOT
+ result.add(new UnifyPair(funNLhsType.getTypeParams().get(funNLhsType.getTypeParams().size()-1), funNRhsType.getTypeParams().get(funNRhsType.getTypeParams().size()-1), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
+ for(int i = 0; i < funNLhsType.getTypeParams().size()-1; i++) {
+ result.add(new UnifyPair(funNRhsType.getTypeParams().get(i), funNLhsType.getTypeParams().get(i), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
+ }
+ }
+ result.stream().forEach(x -> { UnifyType l = x.getLhsType();
+ if (l instanceof PlaceholderType) { ((PlaceholderType)l).disableWildcardtable(); }
+ UnifyType r = x.getRhsType();
+ if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
+ } );
+ try {
+ logFile.write("FUNgreater: " + pair + "\n");
+ logFile.write("FUNred: " + result + "\n");
+ logFile.flush();
+ }
+ catch (IOException e) {
+ System.out.println("logFile-Error");
+ }
+ return Optional.of(result);
+ }
+
+
+ @Override
+ public Optional> greaterFunN(UnifyPair pair) {
+ if(pair.getPairOp() != PairOperator.SMALLERDOT)
+ return Optional.empty();
+
+ UnifyType lhsType = pair.getLhsType();
+ UnifyType rhsType = pair.getRhsType();
+
+ if(!(lhsType instanceof FunNType) || !(rhsType instanceof PlaceholderType))
+ return Optional.empty();
+
+ FunNType funNLhsType = (FunNType) lhsType;
+
+ Set result = new HashSet();
+
+ Integer variance = ((PlaceholderType)rhsType).getVariance();
+ Integer inversVariance = distributeVariance.inverseVariance(variance);
+
+ UnifyType[] freshPlaceholders = new UnifyType[funNLhsType.getTypeParams().size()];
+ for(int i = 0; i < freshPlaceholders.length-1; i++) {
+ freshPlaceholders[i] = PlaceholderType.freshPlaceholder();
+ ((PlaceholderType)freshPlaceholders[i]).setVariance(inversVariance);
+ }
+ freshPlaceholders[freshPlaceholders.length-1] = PlaceholderType.freshPlaceholder();
+ ((PlaceholderType)freshPlaceholders[freshPlaceholders.length-1]).setVariance(variance);
+ result.add(new UnifyPair(funNLhsType.getTypeParams().get(funNLhsType.getTypeParams().size()-1), freshPlaceholders[funNLhsType.getTypeParams().size()-1], PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
+
+ for(int i = 0; i < funNLhsType.getTypeParams().size()-1; i++) {
+ result.add(new UnifyPair(freshPlaceholders[i], funNLhsType.getTypeParams().get(i), PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
+ }
+
+ result.add(new UnifyPair(rhsType, funNLhsType.setTypeParams(new TypeParams(freshPlaceholders)), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
+
+ result.stream().forEach(x -> { UnifyType l = x.getLhsType();
+ if (l instanceof PlaceholderType) { ((PlaceholderType)l).disableWildcardtable(); }
+ UnifyType r = x.getRhsType();
+ if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
+ } );
+ try {
+ logFile.write("FUNgreater: " + pair + "\n");
+ logFile.write("FUNgreater: " + result + "\n");
+ logFile.flush();
+ }
+ catch (IOException e) {
+ System.out.println("lofFile-Error");
+ }
+ return Optional.of(result);
+ }
+
+ @Override
+ public Optional> smallerFunN(UnifyPair pair) {
+ if(pair.getPairOp() != PairOperator.SMALLERDOT)
+ return Optional.empty();
+
+ UnifyType lhsType = pair.getLhsType();
+ UnifyType rhsType = pair.getRhsType();
+
+ if(!(lhsType instanceof PlaceholderType) || !(rhsType instanceof FunNType))
+ return Optional.empty();
+
+ FunNType funNRhsType = (FunNType) rhsType;
+
+ Set result = new HashSet();
+
+ Integer variance = ((PlaceholderType)lhsType).getVariance();
+ Integer inversVariance = distributeVariance.inverseVariance(variance);
+
+ UnifyType[] freshPlaceholders = new UnifyType[funNRhsType.getTypeParams().size()];
+ for(int i = 0; i < freshPlaceholders.length-1; i++) {
+ freshPlaceholders[i] = PlaceholderType.freshPlaceholder();
+ ((PlaceholderType)freshPlaceholders[i]).setVariance(inversVariance);
+ }
+ freshPlaceholders[freshPlaceholders.length-1] = PlaceholderType.freshPlaceholder();
+ ((PlaceholderType)freshPlaceholders[freshPlaceholders.length-1]).setVariance(variance);
+
+ result.add(new UnifyPair(freshPlaceholders[funNRhsType.getTypeParams().size()-1], funNRhsType.getTypeParams().get(funNRhsType.getTypeParams().size()-1), PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
+
+ for(int i = 0; i < funNRhsType.getTypeParams().size()-1; i++) {
+ result.add(new UnifyPair(funNRhsType.getTypeParams().get(i), freshPlaceholders[i], PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
+ }
+
+ result.add(new UnifyPair(lhsType, funNRhsType.setTypeParams(new TypeParams(freshPlaceholders)), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
+
+ result.stream().forEach(x -> { UnifyType l = x.getLhsType();
+ if (l instanceof PlaceholderType) { ((PlaceholderType)l).disableWildcardtable(); }
+ UnifyType r = x.getRhsType();
+ if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
+ } );
+ try {
+ logFile.write("FUNgreater: " + pair + "\n");
+ logFile.write("FUNsmaller: " + result + "\n");
+ logFile.flush();
+ }
+ catch (IOException e) {
+ System.out.println("lofFile-Error");
+ }
+ return Optional.of(result);
+ }
+
+ @Override
+ public Optional reduceTph(UnifyPair pair) {
+ if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
+ return Optional.empty();
+
+ UnifyType lhsType = pair.getLhsType();
+ UnifyType rhsType = pair.getRhsType();
+ if(!(lhsType instanceof PlaceholderType) || !(rhsType instanceof ReferenceType))
+ return Optional.empty();
+
+ return Optional.of(new UnifyPair(lhsType, rhsType, PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
+ }
+
+ @Override
+ public Optional> reduceTphExt(UnifyPair pair) {
+ if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
+ return Optional.empty();
+
+ UnifyType lhsType = pair.getLhsType();
+ UnifyType rhsType = pair.getRhsType();
+ if(!(lhsType instanceof ExtendsType) || !(rhsType instanceof PlaceholderType))
+ return Optional.empty();
+
+ UnifyType extendedType = ((ExtendsType)lhsType).getExtendedType();
+
+ if (extendedType.equals(rhsType)) return Optional.empty(); //PL 2019-02-18 eingefügt ? extends a <.? a
+
+ boolean isGen = extendedType instanceof PlaceholderType && !((PlaceholderType) extendedType).isGenerated();
+
+ Set result = new HashSet<>();
+ if(isGen)
+ result.add(new UnifyPair(rhsType, lhsType, PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
+ else {
+ UnifyType freshTph = PlaceholderType.freshPlaceholder();
+ result.add(new UnifyPair(rhsType, new ExtendsType(freshTph), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
+ result.add(new UnifyPair(extendedType, freshTph, PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair()));
+ }
+
+ return Optional.of(result);
+ }
+
+ @Override
+ public Optional> reduceTphSup(UnifyPair pair) {
+ if(pair.getPairOp() != PairOperator.SMALLERDOTWC)
+ return Optional.empty();
+
+ UnifyType lhsType = pair.getLhsType();
+ UnifyType rhsType = pair.getRhsType();
+ if(!(lhsType instanceof SuperType) || !(rhsType instanceof PlaceholderType))
+ return Optional.empty();
+
+ UnifyType superedType = ((SuperType)lhsType).getSuperedType();
+
+ if (superedType.equals(rhsType)) return Optional.empty(); //PL 2019-02-18 eingefügt ? super a <.? a
+
+ boolean isGen = superedType instanceof PlaceholderType && !((PlaceholderType) superedType).isGenerated();
+
+ Set result = new HashSet<>();
+ if(isGen)
+ result.add(new UnifyPair(rhsType, lhsType, PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
+ else {
+ UnifyType freshTph = PlaceholderType.freshPlaceholder();
+ result.add(new UnifyPair(rhsType, new SuperType(freshTph), PairOperator.EQUALSDOT, pair.getSubstitution(), pair.getBasePair()));
+ Set fBounded = pair.getfBounded();
+ fBounded.add(lhsType);
+ result.add(new UnifyPair(freshTph, superedType, PairOperator.SMALLERDOT, pair.getSubstitution(), pair.getBasePair(), fBounded));
+ }
+
+ return Optional.of(result);
+ }
+}
diff --git a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java
new file mode 100644
index 0000000..a011aa9
--- /dev/null
+++ b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java
@@ -0,0 +1,125 @@
+package de.dhbwstuttgart.typeinference.unify;
+
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.Writer;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.ForkJoinPool;
+
+import de.dhbwstuttgart.typeinference.constraints.Constraint;
+import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
+import de.dhbwstuttgart.typeinference.constraints.Pair;
+import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
+import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
+import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
+
+/**
+ * Entry points for running the type-unification algorithm in different modes
+ * (parallel, asynchronous, sequential). Each method builds a {@code TypeUnifyTask}
+ * and drives it, writing progress information to the supplied log writer.
+ */
+public class TypeUnify {
+
+ // Shared statistics sink for all unify runs.
+ // NOTE(review): static mutable field, written from instance methods; not thread-safe.
+ public static Writer statistics;
+ /**
+ * Runs unification in parallel (ForkJoinPool) and blocks until all results are computed.
+ * The result model {@code ret} is passed through to the task but the raw result set is returned.
+ * @param undConstrains the and-constraints to unify
+ * @param oderConstraints the or-constraints (alternatives)
+ * @param fc the finite closure (subtyping knowledge)
+ * @param logFile sink for log output; flushed after the run
+ * @param log whether verbose logging is enabled
+ * @param ret result model handed to the task
+ * @param usedTasks registry used for cooperative cancellation
+ * @return the set of all principal type unifiers
+ */
+ public Set> unify(Set undConstrains, List>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
+ TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
+ ForkJoinPool pool = new ForkJoinPool();
+ pool.invoke(unifyTask);
+ Set> res = unifyTask.join();
+ try {
+ logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements + "\n");
+ logFile.flush();
+ }
+ catch (IOException e) {
+ System.err.println("no log-File");
+ }
+ return res;
+ }
+
+ /**
+ * Runs unification asynchronously: returns the result model immediately,
+ * before all results have been collected. Listeners on {@code ret} receive
+ * results as they arrive.
+ * NOTE(review): {@code pool.invoke} blocks until the task completes; presumably
+ * the intent was {@code pool.execute} for a truly asynchronous start — TODO confirm.
+ * @return the (possibly still filling) result model
+ */
+ public UnifyResultModel unifyAsync(Set undConstrains, List>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
+ TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
+ ForkJoinPool pool = new ForkJoinPool();
+ pool.invoke(unifyTask);
+ return ret;
+ }
+
+ /**
+ * Runs unification in parallel and returns the result model only after all
+ * results have been collected. Also writes backtracking/loop statistics.
+ * @return the filled result model
+ */
+ public UnifyResultModel unifyParallel(Set undConstrains, List>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
+ TypeUnifyTask unifyTask = //new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
+ new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks, statistics);
+ ForkJoinPool pool = new ForkJoinPool();
+ pool.invoke(unifyTask);
+ Set> res = unifyTask.join();
+ try {
+ logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
+ logFile.flush();
+ unifyTask.statistics.write("Backtracking: " + unifyTask.noBacktracking);
+ unifyTask.statistics.write("\nLoops: " + unifyTask.noLoop);
+ }
+ catch (IOException e) {
+ System.err.println("no log-File");
+ }
+ return ret;
+ }
+
+ /*
+ public Set> unifySequential(Set eq, IFiniteClosure fc, FileWriter logFile, Boolean log) {
+ TypeUnifyTask unifyTask = new TypeUnifyTask(eq, fc, false, logFile, log);
+ Set> res = unifyTask.compute();
+ return res;
+ }
+ */
+
+ /**
+ * Runs unification sequentially (no forking) including or-constraints, on the
+ * calling thread, and blocks until the full result set is computed.
+ * @return the set of all principal type unifiers
+ */
+ public Set> unifyOderConstraints(Set undConstrains, List>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
+ TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0, ret, usedTasks);
+ unifyTask.statistics = statistics;
+ Set> res = unifyTask.compute();
+ try {
+ logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
+ logFile.flush();
+ }
+ catch (IOException e) {
+ System.err.println("no log-File");
+ }
+ return res;
+ }
+
+}
diff --git a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java
new file mode 100644
index 0000000..61d6ce8
--- /dev/null
+++ b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java
@@ -0,0 +1,76 @@
+package de.dhbwstuttgart.typeinference.unify;
+
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.Writer;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import de.dhbwstuttgart.typeinference.constraints.Constraint;
+import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
+import de.dhbwstuttgart.typeinference.constraints.Pair;
+import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
+import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
+
+/**
+ * A unify subtask that continues an already partially flattened computation:
+ * instead of starting from scratch it runs {@code unify2} on a prepared
+ * {@code setToFlatten} together with the method-signature constraints handed over
+ * from the parent task.
+ */
+public class TypeUnify2Task extends TypeUnifyTask {
+
+ // The pre-computed cartesian-product element this task has to flatten and unify.
+ Set> setToFlatten;
+ // Method-signature constraints handed over from the parent task.
+ Set methodSignatureConstraintUebergabe;
+
+ //statistics
+ /**
+ * Constructor variant that additionally supplies a statistics writer.
+ * FIX: the original delegated to the plain constructor and silently dropped
+ * the {@code statistics} parameter; it is now installed (guarded against null
+ * so the NullWriter fallback set up by the superclass is not overwritten).
+ */
+ TypeUnify2Task(Set> setToFlatten, Set eq,
+ List>> oderConstraints,
+ Set nextSetElement,
+ IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks,
+ Set methodSignatureConstraintUebergabe, Writer statistics) {
+ this(setToFlatten, eq, oderConstraints, nextSetElement, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, methodSignatureConstraintUebergabe );
+ if (statistics != null) {
+ this.statistics = statistics;
+ }
+ }
+
+ /**
+ * Main constructor: stores the task-specific state and delegates the shared
+ * setup (logging, rule set, task registration) to the superclass.
+ */
+ public TypeUnify2Task(Set> setToFlatten, Set eq, List>> oderConstraints, Set nextSetElement, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks, Set methodSignatureConstraintUebergabe) {
+ super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
+ this.setToFlatten = setToFlatten;
+ this.nextSetElement = nextSetElement;
+ this.methodSignatureConstraintUebergabe = methodSignatureConstraintUebergabe;
+ }
+
+ /** @return the element of the next set that was assigned to this task. */
+ Set getNextSetElement() {
+ return nextSetElement;
+ }
+
+ /**
+ * Runs unify2 on the prepared set; returns the empty set if the task group
+ * was cancelled in the meantime.
+ */
+ @Override
+ protected Set> compute() {
+ if (one) {
+ System.out.println("two"); // debug: compute() entered a second time
+ }
+ one = true;
+ Set> res = unify2(setToFlatten, eq, oderConstraintsField, fc, parallel, rekTiefeField, methodSignatureConstraintUebergabe);
+ /*if (isUndefinedPairSetSet(res)) {
+ return new HashSet<>(); }
+ else
+ */
+ //writeLog("xxx");
+ //noOfThread--;
+ synchronized (usedTasks) {
+ if (this.myIsCancelled()) {
+ return new HashSet<>();
+ }
+ else {
+ return res;
+ }
+ }
+ }
+
+ /** Closes this task's log file, reporting (not propagating) I/O failures. */
+ public void closeLogFile() {
+
+ try {
+ logFile.close();
+ }
+ catch (IOException ioE) {
+ System.err.println("no log-File" + thNo);
+ }
+
+ }
+}
diff --git a/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
new file mode 100644
index 0000000..ff9e6cd
--- /dev/null
+++ b/src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
@@ -0,0 +1,2641 @@
+//PL 2018-12-19: Merge checken
+package de.dhbwstuttgart.typeinference.unify;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedHashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map.Entry;
+import java.util.Optional;
+import java.util.Set;
+import java.util.concurrent.RecursiveTask;
+import java.util.function.BiFunction;
+import java.util.function.BinaryOperator;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import de.dhbwstuttgart.exceptions.DebugException;
+import org.apache.commons.io.output.NullOutputStream;
+
+import de.dhbwstuttgart.typeinference.constraints.Constraint;
+import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
+import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
+import de.dhbwstuttgart.typeinference.unify.interfaces.IMatch;
+import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
+import de.dhbwstuttgart.typeinference.unify.interfaces.ISetOperations;
+import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify;
+import de.dhbwstuttgart.typeinference.unify.model.ExtendsType;
+import de.dhbwstuttgart.typeinference.unify.model.FunNType;
+import de.dhbwstuttgart.typeinference.unify.model.OrderingExtend;
+import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
+import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
+import de.dhbwstuttgart.typeinference.unify.model.ReferenceType;
+import de.dhbwstuttgart.typeinference.unify.model.SuperType;
+import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
+import de.dhbwstuttgart.typeinference.unify.model.Unifier;
+import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
+import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
+import de.dhbwstuttgart.typeinference.unify.model.WildcardType;
+import de.dhbwstuttgart.util.Pair;
+import de.dhbwstuttgart.typeinference.unify.model.OrderingUnifyPair;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+
+import com.google.common.collect.Ordering;
+import org.apache.commons.io.output.NullWriter;
+
+
+/**
+ * Implementation of the type unification algorithm
+ * @author Florian Steurer
+ */
+public class TypeUnifyTask extends RecursiveTask>> {
+
+ private static final long serialVersionUID = 1L;
+ private static int i = 0;
+ private boolean printtag = false;
+ Boolean log = true; //gibt an ob ein Log-File nach System.getProperty("user.dir")+"/test/logFiles/log" geschrieben werden soll?
+
+ /**
+ * Element, das aus dem nextSet den Gleichunen dieses Threads hinzugefuegt wurde
+ */
+ Set nextSetElement;
+
+ /**
+ * Fuer die Threads
+ */
+ UnifyResultModel urm;
+ protected static int noOfThread = 0;
+ private static int totalnoOfThread = 0;
+ int thNo;
+ protected boolean one = false;
+ Integer MaxNoOfThreads = 128;
+
+ public static final String rootDirectory = System.getProperty("user.dir")+"/test/logFiles/";
+ Writer logFile;
+
+ /**
+ * The implementation of setOps that will be used during the unification
+ */
+ protected ISetOperations setOps = new GuavaSetOperations();
+
+ /**
+ * The implementation of the standard unify that will be used during the unification
+ */
+ protected IUnify stdUnify = new MartelliMontanariUnify();
+
+ /**
+ * The implementation of the rules that will be used during the unification.
+ */
+ protected IRuleSet rules;
+
+ protected Set eq; //und-constraints
+
+ protected List>> oderConstraintsField;
+
+ protected IFiniteClosure fc;
+
+ protected OrderingExtend> oup;
+
+ protected boolean parallel;
+
+ //Gives if unify is not called from checkA
+ private boolean finalresult = true;
+
+ int rekTiefeField;
+
+ Integer nOfUnify = 0;
+
+ Integer noUndefPair = 0;
+
+ Integer noAllErasedElements = 0;
+
+ static Integer noou = 0;
+
+ static int noBacktracking;
+
+ static int noLoop;
+
+ static Integer noShortendElements = 0;
+
+ Boolean myIsCanceled = false;
+
+ volatile UnifyTaskModel usedTasks;
+
+ static Writer statistics;
+
+ /** No-arg constructor: initializes only the rule set (no logging, no task registration). */
+ public TypeUnifyTask() {
+ rules = new RuleSet();
+ }
+
+ /*
+ public TypeUnifyTask(Set eq, IFiniteClosure fc, boolean parallel, FileWriter logFile, Boolean log) {
+ this.eq = eq;
+ this.fc = fc;
+ this.oup = new OrderingUnifyPair(fc);
+ this.parallel = parallel;
+ this.logFile = logFile;
+ this.log = log;
+ rules = new RuleSet(logFile);
+ noOfThread++;
+ thNo = noOfThread;
+ }
+ */
+
+ //statistics
+ /**
+ * Like the main constructor, but additionally installs a statistics writer.
+ * NOTE(review): {@code statistics} is a static field, so this assignment is shared
+ * across ALL tasks and can race with the null-check in the delegated constructor.
+ * If {@code statistics} is null here, the NullWriter installed by the delegate is
+ * overwritten with null again.
+ */
+ public TypeUnifyTask(Set eq, List>> oderConstraints, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks, Writer statistics) {
+ this(eq,oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
+ this.statistics = statistics;
+ }
+ /**
+ * Main constructor: stores constraints and configuration, assigns a unique
+ * thread number, opens a per-thread log file (or a null sink when logging is
+ * off) and registers this task in {@code usedTasks} for cooperative cancellation.
+ * NOTE(review): synchronizing on {@code this} inside a constructor does not
+ * protect the static counters/fields touched here from other instances.
+ */
+ public TypeUnifyTask(Set eq, List>> oderConstraints, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks) {
+ synchronized (this) {
+ if(statistics==null){
+ statistics = new NullWriter();
+ }
+ this.eq = eq;
+ //this.oderConstraints = oderConstraints.stream().map(x -> x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
+ this.oderConstraintsField = oderConstraints; /*.stream().map(x -> {
+ Set> ret = new HashSet<>();
+ for (Constraint y : x) {
+ ret.add(new HashSet<>(y));
+ }
+ return ret;
+ }).collect(Collectors.toCollection(ArrayList::new));
+ */
+
+ //x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
+ // NOTE(review): self-assignment — there is no constructor parameter named
+ // nextSetElement, so this line is a no-op (field stays null). Subclasses
+ // (TypeUnify2Task) set the field themselves.
+ this.nextSetElement = nextSetElement;
+ this.fc = fc;
+ this.oup = new OrderingUnifyPair(fc);
+ this.parallel = parallel;
+ // NOTE(review): the logFile parameter is stored here but immediately replaced
+ // by a fresh per-thread FileWriter (or null sink) in the try-block below.
+ this.logFile = logFile;
+ this.log = log;
+
+ noOfThread++;
+ totalnoOfThread++;
+ //writeLog("thNo1 " + thNo);
+ thNo = totalnoOfThread;
+ writeLog("thNo2 " + thNo);
+ try {
+ this.logFile = log ? new FileWriter(new File(System.getProperty("user.dir") + "/logFiles/" + "Thread_"+thNo))
+ : new OutputStreamWriter(new NullOutputStream());
+ logFile.write("");
+ }
+ catch (IOException e) {
+ System.err.println("log-File nicht vorhanden");
+ }
+ /*Abbruchtest
+ if (thNo > 10) {
+ System.out.println("cancel");
+ usedTasks.cancel();
+ writeLog(nOfUnify.toString() + "cancel");
+ System.out.println("cancel");
+ try {
+ logFile.write("Abbruch");
+ }
+ catch (IOException e) {
+ System.err.println("log-File nicht vorhanden");
+ }
+ }
+ */
+ rules = new RuleSet(logFile);
+ this.rekTiefeField = rekTiefe;
+ this.urm = urm;
+ this.usedTasks = usedTasks;
+ this.usedTasks.add(this);
+ }
+ }
+
+ /**
+ * Vererbt alle Variancen
+ * @param eq The set of constraints
+ */
+ /* PL 2018-05- 17 verschoben nach JavaTXCompiler
+ private void varianceInheritance(Set eq) {
+ Set usedTPH = new HashSet<>();
+ Set phSet = eq.stream().map(x -> {
+ Set pair = new HashSet<>();
+ if (x.getLhsType() instanceof PlaceholderType) pair.add((PlaceholderType)x.getLhsType());
+ if (x.getRhsType() instanceof PlaceholderType) pair.add((PlaceholderType)x.getRhsType());
+ return pair;
+ }).reduce(new HashSet<>(), (a,b) -> { a.addAll(b); return a;} , (c,d) -> { c.addAll(d); return c;});
+
+ ArrayList phSetVariance = new ArrayList<>(phSet);
+ phSetVariance.removeIf(x -> (x.getVariance() == 0));
+ while(!phSetVariance.isEmpty()) {
+ PlaceholderType a = phSetVariance.remove(0);
+ usedTPH.add(a);
+ //HashMap ht = new HashMap<>();
+ //ht.put(a, a.getVariance());
+ Set eq1 = new HashSet<>(eq);
+ eq1.removeIf(x -> !(x.getLhsType() instanceof PlaceholderType && ((PlaceholderType)x.getLhsType()).equals(a)));
+ eq1.stream().forEach(x -> { x.getRhsType().accept(new distributeVariance(), a.getVariance());});
+ eq1 = new HashSet<>(eq);
+ eq1.removeIf(x -> !(x.getRhsType() instanceof PlaceholderType && ((PlaceholderType)x.getRhsType()).equals(a)));
+ eq1.stream().forEach(x -> { x.getLhsType().accept(new distributeVariance(), a.getVariance());});
+ phSetVariance = new ArrayList<>(phSet);
+ phSetVariance.removeIf(x -> (x.getVariance() == 0 || usedTPH.contains(x)));
+ }
+}
+*/
+ /**
+ * Marks this task as cancelled.
+ * NOTE(review): the parameter {@code b} is ignored — the flag is always set to true.
+ * @param b ignored
+ */
+ void myCancel(Boolean b) {
+ myIsCanceled = true;
+ }
+
+ /** @return true once {@link #myCancel(Boolean)} has been called on this task. */
+ public boolean myIsCancelled() {
+ return myIsCanceled;
+ }
+
+ /**
+ * ForkJoin entry point: converts one-element or-constraints into and-constraints,
+ * runs the full unification, closes this task's log file and returns the result.
+ * Throws a DebugException if only undefined (contradictory) pair sets remain;
+ * returns the empty set if the task group was cancelled.
+ */
+ protected Set> compute() {
+ if (one) {
+ //System.out.println("two");
+ }
+ one = true;
+ Set neweq = new HashSet<>(eq);
+ /* one-element or-constraints are converted into and-constraints */
+ oderConstraintsField.stream()
+ .filter(x -> x.size()==1)
+ .map(y -> y.stream().findFirst().get()).forEach(x -> neweq.addAll(x));
+ // only or-constraints with more than one alternative remain
+ ArrayList>> remainingOderconstraints = oderConstraintsField.stream()
+ .filter(x -> x.size()>1)
+ .collect(Collectors.toCollection(ArrayList::new));
+ Set> res = unify(neweq, remainingOderconstraints, fc, parallel, rekTiefeField, new HashSet<>());
+ noOfThread--;
+ try {
+ logFile.close();
+ }
+ catch (IOException ioE) {
+ System.err.println("no log-File");
+ }
+ if (isUndefinedPairSetSet(res)) {
+ //for debugging purposes
+ ArrayList al = res.stream().map(x -> x.stream().collect(Collectors.toCollection(ArrayList::new)))
+ .collect(Collectors.toCollection(ArrayList::new));
+ throw new DebugException("Unresolved constraints: " + res.toString()); //return new HashSet<>();
+ }
+ else {
+ synchronized (usedTasks) {
+ if (this.myIsCancelled()) {
+ return new HashSet<>();
+ }
+ else {
+ return res;
+ }
+ }
+ }
+ }
+/*
+ @Override
+ protected Set> compute() {
+ Set> fstElems = new HashSet<>();
+ fstElems.add(eq);
+ Set> res = computeCartesianRecursiveOderConstraints(fstElems, oderConstraints, fc, parallel);
+ if (isUndefinedPairSetSet(res)) { return new HashSet<>(); }
+ else return res;
+ }
+*/
+
+
+
+
+ /**
+ * Computes all principal type unifiers for a set of constraints.
+ * Pipeline: cancellation check, double-wildcard filter, occurs-check,
+ * repeated rule application (step 1), split into TPH-only / other pairs
+ * (steps 2-3), creation of possible typings via pattern matching (step 4),
+ * then the recursive cartesian-product traversal.
+ * @param eq The set of and-constraints
+ * @param oderConstraints remaining or-constraints (alternatives)
+ * @param fc The finite closure
+ * @param parallel whether subtasks may be forked
+ * @param rekTiefe recursion depth (incremented here; used for logging)
+ * @param methodSignatureConstraint constraints added to every solved form
+ * @return The set of all principal type unifiers
+ */
+ protected Set> unify(final Set eq, List>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe, Set methodSignatureConstraint) {
+ //Set aas = eq.stream().filter(x -> x.getLhsType().getName().equals("AA") //&& x.getPairOp().equals(PairOperator.SMALLERDOT)
+ // ).collect(Collectors.toCollection(HashSet::new));
+ //writeLog(nOfUnify.toString() + " AA: " + aas.toString());
+ //if (aas.isEmpty()) {
+ // System.out.println("");
+ //}
+
+ //.collect(Collectors.toCollection(HashSet::new)));
+
+ // bail out early if the task group was cancelled
+ synchronized (usedTasks) {
+ if (this.myIsCancelled()) {
+ return new HashSet<>();
+ }
+ }
+
+ rekTiefe++;
+ nOfUnify++;
+ writeLog(nOfUnify.toString() + " Unifikation: " + eq.toString());
+ writeLog(nOfUnify.toString() + " Oderconstraints: " + oderConstraints.toString());
+
+ /*
+ * inherit variances to all equations
+ */
+ //PL 2018-05-17 moved to JavaTXCompiler
+ //varianceInheritance(eq);
+
+ /*
+ * filter out "? extends ? extends Theta" pairs: they are ill-formed and
+ * returned as a single undefined pair set
+ */
+ Set doubleExt = eq.stream().filter(x -> (x.wrongWildcard())).map(x -> { x.setUndefinedPair(); return x;})
+ .collect(Collectors.toCollection(HashSet::new));
+ if (doubleExt.size() > 0) {
+ Set> ret = new HashSet<>();
+ ret.add(doubleExt);
+ return ret;
+ }
+
+ /*
+ * occurs-check: a =. C<...a...> style pairs are unsolvable
+ */
+
+ Set ocurrPairs = eq.stream().filter(x -> {
+ UnifyType lhs, rhs;
+ return (lhs = x.getLhsType()) instanceof PlaceholderType
+ && !((rhs = x.getRhsType()) instanceof PlaceholderType)
+ && rhs.getTypeParams().occurs((PlaceholderType)lhs);})
+ .map(x -> { x.setUndefinedPair(); return x;})
+ .collect(Collectors.toCollection(HashSet::new));
+ writeLog("ocurrPairs: " + ocurrPairs);
+ if (ocurrPairs.size() > 0) {
+ Set> ret = new HashSet<>();
+ ret.add(ocurrPairs);
+ return ret;
+ }
+
+
+
+ /*
+ * Step 1: Repeated application of reduce, adapt, erase, swap
+ */
+ Set eq0;
+ Set eq0Prime;
+ Optional> eqSubst = Optional.of(eq);
+ do {
+ eq0Prime = eqSubst.get();
+ eq0 = applyTypeUnificationRules(eq0Prime, fc);
+ eqSubst = rules.subst(eq0, oderConstraints);
+ } while (eqSubst.isPresent());
+
+ eq0.forEach(x -> x.disableCondWildcards());
+
+ writeLog(nOfUnify.toString() + " Unifikation nach applyTypeUnificationRules: " + eq.toString());
+ writeLog(nOfUnify.toString() + " Oderconstraints nach applyTypeUnificationRules: " + oderConstraints.toString());
+
+ /*
+ * Step 2 and 3: Create a subset eq1s of pairs where both sides are TPH and eq2s of the other pairs
+ */
+ Set eq1s = new HashSet<>();
+ Set eq2s = new HashSet<>();
+ splitEq(eq0, eq1s, eq2s);
+
+ /*
+ * Step 4: Create possible typings
+ *
+ * "Manche Autoren identifizieren die Paare (a, (b,c)) und ((a,b),c)
+ * mit dem geordneten Tripel (a,b,c), wodurch das kartesische Produkt auch assoziativ wird." - Wikipedia
+ */
+
+ // There are up to 10 toplevel set. 8 of 10 are the result of the
+ // cartesian product of the sets created by pattern matching.
+ List>> topLevelSets = new ArrayList<>();
+
+ //System.out.println(eq2s);
+
+ if(eq1s.size() != 0) { // Do not add empty sets or the cartesian product will always be empty.
+ Set> wrap = new HashSet<>();
+ wrap.add(eq1s);
+ topLevelSets.add(wrap); // Add Eq1'
+ }
+
+ // Add the set of [a =. Theta | (a=. Theta) in Eq2']
+ //TODO: Occurscheck anwenden als Fehler identifizieren
+ Set bufferSet = eq2s.stream()
+ .filter(x -> x.getPairOp() == PairOperator.EQUALSDOT && x.getLhsType() instanceof PlaceholderType)
+ .collect(Collectors.toSet());
+
+ if(bufferSet.size() != 0) { // Do not add empty sets or the cartesian product will always be empty.
+ Set> wrap = new HashSet<>();
+ wrap.add(bufferSet);
+ topLevelSets.add(wrap);
+ eq2s.removeAll(bufferSet);
+ }
+
+ // Sets that originate from pair pattern matching
+ // Sets of the "second level"
+ Set undefinedPairs = new HashSet<>();
+ if (printtag) System.out.println("eq2s " + eq2s);
+ //writeLog("BufferSet: " + bufferSet.toString()+"\n");
+ List>> oderConstraintsOutput = new ArrayList<>();//new ArrayList<>(oderConstraints);
+ Set>>> secondLevelSets = calculatePairSets(eq2s, oderConstraints, fc, undefinedPairs, oderConstraintsOutput);
+ //PL 2017-09-20: Im calculatePairSets wird möglicherweise O .< java.lang.Integer
+ //nicht ausgewertet Faculty Beispiel im 1. Schritt
+ //PL 2017-10-03 geloest, muesste noch mit FCs mit kleineren
+ //Typen getestet werden.
+ writeLog(nOfUnify.toString() + " Oderconstraints2: " + oderConstraintsOutput.toString());
+ if (printtag) System.out.println("secondLevelSets:" +secondLevelSets);
+ // If pairs occured that did not match one of the cartesian product cases,
+ // those pairs are contradictory and the unification is impossible.
+ if(!undefinedPairs.isEmpty()) {
+ noUndefPair++;
+ for (UnifyPair up : undefinedPairs) {
+ writeLog(noUndefPair.toString() + " UndefinedPairs; " + up);
+ writeLog("BasePair; " + up.getBasePair());
+ }
+ Set> error = new HashSet<>();
+ undefinedPairs = undefinedPairs.stream().map(x -> { x.setUndefinedPair(); return x;}).collect(Collectors.toCollection(HashSet::new));
+ error.add(undefinedPairs);
+ undefinedPairs.forEach(x -> writeLog("AllSubst: " +x.getAllSubstitutions().toString()));
+ return error;
+ }
+
+ /* Up to here, no cartesian products are calculated.
+ * filters for pairs and sets can be applied here */
+
+ // Alternative: Sub cartesian products of the second level (pattern matched) sets
+ // "the big (x)"
+ /* for(Set>> secondLevelSet : secondLevelSets) {
+ //System.out.println("secondLevelSet "+secondLevelSet.size());
+ List>> secondLevelSetList = new ArrayList<>(secondLevelSet);
+ Set>> cartResult = setOps.cartesianProduct(secondLevelSetList);
+ //System.out.println("CardResult: "+cartResult.size());
+ // Flatten and add to top level sets
+ Set> flat = new HashSet<>();
+ int j = 0;
+ for(List> s : cartResult) {
+ j++;
+ //System.out.println("s from CardResult: "+cartResult.size() + " " + j);
+ Set flat1 = new HashSet<>();
+ for(Set s1 : s)
+ flat1.addAll(s1);
+ flat.add(flat1);
+ }
+ //topLevelSets.add(flat);
+ }
+ */
+
+ // Alternative: do NOT build the cartesian product of the second-level sets;
+ // instead add each second-level element as its own top-level set
+ for(Set>> secondLevelSet : secondLevelSets) {
+ for (Set extends Set> secondlevelelem : secondLevelSet) {
+ topLevelSets.add(secondlevelelem);
+ }
+ }
+ //System.out.println(topLevelSets);
+ //System.out.println();
+
+
+ // call of computeCartesianRecursive BEGIN
+ //writeLog("topLevelSets: " + topLevelSets.toString());
+ return computeCartesianRecursive(new ArrayList<>(topLevelSets), eq, oderConstraintsOutput, fc, parallel, rekTiefe, methodSignatureConstraint);
+
+ }
+
+
+ /**
+ * Continuation of unify after the cartesian-product element has been chosen:
+ * flattens the chosen sets, applies substitution (step 5), either accepts the
+ * result as a solved form or recurses (step 6), and finally filters out
+ * non-solved, non-undefined sets (step 7). Solved forms are extended with the
+ * method-signature constraints and published to the result model.
+ */
+ Set> unify2(Set> setToFlatten, Set eq, List>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe, Set methodSignatureConstraint) {
+ // call of computeCartesianRecursive END
+
+ //keine Ahnung woher das kommt
+ //Set> setToFlatten = topLevelSets.stream().map(x -> x.iterator().next()).collect(Collectors.toCollection(HashSet::new));
+
+ //Muss auskommentiert werden, wenn computeCartesianRecursive ANFANG
+ // Cartesian product over all (up to 10) top level sets
+ //Set>> eqPrimeSet = setOps.cartesianProduct(topLevelSets)
+ // .stream().map(x -> new HashSet<>(x))
+ // .collect(Collectors.toCollection(HashSet::new));
+ //Muss auskommentiert werden, wenn computeCartesianRecursive ENDE
+
+ // bail out early if the task group was cancelled
+ synchronized (usedTasks) {
+ if (this.myIsCancelled()) {
+ return new HashSet<>();
+ }
+ }
+
+ Set> eqPrimePrimeSet = new HashSet<>();
+
+ Set forks = new HashSet<>();
+
+ //Muss auskommentiert werden, wenn computeCartesianRecursive ANFANG
+ //for(Set> setToFlatten : eqPrimeSet) {
+ // Flatten the cartesian product
+ //Muss auskommentiert werden, wenn computeCartesianRecursive ENDE
+ Set eqPrime = new HashSet<>();
+ setToFlatten.stream().forEach(x -> eqPrime.addAll(x));
+
+ /*
+ * Step 5: Substitution
+ */
+ //writeLog("vor Subst: " + eqPrime);
+ writeLog("vor Subst: " + oderConstraints);
+ String ocString = oderConstraints.toString();
+ List>> newOderConstraints = new ArrayList<>(oderConstraints);
+ Optional> eqPrimePrime = rules.subst(eqPrime, newOderConstraints);
+ // NOTE(review): unifyres1/unifyres2 are dead stores kept only as debugger aids.
+ Set> unifyres1 = null;
+ Set> unifyres2 = null;
+ if (!ocString.equals(newOderConstraints.toString())) writeLog("nach Subst: " + newOderConstraints);
+ //writeLog("nach Subst: " + eqPrimePrime);
+ /*
+ * Step 6 a) Restart (fork) for pairs where subst was applied
+ */
+ /*
+ if(parallel) {
+ if (eqPrime.equals(eq) && !eqPrimePrime.isPresent()
+ && oderConstraints.isEmpty()) //PL 2017-09-29 //(!eqPrimePrime.isPresent()) auskommentiert und durch
+ //PL 2017-09-29 dies ersetzt //(!eqPrimePrime.isPresent())
+ //PL 2018-05-18 beide Bedingungen muessen gelten, da eqPrime Veränderungen in allem ausser subst
+ //eqPrimePrime Veraenderungen in subst repraesentieren.
+ eqPrimePrimeSet.add(eqPrime);
+ else if(eqPrimePrime.isPresent()) {
+ //System.out.println("nextStep: " + eqPrimePrime.get());
+ TypeUnifyTask fork = new TypeUnifyTask(eqPrimePrime.get(), fc, true, logFile, log);
+ forks.add(fork);
+ fork.fork();
+ }
+ else {
+ //System.out.println("nextStep: " + eqPrime);
+ TypeUnifyTask fork = new TypeUnifyTask(eqPrime, fc, true, logFile, log);
+ forks.add(fork);
+ fork.fork();
+ }
+ }
+ else */
+ {// sequential (Step 6b is included)
+ if (printtag) System.out.println("nextStep: " + eqPrimePrime);
+ // fixed point reached: nothing changed outside subst (eqPrime == eq) and
+ // subst itself produced nothing new — eqPrime is a candidate solved form
+ if (eqPrime.equals(eq) && !eqPrimePrime.isPresent()
+ && oderConstraints.isEmpty()) { //PL 2017-09-29 //(!eqPrimePrime.isPresent()) auskommentiert und durch
+ //PL 2017-09-29 dies ersetzt //(!eqPrimePrime.isPresent())
+ //PL 2018-05-18 beide Bedingungen muessen gelten, da eqPrime Veränderungen in allem ausser subst
+ //eqPrimePrime Veraenderungen in subst repraesentieren.
+ //try {
+ //if (isSolvedForm(eqPrime)) {
+ // writeLog("eqPrime:" + eqPrime.toString()+"\n");
+ //}
+ //}
+ //catch (IOException e) {
+ // System.err.println("log-File nicht vorhanden");
+ //}
+ eqPrimePrimeSet.add(eqPrime);
+ if (finalresult && isSolvedForm(eqPrime)) {
+ writeLog("eqPrime:" + eqPrime.toString()+"\n");
+
+ /* method constraint sets are added to the result
+ * begin
+ */
+ //System.out.println("methodSignatureConstraint Return: " + methodSignatureConstraint);
+ eqPrimePrimeSet.forEach(x -> x.addAll(methodSignatureConstraint));
+
+ // substitutions are applied inside the method constraint sets
+ eqPrimePrimeSet = eqPrimePrimeSet.stream().map(
+ x -> { Optional> help = rules.subst(x);
+ return help.isPresent() ?
+ help.get():
+ x; }).collect(Collectors.toSet());
+ /*
+ * end
+ */
+
+
+ urm.notify(eqPrimePrimeSet);
+ writeStatistics("Result: " + eqPrimePrimeSet.toString());
+ }
+ }
+ else if(eqPrimePrime.isPresent()) {
+ // subst changed something: recurse on the substituted set
+ Set> unifyres = unifyres1 = unify(eqPrimePrime.get(), newOderConstraints, fc, parallel, rekTiefe, methodSignatureConstraint);
+
+ eqPrimePrimeSet.addAll(unifyres);
+ }
+ else {
+ // something outside subst changed: recurse on eqPrime
+ Set> unifyres = unifyres2 = unify(eqPrime, newOderConstraints, fc, parallel, rekTiefe, methodSignatureConstraint);
+
+
+ eqPrimePrimeSet.addAll(unifyres);
+ }
+ }
+ //Muss auskommentiert werden, wenn computeCartesianRecursive ANFANG
+ //}
+ //Muss auskommentiert werden, wenn computeCartesianRecursive ENDE
+
+ /*
+ * Step 6 b) Build the union over everything.
+ */
+ /*
+ * PL 2019-01-22: geloescht
+
+ if(parallel)
+ for(TypeUnifyTask fork : forks)
+ eqPrimePrimeSet.addAll(fork.join());
+ */
+ /*
+ * Step 7: Filter empty sets;
+ */
+ eqPrimePrimeSet = eqPrimePrimeSet.stream().filter(x -> isSolvedForm(x) || this.isUndefinedPairSet(x)).collect(Collectors.toCollection(HashSet::new));
+ if (!eqPrimePrimeSet.isEmpty() && !isUndefinedPairSetSet(eqPrimePrimeSet)) {
+ writeLog("Result1 " + eqPrimePrimeSet.toString());
+ }
+ return eqPrimePrimeSet;
+ }
+
+
+ /**
+ * Computes the cartesian product of topLevelSets step by step.
+ * @param topLevelSets List of Sets of Sets, where a cartesian product have to be built
+ * Ex.: [{{a =. Integer}, {a = Object}}, {{a = Vector, b =. Integer}, {a = Vector, b =. Object}}]
+ * @param eq Original set of equations which should be unified
+ * @param oderConstraints Remaining or-constraints
+ * @param fc The finite closure
+ * @param parallel If the algorithm should be parallelized run
+ * @param rekTiefe Deep of recursive calls
+ * @return The set of all principal type unifiers
+ */
+ Set> computeCartesianRecursive(ArrayList>> topLevelSets, Set eq, List>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe, Set methodSignatureConstraint) {
+
+ //oneElems: Alle 1-elementigen Mengen, die nur ein Paar
+ //a <. theta, theta <. a oder a =. theta enthalten
+
+ //statistics
+ //writeStatistics("\nNumber of Constraints (" + rekTiefe + "): " + topLevelSets.size());
+
+ Set> oneElems = new HashSet<>();
+ oneElems.addAll(topLevelSets.stream()
+ .filter(x -> x.size()==1)
+ .map(y -> y.stream().findFirst().get())
+ .collect(Collectors.toCollection(HashSet::new)));
+
+ //optNextSet: Eine mehrelementige Menge, wenn vorhanden
+ Optional>> optNextSet = topLevelSets.stream().filter(x -> x.size()>1).findAny();
+
+ if (!optNextSet.isPresent()) {//Alle Elemente sind 1-elementig
+ Set> result = unify2(oneElems, eq, oderConstraints, fc, parallel, rekTiefe, methodSignatureConstraint);
+ return result;
+ }
+
+ Set extends Set> nextSet = optNextSet.get();
+ //writeLog("nextSet: " + nextSet.toString());
+ List> nextSetasList =new ArrayList<>(nextSet);
+
+ //writeStatistics(" Start Number of elements ( " /* + nextSetasList.get(0).stream().findFirst().get().getBasePair()*/ +"): (" + rekTiefe + "): " + nextSetasList.size());
+
+ /*
+ try {
+ //List>
+ //nextSetasList = oup.sortedCopy(nextSet);//new ArrayList<>(nextSet);
+ }
+ catch (java.lang.IllegalArgumentException e) {
+ System.out.print("");
+ }
+ */
+ Set> result = new HashSet<>();
+ int variance = 0;
+
+ /* Varianzbestimmung Anfang
+ * Oderconstraint, wenn entweder kein Basepair oder unterschiedliche Basepairs => oderConstraint = true;
+ * Varianz = 1 => Argumentvariable
+ * Varianz = -1 => Rückgabevariable
+ * Varianz = 0 => unklar
+ * Varianz = 2 => Operatoren oderConstraints */
+ ArrayList zeroNextElem = new ArrayList<>(nextSetasList.get(0));
+ UnifyPair fstBasePair = zeroNextElem.remove(0).getBasePair();
+ Boolean oderConstraint = false;
+
+ if (fstBasePair != null) {
+ Boolean sameBase = true;
+ for (UnifyPair ele : nextSetasList.get(0)) {//check ob a <. ty base oder ob Ueberladung
+ sameBase = sameBase && ele.getBasePair() != null && ele.getBasePair().equals(fstBasePair);
+ }
+ if (sameBase) { //angefuegt PL 2020-02-30
+ Optional xi = nextSetasList.stream().map(x -> x.stream().filter(y -> (y.getLhsType() instanceof PlaceholderType && !(y.getRhsType() instanceof PlaceholderType)))
+ .filter(z -> ((PlaceholderType)z.getLhsType()).getVariance() != 0)
+ .map(c -> ((PlaceholderType)c.getLhsType()).getVariance())
+ .reduce((a,b)-> {if (a==b) return a; else return 0; })) //2 kommt insbesondere bei Oder-Constraints vor
+ .filter(d -> d.isPresent())
+ .map(e -> e.get())
+ .findAny();
+ if (xi.isPresent()) {
+ variance = xi.get();
+ }
+ }
+ else {
+ oderConstraint = true;
+ }
+ }
+ else {
+ oderConstraint = true;
+ }
+
+ //Varianz-Bestimmung Oder-Constraints
+ if (oderConstraint) {
+ if (printtag) System.out.println("nextSetasList " + nextSetasList);
+ Optional optVariance =
+ nextSetasList.iterator()
+ .next()
+ .stream()
+ .filter(x -> x.getGroundBasePair().getLhsType() instanceof PlaceholderType &&
+ ! (x.getRhsType() instanceof PlaceholderType) &&
+ x.getPairOp() == PairOperator.EQUALSDOT)
+ .map(x ->
+ ((PlaceholderType)x.getGroundBasePair().getLhsType()).getVariance())
+ .reduce((n,m) -> { if ((n == 0) && (m==0)) return 0;
+ else if (n !=0) return n; //es muss mindestens eine Variance != 0 sein
+ else return m;
+ });
+ //Fuer Operatorenaufrufe wird variance auf 2 gesetzt.
+ //da kein Receiver existiert also kein x.getGroundBasePair().getLhsType() instanceof PlaceholderType
+ //Bei Varianz = 2 werden alle Elemente des Kartesischen Produkts abgearbeitet
+ variance = optVariance.isPresent() ? optVariance.get() : 2;
+ }
+ /* Varianzbestimmung Ende */
+
+ //writeLog("nextSetasList: " + nextSetasList.toString());
+ Set nextSetElem = nextSetasList.get(0);
+ //writeLog("BasePair1: " + nextSetElem + " " + nextSetElem.iterator().next().getBasePair());
+
+ /* sameEqSet-Bestimmung: Wenn a = ty \in nextSet dann enthaelt sameEqSet
+ * alle Paare a < ty1 oder ty2 < a aus oneElems */
+ Set sameEqSet = new HashSet<>();
+
+ //optOrigPair enthaelt ggf. das Paar a = ty \in nextSet
+ Optional optOrigPair = null;
+ if (!oderConstraint) {
+ optOrigPair = nextSetElem.stream().filter(x -> (
+ //x.getBasePair() != null && ist gegeben wenn variance != 2
+ //x.getBasePair().getPairOp().equals(PairOperator.SMALLERDOT) &&
+ (x.getPairOp().equals(PairOperator.EQUALSDOT)
+ /*
+ (x.getBasePair().getLhsType() instanceof PlaceholderType
+ && x.getLhsType().equals(x.getBasePair().getLhsType()))
+ || (x.getBasePair().getRhsType() instanceof PlaceholderType
+ && x.getLhsType().equals(x.getBasePair().getRhsType())
+ */
+ ))).filter(x -> //Sicherstellen, dass bei a = ty a auch wirklich die gesuchte Typvariable ist
+ x.getLhsType().equals(x.getBasePair().getLhsType()) ||
+ x.getLhsType().equals(x.getBasePair().getRhsType())
+ ).findFirst();
+ writeLog("optOrigPair: " + optOrigPair);
+ if (optOrigPair.isPresent()) {
+ UnifyPair origPair = optOrigPair.get();
+ UnifyType tyVar;
+ if (!((tyVar = origPair.getLhsType()) instanceof PlaceholderType)) {
+ tyVar = origPair.getRhsType();
+ }
+ UnifyType tyVarEF = tyVar;
+ sameEqSet = oneElems.stream().map(xx -> xx.iterator().next())
+ .filter(x -> (((x.getLhsType().equals(tyVarEF) && !(x.getRhsType() instanceof PlaceholderType))
+ || (x.getRhsType().equals(tyVarEF) && !(x.getLhsType() instanceof PlaceholderType)))))
+ .collect(Collectors.toCollection(HashSet::new));
+ }
+ }
+ /* sameEqSet-Bestimmung Ende */
+
+ int hilf = 0;
+ Set a = null;
+ while (nextSetasList.size() > 0) {
+
+ //statistics
+ //writeStatistics(" Actual Number of elements( " + nextSetasList.get(0).stream().findFirst().get().getBasePair() +"): (" + rekTiefe + "): " + nextSetasList.size());
+ Set a_last = a;
+
+ /* Liste der Faelle für die parallele Verarbeitung
+ * Enthaelt Elemente, die nicht in Relation zu aktuellem Fall in der
+ * Variablen a stehen. Diese muesse auf alle Faelle bearbeitet werden,
+ * Deshalb wird ihre Berechnung parallel angestossen.
+ */
+ List> nextSetasListRest = new ArrayList<>();
+
+ /* Liste der Faelle, bei dem Receiver jeweils "? extends" enthaelt bzw. nicht enthaelt
+ * In der Regel ist dies genau ein Element
+ * Dieses Element wird später aus nextSetasList geloescht, wenn das jeweils andere Element zum Erfolg
+ * gefuehrt hat.
+ */
+ List> nextSetasListOderConstraints = new ArrayList<>();
+
+ writeLog("nextSet: " + nextSet.toString());
+ writeLog("nextSetasList: " + nextSetasList.toString());
+
+ /* staistics Nextvar an Hand Varianzbestimmung auskommentieren Anfang
+ if (variance == 1) {
+ a = oup.max(nextSetasList.iterator());
+ nextSetasList.remove(a);
+ if (oderConstraint) {
+ nextSetasListOderConstraints.add(((Constraint)a).getExtendConstraint());
+ }
+ writeLog("nextSetasListOderConstraints 1: " + nextSetasListOderConstraints);
+ nextSetasListRest = new ArrayList<>(nextSetasList);
+ Iterator> nextSetasListItRest = new ArrayList>(nextSetasListRest).iterator();
+ while (nextSetasListItRest.hasNext()) {
+ Set a_next = nextSetasListItRest.next();
+ if (//a.equals(a_next) ||
+ (oup.compare(a, a_next) == 1)) {
+ nextSetasListRest.remove(a_next);
+ }
+ }
+
+ //Alle maximale Elemente in nextSetasListRest bestimmen
+ //nur für diese wird parallele Berechnung angestossen.
+ nextSetasListRest = oup.maxElements(nextSetasListRest);
+ }
+ else if (variance == -1) {
+ a = oup.min(nextSetasList.iterator());
+ writeLog("Min: a in " + variance + " "+ a);
+ if (oderConstraint) {
+ nextSetasListOderConstraints.add(((Constraint)a).getExtendConstraint());
+ }
+ writeLog("nextSetasListOderConstraints -1: " + nextSetasListOderConstraints);
+ nextSetasList.remove(a);
+ nextSetasListRest = new ArrayList<>(nextSetasList);
+ Iterator> nextSetasListItRest = new ArrayList>(nextSetasListRest).iterator();
+ while (nextSetasListItRest.hasNext()) {
+ Set a_next = nextSetasListItRest.next();
+ if (//a.equals(a_next) ||
+ (oup.compare(a, a_next) == -1)) {
+ nextSetasListRest.remove(a_next);
+ }
+ }
+ //Alle minimalen Elemente in nextSetasListRest bestimmen
+ //nur für diese wird parallele Berechnung angestossen.
+ nextSetasListRest = oup.minElements(nextSetasListRest);
+ }
+ else if (variance == 2) {
+ a = nextSetasList.remove(0);
+
+ //Fuer alle Elemente wird parallele Berechnung angestossen.
+ nextSetasListRest = new ArrayList<>(nextSetasList);
+ }
+ else if (variance == 0) {
+ //wenn a <. theta dann ist ein maximales Element sehr wahrscheinlich
+ //wenn theta <. a dann ist ein minimales Element sehr wahrscheinlich
+ if (!oderConstraint && optOrigPair != null && optOrigPair.isPresent()) {
+ if (optOrigPair.get().getBasePair().getLhsType() instanceof PlaceholderType) {
+ a = oup.max(nextSetasList.iterator());
+ }
+ else {
+ a = oup.min(nextSetasList.iterator());
+ }
+ nextSetasList.remove(a);
+ }
+ else {
+ if (oderConstraint) {
+ a = oup.max(nextSetasList.iterator());
+ nextSetasList.remove(a);
+ nextSetasListOderConstraints.add(((Constraint)a).getExtendConstraint());
+ }
+ else {
+ a = nextSetasList.remove(0);
+ }
+ }
+ }
+ Nextvar an Hand Varianzbestimmung auskommentieren Ende */
+ a = nextSetasList.remove(0); //statisticsList
+
+ //writeStatistics(a.toString());
+ if (oderConstraint) {//Methodconstraints werden abgespeichert für die Bytecodegenerierung von Methodenaufrufen
+ methodSignatureConstraint.addAll(((Constraint)a).getmethodSignatureConstraint());
+ //System.out.println("ERSTELLUNG: " +methodSignatureConstraint);
+ }
+
+ i++;
+ Set