forked from JavaTX/JavaCompilerCore
Filtern von Constraints vor dem Karthesischen Produkt bilden (filter constraints before building the Cartesian product)
This commit is contained in:
parent 0d51c3e94d
commit 07ea25b5b7
@@ -4,11 +4,14 @@ package de.dhbwstuttgart.syntaxtree;
 // ino.module.SourceFile.8722.import
 import java.io.IOException;
+import java.lang.reflect.Array;
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.Hashtable;
 import java.util.Iterator;
+import java.util.List;
 import java.util.Set;
 import java.util.function.Function;
@@ -49,7 +52,9 @@ import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
 import de.dhbwstuttgart.typeinference.exceptions.DebugException;
 import de.dhbwstuttgart.typeinference.exceptions.TypeinferenceException;
 import de.dhbwstuttgart.typeinference.unify.TypeUnify;
+import de.dhbwstuttgart.typeinference.unify.Unifikationsalgorithmus;
 import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
+import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
 import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
@@ -195,7 +200,122 @@ public class SourceFile
 InterfaceVektor.addElement((Interface) e);
 }
 }
+
+	/**
+	 * PL 2014-10-25
+	 * schnitt1 checks whether the type placeholders from var are contained in the elements of vars.
+	 * Returns the set of indices of vars whose intersection with var is not empty.
+	 * @param var
+	 * @param vars
+	 * @param indexe
+	 * @return
+	 */
+	static Menge<Integer> schnitt1 (Menge<PlaceholderType> var, Menge<Menge<PlaceholderType>> vars, Menge<Integer> indexe) {
+		int j = -1;
+		for (Menge<PlaceholderType> varelems : vars) {
+			j++;
+			if (varelems != null) {
+				if (var.stream().map(x -> varelems.contains(x)).reduce(false, (a,b) -> (a || b))
+					&& (!indexe.contains(j)))
+				{
+					Menge<PlaceholderType> rekvarelements = vars.elementAt(j);
+					vars.setElementAt(null, j); //element is done and need not be processed again
+					indexe.addElement(j);
+					indexe = schnitt1(rekvarelements, vars, indexe);
+				}
+			}
+		}
+		return indexe;
+	}
+
+	/**
+	 * Builds the intersections of the sets of type placeholders.
+	 * Returns the set of index sets whose elements have non-empty intersections.
+	 * @param vars
+	 * @return
+	 */
+	public static Menge<Menge<Integer>> schnitt (Menge<Menge<PlaceholderType>> vars) {
+		Menge<Menge<Integer>> ret = new Menge<>();
+		int i = -1;
+		for (Menge<PlaceholderType> var : vars) {
+			i++;
+			if (var != null) { //element has not been processed yet
+				Menge<Integer> indexe = new Menge<>();
+				indexe.add(i);
+				ret.add(schnitt1(var, vars, indexe));
+			}
+		}
+		return ret;
+	}
+
+	public static Set<Set<UnifyPair>> cartesianProduct(List<UnifyPair> constraints, FiniteClosure finiteClosure){
+		//IDEA: build connected components of pairs that share variables
+		//and unify only those connected components in steps 1 - 5
+		//Step 1: collect all variables occurring in the pairs
+		Menge<Menge<PlaceholderType>> constraintsclonevars = constraints.stream().map(p -> {Menge<PlaceholderType> TPHs = new Menge<>();
+			TPHs.addAll(p.getInvolvedPlaceholderTypes());
+			TPHs.addAll(p.getInvolvedPlaceholderTypes());
+			return TPHs;}
+			).collect(Menge::new, Menge::add, Menge::addAll);
+
+		//Step 2: intersect each element with every element of vars and then merge the indices
+		//indexeset then contains the sets of indices that have to be unified together
+		Menge<Menge<Integer>> indexeset = new Menge<>();
+		if (constraintsclonevars != null && constraintsclonevars.size()>0) {
+			indexeset = SourceFile.schnitt(constraintsclonevars);
+		}
+
+		//Step 3: map the indices back to their elements
+		//streamconstraintsclone then contains the sets of pairs that have to be unified
+		Stream<Menge<UnifyPair>> streamconstraintsclone = indexeset.stream().<Menge<UnifyPair>>map(x -> x.stream()
+			.<UnifyPair>map(i -> constraints.get(i))
+			.<Menge<UnifyPair>>collect(Menge::new, Menge::add, Menge::addAll));
+		//Menge<Menge<Pair>> vecconstraintsclone = streamconstraintsclone.collect(Menge::new, Menge::add, Menge::addAll);
+		//System.out.println();
+		//Step 4: unification
+		Menge<Set<Set<UnifyPair>>> vecunifyResult =
+			//streamconstraintsclone.map(x -> Unify.unify(x, finiteClosure)).collect(Menge::new, Menge::add, Menge::addAll);
+			//DEBUG variant
+			streamconstraintsclone.map(x ->
+				{ Set<Set<UnifyPair>> z = new TypeUnify().unify(x, finiteClosure);
+				  return z;
+				}
+			).collect(Menge::new, Menge::add, Menge::addAll);
+
+		//card gives the cardinality of the unified sets
+		Menge<Integer> card = vecunifyResult.stream().map(x -> x.size()).collect(Menge::new, Menge::add, Menge::addAll);
+		;//.reduce(1,(a,b) -> { if ((a > 0) && (b > 0)) return (a * b); else return 1; });
+
+		//Step 5: build the Cartesian product
+		//should be removed again; continue working with:
+		//[[x_1 -> t_1, x_2 -> t2], [x_1 -> t'_1, x_2 -> t'_2]] x ... x [[x_n -> t_1n], [x_n -> t2n], [x_n -> t3n]]
+		Set<Set<UnifyPair>> cardprodret_start = new Menge<>();
+		cardprodret_start.add(new Menge<UnifyPair>());
+
+		//Cartesian product with left shift
+		Set<Set<UnifyPair>> unifyResult = vecunifyResult.stream().reduce(cardprodret_start, (x, y) -> {
+			Set<Set<UnifyPair>> cardprodret = new Menge<>();
+			if (y.size() > 0) {
+				//System.out.println(y);
+				//Menge<Menge<Pair>> cardprodretold = x;
+				//cardprodret = new Menge<>();
+				for(Set<UnifyPair> xElement : x) {
+					for (Set<UnifyPair> yElement : y){
+						Set<UnifyPair> help = new Menge<>();
+						help.addAll(yElement);
+						help.addAll(xElement);
+						cardprodret.add(help);
+					}
+				}
+			}
+			else
+				return new Menge<>(); //no unified result, so the overall result becomes []
+			return cardprodret;
+		});
+		return unifyResult;
+	}
+
 /////////////////////////////////////////////////////////////////////////
 // TypeReconstructionAlgorithmus
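Note: the schnitt/schnitt1 helpers added above group constraint indices into connected components of constraints that share at least one type placeholder, so each component can be unified on its own. A minimal, self-contained sketch of that grouping idea, using plain JDK collections and String stand-ins for the project's Menge/PlaceholderType types (only the method names and the null-marking scheme are taken from the diff; everything else is illustrative):

import java.util.ArrayList;
import java.util.List;
import java.util.Set;

// Stand-alone sketch of the schnitt/schnitt1 grouping: constraint i is described by the
// set of placeholder names it mentions; indices whose sets overlap (transitively) end up
// in the same group. String replaces PlaceholderType, java.util lists replace Menge.
public class IntersectionGroupingSketch {

    // Mirrors schnitt1: collect all indices of vars whose element shares a placeholder
    // with var, transitively; visited entries are marked null so they are not revisited.
    static List<Integer> schnitt1(Set<String> var, List<Set<String>> vars, List<Integer> indexe) {
        int j = -1;
        for (Set<String> varelems : new ArrayList<>(vars)) {
            j++;
            if (varelems != null
                    && var.stream().anyMatch(varelems::contains)
                    && !indexe.contains(j)) {
                Set<String> rekvarelements = vars.get(j);
                vars.set(j, null);          // element handled, do not process it again
                indexe.add(j);
                indexe = schnitt1(rekvarelements, vars, indexe);
            }
        }
        return indexe;
    }

    // Mirrors schnitt: one index group per connected component of overlapping sets.
    static List<List<Integer>> schnitt(List<Set<String>> vars) {
        List<List<Integer>> ret = new ArrayList<>();
        for (int i = 0; i < vars.size(); i++) {
            if (vars.get(i) != null) {      // element not handled yet
                List<Integer> indexe = new ArrayList<>();
                indexe.add(i);
                ret.add(schnitt1(vars.get(i), vars, indexe));
            }
        }
        return ret;
    }

    public static void main(String[] args) {
        // Constraints 0 and 2 share "a"; constraint 1 is independent.
        List<Set<String>> vars = new ArrayList<>(List.of(
                Set.of("a", "b"), Set.of("c"), Set.of("a", "d")));
        System.out.println(schnitt(vars));  // [[0, 2], [1]]
    }
}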
@@ -256,13 +376,11 @@ public class SourceFile
 UnifyConstraintsSet unifyConstraints = UnifyTypeFactory.convert(oderConstraints);

 //sort out impossible constraint sets by unification
-Function<Menge<UnifyPair>,Menge<Menge<UnifyPair>>> unifier = (pairs)->{
-	Menge<Menge<UnifyPair>> retValue = new Menge<>();
-	Set<Set<UnifyPair>> unifiedPairs = new TypeUnify().unify(pairs, finiteClosure);
-	return retValue;};
-//oderConstraints.filterWrongConstraints(unifier);
-
-//oderConstraints.unifyUndConstraints(unifier); //removed for tests (08.12.2015)
+Unifikationsalgorithmus unifier = (pairs)->new TypeUnify().unify(pairs, finiteClosure);
+unifyConstraints.filterWrongConstraints(unifier);
+
+unifyConstraints.unifyUndConstraints(unifier); //removed for tests (08.12.2015)

 typinferenzLog.debug("Übriggebliebene Konstraints:\n"+oderConstraints+"\n", Section.TYPEINFERENCE);
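Note: the hunk above replaces the hand-rolled Function with a Unifikationsalgorithmus lambda and calls filterWrongConstraints/unifyUndConstraints on the converted constraint set, which is the filtering the commit message refers to. A hedged sketch of the shape this suggests; the real Unifikationsalgorithmus interface and the UnifyConstraintsSet methods live in the project and may differ, so the names and signatures below are assumptions for illustration only:

import java.util.ArrayList;
import java.util.List;
import java.util.Set;

// Hypothetical functional interface matching the lambda shape in the diff:
// takes one set of pairs and returns the set of its unification results.
interface UnifierSketch<P> {
    Set<Set<P>> unify(Set<P> pairs);
}

// Hedged sketch of what "filtering wrong constraints" can mean: drop every
// alternative whose unification result is empty, because it can never
// contribute a solution to the Cartesian product built later.
class ConstraintFilterSketch {
    static <P> List<Set<P>> filterWrongConstraints(List<Set<P>> alternatives, UnifierSketch<P> unifier) {
        List<Set<P>> kept = new ArrayList<>();
        for (Set<P> alternative : alternatives) {
            if (!unifier.unify(alternative).isEmpty()) {
                kept.add(alternative);      // keep only solvable alternatives
            }
        }
        return kept;
    }
}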
@@ -273,7 +391,9 @@ public class SourceFile
 ////////////////
 Set<Set<UnifyPair>> xConstraints = unifyConstraints.cartesianProduct();

+//convert the sets into lists:
+//Set<List<UnifyPair>> allUnifiedConstraints = xConstraints.stream().map((set)-> new ArrayList<>(set)).collect(Menge::new, Menge::add, Menge::addAll);;

 typinferenzLog.debug("Finite Closure: "+finiteClosure, Section.TYPEINFERENCE);
 typinferenzLog.debug("Karthesisches Produkt der Constraints: "+xConstraints, Section.TYPEINFERENCE);
@@ -286,74 +406,7 @@ public class SourceFile
 boolean unifyFail = true;
 for(Set<UnifyPair> constraints : xConstraints){
 //go through all possibilities produced by the Cartesian product:
-//Menge<Menge<Pair>> result = new Menge<Menge<Pair>>();
-
-//IDEA: build connected components of pairs that share variables
-//and unify only those connected components in steps 1 - 5
-/*
-//Step 1: collect all variables occurring in the pairs
-Menge<Menge<TypePlaceholder>> constraintsclonevars = constraints.stream().map(p -> {Menge<TypePlaceholder> TPHs = new Menge<>();
-TPHs.addAll(p.TA1.getInvolvedTypePlaceholder());
-TPHs.addAll(p.TA2.getInvolvedTypePlaceholder());
-return TPHs;}
-).collect(Menge::new, Menge::add, Menge::addAll);
-
-//Step 2: intersect each element with every element of vars and then merge the indices
-//indexeset then contains the sets of indices that have to be unified together
-Menge<Menge<Integer>> indexeset = new Menge<>();
-if (constraintsclonevars != null && constraintsclonevars.size()>0) {
-indexeset = Unify.schnitt(constraintsclonevars);
-}
-
-//Step 3: map the indices back to their elements
-// streamconstraintsclone then contains the sets of pairs that have to be unified
-Stream<Menge<MPair>> streamconstraintsclone = indexeset.stream().map(x -> x.stream()
-.map(i -> constraintsClone.elementAt(i))
-.<Menge<MPair>>collect(Menge::new, Menge::add, Menge::addAll));
-//Menge<Menge<Pair>> vecconstraintsclone = streamconstraintsclone.collect(Menge::new, Menge::add, Menge::addAll);
-//System.out.println();
-//Step 4: unification
-Set<Set<Set<MPair>>> vecunifyResult =
-//streamconstraintsclone.map(x -> Unify.unify(x, finiteClosure)).collect(Menge::new, Menge::add, Menge::addAll);
-//DEBUG variant
-streamconstraintsclone.map(x ->
-{ Set<Set<MPair>> z = new Unify().unify(x, finiteClosure);
-return z;
-}
-).collect(Menge::new, Menge::add, Menge::addAll);
-
-
-//card gives the cardinality of the unified sets
-Menge<Integer> card = vecunifyResult.stream().map(x -> x.size()).collect(Menge::new, Menge::add, Menge::addAll);
-;//.reduce(1,(a,b) -> { if ((a > 0) && (b > 0)) return (a * b); else return 1; });
-
-//Step 5: build the Cartesian product
-//should be removed again; continue working with:
-//[[x_1 -> t_1, x_2 -> t2], [x_1 -> t'_1, x_2 -> t'_2]] x ... x [[x_n -> t_1n], [x_n -> t2n], [x_n -> t3n]]
-Set<Set<Pair>> cardprodret_start = new Menge<>();
-cardprodret_start.add(new Menge<Pair>());
-
-//Cartesian product with left shift
-Set<Set<Pair>> unifyResult = vecunifyResult.stream().reduce(cardprodret_start, (x, y) -> {
-Set<Set<Pair>> cardprodret= new Menge<>();
-if (y.size() > 0) {
-//System.out.println(y);
-//Menge<Menge<Pair>> cardprodretold = x;
-//cardprodret = new Menge<>();
-for(int j = 0; j < x.size(); j++) {
-for (int k = 0; k < y.size(); k++){
-Set<Pair> help = new Menge<>();
-help.addAll(y.elementAt(k));
-help.addAll(x.elementAt(j));
-cardprodret.add(help);
-}
-}
-}
-else
-return new Menge<>(); //no unified result, so the overall result becomes []
-return cardprodret;
-});
-*/
 typinferenzLog.debug("\nUnifiziere Constraints:\n"+constraints, Section.TYPEINFERENCE);
 typinferenzLog.debug("\nFC:\n"+finiteClosure, Section.TYPEINFERENCE);
 long start = System.currentTimeMillis();
@@ -361,7 +414,7 @@ public class SourceFile
 long time = System.currentTimeMillis()-start;
 typinferenzLog.debug("\nErgebnis der Unifizierung:\n"+unifyResult, Section.TYPEINFERENCE);
 typinferenzLog.debug("\nAnzahl Lösungen:\n"+unifyResult.size(), Section.TYPEINFERENCE);
-typinferenzLog.debug("\nZeit für Unifizierung: "+time + "ms", Section.TYPEINFERENCE);
+//typinferenzLog.debug("\nZeit für Unifizierung: "+time + "ms", Section.TYPEINFERENCE);


 Menge<Menge<Pair>> convertedResult = unifyResult.parallelStream().<Menge<Pair>>map((Set<UnifyPair> resultSet)->{
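Note: both the new cartesianProduct method and the commented-out block deleted above combine the per-component unification results with the same "Cartesian product with left shift" reduce. A self-contained sketch of that reduction over plain JDK sets (the project uses Menge and UnifyPair instead of the stand-ins here):

import java.util.HashSet;
import java.util.List;
import java.util.Set;

// Sketch of the "Cartesian product with left shift" reduction: each entry of
// componentResults is the solution set of one connected component, and the
// reduce folds one component at a time into the running product. An empty
// component collapses the whole result to [], mirroring the early return in the diff.
public class CartesianProductSketch {

    static <T> Set<Set<T>> cartesian(List<Set<Set<T>>> componentResults) {
        Set<Set<T>> start = new HashSet<>();
        start.add(new HashSet<>());                 // neutral element: one empty solution
        return componentResults.stream().reduce(start, (x, y) -> {
            if (y.isEmpty()) {
                return new HashSet<>();             // no solution for this component
            }
            Set<Set<T>> out = new HashSet<>();
            for (Set<T> xElement : x) {
                for (Set<T> yElement : y) {
                    Set<T> help = new HashSet<>(xElement);
                    help.addAll(yElement);          // merge one solution from each side
                    out.add(help);
                }
            }
            return out;
        });
    }

    public static void main(String[] args) {
        // Two components with 2 and 3 solutions each combine to 2 * 3 = 6 solutions.
        List<Set<Set<String>>> results = List.of(
                Set.of(Set.of("x=Integer"), Set.of("x=Number")),
                Set.of(Set.of("y=String"), Set.of("y=Object"), Set.of("y=CharSequence")));
        System.out.println(cartesian(results).size());  // 6
    }
}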
@@ -1,5 +1,7 @@
 package de.dhbwstuttgart.typeinference.unify.model;

+import java.util.ArrayList;
+import java.util.Collection;
 import java.util.Set;

 import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
@@ -81,4 +83,5 @@ public final class ExtendsType extends WildcardType {
 return "? extends " + wildcardedType;
 }
+

 }
@@ -1,5 +1,7 @@
 package de.dhbwstuttgart.typeinference.unify.model;

+import java.util.ArrayList;
+import java.util.Collection;
 import java.util.HashSet;
 import java.util.Random;
 import java.util.Set;
@@ -106,4 +108,12 @@ public final class PlaceholderType extends UnifyType{

 return ((PlaceholderType) obj).getName().equals(typeName);
 }
+
+
+	@Override
+	public Collection<? extends PlaceholderType> getInvolvedPlaceholderTypes() {
+		ArrayList<PlaceholderType> ret = new ArrayList<>();
+		ret.add(this);
+		return ret;
+	}
 }
@@ -1,6 +1,8 @@
 package de.dhbwstuttgart.typeinference.unify.model;

+import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.Iterator;

 import de.dhbwstuttgart.typeinference.Menge;
@@ -168,5 +170,13 @@ public final class TypeParams implements Iterable<UnifyType>{
 res += t + ",";
 return "<" + res.substring(0, res.length()-1) + ">";
 }
+
+	public Collection<? extends PlaceholderType> getInvolvedPlaceholderTypes() {
+		ArrayList<PlaceholderType> ret = new ArrayList<>();
+		for(UnifyType t : typeParams){
+			ret.addAll(t.getInvolvedPlaceholderTypes());
+		}
+		return ret;
+	}
 }
@@ -1,5 +1,9 @@
 package de.dhbwstuttgart.typeinference.unify.model;

+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
 /**
 * A pair which contains two types and an operator, e.g. (Integer <. a).
 * @author Florian Steurer
@@ -83,6 +87,13 @@ public class UnifyPair {
 public String toString() {
 return "(" + lhs + " " + pairOp + " " + rhs + ")";
 }
+
+	public List<? extends PlaceholderType> getInvolvedPlaceholderTypes() {
+		ArrayList<PlaceholderType> ret = new ArrayList<>();
+		ret.addAll(lhs.getInvolvedPlaceholderTypes());
+		ret.addAll(rhs.getInvolvedPlaceholderTypes());
+		return ret;
+	}
 }

@@ -1,5 +1,8 @@
 package de.dhbwstuttgart.typeinference.unify.model;

+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
 import java.util.Set;

 import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
@@ -87,4 +90,10 @@ public abstract class UnifyType {

 return typeName + params;
 }
+
+	public Collection<? extends PlaceholderType> getInvolvedPlaceholderTypes() {
+		ArrayList<PlaceholderType> ret = new ArrayList<>();
+		ret.addAll(typeParams.getInvolvedPlaceholderTypes());
+		return ret;
+	}
 }
@@ -1,5 +1,8 @@
 package de.dhbwstuttgart.typeinference.unify.model;

+import java.util.ArrayList;
+import java.util.Collection;
+
 /**
 * A wildcard type that is either an ExtendsType or a SuperType.
 * @author Florian Steurer
@@ -53,4 +56,12 @@ public abstract class WildcardType extends UnifyType {
 WildcardType other = (WildcardType) obj;
 return other.getWildcardedType().equals(wildcardedType);
 }
+
+
+	@Override
+	public Collection<? extends PlaceholderType> getInvolvedPlaceholderTypes() {
+		ArrayList<PlaceholderType> ret = new ArrayList<>();
+		ret.addAll(wildcardedType.getInvolvedPlaceholderTypes());
+		return ret;
+	}
 }
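Note: the model classes above all gain getInvolvedPlaceholderTypes(): composite types concatenate the placeholders of their parts, while PlaceholderType reports itself. A minimal sketch of that recursive collection pattern with stand-in classes (not the project's UnifyType hierarchy):

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

// Stand-in hierarchy: every node reports the placeholder leaves it involves;
// composites concatenate their children's results, leaves report themselves —
// the same shape as the getInvolvedPlaceholderTypes() overrides added in this commit.
abstract class NodeSketch {
    Collection<LeafSketch> involvedLeaves() {
        return new ArrayList<>();                   // default: nothing involved
    }
}

class LeafSketch extends NodeSketch {
    final String name;
    LeafSketch(String name) { this.name = name; }
    @Override Collection<LeafSketch> involvedLeaves() {
        return List.of(this);                       // a placeholder reports itself
    }
}

class CompositeSketch extends NodeSketch {
    final List<NodeSketch> parts;
    CompositeSketch(NodeSketch... parts) { this.parts = List.of(parts); }
    @Override Collection<LeafSketch> involvedLeaves() {
        ArrayList<LeafSketch> ret = new ArrayList<>();
        for (NodeSketch p : parts) {
            ret.addAll(p.involvedLeaves());         // concatenate the parts' placeholders
        }
        return ret;
    }
}

class InvolvedLeavesDemo {
    public static void main(String[] args) {
        NodeSketch t = new CompositeSketch(new LeafSketch("a"),
                new CompositeSketch(new LeafSketch("b"), new LeafSketch("a")));
        System.out.println(t.involvedLeaves().size()); // 3 — duplicates are kept, as with the list-based variant in the diff
    }
}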