forked from JavaTX/JavaCompilerCore
modified: ../src/main/java/de/dhbwstuttgart/core/JavaTXCompiler.java
modified: ../src/main/java/de/dhbwstuttgart/typeinference/constraints/Constraint.java
modified: ../src/main/java/de/dhbwstuttgart/typeinference/typeAlgo/TYPEStmt.java
modified: ../src/main/java/de/dhbwstuttgart/typeinference/unify/RuleSet.java
modified: ../src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
modified: ../src/main/java/de/dhbwstuttgart/typeinference/unify/model/OrderingUnifyPair.java
This commit is contained in:
parent b83e2c10e3
commit 9d4980d9a1
src/main/java/de/dhbwstuttgart/core/JavaTXCompiler.java
@@ -562,11 +562,11 @@ public class JavaTXCompiler {
   final ConstraintSet<Pair> cons = getConstraints();
   Set<Set<UnifyPair>> results = new HashSet<>();
   try {
-    Writer logFile = new OutputStreamWriter(new NullOutputStream());
+    Writer logFile = //new OutputStreamWriter(new NullOutputStream());
     // new FileWriter(new
     // File(System.getProperty("user.dir")+"/src/test/resources/logFiles/"+"log_"+sourceFiles.keySet().iterator().next().getName()));
-    //new FileWriter(new File(System.getProperty("user.dir") + "/logFiles/" + "log_"
-    // + sourceFiles.keySet().iterator().next().getName()));
+    new FileWriter(new File(System.getProperty("user.dir") + "/logFiles/" + "log_"
+      + sourceFiles.keySet().iterator().next().getName()));
     IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses, logFile, classLoader);
     System.out.println(finiteClosure);
     ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(cons);
src/main/java/de/dhbwstuttgart/typeinference/constraints/Constraint.java
@@ -26,6 +26,6 @@ public class Constraint<A> extends HashSet<A> {
   }

   public String toString() {
-    return super.toString() + " isInherited = " + isInherited;
+    return super.toString() + " isInherited = " + isInherited + "\n";
   }
 }
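For orientation, here is a minimal sketch of the Constraint class as it can be reconstructed from the fragments in this commit (the class header above, the boolean constructor and setIsInherited/isInherited calls in the TYPEStmt and TypeUnifyTask hunks below); the actual class in de.dhbwstuttgart.typeinference.constraints may carry more state. The change in this hunk itself only appends a newline so that logged sets of constraints print one constraint per line.

    import java.util.HashSet;

    // Assumed shape, not the project source: Constraint<A> is a HashSet<A>
    // that additionally remembers whether it stems from an inherited method.
    public class Constraint<A> extends HashSet<A> {
        private boolean isInherited = false;   // assumed default

        public Constraint() { }
        public Constraint(boolean isInherited) { this.isInherited = isInherited; }

        public void setIsInherited(boolean isInherited) { this.isInherited = isInherited; }
        public boolean isInherited() { return isInherited; }

        @Override
        public String toString() {
            // the edit above: trailing "\n" so each constraint ends its own line in the logs
            return super.toString() + " isInherited = " + isInherited + "\n";
        }
    }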
src/main/java/de/dhbwstuttgart/typeinference/typeAlgo/TYPEStmt.java
@@ -169,14 +169,15 @@ public class TYPEStmt implements StatementVisitor{
   for(MethodAssumption m : this.getMethods(methodCall.name, methodCall.arglist, info)){
     GenericsResolver resolver = getResolverInstance();
     methodConstraints.add(generateConstraint(methodCall, m, info, resolver));
+    /* geloescht PL 2020-05-01
     resolver = getResolverInstance();
     Constraint<Pair> oneMethodConstraint = generateConstraint(methodCall, m, info, resolver);
     oneMethodConstraint = oneMethodConstraint.stream().map(x -> (x.TA1 instanceof TypePlaceholder && x.GetOperator() == PairOperator.EQUALSDOT &&
       !(x.TA2 instanceof TypePlaceholder)) ?
       new Pair(x.TA1, new ExtendsWildcardType(x.TA2, x.TA2.getOffset()), PairOperator.EQUALSDOT) :
-      x).collect(Collectors.toCollection(Constraint::new));
-    oneMethodConstraint.setIsInherited(m.isInherited());
+      x).collect(Collectors.toCollection(() -> new Constraint<Pair>(true)));
     methodConstraints.add(oneMethodConstraint);
+    */
   }
   if(methodConstraints.size()<1){
     throw new TypeinferenceException("Methode "+methodCall.name+" ist nicht vorhanden!",methodCall.getOffset());
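Although the whole block above ends up inside a /* geloescht */ comment, the edit from Collectors.toCollection(Constraint::new) to Collectors.toCollection(() -> new Constraint<Pair>(true)) uses the same idiom that appears elsewhere in this commit: the supplier passed to toCollection decides which Constraint constructor runs. A toy illustration, compiled together with the Constraint sketch above (String elements stand in for the project's Pair type, which is not shown here):

    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    class CollectorSupplierDemo {
        public static void main(String[] args) {
            List<String> pairs = Arrays.asList("a =. b", "c =. d");   // stand-ins for Pair
            // default constructor: isInherited stays false
            Constraint<String> plain = pairs.stream()
                    .collect(Collectors.toCollection(Constraint::new));
            // explicit supplier: the collected constraint is marked as inherited
            Constraint<String> inherited = pairs.stream()
                    .collect(Collectors.toCollection(() -> new Constraint<>(true)));
            System.out.print(plain);
            System.out.print(inherited);
        }
    }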
src/main/java/de/dhbwstuttgart/typeinference/unify/RuleSet.java
@@ -681,14 +681,6 @@ public class RuleSet implements IRuleSet{

   Function<? super Constraint<UnifyPair>,? extends Constraint<UnifyPair>> applyUni = b -> b.stream().map(
     x -> uni.apply(pair,x)).collect(Collectors.toCollection(() -> new Constraint<UnifyPair>(b.isInherited())));
-  List<Set<Set<UnifyPair>>> oderConstraintsRet = new ArrayList<>();
-  for(Set<Constraint<UnifyPair>> oc : oderConstraints) {
-    //Set<Set<UnifyPair>> ocRet = new HashSet<>();
-    //for(Set<UnifyPair> cs : oc) {
-    Set<Set<UnifyPair>> csRet = oc.stream().map(applyUni).collect(Collectors.toCollection(HashSet::new));
-    oderConstraintsRet.add(csRet);
-    //}
-  }
   oderConstraints.replaceAll(oc -> oc.stream().map(applyUni).collect(Collectors.toCollection(HashSet::new)));
   /*
   oderConstraints = oderConstraints.stream().map(
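The RuleSet change drops the loop that collected the mapped oder-constraints into a separate oderConstraintsRet list and keeps only the in-place List.replaceAll call that was already there. A self-contained toy showing the same pattern (integers instead of UnifyPair sets; names are illustrative only):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    class ReplaceAllDemo {
        public static void main(String[] args) {
            List<List<Integer>> oderConstraints = new ArrayList<>();
            oderConstraints.add(new ArrayList<>(Arrays.asList(1, 2)));
            oderConstraints.add(new ArrayList<>(Arrays.asList(3)));
            // map every alternative in place instead of building a second result list
            oderConstraints.replaceAll(oc -> oc.stream()
                    .map(x -> x + 1)
                    .collect(Collectors.toCollection(ArrayList::new)));
            System.out.println(oderConstraints);   // [[2, 3], [4]]
        }
    }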
src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
@@ -34,6 +34,7 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.ISetOperations;
 import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify;
 import de.dhbwstuttgart.typeinference.unify.model.ExtendsType;
 import de.dhbwstuttgart.typeinference.unify.model.FunNType;
+import de.dhbwstuttgart.typeinference.unify.model.OrderingExtend;
 import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
 import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
 import de.dhbwstuttgart.typeinference.unify.model.ReferenceType;
@@ -105,7 +106,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {

   protected IFiniteClosure fc;

-  protected Ordering<Set<UnifyPair>> oup;
+  protected OrderingExtend<Set<UnifyPair>> oup;

   protected boolean parallel;

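The field type changes from Ordering to the new OrderingExtend because later hunks call oup.smallerThan, oup.smallerEqThan, oup.greaterThan and oup.greaterEqThan in addition to the existing oup.compare and oup.max. A sketch of what OrderingExtend presumably declares, inferred only from those call sites; the real class in de.dhbwstuttgart.typeinference.unify.model is not shown in this diff, so the name OrderingExtendSketch and the exact signatures are assumptions.

    import java.util.List;

    // Inferred sketch; compare/max are assumed to come from the Ordering base type.
    public interface OrderingExtendSketch<T> {
        List<T> smallerThan(T elem, List<T> elems);    // all list members strictly below elem
        List<T> smallerEqThan(T elem, List<T> elems);  // below or equal to elem
        List<T> greaterThan(T elem, List<T> elems);    // strictly above elem
        List<T> greaterEqThan(T elem, List<T> elems);  // above or equal to elem
    }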
@@ -173,9 +174,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
   thNo = totalnoOfThread;
   writeLog("thNo2 " + thNo);
   try {
-    this.logFile = new OutputStreamWriter(new NullOutputStream());
+    this.logFile = //new OutputStreamWriter(new NullOutputStream());
     //new FileWriter(new File(System.getProperty("user.dir")+"/src/test/resources/logFiles/"+"Thread_"+thNo));
-    //new FileWriter(new File(System.getProperty("user.dir")+"/logFiles/"+"Thread_"+thNo));
+    new FileWriter(new File(System.getProperty("user.dir")+"/logFiles/"+"Thread_"+thNo));
     logFile.write("");
   }
   catch (IOException e) {
@@ -630,6 +631,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
     oderConstraint = true;
   }

+  UnifyPair oderRec = null;
   if (oderConstraint) {
     if (printtag) System.out.println("nextSetasList " + nextSetasList);
     Optional<Integer> optVariance =
@@ -691,9 +693,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
   /* sameEqSet-Bestimmung Ende */

   Set<UnifyPair> a = null;
+  UnifyPair a_rec = null;
   while (nextSetasList.size() > 0) { //(nextSetasList.size() != 0) {
     Set<UnifyPair> a_last = a;
     List<Set<UnifyPair>> nextSetasListRest = new ArrayList<>();
+    List<Set<UnifyPair>> nextSetasListOderConstraints = new ArrayList<>();
     //List<Set<UnifyPair>> nextSetasListRestMin = new ArrayList<>();
     //List<Set<UnifyPair>> nextSetasListRestOder = new ArrayList<>();
     writeLog("qextSet: " + nextSet.toString());
@@ -701,6 +705,47 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
     if (variance == 1) {
       a = oup.max(nextSetasList.iterator());
       nextSetasList.remove(a);
+      if (oderConstraint && nextSetasListOderConstraints.isEmpty()) {
+        Set<UnifyPair> a_final = a;
+        Optional<UnifyPair> opt_a_rec = a
+          .stream()
+          .filter(x -> x.getGroundBasePair().getLhsType() instanceof PlaceholderType &&
+            ! (x.getRhsType() instanceof PlaceholderType) &&
+            x.getPairOp() == PairOperator.EQUALSDOT).findAny();
+        if (opt_a_rec.isPresent()) {
+          a_rec = opt_a_rec.get();
+          PlaceholderType phd;
+          if ((phd = ((PlaceholderType)a_rec.getGroundBasePair().getLhsType())).isWildcardable()) {
+            Set<UnifyPair> a_super = a.stream()
+              .map(x -> new UnifyPair(x.getLhsType(), x.getRhsType(), x.getPairOp()))
+              .collect(Collectors.toCollection(() -> new Constraint<>(((Constraint<UnifyPair>)a_final).isInherited())));
+            a_super.remove(a_rec);
+            a_super.add(new UnifyPair(a_rec.getLhsType(), new SuperType(a_rec.getRhsType()), a_rec.getPairOp()));
+            Set<UnifyPair> a_extends = a.stream()
+              .map(x -> new UnifyPair(x.getLhsType(), x.getRhsType(), x.getPairOp()))
+              .collect(Collectors.toCollection(() -> new Constraint<>(((Constraint<UnifyPair>)a_final).isInherited())));
+            a_extends.remove(a_rec);
+            a_extends.add(new UnifyPair(a_rec.getLhsType(), new ExtendsType(a_rec.getRhsType()), a_rec.getPairOp()));
+            if (phd.isInnerType()) {
+              nextSetasList.add(a_super);
+              nextSetasListOderConstraints.add(a_super);
+              nextSetasList.add(a);
+              nextSetasListOderConstraints.add(a);
+              a = a_extends;
+            }
+            else {
+              nextSetasList.add(a_extends);
+              nextSetasListOderConstraints.add(a_extends);
+              nextSetasList.add(a);
+              nextSetasListOderConstraints.add(a);
+              a = a_super;
+            }
+          }
+        }
+      }
+      else {
+        nextSetasListOderConstraints.remove(a);
+      }
       nextSetasListRest = new ArrayList<>(nextSetasList);
       Iterator<Set<UnifyPair>> nextSetasListItRest = new ArrayList<Set<UnifyPair>>(nextSetasListRest).iterator();
       while (nextSetasListItRest.hasNext()) {
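Summarising the branch added above: when an oder-constraint alternative a contains a ground pair tph =. T whose left side is a wildcardable placeholder and whose right side is not a placeholder, two wildcard variants of a are also enqueued, one with ? super T and one with ? extends T, and the extends or super variant is processed first depending on phd.isInnerType(). A condensed restatement as a hypothetical helper; it reuses only calls that appear in the diff (UnifyPair, SuperType, ExtendsType, Constraint from the project), but the helper itself and its name do not exist in the code base.

    // Hypothetical helper, for illustration only; depends on the project's
    // UnifyPair, SuperType, ExtendsType and Constraint classes.
    static List<Set<UnifyPair>> wildcardVariants(Set<UnifyPair> a, UnifyPair a_rec, boolean inherited) {
        Set<UnifyPair> a_super = a.stream()
                .map(x -> new UnifyPair(x.getLhsType(), x.getRhsType(), x.getPairOp()))
                .collect(Collectors.toCollection(() -> new Constraint<>(inherited)));
        a_super.remove(a_rec);
        a_super.add(new UnifyPair(a_rec.getLhsType(), new SuperType(a_rec.getRhsType()), a_rec.getPairOp()));

        Set<UnifyPair> a_extends = a.stream()
                .map(x -> new UnifyPair(x.getLhsType(), x.getRhsType(), x.getPairOp()))
                .collect(Collectors.toCollection(() -> new Constraint<>(inherited)));
        a_extends.remove(a_rec);
        a_extends.add(new UnifyPair(a_rec.getLhsType(), new ExtendsType(a_rec.getRhsType()), a_rec.getPairOp()));

        return Arrays.asList(a_super, a_extends);   // caller picks the processing order via phd.isInnerType()
    }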
@@ -1283,22 +1328,55 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
   writeLog("aParDef: " + aParDef.toString());
   aParDef.add(a);
   Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator();
-  while(aParDefIt.hasNext()) {
-    Set<UnifyPair> a_new = aParDefIt.next();
-    while (nextSetasListIt.hasNext()) {
-      Set<UnifyPair> a_next = nextSetasListIt.next();
-      if (a_new.equals(a_next) ||
-        ((oup.compare(a_new, a_next) == 1) &&
-        (!oderConstraint || ((Constraint)a_next).isInherited()))) {
-        writeLog("Removed: " + a_next.toString());
-        nextSetasList.remove(a_next);
-      }
-      else {
-        writeLog("Not Removed: " + a_next.toString());
-        System.out.println("");
+  if (oderConstraint) {
+    nextSetasList.removeAll(nextSetasListOderConstraints);
+    nextSetasListOderConstraints = new ArrayList<>();
+    while(aParDefIt.hasNext()) {
+      Set<UnifyPair> a_new = aParDefIt.next();
+      List<Set<UnifyPair>> smallerSetasList = oup.smallerThan(a_new, nextSetasList);
+      List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
+        .filter(x -> !((Constraint<UnifyPair>)x).isInherited())
+        .collect(Collectors.toCollection(ArrayList::new));
+      List<Set<UnifyPair>> notErased = new ArrayList<>();
+      notInherited.stream().forEach(x -> { notErased.addAll(oup.smallerEqThan(x, smallerSetasList)); });
+      List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList);
+      erased.removeAll(notErased);
+      nextSetasList.removeAll(erased);
+
+      writeLog("Removed: " + erased);
+
+      writeLog("Not Removed: " + nextSetasList);
+
     }
   }
-  }
+  else {
+    while(aParDefIt.hasNext()) {
+      //nextSetasListIt = nextSetasList.iterator(); Sollte eingefuegt werden PL 2020-04-28
+      Set<UnifyPair> a_new = aParDefIt.next();
+      List<Set<UnifyPair>> erased = oup.smallerEqThan(a_new, nextSetasList);
+
+      /* loeschen
+      while (nextSetasListIt.hasNext()) {
+        Set<UnifyPair> a_next = nextSetasListIt.next();
+        if (a_new.equals(a_next) ||
+          ((oup.compare(a_new, a_next) == 1) &&
+          (!oderConstraint || ((Constraint)a_next).isInherited()))) {
+          writeLog("Removed: " + a_next.toString());
+          nextSetasList.remove(a_next);
+        }
+        else {
+          writeLog("Not Removed: " + a_next.toString());
+          System.out.println("");
+        }
+      }
+      Ende loeschen */
+      nextSetasList.removeAll(erased);
+
+      writeLog("Removed: " + erased);
+
+      writeLog("Not Removed: " + nextSetasList);
+    }
+  }
   }
   else { if (variance == -1) {
     /* vorgezogen vor das if
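The rewritten pruning above no longer walks nextSetasList pair by pair; it asks the ordering for everything smaller than the chosen alternative and erases those candidates, except that candidates lying at or below a non-inherited candidate survive. A self-contained toy with integers (odd numbers stand in for non-inherited alternatives; the ordering on integers stands in for oup) that mirrors the smallerThan/smallerEqThan steps:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    class PruneDemo {
        public static void main(String[] args) {
            List<Integer> nextSetasList = new ArrayList<>(Arrays.asList(1, 2, 3, 4, 6));
            int a_new = 5;                                     // the alternative just processed
            // "smallerThan": every remaining alternative strictly below a_new
            List<Integer> smaller = nextSetasList.stream()
                    .filter(x -> x < a_new).collect(Collectors.toList());
            // odd numbers play the role of non-inherited constraints
            List<Integer> notInherited = smaller.stream()
                    .filter(x -> x % 2 != 0).collect(Collectors.toList());
            // keep everything that is <= some non-inherited candidate ("smallerEqThan")
            List<Integer> notErased = new ArrayList<>();
            notInherited.forEach(x -> smaller.stream().filter(y -> y <= x).forEach(notErased::add));
            List<Integer> erased = new ArrayList<>(smaller);
            erased.removeAll(notErased);
            nextSetasList.removeAll(erased);
            System.out.println("erased = " + erased + ", kept = " + nextSetasList); // erased = [4], kept = [1, 2, 3, 6]
        }
    }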
@@ -1332,8 +1410,32 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
   writeLog("aParDef: " + aParDef.toString());
   aParDef.add(a);
   Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator();
-  while(aParDefIt.hasNext()) {
-    Set<UnifyPair> a_new = aParDefIt.next();
+  if (oderConstraint) {
+    while(aParDefIt.hasNext()) {
+      Set<UnifyPair> a_new = aParDefIt.next();
+      List<Set<UnifyPair>> greaterSetasList = oup.greaterThan(a_new, nextSetasList);
+      List<Set<UnifyPair>> notInherited = greaterSetasList.stream()
+        .filter(x -> !((Constraint<UnifyPair>)x).isInherited())
+        .collect(Collectors.toCollection(ArrayList::new));
+      List<Set<UnifyPair>> notErased = new ArrayList<>();
+      notInherited.stream().forEach(x -> { notErased.addAll(oup.greaterThan(x, greaterSetasList)); });
+      List<Set<UnifyPair>> erased = new ArrayList<>(greaterSetasList);
+      erased.removeAll(notErased);
+      nextSetasList.removeAll(erased);
+
+      writeLog("Removed: " + erased);
+
+      writeLog("Not Removed: " + nextSetasList);
+
+    }
+  }
+  else {
+    while(aParDefIt.hasNext()) {
+      //nextSetasListIt = nextSetasList.iterator(); Sollte eingefuegt werden PL 2020-04-28
+      Set<UnifyPair> a_new = aParDefIt.next();
+      List<Set<UnifyPair>> erased = oup.greaterEqThan(a_new, nextSetasList);
+
+      /* zu loeschen
       while (nextSetasListIt.hasNext()) {
         Set<UnifyPair> a_next = nextSetasListIt.next();
         if (a_new.equals(a_next) ||
@@ -1345,16 +1447,28 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
         else {
           System.out.println("");
           writeLog("Not Removed: " + a_next.toString());
+        }
       }
+      Ende zu loeschen */
+      nextSetasList.removeAll(erased);
+
+      writeLog("Removed: " + erased);
+
+      writeLog("Not Removed: " + nextSetasList);
+    }
   }
   }
-  }
   else { if (variance == 0) {
     writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
     if (!oderConstraint) {
       break;
     }
-    else {//TODO: Hier muessen alle kleineren und größeren Elemente von a geloescht werden
+    else {
+      nextSetasList = nextSetasList.stream()
+        .filter(x -> !((Constraint<UnifyPair>)x).isInherited())
+        .collect(Collectors.toCollection(ArrayList::new));
+      /*
+      //TODO: Hier muessen alle kleineren und größeren Elemente von a geloescht werden
       writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
       writeLog("aParDef: " + aParDef.toString());
       aParDef.add(a);
@@ -1395,8 +1509,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
       }
     }
   }
+  */
   }

   }
   else { if (variance == 2) {
     /* vorgezogen vor das if
src/main/java/de/dhbwstuttgart/typeinference/unify/model/OrderingUnifyPair.java
@@ -22,7 +22,7 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;



-public class OrderingUnifyPair extends Ordering<Set<UnifyPair>> {
+public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {

   protected IFiniteClosure fc;
