Compare commits
14 Commits
KPS2025
...
5024a02447
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
5024a02447 | ||
|
|
6c2d97b770 | ||
|
|
426c2916d3 | ||
|
|
f722a00fbb | ||
|
|
32797c9b9f | ||
|
|
87f655c85a | ||
|
|
613dceae1d | ||
|
|
81cac06e16 | ||
|
|
a47d5bc024 | ||
|
|
e5916d455a | ||
|
|
ebb639e72e | ||
|
|
f0a4a51ce6 | ||
|
|
7442880452 | ||
|
|
c4dc3b4245 |
36
independentTest.sh
Executable file
36
independentTest.sh
Executable file
@@ -0,0 +1,36 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
|
|
||||||
|
REPO="https://gitea.hb.dhbw-stuttgart.de/f.holzwarth/JavaCompilerCore.git"
|
||||||
|
TDIR="./testBuild"
|
||||||
|
|
||||||
|
rm -rf "$TDIR" 2>/dev/null
|
||||||
|
mkdir $TDIR
|
||||||
|
|
||||||
|
cd $TDIR
|
||||||
|
git clone $REPO .
|
||||||
|
git checkout feat/master-unify-webservice-dev
|
||||||
|
git checkout 6c2d97b7703d954e4a42eef3ec374bcf313af75c # 2:13
|
||||||
|
# git checkout f722a00fbb6e69423d48a890e4a6283471763e64 # 1:35
|
||||||
|
# git checkout f0a4a51ce65639ce9a9470ff0fdb538fdf9c02cc # 2:19
|
||||||
|
# git checkout 1391206dfe59263cdb22f93371cfd1dd5465d97f # 1:29
|
||||||
|
|
||||||
|
date "+%Y.%m.%d %H:%M:%S"
|
||||||
|
|
||||||
|
# mvn clean compile -DskipTests package
|
||||||
|
## prefix each stderr line with " | "
|
||||||
|
# exec 2> >(trap "" INT TERM; sed 's/^/ | /' >&2)
|
||||||
|
# echo -e "\nMatrix test:\n |"
|
||||||
|
# time java -jar target/JavaTXcompiler-0.1-jar-with-dependencies.jar resources/bytecode/javFiles/Matrix.jav >/dev/null;
|
||||||
|
|
||||||
|
|
||||||
|
mvn clean && mvn test
|
||||||
|
|
||||||
|
|
||||||
|
echo -e "\Cleanup... "
|
||||||
|
rm -rf "$TDIR" 2>/dev/null
|
||||||
|
|
||||||
|
echo -e "\nFinished "
|
||||||
|
date "+%Y.%m.%d %H:%M:%S"
|
||||||
|
echo -e "\n "
|
||||||
|
|
||||||
@@ -37,6 +37,7 @@ import de.dhbwstuttgart.typeinference.result.ResultSet;
|
|||||||
import de.dhbwstuttgart.typeinference.typeAlgo.TYPE;
|
import de.dhbwstuttgart.typeinference.typeAlgo.TYPE;
|
||||||
import de.dhbwstuttgart.typeinference.unify.RuleSet;
|
import de.dhbwstuttgart.typeinference.unify.RuleSet;
|
||||||
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
|
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
|
||||||
|
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||||
import de.dhbwstuttgart.typeinference.unify.distributeVariance;
|
import de.dhbwstuttgart.typeinference.unify.distributeVariance;
|
||||||
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
|
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
|
||||||
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
|
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
|
||||||
@@ -300,6 +301,7 @@ public class JavaTXCompiler {
|
|||||||
Set<Set<UnifyPair>> results = new HashSet<>();
|
Set<Set<UnifyPair>> results = new HashSet<>();
|
||||||
UnifyResultModel urm = null;
|
UnifyResultModel urm = null;
|
||||||
// urm.addUnifyResultListener(resultListener);
|
// urm.addUnifyResultListener(resultListener);
|
||||||
|
UnifyContext context = new UnifyContext(logFile, log, true, urm, usedTasks);
|
||||||
try {
|
try {
|
||||||
logFile = logFile == null ? new FileWriter(new File("log_" + sourceFiles.keySet().iterator().next().getName())) : logFile;
|
logFile = logFile == null ? new FileWriter(new File("log_" + sourceFiles.keySet().iterator().next().getName())) : logFile;
|
||||||
IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses, logFile, getClassLoader(), this);
|
IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses, logFile, getClassLoader(), this);
|
||||||
@@ -326,7 +328,7 @@ public class JavaTXCompiler {
|
|||||||
for (SourceFile f : this.sourceFiles.values()) {
|
for (SourceFile f : this.sourceFiles.values()) {
|
||||||
logFile.write(ASTTypePrinter.print(f));
|
logFile.write(ASTTypePrinter.print(f));
|
||||||
}
|
}
|
||||||
logFile.flush();
|
// logFile.flush();
|
||||||
Set<PlaceholderType> varianceTPHold;
|
Set<PlaceholderType> varianceTPHold;
|
||||||
Set<PlaceholderType> varianceTPH = new HashSet<>();
|
Set<PlaceholderType> varianceTPH = new HashSet<>();
|
||||||
varianceTPH = varianceInheritanceConstraintSet(unifyCons);
|
varianceTPH = varianceInheritanceConstraintSet(unifyCons);
|
||||||
@@ -342,7 +344,7 @@ public class JavaTXCompiler {
|
|||||||
List<Set<Constraint<UnifyPair>>> oderConstraints = unifyCons.getOderConstraints()/*
|
List<Set<Constraint<UnifyPair>>> oderConstraints = unifyCons.getOderConstraints()/*
|
||||||
* .stream().map(x -> { Set<Set<UnifyPair>> ret = new HashSet<>(); for (Constraint<UnifyPair> y : x) { ret.add(new HashSet<>(y)); } return ret; }).collect(Collectors. toCollection(ArrayList::new))
|
* .stream().map(x -> { Set<Set<UnifyPair>> ret = new HashSet<>(); for (Constraint<UnifyPair> y : x) { ret.add(new HashSet<>(y)); } return ret; }).collect(Collectors. toCollection(ArrayList::new))
|
||||||
*/;
|
*/;
|
||||||
unify.unifyAsync(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, urm, usedTasks);
|
unify.unifyAsync(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
|
||||||
} catch (IOException e) {
|
} catch (IOException e) {
|
||||||
System.err.println("kein LogFile");
|
System.err.println("kein LogFile");
|
||||||
}
|
}
|
||||||
@@ -392,7 +394,7 @@ public class JavaTXCompiler {
|
|||||||
logFile.write("FC:\\" + finiteClosure.toString() + "\n");
|
logFile.write("FC:\\" + finiteClosure.toString() + "\n");
|
||||||
logFile.write(ASTTypePrinter.print(sf));
|
logFile.write(ASTTypePrinter.print(sf));
|
||||||
System.out.println(ASTTypePrinter.print(sf));
|
System.out.println(ASTTypePrinter.print(sf));
|
||||||
logFile.flush();
|
// logFile.flush();
|
||||||
System.out.println("Unify nach Oder-Constraints-Anpassung:" + unifyCons.toString());
|
System.out.println("Unify nach Oder-Constraints-Anpassung:" + unifyCons.toString());
|
||||||
Set<PlaceholderType> varianceTPHold;
|
Set<PlaceholderType> varianceTPHold;
|
||||||
Set<PlaceholderType> varianceTPH = new HashSet<>();
|
Set<PlaceholderType> varianceTPH = new HashSet<>();
|
||||||
@@ -415,11 +417,12 @@ public class JavaTXCompiler {
|
|||||||
UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
|
UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
|
||||||
UnifyResultListenerImpl li = new UnifyResultListenerImpl();
|
UnifyResultListenerImpl li = new UnifyResultListenerImpl();
|
||||||
urm.addUnifyResultListener(li);
|
urm.addUnifyResultListener(li);
|
||||||
unify.unifyParallel(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, urm, usedTasks);
|
UnifyContext context = new UnifyContext(logFile, log, true, urm, usedTasks);
|
||||||
|
unify.unifyParallel(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
|
||||||
System.out.println("RESULT Final: " + li.getResults());
|
System.out.println("RESULT Final: " + li.getResults());
|
||||||
System.out.println("Constraints for Generated Generics: " + " ???");
|
System.out.println("Constraints for Generated Generics: " + " ???");
|
||||||
logFile.write("RES_FINAL: " + li.getResults().toString() + "\n");
|
logFile.write("RES_FINAL: " + li.getResults().toString() + "\n");
|
||||||
logFile.flush();
|
// logFile.flush();
|
||||||
return li.getResults();
|
return li.getResults();
|
||||||
}
|
}
|
||||||
/* UnifyResultModel End */
|
/* UnifyResultModel End */
|
||||||
@@ -427,10 +430,11 @@ public class JavaTXCompiler {
|
|||||||
// Set<Set<UnifyPair>> result = unify.unify(unifyCons.getUndConstraints(),
|
// Set<Set<UnifyPair>> result = unify.unify(unifyCons.getUndConstraints(),
|
||||||
// oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons,
|
// oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons,
|
||||||
// finiteClosure));
|
// finiteClosure));
|
||||||
Set<Set<UnifyPair>> result = unify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons, finiteClosure), usedTasks);
|
UnifyContext context = new UnifyContext(logFile, log, false, new UnifyResultModel(cons, finiteClosure), usedTasks);
|
||||||
|
Set<Set<UnifyPair>> result = unify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, context);
|
||||||
System.out.println("RESULT: " + result);
|
System.out.println("RESULT: " + result);
|
||||||
logFile.write("RES: " + result.toString() + "\n");
|
logFile.write("RES: " + result.toString() + "\n");
|
||||||
logFile.flush();
|
// logFile.flush();
|
||||||
results.addAll(result);
|
results.addAll(result);
|
||||||
|
|
||||||
results = results.stream().map(x -> {
|
results = results.stream().map(x -> {
|
||||||
@@ -440,16 +444,16 @@ public class JavaTXCompiler {
|
|||||||
return y; // alle Paare a <.? b erden durch a =. b ersetzt
|
return y; // alle Paare a <.? b erden durch a =. b ersetzt
|
||||||
}).collect(Collectors.toCollection(HashSet::new)));
|
}).collect(Collectors.toCollection(HashSet::new)));
|
||||||
if (res.isPresent()) {// wenn subst ein Erg liefert wurde was veraendert
|
if (res.isPresent()) {// wenn subst ein Erg liefert wurde was veraendert
|
||||||
return new TypeUnifyTask().applyTypeUnificationRules(res.get(), finiteClosure);
|
return new TypeUnifyTask(context).applyTypeUnificationRules(res.get(), finiteClosure);
|
||||||
} else
|
} else
|
||||||
return x; // wenn nichts veraendert wurde wird x zurueckgegeben
|
return x; // wenn nichts veraendert wurde wird x zurueckgegeben
|
||||||
}).collect(Collectors.toCollection(HashSet::new));
|
}).collect(Collectors.toCollection(HashSet::new));
|
||||||
System.out.println("RESULT Final: " + results);
|
System.out.println("RESULT Final: " + results);
|
||||||
System.out.println("Constraints for Generated Generics: " + " ???");
|
System.out.println("Constraints for Generated Generics: " + " ???");
|
||||||
logFile.write("RES_FINAL: " + results.toString() + "\n");
|
logFile.write("RES_FINAL: " + results.toString() + "\n");
|
||||||
logFile.flush();
|
// logFile.flush();
|
||||||
logFile.write("PLACEHOLDERS: " + PlaceholderType.EXISTING_PLACEHOLDERS);
|
logFile.write("PLACEHOLDERS: " + PlaceholderType.EXISTING_PLACEHOLDERS);
|
||||||
logFile.flush();
|
// logFile.flush();
|
||||||
}
|
}
|
||||||
} catch (IOException e) {
|
} catch (IOException e) {
|
||||||
System.err.println("kein LogFile");
|
System.err.println("kein LogFile");
|
||||||
|
|||||||
@@ -0,0 +1,11 @@
|
|||||||
|
package de.dhbwstuttgart.exceptions;
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Eine Runtime Exception, die für den Fall genutzt wird, dass eine Unifikation abgebrochen wird.
|
||||||
|
* Durch das Werfen einer Exception können Abbrüche auch aus Methodenaufrufen heraus
|
||||||
|
* geprüft werden, da zuvor nur ein return X; stattfinden würde.
|
||||||
|
*/
|
||||||
|
public class UnifyCancelException extends RuntimeException {
|
||||||
|
|
||||||
|
}
|
||||||
@@ -194,7 +194,7 @@ public class UnifyTypeFactory {
|
|||||||
&& ((PlaceholderType)lhs).isWildcardable()
|
&& ((PlaceholderType)lhs).isWildcardable()
|
||||||
&& (rhs = ret.getLhsType()) instanceof PlaceholderType) {
|
&& (rhs = ret.getLhsType()) instanceof PlaceholderType) {
|
||||||
if (lhs.getName().equals("AQ")) {
|
if (lhs.getName().equals("AQ")) {
|
||||||
System.out.println("");
|
// System.out.println("");
|
||||||
}
|
}
|
||||||
((PlaceholderType)rhs).enableWildcardtable();
|
((PlaceholderType)rhs).enableWildcardtable();
|
||||||
}
|
}
|
||||||
@@ -203,7 +203,7 @@ public class UnifyTypeFactory {
|
|||||||
&& ((PlaceholderType)rhs).isWildcardable()
|
&& ((PlaceholderType)rhs).isWildcardable()
|
||||||
&& (lhs = ret.getLhsType()) instanceof PlaceholderType) {
|
&& (lhs = ret.getLhsType()) instanceof PlaceholderType) {
|
||||||
if (rhs.getName().equals("AQ")) {
|
if (rhs.getName().equals("AQ")) {
|
||||||
System.out.println("");
|
// System.out.println("");
|
||||||
}
|
}
|
||||||
((PlaceholderType)lhs).enableWildcardtable();
|
((PlaceholderType)lhs).enableWildcardtable();
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -0,0 +1,64 @@
|
|||||||
|
package de.dhbwstuttgart.typeinference.unify;
|
||||||
|
|
||||||
|
import java.util.HashSet;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Set;
|
||||||
|
import java.util.concurrent.RecursiveTask;
|
||||||
|
|
||||||
|
public class ConcurrentSetMergeTask<T> extends RecursiveTask<Set<T>> {
|
||||||
|
|
||||||
|
public static <E> Set<E> merge(List<Set<E>> list) {
|
||||||
|
if (list.isEmpty()) {
|
||||||
|
return new HashSet<>();
|
||||||
|
}
|
||||||
|
var task = new ConcurrentSetMergeTask<>(list, 0, list.size());
|
||||||
|
return task.compute();
|
||||||
|
}
|
||||||
|
|
||||||
|
private static final int LIST_THRESHOLD = 3;
|
||||||
|
private static final int ELEMENT_THRESHOLD = 1000;
|
||||||
|
|
||||||
|
private final List<Set<T>> list;
|
||||||
|
private final int start;
|
||||||
|
private final int end;
|
||||||
|
|
||||||
|
private ConcurrentSetMergeTask(List<Set<T>> list, int start, int end) {
|
||||||
|
this.list = list;
|
||||||
|
this.start = start;
|
||||||
|
this.end = end;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected Set<T> compute() {
|
||||||
|
int size = end - start;
|
||||||
|
|
||||||
|
int totalElements = 0;
|
||||||
|
for (int i = start+1; i < end; i++) {
|
||||||
|
totalElements += list.get(i).size();
|
||||||
|
}
|
||||||
|
|
||||||
|
System.out.println("ConcurrentSetMerge? -> " + (size <= LIST_THRESHOLD || totalElements < ELEMENT_THRESHOLD ? "true" : "false"));
|
||||||
|
|
||||||
|
|
||||||
|
// size will always be at least one
|
||||||
|
if (true || size <= LIST_THRESHOLD || totalElements < ELEMENT_THRESHOLD) {
|
||||||
|
Set<T> result = this.list.get(start);
|
||||||
|
for (int i = start+1; i < end; i++) {
|
||||||
|
result.addAll(list.get(i));
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
} else {
|
||||||
|
int mid = start + (size / 2);
|
||||||
|
ConcurrentSetMergeTask<T> leftTask = new ConcurrentSetMergeTask<>(list, start, mid);
|
||||||
|
ConcurrentSetMergeTask<T> rightTask = new ConcurrentSetMergeTask<>(list, mid, end);
|
||||||
|
|
||||||
|
leftTask.fork();
|
||||||
|
Set<T> rightResult = rightTask.compute();
|
||||||
|
Set<T> leftResult = leftTask.join();
|
||||||
|
|
||||||
|
// Merge results
|
||||||
|
leftResult.addAll(rightResult);
|
||||||
|
return leftResult;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -864,7 +864,7 @@ public class RuleSet implements IRuleSet{
|
|||||||
try {
|
try {
|
||||||
logFile.write("FUNgreater: " + pair + "\n");
|
logFile.write("FUNgreater: " + pair + "\n");
|
||||||
logFile.write("FUNred: " + result + "\n");
|
logFile.write("FUNred: " + result + "\n");
|
||||||
logFile.flush();
|
// logFile.flush();
|
||||||
}
|
}
|
||||||
catch (IOException e) {
|
catch (IOException e) {
|
||||||
System.out.println("logFile-Error");
|
System.out.println("logFile-Error");
|
||||||
@@ -960,7 +960,7 @@ public class RuleSet implements IRuleSet{
|
|||||||
try {
|
try {
|
||||||
logFile.write("FUNgreater: " + pair + "\n");
|
logFile.write("FUNgreater: " + pair + "\n");
|
||||||
logFile.write("FUNgreater: " + result + "\n");
|
logFile.write("FUNgreater: " + result + "\n");
|
||||||
logFile.flush();
|
// logFile.flush();
|
||||||
}
|
}
|
||||||
catch (IOException e) {
|
catch (IOException e) {
|
||||||
System.out.println("lofFile-Error");
|
System.out.println("lofFile-Error");
|
||||||
@@ -1010,7 +1010,7 @@ public class RuleSet implements IRuleSet{
|
|||||||
try {
|
try {
|
||||||
logFile.write("FUNgreater: " + pair + "\n");
|
logFile.write("FUNgreater: " + pair + "\n");
|
||||||
logFile.write("FUNsmaller: " + result + "\n");
|
logFile.write("FUNsmaller: " + result + "\n");
|
||||||
logFile.flush();
|
// logFile.flush();
|
||||||
}
|
}
|
||||||
catch (IOException e) {
|
catch (IOException e) {
|
||||||
System.out.println("lofFile-Error");
|
System.out.println("lofFile-Error");
|
||||||
|
|||||||
@@ -1,41 +1,28 @@
|
|||||||
package de.dhbwstuttgart.typeinference.unify;
|
package de.dhbwstuttgart.typeinference.unify;
|
||||||
|
|
||||||
import java.io.FileWriter;
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.io.Writer;
|
import java.io.Writer;
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
import java.util.concurrent.ForkJoinPool;
|
import java.util.concurrent.ForkJoinPool;
|
||||||
|
|
||||||
import de.dhbwstuttgart.core.JavaTXCompiler;
|
|
||||||
import de.dhbwstuttgart.typeinference.constraints.Constraint;
|
import de.dhbwstuttgart.typeinference.constraints.Constraint;
|
||||||
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
|
|
||||||
import de.dhbwstuttgart.typeinference.constraints.Pair;
|
|
||||||
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
|
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
|
||||||
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
|
|
||||||
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
|
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
|
||||||
|
|
||||||
public class TypeUnify {
|
public class TypeUnify {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* unify parallel ohne result modell
|
* unify parallel ohne result modell
|
||||||
* @param undConstrains
|
|
||||||
* @param oderConstraints
|
|
||||||
* @param fc
|
|
||||||
* @param logFile
|
|
||||||
* @param log
|
|
||||||
* @param cons
|
|
||||||
* @return
|
|
||||||
*/
|
*/
|
||||||
public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
|
public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
|
||||||
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
|
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, unifyContext, 0);
|
||||||
ForkJoinPool pool = new ForkJoinPool();
|
ForkJoinPool pool = this.createThreadPool();
|
||||||
pool.invoke(unifyTask);
|
pool.invoke(unifyTask);
|
||||||
Set<Set<UnifyPair>> res = unifyTask.join();
|
Set<Set<UnifyPair>> res = unifyTask.join();
|
||||||
try {
|
try {
|
||||||
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements + "\n");
|
unifyContext.logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements + "\n");
|
||||||
logFile.flush();
|
// logFile.flush();
|
||||||
}
|
}
|
||||||
catch (IOException e) {
|
catch (IOException e) {
|
||||||
System.err.println("no log-File");
|
System.err.println("no log-File");
|
||||||
@@ -45,46 +32,30 @@ public class TypeUnify {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* unify asynchron mit Rückgabe UnifyResultModel ohne dass alle results gesammelt sind
|
* unify asynchron mit Rückgabe UnifyResultModel ohne dass alle results gesammelt sind
|
||||||
* @param undConstrains
|
|
||||||
* @param oderConstraints
|
|
||||||
* @param fc
|
|
||||||
* @param logFile
|
|
||||||
* @param log
|
|
||||||
* @param cons
|
|
||||||
* @param ret
|
|
||||||
* @return
|
|
||||||
*/
|
*/
|
||||||
public UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
|
public UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
|
||||||
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
|
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, unifyContext, 0);
|
||||||
ForkJoinPool pool = new ForkJoinPool();
|
ForkJoinPool pool = this.createThreadPool();
|
||||||
pool.invoke(unifyTask);
|
pool.invoke(unifyTask);
|
||||||
return ret;
|
return unifyContext.resultModel;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* unify parallel mit Rückgabe UnifyResultModel nachdem alle results gesammelt sind
|
* unify parallel mit Rückgabe UnifyResultModel nachdem alle results gesammelt sind
|
||||||
* @param undConstrains
|
|
||||||
* @param oderConstraints
|
|
||||||
* @param fc
|
|
||||||
* @param logFile
|
|
||||||
* @param log
|
|
||||||
* @param cons
|
|
||||||
* @param ret
|
|
||||||
* @return
|
|
||||||
*/
|
*/
|
||||||
public UnifyResultModel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
|
public UnifyResultModel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
|
||||||
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
|
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, unifyContext, 0);
|
||||||
ForkJoinPool pool = new ForkJoinPool();
|
ForkJoinPool pool = this.createThreadPool();
|
||||||
pool.invoke(unifyTask);
|
pool.invoke(unifyTask);
|
||||||
Set<Set<UnifyPair>> res = unifyTask.join();
|
Set<Set<UnifyPair>> res = unifyTask.join();
|
||||||
try {
|
try {
|
||||||
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
|
unifyContext.logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
|
||||||
logFile.flush();
|
// logFile.flush();
|
||||||
}
|
}
|
||||||
catch (IOException e) {
|
catch (IOException e) {
|
||||||
System.err.println("no log-File");
|
System.err.println("no log-File");
|
||||||
}
|
}
|
||||||
return ret;
|
return unifyContext.resultModel;
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
/*
|
||||||
@@ -97,20 +68,13 @@ public class TypeUnify {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* unify sequentiell mit oderconstraints
|
* unify sequentiell mit oderconstraints
|
||||||
* @param undConstrains
|
|
||||||
* @param oderConstraints
|
|
||||||
* @param fc
|
|
||||||
* @param logFile
|
|
||||||
* @param log
|
|
||||||
* @param cons
|
|
||||||
* @return
|
|
||||||
*/
|
*/
|
||||||
public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
|
public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, UnifyContext unifyContext) {
|
||||||
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0, ret, usedTasks);
|
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, unifyContext, 0);
|
||||||
Set<Set<UnifyPair>> res = unifyTask.compute();
|
Set<Set<UnifyPair>> res = unifyTask.compute();
|
||||||
try {
|
try {
|
||||||
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
|
unifyContext.logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
|
||||||
logFile.flush();
|
// logFile.flush();
|
||||||
}
|
}
|
||||||
catch (IOException e) {
|
catch (IOException e) {
|
||||||
System.err.println("no log-File");
|
System.err.println("no log-File");
|
||||||
@@ -118,4 +82,13 @@ public class TypeUnify {
|
|||||||
return res;
|
return res;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private ForkJoinPool createThreadPool() {
|
||||||
|
return new ForkJoinPool(
|
||||||
|
Runtime.getRuntime().availableProcessors(),
|
||||||
|
ForkJoinPool.defaultForkJoinWorkerThreadFactory,
|
||||||
|
null,
|
||||||
|
false // do not use asyncMode (FIFO), as we want smaller tasks to complete first -> Improves locality and cuts small branches first
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -15,52 +15,48 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
|
|||||||
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
|
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
|
||||||
|
|
||||||
public class TypeUnify2Task extends TypeUnifyTask {
|
public class TypeUnify2Task extends TypeUnifyTask {
|
||||||
|
|
||||||
Set<Set<UnifyPair>> setToFlatten;
|
|
||||||
Set<UnifyPair> methodSignatureConstraintUebergabe;
|
|
||||||
|
|
||||||
public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks, Set<UnifyPair> methodSignatureConstraintUebergabe) {
|
Set<Set<UnifyPair>> setToFlatten;
|
||||||
super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
|
Set<UnifyPair> methodSignatureConstraintUebergabe;
|
||||||
this.setToFlatten = setToFlatten;
|
|
||||||
this.nextSetElement = nextSetElement;
|
public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, UnifyContext context, int rekTiefe, Set<UnifyPair> methodSignatureConstraintUebergabe) {
|
||||||
this.methodSignatureConstraintUebergabe = methodSignatureConstraintUebergabe;
|
super(eq, oderConstraints, fc, context, rekTiefe);
|
||||||
}
|
this.setToFlatten = setToFlatten;
|
||||||
|
this.nextSetElement = nextSetElement;
|
||||||
Set<UnifyPair> getNextSetElement() {
|
this.methodSignatureConstraintUebergabe = methodSignatureConstraintUebergabe;
|
||||||
return nextSetElement;
|
}
|
||||||
}
|
|
||||||
|
public Set<UnifyPair> getNextSetElement() {
|
||||||
@Override
|
return nextSetElement;
|
||||||
protected Set<Set<UnifyPair>> compute() {
|
}
|
||||||
if (one) {
|
|
||||||
System.out.println("two");
|
@Override
|
||||||
}
|
public Set<Set<UnifyPair>> compute() {
|
||||||
one = true;
|
if (one) {
|
||||||
Set<Set<UnifyPair>> res = unify2(setToFlatten, eq, oderConstraintsField, fc, parallel, rekTiefeField, methodSignatureConstraintUebergabe);
|
System.out.println("two");
|
||||||
|
}
|
||||||
|
one = true;
|
||||||
|
Set<Set<UnifyPair>> res = unify2(setToFlatten, eq, oderConstraintsField, fc, context.isParallel(), rekTiefeField, methodSignatureConstraintUebergabe);
|
||||||
/*if (isUndefinedPairSetSet(res)) {
|
/*if (isUndefinedPairSetSet(res)) {
|
||||||
return new HashSet<>(); }
|
return new HashSet<>(); }
|
||||||
else
|
else
|
||||||
*/
|
*/
|
||||||
//writeLog("xxx");
|
//writeLog("xxx");
|
||||||
//noOfThread--;
|
//noOfThread--;
|
||||||
synchronized (usedTasks) {
|
if (this.myIsCancelled()) {
|
||||||
if (this.myIsCancelled()) {
|
return new HashSet<>();
|
||||||
return new HashSet<>();
|
} else {
|
||||||
}
|
return res;
|
||||||
else {
|
}
|
||||||
return res;
|
}
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public void closeLogFile() {
|
|
||||||
|
|
||||||
try {
|
public void closeLogFile() {
|
||||||
logFile.close();
|
|
||||||
}
|
try {
|
||||||
catch (IOException ioE) {
|
context.logFile.close();
|
||||||
System.err.println("no log-File" + thNo);
|
} catch (IOException ioE) {
|
||||||
}
|
System.err.println("no log-File");
|
||||||
|
}
|
||||||
}
|
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,188 @@
|
|||||||
|
package de.dhbwstuttgart.typeinference.unify;
|
||||||
|
|
||||||
|
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
|
||||||
|
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
|
||||||
|
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
|
||||||
|
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.HashSet;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Optional;
|
||||||
|
import java.util.Set;
|
||||||
|
import java.util.function.Function;
|
||||||
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A collection of capsuled (and thus static) functions to split up large algorithms in TypeUnifyTask
|
||||||
|
*/
|
||||||
|
public class TypeUnifyTaskHelper {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Filter all topLevelSets for those with a single element that contain only one pair:
|
||||||
|
* a <. theta,
|
||||||
|
* theta <. a or
|
||||||
|
* a =. theta
|
||||||
|
*/
|
||||||
|
public static Set<Set<UnifyPair>> getSingleElementSets(ArrayList<Set<? extends Set<UnifyPair>>> topLevelSets) {
|
||||||
|
return topLevelSets.stream()
|
||||||
|
.filter(x -> x.size() == 1)
|
||||||
|
.map(y -> y.stream().findFirst().get()).collect(Collectors.toSet());
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Varianzbestimmung Anfang
|
||||||
|
* Oderconstraint, wenn entweder kein Basepair oder unterschiedliche Basepairs => oderConstraint = true;
|
||||||
|
* Varianz = 1 => Argumentvariable
|
||||||
|
* Varianz = -1 => Rückgabevariable
|
||||||
|
* Varianz = 0 => unklar
|
||||||
|
* Varianz = 2 => Operatoren oderConstraints
|
||||||
|
*/
|
||||||
|
public static int calculateVariance(List<Set<UnifyPair>> nextSetasList) {
|
||||||
|
Optional<Integer> xi = nextSetasList.stream().map(x -> x.stream().filter(y -> (y.getLhsType() instanceof PlaceholderType && !(y.getRhsType() instanceof PlaceholderType)))
|
||||||
|
.filter(z -> ((PlaceholderType) z.getLhsType()).getVariance() != 0)
|
||||||
|
.map(c -> ((PlaceholderType) c.getLhsType()).getVariance())
|
||||||
|
.reduce((a, b) -> {
|
||||||
|
if (a == b) return a;
|
||||||
|
else return 0;
|
||||||
|
})) //2 kommt insbesondere bei Oder-Constraints vor
|
||||||
|
.filter(d -> d.isPresent())
|
||||||
|
.map(e -> e.get())
|
||||||
|
.findAny();
|
||||||
|
|
||||||
|
return xi.orElse(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
public static int calculateOderConstraintVariance(List<Set<UnifyPair>> nextSetAsList) {
|
||||||
|
Optional<Integer> optVariance =
|
||||||
|
nextSetAsList
|
||||||
|
.getFirst()
|
||||||
|
.stream()
|
||||||
|
.filter(x -> x.getGroundBasePair().getLhsType() instanceof PlaceholderType &&
|
||||||
|
!(x.getRhsType() instanceof PlaceholderType) &&
|
||||||
|
x.getPairOp() == PairOperator.EQUALSDOT)
|
||||||
|
.map(x ->
|
||||||
|
((PlaceholderType) x.getGroundBasePair().getLhsType()).getVariance())
|
||||||
|
.reduce((n, m) -> (n != 0) ? n : m);
|
||||||
|
|
||||||
|
//Fuer Operatorenaufrufe wird variance auf 2 gesetzt.
|
||||||
|
//da kein Receiver existiert also kein x.getGroundBasePair().getLhsType() instanceof PlaceholderType
|
||||||
|
//Bei Varianz = 2 werden alle Elemente des Kartesischen Produkts abgearbeitet
|
||||||
|
return optVariance.orElse(2);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find the first occurrence (if any) of a UnifyPair with operator EQUALSDOT while having
|
||||||
|
* one side equal to its base pair counterpart
|
||||||
|
*/
|
||||||
|
public static Optional<UnifyPair> findEqualityConstrainedUnifyPair(Set<UnifyPair> nextSetElement) {
|
||||||
|
return nextSetElement.stream().filter(x ->
|
||||||
|
x.getPairOp()
|
||||||
|
.equals(PairOperator.EQUALSDOT))
|
||||||
|
.filter(x -> //Sicherstellen, dass bei a = ty a auch wirklich die gesuchte Typvariable ist
|
||||||
|
x.getLhsType()
|
||||||
|
.equals(x.getBasePair().getLhsType()) ||
|
||||||
|
x.getLhsType()
|
||||||
|
.equals(x.getBasePair().getRhsType())
|
||||||
|
).findFirst();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find all unifyPairs, that associate the identified type variable of origPair with any concrete type. That means:
|
||||||
|
* If "a = type" is in origPair, then we get all UnifyPairs that contain either "a < typeA" or "typeB < a"
|
||||||
|
*/
|
||||||
|
public static Set<UnifyPair> findConstraintsWithSameTVAssociation(UnifyPair origPair, Set<Set<UnifyPair>> singleElementSets) {
|
||||||
|
UnifyType tyVar = origPair.getLhsType();
|
||||||
|
if (!(tyVar instanceof PlaceholderType)) {
|
||||||
|
tyVar = origPair.getRhsType();
|
||||||
|
}
|
||||||
|
|
||||||
|
UnifyType tyVarEF = tyVar;
|
||||||
|
return singleElementSets.stream()
|
||||||
|
.map(xx ->
|
||||||
|
xx.iterator().next())
|
||||||
|
.filter(x ->
|
||||||
|
(x.getLhsType().equals(tyVarEF) && !(x.getRhsType() instanceof PlaceholderType))
|
||||||
|
||
|
||||||
|
(x.getRhsType().equals(tyVarEF) && !(x.getLhsType() instanceof PlaceholderType))
|
||||||
|
)
|
||||||
|
.collect(Collectors.toCollection(HashSet::new));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
public static boolean doesFirstNextSetHasSameBase(List<Set<UnifyPair>> nextSetAsList) {
|
||||||
|
if (nextSetAsList.isEmpty()) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
UnifyPair firstBasePair = null;
|
||||||
|
|
||||||
|
for (var unifyPair : nextSetAsList.getFirst().stream().toList()) {
|
||||||
|
var basePair = unifyPair.getBasePair();
|
||||||
|
|
||||||
|
// if any base pair is null, there is NOT always the same base!
|
||||||
|
if (basePair == null) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (firstBasePair == null) {
|
||||||
|
firstBasePair = basePair;
|
||||||
|
}
|
||||||
|
else if (!basePair.equals(firstBasePair)) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Extracts data from every element in the nested lists of results. What data depends on the given
|
||||||
|
* extractor function
|
||||||
|
*/
|
||||||
|
public static Set<UnifyPair> collectFromThreadResult (
|
||||||
|
Set<Set<UnifyPair>> currentThreadResult,
|
||||||
|
Function<UnifyPair, Set<UnifyPair>> extractor
|
||||||
|
) {
|
||||||
|
return currentThreadResult.stream()
|
||||||
|
.map(b ->
|
||||||
|
b.stream()
|
||||||
|
.map(extractor)
|
||||||
|
.reduce((y, z) -> {
|
||||||
|
y.addAll(z);
|
||||||
|
return y;
|
||||||
|
})
|
||||||
|
.orElse(new HashSet<>()))
|
||||||
|
.reduce((y, z) -> {
|
||||||
|
y.addAll(z);
|
||||||
|
return y;
|
||||||
|
})
|
||||||
|
.orElse(new HashSet<>());
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Extract a list of PlaceholderTypes from a set of pairs, such that each resulting element:
|
||||||
|
* - Is the LHS of a pair
|
||||||
|
* - Is a PlaceholderType
|
||||||
|
* - has a basePair Side that is a PlaceholderType with the same name
|
||||||
|
*/
|
||||||
|
public static List<PlaceholderType> extractMatchingPlaceholderTypes(Set<UnifyPair> pairs) {
|
||||||
|
return pairs.stream()
|
||||||
|
.filter(x -> {
|
||||||
|
UnifyType lhs = x.getLhsType();
|
||||||
|
UnifyType baseLhs = x.getBasePair().getLhsType();
|
||||||
|
UnifyType baseRhs = x.getBasePair().getRhsType();
|
||||||
|
return (lhs instanceof PlaceholderType) &&
|
||||||
|
((baseLhs instanceof PlaceholderType && lhs.getName().equals(baseLhs.getName())) ||
|
||||||
|
(baseRhs instanceof PlaceholderType && lhs.getName().equals(baseRhs.getName())));
|
||||||
|
})
|
||||||
|
.map(x -> (PlaceholderType) x.getLhsType())
|
||||||
|
.collect(Collectors.toCollection(ArrayList::new));
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
@@ -0,0 +1,39 @@
|
|||||||
|
package de.dhbwstuttgart.typeinference.unify;
|
||||||
|
|
||||||
|
import java.io.Writer;
|
||||||
|
|
||||||
|
/**
 * Bundles the cross-cutting state handed to unification tasks: the log target,
 * logging/parallelism switches, the result model, and the task registry.
 * New variants are derived via the {@code newWith*} copy methods rather than
 * by mutating an existing instance.
 */
public class UnifyContext {

    // Target writer for unification trace output.
    Writer logFile;
    // Whether logging is enabled.
    Boolean log;
    // Whether the unification may fork parallel subtasks.
    Boolean parallel;
    // Receives unification results (observer/model for result delivery).
    UnifyResultModel resultModel;
    // Registry of running tasks; volatile for cross-thread visibility of the
    // reference (the referenced object's own thread-safety is its concern).
    volatile UnifyTaskModel usedTasks;

    /**
     * Creates a context from its five components.
     *
     * @param logFile     target writer for log output
     * @param log         whether logging is enabled
     * @param parallel    whether parallel execution is enabled
     * @param resultModel sink for unification results
     * @param usedTasks   registry of tasks in flight
     */
    public UnifyContext(
            Writer logFile,
            Boolean log,
            Boolean parallel,
            UnifyResultModel resultModel,
            UnifyTaskModel usedTasks
    ) {
        this.logFile = logFile;
        this.log = log;
        this.parallel = parallel;
        this.resultModel = resultModel;
        this.usedTasks = usedTasks;
    }


    /** Returns a copy of this context with a different log writer. */
    public UnifyContext newWithLogFile(Writer logFile) {
        return new UnifyContext(logFile, log, parallel, resultModel, usedTasks);
    }

    /** Returns a copy of this context with a different parallelism flag. */
    public UnifyContext newWithParallel(boolean parallel) {
        return new UnifyContext(logFile, log, parallel, resultModel, usedTasks);
    }

    /** @return whether parallel execution is enabled for this context */
    public Boolean isParallel() {
        return parallel;
    }
}
|
||||||
@@ -36,14 +36,14 @@ public class UnifyResultModel {
|
|||||||
listeners.remove(listenerToRemove);
|
listeners.remove(listenerToRemove);
|
||||||
}
|
}
|
||||||
|
|
||||||
public void notify(Set<Set<UnifyPair>> eqPrimePrimeSet) {
|
public void notify(Set<Set<UnifyPair>> eqPrimePrimeSet, UnifyContext context) {
|
||||||
Set<Set<UnifyPair>> eqPrimePrimeSetRet = eqPrimePrimeSet.stream().map(x -> {
|
Set<Set<UnifyPair>> eqPrimePrimeSetRet = eqPrimePrimeSet.stream().map(x -> {
|
||||||
Optional<Set<UnifyPair>> res = new RuleSet().subst(x.stream().map(y -> {
|
Optional<Set<UnifyPair>> res = new RuleSet().subst(x.stream().map(y -> {
|
||||||
if (y.getPairOp() == PairOperator.SMALLERDOTWC) y.setPairOp(PairOperator.EQUALSDOT);
|
if (y.getPairOp() == PairOperator.SMALLERDOTWC) y.setPairOp(PairOperator.EQUALSDOT);
|
||||||
return y; //alle Paare a <.? b erden durch a =. b ersetzt
|
return y; //alle Paare a <.? b erden durch a =. b ersetzt
|
||||||
}).collect(Collectors.toCollection(HashSet::new)));
|
}).collect(Collectors.toCollection(HashSet::new)));
|
||||||
if (res.isPresent()) {//wenn subst ein Erg liefert wurde was veraendert
|
if (res.isPresent()) {//wenn subst ein Erg liefert wurde was veraendert
|
||||||
return new TypeUnifyTask().applyTypeUnificationRules(res.get(), fc);
|
return new TypeUnifyTask(context).applyTypeUnificationRules(res.get(), fc);
|
||||||
}
|
}
|
||||||
else return x; //wenn nichts veraendert wurde wird x zurueckgegeben
|
else return x; //wenn nichts veraendert wurde wird x zurueckgegeben
|
||||||
}).collect(Collectors.toCollection(HashSet::new));
|
}).collect(Collectors.toCollection(HashSet::new));
|
||||||
|
|||||||
@@ -0,0 +1,109 @@
|
|||||||
|
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
|
||||||
|
|
||||||
|
import de.dhbwstuttgart.typeinference.constraints.Constraint;
|
||||||
|
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
|
||||||
|
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||||
|
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
|
||||||
|
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
|
||||||
|
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.HashSet;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Optional;
|
||||||
|
import java.util.Set;
|
||||||
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
|
public class Variance0Case extends VarianceCase {
|
||||||
|
|
||||||
|
protected final int variance = 0;
|
||||||
|
|
||||||
|
protected Variance0Case(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
|
||||||
|
super(isOderConstraint, typeUnifyTask, context);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void selectNextData(
|
||||||
|
TypeUnifyTask typeUnifyTask,
|
||||||
|
List<Set<UnifyPair>> nextSetAsList,
|
||||||
|
boolean oderConstraint,
|
||||||
|
Optional<UnifyPair> optOrigPair
|
||||||
|
|
||||||
|
) {
|
||||||
|
//wenn a <. theta dann ist ein maximales Element sehr wahrscheinlich
|
||||||
|
//wenn theta <. a dann ist ein minimales Element sehr wahrscheinlich
|
||||||
|
if (!oderConstraint && optOrigPair != null && optOrigPair.isPresent()) {
|
||||||
|
if (optOrigPair.get().getBasePair().getLhsType() instanceof PlaceholderType) {
|
||||||
|
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
|
||||||
|
} else {
|
||||||
|
a = typeUnifyTask.oup.min(nextSetAsList.iterator());
|
||||||
|
}
|
||||||
|
nextSetAsList.remove(a);
|
||||||
|
} else if (oderConstraint) {
|
||||||
|
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
|
||||||
|
nextSetAsList.remove(a);
|
||||||
|
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
|
||||||
|
} else {
|
||||||
|
a = nextSetAsList.removeFirst();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Set<Set<UnifyPair>> computeParallel(
|
||||||
|
Set<Set<Set<UnifyPair>>> forkResults,
|
||||||
|
Set<Set<UnifyPair>> elems,
|
||||||
|
Set<UnifyPair> eq,
|
||||||
|
List<Set<Constraint<UnifyPair>>> oderConstraints,
|
||||||
|
IFiniteClosure fc,
|
||||||
|
int rekTiefe,
|
||||||
|
Set<UnifyPair> methodSignatureConstraint,
|
||||||
|
List<Set<UnifyPair>> nextSetAsList,
|
||||||
|
Set<UnifyPair> sameEqSet,
|
||||||
|
Set<Set<UnifyPair>> result,
|
||||||
|
Set<Set<UnifyPair>> aParDef
|
||||||
|
) {
|
||||||
|
elems.add(a); //PL 2019-01-16 muss das wirklich hin steht schon in Zeile 859 ja braucht man siehe Zeile 859
|
||||||
|
return typeUnifyTask.unify2(elems, eq, oderConstraints, fc, context.isParallel(), rekTiefe, new HashSet<>(methodSignatureConstraint));
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void applyComputedResults(
|
||||||
|
Set<Set<UnifyPair>> result,
|
||||||
|
Set<Set<UnifyPair>> currentThreadResult,
|
||||||
|
Set<UnifyPair> compResult,
|
||||||
|
Set<UnifyPair> compRes
|
||||||
|
) {
|
||||||
|
writeLog("RES var=1 ADD:" + result.toString() + " " + currentThreadResult.toString());
|
||||||
|
result.addAll(currentThreadResult);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean eraseInvalidSets(
|
||||||
|
int rekTiefe,
|
||||||
|
Set<Set<UnifyPair>> aParDef,
|
||||||
|
List<Set<UnifyPair>> nextSetAsList
|
||||||
|
) {
|
||||||
|
if (!this.isOderConstraint) {
|
||||||
|
return true;
|
||||||
|
} else {
|
||||||
|
nextSetAsList.removeAll(nextSetasListOderConstraints);
|
||||||
|
nextSetasListOderConstraints = new ArrayList<>();
|
||||||
|
writeLog("Removed: " + nextSetasListOderConstraints);
|
||||||
|
List<Set<UnifyPair>> smallerSetasList = typeUnifyTask.oup.smallerThan(a, nextSetAsList);
|
||||||
|
List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
|
||||||
|
.filter(x -> !((Constraint<UnifyPair>) x).isInherited())
|
||||||
|
.collect(Collectors.toCollection(ArrayList::new));
|
||||||
|
List<Set<UnifyPair>> notErased = new ArrayList<>();
|
||||||
|
notInherited.forEach(x -> notErased.addAll(typeUnifyTask.oup.smallerEqThan(x, smallerSetasList)));
|
||||||
|
List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList);
|
||||||
|
erased.removeAll(notErased);
|
||||||
|
nextSetAsList.removeAll(erased);
|
||||||
|
|
||||||
|
writeLog("Removed: " + erased);
|
||||||
|
|
||||||
|
writeLog("Not Removed: " + nextSetAsList);
|
||||||
|
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
@@ -0,0 +1,212 @@
|
|||||||
|
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
|
||||||
|
|
||||||
|
import de.dhbwstuttgart.exceptions.UnifyCancelException;
|
||||||
|
import de.dhbwstuttgart.typeinference.constraints.Constraint;
|
||||||
|
import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
|
||||||
|
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
|
||||||
|
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||||
|
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
|
||||||
|
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.HashSet;
|
||||||
|
import java.util.Iterator;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Optional;
|
||||||
|
import java.util.Set;
|
||||||
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
|
public class Variance1Case extends VarianceCase {
|
||||||
|
|
||||||
|
protected final int variance = 1;
|
||||||
|
|
||||||
|
protected Variance1Case(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
|
||||||
|
super(isOderConstraint, typeUnifyTask, context);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void selectNextData(
|
||||||
|
TypeUnifyTask typeUnifyTask,
|
||||||
|
List<Set<UnifyPair>> nextSetAsList,
|
||||||
|
boolean oderConstraint,
|
||||||
|
Optional<UnifyPair> optOrigPair
|
||||||
|
) {
|
||||||
|
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
|
||||||
|
writeLog("Max: a in " + variance + " " + a);
|
||||||
|
nextSetAsList.remove(a);
|
||||||
|
if (oderConstraint) {
|
||||||
|
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
|
||||||
|
}
|
||||||
|
writeLog("nextSetasListOderConstraints 1: " + nextSetasListOderConstraints);
|
||||||
|
|
||||||
|
//Alle maximale Elemente in nextSetasListRest bestimmen
|
||||||
|
//nur für diese wird parallele Berechnung angestossen.
|
||||||
|
Set<UnifyPair> finalA = a;
|
||||||
|
nextSetasListRest = typeUnifyTask.oup.maxElements(
|
||||||
|
nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != 1).toList()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Set<Set<UnifyPair>> computeParallel(
|
||||||
|
Set<Set<Set<UnifyPair>>> forkResults,
|
||||||
|
Set<Set<UnifyPair>> elems,
|
||||||
|
Set<UnifyPair> eq,
|
||||||
|
List<Set<Constraint<UnifyPair>>> oderConstraints,
|
||||||
|
IFiniteClosure fc,
|
||||||
|
int rekTiefe,
|
||||||
|
Set<UnifyPair> methodSignatureConstraint,
|
||||||
|
List<Set<UnifyPair>> nextSetAsList,
|
||||||
|
Set<UnifyPair> sameEqSet,
|
||||||
|
Set<Set<UnifyPair>> result,
|
||||||
|
Set<Set<UnifyPair>> aParDef
|
||||||
|
) {
|
||||||
|
Set<TypeUnify2Task> forks = new HashSet<>();
|
||||||
|
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
|
||||||
|
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
|
||||||
|
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
|
||||||
|
newElemsOrig.add(a);
|
||||||
|
|
||||||
|
/* FORK ANFANG */
|
||||||
|
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, methodSignatureConstraint);
|
||||||
|
//forks.add(forkOrig);
|
||||||
|
if (typeUnifyTask.myIsCancelled()) {
|
||||||
|
throw new UnifyCancelException();
|
||||||
|
}
|
||||||
|
/* FORK ENDE */
|
||||||
|
|
||||||
|
writeLog("a in " + variance + " " + a);
|
||||||
|
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
|
||||||
|
while (!nextSetasListRest.isEmpty()) {
|
||||||
|
Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
|
||||||
|
nextSetAsList.remove(nSaL);
|
||||||
|
writeLog("1 RM" + nSaL.toString());
|
||||||
|
|
||||||
|
if (!this.isOderConstraint) {
|
||||||
|
//ueberpruefung ob zu a =. ty \in nSaL in sameEqSet ein Widerspruch besteht
|
||||||
|
if (!sameEqSet.isEmpty() && !typeUnifyTask.checkNoContradiction(nSaL, sameEqSet, result)) {
|
||||||
|
TypeUnifyTask.noShortendElements++;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) nSaL).getExtendConstraint());
|
||||||
|
}
|
||||||
|
Set<UnifyPair> newEq = new HashSet<>(eq);
|
||||||
|
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
|
||||||
|
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
|
||||||
|
newElems.add(nSaL);
|
||||||
|
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
|
||||||
|
forks.add(fork);
|
||||||
|
if (typeUnifyTask.myIsCancelled()) {
|
||||||
|
throw new UnifyCancelException();
|
||||||
|
}
|
||||||
|
fork.fork();
|
||||||
|
}
|
||||||
|
//currentThreadResult = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
|
||||||
|
|
||||||
|
/* FORK ANFANG */
|
||||||
|
Set<Set<UnifyPair>> currentThreadResult = forkOrig.compute();
|
||||||
|
if (typeUnifyTask.myIsCancelled()) {
|
||||||
|
throw new UnifyCancelException();
|
||||||
|
}
|
||||||
|
//noOfThread++;
|
||||||
|
forkOrig.writeLog("final Orig 1");
|
||||||
|
forkOrig.closeLogFile();
|
||||||
|
//Set<Set<UnifyPair>> fork_res = forkOrig.join();
|
||||||
|
//forkResults.add(fork_res);;
|
||||||
|
/* FORK ENDE */
|
||||||
|
|
||||||
|
for (TypeUnify2Task fork : forks) {
|
||||||
|
Set<Set<UnifyPair>> fork_res = fork.join();
|
||||||
|
if (typeUnifyTask.myIsCancelled()) {
|
||||||
|
throw new UnifyCancelException();
|
||||||
|
}
|
||||||
|
writeLog("fork_res: " + fork_res.toString());
|
||||||
|
writeLog(Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString());
|
||||||
|
forkResults.add(fork_res);
|
||||||
|
if (!typeUnifyTask.isUndefinedPairSetSet(fork_res)) {
|
||||||
|
aParDef.add(fork.getNextSetElement());
|
||||||
|
}
|
||||||
|
fork.writeLog("final 1");
|
||||||
|
fork.closeLogFile();
|
||||||
|
}
|
||||||
|
//noOfThread++;
|
||||||
|
|
||||||
|
return currentThreadResult;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void applyComputedResults(
|
||||||
|
Set<Set<UnifyPair>> result,
|
||||||
|
Set<Set<UnifyPair>> currentThreadResult,
|
||||||
|
Set<UnifyPair> compResult,
|
||||||
|
Set<UnifyPair> compRes
|
||||||
|
) {
|
||||||
|
int resOfCompare = typeUnifyTask.oup.compare(compResult, compRes);
|
||||||
|
if (resOfCompare == -1) {
|
||||||
|
writeLog("Geloescht result: " + result);
|
||||||
|
result.clear();
|
||||||
|
result.addAll(currentThreadResult);
|
||||||
|
}
|
||||||
|
else if (resOfCompare == 0) {
|
||||||
|
result.addAll(currentThreadResult);
|
||||||
|
}
|
||||||
|
else if (resOfCompare == 1) {
|
||||||
|
writeLog("Geloescht currentThreadResult: " + currentThreadResult);
|
||||||
|
//result = result;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean eraseInvalidSets(
|
||||||
|
int rekTiefe,
|
||||||
|
Set<Set<UnifyPair>> aParDef,
|
||||||
|
List<Set<UnifyPair>> nextSetAsList
|
||||||
|
) {
|
||||||
|
// System.out.println("");
|
||||||
|
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
|
||||||
|
writeLog("aParDef: " + aParDef.toString());
|
||||||
|
aParDef.add(a);
|
||||||
|
Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator();
|
||||||
|
if (this.isOderConstraint) {
|
||||||
|
nextSetAsList.removeAll(nextSetasListOderConstraints);
|
||||||
|
nextSetasListOderConstraints = new ArrayList<>();
|
||||||
|
writeLog("Removed: " + nextSetasListOderConstraints);
|
||||||
|
while (aParDefIt.hasNext()) {
|
||||||
|
Set<UnifyPair> a_new = aParDefIt.next();
|
||||||
|
List<Set<UnifyPair>> smallerSetasList = typeUnifyTask.oup.smallerThan(a_new, nextSetAsList);
|
||||||
|
writeLog("smallerSetasList: " + smallerSetasList);
|
||||||
|
List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
|
||||||
|
.filter(x -> !((Constraint<UnifyPair>) x).isInherited() && !((Constraint<UnifyPair>) x).isImplemented())
|
||||||
|
.collect(Collectors.toCollection(ArrayList::new));
|
||||||
|
writeLog("notInherited: " + notInherited + "\n");
|
||||||
|
List<Set<UnifyPair>> notErased = new ArrayList<>();
|
||||||
|
notInherited.forEach(x -> {
|
||||||
|
notErased.addAll(typeUnifyTask.oup.smallerEqThan(x, smallerSetasList));
|
||||||
|
});
|
||||||
|
List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList);
|
||||||
|
writeLog("notErased: " + notErased + "\n");
|
||||||
|
erased.removeAll(notErased);
|
||||||
|
nextSetAsList.removeAll(erased);
|
||||||
|
|
||||||
|
writeLog("Removed: " + erased);
|
||||||
|
|
||||||
|
writeLog("Not Removed: " + nextSetAsList);
|
||||||
|
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
while (aParDefIt.hasNext()) {
|
||||||
|
//nextSetasListIt = nextSetasList.iterator(); Sollte eingefuegt werden PL 2020-04-28
|
||||||
|
Set<UnifyPair> a_new = aParDefIt.next();
|
||||||
|
List<Set<UnifyPair>> erased = typeUnifyTask.oup.smallerEqThan(a_new, nextSetAsList);
|
||||||
|
nextSetAsList.removeAll(erased);
|
||||||
|
|
||||||
|
writeLog("Removed: " + erased);
|
||||||
|
|
||||||
|
writeLog("Not Removed: " + nextSetAsList);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,138 @@
|
|||||||
|
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
|
||||||
|
|
||||||
|
import de.dhbwstuttgart.exceptions.UnifyCancelException;
|
||||||
|
import de.dhbwstuttgart.typeinference.constraints.Constraint;
|
||||||
|
import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
|
||||||
|
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
|
||||||
|
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||||
|
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
|
||||||
|
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.HashSet;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Optional;
|
||||||
|
import java.util.Set;
|
||||||
|
|
||||||
|
/**
 * Cartesian-product strategy for variance 2 (operator calls without a receiver).
 * Every remaining alternative is forked in parallel; no result comparison and
 * no erasure of dominated sets takes place.
 */
public class Variance2Case extends VarianceCase {

    protected final int variance = 2;

    protected Variance2Case(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
        super(isOderConstraint, typeUnifyTask, context);
    }

    @Override
    public void selectNextData(
            TypeUnifyTask typeUnifyTask,
            List<Set<UnifyPair>> nextSetAsList,
            boolean oderConstraint,
            Optional<UnifyPair> optOrigPair

    ) {
        // No ordering information usable: just take the first element.
        a = nextSetAsList.removeFirst();
        // Parallel computation is started for ALL remaining elements.
        nextSetasListRest = new ArrayList<>(nextSetAsList);
    }


    @Override
    public Set<Set<UnifyPair>> computeParallel(
            Set<Set<Set<UnifyPair>>> forkResults,
            Set<Set<UnifyPair>> elems,
            Set<UnifyPair> eq,
            List<Set<Constraint<UnifyPair>>> oderConstraints,
            IFiniteClosure fc,
            int rekTiefe,
            Set<UnifyPair> methodSignatureConstraint,
            List<Set<UnifyPair>> nextSetAsList,
            Set<UnifyPair> sameEqSet,
            Set<Set<UnifyPair>> result,
            Set<Set<UnifyPair>> aParDef
    ) {
        writeLog("var2einstieg");
        Set<TypeUnify2Task> forks = new HashSet<>();
        // Defensive copies: each task mutates its own inputs.
        Set<UnifyPair> newEqOrig = new HashSet<>(eq);
        Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
        List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
        newElemsOrig.add(a);

        /* FORK BEGIN */
        TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
        //forks.add(forkOrig);
        if (typeUnifyTask.myIsCancelled()) {
            throw new UnifyCancelException();
        }
        /* FORK END */

        writeLog("a in " + variance + " " + a);
        writeLog("nextSetasListRest: " + nextSetasListRest.toString());

        // For the parallel computation of the oder-constraints the method
        // signature is copied; the signature contribution of a resp. nSaL is
        // removed again whenever that alternative is not the solution.
        Set<UnifyPair> methodSignatureConstraintForParallel = new HashSet<>(methodSignatureConstraint);
        Set<UnifyPair> nSaL = a;

        while (!nextSetasListRest.isEmpty()) {
            // Swap out the previous alternative's signature constraints for the
            // next alternative's (rolling update of the shared copy).
            methodSignatureConstraintForParallel.removeAll(((Constraint<UnifyPair>) nSaL).getmethodSignatureConstraint());
            nSaL = nextSetasListRest.removeFirst();
            nextSetAsList.remove(nSaL); //PL re-enabled 20-02-03
            methodSignatureConstraintForParallel.addAll(((Constraint<UnifyPair>) nSaL).getmethodSignatureConstraint());
            Set<UnifyPair> newEq = new HashSet<>(eq);
            Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
            List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
            newElems.add(nSaL);
            TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraintForParallel));
            forks.add(fork);
            if (typeUnifyTask.myIsCancelled()) {
                throw new UnifyCancelException();
            }
            fork.fork();
        }
        //currentThreadResult = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);

        /* FORK BEGIN */
        // The original element is computed on the current thread.
        Set<Set<UnifyPair>> currentThreadResult = forkOrig.compute();
        if (typeUnifyTask.myIsCancelled()) {
            throw new UnifyCancelException();
        }
        //noOfThread++;
        forkOrig.writeLog("final Orig 2");
        forkOrig.closeLogFile();
        //Set<Set<UnifyPair>> fork_res = forkOrig.join();
        //forkResults.add(fork_res); //presumably wrong
        /* FORK END */
        for (TypeUnify2Task fork : forks) {
            Set<Set<UnifyPair>> fork_res = fork.join();
            if (typeUnifyTask.myIsCancelled()) {
                throw new UnifyCancelException();
            }
            forkResults.add(fork_res);
            fork.writeLog("final 2");
            fork.closeLogFile();
        }
        //noOfThread++;

        return currentThreadResult;
    }

    @Override
    public void applyComputedResults(
            Set<Set<UnifyPair>> result,
            Set<Set<UnifyPair>> currentThreadResult,
            Set<UnifyPair> compResult,
            Set<UnifyPair> compRes
    ) {
        // Nothing — variance 2 keeps all results; no comparison takes place.
    }

    @Override
    public boolean eraseInvalidSets(
            int rekTiefe,
            Set<Set<UnifyPair>> aParDef,
            List<Set<UnifyPair>> nextSetAsList
    ) {
        // Nothing — all elements of the cartesian product are processed.
        return false;
    }

}
|
||||||
@@ -0,0 +1,111 @@
|
|||||||
|
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
|
||||||
|
|
||||||
|
import de.dhbwstuttgart.typeinference.constraints.Constraint;
|
||||||
|
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
|
||||||
|
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||||
|
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
|
||||||
|
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Optional;
|
||||||
|
import java.util.Set;
|
||||||
|
|
||||||
|
/**
 * Strategy base class for traversing the cartesian product of constraint
 * alternatives during type unification. One concrete subclass exists per
 * variance value (-1, 0, 1, 2); {@link #createFromVariance} is the factory.
 */
public abstract class VarianceCase {

    /**
     * Creates the strategy matching the given variance.
     *
     * @throws RuntimeException if {@code variance} is not one of -1, 0, 1, 2
     */
    public static VarianceCase createFromVariance(int variance, boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
        return switch (variance) {
            case 0 -> new Variance0Case(isOderConstraint, typeUnifyTask, context);
            case 1 -> new Variance1Case(isOderConstraint, typeUnifyTask, context);
            case -1 -> new VarianceM1Case(isOderConstraint, typeUnifyTask, context);
            case 2 -> new Variance2Case(isOderConstraint, typeUnifyTask, context);
            default -> throw new RuntimeException("Invalid variance: " + variance);
        };
    }

    // Whether the constraint being processed is an oder-constraint.
    protected final boolean isOderConstraint;
    // Owning task; supplies the ordering (oup) and cancellation state.
    protected final TypeUnifyTask typeUnifyTask;
    // Shared unification context (logging, parallelism, result model).
    protected final UnifyContext context;

    /**
     * The currently selected case (alternative).
     */
    public Set<UnifyPair> a;

    /**
     * Cases queued for parallel processing.
     * Contains elements that are unrelated (in the ordering) to the current
     * case in {@code a}. These must be processed in any event, which is why
     * their computation is started in parallel.
     */
    public List<Set<UnifyPair>> nextSetasListRest = new ArrayList<>();

    /**
     * Cases whose receiver contains (resp. does not contain) "? extends".
     * As a rule this is exactly one element. It is deleted from
     * {@code nextSetasList} later, once the respective other element has led
     * to success.
     */
    public List<Set<UnifyPair>> nextSetasListOderConstraints = new ArrayList<>();


    protected VarianceCase(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
        this.isOderConstraint = isOderConstraint;
        this.typeUnifyTask = typeUnifyTask;
        this.context = context;
    }

    /**
     * Selects values for the next iteration in the run method:
     * - a : the element to process next
     * - nextSetAsList: the list of cases that have no relation to the selected a and will have to be worked on
     * - nextSetasListOderConstraints: the list of cases of which the receiver contains "? extends", typically one element
     */
    public abstract void selectNextData(
            TypeUnifyTask typeUnifyTask,
            List<Set<UnifyPair>> nextSetAsList,
            boolean oderConstraint,
            Optional<UnifyPair> optOrigPair
    );

    /**
     * Computes the unification result for the selected case {@code a}; the
     * concrete strategy decides whether and how sibling cases are forked in
     * parallel. Returns the result computed on the current thread.
     */
    public abstract Set<Set<UnifyPair>> computeParallel(
            Set<Set<Set<UnifyPair>>> forkResults,
            Set<Set<UnifyPair>> elems,
            Set<UnifyPair> eq,
            List<Set<Constraint<UnifyPair>>> oderConstraints,
            IFiniteClosure fc,
            int rekTiefe,
            Set<UnifyPair> methodSignatureConstraint,
            List<Set<UnifyPair>> nextSetAsList,
            Set<UnifyPair> sameEqSet,
            Set<Set<UnifyPair>> result,
            Set<Set<UnifyPair>> aParDef
    );

    /**
     * Merges {@code currentThreadResult} into {@code result}; the concrete
     * strategy decides whether one of them supersedes the other.
     */
    public abstract void applyComputedResults(
            Set<Set<UnifyPair>> result,
            Set<Set<UnifyPair>> currentThreadResult,
            Set<UnifyPair> compResult,
            Set<UnifyPair> compRes
    );

    /**
     * Removes alternatives made obsolete by the results computed so far.
     *
     * @return If the current iteration should be broken out of
     */
    public abstract boolean eraseInvalidSets(
            int rekTiefe,
            Set<Set<UnifyPair>> aParDef,
            List<Set<UnifyPair>> nextSetAsList
    );

    protected void writeLog(String s) {
        // TODO: route this to the context's log writer (currently a no-op)
    }

}
|
||||||
@@ -0,0 +1,232 @@
|
|||||||
|
package de.dhbwstuttgart.typeinference.unify.cartesianproduct;
|
||||||
|
|
||||||
|
import de.dhbwstuttgart.exceptions.UnifyCancelException;
|
||||||
|
import de.dhbwstuttgart.typeinference.constraints.Constraint;
|
||||||
|
import de.dhbwstuttgart.typeinference.unify.TypeUnify2Task;
|
||||||
|
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
|
||||||
|
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||||
|
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
|
||||||
|
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.HashSet;
|
||||||
|
import java.util.Iterator;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Optional;
|
||||||
|
import java.util.Set;
|
||||||
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
|
public class VarianceM1Case extends VarianceCase {
|
||||||
|
|
||||||
|
protected final int variance = -1;
|
||||||
|
|
||||||
|
protected VarianceM1Case(boolean isOderConstraint, TypeUnifyTask typeUnifyTask, UnifyContext context) {
|
||||||
|
super(isOderConstraint, typeUnifyTask, context);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void selectNextData(
|
||||||
|
TypeUnifyTask typeUnifyTask,
|
||||||
|
List<Set<UnifyPair>> nextSetAsList,
|
||||||
|
boolean oderConstraint,
|
||||||
|
Optional<UnifyPair> optOrigPair
|
||||||
|
) {
|
||||||
|
a = typeUnifyTask.oup.min(nextSetAsList.iterator());
|
||||||
|
writeLog("Min: a in " + variance + " " + a);
|
||||||
|
if (oderConstraint) {
|
||||||
|
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
|
||||||
|
}
|
||||||
|
writeLog("nextSetasListOderConstraints -1: " + nextSetasListOderConstraints);
|
||||||
|
nextSetAsList.remove(a);
|
||||||
|
|
||||||
|
//Alle minimalen Elemente in nextSetasListRest bestimmen
|
||||||
|
//nur für diese wird parallele Berechnung angestossen.
|
||||||
|
Set<UnifyPair> finalA = a;
|
||||||
|
nextSetasListRest = typeUnifyTask.oup.minElements(
|
||||||
|
nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != -1).toList()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Set<Set<UnifyPair>> computeParallel(
|
||||||
|
Set<Set<Set<UnifyPair>>> forkResults,
|
||||||
|
Set<Set<UnifyPair>> elems,
|
||||||
|
Set<UnifyPair> eq,
|
||||||
|
List<Set<Constraint<UnifyPair>>> oderConstraints,
|
||||||
|
IFiniteClosure fc,
|
||||||
|
int rekTiefe,
|
||||||
|
Set<UnifyPair> methodSignatureConstraint,
|
||||||
|
List<Set<UnifyPair>> nextSetAsList,
|
||||||
|
Set<UnifyPair> sameEqSet,
|
||||||
|
Set<Set<UnifyPair>> result,
|
||||||
|
Set<Set<UnifyPair>> aParDef
|
||||||
|
) {
|
||||||
|
Set<TypeUnify2Task> forks = new HashSet<>();
|
||||||
|
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
|
||||||
|
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
|
||||||
|
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
|
||||||
|
newElemsOrig.add(a);
|
||||||
|
|
||||||
|
/* FORK ANFANG */
|
||||||
|
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
|
||||||
|
//forks.add(forkOrig);
|
||||||
|
if (typeUnifyTask.myIsCancelled()) {
|
||||||
|
throw new UnifyCancelException();
|
||||||
|
}
|
||||||
|
/* FORK ENDE */
|
||||||
|
|
||||||
|
writeLog("a in " + variance + " " + a);
|
||||||
|
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
|
||||||
|
|
||||||
|
while (!nextSetasListRest.isEmpty()) {
|
||||||
|
Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
|
||||||
|
nextSetAsList.remove(nSaL);
|
||||||
|
writeLog("-1 RM" + nSaL.toString());
|
||||||
|
|
||||||
|
if (!this.isOderConstraint) {
|
||||||
|
//ueberpruefung ob zu a =. ty \in nSaL in sameEqSet ein Widerspruch besteht
|
||||||
|
if (!sameEqSet.isEmpty() && !typeUnifyTask.checkNoContradiction(nSaL, sameEqSet, result)) {
|
||||||
|
TypeUnifyTask.noShortendElements++;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) nSaL).getExtendConstraint());
|
||||||
|
}
|
||||||
|
Set<UnifyPair> newEq = new HashSet<>(eq);
|
||||||
|
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
|
||||||
|
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
|
||||||
|
newElems.add(nSaL);
|
||||||
|
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, context, rekTiefe, new HashSet<>(methodSignatureConstraint));
|
||||||
|
forks.add(fork);
|
||||||
|
|
||||||
|
if (typeUnifyTask.myIsCancelled()) {
|
||||||
|
throw new UnifyCancelException();
|
||||||
|
}
|
||||||
|
fork.fork();
|
||||||
|
}
|
||||||
|
//currentThreadResult = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
|
||||||
|
|
||||||
|
/* FORK ANFANG */
|
||||||
|
Set<Set<UnifyPair>> currentThreadResult = forkOrig.compute();
|
||||||
|
if (typeUnifyTask.myIsCancelled()) {
|
||||||
|
throw new UnifyCancelException();
|
||||||
|
}
|
||||||
|
//noOfThread++;
|
||||||
|
forkOrig.writeLog("final Orig -1");
|
||||||
|
forkOrig.closeLogFile();
|
||||||
|
//Set<Set<UnifyPair>> fork_res = forkOrig.join();
|
||||||
|
//forkResults.add(fork_res);
|
||||||
|
/* FORK ENDE */
|
||||||
|
|
||||||
|
for (TypeUnify2Task fork : forks) {
|
||||||
|
Set<Set<UnifyPair>> fork_res = fork.join();
|
||||||
|
if (typeUnifyTask.myIsCancelled()) {
|
||||||
|
throw new UnifyCancelException();
|
||||||
|
}
|
||||||
|
//noOfThread++;
|
||||||
|
//noOfThread--; an das Ende von compute verschoben
|
||||||
|
writeLog("fork_res: " + fork_res.toString());
|
||||||
|
writeLog(Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString());
|
||||||
|
forkResults.add(fork_res);
|
||||||
|
if (!typeUnifyTask.isUndefinedPairSetSet(fork_res)) {
|
||||||
|
aParDef.add(fork.getNextSetElement());
|
||||||
|
}
|
||||||
|
fork.writeLog("final -1");
|
||||||
|
fork.closeLogFile();
|
||||||
|
}
|
||||||
|
//noOfThread++;
|
||||||
|
|
||||||
|
return currentThreadResult;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void applyComputedResults(
|
||||||
|
Set<Set<UnifyPair>> result,
|
||||||
|
Set<Set<UnifyPair>> currentThreadResult,
|
||||||
|
Set<UnifyPair> compResult,
|
||||||
|
Set<UnifyPair> compRes
|
||||||
|
) {
|
||||||
|
int resOfCompare = typeUnifyTask.oup.compare(compResult, compRes);
|
||||||
|
if (resOfCompare == 1) {
|
||||||
|
writeLog("Geloescht result: " + result);
|
||||||
|
result.clear();
|
||||||
|
result.addAll(currentThreadResult);
|
||||||
|
} else if (resOfCompare == 0) {
|
||||||
|
result.addAll(currentThreadResult);
|
||||||
|
} else if (resOfCompare == -1) {
|
||||||
|
writeLog("Geloescht currentThreadResult: " + currentThreadResult);
|
||||||
|
//result = result;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean eraseInvalidSets(
|
||||||
|
int rekTiefe,
|
||||||
|
Set<Set<UnifyPair>> aParDef,
|
||||||
|
List<Set<UnifyPair>> nextSetAsList
|
||||||
|
) {
|
||||||
|
|
||||||
|
// System.out.println("");
|
||||||
|
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
|
||||||
|
writeLog("aParDef: " + aParDef.toString());
|
||||||
|
aParDef.add(a);
|
||||||
|
Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator();
|
||||||
|
if (this.isOderConstraint) {
|
||||||
|
nextSetAsList.removeAll(nextSetasListOderConstraints);
|
||||||
|
writeLog("Removed: " + nextSetasListOderConstraints);
|
||||||
|
nextSetasListOderConstraints = new ArrayList<>();
|
||||||
|
while (aParDefIt.hasNext()) {
|
||||||
|
Set<UnifyPair> a_new = aParDefIt.next();
|
||||||
|
List<Set<UnifyPair>> greaterSetasList = typeUnifyTask.oup.greaterThan(a_new, nextSetAsList);
|
||||||
|
|
||||||
|
//a_new muss hingefuegt werden, wenn es nicht vererbt ist, dann wird es spaeter wieder geloescht
|
||||||
|
if (!((Constraint<UnifyPair>) a_new).isInherited()) {
|
||||||
|
greaterSetasList.add(a_new);
|
||||||
|
}
|
||||||
|
List<Set<UnifyPair>> notInherited = greaterSetasList.stream()
|
||||||
|
.filter(x -> !((Constraint<UnifyPair>) x).isInherited())
|
||||||
|
.collect(Collectors.toCollection(ArrayList::new));
|
||||||
|
List<Set<UnifyPair>> notErased = new ArrayList<>();
|
||||||
|
|
||||||
|
//Wenn x nicht vererbt ist, beginnt beim naechstgroesseren Element die naechste Ueberladung
|
||||||
|
notInherited.forEach(x -> {
|
||||||
|
notErased.addAll(typeUnifyTask.oup.greaterEqThan(x, greaterSetasList));
|
||||||
|
});
|
||||||
|
|
||||||
|
//das kleineste Element ist das Element von dem a_new geerbt hat
|
||||||
|
//muss deshalb geloescht werden
|
||||||
|
Iterator<Set<UnifyPair>> notErasedIt = notErased.iterator();
|
||||||
|
if (notErasedIt.hasNext()) {
|
||||||
|
Set<UnifyPair> min = typeUnifyTask.oup.min(notErasedIt);
|
||||||
|
notErased.remove(min);
|
||||||
|
notErased.remove(((Constraint<UnifyPair>) min).getExtendConstraint());
|
||||||
|
}
|
||||||
|
|
||||||
|
List<Set<UnifyPair>> erased = new ArrayList<>(greaterSetasList);
|
||||||
|
erased.removeAll(notErased);
|
||||||
|
nextSetAsList.removeAll(erased);
|
||||||
|
|
||||||
|
writeLog("Removed: " + erased);
|
||||||
|
|
||||||
|
writeLog("Not Removed: " + nextSetAsList);
|
||||||
|
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
while (aParDefIt.hasNext()) {
|
||||||
|
//nextSetasListIt = nextSetasList.iterator(); Sollte eingefuegt werden PL 2020-04-28
|
||||||
|
Set<UnifyPair> a_new = aParDefIt.next();
|
||||||
|
List<Set<UnifyPair>> erased = typeUnifyTask.oup.greaterEqThan(a_new, nextSetAsList);
|
||||||
|
|
||||||
|
nextSetAsList.removeAll(erased);
|
||||||
|
|
||||||
|
writeLog("Removed: " + erased);
|
||||||
|
|
||||||
|
writeLog("Not Removed: " + nextSetAsList);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
@@ -1,5 +1,6 @@
|
|||||||
package de.dhbwstuttgart.typeinference.unify.interfaces;
|
package de.dhbwstuttgart.typeinference.unify.interfaces;
|
||||||
|
|
||||||
|
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Optional;
|
import java.util.Optional;
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
@@ -74,5 +75,5 @@ public interface IFiniteClosure {
|
|||||||
public Set<UnifyType> getChildren(UnifyType t);
|
public Set<UnifyType> getChildren(UnifyType t);
|
||||||
public Set<UnifyType> getAllTypesByName(String typeName);
|
public Set<UnifyType> getAllTypesByName(String typeName);
|
||||||
|
|
||||||
public int compare(UnifyType rhsType, UnifyType rhsType2, PairOperator pairop);
|
public int compare(UnifyType rhsType, UnifyType rhsType2, PairOperator pairop, UnifyContext context);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
package de.dhbwstuttgart.typeinference.unify.model;
|
package de.dhbwstuttgart.typeinference.unify.model;
|
||||||
|
|
||||||
|
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||||
import java.io.FileWriter;
|
import java.io.FileWriter;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.io.Writer;
|
import java.io.Writer;
|
||||||
@@ -42,8 +43,7 @@ import org.apache.commons.io.output.NullWriter;
|
|||||||
* The finite closure for the type unification
|
* The finite closure for the type unification
|
||||||
* @author Florian Steurer
|
* @author Florian Steurer
|
||||||
*/
|
*/
|
||||||
public class FiniteClosure //extends Ordering<UnifyType> //entfernt PL 2018-12-11
|
public class FiniteClosure implements IFiniteClosure {
|
||||||
implements IFiniteClosure {
|
|
||||||
|
|
||||||
final JavaTXCompiler compiler;
|
final JavaTXCompiler compiler;
|
||||||
|
|
||||||
@@ -207,7 +207,7 @@ implements IFiniteClosure {
|
|||||||
result.add(new Pair<>(t, fBounded));
|
result.add(new Pair<>(t, fBounded));
|
||||||
}
|
}
|
||||||
catch (StackOverflowError e) {
|
catch (StackOverflowError e) {
|
||||||
System.out.println("");
|
// System.out.println("");
|
||||||
}
|
}
|
||||||
|
|
||||||
// if C<...> <* C<...> then ... (third case in definition of <*)
|
// if C<...> <* C<...> then ... (third case in definition of <*)
|
||||||
@@ -698,10 +698,10 @@ implements IFiniteClosure {
|
|||||||
}
|
}
|
||||||
*/
|
*/
|
||||||
|
|
||||||
public int compare (UnifyType left, UnifyType right, PairOperator pairop) {
|
public int compare (UnifyType left, UnifyType right, PairOperator pairop, UnifyContext context) {
|
||||||
try {logFile.write("left: "+ left + " right: " + right + " pairop: " + pairop +"\n");} catch (IOException ie) {}
|
try {logFile.write("left: "+ left + " right: " + right + " pairop: " + pairop +"\n");} catch (IOException ie) {}
|
||||||
if (left.getName().equals("Matrix") || right.getName().equals("Matrix"))
|
// if (left.getName().equals("Matrix") || right.getName().equals("Matrix"))
|
||||||
System.out.println("");
|
// System.out.println("");
|
||||||
/*
|
/*
|
||||||
pairop = PairOperator.SMALLERDOTWC;
|
pairop = PairOperator.SMALLERDOTWC;
|
||||||
List<UnifyType> al = new ArrayList<>();
|
List<UnifyType> al = new ArrayList<>();
|
||||||
@@ -752,7 +752,7 @@ implements IFiniteClosure {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
UnifyPair up = new UnifyPair(left, right, pairop);
|
UnifyPair up = new UnifyPair(left, right, pairop);
|
||||||
TypeUnifyTask unifyTask = new TypeUnifyTask();
|
TypeUnifyTask unifyTask = new TypeUnifyTask(context);
|
||||||
HashSet<UnifyPair> hs = new HashSet<>();
|
HashSet<UnifyPair> hs = new HashSet<>();
|
||||||
hs.add(up);
|
hs.add(up);
|
||||||
Set<UnifyPair> smallerRes = unifyTask.applyTypeUnificationRules(hs, this);
|
Set<UnifyPair> smallerRes = unifyTask.applyTypeUnificationRules(hs, this);
|
||||||
@@ -760,7 +760,7 @@ implements IFiniteClosure {
|
|||||||
//if (left.getName().equals("Vector") || right.getName().equals("AbstractList"))
|
//if (left.getName().equals("Vector") || right.getName().equals("AbstractList"))
|
||||||
{try {
|
{try {
|
||||||
logFile.write("\nsmallerRes: " + smallerRes);//"smallerHash: " + greaterHash.toString());
|
logFile.write("\nsmallerRes: " + smallerRes);//"smallerHash: " + greaterHash.toString());
|
||||||
logFile.flush();
|
// logFile.flush();
|
||||||
}
|
}
|
||||||
catch (IOException e) {
|
catch (IOException e) {
|
||||||
System.err.println("no LogFile");}}
|
System.err.println("no LogFile");}}
|
||||||
@@ -774,7 +774,7 @@ implements IFiniteClosure {
|
|||||||
long smallerLen = smallerRes.stream().filter(delFun).count();
|
long smallerLen = smallerRes.stream().filter(delFun).count();
|
||||||
try {
|
try {
|
||||||
logFile.write("\nsmallerLen: " + smallerLen +"\n");
|
logFile.write("\nsmallerLen: " + smallerLen +"\n");
|
||||||
logFile.flush();
|
// logFile.flush();
|
||||||
}
|
}
|
||||||
catch (IOException e) {
|
catch (IOException e) {
|
||||||
System.err.println("no LogFile");}
|
System.err.println("no LogFile");}
|
||||||
@@ -789,7 +789,7 @@ implements IFiniteClosure {
|
|||||||
//if (left.getName().equals("Vector") || right.getName().equals("AbstractList"))
|
//if (left.getName().equals("Vector") || right.getName().equals("AbstractList"))
|
||||||
{try {
|
{try {
|
||||||
logFile.write("\ngreaterRes: " + greaterRes);//"smallerHash: " + greaterHash.toString());
|
logFile.write("\ngreaterRes: " + greaterRes);//"smallerHash: " + greaterHash.toString());
|
||||||
logFile.flush();
|
// logFile.flush();
|
||||||
}
|
}
|
||||||
catch (IOException e) {
|
catch (IOException e) {
|
||||||
System.err.println("no LogFile");}}
|
System.err.println("no LogFile");}}
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
package de.dhbwstuttgart.typeinference.unify.model;
|
package de.dhbwstuttgart.typeinference.unify.model;
|
||||||
|
|
||||||
|
import de.dhbwstuttgart.typeinference.unify.UnifyContext;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.HashMap;
|
import java.util.HashMap;
|
||||||
@@ -26,9 +27,11 @@ import de.dhbwstuttgart.util.Pair;
|
|||||||
public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
|
public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
|
||||||
|
|
||||||
protected IFiniteClosure fc;
|
protected IFiniteClosure fc;
|
||||||
|
protected UnifyContext context;
|
||||||
public OrderingUnifyPair(IFiniteClosure fc) {
|
|
||||||
|
public OrderingUnifyPair(IFiniteClosure fc, UnifyContext context) {
|
||||||
this.fc = fc;
|
this.fc = fc;
|
||||||
|
this.context = context;
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
/*
|
||||||
@@ -39,15 +42,15 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
|
|||||||
try {
|
try {
|
||||||
//if (left.getRhsType() instanceof WildcardType || right.getRhsType() instanceof WildcardType) {//PL 2019-01-12 ausgetauscht
|
//if (left.getRhsType() instanceof WildcardType || right.getRhsType() instanceof WildcardType) {//PL 2019-01-12 ausgetauscht
|
||||||
if (((PlaceholderType)left.getLhsType()).isInnerType() && ((PlaceholderType)right.getLhsType()).isInnerType()) {
|
if (((PlaceholderType)left.getLhsType()).isInnerType() && ((PlaceholderType)right.getLhsType()).isInnerType()) {
|
||||||
return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOTWC);
|
return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOTWC, context);
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOT);
|
return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOT, context);
|
||||||
}}
|
}}
|
||||||
catch (ClassCastException e) {
|
catch (ClassCastException e) {
|
||||||
try {
|
try {
|
||||||
((FiniteClosure)fc).logFile.write("ClassCastException: " + left.toString() + " " + left.getGroundBasePair() + "\n\n");
|
((FiniteClosure)fc).logFile.write("ClassCastException: " + left.toString() + " " + left.getGroundBasePair() + "\n\n");
|
||||||
((FiniteClosure)fc).logFile.flush();
|
// ((FiniteClosure)fc).logFile.flush();
|
||||||
}
|
}
|
||||||
catch (IOException ie) {
|
catch (IOException ie) {
|
||||||
}
|
}
|
||||||
@@ -79,18 +82,18 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
|
|||||||
&& (((ExtendsType)right).getExtendedType().getName().equals("java.util.Vector"))
|
&& (((ExtendsType)right).getExtendedType().getName().equals("java.util.Vector"))
|
||||||
&& (((ReferenceType)((ExtendsType)right).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType)))
|
&& (((ReferenceType)((ExtendsType)right).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType)))
|
||||||
{
|
{
|
||||||
System.out.println("");
|
// System.out.println("");
|
||||||
}
|
}
|
||||||
if (((right instanceof SuperType) && (((SuperType)right).getSuperedType().getName().equals("java.lang.Object")))
|
if (((right instanceof SuperType) && (((SuperType)right).getSuperedType().getName().equals("java.lang.Object")))
|
||||||
||((left instanceof SuperType) && (((SuperType)left).getSuperedType().getName().equals("java.lang.Object"))))
|
||((left instanceof SuperType) && (((SuperType)left).getSuperedType().getName().equals("java.lang.Object"))))
|
||||||
{
|
{
|
||||||
System.out.println("");
|
// System.out.println("");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
up = new UnifyPair(left, right, PairOperator.SMALLERDOT);
|
up = new UnifyPair(left, right, PairOperator.SMALLERDOT);
|
||||||
}
|
}
|
||||||
TypeUnifyTask unifyTask = new TypeUnifyTask();
|
TypeUnifyTask unifyTask = new TypeUnifyTask(context);
|
||||||
HashSet<UnifyPair> hs = new HashSet<>();
|
HashSet<UnifyPair> hs = new HashSet<>();
|
||||||
hs.add(up);
|
hs.add(up);
|
||||||
Set<UnifyPair> smallerRes = unifyTask.applyTypeUnificationRules(hs, fc);
|
Set<UnifyPair> smallerRes = unifyTask.applyTypeUnificationRules(hs, fc);
|
||||||
@@ -106,11 +109,11 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
|
|||||||
&& (((ExtendsType)right).getExtendedType().getName().equals("java.util.Vector"))
|
&& (((ExtendsType)right).getExtendedType().getName().equals("java.util.Vector"))
|
||||||
&& (((ReferenceType)((ExtendsType)right).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType)))
|
&& (((ReferenceType)((ExtendsType)right).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType)))
|
||||||
{
|
{
|
||||||
System.out.println("");
|
// System.out.println("");
|
||||||
}
|
}
|
||||||
if (right instanceof SuperType)
|
if (right instanceof SuperType)
|
||||||
{
|
{
|
||||||
System.out.println("");
|
// System.out.println("");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
@@ -411,13 +414,13 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
|
|||||||
if (leftlewc.iterator().next().getLhsType() instanceof PlaceholderType) {
|
if (leftlewc.iterator().next().getLhsType() instanceof PlaceholderType) {
|
||||||
hm = rsleuni.stream().reduce(new HashMap<UnifyType,UnifyPair>(), (x, y)-> { x.put(y.getLhsType(),y); return x; }, combiner);
|
hm = rsleuni.stream().reduce(new HashMap<UnifyType,UnifyPair>(), (x, y)-> { x.put(y.getLhsType(),y); return x; }, combiner);
|
||||||
Stream<UnifyPair> lslewcstr = lsleuni.stream().filter(x -> !(hm.get(x.getLhsType()) == null));
|
Stream<UnifyPair> lslewcstr = lsleuni.stream().filter(x -> !(hm.get(x.getLhsType()) == null));
|
||||||
si = lslewcstr.map(x -> fc.compare(x.getRhsType(), hm.get(x.getLhsType()).getRhsType(), PairOperator.SMALLERDOTWC)).reduce((x,y)-> { if (x == y) return x; else return 0; } );
|
si = lslewcstr.map(x -> fc.compare(x.getRhsType(), hm.get(x.getLhsType()).getRhsType(), PairOperator.SMALLERDOTWC, context)).reduce((x,y)-> { if (x == y) return x; else return 0; } );
|
||||||
}
|
}
|
||||||
//4. Fall
|
//4. Fall
|
||||||
else {
|
else {
|
||||||
hm = rsleuni.stream().reduce(new HashMap<UnifyType,UnifyPair>(), (x, y)-> { x.put(y.getRhsType(),y); return x; }, combiner);
|
hm = rsleuni.stream().reduce(new HashMap<UnifyType,UnifyPair>(), (x, y)-> { x.put(y.getRhsType(),y); return x; }, combiner);
|
||||||
Stream<UnifyPair> lslewcstr = lsleuni.stream().filter(x -> !(hm.get(x.getRhsType()) == null));
|
Stream<UnifyPair> lslewcstr = lsleuni.stream().filter(x -> !(hm.get(x.getRhsType()) == null));
|
||||||
si = lslewcstr.map(x -> fc.compare(x.getLhsType(), hm.get(x.getRhsType()).getLhsType(), PairOperator.SMALLERDOTWC)).reduce((x,y)-> { if (x == y) return x; else return 0; } );
|
si = lslewcstr.map(x -> fc.compare(x.getLhsType(), hm.get(x.getRhsType()).getLhsType(), PairOperator.SMALLERDOTWC, context)).reduce((x,y)-> { if (x == y) return x; else return 0; } );
|
||||||
}
|
}
|
||||||
if (!si.isPresent()) return 0;
|
if (!si.isPresent()) return 0;
|
||||||
else return si.get();
|
else return si.get();
|
||||||
|
|||||||
@@ -1,73 +1,60 @@
|
|||||||
package de.dhbwstuttgart.typeinference.unify.model;
|
package de.dhbwstuttgart.typeinference.unify.model;
|
||||||
|
|
||||||
import java.io.File;
|
|
||||||
import java.io.FileWriter;
|
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.Collection;
|
|
||||||
import java.util.HashMap;
|
|
||||||
import java.util.HashSet;
|
|
||||||
import java.util.Random;
|
|
||||||
import java.util.Set;
|
|
||||||
|
|
||||||
import de.dhbwstuttgart.typeinference.unify.distributeVariance;
|
|
||||||
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
|
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
|
||||||
import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor;
|
import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.Collection;
|
||||||
|
import java.util.Set;
|
||||||
|
import java.util.concurrent.atomic.AtomicInteger;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* An unbounded placeholder type.
|
* An unbounded placeholder type.
|
||||||
* @author Florian Steurer
|
* @author Florian Steurer
|
||||||
*/
|
*/
|
||||||
public final class PlaceholderType extends UnifyType{
|
public final class PlaceholderType extends UnifyType{
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Static list containing the names of all existing placeholders.
|
* Static list containing the names of all existing placeholders.
|
||||||
* Used for generating fresh placeholders.
|
* Used for generating fresh placeholders.
|
||||||
*/
|
*/
|
||||||
public static final ArrayList<String> EXISTING_PLACEHOLDERS = new ArrayList<String>();
|
public static final ArrayList<String> EXISTING_PLACEHOLDERS = new ArrayList<String>();
|
||||||
|
|
||||||
/**
|
|
||||||
* Prefix of auto-generated placeholder names.
|
private static final AtomicInteger placeholderCount = new AtomicInteger(0);
|
||||||
*/
|
|
||||||
protected static String nextName = "gen_";
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Random number generator used to generate fresh placeholder name.
|
|
||||||
*/
|
|
||||||
protected static Random rnd = new Random(43558747548978L);
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* True if this object was auto-generated, false if this object was user-generated.
|
* True if this object was auto-generated, false if this object was user-generated.
|
||||||
*/
|
*/
|
||||||
private final boolean IsGenerated;
|
private final boolean IsGenerated;
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* isWildcardable gibt an, ob ein Wildcardtyp dem PlaceholderType zugeordnet werden darf
|
* isWildcardable gibt an, ob ein Wildcardtyp dem PlaceholderType zugeordnet werden darf
|
||||||
*/
|
*/
|
||||||
private boolean wildcardable = true;
|
private boolean wildcardable = true;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* is innerType gibt an, ob der Type des PlaceholderType innerhalb eines Typkonstruktorsverwendet wird
|
* is innerType gibt an, ob der Type des PlaceholderType innerhalb eines Typkonstruktorsverwendet wird
|
||||||
*/
|
*/
|
||||||
private boolean innerType = false;
|
private boolean innerType = false;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* variance shows the variance of the pair
|
* variance shows the variance of the pair
|
||||||
* 1: contravariant
|
* 1: contravariant
|
||||||
* -1 covariant
|
* -1 covariant
|
||||||
* 0 invariant
|
* 0 invariant
|
||||||
* PL 2018-03-21
|
* PL 2018-03-21
|
||||||
*/
|
*/
|
||||||
private int variance = 0;
|
private int variance = 0;
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* Fuer Oder-Constraints:
|
* Fuer Oder-Constraints:
|
||||||
* orCons = 1: Receiver
|
* orCons = 1: Receiver
|
||||||
* orCons = 0: Argument oder kein Oder-Constraint
|
* orCons = 0: Argument oder kein Oder-Constraint
|
||||||
* orCons = -1: RetType
|
* orCons = -1: RetType
|
||||||
*/
|
*/
|
||||||
private byte orCons = 0;
|
private byte orCons = 0;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Creates a new placeholder type with the specified name.
|
* Creates a new placeholder type with the specified name.
|
||||||
*/
|
*/
|
||||||
@@ -76,17 +63,17 @@ public final class PlaceholderType extends UnifyType{
|
|||||||
EXISTING_PLACEHOLDERS.add(name); // Add to list of existing placeholder names
|
EXISTING_PLACEHOLDERS.add(name); // Add to list of existing placeholder names
|
||||||
IsGenerated = false; // This type is user generated
|
IsGenerated = false; // This type is user generated
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
public PlaceholderType(String name, int variance) {
|
public PlaceholderType(String name, int variance) {
|
||||||
super(name, new TypeParams());
|
super(name, new TypeParams());
|
||||||
EXISTING_PLACEHOLDERS.add(name); // Add to list of existing placeholder names
|
EXISTING_PLACEHOLDERS.add(name); // Add to list of existing placeholder names
|
||||||
IsGenerated = false; // This type is user generated
|
IsGenerated = false; // This type is user generated
|
||||||
this.variance = variance;
|
this.variance = variance;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Creates a new placeholdertype
|
* Creates a new placeholdertype
|
||||||
* @param isGenerated true if this placeholder is auto-generated, false if it is user-generated.
|
* @param isGenerated true if this placeholder is auto-generated, false if it is user-generated.
|
||||||
*/
|
*/
|
||||||
protected PlaceholderType(String name, boolean isGenerated) {
|
protected PlaceholderType(String name, boolean isGenerated) {
|
||||||
@@ -94,26 +81,42 @@ public final class PlaceholderType extends UnifyType{
|
|||||||
EXISTING_PLACEHOLDERS.add(name); // Add to list of existing placeholder names
|
EXISTING_PLACEHOLDERS.add(name); // Add to list of existing placeholder names
|
||||||
IsGenerated = isGenerated;
|
IsGenerated = isGenerated;
|
||||||
}
|
}
|
||||||
|
|
||||||
public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) {
|
public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) {
|
||||||
return visitor.visit(this, ht);
|
return visitor.visit(this, ht);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Creates a fresh placeholder type with a name that does so far not exist.
|
* Creates a fresh placeholder type with a name that does so far not exist from the chars A-Z.
|
||||||
* A user could later instantiate a type using the same name that is equivalent to this type.
|
* A user could later instantiate a type using the same name that is equivalent to this type.
|
||||||
* @return A fresh placeholder type.
|
* @return A fresh placeholder type.
|
||||||
*/
|
*/
|
||||||
public synchronized static PlaceholderType freshPlaceholder() {
|
public static PlaceholderType freshPlaceholder() {
|
||||||
String name = nextName + (char) (rnd.nextInt(22) + 97); // Returns random char between 'a' and 'z'
|
String name;
|
||||||
// Add random chars while the name is in use.
|
|
||||||
while(EXISTING_PLACEHOLDERS.contains(name)) {
|
int attempts = 1000;
|
||||||
name += (char) (rnd.nextInt(22) + 97); // Returns random char between 'a' and 'z'
|
while (attempts-- > 0) {
|
||||||
|
int pc = PlaceholderType.placeholderCount.incrementAndGet();
|
||||||
|
|
||||||
|
StringBuilder sb = new StringBuilder();
|
||||||
|
while (pc >= 0) {
|
||||||
|
sb.append((char)(pc % 26 + 97));
|
||||||
|
pc = pc / 26 - 1;
|
||||||
|
}
|
||||||
|
name = sb.toString();
|
||||||
|
|
||||||
|
|
||||||
|
synchronized (EXISTING_PLACEHOLDERS) {
|
||||||
|
if (!EXISTING_PLACEHOLDERS.contains(name)) {
|
||||||
|
return new PlaceholderType(name, true);
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
return new PlaceholderType(name, true);
|
|
||||||
|
throw new RuntimeException("Failed to generate placeholder name in the allowed number of attempts");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* True if this placeholder is auto-generated, false if it is user-generated.
|
* True if this placeholder is auto-generated, false if it is user-generated.
|
||||||
*/
|
*/
|
||||||
@@ -124,51 +127,51 @@ public final class PlaceholderType extends UnifyType{
|
|||||||
public void setVariance(int v) {
|
public void setVariance(int v) {
|
||||||
variance = v;
|
variance = v;
|
||||||
}
|
}
|
||||||
|
|
||||||
public int getVariance() {
|
public int getVariance() {
|
||||||
return variance;
|
return variance;
|
||||||
}
|
}
|
||||||
|
|
||||||
public void reversVariance() {
|
public void reversVariance() {
|
||||||
if (variance == 1) {
|
if (variance == 1) {
|
||||||
setVariance(-1);
|
setVariance(-1);
|
||||||
} else {
|
} else {
|
||||||
if (variance == -1) {
|
if (variance == -1) {
|
||||||
setVariance(1);
|
setVariance(1);
|
||||||
}}
|
}}
|
||||||
}
|
}
|
||||||
|
|
||||||
public void setOrCons(byte i) {
|
public void setOrCons(byte i) {
|
||||||
orCons = i;
|
orCons = i;
|
||||||
}
|
}
|
||||||
|
|
||||||
public byte getOrCons() {
|
public byte getOrCons() {
|
||||||
return orCons;
|
return orCons;
|
||||||
}
|
}
|
||||||
|
|
||||||
public Boolean isWildcardable() {
|
public Boolean isWildcardable() {
|
||||||
return wildcardable;
|
return wildcardable;
|
||||||
}
|
}
|
||||||
public void disableWildcardtable() {
|
public void disableWildcardtable() {
|
||||||
wildcardable = false;
|
wildcardable = false;
|
||||||
}
|
}
|
||||||
|
|
||||||
public void enableWildcardtable() {
|
public void enableWildcardtable() {
|
||||||
wildcardable = true;
|
wildcardable = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
public void setWildcardtable(Boolean wildcardable) {
|
public void setWildcardtable(Boolean wildcardable) {
|
||||||
this.wildcardable = wildcardable;
|
this.wildcardable = wildcardable;
|
||||||
}
|
}
|
||||||
|
|
||||||
public Boolean isInnerType() {
|
public Boolean isInnerType() {
|
||||||
return innerType;
|
return innerType;
|
||||||
}
|
}
|
||||||
|
|
||||||
public void setInnerType(Boolean innerType) {
|
public void setInnerType(Boolean innerType) {
|
||||||
this.innerType = innerType;
|
this.innerType = innerType;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
Set<UnifyType> smArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
|
Set<UnifyType> smArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
|
||||||
return fc.smArg(this, fBounded);
|
return fc.smArg(this, fBounded);
|
||||||
@@ -178,17 +181,17 @@ public final class PlaceholderType extends UnifyType{
|
|||||||
Set<UnifyType> grArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
|
Set<UnifyType> grArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
|
||||||
return fc.grArg(this, fBounded);
|
return fc.grArg(this, fBounded);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public UnifyType setTypeParams(TypeParams newTp) {
|
public UnifyType setTypeParams(TypeParams newTp) {
|
||||||
return this; // Placeholders never have params.
|
return this; // Placeholders never have params.
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public int hashCode() {
|
public int hashCode() {
|
||||||
return typeName.hashCode();
|
return typeName.hashCode();
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
UnifyType apply(Unifier unif) {
|
UnifyType apply(Unifier unif) {
|
||||||
if(unif.hasSubstitute(this)) {
|
if(unif.hasSubstitute(this)) {
|
||||||
@@ -200,15 +203,15 @@ public final class PlaceholderType extends UnifyType{
|
|||||||
}
|
}
|
||||||
return this;
|
return this;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public boolean equals(Object obj) {
|
public boolean equals(Object obj) {
|
||||||
if(!(obj instanceof PlaceholderType))
|
if(!(obj instanceof PlaceholderType))
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
return ((PlaceholderType) obj).getName().equals(typeName);
|
return ((PlaceholderType) obj).getName().equals(typeName);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Collection<PlaceholderType> getInvolvedPlaceholderTypes() {
|
public Collection<PlaceholderType> getInvolvedPlaceholderTypes() {
|
||||||
|
|||||||
9
src/main/java/de/dhbwstuttgart/util/Logger.java
Normal file
9
src/main/java/de/dhbwstuttgart/util/Logger.java
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
package de.dhbwstuttgart.util;
|
||||||
|
|
||||||
|
public class Logger {
|
||||||
|
|
||||||
|
public static void print(String s) {
|
||||||
|
System.out.println(s);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
Reference in New Issue
Block a user