Compare commits
22 Commits
sealedInte...dcdecd4b83

| Author | SHA1 | Date |
|---|---|---|
| | dcdecd4b83 | |
| | 6c2d97b770 | |
| | 426c2916d3 | |
| | f722a00fbb | |
| | 32797c9b9f | |
| | 87f655c85a | |
| | 613dceae1d | |
| | 81cac06e16 | |
| | a47d5bc024 | |
| | e5916d455a | |
| | ebb639e72e | |
| | f0a4a51ce6 | |
| | 7442880452 | |
| | c4dc3b4245 | |
| | 1391206dfe | |
| | 659bf6b500 | |
| | 33ed22c06a | |
| | 70f7857661 | |
| | 45275b6888 | |
| | 2144dd9341 | |
| | 69c2bb3dc9 | |
| | 3a57d5e025 | |
```diff
@@ -15,7 +15,7 @@ jobs:
         uses: actions/setup-java@v4
         with:
           distribution: 'temurin'
-          java-version: '22'
+          java-version: '23'
           cache: 'maven'
       - name: Compile project
         run: |
```
pom.xml (4 changed lines)

```diff
@@ -54,8 +54,8 @@ http://maven.apache.org/maven-v4_0_0.xsd">
           <version>3.11.0</version>
           <configuration>
             <compilerArgs>--enable-preview</compilerArgs>
-            <source>22</source>
-            <target>22</target>
+            <source>23</source>
+            <target>23</target>
           </configuration>
         </plugin>
         <plugin>
```
```diff
@@ -1306,16 +1306,17 @@ public class Codegen {
         var mt = MethodType.methodType(CallSite.class, MethodHandles.Lookup.class, String.class, MethodType.class, Object[].class);
         var bootstrap = new Handle(H_INVOKESTATIC, "java/lang/runtime/SwitchBootstraps", "typeSwitch", mt.toMethodDescriptorString(), false);

-        var types = new ArrayList<Object>(aSwitch.cases().size());
+        var types = new ArrayList<>(aSwitch.cases().size());
         for (var cse : aSwitch.cases()) for (var label : cse.labels()) {
-            if (label instanceof TargetTypePattern || label instanceof TargetComplexPattern)
-                types.add(Type.getObjectType(label.type().getInternalName()));
-            else if (label instanceof TargetLiteral lit)
+            if (label instanceof TargetTypePattern || label instanceof TargetComplexPattern) {
+                if (label.type() instanceof TargetGenericType) types.add(Type.getType(Object.class));
+                else types.add(Type.getObjectType(label.type().getInternalName()));
+            } else if (label instanceof TargetLiteral lit) {
                 types.add(lit.value());
-            else if (label instanceof TargetGuard guard)
+            } else if (label instanceof TargetGuard guard) {
                 types.add(Type.getObjectType(guard.inner().type().getInternalName()));
-            // TODO Same here we need to evaluate constant;
-            else {
+                // TODO Same here we need to evaluate constant;
+            } else {
                 System.out.println(label);
                 throw new NotImplementedException();
             }
```
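For orientation, the `SwitchBootstraps.typeSwitch` handle built above is the same `invokedynamic` bootstrap that javac uses when it compiles a pattern switch. A minimal source-level sketch of the kind of switch this bytecode implements (the `Shape` hierarchy is hypothetical, not from this repository):

```java
// Hypothetical example: javac lowers this pattern switch to an invokedynamic call whose
// bootstrap method is java.lang.runtime.SwitchBootstraps.typeSwitch, the same handle
// built in Codegen above; the switch labels correspond to the entries collected in `types`.
sealed interface Shape permits Circle, Square {}
record Circle(double radius) implements Shape {}
record Square(double side) implements Shape {}

class Area {
    static double of(Shape shape) {
        return switch (shape) {
            case Circle c -> Math.PI * c.radius() * c.radius();
            case Square s -> s.side() * s.side();
        };
    }
}
```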
```diff
@@ -326,7 +326,7 @@ public class JavaTXCompiler {
             for (SourceFile f : this.sourceFiles.values()) {
                 logFile.write(ASTTypePrinter.print(f));
             }
-            logFile.flush();
+            // logFile.flush();
             Set<PlaceholderType> varianceTPHold;
             Set<PlaceholderType> varianceTPH = new HashSet<>();
             varianceTPH = varianceInheritanceConstraintSet(unifyCons);
@@ -392,7 +392,7 @@ public class JavaTXCompiler {
             logFile.write("FC:\\" + finiteClosure.toString() + "\n");
             logFile.write(ASTTypePrinter.print(sf));
             System.out.println(ASTTypePrinter.print(sf));
-            logFile.flush();
+            // logFile.flush();
             System.out.println("Unify nach Oder-Constraints-Anpassung:" + unifyCons.toString());
             Set<PlaceholderType> varianceTPHold;
             Set<PlaceholderType> varianceTPH = new HashSet<>();
@@ -419,7 +419,7 @@ public class JavaTXCompiler {
                 System.out.println("RESULT Final: " + li.getResults());
                 System.out.println("Constraints for Generated Generics: " + " ???");
                 logFile.write("RES_FINAL: " + li.getResults().toString() + "\n");
-                logFile.flush();
+                // logFile.flush();
                 return li.getResults();
             }
             /* UnifyResultModel End */
@@ -430,7 +430,7 @@ public class JavaTXCompiler {
                 Set<Set<UnifyPair>> result = unify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons, finiteClosure), usedTasks);
                 System.out.println("RESULT: " + result);
                 logFile.write("RES: " + result.toString() + "\n");
-                logFile.flush();
+                // logFile.flush();
                 results.addAll(result);

                 results = results.stream().map(x -> {
@@ -447,9 +447,9 @@ public class JavaTXCompiler {
                 System.out.println("RESULT Final: " + results);
                 System.out.println("Constraints for Generated Generics: " + " ???");
                 logFile.write("RES_FINAL: " + results.toString() + "\n");
-                logFile.flush();
+                // logFile.flush();
                 logFile.write("PLACEHOLDERS: " + PlaceholderType.EXISTING_PLACEHOLDERS);
-                logFile.flush();
+                // logFile.flush();
             }
         } catch (IOException e) {
             System.err.println("kein LogFile");
```
```diff
@@ -0,0 +1,11 @@
+package de.dhbwstuttgart.exceptions;
+
+
+/**
+ * Eine Runtime Exception, die für den Fall genutzt wird, dass eine Unifikation abgebrochen wird.
+ * Durch das Werfen einer Exception können Abbrüche auch aus Methodenaufrufen heraus
+ * geprüft werden, da zuvor nur ein return X; stattfinden würde.
+ */
+public class UnifyCancelException extends RuntimeException {
+
+}
```
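A brief sketch (an assumption, not part of this comparison) of the cancellation pattern the new unchecked exception enables: a cancel flag is checked deep inside the recursive unify calls, the exception unwinds all nested calls at once, and the code that started the run catches it instead of threading `return X;` through every level. Everything here except `UnifyCancelException` is hypothetical:

```java
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;

import de.dhbwstuttgart.exceptions.UnifyCancelException;

// Hypothetical sketch: a cancel flag checked inside a deep loop; throwing
// UnifyCancelException unwinds all nested calls at once, and the top-level
// caller turns a cancelled run into an empty result.
class CancellableUnifyRun {
    private final AtomicBoolean cancelled = new AtomicBoolean(false);

    void cancel() { cancelled.set(true); }

    Set<String> run(Set<String> constraints) {
        try {
            return solve(constraints);
        } catch (UnifyCancelException e) {
            return Set.of(); // cancelled: report no result
        }
    }

    private Set<String> solve(Set<String> constraints) {
        for (String c : constraints) {
            if (cancelled.get()) throw new UnifyCancelException();
            // ... one unification step per constraint ...
        }
        return constraints;
    }
}
```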
```diff
@@ -194,7 +194,7 @@ public class UnifyTypeFactory {
                 && ((PlaceholderType)lhs).isWildcardable()
                 && (rhs = ret.getLhsType()) instanceof PlaceholderType) {
             if (lhs.getName().equals("AQ")) {
-                System.out.println("");
+                // System.out.println("");
             }
             ((PlaceholderType)rhs).enableWildcardtable();
         }
@@ -203,7 +203,7 @@ public class UnifyTypeFactory {
                 && ((PlaceholderType)rhs).isWildcardable()
                 && (lhs = ret.getLhsType()) instanceof PlaceholderType) {
             if (rhs.getName().equals("AQ")) {
-                System.out.println("");
+                // System.out.println("");
             }
             ((PlaceholderType)lhs).enableWildcardtable();
         }
```
```diff
@@ -17,7 +17,6 @@ import de.dhbwstuttgart.target.tree.expression.*;
 import de.dhbwstuttgart.target.tree.type.*;
 import de.dhbwstuttgart.typeinference.result.*;

-import java.lang.annotation.Target;
 import java.util.*;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
```
@@ -145,49 +144,25 @@ public class ASTToTargetAST {
|
||||
return ret;
|
||||
}
|
||||
|
||||
// This is used to serve as a custom equality to signature that performs a weak check without going into record patterns.
|
||||
// The two signatures are considered equal if all the argument types match.
|
||||
// This also turns equal if both types implement a sealed super interface
|
||||
class PatternSignature {
|
||||
final TargetMethod.Signature signature;
|
||||
final String name;
|
||||
PatternSignature(String name, TargetMethod.Signature signature) {
|
||||
this.signature = signature;
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (!(o instanceof PatternSignature other)) return false;
|
||||
if (!this.name.equals(other.name)) return false;
|
||||
if (other.signature.parameters().size() != signature.parameters().size()) return false;
|
||||
for (var i = 0; i < signature.parameters().size(); i++) {
|
||||
var p1 = signature.parameters().get(i).pattern().type();
|
||||
var p2 = other.signature.parameters().get(i).pattern().type();
|
||||
if (p1 instanceof TargetGenericType && p2 instanceof TargetGenericType) continue;
|
||||
if (!p1.equals(p2) && commonSuperInterfaceTypes(p1, p2).isEmpty()) return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return signature.parameters().size();
|
||||
}
|
||||
}
|
||||
|
||||
// This finds a common sealed interface type to group together methods that use different records
|
||||
private List<ClassOrInterface> commonSuperInterfaceTypes(TargetType a, TargetType b) {
|
||||
if (a instanceof TargetGenericType && b instanceof TargetGenericType) return List.of(ASTFactory.createClass(Object.class));
|
||||
if (a instanceof TargetGenericType && b instanceof TargetGenericType) return List.of(ASTFactory.createObjectClass());
|
||||
if (a instanceof TargetRefType ta && b instanceof TargetGenericType)
|
||||
return List.of(compiler.getClass(new JavaClassName(ta.name())));
|
||||
if (b instanceof TargetRefType tb && a instanceof TargetGenericType)
|
||||
return List.of(compiler.getClass(new JavaClassName(tb.name())));
|
||||
|
||||
if (a instanceof TargetRefType ta && b instanceof TargetRefType tb) {
|
||||
var res = new HashSet<ClassOrInterface>();
|
||||
|
||||
var cla = compiler.getClass(new JavaClassName(ta.name()));
|
||||
var clb = compiler.getClass(new JavaClassName(tb.name()));
|
||||
|
||||
while (!cla.equals(ASTFactory.createClass(Object.class))) {
|
||||
if (cla.equals(clb)) return List.of(cla);
|
||||
|
||||
while (!cla.equals(ASTFactory.createObjectClass())) {
|
||||
var clb2 = clb;
|
||||
while (!clb2.equals(ASTFactory.createClass(Object.class))) {
|
||||
while (!clb2.equals(ASTFactory.createObjectClass())) {
|
||||
for (var intfa : cla.getSuperInterfaces()) {
|
||||
for (var intfb : clb.getSuperInterfaces()) {
|
||||
if (intfa.equals(intfb)) {
|
||||
@@ -207,48 +182,167 @@ public class ASTToTargetAST {
|
||||
return List.of();
|
||||
}
|
||||
|
||||
// TODO This is ugly and probably doesn't work right
|
||||
private boolean patternStrictlyEquals(TargetComplexPattern a, TargetComplexPattern b) {
|
||||
if (!a.name().equals(b.name())) return false;
|
||||
if (a.subPatterns().size() != b.subPatterns().size()) return false;
|
||||
for (var i = 0; i < a.subPatterns().size(); i++) {
|
||||
var p1 = a.subPatterns().get(i);
|
||||
var p2 = b.subPatterns().get(i);
|
||||
if (p1 instanceof TargetComplexPattern pc1 && p2 instanceof TargetComplexPattern pc2 &&
|
||||
patternStrictlyEquals(pc1, pc2)) return false;
|
||||
if (p1 instanceof TargetTypePattern pt1 && p2 instanceof TargetTypePattern pt2) {
|
||||
if (pt1.type() instanceof TargetGenericType && pt2.type() instanceof TargetGenericType) continue;
|
||||
}
|
||||
if (!p1.type().equals(p2.type()) && commonSuperInterfaceTypes(p1.type(), p2.type()).isEmpty()) return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
private boolean canCombine(TargetMethod m1, TargetMethod m2) {
|
||||
if (!m1.name().equals(m2.name())) return false;
|
||||
var s1 = m1.signature();
|
||||
var s2 = m2.signature();
|
||||
if (s1.parameters().size() != s2.parameters().size()) return false;
|
||||
if (s1.parameters().isEmpty()) return false;
|
||||
for (var i = 0; i < s1.parameters().size(); i++) {
|
||||
var p1 = s1.parameters().get(i).pattern();
|
||||
var p2 = s2.parameters().get(i).pattern();
|
||||
if (p1.type() instanceof TargetGenericType || p2.type() instanceof TargetGenericType) continue;
|
||||
if (p1 instanceof TargetComplexPattern pc1 && p2 instanceof TargetComplexPattern pc2 &&
|
||||
patternStrictlyEquals(pc1, pc2)) return false;
|
||||
if (!p1.equals(p2) && commonSuperInterfaceTypes(p1.type(), p2.type()).isEmpty()) return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
private record Combination(TargetMethod a, TargetMethod b) {
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (!(o instanceof Combination(TargetMethod a1, TargetMethod b1))) return false;
|
||||
return this.a.equals(a1) && this.b.equals(b1) ||
|
||||
this.a.equals(b1) && this.b.equals(a1);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hashCode(a) + Objects.hashCode(b);
|
||||
}
|
||||
}
|
||||
|
||||
public List<List<TargetMethod>> groupOverloads(ClassOrInterface input, List<Method> methods) {
|
||||
var mapOfSignatures = new HashMap<PatternSignature, List<TargetMethod>>();
|
||||
for (var method : methods) {
|
||||
var mapOfTargetMethods = new HashMap<Generics, TargetMethod[]>();
|
||||
for (var generics : all) {
|
||||
mapOfTargetMethods.put(generics, new TargetMethod[methods.size()]);
|
||||
}
|
||||
|
||||
for (var i = 0; i < methods.size(); i++) {
|
||||
var method = methods.get(i);
|
||||
// Convert all methods
|
||||
var methodsWithTphs = convert(input, method);
|
||||
// Then check for methods with the same signature
|
||||
var resMethods = new HashSet<MethodWithTphs>();
|
||||
|
||||
for (var m1 : methodsWithTphs) {
|
||||
System.out.println(m1.method.name() + " -> " + m1.method.signature().parameters().stream().map(m -> m.pattern().type()).toList());
|
||||
for (var m : methodsWithTphs) {
|
||||
var resultMethods = mapOfTargetMethods.get(m.generics);
|
||||
resultMethods[i] = m.method;
|
||||
}
|
||||
}
|
||||
/*System.out.println("============== INPUT ==============");
|
||||
for (var m : mapOfTargetMethods.values()) {
|
||||
for (var v : m) System.out.println(v.name() + " " + v.getSignature());
|
||||
System.out.println();
|
||||
}*/
|
||||
|
||||
outer:
|
||||
for (var m1 : methodsWithTphs) {
|
||||
for (var m2 : methodsWithTphs) {
|
||||
for (var i = 0; i < m1.args.size(); i++) {
|
||||
var arg1 = m1.args.get(i);
|
||||
var arg2 = m2.args.get(i);
|
||||
if (arg1.parameter.equals(arg2.parameter)) {
|
||||
if (isSupertype(arg1.signature, arg2.signature) &&
|
||||
!arg1.signature.equals(arg2.signature)) continue outer;
|
||||
var allCombinations = new HashSet<Set<Combination>>();
|
||||
// Combine methods based on their signature and position in the result set
|
||||
for (var g1 : all) {
|
||||
var resMeth1 = mapOfTargetMethods.get(g1);
|
||||
for (var i = 0; i < methods.size(); i++) {
|
||||
var m1 = resMeth1[i];
|
||||
if (m1 == null) continue;
|
||||
|
||||
for (var g2 : all) {
|
||||
if (g1 == g2) continue; // No need to combine the same method
|
||||
var resMeth2 = mapOfTargetMethods.get(g2);
|
||||
var m2 = resMeth2[i];
|
||||
if (m2 == null) continue;
|
||||
|
||||
var combinations = new HashSet<Combination>();
|
||||
|
||||
if (canCombine(m1, m2)) {
|
||||
//System.out.println(" Combining " + m1.getSignature() + " and " + m2.getSignature());
|
||||
combinations.add(new Combination(m1, m2));
|
||||
for (var j = 0; j < methods.size(); j++) {
|
||||
if (j == i) continue;
|
||||
var m3 = resMeth2[j];
|
||||
if (m3 == null) continue;
|
||||
var m4 = resMeth1[j];
|
||||
if (m4 == null) continue;
|
||||
combinations.add(new Combination(m4, m3));
|
||||
//System.out.println("Also Combining " + m4.getSignature() + " and " + m3.getSignature());
|
||||
}
|
||||
} else {
|
||||
//System.out.println(" Not Combining " + m1.getSignature() + " and " + m2.getSignature());
|
||||
}
|
||||
if (!combinations.isEmpty()) allCombinations.add(combinations);
|
||||
}
|
||||
resMethods.add(m1);
|
||||
}
|
||||
|
||||
for (var m : resMethods) {
|
||||
var signature = new PatternSignature(m.method.name(), m.method.signature());
|
||||
var methodsWithSameSignature = mapOfSignatures.getOrDefault(signature, new ArrayList<>());
|
||||
methodsWithSameSignature.add(m.method);
|
||||
mapOfSignatures.put(signature, methodsWithSameSignature);
|
||||
}
|
||||
}
|
||||
|
||||
mapOfSignatures.values().forEach(e -> {
|
||||
e.forEach(e2 -> {
|
||||
System.out.println(e2.name() + " -> " + e2.signature().parameters().stream().map(m -> m.pattern().type()).toList());
|
||||
});
|
||||
if (allCombinations.isEmpty()) allCombinations.add(new HashSet<>());
|
||||
|
||||
// Combine back into output format
|
||||
var r0 = new HashSet<Set<TargetMethod>>();
|
||||
for (var combinations : allCombinations) {
|
||||
var r1 = new HashSet<Set<TargetMethod>>();
|
||||
// This is used to weed out duplicates
|
||||
var uniqued = new HashSet<TargetMethod>();
|
||||
// We go over all methods in the result
|
||||
for (var g : all) for (var i = 0; i < methods.size(); i++) {
|
||||
var r2 = new HashSet<TargetMethod>();
|
||||
var m = mapOfTargetMethods.get(g)[i];
|
||||
if (m == null) continue;
|
||||
if (!uniqued.contains(m)) {
|
||||
// Add the method to r2
|
||||
r2.add(m);
|
||||
uniqued.add(m);
|
||||
} else continue;
|
||||
// Find all combinations that contain the method and add them to the result
|
||||
// if not filtered out by uniqued
|
||||
for (var c : combinations) {
|
||||
if (c.a.equals(m) || c.b.equals(m)) {
|
||||
if (!uniqued.contains(c.a)) {
|
||||
r2.add(c.a);
|
||||
uniqued.add(c.a);
|
||||
}
|
||||
if (!uniqued.contains(c.b)) {
|
||||
r2.add(c.b);
|
||||
uniqued.add(c.b);
|
||||
}
|
||||
}
|
||||
}
|
||||
r1.add(r2);
|
||||
}
|
||||
outer: for (var s1 : r1) {
|
||||
for (var s2 : new HashSet<>(r0)) {
|
||||
if (s2.containsAll(s1)) {
|
||||
continue outer;
|
||||
} else if (s1.containsAll(s2)) {
|
||||
r0.remove(s2);
|
||||
r0.add(s1);
|
||||
continue outer;
|
||||
}
|
||||
}
|
||||
r0.add(s1);
|
||||
}
|
||||
}
|
||||
|
||||
var result = r0.stream().map(l -> l.stream().toList()).toList();
|
||||
|
||||
System.out.println("============== OUTPUT ==============");
|
||||
for (var l : result) {
|
||||
for (var m : l) System.out.println(m.name() + " " + m.getSignature());
|
||||
System.out.println();
|
||||
});
|
||||
return mapOfSignatures.values().stream().toList();
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
public TargetStructure convert(ClassOrInterface input) {
|
||||
```diff
@@ -281,7 +375,8 @@ public class ASTToTargetAST {
         var superInterfaces = input.getSuperInterfaces().stream().map(clazz -> convert(clazz, generics.javaGenerics)).toList();
         var constructors = input.getConstructors().stream().map(constructor -> this.convert(input, constructor, finalFieldInitializer)).flatMap(List::stream).toList();
         var fields = input.getFieldDecl().stream().map(this::convert).toList();
-        var methods = groupOverloads(input, input.getMethods()).stream().map(m -> generatePatternOverloads(input, m)).flatMap(List::stream).toList();
+        var methods = groupOverloads(input, input.getMethods()).stream().map(m -> generatePatternOverloads(input, m)).flatMap(List::stream)
+                .collect(Collectors.toSet()).stream().toList(); // Unique generated methods

         TargetMethod staticConstructor = null;
         if (input.getStaticInitializer().isPresent())
@@ -403,7 +498,7 @@ public class ASTToTargetAST {
         }

         var cases = new ArrayList<TargetSwitch.Case>();
-        var usedPatterns = new HashSet<TargetType>();
+        var usedPatterns = new HashSet<TargetPattern>();

         for (var method : methods) {
             var patternsRec = new ArrayList<>(patterns);
@@ -423,9 +518,8 @@ public class ASTToTargetAST {
             }

             var lastPattern = patternsRec.getLast();
-            var type = unwrap(lastPattern.type());
-            if (usedPatterns.contains(type)) continue;
-            usedPatterns.add(type);
+            if (usedPatterns.contains(lastPattern)) continue;
+            usedPatterns.add(lastPattern);

             var candidates = methods.stream().filter(m -> {
                 var j = 0;
@@ -471,9 +565,12 @@ public class ASTToTargetAST {
                 var t3 = m.signature().parameters().get(i).pattern().type();
                 commonSubTypes.retainAll(commonSuperInterfaceTypes(t1, t3));
             }
-            if (commonSubTypes.size() != 1) throw new DebugException("Invalid overload");
+            if (commonSubTypes.size() > 1) throw new DebugException("Invalid overload");
+            // TODO accept multiple types
-            var superType = commonSubTypes.iterator().next();
+            var superType = ASTFactory.createObjectClass();
+            if (!commonSubTypes.isEmpty())
+                superType = commonSubTypes.iterator().next();

             String name;
             if (p1 instanceof TargetComplexPattern) name = "__var" + i;
             else name = p1.name();
@@ -543,7 +640,19 @@ public class ASTToTargetAST {
         }).findFirst();
     }

-    record MethodWithTphs(TargetMethod method, List<SignaturePairTarget> args) {}
+    record MethodWithTphs(TargetMethod method, Generics generics, List<SignaturePairTarget> args) {
+        @Override
+        public boolean equals(Object o) {
+            if (this == o) return true;
+            if (!(o instanceof MethodWithTphs that)) return false;
+            return Objects.equals(method, that.method) && Objects.equals(args, that.args);
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hash(method, args);
+        }
+    }

     record Signature(TargetMethod.Signature java, TargetMethod.Signature tx, Generics generics) {}

@@ -592,7 +701,7 @@ public class ASTToTargetAST {
             var newMethod = new TargetMethod(method.modifier, method.name, convert(method.block), signature.java, signature.tx);
             var concreteParams = tphsInMethods.getOrDefault(method, new HashSet<>()).stream().map(sig -> new SignaturePairTarget(convert(sig.signature), convert(sig.parameter))).toList();

-            result.add(new MethodWithTphs(newMethod, concreteParams));
+            result.add(new MethodWithTphs(newMethod, generics, concreteParams));
         }

         return result;
```
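The grouping logic added earlier in this file (the `Combination` record in the `@@ -207,48 +182,167 @@` hunk) relies on an unordered pair: `equals` accepts the components in either order and `hashCode` is a symmetric sum. A standalone sketch of that idea with a hypothetical `String` element type:

```java
import java.util.Objects;

// Standalone sketch of an unordered pair, mirroring the Combination record in this diff:
// equals() treats (a, b) and (b, a) as the same pair, and hashCode() is order-insensitive.
record UnorderedPair(String a, String b) {
    @Override
    public boolean equals(Object o) {
        if (!(o instanceof UnorderedPair(String a1, String b1))) return false;
        return (a.equals(a1) && b.equals(b1)) || (a.equals(b1) && b.equals(a1));
    }

    @Override
    public int hashCode() {
        return Objects.hashCode(a) + Objects.hashCode(b); // symmetric, consistent with equals
    }

    public static void main(String[] args) {
        System.out.println(new UnorderedPair("x", "y").equals(new UnorderedPair("y", "x"))); // true
    }
}
```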
@@ -0,0 +1,774 @@
|
||||
package de.dhbwstuttgart.typeinference.unify;
|
||||
|
||||
import de.dhbwstuttgart.exceptions.UnifyCancelException;
|
||||
import de.dhbwstuttgart.typeinference.constraints.Constraint;
|
||||
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.WildcardType;
|
||||
import de.dhbwstuttgart.util.Pair;
|
||||
import de.dhbwstuttgart.util.Triple;
|
||||
import java.io.Writer;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.RecursiveTask;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
class CartesianRecursiveHelper extends RecursiveTask<Set<Set<UnifyPair>>> {
|
||||
|
||||
// Inherited values from TypeUnifyTask
|
||||
protected Set<? extends Set<UnifyPair>> nextSet;
|
||||
protected List<Set<UnifyPair>> nextSetAsList;
|
||||
protected boolean parallel;
|
||||
protected boolean oderConstraint;
|
||||
protected int variance;
|
||||
protected Optional<UnifyPair> optOrigPair;
|
||||
protected Set<UnifyPair> methodSignatureConstraint;
|
||||
protected Set<Set<UnifyPair>> singleElementSets;
|
||||
protected int rekTiefe;
|
||||
protected Set<UnifyPair> sameEqSet;
|
||||
protected List<Set<Constraint<UnifyPair>>> oderConstraints;
|
||||
protected Set<UnifyPair> eq;
|
||||
protected IFiniteClosure fc;
|
||||
protected Writer logFile;
|
||||
protected boolean log;
|
||||
|
||||
// Custom Properties
|
||||
protected TypeUnifyTask typeUnifyTask;
|
||||
protected Set<Set<UnifyPair>> result;
|
||||
|
||||
public CartesianRecursiveHelper(TypeUnifyTask typeUnifyTask) {
|
||||
// TODO receive arguments
|
||||
}
|
||||
|
||||
protected Set<Set<UnifyPair>> run() {
|
||||
result = new HashSet<>();
|
||||
|
||||
Set<UnifyPair> a_last = null;
|
||||
while (!nextSetAsList.isEmpty()) {
|
||||
// determine value, then start as fork
|
||||
|
||||
// select the next case to work on from nextSetAsList (removing it)
|
||||
var nextCase = this.selectNextCases();
|
||||
Set<UnifyPair> a = nextCase.getValue1();
|
||||
List<Set<UnifyPair>> nextSetasListRest = nextCase.getValue2();
|
||||
List<Set<UnifyPair>> nextSetasListOderConstraints = nextCase.getValue3();
|
||||
|
||||
writeLog("nextSet: " + nextSet.toString());
|
||||
writeLog("nextSetasList: " + nextSetAsList.toString());
|
||||
|
||||
if (oderConstraint) {
|
||||
//Methodconstraints werden abgespeichert für die Bytecodegenerierung von Methodenaufrufen
|
||||
methodSignatureConstraint.addAll(((Constraint<UnifyPair>) a).getmethodSignatureConstraint());
|
||||
writeLog("ERSTELLUNG methodSignatureConstraint: " + methodSignatureConstraint);
|
||||
//System.out.println("ERSTELLUNG methodSignatureConstraint: " +noOfThread+" "+methodSignatureConstraint);
|
||||
//System.out.println("a: " +a);
|
||||
//System.out.println("eq: " +eq);
|
||||
//System.out.println();
|
||||
}
|
||||
|
||||
/* Wenn bei (a \in theta) \in a zu Widerspruch in oneElems wird
|
||||
* a verworfen und zu nächstem Element von nextSetasList gegangen
|
||||
*/
|
||||
if (!oderConstraint && !sameEqSet.isEmpty() && !typeUnifyTask.checkNoContradiction(a, sameEqSet, result)) {
|
||||
TypeUnifyTask.noShortendElements++;
|
||||
continue;
|
||||
}
|
||||
|
||||
var innerLoopResult = innerLoop(a, a_last,nextSetasListRest, nextSetasListOderConstraints);
|
||||
if (innerLoopResult == null) {
|
||||
break;
|
||||
}
|
||||
|
||||
a_last = a;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Selects values for the next iteration in the run method:
|
||||
* - a : The element to ???
|
||||
* - nextSetAsList: The list of cases that have no relation to the selected a and will have to be worked on
|
||||
* - nextSetasListOderConstraints: The list of cases of which the receiver contains "? extends", typically one element
|
||||
*
|
||||
* @return The new a, the new nextSetAsList, the new nextSetasListOderConstraints
|
||||
*/
|
||||
protected Triple<Set<UnifyPair>, List<Set<UnifyPair>>, List<Set<UnifyPair>>> selectNextCases() {
|
||||
Set<UnifyPair> a;
|
||||
/* Liste der Faelle für die parallele Verarbeitung
|
||||
* Enthaelt Elemente, die nicht in Relation zu aktuellem Fall in der
|
||||
* Variablen a stehen. Diese muesse auf alle Faelle bearbeitet werden,
|
||||
* Deshalb wird ihre Berechnung parallel angestossen.
|
||||
*/
|
||||
List<Set<UnifyPair>> nextSetasListRest = new ArrayList<>();
|
||||
|
||||
/* Liste der Faelle, bei dem Receiver jeweils "? extends" enthaelt bzw. nicht enthaelt
|
||||
* In der Regel ist dies genau ein Element
|
||||
* Dieses Element wird später aus nextSetasList geloescht, wenn das jeweils andere Element zum Erfolg
|
||||
* gefuehrt hat.
|
||||
*/
|
||||
List<Set<UnifyPair>> nextSetasListOderConstraints = new ArrayList<>();
|
||||
|
||||
if (variance == 1) {
|
||||
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
|
||||
writeLog("Max: a in " + variance + " " + a);
|
||||
nextSetAsList.remove(a);
|
||||
if (oderConstraint) {
|
||||
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
|
||||
}
|
||||
writeLog("nextSetasListOderConstraints 1: " + nextSetasListOderConstraints);
|
||||
|
||||
//Alle maximale Elemente in nextSetasListRest bestimmen
|
||||
//nur für diese wird parallele Berechnung angestossen.
|
||||
Set<UnifyPair> finalA = a;
|
||||
nextSetasListRest = typeUnifyTask.oup.maxElements(
|
||||
nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != 1).toList()
|
||||
);
|
||||
} else if (variance == -1) {
|
||||
a = typeUnifyTask.oup.min(nextSetAsList.iterator());
|
||||
writeLog("Min: a in " + variance + " " + a);
|
||||
if (oderConstraint) {
|
||||
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
|
||||
}
|
||||
writeLog("nextSetasListOderConstraints -1: " + nextSetasListOderConstraints);
|
||||
nextSetAsList.remove(a);
|
||||
|
||||
//Alle minimalen Elemente in nextSetasListRest bestimmen
|
||||
//nur für diese wird parallele Berechnung angestossen.
|
||||
Set<UnifyPair> finalA = a;
|
||||
nextSetasListRest = typeUnifyTask.oup.minElements(
|
||||
nextSetAsList.stream().filter(a_next -> typeUnifyTask.oup.compare(finalA, a_next) != -1).toList()
|
||||
);
|
||||
} else if (variance == 2) {
|
||||
a = nextSetAsList.removeFirst();
|
||||
|
||||
//Fuer alle Elemente wird parallele Berechnung angestossen.
|
||||
nextSetasListRest = new ArrayList<>(nextSetAsList);
|
||||
} else if (variance == 0) {
|
||||
//wenn a <. theta dann ist ein maximales Element sehr wahrscheinlich
|
||||
//wenn theta <. a dann ist ein minimales Element sehr wahrscheinlich
|
||||
if (!oderConstraint && optOrigPair != null && optOrigPair.isPresent()) {
|
||||
if (optOrigPair.get().getBasePair().getLhsType() instanceof PlaceholderType) {
|
||||
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
|
||||
} else {
|
||||
a = typeUnifyTask.oup.min(nextSetAsList.iterator());
|
||||
}
|
||||
nextSetAsList.remove(a);
|
||||
} else if (oderConstraint) {
|
||||
a = typeUnifyTask.oup.max(nextSetAsList.iterator());
|
||||
nextSetAsList.remove(a);
|
||||
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) a).getExtendConstraint());
|
||||
} else {
|
||||
a = nextSetAsList.removeFirst();
|
||||
}
|
||||
}
|
||||
else {
|
||||
throw new RuntimeException("Invalid variance in cartesian product calculation: " + variance);
|
||||
}
|
||||
|
||||
return new Triple<>(a, nextSetasListRest, nextSetasListOderConstraints);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Set<Set<UnifyPair>> compute() {
|
||||
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
private Object innerLoop(
|
||||
Set<UnifyPair> a,
|
||||
Set<UnifyPair> a_last,
|
||||
List<Set<UnifyPair>> nextSetasListRest,
|
||||
List<Set<UnifyPair>> nextSetasListOderConstraints
|
||||
) {
|
||||
Set<Set<UnifyPair>> elems = new HashSet<>(singleElementSets);
|
||||
writeLog("a1: " + rekTiefe + " " + "variance: " + variance + " " + a.toString() + "\n");
|
||||
|
||||
//Ergebnisvariable für den aktuelle Thread
|
||||
Set<Set<UnifyPair>> currentThreadResult;
|
||||
|
||||
//Menge der Ergebnisse der geforkten Threads
|
||||
Set<Set<Set<UnifyPair>>> forkResults = new HashSet<>();
|
||||
|
||||
Set<Set<UnifyPair>> aParDef = new HashSet<>();
|
||||
|
||||
/* Wenn parallel gearbeitet wird, wird je nach Varianz ein neuer Thread
|
||||
* gestartet, der parallel weiterarbeitet.
|
||||
*/
|
||||
if (parallel && (variance == 1)) {
|
||||
Set<TypeUnify2Task> forks = new HashSet<>();
|
||||
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
|
||||
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
|
||||
newElemsOrig.add(a);
|
||||
|
||||
/* FORK ANFANG */
|
||||
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, typeUnifyTask.urm, typeUnifyTask.usedTasks, methodSignatureConstraint);
|
||||
//forks.add(forkOrig);
|
||||
if (typeUnifyTask.myIsCancelled()) {
|
||||
throw new UnifyCancelException();
|
||||
}
|
||||
/* FORK ENDE */
|
||||
|
||||
writeLog("a in " + variance + " " + a);
|
||||
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
|
||||
while (!nextSetasListRest.isEmpty()) {
|
||||
Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
|
||||
nextSetAsList.remove(nSaL);
|
||||
writeLog("1 RM" + nSaL.toString());
|
||||
|
||||
if (!oderConstraint) {
|
||||
//ueberpruefung ob zu a =. ty \in nSaL in sameEqSet ein Widerspruch besteht
|
||||
if (!sameEqSet.isEmpty() && !typeUnifyTask.checkNoContradiction(nSaL, sameEqSet, result)) {
|
||||
nSaL = null;
|
||||
typeUnifyTask.noShortendElements++;
|
||||
continue;
|
||||
}
|
||||
} else {
|
||||
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) nSaL).getExtendConstraint());
|
||||
}
|
||||
Set<UnifyPair> newEq = new HashSet<>(eq);
|
||||
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
|
||||
newElems.add(nSaL);
|
||||
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, typeUnifyTask.urm, typeUnifyTask.usedTasks, new HashSet<>(methodSignatureConstraint));
|
||||
forks.add(fork);
|
||||
if (typeUnifyTask.myIsCancelled()) {
|
||||
throw new UnifyCancelException();
|
||||
}
|
||||
fork.fork();
|
||||
}
|
||||
//currentThreadResult = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
|
||||
|
||||
/* FORK ANFANG */
|
||||
currentThreadResult = forkOrig.compute();
|
||||
if (typeUnifyTask.myIsCancelled()) {
|
||||
throw new UnifyCancelException();
|
||||
}
|
||||
//noOfThread++;
|
||||
forkOrig.writeLog("final Orig 1");
|
||||
forkOrig.closeLogFile();
|
||||
//Set<Set<UnifyPair>> fork_res = forkOrig.join();
|
||||
//forkResults.add(fork_res);;
|
||||
/* FORK ENDE */
|
||||
|
||||
for (TypeUnify2Task fork : forks) {
|
||||
Set<Set<UnifyPair>> fork_res = fork.join();
|
||||
if (typeUnifyTask.myIsCancelled()) {
|
||||
throw new UnifyCancelException();
|
||||
}
|
||||
writeLog("fork_res: " + fork_res.toString());
|
||||
writeLog(Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString());
|
||||
forkResults.add(fork_res);
|
||||
if (!typeUnifyTask.isUndefinedPairSetSet(fork_res)) {
|
||||
aParDef.add(fork.getNextSetElement());
|
||||
}
|
||||
fork.writeLog("final 1");
|
||||
fork.closeLogFile();
|
||||
}
|
||||
//noOfThread++;
|
||||
} else if (parallel && (variance == -1)) {
|
||||
Set<TypeUnify2Task> forks = new HashSet<>();
|
||||
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
|
||||
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
|
||||
newElemsOrig.add(a);
|
||||
|
||||
/* FORK ANFANG */
|
||||
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, typeUnifyTask.urm, typeUnifyTask.usedTasks, new HashSet<>(methodSignatureConstraint));
|
||||
//forks.add(forkOrig);
|
||||
if (typeUnifyTask.myIsCancelled()) {
|
||||
throw new UnifyCancelException();
|
||||
}
|
||||
/* FORK ENDE */
|
||||
|
||||
writeLog("a in " + variance + " " + a);
|
||||
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
|
||||
|
||||
while (!nextSetasListRest.isEmpty()) {
|
||||
Set<UnifyPair> nSaL = nextSetasListRest.removeFirst();
|
||||
nextSetAsList.remove(nSaL);
|
||||
writeLog("-1 RM" + nSaL.toString());
|
||||
|
||||
if (!oderConstraint) {
|
||||
//ueberpruefung ob zu a =. ty \in nSaL in sameEqSet ein Widerspruch besteht
|
||||
if (!sameEqSet.isEmpty() && !typeUnifyTask.checkNoContradiction(nSaL, sameEqSet, result)) {
|
||||
nSaL = null;
|
||||
typeUnifyTask.noShortendElements++;
|
||||
continue;
|
||||
}
|
||||
} else {
|
||||
nextSetasListOderConstraints.add(((Constraint<UnifyPair>) nSaL).getExtendConstraint());
|
||||
}
|
||||
Set<UnifyPair> newEq = new HashSet<>(eq);
|
||||
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
|
||||
newElems.add(nSaL);
|
||||
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, typeUnifyTask.urm, typeUnifyTask.usedTasks, new HashSet<>(methodSignatureConstraint));
|
||||
forks.add(fork);
|
||||
|
||||
if (typeUnifyTask.myIsCancelled()) {
|
||||
throw new UnifyCancelException();
|
||||
}
|
||||
fork.fork();
|
||||
}
|
||||
//currentThreadResult = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
|
||||
|
||||
/* FORK ANFANG */
|
||||
currentThreadResult = forkOrig.compute();
|
||||
if (typeUnifyTask.myIsCancelled()) {
|
||||
throw new UnifyCancelException();
|
||||
}
|
||||
//noOfThread++;
|
||||
forkOrig.writeLog("final Orig -1");
|
||||
forkOrig.closeLogFile();
|
||||
//Set<Set<UnifyPair>> fork_res = forkOrig.join();
|
||||
//forkResults.add(fork_res);
|
||||
/* FORK ENDE */
|
||||
|
||||
for (TypeUnify2Task fork : forks) {
|
||||
Set<Set<UnifyPair>> fork_res = fork.join();
|
||||
if (typeUnifyTask.myIsCancelled()) {
|
||||
throw new UnifyCancelException();
|
||||
}
|
||||
//noOfThread++;
|
||||
//noOfThread--; an das Ende von compute verschoben
|
||||
writeLog("fork_res: " + fork_res.toString());
|
||||
writeLog(Boolean.valueOf((typeUnifyTask.isUndefinedPairSetSet(fork_res))).toString());
|
||||
forkResults.add(fork_res);
|
||||
if (!typeUnifyTask.isUndefinedPairSetSet(fork_res)) {
|
||||
aParDef.add(fork.getNextSetElement());
|
||||
}
|
||||
fork.writeLog("final -1");
|
||||
fork.closeLogFile();
|
||||
}
|
||||
//noOfThread++;
|
||||
} else if (parallel && (variance == 2)) {
|
||||
writeLog("var2einstieg");
|
||||
Set<TypeUnify2Task> forks = new HashSet<>();
|
||||
Set<UnifyPair> newEqOrig = new HashSet<>(eq);
|
||||
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
|
||||
newElemsOrig.add(a);
|
||||
|
||||
/* FORK ANFANG */
|
||||
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, typeUnifyTask.urm, typeUnifyTask.usedTasks, new HashSet<>(methodSignatureConstraint));
|
||||
//forks.add(forkOrig);
|
||||
if (typeUnifyTask.myIsCancelled()) {
|
||||
throw new UnifyCancelException();
|
||||
}
|
||||
/* FORK ENDE */
|
||||
|
||||
writeLog("a in " + variance + " " + a);
|
||||
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
|
||||
|
||||
//Fuer parallele Berechnung der Oder-Contraints wird methodSignature kopiert
|
||||
//und jeweils die methodSignature von a bzw. nSaL wieder gelöscht, wenn es keine Lösung ist.
|
||||
Set<UnifyPair> methodSignatureConstraintForParallel = new HashSet<>(methodSignatureConstraint);
|
||||
Set<UnifyPair> nSaL = a;
|
||||
|
||||
while (!nextSetasListRest.isEmpty()) {
|
||||
methodSignatureConstraintForParallel.removeAll(((Constraint<UnifyPair>) nSaL).getmethodSignatureConstraint());
|
||||
nSaL = nextSetasListRest.removeFirst();
|
||||
nextSetAsList.remove(nSaL); //PL einkommentiert 20-02-03
|
||||
methodSignatureConstraintForParallel.addAll(((Constraint<UnifyPair>) nSaL).getmethodSignatureConstraint());
|
||||
Set<UnifyPair> newEq = new HashSet<>(eq);
|
||||
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
|
||||
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
|
||||
newElems.add(nSaL);
|
||||
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, typeUnifyTask.urm, typeUnifyTask.usedTasks, new HashSet<>(methodSignatureConstraintForParallel));
|
||||
forks.add(fork);
|
||||
if (typeUnifyTask.myIsCancelled()) {
|
||||
throw new UnifyCancelException();
|
||||
}
|
||||
fork.fork();
|
||||
}
|
||||
//currentThreadResult = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
|
||||
|
||||
/* FORK ANFANG */
|
||||
currentThreadResult = forkOrig.compute();
|
||||
if (typeUnifyTask.myIsCancelled()) {
|
||||
throw new UnifyCancelException();
|
||||
}
|
||||
//noOfThread++;
|
||||
forkOrig.writeLog("final Orig 2");
|
||||
forkOrig.closeLogFile();
|
||||
//Set<Set<UnifyPair>> fork_res = forkOrig.join();
|
||||
//forkResults.add(fork_res); //vermutlich falsch
|
||||
/* FORK ENDE */
|
||||
for (TypeUnify2Task fork : forks) {
|
||||
Set<Set<UnifyPair>> fork_res = fork.join();
|
||||
if (typeUnifyTask.myIsCancelled()) {
|
||||
throw new UnifyCancelException();
|
||||
}
|
||||
forkResults.add(fork_res);
|
||||
fork.writeLog("final 2");
|
||||
fork.closeLogFile();
|
||||
}
|
||||
//noOfThread++;
|
||||
} else {
|
||||
//parallel = false oder MaxNoOfThreads ist erreicht, sequentiell weiterarbeiten
|
||||
elems.add(a); //PL 2019-01-16 muss das wirklich hin steht schon in Zeile 859 ja braucht man siehe Zeile 859
|
||||
currentThreadResult = typeUnifyTask.unify2(elems, eq, oderConstraints, fc, parallel, rekTiefe, new HashSet<>(methodSignatureConstraint));
|
||||
}
|
||||
|
||||
//Ab hier alle parallele Berechnungen wieder zusammengeführt.
|
||||
if (oderConstraint) {//Wenn weiteres Element nextSetasList genommen wird, muss die vorherige methodsignatur geloescht werden
|
||||
methodSignatureConstraint.removeAll(((Constraint<UnifyPair>) a).getmethodSignatureConstraint());
|
||||
//System.out.println("REMOVE: " +methodSignatureConstraint);
|
||||
}
|
||||
if (!typeUnifyTask.isUndefinedPairSetSet(currentThreadResult) && typeUnifyTask.isUndefinedPairSetSet(result)) {
|
||||
//wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen
|
||||
synchronized (result) {
|
||||
result.clear();
|
||||
result.addAll(currentThreadResult);
|
||||
}
|
||||
} else {
|
||||
if ((typeUnifyTask.isUndefinedPairSetSet(currentThreadResult) && typeUnifyTask.isUndefinedPairSetSet(result))
|
||||
|| (!typeUnifyTask.isUndefinedPairSetSet(currentThreadResult) && !typeUnifyTask.isUndefinedPairSetSet(result))
|
||||
|| result.isEmpty()) {
|
||||
|
||||
if ((!result.isEmpty() && !currentThreadResult.isEmpty() && !typeUnifyTask.isUndefinedPairSetSet(currentThreadResult) && !typeUnifyTask.isUndefinedPairSetSet(result)) //korrekte Loesungen aus und-constraints
|
||||
&& (a.stream().map(x -> (x.getBasePair() != null)).reduce(true, (x, y) -> (x && y)))) //bei oder-Constraints nicht ausfuehren
|
||||
{
|
||||
//TODO: PL 2019-01-15: Bug 129: Im Moment wird nur das Maximum und das Minimum des aktuellen Elements betrachtet.
|
||||
//Die zu vereinigenden Mengen können mehrere Elemente enthalten. Das ist bisher nicht berücksichtigt
|
||||
|
||||
//Alle Variablen bestimmen die nicht hinzugefügt wurden in a
|
||||
//PL 2018-12-28: Hier gab es eine ClassCastException, war nicht reproduzierbar
|
||||
// System.out.println("");
|
||||
List<PlaceholderType> vars_a = TypeUnifyTaskHelper.extractMatchingPlaceholderTypes(a);
|
||||
Set<UnifyPair> fstElemRes = currentThreadResult.iterator().next();
|
||||
Set<UnifyPair> compRes = fstElemRes.stream().filter(x -> vars_a.contains(((PlaceholderType) x.getLhsType()))).collect(Collectors.toCollection(HashSet::new));
|
||||
|
||||
//Alle Variablen bestimmen die nicht hinzugefügt wurden in a_last
|
||||
//System.out.println(a_last);
|
||||
|
||||
try {//PL eingefuegt 2019-03-06 da bei map mmer wieder Nullpointer kamen
|
||||
a_last.forEach(x -> {
|
||||
writeLog("a_last_elem:" + x + " basepair: " + x.getBasePair());
|
||||
});//PL 2019-05-13 ins try hinzugefuegt Nullpointer-Exception ist in der Zeile aufgetaucht.
|
||||
List<PlaceholderType> varsLast_a = TypeUnifyTaskHelper.extractMatchingPlaceholderTypes(a_last);
|
||||
//[(java.util.Vector<java.lang.Integer> <. gen_aq, , 1), (CEK =. ? extends gen_aq, 1)] KANN VORKOMMEN
|
||||
//erstes Element genügt, da vars immer auf die gleichen Elemente zugeordnet werden muessen
|
||||
Set<UnifyPair> fstElemResult = result.iterator().next();
|
||||
Set<UnifyPair> compResult = fstElemResult.stream().filter(x -> varsLast_a.contains(((PlaceholderType) x.getLhsType()))).collect(Collectors.toCollection(HashSet::new));
|
||||
|
||||
if (variance == 1) {
|
||||
writeLog("a_last:" + a_last + " a: " + a);
|
||||
writeLog("varsLast_a:" + varsLast_a + " vars_a: " + vars_a);
|
||||
writeLog("compResult:" + compResult + " compRes: " + compRes);
|
||||
int resOfCompare = typeUnifyTask.oup.compare(compResult, compRes);
|
||||
if (resOfCompare == -1) {
|
||||
writeLog("Geloescht result: " + result);
|
||||
synchronized (result) {
|
||||
result.clear();
|
||||
result.addAll(currentThreadResult);
|
||||
}
|
||||
} else {
|
||||
if (resOfCompare == 0) {
|
||||
result.addAll(currentThreadResult);
|
||||
} //else {
|
||||
if (resOfCompare == 1) {
|
||||
writeLog("Geloescht currentThreadResult: " + currentThreadResult);
|
||||
//result = result;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (variance == -1) {
|
||||
writeLog("a_last:" + a_last + " a: " + a);
|
||||
writeLog("varsLast_a:" + varsLast_a + " vars_a: " + vars_a);
|
||||
writeLog("compResult:" + compResult + " compRes: " + compRes);
|
||||
int resOfCompare = typeUnifyTask.oup.compare(compResult, compRes);
|
||||
if (resOfCompare == 1) {
|
||||
writeLog("Geloescht result: " + result);
|
||||
synchronized (result) {
|
||||
result.clear();
|
||||
result.addAll(currentThreadResult);
|
||||
}
|
||||
} else {
|
||||
if (resOfCompare == 0) {
|
||||
result.addAll(currentThreadResult);
|
||||
} else {
|
||||
if (resOfCompare == -1) {
|
||||
writeLog("Geloescht currentThreadResult: " + currentThreadResult);
|
||||
//result = result;
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (variance == 0) {
|
||||
writeLog("RES var=1 ADD:" + result.toString() + " " + currentThreadResult.toString());
|
||||
result.addAll(currentThreadResult);
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (NullPointerException e) {
|
||||
writeLog("NullPointerException: " + a_last.toString());
|
||||
}
|
||||
} else {
|
||||
//alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden
|
||||
writeLog("RES Fst: result: " + result.toString() + " currentThreadResult: " + currentThreadResult.toString());
|
||||
result.addAll(currentThreadResult);
|
||||
}
|
||||
}
|
||||
//else {
|
||||
//wenn Korrekte Ergebnisse da und Feherfälle dazukommen Fehlerfälle ignorieren
|
||||
// if (isUndefinedPairSetSet(currentThreadResult) && !isUndefinedPairSetSet(result)) {
|
||||
// result = result;
|
||||
// }
|
||||
//}
|
||||
}
|
||||
|
||||
if (parallel) {
|
||||
for (Set<Set<UnifyPair>> par_res : forkResults) {
|
||||
if (!typeUnifyTask.isUndefinedPairSetSet(par_res) && typeUnifyTask.isUndefinedPairSetSet(result)) {
|
||||
//wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen
|
||||
synchronized (result) {
|
||||
result.clear();
|
||||
result.addAll(par_res);
|
||||
}
|
||||
if (!par_res.isEmpty() && par_res.iterator().next() instanceof WildcardType) {
|
||||
// System.out.println();
|
||||
}
|
||||
} else {
|
||||
if ((typeUnifyTask.isUndefinedPairSetSet(par_res) && typeUnifyTask.isUndefinedPairSetSet(result))
|
||||
|| (!typeUnifyTask.isUndefinedPairSetSet(par_res) && !typeUnifyTask.isUndefinedPairSetSet(result))
|
||||
|| result.isEmpty()) {
|
||||
//alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden
|
||||
writeLog("RES var1 ADD:" + result.toString() + " " + par_res.toString());
|
||||
result.addAll(par_res);
|
||||
}
|
||||
}
|
||||
}
|
||||
//break;
|
||||
}
|
||||
|
||||
/* auskommentiert um alle Max und min Betrachtung auszuschalten ANFANG */
|
||||
if (!result.isEmpty() && (!typeUnifyTask.isUndefinedPairSetSet(currentThreadResult) || !aParDef.isEmpty())) {
|
||||
if (nextSetAsList.iterator().hasNext()
|
||||
&& nextSetAsList.getFirst().stream().anyMatch(x -> x.getLhsType().getName().equals("B"))
|
||||
&& nextSetAsList.size() > 1) {
|
||||
// System.out.print("");
|
||||
}
|
||||
|
||||
// Iterator<Set<UnifyPair>> nextSetasListIt = new ArrayList<>(nextSetAsList).iterator();
|
||||
if (variance == 1) {
|
||||
// System.out.println("");
|
||||
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
|
||||
writeLog("aParDef: " + aParDef.toString());
|
||||
aParDef.add(a);
|
||||
Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator();
|
||||
if (oderConstraint) {
|
||||
nextSetAsList.removeAll(nextSetasListOderConstraints);
|
||||
nextSetasListOderConstraints = new ArrayList<>();
|
||||
writeLog("Removed: " + nextSetasListOderConstraints);
|
||||
while (aParDefIt.hasNext()) {
|
||||
Set<UnifyPair> a_new = aParDefIt.next();
|
||||
List<Set<UnifyPair>> smallerSetasList = typeUnifyTask.oup.smallerThan(a_new, nextSetAsList);
|
||||
writeLog("smallerSetasList: " + smallerSetasList);
|
||||
List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
|
||||
.filter(x -> !((Constraint<UnifyPair>) x).isInherited() && !((Constraint<UnifyPair>) x).isImplemented())
|
||||
.collect(Collectors.toCollection(ArrayList::new));
|
||||
writeLog("notInherited: " + notInherited + "\n");
|
||||
List<Set<UnifyPair>> notErased = new ArrayList<>();
|
||||
notInherited.forEach(x -> {
|
||||
notErased.addAll(typeUnifyTask.oup.smallerEqThan(x, smallerSetasList));
|
||||
});
|
||||
List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList);
|
||||
writeLog("notErased: " + notErased + "\n");
|
||||
erased.removeAll(notErased);
|
||||
nextSetAsList.removeAll(erased);
|
||||
|
||||
writeLog("Removed: " + erased);
|
||||
|
||||
writeLog("Not Removed: " + nextSetAsList);
|
||||
|
||||
}
|
||||
} else {
|
||||
while (aParDefIt.hasNext()) {
|
||||
//nextSetasListIt = nextSetasList.iterator(); Sollte eingefuegt werden PL 2020-04-28
|
||||
Set<UnifyPair> a_new = aParDefIt.next();
|
||||
List<Set<UnifyPair>> erased = typeUnifyTask.oup.smallerEqThan(a_new, nextSetAsList);
|
||||
nextSetAsList.removeAll(erased);
|
||||
|
||||
writeLog("Removed: " + erased);
|
||||
|
||||
writeLog("Not Removed: " + nextSetAsList);
|
||||
}
|
||||
}
|
||||
} else if (variance == -1) {
|
||||
// System.out.println("");
|
||||
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
|
||||
writeLog("aParDef: " + aParDef.toString());
|
||||
aParDef.add(a);
|
||||
Iterator<Set<UnifyPair>> aParDefIt = aParDef.iterator();
|
||||
if (oderConstraint) {
|
||||
nextSetAsList.removeAll(nextSetasListOderConstraints);
|
||||
writeLog("Removed: " + nextSetasListOderConstraints);
|
||||
nextSetasListOderConstraints = new ArrayList<>();
|
||||
while (aParDefIt.hasNext()) {
|
||||
Set<UnifyPair> a_new = aParDefIt.next();
|
||||
List<Set<UnifyPair>> greaterSetasList = typeUnifyTask.oup.greaterThan(a_new, nextSetAsList);
|
||||
|
||||
//a_new muss hingefuegt werden, wenn es nicht vererbt ist, dann wird es spaeter wieder geloescht
|
||||
if (!((Constraint<UnifyPair>) a_new).isInherited()) {
|
||||
greaterSetasList.add(a_new);
|
||||
}
|
||||
List<Set<UnifyPair>> notInherited = greaterSetasList.stream()
|
||||
.filter(x -> !((Constraint<UnifyPair>) x).isInherited())
|
||||
.collect(Collectors.toCollection(ArrayList::new));
|
||||
List<Set<UnifyPair>> notErased = new ArrayList<>();
|
||||
|
||||
//Wenn x nicht vererbt ist, beginnt beim naechstgroesseren Element die naechste Ueberladung
|
||||
notInherited.forEach(x -> {
|
||||
notErased.addAll(typeUnifyTask.oup.greaterEqThan(x, greaterSetasList));
|
||||
});
|
||||
|
||||
//das kleineste Element ist das Element von dem a_new geerbt hat
|
||||
//muss deshalb geloescht werden
|
||||
Iterator<Set<UnifyPair>> notErasedIt = notErased.iterator();
|
||||
if (notErasedIt.hasNext()) {
|
||||
Set<UnifyPair> min = typeUnifyTask.oup.min(notErasedIt);
|
||||
notErased.remove(min);
|
||||
notErased.remove(((Constraint<UnifyPair>) min).getExtendConstraint());
|
||||
}
|
||||
|
||||
List<Set<UnifyPair>> erased = new ArrayList<>(greaterSetasList);
|
||||
erased.removeAll(notErased);
|
||||
nextSetAsList.removeAll(erased);
|
||||
|
||||
writeLog("Removed: " + erased);
|
||||
|
||||
writeLog("Not Removed: " + nextSetAsList);
|
||||
|
||||
}
|
||||
} else {
|
||||
while (aParDefIt.hasNext()) {
|
||||
//nextSetasListIt = nextSetasList.iterator(); Sollte eingefuegt werden PL 2020-04-28
|
||||
Set<UnifyPair> a_new = aParDefIt.next();
|
||||
List<Set<UnifyPair>> erased = typeUnifyTask.oup.greaterEqThan(a_new, nextSetAsList);
|
||||
|
||||
nextSetAsList.removeAll(erased);
|
||||
|
||||
writeLog("Removed: " + erased);
|
||||
|
||||
writeLog("Not Removed: " + nextSetAsList);
|
||||
}
|
||||
}
|
||||
} else if (variance == 0) {
|
||||
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
|
||||
if (!oderConstraint) {
|
||||
return null;
|
||||
} else {
|
||||
nextSetAsList.removeAll(nextSetasListOderConstraints);
|
||||
nextSetasListOderConstraints = new ArrayList<>();
|
||||
writeLog("Removed: " + nextSetasListOderConstraints);
|
||||
List<Set<UnifyPair>> smallerSetasList = typeUnifyTask.oup.smallerThan(a, nextSetAsList);
|
||||
List<Set<UnifyPair>> notInherited = smallerSetasList.stream()
|
||||
.filter(x -> !((Constraint<UnifyPair>) x).isInherited())
|
||||
.collect(Collectors.toCollection(ArrayList::new));
|
||||
List<Set<UnifyPair>> notErased = new ArrayList<>();
|
||||
notInherited.forEach(x -> notErased.addAll(typeUnifyTask.oup.smallerEqThan(x, smallerSetasList)));
|
||||
List<Set<UnifyPair>> erased = new ArrayList<>(smallerSetasList);
|
||||
erased.removeAll(notErased);
|
||||
nextSetAsList.removeAll(erased);
|
||||
|
||||
writeLog("Removed: " + erased);
|
||||
|
||||
writeLog("Not Removed: " + nextSetAsList);
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
|
||||
}
|
||||
/* auskommentiert um alle Max und min Betrachtung auszuschalten ENDE */
|
||||
|
||||
if (typeUnifyTask.isUndefinedPairSetSet(currentThreadResult) && aParDef.isEmpty()) {
|
||||
int nofstred = 0;
|
||||
Set<UnifyPair> abhSubst = TypeUnifyTaskHelper.collectFromThreadResult(currentThreadResult, UnifyPair::getAllSubstitutions);
|
||||
abhSubst.addAll(
|
||||
TypeUnifyTaskHelper.collectFromThreadResult(currentThreadResult, UnifyPair::getThisAndAllBases)
|
||||
);
|
||||
|
||||
Set<UnifyPair> durchschnitt = abhSubst.stream()
|
||||
.filter(a::contains)
|
||||
//.filter(y -> abhSubst.contains(y))
|
||||
.collect(Collectors.toCollection(HashSet::new));
|
||||
|
||||
//Set<PlaceholderType> vars = durchschnitt.stream().map(x -> (PlaceholderType)x.getLhsType()).collect(Collectors.toCollection(HashSet::new));
|
||||
int len = nextSetAsList.size();
|
||||
Set<UnifyPair> undefRes = currentThreadResult.stream().reduce((y, z) -> {
|
||||
y.addAll(z);
|
||||
return y;
|
||||
}).get(); //flatten aller undef results
|
||||
|
||||
/*
|
||||
Set<Pair<Set<UnifyPair>, UnifyPair>> reducedUndefResSubstGroundedBasePair = undefRes.stream()
|
||||
.map(x -> {
|
||||
Set<UnifyPair> su = x.getAllSubstitutions(); //alle benutzten Substitutionen
|
||||
su.add(x.getGroundBasePair()); // urspruengliches Paar
|
||||
su.removeAll(durchschnitt); //alle aktuell genänderten Paare entfernen
|
||||
return new Pair<>(su, x.getGroundBasePair());
|
||||
})
|
||||
.collect(Collectors.toCollection(HashSet::new));
|
||||
*/
|
||||
|
||||
if (currentThreadResult.size() > 1) {
|
||||
// System.out.println();
|
||||
}
|
||||
writeLog("nextSetasList vor filter-Aufruf: " + nextSetAsList);
|
||||
if (!oderConstraint) {//PL 2023-02-08 eingefuegt: Bei oderconstraints sind Subststitutionen nicht als Substitutionen in idesem Sinne zu sehen
|
||||
nextSetAsList = nextSetAsList.stream().filter(x -> {
|
||||
//Boolean ret = false;
|
||||
//for (PlaceholderType var : vars) {
|
||||
// ret = ret || x.stream().map(b -> b.getLhsType().equals(var)).reduce((c,d) -> c || d).get();
|
||||
//}
|
||||
return (!x.containsAll(durchschnitt));
|
||||
})//.filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y)) //fuer testzwecke auskommentiert um nofstred zu bestimmen PL 2018-10-10
|
||||
.collect(Collectors.toCollection(ArrayList::new));
|
||||
}
|
||||
writeLog("nextSetasList nach filter-Aufruf: " + nextSetAsList);
|
||||
nofstred = nextSetAsList.size();
|
||||
//NOCH NICHT korrekt PL 2018-10-12
|
||||
//nextSetasList = nextSetasList.stream().filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y))
|
||||
// .collect(Collectors.toCollection(ArrayList::new));
|
||||
writeLog("currentThreadResult (undef): " + currentThreadResult.toString());
|
||||
writeLog("abhSubst: " + abhSubst.toString());
|
||||
writeLog("a2: " + rekTiefe + " " + a.toString());
|
||||
writeLog("Durchschnitt: " + durchschnitt.toString());
|
||||
writeLog("nextSet: " + nextSet.toString());
|
||||
writeLog("nextSetasList: " + nextSetAsList.toString());
|
||||
writeLog("Number first erased Elements (undef): " + (len - nofstred));
|
||||
writeLog("Number second erased Elements (undef): " + (nofstred - nextSetAsList.size()));
|
||||
writeLog("Number erased Elements (undef): " + (len - nextSetAsList.size()));
|
||||
typeUnifyTask.noAllErasedElements += (len - nextSetAsList.size());
|
||||
writeLog("Number of all erased Elements (undef): " + typeUnifyTask.noAllErasedElements.toString());
|
||||
typeUnifyTask.noBacktracking++;
|
||||
writeLog("Number of Backtracking: " + typeUnifyTask.noBacktracking);
|
||||
// System.out.println("");
|
||||
}
|
||||
//if (nextSetasList.size() == 0 && isUndefinedPairSetSet(result) && nextSet.size() > 1) {
|
||||
// return result;
|
||||
//}
|
||||
//else {
|
||||
// result.removeIf(y -> isUndefinedPairSet(y));
|
||||
//}
|
||||
//else result.stream().filter(y -> !isUndefinedPairSet(y));
|
||||
writeLog("currentThreadResult: " + currentThreadResult.toString());
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
protected void writeLog(String s) {
|
||||
|
||||
}
|
||||
}
|
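The helper above repeatedly applies one fork/join idiom: keep one case in the current thread via `compute()`, `fork()` the remaining cases, then `join()` them and merge the partial results. A minimal generic sketch of that idiom (a hypothetical summing task, not the unify logic); it would be invoked as `ForkJoinPool.commonPool().invoke(new SumTask(data, 0, data.length))`:

```java
import java.util.concurrent.RecursiveTask;

// Minimal fork/join idiom sketch: compute one part directly, fork the other, then join.
class SumTask extends RecursiveTask<Long> {
    private final long[] data;
    private final int from, to;

    SumTask(long[] data, int from, int to) { this.data = data; this.from = from; this.to = to; }

    @Override
    protected Long compute() {
        if (to - from <= 1024) {                              // small enough: sequential
            long s = 0;
            for (int i = from; i < to; i++) s += data[i];
            return s;
        }
        int mid = (from + to) >>> 1;
        SumTask right = new SumTask(data, mid, to);
        right.fork();                                         // run the rest asynchronously
        long left = new SumTask(data, from, mid).compute();   // keep one part in this thread
        return left + right.join();                           // merge, like the forkResults loop above
    }
}
```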
@@ -0,0 +1,64 @@
package de.dhbwstuttgart.typeinference.unify;

import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.RecursiveTask;

public class ConcurrentSetMergeTask<T> extends RecursiveTask<Set<T>> {

    public static <E> Set<E> merge(List<Set<E>> list) {
        if (list.isEmpty()) {
            return new HashSet<>();
        }
        var task = new ConcurrentSetMergeTask<>(list, 0, list.size());
        return task.compute();
    }

    private static final int LIST_THRESHOLD = 3;
    private static final int ELEMENT_THRESHOLD = 1000;

    private final List<Set<T>> list;
    private final int start;
    private final int end;

    private ConcurrentSetMergeTask(List<Set<T>> list, int start, int end) {
        this.list = list;
        this.start = start;
        this.end = end;
    }

    @Override
    protected Set<T> compute() {
        int size = end - start;

        int totalElements = 0;
        for (int i = start+1; i < end; i++) {
            totalElements += list.get(i).size();
        }

        System.out.println("ConcurrentSetMerge? -> " + (size <= LIST_THRESHOLD || totalElements < ELEMENT_THRESHOLD ? "true" : "false"));

        // size will always be at least one
        if (true || size <= LIST_THRESHOLD || totalElements < ELEMENT_THRESHOLD) {
            Set<T> result = this.list.get(start);
            for (int i = start+1; i < end; i++) {
                result.addAll(list.get(i));
            }
            return result;
        } else {
            int mid = start + (size / 2);
            ConcurrentSetMergeTask<T> leftTask = new ConcurrentSetMergeTask<>(list, start, mid);
            ConcurrentSetMergeTask<T> rightTask = new ConcurrentSetMergeTask<>(list, mid, end);

            leftTask.fork();
            Set<T> rightResult = rightTask.compute();
            Set<T> leftResult = leftTask.join();

            // Merge results
            leftResult.addAll(rightResult);
            return leftResult;
        }
    }
}
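A minimal usage sketch for the merge helper added above; the demo class, the String element type and the sample sets are illustrative assumptions, not part of the commit. Note that merge() folds every element into the first set of the list, so that set is mutated in place.

import de.dhbwstuttgart.typeinference.unify.ConcurrentSetMergeTask;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

class ConcurrentSetMergeTaskDemo {
    public static void main(String[] args) {
        List<Set<String>> partials = new ArrayList<>();
        partials.add(new HashSet<>(Set.of("a", "b")));
        partials.add(new HashSet<>(Set.of("b", "c")));
        partials.add(new HashSet<>(Set.of("d")));

        // All elements are folded into the first set, which is returned (and mutated).
        Set<String> merged = ConcurrentSetMergeTask.merge(partials);
        System.out.println(merged); // [a, b, c, d] in some iteration order
    }
}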
|
@@ -864,7 +864,7 @@ public class RuleSet implements IRuleSet{
|
||||
try {
|
||||
logFile.write("FUNgreater: " + pair + "\n");
|
||||
logFile.write("FUNred: " + result + "\n");
|
||||
logFile.flush();
|
||||
// logFile.flush();
|
||||
}
|
||||
catch (IOException e) {
|
||||
System.out.println("logFile-Error");
|
||||
@@ -960,7 +960,7 @@ public class RuleSet implements IRuleSet{
|
||||
try {
|
||||
logFile.write("FUNgreater: " + pair + "\n");
|
||||
logFile.write("FUNgreater: " + result + "\n");
|
||||
logFile.flush();
|
||||
// logFile.flush();
|
||||
}
|
||||
catch (IOException e) {
|
||||
System.out.println("lofFile-Error");
|
||||
@@ -1010,7 +1010,7 @@ public class RuleSet implements IRuleSet{
|
||||
try {
|
||||
logFile.write("FUNgreater: " + pair + "\n");
|
||||
logFile.write("FUNsmaller: " + result + "\n");
|
||||
logFile.flush();
|
||||
// logFile.flush();
|
||||
}
|
||||
catch (IOException e) {
|
||||
System.out.println("lofFile-Error");
|
||||
|
@@ -1,41 +1,28 @@
|
||||
package de.dhbwstuttgart.typeinference.unify;
|
||||
|
||||
import java.io.FileWriter;
|
||||
import java.io.IOException;
|
||||
import java.io.Writer;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.ForkJoinPool;
|
||||
|
||||
import de.dhbwstuttgart.core.JavaTXCompiler;
|
||||
import de.dhbwstuttgart.typeinference.constraints.Constraint;
|
||||
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
|
||||
import de.dhbwstuttgart.typeinference.constraints.Pair;
|
||||
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
|
||||
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
|
||||
|
||||
public class TypeUnify {
|
||||
|
||||
|
||||
/**
|
||||
* unify parallel ohne result modell
|
||||
* @param undConstrains
|
||||
* @param oderConstraints
|
||||
* @param fc
|
||||
* @param logFile
|
||||
* @param log
|
||||
* @param cons
|
||||
* @return
|
||||
*/
|
||||
public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
|
||||
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
|
||||
ForkJoinPool pool = new ForkJoinPool();
|
||||
ForkJoinPool pool = this.createThreadPool();
|
||||
pool.invoke(unifyTask);
|
||||
Set<Set<UnifyPair>> res = unifyTask.join();
|
||||
try {
|
||||
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements + "\n");
|
||||
logFile.flush();
|
||||
// logFile.flush();
|
||||
}
|
||||
catch (IOException e) {
|
||||
System.err.println("no log-File");
|
||||
@@ -45,41 +32,25 @@ public class TypeUnify {
|
||||
|
||||
/**
|
||||
* unify asynchron mit Rückgabe UnifyResultModel ohne dass alle results gesammelt sind
|
||||
* @param undConstrains
|
||||
* @param oderConstraints
|
||||
* @param fc
|
||||
* @param logFile
|
||||
* @param log
|
||||
* @param cons
|
||||
* @param ret
|
||||
* @return
|
||||
*/
|
||||
public UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
|
||||
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
|
||||
ForkJoinPool pool = new ForkJoinPool();
|
||||
ForkJoinPool pool = this.createThreadPool();
|
||||
pool.invoke(unifyTask);
|
||||
return ret;
|
||||
}
|
||||
|
||||
/**
|
||||
* unify parallel mit Rückgabe UnifyResultModel nachdem alle results gesammelt sind
|
||||
* @param undConstrains
|
||||
* @param oderConstraints
|
||||
* @param fc
|
||||
* @param logFile
|
||||
* @param log
|
||||
* @param cons
|
||||
* @param ret
|
||||
* @return
|
||||
*/
|
||||
public UnifyResultModel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
|
||||
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
|
||||
ForkJoinPool pool = new ForkJoinPool();
|
||||
ForkJoinPool pool = this.createThreadPool();
|
||||
pool.invoke(unifyTask);
|
||||
Set<Set<UnifyPair>> res = unifyTask.join();
|
||||
try {
|
||||
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
|
||||
logFile.flush();
|
||||
// logFile.flush();
|
||||
}
|
||||
catch (IOException e) {
|
||||
System.err.println("no log-File");
|
||||
@@ -97,20 +68,13 @@ public class TypeUnify {
|
||||
|
||||
/**
|
||||
* unify sequentiell mit oderconstraints
|
||||
* @param undConstrains
|
||||
* @param oderConstraints
|
||||
* @param fc
|
||||
* @param logFile
|
||||
* @param log
|
||||
* @param cons
|
||||
* @return
|
||||
*/
|
||||
public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
|
||||
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0, ret, usedTasks);
|
||||
Set<Set<UnifyPair>> res = unifyTask.compute();
|
||||
try {
|
||||
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
|
||||
logFile.flush();
|
||||
// logFile.flush();
|
||||
}
|
||||
catch (IOException e) {
|
||||
System.err.println("no log-File");
|
||||
@@ -118,4 +82,13 @@ public class TypeUnify {
        return res;
    }

    private ForkJoinPool createThreadPool() {
        return new ForkJoinPool(
            Runtime.getRuntime().availableProcessors(),
            ForkJoinPool.defaultForkJoinWorkerThreadFactory,
            null,
            true // asyncMode (FIFO scheduling): smaller tasks complete first -> improves locality and cuts small branches first
        );
    }

}
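For reference, a self-contained sketch of the pool configuration used by createThreadPool() above; the class and method names here are illustrative assumptions. The fourth constructor argument is the asyncMode flag: true selects local FIFO scheduling for forked tasks that are never joined, false (the default) selects LIFO work-stealing.

import java.util.concurrent.ForkJoinPool;

class PoolConfigSketch {
    static ForkJoinPool fifoPool() {
        return new ForkJoinPool(
                Runtime.getRuntime().availableProcessors(),        // parallelism
                ForkJoinPool.defaultForkJoinWorkerThreadFactory,   // default worker thread factory
                null,                                              // no handler for uncaught exceptions
                true);                                             // asyncMode = true -> local FIFO queues
    }

    public static void main(String[] args) {
        ForkJoinPool pool = fifoPool();
        pool.submit(() -> System.out.println("parallelism = " + pool.getParallelism())).join();
        pool.shutdown();
    }
}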
|
||||
|
@@ -59,7 +59,7 @@ public class TypeUnify2Task extends TypeUnifyTask {
|
||||
logFile.close();
|
||||
}
|
||||
catch (IOException ioE) {
|
||||
System.err.println("no log-File" + thNo);
|
||||
System.err.println("no log-File");
|
||||
}
|
||||
|
||||
}
|
||||
|
File diff suppressed because it is too large
@@ -0,0 +1,188 @@
package de.dhbwstuttgart.typeinference.unify;

import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;

/**
 * A collection of encapsulated (and thus static) functions to split up large algorithms in TypeUnifyTask
 */
public class TypeUnifyTaskHelper {

    /**
     * Filter all topLevelSets for those with a single element that contain only one pair:
     * a <. theta,
     * theta <. a or
     * a =. theta
     */
    public static Set<Set<UnifyPair>> getSingleElementSets(ArrayList<Set<? extends Set<UnifyPair>>> topLevelSets) {
        return topLevelSets.stream()
                .filter(x -> x.size() == 1)
                .map(y -> y.stream().findFirst().get()).collect(Collectors.toSet());
    }

    /**
     * Varianzbestimmung Anfang
     * Oderconstraint, wenn entweder kein Basepair oder unterschiedliche Basepairs => oderConstraint = true;
     * Varianz = 1 => Argumentvariable
     * Varianz = -1 => Rückgabevariable
     * Varianz = 0 => unklar
     * Varianz = 2 => Operatoren oderConstraints
     */
    public static int calculateVariance(List<Set<UnifyPair>> nextSetasList) {
        Optional<Integer> xi = nextSetasList.stream().map(x -> x.stream().filter(y -> (y.getLhsType() instanceof PlaceholderType && !(y.getRhsType() instanceof PlaceholderType)))
                .filter(z -> ((PlaceholderType) z.getLhsType()).getVariance() != 0)
                .map(c -> ((PlaceholderType) c.getLhsType()).getVariance())
                .reduce((a, b) -> {
                    if (a == b) return a;
                    else return 0;
                })) //2 kommt insbesondere bei Oder-Constraints vor
                .filter(d -> d.isPresent())
                .map(e -> e.get())
                .findAny();

        return xi.orElse(0);
    }
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public static int calculateOderConstraintVariance(List<Set<UnifyPair>> nextSetAsList) {
|
||||
Optional<Integer> optVariance =
|
||||
nextSetAsList
|
||||
.getFirst()
|
||||
.stream()
|
||||
.filter(x -> x.getGroundBasePair().getLhsType() instanceof PlaceholderType &&
|
||||
!(x.getRhsType() instanceof PlaceholderType) &&
|
||||
x.getPairOp() == PairOperator.EQUALSDOT)
|
||||
.map(x ->
|
||||
((PlaceholderType) x.getGroundBasePair().getLhsType()).getVariance())
|
||||
.reduce((n, m) -> (n != 0) ? n : m);
|
||||
|
||||
//Fuer Operatorenaufrufe wird variance auf 2 gesetzt.
|
||||
//da kein Receiver existiert also kein x.getGroundBasePair().getLhsType() instanceof PlaceholderType
|
||||
//Bei Varianz = 2 werden alle Elemente des Kartesischen Produkts abgearbeitet
|
||||
return optVariance.orElse(2);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Find the first occurrence (if any) of a UnifyPair with operator EQUALSDOT while having
|
||||
* one side equal to its base pair counterpart
|
||||
*/
|
||||
public static Optional<UnifyPair> findEqualityConstrainedUnifyPair(Set<UnifyPair> nextSetElement) {
|
||||
return nextSetElement.stream().filter(x ->
|
||||
x.getPairOp()
|
||||
.equals(PairOperator.EQUALSDOT))
|
||||
.filter(x -> //Sicherstellen, dass bei a = ty a auch wirklich die gesuchte Typvariable ist
|
||||
x.getLhsType()
|
||||
.equals(x.getBasePair().getLhsType()) ||
|
||||
x.getLhsType()
|
||||
.equals(x.getBasePair().getRhsType())
|
||||
).findFirst();
|
||||
}
|
||||
|
||||
/**
|
||||
* Find all unifyPairs, that associate the identified type variable of origPair with any concrete type. That means:
|
||||
* If "a = type" is in origPair, then we get all UnifyPairs that contain either "a < typeA" or "typeB < a"
|
||||
*/
|
||||
public static Set<UnifyPair> findConstraintsWithSameTVAssociation(UnifyPair origPair, Set<Set<UnifyPair>> singleElementSets) {
|
||||
UnifyType tyVar = origPair.getLhsType();
|
||||
if (!(tyVar instanceof PlaceholderType)) {
|
||||
tyVar = origPair.getRhsType();
|
||||
}
|
||||
|
||||
UnifyType tyVarEF = tyVar;
|
||||
return singleElementSets.stream()
|
||||
.map(xx ->
|
||||
xx.iterator().next())
|
||||
.filter(x ->
|
||||
(x.getLhsType().equals(tyVarEF) && !(x.getRhsType() instanceof PlaceholderType))
|
||||
||
|
||||
(x.getRhsType().equals(tyVarEF) && !(x.getLhsType() instanceof PlaceholderType))
|
||||
)
|
||||
.collect(Collectors.toCollection(HashSet::new));
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public static boolean doesFirstNextSetHasSameBase(List<Set<UnifyPair>> nextSetAsList) {
|
||||
if (nextSetAsList.isEmpty()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
UnifyPair firstBasePair = null;
|
||||
|
||||
for (var unifyPair : nextSetAsList.getFirst().stream().toList()) {
|
||||
var basePair = unifyPair.getBasePair();
|
||||
|
||||
// if any base pair is null, there is NOT always the same base!
|
||||
if (basePair == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (firstBasePair == null) {
|
||||
firstBasePair = basePair;
|
||||
}
|
||||
else if (!basePair.equals(firstBasePair)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts data from every element in the nested lists of results. What data depends on the given
|
||||
* extractor function
|
||||
*/
|
||||
public static Set<UnifyPair> collectFromThreadResult (
|
||||
Set<Set<UnifyPair>> currentThreadResult,
|
||||
Function<UnifyPair, Set<UnifyPair>> extractor
|
||||
) {
|
||||
return currentThreadResult.stream()
|
||||
.map(b ->
|
||||
b.stream()
|
||||
.map(extractor)
|
||||
.reduce((y, z) -> {
|
||||
y.addAll(z);
|
||||
return y;
|
||||
})
|
||||
.orElse(new HashSet<>()))
|
||||
.reduce((y, z) -> {
|
||||
y.addAll(z);
|
||||
return y;
|
||||
})
|
||||
.orElse(new HashSet<>());
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract a list of PlaceholderTypes from a set of pairs, such that each resulting element:
|
||||
* - Is the LHS of a pair
|
||||
* - Is a PlaceholderType
|
||||
* - has a basePair Side that is a PlaceholderType with the same name
|
||||
*/
|
||||
public static List<PlaceholderType> extractMatchingPlaceholderTypes(Set<UnifyPair> pairs) {
|
||||
return pairs.stream()
|
||||
.filter(x -> {
|
||||
UnifyType lhs = x.getLhsType();
|
||||
UnifyType baseLhs = x.getBasePair().getLhsType();
|
||||
UnifyType baseRhs = x.getBasePair().getRhsType();
|
||||
return (lhs instanceof PlaceholderType) &&
|
||||
((baseLhs instanceof PlaceholderType && lhs.getName().equals(baseLhs.getName())) ||
|
||||
(baseRhs instanceof PlaceholderType && lhs.getName().equals(baseRhs.getName())));
|
||||
})
|
||||
.map(x -> (PlaceholderType) x.getLhsType())
|
||||
.collect(Collectors.toCollection(ArrayList::new));
|
||||
}
|
||||
|
||||
}
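To make the reduction inside calculateVariance above easier to follow, here is a hedged, stand-alone sketch of the same folding rule over plain integers (the class name and sample data are assumptions; the original reduces boxed Integers with ==, equals is used here for clarity):

import java.util.List;
import java.util.Optional;

class VarianceFoldSketch {
    // Folding rule mirrored from calculateVariance: drop 0 ("unclear") entries,
    // keep a common non-zero variance if all remaining entries agree, otherwise fall back to 0.
    static int foldVariances(List<Integer> variances) {
        Optional<Integer> folded = variances.stream()
                .filter(v -> v != 0)
                .reduce((a, b) -> a.equals(b) ? a : 0);
        return folded.orElse(0);
    }

    public static void main(String[] args) {
        System.out.println(foldVariances(List.of(1, 0, 1)));  // 1 -> consistent argument position
        System.out.println(foldVariances(List.of(1, -1)));    // 0 -> conflicting variances, unclear
        System.out.println(foldVariances(List.of(0, 0)));     // 0 -> no variance information
    }
}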
|
@@ -207,7 +207,7 @@ implements IFiniteClosure {
|
||||
result.add(new Pair<>(t, fBounded));
|
||||
}
|
||||
catch (StackOverflowError e) {
|
||||
System.out.println("");
|
||||
// System.out.println("");
|
||||
}
|
||||
|
||||
// if C<...> <* C<...> then ... (third case in definition of <*)
|
||||
@@ -700,8 +700,8 @@ implements IFiniteClosure {
|
||||
|
||||
public int compare (UnifyType left, UnifyType right, PairOperator pairop) {
|
||||
try {logFile.write("left: "+ left + " right: " + right + " pairop: " + pairop +"\n");} catch (IOException ie) {}
|
||||
if (left.getName().equals("Matrix") || right.getName().equals("Matrix"))
|
||||
System.out.println("");
|
||||
// if (left.getName().equals("Matrix") || right.getName().equals("Matrix"))
|
||||
// System.out.println("");
|
||||
/*
|
||||
pairop = PairOperator.SMALLERDOTWC;
|
||||
List<UnifyType> al = new ArrayList<>();
|
||||
@@ -760,7 +760,7 @@ implements IFiniteClosure {
|
||||
//if (left.getName().equals("Vector") || right.getName().equals("AbstractList"))
|
||||
{try {
|
||||
logFile.write("\nsmallerRes: " + smallerRes);//"smallerHash: " + greaterHash.toString());
|
||||
logFile.flush();
|
||||
// logFile.flush();
|
||||
}
|
||||
catch (IOException e) {
|
||||
System.err.println("no LogFile");}}
|
||||
@@ -774,7 +774,7 @@ implements IFiniteClosure {
|
||||
long smallerLen = smallerRes.stream().filter(delFun).count();
|
||||
try {
|
||||
logFile.write("\nsmallerLen: " + smallerLen +"\n");
|
||||
logFile.flush();
|
||||
// logFile.flush();
|
||||
}
|
||||
catch (IOException e) {
|
||||
System.err.println("no LogFile");}
|
||||
@@ -789,7 +789,7 @@ implements IFiniteClosure {
|
||||
//if (left.getName().equals("Vector") || right.getName().equals("AbstractList"))
|
||||
{try {
|
||||
logFile.write("\ngreaterRes: " + greaterRes);//"smallerHash: " + greaterHash.toString());
|
||||
logFile.flush();
|
||||
// logFile.flush();
|
||||
}
|
||||
catch (IOException e) {
|
||||
System.err.println("no LogFile");}}
|
||||
|
@@ -15,14 +15,12 @@ public abstract class OrderingExtend<T> extends com.google.common.collect.Orderi
|
||||
|
||||
T max = max(iterable);
|
||||
ret.add(max);
|
||||
|
||||
Iterator<T> it = iterable.iterator();
|
||||
while (it.hasNext()) {
|
||||
T elem = it.next();
|
||||
if (!(compare(max, elem) == 1) && !max.equals(elem)) {
|
||||
believe.add(elem);
|
||||
}
|
||||
}
|
||||
|
||||
for (T elem : iterable) {
|
||||
if (!(compare(max, elem) == 1) && !max.equals(elem)) {
|
||||
believe.add(elem);
|
||||
}
|
||||
}
|
||||
iterable = believe;
|
||||
}
|
||||
return ret;
|
||||
@@ -35,14 +33,12 @@ public abstract class OrderingExtend<T> extends com.google.common.collect.Orderi
|
||||
|
||||
T min = min(iterable);
|
||||
ret.add(min);
|
||||
|
||||
Iterator<T> it = iterable.iterator();
|
||||
while (it.hasNext()) {
|
||||
T elem = it.next();
|
||||
if (!(compare(min, elem) == -1) && !min.equals(elem)) {
|
||||
believe.add(elem);
|
||||
}
|
||||
}
|
||||
|
||||
for (T elem : iterable) {
|
||||
if (!(compare(min, elem) == -1) && !min.equals(elem)) {
|
||||
believe.add(elem);
|
||||
}
|
||||
}
|
||||
iterable = believe;
|
||||
}
|
||||
return ret;
|
||||
@@ -58,13 +54,11 @@ public abstract class OrderingExtend<T> extends com.google.common.collect.Orderi
|
||||
|
||||
public List<T> smallerThan(T elem, Iterable<T> iterable) {
|
||||
ArrayList<T> ret = new ArrayList<>();
|
||||
Iterator<T> it = iterable.iterator();
|
||||
while (it.hasNext()) {
|
||||
T itElem = it.next();
|
||||
if (!itElem.equals(elem) && compare(elem, itElem) == 1) {
|
||||
ret.add(itElem);
|
||||
}
|
||||
}
|
||||
for (T itElem : iterable) {
|
||||
if (!itElem.equals(elem) && compare(elem, itElem) == 1) {
|
||||
ret.add(itElem);
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
@@ -72,18 +66,15 @@ public abstract class OrderingExtend<T> extends com.google.common.collect.Orderi
|
||||
List<T> ret = greaterThan(elem, iterable);
|
||||
ret.add(elem);
|
||||
return ret;
|
||||
|
||||
}
|
||||
|
||||
public List<T> greaterThan(T elem, Iterable<T> iterable) {
|
||||
ArrayList<T> ret = new ArrayList<>();
|
||||
Iterator<T> it = iterable.iterator();
|
||||
while (it.hasNext()) {
|
||||
T itElem = it.next();
|
||||
if (!itElem.equals(elem) && (compare(elem, itElem) == -1)) {
|
||||
ret.add(itElem);
|
||||
}
|
||||
}
|
||||
for (T itElem : iterable) {
|
||||
if (!itElem.equals(elem) && (compare(elem, itElem) == -1)) {
|
||||
ret.add(itElem);
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
}
|
||||
|
@@ -47,7 +47,7 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
|
||||
catch (ClassCastException e) {
|
||||
try {
|
||||
((FiniteClosure)fc).logFile.write("ClassCastException: " + left.toString() + " " + left.getGroundBasePair() + "\n\n");
|
||||
((FiniteClosure)fc).logFile.flush();
|
||||
// ((FiniteClosure)fc).logFile.flush();
|
||||
}
|
||||
catch (IOException ie) {
|
||||
}
|
||||
@@ -79,12 +79,12 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
|
||||
&& (((ExtendsType)right).getExtendedType().getName().equals("java.util.Vector"))
|
||||
&& (((ReferenceType)((ExtendsType)right).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType)))
|
||||
{
|
||||
System.out.println("");
|
||||
// System.out.println("");
|
||||
}
|
||||
if (((right instanceof SuperType) && (((SuperType)right).getSuperedType().getName().equals("java.lang.Object")))
|
||||
||((left instanceof SuperType) && (((SuperType)left).getSuperedType().getName().equals("java.lang.Object"))))
|
||||
{
|
||||
System.out.println("");
|
||||
// System.out.println("");
|
||||
}
|
||||
}
|
||||
else {
|
||||
@@ -106,11 +106,11 @@ public class OrderingUnifyPair extends OrderingExtend<Set<UnifyPair>> {
|
||||
&& (((ExtendsType)right).getExtendedType().getName().equals("java.util.Vector"))
|
||||
&& (((ReferenceType)((ExtendsType)right).getExtendedType()).getTypeParams().iterator().next() instanceof ExtendsType)))
|
||||
{
|
||||
System.out.println("");
|
||||
// System.out.println("");
|
||||
}
|
||||
if (right instanceof SuperType)
|
||||
{
|
||||
System.out.println("");
|
||||
// System.out.println("");
|
||||
}
|
||||
}
|
||||
else {
|
||||
|
@@ -1,73 +1,60 @@
|
||||
package de.dhbwstuttgart.typeinference.unify.model;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileWriter;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Random;
|
||||
import java.util.Set;
|
||||
|
||||
import de.dhbwstuttgart.typeinference.unify.distributeVariance;
|
||||
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
|
||||
import de.dhbwstuttgart.typeinference.unify.interfaces.UnifyTypeVisitor;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
|
||||
/**
|
||||
* An unbounded placeholder type.
|
||||
* @author Florian Steurer
|
||||
*/
|
||||
public final class PlaceholderType extends UnifyType{
|
||||
|
||||
|
||||
/**
|
||||
* Static list containing the names of all existing placeholders.
|
||||
* Used for generating fresh placeholders.
|
||||
*/
|
||||
public static final ArrayList<String> EXISTING_PLACEHOLDERS = new ArrayList<String>();
|
||||
|
||||
/**
|
||||
* Prefix of auto-generated placeholder names.
|
||||
*/
|
||||
protected static String nextName = "gen_";
|
||||
|
||||
/**
|
||||
* Random number generator used to generate fresh placeholder name.
|
||||
*/
|
||||
protected static Random rnd = new Random(43558747548978L);
|
||||
|
||||
|
||||
|
||||
private static final AtomicInteger placeholderCount = new AtomicInteger(0);
|
||||
|
||||
/**
|
||||
* True if this object was auto-generated, false if this object was user-generated.
|
||||
*/
|
||||
private final boolean IsGenerated;
|
||||
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* isWildcardable gibt an, ob ein Wildcardtyp dem PlaceholderType zugeordnet werden darf
|
||||
* isWildcardable gibt an, ob ein Wildcardtyp dem PlaceholderType zugeordnet werden darf
|
||||
*/
|
||||
private boolean wildcardable = true;
|
||||
|
||||
private boolean wildcardable = true;
|
||||
|
||||
/**
|
||||
* is innerType gibt an, ob der Type des PlaceholderType innerhalb eines Typkonstruktorsverwendet wird
|
||||
*/
|
||||
private boolean innerType = false;
|
||||
|
||||
private boolean innerType = false;
|
||||
|
||||
/**
|
||||
* variance shows the variance of the pair
|
||||
* 1: contravariant
|
||||
* -1 covariant
|
||||
* 0 invariant
|
||||
* PL 2018-03-21
|
||||
*/
|
||||
private int variance = 0;
|
||||
|
||||
/*
|
||||
* Fuer Oder-Constraints:
|
||||
* orCons = 1: Receiver
|
||||
* orCons = 0: Argument oder kein Oder-Constraint
|
||||
* orCons = -1: RetType
|
||||
*/
|
||||
private byte orCons = 0;
|
||||
|
||||
* variance shows the variance of the pair
|
||||
* 1: contravariant
|
||||
* -1 covariant
|
||||
* 0 invariant
|
||||
* PL 2018-03-21
|
||||
*/
|
||||
private int variance = 0;
|
||||
|
||||
/*
|
||||
* Fuer Oder-Constraints:
|
||||
* orCons = 1: Receiver
|
||||
* orCons = 0: Argument oder kein Oder-Constraint
|
||||
* orCons = -1: RetType
|
||||
*/
|
||||
private byte orCons = 0;
|
||||
|
||||
/**
|
||||
* Creates a new placeholder type with the specified name.
|
||||
*/
|
||||
@@ -76,17 +63,17 @@ public final class PlaceholderType extends UnifyType{
|
||||
EXISTING_PLACEHOLDERS.add(name); // Add to list of existing placeholder names
|
||||
IsGenerated = false; // This type is user generated
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
public PlaceholderType(String name, int variance) {
|
||||
super(name, new TypeParams());
|
||||
EXISTING_PLACEHOLDERS.add(name); // Add to list of existing placeholder names
|
||||
IsGenerated = false; // This type is user generated
|
||||
this.variance = variance;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Creates a new placeholdertype
|
||||
* Creates a new placeholdertype
|
||||
* @param isGenerated true if this placeholder is auto-generated, false if it is user-generated.
|
||||
*/
|
||||
protected PlaceholderType(String name, boolean isGenerated) {
|
||||
@@ -94,26 +81,42 @@ public final class PlaceholderType extends UnifyType{
|
||||
EXISTING_PLACEHOLDERS.add(name); // Add to list of existing placeholder names
|
||||
IsGenerated = isGenerated;
|
||||
}
|
||||
|
||||
|
||||
public <T> UnifyType accept(UnifyTypeVisitor<T> visitor, T ht) {
|
||||
return visitor.visit(this, ht);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Creates a fresh placeholder type with a name that does so far not exist.
|
||||
* Creates a fresh placeholder type with a name that does so far not exist from the chars A-Z.
|
||||
* A user could later instantiate a type using the same name that is equivalent to this type.
|
||||
* @return A fresh placeholder type.
|
||||
*/
|
||||
public synchronized static PlaceholderType freshPlaceholder() {
|
||||
String name = nextName + (char) (rnd.nextInt(22) + 97); // Returns random char between 'a' and 'z'
|
||||
// Add random chars while the name is in use.
|
||||
while(EXISTING_PLACEHOLDERS.contains(name)) {
|
||||
name += (char) (rnd.nextInt(22) + 97); // Returns random char between 'a' and 'z'
|
||||
public static PlaceholderType freshPlaceholder() {
|
||||
String name;
|
||||
|
||||
int attempts = 1000;
|
||||
while (attempts-- > 0) {
|
||||
int pc = PlaceholderType.placeholderCount.incrementAndGet();
|
||||
|
||||
StringBuilder sb = new StringBuilder();
|
||||
while (pc >= 0) {
|
||||
sb.append((char)(pc % 26 + 97));
|
||||
pc = pc / 26 - 1;
|
||||
}
|
||||
name = sb.toString();
|
||||
|
||||
|
||||
synchronized (EXISTING_PLACEHOLDERS) {
|
||||
if (!EXISTING_PLACEHOLDERS.contains(name)) {
|
||||
return new PlaceholderType(name, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
return new PlaceholderType(name, true);
|
||||
|
||||
throw new RuntimeException("Failed to generate placeholder name in the allowed number of attempts");
|
||||
}
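The counter-to-name encoding inside the new freshPlaceholder() above is easy to misread; the following stand-alone sketch reproduces just that loop (class and method names are assumptions). It maps 0 -> "a", 25 -> "z", 26 -> "aa", 52 -> "ab", least significant letter first; since the production code starts with incrementAndGet(), the first generated name is "b".

class PlaceholderNameSketch {
    // Same loop as in freshPlaceholder(): digits 'a'..'z', least significant letter first.
    static String encode(int pc) {
        StringBuilder sb = new StringBuilder();
        while (pc >= 0) {
            sb.append((char) (pc % 26 + 97));
            pc = pc / 26 - 1;
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        System.out.println(encode(0));   // a
        System.out.println(encode(25));  // z
        System.out.println(encode(26));  // aa
        System.out.println(encode(52));  // ab
    }
}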
|
||||
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* True if this placeholder is auto-generated, false if it is user-generated.
|
||||
*/
|
||||
@@ -124,51 +127,51 @@ public final class PlaceholderType extends UnifyType{
|
||||
public void setVariance(int v) {
|
||||
variance = v;
|
||||
}
|
||||
|
||||
|
||||
public int getVariance() {
|
||||
return variance;
|
||||
}
|
||||
|
||||
|
||||
public void reversVariance() {
|
||||
if (variance == 1) {
|
||||
setVariance(-1);
|
||||
} else {
|
||||
if (variance == -1) {
|
||||
setVariance(1);
|
||||
}}
|
||||
if (variance == -1) {
|
||||
setVariance(1);
|
||||
}}
|
||||
}
|
||||
|
||||
|
||||
public void setOrCons(byte i) {
|
||||
orCons = i;
|
||||
}
|
||||
|
||||
|
||||
public byte getOrCons() {
|
||||
return orCons;
|
||||
}
|
||||
|
||||
|
||||
public Boolean isWildcardable() {
|
||||
return wildcardable;
|
||||
}
|
||||
public void disableWildcardtable() {
|
||||
wildcardable = false;
|
||||
}
|
||||
|
||||
|
||||
public void enableWildcardtable() {
|
||||
wildcardable = true;
|
||||
}
|
||||
|
||||
|
||||
public void setWildcardtable(Boolean wildcardable) {
|
||||
this.wildcardable = wildcardable;
|
||||
}
|
||||
|
||||
|
||||
public Boolean isInnerType() {
|
||||
return innerType;
|
||||
}
|
||||
|
||||
|
||||
public void setInnerType(Boolean innerType) {
|
||||
this.innerType = innerType;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
Set<UnifyType> smArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
|
||||
return fc.smArg(this, fBounded);
|
||||
@@ -178,17 +181,17 @@ public final class PlaceholderType extends UnifyType{
|
||||
Set<UnifyType> grArg(IFiniteClosure fc, Set<UnifyType> fBounded) {
|
||||
return fc.grArg(this, fBounded);
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public UnifyType setTypeParams(TypeParams newTp) {
|
||||
return this; // Placeholders never have params.
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return typeName.hashCode();
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
UnifyType apply(Unifier unif) {
|
||||
if(unif.hasSubstitute(this)) {
|
||||
@@ -200,15 +203,15 @@ public final class PlaceholderType extends UnifyType{
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if(!(obj instanceof PlaceholderType))
|
||||
return false;
|
||||
|
||||
|
||||
return ((PlaceholderType) obj).getName().equals(typeName);
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Override
|
||||
public Collection<PlaceholderType> getInvolvedPlaceholderTypes() {
|
||||
|
9
src/main/java/de/dhbwstuttgart/util/Logger.java
Normal file
9
src/main/java/de/dhbwstuttgart/util/Logger.java
Normal file
@@ -0,0 +1,9 @@
package de.dhbwstuttgart.util;

public class Logger {

    public static void print(String s) {
        System.out.println(s);
    }

}
|
50
src/main/java/de/dhbwstuttgart/util/Triple.java
Normal file
50
src/main/java/de/dhbwstuttgart/util/Triple.java
Normal file
@@ -0,0 +1,50 @@
package de.dhbwstuttgart.util;

import java.util.Objects;
import java.util.Optional;

public class Triple<T1, T2, T3> {
    private final T1 value1;
    private final T2 value2;
    private final T3 value3;

    public Triple(T1 value1, T2 value2, T3 value3) {
        this.value1 = value1;
        this.value2 = value2;
        this.value3 = value3;
    }

    public T1 getValue1() {
        return value1;
    }

    public T2 getValue2() {
        return value2;
    }

    public T3 getValue3() {
        return value3;
    }

    public String toString() {
        return "(" + value1.toString() + "," + value2.toString() + "," + value3.toString() + ")\n";
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        Triple<?, ?, ?> oTriple = (Triple<?, ?, ?>) o;
        return Objects.equals(value1, oTriple.value1)
                && Objects.equals(value2, oTriple.value2)
                && Objects.equals(value3, oTriple.value3);
    }

    @Override
    public int hashCode() {
        return Objects.hash(value1, value2, value3);
    }
}
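A short usage sketch for the Triple value class added above; the concrete type arguments and sample values are illustrative assumptions. Note that toString() appends a trailing newline, so printing with println produces an extra blank line.

import de.dhbwstuttgart.util.Triple;

class TripleDemo {
    public static void main(String[] args) {
        Triple<String, Integer, Boolean> t1 = new Triple<>("x", 1, true);
        Triple<String, Integer, Boolean> t2 = new Triple<>("x", 1, true);

        System.out.print(t1);                                // (x,1,true) plus the embedded newline
        System.out.println(t1.equals(t2));                   // true: field-wise equality via Objects.equals
        System.out.println(t1.hashCode() == t2.hashCode());  // true: hashCode is consistent with equals
    }
}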
|
@@ -303,7 +303,7 @@ public class TestComplete {
|
||||
var result = mul.invoke(instanceOfClass_s1, instanceOfClass_s2);
|
||||
System.out.println(instanceOfClass_s1.toString() + " * " + instanceOfClass_s2.toString() + " = " + result.toString());
|
||||
|
||||
assertEquals(result, 8);
|
||||
assertEquals(8, result);
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -444,8 +444,8 @@ public class TestComplete {
|
||||
var O = Arrays.stream(typeParaTypes).filter(t -> t.equals(paraTypes[0])).findFirst().get();
|
||||
var N2 = Arrays.stream(typeParaTypes).filter(t -> t.equals(paraTypes[1])).findFirst().get();
|
||||
assertEquals(N, N2);
|
||||
assertEquals(N.getBounds()[0], Object.class);
|
||||
assertEquals(O.getBounds()[0], Object.class);
|
||||
assertEquals(Object.class, N.getBounds()[0]);
|
||||
assertEquals(Object.class, O.getBounds()[0]);
|
||||
|
||||
var m2 = tph4.getDeclaredMethod("m2", Object.class);
|
||||
|
||||
@@ -500,8 +500,8 @@ public class TestComplete {
|
||||
var P = Arrays.stream(typeParaTypes).filter(t -> t.equals(paraTypes[0])).findFirst().get();
|
||||
var O = Arrays.stream(typeParaTypes).filter(t -> t.equals(paraTypes[1])).findFirst().get();
|
||||
|
||||
assertEquals(P.getBounds()[0], Object.class);
|
||||
assertEquals(O.getBounds()[0], Object.class);
|
||||
assertEquals(Object.class, P.getBounds()[0]);
|
||||
assertEquals(Object.class, O.getBounds()[0]);
|
||||
|
||||
var m2 = tph5.getDeclaredMethod("m2", Object.class);
|
||||
|
||||
@@ -605,7 +605,7 @@ public class TestComplete {
|
||||
var fac1 = classFiles.get("Fac1");
|
||||
var instance = fac1.getDeclaredConstructor().newInstance();
|
||||
var fac = fac1.getDeclaredMethod("fac", Integer.class);
|
||||
assertEquals(fac.invoke(instance, 10), 3628800);
|
||||
assertEquals(3628800, fac.invoke(instance, 10));
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -676,16 +676,16 @@ public class TestComplete {
|
||||
var r2 = ctor.newInstance(10, 20f);
|
||||
var r3 = ctor.newInstance(10, r1);
|
||||
|
||||
assertEquals(swtch.invoke(instance, r1), 30);
|
||||
assertEquals(swtch.invoke(instance, r2), 20);
|
||||
assertEquals(swtch.invoke(instance, r3), 40);
|
||||
assertEquals(swtch.invoke(instance, 50), 50);
|
||||
assertEquals(30, swtch.invoke(instance, r1));
|
||||
assertEquals(20, swtch.invoke(instance, r2));
|
||||
assertEquals(40, swtch.invoke(instance, r3));
|
||||
assertEquals(50, swtch.invoke(instance, 50));
|
||||
try {
|
||||
assertEquals(swtch.invoke(instance, "Some string"), 0);
|
||||
assertEquals(0, swtch.invoke(instance, "Some string"));
|
||||
fail("No assertion thrown!");
|
||||
} catch (InvocationTargetException exception) {
|
||||
assertTrue(exception.getCause() instanceof IllegalArgumentException);
|
||||
assertEquals(exception.getCause().getMessage(), "Unhandled case value");
|
||||
assertEquals("Unhandled case value", exception.getCause().getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -702,7 +702,7 @@ public class TestComplete {
|
||||
var r1 = ctor.newInstance("asd", "asb");
|
||||
|
||||
|
||||
assertEquals(swtch.invoke(instance, r1), "asd");
|
||||
assertEquals("asd", swtch.invoke(instance, r1));
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -753,8 +753,8 @@ public class TestComplete {
|
||||
var r2 = ctor.newInstance(1, clazzNumber.getConstructor().newInstance());
|
||||
|
||||
|
||||
assertEquals(swtch.invoke(instance, r1), "Second Element is a Text");
|
||||
assertEquals(swtch.invoke(instance, r2), "Second Element is a Number");
|
||||
assertEquals("Second Element is a Text", swtch.invoke(instance, r1));
|
||||
assertEquals("Second Element is a Number", swtch.invoke(instance, r2));
|
||||
}
|
||||
@Test
|
||||
public void testSwitchListRecord() throws Exception {
|
||||
@@ -776,10 +776,10 @@ public class TestComplete {
|
||||
|
||||
|
||||
var listWithMoreThanOneElement = constructorLinkedElem.newInstance(5, constructorLinkedElem.newInstance(1, constructorElem.newInstance(5)));
|
||||
assertEquals(swtch.invoke(instance, listWithMoreThanOneElement), 11);
|
||||
assertEquals(11, swtch.invoke(instance, listWithMoreThanOneElement));
|
||||
|
||||
var listWithOneElement = constructorLinkedElem.newInstance(5, constructorElem.newInstance(5));
|
||||
assertEquals(swtch.invoke(instance, listWithOneElement), 10);
|
||||
assertEquals(10, swtch.invoke(instance, listWithOneElement));
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -795,9 +795,9 @@ public class TestComplete {
|
||||
var r2 = ctor.newInstance(20);
|
||||
var r3 = ctor.newInstance(30);
|
||||
|
||||
assertEquals(m.invoke(instance, r1), 1);
|
||||
assertEquals(m.invoke(instance, r2), 2);
|
||||
assertEquals(m.invoke(instance, r3), 3);
|
||||
assertEquals(1, m.invoke(instance, r1));
|
||||
assertEquals(2, m.invoke(instance, r2));
|
||||
assertEquals(3, m.invoke(instance, r3));
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -820,10 +820,10 @@ public class TestComplete {
|
||||
|
||||
|
||||
var listWithMoreThanOneElement = constructorLinkedElem.newInstance(5, constructorLinkedElem.newInstance(1, constructorElem.newInstance(5)));
|
||||
assertEquals(swtch.invoke(instance, listWithMoreThanOneElement), 5);
|
||||
assertEquals(5, swtch.invoke(instance, listWithMoreThanOneElement));
|
||||
|
||||
var listWithOneElement = constructorLinkedElem.newInstance(5, constructorElem.newInstance(5));
|
||||
assertEquals(swtch.invoke(instance, listWithOneElement), 5);
|
||||
assertEquals(5, swtch.invoke(instance, listWithOneElement));
|
||||
}
|
||||
|
||||
|
||||
@@ -835,6 +835,7 @@ public class TestComplete {
|
||||
var instance = clazz.getDeclaredConstructor().newInstance();
|
||||
}
|
||||
|
||||
@Ignore("Not implemented")
|
||||
@Test
|
||||
public void testStringSwitch() throws Exception {
|
||||
var classFiles = generateClassFiles(new ByteArrayClassLoader(), "SwitchString.jav");
|
||||
@@ -842,11 +843,11 @@ public class TestComplete {
|
||||
var instance = clazz.getDeclaredConstructor().newInstance();
|
||||
var main = clazz.getDeclaredMethod("main", String.class);
|
||||
|
||||
assertEquals(main.invoke(instance, "AaAaAa"), 1);
|
||||
assertEquals(main.invoke(instance, "AaAaBB"), 2);
|
||||
assertEquals(main.invoke(instance, "test"), 3);
|
||||
assertEquals(main.invoke(instance, "TEST"), 3);
|
||||
assertEquals(main.invoke(instance, "awawa"), 4);
|
||||
assertEquals(1, main.invoke(instance, "AaAaAa"));
|
||||
assertEquals(2, main.invoke(instance, "AaAaBB"));
|
||||
assertEquals(3, main.invoke(instance, "test"));
|
||||
assertEquals(3, main.invoke(instance, "TEST"));
|
||||
assertEquals(4, main.invoke(instance, "awawa"));
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -856,6 +857,7 @@ public class TestComplete {
|
||||
var instance = clazz.getDeclaredConstructor().newInstance();
|
||||
}
|
||||
|
||||
@Ignore("Not implemented")
|
||||
@Test
|
||||
public void testOverloadPattern() throws Exception {
|
||||
var classFiles = generateClassFiles(new ByteArrayClassLoader(), "OverloadPattern.jav");
|
||||
@@ -870,17 +872,18 @@ public class TestComplete {
|
||||
var ptInt = rec.getDeclaredConstructor(Number.class, Number.class).newInstance(1, 2);
|
||||
var ptFlt = rec.getDeclaredConstructor(Number.class, Number.class).newInstance(1f, 2f);
|
||||
|
||||
assertEquals(m1.invoke(instance, ptInt, ptFlt), 1);
|
||||
assertEquals(m1.invoke(instance, ptInt, ptInt), 2);
|
||||
assertEquals(m1.invoke(instance, ptFlt, ptInt), 3);
|
||||
assertEquals(m1.invoke(instance, ptFlt, ptFlt), 4);
|
||||
assertEquals(1, m1.invoke(instance, ptInt, ptFlt));
|
||||
assertEquals(2, m1.invoke(instance, ptInt, ptInt));
|
||||
assertEquals(3, m1.invoke(instance, ptFlt, ptInt));
|
||||
assertEquals(4, m1.invoke(instance, ptFlt, ptFlt));
|
||||
|
||||
assertEquals(m2.invoke(instance, ptInt), 5);
|
||||
assertEquals(m2.invoke(instance, ptFlt), 6);
|
||||
assertEquals(5, m2.invoke(instance, ptInt));
|
||||
assertEquals(6, m2.invoke(instance, ptFlt));
|
||||
|
||||
assertEquals(m3.invoke(instance, 10), 10);
|
||||
assertEquals(10, m3.invoke(instance, 10));
|
||||
}
|
||||
|
||||
@Ignore("Not implemented")
|
||||
@Test
|
||||
public void testOverloadNestedPattern() throws Exception {
|
||||
var classFiles = generateClassFiles(new ByteArrayClassLoader(), "OverloadNestedPattern.jav");
|
||||
@@ -893,16 +896,16 @@ public class TestComplete {
|
||||
var r2 = ctor.newInstance(r1);
|
||||
|
||||
var m = clazz.getDeclaredMethod("m", rec, rec);
|
||||
assertEquals(m.invoke(instance, r2, r1), 1);
|
||||
assertEquals(1, m.invoke(instance, r2, r1));
|
||||
|
||||
var r3 = ctor.newInstance(2f);
|
||||
var r4 = ctor.newInstance(r3);
|
||||
assertEquals(m.invoke(instance, r4, r3), 2);
|
||||
assertEquals(2, m.invoke(instance, r4, r3));
|
||||
|
||||
assertEquals(m.invoke(instance, r1, r1), 3);
|
||||
assertEquals(3, m.invoke(instance, r1, r1));
|
||||
}
|
||||
|
||||
//@Ignore("Not implemented")
|
||||
@Ignore("Not implemented")
|
||||
@Test
|
||||
public void testPatternMatchingHaskellStyle() throws Exception {
|
||||
var classFiles = generateClassFiles(new ByteArrayClassLoader(), "PatternMatchingHaskellStyle.jav");
|
||||
@@ -920,11 +923,12 @@ public class TestComplete {
|
||||
var x = rctor.newInstance(rctor.newInstance(0, rctor.newInstance(0, r2ctor.newInstance(0))), r2ctor.newInstance(0));
|
||||
var y = rctor.newInstance(r2ctor.newInstance(0), r2ctor.newInstance(0));
|
||||
|
||||
assertEquals(m.invoke(instance, x, y), null);
|
||||
assertEquals(m.invoke(instance, y, y), null);
|
||||
assertNull(m.invoke(instance, x, y));
|
||||
assertNull(m.invoke(instance, y, y));
|
||||
|
||||
}
|
||||
|
||||
@Ignore("Not implemented")
|
||||
@Test
|
||||
public void testPatternMatchingListAppend() throws Exception {
|
||||
var classFiles = generateClassFiles(new ByteArrayClassLoader(), "PatternMatchingListAppend.jav");
|
||||
@@ -941,7 +945,7 @@ public class TestComplete {
|
||||
var list1 = ConsCtor.newInstance(1, ConsCtor.newInstance(2, ConsCtor.newInstance(3, EmptyCtor.newInstance())));
|
||||
var list2 = ConsCtor.newInstance(4, ConsCtor.newInstance(5, ConsCtor.newInstance(6, EmptyCtor.newInstance())));
|
||||
|
||||
var append = clazz.getDeclaredMethod("append", List, List);
|
||||
var append = clazz.getDeclaredMethod("append", Cons, Cons);
|
||||
System.out.println(append.invoke(instance, list1, list2));
|
||||
}
|
||||
|
||||
@@ -959,8 +963,8 @@ public class TestComplete {
|
||||
var x = rctor.newInstance(10);
|
||||
var d = rctor.newInstance(20.0);
|
||||
|
||||
assertEquals(m.invoke(instance, x, 0), 50);
|
||||
assertEquals(m.invoke(instance, d, 0), 40.0);
|
||||
assertEquals(50, m.invoke(instance, x, 0));
|
||||
assertEquals(40.0, m.invoke(instance, d, 0));
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -975,7 +979,7 @@ public class TestComplete {
|
||||
var classFiles = generateClassFiles(new ByteArrayClassLoader(), "Static.jav");
|
||||
var clazz = classFiles.get("Static");
|
||||
var m = clazz.getDeclaredMethod("m");
|
||||
assertEquals(m.invoke(null), 50);
|
||||
assertEquals(50, m.invoke(null));
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -984,7 +988,7 @@ public class TestComplete {
|
||||
var clazz = classFiles.get("For");
|
||||
var instance = clazz.getDeclaredConstructor().newInstance();
|
||||
var m = clazz.getDeclaredMethod("m", Integer.class);
|
||||
assertEquals(m.invoke(instance, 10), 60);
|
||||
assertEquals(60, m.invoke(instance, 10));
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -993,7 +997,7 @@ public class TestComplete {
|
||||
var clazz = classFiles.get("ForEach");
|
||||
var instance = clazz.getDeclaredConstructor().newInstance();
|
||||
var m = clazz.getDeclaredMethod("m");
|
||||
assertEquals(m.invoke(instance), 6);
|
||||
assertEquals(6, m.invoke(instance));
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -1011,7 +1015,7 @@ public class TestComplete {
|
||||
var clazz = classFiles.get("FunctionalInterface");
|
||||
var instance = clazz.getDeclaredConstructor().newInstance();
|
||||
var m = clazz.getDeclaredMethod("m");
|
||||
assertEquals(m.invoke(instance), 200);
|
||||
assertEquals(200, m.invoke(instance));
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -1020,7 +1024,7 @@ public class TestComplete {
|
||||
var clazz = classFiles.get("Chain");
|
||||
var instance = clazz.getDeclaredConstructor().newInstance();
|
||||
var m = clazz.getDeclaredMethod("m");
|
||||
assertEquals(m.invoke(instance), 5);
|
||||
assertEquals(5, m.invoke(instance));
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -1073,9 +1077,9 @@ public class TestComplete {
|
||||
var instance = clazz.getDeclaredConstructor().newInstance();
|
||||
|
||||
assertNull(clazz.getDeclaredMethod("m").invoke(instance));
|
||||
assertEquals(clazz.getDeclaredMethod("m2").invoke(instance), 'C');
|
||||
assertEquals(clazz.getDeclaredMethod("m3").invoke(instance), 10L);
|
||||
assertEquals(clazz.getDeclaredMethod("m4").invoke(instance), 10.5F);
|
||||
assertEquals('C', clazz.getDeclaredMethod("m2").invoke(instance));
|
||||
assertEquals(10L, clazz.getDeclaredMethod("m3").invoke(instance));
|
||||
assertEquals(10.5F, clazz.getDeclaredMethod("m4").invoke(instance));
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -1083,7 +1087,7 @@ public class TestComplete {
|
||||
var classFiles = generateClassFiles(new ByteArrayClassLoader(), "OLConstructor.jav");
|
||||
var clazz = classFiles.get("Child");
|
||||
var instance = clazz.getDeclaredConstructor().newInstance();
|
||||
assertEquals(clazz.getSuperclass().getDeclaredField("x").get(instance), 3);
|
||||
assertEquals(3, clazz.getSuperclass().getDeclaredField("x").get(instance));
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -1091,11 +1095,11 @@ public class TestComplete {
|
||||
var classFiles = generateClassFiles(new ByteArrayClassLoader(), "Op1.jav");
|
||||
var clazz = classFiles.get("Op1");
|
||||
var instance = clazz.getDeclaredConstructor().newInstance();
|
||||
assertEquals(clazz.getDeclaredMethod("not").invoke(instance), true);
|
||||
assertEquals(clazz.getDeclaredMethod("or").invoke(instance), 10 | 20);
|
||||
assertEquals(clazz.getDeclaredMethod("and").invoke(instance), 10 & 20);
|
||||
assertEquals(clazz.getDeclaredMethod("xor").invoke(instance), 10 ^ 20);
|
||||
assertEquals(clazz.getDeclaredMethod("mod").invoke(instance), 10 % 2);
|
||||
assertEquals(true, clazz.getDeclaredMethod("not").invoke(instance));
|
||||
assertEquals(10 | 20, clazz.getDeclaredMethod("or").invoke(instance));
|
||||
assertEquals(10 & 20, clazz.getDeclaredMethod("and").invoke(instance));
|
||||
assertEquals(10 ^ 20, clazz.getDeclaredMethod("xor").invoke(instance));
|
||||
assertEquals(10 % 2, clazz.getDeclaredMethod("mod").invoke(instance));
|
||||
}
|
||||
|
||||
@Ignore("Not implemented")
|
||||
@@ -1112,18 +1116,18 @@ public class TestComplete {
|
||||
var clazzPublic = classFiles.get("Access");
|
||||
var clazzDefault = classFiles.get("AccessDefault");
|
||||
|
||||
assertEquals(clazzPublic.getModifiers(), Modifier.PUBLIC);
|
||||
assertEquals(clazzDefault.getModifiers(), 0);
|
||||
assertEquals(Modifier.PUBLIC, clazzPublic.getModifiers());
|
||||
assertEquals(0, clazzDefault.getModifiers());
|
||||
|
||||
assertEquals(clazzPublic.getDeclaredMethod("mPublic").getModifiers(), Modifier.PUBLIC);
|
||||
assertEquals(clazzPublic.getDeclaredMethod("mProtected").getModifiers(), Modifier.PROTECTED);
|
||||
assertEquals(clazzPublic.getDeclaredMethod("mDefault").getModifiers(), 0);
|
||||
assertEquals(clazzPublic.getDeclaredMethod("mPrivate").getModifiers(), Modifier.PRIVATE);
|
||||
assertEquals(Modifier.PUBLIC, clazzPublic.getDeclaredMethod("mPublic").getModifiers());
|
||||
assertEquals(Modifier.PROTECTED, clazzPublic.getDeclaredMethod("mProtected").getModifiers());
|
||||
assertEquals(0, clazzPublic.getDeclaredMethod("mDefault").getModifiers());
|
||||
assertEquals(Modifier.PRIVATE, clazzPublic.getDeclaredMethod("mPrivate").getModifiers());
|
||||
|
||||
assertEquals(clazzPublic.getDeclaredField("fPublic").getModifiers(), Modifier.PUBLIC);
|
||||
assertEquals(clazzPublic.getDeclaredField("fProtected").getModifiers(), Modifier.PROTECTED);
|
||||
assertEquals(clazzPublic.getDeclaredField("fDefault").getModifiers(), 0);
|
||||
assertEquals(clazzPublic.getDeclaredField("fPrivate").getModifiers(), Modifier.PRIVATE);
|
||||
assertEquals(Modifier.PUBLIC, clazzPublic.getDeclaredField("fPublic").getModifiers());
|
||||
assertEquals(Modifier.PROTECTED, clazzPublic.getDeclaredField("fProtected").getModifiers());
|
||||
assertEquals(0, clazzPublic.getDeclaredField("fDefault").getModifiers());
|
||||
assertEquals(Modifier.PRIVATE, clazzPublic.getDeclaredField("fPrivate").getModifiers());
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -1161,7 +1165,7 @@ public class TestComplete {
|
||||
var classFiles = generateClassFiles(new ByteArrayClassLoader(), "Ternary.jav");
|
||||
var clazz = classFiles.get("Ternary");
|
||||
var instance = clazz.getDeclaredConstructor().newInstance();
|
||||
assertEquals(clazz.getDeclaredMethod("main", Integer.class).invoke(instance, 5), "small");
|
||||
assertEquals("small", clazz.getDeclaredMethod("main", Integer.class).invoke(instance, 5));
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -1169,8 +1173,8 @@ public class TestComplete {
|
||||
var classFiles = generateClassFiles(new ByteArrayClassLoader(), "While.jav");
|
||||
var clazz = classFiles.get("While");
|
||||
var instance = clazz.getDeclaredConstructor().newInstance();
|
||||
assertEquals(clazz.getDeclaredMethod("m", Integer.class).invoke(instance, 5), 5);
|
||||
assertEquals(clazz.getDeclaredMethod("m2").invoke(instance), 10);
|
||||
assertEquals(5, clazz.getDeclaredMethod("m", Integer.class).invoke(instance, 5));
|
||||
assertEquals(10, clazz.getDeclaredMethod("m2").invoke(instance));
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -1186,7 +1190,7 @@ public class TestComplete {
|
||||
var clazz = classFiles.get("Assign");
|
||||
var instance = clazz.getDeclaredConstructor().newInstance();
|
||||
var m = clazz.getDeclaredMethod("m");
|
||||
assertEquals(m.invoke(instance), 20);
|
||||
assertEquals(20, m.invoke(instance));
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -1243,9 +1247,9 @@ public class TestComplete {
|
||||
var classFiles = generateClassFiles(new ByteArrayClassLoader(), "Bug295.jav");
|
||||
var clazz = classFiles.get("Bug295");
|
||||
var instance = clazz.getDeclaredConstructor(Integer.class, Integer.class, Integer.class).newInstance(1, 2, 3);
|
||||
assertEquals(clazz.getDeclaredField("a").get(instance), 1);
|
||||
assertEquals(clazz.getDeclaredField("b").get(instance), 2);
|
||||
assertEquals(clazz.getDeclaredField("c").get(instance), 3);
|
||||
assertEquals(1, clazz.getDeclaredField("a").get(instance));
|
||||
assertEquals(2, clazz.getDeclaredField("b").get(instance));
|
||||
assertEquals(3, clazz.getDeclaredField("c").get(instance));
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -1275,7 +1279,7 @@ public class TestComplete {
|
||||
var classFiles = generateClassFiles(new ByteArrayClassLoader(), "Bug300.jav");
|
||||
var clazz = classFiles.get("Bug300");
|
||||
var instance = clazz.getDeclaredConstructor().newInstance();
|
||||
assertEquals(clazz.getDeclaredMethod("m").invoke(instance), "Base");
|
||||
assertEquals("Base", clazz.getDeclaredMethod("m").invoke(instance));
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -1320,7 +1324,7 @@ public class TestComplete {
|
||||
var classFiles = generateClassFiles(new ByteArrayClassLoader(), "Bug310.jav");
|
||||
var clazz = classFiles.get("Bug310");
|
||||
var instance = clazz.getDeclaredConstructor().newInstance();
|
||||
assertEquals(clazz.getDeclaredMethod("toString").invoke(instance), "3");
|
||||
assertEquals("3", clazz.getDeclaredMethod("toString").invoke(instance));
|
||||
}
|
||||
|
||||
@Test