
Compare commits


5 Commits

Author SHA1 Message Date
Martin Plümicke
d33335a6d9 modified: ../../../main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
Construction of newOderConstraints reverted again

	modified:   ../../resources/bytecode/javFiles/MatrixOP.jav
2019-02-14 09:38:26 +01:00
Martin Plümicke
25b1d3e62d modified: ../../../main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
ArrayList replaced with LinkedList
computeCartesianRecursiveOderConstraints commented out
2019-02-14 08:53:32 +01:00
Martin Plümicke
85a15cb256 modified: ../../../../main/java/de/dhbwstuttgart/typeinference/unify/PartialOrderSet.java
modified:   ../../../../main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
PartialOrderSet implemented as a LinkedList

	modified:   ../../bytecode/javFiles/Matrix.jav
2019-02-13 18:34:12 +01:00
Martin Plümicke
0c03c2fbc8 Merge branch 'unify-test' of ssh://gohorb.ba-horb.de/bahome/projekt/git/JavaCompilerCore into unify-PartialOrderSet
Conflicts:
	src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java

	modified:   ../../../main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java
	modified:   ../../../main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
	modified:   ../../resources/bytecode/javFiles/Matrix.jav
	modified:   ../../resources/bytecode/javFiles/MatrixOP.jav
2019-02-11 14:41:28 +01:00
Martin Plümicke
c8daa665f6 new file: src/main/java/de/dhbwstuttgart/typeinference/unify/PartialOrderSet.java
modified:   src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java
	modified:   src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java
	modified:   src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
PartialOrderSet implemented and integrated into TypeUnify, TypeUnify2Task, TypeUnifyTask
2019-02-02 20:26:26 +01:00
48 changed files with 1022 additions and 2896 deletions
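The commits above introduce a PartialOrderSet, realise it as a LinkedList and wire it into TypeUnify, TypeUnify2Task and TypeUnifyTask; the file itself is not visible in this compare, so the following is only a minimal sketch of a LinkedList-backed ordered insert, with the partial order simplified to a Comparator. The class name and methods are illustrative and are not the actual PartialOrderSet API.

    import java.util.Comparator;
    import java.util.LinkedList;

    // Hypothetical sketch, not the repository's PartialOrderSet: elements are kept
    // sorted in a LinkedList by inserting each new element in front of the first
    // strictly greater one, so minimum/maximum lookups become getFirst()/getLast().
    public class OrderedLinkedListSketch<T> {
        private final LinkedList<T> elements = new LinkedList<>();
        private final Comparator<T> cmp;

        public OrderedLinkedListSketch(Comparator<T> cmp) {
            this.cmp = cmp;
        }

        public boolean add(T t) {
            int idx = 0;
            for (T e : elements) {
                int c = cmp.compare(e, t);
                if (c == 0) return false;   // already present with respect to the ordering
                if (c > 0) break;           // first element greater than t found
                idx++;
            }
            elements.add(idx, t);           // positional insert into the LinkedList
            return true;
        }

        public T first() { return elements.getFirst(); }
        public T last()  { return elements.getLast(); }

        public static void main(String[] args) {
            OrderedLinkedListSketch<Integer> s = new OrderedLinkedListSketch<>(Comparator.naturalOrder());
            s.add(3); s.add(1); s.add(2); s.add(1);
            System.out.println(s.first() + " .. " + s.last());  // prints "1 .. 3"
        }
    }

In TypeUnifyTask below, an ordering of the same kind (oup, an Ordering over sets of UnifyPair) is what drives the max/min selection in computeCartesianRecursive.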

Binary file not shown.

pom.xml (14 changed lines)

@@ -55,14 +55,6 @@ http://maven.apache.org/maven-v4_0_0.xsd">
<finalName>${project.artifactId}-${project.version}</finalName>
<testOutputDirectory>target/test-classes</testOutputDirectory>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>3.0.0-M3</version>
<configuration>
<skipTests>true</skipTests>
</configuration>
</plugin>
<plugin>
<groupId>org.antlr</groupId>
<artifactId>antlr4-maven-plugin</artifactId>
@@ -146,6 +138,10 @@ http://maven.apache.org/maven-v4_0_0.xsd">
</execution>
</executions>
</plugin>
<!-- plugin> <groupId>org.eclipse.tycho</groupId> <artifactId>tycho-p2-repository-plugin</artifactId>
<version>${tycho.version}</version> <executions> <execution> <phase>package</phase>
<goals> <goal>archive-repository</goal> </goals> </execution> </executions>
</plugin -->
</plugins>
</build>
<pluginRepositories>
@@ -173,3 +169,5 @@ http://maven.apache.org/maven-v4_0_0.xsd">
</repository>
</distributionManagement>
</project>

@@ -1,929 +0,0 @@
package de.dhbwstuttgart.typeinference.unify;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.RecursiveTask;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
import de.dhbwstuttgart.typeinference.unify.interfaces.ISetOperations;
import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify;
import de.dhbwstuttgart.typeinference.unify.model.ExtendsType;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.SuperType;
import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
import de.dhbwstuttgart.typeinference.unify.model.Unifier;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import de.dhbwstuttgart.typeinference.unify.model.OrderingUnifyPair;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import com.google.common.collect.Ordering;
/**
* Implementation of the type unification algorithm
* @author Florian Steurer
*/
public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
private static final long serialVersionUID = 1L;
private static int i = 0;
private boolean printtag = false;
public static final String rootDirectory = System.getProperty("user.dir")+"/test/logFiles/";
FileWriter logFile;
/**
* The implementation of setOps that will be used during the unification
*/
protected ISetOperations setOps = new GuavaSetOperations();
/**
* The implementation of the standard unify that will be used during the unification
*/
protected IUnify stdUnify = new MartelliMontanariUnify();
/**
* The implementation of the rules that will be used during the unification.
*/
protected IRuleSet rules;
protected Set<UnifyPair> eq;
protected IFiniteClosure fc;
protected Ordering<Set<UnifyPair>> oup;
protected boolean parallel;
public TypeUnifyTask() {
rules = new RuleSet();
}
public TypeUnifyTask(Set<UnifyPair> eq, IFiniteClosure fc, boolean parallel, FileWriter logFile) {
this.eq = eq;
this.fc = fc;
this.oup = new OrderingUnifyPair(fc);
this.parallel = parallel;
this.logFile = logFile;
rules = new RuleSet(logFile);
}
@Override
protected Set<Set<UnifyPair>> compute() {
return unify(eq, fc, parallel);
}
/**
* Computes all principal type unifiers for a set of constraints.
* @param eq The set of constraints
* @param fc The finite closure
* @return The set of all principal type unifiers
*/
protected Set<Set<UnifyPair>> unify(Set<UnifyPair> eq, IFiniteClosure fc, boolean parallel) {
/*
* Step 1: Repeated application of reduce, adapt, erase, swap
*/
writeLog("Unifikation: " + eq.toString());
eq = eq.stream().map(x -> {x.setVariance((byte)-1); return x;}).collect(Collectors.toCollection(HashSet::new));
Set<UnifyPair> eq0 = applyTypeUnificationRules(eq, fc);
/*
* Step 2 and 3: Create a subset eq1s of pairs where both sides are TPH and eq2s of the other pairs
*/
Set<UnifyPair> eq1s = new HashSet<>();
Set<UnifyPair> eq2s = new HashSet<>();
splitEq(eq0, eq1s, eq2s);
/*
* Step 4: Create possible typings
*
* "Manche Autoren identifizieren die Paare (a, (b,c)) und ((a,b),c)
* mit dem geordneten Tripel (a,b,c), wodurch das kartesische Produkt auch assoziativ wird." - Wikipedia
*/
// There are up to 10 toplevel set. 8 of 10 are the result of the
// cartesian product of the sets created by pattern matching.
List<Set<Set<UnifyPair>>> topLevelSets = new ArrayList<>();
//System.out.println(eq2s);
if(eq1s.size() != 0) { // Do not add empty sets or the cartesian product will always be empty.
Set<Set<UnifyPair>> wrap = new HashSet<>();
wrap.add(eq1s);
topLevelSets.add(wrap); // Add Eq1'
}
// Add the set of [a =. Theta | (a=. Theta) in Eq2']
Set<UnifyPair> bufferSet = eq2s.stream()
.filter(x -> x.getPairOp() == PairOperator.EQUALSDOT && x.getLhsType() instanceof PlaceholderType)
.collect(Collectors.toSet());
if(bufferSet.size() != 0) { // Do not add empty sets or the cartesian product will always be empty.
Set<Set<UnifyPair>> wrap = new HashSet<>();
wrap.add(bufferSet);
topLevelSets.add(wrap);
eq2s.removeAll(bufferSet);
}
// Sets that originate from pair pattern matching
// Sets of the "second level"
Set<UnifyPair> undefinedPairs = new HashSet<>();
if (printtag) System.out.println("eq2s " + eq2s);
//writeLog("BufferSet: " + bufferSet.toString()+"\n");
Set<Set<Set<Set<UnifyPair>>>> secondLevelSets = calculatePairSets(eq2s, fc, undefinedPairs);
//PL 2017-09-20: Im calculatePairSets wird möglicherweise O .< java.lang.Integer
//nicht ausgewertet Faculty Beispiel im 1. Schritt
//PL 2017-10-03 geloest, muesste noch mit FCs mit kleineren
//Typen getestet werden.
if (printtag) System.out.println("secondLevelSets:" +secondLevelSets);
// If pairs occured that did not match one of the cartesian product cases,
// those pairs are contradictory and the unification is impossible.
if(!undefinedPairs.isEmpty()) {
writeLog("UndefinedPairs; " + undefinedPairs);
Set<Set<UnifyPair>> error = new HashSet<>();
error.add(undefinedPairs);
return error;
}
/* Up to here, no cartesian products are calculated.
* filters for pairs and sets can be applied here */
// Alternative: Sub cartesian products of the second level (pattern matched) sets
// "the big (x)"
/* for(Set<Set<Set<UnifyPair>>> secondLevelSet : secondLevelSets) {
//System.out.println("secondLevelSet "+secondLevelSet.size());
List<Set<Set<UnifyPair>>> secondLevelSetList = new ArrayList<>(secondLevelSet);
Set<List<Set<UnifyPair>>> cartResult = setOps.cartesianProduct(secondLevelSetList);
//System.out.println("CardResult: "+cartResult.size());
// Flatten and add to top level sets
Set<Set<UnifyPair>> flat = new HashSet<>();
int j = 0;
for(List<Set<UnifyPair>> s : cartResult) {
j++;
//System.out.println("s from CardResult: "+cartResult.size() + " " + j);
Set<UnifyPair> flat1 = new HashSet<>();
for(Set<UnifyPair> s1 : s)
flat1.addAll(s1);
flat.add(flat1);
}
//topLevelSets.add(flat);
}
*/
//Alternative KEIN KARTESISCHES PRODUKT der secondlevel Ebene bilden
for(Set<Set<Set<UnifyPair>>> secondLevelSet : secondLevelSets) {
for (Set<Set<UnifyPair>> secondlevelelem : secondLevelSet) {
topLevelSets.add(secondlevelelem);
}
}
//System.out.println(topLevelSets);
//System.out.println();
//Aufruf von computeCartesianRecursive ANFANG
return computeCartesianRecursive(new HashSet<>(), new ArrayList<>(topLevelSets), eq, fc, parallel);
}
Set<Set<UnifyPair>> unify2(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, IFiniteClosure fc, boolean parallel) {
//Aufruf von computeCartesianRecursive ENDE
//keine Ahnung woher das kommt
//Set<Set<UnifyPair>> setToFlatten = topLevelSets.stream().map(x -> x.iterator().next()).collect(Collectors.toCollection(HashSet::new));
//Muss auskommentiert werden, wenn computeCartesianRecursive ANFANG
// Cartesian product over all (up to 10) top level sets
//Set<Set<Set<UnifyPair>>> eqPrimeSet = setOps.cartesianProduct(topLevelSets)
// .stream().map(x -> new HashSet<>(x))
// .collect(Collectors.toCollection(HashSet::new));
//Muss auskommentiert werden, wenn computeCartesianRecursive ENDE
Set<Set<UnifyPair>> eqPrimePrimeSet = new HashSet<>();
Set<TypeUnifyTask> forks = new HashSet<>();
//Muss auskommentiert werden, wenn computeCartesianRecursive ANFANG
//for(Set<Set<UnifyPair>> setToFlatten : eqPrimeSet) {
// Flatten the cartesian product
//Muss auskommentiert werden, wenn computeCartesianRecursive ENDE
Set<UnifyPair> eqPrime = new HashSet<>();
setToFlatten.stream().forEach(x -> eqPrime.addAll(x));
/*
* Step 5: Substitution
*/
//System.out.println("vor Subst: " + eqPrime);
Optional<Set<UnifyPair>> eqPrimePrime = rules.subst(eqPrime);
/*
* Step 6 a) Restart (fork) for pairs where subst was applied
*/
if(parallel) {
if (eqPrime.equals(eq)) //PL 2017-09-29 auskommentiert und durch
//(!eqPrimePrime.isPresent()) //PL 2071-09-29 dies ersetzt
//Begruendung: Wenn in der Substitution keine Veraenderung
//(!eqPrimePrime.isPresent()) erfolgt ist, ist das Ergebnis erzielt.
eqPrimePrimeSet.add(eqPrime);
else if(eqPrimePrime.isPresent()) {
//System.out.println("nextStep: " + eqPrimePrime.get());
TypeUnifyTask fork = new TypeUnifyTask(eqPrimePrime.get(), fc, true, logFile);
forks.add(fork);
fork.fork();
}
else {
//System.out.println("nextStep: " + eqPrime);
TypeUnifyTask fork = new TypeUnifyTask(eqPrime, fc, true, logFile);
forks.add(fork);
fork.fork();
}
}
else { // sequentiell (Step 6b is included)
if (printtag) System.out.println("nextStep: " + eqPrimePrime);
if (eqPrime.equals(eq)) { //PL 2017-09-29 auskommentiert und durch
//(!eqPrimePrime.isPresent()) //PL 2071-09-29 dies ersetzt
//Begruendung: Wenn in der Substitution keine Veraenderung
//(!eqPrimePrime.isPresent()) erfolgt ist, ist das Ergebnis erzielt.
try {
if (isSolvedForm(eqPrime)) {
logFile.write(eqPrime.toString()+"\n");
logFile.flush();
}
}
catch (IOException e) { }
eqPrimePrimeSet.add(eqPrime);
}
else if(eqPrimePrime.isPresent()) {
Set<Set<UnifyPair>> unifyres = unify(eqPrimePrime.get(), fc, false);
eqPrimePrimeSet.addAll(unifyres);
}
else {
Set<Set<UnifyPair>> unifyres = unify(eqPrime, fc, false);
eqPrimePrimeSet.addAll(unifyres);
}
}
//Muss auskommentiert werden, wenn computeCartesianRecursive ANFANG
//}
//Muss auskommentiert werden, wenn computeCartesianRecursive ENDE
/*
* Step 6 b) Build the union over everything.
*/
if(parallel)
for(TypeUnifyTask fork : forks)
eqPrimePrimeSet.addAll(fork.join());
/*
* Step 7: Filter empty sets;
*/
eqPrimePrimeSet = eqPrimePrimeSet.stream().filter(x -> isSolvedForm(x)).collect(Collectors.toCollection(HashSet::new));
if (!eqPrimePrimeSet.isEmpty())
writeLog("Result " + eqPrimePrimeSet.toString());
return eqPrimePrimeSet;
}
Set<Set<UnifyPair>> computeCartesianRecursive(Set<Set<UnifyPair>> fstElems, ArrayList<Set<Set<UnifyPair>>> topLevelSets, Set<UnifyPair> eq, IFiniteClosure fc, boolean parallel) {
ArrayList<Set<Set<UnifyPair>>> remainingSets = new ArrayList<>(topLevelSets);
Set<Set<UnifyPair>> nextSet = remainingSets.remove(0);
ArrayList<Set<UnifyPair>> nextSetasList = new ArrayList<>(nextSet);
Set<Set<UnifyPair>> result = new HashSet<>();
int i = 0;
byte variance = nextSetasList.iterator().next().iterator().next().getVariance();
Set<UnifyPair> a_next = null;
if (nextSetasList.iterator().next().iterator().next().getLhsType().getName().equals("D"))
System.out.print("");
if (nextSetasList.size()>1) {
if (variance == 1) {
a_next = oup.max(nextSetasList.iterator());
}
else if (variance == -1) {
a_next = oup.min(nextSetasList.iterator());
}
else if (variance == 0) {
a_next = nextSetasList.iterator().next();
}
}
else {
a_next = nextSetasList.iterator().next();
}
while (nextSetasList.size() != 0) {
Set<UnifyPair> a = a_next;
//writeLog("nextSet: " + nextSetasList.toString()+ "\n");
nextSetasList.remove(a);
if (nextSetasList.size() > 0) {
if (nextSetasList.size()>1) {
if (variance == 1) {
a_next = oup.max(nextSetasList.iterator());
}
else if (variance == -1) {
a_next = oup.min(nextSetasList.iterator());
}
else {
a_next = nextSetasList.iterator().next();
}
}
else {
a_next = nextSetasList.iterator().next();
}
}
//PL 2018-03-01
//TODO: 1. Maximum und Minimum unterscheiden
//TODO: 2. compare noch für alle Elmemente die nicht X =. ty sind erweitern
//for(Set<UnifyPair> a : newSet) {
i++;
Set<Set<UnifyPair>> elems = new HashSet<Set<UnifyPair>>(fstElems);
elems.add(a);
if (remainingSets.isEmpty()) {
result.addAll(unify2(elems, eq, fc, parallel));
System.out.println("");
}
else {
result.addAll(computeCartesianRecursive(elems, remainingSets, eq, fc, parallel));
}
if (!result.isEmpty()) {
if (variance == 1) {
if (a.iterator().next().getLhsType().getName().equals("WL"))
System.out.print("");
if (a.equals(a_next) ||
(oup.compare(a, a_next) == 1)) {
System.out.print("");
break;
}
else {
System.out.print("");
}
}
else { if (variance == -1) {
if (a.iterator().next().getLhsType().getName().equals("A"))
System.out.print("");
if (a.equals(a_next) || (oup.compare(a, a_next) == -1)) {
System.out.print("");
break;
}
else {
System.out.print("");
}
}
else if (variance == 0) {
break;
}
}
}
}
return result;
}
protected boolean isUndefinedPairSet(Set<Set<UnifyPair>> s) {
boolean res = true;
if (s.size() ==1) {
s.iterator().next().stream().forEach(x -> { res = res && x.isUndefinedPair(); return; });
return res;
}
}
/**
* Checks whether a set of pairs is in solved form.
* @param eqPrimePrime The set of pair
* @return True if in solved form, false otherwise.
*/
protected boolean isSolvedForm(Set<UnifyPair> eqPrimePrime) {
for(UnifyPair pair : eqPrimePrime) {
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
if(!(lhsType instanceof PlaceholderType))
return false;
// If operator is not equals, both sides must be placeholders
if(pair.getPairOp() != PairOperator.EQUALSDOT && !(rhsType instanceof PlaceholderType))
return false;
}
return true;
}
/**
* Repeatedly applies type unification rules to a set of equations.
* This is step one of the unification algorithm.
* @return The set of pairs that results from repeated application of the inference rules.
*/
public Set<UnifyPair> applyTypeUnificationRules(Set<UnifyPair> eq, IFiniteClosure fc) {
/*
* Rule Application Strategy:
*
* 1. Swap all pairs and erase all erasable pairs
* 2. Apply all possible rules to a single pair, then move it to the result set.
* Iterating over pairs first, then iterating over rules prevents the application
* of rules to a "finished" pair over and over.
* 2.1 Apply all rules repeatedly except for erase rules. If
* the application of a rule creates new pairs, check immediately
* against the erase rules.
*/
LinkedHashSet<UnifyPair> targetSet = new LinkedHashSet<UnifyPair>();
LinkedList<UnifyPair> eqQueue = new LinkedList<>();
/*
* Swap all pairs and erase all erasable pairs
*/
eq.forEach(x -> swapAddOrErase(x, fc, eqQueue));
/*
* Apply rules until the queue is empty
*/
while(!eqQueue.isEmpty()) {
UnifyPair pair = eqQueue.pollFirst();
// ReduceUp, ReduceLow, ReduceUpLow
Optional<UnifyPair> opt = rules.reduceUpLow(pair);
opt = opt.isPresent() ? opt : rules.reduceLow(pair);
opt = opt.isPresent() ? opt : rules.reduceUp(pair);
opt = opt.isPresent() ? opt : rules.reduceWildcardLow(pair);
opt = opt.isPresent() ? opt : rules.reduceWildcardLowRight(pair);
opt = opt.isPresent() ? opt : rules.reduceWildcardUp(pair);
opt = opt.isPresent() ? opt : rules.reduceWildcardUpRight(pair);
//PL 2018-03-06 auskommentiert muesste falsch sein vgl. JAVA_BSP/Wildcard6.java
//opt = opt.isPresent() ? opt : rules.reduceWildcardLowUp(pair);
//opt = opt.isPresent() ? opt : rules.reduceWildcardUpLow(pair);
//opt = opt.isPresent() ? opt : rules.reduceWildcardLeft(pair);
// Reduce TPH
opt = opt.isPresent() ? opt : rules.reduceTph(pair);
// One of the rules has been applied
if(opt.isPresent()) {
swapAddOrErase(opt.get(), fc, eqQueue);
continue;
}
// Reduce1, Reduce2, ReduceExt, ReduceSup, ReduceEq
//try {
// logFile.write("PAIR1 " + pair + "\n");
// logFile.flush();
//}
//catch (IOException e) { }
Optional<Set<UnifyPair>> optSet = rules.reduce1(pair, fc);
optSet = optSet.isPresent() ? optSet : rules.reduce2(pair);
optSet = optSet.isPresent() ? optSet : rules.reduceExt(pair, fc);
optSet = optSet.isPresent() ? optSet : rules.reduceSup(pair, fc);
optSet = optSet.isPresent() ? optSet : rules.reduceEq(pair);
// ReduceTphExt, ReduceTphSup
optSet = optSet.isPresent() ? optSet : rules.reduceTphExt(pair);
optSet = optSet.isPresent() ? optSet : rules.reduceTphSup(pair);
// FunN Rules
optSet = optSet.isPresent() ? optSet : rules.reduceFunN(pair);
optSet = optSet.isPresent() ? optSet : rules.greaterFunN(pair);
optSet = optSet.isPresent() ? optSet : rules.smallerFunN(pair);
// One of the rules has been applied
if(optSet.isPresent()) {
optSet.get().forEach(x -> swapAddOrErase(x, fc, eqQueue));
continue;
}
// Adapt, AdaptExt, AdaptSup
//try {
// logFile.write("PAIR2 " + pair + "\n");
// logFile.flush();
//}
//catch (IOException e) { }
opt = rules.adapt(pair, fc);
opt = opt.isPresent() ? opt : rules.adaptExt(pair, fc);
opt = opt.isPresent() ? opt : rules.adaptSup(pair, fc);
// One of the rules has been applied
if(opt.isPresent()) {
swapAddOrErase(opt.get(), fc, eqQueue);
continue;
}
// None of the rules has been applied
targetSet.add(pair);
}
return targetSet;
}
/**
* Applies the rule swap to a pair if possible. Then adds the pair to the set if no erase rule applies.
* If an erase rule applies, the pair is not added (erased).
* @param pair The pair to swap and add or erase.
* @param collection The collection to which the pairs are added.
*/
protected void swapAddOrErase(UnifyPair pair, IFiniteClosure fc, Collection<UnifyPair> collection) {
Optional<UnifyPair> opt = rules.swap(pair);
UnifyPair pair2 = opt.isPresent() ? opt.get() : pair;
if(rules.erase1(pair2, fc) || rules.erase3(pair2) || rules.erase2(pair2, fc))
return;
collection.add(pair2);
}
/**
* Splits the equation eq into a set eq1s where both terms are type variables,
* and a set eq2s where one of both terms is not a type variable.
* @param eq Set of pairs to be splitted.
* @param eq1s Subset of eq where both terms are type variables.
* @param eq2s eq/eq1s.
*/
protected void splitEq(Set<UnifyPair> eq, Set<UnifyPair> eq1s, Set<UnifyPair> eq2s) {
for(UnifyPair pair : eq)
if(pair.getLhsType() instanceof PlaceholderType && pair.getRhsType() instanceof PlaceholderType)
eq1s.add(pair);
else
eq2s.add(pair);
}
/**
* Creates sets of pairs specified in the fourth step. Does not calculate cartesian products.
* @param undefined All pairs that did not match one of the 8 cases are added to this set.
* @return The set of the eight cases (without empty sets). Each case is a set, containing sets generated
* from the pairs that matched the case. Each generated set contains singleton sets or sets with few elements
* (as in case 1 where sigma is added to the innermost set).
*/
protected Set<Set<Set<Set<UnifyPair>>>> calculatePairSets(Set<UnifyPair> eq2s, IFiniteClosure fc, Set<UnifyPair> undefined) {
List<Set<Set<Set<UnifyPair>>>> result = new ArrayList<>(8);
// Init all 8 cases
for(int i = 0; i < 8; i++)
result.add(new HashSet<>());
Boolean first = true;
for(UnifyPair pair : eq2s) {
PairOperator pairOp = pair.getPairOp();
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
// Case 1: (a <. Theta')
if(pairOp == PairOperator.SMALLERDOT && lhsType instanceof PlaceholderType) {
//System.out.println(pair);
if (first) { //writeLog(pair.toString()+"\n");
Set<Set<UnifyPair>> x1 = unifyCase1((PlaceholderType) pair.getLhsType(), pair.getRhsType(), (byte)1, fc);
//System.out.println(x1);
result.get(0).add(x1);
}
else {
Set<UnifyPair> s1 = new HashSet<>();
s1.add(pair);
Set<Set<UnifyPair>> s2 = new HashSet<>();
s2.add(s1);
result.get(0).add(s2);
}
}
// Case 2: (a <.? ? ext Theta')
else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType && rhsType instanceof ExtendsType)
if (first) { //writeLog(pair.toString()+"\n");
result.get(1).add(unifyCase2((PlaceholderType) pair.getLhsType(), (ExtendsType) pair.getRhsType(), (byte)0, fc));
}
else {
Set<UnifyPair> s1 = new HashSet<>();
s1.add(pair);
Set<Set<UnifyPair>> s2 = new HashSet<>();
s2.add(s1);
result.get(1).add(s2);
}
// Case 3: (a <.? ? sup Theta')
else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType && rhsType instanceof SuperType)
if (first) { //writeLog(pair.toString()+"\n");
result.get(2).add(unifyCase3((PlaceholderType) lhsType, (SuperType) rhsType, (byte)0, fc));
}
else {
Set<UnifyPair> s1 = new HashSet<>();
s1.add(pair);
Set<Set<UnifyPair>> s2 = new HashSet<>();
s2.add(s1);
result.get(2).add(s2);
}
// Case 4 was replaced by an inference rule
// Case 4: (a <.? Theta')
//else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof PlaceholderType)
// result.get(3).add(unifyCase4((PlaceholderType) lhsType, rhsType, fc));
// Case 5: (Theta <. a)
else if(pairOp == PairOperator.SMALLERDOT && rhsType instanceof PlaceholderType)
if (first) { //writeLog(pair.toString()+"\n");
if (rhsType.getName().equals("A"))
System.out.println();
result.get(4).add(unifyCase5(lhsType, (PlaceholderType) rhsType, (byte)-1, fc));
}
else {
Set<UnifyPair> s1 = new HashSet<>();
s1.add(pair);
Set<Set<UnifyPair>> s2 = new HashSet<>();
s2.add(s1);
result.get(4).add(s2);
}
// Case 6 was replaced by an inference rule.
// Case 6: (? ext Theta <.? a)
//else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof ExtendsType && rhsType instanceof PlaceholderType)
// result.get(5).add(unifyCase6((ExtendsType) lhsType, (PlaceholderType) rhsType, fc));
// Case 7 was replaced by an inference rule
// Case 7: (? sup Theta <.? a)
//else if(pairOp == PairOperator.SMALLERDOTWC && lhsType instanceof SuperType && rhsType instanceof PlaceholderType)
// result.get(6).add(unifyCase7((SuperType) lhsType, (PlaceholderType) rhsType, fc));
// Case 8: (Theta <.? a)
else if(pairOp == PairOperator.SMALLERDOTWC && rhsType instanceof PlaceholderType)
if (first) { //writeLog(pair.toString()+"\n");
result.get(7).add(
unifyCase8(lhsType, (PlaceholderType) rhsType, (byte)0, fc));
}
else {
Set<UnifyPair> s1 = new HashSet<>();
s1.add(pair);
Set<Set<UnifyPair>> s2 = new HashSet<>();
s2.add(s1);
result.get(7).add(s2);
}
// Case unknown: If a pair fits no other case, then the type unification has failed.
// Through application of the rules, every pair should have one of the above forms.
// Pairs that do not have one of the aboves form are contradictory.
else {
// If a pair is not defined, the unificiation will fail, so the loop can be stopped here.
undefined.add(pair);
break;
}
first = false;
}
// Filter empty sets or sets that only contain an empty set.
return result.stream().map(x -> x.stream().filter(y -> y.size() > 0).collect(Collectors.toCollection(HashSet::new)))
.filter(x -> x.size() > 0).collect(Collectors.toCollection(HashSet::new));
}
/**
* Cartesian product Case 1: (a <. Theta')
*/
protected Set<Set<UnifyPair>> unifyCase1(PlaceholderType a, UnifyType thetaPrime, byte variance, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
boolean allGen = thetaPrime.getTypeParams().size() > 0;
for(UnifyType t : thetaPrime.getTypeParams())
if(!(t instanceof PlaceholderType) || !((PlaceholderType) t).isGenerated()) {
allGen = false;
break;
}
Set<UnifyType> cs = fc.getAllTypesByName(thetaPrime.getName());//cs= [java.util.Vector<NP>, java.util.Vector<java.util.Vector<java.lang.Integer>>, ????java.util.Vector<gen_hv>???]
//PL 18-02-06 entfernt, kommt durch unify wieder rein
//cs.add(thetaPrime);
//PL 18-02-06 entfernt
for(UnifyType c : cs) {
//PL 18-02-05 getChildren durch smaller ersetzt in getChildren werden die Varianlen nicht ersetzt.
Set<UnifyType> thetaQs = fc.smaller(c).stream().collect(Collectors.toCollection(HashSet::new));
//Set<UnifyType> thetaQs = fc.getChildren(c).stream().collect(Collectors.toCollection(HashSet::new));
//thetaQs.add(thetaPrime); //PL 18-02-05 wieder geloescht
//PL 2017-10-03: War auskommentiert habe ich wieder einkommentiert,
//da children offensichtlich ein echtes kleiner und kein kleinergleich ist
//PL 18-02-06: eingefuegt, thetaQs der Form V<V<...>> <. V'<V<...>> werden entfernt
thetaQs = thetaQs.stream().filter(ut -> ut.getTypeParams().arePlaceholders()).collect(Collectors.toCollection(HashSet::new));
//PL 18-02-06: eingefuegt
Set<UnifyType> thetaQPrimes = new HashSet<>();
TypeParams cParams = c.getTypeParams();
if(cParams.size() == 0)
thetaQPrimes.add(c);
else {
ArrayList<Set<UnifyType>> candidateParams = new ArrayList<>();
for(UnifyType param : cParams)
candidateParams.add(fc.grArg(param));
for(TypeParams tp : permuteParams(candidateParams))
thetaQPrimes.add(c.setTypeParams(tp));
}
for(UnifyType tqp : thetaQPrimes) {
//System.out.println(tqp.toString());
//i++;
//System.out.println(i);
//if (i == 62)
// System.out.println(tqp.toString());
Optional<Unifier> opt = stdUnify.unify(tqp, thetaPrime);
if (!opt.isPresent())
continue;
Unifier unifier = opt.get();
unifier.swapPlaceholderSubstitutions(thetaPrime.getTypeParams());
Set<UnifyPair> substitutionSet = new HashSet<>();
for (Entry<PlaceholderType, UnifyType> sigma : unifier)
substitutionSet.add(new UnifyPair(sigma.getKey(), sigma.getValue(), PairOperator.EQUALSDOT));
//List<UnifyType> freshTphs = new ArrayList<>(); PL 18-02-06 in die For-Schleife verschoben
for (UnifyType tq : thetaQs) {
Set<UnifyType> smaller = fc.smaller(unifier.apply(tq));
for(UnifyType theta : smaller) {
List<UnifyType> freshTphs = new ArrayList<>();
Set<UnifyPair> resultPrime = new HashSet<>();
for(int i = 0; !allGen && i < theta.getTypeParams().size(); i++) {
if(freshTphs.size()-1 < i)
freshTphs.add(PlaceholderType.freshPlaceholder());
resultPrime.add(new UnifyPair(freshTphs.get(i), theta.getTypeParams().get(i), PairOperator.SMALLERDOTWC));
}
if(allGen)
resultPrime.add(new UnifyPair(a, theta, PairOperator.EQUALSDOT));
else
resultPrime.add(new UnifyPair(a, theta.setTypeParams(new TypeParams(freshTphs.toArray(new UnifyType[0]))), PairOperator.EQUALSDOT));
resultPrime.addAll(substitutionSet);
//writeLog("Substitution: " + substitutionSet.toString());
resultPrime = resultPrime.stream().map(x -> { x.setVariance(variance); return x;}).collect(Collectors.toCollection(HashSet::new));
result.add(resultPrime);
//writeLog("Result: " + resultPrime.toString());
//writeLog("MAX: " + oup.max(resultPrime.iterator()).toString());
}
}
}
}
return result;
}
/**
* Cartesian Product Case 2: (a <.? ? ext Theta')
*/
private Set<Set<UnifyPair>> unifyCase2(PlaceholderType a, ExtendsType extThetaPrime, byte variance, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
UnifyType aPrime = PlaceholderType.freshPlaceholder();
UnifyType extAPrime = new ExtendsType(aPrime);
UnifyType thetaPrime = extThetaPrime.getExtendedType();
Set<UnifyPair> resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, thetaPrime, PairOperator.SMALLERDOT));
result.add(resultPrime);
resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, extAPrime, PairOperator.EQUALSDOT));
resultPrime.add(new UnifyPair(aPrime, thetaPrime, PairOperator.SMALLERDOT));
resultPrime = resultPrime.stream().map(x -> { x.setVariance(variance); return x;}).collect(Collectors.toCollection(HashSet::new));
result.add(resultPrime);
//writeLog("Result: " + resultPrime.toString());
return result;
}
/**
* Cartesian Product Case 3: (a <.? ? sup Theta')
*/
private Set<Set<UnifyPair>> unifyCase3(PlaceholderType a, SuperType subThetaPrime, byte variance, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
UnifyType aPrime = PlaceholderType.freshPlaceholder();
UnifyType supAPrime = new SuperType(aPrime);
UnifyType thetaPrime = subThetaPrime.getSuperedType();
Set<UnifyPair> resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(thetaPrime, a, PairOperator.SMALLERDOT));
result.add(resultPrime);
//writeLog(resultPrime.toString());
resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, supAPrime, PairOperator.EQUALSDOT));
resultPrime.add(new UnifyPair(thetaPrime, aPrime, PairOperator.SMALLERDOT));
resultPrime = resultPrime.stream().map(x -> { x.setVariance(variance); return x;}).collect(Collectors.toCollection(HashSet::new));
result.add(resultPrime);
//writeLog(resultPrime.toString());
return result;
}
/**
* Cartesian Product Case 5: (Theta <. a)
*/
private Set<Set<UnifyPair>> unifyCase5(UnifyType theta, PlaceholderType a, byte variance, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
boolean allGen = theta.getTypeParams().size() > 0;
for(UnifyType t : theta.getTypeParams())
if(!(t instanceof PlaceholderType) || !((PlaceholderType) t).isGenerated()) {
allGen = false;
break;
}
for(UnifyType thetaS : fc.greater(theta)) {
Set<UnifyPair> resultPrime = new HashSet<>();
UnifyType[] freshTphs = new UnifyType[thetaS.getTypeParams().size()];
for(int i = 0; !allGen && i < freshTphs.length; i++) {
freshTphs[i] = PlaceholderType.freshPlaceholder();
resultPrime.add(new UnifyPair(thetaS.getTypeParams().get(i), freshTphs[i], PairOperator.SMALLERDOTWC));
}
if(allGen)
resultPrime.add(new UnifyPair(a, thetaS, PairOperator.EQUALSDOT));
else
resultPrime.add(new UnifyPair(a, thetaS.setTypeParams(new TypeParams(freshTphs)), PairOperator.EQUALSDOT));
resultPrime = resultPrime.stream().map(x -> { x.setVariance(variance); return x;}).collect(Collectors.toCollection(HashSet::new));
result.add(resultPrime);
//writeLog(resultPrime.toString());
}
return result;
}
/**
* Cartesian Product Case 8: (Theta <.? a)
*/
private Set<Set<UnifyPair>> unifyCase8(UnifyType theta, PlaceholderType a, byte variance, IFiniteClosure fc) {
Set<Set<UnifyPair>> result = new HashSet<>();
//for(UnifyType thetaS : fc.grArg(theta)) {
Set<UnifyPair> resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, theta, PairOperator.EQUALSDOT));
result.add(resultPrime);
//writeLog(resultPrime.toString());
UnifyType freshTph = PlaceholderType.freshPlaceholder();
resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, new ExtendsType(freshTph), PairOperator.EQUALSDOT));
resultPrime.add(new UnifyPair(theta, freshTph, PairOperator.SMALLERDOT));
result.add(resultPrime);
//writeLog(resultPrime.toString());
resultPrime = new HashSet<>();
resultPrime.add(new UnifyPair(a, new SuperType(freshTph), PairOperator.EQUALSDOT));
resultPrime.add(new UnifyPair(freshTph, theta, PairOperator.SMALLERDOT));
resultPrime = resultPrime.stream().map(x -> { x.setVariance(variance); return x;}).collect(Collectors.toCollection(HashSet::new));
result.add(resultPrime);
//writeLog(resultPrime.toString());
//}
return result;
}
/**
* Takes a set of candidates for each position and computes all possible permutations.
* @param candidates The length of the list determines the number of type params. Each set
* contains the candidates for the corresponding position.
*/
protected Set<TypeParams> permuteParams(ArrayList<Set<UnifyType>> candidates) {
Set<TypeParams> result = new HashSet<>();
permuteParams(candidates, 0, result, new UnifyType[candidates.size()]);
return result;
}
/**
* Takes a set of candidates for each position and computes all possible permutations.
* @param candidates The length of the list determines the number of type params. Each set
* contains the candidates for the corresponding position.
* @param idx Idx for the current permutatiton.
* @param result Set of all permutations found so far
* @param current The permutation of type params that is currently explored
*/
private void permuteParams(ArrayList<Set<UnifyType>> candidates, int idx, Set<TypeParams> result, UnifyType[] current) {
if(candidates.size() == idx) {
result.add(new TypeParams(Arrays.copyOf(current, current.length)));
return;
}
Set<UnifyType> localCandidates = candidates.get(idx);
for(UnifyType t : localCandidates) {
current[idx] = t;
permuteParams(candidates, idx+1, result, current);
}
}
void writeLog(String str) {
try {
logFile.write(str+"\n");
logFile.flush();
}
catch (IOException e) { }
}
}
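For orientation, applyTypeUnificationRules above drives all rewrite rules through a work queue plus Optional chaining: each pair is tried against the rules in a fixed order, the first applicable rule wins, and its output is pushed back onto the queue until no rule applies any more. Below is a small self-contained sketch of just that control pattern; the two rules (tryReduce, trySwap) are invented for illustration and are not the compiler's RuleSet.

    import java.util.ArrayDeque;
    import java.util.Deque;
    import java.util.LinkedHashSet;
    import java.util.Optional;
    import java.util.Set;

    public class RuleLoopSketch {
        record Pair(String lhs, String rhs) {}

        // invented rule: strip one array level from both sides, if present
        static Optional<Pair> tryReduce(Pair p) {
            return p.lhs().endsWith("[]") && p.rhs().endsWith("[]")
                    ? Optional.of(new Pair(p.lhs().replace("[]", ""), p.rhs().replace("[]", "")))
                    : Optional.empty();
        }

        // invented rule: order the two sides lexicographically
        static Optional<Pair> trySwap(Pair p) {
            return p.lhs().compareTo(p.rhs()) > 0
                    ? Optional.of(new Pair(p.rhs(), p.lhs()))
                    : Optional.empty();
        }

        static Set<Pair> applyRules(Set<Pair> eq) {
            Deque<Pair> queue = new ArrayDeque<>(eq);
            Set<Pair> finished = new LinkedHashSet<>();
            while (!queue.isEmpty()) {
                Pair pair = queue.pollFirst();
                Optional<Pair> opt = tryReduce(pair);          // first rule
                opt = opt.isPresent() ? opt : trySwap(pair);   // next rule, only if none applied yet
                if (opt.isPresent()) {
                    queue.addLast(opt.get());                  // re-examine the rewritten pair
                } else {
                    finished.add(pair);                        // no rule applies: pair is done
                }
            }
            return finished;
        }

        public static void main(String[] args) {
            System.out.println(applyRules(Set.of(new Pair("B[]", "A[]"))));  // [Pair[lhs=A, rhs=B]]
        }
    }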

@@ -1,117 +0,0 @@
package de.dhbwstuttgart.typeinference.unify.model;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
* A pair which contains two types and an operator, e.q. (Integer <. a).
* @author Florian Steurer
*/
public class UnifyPair {
/**
* The type on the left hand side of the pair.
*/
private final UnifyType lhs;
/**
* The type on the right hand side of the pair.
*/
private final UnifyType rhs;
/**
* The operator that determines the relation between the left and right hand side type.
*/
private PairOperator pairOp;
private byte variance = 0;
private boolean undefinedPair = false;
private final int hashCode;
/**
* Creates a new instance of the pair.
* @param lhs The type on the left hand side of the pair.
* @param rhs The type on the right hand side of the pair.
* @param op The operator that determines the relation between the left and right hand side type.
*/
public UnifyPair(UnifyType lhs, UnifyType rhs, PairOperator op) {
this.lhs = lhs;
this.rhs = rhs;
pairOp = op;
// Caching hashcode
hashCode = 17 + 31 * lhs.hashCode() + 31 * rhs.hashCode() + 31 * pairOp.hashCode();
}
/**
* Returns the type on the left hand side of the pair.
*/
public UnifyType getLhsType() {
return lhs;
}
/**
* Returns the type on the right hand side of the pair.
*/
public UnifyType getRhsType() {
return rhs;
}
/**
* Returns the operator that determines the relation between the left and right hand side type.
*/
public PairOperator getPairOp() {
return pairOp;
}
public byte getVariance() {
return variance;
}
public void setVariance(byte v) {
variance = v;
}
public boolean isUndefinedPair() {
return undefinedPair;
}
@Override
public boolean equals(Object obj) {
if(!(obj instanceof UnifyPair))
return false;
if(obj.hashCode() != this.hashCode())
return false;
UnifyPair other = (UnifyPair) obj;
return other.getPairOp() == pairOp
&& other.getLhsType().equals(lhs)
&& other.getRhsType().equals(rhs);
}
@Override
public int hashCode() {
return hashCode;
}
@Override
public String toString() {
return "(" + lhs + " " + pairOp + " " + rhs + ")";
}
/*
public List<? extends PlaceholderType> getInvolvedPlaceholderTypes() {
ArrayList<PlaceholderType> ret = new ArrayList<>();
ret.addAll(lhs.getInvolvedPlaceholderTypes());
ret.addAll(rhs.getInvolvedPlaceholderTypes());
return ret;
}
*/
}
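One detail of the class above that is easy to miss: equals and hashCode only take lhs, rhs and pairOp into account, not variance or undefinedPair, so two pairs that differ only in variance collapse to a single element in a Set. A small fragment, assuming tphA and tInteger are some UnifyType instances:

    Set<UnifyPair> pairs = new HashSet<>();
    UnifyPair p = new UnifyPair(tphA, tInteger, PairOperator.SMALLERDOT);
    UnifyPair q = new UnifyPair(tphA, tInteger, PairOperator.SMALLERDOT);
    q.setVariance((byte) 1);   // differs from p only in its variance
    pairs.add(p);
    pairs.add(q);
    // pairs.size() == 1: q is equal to p under equals/hashCode and is not added again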

@@ -147,7 +147,23 @@ public class BytecodeGen implements ASTVisitor {
if(!tphExtractor.allTPHS.get(t))
tphsClass.add(t);
}
ArrayList<TPHConstraint> consClass = new ArrayList<>();
for(TPHConstraint cons : tphExtractor.allCons) {
TypePlaceholder right = null;
for(TypePlaceholder tph : tphsClass) {
if(cons.getLeft().equals(tph.getName())) {
consClass.add(cons);
right = getTPH(cons.getRight());
}
}
if(right != null) {
tphsClass.add(right);
removeFromMethod(right.getName());
right = null;
}
}
String sig = null;
/* if class has generics then creates signature
* Signature looks like:
@@ -156,23 +172,6 @@ public class BytecodeGen implements ASTVisitor {
if(classOrInterface.getGenerics().iterator().hasNext() || !commonPairs.isEmpty() ||
classOrInterface.getSuperClass().acceptTV(new TypeToSignature()).contains("<")
|| !tphsClass.isEmpty()) {
HashMap<TPHConstraint, HashSet<String>> constraints = Simplify.simplifyConstraintsClass(tphExtractor,tphsClass);
ArrayList<TPHConstraint> consClass = new ArrayList<>();
for(TPHConstraint cons : constraints.keySet()) {
TypePlaceholder right = null;
for(TypePlaceholder tph : tphsClass) {
if(cons.getLeft().equals(tph.getName())) {
consClass.add(cons);
right = getTPH(cons.getRight());
}
}
if(right != null) {
tphsClass.add(right);
removeFromMethod(right.getName());
right = null;
}
}
Signature signature = new Signature(classOrInterface, genericsAndBounds,commonPairs,tphsClass, consClass);
sig = signature.toString();
System.out.println("Signature: => " + sig);

@@ -5,19 +5,13 @@ package de.dhbwstuttgart.bytecode;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import de.dhbwstuttgart.bytecode.constraint.ExtendsConstraint;
import de.dhbwstuttgart.bytecode.constraint.TPHConstraint;
import de.dhbwstuttgart.bytecode.constraint.TPHConstraint.Relation;
import de.dhbwstuttgart.bytecode.utilities.MethodAndTPH;
import de.dhbwstuttgart.syntaxtree.AbstractASTWalker;
import de.dhbwstuttgart.syntaxtree.Constructor;
import de.dhbwstuttgart.syntaxtree.FormalParameter;
import de.dhbwstuttgart.syntaxtree.Method;
import de.dhbwstuttgart.syntaxtree.ParameterList;
import de.dhbwstuttgart.syntaxtree.statement.LocalVar;
import de.dhbwstuttgart.syntaxtree.statement.LocalVarDecl;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import de.dhbwstuttgart.typeinference.result.GenericInsertPair;
import de.dhbwstuttgart.typeinference.result.ResultSet;
@@ -26,42 +20,36 @@ import de.dhbwstuttgart.typeinference.result.ResultSet;
* @author Fayez Abu Alia
*
*/
public class TPHExtractor extends AbstractASTWalker {
public class TPHExtractor extends AbstractASTWalker{
// Alle TPHs der Felder werden iKopf der Klasse definiert
// alle TPHs der Klasse: (TPH, is in Method?)
final HashMap<TypePlaceholder, Boolean> allTPHS = new HashMap<>();
final HashMap<TypePlaceholder,Boolean> allTPHS = new HashMap<>();
MethodAndTPH methodAndTph;
Boolean inMethod = false;
boolean inLocalOrParam = false;
public final ArrayList<MethodAndTPH> ListOfMethodsAndTph = new ArrayList<>();
final ArrayList<GenericInsertPair> allPairs = new ArrayList<>();
public final ArrayList<TPHConstraint> allCons = new ArrayList<>();
private ResultSet resultSet;
public TPHExtractor() {
}
public void setResultSet(ResultSet resultSet) {
this.resultSet = resultSet;
}
@Override
public void visit(TypePlaceholder tph) {
if (resultSet.resolveType(tph).resolvedType instanceof TypePlaceholder) {
if(resultSet.resolveType(tph).resolvedType instanceof TypePlaceholder) {
TypePlaceholder resolvedTPH = (TypePlaceholder) resultSet.resolveType(tph).resolvedType;
if (inMethod) {
if(inMethod)
methodAndTph.getTphs().add(resolvedTPH.getName());
if (inLocalOrParam)
methodAndTph.getLocalTphs().add(resolvedTPH.getName());
}
allTPHS.put(resolvedTPH, inMethod);
resultSet.resolveType(tph).additionalGenerics.forEach(ag -> {
if (ag.contains(resolvedTPH) && ag.TA1.equals(resolvedTPH) && !contains(allPairs, ag)) {
if (inMethod)
allTPHS.put(resolvedTPH,inMethod);
resultSet.resolveType(tph).additionalGenerics.forEach(ag ->{
if(ag.contains(resolvedTPH)&&ag.TA1.equals(resolvedTPH)&&!contains(allPairs,ag)) {
if(inMethod)
methodAndTph.getPairs().add(ag);
allPairs.add(ag);
TPHConstraint con = new ExtendsConstraint(ag.TA1.getName(), ag.TA2.getName(), Relation.EXTENDS);
@@ -70,16 +58,14 @@ public class TPHExtractor extends AbstractASTWalker {
});
}
}
private boolean contains(ArrayList<GenericInsertPair> pairs, GenericInsertPair genPair) {
for (int i = 0; i < pairs.size(); ++i) {
for(int i=0; i<pairs.size();++i) {
GenericInsertPair p = pairs.get(i);
if (p.TA1.equals(genPair.TA1) && p.TA2.equals(genPair.TA2))
if(p.TA1.equals(genPair.TA1) && p.TA2.equals(genPair.TA2))
return true;
}
return false;
}
@Override
public void visit(Method method) {
inMethod = true;
@@ -88,33 +74,5 @@ public class TPHExtractor extends AbstractASTWalker {
inMethod = false;
ListOfMethodsAndTph.add(methodAndTph);
}
@Override
public void visit(Constructor cons) {
inMethod = false;
super.visit(cons);
inMethod = true;
}
@Override
public void visit(LocalVarDecl localVarDecl) {
inLocalOrParam = true;
super.visit(localVarDecl);
inLocalOrParam = false;
}
@Override
public void visit(LocalVar localVar) {
inLocalOrParam = true;
super.visit(localVar);
inLocalOrParam = false;
}
@Override
public void visit(ParameterList formalParameters) {
inLocalOrParam = true;
super.visit(formalParameters);
inLocalOrParam = false;
}
}
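The visitor overrides shown above all follow the same enter/visit/reset pattern: set a context flag, delegate to super.visit(...) so the children are walked, then clear the flag again; visit(TypePlaceholder) consumes the flags. A self-contained sketch of that pattern with invented types (the real walker is AbstractASTWalker):

    public class ContextFlagSketch {
        boolean inLocalOrParam = false;

        // corresponds to visit(LocalVarDecl)/visit(LocalVar)/visit(ParameterList) above
        void visitLocalVarDecl(Runnable visitChildren) {
            inLocalOrParam = true;    // enter local/parameter context
            visitChildren.run();      // stands in for super.visit(localVarDecl)
            inLocalOrParam = false;   // leave the context again
        }

        public static void main(String[] args) {
            ContextFlagSketch walker = new ContextFlagSketch();
            walker.visitLocalVarDecl(() ->
                    System.out.println("inLocalOrParam while visiting children: " + walker.inLocalOrParam));
        }
    }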

@@ -391,7 +391,7 @@ public class Signature {
}
for(TPHConstraint cons : consClass) {
if(!types.contains(cons.getRight()) && !genericsAndBounds.keySet().contains(cons.getRight()+"$")) {
if(!types.contains(cons.getRight())) {
String t = cons.getRight()+"$";
String bound = Type.getInternalName(Object.class);
sw.visitFormalTypeParameter(t);

@@ -1,51 +0,0 @@
package de.dhbwstuttgart.bytecode.utilities;
import java.util.ArrayList;
import java.util.List;
import de.dhbwstuttgart.bytecode.constraint.TPHConstraint;
import de.dhbwstuttgart.bytecode.constraint.TPHConstraint.Relation;
public class ConstraintsFinder {
private List<TPHConstraint> allConstaints;
public ConstraintsFinder(List<TPHConstraint> allConstaints) {
super();
this.allConstaints = allConstaints;
}
public List<List<TPHConstraint>> findConstraints() {
List<List<TPHConstraint>> result = new ArrayList<>();
List<TPHConstraint> visitedCons = new ArrayList<>();
for(TPHConstraint c : allConstaints) {
if(c.getRel() == Relation.EXTENDS) {
// get constraints with the same left side
List<TPHConstraint> cons = getConstraints(c,visitedCons);
if(cons.size()>1)
result.add(cons);
}
}
return result;
}
private List<TPHConstraint> getConstraints(TPHConstraint c, List<TPHConstraint> visitedCons) {
List<TPHConstraint> res = new ArrayList<>();
for(TPHConstraint cons : allConstaints) {
if(!isVisited(cons,visitedCons) && cons.getLeft().equals(c.getLeft())) {
res.add(cons);
visitedCons.add(cons);
}
}
return res;
}
private boolean isVisited(TPHConstraint cons, List<TPHConstraint> visitedCons) {
for(TPHConstraint c : visitedCons) {
if(c.getLeft().equals(cons.getLeft()) && c.getRight().equals(cons.getRight()))
return true;
}
return false;
}
}
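A hedged usage sketch of the ConstraintsFinder above: only groups of at least two EXTENDS constraints sharing a left-hand side are returned. The ExtendsConstraint constructor used here is the (left, right, relation) form that appears in TPHExtractor in this compare; the values are illustrative, not a test from the repository.

    List<TPHConstraint> all = new ArrayList<>();
    all.add(new ExtendsConstraint("A", "B", Relation.EXTENDS));
    all.add(new ExtendsConstraint("A", "C", Relation.EXTENDS));
    all.add(new ExtendsConstraint("D", "E", Relation.EXTENDS));
    List<List<TPHConstraint>> groups = new ConstraintsFinder(all).findConstraints();
    // groups holds exactly one list, [A extends B, A extends C];
    // the single D-constraint is skipped because its group has size 1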

@@ -10,8 +10,6 @@ public class MethodAndTPH {
private String name;
private final ArrayList<String> tphs = new ArrayList<>();
private final ArrayList<GenericInsertPair> pairs = new ArrayList<>();
// tphs of local variables and parameters
private final ArrayList<String> localTphs = new ArrayList<>();
public MethodAndTPH(String name) {
this.name = name;
@@ -28,9 +26,4 @@ public class MethodAndTPH {
public String getName() {
return name;
}
public ArrayList<String> getLocalTphs() {
return localTphs;
}
}

@@ -1,49 +0,0 @@
package de.dhbwstuttgart.bytecode.utilities;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import de.dhbwstuttgart.bytecode.constraint.TPHConstraint;
import de.dhbwstuttgart.syntaxtree.factory.NameGenerator;
public class NameReplacer {
private List<TPHConstraint> constraints;
private List<TPHConstraint> allConstraints;
private List<String> tphs;
private List<String> localTphs;
public NameReplacer(List<TPHConstraint> constraints, List<TPHConstraint> allConstraints,List<String> tphs, ArrayList<String> localTphs) {
super();
this.constraints = constraints;
this.allConstraints = allConstraints;
this.tphs = tphs;
this.localTphs = localTphs;
}
public Map<String, List<String>> replaceNames() {
String newName = NameGenerator.makeNewName();
ArrayList<String> names = new ArrayList<>();
for(TPHConstraint cons : constraints) {
names.add(cons.getRight());
cons.setRight(newName);
}
for(TPHConstraint cons : allConstraints) {
if(names.contains(cons.getLeft()))
cons.setLeft(newName);
if(names.contains(cons.getRight()))
cons.setRight(newName);
}
tphs.removeAll(names);
tphs.add(newName);
localTphs.removeAll(names);
localTphs.add(newName);
HashMap<String, List<String>> res = new HashMap<>();
res.put(newName, names);
return res;
}
}
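Similarly, a hedged sketch of what the NameReplacer above does to such a group: every right-hand side is renamed to one fresh name from NameGenerator, the old names are rewritten wherever they occur in allConstraints, dropped from tphs/localTphs, and reported in the returned map. The concrete values are illustrative only.

    List<TPHConstraint> group = new ArrayList<>(List.of(
            new ExtendsConstraint("A", "B", Relation.EXTENDS),
            new ExtendsConstraint("A", "C", Relation.EXTENDS)));
    List<TPHConstraint> allCons = new ArrayList<>(group);
    List<String> tphs = new ArrayList<>(List.of("A", "B", "C"));
    ArrayList<String> localTphs = new ArrayList<>(List.of("B", "C"));
    Map<String, List<String>> renamed =
            new NameReplacer(group, allCons, tphs, localTphs).replaceNames();
    // if the fresh name happens to be "N": both constraints now read A extends N,
    // tphs == [A, N], localTphs == [N], and renamed == {N=[B, C]}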

File diff suppressed because it is too large.

@@ -34,7 +34,6 @@ import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.UnifyResultListener;
import de.dhbwstuttgart.typeinference.unify.UnifyResultListenerImpl;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
@@ -42,20 +41,17 @@ import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.antlr.v4.parse.ANTLRParser.throwsSpec_return;
//import org.apache.commons.io.output.NullOutputStream;
public class JavaTXCompiler {
final CompilationEnvironment environment;
Boolean resultmodel = false;
Boolean resultmodel = true;
public final Map<File, SourceFile> sourceFiles = new HashMap<>();
Boolean log = true; //gibt an ob ein Log-File nach System.getProperty("user.dir")+"src/test/java/logFiles" geschrieben werden soll?
@@ -278,7 +274,7 @@ public class JavaTXCompiler {
}
*/
public UnifyResultModel typeInferenceAsync(UnifyResultListener resultListener, Writer logFile) throws ClassNotFoundException {
public UnifyResultModel typeInferenceAsync() throws ClassNotFoundException {
List<ClassOrInterface> allClasses = new ArrayList<>();//environment.getAllAvailableClasses();
//Alle Importierten Klassen in allen geparsten Sourcefiles kommen ins FC
for(SourceFile sf : this.sourceFiles.values()) {
@@ -288,14 +284,12 @@ public class JavaTXCompiler {
final ConstraintSet<Pair> cons = getConstraints();
Set<Set<UnifyPair>> results = new HashSet<>();
UnifyResultModel urm = null;
//urm.addUnifyResultListener(resultListener);
UnifyResultModel urm = new UnifyResultModel();
try {
logFile = logFile == null ? new FileWriter(new File("log_"+sourceFiles.keySet().iterator().next().getName())) : logFile;
FileWriter logFile = new FileWriter(new File(System.getProperty("user.dir")+"/src/test/java/logFiles/"+"log_"+sourceFiles.keySet().iterator().next().getName()));
IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses,logFile);
System.out.println(finiteClosure);
urm = new UnifyResultModel(cons, finiteClosure);
urm.addUnifyResultListener(resultListener);
ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(cons);
Function<UnifyPair, UnifyPair> distributeInnerVars =
@@ -407,7 +401,7 @@ public class JavaTXCompiler {
}
return ret;
}).collect(Collectors.toCollection(ArrayList::new));
unify.unifyAsync(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, urm);
unify.unifyAsync(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, cons, urm);
}
catch (IOException e) {
System.err.println("kein LogFile");
@@ -427,8 +421,8 @@ public class JavaTXCompiler {
final ConstraintSet<Pair> cons = getConstraints();
Set<Set<UnifyPair>> results = new HashSet<>();
try {
Writer logFile = //new OutputStreamWriter(new NullOutputStream());
new FileWriter(new File(System.getProperty("user.dir")+"/src/test/java/logFiles/"+"log_"+sourceFiles.keySet().iterator().next().getName()));
FileWriter logFile = new FileWriter(new File(System.getProperty("user.dir")+"/src/test/java/logFiles/"+"log_"+sourceFiles.keySet().iterator().next().getName()));
IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses,logFile);
System.out.println(finiteClosure);
ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(cons);
@@ -544,10 +538,10 @@ public class JavaTXCompiler {
}).collect(Collectors.toCollection(ArrayList::new));
if (resultmodel) {
/* UnifyResultModel Anfang */
UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
UnifyResultModel urm = new UnifyResultModel();
UnifyResultListenerImpl li = new UnifyResultListenerImpl();
urm.addUnifyResultListener(li);
unify.unifyParallel(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, urm);
unify.unifyParallel(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, cons, urm);
System.out.println("RESULT Final: " + li.getResults());
logFile.write("RES_FINAL: " + li.getResults().toString()+"\n");
logFile.flush();
@@ -555,8 +549,8 @@ public class JavaTXCompiler {
}
/* UnifyResultModel End */
else {
Set<Set<UnifyPair>> result = unify.unify(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons, finiteClosure));
//Set<Set<UnifyPair>> result = unify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons, finiteClosure));
Set<Set<UnifyPair>> result = unify.unify(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, cons);
//Set<Set<UnifyPair>> result = unify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, cons);
System.out.println("RESULT: " + result);
logFile.write("RES: " + result.toString()+"\n");
logFile.flush();
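One of the two typeInferenceAsync signatures shown above takes an explicit UnifyResultListener plus a Writer for the log file and registers the listener on a UnifyResultModel built from the constraint set and finite closure. Assuming that variant, a usage fragment could look like this ('compiler' stands for an already constructed JavaTXCompiler; the file name is made up):

    UnifyResultListenerImpl listener = new UnifyResultListenerImpl();
    Writer logFile = new FileWriter(new File("log_Example.jav"));
    UnifyResultModel urm = compiler.typeInferenceAsync(listener, logFile);
    // listener.getResults() fills up asynchronously while unifyAsync(...) produces result sets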

@@ -3,37 +3,31 @@ package de.dhbwstuttgart.parser.SyntaxTreeGenerator;
import de.dhbwstuttgart.exceptions.NotImplementedException;
import de.dhbwstuttgart.exceptions.TypeinferenceException;
import de.dhbwstuttgart.parser.antlr.Java8Parser;
import de.dhbwstuttgart.parser.antlr.Java8Parser.UnannClassType_lfno_unannClassOrInterfaceTypeContext;
import de.dhbwstuttgart.parser.scope.GenericsRegistry;
import de.dhbwstuttgart.parser.scope.JavaClassName;
import de.dhbwstuttgart.parser.scope.JavaClassRegistry;
import de.dhbwstuttgart.syntaxtree.GenericDeclarationList;
import de.dhbwstuttgart.syntaxtree.GenericTypeVar;
import de.dhbwstuttgart.syntaxtree.factory.ASTFactory;
import de.dhbwstuttgart.syntaxtree.type.ExtendsWildcardType;
import de.dhbwstuttgart.syntaxtree.type.GenericRefType;
import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
import de.dhbwstuttgart.syntaxtree.type.SuperWildcardType;
import de.dhbwstuttgart.syntaxtree.type.TypePlaceholder;
import org.antlr.v4.runtime.Token;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class TypeGenerator {
public static RefTypeOrTPHOrWildcardOrGeneric convert(Java8Parser.UnannClassOrInterfaceTypeContext unannClassOrInterfaceTypeContext, JavaClassRegistry reg, GenericsRegistry generics) {
Java8Parser.TypeArgumentsContext arguments=null;
/* PL 2019-03-19 auskommentiert ANFANG
Java8Parser.TypeArgumentsContext arguments;
if(unannClassOrInterfaceTypeContext.unannClassType_lfno_unannClassOrInterfaceType() != null){
arguments = unannClassOrInterfaceTypeContext.unannClassType_lfno_unannClassOrInterfaceType().typeArguments();
}else{// if(unannClassOrInterfaceTypeContext.unannInterfaceType_lfno_unannClassOrInterfaceType() != null){
arguments = unannClassOrInterfaceTypeContext.unannInterfaceType_lfno_unannClassOrInterfaceType().unannClassType_lfno_unannClassOrInterfaceType().typeArguments();
}
PL 2019-03-19 auskommentiert ENDE */
/**
* Problem sind hier die verschachtelten Typen mit verschachtelten Typargumenten
* Beispiel: Typ<String>.InnererTyp<Integer>
@@ -41,28 +35,6 @@ public class TypeGenerator {
String name = unannClassOrInterfaceTypeContext.getText();
if(name.contains("<")){
name = name.split("<")[0]; //Der Typ ist alles vor den ersten Argumenten
/* Fuer Debug-Zwecke
unannClassOrInterfaceTypeContext.unannInterfaceType_lfno_unannClassOrInterfaceType();
unannClassOrInterfaceTypeContext.unannClassType_lfno_unannClassOrInterfaceType().getText();
//unannClassOrInterfaceTypeContext.unannInterfaceType_lfno_unannClassOrInterfaceType().unannClassType_lfno_unannClassOrInterfaceType().typeArguments();
//UnannClassType_lfno_unannClassOrInterfaceTypeContext
unannClassOrInterfaceTypeContext.unannClassType_lf_unannClassOrInterfaceType(0);
unannClassOrInterfaceTypeContext.unannClassType_lf_unannClassOrInterfaceType(0).getText();
unannClassOrInterfaceTypeContext.unannClassType_lf_unannClassOrInterfaceType(1);
unannClassOrInterfaceTypeContext.unannClassType_lf_unannClassOrInterfaceType(1).getText();
unannClassOrInterfaceTypeContext.unannClassType_lfno_unannClassOrInterfaceType().getText();
//unannClassOrInterfaceTypeContext.unannInterfaceType_lf_unannClassOrInterfaceType();
//unannClassOrInterfaceTypeContext.unannInterfaceType_lf_unannClassOrInterfaceType(0).getText();
//unannClassOrInterfaceTypeContext.unannInterfaceType_lf_unannClassOrInterfaceType(1).getText();
//unannClassOrInterfaceTypeContext.unannInterfaceType_lfno_unannClassOrInterfaceType().getText();
*/
int lastElement = new ArrayList<>(unannClassOrInterfaceTypeContext.unannClassType_lf_unannClassOrInterfaceType()).size()-1;
if (lastElement >=0) {//qualifizierter Name z.B.: java.util.Vector
arguments = unannClassOrInterfaceTypeContext.unannClassType_lf_unannClassOrInterfaceType(lastElement).typeArguments();
}
else { //unqualifizierter Name z.B.: Vector
arguments = unannClassOrInterfaceTypeContext.unannClassType_lfno_unannClassOrInterfaceType().typeArguments();
}
}
return convertTypeName(name, arguments, unannClassOrInterfaceTypeContext.getStart(), reg, generics);
}
@@ -160,18 +132,6 @@ public class TypeGenerator {
throw new NotImplementedException();
}
}
public static RefTypeOrTPHOrWildcardOrGeneric convert(Java8Parser.WildcardContext wildcardContext, JavaClassRegistry reg, GenericsRegistry generics) {
if(wildcardContext.wildcardBounds() != null){
if(wildcardContext.wildcardBounds().getText().substring(0, 7).equals("extends")){
return new ExtendsWildcardType(convert(wildcardContext.wildcardBounds().referenceType(), reg, generics), wildcardContext.getStart());
}else{
return new SuperWildcardType(convert(wildcardContext.wildcardBounds().referenceType(), reg, generics), wildcardContext.getStart());
}
}else{
throw new NotImplementedException(); //Wildcard ohne Bound
}
}
public static RefTypeOrTPHOrWildcardOrGeneric convertTypeName(String name, Token offset, JavaClassRegistry reg, GenericsRegistry generics){
return convertTypeName(name, null, offset, reg, generics);
@@ -179,17 +139,12 @@ public class TypeGenerator {
public static RefTypeOrTPHOrWildcardOrGeneric convertTypeName(
String name, Java8Parser.TypeArgumentsContext typeArguments, Token offset, JavaClassRegistry reg, GenericsRegistry generics){
if(!reg.contains(name)){ //Dann könnte es ein generischer Type oder ein FunN$$-Type sein
if(!reg.contains(name)){ //Dann könnte es ein Generische Type sein
if(generics.contains(name)){
return new GenericRefType(name, offset);
}else{
Pattern p = Pattern.compile("Fun(\\d+)[$][$]");
Matcher m = p.matcher(name);
if (m.matches()) {//es ist FunN$$-Type
return new RefType(new JavaClassName(name), convert(typeArguments, reg, generics), offset);
} else {
throw new TypeinferenceException("Der Typ "+ name + " ist nicht vorhanden",offset);
}}
}
}
if(typeArguments == null){
List<RefTypeOrTPHOrWildcardOrGeneric> params = new ArrayList<>();
@@ -207,7 +162,7 @@ public class TypeGenerator {
List<RefTypeOrTPHOrWildcardOrGeneric> ret = new ArrayList<>();
for(Java8Parser.TypeArgumentContext arg : typeArguments.typeArgumentList().typeArgument()){
if(arg.wildcard() != null){
ret.add(convert(arg.wildcard(), reg, generics));
throw new NotImplementedException();
}else{
ret.add(convert(arg.referenceType(), reg, generics));
}

@@ -80,9 +80,6 @@ public abstract class AbstractASTWalker implements ASTVisitor{
for(Field f : classOrInterface.getFieldDecl()){
f.accept(this);
}
for(Constructor c : classOrInterface.getConstructors()){
c.accept(this);
}
for(Method m : classOrInterface.getMethods()){
m.accept(this);
}

@@ -1,7 +1,6 @@
package de.dhbwstuttgart.syntaxtree.factory;
import java.io.FileWriter;
import java.io.Writer;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -31,7 +30,7 @@ public class UnifyTypeFactory {
private static ArrayList<PlaceholderType> PLACEHOLDERS = new ArrayList<>();
public static FiniteClosure generateFC(List<ClassOrInterface> fromClasses, Writer logFile) throws ClassNotFoundException {
public static FiniteClosure generateFC(List<ClassOrInterface> fromClasses, FileWriter logFile) throws ClassNotFoundException {
/*
The transitive closure must work.
It must be possible to write List<A> extends AL<A>
@@ -68,6 +67,9 @@ public class UnifyTypeFactory {
public static UnifyType convert(RefTypeOrTPHOrWildcardOrGeneric t, Boolean innerType){
if(t instanceof GenericRefType){
return UnifyTypeFactory.convert((GenericRefType)t, innerType);
}else
if(t instanceof FunN){
return UnifyTypeFactory.convert((FunN)t, innerType);
}else if(t instanceof TypePlaceholder){
return UnifyTypeFactory.convert((TypePlaceholder)t, innerType);
}else if(t instanceof ExtendsWildcardType){
@@ -83,13 +85,13 @@ public class UnifyTypeFactory {
public static UnifyType convert(RefType t, Boolean innerType){
//Check if it is a FunN Type:
Pattern p = Pattern.compile("Fun(\\d+)[$][$]");
Pattern p = Pattern.compile("Fun(\\d+)");
Matcher m = p.matcher(t.getName().toString());
boolean b = m.matches();
if(b){
Integer N = Integer.valueOf(m.group(1));
if((N + 1) == t.getParaList().size()){
return convertFunN(t.getParaList(), false);
return convert(new FunN(t.getParaList()), false);
}
}
UnifyType ret;
@@ -105,11 +107,11 @@ public class UnifyTypeFactory {
return ret;
}
public static UnifyType convertFunN(List<RefTypeOrTPHOrWildcardOrGeneric> paraList, Boolean innerType){
public static UnifyType convert(FunN t, Boolean innerType){
UnifyType ret;
List<UnifyType> params = new ArrayList<>();
if(paraList != null && paraList.size() > 0){
for(RefTypeOrTPHOrWildcardOrGeneric pT : paraList){
if(t.getParaList() != null && t.getParaList().size() > 0){
for(RefTypeOrTPHOrWildcardOrGeneric pT : t.getParaList()){
params.add(UnifyTypeFactory.convert(pT, false));
}
}
@@ -233,7 +235,7 @@ public class UnifyTypeFactory {
}
public static RefTypeOrTPHOrWildcardOrGeneric convert(FunNType t, Map<String,TypePlaceholder> tphs) {
RefType ret = new RefType(new JavaClassName(t.getName()), convert(t.getTypeParams(), tphs), new NullToken());
RefType ret = new RefType(new JavaClassName(t.getName()+"$$"), convert(t.getTypeParams(), tphs), new NullToken());
return ret;
}
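A minimal sketch (illustrative only, not taken from the diff) of the mangled-name check used in the hunks above, which toggle between the plain "FunN" spelling and the "FunN$$" spelling; it assumes Pattern and Matcher from java.util.regex, as imported at the top of this file:
// Illustrative sketch: recognize a mangled FunN$$ name and recover N.
Pattern p = Pattern.compile("Fun(\\d+)[$][$]");
Matcher m = p.matcher("Fun2$$");
if (m.matches()) {
    int n = Integer.parseInt(m.group(1)); // n == 2; per the check above, the matching RefType carries n + 1 type parameters (n arguments plus the return type)
}
One side of the diff re-appends the "$$" suffix in convert(FunNType, …) above, so the marker reappears in the generated RefType name on the AST side.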

@@ -0,0 +1,41 @@
package de.dhbwstuttgart.syntaxtree.type;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.parser.scope.JavaClassName;
import java.util.List;
/**
* @see Specification "Complete Typeinference in Java 8" by Martin Plümicke
* "interface FunN<R,T1, T2, ... ,TN> { R apply(T1 arg1, T2 arg2, ... , TN argN); }"
* @author A10023 - Andreas Stadelmeier
*
* Note:
* FunN is a RefType. The RefType is not linked to a FunNInterface.
*
*/
public class FunN extends RefType {
/**
* @author Andreas Stadelmeier, a10023
* Needed for the type inference algorithm for Java 8
* Generates a RefType referring to a FunN<R,T1,...,TN> class.
* @param params
* @return
*/
public FunN(List<RefTypeOrTPHOrWildcardOrGeneric> params) {
super(new JavaClassName("Fun"+(params.size()-1)), params, new NullToken());
}
/**
* Special constructor to generate a FunN without a return type
protected FunN(List<? extends Type> list){
super("",0);
if(list==null)throw new NullPointerException();
setT(list);
this.name = new JavaClassName("Fun"+list.size());//getName();
}
*/
}
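As a reading aid (a sketch, not part of the diff): for N = 2 the interface scheme quoted in the Javadoc above works out to the following, and new FunN(params) with a three-element parameter list yields a RefType named Fun2 that refers to it.
// Sketch for N = 2, spelled out from the scheme in the Javadoc above
// ("interface FunN<R,T1,...,TN> { R apply(T1 arg1,...,TN argN); }"):
interface Fun2<R, T1, T2> {
    R apply(T1 arg1, T2 arg2);
}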

@@ -21,7 +21,7 @@ import java.util.Optional;
public class FunNClass extends ClassOrInterface {
public FunNClass(List<GenericRefType> funNParams) {
super(0, new JavaClassName("Fun"+(funNParams.size()-1)), new ArrayList<>(), Optional.empty() /* eingefuegt PL 2018-11-24 */,
super(0, new JavaClassName("Fun"+(funNParams.size())), new ArrayList<>(), Optional.empty() /* eingefuegt PL 2018-11-24 */,
createMethods(funNParams), new ArrayList<>(), createGenerics(funNParams),
ASTFactory.createObjectType(), true, new ArrayList<>(), new NullToken());

@@ -1,10 +1,10 @@
package de.dhbwstuttgart.typeinference.assumptions;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.parser.scope.JavaClassName;
import de.dhbwstuttgart.syntaxtree.ClassOrInterface;
import de.dhbwstuttgart.syntaxtree.GenericTypeVar;
import de.dhbwstuttgart.syntaxtree.TypeScope;
import de.dhbwstuttgart.syntaxtree.type.FunN;
import de.dhbwstuttgart.syntaxtree.type.GenericRefType;
import de.dhbwstuttgart.syntaxtree.type.RefType;
import de.dhbwstuttgart.syntaxtree.type.RefTypeOrTPHOrWildcardOrGeneric;
@@ -63,7 +63,7 @@ public class MethodAssumption extends Assumption{
}
RefTypeOrTPHOrWildcardOrGeneric receiverType;
if(receiver instanceof FunNClass){
receiverType = new RefType(new JavaClassName(receiver.getClassName().toString()+"$$"), params, new NullToken()); // new FunN(params);
receiverType = new FunN(params);
}else{
receiverType = new RefType(receiver.getClassName(), params, new NullToken());
}

@@ -65,9 +65,7 @@ public class TYPEStmt implements StatementVisitor{
//lambdaParams.add(0,tphRetType);
constraintsSet.addUndConstraint(
new Pair(lambdaExpression.getType(),
new RefType(new JavaClassName("Fun"+(lambdaParams.size()-1)+"$$"), lambdaParams, new NullToken()),
//new FunN(lambdaParams),
PairOperator.EQUALSDOT));
new FunN(lambdaParams),PairOperator.EQUALSDOT));
constraintsSet.addUndConstraint(
new Pair(lambdaExpression.getReturnType(),
tphRetType,PairOperator.EQUALSDOT));

@@ -0,0 +1,132 @@
package de.dhbwstuttgart.typeinference.unify;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.Vector;
import com.google.common.collect.Ordering;
public class PartialOrderSet<E, F extends Ordering<E>> implements Set<E> {
//HashSet<E> hs = new HashSet<>();
List<E> ve = new LinkedList<>();//new Vector<>();
F ordering;
PartialOrderSet(F ordering) {
this.ordering= ordering;
}
PartialOrderSet(F ordering, Set<E> s) {
this.ordering= ordering;
this.addAll(s);
}
PartialOrderSet(PartialOrderSet<E, F> pos) {
this.ordering= pos.ordering;
this.ve = new LinkedList<>(pos.ve);
}
@Override
public int size() {
return ve.size();
}
@Override
public boolean isEmpty() {
return ve.isEmpty();
}
@Override
public boolean contains(Object o) {
return ve.contains(o);
}
@Override
public Iterator<E> iterator() {
return ve.iterator();
}
@Override
public Object[] toArray() {
return ve.toArray();
}
@Override
public <T> T[] toArray(T[] a) {
return ve.toArray(a);
}
public ArrayList<E> toArrayList() {
return new ArrayList<>(ve);
}
public List<E> toList() {
return new LinkedList<>(ve);
}
@Override
public boolean add(E e) {
if (this.contains(e)) {
return false;
}
//hs.add(e);
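// Note: e is inserted directly in front of the first element it is strictly smaller
// than under the ordering (the check relies on compare returning exactly -1 for
// that case); if there is no such element, e is appended at the end.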
for(int i = 0; i< ve.size(); i++) {
if (ordering.compare(e, ve.get(i)) == -1) {
ve.add(i,e);
return true;
}
}
ve.add(e);
return true;
}
@Override
public boolean remove(Object o) {
//hs.remove(o);
return ve.remove(o);
}
@Override
public boolean containsAll(Collection<?> c) {
return ve.containsAll(c);
}
@Override
public boolean addAll(Collection<? extends E> c) {
Boolean ret = false;
Iterator<? extends E> cit = c.iterator();
while(cit.hasNext()) {
Boolean retnew = this.add(cit.next());
ret = ret || retnew;
}
return ret;
}
@Override
public boolean retainAll(Collection<?> c) {
//hs.retainAll(c);
return ve.retainAll(c);
}
@Override
public boolean removeAll(Collection<?> c) {
//hs.removeAll(c);
return ve.removeAll(c);
}
@Override
public void clear() {
//hs.clear();
ve.clear();
}
@Override
public String toString() {
return ve.toString();
}
}
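A hypothetical usage sketch (not part of the diff; it assumes same-package access, since the constructors are package-private, and an ordering whose compare returns exactly -1 for "smaller", which Ordering.natural() does for Integer):
Ordering<Integer> natural = Ordering.natural();
PartialOrderSet<Integer, Ordering<Integer>> pos = new PartialOrderSet<>(natural);
pos.add(3);
pos.add(1);
pos.add(2);
pos.add(2);              // duplicate: contains(2) is already true, so add returns false
System.out.println(pos); // prints [1, 2, 3], kept sorted consistently with the ordering
In the TypeUnify hunks below, the same pattern appears with new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc), x) for each or-constraint set.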

@@ -4,12 +4,10 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Optional;
import java.util.Queue;
import java.util.Set;
import java.util.Stack;
import java.util.function.Function;
import java.util.stream.Collectors;
import de.dhbwstuttgart.exceptions.DebugException;
@@ -30,7 +28,6 @@ import de.dhbwstuttgart.typeinference.unify.distributeVariance;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
/**
* Implementation of the type inference rules.
@@ -39,13 +36,13 @@ import java.io.Writer;
*/
public class RuleSet implements IRuleSet{
Writer logFile;
FileWriter logFile;
public RuleSet() {
super();
}
RuleSet(Writer logFile) {
RuleSet(FileWriter logFile) {
this.logFile = logFile;
}
@@ -622,12 +619,8 @@ public class RuleSet implements IRuleSet{
return succ ? permutation : new int[0];
}
public Optional<Set<UnifyPair>> subst(Set<UnifyPair> pairs) {
return subst(pairs, new ArrayList<>());
}
@Override
public Optional<Set<UnifyPair>> subst(Set<UnifyPair> pairs, List<Set<Set<UnifyPair>>> oderConstraints) {
public Optional<Set<UnifyPair>> subst(Set<UnifyPair> pairs) {
HashMap<UnifyType, Integer> typeMap = new HashMap<>();
Stack<UnifyType> occuringTypes = new Stack<>();
@@ -667,35 +660,13 @@ public class RuleSet implements IRuleSet{
if(lhsType != null
//&& !((rhsType = pair.getRhsType()) instanceof PlaceholderType) //PL geloescht am 2017-09-29 Begründung: auch Typvariablen muessen ersetzt werden.
&& typeMap.get(lhsType) > 1 // The type occurs in more pairs in the set than just the recent pair.
&& !rhsType.getTypeParams().occurs(lhsType)
&& !((rhsType instanceof WildcardType) && ((WildcardType)rhsType).getWildcardedType().equals(lhsType))) //PL eigefuegt 2018-02-18
{
&& !rhsType.getTypeParams().occurs(lhsType)) {
Unifier uni = new Unifier(lhsType, rhsType);
result = result.stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(ArrayList::new));
result1 = result1.stream().map(x -> uni.apply(pair,x)).collect(Collectors.toCollection(LinkedList::new));
Function<? super Set<UnifyPair>,? extends HashSet<UnifyPair>> applyUni = b -> b.stream().map(
x -> uni.apply(pair,x)).collect(Collectors.toCollection(HashSet::new));
List<Set<Set<UnifyPair>>> oderConstraintsRet = new ArrayList<>();
for(Set<Set<UnifyPair>> oc : oderConstraints) {
//Set<Set<UnifyPair>> ocRet = new HashSet<>();
//for(Set<UnifyPair> cs : oc) {
Set<Set<UnifyPair>> csRet = oc.stream().map(applyUni).collect(Collectors.toCollection(HashSet::new));
oderConstraintsRet.add(csRet);
//}
}
oderConstraints.replaceAll(oc -> oc.stream().map(applyUni).collect(Collectors.toCollection(HashSet::new)));
/*
oderConstraints = oderConstraints.stream().map(
a -> a.stream().map(applyUni
//b -> b.stream().map(
// x -> uni.apply(pair,x)).collect(Collectors.toCollection(HashSet::new) )
).collect(Collectors.toCollection(HashSet::new))
).collect(Collectors.toList(ArrayList::new));
}
*/
applied = true;
}
result.add(pair);
}
@@ -974,8 +945,6 @@ public class RuleSet implements IRuleSet{
UnifyType extendedType = ((ExtendsType)lhsType).getExtendedType();
if (extendedType.equals(rhsType)) return Optional.empty(); //PL 2019-02-18 added: ? extends a <.? a
boolean isGen = extendedType instanceof PlaceholderType && !((PlaceholderType) extendedType).isGenerated();
Set<UnifyPair> result = new HashSet<>();
@@ -1002,8 +971,6 @@ public class RuleSet implements IRuleSet{
UnifyType superedType = ((SuperType)lhsType).getSuperedType();
if (superedType.equals(rhsType)) return Optional.empty(); //PL 2019-02-18 added: ? super a <.? a
boolean isGen = superedType instanceof PlaceholderType && !((PlaceholderType) superedType).isGenerated();
Set<UnifyPair> result = new HashSet<>();

@@ -1,87 +1,47 @@
package de.dhbwstuttgart.typeinference.unify;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ForkJoinPool;
import java.util.stream.Collectors;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.OrderingUnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
public class TypeUnify {
/**
* unify, parallel, without result model
* @param undConstrains
* @param oderConstraints
* @param fc
* @param logFile
* @param log
* @param cons
* @return
*/
public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret);
public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, FileWriter logFile, Boolean log, ConstraintSet<Pair> cons) {
List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraintsPartial =
oderConstraints.stream().map(x -> new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc),x)).collect(Collectors.toList());
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraintsPartial, fc, true, logFile, log, 0, new UnifyResultModel(), cons);
ForkJoinPool pool = new ForkJoinPool();
pool.invoke(unifyTask);
Set<Set<UnifyPair>> res = unifyTask.join();
try {
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements + "\n");
logFile.flush();
}
catch (IOException e) {
System.err.println("no log-File");
}
return res;
}
/**
* unify, asynchronous, returning the UnifyResultModel before all results have been collected
* @param undConstrains
* @param oderConstraints
* @param fc
* @param logFile
* @param log
* @param cons
* @param ret
* @return
*/
public UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret);
public UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, FileWriter logFile, Boolean log, ConstraintSet<Pair> cons, UnifyResultModel ret) {
List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraintsPartial =
oderConstraints.stream().map(x -> new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc),x)).collect(Collectors.toList());
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraintsPartial, fc, true, logFile, log, 0, ret, cons);
ForkJoinPool pool = new ForkJoinPool();
pool.invoke(unifyTask);
return ret;
}
/**
* unify, parallel, returning the UnifyResultModel after all results have been collected
* @param undConstrains
* @param oderConstraints
* @param fc
* @param logFile
* @param log
* @param cons
* @param ret
* @return
*/
public UnifyResultModel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret);
public UnifyResultModel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, FileWriter logFile, Boolean log, ConstraintSet<Pair> cons, UnifyResultModel ret) {
List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraintsPartial =
oderConstraints.stream().map(x -> new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc),x)).collect(Collectors.toList());
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraintsPartial, fc, true, logFile, log, 0, ret, cons);
ForkJoinPool pool = new ForkJoinPool();
pool.invoke(unifyTask);
Set<Set<UnifyPair>> res = unifyTask.join();
try {
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
logFile.flush();
}
catch (IOException e) {
System.err.println("no log-File");
}
return ret;
}
@@ -93,26 +53,11 @@ public class TypeUnify {
}
*/
/**
* unify, sequential, with or-constraints
* @param undConstrains
* @param oderConstraints
* @param fc
* @param logFile
* @param log
* @param cons
* @return
*/
public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0, ret);
public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, FileWriter logFile, Boolean log, ConstraintSet<Pair> cons) {
List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraintsPartial =
oderConstraints.stream().map(x -> new PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>(new OrderingUnifyPair(fc),x)).collect(Collectors.toList());
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraintsPartial, fc, false, logFile, log, 0, new UnifyResultModel(), cons);
Set<Set<UnifyPair>> res = unifyTask.compute();
try {
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
logFile.flush();
}
catch (IOException e) {
System.err.println("no log-File");
}
return res;
}
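A hypothetical call sketch for the sequential entry point (illustrative only: it uses the FileWriter-based signature shown above and assumes undConstraints, oderConstraints, fc and cons have already been produced by the compiler; the log-file name is made up, the directory is the ignored src/test/java/logFiles/ folder added further down):
FileWriter logFile = new FileWriter("src/test/java/logFiles/unify.log"); // hypothetical file name; may throw IOException
Set<Set<UnifyPair>> results =
        new TypeUnify().unifyOderConstraints(undConstraints, oderConstraints, fc, logFile, false, cons);
logFile.close();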

@@ -1,8 +1,6 @@
package de.dhbwstuttgart.typeinference.unify;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
@@ -11,14 +9,15 @@ import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.OrderingUnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
public class TypeUnify2Task extends TypeUnifyTask {
Set<Set<UnifyPair>> setToFlatten;
public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm) {
super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm);
public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<PartialOrderSet<Set<UnifyPair>,OrderingUnifyPair>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, FileWriter logFile, Boolean log, int rekTiefe, UnifyResultModel urm, ConstraintSet<Pair> cons) {
super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, cons);
this.setToFlatten = setToFlatten;
this.nextSetElement = nextSetElement;
}
@@ -33,23 +32,12 @@ public class TypeUnify2Task extends TypeUnifyTask {
System.out.println("two");
}
one = true;
Set<Set<UnifyPair>> res = unify2(setToFlatten, eq, oderConstraintsField, fc, parallel, rekTiefeField, true);
Set<Set<UnifyPair>> res = unify2(setToFlatten, eq, oderConstraintsField, fc, parallel, rekTiefeField);
/*if (isUndefinedPairSetSet(res)) {
return new HashSet<>(); }
else
*/
noOfThread--;
return res;
}
public void closeLogFile() {
try {
logFile.close();
}
catch (IOException ioE) {
System.err.println("no log-File" + thNo);
}
return res;
}
}

File diff suppressed because it is too large.

@@ -1,31 +1,13 @@
package de.dhbwstuttgart.typeinference.unify;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
public class UnifyResultModel {
ConstraintSet<de.dhbwstuttgart.typeinference.constraints.Pair> cons;
IFiniteClosure fc;
public UnifyResultModel(ConstraintSet<de.dhbwstuttgart.typeinference.constraints.Pair> cons,
IFiniteClosure fc) {
this.cons = cons;
this.fc = fc;
}
private List<UnifyResultListener> listeners = new ArrayList<>();
public void addUnifyResultListener(UnifyResultListener listenerToAdd) {
@@ -36,20 +18,7 @@ public class UnifyResultModel {
listeners.remove(listenerToRemove);
}
public void notify(Set<Set<UnifyPair>> eqPrimePrimeSet) {
Set<Set<UnifyPair>> eqPrimePrimeSetRet = eqPrimePrimeSet.stream().map(x -> {
Optional<Set<UnifyPair>> res = new RuleSet().subst(x.stream().map(y -> {
if (y.getPairOp() == PairOperator.SMALLERDOTWC) y.setPairOp(PairOperator.EQUALSDOT);
return y; //all pairs a <.? b are replaced by a =. b
}).collect(Collectors.toCollection(HashSet::new)));
if (res.isPresent()) {//if subst yields a result, something was changed
return new TypeUnifyTask().applyTypeUnificationRules(res.get(), fc);
}
else return x; //if nothing was changed, x is returned
}).collect(Collectors.toCollection(HashSet::new));
List<ResultSet> newResult = eqPrimePrimeSetRet.stream().map(unifyPairs ->
new ResultSet(UnifyTypeFactory.convert(unifyPairs, de.dhbwstuttgart.typeinference.constraints.Pair.generateTPHMap(cons))))
.collect(Collectors.toList());
public void notify(List<ResultSet> newResult) {
UnifyResultEvent evt = new UnifyResultEvent(newResult);
for (UnifyResultListener listener : listeners) {

@@ -1,6 +1,5 @@
package de.dhbwstuttgart.typeinference.unify.interfaces;
import java.util.List;
import java.util.Optional;
import java.util.Set;
@@ -86,13 +85,6 @@ public interface IRuleSet {
public Optional<UnifyPair> adaptExt(UnifyPair pair, IFiniteClosure fc);
public Optional<UnifyPair> adaptSup(UnifyPair pair, IFiniteClosure fc);
/**
* Applies the subst-Rule to a set of pairs (usually Eq').
* @param pairs The set of pairs where the subst rule should apply.
* @return An optional of the modified set, if there were any substitutions. An empty optional if there were no substitutions.
*/
public Optional<Set<UnifyPair>> subst(Set<UnifyPair> pairs, List<Set<Set<UnifyPair>>> oderConstraints);
/**
* Applies the subst-Rule to a set of pairs (usually Eq').
* @param pairs The set of pairs where the subst rule should apply.

@@ -2,7 +2,6 @@ package de.dhbwstuttgart.typeinference.unify.model;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
@@ -13,7 +12,6 @@ import java.util.Optional;
import java.util.Set;
import java.util.function.BiFunction;
import java.util.function.BinaryOperator;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import com.google.common.collect.Ordering;
@@ -34,7 +32,7 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify;
public class FiniteClosure //extends Ordering<UnifyType> //entfernt PL 2018-12-11
implements IFiniteClosure {
Writer logFile;
FileWriter logFile;
static Boolean log = false;
public void setLogTrue() {
log = true;
@@ -68,7 +66,7 @@ implements IFiniteClosure {
/**
* Creates a new instance using the inheritance tree defined in the pairs.
*/
public FiniteClosure(Set<UnifyPair> pairs, Writer logFile) {
public FiniteClosure(Set<UnifyPair> pairs, FileWriter logFile) {
this.logFile = logFile;
this.pairs = new HashSet<>(pairs);
inheritanceGraph = new HashMap<UnifyType, Node<UnifyType>>();
@@ -658,16 +656,11 @@ implements IFiniteClosure {
if (left.getName().equals("Matrix") || right.getName().equals("Matrix"))
System.out.println("");
/*
pairop = PairOperator.SMALLERDOTWC;
List<UnifyType> al = new ArrayList<>();
PlaceholderType xx =new PlaceholderType("xx");
al.add(xx);
left = new ExtendsType(new ReferenceType("Vector", new TypeParams(al)));
List<UnifyType> alr = new ArrayList<>();
UnifyType exx = new ExtendsType(xx);
alr.add(exx);
right = new ExtendsType(new ReferenceType("Vector", new TypeParams(alr)));
right = new ReferenceType("Vector", new TypeParams(new ArrayList<>(al)));
*/
/*
List<UnifyType> al = new ArrayList<>();
@@ -718,13 +711,8 @@ implements IFiniteClosure {
}
catch (IOException e) {
System.err.println("no LogFile");}}
//equations of the form a <./=. Theta, Theta <./=. a, or a <./=. b are ok.
Predicate<UnifyPair> delFun = x -> !((x.getLhsType() instanceof PlaceholderType ||
x.getRhsType() instanceof PlaceholderType)
&& !((x.getLhsType() instanceof WildcardType) && //? extends/super a <.? a
((WildcardType)x.getLhsType()).getWildcardedType().equals(x.getRhsType()))
);
long smallerLen = smallerRes.stream().filter(delFun).count();
//equations of the form a <./=. Theta, Theta <./=. a, or a <./=. b are ok.
long smallerLen = smallerRes.stream().filter(x -> !(x.getLhsType() instanceof PlaceholderType || x.getRhsType() instanceof PlaceholderType)).count();
if (smallerLen == 0) return -1;
else {
up = new UnifyPair(right, left, pairop);
@@ -740,7 +728,7 @@ implements IFiniteClosure {
catch (IOException e) {
System.err.println("no LogFile");}}
//equations of the form a <./=. Theta, Theta <./=. a, or a <./=. b are ok.
long greaterLen = greaterRes.stream().filter(delFun).count();
long greaterLen = greaterRes.stream().filter(x -> !(x.getLhsType() instanceof PlaceholderType || x.getRhsType() instanceof PlaceholderType)).count();
if (greaterLen == 0) return 1;
else return 0;
}

@@ -22,7 +22,7 @@ public class FunNType extends UnifyType {
* Creates a FunN-Type with the specified TypeParameters.
*/
protected FunNType(TypeParams p) {
super("Fun"+(p.size()-1)+"$$", p);
super("Fun"+(p.size()-1), p);
}
/**

@@ -1,11 +1,9 @@
package de.dhbwstuttgart.typeinference.unify.model;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.function.BinaryOperator;
@@ -32,25 +30,14 @@ public class OrderingUnifyPair extends Ordering<Set<UnifyPair>> {
* in which compare(Theta, Theta') is called.
*/
public int compareEq (UnifyPair left, UnifyPair right) {
try {
//if (left.getRhsType() instanceof WildcardType || right.getRhsType() instanceof WildcardType) {//PL 2019-01-12 replaced
if (((PlaceholderType)left.getLhsType()).isInnerType() && ((PlaceholderType)right.getLhsType()).isInnerType()) {
return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOTWC);
}
else {
return fc.compare(left.getRhsType(), right.getRhsType(), PairOperator.SMALLERDOT);
}}
catch (ClassCastException e) {
try {
((FiniteClosure)fc).logFile.write("ClassCastException: " + left.toString() +"\n\n");
((FiniteClosure)fc).logFile.flush();
}
catch (IOException ie) {
}
return -99;
}
}
/*
public int compareEq (UnifyPair left, UnifyPair right) {
if (left == null || right == null)
@@ -128,43 +115,6 @@ public class OrderingUnifyPair extends Ordering<Set<UnifyPair>> {
* @see com.google.common.collect.Ordering#compare(java.lang.Object, java.lang.Object)
*/
public int compare (Set<UnifyPair> left, Set<UnifyPair> right) {
/*
//pairop = PairOperator.SMALLERDOTWC;
List<UnifyType> al = new ArrayList<>();
PlaceholderType xx = PlaceholderType.freshPlaceholder();
al.add(xx);
UnifyType t1 = new ExtendsType(new ReferenceType("Vector", new TypeParams(al)));
//PlaceholderType yy =new PlaceholderType("yy");
List<UnifyType> alr = new ArrayList<>();
UnifyType exx = new ExtendsType(xx);
alr.add(exx);
UnifyType t2 = new ExtendsType(new ReferenceType("Vector", new TypeParams(alr)));
PlaceholderType a = PlaceholderType.freshPlaceholder();
a.setInnerType(true);
UnifyPair p1 = new UnifyPair(a, t1, PairOperator.SMALLERDOTWC);
PlaceholderType b = PlaceholderType.freshPlaceholder();
b.setInnerType(true);
UnifyPair p2 = new UnifyPair(b, t2, PairOperator.SMALLERDOTWC);
List<UnifyType> al3 = new ArrayList<>();
al3.add(a);
List<UnifyType> al4 = new ArrayList<>();
al4.add(b);
UnifyPair p3 = new UnifyPair(new PlaceholderType("c"), new ReferenceType("Vector", new TypeParams(al3)), PairOperator.EQUALSDOT);
UnifyPair p4 = new UnifyPair(new PlaceholderType("c"), new ReferenceType("Vector", new TypeParams(al4)), PairOperator.EQUALSDOT);
right = new HashSet<>();
right.add(p1);
right.add(p3);
left = new HashSet<>();
left.add(p2);
left.add(p4);
*/
if ((left.size() == 1) && right.size() == 1) {
if (left.iterator().next().getLhsType().getName().equals("AFS")) {
System.out.println("");
@@ -248,9 +198,9 @@ public class OrderingUnifyPair extends Ordering<Set<UnifyPair>> {
//Set<PlaceholderType> varsleft = lefteq.stream().map(x -> (PlaceholderType)x.getLhsType()).collect(Collectors.toCollection(HashSet::new));
//Set<PlaceholderType> varsright = righteq.stream().map(x -> (PlaceholderType)x.getLhsType()).collect(Collectors.toCollection(HashSet::new));
//filter out the pair a = Theta that was generated by a <. Theta' (only relevant in case 1); other substitutions are filtered out
lefteq.removeIf(x -> (x.getBasePair()!=null) && !(x.getLhsType().getName().equals(x.getBasePair().getLhsType().getName())
lefteq.removeIf(x -> !(x.getLhsType().getName().equals(x.getBasePair().getLhsType().getName())
||x.getLhsType().getName().equals(x.getBasePair().getRhsType().getName())));//removeIf(x -> !varsright.contains(x.getLhsType()));
righteq.removeIf(x -> (x.getBasePair()!=null) && !(x.getLhsType().getName().equals(x.getBasePair().getLhsType().getName())
righteq.removeIf(x -> !(x.getLhsType().getName().equals(x.getBasePair().getLhsType().getName())
||x.getLhsType().getName().equals(x.getBasePair().getRhsType().getName())));//.removeIf(x -> !varsleft.contains(x.getLhsType()));
UnifyPair lseq = lefteq.iterator().next();
UnifyPair rseq = righteq.iterator().next();

@@ -177,20 +177,6 @@ public class UnifyPair {
}
}
/**
* if, in a pair consisting of 2 type variables, one of them is not wildcardable,
* then set both to not wildcardable
*/
public void disableCondWildcards() {
if (lhs instanceof PlaceholderType && rhs instanceof PlaceholderType
&& (!((PlaceholderType)lhs).isWildcardable() || !((PlaceholderType)rhs).isWildcardable()))
{
((PlaceholderType)lhs).disableWildcardtable();
((PlaceholderType)rhs).disableWildcardtable();
}
}
public Boolean wrongWildcard() {
return lhs.wrongWildcard() || rhs.wrongWildcard();
}
@@ -233,12 +219,10 @@ public class UnifyPair {
public String toString() {
String ret = "";
if (lhs instanceof PlaceholderType) {
ret = new Integer(((PlaceholderType)lhs).getVariance()).toString() + " " + ((PlaceholderType)lhs).isInnerType()
+ " " + ((PlaceholderType)lhs).isWildcardable();
ret = new Integer(((PlaceholderType)lhs).getVariance()).toString() + " " + ((PlaceholderType)lhs).isInnerType();
}
if (rhs instanceof PlaceholderType) {
ret = ret + ", " + new Integer(((PlaceholderType)rhs).getVariance()).toString() + " " + ((PlaceholderType)rhs).isInnerType()
+ " " + ((PlaceholderType)rhs).isWildcardable();
ret = ret + ", " + new Integer(((PlaceholderType)rhs).getVariance()).toString() + " " + ((PlaceholderType)rhs).isInnerType();
}
return "(" + lhs + " " + pairOp + " " + rhs + ", " + ret + ")"; //+ ", [" + getfBounded().toString()+ "])";
}

@@ -3,7 +3,7 @@ package bytecode;
import static org.junit.Assert.*;
import java.io.File;
import java.lang.reflect.Method;
import java.lang.reflect.Field;
import java.net.URL;
import java.net.URLClassLoader;
@@ -12,8 +12,8 @@ import org.junit.Test;
import de.dhbwstuttgart.core.JavaTXCompiler;
public class KompTphTest {
public class FieldTph {
private static String path;
private static File fileToTest;
private static JavaTXCompiler compiler;
@@ -22,16 +22,22 @@ public class KompTphTest {
private static String pathToClassFile;
private static Object instanceOfClass;
@Test
public void generateBC() throws Exception {
path = System.getProperty("user.dir")+"/src/test/resources/bytecode/javFiles/KompTph.jav";
@BeforeClass
public static void setUpBeforeClass() throws Exception {
path = System.getProperty("user.dir")+"/src/test/resources/bytecode/javFiles/FieldTph.jav";
fileToTest = new File(path);
compiler = new JavaTXCompiler(fileToTest);
compiler.generateBytecode(System.getProperty("user.dir")+"/src/test/resources/testBytecode/generatedBC/");
pathToClassFile = System.getProperty("user.dir")+"/src/test/resources/testBytecode/generatedBC/";
compiler.generateBytecode(pathToClassFile);
loader = new URLClassLoader(new URL[] {new URL("file://"+pathToClassFile)});
classToTest = loader.loadClass("KompTph");
classToTest = loader.loadClass("FieldTph");
instanceOfClass = classToTest.getDeclaredConstructor().newInstance();
}
@Test
public void test() {
Field[] fields = classToTest.getFields();
assertEquals(1, fields.length);
}
}

@@ -32,11 +32,11 @@ public class FieldTphConsMethTest {
pathToClassFile = System.getProperty("user.dir")+"/src/test/resources/testBytecode/generatedBC/";
loader = new URLClassLoader(new URL[] {new URL("file://"+pathToClassFile)});
classToTest = loader.loadClass("FieldTphConsMeth");
instanceOfClass = classToTest.getDeclaredConstructor().newInstance();
}
@Test
public void test() throws Exception {
instanceOfClass = classToTest.getConstructor(Object.class).newInstance("C");
Field a = classToTest.getDeclaredField("a");
a.setAccessible(true);
@@ -44,7 +44,6 @@ public class FieldTphConsMethTest {
Object result = m.invoke(instanceOfClass, 42);
assertEquals(42,result);
assertEquals("C", a.get(instanceOfClass));
}
}

@@ -1,66 +0,0 @@
package bytecode;
import static org.junit.Assert.*;
import java.io.File;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;
import org.junit.BeforeClass;
import org.junit.Test;
import de.dhbwstuttgart.core.JavaTXCompiler;
public class FieldTphMMethTest {
private static String path;
private static File fileToTest;
private static JavaTXCompiler compiler;
private static ClassLoader loader;
private static Class<?> classToTest;
private static String pathToClassFile;
private static Object instanceOfClass;
private static Object instanceOfClass2;
@BeforeClass
public static void setUpBeforeClass() throws Exception {
path = System.getProperty("user.dir")+"/src/test/resources/bytecode/javFiles/FieldTphMMeth.jav";
fileToTest = new File(path);
compiler = new JavaTXCompiler(fileToTest);
compiler.generateBytecode(System.getProperty("user.dir")+"/src/test/resources/testBytecode/generatedBC/");
pathToClassFile = System.getProperty("user.dir")+"/src/test/resources/testBytecode/generatedBC/";
loader = new URLClassLoader(new URL[] {new URL("file://"+pathToClassFile)});
classToTest = loader.loadClass("FieldTphMMeth");
instanceOfClass = classToTest.getConstructor(Object.class,Object.class,Boolean.class).newInstance("C",42,true);
instanceOfClass2 = classToTest.getConstructor(Object.class,Object.class,Boolean.class).newInstance("C",42,false);
}
@Test
public void testM() throws Exception {
Method m = classToTest.getDeclaredMethod("m", Object.class,Object.class,Boolean.class);
Object result = m.invoke(instanceOfClass, "C",42,false);
assertEquals(42,result);
}
@Test
public void testWithTrue() throws Exception {
Field a = classToTest.getDeclaredField("a");
a.setAccessible(true);
assertEquals("C", a.get(instanceOfClass));
}
@Test
public void testWithFalse() throws Exception {
Field a = classToTest.getDeclaredField("a");
a.setAccessible(true);
assertEquals(42, a.get(instanceOfClass2));
}
}

@@ -4,7 +4,6 @@ import static org.junit.Assert.*;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.URL;
@@ -28,7 +27,7 @@ public class MatrixOpTest {
private static Object instanceOfClass_m3;
@Test
public void test() throws NoSuchMethodException, SecurityException, IllegalAccessException, IllegalArgumentException, InvocationTargetException, ClassNotFoundException, IOException, InstantiationException, NoSuchFieldException {
public void test() throws NoSuchMethodException, SecurityException, IllegalAccessException, IllegalArgumentException, InvocationTargetException, ClassNotFoundException, IOException, InstantiationException {
path = System.getProperty("user.dir")+"/src/test/resources/bytecode/javFiles/MatrixOP.jav";
fileToTest = new File(path);
compiler = new JavaTXCompiler(fileToTest);
@@ -36,7 +35,7 @@ public class MatrixOpTest {
compiler.generateBytecode(pathToClassFile);
loader = new URLClassLoader(new URL[] {new URL("file://"+pathToClassFile)});
classToTest = loader.loadClass("MatrixOP");
/*
Vector<Vector<Integer>> vv = new Vector<Vector<Integer>>();
Vector<Integer> v1 = new Vector<Integer> ();
v1.addElement(2);
@@ -49,7 +48,6 @@ public class MatrixOpTest {
//m1.addElement(v2);
vv.addElement(v1);
vv.addElement(v2);
instanceOfClass_m1 = classToTest.getDeclaredConstructor(Vector.class).newInstance(vv); //Matrix m1 = new Matrix(vv);
Vector<Vector<Integer>> vv1 = new Vector<Vector<Integer>>();
@@ -64,23 +62,13 @@ public class MatrixOpTest {
//m2.addElement(v4);
vv1.addElement(v3);
vv1.addElement(v4);
instanceOfClass_m2 = classToTest.getDeclaredConstructor(Vector.class).newInstance(vv1);//Matrix m2 = new Matrix(vv1);
//Matrix m3 = m1.mul(vv1);
// Method mul = classToTest.getDeclaredMethod("mul", Vector.class);
// Object result = mul.invoke(instanceOfClass_m1, instanceOfClass_m2);
Field mul = classToTest.getField("mul");
mul.setAccessible(true);
Class<?> lambda = mul.get(instanceOfClass_m1).getClass();
Method apply = lambda.getMethod("apply", Object.class,Object.class);
// so that the method can be accessed
apply.setAccessible(true);
Object result = apply.invoke(mul.get(instanceOfClass_m1),instanceOfClass_m1, instanceOfClass_m2);
Method mul = classToTest.getDeclaredMethod("mul", Vector.class);
Object result = mul.invoke(instanceOfClass_m1, instanceOfClass_m2);
System.out.println(instanceOfClass_m1.toString() + " * " + instanceOfClass_m2.toString() + " = " + result.toString());
Vector<Vector<Integer>> res = new Vector<Vector<Integer>>();
@@ -97,7 +85,7 @@ public class MatrixOpTest {
res.addElement(v6);
instanceOfClass_m3 = classToTest.getDeclaredConstructor(Vector.class).newInstance(res);
assertEquals(result, instanceOfClass_m3);
*/
}
}

src/test/java/bytecode/MatrixTest.java (Executable file → Normal file, 0 lines changed)

@@ -6,13 +6,14 @@ import java.io.File;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.Vector;
import org.junit.BeforeClass;
import org.junit.Test;
import de.dhbwstuttgart.core.JavaTXCompiler;
public class InfTest {
public class VectorSuperTest {
private static String path;
private static File fileToTest;
@@ -22,16 +23,23 @@ public class InfTest {
private static String pathToClassFile;
private static Object instanceOfClass;
@Test
public void generateBC() throws Exception {
path = System.getProperty("user.dir")+"/src/test/resources/bytecode/javFiles/Inf.jav";
@BeforeClass
public static void setUpBeforeClass() throws Exception {
path = System.getProperty("user.dir")+"/src/test/resources/bytecode/javFiles/VectorSuper.jav";
fileToTest = new File(path);
compiler = new JavaTXCompiler(fileToTest);
pathToClassFile = System.getProperty("user.dir")+"/src/test/resources/testBytecode/generatedBC/";
compiler.generateBytecode(pathToClassFile);
loader = new URLClassLoader(new URL[] {new URL("file://"+pathToClassFile)});
classToTest = loader.loadClass("Inf");
classToTest = loader.loadClass("VectorSuper");
instanceOfClass = classToTest.getDeclaredConstructor().newInstance();
}
@Test
public void test1() throws Exception {
Method m = classToTest.getDeclaredMethod("m", Vector.class);
//Object result = m.invoke(instanceOfClass, 1);
//assertEquals(1,result);
}
}

@@ -1,64 +0,0 @@
package bytecode.simplifyalgo;
import static org.junit.Assert.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import org.junit.BeforeClass;
import org.junit.Test;
import org.objectweb.asm.Type;
import de.dhbwstuttgart.bytecode.TPHExtractor;
import de.dhbwstuttgart.bytecode.constraint.ExtendsConstraint;
import de.dhbwstuttgart.bytecode.constraint.TPHConstraint;
import de.dhbwstuttgart.bytecode.constraint.TPHConstraint.Relation;
import de.dhbwstuttgart.bytecode.utilities.ConstraintsFinder;
import de.dhbwstuttgart.bytecode.utilities.MethodAndTPH;
import de.dhbwstuttgart.bytecode.utilities.Simplify;
import de.dhbwstuttgart.typedeployment.TypeInsertPlacer;
/**
*
* @author Fayez Abu Alia
*
*/
public class FinderTest {
@Test
public void testM1() {
List<TPHConstraint> allCons = new ArrayList<>();
// L < M
TPHConstraint c1 = new ExtendsConstraint("L", "M", Relation.EXTENDS);
// L < N
TPHConstraint c2 = new ExtendsConstraint("L", "N", Relation.EXTENDS);
// M < O
TPHConstraint c3 = new ExtendsConstraint("M", "O", Relation.EXTENDS);
// M < P
TPHConstraint c4 = new ExtendsConstraint("M", "P", Relation.EXTENDS);
allCons.add(c1);
allCons.add(c2);
allCons.add(c3);
allCons.add(c4);
List<List<TPHConstraint>> res = new ArrayList<>();
List<TPHConstraint> l1 = new ArrayList<>();
List<TPHConstraint> l2 = new ArrayList<>();
l1.add(c1);
l1.add(c2);
l2.add(c3);
l2.add(c4);
res.add(l1);
res.add(l2);
ConstraintsFinder finder = new ConstraintsFinder(allCons);
assertEquals(finder.findConstraints(), res);
}
}

src/test/java/logFiles/.gitignore (vendored Normal file, +5 lines)

@@ -0,0 +1,5 @@
# Ignore everything in this directory
*
# Except this file
!.gitignore

@@ -1,27 +0,0 @@
import java.lang.Boolean;
public class FieldTphMMeth {
a;
public FieldTphMMeth(c,d,e) {
a = m(c,d,e);
}
m(b,d,e) {
if(e) {
return m3(b);
} else{
return m3(d);
}
}
m2(b) {
a = m3(b);
}
m3(b){
return b;
}
}

@@ -1,6 +1,6 @@
public class Id<FTU extends FTT,FTT> {
public class Id {
id(FTU b){
id(b){
return b;
}
}

@@ -1,19 +0,0 @@
public class Inf {
m(x,y,a){
var z;
var v;
var w;
var b;
y=x;
z=x;
v=y;
w=y;
y=a;
b=a;
}
}
// v w
// \ /
// z y b
// \ / \ /
// x a

@@ -1,13 +0,0 @@
public class KompTph {
public m(a, b, c) {
var d = a;
var e = a;
a = b;
c = b;
m2(a,c);
}
public m2(a,b){
m(a,a,b);
}
}

@@ -4,7 +4,7 @@ import java.lang.Float;
//import java.lang.Byte;
//import java.lang.Boolean;
public class Matrix extends Vector<Vector<Integer>> {
public class Matrix extends Vector<Vector<Integer>> {
Matrix () {
}
@@ -16,10 +16,10 @@ public class Matrix extends Vector<Vector<Integer>> {
// Boolean a = this.add(vv.elementAt(i));
this.add(vv.elementAt(i));
i=i+1;
}
}
}
mul(java.util.Vector<? extends Vector<? extends java.lang.Integer>> m) {
mul(m) {
var ret = new Matrix();
var i = 0;
while(i < size()) {

@@ -1,9 +1,9 @@
import java.util.Vector;
import java.lang.Integer;
import java.lang.Byte;
//import java.lang.Byte;
import java.lang.Boolean;
public class MatrixOP extends Vector<Vector<Integer>> {
public class MatrixOP extends Vector<Vector<Integer>> {
MatrixOP () {
}
@@ -18,10 +18,10 @@ public class MatrixOP extends Vector<Vector<Integer>> {
}
}
Fun2$$<java.util.Vector<? extends java.util.Vector<? extends java.lang.Byte>>, java.util.Vector<? extends java.util.Vector<? extends java.lang.Byte>>, MatrixOP> mul = (m1, m2) -> {
mul = (m1, m2) -> {
var ret = new MatrixOP();
var i = 0;
while(i < m1.size()) {
while(i < size()) {
var v1 = m1.elementAt(i);
var v2 = new Vector<Integer>();
var j = 0;

@@ -14,10 +14,10 @@ sort(in){
return merge(sort(firstHalf), sort(secondHalf));
}
*/
/*
void sort(a){
void sort(ArrayList<String> a){
a = merge(a,a);
}
*/
}

@@ -0,0 +1,11 @@
import java.util.Vector;
import java.lang.Integer;
public class VectorSuper {
m(x){
Integer y = 1;
x.addElement(y);
//return x;
}
}

@@ -1,14 +1,13 @@
import java.lang.Integer;
class Faculty {
//fact;
Integer mul(Integer x, Integer y) {
Integer mul(Integer x, Integer y) {
return x;
}
m () {
var fact = (Integer x) -> {
var fact = (Integer x) -> {
return mul(x, fact.apply(x));
};
return fact;

@@ -1,8 +1,13 @@
class MathStruc <A> {
A model;
innerOp = o -> ms -> new MathStruc<A>(o.apply(this.model, ms.model));
class mathStruc<A> {
MathStruc(A m) { model=m; }
}
mathStruc(A a) { }
A model(){ A a; return a; }
methode(){
var innerOp = o -> ms ->
new mathStruc<A>(o.apply(this.model(),ms.model()));
return innerOp;
}
}