modified: ../../src/de/dhbwstuttgart/core/JavaTXCompiler.java
modified: ../../src/de/dhbwstuttgart/typeinference/unify/TypeUnify.java
modified: ../../src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java

compare(Matrix, Vector<gen_ab>, ? extends Vector<? extends Integer>> <.? gen_ab does not work
parent b5b5b5d9c9
commit 0315a1f144
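For orientation, a minimal Java sketch of the kind of source the commit title appears to refer to; the class layout and method body below are illustrative assumptions only (in Java-TX the parameter type of compare would be omitted, and gen_ab stands for the placeholder the unifier generates for it):

    import java.util.Vector;

    // Illustrative assumption: a Matrix type over Vector<Vector<Integer>> whose
    // compare method takes a second matrix-like argument. Inferring the omitted
    // parameter type yields a constraint of roughly the shape
    //   compare(Matrix, Vector<gen_ab>) <. compare(?, ? extends Vector<? extends Integer>)
    class Matrix extends Vector<Vector<Integer>> {

        Integer compare(Vector<? extends Vector<? extends Integer>> other) {
            // hypothetical body; only the signature matters for the constraint
            return this.size() - other.size();
        }
    }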
../../src/de/dhbwstuttgart/core/JavaTXCompiler.java
@@ -18,11 +18,13 @@ import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
 import de.dhbwstuttgart.typeinference.constraints.Pair;
 import de.dhbwstuttgart.typeinference.result.ResultSet;
 import de.dhbwstuttgart.typeinference.typeAlgo.TYPE;
 import de.dhbwstuttgart.typeinference.unify.RuleSet;
 import de.dhbwstuttgart.typeinference.unify.TypeUnify;
 import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
 import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
 
 import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
 import java.util.*;
 import java.util.stream.Collectors;
@@ -94,21 +96,28 @@ public class JavaTXCompiler {
 
         TypeUnify unify = new TypeUnify();
         Set<Set<UnifyPair>> results = new HashSet<>();
-        for (List<Constraint<UnifyPair>> xCons : unifyCons.cartesianProduct()) {
-            Set<UnifyPair> xConsSet = new HashSet<>();
-            for (Constraint<UnifyPair> constraint : xCons) {
-                xConsSet.addAll(constraint);
-            }
+        try {
+            FileWriter logFile = new FileWriter(new File(System.getProperty("user.dir")+"/test/logFiles/"+"log"));
+            for (List<Constraint<UnifyPair>> xCons : unifyCons.cartesianProduct()) {
+                Set<UnifyPair> xConsSet = new HashSet<>();
+                for (Constraint<UnifyPair> constraint : xCons) {
+                    xConsSet.addAll(constraint);
+                }
 
-            System.out.println(xConsSet);
-            Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure);
-            //Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
-            System.out.println("RESULT: " + result);
-            results.addAll(result);
-        }
-        return results.stream().map((unifyPairs ->
-                new ResultSet(UnifyTypeFactory.convert(unifyPairs, generateTPHMap(cons))))).collect(Collectors.toList());
+                System.out.println(xConsSet);
+                Set<Set<UnifyPair>> result = unify.unifySequential(xConsSet, finiteClosure, logFile);
+                //Set<Set<UnifyPair>> result = unify.unify(xConsSet, finiteClosure);
+                System.out.println("RESULT: " + result);
+                logFile.write("RES: " + result.toString()+"\n");
+                logFile.flush();
+                results.addAll(result);
+            }
+        }
+        catch (IOException e) { }
+
+        return results.stream().map((unifyPairs ->
+                new ResultSet(UnifyTypeFactory.convert(unifyPairs, generateTPHMap(cons))))).collect(Collectors.toList());
     }
 
     private Map<String, TypePlaceholder> generateTPHMap(ConstraintSet<Pair> constraints) {
         HashMap<String, TypePlaceholder> ret = new HashMap<>();
../../src/de/dhbwstuttgart/typeinference/unify/TypeUnify.java
@@ -1,5 +1,6 @@
 package de.dhbwstuttgart.typeinference.unify;
 
+import java.io.FileWriter;
 import java.util.Set;
 import java.util.concurrent.ForkJoinPool;
 
@@ -7,16 +8,16 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
 import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
 
 public class TypeUnify {
-    public Set<Set<UnifyPair>> unify(Set<UnifyPair> eq, IFiniteClosure fc) {
-        TypeUnifyTask unifyTask = new TypeUnifyTask(eq, fc, true);
+    public Set<Set<UnifyPair>> unify(Set<UnifyPair> eq, IFiniteClosure fc, FileWriter logFile) {
+        TypeUnifyTask unifyTask = new TypeUnifyTask(eq, fc, true, logFile);
         ForkJoinPool pool = new ForkJoinPool();
         pool.invoke(unifyTask);
         Set<Set<UnifyPair>> res = unifyTask.join();
         return res;
     }
 
-    public Set<Set<UnifyPair>> unifySequential(Set<UnifyPair> eq, IFiniteClosure fc) {
-        TypeUnifyTask unifyTask = new TypeUnifyTask(eq, fc, false);
+    public Set<Set<UnifyPair>> unifySequential(Set<UnifyPair> eq, IFiniteClosure fc, FileWriter logFile) {
+        TypeUnifyTask unifyTask = new TypeUnifyTask(eq, fc, false, logFile);
         Set<Set<UnifyPair>> res = unifyTask.compute();
         return res;
     }
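Both entry points now expect the caller to supply the FileWriter. A caller-side sketch, mirroring the JavaTXCompiler hunk above (the helper method and its throws clause are illustrative, not part of the diff):

    import java.io.File;
    import java.io.FileWriter;
    import java.io.IOException;
    import java.util.Set;

    import de.dhbwstuttgart.typeinference.unify.TypeUnify;
    import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
    import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

    class UnifyLoggingExample {
        // Hypothetical helper: open the log, run the sequential unification, record the result.
        static Set<Set<UnifyPair>> runUnify(Set<UnifyPair> xConsSet, FiniteClosure finiteClosure) throws IOException {
            FileWriter logFile = new FileWriter(new File(System.getProperty("user.dir") + "/test/logFiles/log"));
            Set<Set<UnifyPair>> result = new TypeUnify().unifySequential(xConsSet, finiteClosure, logFile);
            logFile.write("RES: " + result.toString() + "\n");
            logFile.flush();
            return result;
        }
    }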
../../src/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
@@ -75,18 +75,13 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
         rules = new RuleSet();
     }
 
-    public TypeUnifyTask(Set<UnifyPair> eq, IFiniteClosure fc, boolean parallel) {
+    public TypeUnifyTask(Set<UnifyPair> eq, IFiniteClosure fc, boolean parallel, FileWriter logFile) {
         this.eq = eq;
         this.fc = fc;
         this.oup = new OrderingUnifyPair(fc);
         this.parallel = parallel;
-        try {
-            logFile = new FileWriter(new File(rootDirectory+"log"));
-            logFile.write("xxx");
-            logFile.flush();
-            rules = new RuleSet(logFile);
-        }
-        catch (IOException e) { }
+        this.logFile = logFile;
+        rules = new RuleSet(logFile);
     }
 
     @Override
@@ -105,6 +100,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
          * Step 1: Repeated application of reduce, adapt, erase, swap
          */
+        writeLog("Unifikation: " + eq.toString());
         eq = eq.stream().map(x -> {x.setVariance((byte)-1); return x;}).collect(Collectors.toCollection(HashSet::new));
         Set<UnifyPair> eq0 = applyTypeUnificationRules(eq, fc);
 
         /*
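The writeLog helper called here is not shown in this hunk; a plausible shape, assuming it simply appends to the injected logFile and swallows I/O errors the way the surrounding code does:

    // Assumed sketch of TypeUnifyTask.writeLog, not taken from the diff.
    void writeLog(String str) {
        try {
            logFile.write(str + "\n");
            logFile.flush();
        }
        catch (IOException e) { }
    }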
@@ -245,13 +241,13 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
             eqPrimePrimeSet.add(eqPrime);
         else if(eqPrimePrime.isPresent()) {
             //System.out.println("nextStep: " + eqPrimePrime.get());
-            TypeUnifyTask fork = new TypeUnifyTask(eqPrimePrime.get(), fc, true);
+            TypeUnifyTask fork = new TypeUnifyTask(eqPrimePrime.get(), fc, true, logFile);
             forks.add(fork);
             fork.fork();
         }
         else {
             //System.out.println("nextStep: " + eqPrime);
-            TypeUnifyTask fork = new TypeUnifyTask(eqPrime, fc, true);
+            TypeUnifyTask fork = new TypeUnifyTask(eqPrime, fc, true, logFile);
             forks.add(fork);
             fork.fork();
         }
@@ -314,6 +310,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
         int i = 0;
         byte variance = nextSetasList.iterator().next().iterator().next().getVariance();
         Set<UnifyPair> a_next = null;
+        if (nextSetasList.iterator().next().iterator().next().getLhsType().getName().equals("A"))
+            System.out.print("");
         if (nextSetasList.size()>1) {
             if (variance == 1) {
                 a_next = oup.max(nextSetasList.iterator());
@@ -373,6 +371,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
                 }
             }
             else { if (variance == -1) {
+                if (a.iterator().next().getLhsType().getName().equals("A"))
+                    System.out.print("");
                 if (a.equals(a_next) || (oup.compare(a, a_next) == -1)) {
                     System.out.print("");
                     break;