forked from JavaTX/JavaCompilerCore
removed debug variables
This commit is contained in:
parent 701962b0e7
commit 70b28bbfb0
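The diff below touches TypeUnify.java, TypeUnify2Task.java and TypeUnifyTask.java: it drops the debug counters (nOfUnify, noUndefPair, noAllErasedElements, noBacktracking, noLoop, noShortendElements), the unused one flag and the oup ordering field, removes the rekTiefe recursion-depth parameter from the task constructors and from unify/unify2/computeCartesianRecursive, and deletes the log and statistics writes that reported those values. As a minimal before/after sketch of the call-site change implied by the diff (variable names such as undConstrains, fc and logFile are taken from the surrounding context, not introduced here):

    // before this commit: the recursion depth (0) was passed explicitly
    TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true,
            new WriterActiveObject(logFile, pool), log, 0, resultModel, pool);

    // after this commit: the rekTiefe argument is gone from the constructor
    TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true,
            new WriterActiveObject(logFile, pool), log, resultModel, pool);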
TypeUnify.java

@@ -1,6 +1,5 @@
 package de.dhbwstuttgart.typeinference.unify;

-import java.io.IOException;
 import java.io.Writer;
 import java.util.List;
 import java.util.Set;
@@ -20,23 +19,15 @@ public class TypeUnify {
 * @param fc
 * @param logFile
 * @param log
-* @param cons
 * @return
 */
 public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallel resultModel, UnifyTaskModelParallel taskModel) {
 ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors(), ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true);
 taskModel.setPool(pool);
 resultModel.setPool(pool);
-TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObject(logFile, pool), log, 0, resultModel, pool);
+TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObject(logFile, pool), log, resultModel, pool);
 pool.invoke(unifyTask);
 Set<Set<UnifyPair>> res = unifyTask.join();
-try {
-logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements + "\n");
-logFile.flush();
-}
-catch (IOException e) {
-System.err.println("no log-File");
-}
 return res;
 }

@@ -47,15 +38,13 @@ public class TypeUnify {
 * @param fc
 * @param logFile
 * @param log
-* @param cons
-* @param ret
 * @return
 */
 public UnifyResultModelParallel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallel resultModel, UnifyTaskModelParallel taskModel) {
 ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors(), ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true);
 taskModel.setPool(pool);
 resultModel.setPool(pool);
-TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObject(logFile, pool), log, 0, resultModel, pool);
+TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObject(logFile, pool), log, resultModel, pool);
 pool.invoke(unifyTask);
 return resultModel;
 }
@@ -67,8 +56,6 @@ public class TypeUnify {
 * @param fc
 * @param logFile
 * @param log
-* @param cons
-* @param ret
 * @return
 */
 public UnifyResultModelParallel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallel resultModel, UnifyTaskModelParallel taskModel) {
@@ -76,19 +63,11 @@ public class TypeUnify {
 taskModel.setPool(pool);
 resultModel.setPool(pool);
 TypeUnifyTask unifyTask = //new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
-new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObject(logFile, pool), log, 0, resultModel, pool, statistics);
+new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObject(logFile, pool), log, resultModel, pool, statistics);

 pool.invoke(unifyTask);
-Set<Set<UnifyPair>> res = unifyTask.join();
-try {
-logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
-logFile.flush();
-unifyTask.statisticsFile.write("Backtracking: " + unifyTask.noBacktracking);
-unifyTask.statisticsFile.write("\nLoops: " + unifyTask.noLoop);
-}
-catch (IOException e) {
-System.err.println("no log-File");
-}
+unifyTask.join();
 return resultModel;
 }

@@ -107,21 +86,13 @@ public class TypeUnify {
 * @param fc
 * @param logFile
 * @param log
-* @param cons
 * @return
 */
 public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallel resultModel, UnifyTaskModelParallel taskModel) {
 resultModel.setPool(ForkJoinPool.commonPool());
-TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, new WriterActiveObject(logFile, ForkJoinPool.commonPool()), log, 0, resultModel, ForkJoinPool.commonPool());
+TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, new WriterActiveObject(logFile, ForkJoinPool.commonPool()), log, resultModel, ForkJoinPool.commonPool());
 unifyTask.statisticsFile = statistics;
 Set<Set<UnifyPair>> res = unifyTask.compute();
-try {
-logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
-logFile.flush();
-}
-catch (IOException e) {
-System.err.println("no log-File");
-}
 return res;
 }

TypeUnify2Task.java

@@ -1,17 +1,11 @@
 package de.dhbwstuttgart.typeinference.unify;

-import java.io.FileWriter;
-import java.io.IOException;
 import java.io.Writer;
-import java.util.ArrayList;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.ForkJoinPool;

 import de.dhbwstuttgart.typeinference.constraints.Constraint;
-import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
-import de.dhbwstuttgart.typeinference.constraints.Pair;
 import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
 import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

@@ -24,14 +18,14 @@ public class TypeUnify2Task extends TypeUnifyTask {
 TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq,
 List<Set<Constraint<UnifyPair>>> oderConstraints,
 Set<UnifyPair> nextSetElement,
-IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, int rekTiefe, UnifyResultModelParallel urm,
+IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, UnifyResultModelParallel urm,
 Set<UnifyPair> methodSignatureConstraintUebergabe, ForkJoinPool pool, Writer statistics) {
-this(setToFlatten, eq, oderConstraints, nextSetElement, fc, parallel, logFile, log, rekTiefe, urm, methodSignatureConstraintUebergabe, pool );
+this(setToFlatten, eq, oderConstraints, nextSetElement, fc, parallel, logFile, log, urm, methodSignatureConstraintUebergabe, pool );

 }

-public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, int rekTiefe, UnifyResultModelParallel urm, Set<UnifyPair> methodSignatureConstraintUebergabe, ForkJoinPool pool) {
-super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, pool);
+public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, UnifyResultModelParallel urm, Set<UnifyPair> methodSignatureConstraintUebergabe, ForkJoinPool pool) {
+super(eq, oderConstraints, fc, parallel, logFile, log, urm, pool);
 this.setToFlatten = setToFlatten;
 this.nextSetElement = nextSetElement;
 this.methodSignatureConstraintUebergabe = methodSignatureConstraintUebergabe;
@@ -43,11 +37,7 @@ public class TypeUnify2Task extends TypeUnifyTask {

 @Override
 protected Set<Set<UnifyPair>> compute() {
-if (one) {
-System.out.println("two");
-}
-one = true;
-Set<Set<UnifyPair>> res = unify2(setToFlatten, eq, oderConstraintsField, fc, parallel, rekTiefeField, methodSignatureConstraintUebergabe);
+Set<Set<UnifyPair>> res = unify2(setToFlatten, eq, oderConstraintsField, fc, parallel, methodSignatureConstraintUebergabe);
 /*if (isUndefinedPairSetSet(res)) {
 return new HashSet<>(); }
 else
TypeUnifyTask.java

@@ -29,8 +29,6 @@ import de.dhbwstuttgart.typeinference.unify.interfaces.IMatch;
 import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
 import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify;
 import de.dhbwstuttgart.typeinference.unify.model.ExtendsType;
-import de.dhbwstuttgart.typeinference.unify.model.FunNType;
-import de.dhbwstuttgart.typeinference.unify.model.OrderingExtend;
 import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
 import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
 import de.dhbwstuttgart.typeinference.unify.model.ReferenceType;
@@ -39,9 +37,6 @@ import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
 import de.dhbwstuttgart.typeinference.unify.model.Unifier;
 import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
 import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
-import de.dhbwstuttgart.typeinference.unify.model.WildcardType;
-import de.dhbwstuttgart.util.Pair;
-import de.dhbwstuttgart.typeinference.unify.model.OrderingUnifyPair;
 import org.apache.commons.io.output.NullWriter;

 import java.io.FileWriter;
@@ -69,7 +64,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 protected UnifyResultModelParallel urm;
 private static int totalnoOfThread = 0;
 int thNo;
-protected boolean one = false;

 public static final String rootDirectory = System.getProperty("user.dir")+"/test/logFiles/";
 protected WriterActiveObject logFile;
@@ -91,24 +85,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {

 protected IFiniteClosure fc;

-protected OrderingExtend<Set<UnifyPair>> oup;
-
 protected boolean parallel;

-int rekTiefeField;
-
-Integer nOfUnify = 0;
-
-Integer noUndefPair = 0;
-
-Integer noAllErasedElements = 0;
-
-static int noBacktracking;
-
-static int noLoop;
-
-static Integer noShortendElements = 0;
-
 static Writer statisticsFile = new NullWriter();

 public TypeUnifyTask() {
@@ -130,11 +108,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 */

 //statistics
-public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, int rekTiefe, UnifyResultModelParallel urm, ForkJoinPool pool, Writer statisticsFile) {
-this(eq,oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, pool);
+public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, UnifyResultModelParallel urm, ForkJoinPool pool, Writer statisticsFile) {
+this(eq,oderConstraints, fc, parallel, logFile, log, urm, pool);
 this.statisticsFile = statisticsFile;
 }
-public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, int rekTiefe, UnifyResultModelParallel urm, ForkJoinPool pool) {
+public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, UnifyResultModelParallel urm, ForkJoinPool pool) {
 this.eq = eq;
 //this.oderConstraints = oderConstraints.stream().map(x -> x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
 this.oderConstraintsField = oderConstraints; /*.stream().map(x -> {
@@ -148,7 +126,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {

 //x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
 this.fc = fc;
-this.oup = new OrderingUnifyPair(fc);
 this.parallel = parallel;
 this.logFile = logFile;
 this.log = log;
@@ -183,7 +160,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 }
 */
 rules = new RuleSet(logFile);
-this.rekTiefeField = rekTiefe;
 this.urm = urm;
 }

@@ -228,7 +204,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 ArrayList<Set<Constraint<UnifyPair>>> remainingOderconstraints = oderConstraintsField.stream()
 .filter(x -> x.size()>1)
 .collect(Collectors.toCollection(ArrayList::new));
-Set<Set<UnifyPair>> res = unify(neweq, remainingOderconstraints, fc, parallel, rekTiefeField, new HashSet<>());
+Set<Set<UnifyPair>> res = unify(neweq, remainingOderconstraints, fc, parallel, new HashSet<>());

 if(parallel){
 logFile.close();
@@ -264,7 +240,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 * @param fc The finite closure
 * @return The set of all principal type unifiers
 */
-protected Set<Set<UnifyPair>> unify(final Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe, Set<UnifyPair> methodSignatureConstraint) {
+protected Set<Set<UnifyPair>> unify(final Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Set<UnifyPair> methodSignatureConstraint) {
 //Set<UnifyPair> aas = eq.stream().filter(x -> x.getLhsType().getName().equals("AA") //&& x.getPairOp().equals(PairOperator.SMALLERDOT)
 // ).collect(Collectors.toCollection(HashSet::new));
 //writeLog(nOfUnify.toString() + " AA: " + aas.toString());
@@ -274,11 +250,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {

 //.collect(Collectors.toCollection(HashSet::new)));

-rekTiefe++;
-nOfUnify++;
-writeLog(nOfUnify + " Unifikation: " + eq.toString());
-writeLog(nOfUnify.toString() + " Oderconstraints: " + oderConstraints.toString());
-
 /*
 * Variancen auf alle Gleichungen vererben
 */
@@ -330,9 +301,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {

 eq0.forEach(UnifyPair::disableCondWildcards);

-writeLog(nOfUnify.toString() + " Unifikation nach applyTypeUnificationRules: " + eq + "\n"
-+ nOfUnify + " Oderconstraints nach applyTypeUnificationRules: " + oderConstraints);
-
 /*
 * Step 2 and 3: Create a subset eq1s of pairs where both sides are TPH and eq2s of the other pairs
 */
@@ -382,15 +350,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 //nicht ausgewertet Faculty Beispiel im 1. Schritt
 //PL 2017-10-03 geloest, muesste noch mit FCs mit kleineren
 //Typen getestet werden.
-writeLog(nOfUnify.toString() + " Oderconstraints2: " + oderConstraintsOutput);
 // If pairs occured that did not match one of the cartesian product cases,
 // those pairs are contradictory and the unification is impossible.
 if(!undefinedPairs.isEmpty()) {
-noUndefPair++;
-for (UnifyPair up : undefinedPairs) {
-writeLog(noUndefPair.toString() + " UndefinedPairs; " + up + "\n"
-+ "BasePair; " + up.getBasePair());
-}
 Set<Set<UnifyPair>> error = new HashSet<>();
 undefinedPairs = undefinedPairs.stream().peek(UnifyPair::setUndefinedPair).collect(Collectors.toCollection(HashSet::new));
 error.add(undefinedPairs);
@@ -433,12 +395,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {

 //Aufruf von computeCartesianRecursive ANFANG
 //writeLog("topLevelSets: " + topLevelSets.toString());
-return computeCartesianRecursive(new ArrayList<>(topLevelSets), eq, oderConstraintsOutput, fc, parallel, rekTiefe, methodSignatureConstraint);
+return computeCartesianRecursive(new ArrayList<>(topLevelSets), eq, oderConstraintsOutput, fc, parallel, methodSignatureConstraint);

 }


-Set<Set<UnifyPair>> unify2(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe, Set<UnifyPair> methodSignatureConstraint) {
+Set<Set<UnifyPair>> unify2(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Set<UnifyPair> methodSignatureConstraint) {
 //Aufruf von computeCartesianRecursive ENDE

 //keine Ahnung woher das kommt
@@ -535,12 +497,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 }
 }
 else if(eqPrimePrime.isPresent()) {
-Set<Set<UnifyPair>> unifyres = unify(eqPrimePrime.get(), newOderConstraints, fc, parallel, rekTiefe, methodSignatureConstraint);
+Set<Set<UnifyPair>> unifyres = unify(eqPrimePrime.get(), newOderConstraints, fc, parallel, methodSignatureConstraint);

 eqPrimePrimeSet.addAll(unifyres);
 }
 else {
-Set<Set<UnifyPair>> unifyres = unify(eqPrime, newOderConstraints, fc, parallel, rekTiefe, methodSignatureConstraint);
+Set<Set<UnifyPair>> unifyres = unify(eqPrime, newOderConstraints, fc, parallel, methodSignatureConstraint);


 eqPrimePrimeSet.addAll(unifyres);
@@ -579,10 +541,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 * @param oderConstraints Remaining or-constraints
 * @param fc The finite closure
 * @param parallel If the algorithm should be parallelized run
-* @param rekTiefe Deep of recursive calls
 * @return The set of all principal type unifiers
 */
-Set<Set<UnifyPair>> computeCartesianRecursive(ArrayList<Set<? extends Set<UnifyPair>>> topLevelSets, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe, Set<UnifyPair> methodSignatureConstraint) {
+Set<Set<UnifyPair>> computeCartesianRecursive(ArrayList<Set<? extends Set<UnifyPair>>> topLevelSets, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Set<UnifyPair> methodSignatureConstraint) {

 //oneElems: Alle 1-elementigen Mengen, die nur ein Paar
 //a <. theta, theta <. a oder a =. theta enthalten
@@ -598,7 +559,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 Optional<Set<? extends Set<UnifyPair>>> optNextSet = topLevelSets.stream().filter(x -> x.size()>1).findAny();

 if (optNextSet.isEmpty()) {//Alle Elemente sind 1-elementig
-return unify2(oneElems, eq, oderConstraints, fc, parallel, rekTiefe, methodSignatureConstraint);
+return unify2(oneElems, eq, oderConstraints, fc, parallel, methodSignatureConstraint);
 }

 Set<? extends Set<UnifyPair>> nextSet = optNextSet.get();
@@ -784,7 +745,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 }

 Set<Set<UnifyPair>> elems = new HashSet<>(oneElems);
-writeLog("a1: " + rekTiefe + " "+ "variance: "+ variance + " " + a.toString()+ "\n");
+writeLog("variance: "+ variance + " " + a.toString()+ "\n");

 //Ergebnisvariable für den aktuelle Thread
 Set<Set<UnifyPair>> res;
@@ -817,7 +778,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 newElemsOrig.add(a);

 /* FORK ANFANG */
-TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, methodSignatureConstraint, this.pool);
+TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, urm, methodSignatureConstraint, this.pool);
 //forks.add(forkOrig);
 forkOrig.fork();

@@ -840,7 +801,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
 List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
 newElems.add(nSaL);
-TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, new HashSet<>(methodSignatureConstraint), this.pool);
+TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, urm, new HashSet<>(methodSignatureConstraint), this.pool);
 forks.add(fork);
 fork.fork();
 }
@@ -886,7 +847,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 newElemsOrig.add(a);

 /* FORK ANFANG */
-TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, new HashSet<>(methodSignatureConstraint), this.pool);
+TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, urm, new HashSet<>(methodSignatureConstraint), this.pool);
 //forks.add(forkOrig);
 forkOrig.fork();

@@ -908,7 +869,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
 List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
 newElems.add(nSaL);
-TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, new HashSet<>(methodSignatureConstraint), this.pool);
+TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, urm, new HashSet<>(methodSignatureConstraint), this.pool);
 forks.add(fork);
 fork.fork();
 }
@@ -955,7 +916,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 newElemsOrig.add(a);

 /* FORK ANFANG */
-TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, new HashSet<>(methodSignatureConstraint), this.pool);
+TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, urm, new HashSet<>(methodSignatureConstraint), this.pool);
 //forks.add(forkOrig);
 forkOrig.fork();
 /* FORK ENDE */
@@ -970,7 +931,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
 List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
 newElems.add(nSaL);
-TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, methodSignatureConstraint, this.pool);
+TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, urm, methodSignatureConstraint, this.pool);
 forks.add(fork);
 fork.fork();
 }
@@ -1004,7 +965,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 //noOfThread++;
 } else {//parallel = false oder MaxNoOfThreads ist erreicht, sequentiell weiterarbeiten
 elems.add(a); //PL 2019-01-16 muss das wirklich hin steht schon in Zeile 859 ja braucht man siehe Zeile 859
-res = unify2(elems, eq, oderConstraints, fc, parallel, rekTiefe, new HashSet<>(methodSignatureConstraint));
+res = unify2(elems, eq, oderConstraints, fc, parallel, new HashSet<>(methodSignatureConstraint));
 }}}

 //Ab hier alle parallele Berechnungen wieder zusammengeführt.
@@ -1317,17 +1278,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 // .collect(Collectors.toCollection(ArrayList::new));
 writeLog("res (undef): " + res.toString() + "\n" +
 "abhSubst: " + abhSubst.toString() + "\n" +
-"a2: " + rekTiefe + " " + a.toString() + "\n" +
 "Durchschnitt: " + durchschnitt.toString() + "\n" +
 "nextSet: " + nextSet.toString() + "\n" +
 "nextSetasList: " + nextSetasList.toString() + "\n" +
 "Number first erased Elements (undef): " + (len - nofstred) + "\n" +
 "Number second erased Elements (undef): " + (nofstred- nextSetasList.size()) + "\n" +
 "Number erased Elements (undef): " + (len - nextSetasList.size()));
-noAllErasedElements = noAllErasedElements + (len - nextSetasList.size());
-writeLog("Number of all erased Elements (undef): " + noAllErasedElements.toString());
-noBacktracking++;
-writeLog("Number of Backtracking: " + noBacktracking);
 //writeStatistics("Number of erased elements: " + (len - nextSetasList.size()));
 //writeStatistics("Number of Backtracking: " + noBacktracking);
 //System.out.println("");
@@ -1342,7 +1298,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 //else result.stream().filter(y -> !isUndefinedPairSet(y));
 writeLog("res: " + res.toString());
 //writeStatistics(" End Number of Elements (" + rekTiefe + "): " + nextSetasList.size());
-noLoop++;
 //writeStatistics("Number of Loops: " + noLoop);
 }
 //2020-02-02: if (variance ==2) Hier Aufruf von filterOverriding einfuegen