forked from JavaTX/JavaCompilerCore
Compare commits
23 Commits
performanc ... performanc
| SHA1 |
| --- |
| 8f6eb8ff0a |
| bfefe90650 |
| c292ff2d9e |
| c14dd6e97c |
| 8f8ee9a385 |
| 3863968a6e |
| 03c432455d |
| 441e50a70d |
| 0807885465 |
| 8abe7f6c28 |
| 2a92a0e48e |
| b3639a3d08 |
| 28969e7931 |
| b806fa88ff |
| 301e9143ec |
| bb4eaaccc5 |
| 4dbae46765 |
| c64071f235 |
| 94dbf1f7ad |
| 890f64c813 |
| 5aadb7545e |
| 0c0ac61a02 |
| 46192e3fd3 |
pom.xml (4 changed lines)
@@ -54,8 +54,8 @@ http://maven.apache.org/maven-v4_0_0.xsd">
<version>3.8.0</version>
<configuration>
<compilerArgs>--enable-preview</compilerArgs>
<source>19</source>
<target>19</target>
<source>21</source>
<target>21</target>
</configuration>
</plugin>
<plugin>
JavaTXCompiler.java
@@ -88,7 +88,8 @@ public class JavaTXCompiler {
this(sourceFiles, null);
}
public JavaTXCompiler(List<File> sources, List<File> contextPath) throws IOException, ClassNotFoundException {
statistics = new FileWriter(new File(System.getProperty("user.dir") + "/" + sources.get(0).getName() + "_"+ new Timestamp(System.currentTimeMillis())));
//statistics = new FileWriter(new File(System.getProperty("user.dir") + "/" + sources.get(0).getName() + "_"+ new Timestamp(System.currentTimeMillis())));
statistics = new OutputStreamWriter(new NullOutputStream());
statistics.write("test");
if(contextPath == null || contextPath.isEmpty()){
//When no contextPaths are given, the working directory is the sources root
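The hunk above swaps the file-backed statistics writer for a writer over commons-io's NullOutputStream, so statistics output is still accepted but silently discarded. A minimal stand-alone sketch of that pattern (the class and method names here are illustrative, not repository code):

```java
import java.io.OutputStreamWriter;
import java.io.Writer;

import org.apache.commons.io.output.NullOutputStream;

// Illustrative only: a Writer that accepts output but never persists it,
// mirroring the disabled statistics writer in the hunk above.
public class NullStatisticsSketch {
    public static void main(String[] args) throws Exception {
        Writer statistics = new OutputStreamWriter(new NullOutputStream());
        statistics.write("test"); // succeeds, but the bytes are dropped
        statistics.flush();
        statistics.close();
    }
}
```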
RuleSet.java
@@ -9,6 +9,7 @@ import java.util.Optional;
import java.util.Queue;
import java.util.Set;
import java.util.Stack;
import java.util.concurrent.ForkJoinPool;
import java.util.function.Function;
import java.util.stream.Collectors;

@@ -44,14 +45,14 @@ import org.apache.commons.io.output.NullOutputStream;
*/
public class RuleSet implements IRuleSet{

Writer logFile;
WriterActiveObject logFile;

public RuleSet() {
super();
logFile = new OutputStreamWriter(new NullOutputStream());
logFile = new WriterActiveObject(new OutputStreamWriter(new NullOutputStream()), ForkJoinPool.commonPool());
}

RuleSet(Writer logFile) {
RuleSet(WriterActiveObject logFile) {
this.logFile = logFile;
}

@@ -867,14 +868,8 @@ public class RuleSet implements IRuleSet{
UnifyType r = x.getRhsType();
if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
} );
try {
logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNred: " + result + "\n");
logFile.flush();
}
catch (IOException e) {
System.out.println("logFile-Error");
}
logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNred: " + result + "\n");
return Optional.of(result);
}

@@ -917,14 +912,10 @@ public class RuleSet implements IRuleSet{
UnifyType r = x.getRhsType();
if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
} );
try {
logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNgreater: " + result + "\n");
logFile.flush();
}
catch (IOException e) {
System.out.println("lofFile-Error");
}

logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNgreater: " + result + "\n");

return Optional.of(result);
}

@@ -967,14 +958,9 @@ public class RuleSet implements IRuleSet{
UnifyType r = x.getRhsType();
if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
} );
try {
logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNsmaller: " + result + "\n");
logFile.flush();
}
catch (IOException e) {
System.out.println("lofFile-Error");
}

logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNsmaller: " + result + "\n");
return Optional.of(result);
}
TypeUnify.java
@@ -1,19 +1,13 @@
package de.dhbwstuttgart.typeinference.unify;

import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ForkJoinPool;

import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

public class TypeUnify {

@@ -30,8 +24,8 @@ public class TypeUnify {
* @return
*/
public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
ForkJoinPool pool = new ForkJoinPool();
ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors(), ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObject(logFile, pool), log, 0, ret, usedTasks, pool);
pool.invoke(unifyTask);
Set<Set<UnifyPair>> res = unifyTask.join();
try {
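The pool in the new version is no longer the default ForkJoinPool; it is built with the JDK's four-argument constructor. A short annotated sketch of that constructor as it is used above (the surrounding class and the comments are explanatory assumptions, not repository code):

```java
import java.util.concurrent.ForkJoinPool;

public class PoolConstructionSketch {
    public static void main(String[] args) {
        ForkJoinPool pool = new ForkJoinPool(
                Runtime.getRuntime().availableProcessors(),      // parallelism: one worker per core
                ForkJoinPool.defaultForkJoinWorkerThreadFactory, // default worker thread factory
                null,                                            // no handler for uncaught exceptions
                true);                                           // asyncMode = true: FIFO scheduling for
                                                                 // tasks that are submitted but never joined
        pool.shutdown();
    }
}
```

asyncMode = true presumably favors the fire-and-forget log writes that the new WriterActiveObject (added later in this diff) submits to the same pool, alongside the regular fork/join unification tasks.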
@@ -56,8 +50,8 @@ public class TypeUnify {
* @return
*/
public UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
ForkJoinPool pool = new ForkJoinPool();
ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors(), ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObject(logFile, pool), log, 0, ret, usedTasks, pool);
pool.invoke(unifyTask);
return ret;
}

@@ -74,16 +68,17 @@ public class TypeUnify {
* @return
*/
public UnifyResultModel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors(), ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true);
TypeUnifyTask unifyTask = //new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks, statistics);
ForkJoinPool pool = new ForkJoinPool();
new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObject(logFile, pool), log, 0, ret, usedTasks, pool, statistics);

pool.invoke(unifyTask);
Set<Set<UnifyPair>> res = unifyTask.join();
try {
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
logFile.flush();
unifyTask.statistics.write("Backtracking: " + unifyTask.noBacktracking);
unifyTask.statistics.write("\nLoops: " + unifyTask.noLoop);
unifyTask.statisticsFile.write("Backtracking: " + unifyTask.noBacktracking);
unifyTask.statisticsFile.write("\nLoops: " + unifyTask.noLoop);
}
catch (IOException e) {
System.err.println("no log-File");

@@ -110,8 +105,8 @@ public class TypeUnify {
* @return
*/
public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0, ret, usedTasks);
unifyTask.statistics = statistics;
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, new WriterActiveObject(logFile, ForkJoinPool.commonPool()), log, 0, ret, usedTasks, ForkJoinPool.commonPool());
unifyTask.statisticsFile = statistics;
Set<Set<UnifyPair>> res = unifyTask.compute();
try {
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
TypeUnify2Task.java
@@ -7,6 +7,7 @@ import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ForkJoinPool;

import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;

@@ -20,17 +21,17 @@ public class TypeUnify2Task extends TypeUnifyTask {
Set<UnifyPair> methodSignatureConstraintUebergabe;

//statistics
TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
Set<UnifyPair> nextSetElement,
IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks,
Set<UnifyPair> methodSignatureConstraintUebergabe, Writer statistics) {
this(setToFlatten, eq, oderConstraints, nextSetElement, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, methodSignatureConstraintUebergabe );
TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
Set<UnifyPair> nextSetElement,
IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks,
Set<UnifyPair> methodSignatureConstraintUebergabe, ForkJoinPool pool, Writer statistics) {
this(setToFlatten, eq, oderConstraints, nextSetElement, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, methodSignatureConstraintUebergabe, pool );

}

public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks, Set<UnifyPair> methodSignatureConstraintUebergabe) {
super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks, Set<UnifyPair> methodSignatureConstraintUebergabe, ForkJoinPool pool) {
super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, pool);
this.setToFlatten = setToFlatten;
this.nextSetElement = nextSetElement;
this.methodSignatureConstraintUebergabe = methodSignatureConstraintUebergabe;

@@ -64,13 +65,10 @@ public class TypeUnify2Task extends TypeUnifyTask {
}

public void closeLogFile() {

try {
logFile.close();
}
catch (IOException ioE) {
System.err.println("no log-File" + thNo);
}

if(parallel){
logFile.close();
}else{
logFile.closeNonThreaded();
}
}
}
TypeUnifyTask.java
@@ -13,21 +13,17 @@ import java.util.List;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;
import java.util.function.BiFunction;
import java.util.function.BinaryOperator;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import org.apache.commons.io.output.NullOutputStream;

import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.exceptions.TypeinferenceException;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.IMatch;
import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;

@@ -47,7 +43,7 @@ import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import de.dhbwstuttgart.typeinference.unify.model.WildcardType;
import de.dhbwstuttgart.util.Pair;
import de.dhbwstuttgart.typeinference.unify.model.OrderingUnifyPair;
import de.dhbwstuttgart.core.JavaTXCompiler;
import org.apache.commons.io.output.NullWriter;

import java.io.File;
import java.io.FileWriter;

@@ -55,8 +51,6 @@ import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;

import com.google.common.collect.Ordering;


/**
* Implementation of the type unification algorithm

@@ -85,8 +79,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Integer MaxNoOfThreads = 128;

public static final String rootDirectory = System.getProperty("user.dir")+"/test/logFiles/";
Writer logFile;

protected WriterActiveObject logFile;
protected ForkJoinPool pool;
/**
* The implementation of setOps that will be used during the unification
*/

@@ -135,7 +129,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {

volatile UnifyTaskModel usedTasks;

static Writer statistics;
static Writer statisticsFile = new NullWriter();

public TypeUnifyTask() {
rules = new RuleSet();
@@ -156,12 +150,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
*/

//statistics
public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks, Writer statistics) {
this(eq,oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
this.statistics = statistics;
public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks, ForkJoinPool pool, Writer statisticsFile) {
this(eq,oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, pool);
this.statisticsFile = statisticsFile;
}
public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks) {
synchronized (this) {
public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks, ForkJoinPool pool) {
this.eq = eq;
//this.oderConstraints = oderConstraints.stream().map(x -> x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
this.oderConstraintsField = oderConstraints; /*.stream().map(x -> {

@@ -180,6 +173,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
this.parallel = parallel;
this.logFile = logFile;
this.log = log;
this.pool = pool;

noOfThread++;
totalnoOfThread++;

@@ -187,9 +181,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
thNo = totalnoOfThread;
writeLog("thNo2 " + thNo);
try {
this.logFile = log ? new FileWriter(new File(System.getProperty("user.dir") + "/logFiles/" + "Thread_"+thNo))
: new OutputStreamWriter(new NullOutputStream());
logFile.write("");
if(log){
this.logFile = new WriterActiveObject(new FileWriter(new File(System.getProperty("user.dir") + "/logFiles/" + "Thread_"+thNo)), pool);
}else{
this.logFile = new WriterActiveObject(new OutputStreamWriter(new NullOutputStream()), pool);
}
}
catch (IOException e) {
System.err.println("log-File nicht vorhanden");

@@ -213,7 +209,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
this.urm = urm;
this.usedTasks = usedTasks;
this.usedTasks.add(this);
}
}

/**
@@ -271,12 +266,13 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
.collect(Collectors.toCollection(ArrayList::new));
Set<Set<UnifyPair>> res = unify(neweq, remainingOderconstraints, fc, parallel, rekTiefeField, new HashSet<>());
noOfThread--;
try {

if(parallel){
logFile.close();
}else{
logFile.closeNonThreaded();
}
catch (IOException ioE) {
System.err.println("no log-File");
}

if (isUndefinedPairSetSet(res)) {
//fuer debug-Zwecke
ArrayList al = res.stream().map(x -> x.stream().collect(Collectors.toCollection(ArrayList::new)))

@@ -386,8 +382,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {

eq0.forEach(x -> x.disableCondWildcards());

writeLog(nOfUnify.toString() + " Unifikation nach applyTypeUnificationRules: " + eq.toString());
writeLog(nOfUnify.toString() + " Oderconstraints nach applyTypeUnificationRules: " + oderConstraints.toString());
writeLog(nOfUnify.toString() + " Unifikation nach applyTypeUnificationRules: " + eq.toString() + "\n"
+ nOfUnify.toString() + " Oderconstraints nach applyTypeUnificationRules: " + oderConstraints.toString());

/*
* Step 2 and 3: Create a subset eq1s of pairs where both sides are TPH and eq2s of the other pairs

@@ -446,8 +442,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
if(!undefinedPairs.isEmpty()) {
noUndefPair++;
for (UnifyPair up : undefinedPairs) {
writeLog(noUndefPair.toString() + " UndefinedPairs; " + up);
writeLog("BasePair; " + up.getBasePair());
writeLog(noUndefPair.toString() + " UndefinedPairs; " + up + "\n"
+ "BasePair; " + up.getBasePair());
}
Set<Set<UnifyPair>> error = new HashSet<>();
undefinedPairs = undefinedPairs.stream().map(x -> { x.setUndefinedPair(); return x;}).collect(Collectors.toCollection(HashSet::new));

@@ -814,8 +810,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
*/
List<Set<UnifyPair>> nextSetasListOderConstraints = new ArrayList<>();

writeLog("nextSet: " + nextSet.toString());
writeLog("nextSetasList: " + nextSetasList.toString());
writeLog("nextSet: " + nextSet.toString() + "\n"
+ "nextSetasList: " + nextSetasList.toString());

/* staistics Nextvar an Hand Varianzbestimmung auskommentieren Anfang
if (variance == 1) {
@@ -933,7 +929,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
newElemsOrig.add(a);

/* FORK ANFANG */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, methodSignatureConstraint);
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, methodSignatureConstraint, this.pool);
//forks.add(forkOrig);
synchronized(usedTasks) {
if (this.myIsCancelled()) {

@@ -942,16 +938,15 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
forkOrig.fork();
}
/* FORK ENDE */

synchronized (this) {
writeLog("a in " + variance + " "+ a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
}

writeLog("a in " + variance + " "+ a + "\n" +
"nextSetasListRest: " + nextSetasListRest.toString());

while (!nextSetasList.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasList.remove(0);
synchronized (this) { //nextSetasList.remove(nSaL);
writeLog("1 RM" + nSaL.toString());
}
//nextSetasList.remove(nSaL);
writeLog("1 RM" + nSaL.toString());


if (!oderConstraint) {

@@ -971,7 +966,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint));
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint), this.pool);
forks.add(fork);
synchronized(usedTasks) {
if (this.myIsCancelled()) {
@@ -983,47 +978,44 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//res = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);

/* FORK ANFANG */
synchronized (this) {
writeLog("wait "+ forkOrig.thNo);

writeLog("wait "+ forkOrig.thNo);
noOfThread--;
res = forkOrig.join();
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
}
//noOfThread++;
forkOrig.writeLog("final Orig 1");
forkOrig.closeLogFile();
//Set<Set<UnifyPair>> fork_res = forkOrig.join();
writeLog("JoinOrig " + new Integer(forkOrig.thNo).toString());
//noOfThread--; an das Ende von compute verschoben
//add_res.add(fork_res);
/* FORK ENDE */

forks.forEach(x -> writeLog("wait: " + x.thNo));
for(TypeUnify2Task fork : forks) {
noOfThread--;
res = forkOrig.join();
Set<Set<UnifyPair>> fork_res = fork.join();
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
}
//noOfThread++;
forkOrig.writeLog("final Orig 1");
forkOrig.closeLogFile();
//Set<Set<UnifyPair>> fork_res = forkOrig.join();
writeLog("JoinOrig " + new Integer(forkOrig.thNo).toString());
//noOfThread--; an das Ende von compute verschoben
//add_res.add(fork_res);
};
/* FORK ENDE */

forks.forEach(x -> writeLog("wait: " + x.thNo));
for(TypeUnify2Task fork : forks) {
synchronized (this) {
noOfThread--;
Set<Set<UnifyPair>> fork_res = fork.join();
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
}
//noOfThread++;
writeLog("Join " + new Integer(fork.thNo).toString());
//noOfThread--; an das Ende von compute verschoben
writeLog("fork_res: " + fork_res.toString());
writeLog(new Boolean((isUndefinedPairSetSet(fork_res))).toString());
add_res.add(fork_res);
if (!isUndefinedPairSetSet(fork_res)) {
aParDef.add(fork.getNextSetElement());
}
fork.writeLog("final 1");
fork.closeLogFile();
};
//noOfThread--; an das Ende von compute verschoben
writeLog("Join " + new Integer(fork.thNo).toString() + "\n" +
"fork_res: " + fork_res.toString() + "\n" +
new Boolean((isUndefinedPairSetSet(fork_res))).toString());
add_res.add(fork_res);
if (!isUndefinedPairSetSet(fork_res)) {
aParDef.add(fork.getNextSetElement());
}
fork.writeLog("final 1");
fork.closeLogFile();
}
//noOfThread++;
} else {
@@ -1035,7 +1027,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
newElemsOrig.add(a);

/* FORK ANFANG */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint));
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint), this.pool);
//forks.add(forkOrig);
synchronized(usedTasks) {
if (this.myIsCancelled()) {

@@ -1045,15 +1037,14 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
/* FORK ENDE */

synchronized (this) {
writeLog("a in " + variance + " "+ a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
}

writeLog("a in " + variance + " "+ a + "\n" +
"nextSetasListRest: " + nextSetasListRest.toString());

while (!nextSetasList.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasList.remove(0);
synchronized (this) { //nextSetasList.remove(nSaL);
writeLog("-1 RM" + nSaL.toString());
}
//nextSetasList.remove(nSaL);
writeLog("-1 RM" + nSaL.toString());

if (!oderConstraint) {
/* statistics sameEq wird nicht betrachtet ANGFANG

@@ -1072,7 +1063,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint));
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint), this.pool);
forks.add(fork);
synchronized(usedTasks) {
if (this.myIsCancelled()) {
@@ -1084,48 +1075,45 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//res = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);

/* FORK ANFANG */
synchronized (this) {
writeLog("wait "+ forkOrig.thNo);
writeLog("wait "+ forkOrig.thNo);
noOfThread--;
res = forkOrig.join();
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
}
//noOfThread++;
forkOrig.writeLog("final Orig -1");
forkOrig.closeLogFile();
//Set<Set<UnifyPair>> fork_res = forkOrig.join();
writeLog("JoinOrig " + new Integer(forkOrig.thNo).toString());
//noOfThread--; an das Ende von compute verschoben
//add_res.add(fork_res);
/* FORK ENDE */

forks.forEach(x -> writeLog("wait: " + x.thNo));
for(TypeUnify2Task fork : forks) {
noOfThread--;
res = forkOrig.join();
Set<Set<UnifyPair>> fork_res = fork.join();
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
}
//noOfThread++;
forkOrig.writeLog("final Orig -1");
forkOrig.closeLogFile();
//Set<Set<UnifyPair>> fork_res = forkOrig.join();
writeLog("JoinOrig " + new Integer(forkOrig.thNo).toString());
//noOfThread--; an das Ende von compute verschoben
//add_res.add(fork_res);
writeLog("Join " + new Integer(fork.thNo).toString());
//noOfThread--; an das Ende von compute verschoben
writeLog("fork_res: " + fork_res.toString());
writeLog(new Boolean((isUndefinedPairSetSet(fork_res))).toString());
add_res.add(fork_res);
if (!isUndefinedPairSetSet(fork_res)) {
aParDef.add(fork.getNextSetElement());
}
fork.writeLog("final -1");
fork.closeLogFile();
};
/* FORK ENDE */

forks.forEach(x -> writeLog("wait: " + x.thNo));
for(TypeUnify2Task fork : forks) {
synchronized (this) {
noOfThread--;
Set<Set<UnifyPair>> fork_res = fork.join();
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
}
//noOfThread++;
writeLog("Join " + new Integer(fork.thNo).toString());
//noOfThread--; an das Ende von compute verschoben
writeLog("fork_res: " + fork_res.toString());
writeLog(new Boolean((isUndefinedPairSetSet(fork_res))).toString());
add_res.add(fork_res);
if (!isUndefinedPairSetSet(fork_res)) {
aParDef.add(fork.getNextSetElement());
}
fork.writeLog("final -1");
fork.closeLogFile();
};
}

//noOfThread++;
} else {
if(parallel && (variance == 2) && noOfThread <= MaxNoOfThreads) {
@@ -1137,7 +1125,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
newElemsOrig.add(a);

/* FORK ANFANG */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint));
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint), this.pool);
//forks.add(forkOrig);
synchronized(usedTasks) {
if (this.myIsCancelled()) {

@@ -1146,11 +1134,10 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
forkOrig.fork();
}
/* FORK ENDE */

synchronized (this) {
writeLog("a in " + variance + " "+ a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
}

writeLog("a in " + variance + " "+ a + "\n" +
"nextSetasListRest: " + nextSetasListRest.toString());

while (!nextSetasList.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasList.remove(0);
//nextSetasList.remove(nSaL); //PL einkommentiert 20-02-03

@@ -1158,7 +1145,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, methodSignatureConstraint);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, methodSignatureConstraint, this.pool);
forks.add(fork);
synchronized(usedTasks) {
if (this.myIsCancelled()) {
@@ -1170,41 +1157,39 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//res = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);

/* FORK ANFANG */
synchronized (this) {
writeLog("wait "+ forkOrig.thNo);
noOfThread--;
res = forkOrig.join();

writeLog("wait "+ forkOrig.thNo);
noOfThread--;
res = forkOrig.join();
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
}
//noOfThread++;
forkOrig.writeLog("final Orig 2");
forkOrig.closeLogFile();
//Set<Set<UnifyPair>> fork_res = forkOrig.join();
writeLog("JoinOrig " + new Integer(forkOrig.thNo).toString());
//noOfThread--; an das Ende von compute verschoben
//add_res.add(fork_res); //vermutlich falsch

/* FORK ENDE */
forks.forEach(x -> writeLog("wait: " + x.thNo));
for(TypeUnify2Task fork : forks) {
noOfThread--;
Set<Set<UnifyPair>> fork_res = fork.join();
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
}
//noOfThread++;
forkOrig.writeLog("final Orig 2");
forkOrig.closeLogFile();
//Set<Set<UnifyPair>> fork_res = forkOrig.join();
writeLog("JoinOrig " + new Integer(forkOrig.thNo).toString());
//noOfThread--; an das Ende von compute verschoben
//add_res.add(fork_res); //vermutlich falsch
};
/* FORK ENDE */
forks.forEach(x -> writeLog("wait: " + x.thNo));
for(TypeUnify2Task fork : forks) {
synchronized (this) {
noOfThread--;
Set<Set<UnifyPair>> fork_res = fork.join();
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
}
//noOfThread++;
writeLog("Join " + new Integer(fork.thNo).toString());
//noOfThread--; an das Ende von compute verschoben
add_res.add(fork_res);
fork.writeLog("final 2");
fork.closeLogFile();
};
writeLog("Join " + new Integer(fork.thNo).toString());
//noOfThread--; an das Ende von compute verschoben
add_res.add(fork_res);
fork.writeLog("final 2");
fork.closeLogFile();
}
//noOfThread++;
} else {//parallel = false oder MaxNoOfThreads ist erreicht, sequentiell weiterarbeiten
@@ -1533,15 +1518,15 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//NOCH NICHT korrekt PL 2018-10-12
//nextSetasList = nextSetasList.stream().filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y))
// .collect(Collectors.toCollection(ArrayList::new));
writeLog("res (undef): " + res.toString());
writeLog("abhSubst: " + abhSubst.toString());
writeLog("a2: " + rekTiefe + " " + a.toString());
writeLog("Durchschnitt: " + durchschnitt.toString());
writeLog("nextSet: " + nextSet.toString());
writeLog("nextSetasList: " + nextSetasList.toString());
writeLog("Number first erased Elements (undef): " + (len - nofstred));
writeLog("Number second erased Elements (undef): " + (nofstred- nextSetasList.size()));
writeLog("Number erased Elements (undef): " + (len - nextSetasList.size()));
writeLog("res (undef): " + res.toString() + "\n" +
"abhSubst: " + abhSubst.toString() + "\n" +
"a2: " + rekTiefe + " " + a.toString() + "\n" +
"Durchschnitt: " + durchschnitt.toString() + "\n" +
"nextSet: " + nextSet.toString() + "\n" +
"nextSetasList: " + nextSetasList.toString() + "\n" +
"Number first erased Elements (undef): " + (len - nofstred) + "\n" +
"Number second erased Elements (undef): " + (nofstred- nextSetasList.size()) + "\n" +
"Number erased Elements (undef): " + (len - nextSetasList.size()));
noAllErasedElements = noAllErasedElements + (len - nextSetasList.size());
writeLog("Number of all erased Elements (undef): " + noAllErasedElements.toString());
noBacktracking++;

@@ -2001,10 +1986,10 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
}

writeLog("eq2s: " + eq2s.toString());
writeLog("eq2sAsListFst: " + eq2sAsListFst.toString());
writeLog("eq2sAsListSnd: " + eq2sAsListSnd.toString());
writeLog("eq2sAsListBack: " + eq2sAsListBack.toString());
writeLog("eq2s: " + eq2s.toString() + "\n" +
"eq2sAsListFst: " + eq2sAsListFst.toString() + "\n" +
"eq2sAsListSnd: " + eq2sAsListSnd.toString() + "\n" +
"eq2sAsListBack: " + eq2sAsListBack.toString());

eq2sAsList.addAll(eq2sAsListFst);
eq2sAsList.addAll(eq2sAsListSnd);
@@ -2609,34 +2594,33 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}

void writeLog(String str) {
synchronized ( this ) {
if (log && finalresult) {
try {
logFile.write("Thread no.:" + thNo + "\n");
logFile.write("noOfThread:" + noOfThread + "\n");
logFile.write("parallel:" + parallel + "\n");
logFile.write(str+"\n\n");
logFile.flush();

if(parallel){
logFile.write("Thread no.:" + thNo + "\n"
+ "noOfThread:" + noOfThread + "\n"
+ "parallel:" + parallel + "\n"
+ str+"\n\n"
);
}else{
logFile.writeNonThreaded("Thread no.:" + thNo + "\n"
+ "noOfThread:" + noOfThread + "\n"
+ "parallel:" + parallel + "\n"
+ str+"\n\n"
);
}
catch (IOException e) {
System.err.println("kein LogFile");
}
}
}
}
}

void writeStatistics(String str) {
if (finalresult) {
synchronized ( this ) {
try {
statistics.write("Thread No. " + thNo + ": " + str + "\n");
statistics.flush();
statisticsFile.write("Thread No. " + thNo + ": " + str + "\n");
statisticsFile.flush();

}
catch (IOException e) {
System.err.println("kein StatisticsFile");
}
}}
}
}
}
WriterActiveObject.java (new file)
@@ -0,0 +1,53 @@
package de.dhbwstuttgart.typeinference.unify;

import java.io.IOException;
import java.io.Writer;
import java.util.concurrent.ForkJoinPool;

public class WriterActiveObject {
private Writer writer;
private ForkJoinPool pool;

public WriterActiveObject(Writer writer, ForkJoinPool pool){
this.writer = writer;
this.pool = pool;
}

public void close(){
pool.execute(()->{
try {
writer.close();
} catch (IOException e) {
System.out.println(e.getMessage());
}
});
}

public void write(String message){
pool.execute(()->{
try {
writer.write(message);
writer.flush();
} catch (IOException e) {
throw new RuntimeException(e);
}
});
}

public void writeNonThreaded(String message){
try {
writer.write(message);
writer.flush();
} catch (IOException e) {
throw new RuntimeException(e);
}
}

public void closeNonThreaded(){
try {
writer.close();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
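The new WriterActiveObject wraps a plain Writer and hands each write or close to a ForkJoinPool, so logging no longer blocks the unification tasks; the *NonThreaded variants run on the calling thread for the sequential code paths. A small usage sketch based only on the class shown above (file name, pool choice, and the awaitQuiescence call are illustrative assumptions, not repository code):

```java
import java.io.FileWriter;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.TimeUnit;

import de.dhbwstuttgart.typeinference.unify.WriterActiveObject;

public class WriterActiveObjectSketch {
    public static void main(String[] args) throws Exception {
        ForkJoinPool pool = ForkJoinPool.commonPool();
        WriterActiveObject log = new WriterActiveObject(new FileWriter("unify.log"), pool);

        log.write("asynchronous entry\n");          // dispatched to the pool; the caller does not block
        pool.awaitQuiescence(1, TimeUnit.SECONDS);  // sketch only: give the queued write time to finish

        // writeNonThreaded(...) would write and flush on this thread instead
        log.closeNonThreaded();                     // close the underlying Writer directly
    }
}
```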
UnifyTest.java
@@ -1,13 +1,27 @@
package typeinference;

import com.google.common.base.Optional;
import de.dhbwstuttgart.bytecode.genericsGeneratorTypes.GenericGenratorResultForSourceFile;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.parser.scope.JavaClassName;
import de.dhbwstuttgart.syntaxtree.ClassOrInterface;
import de.dhbwstuttgart.syntaxtree.Constructor;
import de.dhbwstuttgart.syntaxtree.SourceFile;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.syntaxtree.visual.ASTPrinter;
import de.dhbwstuttgart.syntaxtree.visual.ASTTypePrinter;
import de.dhbwstuttgart.typedeployment.TypeInsert;
import de.dhbwstuttgart.typedeployment.TypeInsertFactory;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.*;
import org.apache.commons.io.output.NullWriter;
import org.junit.Test;

import java.io.File;

@@ -24,7 +38,71 @@ import java.util.Set;
public class UnifyTest {

public static final String rootDirectory = System.getProperty("user.dir")+"/resources/javFiles/";
/*

private UnifyPair genPairListOfInteger(String name){

UnifyType type1 = new PlaceholderType(name);
UnifyType type2 = new ReferenceType("List", new TypeParams(new ReferenceType("Integer")));
UnifyPair pair1 = new UnifyPair(type2, type1, PairOperator.SMALLERDOT);

return pair1;
}
private UnifyPair genPairListOfString(String name){

PlaceholderType type1 = new PlaceholderType(name);
UnifyType type2 = new ReferenceType("List", new TypeParams(new ReferenceType("String")));
UnifyPair pair1 = new UnifyPair(type2, type1, PairOperator.SMALLERDOT);

return pair1;
}
@Test
public void unifyTest(){
UnifyType type1;
UnifyType type2;

Set<UnifyPair> undConstraints = new HashSet<>();
undConstraints.add(genPairListOfInteger("a"));
undConstraints.add(genPairListOfString("a"));

undConstraints.add(genPairListOfInteger("b"));
undConstraints.add(genPairListOfString("b"));
undConstraints.add(genPairListOfInteger("c"));
undConstraints.add(genPairListOfString("c"));
undConstraints.add(genPairListOfInteger("d"));
undConstraints.add(genPairListOfString("d"));
undConstraints.add(genPairListOfInteger("e"));
undConstraints.add(genPairListOfString("e"));
undConstraints.add(genPairListOfInteger("e1"));
undConstraints.add(genPairListOfString("e1"));
undConstraints.add(genPairListOfInteger("e2"));
undConstraints.add(genPairListOfString("e2"));
undConstraints.add(genPairListOfInteger("e3"));
undConstraints.add(genPairListOfString("e3"));

List<Set<Constraint<UnifyPair>>> oderConstraints = new ArrayList<>();

Set<UnifyPair> constraints = new HashSet<>();
type1 = new ReferenceType("Object");
type2 = new ReferenceType("List", new TypeParams(new PlaceholderType("X")));
constraints.add(new UnifyPair(type2, type1, PairOperator.SMALLER));
type1 = new ReferenceType("Object");
type2 = new ReferenceType("Integer");
constraints.add(new UnifyPair(type2, type1, PairOperator.SMALLER));
type1 = new ReferenceType("Object");
type2 = new ReferenceType("String");
constraints.add(new UnifyPair(type2, type1, PairOperator.SMALLER));

IFiniteClosure finiteClosure = new FiniteClosure(constraints, new NullWriter());

TypeUnify unifyAlgo = new TypeUnify();
ConstraintSet< Pair> cons = new ConstraintSet<>();
UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
UnifyTaskModel tasks = new UnifyTaskModel();
Set<Set<UnifyPair>> solution = unifyAlgo.unify(undConstraints, oderConstraints, finiteClosure, new NullWriter(), false, urm, tasks);
System.out.println(solution.size());
System.out.println(solution);
}
/*
@Test
public void finiteClosure() throws IOException, ClassNotFoundException {
execute(new File(rootDirectory+"fc.jav"));