Compare commits

..

5 Commits

Author       SHA1        Message                              Date
NoName11234  69936ffb58  no longer printing solutions to cmd  2024-04-23 19:44:16 +02:00
NoName11234  e4af54a2bf  added unifyTest                      2024-02-11 14:38:15 +01:00
NoName11234  d7b693204e  changed java version to 21           2024-02-11 14:37:59 +01:00
NoName11234  697cfcc2af  changed Writer to NullWriter         2024-02-11 14:37:05 +01:00
NoName11234  73f996991e  added unifyTest                      2024-02-11 14:35:05 +01:00
10 changed files with 401 additions and 439 deletions

View File: JavaTXCompiler.java

@@ -37,13 +37,20 @@ import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
 import de.dhbwstuttgart.typeinference.constraints.Pair;
 import de.dhbwstuttgart.typeinference.result.ResultSet;
 import de.dhbwstuttgart.typeinference.typeAlgo.TYPE;
-import de.dhbwstuttgart.typeinference.unify.*;
+import de.dhbwstuttgart.typeinference.unify.RuleSet;
+import de.dhbwstuttgart.typeinference.unify.TypeUnify;
+import de.dhbwstuttgart.typeinference.unify.distributeVariance;
 import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
 import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
 import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
 import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
 import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
 import de.dhbwstuttgart.util.BiRelation;
+import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
+import de.dhbwstuttgart.typeinference.unify.UnifyResultListener;
+import de.dhbwstuttgart.typeinference.unify.UnifyResultListenerImpl;
+import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
+import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;
 import java.io.File;
 import java.io.FileOutputStream;
@@ -66,7 +73,7 @@ public class JavaTXCompiler {
 	Boolean resultmodel = true;
 	public final Map<File, SourceFile> sourceFiles = new HashMap<>();
 	Boolean log = false; //gibt an ob ein Log-File nach System.getProperty("user.dir")+"/logFiles/" geschrieben werden soll?
-	public UnifyTaskModelParallel usedTasks = new UnifyTaskModelParallel();
+	public volatile UnifyTaskModel usedTasks = new UnifyTaskModel();
 	private final DirectoryClassLoader classLoader;
 	static Writer statistics;
@@ -82,8 +89,8 @@ public class JavaTXCompiler {
 	}
 	public JavaTXCompiler(List<File> sources, List<File> contextPath) throws IOException, ClassNotFoundException {
 		//statistics = new FileWriter(new File(System.getProperty("user.dir") + "/" + sources.get(0).getName() + "_"+ new Timestamp(System.currentTimeMillis())));
 		statistics = new OutputStreamWriter(new NullOutputStream());
 		statistics.write("test");
 		if(contextPath == null || contextPath.isEmpty()){
 			//When no contextPaths are given, the working directory is the sources root
 			contextPath = Lists.newArrayList(new File(System.getProperty("user.dir")));
@@ -369,7 +376,7 @@ public class JavaTXCompiler {
 	 * }
 	 */
-	public UnifyResultModelParallel typeInferenceAsync(UnifyResultListener resultListener, Writer logFile)
+	public UnifyResultModel typeInferenceAsync(UnifyResultListener resultListener, Writer logFile)
 			throws ClassNotFoundException, IOException {
 		List<ClassOrInterface> allClasses = new ArrayList<>();// environment.getAllAvailableClasses();
 		// Alle Importierten Klassen in allen geparsten Sourcefiles kommen ins FC
@@ -382,7 +389,7 @@ public class JavaTXCompiler {
 		final ConstraintSet<Pair> cons = getConstraints();
 		Set<Set<UnifyPair>> results = new HashSet<>();
-		UnifyResultModelParallel urm = null;
+		UnifyResultModel urm = null;
 		// urm.addUnifyResultListener(resultListener);
 		try {
 			logFile = logFile == null
@@ -390,7 +397,7 @@ public class JavaTXCompiler {
 					: logFile;
 			IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses, logFile, classLoader);
 			System.out.println(finiteClosure);
-			urm = new UnifyResultModelParallel(cons, finiteClosure);
+			urm = new UnifyResultModel(cons, finiteClosure);
 			urm.addUnifyResultListener(resultListener);
 			ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(cons);
@@ -720,7 +727,7 @@ public class JavaTXCompiler {
 			}).collect(Collectors.toCollection(ArrayList::new))*/;
 			if (resultmodel) {
 				/* UnifyResultModel Anfang */
-				UnifyResultModelParallel urm = new UnifyResultModelParallel(cons, finiteClosure);
+				UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
 				UnifyResultListenerImpl li = new UnifyResultListenerImpl();
 				urm.addUnifyResultListener(li);
 				unify.unifyParallel(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, urm,
@@ -738,7 +745,7 @@ public class JavaTXCompiler {
 				// oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons,
 				// finiteClosure));
 				Set<Set<UnifyPair>> result = unify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints,
-						finiteClosure, logFile, log, new UnifyResultModelParallel(cons, finiteClosure), usedTasks);
+						finiteClosure, logFile, log, new UnifyResultModel(cons, finiteClosure), usedTasks);
 				System.out.println("RESULT: " + result);
 				logFile.write("RES: " + result.toString() + "\n");
 				logFile.flush();

View File: RuleSet.java

@@ -9,7 +9,6 @@ import java.util.Optional;
 import java.util.Queue;
 import java.util.Set;
 import java.util.Stack;
-import java.util.concurrent.ForkJoinPool;
 import java.util.function.Function;
 import java.util.stream.Collectors;
@@ -45,14 +44,14 @@ import org.apache.commons.io.output.NullOutputStream;
  */
 public class RuleSet implements IRuleSet{
-	WriterActiveObject logFile;
+	Writer logFile;
 	public RuleSet() {
 		super();
-		logFile = new WriterActiveObject(new OutputStreamWriter(new NullOutputStream()), ForkJoinPool.commonPool());
+		logFile = new OutputStreamWriter(new NullOutputStream());
 	}
-	RuleSet(WriterActiveObject logFile) {
+	RuleSet(Writer logFile) {
 		this.logFile = logFile;
 	}
@@ -868,8 +867,14 @@ public class RuleSet implements IRuleSet{
 			UnifyType r = x.getRhsType();
 			if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
 		} );
-		logFile.write("FUNgreater: " + pair + "\n");
-		logFile.write("FUNred: " + result + "\n");
+		try {
+			logFile.write("FUNgreater: " + pair + "\n");
+			logFile.write("FUNred: " + result + "\n");
+			logFile.flush();
+		}
+		catch (IOException e) {
+			System.out.println("logFile-Error");
+		}
 		return Optional.of(result);
 	}
@@ -912,10 +917,14 @@ public class RuleSet implements IRuleSet{
 			UnifyType r = x.getRhsType();
 			if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
 		} );
+		try {
 			logFile.write("FUNgreater: " + pair + "\n");
 			logFile.write("FUNgreater: " + result + "\n");
+			logFile.flush();
+		}
+		catch (IOException e) {
+			System.out.println("logFile-Error");
+		}
 		return Optional.of(result);
 	}
@@ -958,9 +967,14 @@ public class RuleSet implements IRuleSet{
 			UnifyType r = x.getRhsType();
 			if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
 		} );
+		try {
 			logFile.write("FUNgreater: " + pair + "\n");
 			logFile.write("FUNsmaller: " + result + "\n");
+			logFile.flush();
+		}
+		catch (IOException e) {
+			System.out.println("logFile-Error");
+		}
 		return Optional.of(result);
 	}

View File: TypeUnify.java

@@ -1,13 +1,19 @@
 package de.dhbwstuttgart.typeinference.unify;
+import java.io.FileWriter;
 import java.io.IOException;
 import java.io.Writer;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.ForkJoinPool;
+import de.dhbwstuttgart.core.JavaTXCompiler;
 import de.dhbwstuttgart.typeinference.constraints.Constraint;
+import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
+import de.dhbwstuttgart.typeinference.constraints.Pair;
 import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
+import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
 import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
 public class TypeUnify {
@@ -23,11 +29,9 @@ public class TypeUnify {
 	 * @param cons
 	 * @return
 	 */
-	public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallel resultModel, UnifyTaskModelParallel taskModel) {
-		ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors(), ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true);
-		taskModel.setPool(pool);
-		resultModel.setPool(pool);
-		TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObject(logFile, pool), log, 0, resultModel, pool);
+	public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
+		TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
+		ForkJoinPool pool = new ForkJoinPool();
 		pool.invoke(unifyTask);
 		Set<Set<UnifyPair>> res = unifyTask.join();
 		try {
@@ -51,13 +55,11 @@ public class TypeUnify {
 	 * @param ret
 	 * @return
 	 */
-	public UnifyResultModelParallel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallel resultModel, UnifyTaskModelParallel taskModel) {
-		ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors(), ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true);
-		taskModel.setPool(pool);
-		resultModel.setPool(pool);
-		TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObject(logFile, pool), log, 0, resultModel, pool);
+	public UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
+		TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
+		ForkJoinPool pool = new ForkJoinPool();
 		pool.invoke(unifyTask);
-		return resultModel;
+		return ret;
 	}
 	/**
@@ -71,25 +73,22 @@ public class TypeUnify {
 	 * @param ret
 	 * @return
 	 */
-	public UnifyResultModelParallel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallel resultModel, UnifyTaskModelParallel taskModel) {
-		ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors(), ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true);
-		taskModel.setPool(pool);
-		resultModel.setPool(pool);
+	public UnifyResultModel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
 		TypeUnifyTask unifyTask = //new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
-				new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObject(logFile, pool), log, 0, resultModel, pool, statistics);
+				new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks, statistics);
+		ForkJoinPool pool = new ForkJoinPool();
 		pool.invoke(unifyTask);
 		Set<Set<UnifyPair>> res = unifyTask.join();
 		try {
 			logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
 			logFile.flush();
-			unifyTask.statisticsFile.write("Backtracking: " + unifyTask.noBacktracking);
-			unifyTask.statisticsFile.write("\nLoops: " + unifyTask.noLoop);
+			unifyTask.statistics.write("Backtracking: " + unifyTask.noBacktracking);
+			unifyTask.statistics.write("\nLoops: " + unifyTask.noLoop);
 		}
 		catch (IOException e) {
 			System.err.println("no log-File");
 		}
-		return resultModel;
+		return ret;
 	}
 	/*
@@ -110,10 +109,9 @@ public class TypeUnify {
 	 * @param cons
 	 * @return
 	 */
-	public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallel resultModel, UnifyTaskModelParallel taskModel) {
-		resultModel.setPool(ForkJoinPool.commonPool());
-		TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, new WriterActiveObject(logFile, ForkJoinPool.commonPool()), log, 0, resultModel, ForkJoinPool.commonPool());
-		unifyTask.statisticsFile = statistics;
+	public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
+		TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0, ret, usedTasks);
+		unifyTask.statistics = statistics;
 		Set<Set<UnifyPair>> res = unifyTask.compute();
 		try {
 			logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
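
Note: after these commits the four entry points (unify, unifyAsync, unifyParallel, unifyOderConstraints) share one shape: construct the TypeUnifyTask first, then a plain default ForkJoinPool, invoke and join. A minimal caller sketch under the post-commit signatures; the class name UnifyDriver and the empty constraint sets are illustrative placeholders, and the real usage appears in UnifyTest at the end of this compare:

    import java.util.*;
    import org.apache.commons.io.output.NullWriter;
    import de.dhbwstuttgart.typeinference.constraints.*;
    import de.dhbwstuttgart.typeinference.unify.*;
    import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
    import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
    import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

    public class UnifyDriver {                       // hypothetical name
        public static void main(String[] args) {
            Set<UnifyPair> undConstraints = new HashSet<>();               // filled elsewhere
            List<Set<Constraint<UnifyPair>>> oderConstraints = new ArrayList<>();
            IFiniteClosure fc = new FiniteClosure(new HashSet<>(), new NullWriter());
            UnifyResultModel urm = new UnifyResultModel(new ConstraintSet<>(), fc);
            UnifyTaskModel tasks = new UnifyTaskModel();
            // unify() builds the task, spawns a default pool, and joins the result.
            Set<Set<UnifyPair>> solutions = new TypeUnify().unify(
                    undConstraints, oderConstraints, fc, new NullWriter(), false, urm, tasks);
            System.out.println(solutions.size());
        }
    }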

View File: TypeUnify2Task.java

@@ -7,7 +7,6 @@ import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
-import java.util.concurrent.ForkJoinPool;
 import de.dhbwstuttgart.typeinference.constraints.Constraint;
 import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
@@ -22,16 +21,16 @@ public class TypeUnify2Task extends TypeUnifyTask {
 	//statistics
 	TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq,
 			List<Set<Constraint<UnifyPair>>> oderConstraints,
 			Set<UnifyPair> nextSetElement,
-			IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, int rekTiefe, UnifyResultModelParallel urm,
-			Set<UnifyPair> methodSignatureConstraintUebergabe, ForkJoinPool pool, Writer statistics) {
-		this(setToFlatten, eq, oderConstraints, nextSetElement, fc, parallel, logFile, log, rekTiefe, urm, methodSignatureConstraintUebergabe, pool );
+			IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks,
+			Set<UnifyPair> methodSignatureConstraintUebergabe, Writer statistics) {
+		this(setToFlatten, eq, oderConstraints, nextSetElement, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, methodSignatureConstraintUebergabe );
 	}
-	public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, int rekTiefe, UnifyResultModelParallel urm, Set<UnifyPair> methodSignatureConstraintUebergabe, ForkJoinPool pool) {
-		super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, pool);
+	public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks, Set<UnifyPair> methodSignatureConstraintUebergabe) {
+		super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
 		this.setToFlatten = setToFlatten;
 		this.nextSetElement = nextSetElement;
 		this.methodSignatureConstraintUebergabe = methodSignatureConstraintUebergabe;
@@ -54,14 +53,24 @@ public class TypeUnify2Task extends TypeUnifyTask {
 		 */
 		//writeLog("xxx");
 		//noOfThread--;
-		return res;
+		synchronized (usedTasks) {
+			if (this.myIsCancelled()) {
+				return new HashSet<>();
+			}
+			else {
+				return res;
+			}
+		}
 	}
 	public void closeLogFile() {
-		if(parallel){
-			logFile.close();
-		}else{
-			logFile.closeNonThreaded();
-		}
+		try {
+			logFile.close();
+		}
+		catch (IOException ioE) {
+			System.err.println("no log-File" + thNo);
+		}
 	}
 }

View File: TypeUnifyTask.java

@@ -13,17 +13,21 @@ import java.util.List;
 import java.util.Map.Entry;
 import java.util.Optional;
 import java.util.Set;
-import java.util.concurrent.ForkJoinPool;
 import java.util.concurrent.RecursiveTask;
 import java.util.function.BiFunction;
 import java.util.function.BinaryOperator;
 import java.util.stream.Collectors;
+import java.util.stream.Stream;
 import org.apache.commons.io.output.NullOutputStream;
+import de.dhbwstuttgart.core.JavaTXCompiler;
 import de.dhbwstuttgart.exceptions.TypeinferenceException;
 import de.dhbwstuttgart.parser.NullToken;
+import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
 import de.dhbwstuttgart.typeinference.constraints.Constraint;
+import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
+import de.dhbwstuttgart.typeinference.result.ResultSet;
 import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
 import de.dhbwstuttgart.typeinference.unify.interfaces.IMatch;
 import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
@@ -43,7 +47,7 @@ import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
 import de.dhbwstuttgart.typeinference.unify.model.WildcardType;
 import de.dhbwstuttgart.util.Pair;
 import de.dhbwstuttgart.typeinference.unify.model.OrderingUnifyPair;
-import org.apache.commons.io.output.NullWriter;
+import de.dhbwstuttgart.core.JavaTXCompiler;
 import java.io.File;
 import java.io.FileWriter;
@@ -51,6 +55,9 @@ import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
+import com.google.common.collect.Ordering;
+import org.apache.commons.io.output.NullWriter;
 /**
  * Implementation of the type unification algorithm
@@ -71,7 +78,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 	/**
 	 * Fuer die Threads
 	 */
-	protected UnifyResultModelParallel urm;
+	UnifyResultModel urm;
 	protected static int noOfThread = 0;
 	private static int totalnoOfThread = 0;
 	int thNo;
@@ -79,8 +86,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 	Integer MaxNoOfThreads = 128;
 	public static final String rootDirectory = System.getProperty("user.dir")+"/test/logFiles/";
-	protected WriterActiveObject logFile;
-	protected ForkJoinPool pool;
+	Writer logFile;
 	/**
 	 * The implementation of setOps that will be used during the unification
 	 */
@@ -125,7 +132,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 	static Integer noShortendElements = 0;
-	static Writer statisticsFile = new NullWriter();
+	Boolean myIsCanceled = false;
+	volatile UnifyTaskModel usedTasks;
+	static Writer statistics;
 	public TypeUnifyTask() {
 		rules = new RuleSet();
@@ -146,11 +157,15 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 	 */
 	//statistics
-	public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, int rekTiefe, UnifyResultModelParallel urm, ForkJoinPool pool, Writer statisticsFile) {
-		this(eq,oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, pool);
-		this.statisticsFile = statisticsFile;
+	public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks, Writer statistics) {
+		this(eq,oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
+		this.statistics = statistics;
 	}
-	public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, int rekTiefe, UnifyResultModelParallel urm, ForkJoinPool pool) {
+	public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks) {
+		synchronized (this) {
+			if(statistics==null){
+				statistics = new NullWriter();
+			}
 		this.eq = eq;
 		//this.oderConstraints = oderConstraints.stream().map(x -> x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
 		this.oderConstraintsField = oderConstraints; /*.stream().map(x -> {
@@ -169,7 +184,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 		this.parallel = parallel;
 		this.logFile = logFile;
 		this.log = log;
-		this.pool = pool;
 		noOfThread++;
 		totalnoOfThread++;
@@ -177,11 +191,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 		thNo = totalnoOfThread;
 		writeLog("thNo2 " + thNo);
 		try {
-			if(log){
-				this.logFile = new WriterActiveObject(new FileWriter(new File(System.getProperty("user.dir") + "/logFiles/" + "Thread_"+thNo)), pool);
-			}else{
-				this.logFile = new WriterActiveObject(new OutputStreamWriter(new NullOutputStream()), pool);
-			}
+			this.logFile = log ? new FileWriter(new File(System.getProperty("user.dir") + "/logFiles/" + "Thread_"+thNo))
+					: new OutputStreamWriter(new NullOutputStream());
+			logFile.write("");
 		}
 		catch (IOException e) {
 			System.err.println("log-File nicht vorhanden");
@@ -203,6 +215,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 		rules = new RuleSet(logFile);
 		this.rekTiefeField = rekTiefe;
 		this.urm = urm;
+		this.usedTasks = usedTasks;
+		this.usedTasks.add(this);
+		}
 	}
 	/**
@@ -237,6 +252,13 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 		}
 	}
 	*/
+	void myCancel(Boolean b) {
+		myIsCanceled = true;
+	}
+	public boolean myIsCancelled() {
+		return myIsCanceled;
+	}
+
 	protected Set<Set<UnifyPair>> compute() {
 		if (one) {
@@ -253,13 +275,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 				.collect(Collectors.toCollection(ArrayList::new));
 		Set<Set<UnifyPair>> res = unify(neweq, remainingOderconstraints, fc, parallel, rekTiefeField, new HashSet<>());
 		noOfThread--;
-		if(parallel){
+		try {
 			logFile.close();
-		}else{
-			logFile.closeNonThreaded();
+		}
+		catch (IOException ioE) {
+			System.err.println("no log-File");
 		}
 		if (isUndefinedPairSetSet(res)) {
 			//fuer debug-Zwecke
 			ArrayList al = res.stream().map(x -> x.stream().collect(Collectors.toCollection(ArrayList::new)))
@@ -267,7 +288,14 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 			throw new TypeinferenceException("Unresolved constraints: " + res.toString(), new NullToken()); //return new HashSet<>();
 		}
 		else {
-			return res;
+			synchronized (usedTasks) {
+				if (this.myIsCancelled()) {
+					return new HashSet<>();
+				}
+				else {
+					return res;
+				}
+			}
 		}
 	}
 	/*
@@ -300,6 +328,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 		//.collect(Collectors.toCollection(HashSet::new)));
+		synchronized (usedTasks) {
+			if (this.myIsCancelled()) {
+				return new HashSet<>();
+			}
+		}
+
 		rekTiefe++;
 		nOfUnify++;
 		writeLog(nOfUnify.toString() + " Unifikation: " + eq.toString());
@@ -356,8 +390,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 		eq0.forEach(x -> x.disableCondWildcards());
-		writeLog(nOfUnify.toString() + " Unifikation nach applyTypeUnificationRules: " + eq.toString() + "\n"
-				+ nOfUnify.toString() + " Oderconstraints nach applyTypeUnificationRules: " + oderConstraints.toString());
+		writeLog(nOfUnify.toString() + " Unifikation nach applyTypeUnificationRules: " + eq.toString());
+		writeLog(nOfUnify.toString() + " Oderconstraints nach applyTypeUnificationRules: " + oderConstraints.toString());
 		/*
 		 * Step 2 and 3: Create a subset eq1s of pairs where both sides are TPH and eq2s of the other pairs
@@ -416,8 +450,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 		if(!undefinedPairs.isEmpty()) {
 			noUndefPair++;
 			for (UnifyPair up : undefinedPairs) {
-				writeLog(noUndefPair.toString() + " UndefinedPairs; " + up + "\n"
-						+ "BasePair; " + up.getBasePair());
+				writeLog(noUndefPair.toString() + " UndefinedPairs; " + up);
+				writeLog("BasePair; " + up.getBasePair());
 			}
 			Set<Set<UnifyPair>> error = new HashSet<>();
 			undefinedPairs = undefinedPairs.stream().map(x -> { x.setUndefinedPair(); return x;}).collect(Collectors.toCollection(HashSet::new));
@@ -481,6 +515,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 		// .collect(Collectors.toCollection(HashSet::new));
 		//Muss auskommentiert werden, wenn computeCartesianRecursive ENDE
+		synchronized (usedTasks) {
+			if (this.myIsCancelled()) {
+				return new HashSet<>();
+			}
+		}
+
 		Set<Set<UnifyPair>> eqPrimePrimeSet = new HashSet<>();
 		Set<TypeUnifyTask> forks = new HashSet<>();
@@ -778,8 +818,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 		 */
 		List<Set<UnifyPair>> nextSetasListOderConstraints = new ArrayList<>();
-		writeLog("nextSet: " + nextSet.toString() + "\n"
-				+ "nextSetasList: " + nextSetasList.toString());
+		writeLog("nextSet: " + nextSet.toString());
+		writeLog("nextSetasList: " + nextSetasList.toString());
 		/* staistics Nextvar an Hand Varianzbestimmung auskommentieren Anfang
 		if (variance == 1) {
@@ -897,20 +937,25 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 				newElemsOrig.add(a);
 				/* FORK ANFANG */
-				TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, methodSignatureConstraint, this.pool);
+				TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, methodSignatureConstraint);
 				//forks.add(forkOrig);
-				forkOrig.fork();
+				synchronized(usedTasks) {
+					if (this.myIsCancelled()) {
+						return new HashSet<>();
+					}
+					forkOrig.fork();
+				}
 				/* FORK ENDE */
-				writeLog("a in " + variance + " "+ a + "\n" +
-						"nextSetasListRest: " + nextSetasListRest.toString());
+				synchronized (this) {
+					writeLog("a in " + variance + " "+ a);
+					writeLog("nextSetasListRest: " + nextSetasListRest.toString());
+				}
 				while (!nextSetasList.isEmpty()) {
 					Set<UnifyPair> nSaL = nextSetasList.remove(0);
-					//nextSetasList.remove(nSaL);
-					writeLog("1 RM" + nSaL.toString());
+					synchronized (this) { //nextSetasList.remove(nSaL);
+						writeLog("1 RM" + nSaL.toString());
+					}
 					if (!oderConstraint) {
@@ -930,43 +975,59 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 					Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
 					List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
 					newElems.add(nSaL);
-					TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, new HashSet<>(methodSignatureConstraint), this.pool);
+					TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint));
 					forks.add(fork);
-					fork.fork();
+					synchronized(usedTasks) {
+						if (this.myIsCancelled()) {
+							return new HashSet<>();
+						}
+						fork.fork();
+					}
 				}
 				//res = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
 				/* FORK ANFANG */
+				synchronized (this) {
 				writeLog("wait "+ forkOrig.thNo);
 				noOfThread--;
 				res = forkOrig.join();
+				synchronized (usedTasks) {
+					if (this.myIsCancelled()) {
+						return new HashSet<>();
+					}
+				}
 				//noOfThread++;
 				forkOrig.writeLog("final Orig 1");
 				forkOrig.closeLogFile();
 				//Set<Set<UnifyPair>> fork_res = forkOrig.join();
 				writeLog("JoinOrig " + new Integer(forkOrig.thNo).toString());
 				//noOfThread--; an das Ende von compute verschoben
 				//add_res.add(fork_res);
+				};
 				/* FORK ENDE */
 				forks.forEach(x -> writeLog("wait: " + x.thNo));
 				for(TypeUnify2Task fork : forks) {
+					synchronized (this) {
 					noOfThread--;
 					Set<Set<UnifyPair>> fork_res = fork.join();
+					synchronized (usedTasks) {
+						if (this.myIsCancelled()) {
+							return new HashSet<>();
+						}
+					}
 					//noOfThread++;
-					writeLog("Join " + new Integer(fork.thNo).toString() + "\n" +
-							"fork_res: " + fork_res.toString() + "\n" +
-							new Boolean((isUndefinedPairSetSet(fork_res))).toString());
+					writeLog("Join " + new Integer(fork.thNo).toString());
 					//noOfThread--; an das Ende von compute verschoben
+					writeLog("fork_res: " + fork_res.toString());
+					writeLog(new Boolean((isUndefinedPairSetSet(fork_res))).toString());
 					add_res.add(fork_res);
 					if (!isUndefinedPairSetSet(fork_res)) {
 						aParDef.add(fork.getNextSetElement());
 					}
 					fork.writeLog("final 1");
 					fork.closeLogFile();
+					};
 				}
 				//noOfThread++;
 			} else {
@@ -978,20 +1039,25 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 				newElemsOrig.add(a);
 				/* FORK ANFANG */
-				TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, new HashSet<>(methodSignatureConstraint), this.pool);
+				TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint));
 				//forks.add(forkOrig);
-				forkOrig.fork();
+				synchronized(usedTasks) {
+					if (this.myIsCancelled()) {
+						return new HashSet<>();
+					}
+					forkOrig.fork();
+				}
 				/* FORK ENDE */
-				writeLog("a in " + variance + " "+ a + "\n" +
-						"nextSetasListRest: " + nextSetasListRest.toString());
+				synchronized (this) {
+					writeLog("a in " + variance + " "+ a);
+					writeLog("nextSetasListRest: " + nextSetasListRest.toString());
+				}
 				while (!nextSetasList.isEmpty()) {
 					Set<UnifyPair> nSaL = nextSetasList.remove(0);
-					//nextSetasList.remove(nSaL);
-					writeLog("-1 RM" + nSaL.toString());
+					synchronized (this) { //nextSetasList.remove(nSaL);
+						writeLog("-1 RM" + nSaL.toString());
+					}
 					if (!oderConstraint) {
 					/* statistics sameEq wird nicht betrachtet ANGFANG
@@ -1010,44 +1076,60 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 					Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
 					List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
 					newElems.add(nSaL);
-					TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, new HashSet<>(methodSignatureConstraint), this.pool);
+					TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint));
 					forks.add(fork);
-					fork.fork();
+					synchronized(usedTasks) {
+						if (this.myIsCancelled()) {
+							return new HashSet<>();
+						}
+						fork.fork();
+					}
 				}
 				//res = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
 				/* FORK ANFANG */
+				synchronized (this) {
 				writeLog("wait "+ forkOrig.thNo);
 				noOfThread--;
 				res = forkOrig.join();
+				synchronized (usedTasks) {
+					if (this.myIsCancelled()) {
+						return new HashSet<>();
+					}
+				}
 				//noOfThread++;
 				forkOrig.writeLog("final Orig -1");
 				forkOrig.closeLogFile();
 				//Set<Set<UnifyPair>> fork_res = forkOrig.join();
 				writeLog("JoinOrig " + new Integer(forkOrig.thNo).toString());
 				//noOfThread--; an das Ende von compute verschoben
 				//add_res.add(fork_res);
+				};
 				/* FORK ENDE */
 				forks.forEach(x -> writeLog("wait: " + x.thNo));
 				for(TypeUnify2Task fork : forks) {
+					synchronized (this) {
 					noOfThread--;
 					Set<Set<UnifyPair>> fork_res = fork.join();
+					synchronized (usedTasks) {
+						if (this.myIsCancelled()) {
+							return new HashSet<>();
+						}
+					}
 					//noOfThread++;
 					writeLog("Join " + new Integer(fork.thNo).toString());
 					//noOfThread--; an das Ende von compute verschoben
 					writeLog("fork_res: " + fork_res.toString());
 					writeLog(new Boolean((isUndefinedPairSetSet(fork_res))).toString());
 					add_res.add(fork_res);
 					if (!isUndefinedPairSetSet(fork_res)) {
 						aParDef.add(fork.getNextSetElement());
 					}
 					fork.writeLog("final -1");
 					fork.closeLogFile();
 					};
+					}
 				//noOfThread++;
 			} else {
 			if(parallel && (variance == 2) && noOfThread <= MaxNoOfThreads) {
@@ -1059,14 +1141,20 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 				newElemsOrig.add(a);
 				/* FORK ANFANG */
-				TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, new HashSet<>(methodSignatureConstraint), this.pool);
+				TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint));
 				//forks.add(forkOrig);
-				forkOrig.fork();
+				synchronized(usedTasks) {
+					if (this.myIsCancelled()) {
+						return new HashSet<>();
+					}
+					forkOrig.fork();
+				}
 				/* FORK ENDE */
-				writeLog("a in " + variance + " "+ a + "\n" +
-						"nextSetasListRest: " + nextSetasListRest.toString());
+				synchronized (this) {
+					writeLog("a in " + variance + " "+ a);
+					writeLog("nextSetasListRest: " + nextSetasListRest.toString());
+				}
 				while (!nextSetasList.isEmpty()) {
 					Set<UnifyPair> nSaL = nextSetasList.remove(0);
 					//nextSetasList.remove(nSaL); //PL einkommentiert 20-02-03
@@ -1074,38 +1162,53 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 					Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
 					List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
 					newElems.add(nSaL);
-					TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, methodSignatureConstraint, this.pool);
+					TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, methodSignatureConstraint);
 					forks.add(fork);
-					fork.fork();
+					synchronized(usedTasks) {
+						if (this.myIsCancelled()) {
+							return new HashSet<>();
+						}
+						fork.fork();
+					}
 				}
 				//res = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
 				/* FORK ANFANG */
+				synchronized (this) {
 				writeLog("wait "+ forkOrig.thNo);
 				noOfThread--;
 				res = forkOrig.join();
+				synchronized (usedTasks) {
+					if (this.myIsCancelled()) {
+						return new HashSet<>();
+					}
+				}
 				//noOfThread++;
 				forkOrig.writeLog("final Orig 2");
 				forkOrig.closeLogFile();
 				//Set<Set<UnifyPair>> fork_res = forkOrig.join();
 				writeLog("JoinOrig " + new Integer(forkOrig.thNo).toString());
 				//noOfThread--; an das Ende von compute verschoben
 				//add_res.add(fork_res); //vermutlich falsch
+				};
 				/* FORK ENDE */
 				forks.forEach(x -> writeLog("wait: " + x.thNo));
 				for(TypeUnify2Task fork : forks) {
+					synchronized (this) {
 					noOfThread--;
 					Set<Set<UnifyPair>> fork_res = fork.join();
+					synchronized (usedTasks) {
+						if (this.myIsCancelled()) {
+							return new HashSet<>();
+						}
+					}
 					//noOfThread++;
 					writeLog("Join " + new Integer(fork.thNo).toString());
 					//noOfThread--; an das Ende von compute verschoben
 					add_res.add(fork_res);
 					fork.writeLog("final 2");
 					fork.closeLogFile();
+					};
 				}
 				//noOfThread++;
 			} else {//parallel = false oder MaxNoOfThreads ist erreicht, sequentiell weiterarbeiten
@@ -1434,15 +1537,15 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 			//NOCH NICHT korrekt PL 2018-10-12
 			//nextSetasList = nextSetasList.stream().filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y))
 			//		.collect(Collectors.toCollection(ArrayList::new));
-			writeLog("res (undef): " + res.toString() + "\n" +
-					"abhSubst: " + abhSubst.toString() + "\n" +
-					"a2: " + rekTiefe + " " + a.toString() + "\n" +
-					"Durchschnitt: " + durchschnitt.toString() + "\n" +
-					"nextSet: " + nextSet.toString() + "\n" +
-					"nextSetasList: " + nextSetasList.toString() + "\n" +
-					"Number first erased Elements (undef): " + (len - nofstred) + "\n" +
-					"Number second erased Elements (undef): " + (nofstred- nextSetasList.size()) + "\n" +
-					"Number erased Elements (undef): " + (len - nextSetasList.size()));
+			writeLog("res (undef): " + res.toString());
+			writeLog("abhSubst: " + abhSubst.toString());
+			writeLog("a2: " + rekTiefe + " " + a.toString());
+			writeLog("Durchschnitt: " + durchschnitt.toString());
+			writeLog("nextSet: " + nextSet.toString());
+			writeLog("nextSetasList: " + nextSetasList.toString());
+			writeLog("Number first erased Elements (undef): " + (len - nofstred));
+			writeLog("Number second erased Elements (undef): " + (nofstred- nextSetasList.size()));
+			writeLog("Number erased Elements (undef): " + (len - nextSetasList.size()));
 			noAllErasedElements = noAllErasedElements + (len - nextSetasList.size());
 			writeLog("Number of all erased Elements (undef): " + noAllErasedElements.toString());
 			noBacktracking++;
@@ -1902,10 +2005,10 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 			}
 		}
-		writeLog("eq2s: " + eq2s.toString() + "\n" +
-				"eq2sAsListFst: " + eq2sAsListFst.toString() + "\n" +
-				"eq2sAsListSnd: " + eq2sAsListSnd.toString() + "\n" +
-				"eq2sAsListBack: " + eq2sAsListBack.toString());
+		writeLog("eq2s: " + eq2s.toString());
+		writeLog("eq2sAsListFst: " + eq2sAsListFst.toString());
+		writeLog("eq2sAsListSnd: " + eq2sAsListSnd.toString());
+		writeLog("eq2sAsListBack: " + eq2sAsListBack.toString());
 		eq2sAsList.addAll(eq2sAsListFst);
 		eq2sAsList.addAll(eq2sAsListSnd);
@@ -2510,33 +2613,34 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
 	}
 	void writeLog(String str) {
+		synchronized ( this ) {
 		if (log && finalresult) {
-			if(parallel){
-				logFile.write("Thread no.:" + thNo + "\n"
-						+ "noOfThread:" + noOfThread + "\n"
-						+ "parallel:" + parallel + "\n"
-						+ str+"\n\n"
-				);
-			}else{
-				logFile.writeNonThreaded("Thread no.:" + thNo + "\n"
-						+ "noOfThread:" + noOfThread + "\n"
-						+ "parallel:" + parallel + "\n"
-						+ str+"\n\n"
-				);
-			}
+			try {
+				logFile.write("Thread no.:" + thNo + "\n");
+				logFile.write("noOfThread:" + noOfThread + "\n");
+				logFile.write("parallel:" + parallel + "\n");
+				logFile.write(str+"\n\n");
+				logFile.flush();
+			}
+			catch (IOException e) {
+				System.err.println("kein LogFile");
+			}
 		}
+		}
 	}
 	void writeStatistics(String str) {
 		if (finalresult) {
+			synchronized ( this ) {
 			try {
-				statisticsFile.write("Thread No. " + thNo + ": " + str + "\n");
-				statisticsFile.flush();
+				statistics.write("Thread No. " + thNo + ": " + str + "\n");
+				statistics.flush();
 			}
 			catch (IOException e) {
 				System.err.println("kein StatisticsFile");
 			}
-		}
+		}}
 	}
 }

View File: UnifyResultModelParallel.java (deleted)

@@ -1,57 +0,0 @@
-package de.dhbwstuttgart.typeinference.unify;
-
-import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
-import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
-import de.dhbwstuttgart.typeinference.constraints.Pair;
-import de.dhbwstuttgart.typeinference.result.ResultSet;
-import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
-import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
-import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
-
-import java.util.*;
-import java.util.concurrent.ForkJoinPool;
-import java.util.stream.Collectors;
-
-public class UnifyResultModelParallel {
-	private ForkJoinPool pool;
-	private ConstraintSet<Pair> cons;
-	private IFiniteClosure fc;
-	private List<UnifyResultListener> listeners = new ArrayList<>();
-
-	public UnifyResultModelParallel(ConstraintSet<Pair> cons, IFiniteClosure fc){
-		this.cons = cons;
-		this.fc = fc;
-	}
-	public void setPool(ForkJoinPool pool){
-		this.pool = pool;
-	}
-	public void addUnifyResultListener(UnifyResultListener listenerToAdd) {
-		listeners.add(listenerToAdd);
-	}
-	public void removeUnifyResultListener(UnifyResultListener listenerToRemove) {
-		listeners.remove(listenerToRemove);
-	}
-	public void notify(Set<Set<UnifyPair>> eqPrimePrimeSet){
-		pool.execute(()->{
-			Set<Set<UnifyPair>> eqPrimePrimeSetRet = eqPrimePrimeSet.stream().map(x -> {
-				Optional<Set<UnifyPair>> res = new RuleSet().subst(x.stream().map(y -> {
-					if (y.getPairOp() == PairOperator.SMALLERDOTWC) y.setPairOp(PairOperator.EQUALSDOT);
-					return y; //alle Paare a <.? b erden durch a =. b ersetzt
-				}).collect(Collectors.toCollection(HashSet::new)));
-				if (res.isPresent()) {//wenn subst ein Erg liefert wurde was veraendert
-					return new TypeUnifyTask().applyTypeUnificationRules(res.get(), fc);
-				}
-				else return x; //wenn nichts veraendert wurde wird x zurueckgegeben
-			}).collect(Collectors.toCollection(HashSet::new));
-			List<ResultSet> newResult = eqPrimePrimeSetRet.stream().map(unifyPairs ->
-					new ResultSet(UnifyTypeFactory.convert(unifyPairs, de.dhbwstuttgart.typeinference.constraints.Pair.generateTPHMap(cons))))
-					.collect(Collectors.toList());
-			UnifyResultEvent evt = new UnifyResultEvent(newResult);
-
-			for (UnifyResultListener listener : listeners) {
-				listener.onNewTypeResultFound(evt);
-			}
-		});
-	}
-}

View File: UnifyTaskModel.java (new file)

@@ -0,0 +1,18 @@
+package de.dhbwstuttgart.typeinference.unify;
+
+import java.util.ArrayList;
+
+public class UnifyTaskModel {
+
+	ArrayList<TypeUnifyTask> usedTasks = new ArrayList<>();
+
+	public synchronized void add(TypeUnifyTask t) {
+		usedTasks.add(t);
+	}
+
+	public synchronized void cancel() {
+		for(TypeUnifyTask t : usedTasks) {
+			t.myCancel(true);
+		}
+	}
+}
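
Note: UnifyTaskModel replaces the pool-shutdown approach of UnifyTaskModelParallel (deleted below). It is a plain synchronized registry: each TypeUnifyTask adds itself in its constructor (this.usedTasks.add(this) above), and cancel() flips myCancel on every registered task, which the tasks poll via myIsCancelled() at their synchronized checkpoints and then return empty result sets. A minimal sketch of the intended use, assuming a compiler instance as declared in JavaTXCompiler above; the watchdog thread and the timeout value are illustrative, not part of these commits:

    import java.io.File;
    import java.util.List;
    import de.dhbwstuttgart.core.JavaTXCompiler;

    // Hypothetical watchdog that aborts a long-running type inference.
    JavaTXCompiler compiler = new JavaTXCompiler(sources, null);   // sources: List<File>
    new Thread(() -> {
        try {
            Thread.sleep(60_000);                // illustrative one-minute budget
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
        compiler.usedTasks.cancel();             // registered tasks see myIsCancelled() == true
    }).start();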

View File: UnifyTaskModelParallel.java (deleted)

@@ -1,17 +0,0 @@
-package de.dhbwstuttgart.typeinference.unify;
-
-import java.util.concurrent.ForkJoinPool;
-
-public class UnifyTaskModelParallel {
-	private ForkJoinPool pool;
-
-	public void setPool(ForkJoinPool pool){
-		this.pool = pool;
-	}
-
-	public void cancel(){
-		if(this.pool != null) {
-			this.pool.shutdown();
-		}
-	}
-}

View File: WriterActiveObject.java (deleted)

@@ -1,53 +0,0 @@
-package de.dhbwstuttgart.typeinference.unify;
-
-import java.io.IOException;
-import java.io.Writer;
-import java.util.concurrent.ForkJoinPool;
-
-public class WriterActiveObject {
-	private Writer writer;
-	private ForkJoinPool pool;
-
-	public WriterActiveObject(Writer writer, ForkJoinPool pool){
-		this.writer = writer;
-		this.pool = pool;
-	}
-
-	public void close(){
-		pool.execute(()->{
-			try {
-				writer.close();
-			} catch (IOException e) {
-				System.out.println(e.getMessage());
-			}
-		});
-	}
-
-	public void write(String message){
-		pool.execute(()->{
-			try {
-				writer.write(message);
-				writer.flush();
-			} catch (IOException e) {
-				throw new RuntimeException(e);
-			}
-		});
-	}
-
-	public void writeNonThreaded(String message){
-		try {
-			writer.write(message);
-			writer.flush();
-		} catch (IOException e) {
-			throw new RuntimeException(e);
-		}
-	}
-
-	public void closeNonThreaded(){
-		try {
-			writer.close();
-		} catch (IOException e) {
-			throw new RuntimeException(e);
-		}
-	}
-}
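
Note: this deleted active object serialized every log write by queueing it onto a ForkJoinPool. With it gone, callers hold a plain java.io.Writer and guard each write with try/catch, as the RuleSet and TypeUnifyTask hunks above show. The replacement pattern in isolation, using the NullOutputStream sink these files default to (the message string is a placeholder):

    import java.io.*;
    import org.apache.commons.io.output.NullOutputStream;

    Writer logFile = new OutputStreamWriter(new NullOutputStream());
    try {
        logFile.write("FUNgreater: ...\n");      // blocking write, no pool involved
        logFile.flush();
    } catch (IOException e) {
        System.out.println("logFile-Error");
    }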

View File: UnifyTest.java

@@ -12,8 +12,8 @@ import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
 import de.dhbwstuttgart.typeinference.constraints.Pair;
 import de.dhbwstuttgart.typeinference.result.ResultSet;
 import de.dhbwstuttgart.typeinference.unify.TypeUnify;
-import de.dhbwstuttgart.typeinference.unify.UnifyResultModelParallel;
-import de.dhbwstuttgart.typeinference.unify.UnifyTaskModelParallel;
+import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
+import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;
 import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
 import de.dhbwstuttgart.typeinference.unify.model.*;
 import org.apache.commons.io.output.NullWriter;
@@ -34,66 +34,7 @@ public class UnifyTest {
 	public static final String rootDirectory = System.getProperty("user.dir")+"/resources/javFiles/";
-	@Test
-	public void unifyTest(){
-		UnifyType type1 = new PlaceholderType("a");
-		UnifyType type2 = new ReferenceType("List", new TypeParams(new PlaceholderType("b")));
-		UnifyPair pair1 = new UnifyPair(type1, type2, PairOperator.SMALLERDOT);
-		type1 = new ReferenceType("List", new TypeParams(new PlaceholderType("c")));
-		type2 = new PlaceholderType("a");
-		UnifyPair pair2 = new UnifyPair(type1, type2, PairOperator.SMALLERDOT);
-		type1 = new ReferenceType("String");
-		type2 = new PlaceholderType("b");
-		UnifyPair pair3 = new UnifyPair(type1, type2, PairOperator.SMALLERDOT);
-		type1 = new ReferenceType("Integer");
-		type2 = new PlaceholderType("c");
-		UnifyPair pair4 = new UnifyPair(type1, type2, PairOperator.SMALLERDOT);
-		Set<UnifyPair> undConstraints = new HashSet<>();
-		undConstraints.add(pair1);
-		undConstraints.add(pair2);
-		undConstraints.add(pair3);
-		undConstraints.add(pair4);
-		List<Set<Constraint<UnifyPair>>> oderConstraints = new ArrayList<>();
-		Set<UnifyPair> constraints = new HashSet<>();
-		type1 = new ReferenceType("Object");
-		type2 = new ReferenceType("String");
-		constraints.add(new UnifyPair(type2, type1, PairOperator.SMALLER));
-		type1 = new ReferenceType("Number");
-		type2 = new ReferenceType("Integer");
-		constraints.add(new UnifyPair(type2, type1, PairOperator.SMALLER));
-		type1 = new ReferenceType("Object");
-		type2 = new ReferenceType("Number");
-		constraints.add(new UnifyPair(type2, type1, PairOperator.SMALLER));
-		/* type1 = new ReferenceType("AbstractList", new TypeParams(new PlaceholderType("X")));
-		type2 = new ReferenceType("List", new TypeParams(new PlaceholderType("X")));
-		constraints.add(new UnifyPair(type1, type2, PairOperator.SMALLER));
-		type1 = new ReferenceType("Object");
-		type2 = new ReferenceType("AbstractList", new TypeParams(new PlaceholderType("X")));
-		constraints.add(new UnifyPair(type1, type2, PairOperator.SMALLER));*/
-		type1 = new ReferenceType("AbstractList", new PlaceholderType("X"));
-		type2 = new ReferenceType("List", new PlaceholderType("X"));
-		constraints.add(new UnifyPair(type2, type1, PairOperator.SMALLER));
-		type1 = new ReferenceType("Object");
-		type2 = new ReferenceType("AbstractList", new PlaceholderType("X"));
-		constraints.add(new UnifyPair(type2, type1, PairOperator.SMALLER));
-		IFiniteClosure finiteClosure = new FiniteClosure(constraints, new NullWriter());
-		TypeUnify unifyAlgo = new TypeUnify();
-		ConstraintSet< Pair> cons = new ConstraintSet<>();
-		UnifyResultModelParallel urm = new UnifyResultModelParallel(cons, finiteClosure);
-		UnifyTaskModelParallel tasks = new UnifyTaskModelParallel();
-		Set<Set<UnifyPair>> solution = unifyAlgo.unify(undConstraints, oderConstraints, finiteClosure, new NullWriter(), false, urm, tasks);
-		System.out.println(solution.size());
-	}
-	private UnifyPair genPair(String name){
+	private UnifyPair genPairListOfInteger(String name){
 		UnifyType type1 = new PlaceholderType(name);
 		UnifyType type2 = new ReferenceType("List", new TypeParams(new ReferenceType("Integer")));
@@ -101,39 +42,37 @@ public class UnifyTest {
 		return pair1;
 	}
-	private UnifyPair genPair2(String name){
+	private UnifyPair genPairListOfString(String name){
 		PlaceholderType type1 = new PlaceholderType(name);
-		type1.setVariance(1);
 		UnifyType type2 = new ReferenceType("List", new TypeParams(new ReferenceType("String")));
 		UnifyPair pair1 = new UnifyPair(type2, type1, PairOperator.SMALLERDOT);
 		return pair1;
 	}
 	@Test
-	public void smallUnifyTest(){
-		UnifyType type1 = new PlaceholderType("a");
-		UnifyType type2 = new ReferenceType("List", new TypeParams(new ReferenceType("Integer")));
-		UnifyPair pair1 = new UnifyPair(type1, type2, PairOperator.SMALLERDOT);
+	public void unifyTest(){
+		UnifyType type1;
+		UnifyType type2;
 		Set<UnifyPair> undConstraints = new HashSet<>();
-		undConstraints.add(genPair("a"));
-		undConstraints.add(genPair2("a"));
-		undConstraints.add(genPair("b"));
-		undConstraints.add(genPair2("b"));
-		undConstraints.add(genPair("c"));
-		undConstraints.add(genPair2("c"));
-		undConstraints.add(genPair("d"));
-		undConstraints.add(genPair2("d"));
-		undConstraints.add(genPair("e"));
-		undConstraints.add(genPair2("e"));
-		undConstraints.add(genPair("e1"));
-		undConstraints.add(genPair2("e1"));
-		undConstraints.add(genPair("e2"));
-		undConstraints.add(genPair2("e2"));
-		undConstraints.add(genPair("e3"));
-		undConstraints.add(genPair2("e3"));
+		undConstraints.add(genPairListOfInteger("a"));
+		undConstraints.add(genPairListOfString("a"));
+		undConstraints.add(genPairListOfInteger("b"));
+		undConstraints.add(genPairListOfString("b"));
+		undConstraints.add(genPairListOfInteger("c"));
+		undConstraints.add(genPairListOfString("c"));
+		undConstraints.add(genPairListOfInteger("d"));
+		undConstraints.add(genPairListOfString("d"));
+		undConstraints.add(genPairListOfInteger("e"));
+		undConstraints.add(genPairListOfString("e"));
+		undConstraints.add(genPairListOfInteger("e1"));
+		undConstraints.add(genPairListOfString("e1"));
+		undConstraints.add(genPairListOfInteger("e2"));
+		undConstraints.add(genPairListOfString("e2"));
+		undConstraints.add(genPairListOfInteger("e3"));
+		undConstraints.add(genPairListOfString("e3"));
 		List<Set<Constraint<UnifyPair>>> oderConstraints = new ArrayList<>();
@@ -151,12 +90,12 @@ public class UnifyTest {
 		IFiniteClosure finiteClosure = new FiniteClosure(constraints, new NullWriter());
 		TypeUnify unifyAlgo = new TypeUnify();
-		ConstraintSet< Pair> cons = new ConstraintSet<>();
-		UnifyResultModelParallel urm = new UnifyResultModelParallel(cons, finiteClosure);
-		UnifyTaskModelParallel tasks = new UnifyTaskModelParallel();
+		ConstraintSet<Pair> cons = new ConstraintSet<>();
+		UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
+		UnifyTaskModel tasks = new UnifyTaskModel();
 		Set<Set<UnifyPair>> solution = unifyAlgo.unify(undConstraints, oderConstraints, finiteClosure, new NullWriter(), false, urm, tasks);
 		System.out.println(solution.size());
-		System.out.println(solution);
+		//System.out.println(solution);
 	}
 	/*
 	@Test