4 Commits

11 changed files with 1172 additions and 26 deletions

File diff suppressed because it is too large.

View File

@@ -28,11 +28,7 @@ import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import de.dhbwstuttgart.typeinference.unify.model.WildcardType;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.distributeVariance;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.io.OutputStreamWriter;
import org.apache.commons.io.output.NullOutputStream;
@@ -42,16 +38,16 @@ import org.apache.commons.io.output.NullOutputStream;
* @author Florian Steurer
*
*/
public class RuleSet implements IRuleSet{
public class RuleSetFJP implements IRuleSet{
WriterActiveObject logFile;
WriterActiveObjectFJP logFile;
public RuleSet() {
public RuleSetFJP() {
super();
logFile = new WriterActiveObject(new OutputStreamWriter(new NullOutputStream()), ForkJoinPool.commonPool());
logFile = new WriterActiveObjectFJP(new OutputStreamWriter(new NullOutputStream()), ForkJoinPool.commonPool());
}
RuleSet(WriterActiveObject logFile) {
RuleSetFJP(WriterActiveObjectFJP logFile) {
this.logFile = logFile;
}
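
The rename makes the pool binding explicit: RuleSetFJP is always paired with a WriterActiveObjectFJP. A minimal construction sketch (within the same package, since the second constructor is package-private), assuming only the two constructors shown above; the OutputStreamWriter target is illustrative:

// Sketch, not part of the diff: the two ways the renamed rule set is constructed.
IRuleSet silent = new RuleSetFJP();                     // logs into a NullOutputStream on the common pool
IRuleSet logged = new RuleSetFJP(new WriterActiveObjectFJP(
        new OutputStreamWriter(System.out),             // illustrative target; a FileWriter would need IOException handling
        ForkJoinPool.commonPool()));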

View File

@@ -25,7 +25,7 @@ public class TypeUnify {
ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors(), ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true);
taskModel.setPool(pool);
resultModel.setPool(pool);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObject(logFile, pool), log, resultModel, pool);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObjectFJP(logFile, pool), log, resultModel, pool);
pool.invoke(unifyTask);
Set<Set<UnifyPair>> res = unifyTask.join();
return res;
@@ -44,7 +44,7 @@ public class TypeUnify {
ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors(), ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true);
taskModel.setPool(pool);
resultModel.setPool(pool);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObject(logFile, pool), log, resultModel, pool);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObjectFJP(logFile, pool), log, resultModel, pool);
pool.invoke(unifyTask);
return resultModel;
}
@@ -63,7 +63,7 @@ public class TypeUnify {
taskModel.setPool(pool);
resultModel.setPool(pool);
TypeUnifyTask unifyTask = //new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObject(logFile, pool), log, resultModel, pool, statistics);
new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObjectFJP(logFile, pool), log, resultModel, pool, statistics);
pool.invoke(unifyTask);
unifyTask.join();
@@ -90,7 +90,7 @@ public class TypeUnify {
*/
public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallel resultModel, UnifyTaskModelParallel taskModel) {
resultModel.setPool(ForkJoinPool.commonPool());
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, new WriterActiveObject(logFile, ForkJoinPool.commonPool()), log, resultModel, ForkJoinPool.commonPool());
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, new WriterActiveObjectFJP(logFile, ForkJoinPool.commonPool()), log, resultModel, ForkJoinPool.commonPool());
unifyTask.statisticsFile = statistics;
Set<Set<UnifyPair>> res = unifyTask.compute();
return res;

View File

@@ -18,13 +18,13 @@ public class TypeUnify2Task extends TypeUnifyTask {
TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
Set<UnifyPair> nextSetElement,
IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, UnifyResultModelParallel urm,
IFiniteClosure fc, boolean parallel, WriterActiveObjectFJP logFile, Boolean log, UnifyResultModelParallel urm,
Set<UnifyPair> methodSignatureConstraintUebergabe, ForkJoinPool pool, Writer statistics, ConstraintSetRepository constraintSetRepository) {
this(setToFlatten, eq, oderConstraints, nextSetElement, fc, parallel, logFile, log, urm, methodSignatureConstraintUebergabe, pool, constraintSetRepository);
}
public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, UnifyResultModelParallel urm, Set<UnifyPair> methodSignatureConstraintUebergabe, ForkJoinPool pool, ConstraintSetRepository constraintSetRepository) {
public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, WriterActiveObjectFJP logFile, Boolean log, UnifyResultModelParallel urm, Set<UnifyPair> methodSignatureConstraintUebergabe, ForkJoinPool pool, ConstraintSetRepository constraintSetRepository) {
super(eq, oderConstraints, fc, parallel, logFile, log, urm, pool);
this.setToFlatten = setToFlatten;
this.nextSetElement = nextSetElement;

View File

@@ -0,0 +1,83 @@
package de.dhbwstuttgart.typeinference.unify;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import org.apache.commons.io.output.NullOutputStream;
import org.apache.commons.io.output.NullWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ExecutorService;
public class TypeUnifyInitialTask implements Runnable{
Boolean log = true; //indicates whether a log file should be written to System.getProperty("user.dir")+"/test/logFiles/log"
/**
* For the threads
*/
protected UnifyResultModelParallel urm;
public static final String rootDirectory = System.getProperty("user.dir") + "/test/logFiles/";
protected WriterActiveObjectES logFile;
protected ExecutorService executorService;
/**
* The implementation of the standard unify that will be used during the unification
*/
protected IUnify stdUnify = new MartelliMontanariUnify();
/**
* The implementation of the rules that will be used during the unification.
*/
protected IRuleSet rules;
protected Set<UnifyPair> eq; //und-constraints
protected List<Set<Constraint<UnifyPair>>> oderConstraintsField;
protected IFiniteClosure fc;
protected boolean parallel;
static Writer statisticsFile = new NullWriter();
//Attributes for the recursion optimization -> paths already computed in one thread are not computed a second time
protected volatile ConstraintSetRepository constraintSetRepository = new ConstraintSetRepository();
public TypeUnifyInitialTask(){
this.rules = new RuleSetFJP();
}
public TypeUnifyInitialTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Boolean log, UnifyResultModelParallel urm, ExecutorService executorService) {
this.eq = eq;
this.oderConstraintsField = oderConstraints;
this.fc = fc;
this.parallel = parallel;
this.log = log;
this.executorService = executorService;
try {
if (log) {
this.logFile = new WriterActiveObjectES(new FileWriter(System.getProperty("user.dir") + "/logFiles/" + "Thread_" + UUID.randomUUID()), this.executorService);
} else {
this.logFile = new WriterActiveObjectES(new OutputStreamWriter(new NullOutputStream()), this.executorService);
}
} catch (IOException e) {
System.err.println("log-File nicht vorhanden");
}
rules = new RuleSetES(logFile);
this.urm = urm;
}
@Override
public void run() {
}
}
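
TypeUnifyInitialTask is a plain Runnable, so unlike the RecursiveTask-based TypeUnifyTask it targets an ExecutorService rather than a ForkJoinPool. A usage sketch under the assumption that run() will later perform the unification; the helper name startUnify and the arguments eq, oderConstraints, fc and urm are placeholders, not taken from the diff:

// Illustrative only; the arguments stand in for real constraint sets and models.
static void startUnify(Set<UnifyPair> eq,
                       List<Set<Constraint<UnifyPair>>> oderConstraints,
                       IFiniteClosure fc,
                       UnifyResultModelParallel urm) {
    ExecutorService es = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
    // log = false, so the constructor falls back to the NullOutputStream-backed writer
    es.submit(new TypeUnifyInitialTask(eq, oderConstraints, fc, true, false, urm, es));
    // lifecycle note: es.shutdown() must wait until the task and its queued log writes are done
}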

View File

@@ -0,0 +1,5 @@
package de.dhbwstuttgart.typeinference.unify;
public class TypeUnifySubTask extends TypeUnifyInitialTask{
}

View File

@@ -66,7 +66,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
int thNo;
public static final String rootDirectory = System.getProperty("user.dir") + "/test/logFiles/";
protected WriterActiveObject logFile;
protected WriterActiveObjectFJP logFile;
protected ForkJoinPool pool;
/**
@@ -94,16 +94,16 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
public boolean hasBeenCalculated = false;
public TypeUnifyTask() {
rules = new RuleSet();
rules = new RuleSetFJP();
}
//statistics
public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, UnifyResultModelParallel urm, ForkJoinPool pool, Writer statisticsFile) {
public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, WriterActiveObjectFJP logFile, Boolean log, UnifyResultModelParallel urm, ForkJoinPool pool, Writer statisticsFile) {
this(eq, oderConstraints, fc, parallel, logFile, log, urm, pool);
this.statisticsFile = statisticsFile;
}
public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, UnifyResultModelParallel urm, ForkJoinPool pool) {
public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, WriterActiveObjectFJP logFile, Boolean log, UnifyResultModelParallel urm, ForkJoinPool pool) {
this.eq = eq;
this.oderConstraintsField = oderConstraints;
this.fc = fc;
@@ -117,14 +117,14 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
writeLog("thNo2 " + thNo);
try {
if (log) {
this.logFile = new WriterActiveObject(new FileWriter(System.getProperty("user.dir") + "/logFiles/" + "Thread_" + thNo), pool);
this.logFile = new WriterActiveObjectFJP(new FileWriter(System.getProperty("user.dir") + "/logFiles/" + "Thread_" + thNo), pool);
} else {
this.logFile = new WriterActiveObject(new OutputStreamWriter(new NullOutputStream()), pool);
this.logFile = new WriterActiveObjectFJP(new OutputStreamWriter(new NullOutputStream()), pool);
}
} catch (IOException e) {
System.err.println("log-File nicht vorhanden");
}
rules = new RuleSet(logFile);
rules = new RuleSetFJP(logFile);
this.urm = urm;
}

View File

@@ -36,7 +36,7 @@ public class UnifyResultModel {
public void notify(Set<Set<UnifyPair>> eqPrimePrimeSet) {
Set<Set<UnifyPair>> eqPrimePrimeSetRet = eqPrimePrimeSet.stream().map(x -> {
Optional<Set<UnifyPair>> res = new RuleSet().subst(x.stream().map(y -> {
Optional<Set<UnifyPair>> res = new RuleSetFJP().subst(x.stream().map(y -> {
if (y.getPairOp() == PairOperator.SMALLERDOTWC) y.setPairOp(PairOperator.EQUALSDOT);
return y; //all pairs a <.? b are replaced by a =. b
}).collect(Collectors.toCollection(HashSet::new)));

View File

@@ -34,7 +34,7 @@ public class UnifyResultModelParallel {
public void notify(Set<Set<UnifyPair>> eqPrimePrimeSet){
pool.execute(()->{
Set<Set<UnifyPair>> eqPrimePrimeSetRet = eqPrimePrimeSet.stream().map(x -> {
Optional<Set<UnifyPair>> res = new RuleSet().subst(x.stream().map(y -> {
Optional<Set<UnifyPair>> res = new RuleSetFJP().subst(x.stream().map(y -> {
if (y.getPairOp() == PairOperator.SMALLERDOTWC) y.setPairOp(PairOperator.EQUALSDOT);
return y; //all pairs a <.? b are replaced by a =. b
}).collect(Collectors.toCollection(HashSet::new)));

View File

@@ -0,0 +1,53 @@
package de.dhbwstuttgart.typeinference.unify;
import java.io.IOException;
import java.io.Writer;
import java.util.concurrent.ExecutorService;
public class WriterActiveObjectES {
private Writer writer;
private ExecutorService executorService;
public WriterActiveObjectES(Writer writer, ExecutorService executorService){
this.writer = writer;
this.executorService = executorService;
}
public void close(){
executorService.submit(()->{
try {
writer.close();
} catch (IOException e) {
System.out.println(e.getMessage());
}
});
}
public void write(String message){
executorService.submit(()->{
try {
writer.write(message);
writer.flush();
} catch (IOException e) {
throw new RuntimeException(e);
}
});
}
public void writeNonThreaded(String message){
try {
writer.write(message);
writer.flush();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
public void closeNonThreaded(){
try {
writer.close();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
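
WriterActiveObjectES is the ExecutorService counterpart of the existing writer: write() and close() are queued on the service so the unify code never blocks on log I/O, while the *NonThreaded variants stay synchronous. A small usage sketch; the single-thread executor and the file name are assumptions chosen so the queued operations run in submission order:

// Sketch only; a single-thread executor keeps write/close in submission order.
ExecutorService es = Executors.newSingleThreadExecutor();
try {
    WriterActiveObjectES log = new WriterActiveObjectES(
            new FileWriter("logFiles/Thread_" + UUID.randomUUID()), es);
    log.write("unify started\n");     // enqueued; the caller returns without waiting on I/O
    log.writeNonThreaded("sync\n");   // bypasses the queue and writes on the calling thread
    log.close();                      // enqueued after the first write, so it closes last
} catch (IOException e) {
    System.err.println("log-File nicht vorhanden");
}
es.shutdown();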

View File

@@ -4,11 +4,11 @@ import java.io.IOException;
import java.io.Writer;
import java.util.concurrent.ForkJoinPool;
public class WriterActiveObject {
public class WriterActiveObjectFJP {
private Writer writer;
private ForkJoinPool pool;
public WriterActiveObject(Writer writer, ForkJoinPool pool){
public WriterActiveObjectFJP(Writer writer, ForkJoinPool pool){
this.writer = writer;
this.pool = pool;
}
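
Apart from the rename, the ForkJoinPool-backed writer is used exactly as before. For comparison with the ES variant above, a usage sketch, assuming the class keeps its previous write()/close() methods; the pool size and writer target are illustrative:

// Sketch: the renamed FJP writer, mirroring the ExecutorService example above.
ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors());
WriterActiveObjectFJP log = new WriterActiveObjectFJP(
        new OutputStreamWriter(new NullOutputStream()), pool); // swap in a FileWriter to keep the output
log.write("unify started\n"); // scheduled on the pool instead of an ExecutorService
log.close();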