9 Commits

Author  SHA1  Message  Date
NoName11234  3951b8451b  made constraintsetrepository volatile  2024-05-29 18:07:08 +02:00
NoName11234  a04be1336a  fixed matrixk2 test  2024-05-23 19:30:22 +02:00
NoName11234  dee55f0b0f  fixed missing oderConstraints  2024-05-23 10:50:23 +02:00
NoName11234  c58897f5ba  implemented scalark3 test  2024-05-23 09:40:19 +02:00
NoName11234  c938e69c76  implemented scalark2test  2024-05-15 17:41:40 +02:00
NoName11234  1475661b84  Revert "added volatile for testing" (This reverts commit e397375d2a.)  2024-05-02 17:57:29 +02:00
NoName11234  e397375d2a  added volatile for testing  2024-05-02 17:33:00 +02:00
NoName11234  3bb7f38805  implemented ConstraintSetRepository  2024-05-01 20:05:28 +02:00
NoName11234  6a92b0f194  added class ConstraintSetRepository  2024-05-01 19:42:24 +02:00
6 changed files with 119 additions and 102 deletions

ConstraintSetRepository.java (new file)

@@ -0,0 +1,21 @@
package de.dhbwstuttgart.typeinference.unify;

import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

import java.util.HashSet;
import java.util.Set;

public class ConstraintSetRepository {
    private Set<Integer> sets = new HashSet<>();

    public boolean containsSet(Set<Set<UnifyPair>> set) {
        Integer hash = Integer.valueOf(set.hashCode());
        if (sets.contains(hash)) {
            return true;
        } else {
            sets.add(hash);
            return false;
        }
    }
}
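
For orientation, a self-contained sketch of the idea behind containsSet: the repository stores only the hashCode of each set it has been asked about, so the first call records the set and returns false, while a later call with an equal set returns true and lets the caller skip recomputation. Because identity is reduced to the hash value, two different sets that happen to collide would also be reported as already seen. The SeenSetRepository and SeenSetRepositoryDemo names, the generic type parameter, and the synchronized modifier are illustrative assumptions only; the committed class is fixed to Set<Set<UnifyPair>> and does not synchronize access.

import java.util.HashSet;
import java.util.Set;

// Generic sketch of the technique used by ConstraintSetRepository: remember the hash of
// every set seen so far and report whether an equal (or hash-colliding) set came up before.
class SeenSetRepository<T> {
    private final Set<Integer> hashes = new HashSet<>();

    // Returns true if an equal set (or one with the same hashCode) was seen before;
    // otherwise records the hash and returns false.
    synchronized boolean containsSet(Set<T> set) {
        return !hashes.add(set.hashCode());
    }
}

public class SeenSetRepositoryDemo {
    public static void main(String[] args) {
        SeenSetRepository<String> repo = new SeenSetRepository<>();
        Set<String> a = Set.of("x", "y");
        Set<String> b = Set.of("y", "x");          // equal set, same hashCode
        System.out.println(repo.containsSet(a));   // false: first encounter, hash recorded
        System.out.println(repo.containsSet(b));   // true: already seen, caller can skip recomputation
    }
}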

RuleSet.java

@@ -9,7 +9,6 @@ import java.util.Optional;
import java.util.Queue;
import java.util.Set;
import java.util.Stack;
import java.util.concurrent.ForkJoinPool;
import java.util.function.Function;
import java.util.stream.Collectors;
@@ -36,7 +35,6 @@ import java.io.Writer;
import java.io.OutputStreamWriter;
import org.apache.commons.io.output.NullOutputStream;
import org.apache.commons.io.output.NullWriter;
/**
* Implementation of the type inference rules.
@@ -45,14 +43,14 @@ import org.apache.commons.io.output.NullWriter;
*/
public class RuleSet implements IRuleSet{
WriterActiveObject logFile;
Writer logFile;
public RuleSet() {
super();
logFile = new WriterActiveObject(new NullWriter(), ForkJoinPool.commonPool());
logFile = new OutputStreamWriter(new NullOutputStream());
}
RuleSet(WriterActiveObject logFile) {
RuleSet(Writer logFile) {
this.logFile = logFile;
}
@@ -868,8 +866,14 @@ public class RuleSet implements IRuleSet{
UnifyType r = x.getRhsType();
if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
} );
logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNred: " + result + "\n");
try {
logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNred: " + result + "\n");
logFile.flush();
}
catch (IOException e) {
System.out.println("logFile-Error");
}
return Optional.of(result);
}
@@ -912,9 +916,14 @@ public class RuleSet implements IRuleSet{
UnifyType r = x.getRhsType();
if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
} );
logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNgreater: " + result + "\n");
try {
logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNgreater: " + result + "\n");
logFile.flush();
}
catch (IOException e) {
System.out.println("lofFile-Error");
}
return Optional.of(result);
}
@@ -957,9 +966,14 @@ public class RuleSet implements IRuleSet{
UnifyType r = x.getRhsType();
if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
} );
logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNsmaller: " + result + "\n");
try {
logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNsmaller: " + result + "\n");
logFile.flush();
}
catch (IOException e) {
System.out.println("lofFile-Error");
}
return Optional.of(result);
}
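
The three hunks above replace the former asynchronous WriterActiveObject calls with the same inline pattern: write the two lines to the plain Writer, flush, and report an IOException on the console. As a side note, a small helper could fold that repetition into one place; the LogWrites.safeLog method below is a hypothetical refactoring sketch under that assumption, not code from the commit.

import java.io.IOException;
import java.io.Writer;

// Hypothetical helper consolidating the repeated try/catch around logFile writes.
final class LogWrites {
    private LogWrites() {}

    static void safeLog(Writer logFile, String... lines) {
        try {
            for (String line : lines) {
                logFile.write(line + "\n");
            }
            logFile.flush();
        } catch (IOException e) {
            System.out.println("logFile-Error");
        }
    }
}

// Equivalent call for the first hunk:
//   LogWrites.safeLog(logFile, "FUNgreater: " + pair, "FUNred: " + result);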

TypeUnify.java

@@ -29,8 +29,8 @@ public class TypeUnify {
* @return
*/
public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks, new ConstraintSetRepository());
ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors(), ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObject(logFile, pool), log, 0, ret, usedTasks);
pool.invoke(unifyTask);
Set<Set<UnifyPair>> res = unifyTask.join();
try {
@@ -55,8 +55,8 @@ public class TypeUnify {
* @return
*/
public UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks, new ConstraintSetRepository());
ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors(), ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObject(logFile, pool), log, 0, ret, usedTasks);
pool.invoke(unifyTask);
return ret;
}
@@ -73,9 +73,9 @@ public class TypeUnify {
* @return
*/
public UnifyResultModel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors(), ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true);
TypeUnifyTask unifyTask = //new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObject(logFile, pool), log, 0, ret, usedTasks, new WriterActiveObject(statistics, pool));
new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks, new ConstraintSetRepository(), statistics);
ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors(), ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true);
pool.invoke(unifyTask);
Set<Set<UnifyPair>> res = unifyTask.join();
try {
@@ -109,7 +109,8 @@ public class TypeUnify {
* @return
*/
public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, new WriterActiveObject(logFile, ForkJoinPool.commonPool()), log, 0, ret, usedTasks, new WriterActiveObject(statistics, ForkJoinPool.commonPool()));
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0, ret, usedTasks, new ConstraintSetRepository());
unifyTask.statistics = statistics;
Set<Set<UnifyPair>> res = unifyTask.compute();
try {
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");

TypeUnify2Task.java

@@ -23,14 +23,14 @@ public class TypeUnify2Task extends TypeUnifyTask {
TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
Set<UnifyPair> nextSetElement,
IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks,
Set<UnifyPair> methodSignatureConstraintUebergabe, WriterActiveObject statistics) {
this(setToFlatten, eq, oderConstraints, nextSetElement, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, methodSignatureConstraintUebergabe );
IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks,
Set<UnifyPair> methodSignatureConstraintUebergabe, ConstraintSetRepository constraintSetRepository, Writer statistics) {
this(setToFlatten, eq, oderConstraints, nextSetElement, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, methodSignatureConstraintUebergabe, constraintSetRepository);
}
public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks, Set<UnifyPair> methodSignatureConstraintUebergabe) {
super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks, Set<UnifyPair> methodSignatureConstraintUebergabe, ConstraintSetRepository constraintSetRepository) {
super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, constraintSetRepository);
this.setToFlatten = setToFlatten;
this.nextSetElement = nextSetElement;
this.methodSignatureConstraintUebergabe = methodSignatureConstraintUebergabe;
@@ -46,7 +46,11 @@ public class TypeUnify2Task extends TypeUnifyTask {
System.out.println("two");
}
one = true;
Set<Set<UnifyPair>> res = unify2(setToFlatten, eq, oderConstraintsField, fc, parallel, rekTiefeField, methodSignatureConstraintUebergabe);
Set<Set<UnifyPair>> res = new HashSet<>();
if(!constraintSetRepository.containsSet(setToFlatten)){
res = unify2(setToFlatten, eq, oderConstraintsField, fc, parallel, rekTiefeField, methodSignatureConstraintUebergabe);
}
/*if (isUndefinedPairSetSet(res)) {
return new HashSet<>(); }
else
@@ -64,6 +68,13 @@ public class TypeUnify2Task extends TypeUnifyTask {
}
public void closeLogFile() {
logFile.close();
try {
logFile.close();
}
catch (IOException ioE) {
System.err.println("no log-File" + thNo);
}
}
}

TypeUnifyTask.java

@@ -13,7 +13,6 @@ import java.util.List;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;
import java.util.function.BiFunction;
import java.util.function.BinaryOperator;
@@ -80,7 +79,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
protected boolean one = false;
public static final String rootDirectory = System.getProperty("user.dir")+"/test/logFiles/";
protected WriterActiveObject logFile;
Writer logFile;
/**
* The implementation of setOps that will be used during the unification
@@ -130,7 +129,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
volatile UnifyTaskModel usedTasks;
static WriterActiveObject statistics;
static Writer statistics;
protected volatile ConstraintSetRepository constraintSetRepository;
public TypeUnifyTask() {
rules = new RuleSet();
@@ -151,13 +151,14 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
*/
//statistics
public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks, WriterActiveObject statistics) {
this(eq,oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks, ConstraintSetRepository constraintSetRepository, Writer statistics) {
this(eq,oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, constraintSetRepository);
this.statistics = statistics;
}
public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks) {
public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks, ConstraintSetRepository constraintSetRepository) {
synchronized (this) {
if(statistics==null){
statistics = new WriterActiveObject(new NullWriter(), ForkJoinPool.commonPool());
statistics = new NullWriter();
}
this.eq = eq;
//this.oderConstraints = oderConstraints.stream().map(x -> x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
@@ -183,11 +184,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
thNo = totalnoOfThread;
writeLog("thNo2 " + thNo);
try {
if(log){
this.logFile = new WriterActiveObject(new FileWriter(new File(System.getProperty("user.dir") + "/logFiles/" + "Thread_"+thNo)), ForkJoinPool.commonPool());
}else{
this.logFile = new WriterActiveObject(new NullWriter(), ForkJoinPool.commonPool());
}
this.logFile = log ? new FileWriter(new File(System.getProperty("user.dir") + "/logFiles/" + "Thread_"+thNo))
: new OutputStreamWriter(new NullOutputStream());
logFile.write("");
}
catch (IOException e) {
System.err.println("log-File nicht vorhanden");
@@ -211,6 +210,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
this.urm = urm;
this.usedTasks = usedTasks;
this.usedTasks.add(this);
this.constraintSetRepository = constraintSetRepository;
}
}
/**
@@ -267,8 +268,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
.filter(x -> x.size()>1)
.collect(Collectors.toCollection(ArrayList::new));
Set<Set<UnifyPair>> res = unify(neweq, remainingOderconstraints, fc, parallel, rekTiefeField, new HashSet<>());
logFile.close();
try {
logFile.close();
}
catch (IOException ioE) {
System.err.println("no log-File");
}
if (isUndefinedPairSetSet(res)) {
//fuer debug-Zwecke
ArrayList al = res.stream().map(x -> x.stream().collect(Collectors.toCollection(ArrayList::new)))
@@ -816,8 +821,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<Set<UnifyPair>> newElemsOrig = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraintsOrig = new ArrayList<>(oderConstraints);
newElemsOrig.add(a);
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, methodSignatureConstraint);
//Check whether the set has already been computed
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, methodSignatureConstraint, constraintSetRepository);
forkOrig.fork();
while (!nextSetasList.isEmpty()) {
@@ -831,9 +835,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint));
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint), constraintSetRepository);
forks.add(fork);
//Check whether the set has already been computed
fork.fork();
}
@@ -851,7 +854,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}else{
elems.add(a);
//Check whether the set has already been computed
res = unify2(elems, eq, oderConstraints, fc, parallel, rekTiefe, new HashSet<>(methodSignatureConstraint));
if(!constraintSetRepository.containsSet(elems)){
res = unify2(elems, eq, oderConstraints, fc, parallel, rekTiefe, new HashSet<>(methodSignatureConstraint));
}
}
//From here on, all parallel computations are merged again.
@@ -2251,15 +2256,33 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
void writeLog(String str) {
synchronized ( this ) {
if (log && finalresult) {
logFile.write("Thread no.:" + thNo + "\n" + "parallel:" + parallel + "\n" + str+"\n\n");
}
try {
logFile.write("Thread no.:" + thNo + "\n");
logFile.write("parallel:" + parallel + "\n");
logFile.write(str+"\n\n");
logFile.flush();
}
catch (IOException e) {
System.err.println("kein LogFile");
}
}
}
}
void writeStatistics(String str) {
if (finalresult) {
statistics.write("Thread No. " + thNo + ": " + str + "\n");
}
synchronized ( this ) {
try {
statistics.write("Thread No. " + thNo + ": " + str + "\n");
statistics.flush();
}
catch (IOException e) {
System.err.println("kein StatisticsFile");
}
}}
}
}

WriterActiveObject.java (deleted)

@@ -1,53 +0,0 @@
package de.dhbwstuttgart.typeinference.unify;

import java.io.IOException;
import java.io.Writer;
import java.util.concurrent.ForkJoinPool;

public class WriterActiveObject {
    private Writer writer;
    private ForkJoinPool pool;

    public WriterActiveObject(Writer writer, ForkJoinPool pool) {
        this.writer = writer;
        this.pool = pool;
    }

    public void close() {
        pool.execute(() -> {
            try {
                writer.close();
            } catch (IOException e) {
                System.out.println(e.getMessage());
            }
        });
    }

    public void write(String message) {
        pool.execute(() -> {
            try {
                writer.write(message);
                writer.flush();
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        });
    }

    public void writeNonThreaded(String message) {
        try {
            writer.write(message);
            writer.flush();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    public void closeNonThreaded() {
        try {
            writer.close();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
}
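
For context on the deletion: WriterActiveObject wrapped a Writer in the style of the active-object pattern, submitting each write and close as its own task to a ForkJoinPool. Since separately submitted pool tasks carry no ordering guarantee, a close() queued right after a write() may run first, which is one plausible reason for returning to a plain Writer with inline try/catch blocks, as the other files in this changeset do. The sketch below shows how the removed API was called; the class name WriterActiveObjectUsageSketch and the file name Thread_42 are made up, and it compiles only against the pre-change sources that still contain WriterActiveObject.

import de.dhbwstuttgart.typeinference.unify.WriterActiveObject;

import java.io.FileWriter;
import java.io.IOException;
import java.util.concurrent.ForkJoinPool;

public class WriterActiveObjectUsageSketch {
    public static void main(String[] args) throws IOException {
        WriterActiveObject log = new WriterActiveObject(
                new FileWriter(System.getProperty("user.dir") + "/logFiles/Thread_42"),
                ForkJoinPool.commonPool());

        // write() and close() are each submitted as independent ForkJoinPool tasks,
        // so the close below is not guaranteed to run after the preceding write.
        log.write("thNo2 42\n");
        log.close();

        // The NonThreaded variants bypassed the pool when ordering mattered:
        // log.writeNonThreaded("final line\n");
        // log.closeNonThreaded();
    }
}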