Compare commits

...

47 Commits

Author SHA1 Message Date
NoName11234
8f6eb8ff0a added unifyTest 2024-02-11 17:53:09 +01:00
NoName11234
bfefe90650 removed not functional unifyTest 2024-02-11 15:09:53 +01:00
NoName11234
c292ff2d9e added non-threaded close() method for logging 2024-01-25 20:08:32 +01:00
NoName11234
c14dd6e97c added support for non-threaded logging to WriterActiveObject 2024-01-25 19:55:56 +01:00
NoName11234
8f8ee9a385 fixed bug where constraints in test had undefinded placeholder 2024-01-24 22:20:13 +01:00
NoName11234
3863968a6e added smallest unify test possible 2024-01-23 18:59:39 +01:00
NoName11234
03c432455d fixed bug where statisticsFile was not initialized when using one of the constructors of TypeUnifyTask 2024-01-23 18:58:59 +01:00
NoName11234
441e50a70d removed useless synchronized-blocks and concenated multiple writes to logfile 2024-01-23 17:57:07 +01:00
NoName11234
0807885465 WriterActiveObject now uses same forkjointhreadpool as tasks 2024-01-21 22:16:35 +01:00
NoName11234
8abe7f6c28 updated classes to use WriterActiveObject for logging 2024-01-21 14:21:43 +01:00
NoName11234
2a92a0e48e added function for closing file to WriterActiveObject 2024-01-21 13:41:21 +01:00
NoName11234
b3639a3d08 removed synchronized(this) from TypeUnifyTask constructor 2024-01-20 23:23:08 +01:00
NoName11234
28969e7931 added class for thread safe writing in logfiles 2024-01-20 23:01:21 +01:00
NoName11234
b806fa88ff removed synchronized-block from writeLog() and writeStatistics() in TypeUnifyTask 2024-01-18 21:21:42 +01:00
NoName11234
301e9143ec changed output of unifyTest() 2024-01-18 21:20:48 +01:00
NoName11234
bb4eaaccc5 implemented unify test 2024-01-18 18:29:36 +01:00
NoName11234
4dbae46765 implemented list of UnifyPairs in test unifyTest() 2024-01-16 19:23:19 +01:00
NoName11234
c64071f235 Revert "added resources for tests"
This reverts commit 5aadb7545e31acd3b095e969b50443bee502416f.
2024-01-16 18:29:37 +01:00
NoName11234
94dbf1f7ad Revert "added resource files for testing"
This reverts commit 890f64c8138a5c9d7c8848eb8611a62f0bf6f1f7.
2024-01-16 18:27:59 +01:00
NoName11234
890f64c813 added resource files for testing 2023-12-26 17:47:18 +01:00
NoName11234
5aadb7545e added resources for tests 2023-12-26 17:28:08 +01:00
NoName11234
0c0ac61a02 changed outputstream for debugging to NullOutputStream 2023-12-26 16:59:37 +01:00
NoName11234
46192e3fd3 changed java version from 19 to 21 2023-12-26 16:45:35 +01:00
pl@gohorb.ba-horb.de
61653c5d88 modified: src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java 2023-05-25 10:05:10 +02:00
pl@gohorb.ba-horb.de
3cd608a4ac modified: src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java
modified:   src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
2023-05-15 16:56:04 +02:00
pl@gohorb.ba-horb.de
deec0ae706 Start branch unif23NoOptParallel
modified:   Makefile
	modified:   src/main/java/de/dhbwstuttgart/core/JavaTXCompiler.java
	modified:   src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
2023-04-24 17:18:45 +02:00
pl@gohorb.ba-horb.de
d6a79ea3a1 modified: src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java 2023-04-11 22:45:15 +02:00
pl@gohorb.ba-horb.de
1f909f13ee modified: src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java 2023-04-11 18:01:44 +02:00
pl@gohorb.ba-horb.de
be6f4bd578 Merge remote-tracking branch 'origin/targetBytecode' into unif23 2023-04-11 15:52:26 +02:00
pl@gohorb.ba-horb.de
478efd5649 Merge branch 'unif23' of ssh://gohorb.ba-horb.de/bahome/projekt/git/JavaCompilerCore into unif23 2023-04-11 15:40:27 +02:00
pl@gohorb.ba-horb.de
c73e57cf2b new file: resources/bytecode/javFiles/Scalar.jav 2023-04-11 15:35:15 +02:00
pl@gohorb.ba-horb.de
ce29f4bcf1 Merge branch 'unif23' of ssh://gohorb.ba-horb.de/bahome/projekt/git/JavaCompilerCore into Unif23
Conflicts:
	src/test/java/targetast/TestComplete.java
2023-04-10 22:14:46 +02:00
pl@gohorb.ba-horb.de
42821f3215 modified: src/main/java/de/dhbwstuttgart/typeinference/unify/RuleSet.java
modified:   src/test/java/targetast/TestComplete.java
2023-04-10 22:07:47 +02:00
pl@gohorb.ba-horb.de
f68afc88a6 modified: src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java 2023-04-06 17:58:32 +02:00
pl@gohorb.ba-horb.de
82061474b2 modified: resources/bytecode/javFiles/Scalar.jav
modified:   src/main/java/de/dhbwstuttgart/typeinference/typeAlgo/TYPEStmt.java
2023-04-04 17:26:13 +02:00
pl@gohorb.ba-horb.de
d849bc127f Merge remote-tracking branch 'origin/targetBytecode' into unif23 2023-04-04 16:47:19 +02:00
pl@gohorb.ba-horb.de
6815d8fc0a modified: Makefile
modified:   src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
2023-03-31 17:58:02 +02:00
pl@gohorb.ba-horb.de
317f8b1aad new file: Makefile
deleted:    Test.java
	modified:   src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
2023-03-31 15:54:17 +02:00
pl@gohorb.ba-horb.de
79335449d0 modified: src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
Statistics fertig (Version 1)
2023-03-23 12:00:04 +01:00
pl@gohorb.ba-horb.de
14606a846e modified: src/main/java/de/dhbwstuttgart/core/JavaTXCompiler.java
modified:   src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java
	modified:   src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java
	modified:   src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
2023-03-22 19:01:36 +01:00
2b67230a15 Consider public and standard constructors ones 2023-03-21 16:31:45 +01:00
pl@gohorb.ba-horb.de
29b05b56cc Merge branch 'targetBytecode' into unif23 2023-03-21 16:29:14 +01:00
pl@gohorb.ba-horb.de
08b9fc0ea3 modified: src/main/java/de/dhbwstuttgart/core/JavaTXCompiler.java
statistics eingefuegt
2023-03-21 16:23:18 +01:00
pl@gohorb.ba-horb.de
070dd16999 modified: src/main/java/de/dhbwstuttgart/typeinference/typeAlgo/TYPEStmt.java 2023-03-21 10:30:13 +01:00
pl@gohorb.ba-horb.de
9d7e46925d modified: src/main/java/de/dhbwstuttgart/typeinference/typeAlgo/TYPEStmt.java 2023-03-21 10:26:55 +01:00
pl@gohorb.ba-horb.de
d780d322f0 modified: src/main/java/de/dhbwstuttgart/core/JavaTXCompiler.java
modified:   src/main/java/de/dhbwstuttgart/typeinference/typeAlgo/TYPEStmt.java
2023-03-20 18:33:58 +01:00
pl@gohorb.ba-horb.de
867f3d39e8 modified: src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnifyTask.java
modified:   src/test/java/targetast/TestComplete.java
2023-03-20 16:05:28 +01:00
13 changed files with 472 additions and 272 deletions
Makefile
pom.xml
resources/bytecode/javFiles
src

Makefile (new file, 3 additions)

@ -0,0 +1,3 @@
NoOptParallel:
mvn -DskipTests package
cp target/JavaTXcompiler-0.1-jar-with-dependencies.jar target/JavaTXcompiler-0.1-jar-with-dependencies_NoOptParallel.jar

@ -54,8 +54,8 @@ http://maven.apache.org/maven-v4_0_0.xsd">
<version>3.8.0</version>
<configuration>
<compilerArgs>--enable-preview</compilerArgs>
<source>19</source>
<target>19</target>
<source>21</source>
<target>21</target>
</configuration>
</plugin>
<plugin>

@ -1,4 +1,4 @@
import java.util.Vector;
//import java.util.Vector;
import java.lang.Integer;
import java.lang.Float;
//import java.lang.Byte;
@ -6,21 +6,11 @@ import java.lang.Float;
public class Scalar extends Vector<Integer> {
Scalar(v) {
Integer i;
i = 0;
while(i < v.size()) {
this.add(v.elementAt(i));
i=i+1;
}
}
mul(v) {
var ret = 0;
var i = 0;
while(i < size()) {
ret = ret + this.elementAt(i) * v.elementAt(i);
i = i+1;
}
return ret;
}

@ -58,6 +58,7 @@ import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.sql.Timestamp;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Collectors;
@ -69,11 +70,12 @@ public class JavaTXCompiler {
//public static JavaTXCompiler INSTANCE;
final CompilationEnvironment environment;
Boolean resultmodel = false;
Boolean resultmodel = true;
public final Map<File, SourceFile> sourceFiles = new HashMap<>();
Boolean log = false; //gibt an ob ein Log-File nach System.getProperty("user.dir")+""/logFiles/"" geschrieben werden soll?
public volatile UnifyTaskModel usedTasks = new UnifyTaskModel();
private final DirectoryClassLoader classLoader;
static Writer statistics;
public JavaTXCompiler(File sourceFile) throws IOException, ClassNotFoundException {
this(Arrays.asList(sourceFile), null);
@ -86,6 +88,9 @@ public class JavaTXCompiler {
this(sourceFiles, null);
}
public JavaTXCompiler(List<File> sources, List<File> contextPath) throws IOException, ClassNotFoundException {
//statistics = new FileWriter(new File(System.getProperty("user.dir") + "/" + sources.get(0).getName() + "_"+ new Timestamp(System.currentTimeMillis())));
statistics = new OutputStreamWriter(new NullOutputStream());
statistics.write("test");
if(contextPath == null || contextPath.isEmpty()){
//When no contextPaths are given, the working directory is the sources root
contextPath = Lists.newArrayList(new File(System.getProperty("user.dir")));
@ -578,6 +583,7 @@ public class JavaTXCompiler {
unifyCons = unifyCons.map(distributeInnerVars);
logFile.write("\nUnify_distributeInnerVars: " + unifyCons.toString());
TypeUnify unify = new TypeUnify();
unify.statistics = statistics;
// Set<Set<UnifyPair>> results = new HashSet<>(); Nach vorne gezogen
logFile.write("FC:\\" + finiteClosure.toString() + "\n");
for (SourceFile sf : this.sourceFiles.values()) {
@ -730,6 +736,7 @@ public class JavaTXCompiler {
System.out.println("Constraints for Generated Generics: " + " ???");
logFile.write("RES_FINAL: " + li.getResults().toString() + "\n");
logFile.flush();
statistics.close();
return li.getResults();
}
/* UnifyResultModel End */
@ -765,6 +772,7 @@ public class JavaTXCompiler {
} catch (IOException e) {
System.err.println("kein LogFile");
}
statistics.close();
return results.stream()
.map((unifyPairs -> new ResultSet(UnifyTypeFactory.convert(unifyPairs, Pair.generateTPHMap(cons)))))
.collect(Collectors.toList());
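
The hunks above wire a static `statistics` Writer into JavaTXCompiler: by default it writes into a commons-io NullOutputStream (everything is discarded), while the commented-out FileWriter line shows the file-based alternative. A minimal sketch of that toggle, assuming commons-io is on the classpath; the flag and file name below are illustrative, not taken from the compiler:

import java.io.FileWriter;
import java.io.OutputStreamWriter;
import java.io.Writer;
import org.apache.commons.io.output.NullOutputStream;

public class StatisticsWriterDemo {
    public static void main(String[] args) throws Exception {
        boolean collectStatistics = false; // illustrative flag, not from the compiler

        // Same pattern as in the diff: either write statistics to a real
        // file or silently discard them via a NullOutputStream.
        Writer statistics = collectStatistics
                ? new FileWriter("statistics.txt")
                : new OutputStreamWriter(new NullOutputStream());

        statistics.write("test"); // mirrors the probe write in the constructor
        statistics.flush();
        statistics.close();
    }
}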

@ -35,8 +35,8 @@ public class ASTFactory {
JavaClassName name = new JavaClassName(jreClass.getName());
List<Method> methoden = new ArrayList<>();
List<de.dhbwstuttgart.syntaxtree.Constructor> konstruktoren = new ArrayList<>();
for(java.lang.reflect.Constructor constructor : jreClass.getConstructors()){
createConstructor(constructor, jreClass).map(c -> konstruktoren.add(c));
for(java.lang.reflect.Constructor constructor : jreClass.getDeclaredConstructors()){
createConstructor(constructor, jreClass).map(c -> konstruktoren.add(c));
}
Set<java.lang.reflect.Method> allMethods = new HashSet<>(Arrays.asList(jreClass.getMethods()));
Set<java.lang.reflect.Method> allDeclaredMethods = new HashSet<>(Arrays.asList(jreClass.getDeclaredMethods()));
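
The hunk above switches the constructor lookup in ASTFactory from getConstructors() to getDeclaredConstructors(), matching the commit "Consider public and standard constructors ones". A small self-contained sketch of the difference between the two reflection calls; the Sample class is purely illustrative:

import java.lang.reflect.Constructor;

public class ConstructorLookupDemo {
    static class Sample {
        public Sample() {}          // public constructor
        Sample(int x) {}            // package-private constructor
        private Sample(String s) {} // private constructor
    }

    public static void main(String[] args) {
        // getConstructors(): only the public constructor is returned
        Constructor<?>[] publicOnly = Sample.class.getConstructors();
        // getDeclaredConstructors(): all three constructors are returned,
        // regardless of access modifier
        Constructor<?>[] all = Sample.class.getDeclaredConstructors();
        System.out.println("public: " + publicOnly.length + ", declared: " + all.length);
    }
}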

@ -495,7 +495,7 @@ public class TYPEStmt implements StatementVisitor{
@Override
public void visit(Return returnExpr) {
returnExpr.retexpr.accept(this);
constraintsSet.addUndConstraint(new Pair(returnExpr.getType(),info.getCurrentTypeScope().getReturnType(), PairOperator.SMALLERDOT));
constraintsSet.addUndConstraint(new Pair(returnExpr.getType(),info.getCurrentTypeScope().getReturnType(), PairOperator.EQUALSDOT));
}
@Override
@ -610,8 +610,8 @@ public class TYPEStmt implements StatementVisitor{
//Fuer Bytecodegenerierung PL 2020-03-09 wird derzeit nicht benutzt ENDE
methodConstraint.add(new Pair(assumption.getReturnType(resolver), forMethod.getType(), PairOperator.SMALLERDOT));
extendsMethodConstraint.add(new Pair(assumption.getReturnType(resolver), forMethod.getType(), PairOperator.SMALLERDOT));
methodConstraint.add(new Pair(assumption.getReturnType(resolver), forMethod.getType(), PairOperator.EQUALSDOT));
extendsMethodConstraint.add(new Pair(assumption.getReturnType(resolver), forMethod.getType(), PairOperator.EQUALSDOT));
//methodConstraint.add(new Pair(assumption.getReturnType(resolver), forMethod.getType(), PairOperator.EQUALSDOT));
//extendsMethodConstraint.add(new Pair(assumption.getReturnType(resolver), forMethod.getType(), PairOperator.EQUALSDOT));

@ -9,10 +9,12 @@ import java.util.Optional;
import java.util.Queue;
import java.util.Set;
import java.util.Stack;
import java.util.concurrent.ForkJoinPool;
import java.util.function.Function;
import java.util.stream.Collectors;
import de.dhbwstuttgart.exceptions.DebugException;
import de.dhbwstuttgart.syntaxtree.type.ExtendsWildcardType;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
import de.dhbwstuttgart.typeinference.unify.model.ExtendsType;
@ -43,14 +45,14 @@ import org.apache.commons.io.output.NullOutputStream;
*/
public class RuleSet implements IRuleSet{
Writer logFile;
WriterActiveObject logFile;
public RuleSet() {
super();
logFile = new OutputStreamWriter(new NullOutputStream());
logFile = new WriterActiveObject(new OutputStreamWriter(new NullOutputStream()), ForkJoinPool.commonPool());
}
RuleSet(Writer logFile) {
RuleSet(WriterActiveObject logFile) {
this.logFile = logFile;
}
@ -390,26 +392,51 @@ public class RuleSet implements IRuleSet{
if((pair.getPairOp() != PairOperator.SMALLERDOT) && (pair.getPairOp() != PairOperator.SMALLERNEQDOT))
return false;
if (pair.getPairOp() == PairOperator.SMALLERNEQDOT) {
UnifyType lhs = pair.getLhsType();
UnifyType rhs = pair.getRhsType();
if (lhs instanceof WildcardType) {
lhs = ((WildcardType)lhs).getWildcardedType();
}
if (rhs instanceof WildcardType) {
rhs = ((WildcardType)rhs).getWildcardedType();
}
if (lhs.equals(rhs)){
return false;
}
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();
/*
* ty <. ? extends ty' is wrong
*/
if (rhsType instanceof ExtendsType) {
return false;
}
/*
* ? super ty <. ty' is wrong
* except Ty' = Object or ty' = ? super Object
*/
if ((lhsType instanceof SuperType) &&
(!(rhsType.equals(new ReferenceType("java.lang.Object", false)))) &&
!(rhsType.equals(new SuperType (new ReferenceType("java.lang.Object", false))))) {
return false;
}
/*
* ? extends ty <. ty' is equivalent to ty < ty'
*/
if (lhsType instanceof ExtendsType) {
lhsType = ((WildcardType)lhsType).getWildcardedType();
}
/*
* ty <. ? super ty' ist equivalent to ty <. ty'
*/
if (rhsType instanceof SuperType) {
rhsType = ((WildcardType)rhsType).getWildcardedType();
}
/*
* SMALLERNEQDOT => type must not be equal
*/
if (pair.getPairOp() == PairOperator.SMALLERNEQDOT && lhsType.equals(rhsType)){
return false;
}
UnifyType lhsType = pair.getLhsType();
if(!(lhsType instanceof ReferenceType) && !(lhsType instanceof PlaceholderType))
return false;
UnifyType rhsType = pair.getRhsType();
if(!(rhsType instanceof ReferenceType) && !(rhsType instanceof PlaceholderType))
return false;
@ -841,14 +868,8 @@ public class RuleSet implements IRuleSet{
UnifyType r = x.getRhsType();
if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
} );
try {
logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNred: " + result + "\n");
logFile.flush();
}
catch (IOException e) {
System.out.println("logFile-Error");
}
logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNred: " + result + "\n");
return Optional.of(result);
}
@ -891,14 +912,10 @@ public class RuleSet implements IRuleSet{
UnifyType r = x.getRhsType();
if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
} );
try {
logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNgreater: " + result + "\n");
logFile.flush();
}
catch (IOException e) {
System.out.println("lofFile-Error");
}
logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNgreater: " + result + "\n");
return Optional.of(result);
}
@ -941,14 +958,9 @@ public class RuleSet implements IRuleSet{
UnifyType r = x.getRhsType();
if (r instanceof PlaceholderType) { ((PlaceholderType)r).disableWildcardtable(); }
} );
try {
logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNsmaller: " + result + "\n");
logFile.flush();
}
catch (IOException e) {
System.out.println("lofFile-Error");
}
logFile.write("FUNgreater: " + pair + "\n");
logFile.write("FUNsmaller: " + result + "\n");
return Optional.of(result);
}

@ -1,23 +1,18 @@
package de.dhbwstuttgart.typeinference.unify;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ForkJoinPool;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
public class TypeUnify {
public static Writer statistics;
/**
* unify parallel ohne result modell
* @param undConstrains
@ -29,8 +24,8 @@ public class TypeUnify {
* @return
*/
public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
ForkJoinPool pool = new ForkJoinPool();
ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors(), ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObject(logFile, pool), log, 0, ret, usedTasks, pool);
pool.invoke(unifyTask);
Set<Set<UnifyPair>> res = unifyTask.join();
try {
@ -55,8 +50,8 @@ public class TypeUnify {
* @return
*/
public UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
ForkJoinPool pool = new ForkJoinPool();
ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors(), ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObject(logFile, pool), log, 0, ret, usedTasks, pool);
pool.invoke(unifyTask);
return ret;
}
@ -73,13 +68,17 @@ public class TypeUnify {
* @return
*/
public UnifyResultModel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
ForkJoinPool pool = new ForkJoinPool();
ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors(), ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true);
TypeUnifyTask unifyTask = //new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
new TypeUnifyTask(undConstrains, oderConstraints, fc, true, new WriterActiveObject(logFile, pool), log, 0, ret, usedTasks, pool, statistics);
pool.invoke(unifyTask);
Set<Set<UnifyPair>> res = unifyTask.join();
try {
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
logFile.flush();
unifyTask.statisticsFile.write("Backtracking: " + unifyTask.noBacktracking);
unifyTask.statisticsFile.write("\nLoops: " + unifyTask.noLoop);
}
catch (IOException e) {
System.err.println("no log-File");
@ -106,7 +105,8 @@ public class TypeUnify {
* @return
*/
public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0, ret, usedTasks);
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, new WriterActiveObject(logFile, ForkJoinPool.commonPool()), log, 0, ret, usedTasks, ForkJoinPool.commonPool());
unifyTask.statisticsFile = statistics;
Set<Set<UnifyPair>> res = unifyTask.compute();
try {
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");

@ -7,6 +7,7 @@ import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ForkJoinPool;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
@ -19,8 +20,18 @@ public class TypeUnify2Task extends TypeUnifyTask {
Set<Set<UnifyPair>> setToFlatten;
Set<UnifyPair> methodSignatureConstraintUebergabe;
public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks, Set<UnifyPair> methodSignatureConstraintUebergabe) {
super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
//statistics
TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq,
List<Set<Constraint<UnifyPair>>> oderConstraints,
Set<UnifyPair> nextSetElement,
IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks,
Set<UnifyPair> methodSignatureConstraintUebergabe, ForkJoinPool pool, Writer statistics) {
this(setToFlatten, eq, oderConstraints, nextSetElement, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, methodSignatureConstraintUebergabe, pool );
}
public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks, Set<UnifyPair> methodSignatureConstraintUebergabe, ForkJoinPool pool) {
super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, pool);
this.setToFlatten = setToFlatten;
this.nextSetElement = nextSetElement;
this.methodSignatureConstraintUebergabe = methodSignatureConstraintUebergabe;
@ -54,13 +65,10 @@ public class TypeUnify2Task extends TypeUnifyTask {
}
public void closeLogFile() {
try {
logFile.close();
}
catch (IOException ioE) {
System.err.println("no log-File" + thNo);
}
if(parallel){
logFile.close();
}else{
logFile.closeNonThreaded();
}
}
}

@ -13,20 +13,17 @@ import java.util.List;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;
import java.util.function.BiFunction;
import java.util.function.BinaryOperator;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.io.output.NullOutputStream;
import de.dhbwstuttgart.exceptions.TypeinferenceException;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.IMatch;
import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
@ -46,6 +43,7 @@ import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import de.dhbwstuttgart.typeinference.unify.model.WildcardType;
import de.dhbwstuttgart.util.Pair;
import de.dhbwstuttgart.typeinference.unify.model.OrderingUnifyPair;
import org.apache.commons.io.output.NullWriter;
import java.io.File;
import java.io.FileWriter;
@ -53,8 +51,6 @@ import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import com.google.common.collect.Ordering;
/**
* Implementation of the type unification algorithm
@ -80,11 +76,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
private static int totalnoOfThread = 0;
int thNo;
protected boolean one = false;
Integer MaxNoOfThreads = 8;
Integer MaxNoOfThreads = 128;
public static final String rootDirectory = System.getProperty("user.dir")+"/test/logFiles/";
Writer logFile;
protected WriterActiveObject logFile;
protected ForkJoinPool pool;
/**
* The implementation of setOps that will be used during the unification
*/
@ -125,12 +121,16 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
static int noBacktracking;
static int noLoop;
static Integer noShortendElements = 0;
Boolean myIsCanceled = false;
volatile UnifyTaskModel usedTasks;
static Writer statisticsFile = new NullWriter();
public TypeUnifyTask() {
rules = new RuleSet();
}
@ -149,9 +149,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
*/
public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks) {
synchronized (this) {
//statistics
public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks, ForkJoinPool pool, Writer statisticsFile) {
this(eq,oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, pool);
this.statisticsFile = statisticsFile;
}
public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, WriterActiveObject logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks, ForkJoinPool pool) {
this.eq = eq;
//this.oderConstraints = oderConstraints.stream().map(x -> x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
this.oderConstraintsField = oderConstraints; /*.stream().map(x -> {
@ -170,6 +173,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
this.parallel = parallel;
this.logFile = logFile;
this.log = log;
this.pool = pool;
noOfThread++;
totalnoOfThread++;
@ -177,9 +181,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
thNo = totalnoOfThread;
writeLog("thNo2 " + thNo);
try {
this.logFile = log ? new FileWriter(new File(System.getProperty("user.dir") + "/logFiles/" + "Thread_"+thNo))
: new OutputStreamWriter(new NullOutputStream());
logFile.write("");
if(log){
this.logFile = new WriterActiveObject(new FileWriter(new File(System.getProperty("user.dir") + "/logFiles/" + "Thread_"+thNo)), pool);
}else{
this.logFile = new WriterActiveObject(new OutputStreamWriter(new NullOutputStream()), pool);
}
}
catch (IOException e) {
System.err.println("log-File nicht vorhanden");
@ -203,7 +209,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
this.urm = urm;
this.usedTasks = usedTasks;
this.usedTasks.add(this);
}
}
/**
@ -248,7 +253,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
protected Set<Set<UnifyPair>> compute() {
if (one) {
System.out.println("two");
//System.out.println("two");
}
one = true;
Set<UnifyPair> neweq = new HashSet<>(eq);
@ -261,12 +266,13 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
.collect(Collectors.toCollection(ArrayList::new));
Set<Set<UnifyPair>> res = unify(neweq, remainingOderconstraints, fc, parallel, rekTiefeField, new HashSet<>());
noOfThread--;
try {
if(parallel){
logFile.close();
}else{
logFile.closeNonThreaded();
}
catch (IOException ioE) {
System.err.println("no log-File");
}
if (isUndefinedPairSetSet(res)) {
//fuer debug-Zwecke
ArrayList al = res.stream().map(x -> x.stream().collect(Collectors.toCollection(ArrayList::new)))
@ -376,8 +382,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
eq0.forEach(x -> x.disableCondWildcards());
writeLog(nOfUnify.toString() + " Unifikation nach applyTypeUnificationRules: " + eq.toString());
writeLog(nOfUnify.toString() + " Oderconstraints nach applyTypeUnificationRules: " + oderConstraints.toString());
writeLog(nOfUnify.toString() + " Unifikation nach applyTypeUnificationRules: " + eq.toString() + "\n"
+ nOfUnify.toString() + " Oderconstraints nach applyTypeUnificationRules: " + oderConstraints.toString());
/*
* Step 2 and 3: Create a subset eq1s of pairs where both sides are TPH and eq2s of the other pairs
@ -436,8 +442,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
if(!undefinedPairs.isEmpty()) {
noUndefPair++;
for (UnifyPair up : undefinedPairs) {
writeLog(noUndefPair.toString() + " UndefinedPairs; " + up);
writeLog("BasePair; " + up.getBasePair());
writeLog(noUndefPair.toString() + " UndefinedPairs; " + up + "\n"
+ "BasePair; " + up.getBasePair());
}
Set<Set<UnifyPair>> error = new HashSet<>();
undefinedPairs = undefinedPairs.stream().map(x -> { x.setUndefinedPair(); return x;}).collect(Collectors.toCollection(HashSet::new));
@ -592,6 +598,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
urm.notify(eqPrimePrimeSet);
writeStatistics("Result: " + eqPrimePrimeSet.toString());
}
}
else if(eqPrimePrime.isPresent()) {
@ -646,6 +653,10 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//oneElems: Alle 1-elementigen Mengen, die nur ein Paar
//a <. theta, theta <. a oder a =. theta enthalten
//statistics
//writeStatistics("\nNumber of Constraints (" + rekTiefe + "): " + topLevelSets.size());
Set<Set<UnifyPair>> oneElems = new HashSet<>();
oneElems.addAll(topLevelSets.stream()
.filter(x -> x.size()==1)
@ -663,6 +674,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<? extends Set<UnifyPair>> nextSet = optNextSet.get();
//writeLog("nextSet: " + nextSet.toString());
List<Set<UnifyPair>> nextSetasList =new ArrayList<>(nextSet);
//writeStatistics(" Start Number of elements ( " /* + nextSetasList.get(0).stream().findFirst().get().getBasePair()*/ +"): (" + rekTiefe + "): " + nextSetasList.size());
/*
try {
//List<Set<UnifyPair>>
@ -774,8 +788,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
/* sameEqSet-Bestimmung Ende */
int hilf = 0;
Set<UnifyPair> a = null;
while (nextSetasList.size() > 0) {
//statistics
//writeStatistics(" Actual Number of elements( " + nextSetasList.get(0).stream().findFirst().get().getBasePair() +"): (" + rekTiefe + "): " + nextSetasList.size());
Set<UnifyPair> a_last = a;
/* Liste der Faelle für die parallele Verarbeitung
@ -792,8 +810,10 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
*/
List<Set<UnifyPair>> nextSetasListOderConstraints = new ArrayList<>();
writeLog("nextSet: " + nextSet.toString());
writeLog("nextSetasList: " + nextSetasList.toString());
writeLog("nextSet: " + nextSet.toString() + "\n"
+ "nextSetasList: " + nextSetasList.toString());
/* staistics Nextvar an Hand Varianzbestimmung auskommentieren Anfang
if (variance == 1) {
a = oup.max(nextSetasList.iterator());
nextSetasList.remove(a);
@ -865,7 +885,10 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
}
}
Nextvar an Hand Varianzbestimmung auskommentieren Ende */
a = nextSetasList.remove(0); //statisticsList
//writeStatistics(a.toString());
if (oderConstraint) {//Methodconstraints werden abgespeichert für die Bytecodegenerierung von Methodenaufrufen
methodSignatureConstraint.addAll(((Constraint<UnifyPair>)a).getmethodSignatureConstraint());
//System.out.println("ERSTELLUNG: " +methodSignatureConstraint);
@ -887,11 +910,13 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
/* Wenn bei (a \in theta) \in a zu Widerspruch in oneElems wird
* a verworfen und zu nächstem Element von nextSetasList gegangen
*/
/* statistics sameEq wird nicht betrachtet ANGFANG
if (!oderConstraint && !sameEqSet.isEmpty() && !checkNoContradiction(a, sameEqSet, result)) {
a = null;
noShortendElements++;
continue;
}
statistics sameEq wird nicht betrachtet ENDE */
/* Wenn parallel gearbeitet wird, wird je nach Varianz ein neuer Thread
* gestartet, der parallel weiterarbeitet.
@ -904,7 +929,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
newElemsOrig.add(a);
/* FORK ANFANG */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, methodSignatureConstraint);
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, methodSignatureConstraint, this.pool);
//forks.add(forkOrig);
synchronized(usedTasks) {
if (this.myIsCancelled()) {
@ -913,24 +938,26 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
forkOrig.fork();
}
/* FORK ENDE */
synchronized (this) {
writeLog("a in " + variance + " "+ a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
}
while (!nextSetasListRest.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasListRest.remove(0);
synchronized (this) { nextSetasList.remove(nSaL);
writeLog("1 RM" + nSaL.toString());
}
writeLog("a in " + variance + " "+ a + "\n" +
"nextSetasListRest: " + nextSetasListRest.toString());
while (!nextSetasList.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasList.remove(0);
//nextSetasList.remove(nSaL);
writeLog("1 RM" + nSaL.toString());
if (!oderConstraint) {
/* statistics sameEq wird nicht betrachtet ANGFANG
//ueberpruefung ob zu a =. ty \in nSaL in sameEqSet ein Widerspruch besteht
if (!sameEqSet.isEmpty() && !checkNoContradiction(nSaL, sameEqSet, result)) {
nSaL = null;
noShortendElements++;
continue;
}
statistics sameEq wird nicht betrachtet ENDE */
}
else {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>)nSaL).getExtendConstraint());
@ -939,7 +966,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint));
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint), this.pool);
forks.add(fork);
synchronized(usedTasks) {
if (this.myIsCancelled()) {
@ -951,47 +978,44 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//res = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
/* FORK ANFANG */
synchronized (this) {
writeLog("wait "+ forkOrig.thNo);
writeLog("wait "+ forkOrig.thNo);
noOfThread--;
res = forkOrig.join();
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
}
//noOfThread++;
forkOrig.writeLog("final Orig 1");
forkOrig.closeLogFile();
//Set<Set<UnifyPair>> fork_res = forkOrig.join();
writeLog("JoinOrig " + new Integer(forkOrig.thNo).toString());
//noOfThread--; an das Ende von compute verschoben
//add_res.add(fork_res);
/* FORK ENDE */
forks.forEach(x -> writeLog("wait: " + x.thNo));
for(TypeUnify2Task fork : forks) {
noOfThread--;
res = forkOrig.join();
Set<Set<UnifyPair>> fork_res = fork.join();
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
}
//noOfThread++;
forkOrig.writeLog("final Orig 1");
forkOrig.closeLogFile();
//Set<Set<UnifyPair>> fork_res = forkOrig.join();
writeLog("JoinOrig " + new Integer(forkOrig.thNo).toString());
//noOfThread--; an das Ende von compute verschoben
//add_res.add(fork_res);
};
/* FORK ENDE */
forks.forEach(x -> writeLog("wait: " + x.thNo));
for(TypeUnify2Task fork : forks) {
synchronized (this) {
noOfThread--;
Set<Set<UnifyPair>> fork_res = fork.join();
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
}
//noOfThread++;
writeLog("Join " + new Integer(fork.thNo).toString());
//noOfThread--; an das Ende von compute verschoben
writeLog("fork_res: " + fork_res.toString());
writeLog(new Boolean((isUndefinedPairSetSet(fork_res))).toString());
add_res.add(fork_res);
if (!isUndefinedPairSetSet(fork_res)) {
aParDef.add(fork.getNextSetElement());
}
fork.writeLog("final 1");
fork.closeLogFile();
};
//noOfThread--; an das Ende von compute verschoben
writeLog("Join " + new Integer(fork.thNo).toString() + "\n" +
"fork_res: " + fork_res.toString() + "\n" +
new Boolean((isUndefinedPairSetSet(fork_res))).toString());
add_res.add(fork_res);
if (!isUndefinedPairSetSet(fork_res)) {
aParDef.add(fork.getNextSetElement());
}
fork.writeLog("final 1");
fork.closeLogFile();
}
//noOfThread++;
} else {
@ -1003,7 +1027,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
newElemsOrig.add(a);
/* FORK ANFANG */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint));
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint), this.pool);
//forks.add(forkOrig);
synchronized(usedTasks) {
if (this.myIsCancelled()) {
@ -1013,23 +1037,24 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
/* FORK ENDE */
synchronized (this) {
writeLog("a in " + variance + " "+ a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
}
while (!nextSetasListRest.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasListRest.remove(0);
synchronized (this) { nextSetasList.remove(nSaL);
writeLog("-1 RM" + nSaL.toString());
}
writeLog("a in " + variance + " "+ a + "\n" +
"nextSetasListRest: " + nextSetasListRest.toString());
while (!nextSetasList.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasList.remove(0);
//nextSetasList.remove(nSaL);
writeLog("-1 RM" + nSaL.toString());
if (!oderConstraint) {
/* statistics sameEq wird nicht betrachtet ANGFANG
//ueberpruefung ob zu a =. ty \in nSaL in sameEqSet ein Widerspruch besteht
if (!sameEqSet.isEmpty() && !checkNoContradiction(nSaL, sameEqSet, result)) {
nSaL = null;
noShortendElements++;
continue;
}
statistics sameEq wird nicht betrachtet ENDE */
}
else {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>)nSaL).getExtendConstraint());
@ -1038,7 +1063,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint));
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint), this.pool);
forks.add(fork);
synchronized(usedTasks) {
if (this.myIsCancelled()) {
@ -1050,48 +1075,45 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//res = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
/* FORK ANFANG */
synchronized (this) {
writeLog("wait "+ forkOrig.thNo);
writeLog("wait "+ forkOrig.thNo);
noOfThread--;
res = forkOrig.join();
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
}
//noOfThread++;
forkOrig.writeLog("final Orig -1");
forkOrig.closeLogFile();
//Set<Set<UnifyPair>> fork_res = forkOrig.join();
writeLog("JoinOrig " + new Integer(forkOrig.thNo).toString());
//noOfThread--; an das Ende von compute verschoben
//add_res.add(fork_res);
/* FORK ENDE */
forks.forEach(x -> writeLog("wait: " + x.thNo));
for(TypeUnify2Task fork : forks) {
noOfThread--;
res = forkOrig.join();
Set<Set<UnifyPair>> fork_res = fork.join();
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
}
//noOfThread++;
forkOrig.writeLog("final Orig -1");
forkOrig.closeLogFile();
//Set<Set<UnifyPair>> fork_res = forkOrig.join();
writeLog("JoinOrig " + new Integer(forkOrig.thNo).toString());
//noOfThread--; an das Ende von compute verschoben
//add_res.add(fork_res);
writeLog("Join " + new Integer(fork.thNo).toString());
//noOfThread--; an das Ende von compute verschoben
writeLog("fork_res: " + fork_res.toString());
writeLog(new Boolean((isUndefinedPairSetSet(fork_res))).toString());
add_res.add(fork_res);
if (!isUndefinedPairSetSet(fork_res)) {
aParDef.add(fork.getNextSetElement());
}
fork.writeLog("final -1");
fork.closeLogFile();
};
/* FORK ENDE */
forks.forEach(x -> writeLog("wait: " + x.thNo));
for(TypeUnify2Task fork : forks) {
synchronized (this) {
noOfThread--;
Set<Set<UnifyPair>> fork_res = fork.join();
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
}
//noOfThread++;
writeLog("Join " + new Integer(fork.thNo).toString());
//noOfThread--; an das Ende von compute verschoben
writeLog("fork_res: " + fork_res.toString());
writeLog(new Boolean((isUndefinedPairSetSet(fork_res))).toString());
add_res.add(fork_res);
if (!isUndefinedPairSetSet(fork_res)) {
aParDef.add(fork.getNextSetElement());
}
fork.writeLog("final -1");
fork.closeLogFile();
};
}
//noOfThread++;
} else {
if(parallel && (variance == 2) && noOfThread <= MaxNoOfThreads) {
@ -1103,7 +1125,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
newElemsOrig.add(a);
/* FORK ANFANG */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint));
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint), this.pool);
//forks.add(forkOrig);
synchronized(usedTasks) {
if (this.myIsCancelled()) {
@ -1112,19 +1134,18 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
forkOrig.fork();
}
/* FORK ENDE */
synchronized (this) {
writeLog("a in " + variance + " "+ a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
}
while (!nextSetasListRest.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasListRest.remove(0);
nextSetasList.remove(nSaL); //PL einkommentiert 20-02-03
writeLog("a in " + variance + " "+ a + "\n" +
"nextSetasListRest: " + nextSetasListRest.toString());
while (!nextSetasList.isEmpty()) {
Set<UnifyPair> nSaL = nextSetasList.remove(0);
//nextSetasList.remove(nSaL); //PL einkommentiert 20-02-03
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, methodSignatureConstraint);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, methodSignatureConstraint, this.pool);
forks.add(fork);
synchronized(usedTasks) {
if (this.myIsCancelled()) {
@ -1136,41 +1157,39 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//res = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
/* FORK ANFANG */
synchronized (this) {
writeLog("wait "+ forkOrig.thNo);
noOfThread--;
res = forkOrig.join();
writeLog("wait "+ forkOrig.thNo);
noOfThread--;
res = forkOrig.join();
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
}
//noOfThread++;
forkOrig.writeLog("final Orig 2");
forkOrig.closeLogFile();
//Set<Set<UnifyPair>> fork_res = forkOrig.join();
writeLog("JoinOrig " + new Integer(forkOrig.thNo).toString());
//noOfThread--; an das Ende von compute verschoben
//add_res.add(fork_res); //vermutlich falsch
/* FORK ENDE */
forks.forEach(x -> writeLog("wait: " + x.thNo));
for(TypeUnify2Task fork : forks) {
noOfThread--;
Set<Set<UnifyPair>> fork_res = fork.join();
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
}
//noOfThread++;
forkOrig.writeLog("final Orig 2");
forkOrig.closeLogFile();
//Set<Set<UnifyPair>> fork_res = forkOrig.join();
writeLog("JoinOrig " + new Integer(forkOrig.thNo).toString());
//noOfThread--; an das Ende von compute verschoben
//add_res.add(fork_res); //vermutlich falsch
};
/* FORK ENDE */
forks.forEach(x -> writeLog("wait: " + x.thNo));
for(TypeUnify2Task fork : forks) {
synchronized (this) {
noOfThread--;
Set<Set<UnifyPair>> fork_res = fork.join();
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
}
//noOfThread++;
writeLog("Join " + new Integer(fork.thNo).toString());
//noOfThread--; an das Ende von compute verschoben
add_res.add(fork_res);
fork.writeLog("final 2");
fork.closeLogFile();
};
writeLog("Join " + new Integer(fork.thNo).toString());
//noOfThread--; an das Ende von compute verschoben
add_res.add(fork_res);
fork.writeLog("final 2");
fork.closeLogFile();
}
//noOfThread++;
} else {//parallel = false oder MaxNoOfThreads ist erreicht, sequentiell weiterarbeiten
@ -1179,6 +1198,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}}}
//Ab hier alle parallele Berechnungen wieder zusammengeführt.
//if (hilf == 1)
//System.out.println();
//writeStatistics("Zusammengeführt(" + rekTiefe + "): " + nextSetasList.size());
if (oderConstraint) {//Wenn weiteres Element nextSetasList genommen wird, muss die vorherige methodsignatur geloescht werden
methodSignatureConstraint.removeAll(((Constraint<UnifyPair>)a).getmethodSignatureConstraint());
//System.out.println("REMOVE: " +methodSignatureConstraint);
@ -1192,6 +1214,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|| (!isUndefinedPairSetSet(res) && !isUndefinedPairSetSet(result))
|| result.isEmpty()) {
/* auskommentiert damit alle Lösungen reinkommen ANFANG
if ((!result.isEmpty() && !res.isEmpty() && !isUndefinedPairSetSet(res) && !isUndefinedPairSetSet(result)) //korrekte Loesungen aus und-constraints
&& (a.stream().map(x-> (x.getBasePair() != null)).reduce(true, (x, y) -> (x && y)))) //bei oder-Constraints nicht ausfuehren
{
@ -1213,7 +1236,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//Alle Variablen bestimmen die nicht hinzugefügt wurden in a_last
//System.out.println(a_last);
if (a_last != null) {
try {//PL eingefuegt 2019-03-06 da bei map mmer wieder Nullpointer kamen
a_last.forEach(x -> {writeLog("a_last_elem:" + x + " basepair: " + x.getBasePair());});//PL 2019-05-13 ins try hinzugefuegt Nullpointer-Exception ist in der Zeile aufgetaucht.
List<PlaceholderType> varsLast_a =
@ -1265,9 +1288,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
catch (NullPointerException e) {
writeLog("NullPointerException: " + a_last.toString());
}
}}
}
else {
else
auskommentiert damit alle Lösungen reinkommen ANFANG */
{
//alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden
writeLog("RES Fst: result: " + result.toString() + " res: " + res.toString());
result.addAll(res);
@ -1287,7 +1312,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen
result = par_res;
if (!par_res.isEmpty() && par_res.iterator().next() instanceof WildcardType) {
System.out.println("");
//System.out.println("");
}
}
else {
@ -1303,7 +1328,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//break;
}
/* auskommentiert um alle Max und min Betrachtung auszuschalten ANFANG */
/* auskommentiert um alle Max und min Betrachtung auszuschalten ANFANG
if (!result.isEmpty() && (!isUndefinedPairSetSet(res) || !aParDef.isEmpty())) {
if (nextSetasList.iterator().hasNext() && nextSetasList.iterator().next().stream().filter(x -> x.getLhsType().getName().equals("B")).findFirst().isPresent() && nextSetasList.size()>1)
System.out.print("");
@ -1411,6 +1436,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
else { if (variance == 0) {
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
if (!oderConstraint) {
writeStatistics("break");
break;
}
else {
@ -1439,7 +1465,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
}
}
/* auskommentiert um alle Max und min Betrachtung auszuschalten ENDE */
auskommentiert um alle Max und min Betrachtung auszuschalten ENDE */
if (isUndefinedPairSetSet(res) && aParDef.isEmpty()) {
int nofstred= 0;
@ -1472,8 +1498,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
return new Pair<>(su, x.getGroundBasePair());})
.collect(Collectors.toCollection(HashSet::new));
if (res.size() > 1) {
System.out.println();
//System.out.println();
}
/* statistics no erase
writeLog("nextSetasList vor filter-Aufruf: " + nextSetasList);
if (!oderConstraint) {//PL 2023-02-08 eingefuegt: Bei oderconstraints sind Subststitutionen nicht als Substitutionen in idesem Sinne zu sehen
nextSetasList = nextSetasList.stream().filter(x -> {
@ -1486,25 +1513,29 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
.collect(Collectors.toCollection(ArrayList::new));
}
writeLog("nextSetasList nach filter-Aufruf: " + nextSetasList);
*/
nofstred = nextSetasList.size();
//NOCH NICHT korrekt PL 2018-10-12
//nextSetasList = nextSetasList.stream().filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y))
// .collect(Collectors.toCollection(ArrayList::new));
writeLog("res (undef): " + res.toString());
writeLog("abhSubst: " + abhSubst.toString());
writeLog("a2: " + rekTiefe + " " + a.toString());
writeLog("Durchschnitt: " + durchschnitt.toString());
writeLog("nextSet: " + nextSet.toString());
writeLog("nextSetasList: " + nextSetasList.toString());
writeLog("Number first erased Elements (undef): " + (len - nofstred));
writeLog("Number second erased Elements (undef): " + (nofstred- nextSetasList.size()));
writeLog("Number erased Elements (undef): " + (len - nextSetasList.size()));
writeLog("res (undef): " + res.toString() + "\n" +
"abhSubst: " + abhSubst.toString() + "\n" +
"a2: " + rekTiefe + " " + a.toString() + "\n" +
"Durchschnitt: " + durchschnitt.toString() + "\n" +
"nextSet: " + nextSet.toString() + "\n" +
"nextSetasList: " + nextSetasList.toString() + "\n" +
"Number first erased Elements (undef): " + (len - nofstred) + "\n" +
"Number second erased Elements (undef): " + (nofstred- nextSetasList.size()) + "\n" +
"Number erased Elements (undef): " + (len - nextSetasList.size()));
noAllErasedElements = noAllErasedElements + (len - nextSetasList.size());
writeLog("Number of all erased Elements (undef): " + noAllErasedElements.toString());
noBacktracking++;
writeLog("Number of Backtracking: " + noBacktracking);
System.out.println("");
//writeStatistics("Number of erased elements: " + (len - nextSetasList.size()));
//writeStatistics("Number of Backtracking: " + noBacktracking);
//System.out.println("");
}
else //writeStatistics("res: " + res.toString());
//if (nextSetasList.size() == 0 && isUndefinedPairSetSet(result) && nextSet.size() > 1) {
// return result;
//}
@ -1513,6 +1544,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//}
//else result.stream().filter(y -> !isUndefinedPairSet(y));
writeLog("res: " + res.toString());
//writeStatistics(" End Number of Elements (" + rekTiefe + "): " + nextSetasList.size());
noLoop++;
//writeStatistics("Number of Loops: " + noLoop);
}
//2020-02-02: if (variance ==2) Hier Aufruf von filterOverriding einfuegen
writeLog("Return computeCR: " + result.toString());
@ -1952,10 +1986,10 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
}
writeLog("eq2s: " + eq2s.toString());
writeLog("eq2sAsListFst: " + eq2sAsListFst.toString());
writeLog("eq2sAsListSnd: " + eq2sAsListSnd.toString());
writeLog("eq2sAsListBack: " + eq2sAsListBack.toString());
writeLog("eq2s: " + eq2s.toString() + "\n" +
"eq2sAsListFst: " + eq2sAsListFst.toString() + "\n" +
"eq2sAsListSnd: " + eq2sAsListSnd.toString() + "\n" +
"eq2sAsListBack: " + eq2sAsListBack.toString());
eq2sAsList.addAll(eq2sAsListFst);
eq2sAsList.addAll(eq2sAsListSnd);
@ -2008,7 +2042,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//System.out.println(pair);
if (first) { //writeLog(pair.toString()+"\n");
if (((PlaceholderType)(pair.getLhsType())).getName().equals("AR")) {
System.out.println("AR");
//System.out.println("AR");
}
Set<Set<UnifyPair>> x1 = unifyCase1(pair, fc);
if (pairOp == PairOperator.SMALLERNEQDOT) {
@ -2560,19 +2594,32 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
void writeLog(String str) {
synchronized ( this ) {
if (log && finalresult) {
if(parallel){
logFile.write("Thread no.:" + thNo + "\n"
+ "noOfThread:" + noOfThread + "\n"
+ "parallel:" + parallel + "\n"
+ str+"\n\n"
);
}else{
logFile.writeNonThreaded("Thread no.:" + thNo + "\n"
+ "noOfThread:" + noOfThread + "\n"
+ "parallel:" + parallel + "\n"
+ str+"\n\n"
);
}
}
}
void writeStatistics(String str) {
if (finalresult) {
try {
logFile.write("Thread no.:" + thNo + "\n");
logFile.write("noOfThread:" + noOfThread + "\n");
logFile.write("parallel:" + parallel + "\n");
logFile.write(str+"\n\n");
logFile.flush();
statisticsFile.write("Thread No. " + thNo + ": " + str + "\n");
statisticsFile.flush();
}
catch (IOException e) {
System.err.println("kein LogFile");
}
System.err.println("kein StatisticsFile");
}
}
}

@ -0,0 +1,53 @@
package de.dhbwstuttgart.typeinference.unify;
import java.io.IOException;
import java.io.Writer;
import java.util.concurrent.ForkJoinPool;
public class WriterActiveObject {
private Writer writer;
private ForkJoinPool pool;
public WriterActiveObject(Writer writer, ForkJoinPool pool){
this.writer = writer;
this.pool = pool;
}
public void close(){
pool.execute(()->{
try {
writer.close();
} catch (IOException e) {
System.out.println(e.getMessage());
}
});
}
public void write(String message){
pool.execute(()->{
try {
writer.write(message);
writer.flush();
} catch (IOException e) {
throw new RuntimeException(e);
}
});
}
public void writeNonThreaded(String message){
try {
writer.write(message);
writer.flush();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
public void closeNonThreaded(){
try {
writer.close();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
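
The new WriterActiveObject above offloads log writes to a ForkJoinPool so that unification tasks do not block on file I/O, while writeNonThreaded()/closeNonThreaded() keep a synchronous path for the sequential case. A minimal usage sketch, assuming the class is visible from the caller (e.g., same package); the file name is illustrative:

import java.io.FileWriter;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.TimeUnit;

public class WriterActiveObjectDemo {
    public static void main(String[] args) throws Exception {
        ForkJoinPool pool = ForkJoinPool.commonPool();
        WriterActiveObject log =
                new WriterActiveObject(new FileWriter("demo.log"), pool);

        // Asynchronous path: each write is queued as a task on the pool,
        // so the caller does not block on file I/O.
        log.write("first message\n");
        log.write("second message\n");

        // Wait until the queued writes have been processed, then close
        // synchronously on the calling thread.
        pool.awaitQuiescence(1, TimeUnit.SECONDS);
        log.closeNonThreaded();
    }
}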

@ -281,6 +281,7 @@ public class TestComplete {
@Test
public void scalarTest() throws Exception {
var classFiles = generateClassFiles(new ByteArrayClassLoader(), "Scalar.jav");
var scalar = classFiles.get("Scalar");

@ -1,13 +1,27 @@
package typeinference;
import com.google.common.base.Optional;
import de.dhbwstuttgart.bytecode.genericsGeneratorTypes.GenericGenratorResultForSourceFile;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.parser.scope.JavaClassName;
import de.dhbwstuttgart.syntaxtree.ClassOrInterface;
import de.dhbwstuttgart.syntaxtree.Constructor;
import de.dhbwstuttgart.syntaxtree.SourceFile;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.syntaxtree.visual.ASTPrinter;
import de.dhbwstuttgart.syntaxtree.visual.ASTTypePrinter;
import de.dhbwstuttgart.typedeployment.TypeInsert;
import de.dhbwstuttgart.typedeployment.TypeInsertFactory;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.*;
import org.apache.commons.io.output.NullWriter;
import org.junit.Test;
import java.io.File;
@ -24,7 +38,71 @@ import java.util.Set;
public class UnifyTest {
public static final String rootDirectory = System.getProperty("user.dir")+"/resources/javFiles/";
/*
private UnifyPair genPairListOfInteger(String name){
UnifyType type1 = new PlaceholderType(name);
UnifyType type2 = new ReferenceType("List", new TypeParams(new ReferenceType("Integer")));
UnifyPair pair1 = new UnifyPair(type2, type1, PairOperator.SMALLERDOT);
return pair1;
}
private UnifyPair genPairListOfString(String name){
PlaceholderType type1 = new PlaceholderType(name);
UnifyType type2 = new ReferenceType("List", new TypeParams(new ReferenceType("String")));
UnifyPair pair1 = new UnifyPair(type2, type1, PairOperator.SMALLERDOT);
return pair1;
}
@Test
public void unifyTest(){
UnifyType type1;
UnifyType type2;
Set<UnifyPair> undConstraints = new HashSet<>();
undConstraints.add(genPairListOfInteger("a"));
undConstraints.add(genPairListOfString("a"));
undConstraints.add(genPairListOfInteger("b"));
undConstraints.add(genPairListOfString("b"));
undConstraints.add(genPairListOfInteger("c"));
undConstraints.add(genPairListOfString("c"));
undConstraints.add(genPairListOfInteger("d"));
undConstraints.add(genPairListOfString("d"));
undConstraints.add(genPairListOfInteger("e"));
undConstraints.add(genPairListOfString("e"));
undConstraints.add(genPairListOfInteger("e1"));
undConstraints.add(genPairListOfString("e1"));
undConstraints.add(genPairListOfInteger("e2"));
undConstraints.add(genPairListOfString("e2"));
undConstraints.add(genPairListOfInteger("e3"));
undConstraints.add(genPairListOfString("e3"));
List<Set<Constraint<UnifyPair>>> oderConstraints = new ArrayList<>();
Set<UnifyPair> constraints = new HashSet<>();
type1 = new ReferenceType("Object");
type2 = new ReferenceType("List", new TypeParams(new PlaceholderType("X")));
constraints.add(new UnifyPair(type2, type1, PairOperator.SMALLER));
type1 = new ReferenceType("Object");
type2 = new ReferenceType("Integer");
constraints.add(new UnifyPair(type2, type1, PairOperator.SMALLER));
type1 = new ReferenceType("Object");
type2 = new ReferenceType("String");
constraints.add(new UnifyPair(type2, type1, PairOperator.SMALLER));
IFiniteClosure finiteClosure = new FiniteClosure(constraints, new NullWriter());
TypeUnify unifyAlgo = new TypeUnify();
ConstraintSet< Pair> cons = new ConstraintSet<>();
UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
UnifyTaskModel tasks = new UnifyTaskModel();
Set<Set<UnifyPair>> solution = unifyAlgo.unify(undConstraints, oderConstraints, finiteClosure, new NullWriter(), false, urm, tasks);
System.out.println(solution.size());
System.out.println(solution);
}
/*
@Test
public void finiteClosure() throws IOException, ClassNotFoundException {
execute(new File(rootDirectory+"fc.jav"));