Compare commits
32 commits: master ... performanc
Commit SHA1s:
f8efe361e9, 63f854d4ee, 532018241c, 9e7085bcce, e4af54a2bf, d7b693204e, 697cfcc2af, 73f996991e, 61653c5d88, 3cd608a4ac, deec0ae706, d6a79ea3a1, 1f909f13ee, be6f4bd578, 478efd5649, c73e57cf2b, ce29f4bcf1, 42821f3215, f68afc88a6, 82061474b2, d849bc127f, 6815d8fc0a, 317f8b1aad, 79335449d0, 14606a846e, 2b67230a15, 29b05b56cc, 08b9fc0ea3, 070dd16999, 9d7e46925d, d780d322f0, 867f3d39e8
Makefile (new file, 3 lines)

@@ -0,0 +1,3 @@
+ NoOptParallel:
+ 	mvn -DskipTests package
+ 	cp target/JavaTXcompiler-0.1-jar-with-dependencies.jar target/JavaTXcompiler-0.1-jar-with-dependencies_NoOptParallel.jar
pom.xml (4 changed lines)

@@ -54,8 +54,8 @@ http://maven.apache.org/maven-v4_0_0.xsd">
<version>3.8.0</version>
<configuration>
<compilerArgs>--enable-preview</compilerArgs>
- <source>19</source>
- <target>19</target>
+ <source>21</source>
+ <target>21</target>
</configuration>
</plugin>
<plugin>
@@ -1,4 +1,4 @@
- import java.util.Vector;
+ //import java.util.Vector;
import java.lang.Integer;
import java.lang.Float;
//import java.lang.Byte;
@@ -6,21 +6,11 @@ import java.lang.Float;

public class Scalar extends Vector<Integer> {

    Scalar(v) {
        Integer i;
        i = 0;
        while(i < v.size()) {
            this.add(v.elementAt(i));
            i=i+1;
        }
    }

    mul(v) {
        var ret = 0;
        var i = 0;
        while(i < size()) {
            ret = ret + this.elementAt(i) * v.elementAt(i);
            i = i+1;
        }
        return ret;
    }
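The Scalar example above is written in Java-TX style without parameter or return types. The sketch below is a plain-Java rendering of the same class with the types spelled out; the explicit `Vector<Integer>` parameter and `Integer` return type are assumptions about what type inference should produce, not part of this diff.

```java
import java.util.Vector;

// Plain-Java rendering of Scalar.jav with assumed inferred types.
public class Scalar extends Vector<Integer> {

    Scalar(Vector<Integer> v) {
        Integer i = 0;
        while (i < v.size()) {
            this.add(v.elementAt(i));  // copy the elements of v into this vector
            i = i + 1;
        }
    }

    Integer mul(Vector<Integer> v) {
        int ret = 0;
        int i = 0;
        while (i < size()) {
            ret = ret + this.elementAt(i) * v.elementAt(i);  // pairwise product, summed
            i = i + 1;
        }
        return ret;  // scalar (dot) product
    }
}
```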
@@ -37,20 +37,13 @@ import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.typeAlgo.TYPE;
import de.dhbwstuttgart.typeinference.unify.RuleSet;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
import de.dhbwstuttgart.typeinference.unify.distributeVariance;
import de.dhbwstuttgart.typeinference.unify.*;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import de.dhbwstuttgart.util.BiRelation;
import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.UnifyResultListener;
import de.dhbwstuttgart.typeinference.unify.UnifyResultListenerImpl;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;

import java.io.File;
import java.io.FileOutputStream;
@@ -58,6 +51,7 @@ import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.sql.Timestamp;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Collectors;
@@ -69,11 +63,12 @@ public class JavaTXCompiler {

//public static JavaTXCompiler INSTANCE;
final CompilationEnvironment environment;
- Boolean resultmodel = false;
+ Boolean resultmodel = true;
public final Map<File, SourceFile> sourceFiles = new HashMap<>();
Boolean log = false; //gibt an ob ein Log-File nach System.getProperty("user.dir")+""/logFiles/"" geschrieben werden soll?
public volatile UnifyTaskModel usedTasks = new UnifyTaskModel();
private final DirectoryClassLoader classLoader;
static Writer statistics;

public JavaTXCompiler(File sourceFile) throws IOException, ClassNotFoundException {
this(Arrays.asList(sourceFile), null);
@@ -86,6 +81,9 @@ public class JavaTXCompiler {
this(sourceFiles, null);
}
public JavaTXCompiler(List<File> sources, List<File> contextPath) throws IOException, ClassNotFoundException {
//statistics = new FileWriter(new File(System.getProperty("user.dir") + "/" + sources.get(0).getName() + "_"+ new Timestamp(System.currentTimeMillis())));
statistics = new OutputStreamWriter(new NullOutputStream());
statistics.write("test");
if(contextPath == null || contextPath.isEmpty()){
//When no contextPaths are given, the working directory is the sources root
contextPath = Lists.newArrayList(new File(System.getProperty("user.dir")));
@@ -371,7 +369,7 @@ public class JavaTXCompiler {
* }
*/

- public UnifyResultModel typeInferenceAsync(UnifyResultListener resultListener, Writer logFile)
+ public UnifyResultModelParallel typeInferenceAsync(UnifyResultListener resultListener, Writer logFile)
throws ClassNotFoundException, IOException {
List<ClassOrInterface> allClasses = new ArrayList<>();// environment.getAllAvailableClasses();
// Alle Importierten Klassen in allen geparsten Sourcefiles kommen ins FC
@@ -384,7 +382,7 @@ public class JavaTXCompiler {

final ConstraintSet<Pair> cons = getConstraints();
Set<Set<UnifyPair>> results = new HashSet<>();
- UnifyResultModel urm = null;
+ UnifyResultModelParallel urm = null;
// urm.addUnifyResultListener(resultListener);
try {
logFile = logFile == null
@@ -392,7 +390,7 @@ public class JavaTXCompiler {
: logFile;
IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses, logFile, classLoader);
System.out.println(finiteClosure);
- urm = new UnifyResultModel(cons, finiteClosure);
+ urm = new UnifyResultModelParallel(cons, finiteClosure);
urm.addUnifyResultListener(resultListener);
ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(cons);

@@ -578,6 +576,7 @@ public class JavaTXCompiler {
unifyCons = unifyCons.map(distributeInnerVars);
logFile.write("\nUnify_distributeInnerVars: " + unifyCons.toString());
TypeUnify unify = new TypeUnify();
unify.statistics = statistics;
// Set<Set<UnifyPair>> results = new HashSet<>(); Nach vorne gezogen
logFile.write("FC:\\" + finiteClosure.toString() + "\n");
for (SourceFile sf : this.sourceFiles.values()) {
@@ -721,7 +720,7 @@ public class JavaTXCompiler {
}).collect(Collectors.toCollection(ArrayList::new))*/;
if (resultmodel) {
/* UnifyResultModel Anfang */
- UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
+ UnifyResultModelParallel urm = new UnifyResultModelParallel(cons, finiteClosure);
UnifyResultListenerImpl li = new UnifyResultListenerImpl();
urm.addUnifyResultListener(li);
unify.unifyParallel(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, urm,
@@ -730,6 +729,7 @@ public class JavaTXCompiler {
System.out.println("Constraints for Generated Generics: " + " ???");
logFile.write("RES_FINAL: " + li.getResults().toString() + "\n");
logFile.flush();
statistics.close();
return li.getResults();
}
/* UnifyResultModel End */
@@ -738,7 +738,7 @@ public class JavaTXCompiler {
// oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons,
// finiteClosure));
Set<Set<UnifyPair>> result = unify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints,
- finiteClosure, logFile, log, new UnifyResultModel(cons, finiteClosure), usedTasks);
+ finiteClosure, logFile, log, new UnifyResultModelParallel(cons, finiteClosure), usedTasks);
System.out.println("RESULT: " + result);
logFile.write("RES: " + result.toString() + "\n");
logFile.flush();
@@ -765,6 +765,7 @@ public class JavaTXCompiler {
} catch (IOException e) {
System.err.println("kein LogFile");
}
statistics.close();
return results.stream()
.map((unifyPairs -> new ResultSet(UnifyTypeFactory.convert(unifyPairs, Pair.generateTPHMap(cons)))))
.collect(Collectors.toList());
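With `typeInferenceAsync` now returning a `UnifyResultModelParallel`, a caller registers a `UnifyResultListener` and is called back as results arrive. The following is a minimal sketch of such a caller; the `JavaTXCompiler` and `UnifyResultEvent` package paths are assumptions (they are not visible in this diff), and the listener only logs that a result was received.

```java
import java.io.File;

import org.apache.commons.io.output.NullWriter;

import de.dhbwstuttgart.core.JavaTXCompiler;                   // assumed package path
import de.dhbwstuttgart.typeinference.unify.UnifyResultEvent;  // assumed package path
import de.dhbwstuttgart.typeinference.unify.UnifyResultListener;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModelParallel;

public class AsyncInferenceSketch implements UnifyResultListener {

    @Override
    public void onNewTypeResultFound(UnifyResultEvent evt) {
        // Invoked on a ForkJoinPool thread by UnifyResultModelParallel.notify(...)
        // each time a batch of unify results has been converted to ResultSets.
        System.out.println("received a partial type inference result");
    }

    public static void main(String[] args) throws Exception {
        JavaTXCompiler compiler = new JavaTXCompiler(new File("Scalar.jav"));
        UnifyResultModelParallel model =
                compiler.typeInferenceAsync(new AsyncInferenceSketch(), new NullWriter());
        // The returned model keeps notifying the listener while unification tasks finish.
    }
}
```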
@@ -35,8 +35,8 @@ public class ASTFactory {
JavaClassName name = new JavaClassName(jreClass.getName());
List<Method> methoden = new ArrayList<>();
List<de.dhbwstuttgart.syntaxtree.Constructor> konstruktoren = new ArrayList<>();
- for(java.lang.reflect.Constructor constructor : jreClass.getConstructors()){
- createConstructor(constructor, jreClass).map(c -> konstruktoren.add(c));
+ for(java.lang.reflect.Constructor constructor : jreClass.getDeclaredConstructors()){
+ createConstructor(constructor, jreClass).map(c -> konstruktoren.add(c));
}
Set<java.lang.reflect.Method> allMethods = new HashSet<>(Arrays.asList(jreClass.getMethods()));
Set<java.lang.reflect.Method> allDeclaredMethods = new HashSet<>(Arrays.asList(jreClass.getDeclaredMethods()));
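For context on why `getDeclaredConstructors()` matters here: unlike `getConstructors()`, it also returns non-public constructors, so classes loaded via reflection keep their full constructor set in the AST. A small standalone illustration (not project code):

```java
import java.lang.reflect.Constructor;
import java.util.Arrays;

// getConstructors() yields only public constructors, while
// getDeclaredConstructors() also yields protected, package-private
// and private constructors declared by the class itself.
public class ConstructorLookupDemo {

    static class Sample {
        public Sample() {}
        private Sample(int x) {}
    }

    public static void main(String[] args) {
        Constructor<?>[] publicOnly = Sample.class.getConstructors();         // 1 constructor
        Constructor<?>[] declared = Sample.class.getDeclaredConstructors();   // 2 constructors
        System.out.println("public:   " + Arrays.toString(publicOnly));
        System.out.println("declared: " + Arrays.toString(declared));
    }
}
```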
@@ -495,7 +495,7 @@ public class TYPEStmt implements StatementVisitor{
@Override
public void visit(Return returnExpr) {
returnExpr.retexpr.accept(this);
- constraintsSet.addUndConstraint(new Pair(returnExpr.getType(),info.getCurrentTypeScope().getReturnType(), PairOperator.SMALLERDOT));
+ constraintsSet.addUndConstraint(new Pair(returnExpr.getType(),info.getCurrentTypeScope().getReturnType(), PairOperator.EQUALSDOT));
}

@Override
@@ -610,8 +610,8 @@ public class TYPEStmt implements StatementVisitor{
//Fuer Bytecodegenerierung PL 2020-03-09 wird derzeit nicht benutzt ENDE


- methodConstraint.add(new Pair(assumption.getReturnType(resolver), forMethod.getType(), PairOperator.SMALLERDOT));
- extendsMethodConstraint.add(new Pair(assumption.getReturnType(resolver), forMethod.getType(), PairOperator.SMALLERDOT));
+ methodConstraint.add(new Pair(assumption.getReturnType(resolver), forMethod.getType(), PairOperator.EQUALSDOT));
+ extendsMethodConstraint.add(new Pair(assumption.getReturnType(resolver), forMethod.getType(), PairOperator.EQUALSDOT));

//methodConstraint.add(new Pair(assumption.getReturnType(resolver), forMethod.getType(), PairOperator.EQUALSDOT));
//extendsMethodConstraint.add(new Pair(assumption.getReturnType(resolver), forMethod.getType(), PairOperator.EQUALSDOT));
@@ -13,6 +13,7 @@ import java.util.function.Function;
import java.util.stream.Collectors;

import de.dhbwstuttgart.exceptions.DebugException;
import de.dhbwstuttgart.syntaxtree.type.ExtendsWildcardType;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
import de.dhbwstuttgart.typeinference.unify.model.ExtendsType;
@@ -390,26 +391,51 @@ public class RuleSet implements IRuleSet{
if((pair.getPairOp() != PairOperator.SMALLERDOT) && (pair.getPairOp() != PairOperator.SMALLERNEQDOT))
return false;

if (pair.getPairOp() == PairOperator.SMALLERNEQDOT) {
UnifyType lhs = pair.getLhsType();
UnifyType rhs = pair.getRhsType();
if (lhs instanceof WildcardType) {
lhs = ((WildcardType)lhs).getWildcardedType();
}
if (rhs instanceof WildcardType) {
rhs = ((WildcardType)rhs).getWildcardedType();
}

if (lhs.equals(rhs)){
return false;
}
UnifyType lhsType = pair.getLhsType();
UnifyType rhsType = pair.getRhsType();

/*
* ty <. ? extends ty' is wrong
*/
if (rhsType instanceof ExtendsType) {
return false;
}

/*
* ? super ty <. ty' is wrong
* except Ty' = Object or ty' = ? super Object
*/
if ((lhsType instanceof SuperType) &&
(!(rhsType.equals(new ReferenceType("java.lang.Object", false)))) &&
!(rhsType.equals(new SuperType (new ReferenceType("java.lang.Object", false))))) {
return false;
}

/*
* ? extends ty <. ty' is equivalent to ty < ty'
*/
if (lhsType instanceof ExtendsType) {
lhsType = ((WildcardType)lhsType).getWildcardedType();
}

/*
* ty <. ? super ty' ist equivalent to ty <. ty'
*/
if (rhsType instanceof SuperType) {
rhsType = ((WildcardType)rhsType).getWildcardedType();
}

/*
* SMALLERNEQDOT => type must not be equal
*/
if (pair.getPairOp() == PairOperator.SMALLERNEQDOT && lhsType.equals(rhsType)){
return false;
}

UnifyType lhsType = pair.getLhsType();
if(!(lhsType instanceof ReferenceType) && !(lhsType instanceof PlaceholderType))
return false;

UnifyType rhsType = pair.getRhsType();

if(!(rhsType instanceof ReferenceType) && !(rhsType instanceof PlaceholderType))
return false;

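The new checks in RuleSet encode when a pair `lhs <. rhs` involving wildcards can be admissible at all. The sketch below is a self-contained paraphrase of just those checks, using stand-in types instead of the project's `UnifyType` model, to make the accepted and rejected shapes visible; it is an illustration, not the project's API.

```java
// Ref/Extends/Super are stand-ins for ReferenceType/ExtendsType/SuperType.
public class WildcardRuleSketch {

    sealed interface Ty permits Ref, Extends, Super {}
    record Ref(String name) implements Ty {}
    record Extends(Ty bound) implements Ty {}  // "? extends bound"
    record Super(Ty bound) implements Ty {}    // "? super bound"

    /** Returns false when a pair lhs <. rhs is ruled out by the wildcard checks. */
    static boolean admissible(Ty lhs, Ty rhs) {
        // ty <. ? extends ty'  is wrong
        if (rhs instanceof Extends) {
            return false;
        }
        // ? super ty <. ty'  is wrong, unless ty' is Object or ? super Object
        if (lhs instanceof Super
                && !rhs.equals(new Ref("java.lang.Object"))
                && !rhs.equals(new Super(new Ref("java.lang.Object")))) {
            return false;
        }
        // the remaining wildcard cases reduce to the wildcarded types
        return true;
    }

    public static void main(String[] args) {
        System.out.println(admissible(new Ref("Integer"), new Extends(new Ref("Number"))));        // false
        System.out.println(admissible(new Super(new Ref("Number")), new Ref("String")));           // false
        System.out.println(admissible(new Super(new Ref("Number")), new Ref("java.lang.Object"))); // true
        System.out.println(admissible(new Extends(new Ref("Integer")), new Ref("Number")));        // true
    }
}
```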
@@ -18,6 +18,7 @@ import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

public class TypeUnify {

public static Writer statistics;
/**
* unify parallel ohne result modell
* @param undConstrains
@@ -28,9 +29,10 @@ public class TypeUnify {
* @param cons
* @return
*/
- public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
- TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
+ public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallel resultModel, UnifyTaskModel usedTasks) {
+ TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, resultModel, usedTasks);
ForkJoinPool pool = new ForkJoinPool();
resultModel.setPool(pool);
pool.invoke(unifyTask);
Set<Set<UnifyPair>> res = unifyTask.join();
try {
@@ -54,11 +56,12 @@ public class TypeUnify {
* @param ret
* @return
*/
- public UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
- TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
+ public UnifyResultModelParallel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallel resultModel, UnifyTaskModel usedTasks) {
+ TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, resultModel, usedTasks);
ForkJoinPool pool = new ForkJoinPool();
resultModel.setPool(pool);
pool.invoke(unifyTask);
- return ret;
+ return resultModel;
}

/**
@@ -72,19 +75,23 @@ public class TypeUnify {
* @param ret
* @return
*/
- public UnifyResultModel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
- TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
+ public UnifyResultModelParallel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallel resultModel, UnifyTaskModel usedTasks) {
+ TypeUnifyTask unifyTask = //new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
+ new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, resultModel, usedTasks, statistics);
ForkJoinPool pool = new ForkJoinPool();
resultModel.setPool(pool);
pool.invoke(unifyTask);
Set<Set<UnifyPair>> res = unifyTask.join();
try {
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
logFile.flush();
unifyTask.statistics.write("Backtracking: " + unifyTask.noBacktracking);
unifyTask.statistics.write("\nLoops: " + unifyTask.noLoop);
}
catch (IOException e) {
System.err.println("no log-File");
}
- return ret;
+ return resultModel;
}

/*
@@ -105,8 +112,9 @@ public class TypeUnify {
* @param cons
* @return
*/
- public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
- TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0, ret, usedTasks);
+ public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModelParallel resultModel, UnifyTaskModel usedTasks) {
+ TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0, resultModel, usedTasks);
unifyTask.statistics = statistics;
Set<Set<UnifyPair>> res = unifyTask.compute();
try {
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");
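All of the TypeUnify entry points now follow the same wiring: build the task, create a ForkJoinPool, hand the pool to the result model, then invoke the task so listener notifications can run on pool threads. Below is a compact, self-contained illustration of that pattern using plain JDK classes rather than the project's types.

```java
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;
import java.util.concurrent.TimeUnit;
import java.util.function.IntConsumer;

// Stand-ins for UnifyResultModelParallel / TypeUnifyTask, showing only the wiring.
public class PoolWiringSketch {

    static class ResultModel {
        private ForkJoinPool pool;
        void setPool(ForkJoinPool pool) { this.pool = pool; }
        void notifyResult(int value, IntConsumer listener) {
            // Like UnifyResultModelParallel.notify: run listener work on the pool.
            pool.execute(() -> listener.accept(value));
        }
    }

    static class SumTask extends RecursiveTask<Integer> {
        private final int n;
        private final ResultModel model;
        private final IntConsumer listener;
        SumTask(int n, ResultModel model, IntConsumer listener) {
            this.n = n; this.model = model; this.listener = listener;
        }
        @Override
        protected Integer compute() {
            int sum = 0;
            for (int i = 1; i <= n; i++) sum += i;
            model.notifyResult(sum, listener);  // report a result while still in the pool
            return sum;
        }
    }

    public static void main(String[] args) throws InterruptedException {
        ResultModel resultModel = new ResultModel();
        ForkJoinPool pool = new ForkJoinPool();
        SumTask task = new SumTask(100, resultModel, v -> System.out.println("notified: " + v));
        resultModel.setPool(pool);   // same order as in TypeUnify.unify(...)
        System.out.println("join result: " + pool.invoke(task));
        pool.shutdown();
        pool.awaitTermination(1, TimeUnit.SECONDS);  // let the async notification finish
    }
}
```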
@@ -19,8 +19,18 @@ public class TypeUnify2Task extends TypeUnifyTask {
Set<Set<UnifyPair>> setToFlatten;
Set<UnifyPair> methodSignatureConstraintUebergabe;

- public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks, Set<UnifyPair> methodSignatureConstraintUebergabe) {
- super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
+ //statistics
+ TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq,
+ List<Set<Constraint<UnifyPair>>> oderConstraints,
+ Set<UnifyPair> nextSetElement,
+ IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModelParallel resultModel, UnifyTaskModel usedTasks,
+ Set<UnifyPair> methodSignatureConstraintUebergabe, Writer statistics) {
+ this(setToFlatten, eq, oderConstraints, nextSetElement, fc, parallel, logFile, log, rekTiefe, resultModel, usedTasks, methodSignatureConstraintUebergabe );
+
+ }
+
+ public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModelParallel resultModel, UnifyTaskModel usedTasks, Set<UnifyPair> methodSignatureConstraintUebergabe) {
+ super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, resultModel, usedTasks);
this.setToFlatten = setToFlatten;
this.nextSetElement = nextSetElement;
this.methodSignatureConstraintUebergabe = methodSignatureConstraintUebergabe;
@@ -17,16 +17,12 @@ import java.util.concurrent.RecursiveTask;
import java.util.function.BiFunction;
import java.util.function.BinaryOperator;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import org.apache.commons.io.output.NullOutputStream;

import de.dhbwstuttgart.exceptions.TypeinferenceException;
import de.dhbwstuttgart.parser.NullToken;
import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.interfaces.IMatch;
import de.dhbwstuttgart.typeinference.unify.interfaces.IRuleSet;
@@ -53,7 +49,7 @@ import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;

- import com.google.common.collect.Ordering;
+ import org.apache.commons.io.output.NullWriter;


/**
@@ -75,12 +71,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
/**
* Fuer die Threads
*/
- UnifyResultModel urm;
+ protected UnifyResultModelParallel resultModel;
protected static int noOfThread = 0;
private static int totalnoOfThread = 0;
int thNo;
protected boolean one = false;
- Integer MaxNoOfThreads = 8;
+ Integer MaxNoOfThreads = 128;

public static final String rootDirectory = System.getProperty("user.dir")+"/test/logFiles/";
Writer logFile;
@@ -125,12 +121,16 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {

static int noBacktracking;

static int noLoop;

static Integer noShortendElements = 0;

Boolean myIsCanceled = false;

volatile UnifyTaskModel usedTasks;

static Writer statistics;

public TypeUnifyTask() {
rules = new RuleSet();
}
@@ -149,9 +149,16 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
*/


- public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks) {
- synchronized (this) {
+ //statistics
+ public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModelParallel resultModel, UnifyTaskModel usedTasks, Writer statistics) {
+ this(eq,oderConstraints, fc, parallel, logFile, log, rekTiefe, resultModel, usedTasks);
+ this.statistics = statistics;
+ }
+ public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModelParallel resultModel, UnifyTaskModel usedTasks) {
+ synchronized (this) {
+ if(statistics==null){
+ statistics = new NullWriter();
+ }
this.eq = eq;
//this.oderConstraints = oderConstraints.stream().map(x -> x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
this.oderConstraintsField = oderConstraints; /*.stream().map(x -> {
@@ -200,7 +207,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
*/
rules = new RuleSet(logFile);
this.rekTiefeField = rekTiefe;
- this.urm = urm;
+ this.resultModel = resultModel;
this.usedTasks = usedTasks;
this.usedTasks.add(this);
}
@@ -248,7 +255,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {

protected Set<Set<UnifyPair>> compute() {
if (one) {
- System.out.println("two");
+ //System.out.println("two");
}
one = true;
Set<UnifyPair> neweq = new HashSet<>(eq);
@@ -591,7 +598,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
*/


- urm.notify(eqPrimePrimeSet);
+ resultModel.notify(eqPrimePrimeSet);
+ writeStatistics("Result: " + eqPrimePrimeSet.toString());
}
}
else if(eqPrimePrime.isPresent()) {
@@ -646,6 +654,10 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {

//oneElems: Alle 1-elementigen Mengen, die nur ein Paar
//a <. theta, theta <. a oder a =. theta enthalten

//statistics
//writeStatistics("\nNumber of Constraints (" + rekTiefe + "): " + topLevelSets.size());

Set<Set<UnifyPair>> oneElems = new HashSet<>();
oneElems.addAll(topLevelSets.stream()
.filter(x -> x.size()==1)
@@ -663,6 +675,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<? extends Set<UnifyPair>> nextSet = optNextSet.get();
//writeLog("nextSet: " + nextSet.toString());
List<Set<UnifyPair>> nextSetasList =new ArrayList<>(nextSet);

//writeStatistics(" Start Number of elements ( " /* + nextSetasList.get(0).stream().findFirst().get().getBasePair()*/ +"): (" + rekTiefe + "): " + nextSetasList.size());

/*
try {
//List<Set<UnifyPair>>
@@ -774,8 +789,12 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
/* sameEqSet-Bestimmung Ende */

int hilf = 0;
Set<UnifyPair> a = null;
while (nextSetasList.size() > 0) {

//statistics
//writeStatistics(" Actual Number of elements( " + nextSetasList.get(0).stream().findFirst().get().getBasePair() +"): (" + rekTiefe + "): " + nextSetasList.size());
Set<UnifyPair> a_last = a;

/* Liste der Faelle für die parallele Verarbeitung
@@ -794,6 +813,8 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {

writeLog("nextSet: " + nextSet.toString());
writeLog("nextSetasList: " + nextSetasList.toString());

/* staistics Nextvar an Hand Varianzbestimmung auskommentieren Anfang
if (variance == 1) {
a = oup.max(nextSetasList.iterator());
nextSetasList.remove(a);
@@ -865,7 +886,10 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
}
}
Nextvar an Hand Varianzbestimmung auskommentieren Ende */
a = nextSetasList.remove(0); //statisticsList

//writeStatistics(a.toString());
if (oderConstraint) {//Methodconstraints werden abgespeichert für die Bytecodegenerierung von Methodenaufrufen
methodSignatureConstraint.addAll(((Constraint<UnifyPair>)a).getmethodSignatureConstraint());
//System.out.println("ERSTELLUNG: " +methodSignatureConstraint);
@@ -887,11 +911,13 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
/* Wenn bei (a \in theta) \in a zu Widerspruch in oneElems wird
* a verworfen und zu nächstem Element von nextSetasList gegangen
*/
/* statistics sameEq wird nicht betrachtet ANGFANG
if (!oderConstraint && !sameEqSet.isEmpty() && !checkNoContradiction(a, sameEqSet, result)) {
a = null;
noShortendElements++;
continue;
}
statistics sameEq wird nicht betrachtet ENDE */

/* Wenn parallel gearbeitet wird, wird je nach Varianz ein neuer Thread
* gestartet, der parallel weiterarbeitet.
@@ -904,7 +930,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
newElemsOrig.add(a);

/* FORK ANFANG */
- TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, methodSignatureConstraint);
+ TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, resultModel, usedTasks, methodSignatureConstraint);
//forks.add(forkOrig);
synchronized(usedTasks) {
if (this.myIsCancelled()) {
@@ -918,19 +944,22 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
writeLog("a in " + variance + " "+ a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
}
- while (!nextSetasListRest.isEmpty()) {
- Set<UnifyPair> nSaL = nextSetasListRest.remove(0);
- synchronized (this) { nextSetasList.remove(nSaL);
+ while (!nextSetasList.isEmpty()) {
+ Set<UnifyPair> nSaL = nextSetasList.remove(0);
+ synchronized (this) { //nextSetasList.remove(nSaL);
writeLog("1 RM" + nSaL.toString());
}

if (!oderConstraint) {

/* statistics sameEq wird nicht betrachtet ANGFANG
//ueberpruefung ob zu a =. ty \in nSaL in sameEqSet ein Widerspruch besteht
if (!sameEqSet.isEmpty() && !checkNoContradiction(nSaL, sameEqSet, result)) {
nSaL = null;
noShortendElements++;
continue;
}
statistics sameEq wird nicht betrachtet ENDE */
}
else {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>)nSaL).getExtendConstraint());
@@ -939,7 +968,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
- TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint));
+ TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, resultModel, usedTasks, new HashSet<>(methodSignatureConstraint));
forks.add(fork);
synchronized(usedTasks) {
if (this.myIsCancelled()) {
@@ -1003,7 +1032,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
newElemsOrig.add(a);

/* FORK ANFANG */
- TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint));
+ TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, resultModel, usedTasks, new HashSet<>(methodSignatureConstraint));
//forks.add(forkOrig);
synchronized(usedTasks) {
if (this.myIsCancelled()) {
@@ -1017,19 +1046,21 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
writeLog("a in " + variance + " "+ a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
}
- while (!nextSetasListRest.isEmpty()) {
- Set<UnifyPair> nSaL = nextSetasListRest.remove(0);
- synchronized (this) { nextSetasList.remove(nSaL);
+ while (!nextSetasList.isEmpty()) {
+ Set<UnifyPair> nSaL = nextSetasList.remove(0);
+ synchronized (this) { //nextSetasList.remove(nSaL);
writeLog("-1 RM" + nSaL.toString());
}

if (!oderConstraint) {
/* statistics sameEq wird nicht betrachtet ANGFANG
//ueberpruefung ob zu a =. ty \in nSaL in sameEqSet ein Widerspruch besteht
if (!sameEqSet.isEmpty() && !checkNoContradiction(nSaL, sameEqSet, result)) {
nSaL = null;
noShortendElements++;
continue;
}
statistics sameEq wird nicht betrachtet ENDE */
}
else {
nextSetasListOderConstraints.add(((Constraint<UnifyPair>)nSaL).getExtendConstraint());
@@ -1038,7 +1069,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
- TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint));
+ TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, resultModel, usedTasks, new HashSet<>(methodSignatureConstraint));
forks.add(fork);
synchronized(usedTasks) {
if (this.myIsCancelled()) {
@@ -1103,7 +1134,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
newElemsOrig.add(a);

/* FORK ANFANG */
- TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, new HashSet<>(methodSignatureConstraint));
+ TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, resultModel, usedTasks, new HashSet<>(methodSignatureConstraint));
//forks.add(forkOrig);
synchronized(usedTasks) {
if (this.myIsCancelled()) {
@@ -1117,14 +1148,14 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
writeLog("a in " + variance + " "+ a);
writeLog("nextSetasListRest: " + nextSetasListRest.toString());
}
- while (!nextSetasListRest.isEmpty()) {
- Set<UnifyPair> nSaL = nextSetasListRest.remove(0);
- nextSetasList.remove(nSaL); //PL einkommentiert 20-02-03
+ while (!nextSetasList.isEmpty()) {
+ Set<UnifyPair> nSaL = nextSetasList.remove(0);
+ //nextSetasList.remove(nSaL); //PL einkommentiert 20-02-03
Set<UnifyPair> newEq = new HashSet<>(eq);
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Constraint<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
- TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, methodSignatureConstraint);
+ TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, resultModel, usedTasks, methodSignatureConstraint);
forks.add(fork);
synchronized(usedTasks) {
if (this.myIsCancelled()) {
@@ -1179,6 +1210,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}}}

//Ab hier alle parallele Berechnungen wieder zusammengeführt.
//if (hilf == 1)
//System.out.println();
//writeStatistics("Zusammengeführt(" + rekTiefe + "): " + nextSetasList.size());
if (oderConstraint) {//Wenn weiteres Element nextSetasList genommen wird, muss die vorherige methodsignatur geloescht werden
methodSignatureConstraint.removeAll(((Constraint<UnifyPair>)a).getmethodSignatureConstraint());
//System.out.println("REMOVE: " +methodSignatureConstraint);
@@ -1192,6 +1226,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
|| (!isUndefinedPairSetSet(res) && !isUndefinedPairSetSet(result))
|| result.isEmpty()) {

/* auskommentiert damit alle Lösungen reinkommen ANFANG
if ((!result.isEmpty() && !res.isEmpty() && !isUndefinedPairSetSet(res) && !isUndefinedPairSetSet(result)) //korrekte Loesungen aus und-constraints
&& (a.stream().map(x-> (x.getBasePair() != null)).reduce(true, (x, y) -> (x && y)))) //bei oder-Constraints nicht ausfuehren
{
@@ -1213,7 +1248,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {

//Alle Variablen bestimmen die nicht hinzugefügt wurden in a_last
//System.out.println(a_last);

if (a_last != null) {
try {//PL eingefuegt 2019-03-06 da bei map mmer wieder Nullpointer kamen
a_last.forEach(x -> {writeLog("a_last_elem:" + x + " basepair: " + x.getBasePair());});//PL 2019-05-13 ins try hinzugefuegt Nullpointer-Exception ist in der Zeile aufgetaucht.
List<PlaceholderType> varsLast_a =
@@ -1265,9 +1300,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
catch (NullPointerException e) {
writeLog("NullPointerException: " + a_last.toString());
}
}}
}
- else {
+ else
+ auskommentiert damit alle Lösungen reinkommen ANFANG */
+ {
//alle Fehlerfaelle und alle korrekten Ergebnis jeweils adden
writeLog("RES Fst: result: " + result.toString() + " res: " + res.toString());
result.addAll(res);
@@ -1287,7 +1324,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//wenn korrektes Ergebnis gefunden alle Fehlerfaelle loeschen
result = par_res;
if (!par_res.isEmpty() && par_res.iterator().next() instanceof WildcardType) {
- System.out.println("");
+ //System.out.println("");
}
}
else {
@@ -1303,7 +1340,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//break;
}

- /* auskommentiert um alle Max und min Betrachtung auszuschalten ANFANG */
+ /* auskommentiert um alle Max und min Betrachtung auszuschalten ANFANG
if (!result.isEmpty() && (!isUndefinedPairSetSet(res) || !aParDef.isEmpty())) {
if (nextSetasList.iterator().hasNext() && nextSetasList.iterator().next().stream().filter(x -> x.getLhsType().getName().equals("B")).findFirst().isPresent() && nextSetasList.size()>1)
System.out.print("");
@@ -1411,6 +1448,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
else { if (variance == 0) {
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
if (!oderConstraint) {
writeStatistics("break");
break;
}
else {
@@ -1439,7 +1477,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
writeLog("a: " + rekTiefe + " variance: " + variance + a.toString());
}
}
- /* auskommentiert um alle Max und min Betrachtung auszuschalten ENDE */
+ auskommentiert um alle Max und min Betrachtung auszuschalten ENDE */

if (isUndefinedPairSetSet(res) && aParDef.isEmpty()) {
int nofstred= 0;
@@ -1472,8 +1510,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
return new Pair<>(su, x.getGroundBasePair());})
.collect(Collectors.toCollection(HashSet::new));
if (res.size() > 1) {
- System.out.println();
+ //System.out.println();
}
/* statistics no erase
writeLog("nextSetasList vor filter-Aufruf: " + nextSetasList);
if (!oderConstraint) {//PL 2023-02-08 eingefuegt: Bei oderconstraints sind Subststitutionen nicht als Substitutionen in idesem Sinne zu sehen
nextSetasList = nextSetasList.stream().filter(x -> {
@@ -1486,6 +1525,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
.collect(Collectors.toCollection(ArrayList::new));
}
writeLog("nextSetasList nach filter-Aufruf: " + nextSetasList);
*/
nofstred = nextSetasList.size();
//NOCH NICHT korrekt PL 2018-10-12
//nextSetasList = nextSetasList.stream().filter(y -> couldBecorrect(reducedUndefResSubstGroundedBasePair, y))
@@ -1503,8 +1543,11 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
writeLog("Number of all erased Elements (undef): " + noAllErasedElements.toString());
noBacktracking++;
writeLog("Number of Backtracking: " + noBacktracking);
- System.out.println("");
+ //writeStatistics("Number of erased elements: " + (len - nextSetasList.size()));
+ //writeStatistics("Number of Backtracking: " + noBacktracking);
+ //System.out.println("");
}
else //writeStatistics("res: " + res.toString());
//if (nextSetasList.size() == 0 && isUndefinedPairSetSet(result) && nextSet.size() > 1) {
// return result;
//}
@@ -1513,6 +1556,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//}
//else result.stream().filter(y -> !isUndefinedPairSet(y));
writeLog("res: " + res.toString());
//writeStatistics(" End Number of Elements (" + rekTiefe + "): " + nextSetasList.size());
noLoop++;
//writeStatistics("Number of Loops: " + noLoop);
}
//2020-02-02: if (variance ==2) Hier Aufruf von filterOverriding einfuegen
writeLog("Return computeCR: " + result.toString());
@@ -2008,7 +2054,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//System.out.println(pair);
if (first) { //writeLog(pair.toString()+"\n");
if (((PlaceholderType)(pair.getLhsType())).getName().equals("AR")) {
- System.out.println("AR");
+ //System.out.println("AR");
}
Set<Set<UnifyPair>> x1 = unifyCase1(pair, fc);
if (pairOp == PairOperator.SMALLERNEQDOT) {
@@ -2576,4 +2622,18 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
}
}

void writeStatistics(String str) {
if (finalresult) {
synchronized ( this ) {
try {
statistics.write("Thread No. " + thNo + ": " + str + "\n");
statistics.flush();

}
catch (IOException e) {
System.err.println("kein StatisticsFile");
}
}}
}
}
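The statistics writer introduced here defaults to a no-op `NullWriter`, so the `write`/`flush` calls in `writeStatistics(...)` are harmless unless a real `Writer` is injected through the new constructor. A minimal sketch of that pattern, with the target file name purely hypothetical:

```java
import java.io.IOException;
import java.io.Writer;

import org.apache.commons.io.output.NullWriter;

// Not project code: shows the "optional statistics sink" idea in isolation.
public class StatisticsSinkSketch {

    static Writer statistics = new NullWriter();   // same default as in TypeUnifyTask

    static synchronized void writeStatistics(String str) {
        try {
            statistics.write(str + "\n");
            statistics.flush();
        } catch (IOException e) {
            System.err.println("no statistics file");
        }
    }

    public static void main(String[] args) throws IOException {
        writeStatistics("Backtracking: 0");                       // goes nowhere by default
        statistics = new java.io.FileWriter("statistics.log");    // hypothetical real sink
        writeStatistics("Loops: 1");                              // now lands in the file
        statistics.close();
    }
}
```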
@@ -0,0 +1,58 @@
package de.dhbwstuttgart.typeinference.unify;

import de.dhbwstuttgart.syntaxtree.factory.UnifyTypeFactory;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

import java.util.*;
import java.util.concurrent.ForkJoinPool;
import java.util.stream.Collectors;

public class UnifyResultModelParallel {
private ForkJoinPool pool;
private ConstraintSet<Pair> cons;
private IFiniteClosure fc;
private List<UnifyResultListener> listeners = new ArrayList<>();

public UnifyResultModelParallel(ConstraintSet<Pair> cons, IFiniteClosure fc){
this.cons = cons;
this.fc = fc;
}

public void setPool(ForkJoinPool pool){
this.pool = pool;
}
public void addUnifyResultListener(UnifyResultListener listenerToAdd) {
listeners.add(listenerToAdd);
}
public void removeUnifyResultListener(UnifyResultListener listenerToRemove) {
listeners.remove(listenerToRemove);
}
public void notify(Set<Set<UnifyPair>> eqPrimePrimeSet){
pool.execute(()->{
Object HashSet;
Set<Set<UnifyPair>> eqPrimePrimeSetRet = eqPrimePrimeSet.stream().map(x -> {
Optional<Set<UnifyPair>> res = new RuleSet().subst(x.stream().map(y -> {
if (y.getPairOp() == PairOperator.SMALLERDOTWC) y.setPairOp(PairOperator.EQUALSDOT);
return y; //alle Paare a <.? b erden durch a =. b ersetzt
}).collect(Collectors.toCollection(HashSet::new)));
if (res.isPresent()) {//wenn subst ein Erg liefert wurde was veraendert
return new TypeUnifyTask().applyTypeUnificationRules(res.get(), fc);
}
else return x; //wenn nichts veraendert wurde wird x zurueckgegeben
}).collect(Collectors.toCollection(HashSet::new));
List<ResultSet> newResult = eqPrimePrimeSetRet.stream().map(unifyPairs ->
new ResultSet(UnifyTypeFactory.convert(unifyPairs, de.dhbwstuttgart.typeinference.constraints.Pair.generateTPHMap(cons))))
.collect(Collectors.toList());
UnifyResultEvent evt = new UnifyResultEvent(newResult);

for (UnifyResultListener listener : listeners) {
listener.onNewTypeResultFound(evt);
}
});
}
}
@@ -281,6 +281,7 @@ public class TestComplete {

@Test
public void scalarTest() throws Exception {

var classFiles = generateClassFiles(new ByteArrayClassLoader(), "Scalar.jav");
var scalar = classFiles.get("Scalar");

@@ -7,7 +7,17 @@ import de.dhbwstuttgart.syntaxtree.visual.ASTPrinter;
import de.dhbwstuttgart.syntaxtree.visual.ASTTypePrinter;
import de.dhbwstuttgart.typedeployment.TypeInsert;
import de.dhbwstuttgart.typedeployment.TypeInsertFactory;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.result.ResultSet;
import de.dhbwstuttgart.typeinference.unify.TypeUnify;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModelParallel;
import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.*;
import org.apache.commons.io.output.NullWriter;
import org.junit.Test;

import java.io.File;
@@ -24,7 +34,71 @@ import java.util.Set;
public class UnifyTest {

public static final String rootDirectory = System.getProperty("user.dir")+"/resources/javFiles/";
/*

private UnifyPair genPairListOfInteger(String name){

UnifyType type1 = new PlaceholderType(name);
UnifyType type2 = new ReferenceType("List", new TypeParams(new ReferenceType("Integer")));
UnifyPair pair1 = new UnifyPair(type2, type1, PairOperator.SMALLERDOT);

return pair1;
}
private UnifyPair genPairListOfString(String name){

PlaceholderType type1 = new PlaceholderType(name);
UnifyType type2 = new ReferenceType("List", new TypeParams(new ReferenceType("String")));
UnifyPair pair1 = new UnifyPair(type2, type1, PairOperator.SMALLERDOT);

return pair1;
}
@Test
public void unifyTest(){
UnifyType type1;
UnifyType type2;

Set<UnifyPair> undConstraints = new HashSet<>();
undConstraints.add(genPairListOfInteger("a"));
undConstraints.add(genPairListOfString("a"));

undConstraints.add(genPairListOfInteger("b"));
undConstraints.add(genPairListOfString("b"));
undConstraints.add(genPairListOfInteger("c"));
undConstraints.add(genPairListOfString("c"));
undConstraints.add(genPairListOfInteger("d"));
undConstraints.add(genPairListOfString("d"));
undConstraints.add(genPairListOfInteger("e"));
undConstraints.add(genPairListOfString("e"));
undConstraints.add(genPairListOfInteger("e1"));
undConstraints.add(genPairListOfString("e1"));
undConstraints.add(genPairListOfInteger("e2"));
undConstraints.add(genPairListOfString("e2"));
undConstraints.add(genPairListOfInteger("e3"));
undConstraints.add(genPairListOfString("e3"));

List<Set<Constraint<UnifyPair>>> oderConstraints = new ArrayList<>();

Set<UnifyPair> constraints = new HashSet<>();
type1 = new ReferenceType("Object");
type2 = new ReferenceType("List", new TypeParams(new PlaceholderType("X")));
constraints.add(new UnifyPair(type2, type1, PairOperator.SMALLER));
type1 = new ReferenceType("Object");
type2 = new ReferenceType("Integer");
constraints.add(new UnifyPair(type2, type1, PairOperator.SMALLER));
type1 = new ReferenceType("Object");
type2 = new ReferenceType("String");
constraints.add(new UnifyPair(type2, type1, PairOperator.SMALLER));

IFiniteClosure finiteClosure = new FiniteClosure(constraints, new NullWriter());

TypeUnify unifyAlgo = new TypeUnify();
ConstraintSet<Pair> cons = new ConstraintSet<>();
UnifyResultModelParallel urm = new UnifyResultModelParallel(cons, finiteClosure);
UnifyTaskModel tasks = new UnifyTaskModel();
Set<Set<UnifyPair>> solution = unifyAlgo.unify(undConstraints, oderConstraints, finiteClosure, new NullWriter(), false, urm, tasks);
System.out.println(solution.size());
System.out.println(solution);
}
/*
@Test
public void finiteClosure() throws IOException, ClassNotFoundException {
execute(new File(rootDirectory+"fc.jav"));