Merge remote-tracking branch 'origin/bigRefactoring' into bigRefactoring

Michael Uhl 2019-05-14 21:54:41 +02:00
commit 0233426979
10 changed files with 159 additions and 45 deletions

View File

@@ -39,6 +39,7 @@ import de.dhbwstuttgart.typeinference.unify.TypeUnifyTask;
import de.dhbwstuttgart.typeinference.unify.UnifyResultListener;
import de.dhbwstuttgart.typeinference.unify.UnifyResultListenerImpl;
import de.dhbwstuttgart.typeinference.unify.UnifyResultModel;
import de.dhbwstuttgart.typeinference.unify.UnifyTaskModel;
import java.io.File;
import java.io.FileOutputStream;
@@ -58,9 +59,10 @@ import org.apache.commons.io.output.NullOutputStream;
public class JavaTXCompiler {
final CompilationEnvironment environment;
Boolean resultmodel = false;
Boolean resultmodel = true;
public final Map<File, SourceFile> sourceFiles = new HashMap<>();
Boolean log = true; //indicates whether a log file should be written to System.getProperty("user.dir")+"src/test/java/logFiles"
public volatile UnifyTaskModel usedTasks = new UnifyTaskModel();
/**
* Outermost list of the source files.
@@ -417,7 +419,7 @@ public class JavaTXCompiler {
}
return ret;
}).collect(Collectors.toCollection(ArrayList::new));
unify.unifyAsync(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, urm);
unify.unifyAsync(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, urm, usedTasks);
}
catch (IOException e) {
System.err.println("kein LogFile");
@@ -438,7 +440,7 @@ public class JavaTXCompiler {
Set<Set<UnifyPair>> results = new HashSet<>();
try {
Writer logFile = //new OutputStreamWriter(new NullOutputStream());
new FileWriter(new File(System.getProperty("user.dir")+"/src/test/java/logFiles/"+"log_"+sourceFiles.keySet().iterator().next().getName()));
new FileWriter(new File(System.getProperty("user.dir")+"/src/test/resources/logFiles/"+"log_"+sourceFiles.keySet().iterator().next().getName()));
IFiniteClosure finiteClosure = UnifyTypeFactory.generateFC(allClasses,logFile);
System.out.println(finiteClosure);
ConstraintSet<UnifyPair> unifyCons = UnifyTypeFactory.convert(cons);
@@ -557,7 +559,7 @@ public class JavaTXCompiler {
UnifyResultModel urm = new UnifyResultModel(cons, finiteClosure);
UnifyResultListenerImpl li = new UnifyResultListenerImpl();
urm.addUnifyResultListener(li);
unify.unifyParallel(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, urm);
unify.unifyParallel(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, urm, usedTasks);
System.out.println("RESULT Final: " + li.getResults());
logFile.write("RES_FINAL: " + li.getResults().toString()+"\n");
logFile.flush();
@@ -566,7 +568,7 @@ public class JavaTXCompiler {
/* UnifyResultModel End */
else {
//Set<Set<UnifyPair>> result = unify.unify(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons, finiteClosure));
Set<Set<UnifyPair>> result = unify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons, finiteClosure));
Set<Set<UnifyPair>> result = unify.unifyOderConstraints(unifyCons.getUndConstraints(), oderConstraints, finiteClosure, logFile, log, new UnifyResultModel(cons, finiteClosure), usedTasks);
System.out.println("RESULT: " + result);
logFile.write("RES: " + result.toString()+"\n");
logFile.flush();
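Note: the diff above threads the compiler's new public usedTasks field (a UnifyTaskModel, added as a new file further down) into every unify call. A minimal caller-side sketch of what this enables, under stated assumptions: the watchdog thread, the 30-second timeout and the typeInference() call are illustrative only; the JavaTXCompiler(File) constructor, the public usedTasks field and UnifyTaskModel.cancel() are the pieces that appear in this commit.

import java.io.File;
import de.dhbwstuttgart.core.JavaTXCompiler;

public class CancelInferenceSketch {
    public static void main(String[] args) throws Exception {
        JavaTXCompiler compiler = new JavaTXCompiler(new File(args[0]));
        // Hypothetical watchdog: after a timeout, flag every registered TypeUnifyTask as cancelled.
        Thread watchdog = new Thread(() -> {
            try {
                Thread.sleep(30_000);          // assumed timeout, not part of the commit
                compiler.usedTasks.cancel();   // sets the cancel flag on all registered tasks
            } catch (InterruptedException e) {
                // inference finished in time; nothing to cancel
            }
        });
        watchdog.start();
        compiler.typeInference();              // assumed entry point driving the unify calls shown above
        watchdog.interrupt();                  // stop the watchdog once inference has returned
    }
}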

View File

@@ -229,7 +229,7 @@ public class TYPEStmt implements StatementVisitor{
binary.operation.equals(BinaryExpr.Operator.MUL)||
binary.operation.equals(BinaryExpr.Operator.MOD)||
binary.operation.equals(BinaryExpr.Operator.ADD)||
binary.operation.equals(BinaryExpr.Operator.SUB)){
binary.operation.equals(BinaryExpr.Operator.SUB)) {
Set<Constraint<Pair>> numericAdditionOrStringConcatenation = new HashSet<>();
// TODO PL 2018-11-06
@@ -312,6 +312,9 @@ public class TYPEStmt implements StatementVisitor{
numericAdditionOrStringConcatenation.add(stringConcat);
}
}
if(numericAdditionOrStringConcatenation.size()<1){
throw new TypeinferenceException("Kein Typ für " + binary.operation.toString() + " vorhanden", binary.getOffset());
}
constraintsSet.addOderConstraint(numericAdditionOrStringConcatenation);
}else if(binary.operation.equals(BinaryExpr.Operator.LESSEQUAL) ||
binary.operation.equals(BinaryExpr.Operator.BIGGEREQUAL) ||
@@ -355,6 +358,7 @@ public class TYPEStmt implements StatementVisitor{
//***ATTENTION: the oder- and und-constraint may be wrong
*/
//Inserted for testing purposes PL 2018-05-24
//Imports should possibly still be requested here PL 2019-05-07
constraintsSet.addUndConstraint(new Pair(binary.lexpr.getType(), number, PairOperator.SMALLERNEQDOT));
constraintsSet.addUndConstraint(new Pair(binary.rexpr.getType(), number, PairOperator.SMALLERNEQDOT));
//The return type is Boolean

View File

@@ -3,10 +3,12 @@ package de.dhbwstuttgart.typeinference.unify;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ForkJoinPool;
import de.dhbwstuttgart.core.JavaTXCompiler;
import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
@@ -26,8 +28,8 @@ public class TypeUnify {
* @param cons
* @return
*/
public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret);
public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
ForkJoinPool pool = new ForkJoinPool();
pool.invoke(unifyTask);
Set<Set<UnifyPair>> res = unifyTask.join();
@@ -52,8 +54,8 @@ public class TypeUnify {
* @param ret
* @return
*/
public UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret);
public UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
ForkJoinPool pool = new ForkJoinPool();
pool.invoke(unifyTask);
return ret;
@@ -70,8 +72,8 @@ public class TypeUnify {
* @param ret
* @return
*/
public UnifyResultModel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret);
public UnifyResultModel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
ForkJoinPool pool = new ForkJoinPool();
pool.invoke(unifyTask);
Set<Set<UnifyPair>> res = unifyTask.join();
@@ -103,8 +105,8 @@ public class TypeUnify {
* @param cons
* @return
*/
public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0, ret);
public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0, ret, usedTasks);
Set<Set<UnifyPair>> res = unifyTask.compute();
try {
logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements +"\n");

View File

@@ -3,6 +3,7 @@ package de.dhbwstuttgart.typeinference.unify;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
@@ -17,8 +18,8 @@ public class TypeUnify2Task extends TypeUnifyTask {
Set<Set<UnifyPair>> setToFlatten;
public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm) {
super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm);
public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks) {
super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
this.setToFlatten = setToFlatten;
this.nextSetElement = nextSetElement;
}
@@ -38,9 +39,17 @@ public class TypeUnify2Task extends TypeUnifyTask {
return new HashSet<>(); }
else
*/
noOfThread--;
return res;
}
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
else {
return res;
}
}
}
public void closeLogFile() {

View File

@@ -122,6 +122,10 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
static Integer noShortendElements = 0;
Boolean myIsCanceled = false;
volatile UnifyTaskModel usedTasks;
public TypeUnifyTask() {
rules = new RuleSet();
}
@@ -141,7 +145,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
*/
public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm) {
public TypeUnifyTask(Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks) {
synchronized (this) {
this.eq = eq;
//this.oderConstraints = oderConstraints.stream().map(x -> x.stream().map(y -> new HashSet<>(y)).collect(Collectors.toSet(HashSet::new))).collect(Collectors.toList(ArrayList::new));
@@ -168,16 +172,26 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
thNo = totalnoOfThread;
writeLog("thNo2 " + thNo);
try {
this.logFile = new OutputStreamWriter(new NullOutputStream());
//new FileWriter(new File(System.getProperty("user.dir")+"/src/test/java/logFiles/"+"Thread_"+thNo));
this.logFile = //new OutputStreamWriter(new NullOutputStream());
new FileWriter(new File(System.getProperty("user.dir")+"/src/test/resources/logFiles/"+"Thread_"+thNo));
logFile.write("");
}
catch (IOException e) {
System.err.println("log-File nicht vorhanden");
}
/*Cancellation test
if (thNo > 10) {
System.out.println("cancel");
usedTasks.cancel();
writeLog(nOfUnify.toString() + "cancel");
System.out.println("cancel");
}
*/
rules = new RuleSet(logFile);
this.rekTiefeField = rekTiefe;
this.urm = urm;
this.usedTasks = usedTasks;
this.usedTasks.add(this);
}
}
@@ -213,6 +227,13 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
}
}
*/
void myCancel(Boolean b) {
myIsCanceled = true;
}
public boolean myIsCancelled() {
return myIsCanceled;
}
protected Set<Set<UnifyPair>> compute() {
if (one) {
@@ -238,7 +259,16 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
if (isUndefinedPairSetSet(res)) {
throw new TypeinferenceException("Unresolved constraints: " + res.toString(), new NullToken()); //return new HashSet<>();
}
else return res;
else {
synchronized (usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
else {
return res;
}
}
}
}
/*
@Override
@@ -272,6 +302,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
/*
* Step 1: Repeated application of reduce, adapt, erase, swap
*/
rekTiefe++;
nOfUnify++;
writeLog(nOfUnify.toString() + " Unifikation: " + eq.toString());
@@ -430,7 +461,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
// .stream().map(x -> new HashSet<>(x))
// .collect(Collectors.toCollection(HashSet::new));
//Must be commented out when computeCartesianRecursive is used - END
Set<Set<UnifyPair>> eqPrimePrimeSet = new HashSet<>();
Set<TypeUnifyTask> forks = new HashSet<>();
@@ -540,6 +573,7 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<Set<UnifyPair>> computeCartesianRecursive(Set<Set<UnifyPair>> fstElems, ArrayList<Set<Set<UnifyPair>>> topLevelSets, Set<UnifyPair> eq, List<Set<Set<UnifyPair>>> oderConstraints, IFiniteClosure fc, boolean parallel, int rekTiefe, Boolean finalresult) {
//ArrayList<Set<Set<UnifyPair>>> remainingSets = new ArrayList<>(topLevelSets);
fstElems.addAll(topLevelSets.stream()
.filter(x -> x.size()==1)
.map(y -> y.stream().findFirst().get())
@@ -804,9 +838,14 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
newElemsOrig.add(a);
/* FORK START */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm);
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
//forks.add(forkOrig);
forkOrig.fork();
synchronized(usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
forkOrig.fork();
}
/* FORK END */
synchronized (this) {
@@ -851,9 +890,14 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Set<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
forks.add(fork);
fork.fork();
synchronized(usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
fork.fork();
}
}
//res = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
@@ -897,9 +941,14 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
newElemsOrig.add(a);
/* FORK START */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm);
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
//forks.add(forkOrig);
forkOrig.fork();
synchronized(usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
forkOrig.fork();
}
/* FORK END */
synchronized (this) {
@@ -944,9 +993,14 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Set<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
forks.add(fork);
fork.fork();
synchronized(usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
fork.fork();
}
}
//res = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
@@ -991,9 +1045,14 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
newElemsOrig.add(a);
/* FORK START */
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm);
TypeUnify2Task forkOrig = new TypeUnify2Task(newElemsOrig, newEqOrig, newOderConstraintsOrig, a, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
//forks.add(forkOrig);
forkOrig.fork();
synchronized(usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
forkOrig.fork();
}
/* FORK END */
synchronized (this) {
@@ -1007,9 +1066,14 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
Set<Set<UnifyPair>> newElems = new HashSet<>(elems);
List<Set<Set<UnifyPair>>> newOderConstraints = new ArrayList<>(oderConstraints);
newElems.add(nSaL);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm);
TypeUnify2Task fork = new TypeUnify2Task(newElems, newEq, newOderConstraints, nSaL, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
forks.add(fork);
fork.fork();
synchronized(usedTasks) {
if (this.myIsCancelled()) {
return new HashSet<>();
}
fork.fork();
}
}
//res = unify2(newElemsOrig, newEqOrig, newOderConstraintsOrig, fc, parallel, rekTiefe);
@@ -1046,9 +1110,6 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
if (!isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result)) {
//if a correct result has been found, delete all error cases
result = res;
if (res.iterator().next() instanceof WildcardType) {
System.out.println("");
}
}
else {
if ((isUndefinedPairSetSet(res) && isUndefinedPairSetSet(result))
@@ -1076,8 +1137,9 @@ public class TypeUnifyTask extends RecursiveTask<Set<Set<UnifyPair>>> {
//Determine all variables that were not added in a_last
//System.out.println(a_last);
a_last.forEach(x -> {writeLog("a_last_elem:" + x + " basepair: " + x.getBasePair());});
try {//PL added 2019-03-06 because map repeatedly caused NullPointerExceptions
a_last.forEach(x -> {writeLog("a_last_elem:" + x + " basepair: " + x.getBasePair());});//PL 2019-05-13 moved into the try block; the NullPointerException occurred on this line.
List<PlaceholderType> varsLast_a =
a_last.stream().filter(x -> ((x.getLhsType().getName().equals(x.getBasePair().getLhsType().getName())
&& (x.getLhsType() instanceof PlaceholderType) && (x.getBasePair().getLhsType() instanceof PlaceholderType))

View File

@@ -0,0 +1,18 @@
package de.dhbwstuttgart.typeinference.unify;
import java.util.ArrayList;
public class UnifyTaskModel {
ArrayList<TypeUnifyTask> usedTasks = new ArrayList<>();
public synchronized void add(TypeUnifyTask t) {
usedTasks.add(t);
}
public synchronized void cancel() {
for(TypeUnifyTask t : usedTasks) {
t.myCancel(true);
}
}
}
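For reference, a distilled, self-contained sketch of the cooperative-cancellation protocol this class supports (TaskModel and Worker below are illustrative stand-ins, not classes from this commit): a task registers itself on construction, cancel() flips a volatile flag on every registered task, and the task re-checks that flag under the model's lock before doing further work, mirroring the usedTasks.add(this) call and the synchronized (usedTasks) guards added to TypeUnifyTask above.

import java.util.ArrayList;
import java.util.List;

// Illustrative stand-in for UnifyTaskModel: registry of tasks plus a broadcast cancel.
class TaskModel {
    private final List<Worker> tasks = new ArrayList<>();
    public synchronized void add(Worker w) { tasks.add(w); }
    public synchronized void cancel() { for (Worker w : tasks) w.myCancel(true); }
}

// Illustrative stand-in for a TypeUnifyTask-like worker.
class Worker {
    private volatile boolean myIsCanceled = false;
    private final TaskModel usedTasks;

    Worker(TaskModel usedTasks) {
        this.usedTasks = usedTasks;
        usedTasks.add(this);              // register on construction, as TypeUnifyTask now does
    }

    void myCancel(boolean b) { myIsCanceled = true; }   // parameter ignored, matching the diff
    boolean myIsCancelled() { return myIsCanceled; }

    int run(int steps) {
        int done = 0;
        for (int i = 0; i < steps; i++) {
            synchronized (usedTasks) {    // same guard used before every fork() in the diff
                if (myIsCancelled()) {
                    return done;          // abandon remaining work, return a partial result
                }
            }
            done++;                       // stands in for one unification step / forked subtask
        }
        return done;
    }
}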

View File

@@ -137,7 +137,7 @@ public class UnifyPair {
undefinedPair = true;
}
public Set<UnifyPair> getSubstitution() {
return substitution;
return new HashSet<>(substitution);
}
public UnifyPair getBasePair() {
@@ -149,9 +149,9 @@ public class UnifyPair {
public Set<UnifyPair> getAllSubstitutions () {
Set<UnifyPair> ret = new HashSet<>();
ret.addAll(getSubstitution());
ret.addAll(new ArrayList<>(getSubstitution()));
if (basePair != null) {
ret.addAll(basePair.getAllSubstitutions());
ret.addAll(new ArrayList<>(basePair.getAllSubstitutions()));
}
return ret;
}

View File

@@ -12,7 +12,7 @@ import org.junit.Test;
import de.dhbwstuttgart.core.JavaTXCompiler;
public class FunOLTest {
public class OLFunTest {
private static String path;
private static File fileToTest;
private static JavaTXCompiler compiler;
@@ -23,13 +23,13 @@ public class FunOLTest {
@Test
public void generateBC() throws Exception {
path = System.getProperty("user.dir")+"/src/test/resources/bytecode/javFiles/FunOL.jav";
path = System.getProperty("user.dir")+"/src/test/resources/bytecode/javFiles/OLFun.jav";
fileToTest = new File(path);
compiler = new JavaTXCompiler(fileToTest);
compiler.generateBytecode(System.getProperty("user.dir")+"/src/test/resources/testBytecode/generatedBC/");
pathToClassFile = System.getProperty("user.dir")+"/src/test/resources/testBytecode/generatedBC/";
loader = new URLClassLoader(new URL[] {new URL("file://"+pathToClassFile)});
classToTest = loader.loadClass("FunOL");
classToTest = loader.loadClass("OLFun");
/*
instanceOfClass = classToTest.getDeclaredConstructor().newInstance();

View File

@@ -1,6 +1,6 @@
import java.util.Vector;
import java.lang.Integer;
import java.lang.Byte;
//import java.lang.Byte;
import java.lang.Boolean;
public class MatrixOP extends Vector<Vector<Integer>> {

View File

@@ -0,0 +1,17 @@
import java.lang.String;
import java.lang.Integer;
import java.lang.Double;
import java.util.Vector;
import java.lang.Boolean;
public class OLFun {
//f = x -> {return x + x;};
m(f, x) {
x = f.apply(x+x);
}
}