Merge with master
commit
4f9b4c0e16
@@ -6,7 +6,7 @@
	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.8"/>
	<classpathentry kind="lib" path="lib/junit-4.0.jar" sourcepath="/home/janulrich/.m2/repository/junit/junit/4.0/junit-4.0-sources.jar"/>
	<classpathentry kind="lib" path="lib/cloning.jar"/>
	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.7"/>
	<classpathentry kind="lib" path="lib/bcel-6.0-SNAPSHOT.jar"/>
	<classpathentry kind="lib" path="lib/guava-10.0.1.jar"/>
	<classpathentry kind="output" path="bin"/>
</classpath>
.gitignore (vendored, 1 change)
@@ -1,6 +1,7 @@
CVS
bin
*.class
*.log

# Mobile Tools for Java (J2ME)
.mtj.tmp/
BIN lib/guava-10.0.1.jar (new file; binary file not shown)
@@ -7,6 +7,7 @@ import java.util.HashMap;
import java.util.Hashtable;

import org.apache.bcel.generic.ClassGen;
import org.apache.bcel.generic.InstructionList;

import de.dhbwstuttgart.typeinference.Menge;
import de.dhbwstuttgart.logger.Logger;
@@ -14,6 +15,7 @@ import de.dhbwstuttgart.myexception.CTypeReconstructionException;
import de.dhbwstuttgart.myexception.JVMCodeException;
import de.dhbwstuttgart.myexception.SCExcept;
import de.dhbwstuttgart.myexception.SCStatementException;
import de.dhbwstuttgart.parser.JavaClassName;
import de.dhbwstuttgart.syntaxtree.Class;
import de.dhbwstuttgart.syntaxtree.SyntaxTreeNode;
import de.dhbwstuttgart.syntaxtree.operator.AddOp;
@@ -33,6 +35,7 @@ import de.dhbwstuttgart.typeinference.ResultSet;
import de.dhbwstuttgart.typeinference.SingleConstraint;
import de.dhbwstuttgart.typeinference.UndConstraint;
import de.dhbwstuttgart.typeinference.assumptions.TypeAssumptions;
import de.dhbwstuttgart.typeinference.exceptions.TypeinferenceException;
import de.dhbwstuttgart.typeinference.unify.CSubstitutionSet;
@@ -214,11 +217,19 @@ public JavaCodeResult printJavaCode(ResultSet resultSet) {

	@Override
	public void genByteCode(ClassGen _cg) {
		// TODO Auto-generated method stub
	public InstructionList genByteCode(ClassGen _cg) {
		InstructionList linkeSeite = this.expr1.genByteCode(_cg);
		InstructionList rechteSeite = this.expr2.genByteCode(_cg);
		if(this.getReturnType().getName().equals(new JavaClassName("String"))){
			throw new TypeinferenceException("Zeichenketten zusammenfügen ist noch nicht unterstützt",this);
		}
		//TODO: Depending on the return type, the left and right side have to be unboxed and added.
		return null;
	}

	private InstructionList genUnboxByteCode(ClassGen _cg, Type t){
		return null;
	}

}
// ino.end
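The new genByteCode above still leaves the unboxing and addition as a TODO. Below is a hedged sketch (not the project's code) of how that gap could be filled for the Integer case with BCEL; the class and method names are illustrative assumptions.

import org.apache.bcel.Constants;
import org.apache.bcel.generic.ClassGen;
import org.apache.bcel.generic.ConstantPoolGen;
import org.apache.bcel.generic.IADD;
import org.apache.bcel.generic.InstructionFactory;
import org.apache.bcel.generic.InstructionList;
import org.apache.bcel.generic.ObjectType;
import org.apache.bcel.generic.Type;

class AddOpByteCodeSketch {
	//Combines the already generated code for both operands, unboxes them and adds the primitive ints.
	InstructionList genIntegerAdd(ClassGen _cg, InstructionList linkeSeite, InstructionList rechteSeite) {
		ConstantPoolGen cp = _cg.getConstantPool();
		InstructionFactory f = new InstructionFactory(_cg, cp);
		InstructionList il = new InstructionList();
		il.append(linkeSeite);   //pushes the boxed left operand
		il.append(unbox(f));     //Integer.intValue()
		il.append(rechteSeite);  //pushes the boxed right operand
		il.append(unbox(f));
		il.append(new IADD());   //add the two ints
		//box the result again via Integer.valueOf(int)
		il.append(f.createInvoke("java.lang.Integer", "valueOf",
				new ObjectType("java.lang.Integer"), new Type[]{Type.INT}, Constants.INVOKESTATIC));
		return il;
	}

	private InstructionList unbox(InstructionFactory f) {
		InstructionList il = new InstructionList();
		il.append(f.createInvoke("java.lang.Integer", "intValue",
				Type.INT, Type.NO_ARGS, Constants.INVOKEVIRTUAL));
		return il;
	}
}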
@@ -14,6 +14,7 @@ import de.dhbwstuttgart.typeinference.Menge;
import de.dhbwstuttgart.myexception.CTypeReconstructionException;
import de.dhbwstuttgart.myexception.JVMCodeException;
import de.dhbwstuttgart.myexception.SCStatementException;
import de.dhbwstuttgart.parser.JavaClassName;
import de.dhbwstuttgart.syntaxtree.Class;
import de.dhbwstuttgart.syntaxtree.ClassHelper;
import de.dhbwstuttgart.syntaxtree.FormalParameter;
@@ -177,6 +178,10 @@ public class LambdaExpression extends Expr{
			retType = new ExtendsWildcardType((ObjectType) retType);
		}

		//At this point the LambdaExpression can already tell whether the return type is Void (no return statement):
		if(retType.getName().equals(new JavaClassName("Void"))){
			System.out.println("Void rettype");
		}
		ret.add(new SingleConstraint(new FunN(retType, modifiedParamTypes).TYPE(assumptions, this),this.getType().TYPE(assumptions, this)));
		return ret;
	}
@@ -36,6 +36,10 @@ public class ConstraintsSet extends UndMenge<Pair> implements Iterable<OderConst
	}

	public void filterWrongConstraints(Unifier unify) {
		/*
		 * The ConstraintsSet contains only OderConstraints, which are AND-linked.
		 * Here the constraints inside those OderConstraints are checked:
		 */
		for(OderConstraint constraint : this){
			constraint.filterWrongConstraints(unify);
		}
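filterWrongConstraints takes a Unifier callback whose declaration is not part of this diff. Judging from the (commented-out) lambda assigned to it further down in Unify, which maps a Menge<Pair> to a Menge<Menge<Pair>>, it is presumably a functional interface along the following lines; the interface name, method name and the import path of Pair are assumptions.

import de.dhbwstuttgart.typeinference.Menge;
import de.dhbwstuttgart.typeinference.Pair;   //assumed package

//Presumed shape of the Unifier callback: it receives a set of pairs and returns the
//unification results; an empty result means the tested constraint cannot be satisfied.
interface UnifierSketch {
	Menge<Menge<Pair>> unify(Menge<Pair> pairs);
}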
@@ -1,7 +1,8 @@
package de.dhbwstuttgart.typeinference;

import java.util.Set;
import java.util.Vector;

public class Menge<A> extends Vector<A>{
public class Menge<A> extends Vector<A> implements Set<A>{

}
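Menge now additionally declares implements Set<A>, presumably so Menge instances can be handed to the Guava Sets utilities used in Unify below (an inference, not stated in the commit). A standalone illustration (not project code) of why this combination is fragile: Vector keeps List semantics, so the java.util.Set contract is not actually enforced.

import java.util.Set;
import java.util.Vector;

class MengeLikeSketch<A> extends Vector<A> implements Set<A> { }

class MengeContractDemo {
	public static void main(String[] args) {
		Set<String> s = new MengeLikeSketch<>();
		s.add("x");
		s.add("x");                    //a real Set would reject the duplicate
		System.out.println(s.size());  //prints 2, violating the Set contract
	}
}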
@@ -81,10 +81,15 @@ public class OderConstraint extends OderMenge<Pair>{
	*/
	}

	public void addConstraint(UndConstraint methodConstraint) {
		oderConstraintPairs.add(methodConstraint);
	public void addConstraint(UndConstraint constraint) {
		oderConstraintPairs.add(constraint);
	}

	/**
	 * Filters out those constraints in this OR-linked constraint
	 * that make no sense, i.e. that fail during unification.
	 * @param unifier - used for the unification
	 */
	void filterWrongConstraints(Unifier unifier) {
		Menge<UndConstraint> filteredConstraints = new Menge<>();
		for(UndConstraint cons : this.getUndConstraints()){
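The hunk above is cut off inside filterWrongConstraints. The filtering idea (keep an alternative only if the unifier still finds at least one solution for it) can be shown with plain JDK types; everything below is an illustration with made-up stand-ins, not the project's code.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.function.Function;

class FilterWrongConstraintsDemo {
	public static void main(String[] args) {
		//Stand-ins: each inner list is one alternative (an "UndConstraint"),
		//the Function plays the role of the Unifier callback.
		List<List<String>> alternatives = new ArrayList<>();
		alternatives.add(Arrays.asList("a <. b"));             //assume this alternative unifies
		alternatives.add(Arrays.asList("Integer <. String"));  //assume this one fails

		Function<List<String>, List<List<String>>> unifier = pairs ->
				pairs.contains("Integer <. String")
						? Collections.<List<String>>emptyList()   //no solution
						: Collections.singletonList(pairs);       //at least one solution

		List<List<String>> filtered = new ArrayList<>();
		for (List<String> alternative : alternatives) {
			if (!unifier.apply(alternative).isEmpty()) {  //keep only alternatives that still unify
				filtered.add(alternative);
			}
		}
		System.out.println(filtered);  //[[a <. b]]
	}
}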
@@ -35,6 +35,13 @@ public class UndConstraint extends UndMenge<Pair> {
		this.set.add(new EinzelElement<Pair>(p));
	}

	@Override
	public String toString() {
		String ret = this.getConstraintPairs().toString();
		return ret;
	}


	/*
	public UndConstraint(ConstraintType p1, ConstraintType p2) {
		super(p1, p2);
@@ -224,3 +224,41 @@ class TypePatchJob{
		return fileContent;
	}
}

class TypedJavaSource{
	public TypedJavaSource(String source, int offset){

	}

	public void addInsert(GenericVarInsert genericVar, int offset){

	}

	public void addInsert(VarInsert var, int offset){

	}
}

class VarInsert{
	public VarInsert(String var){

	}
}

class GenericVarInsert{
	public GenericVarInsert(String varName){

	}
}

class GenericVarExtendsInsert extends GenericVarInsert{
	public GenericVarExtendsInsert(String varName, String extendsVarName){
		super(varName);
	}
}

class GenericVarSuperInsert extends GenericVarInsert{
	public GenericVarSuperInsert(String varName, String superVarName){
		super(varName);
	}
}
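The new classes above are still empty stubs. A hypothetical usage sketch of how they might be driven once the bodies exist; the offsets being character positions in the source, and the concrete values used here, are assumptions not specified by the diff.

class TypedJavaSourceUsageSketch {
	public static void main(String[] args) {
		String source = "class A { op(x) { return x; } }";
		TypedJavaSource typed = new TypedJavaSource(source, 0);
		//declare a generic variable T and insert a type annotation referring to it
		typed.addInsert(new GenericVarInsert("T"), 8);   //assumed: offset after the class header
		typed.addInsert(new VarInsert("T"), 13);         //assumed: offset in front of a parameter
	}
}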
@@ -6,11 +6,17 @@ package de.dhbwstuttgart.typeinference.unify;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

import de.dhbwstuttgart.typeinference.Menge;

import java.util.stream.Stream;

import com.google.common.collect.Sets;
import com.google.common.collect.Sets.SetView;
import com.rits.cloning.Cloner;

import de.dhbwstuttgart.logger.Logger;
import de.dhbwstuttgart.logger.Section;
import de.dhbwstuttgart.logger.SectionLogger;
@@ -634,21 +640,64 @@ public class Unify
		//TODO: Filter out impossible combinations before building the Cartesian product
		//check cartProduktSets:
		ConstraintsSet cSet = new ConstraintsSet();
		for (Menge<Menge<Pair>> vecvecpair : cartProduktSets){
		UndConstraint eq1cons = new UndConstraint();
		for(Pair p : Eq1){
			eq1cons.addConstraint(p.TA1, p.TA2);
		}
		cSet.add(eq1cons);
		for(Menge<Menge<Pair>> vecvecpair : cartProduktSets){
			OderConstraint orConstraints = new OderConstraint();
			for(Menge<Pair> pairs : vecvecpair){
				UndConstraint uCons = new UndConstraint();
				for(Pair p : pairs){
					uCons.addConstraint(new ConstraintPair(p));
					uCons.addConstraint(p.TA1, p.TA2);
				}
				orConstraints.addConstraint(uCons);
			}
			cSet.addItems(orConstraints);
			cSet.add(orConstraints);
		}

		Menge<Menge<Pair>> bigCartProductErg3 = cSet.cartesianProduct();
		if(filter)log.debug("Karthesisches Produkt nach Filterung: "+bigCartProductErg3);
		Sets.cartesianProduct(bigCartProductErg3);
		*/
		SectionLogger log = Logger.getSectionLogger(Unify.class.getName(), Section.UNIFY);

		if(filter){
			Cloner cloner = new Cloner();
			Menge<Menge<Menge<Pair>>> temp = new Menge<>(); //filtered constraints are collected here
			Menge<Pair> undMenge = new Menge<Pair>(); //the set of pairs contained in every Cartesian product
			undMenge.addAll(cloner.deepClone(Eq1));
			Menge<Menge<Menge<Pair>>> oderConstraints = new Menge<>();//the constraints to be filtered
			for (Menge<Menge<Pair>> vecvecpair : cartProduktSets){
				if(vecvecpair.size() == 1){//if this set contains only one UndMenge, it occurs in every Cartesian product:
					undMenge.addAll(cloner.deepClone(vecvecpair.firstElement()));
					temp.add(vecvecpair);
				}else{//if there are several sets, filtering is possible:
					oderConstraints.add(vecvecpair); //add the set to the OderConstraints that are to be filtered
				}
			}
			//Filter the OderConstraints:
			for(Menge<Menge<Pair>> oderConstraint : oderConstraints){
				Menge<Menge<Pair>> filteredOCons = new Menge<>(); //this set collects only the Cons
				for(Menge<Pair> pairs : oderConstraint){
					Menge<Pair> testMenge = new Menge<Pair>();
					testMenge.addAll(cloner.deepClone(undMenge));
					testMenge.addAll(cloner.deepClone(pairs));
					Menge<Menge<Pair>> test = Unify.unifyFiltered(testMenge, fc_tto, false);
					if(test.size()>0){
						filteredOCons.add(pairs);
					}
					else{
						log.debug("Ausgesondertes Constraint: "+pairs);
					}
				}
				temp.add(filteredOCons);
			}
			SetView<Menge<Menge<Pair>>> difference = Sets.difference(cartProduktSets, temp);
			log.debug("Ausgelöschte Constraints: "+difference.toString());
			//cartProduktSets = temp;
			/*
			Unifier filterUnify = (pairs)->{
				String pairsString = pairs.toString();
				Menge<Menge<Pair>> retValue = new Menge<>();
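The filtering above relies on com.rits.cloning.Cloner, whose lib/cloning.jar was added to the classpath in this commit. A standalone illustration (not project code) of what deepClone buys here: the trial unification can mutate the cloned pairs without touching the original constraint sets.

import java.util.Vector;
import com.rits.cloning.Cloner;

class ClonerDemo {
	public static void main(String[] args) {
		Vector<StringBuilder> original = new Vector<StringBuilder>();
		original.add(new StringBuilder("a"));

		Cloner cloner = new Cloner();
		Vector<StringBuilder> copy = cloner.deepClone(original);  //copies the whole object graph
		copy.get(0).append("-changed");

		System.out.println(original.get(0));  //"a" -- the original element is untouched
		System.out.println(copy.get(0));      //"a-changed"
	}
}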
@@ -656,18 +705,17 @@ public class Unify
				//Unify.unify(pairs, fc_tto, (i)->{});
				log.debug("Filtere Constraints:\n"+pairsString);
				log.debug("Ergebnis: "+ retValue);
				return retValue;};
				return retValue;
			};

			log.debug("Filtere mithilfe von 'filterWrongConstraints': "+cSet);
			cSet.filterWrongConstraints(filterUnify);
			*/
		}

		//*/

		Menge<Menge<Pair>> bigCartProductErg = cSet.cartesianProduct();
		if(filter)log.debug("Karthesisches Produkt nach Filterung: "+bigCartProductErg);
		*/

		///*
		///* Old Cartesian product: commented out by Andreas Stadelmeier
		//Here the Cartesian product is built from the vectors produced in step 4, part 1.
		Menge<Pair> helpvp;
		Menge<Menge<Pair>> bigCartProductErg = new Menge<Menge<Pair>>();
@@ -687,14 +735,30 @@ public class Unify
				}
			}
		}

		/*
		if(! bigCartProductErg.equals(bigCartProductErg3)){
			for(int i = 0; i<bigCartProductErg3.size();i++){
				if(! (bigCartProductErg.get(i).equals(bigCartProductErg3.get(i)))){
					System.out.println();
					//TODO: Check here where the difference between the two Cartesian products lies
				}
			}
		}
		*/
		//*/

		//Step 5: apply the Subst rule
		//Here the TPHs are substituted and then sorted into changed and unchanged sets.
		Menge<Menge<Pair>> changedSets = new Menge<Menge<Pair>>();
		Menge<Menge<Pair>> notChangedSets = new Menge<Menge<Pair>>();
		int counter = 0;
		for(Menge<Pair> vecpair : bigCartProductErg)
		{
			counter++;
			if(counter > 1000){
				System.out.println(counter + " von "+bigCartProductErg.size());
			}
			boolean change = false; //added PL 13-05-22
			Pair substPair = null;
			do
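The Unify changes above import and call Guava's Sets utilities (guava-10.0.1.jar was added to the classpath in this commit). A standalone illustration (not project code) of the two calls that appear there, Sets.cartesianProduct and Sets.difference:

import java.util.List;
import java.util.Set;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;

class GuavaSetsDemo {
	public static void main(String[] args) {
		Set<String> left = ImmutableSet.of("a1", "a2");
		Set<String> right = ImmutableSet.of("b1");

		//all combinations of one element per input set
		Set<List<String>> product = Sets.cartesianProduct(left, right);
		System.out.println(product);                     //[[a1, b1], [a2, b1]]

		//which alternatives were dropped by a filtering step
		Set<String> kept = ImmutableSet.of("a1");
		System.out.println(Sets.difference(left, kept)); //[a2]
	}
}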
@@ -1,5 +1,11 @@
package plugindevelopment.TypeInsertTests;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintStream;

import de.dhbwstuttgart.logger.LoggerConfiguration;
import de.dhbwstuttgart.logger.Section;
import de.dhbwstuttgart.typeinference.Menge;

import org.junit.Test;
@@ -7,11 +13,17 @@ import org.junit.Test;
public class GenericTypeVarTest2 {

	private static final String TEST_FILE2 = "GenericTypeVarTest2.jav";
	private static final String LOGFILE = "GenericTypeVarTest2.log";


	@Test
	public void run2(){
	public void run2() throws FileNotFoundException{
		Menge<String> mustContain = new Menge<String>();
		mustContain.add("String var2");
		MultipleTypesInsertTester.test(TEST_FILE2, mustContain);
		File logFile = new File(MultipleTypesInsertTester.rootDirectory+this.LOGFILE);
		LoggerConfiguration lConf = new LoggerConfiguration();

		lConf.setOutput(Section.TYPEINFERENCE, new PrintStream(logFile));
		MultipleTypesInsertTester.test(TEST_FILE2, mustContain, lConf);
	}
}
@@ -1,13 +1,20 @@
package plugindevelopment.TypeInsertTests;

import java.io.File;
import java.io.IOException;

import de.dhbwstuttgart.logger.Logger;
import de.dhbwstuttgart.typeinference.Menge;

import org.junit.Test;

import com.google.common.io.Files;

public class LambdaTest26 {

	private static final String TEST_FILE = "LambdaTest26.jav";


	@Test
	public void run(){
		Menge<String> mustContain = new Menge<String>();
@@ -2,13 +2,16 @@ package plugindevelopment.TypeInsertTests;

import java.io.File;
import java.io.IOException;
import de.dhbwstuttgart.typeinference.Menge;

import com.google.common.io.Files;

import de.dhbwstuttgart.typeinference.Menge;
import de.dhbwstuttgart.core.MyCompiler;
import de.dhbwstuttgart.core.MyCompilerAPI;
import de.dhbwstuttgart.logger.Logger;
import de.dhbwstuttgart.logger.LoggerConfiguration;
import de.dhbwstuttgart.logger.Section;
import de.dhbwstuttgart.logger.SectionLogger;
import de.dhbwstuttgart.parser.JavaParser.yyException;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.typedeployment.TypeInsertPoint;
@@ -38,6 +41,8 @@ public class MultipleTypesInsertTester extends TypeInsertTester{
	public static void test(String sourceFileToInfere, Menge<String> mustContain){
		String gesamterSrc = "";
		String inferedSource = "";
		SectionLogger log = Logger.getSectionLogger(MultipleTypesInsertTester.class.getName(), Section.TYPEINFERENCE);

		MyCompilerAPI compiler = MyCompiler.getAPI(logConfig);
		try {
			compiler.parse(new File(rootDirectory + sourceFileToInfere));
@@ -51,7 +56,7 @@ public class MultipleTypesInsertTester extends TypeInsertTester{
			//TestCase.assertTrue("Es muss mindestens ein TypeInsertPoint vorhanden sein", point.points.size()>0);
			if(point.points.size()>0){
				inferedSource = point.insertAllTypes(TypeInsertTester.getFileContent(rootDirectory + sourceFileToInfere));
				System.out.println(inferedSource);
				log.debug(inferedSource);
				gesamterSrc += inferedSource;
			}

@@ -64,6 +69,12 @@ public class MultipleTypesInsertTester extends TypeInsertTester{
		for(String containString : mustContain){
			TestCase.assertTrue("\""+containString+"\" muss in den inferierten Lösungen vorkommen",gesamterSrc.contains(containString));
		}
		try {
			Files.write(Logger.getWholeLog().getBytes(),new File(rootDirectory+sourceFileToInfere+".log"));
		} catch (IOException e) {
			e.printStackTrace();
			TestCase.fail();
		}
	}

	public static void testSingleInsert(String sourceFileToInfere, Menge<String> mustContain){
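The log writing above uses Guava's Files.write(byte[], File). A minimal standalone illustration (not project code; the file name and log content are made up):

import java.io.File;
import java.io.IOException;
import com.google.common.io.Files;

class WriteLogDemo {
	public static void main(String[] args) throws IOException {
		String wholeLog = "example log content";                    //stands in for Logger.getWholeLog()
		Files.write(wholeLog.getBytes(), new File("example.log"));  //writes all bytes in one call
	}
}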
@@ -1,5 +1,7 @@
package plugindevelopment.TypeInsertTests;

import de.dhbwstuttgart.logger.LoggerConfiguration;
import de.dhbwstuttgart.logger.Section;
import de.dhbwstuttgart.typeinference.Menge;

import org.junit.Test;
@@ -13,7 +15,7 @@ public class OverloadingInMethod2 {
		Menge<String> mustContain = new Menge<String>();

		//mustContain.add("Fun0<Fun1<java.lang.String, Fun2<AH, LambdaTest, java.lang.String>>> op");
		MultipleTypesInsertTester.test(this.TEST_FILE, mustContain);
		MultipleTypesInsertTester.test(this.TEST_FILE, mustContain, new LoggerConfiguration().setOutput(Section.UNIFY, System.out));
	}

}
@@ -1,5 +1,7 @@
package plugindevelopment.TypeInsertTests;

import de.dhbwstuttgart.logger.LoggerConfiguration;
import de.dhbwstuttgart.logger.Section;
import de.dhbwstuttgart.typeinference.Menge;

import org.junit.Test;
@@ -12,7 +14,7 @@ public class ThisTest {
	public void run(){
		Menge<String> mustContain = new Menge<String>();

		MultipleTypesInsertTester.test(this.TEST_FILE, mustContain);
		MultipleTypesInsertTester.test(this.TEST_FILE, mustContain, new LoggerConfiguration().setOutput(Section.UNIFY, System.out));
	}

}
@@ -8,6 +8,7 @@ import java.io.UnsupportedEncodingException;

import org.junit.Test;

import plugindevelopment.TypeInsertTester;
import de.dhbwstuttgart.core.MyCompiler;
import de.dhbwstuttgart.core.MyCompilerAPI;
import de.dhbwstuttgart.logger.Logger;
@@ -17,6 +18,8 @@ import de.dhbwstuttgart.parser.JavaParser.yyException;
import de.dhbwstuttgart.syntaxtree.SourceFile;
import de.dhbwstuttgart.typeinference.Menge;
import de.dhbwstuttgart.typeinference.TypeinferenceResultSet;
import de.dhbwstuttgart.typeinference.typedeployment.TypeInsertPoint;
import de.dhbwstuttgart.typeinference.typedeployment.TypeInsertSet;

public class UnifyFilter {
	public final static String rootDirectory = System.getProperty("user.dir")+"/test/unify/";
@@ -32,6 +35,7 @@ public class UnifyFilter {
			//Do nothing further. Just display the output of the unification algorithm.
			String log = Logger.getWholeLog();
			//System.out.println(log);

			writeLogFile(log);
		} catch (Exception e){
			e.printStackTrace();