Aufteilung Unifikation (splitting up the unification)
parent d41d3293aa
commit ea6ff84c9f
bin/.gitignore (vendored): 5 changed lines
@@ -1,6 +1 @@
-/bytecode/
 /de/
-/mycompiler/
-/parser/
-/plugindevelopment/
-/syntaxTree/
@@ -48,7 +48,9 @@ import de.dhbwstuttgart.typeinference.exceptions.TypeinferenceException;
 import de.dhbwstuttgart.typeinference.unify.FC_TTO;
 import de.dhbwstuttgart.typeinference.unify.Unifier;
 import de.dhbwstuttgart.typeinference.unify.Unify;
+import de.dhbwstuttgart.typeinference.OderConstraint;
+import de.dhbwstuttgart.typeinference.UndConstraint;
+import de.dhbwstuttgart.typeinference.SingleConstraint;
 
 
 
@@ -642,6 +644,46 @@ public class SourceFile
 		return publicAssumptions;
 	}
 
+	Vector<OderConstraint> divideOderConstraints(OderConstraint oc) {
+
+		//Schritt 1: Variablen an Hand des 1. Elements der UndConstraints gruppieren
+		//!!! KLAEREN OB ES PASSIEREN KANN, DASS IM 2., 3. , ... ELEMENT ANDERE VARIABLEN VORKOMMEN KOENNEN!!!
+		//WENN JA, DANN MUESSEN DIE VARIABLEN UEBER ALLE ELEMENTE (NICHT NUR DAS 1.) AUFGESAMMELT WERDEN
+		Vector<Pair> pairs = oc.getUndConstraints().elementAt(0).getSingleConstraints().stream()
+				.map(x -> x.getPair())
+				.collect(Vector::new, Vector::add, Vector::addAll);
+		Vector<Vector<TypePlaceholder>> pairvars = pairs.stream().map(p -> {Vector<TypePlaceholder> TPHs = new Vector<>();
+				TPHs.addAll(p.TA1.getInvolvedTypePlaceholder());
+				TPHs.addAll(p.TA2.getInvolvedTypePlaceholder());
+				return TPHs;}
+			).collect(Vector::new, Vector::add, Vector::addAll);
+
+		//Schritt 2: Schnittmengen jedes Elements mit jedem Elememt von vars bilden und dann index zusammenfassen
+		//in indexset sind dann die Mengen von Indizes enthalten, die gemeisam unifiziert wreden müssen
+		Vector<Vector<Integer>> indexeset1 = new Vector<>();
+		if (pairvars != null && pairvars.size()>0) {
+			indexeset1 = Unify.schnitt(pairvars);
+		}
+
+		//Schritt 3
+		//Aus den Indexmengen die UndConstraints als OderConstraint neu gruppieren
+		java.util.function.BiFunction<UndConstraint,SingleConstraint,UndConstraint> accSC = (a,b) -> { a.addConstraint(b); return a;};
+		java.util.function.BinaryOperator<UndConstraint> combUC = (a,b) -> { a.getUndConstraints().addAll(b.getUndConstraints()); return a;};
+
+		Vector<OderConstraint> ret = new Vector<OderConstraint>();
+		for (Vector<Integer> is : indexeset1) {
+			Stream<Stream<SingleConstraint>> vs = oc.getUndConstraints().stream().map(x -> is.stream().map(i -> x.getSingleConstraints().elementAt(i)));//.collect(Vector::new, Vector::add, Vector::addAll);;
+			//Vector<OderConstraint> us = vs.map(x -> x.reduce(new UndConstraint(), (a,b) -> { a.addConstraint((SingleConstraint)b); return a;}))
+			//		.collect(Vector::new, Vector::add, Vector::addAll);
+			OderConstraint OCnew = vs.reduce(new OderConstraint(),
+					(y,x) -> { y.addConstraint(x.reduce(new UndConstraint(), accSC, combUC)); return y;},
+					(a,b) -> { a.getUndConstraints().addAll(b.getUndConstraints()); return a;}
+				);
+			ret.addElement(OCnew);
+		}
+		return ret;
+	}
+
 	/////////////////////////////////////////////////////////////////////////
 	// TypeReconstructionAlgorithmus
 	/////////////////////////////////////////////////////////////////////////
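
Note on the new divideOderConstraints: it groups the positions of an OderConstraint by the type placeholders they share (via Unify.schnitt), so that positions with no placeholders in common can later be unified independently. The diff does not show schnitt itself; the standalone sketch below only illustrates the assumed grouping idea (merge index sets whose variable sets overlap, transitively) using plain JDK types. The names SchnittSketch and schnitt here are illustrative stand-ins, not the project's implementation.

    import java.util.*;

    public class SchnittSketch {

        // Groups indices whose variable sets are (transitively) overlapping.
        // vars.get(i) holds the type-placeholder names occurring at position i.
        static List<List<Integer>> schnitt(List<Set<String>> vars) {
            List<Set<Integer>> idxGroups = new ArrayList<>();
            List<Set<String>> varGroups = new ArrayList<>();
            for (int i = 0; i < vars.size(); i++) {
                Set<Integer> mergedIdx = new TreeSet<>(List.of(i));
                Set<String> mergedVars = new HashSet<>(vars.get(i));
                // absorb every existing group that shares at least one variable
                for (int g = idxGroups.size() - 1; g >= 0; g--) {
                    if (!Collections.disjoint(varGroups.get(g), mergedVars)) {
                        mergedIdx.addAll(idxGroups.remove(g));
                        mergedVars.addAll(varGroups.remove(g));
                    }
                }
                idxGroups.add(mergedIdx);
                varGroups.add(mergedVars);
            }
            List<List<Integer>> result = new ArrayList<>();
            for (Set<Integer> g : idxGroups) result.add(new ArrayList<>(g));
            return result;
        }

        public static void main(String[] args) {
            // positions 0 and 2 share placeholder A, position 1 is independent
            List<Set<String>> vars = List.of(
                    Set.of("A", "B"),   // position 0
                    Set.of("C"),        // position 1
                    Set.of("A", "D"));  // position 2
            System.out.println(schnitt(vars)); // [[1], [0, 2]]
        }
    }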
@@ -662,7 +704,7 @@ public class SourceFile
 	// ino.end
 	// ino.method.typeReconstruction.21406.body
 	{
-		Vector<TypeinferenceResultSet> ret = new Vector<TypeinferenceResultSet>();
+		//Vector<TypeinferenceResultSet> ret = new Vector<TypeinferenceResultSet>();
 
 		//Logger initialisieren:
 		Logger typinferenzLog = Logger.getLogger("Typeinference");
@@ -681,7 +723,7 @@ public class SourceFile
 		FC_TTO finiteClosure = this.makeFC(globalAssumptions);
 
 		typinferenzLog.debug("FiniteClosure: \n"+finiteClosure, Section.TYPEINFERENCE);
 
 		ConstraintsSet oderConstraints = new ConstraintsSet();
 		//Alle Constraints der in dieser SourceFile enthaltenen Klassen sammeln:
 		for(Class klasse : KlassenVektor){
@@ -699,11 +741,67 @@ public class SourceFile
 			return retValue;};
 		oderConstraints.filterWrongConstraints(unifier);
 		//oderConstraints.unifyUndConstraints(unifier);
-		typinferenzLog.debug("Übriggebliebene Konstraints:\n"+oderConstraints+"\n", Section.TYPEINFERENCE);
+		java.util.function.BiFunction<Vector<OderConstraint>,OderConstraint,Vector<OderConstraint>> divideOC =
+				(a,b) -> {
+					if ((b instanceof SingleConstraint) || (b instanceof UndConstraint)) { a.addElement(b); }
+					else { a.addAll(divideOderConstraints(b)); } // (b instanceof OderConstraint)
+					return a;
+				};
+		java.util.function.BinaryOperator<Vector<OderConstraint>> oderAll = (a,b) -> { a.addAll(b); return a;};
+
+
+		Vector<OderConstraint> vecoderConstraints = oderConstraints.getConstraintsSet().stream().reduce(new Vector<OderConstraint>(), divideOC, oderAll);
+
+		ConstraintsSet devidedOderConstraints = new ConstraintsSet();
+		devidedOderConstraints.addAll(vecoderConstraints);
+		//HIER WEITERMACHEN: oderConstraints in devidedOderConstraints umaendern
+		//1. Schritt:
+		Stream<Vector<Pair>> streamOneExample =
+				devidedOderConstraints.getConstraintsSet().stream().map(
+						(constr) -> constr.getOneExample()); //.reduce(Vector::new, Vector::addAll);
+		Vector<Vector<Pair>> vecOneExample = streamOneExample.collect(Vector::new, Vector::add, Vector::addAll);
+
+		//2. Schritt
+		Stream<Vector<TypePlaceholder>> streamvars = vecOneExample.stream().map(x -> x.stream().map(
+				p -> { Vector<TypePlaceholder> TPHs = new Vector<>();
+					TPHs.addAll(p.TA1.getInvolvedTypePlaceholder());
+					TPHs.addAll(p.TA2.getInvolvedTypePlaceholder());
+					return TPHs;}
+			).reduce(new Vector<TypePlaceholder>(),(a, b) -> { for (TypePlaceholder ty : b ) { if (!a.contains(ty)) a.add(ty); } return a; }));
+
+		Vector<Vector<TypePlaceholder>> vecvars = streamvars.collect(Vector::new, Vector::add, Vector::addAll);
+
+		//Schritt 3: Schnittmengen jedes Elements mit jedem Elememt von vars bilden und dann index zusammenfassen
+		//in indexset sind dann die Mengen von Indizes enthalten, die gemeisam unifiziert wreden müssen
+		Vector<Vector<Integer>> indexeset = new Vector<>();
+		if (vecvars != null && vecvars.size()>0) {
+			indexeset = Unify.schnitt(vecvars);
+		}
+
+		//Schritt 3: Umwandlung der Indizes in die zugehoerigen Elemente
+		// In streamconstraintsclone sind die Mengen von Paar enthalten die unifiziert werden muessen
+		Stream<Vector<OderConstraint>> streamoderconstraints = indexeset.stream().map(x -> x.stream()
+				.map(i -> devidedOderConstraints.getConstraintsSet().elementAt(i))
+				.collect(Vector::new, Vector::add, Vector::addAll));
+		Vector<Vector<OderConstraint>> vecconstraintsclone = streamoderconstraints.collect(Vector::new, Vector::add, Vector::addAll);
+		Stream<ConstraintsSet> constraintsclone = vecconstraintsclone.stream().map(
+				oderConstraintsSubset -> {
+					ConstraintsSet ConsoderConstraintsSubset = new ConstraintsSet ();
+					ConsoderConstraintsSubset.addAll(oderConstraintsSubset);
+					System.out.println(oderConstraintsSubset);
+					return ConsoderConstraintsSubset; }
+			);
+		//Vector<ConstraintsSet> xxx1 = constraintsclone.collect(Vector::new, Vector::add, Vector::addAll);
+		Stream<Vector<TypeinferenceResultSet>> ressets = constraintsclone.map(constraintsSubset -> {
+		typinferenzLog.debug("Übriggebliebene Konstraints:\n"+devidedOderConstraints+"\n", Section.TYPEINFERENCE);
 		//Die Constraints in Pair's umwandeln (Karthesisches Produkt bilden):
-		Vector<Vector<Pair>> xConstraints = new Vector<Vector<Pair>>();// = oderConstraints.getConstraints();
-		for(Vector<UndConstraint> uC : oderConstraints.getConstraints()){ //mit dem getConstraints-Aufruf wird das Karthesische Produkt erzeugt.
-			Vector<Pair> cons = new Vector<Pair>();
+		Vector<TypeinferenceResultSet> ret = new Vector<TypeinferenceResultSet>();
+		Vector<Vector<Pair>> xConstraints = new Vector<Vector<Pair>>();// = devidedOderConstraints.getConstraints();
+		for(Vector<UndConstraint> uC : constraintsSubset.getConstraints()){ //mit dem getConstraints-Aufruf wird das Karthesische Produkt erzeugt.
+			Vector<Pair> cons = new Vector<Pair>(); //HIER STIMMT ES NICHT constraintsSubset ein Element OderConstraint enthaelt vgl. (TPH AGG <. java.lang.Integer), ]| [
+			//(TPH AGG <. java.lang.Long), ]| [
+			//(TPH AGG <. java.lang.Double), ]| ]],
 			for(UndConstraint undCons:uC){
 				cons.addAll(undCons.getConstraintPairs());
 			}
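
Note on the rewritten typeReconstruction: the collected constraint set is first flattened with a three-argument Stream.reduce (an empty Vector as identity, the accumulator divideOC, the combiner oderAll), where a genuine OderConstraint is split further by divideOderConstraints, and the previous unification loop is then mapped over each resulting ConstraintsSet subset. A minimal sketch of that reduce pattern, with plain strings standing in for constraints (all names in it are illustrative):

    import java.util.Vector;
    import java.util.function.BiFunction;
    import java.util.function.BinaryOperator;
    import java.util.stream.Stream;

    public class DivideReduceSketch {
        public static void main(String[] args) {
            // "Oder" elements contain '|' and are split; plain elements are kept as is.
            BiFunction<Vector<String>, String, Vector<String>> divide = (acc, c) -> {
                if (c.contains("|")) {
                    for (String part : c.split("\\|")) acc.add(part);
                } else {
                    acc.add(c);
                }
                return acc;
            };
            BinaryOperator<Vector<String>> combine = (a, b) -> { a.addAll(b); return a; };

            Vector<String> result = Stream.of("a", "b|c", "d")
                    .reduce(new Vector<>(), divide, combine);
            System.out.println(result); // [a, b, c, d]
            // Mutating the identity inside reduce is only safe for sequential streams;
            // collect(Vector::new, Vector::add, Vector::addAll), as used elsewhere in
            // this commit, is the safer idiom for mutable accumulation.
        }
    }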
@@ -790,17 +888,17 @@ public class SourceFile
 
 		//Schritt 2: Schnittmengen jedes Elements mit jedem Elememt von vars bilden und dann index zusammenfassen
 		//in indexset sind dann die Mengen von Indizes enthalten, die gemeisam unifiziert wreden müssen
-		Vector<Vector<Integer>> indexeset = new Vector<>();
+		Vector<Vector<Integer>> indexeset1 = new Vector<>();
 		if (constraintsclonevars != null && constraintsclonevars.size()>0) {
-			indexeset = Unify.schnitt(constraintsclonevars);
+			indexeset1 = Unify.schnitt(constraintsclonevars);
 		}
 
 		//Schritt 3: Umwandlung der Indizes in die zugehoerigen Elemente
 		// In streamconstraintsclone sind die Mengen von Paar enthalten die unifiziert werden muessen
-		Stream<Vector<Pair>> streamconstraintsclone = indexeset.stream().map(x -> x.stream()
+		Stream<Vector<Pair>> streamconstraintsclone = indexeset1.stream().map(x -> x.stream()
 				.map(i -> constraintsClone.elementAt(i))
 				.collect(Vector::new, Vector::add, Vector::addAll));
-		// Vector<Vector<Pair>> vecconstraintsclone = streamconstraintsclone.collect(Vector::new, Vector::add, Vector::addAll);
+		// Vector<Vector<Pair>> vecconstraintsclone1 = streamconstraintsclone.collect(Vector::new, Vector::add, Vector::addAll);
 
 		//Schritt 4: Unifikation
 		Vector<Vector<Vector<Pair>>> vecunifyResult =
@@ -887,7 +985,7 @@ public class SourceFile
 		//typinferenzLog.debug(supportData.getFiniteClosure());
 		//typinferenzLog.debug("Typinformationen: \n"+this.getTypeInformation(this.getMethodList(), fieldInitializers));
 
-		typinferenzLog.debug("\nJavaFiles:\n", Section.TYPEINFERENCE);
+		//typinferenzLog.debug("\nJavaFiles:\n", Section.TYPEINFERENCE);
 
 		//typinferenzLog.debug(this.printJavaCode(new ResultSet(new Vector<Pair>())));
 
@@ -911,7 +1009,9 @@ public class SourceFile
 		if(unifyFail){
 			if(!this.KlassenVektor.isEmpty())throw new TypeinferenceException("Fehler in Typinferierung", this.KlassenVektor.firstElement());
 		}
-		return ret;
+		return ret;});
+		Vector<Vector<TypeinferenceResultSet>> vecressets = ressets.collect(Vector::new, Vector::add, Vector::addAll);
+		return new Vector();
 		/*
 		// HOTI: Nur zur Info.Ich habe den Loglevel auf Info geschaltet, damit
 		// in der GUI (Eclipse-Plugin) die Console nicht zugemüllt wird.
@@ -21,6 +21,14 @@ public class ConstraintsSet implements Iterable<OderConstraint>{
 	public void add(OderConstraint constraint){
 		constraintsSet.add(constraint);
 	}
 
+	public void addAll(Vector<OderConstraint> vecconstraint){
+		constraintsSet.addAll(vecconstraint);
+	}
+
+	public Vector<OderConstraint> getConstraintsSet() {
+		return constraintsSet;
+	}
+
 	/**
 	 * Liefert alle Constraint-Variationen
@@ -31,6 +31,13 @@ public class OderConstraint{
 	}
 
 
+	/**
+	 * gibt ein Exemplar zurück um die Variablen zu bestimmen
+	 */
+	public Vector<Pair> getOneExample() {
+		return this.getUndConstraints().elementAt(0).getConstraintPairs();
+	}
+
 	/**
 	 * Liefert alle in diesem OderConstraint enthaltene Constraints. Dabei gehen die Verknüpfungen (Oder/Und) verloren.
 	 * @return
@@ -43,6 +50,8 @@ public class OderConstraint{
 		return ret;
 	}
 
+
+
 	/**
 	 * Fügt ein Pair(p1, p2) dem Constraint hinzu
 	 * @param p1
@@ -68,6 +77,17 @@ public class OderConstraint{
 		}
 		return ret+"]";
 	}
 
+	public Vector<SingleConstraint> getSingleConstraints() {
+		Vector<SingleConstraint> ret = new Vector<SingleConstraint>();
+		for(UndConstraint uc : oderConstraintPairs) {
+			if (uc instanceof SingleConstraint) {
+				ret.add((SingleConstraint)uc);
+			}
+			//else return new Exception();
+		}
+		return ret;
+	}
+
 	public Vector<UndConstraint> getUndConstraints() {
 		return this.oderConstraintPairs;
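
Note on the new OderConstraint.getSingleConstraints: it keeps only those UndConstraints that are actually SingleConstraint instances and silently skips everything else (see the commented-out exception). Below is a self-contained sketch of the same instanceof filter written with the stream/collect idiom used elsewhere in this commit; Base and Single are stand-ins for UndConstraint and SingleConstraint, not the project's classes.

    import java.util.Vector;

    public class FilterSketch {
        static class Base {}
        static class Single extends Base {
            final String pair;
            Single(String pair) { this.pair = pair; }
            public String toString() { return pair; }
        }

        // keep only the Single elements, preserving order
        static Vector<Single> singlesOf(Vector<Base> constraints) {
            return constraints.stream()
                    .filter(c -> c instanceof Single)
                    .map(c -> (Single) c)
                    .collect(Vector::new, Vector::add, Vector::addAll);
        }

        public static void main(String[] args) {
            Vector<Base> cs = new Vector<>();
            cs.add(new Single("a <. b"));
            cs.add(new Base());                // skipped, no exception is thrown
            cs.add(new Single("c <. d"));
            System.out.println(singlesOf(cs)); // [a <. b, c <. d]
        }
    }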
@@ -37,6 +37,11 @@ public class SingleConstraint extends UndConstraint{
 	public Pair getPair(){
 		return constraintPair;
 	}
 
+	@Override //liefert das eine Paar zurueck
+	public Vector<Pair> getOneExample() {
+		return getConstraintPairs();
+	}
+
 	@Override //Methode überschreiben, damit immer nur ein Vector mit nur einem Element zurückgeliefert wird.
 	public Vector<Pair> getConstraintPairs(){
@@ -20,6 +20,7 @@ public class UndConstraint extends OderConstraint {
 		super();
 	}
 
+
 	@Override
 	public Vector<UndConstraint> getUndConstraints() {
 		Vector<UndConstraint> ret = new Vector<UndConstraint>();