forked from i21017/JavaTypeUnify
initial commit
commit 10096cbaeb
.gitignore (vendored, new file, 31 lines)
@@ -0,0 +1,31 @@
CVS
bin
*.class
*.log

# Mobile Tools for Java (J2ME)
.mtj.tmp/

# Package Files #
*.jar
*.war
*.ear

# IDEs
.classpath
*.iml
.idea/
/target/
.DS_Store
.project
.settings/
/target/

#
manually/

logFiles/**
!logFiles/.gitkeep

src/main/java/de/dhbwstuttgart/parser/antlr/
src/main/java/de/dhbwstuttgart/sat/asp/parser/antlr/
Makefile (new file, 3 lines)
@@ -0,0 +1,3 @@
NoOptParallel:
	mvn -DskipTests package
	cp target/JavaTXcompiler-0.1-jar-with-dependencies.jar target/JavaTXcompiler-0.1-jar-with-dependencies_NoOptParallel.jar
README_aktuelle_Branches (new file, 11 lines)
@@ -0,0 +1,11 @@
As of: 24 May 2021
bigRefactoring: master branch
targetBytecode: new code generator with generated generics, Daniel
bigRefactoringUnifyComment: documentation of Unify, Martin
bytecodeGenericsSecond: generated generics, Ali, Martin
inferWildcards: wildcards, Till
master: currently unused
plugin: intended branch for the plugin base, currently not up to date (current version in simplifyRes)
simplifyRes: basis for the plugin, should be merged into plugin, no packages yet, Michael
strucTypesNew: structural types, old basis, currently nobody working on it
pom.xml (new file, 126 lines)
@@ -0,0 +1,126 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
                             http://maven.apache.org/maven-v4_0_0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <groupId>de.dhbwstuttgart</groupId>
  <artifactId>JavaTXcompiler</artifactId>
  <packaging>jar</packaging>

  <version>0.1</version>
  <name>JavaTXcompiler</name>
  <url>http://maven.apache.org</url>
  <dependencies>
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>4.11</version>
      <scope>test</scope>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.antlr/antlr4 -->
    <dependency>
      <groupId>org.antlr</groupId>
      <artifactId>antlr4</artifactId>
      <version>4.11.1</version>
    </dependency>
    <dependency>
      <groupId>commons-io</groupId>
      <artifactId>commons-io</artifactId>
      <version>2.6</version>
    </dependency>
    <dependency>
      <groupId>com.google.guava</groupId>
      <artifactId>guava</artifactId>
      <version>22.0</version>
    </dependency>
    <dependency>
      <groupId>org.reflections</groupId>
      <artifactId>reflections</artifactId>
      <version>0.9.11</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.ow2.asm/asm -->
    <dependency>
      <groupId>org.ow2.asm</groupId>
      <artifactId>asm</artifactId>
      <version>7.0</version>
    </dependency>
  </dependencies>

  <build>
    <plugins>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-compiler-plugin</artifactId>
        <version>3.8.0</version>
        <configuration>
          <compilerArgs>--enable-preview</compilerArgs>
          <source>21</source>
          <target>21</target>
        </configuration>
      </plugin>
      <plugin>
        <groupId>org.antlr</groupId>
        <artifactId>antlr4-maven-plugin</artifactId>
        <version>4.11.1</version>
        <executions>
          <execution>
            <id>antlr</id>
            <goals>
              <goal>antlr4</goal>
            </goals>
          </execution>
        </executions>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-jar-plugin</artifactId>
        <configuration>
          <archive>
            <manifest>
              <mainClass>de.dhbwstuttgart.core.ConsoleInterface</mainClass>
            </manifest>
          </archive>
        </configuration>
      </plugin>
      <plugin>
        <artifactId>maven-assembly-plugin</artifactId>
        <executions>
          <execution>
            <phase>package</phase>
            <goals>
              <goal>single</goal>
            </goals>
          </execution>
        </executions>
        <configuration>
          <archive>
            <manifest>
              <mainClass>de.dhbwstuttgart.core.ConsoleInterface</mainClass>
            </manifest>
          </archive>
          <descriptorRefs>
            <descriptorRef>jar-with-dependencies</descriptorRef>
          </descriptorRefs>
        </configuration>
      </plugin>
    </plugins>
  </build>
  <repositories>
    <repository>
      <id>maven-repository</id>
      <url>file:///${project.basedir}/target</url>
    </repository>
  </repositories>
  <properties>
    <maven.compiler.source>19</maven.compiler.source>
    <maven.compiler.target>19</maven.compiler.target>
    <mainClass>de.dhbwstuttgart.core.ConsoleInterface</mainClass>
  </properties>
  <distributionManagement>
    <repository>
      <id>maven-repository</id>
      <name>MyCo Internal Repository</name>
      <url>file:///${project.basedir}/maven-repository/</url>
    </repository>
  </distributionManagement>
</project>
src/main/java/de/dhbwstuttgart/exceptions/DebugException.java (new file)
@@ -0,0 +1,7 @@
package de.dhbwstuttgart.exceptions;

public class DebugException extends RuntimeException {
    public DebugException(String message) {
        System.err.println(message);
    }
}
src/main/java/de/dhbwstuttgart/typeinference/constraints/Constraint.java (new file)
@@ -0,0 +1,69 @@
package de.dhbwstuttgart.typeinference.constraints;

import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

import java.util.Collection;
import java.util.HashSet;
import java.util.Set;

public class Constraint<A> extends HashSet<A> {
    private static final long serialVersionUID = 1L;
    private Boolean isInherited = false; // only needed for the method constraints

    /*
     * used during code generation to select the correct method signature
     */
    /*private*/ Set<A> methodSignatureConstraint = new HashSet<>();

    private Constraint<A> extendConstraint = null;

    public Constraint() {
        super();
    }

    public Constraint(Boolean isInherited) {
        this.isInherited = isInherited;
    }

    public Constraint(Boolean isInherited, Constraint<A> extendConstraint, Set<A> methodSignatureConstraint) {
        this.isInherited = isInherited;
        this.extendConstraint = extendConstraint;
        this.methodSignatureConstraint = methodSignatureConstraint;
    }

    public void setIsInherited(Boolean isInherited) {
        this.isInherited = isInherited;
    }

    public Boolean isInherited() {
        return isInherited;
    }

    public Constraint<A> getExtendConstraint() {
        return extendConstraint;
    }

    public void setExtendConstraint(Constraint<A> c) {
        extendConstraint = c;
    }

    public Set<A> getmethodSignatureConstraint() {
        return methodSignatureConstraint;
    }

    public void setmethodSignatureConstraint(Set<A> c) {
        methodSignatureConstraint = c;
    }

    public String toString() {
        return super.toString() + "\nisInherited = " + isInherited
                //" + extendsContraint: " + (extendConstraint != null ? extendConstraint.toStringBase() : "null" )
                + "\n";
    }

    public String toStringBase() {
        return super.toString();
    }

}
src/main/java/de/dhbwstuttgart/typeinference/constraints/ConstraintSet.java (new file)
@@ -0,0 +1,126 @@
package de.dhbwstuttgart.typeinference.constraints;


import de.dhbwstuttgart.typeinference.unify.GuavaSetOperations;

import java.util.*;
import java.util.function.BinaryOperator;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;

public class ConstraintSet<A> {
    Constraint<A> undConstraints = new Constraint<>();
    List<Set<Constraint<A>>> oderConstraints = new ArrayList<>();

    public void addUndConstraint(A p) {
        undConstraints.add(p);
    }

    public void addOderConstraint(Set<Constraint<A>> methodConstraints) {
        oderConstraints.add(methodConstraints);
    }

    public void addAllUndConstraint(Constraint<A> allUndConstraints) {
        undConstraints.addAll(allUndConstraints);
    }

    public void addAllOderConstraint(List<Set<Constraint<A>>> allOderConstraints) {
        this.oderConstraints.addAll(allOderConstraints);
    }

    public void addAll(ConstraintSet constraints) {
        this.addAllUndConstraint(constraints.undConstraints);
        this.addAllOderConstraint(constraints.oderConstraints);
    }

    @Override
    public String toString() {
        BinaryOperator<String> b = (x, y) -> x + y;
        return "\nUND:" + this.undConstraints.toString() + "\n" +
               "ODER:" + this.oderConstraints.stream().reduce("", (x, y) -> x.toString() + "\n" + y, b);
        //cartesianProduct().toString();
    }

    public Set<List<Constraint<A>>> cartesianProduct() {
        Set<Constraint<A>> toAdd = new HashSet<>();
        toAdd.add(undConstraints);
        List<Set<Constraint<A>>> allConstraints = new ArrayList<>();
        allConstraints.add(toAdd);
        allConstraints.addAll(oderConstraints);
        return new GuavaSetOperations().cartesianProduct(allConstraints);
    }

    public <B> ConstraintSet<B> map(Function<? super A, ? extends B> o) {
        Hashtable<Constraint<A>, Constraint<B>> CSA2CSB = new Hashtable<>();
        ConstraintSet<B> ret = new ConstraintSet<>();
        ret.undConstraints = undConstraints.stream().map(o).collect(Collectors.toCollection(Constraint<B>::new));
        List<Set<Constraint<B>>> newOder = new ArrayList<>();
        /*
        for(Set<Constraint<A>> oderConstraint : oderConstraints){
            oderConstraint.forEach(as -> {
                Constraint<B> newConst = as.stream()
                                           .map(o)
                                           .collect(Collectors.toCollection(
                                                   () -> new Constraint<B>(as.isInherited())));
                CSA2CSB.put(as, newConst);} );
        }
        */

        for(Set<Constraint<A>> oderConstraint : oderConstraints) {
            newOder.add(
                oderConstraint.parallelStream().map((Constraint<A> as) -> {

                    Constraint<B> newConst = as.stream()
                            .map(o)
                            .collect(Collectors.toCollection((as.getExtendConstraint() != null)
                                    ? () -> new Constraint<B>(as.isInherited(),
                                            as.getExtendConstraint().stream().map(o).collect(Collectors.toCollection(Constraint::new)),
                                            as.getmethodSignatureConstraint().stream().map(o).collect(Collectors.toCollection(HashSet::new)))
                                    : () -> new Constraint<B>(as.isInherited())
                            ));

                    //CSA2CSB.put(as, newConst);

                    return newConst;

                    /*
                    Constraint<B> bs = CSA2CSB.get(as);
                    if (as.getExtendConstraint() != null) {
                        bs.setExtendConstraint(CSA2CSB.get(as.getExtendConstraint()));
                    }
                    return bs;
                    */
                }).collect(Collectors.toSet())
            );
        }

        ret.oderConstraints = newOder;
        return ret;
    }

    public void forEach(Consumer<? super A> c) {
        undConstraints.stream().forEach(c);
        for(Set<Constraint<A>> oderConstraint : oderConstraints) {
            oderConstraint.parallelStream().forEach((Constraint<A> as) ->
                as.stream().forEach(c));
        }
    }

    public Set<A> getAll() {
        Set<A> ret = new HashSet<>();
        ret.addAll(undConstraints);
        for(Set<Constraint<A>> oderConstraint : oderConstraints) {
            oderConstraint.parallelStream().forEach((Constraint<A> as) -> ret.addAll(as));
        }
        return ret;
    }

    public List<Set<Constraint<A>>> getOderConstraints() {
        return oderConstraints;
    }

    public Set<A> getUndConstraints() {
        return undConstraints;
    }
}
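A short usage sketch may make the und/oder structure clearer. The demo class below is hypothetical (not part of the commit); it uses only the API shown above and substitutes plain strings for UnifyPair so that it stays self-contained.

package de.dhbwstuttgart.typeinference.constraints;

import java.util.HashSet;
import java.util.Set;

// Hypothetical demo class, not part of the commit.
public class ConstraintSetDemo {
    public static void main(String[] args) {
        ConstraintSet<String> cs = new ConstraintSet<>();

        // An und-constraint must hold in every solution.
        cs.addUndConstraint("a =. Integer");

        // An oder-constraint is a set of alternatives, e.g. one per applicable method signature.
        Constraint<String> alt1 = new Constraint<>();
        alt1.add("b =. String");
        Constraint<String> alt2 = new Constraint<>();
        alt2.add("b =. Number");
        Set<Constraint<String>> alternatives = new HashSet<>();
        alternatives.add(alt1);
        alternatives.add(alt2);
        cs.addOderConstraint(alternatives);

        // cartesianProduct() combines the und-constraints with exactly one
        // alternative from every oder-constraint: 1 x 2 = 2 combinations here.
        System.out.println(cs.cartesianProduct());
    }
}

Each list in the result represents one fully chosen combination of constraints.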
src/main/java/de/dhbwstuttgart/typeinference/constraints/Pair.java (new file)
@@ -0,0 +1,13 @@
package de.dhbwstuttgart.typeinference.constraints;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

import de.dhbwstuttgart.typeinference.unify.model.PairOperator;


public class Pair implements Serializable
{

}
// ino.end
src/main/java/de/dhbwstuttgart/typeinference/unify/GuavaSetOperations.java (new file)
@@ -0,0 +1,23 @@
package de.dhbwstuttgart.typeinference.unify;

import java.util.List;
import java.util.Set;

import com.google.common.collect.Sets;

import de.dhbwstuttgart.typeinference.unify.interfaces.ISetOperations;

/**
 * Implements set operations using Google Guava.
 * @author DH10STF
 */
public class GuavaSetOperations implements ISetOperations {

    @Override
    public <B> Set<List<B>> cartesianProduct(List<? extends Set<? extends B>> sets) {
        // Wraps the call to Google Guava
        return Sets.cartesianProduct(sets);
    }

}
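For reference, a minimal sketch of the underlying Guava call that this wrapper delegates to; the demo class name is made up and not part of the commit.

import com.google.common.collect.Sets;

import java.util.List;
import java.util.Set;

// Hypothetical demo, not part of the commit.
public class CartesianProductDemo {
    public static void main(String[] args) {
        // {1, 2} x {a, b} yields the four combinations [1, a], [1, b], [2, a], [2, b]
        // (iteration order may vary).
        Set<List<String>> product =
                Sets.cartesianProduct(List.of(Set.of("1", "2"), Set.of("a", "b")));
        product.forEach(System.out::println);
    }
}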
src/main/java/de/dhbwstuttgart/typeinference/unify/MartelliMontanariUnify.java (new file)
@@ -0,0 +1,108 @@
package de.dhbwstuttgart.typeinference.unify;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

import de.dhbwstuttgart.typeinference.unify.interfaces.IUnify;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
import de.dhbwstuttgart.typeinference.unify.model.Unifier;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;

/**
 * Implementation of the Martelli-Montanari unification algorithm.
 * @author Florian Steurer
 */
public class MartelliMontanariUnify implements IUnify {

    @Override
    public Optional<Unifier> unify(Set<UnifyType> terms) {
        // Sets with less than 2 terms are trivially unified
        if(terms.size() < 2)
            return Optional.of(Unifier.identity());

        // For the set of terms {t1,...,tn},
        // build a list of equations {(t1 = t2), (t2 = t3), (t3 = t4), ....}
        ArrayList<UnifyPair> termsList = new ArrayList<UnifyPair>();
        Iterator<UnifyType> iter = terms.iterator();
        UnifyType prev = iter.next();
        while(iter.hasNext()) {
            UnifyType next = iter.next();
            termsList.add(new UnifyPair(prev, next, PairOperator.EQUALSDOT));
            prev = next;
        }

        // Start with the identity unifier. Substitutions will be added later.
        Unifier mgu = Unifier.identity();

        // Apply rules while possible
        int idx = 0;
        while(idx < termsList.size()) {
            UnifyPair pair = termsList.get(idx);
            UnifyType rhsType = pair.getRhsType();
            UnifyType lhsType = pair.getLhsType();
            TypeParams rhsTypeParams = rhsType.getTypeParams();
            TypeParams lhsTypeParams = lhsType.getTypeParams();

            // REDUCE - Rule
            if(!(rhsType instanceof PlaceholderType) && !(lhsType instanceof PlaceholderType)) {
                Set<UnifyPair> result = new HashSet<>();

                // f<...> = g<...> with f != g are not unifiable
                if(!rhsType.getName().equals(lhsType.getName()))
                    return Optional.empty(); // conflict
                // f<t1,...,tn> = f<s1,...,sm> with n != m are not unifiable
                if(rhsTypeParams.size() != lhsTypeParams.size())
                    return Optional.empty(); // conflict
                // f = g is not unifiable (cannot be f = f because the erase rule would have been applied)
                //if(rhsTypeParams.size() == 0)
                    //return Optional.empty();

                // Unpack the arguments
                for(int i = 0; i < rhsTypeParams.size(); i++)
                    result.add(new UnifyPair(rhsTypeParams.get(i), lhsTypeParams.get(i), PairOperator.EQUALSDOT));

                termsList.remove(idx);
                termsList.addAll(result);
                continue;
            }

            // DELETE - Rule
            if(pair.getRhsType().equals(pair.getLhsType())) {
                termsList.remove(idx);
                continue;
            }

            // SWAP - Rule
            if(!(lhsType instanceof PlaceholderType) && (rhsType instanceof PlaceholderType)) {
                termsList.remove(idx);
                termsList.add(new UnifyPair(rhsType, lhsType, PairOperator.EQUALSDOT));
                continue;
            }

            // OCCURS-CHECK
            if(pair.getLhsType() instanceof PlaceholderType
                    && pair.getRhsType().getTypeParams().occurs((PlaceholderType) pair.getLhsType()))
                return Optional.empty();

            // SUBST - Rule
            if(lhsType instanceof PlaceholderType) {
                mgu.add((PlaceholderType) lhsType, rhsType);
                //PL 2018-04-01 check whether it is correct that no substitutions need to be passed here.
                termsList = termsList.stream().map(x -> mgu.apply(x)).collect(Collectors.toCollection(ArrayList::new));
                idx = idx+1 == termsList.size() ? 0 : idx+1;
                continue;
            }

            idx++;
        }

        return Optional.of(mgu);
    }
}
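A small worked trace of the rule order above (REDUCE, DELETE, SWAP, OCCURS-CHECK, SUBST); the types List<a> and List<Integer> are illustrative only and not taken from the repository.

// unify({ List<a>, List<Integer> })
//   initial equations: { List<a> =. List<Integer> }
//   REDUCE:            { a =. Integer }              (same name, same arity: unpack the arguments)
//   SUBST:             mgu = [a -> Integer], apply mgu: { Integer =. Integer }
//   DELETE:            { }                           (both sides are equal)
//   result:            Optional.of([a -> Integer])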
src/main/java/de/dhbwstuttgart/typeinference/unify/Match.java (new file)
@@ -0,0 +1,92 @@
package de.dhbwstuttgart.typeinference.unify;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

import de.dhbwstuttgart.typeinference.unify.interfaces.IMatch;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
import de.dhbwstuttgart.typeinference.unify.model.Unifier;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;

/**
 * Implementation of match derived from the unification algorithm.
 * @author Martin Pluemicke
 */
public class Match implements IMatch {

    @Override
    // The pattern must be on the left-hand side:
    // A<X> =. A<Integer> ==> True
    // A<Integer> =. A<X> ==> False
    public Optional<Unifier> match(ArrayList<UnifyPair> termsList) {

        // Start with the identity unifier. Substitutions will be added later.
        Unifier mgu = Unifier.identity();

        // Apply rules while possible
        int idx = 0;
        while(idx < termsList.size()) {
            UnifyPair pair = termsList.get(idx);
            UnifyType rhsType = pair.getRhsType();
            UnifyType lhsType = pair.getLhsType();
            TypeParams rhsTypeParams = rhsType.getTypeParams();
            TypeParams lhsTypeParams = lhsType.getTypeParams();

            // REDUCE - Rule
            if(!(rhsType instanceof PlaceholderType) && !(lhsType instanceof PlaceholderType)) {
                Set<UnifyPair> result = new HashSet<>();

                // f<...> = g<...> with f != g are not unifiable
                if(!rhsType.getName().equals(lhsType.getName()))
                    return Optional.empty(); // conflict
                // f<t1,...,tn> = f<s1,...,sm> with n != m are not unifiable
                if(rhsTypeParams.size() != lhsTypeParams.size())
                    return Optional.empty(); // conflict
                // f = g is not unifiable (cannot be f = f because the erase rule would have been applied)
                //if(rhsTypeParams.size() == 0)
                    //return Optional.empty();

                // Unpack the arguments
                for(int i = 0; i < rhsTypeParams.size(); i++)
                    result.add(new UnifyPair(lhsTypeParams.get(i), rhsTypeParams.get(i), PairOperator.EQUALSDOT));

                termsList.remove(idx);
                termsList.addAll(result);
                continue;
            }

            // DELETE - Rule
            if(pair.getRhsType().equals(pair.getLhsType())) {
                termsList.remove(idx);
                continue;
            }

            // SWAP - Rule
            if(!(lhsType instanceof PlaceholderType) && (rhsType instanceof PlaceholderType)) {
                return Optional.empty(); // conflict
            }

            // OCCURS-CHECK
            //deleted

            // SUBST - Rule
            if(lhsType instanceof PlaceholderType) {
                mgu.add((PlaceholderType) lhsType, rhsType);
                termsList = termsList.stream().map(mgu::applyleft).collect(Collectors.toCollection(ArrayList::new));
                idx = idx+1 == termsList.size() ? 0 : idx+1;
                continue;
            }

            idx++;
        }

        return Optional.of(mgu);
    }
}
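The asymmetry described in the comments above can be traced with the same illustrative notation:

// match([ A<X> =. A<Integer> ])     // pattern on the left
//   REDUCE:  [ X =. Integer ]
//   SUBST:   mgu = [X -> Integer]   ->  Optional.of([X -> Integer])
//
// match([ A<Integer> =. A<X> ])     // pattern on the right
//   REDUCE:  [ Integer =. X ]
//   the SWAP case is treated as a conflict here  ->  Optional.empty()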
src/main/java/de/dhbwstuttgart/typeinference/unify/RuleSet.java (new file, 1050 lines; diff not shown because it is too large)
src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify.java (new file)
@@ -0,0 +1,125 @@
package de.dhbwstuttgart.typeinference.unify;

import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ForkJoinPool;

import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.FiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

public class TypeUnify {

    public static Writer statistics;

    /**
     * Parallel unify without a result model.
     * @param undConstrains
     * @param oderConstraints
     * @param fc
     * @param logFile
     * @param log
     * @param cons
     * @return
     */
    public Set<Set<UnifyPair>> unify(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
        ForkJoinPool pool = new ForkJoinPool();
        pool.invoke(unifyTask);
        Set<Set<UnifyPair>> res = unifyTask.join();
        try {
            logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements + "\n");
            logFile.flush();
        }
        catch (IOException e) {
            System.err.println("no log-File");
        }
        return res;
    }

    /**
     * Asynchronous unify that returns the UnifyResultModel before all results have been collected.
     * @param undConstrains
     * @param oderConstraints
     * @param fc
     * @param logFile
     * @param log
     * @param cons
     * @param ret
     * @return
     */
    public UnifyResultModel unifyAsync(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
        ForkJoinPool pool = new ForkJoinPool();
        pool.invoke(unifyTask);
        return ret;
    }

    /**
     * Parallel unify that returns the UnifyResultModel after all results have been collected.
     * @param undConstrains
     * @param oderConstraints
     * @param fc
     * @param logFile
     * @param log
     * @param cons
     * @param ret
     * @return
     */
    public UnifyResultModel unifyParallel(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
        TypeUnifyTask unifyTask = //new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks);
                new TypeUnifyTask(undConstrains, oderConstraints, fc, true, logFile, log, 0, ret, usedTasks, statistics);
        ForkJoinPool pool = new ForkJoinPool();
        pool.invoke(unifyTask);
        Set<Set<UnifyPair>> res = unifyTask.join();
        try {
            logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements + "\n");
            logFile.flush();
            unifyTask.statistics.write("Backtracking: " + unifyTask.noBacktracking);
            unifyTask.statistics.write("\nLoops: " + unifyTask.noLoop);
        }
        catch (IOException e) {
            System.err.println("no log-File");
        }
        return ret;
    }

    /*
    public Set<Set<UnifyPair>> unifySequential(Set<UnifyPair> eq, IFiniteClosure fc, FileWriter logFile, Boolean log) {
        TypeUnifyTask unifyTask = new TypeUnifyTask(eq, fc, false, logFile, log);
        Set<Set<UnifyPair>> res = unifyTask.compute();
        return res;
    }
    */

    /**
     * Sequential unify with oder-constraints.
     * @param undConstrains
     * @param oderConstraints
     * @param fc
     * @param logFile
     * @param log
     * @param cons
     * @return
     */
    public Set<Set<UnifyPair>> unifyOderConstraints(Set<UnifyPair> undConstrains, List<Set<Constraint<UnifyPair>>> oderConstraints, IFiniteClosure fc, Writer logFile, Boolean log, UnifyResultModel ret, UnifyTaskModel usedTasks) {
        TypeUnifyTask unifyTask = new TypeUnifyTask(undConstrains, oderConstraints, fc, false, logFile, log, 0, ret, usedTasks);
        unifyTask.statistics = statistics;
        Set<Set<UnifyPair>> res = unifyTask.compute();
        try {
            logFile.write("\nnoShortendElements: " + unifyTask.noShortendElements + "\n");
            logFile.flush();
        }
        catch (IOException e) {
            System.err.println("no log-File");
        }
        return res;
    }

}
src/main/java/de/dhbwstuttgart/typeinference/unify/TypeUnify2Task.java (new file)
@@ -0,0 +1,76 @@
package de.dhbwstuttgart.typeinference.unify;

import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.constraints.Pair;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

public class TypeUnify2Task extends TypeUnifyTask {

    Set<Set<UnifyPair>> setToFlatten;
    Set<UnifyPair> methodSignatureConstraintUebergabe;

    //statistics
    TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq,
            List<Set<Constraint<UnifyPair>>> oderConstraints,
            Set<UnifyPair> nextSetElement,
            IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks,
            Set<UnifyPair> methodSignatureConstraintUebergabe, Writer statistics) {
        this(setToFlatten, eq, oderConstraints, nextSetElement, fc, parallel, logFile, log, rekTiefe, urm, usedTasks, methodSignatureConstraintUebergabe);

    }

    public TypeUnify2Task(Set<Set<UnifyPair>> setToFlatten, Set<UnifyPair> eq, List<Set<Constraint<UnifyPair>>> oderConstraints, Set<UnifyPair> nextSetElement, IFiniteClosure fc, boolean parallel, Writer logFile, Boolean log, int rekTiefe, UnifyResultModel urm, UnifyTaskModel usedTasks, Set<UnifyPair> methodSignatureConstraintUebergabe) {
        super(eq, oderConstraints, fc, parallel, logFile, log, rekTiefe, urm, usedTasks);
        this.setToFlatten = setToFlatten;
        this.nextSetElement = nextSetElement;
        this.methodSignatureConstraintUebergabe = methodSignatureConstraintUebergabe;
    }

    Set<UnifyPair> getNextSetElement() {
        return nextSetElement;
    }

    @Override
    protected Set<Set<UnifyPair>> compute() {
        if (one) {
            System.out.println("two");
        }
        one = true;
        Set<Set<UnifyPair>> res = unify2(setToFlatten, eq, oderConstraintsField, fc, parallel, rekTiefeField, methodSignatureConstraintUebergabe);
        /*if (isUndefinedPairSetSet(res)) {
            return new HashSet<>(); }
        else
        */
        //writeLog("xxx");
        //noOfThread--;
        synchronized (usedTasks) {
            if (this.myIsCancelled()) {
                return new HashSet<>();
            }
            else {
                return res;
            }
        }
    }

    public void closeLogFile() {

        try {
            logFile.close();
        }
        catch (IOException ioE) {
            System.err.println("no log-File" + thNo);
        }

    }
}
File diff suppressed because it is too large
src/main/java/de/dhbwstuttgart/typeinference/unify/Unifikationsalgorithmus.java (new file)
@@ -0,0 +1,11 @@
package de.dhbwstuttgart.typeinference.unify;

import java.util.Set;

import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

public interface Unifikationsalgorithmus {

    public Set<Set<UnifyPair>> apply (Set<UnifyPair> E);

}
src/main/java/de/dhbwstuttgart/typeinference/unify/UnifyResultEvent.java (new file)
@@ -0,0 +1,18 @@
package de.dhbwstuttgart.typeinference.unify;

import java.sql.ResultSet;
import java.util.List;


public class UnifyResultEvent {

    private List<ResultSet> newTypeResult;

    public UnifyResultEvent(List<ResultSet> newTypeResult) {
        this.newTypeResult = newTypeResult;
    }

    public List<ResultSet> getNewTypeResult() {
        return newTypeResult;
    }
}
src/main/java/de/dhbwstuttgart/typeinference/unify/UnifyResultListener.java (new file)
@@ -0,0 +1,7 @@
package de.dhbwstuttgart.typeinference.unify;

public interface UnifyResultListener {

    void onNewTypeResultFound(UnifyResultEvent evt);

}
src/main/java/de/dhbwstuttgart/typeinference/unify/UnifyResultListenerImpl.java (new file)
@@ -0,0 +1,21 @@
package de.dhbwstuttgart.typeinference.unify;

import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.List;

import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

public class UnifyResultListenerImpl implements UnifyResultListener {

    List<ResultSet> results = new ArrayList<>();

    public synchronized void onNewTypeResultFound(UnifyResultEvent evt) {
        results.addAll(evt.getNewTypeResult());
    }

    public List<ResultSet> getResults() {
        return results;
    }

}
src/main/java/de/dhbwstuttgart/typeinference/unify/UnifyResultModel.java (new file)
@@ -0,0 +1,41 @@
package de.dhbwstuttgart.typeinference.unify;

import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

import de.dhbwstuttgart.typeinference.constraints.ConstraintSet;
import de.dhbwstuttgart.typeinference.unify.interfaces.IFiniteClosure;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

public class UnifyResultModel {

    ConstraintSet<de.dhbwstuttgart.typeinference.constraints.Pair> cons;

    IFiniteClosure fc;

    public UnifyResultModel(ConstraintSet<de.dhbwstuttgart.typeinference.constraints.Pair> cons,
            IFiniteClosure fc) {
        this.cons = cons;
        this.fc = fc;
    }

    private List<UnifyResultListener> listeners = new ArrayList<>();

    public void addUnifyResultListener(UnifyResultListener listenerToAdd) {
        listeners.add(listenerToAdd);
    }

    public void removeUnifyResultListener(UnifyResultListener listenerToRemove) {
        listeners.remove(listenerToRemove);
    }

    public void notify(Set<Set<UnifyPair>> eqPrimePrimeSet) {

    }
}
src/main/java/de/dhbwstuttgart/typeinference/unify/UnifyTaskModel.java (new file)
@@ -0,0 +1,18 @@
package de.dhbwstuttgart.typeinference.unify;

import java.util.ArrayList;

public class UnifyTaskModel {

    ArrayList<TypeUnifyTask> usedTasks = new ArrayList<>();

    public synchronized void add(TypeUnifyTask t) {
        usedTasks.add(t);
    }

    public synchronized void cancel() {
        for(TypeUnifyTask t : usedTasks) {
            t.myCancel(true);
        }
    }
}
src/main/java/de/dhbwstuttgart/typeinference/unify/distributeVariance.java (new file)
@@ -0,0 +1,54 @@
package de.dhbwstuttgart.typeinference.unify;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

import de.dhbwstuttgart.typeinference.unify.model.FunNType;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.TypeParams;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;

public class distributeVariance extends visitUnifyTypeVisitor<Integer> {

    public static int inverseVariance(int variance) {
        Integer ret = 0;
        if (variance == 1) {
            ret = -1;
        }
        if (variance == -1) {
            ret = 1;
        }
        return ret;
    }


    @Override
    public PlaceholderType visit(PlaceholderType phty, Integer ht) {
        if (ht != 0) {
            if (phty.getVariance() == 0) {
                phty.setVariance(ht);
            }
            //PL 2018-05-17 do not change the original variance
            //else if (phty.getVariance() != ht) {
            //    phty.setVariance(0);
            //}
        }
        return phty;
    }

    public FunNType visit(FunNType funnty, Integer ht) {
        List<UnifyType> param = new ArrayList<>(funnty.getTypeParams().get().length);
        param.addAll(Arrays.asList(funnty.getTypeParams().get()));
        UnifyType resultType = param.remove(param.size()-1);
        Integer htInverse = inverseVariance(ht);
        param = param.stream()
                     .map(x -> x.accept(this, htInverse))
                     .collect(Collectors.toCollection(ArrayList::new));
        param.add(resultType.accept(this, ht));
        return FunNType.getFunNType(new TypeParams(param));
    }


}
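A brief illustration of how the visitor distributes variance over a function type, based on the code above, in which the last type parameter of a FunNType is its result type:

// Visiting a function type Fun1<T, R> with variance +1:
//   parameter T receives the inverse variance -1 (contravariant position)
//   result    R keeps the variance            +1 (covariant position)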
src/main/java/de/dhbwstuttgart/typeinference/unify/freshPlaceholder.java (new file)
@@ -0,0 +1,15 @@
package de.dhbwstuttgart.typeinference.unify;


import java.util.HashMap;

import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;


public class freshPlaceholder extends visitUnifyTypeVisitor<HashMap<PlaceholderType,PlaceholderType>> {

    @Override
    public PlaceholderType visit(PlaceholderType phty, HashMap<PlaceholderType,PlaceholderType> ht) {
        return ht.get(phty);
    }
}
src/main/java/de/dhbwstuttgart/typeinference/unify/interfaces/IFiniteClosure.java (new file)
@@ -0,0 +1,68 @@
package de.dhbwstuttgart.typeinference.unify.interfaces;

import java.util.Optional;
import java.util.Set;

import de.dhbwstuttgart.typeinference.unify.model.ExtendsType;
import de.dhbwstuttgart.typeinference.unify.model.FunNType;
import de.dhbwstuttgart.typeinference.unify.model.PairOperator;
import de.dhbwstuttgart.typeinference.unify.model.PlaceholderType;
import de.dhbwstuttgart.typeinference.unify.model.ReferenceType;
import de.dhbwstuttgart.typeinference.unify.model.SuperType;
import de.dhbwstuttgart.typeinference.unify.model.UnifyType;

/**
 *
 * @author Florian Steurer
 */
public interface IFiniteClosure {

    public void setLogTrue();

    /**
     * Returns all types of the finite closure that are subtypes of the argument.
     * @return The set of subtypes of the argument.
     */
    public Set<UnifyType> smaller(UnifyType type, Set<UnifyType> fBounded);

    /**
     * Returns all types of the finite closure that are supertypes of the argument.
     * @return The set of supertypes of the argument.
     */
    public Set<UnifyType> greater(UnifyType type, Set<UnifyType> fBounded);

    /**
     * Where does the given type fit in?
     * @param type
     * @return
     */
    public Set<UnifyType> grArg(UnifyType type, Set<UnifyType> fBounded);

    /**
     * What fits into the given type?
     * @param type
     * @return
     */
    public Set<UnifyType> smArg(UnifyType type, Set<UnifyType> fBounded);

    public Set<UnifyType> grArg(ReferenceType type, Set<UnifyType> fBounded);
    public Set<UnifyType> smArg(ReferenceType type, Set<UnifyType> fBounded);

    public Set<UnifyType> grArg(ExtendsType type, Set<UnifyType> fBounded);
    public Set<UnifyType> smArg(ExtendsType type, Set<UnifyType> fBounded);

    public Set<UnifyType> grArg(SuperType type, Set<UnifyType> fBounded);
    public Set<UnifyType> smArg(SuperType type, Set<UnifyType> fBounded);

    public Set<UnifyType> grArg(PlaceholderType type, Set<UnifyType> fBounded);
    public Set<UnifyType> smArg(PlaceholderType type, Set<UnifyType> fBounded);

    public Set<UnifyType> grArg(FunNType type, Set<UnifyType> fBounded);
    public Set<UnifyType> smArg(FunNType type, Set<UnifyType> fBounded);

    public Optional<UnifyType> getLeftHandedType(String typeName);
    public Set<UnifyType> getAncestors(UnifyType t);
    public Set<UnifyType> getChildren(UnifyType t);
    public Set<UnifyType> getAllTypesByName(String typeName);

    public int compare(UnifyType rhsType, UnifyType rhsType2, PairOperator pairop);
}
src/main/java/de/dhbwstuttgart/typeinference/unify/interfaces/IMatch.java (new file)
@@ -0,0 +1,29 @@
package de.dhbwstuttgart.typeinference.unify.interfaces;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import de.dhbwstuttgart.typeinference.unify.model.Unifier;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

/**
 * Match
 * @author Martin Pluemicke
 * derived from IUnify.java
 */
public interface IMatch {

    /**
     * Finds the most general matcher sigma of the set {t1 =. t1',...,tn =. tn'} so that
     * sigma(t1) = t1' , ... sigma(tn) = tn'.
     * @param termsList The list of term pairs to be matched
     * @return An optional of the most general matcher if it exists or an empty optional if there is no matcher.
     */
    public Optional<Unifier> match(ArrayList<UnifyPair> termsList);


}
src/main/java/de/dhbwstuttgart/typeinference/unify/interfaces/IRuleSet.java (new file)
@@ -0,0 +1,103 @@
package de.dhbwstuttgart.typeinference.unify.interfaces;

import java.util.List;
import java.util.Optional;
import java.util.Set;

import de.dhbwstuttgart.typeinference.constraints.Constraint;
import de.dhbwstuttgart.typeinference.unify.model.UnifyPair;

/**
 * Contains the inference rules that are applied to the set Eq.
 * @author Florian Steurer
 */
public interface IRuleSet {

    public Optional<UnifyPair> reduceUp(UnifyPair pair);
    public Optional<UnifyPair> reduceLow(UnifyPair pair);
    public Optional<UnifyPair> reduceUpLow(UnifyPair pair);
    public Optional<Set<UnifyPair>> reduceExt(UnifyPair pair, IFiniteClosure fc);
    public Optional<Set<UnifyPair>> reduceSup(UnifyPair pair, IFiniteClosure fc);
    public Optional<Set<UnifyPair>> reduceEq(UnifyPair pair);
    public Optional<Set<UnifyPair>> reduce1(UnifyPair pair, IFiniteClosure fc);
    public Optional<Set<UnifyPair>> reduce2(UnifyPair pair);

    /*
     * Missing reduce rules for wildcards
     */
    public Optional<UnifyPair> reduceWildcardLow(UnifyPair pair);
    public Optional<UnifyPair> reduceWildcardLowRight(UnifyPair pair);
    public Optional<UnifyPair> reduceWildcardUp(UnifyPair pair);
    public Optional<UnifyPair> reduceWildcardUpRight(UnifyPair pair);

    /*
     * cf. JAVA_BSP/Wildcard6.java
    public Optional<UnifyPair> reduceWildcardLowUp(UnifyPair pair);
    public Optional<UnifyPair> reduceWildcardUpLow(UnifyPair pair);
    public Optional<UnifyPair> reduceWildcardLeft(UnifyPair pair);
    */

    /*
     * Additional rules which replace cases of the cartesian product
     */

    /**
     * Rule that replaces the fourth case of the cartesian product where (a <.? Theta)
     */
    public Optional<UnifyPair> reduceTph(UnifyPair pair);

    /**
     * Rule that replaces the sixth case of the cartesian product where (? ext Theta <.? a)
     */
    public Optional<Set<UnifyPair>> reduceTphExt(UnifyPair pair);

    /**
     * Rule that replaces the fourth case of the cartesian product where (? sup Theta <.? a)
     */
    public Optional<Set<UnifyPair>> reduceTphSup(UnifyPair pair);

    /*
     * FunN rules
     */
    public Optional<Set<UnifyPair>> reduceFunN(UnifyPair pair);
    public Optional<Set<UnifyPair>> greaterFunN(UnifyPair pair);
    public Optional<Set<UnifyPair>> smallerFunN(UnifyPair pair);

    /**
     * Checks whether the erase1 rule applies to the pair.
     * @return True if the pair is erasable, false otherwise.
     */
    public boolean erase1(UnifyPair pair, IFiniteClosure fc);

    /**
     * Checks whether the erase2 rule applies to the pair.
     * @return True if the pair is erasable, false otherwise.
     */
    public boolean erase2(UnifyPair pair, IFiniteClosure fc);

    /**
     * Checks whether the erase3 rule applies to the pair.
     * @return True if the pair is erasable, false otherwise.
     */
    public boolean erase3(UnifyPair pair);

    public Optional<UnifyPair> swap(UnifyPair pair);

    public Optional<UnifyPair> adapt(UnifyPair pair, IFiniteClosure fc);
    public Optional<UnifyPair> adaptExt(UnifyPair pair, IFiniteClosure fc);
    public Optional<UnifyPair> adaptSup(UnifyPair pair, IFiniteClosure fc);

    /**
     * Applies the subst rule to a set of pairs (usually Eq').
     * @param pairs The set of pairs where the subst rule should apply.
     * @return An optional of the modified set, if there were any substitutions. An empty optional if there were no substitutions.
     */
    public Optional<Set<UnifyPair>> subst(Set<UnifyPair> pairs, List<Set<Constraint<UnifyPair>>> oderConstraints);

    /**
     * Applies the subst rule to a set of pairs (usually Eq').
     * @param pairs The set of pairs where the subst rule should apply.
     * @return An optional of the modified set, if there were any substitutions. An empty optional if there were no substitutions.
     */
    public Optional<Set<UnifyPair>> subst(Set<UnifyPair> pairs);
}
src/main/java/de/dhbwstuttgart/typeinference/unify/interfaces/ISetOperations.java (new file)
@@ -0,0 +1,16 @@
package de.dhbwstuttgart.typeinference.unify.interfaces;

import java.util.List;
import java.util.Set;

/**
 * Contains operations on sets.
 * @author Florian Steurer
 */
public interface ISetOperations {
    /**
     * Calculates the cartesian product of the sets.
     * @return The cartesian product
     */
    <B> Set<List<B>> cartesianProduct(List<? extends Set<? extends B>> sets);
}
src/main/java/de/dhbwstuttgart/typeinference/unify/interfaces/IUnify.java (new file)
@@ -0,0 +1,35 @@
package de.dhbwstuttgart.typeinference.unify.interfaces;

import java.util.Arrays;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

import de.dhbwstuttgart.typeinference.unify.model.UnifyType;
import de.dhbwstuttgart.typeinference.unify.model.Unifier;

/**
 * Standard unification algorithm (e.g. Robinson, Paterson-Wegman, Martelli-Montanari)
 * @author Florian Steurer
 */
public interface IUnify {

    /**
     * Finds the most general unifier sigma of the set {t1 =. t1',...,tn =. tn'} so that
     * sigma(t1) = sigma(t1') , ... sigma(tn) = sigma(tn').
     * @param terms The set of terms to be unified
     * @return An optional of the most general unifier if it exists or an empty optional if there is no unifier.
     */
    public Optional<Unifier> unify(Set<UnifyType> terms);

    /**
     * Finds the most general unifier sigma of the set {t1 =. t1',...,tn =. tn'} so that
     * sigma(t1) = sigma(t1') , ... sigma(tn) = sigma(tn').
     * @param terms The set of terms to be unified
     * @return An optional of the most general unifier if it exists or an empty optional if there is no unifier.
     */
    default public Optional<Unifier> unify(UnifyType... terms) {
        return unify(Arrays.stream(terms).collect(Collectors.toSet()));
    }

}