8156954: javac incorrectly complains of incompatible types
Add heuristics to pick best stuck constraint as per JLS 18.5.2
Reviewed-by: vromero
parent 6cc9359822
commit cc3cb0640f
langtools
src/jdk.compiler/share/classes/com/sun/tools/javac/comp
test/tools/javac/generics/inference/8156954
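The change teaches javac to choose which "stuck" deferred expression (typically an implicitly typed lambda) to type-check first when inference cannot otherwise make progress, following the resolution order suggested by JLS 18.5.2. The regression test added at the bottom of this commit shows the affected pattern; the standalone sketch below mirrors that test (the class name and comments here are illustrative, not part of the patch):

    import java.util.function.Function;

    class StuckInferenceExample {

        // R is known from the last argument, but T can only be inferred once one of
        // the two implicitly typed lambdas has been attributed. Before this fix,
        // attributing the wrong lambda first made javac report an incompatible-types error.
        <T, R> void m1(Function<R, T> f1, Function<T, R> f2, R r) { }
        <T, R> void m2(Function<T, R> f1, Function<R, T> f2, R r) { }

        void m(Integer intValue) {
            m1(o -> o, o -> o, intValue);   // both calls now compile; T and R resolve to Integer
            m2(o -> o, o -> o, intValue);
        }
    }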
@@ -35,6 +35,7 @@ import com.sun.tools.javac.resources.CompilerProperties.Fragments;
 import com.sun.tools.javac.tree.*;
 import com.sun.tools.javac.util.*;
 import com.sun.tools.javac.util.DefinedBy.Api;
+import com.sun.tools.javac.util.GraphUtils.DependencyKind;
 import com.sun.tools.javac.util.JCDiagnostic.DiagnosticPosition;
 import com.sun.tools.javac.code.Symbol.*;
 import com.sun.tools.javac.comp.Attr.ResultInfo;
@@ -44,9 +45,10 @@ import com.sun.tools.javac.util.JCDiagnostic.DiagnosticType;
 import com.sun.tools.javac.util.Log.DeferredDiagnosticHandler;
 
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.EnumSet;
-import java.util.LinkedHashMap;
+import java.util.HashSet;
 import java.util.LinkedHashSet;
 import java.util.Map;
 import java.util.Set;
@@ -576,28 +578,11 @@ public class DeferredAttr extends JCTree.Visitor {
          */
         void complete() {
             while (!deferredAttrNodes.isEmpty()) {
-                Map<Type, Set<Type>> depVarsMap = new LinkedHashMap<>();
-                List<Type> stuckVars = List.nil();
                 boolean progress = false;
                 //scan a defensive copy of the node list - this is because a deferred
                 //attribution round can add new nodes to the list
                 for (DeferredAttrNode deferredAttrNode : List.from(deferredAttrNodes)) {
-                    if (!deferredAttrNode.process(this)) {
-                        List<Type> restStuckVars =
-                                List.from(deferredAttrNode.deferredStuckPolicy.stuckVars())
-                                .intersect(inferenceContext.restvars());
-                        stuckVars = stuckVars.prependList(restStuckVars);
-                        //update dependency map
-                        for (Type t : List.from(deferredAttrNode.deferredStuckPolicy.depVars())
-                                .intersect(inferenceContext.restvars())) {
-                            Set<Type> prevDeps = depVarsMap.get(t);
-                            if (prevDeps == null) {
-                                prevDeps = new LinkedHashSet<>();
-                                depVarsMap.put(t, prevDeps);
-                            }
-                            prevDeps.addAll(restStuckVars);
-                        }
-                    } else {
+                    if (deferredAttrNode.process(this)) {
                         deferredAttrNodes.remove(deferredAttrNode);
                         progress = true;
                     }
@@ -612,7 +597,9 @@ public class DeferredAttr extends JCTree.Visitor {
                 //remove all variables that have already been instantiated
                 //from the list of stuck variables
                 try {
-                    inferenceContext.solveAny(stuckVars, depVarsMap, warn);
+                    //find stuck expression to unstuck
+                    DeferredAttrNode toUnstuck = pickDeferredNode();
+                    inferenceContext.solveAny(List.from(toUnstuck.deferredStuckPolicy.stuckVars()), warn);
                     inferenceContext.notifyChange();
                 } catch (Infer.GraphStrategy.NodeNotFoundException ex) {
                     //this means that we are in speculative mode and the
@@ -634,6 +621,59 @@ public class DeferredAttr extends JCTree.Visitor {
             }
             return dac.parent.insideOverloadPhase();
         }
+
+        /**
+         * Pick the deferred node to be unstuck. The chosen node is the first strongly connected
+         * component containing exactly one node found in the dependency graph induced by deferred nodes.
+         * If no such component is found, the first deferred node is returned.
+         */
+        DeferredAttrNode pickDeferredNode() {
+            List<StuckNode> nodes = deferredAttrNodes.stream()
+                    .map(StuckNode::new)
+                    .collect(List.collector());
+            //init stuck expression graph; a deferred node A depends on a deferred node B iff
+            //the intersection between A's input variable and B's output variable is non-empty.
+            for (StuckNode sn1 : nodes) {
+                for (Type t : sn1.data.deferredStuckPolicy.stuckVars()) {
+                    for (StuckNode sn2 : nodes) {
+                        if (sn1 != sn2 && sn2.data.deferredStuckPolicy.depVars().contains(t)) {
+                            sn1.deps.add(sn2);
+                        }
+                    }
+                }
+            }
+            //compute tarjan on the stuck graph
+            List<? extends StuckNode> csn = GraphUtils.tarjan(nodes).get(0);
+            return csn.length() == 1 ? csn.get(0).data : deferredAttrNodes.get(0);
+        }
+
+        class StuckNode extends GraphUtils.TarjanNode<DeferredAttrNode, StuckNode> {
+
+            Set<StuckNode> deps = new HashSet<>();
+
+            StuckNode(DeferredAttrNode data) {
+                super(data);
+            }
+
+            @Override
+            public DependencyKind[] getSupportedDependencyKinds() {
+                return new DependencyKind[] { Infer.DependencyKind.STUCK };
+            }
+
+            @Override
+            public Collection<? extends StuckNode> getDependenciesByKind(DependencyKind dk) {
+                if (dk == Infer.DependencyKind.STUCK) {
+                    return deps;
+                } else {
+                    throw new IllegalStateException();
+                }
+            }
+
+            @Override
+            public Iterable<? extends StuckNode> getAllDependencies() {
+                return deps;
+            }
+        }
     }
 
     /**
@@ -52,11 +52,9 @@ import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.EnumMap;
 import java.util.EnumSet;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.LinkedHashSet;
 import java.util.Map;
 import java.util.Properties;
 import java.util.Set;
@@ -1655,12 +1653,10 @@ public class Infer {
     class GraphSolver {
 
         InferenceContext inferenceContext;
-        Map<Type, Set<Type>> stuckDeps;
         Warner warn;
 
-        GraphSolver(InferenceContext inferenceContext, Map<Type, Set<Type>> stuckDeps, Warner warn) {
+        GraphSolver(InferenceContext inferenceContext, Warner warn) {
             this.inferenceContext = inferenceContext;
-            this.stuckDeps = stuckDeps;
             this.warn = warn;
         }
 
@@ -1671,7 +1667,7 @@ public class Infer {
          */
         void solve(GraphStrategy sstrategy) {
             doIncorporation(inferenceContext, warn); //initial propagation of bounds
-            InferenceGraph inferenceGraph = new InferenceGraph(stuckDeps);
+            InferenceGraph inferenceGraph = new InferenceGraph();
             while (!sstrategy.done()) {
                 if (dependenciesFolder != null) {
                     //add this graph to the pending queue
@@ -1720,12 +1716,12 @@ public class Infer {
          */
         class Node extends GraphUtils.TarjanNode<ListBuffer<Type>, Node> implements DottableNode<ListBuffer<Type>, Node> {
 
-            /** map listing all dependencies (grouped by kind) */
-            EnumMap<DependencyKind, Set<Node>> deps;
+            /** node dependencies */
+            Set<Node> deps;
 
             Node(Type ivar) {
                 super(ListBuffer.of(ivar));
-                this.deps = new EnumMap<>(DependencyKind.class);
+                this.deps = new HashSet<>();
             }
 
             @Override
@@ -1734,76 +1730,53 @@ public class Infer {
             }
 
             public Iterable<? extends Node> getAllDependencies() {
-                return getDependencies(DependencyKind.values());
+                return deps;
             }
 
             @Override
             public Collection<? extends Node> getDependenciesByKind(GraphUtils.DependencyKind dk) {
-                return getDependencies((DependencyKind)dk);
-            }
-
-            /**
-             * Retrieves all dependencies with given kind(s).
-             */
-            protected Set<Node> getDependencies(DependencyKind... depKinds) {
-                Set<Node> buf = new LinkedHashSet<>();
-                for (DependencyKind dk : depKinds) {
-                    Set<Node> depsByKind = deps.get(dk);
-                    if (depsByKind != null) {
-                        buf.addAll(depsByKind);
-                    }
+                if (dk == DependencyKind.BOUND) {
+                    return deps;
+                } else {
+                    throw new IllegalStateException();
                 }
-                return buf;
             }
 
             /**
              * Adds dependency with given kind.
              */
-            protected void addDependency(DependencyKind dk, Node depToAdd) {
-                Set<Node> depsByKind = deps.get(dk);
-                if (depsByKind == null) {
-                    depsByKind = new LinkedHashSet<>();
-                    deps.put(dk, depsByKind);
-                }
-                depsByKind.add(depToAdd);
+            protected void addDependency(Node depToAdd) {
+                deps.add(depToAdd);
             }
 
             /**
              * Add multiple dependencies of same given kind.
              */
-            protected void addDependencies(DependencyKind dk, Set<Node> depsToAdd) {
+            protected void addDependencies(Set<Node> depsToAdd) {
                 for (Node n : depsToAdd) {
-                    addDependency(dk, n);
+                    addDependency(n);
                 }
             }
 
             /**
              * Remove a dependency, regardless of its kind.
              */
-            protected Set<DependencyKind> removeDependency(Node n) {
-                Set<DependencyKind> removedKinds = new HashSet<>();
-                for (DependencyKind dk : DependencyKind.values()) {
-                    Set<Node> depsByKind = deps.get(dk);
-                    if (depsByKind == null) continue;
-                    if (depsByKind.remove(n)) {
-                        removedKinds.add(dk);
-                    }
-                }
-                return removedKinds;
+            protected boolean removeDependency(Node n) {
+                return deps.remove(n);
             }
 
             /**
              * Compute closure of a give node, by recursively walking
              * through all its dependencies (of given kinds)
              */
-            protected Set<Node> closure(DependencyKind... depKinds) {
+            protected Set<Node> closure() {
                 boolean progress = true;
                 Set<Node> closure = new HashSet<>();
                 closure.add(this);
                 while (progress) {
                     progress = false;
                     for (Node n1 : new HashSet<>(closure)) {
-                        progress = closure.addAll(n1.getDependencies(depKinds));
+                        progress = closure.addAll(n1.deps);
                     }
                 }
                 return closure;
@@ -1815,9 +1788,8 @@ public class Infer {
              */
             protected boolean isLeaf() {
                 //no deps, or only one self dep
-                Set<Node> allDeps = getDependencies(DependencyKind.BOUND, DependencyKind.STUCK);
-                if (allDeps.isEmpty()) return true;
-                for (Node n : allDeps) {
+                if (deps.isEmpty()) return true;
+                for (Node n : deps) {
                     if (n != this) {
                         return false;
                     }
@@ -1834,24 +1806,15 @@ public class Infer {
                 for (Node n : nodes) {
                     Assert.check(n.data.length() == 1, "Attempt to merge a compound node!");
                     data.appendList(n.data);
-                    for (DependencyKind dk : DependencyKind.values()) {
-                        addDependencies(dk, n.getDependencies(dk));
-                    }
+                    addDependencies(n.deps);
                 }
                 //update deps
-                EnumMap<DependencyKind, Set<Node>> deps2 = new EnumMap<>(DependencyKind.class);
-                for (DependencyKind dk : DependencyKind.values()) {
-                    for (Node d : getDependencies(dk)) {
-                        Set<Node> depsByKind = deps2.get(dk);
-                        if (depsByKind == null) {
-                            depsByKind = new LinkedHashSet<>();
-                            deps2.put(dk, depsByKind);
-                        }
-                        if (data.contains(d.data.first())) {
-                            depsByKind.add(this);
-                        } else {
-                            depsByKind.add(d);
-                        }
+                Set<Node> deps2 = new HashSet<>();
+                for (Node d : deps) {
+                    if (data.contains(d.data.first())) {
+                        deps2.add(this);
+                    } else {
+                        deps2.add(d);
                     }
                 }
                 deps = deps2;
@@ -1862,9 +1825,9 @@ public class Infer {
              * topology.
              */
            private void graphChanged(Node from, Node to) {
-                if (removeDependency(from)) { is replaced by the old loop
-                for (DependencyKind dk : removeDependency(from)) {
+                if (removeDependency(from)) {
                     if (to != null) {
-                        addDependency(dk, to);
+                        addDependency(to);
                     }
                 }
             }
@@ -1880,22 +1843,19 @@ public class Infer {
             public Properties dependencyAttributes(Node sink, GraphUtils.DependencyKind dk) {
                 Properties p = new Properties();
                 p.put("style", ((DependencyKind)dk).dotSyle);
-                if (dk == DependencyKind.STUCK) return p;
-                else {
-                    StringBuilder buf = new StringBuilder();
-                    String sep = "";
-                    for (Type from : data) {
-                        UndetVar uv = (UndetVar)inferenceContext.asUndetVar(from);
-                        for (Type bound : uv.getBounds(InferenceBound.values())) {
-                            if (bound.containsAny(List.from(sink.data))) {
-                                buf.append(sep);
-                                buf.append(bound);
-                                sep = ",";
-                            }
+                StringBuilder buf = new StringBuilder();
+                String sep = "";
+                for (Type from : data) {
+                    UndetVar uv = (UndetVar)inferenceContext.asUndetVar(from);
+                    for (Type bound : uv.getBounds(InferenceBound.values())) {
+                        if (bound.containsAny(List.from(sink.data))) {
+                            buf.append(sep);
+                            buf.append(bound);
+                            sep = ",";
                         }
                     }
-                    p.put("label", "\"" + buf.toString() + "\"");
                 }
+                p.put("label", "\"" + buf.toString() + "\"");
                 return p;
             }
         }
@@ -1903,8 +1863,8 @@ public class Infer {
         /** the nodes in the inference graph */
         ArrayList<Node> nodes;
 
-        InferenceGraph(Map<Type, Set<Type>> optDeps) {
-            initNodes(optDeps);
+        InferenceGraph() {
+            initNodes();
         }
 
         /**
@@ -1946,7 +1906,7 @@ public class Infer {
          * in the graph. For each component containing more than one node, a super node is
          * created, effectively replacing the original cyclic nodes.
          */
-        void initNodes(Map<Type, Set<Type>> stuckDeps) {
+        void initNodes() {
            //add nodes
            nodes = new ArrayList<>();
            for (Type t : inferenceContext.restvars()) {
@@ -1955,17 +1915,12 @@ public class Infer {
            //add dependencies
            for (Node n_i : nodes) {
                Type i = n_i.data.first();
-                Set<Type> optDepsByNode = stuckDeps.get(i);
                for (Node n_j : nodes) {
                    Type j = n_j.data.first();
                    UndetVar uv_i = (UndetVar)inferenceContext.asUndetVar(i);
                    if (Type.containsAny(uv_i.getBounds(InferenceBound.values()), List.of(j))) {
                        //update i's bound dependencies
-                        n_i.addDependency(DependencyKind.BOUND, n_j);
-                    }
-                    if (optDepsByNode != null && optDepsByNode.contains(j)) {
-                        //update i's stuck dependencies
-                        n_i.addDependency(DependencyKind.STUCK, n_j);
+                        n_i.addDependency(n_j);
                    }
                }
            }
@@ -469,15 +469,11 @@ class InferenceContext {
        }
    }
 
-    private void solve(GraphStrategy ss, Warner warn) {
-        solve(ss, new HashMap<Type, Set<Type>>(), warn);
-    }
-
    /**
     * Solve with given graph strategy.
     */
-    private void solve(GraphStrategy ss, Map<Type, Set<Type>> stuckDeps, Warner warn) {
-        GraphSolver s = infer.new GraphSolver(this, stuckDeps, warn);
+    private void solve(GraphStrategy ss, Warner warn) {
+        GraphSolver s = infer.new GraphSolver(this, warn);
        s.solve(ss);
    }
 
@@ -506,12 +502,12 @@ class InferenceContext {
    /**
     * Solve at least one variable in given list.
     */
-    public void solveAny(List<Type> varsToSolve, Map<Type, Set<Type>> optDeps, Warner warn) {
+    public void solveAny(List<Type> varsToSolve, Warner warn) {
        solve(infer.new BestLeafSolver(varsToSolve.intersect(restvars())) {
            public boolean done() {
                return instvars().intersect(varsToSolve).nonEmpty();
            }
-        }, optDeps, warn);
+        }, warn);
    }
 
    /**
@@ -0,0 +1,43 @@
+/*
+ * Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation. Oracle designates this
+ * particular file as subject to the "Classpath" exception as provided
+ * by Oracle in the LICENSE file that accompanied this code.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/**
+ * @test
+ * @bug 8156954
+ * @summary javac incorrectly complains of incompatible types
+ * @compile T8156954.java
+ */
+import java.util.function.Function;
+
+class T8156954 {
+
+    <T, R> void m1(Function<R, T> f1, Function<T, R> f2, R r) { }
+    <T, R> void m2(Function<T, R> f1, Function<R, T> f2, R r) { }
+
+    void m(Integer intValue) {
+        m1(o -> o, o -> o , intValue);
+        m2(o -> o, o -> o , intValue);
+    }
+}