commit 9f50973389
Author: Lana Steuck
Date:   2010-07-29 22:04:41 -07:00

840 changed files with 18411 additions and 11653 deletions
.hgtags
.hgtags-top-repo
Makefile
corba
.hgtags
src/share/classes
com/sun/corba/se/impl
org/omg/CORBA
sun/corba
hotspot
.hgtags
agent/src/share/classes/sun/jvm/hotspot
make
hotspot_version
linux/makefiles
solaris/makefiles
windows/makefiles
src

@ -73,3 +73,7 @@ b5dab6a313fdff4c043250e4d9c8f66fd624d27e jdk7-b93
d260f892491e040ae385a8e6df59557a7d721abf jdk7-b96
7e406ebed9a5968b584f3c3e6b60893b5d6d9741 jdk7-b97
db6e660120446c407e2d908d52ec046592b21726 jdk7-b98
c4c8a5bc54f66abc68cd185d9294042121922154 jdk7-b99
2d6ba7a221915bdf0311acc5641c7f3875cb793e jdk7-b100
2548ac036b8fca3326d058d758e6df8355a42469 jdk7-b101
88db80c8e49cea352c2900f689600dc410761c1f jdk7-b102

@ -73,3 +73,7 @@ fd3663286e77b9f13c39eee124db2beb079b3ca6 jdk7-b95
cf71cb5151166f35433afebaf67dbf34a704a170 jdk7-b96
5e197c942c6ebd8b92f324a31049c5f1d26d40ef jdk7-b97
6cea9984d73d74de0cd01f30d07ac0a1ed196117 jdk7-b98
e7f18db469a3e947b7096bfd12e87380e5a042cd jdk7-b99
b218a53ec7d3d42be61d31d6917a6c5c037b6f56 jdk7-b100
4193eaf5f1b82794c6a0fb1a8d11af43d1b1d611 jdk7-b101
a136a51f5113da4dad3853b74a8536ab583ab112 jdk7-b102

@ -1,5 +1,5 @@
# #
# Copyright (c) 1995, 2009, Oracle and/or its affiliates. All rights reserved. # Copyright (c) 1995, 2010, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. # DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
# #
# This code is free software; you can redistribute it and/or modify it # This code is free software; you can redistribute it and/or modify it
@ -29,10 +29,6 @@ ifndef TOPDIR
TOPDIR:=. TOPDIR:=.
endif endif
ifndef CONTROL_TOPDIR
CONTROL_TOPDIR=$(TOPDIR)
endif
# Openjdk sources (only used if SKIP_OPENJDK_BUILD!=true) # Openjdk sources (only used if SKIP_OPENJDK_BUILD!=true)
OPENJDK_SOURCETREE=$(TOPDIR)/openjdk OPENJDK_SOURCETREE=$(TOPDIR)/openjdk
OPENJDK_BUILDDIR:=$(shell \ OPENJDK_BUILDDIR:=$(shell \
@ -120,7 +116,7 @@ endif
all_product_build:: all_product_build::
@$(FINISH_ECHO) @$(FINISH_ECHO)
# Generis build of basic repo series # Generic build of basic repo series
generic_build_repo_series:: generic_build_repo_series::
$(MKDIR) -p $(OUTPUTDIR) $(MKDIR) -p $(OUTPUTDIR)
$(MKDIR) -p $(OUTPUTDIR)/j2sdk-image $(MKDIR) -p $(OUTPUTDIR)/j2sdk-image
@ -179,11 +175,15 @@ endif
# The install process needs to know what the DEBUG_NAME is, so # The install process needs to know what the DEBUG_NAME is, so
# look for INSTALL_DEBUG_NAME in the install rules. # look for INSTALL_DEBUG_NAME in the install rules.
# #
# NOTE: On windows, do not use $(ABS_BOOTDIR_OUTPUTDIR)-$(DEBUG_NAME).
# Due to the use of short paths in $(ABS_OUTPUTDIR), this may
# not be the same location.
#
# Location of fresh bootdir output # Location of fresh bootdir output
ABS_BOOTDIR_OUTPUTDIR=$(ABS_OUTPUTDIR)/bootjdk ABS_BOOTDIR_OUTPUTDIR=$(ABS_OUTPUTDIR)/bootjdk
FRESH_BOOTDIR=$(ABS_BOOTDIR_OUTPUTDIR)/j2sdk-image FRESH_BOOTDIR=$(ABS_BOOTDIR_OUTPUTDIR)/j2sdk-image
FRESH_DEBUG_BOOTDIR=$(ABS_BOOTDIR_OUTPUTDIR)-$(DEBUG_NAME)/j2sdk-image FRESH_DEBUG_BOOTDIR=$(ABS_BOOTDIR_OUTPUTDIR)/../$(PLATFORM)-$(ARCH)-$(DEBUG_NAME)/j2sdk-image
create_fresh_product_bootdir: FRC create_fresh_product_bootdir: FRC
@$(START_ECHO) @$(START_ECHO)
@ -248,10 +248,14 @@ build_product_image:
generic_build_repo_series generic_build_repo_series
@$(FINISH_ECHO) @$(FINISH_ECHO)
# NOTE: On windows, do not use $(ABS_OUTPUTDIR)-$(DEBUG_NAME).
# Due to the use of short paths in $(ABS_OUTPUTDIR), this may
# not be the same location.
generic_debug_build: generic_debug_build:
@$(START_ECHO) @$(START_ECHO)
$(MAKE) \ $(MAKE) \
ALT_OUTPUTDIR=$(ABS_OUTPUTDIR)-$(DEBUG_NAME) \ ALT_OUTPUTDIR=$(ABS_OUTPUTDIR)/../$(PLATFORM)-$(ARCH)-$(DEBUG_NAME) \
DEBUG_NAME=$(DEBUG_NAME) \ DEBUG_NAME=$(DEBUG_NAME) \
GENERATE_DOCS=false \ GENERATE_DOCS=false \
$(BOOT_CYCLE_DEBUG_SETTINGS) \ $(BOOT_CYCLE_DEBUG_SETTINGS) \
@ -348,8 +352,8 @@ endif
clobber:: clobber::
$(RM) -r $(OUTPUTDIR)/* $(RM) -r $(OUTPUTDIR)/*
$(RM) -r $(OUTPUTDIR)-debug/* $(RM) -r $(OUTPUTDIR)/../$(PLATFORM)-$(ARCH)-debug/*
$(RM) -r $(OUTPUTDIR)-fastdebug/* $(RM) -r $(OUTPUTDIR)/../$(PLATFORM)-$(ARCH)-fastdebug/*
-($(RMDIR) -p $(OUTPUTDIR) > $(DEV_NULL) 2>&1; $(TRUE)) -($(RMDIR) -p $(OUTPUTDIR) > $(DEV_NULL) 2>&1; $(TRUE))
clean: clobber clean: clobber
@ -550,6 +554,56 @@ ifeq ($(BUNDLE_RULES_AVAILABLE), true)
include $(BUNDLE_RULES) include $(BUNDLE_RULES)
endif endif
################################################################
# rule to test
################################################################
.NOTPARALLEL: test
test: test_clean test_start test_summary
test_start:
@$(ECHO) "Tests started at `$(DATE)`"
test_clean:
$(RM) $(OUTPUTDIR)/test_failures.txt $(OUTPUTDIR)/test_log.txt
test_summary: $(OUTPUTDIR)/test_failures.txt
@$(ECHO) "#################################################"
@$(ECHO) "Tests completed at `$(DATE)`"
@( $(EGREP) '^TEST STATS:' $(OUTPUTDIR)/test_log.txt \
|| $(ECHO) "No TEST STATS seen in log" )
@$(ECHO) "For complete details see: $(OUTPUTDIR)/test_log.txt"
@$(ECHO) "#################################################"
@if [ -s $< ] ; then \
$(ECHO) "ERROR: Test failure count: `$(CAT) $< | $(WC) -l`"; \
$(CAT) $<; \
exit 1; \
else \
$(ECHO) "Success! No failures detected"; \
fi
# Get failure list from log
$(OUTPUTDIR)/test_failures.txt: $(OUTPUTDIR)/test_log.txt
@$(RM) $@
@( $(EGREP) '^FAILED:' $< || $(ECHO) "" ) > $@
# Get log file of all tests run
JDK_TO_TEST := $(shell \
if [ -d "$(ABS_OUTPUTDIR)/j2sdk-image" ] ; then \
$(ECHO) "$(ABS_OUTPUTDIR)/j2sdk-image"; \
elif [ -d "$(ABS_OUTPUTDIR)/bin" ] ; then \
$(ECHO) "$(ABS_OUTPUTDIR)"; \
elif [ "$(PRODUCT_HOME)" != "" -a -d "$(PRODUCT_HOME)/bin" ] ; then \
$(ECHO) "$(PRODUCT_HOME)"; \
fi \
)
$(OUTPUTDIR)/test_log.txt:
$(RM) $@
( $(CD) test && \
$(MAKE) NO_STOPPING=- PRODUCT_HOME=$(JDK_TO_TEST) \
) | tee $@
################################################################ ################################################################
# JPRT rule to build # JPRT rule to build
################################################################ ################################################################
@ -560,7 +614,7 @@ include ./make/jprt.gmk
# PHONY # PHONY
################################################################ ################################################################
.PHONY: all \ .PHONY: all test test_start test_summary test_clean \
generic_build_repo_series \ generic_build_repo_series \
what clobber insane \ what clobber insane \
dev dev-build dev-sanity dev-clobber \ dev dev-build dev-sanity dev-clobber \

@ -73,3 +73,7 @@ bcd2fc089227559ac5be927923609fac29f067fa jdk7-b91
edc2a2659c77dabc55cb55bb617bad89e3a05bb3 jdk7-b96
4ec9d59374caa1e5d72fa802291b4d66955a4936 jdk7-b97
3b99409057e4c255da946f9f540d051a5ef4ab23 jdk7-b98
95db968660e7d87c345d5cf3dc2e3db037fb7220 jdk7-b99
a56d734a1e970e1a21a8f4feb13053e9a33674c7 jdk7-b100
86a239832646a74811695428984b6947c0bd6dc8 jdk7-b101
78561a95779090b5106c8d0f1a75360a027ef087 jdk7-b102

@ -75,7 +75,7 @@ public class CorbaResourceUtil {
args[1] = (arg1 != null ? arg1.toString() : "null"); args[1] = (arg1 != null ? arg1.toString() : "null");
args[2] = (arg2 != null ? arg2.toString() : "null"); args[2] = (arg2 != null ? arg2.toString() : "null");
return java.text.MessageFormat.format(format, args); return java.text.MessageFormat.format(format, (Object[]) args);
} }
private static boolean resourcesInitialized = false; private static boolean resourcesInitialized = false;

@ -350,7 +350,7 @@ public final class ObjectUtility {
if (useToString) { if (useToString) {
try { try {
cls.getDeclaredMethod( "toString", null ) ; cls.getDeclaredMethod( "toString", (Class[])null ) ;
return true ; return true ;
} catch (Exception exc) { } catch (Exception exc) {
return false ; return false ;

@ -108,8 +108,8 @@ public class ExceptionHandlerImpl implements ExceptionHandler
try { try {
helperClass = Class.forName( helperName, true, loader ) ; helperClass = Class.forName( helperName, true, loader ) ;
Method idMethod = helperClass.getDeclaredMethod( "id", null ) ; Method idMethod = helperClass.getDeclaredMethod( "id", (Class[])null ) ;
setId( (String)idMethod.invoke( null, null ) ) ; setId( (String)idMethod.invoke( null, (Object[])null ) ) ;
} catch (Exception ex) { } catch (Exception ex) {
throw wrapper.badHelperIdMethod( ex, helperName ) ; throw wrapper.badHelperIdMethod( ex, helperName ) ;
} }

@ -589,7 +589,7 @@ abstract public class ORB {
this.getClass().getMethod("create_operation_list", argc); this.getClass().getMethod("create_operation_list", argc);
// OK, the method exists, so invoke it and be happy. // OK, the method exists, so invoke it and be happy.
Object[] argx = { oper }; java.lang.Object[] argx = { oper };
return (org.omg.CORBA.NVList)meth.invoke(this, argx); return (org.omg.CORBA.NVList)meth.invoke(this, argx);
} }
catch( java.lang.reflect.InvocationTargetException exs ) { catch( java.lang.reflect.InvocationTargetException exs ) {

@ -187,7 +187,7 @@ public final class Bridge
try { try {
// Invoke the ObjectInputStream.latestUserDefinedLoader method // Invoke the ObjectInputStream.latestUserDefinedLoader method
return (ClassLoader)latestUserDefinedLoaderMethod.invoke(null, return (ClassLoader)latestUserDefinedLoaderMethod.invoke(null,
NO_ARGS); (Object[])NO_ARGS);
} catch (InvocationTargetException ite) { } catch (InvocationTargetException ite) {
Error err = new Error( Error err = new Error(
"sun.corba.Bridge.latestUserDefinedLoader: " + ite ) ; "sun.corba.Bridge.latestUserDefinedLoader: " + ite ) ;

@ -102,3 +102,8 @@ d38f45079fe98792a7381dbb4b64f5b589ec8c58 jdk7-b94
573e8ea5fd68e8e51eb6308d283ac3b3889d15e0 hs19-b02
5f42499e57adc16380780f40541e1a66cd601891 jdk7-b97
8a045b3f5c13eaad92ff4baf15ca671845fcad1a jdk7-b98
6a236384a379642b5a2398e2819db9ab4e711e9b jdk7-b99
ad1977f08c4d69162a0775fe3f9576b9fd521d10 jdk7-b100
6c3a919105b68c15b7db923ec9a00006e9560910 jdk7-b101
ad1977f08c4d69162a0775fe3f9576b9fd521d10 hs19-b03
c5cadf1a07717955cf60dbaec16e35b529fd2cb0 jdk7-b102

@ -35,7 +35,6 @@ import sun.jvm.hotspot.utilities.*;
public class NMethod extends CodeBlob { public class NMethod extends CodeBlob {
private static long pcDescSize; private static long pcDescSize;
private static CIntegerField zombieInstructionSizeField;
private static sun.jvm.hotspot.types.OopField methodField; private static sun.jvm.hotspot.types.OopField methodField;
/** != InvocationEntryBci if this nmethod is an on-stack replacement method */ /** != InvocationEntryBci if this nmethod is an on-stack replacement method */
private static CIntegerField entryBCIField; private static CIntegerField entryBCIField;
@ -88,7 +87,6 @@ public class NMethod extends CodeBlob {
private static void initialize(TypeDataBase db) { private static void initialize(TypeDataBase db) {
Type type = db.lookupType("nmethod"); Type type = db.lookupType("nmethod");
zombieInstructionSizeField = type.getCIntegerField("_zombie_instruction_size");
methodField = type.getOopField("_method"); methodField = type.getOopField("_method");
entryBCIField = type.getCIntegerField("_entry_bci"); entryBCIField = type.getCIntegerField("_entry_bci");
osrLinkField = type.getAddressField("_osr_link"); osrLinkField = type.getAddressField("_osr_link");

@ -72,6 +72,7 @@ public class BytecodeDisassembler {
addBytecodeClass(Bytecodes._invokestatic, BytecodeInvoke.class); addBytecodeClass(Bytecodes._invokestatic, BytecodeInvoke.class);
addBytecodeClass(Bytecodes._invokespecial, BytecodeInvoke.class); addBytecodeClass(Bytecodes._invokespecial, BytecodeInvoke.class);
addBytecodeClass(Bytecodes._invokeinterface, BytecodeInvoke.class); addBytecodeClass(Bytecodes._invokeinterface, BytecodeInvoke.class);
addBytecodeClass(Bytecodes._invokedynamic, BytecodeInvoke.class);
addBytecodeClass(Bytecodes._jsr, BytecodeJsr.class); addBytecodeClass(Bytecodes._jsr, BytecodeJsr.class);
addBytecodeClass(Bytecodes._jsr_w, BytecodeJsrW.class); addBytecodeClass(Bytecodes._jsr_w, BytecodeJsrW.class);
addBytecodeClass(Bytecodes._iload, BytecodeLoad.class); addBytecodeClass(Bytecodes._iload, BytecodeLoad.class);

@ -54,15 +54,31 @@ public class BytecodeInvoke extends BytecodeWithCPIndex {
// returns the name of the invoked method // returns the name of the invoked method
public Symbol name() { public Symbol name() {
ConstantPool cp = method().getConstants(); ConstantPool cp = method().getConstants();
if (isInvokedynamic()) {
int[] nt = cp.getNameAndTypeAt(indexForFieldOrMethod());
return cp.getSymbolAt(nt[0]);
}
return cp.getNameRefAt(index()); return cp.getNameRefAt(index());
} }
// returns the signature of the invoked method // returns the signature of the invoked method
public Symbol signature() { public Symbol signature() {
ConstantPool cp = method().getConstants(); ConstantPool cp = method().getConstants();
if (isInvokedynamic()) {
int[] nt = cp.getNameAndTypeAt(indexForFieldOrMethod());
return cp.getSymbolAt(nt[1]);
}
return cp.getSignatureRefAt(index()); return cp.getSignatureRefAt(index());
} }
public int getSecondaryIndex() {
if (isInvokedynamic()) {
// change byte-ordering of 4-byte integer
return VM.getVM().getBytes().swapInt(javaSignedWordAt(1));
}
return super.getSecondaryIndex(); // throw an error
}
public Method getInvokedMethod() { public Method getInvokedMethod() {
return method().getConstants().getMethodRefAt(index()); return method().getConstants().getMethodRefAt(index());
} }
@ -87,6 +103,7 @@ public class BytecodeInvoke extends BytecodeWithCPIndex {
public boolean isInvokevirtual() { return adjustedInvokeCode() == Bytecodes._invokevirtual; } public boolean isInvokevirtual() { return adjustedInvokeCode() == Bytecodes._invokevirtual; }
public boolean isInvokestatic() { return adjustedInvokeCode() == Bytecodes._invokestatic; } public boolean isInvokestatic() { return adjustedInvokeCode() == Bytecodes._invokestatic; }
public boolean isInvokespecial() { return adjustedInvokeCode() == Bytecodes._invokespecial; } public boolean isInvokespecial() { return adjustedInvokeCode() == Bytecodes._invokespecial; }
public boolean isInvokedynamic() { return adjustedInvokeCode() == Bytecodes._invokedynamic; }
public boolean isValid() { return isInvokeinterface() || public boolean isValid() { return isInvokeinterface() ||
isInvokevirtual() || isInvokevirtual() ||
@ -104,6 +121,11 @@ public class BytecodeInvoke extends BytecodeWithCPIndex {
buf.append(spaces); buf.append(spaces);
buf.append('#'); buf.append('#');
buf.append(Integer.toString(indexForFieldOrMethod())); buf.append(Integer.toString(indexForFieldOrMethod()));
if (isInvokedynamic()) {
buf.append('(');
buf.append(Integer.toString(getSecondaryIndex()));
buf.append(')');
}
buf.append(" [Method "); buf.append(" [Method ");
StringBuffer sigBuf = new StringBuffer(); StringBuffer sigBuf = new StringBuffer();
new SignatureConverter(signature(), sigBuf).iterateReturntype(); new SignatureConverter(signature(), sigBuf).iterateReturntype();

@ -25,6 +25,7 @@
package sun.jvm.hotspot.interpreter; package sun.jvm.hotspot.interpreter;
import sun.jvm.hotspot.oops.*; import sun.jvm.hotspot.oops.*;
import sun.jvm.hotspot.runtime.*;
import sun.jvm.hotspot.utilities.*; import sun.jvm.hotspot.utilities.*;
public class BytecodeLoadConstant extends BytecodeWithCPIndex { public class BytecodeLoadConstant extends BytecodeWithCPIndex {
@ -32,10 +33,47 @@ public class BytecodeLoadConstant extends BytecodeWithCPIndex {
super(method, bci); super(method, bci);
} }
public boolean hasCacheIndex() {
// normal ldc uses CP index, but fast_aldc uses swapped CP cache index
return javaCode() != code();
}
public int index() { public int index() {
return javaCode() == Bytecodes._ldc ? int i = javaCode() == Bytecodes._ldc ?
(int) (0xFF & javaByteAt(1)) (int) (0xFF & javaByteAt(1))
: (int) (0xFFFF & javaShortAt(1)); : (int) (0xFFFF & javaShortAt(1));
if (hasCacheIndex()) {
return (0xFFFF & VM.getVM().getBytes().swapShort((short) i));
} else {
return i;
}
}
public int poolIndex() {
int i = index();
if (hasCacheIndex()) {
ConstantPoolCache cpCache = method().getConstants().getCache();
return cpCache.getEntryAt(i).getConstantPoolIndex();
} else {
return i;
}
}
public int cacheIndex() {
if (hasCacheIndex()) {
return index();
} else {
return -1; // no cache index
}
}
private Oop getCachedConstant() {
int i = cacheIndex();
if (i >= 0) {
ConstantPoolCache cpCache = method().getConstants().getCache();
return cpCache.getEntryAt(i).getF1();
}
return null;
} }
public void verify() { public void verify() {
@ -58,6 +96,7 @@ public class BytecodeLoadConstant extends BytecodeWithCPIndex {
// has to be int or float or String or Klass // has to be int or float or String or Klass
return (ctag.isUnresolvedString() || ctag.isString() return (ctag.isUnresolvedString() || ctag.isString()
|| ctag.isUnresolvedKlass() || ctag.isKlass() || ctag.isUnresolvedKlass() || ctag.isKlass()
|| ctag.isMethodHandle() || ctag.isMethodType()
|| ctag.isInt() || ctag.isFloat())? true: false; || ctag.isInt() || ctag.isFloat())? true: false;
} }
} }
@ -112,7 +151,7 @@ public class BytecodeLoadConstant extends BytecodeWithCPIndex {
public String getConstantValue() { public String getConstantValue() {
ConstantPool cpool = method().getConstants(); ConstantPool cpool = method().getConstants();
int cpIndex = index(); int cpIndex = poolIndex();
ConstantTag ctag = cpool.getTagAt(cpIndex); ConstantTag ctag = cpool.getTagAt(cpIndex);
if (ctag.isInt()) { if (ctag.isInt()) {
return "<int " + Integer.toString(cpool.getIntAt(cpIndex)) +">"; return "<int " + Integer.toString(cpool.getIntAt(cpIndex)) +">";
@ -149,6 +188,18 @@ public class BytecodeLoadConstant extends BytecodeWithCPIndex {
} else { } else {
throw new RuntimeException("should not reach here"); throw new RuntimeException("should not reach here");
} }
} else if (ctag.isMethodHandle()) {
Oop x = getCachedConstant();
int refidx = cpool.getMethodHandleIndexAt(cpIndex);
int refkind = cpool.getMethodHandleRefKindAt(cpIndex);
return "<MethodHandle kind=" + Integer.toString(refkind) +
" ref=" + Integer.toString(refidx)
+ (x == null ? "" : " @" + x.getHandle()) + ">";
} else if (ctag.isMethodType()) {
Oop x = getCachedConstant();
int refidx = cpool.getMethodTypeIndexAt(cpIndex);
return "<MethodType " + cpool.getSymbolAt(refidx).asString()
+ (x == null ? "" : " @" + x.getHandle()) + ">";
} else { } else {
if (Assert.ASSERTS_ENABLED) { if (Assert.ASSERTS_ENABLED) {
Assert.that(false, "invalid load constant type"); Assert.that(false, "invalid load constant type");
@ -162,7 +213,12 @@ public class BytecodeLoadConstant extends BytecodeWithCPIndex {
buf.append(getJavaBytecodeName()); buf.append(getJavaBytecodeName());
buf.append(spaces); buf.append(spaces);
buf.append('#'); buf.append('#');
buf.append(Integer.toString(index())); buf.append(Integer.toString(poolIndex()));
if (hasCacheIndex()) {
buf.append('(');
buf.append(Integer.toString(cacheIndex()));
buf.append(')');
}
buf.append(spaces); buf.append(spaces);
buf.append(getConstantValue()); buf.append(getConstantValue());
if (code() != javaCode()) { if (code() != javaCode()) {
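
A note on the index() logic above: when ldc/ldc_w has been rewritten to fast_aldc/fast_aldc_w, the operand is a constant-pool-cache index whose two bytes may be stored swapped relative to Java (big-endian) order, which is why the code byte-swaps and masks the value. A small self-contained sketch of that step, assuming a little-endian target and using hypothetical helper names rather than the SA API:

public class SwappedOperandDemo {
    // Raw two-byte operand as javaShortAt(1) would read it: big-endian.
    static int readBigEndian(byte b1, byte b2) {
        return ((b1 & 0xFF) << 8) | (b2 & 0xFF);
    }

    // Counterpart of 0xFFFF & Bytes.swapShort((short) i) on a little-endian VM.
    static int swappedCacheIndex(int rawOperand) {
        return 0xFFFF & Short.reverseBytes((short) rawOperand);
    }

    public static void main(String[] args) {
        int raw = readBigEndian((byte) 0x2A, (byte) 0x00); // bytes 2A 00 in the code stream
        System.out.println(swappedCacheIndex(raw));        // 42: the CP cache index
    }
}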

@ -37,12 +37,19 @@ public abstract class BytecodeWithCPIndex extends Bytecode {
// the constant pool index for this bytecode // the constant pool index for this bytecode
public int index() { return 0xFFFF & javaShortAt(1); } public int index() { return 0xFFFF & javaShortAt(1); }
public int getSecondaryIndex() {
throw new IllegalArgumentException("must be invokedynamic");
}
protected int indexForFieldOrMethod() { protected int indexForFieldOrMethod() {
ConstantPoolCache cpCache = method().getConstants().getCache(); ConstantPoolCache cpCache = method().getConstants().getCache();
// get ConstantPool index from ConstantPoolCacheIndex at given bci // get ConstantPool index from ConstantPoolCacheIndex at given bci
int cpCacheIndex = index(); int cpCacheIndex = index();
if (cpCache == null) { if (cpCache == null) {
return cpCacheIndex; return cpCacheIndex;
} else if (code() == Bytecodes._invokedynamic) {
int secondaryIndex = getSecondaryIndex();
return cpCache.getMainEntryAt(secondaryIndex).getConstantPoolIndex();
} else { } else {
// change byte-ordering and go via cache // change byte-ordering and go via cache
return cpCache.getEntryAt((int) (0xFFFF & VM.getVM().getBytes().swapShort((short) cpCacheIndex))).getConstantPoolIndex(); return cpCache.getEntryAt((int) (0xFFFF & VM.getVM().getBytes().swapShort((short) cpCacheIndex))).getConstantPoolIndex();

@ -222,7 +222,7 @@ public class Bytecodes {
public static final int _invokespecial = 183; // 0xb7 public static final int _invokespecial = 183; // 0xb7
public static final int _invokestatic = 184; // 0xb8 public static final int _invokestatic = 184; // 0xb8
public static final int _invokeinterface = 185; // 0xb9 public static final int _invokeinterface = 185; // 0xb9
public static final int _xxxunusedxxx = 186; // 0xba public static final int _invokedynamic = 186; // 0xba
public static final int _new = 187; // 0xbb public static final int _new = 187; // 0xbb
public static final int _newarray = 188; // 0xbc public static final int _newarray = 188; // 0xbc
public static final int _anewarray = 189; // 0xbd public static final int _anewarray = 189; // 0xbd
@ -269,9 +269,12 @@ public class Bytecodes {
public static final int _fast_invokevfinal = 226; public static final int _fast_invokevfinal = 226;
public static final int _fast_linearswitch = 227; public static final int _fast_linearswitch = 227;
public static final int _fast_binaryswitch = 228; public static final int _fast_binaryswitch = 228;
public static final int _shouldnotreachhere = 229; // For debugging public static final int _fast_aldc = 229;
public static final int _fast_aldc_w = 230;
public static final int _return_register_finalizer = 231;
public static final int _shouldnotreachhere = 232; // For debugging
public static final int number_of_codes = 230; public static final int number_of_codes = 233;
public static int specialLengthAt(Method method, int bci) { public static int specialLengthAt(Method method, int bci) {
int code = codeAt(method, bci); int code = codeAt(method, bci);
@ -458,9 +461,9 @@ public class Bytecodes {
def(_dconst_1 , "dconst_1" , "b" , null , BasicType.getTDouble() , 2, false); def(_dconst_1 , "dconst_1" , "b" , null , BasicType.getTDouble() , 2, false);
def(_bipush , "bipush" , "bc" , null , BasicType.getTInt() , 1, false); def(_bipush , "bipush" , "bc" , null , BasicType.getTInt() , 1, false);
def(_sipush , "sipush" , "bcc" , null , BasicType.getTInt() , 1, false); def(_sipush , "sipush" , "bcc" , null , BasicType.getTInt() , 1, false);
def(_ldc , "ldc" , "bi" , null , BasicType.getTIllegal(), 1, true ); def(_ldc , "ldc" , "bk" , null , BasicType.getTIllegal(), 1, true );
def(_ldc_w , "ldc_w" , "bii" , null , BasicType.getTIllegal(), 1, true ); def(_ldc_w , "ldc_w" , "bkk" , null , BasicType.getTIllegal(), 1, true );
def(_ldc2_w , "ldc2_w" , "bii" , null , BasicType.getTIllegal(), 2, true ); def(_ldc2_w , "ldc2_w" , "bkk" , null , BasicType.getTIllegal(), 2, true );
def(_iload , "iload" , "bi" , "wbii" , BasicType.getTInt() , 1, false); def(_iload , "iload" , "bi" , "wbii" , BasicType.getTInt() , 1, false);
def(_lload , "lload" , "bi" , "wbii" , BasicType.getTLong() , 2, false); def(_lload , "lload" , "bi" , "wbii" , BasicType.getTLong() , 2, false);
def(_fload , "fload" , "bi" , "wbii" , BasicType.getTFloat() , 1, false); def(_fload , "fload" , "bi" , "wbii" , BasicType.getTFloat() , 1, false);
@ -618,26 +621,26 @@ public class Bytecodes {
def(_dreturn , "dreturn" , "b" , null , BasicType.getTDouble() , -2, true ); def(_dreturn , "dreturn" , "b" , null , BasicType.getTDouble() , -2, true );
def(_areturn , "areturn" , "b" , null , BasicType.getTObject() , -1, true ); def(_areturn , "areturn" , "b" , null , BasicType.getTObject() , -1, true );
def(_return , "return" , "b" , null , BasicType.getTVoid() , 0, true ); def(_return , "return" , "b" , null , BasicType.getTVoid() , 0, true );
def(_getstatic , "getstatic" , "bjj" , null , BasicType.getTIllegal(), 1, true ); def(_getstatic , "getstatic" , "bJJ" , null , BasicType.getTIllegal(), 1, true );
def(_putstatic , "putstatic" , "bjj" , null , BasicType.getTIllegal(), -1, true ); def(_putstatic , "putstatic" , "bJJ" , null , BasicType.getTIllegal(), -1, true );
def(_getfield , "getfield" , "bjj" , null , BasicType.getTIllegal(), 0, true ); def(_getfield , "getfield" , "bJJ" , null , BasicType.getTIllegal(), 0, true );
def(_putfield , "putfield" , "bjj" , null , BasicType.getTIllegal(), -2, true ); def(_putfield , "putfield" , "bJJ" , null , BasicType.getTIllegal(), -2, true );
def(_invokevirtual , "invokevirtual" , "bjj" , null , BasicType.getTIllegal(), -1, true ); def(_invokevirtual , "invokevirtual" , "bJJ" , null , BasicType.getTIllegal(), -1, true );
def(_invokespecial , "invokespecial" , "bjj" , null , BasicType.getTIllegal(), -1, true ); def(_invokespecial , "invokespecial" , "bJJ" , null , BasicType.getTIllegal(), -1, true );
def(_invokestatic , "invokestatic" , "bjj" , null , BasicType.getTIllegal(), 0, true ); def(_invokestatic , "invokestatic" , "bJJ" , null , BasicType.getTIllegal(), 0, true );
def(_invokeinterface , "invokeinterface" , "bjj__", null , BasicType.getTIllegal(), -1, true ); def(_invokeinterface , "invokeinterface" , "bJJ__", null , BasicType.getTIllegal(), -1, true );
def(_xxxunusedxxx , "xxxunusedxxx" , null , null , BasicType.getTVoid() , 0, false); def(_invokedynamic , "invokedynamic" , "bJJJJ", null , BasicType.getTIllegal(), -1, true );
def(_new , "new" , "bii" , null , BasicType.getTObject() , 1, true ); def(_new , "new" , "bkk" , null , BasicType.getTObject() , 1, true );
def(_newarray , "newarray" , "bc" , null , BasicType.getTObject() , 0, true ); def(_newarray , "newarray" , "bc" , null , BasicType.getTObject() , 0, true );
def(_anewarray , "anewarray" , "bii" , null , BasicType.getTObject() , 0, true ); def(_anewarray , "anewarray" , "bkk" , null , BasicType.getTObject() , 0, true );
def(_arraylength , "arraylength" , "b" , null , BasicType.getTVoid() , 0, true ); def(_arraylength , "arraylength" , "b" , null , BasicType.getTVoid() , 0, true );
def(_athrow , "athrow" , "b" , null , BasicType.getTVoid() , -1, true ); def(_athrow , "athrow" , "b" , null , BasicType.getTVoid() , -1, true );
def(_checkcast , "checkcast" , "bii" , null , BasicType.getTObject() , 0, true ); def(_checkcast , "checkcast" , "bkk" , null , BasicType.getTObject() , 0, true );
def(_instanceof , "instanceof" , "bii" , null , BasicType.getTInt() , 0, true ); def(_instanceof , "instanceof" , "bkk" , null , BasicType.getTInt() , 0, true );
def(_monitorenter , "monitorenter" , "b" , null , BasicType.getTVoid() , -1, true ); def(_monitorenter , "monitorenter" , "b" , null , BasicType.getTVoid() , -1, true );
def(_monitorexit , "monitorexit" , "b" , null , BasicType.getTVoid() , -1, true ); def(_monitorexit , "monitorexit" , "b" , null , BasicType.getTVoid() , -1, true );
def(_wide , "wide" , "" , null , BasicType.getTVoid() , 0, false); def(_wide , "wide" , "" , null , BasicType.getTVoid() , 0, false);
def(_multianewarray , "multianewarray" , "biic" , null , BasicType.getTObject() , 1, true ); def(_multianewarray , "multianewarray" , "bkkc" , null , BasicType.getTObject() , 1, true );
def(_ifnull , "ifnull" , "boo" , null , BasicType.getTVoid() , -1, false); def(_ifnull , "ifnull" , "boo" , null , BasicType.getTVoid() , -1, false);
def(_ifnonnull , "ifnonnull" , "boo" , null , BasicType.getTVoid() , -1, false); def(_ifnonnull , "ifnonnull" , "boo" , null , BasicType.getTVoid() , -1, false);
def(_goto_w , "goto_w" , "boooo", null , BasicType.getTVoid() , 0, false); def(_goto_w , "goto_w" , "boooo", null , BasicType.getTVoid() , 0, false);
@ -646,38 +649,44 @@ public class Bytecodes {
// JVM bytecodes // JVM bytecodes
// bytecode bytecode name format wide f. result tp stk traps std code // bytecode bytecode name format wide f. result tp stk traps std code
def(_fast_agetfield , "fast_agetfield" , "bjj" , null , BasicType.getTObject() , 0, true , _getfield ); def(_fast_agetfield , "fast_agetfield" , "bJJ" , null , BasicType.getTObject() , 0, true , _getfield );
def(_fast_bgetfield , "fast_bgetfield" , "bjj" , null , BasicType.getTInt() , 0, true , _getfield ); def(_fast_bgetfield , "fast_bgetfield" , "bJJ" , null , BasicType.getTInt() , 0, true , _getfield );
def(_fast_cgetfield , "fast_cgetfield" , "bjj" , null , BasicType.getTChar() , 0, true , _getfield ); def(_fast_cgetfield , "fast_cgetfield" , "bJJ" , null , BasicType.getTChar() , 0, true , _getfield );
def(_fast_dgetfield , "fast_dgetfield" , "bjj" , null , BasicType.getTDouble() , 0, true , _getfield ); def(_fast_dgetfield , "fast_dgetfield" , "bJJ" , null , BasicType.getTDouble() , 0, true , _getfield );
def(_fast_fgetfield , "fast_fgetfield" , "bjj" , null , BasicType.getTFloat() , 0, true , _getfield ); def(_fast_fgetfield , "fast_fgetfield" , "bJJ" , null , BasicType.getTFloat() , 0, true , _getfield );
def(_fast_igetfield , "fast_igetfield" , "bjj" , null , BasicType.getTInt() , 0, true , _getfield ); def(_fast_igetfield , "fast_igetfield" , "bJJ" , null , BasicType.getTInt() , 0, true , _getfield );
def(_fast_lgetfield , "fast_lgetfield" , "bjj" , null , BasicType.getTLong() , 0, true , _getfield ); def(_fast_lgetfield , "fast_lgetfield" , "bJJ" , null , BasicType.getTLong() , 0, true , _getfield );
def(_fast_sgetfield , "fast_sgetfield" , "bjj" , null , BasicType.getTShort() , 0, true , _getfield ); def(_fast_sgetfield , "fast_sgetfield" , "bJJ" , null , BasicType.getTShort() , 0, true , _getfield );
def(_fast_aputfield , "fast_aputfield" , "bjj" , null , BasicType.getTObject() , 0, true , _putfield ); def(_fast_aputfield , "fast_aputfield" , "bJJ" , null , BasicType.getTObject() , 0, true , _putfield );
def(_fast_bputfield , "fast_bputfield" , "bjj" , null , BasicType.getTInt() , 0, true , _putfield ); def(_fast_bputfield , "fast_bputfield" , "bJJ" , null , BasicType.getTInt() , 0, true , _putfield );
def(_fast_cputfield , "fast_cputfield" , "bjj" , null , BasicType.getTChar() , 0, true , _putfield ); def(_fast_cputfield , "fast_cputfield" , "bJJ" , null , BasicType.getTChar() , 0, true , _putfield );
def(_fast_dputfield , "fast_dputfield" , "bjj" , null , BasicType.getTDouble() , 0, true , _putfield ); def(_fast_dputfield , "fast_dputfield" , "bJJ" , null , BasicType.getTDouble() , 0, true , _putfield );
def(_fast_fputfield , "fast_fputfield" , "bjj" , null , BasicType.getTFloat() , 0, true , _putfield ); def(_fast_fputfield , "fast_fputfield" , "bJJ" , null , BasicType.getTFloat() , 0, true , _putfield );
def(_fast_iputfield , "fast_iputfield" , "bjj" , null , BasicType.getTInt() , 0, true , _putfield ); def(_fast_iputfield , "fast_iputfield" , "bJJ" , null , BasicType.getTInt() , 0, true , _putfield );
def(_fast_lputfield , "fast_lputfield" , "bjj" , null , BasicType.getTLong() , 0, true , _putfield ); def(_fast_lputfield , "fast_lputfield" , "bJJ" , null , BasicType.getTLong() , 0, true , _putfield );
def(_fast_sputfield , "fast_sputfield" , "bjj" , null , BasicType.getTShort() , 0, true , _putfield ); def(_fast_sputfield , "fast_sputfield" , "bJJ" , null , BasicType.getTShort() , 0, true , _putfield );
def(_fast_aload_0 , "fast_aload_0" , "b" , null , BasicType.getTObject() , 1, true , _aload_0 ); def(_fast_aload_0 , "fast_aload_0" , "b" , null , BasicType.getTObject() , 1, true , _aload_0 );
def(_fast_iaccess_0 , "fast_iaccess_0" , "b_jj" , null , BasicType.getTInt() , 1, true , _aload_0 ); def(_fast_iaccess_0 , "fast_iaccess_0" , "b_JJ" , null , BasicType.getTInt() , 1, true , _aload_0 );
def(_fast_aaccess_0 , "fast_aaccess_0" , "b_jj" , null , BasicType.getTObject() , 1, true , _aload_0 ); def(_fast_aaccess_0 , "fast_aaccess_0" , "b_JJ" , null , BasicType.getTObject() , 1, true , _aload_0 );
def(_fast_faccess_0 , "fast_faccess_0" , "b_jj" , null , BasicType.getTObject() , 1, true , _aload_0 ); def(_fast_faccess_0 , "fast_faccess_0" , "b_JJ" , null , BasicType.getTObject() , 1, true , _aload_0 );
def(_fast_iload , "fast_iload" , "bi" , null , BasicType.getTInt() , 1, false, _iload); def(_fast_iload , "fast_iload" , "bi" , null , BasicType.getTInt() , 1, false, _iload);
def(_fast_iload2 , "fast_iload2" , "bi_i" , null , BasicType.getTInt() , 2, false, _iload); def(_fast_iload2 , "fast_iload2" , "bi_i" , null , BasicType.getTInt() , 2, false, _iload);
def(_fast_icaload , "fast_icaload" , "bi_" , null , BasicType.getTInt() , 0, false, _iload); def(_fast_icaload , "fast_icaload" , "bi_" , null , BasicType.getTInt() , 0, false, _iload);
// Faster method invocation. // Faster method invocation.
def(_fast_invokevfinal , "fast_invokevfinal" , "bjj" , null , BasicType.getTIllegal(), -1, true, _invokevirtual); def(_fast_invokevfinal , "fast_invokevfinal" , "bJJ" , null , BasicType.getTIllegal(), -1, true, _invokevirtual);
def(_fast_linearswitch , "fast_linearswitch" , "" , null , BasicType.getTVoid() , -1, false, _lookupswitch ); def(_fast_linearswitch , "fast_linearswitch" , "" , null , BasicType.getTVoid() , -1, false, _lookupswitch );
def(_fast_binaryswitch , "fast_binaryswitch" , "" , null , BasicType.getTVoid() , -1, false, _lookupswitch ); def(_fast_binaryswitch , "fast_binaryswitch" , "" , null , BasicType.getTVoid() , -1, false, _lookupswitch );
def(_return_register_finalizer, "return_register_finalizer", "b" , null , BasicType.getTVoid() , 0, true, _return );
def(_fast_aldc , "fast_aldc" , "bj" , null , BasicType.getTObject(), 1, true, _ldc );
def(_fast_aldc_w , "fast_aldc_w" , "bJJ" , null , BasicType.getTObject(), 1, true, _ldc_w );
def(_shouldnotreachhere , "_shouldnotreachhere" , "b" , null , BasicType.getTVoid() , 0, false); def(_shouldnotreachhere , "_shouldnotreachhere" , "b" , null , BasicType.getTVoid() , 0, false);
if (Assert.ASSERTS_ENABLED) { if (Assert.ASSERTS_ENABLED) {

@ -152,7 +152,7 @@ public class ConstantPool extends Oop implements ClassConstants {
return res; return res;
} }
public int getNameAndTypeAt(int which) { public int[] getNameAndTypeAt(int which) {
if (Assert.ASSERTS_ENABLED) { if (Assert.ASSERTS_ENABLED) {
Assert.that(getTagAt(which).isNameAndType(), "Corrupted constant pool"); Assert.that(getTagAt(which).isNameAndType(), "Corrupted constant pool");
} }
@ -160,18 +160,16 @@ public class ConstantPool extends Oop implements ClassConstants {
if (DEBUG) { if (DEBUG) {
System.err.println("ConstantPool.getNameAndTypeAt(" + which + "): result = " + i); System.err.println("ConstantPool.getNameAndTypeAt(" + which + "): result = " + i);
} }
return i; return new int[] { extractLowShortFromInt(i), extractHighShortFromInt(i) };
} }
public Symbol getNameRefAt(int which) { public Symbol getNameRefAt(int which) {
int refIndex = getNameAndTypeAt(getNameAndTypeRefIndexAt(which)); int nameIndex = getNameAndTypeAt(getNameAndTypeRefIndexAt(which))[0];
int nameIndex = extractLowShortFromInt(refIndex);
return getSymbolAt(nameIndex); return getSymbolAt(nameIndex);
} }
public Symbol getSignatureRefAt(int which) { public Symbol getSignatureRefAt(int which) {
int refIndex = getNameAndTypeAt(getNameAndTypeRefIndexAt(which)); int sigIndex = getNameAndTypeAt(getNameAndTypeRefIndexAt(which))[1];
int sigIndex = extractHighShortFromInt(refIndex);
return getSymbolAt(sigIndex); return getSymbolAt(sigIndex);
} }
@ -220,11 +218,11 @@ public class ConstantPool extends Oop implements ClassConstants {
/** Lookup for entries consisting of (name_index, signature_index) */ /** Lookup for entries consisting of (name_index, signature_index) */
public int getNameRefIndexAt(int index) { public int getNameRefIndexAt(int index) {
int refIndex = getNameAndTypeAt(index); int[] refIndex = getNameAndTypeAt(index);
if (DEBUG) { if (DEBUG) {
System.err.println("ConstantPool.getNameRefIndexAt(" + index + "): refIndex = " + refIndex); System.err.println("ConstantPool.getNameRefIndexAt(" + index + "): refIndex = " + refIndex[0]+"/"+refIndex[1]);
} }
int i = extractLowShortFromInt(refIndex); int i = refIndex[0];
if (DEBUG) { if (DEBUG) {
System.err.println("ConstantPool.getNameRefIndexAt(" + index + "): result = " + i); System.err.println("ConstantPool.getNameRefIndexAt(" + index + "): result = " + i);
} }
@ -233,17 +231,53 @@ public class ConstantPool extends Oop implements ClassConstants {
/** Lookup for entries consisting of (name_index, signature_index) */ /** Lookup for entries consisting of (name_index, signature_index) */
public int getSignatureRefIndexAt(int index) { public int getSignatureRefIndexAt(int index) {
int refIndex = getNameAndTypeAt(index); int[] refIndex = getNameAndTypeAt(index);
if (DEBUG) { if (DEBUG) {
System.err.println("ConstantPool.getSignatureRefIndexAt(" + index + "): refIndex = " + refIndex); System.err.println("ConstantPool.getSignatureRefIndexAt(" + index + "): refIndex = " + refIndex[0]+"/"+refIndex[1]);
} }
int i = extractHighShortFromInt(refIndex); int i = refIndex[1];
if (DEBUG) { if (DEBUG) {
System.err.println("ConstantPool.getSignatureRefIndexAt(" + index + "): result = " + i); System.err.println("ConstantPool.getSignatureRefIndexAt(" + index + "): result = " + i);
} }
return i; return i;
} }
/** Lookup for MethodHandle entries. */
public int getMethodHandleIndexAt(int i) {
if (Assert.ASSERTS_ENABLED) {
Assert.that(getTagAt(i).isMethodHandle(), "Corrupted constant pool");
}
int res = extractHighShortFromInt(getIntAt(i));
if (DEBUG) {
System.err.println("ConstantPool.getMethodHandleIndexAt(" + i + "): result = " + res);
}
return res;
}
/** Lookup for MethodHandle entries. */
public int getMethodHandleRefKindAt(int i) {
if (Assert.ASSERTS_ENABLED) {
Assert.that(getTagAt(i).isMethodHandle(), "Corrupted constant pool");
}
int res = extractLowShortFromInt(getIntAt(i));
if (DEBUG) {
System.err.println("ConstantPool.getMethodHandleRefKindAt(" + i + "): result = " + res);
}
return res;
}
/** Lookup for MethodType entries. */
public int getMethodTypeIndexAt(int i) {
if (Assert.ASSERTS_ENABLED) {
Assert.that(getTagAt(i).isMethodType(), "Corrupted constant pool");
}
int res = getIntAt(i);
if (DEBUG) {
System.err.println("ConstantPool.getMethodHandleTypeAt(" + i + "): result = " + res);
}
return res;
}
final private static String[] nameForTag = new String[] { final private static String[] nameForTag = new String[] {
}; };
@ -261,6 +295,8 @@ public class ConstantPool extends Oop implements ClassConstants {
case JVM_CONSTANT_Methodref: return "JVM_CONSTANT_Methodref"; case JVM_CONSTANT_Methodref: return "JVM_CONSTANT_Methodref";
case JVM_CONSTANT_InterfaceMethodref: return "JVM_CONSTANT_InterfaceMethodref"; case JVM_CONSTANT_InterfaceMethodref: return "JVM_CONSTANT_InterfaceMethodref";
case JVM_CONSTANT_NameAndType: return "JVM_CONSTANT_NameAndType"; case JVM_CONSTANT_NameAndType: return "JVM_CONSTANT_NameAndType";
case JVM_CONSTANT_MethodHandle: return "JVM_CONSTANT_MethodHandle";
case JVM_CONSTANT_MethodType: return "JVM_CONSTANT_MethodType";
case JVM_CONSTANT_Invalid: return "JVM_CONSTANT_Invalid"; case JVM_CONSTANT_Invalid: return "JVM_CONSTANT_Invalid";
case JVM_CONSTANT_UnresolvedClass: return "JVM_CONSTANT_UnresolvedClass"; case JVM_CONSTANT_UnresolvedClass: return "JVM_CONSTANT_UnresolvedClass";
case JVM_CONSTANT_UnresolvedClassInError: return "JVM_CONSTANT_UnresolvedClassInError"; case JVM_CONSTANT_UnresolvedClassInError: return "JVM_CONSTANT_UnresolvedClassInError";
@ -317,6 +353,8 @@ public class ConstantPool extends Oop implements ClassConstants {
case JVM_CONSTANT_Methodref: case JVM_CONSTANT_Methodref:
case JVM_CONSTANT_InterfaceMethodref: case JVM_CONSTANT_InterfaceMethodref:
case JVM_CONSTANT_NameAndType: case JVM_CONSTANT_NameAndType:
case JVM_CONSTANT_MethodHandle:
case JVM_CONSTANT_MethodType:
visitor.doInt(new IntField(new NamedFieldIdentifier(nameForTag(ctag)), indexOffset(index), true), true); visitor.doInt(new IntField(new NamedFieldIdentifier(nameForTag(ctag)), indexOffset(index), true), true);
break; break;
} }
@ -467,6 +505,18 @@ public class ConstantPool extends Oop implements ClassConstants {
+ ", type = " + signatureIndex); + ", type = " + signatureIndex);
break; break;
} }
case JVM_CONSTANT_MethodHandle: {
dos.writeByte(cpConstType);
int value = getIntAt(ci);
short nameIndex = (short) extractLowShortFromInt(value);
short signatureIndex = (short) extractHighShortFromInt(value);
dos.writeShort(nameIndex);
dos.writeShort(signatureIndex);
if (DEBUG) debugMessage("CP[" + ci + "] = N&T name = " + nameIndex
+ ", type = " + signatureIndex);
break;
}
default: default:
throw new InternalError("unknown tag: " + cpConstType); throw new InternalError("unknown tag: " + cpConstType);
} // switch } // switch
@ -488,10 +538,12 @@ public class ConstantPool extends Oop implements ClassConstants {
// //
private static int extractHighShortFromInt(int val) { private static int extractHighShortFromInt(int val) {
// must stay in sync with constantPoolOopDesc::name_and_type_at_put, method_at_put, etc.
return (val >> 16) & 0xFFFF; return (val >> 16) & 0xFFFF;
} }
private static int extractLowShortFromInt(int val) { private static int extractLowShortFromInt(int val) {
// must stay in sync with constantPoolOopDesc::name_and_type_at_put, method_at_put, etc.
return val & 0xFFFF; return val & 0xFFFF;
} }
} }
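
For readers following the ConstantPool change: the extract helpers above rely on a NameAndType entry packing its name index into the low 16 bits and its signature index into the high 16 bits of a single int, which is what getNameAndTypeAt now splits into a two-element array. A minimal standalone sketch of that packing (hypothetical names, not the SA classes):

public class NameAndTypePacking {
    static int pack(int nameIndex, int signatureIndex) {
        return (signatureIndex << 16) | (nameIndex & 0xFFFF);
    }

    // Matches extractLowShortFromInt / extractHighShortFromInt above.
    static int[] unpack(int value) {
        return new int[] { value & 0xFFFF, (value >> 16) & 0xFFFF };
    }

    public static void main(String[] args) {
        int packed = pack(7, 9);
        int[] nt = unpack(packed);
        System.out.println("name=" + nt[0] + " signature=" + nt[1]); // name=7 signature=9
    }
}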

@ -78,6 +78,31 @@ public class ConstantPoolCache extends Oop {
return new ConstantPoolCacheEntry(this, i); return new ConstantPoolCacheEntry(this, i);
} }
public static boolean isSecondaryIndex(int i) { return (i < 0); }
public static int decodeSecondaryIndex(int i) { return isSecondaryIndex(i) ? ~i : i; }
public static int encodeSecondaryIndex(int i) { return !isSecondaryIndex(i) ? ~i : i; }
// secondary entries hold invokedynamic call site bindings
public ConstantPoolCacheEntry getSecondaryEntryAt(int i) {
ConstantPoolCacheEntry e = new ConstantPoolCacheEntry(this, decodeSecondaryIndex(i));
if (Assert.ASSERTS_ENABLED) {
Assert.that(e.isSecondaryEntry(), "must be a secondary entry");
}
return e;
}
public ConstantPoolCacheEntry getMainEntryAt(int i) {
if (isSecondaryIndex(i)) {
// run through an extra level of indirection:
i = getSecondaryEntryAt(i).getMainEntryIndex();
}
ConstantPoolCacheEntry e = new ConstantPoolCacheEntry(this, i);
if (Assert.ASSERTS_ENABLED) {
Assert.that(!e.isSecondaryEntry(), "must not be a secondary entry");
}
return e;
}
public int getIntAt(int entry, int fld) { public int getIntAt(int entry, int fld) {
//alignObjectSize ? //alignObjectSize ?
long offset = baseOffset + /*alignObjectSize*/entry * elementSize + fld* getHeap().getIntSize(); long offset = baseOffset + /*alignObjectSize*/entry * elementSize + fld* getHeap().getIntSize();

@ -28,6 +28,7 @@ import java.util.*;
import sun.jvm.hotspot.debugger.*; import sun.jvm.hotspot.debugger.*;
import sun.jvm.hotspot.runtime.*; import sun.jvm.hotspot.runtime.*;
import sun.jvm.hotspot.types.*; import sun.jvm.hotspot.types.*;
import sun.jvm.hotspot.utilities.*;
public class ConstantPoolCacheEntry { public class ConstantPoolCacheEntry {
private static long size; private static long size;
@ -67,9 +68,23 @@ public class ConstantPoolCacheEntry {
} }
public int getConstantPoolIndex() { public int getConstantPoolIndex() {
if (Assert.ASSERTS_ENABLED) {
Assert.that(!isSecondaryEntry(), "must not be a secondary CP entry");
}
return (int) (getIndices() & 0xFFFF); return (int) (getIndices() & 0xFFFF);
} }
public boolean isSecondaryEntry() {
return (getIndices() & 0xFFFF) == 0;
}
public int getMainEntryIndex() {
if (Assert.ASSERTS_ENABLED) {
Assert.that(isSecondaryEntry(), "must be a secondary CP entry");
}
return (int) (getIndices() >>> 16);
}
private long getIndices() { private long getIndices() {
return cp.getHandle().getCIntegerAt(indices.getOffset() + offset, indices.getSize(), indices.isUnsigned()); return cp.getHandle().getCIntegerAt(indices.getOffset() + offset, indices.getSize(), indices.isUnsigned());
} }
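
The two hunks above use two related encodings: a secondary constant-pool-cache index is marked by storing its bitwise complement (so it is negative), and a secondary cache entry is marked by a zero low half of its indices word, with the main entry index in the upper 16 bits. A small self-contained sketch of both (hypothetical demo class, not the SA API):

public class SecondaryIndexDemo {
    // Mirrors encodeSecondaryIndex/decodeSecondaryIndex: the complement of a
    // non-negative index is negative, which is how isSecondaryIndex() spots it.
    static int encode(int i) { return ~i; }
    static int decode(int i) { return ~i; }
    static boolean isSecondary(int i) { return i < 0; }

    // Mirrors ConstantPoolCacheEntry: a secondary entry has zero in the low
    // 16 bits of its indices word and the main entry index in the high bits.
    static boolean isSecondaryEntry(long indices) { return (indices & 0xFFFF) == 0; }
    static int mainEntryIndex(long indices) { return (int) (indices >>> 16); }

    public static void main(String[] args) {
        int enc = encode(42);
        System.out.println(isSecondary(enc) + " " + decode(enc)); // true 42

        long indices = 17L << 16;                       // a secondary entry word
        System.out.println(isSecondaryEntry(indices));  // true
        System.out.println(mainEntryIndex(indices));    // 17
    }
}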

@ -566,6 +566,7 @@ public class GenerateOopMap {
case Bytecodes._invokespecial: case Bytecodes._invokespecial:
case Bytecodes._invokestatic: case Bytecodes._invokestatic:
case Bytecodes._invokeinterface: case Bytecodes._invokeinterface:
case Bytecodes._invokedynamic:
// FIXME: print signature of referenced method (need more // FIXME: print signature of referenced method (need more
// accessors in ConstantPool and ConstantPoolCache) // accessors in ConstantPool and ConstantPoolCache)
int idx = currentBC.getIndexBig(); int idx = currentBC.getIndexBig();
@ -605,6 +606,7 @@ public class GenerateOopMap {
case Bytecodes._invokespecial: case Bytecodes._invokespecial:
case Bytecodes._invokestatic: case Bytecodes._invokestatic:
case Bytecodes._invokeinterface: case Bytecodes._invokeinterface:
case Bytecodes._invokedynamic:
// FIXME: print signature of referenced method (need more // FIXME: print signature of referenced method (need more
// accessors in ConstantPool and ConstantPoolCache) // accessors in ConstantPool and ConstantPoolCache)
int idx = currentBC.getIndexBig(); int idx = currentBC.getIndexBig();
@ -1134,6 +1136,7 @@ public class GenerateOopMap {
case Bytecodes._invokespecial: case Bytecodes._invokespecial:
case Bytecodes._invokestatic: case Bytecodes._invokestatic:
case Bytecodes._invokeinterface: case Bytecodes._invokeinterface:
case Bytecodes._invokedynamic:
_itr_send = itr; _itr_send = itr;
_report_result_for_send = true; _report_result_for_send = true;
break; break;
@ -1379,6 +1382,7 @@ public class GenerateOopMap {
case Bytecodes._invokevirtual: case Bytecodes._invokevirtual:
case Bytecodes._invokespecial: doMethod(false, false, itr.getIndexBig(), itr.bci()); break; case Bytecodes._invokespecial: doMethod(false, false, itr.getIndexBig(), itr.bci()); break;
case Bytecodes._invokestatic: doMethod(true, false, itr.getIndexBig(), itr.bci()); break; case Bytecodes._invokestatic: doMethod(true, false, itr.getIndexBig(), itr.bci()); break;
case Bytecodes._invokedynamic: doMethod(false, true, itr.getIndexBig(), itr.bci()); break;
case Bytecodes._invokeinterface: doMethod(false, true, itr.getIndexBig(), itr.bci()); break; case Bytecodes._invokeinterface: doMethod(false, true, itr.getIndexBig(), itr.bci()); break;
case Bytecodes._newarray: case Bytecodes._newarray:
case Bytecodes._anewarray: ppNewRef(vCTS, itr.bci()); break; case Bytecodes._anewarray: ppNewRef(vCTS, itr.bci()); break;
@ -1725,7 +1729,7 @@ public class GenerateOopMap {
void doMethod (boolean is_static, boolean is_interface, int idx, int bci) { void doMethod (boolean is_static, boolean is_interface, int idx, int bci) {
// Dig up signature for field in constant pool // Dig up signature for field in constant pool
ConstantPool cp = _method.getConstants(); ConstantPool cp = _method.getConstants();
int nameAndTypeIdx = cp.getNameAndTypeRefIndexAt(idx); int nameAndTypeIdx = cp.getTagAt(idx).isNameAndType() ? idx : cp.getNameAndTypeRefIndexAt(idx);
int signatureIdx = cp.getSignatureRefIndexAt(nameAndTypeIdx); int signatureIdx = cp.getSignatureRefIndexAt(nameAndTypeIdx);
Symbol signature = cp.getSymbolAt(signatureIdx); Symbol signature = cp.getSymbolAt(signatureIdx);

@ -40,6 +40,19 @@ public interface ClassConstants
public static final int JVM_CONSTANT_Methodref = 10; public static final int JVM_CONSTANT_Methodref = 10;
public static final int JVM_CONSTANT_InterfaceMethodref = 11; public static final int JVM_CONSTANT_InterfaceMethodref = 11;
public static final int JVM_CONSTANT_NameAndType = 12; public static final int JVM_CONSTANT_NameAndType = 12;
public static final int JVM_CONSTANT_MethodHandle = 15;
public static final int JVM_CONSTANT_MethodType = 16;
// JVM_CONSTANT_MethodHandle subtypes
public static final int JVM_REF_getField = 1;
public static final int JVM_REF_getStatic = 2;
public static final int JVM_REF_putField = 3;
public static final int JVM_REF_putStatic = 4;
public static final int JVM_REF_invokeVirtual = 5;
public static final int JVM_REF_invokeStatic = 6;
public static final int JVM_REF_invokeSpecial = 7;
public static final int JVM_REF_newInvokeSpecial = 8;
public static final int JVM_REF_invokeInterface = 9;
// HotSpot specific constant pool constant types. // HotSpot specific constant pool constant types.
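
The JVM_REF_* values above are the CONSTANT_MethodHandle reference kinds defined by the JSR 292 class-file format. A tiny hypothetical helper that maps the numbers to their spec names, for reference:

public class RefKindNames {
    static String name(int refKind) {
        switch (refKind) {
            case 1: return "REF_getField";
            case 2: return "REF_getStatic";
            case 3: return "REF_putField";
            case 4: return "REF_putStatic";
            case 5: return "REF_invokeVirtual";
            case 6: return "REF_invokeStatic";
            case 7: return "REF_invokeSpecial";
            case 8: return "REF_newInvokeSpecial";
            case 9: return "REF_invokeInterface";
            default: return "unknown(" + refKind + ")";
        }
    }

    public static void main(String[] args) {
        System.out.println(name(6)); // REF_invokeStatic
    }
}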

@ -54,14 +54,34 @@ public class ByteCodeRewriter
} }
protected short getConstantPoolIndex(int bci) { protected short getConstantPoolIndex(int rawcode, int bci) {
// get ConstantPool index from ConstantPoolCacheIndex at given bci // get ConstantPool index from ConstantPoolCacheIndex at given bci
short cpCacheIndex = method.getBytecodeShortArg(bci); String fmt = Bytecodes.format(rawcode);
int cpCacheIndex;
switch (fmt.length()) {
case 2: cpCacheIndex = method.getBytecodeByteArg(bci); break;
case 3: cpCacheIndex = method.getBytecodeShortArg(bci); break;
case 5:
if (fmt.indexOf("__") >= 0)
cpCacheIndex = method.getBytecodeShortArg(bci);
else
cpCacheIndex = method.getBytecodeIntArg(bci);
break;
default: throw new IllegalArgumentException();
}
if (cpCache == null) { if (cpCache == null) {
return cpCacheIndex; return (short) cpCacheIndex;
} else { } else if (fmt.indexOf("JJJJ") >= 0) {
// change byte-ordering and go via secondary cache entry
return (short) cpCache.getMainEntryAt(bytes.swapInt(cpCacheIndex)).getConstantPoolIndex();
} else if (fmt.indexOf("JJ") >= 0) {
// change byte-ordering and go via cache // change byte-ordering and go via cache
return (short) cpCache.getEntryAt((int) (0xFFFF & bytes.swapShort(cpCacheIndex))).getConstantPoolIndex(); return (short) cpCache.getEntryAt((int) (0xFFFF & bytes.swapShort((short)cpCacheIndex))).getConstantPoolIndex();
} else if (fmt.indexOf("j") >= 0) {
// go via cache
return (short) cpCache.getEntryAt((int) (0xFF & cpCacheIndex)).getConstantPoolIndex();
} else {
return (short) cpCacheIndex;
} }
} }
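
A standalone restatement of the operand-width dispatch used in getConstantPoolIndex above: the Bytecodes format string has one character per bytecode byte, so its length (plus the "__" padding used by invokeinterface) tells the rewriter how wide the index operand is. Hypothetical demo code, not the SA API:

public class OperandWidthDemo {
    static int operandWidth(String fmt) {
        switch (fmt.length()) {
            case 2:  return 1;                              // e.g. "bj" (fast_aldc)
            case 3:  return 2;                              // e.g. "bJJ" (invokevirtual)
            case 5:  return fmt.indexOf("__") >= 0 ? 2 : 4; // "bJJ__" vs "bJJJJ"
            default: throw new IllegalArgumentException(fmt);
        }
    }

    public static void main(String[] args) {
        System.out.println(operandWidth("bJJ"));   // 2
        System.out.println(operandWidth("bJJJJ")); // 4
        System.out.println(operandWidth("bJJ__")); // 2
    }
}
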
@ -100,10 +120,31 @@ public class ByteCodeRewriter
case Bytecodes._invokespecial: case Bytecodes._invokespecial:
case Bytecodes._invokestatic: case Bytecodes._invokestatic:
case Bytecodes._invokeinterface: { case Bytecodes._invokeinterface: {
cpoolIndex = getConstantPoolIndex(bci + 1); cpoolIndex = getConstantPoolIndex(hotspotcode, bci + 1);
writeShort(code, bci + 1, cpoolIndex); writeShort(code, bci + 1, cpoolIndex);
break; break;
} }
case Bytecodes._invokedynamic:
cpoolIndex = getConstantPoolIndex(hotspotcode, bci + 1);
writeShort(code, bci + 1, cpoolIndex);
writeShort(code, bci + 3, (short)0); // clear out trailing bytes
break;
case Bytecodes._ldc_w:
if (hotspotcode != bytecode) {
// fast_aldc_w puts constant in CP cache
cpoolIndex = getConstantPoolIndex(hotspotcode, bci + 1);
writeShort(code, bci + 1, cpoolIndex);
}
break;
case Bytecodes._ldc:
if (hotspotcode != bytecode) {
// fast_aldc puts constant in CP cache
cpoolIndex = getConstantPoolIndex(hotspotcode, bci + 1);
code[bci + 1] = (byte)(cpoolIndex);
}
break;
} }
len = Bytecodes.lengthFor(bytecode); len = Bytecodes.lengthFor(bytecode);

@ -61,10 +61,12 @@ public class ClassWriter implements /* imports */ ClassConstants
protected short _signatureIndex; protected short _signatureIndex;
protected static int extractHighShortFromInt(int val) { protected static int extractHighShortFromInt(int val) {
// must stay in sync with constantPoolOopDesc::name_and_type_at_put, method_at_put, etc.
return (val >> 16) & 0xFFFF; return (val >> 16) & 0xFFFF;
} }
protected static int extractLowShortFromInt(int val) { protected static int extractLowShortFromInt(int val) {
// must stay in sync with constantPoolOopDesc::name_and_type_at_put, method_at_put, etc.
return val & 0xFFFF; return val & 0xFFFF;
} }
@ -297,6 +299,28 @@ public class ClassWriter implements /* imports */ ClassConstants
+ ", type = " + signatureIndex); + ", type = " + signatureIndex);
break; break;
} }
case JVM_CONSTANT_MethodHandle: {
dos.writeByte(cpConstType);
int value = cpool.getIntAt(ci);
short refIndex = (short) extractHighShortFromInt(value);
byte refKind = (byte) extractLowShortFromInt(value);
dos.writeByte(refKind);
dos.writeShort(refIndex);
if (DEBUG) debugMessage("CP[" + ci + "] = MH index = " + refIndex
+ ", kind = " + refKind);
break;
}
case JVM_CONSTANT_MethodType: {
dos.writeByte(cpConstType);
int value = cpool.getIntAt(ci);
short refIndex = (short) value;
dos.writeShort(refIndex);
if (DEBUG) debugMessage("CP[" + ci + "] = MT index = " + refIndex);
break;
}
default: default:
throw new InternalError("Unknown tag: " + cpConstType); throw new InternalError("Unknown tag: " + cpConstType);
} // switch } // switch
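
The two new ClassWriter cases above emit the JSR 292 constant-pool entries in their class-file form: CONSTANT_MethodHandle_info is tag 15 followed by a one-byte reference kind and a two-byte reference index, and CONSTANT_MethodType_info is tag 16 followed by a two-byte descriptor index. A minimal self-contained sketch of that layout (hypothetical demo class, not ClassWriter itself):

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class Jsr292ConstantLayout {
    static byte[] methodHandleEntry(int refKind, int refIndex) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(bos);
        dos.writeByte(15);        // JVM_CONSTANT_MethodHandle
        dos.writeByte(refKind);   // e.g. 6 = REF_invokeStatic
        dos.writeShort(refIndex); // index of the referenced member entry
        return bos.toByteArray();
    }

    static byte[] methodTypeEntry(int descriptorIndex) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(bos);
        dos.writeByte(16);               // JVM_CONSTANT_MethodType
        dos.writeShort(descriptorIndex); // index of the Utf8 descriptor entry
        return bos.toByteArray();
    }

    public static void main(String[] args) throws IOException {
        System.out.println(methodHandleEntry(6, 23).length); // 4 bytes
        System.out.println(methodTypeEntry(31).length);      // 3 bytes
    }
}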

@ -572,6 +572,16 @@ public class HTMLGenerator implements /* imports */ ClassConstants {
buf.cell(Integer.toString(cpool.getIntAt(index))); buf.cell(Integer.toString(cpool.getIntAt(index)));
break; break;
case JVM_CONSTANT_MethodHandle:
buf.cell("JVM_CONSTANT_MethodHandle");
buf.cell(genLowHighShort(cpool.getIntAt(index)));
break;
case JVM_CONSTANT_MethodType:
buf.cell("JVM_CONSTANT_MethodType");
buf.cell(Integer.toString(cpool.getIntAt(index)));
break;
default: default:
throw new InternalError("unknown tag: " + ctag); throw new InternalError("unknown tag: " + ctag);
} }

@ -38,12 +38,26 @@ public class ConstantTag {
private static int JVM_CONSTANT_Methodref = 10; private static int JVM_CONSTANT_Methodref = 10;
private static int JVM_CONSTANT_InterfaceMethodref = 11; private static int JVM_CONSTANT_InterfaceMethodref = 11;
private static int JVM_CONSTANT_NameAndType = 12; private static int JVM_CONSTANT_NameAndType = 12;
private static int JVM_CONSTANT_MethodHandle = 15; // JSR 292
private static int JVM_CONSTANT_MethodType = 16; // JSR 292
private static int JVM_CONSTANT_Invalid = 0; // For bad value initialization private static int JVM_CONSTANT_Invalid = 0; // For bad value initialization
private static int JVM_CONSTANT_UnresolvedClass = 100; // Temporary tag until actual use private static int JVM_CONSTANT_UnresolvedClass = 100; // Temporary tag until actual use
private static int JVM_CONSTANT_ClassIndex = 101; // Temporary tag while constructing constant pool private static int JVM_CONSTANT_ClassIndex = 101; // Temporary tag while constructing constant pool
private static int JVM_CONSTANT_UnresolvedString = 102; // Temporary tag until actual use private static int JVM_CONSTANT_UnresolvedString = 102; // Temporary tag until actual use
private static int JVM_CONSTANT_StringIndex = 103; // Temporary tag while constructing constant pool private static int JVM_CONSTANT_StringIndex = 103; // Temporary tag while constructing constant pool
private static int JVM_CONSTANT_UnresolvedClassInError = 104; // Resolution failed private static int JVM_CONSTANT_UnresolvedClassInError = 104; // Resolution failed
private static int JVM_CONSTANT_Object = 105; // Required for BoundMethodHandle arguments.
// JVM_CONSTANT_MethodHandle subtypes //FIXME: connect these to data structure
private static int JVM_REF_getField = 1;
private static int JVM_REF_getStatic = 2;
private static int JVM_REF_putField = 3;
private static int JVM_REF_putStatic = 4;
private static int JVM_REF_invokeVirtual = 5;
private static int JVM_REF_invokeStatic = 6;
private static int JVM_REF_invokeSpecial = 7;
private static int JVM_REF_newInvokeSpecial = 8;
private static int JVM_REF_invokeInterface = 9;
private byte tag; private byte tag;
@ -62,6 +76,8 @@ public class ConstantTag {
public boolean isDouble() { return tag == JVM_CONSTANT_Double; } public boolean isDouble() { return tag == JVM_CONSTANT_Double; }
public boolean isNameAndType() { return tag == JVM_CONSTANT_NameAndType; } public boolean isNameAndType() { return tag == JVM_CONSTANT_NameAndType; }
public boolean isUtf8() { return tag == JVM_CONSTANT_Utf8; } public boolean isUtf8() { return tag == JVM_CONSTANT_Utf8; }
public boolean isMethodHandle() { return tag == JVM_CONSTANT_MethodHandle; }
public boolean isMethodType() { return tag == JVM_CONSTANT_MethodType; }
public boolean isInvalid() { return tag == JVM_CONSTANT_Invalid; } public boolean isInvalid() { return tag == JVM_CONSTANT_Invalid; }
@ -73,6 +89,8 @@ public class ConstantTag {
public boolean isUnresolvedString() { return tag == JVM_CONSTANT_UnresolvedString; } public boolean isUnresolvedString() { return tag == JVM_CONSTANT_UnresolvedString; }
public boolean isStringIndex() { return tag == JVM_CONSTANT_StringIndex; } public boolean isStringIndex() { return tag == JVM_CONSTANT_StringIndex; }
public boolean isObject() { return tag == JVM_CONSTANT_Object; }
public boolean isKlassReference() { return isKlassIndex() || isUnresolvedKlass(); } public boolean isKlassReference() { return isKlassIndex() || isUnresolvedKlass(); }
public boolean isFieldOrMethod() { return isField() || isMethod() || isInterfaceMethod(); } public boolean isFieldOrMethod() { return isField() || isMethod() || isInterfaceMethod(); }
public boolean isSymbol() { return isUtf8(); } public boolean isSymbol() { return isUtf8(); }
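
For context, the JVM_REF_* reference kinds added above correspond to the one-byte reference_kind of a CONSTANT_MethodHandle_info entry in the JSR 292 class-file format (tag 15, a one-byte kind, a two-byte constant-pool index). A minimal illustrative decoder in C++, not part of the SA or HotSpot API:

#include <cstdint>

// Layout per the JSR 292 class-file format (illustrative names, not HotSpot/SA types).
struct MethodHandleInfo {
  uint8_t  tag;              // JVM_CONSTANT_MethodHandle == 15
  uint8_t  reference_kind;   // JVM_REF_getField (1) .. JVM_REF_invokeInterface (9)
  uint16_t reference_index;  // constant-pool index of the referenced field/method entry
};

// Decode one entry from big-endian class-file bytes starting at the tag byte.
MethodHandleInfo decode_method_handle(const uint8_t* p) {
  MethodHandleInfo info;
  info.tag             = p[0];
  info.reference_kind  = p[1];
  info.reference_index = static_cast<uint16_t>((p[2] << 8) | p[3]);
  return info;
}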

@ -825,6 +825,8 @@ function jdis(method) {
} }
writeln(""); writeln("");
disAsm.decode(new sapkg.interpreter.BytecodeVisitor() { disAsm.decode(new sapkg.interpreter.BytecodeVisitor() {
prologue: function(method) { },
epilogue: function() { },
visit: function(bytecode) { visit: function(bytecode) {
if (hasLines) { if (hasLines) {
var line = method.getLineNumberFromBCI(bci); var line = method.getLineNumberFromBCI(bci);

@ -35,7 +35,7 @@ HOTSPOT_VM_COPYRIGHT=Copyright 2010
HS_MAJOR_VER=19 HS_MAJOR_VER=19
HS_MINOR_VER=0 HS_MINOR_VER=0
HS_BUILD_NUMBER=03 HS_BUILD_NUMBER=04
JDK_MAJOR_VER=1 JDK_MAJOR_VER=1
JDK_MINOR_VER=7 JDK_MINOR_VER=7

@ -138,7 +138,11 @@ ADLCFLAGS += -q -T
# Normally, debugging is done directly on the ad_<arch>*.cpp files. # Normally, debugging is done directly on the ad_<arch>*.cpp files.
# But -g will put #line directives in those files pointing back to <arch>.ad. # But -g will put #line directives in those files pointing back to <arch>.ad.
# Some builds of gcc 3.2 have a bug that gets tickled by the extra #line directives
# so skip it for 3.2 and earlier.
ifneq "$(shell expr \( $(CC_VER_MAJOR) \> 3 \) \| \( \( $(CC_VER_MAJOR) = 3 \) \& \( $(CC_VER_MINOR) \>= 3 \) \))" "0"
ADLCFLAGS += -g ADLCFLAGS += -g
endif
ifdef LP64 ifdef LP64
ADLCFLAGS += -D_LP64 ADLCFLAGS += -D_LP64

@ -40,6 +40,9 @@ GENERATED = $(TOPDIR)/../generated
# tools.jar is needed by the JDI - SA binding # tools.jar is needed by the JDI - SA binding
SA_CLASSPATH = $(BOOT_JAVA_HOME)/lib/tools.jar SA_CLASSPATH = $(BOOT_JAVA_HOME)/lib/tools.jar
# TODO: if it's a modules image, check if SA module is installed.
MODULELIB_PATH= $(BOOT_JAVA_HOME)/lib/modules
# gnumake 3.78.1 does not accept the *s that # gnumake 3.78.1 does not accept the *s that
# are in AGENT_FILES1 and AGENT_FILES2, so use the shell to expand them # are in AGENT_FILES1 and AGENT_FILES2, so use the shell to expand them
AGENT_FILES1 := $(shell /usr/bin/test -d $(AGENT_DIR) && /bin/ls $(AGENT_FILES1)) AGENT_FILES1 := $(shell /usr/bin/test -d $(AGENT_DIR) && /bin/ls $(AGENT_FILES1))
@ -65,7 +68,7 @@ $(GENERATED)/sa-jdi.jar: $(AGENT_FILES1) $(AGENT_FILES2)
echo "ALT_BOOTDIR, BOOTDIR or JAVA_HOME needs to be defined to build SA"; \ echo "ALT_BOOTDIR, BOOTDIR or JAVA_HOME needs to be defined to build SA"; \
exit 1; \ exit 1; \
fi fi
$(QUIETLY) if [ ! -f $(SA_CLASSPATH) ] ; then \ $(QUIETLY) if [ ! -f $(SA_CLASSPATH) -a ! -d $(MODULELIB_PATH) ] ; then \
echo "Missing $(SA_CLASSPATH) file. Use 1.6.0 or later version of JDK";\ echo "Missing $(SA_CLASSPATH) file. Use 1.6.0 or later version of JDK";\
echo ""; \ echo ""; \
exit 1; \ exit 1; \

@ -36,6 +36,9 @@ GENERATED = ../generated
# tools.jar is needed by the JDI - SA binding # tools.jar is needed by the JDI - SA binding
SA_CLASSPATH = $(BOOT_JAVA_HOME)/lib/tools.jar SA_CLASSPATH = $(BOOT_JAVA_HOME)/lib/tools.jar
# TODO: if it's a modules image, check if SA module is installed.
MODULELIB_PATH= $(BOOT_JAVA_HOME)/lib/modules
# gnumake 3.78.1 does not accept the *s that # gnumake 3.78.1 does not accept the *s that
# are in AGENT_FILES1 and AGENT_FILES2, so use the shell to expand them # are in AGENT_FILES1 and AGENT_FILES2, so use the shell to expand them
AGENT_FILES1 := $(shell /usr/bin/test -d $(AGENT_DIR) && /bin/ls $(AGENT_FILES1)) AGENT_FILES1 := $(shell /usr/bin/test -d $(AGENT_DIR) && /bin/ls $(AGENT_FILES1))
@ -59,7 +62,7 @@ $(GENERATED)/sa-jdi.jar: $(AGENT_FILES1) $(AGENT_FILES2)
echo "ALT_BOOTDIR, BOOTDIR or JAVA_HOME needs to be defined to build SA"; \ echo "ALT_BOOTDIR, BOOTDIR or JAVA_HOME needs to be defined to build SA"; \
exit 1; \ exit 1; \
fi fi
$(QUIETLY) if [ ! -f $(SA_CLASSPATH) ] ; then \ $(QUIETLY) if [ ! -f $(SA_CLASSPATH) -a ! -d $(MODULELIB_PATH) ] ; then \
echo "Missing $(SA_CLASSPATH) file. Use 1.6.0 or later version of JDK";\ echo "Missing $(SA_CLASSPATH) file. Use 1.6.0 or later version of JDK";\
echo ""; \ echo ""; \
exit 1; \ exit 1; \

@ -32,6 +32,17 @@ SLASH_JAVA ?= J:
PATH_SEP = ; PATH_SEP = ;
# Need PLATFORM (os-arch combo names) for jdk and hotspot, plus libarch name # Need PLATFORM (os-arch combo names) for jdk and hotspot, plus libarch name
ifeq ($(ARCH_DATA_MODEL),32)
ARCH_DATA_MODEL=32
PLATFORM=windows-i586
VM_PLATFORM=windows_i486
HS_ARCH=x86
MAKE_ARGS += ARCH=x86
MAKE_ARGS += BUILDARCH=i486
MAKE_ARGS += Platform_arch=x86
MAKE_ARGS += Platform_arch_model=x86_32
endif
ifneq ($(shell $(ECHO) $(PROCESSOR_IDENTIFIER) | $(GREP) x86),) ifneq ($(shell $(ECHO) $(PROCESSOR_IDENTIFIER) | $(GREP) x86),)
ARCH_DATA_MODEL=32 ARCH_DATA_MODEL=32
PLATFORM=windows-i586 PLATFORM=windows-i586
@ -43,55 +54,57 @@ ifneq ($(shell $(ECHO) $(PROCESSOR_IDENTIFIER) | $(GREP) x86),)
MAKE_ARGS += Platform_arch_model=x86_32 MAKE_ARGS += Platform_arch_model=x86_32
endif endif
ifneq ($(shell $(ECHO) $(PROCESSOR_IDENTIFIER) | $(GREP) ia64),) ifneq ($(ARCH_DATA_MODEL),32)
ARCH_DATA_MODEL=64 ifneq ($(shell $(ECHO) $(PROCESSOR_IDENTIFIER) | $(GREP) ia64),)
PLATFORM=windows-ia64 ARCH_DATA_MODEL=64
VM_PLATFORM=windows_ia64 PLATFORM=windows-ia64
HS_ARCH=ia64 VM_PLATFORM=windows_ia64
MAKE_ARGS += LP64=1 HS_ARCH=ia64
MAKE_ARGS += ARCH=ia64 MAKE_ARGS += LP64=1
MAKE_ARGS += BUILDARCH=ia64 MAKE_ARGS += ARCH=ia64
MAKE_ARGS += Platform_arch=ia64 MAKE_ARGS += BUILDARCH=ia64
MAKE_ARGS += Platform_arch_model=ia64 MAKE_ARGS += Platform_arch=ia64
endif MAKE_ARGS += Platform_arch_model=ia64
endif
# http://support.microsoft.com/kb/888731 : this can be either # http://support.microsoft.com/kb/888731 : this can be either
# AMD64 for AMD, or EM64T for Intel chips. # AMD64 for AMD, or EM64T for Intel chips.
ifneq ($(shell $(ECHO) $(PROCESSOR_IDENTIFIER) | $(GREP) AMD64),) ifneq ($(shell $(ECHO) $(PROCESSOR_IDENTIFIER) | $(GREP) AMD64),)
ARCH_DATA_MODEL=64 ARCH_DATA_MODEL=64
PLATFORM=windows-amd64 PLATFORM=windows-amd64
VM_PLATFORM=windows_amd64 VM_PLATFORM=windows_amd64
HS_ARCH=x86 HS_ARCH=x86
MAKE_ARGS += LP64=1 MAKE_ARGS += LP64=1
MAKE_ARGS += ARCH=x86 MAKE_ARGS += ARCH=x86
MAKE_ARGS += BUILDARCH=amd64 MAKE_ARGS += BUILDARCH=amd64
MAKE_ARGS += Platform_arch=x86 MAKE_ARGS += Platform_arch=x86
MAKE_ARGS += Platform_arch_model=x86_64 MAKE_ARGS += Platform_arch_model=x86_64
endif endif
ifneq ($(shell $(ECHO) $(PROCESSOR_IDENTIFIER) | $(GREP) EM64T),) ifneq ($(shell $(ECHO) $(PROCESSOR_IDENTIFIER) | $(GREP) EM64T),)
ARCH_DATA_MODEL=64 ARCH_DATA_MODEL=64
PLATFORM=windows-amd64 PLATFORM=windows-amd64
VM_PLATFORM=windows_amd64 VM_PLATFORM=windows_amd64
HS_ARCH=x86 HS_ARCH=x86
MAKE_ARGS += LP64=1 MAKE_ARGS += LP64=1
MAKE_ARGS += ARCH=x86 MAKE_ARGS += ARCH=x86
MAKE_ARGS += BUILDARCH=amd64 MAKE_ARGS += BUILDARCH=amd64
MAKE_ARGS += Platform_arch=x86 MAKE_ARGS += Platform_arch=x86
MAKE_ARGS += Platform_arch_model=x86_64 MAKE_ARGS += Platform_arch_model=x86_64
endif endif
# NB later OS versions than 2003 may report "Intel64" # NB later OS versions than 2003 may report "Intel64"
ifneq ($(shell $(ECHO) $(PROCESSOR_IDENTIFIER) | $(GREP) Intel64),) ifneq ($(shell $(ECHO) $(PROCESSOR_IDENTIFIER) | $(GREP) Intel64),)
ARCH_DATA_MODEL=64 ARCH_DATA_MODEL=64
PLATFORM=windows-amd64 PLATFORM=windows-amd64
VM_PLATFORM=windows_amd64 VM_PLATFORM=windows_amd64
HS_ARCH=x86 HS_ARCH=x86
MAKE_ARGS += LP64=1 MAKE_ARGS += LP64=1
MAKE_ARGS += ARCH=x86 MAKE_ARGS += ARCH=x86
MAKE_ARGS += BUILDARCH=amd64 MAKE_ARGS += BUILDARCH=amd64
MAKE_ARGS += Platform_arch=x86 MAKE_ARGS += Platform_arch=x86
MAKE_ARGS += Platform_arch_model=x86_64 MAKE_ARGS += Platform_arch_model=x86_64
endif
endif endif
JDK_INCLUDE_SUBDIR=win32 JDK_INCLUDE_SUBDIR=win32

@ -318,6 +318,31 @@ void TemplateTable::ldc(bool wide) {
__ bind(exit); __ bind(exit);
} }
// Fast path for caching oop constants.
// %%% We should use this to handle Class and String constants also.
// %%% It will simplify the ldc/primitive path considerably.
void TemplateTable::fast_aldc(bool wide) {
transition(vtos, atos);
if (!EnableMethodHandles) {
// We should not encounter this bytecode if !EnableMethodHandles.
// The verifier will stop it. However, if we get past the verifier,
// this will stop the thread in a reasonable way, without crashing the JVM.
__ call_VM(noreg, CAST_FROM_FN_PTR(address,
InterpreterRuntime::throw_IncompatibleClassChangeError));
// the call_VM checks for exception, so we should never return here.
__ should_not_reach_here();
return;
}
Register Rcache = G3_scratch;
Register Rscratch = G4_scratch;
resolve_cache_and_index(f1_oop, Otos_i, Rcache, Rscratch, wide ? sizeof(u2) : sizeof(u1));
__ verify_oop(Otos_i);
}
void TemplateTable::ldc2_w() { void TemplateTable::ldc2_w() {
transition(vtos, vtos); transition(vtos, vtos);
Label retry, resolved, Long, exit; Label retry, resolved, Long, exit;
@ -1994,6 +2019,8 @@ void TemplateTable::resolve_cache_and_index(int byte_no,
case Bytecodes::_invokestatic : // fall through case Bytecodes::_invokestatic : // fall through
case Bytecodes::_invokeinterface: entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invoke); break; case Bytecodes::_invokeinterface: entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invoke); break;
case Bytecodes::_invokedynamic : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic); break; case Bytecodes::_invokedynamic : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic); break;
case Bytecodes::_fast_aldc : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_ldc); break;
case Bytecodes::_fast_aldc_w : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_ldc); break;
default : ShouldNotReachHere(); break; default : ShouldNotReachHere(); break;
} }
// first time invocation - must resolve first // first time invocation - must resolve first
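
Conceptually, the fast_aldc template generated above is a cached-constant load that falls back to InterpreterRuntime::resolve_ldc on first execution. A rough sketch of that runtime shape, where the entry type and helper are hypothetical stand-ins rather than HotSpot code:

// Conceptual sketch only: resolve_cache_and_index(f1_oop, ...) above loads the f1 slot
// of the constant-pool cache entry, resolving it through the runtime the first time.
struct CacheEntry { void* f1; };   // f1 == nullptr: constant not yet resolved

void* resolve_ldc_slow(CacheEntry* e) {
  // Stand-in for the InterpreterRuntime::resolve_ldc call wired up in resolve_cache_and_index;
  // actual resolution of the constant is omitted in this sketch.
  return e->f1;
}

void* fast_aldc(CacheEntry* e) {
  if (e->f1 == nullptr) {
    return resolve_ldc_slow(e);    // first execution: resolve and cache the oop
  }
  return e->f1;                    // later executions: push the cached oop directly
}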

@ -375,6 +375,32 @@ void TemplateTable::ldc(bool wide) {
__ bind(Done); __ bind(Done);
} }
// Fast path for caching oop constants.
// %%% We should use this to handle Class and String constants also.
// %%% It will simplify the ldc/primitive path considerably.
void TemplateTable::fast_aldc(bool wide) {
transition(vtos, atos);
if (!EnableMethodHandles) {
// We should not encounter this bytecode if !EnableMethodHandles.
// The verifier will stop it. However, if we get past the verifier,
// this will stop the thread in a reasonable way, without crashing the JVM.
__ call_VM(noreg, CAST_FROM_FN_PTR(address,
InterpreterRuntime::throw_IncompatibleClassChangeError));
// the call_VM checks for exception, so we should never return here.
__ should_not_reach_here();
return;
}
const Register cache = rcx;
const Register index = rdx;
resolve_cache_and_index(f1_oop, rax, cache, index, wide ? sizeof(u2) : sizeof(u1));
if (VerifyOops) {
__ verify_oop(rax);
}
}
void TemplateTable::ldc2_w() { void TemplateTable::ldc2_w() {
transition(vtos, vtos); transition(vtos, vtos);
Label Long, Done; Label Long, Done;
@ -2055,6 +2081,8 @@ void TemplateTable::resolve_cache_and_index(int byte_no,
case Bytecodes::_invokestatic : // fall through case Bytecodes::_invokestatic : // fall through
case Bytecodes::_invokeinterface: entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invoke); break; case Bytecodes::_invokeinterface: entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invoke); break;
case Bytecodes::_invokedynamic : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic); break; case Bytecodes::_invokedynamic : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic); break;
case Bytecodes::_fast_aldc : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_ldc); break;
case Bytecodes::_fast_aldc_w : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_ldc); break;
default : ShouldNotReachHere(); break; default : ShouldNotReachHere(); break;
} }
__ movl(temp, (int)bytecode()); __ movl(temp, (int)bytecode());

@ -389,6 +389,32 @@ void TemplateTable::ldc(bool wide) {
__ bind(Done); __ bind(Done);
} }
// Fast path for caching oop constants.
// %%% We should use this to handle Class and String constants also.
// %%% It will simplify the ldc/primitive path considerably.
void TemplateTable::fast_aldc(bool wide) {
transition(vtos, atos);
if (!EnableMethodHandles) {
// We should not encounter this bytecode if !EnableMethodHandles.
// The verifier will stop it. However, if we get past the verifier,
// this will stop the thread in a reasonable way, without crashing the JVM.
__ call_VM(noreg, CAST_FROM_FN_PTR(address,
InterpreterRuntime::throw_IncompatibleClassChangeError));
// the call_VM checks for exception, so we should never return here.
__ should_not_reach_here();
return;
}
const Register cache = rcx;
const Register index = rdx;
resolve_cache_and_index(f1_oop, rax, cache, index, wide ? sizeof(u2) : sizeof(u1));
if (VerifyOops) {
__ verify_oop(rax);
}
}
void TemplateTable::ldc2_w() { void TemplateTable::ldc2_w() {
transition(vtos, vtos); transition(vtos, vtos);
Label Long, Done; Label Long, Done;
@ -2063,6 +2089,12 @@ void TemplateTable::resolve_cache_and_index(int byte_no,
case Bytecodes::_invokedynamic: case Bytecodes::_invokedynamic:
entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic); entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic);
break; break;
case Bytecodes::_fast_aldc:
entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_ldc);
break;
case Bytecodes::_fast_aldc_w:
entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_ldc);
break;
default: default:
ShouldNotReachHere(); ShouldNotReachHere();
break; break;

@ -1,5 +1,5 @@
/* /*
* Copyright (c) 1997, 2009, Oracle and/or its affiliates. All rights reserved. * Copyright (c) 1997, 2010, Oracle and/or its affiliates. All Rights Reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
* *
* This code is free software; you can redistribute it and/or modify it * This code is free software; you can redistribute it and/or modify it
@ -34,7 +34,7 @@ const char* VM_Version::_features_str = "";
VM_Version::CpuidInfo VM_Version::_cpuid_info = { 0, }; VM_Version::CpuidInfo VM_Version::_cpuid_info = { 0, };
static BufferBlob* stub_blob; static BufferBlob* stub_blob;
static const int stub_size = 300; static const int stub_size = 400;
extern "C" { extern "C" {
typedef void (*getPsrInfo_stub_t)(void*); typedef void (*getPsrInfo_stub_t)(void*);
@ -56,7 +56,7 @@ class VM_Version_StubGenerator: public StubCodeGenerator {
const uint32_t CPU_FAMILY_386 = (3 << CPU_FAMILY_SHIFT); const uint32_t CPU_FAMILY_386 = (3 << CPU_FAMILY_SHIFT);
const uint32_t CPU_FAMILY_486 = (4 << CPU_FAMILY_SHIFT); const uint32_t CPU_FAMILY_486 = (4 << CPU_FAMILY_SHIFT);
Label detect_486, cpu486, detect_586, std_cpuid1; Label detect_486, cpu486, detect_586, std_cpuid1, std_cpuid4;
Label ext_cpuid1, ext_cpuid5, done; Label ext_cpuid1, ext_cpuid5, done;
StubCodeMark mark(this, "VM_Version", "getPsrInfo_stub"); StubCodeMark mark(this, "VM_Version", "getPsrInfo_stub");
@ -131,13 +131,62 @@ class VM_Version_StubGenerator: public StubCodeGenerator {
__ movl(Address(rsi, 8), rcx); __ movl(Address(rsi, 8), rcx);
__ movl(Address(rsi,12), rdx); __ movl(Address(rsi,12), rdx);
__ cmpl(rax, 3); // Is cpuid(0x4) supported? __ cmpl(rax, 0xa); // Is cpuid(0xB) supported?
__ jccb(Assembler::belowEqual, std_cpuid1); __ jccb(Assembler::belowEqual, std_cpuid4);
//
// cpuid(0xB) Processor Topology
//
__ movl(rax, 0xb);
__ xorl(rcx, rcx); // Threads level
__ cpuid();
__ lea(rsi, Address(rbp, in_bytes(VM_Version::tpl_cpuidB0_offset())));
__ movl(Address(rsi, 0), rax);
__ movl(Address(rsi, 4), rbx);
__ movl(Address(rsi, 8), rcx);
__ movl(Address(rsi,12), rdx);
__ movl(rax, 0xb);
__ movl(rcx, 1); // Cores level
__ cpuid();
__ push(rax);
__ andl(rax, 0x1f); // Determine if valid topology level
__ orl(rax, rbx); // eax[4:0] | ebx[0:15] == 0 indicates invalid level
__ andl(rax, 0xffff);
__ pop(rax);
__ jccb(Assembler::equal, std_cpuid4);
__ lea(rsi, Address(rbp, in_bytes(VM_Version::tpl_cpuidB1_offset())));
__ movl(Address(rsi, 0), rax);
__ movl(Address(rsi, 4), rbx);
__ movl(Address(rsi, 8), rcx);
__ movl(Address(rsi,12), rdx);
__ movl(rax, 0xb);
__ movl(rcx, 2); // Packages level
__ cpuid();
__ push(rax);
__ andl(rax, 0x1f); // Determine if valid topology level
__ orl(rax, rbx); // eax[4:0] | ebx[0:15] == 0 indicates invalid level
__ andl(rax, 0xffff);
__ pop(rax);
__ jccb(Assembler::equal, std_cpuid4);
__ lea(rsi, Address(rbp, in_bytes(VM_Version::tpl_cpuidB2_offset())));
__ movl(Address(rsi, 0), rax);
__ movl(Address(rsi, 4), rbx);
__ movl(Address(rsi, 8), rcx);
__ movl(Address(rsi,12), rdx);
// //
// cpuid(0x4) Deterministic cache params // cpuid(0x4) Deterministic cache params
// //
__ bind(std_cpuid4);
__ movl(rax, 4); __ movl(rax, 4);
__ cmpl(rax, Address(rbp, in_bytes(VM_Version::std_cpuid0_offset()))); // Is cpuid(0x4) supported?
__ jccb(Assembler::greater, std_cpuid1);
__ xorl(rcx, rcx); // L1 cache __ xorl(rcx, rcx); // L1 cache
__ cpuid(); __ cpuid();
__ push(rax); __ push(rax);
@ -460,13 +509,18 @@ void VM_Version::get_processor_features() {
AllocatePrefetchDistance = allocate_prefetch_distance(); AllocatePrefetchDistance = allocate_prefetch_distance();
AllocatePrefetchStyle = allocate_prefetch_style(); AllocatePrefetchStyle = allocate_prefetch_style();
if( AllocatePrefetchStyle == 2 && is_intel() && if( is_intel() && cpu_family() == 6 && supports_sse3() ) {
cpu_family() == 6 && supports_sse3() ) { // watermark prefetching on Core if( AllocatePrefetchStyle == 2 ) { // watermark prefetching on Core
#ifdef _LP64 #ifdef _LP64
AllocatePrefetchDistance = 384; AllocatePrefetchDistance = 384;
#else #else
AllocatePrefetchDistance = 320; AllocatePrefetchDistance = 320;
#endif #endif
}
if( supports_sse4_2() && supports_ht() ) { // Nehalem based cpus
AllocatePrefetchDistance = 192;
AllocatePrefetchLines = 4;
}
} }
assert(AllocatePrefetchDistance % AllocatePrefetchStepSize == 0, "invalid value"); assert(AllocatePrefetchDistance % AllocatePrefetchStepSize == 0, "invalid value");

@ -1,5 +1,5 @@
/* /*
* Copyright (c) 1997, 2009, Oracle and/or its affiliates. All rights reserved. * Copyright (c) 1997, 2010, Oracle and/or its affiliates. All Rights Reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
* *
* This code is free software; you can redistribute it and/or modify it * This code is free software; you can redistribute it and/or modify it
@ -114,6 +114,14 @@ public:
} bits; } bits;
}; };
union TplCpuidBEbx {
uint32_t value;
struct {
uint32_t logical_cpus : 16,
: 16;
} bits;
};
union ExtCpuid1Ecx { union ExtCpuid1Ecx {
uint32_t value; uint32_t value;
struct { struct {
@ -211,6 +219,25 @@ protected:
uint32_t dcp_cpuid4_ecx; // unused currently uint32_t dcp_cpuid4_ecx; // unused currently
uint32_t dcp_cpuid4_edx; // unused currently uint32_t dcp_cpuid4_edx; // unused currently
// cpuid function 0xB (processor topology)
// ecx = 0
uint32_t tpl_cpuidB0_eax;
TplCpuidBEbx tpl_cpuidB0_ebx;
uint32_t tpl_cpuidB0_ecx; // unused currently
uint32_t tpl_cpuidB0_edx; // unused currently
// ecx = 1
uint32_t tpl_cpuidB1_eax;
TplCpuidBEbx tpl_cpuidB1_ebx;
uint32_t tpl_cpuidB1_ecx; // unused currently
uint32_t tpl_cpuidB1_edx; // unused currently
// ecx = 2
uint32_t tpl_cpuidB2_eax;
TplCpuidBEbx tpl_cpuidB2_ebx;
uint32_t tpl_cpuidB2_ecx; // unused currently
uint32_t tpl_cpuidB2_edx; // unused currently
// cpuid function 0x80000000 // example, unused // cpuid function 0x80000000 // example, unused
uint32_t ext_max_function; uint32_t ext_max_function;
uint32_t ext_vendor_name_0; uint32_t ext_vendor_name_0;
@ -316,6 +343,9 @@ public:
static ByteSize ext_cpuid1_offset() { return byte_offset_of(CpuidInfo, ext_cpuid1_eax); } static ByteSize ext_cpuid1_offset() { return byte_offset_of(CpuidInfo, ext_cpuid1_eax); }
static ByteSize ext_cpuid5_offset() { return byte_offset_of(CpuidInfo, ext_cpuid5_eax); } static ByteSize ext_cpuid5_offset() { return byte_offset_of(CpuidInfo, ext_cpuid5_eax); }
static ByteSize ext_cpuid8_offset() { return byte_offset_of(CpuidInfo, ext_cpuid8_eax); } static ByteSize ext_cpuid8_offset() { return byte_offset_of(CpuidInfo, ext_cpuid8_eax); }
static ByteSize tpl_cpuidB0_offset() { return byte_offset_of(CpuidInfo, tpl_cpuidB0_eax); }
static ByteSize tpl_cpuidB1_offset() { return byte_offset_of(CpuidInfo, tpl_cpuidB1_eax); }
static ByteSize tpl_cpuidB2_offset() { return byte_offset_of(CpuidInfo, tpl_cpuidB2_eax); }
// Initialization // Initialization
static void initialize(); static void initialize();
@ -346,10 +376,22 @@ public:
static bool is_amd() { assert_is_initialized(); return _cpuid_info.std_vendor_name_0 == 0x68747541; } // 'htuA' static bool is_amd() { assert_is_initialized(); return _cpuid_info.std_vendor_name_0 == 0x68747541; } // 'htuA'
static bool is_intel() { assert_is_initialized(); return _cpuid_info.std_vendor_name_0 == 0x756e6547; } // 'uneG' static bool is_intel() { assert_is_initialized(); return _cpuid_info.std_vendor_name_0 == 0x756e6547; } // 'uneG'
static bool supports_processor_topology() {
return (_cpuid_info.std_max_function >= 0xB) &&
// eax[4:0] | ebx[0:15] == 0 indicates invalid topology level.
// Some cpus have max cpuid >= 0xB but do not support processor topology.
((_cpuid_info.tpl_cpuidB0_eax & 0x1f | _cpuid_info.tpl_cpuidB0_ebx.bits.logical_cpus) != 0);
}
static uint cores_per_cpu() { static uint cores_per_cpu() {
uint result = 1; uint result = 1;
if (is_intel()) { if (is_intel()) {
result = (_cpuid_info.dcp_cpuid4_eax.bits.cores_per_cpu + 1); if (supports_processor_topology()) {
result = _cpuid_info.tpl_cpuidB1_ebx.bits.logical_cpus /
_cpuid_info.tpl_cpuidB0_ebx.bits.logical_cpus;
} else {
result = (_cpuid_info.dcp_cpuid4_eax.bits.cores_per_cpu + 1);
}
} else if (is_amd()) { } else if (is_amd()) {
result = (_cpuid_info.ext_cpuid8_ecx.bits.cores_per_cpu + 1); result = (_cpuid_info.ext_cpuid8_ecx.bits.cores_per_cpu + 1);
} }
@ -358,7 +400,9 @@ public:
static uint threads_per_core() { static uint threads_per_core() {
uint result = 1; uint result = 1;
if (_cpuid_info.std_cpuid1_edx.bits.ht != 0) { if (is_intel() && supports_processor_topology()) {
result = _cpuid_info.tpl_cpuidB0_ebx.bits.logical_cpus;
} else if (_cpuid_info.std_cpuid1_edx.bits.ht != 0) {
result = _cpuid_info.std_cpuid1_ebx.bits.threads_per_cpu / result = _cpuid_info.std_cpuid1_ebx.bits.threads_per_cpu /
cores_per_cpu(); cores_per_cpu();
} }
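
The topology support added above reduces to the cpuid-0xB EBX counts plus the validity test quoted in the comments. A stand-alone sketch of that arithmetic in plain C++ (not the VM_Version API):

#include <cstdint>

// Raw register values from cpuid leaf 0xB at ECX = 0 (thread level) and ECX = 1 (core level).
struct CpuidB { uint32_t eax, ebx; };

// eax[4:0] | ebx[15:0] == 0 indicates an invalid topology level (same test as above).
bool level_valid(const CpuidB& r) {
  return ((r.eax & 0x1f) | (r.ebx & 0xffff)) != 0;
}

// EBX[15:0] is the number of logical processors at that level; e.g. an HT-enabled
// quad core reports 2 at the thread level and 8 at the core level, so 8 / 2 = 4 cores.
uint32_t threads_per_core(const CpuidB& lvl0) {
  return lvl0.ebx & 0xffff;
}
uint32_t cores_per_package(const CpuidB& lvl0, const CpuidB& lvl1) {
  return (lvl1.ebx & 0xffff) / (lvl0.ebx & 0xffff);
}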

@ -820,7 +820,7 @@ int AbstractInterpreter::layout_activation(methodOop method,
bool is_top_frame) { bool is_top_frame) {
assert(popframe_extra_args == 0, "what to do?"); assert(popframe_extra_args == 0, "what to do?");
assert(!is_top_frame || (!callee_locals && !callee_param_count), assert(!is_top_frame || (!callee_locals && !callee_param_count),
"top frame should have no caller") "top frame should have no caller");
// This code must exactly match what InterpreterFrame::build // This code must exactly match what InterpreterFrame::build
// does (the full InterpreterFrame::build, that is, not the // does (the full InterpreterFrame::build, that is, not the

@ -2079,9 +2079,9 @@ void os::print_signal_handlers(outputStream* st, char* buf, size_t buflen) {
static char saved_jvm_path[MAXPATHLEN] = {0}; static char saved_jvm_path[MAXPATHLEN] = {0};
// Find the full path to the current module, libjvm.so or libjvm_g.so // Find the full path to the current module, libjvm.so or libjvm_g.so
void os::jvm_path(char *buf, jint len) { void os::jvm_path(char *buf, jint buflen) {
// Error checking. // Error checking.
if (len < MAXPATHLEN) { if (buflen < MAXPATHLEN) {
assert(false, "must use a large-enough buffer"); assert(false, "must use a large-enough buffer");
buf[0] = '\0'; buf[0] = '\0';
return; return;
@ -2117,6 +2117,9 @@ void os::jvm_path(char *buf, jint len) {
// Look for JAVA_HOME in the environment. // Look for JAVA_HOME in the environment.
char* java_home_var = ::getenv("JAVA_HOME"); char* java_home_var = ::getenv("JAVA_HOME");
if (java_home_var != NULL && java_home_var[0] != 0) { if (java_home_var != NULL && java_home_var[0] != 0) {
char* jrelib_p;
int len;
// Check the current module name "libjvm.so" or "libjvm_g.so". // Check the current module name "libjvm.so" or "libjvm_g.so".
p = strrchr(buf, '/'); p = strrchr(buf, '/');
assert(strstr(p, "/libjvm") == p, "invalid library name"); assert(strstr(p, "/libjvm") == p, "invalid library name");
@ -2124,14 +2127,24 @@ void os::jvm_path(char *buf, jint len) {
if (realpath(java_home_var, buf) == NULL) if (realpath(java_home_var, buf) == NULL)
return; return;
sprintf(buf + strlen(buf), "/jre/lib/%s", cpu_arch);
// determine if this is a legacy image or modules image
// modules image doesn't have "jre" subdirectory
len = strlen(buf);
jrelib_p = buf + len;
snprintf(jrelib_p, buflen-len, "/jre/lib/%s", cpu_arch);
if (0 != access(buf, F_OK)) {
snprintf(jrelib_p, buflen-len, "/lib/%s", cpu_arch);
}
if (0 == access(buf, F_OK)) { if (0 == access(buf, F_OK)) {
// Use current module name "libjvm[_g].so" instead of // Use current module name "libjvm[_g].so" instead of
// "libjvm"debug_only("_g")".so" since for fastdebug version // "libjvm"debug_only("_g")".so" since for fastdebug version
// we should have "libjvm.so" but debug_only("_g") adds "_g"! // we should have "libjvm.so" but debug_only("_g") adds "_g"!
// It is used when we are choosing the HPI library's name // It is used when we are choosing the HPI library's name
// "libhpi[_g].so" in hpi::initialize_get_interface(). // "libhpi[_g].so" in hpi::initialize_get_interface().
sprintf(buf + strlen(buf), "/hotspot/libjvm%s.so", p); len = strlen(buf);
snprintf(buf + len, buflen-len, "/hotspot/libjvm%s.so", p);
} else { } else {
// Go back to path of .so // Go back to path of .so
if (realpath(dli_fname, buf) == NULL) if (realpath(dli_fname, buf) == NULL)
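
The legacy-vs-modules probe added above simply formats two candidate lib directories and keeps the first one that exists. A self-contained sketch of that idea (the helper name and buffer handling are illustrative, not os::jvm_path itself):

#include <cstdio>
#include <cstring>
#include <unistd.h>

// Append the arch-specific lib directory to buf (which holds the JAVA_HOME path),
// trying the legacy layout with a "jre" subdirectory first, then the modules layout.
bool append_lib_dir(char* buf, size_t buflen, const char* cpu_arch) {
  size_t len = strlen(buf);
  snprintf(buf + len, buflen - len, "/jre/lib/%s", cpu_arch);   // legacy image
  if (access(buf, F_OK) != 0) {
    snprintf(buf + len, buflen - len, "/lib/%s", cpu_arch);     // modules image has no "jre"
  }
  return access(buf, F_OK) == 0;
}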

@ -123,7 +123,7 @@
int set_interrupt_callback (Sync_Interrupt_Callback * cb); int set_interrupt_callback (Sync_Interrupt_Callback * cb);
void remove_interrupt_callback(Sync_Interrupt_Callback * cb); void remove_interrupt_callback(Sync_Interrupt_Callback * cb);
void OSThread::do_interrupt_callbacks_at_interrupt(InterruptArguments *args); void do_interrupt_callbacks_at_interrupt(InterruptArguments *args);
// *************************************************************** // ***************************************************************
// java.lang.Thread.interrupt state. // java.lang.Thread.interrupt state.

@ -2435,6 +2435,8 @@ void os::jvm_path(char *buf, jint buflen) {
char* java_home_var = ::getenv("JAVA_HOME"); char* java_home_var = ::getenv("JAVA_HOME");
if (java_home_var != NULL && java_home_var[0] != 0) { if (java_home_var != NULL && java_home_var[0] != 0) {
char cpu_arch[12]; char cpu_arch[12];
char* jrelib_p;
int len;
sysinfo(SI_ARCHITECTURE, cpu_arch, sizeof(cpu_arch)); sysinfo(SI_ARCHITECTURE, cpu_arch, sizeof(cpu_arch));
#ifdef _LP64 #ifdef _LP64
// If we are on sparc running a 64-bit vm, look in jre/lib/sparcv9. // If we are on sparc running a 64-bit vm, look in jre/lib/sparcv9.
@ -2450,14 +2452,23 @@ void os::jvm_path(char *buf, jint buflen) {
p = strstr(p, "_g") ? "_g" : ""; p = strstr(p, "_g") ? "_g" : "";
realpath(java_home_var, buf); realpath(java_home_var, buf);
sprintf(buf + strlen(buf), "/jre/lib/%s", cpu_arch); // determine if this is a legacy image or modules image
// modules image doesn't have "jre" subdirectory
len = strlen(buf);
jrelib_p = buf + len;
snprintf(jrelib_p, buflen-len, "/jre/lib/%s", cpu_arch);
if (0 != access(buf, F_OK)) {
snprintf(jrelib_p, buflen-len, "/lib/%s", cpu_arch);
}
if (0 == access(buf, F_OK)) { if (0 == access(buf, F_OK)) {
// Use current module name "libjvm[_g].so" instead of // Use current module name "libjvm[_g].so" instead of
// "libjvm"debug_only("_g")".so" since for fastdebug version // "libjvm"debug_only("_g")".so" since for fastdebug version
// we should have "libjvm.so" but debug_only("_g") adds "_g"! // we should have "libjvm.so" but debug_only("_g") adds "_g"!
// It is used when we are choosing the HPI library's name // It is used when we are choosing the HPI library's name
// "libhpi[_g].so" in hpi::initialize_get_interface(). // "libhpi[_g].so" in hpi::initialize_get_interface().
sprintf(buf + strlen(buf), "/hotspot/libjvm%s.so", p); len = strlen(buf);
snprintf(buf + len, buflen-len, "/hotspot/libjvm%s.so", p);
} else { } else {
// Go back to path of .so // Go back to path of .so
realpath((char *)dlinfo.dli_fname, buf); realpath((char *)dlinfo.dli_fname, buf);

@ -1,290 +0,0 @@
/*
* Copyright (c) 1998, 2007, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
#include "incls/_precompiled.incl"
#include "incls/_vtune_windows.cpp.incl"
static int current_method_ID = 0;
// ------------- iJITProf.h -------------------
// defined by Intel -- do not change
#include "windows.h"
extern "C" {
enum iJITP_Event {
ExceptionOccurred_S, // Java exception
ExceptionOccurred_IDS,
Shutdown, // VM exit
ThreadCreate, // threads
ThreadDestroy,
ThreadSwitch,
ClassLoadStart, // class loading
ClassLoadEnd,
GCStart, // GC
GCEnd,
NMethodCreate = 13, // nmethod creation
NMethodDelete
// rest of event types omitted (call profiling not supported yet)
};
// version number -- 0 if VTune not installed
int WINAPI iJitP_VersionNumber();
enum iJITP_ModeFlags {
NoNotification = 0x0, // don't call vtune
NotifyNMethodCreate = 0x1, // notify NMethod_Create
NotifyNMethodDelete = 0x2, // notify NMethod_Create
NotifyMethodEnter = 0x4, // method entry
NotifyMethodExit = 0x8, // method exit
NotifyShutdown = 0x10, // VM exit
NotifyGC = 0x20, // GC
};
// call back function type
typedef void (WINAPI *ModeChangedFn)(iJITP_ModeFlags flags);
// ------------- VTune method interfaces ----------------------
typedef void (WINAPI *RegisterCallbackFn)(ModeChangedFn fn); // register callback
typedef int (WINAPI *NotifyEventFn)(iJITP_Event, void* event_data);
// specific event data structures
// data for NMethodCreate
struct VTuneObj { // base class for allocation
// (can't use CHeapObj -- has vtable ptr)
void* operator new(size_t size) { return os::malloc(size); }
void operator delete(void* p) { fatal("never delete VTune data"); }
};
struct LineNumberInfo : VTuneObj { // PC-to-line number mapping
unsigned long offset; // byte offset from start of method
unsigned long line_num; // corresponding line number
};
struct MethodLoadInfo : VTuneObj {
unsigned long methodID; // unique method ID
const char* name; // method name
unsigned long instr_start; // start address
unsigned long instr_size; // length in bytes
unsigned long line_number_size; // size of line number table
LineNumberInfo* line_number_table; // line number mapping
unsigned long classID; // unique class ID
char* class_file_name; // fully qualified class file name
char* source_file_name; // fully qualified source file name
MethodLoadInfo(nmethod* nm); // for real nmethods
MethodLoadInfo(const char* vm_name, address start, address end);
// for "nmethods" like stubs, interpreter, etc
};
// data for NMethodDelete
struct MethodInfo : VTuneObj {
unsigned long methodID; // unique method ID
unsigned long classID; // (added for convenience -- not part of Intel interface)
MethodInfo(methodOop m);
};
};
MethodInfo::MethodInfo(methodOop m) {
// just give it a new ID -- we're not compiling methods twice (usually)
// (and even if we did, one might want to see the two versions separately)
methodID = ++current_method_ID;
}
MethodLoadInfo::MethodLoadInfo(const char* vm_name, address start, address end) {
classID = 0;
methodID = ++current_method_ID;
name = vm_name;
instr_start = (unsigned long)start;
instr_size = end - start;
line_number_size = 0;
line_number_table = NULL;
class_file_name = source_file_name = "HotSpot JVM";
}
MethodLoadInfo::MethodLoadInfo(nmethod* nm) {
methodOop m = nm->method();
MethodInfo info(m);
classID = info.classID;
methodID = info.methodID;
name = strdup(m->name()->as_C_string());
instr_start = (unsigned long)nm->instructions_begin();
instr_size = nm->code_size();
line_number_size = 0;
line_number_table = NULL;
klassOop kl = m->method_holder();
char* class_name = Klass::cast(kl)->name()->as_C_string();
char* file_name = NEW_C_HEAP_ARRAY(char, strlen(class_name) + 1);
strcpy(file_name, class_name);
class_file_name = file_name;
char* src_name = NEW_C_HEAP_ARRAY(char, strlen(class_name) + strlen(".java") + 1);
strcpy(src_name, class_name);
strcat(src_name, ".java");
source_file_name = src_name;
}
// --------------------- DLL loading functions ------------------------
#define DLLNAME "iJitProf.dll"
static HINSTANCE load_lib(char* name) {
HINSTANCE lib = NULL;
HKEY hk;
// try to get VTune directory from the registry
if (RegOpenKey(HKEY_CURRENT_USER, "Software\\VB and VBA Program Settings\\VTune\\StartUp", &hk) == ERROR_SUCCESS) {
for (int i = 0; true; i++) {
char szName[MAX_PATH + 1];
char szVal [MAX_PATH + 1];
DWORD cbName, cbVal;
cbName = cbVal = MAX_PATH + 1;
if (RegEnumValue(hk, i, szName, &cbName, NULL, NULL, (LPBYTE)szVal, &cbVal) == ERROR_SUCCESS) {
// get VTune directory
if (!strcmp(szName, name)) {
char*p = szVal;
while (*p == ' ') p++; // trim
char* q = p + strlen(p) - 1;
while (*q == ' ') *(q--) = '\0';
// chdir to the VTune dir
GetCurrentDirectory(MAX_PATH + 1, szName);
SetCurrentDirectory(p);
// load lib
lib = LoadLibrary(strcat(strcat(p, "\\"), DLLNAME));
if (lib != NULL && WizardMode) tty->print_cr("*loaded VTune DLL %s", p);
// restore current dir
SetCurrentDirectory(szName);
break;
}
} else {
break;
}
}
}
return lib;
}
static RegisterCallbackFn iJIT_RegisterCallback = NULL;
static NotifyEventFn iJIT_NotifyEvent = NULL;
static bool load_iJIT_funcs() {
// first try to load from PATH
HINSTANCE lib = LoadLibrary(DLLNAME);
if (lib != NULL && WizardMode) tty->print_cr("*loaded VTune DLL %s via PATH", DLLNAME);
// if not successful, try to look in the VTUNE directory
if (lib == NULL) lib = load_lib("VTUNEDIR30");
if (lib == NULL) lib = load_lib("VTUNEDIR25");
if (lib == NULL) lib = load_lib("VTUNEDIR");
if (lib == NULL) return false; // unsuccessful
// try to load the functions
iJIT_RegisterCallback = (RegisterCallbackFn)GetProcAddress(lib, "iJIT_RegisterCallback");
iJIT_NotifyEvent = (NotifyEventFn) GetProcAddress(lib, "iJIT_NotifyEvent");
if (!iJIT_RegisterCallback) tty->print_cr("*couldn't find VTune entry point iJIT_RegisterCallback");
if (!iJIT_NotifyEvent) tty->print_cr("*couldn't find VTune entry point iJIT_NotifyEvent");
return iJIT_RegisterCallback != NULL && iJIT_NotifyEvent != NULL;
}
// --------------------- VTune class ------------------------
static bool active = false;
static int flags = 0;
void VTune::start_GC() {
if (active && (flags & NotifyGC)) iJIT_NotifyEvent(GCStart, NULL);
}
void VTune::end_GC() {
if (active && (flags & NotifyGC)) iJIT_NotifyEvent(GCEnd, NULL);
}
void VTune::start_class_load() {
// not yet implemented in VTune
}
void VTune::end_class_load() {
// not yet implemented in VTune
}
void VTune::exit() {
if (active && (flags & NotifyShutdown)) iJIT_NotifyEvent(Shutdown, NULL);
}
void VTune::register_stub(const char* name, address start, address end) {
if (flags & NotifyNMethodCreate) {
MethodLoadInfo* info = new MethodLoadInfo(name, start, end);
if (PrintMiscellaneous && WizardMode && Verbose) {
tty->print_cr("NMethodCreate %s (%d): %#x..%#x", info->name, info->methodID,
info->instr_start, info->instr_start + info->instr_size);
}
iJIT_NotifyEvent(NMethodCreate, info);
}
}
void VTune::create_nmethod(nmethod* nm) {
if (flags & NotifyNMethodCreate) {
MethodLoadInfo* info = new MethodLoadInfo(nm);
if (PrintMiscellaneous && WizardMode && Verbose) {
tty->print_cr("NMethodCreate %s (%d): %#x..%#x", info->name, info->methodID,
info->instr_start, info->instr_start + info->instr_size);
}
iJIT_NotifyEvent(NMethodCreate, info);
}
}
void VTune::delete_nmethod(nmethod* nm) {
if (flags & NotifyNMethodDelete) {
MethodInfo* info = new MethodInfo(nm->method());
iJIT_NotifyEvent(NMethodDelete, info);
}
}
static void set_flags(int new_flags) {
flags = new_flags;
// if (WizardMode) tty->print_cr("*new VTune flags: %#x", flags);
}
void vtune_init() {
if (!UseVTune) return;
active = load_iJIT_funcs();
if (active) {
iJIT_RegisterCallback((ModeChangedFn)set_flags);
} else {
assert(flags == 0, "flags shouldn't be set");
}
}

@ -26,7 +26,7 @@ static void pd_conjoint_words(HeapWord* from, HeapWord* to, size_t count) {
#ifdef AMD64 #ifdef AMD64
(void)memmove(to, from, count * HeapWordSize); (void)memmove(to, from, count * HeapWordSize);
#else #else
// Same as pd_aligned_conjoint_words, except includes a zero-count check. // Includes a zero-count check.
intx temp; intx temp;
__asm__ volatile(" testl %6,%6 ;" __asm__ volatile(" testl %6,%6 ;"
" jz 7f ;" " jz 7f ;"
@ -84,7 +84,7 @@ static void pd_disjoint_words(HeapWord* from, HeapWord* to, size_t count) {
break; break;
} }
#else #else
// Same as pd_aligned_disjoint_words, except includes a zero-count check. // Includes a zero-count check.
intx temp; intx temp;
__asm__ volatile(" testl %6,%6 ;" __asm__ volatile(" testl %6,%6 ;"
" jz 3f ;" " jz 3f ;"
@ -130,75 +130,18 @@ static void pd_disjoint_words_atomic(HeapWord* from, HeapWord* to, size_t count)
} }
static void pd_aligned_conjoint_words(HeapWord* from, HeapWord* to, size_t count) { static void pd_aligned_conjoint_words(HeapWord* from, HeapWord* to, size_t count) {
#ifdef AMD64 pd_conjoint_words(from, to, count);
(void)memmove(to, from, count * HeapWordSize);
#else
// Same as pd_conjoint_words, except no zero-count check.
intx temp;
__asm__ volatile(" cmpl %4,%5 ;"
" leal -4(%4,%6,4),%3;"
" jbe 1f ;"
" cmpl %7,%5 ;"
" jbe 4f ;"
"1: cmpl $32,%6 ;"
" ja 3f ;"
" subl %4,%1 ;"
"2: movl (%4),%3 ;"
" movl %7,(%5,%4,1) ;"
" addl $4,%0 ;"
" subl $1,%2 ;"
" jnz 2b ;"
" jmp 7f ;"
"3: rep; smovl ;"
" jmp 7f ;"
"4: cmpl $32,%2 ;"
" movl %7,%0 ;"
" leal -4(%5,%6,4),%1;"
" ja 6f ;"
" subl %4,%1 ;"
"5: movl (%4),%3 ;"
" movl %7,(%5,%4,1) ;"
" subl $4,%0 ;"
" subl $1,%2 ;"
" jnz 5b ;"
" jmp 7f ;"
"6: std ;"
" rep; smovl ;"
" cld ;"
"7: nop "
: "=S" (from), "=D" (to), "=c" (count), "=r" (temp)
: "0" (from), "1" (to), "2" (count), "3" (temp)
: "memory", "flags");
#endif // AMD64
} }
static void pd_aligned_disjoint_words(HeapWord* from, HeapWord* to, size_t count) { static void pd_aligned_disjoint_words(HeapWord* from, HeapWord* to, size_t count) {
#ifdef AMD64
pd_disjoint_words(from, to, count); pd_disjoint_words(from, to, count);
#else
// Same as pd_disjoint_words, except no zero-count check.
intx temp;
__asm__ volatile(" cmpl $32,%6 ;"
" ja 2f ;"
" subl %4,%1 ;"
"1: movl (%4),%3 ;"
" movl %7,(%5,%4,1);"
" addl $4,%0 ;"
" subl $1,%2 ;"
" jnz 1b ;"
" jmp 3f ;"
"2: rep; smovl ;"
"3: nop "
: "=S" (from), "=D" (to), "=c" (count), "=r" (temp)
: "0" (from), "1" (to), "2" (count), "3" (temp)
: "memory", "cc");
#endif // AMD64
} }
static void pd_conjoint_bytes(void* from, void* to, size_t count) { static void pd_conjoint_bytes(void* from, void* to, size_t count) {
#ifdef AMD64 #ifdef AMD64
(void)memmove(to, from, count); (void)memmove(to, from, count);
#else #else
// Includes a zero-count check.
intx temp; intx temp;
__asm__ volatile(" testl %6,%6 ;" __asm__ volatile(" testl %6,%6 ;"
" jz 13f ;" " jz 13f ;"

@ -121,10 +121,10 @@ cb_CopyRight:
jnz 3b jnz 3b
addl %esi,%edi addl %esi,%edi
4: movl %eax,%ecx # byte count less prefix 4: movl %eax,%ecx # byte count less prefix
andl $3,%ecx # suffix byte count 5: andl $3,%ecx # suffix byte count
jz 7f # no suffix jz 7f # no suffix
# copy suffix # copy suffix
5: xorl %eax,%eax xorl %eax,%eax
6: movb (%esi,%eax,1),%dl 6: movb (%esi,%eax,1),%dl
movb %dl,(%edi,%eax,1) movb %dl,(%edi,%eax,1)
addl $1,%eax addl $1,%eax
@ -159,10 +159,10 @@ cb_CopyLeft:
# copy dwords, aligned or not # copy dwords, aligned or not
3: rep; smovl 3: rep; smovl
4: movl %eax,%ecx # byte count 4: movl %eax,%ecx # byte count
andl $3,%ecx # suffix byte count 5: andl $3,%ecx # suffix byte count
jz 7f # no suffix jz 7f # no suffix
# copy suffix # copy suffix
5: subl %esi,%edi subl %esi,%edi
addl $3,%esi addl $3,%esi
6: movb (%esi),%dl 6: movb (%esi),%dl
movb %dl,(%edi,%esi,1) movb %dl,(%edi,%esi,1)
@ -214,10 +214,10 @@ acb_CopyRight:
# copy aligned dwords # copy aligned dwords
3: rep; smovl 3: rep; smovl
4: movl %eax,%ecx 4: movl %eax,%ecx
andl $3,%ecx 5: andl $3,%ecx
jz 7f jz 7f
# copy suffix # copy suffix
5: xorl %eax,%eax xorl %eax,%eax
6: movb (%esi,%eax,1),%dl 6: movb (%esi,%eax,1),%dl
movb %dl,(%edi,%eax,1) movb %dl,(%edi,%eax,1)
addl $1,%eax addl $1,%eax
@ -250,9 +250,9 @@ acb_CopyLeft:
jnz 3b jnz 3b
addl %esi,%edi addl %esi,%edi
4: movl %eax,%ecx 4: movl %eax,%ecx
andl $3,%ecx 5: andl $3,%ecx
jz 7f jz 7f
5: subl %esi,%edi subl %esi,%edi
addl $3,%esi addl $3,%esi
6: movb (%esi),%dl 6: movb (%esi),%dl
movb %dl,(%edi,%esi,1) movb %dl,(%edi,%esi,1)
@ -287,11 +287,12 @@ cs_CopyRight:
andl $3,%eax # either 0 or 2 andl $3,%eax # either 0 or 2
jz 1f # no prefix jz 1f # no prefix
# copy prefix # copy prefix
subl $1,%ecx
jl 5f # zero count
movw (%esi),%dx movw (%esi),%dx
movw %dx,(%edi) movw %dx,(%edi)
addl %eax,%esi # %eax == 2 addl %eax,%esi # %eax == 2
addl %eax,%edi addl %eax,%edi
subl $1,%ecx
1: movl %ecx,%eax # word count less prefix 1: movl %ecx,%eax # word count less prefix
sarl %ecx # dword count sarl %ecx # dword count
jz 4f # no dwords to move jz 4f # no dwords to move
@ -454,12 +455,13 @@ ci_CopyRight:
ret ret
.=.+10 .=.+10
2: subl %esi,%edi 2: subl %esi,%edi
jmp 4f
.p2align 4,,15 .p2align 4,,15
3: movl (%esi),%edx 3: movl (%esi),%edx
movl %edx,(%edi,%esi,1) movl %edx,(%edi,%esi,1)
addl $4,%esi addl $4,%esi
subl $1,%ecx 4: subl $1,%ecx
jnz 3b jge 3b
popl %edi popl %edi
popl %esi popl %esi
ret ret
@ -467,19 +469,20 @@ ci_CopyLeft:
std std
leal -4(%edi,%ecx,4),%edi # to + count*4 - 4 leal -4(%edi,%ecx,4),%edi # to + count*4 - 4
cmpl $32,%ecx cmpl $32,%ecx
ja 3f # > 32 dwords ja 4f # > 32 dwords
subl %eax,%edi # eax == from + count*4 - 4 subl %eax,%edi # eax == from + count*4 - 4
jmp 3f
.p2align 4,,15 .p2align 4,,15
2: movl (%eax),%edx 2: movl (%eax),%edx
movl %edx,(%edi,%eax,1) movl %edx,(%edi,%eax,1)
subl $4,%eax subl $4,%eax
subl $1,%ecx 3: subl $1,%ecx
jnz 2b jge 2b
cld cld
popl %edi popl %edi
popl %esi popl %esi
ret ret
3: movl %eax,%esi # from + count*4 - 4 4: movl %eax,%esi # from + count*4 - 4
rep; smovl rep; smovl
cld cld
popl %edi popl %edi
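
The reworked int-copy loops above move the count decrement and test ahead of the first copy, so a zero count falls straight through instead of copying once and underflowing the counter. In C++ terms the change is roughly (illustrative only, not the actual stubs):

#include <cstdint>
#include <cstddef>

// Before: a do/while shape that always copies at least one element.
void copy_dwords_old(const uint32_t* from, uint32_t* to, size_t count) {
  do { *to++ = *from++; } while (--count != 0);   // wrong when count == 0
}

// After: decrement and test before each copy (mirrors the "subl $1,%ecx; jge" pattern),
// so count == 0 copies nothing.
void copy_dwords_new(const uint32_t* from, uint32_t* to, intptr_t count) {
  while (--count >= 0) { *to++ = *from++; }
}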

@ -861,7 +861,7 @@ cmpxchg_func_t* os::atomic_cmpxchg_func = os::atomic_cmpxchg_bootstrap
cmpxchg_long_func_t* os::atomic_cmpxchg_long_func = os::atomic_cmpxchg_long_bootstrap; cmpxchg_long_func_t* os::atomic_cmpxchg_long_func = os::atomic_cmpxchg_long_bootstrap;
add_func_t* os::atomic_add_func = os::atomic_add_bootstrap; add_func_t* os::atomic_add_func = os::atomic_add_bootstrap;
extern "C" _solaris_raw_setup_fpu(address ptr); extern "C" void _solaris_raw_setup_fpu(address ptr);
void os::setup_fpu() { void os::setup_fpu() {
address fpu_cntrl = StubRoutines::addr_fpu_cntrl_wrd_std(); address fpu_cntrl = StubRoutines::addr_fpu_cntrl_wrd_std();
_solaris_raw_setup_fpu(fpu_cntrl); _solaris_raw_setup_fpu(fpu_cntrl);

@ -154,10 +154,10 @@ cb_CopyRight:
jnz 3b jnz 3b
addl %esi,%edi addl %esi,%edi
4: movl %eax,%ecx / byte count less prefix 4: movl %eax,%ecx / byte count less prefix
andl $3,%ecx / suffix byte count 5: andl $3,%ecx / suffix byte count
jz 7f / no suffix jz 7f / no suffix
/ copy suffix / copy suffix
5: xorl %eax,%eax xorl %eax,%eax
6: movb (%esi,%eax,1),%dl 6: movb (%esi,%eax,1),%dl
movb %dl,(%edi,%eax,1) movb %dl,(%edi,%eax,1)
addl $1,%eax addl $1,%eax
@ -192,10 +192,10 @@ cb_CopyLeft:
/ copy dwords, aligned or not / copy dwords, aligned or not
3: rep; smovl 3: rep; smovl
4: movl %eax,%ecx / byte count 4: movl %eax,%ecx / byte count
andl $3,%ecx / suffix byte count 5: andl $3,%ecx / suffix byte count
jz 7f / no suffix jz 7f / no suffix
/ copy suffix / copy suffix
5: subl %esi,%edi subl %esi,%edi
addl $3,%esi addl $3,%esi
6: movb (%esi),%dl 6: movb (%esi),%dl
movb %dl,(%edi,%esi,1) movb %dl,(%edi,%esi,1)
@ -246,10 +246,10 @@ acb_CopyRight:
/ copy aligned dwords / copy aligned dwords
3: rep; smovl 3: rep; smovl
4: movl %eax,%ecx 4: movl %eax,%ecx
andl $3,%ecx 5: andl $3,%ecx
jz 7f jz 7f
/ copy suffix / copy suffix
5: xorl %eax,%eax xorl %eax,%eax
6: movb (%esi,%eax,1),%dl 6: movb (%esi,%eax,1),%dl
movb %dl,(%edi,%eax,1) movb %dl,(%edi,%eax,1)
addl $1,%eax addl $1,%eax
@ -282,9 +282,9 @@ acb_CopyLeft:
jnz 3b jnz 3b
addl %esi,%edi addl %esi,%edi
4: movl %eax,%ecx 4: movl %eax,%ecx
andl $3,%ecx 5: andl $3,%ecx
jz 7f jz 7f
5: subl %esi,%edi subl %esi,%edi
addl $3,%esi addl $3,%esi
6: movb (%esi),%dl 6: movb (%esi),%dl
movb %dl,(%edi,%esi,1) movb %dl,(%edi,%esi,1)
@ -318,11 +318,12 @@ cs_CopyRight:
andl $3,%eax / either 0 or 2 andl $3,%eax / either 0 or 2
jz 1f / no prefix jz 1f / no prefix
/ copy prefix / copy prefix
subl $1,%ecx
jl 5f / zero count
movw (%esi),%dx movw (%esi),%dx
movw %dx,(%edi) movw %dx,(%edi)
addl %eax,%esi / %eax == 2 addl %eax,%esi / %eax == 2
addl %eax,%edi addl %eax,%edi
subl $1,%ecx
1: movl %ecx,%eax / word count less prefix 1: movl %ecx,%eax / word count less prefix
sarl %ecx / dword count sarl %ecx / dword count
jz 4f / no dwords to move jz 4f / no dwords to move
@ -482,12 +483,13 @@ ci_CopyRight:
ret ret
.=.+10 .=.+10
2: subl %esi,%edi 2: subl %esi,%edi
jmp 4f
.align 16 .align 16
3: movl (%esi),%edx 3: movl (%esi),%edx
movl %edx,(%edi,%esi,1) movl %edx,(%edi,%esi,1)
addl $4,%esi addl $4,%esi
subl $1,%ecx 4: subl $1,%ecx
jnz 3b jge 3b
popl %edi popl %edi
popl %esi popl %esi
ret ret
@ -495,19 +497,20 @@ ci_CopyLeft:
std std
leal -4(%edi,%ecx,4),%edi / to + count*4 - 4 leal -4(%edi,%ecx,4),%edi / to + count*4 - 4
cmpl $32,%ecx cmpl $32,%ecx
ja 3f / > 32 dwords ja 4f / > 32 dwords
subl %eax,%edi / eax == from + count*4 - 4 subl %eax,%edi / eax == from + count*4 - 4
jmp 3f
.align 16 .align 16
2: movl (%eax),%edx 2: movl (%eax),%edx
movl %edx,(%edi,%eax,1) movl %edx,(%edi,%eax,1)
subl $4,%eax subl $4,%eax
subl $1,%ecx 3: subl $1,%ecx
jnz 2b jge 2b
cld cld
popl %edi popl %edi
popl %esi popl %esi
ret ret
3: movl %eax,%esi / from + count*4 - 4 4: movl %eax,%esi / from + count*4 - 4
rep; smovl rep; smovl
cld cld
popl %edi popl %edi

@ -404,7 +404,7 @@ void CodeSection::expand_locs(int new_capacity) {
locs_start = REALLOC_RESOURCE_ARRAY(relocInfo, _locs_start, old_capacity, new_capacity); locs_start = REALLOC_RESOURCE_ARRAY(relocInfo, _locs_start, old_capacity, new_capacity);
} else { } else {
locs_start = NEW_RESOURCE_ARRAY(relocInfo, new_capacity); locs_start = NEW_RESOURCE_ARRAY(relocInfo, new_capacity);
Copy::conjoint_bytes(_locs_start, locs_start, old_capacity * sizeof(relocInfo)); Copy::conjoint_jbytes(_locs_start, locs_start, old_capacity * sizeof(relocInfo));
_locs_own = true; _locs_own = true;
} }
_locs_start = locs_start; _locs_start = locs_start;
@ -581,7 +581,7 @@ csize_t CodeBuffer::copy_relocations_to(CodeBlob* dest) const {
(HeapWord*)(buf+buf_offset), (HeapWord*)(buf+buf_offset),
(lsize + HeapWordSize-1) / HeapWordSize); (lsize + HeapWordSize-1) / HeapWordSize);
} else { } else {
Copy::conjoint_bytes(lstart, buf+buf_offset, lsize); Copy::conjoint_jbytes(lstart, buf+buf_offset, lsize);
} }
} }
buf_offset += lsize; buf_offset += lsize;

@ -242,10 +242,10 @@ void Compilation::setup_code_buffer(CodeBuffer* code, int call_stub_estimate) {
code->insts()->initialize_shared_locs((relocInfo*)locs_buffer, code->insts()->initialize_shared_locs((relocInfo*)locs_buffer,
locs_buffer_size / sizeof(relocInfo)); locs_buffer_size / sizeof(relocInfo));
code->initialize_consts_size(Compilation::desired_max_constant_size()); code->initialize_consts_size(Compilation::desired_max_constant_size());
// Call stubs + deopt/exception handler // Call stubs + two deopt handlers (regular and MH) + exception handler
code->initialize_stubs_size((call_stub_estimate * LIR_Assembler::call_stub_size) + code->initialize_stubs_size((call_stub_estimate * LIR_Assembler::call_stub_size) +
LIR_Assembler::exception_handler_size + LIR_Assembler::exception_handler_size +
LIR_Assembler::deopt_handler_size); 2 * LIR_Assembler::deopt_handler_size);
} }

@ -878,15 +878,12 @@ void GraphBuilder::load_constant() {
case T_OBJECT : case T_OBJECT :
{ {
ciObject* obj = con.as_object(); ciObject* obj = con.as_object();
if (obj->is_klass()) { if (!obj->is_loaded()
ciKlass* klass = obj->as_klass(); || (PatchALot && obj->klass() != ciEnv::current()->String_klass())) {
if (!klass->is_loaded() || PatchALot) { patch_state = state()->copy();
patch_state = state()->copy(); t = new ObjectConstant(obj);
t = new ObjectConstant(obj);
} else {
t = new InstanceConstant(klass->java_mirror());
}
} else { } else {
assert(!obj->is_klass(), "must be java_mirror of klass");
t = new InstanceConstant(obj->as_instance()); t = new InstanceConstant(obj->as_instance());
} }
break; break;

@ -601,7 +601,7 @@ JRT_END
static klassOop resolve_field_return_klass(methodHandle caller, int bci, TRAPS) { static klassOop resolve_field_return_klass(methodHandle caller, int bci, TRAPS) {
Bytecode_field* field_access = Bytecode_field_at(caller(), caller->bcp_from(bci)); Bytecode_field* field_access = Bytecode_field_at(caller, bci);
// This can be static or non-static field access // This can be static or non-static field access
Bytecodes::Code code = field_access->code(); Bytecodes::Code code = field_access->code();
@ -721,7 +721,7 @@ JRT_ENTRY(void, Runtime1::patch_code(JavaThread* thread, Runtime1::StubID stub_i
Handle load_klass(THREAD, NULL); // oop needed by load_klass_patching code Handle load_klass(THREAD, NULL); // oop needed by load_klass_patching code
if (stub_id == Runtime1::access_field_patching_id) { if (stub_id == Runtime1::access_field_patching_id) {
Bytecode_field* field_access = Bytecode_field_at(caller_method(), caller_method->bcp_from(bci)); Bytecode_field* field_access = Bytecode_field_at(caller_method, bci);
FieldAccessInfo result; // initialize class if needed FieldAccessInfo result; // initialize class if needed
Bytecodes::Code code = field_access->code(); Bytecodes::Code code = field_access->code();
constantPoolHandle constants(THREAD, caller_method->constants()); constantPoolHandle constants(THREAD, caller_method->constants());
@ -781,11 +781,9 @@ JRT_ENTRY(void, Runtime1::patch_code(JavaThread* thread, Runtime1::StubID stub_i
case Bytecodes::_ldc: case Bytecodes::_ldc:
case Bytecodes::_ldc_w: case Bytecodes::_ldc_w:
{ {
Bytecode_loadconstant* cc = Bytecode_loadconstant_at(caller_method(), Bytecode_loadconstant* cc = Bytecode_loadconstant_at(caller_method, bci);
caller_method->bcp_from(bci)); k = cc->resolve_constant(CHECK);
klassOop resolved = caller_method->constants()->klass_at(cc->index(), CHECK); assert(k != NULL && !k->is_klass(), "must be class mirror or other Java constant");
// ldc wants the java mirror.
k = resolved->klass_part()->java_mirror();
} }
break; break;
default: Unimplemented(); default: Unimplemented();
@ -816,6 +814,15 @@ JRT_ENTRY(void, Runtime1::patch_code(JavaThread* thread, Runtime1::StubID stub_i
// Return to the now deoptimized frame. // Return to the now deoptimized frame.
} }
// If we are patching in a non-perm oop, make sure the nmethod
// is on the right list.
if (ScavengeRootsInCode && load_klass.not_null() && load_klass->is_scavengable()) {
MutexLockerEx ml_code (CodeCache_lock, Mutex::_no_safepoint_check_flag);
nmethod* nm = CodeCache::find_nmethod(caller_frame.pc());
guarantee(nm != NULL, "only nmethods can contain non-perm oops");
if (!nm->on_scavenge_root_list())
CodeCache::add_scavenge_root_nmethod(nm);
}
// Now copy code back // Now copy code back
@ -1115,7 +1122,7 @@ JRT_LEAF(void, Runtime1::primitive_arraycopy(HeapWord* src, HeapWord* dst, int l
if (length == 0) return; if (length == 0) return;
// Not guaranteed to be word atomic, but that doesn't matter // Not guaranteed to be word atomic, but that doesn't matter
// for anything but an oop array, which is covered by oop_arraycopy. // for anything but an oop array, which is covered by oop_arraycopy.
Copy::conjoint_bytes(src, dst, length); Copy::conjoint_jbytes(src, dst, length);
JRT_END JRT_END
JRT_LEAF(void, Runtime1::oop_arraycopy(HeapWord* src, HeapWord* dst, int num)) JRT_LEAF(void, Runtime1::oop_arraycopy(HeapWord* src, HeapWord* dst, int num))

@ -106,7 +106,7 @@ public:
void BCEscapeAnalyzer::set_returned(ArgumentMap vars) { void BCEscapeAnalyzer::set_returned(ArgumentMap vars) {
for (int i = 0; i < _arg_size; i++) { for (int i = 0; i < _arg_size; i++) {
if (vars.contains(i)) if (vars.contains(i))
_arg_returned.set_bit(i); _arg_returned.set(i);
} }
_return_local = _return_local && !(vars.contains_unknown() || vars.contains_allocated()); _return_local = _return_local && !(vars.contains_unknown() || vars.contains_allocated());
_return_allocated = _return_allocated && vars.contains_allocated() && !(vars.contains_unknown() || vars.contains_vars()); _return_allocated = _return_allocated && vars.contains_allocated() && !(vars.contains_unknown() || vars.contains_vars());
@ -126,16 +126,16 @@ bool BCEscapeAnalyzer::is_arg_stack(ArgumentMap vars){
if (_conservative) if (_conservative)
return true; return true;
for (int i = 0; i < _arg_size; i++) { for (int i = 0; i < _arg_size; i++) {
if (vars.contains(i) && _arg_stack.at(i)) if (vars.contains(i) && _arg_stack.test(i))
return true; return true;
} }
return false; return false;
} }
void BCEscapeAnalyzer::clear_bits(ArgumentMap vars, BitMap &bm) { void BCEscapeAnalyzer::clear_bits(ArgumentMap vars, VectorSet &bm) {
for (int i = 0; i < _arg_size; i++) { for (int i = 0; i < _arg_size; i++) {
if (vars.contains(i)) { if (vars.contains(i)) {
bm.clear_bit(i); bm >>= i;
} }
} }
} }
@ -1157,15 +1157,15 @@ void BCEscapeAnalyzer::initialize() {
ciSignature* sig = method()->signature(); ciSignature* sig = method()->signature();
int j = 0; int j = 0;
if (!method()->is_static()) { if (!method()->is_static()) {
_arg_local.set_bit(0); _arg_local.set(0);
_arg_stack.set_bit(0); _arg_stack.set(0);
j++; j++;
} }
for (i = 0; i < sig->count(); i++) { for (i = 0; i < sig->count(); i++) {
ciType* t = sig->type_at(i); ciType* t = sig->type_at(i);
if (!t->is_primitive_type()) { if (!t->is_primitive_type()) {
_arg_local.set_bit(j); _arg_local.set(j);
_arg_stack.set_bit(j); _arg_stack.set(j);
} }
j += t->size(); j += t->size();
} }
@ -1198,9 +1198,9 @@ void BCEscapeAnalyzer::clear_escape_info() {
set_modified(var, OFFSET_ANY, 4); set_modified(var, OFFSET_ANY, 4);
set_global_escape(var); set_global_escape(var);
} }
_arg_local.clear(); _arg_local.Clear();
_arg_stack.clear(); _arg_stack.Clear();
_arg_returned.clear(); _arg_returned.Clear();
_return_local = false; _return_local = false;
_return_allocated = false; _return_allocated = false;
_allocated_escapes = true; _allocated_escapes = true;
@ -1254,7 +1254,7 @@ void BCEscapeAnalyzer::compute_escape_info() {
// Do not scan method if it has no object parameters and // Do not scan method if it has no object parameters and
// does not return an object (_return_allocated is set in initialize()). // does not return an object (_return_allocated is set in initialize()).
if (_arg_local.is_empty() && !_return_allocated) { if (_arg_local.Size() == 0 && !_return_allocated) {
// Clear all info since method's bytecode was not analysed and // Clear all info since method's bytecode was not analysed and
// set pessimistic escape information. // set pessimistic escape information.
clear_escape_info(); clear_escape_info();
@ -1275,14 +1275,14 @@ void BCEscapeAnalyzer::compute_escape_info() {
// //
if (!has_dependencies() && !methodData()->is_empty()) { if (!has_dependencies() && !methodData()->is_empty()) {
for (i = 0; i < _arg_size; i++) { for (i = 0; i < _arg_size; i++) {
if (_arg_local.at(i)) { if (_arg_local.test(i)) {
assert(_arg_stack.at(i), "inconsistent escape info"); assert(_arg_stack.test(i), "inconsistent escape info");
methodData()->set_arg_local(i); methodData()->set_arg_local(i);
methodData()->set_arg_stack(i); methodData()->set_arg_stack(i);
} else if (_arg_stack.at(i)) { } else if (_arg_stack.test(i)) {
methodData()->set_arg_stack(i); methodData()->set_arg_stack(i);
} }
if (_arg_returned.at(i)) { if (_arg_returned.test(i)) {
methodData()->set_arg_returned(i); methodData()->set_arg_returned(i);
} }
methodData()->set_arg_modified(i, _arg_modified[i]); methodData()->set_arg_modified(i, _arg_modified[i]);
@ -1308,9 +1308,12 @@ void BCEscapeAnalyzer::read_escape_info() {
// read escape information from method descriptor // read escape information from method descriptor
for (int i = 0; i < _arg_size; i++) { for (int i = 0; i < _arg_size; i++) {
_arg_local.at_put(i, methodData()->is_arg_local(i)); if (methodData()->is_arg_local(i))
_arg_stack.at_put(i, methodData()->is_arg_stack(i)); _arg_local.set(i);
_arg_returned.at_put(i, methodData()->is_arg_returned(i)); if (methodData()->is_arg_stack(i))
_arg_stack.set(i);
if (methodData()->is_arg_returned(i))
_arg_returned.set(i);
_arg_modified[i] = methodData()->arg_modified(i); _arg_modified[i] = methodData()->arg_modified(i);
} }
_return_local = methodData()->eflag_set(methodDataOopDesc::return_local); _return_local = methodData()->eflag_set(methodDataOopDesc::return_local);
@ -1358,26 +1361,26 @@ void BCEscapeAnalyzer::dump() {
BCEscapeAnalyzer::BCEscapeAnalyzer(ciMethod* method, BCEscapeAnalyzer* parent) BCEscapeAnalyzer::BCEscapeAnalyzer(ciMethod* method, BCEscapeAnalyzer* parent)
: _conservative(method == NULL || !EstimateArgEscape) : _conservative(method == NULL || !EstimateArgEscape)
, _arena(CURRENT_ENV->arena())
, _method(method) , _method(method)
, _methodData(method ? method->method_data() : NULL) , _methodData(method ? method->method_data() : NULL)
, _arg_size(method ? method->arg_size() : 0) , _arg_size(method ? method->arg_size() : 0)
, _stack() , _arg_local(_arena)
, _arg_local(_arg_size) , _arg_stack(_arena)
, _arg_stack(_arg_size) , _arg_returned(_arena)
, _arg_returned(_arg_size) , _dirty(_arena)
, _dirty(_arg_size)
, _return_local(false) , _return_local(false)
, _return_allocated(false) , _return_allocated(false)
, _allocated_escapes(false) , _allocated_escapes(false)
, _unknown_modified(false) , _unknown_modified(false)
, _dependencies() , _dependencies(_arena, 4, 0, NULL)
, _parent(parent) , _parent(parent)
, _level(parent == NULL ? 0 : parent->level() + 1) { , _level(parent == NULL ? 0 : parent->level() + 1) {
if (!_conservative) { if (!_conservative) {
_arg_local.clear(); _arg_local.Clear();
_arg_stack.clear(); _arg_stack.Clear();
_arg_returned.clear(); _arg_returned.Clear();
_dirty.clear(); _dirty.Clear();
Arena* arena = CURRENT_ENV->arena(); Arena* arena = CURRENT_ENV->arena();
_arg_modified = (uint *) arena->Amalloc(_arg_size * sizeof(uint)); _arg_modified = (uint *) arena->Amalloc(_arg_size * sizeof(uint));
Copy::zero_to_bytes(_arg_modified, _arg_size * sizeof(uint)); Copy::zero_to_bytes(_arg_modified, _arg_size * sizeof(uint));
@ -1414,8 +1417,8 @@ void BCEscapeAnalyzer::copy_dependencies(Dependencies *deps) {
deps->assert_evol_method(method()); deps->assert_evol_method(method());
} }
for (int i = 0; i < _dependencies.length(); i+=2) { for (int i = 0; i < _dependencies.length(); i+=2) {
ciKlass *k = _dependencies[i]->as_klass(); ciKlass *k = _dependencies.at(i)->as_klass();
ciMethod *m = _dependencies[i+1]->as_method(); ciMethod *m = _dependencies.at(i+1)->as_method();
deps->assert_unique_concrete_method(k, m); deps->assert_unique_concrete_method(k, m);
} }
} }
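The hunks above replace the analyzer's fixed-size BitMap argument sets with arena-allocated VectorSets, so every call site changes in lockstep: set_bit becomes set, at becomes test, clear becomes Clear, clear_bit becomes the >>= remove operator, and is_empty becomes Size() == 0. As a quick reference, here is a minimal, self-contained sketch of that operation mapping; ToyVectorSet is a hypothetical stand-in (plain std::vector storage), not HotSpot's VectorSet.

```cpp
#include <cstdio>
#include <vector>

// Hypothetical stand-in that mirrors only the five VectorSet operations
// this patch relies on; the storage here is just a std::vector<bool>.
class ToyVectorSet {
  std::vector<bool> _bits;
 public:
  void set(int i) {                  // patch: was BitMap::set_bit(i)
    if (i >= (int)_bits.size()) _bits.resize(i + 1, false);
    _bits[i] = true;
  }
  bool test(int i) const {           // patch: was BitMap::at(i)
    return i < (int)_bits.size() && _bits[i];
  }
  void operator>>=(int i) {          // patch: was BitMap::clear_bit(i)
    if (i < (int)_bits.size()) _bits[i] = false;
  }
  void Clear() { _bits.clear(); }    // patch: was BitMap::clear()
  int Size() const {                 // patch uses Size() == 0 for "is_empty"
    int n = 0;
    for (bool b : _bits) if (b) n++;
    return n;
  }
};

int main() {
  ToyVectorSet arg_local;
  arg_local.set(0);                                       // receiver slot
  arg_local.set(2);
  std::printf("arg 2 local? %d\n", arg_local.test(2));    // 1
  arg_local >>= 2;                                        // clear_bits() idiom
  std::printf("empty? %d\n", arg_local.Size() == 0);      // 0, slot 0 still set
  arg_local.Clear();
  std::printf("empty? %d\n", arg_local.Size() == 0);      // 1
  return 0;
}
```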

@@ -22,9 +22,6 @@
  *
  */
-define_array(ciObjectArray, ciObject*);
-define_stack(ciObjectList, ciObjectArray);
 // This class implements a fast, conservative analysis of effect of methods
 // on the escape state of their arguments. The analysis is at the bytecode
 // level.
@@ -34,18 +31,17 @@ class ciBlock;
 class BCEscapeAnalyzer : public ResourceObj {
  private:
+  Arena* _arena; // ciEnv arena
   bool _conservative; // If true, return maximally
   // conservative results.
   ciMethod* _method;
   ciMethodData* _methodData;
   int _arg_size;
-  intStack _stack;
-  BitMap _arg_local;
-  BitMap _arg_stack;
-  BitMap _arg_returned;
-  BitMap _dirty;
+  VectorSet _arg_local;
+  VectorSet _arg_stack;
+  VectorSet _arg_returned;
+  VectorSet _dirty;
   enum{ ARG_OFFSET_MAX = 31};
   uint *_arg_modified;
@@ -54,7 +50,7 @@ class BCEscapeAnalyzer : public ResourceObj {
   bool _allocated_escapes;
   bool _unknown_modified;
-  ciObjectList _dependencies;
+  GrowableArray<ciObject *> _dependencies;
   ciMethodBlocks *_methodBlocks;
@@ -68,20 +64,10 @@ class BCEscapeAnalyzer : public ResourceObj {
  private:
   // helper functions
   bool is_argument(int i) { return i >= 0 && i < _arg_size; }
-  void raw_push(int i) { _stack.push(i); }
-  int raw_pop() { return _stack.is_empty() ? -1 : _stack.pop(); }
-  void apush(int i) { raw_push(i); }
-  void spush() { raw_push(-1); }
-  void lpush() { spush(); spush(); }
-  int apop() { return raw_pop(); }
-  void spop() { assert(_stack.is_empty() || _stack.top() == -1, ""); raw_pop(); }
-  void lpop() { spop(); spop(); }
   void set_returned(ArgumentMap vars);
   bool is_argument(ArgumentMap vars);
   bool is_arg_stack(ArgumentMap vars);
-  void clear_bits(ArgumentMap vars, BitMap &bs);
+  void clear_bits(ArgumentMap vars, VectorSet &bs);
   void set_method_escape(ArgumentMap vars);
   void set_global_escape(ArgumentMap vars);
   void set_dirty(ArgumentMap vars);
@@ -116,25 +102,25 @@ class BCEscapeAnalyzer : public ResourceObj {
   ciMethodData* methodData() const { return _methodData; }
   BCEscapeAnalyzer* parent() const { return _parent; }
   int level() const { return _level; }
-  ciObjectList* dependencies() { return &_dependencies; }
+  GrowableArray<ciObject *>* dependencies() { return &_dependencies; }
   bool has_dependencies() const { return !_dependencies.is_empty(); }
   // retrieval of interprocedural escape information
   // The given argument does not escape the callee.
   bool is_arg_local(int i) const {
-    return !_conservative && _arg_local.at(i);
+    return !_conservative && _arg_local.test(i);
   }
   // The given argument escapes the callee, but does not become globally
   // reachable.
   bool is_arg_stack(int i) const {
-    return !_conservative && _arg_stack.at(i);
+    return !_conservative && _arg_stack.test(i);
   }
   // The given argument does not escape globally, and may be returned.
   bool is_arg_returned(int i) const {
-    return !_conservative && _arg_returned.at(i); }
+    return !_conservative && _arg_returned.test(i); }
   // True iff only input arguments are returned.
   bool is_return_local() const {

@@ -44,12 +44,22 @@ size_t ciCPCache::get_f1_offset(int index) {
 // ciCPCache::is_f1_null_at
 bool ciCPCache::is_f1_null_at(int index) {
   VM_ENTRY_MARK;
-  constantPoolCacheOop cpcache = (constantPoolCacheOop) get_oop();
-  oop f1 = cpcache->secondary_entry_at(index)->f1();
+  oop f1 = entry_at(index)->f1();
   return (f1 == NULL);
 }
+
+// ------------------------------------------------------------------
+// ciCPCache::get_pool_index
+int ciCPCache::get_pool_index(int index) {
+  VM_ENTRY_MARK;
+  ConstantPoolCacheEntry* e = entry_at(index);
+  if (e->is_secondary_entry())
+    e = entry_at(e->main_entry_index());
+  return e->constant_pool_index();
+}
 // ------------------------------------------------------------------
 // ciCPCache::print
 //

@@ -29,6 +29,18 @@
 // Note: This class is called ciCPCache as ciConstantPoolCache is used
 // for something different.
 class ciCPCache : public ciObject {
+private:
+  constantPoolCacheOop get_cpCacheOop() {   // must be called inside a VM_ENTRY_MARK
+    return (constantPoolCacheOop) get_oop();
+  }
+
+  ConstantPoolCacheEntry* entry_at(int i) {
+    int raw_index = i;
+    if (constantPoolCacheOopDesc::is_secondary_index(i))
+      raw_index = constantPoolCacheOopDesc::decode_secondary_index(i);
+    return get_cpCacheOop()->entry_at(raw_index);
+  }
+
 public:
   ciCPCache(constantPoolCacheHandle cpcache) : ciObject(cpcache) {}
@@ -41,5 +53,7 @@ public:
   bool is_f1_null_at(int index);
+
+  int get_pool_index(int index);
   void print();
 };

@@ -85,6 +85,7 @@ friend class ciCallSite; \
 friend class ciConstantPoolCache; \
 friend class ciField; \
 friend class ciConstant; \
+friend class ciCPCache; \
 friend class ciFlags; \
 friend class ciExceptionHandler; \
 friend class ciCallProfile; \

@@ -511,9 +511,22 @@ ciKlass* ciEnv::get_klass_by_index(constantPoolHandle cpool,
 //
 // Implementation of get_constant_by_index().
 ciConstant ciEnv::get_constant_by_index_impl(constantPoolHandle cpool,
-                                             int index,
+                                             int pool_index, int cache_index,
                                              ciInstanceKlass* accessor) {
+  bool ignore_will_link;
   EXCEPTION_CONTEXT;
+  int index = pool_index;
+  if (cache_index >= 0) {
+    assert(index < 0, "only one kind of index at a time");
+    ConstantPoolCacheEntry* cpc_entry = cpool->cache()->entry_at(cache_index);
+    index = cpc_entry->constant_pool_index();
+    oop obj = cpc_entry->f1();
+    if (obj != NULL) {
+      assert(obj->is_instance(), "must be an instance");
+      ciObject* ciobj = get_object(obj);
+      return ciConstant(T_OBJECT, ciobj);
+    }
+  }
   constantTag tag = cpool->tag_at(index);
   if (tag.is_int()) {
     return ciConstant(T_INT, (jint)cpool->int_at(index));
@@ -540,8 +553,7 @@ ciConstant ciEnv::get_constant_by_index_impl(constantPoolHandle cpool,
     return ciConstant(T_OBJECT, constant);
   } else if (tag.is_klass() || tag.is_unresolved_klass()) {
     // 4881222: allow ldc to take a class type
-    bool ignore;
-    ciKlass* klass = get_klass_by_index_impl(cpool, index, ignore, accessor);
+    ciKlass* klass = get_klass_by_index_impl(cpool, index, ignore_will_link, accessor);
     if (HAS_PENDING_EXCEPTION) {
       CLEAR_PENDING_EXCEPTION;
       record_out_of_memory_failure();
@@ -549,42 +561,32 @@ ciConstant ciEnv::get_constant_by_index_impl(constantPoolHandle cpool,
     }
     assert (klass->is_instance_klass() || klass->is_array_klass(),
             "must be an instance or array klass ");
-    return ciConstant(T_OBJECT, klass);
+    return ciConstant(T_OBJECT, klass->java_mirror());
   } else if (tag.is_object()) {
     oop obj = cpool->object_at(index);
     assert(obj->is_instance(), "must be an instance");
     ciObject* ciobj = get_object(obj);
     return ciConstant(T_OBJECT, ciobj);
+  } else if (tag.is_method_type()) {
+    // must execute Java code to link this CP entry into cache[i].f1
+    ciSymbol* signature = get_object(cpool->method_type_signature_at(index))->as_symbol();
+    ciObject* ciobj = get_unloaded_method_type_constant(signature);
+    return ciConstant(T_OBJECT, ciobj);
+  } else if (tag.is_method_handle()) {
+    // must execute Java code to link this CP entry into cache[i].f1
+    int ref_kind = cpool->method_handle_ref_kind_at(index);
+    int callee_index = cpool->method_handle_klass_index_at(index);
+    ciKlass* callee = get_klass_by_index_impl(cpool, callee_index, ignore_will_link, accessor);
+    ciSymbol* name = get_object(cpool->method_handle_name_ref_at(index))->as_symbol();
+    ciSymbol* signature = get_object(cpool->method_handle_signature_ref_at(index))->as_symbol();
+    ciObject* ciobj = get_unloaded_method_handle_constant(callee, name, signature, ref_kind);
+    return ciConstant(T_OBJECT, ciobj);
   } else {
     ShouldNotReachHere();
     return ciConstant();
   }
 }
-// ------------------------------------------------------------------
-// ciEnv::is_unresolved_string_impl
-//
-// Implementation of is_unresolved_string().
-bool ciEnv::is_unresolved_string_impl(instanceKlass* accessor, int index) const {
-  EXCEPTION_CONTEXT;
-  assert(accessor->is_linked(), "must be linked before accessing constant pool");
-  constantPoolOop cpool = accessor->constants();
-  constantTag tag = cpool->tag_at(index);
-  return tag.is_unresolved_string();
-}
-// ------------------------------------------------------------------
-// ciEnv::is_unresolved_klass_impl
-//
-// Implementation of is_unresolved_klass().
-bool ciEnv::is_unresolved_klass_impl(instanceKlass* accessor, int index) const {
-  EXCEPTION_CONTEXT;
-  assert(accessor->is_linked(), "must be linked before accessing constant pool");
-  constantPoolOop cpool = accessor->constants();
-  constantTag tag = cpool->tag_at(index);
-  return tag.is_unresolved_klass();
-}
 // ------------------------------------------------------------------
 // ciEnv::get_constant_by_index
 //
@@ -592,31 +594,9 @@ bool ciEnv::is_unresolved_klass_impl(instanceKlass* accessor, int index) const {
 //
 // Implementation note: this query is currently in no way cached.
 ciConstant ciEnv::get_constant_by_index(constantPoolHandle cpool,
-                                        int index,
+                                        int pool_index, int cache_index,
                                         ciInstanceKlass* accessor) {
-  GUARDED_VM_ENTRY(return get_constant_by_index_impl(cpool, index, accessor);)
-}
-// ------------------------------------------------------------------
-// ciEnv::is_unresolved_string
-//
-// Check constant pool
-//
-// Implementation note: this query is currently in no way cached.
-bool ciEnv::is_unresolved_string(ciInstanceKlass* accessor,
-                                 int index) const {
-  GUARDED_VM_ENTRY(return is_unresolved_string_impl(accessor->get_instanceKlass(), index); )
-}
-// ------------------------------------------------------------------
-// ciEnv::is_unresolved_klass
-//
-// Check constant pool
-//
-// Implementation note: this query is currently in no way cached.
-bool ciEnv::is_unresolved_klass(ciInstanceKlass* accessor,
-                                int index) const {
-  GUARDED_VM_ENTRY(return is_unresolved_klass_impl(accessor->get_instanceKlass(), index); )
+  GUARDED_VM_ENTRY(return get_constant_by_index_impl(cpool, pool_index, cache_index, accessor);)
 }
 // ------------------------------------------------------------------

@@ -116,12 +116,8 @@ private:
                               bool& is_accessible,
                               ciInstanceKlass* loading_klass);
   ciConstant get_constant_by_index(constantPoolHandle cpool,
-                                   int constant_index,
+                                   int pool_index, int cache_index,
                                    ciInstanceKlass* accessor);
-  bool       is_unresolved_string(ciInstanceKlass* loading_klass,
-                                  int constant_index) const;
-  bool       is_unresolved_klass(ciInstanceKlass* loading_klass,
-                                 int constant_index) const;
   ciField*   get_field_by_index(ciInstanceKlass* loading_klass,
                                 int field_index);
   ciMethod*  get_method_by_index(constantPoolHandle cpool,
@@ -137,12 +133,8 @@ private:
                                    bool& is_accessible,
                                    ciInstanceKlass* loading_klass);
   ciConstant get_constant_by_index_impl(constantPoolHandle cpool,
-                                        int constant_index,
+                                        int pool_index, int cache_index,
                                         ciInstanceKlass* loading_klass);
-  bool       is_unresolved_string_impl (instanceKlass* loading_klass,
-                                        int constant_index) const;
-  bool       is_unresolved_klass_impl (instanceKlass* loading_klass,
-                                       int constant_index) const;
   ciField*   get_field_by_index_impl(ciInstanceKlass* loading_klass,
                                      int field_index);
   ciMethod*  get_method_by_index_impl(constantPoolHandle cpool,
@@ -190,6 +182,25 @@ private:
     return _factory->get_unloaded_klass(accessing_klass, name, true);
   }
+
+  // Get a ciKlass representing an unloaded klass mirror.
+  // Result is not necessarily unique, but will be unloaded.
+  ciInstance* get_unloaded_klass_mirror(ciKlass* type) {
+    return _factory->get_unloaded_klass_mirror(type);
+  }
+
+  // Get a ciInstance representing an unresolved method handle constant.
+  ciInstance* get_unloaded_method_handle_constant(ciKlass*  holder,
+                                                  ciSymbol* name,
+                                                  ciSymbol* signature,
+                                                  int       ref_kind) {
+    return _factory->get_unloaded_method_handle_constant(holder, name, signature, ref_kind);
+  }
+
+  // Get a ciInstance representing an unresolved method type constant.
+  ciInstance* get_unloaded_method_type_constant(ciSymbol* signature) {
+    return _factory->get_unloaded_method_type_constant(signature);
+  }
+
   // See if we already have an unloaded klass for the given name
   // or return NULL if not.
   ciKlass *check_get_unloaded_klass(ciKlass* accessing_klass, ciSymbol* name) {

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 1999, 2008, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1999, 2010, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
@@ -44,9 +44,7 @@ ciInstanceKlass::ciInstanceKlass(KlassHandle h_k) :
   _flags = ciFlags(access_flags);
   _has_finalizer = access_flags.has_finalizer();
   _has_subklass = ik->subklass() != NULL;
-  _is_initialized = ik->is_initialized();
-  // Next line must follow and use the result of the previous line:
-  _is_linked = _is_initialized || ik->is_linked();
+  _init_state = (instanceKlass::ClassState)ik->get_init_state();
   _nonstatic_field_size = ik->nonstatic_field_size();
   _has_nonstatic_fields = ik->has_nonstatic_fields();
   _nonstatic_fields = NULL; // initialized lazily by compute_nonstatic_fields:
@@ -91,8 +89,7 @@ ciInstanceKlass::ciInstanceKlass(ciSymbol* name,
   : ciKlass(name, ciInstanceKlassKlass::make())
 {
   assert(name->byte_at(0) != '[', "not an instance klass");
-  _is_initialized = false;
-  _is_linked = false;
+  _init_state = (instanceKlass::ClassState)0;
   _nonstatic_field_size = -1;
   _has_nonstatic_fields = false;
   _nonstatic_fields = NULL;
@@ -109,21 +106,10 @@ ciInstanceKlass::ciInstanceKlass(ciSymbol* name,
 // ------------------------------------------------------------------
 // ciInstanceKlass::compute_shared_is_initialized
-bool ciInstanceKlass::compute_shared_is_initialized() {
+void ciInstanceKlass::compute_shared_init_state() {
   GUARDED_VM_ENTRY(
     instanceKlass* ik = get_instanceKlass();
-    _is_initialized = ik->is_initialized();
-    return _is_initialized;
-  )
-}
-
-// ------------------------------------------------------------------
-// ciInstanceKlass::compute_shared_is_linked
-bool ciInstanceKlass::compute_shared_is_linked() {
-  GUARDED_VM_ENTRY(
-    instanceKlass* ik = get_instanceKlass();
-    _is_linked = ik->is_linked();
-    return _is_linked;
+    _init_state = (instanceKlass::ClassState)ik->get_init_state();
   )
 }
@@ -323,8 +309,8 @@ ciInstanceKlass* ciInstanceKlass::super() {
 // ciInstanceKlass::java_mirror
 //
 // Get the instance of java.lang.Class corresponding to this klass.
+// Cache it on this->_java_mirror.
 ciInstance* ciInstanceKlass::java_mirror() {
+  assert(is_loaded(), "must be loaded");
   if (_java_mirror == NULL) {
     _java_mirror = ciKlass::java_mirror();
   }

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 1999, 2008, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1999, 2010, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
@@ -39,9 +39,8 @@ private:
   jobject                _loader;
   jobject                _protection_domain;
+  instanceKlass::ClassState _init_state; // state of class
   bool                   _is_shared;
-  bool                   _is_initialized;
-  bool                   _is_linked;
   bool                   _has_finalizer;
   bool                   _has_subklass;
   bool                   _has_nonstatic_fields;
@@ -87,27 +86,34 @@ protected:
   bool is_shared() { return _is_shared; }
-  bool compute_shared_is_initialized();
-  bool compute_shared_is_linked();
+  void compute_shared_init_state();
   bool compute_shared_has_subklass();
   int  compute_shared_nof_implementors();
   int  compute_nonstatic_fields();
   GrowableArray<ciField*>* compute_nonstatic_fields_impl(GrowableArray<ciField*>* super_fields);
+
+  // Update the init_state for shared klasses
+  void update_if_shared(instanceKlass::ClassState expected) {
+    if (_is_shared && _init_state != expected) {
+      if (is_loaded()) compute_shared_init_state();
+    }
+  }
+
 public:
   // Has this klass been initialized?
   bool                   is_initialized() {
-    if (_is_shared && !_is_initialized) {
-      return is_loaded() && compute_shared_is_initialized();
-    }
-    return _is_initialized;
+    update_if_shared(instanceKlass::fully_initialized);
+    return _init_state == instanceKlass::fully_initialized;
+  }
+  // Is this klass being initialized?
+  bool                   is_being_initialized() {
+    update_if_shared(instanceKlass::being_initialized);
+    return _init_state == instanceKlass::being_initialized;
   }
   // Has this klass been linked?
   bool                   is_linked() {
-    if (_is_shared && !_is_linked) {
-      return is_loaded() && compute_shared_is_linked();
-    }
-    return _is_linked;
+    update_if_shared(instanceKlass::linked);
+    return _init_state >= instanceKlass::linked;
   }
   // General klass information.

@@ -192,8 +192,14 @@ ciKlass* ciKlass::find_klass(ciSymbol* klass_name) {
 // ------------------------------------------------------------------
 // ciKlass::java_mirror
+//
+// Get the instance of java.lang.Class corresponding to this klass.
+// If it is an unloaded instance or array klass, return an unloaded
+// mirror object of type Class.
 ciInstance* ciKlass::java_mirror() {
   GUARDED_VM_ENTRY(
+    if (!is_loaded())
+      return ciEnv::current()->get_unloaded_klass_mirror(this);
     oop java_mirror = get_Klass()->java_mirror();
     return CURRENT_ENV->get_object(java_mirror)->as_instance();
   )

@@ -54,10 +54,10 @@ ciMethod::ciMethod(methodHandle h_m) : ciObject(h_m) {
   _code = NULL;
   _exception_handlers = NULL;
   _liveness = NULL;
-  _bcea = NULL;
   _method_blocks = NULL;
 #ifdef COMPILER2
   _flow = NULL;
+  _bcea = NULL;
 #endif // COMPILER2
   ciEnv *env = CURRENT_ENV;
@@ -121,11 +121,11 @@ ciMethod::ciMethod(ciInstanceKlass* holder,
   _intrinsic_id = vmIntrinsics::_none;
   _liveness = NULL;
   _can_be_statically_bound = false;
-  _bcea = NULL;
   _method_blocks = NULL;
   _method_data = NULL;
 #ifdef COMPILER2
   _flow = NULL;
+  _bcea = NULL;
 #endif // COMPILER2
 }
@@ -1033,10 +1033,15 @@ bool ciMethod::is_accessor () const { FETCH_FLAG_FROM_VM(is_accessor)
 bool ciMethod::is_initializer () const { FETCH_FLAG_FROM_VM(is_initializer); }
 BCEscapeAnalyzer *ciMethod::get_bcea() {
+#ifdef COMPILER2
   if (_bcea == NULL) {
     _bcea = new (CURRENT_ENV->arena()) BCEscapeAnalyzer(this, NULL);
   }
   return _bcea;
+#else // COMPILER2
+  ShouldNotReachHere();
+  return NULL;
+#endif // COMPILER2
 }
 ciMethodBlocks *ciMethod::get_method_blocks() {

@@ -48,7 +48,6 @@ class ciMethod : public ciObject {
   ciInstanceKlass* _holder;
   ciSignature*     _signature;
   ciMethodData*    _method_data;
-  BCEscapeAnalyzer* _bcea;
   ciMethodBlocks*  _method_blocks;
   // Code attributes.
@@ -72,7 +71,8 @@ class ciMethod : public ciObject {
   // Optional liveness analyzer.
   MethodLiveness* _liveness;
 #ifdef COMPILER2
-  ciTypeFlow*     _flow;
+  ciTypeFlow*       _flow;
+  BCEscapeAnalyzer* _bcea;
 #endif
   ciMethod(methodHandle h_m);

@@ -70,6 +70,7 @@ ciObjectFactory::ciObjectFactory(Arena* arena,
   _unloaded_methods = new (arena) GrowableArray<ciMethod*>(arena, 4, 0, NULL);
   _unloaded_klasses = new (arena) GrowableArray<ciKlass*>(arena, 8, 0, NULL);
+  _unloaded_instances = new (arena) GrowableArray<ciInstance*>(arena, 4, 0, NULL);
   _return_addresses =
     new (arena) GrowableArray<ciReturnAddress*>(arena, 8, 0, NULL);
 }
@@ -443,6 +444,74 @@ ciKlass* ciObjectFactory::get_unloaded_klass(ciKlass* accessing_klass,
   return new_klass;
 }
+
+//------------------------------------------------------------------
+// ciObjectFactory::get_unloaded_instance
+//
+// Get a ciInstance representing an as-yet undetermined instance of a given class.
+//
+ciInstance* ciObjectFactory::get_unloaded_instance(ciInstanceKlass* instance_klass) {
+  for (int i=0; i<_unloaded_instances->length(); i++) {
+    ciInstance* entry = _unloaded_instances->at(i);
+    if (entry->klass()->equals(instance_klass)) {
+      // We've found a match.
+      return entry;
+    }
+  }
+
+  // This is a new unloaded instance. Create it and stick it in
+  // the cache.
+  ciInstance* new_instance = new (arena()) ciInstance(instance_klass);
+
+  init_ident_of(new_instance);
+  _unloaded_instances->append(new_instance);
+
+  // make sure it looks the way we want:
+  assert(!new_instance->is_loaded(), "");
+  assert(new_instance->klass() == instance_klass, "");
+
+  return new_instance;
+}
+
+//------------------------------------------------------------------
+// ciObjectFactory::get_unloaded_klass_mirror
+//
+// Get a ciInstance representing an unresolved klass mirror.
+//
+// Currently, this ignores the parameters and returns a unique unloaded instance.
+ciInstance* ciObjectFactory::get_unloaded_klass_mirror(ciKlass* type) {
+  assert(ciEnv::_Class_klass != NULL, "");
+  return get_unloaded_instance(ciEnv::_Class_klass->as_instance_klass());
+}
+
+//------------------------------------------------------------------
+// ciObjectFactory::get_unloaded_method_handle_constant
+//
+// Get a ciInstance representing an unresolved method handle constant.
+//
+// Currently, this ignores the parameters and returns a unique unloaded instance.
+ciInstance* ciObjectFactory::get_unloaded_method_handle_constant(ciKlass*  holder,
+                                                                 ciSymbol* name,
+                                                                 ciSymbol* signature,
+                                                                 int       ref_kind) {
+  if (ciEnv::_MethodHandle_klass == NULL)  return NULL;
+  return get_unloaded_instance(ciEnv::_MethodHandle_klass->as_instance_klass());
+}
+
+//------------------------------------------------------------------
+// ciObjectFactory::get_unloaded_method_type_constant
+//
+// Get a ciInstance representing an unresolved method type constant.
+//
+// Currently, this ignores the parameters and returns a unique unloaded instance.
+ciInstance* ciObjectFactory::get_unloaded_method_type_constant(ciSymbol* signature) {
+  if (ciEnv::_MethodType_klass == NULL)  return NULL;
+  return get_unloaded_instance(ciEnv::_MethodType_klass->as_instance_klass());
+}
+
 //------------------------------------------------------------------
 // ciObjectFactory::get_empty_methodData
 //
@@ -637,7 +706,8 @@ void ciObjectFactory::print_contents() {
 //
 // Print debugging information about the object factory
 void ciObjectFactory::print() {
-  tty->print("<ciObjectFactory oops=%d unloaded_methods=%d unloaded_klasses=%d>",
+  tty->print("<ciObjectFactory oops=%d unloaded_methods=%d unloaded_instances=%d unloaded_klasses=%d>",
              _ci_objects->length(), _unloaded_methods->length(),
+             _unloaded_instances->length(),
              _unloaded_klasses->length());
 }

@@ -39,6 +39,7 @@ private:
   GrowableArray<ciObject*>*        _ci_objects;
   GrowableArray<ciMethod*>*        _unloaded_methods;
   GrowableArray<ciKlass*>*         _unloaded_klasses;
+  GrowableArray<ciInstance*>*      _unloaded_instances;
   GrowableArray<ciReturnAddress*>* _return_addresses;
   int                              _next_ident;
@@ -73,6 +74,8 @@ private:
   void print_contents_impl();
+
+  ciInstance* get_unloaded_instance(ciInstanceKlass* klass);
 public:
   static bool is_initialized() { return _initialized; }
@@ -98,6 +101,18 @@ public:
                               ciSymbol* name,
                               bool create_if_not_found);
+
+  // Get a ciInstance representing an unresolved klass mirror.
+  ciInstance* get_unloaded_klass_mirror(ciKlass* type);
+
+  // Get a ciInstance representing an unresolved method handle constant.
+  ciInstance* get_unloaded_method_handle_constant(ciKlass*  holder,
+                                                  ciSymbol* name,
+                                                  ciSymbol* signature,
+                                                  int       ref_kind);
+
+  // Get a ciInstance representing an unresolved method type constant.
+  ciInstance* get_unloaded_method_type_constant(ciSymbol* signature);
+
   // Get the ciMethodData representing the methodData for a method
   // with none.

@@ -186,12 +186,13 @@ ciKlass* ciBytecodeStream::get_klass(bool& will_link) {
 }
 // ------------------------------------------------------------------
-// ciBytecodeStream::get_constant_index
+// ciBytecodeStream::get_constant_raw_index
 //
 // If this bytecode is one of the ldc variants, get the index of the
 // referenced constant.
-int ciBytecodeStream::get_constant_index() const {
-  switch(cur_bc()) {
+int ciBytecodeStream::get_constant_raw_index() const {
+  // work-alike for Bytecode_loadconstant::raw_index()
+  switch (cur_bc()) {
   case Bytecodes::_ldc:
     return get_index_u1();
   case Bytecodes::_ldc_w:
@@ -202,25 +203,52 @@ int ciBytecodeStream::get_constant_index() const {
     return 0;
   }
 }
+
+// ------------------------------------------------------------------
+// ciBytecodeStream::get_constant_pool_index
+// Decode any CP cache index into a regular pool index.
+int ciBytecodeStream::get_constant_pool_index() const {
+  // work-alike for Bytecode_loadconstant::pool_index()
+  int index = get_constant_raw_index();
+  if (has_cache_index()) {
+    return get_cpcache()->get_pool_index(index);
+  }
+  return index;
+}
+
+// ------------------------------------------------------------------
+// ciBytecodeStream::get_constant_cache_index
+// Return the CP cache index, or -1 if there isn't any.
+int ciBytecodeStream::get_constant_cache_index() const {
+  // work-alike for Bytecode_loadconstant::cache_index()
+  return has_cache_index() ? get_constant_raw_index() : -1;
+}
+
 // ------------------------------------------------------------------
 // ciBytecodeStream::get_constant
 //
 // If this bytecode is one of the ldc variants, get the referenced
 // constant.
 ciConstant ciBytecodeStream::get_constant() {
+  int pool_index = get_constant_raw_index();
+  int cache_index = -1;
+  if (has_cache_index()) {
+    cache_index = pool_index;
+    pool_index = -1;
+  }
   VM_ENTRY_MARK;
   constantPoolHandle cpool(_method->get_methodOop()->constants());
-  return CURRENT_ENV->get_constant_by_index(cpool, get_constant_index(), _holder);
+  return CURRENT_ENV->get_constant_by_index(cpool, pool_index, cache_index, _holder);
 }
 // ------------------------------------------------------------------
-bool ciBytecodeStream::is_unresolved_string() const {
-  return CURRENT_ENV->is_unresolved_string(_holder, get_constant_index());
-}
-
-// ------------------------------------------------------------------
-bool ciBytecodeStream::is_unresolved_klass() const {
-  return CURRENT_ENV->is_unresolved_klass(_holder, get_klass_index());
-}
+// ciBytecodeStream::get_constant_pool_tag
+//
+// If this bytecode is one of the ldc variants, get the referenced
+// constant.
+constantTag ciBytecodeStream::get_constant_pool_tag(int index) const {
+  VM_ENTRY_MARK;
+  return _method->get_methodOop()->constants()->tag_at(index);
+}
 // ------------------------------------------------------------------
@@ -378,13 +406,16 @@ int ciBytecodeStream::get_method_signature_index() {
 // ------------------------------------------------------------------
 // ciBytecodeStream::get_cpcache
-ciCPCache* ciBytecodeStream::get_cpcache() {
-  VM_ENTRY_MARK;
-  // Get the constant pool.
-  constantPoolOop cpool = _holder->get_instanceKlass()->constants();
-  constantPoolCacheOop cpcache = cpool->cache();
-
-  return CURRENT_ENV->get_object(cpcache)->as_cpcache();
+ciCPCache* ciBytecodeStream::get_cpcache() const {
+  if (_cpcache == NULL) {
+    VM_ENTRY_MARK;
+    // Get the constant pool.
+    constantPoolOop cpool = _holder->get_instanceKlass()->constants();
+    constantPoolCacheOop cpcache = cpool->cache();
+
+    *(ciCPCache**)&_cpcache = CURRENT_ENV->get_object(cpcache)->as_cpcache();
+  }
+  return _cpcache;
 }
 // ------------------------------------------------------------------

@@ -46,6 +46,7 @@ private:
   ciMethod* _method;           // the method
   ciInstanceKlass* _holder;
+  ciCPCache* _cpcache;
   address _bc_start;            // Start of current bytecode for table
   address _was_wide;            // Address past last wide bytecode
   jint* _table_base;            // Aligned start of last table or switch
@@ -58,7 +59,9 @@ private:
   void reset( address base, unsigned int size ) {
     _bc_start =_was_wide = 0;
-    _start = _pc = base; _end = base + size; }
+    _start = _pc = base; _end = base + size;
+    _cpcache = NULL;
+  }
   void assert_wide(bool require_wide) const {
     if (require_wide)
@@ -136,15 +139,20 @@ public:
   bool is_wide() const { return ( _pc == _was_wide ); }
   // Does this instruction contain an index which refes into the CP cache?
-  bool uses_cp_cache() const { return Bytecodes::uses_cp_cache(cur_bc_raw()); }
+  bool has_cache_index() const { return Bytecodes::uses_cp_cache(cur_bc_raw()); }
   int get_index_u1() const {
     return bytecode()->get_index_u1(cur_bc_raw());
   }
+  int get_index_u1_cpcache() const {
+    return bytecode()->get_index_u1_cpcache(cur_bc_raw());
+  }
   // Get a byte index following this bytecode.
   // If prefixed with a wide bytecode, get a wide index.
   int get_index() const {
+    assert(!has_cache_index(), "else use cpcache variant");
     return (_pc == _was_wide)   // was widened?
       ? get_index_u2(true)      // yes, return wide index
       : get_index_u1();         // no, return narrow index
@@ -207,7 +215,9 @@ public:
     return cur_bci() + get_int_table(index); }
   // --- Constant pool access ---
-  int get_constant_index() const;
+  int get_constant_raw_index() const;
+  int get_constant_pool_index() const;
+  int get_constant_cache_index() const;
   int get_field_index();
   int get_method_index();
@@ -217,12 +227,17 @@ public:
   int get_klass_index() const;
   // If this bytecode is one of the ldc variants, get the referenced
-  // constant
+  // constant. Do not attempt to resolve it, since that would require
+  // execution of Java code. If it is not resolved, return an unloaded
+  // object (ciConstant.as_object()->is_loaded() == false).
   ciConstant get_constant();
+  constantTag get_constant_pool_tag(int index) const;
-  // True if the ldc variant points to an unresolved string
-  bool is_unresolved_string() const;
-  // True if the ldc variant points to an unresolved klass
-  bool is_unresolved_klass() const;
+  // True if the klass-using bytecode points to an unresolved klass
+  bool is_unresolved_klass() const {
+    constantTag tag = get_constant_pool_tag(get_klass_index());
+    return tag.is_unresolved_klass();
+  }
   // If this bytecode is one of get_field, get_static, put_field,
   // or put_static, get the referenced field.
@@ -238,7 +253,7 @@ public:
   int get_method_holder_index();
   int get_method_signature_index();
-  ciCPCache* get_cpcache();
+  ciCPCache* get_cpcache() const;
   ciCallSite* get_call_site();
 };

@@ -712,10 +712,8 @@ void ciTypeFlow::StateVector::do_ldc(ciBytecodeStream* str) {
     ciObject* obj = con.as_object();
     if (obj->is_null_object()) {
       push_null();
-    } else if (obj->is_klass()) {
-      // The type of ldc <class> is java.lang.Class
-      push_object(outer()->env()->Class_klass());
     } else {
+      assert(!obj->is_klass(), "must be java_mirror of klass");
       push_object(obj->klass());
     }
   } else {

@@ -117,6 +117,29 @@ void ClassFileParser::parse_constant_pool_entries(constantPoolHandle cp, int len
         cp->string_index_at_put(index, string_index);
       }
       break;
+      case JVM_CONSTANT_MethodHandle :
+      case JVM_CONSTANT_MethodType :
+        if (!EnableMethodHandles ||
+            _major_version < Verifier::INVOKEDYNAMIC_MAJOR_VERSION) {
+          classfile_parse_error(
+            (!EnableInvokeDynamic ?
+             "This JVM does not support constant tag %u in class file %s" :
+             "Class file version does not support constant tag %u in class file %s"),
+            tag, CHECK);
+        }
+        if (tag == JVM_CONSTANT_MethodHandle) {
+          cfs->guarantee_more(4, CHECK);  // ref_kind, method_index, tag/access_flags
+          u1 ref_kind = cfs->get_u1_fast();
+          u2 method_index = cfs->get_u2_fast();
+          cp->method_handle_index_at_put(index, ref_kind, method_index);
+        } else if (tag == JVM_CONSTANT_MethodType) {
+          cfs->guarantee_more(3, CHECK);  // signature_index, tag/access_flags
+          u2 signature_index = cfs->get_u2_fast();
+          cp->method_type_index_at_put(index, signature_index);
+        } else {
+          ShouldNotReachHere();
+        }
+        break;
       case JVM_CONSTANT_Integer :
         {
           cfs->guarantee_more(5, CHECK);  // bytes, tag/access_flags
@@ -337,6 +360,60 @@ constantPoolHandle ClassFileParser::parse_constant_pool(TRAPS) {
           cp->unresolved_string_at_put(index, sym);
         }
         break;
+      case JVM_CONSTANT_MethodHandle :
+        {
+          int ref_index = cp->method_handle_index_at(index);
+          check_property(
+            valid_cp_range(ref_index, length) &&
+                EnableMethodHandles,
+            "Invalid constant pool index %u in class file %s",
+            ref_index, CHECK_(nullHandle));
+          constantTag tag = cp->tag_at(ref_index);
+          int ref_kind = cp->method_handle_ref_kind_at(index);
+          switch (ref_kind) {
+          case JVM_REF_getField:
+          case JVM_REF_getStatic:
+          case JVM_REF_putField:
+          case JVM_REF_putStatic:
+            check_property(
+              tag.is_field(),
+              "Invalid constant pool index %u in class file %s (not a field)",
+              ref_index, CHECK_(nullHandle));
+            break;
+          case JVM_REF_invokeVirtual:
+          case JVM_REF_invokeStatic:
+          case JVM_REF_invokeSpecial:
+          case JVM_REF_newInvokeSpecial:
+            check_property(
+              tag.is_method(),
+              "Invalid constant pool index %u in class file %s (not a method)",
+              ref_index, CHECK_(nullHandle));
+            break;
+          case JVM_REF_invokeInterface:
+            check_property(
+              tag.is_interface_method(),
+              "Invalid constant pool index %u in class file %s (not an interface method)",
+              ref_index, CHECK_(nullHandle));
+            break;
+          default:
+            classfile_parse_error(
+              "Bad method handle kind at constant pool index %u in class file %s",
+              index, CHECK_(nullHandle));
+          }
+          // Keep the ref_index unchanged. It will be indirected at link-time.
+        }
+        break;
+      case JVM_CONSTANT_MethodType :
+        {
+          int ref_index = cp->method_type_index_at(index);
+          check_property(
+            valid_cp_range(ref_index, length) &&
+                cp->tag_at(ref_index).is_utf8() &&
+                EnableMethodHandles,
+            "Invalid constant pool index %u in class file %s",
+            ref_index, CHECK_(nullHandle));
+        }
+        break;
       default:
         fatal(err_msg("bad constant pool tag value %u",
                       cp->tag_at(index).value()));
@@ -452,6 +529,43 @@ constantPoolHandle ClassFileParser::parse_constant_pool(TRAPS) {
         }
         break;
       }
+      case JVM_CONSTANT_MethodHandle: {
+        int ref_index = cp->method_handle_index_at(index);
+        int ref_kind  = cp->method_handle_ref_kind_at(index);
+        switch (ref_kind) {
+        case JVM_REF_invokeVirtual:
+        case JVM_REF_invokeStatic:
+        case JVM_REF_invokeSpecial:
+        case JVM_REF_newInvokeSpecial:
+          {
+            int name_and_type_ref_index = cp->name_and_type_ref_index_at(ref_index);
+            int name_ref_index = cp->name_ref_index_at(name_and_type_ref_index);
+            symbolHandle name(THREAD, cp->symbol_at(name_ref_index));
+            if (ref_kind == JVM_REF_newInvokeSpecial) {
+              if (name() != vmSymbols::object_initializer_name()) {
+                classfile_parse_error(
+                  "Bad constructor name at constant pool index %u in class file %s",
+                  name_ref_index, CHECK_(nullHandle));
+              }
+            } else {
+              if (name() == vmSymbols::object_initializer_name()) {
+                classfile_parse_error(
+                  "Bad method name at constant pool index %u in class file %s",
+                  name_ref_index, CHECK_(nullHandle));
+              }
+            }
+          }
+          break;
+          // Other ref_kinds are already fully checked in previous pass.
+        }
+        break;
+      }
+      case JVM_CONSTANT_MethodType: {
+        symbolHandle no_name = vmSymbolHandles::type_name(); // place holder
+        symbolHandle signature(THREAD, cp->method_type_signature_at(index));
+        verify_legal_method_signature(no_name, signature, CHECK_(nullHandle));
+        break;
+      }
     }  // end of switch
   }  // end of for
@@ -467,7 +581,7 @@ void ClassFileParser::patch_constant_pool(constantPoolHandle cp, int index, Hand
   case JVM_CONSTANT_UnresolvedClass :
     // Patching a class means pre-resolving it.
     // The name in the constant pool is ignored.
-    if (patch->klass() == SystemDictionary::Class_klass()) { // %%% java_lang_Class::is_instance
+    if (java_lang_Class::is_instance(patch())) {
       guarantee_property(!java_lang_Class::is_primitive(patch()),
                          "Illegal class patch at %d in class file %s",
                          index, CHECK);

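The new parser cases above read the two constant-pool forms added for method handle and method type constants. For reference, the class-file layouts being consumed are fixed by the class file format: CONSTANT_MethodHandle_info is tag 15 with a one-byte reference_kind (1 through 9, JVM_REF_getField through JVM_REF_invokeInterface) and a two-byte reference_index; CONSTANT_MethodType_info is tag 16 with a two-byte descriptor_index. The sketch below only restates those layouts; the struct definitions and the small helper are illustrative, not HotSpot code.

```cpp
#include <cstdint>
#include <cstdio>

// Class-file layouts parsed by the new cases above (tags fixed by the
// class file format: CONSTANT_MethodHandle = 15, CONSTANT_MethodType = 16).
struct CONSTANT_MethodHandle_info {
  uint8_t  tag;             // 15
  uint8_t  reference_kind;  // JVM_REF_getField (1) .. JVM_REF_invokeInterface (9)
  uint16_t reference_index; // CP index of a Fieldref/Methodref/InterfaceMethodref
};

struct CONSTANT_MethodType_info {
  uint8_t  tag;              // 16
  uint16_t descriptor_index; // CP index of a CONSTANT_Utf8 method descriptor
};

// Illustrative mirror of the parser's ref_kind switch: kinds 1-4 must
// reference a Fieldref entry, kinds 5-9 a method-like entry.
static bool ref_kind_uses_field(uint8_t kind) { return kind >= 1 && kind <= 4; }

int main() {
  CONSTANT_MethodHandle_info mh = { 15, 5 /* REF_invokeVirtual */, 42 };
  CONSTANT_MethodType_info   mt = { 16, 7 };
  std::printf("MethodHandle kind=%u -> expects %s at CP #%u\n",
              mh.reference_kind,
              ref_kind_uses_field(mh.reference_kind) ? "Fieldref" : "Methodref",
              mh.reference_index);
  std::printf("MethodType descriptor at CP #%u\n", mt.descriptor_index);
  return 0;
}
```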
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 1997, 2009, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2010, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
@@ -832,7 +832,6 @@ objArrayOop ClassLoader::get_system_packages(TRAPS) {
 instanceKlassHandle ClassLoader::load_classfile(symbolHandle h_name, TRAPS) {
-  VTuneClassLoadMarker clm;
   ResourceMark rm(THREAD);
   EventMark m("loading class " INTPTR_FORMAT, (address)h_name());
   ThreadProfilerMark tpm(ThreadProfilerMark::classLoaderRegion);

@@ -2454,6 +2454,48 @@ Handle SystemDictionary::find_method_handle_type(symbolHandle signature,
   return Handle(THREAD, (oop) result.get_jobject());
 }
+
+// Ask Java code to find or construct a method handle constant.
+Handle SystemDictionary::link_method_handle_constant(KlassHandle caller,
+                                                     int ref_kind, //e.g., JVM_REF_invokeVirtual
+                                                     KlassHandle callee,
+                                                     symbolHandle name_sym,
+                                                     symbolHandle signature,
+                                                     TRAPS) {
+  Handle empty;
+  Handle name = java_lang_String::create_from_symbol(name_sym(), CHECK_(empty));
+  Handle type;
+  if (signature->utf8_length() > 0 && signature->byte_at(0) == '(') {
+    bool ignore_is_on_bcp = false;
+    type = find_method_handle_type(signature, caller, ignore_is_on_bcp, CHECK_(empty));
+  } else {
+    SignatureStream ss(signature(), false);
+    if (!ss.is_done()) {
+      oop mirror = ss.as_java_mirror(caller->class_loader(), caller->protection_domain(),
+                                     SignatureStream::NCDFError, CHECK_(empty));
+      type = Handle(THREAD, mirror);
+      ss.next();
+      if (!ss.is_done())  type = Handle();  // error!
+    }
+  }
+  if (type.is_null()) {
+    THROW_MSG_(vmSymbols::java_lang_LinkageError(), "bad signature", empty);
+  }
+
+  // call sun.dyn.MethodHandleNatives::linkMethodHandleConstant(Class caller, int refKind, Class callee, String name, Object type) -> MethodHandle
+  JavaCallArguments args;
+  args.push_oop(caller->java_mirror());  // the referring class
+  args.push_int(ref_kind);
+  args.push_oop(callee->java_mirror());  // the target class
+  args.push_oop(name());
+  args.push_oop(type());
+  JavaValue result(T_OBJECT);
+  JavaCalls::call_static(&result,
+                         SystemDictionary::MethodHandleNatives_klass(),
+                         vmSymbols::linkMethodHandleConstant_name(),
+                         vmSymbols::linkMethodHandleConstant_signature(),
+                         &args, CHECK_(empty));
+  return Handle(THREAD, (oop) result.get_jobject());
+}
+
 // Ask Java code to find or construct a java.dyn.CallSite for the given
 // name and signature, as interpreted relative to the given class loader.

@@ -473,6 +473,13 @@ public:
                                         KlassHandle accessing_klass,
                                         bool& return_bcp_flag,
                                         TRAPS);
+  // ask Java to compute a java.dyn.MethodHandle object for a given CP entry
+  static Handle    link_method_handle_constant(KlassHandle caller,
+                                               int ref_kind, //e.g., JVM_REF_invokeVirtual
+                                               KlassHandle callee,
+                                               symbolHandle name,
+                                               symbolHandle signature,
+                                               TRAPS);
   // ask Java to create a dynamic call site, while linking an invokedynamic op
   static Handle    make_dynamic_call_site(Handle bootstrap_method,
                                           // Callee information:

@@ -1598,7 +1598,10 @@ void ClassVerifier::verify_ldc(
   if (opcode == Bytecodes::_ldc || opcode == Bytecodes::_ldc_w) {
     if (!tag.is_unresolved_string() && !tag.is_unresolved_klass()) {
       types = (1 << JVM_CONSTANT_Integer) | (1 << JVM_CONSTANT_Float)
-            | (1 << JVM_CONSTANT_String) | (1 << JVM_CONSTANT_Class);
+            | (1 << JVM_CONSTANT_String) | (1 << JVM_CONSTANT_Class)
+            | (1 << JVM_CONSTANT_MethodHandle) | (1 << JVM_CONSTANT_MethodType);
+      // Note: The class file parser already verified the legality of
+      // MethodHandle and MethodType constants.
       verify_cp_type(index, cp, types, CHECK_VERIFY(this));
     }
   } else {
@@ -1632,6 +1635,14 @@ void ClassVerifier::verify_ldc(
     current_frame->push_stack_2(
       VerificationType::long_type(),
       VerificationType::long2_type(), CHECK_VERIFY(this));
+  } else if (tag.is_method_handle()) {
+    current_frame->push_stack(
+      VerificationType::reference_type(
+        vmSymbols::java_dyn_MethodHandle()), CHECK_VERIFY(this));
+  } else if (tag.is_method_type()) {
+    current_frame->push_stack(
+      VerificationType::reference_type(
+        vmSymbols::java_dyn_MethodType()), CHECK_VERIFY(this));
   } else {
     verify_error(bci, "Invalid index in ldc");
     return;
@@ -1920,9 +1931,12 @@ void ClassVerifier::verify_invoke_instructions(
   // Get referenced class type
   VerificationType ref_class_type;
   if (opcode == Bytecodes::_invokedynamic) {
-    if (!EnableInvokeDynamic) {
+    if (!EnableInvokeDynamic ||
+        _klass->major_version() < Verifier::INVOKEDYNAMIC_MAJOR_VERSION) {
       class_format_error(
-        "invokedynamic instructions not enabled on this JVM",
+        (!EnableInvokeDynamic ?
+         "invokedynamic instructions not enabled in this JVM" :
+         "invokedynamic instructions not supported by this class file version"),
         _klass->external_name());
       return;
     }

@@ -25,7 +25,10 @@
 // The verifier class
 class Verifier : AllStatic {
  public:
-  enum { STACKMAP_ATTRIBUTE_MAJOR_VERSION = 50 };
+  enum {
+    STACKMAP_ATTRIBUTE_MAJOR_VERSION    = 50,
+    INVOKEDYNAMIC_MAJOR_VERSION         = 51
+  };
   typedef enum { ThrowException, NoException } Mode;
   /**

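The new INVOKEDYNAMIC_MAJOR_VERSION constant gates the feature on class-file major version 51, the Java 7 format (major 50, the existing STACKMAP constant, is the Java 6 format). A minimal sketch of the combined flag-plus-version check the verifier change above performs; the function name and the boolean flag stand-in are hypothetical, only the version constants come from the patch.

```cpp
#include <cstdio>

// Version constants as introduced above: major 50 = Java 6, 51 = Java 7.
enum {
  STACKMAP_ATTRIBUTE_MAJOR_VERSION = 50,
  INVOKEDYNAMIC_MAJOR_VERSION      = 51
};

// Mirrors the gate added to ClassVerifier::verify_invoke_instructions:
// both the runtime flag and a new-enough class file are required.
static bool allow_invokedynamic(bool enable_invoke_dynamic, int class_major_version) {
  return enable_invoke_dynamic &&
         class_major_version >= INVOKEDYNAMIC_MAJOR_VERSION;
}

int main() {
  std::printf("%d\n", allow_invokedynamic(true, 50));  // 0: class file too old
  std::printf("%d\n", allow_invokedynamic(true, 51));  // 1: accepted
  return 0;
}
```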
@@ -246,6 +246,8 @@
   /* internal up-calls made only by the JVM, via class sun.dyn.MethodHandleNatives: */ \
   template(findMethodHandleType_name,                 "findMethodHandleType")                     \
   template(findMethodHandleType_signature, "(Ljava/lang/Class;[Ljava/lang/Class;)Ljava/dyn/MethodType;") \
+  template(linkMethodHandleConstant_name,             "linkMethodHandleConstant")                 \
+  template(linkMethodHandleConstant_signature, "(Ljava/lang/Class;ILjava/lang/Class;Ljava/lang/String;Ljava/lang/Object;)Ljava/dyn/MethodHandle;") \
   template(makeDynamicCallSite_name,                  "makeDynamicCallSite")                      \
   template(makeDynamicCallSite_signature, "(Ljava/dyn/MethodHandle;Ljava/lang/String;Ljava/dyn/MethodType;Ljava/lang/Object;Lsun/dyn/MemberName;I)Ljava/dyn/CallSite;") \
   NOT_LP64(  do_alias(machine_word_signature,         int_signature)  ) \

@ -210,6 +210,7 @@ AdapterBlob* AdapterBlob::create(CodeBuffer* cb) {
{ {
MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag); MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
blob = new (size) AdapterBlob(size, cb); blob = new (size) AdapterBlob(size, cb);
CodeCache::commit(blob);
} }
// Track memory usage statistic after releasing CodeCache_lock // Track memory usage statistic after releasing CodeCache_lock
MemoryService::track_code_cache_memory_usage(); MemoryService::track_code_cache_memory_usage();
@ -281,7 +282,6 @@ RuntimeStub* RuntimeStub::new_runtime_stub(const char* stub_name,
tty->print_cr("Decoding %s " INTPTR_FORMAT, stub_id, stub); tty->print_cr("Decoding %s " INTPTR_FORMAT, stub_id, stub);
Disassembler::decode(stub->instructions_begin(), stub->instructions_end()); Disassembler::decode(stub->instructions_begin(), stub->instructions_end());
} }
VTune::register_stub(stub_id, stub->instructions_begin(), stub->instructions_end());
Forte::register_stub(stub_id, stub->instructions_begin(), stub->instructions_end()); Forte::register_stub(stub_id, stub->instructions_begin(), stub->instructions_end());
if (JvmtiExport::should_post_dynamic_code_generated()) { if (JvmtiExport::should_post_dynamic_code_generated()) {
@ -356,7 +356,6 @@ DeoptimizationBlob* DeoptimizationBlob::create(
tty->print_cr("Decoding %s " INTPTR_FORMAT, blob_id, blob); tty->print_cr("Decoding %s " INTPTR_FORMAT, blob_id, blob);
Disassembler::decode(blob->instructions_begin(), blob->instructions_end()); Disassembler::decode(blob->instructions_begin(), blob->instructions_end());
} }
VTune::register_stub(blob_id, blob->instructions_begin(), blob->instructions_end());
Forte::register_stub(blob_id, blob->instructions_begin(), blob->instructions_end()); Forte::register_stub(blob_id, blob->instructions_begin(), blob->instructions_end());
if (JvmtiExport::should_post_dynamic_code_generated()) { if (JvmtiExport::should_post_dynamic_code_generated()) {
@ -414,7 +413,6 @@ UncommonTrapBlob* UncommonTrapBlob::create(
tty->print_cr("Decoding %s " INTPTR_FORMAT, blob_id, blob); tty->print_cr("Decoding %s " INTPTR_FORMAT, blob_id, blob);
Disassembler::decode(blob->instructions_begin(), blob->instructions_end()); Disassembler::decode(blob->instructions_begin(), blob->instructions_end());
} }
VTune::register_stub(blob_id, blob->instructions_begin(), blob->instructions_end());
Forte::register_stub(blob_id, blob->instructions_begin(), blob->instructions_end()); Forte::register_stub(blob_id, blob->instructions_begin(), blob->instructions_end());
if (JvmtiExport::should_post_dynamic_code_generated()) { if (JvmtiExport::should_post_dynamic_code_generated()) {
@ -474,7 +472,6 @@ ExceptionBlob* ExceptionBlob::create(
tty->print_cr("Decoding %s " INTPTR_FORMAT, blob_id, blob); tty->print_cr("Decoding %s " INTPTR_FORMAT, blob_id, blob);
Disassembler::decode(blob->instructions_begin(), blob->instructions_end()); Disassembler::decode(blob->instructions_begin(), blob->instructions_end());
} }
VTune::register_stub(blob_id, blob->instructions_begin(), blob->instructions_end());
Forte::register_stub(blob_id, blob->instructions_begin(), blob->instructions_end()); Forte::register_stub(blob_id, blob->instructions_begin(), blob->instructions_end());
if (JvmtiExport::should_post_dynamic_code_generated()) { if (JvmtiExport::should_post_dynamic_code_generated()) {
@ -533,7 +530,6 @@ SafepointBlob* SafepointBlob::create(
tty->print_cr("Decoding %s " INTPTR_FORMAT, blob_id, blob); tty->print_cr("Decoding %s " INTPTR_FORMAT, blob_id, blob);
Disassembler::decode(blob->instructions_begin(), blob->instructions_end()); Disassembler::decode(blob->instructions_begin(), blob->instructions_end());
} }
VTune::register_stub(blob_id, blob->instructions_begin(), blob->instructions_end());
Forte::register_stub(blob_id, blob->instructions_begin(), blob->instructions_end()); Forte::register_stub(blob_id, blob->instructions_begin(), blob->instructions_end());
if (JvmtiExport::should_post_dynamic_code_generated()) { if (JvmtiExport::should_post_dynamic_code_generated()) {

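One detail worth noting in the AdapterBlob change above is the ordering: the blob is committed to the cache while CodeCache_lock is held, and the memory-usage notification runs only after the lock scope closes. A small sketch of that pattern, with invented names and std::mutex standing in for the VM lock:

    #include <cstdio>
    #include <mutex>
    #include <vector>

    struct Blob { int size; };

    static std::mutex cache_lock;          // stands in for CodeCache_lock
    static std::vector<Blob*> cache;

    // Stand-in for the memory-usage tracking call: deliberately run outside
    // the cache lock.
    static void track_memory_usage() {
      std::printf("tracking %zu blobs\n", cache.size());
    }

    static Blob* create_blob(int size) {
      Blob* blob = new Blob{size};
      {
        std::lock_guard<std::mutex> guard(cache_lock);   // hold the lock only for registration
        cache.push_back(blob);
      }
      track_memory_usage();                              // after the lock is released
      return blob;
    }

    int main() {
      create_blob(128);
      return 0;
    }
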
@ -93,6 +93,8 @@ class CodeBlob_sizes {
CodeHeap * CodeCache::_heap = new CodeHeap(); CodeHeap * CodeCache::_heap = new CodeHeap();
int CodeCache::_number_of_blobs = 0; int CodeCache::_number_of_blobs = 0;
int CodeCache::_number_of_adapters = 0;
int CodeCache::_number_of_nmethods = 0;
int CodeCache::_number_of_nmethods_with_dependencies = 0; int CodeCache::_number_of_nmethods_with_dependencies = 0;
bool CodeCache::_needs_cache_clean = false; bool CodeCache::_needs_cache_clean = false;
nmethod* CodeCache::_scavenge_root_nmethods = NULL; nmethod* CodeCache::_scavenge_root_nmethods = NULL;
@ -176,8 +178,14 @@ void CodeCache::free(CodeBlob* cb) {
verify_if_often(); verify_if_often();
print_trace("free", cb); print_trace("free", cb);
if (cb->is_nmethod() && ((nmethod *)cb)->has_dependencies()) { if (cb->is_nmethod()) {
_number_of_nmethods_with_dependencies--; _number_of_nmethods--;
if (((nmethod *)cb)->has_dependencies()) {
_number_of_nmethods_with_dependencies--;
}
}
if (cb->is_adapter_blob()) {
_number_of_adapters--;
} }
_number_of_blobs--; _number_of_blobs--;
@ -191,9 +199,16 @@ void CodeCache::free(CodeBlob* cb) {
void CodeCache::commit(CodeBlob* cb) { void CodeCache::commit(CodeBlob* cb) {
// this is called by nmethod::nmethod, which must already own CodeCache_lock // this is called by nmethod::nmethod, which must already own CodeCache_lock
assert_locked_or_safepoint(CodeCache_lock); assert_locked_or_safepoint(CodeCache_lock);
if (cb->is_nmethod() && ((nmethod *)cb)->has_dependencies()) { if (cb->is_nmethod()) {
_number_of_nmethods_with_dependencies++; _number_of_nmethods++;
if (((nmethod *)cb)->has_dependencies()) {
_number_of_nmethods_with_dependencies++;
}
} }
if (cb->is_adapter_blob()) {
_number_of_adapters++;
}
// flush the hardware I-cache // flush the hardware I-cache
ICache::invalidate_range(cb->instructions_begin(), cb->instructions_size()); ICache::invalidate_range(cb->instructions_begin(), cb->instructions_size());
} }

@ -1,5 +1,5 @@
/* /*
* Copyright (c) 1997, 2008, Oracle and/or its affiliates. All rights reserved. * Copyright (c) 1997, 2010, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
* *
* This code is free software; you can redistribute it and/or modify it * This code is free software; you can redistribute it and/or modify it
@ -43,6 +43,8 @@ class CodeCache : AllStatic {
// 4422213 or 4436291 for details. // 4422213 or 4436291 for details.
static CodeHeap * _heap; static CodeHeap * _heap;
static int _number_of_blobs; static int _number_of_blobs;
static int _number_of_adapters;
static int _number_of_nmethods;
static int _number_of_nmethods_with_dependencies; static int _number_of_nmethods_with_dependencies;
static bool _needs_cache_clean; static bool _needs_cache_clean;
static nmethod* _scavenge_root_nmethods; // linked via nm->scavenge_root_link() static nmethod* _scavenge_root_nmethods; // linked via nm->scavenge_root_link()
@ -105,6 +107,8 @@ class CodeCache : AllStatic {
static nmethod* first_nmethod(); static nmethod* first_nmethod();
static nmethod* next_nmethod (CodeBlob* cb); static nmethod* next_nmethod (CodeBlob* cb);
static int nof_blobs() { return _number_of_blobs; } static int nof_blobs() { return _number_of_blobs; }
static int nof_adapters() { return _number_of_adapters; }
static int nof_nmethods() { return _number_of_nmethods; }
// GC support // GC support
static void gc_epilogue(); static void gc_epilogue();

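The commit()/free() changes above keep dedicated adapter and nmethod counters in sync so nof_adapters() and nof_nmethods() are cheap reads. A compact sketch of that bookkeeping, using stand-in types rather than CodeBlob/nmethod:

    #include <cassert>

    struct Blob {
      bool is_nmethod;
      bool is_adapter;
      bool has_dependencies;
    };

    class Cache {
      int _number_of_adapters = 0;
      int _number_of_nmethods = 0;
      int _number_of_nmethods_with_dependencies = 0;
     public:
      // Caller is assumed to hold the cache lock, as CodeCache::commit() asserts.
      void commit(const Blob& cb) {
        if (cb.is_nmethod) {
          _number_of_nmethods++;
          if (cb.has_dependencies) _number_of_nmethods_with_dependencies++;
        }
        if (cb.is_adapter) _number_of_adapters++;
      }
      // Mirror image of commit(), run when a blob is freed.
      void free(const Blob& cb) {
        if (cb.is_nmethod) {
          _number_of_nmethods--;
          if (cb.has_dependencies) _number_of_nmethods_with_dependencies--;
        }
        if (cb.is_adapter) _number_of_adapters--;
      }
      int nof_adapters() const { return _number_of_adapters; }
      int nof_nmethods() const { return _number_of_nmethods; }
    };

    int main() {
      Cache cache;
      Blob nm{true, false, true};
      Blob adapter{false, true, false};
      cache.commit(nm);
      cache.commit(adapter);
      cache.free(adapter);
      assert(cache.nof_nmethods() == 1 && cache.nof_adapters() == 0);
      return 0;
    }
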
@ -397,11 +397,6 @@ void nmethod::add_handler_for_exception_and_pc(Handle exception, address pc, add
//-------------end of code for ExceptionCache-------------- //-------------end of code for ExceptionCache--------------
void nmFlags::clear() {
assert(sizeof(nmFlags) == sizeof(int), "using more than one word for nmFlags");
*(jint*)this = 0;
}
int nmethod::total_size() const { int nmethod::total_size() const {
return return
code_size() + code_size() +
@ -419,8 +414,32 @@ const char* nmethod::compile_kind() const {
return NULL; return NULL;
} }
// %%% This variable is no longer used? // Fill in default values for various flag fields
int nmethod::_zombie_instruction_size = NativeJump::instruction_size; void nmethod::init_defaults() {
_state = alive;
_marked_for_reclamation = 0;
_has_flushed_dependencies = 0;
_speculatively_disconnected = 0;
_has_unsafe_access = 0;
_has_method_handle_invokes = 0;
_marked_for_deoptimization = 0;
_lock_count = 0;
_stack_traversal_mark = 0;
_unload_reported = false; // jvmti state
NOT_PRODUCT(_has_debug_info = false);
_oops_do_mark_link = NULL;
_jmethod_id = NULL;
_osr_link = NULL;
_scavenge_root_link = NULL;
_scavenge_root_state = 0;
_saved_nmethod_link = NULL;
_compiler = NULL;
#ifdef HAVE_DTRACE_H
_trap_offset = 0;
#endif // def HAVE_DTRACE_H
}
nmethod* nmethod::new_native_nmethod(methodHandle method, nmethod* nmethod::new_native_nmethod(methodHandle method,
@ -580,24 +599,16 @@ nmethod::nmethod(
debug_only(No_Safepoint_Verifier nsv;) debug_only(No_Safepoint_Verifier nsv;)
assert_locked_or_safepoint(CodeCache_lock); assert_locked_or_safepoint(CodeCache_lock);
NOT_PRODUCT(_has_debug_info = false); init_defaults();
_oops_do_mark_link = NULL;
_method = method; _method = method;
_entry_bci = InvocationEntryBci; _entry_bci = InvocationEntryBci;
_osr_link = NULL;
_scavenge_root_link = NULL;
_scavenge_root_state = 0;
_saved_nmethod_link = NULL;
_compiler = NULL;
// We have no exception handler or deopt handler make the // We have no exception handler or deopt handler make the
// values something that will never match a pc like the nmethod vtable entry // values something that will never match a pc like the nmethod vtable entry
_exception_offset = 0; _exception_offset = 0;
_deoptimize_offset = 0; _deoptimize_offset = 0;
_deoptimize_mh_offset = 0; _deoptimize_mh_offset = 0;
_orig_pc_offset = 0; _orig_pc_offset = 0;
#ifdef HAVE_DTRACE_H
_trap_offset = 0;
#endif // def HAVE_DTRACE_H
_stub_offset = data_offset(); _stub_offset = data_offset();
_consts_offset = data_offset(); _consts_offset = data_offset();
_oops_offset = data_offset(); _oops_offset = data_offset();
@ -615,17 +626,9 @@ nmethod::nmethod(
_exception_cache = NULL; _exception_cache = NULL;
_pc_desc_cache.reset_to(NULL); _pc_desc_cache.reset_to(NULL);
flags.clear();
flags.state = alive;
_markedForDeoptimization = 0;
_lock_count = 0;
_stack_traversal_mark = 0;
code_buffer->copy_oops_to(this); code_buffer->copy_oops_to(this);
debug_only(verify_scavenge_root_oops()); debug_only(verify_scavenge_root_oops());
CodeCache::commit(this); CodeCache::commit(this);
VTune::create_nmethod(this);
} }
if (PrintNativeNMethods || PrintDebugInfo || PrintRelocations || PrintDependencies) { if (PrintNativeNMethods || PrintDebugInfo || PrintRelocations || PrintDependencies) {
@ -673,14 +676,9 @@ nmethod::nmethod(
debug_only(No_Safepoint_Verifier nsv;) debug_only(No_Safepoint_Verifier nsv;)
assert_locked_or_safepoint(CodeCache_lock); assert_locked_or_safepoint(CodeCache_lock);
NOT_PRODUCT(_has_debug_info = false); init_defaults();
_oops_do_mark_link = NULL;
_method = method; _method = method;
_entry_bci = InvocationEntryBci; _entry_bci = InvocationEntryBci;
_osr_link = NULL;
_scavenge_root_link = NULL;
_scavenge_root_state = 0;
_compiler = NULL;
// We have no exception handler or deopt handler make the // We have no exception handler or deopt handler make the
// values something that will never match a pc like the nmethod vtable entry // values something that will never match a pc like the nmethod vtable entry
_exception_offset = 0; _exception_offset = 0;
@ -706,17 +704,9 @@ nmethod::nmethod(
_exception_cache = NULL; _exception_cache = NULL;
_pc_desc_cache.reset_to(NULL); _pc_desc_cache.reset_to(NULL);
flags.clear();
flags.state = alive;
_markedForDeoptimization = 0;
_lock_count = 0;
_stack_traversal_mark = 0;
code_buffer->copy_oops_to(this); code_buffer->copy_oops_to(this);
debug_only(verify_scavenge_root_oops()); debug_only(verify_scavenge_root_oops());
CodeCache::commit(this); CodeCache::commit(this);
VTune::create_nmethod(this);
} }
if (PrintNMethods || PrintDebugInfo || PrintRelocations || PrintDependencies) { if (PrintNMethods || PrintDebugInfo || PrintRelocations || PrintDependencies) {
@ -781,20 +771,13 @@ nmethod::nmethod(
debug_only(No_Safepoint_Verifier nsv;) debug_only(No_Safepoint_Verifier nsv;)
assert_locked_or_safepoint(CodeCache_lock); assert_locked_or_safepoint(CodeCache_lock);
NOT_PRODUCT(_has_debug_info = false); init_defaults();
_oops_do_mark_link = NULL;
_method = method; _method = method;
_entry_bci = entry_bci;
_compile_id = compile_id; _compile_id = compile_id;
_comp_level = comp_level; _comp_level = comp_level;
_entry_bci = entry_bci;
_osr_link = NULL;
_scavenge_root_link = NULL;
_scavenge_root_state = 0;
_compiler = compiler; _compiler = compiler;
_orig_pc_offset = orig_pc_offset; _orig_pc_offset = orig_pc_offset;
#ifdef HAVE_DTRACE_H
_trap_offset = 0;
#endif // def HAVE_DTRACE_H
_stub_offset = instructions_offset() + code_buffer->total_offset_of(code_buffer->stubs()->start()); _stub_offset = instructions_offset() + code_buffer->total_offset_of(code_buffer->stubs()->start());
// Exception handler and deopt handler are in the stub section // Exception handler and deopt handler are in the stub section
@ -821,15 +804,6 @@ nmethod::nmethod(
_exception_cache = NULL; _exception_cache = NULL;
_pc_desc_cache.reset_to(scopes_pcs_begin()); _pc_desc_cache.reset_to(scopes_pcs_begin());
flags.clear();
flags.state = alive;
_markedForDeoptimization = 0;
_unload_reported = false; // jvmti state
_lock_count = 0;
_stack_traversal_mark = 0;
// Copy contents of ScopeDescRecorder to nmethod // Copy contents of ScopeDescRecorder to nmethod
code_buffer->copy_oops_to(this); code_buffer->copy_oops_to(this);
debug_info->copy_to(this); debug_info->copy_to(this);
@ -841,8 +815,6 @@ nmethod::nmethod(
CodeCache::commit(this); CodeCache::commit(this);
VTune::create_nmethod(this);
// Copy contents of ExceptionHandlerTable to nmethod // Copy contents of ExceptionHandlerTable to nmethod
handler_table->copy_to(this); handler_table->copy_to(this);
nul_chk_table->copy_to(this); nul_chk_table->copy_to(this);
@ -988,11 +960,6 @@ void nmethod::print_nmethod(bool printmethod) {
} }
void nmethod::set_version(int v) {
flags.version = v;
}
// Promote one word from an assembly-time handle to a live embedded oop. // Promote one word from an assembly-time handle to a live embedded oop.
inline void nmethod::initialize_immediate_oop(oop* dest, jobject handle) { inline void nmethod::initialize_immediate_oop(oop* dest, jobject handle) {
if (handle == NULL || if (handle == NULL ||
@ -1139,6 +1106,8 @@ void nmethod::cleanup_inline_caches() {
// This is a private interface with the sweeper. // This is a private interface with the sweeper.
void nmethod::mark_as_seen_on_stack() { void nmethod::mark_as_seen_on_stack() {
assert(is_not_entrant(), "must be a non-entrant method"); assert(is_not_entrant(), "must be a non-entrant method");
// Set the traversal mark to ensure that the sweeper does 2
// cleaning passes before moving to zombie.
set_stack_traversal_mark(NMethodSweeper::traversal_count()); set_stack_traversal_mark(NMethodSweeper::traversal_count());
} }
@ -1207,7 +1176,7 @@ void nmethod::make_unloaded(BoolObjectClosure* is_alive, oop cause) {
// for later on. // for later on.
CodeCache::set_needs_cache_clean(true); CodeCache::set_needs_cache_clean(true);
} }
flags.state = unloaded; _state = unloaded;
// Log the unloading. // Log the unloading.
log_state_change(); log_state_change();
@ -1233,21 +1202,21 @@ void nmethod::log_state_change() const {
if (LogCompilation) { if (LogCompilation) {
if (xtty != NULL) { if (xtty != NULL) {
ttyLocker ttyl; // keep the following output all in one block ttyLocker ttyl; // keep the following output all in one block
if (flags.state == unloaded) { if (_state == unloaded) {
xtty->begin_elem("make_unloaded thread='" UINTX_FORMAT "'", xtty->begin_elem("make_unloaded thread='" UINTX_FORMAT "'",
os::current_thread_id()); os::current_thread_id());
} else { } else {
xtty->begin_elem("make_not_entrant thread='" UINTX_FORMAT "'%s", xtty->begin_elem("make_not_entrant thread='" UINTX_FORMAT "'%s",
os::current_thread_id(), os::current_thread_id(),
(flags.state == zombie ? " zombie='1'" : "")); (_state == zombie ? " zombie='1'" : ""));
} }
log_identity(xtty); log_identity(xtty);
xtty->stamp(); xtty->stamp();
xtty->end_elem(); xtty->end_elem();
} }
} }
if (PrintCompilation && flags.state != unloaded) { if (PrintCompilation && _state != unloaded) {
print_on(tty, flags.state == zombie ? "made zombie " : "made not entrant "); print_on(tty, _state == zombie ? "made zombie " : "made not entrant ");
tty->cr(); tty->cr();
} }
} }
@ -1258,8 +1227,9 @@ bool nmethod::make_not_entrant_or_zombie(unsigned int state) {
bool was_alive = false; bool was_alive = false;
// Make sure the nmethod is not flushed in case of a safepoint in code below. // Make sure neither the nmethod nor the method is flushed in case of a safepoint in code below.
nmethodLocker nml(this); nmethodLocker nml(this);
methodHandle the_method(method());
{ {
// If the method is already zombie there is nothing to do // If the method is already zombie there is nothing to do
@ -1279,7 +1249,7 @@ bool nmethod::make_not_entrant_or_zombie(unsigned int state) {
// Enter critical section. Does not block for safepoint. // Enter critical section. Does not block for safepoint.
MutexLockerEx pl(Patching_lock, Mutex::_no_safepoint_check_flag); MutexLockerEx pl(Patching_lock, Mutex::_no_safepoint_check_flag);
if (flags.state == state) { if (_state == state) {
// another thread already performed this transition so nothing // another thread already performed this transition so nothing
// to do, but return false to indicate this. // to do, but return false to indicate this.
return false; return false;
@ -1290,17 +1260,37 @@ bool nmethod::make_not_entrant_or_zombie(unsigned int state) {
if (!is_osr_method() && !is_not_entrant()) { if (!is_osr_method() && !is_not_entrant()) {
NativeJump::patch_verified_entry(entry_point(), verified_entry_point(), NativeJump::patch_verified_entry(entry_point(), verified_entry_point(),
SharedRuntime::get_handle_wrong_method_stub()); SharedRuntime::get_handle_wrong_method_stub());
assert (NativeJump::instruction_size == nmethod::_zombie_instruction_size, "");
} }
was_alive = is_in_use(); // Read state under lock if (is_in_use()) {
// It's a true state change, so mark the method as decompiled.
// Do it only for transition from alive.
inc_decompile_count();
}
// Change state // Change state
flags.state = state; _state = state;
// Log the transition once // Log the transition once
log_state_change(); log_state_change();
// Remove nmethod from method.
// We need to check if both the _code and _from_compiled_code_entry_point
// refer to this nmethod because there is a race in setting these two fields
// in methodOop as seen in bugid 4947125.
// If the vep() points to the zombie nmethod, the memory for the nmethod
// could be flushed and the compiler and vtable stubs could still call
// through it.
if (method() != NULL && (method()->code() == this ||
method()->from_compiled_entry() == verified_entry_point())) {
HandleMark hm;
method()->clear_code();
}
if (state == not_entrant) {
mark_as_seen_on_stack();
}
} // leave critical region under Patching_lock } // leave critical region under Patching_lock
// When the nmethod becomes zombie it is no longer alive so the // When the nmethod becomes zombie it is no longer alive so the
@ -1308,18 +1298,17 @@ bool nmethod::make_not_entrant_or_zombie(unsigned int state) {
// state will be flushed later when the transition to zombie // state will be flushed later when the transition to zombie
// happens or they get unloaded. // happens or they get unloaded.
if (state == zombie) { if (state == zombie) {
// zombie only - if a JVMTI agent has enabled the CompiledMethodUnload event
// and it hasn't already been reported for this nmethod then report it now.
// (the event may have been reported earlier if the GC marked it for unloading).
post_compiled_method_unload();
MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag); MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
flush_dependencies(NULL); flush_dependencies(NULL);
} else { } else {
assert(state == not_entrant, "other cases may need to be handled differently"); assert(state == not_entrant, "other cases may need to be handled differently");
} }
if (state == not_entrant) {
Events::log("Make nmethod not entrant " INTPTR_FORMAT, this);
} else {
Events::log("Make nmethod zombie " INTPTR_FORMAT, this);
}
if (TraceCreateZombies) { if (TraceCreateZombies) {
tty->print_cr("nmethod <" INTPTR_FORMAT "> code made %s", this, (state == not_entrant) ? "not entrant" : "zombie"); tty->print_cr("nmethod <" INTPTR_FORMAT "> code made %s", this, (state == not_entrant) ? "not entrant" : "zombie");
} }
@ -1327,47 +1316,6 @@ bool nmethod::make_not_entrant_or_zombie(unsigned int state) {
// Make sweeper aware that there is a zombie method that needs to be removed // Make sweeper aware that there is a zombie method that needs to be removed
NMethodSweeper::notify(this); NMethodSweeper::notify(this);
// not_entrant only stuff
if (state == not_entrant) {
mark_as_seen_on_stack();
}
if (was_alive) {
// It's a true state change, so mark the method as decompiled.
// Do it only for transition from alive.
inc_decompile_count();
}
// zombie only - if a JVMTI agent has enabled the CompiledMethodUnload event
// and it hasn't already been reported for this nmethod then report it now.
// (the event may have been reported earilier if the GC marked it for unloading).
if (state == zombie) {
post_compiled_method_unload();
}
// Zombie only stuff
if (state == zombie) {
VTune::delete_nmethod(this);
}
// Check whether method got unloaded at a safepoint before this,
// if so we can skip the flushing steps below
if (method() == NULL) return true;
// Remove nmethod from method.
// We need to check if both the _code and _from_compiled_code_entry_point
// refer to this nmethod because there is a race in setting these two fields
// in methodOop as seen in bugid 4947125.
// If the vep() points to the zombie nmethod, the memory for the nmethod
// could be flushed and the compiler and vtable stubs could still call
// through it.
if (method()->code() == this ||
method()->from_compiled_entry() == verified_entry_point()) {
HandleMark hm;
method()->clear_code();
}
return true; return true;
} }
@ -1488,11 +1436,25 @@ void nmethod::post_compiled_method_load_event() {
moop->signature()->utf8_length(), moop->signature()->utf8_length(),
code_begin(), code_size()); code_begin(), code_size());
if (JvmtiExport::should_post_compiled_method_load() ||
JvmtiExport::should_post_compiled_method_unload()) {
get_and_cache_jmethod_id();
}
if (JvmtiExport::should_post_compiled_method_load()) { if (JvmtiExport::should_post_compiled_method_load()) {
JvmtiExport::post_compiled_method_load(this); JvmtiExport::post_compiled_method_load(this);
} }
} }
jmethodID nmethod::get_and_cache_jmethod_id() {
if (_jmethod_id == NULL) {
// Cache the jmethod_id since it can no longer be looked up once the
// method itself has been marked for unloading.
_jmethod_id = method()->jmethod_id();
}
return _jmethod_id;
}
void nmethod::post_compiled_method_unload() { void nmethod::post_compiled_method_unload() {
if (unload_reported()) { if (unload_reported()) {
// During unloading we transition to unloaded and then to zombie // During unloading we transition to unloaded and then to zombie
@ -1504,12 +1466,17 @@ void nmethod::post_compiled_method_unload() {
DTRACE_METHOD_UNLOAD_PROBE(method()); DTRACE_METHOD_UNLOAD_PROBE(method());
// If a JVMTI agent has enabled the CompiledMethodUnload event then // If a JVMTI agent has enabled the CompiledMethodUnload event then
// post the event. Sometime later this nmethod will be made a zombie by // post the event. Sometime later this nmethod will be made a zombie
// the sweeper but the methodOop will not be valid at that point. // by the sweeper but the methodOop will not be valid at that point.
if (JvmtiExport::should_post_compiled_method_unload()) { // If the _jmethod_id is null then no load event was ever requested
// so don't bother posting the unload. The main reason for this is
// that the jmethodID is a weak reference to the methodOop so if
// it's being unloaded there's no way to look it up since the weak
// ref will have been cleared.
if (_jmethod_id != NULL && JvmtiExport::should_post_compiled_method_unload()) {
assert(!unload_reported(), "already unloaded"); assert(!unload_reported(), "already unloaded");
HandleMark hm; HandleMark hm;
JvmtiExport::post_compiled_method_unload(method()->jmethod_id(), code_begin()); JvmtiExport::post_compiled_method_unload(_jmethod_id, code_begin());
} }
// The JVMTI CompiledMethodUnload event can be enabled or disabled at // The JVMTI CompiledMethodUnload event can be enabled or disabled at
@ -2087,7 +2054,6 @@ address nmethod::continuation_for_implicit_exception(address pc) {
void nmethod_init() { void nmethod_init() {
// make sure you didn't forget to adjust the filler fields // make sure you didn't forget to adjust the filler fields
assert(sizeof(nmFlags) <= 4, "nmFlags occupies more than a word");
assert(sizeof(nmethod) % oopSize == 0, "nmethod size must be multiple of a word"); assert(sizeof(nmethod) % oopSize == 0, "nmethod size must be multiple of a word");
} }
@ -2323,7 +2289,6 @@ void nmethod::print() const {
tty->print("((nmethod*) "INTPTR_FORMAT ") ", this); tty->print("((nmethod*) "INTPTR_FORMAT ") ", this);
tty->print(" for method " INTPTR_FORMAT , (address)method()); tty->print(" for method " INTPTR_FORMAT , (address)method());
tty->print(" { "); tty->print(" { ");
if (version()) tty->print("v%d ", version());
if (is_in_use()) tty->print("in_use "); if (is_in_use()) tty->print("in_use ");
if (is_not_entrant()) tty->print("not_entrant "); if (is_not_entrant()) tty->print("not_entrant ");
if (is_zombie()) tty->print("zombie "); if (is_zombie()) tty->print("zombie ");
@ -2659,13 +2624,10 @@ void nmethod::print_code_comment_on(outputStream* st, int column, u_char* begin,
case Bytecodes::_getstatic: case Bytecodes::_getstatic:
case Bytecodes::_putstatic: case Bytecodes::_putstatic:
{ {
methodHandle sdm = sd->method(); Bytecode_field* field = Bytecode_field_at(sd->method(), sd->bci());
Bytecode_field* field = Bytecode_field_at(sdm(), sdm->bcp_from(sd->bci()));
constantPoolOop sdmc = sdm->constants();
symbolOop name = sdmc->name_ref_at(field->index());
st->print(" "); st->print(" ");
if (name != NULL) if (field->name() != NULL)
name->print_symbol_on(st); field->name()->print_symbol_on(st);
else else
st->print("<UNKNOWN>"); st->print("<UNKNOWN>");
} }

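The jmethodID handling above hinges on one rule spelled out in the comments: the id is a weak reference to the methodOop, so it must be captured while the method is still resolvable, and the unload event is skipped when no id was ever cached. A sketch of that rule with stand-in types (not HotSpot's), shown below:

    #include <cstdio>

    typedef const void* method_id_t;   // stands in for jmethodID

    struct Method { method_id_t id; };

    class CompiledMethod {
      Method*     _method;
      method_id_t _jmethod_id;
     public:
      explicit CompiledMethod(Method* m) : _method(m), _jmethod_id(nullptr) {}

      // Capture the id while the method is still resolvable.
      method_id_t get_and_cache_jmethod_id() {
        if (_jmethod_id == nullptr) {
          _jmethod_id = _method->id;
        }
        return _jmethod_id;
      }

      // If no load event ever requested the id, skip the unload event: the
      // weak handle may already have been cleared by then.
      void post_unload_event() {
        if (_jmethod_id == nullptr) return;
        std::printf("unload event posted\n");
      }
    };

    int main() {
      Method m = { &m };
      CompiledMethod cm(&m);
      cm.get_and_cache_jmethod_id();   // done at load-event time
      cm.post_unload_event();          // later, relies only on the cached id
      return 0;
    }
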
@ -78,29 +78,8 @@ class PcDescCache VALUE_OBJ_CLASS_SPEC {
// nmethods (native methods) are the compiled code versions of Java methods. // nmethods (native methods) are the compiled code versions of Java methods.
//
struct nmFlags { // An nmethod contains:
friend class VMStructs;
unsigned int version:8; // version number (0 = first version)
unsigned int age:4; // age (in # of sweep steps)
unsigned int state:2; // {alive, zombie, unloaded)
unsigned int isUncommonRecompiled:1; // recompiled because of uncommon trap?
unsigned int isToBeRecompiled:1; // to be recompiled as soon as it matures
unsigned int hasFlushedDependencies:1; // Used for maintenance of dependencies
unsigned int markedForReclamation:1; // Used by NMethodSweeper
unsigned int has_unsafe_access:1; // May fault due to unsafe access.
unsigned int has_method_handle_invokes:1; // Has this method MethodHandle invokes?
unsigned int speculatively_disconnected:1; // Marked for potential unload
void clear();
};
// A nmethod contains:
// - header (the nmethod structure) // - header (the nmethod structure)
// [Relocation] // [Relocation]
// - relocation information // - relocation information
@ -131,10 +110,9 @@ class nmethod : public CodeBlob {
friend class CodeCache; // non-perm oops friend class CodeCache; // non-perm oops
private: private:
// Shared fields for all nmethod's // Shared fields for all nmethod's
static int _zombie_instruction_size;
methodOop _method; methodOop _method;
int _entry_bci; // != InvocationEntryBci if this nmethod is an on-stack replacement method int _entry_bci; // != InvocationEntryBci if this nmethod is an on-stack replacement method
jmethodID _jmethod_id; // Cache of method()->jmethod_id()
// To support simple linked-list chaining of nmethods: // To support simple linked-list chaining of nmethods:
nmethod* _osr_link; // from instanceKlass::osr_nmethods_head nmethod* _osr_link; // from instanceKlass::osr_nmethods_head
@ -146,6 +124,11 @@ class nmethod : public CodeBlob {
AbstractCompiler* _compiler; // The compiler which compiled this nmethod AbstractCompiler* _compiler; // The compiler which compiled this nmethod
// offsets for entry points
address _entry_point; // entry point with class check
address _verified_entry_point; // entry point without class check
address _osr_entry_point; // entry point for on stack replacement
// Offsets for different nmethod parts // Offsets for different nmethod parts
int _exception_offset; int _exception_offset;
// All deoptee's will resume execution at this location described by // All deoptee's will resume execution at this location described by
@ -174,23 +157,31 @@ class nmethod : public CodeBlob {
// pc during a deopt. // pc during a deopt.
int _orig_pc_offset; int _orig_pc_offset;
int _compile_id; // which compilation made this nmethod int _compile_id; // which compilation made this nmethod
int _comp_level; // compilation level int _comp_level; // compilation level
// offsets for entry points // protected by CodeCache_lock
address _entry_point; // entry point with class check bool _has_flushed_dependencies; // Used for maintenance of dependencies (CodeCache_lock)
address _verified_entry_point; // entry point without class check bool _speculatively_disconnected; // Marked for potential unload
address _osr_entry_point; // entry point for on stack replacement
bool _marked_for_reclamation; // Used by NMethodSweeper (set only by sweeper)
bool _marked_for_deoptimization; // Used for stack deoptimization
// used by jvmti to track if an unload event has been posted for this nmethod.
bool _unload_reported;
// set during construction
unsigned int _has_unsafe_access:1; // May fault due to unsafe access.
unsigned int _has_method_handle_invokes:1; // Has this method MethodHandle invokes?
// Protected by Patching_lock
unsigned char _state; // {alive, not_entrant, zombie, unloaded}
nmFlags flags; // various flags to keep track of nmethod state
bool _markedForDeoptimization; // Used for stack deoptimization
enum { alive = 0, enum { alive = 0,
not_entrant = 1, // uncommon trap has happened but activations may still exist not_entrant = 1, // uncommon trap has happened but activations may still exist
zombie = 2, zombie = 2,
unloaded = 3 }; unloaded = 3 };
// used by jvmti to track if an unload event has been posted for this nmethod.
bool _unload_reported;
jbyte _scavenge_root_state; jbyte _scavenge_root_state;
@ -269,15 +260,15 @@ class nmethod : public CodeBlob {
bool make_not_entrant_or_zombie(unsigned int state); bool make_not_entrant_or_zombie(unsigned int state);
void inc_decompile_count(); void inc_decompile_count();
// used to check that writes to nmFlags are done consistently.
static void check_safepoint() PRODUCT_RETURN;
// Used to manipulate the exception cache // Used to manipulate the exception cache
void add_exception_cache_entry(ExceptionCache* new_entry); void add_exception_cache_entry(ExceptionCache* new_entry);
ExceptionCache* exception_cache_entry_for_exception(Handle exception); ExceptionCache* exception_cache_entry_for_exception(Handle exception);
// Inform external interfaces that a compiled method has been unloaded // Inform external interfaces that a compiled method has been unloaded
inline void post_compiled_method_unload(); void post_compiled_method_unload();
// Initialize fields to their default values
void init_defaults();
public: public:
// create nmethod with entry_bci // create nmethod with entry_bci
@ -392,11 +383,11 @@ class nmethod : public CodeBlob {
address verified_entry_point() const { return _verified_entry_point; } // if klass is correct address verified_entry_point() const { return _verified_entry_point; } // if klass is correct
// flag accessing and manipulation // flag accessing and manipulation
bool is_in_use() const { return flags.state == alive; } bool is_in_use() const { return _state == alive; }
bool is_alive() const { return flags.state == alive || flags.state == not_entrant; } bool is_alive() const { return _state == alive || _state == not_entrant; }
bool is_not_entrant() const { return flags.state == not_entrant; } bool is_not_entrant() const { return _state == not_entrant; }
bool is_zombie() const { return flags.state == zombie; } bool is_zombie() const { return _state == zombie; }
bool is_unloaded() const { return flags.state == unloaded; } bool is_unloaded() const { return _state == unloaded; }
// Make the nmethod non entrant. The nmethod will continue to be // Make the nmethod non entrant. The nmethod will continue to be
// alive. It is used when an uncommon trap happens. Returns true // alive. It is used when an uncommon trap happens. Returns true
@ -409,37 +400,33 @@ class nmethod : public CodeBlob {
bool unload_reported() { return _unload_reported; } bool unload_reported() { return _unload_reported; }
void set_unload_reported() { _unload_reported = true; } void set_unload_reported() { _unload_reported = true; }
bool is_marked_for_deoptimization() const { return _markedForDeoptimization; } bool is_marked_for_deoptimization() const { return _marked_for_deoptimization; }
void mark_for_deoptimization() { _markedForDeoptimization = true; } void mark_for_deoptimization() { _marked_for_deoptimization = true; }
void make_unloaded(BoolObjectClosure* is_alive, oop cause); void make_unloaded(BoolObjectClosure* is_alive, oop cause);
bool has_dependencies() { return dependencies_size() != 0; } bool has_dependencies() { return dependencies_size() != 0; }
void flush_dependencies(BoolObjectClosure* is_alive); void flush_dependencies(BoolObjectClosure* is_alive);
bool has_flushed_dependencies() { return flags.hasFlushedDependencies; } bool has_flushed_dependencies() { return _has_flushed_dependencies; }
void set_has_flushed_dependencies() { void set_has_flushed_dependencies() {
assert(!has_flushed_dependencies(), "should only happen once"); assert(!has_flushed_dependencies(), "should only happen once");
flags.hasFlushedDependencies = 1; _has_flushed_dependencies = 1;
} }
bool is_marked_for_reclamation() const { return flags.markedForReclamation; } bool is_marked_for_reclamation() const { return _marked_for_reclamation; }
void mark_for_reclamation() { flags.markedForReclamation = 1; } void mark_for_reclamation() { _marked_for_reclamation = 1; }
void unmark_for_reclamation() { flags.markedForReclamation = 0; }
bool has_unsafe_access() const { return flags.has_unsafe_access; } bool has_unsafe_access() const { return _has_unsafe_access; }
void set_has_unsafe_access(bool z) { flags.has_unsafe_access = z; } void set_has_unsafe_access(bool z) { _has_unsafe_access = z; }
bool has_method_handle_invokes() const { return flags.has_method_handle_invokes; } bool has_method_handle_invokes() const { return _has_method_handle_invokes; }
void set_has_method_handle_invokes(bool z) { flags.has_method_handle_invokes = z; } void set_has_method_handle_invokes(bool z) { _has_method_handle_invokes = z; }
bool is_speculatively_disconnected() const { return flags.speculatively_disconnected; } bool is_speculatively_disconnected() const { return _speculatively_disconnected; }
void set_speculatively_disconnected(bool z) { flags.speculatively_disconnected = z; } void set_speculatively_disconnected(bool z) { _speculatively_disconnected = z; }
int comp_level() const { return _comp_level; } int comp_level() const { return _comp_level; }
int version() const { return flags.version; }
void set_version(int v);
// Support for oops in scopes and relocs: // Support for oops in scopes and relocs:
// Note: index 0 is reserved for null. // Note: index 0 is reserved for null.
oop oop_at(int index) const { return index == 0 ? (oop) NULL: *oop_addr_at(index); } oop oop_at(int index) const { return index == 0 ? (oop) NULL: *oop_addr_at(index); }
@ -599,6 +586,7 @@ public:
// jvmti support: // jvmti support:
void post_compiled_method_load_event(); void post_compiled_method_load_event();
jmethodID get_and_cache_jmethod_id();
// verify operations // verify operations
void verify(); void verify();

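The header change above replaces the packed nmFlags bitfield with discrete fields and pins the life-cycle state to a single byte written only under Patching_lock. A minimal sketch of that discipline, with a transition that reports whether this thread actually performed it (as make_not_entrant_or_zombie() does); the lock and class names are local to the sketch:

    #include <cassert>
    #include <mutex>

    enum State : unsigned char { alive = 0, not_entrant = 1, zombie = 2, unloaded = 3 };

    class CompiledCode {
      std::mutex    _patching_lock;            // stands in for Patching_lock
      unsigned char _state;                    // written only under _patching_lock
      bool          _marked_for_deoptimization;
     public:
      CompiledCode() : _state(alive), _marked_for_deoptimization(false) {}

      bool is_alive() const { return _state == alive || _state == not_entrant; }
      void mark_for_deoptimization() { _marked_for_deoptimization = true; }

      // Returns true if this call performed the transition, false if another
      // thread already moved the code to the requested state.
      bool make_not_entrant_or_zombie(unsigned char state) {
        std::lock_guard<std::mutex> guard(_patching_lock);
        if (_state == state) return false;
        _state = state;
        return true;
      }
    };

    int main() {
      CompiledCode cc;
      bool first  = cc.make_not_entrant_or_zombie(not_entrant);
      bool second = cc.make_not_entrant_or_zombie(not_entrant);
      assert(first && !second);                // the second attempt is a no-op
      return 0;
    }
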
@ -1,5 +1,5 @@
/* /*
* Copyright (c) 1997, 2009, Oracle and/or its affiliates. All rights reserved. * Copyright (c) 1997, 2010, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
* *
* This code is free software; you can redistribute it and/or modify it * This code is free software; you can redistribute it and/or modify it
@ -50,7 +50,6 @@ void* VtableStub::operator new(size_t size, int code_size) {
} }
_chunk = blob->instructions_begin(); _chunk = blob->instructions_begin();
_chunk_end = _chunk + bytes; _chunk_end = _chunk + bytes;
VTune::register_stub("vtable stub", _chunk, _chunk_end);
Forte::register_stub("vtable stub", _chunk, _chunk_end); Forte::register_stub("vtable stub", _chunk, _chunk_end);
// Notify JVMTI about this stub. The event will be recorded by the enclosing // Notify JVMTI about this stub. The event will be recorded by the enclosing
// JvmtiDynamicCodeEventCollector and posted when this thread has released // JvmtiDynamicCodeEventCollector and posted when this thread has released

@ -3972,6 +3972,10 @@ public:
void work(int i) { void work(int i) {
if (i >= _n_workers) return; // no work needed this round if (i >= _n_workers) return; // no work needed this round
double start_time_ms = os::elapsedTime() * 1000.0;
_g1h->g1_policy()->record_gc_worker_start_time(i, start_time_ms);
ResourceMark rm; ResourceMark rm;
HandleMark hm; HandleMark hm;
@ -4019,7 +4023,7 @@ public:
double elapsed_ms = (os::elapsedTime()-start)*1000.0; double elapsed_ms = (os::elapsedTime()-start)*1000.0;
double term_ms = pss.term_time()*1000.0; double term_ms = pss.term_time()*1000.0;
_g1h->g1_policy()->record_obj_copy_time(i, elapsed_ms-term_ms); _g1h->g1_policy()->record_obj_copy_time(i, elapsed_ms-term_ms);
_g1h->g1_policy()->record_termination_time(i, term_ms); _g1h->g1_policy()->record_termination(i, term_ms, pss.term_attempts());
} }
_g1h->g1_policy()->record_thread_age_table(pss.age_table()); _g1h->g1_policy()->record_thread_age_table(pss.age_table());
_g1h->update_surviving_young_words(pss.surviving_young_words()+1); _g1h->update_surviving_young_words(pss.surviving_young_words()+1);
@ -4043,7 +4047,8 @@ public:
double term = pss.term_time(); double term = pss.term_time();
gclog_or_tty->print(" Elapsed: %7.2f ms.\n" gclog_or_tty->print(" Elapsed: %7.2f ms.\n"
" Strong roots: %7.2f ms (%6.2f%%)\n" " Strong roots: %7.2f ms (%6.2f%%)\n"
" Termination: %7.2f ms (%6.2f%%) (in %d entries)\n", " Termination: %7.2f ms (%6.2f%%) "
"(in "SIZE_FORMAT" entries)\n",
elapsed * 1000.0, elapsed * 1000.0,
strong_roots * 1000.0, (strong_roots*100.0/elapsed), strong_roots * 1000.0, (strong_roots*100.0/elapsed),
term * 1000.0, (term*100.0/elapsed), term * 1000.0, (term*100.0/elapsed),
@ -4059,6 +4064,8 @@ public:
assert(pss.refs_to_scan() == 0, "Task queue should be empty"); assert(pss.refs_to_scan() == 0, "Task queue should be empty");
assert(pss.overflowed_refs_to_scan() == 0, "Overflow queue should be empty"); assert(pss.overflowed_refs_to_scan() == 0, "Overflow queue should be empty");
double end_time_ms = os::elapsedTime() * 1000.0;
_g1h->g1_policy()->record_gc_worker_end_time(i, end_time_ms);
} }
}; };

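The worker changes above bracket each GC worker's body with a start and end timestamp that are handed to the policy for per-thread reporting. A rough standalone sketch of that bracketing, with a plain array standing in for the policy object and the -1234.0 sentinel carried over from the hunk:

    #include <chrono>
    #include <cstdio>
    #include <vector>

    static double now_ms() {
      using namespace std::chrono;
      return duration<double, std::milli>(steady_clock::now().time_since_epoch()).count();
    }

    struct WorkerTimes {
      std::vector<double> start_ms, end_ms;
      explicit WorkerTimes(int n) : start_ms(n, -1234.0), end_ms(n, -1234.0) {}
    };

    static void worker_body(int i, WorkerTimes& times) {
      times.start_ms[i] = now_ms();            // recorded before any work
      volatile long sink = 0;                  // stand-in for the copying/termination work
      for (long k = 0; k < 1000000; k++) sink = sink + k;
      times.end_ms[i] = now_ms();              // recorded after all work is done
    }

    int main() {
      WorkerTimes times(2);
      worker_body(0, times);
      worker_body(1, times);
      for (int i = 0; i < 2; i++) {
        std::printf("worker %d: %.3f ms\n", i, times.end_ms[i] - times.start_ms[i]);
      }
      return 0;
    }
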
@ -1549,7 +1549,7 @@ protected:
int _hash_seed; int _hash_seed;
int _queue_num; int _queue_num;
int _term_attempts; size_t _term_attempts;
#if G1_DETAILED_STATS #if G1_DETAILED_STATS
int _pushes, _pops, _steals, _steal_attempts; int _pushes, _pops, _steals, _steal_attempts;
int _overflow_pushes; int _overflow_pushes;
@ -1727,8 +1727,8 @@ public:
int* hash_seed() { return &_hash_seed; } int* hash_seed() { return &_hash_seed; }
int queue_num() { return _queue_num; } int queue_num() { return _queue_num; }
int term_attempts() { return _term_attempts; } size_t term_attempts() { return _term_attempts; }
void note_term_attempt() { _term_attempts++; } void note_term_attempt() { _term_attempts++; }
#if G1_DETAILED_STATS #if G1_DETAILED_STATS
int pushes() { return _pushes; } int pushes() { return _pushes; }

@ -231,20 +231,21 @@ G1CollectorPolicy::G1CollectorPolicy() :
_recent_prev_end_times_for_all_gcs_sec->add(os::elapsedTime()); _recent_prev_end_times_for_all_gcs_sec->add(os::elapsedTime());
_prev_collection_pause_end_ms = os::elapsedTime() * 1000.0; _prev_collection_pause_end_ms = os::elapsedTime() * 1000.0;
_par_last_gc_worker_start_times_ms = new double[_parallel_gc_threads];
_par_last_ext_root_scan_times_ms = new double[_parallel_gc_threads]; _par_last_ext_root_scan_times_ms = new double[_parallel_gc_threads];
_par_last_mark_stack_scan_times_ms = new double[_parallel_gc_threads]; _par_last_mark_stack_scan_times_ms = new double[_parallel_gc_threads];
_par_last_update_rs_start_times_ms = new double[_parallel_gc_threads];
_par_last_update_rs_times_ms = new double[_parallel_gc_threads]; _par_last_update_rs_times_ms = new double[_parallel_gc_threads];
_par_last_update_rs_processed_buffers = new double[_parallel_gc_threads]; _par_last_update_rs_processed_buffers = new double[_parallel_gc_threads];
_par_last_scan_rs_start_times_ms = new double[_parallel_gc_threads];
_par_last_scan_rs_times_ms = new double[_parallel_gc_threads]; _par_last_scan_rs_times_ms = new double[_parallel_gc_threads];
_par_last_scan_new_refs_times_ms = new double[_parallel_gc_threads]; _par_last_scan_new_refs_times_ms = new double[_parallel_gc_threads];
_par_last_obj_copy_times_ms = new double[_parallel_gc_threads]; _par_last_obj_copy_times_ms = new double[_parallel_gc_threads];
_par_last_termination_times_ms = new double[_parallel_gc_threads]; _par_last_termination_times_ms = new double[_parallel_gc_threads];
_par_last_termination_attempts = new double[_parallel_gc_threads];
_par_last_gc_worker_end_times_ms = new double[_parallel_gc_threads];
// start conservatively // start conservatively
_expensive_region_limit_ms = 0.5 * (double) MaxGCPauseMillis; _expensive_region_limit_ms = 0.5 * (double) MaxGCPauseMillis;
@ -274,10 +275,64 @@ G1CollectorPolicy::G1CollectorPolicy() :
// </NEW PREDICTION> // </NEW PREDICTION>
double time_slice = (double) GCPauseIntervalMillis / 1000.0; // Below, we might need to calculate the pause time target based on
// the pause interval. When we do so we are going to give G1 maximum
// flexibility and allow it to do pauses when it needs to. So, we'll
// arrange that the pause interval to be pause time target + 1 to
// ensure that a) the pause time target is maximized with respect to
// the pause interval and b) we maintain the invariant that pause
// time target < pause interval. If the user does not want this
// maximum flexibility, they will have to set the pause interval
// explicitly.
// First make sure that, if either parameter is set, its value is
// reasonable.
if (!FLAG_IS_DEFAULT(MaxGCPauseMillis)) {
if (MaxGCPauseMillis < 1) {
vm_exit_during_initialization("MaxGCPauseMillis should be "
"greater than 0");
}
}
if (!FLAG_IS_DEFAULT(GCPauseIntervalMillis)) {
if (GCPauseIntervalMillis < 1) {
vm_exit_during_initialization("GCPauseIntervalMillis should be "
"greater than 0");
}
}
// Then, if the pause time target parameter was not set, set it to
// the default value.
if (FLAG_IS_DEFAULT(MaxGCPauseMillis)) {
if (FLAG_IS_DEFAULT(GCPauseIntervalMillis)) {
// The default pause time target in G1 is 200ms
FLAG_SET_DEFAULT(MaxGCPauseMillis, 200);
} else {
// We do not allow the pause interval to be set without the
// pause time target
vm_exit_during_initialization("GCPauseIntervalMillis cannot be set "
"without setting MaxGCPauseMillis");
}
}
// Then, if the interval parameter was not set, set it according to
// the pause time target (this will also deal with the case when the
// pause time target is the default value).
if (FLAG_IS_DEFAULT(GCPauseIntervalMillis)) {
FLAG_SET_DEFAULT(GCPauseIntervalMillis, MaxGCPauseMillis + 1);
}
// Finally, make sure that the two parameters are consistent.
if (MaxGCPauseMillis >= GCPauseIntervalMillis) {
char buffer[256];
jio_snprintf(buffer, 256,
"MaxGCPauseMillis (%u) should be less than "
"GCPauseIntervalMillis (%u)",
MaxGCPauseMillis, GCPauseIntervalMillis);
vm_exit_during_initialization(buffer);
}
double max_gc_time = (double) MaxGCPauseMillis / 1000.0; double max_gc_time = (double) MaxGCPauseMillis / 1000.0;
guarantee(max_gc_time < time_slice, double time_slice = (double) GCPauseIntervalMillis / 1000.0;
"Max GC time should not be greater than the time slice");
_mmu_tracker = new G1MMUTrackerQueue(time_slice, max_gc_time); _mmu_tracker = new G1MMUTrackerQueue(time_slice, max_gc_time);
_sigma = (double) G1ConfidencePercent / 100.0; _sigma = (double) G1ConfidencePercent / 100.0;
@ -782,16 +837,17 @@ void G1CollectorPolicy::record_collection_pause_start(double start_time_sec,
// if they are not set properly // if they are not set properly
for (int i = 0; i < _parallel_gc_threads; ++i) { for (int i = 0; i < _parallel_gc_threads; ++i) {
_par_last_ext_root_scan_times_ms[i] = -666.0; _par_last_gc_worker_start_times_ms[i] = -1234.0;
_par_last_mark_stack_scan_times_ms[i] = -666.0; _par_last_ext_root_scan_times_ms[i] = -1234.0;
_par_last_update_rs_start_times_ms[i] = -666.0; _par_last_mark_stack_scan_times_ms[i] = -1234.0;
_par_last_update_rs_times_ms[i] = -666.0; _par_last_update_rs_times_ms[i] = -1234.0;
_par_last_update_rs_processed_buffers[i] = -666.0; _par_last_update_rs_processed_buffers[i] = -1234.0;
_par_last_scan_rs_start_times_ms[i] = -666.0; _par_last_scan_rs_times_ms[i] = -1234.0;
_par_last_scan_rs_times_ms[i] = -666.0; _par_last_scan_new_refs_times_ms[i] = -1234.0;
_par_last_scan_new_refs_times_ms[i] = -666.0; _par_last_obj_copy_times_ms[i] = -1234.0;
_par_last_obj_copy_times_ms[i] = -666.0; _par_last_termination_times_ms[i] = -1234.0;
_par_last_termination_times_ms[i] = -666.0; _par_last_termination_attempts[i] = -1234.0;
_par_last_gc_worker_end_times_ms[i] = -1234.0;
} }
#endif #endif
@ -942,9 +998,9 @@ T sum_of(T* sum_arr, int start, int n, int N) {
return sum; return sum;
} }
void G1CollectorPolicy::print_par_stats (int level, void G1CollectorPolicy::print_par_stats(int level,
const char* str, const char* str,
double* data, double* data,
bool summary) { bool summary) {
double min = data[0], max = data[0]; double min = data[0], max = data[0];
double total = 0.0; double total = 0.0;
@ -973,10 +1029,10 @@ void G1CollectorPolicy::print_par_stats (int level,
gclog_or_tty->print_cr("]"); gclog_or_tty->print_cr("]");
} }
void G1CollectorPolicy::print_par_buffers (int level, void G1CollectorPolicy::print_par_sizes(int level,
const char* str, const char* str,
double* data, double* data,
bool summary) { bool summary) {
double min = data[0], max = data[0]; double min = data[0], max = data[0];
double total = 0.0; double total = 0.0;
int j; int j;
@ -1321,15 +1377,22 @@ void G1CollectorPolicy::record_collection_pause_end(bool abandoned) {
} }
if (parallel) { if (parallel) {
print_stats(1, "Parallel Time", _cur_collection_par_time_ms); print_stats(1, "Parallel Time", _cur_collection_par_time_ms);
print_par_stats(2, "Update RS (Start)", _par_last_update_rs_start_times_ms, false); print_par_stats(2, "GC Worker Start Time",
_par_last_gc_worker_start_times_ms, false);
print_par_stats(2, "Update RS", _par_last_update_rs_times_ms); print_par_stats(2, "Update RS", _par_last_update_rs_times_ms);
print_par_buffers(3, "Processed Buffers", print_par_sizes(3, "Processed Buffers",
_par_last_update_rs_processed_buffers, true); _par_last_update_rs_processed_buffers, true);
print_par_stats(2, "Ext Root Scanning", _par_last_ext_root_scan_times_ms); print_par_stats(2, "Ext Root Scanning",
print_par_stats(2, "Mark Stack Scanning", _par_last_mark_stack_scan_times_ms); _par_last_ext_root_scan_times_ms);
print_par_stats(2, "Mark Stack Scanning",
_par_last_mark_stack_scan_times_ms);
print_par_stats(2, "Scan RS", _par_last_scan_rs_times_ms); print_par_stats(2, "Scan RS", _par_last_scan_rs_times_ms);
print_par_stats(2, "Object Copy", _par_last_obj_copy_times_ms); print_par_stats(2, "Object Copy", _par_last_obj_copy_times_ms);
print_par_stats(2, "Termination", _par_last_termination_times_ms); print_par_stats(2, "Termination", _par_last_termination_times_ms);
print_par_sizes(3, "Termination Attempts",
_par_last_termination_attempts, true);
print_par_stats(2, "GC Worker End Time",
_par_last_gc_worker_end_times_ms, false);
print_stats(2, "Other", parallel_other_time); print_stats(2, "Other", parallel_other_time);
print_stats(1, "Clear CT", _cur_clear_ct_time_ms); print_stats(1, "Clear CT", _cur_clear_ct_time_ms);
} else { } else {

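The constructor change above encodes a small flag-resolution policy: both parameters must be positive if set, the pause target defaults to 200 ms when neither is set, setting only the interval is an error, the interval defaults to the target plus one, and the target must stay below the interval. A standalone sketch of those rules, with plain booleans in place of FLAG_IS_DEFAULT and a returned message in place of vm_exit_during_initialization:

    #include <cstdio>

    struct PauseFlags {
      unsigned max_pause_ms;
      unsigned pause_interval_ms;
      bool max_pause_set;        // user supplied MaxGCPauseMillis
      bool interval_set;         // user supplied GCPauseIntervalMillis
    };

    // Returns nullptr on success and fills in the effective values; returns a
    // message for the configurations the policy rejects.
    static const char* resolve_pause_flags(PauseFlags& f) {
      if (f.max_pause_set && f.max_pause_ms < 1) {
        return "MaxGCPauseMillis should be greater than 0";
      }
      if (f.interval_set && f.pause_interval_ms < 1) {
        return "GCPauseIntervalMillis should be greater than 0";
      }
      if (!f.max_pause_set) {
        if (!f.interval_set) {
          f.max_pause_ms = 200;                    // default pause time target
        } else {
          return "GCPauseIntervalMillis cannot be set without setting MaxGCPauseMillis";
        }
      }
      if (!f.interval_set) {
        f.pause_interval_ms = f.max_pause_ms + 1;  // maximize target w.r.t. interval
      }
      if (f.max_pause_ms >= f.pause_interval_ms) {
        return "MaxGCPauseMillis should be less than GCPauseIntervalMillis";
      }
      return nullptr;
    }

    int main() {
      PauseFlags f = {0, 0, false, false};         // neither flag set by the user
      const char* err = resolve_pause_flags(f);
      std::printf("%s: target=%u interval=%u\n", err ? err : "ok", f.max_pause_ms, f.pause_interval_ms);
      return 0;
    }
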
@ -171,16 +171,17 @@ protected:
double* _cur_aux_times_ms; double* _cur_aux_times_ms;
bool* _cur_aux_times_set; bool* _cur_aux_times_set;
double* _par_last_gc_worker_start_times_ms;
double* _par_last_ext_root_scan_times_ms; double* _par_last_ext_root_scan_times_ms;
double* _par_last_mark_stack_scan_times_ms; double* _par_last_mark_stack_scan_times_ms;
double* _par_last_update_rs_start_times_ms;
double* _par_last_update_rs_times_ms; double* _par_last_update_rs_times_ms;
double* _par_last_update_rs_processed_buffers; double* _par_last_update_rs_processed_buffers;
double* _par_last_scan_rs_start_times_ms;
double* _par_last_scan_rs_times_ms; double* _par_last_scan_rs_times_ms;
double* _par_last_scan_new_refs_times_ms; double* _par_last_scan_new_refs_times_ms;
double* _par_last_obj_copy_times_ms; double* _par_last_obj_copy_times_ms;
double* _par_last_termination_times_ms; double* _par_last_termination_times_ms;
double* _par_last_termination_attempts;
double* _par_last_gc_worker_end_times_ms;
// indicates that we are in young GC mode // indicates that we are in young GC mode
bool _in_young_gc_mode; bool _in_young_gc_mode;
@ -559,13 +560,14 @@ public:
} }
protected: protected:
void print_stats (int level, const char* str, double value); void print_stats(int level, const char* str, double value);
void print_stats (int level, const char* str, int value); void print_stats(int level, const char* str, int value);
void print_par_stats (int level, const char* str, double* data) {
void print_par_stats(int level, const char* str, double* data) {
print_par_stats(level, str, data, true); print_par_stats(level, str, data, true);
} }
void print_par_stats (int level, const char* str, double* data, bool summary); void print_par_stats(int level, const char* str, double* data, bool summary);
void print_par_buffers (int level, const char* str, double* data, bool summary); void print_par_sizes(int level, const char* str, double* data, bool summary);
void check_other_times(int level, void check_other_times(int level,
NumberSeq* other_times_ms, NumberSeq* other_times_ms,
@ -891,6 +893,10 @@ public:
virtual void record_full_collection_start(); virtual void record_full_collection_start();
virtual void record_full_collection_end(); virtual void record_full_collection_end();
void record_gc_worker_start_time(int worker_i, double ms) {
_par_last_gc_worker_start_times_ms[worker_i] = ms;
}
void record_ext_root_scan_time(int worker_i, double ms) { void record_ext_root_scan_time(int worker_i, double ms) {
_par_last_ext_root_scan_times_ms[worker_i] = ms; _par_last_ext_root_scan_times_ms[worker_i] = ms;
} }
@ -912,10 +918,6 @@ public:
_all_mod_union_times_ms->add(ms); _all_mod_union_times_ms->add(ms);
} }
void record_update_rs_start_time(int thread, double ms) {
_par_last_update_rs_start_times_ms[thread] = ms;
}
void record_update_rs_time(int thread, double ms) { void record_update_rs_time(int thread, double ms) {
_par_last_update_rs_times_ms[thread] = ms; _par_last_update_rs_times_ms[thread] = ms;
} }
@ -925,10 +927,6 @@ public:
_par_last_update_rs_processed_buffers[thread] = processed_buffers; _par_last_update_rs_processed_buffers[thread] = processed_buffers;
} }
void record_scan_rs_start_time(int thread, double ms) {
_par_last_scan_rs_start_times_ms[thread] = ms;
}
void record_scan_rs_time(int thread, double ms) { void record_scan_rs_time(int thread, double ms) {
_par_last_scan_rs_times_ms[thread] = ms; _par_last_scan_rs_times_ms[thread] = ms;
} }
@ -953,16 +951,13 @@ public:
_par_last_obj_copy_times_ms[thread] += ms; _par_last_obj_copy_times_ms[thread] += ms;
} }
void record_obj_copy_time(double ms) { void record_termination(int thread, double ms, size_t attempts) {
record_obj_copy_time(0, ms);
}
void record_termination_time(int thread, double ms) {
_par_last_termination_times_ms[thread] = ms; _par_last_termination_times_ms[thread] = ms;
_par_last_termination_attempts[thread] = (double) attempts;
} }
void record_termination_time(double ms) { void record_gc_worker_end_time(int worker_i, double ms) {
record_termination_time(0, ms); _par_last_gc_worker_end_times_ms[worker_i] = ms;
} }
void record_pause_time_ms(double ms) { void record_pause_time_ms(double ms) {

@ -303,7 +303,6 @@ void HRInto_G1RemSet::scanRS(OopsInHeapRegionClosure* oc, int worker_i) {
assert( _cards_scanned != NULL, "invariant" ); assert( _cards_scanned != NULL, "invariant" );
_cards_scanned[worker_i] = scanRScl.cards_done(); _cards_scanned[worker_i] = scanRScl.cards_done();
_g1p->record_scan_rs_start_time(worker_i, rs_time_start * 1000.0);
_g1p->record_scan_rs_time(worker_i, scan_rs_time_sec * 1000.0); _g1p->record_scan_rs_time(worker_i, scan_rs_time_sec * 1000.0);
} }
@ -311,8 +310,6 @@ void HRInto_G1RemSet::updateRS(int worker_i) {
ConcurrentG1Refine* cg1r = _g1->concurrent_g1_refine(); ConcurrentG1Refine* cg1r = _g1->concurrent_g1_refine();
double start = os::elapsedTime(); double start = os::elapsedTime();
_g1p->record_update_rs_start_time(worker_i, start * 1000.0);
// Apply the appropriate closure to all remaining log entries. // Apply the appropriate closure to all remaining log entries.
_g1->iterate_dirty_card_closure(false, worker_i); _g1->iterate_dirty_card_closure(false, worker_i);
// Now there should be no dirty cards. // Now there should be no dirty cards.
@ -471,7 +468,6 @@ HRInto_G1RemSet::oops_into_collection_set_do(OopsInHeapRegionClosure* oc,
updateRS(worker_i); updateRS(worker_i);
scanNewRefsRS(oc, worker_i); scanNewRefsRS(oc, worker_i);
} else { } else {
_g1p->record_update_rs_start_time(worker_i, os::elapsedTime() * 1000.0);
_g1p->record_update_rs_processed_buffers(worker_i, 0.0); _g1p->record_update_rs_processed_buffers(worker_i, 0.0);
_g1p->record_update_rs_time(worker_i, 0.0); _g1p->record_update_rs_time(worker_i, 0.0);
_g1p->record_scan_new_refs_time(worker_i, 0.0); _g1p->record_scan_new_refs_time(worker_i, 0.0);
@ -479,7 +475,6 @@ HRInto_G1RemSet::oops_into_collection_set_do(OopsInHeapRegionClosure* oc,
if (G1UseParallelRSetScanning || (worker_i == 0)) { if (G1UseParallelRSetScanning || (worker_i == 0)) {
scanRS(oc, worker_i); scanRS(oc, worker_i);
} else { } else {
_g1p->record_scan_rs_start_time(worker_i, os::elapsedTime() * 1000.0);
_g1p->record_scan_rs_time(worker_i, 0.0); _g1p->record_scan_rs_time(worker_i, 0.0);
} }
} else { } else {

@ -270,7 +270,7 @@ psParallelCompact.cpp parallelScavengeHeap.inline.hpp
psParallelCompact.cpp pcTasks.hpp psParallelCompact.cpp pcTasks.hpp
psParallelCompact.cpp psMarkSweep.hpp psParallelCompact.cpp psMarkSweep.hpp
psParallelCompact.cpp psMarkSweepDecorator.hpp psParallelCompact.cpp psMarkSweepDecorator.hpp
psParallelCompact.cpp psCompactionManager.hpp psParallelCompact.cpp psCompactionManager.inline.hpp
psParallelCompact.cpp psPromotionManager.inline.hpp psParallelCompact.cpp psPromotionManager.inline.hpp
psParallelCompact.cpp psOldGen.hpp psParallelCompact.cpp psOldGen.hpp
psParallelCompact.cpp psParallelCompact.hpp psParallelCompact.cpp psParallelCompact.hpp

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2001, 2008, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2001, 2010, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -566,14 +566,14 @@ void CardTableExtension::resize_covered_region_by_end(int changed_region,
 #endif
   // Commit new or uncommit old pages, if necessary.
-  resize_commit_uncommit(changed_region, new_region);
+  if (resize_commit_uncommit(changed_region, new_region)) {
+    // Set the new start of the committed region
+    resize_update_committed_table(changed_region, new_region);
+  }
   // Update card table entries
   resize_update_card_table_entries(changed_region, new_region);
-  // Set the new start of the committed region
-  resize_update_committed_table(changed_region, new_region);
   // Update the covered region
   resize_update_covered_table(changed_region, new_region);
@@ -604,8 +604,9 @@ void CardTableExtension::resize_covered_region_by_end(int changed_region,
   debug_only(verify_guard();)
 }
-void CardTableExtension::resize_commit_uncommit(int changed_region,
+bool CardTableExtension::resize_commit_uncommit(int changed_region,
                                                 MemRegion new_region) {
+  bool result = false;
   // Commit new or uncommit old pages, if necessary.
   MemRegion cur_committed = _committed[changed_region];
   assert(_covered[changed_region].end() == new_region.end(),
@@ -675,20 +676,31 @@ void CardTableExtension::resize_commit_uncommit(int changed_region,
                         "card table expansion");
       }
     }
+    result = true;
   } else if (new_start_aligned > cur_committed.start()) {
     // Shrink the committed region
+#if 0 // uncommitting space is currently unsafe because of the interactions
+      // of growing and shrinking regions.  One region A can uncommit space
+      // that it owns but which is being used by another region B (maybe).
+      // Region B has not committed the space because it was already
+      // committed by region A.
     MemRegion uncommit_region = committed_unique_to_self(changed_region,
       MemRegion(cur_committed.start(), new_start_aligned));
     if (!uncommit_region.is_empty()) {
       if (!os::uncommit_memory((char*)uncommit_region.start(),
                                uncommit_region.byte_size())) {
-        vm_exit_out_of_memory(uncommit_region.byte_size(),
-          "card table contraction");
+        // If the uncommit fails, ignore it.  Let the
+        // committed table resizing go even though the committed
+        // table will over state the committed space.
       }
     }
+#else
+    assert(!result, "Should be false with current workaround");
+#endif
   }
   assert(_committed[changed_region].end() == cur_committed.end(),
     "end should not change");
+  return result;
 }
 void CardTableExtension::resize_update_committed_table(int changed_region,
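resize_commit_uncommit() now reports whether it actually committed or uncommitted pages, and the caller updates the committed table only when it did. A minimal sketch of that shape with hypothetical names (commit_pages, Bookkeeping), not HotSpot's API:

    #include <cstddef>

    struct Bookkeeping { std::size_t committed_bytes = 0; };

    // Pretend commit: returns true only when it really changed the mapping.
    static bool commit_pages(void* /*addr*/, std::size_t bytes) {
      return bytes != 0;
    }

    static void resize(Bookkeeping& table, void* addr, std::size_t new_bytes) {
      if (commit_pages(addr, new_bytes)) {
        table.committed_bytes = new_bytes;   // bookkeeping follows success only
      }
      // card-table entries and the covered region would be updated unconditionally
    }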

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2001, 2008, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2001, 2010, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -30,7 +30,9 @@ class GCTaskQueue;
 class CardTableExtension : public CardTableModRefBS {
  private:
   // Support methods for resizing the card table.
-  void resize_commit_uncommit(int changed_region, MemRegion new_region);
+  // resize_commit_uncommit() returns true if the pages were committed or
+  // uncommitted
+  bool resize_commit_uncommit(int changed_region, MemRegion new_region);
   void resize_update_card_table_entries(int changed_region,
                                         MemRegion new_region);
   void resize_update_committed_table(int changed_region, MemRegion new_region);

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2005, 2009, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2005, 2010, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -32,7 +32,7 @@ ParCompactionManager::ObjArrayTaskQueueSet*
   ParCompactionManager::_objarray_queues = NULL;
 ObjectStartArray*    ParCompactionManager::_start_array = NULL;
 ParMarkBitMap*       ParCompactionManager::_mark_bitmap = NULL;
 RegionTaskQueueSet*  ParCompactionManager::_region_array = NULL;
 ParCompactionManager::ParCompactionManager() :
     _action(CopyAndUpdate) {
@@ -43,25 +43,9 @@ ParCompactionManager::ParCompactionManager() :
   _old_gen = heap->old_gen();
   _start_array = old_gen()->start_array();
   marking_stack()->initialize();
+  _objarray_stack.initialize();
-  // We want the overflow stack to be permanent
-  _overflow_stack = new (ResourceObj::C_HEAP) GrowableArray<oop>(10, true);
-  _objarray_queue.initialize();
-  _objarray_overflow_stack =
-    new (ResourceObj::C_HEAP) ObjArrayOverflowStack(10, true);
-#ifdef USE_RegionTaskQueueWithOverflow
   region_stack()->initialize();
-#else
-  region_stack()->initialize();
-  // We want the overflow stack to be permanent
-  _region_overflow_stack =
-    new (ResourceObj::C_HEAP) GrowableArray<size_t>(10, true);
-#endif
   // Note that _revisit_klass_stack is allocated out of the
   // C heap (as opposed to out of ResourceArena).
@@ -71,12 +55,9 @@ ParCompactionManager::ParCompactionManager() :
   // From some experiments (#klass/k)^2 for k = 10 seems a better fit, but this will
   // have to do for now until we are able to investigate a more optimal setting.
   _revisit_mdo_stack = new (ResourceObj::C_HEAP) GrowableArray<DataLayout*>(size*2, true);
 }
 ParCompactionManager::~ParCompactionManager() {
-  delete _overflow_stack;
-  delete _objarray_overflow_stack;
   delete _revisit_klass_stack;
   delete _revisit_mdo_stack;
   // _manager_array and _stack_array are statics
@@ -108,12 +89,8 @@ void ParCompactionManager::initialize(ParMarkBitMap* mbm) {
     _manager_array[i] = new ParCompactionManager();
     guarantee(_manager_array[i] != NULL, "Could not create ParCompactionManager");
     stack_array()->register_queue(i, _manager_array[i]->marking_stack());
-    _objarray_queues->register_queue(i, &_manager_array[i]->_objarray_queue);
+    _objarray_queues->register_queue(i, &_manager_array[i]->_objarray_stack);
-#ifdef USE_RegionTaskQueueWithOverflow
-    region_array()->register_queue(i, _manager_array[i]->region_stack()->task_queue());
-#else
     region_array()->register_queue(i, _manager_array[i]->region_stack());
-#endif
   }
   // The VMThread gets its own ParCompactionManager, which is not available
@@ -149,57 +126,6 @@ bool ParCompactionManager::should_reset_only() {
   return action() == ParCompactionManager::ResetObjects;
 }
-// For now save on a stack
-void ParCompactionManager::save_for_scanning(oop m) {
-  stack_push(m);
-}
-void ParCompactionManager::stack_push(oop obj) {
-  if(!marking_stack()->push(obj)) {
-    overflow_stack()->push(obj);
-  }
-}
-oop ParCompactionManager::retrieve_for_scanning() {
-  // Should not be used in the parallel case
-  ShouldNotReachHere();
-  return NULL;
-}
-// Save region on a stack
-void ParCompactionManager::save_for_processing(size_t region_index) {
-#ifdef ASSERT
-  const ParallelCompactData& sd = PSParallelCompact::summary_data();
-  ParallelCompactData::RegionData* const region_ptr = sd.region(region_index);
-  assert(region_ptr->claimed(), "must be claimed");
-  assert(region_ptr->_pushed++ == 0, "should only be pushed once");
-#endif
-  region_stack_push(region_index);
-}
-void ParCompactionManager::region_stack_push(size_t region_index) {
-#ifdef USE_RegionTaskQueueWithOverflow
-  region_stack()->save(region_index);
-#else
-  if(!region_stack()->push(region_index)) {
-    region_overflow_stack()->push(region_index);
-  }
-#endif
-}
-bool ParCompactionManager::retrieve_for_processing(size_t& region_index) {
-#ifdef USE_RegionTaskQueueWithOverflow
-  return region_stack()->retrieve(region_index);
-#else
-  // Should not be used in the parallel case
-  ShouldNotReachHere();
-  return false;
-#endif
-}
 ParCompactionManager*
 ParCompactionManager::gc_thread_compaction_manager(int index) {
   assert(index >= 0 && index < (int)ParallelGCThreads, "index out of range");
@@ -218,8 +144,8 @@ void ParCompactionManager::follow_marking_stacks() {
   do {
     // Drain the overflow stack first, to allow stealing from the marking stack.
     oop obj;
-    while (!overflow_stack()->is_empty()) {
-      overflow_stack()->pop()->follow_contents(this);
+    while (marking_stack()->pop_overflow(obj)) {
+      obj->follow_contents(this);
     }
     while (marking_stack()->pop_local(obj)) {
       obj->follow_contents(this);
@@ -227,11 +153,10 @@ void ParCompactionManager::follow_marking_stacks() {
     // Process ObjArrays one at a time to avoid marking stack bloat.
     ObjArrayTask task;
-    if (!_objarray_overflow_stack->is_empty()) {
-      task = _objarray_overflow_stack->pop();
+    if (_objarray_stack.pop_overflow(task)) {
       objArrayKlass* const k = (objArrayKlass*)task.obj()->blueprint();
       k->oop_follow_contents(this, task.obj(), task.index());
-    } else if (_objarray_queue.pop_local(task)) {
+    } else if (_objarray_stack.pop_local(task)) {
       objArrayKlass* const k = (objArrayKlass*)task.obj()->blueprint();
       k->oop_follow_contents(this, task.obj(), task.index());
     }
@@ -240,68 +165,18 @@ void ParCompactionManager::follow_marking_stacks() {
   assert(marking_stacks_empty(), "Sanity");
 }
-void ParCompactionManager::drain_region_overflow_stack() {
-  size_t region_index = (size_t) -1;
-  while(region_stack()->retrieve_from_overflow(region_index)) {
-    PSParallelCompact::fill_and_update_region(this, region_index);
-  }
-}
 void ParCompactionManager::drain_region_stacks() {
-#ifdef ASSERT
-  ParallelScavengeHeap* heap = (ParallelScavengeHeap*)Universe::heap();
-  assert(heap->kind() == CollectedHeap::ParallelScavengeHeap, "Sanity");
-  MutableSpace* to_space = heap->young_gen()->to_space();
-  MutableSpace* old_space = heap->old_gen()->object_space();
-  MutableSpace* perm_space = heap->perm_gen()->object_space();
-#endif /* ASSERT */
-#if 1 // def DO_PARALLEL - the serial code hasn't been updated
   do {
-#ifdef USE_RegionTaskQueueWithOverflow
-    // Drain overflow stack first, so other threads can steal from
-    // claimed stack while we work.
-    size_t region_index = (size_t) -1;
-    while(region_stack()->retrieve_from_overflow(region_index)) {
+    // Drain overflow stack first so other threads can steal.
+    size_t region_index;
+    while (region_stack()->pop_overflow(region_index)) {
       PSParallelCompact::fill_and_update_region(this, region_index);
     }
-    while (region_stack()->retrieve_from_stealable_queue(region_index)) {
+    while (region_stack()->pop_local(region_index)) {
       PSParallelCompact::fill_and_update_region(this, region_index);
     }
   } while (!region_stack()->is_empty());
-#else
-    // Drain overflow stack first, so other threads can steal from
-    // claimed stack while we work.
-    while(!region_overflow_stack()->is_empty()) {
-      size_t region_index = region_overflow_stack()->pop();
-      PSParallelCompact::fill_and_update_region(this, region_index);
-    }
-    size_t region_index = -1;
-    // obj is a reference!!!
-    while (region_stack()->pop_local(region_index)) {
-      // It would be nice to assert about the type of objects we might
-      // pop, but they can come from anywhere, unfortunately.
-      PSParallelCompact::fill_and_update_region(this, region_index);
-    }
-  } while((region_stack()->size() != 0) ||
-          (region_overflow_stack()->length() != 0));
-#endif
-#ifdef USE_RegionTaskQueueWithOverflow
-  assert(region_stack()->is_empty(), "Sanity");
-#else
-  assert(region_stack()->size() == 0, "Sanity");
-  assert(region_overflow_stack()->length() == 0, "Sanity");
-#endif
-#else
-  oop obj;
-  while (obj = retrieve_for_scanning()) {
-    obj->follow_contents(this);
-  }
-#endif
 }
 #ifdef ASSERT
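follow_marking_stacks() and drain_region_stacks() now share one drain discipline: empty the unbounded overflow list first (so other workers can steal from the bounded queue), then drain the local queue, and repeat until both are empty. A self-contained sketch under illustrative names (TwoLevelQueue is a stand-in, not HotSpot's OverflowTaskQueue):

    #include <deque>
    #include <vector>

    template <class T>
    struct TwoLevelQueue {
      std::deque<T>  local;      // stands in for the bounded, stealable queue
      std::vector<T> overflow;   // grows when the local queue is full

      bool pop_overflow(T& t) {
        if (overflow.empty()) return false;
        t = overflow.back(); overflow.pop_back(); return true;
      }
      bool pop_local(T& t) {
        if (local.empty()) return false;
        t = local.front(); local.pop_front(); return true;
      }
      bool is_empty() const { return local.empty() && overflow.empty(); }
    };

    template <class T, class Fn>
    void drain(TwoLevelQueue<T>& q, Fn process) {
      do {
        T t;
        while (q.pop_overflow(t)) process(t);  // overflow first, so others can steal
        while (q.pop_local(t))    process(t);
      } while (!q.is_empty());                 // processing may push more work
    }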

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2005, 2009, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2005, 2010, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -59,10 +59,10 @@ class ParCompactionManager : public CHeapObj {
  private:
   // 32-bit:  4K * 8 = 32KiB; 64-bit:  8K * 16 = 128KiB
-  #define OBJARRAY_QUEUE_SIZE (1 << NOT_LP64(12) LP64_ONLY(13))
-  typedef GenericTaskQueue<ObjArrayTask, OBJARRAY_QUEUE_SIZE> ObjArrayTaskQueue;
+  #define QUEUE_SIZE (1 << NOT_LP64(12) LP64_ONLY(13))
+  typedef OverflowTaskQueue<ObjArrayTask, QUEUE_SIZE> ObjArrayTaskQueue;
   typedef GenericTaskQueueSet<ObjArrayTaskQueue> ObjArrayTaskQueueSet;
-  #undef OBJARRAY_QUEUE_SIZE
+  #undef QUEUE_SIZE
   static ParCompactionManager** _manager_array;
   static OopTaskQueueSet*       _stack_array;
@@ -72,23 +72,13 @@ class ParCompactionManager : public CHeapObj {
   static PSOldGen*              _old_gen;
  private:
-  OopTaskQueue                  _marking_stack;
-  GrowableArray<oop>*           _overflow_stack;
-  typedef GrowableArray<ObjArrayTask> ObjArrayOverflowStack;
-  ObjArrayTaskQueue             _objarray_queue;
-  ObjArrayOverflowStack*        _objarray_overflow_stack;
+  OverflowTaskQueue<oop>        _marking_stack;
+  ObjArrayTaskQueue             _objarray_stack;
   // Is there a way to reuse the _marking_stack for the
   // saving empty regions?  For now just create a different
   // type of TaskQueue.
-#ifdef USE_RegionTaskQueueWithOverflow
-  RegionTaskQueueWithOverflow   _region_stack;
-#else
   RegionTaskQueue               _region_stack;
-  GrowableArray<size_t>*        _region_overflow_stack;
-#endif
 #if 1  // does this happen enough to need a per thread stack?
   GrowableArray<Klass*>*        _revisit_klass_stack;
@@ -107,16 +97,8 @@ private:
  protected:
   // Array of tasks.  Needed by the ParallelTaskTerminator.
   static RegionTaskQueueSet* region_array()      { return _region_array; }
-  OopTaskQueue*  marking_stack()                 { return &_marking_stack; }
-  GrowableArray<oop>* overflow_stack()           { return _overflow_stack; }
-#ifdef USE_RegionTaskQueueWithOverflow
-  RegionTaskQueueWithOverflow* region_stack()    { return &_region_stack; }
-#else
-  RegionTaskQueue*  region_stack()               { return &_region_stack; }
-  GrowableArray<size_t>* region_overflow_stack() {
-    return _region_overflow_stack;
-  }
-#endif
+  OverflowTaskQueue<oop>* marking_stack()        { return &_marking_stack; }
+  RegionTaskQueue* region_stack()                { return &_region_stack; }
   // Pushes onto the marking stack.  If the marking stack is full,
   // pushes onto the overflow stack.
@@ -124,11 +106,7 @@ private:
   // Do not implement an equivalent stack_pop.  Deal with the
   // marking stack and overflow stack directly.
-  // Pushes onto the region stack.  If the region stack is full,
-  // pushes onto the region overflow stack.
-  void region_stack_push(size_t region_index);
-public:
+ public:
   Action action() { return _action; }
   void set_action(Action v) { _action = v; }
@@ -157,22 +135,15 @@ public:
   GrowableArray<DataLayout*>* revisit_mdo_stack() { return _revisit_mdo_stack; }
 #endif
-  // Save oop for later processing.  Must not fail.
-  void save_for_scanning(oop m);
-  // Get a oop for scanning.  If returns null, no oop were found.
-  oop retrieve_for_scanning();
-  inline void push_objarray(oop obj, size_t index);
-  // Save region for later processing.  Must not fail.
-  void save_for_processing(size_t region_index);
-  // Get a region for processing.  If returns null, no region were found.
-  bool retrieve_for_processing(size_t& region_index);
+  // Save for later processing.  Must not fail.
+  inline void push(oop obj) { _marking_stack.push(obj); }
+  inline void push_objarray(oop objarray, size_t index);
+  inline void push_region(size_t index);
   // Access function for compaction managers
   static ParCompactionManager* gc_thread_compaction_manager(int index);
-  static bool steal(int queue_num, int* seed, Task& t) {
+  static bool steal(int queue_num, int* seed, oop& t) {
     return stack_array()->steal(queue_num, seed, t);
   }
@@ -180,8 +151,8 @@ public:
     return _objarray_queues->steal(queue_num, seed, t);
   }
-  static bool steal(int queue_num, int* seed, RegionTask& t) {
-    return region_array()->steal(queue_num, seed, t);
+  static bool steal(int queue_num, int* seed, size_t& region) {
+    return region_array()->steal(queue_num, seed, region);
   }
   // Process tasks remaining on any marking stack
@@ -191,9 +162,6 @@ public:
   // Process tasks remaining on any stack
   void drain_region_stacks();
-  // Process tasks remaining on any stack
-  void drain_region_overflow_stack();
   // Debugging support
 #ifdef ASSERT
   bool stacks_have_been_allocated();
@@ -208,6 +176,5 @@ inline ParCompactionManager* ParCompactionManager::manager_array(int index) {
 }
 bool ParCompactionManager::marking_stacks_empty() const {
-  return _marking_stack.size() == 0 && _overflow_stack->is_empty() &&
-    _objarray_queue.size() == 0 && _objarray_overflow_stack->is_empty();
+  return _marking_stack.is_empty() && _objarray_stack.is_empty();
 }
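With _marking_stack and _objarray_stack declared as OverflowTaskQueue instances, the explicit "push to queue, fall back to GrowableArray" code disappears from callers. Continuing the TwoLevelQueue sketch above (reusing its headers), a push that hides the overflow fallback could look like this; it is illustrative only, not the library's push():

    template <class T>
    void push(TwoLevelQueue<T>& q, const T& t, std::size_t local_capacity) {
      if (q.local.size() < local_capacity) {
        q.local.push_back(t);        // normal case: bounded, stealable queue
      } else {
        q.overflow.push_back(t);     // queue full: spill to the overflow list
      }
    }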

@@ -26,7 +26,16 @@ void ParCompactionManager::push_objarray(oop obj, size_t index)
 {
   ObjArrayTask task(obj, index);
   assert(task.is_valid(), "bad ObjArrayTask");
-  if (!_objarray_queue.push(task)) {
-    _objarray_overflow_stack->push(task);
-  }
+  _objarray_stack.push(task);
+}
+void ParCompactionManager::push_region(size_t index)
+{
+#ifdef ASSERT
+  const ParallelCompactData& sd = PSParallelCompact::summary_data();
+  ParallelCompactData::RegionData* const region_ptr = sd.region(index);
+  assert(region_ptr->claimed(), "must be claimed");
+  assert(region_ptr->_pushed++ == 0, "should only be pushed once");
+#endif
+  region_stack()->push(index);
 }
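push_region() keeps the "claimed, and pushed exactly once" checks, but only in debug builds. A stand-alone illustration of that kind of debug-only guard (RegionData here is a made-up type, not ParallelCompactData::RegionData):

    #include <atomic>
    #include <cassert>
    #include <cstddef>
    #include <vector>

    struct RegionData {
      std::atomic<bool> claimed{false};
    #ifndef NDEBUG
      std::atomic<int>  pushed{0};     // exists only to catch double pushes
    #endif
    };

    inline void push_region(RegionData& r, std::vector<std::size_t>& stack,
                            std::size_t index) {
      assert(r.claimed.load() && "must be claimed");
    #ifndef NDEBUG
      assert(r.pushed.fetch_add(1) == 0 && "should only be pushed once");
    #endif
      stack.push_back(index);
    }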

@@ -2474,7 +2474,7 @@ void PSParallelCompact::enqueue_region_draining_tasks(GCTaskQueue* q,
   for (size_t cur = end_region - 1; cur >= beg_region; --cur) {
     if (sd.region(cur)->claim_unsafe()) {
       ParCompactionManager* cm = ParCompactionManager::manager_array(which);
-      cm->save_for_processing(cur);
+      cm->push_region(cur);
       if (TraceParallelOldGCCompactionPhase && Verbose) {
         const size_t count_mod_8 = fillable_regions & 7;
@@ -3138,7 +3138,7 @@ void PSParallelCompact::decrement_destination_counts(ParCompactionManager* cm,
       assert(cur->data_size() > 0, "region must have live data");
       cur->decrement_destination_count();
       if (cur < enqueue_end && cur->available() && cur->claim()) {
-        cm->save_for_processing(sd.region(cur));
+        cm->push_region(sd.region(cur));
       }
     }
   }

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2005, 2009, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2005, 2010, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -1297,11 +1297,8 @@ inline void PSParallelCompact::mark_and_push(ParCompactionManager* cm, T* p) {
   T heap_oop = oopDesc::load_heap_oop(p);
   if (!oopDesc::is_null(heap_oop)) {
     oop obj = oopDesc::decode_heap_oop_not_null(heap_oop);
-    if (mark_bitmap()->is_unmarked(obj)) {
-      if (mark_obj(obj)) {
-        // This thread marked the object and owns the subsequent processing of it.
-        cm->save_for_scanning(obj);
-      }
+    if (mark_bitmap()->is_unmarked(obj) && mark_obj(obj)) {
+      cm->push(obj);
     }
   }
 }
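mark_and_push() now reads: if the object is unmarked and this thread wins the race to mark it, push it, so each object is scanned by exactly one worker. A sketch of that shape using a plain atomic flag instead of HotSpot's mark bitmap (Obj is illustrative):

    #include <atomic>
    #include <vector>

    struct Obj { std::atomic<bool> marked{false}; };

    inline void mark_and_push(Obj* obj, std::vector<Obj*>& marking_stack) {
      if (obj == nullptr) return;                     // plays the role of the null check
      bool expected = false;
      if (!obj->marked.load(std::memory_order_relaxed) &&
          obj->marked.compare_exchange_strong(expected, true)) {
        marking_stack.push_back(obj);                 // the winner owns the processing
      }
    }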

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2002, 2009, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2002, 2010, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -94,45 +94,13 @@ void PSPromotionManager::post_scavenge() {
   print_stats();
 #endif // PS_PM_STATS
-  for(uint i=0; i<ParallelGCThreads+1; i++) {
+  for (uint i = 0; i < ParallelGCThreads + 1; i++) {
     PSPromotionManager* manager = manager_array(i);
-    // the guarantees are a bit gratuitous but, if one fires, we'll
-    // have a better idea of what went wrong
-    if (i < ParallelGCThreads) {
-      guarantee((!UseDepthFirstScavengeOrder ||
-                 manager->overflow_stack_depth()->length() <= 0),
-                "promotion manager overflow stack must be empty");
-      guarantee((UseDepthFirstScavengeOrder ||
-                 manager->overflow_stack_breadth()->length() <= 0),
-                "promotion manager overflow stack must be empty");
-      guarantee((!UseDepthFirstScavengeOrder ||
-                 manager->claimed_stack_depth()->size() <= 0),
-                "promotion manager claimed stack must be empty");
-      guarantee((UseDepthFirstScavengeOrder ||
-                 manager->claimed_stack_breadth()->size() <= 0),
-                "promotion manager claimed stack must be empty");
+    if (UseDepthFirstScavengeOrder) {
+      assert(manager->claimed_stack_depth()->is_empty(), "should be empty");
     } else {
-      guarantee((!UseDepthFirstScavengeOrder ||
-                 manager->overflow_stack_depth()->length() <= 0),
-                "VM Thread promotion manager overflow stack "
-                "must be empty");
-      guarantee((UseDepthFirstScavengeOrder ||
-                 manager->overflow_stack_breadth()->length() <= 0),
-                "VM Thread promotion manager overflow stack "
-                "must be empty");
-      guarantee((!UseDepthFirstScavengeOrder ||
-                 manager->claimed_stack_depth()->size() <= 0),
-                "VM Thread promotion manager claimed stack "
-                "must be empty");
-      guarantee((UseDepthFirstScavengeOrder ||
-                 manager->claimed_stack_breadth()->size() <= 0),
-                "VM Thread promotion manager claimed stack "
-                "must be empty");
+      assert(manager->claimed_stack_breadth()->is_empty(), "should be empty");
     }
     manager->flush_labs();
   }
 }
@@ -181,15 +149,9 @@ PSPromotionManager::PSPromotionManager() {
   if (depth_first()) {
     claimed_stack_depth()->initialize();
     queue_size = claimed_stack_depth()->max_elems();
-    // We want the overflow stack to be permanent
-    _overflow_stack_depth = new (ResourceObj::C_HEAP) GrowableArray<StarTask>(10, true);
-    _overflow_stack_breadth = NULL;
   } else {
     claimed_stack_breadth()->initialize();
     queue_size = claimed_stack_breadth()->max_elems();
-    // We want the overflow stack to be permanent
-    _overflow_stack_breadth = new (ResourceObj::C_HEAP) GrowableArray<oop>(10, true);
-    _overflow_stack_depth = NULL;
   }
   _totally_drain = (ParallelGCThreads == 1) || (GCDrainStackTargetSize == 0);
@@ -209,8 +171,7 @@ PSPromotionManager::PSPromotionManager() {
 }
 void PSPromotionManager::reset() {
-  assert(claimed_stack_empty(), "reset of non-empty claimed stack");
-  assert(overflow_stack_empty(), "reset of non-empty overflow stack");
+  assert(stacks_empty(), "reset of non-empty stack");
   // We need to get an assert in here to make sure the labs are always flushed.
@@ -243,7 +204,7 @@ void PSPromotionManager::reset() {
 void PSPromotionManager::drain_stacks_depth(bool totally_drain) {
   assert(depth_first(), "invariant");
-  assert(overflow_stack_depth() != NULL, "invariant");
+  assert(claimed_stack_depth()->overflow_stack() != NULL, "invariant");
   totally_drain = totally_drain || _totally_drain;
 #ifdef ASSERT
@@ -254,41 +215,35 @@ void PSPromotionManager::drain_stacks_depth(bool totally_drain) {
   MutableSpace* perm_space = heap->perm_gen()->object_space();
 #endif /* ASSERT */
+  OopStarTaskQueue* const tq = claimed_stack_depth();
   do {
     StarTask p;
     // Drain overflow stack first, so other threads can steal from
     // claimed stack while we work.
-    while(!overflow_stack_depth()->is_empty()) {
-      // linux compiler wants different overloaded operator= in taskqueue to
-      // assign to p that the other compilers don't like.
-      StarTask ptr = overflow_stack_depth()->pop();
-      process_popped_location_depth(ptr);
+    while (tq->pop_overflow(p)) {
+      process_popped_location_depth(p);
     }
     if (totally_drain) {
-      while (claimed_stack_depth()->pop_local(p)) {
+      while (tq->pop_local(p)) {
         process_popped_location_depth(p);
       }
     } else {
-      while (claimed_stack_depth()->size() > _target_stack_size &&
-             claimed_stack_depth()->pop_local(p)) {
+      while (tq->size() > _target_stack_size && tq->pop_local(p)) {
         process_popped_location_depth(p);
       }
     }
-  } while( (totally_drain && claimed_stack_depth()->size() > 0) ||
-           (overflow_stack_depth()->length() > 0) );
-  assert(!totally_drain || claimed_stack_empty(), "Sanity");
-  assert(totally_drain ||
-         claimed_stack_depth()->size() <= _target_stack_size,
-         "Sanity");
-  assert(overflow_stack_empty(), "Sanity");
+  } while (totally_drain && !tq->taskqueue_empty() || !tq->overflow_empty());
+  assert(!totally_drain || tq->taskqueue_empty(), "Sanity");
+  assert(totally_drain || tq->size() <= _target_stack_size, "Sanity");
+  assert(tq->overflow_empty(), "Sanity");
 }
 void PSPromotionManager::drain_stacks_breadth(bool totally_drain) {
   assert(!depth_first(), "invariant");
-  assert(overflow_stack_breadth() != NULL, "invariant");
+  assert(claimed_stack_breadth()->overflow_stack() != NULL, "invariant");
   totally_drain = totally_drain || _totally_drain;
 #ifdef ASSERT
@@ -299,51 +254,39 @@ void PSPromotionManager::drain_stacks_breadth(bool totally_drain) {
   MutableSpace* perm_space = heap->perm_gen()->object_space();
 #endif /* ASSERT */
+  OverflowTaskQueue<oop>* const tq = claimed_stack_breadth();
   do {
     oop obj;
     // Drain overflow stack first, so other threads can steal from
     // claimed stack while we work.
-    while(!overflow_stack_breadth()->is_empty()) {
-      obj = overflow_stack_breadth()->pop();
+    while (tq->pop_overflow(obj)) {
      obj->copy_contents(this);
     }
     if (totally_drain) {
-      // obj is a reference!!!
-      while (claimed_stack_breadth()->pop_local(obj)) {
-        // It would be nice to assert about the type of objects we might
-        // pop, but they can come from anywhere, unfortunately.
+      while (tq->pop_local(obj)) {
         obj->copy_contents(this);
       }
     } else {
-      // obj is a reference!!!
-      while (claimed_stack_breadth()->size() > _target_stack_size &&
-             claimed_stack_breadth()->pop_local(obj)) {
-        // It would be nice to assert about the type of objects we might
-        // pop, but they can come from anywhere, unfortunately.
+      while (tq->size() > _target_stack_size && tq->pop_local(obj)) {
        obj->copy_contents(this);
       }
     }
     // If we could not find any other work, flush the prefetch queue
-    if (claimed_stack_breadth()->size() == 0 &&
-        (overflow_stack_breadth()->length() == 0)) {
+    if (tq->is_empty()) {
       flush_prefetch_queue();
     }
-  } while((totally_drain && claimed_stack_breadth()->size() > 0) ||
-          (overflow_stack_breadth()->length() > 0));
-  assert(!totally_drain || claimed_stack_empty(), "Sanity");
-  assert(totally_drain ||
-         claimed_stack_breadth()->size() <= _target_stack_size,
-         "Sanity");
-  assert(overflow_stack_empty(), "Sanity");
+  } while (totally_drain && !tq->taskqueue_empty() || !tq->overflow_empty());
+  assert(!totally_drain || tq->taskqueue_empty(), "Sanity");
+  assert(totally_drain || tq->size() <= _target_stack_size, "Sanity");
+  assert(tq->overflow_empty(), "Sanity");
 }
 void PSPromotionManager::flush_labs() {
-  assert(claimed_stack_empty(), "Attempt to flush lab with live stack");
-  assert(overflow_stack_empty(), "Attempt to flush lab with live overflow stack");
+  assert(stacks_empty(), "Attempt to flush lab with live stack");
   // If either promotion lab fills up, we can flush the
   // lab but not refill it, so check first.
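Both drain routines keep the partial-drain policy: when not totally draining, stop once the local queue is back down to a target size so other workers still have entries to steal, while the overflow list is always emptied. Reusing the TwoLevelQueue sketch from earlier (drain_to_target and target_size are illustrative names; the loop condition mirrors the code above):

    template <class T, class Fn>
    void drain_to_target(TwoLevelQueue<T>& q, Fn process,
                         bool totally_drain, std::size_t target_size) {
      do {
        T t;
        while (q.pop_overflow(t)) process(t);
        if (totally_drain) {
          while (q.pop_local(t)) process(t);
        } else {
          while (q.local.size() > target_size && q.pop_local(t)) process(t);
        }
      } while ((totally_drain && !q.local.empty()) || !q.overflow.empty());
    }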

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2002, 2008, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2002, 2010, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -78,9 +78,7 @@ class PSPromotionManager : public CHeapObj {
   PrefetchQueue                       _prefetch_queue;
   OopStarTaskQueue                    _claimed_stack_depth;
-  GrowableArray<StarTask>*            _overflow_stack_depth;
-  OopTaskQueue                        _claimed_stack_breadth;
-  GrowableArray<oop>*                 _overflow_stack_breadth;
+  OverflowTaskQueue<oop>              _claimed_stack_breadth;
   bool                                _depth_first;
   bool                                _totally_drain;
@@ -97,9 +95,6 @@ class PSPromotionManager : public CHeapObj {
   template <class T> inline void claim_or_forward_internal_depth(T* p);
   template <class T> inline void claim_or_forward_internal_breadth(T* p);
-  GrowableArray<StarTask>* overflow_stack_depth() { return _overflow_stack_depth; }
-  GrowableArray<oop>* overflow_stack_breadth()    { return _overflow_stack_breadth; }
   // On the task queues we push reference locations as well as
   // partially-scanned arrays (in the latter case, we push an oop to
   // the from-space image of the array and the length on the
@@ -151,18 +146,19 @@ class PSPromotionManager : public CHeapObj {
 #if PS_PM_STATS
     ++_total_pushes;
+    int stack_length = claimed_stack_depth()->overflow_stack()->length();
 #endif // PS_PM_STATS
-    if (!claimed_stack_depth()->push(p)) {
-      overflow_stack_depth()->push(p);
+    claimed_stack_depth()->push(p);
 #if PS_PM_STATS
+    if (claimed_stack_depth()->overflow_stack()->length() != stack_length) {
       ++_overflow_pushes;
-      uint stack_length = (uint) overflow_stack_depth()->length();
-      if (stack_length > _max_overflow_length) {
-        _max_overflow_length = stack_length;
+      if ((uint)stack_length + 1 > _max_overflow_length) {
+        _max_overflow_length = (uint)stack_length + 1;
       }
-#endif // PS_PM_STATS
     }
+#endif // PS_PM_STATS
   }
   void push_breadth(oop o) {
@@ -170,18 +166,19 @@ class PSPromotionManager : public CHeapObj {
 #if PS_PM_STATS
     ++_total_pushes;
+    int stack_length = claimed_stack_breadth()->overflow_stack()->length();
 #endif // PS_PM_STATS
-    if(!claimed_stack_breadth()->push(o)) {
-      overflow_stack_breadth()->push(o);
+    claimed_stack_breadth()->push(o);
 #if PS_PM_STATS
+    if (claimed_stack_breadth()->overflow_stack()->length() != stack_length) {
       ++_overflow_pushes;
-      uint stack_length = (uint) overflow_stack_breadth()->length();
-      if (stack_length > _max_overflow_length) {
-        _max_overflow_length = stack_length;
+      if ((uint)stack_length + 1 > _max_overflow_length) {
+        _max_overflow_length = (uint)stack_length + 1;
       }
-#endif // PS_PM_STATS
     }
+#endif // PS_PM_STATS
   }
  protected:
@@ -199,12 +196,10 @@ class PSPromotionManager : public CHeapObj {
   static PSPromotionManager* vm_thread_promotion_manager();
   static bool steal_depth(int queue_num, int* seed, StarTask& t) {
-    assert(stack_array_depth() != NULL, "invariant");
     return stack_array_depth()->steal(queue_num, seed, t);
   }
-  static bool steal_breadth(int queue_num, int* seed, Task& t) {
-    assert(stack_array_breadth() != NULL, "invariant");
+  static bool steal_breadth(int queue_num, int* seed, oop& t) {
     return stack_array_breadth()->steal(queue_num, seed, t);
   }
@@ -214,7 +209,7 @@ class PSPromotionManager : public CHeapObj {
   OopStarTaskQueue* claimed_stack_depth() {
     return &_claimed_stack_depth;
   }
-  OopTaskQueue* claimed_stack_breadth() {
+  OverflowTaskQueue<oop>* claimed_stack_breadth() {
     return &_claimed_stack_breadth;
   }
@@ -246,25 +241,13 @@ class PSPromotionManager : public CHeapObj {
   void drain_stacks_depth(bool totally_drain);
   void drain_stacks_breadth(bool totally_drain);
-  bool claimed_stack_empty() {
-    if (depth_first()) {
-      return claimed_stack_depth()->size() <= 0;
-    } else {
-      return claimed_stack_breadth()->size() <= 0;
-    }
-  }
-  bool overflow_stack_empty() {
-    if (depth_first()) {
-      return overflow_stack_depth()->length() <= 0;
-    } else {
-      return overflow_stack_breadth()->length() <= 0;
-    }
+  bool depth_first() const {
+    return _depth_first;
   }
   bool stacks_empty() {
-    return claimed_stack_empty() && overflow_stack_empty();
-  }
-  bool depth_first() {
-    return _depth_first;
+    return depth_first() ?
+      claimed_stack_depth()->is_empty() :
+      claimed_stack_breadth()->is_empty();
   }
   inline void process_popped_location_depth(StarTask p);
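Because push() no longer reports where an element landed, the PS_PM_STATS counters above detect an overflow push by watching the overflow list length across the call. The same idea expressed against the earlier TwoLevelQueue sketch (PushStats and counted_push are made-up names):

    struct PushStats { unsigned total = 0, overflowed = 0, max_overflow_len = 0; };

    template <class T>
    void counted_push(TwoLevelQueue<T>& q, const T& t,
                      std::size_t capacity, PushStats& s) {
      ++s.total;
      std::size_t before = q.overflow.size();
      push(q, t, capacity);                    // push() from the earlier sketch
      if (q.overflow.size() != before) {       // it spilled: count an overflow push
        ++s.overflowed;
        if (before + 1 > s.max_overflow_len)
          s.max_overflow_len = (unsigned)(before + 1);
      }
    }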

@@ -414,7 +414,6 @@ bool PSScavenge::invoke_no_policy() {
   }
   // Finally, flush the promotion_manager's labs, and deallocate its stacks.
-  assert(promotion_manager->claimed_stack_empty(), "Sanity");
   PSPromotionManager::post_scavenge();
   promotion_failure_occurred = promotion_failed();

@@ -89,6 +89,21 @@ adlcVMDeps.hpp                          allocation.hpp
 allocation.hpp                          c2_globals.hpp
+bcEscapeAnalyzer.cpp                    bcEscapeAnalyzer.hpp
+bcEscapeAnalyzer.cpp                    bitMap.inline.hpp
+bcEscapeAnalyzer.cpp                    bytecode.hpp
+bcEscapeAnalyzer.cpp                    ciConstant.hpp
+bcEscapeAnalyzer.cpp                    ciField.hpp
+bcEscapeAnalyzer.cpp                    ciMethodBlocks.hpp
+bcEscapeAnalyzer.cpp                    ciStreams.hpp
+bcEscapeAnalyzer.hpp                    allocation.hpp
+bcEscapeAnalyzer.hpp                    ciMethod.hpp
+bcEscapeAnalyzer.hpp                    ciMethodData.hpp
+bcEscapeAnalyzer.hpp                    dependencies.hpp
+bcEscapeAnalyzer.hpp                    growableArray.hpp
+bcEscapeAnalyzer.hpp                    vectset.hpp
 block.cpp                               allocation.inline.hpp
 block.cpp                               block.hpp
 block.cpp                               cfgnode.hpp
@@ -239,6 +254,7 @@ chaitin_<os_family>.cpp                 machnode.hpp
 ciEnv.cpp                               compileLog.hpp
 ciEnv.cpp                               runtime.hpp
+ciMethod.cpp                            bcEscapeAnalyzer.hpp
 ciMethod.cpp                            ciTypeFlow.hpp
 ciMethod.cpp                            methodOop.hpp

@@ -301,20 +301,6 @@ barrierSet.hpp                          oopsHierarchy.hpp
 barrierSet.inline.hpp                   barrierSet.hpp
 barrierSet.inline.hpp                   cardTableModRefBS.hpp
-bcEscapeAnalyzer.cpp                    bcEscapeAnalyzer.hpp
-bcEscapeAnalyzer.cpp                    bitMap.inline.hpp
-bcEscapeAnalyzer.cpp                    bytecode.hpp
-bcEscapeAnalyzer.cpp                    ciConstant.hpp
-bcEscapeAnalyzer.cpp                    ciField.hpp
-bcEscapeAnalyzer.cpp                    ciMethodBlocks.hpp
-bcEscapeAnalyzer.cpp                    ciStreams.hpp
-bcEscapeAnalyzer.hpp                    allocation.hpp
-bcEscapeAnalyzer.hpp                    ciMethod.hpp
-bcEscapeAnalyzer.hpp                    ciMethodData.hpp
-bcEscapeAnalyzer.hpp                    dependencies.hpp
-bcEscapeAnalyzer.hpp                    growableArray.hpp
 biasedLocking.cpp                       biasedLocking.hpp
 biasedLocking.cpp                       klass.inline.hpp
 biasedLocking.cpp                       markOop.hpp
@@ -545,6 +531,7 @@ ciCPCache.cpp                           ciUtilities.hpp
 ciCPCache.hpp                           ciClassList.hpp
 ciCPCache.hpp                           ciObject.hpp
+ciCPCache.hpp                           cpCacheOop.hpp
 ciEnv.cpp                               allocation.inline.hpp
 ciEnv.cpp                               ciConstant.hpp
@@ -664,7 +651,6 @@ ciKlassKlass.hpp                        ciSymbol.hpp
 ciMethod.cpp                            abstractCompiler.hpp
 ciMethod.cpp                            allocation.inline.hpp
-ciMethod.cpp                            bcEscapeAnalyzer.hpp
 ciMethod.cpp                            bitMap.inline.hpp
 ciMethod.cpp                            ciCallProfile.hpp
 ciMethod.cpp                            ciExceptionHandler.hpp
@@ -823,6 +809,7 @@ ciSignature.hpp                         growableArray.hpp
 ciStreams.cpp                           ciCallSite.hpp
 ciStreams.cpp                           ciConstant.hpp
+ciStreams.cpp                           ciCPCache.hpp
 ciStreams.cpp                           ciField.hpp
 ciStreams.cpp                           ciStreams.hpp
 ciStreams.cpp                           ciUtilities.hpp
@@ -962,7 +949,6 @@ classLoader.cpp                         threadService.hpp
 classLoader.cpp                         timer.hpp
 classLoader.cpp                         universe.inline.hpp
 classLoader.cpp                         vmSymbols.hpp
-classLoader.cpp                         vtune.hpp
 classLoader.hpp                         classFileParser.hpp
 classLoader.hpp                         perfData.hpp
@@ -1002,7 +988,6 @@ codeBlob.cpp                            relocInfo.hpp
 codeBlob.cpp                            safepoint.hpp
 codeBlob.cpp                            sharedRuntime.hpp
 codeBlob.cpp                            vframe.hpp
-codeBlob.cpp                            vtune.hpp
 codeBlob.hpp                            codeBuffer.hpp
 codeBlob.hpp                            frame.hpp
@@ -2165,7 +2150,6 @@ interpreter.cpp                         sharedRuntime.hpp
 interpreter.cpp                         stubRoutines.hpp
 interpreter.cpp                         templateTable.hpp
 interpreter.cpp                         timer.hpp
-interpreter.cpp                         vtune.hpp
 interpreter.hpp                         cppInterpreter.hpp
 interpreter.hpp                         stubs.hpp
@@ -2321,7 +2305,6 @@ java.cpp                                universe.hpp
 java.cpp                                vmError.hpp
 java.cpp                                vm_operations.hpp
 java.cpp                                vm_version_<arch>.hpp
-java.cpp                                vtune.hpp
 java.hpp                                os.hpp
@@ -3048,7 +3031,6 @@ nmethod.cpp                             nmethod.hpp
 nmethod.cpp                             scopeDesc.hpp
 nmethod.cpp                             sharedRuntime.hpp
 nmethod.cpp                             sweeper.hpp
-nmethod.cpp                             vtune.hpp
 nmethod.cpp                             xmlstream.hpp
 nmethod.hpp                             codeBlob.hpp
@@ -3771,7 +3753,6 @@ sharedRuntime.cpp                       vframeArray.hpp
 sharedRuntime.cpp                       vmSymbols.hpp
 sharedRuntime.cpp                       vmreg_<arch>.inline.hpp
 sharedRuntime.cpp                       vtableStubs.hpp
-sharedRuntime.cpp                       vtune.hpp
 sharedRuntime.cpp                       xmlstream.hpp
 sharedRuntime.hpp                       allocation.hpp
@@ -3935,7 +3916,6 @@ stubCodeGenerator.cpp                   disassembler.hpp
 stubCodeGenerator.cpp                   forte.hpp
 stubCodeGenerator.cpp                   oop.inline.hpp
 stubCodeGenerator.cpp                   stubCodeGenerator.hpp
-stubCodeGenerator.cpp                   vtune.hpp
 stubCodeGenerator.hpp                   allocation.hpp
 stubCodeGenerator.hpp                   assembler.hpp
@@ -4456,7 +4436,6 @@ universe.cpp                            universe.hpp
 universe.cpp                            universe.inline.hpp
 universe.cpp                            vmSymbols.hpp
 universe.cpp                            vm_operations.hpp
-universe.cpp                            vtune.hpp
 universe.hpp                            growableArray.hpp
 universe.hpp                            handles.hpp
@@ -4719,7 +4698,6 @@ vtableStubs.cpp                         mutexLocker.hpp
 vtableStubs.cpp                         resourceArea.hpp
 vtableStubs.cpp                         sharedRuntime.hpp
 vtableStubs.cpp                         vtableStubs.hpp
-vtableStubs.cpp                         vtune.hpp
 vtableStubs.hpp                         allocation.hpp
@@ -4733,11 +4711,6 @@ vtableStubs_<arch_model>.cpp            sharedRuntime.hpp
 vtableStubs_<arch_model>.cpp            vmreg_<arch>.inline.hpp
 vtableStubs_<arch_model>.cpp            vtableStubs.hpp
-vtune.hpp                               allocation.hpp
-vtune_<os_family>.cpp                   interpreter.hpp
-vtune_<os_family>.cpp                   vtune.hpp
 watermark.hpp                           allocation.hpp
 watermark.hpp                           globalDefinitions.hpp

@@ -136,25 +136,24 @@ int Bytecode_tableswitch::dest_offset_at(int i) const {
 // Implementation of Bytecode_invoke
 void Bytecode_invoke::verify() const {
-  Bytecodes::Code bc = adjusted_invoke_code();
   assert(is_valid(), "check invoke");
   assert(method()->constants()->cache() != NULL, "do not call this from verifier or rewriter");
 }
-symbolOop Bytecode_invoke::signature() const {
+symbolOop Bytecode_member_ref::signature() const {
   constantPoolOop constants = method()->constants();
   return constants->signature_ref_at(index());
 }
-symbolOop Bytecode_invoke::name() const {
+symbolOop Bytecode_member_ref::name() const {
   constantPoolOop constants = method()->constants();
   return constants->name_ref_at(index());
 }
-BasicType Bytecode_invoke::result_type(Thread *thread) const {
+BasicType Bytecode_member_ref::result_type(Thread *thread) const {
   symbolHandle sh(thread, signature());
   ResultTypeFinder rts(sh);
   rts.iterate();
@@ -167,9 +166,9 @@ methodHandle Bytecode_invoke::static_target(TRAPS) {
   KlassHandle resolved_klass;
   constantPoolHandle constants(THREAD, _method->constants());
-  if (adjusted_invoke_code() == Bytecodes::_invokedynamic) {
+  if (java_code() == Bytecodes::_invokedynamic) {
     LinkResolver::resolve_dynamic_method(m, resolved_klass, constants, index(), CHECK_(methodHandle()));
-  } else if (adjusted_invoke_code() != Bytecodes::_invokeinterface) {
+  } else if (java_code() != Bytecodes::_invokeinterface) {
     LinkResolver::resolve_method(m, resolved_klass, constants, index(), CHECK_(methodHandle()));
   } else {
     LinkResolver::resolve_interface_method(m, resolved_klass, constants, index(), CHECK_(methodHandle()));
@@ -178,51 +177,68 @@ methodHandle Bytecode_invoke::static_target(TRAPS) {
 }
-int Bytecode_invoke::index() const {
+int Bytecode_member_ref::index() const {
   // Note:  Rewriter::rewrite changes the Java_u2 of an invokedynamic to a native_u4,
   // at the same time it allocates per-call-site CP cache entries.
-  Bytecodes::Code stdc = Bytecodes::java_code(code());
-  Bytecode* invoke = Bytecode_at(bcp());
-  if (invoke->has_index_u4(stdc))
-    return invoke->get_index_u4(stdc);
+  Bytecodes::Code rawc = code();
+  Bytecode* invoke = bytecode();
+  if (invoke->has_index_u4(rawc))
+    return invoke->get_index_u4(rawc);
   else
-    return invoke->get_index_u2_cpcache(stdc);
+    return invoke->get_index_u2_cpcache(rawc);
 }
+int Bytecode_member_ref::pool_index() const {
+  int index = this->index();
+  DEBUG_ONLY({
+      if (!bytecode()->has_index_u4(code()))
+        index -= constantPoolOopDesc::CPCACHE_INDEX_TAG;
+    });
+  return _method->constants()->cache()->entry_at(index)->constant_pool_index();
+}
 // Implementation of Bytecode_field
 void Bytecode_field::verify() const {
-  Bytecodes::Code stdc = Bytecodes::java_code(code());
-  assert(stdc == Bytecodes::_putstatic || stdc == Bytecodes::_getstatic ||
-         stdc == Bytecodes::_putfield || stdc == Bytecodes::_getfield, "check field");
+  assert(is_valid(), "check field");
 }
-bool Bytecode_field::is_static() const {
-  Bytecodes::Code stdc = Bytecodes::java_code(code());
-  return stdc == Bytecodes::_putstatic || stdc == Bytecodes::_getstatic;
+// Implementation of Bytecode_loadconstant
+int Bytecode_loadconstant::raw_index() const {
+  Bytecode* bcp = bytecode();
+  Bytecodes::Code rawc = bcp->code();
+  assert(rawc != Bytecodes::_wide, "verifier prevents this");
+  if (Bytecodes::java_code(rawc) == Bytecodes::_ldc)
+    return bcp->get_index_u1(rawc);
+  else
+    return bcp->get_index_u2(rawc, false);
 }
+int Bytecode_loadconstant::pool_index() const {
-int Bytecode_field::index() const {
-  Bytecode* invoke = Bytecode_at(bcp());
-  return invoke->get_index_u2_cpcache(Bytecodes::_getfield);
+  int index = raw_index();
+  if (has_cache_index()) {
+    return _method->constants()->cache()->entry_at(index)->constant_pool_index();
+  }
+  return index;
 }
-// Implementation of Bytecodes loac constant
+BasicType Bytecode_loadconstant::result_type() const {
-int Bytecode_loadconstant::index() const {
-  Bytecodes::Code stdc = Bytecodes::java_code(code());
-  if (stdc != Bytecodes::_wide) {
-    if (Bytecodes::java_code(stdc) == Bytecodes::_ldc)
-      return get_index_u1(stdc);
-    else
-      return get_index_u2(stdc, false);
+  int index = pool_index();
+  constantTag tag = _method->constants()->tag_at(index);
+  return tag.basic_type();
+}
+oop Bytecode_loadconstant::resolve_constant(TRAPS) const {
+  assert(_method.not_null(), "must supply method to resolve constant");
+  int index = raw_index();
+  constantPoolOop constants = _method->constants();
+  if (has_cache_index()) {
+    return constants->resolve_cached_constant_at(index, THREAD);
+  } else {
+    return constants->resolve_constant_at(index, THREAD);
   }
-  stdc = Bytecodes::code_at(addr_at(1));
-  return get_index_u2(stdc, true);
 }
 //------------------------------------------------------------------------------
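pool_index() and resolve_constant() above both hinge on one translation: a rewritten ldc carries an index into the constant-pool cache, and the cache entry remembers the original constant-pool index. A minimal illustration with plain vectors standing in for the oop structures (CPCacheEntry is a made-up type, not HotSpot's ConstantPoolCacheEntry):

    #include <cstddef>
    #include <vector>

    struct CPCacheEntry { int constant_pool_index; };

    inline int pool_index(int raw_index, bool has_cache_index,
                          const std::vector<CPCacheEntry>& cache) {
      if (!has_cache_index) return raw_index;   // plain ldc: already a CP index
      return cache.at((std::size_t)raw_index).constant_pool_index;
    }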

@ -76,9 +76,13 @@ class Bytecode: public ThisRelativeObj {
return Bytes::get_native_u2(p); return Bytes::get_native_u2(p);
else return Bytes::get_Java_u2(p); else return Bytes::get_Java_u2(p);
} }
int get_index_u1_cpcache(Bytecodes::Code bc) const {
assert_same_format_as(bc); assert_index_size(1, bc);
return *(jubyte*)addr_at(1) + constantPoolOopDesc::CPCACHE_INDEX_TAG;
}
int get_index_u2_cpcache(Bytecodes::Code bc) const { int get_index_u2_cpcache(Bytecodes::Code bc) const {
assert_same_format_as(bc); assert_index_size(2, bc); assert_native_index(bc); assert_same_format_as(bc); assert_index_size(2, bc); assert_native_index(bc);
return Bytes::get_native_u2(addr_at(1)) DEBUG_ONLY(+ constantPoolOopDesc::CPCACHE_INDEX_TAG); return Bytes::get_native_u2(addr_at(1)) + constantPoolOopDesc::CPCACHE_INDEX_TAG;
} }
int get_index_u4(Bytecodes::Code bc) const { int get_index_u4(Bytecodes::Code bc) const {
assert_same_format_as(bc); assert_index_size(4, bc); assert_same_format_as(bc); assert_index_size(4, bc);
@ -152,7 +156,7 @@ class Bytecode_lookupswitch: public Bytecode {
inline Bytecode_lookupswitch* Bytecode_lookupswitch_at(address bcp) { inline Bytecode_lookupswitch* Bytecode_lookupswitch_at(address bcp) {
Bytecode_lookupswitch* b = (Bytecode_lookupswitch*)bcp; Bytecode_lookupswitch* b = (Bytecode_lookupswitch*)bcp;
debug_only(b->verify()); DEBUG_ONLY(b->verify());
return b; return b;
} }
@ -174,44 +178,56 @@ class Bytecode_tableswitch: public Bytecode {
inline Bytecode_tableswitch* Bytecode_tableswitch_at(address bcp) { inline Bytecode_tableswitch* Bytecode_tableswitch_at(address bcp) {
Bytecode_tableswitch* b = (Bytecode_tableswitch*)bcp; Bytecode_tableswitch* b = (Bytecode_tableswitch*)bcp;
debug_only(b->verify()); DEBUG_ONLY(b->verify());
return b; return b;
} }
// Abstraction for invoke_{virtual, static, interface, special} // Common code for decoding invokes and field references.
class Bytecode_invoke: public ResourceObj { class Bytecode_member_ref: public ResourceObj {
protected: protected:
methodHandle _method; // method containing the bytecode methodHandle _method; // method containing the bytecode
int _bci; // position of the bytecode int _bci; // position of the bytecode
Bytecode_invoke(methodHandle method, int bci) : _method(method), _bci(bci) {} Bytecode_member_ref(methodHandle method, int bci) : _method(method), _bci(bci) {}
public:
// Attributes
methodHandle method() const { return _method; }
int bci() const { return _bci; }
address bcp() const { return _method->bcp_from(bci()); }
Bytecode* bytecode() const { return Bytecode_at(bcp()); }
int index() const; // cache index (loaded from instruction)
int pool_index() const; // constant pool index
symbolOop name() const; // returns the name of the method or field
symbolOop signature() const; // returns the signature of the method or field
BasicType result_type(Thread* thread) const; // returns the result type of the getfield or invoke
Bytecodes::Code code() const { return Bytecodes::code_at(bcp(), _method()); }
Bytecodes::Code java_code() const { return Bytecodes::java_code(code()); }
};
// Abstraction for invoke_{virtual, static, interface, special}
class Bytecode_invoke: public Bytecode_member_ref {
protected:
Bytecode_invoke(methodHandle method, int bci) : Bytecode_member_ref(method, bci) {}
public: public:
void verify() const; void verify() const;
// Attributes // Attributes
methodHandle method() const { return _method; }
int bci() const { return _bci; }
address bcp() const { return _method->bcp_from(bci()); }
int index() const; // the constant pool index for the invoke
symbolOop name() const; // returns the name of the invoked method
symbolOop signature() const; // returns the signature of the invoked method
BasicType result_type(Thread *thread) const; // returns the result type of the invoke
Bytecodes::Code code() const { return Bytecodes::code_at(bcp(), _method()); }
Bytecodes::Code adjusted_invoke_code() const { return Bytecodes::java_code(code()); }
methodHandle static_target(TRAPS); // "specified" method (from constant pool) methodHandle static_target(TRAPS); // "specified" method (from constant pool)
// Testers // Testers
bool is_invokeinterface() const { return adjusted_invoke_code() == Bytecodes::_invokeinterface; } bool is_invokeinterface() const { return java_code() == Bytecodes::_invokeinterface; }
bool is_invokevirtual() const { return adjusted_invoke_code() == Bytecodes::_invokevirtual; } bool is_invokevirtual() const { return java_code() == Bytecodes::_invokevirtual; }
bool is_invokestatic() const { return adjusted_invoke_code() == Bytecodes::_invokestatic; } bool is_invokestatic() const { return java_code() == Bytecodes::_invokestatic; }
bool is_invokespecial() const { return adjusted_invoke_code() == Bytecodes::_invokespecial; } bool is_invokespecial() const { return java_code() == Bytecodes::_invokespecial; }
bool is_invokedynamic() const { return adjusted_invoke_code() == Bytecodes::_invokedynamic; } bool is_invokedynamic() const { return java_code() == Bytecodes::_invokedynamic; }
bool has_receiver() const { return !is_invokestatic() && !is_invokedynamic(); } bool has_receiver() const { return !is_invokestatic() && !is_invokedynamic(); }
@ -230,7 +246,7 @@ class Bytecode_invoke: public ResourceObj {
inline Bytecode_invoke* Bytecode_invoke_at(methodHandle method, int bci) { inline Bytecode_invoke* Bytecode_invoke_at(methodHandle method, int bci) {
Bytecode_invoke* b = new Bytecode_invoke(method, bci); Bytecode_invoke* b = new Bytecode_invoke(method, bci);
debug_only(b->verify()); DEBUG_ONLY(b->verify());
return b; return b;
} }
@ -240,21 +256,34 @@ inline Bytecode_invoke* Bytecode_invoke_at_check(methodHandle method, int bci) {
 }
-// Abstraction for all field accesses (put/get field/static_
-class Bytecode_field: public Bytecode {
- public:
+// Abstraction for all field accesses (put/get field/static)
+class Bytecode_field: public Bytecode_member_ref {
+ protected:
+  Bytecode_field(methodHandle method, int bci) : Bytecode_member_ref(method, bci) {}
+ public:
+  // Testers
+  bool is_getfield() const { return java_code() == Bytecodes::_getfield; }
+  bool is_putfield() const { return java_code() == Bytecodes::_putfield; }
+  bool is_getstatic() const { return java_code() == Bytecodes::_getstatic; }
+  bool is_putstatic() const { return java_code() == Bytecodes::_putstatic; }
+  bool is_getter() const { return is_getfield() || is_getstatic(); }
+  bool is_static() const { return is_getstatic() || is_putstatic(); }
+  bool is_valid() const { return is_getfield() ||
+                                 is_putfield() ||
+                                 is_getstatic() ||
+                                 is_putstatic(); }
   void verify() const;
-  int index() const;
-  bool is_static() const;
   // Creation
-  inline friend Bytecode_field* Bytecode_field_at(const methodOop method, address bcp);
+  inline friend Bytecode_field* Bytecode_field_at(methodHandle method, int bci);
 };
-inline Bytecode_field* Bytecode_field_at(const methodOop method, address bcp) {
-  Bytecode_field* b = (Bytecode_field*)bcp;
-  debug_only(b->verify());
+inline Bytecode_field* Bytecode_field_at(methodHandle method, int bci) {
+  Bytecode_field* b = new Bytecode_field(method, bci);
+  DEBUG_ONLY(b->verify());
   return b;
 }
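Bytecode_field likewise drops its stored index()/is_static() queries and derives every tester from the single java_code() classification. A short hedged sketch of that derivation with a plain enum in place of Bytecodes (not the VM's types):

    // field_testers.cpp -- illustrative classification only.
    #include <cstdio>

    enum class Code { getfield, putfield, getstatic, putstatic, other };

    struct FieldRef {
      Code code;
      bool is_getfield()  const { return code == Code::getfield; }
      bool is_putfield()  const { return code == Code::putfield; }
      bool is_getstatic() const { return code == Code::getstatic; }
      bool is_putstatic() const { return code == Code::putstatic; }
      bool is_getter() const { return is_getfield()  || is_getstatic(); }
      bool is_static() const { return is_getstatic() || is_putstatic(); }
      bool is_valid()  const { return is_getfield() || is_putfield() ||
                                      is_getstatic() || is_putstatic(); }
    };

    int main() {
      FieldRef f{Code::getstatic};
      std::printf("getter=%d static=%d valid=%d\n",
                  f.is_getter(), f.is_static(), f.is_valid());
      return 0;
    }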
@ -274,7 +303,7 @@ class Bytecode_checkcast: public Bytecode {
 inline Bytecode_checkcast* Bytecode_checkcast_at(address bcp) {
   Bytecode_checkcast* b = (Bytecode_checkcast*)bcp;
-  debug_only(b->verify());
+  DEBUG_ONLY(b->verify());
   return b;
 }
@ -294,7 +323,7 @@ class Bytecode_instanceof: public Bytecode {
 inline Bytecode_instanceof* Bytecode_instanceof_at(address bcp) {
   Bytecode_instanceof* b = (Bytecode_instanceof*)bcp;
-  debug_only(b->verify());
+  DEBUG_ONLY(b->verify());
   return b;
 }
@ -312,7 +341,7 @@ class Bytecode_new: public Bytecode {
 inline Bytecode_new* Bytecode_new_at(address bcp) {
   Bytecode_new* b = (Bytecode_new*)bcp;
-  debug_only(b->verify());
+  DEBUG_ONLY(b->verify());
   return b;
 }
@ -330,7 +359,7 @@ class Bytecode_multianewarray: public Bytecode {
 inline Bytecode_multianewarray* Bytecode_multianewarray_at(address bcp) {
   Bytecode_multianewarray* b = (Bytecode_multianewarray*)bcp;
-  debug_only(b->verify());
+  DEBUG_ONLY(b->verify());
   return b;
 }
@ -348,29 +377,57 @@ class Bytecode_anewarray: public Bytecode {
 inline Bytecode_anewarray* Bytecode_anewarray_at(address bcp) {
   Bytecode_anewarray* b = (Bytecode_anewarray*)bcp;
-  debug_only(b->verify());
+  DEBUG_ONLY(b->verify());
   return b;
 }
 // Abstraction for ldc, ldc_w and ldc2_w
-class Bytecode_loadconstant: public Bytecode {
+class Bytecode_loadconstant: public ResourceObj {
+ private:
+  int _bci;
+  methodHandle _method;
+  Bytecodes::Code code() const { return bytecode()->code(); }
+  int raw_index() const;
+  Bytecode_loadconstant(methodHandle method, int bci) : _method(method), _bci(bci) {}
  public:
+  // Attributes
+  methodHandle method() const { return _method; }
+  int bci() const { return _bci; }
+  address bcp() const { return _method->bcp_from(bci()); }
+  Bytecode* bytecode() const { return Bytecode_at(bcp()); }
   void verify() const {
+    assert(_method.not_null(), "must supply method");
     Bytecodes::Code stdc = Bytecodes::java_code(code());
     assert(stdc == Bytecodes::_ldc ||
            stdc == Bytecodes::_ldc_w ||
            stdc == Bytecodes::_ldc2_w, "load constant");
   }
-  int index() const;
-  inline friend Bytecode_loadconstant* Bytecode_loadconstant_at(const methodOop method, address bcp);
+  // Only non-standard bytecodes (fast_aldc) have CP cache indexes.
+  bool has_cache_index() const { return code() >= Bytecodes::number_of_java_codes; }
+  int pool_index() const; // index into constant pool
+  int cache_index() const { // index into CP cache (or -1 if none)
+    return has_cache_index() ? raw_index() : -1;
+  }
+  BasicType result_type() const; // returns the result type of the ldc
+  oop resolve_constant(TRAPS) const;
+  // Creation
+  inline friend Bytecode_loadconstant* Bytecode_loadconstant_at(methodHandle method, int bci);
 };
-inline Bytecode_loadconstant* Bytecode_loadconstant_at(const methodOop method, address bcp) {
-  Bytecode_loadconstant* b = (Bytecode_loadconstant*)bcp;
-  debug_only(b->verify());
+inline Bytecode_loadconstant* Bytecode_loadconstant_at(methodHandle method, int bci) {
+  Bytecode_loadconstant* b = new Bytecode_loadconstant(method, bci);
+  DEBUG_ONLY(b->verify());
   return b;
 }
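The reworked Bytecode_loadconstant distinguishes a raw operand that is already a constant-pool index (standard ldc/ldc_w/ldc2_w) from one that is a CP-cache index (the rewritten, non-standard forms), exposing pool_index() for both and cache_index() only when a cache entry exists. A standalone sketch of that two-level lookup, with an invented vector standing in for the real constant-pool cache:

    // ldc_index.cpp -- illustrative two-level index resolution, not VM code.
    #include <cstdio>
    #include <vector>

    struct CacheEntry { int pool_index; };          // stand-in for a CP-cache entry

    struct LoadConstant {
      bool has_cache_index;                         // true for the rewritten fast form
      int  raw_index;                               // operand as encoded in the bytecode
      int cache_index(const std::vector<CacheEntry>&) const {
        return has_cache_index ? raw_index : -1;    // -1: no cache entry involved
      }
      int pool_index(const std::vector<CacheEntry>& cache) const {
        return has_cache_index ? cache[raw_index].pool_index : raw_index;
      }
    };

    int main() {
      std::vector<CacheEntry> cache = {{17}, {42}};
      LoadConstant plain = {false, 5};              // ldc #5
      LoadConstant fast  = {true, 1};               // fast form, cache slot 1 -> pool #42
      std::printf("plain: pool=%d cache=%d\n",
                  plain.pool_index(cache), plain.cache_index(cache));
      std::printf("fast:  pool=%d cache=%d\n",
                  fast.pool_index(cache), fast.cache_index(cache));
      return 0;
    }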

@ -49,6 +49,7 @@ class BytecodePrinter: public BytecodeClosure {
   int get_index_u1() { return *(address)_next_pc++; }
   int get_index_u2() { int i=Bytes::get_Java_u2(_next_pc); _next_pc+=2; return i; }
+  int get_index_u1_cpcache() { return get_index_u1() + constantPoolOopDesc::CPCACHE_INDEX_TAG; }
   int get_index_u2_cpcache() { int i=Bytes::get_native_u2(_next_pc); _next_pc+=2; return i + constantPoolOopDesc::CPCACHE_INDEX_TAG; }
   int get_index_u4() { int i=Bytes::get_native_u4(_next_pc); _next_pc+=4; return i; }
   int get_index_special() { return (is_wide()) ? get_index_u2() : get_index_u1(); }
@ -60,6 +61,7 @@ class BytecodePrinter: public BytecodeClosure {
   bool check_index(int i, int& cp_index, outputStream* st = tty);
   void print_constant(int i, outputStream* st = tty);
   void print_field_or_method(int i, outputStream* st = tty);
+  void print_field_or_method(int orig_i, int i, outputStream* st = tty);
   void print_attributes(int bci, outputStream* st = tty);
   void bytecode_epilog(int bci, outputStream* st = tty);
@ -177,18 +179,29 @@ void BytecodeTracer::trace(methodHandle method, address bcp, outputStream* st) {
   _closure->trace(method, bcp, st);
 }
+void print_symbol(symbolOop sym, outputStream* st) {
+  char buf[40];
+  int len = sym->utf8_length();
+  if (len >= (int)sizeof(buf)) {
+    st->print_cr(" %s...[%d]", sym->as_C_string(buf, sizeof(buf)), len);
+  } else {
+    st->print(" ");
+    sym->print_on(st); st->cr();
+  }
+}
 void print_oop(oop value, outputStream* st) {
   if (value == NULL) {
     st->print_cr(" NULL");
-  } else {
+  } else if (java_lang_String::is_instance(value)) {
     EXCEPTION_MARK;
     Handle h_value (THREAD, value);
     symbolHandle sym = java_lang_String::as_symbol(h_value, CATCH);
-    if (sym->utf8_length() > 32) {
-      st->print_cr(" ....");
-    } else {
-      sym->print_on(st); st->cr();
-    }
+    print_symbol(sym(), st);
+  } else if (value->is_symbol()) {
+    print_symbol(symbolOop(value), st);
+  } else {
+    st->print_cr(" " PTR_FORMAT, (intptr_t) value);
   }
 }
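The new print_symbol helper caps long UTF-8 names at a 40-byte buffer and appends an ellipsis plus the full length, instead of the old all-or-nothing " ...." output. A plain C++ sketch of the same truncation policy, using stdio in place of the VM's outputStream (an assumption for the sake of a runnable example):

    // print_symbol_sketch.cpp -- illustrative truncation, not the VM helper.
    #include <cstdio>
    #include <cstring>

    void print_symbol(const char* utf8, std::FILE* st) {
      char buf[40];
      int len = (int)std::strlen(utf8);
      if (len >= (int)sizeof(buf)) {
        // Copy what fits, then show the real length so nothing is silently lost.
        std::snprintf(buf, sizeof(buf), "%s", utf8);
        std::fprintf(st, " %s...[%d]\n", buf, len);
      } else {
        std::fprintf(st, " %s\n", utf8);
      }
    }

    int main() {
      print_symbol("java/lang/String", stdout);
      print_symbol("a/very/long/generated/name/0123456789012345678901234567890123456789", stdout);
      return 0;
    }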
@ -279,16 +292,27 @@ void BytecodePrinter::print_constant(int i, outputStream* st) {
   } else if (tag.is_double()) {
     st->print_cr(" %f", constants->double_at(i));
   } else if (tag.is_string()) {
-    oop string = constants->resolved_string_at(i);
+    oop string = constants->pseudo_string_at(i);
     print_oop(string, st);
   } else if (tag.is_unresolved_string()) {
-    st->print_cr(" <unresolved string at %d>", i);
+    const char* string = constants->string_at_noresolve(i);
+    st->print_cr(" %s", string);
   } else if (tag.is_klass()) {
     st->print_cr(" %s", constants->resolved_klass_at(i)->klass_part()->external_name());
   } else if (tag.is_unresolved_klass()) {
     st->print_cr(" <unresolved klass at %d>", i);
   } else if (tag.is_object()) {
-    st->print_cr(" " PTR_FORMAT, constants->object_at(i));
+    st->print(" <Object>");
+    print_oop(constants->object_at(i), st);
+  } else if (tag.is_method_type()) {
+    int i2 = constants->method_type_index_at(i);
+    st->print(" <MethodType> %d", i2);
+    print_oop(constants->symbol_at(i2), st);
+  } else if (tag.is_method_handle()) {
+    int kind = constants->method_handle_ref_kind_at(i);
+    int i2 = constants->method_handle_index_at(i);
+    st->print(" <MethodHandle of kind %d>", kind, i2);
+    print_field_or_method(-i, i2, st);
   } else {
     st->print_cr(" bad tag=%d at %d", tag.value(), i);
   }
@ -297,7 +321,10 @@ void BytecodePrinter::print_constant(int i, outputStream* st) {
 void BytecodePrinter::print_field_or_method(int i, outputStream* st) {
   int orig_i = i;
   if (!check_index(orig_i, i, st)) return;
+  print_field_or_method(orig_i, i, st);
+}
+void BytecodePrinter::print_field_or_method(int orig_i, int i, outputStream* st) {
   constantPoolOop constants = method()->constants();
   constantTag tag = constants->tag_at(i);
@ -314,9 +341,11 @@ void BytecodePrinter::print_field_or_method(int i, outputStream* st) {
     return;
   }
+  symbolOop klass = constants->klass_name_at(constants->uncached_klass_ref_index_at(i));
   symbolOop name = constants->uncached_name_ref_at(i);
   symbolOop signature = constants->uncached_signature_ref_at(i);
-  st->print_cr(" %d <%s> <%s> ", i, name->as_C_string(), signature->as_C_string());
+  const char* sep = (tag.is_field() ? "/" : "");
+  st->print_cr(" %d <%s.%s%s%s> ", i, klass->as_C_string(), name->as_C_string(), sep, signature->as_C_string());
 }
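With the extra klass lookup, references now print as <Holder.name/signature> for fields and <Holder.name(args)ret> for methods; no separator is needed for methods because a method descriptor already starts with '('. A small formatting-only sketch with hardcoded strings in place of the constant-pool lookups (the example strings are made up):

    // ref_format.cpp -- formatting sketch; the lookups are hardcoded here.
    #include <cstdio>

    void print_ref(int i, bool is_field, const char* klass,
                   const char* name, const char* signature) {
      const char* sep = is_field ? "/" : "";   // methods: signature starts with '('
      std::printf(" %d <%s.%s%s%s> \n", i, klass, name, sep, signature);
    }

    int main() {
      print_ref(12, /*is_field=*/true,  "java/lang/System", "out", "Ljava/io/PrintStream;");
      print_ref(34, /*is_field=*/false, "java/io/PrintStream", "println", "(Ljava/lang/String;)V");
      return 0;
    }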
@ -340,12 +369,20 @@ void BytecodePrinter::print_attributes(int bci, outputStream* st) {
       st->print_cr(" " INT32_FORMAT, get_short());
       break;
     case Bytecodes::_ldc:
-      print_constant(get_index_u1(), st);
+      if (Bytecodes::uses_cp_cache(raw_code())) {
+        print_constant(get_index_u1_cpcache(), st);
+      } else {
+        print_constant(get_index_u1(), st);
+      }
       break;
     case Bytecodes::_ldc_w:
     case Bytecodes::_ldc2_w:
-      print_constant(get_index_u2(), st);
+      if (Bytecodes::uses_cp_cache(raw_code())) {
+        print_constant(get_index_u2_cpcache(), st);
+      } else {
+        print_constant(get_index_u2(), st);
+      }
       break;
     case Bytecodes::_iload:
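In this last hunk, a rewritten ldc variant carries a CP-cache index rather than a pool index, so the tracer tags such operands with CPCACHE_INDEX_TAG before handing them to print_constant, which can then recognize the tag and translate back. A standalone sketch of that tagging convention; the sentinel value and the mapping table below are invented for illustration and do not match the VM's constants:

    // cpcache_tag.cpp -- illustrative index tagging, not the real constant pool.
    #include <cstdio>
    #include <vector>

    const int CPCACHE_INDEX_TAG = 0x10000;   // invented sentinel; real value differs

    int get_index_u1(int operand)          { return operand; }
    int get_index_u1_cpcache(int operand)  { return operand + CPCACHE_INDEX_TAG; }

    void print_constant(int i, const std::vector<int>& cache_to_pool) {
      if (i >= CPCACHE_INDEX_TAG) {
        int cache_i = i - CPCACHE_INDEX_TAG;           // strip the tag...
        i = cache_to_pool[cache_i];                    // ...and map back to the pool
      }
      std::printf(" constant pool #%d\n", i);
    }

    int main() {
      std::vector<int> cache_to_pool = {7, 19};        // cache slot -> pool index
      print_constant(get_index_u1(7), cache_to_pool);          // plain ldc
      print_constant(get_index_u1_cpcache(1), cache_to_pool);  // rewritten fast form
      return 0;
    }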
