8308655: Narrow types of ConstantPool and ConstMethod returns

Reviewed-by: fparain, matsaave
Author: Coleen Phillimore  2023-05-25 11:52:40 +00:00
parent 5a0a238f67
commit 2599ada152

31 changed files with 162 additions and 161 deletions
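
Nearly every hunk below follows one of two patterns: a return type or field that only ever holds a 16-bit constant pool or class file index is now declared as u2/s2 (or intx where a machine word is really meant), and any assignment that still narrows a wider int goes through checked_cast so truncation trips an assert instead of happening silently. The following is a minimal stand-alone sketch of such a guarded narrowing cast, using a simplified stand-in (checked_cast_sketch) rather than HotSpot's actual checked_cast helper and assert macro:

    #include <cassert>
    #include <type_traits>

    // Simplified stand-in for a checked narrowing cast: convert to the target
    // integer type and assert that converting back reproduces the original value.
    template <typename To, typename From>
    To checked_cast_sketch(From value) {
      static_assert(std::is_integral<From>::value && std::is_integral<To>::value,
                    "integral types only");
      To result = static_cast<To>(value);
      assert(static_cast<From>(result) == value && "narrowing cast lost information");
      return result;
    }

    // Usage in the spirit of the hunks below, e.g.
    //   _app_class_paths_start_index = checked_cast_sketch<short>(start_index);

The real helper reports through the VM's own assert machinery; only the shape of the check matters here.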

@ -2672,7 +2672,7 @@ Method* ClassFileParser::parse_method(const ClassFileStream* const cfs,
m->set_constants(_cp);
m->set_name_index(name_index);
m->set_signature_index(signature_index);
m->compute_from_signature(cp->symbol_at(signature_index));
m->constMethod()->compute_from_signature(cp->symbol_at(signature_index), access_flags.is_static());
assert(args_size < 0 || args_size == m->size_of_parameters(), "");
// Fill in code attribute information

@ -1345,8 +1345,7 @@ void ClassLoader::record_result(JavaThread* current, InstanceKlass* ik,
const char* const file_name = file_name_for_class_name(class_name,
ik->name()->utf8_length());
assert(file_name != nullptr, "invariant");
ClassLoaderExt::record_result(classpath_index, ik, redefined);
ClassLoaderExt::record_result(checked_cast<s2>(classpath_index), ik, redefined);
}
#endif // INCLUDE_CDS

@ -66,7 +66,8 @@ void ClassLoaderExt::append_boot_classpath(ClassPathEntry* new_entry) {
void ClassLoaderExt::setup_app_search_path(JavaThread* current) {
Arguments::assert_is_dumping_archive();
_app_class_paths_start_index = ClassLoader::num_boot_classpath_entries();
int start_index = ClassLoader::num_boot_classpath_entries();
_app_class_paths_start_index = checked_cast<jshort>(start_index);
char* app_class_path = os::strdup_check_oom(Arguments::get_appclasspath(), mtClass);
if (strcmp(app_class_path, ".") == 0) {
@ -116,8 +117,9 @@ void ClassLoaderExt::process_module_table(JavaThread* current, ModuleEntryTable*
void ClassLoaderExt::setup_module_paths(JavaThread* current) {
Arguments::assert_is_dumping_archive();
_app_module_paths_start_index = ClassLoader::num_boot_classpath_entries() +
ClassLoader::num_app_classpath_entries();
int start_index = ClassLoader::num_boot_classpath_entries() +
ClassLoader::num_app_classpath_entries();
_app_module_paths_start_index = checked_cast<jshort>(start_index);
Handle system_class_loader (current, SystemDictionary::java_system_loader());
ModuleEntryTable* met = Modules::get_module_entry_table(system_class_loader);
process_module_table(current, met);

@ -903,8 +903,8 @@ static Method* new_method(
m->set_constants(nullptr); // This will get filled in later
m->set_name_index(cp->utf8(name));
m->set_signature_index(cp->utf8(sig));
m->compute_from_signature(sig);
m->set_size_of_parameters(params);
m->constMethod()->compute_from_signature(sig, flags.is_static());
assert(m->size_of_parameters() == params, "should be computed above");
m->set_max_stack(max_stack);
m->set_max_locals(params);
m->constMethod()->set_stackmap_data(nullptr);

@ -1,5 +1,5 @@
/*
* Copyright (c) 2015, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2015, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -74,7 +74,7 @@ InstanceKlass* KlassFactory::check_shared_class_file_load_hook(
// Set new class file stream using JVMTI agent modified class file data.
ClassLoaderData* loader_data =
ClassLoaderData::class_loader_data(class_loader());
int path_index = ik->shared_classpath_index();
s2 path_index = ik->shared_classpath_index();
ClassFileStream* stream = new ClassFileStream(ptr,
end_ptr - ptr,
cfs->source(),

@ -1182,7 +1182,7 @@ void SystemDictionary::load_shared_class_misc(InstanceKlass* ik, ClassLoaderData
// For boot loader, ensure that GetSystemPackage knows that a class in this
// package was loaded.
if (loader_data->is_the_null_class_loader_data()) {
int path_index = ik->shared_classpath_index();
s2 path_index = ik->shared_classpath_index();
ik->set_classpath_index(path_index);
}

@ -1,5 +1,5 @@
/*
* Copyright (c) 2014, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2014, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -139,7 +139,7 @@ private:
}
public:
FunctionType _function[KLASS_KIND_COUNT];
FunctionType _function[Klass::KLASS_KIND_COUNT];
Table(){
set_init_function<InstanceKlass>();
@ -202,7 +202,7 @@ private:
}
public:
FunctionType _function[KLASS_KIND_COUNT];
FunctionType _function[Klass::KLASS_KIND_COUNT];
Table(){
set_init_function<InstanceKlass>();
@ -265,7 +265,7 @@ private:
}
public:
FunctionType _function[KLASS_KIND_COUNT];
FunctionType _function[Klass::KLASS_KIND_COUNT];
Table(){
set_init_function<InstanceKlass>();

@ -30,6 +30,7 @@
#include "oops/constMethod.hpp"
#include "oops/method.hpp"
#include "runtime/safepointVerifiers.hpp"
#include "runtime/signature.hpp"
#include "utilities/align.hpp"
// Static initialization
@ -70,6 +71,20 @@ ConstMethod::ConstMethod(int byte_code_size,
set_result_type((BasicType)0);
}
// Derive size of parameters, return type, and fingerprint,
// all in one pass, which is run at load time.
// We need the first two, and might as well grab the third.
void ConstMethod::compute_from_signature(Symbol* sig, bool is_static) {
// At this point, since we are scanning the signature,
// we might as well compute the whole fingerprint.
Fingerprinter fp(sig, is_static);
set_size_of_parameters(fp.size_of_parameters());
set_num_stack_arg_slots(fp.num_stack_arg_slots());
set_result_type(fp.return_type());
set_fingerprint(fp.fingerprint());
}
// Accessor that copies to metadata.
void ConstMethod::copy_stackmap_data(ClassLoaderData* loader_data,
u1* sd, int length, TRAPS) {
@ -105,16 +120,16 @@ int ConstMethod::size(int code_size,
}
if (sizes->checked_exceptions_length() > 0) {
extra_bytes += sizeof(u2);
extra_bytes += sizes->checked_exceptions_length() * sizeof(CheckedExceptionElement);
extra_bytes += sizes->checked_exceptions_length() * (int)sizeof(CheckedExceptionElement);
}
if (sizes->localvariable_table_length() > 0) {
extra_bytes += sizeof(u2);
extra_bytes +=
sizes->localvariable_table_length() * sizeof(LocalVariableTableElement);
sizes->localvariable_table_length() * (int)sizeof(LocalVariableTableElement);
}
if (sizes->exception_table_length() > 0) {
extra_bytes += sizeof(u2);
extra_bytes += sizes->exception_table_length() * sizeof(ExceptionTableElement);
extra_bytes += sizes->exception_table_length() * (int)sizeof(ExceptionTableElement);
}
if (sizes->generic_signature_index() != 0) {
extra_bytes += sizeof(u2);
@ -125,7 +140,7 @@ int ConstMethod::size(int code_size,
// cause the reflection API to throw a MalformedParametersException.
if (sizes->method_parameters_length() >= 0) {
extra_bytes += sizeof(u2);
extra_bytes += sizes->method_parameters_length() * sizeof(MethodParametersElement);
extra_bytes += sizes->method_parameters_length() * (int)sizeof(MethodParametersElement);
}
// Align sizes up to a word.
@ -133,16 +148,16 @@ int ConstMethod::size(int code_size,
// One pointer per annotation array
if (sizes->method_annotations_length() > 0) {
extra_bytes += sizeof(AnnotationArray*);
extra_bytes += (int)sizeof(AnnotationArray*);
}
if (sizes->parameter_annotations_length() > 0) {
extra_bytes += sizeof(AnnotationArray*);
extra_bytes += (int)sizeof(AnnotationArray*);
}
if (sizes->type_annotations_length() > 0) {
extra_bytes += sizeof(AnnotationArray*);
extra_bytes += (int)sizeof(AnnotationArray*);
}
if (sizes->default_annotations_length() > 0) {
extra_bytes += sizeof(AnnotationArray*);
extra_bytes += (int)sizeof(AnnotationArray*);
}
int extra_words = align_up(extra_bytes, BytesPerWord) / BytesPerWord;
@ -276,16 +291,16 @@ void ConstMethod::set_inlined_tables_length(InlineTableSizes* sizes) {
// anything is added here. It might be advisable to have some sort
// of indication of this inline.
if (sizes->generic_signature_index() != 0)
*(generic_signature_index_addr()) = sizes->generic_signature_index();
*(generic_signature_index_addr()) = checked_cast<u2>(sizes->generic_signature_index());
// New data should probably go here.
if (sizes->method_parameters_length() >= 0)
*(method_parameters_length_addr()) = sizes->method_parameters_length();
*(method_parameters_length_addr()) = checked_cast<u2>(sizes->method_parameters_length());
if (sizes->checked_exceptions_length() > 0)
*(checked_exceptions_length_addr()) = sizes->checked_exceptions_length();
*(checked_exceptions_length_addr()) = checked_cast<u2>(sizes->checked_exceptions_length());
if (sizes->exception_table_length() > 0)
*(exception_table_length_addr()) = sizes->exception_table_length();
*(exception_table_length_addr()) = checked_cast<u2>(sizes->exception_table_length());
if (sizes->localvariable_table_length() > 0)
*(localvariable_table_length_addr()) = sizes->localvariable_table_length();
*(localvariable_table_length_addr()) = checked_cast<u2>(sizes->localvariable_table_length());
}
int ConstMethod::method_parameters_length() const {
@ -300,7 +315,7 @@ MethodParametersElement* ConstMethod::method_parameters_start() const {
}
int ConstMethod::checked_exceptions_length() const {
u2 ConstMethod::checked_exceptions_length() const {
return has_checked_exceptions() ? *(checked_exceptions_length_addr()) : 0;
}
@ -314,7 +329,7 @@ CheckedExceptionElement* ConstMethod::checked_exceptions_start() const {
}
int ConstMethod::localvariable_table_length() const {
u2 ConstMethod::localvariable_table_length() const {
return has_localvariable_table() ? *(localvariable_table_length_addr()) : 0;
}
@ -327,7 +342,7 @@ LocalVariableTableElement* ConstMethod::localvariable_table_start() const {
return (LocalVariableTableElement*) addr;
}
int ConstMethod::exception_table_length() const {
u2 ConstMethod::exception_table_length() const {
return has_exception_table() ? *(exception_table_length_addr()) : 0;
}
@ -505,7 +520,7 @@ void ConstMethod::verify_on(outputStream* st) {
} else {
uncompressed_table_start = (u2*) m_end;
}
int gap = (intptr_t) uncompressed_table_start - (intptr_t) compressed_table_end;
int gap = int((intptr_t) uncompressed_table_start - (intptr_t) compressed_table_end);
int max_gap = align_metadata_size(1)*BytesPerWord;
guarantee(gap >= 0 && gap < max_gap, "invalid method layout");
}

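ConstMethod::compute_from_signature() above caches everything one scan of the method descriptor can provide: parameter slot count, stack-argument slots, return type and fingerprint. Because ConstMethod carries no access flags, callers now pass is_static explicitly (access_flags.is_static() in the class file parser hunk, flags.is_static() in new_method, must_be_static in make_method_handle_intrinsic). As a rough illustration of what a single pass over a JVM method descriptor yields, here is a simplified scanner with made-up names; it covers only parameter slots and the return-type character, not the stack-arg slots or fingerprint that HotSpot's Fingerprinter also produces:

    #include <cstddef>
    #include <string>

    struct SigSummarySketch {
      int  size_of_parameters;   // longs and doubles occupy two slots
      char return_type;          // first character of the return descriptor
    };

    // One pass over a descriptor such as "(IJLjava/lang/String;[D)V".
    inline SigSummarySketch summarize_descriptor(const std::string& sig, bool is_static) {
      SigSummarySketch s{is_static ? 0 : 1, 'V'};   // non-static methods count the receiver
      std::size_t i = 1;                            // skip '('
      while (i < sig.size() && sig[i] != ')') {
        if (sig[i] == 'J' || sig[i] == 'D') {       // category-2 value: two slots
          s.size_of_parameters += 2;
          ++i;
        } else {                                    // everything else: one slot
          s.size_of_parameters += 1;
          while (sig[i] == '[') ++i;                // array dimensions
          if (sig[i] == 'L') {                      // object type "Lpkg/Name;"
            while (sig[i] != ';') ++i;
          }
          ++i;
        }
      }
      if (i + 1 < sig.size()) s.return_type = sig[i + 1];
      return s;
    }
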
@ -213,6 +213,11 @@ private:
InlineTableSizes* sizes,
MethodType is_overpass,
int size);
void set_size_of_parameters(int size) { _size_of_parameters = checked_cast<u2>(size); }
void set_num_stack_arg_slots(int n) { _num_stack_arg_slots = checked_cast<u2>(n); }
void set_result_type(BasicType rt) { assert(rt < 16, "result type too large");
_result_type = (u1)rt; }
public:
static ConstMethod* allocate(ClassLoaderData* loader_data,
@ -283,22 +288,22 @@ public:
}
// name
int name_index() const { return _name_index; }
void set_name_index(int index) { _name_index = index; }
u2 name_index() const { return _name_index; }
void set_name_index(int index) { _name_index = checked_cast<u2>(index); }
// signature
int signature_index() const { return _signature_index; }
void set_signature_index(int index) { _signature_index = index; }
u2 signature_index() const { return _signature_index; }
void set_signature_index(int index) { _signature_index = checked_cast<u2>(index); }
// generics support
int generic_signature_index() const {
u2 generic_signature_index() const {
if (has_generic_signature()) {
return *generic_signature_index_addr();
} else {
return 0;
}
}
void set_generic_signature_index(u2 index) {
void set_generic_signature_index(u2 index) {
assert(has_generic_signature(), "");
u2* addr = generic_signature_index_addr();
*addr = index;
@ -324,7 +329,7 @@ public:
assert(max_method_code_size < (1 << 16),
"u2 is too small to hold method code size in general");
assert(0 <= size && size <= max_method_code_size, "invalid code size");
_code_size = size;
_code_size = (u2)size;
}
// linenumber table - note that length is unknown until decompression,
@ -337,15 +342,15 @@ public:
u2* method_parameters_length_addr() const;
// checked exceptions
int checked_exceptions_length() const;
u2 checked_exceptions_length() const;
CheckedExceptionElement* checked_exceptions_start() const;
// localvariable table
int localvariable_table_length() const;
u2 localvariable_table_length() const;
LocalVariableTableElement* localvariable_table_start() const;
// exception table
int exception_table_length() const;
u2 exception_table_length() const;
ExceptionTableElement* exception_table_start() const;
// method parameters table
@ -441,28 +446,27 @@ public:
u2 orig_method_idnum() const { return _orig_method_idnum; }
void set_orig_method_idnum(u2 idnum) { _orig_method_idnum = idnum; }
// Derive stuff from the signature at load time.
void compute_from_signature(Symbol* sig, bool is_static);
// max stack
int max_stack() const { return _max_stack; }
void set_max_stack(int size) { _max_stack = size; }
u2 max_stack() const { return _max_stack; }
void set_max_stack(int size) { _max_stack = checked_cast<u2>(size); }
// max locals
int max_locals() const { return _max_locals; }
void set_max_locals(int size) { _max_locals = size; }
u2 max_locals() const { return _max_locals; }
void set_max_locals(int size) { _max_locals = checked_cast<u2>(size); }
// size of parameters
int size_of_parameters() const { return _size_of_parameters; }
void set_size_of_parameters(int size) { _size_of_parameters = size; }
u2 size_of_parameters() const { return _size_of_parameters; }
// Number of arguments passed on the stack even when compiled
int num_stack_arg_slots() const { return _num_stack_arg_slots; }
void set_num_stack_arg_slots(int n) { _num_stack_arg_slots = n; }
u2 num_stack_arg_slots() const { return _num_stack_arg_slots; }
// result type (basic type of return value)
BasicType result_type() const { assert(_result_type >= T_BOOLEAN, "Must be set");
return (BasicType)_result_type; }
void set_result_type(BasicType rt) { assert(rt < 16, "result type too large");
_result_type = (u1)rt; }
// Deallocation for RedefineClasses
void deallocate_contents(ClassLoaderData* loader_data);
bool is_klass() const { return false; }

@ -448,7 +448,7 @@ bool ConstantPool::maybe_archive_resolved_klass_at(int cp_index) {
int ConstantPool::cp_to_object_index(int cp_index) {
// this is harder don't do this so much.
int i = reference_map()->find(cp_index);
int i = reference_map()->find(checked_cast<u2>(cp_index));
// We might not find the index for jsr292 call.
return (i < 0) ? _no_index_sentinel : i;
}
@ -699,9 +699,9 @@ int ConstantPool::to_cp_index(int index, Bytecodes::Code code) {
}
}
int ConstantPool::uncached_name_and_type_ref_index_at(int cp_index) {
u2 ConstantPool::uncached_name_and_type_ref_index_at(int cp_index) {
if (tag_at(cp_index).has_bootstrap()) {
int pool_index = bootstrap_name_and_type_ref_index_at(cp_index);
u2 pool_index = bootstrap_name_and_type_ref_index_at(cp_index);
assert(tag_at(pool_index).is_name_and_type(), "");
return pool_index;
}
@ -711,7 +711,7 @@ int ConstantPool::uncached_name_and_type_ref_index_at(int cp_index) {
return extract_high_short_from_int(ref_index);
}
int ConstantPool::name_and_type_ref_index_at(int index, Bytecodes::Code code) {
u2 ConstantPool::name_and_type_ref_index_at(int index, Bytecodes::Code code) {
return uncached_name_and_type_ref_index_at(to_cp_index(index, code));
}
@ -723,13 +723,13 @@ constantTag ConstantPool::tag_ref_at(int which, Bytecodes::Code code) {
return tag_at(pool_index);
}
int ConstantPool::uncached_klass_ref_index_at(int cp_index) {
u2 ConstantPool::uncached_klass_ref_index_at(int cp_index) {
assert(tag_at(cp_index).is_field_or_method(), "Corrupted constant pool");
jint ref_index = *int_at_addr(cp_index);
return extract_low_short_from_int(ref_index);
}
int ConstantPool::klass_ref_index_at(int index, Bytecodes::Code code) {
u2 ConstantPool::klass_ref_index_at(int index, Bytecodes::Code code) {
guarantee(!ConstantPool::is_invokedynamic_index(index),
"an invokedynamic instruction does not have a klass");
assert(code != Bytecodes::_invokedynamic,
@ -756,13 +756,13 @@ void ConstantPool::verify_constant_pool_resolve(const constantPoolHandle& this_c
}
int ConstantPool::name_ref_index_at(int which_nt) {
u2 ConstantPool::name_ref_index_at(int which_nt) {
jint ref_index = name_and_type_at(which_nt);
return extract_low_short_from_int(ref_index);
}
int ConstantPool::signature_ref_index_at(int which_nt) {
u2 ConstantPool::signature_ref_index_at(int which_nt) {
jint ref_index = name_and_type_at(which_nt);
return extract_high_short_from_int(ref_index);
}
@ -2182,14 +2182,14 @@ int ConstantPool::copy_cpool_bytes(int cpool_size,
}
case JVM_CONSTANT_ClassIndex: {
*bytes = JVM_CONSTANT_Class;
idx1 = klass_index_at(idx);
idx1 = checked_cast<u2>(klass_index_at(idx));
Bytes::put_Java_u2((address) (bytes+1), idx1);
DBG(printf("JVM_CONSTANT_ClassIndex: %hd", idx1));
break;
}
case JVM_CONSTANT_StringIndex: {
*bytes = JVM_CONSTANT_String;
idx1 = string_index_at(idx);
idx1 = checked_cast<u2>(string_index_at(idx));
Bytes::put_Java_u2((address) (bytes+1), idx1);
DBG(printf("JVM_CONSTANT_StringIndex: %hd", idx1));
break;
@ -2198,7 +2198,7 @@ int ConstantPool::copy_cpool_bytes(int cpool_size,
case JVM_CONSTANT_MethodHandleInError: {
*bytes = JVM_CONSTANT_MethodHandle;
int kind = method_handle_ref_kind_at(idx);
idx1 = method_handle_index_at(idx);
idx1 = checked_cast<u2>(method_handle_index_at(idx));
*(bytes+1) = (unsigned char) kind;
Bytes::put_Java_u2((address) (bytes+2), idx1);
DBG(printf("JVM_CONSTANT_MethodHandle: %d %hd", kind, idx1));
@ -2207,7 +2207,7 @@ int ConstantPool::copy_cpool_bytes(int cpool_size,
case JVM_CONSTANT_MethodType:
case JVM_CONSTANT_MethodTypeInError: {
*bytes = JVM_CONSTANT_MethodType;
idx1 = method_type_index_at(idx);
idx1 = checked_cast<u2>(method_type_index_at(idx));
Bytes::put_Java_u2((address) (bytes+1), idx1);
DBG(printf("JVM_CONSTANT_MethodType: %hd", idx1));
break;
@ -2284,7 +2284,7 @@ void ConstantPool::set_on_stack(const bool value) {
} else {
// Clearing is done single-threadedly.
if (!is_shared()) {
_flags &= ~_on_stack;
_flags &= (u2)(~_on_stack);
}
}
}

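Several hunks here and in the Klass changes below add casts of the form _flags &= (u2)(~_on_stack). The reason is integer promotion: operands narrower than int are widened to int before ~ and & are applied, so the expression yields an int, and storing that back into a 16-bit field is an implicit narrowing conversion; the cast makes the intended truncation explicit. A small stand-alone illustration, using std::uint16_t in place of HotSpot's u2 typedef and made-up names:

    #include <cstdint>

    using u2_sketch = std::uint16_t;

    // flags & ~mask without the cast produces an int (both operands are promoted),
    // so the result is narrowed explicitly before it is stored back.
    inline u2_sketch clear_flag_sketch(u2_sketch flags, u2_sketch mask) {
      return static_cast<u2_sketch>(flags & static_cast<u2_sketch>(~mask));
    }

    // Example: clear_flag_sketch(0x0005, 0x0004) == 0x0001.
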
@ -259,7 +259,7 @@ class ConstantPool : public Metadata {
// Given the per-instruction index of an indy instruction, report the
// main constant pool entry for its bootstrap specifier.
// From there, uncached_name/signature_ref_at will get the name/type.
int invokedynamic_bootstrap_ref_index_at(int indy_index) const {
u2 invokedynamic_bootstrap_ref_index_at(int indy_index) const {
return cache()->resolved_indy_entry_at(decode_invokedynamic_index(indy_index))->constant_pool_index();
}
@ -507,7 +507,7 @@ class ConstantPool : public Metadata {
int member = method_handle_index_at(which);
return uncached_signature_ref_at(member);
}
int method_handle_klass_index_at(int which) {
u2 method_handle_klass_index_at(int which) {
int member = method_handle_index_at(which);
return uncached_klass_ref_index_at(member);
}
@ -516,11 +516,11 @@ class ConstantPool : public Metadata {
return symbol_at(sym);
}
int bootstrap_name_and_type_ref_index_at(int which) {
u2 bootstrap_name_and_type_ref_index_at(int which) {
assert(tag_at(which).has_bootstrap(), "Corrupted constant pool");
return extract_high_short_from_int(*int_at_addr(which));
}
int bootstrap_methods_attribute_index(int which) {
u2 bootstrap_methods_attribute_index(int which) {
assert(tag_at(which).has_bootstrap(), "Corrupted constant pool");
return extract_low_short_from_int(*int_at_addr(which));
}
@ -668,8 +668,8 @@ class ConstantPool : public Metadata {
return symbol_at(signature_index);
}
int klass_ref_index_at(int which, Bytecodes::Code code);
int name_and_type_ref_index_at(int which, Bytecodes::Code code);
u2 klass_ref_index_at(int which, Bytecodes::Code code);
u2 name_and_type_ref_index_at(int which, Bytecodes::Code code);
int remap_instruction_operand_from_cache(int operand); // operand must be biased by CPCACHE_INDEX_TAG
@ -678,8 +678,8 @@ class ConstantPool : public Metadata {
int to_cp_index(int which, Bytecodes::Code code);
// Lookup for entries consisting of (name_index, signature_index)
int name_ref_index_at(int which_nt); // == low-order jshort of name_and_type_at(which_nt)
int signature_ref_index_at(int which_nt); // == high-order jshort of name_and_type_at(which_nt)
u2 name_ref_index_at(int which_nt); // == low-order jshort of name_and_type_at(which_nt)
u2 signature_ref_index_at(int which_nt); // == high-order jshort of name_and_type_at(which_nt)
BasicType basic_type_for_signature_at(int which) const;
@ -786,8 +786,8 @@ class ConstantPool : public Metadata {
int signature_index = signature_ref_index_at(uncached_name_and_type_ref_index_at(cp_index));
return symbol_at(signature_index);
}
int uncached_klass_ref_index_at(int cp_index);
int uncached_name_and_type_ref_index_at(int cp_index);
u2 uncached_klass_ref_index_at(int cp_index);
u2 uncached_name_and_type_ref_index_at(int cp_index);
// Sharing
int pre_resolve_shared_klasses(TRAPS);

@ -64,11 +64,11 @@ void ConstantPoolCacheEntry::initialize_entry(int index) {
assert(constant_pool_index() == index, "");
}
int ConstantPoolCacheEntry::make_flags(TosState state,
intx ConstantPoolCacheEntry::make_flags(TosState state,
int option_bits,
int field_index_or_method_params) {
assert(state < number_of_states, "Invalid state in make_flags");
int f = ((int)state << tos_state_shift) | option_bits | field_index_or_method_params;
intx f = ((int)state << tos_state_shift) | option_bits | field_index_or_method_params;
// Preserve existing flag bit values
// The low bits are a field offset, or else the method parameter size.
#ifdef ASSERT

@ -159,7 +159,7 @@ class ConstantPoolCacheEntry {
assert(is_vfinal(), "flags must be set");
set_f2((intx)f2);
}
int make_flags(TosState state, int option_bits, int field_index_or_method_params);
intx make_flags(TosState state, int option_bits, int field_index_or_method_params);
void set_flags(intx flags) { _flags = flags; }
void set_field_flags(TosState field_type, int option_bits, int field_index) {
assert((field_index & field_index_mask) == field_index, "field_index in range");
@ -308,8 +308,8 @@ class ConstantPoolCacheEntry {
bool is_resolved(Bytecodes::Code code) const;
// Accessors
int indices() const { return _indices; }
int indices_ord() const;
intx indices() const { return _indices; }
intx indices_ord() const;
int constant_pool_index() const { return (indices() & cp_index_mask); }
Bytecodes::Code bytecode_1() const;
Bytecodes::Code bytecode_2() const;

@ -30,7 +30,7 @@
#include "oops/oopHandle.inline.hpp"
#include "runtime/atomic.hpp"
inline int ConstantPoolCacheEntry::indices_ord() const { return Atomic::load_acquire(&_indices); }
inline intx ConstantPoolCacheEntry::indices_ord() const { return Atomic::load_acquire(&_indices); }
inline Bytecodes::Code ConstantPoolCacheEntry::bytecode_1() const {
return Bytecodes::cast((indices_ord() >> bytecode_1_shift) & bytecode_1_mask);

@ -86,7 +86,7 @@ Array<u1>* FieldInfoStream::create_FieldInfoStream(GrowableArray<FieldInfo>* fie
#ifdef ASSERT
FieldInfoReader r(fis);
u2 jfc = r.next_uint();
int jfc = r.next_uint();
assert(jfc == java_fields, "Must be");
int ifc = r.next_uint();
assert(ifc == injected_fields, "Must be");

@ -98,23 +98,23 @@ inline FieldInfoReader::FieldInfoReader(const Array<u1>* fi)
inline void FieldInfoReader::read_field_info(FieldInfo& fi) {
fi._index = _next_index++;
fi._name_index = next_uint();
fi._signature_index = next_uint();
fi._name_index = checked_cast<u2>(next_uint());
fi._signature_index = checked_cast<u2>(next_uint());
fi._offset = next_uint();
fi._access_flags = AccessFlags(next_uint());
fi._field_flags = FieldInfo::FieldFlags(next_uint());
if (fi._field_flags.is_initialized()) {
fi._initializer_index = next_uint();
fi._initializer_index = checked_cast<u2>(next_uint());
} else {
fi._initializer_index = 0;
}
if (fi._field_flags.is_generic()) {
fi._generic_signature_index = next_uint();
fi._generic_signature_index = checked_cast<u2>(next_uint());
} else {
fi._generic_signature_index = 0;
}
if (fi._field_flags.is_contended()) {
fi._contention_group = next_uint();
fi._contention_group = checked_cast<u2>(next_uint());
} else {
fi._contention_group = 0;
}

@ -2511,7 +2511,9 @@ void InstanceKlass::metaspace_pointers_do(MetaspaceClosure* it) {
if (itable_length() > 0) {
itableOffsetEntry* ioe = (itableOffsetEntry*)start_of_itable();
int method_table_offset_in_words = ioe->offset()/wordSize;
int nof_interfaces = (method_table_offset_in_words - itable_offset_in_words())
int itable_offset_in_words = (int)(start_of_itable() - (intptr_t*)this);
int nof_interfaces = (method_table_offset_in_words - itable_offset_in_words)
/ itableOffsetEntry::size();
for (int i = 0; i < nof_interfaces; i ++, ioe ++) {

@ -944,7 +944,6 @@ public:
inline intptr_t* start_of_itable() const;
inline intptr_t* end_of_itable() const;
inline int itable_offset_in_words() const;
inline oop static_field_base_raw();
inline OopMapBlock* start_of_nonstatic_oop_maps() const;

@ -41,8 +41,6 @@
inline intptr_t* InstanceKlass::start_of_itable() const { return (intptr_t*)start_of_vtable() + vtable_length(); }
inline intptr_t* InstanceKlass::end_of_itable() const { return start_of_itable() + itable_length(); }
inline int InstanceKlass::itable_offset_in_words() const { return start_of_itable() - (intptr_t*)this; }
inline oop InstanceKlass::static_field_base_raw() { return java_mirror(); }
inline Symbol* InstanceKlass::field_name(int index) const { return field(index).name(constants()); }

@ -261,7 +261,7 @@ void Klass::initialize_supers(Klass* k, Array<InstanceKlass*>* transitive_interf
// Overflow of the primary_supers array forces me to be secondary.
super_check_cell = &_secondary_super_cache;
}
set_super_check_offset((address)super_check_cell - (address) this);
set_super_check_offset(u4((address)super_check_cell - (address) this));
#ifdef ASSERT
{

@ -37,19 +37,6 @@
#include "jfr/support/jfrTraceIdExtension.hpp"
#endif
// Klass Kinds for all subclasses of Klass
enum KlassKind {
InstanceKlassKind,
InstanceRefKlassKind,
InstanceMirrorKlassKind,
InstanceClassLoaderKlassKind,
InstanceStackChunkKlassKind,
TypeArrayKlassKind,
ObjArrayKlassKind
};
const uint KLASS_KIND_COUNT = ObjArrayKlassKind + 1;
//
// A Klass provides:
// 1: language level class object (method dictionary etc.)
@ -79,7 +66,22 @@ class vtableEntry;
class Klass : public Metadata {
friend class VMStructs;
friend class JVMCIVMStructs;
public:
// Klass Kinds for all subclasses of Klass
enum KlassKind {
InstanceKlassKind,
InstanceRefKlassKind,
InstanceMirrorKlassKind,
InstanceClassLoaderKlassKind,
InstanceStackChunkKlassKind,
TypeArrayKlassKind,
ObjArrayKlassKind,
UnknownKlassKind
};
static const uint KLASS_KIND_COUNT = ObjArrayKlassKind + 1;
protected:
// If you add a new field that points to any metaspace object, you
// must add this field to Klass::metaspace_pointers_do().
@ -170,7 +172,7 @@ private:
// associate this class with the JAR file where it's loaded from during
// dump time. If a class is not loaded from the shared archive, this field is
// -1.
jshort _shared_class_path_index;
s2 _shared_class_path_index;
#if INCLUDE_CDS
// Various attributes for shared classes. Should be zero for a non-shared class.
@ -193,7 +195,7 @@ protected:
// Constructor
Klass(KlassKind kind);
Klass() : _kind(KlassKind(-1)) { assert(DumpSharedSpaces || UseSharedSpaces, "only for cds"); }
Klass() : _kind(UnknownKlassKind) { assert(DumpSharedSpaces || UseSharedSpaces, "only for cds"); }
void* operator new(size_t size, ClassLoaderData* loader_data, size_t word_size, TRAPS) throw();
@ -303,11 +305,11 @@ protected:
ClassLoaderData* class_loader_data() const { return _class_loader_data; }
void set_class_loader_data(ClassLoaderData* loader_data) { _class_loader_data = loader_data; }
int shared_classpath_index() const {
s2 shared_classpath_index() const {
return _shared_class_path_index;
};
void set_shared_classpath_index(int index) {
void set_shared_classpath_index(s2 index) {
_shared_class_path_index = index;
};
@ -322,7 +324,7 @@ protected:
CDS_ONLY(_shared_class_flags |= _archived_lambda_proxy_is_available;)
}
void clear_lambda_proxy_is_available() {
CDS_ONLY(_shared_class_flags &= ~_archived_lambda_proxy_is_available;)
CDS_ONLY(_shared_class_flags &= (u2)(~_archived_lambda_proxy_is_available);)
}
bool lambda_proxy_is_available() const {
CDS_ONLY(return (_shared_class_flags & _archived_lambda_proxy_is_available) != 0;)
@ -333,7 +335,7 @@ protected:
CDS_ONLY(_shared_class_flags |= _has_value_based_class_annotation;)
}
void clear_has_value_based_class_annotation() {
CDS_ONLY(_shared_class_flags &= ~_has_value_based_class_annotation;)
CDS_ONLY(_shared_class_flags &= (u2)(~_has_value_based_class_annotation);)
}
bool has_value_based_class_annotation() const {
CDS_ONLY(return (_shared_class_flags & _has_value_based_class_annotation) != 0;)

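The hunk above also moves the KlassKind enum and KLASS_KIND_COUNT from namespace scope into class Klass and replaces the KlassKind(-1) placeholder used by the CDS-only default constructor with an explicit UnknownKlassKind. Nesting the names is why the dispatch tables earlier in this change now spell Klass::KLASS_KIND_COUNT. A compressed sketch of that scoping effect, with made-up names:

    class KlassSketch {
     public:
      enum Kind { InstanceKind, ArrayKind, UnknownKind };   // nested rather than at namespace scope
      static const unsigned KIND_COUNT = UnknownKind;       // count of the real kinds
      KlassSketch() : _kind(UnknownKind) {}                 // instead of Kind(-1)
      explicit KlassSketch(Kind k) : _kind(k) {}
     private:
      Kind _kind;
    };

    // Code outside the class must now qualify the names:
    static int kind_table_sketch[KlassSketch::KIND_COUNT];
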
@ -1118,9 +1118,9 @@ klassItable::klassItable(InstanceKlass* klass) {
intptr_t* method_entry = (intptr_t *)(((address)klass) + offset_entry->offset());
intptr_t* end = klass->end_of_itable();
_table_offset = (intptr_t*)offset_entry - (intptr_t*)klass;
_size_offset_table = (method_entry - ((intptr_t*)offset_entry)) / itableOffsetEntry::size();
_size_method_table = (end - method_entry) / itableMethodEntry::size();
_table_offset = int((intptr_t*)offset_entry - (intptr_t*)klass);
_size_offset_table = int((method_entry - ((intptr_t*)offset_entry)) / itableOffsetEntry::size());
_size_method_table = int((end - method_entry) / itableMethodEntry::size());
assert(_table_offset >= 0 && _size_offset_table >= 0 && _size_method_table >= 0, "wrong computation");
return;
}
@ -1497,7 +1497,7 @@ class SetupItableClosure : public InterfaceVisiterClosure {
itableMethodEntry* method_entry() const { return _method_entry; }
void doit(InstanceKlass* intf, int method_count) {
int offset = ((address)_method_entry) - _klass_begin;
int offset = int(((address)_method_entry) - _klass_begin);
_offset_entry->initialize(intf, offset);
_offset_entry++;
_method_entry += method_count;

@ -52,7 +52,8 @@ class klassVtable {
public:
klassVtable(Klass* klass, void* base, int length) : _klass(klass) {
_tableOffset = (address)base - (address)klass; _length = length;
_tableOffset = int((address)base - (address)klass);
_length = length;
}
// accessors

@ -328,7 +328,7 @@ int Method::bci_from(address bcp) const {
assert(is_native() && bcp == code_base() || contains(bcp) || VMError::is_error_reported(),
"bcp doesn't belong to this method. bcp: " PTR_FORMAT, p2i(bcp));
return bcp - code_base();
return int(bcp - code_base());
}
@ -348,7 +348,7 @@ int Method::validate_bci_from_bcp(address bcp) const {
// the method may be native
bci = 0;
} else if (contains(bcp)) {
bci = bcp - code_base();
bci = int(bcp - code_base());
}
// Assert that if we have dodged any asserts, bci is negative.
assert(bci == -1 || bci == bci_from(bcp_from(bci)), "sane bci if >=0");
@ -652,19 +652,6 @@ int Method::extra_stack_words() {
return extra_stack_entries() * Interpreter::stackElementSize;
}
// Derive size of parameters, return type, and fingerprint,
// all in one pass, which is run at load time.
// We need the first two, and might as well grab the third.
void Method::compute_from_signature(Symbol* sig) {
// At this point, since we are scanning the signature,
// we might as well compute the whole fingerprint.
Fingerprinter fp(sig, is_static());
set_size_of_parameters(fp.size_of_parameters());
set_num_stack_arg_slots(fp.num_stack_arg_slots());
constMethod()->set_result_type(fp.return_type());
constMethod()->set_fingerprint(fp.fingerprint());
}
bool Method::is_vanilla_constructor() const {
// Returns true if this method is a vanilla constructor, i.e. an "<init>" "()V" method
// which only calls the superclass vanilla constructor and possibly does stores of
@ -1484,7 +1471,7 @@ methodHandle Method::make_method_handle_intrinsic(vmIntrinsics::ID iid,
m->set_signature_index(_imcp_invoke_signature);
assert(MethodHandles::is_signature_polymorphic_name(m->name()), "");
assert(m->signature() == signature, "");
m->compute_from_signature(signature);
m->constMethod()->compute_from_signature(signature, must_be_static);
m->init_intrinsic_id(klass_id_for_intrinsics(m->method_holder()));
assert(m->is_method_handle_intrinsic(), "");
#ifdef ASSERT
@ -1768,7 +1755,7 @@ void Method::sort_methods(Array<Method*>* methods, bool set_idnums, method_compa
}
// Reset method ordering
if (set_idnums) {
for (int i = 0; i < length; i++) {
for (u2 i = 0; i < length; i++) {
Method* m = methods->at(i);
m->set_method_idnum(i);
m->set_orig_method_idnum(i);

@ -142,18 +142,17 @@ class Method : public Metadata {
// name
Symbol* name() const { return constants()->symbol_at(name_index()); }
int name_index() const { return constMethod()->name_index(); }
u2 name_index() const { return constMethod()->name_index(); }
void set_name_index(int index) { constMethod()->set_name_index(index); }
// signature
Symbol* signature() const { return constants()->symbol_at(signature_index()); }
int signature_index() const { return constMethod()->signature_index(); }
u2 signature_index() const { return constMethod()->signature_index(); }
void set_signature_index(int index) { constMethod()->set_signature_index(index); }
// generics support
Symbol* generic_signature() const { int idx = generic_signature_index(); return ((idx != 0) ? constants()->symbol_at(idx) : nullptr); }
int generic_signature_index() const { return constMethod()->generic_signature_index(); }
void set_generic_signature_index(int index) { constMethod()->set_generic_signature_index(index); }
u2 generic_signature_index() const { return constMethod()->generic_signature_index(); }
// annotations support
AnnotationArray* annotations() const {
@ -298,12 +297,7 @@ class Method : public Metadata {
}
}
// Derive stuff from the signature at load time.
void compute_from_signature(Symbol* sig);
// size of parameters (receiver if any + arguments)
int size_of_parameters() const { return constMethod()->size_of_parameters(); }
void set_size_of_parameters(int size) { constMethod()->set_size_of_parameters(size); }
u2 size_of_parameters() const { return constMethod()->size_of_parameters(); }
bool has_stackmap_table() const {
return constMethod()->has_stackmap_table();
@ -320,7 +314,7 @@ class Method : public Metadata {
// exception handler table
bool has_exception_handler() const
{ return constMethod()->has_exception_table(); }
int exception_table_length() const
u2 exception_table_length() const
{ return constMethod()->exception_table_length(); }
ExceptionTableElement* exception_table_start() const
{ return constMethod()->exception_table_start(); }
@ -933,8 +927,6 @@ public:
// Inlined elements
address* native_function_addr() const { assert(is_native(), "must be native"); return (address*) (this+1); }
address* signature_handler_addr() const { return native_function_addr() + 1; }
void set_num_stack_arg_slots(int n) { constMethod()->set_num_stack_arg_slots(n); }
};

@ -69,7 +69,7 @@ inline void CompressedLineNumberWriteStream::write_pair_inline(int bci, int line
// Check if bci is 5-bit and line number 3-bit unsigned.
if (((bci_delta & ~0x1F) == 0) && ((line_delta & ~0x7) == 0)) {
// Compress into single byte.
jubyte value = ((jubyte) bci_delta << 3) | (jubyte) line_delta;
jubyte value = (jubyte)((bci_delta << 3) | line_delta);
// Check that value doesn't match escape character.
if (value != 0xFF) {
write_byte(value);

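The hunk above packs a bci delta and a line number delta into one byte when they fit in 5 and 3 bits respectively, reserving 0xFF as the escape value; the new cast just performs the shift-and-or in int and narrows once at the end. A small sketch of that encoding and its inverse, with made-up names:

    #include <cstdint>

    // Pack a 5-bit bci delta (high bits) and a 3-bit line delta (low bits) into
    // one byte.  Returns false when the pair needs the escaped (uncompressed) form.
    inline bool try_pack_pair_sketch(int bci_delta, int line_delta, std::uint8_t* out) {
      if ((bci_delta & ~0x1F) != 0 || (line_delta & ~0x07) != 0) {
        return false;                      // does not fit in 5 + 3 bits
      }
      std::uint8_t value = static_cast<std::uint8_t>((bci_delta << 3) | line_delta);
      if (value == 0xFF) {
        return false;                      // 0xFF is reserved as the escape byte
      }
      *out = value;
      return true;
    }

    inline void unpack_pair_sketch(std::uint8_t value, int* bci_delta, int* line_delta) {
      *bci_delta  = value >> 3;
      *line_delta = value & 0x07;
    }
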
@ -1,5 +1,5 @@
/*
* Copyright (c) 2013, 2021, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2013, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -105,9 +105,9 @@ class MethodCounters : public Metadata {
void set_rate(float rate) { _rate = rate; }
int highest_comp_level() const { return _highest_comp_level; }
void set_highest_comp_level(int level) { _highest_comp_level = level; }
void set_highest_comp_level(int level) { _highest_comp_level = (u1)level; }
int highest_osr_comp_level() const { return _highest_osr_comp_level; }
void set_highest_osr_comp_level(int level) { _highest_osr_comp_level = level; }
void set_highest_osr_comp_level(int level) { _highest_osr_comp_level = (u1)level; }
// invocation counter
InvocationCounter* invocation_counter() { return &_invocation_counter; }

@ -55,7 +55,7 @@ uint32_t Symbol::pack_hash_and_refcount(short hash, int refcount) {
Symbol::Symbol(const u1* name, int length, int refcount) {
_hash_and_refcount = pack_hash_and_refcount((short)os::random(), refcount);
_length = length;
_length = (u2)length;
// _body[0..1] are allocated in the header just by coincidence in the current
// implementation of Symbol. They are read by identity_hash(), so make sure they
// are initialized.
@ -215,7 +215,7 @@ const char* Symbol::as_klass_external_name() const {
static void print_class(outputStream *os, const SignatureStream& ss) {
int sb = ss.raw_symbol_begin(), se = ss.raw_symbol_end();
for (int i = sb; i < se; ++i) {
int ch = ss.raw_char_at(i);
char ch = ss.raw_char_at(i);
if (ch == JVM_SIGNATURE_SLASH) {
os->put(JVM_SIGNATURE_DOT);
} else {
@ -359,7 +359,7 @@ void Symbol::make_permanent() {
fatal("refcount underflow");
return;
} else {
int hash = extract_hash(old_value);
short hash = extract_hash(old_value);
found = Atomic::cmpxchg(&_hash_and_refcount, old_value, pack_hash_and_refcount(hash, PERM_REFCOUNT));
if (found == old_value) {
return; // successfully updated.

@ -211,7 +211,7 @@ class Symbol : public MetaspaceObj {
bool starts_with(const char* prefix) const {
return starts_with(prefix, (int) strlen(prefix));
}
bool starts_with(int prefix_char) const {
bool starts_with(char prefix_char) const {
return contains_byte_at(0, prefix_char);
}
// Tests if the symbol ends with the given suffix.
@ -221,7 +221,7 @@ class Symbol : public MetaspaceObj {
bool ends_with(const char* suffix) const {
return ends_with(suffix, (int) strlen(suffix));
}
bool ends_with(int suffix_char) const {
bool ends_with(char suffix_char) const {
return contains_byte_at(utf8_length() - 1, suffix_char);
}

@ -3642,7 +3642,7 @@ void VM_RedefineClasses::set_new_constant_pool(
if (new_index != 0) {
log_trace(redefine, class, constantpool)
("method-generic_signature_index change: %d to %d", method->generic_signature_index(), new_index);
method->set_generic_signature_index(new_index);
method->constMethod()->set_generic_signature_index(new_index);
}
// Update constant pool indices in the method's checked exception

@ -1176,15 +1176,15 @@ inline intx byte_size(void* from, void* to) {
// Pack and extract shorts to/from ints:
inline int extract_low_short_from_int(jint x) {
return x & 0xffff;
inline u2 extract_low_short_from_int(u4 x) {
return u2(x & 0xffff);
}
inline int extract_high_short_from_int(jint x) {
return (x >> 16) & 0xffff;
inline u2 extract_high_short_from_int(u4 x) {
return u2((x >> 16) & 0xffff);
}
inline int build_int_from_shorts( jushort low, jushort high ) {
inline int build_int_from_shorts( u2 low, u2 high ) {
return ((int)((unsigned int)high << 16) | (unsigned int)low);
}
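
The helpers above are the low-level counterpart of the constant pool accessors earlier in this change: a NameAndType or field/method-ref slot stores two 16-bit indices in one 32-bit word, with name_ref_index_at reading the low half and signature_ref_index_at the high half. A round-trip check of that packing, using fixed-width standard types in place of the u2/u4 typedefs:

    #include <cassert>
    #include <cstdint>

    int main() {
      std::uint16_t name_index = 0x0012;       // low half
      std::uint16_t signature_index = 0x0034;  // high half
      std::uint32_t packed =
          (static_cast<std::uint32_t>(signature_index) << 16) | name_index;

      assert(static_cast<std::uint16_t>(packed & 0xffff) == name_index);
      assert(static_cast<std::uint16_t>((packed >> 16) & 0xffff) == signature_index);
      return 0;
    }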