diff --git a/src/CLR/Core/CLR_RT_HeapBlock.cpp b/src/CLR/Core/CLR_RT_HeapBlock.cpp
index 9a167f438b..0e6acf0b8c 100644
--- a/src/CLR/Core/CLR_RT_HeapBlock.cpp
+++ b/src/CLR/Core/CLR_RT_HeapBlock.cpp
@@ -351,6 +351,7 @@ HRESULT CLR_RT_HeapBlock::SetGenericInstanceObject(const CLR_RT_TypeSpec_Index &
 
     m_data.genericInstance.genericType = genericType;
     m_data.genericInstance.ptr = nullptr;
+    m_id.raw = CLR_RT_HEAPBLOCK_RAW_ID(DATATYPE_GENERICINST, 0, 1);
 
     NANOCLR_NOCLEANUP();
 }
diff --git a/src/CLR/Core/Execution.cpp b/src/CLR/Core/Execution.cpp
index 2192853454..27e5b86958 100644
--- a/src/CLR/Core/Execution.cpp
+++ b/src/CLR/Core/Execution.cpp
@@ -91,7 +91,7 @@ HRESULT CLR_RT_ExecutionEngine::ExecutionEngine_Initialize()
     // CLR_RT_Thread* m_cctorThread;
     //
 #if !defined(NANOCLR_APPDOMAINS)
-    m_globalLock = nullptr; // CLR_RT_HeapBlock* m_globalLock;
+    m_globalLock = nullptr;           // CLR_RT_HeapBlock* m_globalLock;
     m_outOfMemoryException = nullptr; // CLR_RT_HeapBlock* m_outOfMemoryException;
 #endif
 
@@ -2262,19 +2262,21 @@ HRESULT CLR_RT_ExecutionEngine::NewGenericInstanceObject(
     const CLR_RECORD_FIELDDEF *target = nullptr;
     CLR_RT_Assembly *assm = nullptr;
     CLR_RT_TypeDef_Instance instSub = instance;
+    CLR_RT_HeapBlock_GenericInstance *giHeader = nullptr;
+    CLR_RT_HeapBlock *fieldCursor = nullptr;
 
     reference.SetObjectReference(nullptr);
 
     int clsFields = instance.target->instanceFieldsCount;
     int totFields = instance.CrossReference().totalFields + CLR_RT_HeapBlock::HB_Object_Fields_Offset;
 
-    CLR_RT_HeapBlock_GenericInstance *genericInst;
+    giHeader = (CLR_RT_HeapBlock_GenericInstance *)ExtractHeapBlocksForGenericInstance(0, genericInstance, totFields);
+    CHECK_ALLOCATION(giHeader);
 
-    genericInst =
-        (CLR_RT_HeapBlock_GenericInstance *)ExtractHeapBlocksForGenericInstance(0, genericInstance, totFields);
-    CHECK_ALLOCATION(genericInst);
+    reference.SetObjectReference(giHeader);
 
-    reference.SetObjectReference(genericInst);
+    // Associate the instance with its declaring type for reflection & casting utilities.
+    giHeader->SetObjectCls(instance);
 
     //
     // Initialize field types, from last to first.
@@ -2282,7 +2284,9 @@
     // We do the decrement BEFORE the comparison because we want to stop short of the first field, the
     // object descriptor (already initialized).
     //
-    genericInst += totFields;
+
+    fieldCursor = reinterpret_cast<CLR_RT_HeapBlock *>(giHeader) + totFields;
+
     while (--totFields > 0)
     {
         while (clsFields == 0)
@@ -2302,11 +2306,11 @@
             target = assm->GetFieldDef(instSub.target->firstInstanceField + clsFields);
         }
 
-        genericInst--;
+        fieldCursor--;
         target--;
         clsFields--;
 
-        NANOCLR_CHECK_HRESULT(InitializeReference(*genericInst, target, assm));
+        NANOCLR_CHECK_HRESULT(InitializeReference(*fieldCursor, target, assm));
     }
 
     if (instance.HasFinalizer())
diff --git a/src/CLR/Core/Interpreter.cpp b/src/CLR/Core/Interpreter.cpp
index 0ad2b3f0f6..923fface93 100644
--- a/src/CLR/Core/Interpreter.cpp
+++ b/src/CLR/Core/Interpreter.cpp
@@ -2548,6 +2548,7 @@ HRESULT CLR_RT_Thread::Execute_IL(CLR_RT_StackFrame &stackArg)
                     {
                         case DATATYPE_CLASS:
                         case DATATYPE_VALUETYPE:
+                        case DATATYPE_GENERICINST:
                             evalPos[0].Assign(obj[fieldInst.CrossReference().offset]);
                             goto Execute_LoadAndPromote;
                         case DATATYPE_DATETIME:
@@ -2600,13 +2601,17 @@ HRESULT CLR_RT_Thread::Execute_IL(CLR_RT_StackFrame &stackArg)
 #if defined(NANOCLR_APPDOMAINS)
                     _ASSERTE(dt != DATATYPE_TRANSPARENT_PROXY);
 #endif
-                    if (dt == DATATYPE_CLASS || dt == DATATYPE_VALUETYPE)
+                    if (dt == DATATYPE_CLASS || dt == DATATYPE_VALUETYPE || dt == DATATYPE_GENERICINST)
                     {
+                        // This is a reference to the field.
+                        // We need to make sure that the object is not a transparent proxy.
                         evalPos[0].SetReference(obj[fieldInst.CrossReference().offset]);
                     }
-                    else if (dt == DATATYPE_DATETIME || dt == DATATYPE_TIMESPAN) // Special case.
+                    // Special case.
+                    else if (dt == DATATYPE_DATETIME || dt == DATATYPE_TIMESPAN)
                     {
-                        NANOCLR_SET_AND_LEAVE(CLR_E_WRONG_TYPE); // NOT SUPPORTED.
+                        // NOT SUPPORTED.
+                        NANOCLR_SET_AND_LEAVE(CLR_E_WRONG_TYPE);
                     }
                     else
                     {
@@ -3212,6 +3217,18 @@ HRESULT CLR_RT_Thread::Execute_IL(CLR_RT_StackFrame &stackArg)
                     }
                     break;
 
+                    case TBL_MethodSpec:
+                    {
+                        CLR_RT_MethodDef_Instance method{};
+                        if (!method.ResolveToken(arg, assm))
+                        {
+                            NANOCLR_SET_AND_LEAVE(CLR_E_WRONG_TYPE);
+                        }
+
+                        evalPos[0].SetReflection(method);
+                    }
+                    break;
+
                     default:
                         NANOCLR_SET_AND_LEAVE(CLR_E_WRONG_TYPE);
                         break;
diff --git a/src/CLR/Include/nanoCLR_Runtime.h b/src/CLR/Include/nanoCLR_Runtime.h
index 2393b66e80..5134b73ed6 100644
--- a/src/CLR/Include/nanoCLR_Runtime.h
+++ b/src/CLR/Include/nanoCLR_Runtime.h
@@ -712,7 +712,7 @@ struct CLR_RT_MethodSpec_Index
 
     CLR_INDEX Method() const
     {
-        return (data & 0x7FFF);
+        return (CLR_INDEX)data;
     }
 };
 
diff --git a/src/CLR/Include/nanoCLR_Runtime__HeapBlock.h b/src/CLR/Include/nanoCLR_Runtime__HeapBlock.h
index f16f16b30f..1159922cd3 100644
--- a/src/CLR/Include/nanoCLR_Runtime__HeapBlock.h
+++ b/src/CLR/Include/nanoCLR_Runtime__HeapBlock.h
@@ -1187,6 +1187,7 @@ struct CLR_RT_HeapBlock
     {
         m_id.raw = CLR_RT_HEAPBLOCK_RAW_ID(DATATYPE_GENERICINST, 0, 1);
         m_data.genericInstance.genericType.data = genericType.data;
+        m_data.genericInstance.ptr = nullptr;
    }
 
     const CLR_RT_TypeSpec_Index &ObjectGenericType() const
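A note for reviewers on the pointer arithmetic in `NewGenericInstanceObject`: the generic instance is allocated as a header block followed by the field blocks, and `fieldCursor` starts one past the last block and walks backwards, stopping short of the header. The standalone sketch below (not part of the patch) mirrors that traversal; `Block` and `initField` are hypothetical stand-ins for `CLR_RT_HeapBlock` and `InitializeReference`, and a single header block stands in for the block(s) counted by `HB_Object_Fields_Offset`.

```cpp
#include <cstdio>
#include <vector>

// Hypothetical stand-ins: "Block" plays the role of CLR_RT_HeapBlock and
// "initField" the role of InitializeReference; neither is a real nanoCLR API.
struct Block
{
    int value = -1;
};

static void initField(Block &block, int fieldIndex)
{
    block.value = fieldIndex; // placeholder for per-field type initialization
}

int main()
{
    // One header block at index 0, followed by the field blocks, mirroring the
    // totFields accounting in NewGenericInstanceObject.
    int totFields = 4;
    std::vector<Block> storage(static_cast<size_t>(totFields));
    Block *header = storage.data();

    // Cursor starts one past the last block, like
    // fieldCursor = reinterpret_cast<CLR_RT_HeapBlock *>(giHeader) + totFields.
    Block *fieldCursor = header + totFields;

    // Decrement BEFORE the comparison so the loop stops short of the header
    // block, which is already initialized.
    while (--totFields > 0)
    {
        fieldCursor--;
        initField(*fieldCursor, totFields);
    }

    // The header keeps its initial value; fields are initialized last-to-first.
    for (size_t i = 0; i < storage.size(); i++)
    {
        std::printf("block %zu -> %d\n", i, storage[i].value);
    }

    return 0;
}
```

Running the sketch prints the header block untouched (`-1`) and blocks 1..3 set last-to-first, which is the invariant the `while (--totFields > 0)` loop in the patch relies on.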