//
// Copyright (c) Microsoft Corporation.   All rights reserved.
//
/*******************************************************************/
/*                           WARNING                               */
/* This file should be identical in the Bartok and Singularity     */
/* depots. Master copy resides in Bartok Depot. Changes should be  */
/* made to Bartok Depot and propagated to Singularity Depot.       */
/*******************************************************************/

#define OLD_FAST_TESTS
#if MARKSWEEPCOLLECTOR
#define ALLOW_BOOT_ARGLIST
#endif

namespace System
{

using Microsoft.Bartok.Runtime;
#if SINGULARITY_KERNEL
using Microsoft.Singularity;
#elif SINGULARITY_PROCESS
using Microsoft.Singularity;
using Microsoft.Singularity.V1.Services;
#endif
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Threading;
using System;

// A mutable box around an int.  Used by the interface-offset profiling
// table below to count lookups per (vtable, interface-type) pair.
internal sealed class MutableInt32
{
    public MutableInt32(int x)
    {
        this.val = x;
    }

    public int Value
    {
        get { return this.val; }
        set { this.val = value; }
    }

    private int val;
}

// One entry of a vtable's interface table: an implemented interface type
// together with the offset used to reach its method table.
[RequiredByBartok]
internal struct InterfaceInfo
{
    [RequiredByBartok]
    internal System.RuntimeType type;
    [RequiredByBartok]
    internal IntPtr offset;
}

[StructLayout(LayoutKind.Sequential)]
[CCtorIsRunDuringStartup]
[NoLoggingForUndo]
[RequiredByBartok]
[AccessedByRuntime("Referenced from brt{main,exn}.cpp and brtasm.asm")]
internal sealed class VTable
{
    // Type-test display: the ancestor vtables at depths 1..6 are stored at
    // fixed field offsets (the class is LayoutKind.Sequential) so generated
    // code can perform a subtype test with a single indexed load.
    [RequiredByBartok]
    [NoBarriers]
    public readonly VTable depth1;
    [NoBarriers]
    [RequiredByBartok]
    public readonly VTable depth2;
    [NoBarriers]
    [RequiredByBartok]
    public readonly VTable depth3;
    [NoBarriers]
    [RequiredByBartok]
    public readonly VTable depth4;
    [NoBarriers]
    [RequiredByBartok]
    public readonly VTable depth5;
    [NoBarriers]
    [RequiredByBartok]
    public readonly VTable depth6;
    // Cache slot consulted by the "complex" type-test helpers below.
    [NoBarriers]
    public VTable posCache;
    [RequiredByBartok]
    [NoBarriers]
    public int depth;
    [RequiredByBartok]
    [NoBarriers]
    public readonly StructuralType arrayOf;
    [RequiredByBartok]
    [NoBarriers]
    public readonly VTable arrayElementClass;
[RequiredByBartok] [NoBarriers] public readonly int arrayElementSize; [RequiredByBartok] [NoBarriers] public readonly InterfaceInfo[] interfaces; [RequiredByBartok] [NoBarriers] public readonly uint baseLength; [RequiredByBartok] [NoBarriers] public readonly uint baseAlignment; [RequiredByBartok] [NoBarriers] public readonly UIntPtr pointerTrackingMask; [RequiredByBartok] [NoBarriers] public readonly StructuralType structuralView; [AccessedByRuntime("Referenced from C++")] [NoBarriers] public readonly RuntimeType vtableType; [RequiredByBartok] [NoBarriers] public readonly int marshalSize; [RequiredByBartok] [NoBarriers] public readonly VTable vectorClass; [AccessedByRuntime("Type information available to runtime.")] [NoBarriers] public readonly bool isAcyclicRefType; // one less than size, which must be a power of two [NoBarriers] internal static int tryAllCheckMask = 2047; #if SINGULARITY_KERNEL private static bool multiThreaded; #endif internal class ClassConstructorLock { } [NoBarriers] public static readonly ClassConstructorLock cctorLock = new ClassConstructorLock(); [NoBarriers] public static int[] handle_counts = new int[4]; [NoBarriers] public static bool runtimeInitialized = false; [NoBarriers] public static System.Collections.Hashtable interfaceOffsetTable; [NoBarriers] public static bool callTraceInProgress; #if ARM // TODO: ARM isn't supported by the BartokLinker yet, so this // function won't be correctly generated. [StackBound(252)] [RequiredByBartok] private static void Initialize() { } #else // This method is generated by Bartok. // It calls all the auto init class constructors. [MethodImpl(MethodImplOptions.InternalCall)] [StackBound(252)] [RequiredByBartok] private static extern void Initialize(); #endif // NB: This is called from Kernel.Main() before the VTable.cctor. // It calls all the manual init class constructors then // call Initialize() above to call the auto init class constructors. 
[AccessedByRuntime("called from brtmain.cpp")] [NoStackLinkCheck] static public void Initialize(RuntimeType mainType) { initializeGC(); #if !SINGULARITY if (useLinkedStack) { System.GCs.StackManager.Initialize(); } #endif // !SINGULARITY // The remaining cctors may allocate memory normally #if !SINGULARITY InitializeLimitedType(typeof(Microsoft.Win32.Win32Native)); #endif InitializeLimitedType(typeof(System.Delegate)); InitializeLimitedType(typeof(System.MultiUseWord)); InitializeLimitedType(typeof(System.WeakReference)); InitializeLimitedType(typeof(System.GCs.Verifier)); InitializeLimitedType(typeof(System.Collections.Hashtable)); Thread.FinishInitializeThread(); // Make transition to full thread-ready GC. // We can't start any other threads until this code runs. System.GC.FinishInitializeThread(); #if !SINGULARITY InitializeLimitedType(typeof(System.IO.Stream)); InitializeLimitedType(typeof(System.Reflection.Missing)); InitializeLimitedType(typeof(System.RuntimeType)); InitializeLimitedType(typeof(System.Environment)); #endif #if SINGULARITY InitializeLimitedType(typeof(System.DateTime)); InitializeLimitedType(typeof(System.TimeSpan)); InitializeLimitedType(typeof(System.SchedulerTime)); #endif InitializeLimitedType(typeof(System.Type)); InitializeLimitedType(typeof(System.String)); InitializeLimitedType(typeof(System.BitConverter)); InitializeLimitedType(typeof(System.Math)); InitializeLimitedType(typeof(System.VTable)); #if !VC InitializeLimitedType(typeof(System.TryAllManager)); InitializeLimitedType(typeof(System.TryAllCounters)); #endif VTable.runtimeInitialized = true; VTable.Initialize(); #if !SINGULARITY GC.EnableHeap(); if(mainType != null) { initType(mainType); } #endif } #if SINGULARITY_KERNEL static public void InitializeForMultipleThread() { multiThreaded = true; } #endif #if (REFERENCE_COUNTING_GC || DEFERRED_REFERENCE_COUNTING_GC) [NoInline] #endif // REFERENCE_COUNTING_GC [NoBarriers] [PreInitRefCounts] private static void initializeGC() { // 
Create the heap management data structures System.GC.ConstructHeap(); // The following cctors may only allocate using BootstrapMemory InitializeLimitedType(typeof(System.Threading.Thread)); InitializeLimitedType(typeof(System.GC)); InitializeLimitedType(typeof(System.Finalizer)); System.GCs.BootstrapMemory.Truncate(); } [RequiredByBartok] private static bool useLinkedStack; [System.Diagnostics.Conditional("DEBUG")] static unsafe void StartupChecks() { VTable v = ((RuntimeType)typeof(System.VTable)).classVtable; Assert(v.depth == (Constants.TypeTestDisplayObjectOffset + 1), "Depth of VTable's VTable is wrong, so either the hierarchy " + "has changed or the compiler and runtime differ over the " + "size of the display so the depth offset is wrong"); fixed(int * p = &v.depth) { UIntPtr up = (UIntPtr) p; UIntPtr upBase = Magic.addressOf(v); int diff = ((int) (up - upBase)) / UIntPtr.Size; #if (REFERENCE_COUNTING_GC || DEFERRED_REFERENCE_COUNTING_GC) Assert(diff == Constants.TypeTestDisplaySize + 3, "Number of display fields does not match constant"); #else // REFERENCE_COUNTING_GC Assert(diff == Constants.TypeTestDisplaySize + 2, "Number of display fields does not match constant"); #endif } } static unsafe VTable() { StartupChecks(); // Ensure that the monitors have been allocated, so that we don't // try to allocate them just as we are running out of memory Monitor.Enter(VTable.cctorLock); Monitor.Exit(VTable.cctorLock); } internal static void checkNonNull(object obj) { if (obj == null) { throw new NullReferenceException(); } } [Inline] [RequiredByBartok] internal static void checkUnbox(object obj, int structuralType) { if (structuralType != (int) obj.vtable.structuralView) { #if SINGULARITY Tracing.Log(Tracing.Debug, "checkUnbox failed: Obj={0:x8} st={1}", Magic.addressOf(obj), unchecked((uint)structuralType)); #endif throwNewClassCastException(); } } #if (REFERENCE_COUNTING_GC || DEFERRED_REFERENCE_COUNTING_GC) // The CheckVectorStore opcode returns a value to model 
a // dependency with respect to StoreVectorElement. // CheckVectorStore gets lowered to a call to checkArrayStore. // If the call is subsequently inlined, the type checker may // complain, due to effects like t := Id(). // // The bug shows up in release builds with the RC collector, // when the RCCollectorOptInlineRCUpdates stage-control is // also turned on. // // Attaching [NoInline] to checkArrayStore is a temporary // solution to get around this bug. [NoInline] #endif // REFERENCE_COUNTING_GC [RequiredByBartok] internal static void checkArrayStore(object obj1,object obj2) { Assert(obj1.vtable.vtableType.IsArray); // can't put an object into a primitive array, but we can probably // guarantee that this is never called on a primitive type array VTable obj1VT = obj1.vtable; VTable obj1ElemVT = obj1VT.arrayElementClass; RuntimeType obj1ElemTy = obj1ElemVT.vtableType; // typeof is too slow right now... //if(obj1ElemTy == typeof(Object)) { // return; //} Deny(obj1ElemTy.IsPrimitive); if(obj2 == null) { return; } RuntimeType obj2Ty = obj2.vtable.vtableType; if(! 
isValidAssignment(obj1ElemTy, obj2Ty)) { #if SINGULARITY Tracing.Log(Tracing.Debug, "checkArrayStore failed: obj1={0:x8} obj2={1:x8} obj1ElemTy={2:x8} obj2Ty={3:x8}", Magic.addressOf(obj1), Magic.addressOf(obj2), Magic.addressOf(obj1ElemTy), Magic.addressOf(obj2Ty)); #endif throwNewArrayTypeMismatchException(); } } [RequiredByBartok] internal static void checkArrayElementAddress(RuntimeType ty, object obj) { if(ty != obj.vtable.vtableType) { throwNewArrayTypeMismatchException(); #if SINGULARITY Tracing.Log(Tracing.Debug, "checkArrayElementAddress failed: ty={0:x8} obj={1:x8}", Magic.addressOf(ty), Magic.addressOf(obj)); #endif } } [NoBarriers] [PreInitRefCounts] private static bool isValidAssignmentArray(RuntimeType ty, RuntimeType objTy) { Assert((ty.IsVector && objTy.IsVector) || (ty.IsRectangleArray && objTy.IsRectangleArray), "incompatible arrays"); Assert(ty.classVtable.arrayOf == objTy.classVtable.arrayOf, "!= arrayOfs"); VTable tyVT = ty.classVtable; VTable tyElementVT = tyVT.arrayElementClass; Assert(tyElementVT != null, "array has null arrayElementClass"); RuntimeType tyElementType = tyElementVT.vtableType; RuntimeType objElementType = objTy.classVtable.arrayElementClass.vtableType; // struct if(tyVT.arrayOf == StructuralType.Struct) { return tyElementType == objElementType; } // primitives and enums -- this allows int[], E1[], and E2[] // to cast either way (assuming E1 and E2 are based on int) if(tyVT.arrayOf != StructuralType.Reference) { return true; } // reference return isValidAssignment(tyElementType, objElementType); } [NoBarriers] [PreInitRefCounts] internal static bool isValidAssignment(RuntimeType ty,RuntimeType objTy) { return (ty == objTy) || isValidAssignmentMedium(ty, objTy); } [NoBarriers] [PreInitRefCounts] internal static bool isValidAssignmentMedium(RuntimeType ty, RuntimeType objTy) { RuntimeType testType = objTy; do { testType = testType.baseType; if (ty == testType) { return true; } } while (testType != null); return 
isValidAssignmentSlow(ty, objTy); } [NoBarriers] [PreInitRefCounts] private static bool isValidAssignmentSlow(RuntimeType ty, RuntimeType objTy) { Assert(ty != null, "isValidAssignment null dst type"); Assert(objTy != null, "isValidAssignment null src type"); if(ty.classVtable.arrayOf != StructuralType.None) { // ty is an Array if(ty.classVtable.arrayOf == objTy.classVtable.arrayOf) { if(ty.IsVector) { if (objTy.IsVector) { return isValidAssignmentArray(ty, objTy); } } else { Assert(ty.IsRectangleArray, "not rectangle array"); if(!objTy.IsVector && ty.rank == objTy.rank) { return isValidAssignmentArray(ty, objTy); } } } } else if ((ty.attributes & TypeAttributes.ClassSemanticsMask) == TypeAttributes.Interface) { // ty is an Interface System.RuntimeType[] interfaces = objTy.interfaces; if(interfaces != null) { int numInterfaces = interfaces.Length; for(int i=0; i 0); if(alwaysExact) { return vobj == vtarget; } UIntPtr * p = (UIntPtr *) Magic.addressOf(vobj); #if (REFERENCE_COUNTING_GC || DEFERRED_REFERENCE_COUNTING_GC) if(alwaysSimple || (vtargetDepth < Constants.TypeTestDisplayPosCache)) { return p[vtargetDepth+1] == Magic.addressOf(vtarget); } #else // REFERENCE_COUNTING_GC if(alwaysSimple || (vtargetDepth < Constants.TypeTestDisplayPosCache)) { return p[vtargetDepth] == Magic.addressOf(vtarget); } #endif #if (REFERENCE_COUNTING_GC || DEFERRED_REFERENCE_COUNTING_GC) if(p[vtargetDepth+1] == Magic.addressOf(vtarget)) { return true; } #else // REFERENCE_COUNTING_GC if(p[vtargetDepth] == Magic.addressOf(vtarget)) { return true; } #endif if(isValidAssignment(vtarget.vtableType, vobj.vtableType)) { vobj.posCache = vtarget; return true; } return false; } [Inline] private static Object newIsInstanceOf(VTable v, Object obj, int vtargetDepth, bool mayBeNull, bool mustBeExact, bool alwaysSimple) { if(mayBeNull && obj == null) { return null; } VTable objVtable = obj.vtable; if(newIsValidAssignment(v, objVtable, vtargetDepth, mustBeExact, alwaysSimple)) { return obj; } else { 
return null; } } [Inline] [RequiredByBartok] internal static Object newIsInstanceOfNonNullExact(VTable v, Object obj, int vtargetDepth) { return newIsInstanceOf(v, obj, vtargetDepth, false, true, true); } [Inline] [RequiredByBartok] internal static Object newIsInstanceOfExact(VTable v, Object obj, int vtargetDepth) { return newIsInstanceOf(v, obj, vtargetDepth, true, true, true); } [Inline] [RequiredByBartok] internal static Object newIsInstanceOfNonNullSimple(VTable v, Object obj, int vtargetDepth) { return newIsInstanceOf(v, obj, vtargetDepth, false, false, true); } [Inline] [RequiredByBartok] internal static Object newIsInstanceOfSimple(VTable v, Object obj, int vtargetDepth) { return newIsInstanceOf(v, obj, vtargetDepth, true, false, true); } [Inline] [RequiredByBartok] internal static Object newIsInstanceOfNonNullComplex(VTable v, Object obj, int vtargetDepth) { return newIsInstanceOf(v, obj, vtargetDepth, false, false, false); } [Inline] [RequiredByBartok] internal static Object newIsInstanceOfComplex(VTable v, Object obj, int vtargetDepth) { return newIsInstanceOf(v, obj, vtargetDepth, true, false, false); } [Inline] [NoBarriers] [PreInitRefCounts] internal static void newCheckClassCast(VTable v, object obj, int vtargetDepth, bool mayBeNull, bool mustBeExact, bool alwaysSimple) { if(mayBeNull && obj == null) { return; } if(!newIsValidAssignment(v, obj.vtable, vtargetDepth, mustBeExact, alwaysSimple)) { #if SINGULARITY Tracing.Log(Tracing.Debug, "newCheckClassCast failed: v={0:x8} obj={1:x8} vtd={2} mbn={3} as={4}", Magic.addressOf(v), Magic.addressOf(obj), unchecked((uint)vtargetDepth), unchecked((uint)(mayBeNull ? 1 : 0)), unchecked((uint)(alwaysSimple ? 
1 : 0))); #endif throwNewClassCastException(); } } [Inline] [NoBarriers] [PreInitRefCounts] [RequiredByBartok] internal static void newCheckClassCastNonNullExact(VTable v, object obj, int vtargetDepth) { newCheckClassCast(v, obj, vtargetDepth, false, true, true); } [Inline] [NoBarriers] [PreInitRefCounts] [RequiredByBartok] internal static void newCheckClassCastExact(VTable v, object obj, int vtargetDepth) { newCheckClassCast(v, obj, vtargetDepth, true, true, true); } [Inline] [NoBarriers] [PreInitRefCounts] [RequiredByBartok] internal static void newCheckClassCastNonNullSimple(VTable v, object obj, int vtargetDepth) { newCheckClassCast(v, obj, vtargetDepth, false, false, true); } [Inline] [NoBarriers] [PreInitRefCounts] [RequiredByBartok] internal static void newCheckClassCastSimple(VTable v, object obj, int vtargetDepth) { newCheckClassCast(v, obj, vtargetDepth, true, false, true); } [Inline] [NoBarriers] [PreInitRefCounts] [RequiredByBartok] internal static void newCheckClassCastNonNullComplex(VTable v, object obj, int vtargetDepth) { newCheckClassCast(v, obj, vtargetDepth, false, false, false); } [Inline] [NoBarriers] [PreInitRefCounts] [RequiredByBartok] internal static void newCheckClassCastComplex(VTable v, object obj, int vtargetDepth) { newCheckClassCast(v, obj, vtargetDepth, true, false, false); } #else [Inline] private unsafe static bool checkDisplay(VTable vtarget, VTable vobj, int vtargetDepth) { UIntPtr * p = (UIntPtr *) Magic.addressOf(vobj); Assert((int)vtarget.depth == vtargetDepth); Assert(vtargetDepth > 0); #if (REFERENCE_COUNTING_GC || DEFERRED_REFERENCE_COUNTING_GC) return p[vtargetDepth+1] == Magic.addressOf(vtarget); #else // REFERENCE_COUNTING_GC return p[vtargetDepth] == Magic.addressOf(vtarget); #endif } [Inline] [RequiredByBartok] internal static Object newIsInstanceOfNonNullSimple(VTable v, Object obj, int vtargetDepth) { return checkDisplay(v, obj.vtable, vtargetDepth) ? 
obj : null; } [Inline] internal static Object newIsInstanceOfSimple(VTable v, Object obj, int vtargetDepth) { if(obj == null) { return null; } return newIsInstanceOfNonNullSimple(v, obj, vtargetDepth); } [NoInline] internal static Object newIsInstanceOfHelp(VTable v, Object obj) { int vtargetDepth = v.depth; if(vtargetDepth < Constants.TypeTestDisplayPosCache) { return null; } VTable vobj = obj.vtable; if(isValidAssignment(v.vtableType, vobj.vtableType)) { v.posCache = vobj; return obj; } return null; } [Inline] [RequiredByBartok] internal static Object newIsInstanceOfNonNullComplex(VTable v, Object obj, int vtargetDepth) { return checkDisplay(v, obj.vtable, vtargetDepth) ? obj : newIsInstanceOfHelp(v, obj); } [Inline] internal static Object newIsInstanceOfComplex(VTable v, Object obj, int vtargetDepth) { if(obj == null) { return null; } return newIsInstanceOfNonNullComplex(v, obj, vtargetDepth); } [Inline] [RequiredByBartok] internal static void newCheckClassCastNonNullSimple(VTable v, object obj, int vtargetDepth) { if(!checkDisplay(v, obj.vtable, vtargetDepth)) { #if SINGULARITY Tracing.Log(Tracing.Debug, "newCheckClassCastNonNullSimple failed: v={0:x8} obj={1:x8} vtd={2}", Magic.addressOf(v), Magic.addressOf(obj), unchecked((uint)vtargetDepth)); #endif throwNewClassCastException(); } } [Inline] [NoBarriers] [PreInitRefCounts] [RequiredByBartok] internal static void newCheckClassCastSimple(VTable v, object obj, int vtargetDepth) { if(obj == null) { return; } newCheckClassCastNonNullSimple(v, obj, vtargetDepth); } [NoInline] [NoBarriers] [PreInitRefCounts] internal static void newCheckClassCastHelp(VTable v, VTable vobj) { int vtargetDepth = v.depth; if(vtargetDepth == Constants.TypeTestDisplayPosCache && isValidAssignment(v.vtableType, vobj.vtableType)) { v.posCache = vobj; return; } #if SINGULARITY Tracing.Log(Tracing.Debug, "newCheckClassCastHelp failed: v={0:x8} vobj={1:x8}", Magic.addressOf(v), Magic.addressOf(vobj)); #endif throwNewClassCastException(); } 
[Inline] [RequiredByBartok] internal static void newCheckClassCastNonNullComplex(VTable v, object obj, int vtargetDepth) { VTable vobj = obj.vtable; if(!checkDisplay(v, vobj, vtargetDepth)) { newCheckClassCastHelp(v, vobj); } } [Inline] [RequiredByBartok] internal static void newCheckClassCastComplex(VTable v, object obj, int vtargetDepth) { if(obj == null) { return; } newCheckClassCastNonNullComplex(v, obj, vtargetDepth); } /* [NoInline] internal static void newCheckArrayStoreHelp(VTable v, VTable vobj) { int vtargetDepth = v.depth; if(vtargetDepth == Constants.TypeTestDisplayPosCache && isValidAssignment(v.vtableType, vobj.vtableType)) { v.posCache = vobj; return; } throwNewArrayTypeMismatchException(); } [NoInline] internal static void newCheckArrayStore(object obj1,object obj2) { Assert(obj1.vtable.vtableType.IsArray); if(obj2 == null) { return; } // can't put an object into a primitive array, but we can probably // guarantee that this is never called on a primitive type array VTable obj1ElemTy = obj1.vtable.arrayElementClass; VTable obj2Ty = obj2.vtable; Deny(obj1ElemTy.vtableType.IsPrimitive); if(checkDisplay(obj1ElemTy, obj2Ty, obj1ElemTy.depth)) { return; } newCheckArrayStoreHelp(obj1ElemTy, obj2Ty); } */ #endif // OLD_FAST_TESTS [NoInline] internal static bool shouldDoCallTrace() { return VTable.runtimeInitialized && !VTable.callTraceInProgress; } [NoInline] internal static void callTrace(Object o) { if(o == null) { VTable.DebugPrint("null"); return; } VTable.callTraceInProgress = true; try { VTable.DebugPrint(o.ToString()); } catch(Exception) { VTable.DebugPrint("exn"); } VTable.callTraceInProgress = false; } // Bartok intrinsics [Intrinsic] internal static extern ulong mulUIntUIntToULong(uint x, uint y); internal static extern bool BuildC2Mods { [Intrinsic] get; } [System.Diagnostics.Conditional("DEBUG")] [NoInline] private static void initTypePrint(RuntimeType ty, String s) { if (VTable.enableDebugPrint) { #if ALLOW_BOOT_ARGLIST 
DebugPrint("initType({0}.{1}): {2}\n", __arglist(ty.Namespace, ty.Name, s)); #endif } } [NoBarriers] [PreInitRefCounts] internal static void InitializeLimitedType(Type t) { InitializeLimitedType(Magic.toRuntimeType(t)); } [NoBarriers] [PreInitRefCounts] private static void InitializeLimitedType(RuntimeType ty) { #if SINGULARITY_KERNEL Tracing.Log(Tracing.Debug, "InitializeLimitedType({0}.{1}", ty.Namespace, ty.Name); //DebugStub.WriteLine("InitializeLimitedType({0}.{1})", //__arglist(ty.Namespace, ty.Name)); #endif // Null check is needed because Bartok can remove static type // constructors and we don't have a good framework for removing the // static initialization of those types (particularly in // VTable.Initialize). if(ty.cctor != UIntPtr.Zero) { Magic.calli(ty.cctor); } ty.cctorState = TypeInitState.Completed; } // Rationale for cutting the fact that no stack probes are permitted in this // tree of calls: // // There is no legitimate rationale. A bug exists in the database for all // intractable violations, listing each one including this one. This violation // is somewhat intractable because initType requires a link check and yet it // is potentially called from stack linking code that cannot tolerate any // linking. // // We need to rethink the .NET model for implicit lazy class initialization, // since it is hard to reason about the failure points. // // Bug 436 [NoStackLinkCheckTransCut] [RequiredByBartok] [CalledRarely] internal static void initType(RuntimeType ty) { // Abort early if the type has already been initialized. if (ty.cctorState == TypeInitState.Completed) { return; } // Or if it doesn't have a class constructor. 
if (ty.cctor == UIntPtr.Zero) { ty.cctorState = TypeInitState.Completed; return; } #if SINGULARITY_KERNEL if (!multiThreaded) { //DebugStub.WriteLine("initType({0}.{1}) - singlethreaded", //__arglist(ty.Namespace, ty.Name)); switch(ty.cctorState) { case TypeInitState.Ready: initTypePrint(ty, "ready"); ty.cctorState = TypeInitState.Running; try { initTypePrint(ty, "running"); Magic.calli(ty.cctor); } catch (Exception e) { initTypePrint(ty, "failed"); // Wrap the type initializer exception // appropriately and save it for future calls. ty.cctorState = TypeInitState.Failed; Exception exn = new TypeInitializationException(ty.FullName, e); ty.cctorException = exn; throw exn; } initTypePrint(ty, "complete"); ty.cctorState = TypeInitState.Completed; return; case TypeInitState.Failed: initTypePrint(ty, "previous failure"); throw ty.cctorException; case TypeInitState.Completed: initTypePrint(ty, "already completed"); return; default: initTypePrint(ty, "selfloop or finished"); if (ty.cctorState == TypeInitState.Failed) { initTypePrint(ty, "clash finished - failed"); throw ty.cctorException; } return; } } //DebugStub.WriteLine("initType({0}.{1}) - multithreaded", //__arglist(ty.Namespace, ty.Name)); #endif // Get global lock - must have to read/write any thread/type // wait dependencies. Also used for some things that possibly // should be shifted to the type locks. // BUGBUG: The local lock used below is the RuntimeType object. We // can't use VTable since interfaces do not currently have VTable // objects. But RuntimeType is exposed to users and thus this is // incorrect. Monitor.Enter(VTable.cctorLock); switch(ty.cctorState) { case TypeInitState.Ready:{ initTypePrint(ty, "ready"); // Record that this thread is running this type initializer. Thread t = Thread.CurrentThread; ty.cctorThread = t; ty.cctorState = TypeInitState.Running; // Get local lock - must have to actually run an initializer // and thus also to check if one is done. 
We are done setting // global state and ready to run the local (type) initializer; // therefore we can dump the global lock now as well. lock (ty) { // .classVtable) { Monitor.Exit(VTable.cctorLock); // we don't need to start with "initialize base and // all interfaces" because the compiler explicitly // add calls to the base and interfaces ..ctor in // current type's ..cctor. // Run the type initializer, if it exists. if (ty.cctor != UIntPtr.Zero) { try { initTypePrint(ty, "running"); Magic.calli(ty.cctor); } catch(Exception e) { initTypePrint(ty, "failed"); // Wrap the type initializer exception // appropriately and save it for future calls. ty.cctorState = TypeInitState.Failed; ty.cctorThread = null; Exception exn = new TypeInitializationException (ty.FullName, e); ty.cctorException = exn; throw exn; } } initTypePrint(ty, "complete"); ty.cctorState = TypeInitState.Completed; ty.cctorThread = null; } return; } case TypeInitState.Running:{ initTypePrint(ty, "clash"); // Try to grab the local lock. This seems somewhat confusing // to me but I'll follow the CLR code for now (see // VM\class.cpp:EEClass::DoRunClassInit()). It is important // to do the TryEnter() as we need to detect circular // dependencies. Thread runningThread = ty.cctorThread; bool gotIt = Monitor.TryEnter(ty); // .classVtable); if (gotIt) { initTypePrint(ty, "selfloop or finished"); // If we succeed in getting the lock, then either the // class finished between the switch statement and here // (setting complete/fail does not require the global // lock...) or this thread is attempting circular // initialization. In the former we need to check for // success/failure; in the latter we just return to // prevent deadlock (state will still be Running and thus // not Failed). 
Monitor.Exit(ty); // .classVtable); Monitor.Exit(VTable.cctorLock); if (ty.cctorState == TypeInitState.Failed) { initTypePrint(ty, "clash finished - failed"); throw ty.cctorException; } return; } else { initTypePrint(ty, "blocked by another thread"); // Check and see if blocking this thread to wait for the // type to complete initialization will cause a deadlock. // To do this, see which thread is initializing the type // and then follow the blocking threads. This thread // can not be at any of the intermediate (next blocking // thread is non-null) points as by definition they are // blocked and can't have reached here. Thread currentThread = Thread.CurrentThread; Thread blockingThread = runningThread; VTable.Assert(blockingThread != null); while(blockingThread.blockingCctorThread != null) { blockingThread = blockingThread.blockingCctorThread; } if(currentThread == blockingThread) { // Circular dependency. Give up on "finished // initialization" guaranties and just return. initTypePrint(ty, "circular"); Monitor.Exit(VTable.cctorLock); return; } // Set us as blocked by this type, release the global // lock, and wait for the type to complete initialization. // REVIEW: I have concerns about the lack of lock around // setting the block to null. Come back to this! currentThread.blockingCctorThread = runningThread; Monitor.Exit(VTable.cctorLock); lock (ty) {} // .classVtable) {} currentThread.blockingCctorThread = null; // The type has finished initialization. As above, the // two possible cases are Completed and Failed. 
if(ty.cctorState == TypeInitState.Failed) { throw ty.cctorException; } return; } } case TypeInitState.Failed: initTypePrint(ty, "previous failure"); Monitor.Exit(VTable.cctorLock); throw ty.cctorException; case TypeInitState.Completed: initTypePrint(ty, "already completed"); Monitor.Exit(VTable.cctorLock); return; } } [NoStackLinkCheckTransCut] [NoInline] internal static void throwNewClassCastException() { throw new InvalidCastException(); } [NoInline] internal static void throwNewArgumentOutOfRangeException() { throw new ArgumentOutOfRangeException(); } // Rationale for cutting the fact that no stack probes are permitted in this // tree of calls: // // Without this, then trees of code that cannot probe (like the stack link and // unlink code itself) cannot use arrays. This is too high a burden. Instead, // we change it to the *unchecked* requirement that this code cannot attempt to // over-index an array. There is a bug that captures the fact that we cannot // check this behavior. // // Bug 436 [NoStackLinkCheckTransCut] [NoInline] [RequiredByBartok] internal static void throwNewIndexOutOfRangeException() { throw new IndexOutOfRangeException(); } [NoInline] [RequiredByBartok] internal static void throwNewStringIndexOutOfRangeException() { throw new ArgumentOutOfRangeException(); } [NoInline] internal static void throwNewArrayTypeMismatchException() { throw new ArrayTypeMismatchException(); } // Rationale for cutting the fact that no stack probes are permitted in this // tree of calls: // // Without this, then trees of code that cannot probe (like the stack link and // unlink code itself) cannot use arrays. This is too high a burden. Instead, // we change it to the *unchecked* requirement that this code cannot attempt to // over-index an array. There is a bug that captures the fact that we cannot // check this behavior. 
// // Bug 436 [NoStackLinkCheckTransCut] [NoInline] [AccessedByRuntime("referenced from halasm.asm")] internal static void throwNewOverflowException() { #if DEBUG && SINGULARITY DebugStub.WriteLine("Overflow exception"); DebugStub.Break(); #endif throw new OverflowException(); } // Rationale for cutting the fact that no stack probes are permitted in this // tree of calls: // // Without this, then trees of code that cannot probe (like the stack link and // unlink code itself) cannot use arrays. This is too high a burden. Instead, // we change it to the *unchecked* requirement that this code cannot attempt to // over-index an array. There is a bug that captures the fact that we cannot // check this behavior. // // Bug 436 [NoStackLinkCheckTransCut] [NoInline] [RequiredByBartok] internal static void throwNewDivideByZeroException() { throw new DivideByZeroException(); } [NoStackLinkCheckTransCut] [NoInline] [RequiredByBartok] internal static void throwNewArithmeticException() { throw new ArithmeticException(); } [System.Diagnostics.Conditional("DEBUG")] // Rationale for cutting the fact that no stack probes are permitted in this // tree of calls: // // This is DEBUG-only code. It is reachable from every interface call, which // means we would have to effectively prevent usage of interface calls when // stack probes are transitively forbidden. This is a needless burden for // DEBUG-only code. [NoStackLinkCheckTransCut] [StackLinkCheck] private static void profileInterfaceOffset(VTable v, RuntimeType ty, int index) { // REVIEW: Counts could be off in a multithreaded setting, but these // are just debug counts and if extra hashtables are allocated or // increments are missed it's not that big of a deal. 
    // index ignored
    // Two-level table: outer keyed by VTable, inner keyed by RuntimeType,
    // value is a mutable hit counter.
    if(VTable.runtimeInitialized && VTable.enableDumpInterface) {
        if(VTable.interfaceOffsetTable == null) {
            VTable.interfaceOffsetTable = new System.Collections.Hashtable();
        }
        System.Collections.Hashtable h =
            (System.Collections.Hashtable) VTable.interfaceOffsetTable[v];
        if(h == null) {
            VTable.interfaceOffsetTable[v] = h = new System.Collections.Hashtable();
        }
        MutableInt32 m = (MutableInt32) h[ty];
        if(m == null) {
            h[ty] = new MutableInt32(1);
        }
        else {
            m.Value++;
        }
    }
}

// Linear search of a vtable's interface table for the dispatch offset of
// interface type 'ty'.
[DisableBoundsChecks]
[RequiredByBartok]
internal static IntPtr interfaceOffset(VTable v,RuntimeType ty) {
    InterfaceInfo[] interfaces = v.interfaces;
    VTable.Assert(interfaces != null);
    int numInterfaces = interfaces.Length;
    int i;
    // NOTE(review): the text below is corrupted -- the extraction dropped the
    // body of this loop, the end of interfaceOffset, and the head (signature
    // and first statements) of the checked 64-bit multiply routine whose tail
    // follows.  Reproduced verbatim; do not "fix" without the original file.
    for(i=0; i> 32);
    uint loY = (uint) y;
    uint hiY = (uint) (y >> 32);
    // If both operands have bits above 2^32, the product cannot fit in 64
    // bits -- overflow unconditionally.
    if (hiX > 0 && hiY > 0) {
        VTable.throwNewOverflowException();
    }
    ulong result = mulUIntUIntToULong(loY, loX);
    // The cross terms are added in a checked context so any 32-bit or 64-bit
    // overflow raises OverflowException.
    checked {
        result = result + (((ulong) (loX * hiY)) << 32);
        result = result + (((ulong) (loY * hiX)) << 32);
    }
    return result;
}

[RequiredByBartok]
// unsigned 64-bit multiplication with no overflow checking
static public ulong mulUnsigned64Bit(ulong x,ulong y) {
    // Schoolbook 32x32 decomposition; high*high term is deliberately
    // dropped (wrapping semantics).
    uint loX = (uint) x;
    uint hiX = (uint) (x >> 32);
    uint loY = (uint) y;
    uint hiY = (uint) (y >> 32);
    ulong result = mulUIntUIntToULong(loY, loX);
    result = result + (((ulong) (loX * hiY)) << 32);
    result = result + (((ulong) (loY * hiX)) << 32);
    return result;
}

// Converts a double to ulong by routing through the signed conversion,
// biasing by 2^63 when the value is too large for long.
[RequiredByBartok]
static public ulong doubleToULong(double val) {
    double two63 = 2147483648.0 * 4294967296.0;   // 2^63
    ulong ret;
    if (val < two63) {
        ret = (ulong)doubleToLong(val);
    }
    else {
        // subtract 0x8000000000000000, do the convert then add
        // it back again
        ret = (ulong)doubleToLong(val - two63) + (0x8000000000000000L);
    }
    return ret;
}

// Checked double -> ulong conversion: values outside (-1, 2^64) overflow.
[RequiredByBartok]
static public ulong checkedDoubleToULong(double val) {
    double two64 = 4294967296.0 * 4294967296.0;   // 2^64
    // Note that this expression also works properly for val = NaN case
    if (val > -1.0 && val < two64) {
        const double two63 =
// NOTE(review): this chunk opens mid-statement -- it completes the
// "const double two63 =" initializer begun in the previous chunk.
2147483648.0 * 4294967296.0;   // 2^63
ulong ret;
if (val < two63) {
    ret = (ulong)doubleToLong(val);
}
else {
    // subtract 0x8000000000000000, do the convert then add
    // it back again
    ret = (ulong)doubleToLong(val - two63) + (0x8000000000000000L);
}
return ret;
}
// throw
// Value out of range (or NaN handled above): overflow.  The trailing
// "return 0;" is unreachable in practice but satisfies the compiler.
VTable.throwNewOverflowException();
return 0;
}

// --- Runtime option flags, set by ParseArgs / build configuration --------
public const bool enableLibraryOptions = true;
internal static bool enableDebugPrint = false; /*true to log initType calls*/
internal static bool enableUserTiming = false;
internal static bool enableGCVerify = false;
internal static bool enableGCProfiling = false;
internal static bool enableGCTiming = false;
internal static bool enableFinalGCTiming = false;
internal static bool enableGCAccounting = false;
internal static bool enableGCAccurateHeapSize = false;
internal static bool enableDumpMemStats = false;
internal static bool enableDumpMultiUseWords = false;
internal static bool enableDumpInterface = false;
internal static bool enableDumpTryAllStats = false;
// Tick counts bracketing user code, used for --brt-usertiming reporting
// (presumably set around Main -- confirm against caller).
internal static int beforeUser;
internal static int afterUser;

[NoHeapAllocation]
public static bool EnableLibraryNotImplemented() {
    return(false);
}

[NoHeapAllocation]
public static bool EnableLibraryAsserts() {
    return(true);
}

// Reports an unimplemented code path through the assert machinery.
[NoHeapAllocation]
public static void NotImplemented() {
    failAssert("Not implemented.");
}

[NoHeapAllocation]
public static void NotImplemented(String msg) {
    failAssert(/*"Not implemented: "+*/msg);
}

// DEBUG-only: flags control flow that should be impossible.
[System.Diagnostics.Conditional("DEBUG")]
[NoInline]
[NoHeapAllocation]
public static void NotReached() {
    failAssert("Unreachable code reached.");
}

[System.Diagnostics.Conditional("DEBUG")]
[NoInline]
[NoHeapAllocation]
public static void NotReached(String msg) {
    failAssert(/*"Unreachable code reached: "+*/msg);
}

// DEBUG-only assertion; calls vanish entirely in non-DEBUG builds.
[System.Diagnostics.Conditional("DEBUG")]
[NoInline]
[ManualRefCounts]
[NoHeapAllocation]
[NoStackLinkCheckTrans]
public static void Assert(bool expr) {
    if (VTable.enableLibraryOptions && EnableLibraryAsserts() && !expr) {
        failAssert(null);
    }
}
// DEBUG-only inverse assertion: fails when the condition IS true.
[System.Diagnostics.Conditional("DEBUG")]
[NoInline]
[ManualRefCounts]
[NoHeapAllocation]
[NoStackLinkCheckTrans]
public static void Deny(bool expr) {
    if (VTable.enableLibraryOptions && EnableLibraryAsserts() && expr) {
        failAssert(null);
    }
}

// Variant used for redundant checks: ignores the enableLibraryOptions /
// EnableLibraryAsserts gates and always evaluates the condition.
[System.Diagnostics.Conditional("DEBUG")]
[NoInline]
[NoHeapAllocation]
public static void AssertForRedundant(bool expr) {
    if (!expr) {
        failAssert(null);
    }
}

[System.Diagnostics.Conditional("DEBUG")]
[NoInline]
[NoHeapAllocation]
public static void DenyForRedundant(bool expr) {
    AssertForRedundant(!expr);
}

// Assertion with a message reported on failure.
[System.Diagnostics.Conditional("DEBUG")]
[NoInline]
[ManualRefCounts]
[NoHeapAllocation]
public static void Assert(bool expr, String s) {
    if (VTable.enableLibraryOptions && EnableLibraryAsserts() && !expr) {
        failAssert(s);
    }
}

[System.Diagnostics.Conditional("DEBUG")]
[NoInline]
[NoHeapAllocation]
public static void Deny(bool expr, String s) {
    if (VTable.enableLibraryOptions && EnableLibraryAsserts() && expr) {
        failAssert(s);
    }
}

// Rationale for cutting the fact that no stack probes are permitted in this
// tree of calls:
//
// * Asserts are debug only.
// * When an assert fails, a future stack overflow is not our biggest problem.
// * We want the ability to freely assert in trees that forbid stack probes.
// Common failure path for all assert/deny helpers: log (on Singularity),
// print to the debug channel, then break into the debugger.
[NoStackLinkCheckTransCut]
[ManualRefCounts]
[NoHeapAllocation]
private static void failAssert(String s) {
#if SINGULARITY
    if (s != null) {
        Tracing.Log(Tracing.Notice, "Assertion failed: {0}\n", s);
    }
    else {
        Tracing.Log(Tracing.Notice, "Assertion failed\n");
    }
#endif
    // DebugPrintSpinLock();
    // DebugPrint(Thread.currentThread().toString());
    if (s != null) {
        DebugPrint("Assertion failed: {0}\n", __arglist(s));
    }
    else {
        DebugPrint("Assertion failed.\n");
    }
    // Thread.DebugDumpThreadAnchorTable(0);
    DebugBreak();
    // DebugPrintSpinUnlock();
}

#if SINGULARITY_KERNEL || SINGULARITY_PROCESS
// Note confusion: This DebugPrint is an unconditional output to stderr,
// not something that only prints in BRT debug modes.
// --- Singularity build: forward all debug output to DebugStub -------------
[RequiredByBartok]
[NoHeapAllocation]
static public void DebugPrint(String v) {
    DebugStub.Print(v);
}

[NoHeapAllocation]
static public void DebugPrint(String v, __arglist) {
    DebugStub.Print(v, new ArgIterator(__arglist));
}

[NoHeapAllocation]
static public void DebugPrint(String v, ArgIterator args) {
    DebugStub.Print(v, args);
}

[RequiredByBartok]
static public void DebugPrint(int v) {
    DebugStub.Print("{0}", __arglist(v));
}

[RequiredByBartok]
static public void DebugPrint(long v) {
    DebugStub.Print("{0}", __arglist(v));
}

[NoHeapAllocation]
static public void DebugBreak() {
    DebugStub.Break();
}

#else // not SINGULARITY

// --- Non-Singularity build: native (InternalCall) debug output ------------
// Note confusion: This DebugPrint is an unconditional output to stderr,
// not something that only prints in BRT debug modes.
[RequiredByBartok]
[MethodImpl(MethodImplOptions.InternalCall)]
[StackBound(312)]
[NoHeapAllocation]
[GCAnnotation(GCOption.NOGC)]
static public extern void DebugPrint(String v);

[MethodImpl(MethodImplOptions.InternalCall)]
//[StackBound(610)]
[NoHeapAllocation]
[GCAnnotation(GCOption.NOGC)]
static private unsafe extern void DebugPrintHelper(char *p_str, int length);

// Maximum formatted message length; one extra char is reserved in the
// stack buffer for a terminator.
const int DEBUG_MESSAGE_BUFFER_SIZE = 4095;

// Formats into a stack buffer (no heap allocation) and hands the result to
// the native print helper.
[NoHeapAllocation]
static public unsafe void DebugPrint(String v, ArgIterator args) {
    char *memBuffer = stackalloc char[DEBUG_MESSAGE_BUFFER_SIZE+1];
    // note that we save 1 for the null character
    int stringLength = String.LimitedFormatTo(v, args, memBuffer,
                                              DEBUG_MESSAGE_BUFFER_SIZE);
    VTable.Assert(stringLength <= DEBUG_MESSAGE_BUFFER_SIZE,
                  "String.LimitedFormatTo returned an impossibly large stringLength");
    // null terminate the string
    // NOTE(review): no terminating store is visible here despite the comment
    // above; presumably DebugPrintHelper relies on the explicit length --
    // confirm against the native implementation.
    DebugPrintHelper(memBuffer, stringLength);
}

[NoHeapAllocation]
static public void DebugPrint(String v, __arglist) {
    DebugPrint(v, new ArgIterator(__arglist));
}

[MethodImpl(MethodImplOptions.InternalCall)]
[StackBound(610)]
[NoHeapAllocation]
[GCAnnotation(GCOption.NOGC)]
static public extern void DebugPrint(byte v);

// Attributes below belong to the DebugPrint(int) declaration that continues
// in the next chunk.
[MethodImpl(MethodImplOptions.InternalCall)]
[StackBound(610)]
// Completes the extern DebugPrint(int) declaration whose leading attributes
// appear at the end of the previous chunk.
[RequiredByBartok]
[NoHeapAllocation]
[GCAnnotation(GCOption.NOGC)]
static public extern void DebugPrint(int v);

[MethodImpl(MethodImplOptions.InternalCall)]
[StackBound(618)]
[RequiredByBartok]
[NoHeapAllocation]
[GCAnnotation(GCOption.NOGC)]
static public extern void DebugPrint(long v);

[MethodImpl(MethodImplOptions.InternalCall)]
[StackBound(618)]
[NoHeapAllocation]
[GCAnnotation(GCOption.NOGC)]
static public extern void DebugPrint(ulong v);

[MethodImpl(MethodImplOptions.InternalCall)]
[StackBound(618)]
[NoHeapAllocation]
[GCAnnotation(GCOption.NOGC)]
static public extern void DebugPrint(int v, int width);

[MethodImpl(MethodImplOptions.InternalCall)]
[StackBound(626)]
[NoHeapAllocation]
[GCAnnotation(GCOption.NOGC)]
static public extern void DebugPrint(long v, int width);

[MethodImpl(MethodImplOptions.InternalCall)]
[NoHeapAllocation]
[GCAnnotation(GCOption.NOGC)]
static public extern void DebugPrint(ulong v, int width);

[NoHeapAllocation]
static public void DebugPrint(bool b) {
    if (b) {
        DebugPrint("true");
    }
    else {
        DebugPrint("false");
    }
}

[MethodImpl(MethodImplOptions.InternalCall)]
[StackBound(638)]
[NoHeapAllocation]
[GCAnnotation(GCOption.NOGC)]
static public extern void DebugDump(object o);

[MethodImpl(MethodImplOptions.InternalCall)]
[StackBound(638)]
[NoHeapAllocation]
[GCAnnotation(GCOption.NOGC)]
static public extern void DebugBreak();
#endif

// Walks the BaseType chain of the thrown exception's dynamic type to decide
// whether a handler for type 't' matches exception 'e'.
[AccessedByRuntime("referenced from halexn.cpp")]
[NoHeapAllocation]
static public bool IsExceptionHandler(Type t,Exception e) {
    Type s = e.vtable.vtableType;
    // check whether s is a subtype of t
    do {
        if (s==t) {
            return true;
        }
        s = s.BaseType;
    } while (s != null);
    return false;
}

// Consumes leading "--brt-*" runtime options from the command line and
// returns the remaining arguments for user Main.  On Singularity the args
// are passed in; elsewhere they come from the environment.
#if SINGULARITY
static public String[] ParseArgs(String[] args)
#else
[AccessedByRuntime("referenced from brtmain.cpp")]
static public String[] ParseArgs()
#endif
{
#if !SINGULARITY
    String[] args = System.Environment.GetCommandLineArgs();
#endif
    int i=0;
#if !SINGULARITY_KERNEL
    ++i; // dump program name argument
#endif
    // NOTE(review): the text below is corrupted -- the extraction dropped the
    // loop condition, many option handlers, and the head of the
    // "--brt-tryallchecksize" handler whose tail begins the next chunk.
    // Reproduced verbatim; do not "fix" without the original file.
    for( ; i 0) &&
// Completes the power-of-two validation of --brt-tryallchecksize begun in
// the (corrupted) previous chunk.
(((tryAllSize - 1) & tryAllSize) == 0),
       "--brt-tryallchecksize must be a power of two");
// Mask is size-1, matching the tryAllCheckMask field's documented contract.
VTable.tryAllCheckMask = tryAllSize - 1;
} catch(FormatException) {
    DebugPrint ("--brt-tryallchecksize requires argument\r\n");
    DebugBreak();
}
continue;
}
if(arg == "--brt-dump-tryallstats") {
    VTable.enableDumpTryAllStats = true;
    continue;
}
// Sets the GC-count trigger for the youngest generation (generational
// collectors only).
if(arg == "--brt-countgc-trigger") {
#if !SINGULARITY || ADAPTIVE_COPYING_COLLECTOR || SEMISPACE_COLLECTOR
    switch(GC.gcType) {
      case GCType.AdaptiveCopyingCollector:
      case GCType.SemispaceCollector: {
        ++i;
        if(i==args.Length) {
            DebugPrint("--brt-countgc-trigger requires argument\r\n");
            DebugBreak();
        }
        try {
            short countGCTrigger = Int16.Parse(args[i]);
            // -1 means "effectively never trigger by count".
            if(countGCTrigger == -1) {
                countGCTrigger = Int16.MaxValue;
            }
            GCs.GenerationalGCData.gcFrequencyTable
                [(int)GCs.GenerationalGCData.MIN_GENERATION] = countGCTrigger;
        } catch(FormatException) {
            DebugPrint("--brt-countgc-trigger requires argument\r\n");
            DebugBreak();
        }
        break;
      }
    }
#endif
    continue; // argument loop
}
// Sets the promoted-bytes trigger for the youngest generation.
if(arg == "--brt-promotegc-trigger") {
#if !SINGULARITY || ADAPTIVE_COPYING_COLLECTOR || SEMISPACE_COLLECTOR
    switch(GC.gcType) {
      case GCType.AdaptiveCopyingCollector:
      case GCType.SemispaceCollector: {
        ++i;
        if(i==args.Length) {
            DebugPrint("--brt-promotegc-trigger requires argument\r\n");
            DebugBreak();
        }
        try {
            long promotedGCTrigger = Int64.Parse(args[i]);
            // -1 means "effectively never trigger by promoted bytes".
            if(promotedGCTrigger == -1L) {
                promotedGCTrigger = Int64.MaxValue;
            }
            GCs.GenerationalGCData.gcPromotedLimitTable
                [(int)GCs.GenerationalGCData.MIN_GENERATION] =
                (UIntPtr) promotedGCTrigger;
        } catch(FormatException) {
            DebugPrint ("--brt-promotegc-trigger requires argument\r\n");
            DebugBreak();
        }
        break;
      }
    }
#endif
    continue; // argument loop
}
// Sets the nursery size; handler body continues in the next chunk.
if(arg == "--brt-nurserysize") {
#if !SINGULARITY || ADAPTIVE_COPYING_COLLECTOR || SEMISPACE_COLLECTOR
    switch(GC.gcType) {
      case GCType.AdaptiveCopyingCollector:
      case GCType.SemispaceCollector: {
        ++i;
        if(i==args.Length) {
            DebugPrint ("--brt-nurserysize requires argument\r\n");
            DebugBreak();
        }
        try {
// Completes the --brt-nurserysize handler begun in the previous chunk.
GCs.GenerationalGCData.nurserySize = (UIntPtr) UInt64.Parse(args[i]);
} catch(FormatException) {
    // NOTE(review): message says "--brt-nurserysize-trigger" but the option
    // is "--brt-nurserysize" -- likely a copy/paste slip in the original.
    DebugPrint("--brt-nurserysize-trigger requires argument\r\n");
    DebugBreak();
}
break;
}
}
#endif
continue; // argument loop
}
// Caps the heap size in kilobytes (requires /heapsizeconfigurable build).
if(arg == "--brt-maxheapsizekb") {
    Assert(GC.HeapSizeConfigurable,
           "not compiled with /heapsizeconfigurable");
    ++i;
    if(i==args.Length) {
        DebugPrint ("--brt-maxheapsizekb requires argument\r\n");
        DebugBreak();
    }
    try {
        int maxheapsizekb = Int32.Parse(args[i]);
        // Convert KB to pages: shift by (PageBits - 10) since 2^10 = 1KB.
        GC.MaxHeapPages = maxheapsizekb >> (GCs.PageTable.PageBits-10);
    } catch(FormatException) {
        DebugPrint("--brt-maxheapsizekb requires argument\r\n");
        DebugBreak();
    }
    continue; // argument loop
}
/*
if(arg == "--brt-ssbsize") {
    ++i;
    if(i==args.Length) {
        DebugPrint ("--brt-ssbsize requires argument\r\n");
        DebugBreak();
    }
    try {
        SequentialStoreBuffer.??? = Int32.Parse(args[i]);
    } catch(FormatException) {
        DebugPrint ("--brt-ssbsize requires argument\r\n");
        DebugBreak();
    }
    continue;
}
*/
// GC trace filters: single value, inclusive range, and trace area.  All
// take hexadecimal arguments.
if(arg == "--brt-gctrace-filter") {
    ++i;
    if(i==args.Length) {
        DebugPrint ("--brt-gctrace-filter requires argument\r\n");
        DebugBreak();
    }
    try {
        System.GCs.Trace.filter = (UIntPtr)
            UInt64.Parse(args[i],
                         System.Globalization.NumberStyles.AllowHexSpecifier);
    } catch(FormatException) {
        DebugPrint ("--brt-gctrace-filter requires a hex argument\r\n");
        DebugBreak();
    }
    continue;
}
if(arg == "--brt-gctrace-filter-range") {
    i += 2;
    if(i >= args.Length) {
        DebugPrint ("--brt-gctrace-filter-range requires two hex arguments\r\n");
        DebugBreak();
    }
    try {
        System.GCs.Trace.filterLow = (UIntPtr)
            UInt64.Parse(args[i-1],
                         System.Globalization.NumberStyles.AllowHexSpecifier);
        System.GCs.Trace.filterHigh = (UIntPtr)
            UInt64.Parse(args[i],
                         System.Globalization.NumberStyles.AllowHexSpecifier);
    } catch(FormatException) {
        DebugPrint ("--brt-gctrace-filter-range requires two hex arguments\r\n");
        DebugBreak();
    }
    continue;
}
// Handler continues in the next chunk.  NOTE(review): the chunk boundary
// falls inside the string literal below; the literal's remainder opens the
// next chunk.  Preserved as-is.
if(arg == "--brt-gctrace-filter-area") {
    ++i;
    if(i >= args.Length) {
        DebugPrint ("--brt-gctrace-filter-area requires hex
argument\r\n"); DebugBreak(); } try { System.GCs.Trace.filterArea = (System.GCs.Trace.Area) Int32.Parse(args[i], System.Globalization.NumberStyles.AllowHexSpecifier); } catch(FormatException) { DebugPrint ("--brt-gctrace-filter-area requires hex argument\r\n"); DebugBreak(); } continue; } DebugPrint("Unrecognized BRT option: {0}\n", __arglist(arg)); DebugBreak(); } if(VTable.enableGCAccounting) { GCs.MemoryAccounting.Initialize(GC.gcType); } int mainArgsLength = args.Length - i; VTable.Assert(mainArgsLength >= 0, "negative number of args to Main"); String[] mainArgs = new String[mainArgsLength]; for(int j=0; i 4) { for(int i = 0; i < size; i++) { if(handle_counts[i] == 0) { continue; } DebugPrint("{0:d3}: {1}\n", __arglist(i, handle_counts[i])); } } GC.DestructHeap(); } [AccessedByRuntime("referenced from halexn.cpp")] static public void TerminateByException(Exception e) { RuntimeType rt = (RuntimeType)e.GetType(); DebugPrint("\n"); DebugPrint("Unhandled Exception ({0}.{1}):\n", __arglist(rt.Namespace, rt.Name)); DebugPrint(" {0}\n", __arglist(e.Message)); try { DebugPrint(" {0}\n", __arglist(e.ToString())); } catch(Exception) { try { DebugPrint("\n Exception.ToString() failed.\n"); } catch (Exception) { } } finally { Shutdown(-2); } DebugBreak(); } static public void DumpException(Exception e) { RuntimeType rt = (RuntimeType)e.GetType(); DebugPrint("Handled Exception ({0}.{1}):\n", __arglist(rt.Namespace, rt.Name)); DebugPrint(" {0}\n", __arglist(e.Message)); try { DebugPrint(" {0}\n", __arglist(e.ToString())); } catch(Exception) { try { DebugPrint(" Exception.ToString() failed.\n"); } catch (Exception) { } } finally { } } [RequiredByBartok] public static void init(object obj) { } private static bool fDebug { get { return false; } } } }