From 730f83e4354154f5405260fb29c6fa784e2b7b30 Mon Sep 17 00:00:00 2001 From: xezon <4720891+xezon@users.noreply.github.com> Date: Thu, 12 Jun 2025 18:31:31 +0200 Subject: [PATCH 1/3] [GEN] Fix crash when launching the game with an unsupported Display Resolution --- .../W3DDevice/GameClient/W3DDisplay.cpp | 77 ++++++++++++------- 1 file changed, 48 insertions(+), 29 deletions(-) diff --git a/Generals/Code/GameEngineDevice/Source/W3DDevice/GameClient/W3DDisplay.cpp b/Generals/Code/GameEngineDevice/Source/W3DDevice/GameClient/W3DDisplay.cpp index 2c2d491ca0..a351e37ee7 100644 --- a/Generals/Code/GameEngineDevice/Source/W3DDevice/GameClient/W3DDisplay.cpp +++ b/Generals/Code/GameEngineDevice/Source/W3DDevice/GameClient/W3DDisplay.cpp @@ -115,7 +115,6 @@ static void drawFramerateBar(void); #endif // DEFINE AND ENUMS /////////////////////////////////////////////////////////// -#define W3D_DISPLAY_DEFAULT_BIT_DEPTH 32 #define no_SAMPLE_DYNAMIC_LIGHT 1 #ifdef SAMPLE_DYNAMIC_LIGHT @@ -668,38 +667,58 @@ void W3DDisplay::init( void ) m_2DRender = NEW Render2DClass; DEBUG_ASSERTCRASH( m_2DRender, ("Cannot create Render2DClass") ); - // set our default width and height and bit depth - /// @todo we should set this according to options read from a file - setWidth( TheGlobalData->m_xResolution ); - setHeight( TheGlobalData->m_yResolution ); - setBitDepth( W3D_DISPLAY_DEFAULT_BIT_DEPTH ); - - if( WW3D::Set_Render_Device( 0, - getWidth(), - getHeight(), - getBitDepth(), - getWindowed(), - true ) != WW3D_ERROR_OK ) + WW3DErrorType renderDeviceError; + Int attempt = 0; + do { - // Getting the device at the default bit depth (32) didn't work, so try - // getting a 16 bit display. (Voodoo 1-3 only supported 16 bit.) jba. - setBitDepth( 16 ); - if( WW3D::Set_Render_Device( 0, - getWidth(), - getHeight(), - getBitDepth(), - getWindowed(), - true ) != WW3D_ERROR_OK ) + switch (attempt) { - - WW3D::Shutdown(); - WWMath::Shutdown(); - throw ERROR_INVALID_D3D; //failed to initialize. 
User probably doesn't have DX 8.1 - DEBUG_ASSERTCRASH( 0, ("Unable to set render device\n") ); - return; + case 0: + // set our default width and height and bit depth + setWidth( TheGlobalData->m_xResolution ); + setHeight( TheGlobalData->m_yResolution ); + setBitDepth( 32 ); + break; + case 1: + // Getting the device at the default bit depth (32) didn't work, so try + // getting a 16 bit display. (Voodoo 1-3 only supported 16 bit.) jba. + setBitDepth( 16 ); + break; + case 2: + // TheSuperHackers @bugfix xezon 11/06/2025 Now tries a safe default resolution + // if the previous one did not succeed. This is unlikely to happen but is possible + // if the user writes an unsupported resolution into to the Option Preferences. + TheWritableGlobalData->m_xResolution = 800; + TheWritableGlobalData->m_yResolution = 600; + setWidth( TheGlobalData->m_xResolution ); + setHeight( TheGlobalData->m_yResolution ); + setBitDepth( 32 ); + break; + case 3: + setBitDepth( 16 ); + break; } - } // end if + renderDeviceError = WW3D::Set_Render_Device( + 0, + getWidth(), + getHeight(), + getBitDepth(), + getWindowed(), + true ); + + ++attempt; + } + while (attempt < 4 && renderDeviceError != WW3D_ERROR_OK); + + if (renderDeviceError != WW3D_ERROR_OK) + { + WW3D::Shutdown(); + WWMath::Shutdown(); + throw ERROR_INVALID_D3D; //failed to initialize. User probably doesn't have DX 8.1 + DEBUG_ASSERTCRASH( 0, ("Unable to set render device\n") ); + return; + } //Check if level was never set and default to setting most suitable for system. 
if (TheGameLODManager->getStaticLODLevel() == STATIC_GAME_LOD_UNKNOWN) From e6298c267b2a0766af1ed9a455fa173f35b77a97 Mon Sep 17 00:00:00 2001 From: xezon <4720891+xezon@users.noreply.github.com> Date: Thu, 12 Jun 2025 18:33:14 +0200 Subject: [PATCH 2/3] Replicate in GeneralsMD --- .../W3DDevice/GameClient/W3DDisplay.cpp | 77 ++++++++++++------- 1 file changed, 48 insertions(+), 29 deletions(-) diff --git a/GeneralsMD/Code/GameEngineDevice/Source/W3DDevice/GameClient/W3DDisplay.cpp b/GeneralsMD/Code/GameEngineDevice/Source/W3DDevice/GameClient/W3DDisplay.cpp index bf282b3196..3745049fb5 100644 --- a/GeneralsMD/Code/GameEngineDevice/Source/W3DDevice/GameClient/W3DDisplay.cpp +++ b/GeneralsMD/Code/GameEngineDevice/Source/W3DDevice/GameClient/W3DDisplay.cpp @@ -116,7 +116,6 @@ static void drawFramerateBar(void); #endif // DEFINE AND ENUMS /////////////////////////////////////////////////////////// -#define W3D_DISPLAY_DEFAULT_BIT_DEPTH 32 #define no_SAMPLE_DYNAMIC_LIGHT 1 #ifdef SAMPLE_DYNAMIC_LIGHT @@ -735,38 +734,58 @@ void W3DDisplay::init( void ) m_2DRender = NEW Render2DClass; DEBUG_ASSERTCRASH( m_2DRender, ("Cannot create Render2DClass") ); - // set our default width and height and bit depth - /// @todo we should set this according to options read from a file - setWidth( TheGlobalData->m_xResolution ); - setHeight( TheGlobalData->m_yResolution ); - setBitDepth( W3D_DISPLAY_DEFAULT_BIT_DEPTH ); - - if( WW3D::Set_Render_Device( 0, - getWidth(), - getHeight(), - getBitDepth(), - getWindowed(), - true ) != WW3D_ERROR_OK ) + WW3DErrorType renderDeviceError; + Int attempt = 0; + do { - // Getting the device at the default bit depth (32) didn't work, so try - // getting a 16 bit display. (Voodoo 1-3 only supported 16 bit.) jba. 
- setBitDepth( 16 ); - if( WW3D::Set_Render_Device( 0, - getWidth(), - getHeight(), - getBitDepth(), - getWindowed(), - true ) != WW3D_ERROR_OK ) + switch (attempt) { - - WW3D::Shutdown(); - WWMath::Shutdown(); - throw ERROR_INVALID_D3D; //failed to initialize. User probably doesn't have DX 8.1 - DEBUG_ASSERTCRASH( 0, ("Unable to set render device\n") ); - return; + case 0: + // set our default width and height and bit depth + setWidth( TheGlobalData->m_xResolution ); + setHeight( TheGlobalData->m_yResolution ); + setBitDepth( 32 ); + break; + case 1: + // Getting the device at the default bit depth (32) didn't work, so try + // getting a 16 bit display. (Voodoo 1-3 only supported 16 bit.) jba. + setBitDepth( 16 ); + break; + case 2: + // TheSuperHackers @bugfix xezon 11/06/2025 Now tries a safe default resolution + // if the previous one did not succeed. This is unlikely to happen but is possible + // if the user writes an unsupported resolution into to the Option Preferences. + TheWritableGlobalData->m_xResolution = 800; + TheWritableGlobalData->m_yResolution = 600; + setWidth( TheGlobalData->m_xResolution ); + setHeight( TheGlobalData->m_yResolution ); + setBitDepth( 32 ); + break; + case 3: + setBitDepth( 16 ); + break; } - } // end if + renderDeviceError = WW3D::Set_Render_Device( + 0, + getWidth(), + getHeight(), + getBitDepth(), + getWindowed(), + true ); + + ++attempt; + } + while (attempt < 4 && renderDeviceError != WW3D_ERROR_OK); + + if (renderDeviceError != WW3D_ERROR_OK) + { + WW3D::Shutdown(); + WWMath::Shutdown(); + throw ERROR_INVALID_D3D; //failed to initialize. User probably doesn't have DX 8.1 + DEBUG_ASSERTCRASH( 0, ("Unable to set render device\n") ); + return; + } //Check if level was never set and default to setting most suitable for system. 
if (TheGameLODManager->getStaticLODLevel() == STATIC_GAME_LOD_UNKNOWN) From 6dc225888ed341b451c20238fb1a21c8c0760e4b Mon Sep 17 00:00:00 2001 From: xezon <4720891+xezon@users.noreply.github.com> Date: Thu, 12 Jun 2025 19:25:00 +0200 Subject: [PATCH 3/3] Improve logic --- .../W3DDevice/GameClient/W3DDisplay.cpp | 28 +++++++++++++++---- .../W3DDevice/GameClient/W3DDisplay.cpp | 28 +++++++++++++++---- 2 files changed, 44 insertions(+), 12 deletions(-) diff --git a/Generals/Code/GameEngineDevice/Source/W3DDevice/GameClient/W3DDisplay.cpp b/Generals/Code/GameEngineDevice/Source/W3DDevice/GameClient/W3DDisplay.cpp index a351e37ee7..04c3abaf5c 100644 --- a/Generals/Code/GameEngineDevice/Source/W3DDevice/GameClient/W3DDisplay.cpp +++ b/Generals/Code/GameEngineDevice/Source/W3DDevice/GameClient/W3DDisplay.cpp @@ -685,15 +685,31 @@ void W3DDisplay::init( void ) setBitDepth( 16 ); break; case 2: + { // TheSuperHackers @bugfix xezon 11/06/2025 Now tries a safe default resolution - // if the previous one did not succeed. This is unlikely to happen but is possible - // if the user writes an unsupported resolution into to the Option Preferences. - TheWritableGlobalData->m_xResolution = 800; - TheWritableGlobalData->m_yResolution = 600; - setWidth( TheGlobalData->m_xResolution ); - setHeight( TheGlobalData->m_yResolution ); + // if the custom resolution did not succeed. This is unlikely to happen but is possible + // if the user writes an unsupported resolution into the Option Preferences or if the + // graphics adapter does not support 800 x 600 to begin with. + const Int minW = 800; + const Int minH = 600; + Int xres = minW; + Int yres = minH; + Int bitDepth = 32; + Int displayModeCount = getDisplayModeCount(); + Int displayModeIndex = 0; + for (; displayModeIndex < displayModeCount; ++displayModeIndex) + { + getDisplayModeDescription(displayModeIndex, &xres, &yres, &bitDepth); + if (xres * yres >= minW * minH) + break; // Is good enough. Use it. 
+ } + TheWritableGlobalData->m_xResolution = xres; + TheWritableGlobalData->m_yResolution = yres; + setWidth( xres ); + setHeight( yres ); setBitDepth( 32 ); break; + } case 3: setBitDepth( 16 ); break; diff --git a/GeneralsMD/Code/GameEngineDevice/Source/W3DDevice/GameClient/W3DDisplay.cpp b/GeneralsMD/Code/GameEngineDevice/Source/W3DDevice/GameClient/W3DDisplay.cpp index 3745049fb5..f52da866f5 100644 --- a/GeneralsMD/Code/GameEngineDevice/Source/W3DDevice/GameClient/W3DDisplay.cpp +++ b/GeneralsMD/Code/GameEngineDevice/Source/W3DDevice/GameClient/W3DDisplay.cpp @@ -752,15 +752,31 @@ void W3DDisplay::init( void ) setBitDepth( 16 ); break; case 2: + { // TheSuperHackers @bugfix xezon 11/06/2025 Now tries a safe default resolution - // if the previous one did not succeed. This is unlikely to happen but is possible - // if the user writes an unsupported resolution into to the Option Preferences. - TheWritableGlobalData->m_xResolution = 800; - TheWritableGlobalData->m_yResolution = 600; - setWidth( TheGlobalData->m_xResolution ); - setHeight( TheGlobalData->m_yResolution ); + // if the custom resolution did not succeed. This is unlikely to happen but is possible + // if the user writes an unsupported resolution into the Option Preferences or if the + // graphics adapter does not support 800 x 600 to begin with. + const Int minW = 800; + const Int minH = 600; + Int xres = minW; + Int yres = minH; + Int bitDepth = 32; + Int displayModeCount = getDisplayModeCount(); + Int displayModeIndex = 0; + for (; displayModeIndex < displayModeCount; ++displayModeIndex) + { + getDisplayModeDescription(displayModeIndex, &xres, &yres, &bitDepth); + if (xres * yres >= minW * minH) + break; // Is good enough. Use it. + } + TheWritableGlobalData->m_xResolution = xres; + TheWritableGlobalData->m_yResolution = yres; + setWidth( xres ); + setHeight( yres ); setBitDepth( 32 ); break; + } case 3: setBitDepth( 16 ); break;