Commit c9a6475

Fix GPU memory display: already given in Mb for Nvidia GPUs
1 parent 5d63351 commit c9a6475

File tree

1 file changed: +13 -11 lines


library/stats.py

Lines changed: 13 additions & 11 deletions
@@ -179,11 +179,12 @@ def temperature():
     # TODO: Built in function for *nix in psutil, for Windows can use WMI or a third party library
 
 
-def display_gpu_stats(load, memory_percentage, memory_used, temperature):
+def display_gpu_stats(load, memory_percentage, memory_used_mb, temperature):
     if THEME_DATA['STATS']['GPU']['PERCENTAGE']['GRAPH'].get("SHOW", False):
         if math.isnan(load):
             logger.warning("Your GPU load is not supported yet")
             THEME_DATA['STATS']['GPU']['PERCENTAGE']['GRAPH']['SHOW'] = False
+            THEME_DATA['STATS']['GPU']['PERCENTAGE']['TEXT']['SHOW'] = False
         else:
             # logger.debug(f"GPU Load: {load}")
             display.lcd.DisplayProgressBar(
@@ -206,6 +207,7 @@ def display_gpu_stats(load, memory_percentage, memory_used, temperature):
     if THEME_DATA['STATS']['GPU']['PERCENTAGE']['TEXT'].get("SHOW", False):
         if math.isnan(load):
             logger.warning("Your GPU load is not supported yet")
+            THEME_DATA['STATS']['GPU']['PERCENTAGE']['GRAPH']['SHOW'] = False
             THEME_DATA['STATS']['GPU']['PERCENTAGE']['TEXT']['SHOW'] = False
         else:
             display.lcd.DisplayText(
@@ -245,12 +247,12 @@ def display_gpu_stats(load, memory_percentage, memory_used, temperature):
         )
 
     if THEME_DATA['STATS']['GPU']['MEMORY']['TEXT'].get("SHOW", False):
-        if math.isnan(memory_percentage):
+        if math.isnan(memory_used_mb):
             logger.warning("Your GPU memory absolute usage (M) is not supported yet")
             THEME_DATA['STATS']['GPU']['MEMORY']['TEXT']['SHOW'] = False
         else:
             display.lcd.DisplayText(
-                text=f"{int(memory_used / 1000000):>5} M",
+                text=f"{int(memory_used_mb):>5} M",
                 x=THEME_DATA['STATS']['GPU']['MEMORY']['TEXT'].get("X", 0),
                 y=THEME_DATA['STATS']['GPU']['MEMORY']['TEXT'].get("Y", 0),
                 font=THEME_DATA['STATS']['GPU']['MEMORY']['TEXT'].get("FONT", "roboto-mono/RobotoMono-Regular.ttf"),
@@ -285,20 +287,20 @@ def stats():
         nvidia_gpus = GPUtil.getGPUs()
 
         memory_used_all = [item.memoryUsed for item in nvidia_gpus]
-        memory_used = sum(memory_used_all) / len(memory_used_all)
+        memory_used_mb = sum(memory_used_all) / len(memory_used_all)
 
         memory_total_all = [item.memoryTotal for item in nvidia_gpus]
-        memory_total = sum(memory_total_all) / len(memory_total_all)
+        memory_total_mb = sum(memory_total_all) / len(memory_total_all)
 
-        memory_percentage = (memory_used / memory_total) * 100
+        memory_percentage = (memory_used_mb / memory_total_mb) * 100
 
         load_all = [item.load for item in nvidia_gpus]
         load = (sum(load_all) / len(load_all)) * 100
 
         temperature_all = [item.temperature for item in nvidia_gpus]
        temperature = sum(temperature_all) / len(temperature_all)
 
-        display_gpu_stats(load, memory_percentage, memory_used, temperature)
+        display_gpu_stats(load, memory_percentage, memory_used_mb, temperature)
 
     @staticmethod
     def is_available():
@@ -317,20 +319,20 @@ def stats():
                 i = i + 1
 
             memory_used_all = [item.query_vram_usage() for item in amd_gpus]
-            memory_used = sum(memory_used_all) / len(memory_used_all)
+            memory_used_bytes = sum(memory_used_all) / len(memory_used_all)
 
             memory_total_all = [item.memory_info["vram_size"] for item in amd_gpus]
-            memory_total = sum(memory_total_all) / len(memory_total_all)
+            memory_total_bytes = sum(memory_total_all) / len(memory_total_all)
 
-            memory_percentage = (memory_used / memory_total) * 100
+            memory_percentage = (memory_used_bytes / memory_total_bytes) * 100
 
             load_all = [item.query_load() for item in amd_gpus]
             load = (sum(load_all) / len(load_all)) * 100
 
             temperature_all = [item.query_temperature() for item in amd_gpus]
             temperature = sum(temperature_all) / len(temperature_all)
 
-            display_gpu_stats(load, memory_percentage, memory_used, temperature)
+            display_gpu_stats(load, memory_percentage, memory_used_bytes / 1000000, temperature)
         elif pyadl:
             amd_gpus = pyadl.ADLManager.getInstance().getDevices()
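For context, the gist of the fix: GPUtil reports memoryUsed and memoryTotal in megabytes, whereas pyamdgpuinfo's query_vram_usage() and memory_info["vram_size"] report bytes. The display helper therefore now expects a value already in MB, and only the AMD path converts before calling it. A minimal illustrative sketch of that normalization (the average() helper and function names are hypothetical, not the project's actual code):

def average(values):
    # Average a metric across all detected GPUs
    return sum(values) / len(values)

def nvidia_memory_used_mb(nvidia_gpus):
    # GPUtil already reports megabytes, so the value can be displayed as-is
    return average([gpu.memoryUsed for gpu in nvidia_gpus])

def amd_memory_used_mb(amd_gpus):
    # pyamdgpuinfo reports bytes, so convert to megabytes before display
    return average([gpu.query_vram_usage() for gpu in amd_gpus]) / 1000000

Either way, display_gpu_stats() can then format the figure directly with f"{int(memory_used_mb):>5} M" and no second conversion, which is what the diff above changes.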
