Skip to content

Commit 08a021b

Browse files
levendlee authored and facebook-github-bot committed
Issue warnings once. (#4164)
Summary: Pull Request resolved: #4164 X-link: facebookresearch/FBGEMM#1244 Issue warnings once to avoid repeated warnings in log. Reviewed By: jianyuh Differential Revision: D75161332 fbshipit-source-id: 41fc61804a41bb92c308c037bbb84a7b55cee7a8
1 parent d68987c commit 08a021b

File tree

1 file changed

+8
-8
lines changed

1 file changed

+8
-8
lines changed

fbgemm_gpu/experimental/gemm/triton_gemm/grouped_gemm.py

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88

99
import functools
1010
import inspect
11-
import logging
11+
import warnings
1212

1313
from typing import Optional
1414

@@ -20,7 +20,6 @@
2020
from fbgemm_gpu.experimental.gemm.triton_gemm import utils
2121
from triton.runtime import driver # @manual
2222

23-
logger: logging.Logger = logging.getLogger(__name__)
2423

2524
_NV_CONFIGS = [
2625
triton.Config(
@@ -940,6 +939,9 @@ def _fbgemm_grouped_gemm_fp8_rowwise_ws(
940939
iterated_tiles += num_tiles
941940

942941

942+
warnings.simplefilter("once")
943+
944+
943945
def _grouped_gemm(
944946
*,
945947
x: torch.Tensor,
@@ -958,21 +960,19 @@ def _grouped_gemm(
958960

959961
if USE_TMA_LOAD and not utils.HAS_TMA_DESC:
960962
USE_TMA_LOAD = False
961-
logging.warning("TMA load is disabled as there is no TMA descriptor support!")
963+
warnings.warn("TMA load is disabled as there is no TMA descriptor support!")
962964

963965
if USE_TMA_STORE and not utils.HAS_TMA_DESC:
964966
USE_TMA_STORE = False
965-
logging.warning("TMA store is disabled as there is no TMA descriptor support!")
967+
warnings.warn("TMA store is disabled as there is no TMA descriptor support!")
966968

967969
# TODO(shikaili): Check the readniess of WS on ROCm side in Meta's Triton.
968970
if use_warp_specialization and torch.version.hip:
969-
logging.warning(
970-
"Warp specialization is disabled as it is not supported on ROCm."
971-
)
971+
warnings.warn("Warp specialization is disabled as it is not supported on ROCm.")
972972
use_warp_specialization = False
973973

974974
if use_warp_specialization and not _HAS_WS_SUPPORT:
975-
logging.warning(
975+
warnings.warn(
976976
"Warp specialization is disabled as the Triton build in current environment doesn't have such support. Please build from https://github.com/facebookexperimental/triton/tree/ws-3.2.x to enable it for best performance on Nvidia's SM90 GPUs."
977977
)
978978
use_warp_specialization = False

0 commit comments

Comments
 (0)