@@ -124,16 +124,16 @@ def _apply_kernel_optimizations(self):
         if self.use_flash_attention_kernel:
             logger.info(" ✓ Flash Attention 2 (loaded with model)")
 
-        if self.use_rmsnorm_kernel:
-            try:
-                from sgl_kernel.elementwise import rmsnorm
-
-                self._rmsnorm_kernel = rmsnorm
-                self._replace_rmsnorm_with_kernel()
-                logger.info(" ✓ RMSNorm kernel integrated (from sgl_kernel)")
-            except ImportError as e:
-                logger.warning(f" ✗ Failed to import sgl_kernel: {e}. RMSNorm optimization disabled.")
-                self.use_rmsnorm_kernel = False
+        if self.use_rmsnorm_kernel:
+            try:
+                from sgl_kernel.elementwise import rmsnorm
+
+                self._rmsnorm_kernel = rmsnorm
+                self._replace_rmsnorm_with_kernel()
+                logger.info(" ✓ RMSNorm kernel integrated (from sgl_kernel)")
+            except ImportError as e:
+                logger.warning(f" ✗ Failed to import sgl_kernel: {e}. RMSNorm optimization disabled.")
+                self.use_rmsnorm_kernel = False
 
     def _replace_rmsnorm_with_kernel(self):
         """Replace RMSNorm layers with fused kernel"""
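For context, a minimal sketch of what a _replace_rmsnorm_with_kernel helper could look like. This is not the PR's actual implementation: the FusedRMSNorm wrapper, the name-based module matching, and the assumed rmsnorm(hidden_states, weight, eps) call signature are all illustrative assumptions; check the installed sgl_kernel version for the real kernel interface.

import torch
import torch.nn as nn

# Illustrative sketch only -- not the PR's actual code. Assumes
# sgl_kernel.elementwise.rmsnorm(hidden_states, weight, eps) returns the
# normalized tensor; the real signature may differ between versions.
from sgl_kernel.elementwise import rmsnorm


class FusedRMSNorm(nn.Module):
    """Drop-in replacement that routes forward() through the fused kernel."""

    def __init__(self, weight: torch.Tensor, eps: float):
        super().__init__()
        self.weight = nn.Parameter(weight)
        self.variance_epsilon = eps

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        return rmsnorm(hidden_states, self.weight, self.variance_epsilon)


def replace_rmsnorm_with_kernel(model: nn.Module) -> int:
    """Swap every child module whose class name contains 'RMSNorm'.

    Matching by class name is a heuristic for illustration; a real
    implementation would target the model's known norm classes directly.
    Returns the number of modules replaced.
    """
    replaced = 0
    for parent in model.modules():
        for child_name, child in parent.named_children():
            if "RMSNorm" in type(child).__name__:
                # Reuse the existing weight and epsilon so numerics match.
                eps = getattr(child, "variance_epsilon", 1e-6)
                setattr(parent, child_name, FusedRMSNorm(child.weight.data, eps))
                replaced += 1
    return replaced

Guarding the import with try/except ImportError, as the diff does, lets the model fall back to the stock PyTorch RMSNorm when sgl_kernel is not installed instead of failing at startup.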