[ROCm] [AITER] [Bugfix] Patch for AITER commit 648764942e552a8bb5fe16026703716a81f05374 (#18990)

Signed-off-by: tjtanaa <tunjian.tan@embeddedllm.com>
Author: TJian
Date: 2025-06-18 22:56:31 -07:00
Committed by: GitHub
Parent: 83ca9ae47b
Commit: 2de12be428
2 changed files with 4 additions and 3 deletions


@@ -12,7 +12,7 @@ ARG PYTORCH_REPO="https://github.com/pytorch/pytorch.git"
 ARG PYTORCH_VISION_REPO="https://github.com/pytorch/vision.git"
 ARG FA_BRANCH="1a7f4dfa"
 ARG FA_REPO="https://github.com/Dao-AILab/flash-attention.git"
-ARG AITER_BRANCH="c1debd8"
+ARG AITER_BRANCH="6487649"
 ARG AITER_REPO="https://github.com/ROCm/aiter.git"

 FROM ${BASE_IMAGE} AS base
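
The Docker-side change is only a version pin: AITER_BRANCH moves from c1debd8 to 6487649, the short hash of the AITER commit named in the title. As a rough sketch of how such a pin is commonly consumed later in a ROCm base image; the actual build steps are not part of this diff, and the stage name and wheel-build command below are assumptions:

# Illustrative sketch only, not the real Dockerfile.rocm_base build steps.
FROM base AS build_aiter
ARG AITER_BRANCH
ARG AITER_REPO
RUN git clone ${AITER_REPO} aiter \
    && cd aiter \
    && git checkout ${AITER_BRANCH} \
    && git submodule update --init --recursive \
    && pip wheel --no-deps --wheel-dir /install .

Pinning a specific commit rather than a branch keeps the image reproducible, which is why the hash has to be bumped explicitly whenever vLLM needs a newer AITER.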


@@ -22,8 +22,9 @@ class QuantMethod(IntEnum):
     NO = 0 # a16w16
     PER_TENSOR = 1 # w8a8 (pre_Tensor)
     PER_TOKEN = 2 # w8a8/w8a4 (per_Token)
-    BLOCK_1X128 = 3 # block quantized w8a8 (per_1x128)
-    BLOCK_128x128 = 4 # block quantized w8a8 (per_128x128)
+    BLOCK_1X32 = 3 # fp4x2
+    BLOCK_1X128 = 4 # block quantized w8a8 (per_1x128)
+    BLOCK_128x128 = 5 # block quantized w8a8 (per_128x128)


 class ActivationMethod(IntEnum):
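
The functional fix is on this side: the AITER commit in the title inserts a new BLOCK_1X32 (fp4x2) quantization type at value 3, shifting the block-quantized w8a8 entries to 4 and 5. Because these integer values are what vLLM hands across to the AITER kernels, the Python-side mirror of the enum has to be re-synchronized, otherwise the wrong quantization path would be requested. A minimal self-contained sketch of the effect, assuming a hypothetical select_quant_method() helper for illustration (not vLLM's actual API):

from enum import IntEnum


class QuantMethod(IntEnum):
    """Mirror of AITER's quantization-method ordering (values from the diff above)."""
    NO = 0              # a16w16
    PER_TENSOR = 1      # w8a8 (per-tensor)
    PER_TOKEN = 2       # w8a8/w8a4 (per-token)
    BLOCK_1X32 = 3      # fp4x2
    BLOCK_1X128 = 4     # block quantized w8a8 (per_1x128)
    BLOCK_128x128 = 5   # block quantized w8a8 (per_128x128)


def select_quant_method(block_scaled_fp8: bool, per_token: bool) -> QuantMethod:
    """Hypothetical helper: pick the enum member whose value is passed to AITER."""
    if block_scaled_fp8:
        return QuantMethod.BLOCK_1X128
    return QuantMethod.PER_TOKEN if per_token else QuantMethod.PER_TENSOR


# Before this patch the same member carried the value 3; after AITER inserted
# BLOCK_1X32 = 3, an un-synchronized enum would have selected the fp4x2 path.
assert select_quant_method(block_scaled_fp8=True, per_token=False).value == 4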