Files
rocm-faster-whisper/Dockerfile
scott f5a19920d7 Fix all ROCm 6.2 hipBLAS API incompatibilities
ROCm 6.2 hipblasGemmEx uses hipblasDatatype_t for the compute type arg,
not hipblasComputeType_t (that came in ROCm 7). Map compute type and
HIPBLAS_COMPUTE_* constants down to the old hipblasDatatype_t/HIPBLAS_R_*
equivalents. Also fix HIPBLAS_R_16B (not 16BF) and add rocrand-dev.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-02 21:46:56 -04:00

106 lines
4.1 KiB
Docker

# Build-time knobs shared by both stages. ARGs declared before the first FROM
# are only visible in FROM lines; stages re-declare the ones they use.
ARG ROCM_VERSION=6.2
# Semicolon-separated AMD GPU ISA list passed to the HIP compiler
# (gfx1030/gfx1031 are RDNA2 parts -- confirm against the target card).
ARG GPU_ARCH="gfx1030;gfx1031"
# oneDNN release tag to fetch and build statically in stage 1.
ARG ONEDNN_VERSION=3.1.1
# ── Stage 1: Build CTranslate2 wheel ─────────────────────────────────────────
FROM rocm/dev-ubuntu-22.04:${ROCM_VERSION} AS builder

# Re-declare the global ARGs so they are visible inside this stage.
ARG GPU_ARCH
ARG ONEDNN_VERSION

# Build toolchain plus the ROCm development headers CTranslate2's HIP build
# links against. One package per line, alphabetized, for easier diffs; the apt
# list cache is removed in the same layer so it never persists in the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
        cmake \
        cmake-curses-gui \
        git \
        hipblas-dev \
        hipcub-dev \
        hiprand-dev \
        libopenblas-dev \
        miopen-hip-dev \
        python3-dev \
        python3-pip \
        rocrand-dev \
        rocthrust-dev \
        wget \
    && rm -rf /var/lib/apt/lists/*
# Build oneDNN as a static library (CPU fallback / conv ops)
WORKDIR /build
# Fetch oneDNN v${ONEDNN_VERSION}, build it as a static, inference-only
# library with just the CONVOLUTION and REORDER primitives, install it to the
# default prefix, then delete the source tree in the same RUN so it never
# persists in a layer.
# NOTE(review): the tarball is downloaded without a checksum -- consider
# pinning a sha256 (wget + sha256sum -c) for reproducible builds.
RUN wget -q https://github.com/oneapi-src/oneDNN/archive/refs/tags/v${ONEDNN_VERSION}.tar.gz && \
tar xf v${ONEDNN_VERSION}.tar.gz && \
rm v${ONEDNN_VERSION}.tar.gz && \
cd oneDNN-${ONEDNN_VERSION} && \
# in-source configure ("." at the end); examples/tests/graph API disabled to
# keep the build small and fast
cmake -DCMAKE_BUILD_TYPE=Release \
-DONEDNN_LIBRARY_TYPE=STATIC \
-DONEDNN_BUILD_EXAMPLES=OFF \
-DONEDNN_BUILD_TESTS=OFF \
-DONEDNN_ENABLE_WORKLOAD=INFERENCE \
"-DONEDNN_ENABLE_PRIMITIVE=CONVOLUTION;REORDER" \
-DONEDNN_BUILD_GRAPH=OFF . && \
make -j$(nproc) install && \
cd /build && rm -rf oneDNN-${ONEDNN_VERSION}
# Build CTranslate2 with ROCm/HIP
# Install prefix for the CTranslate2 native library; the runtime stage copies
# this whole tree and sets the same variable.
ENV CTRANSLATE2_ROOT=/opt/ctranslate2
# Repo-local patch that ports CTranslate2 v4.0.0 to ROCm/HIP.
COPY patches/ct2_4.0.0_rocm.patch /tmp/
# Clone CTranslate2 v4.0.0, apply the ROCm patch, then rewrite the hipify
# macro table down to the ROCm 6.2 hipBLAS API: in 6.2, hipblasGemmEx still
# takes hipblasDatatype_t (HIPBLAS_R_*) for BOTH the data-type and the
# compute-type arguments; the hipblasComputeType_t / HIPBLAS_COMPUTE_* names
# the patch targets only exist from ROCm 7 on (see commit message above).
# Finally configure and build. WITH_CUDA/WITH_CUDNN=ON -- presumably the
# patch hipifies the CUDA code paths (TODO confirm against
# ct2_4.0.0_rocm.patch).
RUN git clone --branch v4.0.0 --depth 1 \
https://github.com/OpenNMT/CTranslate2.git /build/CTranslate2 && \
cd /build/CTranslate2 && \
git submodule update --init --recursive && \
git apply /tmp/ct2_4.0.0_rocm.patch && \
sed -i \
-e 's/hipDataType/hipblasDatatype_t/g' \
-e 's/hipblasComputeType_t/hipblasDatatype_t/g' \
-e 's/HIP_R_16F/HIPBLAS_R_16F/g' \
-e 's/HIP_R_32F/HIPBLAS_R_32F/g' \
# ROCm 6.2 spells bfloat16 HIPBLAS_R_16B (no trailing F)
-e 's/HIP_R_16BF/HIPBLAS_R_16B/g' \
-e 's/HIP_R_32I/HIPBLAS_R_32I/g' \
-e 's/HIP_R_8I/HIPBLAS_R_8I/g' \
-e 's/HIPBLAS_COMPUTE_16F/HIPBLAS_R_16F/g' \
# the two *_FAST_* patterns must precede plain HIPBLAS_COMPUTE_32F, which is
# a prefix (substring) of both
-e 's/HIPBLAS_COMPUTE_32F_FAST_16BF/HIPBLAS_R_32F/g' \
-e 's/HIPBLAS_COMPUTE_32F_FAST_16F/HIPBLAS_R_32F/g' \
-e 's/HIPBLAS_COMPUTE_32F/HIPBLAS_R_32F/g' \
-e 's/HIPBLAS_COMPUTE_32I/HIPBLAS_R_32I/g' \
src/cuda2hip_macros.hpp && \
mkdir build && cd build && \
# out-of-source build; all three GPU-arch variables are set because different
# ROCm/CMake versions honor different ones
cmake -DCMAKE_INSTALL_PREFIX=${CTRANSLATE2_ROOT} \
-DCMAKE_PREFIX_PATH=/opt/rocm \
-DWITH_CUDA=ON \
-DWITH_CUDNN=ON \
-DWITH_MKL=OFF \
-DWITH_DNNL=ON \
-DOPENMP_RUNTIME=COMP \
-DCMAKE_HIP_ARCHITECTURES="${GPU_ARCH}" \
-DGPU_TARGETS="${GPU_ARCH}" \
-DAMDGPU_TARGETS="${GPU_ARCH}" \
-DCMAKE_BUILD_TYPE=Release \
-DGPU_RUNTIME=HIP \
-DWITH_OPENBLAS=ON \
-DENABLE_CPU_DISPATCH=OFF \
-DBUILD_TESTS=OFF .. && \
make -j$(nproc) install
# Build Python wheel
# Package the Python bindings against the library installed above; the wheel
# lands in /wheels for the runtime stage to install.
# NOTE(review): `setup.py bdist_wheel` is deprecated upstream -- consider
# `pip wheel --no-deps -w /wheels .` when bumping CTranslate2.
RUN cd /build/CTranslate2/python && \
pip install --no-cache-dir -r install_requirements.txt && \
python3 setup.py bdist_wheel --dist-dir /wheels
# ── Stage 2: Runtime ──────────────────────────────────────────────────────────
FROM rocm/dev-ubuntu-22.04:${ROCM_VERSION}
# Must match the install prefix used by the builder stage.
ENV CTRANSLATE2_ROOT=/opt/ctranslate2
# Append the CTranslate2 lib dir to the loader path. The ${VAR:+...} guard
# avoids emitting a leading ":" (an empty element, which the dynamic loader
# treats as the current working directory) when the base image does not
# define LD_LIBRARY_PATH.
ENV LD_LIBRARY_PATH="${LD_LIBRARY_PATH:+${LD_LIBRARY_PATH}:}${CTRANSLATE2_ROOT}/lib"
# ROCm HIP allocator settings for stable inference (CT2's CUDA-named env vars
# drive the hipified device path -- confirm against the patched build)
ENV CT2_CUDA_ALLOCATOR=cub_caching
ENV CT2_CUDA_CACHING_ALLOCATOR_CONFIG=4,3,12,419430400
# Artifacts from the builder stage: the CTranslate2 native library tree and
# the Python wheel built against it.
COPY --from=builder /opt/ctranslate2 /opt/ctranslate2
COPY --from=builder /wheels /wheels
# Runtime deps + Python stack. The local CTranslate2 wheel is installed
# BEFORE faster-whisper so pip's resolver is satisfied by it rather than
# pulling the CUDA ctranslate2 from PyPI. A dedicated non-root user is
# created in the video/render groups (required for /dev/kfd + /dev/dri GPU
# access under ROCm; render may not pre-exist, hence groupadd -f) and /data
# is chowned to it, all in one layer.
RUN apt-get update && apt-get install -y --no-install-recommends \
    ffmpeg python3 python3-pip \
    && rm -rf /var/lib/apt/lists/* \
    && pip install --no-cache-dir /wheels/*.whl \
    && pip install --no-cache-dir \
    "faster-whisper>=1.2.1,<2" \
    "wyoming-faster-whisper>=3.1.1" \
    && groupadd -f render \
    && useradd --system --create-home --shell /usr/sbin/nologin wyoming \
    && usermod -aG video,render wyoming \
    && mkdir /data \
    && chown wyoming:wyoming /data
# Model/download cache; declared after the directory is created and chowned.
VOLUME /data
# Wyoming protocol port (documentation only; publish with -p at run time).
EXPOSE 10405
# Drop root for the long-running service (port 10405 needs no privilege).
USER wyoming
# Cheap TCP probe via bash's /dev/tcp; long start period covers the first
# model download.
HEALTHCHECK --interval=30s --timeout=5s --start-period=120s --retries=3 \
  CMD bash -c 'exec 3<>/dev/tcp/127.0.0.1/10405' || exit 1
ENTRYPOINT ["python3", "-m", "wyoming_faster_whisper"]
CMD ["--uri", "tcp://0.0.0.0:10405", "--data-dir", "/data", "--model", "Systran/faster-distil-whisper-small.en", "--device", "cuda", "--compute-type", "float16"]