# syntax=docker/dockerfile:1
# GPU development image: layers NVIDIA container-toolkit support on top of
# the base Rocky Linux dev environment (rocky_dev). The resulting image keeps
# the base image's SSH entrypoint and adds a /usr/local/bin/gpu-test.sh
# diagnostic script plus a /workspace directory for GPU workloads.
#
# NOTE(review): GPU access at runtime still requires the host to run the
# NVIDIA driver and the container to be started with `--gpus` (or the
# nvidia runtime) — this image only provides the in-container tooling.
FROM rocky_dev:latest

# GPU-adjacent system packages:
#   kernel-headers / kernel-devel — needed if any driver/DKMS-style builds
#                                   happen inside the container
#   pciutils                      — provides lspci, used by gpu-test.sh
RUN dnf update -y && \
    dnf install -y kernel-headers kernel-devel pciutils && \
    dnf clean all

# Register NVIDIA's libnvidia-container repository and install the toolkit.
# `dnf config-manager` lives in dnf-plugins-core, which is not guaranteed to
# be in the base image — install it explicitly so the build cannot fail on a
# missing plugin.
RUN dnf install -y dnf-plugins-core && \
    dnf config-manager --add-repo https://nvidia.github.io/libnvidia-container/stable/rpm/nvidia-container-toolkit.repo && \
    dnf install -y nvidia-container-toolkit && \
    dnf clean all

# Standard NVIDIA container-runtime hints: expose all GPUs, with compute
# (CUDA) and utility (nvidia-smi) capabilities.
ENV NVIDIA_VISIBLE_DEVICES=all
ENV NVIDIA_DRIVER_CAPABILITIES=compute,utility

# GPU diagnostic script. Written via a BuildKit heredoc (quoted delimiter, so
# nothing is expanded at build time) instead of a chain of `echo >>` appends;
# --chmod makes it executable without a separate RUN layer.
COPY --chmod=755 <<'EOF' /usr/local/bin/gpu-test.sh
#!/bin/bash
echo "=== System Information ==="
cat /etc/rocky-release
echo
echo "=== PCI Devices (GPUs) ==="
lspci | grep -i nvidia
echo
echo "=== NVIDIA SMI ==="
if command -v nvidia-smi &> /dev/null; then
  nvidia-smi
else
  echo "nvidia-smi not found. GPU might not be accessible inside container."
fi
EOF

# Scratch area for GPU workloads.
RUN mkdir -p /workspace

# Preserve the base image's contract: root home as workdir, SSH daemon in the
# foreground (-D) logging to stderr (-e) on port 22.
WORKDIR /root
EXPOSE 22
CMD ["/usr/sbin/sshd", "-D", "-e"]