devcontainer: replace VAULT_HOST with AWS_ROLE_ARN (#187)

* devcontainer: replace VAULT_HOST with AWS_ROLE_ARN

Signed-off-by: Jordan Jacobelli <jjacobelli@nvidia.com>

* Update devcontainers base image to support AWS_ROLE_ARN

Signed-off-by: Jordan Jacobelli <jjacobelli@nvidia.com>

* Bump cuda latest version to 12.6

Signed-off-by: Jordan Jacobelli <jjacobelli@nvidia.com>

* Replace ubuntu18.04 with ubuntu20.04

Ubuntu 18.04 is not supported anymore

Signed-off-by: Jordan Jacobelli <jjacobelli@nvidia.com>

* Use DOOD strategy to keep supporting ubuntu18.04

See https://github.com/NVIDIA/cccl/pull/1779

Signed-off-by: Jordan Jacobelli <jjacobelli@nvidia.com>

---------

Signed-off-by: Jordan Jacobelli <jjacobelli@nvidia.com>
This commit is contained in:
Jordan Jacobelli
2024-10-25 17:49:02 +02:00
committed by GitHub
parent a171514056
commit 92286e1d4a
45 changed files with 513 additions and 189 deletions

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-gcc7-cuda11.1-ubuntu18.04",
"image": "rapidsai/devcontainers:24.12-cpp-gcc7-cuda11.1-ubuntu18.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,7 +12,7 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda11.1-gcc7",
"CCCL_CUDA_VERSION": "11.1",

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-gcc8-cuda11.1-ubuntu18.04",
"image": "rapidsai/devcontainers:24.12-cpp-gcc8-cuda11.1-ubuntu18.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,7 +12,7 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda11.1-gcc8",
"CCCL_CUDA_VERSION": "11.1",

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-gcc9-cuda11.1-ubuntu18.04",
"image": "rapidsai/devcontainers:24.12-cpp-gcc9-cuda11.1-ubuntu18.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,7 +12,7 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda11.1-gcc9",
"CCCL_CUDA_VERSION": "11.1",

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-llvm9-cuda11.1-ubuntu18.04",
"image": "rapidsai/devcontainers:24.12-cpp-llvm9-cuda11.1-ubuntu18.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,7 +12,7 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda11.1-llvm9",
"CCCL_CUDA_VERSION": "11.1",

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-gcc11-cuda11.8-ubuntu22.04",
"image": "rapidsai/devcontainers:24.12-cpp-gcc11-cuda11.8-ubuntu22.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,7 +12,7 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda11.8-gcc11",
"CCCL_CUDA_VERSION": "11.8",

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-gcc10-cuda12.0-ubuntu20.04",
"image": "rapidsai/devcontainers:24.12-cpp-gcc10-cuda12.0-ubuntu20.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,7 +12,7 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda12.0-gcc10",
"CCCL_CUDA_VERSION": "12.0",

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-gcc11-cuda12.0-ubuntu22.04",
"image": "rapidsai/devcontainers:24.12-cpp-gcc11-cuda12.0-ubuntu22.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,7 +12,7 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda12.0-gcc11",
"CCCL_CUDA_VERSION": "12.0",

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-gcc12-cuda12.0-ubuntu22.04",
"image": "rapidsai/devcontainers:24.12-cpp-gcc12-cuda12.0-ubuntu22.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,7 +12,7 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda12.0-gcc12",
"CCCL_CUDA_VERSION": "12.0",

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-gcc7-cuda12.0-ubuntu20.04",
"image": "rapidsai/devcontainers:24.12-cpp-gcc7-cuda12.0-ubuntu20.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,7 +12,7 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda12.0-gcc7",
"CCCL_CUDA_VERSION": "12.0",

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-gcc8-cuda12.0-ubuntu20.04",
"image": "rapidsai/devcontainers:24.12-cpp-gcc8-cuda12.0-ubuntu20.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,7 +12,7 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda12.0-gcc8",
"CCCL_CUDA_VERSION": "12.0",

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-gcc9-cuda12.0-ubuntu20.04",
"image": "rapidsai/devcontainers:24.12-cpp-gcc9-cuda12.0-ubuntu20.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,7 +12,7 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda12.0-gcc9",
"CCCL_CUDA_VERSION": "12.0",

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-llvm10-cuda12.0-ubuntu20.04",
"image": "rapidsai/devcontainers:24.12-cpp-llvm10-cuda12.0-ubuntu20.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,7 +12,7 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda12.0-llvm10",
"CCCL_CUDA_VERSION": "12.0",

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-llvm11-cuda12.0-ubuntu20.04",
"image": "rapidsai/devcontainers:24.12-cpp-llvm11-cuda12.0-ubuntu20.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,7 +12,7 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda12.0-llvm11",
"CCCL_CUDA_VERSION": "12.0",

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-llvm12-cuda12.0-ubuntu20.04",
"image": "rapidsai/devcontainers:24.12-cpp-llvm12-cuda12.0-ubuntu20.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,7 +12,7 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda12.0-llvm12",
"CCCL_CUDA_VERSION": "12.0",

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-llvm13-cuda12.0-ubuntu20.04",
"image": "rapidsai/devcontainers:24.12-cpp-llvm13-cuda12.0-ubuntu20.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,7 +12,7 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda12.0-llvm13",
"CCCL_CUDA_VERSION": "12.0",

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-llvm14-cuda12.0-ubuntu20.04",
"image": "rapidsai/devcontainers:24.12-cpp-llvm14-cuda12.0-ubuntu20.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,7 +12,7 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda12.0-llvm14",
"CCCL_CUDA_VERSION": "12.0",

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-llvm9-cuda12.0-ubuntu20.04",
"image": "rapidsai/devcontainers:24.12-cpp-llvm9-cuda12.0-ubuntu20.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,7 +12,7 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda12.0-llvm9",
"CCCL_CUDA_VERSION": "12.0",

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-gcc10-cuda12.4-ubuntu20.04",
"image": "rapidsai/devcontainers:24.12-cpp-gcc10-cuda12.6-ubuntu20.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,13 +12,13 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda12.4-gcc10",
"CCCL_CUDA_VERSION": "12.4",
"DEVCONTAINER_NAME": "cuda12.6-gcc10",
"CCCL_CUDA_VERSION": "12.6",
"CCCL_HOST_COMPILER": "gcc",
"CCCL_HOST_COMPILER_VERSION": "10",
"CCCL_BUILD_INFIX": "cuda12.4-gcc10"
"CCCL_BUILD_INFIX": "cuda12.6-gcc10"
},
"workspaceFolder": "/home/coder/${localWorkspaceFolderBasename}",
"workspaceMount": "source=${localWorkspaceFolder},target=/home/coder/${localWorkspaceFolderBasename},type=bind,consistency=consistent",
@@ -42,5 +42,5 @@
}
}
},
"name": "cuda12.4-gcc10"
"name": "cuda12.6-gcc10"
}

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-gcc11-cuda12.4-ubuntu22.04",
"image": "rapidsai/devcontainers:24.12-cpp-gcc11-cuda12.6-ubuntu22.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,13 +12,13 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda12.4-gcc11",
"CCCL_CUDA_VERSION": "12.4",
"DEVCONTAINER_NAME": "cuda12.6-gcc11",
"CCCL_CUDA_VERSION": "12.6",
"CCCL_HOST_COMPILER": "gcc",
"CCCL_HOST_COMPILER_VERSION": "11",
"CCCL_BUILD_INFIX": "cuda12.4-gcc11"
"CCCL_BUILD_INFIX": "cuda12.6-gcc11"
},
"workspaceFolder": "/home/coder/${localWorkspaceFolderBasename}",
"workspaceMount": "source=${localWorkspaceFolder},target=/home/coder/${localWorkspaceFolderBasename},type=bind,consistency=consistent",
@@ -42,5 +42,5 @@
}
}
},
"name": "cuda12.4-gcc11"
"name": "cuda12.6-gcc11"
}

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-gcc12-cuda12.4-ubuntu22.04",
"image": "rapidsai/devcontainers:24.12-cpp-gcc12-cuda12.6-ubuntu22.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,13 +12,13 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda12.4-gcc12",
"CCCL_CUDA_VERSION": "12.4",
"DEVCONTAINER_NAME": "cuda12.6-gcc12",
"CCCL_CUDA_VERSION": "12.6",
"CCCL_HOST_COMPILER": "gcc",
"CCCL_HOST_COMPILER_VERSION": "12",
"CCCL_BUILD_INFIX": "cuda12.4-gcc12"
"CCCL_BUILD_INFIX": "cuda12.6-gcc12"
},
"workspaceFolder": "/home/coder/${localWorkspaceFolderBasename}",
"workspaceMount": "source=${localWorkspaceFolder},target=/home/coder/${localWorkspaceFolderBasename},type=bind,consistency=consistent",
@@ -42,5 +42,5 @@
}
}
},
"name": "cuda12.4-gcc12"
"name": "cuda12.6-gcc12"
}

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-gcc7-cuda12.4-ubuntu20.04",
"image": "rapidsai/devcontainers:24.12-cpp-gcc7-cuda12.6-ubuntu20.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,13 +12,13 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda12.4-gcc7",
"CCCL_CUDA_VERSION": "12.4",
"DEVCONTAINER_NAME": "cuda12.6-gcc7",
"CCCL_CUDA_VERSION": "12.6",
"CCCL_HOST_COMPILER": "gcc",
"CCCL_HOST_COMPILER_VERSION": "7",
"CCCL_BUILD_INFIX": "cuda12.4-gcc7"
"CCCL_BUILD_INFIX": "cuda12.6-gcc7"
},
"workspaceFolder": "/home/coder/${localWorkspaceFolderBasename}",
"workspaceMount": "source=${localWorkspaceFolder},target=/home/coder/${localWorkspaceFolderBasename},type=bind,consistency=consistent",
@@ -42,5 +42,5 @@
}
}
},
"name": "cuda12.4-gcc7"
"name": "cuda12.6-gcc7"
}

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-gcc8-cuda12.4-ubuntu20.04",
"image": "rapidsai/devcontainers:24.12-cpp-gcc8-cuda12.6-ubuntu20.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,13 +12,13 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda12.4-gcc8",
"CCCL_CUDA_VERSION": "12.4",
"DEVCONTAINER_NAME": "cuda12.6-gcc8",
"CCCL_CUDA_VERSION": "12.6",
"CCCL_HOST_COMPILER": "gcc",
"CCCL_HOST_COMPILER_VERSION": "8",
"CCCL_BUILD_INFIX": "cuda12.4-gcc8"
"CCCL_BUILD_INFIX": "cuda12.6-gcc8"
},
"workspaceFolder": "/home/coder/${localWorkspaceFolderBasename}",
"workspaceMount": "source=${localWorkspaceFolder},target=/home/coder/${localWorkspaceFolderBasename},type=bind,consistency=consistent",
@@ -42,5 +42,5 @@
}
}
},
"name": "cuda12.4-gcc8"
"name": "cuda12.6-gcc8"
}

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-gcc9-cuda12.4-ubuntu20.04",
"image": "rapidsai/devcontainers:24.12-cpp-gcc9-cuda12.6-ubuntu20.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,13 +12,13 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda12.4-gcc9",
"CCCL_CUDA_VERSION": "12.4",
"DEVCONTAINER_NAME": "cuda12.6-gcc9",
"CCCL_CUDA_VERSION": "12.6",
"CCCL_HOST_COMPILER": "gcc",
"CCCL_HOST_COMPILER_VERSION": "9",
"CCCL_BUILD_INFIX": "cuda12.4-gcc9"
"CCCL_BUILD_INFIX": "cuda12.6-gcc9"
},
"workspaceFolder": "/home/coder/${localWorkspaceFolderBasename}",
"workspaceMount": "source=${localWorkspaceFolder},target=/home/coder/${localWorkspaceFolderBasename},type=bind,consistency=consistent",
@@ -42,5 +42,5 @@
}
}
},
"name": "cuda12.4-gcc9"
"name": "cuda12.6-gcc9"
}

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-llvm10-cuda12.4-ubuntu20.04",
"image": "rapidsai/devcontainers:24.12-cpp-llvm10-cuda12.6-ubuntu20.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,13 +12,13 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda12.4-llvm10",
"CCCL_CUDA_VERSION": "12.4",
"DEVCONTAINER_NAME": "cuda12.6-llvm10",
"CCCL_CUDA_VERSION": "12.6",
"CCCL_HOST_COMPILER": "llvm",
"CCCL_HOST_COMPILER_VERSION": "10",
"CCCL_BUILD_INFIX": "cuda12.4-llvm10"
"CCCL_BUILD_INFIX": "cuda12.6-llvm10"
},
"workspaceFolder": "/home/coder/${localWorkspaceFolderBasename}",
"workspaceMount": "source=${localWorkspaceFolder},target=/home/coder/${localWorkspaceFolderBasename},type=bind,consistency=consistent",
@@ -42,5 +42,5 @@
}
}
},
"name": "cuda12.4-llvm10"
"name": "cuda12.6-llvm10"
}

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-llvm11-cuda12.4-ubuntu20.04",
"image": "rapidsai/devcontainers:24.12-cpp-llvm11-cuda12.6-ubuntu20.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,13 +12,13 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda12.4-llvm11",
"CCCL_CUDA_VERSION": "12.4",
"DEVCONTAINER_NAME": "cuda12.6-llvm11",
"CCCL_CUDA_VERSION": "12.6",
"CCCL_HOST_COMPILER": "llvm",
"CCCL_HOST_COMPILER_VERSION": "11",
"CCCL_BUILD_INFIX": "cuda12.4-llvm11"
"CCCL_BUILD_INFIX": "cuda12.6-llvm11"
},
"workspaceFolder": "/home/coder/${localWorkspaceFolderBasename}",
"workspaceMount": "source=${localWorkspaceFolder},target=/home/coder/${localWorkspaceFolderBasename},type=bind,consistency=consistent",
@@ -42,5 +42,5 @@
}
}
},
"name": "cuda12.4-llvm11"
"name": "cuda12.6-llvm11"
}

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-llvm12-cuda12.4-ubuntu20.04",
"image": "rapidsai/devcontainers:24.12-cpp-llvm12-cuda12.6-ubuntu20.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,13 +12,13 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda12.4-llvm12",
"CCCL_CUDA_VERSION": "12.4",
"DEVCONTAINER_NAME": "cuda12.6-llvm12",
"CCCL_CUDA_VERSION": "12.6",
"CCCL_HOST_COMPILER": "llvm",
"CCCL_HOST_COMPILER_VERSION": "12",
"CCCL_BUILD_INFIX": "cuda12.4-llvm12"
"CCCL_BUILD_INFIX": "cuda12.6-llvm12"
},
"workspaceFolder": "/home/coder/${localWorkspaceFolderBasename}",
"workspaceMount": "source=${localWorkspaceFolder},target=/home/coder/${localWorkspaceFolderBasename},type=bind,consistency=consistent",
@@ -42,5 +42,5 @@
}
}
},
"name": "cuda12.4-llvm12"
"name": "cuda12.6-llvm12"
}

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-llvm13-cuda12.4-ubuntu20.04",
"image": "rapidsai/devcontainers:24.12-cpp-llvm13-cuda12.6-ubuntu20.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,13 +12,13 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda12.4-llvm13",
"CCCL_CUDA_VERSION": "12.4",
"DEVCONTAINER_NAME": "cuda12.6-llvm13",
"CCCL_CUDA_VERSION": "12.6",
"CCCL_HOST_COMPILER": "llvm",
"CCCL_HOST_COMPILER_VERSION": "13",
"CCCL_BUILD_INFIX": "cuda12.4-llvm13"
"CCCL_BUILD_INFIX": "cuda12.6-llvm13"
},
"workspaceFolder": "/home/coder/${localWorkspaceFolderBasename}",
"workspaceMount": "source=${localWorkspaceFolder},target=/home/coder/${localWorkspaceFolderBasename},type=bind,consistency=consistent",
@@ -42,5 +42,5 @@
}
}
},
"name": "cuda12.4-llvm13"
"name": "cuda12.6-llvm13"
}

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-llvm14-cuda12.4-ubuntu20.04",
"image": "rapidsai/devcontainers:24.12-cpp-llvm14-cuda12.6-ubuntu20.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,13 +12,13 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda12.4-llvm14",
"CCCL_CUDA_VERSION": "12.4",
"DEVCONTAINER_NAME": "cuda12.6-llvm14",
"CCCL_CUDA_VERSION": "12.6",
"CCCL_HOST_COMPILER": "llvm",
"CCCL_HOST_COMPILER_VERSION": "14",
"CCCL_BUILD_INFIX": "cuda12.4-llvm14"
"CCCL_BUILD_INFIX": "cuda12.6-llvm14"
},
"workspaceFolder": "/home/coder/${localWorkspaceFolderBasename}",
"workspaceMount": "source=${localWorkspaceFolder},target=/home/coder/${localWorkspaceFolderBasename},type=bind,consistency=consistent",
@@ -42,5 +42,5 @@
}
}
},
"name": "cuda12.4-llvm14"
"name": "cuda12.6-llvm14"
}

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-llvm15-cuda12.4-ubuntu22.04",
"image": "rapidsai/devcontainers:24.12-cpp-llvm15-cuda12.6-ubuntu22.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,13 +12,13 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda12.4-llvm15",
"CCCL_CUDA_VERSION": "12.4",
"DEVCONTAINER_NAME": "cuda12.6-llvm15",
"CCCL_CUDA_VERSION": "12.6",
"CCCL_HOST_COMPILER": "llvm",
"CCCL_HOST_COMPILER_VERSION": "15",
"CCCL_BUILD_INFIX": "cuda12.4-llvm15"
"CCCL_BUILD_INFIX": "cuda12.6-llvm15"
},
"workspaceFolder": "/home/coder/${localWorkspaceFolderBasename}",
"workspaceMount": "source=${localWorkspaceFolder},target=/home/coder/${localWorkspaceFolderBasename},type=bind,consistency=consistent",
@@ -42,5 +42,5 @@
}
}
},
"name": "cuda12.4-llvm15"
"name": "cuda12.6-llvm15"
}

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-llvm16-cuda12.4-ubuntu22.04",
"image": "rapidsai/devcontainers:24.12-cpp-llvm16-cuda12.6-ubuntu22.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,13 +12,13 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda12.4-llvm16",
"CCCL_CUDA_VERSION": "12.4",
"DEVCONTAINER_NAME": "cuda12.6-llvm16",
"CCCL_CUDA_VERSION": "12.6",
"CCCL_HOST_COMPILER": "llvm",
"CCCL_HOST_COMPILER_VERSION": "16",
"CCCL_BUILD_INFIX": "cuda12.4-llvm16"
"CCCL_BUILD_INFIX": "cuda12.6-llvm16"
},
"workspaceFolder": "/home/coder/${localWorkspaceFolderBasename}",
"workspaceMount": "source=${localWorkspaceFolder},target=/home/coder/${localWorkspaceFolderBasename},type=bind,consistency=consistent",
@@ -42,5 +42,5 @@
}
}
},
"name": "cuda12.4-llvm16"
"name": "cuda12.6-llvm16"
}

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-llvm17-cuda12.4-ubuntu22.04",
"image": "rapidsai/devcontainers:24.12-cpp-llvm17-cuda12.6-ubuntu22.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,13 +12,13 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda12.4-llvm17",
"CCCL_CUDA_VERSION": "12.4",
"DEVCONTAINER_NAME": "cuda12.6-llvm17",
"CCCL_CUDA_VERSION": "12.6",
"CCCL_HOST_COMPILER": "llvm",
"CCCL_HOST_COMPILER_VERSION": "17",
"CCCL_BUILD_INFIX": "cuda12.4-llvm17"
"CCCL_BUILD_INFIX": "cuda12.6-llvm17"
},
"workspaceFolder": "/home/coder/${localWorkspaceFolderBasename}",
"workspaceMount": "source=${localWorkspaceFolder},target=/home/coder/${localWorkspaceFolderBasename},type=bind,consistency=consistent",
@@ -42,5 +42,5 @@
}
}
},
"name": "cuda12.4-llvm17"
"name": "cuda12.6-llvm17"
}

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-llvm18-cuda12.4-ubuntu22.04",
"image": "rapidsai/devcontainers:24.12-cpp-llvm18-cuda12.6-ubuntu22.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,13 +12,13 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda12.4-llvm18",
"CCCL_CUDA_VERSION": "12.4",
"DEVCONTAINER_NAME": "cuda12.6-llvm18",
"CCCL_CUDA_VERSION": "12.6",
"CCCL_HOST_COMPILER": "llvm",
"CCCL_HOST_COMPILER_VERSION": "18",
"CCCL_BUILD_INFIX": "cuda12.4-llvm18"
"CCCL_BUILD_INFIX": "cuda12.6-llvm18"
},
"workspaceFolder": "/home/coder/${localWorkspaceFolderBasename}",
"workspaceMount": "source=${localWorkspaceFolder},target=/home/coder/${localWorkspaceFolderBasename},type=bind,consistency=consistent",
@@ -42,5 +42,5 @@
}
}
},
"name": "cuda12.4-llvm18"
"name": "cuda12.6-llvm18"
}

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-llvm9-cuda12.4-ubuntu20.04",
"image": "rapidsai/devcontainers:24.12-cpp-llvm9-cuda12.6-ubuntu20.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,13 +12,13 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda12.4-llvm9",
"CCCL_CUDA_VERSION": "12.4",
"DEVCONTAINER_NAME": "cuda12.6-llvm9",
"CCCL_CUDA_VERSION": "12.6",
"CCCL_HOST_COMPILER": "llvm",
"CCCL_HOST_COMPILER_VERSION": "9",
"CCCL_BUILD_INFIX": "cuda12.4-llvm9"
"CCCL_BUILD_INFIX": "cuda12.6-llvm9"
},
"workspaceFolder": "/home/coder/${localWorkspaceFolderBasename}",
"workspaceMount": "source=${localWorkspaceFolder},target=/home/coder/${localWorkspaceFolderBasename},type=bind,consistency=consistent",
@@ -42,5 +42,5 @@
}
}
},
"name": "cuda12.4-llvm9"
"name": "cuda12.6-llvm9"
}

View File

@@ -1,6 +1,6 @@
{
"shutdownAction": "stopContainer",
"image": "rapidsai/devcontainers:24.06-cpp-gcc12-cuda12.4-ubuntu22.04",
"image": "rapidsai/devcontainers:24.12-cpp-gcc12-cuda12.6-ubuntu22.04",
"hostRequirements": {
"gpu": "optional"
},
@@ -12,13 +12,13 @@
"containerEnv": {
"SCCACHE_REGION": "us-east-2",
"SCCACHE_BUCKET": "rapids-sccache-devs",
"VAULT_HOST": "https://vault.ops.k8s.rapids.ai",
"AWS_ROLE_ARN": "arn:aws:iam::279114543810:role/nv-gha-token-sccache-devs",
"HISTFILE": "${containerWorkspaceFolder}/.cache/._bash_history",
"DEVCONTAINER_NAME": "cuda12.4-gcc12",
"CCCL_CUDA_VERSION": "12.4",
"DEVCONTAINER_NAME": "cuda12.6-gcc12",
"CCCL_CUDA_VERSION": "12.6",
"CCCL_HOST_COMPILER": "gcc",
"CCCL_HOST_COMPILER_VERSION": "12",
"CCCL_BUILD_INFIX": "cuda12.4-gcc12"
"CCCL_BUILD_INFIX": "cuda12.6-gcc12"
},
"workspaceFolder": "/home/coder/${localWorkspaceFolderBasename}",
"workspaceMount": "source=${localWorkspaceFolder},target=/home/coder/${localWorkspaceFolderBasename},type=bind,consistency=consistent",
@@ -42,5 +42,5 @@
}
}
},
"name": "cuda12.4-gcc12"
"name": "cuda12.6-gcc12"
}

View File

@@ -0,0 +1,49 @@
#!/usr/bin/env bash
# Maybe change the UID/GID of the container's non-root user to match the host's UID/GID
#
# NEW_UID/NEW_GID are injected by the launcher (see the `--env NEW_UID="$(id -u)"`
# arguments added in .devcontainer/launch.sh) so that files created in bind mounts
# are owned by the host user. If remapping is unnecessary or unsafe, we exec the
# real entrypoint unchanged.
: "${REMOTE_USER:="coder"}";
: "${OLD_UID:=}";
: "${OLD_GID:=}";
: "${NEW_UID:=}";
: "${NEW_GID:=}";
# Parse /etc/passwd for REMOTE_USER's current UID, GID, and home directory.
eval "$(sed -n "s/${REMOTE_USER}:[^:]*:\([^:]*\):\([^:]*\):[^:]*:\([^:]*\).*/OLD_UID=\1;OLD_GID=\2;HOME_FOLDER=\3/p" /etc/passwd)";
# Check whether some existing user/group already owns the requested UID/GID.
eval "$(sed -n "s/\([^:]*\):[^:]*:${NEW_UID}:.*/EXISTING_USER=\1/p" /etc/passwd)";
eval "$(sed -n "s/\([^:]*\):[^:]*:${NEW_GID}:.*/EXISTING_GROUP=\1/p" /etc/group)";
if [ -z "$OLD_UID" ]; then
# REMOTE_USER does not exist in this image; nothing to remap.
echo "Remote user not found in /etc/passwd ($REMOTE_USER).";
exec "$(pwd)/.devcontainer/nvbench-entrypoint.sh" "$@";
elif [ "$OLD_UID" = "$NEW_UID" ] && [ "$OLD_GID" = "$NEW_GID" ]; then
# IDs already match the host; skip the (potentially slow) chown below.
echo "UIDs and GIDs are the same ($NEW_UID:$NEW_GID).";
exec "$(pwd)/.devcontainer/nvbench-entrypoint.sh" "$@";
elif [ "$OLD_UID" != "$NEW_UID" ] && [ -n "$EXISTING_USER" ]; then
# The requested UID is taken by a different user; remapping would collide.
echo "User with UID exists ($EXISTING_USER=$NEW_UID).";
exec "$(pwd)/.devcontainer/nvbench-entrypoint.sh" "$@";
else
if [ "$OLD_GID" != "$NEW_GID" ] && [ -n "$EXISTING_GROUP" ]; then
# The requested GID is taken by another group; keep the user's old GID.
echo "Group with GID exists ($EXISTING_GROUP=$NEW_GID).";
NEW_GID="$OLD_GID";
fi
echo "Updating UID:GID from $OLD_UID:$OLD_GID to $NEW_UID:$NEW_GID.";
# Rewrite REMOTE_USER's UID:GID fields in /etc/passwd ...
sed -i -e "s/\(${REMOTE_USER}:[^:]*:\)[^:]*:[^:]*/\1${NEW_UID}:${NEW_GID}/" /etc/passwd;
if [ "$OLD_GID" != "$NEW_GID" ]; then
# ... and the matching GID in /etc/group.
sed -i -e "s/\([^:]*:[^:]*:\)${OLD_GID}:/\1${NEW_GID}:/" /etc/group;
fi
# Fast parallel `chown -R`
find "$HOME_FOLDER/" -not -user "$REMOTE_USER" -print0 \
| xargs -0 -r -n1 -P"$(nproc --all)" chown "$NEW_UID:$NEW_GID"
# Run the container command as $REMOTE_USER, preserving the container startup environment.
#
# We cannot use `su -w` because that's not supported by the `su` in Ubuntu18.04, so we reset the following
# environment variables to the expected values, then pass through everything else from the startup environment.
export HOME="$HOME_FOLDER";
export XDG_CACHE_HOME="$HOME_FOLDER/.cache";
export XDG_CONFIG_HOME="$HOME_FOLDER/.config";
export XDG_STATE_HOME="$HOME_FOLDER/.local/state";
export PYTHONHISTFILE="$HOME_FOLDER/.local/state/.python_history";
exec su -p "$REMOTE_USER" -- "$(pwd)/.devcontainer/nvbench-entrypoint.sh" "$@";
fi

View File

@@ -2,7 +2,7 @@
set -euo pipefail
# Ensure the script is being executed in the cccl/ root
# Ensure the script is being executed in the nvbench/ root
cd "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/..";
print_help() {
@@ -11,17 +11,46 @@ print_help() {
echo "the top-level devcontainer in .devcontainer/devcontainer.json will be used."
echo ""
echo "Options:"
echo " -c, --cuda Specify the CUDA version. E.g., 12.2"
echo " -H, --host Specify the host compiler. E.g., gcc12"
echo " -d, --docker Launch the development environment in Docker directly without using VSCode."
echo " -h, --help Display this help message and exit."
echo " -c, --cuda Specify the CUDA version. E.g., 12.2"
echo " -H, --host Specify the host compiler. E.g., gcc12"
echo " -d, --docker Launch the development environment in Docker directly without using VSCode."
echo " --gpus gpu-request GPU devices to add to the container ('all' to pass all GPUs)."
echo " -e, --env list Set additional container environment variables."
echo " -v, --volume list Bind mount a volume."
echo " -h, --help Display this help message and exit."
}
# Assign variable one scope above the caller
# Usage: local "$1" && _upvar $1 "value(s)"
# Param: $1 Variable name to assign value to
# Param: $* Value(s) to assign. If multiple values, an array is
# assigned, otherwise a single value is assigned.
# See: http://fvue.nl/wiki/Bash:_Passing_variables_by_reference
_upvar() {
# `unset -v` clears the variable in the caller's scope and fails (skipping
# the eval) if "$1" is not a valid variable name.
if unset -v "$1"; then
if (( $# == 2 )); then
# Exactly one value: assign a plain scalar.
eval $1=\"\$2\";
else
# Multiple values: assign them as an array.
eval $1=\(\"\${@:2}\"\);
fi;
fi
}
parse_options() {
local OPTIONS=c:H:dh
local LONG_OPTIONS=cuda:,host:,docker,help
local -;
set -euo pipefail;
# Read the name of the variable in which to return unparsed arguments
local UNPARSED="${!#}";
# Splice the unparsed arguments variable name from the arguments list
set -- "${@:1:$#-1}";
local OPTIONS=c:e:H:dhv
local LONG_OPTIONS=cuda:,env:,host:,gpus:,volume:,docker,help
# shellcheck disable=SC2155
local PARSED_OPTIONS=$(getopt -n "$0" -o "${OPTIONS}" --long "${LONG_OPTIONS}" -- "$@")
# shellcheck disable=SC2181
if [[ $? -ne 0 ]]; then
exit 1
fi
@@ -34,10 +63,18 @@ parse_options() {
cuda_version="$2"
shift 2
;;
-e|--env)
env_vars+=("$1" "$2")
shift 2
;;
-H|--host)
host_compiler="$2"
shift 2
;;
--gpus)
gpu_request="$2"
shift 2
;;
-d|--docker)
docker_mode=true
shift
@@ -46,8 +83,13 @@ parse_options() {
print_help
exit 0
;;
-v|--volume)
volumes+=("$1" "$2")
shift 2
;;
--)
shift
_upvar "${UNPARSED}" "${@}"
break
;;
*)
@@ -59,20 +101,153 @@ parse_options() {
done
}
# shellcheck disable=SC2155
launch_docker() {
DOCKER_IMAGE=$(grep "image" "${path}/devcontainer.json" | sed 's/.*: "\(.*\)",/\1/')
echo "Found image: ${DOCKER_IMAGE}"
docker pull ${DOCKER_IMAGE}
docker run \
-it --rm \
--user coder \
--workdir /home/coder/cccl \
--mount type=bind,src="$(pwd)",dst='/home/coder/cccl' \
${DOCKER_IMAGE} \
/bin/bash
local -;
set -euo pipefail
# Expand devcontainer.json variable references on stdin into concrete values:
# workspace-folder placeholders become absolute paths, and `${localEnv:VAR:default}`
# is rewritten into shell `${VAR:-default}` syntax for later evaluation.
inline_vars() {
cat - \
`# inline local workspace folder` \
| sed "s@\${localWorkspaceFolder}@$(pwd)@g" \
`# inline local workspace folder basename` \
| sed "s@\${localWorkspaceFolderBasename}@$(basename "$(pwd)")@g" \
`# inline container workspace folder` \
| sed "s@\${containerWorkspaceFolder}@${WORKSPACE_FOLDER:-}@g" \
`# inline container workspace folder basename` \
| sed "s@\${containerWorkspaceFolderBasename}@$(basename "${WORKSPACE_FOLDER:-}")@g" \
`# translate local envvars to shell syntax` \
| sed -r 's/\$\{localEnv:([^\:]*):?(.*)\}/${\1:-\2}/g'
}
# Turn each argument into a python-style subscript and concatenate them,
# e.g. `args_to_path '"hostRequirements"' '"gpu"'` -> `["hostRequirements"]["gpu"]`.
args_to_path() {
local subscript_path=""
local key
for key in "$@"; do
subscript_path+="[${key}]"
done
echo "${subscript_path}"
}
# Print the JSON string at the given subscript path of the JSON doc on stdin,
# with devcontainer variables expanded. Errors (e.g. missing key) are silenced
# so callers can treat "no output" as "not present".
json_string() {
python3 -c "import json,sys; print(json.load(sys.stdin)$(args_to_path "${@}"))" 2>/dev/null | inline_vars
}
# Print each element of the JSON array at the given path, one double-quoted
# value per line, with devcontainer variables expanded.
json_array() {
python3 -c "import json,sys; [print(f'\"{x}\"') for x in json.load(sys.stdin)$(args_to_path "${@}")]" 2>/dev/null | inline_vars
}
# Print each entry of the JSON object at the given path as `key="value"` lines,
# with devcontainer variables expanded.
json_map() {
python3 -c "import json,sys; [print(f'{k}=\"{v}\"') for k,v in json.load(sys.stdin)$(args_to_path "${@}").items()]" 2>/dev/null | inline_vars
}
# Extract the "devcontainer.metadata" label from $DOCKER_IMAGE (set by the
# enclosing launch_docker from devcontainer.json's "image" field).
devcontainer_metadata_json() {
docker inspect --type image --format '{{json .Config.Labels}}' "$DOCKER_IMAGE" \
| json_string '"devcontainer.metadata"'
}
###
# Read relevant values from devcontainer.json
###
local devcontainer_json="${path}/devcontainer.json";
# Read image
local DOCKER_IMAGE="$(json_string '"image"' < "${devcontainer_json}")"
# Always pull the latest copy of the image
docker pull "$DOCKER_IMAGE"
# Read workspaceFolder
local WORKSPACE_FOLDER="$(json_string '"workspaceFolder"' < "${devcontainer_json}")"
# Read remoteUser
local REMOTE_USER="$(json_string '"remoteUser"' < "${devcontainer_json}")"
# If remoteUser isn't in our devcontainer.json, read it from the image's "devcontainer.metadata" label
if test -z "${REMOTE_USER:-}"; then
REMOTE_USER="$(devcontainer_metadata_json | json_string "-1" '"remoteUser"')"
fi
# Read runArgs
local -a RUN_ARGS="($(json_array '"runArgs"' < "${devcontainer_json}"))"
# Read initializeCommand
local -a INITIALIZE_COMMAND="($(json_array '"initializeCommand"' < "${devcontainer_json}"))"
# Read containerEnv
local -a ENV_VARS="($(json_map '"containerEnv"' < "${devcontainer_json}" | sed -r 's/(.*)=(.*)/--env \1=\2/'))"
# Read mounts
local -a MOUNTS="($(
tee < "${devcontainer_json}" \
1>/dev/null \
>(json_array '"mounts"') \
>(json_string '"workspaceMount"') \
| xargs -r -I% echo --mount '%'
))"
###
# Update run arguments and container environment variables
###
# Only pass `-it` if the shell is a tty
if ! ${CI:-'false'} && tty >/dev/null 2>&1 && (exec </dev/tty); then
RUN_ARGS+=("-it")
fi
for flag in rm init; do
if [[ " ${RUN_ARGS[*]} " != *" --${flag} "* ]]; then
RUN_ARGS+=("--${flag}")
fi
done
# Prefer the user-provided --gpus argument
if test -n "${gpu_request:-}"; then
RUN_ARGS+=(--gpus "${gpu_request}")
else
# Otherwise read and infer from hostRequirements.gpu
local GPU_REQUEST="$(json_string '"hostRequirements"' '"gpu"' < "${devcontainer_json}")"
if test "${GPU_REQUEST:-false}" = true; then
RUN_ARGS+=(--gpus all)
elif test "${GPU_REQUEST:-false}" = optional && \
command -v nvidia-container-runtime >/dev/null 2>&1; then
RUN_ARGS+=(--gpus all)
fi
fi
RUN_ARGS+=(--workdir "${WORKSPACE_FOLDER:-/home/coder/nvbench}")
if test -n "${REMOTE_USER:-}"; then
ENV_VARS+=(--env NEW_UID="$(id -u)")
ENV_VARS+=(--env NEW_GID="$(id -g)")
ENV_VARS+=(--env REMOTE_USER="$REMOTE_USER")
RUN_ARGS+=(-u root:root)
RUN_ARGS+=(--entrypoint "${WORKSPACE_FOLDER:-/home/coder/nvbench}/.devcontainer/docker-entrypoint.sh")
fi
if test -n "${SSH_AUTH_SOCK:-}"; then
ENV_VARS+=(--env "SSH_AUTH_SOCK=/tmp/ssh-auth-sock")
MOUNTS+=(--mount "source=${SSH_AUTH_SOCK},target=/tmp/ssh-auth-sock,type=bind")
fi
# Append user-provided volumes
if test -v volumes && test ${#volumes[@]} -gt 0; then
MOUNTS+=("${volumes[@]}")
fi
# Append user-provided envvars
if test -v env_vars && test ${#env_vars[@]} -gt 0; then
ENV_VARS+=("${env_vars[@]}")
fi
# Run the initialize command before starting the container
if test "${#INITIALIZE_COMMAND[@]}" -gt 0; then
eval "${INITIALIZE_COMMAND[*]@Q}"
fi
exec docker run \
"${RUN_ARGS[@]}" \
"${ENV_VARS[@]}" \
"${MOUNTS[@]}" \
"${DOCKER_IMAGE}" \
"$@"
}
launch_vscode() {
local -;
set -euo pipefail;
# Since Visual Studio Code allows only one instance per `devcontainer.json`,
# this code prepares a unique temporary directory structure for each launch of a devcontainer.
# By doing so, it ensures that multiple instances of the same environment can be run
@@ -85,10 +260,10 @@ launch_vscode() {
mkdir -p "${tmpdir}"
mkdir -p "${tmpdir}/.devcontainer"
cp -arL "${path}/devcontainer.json" "${tmpdir}/.devcontainer"
sed -i 's@\\${localWorkspaceFolder}@$(pwd)@g' "${tmpdir}/.devcontainer/devcontainer.json"
sed -i "s@\\${localWorkspaceFolder}@$(pwd)@g" "${tmpdir}/.devcontainer/devcontainer.json"
local path="${tmpdir}"
local hash="$(echo -n "${path}" | xxd -pu - | tr -d '[:space:]')"
local url="vscode://vscode-remote/dev-container+${hash}/home/coder/cccl"
local url="vscode://vscode-remote/dev-container+${hash}/home/coder/nvbench"
local launch=""
if type open >/dev/null 2>&1; then
@@ -105,7 +280,9 @@ launch_vscode() {
}
main() {
parse_options "$@"
local -a unparsed;
parse_options "$@" unparsed;
set -- "${unparsed[@]}";
# If no CTK/Host compiler are provided, just use the default environment
if [[ -z ${cuda_version:-} ]] && [[ -z ${host_compiler:-} ]]; then
@@ -120,7 +297,7 @@ main() {
fi
if ${docker_mode:-'false'}; then
launch_docker
launch_docker "$@"
else
launch_vscode
fi

View File

@@ -0,0 +1,17 @@
#!/usr/bin/env bash
# shellcheck disable=SC1091
set -e;
devcontainer-utils-post-create-command;
devcontainer-utils-init-git;
devcontainer-utils-post-attach-command;
cd /home/coder/nvbench/
if test $# -gt 0; then
exec "$@";
else
exec /bin/bash -li;
fi

View File

@@ -3,12 +3,6 @@ description: "Set up AWS credentials and environment variables for sccache"
runs:
using: "composite"
steps:
- name: Get AWS credentials for sccache bucket
uses: aws-actions/configure-aws-credentials@v2
with:
role-to-assume: arn:aws:iam::279114543810:role/gha-oidc-NVIDIA
aws-region: us-east-2
role-duration-seconds: 43200 # 12 hours
- name: Set environment variables
run: |
echo "SCCACHE_BUCKET=rapids-sccache-devs" >> $GITHUB_ENV

View File

@@ -7,6 +7,8 @@ defaults:
on:
workflow_call:
inputs:
cuda: {type: string, required: true}
host: {type: string, required: true}
cpu: {type: string, required: true}
test_name: {type: string, required: false}
build_script: {type: string, required: false}
@@ -25,6 +27,8 @@ jobs:
contents: read
uses: ./.github/workflows/run-as-coder.yml
with:
cuda: ${{ inputs.cuda }}
host: ${{ inputs.host }}
name: Build/Test ${{inputs.test_name}}
runner: linux-${{inputs.cpu}}-gpu-v100-latest-1
image: ${{ inputs.container_image }}

View File

@@ -23,7 +23,7 @@ jobs:
SCCACHE_S3_NO_CREDENTIALS: false
steps:
- name: Get AWS credentials for sccache bucket
uses: aws-actions/configure-aws-credentials@v2
uses: aws-actions/configure-aws-credentials@v4
with:
role-to-assume: arn:aws:iam::279114543810:role/gha-oidc-NVIDIA
aws-region: us-east-2

View File

@@ -27,6 +27,8 @@ jobs:
matrix:
include: ${{ fromJSON(inputs.per_cuda_compiler_matrix) }}
with:
cuda: ${{ matrix.cuda }}
host: ${{matrix.compiler.name}}${{matrix.compiler.version}}
cpu: ${{ matrix.cpu }}
test_name: ${{matrix.cpu}}/${{matrix.compiler.name}}${{matrix.compiler.version}} ${{matrix.extra_build_args}}
build_script: "./ci/build_${{ inputs.project_name }}.sh -cxx ${{matrix.compiler.exe}} ${{matrix.extra_build_args}}"

View File

@@ -44,7 +44,7 @@ jobs:
PER_CUDA_COMPILER_KEYS: ${{steps.set-outputs.outputs.PER_CUDA_COMPILER_KEYS}}
steps:
- name: Checkout repo
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Compute matrix outputs
id: set-outputs
run: |

View File

@@ -7,6 +7,8 @@ defaults:
on:
workflow_call:
inputs:
cuda: {type: string, required: true}
host: {type: string, required: true}
name: {type: string, required: true}
image: {type: string, required: true}
runner: {type: string, required: true}
@@ -24,31 +26,63 @@ jobs:
contents: read
runs-on: ${{inputs.runner}}
container:
options: -u root
image: ${{inputs.image}}
# This job now uses a docker-outside-of-docker (DOOD) strategy.
#
# The GitHub Actions runner application mounts the host's docker socket `/var/run/docker.sock` into the
# container. By using a container with the `docker` CLI, this container can launch docker containers
# using the host's docker daemon.
#
# This allows us to run actions that require node v20 in the `cruizba/ubuntu-dind:jammy-26.1.3` container, and
# then launch our Ubuntu18.04-based GCC 6/7 containers to build and test nvbench.
#
# The main inconvenience to this approach is that any container mounts have to match the paths of the runner host,
# not the paths as seen in the intermediate (`cruizba/ubuntu-dind`) container.
#
# Note: I am using `cruizba/ubuntu-dind:jammy-26.1.3` instead of `docker:latest`, because GitHub doesn't support
# JS actions in alpine aarch64 containers, instead failing actions with this error:
# ```
# Error: JavaScript Actions in Alpine containers are only supported on x64 Linux runners. Detected Linux Arm64
# ```
image: cruizba/ubuntu-dind:jammy-26.1.3
env:
NVIDIA_VISIBLE_DEVICES: ${{ env.NVIDIA_VISIBLE_DEVICES }}
steps:
- name: Checkout repo
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
path: nvbench
persist-credentials: false
- name: Move files to coder user home directory
run: |
cp -R nvbench /home/coder/nvbench
chown -R coder:coder /home/coder/
- name: Add NVCC problem matcher
run: |
echo "::add-matcher::nvbench/.github/problem-matchers/problem-matcher.json"
- name: Configure credentials and environment variables for sccache
uses: ./nvbench/.github/actions/configure_cccl_sccache
- name: Run command
shell: su coder {0}
env:
CI: true
RUNNER: "${{inputs.runner}}"
COMMAND: "${{inputs.command}}"
AWS_ACCESS_KEY_ID: "${{env.AWS_ACCESS_KEY_ID}}"
AWS_SESSION_TOKEN: "${{env.AWS_SESSION_TOKEN}}"
AWS_SECRET_ACCESS_KEY: "${{env.AWS_SECRET_ACCESS_KEY}}"
run: |
echo "[host] github.workspace: ${{github.workspace}}"
echo "[container] GITHUB_WORKSPACE: ${GITHUB_WORKSPACE:-}"
echo "[container] PWD: $(pwd)"
# Necessary because we're doing docker-outside-of-docker:
# Make a symlink in the container that matches the host's ${{github.workspace}}, so that way `$(pwd)`
# in `.devcontainer/launch.sh` constructs volume paths relative to the hosts's ${{github.workspace}}.
mkdir -p "$(dirname "${{github.workspace}}")"
ln -s "$(pwd)" "${{github.workspace}}"
cd "${{github.workspace}}"
cat <<"EOF" > ci.sh
#! /usr/bin/env bash
set -eo pipefail
cd ~/nvbench
echo -e "\e[1;34mRunning as 'coder' user in $(pwd):\e[0m"
echo -e "\e[1;34mRunning as '$(whoami)' user in $(pwd):\e[0m"
echo -e "\e[1;34m${{inputs.command}}\e[0m"
eval "${{inputs.command}}" || exit_code=$?
if [ ! -z "$exit_code" ]; then
@@ -65,3 +99,58 @@ jobs:
echo " - Continuous Integration (CI) Overview: https://github.com/NVIDIA/cccl/blob/main/ci-overview.md"
exit $exit_code
fi
EOF
chmod +x ci.sh
mkdir "$RUNNER_TEMP/.aws";
cat <<EOF > "$RUNNER_TEMP/.aws/config"
[default]
bucket=rapids-sccache-devs
region=us-east-2
EOF
cat <<EOF > "$RUNNER_TEMP/.aws/credentials"
[default]
aws_access_key_id=$AWS_ACCESS_KEY_ID
aws_session_token=$AWS_SESSION_TOKEN
aws_secret_access_key=$AWS_SECRET_ACCESS_KEY
EOF
chmod 0600 "$RUNNER_TEMP/.aws/credentials"
chmod 0664 "$RUNNER_TEMP/.aws/config"
declare -a gpu_request=()
# Explicitly pass which GPU to use if on a GPU runner
if [[ "${RUNNER}" = *"-gpu-"* ]]; then
gpu_request+=(--gpus "device=${NVIDIA_VISIBLE_DEVICES}")
fi
host_path() {
sed "s@/__w@$(dirname "$(dirname "${{github.workspace}}")")@" <<< "$1"
}
# Launch this container using the host's docker daemon
${{github.event.repository.name}}/.devcontainer/launch.sh \
--docker \
--cuda ${{inputs.cuda}} \
--host ${{inputs.host}} \
"${gpu_request[@]}" \
--env "CI=$CI" \
--env "AWS_ROLE_ARN=" \
--env "COMMAND=$COMMAND" \
--env "GITHUB_ENV=$GITHUB_ENV" \
--env "GITHUB_SHA=$GITHUB_SHA" \
--env "GITHUB_PATH=$GITHUB_PATH" \
--env "GITHUB_OUTPUT=$GITHUB_OUTPUT" \
--env "GITHUB_ACTIONS=$GITHUB_ACTIONS" \
--env "GITHUB_REF_NAME=$GITHUB_REF_NAME" \
--env "GITHUB_WORKSPACE=$GITHUB_WORKSPACE" \
--env "GITHUB_REPOSITORY=$GITHUB_REPOSITORY" \
--env "GITHUB_STEP_SUMMARY=$GITHUB_STEP_SUMMARY" \
--volume "${{github.workspace}}/ci.sh:/ci.sh" \
--volume "$(host_path "$RUNNER_TEMP")/.aws:/root/.aws" \
--volume "$(dirname "$(dirname "${{github.workspace}}")"):/__w" \
-- /ci.sh

View File

@@ -16,7 +16,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Setup jq and yq
run: |
sudo apt-get update
@@ -45,7 +45,7 @@ jobs:
devcontainers: ${{ steps.get-list.outputs.devcontainers }}
steps:
- name: Check out the code
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Get list of devcontainer.json paths and names
id: get-list
run: |
@@ -67,16 +67,7 @@ jobs:
contents: read
steps:
- name: Check out the code
uses: actions/checkout@v3
# devcontainer/ci doesn't supported nested devcontainer.json files, so we need to copy the devcontainer.json
# file to the top level .devcontainer/ directory
- name: Copy devcontainer.json to .devcontainer/
run: |
src="${{ matrix.devcontainer.path }}"
dst=".devcontainer/devcontainer.json"
if [[ "$src" != "$dst" ]]; then
cp "$src" "$dst"
fi
uses: actions/checkout@v4
# We don't really need sccache configured, but we need the AWS credentials envvars to be set
# in order to avoid the devcontainer hanging waiting for GitHub authentication
- name: Configure credentials and environment variables for sccache
@@ -85,6 +76,7 @@ jobs:
uses: devcontainers/ci@v0.3
with:
push: never
configFile: ${{ matrix.devcontainer.path }}
env: |
SCCACHE_REGION=${{ env.SCCACHE_REGION }}
AWS_ACCESS_KEY_ID=${{ env.AWS_ACCESS_KEY_ID }}

View File

@@ -2,10 +2,10 @@
cuda_prev_min: &cuda_prev_min '11.1' # Does not support the CUPTI APIs we use (added in 11.3)
cuda_prev_max: &cuda_prev_max '11.8'
cuda_curr_min: &cuda_curr_min '12.0'
cuda_curr_max: &cuda_curr_max '12.4'
cuda_curr_max: &cuda_curr_max '12.6'
# The version of the devcontainer images to use from https://hub.docker.com/r/rapidsai/devcontainers
devcontainer_version: '24.06'
devcontainer_version: '24.12'
# gcc compiler configurations
gcc7: &gcc7 { name: 'gcc', version: '7', exe: 'g++' }