---
# MPIJob (Kubeflow MPI Operator v2beta1) that runs tokenizer shard 103 for the
# "dravid" dataset on Habana Gaudi nodes.  The launcher pod resolves the MPI
# hostfile, then runs two mpirun phases: environment setup, then tokenization.
apiVersion: kubeflow.org/v2beta1
kind: MPIJob
metadata:
  name: peacock-tokenizer-dravid-103
  namespace: peacock
  labels:
    app: peacock-tokenizer-dravid-103
spec:
  # 8 MPI slots per worker — one per Gaudi card (matches habana.ai/gaudi: 8).
  slotsPerWorker: 8
  runPolicy:
    backoffLimit: 1
    cleanPodPolicy: Running
  mpiReplicaSpecs:
    Launcher:
      replicas: 1
      template:
        spec:
          hostIPC: true
          volumes:
            - name: work-dir
              persistentVolumeClaim:
                claimName: peacock-fs-pvc
          containers:
            - image: vault.habana.ai/gaudi-docker/1.15.1/ubuntu22.04/habanalabs/pytorch-installer-2.2.0:latest
              name: peacock-tokenizer-dravid-103-launcher
              imagePullPolicy: IfNotPresent
              volumeMounts:
                - name: work-dir
                  mountPath: /mnt/weka/peacock
              command: ["/bin/bash", "-c"]
              args:
                # Folded scalar (>-): lines join into a single shell command
                # line; statements are ';'-terminated so folding is safe.
                - >-
                  export SYNAPSE_VERSION="1.15.1";
                  export WORKER_DIR="/mnt/weka/peacock/experiments/llama";
                  export MEGATRON_SETUP_CMD="$WORKER_DIR/launch/setup.sh";
                  export TOKENIZER_CMD=/mnt/weka/peacock/idc/datasets/dravid//tok_dravid/tok_files/tokenizer_103.sh;
                  HOSTSFILE=$OMPI_MCA_orte_default_hostfile;
                  echo "HOSTSFILE=$HOSTSFILE";
                  MASTER_ADDR="$(head -n 1 $HOSTSFILE | sed -n "s/[[:space:]]slots.*//p")";
                  NUM_NODES=$(wc -l < $HOSTSFILE);
                  CARDS_PER_NODE=8;
                  N_CARDS=$((NUM_NODES*CARDS_PER_NODE));
                  echo "MPI_ROOT=$MPI_ROOT";
                  echo "N_CARDS=$N_CARDS";
                  echo "MASTER_ADDR=$MASTER_ADDR";
                  sleep 20;
                  mpirun -np $N_CARDS -npernode 8
                  --tag-output
                  --allow-run-as-root
                  --prefix $MPI_ROOT
                  -x WORKER_DIR=$WORKER_DIR
                  -x SYNAPSE_VERSION=$SYNAPSE_VERSION $MEGATRON_SETUP_CMD;
                  mpirun -np $N_CARDS -npernode 8
                  --tag-output
                  --allow-run-as-root
                  --prefix $MPI_ROOT
                  -x WORKER_DIR=$WORKER_DIR
                  -x SYNAPSE_VERSION=$SYNAPSE_VERSION $TOKENIZER_CMD;
    Worker:
      replicas: 1
      template:
        spec:
          volumes:
            - name: work-dir
              persistentVolumeClaim:
                claimName: peacock-fs-pvc
          # Tolerate the Gaudi and namespace taints on the accelerator nodes.
          tolerations:
            - key: "habana.ai/gaudi"
              operator: "Exists"
              effect: "NoSchedule"
            - key: "k8s/namespace"
              operator: "Equal"
              value: "peacock"
              effect: "NoSchedule"
          hostIPC: true
          containers:
            - image: vault.habana.ai/gaudi-docker/1.15.1/ubuntu22.04/habanalabs/pytorch-installer-2.2.0:latest
              name: peacock-llama-worker
              imagePullPolicy: IfNotPresent
              securityContext:
                capabilities:
                  add:
                    - SYSLOG
              resources:
                limits:
                  habana.ai/gaudi: 8
                  hugepages-2Mi: 300Gi
                  memory: 700Gi
                  cpu: 150
                requests:
                  habana.ai/gaudi: 8
                  hugepages-2Mi: 300Gi
                  memory: 700Gi
                  cpu: 150
              volumeMounts:
                - name: work-dir
                  mountPath: /mnt/weka/peacock