#!/bin/bash
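# Tokenize one corpus shard (finalao_splitab) of the Dravidian dataset with a
# trained 50k SentencePiece model, via Megatron-DeepSpeed's preprocess_data.py.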
set -m  # enable job control (monitor mode)
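# Rename the "raw_content" field to "text" in the JSONL shard so it matches the
# default JSON key that preprocess_data.py reads (assumes JSONL input).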
sed -i -e "s/raw_content/text/g" /mnt/weka/peacock/idc/datasets/dravid/tok_dravid/split/finalao_splitab
echo "above deepspeed$"
FINAL_DIR=/mnt/weka/peacock/idc/hineng/dravid
#TOKENIZER=facebook/nllb-200-distilled-600M
#TOKENIZER_TYPE=HuggingFaceTokenizer
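# Use the locally trained ta/te/kan/ml 50k SentencePiece tokenizer rather than
# the NLLB HuggingFace tokenizer commented out above.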
TOKENIZER_PATH=/mnt/weka/peacock/tokenization/trained-tokenizer/dravid_64k
TOKENIZER=$TOKENIZER_PATH/ta_te_kan_ml_50kspm_tokenizer.model
VOCAB_FILE=$TOKENIZER_PATH/ta_te_kan_ml_50kspm_tokenizer.vocab
TOKENIZER_TYPE=SentencePieceTokenizer
mkdir -p $FINAL_DIR/tokenizer29/
cd /mnt/weka/peacock/experiments/llama/Megatron-DeepSpeed || exit 1
echo "inside deepspeed"
pwd
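# preprocess_data.py should emit indexed-dataset files (.bin/.idx) under
# $FINAL_DIR/tokenizer29/; the trailing slash on --output-prefix makes the
# directory itself the filename prefix.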
python3 tools/preprocess_data.py \
--input /mnt/weka/peacock/idc/datasets/dravid/tok_dravid/split/finalao_splitab \
--output-prefix $FINAL_DIR/tokenizer29/ \
--tokenizer-model $TOKENIZER \
--vocab-file $VOCAB_FILE \
--dataset-impl mmap --tokenizer-type $TOKENIZER_TYPE \
--append-eod --workers 8 # --partitions 16 # --chunk-size 50
# --merge-file $MERGES_FILE \