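# Resolve the script's own directory and run from its parent (assumed to be the project root).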
BASEDIR=$(dirname "$0")
cd "$BASEDIR/.."
echo "Current Directory:"
pwd
BASEDIR=$(pwd)
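
# Log hardware and OS details for the run: GPU, kernel, distro, CPU, and total memory.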
nvidia-smi
uname -a
cat /etc/os-release
lscpu
grep MemTotal /proc/meminfo
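
# Dependency installation is left commented out; the environment is assumed to be prepared beforehand.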
#pip install -r requirements.txt
#cd ../LLaMA-Factory && pip install -e .[torch,bitsandbytes]
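
# Evaluation settings, presumably read by llm_toolkit/eval_logical_reasoning_all_epochs.py from these environment variables.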
export LOGICAL_REASONING_DATA_PATH=datasets/mgtv
export RESIZE_TOKEN_EMBEDDINGS=true
export START_EPOCH=0
export USING_LLAMA_FACTORY=true
export USE_ENGLISH_DATASETS=true
export USE_BF16_FOR_INFERENCE=true

export MODEL_NAME=meta-llama/Meta-Llama-3.1-8B-Instruct
export MODEL_PREFIX=llama3-8b_lora_sft_bf16
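
# First pass: evaluate the adapters trained with the P1 prompt template across all epochs.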
export LOGICAL_REASONING_RESULTS_PATH=results/$MODEL_PREFIX-p1_en_r2.csv
export ADAPTER_PATH_BASE=llama-factory/saves/llama3-8b/lora/sft_bf16_p1_full_en_r2
export USING_P1_PROMPT_TEMPLATE=true

echo "Eval $MODEL_NAME with $ADAPTER_PATH_BASE"
python llm_toolkit/eval_logical_reasoning_all_epochs.py
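
# Second pass: same base model, adapters trained with the P2 prompt template.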
export LOGICAL_REASONING_RESULTS_PATH=results/$MODEL_PREFIX-p2_en_r2.csv
export ADAPTER_PATH_BASE=llama-factory/saves/llama3-8b/lora/sft_bf16_p2_full_en_r2
export USING_P1_PROMPT_TEMPLATE=false

echo "Eval $MODEL_NAME with $ADAPTER_PATH_BASE"
python llm_toolkit/eval_logical_reasoning_all_epochs.py