llm_eval_link.sh
#!/bin/bash
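# Usage (a sketch inferred from the positional defaults below; the example
# model path is a placeholder, not a path shipped with the repo):
#   bash llm_eval_link.sh [dataset] [task] [model_path] [pretrain_data]
#   e.g. bash llm_eval_link.sh cora lp ./checkpoints/my_model cora.3-citeseer.3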
dataset=${1:-"cora"} # dataset to evaluate
task=${2:-"lp"} # evaluation task ("lp" = link prediction)
model_path=${3:-"/root/paddlejob/workspace/env_run/MyOFA/checkpoints/cora/llaga-mistral-7b-hf-sbert-4-hop-token-linear-projector_nc"}
pretrain_data=${4:-"cora.3-citeseer.3"}
model_base="/localscratch/chenzh85/models--mistralai--Mistral-7B-v0.1/snapshots/26bca36bde8333b5d7f72e9ed20ccda6a618af24"
mode="mistral_instruct" # conversation template: use "llaga_llama_2" for Llama and "v1" for other models
emb="sbert"
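# Neighborhood encoding settings (assumed semantics: number of hops to expand
# and number of neighbors sampled per hop).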
use_hop=4
sample_size=10
template="HO" # "HO" (Hop-Field Overview) or "ND" (Neighborhood Detail)
output_path="./checkpoints/${dataset}_${task}_${pretrain_data}"
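# Run evaluation with the pretrained model; answers are written to ${output_path}.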
python3 eval_pretrain_logits.py \
    --model_path "${model_path}" \
    --model_base "${model_base}" \
    --conv_mode "${mode}" \
    --dataset "${dataset}" \
    --pretrained_embedding_type "${emb}" \
    --use_hop ${use_hop} \
    --sample_neighbor_size ${sample_size} \
    --answers_file "${output_path}" \
    --task "${task}" \
    --cache_dir "./llmcheckpoint" \
    --template "${template}" \
    --start "-1" \
    --end "2000"