
Commit fbeb5ff

Author: Lorenzo Toniazzi (committed)
Add assertions
1 parent 226d842 commit fbeb5ff

File tree: 2 files changed, +50 -1 lines changed


.gitignore

Lines changed: 3 additions & 0 deletions
@@ -129,3 +129,6 @@ poetry.toml
 
 # Scripts
 !/scripts/install-oneapi.bat
+
+# Test models for lora adapters
+/reduce-llms-for-testing

tests/test_lora_conversion_and_inference.sh

Lines changed: 47 additions & 1 deletion
@@ -12,9 +12,28 @@ else
     echo "Repository already exists. Skipping clone."
 fi
 
+# Load the expected starting strings from the text file
+EXPECTED_BASE=$(cat $MODELS_REPO/data/pale_blue_dot.txt)
+EXPECTED_LORA_HOT=$(cat $MODELS_REPO/data/bohemian_rhapsody.txt)
+EXPECTED_LORA_MERGED=$(cat $MODELS_REPO/data/bohemian_rhapsody.txt)
+
 # Declare a regular array to store results
 results=()
 
+trim_leading_whitespace() {
+    local input_string="$1"
+    # Remove leading whitespace
+    echo "${input_string#"${input_string%%[![:space:]]*}"}"
+}
+
+extract_starting_substring() {
+    local reference_string="$1"
+    local target_string="$2"
+
+    local target_length=${#target_string}
+    echo "${reference_string:0:$target_length}"
+}
+
 run_conversion_and_inference_lora() {
     local model_name=$1
     local size_matrix=$2
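
The two helpers added above normalize the generated text before it is compared against the reference files. A minimal standalone sketch of how they behave (the function bodies are copied from the hunk above; the sample strings are made up for illustration, not taken from the repository's data files):

    #!/usr/bin/env bash
    # Helpers as added in tests/test_lora_conversion_and_inference.sh

    trim_leading_whitespace() {
        local input_string="$1"
        # Strip only the leading whitespace; the rest of the string is unchanged
        echo "${input_string#"${input_string%%[![:space:]]*}"}"
    }

    extract_starting_substring() {
        local reference_string="$1"
        local target_string="$2"
        local target_length=${#target_string}
        # Return the first ${#target_string} characters of the reference
        echo "${reference_string:0:$target_length}"
    }

    # Illustrative calls (hypothetical strings)
    trim_leading_whitespace "   hello world"                       # prints "hello world"
    extract_starting_substring "hello world and more" "hello w"    # prints "hello w"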
@@ -50,6 +69,33 @@ run_conversion_and_inference_lora() {
     OUTPUT_LORA_MERGED=$(llama-cli -m $MODELS_REPO/$model_name/size=$size_matrix/base/Base-F32-lora-merged.gguf \
         -p "I see a little silhouetto" -n 50 --seed 42 --temp 0)
 
+    # Extract the corresponding substring from EXPECTED_BASE
+    # and remove initial white spaces in OUTPUT_BASE
+    OUTPUT_BASE=$(trim_leading_whitespace "$OUTPUT_BASE")
+    EXPECTED_BASE=$(extract_starting_substring "$EXPECTED_BASE" "$OUTPUT_BASE")
+    OUTPUT_LORA_HOT=$(trim_leading_whitespace "$OUTPUT_LORA_HOT")
+    EXPECTED_LORA_HOT=$(extract_starting_substring "$EXPECTED_LORA_HOT" "$OUTPUT_LORA_HOT")
+    OUTPUT_LORA_MERGED=$(trim_leading_whitespace "$OUTPUT_LORA_MERGED")
+    EXPECTED_LORA_MERGED=$(extract_starting_substring "$EXPECTED_LORA_MERGED" "$OUTPUT_LORA_MERGED")
+
+    # Compare the actual output with the expected start
+    if [[ "$OUTPUT_BASE" != "$EXPECTED_BASE" ]]; then
+        echo "Error: $model_name OUTPUT_BASE does not start with the expected string."
+        echo -e "Out=$OUTPUT_BASE\n\nExp=$EXPECTED_BASE"
+        exit 1
+    fi
+    if [[ "$OUTPUT_LORA_HOT" != "$EXPECTED_LORA_HOT" ]]; then
+        echo "Error: $model_name OUTPUT_LORA_HOT does not start with the expected string."
+        echo -e "Out=$OUTPUT_LORA_HOT\n\nExp=$EXPECTED_LORA_HOT"
+        exit 1
+    fi
+    if [[ "$OUTPUT_LORA_MERGED" != "$EXPECTED_LORA_MERGED" ]]; then
+        echo "Error: $model_name OUTPUT_LORA_MERGED does not start with the expected string."
+        echo -e "Out=$OUTPUT_LORA_MERGED\n\nExp=$EXPECTED_LORA_MERGED"
+        exit 1
+    fi
+
+
     # Store the results in the regular array
     results+=("
     \n\033[1mResults for $model_name with size $size_matrix:\033[0m
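
Because extract_starting_substring truncates the expected text to the exact length of the generated output, the equality checks above act as "starts with" assertions: they pass only when the output is a prefix of the reference text. The same pattern in a self-contained form, with hypothetical strings standing in for the reference file and the llama-cli output:

    #!/usr/bin/env bash
    # Hypothetical values, for illustration only
    EXPECTED="The quick brown fox jumps over the lazy dog"
    OUTPUT="The quick brown fox"

    # Truncate the reference to the output's length, then compare
    if [[ "$OUTPUT" != "${EXPECTED:0:${#OUTPUT}}" ]]; then
        echo "Error: output does not start with the expected string."
        exit 1
    fi
    echo "Prefix assertion passed."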
@@ -64,7 +110,7 @@ run_conversion_and_inference_lora() {
 
 # Array of parameters to iterate over
 declare -a params=(
-    "Gemma2ForCausalLM 64"
+    # "Gemma2ForCausalLM 64"
     "LlamaForCausalLM 64"
     "Phi3ForCausalLM 64"
 )
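
Assuming llama-cli and the conversion tooling are available on PATH and the script is invoked from the repository root (it clones the test-model repository if it is not already present, as the context lines above suggest), a run would look something like:

    bash tests/test_lora_conversion_and_inference.sh

With the new assertions, any mismatch between a model's output and the expected starting string aborts the run with exit code 1, so a non-zero exit now signals a regression.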
