git.djapps.eu Git - pkg/ggml/sources/llama.cpp/commitdiff
scripts : update get-hellaswag.sh and get-winogrande.sh (#20542)
author Adrien Gallouët <redacted>
Sat, 14 Mar 2026 10:21:50 +0000 (11:21 +0100)
committer GitHub <redacted>
Sat, 14 Mar 2026 10:21:50 +0000 (11:21 +0100)
Signed-off-by: Adrien Gallouët <redacted>
.gitignore
scripts/get-hellaswag.sh
scripts/get-winogrande.sh

index bb122d692441b883496fa2ad5dc4e9f8cb714326..73954e8f5dc392f82dbadcbef387a3906b0c9b37 100644 (file)
@@ -124,6 +124,11 @@ poetry.toml
 # Scripts
 !/scripts/install-oneapi.bat
 
+# Generated by scripts
+/hellaswag_val_full.txt
+/winogrande-debiased-eval.csv
+/wikitext-2-raw/
+
 # Test models for lora adapters
 /lora-tests
 
index 484e56fd8f685bd30c1d4a98cd911f5aad528dce..0b161141f46555fca7b0edd49bfb8272e847288f 100755 (executable)
@@ -1,10 +1,38 @@
-#!/usr/bin/env bash
+#!/bin/sh
+# vim: set ts=4 sw=4 et:
 
-wget https://raw.githubusercontent.com/klosax/hellaswag_text_data/main/hellaswag_val_full.txt
+FILE="hellaswag_val_full.txt"
+URL="https://raw.githubusercontent.com/klosax/hellaswag_text_data/main/$FILE"
 
-echo "Usage:"
-echo ""
-echo "  ./llama-perplexity -m model.gguf -f hellaswag_val_full.txt --hellaswag [--hellaswag-tasks N] [other params]"
-echo ""
+die() {
+    printf "%s\n" "$@" >&2
+    exit 1
+}
 
-exit 0
+have_cmd() {
+    for cmd; do
+        command -v "$cmd" >/dev/null || return
+    done
+}
+
+dl() {
+    [ -f "$2" ] && return
+    if have_cmd wget; then
+        wget "$1" -O "$2"
+    elif have_cmd curl; then
+        curl -L "$1" -o "$2"
+    else
+        die "Please install wget or curl"
+    fi
+}
+
+if [ ! -f "$FILE" ]; then
+    dl "$URL" "$FILE" || exit
+fi
+
+cat <<EOF
+Usage:
+
+  llama-perplexity -m model.gguf -f $FILE --hellaswag [--hellaswag-tasks N] [other params]
+
+EOF
index 2b48b11756647ac2263d708b8eeb60311478fd9e..bfa0d2ef0b237651ac47f4e3ce6aab0fc614b7a2 100755 (executable)
@@ -1,10 +1,38 @@
-#!/usr/bin/env bash
+#!/bin/sh
+# vim: set ts=4 sw=4 et:
 
-wget https://huggingface.co/datasets/ikawrakow/winogrande-eval-for-llama.cpp/raw/main/winogrande-debiased-eval.csv
+FILE="winogrande-debiased-eval.csv"
+URL="https://huggingface.co/datasets/ikawrakow/winogrande-eval-for-llama.cpp/raw/main/$FILE"
 
-echo "Usage:"
-echo ""
-echo "  ./llama-perplexity -m model.gguf -f winogrande-debiased-eval.csv --winogrande [--winogrande-tasks N] [other params]"
-echo ""
+die() {
+    printf "%s\n" "$@" >&2
+    exit 1
+}
 
-exit 0
+have_cmd() {
+    for cmd; do
+        command -v "$cmd" >/dev/null || return
+    done
+}
+
+dl() {
+    [ -f "$2" ] && return
+    if have_cmd wget; then
+        wget "$1" -O "$2"
+    elif have_cmd curl; then
+        curl -L "$1" -o "$2"
+    else
+        die "Please install wget or curl"
+    fi
+}
+
+if [ ! -f "$FILE" ]; then
+    dl "$URL" "$FILE" || exit
+fi
+
+cat <<EOF
+Usage:
+
+  llama-perplexity -m model.gguf -f $FILE --winogrande [--winogrande-tasks N] [other params]
+
+EOF