Skip to content

Commit

Permalink
improve check-requirements.sh
Browse files Browse the repository at this point in the history
  • Loading branch information
cebtenzzre committed Dec 28, 2023
1 parent cb58775 commit ce26f49
Show file tree
Hide file tree
Showing 3 changed files with 199 additions and 207 deletions.
152 changes: 69 additions & 83 deletions check-requirements.sh
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
#!/bin/bash
set -euo pipefail
#
# check-requirements.sh checks all requirements files for each top-level
# convert*.py script.
Expand All @@ -8,7 +9,7 @@
# sized tmpfs /tmp or ramdisk is recommended if running this frequently.
#
# usage: ./check-requirements.sh [<working_dir>]
# ./check-requirements.sh 'nocleanup' [<working_dir>]
# ./check-requirements.sh nocleanup [<working_dir>]
#
# where:
# - <working_dir> is a directory that can be used as the base for
Expand All @@ -20,161 +21,146 @@
# - bash >= 3.2.57
# - shellcheck
#
# For each script, it creates a fresh venv, `pip install -r` the
# requirements, and finally executes the python script with no arguments to
# check for a `ModuleNotFoundError`.
# For each script, it creates a fresh venv, `pip install`s the requirements, and
# finally imports the python script to check for `ImportError`.
#

log() {
local level="$1"; shift
local format="$1"; shift
# shellcheck disable=SC2059
>&2 printf "$level: $format\n" "$@"
local level=$1 msg=$2
printf >&2 '%s: %s\n' "$level" "$msg"
}

debug () {
log 'DEBUG' "$@"
debug() {
log DEBUG "$@"
}

info() {
log 'INFO' "$@"
log INFO "$@"
}

fatal() {
log 'FATAL' "$@"
log FATAL "$@"
exit 1
}

cleanup() {
if [[ -n ${workdir+x} && -d $workdir && -w $workdir ]]; then
info "Removing $workdir"
(
count=0
rm -rfv "$workdir" | while read -r; do
if (( count++ > 750 )); then
printf '.'
count=0
fi
done
printf '\n'
)&
wait $!
info "Removed '$workdir'"
local count=0
rm -rfv -- "$workdir" | while read -r; do
if (( count++ > 750 )); then
printf .
count=0
fi
done
printf '\n'
info "Removed $workdir"
fi
}

# Signal-handler path: tear down the working directory, then fail.
abort() {
    cleanup
    exit 1
}

if [[ $1 == nocleanup ]]; then
shift # discard nocleanup arg
if [[ ${1-} == nocleanup ]]; then
shift # discard nocleanup arg
else
trap abort SIGINT SIGTERM SIGQUIT SIGABRT
trap exit INT TERM
trap cleanup EXIT
fi

set -eu -o pipefail
this="$(realpath "$0")"; readonly this
this=$(realpath -- "$0"); readonly this
cd "$(dirname "$this")"

shellcheck "$this"

readonly reqs_dir='./requirements'
readonly reqs_dir=requirements

workdir=
if [[ -n ${1+x} ]]; then
arg_dir="$(realpath "$1")"
if [[ ! ( -d $arg_dir && -w $arg_dir ) ]]; then
fatal "$arg_dir is not a valid directory"
if [[ ${1+x} ]]; then
tmp_dir=$(realpath -- "$1")
if [[ ! ( -d $tmp_dir && -w $tmp_dir ) ]]; then
fatal "$tmp_dir is not a writable directory"
fi
workdir="$(mktemp -d "$arg_dir/check-requirements.XXXX")"
else
workdir="$(mktemp -d "/tmp/check-requirements.XXXX")"
tmp_dir=/tmp
fi
readonly workdir

workdir=$(mktemp -d "$tmp_dir/check-requirements.XXXX"); readonly workdir
info "Working directory: $workdir"

# Guard helper: abort via fatal() when the caller's argument count is wrong.
#   $1   - expected number of arguments
#   rest - the caller's "$@", forwarded for counting
assert_arg_count() {
    local expected=$1
    shift
    (( $# == expected )) || fatal "${FUNCNAME[1]}: incorrect number of args"
}

check_requirements() {
assert_arg_count 2 "$@"
local venv="$1"
local reqs="$2"
local reqs=$1

info "$reqs: beginning check"
(
# shellcheck source=/dev/null
source "$venv/bin/activate"
pip --disable-pip-version-check install -q -r "$reqs"
)
pip --disable-pip-version-check install -qr "$reqs"
info "$reqs: OK"
}

check_convert_script() {
assert_arg_count 1 "$@"
local py="$1"; shift # e.g. ./convert-hf-to-gguf.py
local pyname; pyname="$(basename "$py")" # e.g. convert-hf-to-gguf.py
pyname="${pyname%.py}" # e.g. convert-hf-to-gguf
local py=$1 # e.g. ./convert-hf-to-gguf.py
local pyname=${py##*/} # e.g. convert-hf-to-gguf.py
pyname=${pyname%.py} # e.g. convert-hf-to-gguf

info "$py: beginning check"

local reqs="$reqs_dir/requirements-$pyname.txt"
if [[ ! -r "$reqs" ]]; then
if [[ ! -r $reqs ]]; then
fatal "$py missing requirements. Expected: $reqs"
fi

local venv="$workdir/$pyname-venv"
python3 -m venv "$venv"

check_requirements "$venv" "$reqs"

# Because we mask the return value of the subshell,
# we don't need to use set +e/-e.
# shellcheck disable=SC2155
local py_err=$(
(
# shellcheck source=/dev/null
source "$venv/bin/activate"
python "$py" 2>&1

check_requirements "$reqs"

python - "$py" "$pyname" <<EOF
import sys
from importlib.machinery import SourceFileLoader
py, pyname = sys.argv[1:]
SourceFileLoader(pyname, py).load_module()
EOF
)

# shellcheck disable=SC2181
if grep -Fe 'ModuleNotFoundError' <<< "$py_err"; then
fatal "$py: some imports not declared in $reqs"
fi
rm -rf -- "$venv"

info "$py: imports OK"
}

readonly ignore_eq_eq='check_requirements: ignore "=="'

for req in "$reqs_dir"/*; do
# Check that all sub-requirements are added to top-level requirements.txt
if ! grep -qFe "$req" ./requirements.txt; then
fatal "$req needs to be added to ./requirements.txt"
if ! grep -qF "$req" requirements.txt; then
fatal "$req needs to be added to requirements.txt"
fi

# Make sure exact release versions aren't being pinned in the requirements
# Filters out the ignore string
req_no_ignore_eq_eq="$(grep -vF "$ignore_eq_eq" "$req")"
if grep -Fe '==' <<< "$req_no_ignore_eq_eq" ; then
fatal "Avoid pinning exact package versions. Use '~=' instead.\nYou can suppress this error by appending the following to the line: \n\t# $ignore_eq_eq"
if grep -vF "$ignore_eq_eq" "$req" | grep -q '=='; then
tab=$'\t'
cat >&2 <<EOF
FATAL: Avoid pinning exact package versions. Use '~=' instead.
You can suppress this error by appending the following to the line:
$tab# $ignore_eq_eq
EOF
exit 1
fi
done

all_venv="$workdir/all-venv"
python3 -m venv "$all_venv"
check_requirements "$all_venv" './requirements.txt'

check_convert_script './convert.py'
for py in ./convert-*.py;do
(
# shellcheck source=/dev/null
source "$all_venv/bin/activate"
check_requirements requirements.txt
)

rm -rf -- "$all_venv"

check_convert_script convert.py
for py in convert-*.py; do
check_convert_script "$py"
done

info "Done! No issues found."
info 'Done! No issues found.'
71 changes: 38 additions & 33 deletions convert-hf-to-gguf.py
Original file line number Diff line number Diff line change
Expand Up @@ -238,7 +238,7 @@ def _set_vocab_gpt2(self):
tokens: list[bytearray] = []
toktypes: list[int] = []

from transformers import AutoTokenizer # type: ignore[attr-defined]
from transformers import AutoTokenizer
tokenizer = AutoTokenizer.from_pretrained(dir_model)
vocab_size = hparams.get("vocab_size", len(tokenizer.vocab))
assert max(tokenizer.vocab.values()) < vocab_size
Expand Down Expand Up @@ -848,7 +848,7 @@ def set_gguf_parameters(self):
hparams = self.hparams
block_count = hparams["num_hidden_layers"]

self.gguf_writer.add_name(dir_model.name)
self.gguf_writer.add_name(self.dir_model.name)
self.gguf_writer.add_context_length(hparams["max_position_embeddings"])
self.gguf_writer.add_embedding_length(hparams["hidden_size"])
self.gguf_writer.add_block_count(block_count)
Expand Down Expand Up @@ -894,7 +894,7 @@ def set_vocab(self):
tokens: list[bytearray] = []
toktypes: list[int] = []

from transformers import AutoTokenizer # type: ignore[attr-defined]
from transformers import AutoTokenizer
tokenizer = AutoTokenizer.from_pretrained(dir_model, trust_remote_code=True)
vocab_size = hparams["vocab_size"]
assert max(tokenizer.get_vocab().values()) < vocab_size
Expand Down Expand Up @@ -1112,43 +1112,48 @@ def parse_args() -> argparse.Namespace:
return parser.parse_args()


args = parse_args()
def main() -> None:
args = parse_args()

dir_model = args.model
if not dir_model.is_dir():
print(f'Error: {args.model} is not a directory', file=sys.stderr)
sys.exit(1)
dir_model = args.model
if not dir_model.is_dir():
print(f'Error: {args.model} is not a directory', file=sys.stderr)
sys.exit(1)

ftype_map = {
"f32": gguf.GGMLQuantizationType.F32,
"f16": gguf.GGMLQuantizationType.F16,
}
ftype_map = {
"f32": gguf.GGMLQuantizationType.F32,
"f16": gguf.GGMLQuantizationType.F16,
}

if args.outfile is not None:
fname_out = args.outfile
else:
# output in the same directory as the model by default
fname_out = dir_model / f'ggml-model-{args.outtype}.gguf'
if args.outfile is not None:
fname_out = args.outfile
else:
# output in the same directory as the model by default
fname_out = dir_model / f'ggml-model-{args.outtype}.gguf'

print(f"Loading model: {dir_model.name}")
print(f"Loading model: {dir_model.name}")

hparams = Model.load_hparams(dir_model)
hparams = Model.load_hparams(dir_model)

with torch.inference_mode():
model_class = Model.from_model_architecture(hparams["architectures"][0])
model_instance = model_class(dir_model, ftype_map[args.outtype], fname_out, args.bigendian)
with torch.inference_mode():
model_class = Model.from_model_architecture(hparams["architectures"][0])
model_instance = model_class(dir_model, ftype_map[args.outtype], fname_out, args.bigendian)

print("Set model parameters")
model_instance.set_gguf_parameters()
print("Set model parameters")
model_instance.set_gguf_parameters()

print("Set model tokenizer")
model_instance.set_vocab()
print("Set model tokenizer")
model_instance.set_vocab()

if args.vocab_only:
print(f"Exporting model vocab to '{fname_out}'")
model_instance.write_vocab()
else:
print(f"Exporting model to '{fname_out}'")
model_instance.write()

print(f"Model successfully exported to '{fname_out}'")

if args.vocab_only:
print(f"Exporting model vocab to '{fname_out}'")
model_instance.write_vocab()
else:
print(f"Exporting model to '{fname_out}'")
model_instance.write()

print(f"Model successfully exported to '{fname_out}'")
if __name__ == '__main__':
main()
Loading

0 comments on commit ce26f49

Please sign in to comment.