if chkhsh == "855059429035d75a914d1eda9f10a876752e281a054a7a3d421ef0533e5b6249":
# ref: https://huggingface.co/HuggingFaceTB/SmolLM-135M
res = "smollm"
+ if chkhsh == "3c30d3ad1d6b64202cd222813e7736c2db6e1bd6d67197090fc1211fbc612ae7":
+ # ref: https://huggingface.co/bigscience/bloom
+ res = "bloom"
+ if chkhsh == "bc01ce58980e1db43859146dc51b1758b3b88729b217a74792e9f8d43e479d21":
+ # ref: https://huggingface.co/TurkuNLP/gpt3-finnish-small
+ res = "gpt3-finnish"
if res is None:
logger.warning("\n")
return tensors
-@Model.register("BloomForCausalLM")
+@Model.register("BloomForCausalLM", "BloomModel")
class BloomModel(Model):
model_arch = gguf.MODEL_ARCH.BLOOM
{"name": "codeshell", "tokt": TOKENIZER_TYPE.BPE, "repo": "https://huggingface.co/WisdomShell/CodeShell-7B", },
{"name": "tekken", "tokt": TOKENIZER_TYPE.BPE, "repo": "https://huggingface.co/mistralai/Mistral-Nemo-Base-2407", },
{"name": "smollm", "tokt": TOKENIZER_TYPE.BPE, "repo": "https://huggingface.co/HuggingFaceTB/SmolLM-135M", },
+ {"name": "bloom", "tokt": TOKENIZER_TYPE.BPE, "repo": "https://huggingface.co/bigscience/bloom", },
+ {"name": "gpt3-finnish", "tokt": TOKENIZER_TYPE.BPE, "repo": "https://huggingface.co/TurkuNLP/gpt3-finnish-small", },
]
LLAMA_VOCAB_PRE_TYPE_TEKKEN = 20,
LLAMA_VOCAB_PRE_TYPE_SMOLLM = 21,
LLAMA_VOCAB_PRE_TYPE_CODESHELL = 22,
+ LLAMA_VOCAB_PRE_TYPE_BLOOM = 23,
+ LLAMA_VOCAB_PRE_TYPE_GPT3_FINNISH = 24,
};
enum llama_rope_type {
};
break;
case LLAMA_VOCAB_PRE_TYPE_PORO:
+ case LLAMA_VOCAB_PRE_TYPE_BLOOM:
+ case LLAMA_VOCAB_PRE_TYPE_GPT3_FINNISH:
regex_exprs = {
" ?[^(\\s|.,!?…。,、।۔،)]+",
};
} else if (
tokenizer_pre == "codeshell") {
vocab.type_pre = LLAMA_VOCAB_PRE_TYPE_CODESHELL;
+ } else if (
+ tokenizer_pre == "bloom") {
+ vocab.type_pre = LLAMA_VOCAB_PRE_TYPE_BLOOM;
+ } else if (
+ tokenizer_pre == "gpt3-finnish") {
+ vocab.type_pre = LLAMA_VOCAB_PRE_TYPE_GPT3_FINNISH;
} else {
throw std::runtime_error(format("unknown pre-tokenizer type: '%s'", tokenizer_pre.c_str()));
}