from pathlib import Path
from typing import Any
-import gguf
import numpy as np
import torch
from transformers import AutoTokenizer # type: ignore[import]
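+# descriptive note (added comment): use the gguf-py module bundled in this repo instead of an
+# installed gguf package, unless NO_LOCAL_GGUF is set; inserting at index 1 keeps the script's
+# own directory at the front of sys.path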
+if 'NO_LOCAL_GGUF' not in os.environ:
+    sys.path.insert(1, str(Path(__file__).parent / 'gguf-py' / 'gguf'))
+import gguf
+
def bytes_to_unicode():
    # ref: https://github.com/openai/gpt-2/blob/master/src/encoder.py
import sys
from pathlib import Path
-import gguf
import numpy as np
+import os
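+# descriptive note (added comment): prefer the repo's bundled gguf-py module over any installed
+# gguf package, unless NO_LOCAL_GGUF is set in the environment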
+if 'NO_LOCAL_GGUF' not in os.environ:
+    sys.path.insert(1, str(Path(__file__).parent / 'gguf-py' / 'gguf'))
+import gguf
+
# Note: Does not support GGML_QKK_64
QK_K = 256
# Items here are (block size, type size)
from pathlib import Path
from typing import IO, TYPE_CHECKING, Any, Callable, Generator, Iterable, Literal, Sequence, TypeVar
-import gguf
import numpy as np
from sentencepiece import SentencePieceProcessor # type: ignore[import]
+import os
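+# descriptive note (added comment): resolve `import gguf` to the in-repo gguf-py copy rather than
+# a site-packages install, unless NO_LOCAL_GGUF is set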
+if 'NO_LOCAL_GGUF' not in os.environ:
+    sys.path.insert(1, str(Path(__file__).parent / 'gguf-py' / 'gguf'))
+import gguf
+
if TYPE_CHECKING:
    from typing import TypeAlias
# train-text-from-scratch checkpoint --> gguf conversion
import argparse
-import gguf
import os
import struct
import sys
import numpy as np
from pathlib import Path
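+# descriptive note (added comment): this script sits two directories below the repo root (hence
+# the '..' components); prefer the repo's gguf-py module unless NO_LOCAL_GGUF is set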
+if 'NO_LOCAL_GGUF' not in os.environ:
+    sys.path.insert(1, str(Path(__file__).parent / '..' / '..' / 'gguf-py' / 'gguf'))
+import gguf
+
# gguf constants
LLM_KV_OPTIMIZER_TYPE = "optimizer.type"
LLM_KV_OPTIMIZER_TYPE_ADAM = "adam"