git.djapps.eu Git - pkg/ggml/sources/llama.cpp/commitdiff
ci : bump ty to 0.0.26 (#21156)
authorSigbjørn Skjæret <redacted>
Mon, 30 Mar 2026 07:29:15 +0000 (09:29 +0200)
committerGitHub <redacted>
Mon, 30 Mar 2026 07:29:15 +0000 (09:29 +0200)
* fix incorrect type ignore comments

* bump ty to 0.0.26

.github/workflows/python-type-check.yml
convert_hf_to_gguf.py
examples/model-conversion/scripts/causal/compare-logits.py
examples/model-conversion/scripts/utils/check-nmse.py
examples/model-conversion/scripts/utils/compare_tokens.py
examples/model-conversion/scripts/utils/semantic_check.py
gguf-py/gguf/vocab.py
scripts/gen-unicode-data.py
tools/server/tests/utils.py

index 2c6267872636919496de2dff10f11079111c662c..dc7aebe24ca2c03ce12444b4013aa407ea99ff78 100644 (file)
@@ -31,7 +31,7 @@ jobs:
         uses: actions/setup-python@v6
         with:
           python-version: "3.11"
-          pip-install: -r requirements/requirements-all.txt ty==0.0.24
+          pip-install: -r requirements/requirements-all.txt ty==0.0.26
       # - name: Type-check with Pyright
       #   uses: jakebailey/pyright-action@v2
       #   with:
index bcf98cfae76f48a2035d9e9af7acc8cb65ac273c..51f0d1ab152ba94d8048d4886ed2da2e2e0b5bc5 100755 (executable)
@@ -31,10 +31,10 @@ import gguf
 from gguf.vocab import MistralTokenizerType, MistralVocab
 
 try:
-    from mistral_common.tokens.tokenizers.base import TokenizerVersion # type: ignore[import-not-found]
-    from mistral_common.tokens.tokenizers.multimodal import DATASET_MEAN as _MISTRAL_COMMON_DATASET_MEAN, DATASET_STD as _MISTRAL_COMMON_DATASET_STD # type: ignore[import-not-found]
-    from mistral_common.tokens.tokenizers.tekken import Tekkenizer # type: ignore[import-not-found]
-    from mistral_common.tokens.tokenizers.sentencepiece import ( # type: ignore[import-not-found]
+    from mistral_common.tokens.tokenizers.base import TokenizerVersion # type: ignore[import-not-found, ty:unresolved-import]
+    from mistral_common.tokens.tokenizers.multimodal import DATASET_MEAN as _MISTRAL_COMMON_DATASET_MEAN, DATASET_STD as _MISTRAL_COMMON_DATASET_STD # type: ignore[import-not-found, ty:unresolved-import]
+    from mistral_common.tokens.tokenizers.tekken import Tekkenizer # type: ignore[import-not-found, ty:unresolved-import]
+    from mistral_common.tokens.tokenizers.sentencepiece import ( # type: ignore[import-not-found, ty:unresolved-import]
         SentencePieceTokenizer,
     )
 
index 83bd14c659f2d8ee0570b5205743a2713c604c74..181c0486301288baffcfda179b80a140903eed17 100755 (executable)
@@ -7,7 +7,7 @@ import os
 
 # Add utils directory to path for direct script execution
 sys.path.insert(0, str(Path(__file__).parent.parent / "utils"))
-from common import get_model_name_from_env_path, compare_tokens, exit_with_warning  # type: ignore[import-not-found]
+from common import get_model_name_from_env_path, compare_tokens, exit_with_warning  # type: ignore[import-not-found, ty:unresolved-import]
 
 def quick_logits_check(pytorch_file, llamacpp_file):
     """Lightweight sanity check before NMSE"""
index 83f63f9ff36fe793171dd30a9fb724afe9a25125..324e3858e368245e39d2a2e5d5d9ebba6719ac57 100755 (executable)
@@ -5,7 +5,7 @@ import sys
 import os
 import argparse
 from pathlib import Path
-from common import get_model_name_from_env_path  # type: ignore[import-not-found]
+from common import get_model_name_from_env_path  # type: ignore[import-not-found, ty:unresolved-import]
 
 def calculate_nmse(reference, test):
     mse = np.mean((test - reference) ** 2)
index 62826ec7a60baec4022418c45af0999829a64279..55e3f26ab4d14639b1264755b7a671bb02ea4713 100755 (executable)
@@ -2,7 +2,7 @@
 
 import argparse
 import sys
-from common import compare_tokens  # type: ignore[import-not-found]
+from common import compare_tokens  # type: ignore[import-not-found, ty:unresolved-import]
 
 
 def parse_arguments():
index 73e20ea48909aba68a6a2d19cca6d32ff5805087..db0d004dab262555b68b1c7e9a9f7975043e0630 100644 (file)
@@ -7,7 +7,7 @@ import importlib
 from pathlib import Path
 
 from transformers import AutoTokenizer, AutoConfig, AutoModelForCausalLM, AutoModel
-from common import compare_tokens, exit_with_warning  # type: ignore[import-not-found]
+from common import compare_tokens, exit_with_warning  # type: ignore[import-not-found, ty:unresolved-import]
 
 unreleased_model_name = os.getenv('UNRELEASED_MODEL_NAME')
 
index e4ab5e1e4ba27a292883dd2e606d8148eae68671..5cd729dfa86a830d7f9ccc404877699994389be8 100644 (file)
@@ -14,12 +14,12 @@ except ImportError:
     SentencePieceProcessor: Any = None
 
 try:
-    from mistral_common.tokens.tokenizers.mistral import MistralTokenizer # type: ignore[import-not-found]
-    from mistral_common.tokens.tokenizers.tekken import Tekkenizer # type: ignore[import-not-found]
-    from mistral_common.tokens.tokenizers.utils import ( # type: ignore[import-not-found]
+    from mistral_common.tokens.tokenizers.mistral import MistralTokenizer # type: ignore[import-not-found, ty:unresolved-import]
+    from mistral_common.tokens.tokenizers.tekken import Tekkenizer # type: ignore[import-not-found, ty:unresolved-import]
+    from mistral_common.tokens.tokenizers.utils import ( # type: ignore[import-not-found, ty:unresolved-import]
         _filter_valid_tokenizer_files,
     )
-    from mistral_common.tokens.tokenizers.sentencepiece import ( # type: ignore[import-not-found]
+    from mistral_common.tokens.tokenizers.sentencepiece import ( # type: ignore[import-not-found, ty:unresolved-import]
         SentencePieceTokenizer,
     )
 except ImportError:
@@ -32,7 +32,7 @@ else:
     _mistral_common_installed = True
 
 try:
-    from mistral_common.tokens.tokenizers.utils import ( # type: ignore[import-not-found]
+    from mistral_common.tokens.tokenizers.utils import ( # type: ignore[import-not-found, ty:unresolved-import]
         get_one_valid_tokenizer_file,
     )
 except ImportError:
index 2d9bde01c3ca728d49e9c3910975dcc44746eb95..0fa78b22ec90348e503a901563b53213ca6a2381 100644 (file)
@@ -147,7 +147,7 @@ ranges_nfd: list[tuple[int, int, int]] = [(0, 0, 0)]  # start, last, nfd
 for codepoint, norm in table_nfd:
     start = ranges_nfd[-1][0]
     if ranges_nfd[-1] != (start, codepoint - 1, norm):
-        ranges_nfd.append(None)  # type: ignore[arg-type]  # dummy, will be replaced below
+        ranges_nfd.append((0, 0, 0))  # dummy, will be replaced below
         start = codepoint
     ranges_nfd[-1] = (start, codepoint, norm)
 
index 8f62210bccfe3061109b468fbf3a4e6bf794beab..a9a7e3c4f36dfc34d3ced94580d145857d98b58b 100644 (file)
@@ -116,7 +116,7 @@ class ServerProcess:
             self.server_port = int(os.environ["PORT"])
         self.external_server = "DEBUG_EXTERNAL" in os.environ
 
-    def start(self, timeout_seconds: int | None = DEFAULT_HTTP_TIMEOUT) -> None:
+    def start(self, timeout_seconds: int = DEFAULT_HTTP_TIMEOUT) -> None:
         if self.external_server:
             print(f"[external_server]: Assuming external server running on {self.server_host}:{self.server_port}")
             return