"""
BASE_DOMAIN = "https://huggingface.co"
- ALIGNMENT = 8 # bytes
@classmethod
def get_list_tensors_hf_model(cls, model_id: str) -> dict[str, RemoteTensor]:
# Calculate the data start offset
data_start_offset = 8 + metadata_length
- alignment = SafetensorRemote.ALIGNMENT
- if data_start_offset % alignment != 0:
- data_start_offset += alignment - (data_start_offset % alignment)
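# Illustrative sketch, not part of this change: the safetensors layout is an
# 8-byte little-endian uint64 giving the JSON header length, the JSON header
# itself, and then the tensor data immediately after it. There is no alignment
# padding between the header and the data, which is why the rounding above is
# removed. A minimal parse of the raw header bytes looks like:
#
#     import json
#     import struct
#
#     metadata_length = struct.unpack("<Q", raw_data[:8])[0]
#     metadata = json.loads(raw_data[8:8 + metadata_length])
#     data_start_offset = 8 + metadata_length  # data begins here, unpadded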
# Check if we have enough data to read the metadata
if len(raw_data) < 8 + metadata_length:
Custom parsing gives a bit more control over memory usage.
The official safetensors library does not expose the byte range of each tensor within the file.
"""
- ALIGNMENT = 8 # bytes
tensors: dict[str, LocalTensor]
raise ValueError(f"Failed to parse safetensors metadata as JSON: {e}")
data_start_offset = f.tell()
- alignment = self.ALIGNMENT
- if data_start_offset % alignment != 0:
- data_start_offset += alignment - (data_start_offset % alignment)
tensors: dict[str, LocalTensor] = {}
for name, meta in metadata.items():