@@ -39,9 +39,9 @@ class Metadata:
     license_name: Optional[str] = None
     license_link: Optional[str] = None
     base_models: Optional[list[dict]] = None
+    datasets: Optional[list[dict]] = None
     tags: Optional[list[str]] = None
     languages: Optional[list[str]] = None
-    datasets: Optional[list[str]] = None
 
     @staticmethod
     def load(metadata_override_path: Optional[Path] = None, model_path: Optional[Path] = None, model_name: Optional[str] = None, total_params: int = 0) -> Metadata:
@@ -91,9 +91,11 @@ def load(metadata_override_path: Optional[Path] = None, model_path: Optional[Pat
         # Base Models is received here as an array of models
         metadata.base_models = metadata_override.get("general.base_models", metadata.base_models)
 
+        # Datasets is received here as an array of datasets
+        metadata.datasets = metadata_override.get("general.datasets", metadata.datasets)
+
         metadata.tags = metadata_override.get(Keys.General.TAGS, metadata.tags)
         metadata.languages = metadata_override.get(Keys.General.LANGUAGES, metadata.languages)
-        metadata.datasets = metadata_override.get(Keys.General.DATASETS, metadata.datasets)
 
         # Direct Metadata Override (via direct cli argument)
         if model_name is not None:
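For reference, here is a minimal sketch (not part of the diff) of a metadata override file that exercises the new array-of-dicts form of `general.datasets`, loaded through `Metadata.load`. The file name, the field values, and the assumption that the override file is plain JSON keyed by these GGUF KV names are illustrative only.

```python
# Sketch: build a hypothetical override file and load it via Metadata.load.
import json
from pathlib import Path

from gguf.metadata import Metadata  # assumes gguf-py is importable

override = {
    "general.base_models": [
        {"name": "Mistral 7B v0.1", "organization": "Mistralai",
         "repo_url": "https://huggingface.co/mistralai/Mistral-7B-v0.1"},
    ],
    # New in this change: datasets are a list of dicts rather than a list of strings.
    "general.datasets": [
        {"name": "OpenOrca", "organization": "Open Orca",
         "repo_url": "https://huggingface.co/datasets/Open-Orca/OpenOrca"},
    ],
}

override_path = Path("metadata_override.json")  # hypothetical file name
override_path.write_text(json.dumps(override, indent=2))

metadata = Metadata.load(metadata_override_path=override_path)
print(metadata.datasets)  # the list of dataset dicts taken from the override
```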
@@ -346,12 +348,12 @@ def use_array_model_card_metadata(metadata_key: str, model_card_key: str):
         use_model_card_metadata("author", "model_creator")
         use_model_card_metadata("basename", "model_type")
 
-        if "base_model" in model_card:
+        if "base_model" in model_card or "base_models" in model_card:
             # This represents the parent models that this is based on
             # Example: stabilityai/stable-diffusion-xl-base-1.0. Can also be a list (for merges)
             # Example of merges: https://huggingface.co/EmbeddedLLM/Mistral-7B-Merge-14-v0.1/blob/main/README.md
             metadata_base_models = []
-            base_model_value = model_card.get("base_model", None)
+            base_model_value = model_card.get("base_model", model_card.get("base_models", None))
 
             if base_model_value is not None:
                 if isinstance(base_model_value, str):
@@ -364,18 +366,54 @@ def use_array_model_card_metadata(metadata_key: str, model_card_key: str):
 
             for model_id in metadata_base_models:
                 # NOTE: model size of base model is assumed to be similar to the size of the current model
-                model_full_name_component, org_component, basename, finetune, version, size_label = Metadata.get_model_id_components(model_id, total_params)
                 base_model = {}
-                if model_full_name_component is not None:
-                    base_model["name"] = Metadata.id_to_title(model_full_name_component)
-                if org_component is not None:
-                    base_model["organization"] = Metadata.id_to_title(org_component)
-                if version is not None:
-                    base_model["version"] = version
-                if org_component is not None and model_full_name_component is not None:
-                    base_model["repo_url"] = f"https://huggingface.co/{org_component}/{model_full_name_component}"
+                if isinstance(model_id, str) and (model_id.startswith("http://") or model_id.startswith("https://")):
+                    base_model["repo_url"] = model_id
+                else:
+                    # Likely a Hugging Face ID
+                    model_full_name_component, org_component, basename, finetune, version, size_label = Metadata.get_model_id_components(model_id, total_params)
+                    if model_full_name_component is not None:
+                        base_model["name"] = Metadata.id_to_title(model_full_name_component)
+                    if org_component is not None:
+                        base_model["organization"] = Metadata.id_to_title(org_component)
+                    if version is not None:
+                        base_model["version"] = version
+                    if org_component is not None and model_full_name_component is not None:
+                        base_model["repo_url"] = f"https://huggingface.co/{org_component}/{model_full_name_component}"
                 metadata.base_models.append(base_model)
 
+        if "datasets" in model_card or "dataset" in model_card:
+            # This represents the datasets that this was trained from
+            metadata_datasets = []
+            dataset_value = model_card.get("datasets", model_card.get("dataset", None))
+
+            if dataset_value is not None:
+                if isinstance(dataset_value, str):
+                    metadata_datasets.append(dataset_value)
+                elif isinstance(dataset_value, list):
+                    metadata_datasets.extend(dataset_value)
+
+            if metadata.datasets is None:
+                metadata.datasets = []
+
+            for dataset_id in metadata_datasets:
+                # NOTE: model size of base model is assumed to be similar to the size of the current model
+                dataset = {}
+                if isinstance(dataset_id, str) and (dataset_id.startswith("http://") or dataset_id.startswith("https://")):
+                    dataset["repo_url"] = dataset_id
+                else:
+                    # Likely a Hugging Face ID
+                    dataset_name_component, org_component, basename, finetune, version, size_label = Metadata.get_model_id_components(dataset_id, total_params)
+                    if dataset_name_component is not None:
+                        dataset["name"] = Metadata.id_to_title(dataset_name_component)
+                    if org_component is not None:
+                        dataset["organization"] = Metadata.id_to_title(org_component)
+                    if version is not None:
+                        dataset["version"] = version
+                    if org_component is not None and dataset_name_component is not None:
+                        dataset["repo_url"] = f"https://huggingface.co/{org_component}/{dataset_name_component}"
+                metadata.datasets.append(dataset)
+
         use_model_card_metadata("license", "license")
         use_model_card_metadata("license_name", "license_name")
         use_model_card_metadata("license_link", "license_link")
@@ -386,9 +424,6 @@ def use_array_model_card_metadata(metadata_key: str, model_card_key: str):
         use_array_model_card_metadata("languages", "languages")
         use_array_model_card_metadata("languages", "language")
 
-        use_array_model_card_metadata("datasets", "datasets")
-        use_array_model_card_metadata("datasets", "dataset")
-
         # Hugging Face Parameter Heuristics
         ####################################
 
@@ -504,9 +539,29 @@ def set_gguf_meta_model(self, gguf_writer: gguf.GGUFWriter):
                 if "repo_url" in base_model_entry:
                     gguf_writer.add_base_model_repo_url(key, base_model_entry["repo_url"])
 
+        if self.datasets is not None:
+            gguf_writer.add_dataset_count(len(self.datasets))
+            for key, dataset_entry in enumerate(self.datasets):
+                if "name" in dataset_entry:
+                    gguf_writer.add_dataset_name(key, dataset_entry["name"])
+                if "author" in dataset_entry:
+                    gguf_writer.add_dataset_author(key, dataset_entry["author"])
+                if "version" in dataset_entry:
+                    gguf_writer.add_dataset_version(key, dataset_entry["version"])
+                if "organization" in dataset_entry:
+                    gguf_writer.add_dataset_organization(key, dataset_entry["organization"])
+                if "description" in dataset_entry:
+                    gguf_writer.add_dataset_description(key, dataset_entry["description"])
+                if "url" in dataset_entry:
+                    gguf_writer.add_dataset_url(key, dataset_entry["url"])
+                if "doi" in dataset_entry:
+                    gguf_writer.add_dataset_doi(key, dataset_entry["doi"])
+                if "uuid" in dataset_entry:
+                    gguf_writer.add_dataset_uuid(key, dataset_entry["uuid"])
+                if "repo_url" in dataset_entry:
+                    gguf_writer.add_dataset_repo_url(key, dataset_entry["repo_url"])
+
         if self.tags is not None:
             gguf_writer.add_tags(self.tags)
         if self.languages is not None:
             gguf_writer.add_languages(self.languages)
-        if self.datasets is not None:
-            gguf_writer.add_datasets(self.datasets)
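Finally, an end-to-end sketch (not from the PR) of how the new per-dataset writer calls get exercised: build a `Metadata` with dataset entries set directly and let `set_gguf_meta_model()` emit them through a `GGUFWriter`. The output path, architecture string, field values, and the header/KV write sequence are assumptions about the usual gguf-py workflow, not something this diff defines; the `add_dataset_*` methods are the ones this change pairs with in the writer.

```python
# Sketch: write dataset provenance metadata into a (tensor-less) GGUF file.
import gguf

metadata = gguf.Metadata()
metadata.name = "Example Model"  # hypothetical; set_gguf_meta_model expects a name
metadata.datasets = [
    {
        "name": "OpenOrca",
        "organization": "Open Orca",
        "repo_url": "https://huggingface.co/datasets/Open-Orca/OpenOrca",
    },
]

writer = gguf.GGUFWriter("example.gguf", "llama")  # assumed constructor arguments
metadata.set_gguf_meta_model(writer)               # emits the general.dataset.* key-value pairs
writer.write_header_to_file()
writer.write_kv_data_to_file()
writer.close()
```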