{ "query_token_id": "[unused0]", "doc_token_id": "[unused1]", "query_token": "[Q]", "doc_token": "[D]", "ncells": null, "centroid_score_threshold": null, "ndocs": null, "load_index_with_mmap": false, "index_path": null, "index_bsize": 64, "nbits": 1, "kmeans_niters": 4, "resume": false, "pool_factor": 1, "clustering_mode": "hierarchical", "protected_tokens": 0, "similarity": "cosine", "bsize": 32, "accumsteps": 1, "lr": 1e-5, "maxsteps": 15626, "save_every": null, "warmup": 781, "warmup_bert": null, "relu": false, "nway": 32, "use_ib_negatives": false, "reranker": false, "distillation_alpha": 1.0, "ignore_scores": false, "model_name": "answerdotai/AnswerAI-ColBERTv2.5-small", "schedule_free": false, "schedule_free_wd": 0.0, "kldiv_loss": true, "marginmse_loss": false, "kldiv_weight": 1.0, "marginmse_weight": 0.05, "ib_loss_weight": 1.0, "normalise_training_scores": true, "normalization_method": "minmax", "quant_aware": false, "highest_quant_level": 8, "lowest_quant_level": 2, "query_maxlen": 32, "attend_to_mask_tokens": false, "interaction": "colbert", "cap_padding": 0, "dynamic_query_maxlen": false, "dynamic_querylen_multiples": 32, "dim": 96, "doc_maxlen": 300, "mask_punctuation": true, "checkpoint": "answerdotai/AnswerAI-ColBERTv2.5-small", "triples": "\/home\/bclavie\/colbertv2.5_en\/data\/msmarco\/triplets.jsonl", "collection": "\/home\/bclavie\/colbertv2.5_en\/data\/msmarco\/collection.tsv", "queries": "\/home\/bclavie\/colbertv2.5_en\/data\/msmarco\/queries.tsv", "index_name": null, "overwrite": false, "root": "\/home\/bclavie\/colbertv2.5_en\/experiments", "experiment": "minicolbertv2.5", "index_root": null, "name": "2024-08\/07\/08.16.20", "rank": 0, "nranks": 4, "amp": true, "gpus": 4, "avoid_fork_if_possible": false, "meta": { "hostname": "a100-80-4x", "current_datetime": "Aug 07, 2024 ; 12:13PM UTC (+0000)", "cmd": "train_v2.5_mini.py", "version": "colbert-v0.4" } }