{"model": "CausalLM/7B", "base_model": "", "revision": "main", "private": false, "precision": "bfloat16", "weight_type": "Original", "status": "PENDING", "submitted_time": "2023-11-16T19:21:57Z", "model_type": "\ud83d\udd36 : fine-tuned", "likes": 103, "params": 7.0, "license": "wtfpl"}