6483 | 6483 |           "8-bit",
6484 | 6484 |           "none"
6485 | 6485 |         ],
6486 |      | -       "model_id": "OpenGVLab/InternVL2-1B",
6487 |      | -       "model_revision": "a9fc14aea824b6ea1d44f8778cad6b35512c4ce1"
     | 6486 | +       "model_id": "OpenGVLab/InternVL2-1B"
6488 | 6487 |       },
6489 | 6488 |       {
6490 | 6489 |         "model_format": "pytorch",

6494 | 6493 |           "8-bit",
6495 | 6494 |           "none"
6496 | 6495 |         ],
6497 |      | -       "model_id": "OpenGVLab/InternVL2-2B",
6498 |      | -       "model_revision": "422ad7c6335917bfb514958233955512338485a6"
     | 6496 | +       "model_id": "OpenGVLab/InternVL2-2B"
6499 | 6497 |       },
6500 | 6498 |       {
6501 | 6499 |         "model_format": "awq",
6502 | 6500 |         "model_size_in_billions": 2,
6503 | 6501 |         "quantizations": [
6504 | 6502 |           "Int4"
6505 | 6503 |         ],
6506 |      | -       "model_id": "OpenGVLab/InternVL2-2B-AWQ",
6507 |      | -       "model_revision": "701bc3fc098a8a3b686b3b4135cfb77202be89e0"
     | 6504 | +       "model_id": "OpenGVLab/InternVL2-2B-AWQ"
6508 | 6505 |       },
6509 | 6506 |       {
6510 | 6507 |         "model_format": "pytorch",

6514 | 6511 |           "8-bit",
6515 | 6512 |           "none"
6516 | 6513 |         ],
6517 |      | -       "model_id": "OpenGVLab/InternVL2-4B",
6518 |      | -       "model_revision": "b50544dafada6c41e80bfde2f57cc9b0140fc21c"
     | 6514 | +       "model_id": "OpenGVLab/InternVL2-4B"
6519 | 6515 |       },
6520 | 6516 |       {
6521 | 6517 |         "model_format": "pytorch",

6525 | 6521 |           "8-bit",
6526 | 6522 |           "none"
6527 | 6523 |         ],
6528 |      | -       "model_id": "OpenGVLab/InternVL2-8B",
6529 |      | -       "model_revision": "3bfd3664dea4f3da628785f5125d30f889701253"
     | 6524 | +       "model_id": "OpenGVLab/InternVL2-8B"
6530 | 6525 |       },
6531 | 6526 |       {
6532 | 6527 |         "model_format": "awq",
6533 | 6528 |         "model_size_in_billions": 8,
6534 | 6529 |         "quantizations": [
6535 | 6530 |           "Int4"
6536 | 6531 |         ],
6537 |      | -       "model_id": "OpenGVLab/InternVL2-8B-AWQ",
6538 |      | -       "model_revision": "9f1a4756b7ae18eb26d8a22b618dfc283e8193b3"
     | 6532 | +       "model_id": "OpenGVLab/InternVL2-8B-AWQ"
6539 | 6533 |       },
6540 | 6534 |       {
6541 | 6535 |         "model_format": "pytorch",

6545 | 6539 |           "8-bit",
6546 | 6540 |           "none"
6547 | 6541 |         ],
6548 |      | -       "model_id": "OpenGVLab/InternVL2-26B",
6549 |      | -       "model_revision": "b9f3c7e6d575b0115e076a3ffc46fd20b7586899"
     | 6542 | +       "model_id": "OpenGVLab/InternVL2-26B"
6550 | 6543 |       },
6551 | 6544 |       {
6552 | 6545 |         "model_format": "awq",
6553 | 6546 |         "model_size_in_billions": 26,
6554 | 6547 |         "quantizations": [
6555 | 6548 |           "Int4"
6556 | 6549 |         ],
6557 |      | -       "model_id": "OpenGVLab/InternVL2-26B-AWQ",
6558 |      | -       "model_revision": "469e0019ffd251e22ff6501a5c2321964e86ef0d"
     | 6550 | +       "model_id": "OpenGVLab/InternVL2-26B-AWQ"
6559 | 6551 |       },
6560 | 6552 |       {
6561 | 6553 |         "model_format": "pytorch",

6565 | 6557 |           "8-bit",
6566 | 6558 |           "none"
6567 | 6559 |         ],
6568 |      | -       "model_id": "OpenGVLab/InternVL2-40B",
6569 |      | -       "model_revision": "725a12063bb855c966e30a0617d0ccd9e870d772"
     | 6560 | +       "model_id": "OpenGVLab/InternVL2-40B"
6570 | 6561 |       },
6571 | 6562 |       {
6572 | 6563 |         "model_format": "awq",
6573 | 6564 |         "model_size_in_billions": 40,
6574 | 6565 |         "quantizations": [
6575 | 6566 |           "Int4"
6576 | 6567 |         ],
6577 |      | -       "model_id": "OpenGVLab/InternVL2-40B-AWQ",
6578 |      | -       "model_revision": "d92e140f6dfe8ea9679924c6a31898f42c4e1846"
     | 6568 | +       "model_id": "OpenGVLab/InternVL2-40B-AWQ"
6579 | 6569 |       },
6580 | 6570 |       {
6581 | 6571 |         "model_format": "pytorch",

6585 | 6575 |           "8-bit",
6586 | 6576 |           "none"
6587 | 6577 |         ],
6588 |      | -       "model_id": "OpenGVLab/InternVL2-Llama3-76B",
6589 |      | -       "model_revision": "cf7914905f78e9e3560ddbd6f5dfc39becac494f"
     | 6578 | +       "model_id": "OpenGVLab/InternVL2-Llama3-76B"
6590 | 6579 |       },
6591 | 6580 |       {
6592 | 6581 |         "model_format": "awq",
6593 | 6582 |         "model_size_in_billions": 76,
6594 | 6583 |         "quantizations": [
6595 | 6584 |           "Int4"
6596 | 6585 |         ],
6597 |      | -       "model_id": "OpenGVLab/InternVL2-Llama3-76B-AWQ",
6598 |      | -       "model_revision": "1bc796bf80f2ebc7d6a14c15f55217a4600d50a4"
     | 6586 | +       "model_id": "OpenGVLab/InternVL2-Llama3-76B-AWQ"
6599 | 6587 |       }
6600 | 6588 |     ],
6601 | 6589 |     "chat_template": "{% for message in messages %}{% if loop.first and messages[0]['role'] != 'system' %}{{ '<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n' }}{% endif %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
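The chat_template kept at the end of the hunk is a ChatML-style Jinja2 template: it prepends a default system prompt when the conversation does not start with a system message, wraps every turn in <|im_start|> / <|im_end|> markers, and, when add_generation_prompt is set, leaves an open assistant header for the model to complete. A minimal rendering sketch using plain jinja2; the messages list is made up for illustration and is not part of the registry:

from jinja2 import Template

# Template copied from the "chat_template" field above (ChatML-style markers).
CHAT_TEMPLATE = (
    "{% for message in messages %}"
    "{% if loop.first and messages[0]['role'] != 'system' %}"
    "{{ '<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n' }}"
    "{% endif %}"
    "{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}"
    "{% endfor %}"
    "{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}"
)

# Illustrative conversation only.
messages = [
    {"role": "user", "content": "Describe this image."},
    {"role": "assistant", "content": "A red bicycle leaning against a brick wall."},
    {"role": "user", "content": "What color is the bicycle?"},
]

prompt = Template(CHAT_TEMPLATE).render(messages=messages, add_generation_prompt=True)
print(prompt)  # ends with an open '<|im_start|>assistant\n' header, ready for generation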
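The substantive change in this hunk is dropping the pinned model_revision from every InternVL2 spec. With no revision recorded, a Hugging Face download simply follows the repository's default branch, so the latest published files are fetched instead of the previously pinned commits. A minimal sketch of that behaviour with huggingface_hub; the spec dict mirrors the 8B AWQ entry above, and resolve_revision is a hypothetical helper, not the project's actual loader:

from huggingface_hub import snapshot_download

def resolve_revision(spec: dict) -> str:
    # Hypothetical helper: honour a pinned revision if the registry entry has one,
    # otherwise fall back to the default branch (the latest commit on "main").
    return spec.get("model_revision", "main")

# Registry entry as it looks after this change (no "model_revision" key).
spec = {
    "model_format": "awq",
    "model_size_in_billions": 8,
    "quantizations": ["Int4"],
    "model_id": "OpenGVLab/InternVL2-8B-AWQ",
}

local_dir = snapshot_download(
    repo_id=spec["model_id"],
    revision=resolve_revision(spec),  # "main" -> tip of the default branch
)
print(local_dir)  # local cache path containing the downloaded snapshot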