Upload dataset

- README.md +203 -0
- vit/train-00000-of-00001.parquet +3 -0

README.md CHANGED
```diff
@@ -597,6 +597,205 @@ dataset_info:
     num_examples: 264
   download_size: 1724766354
   dataset_size: 2561357508
+- config_name: vit
+  features:
+  - name: task_name
+    dtype: string
+  - name: layer_model
+    dtype: string
+  - name: layer_name
+    dtype: string
+  - name: pre_ft_name
+    dtype: string
+  - name: pre_ft_weight
+    sequence:
+      sequence: float32
+  - name: lora_0_name
+    dtype: string
+  - name: lora_0_A_weight
+    sequence:
+      sequence: float32
+  - name: lora_0_B_weight
+    sequence:
+      sequence: float32
+  - name: lora_0_rank
+    dtype: int64
+  - name: lora_0_alpha
+    dtype: int64
+  - name: lora_1_name
+    dtype: string
+  - name: lora_1_A_weight
+    sequence:
+      sequence: float32
+  - name: lora_1_B_weight
+    sequence:
+      sequence: float32
+  - name: lora_1_rank
+    dtype: int64
+  - name: lora_1_alpha
+    dtype: int64
+  - name: lora_2_name
+    dtype: string
+  - name: lora_2_A_weight
+    sequence:
+      sequence: float32
+  - name: lora_2_B_weight
+    sequence:
+      sequence: float32
+  - name: lora_2_rank
+    dtype: int64
+  - name: lora_2_alpha
+    dtype: int64
+  - name: lora_3_name
+    dtype: string
+  - name: lora_3_A_weight
+    sequence:
+      sequence: float32
+  - name: lora_3_B_weight
+    sequence:
+      sequence: float32
+  - name: lora_3_rank
+    dtype: int64
+  - name: lora_3_alpha
+    dtype: int64
+  - name: lora_4_name
+    dtype: string
+  - name: lora_4_A_weight
+    sequence:
+      sequence: float32
+  - name: lora_4_B_weight
+    sequence:
+      sequence: float32
+  - name: lora_4_rank
+    dtype: int64
+  - name: lora_4_alpha
+    dtype: int64
+  - name: lora_5_name
+    dtype: string
+  - name: lora_5_A_weight
+    sequence:
+      sequence: float32
+  - name: lora_5_B_weight
+    sequence:
+      sequence: float32
+  - name: lora_5_rank
+    dtype: int64
+  - name: lora_5_alpha
+    dtype: int64
+  - name: lora_6_name
+    dtype: string
+  - name: lora_6_A_weight
+    sequence:
+      sequence: float32
+  - name: lora_6_B_weight
+    sequence:
+      sequence: float32
+  - name: lora_6_rank
+    dtype: int64
+  - name: lora_6_alpha
+    dtype: int64
+  - name: lora_7_name
+    dtype: string
+  - name: lora_7_A_weight
+    sequence:
+      sequence: float32
+  - name: lora_7_B_weight
+    sequence:
+      sequence: float32
+  - name: lora_7_rank
+    dtype: int64
+  - name: lora_7_alpha
+    dtype: int64
+  - name: lora_8_name
+    dtype: string
+  - name: lora_8_A_weight
+    sequence:
+      sequence: float32
+  - name: lora_8_B_weight
+    sequence:
+      sequence: float32
+  - name: lora_8_rank
+    dtype: int64
+  - name: lora_8_alpha
+    dtype: int64
+  - name: lora_9_name
+    dtype: string
+  - name: lora_9_A_weight
+    sequence:
+      sequence: float32
+  - name: lora_9_B_weight
+    sequence:
+      sequence: float32
+  - name: lora_9_rank
+    dtype: int64
+  - name: lora_9_alpha
+    dtype: int64
+  - name: lora_10_name
+    dtype: string
+  - name: lora_10_A_weight
+    sequence:
+      sequence: float32
+  - name: lora_10_B_weight
+    sequence:
+      sequence: float32
+  - name: lora_10_rank
+    dtype: int64
+  - name: lora_10_alpha
+    dtype: int64
+  - name: lora_11_name
+    dtype: string
+  - name: lora_11_A_weight
+    sequence:
+      sequence: float32
+  - name: lora_11_B_weight
+    sequence:
+      sequence: float32
+  - name: lora_11_rank
+    dtype: int64
+  - name: lora_11_alpha
+    dtype: int64
+  - name: lora_12_name
+    dtype: string
+  - name: lora_12_A_weight
+    sequence:
+      sequence: float32
+  - name: lora_12_B_weight
+    sequence:
+      sequence: float32
+  - name: lora_12_rank
+    dtype: int64
+  - name: lora_12_alpha
+    dtype: int64
+  - name: lora_13_name
+    dtype: string
+  - name: lora_13_A_weight
+    sequence:
+      sequence: float32
+  - name: lora_13_B_weight
+    sequence:
+      sequence: float32
+  - name: lora_13_rank
+    dtype: int64
+  - name: lora_13_alpha
+    dtype: int64
+  - name: lora_14_name
+    dtype: string
+  - name: lora_14_A_weight
+    sequence:
+      sequence: float32
+  - name: lora_14_B_weight
+    sequence:
+      sequence: float32
+  - name: lora_14_rank
+    dtype: int64
+  - name: lora_14_alpha
+    dtype: int64
+  splits:
+  - name: train
+    num_bytes: 93231628
+    num_examples: 24
+  download_size: 111481540
+  dataset_size: 93231628
 configs:
 - config_name: mistral-7b-v0.1-dpo
   data_files:
@@ -610,4 +809,8 @@ configs:
   data_files:
   - split: train
     path: stable-diffusion-1.5/train-*
+- config_name: vit
+  data_files:
+  - split: train
+    path: vit/train-*
 ---
```
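The new `vit` config stores, per row, a layer's pre-fine-tuning weight matrix together with fifteen LoRA adapters (`lora_0` through `lora_14`), each as an A/B factor pair plus its rank and alpha. Below is a minimal loading sketch, assuming the standard LoRA merge convention W = W0 + (alpha / rank) · B · A; the repo id is a placeholder, and the `B @ A` orientation is an assumption, not something stated in the card.

```python
# Minimal sketch, not the dataset's official loader.
# "your-org/your-dataset" is a placeholder repo id; config "vit", split "train",
# and the column names all come from the dataset card above.
import numpy as np
from datasets import load_dataset

ds = load_dataset("your-org/your-dataset", name="vit", split="train")

row = ds[0]
W0 = np.asarray(row["pre_ft_weight"], dtype=np.float32)   # pre-fine-tuning weight matrix
A = np.asarray(row["lora_0_A_weight"], dtype=np.float32)  # LoRA down-projection factor
B = np.asarray(row["lora_0_B_weight"], dtype=np.float32)  # LoRA up-projection factor
scale = row["lora_0_alpha"] / row["lora_0_rank"]          # standard LoRA scaling

# Merge under the usual LoRA convention (assumed here): W = W0 + (alpha / rank) * B @ A
W_merged = W0 + scale * (B @ A)
print(row["task_name"], row["layer_name"], W_merged.shape)
```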
vit/train-00000-of-00001.parquet ADDED

```diff
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:87fb6e6d1d09712db6936d944dcddece31eefbb5135cabcaba48d2e7b0544b96
+size 111481540
```
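The parquet shard itself lives in Git LFS; what is committed is only a pointer recording the blob's SHA-256 and byte size. A small sketch, assuming a locally downloaded copy at the same relative path, that checks the file against the pointer:

```python
# Verify a downloaded shard against the LFS pointer above.
# The local path is an assumption; the hash and size come from the pointer file.
import hashlib
import os

path = "vit/train-00000-of-00001.parquet"
expected_oid = "87fb6e6d1d09712db6936d944dcddece31eefbb5135cabcaba48d2e7b0544b96"
expected_size = 111481540

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert h.hexdigest() == expected_oid, "sha256 mismatch"
print("parquet shard matches the LFS pointer")
```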