import os
import subprocess

from typing import Any

from .lib.finetune import train


class Global:
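    """Application-wide shared state: model/data configuration, loaded model
    objects, training control flags, and UI settings."""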
    version = None

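    # Base model and data configuration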
    base_model: str = ""
    data_dir: str = ""
    load_8bit: bool = False

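    # Loaded model and tokenizer instances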
    loaded_tokenizer: Any = None
    loaded_base_model: Any = None

    # Functions
    train_fn: Any = train

    # Training Control
    should_stop_training = False

    # Model related
    model_has_been_used = False

    # UI related
    ui_title: str = "LLaMA-LoRA"
    ui_emoji: str = "🦙🎛️"
    ui_subtitle: str = "Toolkit for evaluating and fine-tuning LLaMA models with low-rank adaptation (LoRA)."
    ui_show_sys_info: bool = True
    ui_dev_mode: bool = False


def get_package_dir():
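    """Return the absolute path of the package directory (the directory containing this file)."""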
    current_file_path = os.path.abspath(__file__)
    parent_directory_path = os.path.dirname(current_file_path)
    return os.path.abspath(parent_directory_path)


def get_git_commit_hash():
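    """Return the current git commit hash of the project, or None if it cannot be determined."""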
    try:
        original_cwd = os.getcwd()
        project_dir = get_package_dir()
        try:
            os.chdir(project_dir)
            commit_hash = subprocess.check_output(
                ['git', 'rev-parse', 'HEAD']).strip().decode('utf-8')
            return commit_hash
        except Exception as e:
            print(f"Cannot get git commit hash: {e}")
        finally:
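            # Always restore the original working directory.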
            os.chdir(original_cwd)
    except Exception as e:
        print(f"Cannot get git commit hash: {e}")


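# Use the first 8 characters of the current git commit hash as the app version, when available.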
commit_hash = get_git_commit_hash()

if commit_hash:
    Global.version = commit_hash[:8]
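
# Illustrative usage sketch: other modules read and write this shared state
# directly on the Global class. The import path `llama_lora.globals` and the
# attribute values below are assumptions for the example, not project defaults:
#
#     from llama_lora.globals import Global
#
#     Global.base_model = "path/or/hub-id/of/base-model"
#     Global.data_dir = "./data"
#     Global.load_8bit = True
#     if not Global.ui_dev_mode:
#         Global.train_fn(...)  # train() imported from .lib.finetune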