File size: 5,937 Bytes
ba6dc6d
 
 
 
 
 
 
 
 
 
61de2dc
 
 
ba6dc6d
19ed6cf
ba6dc6d
250618e
 
61de2dc
250618e
19ed6cf
89221eb
 
 
 
 
19ed6cf
8e3d125
19ed6cf
 
ba6dc6d
19ed6cf
ba6dc6d
19ed6cf
 
ba6dc6d
19ed6cf
 
 
 
 
 
 
 
 
 
 
 
 
ba6dc6d
19ed6cf
 
 
 
 
 
 
ba6dc6d
19ed6cf
ba6dc6d
19ed6cf
ba6dc6d
 
19ed6cf
 
 
 
 
ba6dc6d
19ed6cf
ba6dc6d
19ed6cf
ba6dc6d
 
 
19ed6cf
61de2dc
ba6dc6d
 
19ed6cf
8e3d125
 
 
 
 
 
 
250618e
 
 
 
 
 
 
 
ba6dc6d
 
89221eb
 
 
8e3d125
19ed6cf
 
 
89221eb
 
 
19ed6cf
 
 
 
 
 
89221eb
 
 
19ed6cf
61de2dc
 
 
 
 
250618e
19ed6cf
8e3d125
19ed6cf
 
89221eb
 
 
 
19ed6cf
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
89221eb
 
 
250618e
 
 
 
 
 
 
8e3d125
250618e
 
 
 
19ed6cf
89221eb
 
 
 
 
 
 
 
 
19ed6cf
89221eb
 
 
 
19ed6cf
250618e
32d2da0
 
 
 
19ed6cf
61de2dc
250618e
19ed6cf
250618e
 
89221eb
250618e
 
 
19ed6cf
89221eb
250618e
 
19ed6cf
ba6dc6d
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
#!/usr/bin/env -S julia --project
# Script to generate `Artifacts.toml` and `artifacts/*.tar.gz`.
#
# For the 1st time running this script, you need to run
#   using Pkg; Pkg.instantiate()
# to install the dependencies.
#
# Artifacts docs:
# https://pkgdocs.julialang.org/v1/artifacts/
#
# Ensure the dependencies declared in this project's Project.toml are installed
# before we try to load them below.
using Pkg
Pkg.instantiate()

# Tar + Inflate + SHA compute the artifact tree hash and tarball sha256;
# TOML serializes the resulting Artifacts.toml.
using Tar, Inflate, SHA, TOML
using Dates: unix2datetime

# Output folder for the generated `*.tar.gz` tarballs.
const artifacts_dir = joinpath(@__DIR__, "artifacts")
# Input folder: each subfolder is one dataset to be packed into an artifact.
const datasets_dir = joinpath(@__DIR__, "datasets")
# Git branch that hosts the deployed tarballs and the previous Artifacts.toml.
const deploy_branch = "artifacts"

"""
Create an artifact tarball for a folder.

# Arguments
- `folder`: subfolder in the `datasets/` folder, e.g. `"Si2"`
- `local_url`: use local file path for the url, for testing only

# Returns
A `name => metadata` pair ready to be merged into `Artifacts.toml`,
or `nothing` when `folder` does not exist under `datasets/`.
"""
function create_artifact(folder::AbstractString; local_url::Bool = false)
    fullpath = joinpath(datasets_dir, folder)
    isdir(fullpath) || return nothing

    mkpath(artifacts_dir)

    # exclude the inputs folder which contains the input files for generating the datasets
    tar_excludes = ["inputs", ".gitignore", "README.md"]

    # gzip compression level, highest
    GZIP = "-9"
    # Prefer pigz for parallel compression when it is on PATH (it is not in the
    # github workflow ubuntu-latest image); fall back to plain gzip otherwise.
    # NOTE: the previous `try run(`which pigz`); global compress_prog = ...`
    # assigned a stray module-level global inside the try-scope, so the local
    # `compress_prog` read below was never updated and pigz was never used.
    compress_prog = if isnothing(Sys.which("pigz"))
        "gzip $GZIP"
    else
        # -k: keep original files
        "pigz $GZIP -k"
    end

    tar_cmd = [
        # macOS ships BSD tar, which lacks the GNU options below; require gtar.
        Sys.isapple() ? "gtar" : "tar",
        "--exclude-vcs",
        "--exclude-vcs-ignores",
        "--use-compress-program=$compress_prog",
    ]
    append!(tar_cmd, ["--exclude=" * f for f in tar_excludes])

    tar_name = "$(folder).tar.gz"
    outpath = joinpath(artifacts_dir, tar_name)
    # `cd(f, dir)` restores the previous working directory on exit, even on error.
    cd(fullpath) do
        files = readdir()
        # check w90 prefix is the same as the folder name, otherwise
        # `Wannier.Datasets.load_dataset(folder)` won't work
        "$(folder).win" in files || @warn "No $folder.win file in the folder $folder?"

        run(Cmd(vcat(tar_cmd, ["-cvf", outpath], files)))
    end

    if local_url
        # if you want to test locally
        url = "file://$(outpath)"
    else
        # use huggingface `artifacts` branch to host the artifacts
        url = "https://huggingface.co/datasets/atomology/WannierDatasets/resolve/artifacts/$(tar_name)"
    end

    artifact_name = folder
    res =
        artifact_name => Dict(
            "git-tree-sha1" => Tar.tree_hash(IOBuffer(inflate_gzip(outpath))),
            "lazy" => true,
            "download" =>
                [Dict("url" => url, "sha256" => bytes2hex(open(sha256, outpath)))],
        )

    # print the result as toml
    buf = IOBuffer()
    TOML.print(buf, Dict(res))
    s = String(take!(buf))
    @info "New artifact:\n$s"

    return res
end

"""
Return the full hash of the most recent commit that touched `path`.

The log is restricted to `branch` (default `"HEAD"`).
"""
function git_hash(path::AbstractString; branch::AbstractString = "HEAD")
    cmd = `git log -n 1 --pretty=format:"%H" $branch -- "$path"`
    return read(cmd, String)
end

"""
Return the committer date of commit `githash` as a `DateTime`.
"""
function git_date(githash::AbstractString)
    # %ct = committer date as a unix timestamp (seconds since the epoch)
    stamp = read(`git show -s --format=%ct $githash`, String)
    return unix2datetime(parse(Float64, strip(stamp)))
end

"""
Map each `*.tar.gz` tarball on the deploy branch to its latest commit hash.
"""
function list_previous_artifacts()
    # `git ls-tree` fails with `fatal: Not a valid object name artifacts`
    # until the deploy branch exists; create it once via `git checkout artifacts`.
    listing = read(`git ls-tree -r --name-only $deploy_branch`, String)
    tarballs = [f for f in split(listing) if endswith(f, ".tar.gz")]
    return Dict(f => git_hash(f; branch = deploy_branch) for f in tarballs)
end

"""
Return the folder names in `datasets/` whose latest commit is newer than
the commit of the corresponding tarball on the deploy branch.
"""
function list_new_folders()
    prev_artifacts = list_previous_artifacts()

    # Skip underscore/dot-prefixed entries and anything that is not a directory.
    is_candidate(name) =
        !startswith(name, "_") &&
        !startswith(name, ".") &&
        isdir(joinpath(datasets_dir, name))

    function needs_rebuild(name)
        dir_hash = git_hash(joinpath(basename(datasets_dir), name))
        artifact_hash = get(prev_artifacts, "$(name).tar.gz", nothing)
        # No previously deployed tarball -> the folder is new by definition.
        isnothing(artifact_hash) && return true
        # Rebuild only when the folder changed after the tarball was committed.
        return git_date(dir_hash) > git_date(artifact_hash)
    end

    return [name for name in readdir(datasets_dir) if is_candidate(name) && needs_rebuild(name)]
end

"""
Interactively remove and recreate the `artifacts_dir` folder.

Return `true` when the user confirms and the folder is recreated,
`false` when the user declines.
"""
function clean_artifacts_dir()
    print("I will clean the `$(artifacts_dir)` folder [y/N]: ")
    answer = lowercase(readline())
    if answer == "y"
        rm(artifacts_dir; force = true, recursive = true)
        mkpath(artifacts_dir)
        println("Cleaned `$(artifacts_dir)` folder.")
        return true
    end
    println("Aborting...")
    return false
end

"""
Load the previous `Artifacts.toml` from the deploy branch.

Return an empty `Dict` when the branch or the file does not exist.
"""
function read_artifact_toml()
    try
        content = String(read(`git show $deploy_branch:Artifacts.toml`))
        return TOML.parse(content)
    catch
        # Best-effort: missing branch/file means we start from scratch.
        return Dict{String,Any}()
    end
end

"""
Entry point: rebuild tarballs for datasets that are newer than their
deployed artifacts and regenerate `Artifacts.toml`.

Pass `dryrun` as a command-line argument to only list what would be rebuilt.
"""
function (@main)(args)
    artifacts = read_artifact_toml()

    new_folders = list_new_folders()
    if isempty(new_folders)
        println("No new folders to process.")
        return
    end

    if "dryrun" in args
        # Report instead of silently doing nothing in dry-run mode.
        println("Would rebuild: ", join(new_folders, ", "))
        return
    end

    clean_artifacts_dir() || return

    for folder in new_folders
        artifact = create_artifact(folder)
        # `create_artifact` returns `nothing` when the folder disappeared
        # between listing and packing; skip it instead of crashing `push!`.
        isnothing(artifact) && continue
        # existing entries will be replaced
        push!(artifacts, artifact)
    end

    open("Artifacts.toml", "w") do io
        TOML.print(io, artifacts)
    end
    return
end