#!/usr/bin/env python3
# Generates a dummy resource.json file and corresponding .zip archives

import filecmp
import hashlib # md5
import math
import io
import json
import re # regex
import shutil # rmtree, copy2
import os
import zipfile

# Resolve the directory containing this script and run from there so every
# relative path below is stable regardless of the caller's cwd.
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
os.chdir(SCRIPT_DIR)

# Name of the newest game data directory to package
LATEST = "game_1.9.1" # either "game_1.9.0" or "game_1.9.1"
# Path of the voiceover pack tree, relative to a game directory
LANG_PACKS_DIR = "GenshinImpact_Data/StreamingAssets/AudioAssets"

# Maps API language codes to the voiceover directory names on disk
LANG_CODE_TO_DIR = {
	"en-us": "English(US)",
	"ja-jp": "Japanese",
	"ko-kr": "Korean",
	"zh-cn": "Chinese"
}

# Destination to put generated *.zip archives
ZIP_OUTPUT_DIR = os.path.join(SCRIPT_DIR, "archives")
os.makedirs(ZIP_OUTPUT_DIR, exist_ok=True)

# Extracts the dotted version (group 1) from names like "game_1.9.1"
regex_get_version = re.compile(r'^[a-z]+_(\d+\.\d+\.\d+)')

def may_bundle_file(fullpath, langdir):
	"""Decide whether *fullpath* belongs in the archive being built.

	With a truthy *langdir*, only files inside that language's voiceover
	directory qualify; otherwise everything outside the voiceover tree
	qualifies.  pkg_version manifests are always excluded (they are added
	to archives separately).
	"""
	# Manifests are never bundled by the directory walk
	if "pkg_version" in fullpath:
		return False

	if not langdir:
		# Main archive: everything except the voiceover tree
		return LANG_PACKS_DIR not in fullpath

	# Voiceover pack: only files under this language's directory
	return os.path.join(LANG_PACKS_DIR, langdir) in fullpath


def make_pkg_version(path, langdir):
	"""Write a pkg_version manifest for the game directory *path*.

	Each line is a JSON object with remoteName (relative path), md5 and
	fileSize, terminated by an explicit "\\r\\n".  With a truthy *langdir*
	the manifest covers only that voiceover pack and is named
	Audio_<langdir>_pkg_version; otherwise it covers the main files and is
	named pkg_version.  Returns the path of the written manifest.
	"""
	if langdir:
		outpath = os.path.join(path, "Audio_{}_pkg_version".format(langdir))
	else:
		outpath = os.path.join(path, "pkg_version")

	# newline="" keeps the explicit "\r\n" terminators intact on all
	# platforms; the context managers close every handle deterministically.
	with io.open(outpath, "w", newline="") as fh:
		for root, dirs, files in os.walk(path):
			for file in files:
				fullpath = os.path.join(root, file)

				if not may_bundle_file(fullpath, langdir):
					continue

				with io.open(fullpath, 'rb') as infile:
					md5 = hashlib.md5(infile.read()).hexdigest()

				shortpath = os.path.relpath(fullpath, path)
				fh.write(json.dumps({
					"remoteName": shortpath,
					"md5": md5,
					"fileSize": os.path.getsize(fullpath)
				}))
				fh.write("\r\n")
	print("Created pkg_version for {}".format(path))
	return outpath


# ----------------- Diffs

def make_deletefiles(dir_old, dir_new, dir_out, langdir):
	"""Write dir_out/deletefiles.txt listing bundleable files that exist in
	*dir_old* but are gone from *dir_new* (one relative path per "\\r\\n"
	line).  *langdir* restricts the scan as in may_bundle_file.
	"""
	# newline="" keeps the explicit "\r\n" terminators intact on all
	# platforms; the context manager guarantees the handle is closed.
	with io.open(os.path.join(dir_out, "deletefiles.txt"), "w", newline="") as fh:
		for root, dirs, files in os.walk(dir_old):
			for file in files:
				fullpath = os.path.join(root, file)

				if not may_bundle_file(fullpath, langdir):
					continue

				shortpath = os.path.relpath(fullpath, dir_old)
				if os.path.isfile(os.path.join(dir_new, shortpath)):
					continue # Does still exist

				fh.write(shortpath)
				fh.write("\r\n")
	print("Created deletefiles for {}".format(os.path.basename(dir_out)))
	return


def make_hdifffiles(dir_old, dir_new, dir_out, langdir):
	"""Generate hdifffiles.txt for an incremental update (not implemented).

	Raises NotImplementedError unconditionally.
	"""
	# TODO: Implement hdifffiles.txt generation
	# NOTE: the original `raise "Not implemented"` raised a plain string,
	# which is itself a TypeError in Python 3 — raise a real exception.
	raise NotImplementedError("hdifffiles.txt generation is not implemented")


def copy_diff_files(dir_old, dir_new, dir_out, langdir):
	"""Copy every bundleable file that is new or changed in *dir_new*
	(compared to *dir_old*) into *dir_out*, preserving relative paths.
	"""
	for root, _dirs, filenames in os.walk(dir_new):
		for name in filenames:
			src = os.path.join(root, name)

			if not may_bundle_file(src, langdir):
				continue

			# Path relative to the installation directory
			rel = os.path.relpath(src, dir_new)
			old_file = os.path.join(dir_old, rel)

			# Skip files that exist in the old tree with identical content
			unchanged = (
				os.path.isfile(old_file)
				and filecmp.cmp(old_file, src, shallow=False)
			)
			if unchanged:
				continue

			dst = os.path.join(dir_out, rel)
			os.makedirs(os.path.dirname(dst), exist_ok=True)
			shutil.copy2(src, dst)

	print("Created diff dir for {}".format(os.path.basename(dir_out)))
	return

# Copy diff files and zip them
def make_diff_zip(dir_old, dir_new, langcode):
	"""Build an update archive with the files changed between *dir_old*
	and *dir_new*, optionally restricted to one voiceover language.
	Returns the path of the created zip.
	"""
	# Match.group(1) is the captured version string; the original used
	# .groups(1)[0], which abuses the `default` argument of groups().
	ver_old = regex_get_version.match(dir_old).group(1) # e.g. "1.8.0"
	ver_new = regex_get_version.match(dir_new).group(1) # e.g. "1.9.0"

	diffname = "{}_{}_{}_hdiff_RANDOMVALUE".format(langcode or "diff", ver_old, ver_new)
	dir_out = os.path.join(ZIP_OUTPUT_DIR, diffname)

	# Remove old instances
	if os.path.isdir(dir_out):
		shutil.rmtree(dir_out)
	os.mkdir(dir_out)

	# Create update files and zip
	langdir = LANG_CODE_TO_DIR[langcode] if langcode else None
	copy_diff_files(dir_old, dir_new, dir_out, langdir)
	# Only add deletefiles and pkg_version when there are changed files
	if os.listdir(dir_out):
		make_deletefiles(dir_old, dir_new, dir_out, langdir)
		shutil.copy2(make_pkg_version(dir_new, langdir), dir_out)

	# Just pack everything in the directory
	return make_dir_zip(dir_out, diffname + ".zip", langdir)


# For the first time install
def make_lang_zip(path, langcode):
	"""Build the full (first-install) voiceover archive for *langcode*
	from game directory *path*.  Returns the path of the created zip.
	"""
	# Match.group(1) is the captured version string; the original used
	# .groups(1)[0], which abuses the `default` argument of groups().
	ver = regex_get_version.match(path).group(1) # e.g. "1.9.0"

	langdir = LANG_CODE_TO_DIR[langcode]
	outname = "Audio_{}_{}".format(langdir, ver)

	return make_dir_zip(path, outname + ".zip", langdir)


# ----------------- ZIP archive creation

# Splits the given file into two parts
def make_zip_segments(zippath):
	"""Split the file at *zippath* into two numbered segment files
	(zippath.001, zippath.002), each holding half the bytes (rounded up).
	"""
	# Two segments: each chunk is half the file size, rounded up
	chunksize = math.ceil(os.path.getsize(zippath) / 2)
	i = 0
	# Context managers close every handle, so the segment data is flushed
	# to disk before anyone reads the parts back.
	with io.open(zippath, "rb") as fh:
		while True:
			data = fh.read(chunksize)
			if not data:
				break # done

			# Segment names are 1-based: file.zip.001, file.zip.002, ...
			with io.open(zippath + ".{:03d}".format(i + 1), "wb") as sh:
				sh.write(data)
			i = i + 1

	print("Split {} into {} parts".format(os.path.basename(zippath), i))
	return


# Create archive from directory (all contents)
def make_dir_zip(path, outname, langdir):
	"""Pack the bundleable contents of *path* into ZIP_OUTPUT_DIR/outname.

	The walk excludes pkg_version manifests (see may_bundle_file); the one
	matching this archive — and, for voiceover packs, deletefiles.txt — is
	added explicitly afterwards.  Returns the path of the created archive.
	"""
	zippath = os.path.join(ZIP_OUTPUT_DIR, outname)
	# Use a context manager so the zip is always closed and its central
	# directory written, even on error (the original leaked the handle).
	with zipfile.ZipFile(zippath, "w", zipfile.ZIP_DEFLATED) as zh:
		for root, dirs, files in os.walk(path):
			for file in files:
				fullpath = os.path.join(root, file)

				if not may_bundle_file(fullpath, langdir):
					continue

				shortpath = os.path.relpath(fullpath, path)
				zh.write(fullpath, shortpath)

		if langdir:
			filename = "Audio_" + langdir + "_pkg_version"
		else:
			filename = "pkg_version"

		fullpath = os.path.join(path, filename)
		if os.path.isfile(fullpath):
			zh.write(fullpath, filename)

		if langdir:
			# Previously excluded by the language filter
			filename = "deletefiles.txt"
			fullpath = os.path.join(path, filename)
			if os.path.isfile(fullpath):
				zh.write(fullpath, filename)

	print("Created archive {}".format(outname))
	return zippath



# ----------------- JSON creation


def save_json_file(fields):
	"""Serialize *fields* as JSON to resource.json in the current
	directory (the original leaked the file handle).
	"""
	with io.open("resource.json", "w") as fh:
		fh.write(json.dumps(fields))
	print("Saved resource.json")


def dict_add_game_entry(ref, zip_path, decompressed_path):
	"""Fill *ref* with a game archive entry for resource.json.

	With a truthy *decompressed_path*, *ref* is the "latest" dict and is
	filled in place (plus a decompressed_path URL).  Otherwise *ref* is the
	"diffs" list: a new dict is appended and filled.  Returns the dict that
	was filled so callers can append to its "voice_packs" list.
	"""
	if not decompressed_path:
		# diff listing: append a fresh entry to the list
		ref.append({})
		ref = ref[-1]

	filename = os.path.basename(zip_path)
	ref["name"] = filename
	m = regex_get_version.match(filename)
	ref["version"] = m.group(1)

	if decompressed_path:
		relpath = os.path.relpath(decompressed_path, SCRIPT_DIR)
		ref["decompressed_path"] = "http://0.0.0.0:8000/" + relpath

	# No combined digest is needed for game entries, so pass None and let
	# dict_add_archive create its own (the original built two unused
	# hashlib.md5() objects here).
	filesize = dict_add_archive(ref, zip_path, None)

	ref["size"] = str(math.floor(filesize * 1.654321)) # installed size
	ref["package_size"] = str(filesize) # archive size
	ref["voice_packs"] = []
	return ref


def dict_add_voice_pack_entry(ref, zip_path, langcode):
	"""Append a voiceover pack entry for *langcode* to the list *ref*."""
	entry = {}
	ref.append(entry)

	entry["language"] = langcode
	entry["name"] = "" # only populated in diffs

	# Combined digest across all segments of the archive
	digest = hashlib.md5()
	total = dict_add_archive(entry, zip_path, digest)

	entry["md5"] = digest.hexdigest()
	entry["size"] = str(math.floor(total * 1.654321)) # installed size
	entry["package_size"] = str(total) # archive size
	return


# Returns the total file size
def dict_add_archive(ref, zip_path, md5hash):
	filesize = 0

	if not md5hash:
		md5hash = hashlib.md5()

	# Try all available segments. Sum up all files and update the md5 hash
	sref = []
	i = 0
	totalsize = 0
	while True:
		segname = zip_path + ".{:03d}".format(i + 1)
		if not os.path.isfile(segname):
			break

		sref.append({})
		totalsize = totalsize + dict_add_archive(sref[i], segname, md5hash)
		i = i + 1

	if i > 0:
		ref["segments"] = sref
		ref["md5"] = md5hash.hexdigest()
		ref["package_size"] = "0" # same as sent by server
		return totalsize

	# Found no segments. Try normal archive
	if os.path.isfile(zip_path):
		# Single archive (segmented or normal)
		relpath = os.path.relpath(zip_path, SCRIPT_DIR)
		ref["path"] = "http://0.0.0.0:8000/" + relpath

		data = io.open(zip_path, 'rb').read()
		md5hash.update(data) # total md5 hash for segments

		ref["md5"] = hashlib.md5(data).hexdigest() # for this archive only
		ref["package_size"] = "0" # same as sent by server
		return len(data)

	raise "Cannot find archive for {}".format(zip_path)


# ----------------- Main program

# Skeleton of the resource.json response: filled in below and served to
# the launcher.  "game" holds the live version, "pre_download_game" the
# optional upcoming one; each has a full "latest" entry plus update "diffs".
fields = {
	"retcode": 0,
	"message": "OK",
	"data": {
		"game": {
			"latest": {},
			"diffs": []
		},
		"plugin": {
			"plugins": [],
			"version": 0
		},
		"pre_download_game": {
			"latest": {},
			"diffs": []
		}
	}
}

# Whether to advertise a pre-download section in resource.json
HAVE_PREDOWNLOAD = True

if not HAVE_PREDOWNLOAD:
	fields["data"]["pre_download_game"] = None

# ----------------- Build everything and emit resource.json.
# (Indentation normalized to tabs: the original 1.9.1 section used spaces,
# with mixed space+tab lines, unlike the rest of the file.)

# Main game archive: manifest, zip, split into segments, JSON entry
make_pkg_version(LATEST, None)
zip190_path = make_dir_zip(LATEST, LATEST + ".zip", None)
make_zip_segments(zip190_path)
ref_game = dict_add_game_entry(fields["data"]["game"]["latest"], zip190_path, LATEST)

# Main voiceover packs
for k in LANG_CODE_TO_DIR:
	zippath = make_lang_zip(LATEST, k)
	dict_add_voice_pack_entry(ref_game["voice_packs"], zippath, k)


# 1.8.0 -> 1.9.0 diff
diff_path = make_diff_zip("game_1.8.0", "game_1.9.0", None)
ref_diff = dict_add_game_entry(fields["data"]["game"]["diffs"], diff_path, None)

# 1.8.0 -> 1.9.0 voiceover diffs
for k in LANG_CODE_TO_DIR:
	diff_path = make_diff_zip("game_1.8.0", "game_1.9.0", k)
	dict_add_voice_pack_entry(ref_diff["voice_packs"], diff_path, k)

if LATEST == "game_1.9.1":
	# 1.9.0 -> 1.9.1 diff
	diff_path = make_diff_zip("game_1.9.0", "game_1.9.1", None)
	ref_diff = dict_add_game_entry(fields["data"]["game"]["diffs"], diff_path, None)

	# 1.9.0 -> 1.9.1 voiceover diffs
	for k in LANG_CODE_TO_DIR:
		diff_path = make_diff_zip("game_1.9.0", "game_1.9.1", k)
		dict_add_voice_pack_entry(ref_diff["voice_packs"], diff_path, k)


save_json_file(fields)
