#!/bin/bash
# The job yaml can pass the following ENVIRONMENT VARIABLES to this script:
# - upstream_repo
# - arch
# upstream_commit
# upstream_branch
# custom_repo_name
# custom_repo_addr
# mount_repo_name
# mount_repo_addr
# repo_addr
# whether_email
# compat_os
# SRPM jobs do not use upstream_repo; Git jobs use all of them.
# Users can add mount_repo_name and mount_repo_addr to set up a
# local repo; each value can be an array.
# END ENVIRONMENT VARIABLES

# Shared lkp helper libraries (debug, upload, rpm, email, cache proxy).
. $LKP_SRC/lib/debug.sh
. $LKP_SRC/lib/upload.sh
. $LKP_SRC/lib/rpm.sh
. $LKP_SRC/lib/email.sh
. $LKP_SRC/lib/cache_proxy_client.sh

# Defaults for optional job variables.
: "${compat_os:=budding-openeuler}"
: "${build_user:=lkp}"

# A job must carry at least one source of work.
# NOTE(review): at this point the local die() below is not yet parsed, so
# this relies on a die from the sourced libraries — confirm debug.sh has one.
([ -n "$snapshot_id" ] || [ -n "$depend_upstream_repo" ] || [ -n "$custom_repo_name" ]) || die "custom_repo_name is empty"

init_build_user()
{
	# Create the requested build account in group lkp; the default
	# "lkp" user presumably already exists in the build image.
	[ "$build_user" == "lkp" ] && return
	useradd ${build_user} -g lkp
}

create_build_command()
{
	# gcc_secure reads /.build.command to learn which spec is being built.
	# Returns 1 when spec_file_name is empty (callers ignore the status).
	[ -n "$spec_file_name" ] || return
	echo "$spec_file_name" > /.build.command
	chmod 775 /.build.command
}

init_env()
{
	# One-time host/environment setup executed at script load: build
	# account, hostname, terminal/timezone defaults, and the global
	# upload/build paths used throughout the script.
	init_build_user
	hostname dc-64g.compass-ci
	echo "export TERM=xterm-256color" >> /etc/profile
	echo "export TZ=Asia/Shanghai" >> /etc/profile
	source /etc/profile
	# dest_dir: publish root for built rpms; rpmdev_dir: rpmbuild tree
	dest_dir="/rpm/upload/${os}-${os_version}/${custom_repo_name}"
	rpmdev_dir="/home/${build_user}/rpmbuild"
	# NOTE(review): declare -a inside a function makes reverse_depends
	# local to init_env, so this line has no effect on the global array
	# used by find_reverse_depends — confirm intent.
	declare -a reverse_depends
	create_build_command
}

# Perform the one-time environment setup immediately.
init_env

init_ccache()
{
	# When the job sets CCACHE_ENABLE=True, front gcc/g++/cc/c++ with
	# ccache via /usr/local/bin symlinks and point the cache at
	# /etc/.ccache/ (shared, 100G, group-writable).
	[[ $CCACHE_ENABLE == "True" ]] && {
		usermod -aG root ${build_user}
		yum install -y ccache 'dnf-command(download)'
		unset CCACHE_HARDLINK
		ln -s /usr/bin/ccache /usr/local/bin/gcc
		ln -s /usr/bin/ccache /usr/local/bin/g++
		ln -s /usr/bin/ccache /usr/local/bin/cc
		ln -s /usr/bin/ccache /usr/local/bin/c++
		echo "export CCACHE_DIR=/etc/.ccache/" >> /etc/profile
		echo "export CCACHE_MAXSIZE=100G" >> /etc/profile
		echo "export CCACHE_UMASK=002" >> /etc/profile
		source /etc/profile
	}
}

die()
{
	# Fatal-error handler: mail a failure report, print the message,
	# optionally stall $sff seconds for debugging, exit with code 99.
	rpmbuild_email "rpmbuild_failed"
	printf '%s\n' "$*"
	[ -n "$sff" ] && sleep ${sff}
	exit 99
}

in_array()
{
	# Return 0 if $2 appears as a word in the whitespace-separated list $1.
	local candidate
	for candidate in $1
	do
		[ "$candidate" = "$2" ] && return 0
	done

	return 1
}

download_profiling()
{
   if [ -n "${profiling_config}" ]; then
       for element in ${profiling_config}; do
           profiling_path="/tmp/${element%%@*}"
           profiling_url="${element#*@}"
           mkdir "$profiling_path"
           wget "$profiling_url" --directory-prefix="$profiling_path"
       done
   fi

   [ "$package_name" == "kernel" ] && {
	   local gcovdata=/tmp/pgo/gcovdata.tar.gz
	      if [ -e "${gcovdata}" ]; then
		      tar -xzvf "${gcovdata}" -C /tmp/pgo && rm -rf "${gcovdata}"
	      fi
      }
}

from_git()
{
	# Git-driven build: derive the package name from the repo URL, pick
	# the upload destinations (snapshot history dir vs. regular repo
	# layout), then prepare the workspace and fetch sources.
	package_name=${upstream_repo##*/}
	package_name=${package_name%%.git*}
	# NOTE(review): tmp_rpm_dest is computed from rpm_dest BEFORE
	# rpm_dest is assigned below (so it is "/tmp/<id>" at this point) —
	# confirm this ordering is intentional.
	tmp_rpm_dest="/tmp${rpm_dest}/$id"

	if [ -n "$snapshot_id" ]; then
		rpm_dest="/repositories/${os_project}/${os_variant}/${os_arch}/history/${snapshot_id}/steps/upload/${id}/"
		src_rpm_dest="/repositories/${os_project}/${os_variant}/${os_arch}/history/${snapshot_id}/steps/upload/${id}/"
	else
		rpm_dest="${dest_dir}/${os_arch}/Packages"
		src_rpm_dest="${dest_dir}/source/Packages"
	fi

	init_workspace
	download_upstream_repo
	download_profiling
}

echo_commit_info()
{
	# Emit bash array-append statements ("$4+=('word')") for every
	# whitespace-separated word produced by git_pretty_format "$1" "$2" "$3".
	local word
	for word in $(git_pretty_format "$1" "$2" "$3"); do
		echo "$4+=('"$word"')"
	done
}

get_merge_info()
{
	# Resolve contact info for the commit range and return it through
	# eval: $3 <- ';'-joined email addresses, $4 <- ','-joined names.
	declare -a name
	declare -a email
	declare -A email_name_hash

	# NOTE(review): recipients are currently hard-coded and the git-based
	# discovery below is commented out — confirm this is still intended.
	name=(liping wangyong)
	email=(liping136@huawei.com wangyong187@huawei.com)

#	eval $(echo_commit_info "--no-color" "%ae" "$1" email)
#	eval $(echo_commit_info "--no-color" "%ce" "$1" email)
#	eval $(echo_commit_info "-n1" "%ae" "$2" email)
#	eval $(echo_commit_info "-n1" "%ce" "$2" email)
#
#	IFS_SAVE=$IFS
#	IFS=$'\n'
#	eval $(echo_commit_info "--no-color" "%an" "$1" name)
#	eval $(echo_commit_info "--no-color" "%cn" "$1" name)
#	eval $(echo_commit_info "-n1" "%an" "$2" name)
#	eval $(echo_commit_info "-n1" "%cn" "$2" name)
#	IFS=$IFS_SAVE

	# De-duplicate by email, skipping noreply/blacklisted addresses.
	for key in "${!email[@]}"
	do
		[[ "${email[$key]}" =~ ^noreply ]] && continue
		[[ "${email[$key]}" =~ ^80474298@qq.com ]] && continue
		email_name_hash["${email[$key]}"]="${name[$key]}"
	done

	# Join hash keys with ';' into the caller's variable named by $3.
	IFS_SAVE=$IFS
	IFS=$';'
	eval $3='"${!email_name_hash[*]}"'
	IFS=$IFS_SAVE

	# Join hash values with ',' into the caller's variable named by $4.
	IFS_SAVE=$IFS
	IFS=$','
	eval $4='"${email_name_hash[*]}"'
	IFS=$IFS_SAVE
}

git_pretty_format()
{
	# Ask the remote git service to run:
	#   git log $1 --pretty=format:$2 $3
	# against the upstream repo.  NOTE(review): arguments are interpolated
	# into the JSON payload unescaped — inputs must be trusted.
	curl -sS -H 'Content-Type: Application/json' -XPOST "$REMOTE_GIT_HOST:$REMOTE_GIT_PORT"'/git_command' -d '{"git_repo": "'${upstream_dir}/${upstream_repo}.git'", "git_command": ["git-log","'$1'","--pretty=format:'$2'", "'$3'"]}'
}

is_git_merge()
{
	# Print the parent hashes of commit $1 via the remote git service;
	# two or more words in the output indicate a merge commit.
	curl -sS -H 'Content-Type: Application/json' -XPOST "$REMOTE_GIT_HOST:$REMOTE_GIT_PORT"'/git_command' -d '{"git_repo": "'${upstream_dir}/${upstream_repo}.git'", "git_command": ["git-show", "--no-patch", "--format=%p", "'$1'"]}'
}

rpmbuild_email()
{
	# Send a build-status mail ($1 = template name), skipped unless the
	# job sets whether_email.  Merge commits use the "p1...p2" parent
	# range to collect the involved authors.
	[ -n "$whether_email" ] || return
	if [[ $(is_git_merge "$upstream_commit" | wc -w) -eq 1 ]]; then
		get_merge_info "" "${upstream_commit}" author_email author_name
	else
		parent_commit=$(is_git_merge $upstream_commit | sed 's/\ /.../')
		get_merge_info "${parent_commit}" "${upstream_commit}" author_email author_name
	fi

	# Reverse-dependency rebuild jobs use a dedicated mail template.
	if [ -n "$depend_job_id" ]; then
		send_email "reverse_rpmbuild"
	else
		send_email $1
	fi
}

from_srpm()
{
	# SRPM-driven build: repo_addr points at the source rpm; publish to
	# the regular repo layout and unpack the srpm into the build tree.
	[ -n "$repo_addr" ] || die "repo_addr is empty"

	rpm_dest="${dest_dir}/${os_arch}/Packages"
	src_rpm_dest="${dest_dir}/source/Packages"

	install_srpm
}

init_workspace()
{
	# Ensure ${build_user}'s home exists and is owned by that account,
	# then create the rpmbuild tree (SPECS/SOURCES/...) for both the
	# build user and root (root's tree is used when use_root is set).
	# Fix: the original duplicated chown+chgrp in both if/else branches;
	# only the mkdir needs to be conditional.
	[ -d /home/${build_user} ] || mkdir -m 775 /home/${build_user}
	chown ${build_user} /home/${build_user}
	chgrp ${build_user} /home/${build_user}

	# generate workspace in ${HOME}
	su - ${build_user} -c rpmdev-setuptree
	rpmdev-setuptree
}

git_clone()
{
	# Clone $1 at branch $2 with history depth $3 (default 100), retrying
	# up to 10 times with a 6-second pause between attempts.
	# NOTE: ${1} is expanded unquoted on purpose — some callers pass
	# "url targetdir" as a single word-split argument.
	local depth=${3:-100}
	local attempt
	for attempt in {1..10}
	do
		[[ "${attempt}" -gt 1 ]] && sleep 6
		if git clone -q ${1} -b ${2} --depth=${depth} 2>/dev/null; then
			return 0
		fi
	done
	return 1
}

git_lfs_clone()
{
	# Like git_clone but through `git lfs clone` so large LFS objects are
	# fetched too (NOTE(review): `git lfs clone` is deprecated upstream in
	# favor of plain `git clone` — confirm the installed lfs still ships it).
	local depth=${3:-100}
	local attempt
	for attempt in {1..10}
	do
		[[ "${attempt}" -gt 1 ]] && sleep 6
		if git lfs clone ${1} -b ${2} --depth=${depth} 2>/dev/null; then
			return 0
		fi
	done
	return 1
}

download_upstream_repo()
{
	# Clone the package's git repo (local git daemon, remote https, or
	# gitee LFS), check out the requested ref, apply spec fixups, and
	# populate the rpmbuild SPECS/SOURCES directories.
	git config --global http.lowSpeedLimit 0
	git config --global http.lowSpeedTime 3600
	git config --global lfs.activitytimeout 3600

	# Pick the clone URL based on repo namespace and remote/local mode.
	repo_url="git://${GIT_SERVER}/${upstream_repo}"
	[[ "${is_remote}" == "true" ]] && {
		[[ "$upstream_repo" == common* ]] && repo_url="https://${upstream_repo#*/}"
		[[ "$upstream_repo" == customization* ]] && repo_url="${GIT_SERVER}/${upstream_repo}"
	}

	[ -n "${upstream_dir}" ] && repo_url="git://${GIT_SERVER}/${upstream_dir}/${upstream_repo}"


	# LFS-hosted packages in snapshot builds come straight from gitee.
	if [ -n "$use_git_lfs" ] && [ -n "${snapshot_id}" ]; then
		git_lfs_clone "https://gitee.com/src-openeuler/${package_name}.git" "${spec_branch}" || die "git lfs clone failed, url: https://gitee.com/src-openeuler/${package_name}.git"
	else
		git_clone "${repo_url}" "${spec_branch}" || die "clone git repo failed, url: ${repo_url}"
	fi

	local old_pwd=$(pwd)
	cd "$package_name" || exit
	# pr_merge_reference_name presumably looks like refs/pull/<N>/... ;
	# the third '/'-separated field is the PR number — TODO confirm.
	pr=$(echo $pr_merge_reference_name |awk '{split($0,a,"/");print a[3]}')
	[ -n "${pr_merge_reference_name}" ] && git fetch origin pull/$pr/head

	# Prefer an explicit commit over a branch name when both are given.
	local ref=""
	[ -n "${upstream_branch}" ] && ref="${upstream_branch}"
	[ -n "${upstream_commit}" ] && ref="${upstream_commit}"
	[ -n "${ref}" ] && {
		git checkout -q "${ref}" || die "checkout git repo ${package_name} to ${ref} failed"
	}

	if [[ "$upstream_repo" == customization*  ]] && [ -n "${spec_file_name}" ]; then
		local dst_dir=$(pwd)
		download_source ${spec_file_name} ${dst_dir}
	fi

	fix_spec_content
	update_release_info
	adapt_kata_integration $package_name
	mv *.spec "${rpmdev_dir}/SPECS/"

	# Everything else in the checkout becomes build sources; root gets a
	# copy of the whole tree for use_root builds.
	cp -r * "${rpmdev_dir}/SOURCES/"
	cp -r ${rpmdev_dir} /root/
}

add_user_mock()
{
	# Accounts presumably required when installing/building src rpms
	# produced by mock — TODO confirm which tool needs them.
	local account
	for account in mock mockbuild
	do
		useradd "$account"
	done
}

install_srpm()
{
	# Unpack the source rpm as ${build_user}: first treat ${repo_addr}
	# as the full srpm path, then fall back to ${repo_addr}/${upstream_repo}.
	add_user_mock

	su ${build_user} -c "rpm -i --nosignature ${repo_addr} >/dev/null" && return 0
	su ${build_user} -c "rpm -i --nosignature ${repo_addr}/${upstream_repo} >/dev/null" && return 0
	die "failed to install source rpm: ${repo_addr}/${upstream_repo}"
}

enable_faketime()
{
	# Optionally install libfaketime and export the job's FAKETIME
	# variables; '#' in a value stands for a space (see example below).
	[ "$enable_faketime" == "y" ] && {
		yum install -y libfaketime || die "yum install libfaketime failed."
	}

	[ -n "$faketime_exports" ] && {
		for fe in ${faketime_exports[@]}
		do
			# FAKETIME=2023-05-01#11:12:13 => FAKETIME='2023-05-01 11:12:13'
			tfe="$(echo ${fe}|sed "s/=/='/g"|sed "s/#/ /g")'"
			echo "export ${tfe}" >> /etc/profile
		done
		source /etc/profile
	}
}

sync_system_rpms()
{
	# When the job mounts a "lastest_repo" [sic], force installed packages
	# into sync with it and drop the stock openEuler repo file.
	[[ "$mount_repo_name" =~ "lastest_repo" ]] || return 0
	dnf distro-sync -y --allowerasing --repo 0
	local stock_repo="/etc/yum.repos.d/openEuler.repo"
	[ -e "${stock_repo}" ] && rm -rf "${stock_repo}"
}

build_rpm()
{
	# Orchestrate the whole build: export workspace paths, prepare the
	# environment (macros, dependencies, package-specific tweaks), run
	# rpmbuild (twice in BEP mode), then log timing and statistics.
	[ -n "$sfb" ] && sleep ${sfb}	# debug hook: sleep before build

	export spec_dir="${rpmdev_dir}/SPECS"
	export source_dir="${rpmdev_dir}/SOURCES"
	export build_dir="${rpmdev_dir}/BUILD"
	# use_root switches the whole build to root's rpmbuild tree.
	[ -n "$use_root" ] && {
		build_by_root=yes
		rpmdev_dir=/root/rpmbuild
		export spec_dir="${rpmdev_dir}/SPECS"
		export source_dir="${rpmdev_dir}/SOURCES"
		export build_dir="${rpmdev_dir}/BUILD"
	}

	startTime=$(date +%Y-%m-%d-%H:%M)
	startTime_s=$(date +%s)
	fix_on_distro
	set_macros
	install_gcc_secure
	enable_faketime
	# Reverse-dependency jobs first install the parent job's rpms.
	[ -n "$depend_upstream_repo" ] && localinstall_dependent_package
	preinstall_packages
	install_glassfish
	install_java180
	install_python3
	sync_system_rpms
	yum_build_dep
	echo "yum_build_dep done"
	init_ccache

	[ -n "$sfd" ] && sleep ${sfd}	# debug hook: sleep after deps

	package_source_dirs
	install_kmod_libs
	install_environment_modules
	spec_tool

	# glibc builds must not see the ambient LD_LIBRARY_PATH.
	if [ "$package_name" == "glibc" ];then
		ld=$(echo $LD_LIBRARY_PATH)
		export LD_LIBRARY_PATH=
	fi

	config_maven_mirror
	export_shell
	del_rpms
	install_charset
	export_GMT

	[ -n "$sfba" ] && sleep ${sfba}	# debug hook: sleep before rpmbuild -ba

	if [ -n "$enable_bep" ];then
		rpmbuild_bep
	else
		rpmbuild_ba
	fi

	if [ "$package_name" == "glibc" ];then
		export LD_LIBRARY_PATH=$ld
	fi

	endTime=$(date +%Y-%m-%d-%H:%M)
	endTime_s=$(date +%s)

	sumTime=$[ $endTime_s - $startTime_s ]

	echo "RPM build START TIME: $startTime, END TIME: $endTime, Total duration: $sumTime seconds"
	# NOTE(review): this only checks that a tests/ directory exists under
	# BUILD; it does not verify that any test actually ran or passed.
	find /${rpmdev_dir}/BUILD/*/ -maxdepth 1 -type d -name "tests" | grep "tests"
	[ $? = 0 ] && echo "All test cases are passed."
	statistic_info
}

preinstall_packages()
{
	# Install the job's space-separated $preinstall package list, if any.
	if [ -z "$preinstall" ]; then
		return 1
	fi
	yum install -y ${preinstall}
}

statistic_info()
{
	# Collect a build-environment fingerprint (peak memory/cpu from
	# cgroup v1 counters, installed rpms, exported vars, kernel, rpm
	# macros, the rendered spec, os-release) and upload it to the job's
	# result directory.
	echo "==========================memory===========================" >> /tmp/statistic_info
	echo "max_memory_cpu_usage_${build_id}: ${package_name} $(cat /sys/fs/cgroup/memory/memory.max_usage_in_bytes) $(cat /sys/fs/cgroup/cpu/cpuacct.usage)" >> /tmp/statistic_info
	echo "==========================rpm -qa==========================" >> /tmp/statistic_info
	rpm -qa|sort >> /tmp/statistic_info
	echo "==========================export==========================" >> /tmp/statistic_info
	export|sort >> /tmp/statistic_info
	echo "==========================uname -a==========================" >> /tmp/statistic_info
	uname -a >> /tmp/statistic_info
	echo "==========================rpm --showrc==========================" >> /tmp/statistic_info
	rpm --showrc >> /tmp/statistic_info
	# Fix: section header previously misspelled as "rpmsepc -P".
	echo "==========================rpmspec -P==========================" >> /tmp/statistic_info
	rpmspec -P ${spec_dir}/${spec_file_name} >> /tmp/statistic_info
	echo "==========================os-release==========================" >> /tmp/statistic_info
	cat /etc/os-release >> /tmp/statistic_info
	upload_one_curl /tmp/statistic_info ${result_root}
}

export_shell()
{
	# firefox builds need SHELL pointed at /bin/shell; persist the
	# setting through /etc/profile.
	local shell_pkgs=(firefox)
	in_array "${shell_pkgs[*]}" "$package_name" || return
	echo "export SHELL=/bin/shell" >> /etc/profile
	source /etc/profile
}

export_GMT()
{
	# simple-xml's build expects a GMT timezone; persist via /etc/profile.
	local tz_pkgs=(simple-xml)
	in_array "${tz_pkgs[*]}" "$package_name" || return
	echo "export TZ=GMT" >> /etc/profile
	source /etc/profile
}

fix_spec_content()
{
	[ -n "$spec_file_name" ] && {
		perl -pi -e 's# mysql-devel# mariadb-connector-c-devel#g' ${spec_file_name}
	}
}

update_release_info()
{
        [[ "$package_name" != "openEuler-latest-release" ]] && return

	local gcc_ver=$(yum info gcc | grep 'Version' | awk '{print $NF}' |head -n 1 | sed 's/[ \t\r\n]*$//g')
	local gcc_rel=$(yum info gcc | grep 'Release' | awk '{print $NF}' |head -n 1 | sed 's/[ \t\r\n]*$//g')
	local jdk_ver=$(yum info java-1.8.0-openjdk | grep 'Version' | awk '{print $NF}'|head -n 1 | sed 's/[ \t\r\n]*$//g')
	local jdk_rel=$(yum info java-1.8.0-openjdk | grep 'Release' | awk '{print $NF}'|head -n 1 | sed 's/[ \t\r\n]*$//g')
	local kernel_ver=$(yum info kernel | grep 'Version' | awk '{print $NF}'|head -n 1 | sed 's/[ \t\r\n]*$//g')
	local kernel_rel=$(yum info kernel | grep 'Release' | awk '{print $NF}'|head -n 1 | sed 's/[ \t\r\n]*$//g')

        local version="${spec_branch}"
        local time_str="${snapshot_create_time}"
        local sdfs=(isopackage.sdf isopackage_arm64.sdf)
        for sdf in ${sdfs[@]}
        do
                rm -f "${sdf}"
                echo "openeulerversion=${version}" > "${sdf}"
                echo "compiletime=${time_str}" >> "${sdf}"
                echo "gccversion=${gcc_ver}-${gcc_rel}" >> "${sdf}"
                echo "kernelversion=${kernel_ver}-${kernel_rel}" >> "${sdf}"
                echo "openjdkversion=${jdk_ver}-${jdk_rel}" >> "${sdf}"
                chmod 775 "${sdf}"
        done
}

install_gcc_secure()
{
	# gcc_secure is installed for every build except gcc-10 itself
	# (presumably it conflicts with building that compiler — TODO confirm).
	[ "$package_name" == "gcc-10" ] && return
	yum install -y gcc_secure
	echo "install_gcc_secure done"
}

yum_build_dep()
{
	# Install build dependencies
	[ -n "$prefer" ] && yum install -y ${prefer}

	if [ -n "$spec_file_name" ]
	then
		# Packages that additionally need a specific headless JDK; note
		# that for these yum-builddep runs here AND again below (except
		# protobuf, which returns early after its second run).
		local need_headless=(lasso libguestfs pki-core ovirt-engine protobuf)
		in_array "${need_headless[*]}" "$package_name" && {
			yum-builddep -y "$spec_dir"/${spec_file_name}
			if [ "$package_name" == "pki-core" ]
			then
				yum install -y java-latest-openjdk-headless || \
					die "install java-latest-openjdk-headless error."
			elif [ "$package_name" == "ovirt-engine" ]
			then
				yum install -y java-11-openjdk-headless || \
					die "install java-11-openjdk-headless error."
			elif [ "$package_name" == "protobuf" ]
			then
				yum-builddep -y "$spec_dir"/${spec_file_name} 2>&1 || \
					die "failed to solve dependencies"
				return
			else
				yum install -y java-1.8.0-openjdk-headless || \
					die "install java-1.8.0-openjdk-headless error."
			fi
		}

		yum-builddep -y "$spec_dir"/${spec_file_name} 2>&1 || die "failed to solve dependencies"
	else
		# No explicit spec name: resolve deps for every spec in SPECS.
		yum-builddep -y "$spec_dir"/*.spec 2>&1 || die "failed to solve dependencies"
	fi
}

rpmbuild_ba()
{
	# Run `rpmbuild -ba` (as root or as ${build_user}) on the selected
	# spec(s) and record timing in start_build_time/build_duration.
	start_build_time=$(date "+%Y/%m/%d %H:%M:%S")
	local t0=$(date +%s)
	local check_flag=""
	[ "$skip_check" == "y" ] && check_flag="--nocheck"

	if [ -n "$build_by_root" ]; then
		if [ -n "$spec_file_name" ]; then
			rpmbuild -ba ${check_flag} ${spec_dir}/${spec_file_name} || die "failed to build rpms by root with ${spec_file_name}"
		else
			rpmbuild -ba ${check_flag} ${spec_dir}/*.spec || die "failed to build rpms by root"
		fi
	else
		if [ -n "$spec_file_name" ]; then
			su - ${build_user} -c "rpmbuild  -ba ${check_flag} ${spec_dir}/${spec_file_name}" || die "failed to build rpms with ${spec_file_name}"
		else
			su - ${build_user} -c "rpmbuild  -ba ${check_flag} ${spec_dir}/*.spec" || die "failed to build rpms"
		fi
	fi
	build_duration=$(( $(date +%s) - t0 ))
}

rpmbuild_bep()
{
	# BEP mode: run rpmbuild twice on the same spec; only the second run
	# is fatal on failure (presumably a build-reproducibility check —
	# TODO confirm the intent of the first, unchecked pass).
	echo "run rpmbuild with bep"
	start_build_time=$(date "+%Y/%m/%d %H:%M:%S")
	local t0=$(date +%s)
	local check_flag=""
	[ "$skip_check" == "y" ] && check_flag="--nocheck"

	if [ -n "$build_by_root" ]; then
		if [ -n "$spec_file_name" ]; then
			rpmbuild -ba ${check_flag} ${spec_dir}/${spec_file_name}
			rpmbuild -ba ${check_flag} ${spec_dir}/${spec_file_name} || die "failed to build rpms by root with ${spec_file_name}"
		else
			rpmbuild -ba ${check_flag} ${spec_dir}/*.spec
			rpmbuild -ba ${check_flag} ${spec_dir}/*.spec || die "failed to build rpms by root"
		fi
	else
		if [ -n "$spec_file_name" ]; then
			su - ${build_user} -c "rpmbuild  -ba ${check_flag} ${spec_dir}/${spec_file_name}"
			su - ${build_user} -c "rpmbuild  -ba ${check_flag} ${spec_dir}/${spec_file_name}" || die "failed to build rpms with ${spec_file_name}"
		else
			su - ${build_user} -c "rpmbuild  -ba ${check_flag} ${spec_dir}/*.spec"
			su - ${build_user} -c "rpmbuild  -ba ${check_flag} ${spec_dir}/*.spec" || die "failed to build rpms"
		fi
	fi
	build_duration=$(( $(date +%s) - t0 ))
}

install_charset()
{
	# Install full locale data (presumably for locale-sensitive package
	# %check suites — TODO confirm).
	yum install -y glibc-all-langpacks glibc-locale-archive
}

spec_tool()
{
	# Stage source tarballs into ${HOME}/rpmbuild/SOURCES.  Snapshot
	# builds materialize sources for a few special packages from git;
	# otherwise spectool downloads whatever the spec's Source URLs list.
	if [ -n "$snapshot_id" ]; then
		case "$package_name" in
			kernel|nestos-kernel) adapt_kernel ;;
			kata-containers) adapt_kata_containers ;;
			kuasar) adapt_kuasar ;;
			runc) adapt_runc ;;
			containerd) adapt_containerd ;;
			docker) adapt_docker ;;
			rust) adapt_rust ;;
		esac
	else
		[ -n "$package_name" ] && {
			su - ${build_user} -c "spectool -g -R $spec_dir/*.spec" || die "failed to download source file"
		}
	fi
}

set_macros()
{
	# Append job-supplied rpm macros ($macros) to both the build user's
	# and root's ~/.rpmmacros; optionally comment out the check-rpaths
	# hook when disable_check_path is set.
	local rpmmacros_files=(/home/${build_user}/.rpmmacros /root/.rpmmacros)
	[ -n "$disable_check_path" ] && {

		for file in ${rpmmacros_files[*]}
		do
			#    case "${QA_CHECK_RPATHS:-}" in [1yY]*) /usr/lib/rpm/check-rpaths ;; esac \
			[ -f "$file" ] && sed -i '/check-rpaths/ s/^/#/' "${file}"
		done
	}

	for file in ${rpmmacros_files[*]}
	do
		cat >> "$file" <<-EOF
			${macros}
		EOF
	done
}

config_maven_mirror()
{
	# Point maven at the huaweicloud mirror with a local repository under
	# the build user's (or, for use_root builds, root's) ~/.m2.
	local m2=/home/${build_user}/.m2
	[ -n "$build_by_root" ] && {
		local m2=/root/.m2
	}
	mkdir -p "$m2"/repository
	cat > "$m2"/settings.xml <<-EOF
	<settings>
		<localRepository>${m2}/repository</localRepository>
		<mirrors>
			<mirror>
				<id>huaweicloud</id>
				<name>huaweicloud</name>
				<mirrorOf>*</mirrorOf>
				<url>https://repo.huaweicloud.com/repository/maven/</url>
			</mirror>
		</mirrors>
	</settings>
	EOF
	chmod 777 -R "$m2"
	cat "$m2"/settings.xml
}

package_source_dirs()
{
	# Tar up every directory under SOURCES so rpmbuild sees archives
	# instead of raw trees; packages listed in use_xz get .tar.xz,
	# everything else gets .tar.gz.
	# Fix: iterate with a glob instead of parsing `ls`, and test the
	# xz condition directly instead of via a fragile "$?" check.
	old_pwd=$(pwd)
	cd ${source_dir}
	local entry
	for entry in *
	do
		[ -d "${entry}" ] || continue
		if [ -n "$use_xz" ] && in_array "${use_xz[*]}" "$package_name"; then
			tar -Jcf ${entry}.tar.xz ${entry}
		else
			tar -czvf ${entry}.tar.gz ${entry}
		fi
	done
	cd ${old_pwd}
}

down_grade()
{
	# eclipse builds need the older glassfish-jsp packages pinned back.
	local downgrade_pkgs=(eclipse)
	in_array "${downgrade_pkgs[*]}" "$package_name" || return
	yum downgrade -y glassfish-jsp
	yum downgrade -y glassfish-jsp-api
}

del_rpms()
{
	# Remove the packages the job asked to drop before building.
	[ -n "$remove_rpms" ] || return
	yum remove -y ${remove_rpms}
}

install_glassfish()
{
	# These packages need the glassfish servlet API at build time but do
	# not pull it in themselves.
	local servlet_pkgs=(tycho httpunit h2 openwebbeans jruby jetty hive glassfish-jsp geronimo-jaxrpc apache-commons-chain grizzly)
	in_array "${servlet_pkgs[*]}" "$package_name" || return
	yum install -y glassfish-servlet-api
}

install_python3()
{
	# Packages that need python3-devel without declaring it.
	local py3_pkgs=(python-rtslib python-httplib2)
	in_array "${py3_pkgs[*]}" "$package_name" || return
	yum install -y python3-devel
}

install_java180()
{
	# Packages that need the 1.8.0 JDK without declaring it.
	local jdk8_pkgs=(collectd jnr-ffi jffi eclipse java-atk-wrapper tycho relaxngcc protobuf proguard kernel libdb libtcnative brltty apache-commons-chain)
	in_array "${jdk8_pkgs[*]}" "$package_name" || return
	yum install -y java-1.8.0-openjdk java-1.8.0-openjdk-devel
}

install_kmod_libs()
{
	# Opt-in extra dependency for jobs that set use_kmod_libs.
	[ -n "$use_kmod_libs" ] || return
	yum install -y kmod-libs
}

install_environment_modules()
{
	# hdf5's build invokes `module`; install environment-modules and add
	# a fallback alias (note: aliases do not expand in non-interactive
	# shells, so this is best-effort).
	local module_pkgs=(hdf5)
	in_array "${module_pkgs[*]}" "$package_name" || return
	yum install -y environment-modules
	alias module=/usr/bin/modulecmd
}

adapt_kernel()
{
	# Regenerate the kernel source tarball: clone openEuler/kernel at the
	# tag recorded in SOURCES/SOURCE and pack it (minus .git) into SOURCES.
	# Fix: reuse the tag already read into $tags instead of cat-ing the
	# SOURCE file a second time (original read it twice, once unused).
	local tags=$(cat ${source_dir}/SOURCE)
	local old_pwd=$(pwd)
	cd /tmp/
	git_clone "https://gitee.com/openEuler/kernel" "${tags}" 1
	tar -czvf kernel.tar.gz --exclude=.git* kernel >/dev/null
	mv kernel.tar.gz ${source_dir} || die "failed to adapt kernel"
	cd ${old_pwd}
}

create_kernel_gz()
{
	# Build kernel.tar.gz into $1: clone the src-openeuler kernel
	# packaging repo (checked out as "src-kernel") to read the kernel tag
	# from its SOURCE file, then clone openeuler/kernel at that tag.
	local kernel_repo_url="git://${GIT_SERVER}/common/gitee.com/src-openeuler/kernel"
	[[ "$upstream_repo" == customization* ]] && {
		local kernel_repo_url="git://${GIT_SERVER}/customization/package_repos/k/kernel"
	}
	# NOTE: "url dir" is a single argument; git_clone expands it unquoted
	# so it word-splits into URL + target directory — intentional.
	git_clone "${kernel_repo_url} src-kernel" "${spec_branch}" 50
	[ -n "$package_tag" ] && {
		git -C src-kernel checkout ${package_tag}
	}
	tags=$(cat src-kernel/SOURCE)
	git_clone "https://gitee.com/openeuler/kernel" "${tags}" 1
	tar -czvf kernel.tar.gz --exclude=.git* kernel >/dev/null
	mv kernel.tar.gz $1 || die "failed to create kernel gz"
}

create_kata_integration()
{
	# Re-pack the kata_integration checkout as the spec's Source0 tarball
	# and move it into SOURCES.
	local inte_repo_url="git://${GIT_SERVER}/common/gitee.com/src-openeuler/kata_integration"
	[[ "$upstream_repo" == customization* ]] && {
		local inte_repo_url="git://${GIT_SERVER}/customization/package_repos/k/kata_integration"
	}
	git_clone "${inte_repo_url}" "${spec_branch}" 50
	[ -n "$package_tag" ] && {
		git -C kata_integration checkout ${package_tag}
	}

	# The tarball name is taken verbatim from the spec's Source0 line.
	local tar_gz=$(cat ${spec_dir}/${spec_file_name} |grep 'Source0: '|awk '{print $NF}')
	tar -czvf ${tar_gz} -C kata_integration --exclude=.git* . >/dev/null
	mv ${tar_gz} ${source_dir} || die " failed to create ${tar_gz}"
}

create_kata_containers()
{
	# Re-pack the kata-containers checkout as the spec's Source1 tarball,
	# expanding %{version} from the spec's "%define VERSION" line.
	old_pwd=$(pwd)
	cd /tmp
	git_clone "${repo_url}" "${spec_branch}" 50

	[ -n "$package_tag" ] && {
		git -C kata-containers checkout ${package_tag}
	}

	# 1.11.0
	local version=$(cat ${spec_dir}/${spec_file_name} |grep '%define VERSION '|awk '{print $NF}')
	# kata-containers-v${version}.tar.gz
	# kata-containers-${version}.tar.gz
	local no_version=$(cat ${spec_dir}/${spec_file_name} |grep 'Source1: '|awk '{print $NF}')
	local tar_gz=${no_version/\%\{version\}/${version}}
	tar -czvf ${tar_gz} -C kata-containers --exclude=.git* . >/dev/null
	mv ${tar_gz} ${source_dir} || die "failed to create ${tar_gz}"
	cd ${old_pwd}
}

create_kuasar()
{
	# Clone the kuasar repo into "kuasar-openeuler" (the "url dir"
	# argument word-splits inside git_clone — intentional), strip specs
	# and git metadata, then tar it into $1.
	old_pwd=$(pwd)
	cd /tmp
	local kuasar_openeuler="kuasar-openeuler"
	git_clone "${repo_url} ${kuasar_openeuler}" "${spec_branch}"
	rm -rf ${kuasar_openeuler}/*.spec
	rm -rf ${kuasar_openeuler}/.git
	tar -czvf kuasar-openeuler.tar.gz ${kuasar_openeuler} >/dev/null
	mv kuasar-openeuler.tar.gz $1
	cd ${old_pwd}
}

adapt_runc()
{
	# Only the 20.03-LTS-SP4 branch repackages the runc checkout as
	# docker-runc.tar.gz for the spec.
	if [[ "${spec_branch}" == "openEuler-20.03-LTS-SP4" ]];then
		old_pwd=$(pwd)
		cd /tmp
		git_clone "${repo_url}" "${spec_branch}" 50
		tar -czvf docker-runc.tar.gz -C runc --exclude=.git* . >/dev/null
		mv docker-runc.tar.gz ${source_dir} || die "failed to create docker-runc.tar.gz"
		cd $old_pwd
	fi
}

adapt_rust()
{
	# master-branch rust builds fetch the rustc source tarball from the
	# openEuler LFS mirror instead of letting spectool resolve Source0.
	if [[ "${spec_branch}" == "master" ]];then
		old_pwd=$(pwd)
		cd /tmp
		local rust_version=$(grep "Version:" ${spec_dir}/rust.spec |awk '{print $NF}')
		wget -q https://user-repo.openeuler.openatom.cn/lfs-tar/rust/rustc-${rust_version}-src.tar.xz
		mv rustc-${rust_version}-src.tar.xz ${source_dir} || die "failed to create rustc-${rust_version}-src.tar.xz"
		cd $old_pwd
	fi
}

adapt_containerd()
{
	# Only the 20.03-LTS-SP4 branch repackages the containerd packaging
	# repo as containerd-1.2.0.tar.gz for the spec.
	if [[ "${spec_branch}" == "openEuler-20.03-LTS-SP4" ]];then
		old_pwd=$(pwd)
		cd /tmp
		local cont_repo_url="git://${GIT_SERVER}/common/gitee.com/src-openeuler/containerd"
		[[ "$upstream_repo" == customization* ]] && {
			local cont_repo_url="git://${GIT_SERVER}/customization/package_repos/c/containerd"
		}
		git_clone "${cont_repo_url}" "${spec_branch}" 50
		tar -czvf containerd-1.2.0.tar.gz -C containerd --exclude=.git* . >/dev/null
		mv containerd-1.2.0.tar.gz ${source_dir} || die "failed to create containerd-1.2.0.tar.gz"
		cd $old_pwd
	fi
}

adapt_docker()
{
	# 20.03-LTS-SP4 docker builds bundle containerd, runc and the docker
	# engine sources as tarballs in SOURCES.
	adapt_containerd
	if [[ "${spec_branch}" == "openEuler-20.03-LTS-SP4" ]];then
		old_pwd=$(pwd)
		cd /tmp
		local runc_repo_url="git://${GIT_SERVER}/common/gitee.com/src-openeuler/runc"
		[[ "$upstream_repo" == customization* ]] && {
			local runc_repo_url="git://${GIT_SERVER}/customization/package_repos/r/runc"
		}
		git_clone "${runc_repo_url}" "${spec_branch}" 50
		tar -czvf runc-1.0.0.tar.gz -C runc --exclude=.git* . >/dev/null
		mv runc-1.0.0.tar.gz ${source_dir} || die "failed to create runc-1.0.0.tar.gz"

		git_clone "${repo_url}" "${spec_branch}" 50
		tar -czvf docker-engine.tar.gz -C docker . >/dev/null
		mv docker-engine.tar.gz ${source_dir} || die "failed to create docker-engine.tar.gz"
		cd $old_pwd
	fi
}

adapt_kuasar()
{
	# Snapshot kuasar build: regenerate both the bundled kernel tarball
	# and the kuasar source tarball in SOURCES.
	create_kernel_gz "${source_dir}"
	create_kuasar "${source_dir}"
}

adapt_kata_containers()
{
	# Snapshot kata-containers build: regenerate the bundled kernel
	# tarball plus the kata_integration and kata-containers archives.
	create_kernel_gz "${source_dir}"
	create_kata_integration
	create_kata_containers
}

adapt_kata_micro_kernel()
{
	# kata-micro-kernel variant: re-pack $1 (the kata_integration tree)
	# as the spec's Source1 tarball and regenerate the kernel tarball.
	local old_pwd=$(pwd)
	local tar_gz=$(cat ${spec_file_name} |grep 'Source1: '|awk '{print $NF}'|awk -F '/' '{print $NF}')
	cd /
	tar -czvf ${tar_gz} -C $1 --exclude=.git* .
	mv ${tar_gz} $1 || die "failed to create ${tar_gz}"
	create_kernel_gz $1
	cd ${old_pwd}
}

adapt_kata_integration()
{
	# Only relevant when building kata_integration itself ($1 is the
	# package name); regenerates the Source0 tarball from the checkout.
	[[ "$1" != "kata_integration" ]] && return

	if [ "$spec_file_name" == "kata-micro-kernel.spec" ];then
		adapt_kata_micro_kernel $1
	else
		old_pwd=$(pwd)
		# Expand %{name}/%{version} in the Source0 filename by hand.
		local version=$(cat ${spec_file_name} |grep '%define VERSION'|awk '{print $NF}')
		local name=$(cat ${spec_file_name} |grep 'Name:'|awk '{print $NF}')
		# %{name}-%{version}.tar.gz
		local no_name_version=$(cat ${spec_file_name} |grep 'Source0: '|awk '{print $NF}'|awk -F '/' '{print $NF}')
		local _tar_gz=${no_name_version/\%\{name\}/${name}}
		local tar_gz=${_tar_gz/\%\{version\}/${version}}
		cd /
		tar -czvf ${tar_gz} --exclude=.git* kata_integration
		mv ${tar_gz} $1 || die "failed to create ${tar_gz}"
		cd ${old_pwd}
	fi
}

localinstall_dependent_package()
{
	# Reverse-dependency rebuilds: mirror the depend job's uploaded rpms
	# from the scheduler's http server into /tmp/rpm and install them.
	yum install -y wget
	mkdir /tmp/rpm
	cd /tmp/rpm
	wget -c -r -np -nd -R index.html "${SRV_HTTP_PROTOCOL:-https}://${SCHED_HOST}:${SRV_HTTP_RPM_PORT:-20012}$depend_rpm_dest/" || die "failed to download local depend rpms"
	yum localinstall -y *.rpm || die "failed to local install rpms"
}

show_rpm_files()
{
	# List every binary rpm produced under the build tree.
	find "${rpmdev_dir}/RPMS" -type f -name '*.rpm'
}

show_src_rpm_files()
{
	# List every source rpm produced under the build tree.
	find "${rpmdev_dir}/SRPMS" -type f -name '*.src.rpm'
}

show_spec_file()
{
	# List every spec file staged for this build.
	find "${rpmdev_dir}/SPECS" -type f -name '*.spec'
}

check_rpm_dist()
{
	# Verify every produced rpm/srpm filename carries the %dist tag from
	# /root/.rpmmacros; die on the first offender.  Silently a no-op when
	# the macros file or a %dist line is absent.
	[ -f "/root/.rpmmacros" ] || return
	local rpm_dist=$(grep '%dist' /root/.rpmmacros | awk '{print $NF}')
	[ -n "$rpm_dist" ] || return

	local artifact
	for artifact in $(show_rpm_files)
	do
		[[ "${artifact}" == *${rpm_dist}* ]] || die "${artifact} does not contain ${rpm_dist}"
	done

	for artifact in $(show_src_rpm_files)
	do
		[[ "${artifact}" == *${rpm_dist}* ]] || die "${artifact} does not contain ${rpm_dist}"
	done
}

upload_rpm_pkg()
{
	# Upload all built binary rpms to rpm_dest (plus tmp_rpm_dest for PR
	# builds) and all source rpms to src_rpm_dest; die on first failure.
	check_rpm_dist

	local rpm_file
	for rpm_file in $(show_rpm_files)
	do
		upload_one_curl ${rpm_file} ${rpm_dest}
		[ "$?" != "0" ] && die "upload ${rpm_file} ${rpm_dest} failed."
		# Non-snapshot PR builds also publish to the temporary PR dest.
		[ -n "$snapshot_id" ] || {
			[ -n "$pr_merge_reference_name" ] && {
				upload_one_curl ${rpm_file} ${tmp_rpm_dest}
				[ "$?" != "0" ] && die "upload ${rpm_file} ${tmp_rpm_dest} failed."
			}
		}
	done

	local src_rpm_file
	for src_rpm_file in $(show_src_rpm_files)
	do
		upload_one_curl ${src_rpm_file} ${src_rpm_dest}
		[ "$?" != "0" ] && die "upload ${src_rpm_file} to ${src_rpm_dest} failed."
	done
}

upload_spec_file()
{
	# Push each staged .spec file into the job's result directory.
	local spec_file
	for spec_file in $(show_spec_file); do
		upload_one_curl ${spec_file} ${result_root}
	done
}

handle_rpm_list()
{
	# Build $full_list: a comma-separated series of quoted server-side
	# paths ("/srv<dest>/<file>") covering binary and source rpms.
	rpm_list=($(show_rpm_files | xargs basename -a))
	srpm_list=($(show_src_rpm_files | xargs basename -a))
	full_list=("${rpm_list[@]}" "${srpm_list[@]}")

	local idx
	for idx in "${!full_list[@]}"
	do
		local fname="${full_list[$idx]}"
		if [[ ${fname} =~ "src.rpm" ]]; then
			full_list[$idx]="\"/srv${src_rpm_dest}/${fname}\""
		else
			full_list[$idx]="\"/srv${rpm_dest}/${fname}\""
		fi
	done

	# Join entries with commas (every space becomes a comma).
	full_list=$(echo "${full_list[@]}" | sed 's; ;,;g')
}

update_repo_mq()
{
	# Tell the repo-update service which files were uploaded so it can
	# refresh the repo metadata for this job.
	handle_rpm_list

	curl -sS -H 'Content-Type: Application/json' -XPOST "${UPDATE_REPO_HOST}:${UPDATE_REPO_PORT}/upload" \
		-d "{\"upload_rpms\": ["${full_list}"], \"job_id\": \"${id}\"}"
}

find_reverse_depends()
{
	# Fill the global reverse_depends array with the names of packages
	# whose dependencies include this repo's package.
	local idx=0
	local dep
	rpm_name=${upstream_repo##*/}
	for dep in $(dnf repoquery -q --whatrequires "${rpm_name}")
	do
		dep=${dep%%:*}	# drop everything from the epoch colon on
		dep=${dep%-*}	# drop the trailing "-<epoch>" fragment
		reverse_depends[idx++]=$dep
	done
}

notify_scheduler_submit()
{
	# Ask the scheduler to spawn rebuild jobs for the reverse dependencies.
	# ${reverse_depends[@]} expands space-joined inside the assignment
	# (assignments do not word-split), producing one JSON string value.
	content='{"depend_job_id":"'${id}'","depend_rpm_dest":"'${tmp_rpm_dest}'","reverse_depends":"'${reverse_depends[@]}'"}'
	curl -sS -XPOST "http://$SCHED_HOST:$SCHED_PORT/rpmbuild/submit_reverse_depend_jobs" -d "$content"
}

detect_reverse_depends()
{
	# Only openeuler-namespaced repos trigger reverse-dependency rebuilds.
	[ "$upstream_dir" != "openeuler" ] && return
	find_reverse_depends
	notify_scheduler_submit
}

update_srpm_list()
{
	# Mark this srpm as built (build_state=1) in the scheduler's srpm list.
	srpm=${repo_addr##*/}
	data="{\"srpms\": [{\"os\": \"${compat_os}\", \"srpm\": \"${srpm}\", \"build_state\": \"1\"}], \"job_id\": \"${id}\", \"type\": \"update\"}"

	curl -sS -H 'Content-Type: Application/json' -XPOST ${SCHED_HOST}:${SCHED_PORT}/repo/set-srpm-info -d "${data}"
}

get_rpms_build_env()
{
	# Record every installed package's name and pkgid, one JSON object
	# per line (trailing comma stripped from the final line), into
	# /tmp/rpms_build_env for create_rpm_json.
	rpms_build_env=/tmp/rpms_build_env

	local line fields
	while IFS= read -r line; do
		fields=($line)
		cat >> "$rpms_build_env" <<-EOF
		{"name": "${fields[0]}", "pkgid": "${fields[1]}"},
		EOF
	done < <(rpm -qa --qf '%{NAME} %{PKGID}\n')
	# drop the trailing comma on the last line
	sed -i '$ s/.$//' "$rpms_build_env"
}

get_rpms_pkgid()
{
	# Build a JSON fragment ({"name","pkgid"} per rpm, comma separated,
	# trailing comma left for the caller to strip) covering both binary
	# and source rpms; consumed by create_rpm_json.
	brpms_pkgid=$(find ${rpmdev_dir}/RPMS -type f -name "*.rpm" | xargs rpm -q --qf '\{\"name\":\"%{NAME}\",\"pkgid\":\"%{PKGID}\"\},\n')
	srpms_pkgid=$(find ${rpmdev_dir}/SRPMS -type f -name "*.rpm" | xargs rpm -q --qf '\{\"name\":\"%{NAME}\", \"pkgid\":\"%{PKGID}\"\},\n')
	rpms_pkgid=${brpms_pkgid}${srpms_pkgid}
}

get_rpms_detail()
{
        # Emit, for every built (binary and source) rpm, a JSON entry with
        # summary/description/size/arch/version/release/buildtime and the
        # requires/provides lists, into /tmp/rpms_detail.json.  The nested
        # echo/sed pipelines replace '"' with "'" and strip backslashes so
        # the values stay JSON-safe.
        rpms_detail=/tmp/rpms_detail.json
        for path in "RPMS" "SRPMS"
        do
                full_path=${rpmdev_dir}/${path}
                for file in $(find ${full_path} -type f -name "*.rpm")
                do
                        requires=$(rpm -q ${file} --requires | awk '{printf"\"%s\",", $0}')
                        provides=$(rpm -q ${file} --provides | awk '{printf"\"%s\",", $0}')
                        cat >> "$rpms_detail" <<-EOF
			"${file##*/}":{
			"Title": "$(echo $(echo $(rpm -q ${file} --qf %{SUMMARY}) | sed $'s/\"/\'/g') | sed $'s/\\\\//g')",
			"Description": "$(echo $(echo $(rpm -q ${file} --qf %{DESCRIPTION}) | sed $'s/\"/\'/g') | sed $'s/\\\\//g')",
			"Size": "$(rpm -q ${file} --qf %{SIZE})",
			"Architecture": "$(rpm -q ${file} --qf %{ARCH})",
			"Version": "$(rpm -q ${file} --qf %{VERSION})",
			"Release": "$(rpm -q ${file} --qf %{RELEASE})",
			"Build Time": "$(rpm -q ${file} --qf %{BUILDTIME})",
			"requires": [${requires%?}],
			"provides":[${provides%?}]},
			EOF
                done
        done
        # strip the trailing comma of the final entry
        sed -i '$ s/.$//' "$rpms_detail"
}

get_rpms_install()
{
	# Dry-run install of the built rpms to detect missing dependencies;
	# appends a JSON "status"/"missing_deps" fragment to /tmp/install.json.
	rpms_install=/tmp/install.json

	# texlive-*
	if [[ "$package_name" =~ "texlive-" ]]; then
		res="Dependencies resolved."
	else
		res=$(find ${rpmdev_dir}/RPMS -type f -name "*.rpm" | xargs yum install --assumeno 2>&1)
	fi
	echo "$res" | grep "no arguments given for query"
	# NOTE(review): the unconditional reassignment below discards the real
	# resolver output, so the success branch always fires — this looks
	# like a debugging leftover; confirm before trusting "status".
	res="Dependencies resolved."

	echo "$res" | grep "Dependencies resolved."
	if [ $? == 0 ]; then
		echo "\"status\": \"success\", \"missing_deps\": {}" >> ${rpms_install}
	fi
	miss_deps=$(echo "$res" | grep '\- nothing provides' | awk -F '- nothing provides ' '{printf"\"%s\",",$2}')
	[ -n "$miss_deps" ] && {
		echo "\"status\": \"failed\", \"missing_deps\": [${miss_deps%?}]" >> ${rpms_install}
	}
}

create_rpm_json()
{
	# Assemble the per-job rpm metadata JSON (/tmp/<project>_..._<id>.json)
	# from the fragments produced by get_rpms_pkgid / get_rpms_detail /
	# get_rpms_install / get_rpms_build_env, logging each step's duration.
	startTime_s=$(date +%s)
	get_rpms_pkgid
	endTime_s=$(date +%s)
	sumTime=$[ $endTime_s - $startTime_s ]
	echo "get_rpms_pkgid: Total duration: $sumTime seconds"
	get_rpms_detail
	endTime_get_rpms_detail=$(date +%s)
	sumTime=$[ $endTime_get_rpms_detail - $endTime_s ]
	echo "get_rpms_detail: Total duration: $sumTime seconds"
	get_rpms_install
	endTime_get_rpms_install=$(date +%s)
	sumTime=$[ $endTime_get_rpms_install - $endTime_get_rpms_detail ]
	echo "get_rpms_install: Total duration: $sumTime seconds"
	get_rpms_build_env
	endTime_get_rpms_build_env=$(date +%s)
	sumTime=$[ $endTime_get_rpms_build_env - $endTime_get_rpms_install ]
	echo "get_rpms_build_env: Total duration: $sumTime seconds"
	#"spec_size": "$(du ${source_dir} --max-depth=1 --exclude='.git' | grep -v '/' | awk '{print $1}')",

	rpm_json=/tmp/${os_project}_${os_variant}_${os_arch}_${snapshot_id}_${id}.json
	cat > "$rpm_json" <<-EOF
	{
		"job_id": "${id}",
		"build_id": "${build_id}",
		"build_type": "${build_type}",
		"start_build_time": "${start_build_time}",
		"build_duration": "${build_duration}",
		"repo_name": "${package_name}",
		"spec_name": "${spec_name}",
		"rpm_path": "${rpm_dest}",
		"os_variant": "${os_variant}",
		"os_project": "${os_project}",
		"spec_commit": "${upstream_commit}",
		"spec_commit_time": "${upstream_commit_time}",
		"snapshot_id": "${snapshot_id}",
		"submit_time": "${submit_time}",
		"architecture": "${os_arch}",
		"rpms": [
			${rpms_pkgid%?}
		],
		"install": {
			$(cat ${rpms_install})
		},
		"rpms_detail": {
			$(cat ${rpms_detail})
		},
		"rpms_build_env": [
			$(cat ${rpms_build_env})
		]
	}
	EOF
}

upload_rpm_json()
{
	# Generate the per-job rpm metadata json and push it to the
	# new-jobs queue directory, logging how long the upload took.
	create_rpm_json
	local t0=$(date +%s)
	upload_one_curl "$rpm_json" "/repositories/new-jobs/" || die "upload ${rpm_json} to /repositories/new-jobs/ failed."
	local t1=$(date +%s)
	echo "upload_one_curl $rpm_json: Total duration: $(( t1 - t0 )) seconds"
}

# ---- main flow -------------------------------------------------------------
startTime_s=$(date +%s)
# NOTE(review): 'a || b && c' groups as '(a || b) && c', so yum_repo_retry
# also runs when snapshot_id is set — confirm this guard is intended.
[ -n "$snapshot_id" ] || add_repo && yum_repo_retry
endTime_s=$(date +%s)
sumTime=$[ $endTime_s - $startTime_s ]
echo "add_repo && yum_repo_retry: Total duration: $sumTime seconds"
# Git-driven jobs define upstream_repo; otherwise install from a source rpm.
if [ -n "$upstream_repo" ]; then
	echo $upstream_repo
	from_git
else
	from_srpm
fi
upload_spec_file
build_rpm
[ $? = 0 ] && {
	[ -n "$snapshot_id" ] || update_srpm_list
}
# Reverse-dependency child jobs stop after building/updating.
[ -n "$depend_upstream_repo" ] && exit 0
startTime_s=$(date +%s)
detect_reverse_depends
endTime_s=$(date +%s)
sumTime=$[ $endTime_s - $startTime_s ]
echo "detect_reverse_depends: Total duration: $sumTime seconds"
upload_rpm_pkg
endTime_upload_rpm_pkg=$(date +%s)
sumTime=$[ $endTime_upload_rpm_pkg - $endTime_s ]
echo "upload_rpm_pkg: Total duration: $sumTime seconds"
# Snapshot builds publish metadata json; regular builds notify the repo MQ.
[ -n "$snapshot_id" ] && upload_rpm_json
endTime_upload_rpm_json=$(date +%s)
sumTime=$[ $endTime_upload_rpm_json - $endTime_upload_rpm_pkg ]
echo "upload_rpm_json: Total duration: $sumTime seconds"
[ -n "$snapshot_id" ] || update_repo_mq
endTime_upload_repo_mq=$(date +%s)
sumTime=$[ $endTime_upload_repo_mq - $endTime_upload_rpm_json ]
echo "upload_repo_mq: Total duration: $sumTime seconds"
#[ -n "$upstream_commit" ] && rpmbuild_email "rpmbuild_success"
#exit 0
