#!/bin/bash
# - testsuite
# - testcase

. $LKP_SRC/lib/upload.sh

. $LKP_SRC/lib/job.sh
# Pull the "os" variable out of the job file into the shell environment.
load_job_vars os
# NOTE(review): hard-coded default gateway for the test network — confirm
# 172.168.131.2 matches the deployment LAN before reuse elsewhere.
ip route add default via 172.168.131.2

setup_repo()
{
	# Prepare yum/dnf repositories on openEuler:
	#  - install the LKP default repo file if /etc/yum.repos.d is empty
	#  - install jq and yq (needed to parse the job YAML)
	#  - if job.yaml customizes ".repo", generate extra.repo from it and
	#    disable every other repo file by renaming it to *.repo_bak
	# No-op on non-openEuler systems.
	[ "$os" = "openeuler" ] || return 0

	if [ -z "$(ls -A /etc/yum.repos.d)" ]; then
		cp "$LKP_SRC/os-repo/$os/default.repo" "/etc/yum.repos.d/$os.repo"
	fi

	dnf update -y && dnf install -y jq && pip install yq -i https://mirrors.aliyun.com/pypi/simple || exit 1

	# If ".repo" is customized in job.yaml, pack every repo_name defined
	# under it into a new repo file.
	repo_names=$(yq '.repo | keys[]' "$job")
	echo "$repo_names"
	[ -z "$repo_names" ] && return

	# Iterate over every defined repo_name (yq emits them one per line,
	# possibly quoted — the ${var//\"/} expansions strip the quotes).
	for repo_name in $repo_names; do
		# Read each field of this repo entry from the job file.
		baseurl=$(yq ".repo.$repo_name.baseurl" "$job")
		enabled=$(yq ".repo.$repo_name.enabled" "$job")
		gpgcheck=$(yq ".repo.$repo_name.gpgcheck" "$job")
		gpgkey=$(yq ".repo.$repo_name.gpgkey" "$job")
		metalink=$(yq ".repo.$repo_name.metalink" "$job")
		metadata_expire=$(yq ".repo.$repo_name.metadata_expire" "$job")
		priority=$(yq ".repo.$repo_name.priority" "$job")

		# Mandatory fields.
		repo_config="[${repo_name//\"/}]
name=${repo_name//\"/}
baseurl=${baseurl//\"/}
enabled=${enabled//\"/}
gpgcheck=${gpgcheck//\"/}"

		# Optional fields: yq prints "null" for absent keys, so skip those.
		# (Quoting fixes the original `[ -n $var ]`, which is always true
		# with an empty/unset var because one-argument test checks the
		# string "-n" itself.)
		if [ -n "$gpgkey" ] && [ "$gpgkey" != "null" ]; then
			repo_config="$repo_config
gpgkey=${gpgkey//\"/}"
		fi

		if [ -n "$metalink" ] && [ "$metalink" != "null" ]; then
			repo_config="$repo_config
metalink=${metalink//\"/}"
		fi

		if [ -n "$metadata_expire" ] && [ "$metadata_expire" != "null" ]; then
			repo_config="$repo_config
metadata_expire=${metadata_expire//\"/}"
		fi

		if [ -n "$priority" ] && [ "$priority" != "null" ]; then
			repo_config="$repo_config
priority=${priority//\"/}"
		fi

		# Append this section to the generated repo file.
		echo "$repo_config" >> "/etc/yum.repos.d/extra.repo"
	done

	# Disable every pre-existing repo file so only extra.repo is active.
	cd /etc/yum.repos.d/ || exit 1
	for file in *.repo; do
		if [ "$file" != "extra.repo" ]; then
			mv "$file" "${file%.repo}.repo_bak"
		fi
	done
	echo "============new_repo==========="
	ls /etc/yum.repos.d/
	cat /etc/yum.repos.d/*.repo
	cd -
}

setup_mugen()
{
	# Clone the mugen test framework into $TMP/mugen, retrying on failure
	# up to max_retry_times, then cd into the checkout (exit on failure).
	dnf install -y git
	max_retry_times=30
	retry_times=0
	while true; do
		# A partially/fully cloned directory counts as done.
		if [ -d "$TMP/mugen" ]; then
			echo "The download is complete"
			break
		fi

		if [ "$retry_times" -ge "$max_retry_times" ]; then
			echo "The number of retries exceeds the limit"
			break
		fi

		# NOTE(review): hard-coded internal git daemon address — confirm
		# it matches the deployment environment.
		if ! git clone git://172.168.178.72:9418/mugen "$TMP/mugen"; then
			echo "Download failed.Retrying..."
			((retry_times++))
		fi
	done

	cd "$TMP/mugen" || exit 1
}

set_env()
{
	# Install mugen's dependencies and register the test node(s) with
	# `mugen.sh -c`. When direct_server_ips is set, register the server
	# plus every direct_client<N>_ips variable; otherwise register this
	# host's first non-virtual IPv4 address as a single client.
	export LANG="en_US.UTF-8"
	bash dep_install.sh
	if [ -n "$direct_server_ips" ]; then
		echo $(env | grep "direct")
		echo "direct_server_ips:$direct_server_ips, direct_client_1: $direct_client1_ips"
		bash mugen.sh -c --ip $direct_server_ips --password $secrets_userpassword --user $secrets_username --port 22

		# Walk direct_client1_ips, direct_client2_ips, ... until unset.
		client_no=1
		while true; do
			# ${!name} indirect expansion replaces the fragile eval.
			client_var="direct_client${client_no}_ips"
			direct_client_ips=${!client_var}
			[ -n "$direct_client_ips" ] || break
			bash mugen.sh -c --ip $direct_client_ips --password $secrets_userpassword --user $secrets_username --port 22
			# BUGFIX: original "{{ client_no++ }}" is not bash syntax — it
			# tried to run a command named "{{", the counter never advanced,
			# and the loop never terminated.
			client_no=$((client_no + 1))
		done
	else
		direct_client_ips=$(ip addr show|grep inet|grep -v inet6|grep -Ewv "lo.*|docker.*|vlan.*|bond.*|virbr.*|br-.*" |awk '{print $2}'|awk -F "/" '{print $1}' |head -1)
		bash mugen.sh -c --ip $direct_client_ips --password $secrets_userpassword --user $secrets_username --port 22
	fi
}

test_one_case()
{
	# Run a single test case ($testcase) from suite $testsuite; -x enables
	# mugen's command tracing.
	local mugen_args=(-f "$testsuite" -r "$testcase" -x)
	bash mugen.sh "${mugen_args[@]}"
}

test_one_testsuite()
{
	# Run every case in suite $testsuite with command tracing enabled.
	local mugen_args=(-f "$testsuite" -x)
	bash mugen.sh "${mugen_args[@]}"
}

test_all_cases()
{
	# Run the entire mugen suite (-a) with command tracing enabled.
	local mugen_args=(-a -x)
	bash mugen.sh "${mugen_args[@]}"
}

upload_logs()
{
       yum install -y tar
       tar -czf logs.tar logs
       tar -czf results.tar results
       upload_files -t results logs.tar
       upload_files -t results results.tar
}

run()
{
	# Abort early unless every setup stage succeeds.
	if ! { setup_repo && setup_mugen && set_env; }; then
		return 1
	fi

	# Dispatch: suite+case -> one case; suite only -> whole suite;
	# neither -> everything.
	if [ -n "$testsuite" ]; then
		if [ -n "$testcase" ]; then
			test_one_case
		else
			test_one_testsuite
		fi
	else
		test_all_cases
	fi
	upload_logs
}

# Script entry point.
run
