package com.ebupt.migu.music.api.service.impl;

import com.ebupt.migu.music.api.service.HiveToRedisService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.*;
import org.springframework.stereotype.Service;

import java.io.*;
import java.nio.charset.StandardCharsets;

/**
 * Loads user data exported from Hive (as CSV files) into a Redis hash.
 *
 * @author liuyangyang@cd.ebupt.com
 * @since 2020/7/16
 */
@Slf4j
@Service
public class HiveToRedisServiceImpl implements HiveToRedisService {

    /**
     * Redis key under which the user-info hash is stored. Layout:
     * key   : _user_info
     * value : map&lt;field, value&gt;
     * where field is a phone number (msisdn) or uid and value is the user's
     * name, e.g. "13688361924" -&gt; "刘洋洋".
     */
    private static final String REDIS_KEY = "_user_info";

    // NOTE(review): kept as a raw RedisTemplate on purpose — narrowing it to
    // RedisTemplate<String, String> could change which Spring bean (and which
    // serializer) is injected, altering the on-wire Redis format. Confirm the
    // intended bean before generifying.
    @Autowired
    private RedisTemplate redisTemplate;


    /**
     * Reads every {@code *.csv} file in the given directory (lines of the form
     * {@code msisdnOrUid,userName}) and stores the pairs into the Redis hash
     * {@link #REDIS_KEY}, replacing any previously stored content. When a key
     * appears more than once, the first value seen is kept.
     *
     * @param path directory containing the Hive-exported CSV files
     * @throws IOException if a file cannot be read
     */
    @Override
    public void userName(String path) throws IOException {
        // Start from a clean slate. delete() is a no-op when the key is
        // absent, so no hasKey() pre-check is needed.
        redisTemplate.delete(REDIS_KEY);
        // Bound ops: every hash operation below targets REDIS_KEY.
        BoundHashOperations boundHashOperations = redisTemplate.boundHashOps(REDIS_KEY);
        File[] files = new File(path).listFiles();
        if (files == null) {
            // listFiles() returns null when the path does not exist or is not
            // a directory — fail soft instead of throwing an NPE below.
            log.warn("路径不存在或不是目录：{}", path);
            return;
        }
        for (File file : files) {
            String fileName = file.getName();
            log.info("当前文件：{}", fileName);
            if (!fileName.endsWith(".csv")) {
                log.info("{}不是可执行文件，跳过", file.getName());
                continue;
            }
            // try-with-resources closes the reader even if readLine() throws;
            // explicit UTF-8 because FileReader would otherwise decode with the
            // platform default charset and garble Chinese names.
            try (BufferedReader br = new BufferedReader(
                    new InputStreamReader(new FileInputStream(file), StandardCharsets.UTF_8))) {
                String next;
                while ((next = br.readLine()) != null) {
                    String[] line = next.split(",");
                    if (line.length != 2) {
                        log.info("当前行包含元素:{}", line.length);
                        continue;
                    }
                    String key = line[0];
                    String value = line[1];
                    // hasKey() returns Boolean on the raw type; TRUE.equals
                    // avoids an unboxing NPE. First value wins for duplicates.
                    if (Boolean.TRUE.equals(boundHashOperations.hasKey(key))) {
                        log.info("当前key：{}已经存在，跳过当前数据", key);
                        continue;
                    }
                    boundHashOperations.put(key, value);
                    log.info("key:{}, value:{}", key, value);
                }
            }
        }
        log.info("当前存储的用户信息总量：{}条", boundHashOperations.size());
    }
}