﻿#ifdef HAVE_CONFIG_H
#include <config.h>
#endif


#include "BinaryProtocol.h"
#include <glog/logging.h>
#include <openssl/hmac.h>
#include <openssl/buffer.h>
#include <hadoop/SerialUtils.hh>
#include <hadoop/StringUtils.hh>

namespace HadoopPipes {
	// Wire up the binary protocol: record the downward-event handler, wrap
	// the downward Qt device in a FileInStream, build the upward protocol on
	// the up device, and load the shared authentication secret (if any).
	BinaryProtocol::BinaryProtocol(QIODevice* down, DownwardProtocol* _handler, QIODevice* up) {
		handler = _handler;
		authDone = false;
		uplink = new BinaryUpwardProtocol(up);
		downStream = new HadoopUtils::FileInStream();
		downStream->open(down);
		// An absent secret leaves `password` empty, which disables auth.
		getPassword(password);
	}

	void BinaryProtocol::getPassword(std::string &password) {
		QString passwordFile = QProcessEnvironment::systemEnvironment().value("hadoop.pipes.shared.secret.location");
		if (passwordFile.isEmpty()) {
			return;
		}
		QFile file(passwordFile);
		if (!file.open(QIODevice::ReadOnly)){
			LOG(INFO)<< "Could not open the password file";
			return;
		}
		auto data=file.readAll();
		file.close();
		password.replace(0,data.size(),data.data(),data.size());
		return; 
	}

	// Check the server-supplied digest against the shared secret and, on
	// success, mark authentication done and answer with our own digest.
	void BinaryProtocol::verifyDigestAndRespond(std::string& digest, std::string& challenge) {
		// An empty password means the task is running in debug mode from a
		// command file, so authentication is skipped entirely.
		if (password.empty()) {
			authDone = true;
			return;
		}
		const bool serverAuthenticated = verifyDigest(password, digest, challenge);
		if (!serverAuthenticated) {
			LOG(FATAL)<< "Server failed to authenticate. Exiting" ;
			exit(-1);
		}
		authDone = true;
		// Respond with an HMAC over the server's digest, proving we also
		// hold the shared secret.
		uplink->authenticate(createDigest(password, digest));
	}

	std::string BinaryProtocol::createDigest(std::string &password, std::string& msg) {
		HMAC_CTX ctx;
		unsigned char digest[EVP_MAX_MD_SIZE];
		HMAC_Init(&ctx, (const unsigned char *)password.c_str(), 
			password.length(), EVP_sha1());
		HMAC_Update(&ctx, (const unsigned char *)msg.c_str(), msg.length());
		unsigned int digestLen;
		HMAC_Final(&ctx, digest, &digestLen);
		HMAC_cleanup(&ctx);

		//now apply base64 encoding
		BIO *bmem, *b64;
		BUF_MEM *bptr;

		b64 = BIO_new(BIO_f_base64());
		bmem = BIO_new(BIO_s_mem());
		b64 = BIO_push(b64, bmem);
		BIO_write(b64, digest, digestLen);
		BIO_flush(b64);
		BIO_get_mem_ptr(b64, &bptr);

		char* digestBuffer=new char[bptr->length];
		memcpy(digestBuffer, bptr->data, bptr->length-1);
		digestBuffer[bptr->length-1] = 0;
		BIO_free_all(b64);

		std::string ret(digestBuffer);
		delete digestBuffer;
		return ret;
	}

	// Tear down the protocol, releasing both stream wrappers this instance
	// allocated in its constructor. `handler` is not owned and is left alone.
	BinaryProtocol::~BinaryProtocol() {
		delete uplink;
		delete downStream;
	}


	/**
	 * Read one command from the downward stream and dispatch it to the
	 * handler.
	 *
	 * The first message must be AUTHENTICATION_REQ unless authentication has
	 * already completed (or is disabled because no secret was configured).
	 * Each command deserializes its own payload in wire order before
	 * forwarding it.
	 */
	void BinaryProtocol::nextEvent() {
		MESSAGE_TYPE cmd;
		cmd = (MESSAGE_TYPE)HadoopUtils::deserializeInt(*downStream);
		if (!authDone && cmd != AUTHENTICATION_REQ) {
			//Authentication request must be the first message if
			//authentication is not complete
			//NOTE(review): glog's LOG(FATAL) aborts the process, so the
			//throw below is normally unreachable -- confirm which exit path
			//is intended.
			LOG(FATAL)<< "Command:" << cmd << "received before authentication. " 
				<< "Exiting.." << std::endl;
			throw std::runtime_error("got command before auth");
		}
		//Per-record commands are far too frequent to log.
		if(cmd!=MAP_ITEM && cmd!=REDUCE_KEY && cmd!=REDUCE_VALUE) 
			LOG(INFO)<<"got command"<<messagetTypeToString(cmd);
		switch (cmd) {
		case AUTHENTICATION_REQ: {
			std::string digest;
			std::string challenge;
			HadoopUtils::deserializeString(digest, *downStream);
			HadoopUtils::deserializeString(challenge, *downStream);
			LOG(INFO)<<"got digest and challenge,will verify and respond";
			verifyDigestAndRespond(digest, challenge);
			LOG(INFO)<<"auth finished";
			break;
		}
		case START_MESSAGE: {
			int32_t prot;
			prot = HadoopUtils::deserializeInt(*downStream);
			//Only the protocol version number is verified here.
			handler->start(prot);
			break;
		}
		case SET_JOB_CONF: {
			int32_t entries;
			entries = HadoopUtils::deserializeInt(*downStream);
			//BUGFIX: the old code constructed the vector with `entries`
			//default-constructed (empty) strings and then push_back'ed the
			//real values on top, handing the handler 2*entries elements with
			//a phantom empty first half. reserve() keeps the single
			//allocation without the extra elements.
			std::vector<std::string> result;
			result.reserve(entries);
			for(int i=0; i < entries; ++i) {
				std::string item;
				HadoopUtils::deserializeString(item, *downStream);
				result.push_back(item);
			}
			handler->setJobConf(result);
			break;
		}
		case SET_INPUT_TYPES: {
			std::string keyType;
			std::string valueType;
			HadoopUtils::deserializeString(keyType, *downStream);
			HadoopUtils::deserializeString(valueType, *downStream);
			handler->setInputTypes(keyType, valueType);
			break;
		}
		case RUN_MAP: {
			std::string split;
			int32_t numReduces;
			int32_t piped;
			HadoopUtils::deserializeString(split, *downStream);
			numReduces = HadoopUtils::deserializeInt(*downStream);
			piped = HadoopUtils::deserializeInt(*downStream);
			handler->runMap(split, numReduces, piped!=0);
			break;
		}
		case MAP_ITEM: {
			//key/value are members, reused across records to avoid
			//reallocating per item.
			HadoopUtils::deserializeString(key, *downStream);
			HadoopUtils::deserializeString(value, *downStream);
			handler->mapItem(key, value);
			break;
		}
		case RUN_REDUCE: {
			int32_t reduce;
			int32_t piped;
			reduce = HadoopUtils::deserializeInt(*downStream);
			piped = HadoopUtils::deserializeInt(*downStream);
			handler->runReduce(reduce, piped!=0);
			break;
		}
		case REDUCE_KEY: {
			HadoopUtils::deserializeString(key, *downStream);
			handler->reduceKey(key);
			break;
		}
		case REDUCE_VALUE: {
			HadoopUtils::deserializeString(value, *downStream);
			handler->reduceValue(value);
			break;
		}
		case CLOSE:
			handler->close();
			break;
		case ABORT:
			handler->abort();
			break;
		default:
			HADOOP_ASSERT(false, "Unknown binary command " + HadoopUtils::toString(cmd));
		}
	}
}