﻿#ifdef HAVE_CONFIG_H
#include <config.h>
#endif

#include <glog/logging.h>

#include <stdio.h>
#include <stdlib.h>
#include <vector>
#include <map>
#include <algorithm>
#include <iostream>
#include <sstream>
#include <memory> //auto_ptr

#include "hadoop/Pipes.hh"
#include "hadoop/TemplateFactory.hh"
#include "hadoop/StringUtils.hh"


#include <google/szl/porting.h>
#include <google/szl/commandlineflags.h>
#include <google/szl/hashutils.h>

#include <google/szl/sawzall.h>
#include <google/szl/szltype.h>
#include <google/szl/szltabentry.h>
#include <google/szl/emitterinterface.h>
#include <google/szl/szlemitter.h>
#include <google/szl/szldecoder.h>
#include <google/szl/szlresults.h>
#include <google/szl/emitterinterface.h>
#include <stdexcept>
#include <QtCore/QCoreApplication>
#include <QtCore/QProcessEnvironment>
#include <QtCore/QFile>
#include <QtCore/QTextStream>

// Forwards Sawzall compile/runtime diagnostics to glog.
class MyErrorHandler: public sawzall::ErrorHandler{
  // Called by the Sawzall engine for each diagnostic. The original dropped
  // the source location and severity; log them so script errors can be
  // traced back to the offending line.
  virtual void Report(const char* file_name, int line, int offset,
                      bool is_warning, const char* message){
    if (is_warning) {
      LOG(WARNING) << (file_name ? file_name : "<unknown>") << ":" << line
                   << ":" << offset << ": " << message;
    } else {
      LOG(ERROR) << (file_name ? file_name : "<unknown>") << ":" << line
                 << ":" << offset << ": " << message;
    }
  }
};


// SzlEmitter that forwards aggregated values to the Hadoop task context.
// The table name plus an embedded '\0' separator is kept in prefix_ so the
// reduce side can split a composite record key back into (table, key).
class MyEmitter : public SzlEmitter {
public:
  // name: Sawzall table name; writer: table writer (lifetime managed by the
  // caller); taskContext: Hadoop context used to emit records (not owned).
  MyEmitter(const string& name, const SzlTabWriter* writer,HadoopPipes::TaskContext* taskContext)
    : SzlEmitter(name, writer, false),taskContext_(taskContext) {
    // Build "<name>\0" directly; the original copied through a temporary
    // heap buffer for no benefit (std::string is NUL-safe).
    prefix_ = name;
    prefix_.push_back('\0');
  }

private:
  const SzlTabWriter* writer() { return writer_; }
  virtual void WriteValue(const string& key, const string& value);
  std::string prefix_;                     // "<table name>\0"
  HadoopPipes::TaskContext* taskContext_;  // not owned
};

// Emits one (key, value) record to Hadoop. The record key is the table
// prefix ("<table>\0") followed by the Sawzall key, so the reducer can
// recover the table name (see the '\0' split in HadoopSzlReduce::reduce).
// The original built the prefixed key into a manually allocated buffer,
// leaked that buffer (new[] with no delete[]), and then emitted the
// un-prefixed key anyway.
void MyEmitter::WriteValue(const string& key, const string& value) {
  this->taskContext_->emitRecord(prefix_ + key, value);
  this->Flusher();
}

// EmitterFactory that creates MyEmitter instances bound to the Hadoop task
// context. Owns every emitter it hands out; they are deleted when the
// factory is destroyed.
class HadoopEmitterFactory : public sawzall::EmitterFactory {
private:
  HadoopPipes::TaskContext* taskContext_;  // not owned

public:
  explicit HadoopEmitterFactory(HadoopPipes::TaskContext* taskContext):taskContext_(taskContext){}
  ~HadoopEmitterFactory() throw (){
    try{
      // Explicitly deallocate each emitter created by NewEmitter().
      for (size_t i = 0; i < emitters_.size(); ++i) {
        delete emitters_[i];
      }
    }catch(...){
      // Destructors must not throw; swallow anything from emitter teardown.
    }
  }

  /**
   * Creates a new emitter for the given table. The emitter is owned by this
   * factory and destroyed when the factory is destructed.
   * Returns NULL (with *error set by the Szl helpers) when the table type
   * cannot be parsed or no table writer exists for it.
   */
  virtual sawzall::Emitter* NewEmitter(sawzall::TableInfo* table_info, string* error) {
    const char* name = table_info->name();
    SzlEmitter* emitter = NULL;
    SzlType szl_type(SzlType::VOID);
    if (szl_type.ParseFromSzlArray(table_info->type_string().data(),
                                   table_info->type_string().size(),
                                   error)) {
      SzlTabWriter* tab_writer = SzlTabWriter::CreateSzlTabWriter(szl_type,
                                                                  error);
      if (tab_writer != NULL)
        emitter = new MyEmitter(name, tab_writer,this->taskContext_);
    }
    // Track only successfully created emitters; the original also stored
    // NULLs in emitters_.
    if (emitter != NULL)
      emitters_.push_back(emitter);
    return emitter;
  }

private:
  vector<SzlEmitter*> emitters_;    // owned; see destructor
};


class HadoopSzlMap: public HadoopPipes::Mapper {
private:
  sawzall::Process* process;
  HadoopEmitterFactory* emitterFactory; 
  HadoopPipes::TaskContext::Counter* failCounter;
public: 
   HadoopSzlMap(HadoopPipes::TaskContext& context) {
        LOG(INFO)<<"mapper初始化开始";
    QString szlScriptFilename=QProcessEnvironment::systemEnvironment().value("SZL_SCRIPT_FILENAME");
    const std::string program_name="mydemo";
    {
      QByteArray tmp=szlScriptFilename.toUtf8();
          LOG(INFO) << "从"<<tmp.data()<<"载入SZL脚本";
    }
    QFile data(szlScriptFilename);
    MyErrorHandler handler;
    QString scriptContent;
    if (data.open(QFile::ReadOnly)) {
      QTextStream f(&data);
      scriptContent=f.readAll();
    } else {
      LOG(FATAL)<<"载入SZL脚本失败";
      throw std::runtime_error("载入SZL脚本失败");
    }
    QByteArray scriptContentUtf8=scriptContent.toUtf8();
    sawzall::Executable exe(program_name.c_str(), scriptContentUtf8.data(), sawzall::kDebug,&handler);  
    if (!exe.is_executable()){
      LOG(ERROR)<<"could not compile " << program_name;
      throw std::runtime_error("编译SZL脚本失败");
    }
    LOG(INFO)<<"创建exe完成";
    this->process=new sawzall::Process(&exe, false, NULL);
    LOG(INFO)<<"创建process完成";
    this->emitterFactory=new HadoopEmitterFactory(&context);
    LOG(INFO)<<"向process注册emitter factory";
    process->set_emitter_factory(this->emitterFactory);
    LOG(INFO)<<"注册process";
    sawzall::RegisterEmitters(process);
    if (!process->Initialize()) {
      LOG(FATAL) << "could not initialize " << program_name;
      throw std::runtime_error("初始化SZL失败");
    }
    this->failCounter=context.getCounter("hadoopszl", "FAIL_RECORDS");
    LOG(INFO)<<"mapper初始化完成";
  }
  
  void map(HadoopPipes::MapContext& context) {
    std::string inputvalue=context.getInputValue();
    if(!process->Run(inputvalue.data(),inputvalue.size(),NULL,0)) {
      context.incrementCounter(this->failCounter, 1);
      return;
    }
    LOG_EVERY_N(INFO, 10000) << "10000 records";
  }
};

// Hadoop reducer. The constructor re-compiles the Sawzall script (to list
// its output tables); reduce() currently only splits the composite key and
// drains the values — aggregation is not implemented yet.
class HadoopSzlReduce: public HadoopPipes::Reducer {
public:
  HadoopSzlReduce(HadoopPipes::TaskContext& context) {
    LOG(INFO)<<"reduce初始化开始";
    QString szlScriptFilename=QProcessEnvironment::systemEnvironment().value("SZL_SCRIPT_FILENAME");
    const std::string program_name="mydemo";
    {
      QByteArray tmp=szlScriptFilename.toUtf8();
      LOG(INFO) << "从"<<tmp.data()<<"载入SZL脚本";
    }
    QFile data(szlScriptFilename);
    MyErrorHandler handler;
    QString scriptContent;
    if (data.open(QFile::ReadOnly)) {
      QTextStream f(&data);
      scriptContent=f.readAll();
    } else {
      // The original used LOG(FATAL), which aborts before the throw can run;
      // use ERROR so the exception actually reaches the framework.
      LOG(ERROR)<<"载入SZL脚本失败";
      throw std::runtime_error("载入SZL脚本失败");
    }
    QByteArray scriptContentUtf8=scriptContent.toUtf8();
    // `exe` is only used inside this constructor, so a stack-local is fine
    // here (unlike the mapper, no Process keeps a pointer to it).
    sawzall::Executable exe(program_name.c_str(), scriptContentUtf8.data(), sawzall::kDebug,&handler);
    if (!exe.is_executable()){
      LOG(ERROR)<<"could not compile " << program_name;
      throw std::runtime_error("编译SZL脚本失败");
    }
    LOG(INFO)<<"创建exe完成";
    // Log every output table the script declares.
    auto tableinfo=exe.tableinfo();
    for(auto it=tableinfo->begin();it!=tableinfo->end();++it){
      sawzall::TableInfo* ti=*it;
      LOG(INFO)<<"table "<<ti->name();
    }

    LOG(INFO)<<"reduce初始化结束";
  }

  // Keys arrive as "<table>\0<real key>" (built by MyEmitter); split at the
  // FIRST NUL. The original scanned the whole key and split at the LAST
  // NUL, corrupting real keys that themselves contain '\0'.
  void reduce(HadoopPipes::ReduceContext& context) {
    const std::string& inputkey= context.getInputKey();
    std::string tablename;
    std::string realkey;
    const std::string::size_type sep = inputkey.find('\0');
    if (sep != std::string::npos) {
      tablename = inputkey.substr(0, sep);
      realkey = inputkey.substr(sep + 1);
    }
    LOG(INFO)<<tablename;
    // TODO: aggregate the values with the Szl table writers; for now just
    // drain them so the framework can advance.
    while (context.nextValue()) {
    }
  }
};


// Entry point: initializes logging, Qt, and the Sawzall runtime, then hands
// control to the Hadoop Pipes framework, which instantiates HadoopSzlMap /
// HadoopSzlReduce as the parent Hadoop process requests them.
// The statements below are strictly order-dependent (logging before any
// LOG call, QCoreApplication before any Qt use in the task classes).
int main(int argc, char *argv[]) {
  setlocale(LC_ALL,"");
  google::InitGoogleLogging("hadoopszl");
  ProcessCommandLineArguments(argc, argv);
  // Needed for the QFile/QProcessEnvironment calls in the task constructors;
  // no Qt event loop is ever started.
  QCoreApplication app(argc,argv);
  LOG(INFO) << "app started";
  InitializeAllModules();
  // Register the built-in Sawzall table types so SzlTabWriter creation works.
  sawzall::RegisterStandardTableTypes();
  LOG(INFO) << "szl初始化完成";

  // runTask drives the whole map/reduce conversation with the Hadoop parent
  // process over the Pipes protocol and returns its exit status.
  int ret=HadoopPipes::runTask(HadoopPipes::TemplateFactory<HadoopSzlMap, 
                              HadoopSzlReduce>());

  // NOTE(review): quit() is a no-op here since app.exec() was never called.
  app.quit();
  LOG(INFO) << "word count simple end,ret="<<ret;
  return ret;
}

