# sqoop

# 1. loading data
# sqoop-import (generic-args) (import-args)
# sqoop import (generic-args) (import-args)

# (1) generic-args
# --connect: jdbc connect string
# --driver: driver class
# --username
# --password
# -P

# (2) import-args
# --table: table name from mysql
# --target-dir: to path
# --delete-target-dir: FileAlreadyExistsException
# --fields-terminated-by: such as ","
# -m, --num-mappers: default-value 4
# --split-by

# 1.1 to hdfs from mysql
# (1) load all
# 21/03/30 15:55:07 ERROR tool.ImportTool:
# Import failed: No primary key could be found for table emp.
# Please specify one with --split-by or perform a sequential import with '-m 1'.

# 21/03/30 15:46:02 INFO db.DataDrivenDBInputFormat:
# BoundingValsQuery: SELECT MIN(id), MAX(id) FROM staff

# Full-table import from MySQL into HDFS.
# FIX: the console password prompt is the single-dash '-P';
# '--P' is not a valid sqoop option and fails argument parsing.
# NOTE: with --num-mappers 1 the --split-by column is not actually
# needed (splitting only happens with >1 mapper).
sqoop import \
  --driver com.mysql.cj.jdbc.Driver \
  --connect jdbc:mysql://liu:3306/company \
  --username root \
  -P \
  --table emp \
  --target-dir /user/company/staff \
  --delete-target-dir \
  --fields-terminated-by '\t' \
  --num-mappers 1 \
  --split-by id

# (2) select a part of data to load
# --columns restricts the import to the listed columns.
# FIX: '--P' replaced by the valid single-dash '-P' password prompt.
sqoop import \
  --driver com.mysql.cj.jdbc.Driver \
  --connect jdbc:mysql://liu:3306/company \
  --username root \
  -P \
  --table staff \
  --target-dir /user/company/staff \
  --delete-target-dir \
  --fields-terminated-by '\t' \
  --num-mappers 1 \
  --columns id,sex

# --where filters rows on the database side before the import.
sqoop import \
  --driver com.mysql.cj.jdbc.Driver \
  --connect jdbc:mysql://liu:3306/company \
  --username root \
  -P \
  --table staff \
  --target-dir /user/company/staff \
  --delete-target-dir \
  --fields-terminated-by '\t' \
  --num-mappers 1 \
  --columns id,sex \
  --where "salary >= 30000"

# (3) --query: free-form SQL import (used instead of --table).
# The WHERE clause must contain the literal token $CONDITIONS so sqoop
# can substitute its per-mapper split predicate.
# FIX: '--P' replaced by the valid single-dash '-P' password prompt.
sqoop import \
  --driver com.mysql.cj.jdbc.Driver \
  --connect jdbc:mysql://liu:3306/company \
  --username root \
  -P \
  --target-dir /user/company/staff \
  --delete-target-dir \
  --fields-terminated-by '\t' \
  --num-mappers 1 \
  --query 'select name, sex from staff where salary >= 30000 and $CONDITIONS;'
# With double quotes the $ must be escaped so the shell does not expand it:
#--query "select name, sex from staff where salary >= 30000 and \$CONDITIONS;"

# (4) incremental import
# --check-column(col): must be a comparable column (int, timestamp); char columns are not allowed
# --incremental(mode): append | lastmodified
# --last-value(value): only rows whose check-column value is greater than this are imported
# FIX: '--P' replaced by the valid single-dash '-P' password prompt (both commands).
sqoop import \
  --driver com.mysql.cj.jdbc.Driver \
  --connect jdbc:mysql://liu:3306/company \
  --username root \
  -P \
  --table staff \
  --target-dir /user/company/staff \
  --fields-terminated-by '\t' \
  --num-mappers 1 \
  --check-column id \
  --incremental append \
  --last-value 6

# BROKEN EXAMPLE, kept on purpose: the value of --last-value must not be
# omitted for an incremental import. Without a starting value the increment
# has no lower bound and all rows are imported again.
sqoop import \
  --driver com.mysql.cj.jdbc.Driver \
  --connect jdbc:mysql://liu:3306/company \
  --username root \
  -P \
  --table staff \
  --target-dir /user/company/staff \
  --fields-terminated-by '\t' \
  --num-mappers 1 \
  --check-column id \
  --incremental append \
  --last-value

# lastmodified mode by timestamp
# append and merge-key
# append: append new data to a new file
# Baseline full load of `employee` into HDFS before demonstrating the
# lastmodified-style incremental imports below.
sqoop import \
  --driver com.mysql.cj.jdbc.Driver \
  --connect jdbc:mysql://liu:3306/company \
  --username root \
  --password 666666 \
  --table employee \
  --target-dir /user/company/employee \
  --delete-target-dir \
  --fields-terminated-by '\t' \
  --num-mappers 1

# Incremental import in lastmodified mode with --append:
# rows whose hire_date >= --last-value are written to a new file in the
# target dir (no merging with the previously imported data).
sqoop import \
  --driver com.mysql.cj.jdbc.Driver \
  --connect jdbc:mysql://liu:3306/company \
  --username root \
  --password 666666 \
  --table employee \
  --target-dir /user/company/employee \
  --fields-terminated-by '\t' \
  --num-mappers 1 \
  --check-column hire_date \
  --incremental lastmodified \
  --append \
  --last-value '2021-03-31 14:01:28' # where timestamp >= last-value

# merge-key: merge new rows into a single output file
# (new and existing records are combined; rows with the same key are replaced)
#
sqoop import \
  --driver com.mysql.cj.jdbc.Driver \
  --connect jdbc:mysql://liu:3306/company \
  --username root \
  --password 666666 \
  --table employee \
  --target-dir /user/company/employee \
  --fields-terminated-by '\t' \
  --num-mappers 1 \
  --check-column hire_date \
  --incremental lastmodified \
  --merge-key id \
  --last-value '2021-03-31 14:34:00' # where timestamp >= last-value
# merge-key orders by id AS A STRING: id is an int in MySQL, but in the
# exported text files it is handled as a string, so the merged file is
# sorted in dictionary (lexicographic) order, not numeric order.

#(5) import all tables
# every table needs a single-column primary key, or you must pass
# --autoreset-to-one-mapper (falls back to one mapper for keyless tables)
# must import all columns of every table; --where/--columns cannot be used
# a non-default --split-by cannot be used either
# --warehouse-dir: parent HDFS dir; each table gets its own subdirectory
sqoop import-all-tables \
  --driver com.mysql.cj.jdbc.Driver \
  --connect jdbc:mysql://liu:3306/company \
  --username root \
  --password 666666 \
  --fields-terminated-by '\t' \
  --num-mappers 1 \
  --warehouse-dir /user/company

# same import without an explicit --driver (connector chosen from the JDBC URL)
sqoop import-all-tables \
  --connect jdbc:mysql://liu:3306/company \
  --username root \
  --password 666666 \
  --fields-terminated-by '\t' \
  --num-mappers 1 \
  --warehouse-dir /user/company

# 1.2 from RDBMS to Hive
# (1) option one (two steps): copy the table structure first, then import the data.
# create-hive-table only creates the Hive table definition from the MySQL schema.
sqoop create-hive-table \
  --driver com.mysql.cj.jdbc.Driver \
  --connect jdbc:mysql://liu:3306/company \
  --username root \
  --password 666666 \
  --table employee \
  --hive-table mydb.employee_hive

# step two: load the data into the Hive table created above
sqoop import \
  --driver com.mysql.cj.jdbc.Driver \
  --connect jdbc:mysql://liu:3306/company \
  --username root \
  --password 666666 \
  --table employee \
  --hive-table mydb.employee_hive \
  --hive-import \
  --num-mappers 1
# --fields-terminated-by '\t'

# (2) option two (one step): copy table structure AND data together
# --create-hive-table # once caused: Import failed: java.io.IOException: Hive exited with status 1
# --hive-import: load the staged HDFS data into Hive
# --hive-table: target Hive table
# --hive-overwrite: replace any existing data in the Hive table
sqoop import \
  --driver com.mysql.cj.jdbc.Driver \
  --connect jdbc:mysql://liu:3306/company \
  --username root \
  --password 666666 \
  --table employee \
  --num-mappers 1 \
  --fields-terminated-by '\t' \
  --hive-import \
  --hive-table mydb.employee_hive \
  --hive-overwrite \
  --delete-target-dir
#--create-hive-table

# 21/04/01 08:29:27 ERROR tool.ImportTool: Import failed: org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory hdfs://liu:9000/user/liu/employee already exists
# The staging directory was left in HDFS by an earlier run;
# --delete-target-dir removes it before the import starts.

# 1.3 from RDBMS to HBASE
# --hbase-create-table: create the HBase table if it does not exist
# --hbase-table / --column-family: target table and column family in HBase
# --hbase-row-key: MySQL column used as the HBase row key
sqoop import \
  --driver com.mysql.cj.jdbc.Driver \
  --connect jdbc:mysql://liu:3306/company \
  --username root \
  --password 666666 \
  --table employee \
  --hbase-create-table \
  --hbase-table 'employee_hbase' \
  --column-family 'info' \
  --hbase-row-key 'id' \
  --num-mappers 1

# 2. export data
# use export
# HIVE/HDFS export to RDBMS
# HBase cannot be exported to an RDBMS with sqoop

# export modes
# (1) default mode: INSERT every row
# (2) update mode: UPDATE existing rows instead of inserting

# --table: destination table name in the RDBMS
# --export-dir: source HDFS directory
# --input-fields-terminated-by: field delimiter of the HDFS files being read
# the table employee_sql must already exist in MySQL before running this.
sqoop export \
  --driver com.mysql.cj.jdbc.Driver \
  --connect jdbc:mysql://liu:3306/company \
  --username root \
  --password 666666 \
  --table employee_sql \
  --num-mappers 1 \
  --export-dir /user/company/employee \
  --input-fields-terminated-by '\t'

# Export a Hive-managed table by pointing --export-dir at its warehouse path.
sqoop export \
  --driver com.mysql.cj.jdbc.Driver \
  --connect jdbc:mysql://liu:3306/company \
  --username root \
  --password 666666 \
  --table employee_sql_hive \
  --num-mappers 1 \
  --export-dir /user/hive/warehouse/mydb.db/employee_hive \
  --input-fields-terminated-by '\t'
# if exported more than once: ERROR mapreduce.ExportJobBase: Export job failed!
# (presumably a duplicate primary-key violation on the second insert — verify in the task logs)

# Export only selected columns; the destination table must match that subset.
sqoop export \
  --driver com.mysql.cj.jdbc.Driver \
  --connect jdbc:mysql://liu:3306/company \
  --username root \
  --password 666666 \
  --table employee_sql_hive_1 \
  --num-mappers 1 \
  --export-dir /user/hive/warehouse/mydb.db/employee_hive \
  --input-fields-terminated-by '\t' \
  --columns id,name,sex # no blanks allowed in the list: "id,name" works, "id, name" does not

# 2.2 update mode
# (1) updateonly mode
#
# first: seed the table with a plain (default-mode) export
sqoop export \
  --driver com.mysql.cj.jdbc.Driver \
  --connect jdbc:mysql://liu:3306/company \
  --username root \
  --password 666666 \
  --table updateonly \
  --num-mappers 1 \
  --export-dir /input/sources/updateonly_1.txt \
  --input-fields-terminated-by ','

# second: updateonly mode
# --update-key: column used to match rows already present in the RDBMS table
# --update-mode updateonly: only UPDATE matching rows; rows with no match are skipped
# FIX: the original read '-- update-mode' (stray space after '--'); the
# option name is '--update-mode', and a bare '--' ends sqoop's option parsing.
sqoop export \
  --driver com.mysql.cj.jdbc.Driver \
  --connect jdbc:mysql://liu:3306/company \
  --username root \
  --password 666666 \
  --table updateonly \
  --num-mappers 1 \
  --export-dir /input/sources/updateonly_2.txt \
  --update-key id \
  --update-mode updateonly \
  --input-fields-terminated-by ','

# (2) allowinsert mode: mixed update and insert
# first: seed the table with a plain (default-mode) export
sqoop export \
  --driver com.mysql.cj.jdbc.Driver \
  --connect jdbc:mysql://liu:3306/company \
  --username root \
  --password 666666 \
  --table allowinsert \
  --num-mappers 1 \
  --export-dir /input/sources/allowinsert_1.txt \
  --input-fields-terminated-by ','

# second: allowinsert — matching rows are updated, new rows are inserted
# Possible error: "Mixed update/insert is not supported against the target database yet"
# NOTE(review): allowinsert needs connector support; the generic JDBC path
# selected by --driver may not provide it — confirm against the connector docs.
sqoop export \
  --driver com.mysql.cj.jdbc.Driver \
  --connect jdbc:mysql://liu:3306/company \
  --username root \
  --password 666666 \
  --table allowinsert \
  --num-mappers 1 \
  --update-key id \
  --update-mode allowinsert \
  --export-dir /input/sources/allowinsert_2.txt \
  --input-fields-terminated-by ','

# 3. script
# package the sqoop invocation in an options (.opt) file, then run it
# (1) create the file
touch job_HDFS2TDBMS.opt
# (2) edit it (interactive)
vi job_HDFS2TDBMS.opt

# ---- contents of job_HDFS2TDBMS.opt (NOT shell; one option or value per line) ----
# Options file for sqoop export
export
# JDBC driver class
--driver
com.mysql.cj.jdbc.Driver
# Connect parameter and value
--connect
jdbc:mysql://liu:3306/company
# credentials
--username
root
--password
666666
--table
updateonly
--num-mappers
1
--export-dir
/input/sources/updateonly_1.txt
--input-fields-terminated-by
','
# ---- end of options file ----

# (3) run the packaged command
sqoop --options-file job_HDFS2TDBMS.opt

# 4. Sqoop job
# (1) job syntax
# sqoop job (generic-args)(job-args)[-- [subtool-name](subtool-args)]
# sqoop-job (generic-args)(job-args)[-- [subtool-name](subtool-args)]

# (2) create a saved job
# the password must be typed interactively when the job runs;
# --password cannot be stored in a saved job definition
sqoop job --create job_RDBMS2HDFS \
  -- import \
  --driver com.mysql.cj.jdbc.Driver \
  --connect jdbc:mysql://liu:3306/company \
  --username root \
  --table staff \
  --target-dir /user/company/staff_1 \
  --delete-target-dir \
  --num-mappers 1 \
  --fields-terminated-by '\t'

# (3) list saved jobs / show job subcommand help
sqoop job --list
sqoop job --help

# (4) execute the saved job
sqoop job --exec job_RDBMS2HDFS

# (5) password-less job execution
# --password-file: read the password from a file instead of prompting
# prerequisite — enable saved passwords in sqoop-site.xml:
  #   <property>
  #    <name>sqoop.metastore.client.record.password</name>
  #    <value>true</value>
  #    <description>If true, allow saved passwords in the metastore.
  #    </description>
  #  </property>
# create the password file
# bash shell : echo -n "666666" > mysql.pwd   (-n: no trailing newline, which would corrupt the password)
# upload it to HDFS
# chmod 400 (owner read-only; sqoop rejects world-readable password files)
sqoop job --create job_RDBMS2HDF_P \
-- import \
--driver com.mysql.cj.jdbc.Driver \
--connect jdbc:mysql://liu:3306/company \
--username root \
--table staff \
--target-dir /user/company/staff_1 \
--delete-target-dir \
--num-mappers 1 \
--password-file /user/mysql.pwd \
--fields-terminated-by '\t'

# incremental (append) import packaged as a saved sqoop job
# NOTE(review): no --last-value is given here; a saved job is expected to
# track and update the last value in the metastore after each run —
# confirm against the sqoop saved-jobs documentation.
sqoop job --create job_RDBMS2HDFS_A \
-- import \
--driver com.mysql.cj.jdbc.Driver \
--connect jdbc:mysql://liu:3306/company \
--username root \
--table staff \
--target-dir /user/company/staff \
--fields-terminated-by '\t' \
--num-mappers 1 \
--password-file /user/mysql.pwd \
--check-column id \
--incremental append


# 5. commonly used sqoop commands
# list-databases: show the databases on the MySQL server
sqoop list-databases \
--connect jdbc:mysql://liu:3306 \
--username root \
--password-file /user/mysql.pwd

# list-tables: show the tables of one database
sqoop list-tables \
--connect jdbc:mysql://liu:3306/company \
--username root \
--password-file /user/mysql.pwd

# create-hive-table: create only the Hive table structure (no data)
sqoop create-hive-table \
--connect jdbc:mysql://liu:3306/company \
--username root \
--password-file /user/mysql.pwd \
--table staff \
--hive-table mydb.staff_hive_1
