 1.电商分析之--核心交易之数据导入
   
   已经确定的事情：DataX、导出7张表的数据。
   MySQL 导出：全量导出、增量导出（导出前一天的数据）。
   业务数据保存在MySQL中，每日凌晨导入上一天的表数据。
      (1).表数据量少，采用全量方式导出MySQL
      (2).表数据量大，而且根据字段能区分出每天新增数据，采用增量方式导出MySQL
   
   3 张增量表：
     订单表 lagou_trade_orders
     订单产品表 lagou_order_product
     产品信息表 lagou_product_info
   4 张全量表：
     产品分类表 lagou_product_category
     商家店铺表 lagou_shops
     商家地域组织表 lagou_shop_admin_org
     支付方式表 lagou_payments
   
   1).全量数据导入
   MySQL => HDFS => Hive
   每日加载全量数据，形成新的分区；（ODS如何建表，后文有具体指导）
   MySQLReader ===> HdfsWriter
   ebiz.lagou_product_category ===> ods.ods_trade_product_category
   (1).产品分类表
   /data/lagoudw/json/product_category.json
{
	"job": {
		"setting": {
			"speed": {
				"channel": 1
			}
		},
		"content": [{
			"reader": {
				"name": "mysqlreader",
				"parameter": {
					"username": "hive",
					"password": "12345678",
					"column": [
						"catId", 
						"parentId", 
						"catName",
						"isShow",
						"sortNum",
						"isDel",
						"createTime",
						"level"
					],
					"connection": [{
						"table": [
							"lagou_product_category"
						],
						"jdbcUrl": [
							"jdbc:mysql://linux123:3306/ebiz"
						]
					}]
				}
			},
			"writer": {
				"name": "hdfswriter",
				"parameter": {
					"defaultFS": "hdfs://linux121:9000",
					"fileType": "text",
					"path": "/user/data/trade.db/product_category/dt=$do_date",
					"fileName": "product_category_$do_date",
					"column": [{
							"name": "catId",
							"type": "INT"
						},
						{
							"name": "parentId",
							"type": "INT"
						},
						{
							"name": "catName",
							"type": "STRING"
						},
						{
							"name": "isShow",
							"type": "TINYINT"
						},
						{
							"name": "sortNum",
							"type": "INT"
						},
						{
							"name": "isDel",
							"type": "TINYINT"
						},
						{
							"name": "createTime",
							"type": "STRING"
						},
						{
							"name": "level",
							"type": "TINYINT"
						}
					],
					"writeMode": "append",
					"fieldDelimiter": ","
				}
			}
		}]
	}
}  
   
   备注：
      数据量小的表没有必要使用多个channel；使用多个channel会生成多个小文件
      执行命令之前要在HDFS上创建对应的目录：
/user/data/trade.db/product_category/dt=yyyy-mm-dd
   
   do_date='2020-07-01'
   # 创建目录
   hdfs dfs -mkdir -p /user/data/trade.db/product_category/dt=$do_date
   
   # 数据迁移
   python $DATAX_HOME/bin/datax.py -p "-Ddo_date=$do_date" /data/lagoudw/json/product_category.json
   
   -- ODS product category table (full load, one partition per day).
   -- EXTERNAL: dropping the table keeps the underlying HDFS files.
   DROP TABLE IF EXISTS `ods.ods_trade_product_category`;
CREATE EXTERNAL TABLE `ods.ods_trade_product_category` (
    `catid`      int,
    `parentid`   int,
    `catname`    string,
    `isshow`     tinyint,
    `sortnum`    int,
    `isdel`      tinyint,
    `createtime` string,
    `level`      tinyint
)
COMMENT '产品分类表'
PARTITIONED BY (`dt` string)
ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
LOCATION '/user/data/trade.db/product_category/';
   # 加载数据
   hive -e "alter table ods.ods_trade_product_category add partition(dt='$do_date')";
   (2).商家店铺表
   lagou_shops ====> ods.ods_trade_shops
    /data/lagoudw/json/shops.json

{
	"job": {
		"setting": {
			"speed": {
				"channel": 1
			},
			"errorLimit": {
				"record": 0
			}
		},
		"content": [{
			"reader": {
				"name": "mysqlreader",
				"parameter": {
					"username": "hive",
					"password": "12345678",
					"column": [
						"shopId", 
						"userId", 
						"areaId",
						"shopName", 
						"shopLevel", 
						"status", 
						"createTime", 
						"modifyTime"
					],
					"connection": [{
						"table": [
							"lagou_shops"
						],
						"jdbcUrl": [
							"jdbc:mysql://linux123:3306/ebiz"
						]
					}]
				}
			},
			"writer": {
				"name": "hdfswriter",
				"parameter": {
					"defaultFS": "hdfs://linux121:9000",
					"fileType": "text",
					"path": "/user/data/trade.db/shops/dt=$do_date",
					"fileName": "shops_$do_date",
					"column": [{
							"name": "shopId",
							"type": "INT"
						},
						{
							"name": "userId",
							"type": "INT"
						},
						{
							"name": "areaId",
							"type": "INT"
						},
						{
							"name": "shopName",
							"type": "STRING"
						},
						{
							"name": "shopLevel",
							"type": "TINYINT"
						},
						{
							"name": "status",
							"type": "TINYINT"
						},
						{
							"name": "createTime",
							"type": "STRING"
						},
						{
							"name": "modifyTime",
							"type": "STRING"
						}
					],
					"writeMode": "append",
					"fieldDelimiter": ","
				}
			}
		}]
	}
}   
   # do_date='2020-07-01'   # 注释掉：该赋值立即被下一行覆盖，属于无效代码
   do_date='2020-07-02'
   # 创建目录
   hdfs dfs -mkdir -p /user/data/trade.db/shops/dt=$do_date
   hdfs dfs -ls /user/data/trade.db/shops
   # 数据迁移
   python $DATAX_HOME/bin/datax.py -p "-Ddo_date=$do_date" /data/lagoudw/json/shops.json
   
   -- ODS shops table (full load, one partition per day).
   -- Column order and types mirror the DataX hdfswriter config above.
   DROP TABLE IF EXISTS `ods.ods_trade_shops`;
CREATE EXTERNAL TABLE `ods.ods_trade_shops` (
    `shopid`     int,
    `userid`     int,
    `areaid`     int,
    `shopname`   string,
    `shoplevel`  tinyint,
    `status`     tinyint,
    `createtime` string,
    `modifytime` string
)
COMMENT '商家店铺表'
PARTITIONED BY (`dt` string)
ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
LOCATION '/user/data/trade.db/shops/';
   -- 下面的语句不是合法的 HiveQL（insert overwrite 不能与 location 连用），已注释；
   -- 外部表只需执行下方的 add partition 即可加载该目录的数据
   -- insert overwrite table ods.ods_trade_shops location '/user/data/trade.db/shops/dt=2020-07-02/*';
   # 加载数据
   hive -e "alter table ods.ods_trade_shops add partition(dt='$do_date')"
   (3).商家地域组织表
   lagou_shop_admin_org ====> ods.ods_trade_shop_admin_org
   /data/lagoudw/json/shop_org.json

{
	"job": {
		"setting": {
			"speed": {
				"channel": 1
			},
			"errorLimit": {
				"record": 0
			}
		},
		"content": [{
			"reader": {
				"name": "mysqlreader",
				"parameter": {
					"username": "hive",
					"password": "12345678",
					"column": [
						"id", "parentId", "orgName",
						"orgLevel", "isDelete", "createTime", "updateTime", "isShow",
						"orgType"
					],
					"connection": [{
						"table": [
							"lagou_shop_admin_org"
						],
						"jdbcUrl": [
							"jdbc:mysql://linux123:3306/ebiz"
						]
					}]
				}
			},
			"writer": {
				"name": "hdfswriter",
				"parameter": {
					"defaultFS": "hdfs://linux121:9000",
					"fileType": "text",
					"path": "/user/data/trade.db/shop_org/dt=$do_date",
					"fileName": "shop_admin_org_$do_date.dat",
					"column": [{
							"name": "id",
							"type": "INT"
						},
						{
							"name": "parentId",
							"type": "INT"
						},
						{
							"name": "orgName",
							"type": "STRING"
						},
						{
							"name": "orgLevel",
							"type": "TINYINT"
						},
						{
							"name": "isDelete",
							"type": "TINYINT"
						},
						{
							"name": "createTime",
							"type": "STRING"
						},
						{
							"name": "updateTime",
							"type": "STRING"
						},
						{
							"name": "isShow",
							"type": "TINYINT"
						},
						{
							"name": "orgType",
							"type": "TINYINT"
						}
					],
					"writeMode": "append",
					"fieldDelimiter": ","
				}
			}
		}]
	}
}  
   
   do_date='2020-07-01'
   # 创建目录
   hdfs dfs -mkdir -p /user/data/trade.db/shop_org/dt=$do_date
   
   # 数据迁移
   python $DATAX_HOME/bin/datax.py -p "-Ddo_date=$do_date" /data/lagoudw/json/shop_org.json
   
   -- ODS shop admin-org table (full load, one partition per day).
   -- Column order and types mirror the DataX hdfswriter config above.
   DROP TABLE IF EXISTS `ods.ods_trade_shop_admin_org`;
CREATE EXTERNAL TABLE `ods.ods_trade_shop_admin_org` (
    `id`         int,
    `parentid`   int,
    `orgname`    string,
    `orglevel`   tinyint,
    `isdelete`   tinyint,
    `createtime` string,
    `updatetime` string,
    `isshow`     tinyint,
    `orgType`    tinyint
)
COMMENT '商家地域组织表'
PARTITIONED BY (`dt` string)
ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
LOCATION '/user/data/trade.db/shop_org/';
   # 加载数据
   hive -e "alter table ods.ods_trade_shop_admin_org add partition(dt='$do_date')"
   (4).支付方式表
   lagou_payments ====> ods.ods_trade_payments
   /data/lagoudw/json/payments.json
   
{
	"job": {
		"setting": {
			"speed": {
				"channel": 1
			},
			"errorLimit": {
				"record": 0
			}
		},
		"content": [{
			"reader": {
				"name": "mysqlreader",
				"parameter": {
					"username": "hive",
					"password": "12345678",
					"column": [
						"id", "payMethod", "payName",
						"description", "payOrder", "online"
					],
					"connection": [{
						"table": [
							"lagou_payments"
						],
						"jdbcUrl": [
							"jdbc:mysql://linux123:3306/ebiz"
						]
					}]
				}
			},
			"writer": {
				"name": "hdfswriter",
				"parameter": {
					"defaultFS": "hdfs://linux121:9000",
					"fileType": "text",
					"path": "/user/data/trade.db/payments/dt=$do_date",
					"fileName": "payments_$do_date.dat",
					"column": [{
							"name": "id",
							"type": "INT"
						},
						{
							"name": "payMethod",
							"type": "STRING"
						},
						{
							"name": "payName",
							"type": "STRING"
						},
						{
							"name": "description",
							"type": "STRING"
						},
						{
							"name": "payOrder",
							"type": "INT"
						},
						{
							"name": "online",
							"type": "TINYINT"
						}
					],
					"writeMode": "append",
					"fieldDelimiter": ","
				}
			}
		}]
	}
}  
   do_date='2020-07-01'
   # 创建目录
   hdfs dfs -mkdir -p /user/data/trade.db/payments/dt=$do_date
   
   # 数据迁移
   python $DATAX_HOME/bin/datax.py -p "-Ddo_date=$do_date" /data/lagoudw/json/payments.json
   
   -- ODS payments table (full load, one partition per day).
   -- Fix: `id` was declared string, but the DataX hdfswriter above writes
   -- it as INT (see the writer column config) — declare it int so the type
   -- matches the data and the other ODS tables' id columns.
   DROP TABLE IF EXISTS `ods.ods_trade_payments`;
CREATE EXTERNAL TABLE `ods.ods_trade_payments` (
    `id`          int,
    `paymethod`   string,
    `payname`     string,
    `description` string,
    `payorder`    int,
    `online`      tinyint
)
COMMENT '支付方式表'
PARTITIONED BY (`dt` string)
ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
LOCATION '/user/data/trade.db/payments/';
   # 加载数据
   hive -e "alter table ods.ods_trade_payments add partition(dt='$do_date')"
   2).增量数据导入
   3 张增量表：
     订单表 lagou_trade_orders
     订单产品表 lagou_order_product
     产品信息表 lagou_product_info
   初始数据装载（执行一次）；可以将前面的全量加载作为初次装载
   每日加载增量数据（每日数据形成分区）；
   (1).订单表
   lagou_trade_orders ====> ods.ods_trade_orders
   /data/lagoudw/json/orders.json
   备注：条件的选择，选择时间字段 modifiedTime

{
	"job": {
		"setting": {
			"speed": {
				"channel": 1
			},
			"errorLimit": {
				"record": 0
			}
		},
		"content": [{
			"reader": {
				"name": "mysqlreader",
				"parameter": {
					"username": "hive",
					"password": "12345678",
					"connection": [{
						"querySql": ["select orderId, orderNo, userId,status, 
 productMoney,totalMoney, payMethod, isPay, areaId, tradeSrc, 
 tradeType,isRefund, dataFlag, createTime, payTime, modifiedTime
 from lagou_trade_orders 
 where date_format(modifiedTime, '%Y-%m-%d') = '$do_date'"],
						"jdbcUrl": [
							"jdbc:mysql://linux123:3306/ebiz"
						]
					}]
				}
			},
			"writer": {
				"name": "hdfswriter",
				"parameter": {
					"defaultFS": "hdfs://linux121:9000",
					"fileType": "text",
					"path": "/user/data/trade.db/orders/dt=$do_date",
					"fileName": "orders_$do_date",
					"column": [{
							"name": "orderId",
							"type": "INT"
						},
						{
							"name": "orderNo",
							"type": "STRING"
						},
						{
							"name": "userId",
							"type": "BIGINT"
						},
						{
							"name": "status",
							"type": "TINYINT"
						},
						{
							"name": "productMoney",
							"type": "Float"
						},
						{
							"name": "totalMoney",
							"type": "Float"
						},
						{
							"name": "payMethod",
							"type": "TINYINT"
						},
						{
							"name": "isPay",
							"type": "TINYINT"
						},
						{
							"name": "areaId",
							"type": "INT"
						},
						{
							"name": "tradeSrc",
							"type": "TINYINT"
						},
						{
							"name": "tradeType",
							"type": "INT"
						},
						{
							"name": "isRefund",
							"type": "TINYINT"
						},
						{
							"name": "dataFlag",
							"type": "TINYINT"
						},
						{
							"name": "createTime",
							"type": "STRING"
						},
						{
							"name": "payTime",
							"type": "STRING"
						},
						{
							"name": "modifiedTime",
							"type": "STRING"
						}
					],
					"writeMode": "append",
					"fieldDelimiter": ","
				}
			}
		}]
	}
}   
   
   do_date='2020-07-12'
   # 创建目录
   hdfs dfs -mkdir -p /user/data/trade.db/orders/dt=$do_date
   # 数据迁移
   python $DATAX_HOME/bin/datax.py -p "-Ddo_date=$do_date" /data/lagoudw/json/orders.json
   
   -- ODS orders table (incremental load on modifiedTime, one partition per day).
   -- Fix: productmoney/totalmoney were decimal(10,0) — scale 0 silently
   -- truncates the fractional part of money amounts; use decimal(10,2).
   DROP TABLE IF EXISTS `ods.ods_trade_orders`;
CREATE EXTERNAL TABLE `ods.ods_trade_orders` (
    `orderid`      int,
    `orderno`      string,
    `userid`       bigint,
    `status`       tinyint,
    `productmoney` decimal(10,2),
    `totalmoney`   decimal(10,2),
    `paymethod`    tinyint,
    `ispay`        tinyint,
    `areaid`       int,
    `tradesrc`     tinyint,
    `tradetype`    int,
    `isrefund`     tinyint,
    `dataflag`     tinyint,
    `createtime`   string,
    `paytime`      string,
    `modifiedtime` string
)
COMMENT '订单表'
PARTITIONED BY (`dt` string)
ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
LOCATION '/user/data/trade.db/orders/';
   # 加载数据
   hive -e "alter table ods.ods_trade_orders add partition(dt='$do_date')"
   (2).订单产品表
   lagou_order_product ====> ods.ods_trade_order_product
   /data/lagoudw/json/order_product.json

{
	"job": {
		"setting": {
			"speed": {
				"channel": 1
			},
			"errorLimit": {
				"record": 0
			}
		},
		"content": [{
			"reader": {
				"name": "mysqlreader",
				"parameter": {
					"username": "hive",
					"password": "12345678",
					"connection": [{
						"querySql": ["select id, orderId, productId,productNum, 
productPrice, money, extra, createTime 
from lagou_order_product 
where date_format(createTime, '%Y-%m-%d') = '$do_date'"],
						"jdbcUrl": [
							"jdbc:mysql://linux123:3306/ebiz"
						]
					}]
				}
			},
			"writer": {
				"name": "hdfswriter",
				"parameter": {
					"defaultFS": "hdfs://linux121:9000",
					"fileType": "text",
					"path": "/user/data/trade.db/order_product/dt=$do_date",
					"fileName": "order_product_$do_date.dat",
					"column": [{
							"name": "id",
							"type": "INT"
						},
						{
							"name": "orderId",
							"type": "INT"
						},
						{
							"name": "productId",
							"type": "INT"
						},
						{
							"name": "productNum",
							"type": "INT"
						},
						{
							"name": "productPrice",
							"type": "Float"
						},
						{
							"name": "money",
							"type": "Float"
						},
						{
							"name": "extra",
							"type": "STRING"
						},
						{
							"name": "createTime",
							"type": "STRING"
						}
					],
					"writeMode": "append",
					"fieldDelimiter": ","
				}
			}
		}]
	}
}
   
   do_date='2020-07-12'
   # 创建目录
   hdfs dfs -mkdir -p /user/data/trade.db/order_product/dt=$do_date
   
   # 数据迁移
   python $DATAX_HOME/bin/datax.py -p "-Ddo_date=$do_date" /data/lagoudw/json/order_product.json
   
   -- ODS order detail table (incremental load on createTime, one partition per day).
   -- Fix: the column types disagreed wholesale with the DataX hdfswriter
   -- config above (id/orderid/productid/productnum are written as INT,
   -- productprice/money as Float); align the DDL with the written data and
   -- use decimal(10,2) for the money amounts.
   DROP TABLE IF EXISTS `ods.ods_trade_order_product`;
CREATE EXTERNAL TABLE `ods.ods_trade_order_product` (
    `id`           int,
    `orderid`      int,
    `productid`    int,
    `productnum`   int,
    `productprice` decimal(10,2),
    `money`        decimal(10,2),
    `extra`        string,
    `createtime`   string
)
COMMENT '订单明细表'
PARTITIONED BY (`dt` string)
ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
LOCATION '/user/data/trade.db/order_product/';
   # 加载数据
   hive -e "alter table ods.ods_trade_order_product add partition(dt='$do_date')"
   (3).产品明细表
   lagou_product_info ====> ods.ods_trade_product_info
    /data/lagoudw/json/product_info.json

{
	"job": {
		"setting": {
			"speed": {
				"channel": 1
			},
			"errorLimit": {
				"record": 0
			}
		},
		"content": [{
			"reader": {
				"name": "mysqlreader",
				"parameter": {
					"username": "hive",
					"password": "12345678",
					"connection": [{
						"querySql": ["select productid, productname
,shopid, price, issale, status, categoryid, 
createtime,modifytime 
from lagou_product_info 
where date_format(modifyTime, '%Y-%m-%d') = '$do_date'"],
						"jdbcUrl": [
							"jdbc:mysql://linux123:3306/ebiz"
						]
					}]
				}
			},
			"writer": {
				"name": "hdfswriter",
				"parameter": {
					"defaultFS": "hdfs://linux121:9000",
					"fileType": "text",
					"path": "/user/data/trade.db/product_info/dt=$do_date",
					"fileName": "product_info_$do_date.dat",
					"column": [{
							"name": "productid",
							"type": "BIGINT"
						},
						{
							"name": "productname",
							"type": "STRING"
						},
						{
							"name": "shopid",
							"type": "STRING"
						},
						{
							"name": "price",
							"type": "FLOAT"
						},
						{
							"name": "issale",
							"type": "TINYINT"
						},
						{
							"name": "status",
							"type": "TINYINT"
						},
						{
							"name": "categoryid",
							"type": "STRING"
						},
						{
							"name": "createTime",
							"type": "STRING"
						},
						{
							"name": "modifytime",
							"type": "STRING"
						}
					],
					"writeMode": "append",
					"fieldDelimiter": ","
				}
			}
		}]
	}
}
	
	do_date='2020-07-12'
     # 创建目录
    hdfs dfs -mkdir -p /user/data/trade.db/product_info/dt=$do_date
	
    # 数据迁移
    python $DATAX_HOME/bin/datax.py -p "-Ddo_date=$do_date" /data/lagoudw/json/product_info.json

    -- ODS product info table (incremental load on modifyTime, one partition per day).
    -- Fix: `price` was decimal(10,0) — scale 0 silently truncates the
    -- fractional part of a money amount; use decimal(10,2) (writer emits Float).
    DROP TABLE IF EXISTS `ods.ods_trade_product_info`;
CREATE EXTERNAL TABLE `ods.ods_trade_product_info` (
    `productid`   bigint,
    `productname` string,
    `shopid`      string,
    `price`       decimal(10,2),
    `issale`      tinyint,
    `status`      tinyint,
    `categoryid`  string,
    `createtime`  string,
    `modifytime`  string
)
COMMENT '产品信息表'
PARTITIONED BY (`dt` string)
ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
LOCATION '/user/data/trade.db/product_info/';
	# 加载数据
    hive -e "alter table ods.ods_trade_product_info add partition(dt='$do_date')"