# Import daily per-user/per-SKU sale aggregates from MySQL (gmall1205) into Hive.
#
# Fixes applied:
#  - "$CONDITIONS" must be AND-ed into the WHERE clause (Sqoop free-form query
#    contract). The original "where 1=1 or \$CONDITIONS" made Sqoop's metadata
#    probe (1=0) and split predicates always-true, defeating them entirely.
#  - The table grain is user+sku+day (dws_sale_detail_DAYcount), so the query
#    now groups by user_id, sku_id, dt and aggregates od.sku_num with SUM();
#    the original GROUP BY user_id, dt picked one arbitrary sku row per
#    user/day under MySQL's loose ONLY_FULL_GROUP_BY mode.
#
# NOTE(review): remaining non-aggregated columns (gender, names, prices) are
# functionally dependent on user_id/sku_id — relies on ONLY_FULL_GROUP_BY
# being disabled on this MySQL server; confirm server sql_mode.
# NOTE(review): --create-hive-table fails if the table already exists; drop it
# first or remove the flag for re-runs (--hive-overwrite replaces data only).
# NOTE(review): plaintext --password on the command line is visible in ps and
# shell history; prefer -P or --password-file.
bin/sqoop import \
--connect jdbc:mysql://hadoop121:3306/gmall1205 \
--username root \
--password 888888 \
--query "SELECT ui.id user_id, si.id sku_id, ui.gender user_gender, FLOOR(DATEDIFF(NOW(), ui.birthday) / 365) user_age, ui.user_level user_level, si.price sku_price, si.sku_name sku_name, si.tm_id sku_tm_id, bc3.id sku_category3_id, bc2.id sku_category2_id, bc1.id sku_category1_id, bc3.name sku_category3_name, bc2.name sku_category2_name, bc1.name sku_category1_name, si.spu_id spu_id, SUM(od.sku_num) sku_num, COUNT(*) order_count, SUM(oi.total_amount) order_amount, DATE_FORMAT(oi.operate_time, '%Y-%m-%d') dt FROM order_detail od JOIN order_info oi ON od.order_id = oi.id JOIN sku_info si ON od.sku_id = si.id JOIN user_info ui ON oi.user_id = ui.id JOIN base_category3 bc3 ON bc3.id = si.category3_id JOIN base_category2 bc2 ON bc2.id = bc3.category2_id JOIN base_category1 bc1 ON bc1.id = bc2.category1_id WHERE \$CONDITIONS GROUP BY user_id, sku_id, dt" \
--target-dir /gmall1205/dws_sale_detail_daycount \
--num-mappers 1 \
--hive-import \
--fields-terminated-by "\t" \
--create-hive-table \
--hive-overwrite \
--hive-table gmall1205.dws_sale_detail_daycount

# Verify the import by querying the table in Hive
-- Verification query: read back the imported day's rows from Hive, casting
-- the Sqoop-imported string columns back to their expected numeric types.
SELECT
  user_id,
  sku_id,
  user_gender,
  CAST(user_age AS INT) user_age,
  user_level,
  -- fix: alias was missing here, so the column surfaced as "_c5"
  -- instead of sku_price, unlike every other cast in this list
  CAST(sku_price AS DOUBLE) sku_price,
  sku_name,
  sku_tm_id,
  sku_category3_id,
  sku_category2_id,
  sku_category1_id,
  sku_category3_name,
  sku_category2_name,
  sku_category1_name,
  spu_id,
  sku_num,
  CAST(order_count AS BIGINT) order_count,
  CAST(order_amount AS DOUBLE) order_amount,
  dt
FROM
  gmall1205.dws_sale_detail_daycount
WHERE dt = '2019-10-24';