 1.车货匹配之代码开发中车货匹配流程
   
   CDH集群依赖
   由于cdh版本的所有的软件涉及版权的问题，所以并没有将所有的jar包托管到maven仓库当中去，而是托管
在了CDH自己的服务器上面，所以我们默认去maven的仓库下载不到，需要自己手动的添加repository去
CDH仓库进行下载，以下两个地址是官方文档说明，请仔细查阅
https://www.cloudera.com/documentation/enterprise/release-notes/topics/cdh_vd_cdh5_maven_repo.html
https://www.cloudera.com/documentation/enterprise/release-notes/topics/cdh_vd_cdh5_maven_repo_514x.html
   
   Apache版本
    <!--hadoop common，hadoop client,hadoop hdfs -->
    <dependencies>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>RELEASE</version>
        </dependency>
        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-core</artifactId>
            <version>2.8.2</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-common -->
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>2.9.2</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-client -->
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>2.9.2</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-hdfs -->
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>2.9.2</version>
        </dependency>
    </dependencies>

   CDH版本
<repositories>
   <repository>
     <id>cloudera</id>
     <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
   </repository>
</repositories>  
   
 <dependencies>
   <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-client</artifactId>
       <version>2.6.0-mr1-cdh5.14.0</version>
   </dependency>
   <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
       <version>2.6.0-cdh5.14.0</version>
   </dependency>
   <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs</artifactId>
       <version>2.6.0-cdh5.14.0</version>
   </dependency>

   <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-mapreduce-client-core</artifactId>
       <version>2.6.0-cdh5.14.0</version>
   </dependency>
   <!-- https://mvnrepository.com/artifact/junit/junit -->
   <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <version>4.11</version>
       <scope>test</scope>
   </dependency>
   <dependency>
       <groupId>org.testng</groupId>
       <artifactId>testng</artifactId>
       <version>RELEASE</version>
   </dependency>
 </dependencies>
   
   <build>
       <plugins>
           <plugin>
               <groupId>org.apache.maven.plugins</groupId>
               <artifactId>maven-compiler-plugin</artifactId>
               <version>3.0</version>
               <configuration>
                   <source>1.8</source>
                   <target>1.8</target>
                   <encoding>UTF-8</encoding>
                   <!--  <verbal>true</verbal>-->
               </configuration>
           </plugin>
     
	       <plugin>
               <groupId>org.apache.maven.plugins</groupId>
               <artifactId>maven-shade-plugin</artifactId>
               <version>2.4.3</version>
               <executions>
                   <execution>
                       <phase>package</phase>
                       <goals>
                           <goal>shade</goal>
                       </goals>
                       <configuration>
                           <minimizeJar>true</minimizeJar>
                       </configuration>
                   </execution>
               </executions>
           </plugin>
       </plugins>
   </build>

package com.lg.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

/**
 * Demonstrates accessing an HDFS file system built with CDH through the Java API.
 * NOTE(review): the original comment claimed an HA nameservice is used, but the
 * URI below points at a single NameNode (linux121:9000) — confirm which is intended.
 */
public class HdfsDemo {
    // Shared file-system handle: opened by init(), released by destory().
    static FileSystem fs;

    /**
     * Opens the FileSystem handle as user "root".
     *
     * @throws URISyntaxException   if the HDFS URI is malformed
     * @throws IOException          if the file system cannot be reached
     * @throws InterruptedException if the connection attempt is interrupted
     */
    public static void init() throws URISyntaxException, IOException, InterruptedException {
        final Configuration conf = new Configuration();
        // Connect directly to the NameNode address (not an HA nameservice).
        fs = FileSystem.get(new URI("hdfs://linux121:9000"), conf, "root");
        System.out.println(fs);
    }

    // Releases the file-system handle.
    public static void destory() throws IOException {
        fs.close();
    }

    // Creates a test directory on HDFS.
    public static void testMkdir() throws IOException {
        fs.mkdirs(new Path("/cdh_test"));
    }

    public static void main(String[] args) throws InterruptedException, IOException, URISyntaxException {
        init();
        try {
            testMkdir();
        } finally {
            // BUG FIX: close the handle even when mkdir fails; previously an
            // exception in testMkdir() leaked the FileSystem connection.
            destory();
        }
    }
}


package com.lg.util;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

//Downloads an HDFS file or directory tree to the local file system.
public class DownloadFile {
    // Copy buffer size in bytes for stream transfers.
    static int bufferSize = 4096;
    // Shared file-system handle; opened lazily by init(), released by destory().
    static FileSystem fs;

    /**
     * Opens the FileSystem handle as user "root", unless one is already open.
     *
     * @param s HDFS URI to connect to, e.g. "hdfs://linux121:9000"
     */
    public static void init(String s) {
        if (fs != null) {
            // BUG FIX: already connected — keep the existing handle so nested
            // calls (download -> downLoadFile) do not leak the first connection.
            return;
        }
        final Configuration conf = new Configuration();
        // Ask DataNodes to report hostnames so clients outside the cluster
        // network can reach them through their public addresses.
        conf.set("dfs.client.use.datanode.hostname", "true");
        try {
            // BUG FIX: the URI parameter was previously ignored and a
            // hard-coded address was used instead.
            fs = FileSystem.get(new URI(s), conf, "root");
        } catch (IOException | InterruptedException | URISyntaxException e) {
            e.printStackTrace();
        }
    }

    // Closes the file-system handle if one is open.
    public static void destory() {
        if (fs == null) {
            return;
        }
        try {
            fs.close();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // Allow a later init() to reconnect cleanly.
            fs = null;
        }
    }

    /**
     * Downloads a single HDFS file into a local file.
     *
     * @param srcPath HDFS file path
     * @param dstPath local destination file path
     */
    public static void downLoadFile(String srcPath, String dstPath) {
        init("hdfs://linux121:9000");
        FSDataInputStream in = null;
        FileOutputStream out = null;
        try {
            in = fs.open(new Path(srcPath));
            out = new FileOutputStream(dstPath);
            IOUtils.copyBytes(in, out, bufferSize, false);
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            IOUtils.closeStream(in);
            IOUtils.closeStream(out);
        }
        destory();
    }

    /**
     * Downloads every entry of an HDFS directory into a local directory.
     *
     * @param srcPath HDFS directory path
     * @param dstPath local destination directory path (created if absent)
     */
    public static void downLoadFloder(String srcPath, String dstPath) {
        init("hdfs://linux121:9000");
        // Make sure the local destination directory exists.
        final File file = new File(dstPath);
        if (!file.exists()) {
            file.mkdirs();
        }
        try {
            final FileStatus[] fileStatuses = fs.listStatus(new Path(srcPath));
            for (FileStatus status : fileStatuses) {
                final Path path = status.getPath();
                final String fileName = path.getName();
                if (status.isDirectory()) {
                    // BUG FIX: recurse into sub-directories instead of trying
                    // to open them as regular files (fs.open on a directory fails).
                    downLoadFloder(path.toString(), dstPath + '/' + fileName);
                } else {
                    downLoadFile(path.toString(), dstPath + '/' + fileName);
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        destory();
    }

    /**
     * Downloads either a file or a directory, dispatching on the source type.
     */
    public static void download(String srcPath, String dstPath) throws Exception {
        init("hdfs://linux121:9000");
        if (fs.isFile(new Path(srcPath))) {
            downLoadFile(srcPath, dstPath);
        } else {
            downLoadFloder(srcPath, dstPath);
        }
        destory();
    }

    public static void main(String[] args) {
        // Manual test: pull a directory from HDFS to the local disk.
        // Reads/writes go through DataNodes, which by default advertise
        // internal IPs — init() configures hostname-based access instead.
//        downLoadFile("/lagou/bigdata/hdfs.txt", "d://1.txt");
        downLoadFloder("/lagou/bigdata/", "d:/lagou1");
    }
}




package com.lg.transport_goods;

import com.lg.util.DownloadFile;
import com.lg.util.LgFileUtil;
import com.lg.util.UploadFile;

import java.io.IOException;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;

//Truck-loading planner implemented with the 0/1 knapsack algorithm.
public class TransportGoods3 {

    /**
     * Repeatedly solves the knapsack fit until every item is assigned to a
     * truck; each iteration plans one truck and records the plan to a file.
     *
     * @param volumeArr item volumes, already integer-scaled by translate()
     * @param capacity  truck capacity in the same scaled units
     * @param fileName  result file that receives the loading plan
     */
    public static void getMaxVolumeMulti(Integer[] volumeArr, Integer capacity, String fileName) {
        int num = 0; // truck counter
        while (volumeArr.length != 0) {
            num++;
            Integer[] arr = getMaxVolume(volumeArr, capacity, num, fileName);
            if (arr.length == volumeArr.length) {
                // BUG FIX: if no item was loaded this round (every remaining
                // item exceeds the capacity) the array never shrinks and the
                // original code looped forever. Stop and report instead.
                System.out.println("剩余" + arr.length + "件货物单件体积超过车辆容积，无法装载");
                break;
            }
            volumeArr = arr;
        }
    }

    /**
     * Converts fractional volumes to integer units (scaled by 1000) so they
     * can serve as knapsack weights.
     *
     * @param volumeArr raw volumes
     * @return volumes rounded to three decimals and multiplied by 1000
     */
    public static Integer[] translate(Double[] volumeArr) {
        Integer[] arr = new Integer[volumeArr.length];
        // Keep three decimal places before scaling.
        DecimalFormat decimalFormat = new DecimalFormat("######0.000");
        for (int i = 0; i < volumeArr.length; i++) {
            String text = decimalFormat.format(volumeArr[i]);
            double v = Double.parseDouble(text);
            // BUG FIX: the plain (int) cast truncated, so e.g. 0.570 could
            // become 569 because 0.57 * 1000 is 569.999... in binary floating
            // point. Math.round yields the intended value.
            arr[i] = (int) Math.round(v * 1000);
        }
        return arr;
    }

    /**
     * Solves one 0/1 knapsack instance: the best combination of items that
     * fits one truck of the given capacity.
     *
     * @param volumeArr remaining item volumes
     * @param capcity   truck capacity (scaled units)
     * @param num       1-based truck index, used in log/file output
     * @param fileName  result file path
     * @return remaining items after the selected ones are removed
     */
    public static Integer[] getMaxVolume(Integer[] volumeArr, int capcity, int num, String fileName) {
        // dp[i][j] = best total volume using the first i items at capacity j.
        int[][] dp = new int[volumeArr.length + 1][capcity + 1];
        for (int i = 1; i <= volumeArr.length; i++) {
            for (int j = 1; j <= capcity; j++) {
                if (j < volumeArr[i - 1]) {
                    // Item i-1 does not fit in remaining capacity j.
                    dp[i][j] = dp[i - 1][j];
                } else {
                    // Take the better of: selecting item i-1 or skipping it.
                    dp[i][j] = Math.max(dp[i - 1][j - volumeArr[i - 1]] + volumeArr[i - 1], dp[i - 1][j]);
                }
            }
        }
        System.out.println();
        System.out.println("------------------------------");
        // Report the best achievable volume for this truck.
        System.out.println("第" + num + "辆车，最大组合体积为" + dp[volumeArr.length][capcity]);
        // Append the summary line to the result file.
        try {
            LgFileUtil.writeString2SimpleFile("第" + num + "辆车，最大组合体积为" + dp[volumeArr.length][capcity], fileName, "utf-8");
        } catch (Exception e) {
            e.printStackTrace();
        }
        // Back-track the DP table to list the chosen items and drop them.
        Integer[] newArr = printGoodsInfo(dp, volumeArr, capcity, fileName);
        return newArr;
    }

    /**
     * Back-tracks the DP table, prints/records each selected item, marks it
     * as used (volume 0), and returns the remaining items.
     */
    public static Integer[] printGoodsInfo(int[][] dp, Integer[] volumeArr, int capacity, String fileName) {
        // Walk the table bottom-up from the final state.
        int i = volumeArr.length;
        int j = capacity;
        while (i > 0 && j > 0) {
            int v = volumeArr[i - 1];
            // BUG FIX: guard the index explicitly instead of swallowing
            // ArrayIndexOutOfBoundsException as the original code did.
            if (v <= j && dp[i][j] == dp[i - 1][j - v] + v) {
                // Item i-1 was part of the optimal selection.
                System.out.print("选择了体积为" + v + "货物；");
                try {
                    LgFileUtil.writeString2SimpleFile(v + "", fileName, "utf-8");
                } catch (Exception e) {
                    e.printStackTrace();
                }
                // Reduce the remaining capacity by this item's volume.
                j = j - v;
                // Mark the item as loaded so getNewArr() filters it out.
                volumeArr[i - 1] = 0;
            }
            // Whether or not the item was selected, move to the previous row.
            i--;
        }
        // Selected items were zeroed above; strip them from the array.
        return getNewArr(volumeArr);
    }

    /**
     * Removes all zero entries (items already loaded) from the array.
     */
    public static Integer[] getNewArr(Integer[] volumeArr) {
        ArrayList<Integer> list = new ArrayList<>();
        for (int i = 0; i < volumeArr.length; i++) {
            if (volumeArr[i] != 0) {
                list.add(volumeArr[i]);
            }
        }
        Integer[] arr = new Integer[list.size()];
        return list.toArray(arr);
    }

    public static void main(String[] args) {
        // HDFS and local directory layout for one day's batch.
        String day = "20200608";
        String hdfsDir = "/replenishment";
        String hdfsResDir = "/res";

        String localDir = "d:/replenishment";
        String localResDir = "d:/res";
        // Truck capacity: 40 m^3, expressed in the scaled (x1000) units.
        Integer capacity = 40 * 1000;
        // 1) Pull the day's data directory from HDFS.
        try {
            DownloadFile.init("hdfs://linux121:9000");
            DownloadFile.download(hdfsDir + "/" + day, localDir + "/" + day);
            DownloadFile.destory();
        } catch (Exception e) {
            e.printStackTrace();
        }
        // 2) Parse the downloaded files into per-warehouse volume arrays.
        HashMap<String, Double[]> map = null;
        try {
            map = LgFileUtil.readFiles(localDir + "/" + day);
        } catch (Exception e) {
            e.printStackTrace();
        }
        if (map == null) {
            return;
        }
        // 3) Plan trucks per warehouse via the knapsack algorithm.
        for (Map.Entry<String, Double[]> entry : map.entrySet()) {
            String key = entry.getKey(); // warehouse number
            Double[] volumeArr = entry.getValue(); // item volumes
            getMaxVolumeMulti(translate(volumeArr), capacity, localResDir + "/" + day + "/" + key);
        }

        // 4) Upload the result files back to HDFS.
        try {
            UploadFile.init("hdfs://linux121:9000");
            UploadFile.uploadFolder(localResDir + "/" + day, hdfsResDir + "/" + day);
            UploadFile.destory();
        } catch (Exception e) {
            // IOException / InterruptedException collapsed into one handler.
            e.printStackTrace();
        }

        // 5) Persist scheduling info: truck dispatch to MySQL, cargo to HBase.
        try {
            LgFileUtil.readScheduleFiles(localResDir + "/" + day);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}


   保存调度数据
       车辆调度信息存储Mysql
       车辆货物信息存储Hbase
   Mysql存储
   lg_bus表
   CREATE TABLE `lg_bus` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`plateNum` varchar(256) NOT NULL COMMENT '车牌号',
`travle_distance` bigint(20) NOT NULL COMMENT '行驶里程',
`status` tinyint(5) NOT NULL COMMENT '状态：0-->等待调度；1-->正在调度;',
`sim` varchar(50) NOT NULL COMMENT 'sim卡号',
`transportNum` varchar(50) NOT NULL COMMENT '道路运输证',
`oilRemain` int(11) NOT NULL COMMENT '剩余油量',
`weights` int(11) NOT NULL COMMENT '最大载重',
`volume` int(11) NOT NULL COMMENT '最大容积',
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=5 DEFAULT CHARSET=utf8;

##插入数据
INSERT INTO `lg_bus` VALUES ('1', '京A-11111', '1000', '0', '1111', 'ysz11111', '50', '25', '40');
INSERT INTO `lg_bus` VALUES ('2', '京A-22222', '2000', '0', '2222', 'ysz22222', '60', '25', '40');
INSERT INTO `lg_bus` VALUES ('3', '京A-33333', '3000', '0', '3333', 'ysz333333', '70', '25', '40');
INSERT INTO `lg_bus` VALUES ('4', '京A-44444', '4000', '0', '4444', 'ysz44444', '80', '25', '40');
   
   log_schedule
   CREATE TABLE `log_schedule` (
`id` varchar(256) NOT NULL COMMENT '主键',
`travel_id` varchar(256) DEFAULT NULL COMMENT '调度编号',
`bus_number` varchar(64) NOT NULL COMMENT '车牌号',
`src_location` varchar(256) DEFAULT NULL COMMENT '出发地',
`dest_location` varchar(256) NOT NULL COMMENT '目的地',
`creat_time` timestamp NULL DEFAULT NULL COMMENT '调度任务创建时间',
`schedule_time` timestamp NULL DEFAULT NULL COMMENT '开始调度时间',
`finish_time` timestamp NULL DEFAULT NULL COMMENT '调度完成时间',
`status` tinyint(12) NOT NULL DEFAULT '0' COMMENT '0-等待调度，1-调度中，2-已完成',
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

   Hbase存储
   create 'lg_trans_info','goods'
   
   代码
package com.lg.util;

import com.lg.bean.Bus;

import java.io.*;
import java.sql.Connection;
import java.sql.SQLException;
import java.text.NumberFormat;
import java.text.SimpleDateFormat;
import java.util.*;


public class LgFileUtil {

    // Buffer size (in chars) used when constructing BufferedWriter instances.
    private final static int buffer_size = 1024;

    // Date formatting helper.
    // NOTE(review): SimpleDateFormat is not thread-safe and this field is not
    // used anywhere in this class — consider removing or replacing with
    // java.time.format.DateTimeFormatter.
    private final static SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");

    // Number formatting helper, configured for at most 10 fraction digits.
    private final static NumberFormat numFormatter = NumberFormat.getNumberInstance();

    static {
        numFormatter.setMaximumFractionDigits(10);
    }

    // Platform line separator.
    // BUG FIX: previously obtained through the internal
    // sun.security.action.GetPropertyAction API, which is not portable across
    // JDK vendors/versions; System.lineSeparator() is the supported equivalent.
    private final static String lineSeparator = System.lineSeparator();


    /**
     * Writes a CSV-formatted string to a CSV file using the given encoding.
     *
     * @param csvStr      string to write (already CSV formatted)
     * @param filePath    destination CSV file path
     * @param charsetName e.g. GB2312
     * @throws Exception if the file cannot be created or written
     */
    public static void writeCsvStr2CsvFile(String csvStr, String filePath, String charsetName) throws Exception {
        LgFileUtil.writeString2SimpleFile(csvStr, filePath, charsetName);
    }

    /**
     * Appends a string plus a newline to a plain file using the given encoding,
     * creating the file (and its parent directories) if necessary.
     *
     * @param str         string to write
     * @param filePath    destination file path
     * @param charsetName UTF-8 | GB2312
     * @throws Exception if the file cannot be created or written
     */
    public static void writeString2SimpleFile(String str, String filePath, String charsetName) throws Exception {

        BufferedWriter out = null;
        try {
            File file = new File(filePath);

            createNewFileIfNotExists(file);

            // Open in append mode so repeated calls accumulate lines.
            OutputStreamWriter os = new OutputStreamWriter(new FileOutputStream(file, true), charsetName);
            out = new BufferedWriter(os, LgFileUtil.buffer_size);

            out.write(str);
            out.newLine();
            out.flush();
        } finally {
            LgFileUtil.close(out);
        }

    }


    /**
     * Creates the file (and any missing parent directories) if it does not exist.
     */
    private static void createNewFileIfNotExists(File file) throws IOException {
        if (!file.exists()) {
            // Ensure the parent directory chain exists first.
            if (!file.getParentFile().exists()) {
                file.getParentFile().mkdirs();
            }

            file.createNewFile();
        }
    }

    /**
     * Quietly closes a writer; a close failure is deliberately ignored because
     * the data has already been flushed by the caller.
     */
    private static void close(Writer out) {
        if (null != out) {
            try {
                out.close();
            } catch (IOException e) {
                // intentionally ignored — see method comment
            }
        }
    }

    /**
     * Reads every data file under the given directory and parses the volume
     * column (second tab-separated field, skipping the first header line).
     *
     * @param strPath directory holding the downloaded data files
     * @return map of lower-cased file name to parsed volumes, or null when the
     *         path is not a readable directory
     */
    public static HashMap<String, Double[]> readFiles(String strPath) throws Exception {
        File dir = new File(strPath);
        File[] files = dir.listFiles();
        if (files == null)
            return null;
        // Accumulates one volume array per input file.
        final HashMap<String, Double[]> map = new HashMap<>();
        for (int i = 0; i < files.length; i++) {
            if (files[i].isFile()) {
                // BUG FIX: the name was previously extracted by searching the
                // lower-cased absolute path for '\\' (fails with '/' separators
                // on Unix), and that lower-cased path was then reopened, which
                // breaks on case-sensitive file systems. Use the File directly.
                String fileName = files[i].getName().toLowerCase();
                System.out.println("--文件名--" + fileName);

                ArrayList<Double> list = new ArrayList<Double>();
                // NOTE(review): reads with the platform default charset — confirm
                // the encoding of the files downloaded from HDFS.
                final BufferedReader br = new BufferedReader(new FileReader(files[i]));
                String line;
                int r = 0;
                while ((line = br.readLine()) != null) {
                    if (r == 0) {
                        // Skip the header row.
                        r++;
                        continue;
                    }

                    // Tab-separated line; the volume is in the second column.
                    final String[] arr = line.split("\t");
                    list.add(Double.parseDouble(arr[1]));

                }
                br.close();
                Double[] arr = new Double[list.size()];
                list.toArray(arr);
                map.put(fileName, arr);

            }
        }
        return map;
    }


    /**
     * Walks the schedule result directory and persists each truck's dispatch
     * record to MySQL and its cargo manifest to HBase.
     *
     * @param strPath directory holding the per-warehouse result files
     */
    public static void readScheduleFiles(String strPath) throws Exception {
        File dir = new File(strPath);
        File[] files = dir.listFiles();
        if (files == null)
            return;
        for (int i = 0; i < files.length; i++) {
            if (files[i].isFile()) {
                final String fileName = files[i].getName();
                // Warehouse number = file name without its extension.
                String entrepotNum = fileName.substring(0, fileName.lastIndexOf("."));
                System.out.println("仓库编号：" + entrepotNum);
                // BUG FIX: the result files are written as utf-8 by
                // writeString2SimpleFile's callers; reading them with the
                // platform default charset corrupted non-ASCII text on
                // e.g. GBK-default Windows machines.
                final BufferedReader br = new BufferedReader(
                        new InputStreamReader(new FileInputStream(files[i]), "utf-8"));
                String line;
                int num = 0;
                StringBuffer sb = new StringBuffer();
                // NOTE(review): lines containing ':' are treated as cargo rows
                // and the rest as per-truck headers — confirm this matches the
                // actual result-file format produced upstream.
                while ((line = br.readLine()) != null) {
                    final String[] arr = line.split(":");
                    if (arr.length < 2) {
                        if (num != 0) {
                            // Header line: marks the start of a new truck, so
                            // flush the previous truck's accumulated cargo.
                            saveToMysqlAndHbase(entrepotNum, sb.toString());
                            sb = new StringBuffer();
                        }
                        num++; // num is the 1-based truck counter
                    } else {
                        // Cargo line: append to the current truck's manifest.
                        sb.append(line).append("&");
                    }
                }
                // Flush the final truck after the file is exhausted.
                saveToMysqlAndHbase(entrepotNum, sb.toString());
                br.close();
            }
        }

    }

    /**
     * Dispatches one truck: picks the least-travelled idle truck, records the
     * schedule in MySQL, marks the truck busy, and stores its cargo in HBase.
     *
     * @param entrepotNum warehouse number (destination)
     * @param str         one truck's cargo manifest
     */
    public static void saveToMysqlAndHbase(String entrepotNum, String str) throws SQLException, IOException, ClassNotFoundException {
        // 1) Pick an idle truck (status 0), preferring the lowest mileage.
        final Connection connection = DBUtil.openConnection();
        String sql = "select * from lg_bus where status = 0 order by travle_distance,id limit 1;";
        Bus bus = null;
        try {
            bus = DBUtil.queryBean(connection, sql, Bus.class);
        } catch (Exception e) {
            System.out.println("车辆不足。。。");
            // BUG FIX: preserve the original exception as the cause instead of
            // throwing a bare RuntimeException that hides the failure reason.
            throw new RuntimeException(e);
        }
        // 2) Record the dispatch in the schedule table.
        final String id = UUID.randomUUID().toString();
        String[] deployArr = {"316d5c75-e860-4cc9-a7de-ea2148c244a0",
                "32102c12-6a73-4e03-80ab-96175a8ee686",
                "a97f6c0d-9086-4c68-9d24-8a7e89f39e5a",
                "adfgfdewr-5463243546-4c68-9d24-8a7e8"};
        final Random rd = new Random();
        // Pick a pseudo-random travel id from the fixed demo pool.
        String travle_id = deployArr[rd.nextInt(deployArr.length)];
        String sqlStr1 = "insert into log_schedule (id,travel_id,bus_number,src_location,dest_location,creat_time,status)" +
                "values(?,?,?,?,?,?,?);";
        DBUtil.execute(connection, sqlStr1,
                id,
                travle_id,
                bus.getPlateNum(),
                "一级仓库",
                entrepotNum + "号二级仓库",
                new Date(),
                1);
        // 3) Flag the truck as dispatched in lg_bus.
        String sqlStr2 = "update lg_bus set status =1 where plateNum =?";
        DBUtil.execute(connection, sqlStr2, bus.getPlateNum());

        // 4) Store the cargo manifest in HBase under travel_id + plate number.
        String rowkey = travle_id + bus.getPlateNum();
        DBUtil.closeConnection(connection);
        HbaseUtil.putRow("lg_trans_info", rowkey, "info", "goods", str);
    }


    public static void main(String[] args) throws Exception {
        readScheduleFiles("E:\\res\\20200608");
    }
}
   