# -*-coding:utf-8-*-
# Created by ccp on 2018/5/3.

from __future__ import unicode_literals
from hdfs3 import HDFileSystem

# Connection details for the single-node test HDFS namenode.
test_host = '192.168.88.130'
test_port = '8020'  # NOTE(review): hdfs3's HDFileSystem takes an int port — confirm a string is accepted here


def hdfs_exists(hdfsClient):
    """Reset the ``/tmp/test`` working directory on HDFS.

    Drops the directory if it already exists (leftover from an earlier
    run), then creates it fresh.

    :param hdfsClient: connected client exposing ``exists``/``rm``/``mkdir``.
    """
    target = "/tmp/test"
    # Clear any previous run's state before recreating the directory.
    if hdfsClient.exists(target):
        hdfsClient.rm(target)
    hdfsClient.mkdir(target)


def hdfs_write_read(hdfsClient):
    """Round-trip a bytes payload through HDFS and verify it reads back intact.

    :param hdfsClient: connected client whose ``open`` yields file-like objects.
    :raises AssertionError: if the bytes read differ from the bytes written.
    """
    payload = b"hello hadoop" * 20
    file_a = '/tmp/test/file_a'

    # replication=1 keeps the write valid on a single-datanode test cluster.
    with hdfsClient.open(file_a, 'wb', replication=1) as sink:
        sink.write(payload)

    with hdfsClient.open(file_a, 'rb') as source:
        echoed = source.read(len(payload))
        assert echoed == payload


def hdfs_read_lines(hdfsClient):
    """Write a two-line file to HDFS and check it reads back as two lines.

    Bug fix: the original called ``f.readline()``, which returns a single
    line as *bytes*, so ``len(...)`` was the byte count of ``b"hello\\n"``
    (6), and the ``== 2`` assertion could never pass. ``readlines()``
    returns the list of lines, whose length is the line count.

    :param hdfsClient: connected client whose ``open`` yields file-like objects.
    :raises AssertionError: if the file does not read back as two lines.
    """
    file_b = "/tmp/test/file_b"
    with hdfsClient.open(file_b, 'wb', replication=1) as f:
        f.write(b"hello\nhadoop")

    with hdfsClient.open(file_b, 'rb') as f:
        lines = f.readlines()  # list of lines, one per b"\n"-terminated chunk
        assert len(lines) == 2


if __name__ == '__main__':
    # Fix: hdfs3's HDFileSystem expects a numeric port; the module-level
    # constant is a string, so cast it here rather than change the
    # constant's type for other readers of it.
    hdfs_client = HDFileSystem(host=test_host, port=int(test_port))
    hdfs_exists(hdfs_client)       # reset /tmp/test working directory
    hdfs_write_read(hdfs_client)   # byte round-trip check
    hdfs_read_lines(hdfs_client)   # line-count check
    hdfs_client.disconnect()
    print("*" * 20)
    print("hello Hadoop")
