<?xml version="1.0" encoding="UTF-8"?><?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
    <property>
        <name>dfs.namenode.name.dir</name>
        <value>{{ hadoop_datadir }}/nn</value>
    </property>
    <property>
        <name>dfs.datanode.data.dir</name>
        <value>{{ hadoop_datadir }}/dn</value>
    </property>
    <property>
        <name>dfs.journalnode.edits.dir</name>
        <value>{{ hadoop_datadir }}/jn</value>
    </property>
    <property>
        <name>dfs.nameservices</name>
        <value>ns1</value>
    </property>
    <property>
        <name>dfs.ha.namenodes.ns1</name>
        <value>{{ hostvars[groups['hadoop'][0]]['hostname'] }},{{ hostvars[groups['hadoop'][1]]['hostname'] }}</value>
    </property>

    <property>
        <name>dfs.namenode.rpc-address.ns1.{{ hostvars[groups['hadoop'][0]]['hostname'] }}</name>
        <value>{{ hostvars[groups['hadoop'][0]]['ansible_host'] }}:{{ hostvars[groups['hadoop'][0]]['rpc_port'] }}</value>
    </property>
    
    <property>
        <name>dfs.namenode.http-address.ns1.{{ hostvars[groups['hadoop'][0]]['hostname'] }}</name>
        <value>{{ hostvars[groups['hadoop'][0]]['ansible_host'] }}:{{ hostvars[groups['hadoop'][0]]['http_port'] }}</value>
    </property>
    
    <property>
        <name>dfs.namenode.rpc-address.ns1.{{ hostvars[groups['hadoop'][1]]['hostname'] }}</name>
        <value>{{ hostvars[groups['hadoop'][1]]['ansible_host'] }}:{{ hostvars[groups['hadoop'][1]]['rpc_port'] }}</value>
    </property>
    
    <property>
        <name>dfs.namenode.http-address.ns1.{{ hostvars[groups['hadoop'][1]]['hostname'] }}</name>
        <value>{{ hostvars[groups['hadoop'][1]]['ansible_host'] }}:{{ hostvars[groups['hadoop'][1]]['http_port'] }}</value>
    </property>

    <property>
        <name>dfs.ha.automatic-failover.enabled.ns1</name>
        <value>true</value>
    </property>
    <property>
        <name>dfs.client.failover.proxy.provider.ns1</name>
        <value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
    </property>
    
    <property>
        <name>dfs.permissions.enabled</name>
        <value>false</value>
    </property>

    <property>
        <name>dfs.replication</name>
        <value>2</value>
    </property>

    <property>
        <name>dfs.blocksize</name>
        <value>128M</value>
        <description>HDFS blocksize of 128MB for large file-systems</description>
    </property>

    <property>
        <name>dfs.namenode.handler.count</name>
        <value>1000</value>
        <description>More NameNode server threads to handle RPCs from large number of DataNodes.</description>
    </property>

    <property>
        <!-- Shared edits directory on the JournalNode quorum; hosts are joined with ';' per the qjournal URI format -->
        <name>dfs.namenode.shared.edits.dir</name>
        <value>qjournal://{% set jn_hosts = [] %}{% for host in groups['hadoop'] %}{% if hostvars[host].jn is defined and hostvars[host].jn | bool %}{% set _ = jn_hosts.append(hostvars[host].ansible_host + ':8485') %}{% endif %}{% endfor %}{{ jn_hosts | join(';') }}/ns1</value>
    </property>

    <property>
        <name>dfs.ha.fencing.methods</name>
        <value>sshfence</value>
    </property>

    <property>
        <name>dfs.ha.fencing.ssh.private-key-files</name>
        <value>/home/{{ os_general_user }}/.ssh/id_rsa</value>
    </property>
</configuration>