/etc/hosts
127.0.0.1 localhost
192.168.1.111 spark0   # NameNode, DataNode
192.168.1.112 spark1   # SecondaryNameNode, DataNode
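Identical entries should exist on both machines. A quick sanity check from spark0 (passwordless SSH between the nodes is assumed to be configured separately, since start-dfs.sh relies on it):

ping -c 1 spark1     # the name resolves to 192.168.1.112
ssh spark1 hostname  # prints spark1 if key-based SSH is in place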
/etc/profile
export JAVA_HOME=/usr/lib/jvm/java
export HADOOP_HOME=/usr/local/hadoop
export PATH=$PATH:$JAVA_HOME/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
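The variables take effect only in new login shells; to apply them in the current shell and confirm both toolchains are on the PATH:

source /etc/profile
java -version     # JVM under /usr/lib/jvm/java
hadoop version    # Hadoop release under /usr/local/hadoop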
hdfs-site.xml
<configuration>
  <property>
    <name>dfs.namenode.secondary.http-address</name>
    <value>192.168.1.112:50090</value>
  </property>
  <property>
    <name>dfs.replication</name>
    <value>1</value>
  </property>
  <!-- dfs.hosts names an include file listing the DataNodes permitted to
       connect, one hostname per line; it does not accept a comma-separated
       host list. The path below assumes the file sits with the other
       config files under $HADOOP_HOME/etc/hadoop. -->
  <property>
    <name>dfs.hosts</name>
    <value>/usr/local/hadoop/etc/hadoop/dfs.hosts</value>
  </property>
  <property>
    <name>dfs.namenode.name.dir</name>
    <value>file:/usr/local/hadoop/hdfs/name</value>
    <final>true</final>
  </property>
  <property>
    <name>dfs.datanode.data.dir</name>
    <value>file:/usr/local/hadoop/hdfs/data</value>
    <final>true</final>
  </property>
</configuration>
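The local directories named in dfs.namenode.name.dir and dfs.datanode.data.dir must exist before HDFS is formatted, and the include file referenced by dfs.hosts has to be created by hand; a minimal sketch, using the file location assumed above:

mkdir -p /usr/local/hadoop/hdfs/name                                # on spark0 (NameNode)
mkdir -p /usr/local/hadoop/hdfs/data                                # on each DataNode
printf 'spark0\nspark1\n' > /usr/local/hadoop/etc/hadoop/dfs.hosts  # one hostname per line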
core-site.xml
<configuration>
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://192.168.1.111:9000</value>
  </property>
  <property>
    <name>hadoop.tmp.dir</name>
    <value>/var/hadoop</value>
  </property>
</configuration>
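Once both files are copied to spark0 and spark1 (and assuming the slaves file under $HADOOP_HOME/etc/hadoop lists the DataNode hosts, which this section does not show), HDFS can be formatted once and started:

hdfs namenode -format   # run once, on spark0 only; reformatting erases the filesystem
start-dfs.sh            # starts the NameNode, DataNodes, and SecondaryNameNode
hdfs dfsadmin -report   # both DataNodes should be listed as live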