# core-site.xml
<configuration>
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://namenode_public_hostname:9999</value>
  </property>
</configuration>

# hdfs-site.xml
<configuration>
  <property>
    <name>dfs.replication</name>
    <value>1</value>
  </property>
  <property>
    <name>dfs.namenode.name.dir</name>
    <value>file:///usr/local/hadoop/data/hdfs/namenode</value>
  </property>
  <property>
    <name>dfs.datanode.data.dir</name>
    <value>file:///usr/local/hadoop/data/hdfs/datanode</value>
  </property>
</configuration>

# hadoop-env.sh
# export JAVA_HOME=/path/to/jdk  (set JAVA_HOME explicitly here; Hadoop daemons do not reliably inherit it from the login shell)

# mapred-site.xml
<configuration>
  <property>
    <name>mapreduce.framework.name</name>
    <value>yarn</value>
  </property>
</configuration>

# yarn-site.xml
<configuration>
  <!-- Site specific YARN configuration properties -->
  <property>
    <name>yarn.nodemanager.aux-services</name>
    <value>mapreduce_shuffle</value>
  </property>
</configuration>

# format the namenode (first run only — reformatting erases HDFS metadata)
hdfs namenode -format

# start dfs and create the user's home directory
start-dfs.sh
hadoop fs -mkdir -p /user/<username>
# note: fs subcommands take a single dash (-mkdir, not --mkdir), and the
# directory is /user/<username> — HDFS treats it as the default working
# directory, so relative paths resolve against it.

# start yarn
start-yarn.sh