Prerequisites: 1. Configure the JDK environment variables  2. Configure the Hadoop environment variables  3. File configuration

1. Fully distributed configuration ($HADOOP_HOME/etc/hadoop/, here /soft/hadoop/etc/hadoop/)

core-site.xml
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://master/</value>
    </property>
</configuration>
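fs.defaultFS points every client and daemon at hdfs://master/, so the hostname master (and the workers named below) has to resolve on all nodes. A minimal /etc/hosts sketch; the IP addresses are placeholders and must be replaced with your cluster's real ones:

# /etc/hosts on every node -- the addresses below are hypothetical
192.168.1.101  master
192.168.1.102  s2
192.168.1.103  s3
192.168.1.104  s4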
hdfs-site.xml
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
    <property>
        <name>dfs.replication</name>
        <value>3</value>
    </property>
</configuration>
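A replication factor of 3 matches the three datanodes listed in the slaves file below; with fewer live datanodes, blocks stay under-replicated. Once the file is in place, the value Hadoop actually sees can be read back with hdfs getconf (available in Hadoop 2.x):

# should print the configured value, 3
$>hdfs getconf -confKey dfs.replication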
mapred-site.xml (note: create it from the template first: cp mapred-site.xml.template mapred-site.xml)
<?xml version="1.0"?>
<configuration>
    <property>
        <name>mapreduce.framework.name</name>
        <value>yarn</value>
    </property>
</configuration>
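Setting mapreduce.framework.name to yarn makes jobs run on the cluster instead of the local runner. After the processes are up (step 4 below), the bundled example job is a convenient smoke test; this assumes $HADOOP_HOME points at /soft/hadoop and uses the Hadoop 2.x examples jar, whose exact name depends on your release:

# hedged smoke test -- adjust the jar name to your Hadoop version
$>hadoop jar $HADOOP_HOME/share/hadoop/mapreduce/hadoop-mapreduce-examples-*.jar pi 2 10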
yarn-site.xml
<?xml version="1.0"?>
<configuration>
    <property>
        <name>yarn.resourcemanager.hostname</name>
        <value>master</value>
    </property>
    <property>
        <name>yarn.nodemanager.aux-services</name>
        <value>mapreduce_shuffle</value>
    </property>
</configuration>
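yarn.resourcemanager.hostname places the ResourceManager on master, and mapreduce_shuffle is the auxiliary service each NodeManager needs to serve map output to reducers. After startup, the registered NodeManagers can be listed from master (they also appear in the web UI, http://master:8088 with Hadoop 2.x defaults):

# expect s2, s3 and s4 to be reported as RUNNING
$>yarn node -list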
slaves
s2
s3
s4
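start-dfs.sh/start-yarn.sh ssh into every host named in slaves, so passwordless ssh from master to the workers is normally set up beforehand. A sketch assuming the same centos account used in the scp step below:

# run once on master; assumes the centos user exists on every worker
$>ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa
$>ssh-copy-id centos@s2
$>ssh-copy-id centos@s3
$>ssh-copy-id centos@s4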
hadoop-env.sh
export JAVA_HOME=/root/hd/jdk1.8
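JAVA_HOME must be set explicitly here because the daemons are launched over ssh and may not inherit the login shell's environment. A quick sanity check that the configured path really is a usable JDK:

# verify the JDK path configured above exists and runs
$>/root/hd/jdk1.8/bin/java -version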
2. Distribute the configuration to the worker nodes
$>cd /soft/hadoop/etc/
$>scp -r hadoop centos@s2:/soft/hadoop/etc/
$>scp -r hadoop centos@s3:/soft/hadoop/etc/
$>scp -r hadoop centos@s4:/soft/hadoop/etc/
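To confirm the copies landed intact, the checksums on master and a worker can be compared (md5sum ships with CentOS):

# the two listings should match line for line
$>md5sum /soft/hadoop/etc/hadoop/*.xml
$>ssh centos@s2 md5sum /soft/hadoop/etc/hadoop/*.xml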
3. Format the file system
$>hadoop namenode -format
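Formatting is done once, on master, before the first start; in Hadoop 2.x the non-deprecated spelling is hdfs namenode -format. It creates the NameNode metadata directory, which with the defaults (no dfs.namenode.name.dir or hadoop.tmp.dir override) lands under /tmp/hadoop-<user>:

# path assumes default hadoop.tmp.dir and running as root; adjust otherwise
$>ls /tmp/hadoop-root/dfs/name/current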
4. Start the Hadoop processes
$>start-all.sh
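start-all.sh simply calls start-dfs.sh and start-yarn.sh in turn. A few checks that the expected daemons came up (the web UI ports are Hadoop 2.x defaults):

# on master: expect NameNode, SecondaryNameNode, ResourceManager
$>jps
# on each of s2/s3/s4: expect DataNode, NodeManager
$>jps
# all three datanodes should be reported as live
$>hdfs dfsadmin -report
# web UIs: http://master:50070 (HDFS), http://master:8088 (YARN)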