linux安装hadoop及配置
bash
上传压缩文件
解压
# Extract the Hadoop 3.3.5 tarball into /export/server/
tar -zxvf hadoop-3.3.5.tar.gz -C /export/server/
创建连接
# Version-agnostic symlink so later configs can reference /export/server/hadoop
ln -s /export/server/hadoop-3.3.5 /export/server/hadoop
bash
# All Hadoop configuration files live under etc/hadoop of the install dir
cd /export/server/hadoop/etc/hadoop
配置workers文件
# Edit the workers file: one worker hostname per line (the hosts that run DataNodes)
# Fix: 'Vim' (capital V) is not a valid command on case-sensitive Linux; use 'vim'.
vim workers
master
node1
node2
配置hadoop-env.sh
# hadoop-env.sh: environment seen by every Hadoop daemon on startup
export JAVA_HOME=/export/server/jdk              # JDK install location (symlink)
export HADOOP_HOME=/export/server/hadoop         # Hadoop install root (symlink created above)
export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop   # where *.xml config files are read from
export HADOOP_LOG_DIR=$HADOOP_HOME/logs          # daemon log output directory
配置core-site.xml
xml
core-site.xml 配置内容如下：
<!-- core-site.xml: properties must live inside a <configuration> root element,
     matching the hdfs-site.xml block below (without it Hadoop fails to parse the file). -->
<configuration>
  <property>
    <!-- Default filesystem: the NameNode RPC endpoint -->
    <name>fs.defaultFS</name>
    <value>hdfs://master:8020</value>
  </property>
  <property>
    <!-- I/O buffer size in bytes (128 KiB) for sequence file read/write -->
    <name>io.file.buffer.size</name>
    <value>131072</value>
  </property>
</configuration>
xml
# hdfs-site
<configuration>
  <property>
    <!-- Permissions applied to DataNode local storage directories -->
    <name>dfs.datanode.data.dir.perm</name>
    <value>700</value>
  </property>
  <property>
    <!-- NameNode metadata (fsimage/edits) directory.
         Fix: the real property is dfs.namenode.name.dir;
         'dfs.namenode.data.dir' does not exist, so /data/nn was being ignored. -->
    <name>dfs.namenode.name.dir</name>
    <value>/data/nn</value>
  </property>
  <property>
    <!-- Hosts allowed to connect as DataNodes -->
    <name>dfs.namenode.hosts</name>
    <value>master,node1,node2</value>
  </property>
  <property>
    <!-- HDFS block size in bytes (256 MiB) -->
    <name>dfs.blocksize</name>
    <value>268435456</value>
  </property>
  <property>
    <!-- NameNode RPC server handler thread count -->
    <name>dfs.namenode.handler.count</name>
    <value>100</value>
  </property>
  <property>
    <!-- DataNode block storage directory -->
    <name>dfs.datanode.data.dir</name>
    <value>/data/dn</value>
  </property>
</configuration>
bash
在master节点执行：
# Create NameNode metadata dir and DataNode storage dir on master.
# Fix: add -p to the second mkdir too, so the commands are idempotent
# (plain mkdir errors if the directory already exists, e.g. on a re-run).
mkdir -p /data/nn
mkdir -p /data/dn
在node1和node2节点执行：
mkdir -p /data/dn
bash
# Make hadoop commands available system-wide
vim /etc/profile
# -- add the following two lines inside /etc/profile --
export HADOOP_HOME=/export/server/hadoop
export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
# Reload so the current shell picks up the new PATH
source /etc/profile
# Hand ownership of the data and install trees to the hadoop user.
# NOTE(review): assumes a 'hadoop' user and group already exist -- confirm.
chown -R hadoop:hadoop /data
chown -R hadoop:hadoop /export
bash
初始化
# Format the NameNode -- run ONCE, on master only (reformatting destroys HDFS metadata).
# Fix: 'hadoop namenode' is deprecated since Hadoop 3.x; the hdfs command replaces it.
hdfs namenode -format
bash
一键启动hdfs集群
# Starts HDFS daemons across the cluster using the workers file configured above
start-dfs.sh
一键关闭hdfs集群
# Stops all HDFS daemons across the cluster
stop-dfs.sh
web页面查看
http://master:9870