Install dependencies
sudo apt install ssh
sudo apt install pdsh
Passwordless SSH login
# Generate an SSH key pair; press Enter through all the prompts
ssh-keygen -t rsa
# Append the generated public key to authorized_keys
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
# Try logging in without a password (the first login asks for confirmation, so type yes; subsequent logins connect directly) - the setup is complete
ssh localhost
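If ssh localhost still prompts for a password, the most common cause is permissions on ~/.ssh; the following quick fix (a standard OpenSSH requirement, not from the original text) usually resolves it:
chmod 700 ~/.ssh
chmod 600 ~/.ssh/authorized_keys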
# Create the JDK directory
sudo mkdir -p /opt/java
# Change to the Downloads directory
cd ~/Downloads
# Download with wget
wget --no-check-certificate --no-cookies --header "Cookie: oraclelicense=accept-securebackup-cookie" http://download.oracle.com/otn-pub/java/jdk/8u172-b11/a58eab1ec242421181065cdc37240b08/jdk-8u172-linux-x64.tar.gz
# Extract the archive
tar -zxf jdk-8u172-linux-x64.tar.gz
# Move the extracted JDK directory to /opt/java/
sudo mv jdk1.8.0_172/ /opt/java/jdk1.8.0_172/
1. Create the jdk-1.8.sh file with vim
sudo vim /etc/profile.d/jdk-1.8.sh
2. Add the following content
#!/bin/sh
# Author:wangxiaolei 王小雷
# Blog: http://blog.csdn.net/dream_an
# Github: https://github.com/lycheeman
# project: https://github.com/lycheeman/big-data
# Date: 2018.07
export JAVA_HOME=/opt/java/jdk1.8.0_172
export JRE_HOME=${JAVA_HOME}/jre
export CLASSPATH=.:${JAVA_HOME}/lib:${JRE_HOME}/lib
export PATH=${JAVA_HOME}/bin:$PATH
3. Apply the Java environment variables
source /etc/profile
4. Verify the Java installation
java -version
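If the variables took effect, the output should look roughly like this (build numbers may differ):
java version "1.8.0_172"
Java(TM) SE Runtime Environment (build 1.8.0_172-b11)
Java HotSpot(TM) 64-Bit Server VM (build 25.172-b11, mixed mode)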
Set JAVA_HOME for Hadoop (this line goes into etc/hadoop/hadoop-env.sh):
export JAVA_HOME=/opt/java/jdk1.8.0_172
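A minimal way to add that line, assuming Hadoop 3.1.0 has already been downloaded and unpacked to /opt/hadoop/hadoop-3.1.0 (the HADOOP_HOME used later in this guide):
echo 'export JAVA_HOME=/opt/java/jdk1.8.0_172' | sudo tee -a /opt/hadoop/hadoop-3.1.0/etc/hadoop/hadoop-env.sh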
Add the following to etc/hadoop/core-site.xml (replace busuanzi in the proxy-user keys with your own username):
<configuration>
<property>
<name>fs.defaultFS</name>
<value>hdfs://localhost:9000</value>
</property>
<property>
<name>hadoop.proxyuser.busuanzi.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.busuanzi.groups</name>
<value>*</value>
</property>
</configuration>
Add the following to etc/hadoop/hdfs-site.xml:
<configuration>
<property>
<name>dfs.namenode.name.dir</name>
<value>/var/lib/hadoop/hdfs/name/</value>
</property>
<property>
<name>dfs.datanode.data.dir</name>
<value>/var/lib/hadoop/hdfs/data/</value>
</property>
<property>
<name>dfs.replication</name>
<value>1</value>
</property>
</configuration>
sudo vim /etc/profile.d/hadoop-3.1.0.sh
Add the following content
#!/bin/sh
# Author:wangxiaolei 王小雷
# Github: https://github.com/lycheeman
export HADOOP_HOME="/opt/hadoop/hadoop-3.1.0"
export PATH="$HADOOP_HOME/bin:$PATH"
export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
export YARN_CONF_DIR=$HADOOP_HOME/etc/hadoop
Apply the variables
source /etc/profile
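With the variables loaded, a quick sanity check (hdfs getconf is a standard Hadoop command) confirms that the environment and the XML files are being picked up:
hadoop version                        # should print Hadoop 3.1.0
hdfs getconf -confKey fs.defaultFS    # should print hdfs://localhost:9000
hdfs getconf -confKey dfs.replication # should print 1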
sudo mkdir /var/lib/hadoop
sudo chown busuanzi:busuanzi /var/lib/hadoop/
# Create the rcmd_default file
# Set the rcmd_default entry to avoid the error: master: rcmd: socket: Permission denied
sudo sh -c 'echo ssh > /etc/pdsh/rcmd_default'
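A quick check that the file contains what pdsh expects:
cat /etc/pdsh/rcmd_default   # should output: ssh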
2. Format the NameNode (note that formatting only needs to be run once)
cd /opt/hadoop/hadoop-3.1.0/
bin/hdfs namenode -format
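If formatting succeeds, the log should end with a line similar to the following (exact wording may vary between releases):
INFO common.Storage: Storage directory /var/lib/hadoop/hdfs/name has been successfully formatted.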
sbin/start-dfs.sh
jps
Note that since Hadoop 3.0 the NameNode web UI port has changed from 50070 to 9870 (see the official explanation).
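If the HDFS daemons came up, jps should show roughly the following (PIDs will differ), and the NameNode web UI should be reachable at http://localhost:9870/:
12401 NameNode
12554 DataNode
12801 SecondaryNameNode
13012 Jps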
Add the following to etc/hadoop/mapred-site.xml:
<configuration>
<property>
<name>mapreduce.framework.name</name>
<value>yarn</value>
</property>
<property>
<name>mapreduce.application.classpath</name>
<value>$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*:$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*</value>
</property>
</configuration>
Add the following to etc/hadoop/yarn-site.xml:
<configuration>
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>
<property>
<name>yarn.nodemanager.env-whitelist</name>
<value>JAVA_HOME,HADOOP_COMMON_HOME,HADOOP_HDFS_HOME,HADOOP_CONF_DIR,CLASSPATH_PREPEND_DISTCACHE,HADOOP_YARN_HOME,HADOOP_MAPRED_HOME</value>
</property>
</configuration>
sbin/start-yarn.sh
jps
At this point, the pseudo-distributed deployment is complete.
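As an end-to-end check (not part of the original text), jps should now also list ResourceManager and NodeManager, and the bundled example jar can be used to run a small MapReduce job on YARN (the jar name assumes the stock 3.1.0 release layout):
cd /opt/hadoop/hadoop-3.1.0/
bin/hadoop jar share/hadoop/mapreduce/hadoop-mapreduce-examples-3.1.0.jar pi 2 10
# A successful run prints an estimated value of Pi at the end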
bin/mapred --daemon start historyserver
Visit http://192.168.56.101:19888/ (the JobHistoryServer web UI; use your own host's IP).
sbin/stop-yarn.sh
sbin/stop-dfs.sh
bin/mapred --daemon stop historyserver
Open source; the source code is available at https://github.com/lycheeman/big-data