Install the JDK
rpm -ivh /opt/downloads/jdk-8u201-linux-x64.rpm
vim /etc/profile
export JAVA_HOME=/usr/java/jdk1.8.0_201-amd64
export JAVA_BIN=/usr/java/jdk1.8.0_201-amd64/bin
export PATH=$PATH:$JAVA_HOME/bin
export CLASSPATH=:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar
export JAVA_HOME JAVA_BIN PATH CLASSPATH
source /etc/profile
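The install can be quickly verified; java and javac should now resolve to the JDK just installed under /usr/java:
java -version
javac -version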
Download Hadoop
wget -P /opt/downloads http://mirror.bit.edu.cn/apache/hadoop/common/hadoop-3.2.0/hadoop-3.2.0.tar.gz
tar zxvf /opt/downloads/hadoop-3.2.0.tar.gz -C /opt
mv /opt/hadoop-3.2.0/ /opt/hadoop
Run the bundled standalone grep example to verify the unpack (the input and output paths are relative to the current directory):
cd /opt/hadoop
mkdir input
cp etc/hadoop/*.xml input
/opt/hadoop/bin/hadoop jar /opt/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-examples-3.2.0.jar grep input output 'dfs[a-z.]+'
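If the job succeeds, the matches end up in the output directory; with an unmodified config the result is typically a single line such as "1 dfsadmin":
cat /opt/hadoop/output/*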
Create the working directories
mkdir /opt/hadoop/tmp
mkdir /opt/hadoop/var
mkdir /opt/hadoop/dfs
mkdir /opt/hadoop/dfs/name
mkdir /opt/hadoop/dfs/data
mkdir -p /opt/hadoop/fs/checkpoint
mkdir /opt/hadoop/fs/checkpoint/edits
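The same directories can also be created in one command with bash brace expansion, equivalent to the individual mkdir calls above:
mkdir -p /opt/hadoop/{tmp,var,dfs/name,dfs/data,fs/checkpoint/edits}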
Check the JAVA_HOME path
echo $JAVA_HOME
Edit the configuration files
vim /opt/hadoop/etc/hadoop/core-site.xml
<configuration>
  <property>
    <name>hadoop.tmp.dir</name>
    <value>/opt/hadoop/tmp</value>
    <description>A base for other temporary directories.</description>
  </property>
  <property>
    <name>io.file.buffer.size</name>
    <value>262144</value>
    <description>Size of read/write buffer used in SequenceFiles (256 KB).</description>
  </property>
  <property>
    <name>io.native.lib.available</name>
    <value>true</value>
  </property>
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://0.0.0.0:9000</value>
  </property>
</configuration>
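To confirm that core-site.xml is being picked up, the effective value of fs.defaultFS can be queried:
/opt/hadoop/bin/hdfs getconf -confKey fs.defaultFS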
vim /opt/hadoop/etc/hadoop/hdfs-site.xml
<configuration>
  <property>
    <name>dfs.replication</name>
    <value>1</value>
  </property>
  <property>
    <name>dfs.name.dir</name>
    <value>/opt/hadoop/dfs/name</value>
    <description>Path on the local filesystem where the NameNode stores the namespace and transaction logs persistently.</description>
  </property>
  <property>
    <name>dfs.data.dir</name>
    <value>/opt/hadoop/dfs/data</value>
    <description>Comma-separated list of paths on the local filesystem of a DataNode where it should store its blocks.</description>
  </property>
  <property>
    <name>dfs.permissions</name>
    <value>false</value>
    <description>Disable HDFS permission checking.</description>
  </property>
  <property>
    <name>dfs.block.size</name>
    <value>134217728</value>
    <description>HDFS block size of 128 MB for large filesystems.</description>
  </property>
  <property>
    <name>fs.checkpoint.dir</name>
    <value>/opt/hadoop/fs/checkpoint</value>
  </property>
  <property>
    <name>fs.checkpoint.edits.dir</name>
    <value>/opt/hadoop/fs/checkpoint/edits</value>
  </property>
  <property>
    <name>dfs.namenode.http-address</name>
    <value>0.0.0.0:9870</value>
  </property>
</configuration>
vim /opt/hadoop/etc/hadoop/mapred-site.xml
<configuration>
  <property>
    <name>mapred.job.tracker</name>
    <value>0.0.0.0:9001</value>
  </property>
  <property>
    <name>mapred.local.dir</name>
    <value>/opt/hadoop/var</value>
  </property>
  <property>
    <name>mapreduce.framework.name</name>
    <value>yarn</value>
  </property>
</configuration>
vim /opt/hadoop/etc/hadoop/yarn-site.xml
<configuration>
  <property>
    <name>yarn.nodemanager.aux-services</name>
    <value>mapreduce_shuffle</value>
  </property>
  <property>
    <name>yarn.nodemanager.aux-services.mapreduce_shuffle.class</name>
    <value>org.apache.hadoop.mapred.ShuffleHandler</value>
  </property>
  <property>
    <name>yarn.resourcemanager.resource-tracker.address</name>
    <value>0.0.0.0:8031</value>
  </property>
  <property>
    <name>yarn.resourcemanager.address</name>
    <value>0.0.0.0:8032</value>
  </property>
  <property>
    <name>yarn.resourcemanager.scheduler.address</name>
    <value>0.0.0.0:8030</value>
  </property>
  <property>
    <name>yarn.resourcemanager.admin.address</name>
    <value>0.0.0.0:8033</value>
  </property>
  <property>
    <name>yarn.resourcemanager.webapp.address</name>
    <value>0.0.0.0:18088</value>
  </property>
</configuration>
InfluxDB already uses port 8088 on my machine, so the ResourceManager web UI is moved to 18088 here.
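Before starting the daemons it is worth checking that the chosen ports are actually free; a quick check with ss (any output means the port is already taken), assuming ss is available on the system:
ss -lnt | grep -E ':(9000|9870|18088)'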
vim /opt/hadoop/sbin/start-dfs.sh
Add at the top:
HDFS_DATANODE_USER=root
HADOOP_SECURE_DN_USER=root
HDFS_NAMENODE_USER=root
HDFS_SECONDARYNAMENODE_USER=root
vim /opt/hadoop/sbin/stop-dfs.sh
Add at the top:
HDFS_DATANODE_USER=root
HADOOP_SECURE_DN_USER=root
HDFS_NAMENODE_USER=root
HDFS_SECONDARYNAMENODE_USER=root
vim /opt/hadoop/sbin/start-yarn.sh
Add at the top:
YARN_RESOURCEMANAGER_USER=root
HADOOP_SECURE_DN_USER=root
YARN_NODEMANAGER_USER=root
vim /opt/hadoop/sbin/stop-yarn.sh
Add at the top:
YARN_RESOURCEMANAGER_USER=root
HADOOP_SECURE_DN_USER=root
YARN_NODEMANAGER_USER=root
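Instead of patching all four scripts, the same user variables can typically be declared once in /opt/hadoop/etc/hadoop/hadoop-env.sh, which the sbin scripts source; a sketch of the equivalent settings (HADOOP_SECURE_DN_USER is omitted because it is only needed for secure DataNodes and Hadoop 3 reports it as deprecated):
export HDFS_NAMENODE_USER=root
export HDFS_DATANODE_USER=root
export HDFS_SECONDARYNAMENODE_USER=root
export YARN_RESOURCEMANAGER_USER=root
export YARN_NODEMANAGER_USER=root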
vim /opt/hadoop/etc/hadoop/hadoop-env.sh
Set the JDK path; there must be no spaces around the assignment:
export JAVA_HOME=/usr/java/jdk1.8.0_201-amd64
Check passwordless SSH to localhost
ssh localhost
Configure passwordless SSH
[root@localhost ~]# ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa
[root@localhost ~]# cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
[root@localhost ~]# chmod 0600 ~/.ssh/authorized_keys
Check again
ssh localhost
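To make sure the login is truly passwordless (so the start scripts will not hang on a prompt), a non-interactive test can be run; with BatchMode, ssh fails instead of asking for a password:
ssh -o BatchMode=yes localhost 'echo passwordless ssh ok'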
Check the Hadoop version
/opt/hadoop/bin/hadoop version
/opt/hadoop/bin/hdfs namenode -format
/opt/hadoop/sbin/start-dfs.sh
/opt/hadoop/sbin/stop-dfs.sh
/opt/hadoop/sbin/start-all.sh
Check the running daemons
jps
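On a healthy single-node setup the jps listing should show roughly the following processes (PIDs will differ):
NameNode
DataNode
SecondaryNameNode
ResourceManager
NodeManager
Jps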
Verify
/opt/hadoop/bin/hadoop fs -ls /
curl http://localhost:9870
curl http://localhost:18088
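As a final smoke test, a file can be written to HDFS and listed back; the path /user/root/test below is just an example:
/opt/hadoop/bin/hdfs dfs -mkdir -p /user/root/test
/opt/hadoop/bin/hdfs dfs -put /opt/hadoop/etc/hadoop/core-site.xml /user/root/test/
/opt/hadoop/bin/hdfs dfs -ls /user/root/test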
Original article: https://www.cnblogs.com/wintersoft/p/11056759.html