第1关:配置开发环境 - JavaJDK的配置 解题思路:

mkdir /app
cd /opt
tar -zxvf jdk-8u171-linux-x64.tar.gz
mv jdk1.8.0_171/ /app

vim /etc/profile
# 在文件末尾添加以下内容:
JAVA_HOME=/app/jdk1.8.0_171
CLASSPATH=.:$JAVA_HOME/lib/tools.jar
PATH=$JAVA_HOME/bin:$PATH
export JAVA_HOME CLASSPATH PATH

保存方法:处于编辑模式下先按 ESC 键,然后按 shift+:,最后输入 wq 后回车,就可以保存修改的配置文件。

# 使配置生效
source /etc/profile

第2关:配置开发环境 - Hadoop安装与伪分布式集群搭建 解题思路:

cd /opt
tar -zxvf hadoop-3.1.0.tar.gz -C /app
cd /app
mv hadoop-3.1.0/ hadoop3.1

# 设置SSH免密登录
ssh-keygen -t rsa -P ''
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
chmod 600 ~/.ssh/authorized_keys

vim /etc/ssh/sshd_config
# 修改ssh配置
RSAAuthentication yes                         # 启用 RSA 认证
PubkeyAuthentication yes                      # 启用公钥私钥配对认证方式
AuthorizedKeysFile %h/.ssh/authorized_keys    # 公钥文件路径

source /etc/profile

# 修改Hadoop的配置文件
cd /app/hadoop3.1/etc/hadoop/

# hadoop-env.sh 配置
vi hadoop-env.sh
# The java implementation to use.
#export JAVA_HOME=${JAVA_HOME}
export JAVA_HOME=/app/jdk1.8.0_171

# yarn-env.sh 配置
vi yarn-env.sh
export JAVA_HOME=/app/jdk1.8.0_171

# core-site.xml 配置
vi core-site.xml
<configuration>
  <property>
    <name>fs.default.name</name>
    <value>hdfs://localhost:9000</value>
    <description>HDFS的URI,文件系统://namenode标识:端口号</description>
  </property>
  <property>
    <name>hadoop.tmp.dir</name>
    <value>/usr/hadoop/tmp</value>
    <description>namenode上本地的hadoop临时文件夹</description>
  </property>
</configuration>

# hdfs-site.xml 文件配置
vi hdfs-site.xml
<configuration>
  <property>
    <name>dfs.name.dir</name>
    <value>/usr/hadoop/hdfs/name</value>
    <description>namenode上存储hdfs名字空间元数据</description>
  </property>
  <property>
    <name>dfs.data.dir</name>
    <value>/usr/hadoop/hdfs/data</value>
    <description>datanode上数据块的物理存储位置</description>
  </property>
  <property>
    <name>dfs.replication</name>
    <value>1</value>
  </property>
</configuration>

# mapred-site.xml 文件配置
vi mapred-site.xml
<configuration>
  <property>
    <name>mapreduce.framework.name</name>
    <value>yarn</value>
  </property>
</configuration>

# yarn-site.xml 配置
vi yarn-site.xml
<configuration>
  <property>
    <name>yarn.nodemanager.aux-services</name>
    <value>mapreduce_shuffle</value>
  </property>
  <property>
    <name>yarn.resourcemanager.webapp.address</name>
    <value>192.168.2.10:8099</value>
    <description>这个地址是mr管理界面的</description>
  </property>
</configuration>

# 创建文件夹
mkdir -p /usr/hadoop/tmp
mkdir /usr/hadoop/hdfs
mkdir /usr/hadoop/hdfs/data
mkdir /usr/hadoop/hdfs/name

# 将Hadoop添加到环境变量中
vim /etc/profile
# set Hadoop Environment
export HADOOP_HOME=/app/hadoop3.1
export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin

source /etc/profile

# 格式化
hadoop namenode -format

cd /app/hadoop3.1/sbin

vi start-dfs.sh
# 在脚本顶部添加以下内容:
#!/usr/bin/env bash
HDFS_DATANODE_USER=root
HADOOP_SECURE_DN_USER=hdfs
HDFS_NAMENODE_USER=root
HDFS_SECONDARYNAMENODE_USER=root

vi stop-dfs.sh
#!/usr/bin/env bash
HDFS_DATANODE_USER=root
HADOOP_SECURE_DN_USER=hdfs
HDFS_NAMENODE_USER=root
HDFS_SECONDARYNAMENODE_USER=root

vi start-yarn.sh
#!/usr/bin/env bash
YARN_RESOURCEMANAGER_USER=root
HADOOP_SECURE_DN_USER=yarn
YARN_NODEMANAGER_USER=root

vi stop-yarn.sh
#!/usr/bin/env bash
YARN_RESOURCEMANAGER_USER=root
HADOOP_SECURE_DN_USER=yarn
YARN_NODEMANAGER_USER=root

# 启动Hadoop
start-dfs.sh