
002_How to Install JDK and Set Up a Hadoop Cluster Environment_Both Machines Must Be Configured Identically



1. Installing JDK 1.6 on Linux and configuring environment variables

# Install JDK 1.6

cd /usr/local/
./jdk-6u45-linux-x64.bin

# Configure the environment variables and apply them

[root@master local]# vim /etc/profile.d/jdk.sh
export JAVA_HOME=/usr/local/jdk6
export PATH=$JAVA_HOME/bin:$PATH
export CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar
[root@master local]# source /etc/profile
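The jdk-6u45 installer normally unpacks into a versioned directory such as /usr/local/jdk1.6.0_45, while JAVA_HOME above points to /usr/local/jdk6, so a rename or symlink is presumably in place. A minimal sketch, assuming the default unpack directory name (not shown in the original), followed by a quick check that the variables took effect:

# Hypothetical: link the versioned unpack directory to the path used by JAVA_HOME
ln -s /usr/local/jdk1.6.0_45 /usr/local/jdk6

# Verify the JDK and the environment variables
java -version        # should report java version "1.6.0_45"
echo $JAVA_HOME      # should print /usr/local/jdk6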

2. Extracting Hadoop 1.2 and creating the tmp directory

tar -zxvf hadoop-1.2.1-bin.tar.gz
[root@master local]# cd hadoop-1.2.1/
[root@master hadoop-1.2.1]# mkdir tmp

3. Modifying the Hadoop configuration files (6 files)

[root@master conf]# cd /usr/local/hadoop-1.2.1/conf/

    masters

[root@master conf]# cat masters
master

    slaves

[root@master conf]# cat slaves
slave1
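The masters and slaves files refer to the nodes by hostname, so both machines must be able to resolve master and slave1. A minimal sketch of the /etc/hosts entries, assuming the master address shown in the startup log below and a hypothetical address for slave1 (the original does not show slave1's IP):

# /etc/hosts on both master and slave1
192.168.1.34   master
192.168.1.35   slave1    # hypothetical address, replace with the real one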

    core-site.xml

[root@master conf]# cat core-site.xml
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!-- Put site-specific property overrides in this file. -->
<configuration>
        <property>
                <name>hadoop.tmp.dir</name>
                <value>/usr/local/hadoop-1.2.1/tmp</value>
        </property>
        <property>
                <name>fs.default.name</name>
                <value>hdfs://192.168.1.60:9000</value>
        </property>
</configuration>

    mapred-site.xml

[root@master conf]# cat mapred-site.xml
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!-- Put site-specific property overrides in this file. -->
<configuration>
        <property>
                <name>mapred.job.tracker</name>
                <value>http://192.168.1.60:9001</value>
        </property>
</configuration>

    hdfs-site.xml

[root@master conf]# cat hdfs-site.xml
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!-- Put site-specific property overrides in this file. -->
<configuration>
        <property>
                <name>dfs.replication</name>
                <value>3</value>
        </property>
</configuration>

    hadoop-env.sh

# Add the JAVA_HOME setting at the end of hadoop-env.sh

export JAVA_HOME=/usr/local/jdk6

# Copy the configuration files to slave1 via SCP

scp -rp masters slave1:/usr/local/hadoop-1.2.1/conf/
scp -rp slaves slave1:/usr/local/hadoop-1.2.1/conf/
scp -rp core-site.xml slave1:/usr/local/hadoop-1.2.1/conf/
scp -rp mapred-site.xml slave1:/usr/local/hadoop-1.2.1/conf/
scp -rp hdfs-site.xml slave1:/usr/local/hadoop-1.2.1/conf/
scp -rp hadoop-env.sh slave1:/usr/local/hadoop-1.2.1/conf/
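start-all.sh launches the daemons on slave1 over SSH, so passwordless SSH from master to slave1 (and to master itself) is normally prepared beforehand; the original does not show this step. A minimal sketch, assuming the root account is used as in the rest of the article:

# On master: generate a key pair and copy the public key to both hosts
ssh-keygen -t rsa
ssh-copy-id root@master
ssh-copy-id root@slave1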

4. Starting Hadoop

    (1) Format the NameNode before the first start

[root@master bin]# ./hadoop namenode -format
17/04/11 01:41:04 INFO namenode.NameNode: STARTUP_MSG: 
/************************************************************
STARTUP_MSG: Starting NameNode
STARTUP_MSG:   host = master/192.168.1.34
STARTUP_MSG:   args = [-format]
STARTUP_MSG:   version = 1.2.1
STARTUP_MSG:   build = https://svn.apache.org/repos/asf/hadoop/common/branches/branch-1.2 -r 1503152; compiled by 'mattf' on Mon Jul 22 15:23:09 PDT 2013
STARTUP_MSG:   java = 1.6.0_45
************************************************************/
17/04/11 01:41:04 INFO util.GSet: Computing capacity for map BlocksMap
17/04/11 01:41:04 INFO util.GSet: VM type       = 64-bit
17/04/11 01:41:04 INFO util.GSet: 2.0% max memory = 1013645312
17/04/11 01:41:04 INFO util.GSet: capacity      = 2^21 = 2097152 entries
17/04/11 01:41:04 INFO util.GSet: recommended=2097152, actual=2097152
17/04/11 01:41:04 INFO namenode.FSNamesystem: fsOwner=root
17/04/11 01:41:04 INFO namenode.FSNamesystem: supergroup=supergroup
17/04/11 01:41:04 INFO namenode.FSNamesystem: isPermissionEnabled=true
17/04/11 01:41:04 INFO namenode.FSNamesystem: dfs.block.invalidate.limit=100
17/04/11 01:41:04 INFO namenode.FSNamesystem: isAccessTokenEnabled=false accessKeyUpdateInterval=0 min(s), accessTokenLifetime=0 min(s)
17/04/11 01:41:04 INFO namenode.FSEditLog: dfs.namenode.edits.toleration.length = 0
17/04/11 01:41:04 INFO namenode.NameNode: Caching file names occuring more than 10 times 
17/04/11 01:41:04 INFO common.Storage: Image file /usr/local/hadoop-1.2.1/tmp/dfs/name/current/fsimage of size 110 bytes saved in 0 seconds.
17/04/11 01:41:04 INFO namenode.FSEditLog: closing edit log: position=4, editlog=/usr/local/hadoop-1.2.1/tmp/dfs/name/current/edits
17/04/11 01:41:04 INFO namenode.FSEditLog: close success: truncate to 4, editlog=/usr/local/hadoop-1.2.1/tmp/dfs/name/current/edits
17/04/11 01:41:04 INFO common.Storage: Storage directory /usr/local/hadoop-1.2.1/tmp/dfs/name has been successfully formatted.
17/04/11 01:41:04 INFO namenode.NameNode: SHUTDOWN_MSG: 
/************************************************************
SHUTDOWN_MSG: Shutting down NameNode at master/192.168.1.34
************************************************************/

    (2) Start Hadoop

[root@master bin]# ./start-all.sh 
starting namenode, logging to /usr/local/hadoop-1.2.1/libexec/../logs/hadoop-root-namenode-master.out
slave1: starting datanode, logging to /usr/local/hadoop-1.2.1/libexec/../logs/hadoop-root-datanode-slave1.out
The authenticity of host 'master (192.168.1.34)' can't be established.
RSA key fingerprint is e3:c2:f6:71:e2:e7:97:00:f1:b7:c2:86:42:7f:5d:2c.
Are you sure you want to continue connecting (yes/no)? yes
master: Warning: Permanently added 'master,192.168.1.34' (RSA) to the list of known hosts.
master: starting secondarynamenode, logging to /usr/local/hadoop-1.2.1/libexec/../logs/hadoop-root-secondarynamenode-master.out
starting jobtracker, logging to /usr/local/hadoop-1.2.1/libexec/../logs/hadoop-root-jobtracker-master.out
slave1: starting tasktracker, logging to /usr/local/hadoop-1.2.1/libexec/../logs/hadoop-root-tasktracker-slave1.out

5. Verifying that the Hadoop cluster started successfully

    (1) Check the processes on master with jps

[root@master bin]# jps
2461 JobTracker
2227 NameNode
2383 SecondaryNameNode
2603 Jps

    (2) Check the processes on slave1 with jps

[root@slave1 local]# jps
2068 Jps
1982 TaskTracker
1908 DataNode
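Besides jps, the built-in web interfaces offer another quick check (standard Hadoop 1.x ports, not shown in the original): the NameNode UI listens on 50070 and the JobTracker UI on 50030.

# From any machine that can reach the master
curl -I http://master:50070     # NameNode web UI
curl -I http://master:50030     # JobTracker web UI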

    (3) Run HDFS commands on master

[root@master bin]# ./hadoop fs -ls /
Found 1 items
drwxr-xr-x   - root supergroup          0 2017-04-11 01:43 /usr
[root@master bin]# ./hadoop fs -put /etc/passwd /
[root@master bin]# ./hadoop fs -ls /
Found 2 items
-rw-r--r--   3 root supergroup       1380 2017-04-11 01:48 /passwd
drwxr-xr-x   - root supergroup          0 2017-04-11 01:43 /usr
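To confirm the write actually round-trips, the uploaded file can also be read back; a minimal sketch using the same hadoop fs CLI (output not shown in the original):

[root@master bin]# ./hadoop fs -cat /passwd              # should print the contents of /etc/passwd
[root@master bin]# ./hadoop fs -get /passwd /tmp/passwd.copy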

 

#Game Over

