Linux/Python学习论坛-京峰教育

 找回密码
 立即注册

一键登录:

搜索
热搜: 活动 交友 discuz
查看: 830|回复: 0

ScientificLinux6.4安装Hadoop2.3.0集群

[复制链接]

238

主题

288

帖子

1925

积分

超级版主

Rank: 8Rank: 8

积分
1925
QQ
发表于 2015-3-18 11:34:26 | 显示全部楼层 |阅读模式
准备了3台机器, 机器名和网卡配置如下
SL64Hadoop1
[root@SL64Hadoop1 ~]# ifconfig
eth1      Link encap:Ethernet  HWaddr 00:0C:29:98:3C:7F  
          inet addr:10.0.0.2  Bcast:10.0.0.255  Mask:255.255.255.0
          inet6 addr: fe80::20c:29ff:fe98:3c7f/64 Scope:Link
          UP BROADCAST RUNNING MULTICAST  MTU:1500  Metric:1
          RX packets:23678 errors:0 dropped:0 overruns:0 frame:0
          TX packets:15392 errors:0 dropped:0 overruns:0 carrier:0
          collisions:0 txqueuelen:1000
          RX bytes:24546012 (23.4 MiB)  TX bytes:2393336 (2.2 MiB)


lo        Link encap:Local Loopback  
          inet addr:127.0.0.1  Mask:255.0.0.0
          inet6 addr: ::1/128 Scope:Host
          UP LOOPBACK RUNNING  MTU:16436  Metric:1
          RX packets:0 errors:0 dropped:0 overruns:0 frame:0
          TX packets:0 errors:0 dropped:0 overruns:0 carrier:0
          collisions:0 txqueuelen:0
          RX bytes:0 (0.0 b)  TX bytes:0 (0.0 b)


[root@SL64Hadoop1 ~]# ▊




SL64Hadoop2
[root@SL64Hadoop2 ~]# ifconfig
eth1      Link encap:Ethernet  HWaddr 00:0C:29:A8:D8:44  
          inet addr:10.0.0.3  Bcast:10.0.0.255  Mask:255.255.255.0
          inet6 addr: fe80::20c:29ff:fea8:d844/64 Scope:Link
          UP BROADCAST RUNNING MULTICAST  MTU:1500  Metric:1
          RX packets:198 errors:0 dropped:0 overruns:0 frame:0
          TX packets:50 errors:0 dropped:0 overruns:0 carrier:0
          collisions:0 txqueuelen:1000
          RX bytes:20155 (19.6 KiB)  TX bytes:6566 (6.4 KiB)


lo        Link encap:Local Loopback  
          inet addr:127.0.0.1  Mask:255.0.0.0
          inet6 addr: ::1/128 Scope:Host
          UP LOOPBACK RUNNING  MTU:16436  Metric:1
          RX packets:0 errors:0 dropped:0 overruns:0 frame:0
          TX packets:0 errors:0 dropped:0 overruns:0 carrier:0
          collisions:0 txqueuelen:0
          RX bytes:0 (0.0 b)  TX bytes:0 (0.0 b)


[root@SL64Hadoop2 ~]# ▊




SL64Hadoop3
[root@SL64Hadoop3 ~]# ifconfig
eth1      Link encap:Ethernet  HWaddr 00:0C:29:C8:8D:17  
          inet addr:10.0.0.4  Bcast:10.0.0.255  Mask:255.255.255.0
          inet6 addr: fe80::20c:29ff:fec8:8d17/64 Scope:Link
          UP BROADCAST RUNNING MULTICAST  MTU:1500  Metric:1
          RX packets:195 errors:0 dropped:0 overruns:0 frame:0
          TX packets:54 errors:0 dropped:0 overruns:0 carrier:0
          collisions:0 txqueuelen:1000
          RX bytes:19975 (19.5 KiB)  TX bytes:6866 (6.7 KiB)


lo        Link encap:Local Loopback  
          inet addr:127.0.0.1  Mask:255.0.0.0
          inet6 addr: ::1/128 Scope:Host
          UP LOOPBACK RUNNING  MTU:16436  Metric:1
          RX packets:0 errors:0 dropped:0 overruns:0 frame:0
          TX packets:0 errors:0 dropped:0 overruns:0 carrier:0
          collisions:0 txqueuelen:0
          RX bytes:0 (0.0 b)  TX bytes:0 (0.0 b)


[root@SL64Hadoop3 ~]# ▊




准备用第一台机做namenode, 第二三台机做datanode


然后添加本地域名解析
[root@SL64Hadoop1 ~]# for i in `seq 1 3`;do echo 10.0.0.$((i+1)) SL64Hadoop$i >> /etc/hosts;done
[root@SL64Hadoop1 ~]# ▊


[root@SL64Hadoop2 ~]# for i in `seq 1 3`;do echo 10.0.0.$((i+1)) SL64Hadoop$i >> /etc/hosts;done
[root@SL64Hadoop2 ~]# ▊


[root@SL64Hadoop3 ~]# for i in `seq 1 3`;do echo 10.0.0.$((i+1)) SL64Hadoop$i >> /etc/hosts;done
[root@SL64Hadoop3 ~]# ▊




停掉防火墙和selinux
[root@SL64Hadoop1 ~]# service iptables stop
iptables: Flushing firewall rules:                         [  OK  ]
iptables: Setting chains to policy ACCEPT: filter          [  OK  ]
iptables: Unloading modules:                               [  OK  ]
[root@SL64Hadoop1 ~]# chkconfig iptables off
[root@SL64Hadoop1 ~]# sed -i 's/enforcing/disabled/g' /etc/sysconfig/selinux
[root@SL64Hadoop1 ~]# setenforce 0
[root@SL64Hadoop1 ~]# ▊


[root@SL64Hadoop2 ~]# service iptables stop
iptables: Flushing firewall rules:                         [  OK  ]
iptables: Setting chains to policy ACCEPT: filter          [  OK  ]
iptables: Unloading modules:                               [  OK  ]
[root@SL64Hadoop2 ~]# chkconfig iptables off
[root@SL64Hadoop2 ~]# sed -i 's/enforcing/disabled/g' /etc/sysconfig/selinux
[root@SL64Hadoop2 ~]# setenforce 0
[root@SL64Hadoop2 ~]# ▊


[root@SL64Hadoop3 ~]# service iptables stop
iptables: Flushing firewall rules:                         [  OK  ]
iptables: Setting chains to policy ACCEPT: filter          [  OK  ]
iptables: Unloading modules:                               [  OK  ]
[root@SL64Hadoop3 ~]# chkconfig iptables off
[root@SL64Hadoop3 ~]# sed -i 's/enforcing/disabled/g' /etc/sysconfig/selinux
[root@SL64Hadoop3 ~]# setenforce 0
[root@SL64Hadoop3 ~]# ▊




添加hadoop用户并生成密钥
[root@SL64Hadoop1 ~]# useradd hadoop
[root@SL64Hadoop1 ~]# su - hadoop
[hadoop@SL64Hadoop1 ~]$ ssh-keygen
Generating public/private rsa key pair.
Enter file in which to save the key (/home/hadoop/.ssh/id_rsa):
Created directory '/home/hadoop/.ssh'.
Enter passphrase (empty for no passphrase):
Enter same passphrase again:
Your identification has been saved in /home/hadoop/.ssh/id_rsa.
Your public key has been saved in /home/hadoop/.ssh/id_rsa.pub.
The key fingerprint is:
6d:1c:9c:fb:a0:8c:82:3b:27:a6:3f:e8:d2:e1:11:7d hadoop@SL64Hadoop1
The key's randomart image is:
+--[ RSA 2048]----+
|                 |
|         . .     |
|   .      +      |
|  . . E  o o     |
|   . .  S *      |
|  o.   o o o     |
| +.o. . o   .    |
|o B...           |
|+=o=             |
+-----------------+
[hadoop@SL64Hadoop1 ~]$ ▊


[root@SL64Hadoop2 ~]# useradd hadoop
[root@SL64Hadoop2 ~]# su - hadoop
[hadoop@SL64Hadoop2 ~]$ ssh-keygen
Generating public/private rsa key pair.
Enter file in which to save the key (/home/hadoop/.ssh/id_rsa):
Created directory '/home/hadoop/.ssh'.
Enter passphrase (empty for no passphrase):
Enter same passphrase again:
Your identification has been saved in /home/hadoop/.ssh/id_rsa.
Your public key has been saved in /home/hadoop/.ssh/id_rsa.pub.
The key fingerprint is:
7e:c9:58:a3:1e:34:91:e8:3a:67:11:d8:03:94:fa:0f hadoop@SL64Hadoop2
The key's randomart image is:
+--[ RSA 2048]----+
|   .o.           |
|    .+ . .       |
|   .. = o        |
|  .  . o .       |
|   .  o S o      |
|    E. + * o     |
|    ooo = +      |
|     +.. o       |
|        .        |
+-----------------+
[hadoop@SL64Hadoop2 ~]$ ▊


[root@SL64Hadoop3 ~]# useradd hadoop
[root@SL64Hadoop3 ~]# su - hadoop
[hadoop@SL64Hadoop3 ~]$ ssh-keygen
Generating public/private rsa key pair.
Enter file in which to save the key (/home/hadoop/.ssh/id_rsa):
Created directory '/home/hadoop/.ssh'.
Enter passphrase (empty for no passphrase):
Enter same passphrase again:
Your identification has been saved in /home/hadoop/.ssh/id_rsa.
Your public key has been saved in /home/hadoop/.ssh/id_rsa.pub.
The key fingerprint is:
0f:da:6e:f5:cb:4a:8a:7b:a8:cb:6e:69:76:6d:b8:b9 hadoop@SL64Hadoop3
The key's randomart image is:
+--[ RSA 2048]----+
|                 |
|                 |
|                 |
|                 |
|        S        |
|       o o.      |
|     ..+..o.     |
|   .= ++=o ..    |
|   ==+EBo ..o.   |
+-----------------+
[hadoop@SL64Hadoop3 ~]$ ▊




配置互相信任
[hadoop@SL64Hadoop1 ~]$ cat >> .ssh/authorized_keys << EOF
> ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAu8SKbYcDmQ6E/LdobUxJNmgJS2xJ0wS7k+yeVp108XJTpLKYpaKhKkmB5pr0/upsxLDLQt7q3ZUC7k/IrrxdbEb5RNr81AfsqKkW7zYoUASvhMJsnoX4bfvG+yBOUt0Bea4G3rTYBIa+Uq+mrCIvl4IjlxczLVRXj6UJa8kscicouwFUhqFMIerB4/+hWEnX5soLZVU5y1bS7UfTw03wKdEkSOgYH3GiNZdAT9rq8FEtGlOXC9Z5lLCey0lxqNOTiHNEVLrZQNdCsC2hnrw7tgDPI9fN5COvS1LvnBbDM6LQxvBJ3cqbs9F3IeHxUrHVvGq9/zt3/qEE7u1KLwjlyw== hadoop@SL64Hadoop1
> ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEA2ueDWNMObf5IjyL714wvNSTKcvUCztbL73/V3irbZALGE42LMGOOgsPr0RrLVATtYrLq1kMFvzJ4UCze+N52venCMUXJ18FDfk/al+2oHnVc/Uu+uy3BaCuGX8/VnGFFFVlSNcZvyFXE+pbyUHGAS+69E24jnwCr2s3jK3s/sAmDAHgCRKKWeWBgDFUAeLDk7psMJiPCtj2yYgdcaheV1Gtl8CnsLPjtpO/+hHlljc7SkV60q1Npi5MHdEIEi3ATsGNlmK/miPkBZdruwjgxqJ6+RxYUQ+Kk5HKCRKLfhIxIJyBWxs/h2iV+5uv0wNTfBbiPovJbUR749TtMjXEKwQ== hadoop@SL64Hadoop2
> ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAxINqC03CeFNegxNDVu9nXRFz7sIfZQ+K3Fh+GHMJyV3KMawuLHMlF9eNqFOW0bn2FMHxojcFdiNPnNqcvsSxvcrPAqFNYq1WMkeRxyJcgNJgdNRhJcr7CT07ZvSFbNLD0mgIPQ62vYOrHLHsRTVKX23aK47NiTvw8DPpU1ZELDZHN0yKXYFE8a7Q2jYcnn1I9dWnklO9WZSqt5P3XHGk/Q0/lw1xsAVdg367BeZbqIKTRADdC+l6LRcfX2Fn/WViPKcsqeZzPZ+WNrzik1flUEaZmTbEXFIw757Jx0voZUTJNcp68U7K51xrx6rmuFgkFVmRBEERxNsmIbtlTHfH1w== hadoop@SL64Hadoop3
> EOF
[hadoop@SL64Hadoop1 ~]$ chmod 600 .ssh/authorized_keys
[hadoop@SL64Hadoop1 ~]$ ▊


[hadoop@SL64Hadoop2 ~]$ cat >> .ssh/authorized_keys << EOF
> ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAu8SKbYcDmQ6E/LdobUxJNmgJS2xJ0wS7k+yeVp108XJTpLKYpaKhKkmB5pr0/upsxLDLQt7q3ZUC7k/IrrxdbEb5RNr81AfsqKkW7zYoUASvhMJsnoX4bfvG+yBOUt0Bea4G3rTYBIa+Uq+mrCIvl4IjlxczLVRXj6UJa8kscicouwFUhqFMIerB4/+hWEnX5soLZVU5y1bS7UfTw03wKdEkSOgYH3GiNZdAT9rq8FEtGlOXC9Z5lLCey0lxqNOTiHNEVLrZQNdCsC2hnrw7tgDPI9fN5COvS1LvnBbDM6LQxvBJ3cqbs9F3IeHxUrHVvGq9/zt3/qEE7u1KLwjlyw== hadoop@SL64Hadoop1
> ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEA2ueDWNMObf5IjyL714wvNSTKcvUCztbL73/V3irbZALGE42LMGOOgsPr0RrLVATtYrLq1kMFvzJ4UCze+N52venCMUXJ18FDfk/al+2oHnVc/Uu+uy3BaCuGX8/VnGFFFVlSNcZvyFXE+pbyUHGAS+69E24jnwCr2s3jK3s/sAmDAHgCRKKWeWBgDFUAeLDk7psMJiPCtj2yYgdcaheV1Gtl8CnsLPjtpO/+hHlljc7SkV60q1Npi5MHdEIEi3ATsGNlmK/miPkBZdruwjgxqJ6+RxYUQ+Kk5HKCRKLfhIxIJyBWxs/h2iV+5uv0wNTfBbiPovJbUR749TtMjXEKwQ== hadoop@SL64Hadoop2
> ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAxINqC03CeFNegxNDVu9nXRFz7sIfZQ+K3Fh+GHMJyV3KMawuLHMlF9eNqFOW0bn2FMHxojcFdiNPnNqcvsSxvcrPAqFNYq1WMkeRxyJcgNJgdNRhJcr7CT07ZvSFbNLD0mgIPQ62vYOrHLHsRTVKX23aK47NiTvw8DPpU1ZELDZHN0yKXYFE8a7Q2jYcnn1I9dWnklO9WZSqt5P3XHGk/Q0/lw1xsAVdg367BeZbqIKTRADdC+l6LRcfX2Fn/WViPKcsqeZzPZ+WNrzik1flUEaZmTbEXFIw757Jx0voZUTJNcp68U7K51xrx6rmuFgkFVmRBEERxNsmIbtlTHfH1w== hadoop@SL64Hadoop3
> EOF
[hadoop@SL64Hadoop2 ~]$ chmod 600 .ssh/authorized_keys
[hadoop@SL64Hadoop2 ~]$ ▊


[hadoop@SL64Hadoop3 ~]$ cat >> .ssh/authorized_keys << EOF
> ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAu8SKbYcDmQ6E/LdobUxJNmgJS2xJ0wS7k+yeVp108XJTpLKYpaKhKkmB5pr0/upsxLDLQt7q3ZUC7k/IrrxdbEb5RNr81AfsqKkW7zYoUASvhMJsnoX4bfvG+yBOUt0Bea4G3rTYBIa+Uq+mrCIvl4IjlxczLVRXj6UJa8kscicouwFUhqFMIerB4/+hWEnX5soLZVU5y1bS7UfTw03wKdEkSOgYH3GiNZdAT9rq8FEtGlOXC9Z5lLCey0lxqNOTiHNEVLrZQNdCsC2hnrw7tgDPI9fN5COvS1LvnBbDM6LQxvBJ3cqbs9F3IeHxUrHVvGq9/zt3/qEE7u1KLwjlyw== hadoop@SL64Hadoop1
> ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEA2ueDWNMObf5IjyL714wvNSTKcvUCztbL73/V3irbZALGE42LMGOOgsPr0RrLVATtYrLq1kMFvzJ4UCze+N52venCMUXJ18FDfk/al+2oHnVc/Uu+uy3BaCuGX8/VnGFFFVlSNcZvyFXE+pbyUHGAS+69E24jnwCr2s3jK3s/sAmDAHgCRKKWeWBgDFUAeLDk7psMJiPCtj2yYgdcaheV1Gtl8CnsLPjtpO/+hHlljc7SkV60q1Npi5MHdEIEi3ATsGNlmK/miPkBZdruwjgxqJ6+RxYUQ+Kk5HKCRKLfhIxIJyBWxs/h2iV+5uv0wNTfBbiPovJbUR749TtMjXEKwQ== hadoop@SL64Hadoop2
> ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAxINqC03CeFNegxNDVu9nXRFz7sIfZQ+K3Fh+GHMJyV3KMawuLHMlF9eNqFOW0bn2FMHxojcFdiNPnNqcvsSxvcrPAqFNYq1WMkeRxyJcgNJgdNRhJcr7CT07ZvSFbNLD0mgIPQ62vYOrHLHsRTVKX23aK47NiTvw8DPpU1ZELDZHN0yKXYFE8a7Q2jYcnn1I9dWnklO9WZSqt5P3XHGk/Q0/lw1xsAVdg367BeZbqIKTRADdC+l6LRcfX2Fn/WViPKcsqeZzPZ+WNrzik1flUEaZmTbEXFIw757Jx0voZUTJNcp68U7K51xrx6rmuFgkFVmRBEERxNsmIbtlTHfH1w== hadoop@SL64Hadoop3
> EOF
[hadoop@SL64Hadoop3 ~]$ chmod 600 .ssh/authorized_keys
[hadoop@SL64Hadoop3 ~]$ ▊




然后, 现在任意一台机器之间应该都可以免密码ssh登录了
[hadoop@SL64Hadoop1 ~]$ ssh SL64Hadoop1
The authenticity of host 'SL64Hadoop1 (10.0.0.2)' can't be established.
RSA key fingerprint is 23:27:20:48:3e:64:77:50:c3:d8:ad:31:2a:8d:9c:4f.
Are you sure you want to continue connecting (yes/no)? yes
Warning: Permanently added 'SL64Hadoop1,10.0.0.2' (RSA) to the list of known hosts.
Last login: Mon Mar 24 02:10:13 2014 from SL64Hadoop2
[hadoop@SL64Hadoop1 ~]$ logout
Connection to SL64Hadoop1 closed.
[hadoop@SL64Hadoop1 ~]$ ssh SL64Hadoop2
The authenticity of host 'SL64Hadoop2 (10.0.0.3)' can't be established.
RSA key fingerprint is 23:27:20:48:3e:64:77:50:c3:d8:ad:31:2a:8d:9c:4f.
Are you sure you want to continue connecting (yes/no)? yes
Warning: Permanently added 'SL64Hadoop2,10.0.0.3' (RSA) to the list of known hosts.
Last login: Mon Mar 24 02:10:21 2014 from SL64Hadoop2
[hadoop@SL64Hadoop2 ~]$ logout
Connection to SL64Hadoop2 closed.
[hadoop@SL64Hadoop1 ~]$ ssh SL64Hadoop3
The authenticity of host 'SL64Hadoop3 (10.0.0.4)' can't be established.
RSA key fingerprint is 23:27:20:48:3e:64:77:50:c3:d8:ad:31:2a:8d:9c:4f.
Are you sure you want to continue connecting (yes/no)? yes
Warning: Permanently added 'SL64Hadoop3,10.0.0.4' (RSA) to the list of known hosts.
Last login: Mon Mar 24 02:10:29 2014 from SL64Hadoop2
[hadoop@SL64Hadoop3 ~]$ logout
Connection to SL64Hadoop3 closed.
[hadoop@SL64Hadoop1 ~]$ ▊




解压, 配置hadoop
[hadoop@SL64Hadoop1 ~]$ tar xf hadoop-2.3.0.tar.gz
[hadoop@SL64Hadoop1 ~]$ cd hadoop-2.3.0
[hadoop@SL64Hadoop1 hadoop-2.3.0]$ vim etc/hadoop/hadoop-env.sh
export JAVA_HOME=/usr/lib/jvm/java-1.7.0-openjdk-1.7.0.9.x86_64/jre
[hadoop@SL64Hadoop1 hadoop-2.3.0]$ mkdir var
[hadoop@SL64Hadoop1 hadoop-2.3.0]$ vim etc/hadoop/core-site.xml
<configuration>
        <property>
                <name>fs.default.name</name>
                <value>hdfs://SL64Hadoop1:49000</value>
        </property>
        <property>
                <name>hadoop.tmp.dir</name>
                <value>/home/hadoop/hadoop-2.3.0/var</value>
        </property>
</configuration>
[hadoop@SL64Hadoop1 hadoop-2.3.0]$ cp etc/hadoop/mapred-site.xml.template etc/hadoop/mapred-site.xml
[hadoop@SL64Hadoop1 hadoop-2.3.0]$ vim etc/hadoop/mapred-site.xml
<configuration>
        <property>
                <name>mapred.job.tracker</name>
                <value>SL64Hadoop1:49001</value>
        </property>
        <property>
                <name>mapred.local.dir</name>
                <value>/home/hadoop/hadoop-2.3.0/var</value>
        </property>
</configuration>
[hadoop@SL64Hadoop1 hadoop-2.3.0]$ vim etc/hadoop/hdfs-site.xml
<configuration>
        <property>
                <name>dfs.name.dir</name>
                <value>/home/hadoop/name1</value>
                <description> </description>
        </property>
        <property>
                <name>dfs.data.dir</name>
                <value>/home/hadoop/data1</value>
                <description> </description>
        </property>
        <property>
                <name>dfs.replication</name>
                <value>2</value>
        </property>
</configuration>
[hadoop@SL64Hadoop1 hadoop-2.3.0]$ vim etc/hadoop/masters
SL64Hadoop1
[hadoop@SL64Hadoop1 hadoop-2.3.0]$ vim etc/hadoop/slaves
SL64Hadoop2
SL64Hadoop3
[hadoop@SL64Hadoop1 hadoop-2.3.0]$ ▊




拷贝配置好的hadoop到另外两台机子
[hadoop@SL64Hadoop1 ~]$ scp -r hadoop-2.3.0 SL64Hadoop2:
[hadoop@SL64Hadoop1 ~]$ scp -r hadoop-2.3.0 SL64Hadoop3:
[hadoop@SL64Hadoop1 ~]$ ▊




格式化文件系统为hdfs
[hadoop@SL64Hadoop1 hadoop-2.3.0]$ bin/hadoop namenode -format
DEPRECATED: Use of this script to execute hdfs command is deprecated.
Instead use the hdfs command for it.


14/03/24 03:22:43 INFO namenode.NameNode: STARTUP_MSG:
/************************************************************
STARTUP_MSG: Starting NameNode
STARTUP_MSG:   host = SL64Hadoop1/10.0.0.1
STARTUP_MSG:   args = [-format]
STARTUP_MSG:   version = 2.3.0
... omit ...
Re-format filesystem in Storage Directory /home/hadoop/name1 ? (Y or N) Y
14/03/24 03:22:51 INFO common.Storage: Storage directory /home/hadoop/name1 has been successfully formatted.
14/03/24 03:22:51 INFO namenode.FSImage: Saving image file /home/hadoop/name1/current/fsimage.ckpt_0000000000000000000 using no compression
14/03/24 03:22:51 INFO namenode.FSImage: Image file /home/hadoop/name1/current/fsimage.ckpt_0000000000000000000 of size 218 bytes saved in 0 seconds.
14/03/24 03:22:51 INFO namenode.NNStorageRetentionManager: Going to retain 1 images with txid >= 0
14/03/24 03:22:51 INFO util.ExitUtil: Exiting with status 0
14/03/24 03:22:51 INFO namenode.NameNode: SHUTDOWN_MSG:
/************************************************************
SHUTDOWN_MSG: Shutting down NameNode at SL64Hadoop1/10.0.0.1
************************************************************/
[hadoop@SL64Hadoop1 hadoop-2.3.0]$ ▊




开启hadoop啦
[hadoop@SL64Hadoop1 ~]$ hadoop-2.3.0/sbin/start-all.sh
This script is Deprecated. Instead use start-dfs.sh and start-yarn.sh
14/03/24 03:38:55 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
Starting namenodes on [SL64Hadoop1]
SL64Hadoop1: starting namenode, logging to /home/hadoop/hadoop-2.3.0/logs/hadoop-hadoop-namenode-SL64Hadoop1.out
SL64Hadoop2: starting datanode, logging to /home/hadoop/hadoop-2.3.0/logs/hadoop-hadoop-datanode-SL64Hadoop2.out
SL64Hadoop3: starting datanode, logging to /home/hadoop/hadoop-2.3.0/logs/hadoop-hadoop-datanode-SL64Hadoop3.out
Starting secondary namenodes [0.0.0.0]
The authenticity of host '0.0.0.0 (0.0.0.0)' can't be established.
RSA key fingerprint is 23:27:20:48:3e:64:77:50:c3:d8:ad:31:2a:8d:9c:4f.
Are you sure you want to continue connecting (yes/no)? no
0.0.0.0: Host key verification failed.
14/03/24 03:39:35 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
starting yarn daemons
starting resourcemanager, logging to /home/hadoop/hadoop-2.3.0/logs/yarn-hadoop-resourcemanager-SL64Hadoop1.out
SL64Hadoop3: starting nodemanager, logging to /home/hadoop/hadoop-2.3.0/logs/yarn-hadoop-nodemanager-SL64Hadoop3.out
SL64Hadoop2: starting nodemanager, logging to /home/hadoop/hadoop-2.3.0/logs/yarn-hadoop-nodemanager-SL64Hadoop2.out
[hadoop@SL64Hadoop1 ~]$ ▊




查看运行状态
[hadoop@SL64Hadoop1 ~]$ hadoop-2.3.0/bin/hadoop dfsadmin -report
DEPRECATED: Use of this script to execute hdfs command is deprecated.
Instead use the hdfs command for it.


14/03/24 03:40:31 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
Configured Capacity: 58277773312 (54.28 GB)
Present Capacity: 47921741824 (44.63 GB)
DFS Remaining: 47921692672 (44.63 GB)
DFS Used: 49152 (48 KB)
DFS Used%: 0.00%
Under replicated blocks: 0
Blocks with corrupt replicas: 0
Missing blocks: 0


-------------------------------------------------
Datanodes available: 2 (2 total, 0 dead)


Live datanodes:
Name: 10.0.0.4:50010 (SL64Hadoop3)
Hostname: SL64Hadoop3
Decommission Status : Normal
Configured Capacity: 29138886656 (27.14 GB)
DFS Used: 24576 (24 KB)
Non DFS Used: 5169172480 (4.81 GB)
DFS Remaining: 23969689600 (22.32 GB)
DFS Used%: 0.00%
DFS Remaining%: 82.26%
Configured Cache Capacity: 0 (0 B)
Cache Used: 0 (0 B)
Cache Remaining: 0 (0 B)
Cache Used%: 100.00%
Cache Remaining%: 0.00%
Last contact: Mon Mar 24 03:40:32 CST 2014




Name: 10.0.0.3:50010 (SL64Hadoop2)
Hostname: SL64Hadoop2
Decommission Status : Normal
Configured Capacity: 29138886656 (27.14 GB)
DFS Used: 24576 (24 KB)
Non DFS Used: 5186859008 (4.83 GB)
DFS Remaining: 23952003072 (22.31 GB)
DFS Used%: 0.00%
DFS Remaining%: 82.20%
Configured Cache Capacity: 0 (0 B)
Cache Used: 0 (0 B)
Cache Remaining: 0 (0 B)
Cache Used%: 100.00%
Cache Remaining%: 0.00%
Last contact: Mon Mar 24 03:40:32 CST 2014




[hadoop@SL64Hadoop1 ~]$ ▊




新建一个文件夹
[hadoop@SL64Hadoop1 ~]$ hadoop-2.3.0/bin/hadoop fs -mkdir /cc
14/03/24 03:48:09 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
[hadoop@SL64Hadoop1 ~]$ ▊




传一个文件上去
[hadoop@SL64Hadoop1 ~]$ hadoop-2.3.0/bin/hadoop fs -put hadoop /cc
14/03/24 03:48:09 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
[hadoop@SL64Hadoop1 ~]$ ▊




用web查看状态和文件系统的文件





回复

使用道具 举报

您需要登录后才可以回帖 登录 | 立即注册

本版积分规则

QQ|小黑屋|京峰教育,只为有梦想的人 ( 京ICP备15013173号 )

GMT+8, 2020-1-26 07:50 , Processed in 0.032095 second(s), 12 queries , Redis On.

快速回复 返回顶部 返回列表