sudo adduser hadoop
sudo gedit /etc/sudoers
# User privilege specification root ALL=(ALL:ALL) ALL hadoop ALL=(ALL:ALL) ALL
su hadoop
127.0.0.1 localhost 127.0.1.1 localhost.localdomain localhost 192.168.190.128 master 192.168.190.129 slave1 192.168.190.131 slave2 # The following lines are desirable for IPv6 capable hosts ::1 ip6-localhost ip6-loopback fe00::0 ip6-localnet ff00::0 ip6-mcastprefix ff02::1 ip6-allnodes ff02::2 ip6-allrouters
jdk-7u76-linux-x64.tar.gz
tar -zxvf jdk-7u76-linux-x64.tar.gz
sudo gedit /etc/profile
#set java environment
export JAVA_HOME=/usr/lib/jvm/jdk1.7.0_76
export JRE_HOME=${JAVA_HOME}/jre
export CLASSPATH=.:${JAVA_HOME}/lib:${JRE_HOME}/lib
export PATH=${JAVA_HOME}/bin:/home/hadoop/hadoop-2.7.1/bin:/home/hadoop/hadoop-2.7.1/sbin:/home/hadoop/hbase-1.2.4/bin:$PATH
# NOTE(review): do NOT chmod 777 /etc/profile — a world-writable /etc/profile lets any
# local user inject commands into every login shell. Keep the root-owned default mode:
sudo chmod 644 /etc/profile
java -version
java version "1.7.0_76" Java(TM) SE Runtime Environment (build 1.7.0_76-b13) Java HotSpot(TM) 64-Bit Server VM (build 24.76-b04, mixed mode)
sudo update-alternatives --install /usr/bin/java java /usr/lib/jvm/jdk1.7.0_76/bin/java 300
sudo update-alternatives --install /usr/bin/javac javac /usr/lib/jvm/jdk1.7.0_76/bin/javac 300
sudo update-alternatives --config java
sudo apt-get install ssh
ls -a -l
drwxr-xr-x 9 root root 4096 Feb 1 02:41 . drwxr-xr-x 4 root root 4096 Jan 27 01:50 .. drwx------ 3 root root 4096 Jan 31 03:35 .cache drwxr-xr-x 5 root root 4096 Jan 31 03:35 .config drwxrwxrwx 11 hadoop root 4096 Feb 1 00:18 hadoop-2.7.1 drwxrwxrwx 8 hadoop root 4096 Feb 1 02:47 hbase-1.2.4 drwxr-xr-x 3 root root 4096 Jan 31 03:35 .local drwxr-xr-x 2 root root 4096 Jan 31 14:47 software drwxr-xr-x 2 hadoop root 4096 Feb 1 00:01 .ssh
mkdir -p ~/.ssh     # create as the hadoop user itself — no sudo/chown dance needed
chmod 700 ~/.ssh    # sshd StrictModes rejects a group/world-accessible ~/.ssh (the 755 listing above would break key login)
ssh-keygen -t rsa
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys   # original had '~/ssh/' (missing dot) — that path does not exist
chmod 600 ~/.ssh/authorized_keys
sudo gedit authorized_keys
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC743oCP2Voa3deHBkA+N7cYJC4Jv2Tj8Z6tGVWCxg0NJl3yKwYIfgC9RiyFyRWcl5byI34Oe7dYtf+9UtvH85hca1/IDP1m02NLPXsIJmcPS4uNgMLfsWg/F/C3Bqut7i4t6eHwO/FRhjeIBu5O/9GHoXk/ykhgJIbyh8hhAlcke6Jtt80I63r2+3DnlHlNzw1sQRJp2qFRgyV61j5DfuYrhfd+/eTkFtXc7izLVCkC7x6hMo4qIMQ0GbSx9iqTO0tO1skGYLhCX3Cbo3hf4i19RUKt168eg/X2l1qIvf+vgxQudM3lZa9/pxDieK5p8c8xupcaoR67jMFLWLl3EUb hadoop@master ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDQ1Jf6ds9Y+KlQNIHq+pDGxM1OsF+RSXcgLDdlzw+qGK7NT28bRK6QUCm3kJqa/ekEkqDHdWegtiQVriOsY4A2fABkRsjiOrnc4QYQ/rqB06JuvshwToB91qwmV/J/o3mgsentJLfmBUpSyW8rRxQV+tYtqQ+gipL7x0WGUBRQYRhJJZKAxqgLGE3Md/siYjn8Ge4G31rrTcx9QDVcfTCtHkvqca0b0f98Y+U9Fu6w4Ari28oLxFTlzuCsebIPMzE4uWQuXT+2kMz0HunpejSDrLkrFqO1OKUs0peZrUVRmYBY5flt4tnV0XOQBYClzxieev/ppgH8AeB4Qs/zXB25 hadoop@slave1 ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDI8PpgXt94SAEtUhvt2JmlO4Ed11r1WLoN1Eha5vI3qqm7cgT4yS7lvxL53Dc5G7R0n4Jwsf2hTvD9JF77vEIxp5g3xQGa7HafbIMzQupuCyAHqY+v0RTepaBUNGkFz0uKv+Nq8bzjfSUv4HgRorW7Yzqaa0LjEvHiI8uVZA7dcZ6Ba1on/TlKVVzz3MdZulcn7+AzjTPTG8hPQaELQqws1UuIYIUanOSqFPCADart/pJpAzGkqek0LBRSvI+U+P0oSrz9aX3wVOUQknheinM4tmuo3TGYionjeV1jqroCxBbZaeqLLwnpA0YZBl/ZMnJHkeSITypmgZWszh3ylC8p hadoop@slave2
ssh -V
OpenSSH_7.2p2 Ubuntu-4ubuntu2.1, OpenSSL 1.0.2g 1 Mar 2016
ssh localhost
Welcome to Ubuntu 16.04 LTS (GNU/Linux 4.4.0-21-generic x86_64) * Documentation: https://help.ubuntu.com/ 458 packages can be updated. 171 updates are security updates. The programs included with the Ubuntu system are free software; the exact distribution terms for each program are described in the individual files in /usr/share/doc/*/copyright. Ubuntu comes with ABSOLUTELY NO WARRANTY, to the extent permitted by applicable law. The programs included with the Ubuntu system are free software; the exact distribution terms for each program are described in the individual files in /usr/share/doc/*/copyright. Ubuntu comes with ABSOLUTELY NO WARRANTY, to the extent permitted by applicable law. Last login: Wed Feb 1 00:02:53 2017 from 127.0.0.1 To run a command as administrator (user "root"), use "sudo <command>". See "man sudo_root" for details.
ssh slave1
Welcome to Ubuntu 16.04 LTS (GNU/Linux 4.4.0-59-generic x86_64) * Documentation: https://help.ubuntu.com/ 312 packages can be updated. 10 updates are security updates. The programs included with the Ubuntu system are free software; the exact distribution terms for each program are described in the individual files in /usr/share/doc/*/copyright. Ubuntu comes with ABSOLUTELY NO WARRANTY, to the extent permitted by applicable law. The programs included with the Ubuntu system are free software; the exact distribution terms for each program are described in the individual files in /usr/share/doc/*/copyright. Ubuntu comes with ABSOLUTELY NO WARRANTY, to the extent permitted by applicable law. Last login: Wed Feb 1 00:03:30 2017 from 192.168.190.131
tar -zxvf hadoop-2.7.1.tar.gz
total 120 drwxr-xr-x 19 hadoop hadoop 4096 Feb 1 02:28 . drwxr-xr-x 4 root root 4096 Jan 31 14:24 .. -rw------- 1 hadoop hadoop 1297 Feb 1 03:37 .bash_history -rw-r--r-- 1 hadoop hadoop 220 Jan 31 14:24 .bash_logout -rw-r--r-- 1 hadoop hadoop 3771 Jan 31 14:24 .bashrc drwx------ 3 root root 4096 Jan 31 22:49 .cache drwx------ 5 root root 4096 Jan 31 23:59 .config drwx------ 3 root root 4096 Jan 31 23:59 .dbus drwxr-xr-x 2 hadoop hadoop 4096 Feb 1 00:55 Desktop -rw-r--r-- 1 hadoop hadoop 25 Feb 1 00:55 .dmrc drwxr-xr-x 2 hadoop hadoop 4096 Feb 1 00:55 Documents drwxr-xr-x 2 hadoop hadoop 4096 Feb 1 00:55 Downloads -rw-r--r-- 1 hadoop hadoop 8980 Jan 31 14:24 examples.desktop drwx------ 2 hadoop hadoop 4096 Feb 1 00:56 .gconf drwx------ 3 hadoop hadoop 4096 Feb 1 00:55 .gnupg drwxrwxrwx 11 hadoop hadoop 4096 Feb 1 00:30 hadoop-2.7.1 drwxrwxrwx 8 hadoop hadoop 4096 Feb 1 02:44 hbase-1.2.4 -rw------- 1 hadoop hadoop 318 Feb 1 00:56 .ICEauthority drwxr-xr-x 3 root root 4096 Jan 31 22:49 .local drwxr-xr-x 2 hadoop hadoop 4096 Feb 1 00:55 Music drwxr-xr-x 2 hadoop hadoop 4096 Feb 1 00:55 Pictures -rw-r--r-- 1 hadoop hadoop 675 Jan 31 14:24 .profile drwxr-xr-x 2 hadoop hadoop 4096 Feb 1 00:55 Public drwx------ 2 hadoop hadoop 4096 Feb 1 00:02 .ssh drwxr-xr-x 2 hadoop hadoop 4096 Feb 1 00:55 Templates drwxr-xr-x 2 hadoop hadoop 4096 Feb 1 00:55 Videos -rw------- 1 hadoop hadoop 51 Feb 1 00:55 .Xauthority -rw------- 1 hadoop hadoop 1492 Feb 1 00:58 .xsession-errors
sudo gedit /etc/profile
#set java environment
export JAVA_HOME=/usr/lib/jvm/jdk1.7.0_76
export JRE_HOME=${JAVA_HOME}/jre
export CLASSPATH=.:${JAVA_HOME}/lib:${JRE_HOME}/lib
export PATH=${JAVA_HOME}/bin:/home/hadoop/hadoop-2.7.1/bin:/home/hadoop/hadoop-2.7.1/sbin:/home/hadoop/hbase-1.2.4/bin:$PATH
# hadoop-env.sh — the java implementation to use.
# NOTE(review): the original set HADOOP_HOME=/home/master/hadoop-2.7.1, but the install
# lives in /home/hadoop/hadoop-2.7.1 everywhere else in this guide (the stray
# /home/master/... contrib entries in the startup classpath below come from this typo).
export JAVA_HOME=/usr/lib/jvm/jdk1.7.0_76
export HADOOP_HOME=/home/hadoop/hadoop-2.7.1
export PATH=$PATH:${HADOOP_HOME}/bin
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!-- Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. See accompanying LICENSE file. -->
<!-- Put site-specific property overrides in this file: core-site.xml -->
<configuration>
  <!-- fs.default.name is deprecated in Hadoop 2.x; fs.defaultFS is the current key
       (the old name still works but logs a deprecation warning). -->
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://master:9000</value>
  </property>
  <!-- NOTE(review): /tmp is wiped on reboot, and HDFS metadata defaults under
       hadoop.tmp.dir (the format log below shows /tmp/dfs/name) — the whole
       filesystem is lost on restart. Use a persistent path such as
       /home/hadoop/tmp for anything beyond a throwaway test cluster. -->
  <property>
    <name>hadoop.tmp.dir</name>
    <value>/tmp</value>
  </property>
</configuration>
<?xml version="1.0" encoding="UTF-8"?> <?xml-stylesheet type="text/xsl" href="configuration.xsl"?> <!-- Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. See accompanying LICENSE file. --> <!-- Put site-specific property overrides in this file. --> <configuration> <property> <name>dfs.replication</name> <value>2</value> </property> </configuration>
cp mapred-site.xml.template mapred-site.xml
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!-- Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. See accompanying LICENSE file. -->
<!-- Put site-specific property overrides in this file: mapred-site.xml -->
<configuration>
  <!-- NOTE(review): the original set mapred.job.tracker=master:9001, an MRv1/JobTracker
       key that YARN ignores. Without mapreduce.framework.name=yarn, MapReduce jobs
       silently run in LOCAL mode instead of on the cluster. -->
  <property>
    <name>mapreduce.framework.name</name>
    <value>yarn</value>
  </property>
</configuration>
master
slave1 slave2
scp -r hadoop-2.7.1 hadoop@slave1:~/ scp -r hadoop-2.7.1 hadoop@slave2:~/
hadoop@master:~$ hadoop namenode -format
hadoop: command not found
source /etc/profile
hadoop@master:~$ hadoop namenode -format DEPRECATED: Use of this script to execute hdfs command is deprecated. Instead use the hdfs command for it. 17/02/02 02:59:44 INFO namenode.NameNode: STARTUP_MSG: /************************************************************ STARTUP_MSG: Starting NameNode STARTUP_MSG: host = master/192.168.190.128 STARTUP_MSG: args = [-format] STARTUP_MSG: version = 2.7.1 STARTUP_MSG: classpath = /home/hadoop/hadoop-2.7.1/etc/hadoop:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/log4j-1.2.17.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/jsr305-3.0.0.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/apacheds-i18n-2.0.0-M15.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/commons-cli-1.2.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/jaxb-impl-2.2.3-1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/commons-beanutils-1.7.0.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/asm-3.2.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/jaxb-api-2.2.2.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/java-xmlbuilder-0.4.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/commons-net-3.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/commons-io-2.4.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/servlet-api-2.5.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/protobuf-java-2.5.0.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/netty-3.6.2.Final.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/curator-recipes-2.7.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/jetty-util-6.1.26.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/slf4j-api-1.7.10.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/jackson-core-asl-1.9.13.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/commons-configuration-1.6.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/htrace-core-3.1.0-incubating.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/activation-1.1.jar:/home/hadoop/hadoop
-2.7.1/share/hadoop/common/lib/paranamer-2.3.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/commons-digester-1.8.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/jettison-1.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/jetty-6.1.26.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/hamcrest-core-1.3.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/commons-lang-2.6.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/hadoop-auth-2.7.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/avro-1.7.4.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/api-util-1.0.0-M20.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/guava-11.0.2.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/jsch-0.1.42.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/gson-2.2.4.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/apacheds-kerberos-codec-2.0.0-M15.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/snappy-java-1.0.4.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/curator-client-2.7.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/hadoop-annotations-2.7.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/stax-api-1.0-2.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/commons-httpclient-3.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/httpclient-4.2.5.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/jersey-core-1.9.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/curator-framework-2.7.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/xz-1.0.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/commons-math3-3.1.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/commons-beanutils-core-1.8.0.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/jackson-jaxrs-1.9.13.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/xmlenc-0.52.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/commons-logging-1.1.3.jar:
/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/jackson-xc-1.9.13.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/jets3t-0.9.0.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/api-asn1-api-1.0.0-M20.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/commons-compress-1.4.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/jackson-mapper-asl-1.9.13.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/jsp-api-2.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/jersey-json-1.9.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/mockito-all-1.8.5.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/commons-codec-1.4.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/jersey-server-1.9.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/httpcore-4.2.5.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/commons-collections-3.2.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/zookeeper-3.4.6.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/junit-4.11.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/hadoop-common-2.7.1-tests.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/hadoop-nfs-2.7.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/common/hadoop-common-2.7.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/hdfs:/home/hadoop/hadoop-2.7.1/share/hadoop/hdfs/lib/log4j-1.2.17.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/hdfs/lib/jsr305-3.0.0.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/hdfs/lib/commons-cli-1.2.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/hdfs/lib/asm-3.2.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/hdfs/lib/commons-io-2.4.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/hdfs/lib/servlet-api-2.5.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/hdfs/lib/protobuf-java-2.5.0.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/hdfs/lib/netty-3.6.2.Final.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/hdfs/lib/jetty-util-6.1.26.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/hdfs/lib/jackson-core-asl-1.9.13.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/hdfs/lib/xml-a
pis-1.3.04.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/hdfs/lib/htrace-core-3.1.0-incubating.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/hdfs/lib/netty-all-4.0.23.Final.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/hdfs/lib/jetty-6.1.26.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/hdfs/lib/xercesImpl-2.9.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/hdfs/lib/commons-lang-2.6.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/hdfs/lib/guava-11.0.2.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/hdfs/lib/jersey-core-1.9.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/hdfs/lib/xmlenc-0.52.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/hdfs/lib/commons-logging-1.1.3.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/hdfs/lib/commons-daemon-1.0.13.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/hdfs/lib/jackson-mapper-asl-1.9.13.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/hdfs/lib/commons-codec-1.4.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/hdfs/lib/leveldbjni-all-1.8.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/hdfs/lib/jersey-server-1.9.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/hdfs/hadoop-hdfs-2.7.1-tests.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/hdfs/hadoop-hdfs-nfs-2.7.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/hdfs/hadoop-hdfs-2.7.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/log4j-1.2.17.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/jsr305-3.0.0.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/commons-cli-1.2.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/jaxb-impl-2.2.3-1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/asm-3.2.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/jaxb-api-2.2.2.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/commons-io-2.4.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/servlet-api-2.5.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/protobuf-java-2.5.0.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/netty-3.6.2.Final.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/jetty-util-6.1.26.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yar
n/lib/jackson-core-asl-1.9.13.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/activation-1.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/jersey-guice-1.9.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/jettison-1.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/jetty-6.1.26.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/commons-lang-2.6.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/jersey-client-1.9.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/guava-11.0.2.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/aopalliance-1.0.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/guice-3.0.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/stax-api-1.0-2.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/jersey-core-1.9.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/xz-1.0.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/zookeeper-3.4.6-tests.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/jackson-jaxrs-1.9.13.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/commons-logging-1.1.3.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/javax.inject-1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/jackson-xc-1.9.13.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/commons-compress-1.4.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/jackson-mapper-asl-1.9.13.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/guice-servlet-3.0.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/jersey-json-1.9.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/commons-codec-1.4.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/leveldbjni-all-1.8.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/jersey-server-1.9.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/commons-collections-3.2.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/lib/zookeeper-3.4.6.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/hadoop-yarn-api-2.7.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/hadoop-yarn-server-common-2.7.1.jar:/home/hadoop/had
oop-2.7.1/share/hadoop/yarn/hadoop-yarn-applications-unmanaged-am-launcher-2.7.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/hadoop-yarn-server-resourcemanager-2.7.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/hadoop-yarn-registry-2.7.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/hadoop-yarn-server-nodemanager-2.7.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/hadoop-yarn-applications-distributedshell-2.7.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/hadoop-yarn-server-web-proxy-2.7.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/hadoop-yarn-server-sharedcachemanager-2.7.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/hadoop-yarn-client-2.7.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/hadoop-yarn-server-applicationhistoryservice-2.7.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/hadoop-yarn-server-tests-2.7.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/yarn/hadoop-yarn-common-2.7.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/mapreduce/lib/log4j-1.2.17.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/mapreduce/lib/asm-3.2.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/mapreduce/lib/commons-io-2.4.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/mapreduce/lib/protobuf-java-2.5.0.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/mapreduce/lib/netty-3.6.2.Final.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/mapreduce/lib/jackson-core-asl-1.9.13.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/mapreduce/lib/paranamer-2.3.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/mapreduce/lib/jersey-guice-1.9.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/mapreduce/lib/hamcrest-core-1.3.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/mapreduce/lib/avro-1.7.4.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/mapreduce/lib/aopalliance-1.0.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/mapreduce/lib/guice-3.0.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/mapreduce/lib/snappy-java-1.0.4.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/mapreduce/lib/hadoop-annotations-2.7.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/mapre
duce/lib/jersey-core-1.9.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/mapreduce/lib/xz-1.0.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/mapreduce/lib/javax.inject-1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/mapreduce/lib/commons-compress-1.4.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/mapreduce/lib/jackson-mapper-asl-1.9.13.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/mapreduce/lib/guice-servlet-3.0.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/mapreduce/lib/leveldbjni-all-1.8.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/mapreduce/lib/jersey-server-1.9.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/mapreduce/lib/junit-4.11.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/mapreduce/hadoop-mapreduce-examples-2.7.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/mapreduce/hadoop-mapreduce-client-common-2.7.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/mapreduce/hadoop-mapreduce-client-hs-plugins-2.7.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/mapreduce/hadoop-mapreduce-client-shuffle-2.7.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/mapreduce/hadoop-mapreduce-client-core-2.7.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/mapreduce/hadoop-mapreduce-client-app-2.7.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-2.7.1-tests.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/mapreduce/hadoop-mapreduce-client-hs-2.7.1.jar:/home/hadoop/hadoop-2.7.1/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-2.7.1.jar:/home/master/hadoop-2.7.1/contrib/capacity-scheduler/*.jar:/home/master/hadoop-2.7.1/contrib/capacity-scheduler/*.jar STARTUP_MSG: build = https://git-wip-us.apache.org/repos/asf/hadoop.git -r 15ecc87ccf4a0228f35af08fc56de536e6ce657a; compiled by 'jenkins' on 2015-06-29T06:04Z STARTUP_MSG: java = 1.7.0_76 ************************************************************/ 17/02/02 02:59:44 INFO namenode.NameNode: registered UNIX signal handlers for [TERM, HUP, INT] 17/02/02 02:59:44 INFO namenode.NameNode: createNameNode [-format] Formatting using clusterid: 
CID-ef219bd8-5622-49d9-b501-6370f3b5fc73 17/02/02 03:00:03 INFO namenode.FSNamesystem: No KeyProvider found. 17/02/02 03:00:03 INFO namenode.FSNamesystem: fsLock is fair:true 17/02/02 03:00:04 INFO blockmanagement.DatanodeManager: dfs.block.invalidate.limit=1000 17/02/02 03:00:04 INFO blockmanagement.DatanodeManager: dfs.namenode.datanode.registration.ip-hostname-check=true 17/02/02 03:00:04 INFO blockmanagement.BlockManager: dfs.namenode.startup.delay.block.deletion.sec is set to 000:00:00:00.000 17/02/02 03:00:04 INFO blockmanagement.BlockManager: The block deletion will start around 2017 Feb 02 03:00:04 17/02/02 03:00:04 INFO util.GSet: Computing capacity for map BlocksMap 17/02/02 03:00:04 INFO util.GSet: VM type = 64-bit 17/02/02 03:00:04 INFO util.GSet: 2.0% max memory 966.7 MB = 19.3 MB 17/02/02 03:00:04 INFO util.GSet: capacity = 2^21 = 2097152 entries 17/02/02 03:00:04 INFO blockmanagement.BlockManager: dfs.block.access.token.enable=false 17/02/02 03:00:04 INFO blockmanagement.BlockManager: defaultReplication = 2 17/02/02 03:00:04 INFO blockmanagement.BlockManager: maxReplication = 512 17/02/02 03:00:04 INFO blockmanagement.BlockManager: minReplication = 1 17/02/02 03:00:04 INFO blockmanagement.BlockManager: maxReplicationStreams = 2 17/02/02 03:00:04 INFO blockmanagement.BlockManager: shouldCheckForEnoughRacks = false 17/02/02 03:00:04 INFO blockmanagement.BlockManager: replicationRecheckInterval = 3000 17/02/02 03:00:04 INFO blockmanagement.BlockManager: encryptDataTransfer = false 17/02/02 03:00:04 INFO blockmanagement.BlockManager: maxNumBlocksToLog = 1000 17/02/02 03:00:04 INFO namenode.FSNamesystem: fsOwner = hadoop (auth:SIMPLE) 17/02/02 03:00:04 INFO namenode.FSNamesystem: supergroup = supergroup 17/02/02 03:00:04 INFO namenode.FSNamesystem: isPermissionEnabled = true 17/02/02 03:00:04 INFO namenode.FSNamesystem: HA Enabled: false 17/02/02 03:00:04 INFO namenode.FSNamesystem: Append Enabled: true 17/02/02 03:00:05 INFO util.GSet: Computing capacity 
for map INodeMap 17/02/02 03:00:05 INFO util.GSet: VM type = 64-bit 17/02/02 03:00:05 INFO util.GSet: 1.0% max memory 966.7 MB = 9.7 MB 17/02/02 03:00:05 INFO util.GSet: capacity = 2^20 = 1048576 entries 17/02/02 03:00:05 INFO namenode.FSDirectory: ACLs enabled? false 17/02/02 03:00:05 INFO namenode.FSDirectory: XAttrs enabled? true 17/02/02 03:00:05 INFO namenode.FSDirectory: Maximum size of an xattr: 16384 17/02/02 03:00:05 INFO namenode.NameNode: Caching file names occuring more than 10 times 17/02/02 03:00:05 INFO util.GSet: Computing capacity for map cachedBlocks 17/02/02 03:00:05 INFO util.GSet: VM type = 64-bit 17/02/02 03:00:05 INFO util.GSet: 0.25% max memory 966.7 MB = 2.4 MB 17/02/02 03:00:05 INFO util.GSet: capacity = 2^18 = 262144 entries 17/02/02 03:00:05 INFO namenode.FSNamesystem: dfs.namenode.safemode.threshold-pct = 0.9990000128746033 17/02/02 03:00:05 INFO namenode.FSNamesystem: dfs.namenode.safemode.min.datanodes = 0 17/02/02 03:00:05 INFO namenode.FSNamesystem: dfs.namenode.safemode.extension = 30000 17/02/02 03:00:05 INFO metrics.TopMetrics: NNTop conf: dfs.namenode.top.window.num.buckets = 10 17/02/02 03:00:05 INFO metrics.TopMetrics: NNTop conf: dfs.namenode.top.num.users = 10 17/02/02 03:00:05 INFO metrics.TopMetrics: NNTop conf: dfs.namenode.top.windows.minutes = 1,5,25 17/02/02 03:00:05 INFO namenode.FSNamesystem: Retry cache on namenode is enabled 17/02/02 03:00:05 INFO namenode.FSNamesystem: Retry cache will use 0.03 of total heap and retry cache entry expiry time is 600000 millis 17/02/02 03:00:06 INFO util.GSet: Computing capacity for map NameNodeRetryCache 17/02/02 03:00:06 INFO util.GSet: VM type = 64-bit 17/02/02 03:00:06 INFO util.GSet: 0.029999999329447746% max memory 966.7 MB = 297.0 KB 17/02/02 03:00:06 INFO util.GSet: capacity = 2^15 = 32768 entries Re-format filesystem in Storage Directory /tmp/dfs/name ? 
(Y or N) y 17/02/02 03:00:28 INFO namenode.FSImage: Allocated new BlockPoolId: BP-1867851271-192.168.190.128-1485975628037 17/02/02 03:00:28 INFO common.Storage: Storage directory /tmp/dfs/name has been successfully formatted. 17/02/02 03:00:29 INFO namenode.NNStorageRetentionManager: Going to retain 1 images with txid >= 0 17/02/02 03:00:29 INFO util.ExitUtil: Exiting with status 0 17/02/02 03:00:29 INFO namenode.NameNode: SHUTDOWN_MSG: /************************************************************ SHUTDOWN_MSG: Shutting down NameNode at master/192.168.190.128 ************************************************************/
hadoop@master:~$ start-all.sh
hadoop@master:~$ start-all.sh This script is Deprecated. Instead use start-dfs.sh and start-yarn.sh Starting namenodes on [master] master: starting namenode, logging to /home/hadoop/hadoop-2.7.1/logs/hadoop-hadoop-namenode-master.out slave1: starting datanode, logging to /home/hadoop/hadoop-2.7.1/logs/hadoop-hadoop-datanode-slave1.out slave2: starting datanode, logging to /home/hadoop/hadoop-2.7.1/logs/hadoop-hadoop-datanode-slave2.out Starting secondary namenodes [0.0.0.0] 0.0.0.0: starting secondarynamenode, logging to /home/hadoop/hadoop-2.7.1/logs/hadoop-hadoop-secondarynamenode-master.out starting yarn daemons starting resourcemanager, logging to /home/hadoop/hadoop-2.7.1/logs/yarn-hadoop-resourcemanager-master.out slave1: starting nodemanager, logging to /home/hadoop/hadoop-2.7.1/logs/yarn-hadoop-nodemanager-slave1.out slave2: starting nodemanager, logging to /home/hadoop/hadoop-2.7.1/logs/yarn-hadoop-nodemanager-slave2.out
hadoop@master:~$ jps 11012 Jps 10748 ResourceManager 10594 SecondaryNameNode
# NOTE(review): the NameNode process is MISSING from this jps output — only the
# ResourceManager and SecondaryNameNode came up. HDFS is not usable in this state;
# check logs/hadoop-hadoop-namenode-master.log before continuing.
hadoop@slave1:~$ jps 7227 Jps 7100 NodeManager 6977 DataNode
hadoop@slave2:~$ jps 6654 Jps 6496 NodeManager 6373 DataNode
hdfs dfsadmin -report   # original read 'Hadoop dfsadmin' — the capitalized command fails; 'hadoop dfsadmin' is also deprecated in favor of 'hdfs dfsadmin'
ls -a -l
total 36 drwxr-xr-x 9 root root 4096 Feb 1 02:41 . drwxr-xr-x 4 root root 4096 Jan 27 01:50 .. drwx------ 3 root root 4096 Jan 31 03:35 .cache drwxr-xr-x 5 root root 4096 Jan 31 03:35 .config drwxrwxrwx 11 hadoop root 4096 Feb 1 00:18 hadoop-2.7.1 drwxrwxrwx 8 hadoop root 4096 Feb 1 02:47 hbase-1.2.4 drwxr-xr-x 3 root root 4096 Jan 31 03:35 .local drwxr-xr-x 2 root root 4096 Jan 31 14:47 software drwxr-xr-x 2 hadoop root 4096 Feb 1 00:01 .ssh
<?xml version="1.0"?> <?xml-stylesheet type="text/xsl" href="configuration.xsl"?> <!-- /** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ --> <configuration> <property> <name>hbase.rootdir</name> <value>hdfs://master:9000/hbase</value> <description>HBase Data storge directory</description> </property> <property> <name>hbase.cluster.distributed</name> <value>true</value> <description>Assign HBase run mode</description> </property> <property> <name>hbase.master</name> <value>hdfs://master:60000</value> <description>Assign Master position</description> </property> <property> <name>hbase.zookeeper.quorum</name> <value>master,slave1,slave2</value> <description>Assign Zookeeper cluster</description> </property> </configuration>
master slave1 slave2
hdfs dfs -mkdir hdfs://master:9000/hbase
hadoop@master:~$ start-hbase.sh slave1: starting zookeeper, logging to /home/hadoop/hbase-1.2.4/logs/hbase-hadoop-zookeeper-slave1.out slave2: starting zookeeper, logging to /home/hadoop/hbase-1.2.4/logs/hbase-hadoop-zookeeper-slave2.out master: starting zookeeper, logging to /home/hadoop/hbase-1.2.4/logs/hbase-hadoop-zookeeper-master.out starting master, logging to /home/hadoop/hbase-1.2.4/logs/hbase-hadoop-master-master.out master: starting regionserver, logging to /home/hadoop/hbase-1.2.4/logs/hbase-hadoop-regionserver-master.out slave2: starting regionserver, logging to /home/hadoop/hbase-1.2.4/logs/hbase-hadoop-regionserver-slave2.out slave1: starting regionserver, logging to /home/hadoop/hbase-1.2.4/logs/hbase-hadoop-regionserver-slave1.out
hbase shell
hadoop@master:~$ hbase shell SLF4J: Class path contains multiple SLF4J bindings. SLF4J: Found binding in [jar:file:/home/hadoop/hbase-1.2.4/lib/slf4j-log4j12-1.7.5.jar!/org/slf4j/impl/StaticLoggerBinder.class] SLF4J: Found binding in [jar:file:/home/hadoop/hadoop-2.7.1/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class] SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation. SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory] HBase Shell; enter 'help<RETURN>' for list of supported commands. Type "exit<RETURN>" to leave the HBase Shell Version 1.2.4, r67592f3d062743907f8c5ae00dbbe1ae4f69e5af, Tue Oct 25 18:10:20 CDT 2016 hbase(main):001:0>
hbase(main):009:0> status 1 active master, 0 backup masters, 3 servers, 0 dead, 0.6667 average load
hbase(main):010:0> list TABLE 0 row(s) in 0.3250 seconds => []
[Removed: unrelated scraped web-template listings and site-footer/ICP-registration boilerplate — not part of this Hadoop/HBase setup guide.]