Hadoop Single-Node Quick Deployment
Install OpenJDK 7, check the Java version, and create a jdk symlink so later configuration can use a version-independent path:

sudo apt-get update
sudo apt-get install openjdk-7-jdk
java -version
cd /usr/lib/jvm
sudo ln -s java-7-openjdk-amd64 jdk

Create a dedicated group and user for Hadoop, add the user to sudo, and switch to it:

sudo addgroup hadoop_group
sudo adduser --ingroup hadoop_group hduser1
sudo adduser hduser1 sudo
su - hduser1

Generate a passphrase-less SSH key for hduser1, authorize it for localhost, and test that password-less SSH works (Hadoop's start scripts need it):

ssh-keygen -t rsa -P ""
cat $HOME/.ssh/id_rsa.pub >> $HOME/.ssh/authorized_keys
ssh localhost

Back in the hduser1 shell, set the Hadoop-related environment variables for the current session:

su - hduser1
# Set Hadoop-related environment variables
export HADOOP_HOME=/usr/local/hadoop
# Add Hadoop bin/ directory to PATH
export PATH=$PATH:$HADOOP_HOME/bin

Download the Hadoop 2.7.0 tarball, unpack it, and move it to /usr/local/hadoop:

wget http://ftp.yz.yamagata-u.ac.jp/pub/network/apache/hadoop/common/current/hadoop-2.7.0.tar.gz
tar -zxvf hadoop-2.7.0.tar.gz
sudo mv hadoop-2.7.0 /usr/local/hadoop

Edit ~/.bashrc (vi ~/.bashrc) and append the following block:

#Hadoop variables
export JAVA_HOME=/usr/lib/jvm/jdk/
export HADOOP_INSTALL=/usr/local/hadoop
export PATH=$PATH:$HADOOP_INSTALL/bin
export PATH=$PATH:$HADOOP_INSTALL/sbin
export HADOOP_MAPRED_HOME=$HADOOP_INSTALL
export HADOOP_COMMON_HOME=$HADOOP_INSTALL
export HADOOP_HDFS_HOME=$HADOOP_INSTALL
export YARN_HOME=$HADOOP_INSTALL
###end of paste

Edit /usr/local/hadoop/etc/hadoop/hadoop-env.sh and change the JAVA_HOME line to:

export JAVA_HOME=/usr/lib/jvm/jdk
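Before moving on to the XML configuration, a quick sanity check (not part of the original steps, but using only standard shell and Hadoop commands) confirms that the variables above resolve and that the hadoop binary is on the PATH:

# Optional sanity check: reload the profile and verify the install.
source ~/.bashrc
which hadoop      # should print /usr/local/hadoop/bin/hadoop
hadoop version    # should report Hadoop 2.7.0
echo $JAVA_HOME   # should print /usr/lib/jvm/jdk/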
Edit /usr/local/hadoop/etc/hadoop/core-site.xml and add the following between the <configuration> tags:

<configuration>
  <property>
    <name>fs.default.name</name>
    <value>hdfs://localhost:9000</value>
  </property>
</configuration>

Edit /usr/local/hadoop/etc/hadoop/yarn-site.xml so that it reads:

<configuration>
  <property>
    <name>yarn.nodemanager.aux-services</name>
    <value>mapreduce_shuffle</value>
  </property>
  <property>
    <name>yarn.nodemanager.aux-services.mapreduce.shuffle.class</name>
    <value>org.apache.hadoop.mapred.ShuffleHandler</value>
  </property>
</configuration>

Copy the MapReduce template and point the framework at YARN:

cp /usr/local/hadoop/etc/hadoop/mapred-site.xml.template /usr/local/hadoop/etc/hadoop/mapred-site.xml

Edit /usr/local/hadoop/etc/hadoop/mapred-site.xml so that it reads:

<configuration>
  <property>
    <name>mapreduce.framework.name</name>
    <value>yarn</value>
  </property>
</configuration>

Create local directories for the NameNode and DataNode data and give them to hduser1:

sudo mkdir -p /usr/local/hadoop_store/hdfs/namenode
sudo mkdir -p /usr/local/hadoop_store/hdfs/datanode
sudo chown hduser1 /usr/local/hadoop_store/hdfs/namenode
sudo chown hduser1 /usr/local/hadoop_store/hdfs/datanode

Edit /usr/local/hadoop/etc/hadoop/hdfs-site.xml so that it reads:

<configuration>
  <property>
    <name>dfs.replication</name>
    <value>1</value>
  </property>
  <property>
    <name>dfs.namenode.name.dir</name>
    <value>file:/usr/local/hadoop_store/hdfs/namenode</value>
  </property>
  <property>
    <name>dfs.datanode.data.dir</name>
    <value>file:/usr/local/hadoop_store/hdfs/datanode</value>
  </property>
</configuration>

Fix ownership and permissions on the storage directory:

sudo chown -R hduser1:hadoop_group /usr/local/hadoop_store
sudo chmod -R 777 /usr/local/hadoop_store

Format HDFS and start all daemons:

cd /usr/local/hadoop/
hdfs namenode -format
start-all.sh

Check that all daemons are running with jps; a healthy single-node setup shows something like:

jps
10477 SecondaryNameNode
10757 NodeManager
10974 Jps
10113 NameNode
10623 ResourceManager
10251 DataNode
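With all six processes listed by jps, the cluster can be exercised end to end. The commands below are a suggested smoke test rather than part of the original post; the examples jar path assumes the default layout of the 2.7.0 binary tarball, so adjust the version in the filename if yours differs. The NameNode web UI should also be reachable at http://localhost:50070 and the ResourceManager UI at http://localhost:8088.

# Suggested smoke test (not in the original steps).
hdfs dfs -mkdir -p /user/hduser1
hdfs dfs -put /usr/local/hadoop/etc/hadoop/core-site.xml /user/hduser1/
hdfs dfs -ls /user/hduser1
# Run the bundled pi estimator; the jar name assumes a stock 2.7.0 install.
hadoop jar /usr/local/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-examples-2.7.0.jar pi 2 5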
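When finished, the daemons can be shut down from the same shell. stop-all.sh is the counterpart of the start-all.sh used above; it is deprecated in Hadoop 2.x in favor of the stop-dfs.sh / stop-yarn.sh pair, but both approaches still work:

stop-all.sh
# or, equivalently:
stop-dfs.sh
stop-yarn.sh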