hadoop-hbase-spark单机版安装
0 需要開放的外網端口
50070,8088,60010,7077
1 設置ssh免密碼登錄
ssh-keygen -t dsa -P '' -f ~/.ssh/id_dsa
cat ~/.ssh/id_dsa.pub >> ~/.ssh/authorized_keys
chmod 0600 ~/.ssh/authorized_keys
2 解壓安裝包
tar -zxvf /usr/jxx/scala-2.10.4.tgz -C /usr/local/
tar -zxvf /usr/jxx/spark-1.5.2-bin-hadoop2.6.tgz -C /usr/local/
tar -zxvf /usr/jxx/hbase-1.0.3-bin.tar.gz -C /usr/local/
tar -zxvf /usr/jxx/hadoop-2.6.0-x64.tar.gz -C /usr/local/
3 設置環境變量
vim /etc/profile
添加
export JAVA_HOME=/usr/local/java/jdk1.7.0_79  # jdk如果已有就不用添加（注意 # 前必須有空格，否則註釋會被併入變量值）
export PATH=$PATH:$JAVA_HOME/bin:$JAVA_HOME/jre/bin
export CLASSPATH=.:$JAVA_HOME/lib:$JAVA_HOME/jre/lib:$CLASSPATH
export SCALA_HOME=/usr/local/scala-2.10.4
export PATH=$PATH:$SCALA_HOME/bin
export HADOOP_HOME=/usr/local/hadoop-2.6.0
export PATH=$PATH:$HADOOP_HOME/bin
export HBASE_HOME=/usr/local/hbase-1.0.3
export PATH=$PATH:$HBASE_HOME/bin
export SPARK_HOME=/usr/local/spark-1.5.2-bin-hadoop2.6
export PATH=$PATH:$SPARK_HOME/bin
然后執行
source /etc/profile
或者重啟機器
4 修改配置
vim /usr/local/hadoop-2.6.0/etc/hadoop/hadoop-env.sh
修改 export JAVA_HOME=/usr/local/java/jdk1.7.0_79

vim /usr/local/hadoop-2.6.0/etc/hadoop/core-site.xml
core-site.xml
<property>
<name>fs.defaultFS</name>
<!-- 此處主機名需與後面 hbase.rootdir 中的一致，統一使用 localhost（或均改為本機主機名） -->
<value>hdfs://localhost:9000</value>
</property>

vim /usr/local/hadoop-2.6.0/etc/hadoop/hdfs-site.xml
hdfs-site.xml
<property>
<name>dfs.replication</name>
<value>1</value>
</property>
<property>
<name>dfs.namenode.name.dir</name>
<value>file:///disk/dfs/name</value>
</property>
<property>
<name>dfs.datanode.data.dir</name>
<value>file:///disk/dfs/data</value>
</property>

vim /usr/local/hadoop-2.6.0/etc/hadoop/yarn-site.xml
yarn-site.xml
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>

mv /usr/local/hadoop-2.6.0/etc/hadoop/mapred-site.xml.template /usr/local/hadoop-2.6.0/etc/hadoop/mapred-site.xml
vim /usr/local/hadoop-2.6.0/etc/hadoop/mapred-site.xml
mapred-site.xml
<property>
<name>mapreduce.framework.name</name>
<value>yarn</value>
</property>

vim /usr/local/hbase-1.0.3/conf/hbase-site.xml
hbase-site.xml
<property>
<name>hbase.rootdir</name>
<!-- 對應于 hdfs 中 fs.defaultFS 的配置，主機名與端口必須一致 -->
<value>hdfs://localhost:9000/hbase</value>
</property>
<property>
<name>hbase.cluster.distributed</name>
<value>true</value>
</property>

vim /usr/local/hbase-1.0.3/conf/hbase-env.sh
export JAVA_HOME=/usr/local/java/jdk1.7.0_79
export PATH=$PATH:$JAVA_HOME/bin:$JAVA_HOME/jre/bin
export CLASSPATH=.:$JAVA_HOME/lib:$JAVA_HOME/jre/lib:$CLASSPATH
export SCALA_HOME=/usr/local/scala-2.10.4
export PATH=$PATH:$SCALA_HOME/bin
export HADOOP_HOME=/usr/local/hadoop-2.6.0
export PATH=$PATH:$HADOOP_HOME/bin
export HBASE_HOME=/usr/local/hbase-1.0.3
export PATH=$PATH:$HBASE_HOME/bin
export HBASE_MANAGES_ZK=true

mv /usr/local/spark-1.5.2-bin-hadoop2.6/conf/spark-env.sh.template /usr/local/spark-1.5.2-bin-hadoop2.6/conf/spark-env.sh
mv /usr/local/spark-1.5.2-bin-hadoop2.6/conf/spark-defaults.conf.template /usr/local/spark-1.5.2-bin-hadoop2.6/conf/spark-defaults.conf
mkdir -p /disk/spark
vim /usr/local/spark-1.5.2-bin-hadoop2.6/conf/spark-env.sh
export JAVA_HOME=/usr/local/java/jdk1.7.0_79
export SCALA_HOME=/usr/local/scala-2.10.4
export HADOOP_HOME=/usr/local/hadoop-2.6.0
export HBASE_HOME=/usr/local/hbase-1.0.3
export SPARK_HOME=/usr/local/spark-1.5.2-bin-hadoop2.6
export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
export SPARK_LOCAL_DIRS=/disk/spark
export SPARK_DAEMON_MEMORY=256m
export SPARK_HISTORY_OPTS="$SPARK_HISTORY_OPTS -Dspark.history.fs.logDirectory=/tmp/spark -Dspark.history.ui.port=18082"
export STANDALONE_SPARK_MASTER_HOST=localhost

vim /usr/local/spark-1.5.2-bin-hadoop2.6/conf/spark-defaults.conf
spark.master=spark://localhost:7077
spark.eventLog.dir=/disk/spark/applicationHistory
spark.eventLog.enabled=true
spark.yarn.historyServer.address=localhost:18082
5 初始化環境
格式化namenode
hdfs namenode -format
6 啟動服務
# 啟動hdfs（啟動腳本為 bash 腳本，直接執行即可，不要用 sh 調用以免 bash 語法失效）
/usr/local/hadoop-2.6.0/sbin/start-dfs.sh
# 啟動hbase
/usr/local/hbase-1.0.3/bin/start-hbase.sh
# 啟動spark
/usr/local/spark-1.5.2-bin-hadoop2.6/sbin/start-all.sh
7 設置開機啟動（將以下命令加入 /etc/rc.local 等開機腳本中）
su - root -c "/usr/local/hadoop-2.6.0/sbin/start-dfs.sh"

su - root -c "/usr/local/hbase-1.0.3/bin/start-hbase.sh"

su - root -c "/usr/local/spark-1.5.2-bin-hadoop2.6/sbin/start-all.sh"
轉載于:https://www.cnblogs.com/jixiangxiang/p/5520851.html
總結
以上是生活随笔為你收集整理的hadoop-hbase-spark单机版安装的全部內容,希望文章能夠幫你解決所遇到的問題。
- 上一篇: 梦到自己怀孕打胎是什么意思
- 下一篇: 做梦梦到粪便是什么意思呢