
Hadoop 2.x HA Detailed Configuration

Published: 2020-04-23 11:36:47 | Source: Network | Views: 1931 | Author: jethai | Category: Big Data


Difference between hadoop-daemon.sh and hadoop-daemons.sh

hadoop-daemon.sh only starts or stops a daemon on the local machine.

hadoop-daemons.sh runs the same operation on the remote worker nodes: it reads the slaves file and invokes hadoop-daemon.sh on each listed host over SSH.
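
For example (a minimal sketch; datanode is only an illustrative daemon name, and hadoop-daemons.sh assumes the slaves file lists the worker nodes, as configured later in this post):

# starts the daemon only on the machine where the command is issued
hadoop-daemon.sh start datanode

# reads the slaves file and starts the daemon on every listed host over SSH
hadoop-daemons.sh start datanode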


1. Start the JournalNodes

hadoop-daemons.sh start journalnode


hdfs namenode -initializeSharedEdits    # copies the edits log to the JournalNodes; on a first-time setup, run this after formatting the NameNode


Open http://hadoop-yarn1:8480 to check whether the JournalNode is running normally.
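
To script the same check across all three journal hosts (a sketch, assuming curl is available; 8480 is the default JournalNode HTTP port):

# each host should return HTTP 200 once its JournalNode is up
for host in hadoop-yarn1 hadoop-yarn2 hadoop-yarn3; do
    echo -n "$host: "
    curl -s -o /dev/null -w "%{http_code}\n" http://$host:8480
done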


2. Format the NameNode and start the Active NameNode

a. Format the NameNode on the Active NameNode host

hdfs namenode -format
hdfs namenode -initializeSharedEdits

JournalNode initialization is now complete.
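
To confirm the format succeeded, you can inspect the freshly written metadata directory (a sketch; the path assumes the default dfs.namenode.name.dir of ${hadoop.tmp.dir}/dfs/name with the hadoop.tmp.dir set in core-site.xml below):

# VERSION should now contain the new namespaceID, clusterID and blockpoolID
cat /opt/modules/hadoop-2.2.0/data/tmp/dfs/name/current/VERSION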


b. Start the Active NameNode

hadoop-daemon.sh start namenode


3. Start the Standby NameNode

a. Bootstrap the Standby node on the Standby NameNode host

This copies the metadata from the Active NameNode over to the Standby NameNode:

hdfs namenode -bootstrapStandby

b. Start the Standby node

hadoop-daemon.sh start namenode


4. Enable Automatic Failover

Create a monitoring node (ZNode) such as /hadoop-ha/ns1 in ZooKeeper:

hdfs zkfc -formatZK
start-dfs.sh
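
You can verify that the ZNode was created with the ZooKeeper CLI (a sketch, assuming zkCli.sh from the ZooKeeper installation is on the PATH):

# the listing should include ns1 after formatZK has run
zkCli.sh -server hadoop-yarn1:2181 ls /hadoop-ha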

5. Check the NameNode state

hdfs haadmin -getServiceState nn1
active
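
To query both NameNodes in one pass (a small sketch using the service IDs nn1 and nn2 defined in hdfs-site.xml):

for nn in nn1 nn2; do
    echo -n "$nn: "
    hdfs haadmin -getServiceState $nn
done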

6. Trigger a failover

hdfs haadmin -failover nn1 nn2


Configuration file details

core-site.xml

<configuration>
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://ns1</value>
    </property>
    
    <property>
        <name>hadoop.tmp.dir</name>
        <value>/opt/modules/hadoop-2.2.0/data/tmp</value>
    </property>
    
    <property>
        <name>fs.trash.interval</name>
        <!-- trash retention in minutes; the value must be a literal number, so 24 hours is written as 1440 rather than 60*24 -->
        <value>1440</value>
    </property>
    
    <property>
        <name>ha.zookeeper.quorum</name>
        <value>hadoop-yarn1:2181,hadoop-yarn2:2181,hadoop-yarn3:2181</value>
    </property>
    
    <property>  
        <name>hadoop.http.staticuser.user</name>
        <value>yuanhai</value>
    </property>
</configuration>
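
Because fs.defaultFS points at the nameservice ns1 rather than a single host, clients never need to know which NameNode is currently active. For example (a sketch):

# resolves to whichever NameNode is active at the moment
hdfs dfs -ls hdfs://ns1/
# equivalent, since hdfs://ns1 is the default filesystem
hdfs dfs -ls /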

hdfs-site.xml

<configuration>
    <property>
        <name>dfs.replication</name>
        <value>3</value>
    </property>
    
    <property>
        <name>dfs.nameservices</name>
        <value>ns1</value>
    </property>
    
    <property>
        <name>dfs.ha.namenodes.ns1</name>
        <value>nn1,nn2</value>
    </property>
        
    <property>
        <name>dfs.namenode.rpc-address.ns1.nn1</name>
        <value>hadoop-yarn1:8020</value>
    </property>
    
    <property>
        <name>dfs.namenode.rpc-address.ns1.nn2</name>
        <value>hadoop-yarn2:8020</value>
    </property>
    
    <property>
        <name>dfs.namenode.http-address.ns1.nn1</name>
        <value>hadoop-yarn1:50070</value>
    </property>
    
    <property>
        <name>dfs.namenode.http-address.ns1.nn2</name>
        <value>hadoop-yarn2:50070</value>
    </property>
    
    <property>
        <name>dfs.namenode.shared.edits.dir</name>
        <value>qjournal://hadoop-yarn1:8485;hadoop-yarn2:8485;hadoop-yarn3:8485/ns1</value>
    </property>
    
    <property>
        <name>dfs.journalnode.edits.dir</name>
        <value>/opt/modules/hadoop-2.2.0/data/tmp/journal</value>
    </property>
    
     <property>
        <name>dfs.ha.automatic-failover.enabled</name>
        <value>true</value>
    </property>
    
    <property>
        <name>dfs.client.failover.proxy.provider.ns1</name>
        <value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
    </property>
    
    <property>
        <name>dfs.ha.fencing.methods</name>
        <value>sshfence</value>
    </property>
    
    <property>
        <name>dfs.ha.fencing.ssh.private-key-files</name>
        <value>/home/hadoop/.ssh/id_rsa</value>
    </property>
    
    <property>
        <name>dfs.permissions.enabled</name>
        <value>false</value>
    </property>
    

<!--     <property>
        <name>dfs.namenode.http-address</name>
        <value>hadoop-yarn.dragon.org:50070</value>
    </property>

    <property>
        <name>dfs.namenode.secondary.http-address</name>
        <value>hadoop-yarn.dragon.org:50090</value>
    </property>
    
    <property>
        <name>dfs.namenode.name.dir</name>
        <value>file://${hadoop.tmp.dir}/dfs/name</value>
    </property>
    
    <property>
        <name>dfs.namenode.edits.dir</name>
        <value>${dfs.namenode.name.dir}</value>
    </property>
    
    <property>
        <name>dfs.datanode.data.dir</name>
        <value>file://${hadoop.tmp.dir}/dfs/data</value>
    </property>
    
    <property>
        <name>dfs.namenode.checkpoint.dir</name>
        <value>file://${hadoop.tmp.dir}/dfs/namesecondary</value>
    </property>
    
    <property>
        <name>dfs.namenode.checkpoint.edits.dir</name>
        <value>${dfs.namenode.checkpoint.dir}</value>
    </property>
-->    
</configuration>
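
The sshfence method configured above only works if the NameNode hosts can SSH to each other without a password using the key named in dfs.ha.fencing.ssh.private-key-files. A quick way to verify from each NameNode host (a sketch; the hadoop user is an assumption based on the key path):

# should print the remote hostname without prompting for a password
ssh -i /home/hadoop/.ssh/id_rsa hadoop@hadoop-yarn2 hostname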


slaves

hadoop-yarn1
hadoop-yarn2
hadoop-yarn3

yarn-site.xml

<configuration>
    <property>
        <name>yarn.nodemanager.aux-services</name>
        <value>mapreduce_shuffle</value>
    </property>
    
    <property>
        <name>yarn.resourcemanager.hostname</name>
        <value>hadoop-yarn1</value>
    </property> 
    
    <property>
        <name>yarn.log-aggregation-enable</name>
        <value>true</value>
    </property>

    <property>
        <name>yarn.log-aggregation.retain-seconds</name>
        <value>604800</value>
    </property> 

</configuration>
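
With yarn.log-aggregation-enable set to true, container logs are collected into HDFS after an application finishes and can be read back through the yarn CLI (a sketch; the application ID below is only a placeholder):

# replace the ID with a real one from the ResourceManager UI
yarn logs -applicationId application_1400000000000_0001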

mapred-site.xml

<configuration>
    <property>
        <name>mapreduce.framework.name</name>
        <value>yarn</value>
    </property>

    <property>
        <name>mapreduce.jobhistory.address</name>
        <value>hadoop-yarn1:10020</value>
        <description>MapReduce JobHistory Server IPC host:port</description>
    </property>

    <property>
        <name>mapreduce.jobhistory.webapp.address</name>
        <value>hadoop-yarn1:19888</value>
        <description>MapReduce JobHistory Server Web UI host:port</description>
    </property>
    
    <property>
        <name>mapreduce.job.ubertask.enable</name>
        <value>true</value>
    </property>
    
</configuration>
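
The JobHistory Server addressed above is not started by start-dfs.sh or start-yarn.sh; it has its own daemon script (a sketch, to be run on hadoop-yarn1):

# starts the MapReduce JobHistory Server on this host
mr-jobhistory-daemon.sh start historyserver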


hadoop-env.sh

export JAVA_HOME=/opt/modules/jdk1.6.0_24



Other related articles:

http://blog.csdn.net/zhangzhaokun/article/details/17892857

