博客
关于我
强烈建议你试试无所不能的chatGPT,快点击我
hadoop ha 读取 active 状态的活动节点
阅读量:4562 次
发布时间:2019-06-08

本文共 5840 字,大约阅读时间需要 19 分钟。

 

 方式一

package com.xxx.hadoop;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.HAUtil;

import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;

/**
 * Probes a fixed list of HDFS HA NameNode candidates and prints the
 * {@code hdfs://host:port} URI of the currently active one.
 *
 * @CreatedDate: 2018/1/5 10:01
 * @Author: wangel.lee
 */
public class TestNodeActive {

    public static void main(String[] args) throws IOException {
        getActiveNameNode();
    }

    /**
     * Determines which of the configured HA NameNodes is currently active
     * and prints its URI. A candidate whose lookup fails is reported as
     * "not active" and skipped; if none is active an empty string is printed.
     *
     * @throws IOException if a FileSystem instance cannot be created
     */
    public static void getActiveNameNode() throws IOException {
        String[] mapredJobTrackers = {"hdfs://10.1.1.12:8030", "hdfs://10.1.1.11:8030"};
        String[] fsDefaultNames = {"hdfs://10.1.1.12:8020", "hdfs://10.1.1.11:8020"};
        String hivePath = "";
        // Iterate over all candidates instead of the former hard-coded bound of 2.
        for (int i = 0; i < fsDefaultNames.length; i++) {
            Configuration conf = createConfigurationDefault(mapredJobTrackers[i], fsDefaultNames[i]);
            FileSystem fileSystem = FileSystem.get(conf);
            InetSocketAddress active = null;
            try {
                active = HAUtil.getAddressOfActive(fileSystem);
            } catch (Exception e) {
                // HAUtil throws when this candidate is not the active NameNode.
                System.out.println("hostname:" + mapredJobTrackers[i] + " is not active.");
            }
            if (active == null) {
                continue;
            }
            InetAddress address = active.getAddress();
            hivePath = "hdfs://" + address.getHostAddress() + ":" + active.getPort();
            break;
        }
        System.out.println(hivePath);
    }

    /**
     * Builds a Configuration pointing at one candidate NameNode.
     *
     * @param mapredJobTracker job-tracker address for this candidate
     * @param fsDefaultName    default filesystem URI for this candidate
     * @return a Configuration wired to the given addresses
     * @throws IOException declared for compatibility with callers
     */
    private static Configuration createConfigurationDefault(String mapredJobTracker,
                                                            String fsDefaultName) throws IOException {
        Configuration conf = new Configuration();
        // Fixed key typo: was "mapred.jop.tracker", which Hadoop never reads.
        conf.set("mapred.job.tracker", mapredJobTracker);
        conf.set("fs.default.name", fsDefaultName);
        // Running from Windows requires hadoop.home.dir to locate winutils.exe.
        System.setProperty("hadoop.home.dir",
                "E:\\ProgramFiles\\java\\hadoop\\hadoop-common-2.2.0-bin-master");
        return conf;
    }
}

 

方式二

package com.yanheap.hadoop;

import com.alibaba.druid.util.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.HAUtil;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.URI;

/**
 * Resolves the active NameNode of an HDFS HA cluster via the client-side
 * failover proxy configuration, and reads a sample file from HDFS.
 *
 * @CreatedDate: 2018/1/5 14:48
 * @Author: wangel.lee
 */
public class ZookeeperActive {

    private static String clusterName = "nameservice1";
    private static final String HADOOP_URL = "hdfs://" + clusterName;
    private static Configuration conf;

    static {
        conf = new Configuration();
        conf.set("fs.defaultFS", HADOOP_URL);
        conf.set("dfs.nameservices", clusterName);
        conf.set("dfs.ha.namenodes." + clusterName, "nn1,nn2");
        // HA setup: when one NameNode becomes standby the client logs the
        // failure and transparently fails over to the other.
        conf.set("dfs.namenode.rpc-address." + clusterName + ".nn1", "10.1.1.12:8020");
        conf.set("dfs.namenode.rpc-address." + clusterName + ".nn2", "10.1.1.11:8020");
        conf.set("dfs.client.failover.proxy.provider." + clusterName,
                "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
    }

    /**
     * Resolves and prints the URI of the active NameNode, plus the elapsed
     * lookup time in milliseconds.
     *
     * @throws IOException if the FileSystem cannot be created or no active
     *                     NameNode can be determined
     */
    private static void getActive() throws IOException {
        FileSystem fileSystem = FileSystem.get(URI.create(HADOOP_URL), conf);
        long startMillis = System.currentTimeMillis();
        InetSocketAddress active = HAUtil.getAddressOfActive(fileSystem);
        // Guard restored: the original dereferenced `active` with the
        // surrounding try/catch commented out, risking an NPE.
        if (active == null) {
            throw new IOException("no active NameNode found for " + clusterName);
        }
        InetAddress address = active.getAddress();
        String hivePath = "hdfs://" + address.getHostAddress() + ":" + active.getPort();
        System.out.println("-------------------------------------->" + hivePath);
        System.out.println("-------------------------------------->"
                + (System.currentTimeMillis() - startMillis));
    }

    /**
     * Reads /tmp/media_info.csv from HDFS and prints each non-empty line.
     * Uses try-with-resources so the reader and filesystem are closed even
     * when opening the file fails (the old finally block could NPE on a
     * null reader, masking the original exception).
     *
     * @throws IOException if the file cannot be opened or read
     */
    private static void readFile() throws IOException {
        Path srcPath = new Path("/tmp/media_info.csv");
        try (FileSystem fileSystem = FileSystem.get(URI.create(HADOOP_URL), conf);
             FSDataInputStream in = fileSystem.open(srcPath);
             BufferedReader br = new BufferedReader(new InputStreamReader(in))) {
            String line;
            while (null != (line = br.readLine())) {
                if (!StringUtils.isEmpty(line)) {
                    System.out.println("-------->:" + line);
                }
            }
        }
    }

    public static void main(String[] args) throws IOException {
        //getActive();
        readFile();
    }
}

 

 

 

另,本地 Windows 执行 hadoop 需要环境变量的支持,如下提供 hadoop 命令下载包,下载后直接添加环境变量对它的引用

链接:https://pan.baidu.com/s/1eRIHmdO 密码:ly38

 

转载于:https://www.cnblogs.com/hwaggLee/p/8204121.html

你可能感兴趣的文章
CSS3动画库animate.css
查看>>
Type类型详解
查看>>
C#面向对象(一)
查看>>
Excel 求差集和并集
查看>>
设计抗住千万级流量的架构思路
查看>>
Service生命周期
查看>>
malloc 内存分配
查看>>
概率论
查看>>
实验四
查看>>
python,ModuleNotFoundError,is not a package
查看>>
mybatis 空字符串和0
查看>>
服务器上centos 7 配置静态IP
查看>>
C# unsafe模式内存操作深入探索
查看>>
Redis拾遗(一)
查看>>
js字符串转换为Json对象的三种写法
查看>>
Is it possible to display icons in a PopupMenu?
查看>>
Atitit.常见的4gl 第四代编程语言 与 dsl
查看>>
Atitit js es5 es6新特性 attilax总结
查看>>
JavaWeb学习记录(三)——网页中文编码问题
查看>>
$( document ).ready()&$(window).load()
查看>>