hbase版本升级的api对比

前言

今天来介绍一下 hbase 从 1.2.6 版本升级到 2.1.0 版本之后,两个版本 api 方法的一些不同之处。

hbase的工具类

在介绍hbase的相关的java api之前,这里先介绍一下hbase的工具类,这边我打算将这个demo写成工具类形式,具体的方法,后面可能会介绍,但是不可能面面俱到,具体的还是需要移步apache官网。

步骤一

构建maven工程,添加相关的maven依赖如下:

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">

    <modelVersion>4.0.0</modelVersion>

    <groupId>com.linewell</groupId>
    <artifactId>hbase-test</artifactId>
    <packaging>jar</packaging>
    <version>1.0-SNAPSHOT</version>

    <name>A Camel Route</name>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
        <!-- Single source of truth for the HBase client version.
             Must be 2.x: the demo code uses TableDescriptorBuilder /
             ColumnFamilyDescriptorBuilder / Admin.addColumnFamily,
             none of which exist in 1.2.6. -->
        <hbase.version>2.1.0</hbase.version>
    </properties>

    <dependencyManagement>
        <dependencies>
            <!-- Camel BOM -->
            <dependency>
                <groupId>org.apache.camel</groupId>
                <artifactId>camel-parent</artifactId>
                <version>2.22.1</version>
                <scope>import</scope>
                <type>pom</type>
            </dependency>
        </dependencies>
    </dependencyManagement>

    <dependencies>
        <dependency>
            <groupId>org.apache.camel</groupId>
            <artifactId>camel-core</artifactId>
        </dependency>

        <!-- logging -->
        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-api</artifactId>
            <scope>runtime</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-core</artifactId>
            <scope>runtime</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-slf4j-impl</artifactId>
            <scope>runtime</scope>
        </dependency>

        <!-- testing -->
        <dependency>
            <groupId>org.apache.camel</groupId>
            <artifactId>camel-test</artifactId>
            <scope>test</scope>
        </dependency>
        <!-- NOTE: HBase 2.1.0 was built against Hadoop 2.7.x; 2.7.3 works for
             the client side of this demo. -->
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>2.7.3</version>
            <exclusions>
                <exclusion>
                    <artifactId>guava</artifactId>
                    <groupId>com.google.guava</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>xz</artifactId>
                    <groupId>org.tukaani</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>commons-compress</artifactId>
                    <groupId>org.apache.commons</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>jackson-core-asl</artifactId>
                    <groupId>org.codehaus.jackson</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>commons-lang</artifactId>
                    <groupId>commons-lang</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>jackson-jaxrs</artifactId>
                    <groupId>org.codehaus.jackson</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>jackson-mapper-asl</artifactId>
                    <groupId>org.codehaus.jackson</groupId>
                </exclusion>
            </exclusions>
        </dependency>
        <!-- https://mvnrepository.com/artifact/org.apache.hbase/hbase-server -->
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-server</artifactId>
            <version>${hbase.version}</version>
            <exclusions>
                <exclusion>
                    <artifactId>jackson-xc</artifactId>
                    <groupId>org.codehaus.jackson</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>hadoop-annotations</artifactId>
                    <groupId>org.apache.hadoop</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>hadoop-auth</artifactId>
                    <groupId>org.apache.hadoop</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>guava</artifactId>
                    <groupId>com.google.guava</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>hadoop-common</artifactId>
                    <groupId>org.apache.hadoop</groupId>
                </exclusion>
            </exclusions>
        </dependency>
        <!-- https://mvnrepository.com/artifact/org.apache.hbase/hbase-client -->
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-client</artifactId>
            <version>${hbase.version}</version>
            <exclusions>
                <exclusion>
                    <artifactId>hadoop-common</artifactId>
                    <groupId>org.apache.hadoop</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>guava</artifactId>
                    <groupId>com.google.guava</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>hadoop-auth</artifactId>
                    <groupId>org.apache.hadoop</groupId>
                </exclusion>
            </exclusions>
        </dependency>
        <!-- https://mvnrepository.com/artifact/com.google.guava/guava -->
        <dependency>
            <groupId>com.google.guava</groupId>
            <artifactId>guava</artifactId>
            <version>14.0.1</version>
            <exclusions>
                <exclusion>
                    <artifactId>jsr305</artifactId>
                    <groupId>com.google.code.findbugs</groupId>
                </exclusion>
            </exclusions>
        </dependency>
    </dependencies>

    <build>
        <defaultGoal>install</defaultGoal>

        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.7.0</version>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-resources-plugin</artifactId>
                <version>3.0.2</version>
                <configuration>
                    <encoding>UTF-8</encoding>
                </configuration>
            </plugin>

            <!-- Allows the example to be run via 'mvn compile exec:java' -->
            <plugin>
                <groupId>org.codehaus.mojo</groupId>
                <artifactId>exec-maven-plugin</artifactId>
                <version>1.6.0</version>
                <configuration>
                    <mainClass>com.linewell.MainApp</mainClass>
                    <includePluginDependencies>false</includePluginDependencies>
                </configuration>
            </plugin>

        </plugins>
    </build>

</project>

步骤二

编写工具类,下面的工具类主要对表的创建和列的添加等等进行了封装,如果有更多的想法,可以在此基础上进行添加:

/**
 * Copyright (C), 2015-2018, hzhiping@linewell.com
 * Title:HbaseUtil
 * Author:hzhiping
 * Date:2018/9/19 10:01
 * Description: 编写HbaseUtil工具类,可执行常规的增删改查操作
 */
package com.linewell.util;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;

public class HbaseUtil {
    private static Configuration configuration = null;
    private static Connection connection = null;
    // ZooKeeper quorum of the target cluster
    private static final String HBASE_ZOOKEEPER_QUORUM = "master.org.cn,slave01.org.cn,slave03.org.cn";
    // These values can be verified on the HBase web UI (port 16010)
    private static final String ZOOKEEPER_ZNODE_PARENT = "/hbase";
    private static final String HBASE_ZOOKEEPER_PROPERTY_CLIENTPORT = "2181";

    // Create the HBase connection once for the whole JVM. Connection is
    // heavyweight and thread-safe, so sharing a single instance is the
    // recommended pattern; Admin/Table handles are cheap and per-call.
    static {
        configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum", HBASE_ZOOKEEPER_QUORUM);
        configuration.set("zookeeper.znode.parent", ZOOKEEPER_ZNODE_PARENT);
        configuration.set("hbase.zookeeper.property.clientPort", HBASE_ZOOKEEPER_PROPERTY_CLIENTPORT);
        try {
            connection = ConnectionFactory.createConnection(configuration);
        } catch (IOException e) {
            // NOTE(review): on failure `connection` stays null and every later
            // call throws NPE; consider rethrowing instead of only logging.
            e.printStackTrace();
        }
    }

    /**
     * Creates a table with one initial column family, dropping any existing
     * table of the same name first (HBase requires at least one family).
     *
     * @param column    name of the initial column family
     * @param tableName name of the table to (re)create
     * @throws IOException if the HBase RPC fails
     */
    public static void createTable(String column, String tableName) throws IOException {
        /* 1.2.6 equivalent for comparison:
        HTableDescriptor hTableDescriptor = new HTableDescriptor(table);
        HColumnDescriptor hColumnDescriptor = new HColumnDescriptor(column);
        hColumnDescriptor.setMaxVersions(5);
        hColumnDescriptor.setBlockCacheEnabled(true);
        hColumnDescriptor.setBlocksize(1800000);
        hTableDescriptor.addFamily(hColumnDescriptor);
        admin.createTable(hTableDescriptor);
        */
        TableName table = TableName.valueOf(tableName);
        // try-with-resources: the Admin handle was previously leaked
        try (Admin admin = connection.getAdmin()) {
            // Drop the table first if it already exists (disable, then delete)
            if (admin.tableExists(table)) {
                admin.disableTable(table);
                admin.deleteTable(table);
            }
            // Use the public builder API; ModifyableColumnFamilyDescriptor is
            // an internal (IA.Private) class and should not be instantiated.
            ColumnFamilyDescriptor family = ColumnFamilyDescriptorBuilder
                    .newBuilder(Bytes.toBytes(column))
                    .setMaxVersions(5)
                    .setBlockCacheEnabled(true)
                    .setBlocksize(1800000)
                    .build();
            TableDescriptor descriptor = TableDescriptorBuilder.newBuilder(table)
                    .setColumnFamily(family)
                    .build();
            admin.createTable(descriptor);
            System.out.println("---------表创建完成---------");
        }
    }

    /**
     * Deletes the given table if it exists (disable first, as HBase requires).
     *
     * @param tablename table name
     * @throws IOException if the HBase RPC fails
     */
    public static void deleteTable(String tablename) throws IOException {
        TableName tableName = TableName.valueOf(tablename);
        try (Admin admin = connection.getAdmin()) {
            if (admin.tableExists(tableName)) {
                admin.disableTable(tableName);
                admin.deleteTable(tableName);
                System.out.println("---------删除表完成---------");
            }
        }
    }

    /**
     * Lists the tables present in the cluster; this API is identical in
     * 1.2.6 and 2.1.0.
     *
     * @return the table names found
     * @throws IOException if the HBase RPC fails
     */
    public static TableName[] getTable() throws IOException {
        try (Admin admin = connection.getAdmin()) {
            TableName[] listTableName = admin.listTableNames();
            System.out.println("---------查询表---------");
            for (TableName tableName : listTableName) {
                System.out.println(tableName.toString());
            }
            return listTableName;
        }
    }

    /**
     * Adds the given column families to an existing table.
     * (Method name kept as-is — "addColumn" was intended — so existing
     * callers do not break.)
     *
     * @param columns   column family names to add
     * @param tableName table name
     * @throws IOException if the HBase RPC fails
     */
    public static void addColume(String[] columns, String tableName) throws IOException {
        /* 1.2.6 equivalent for comparison:
        HColumnDescriptor hColumnDescriptor = new HColumnDescriptor(columns[i]);
        admin.addColumn(TableName.valueOf(tableName), hColumnDescriptor);
        */
        TableName table = TableName.valueOf(tableName);
        try (Admin admin = connection.getAdmin()) {
            for (String column : columns) {
                ColumnFamilyDescriptor family =
                        ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(column)).build();
                admin.addColumnFamily(table, family);
                System.out.println("inserted " + column + "...");
            }
        }
    }

    /**
     * Inserts one sample row into the table (demo data only). The row key
     * embeds the current timestamp, so repeated calls create distinct rows.
     *
     * @param tablename table name
     * @throws IOException if the HBase RPC fails
     */
    public static void insertData(String tablename) throws IOException {
        /* 1.2.6 equivalent for comparison:
        HTable hTable = new HTable(configuration, TableName.valueOf(tableName));
        Put put = new Put(Bytes.toBytes("row " + timeStamp.toString()));
        put.add(family, qualifier, value); // Put.add was replaced by addColumn
        hTable.put(put);
        hTable.close();
        */
        TableName tableName = TableName.valueOf(tablename);
        // try-with-resources: the Table handle was previously leaked
        try (Table table = connection.getTable(tableName)) {
            long timeStamp = System.currentTimeMillis();
            Put put = new Put(Bytes.toBytes("row " + timeStamp));
            put.addColumn(Bytes.toBytes("personal data"), Bytes.toBytes("name"), Bytes.toBytes("raju"));
            put.addColumn(Bytes.toBytes("personal data"), Bytes.toBytes("city"), Bytes.toBytes("quanzhou"));
            put.addColumn(Bytes.toBytes("professional data"), Bytes.toBytes("designation"), Bytes.toBytes("manager"));
            put.addColumn(Bytes.toBytes("professional data"), Bytes.toBytes("salary"), Bytes.toBytes("50000"));
            table.put(put);
            System.out.println("inserted data...");
        }
    }

    /**
     * Updates a cell; in HBase an update is simply a Put on an existing
     * row key (a new cell version is written).
     *
     * @param tableName table name
     * @throws IOException if the HBase RPC fails
     */
    public static void uptData(String tableName) throws IOException {
        TableName tablename = TableName.valueOf(tableName);
        try (Table table = connection.getTable(tablename)) {
            // NOTE(review): hard-coded demo row key from a previous insert
            Put put = new Put(Bytes.toBytes("row 1539179928919"));
            put.addColumn(Bytes.toBytes("personal data"), Bytes.toBytes("name"), Bytes.toBytes("hzhihui"));
            table.put(put);
            System.out.println("updated data...");
        }
    }

    /**
     * Deletes one or more rows by row key in a single batch call.
     *
     * @param tableName table name
     * @param rowKeys   row keys of the rows to delete
     * @throws IOException if the HBase RPC fails
     */
    public static void delData(String tableName, String[] rowKeys) throws IOException {
        TableName tablename = TableName.valueOf(tableName);
        try (Table table = connection.getTable(tablename)) {
            List<Delete> deleteList = new ArrayList<Delete>(rowKeys.length);
            for (String rowKey : rowKeys) {
                deleteList.add(new Delete(Bytes.toBytes(rowKey)));
            }
            table.delete(deleteList);
            System.out.println("deleted data...");
        }
    }

    /**
     * Scans the whole table and prints every cell as
     * "rowKey qualifier value timestamp".
     *
     * @param tableName table name
     * @throws IOException if the HBase RPC fails
     */
    public static void scanTable(String tableName) throws IOException {
        /* 1.2.6 obtained the table via new HTable(configuration, ...);
           2.x uses connection.getTable(...) instead. */
        TableName tablename = TableName.valueOf(tableName);
        // Both the Table and the ResultScanner must be closed (were leaked)
        try (Table table = connection.getTable(tablename);
             ResultScanner resultScanner = table.getScanner(new Scan())) {
            for (Result result : resultScanner) {
                for (Cell cell : result.rawCells()) {
                    System.out.println(Bytes.toString(result.getRow()) + "\t"
                            + Bytes.toString(CellUtil.cloneQualifier(cell)) + "\t"
                            + Bytes.toString(CellUtil.cloneValue(cell)) + "\t"
                            + cell.getTimestamp()
                    );
                    System.out.println("--------------------华丽的分割线--------------------");
                }
            }
        }
    }
}

HbaseUtil.java

/**
 * Copyright (C), 2015-2018, hzhiping@linewell.com
 * Title:HbaseUtil
 * Author:hzhiping
 * Date:2018/9/19 10:01
 * Description: 编写HbaseUtil工具类,可执行常规的增删改查操作
 */
package com.linewell.util;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;

public class HbaseUtil {
    private static Configuration configuration = null;
    private static Connection connection = null;
    // ZooKeeper quorum of the target cluster
    private static final String HBASE_ZOOKEEPER_QUORUM = "master.org.cn,slave01.org.cn,slave03.org.cn";
    // These values can be verified on the HBase web UI (port 16010)
    private static final String ZOOKEEPER_ZNODE_PARENT = "/hbase";
    private static final String HBASE_ZOOKEEPER_PROPERTY_CLIENTPORT = "2181";

    // Create the HBase connection once for the whole JVM. Connection is
    // heavyweight and thread-safe, so sharing a single instance is the
    // recommended pattern; Admin/Table handles are cheap and per-call.
    static {
        configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum", HBASE_ZOOKEEPER_QUORUM);
        configuration.set("zookeeper.znode.parent", ZOOKEEPER_ZNODE_PARENT);
        configuration.set("hbase.zookeeper.property.clientPort", HBASE_ZOOKEEPER_PROPERTY_CLIENTPORT);
        try {
            connection = ConnectionFactory.createConnection(configuration);
        } catch (IOException e) {
            // NOTE(review): on failure `connection` stays null and every later
            // call throws NPE; consider rethrowing instead of only logging.
            e.printStackTrace();
        }
    }

    /**
     * Creates a table with one initial column family, dropping any existing
     * table of the same name first (HBase requires at least one family).
     *
     * @param column    name of the initial column family
     * @param tableName name of the table to (re)create
     * @throws IOException if the HBase RPC fails
     */
    public static void createTable(String column, String tableName) throws IOException {
        /* 1.2.6 equivalent for comparison:
        HTableDescriptor hTableDescriptor = new HTableDescriptor(table);
        HColumnDescriptor hColumnDescriptor = new HColumnDescriptor(column);
        hColumnDescriptor.setMaxVersions(5);
        hColumnDescriptor.setBlockCacheEnabled(true);
        hColumnDescriptor.setBlocksize(1800000);
        hTableDescriptor.addFamily(hColumnDescriptor);
        admin.createTable(hTableDescriptor);
        */
        TableName table = TableName.valueOf(tableName);
        // try-with-resources: the Admin handle was previously leaked
        try (Admin admin = connection.getAdmin()) {
            // Drop the table first if it already exists (disable, then delete)
            if (admin.tableExists(table)) {
                admin.disableTable(table);
                admin.deleteTable(table);
            }
            // Use the public builder API; ModifyableColumnFamilyDescriptor is
            // an internal (IA.Private) class and should not be instantiated.
            ColumnFamilyDescriptor family = ColumnFamilyDescriptorBuilder
                    .newBuilder(Bytes.toBytes(column))
                    .setMaxVersions(5)
                    .setBlockCacheEnabled(true)
                    .setBlocksize(1800000)
                    .build();
            TableDescriptor descriptor = TableDescriptorBuilder.newBuilder(table)
                    .setColumnFamily(family)
                    .build();
            admin.createTable(descriptor);
            System.out.println("---------表创建完成---------");
        }
    }

    /**
     * Deletes the given table if it exists (disable first, as HBase requires).
     *
     * @param tablename table name
     * @throws IOException if the HBase RPC fails
     */
    public static void deleteTable(String tablename) throws IOException {
        TableName tableName = TableName.valueOf(tablename);
        try (Admin admin = connection.getAdmin()) {
            if (admin.tableExists(tableName)) {
                admin.disableTable(tableName);
                admin.deleteTable(tableName);
                System.out.println("---------删除表完成---------");
            }
        }
    }

    /**
     * Lists the tables present in the cluster; this API is identical in
     * 1.2.6 and 2.1.0.
     *
     * @return the table names found
     * @throws IOException if the HBase RPC fails
     */
    public static TableName[] getTable() throws IOException {
        try (Admin admin = connection.getAdmin()) {
            TableName[] listTableName = admin.listTableNames();
            System.out.println("---------查询表---------");
            for (TableName tableName : listTableName) {
                System.out.println(tableName.toString());
            }
            return listTableName;
        }
    }

    /**
     * Adds the given column families to an existing table.
     * (Method name kept as-is — "addColumn" was intended — so existing
     * callers do not break.)
     *
     * @param columns   column family names to add
     * @param tableName table name
     * @throws IOException if the HBase RPC fails
     */
    public static void addColume(String[] columns, String tableName) throws IOException {
        /* 1.2.6 equivalent for comparison:
        HColumnDescriptor hColumnDescriptor = new HColumnDescriptor(columns[i]);
        admin.addColumn(TableName.valueOf(tableName), hColumnDescriptor);
        */
        TableName table = TableName.valueOf(tableName);
        try (Admin admin = connection.getAdmin()) {
            for (String column : columns) {
                ColumnFamilyDescriptor family =
                        ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(column)).build();
                admin.addColumnFamily(table, family);
                System.out.println("inserted " + column + "...");
            }
        }
    }

    /**
     * Inserts one sample row into the table (demo data only). The row key
     * embeds the current timestamp, so repeated calls create distinct rows.
     *
     * @param tablename table name
     * @throws IOException if the HBase RPC fails
     */
    public static void insertData(String tablename) throws IOException {
        /* 1.2.6 equivalent for comparison:
        HTable hTable = new HTable(configuration, TableName.valueOf(tableName));
        Put put = new Put(Bytes.toBytes("row " + timeStamp.toString()));
        put.add(family, qualifier, value); // Put.add was replaced by addColumn
        hTable.put(put);
        hTable.close();
        */
        TableName tableName = TableName.valueOf(tablename);
        // try-with-resources: the Table handle was previously leaked
        try (Table table = connection.getTable(tableName)) {
            long timeStamp = System.currentTimeMillis();
            Put put = new Put(Bytes.toBytes("row " + timeStamp));
            put.addColumn(Bytes.toBytes("personal data"), Bytes.toBytes("name"), Bytes.toBytes("raju"));
            put.addColumn(Bytes.toBytes("personal data"), Bytes.toBytes("city"), Bytes.toBytes("quanzhou"));
            put.addColumn(Bytes.toBytes("professional data"), Bytes.toBytes("designation"), Bytes.toBytes("manager"));
            put.addColumn(Bytes.toBytes("professional data"), Bytes.toBytes("salary"), Bytes.toBytes("50000"));
            table.put(put);
            System.out.println("inserted data...");
        }
    }

    /**
     * Updates a cell; in HBase an update is simply a Put on an existing
     * row key (a new cell version is written).
     *
     * @param tableName table name
     * @throws IOException if the HBase RPC fails
     */
    public static void uptData(String tableName) throws IOException {
        TableName tablename = TableName.valueOf(tableName);
        try (Table table = connection.getTable(tablename)) {
            // NOTE(review): hard-coded demo row key from a previous insert
            Put put = new Put(Bytes.toBytes("row 1539179928919"));
            put.addColumn(Bytes.toBytes("personal data"), Bytes.toBytes("name"), Bytes.toBytes("hzhihui"));
            table.put(put);
            System.out.println("updated data...");
        }
    }

    /**
     * Deletes one or more rows by row key in a single batch call.
     *
     * @param tableName table name
     * @param rowKeys   row keys of the rows to delete
     * @throws IOException if the HBase RPC fails
     */
    public static void delData(String tableName, String[] rowKeys) throws IOException {
        TableName tablename = TableName.valueOf(tableName);
        try (Table table = connection.getTable(tablename)) {
            List<Delete> deleteList = new ArrayList<Delete>(rowKeys.length);
            for (String rowKey : rowKeys) {
                deleteList.add(new Delete(Bytes.toBytes(rowKey)));
            }
            table.delete(deleteList);
            System.out.println("deleted data...");
        }
    }

    /**
     * Scans the whole table and prints every cell as
     * "rowKey qualifier value timestamp".
     *
     * @param tableName table name
     * @throws IOException if the HBase RPC fails
     */
    public static void scanTable(String tableName) throws IOException {
        /* 1.2.6 obtained the table via new HTable(configuration, ...);
           2.x uses connection.getTable(...) instead. */
        TableName tablename = TableName.valueOf(tableName);
        // Both the Table and the ResultScanner must be closed (were leaked)
        try (Table table = connection.getTable(tablename);
             ResultScanner resultScanner = table.getScanner(new Scan())) {
            for (Result result : resultScanner) {
                for (Cell cell : result.rawCells()) {
                    System.out.println(Bytes.toString(result.getRow()) + "\t"
                            + Bytes.toString(CellUtil.cloneQualifier(cell)) + "\t"
                            + Bytes.toString(CellUtil.cloneValue(cell)) + "\t"
                            + cell.getTimestamp()
                    );
                    System.out.println("--------------------华丽的分割线--------------------");
                }
            }
        }
    }
}

当然了,后续笔者会将demo代码链接上来。

源码链接

本例子代码:仓库的hbase01

坚壁清野

上一篇:转:linux coredump调试


下一篇:sdut 2416:Fruit Ninja II(第三届山东省省赛原题,数学题)