Java HBase API

Time:2021-12-8

Correspondence between HBase API class and data model

 

 

 

HBaseAdmin

 

Class: org.apache.hadoop.hbase.client.HBaseAdmin

Function: it provides an interface for managing the table metadata of an HBase database. The methods it provides include: creating and deleting tables, listing table entries, enabling or disabling tables, and adding or deleting column families.

 

 

 

 

HBaseConfiguration

 

Class: org.apache.hadoop.hbase.HBaseConfiguration

Function: configure HBase

 

 

 

HTableDescriptor

 

Class: org.apache.hadoop.hbase.HTableDescriptor

Function: contains the name of the table and its corresponding column family

 

 

 

 

 

HColumnDescriptor

 

 

Class: org.apache.hadoop.hbase.HColumnDescriptor

Function: maintains information about a column family, such as the number of versions, compression settings, etc. It is typically used when creating a table or adding a column family to an existing table. Once created, a column family cannot be modified directly; it can only be deleted and recreated. Deleting a column family also deletes the data stored in it.

 

 

 

 

 

 

HTable

 

 

 

 

 

 

 

 

Put

 

Class: org.apache.hadoop.hbase.client.Put

Function: used to add a single row

 

 

 

 

 

 

Get

 

 

Class: org.apache.hadoop.hbase.client.Get

Function: used to obtain the relevant information of a single row

 

 

 

 

Result

Class: org.apache.hadoop.hbase.client.Result

Function: stores a single row of the table returned by a get or scan operation. Using the methods provided by this class, you can obtain cell values directly or as various map structures (key-value pairs).

 

 

Create table

 

1 package com.shujia;
 2 
 3 import org.apache.hadoop.conf.Configuration;
 4 import org.apache.hadoop.hbase.HBaseConfiguration;
 5 import org.apache.hadoop.hbase.HColumnDescriptor;
 6 import org.apache.hadoop.hbase.HTableDescriptor;
 7 import org.apache.hadoop.hbase.TableName;
 8 import org.apache.hadoop.hbase.client.Admin;
 9 import org.apache.hadoop.hbase.client.Connection;
10 import org.apache.hadoop.hbase.client.ConnectionFactory;
11 
12 import java.io.IOException;
13 
14 public class Demo01 {
15     public static void main(String[] args) throws IOException {
16 
17 // create a configuration and specify ZK cluster
18         Configuration conf = HBaseConfiguration.create();
19         conf.set("hbase.zookeeper.quorum","master,node1,node2");
20 
21 // create a connection
22         Connection coon = ConnectionFactory.createConnection(conf);
23 
24 // create admin object
25         Admin admin = coon.getAdmin();
26 
27 // create table
28         HTableDescriptor test_api = new HTableDescriptor(TableName.valueOf("test_api"));
29 
30 // create column cluster
31         HColumnDescriptor cf1 = new HColumnDescriptor("cf1");
32 
33 // configure column clusters
34         cf1.setTimeToLive(20); // Set the death time to 20s
35         cf1.setMaxVersions(3); // Set version
36 
37 // add column cluster
38         test_api.addFamily(cf1);
39 
40 // create table
41         admin.createTable(test_api);
42 
43 // close the connection
44         coon.close();
45     }
46 }

 

 

1 package com.shujia;
  2 
  3 import org.apache.hadoop.conf.Configuration;
  4 import org.apache.hadoop.hbase.HBaseConfiguration;
  5 import org.apache.hadoop.hbase.HColumnDescriptor;
  6 import org.apache.hadoop.hbase.HTableDescriptor;
  7 import org.apache.hadoop.hbase.TableName;
  8 import org.apache.hadoop.hbase.client.*;
  9 import org.apache.hadoop.hbase.util.Addressing;
 10 import org.apache.hadoop.hbase.util.Bytes;
 11 import org.junit.After;
 12 import org.junit.Before;
 13 import org.junit.Test;
 14 
 15 import javax.swing.tree.VariableHeightLayoutCache;
 16 import java.io.BufferedReader;
 17 import java.io.FileReader;
 18 import java.io.IOException;
 19 import java.util.ArrayList;
 20 
 21 public class Demo03API {
 22     Connection conn;
 23     TableName table=TableName.valueOf("test_api");
 24 
 25     @Before
 26     public void init() throws IOException {
 27         Configuration conf = HBaseConfiguration.create();
 28         conf.set("hbase.zookeeper.quorum","master,node1,node2");
 29 
 30         conn = ConnectionFactory.createConnection(conf);
 31     }
 32     //put
 33     @Test
 34     public void Put() throws IOException {
 35         Table test_api = conn.getTable(TableName.valueOf("test_api"));
 36         Put put = new Put("001".getBytes());
 37 put. Addcolumn ("CF1". Getbytes(), "name". Getbytes(), "Zhang San". Getbytes());
 38         test_api.put(put);
 39     }
 40 
 41 // putall reads students.txt and writes the data to HBase
 42     @Test
 43     public void PutAll() throws IOException {
 44 // create the students info table
 45         Admin admin = conn.getAdmin();
 46         TableName studentsT = TableName.valueOf("students");
 47 // judge whether the table exists
 48         if (!admin.tableExists(studentsT)) {
 49             HTableDescriptor students = new HTableDescriptor(studentsT);
 50             HColumnDescriptor info = new HColumnDescriptor("info");
 51             students.addFamily(info);
 52             admin.createTable(students);
 53         }
 54 
 55         Table stu = conn.getTable(studentsT);
 56 
 57 
 58         BufferedReader br = new BufferedReader(new FileReader("data/students.txt"));
 59         String line = null;
 60         ArrayList puts = new ArrayList();
 61         int batchSize = 11;
 62         while ((line = br.readLine()) != null) {
 63 
 64 // read each row of data
 65             String[] split = line.split(",");
 66             String id = split[0];
 67             String name = split[1];
 68             String age = split[2];
 69             String gender = split[3];
 70             String clazz = split[4];
 71             Put put = new Put(id.getBytes());
 72             put.addColumn("info".getBytes(), "name".getBytes(), name.getBytes());
 73             put.addColumn("info".getBytes(), "age".getBytes(), age.getBytes());
 74             put.addColumn("info".getBytes(), "gender".getBytes(), gender.getBytes());
 75             put.addColumn("info".getBytes(), "clazz".getBytes(), clazz.getBytes());
 76             puts.add(put); //  Add the put object constructed from each data to the put list
 77             if (puts.size() == batchSize) {
 78                 stu.put(puts); //  Batch write
 79                 puts = new ArrayList();
 80             }
 81         }
 82         if (puts.size() != 0) {
 83             stu.put(puts); //  Batch write
 84         }
 85 
 86     }
 87     //get
 88     @Test
 89     public void Get() throws IOException {
 90         Table test_api = conn.getTable(table);
 91         Get get = new Get("001".getBytes());
 92         Result rs = test_api.get(get);
 93         byte[] value = rs.getValue("cf1".getBytes(), "name".getBytes());
 94         System.out.println( Bytes.toString(value));
 95     }
 96 
 97 
 98 @ test // alter table modify table
 99     public void alterTable() throws IOException {
100         Admin admin = conn.getAdmin();
101 // obtain the original results of the table
102         HTableDescriptor tableDescriptor = admin.getTableDescriptor(table);
103 // get the array composed of all column clusters
104         HColumnDescriptor[] columnFamilies = tableDescriptor.getColumnFamilies();
105 // traverse column clusters
106         for (HColumnDescriptor columnFamily : columnFamilies) {
107 // get the column cluster name
108             String cfName = columnFamily.getNameAsString();
109 // modify the column cluster named CF1
110             if("cf1".equals(cfName)){
111 // modify TTL
112                 columnFamily.setTimeToLive(100000);
113             }
114         }
115 // modify the table structure
116         admin.modifyTable(table,tableDescriptor);
117 
118 
119     }
120 
121     @After
122     public void closed() throws IOException {
123         conn.close();
124     }
125 }