hbase_inputformat.py
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
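
"""
Read rows from an HBase table into Spark via newAPIHadoopRDD and print them.

One possible invocation (the jar path is illustrative): the pythonconverters
classes referenced below ship in the Spark examples jar, so that jar has to
be on the driver classpath, e.g.

  spark-submit --driver-class-path /path/to/spark-examples.jar hbase_inputformat.py
"""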
# Star imports from local project modules; load_env() below is expected to
# come from one of them.
from common import *
from dependencies import *
from pyspark import SparkConf, SparkContext


if __name__ == "__main__":
    load_env()
    host = 'HPC-server'  # ZooKeeper quorum host for the HBase cluster
    table = 'test'       # HBase table to scan
    conf = SparkConf().setAppName("HBaseInputFormat")
    print(conf.getAll())  # log the effective Spark configuration
    sc = SparkContext(conf=conf)
    # Read- and write-side classes and converters for HBase I/O. Only the
    # read-side entries are used by newAPIHadoopRDD below; the write-side
    # entries are exercised in the sketch at the end of the script.
    hparams = dict(
        inputFormatClass="org.apache.hadoop.hbase.mapreduce.TableInputFormat",
        readKeyClass="org.apache.hadoop.hbase.io.ImmutableBytesWritable",
        readValueClass="org.apache.hadoop.hbase.client.Result",
        readKeyConverter="org.apache.spark.examples.pythonconverters.ImmutableBytesWritableToStringConverter",
        readValueConverter="org.apache.spark.examples.pythonconverters.CustomHBaseResultToStringConverter",
        outputFormatClass="org.apache.hadoop.hbase.mapreduce.TableOutputFormat",
        writeKeyClass="org.apache.hadoop.hbase.io.ImmutableBytesWritable",
        # "org.apache.hadoop.io.Writable" is the generic alternative here
        # (it is what Spark's hbase_outputformat.py example uses).
        writeValueClass="org.apache.hadoop.hbase.client.Put",
        writeKeyConverter="org.apache.spark.examples.pythonconverters.StringToImmutableBytesWritableConverter",
        writeValueConverter="org.apache.spark.examples.pythonconverters.StringListToPutConverter",
    )
    # Hadoop configuration handed to the input format: which ZooKeeper quorum
    # to contact and which table TableInputFormat should scan.
    hconf = {
        "hbase.zookeeper.quorum": host,
        "hbase.mapreduce.inputtable": table,
    }
    # Each record arrives as a (row key, stringified row) pair via the converters.
    hbase_rdd = sc.newAPIHadoopRDD(inputFormatClass=hparams["inputFormatClass"],
                                   keyClass=hparams["readKeyClass"],
                                   valueClass=hparams["readValueClass"],
                                   keyConverter=hparams["readKeyConverter"],
                                   valueConverter=hparams["readValueConverter"],
                                   conf=hconf)
    # collect() pulls every row to the driver; fine for a small test table.
    output = hbase_rdd.collect()
    for (k, v) in output:
        print(k, v)
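
    # The write-side entries in hparams are never exercised above. Below is a
    # minimal sketch of the matching write path, modeled on Spark's
    # hbase_outputformat.py example; the column family 'f1' and the sample row
    # are assumptions, not part of the original script.
    # StringListToPutConverter expects (rowkey, [row, family, qualifier, value]).
    write_conf = {
        "hbase.zookeeper.quorum": host,
        "hbase.mapred.outputtable": table,  # note: 'mapred', per TableOutputFormat.OUTPUT_TABLE
        "mapreduce.outputformat.class": hparams["outputFormatClass"],
        "mapreduce.job.output.key.class": hparams["writeKeyClass"],
        "mapreduce.job.output.value.class": hparams["writeValueClass"],
    }
    sc.parallelize([("row2", ["row2", "f1", "q1", "value2"])]) \
        .saveAsNewAPIHadoopDataset(conf=write_conf,
                                   keyConverter=hparams["writeKeyConverter"],
                                   valueConverter=hparams["writeValueConverter"])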
    sc.stop()