path: root/src/main/java/cn/ac/iie/utils/HiveDao/HiveDataSourceUtil.java
package cn.ac.iie.utils.HiveDao;

import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.fastjson.JSONException;
import com.alibaba.fastjson.JSONObject;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.alibaba.fastjson.JSONArray;
import org.apache.log4j.Logger;

import java.sql.*;
import java.util.Properties;

/**
 * Hive JDBC connection pool backed by Druid.
 *
 * @author Colbert
 */
public class HiveDataSourceUtil {
    private static DruidDataSource hiveDataSource = new DruidDataSource();
    // Shared connection handle; note that a single static Connection is not thread-safe.
    public static Connection conn = null;
    private static final Logger logger = Logger.getLogger(HiveDataSourceUtil.class);

    public static DruidDataSource getHiveDataSource() {
        if (hiveDataSource.isInited()) {
            return hiveDataSource;
        }

        try {
            Properties dsProp = new Properties();
            dsProp.load(HiveDataSourceUtil.class.getClassLoader().getResourceAsStream("hive.properties"));
            hiveDataSource.setDriverClassName(dsProp.getProperty("hive_jdbc_drivers"));
            // Basic properties: url, username, password
            hiveDataSource.setUrl(dsProp.getProperty("hive_jdbc_url"));
            hiveDataSource.setUsername(dsProp.getProperty("hive_jdbc_username"));
            hiveDataSource.setPassword(dsProp.getProperty("hive_jdbc_password"));

            // Pool sizing: initial size, minimum idle, maximum active
            hiveDataSource.setInitialSize(Integer.parseInt(dsProp.getProperty("hive_initialSize")));
            hiveDataSource.setMinIdle(Integer.parseInt(dsProp.getProperty("hive_minIdle")));
            hiveDataSource.setMaxActive(Integer.parseInt(dsProp.getProperty("hive_maxActive")));

            // Maximum time to wait when acquiring a connection, in milliseconds
            hiveDataSource.setMaxWait(Integer.parseInt(dsProp.getProperty("hive_maxWait")));

            // Interval between eviction runs that close idle connections, in milliseconds
            hiveDataSource.setTimeBetweenEvictionRunsMillis(60000);

            // Minimum time a connection stays idle in the pool before it can be evicted, in milliseconds
            hiveDataSource.setMinEvictableIdleTimeMillis(300000);

            hiveDataSource.setValidationQuery("SELECT 1");
            hiveDataSource.setTestWhileIdle(true);
            hiveDataSource.setTestOnBorrow(true);
//            hiveDataSource.setKeepAlive(true);

            // Enable PSCache and set its per-connection size
            hiveDataSource.setPoolPreparedStatements(true);
            hiveDataSource.setMaxPoolPreparedStatementPerConnectionSize(20);

            // Monitoring/statistics filters (left disabled)
//            hiveDataSource.setFilters("stat");

            hiveDataSource.init();
        } catch (Exception e) {
            logger.error("HiveDataSourceUtil--failed to initialize Hive data source!", e);
            closeHiveDataSource();
        }
        return hiveDataSource;
    }
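
    /*
     * For reference, a minimal hive.properties supplying the keys read above
     * might look like the following. The values are illustrative assumptions
     * (a HiveServer2 driver and a local default database), not settings taken
     * from this repository:
     *
     *   hive_jdbc_drivers=org.apache.hive.jdbc.HiveDriver
     *   hive_jdbc_url=jdbc:hive2://localhost:10000/default
     *   hive_jdbc_username=hive
     *   hive_jdbc_password=
     *   hive_initialSize=5
     *   hive_minIdle=5
     *   hive_maxActive=20
     *   hive_maxWait=60000
     */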

    /**
     * Close the Hive connection pool.
     */
    public static void closeHiveDataSource() {
        if (hiveDataSource != null) {
            hiveDataSource.close();
        }
    }

    /**
     * Get a Hive connection from the pool.
     *
     * @return the pooled connection, or null if acquisition failed
     */
    public static Connection getHiveConn() {
        try {
            hiveDataSource = getHiveDataSource();
            conn = hiveDataSource.getConnection();
        } catch (Exception e) {
            logger.error("HiveDataSourceUtil--" + e + ":获取Hive连接失败!");
        }
        return conn;
    }

    /**
     * Close the shared Hive connection.
     */
    public static void closeConn() {
        try {
            if (conn != null) {
                conn.close();
            }
        } catch (Exception e) {
            logger.error("HiveDataSourceUtil--failed to close Hive connection!", e);
        }
    }
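
    /**
     * Minimal usage sketch (this helper is not part of the original class; the
     * method name is illustrative). It acquires a connection from the pool,
     * prints the first column of each row, and releases the statement, result
     * set, and connection via try-with-resources, so the connection returns to
     * the pool even if the query fails.
     */
    private static void printFirstColumn(String sql) {
        try (Connection c = getHiveDataSource().getConnection();
             Statement st = c.createStatement();
             ResultSet rs = st.executeQuery(sql)) {
            while (rs.next()) {
                System.out.println(rs.getString(1));
            }
        } catch (SQLException e) {
            logger.error("HiveDataSourceUtil--example query failed!", e);
        }
    }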


    public static void main(String[] args) throws Exception {
        DruidDataSource ds = HiveDataSourceUtil.getHiveDataSource();
        Connection conn = ds.getConnection();
        if (conn == null) {
            System.out.println("null");
            return;
        }
        System.out.println("conn");
        // Close the statement and result set automatically; the LIMIT clause
        // already caps the output at 10 rows.
        try (Statement stmt = conn.createStatement();
             ResultSet res = stmt.executeQuery("select * from test.frag_media_expire_log limit 10")) {
            while (res.next()) {
                System.out.println(res.getString(2));
            }
//            String s = resultSetToJson(res);
//            String s = ResultSetToJsonString(res);
//            System.out.println(s);
        } finally {
            conn.close();
        }
    }

    public static String resultSetToJson(ResultSet rs) throws SQLException, JSONException {
        // JSON array holding one object per row
        JSONArray array = new JSONArray();

        // Column count from the result set metadata
        ResultSetMetaData metaData = rs.getMetaData();
        int columnCount = metaData.getColumnCount();

        // Iterate over every row in the ResultSet
        while (rs.next()) {
            JSONObject jsonObj = new JSONObject();

            // Iterate over every column
            for (int i = 1; i <= columnCount; i++) {
                String columnName = metaData.getColumnLabel(i);
                String value = rs.getString(columnName);
                jsonObj.put(columnName, value);
            }
            array.add(jsonObj);
        }

        return array.toString();
    }

    public static final JsonObject ResultSetToJsonObject(ResultSet rs) {
        JsonArray ja = new JsonArray();
        JsonObject jo = new JsonObject();
        try {
            ResultSetMetaData rsmd = rs.getMetaData();
            while (rs.next()) {
                JsonObject element = new JsonObject();
                for (int i = 1; i <= rsmd.getColumnCount(); i++) {
                    // Use the column label so SQL aliases are respected,
                    // and fetch by index to tolerate duplicate column names
                    element.addProperty(rsmd.getColumnLabel(i), rs.getString(i));
                }
                ja.add(element);
            }
            jo.add("result", ja);
        } catch (SQLException e) {
            logger.error("HiveDataSourceUtil--failed to convert ResultSet to JSON!", e);
        }
        return jo;
    }
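
    /*
     * Shape of the object returned above, for a two-column result set
     * (column names and values are illustrative):
     *
     *   {"result":[{"id":"1","name":"a"},{"id":"2","name":"b"}]}
     */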

    public static final String ResultSetToJsonString(ResultSet rs) {
        return ResultSetToJsonObject(rs).toString();
    }
}