diff options
| author | zhanghongqing <[email protected]> | 2020-02-26 09:54:02 +0800 |
|---|---|---|
| committer | zhanghongqing <[email protected]> | 2020-02-26 09:54:02 +0800 |
| commit | 1e4dd07d7f5c31e61630fa02fd2aed494d780c8b (patch) | |
| tree | 06df9ab50050f4e4c260f165970aa954e2bf5e0b /src/main | |
| parent | aa8441e70da14ba648a333a943d01efb1111c2e5 (diff) | |
优化snowflake Id获取,处理时间回拨,去掉无用的方法跟依赖
Diffstat (limited to 'src/main')
| -rw-r--r-- | src/main/java/com/zdjizhi/utils/FormatUtils.java | 139 | ||||
| -rw-r--r-- | src/main/java/com/zdjizhi/utils/HBaseUtils.java | 190 | ||||
| -rw-r--r-- | src/main/java/com/zdjizhi/utils/SnowflakeId.java | 50 |
3 files changed, 75 insertions, 304 deletions
diff --git a/src/main/java/com/zdjizhi/utils/FormatUtils.java b/src/main/java/com/zdjizhi/utils/FormatUtils.java index 85eb70b..80b7c1c 100644 --- a/src/main/java/com/zdjizhi/utils/FormatUtils.java +++ b/src/main/java/com/zdjizhi/utils/FormatUtils.java @@ -5,14 +5,14 @@ import java.util.Base64; import java.util.HashMap; import java.util.List; import java.util.Map.Entry; +import java.util.Set; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.google.common.base.Splitter; import com.google.common.net.InternetDomainName; -import java.util.Set; - public class FormatUtils { protected final Logger logger = LoggerFactory.getLogger(this.getClass()); @@ -116,132 +116,55 @@ public class FormatUtils { } /** - * 有sni通过sni获取域名,有host根据host获取域名 + * 根据域名截取顶级域名 * - * @param sni sni - * @param host host + * @param host 网站域名 * @return 顶级域名 */ - public String getTopDomain(String sni, String host) { - if (StringUtil.isNotBlank(host)) { - return getDomainName(host); - } else if (StringUtil.isNotBlank(sni)) { - return getDomainName(sni); - } else { - return ""; + public static String getTopDomain(String host) { + try { + return InternetDomainName.from(Splitter.on(":").splitToList(host).get(0)).topPrivateDomain().toString(); + } catch (Exception e) { + e.printStackTrace(); + return host; } } /** - * 根据url截取顶级域名 + * base64 解码 * - * @param host 网站url - * @return 顶级域名 - */ - public String getDomainName(String host) { - String domain = ""; - domain = InternetDomainName.from(host).topPrivateDomain().toString(); - return domain; - } - - /** - * 生成当前时间戳的操作 + * @param encodedText mail subject + * @param subjectCharset 编码格式 + * @return 解码内容 / 空 */ - public int getCurrentTime() { - return (int) (System.currentTimeMillis() / 1000); + public static String base64Str(String encodedText, String subjectCharset) { + Base64.Decoder decoder = Base64.getDecoder(); + String sub = null; + try { + sub = new String(decoder.decode(encodedText), StringUtil.isBlank(subjectCharset)? 
"UTF-8" : subjectCharset); + } catch (Exception e) { + e.printStackTrace(); + return ""; + } + return sub; } - /** * 雪花模型生成id - * + * @param workerId + * @param dataCenterId * @return */ - public long getSnowflakeId(String zookeeperIp, String kafkaTopic, long dataCenterIdNum) { - - return SnowflakeId.generateId(zookeeperIp, kafkaTopic, dataCenterIdNum); - } - - /** - * 根据ip定位库的地址返回一个ipLookup工具类对象 - * - * @param ipLibrary - * @return ipLookup - */ - public IpLookup getIpLookup(String ipLibrary) { - - IpLookup ipLookup = new IpLookup.Builder(false).loadDataFileV4(ipLibrary + "Kazakhstan.mmdb") - .loadDataFileV6(ipLibrary + "Kazakhstan.mmdb").loadAsnDataFileV4(ipLibrary + "asn_v4.mmdb") - .loadAsnDataFileV6(ipLibrary + "asn_v6.mmdb").build(); - - return ipLookup; + public long getSnowflakeId(long workerId, long dataCenterId) { + return SnowflakeId.generateId(workerId, dataCenterId); } - /** - * 根据clientIp获取location信息 + * 雪花模型生成id * - * @param ip * @return */ - public String getGeoIpDetail(String ip, IpLookup ipLookup) { + public long getSnowflakeId(String zookeeperIp, String kafkaTopic, long dataCenterId) { - return ipLookup.cityLookupDetail(ip); + return SnowflakeId.generateId(zookeeperIp, kafkaTopic, dataCenterId); } - /** - * 根据ip获取asn信息 - * - * @param ip - * @return asn - */ - public String getGeoAsn(String ip, IpLookup ipLookup) { - - return ipLookup.asnLookup(ip, true); - } - - /** - * 根据ip获取country信息 - * - * @param ip - * @return country - */ - public String getGeoIpCountry(String ip, IpLookup ipLookup) { - - return ipLookup.countryLookup(ip); - } - - /** - * 根据ip去hbase中匹配对应的用户名 - * - * @param clientIp - * @param hbaseZookeeper - * @param hbaseTable - * @return 用户名 subscriber_id - */ - public String radiusMatch(String clientIp, String hbaseZookeeper, String hbaseTable) { - return HBaseUtils.getAccount(clientIp, hbaseZookeeper, hbaseTable); - } - - /** - * base64 解码 - * - * @param encodedText mail subject - * @param subjectCharset 编码格式 - * @return 解码内容 / 空 - */ 
- public String base64Str(String encodedText, String subjectCharset) { - Base64.Decoder decoder = Base64.getDecoder(); - String sub; - try { - if (StringUtil.isNotBlank(subjectCharset)) { - sub = new String(decoder.decode(encodedText), subjectCharset); - } else { - sub = new String(decoder.decode(encodedText), "UTF-8"); - } - return sub; - } catch (Exception e) { - e.printStackTrace(); - return ""; - } - } - } diff --git a/src/main/java/com/zdjizhi/utils/HBaseUtils.java b/src/main/java/com/zdjizhi/utils/HBaseUtils.java deleted file mode 100644 index dbf090e..0000000 --- a/src/main/java/com/zdjizhi/utils/HBaseUtils.java +++ /dev/null @@ -1,190 +0,0 @@ -package com.zdjizhi.utils;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.*;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.log4j.Logger;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Timer;
-import java.util.TimerTask;
-
-public class HBaseUtils {
- private final static Logger logger = Logger.getLogger(HBaseUtils.class);
- private static Map<String, String> subIdMap = new HashMap<>(333334);
- private static Connection connection;
- private static Long time;
-
- private static String zookeeperIp;
- private static String hbaseTable;
-
- private static HBaseUtils hBaseUtils;
-
- private static void getHbaseInstance(String zookeeperServer, String hbaseTableName) {
- hBaseUtils = new HBaseUtils(zookeeperServer, hbaseTableName);
- }
-
- /**
- * 构造函数
- */
- public HBaseUtils(String zookeeperServer, String hbaseTableName) {
- zookeeperIp = zookeeperServer;
- hbaseTable = hbaseTableName;
- //获取连接
- getHbaseConn();
- //拉取所有
- getAll();
- //定时更新
- //updateHabaseCache();
- }
-
- private static void getHbaseConn() {
- try {
- // 管理Hbase的配置信息
- Configuration configuration = HBaseConfiguration.create();
- // 设置zookeeper节点
- configuration.set("hbase.zookeeper.quorum", zookeeperIp);
- configuration.set("hbase.client.retries.number", "3");
- configuration.set("hbase.bulkload.retries.number", "3");
- configuration.set("zookeeper.recovery.retry", "3");
- connection = ConnectionFactory.createConnection(configuration);
- time = System.currentTimeMillis();
- logger.warn("HBaseUtils get HBase connection,now to getAll().");
- } catch (IOException ioe) {
- logger.error("HBaseUtils getHbaseConn() IOException===>{" + ioe + "}<===");
- ioe.printStackTrace();
- } catch (Exception e) {
- logger.error("HBaseUtils getHbaseConn() Exception===>{" + e + "}<===");
- e.printStackTrace();
- }
- }
-
- /**
- * 更新变量
- */
- public static void change() {
- long nowTime = System.currentTimeMillis();
- timestampsFilter(time - 1000, nowTime + 500);
- }
- /**
- * 更新变量
- */
- public static void change(String hbaseZookeeper, String hbaseTable) {
- if (hBaseUtils == null) {
- getHbaseInstance(hbaseZookeeper, hbaseTable);
- }
- long nowTime = System.currentTimeMillis();
- timestampsFilter(time - 1000, nowTime + 500);
- }
- /**
- * 获取变更内容
- *
- * @param startTime 开始时间
- * @param endTime 结束时间
- */
- private static void timestampsFilter(Long startTime, Long endTime) {
- Long begin = System.currentTimeMillis();
- Table table = null;
- ResultScanner scanner = null;
- Scan scan2 = new Scan();
- try {
- table = connection.getTable(TableName.valueOf("sub:" + hbaseTable));
- scan2.setTimeRange(startTime, endTime);
- scanner = table.getScanner(scan2);
- for (Result result : scanner) {
- Cell[] cells = result.rawCells();
- for (Cell cell : cells) {
- String key = Bytes.toString(CellUtil.cloneRow(cell));
- String value = Bytes.toString(CellUtil.cloneValue(cell));
- if (subIdMap.containsKey(key)) {
- if (!value.equals(subIdMap.get(key))) {
- subIdMap.put(key, value);
- }
- } else {
- subIdMap.put(key, value);
- }
- }
- }
- Long end = System.currentTimeMillis();
- logger.warn("HBaseUtils Now subIdMap.keySet().size() is: " + subIdMap.keySet().size());
- logger.warn("HBaseUtils Update cache timeConsuming is: " + (end - begin) + ",BeginTime: " + begin + ",EndTime: " + end);
- time = endTime;
- } catch (IOException ioe) {
- logger.error("HBaseUtils timestampsFilter is IOException===>{" + ioe + "}<===");
- ioe.printStackTrace();
- } catch (Exception e) {
- logger.error("HBaseUtils timestampsFilter is Exception===>{" + e + "}<===");
- e.printStackTrace();
- } finally {
- if (scanner != null) {
- scanner.close();
- }
- if (table != null) {
- try {
- table.close();
- } catch (IOException e) {
- e.printStackTrace();
- }
- }
- }
- }
-
- /**
- * 获取所有的 key value
- */
- private static void getAll() {
- long begin = System.currentTimeMillis();
- try {
- Table table = connection.getTable(TableName.valueOf("sub:" + hbaseTable));
- Scan scan2 = new Scan();
- ResultScanner scanner = table.getScanner(scan2);
- for (Result result : scanner) {
- Cell[] cells = result.rawCells();
- for (Cell cell : cells) {
- subIdMap.put(Bytes.toString(CellUtil.cloneRow(cell)), Bytes.toString(CellUtil.cloneValue(cell)));
- }
- }
- logger.warn("HBaseUtils Get fullAmount List size->subIdMap.size(): " + subIdMap.size());
- logger.warn("HBaseUtils Get fullAmount List size->subIdMap.size() timeConsuming is: " + (System.currentTimeMillis() - begin));
- scanner.close();
- } catch (IOException ioe) {
- logger.error("HBaseUtils getAll() is IOException===>{" + ioe + "}<===");
- ioe.printStackTrace();
- } catch (Exception e) {
- logger.error("HBaseUtils getAll() is Exception===>{" + e + "}<===");
- e.printStackTrace();
- }
- }
-
- /**
- * 获取 account
- *
- * @param clientIp client_ip
- * @return account
- */
- public static String getAccount(String clientIp, String hbaseZookeeper, String hbaseTable) {
- if (hBaseUtils == null) {
- getHbaseInstance(hbaseZookeeper, hbaseTable);
- }
- return subIdMap.get(clientIp);
- }
-
- /**
- * 定时更新
- */
- /*
- * private void updateHabaseCache() { Timer timer = new Timer();
- * timer.scheduleAtFixedRate(new TimerTask() {
- *
- * @Override public void run() { try { change(); } catch (Exception e) {
- * logger.error("HBaseUtils update hbaseCache is error===>{" + e + "}<===");
- * e.printStackTrace(); } } }, 1, 1000 * 60); }
- */
-
-}
diff --git a/src/main/java/com/zdjizhi/utils/SnowflakeId.java b/src/main/java/com/zdjizhi/utils/SnowflakeId.java index 8e4bfa3..bfb4323 100644 --- a/src/main/java/com/zdjizhi/utils/SnowflakeId.java +++ b/src/main/java/com/zdjizhi/utils/SnowflakeId.java @@ -81,14 +81,43 @@ public class SnowflakeId { private static SnowflakeId idWorker;
private static ZookeeperUtils zookeeperUtils = new ZookeeperUtils();
+ /**
+ * 设置允许时间回拨的最大限制10s
+ */
+ private static final long rollBackTime = 10000L;
- private static void getSnowflakeldInstance(String zookeeperIp, String kafkaTopic, long dataCenterIdNum) {
- idWorker = new SnowflakeId(zookeeperIp, kafkaTopic, dataCenterIdNum);
+ /**
+ * @param workerId
+ * @param dataCenterId
+ */
+ private static void getSnowflakeldInstance(long workerId, long dataCenterId) {
+ idWorker = new SnowflakeId(workerId, dataCenterId);
+ }
+ /**
+ * 依赖于zookeeper
+ * @param zookeeperIp
+ * @param kafkaTopic
+ * @param dataCenterId
+ */
+ private static void getSnowflakeldInstance(String zookeeperIp,String kafkaTopic, long dataCenterId) {
+ idWorker = new SnowflakeId(zookeeperIp,kafkaTopic, dataCenterId);
}
/**
- * 构造函数
+ * 构造函数
*/
+ private SnowflakeId(long tmpWorkerId, long dataCenterIdNum) {
+ if (tmpWorkerId > maxWorkerId || tmpWorkerId < 0) {
+ throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
+ }
+ if (dataCenterIdNum > maxDataCenterId || dataCenterIdNum < 0) {
+ throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDataCenterId));
+ }
+ this.workerId = tmpWorkerId;
+ this.dataCenterId = dataCenterIdNum;
+
+ }
+
private SnowflakeId(String zookeeperIp, String kafkaTopic, long dataCenterIdNum) {
ZooKeeperLock lock = new ZooKeeperLock(zookeeperIp, "/locks", "disLocks1");
if (lock.lock()) {
@@ -111,7 +140,6 @@ public class SnowflakeId { }
}
}
-
/**
* 获得下一个ID (该方法是线程安全的)
*
@@ -119,7 +147,10 @@ public class SnowflakeId { */
private synchronized long nextId() {
long timestamp = timeGen();
-
+ //设置一个允许回拨限制时间,系统时间回拨范围在rollBackTime内可以等待校准
+ if (lastTimestamp - timestamp > 0 && lastTimestamp - timestamp < rollBackTime) {
+ timestamp = tilNextMillis(lastTimestamp);
+ }
//如果当前时间小于上一次ID生成的时间戳,说明系统时钟回退过这个时候应当抛出异常
if (timestamp < lastTimestamp) {
throw new RuntimeException(
@@ -172,7 +203,13 @@ public class SnowflakeId { protected long timeGen() {
return System.currentTimeMillis();
}
-
+
+ public static Long generateId(long workerId, long dataCenterIdNum) {
+ if (idWorker == null) {
+ getSnowflakeldInstance(workerId, dataCenterIdNum);
+ }
+ return idWorker.nextId();
+ }
/**
* 静态工具类
@@ -185,4 +222,5 @@ public class SnowflakeId { }
return idWorker.nextId();
}
+
}
\ No newline at end of file |
