author     yinjiangyi <[email protected]>  2021-08-10 09:39:22 +0800
committer  yinjiangyi <[email protected]>  2021-08-10 09:39:22 +0800
commit     4e0c576477393fafe4604213676991053a8a181d (patch)
tree       06380b845ddc798db9abbbe4b84a08784e969506 /src
parent     8899dac890ec75490e97ad645571974f86e561a9 (diff)
V08
Diffstat (limited to 'src')
-rw-r--r--  src/main/java/cn/mesalab/config/ApplicationConfig.java     2
-rw-r--r--  src/main/java/cn/mesalab/service/BaselineGeneration.java  13
-rw-r--r--  src/main/java/cn/mesalab/utils/HbaseUtils.java             2
-rw-r--r--  src/main/java/cn/mesalab/utils/SeriesUtils.java            6
-rw-r--r--  src/main/resources/application.properties                  7
5 files changed, 9 insertions, 21 deletions
diff --git a/src/main/java/cn/mesalab/config/ApplicationConfig.java b/src/main/java/cn/mesalab/config/ApplicationConfig.java
index eea8343..1f88024 100644
--- a/src/main/java/cn/mesalab/config/ApplicationConfig.java
+++ b/src/main/java/cn/mesalab/config/ApplicationConfig.java
@@ -55,7 +55,7 @@ public class ApplicationConfig {
public static final Double BASELINE_KALMAN_P = ConfigUtils.getDoubleProperty("baseline.kalman.p");
public static final Double BASELINE_KALMAN_M = ConfigUtils.getDoubleProperty("baseline.kalman.m");
- public static final Long DRUID_READ_BATCH_TIME_GRAD_HOUR = ConfigUtils.getLongProperty("druid.read.batch.time.grad.hour");
+ //public static final Long DRUID_READ_BATCH_TIME_GRAD_HOUR = ConfigUtils.getLongProperty("druid.read.batch.time.grad.hour");
public static final Integer THREAD_POOL_NUM = ConfigUtils.getIntProperty("thread.pool.num");
public static final Integer PARTITION_NUM_MAX = ConfigUtils.getIntProperty("druid.partition.num.max");
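These constants are resolved once, when ApplicationConfig is class-loaded, through the project's ConfigUtils helper, whose implementation is not part of this diff. The sketch below is a minimal, hypothetical version of such a reader, assuming it wraps a java.util.Properties loaded from the application.properties file patched at the end of this commit. Because the fields are static finals, commenting out DRUID_READ_BATCH_TIME_GRAD_HOUR here goes hand in hand with commenting out druid.read.batch.time.grad.hour in the properties hunk below: with a reader like this one, leaving either side in place alone would fail at class load.

    // Hypothetical ConfigUtils-style reader (not the project's actual class).
    import java.io.IOException;
    import java.io.InputStream;
    import java.util.Properties;

    public final class ConfigUtilsSketch {
        private static final Properties PROPS = new Properties();

        static {
            // Load application.properties from the classpath once.
            try (InputStream in = ConfigUtilsSketch.class.getClassLoader()
                    .getResourceAsStream("application.properties")) {
                PROPS.load(in);
            } catch (IOException e) {
                throw new ExceptionInInitializerError(e);
            }
        }

        public static Integer getIntProperty(String key) {
            return Integer.valueOf(PROPS.getProperty(key).trim());
        }

        public static Long getLongProperty(String key) {
            return Long.valueOf(PROPS.getProperty(key).trim());
        }

        public static Double getDoubleProperty(String key) {
            return Double.valueOf(PROPS.getProperty(key).trim());
        }
    }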
diff --git a/src/main/java/cn/mesalab/service/BaselineGeneration.java b/src/main/java/cn/mesalab/service/BaselineGeneration.java
index 74e4ca9..da90f74 100644
--- a/src/main/java/cn/mesalab/service/BaselineGeneration.java
+++ b/src/main/java/cn/mesalab/service/BaselineGeneration.java
@@ -24,10 +24,10 @@ public class BaselineGeneration {
private static final Logger LOG = LoggerFactory.getLogger(BaselineGeneration.class);
private static final List<String> ATTACK_TYPE_LIST = Arrays.asList(
- ApplicationConfig.DRUID_ATTACKTYPE_TCP_SYN_FLOOD
-// ApplicationConfig.DRUID_ATTACKTYPE_ICMP_FLOOD,
-// ApplicationConfig.DRUID_ATTACKTYPE_UDP_FLOOD,
-// ApplicationConfig.DRUID_ATTACKTYPE_DNS_AMPL
+ ApplicationConfig.DRUID_ATTACKTYPE_TCP_SYN_FLOOD,
+ ApplicationConfig.DRUID_ATTACKTYPE_ICMP_FLOOD,
+ ApplicationConfig.DRUID_ATTACKTYPE_UDP_FLOOD,
+ ApplicationConfig.DRUID_ATTACKTYPE_DNS_AMPL
);
private static final Integer BASELINE_POINT_NUM =
ApplicationConfig.BASELINE_RANGE_DAYS * 24 * (60/ApplicationConfig.HISTORICAL_GRAD);
@@ -77,9 +77,4 @@ public class BaselineGeneration {
generationExecutor.shutdown();
generateCountDownLatch.await();
}
-
- public static void main(String[] args) {
- perform();
- }
-
}
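The tail of perform() visible in the second hunk pairs an ExecutorService with a CountDownLatch: shutdown() stops new submissions, await() blocks until every generation task has finished. Below is a minimal sketch of that fan-out pattern; only the two field names and the shutdown/await ordering come from the diff, while the loop and task body are invented for illustration. For scale, BASELINE_POINT_NUM above is simply the number of HISTORICAL_GRAD-minute buckets in BASELINE_RANGE_DAYS days: for example, if BASELINE_RANGE_DAYS were 7 and HISTORICAL_GRAD were 15, that would be 7 * 24 * (60/15) = 672 points. With main() removed by this commit, perform() is now reachable only from an external entry point.

    // Sketch of the shutdown()/await() fan-out pattern; the task body is hypothetical.
    import java.util.List;
    import java.util.concurrent.CountDownLatch;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    public class PerformSketch {
        public static void perform(List<String> attackTypes, int threadPoolNum)
                throws InterruptedException {
            ExecutorService generationExecutor = Executors.newFixedThreadPool(threadPoolNum);
            CountDownLatch generateCountDownLatch = new CountDownLatch(attackTypes.size());
            for (String attackType : attackTypes) {
                generationExecutor.submit(() -> {
                    try {
                        // ... generate and persist the baseline for attackType (omitted) ...
                    } finally {
                        generateCountDownLatch.countDown();  // always count down, even on failure
                    }
                });
            }
            generationExecutor.shutdown();    // accept no new tasks; queued tasks still run
            generateCountDownLatch.await();   // block until every task has counted down
        }
    }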
diff --git a/src/main/java/cn/mesalab/utils/HbaseUtils.java b/src/main/java/cn/mesalab/utils/HbaseUtils.java
index 16aab37..a2657b7 100644
--- a/src/main/java/cn/mesalab/utils/HbaseUtils.java
+++ b/src/main/java/cn/mesalab/utils/HbaseUtils.java
@@ -1 +1 @@
package cn.mesalab.utils;

import cn.mesalab.config.ApplicationConfig;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
 * @author yjy
 * @version 1.0
 * @date 2021/7/23 4:56 PM
 */
public class HbaseUtils {
    private static final Logger LOG = LoggerFactory.getLogger(HbaseUtils.class);
    private static HbaseUtils hbaseUtils;
    private Table hbaseTable;

    static {
        hbaseUtils = HbaseUtils.getInstance();
    }

    public static HbaseUtils getInstance() {
        if (hbaseUtils == null) {
            hbaseUtils = new HbaseUtils();
        }
        return hbaseUtils;
    }

    public Table getHbaseTable() {
        if (hbaseTable == null) {
            try {
                Configuration config = HBaseConfiguration.create();
                config.set(HConstants.ZOOKEEPER_QUORUM, ApplicationConfig.HBASE_ZOOKEEPER_QUORUM);
                config.set(HConstants.ZOOKEEPER_CLIENT_PORT, ApplicationConfig.HBASE_ZOOKEEPER_CLIENT_PORT);
                TableName tableName = TableName.valueOf(ApplicationConfig.HBASE_TABLE);
                Connection conn = ConnectionFactory.createConnection(config);
                hbaseTable = conn.getTable(tableName);
            } catch (IOException e) {
                LOG.error("HBase: failed to create HBase table!");
                e.printStackTrace();
            }
        }
        return hbaseTable;
    }

    public List<Put> cachedInPut(List<Put> putList, String ip, int[] baseline, String attackType, String metricType) {
        Put rowPut = new Put(Bytes.toBytes(ip));
-       // FOR TEST
-       // start
-       if (attackType.equals(ApplicationConfig.DRUID_ATTACKTYPE_TCP_SYN_FLOOD)) {
-           attackType = "TCP SYN Flood";
-       } else if (attackType.equals(ApplicationConfig.DRUID_ATTACKTYPE_UDP_FLOOD)) {
-           attackType = "UDP Flood";
-       } else if (attackType.equals(ApplicationConfig.DRUID_ATTACKTYPE_ICMP_FLOOD)) {
-           attackType = "ICMP Flood";
-       } else {
-           attackType = "DNS Amplification";
-       }
-       // end
        rowPut.addColumn(
                Bytes.toBytes(attackType),
                Bytes.toBytes(metricType),
                WritableUtils.toByteArray(toWritable(baseline)));
        putList.add(rowPut);
        return putList;
    }

    private Writable toWritable(int[] arr) {
        Writable[] content = new Writable[arr.length];
        for (int i = 0; i < content.length; i++) {
            content[i] = new IntWritable(arr[i]);
        }
        return new ArrayWritable(IntWritable.class, content);
    }

    public ArrayList<Integer> fromWritable(ArrayWritable writable) {
        Writable[] writables = writable.get();
        ArrayList<Integer> list = new ArrayList<Integer>(writables.length);
        for (Writable wrt : writables) {
            list.add(((IntWritable) wrt).get());
        }
        return list;
    }

    public void close() {
        try {
            hbaseTable.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
\ No newline at end of file
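This hunk only deletes the FOR TEST remapping of Druid attack-type values to display names, presumably because the druid.attacktype.* properties below already hold the display-name strings. Both sides serialize a baseline as a Hadoop ArrayWritable of IntWritable and store the bytes in one HBase cell (column family = attack type, qualifier = metric type). A round-trip sketch of that encoding, using only Hadoop Writable APIs and invented sample data:

    // Round-trip sketch for the ArrayWritable encoding used by cachedInPut()/fromWritable();
    // the standalone main() and its values are illustrative only.
    import java.io.ByteArrayInputStream;
    import java.io.DataInputStream;
    import java.io.IOException;
    import org.apache.hadoop.io.ArrayWritable;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.Writable;
    import org.apache.hadoop.io.WritableUtils;

    public class BaselineCodecSketch {
        public static void main(String[] args) throws IOException {
            int[] baseline = {184, 134, 95, 133};

            // Serialize the way cachedInPut() does.
            Writable[] content = new Writable[baseline.length];
            for (int i = 0; i < baseline.length; i++) {
                content[i] = new IntWritable(baseline[i]);
            }
            byte[] cellValue = WritableUtils.toByteArray(new ArrayWritable(IntWritable.class, content));

            // Deserialize into the ArrayWritable that fromWritable() expects.
            ArrayWritable decoded = new ArrayWritable(IntWritable.class);
            decoded.readFields(new DataInputStream(new ByteArrayInputStream(cellValue)));
            for (Writable w : decoded.get()) {
                System.out.println(((IntWritable) w).get());  // 184, 134, 95, 133
            }
        }
    }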
diff --git a/src/main/java/cn/mesalab/utils/SeriesUtils.java b/src/main/java/cn/mesalab/utils/SeriesUtils.java
index a5391b8..1d31519 100644
--- a/src/main/java/cn/mesalab/utils/SeriesUtils.java
+++ b/src/main/java/cn/mesalab/utils/SeriesUtils.java
@@ -189,11 +189,5 @@ public class SeriesUtils {
int index = (int) Math.ceil(percentile * tmp.size());
return tmp.get(index-1);
}
-
-
- public static void main(String[] args) {
- List<Integer> a = Arrays.asList(184, 134, 95, 133, 56, 128, 60, 65, 143, 149, 68, 150, 384, 516, 135, 171, 601, 778, 431, 175, 0, 0, 0, 2032, 0, 0, 0, 2978, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2354, 2291, 2826, 3775, 6544, 6736, 7748, 7027, 6483, 5670, 3153, 382, 15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 0, 5, 0, 0, 0, 14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 9, 0, 0, 0, 53, 0, 20, 0, 0, 0, 0, 0, 0, 44, 0, 18, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 62, 0, 0, 0, 12, 0, 0, 0, 0, 17, 0, 0, 0, 0, 0, 0, 0, 94, 65, 126, 27, 39, 39, 67, 120, 110, 34, 57, 198, 36, 77, 41, 65, 145, 140, 107, 100, 116, 262, 185, 168, 167, 395, 339, 522, 668, 1048, 2898, 3773, 4979, 6046, 6846, 6232, 6488, 7438, 5554, 6105, 6673, 6256, 6252, 5264, 5305, 5166, 5657, 6098, 5696, 3887, 4591, 5952, 6035, 6541, 8607, 8784, 9042, 10123, 9975, 10430, 10931, 10100, 9908, 8022, 4090, 1911, 404, 832, 304, 10, 22, 54, 0, 118, 80, 17, 25, 17, 30, 0, 22, 5, 0, 0, 22, 0, 0, 0, 0, 0, 0, 0, 0, 15, 22, 0, 0, 0, 24, 31, 0, 0, 47, 0, 25, 0, 12, 46, 37, 51, 0, 0, 4, 0, 0, 0, 0, 0, 0, 11, 0, 0, 0, 16, 0, 0, 0, 0, 31, 0, 0, 0, 37, 10, 57, 0, 0, 15, 13, 22, 0, 13, 13, 25, 115, 214, 85, 180, 162, 206, 95, 55, 110, 191, 37, 232, 54, 83, 81, 130, 74, 43, 107, 45, 98, 63, 44, 154, 283, 109, 95, 207, 302, 1016, 1289, 3089, 3847, 757, 4157, 4025, 4783, 5196, 5423, 5503, 3997, 5291, 5957, 3795, 4170, 4649, 5265, 5371, 4950, 1272, 2451, 4251, 4393, 5535, 7303, 7958, 7865, 9039, 8410, 9304, 9496, 7115, 6135, 5335, 1425, 1486, 1124, 935, 905, 58, 40, 19, 0, 11, 49, 0, 28, 15, 25, 16, 0, 0, 0, 0, 0, 42, 0, 0, 0, 11, 0, 53, 0, 6, 0, 9, 117, 31, 70, 56, 33, 21, 11, 0, 102, 0, 39, 15, 46, 58, 102, 66, 17, 36, 0, 31, 0, 0, 24, 0, 245, 50, 50, 0, 0, 0, 0, 75, 12, 55, 0, 18, 0, 47, 14, 92, 0, 0, 10, 27, 3, 33, 0, 0, 159, 369, 453, 264, 115, 230, 286, 92, 674, 295, 64, 201);
- System.out.println(percentile(a,0.99));
- }
}
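The two surviving context lines implement the nearest-rank percentile: sort, then take the element at rank ceil(p * n), 1-based. The standalone sketch below restates it together with the sorting step, which sits above this hunk and is assumed here, and swaps the deleted 400-point test array for a small worked example:

    // Nearest-rank percentile as the context lines compute it:
    // index = ceil(percentile * n), 1-based into a sorted copy.
    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    public class PercentileSketch {
        public static int percentile(List<Integer> values, double percentile) {
            List<Integer> tmp = new ArrayList<>(values);
            Collections.sort(tmp);  // assumed; the sort is above the visible hunk
            int index = (int) Math.ceil(percentile * tmp.size());
            return tmp.get(index - 1);
        }

        public static void main(String[] args) {
            List<Integer> a = Arrays.asList(5, 1, 9, 3, 7, 2, 8, 4, 6, 10);
            // ceil(0.9 * 10) = 9, so the 9th-smallest value is returned:
            System.out.println(percentile(a, 0.9));  // prints 9
        }
    }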
diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties
index 2b8ab0b..5b0242b 100644
--- a/src/main/resources/application.properties
+++ b/src/main/resources/application.properties
@@ -17,7 +17,7 @@ hbase.zookeeper.client.port=2181
#How the Druid read time range is determined:
# 0: read the default range of read.historical.days days;
# 1: use an explicitly specified time range
-read.druid.time.limit.type=1
+read.druid.time.limit.type=0
#07-05
read.druid.min.time=1625414400000
#07-08
@@ -31,7 +31,6 @@ druid.attacktype.dnsamplification=DNS Amplification
druid.columnname.serverip=destination_ip
druid.columnname.attacktype=attack_type
druid.columnname.recvtime=__time
-#FOR TEST
druid.columnname.partition.num=partition_num
baseline.metric.type=session_rate
@@ -63,8 +62,8 @@ baseline.kalman.m=2
##########################################
############## Concurrency settings ##############
##########################################
-druid.read.batch.time.grad.hour=4
-thread.pool.num=50
+#druid.read.batch.time.grad.hour=4
+thread.pool.num=10
#the Druid partition field partition_num has a maximum value of 9999
druid.partition.num.max=10000
druid.connection.retry.time.max=10
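read.druid.time.limit.type therefore switches the reader between a rolling window of read.historical.days days (type 0, which this commit makes the active mode) and the fixed epoch-millisecond bounds read.druid.min.time / read.druid.max.time (type 1). A hypothetical consumer, with only the property names and their commented semantics taken from the file:

    // Hypothetical consumer of read.druid.time.limit.type; not code from this repo.
    public class TimeRangeSketch {
        /** Returns {minMillis, maxMillis} for the Druid query window. */
        public static long[] druidTimeRange(int limitType, int historicalDays,
                                            long fixedMinTime, long fixedMaxTime) {
            if (limitType == 0) {
                // type 0: rolling window of read.historical.days days ending now
                long now = System.currentTimeMillis();
                return new long[]{now - historicalDays * 24L * 60 * 60 * 1000, now};
            }
            // type 1: fixed bounds, e.g. read.druid.min.time=1625414400000,
            // which is 2021-07-05 00:00:00 +0800
            return new long[]{fixedMinTime, fixedMaxTime};
        }
    }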