屈庆涛 / Mock-Data / Commits / d97a6281

Commit d97a6281 authored Jun 22, 2020 by DeleMing

<dev>
1. Modify the code

parent 21ec0353
Showing 6 changed files with 408 additions and 11 deletions (+408 −11)
src/main/java/com/zorkdta/tools/mock/KafkaProducerUtil.java        +174 −0
src/main/java/com/zorkdta/tools/mock/MockConnectJsonData.java      +10 −8
src/main/java/com/zorkdta/tools/mock/MockFilebeatDataToKafka.java  +224 −0
src/main/java/com/zorkdta/tools/mock/MockKafkaConnectAvro.java     +0 −1
src/main/java/com/zorkdta/tools/mock/MockKafkaConnectAvroTest.java +0 −1
src/main/java/com/zorkdta/tools/mock/MockKafkaConnectJson.java     +0 −1
src/main/java/com/zorkdta/tools/mock/KafkaProducerUtil.java (new file, mode 0 → 100644)
package com.zorkdta.tools.mock;

import com.zorkdta.tools.avro.AvroSerializerFactory;
import lombok.Data;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.security.auth.login.Configuration;

import java.util.Map;
import java.util.Properties;

/**
 * @author shaojiao
 * Date: 2020/1/8
 * Time: 10:12
 * Description: this Kafka utility targets Kafka client version 1.1.0
 */
@Data
public class KafkaProducerUtil {

    private Logger log = LoggerFactory.getLogger(KafkaProducerUtil.class);

    /**
     * Kafka connection settings
     */
    private String kafkaBootstrapServers;
    private Integer kafkaBatchSize;

    /**
     * Kafka SASL authentication
     */
    private boolean kafkaSaslFlag;
    private String kafkaSaslAppkey;
    private String kafkaSaslSecretkey;
    private String kafkaSaslMechanism;

    /**
     * Kafka Kerberos authentication
     */
    private boolean kafkaKerberosFlag;
    private String kafkaKerberosKrb5Conf;
    private String kafkaKerberosJaasConf;
    private String kafkaSecurityProtocol;
    private String kafkaSaslKerberosServiceName;

    // Byte-array producer for Avro-serialized metrics and logs.
    private static KafkaProducer<String, byte[]> producer;
    // String producer for plain JSON alarms.
    private static KafkaProducer<String, String> noAvroProducer;

    public KafkaProducerUtil(String kafkaBootstrapServers, Integer kafkaBatchSize) {
        this.kafkaBootstrapServers = kafkaBootstrapServers;
        this.kafkaBatchSize = kafkaBatchSize;
        this.kafkaSaslFlag = false;
        this.kafkaKerberosFlag = false;
        createKafkaClient();
    }

    public KafkaProducerUtil(String kafkaBootstrapServers, Integer kafkaBatchSize, boolean kafkaSaslFlag,
                             String kafkaSecurityProtocol, String kafkaSaslMechanism,
                             String kafkaSaslAppkey, String kafkaSaslSecretkey) {
        this.kafkaBootstrapServers = kafkaBootstrapServers;
        this.kafkaBatchSize = kafkaBatchSize;
        this.kafkaSaslFlag = kafkaSaslFlag;
        this.kafkaKerberosFlag = false;
        this.kafkaSecurityProtocol = kafkaSecurityProtocol;
        this.kafkaSaslMechanism = kafkaSaslMechanism;
        this.kafkaSaslAppkey = kafkaSaslAppkey;
        this.kafkaSaslSecretkey = kafkaSaslSecretkey;
        createKafkaClient();
    }

    public KafkaProducerUtil(String kafkaBootstrapServers, Integer kafkaBatchSize, boolean kafkaKerberosFlag,
                             String kafkaKerberosKrb5Conf, String kafkaKerberosJaasConf,
                             String kafkaSecurityProtocol, String kafkaSaslKerberosServiceName,
                             String kafkaSaslMechanism) {
        this.kafkaBootstrapServers = kafkaBootstrapServers;
        this.kafkaBatchSize = kafkaBatchSize;
        this.kafkaSaslFlag = false;
        this.kafkaKerberosFlag = kafkaKerberosFlag;
        this.kafkaSecurityProtocol = kafkaSecurityProtocol;
        this.kafkaSaslMechanism = kafkaSaslMechanism;
        this.kafkaKerberosKrb5Conf = kafkaKerberosKrb5Conf;
        this.kafkaKerberosJaasConf = kafkaKerberosJaasConf;
        this.kafkaSaslKerberosServiceName = kafkaSaslKerberosServiceName;
        createKafkaClient();
    }

    public KafkaProducerUtil(String kafkaBootstrapServers, Integer kafkaBatchSize, boolean kafkaSaslFlag,
                             boolean kafkaKerberosFlag, String kafkaKerberosKrb5Conf, String kafkaKerberosJaasConf,
                             String kafkaSecurityProtocol, String kafkaSaslKerberosServiceName,
                             String kafkaSaslMechanism, String kafkaSaslAppkey, String kafkaSaslSecretkey) {
        this.kafkaBootstrapServers = kafkaBootstrapServers;
        this.kafkaBatchSize = kafkaBatchSize;
        this.kafkaSaslFlag = kafkaSaslFlag;
        this.kafkaKerberosFlag = kafkaKerberosFlag;
        this.kafkaSecurityProtocol = kafkaSecurityProtocol;
        this.kafkaSaslMechanism = kafkaSaslMechanism;
        this.kafkaKerberosKrb5Conf = kafkaKerberosKrb5Conf;
        this.kafkaKerberosJaasConf = kafkaKerberosJaasConf;
        this.kafkaSaslKerberosServiceName = kafkaSaslKerberosServiceName;
        this.kafkaSaslAppkey = kafkaSaslAppkey;
        this.kafkaSaslSecretkey = kafkaSaslSecretkey;
        createKafkaClient();
    }

    public void createKafkaClient() {
        try {
            Properties props = new Properties();
            props.put("bootstrap.servers", kafkaBootstrapServers);
            props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
            props.put("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer");
            props.put("batch.size", kafkaBatchSize);
            if (kafkaSaslFlag) {
                props.put("security.protocol", kafkaSecurityProtocol);
                props.put("sasl.mechanism", kafkaSaslMechanism);
                //Configuration.setConfiguration(new SaslConfig(kafkaSaslAppkey, kafkaSaslSecretkey));
            }
            if (kafkaKerberosFlag) {
                System.setProperty("java.security.krb5.conf", kafkaKerberosKrb5Conf);
                System.setProperty("java.security.auth.login.config", kafkaKerberosJaasConf);
                props.put("security.protocol", kafkaSecurityProtocol);
                props.put("sasl.kerberos.service.name", kafkaSaslKerberosServiceName);
                props.put("sasl.mechanism", kafkaSaslMechanism);
            }
            // Build the byte-array producer, then swap the value serializer and build the String producer.
            producer = new KafkaProducer<String, byte[]>(props);
            props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
            noAvroProducer = new KafkaProducer<String, String>(props);
        } catch (Exception ex) {
            ex.printStackTrace();
            log.error("初始化Kafka失败,系统自动退出! ", ex);  // "Failed to initialize Kafka, exiting"
            System.exit(1);
        }
    }

    public void sendAlarm(String topic, String alarmJson) {
        try {
            noAvroProducer.send(new ProducerRecord<String, String>(topic, null, alarmJson));
        } catch (Exception e) {
            log.error("sendAlarm-插入Kafka失败", e);  // "sendAlarm - failed to write to Kafka"
        }
    }

    public void sendMetric(String metricSetName, String timestamp, Map<String, String> dimensions,
                           Map<String, Double> metrics, String topic) {
        try {
            byte[] bytes = AvroSerializerFactory.getMetricAvorSerializer()
                    .serializingMetric(metricSetName, timestamp, dimensions, metrics);
            producer.send(new ProducerRecord<String, byte[]>(topic, null, bytes));
        } catch (Exception e) {
            log.error("sendMetric-插入Kafka失败", e);  // "sendMetric - failed to write to Kafka"
        }
    }

    public void sendLog(String topic, String logTypeName, String timestamp, String source, String offset,
                        Map<String, String> dimensions, Map<String, Double> metrics,
                        Map<String, String> normalFields) {
        try {
            byte[] bytes = AvroSerializerFactory.getLogAvorSerializer()
                    .serializingLog(logTypeName, timestamp, source, offset, dimensions, metrics, normalFields);
            producer.send(new ProducerRecord<String, byte[]>(topic, null, bytes));
        } catch (Exception e) {
            log.error("sendLog-插入Kafka失败", e);  // "sendLog - failed to write to Kafka"
        }
    }
}
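For context, a minimal usage sketch of the new utility. The broker address, topic names, and metric values below are illustrative assumptions, not part of this commit; the class is assumed to be used from the same package:

package com.zorkdta.tools.mock;

import java.util.HashMap;
import java.util.Map;

public class KafkaProducerUtilExample {
    public static void main(String[] args) {
        // Plain producer, no SASL or Kerberos (first constructor).
        KafkaProducerUtil util = new KafkaProducerUtil("kafka01:9092", 16384);

        // Alarms are sent as plain JSON strings via the String producer.
        util.sendAlarm("alarm_topic", "{\"level\":\"warn\",\"msg\":\"disk usage high\"}");

        // Metrics (and logs) are Avro-serialized by AvroSerializerFactory before sending.
        Map<String, String> dimensions = new HashMap<>();
        dimensions.put("hostname", "zorkdata-151");
        Map<String, Double> metrics = new HashMap<>();
        metrics.put("cpu.usage", 0.42);
        util.sendMetric("host_metrics", "2020-06-22 10:00:00", dimensions, metrics, "metric_topic");
    }
}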
src/main/java/com/zorkdta/tools/mock/MockConnectJsonData.java

@@ -3,11 +3,13 @@ package com.zorkdta.tools.mock;
import com.alibaba.fastjson.JSONObject;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.serialization.StringSerializer;

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Properties;
import java.util.concurrent.Future;

/**
 * Produce data (works as intended)
@@ -22,14 +24,14 @@ public class MockConnectJsonData {

    private static void init() {
        Properties props = new Properties();
        props.put("bootstrap.servers", brokerlist);
-       props.put("acks", "1");
-       props.put("retries", 0);
+       props.put("acks", "-1");
+       props.put("retries", 1);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", StringSerializer.class.getName());
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
-       producer = new KafkaProducer<String, String>(props);
+       producer = new KafkaProducer<>(props);
    }

    /**

@@ -50,7 +52,7 @@ public class MockConnectJsonData {
    private static String buildMsg() {
        JSONObject jsonObject = new JSONObject();
        jsonObject.put("logtypename", "tdx_filebeat");
-       jsonObject.put("hostname", "kafka-connect-2");
+       jsonObject.put("hostname", "kafkaproducer-connect-2");
        jsonObject.put("appprogram", "tdx");
        jsonObject.put("offset", String.valueOf(System.currentTimeMillis()));
        jsonObject.put("message",
"10:06:41.335 功能请求 IP:182.140.129.3 MAC:F8A963586DFF 线程:00004364 通道ID:4 事务ID:16 请求:(0-98)集成客户校验(*) 营业部:(0001)国金证券集中交易(*)\\n66650109|************|XshR9/S5SDE=|8|0||12|7.37.0||||||||||0||0|182.140.129.3;PENGKANG;Administrator;83025;Intel(R)Core(TM)i7-4510UCPU@2.00GHz*4;bfebfbff00040651-GenuineIntel;Windows7 Service Pack 1 (Build 7601);182.140.129.3,0.0.0.0,0.0.0.0;F8A963586DFF,00FF8C535532,A0A8CD0D00B0;TF655AWJ16NG2L,143116404707;07/15/2014;8DC03929-0822-453C-A2D5-EFBE95E359BE;182.140.129.3;;NTFS;0C17-8FD7;C:;113G;HTS725050A7E630;GH2Z;TF655AWJ16NG2L;|||||2,Mar 1 2018,10:22:32|0|||GETLOGINPARAM||7.37,6.01,Mar 1 2018,10:37:07|8噝\\\\5\\\\3||||\\n10:06:41.491 调用失败 IP:182.140.129.3 MAC:F8A963586DFF 线程:00004364 通道ID:4 事务ID:16 请求:(0-98)集成客户校验(*) 营业部:(0001)国金证券集中交易(*) 耗时A:156 耗时B:0 排队:0\\n-4|资金账号或密码错误!|0|||\\n10:06:52.678 系统信息 开始关闭交易中心服务。\\n10:06:53.303 系统信息 (HS_TCP2.dll)连接守护线程退出!\\n10:06:53.335 系统信息 (HS_TCP2.dll)\\\"刷新约定购回标的证券信息\\\"线程成功退出!(记录总条数:3536)\\n10:06:54.413 系统信息 港股行情服务: 保存代码表(港股)缓存。\\n10:06:54.678 系统信息 深沪行情服务: 保存代码表缓存。\\n10:06:54.960 系统信息 交易中心服务已经成功关闭。\\n10:06:54.960 系统信息 系统正常关闭\\n"
        );

@@ -64,18 +66,18 @@ public class MockConnectJsonData {
    }

    private static void send(String message) {
-       ProducerRecord<String, String> producerRecord = new ProducerRecord<String, String>(topic, null, message);
+       ProducerRecord<String, String> producerRecord = new ProducerRecord<>(topic, null, message);
        producer.send(producerRecord);
    }

    public static void main(String[] args) {
-       topic = "tdx3";
-       brokerlist = "kafka-1:19092,kafka-2:19092,kafka-3:19092";
+       topic = "test";
+       brokerlist = "kafka01:9092,kafka02:9092,kafka03:9092";
        init();
        for (int i = 0; i <= 10000; i++) {
-           String message = buildMsg();
+           String message = "" + i;
            send(message);
        }
    }
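The diff above tightens the producer's reliability settings for MockConnectJsonData: acks goes from "1" to "-1", retries from 0 to 1, and the send loop now publishes a simple counter instead of the mock trading log. As an explanatory sketch (not part of the commit), the same settings can be written with the ProducerConfig constants, where "-1" is equivalent to "all", i.e. the leader waits for the full in-sync replica set to acknowledge each record:

import java.util.Properties;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;

public class ReliableProducerProps {
    // Sketch of the post-commit configuration; the broker list is whatever init() receives.
    public static Properties build(String brokerlist) {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerlist);
        props.put(ProducerConfig.ACKS_CONFIG, "all");          // same meaning as "-1"
        props.put(ProducerConfig.RETRIES_CONFIG, 1);           // one retry on transient errors
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
        props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
        return props;
    }
}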
src/main/java/com/zorkdta/tools/mock/MockFilebeatDataToKafka.java (new file, mode 0 → 100644)
package com.zorkdta.tools.mock;

import com.alibaba.fastjson.JSONObject;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Properties;
import java.util.concurrent.ExecutionException;

/**
 * @author: LiaoMingtao
 * @date: 2020/6/19
 */
public class MockFilebeatDataToKafka {
    private static String topic;
    private static String brokerlist;
    private static KafkaProducer<String, String> producer;

    private static final String APP_SYSTEM = "TEST_JTY";
    private static final String CLUSTER_NAME = "TEST_CLUSTER";
    private static final String SERVICE_CODE = "TEST_SERVICE";
    private static final String APP_PROGRAM_NAME = "TEST_SERVICE";
    private static final String IP = "192.168.1.";
    private static final String HOSTNAME = "zorkdata-";
    // Sample trading-system log text reused as the message payload.
    private static final String MESSAGE =
"10:06:41.335 功能请求 IP:182.140.129.3 MAC:F8A963586DFF 线程:00004364 通道ID:4 事务ID:16 请求:(0-98)集成客户校验(*) 营业部:(0001)国金证券集中交易(*)\\n66650109|************|XshR9/S5SDE=|8|0||12|7.37.0||||||||||0||0|182.140.129.3;PENGKANG;Administrator;83025;Intel(R)Core(TM)i7-4510UCPU@2.00GHz*4;bfebfbff00040651-GenuineIntel;Windows7 Service Pack 1 (Build 7601);182.140.129.3,0.0.0.0,0.0.0.0;F8A963586DFF,00FF8C535532,A0A8CD0D00B0;TF655AWJ16NG2L,143116404707;07/15/2014;8DC03929-0822-453C-A2D5-EFBE95E359BE;182.140.129.3;;NTFS;0C17-8FD7;C:;113G;HTS725050A7E630;GH2Z;TF655AWJ16NG2L;|||||2,Mar 1 2018,10:22:32|0|||GETLOGINPARAM||7.37,6.01,Mar 1 2018,10:37:07|8噝\\\\5\\\\3||||\\n10:06:41.491 调用失败 IP:182.140.129.3 MAC:F8A963586DFF 线程:00004364 通道ID:4 事务ID:16 请求:(0-98)集成客户校验(*) 营业部:(0001)国金证券集中交易(*) 耗时A:156 耗时B:0 排队:0\\n-4|资金账号或密码错误!|0|||\\n10:06:52.678 系统信息 开始关闭交易中心服务。\\n10:06:53.303 系统信息 (HS_TCP2.dll)连接守护线程退出!\\n10:06:53.335 系统信息 (HS_TCP2.dll)\\\"刷新约定购回标的证券信息\\\"线程成功退出!(记录总条数:3536)\\n10:06:54.413 系统信息 港股行情服务: 保存代码表(港股)缓存。\\n10:06:54.678 系统信息 深沪行情服务: 保存代码表缓存。\\n10:06:54.960 系统信息 交易中心服务已经成功关闭。\\n10:06:54.960 系统信息 系统正常关闭\\n"
;

    private static void init() {
        Properties props = new Properties();
        props.put("bootstrap.servers", brokerlist);
        props.put("acks", "-1");
        props.put("retries", 1);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", StringSerializer.class.getName());
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        producer = new KafkaProducer<>(props);
    }

    public static String[] mockAppSystem(int length) {
        String[] strArr = new String[length];
        final String appSystem = "JTY";
        for (int i = 0; i < length; i++) {
            strArr[i] = appSystem + (i + 1);
        }
        return strArr;
    }

    public static String[] mockStrArr(String str, int length) {
        String[] strArr = new String[length];
        for (int i = 0; i < length; i++) {
            strArr[i] = str + (i + 1);
        }
        return strArr;
    }

    /**
     * Get the current collection time.
     *
     * @return String
     */
    private static String getLogTime() {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
        return sdf.format(new Date());
    }

    private static String getCollectTime() {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd");
        return sdf.format(new Date());
    }

    private static String buildMsg(String appSystem, String appProgramName, String ip, String clusterName, String serviceCode) {
        JSONObject jsonObject = buildBaseMsg();
        jsonObject.put("appsystem", appSystem);
        jsonObject.put("appprogramname", appProgramName);
        jsonObject.put("clustername", clusterName);
        jsonObject.put("servicecode", serviceCode);
        StringBuilder str = new StringBuilder(MESSAGE);
        jsonObject.put("message", str.append(appSystem).append(appProgramName).append(clusterName).append(ip));
        return jsonObject.toJSONString();
    }

    private static JSONObject buildBaseMsg() {
        JSONObject filebeatJson = new JSONObject();
        JSONObject metadataJson = new JSONObject();
        metadataJson.put("beat", "filebeat");
        metadataJson.put("type", "doc");
        metadataJson.put("version", "6.8.1");
        JSONObject inputJson = new JSONObject();
        inputJson.put("type", "log");
        JSONObject beatJson = new JSONObject();
        // beatJson.put("name", "zorkdata-151");
        beatJson.put("hostname", "zorkdata-151");
        beatJson.put("version", "6.8.1");
        JSONObject hostJson = new JSONObject();
        hostJson.put("name", "zorkdata-151");
        hostJson.put("architecture", "x86_64");
        hostJson.put("id", "8e3dfc85999b4e02bae4adf4b92b909a");
        hostJson.put("containerized", "false");
        JSONObject logJson = new JSONObject();
        logJson.put("file", "{ \"path\": \"/var/log/nginx/access.log\" }");
        filebeatJson.put("@timestamp", "2020-06-19T01:29:44.181Z");
        filebeatJson.put("source", "/var/log/nginx/access.log");
        filebeatJson.put("offset", String.valueOf(System.currentTimeMillis()));
        filebeatJson.put("message",
"10:06:41.335 功能请求 IP:182.140.129.3 MAC:F8A963586DFF 线程:00004364 通道ID:4 事务ID:16 请求:(0-98)集成客户校验(*) 营业部:(0001)国金证券集中交易(*)\\n66650109|************|XshR9/S5SDE=|8|0||12|7.37.0||||||||||0||0|182.140.129.3;PENGKANG;Administrator;83025;Intel(R)Core(TM)i7-4510UCPU@2.00GHz*4;bfebfbff00040651-GenuineIntel;Windows7 Service Pack 1 (Build 7601);182.140.129.3,0.0.0.0,0.0.0.0;F8A963586DFF,00FF8C535532,A0A8CD0D00B0;TF655AWJ16NG2L,143116404707;07/15/2014;8DC03929-0822-453C-A2D5-EFBE95E359BE;182.140.129.3;;NTFS;0C17-8FD7;C:;113G;HTS725050A7E630;GH2Z;TF655AWJ16NG2L;|||||2,Mar 1 2018,10:22:32|0|||GETLOGINPARAM||7.37,6.01,Mar 1 2018,10:37:07|8噝\\\\\\\\5\\\\\\\\3||||\\\\n10:06:41.491 调用失败 IP:182.140.129.3 MAC:F8A963586DFF 线程:00004364 通道ID:4 事务ID:16 请求:(0-98)集成客户校验(*) 营业部:(0001)国金证券集中交易(*) 耗时A:156 耗时B:0 排队:0\\\\n-4|资金账号或密码错误!|0|||\\\\n10:06:52.678 系统信息 开始关闭交易中心服务。\\\\n10:06:53.303 系统信息 (HS_TCP2.dll)连接守护线程退出!\\\\n10:06:53.335 系统信息 (HS_TCP2.dll)\\\\\\\"刷新约定购回标的证券信息\\\\\\\"线程成功退出!(记录总条数:3536)\\\\n10:06:54.413 系统信息 港股行情服务: 保存代码表(港股)缓存。\\\\n10:06:54.678 系统信息 深沪行情服务: 保存代码表缓存。\\\\n10:06:54.960 系统信息 交易中心服务已经成功关闭。\\\\n10:06:54.960 系统信息 系统正常关闭\\\\n"
        );
        filebeatJson.put("appsystem", "test_appsystem");
        filebeatJson.put("appprogramname", "test_appprogramname");
        filebeatJson.put("clustername", "test_clustername");
        filebeatJson.put("logTypeName", "test_topic_log");
        filebeatJson.put("servicename", "test_servicename");
        filebeatJson.put("servicecode", "test_cdde");
        filebeatJson.put("collector_rule_id", "1");
        filebeatJson.put("@metadata", metadataJson);
        filebeatJson.put("input", inputJson);
        filebeatJson.put("beat", beatJson);
        filebeatJson.put("host", hostJson);
        filebeatJson.put("log", logJson);
        filebeatJson.put("prospector", inputJson);
        return filebeatJson;
    }

    private static String buildMsg() {
        JSONObject filebeatJson = new JSONObject();
        JSONObject metadataJson = new JSONObject();
        metadataJson.put("beat", "filebeat");
        metadataJson.put("type", "doc");
        metadataJson.put("version", "6.8.1");
        JSONObject inputJson = new JSONObject();
        inputJson.put("type", "log");
        JSONObject beatJson = new JSONObject();
        beatJson.put("name", "zorkdata-151");
        beatJson.put("hostname", "zorkdata-151");
        beatJson.put("version", "6.8.1");
        JSONObject hostJson = new JSONObject();
        hostJson.put("name", "zorkdata-151");
        hostJson.put("architecture", "x86_64");
        hostJson.put("id", "8e3dfc85999b4e02bae4adf4b92b909a");
        hostJson.put("containerized", "false");
        JSONObject logJson = new JSONObject();
        logJson.put("file", "{ \"path\": \"/var/log/nginx/access.log\" }");
        filebeatJson.put("@timestamp", "2020-06-19T01:29:44.181Z");
        filebeatJson.put("source", "/var/log/nginx/access.log");
        filebeatJson.put("offset", String.valueOf(System.currentTimeMillis()));
        filebeatJson.put("message",
"10:06:41.335 功能请求 IP:182.140.129.3 MAC:F8A963586DFF 线程:00004364 通道ID:4 事务ID:16 请求:(0-98)集成客户校验(*) 营业部:(0001)国金证券集中交易(*)\\n66650109|************|XshR9/S5SDE=|8|0||12|7.37.0||||||||||0||0|182.140.129.3;PENGKANG;Administrator;83025;Intel(R)Core(TM)i7-4510UCPU@2.00GHz*4;bfebfbff00040651-GenuineIntel;Windows7 Service Pack 1 (Build 7601);182.140.129.3,0.0.0.0,0.0.0.0;F8A963586DFF,00FF8C535532,A0A8CD0D00B0;TF655AWJ16NG2L,143116404707;07/15/2014;8DC03929-0822-453C-A2D5-EFBE95E359BE;182.140.129.3;;NTFS;0C17-8FD7;C:;113G;HTS725050A7E630;GH2Z;TF655AWJ16NG2L;|||||2,Mar 1 2018,10:22:32|0|||GETLOGINPARAM||7.37,6.01,Mar 1 2018,10:37:07|8噝\\\\\\\\5\\\\\\\\3||||\\\\n10:06:41.491 调用失败 IP:182.140.129.3 MAC:F8A963586DFF 线程:00004364 通道ID:4 事务ID:16 请求:(0-98)集成客户校验(*) 营业部:(0001)国金证券集中交易(*) 耗时A:156 耗时B:0 排队:0\\\\n-4|资金账号或密码错误!|0|||\\\\n10:06:52.678 系统信息 开始关闭交易中心服务。\\\\n10:06:53.303 系统信息 (HS_TCP2.dll)连接守护线程退出!\\\\n10:06:53.335 系统信息 (HS_TCP2.dll)\\\\\\\"刷新约定购回标的证券信息\\\\\\\"线程成功退出!(记录总条数:3536)\\\\n10:06:54.413 系统信息 港股行情服务: 保存代码表(港股)缓存。\\\\n10:06:54.678 系统信息 深沪行情服务: 保存代码表缓存。\\\\n10:06:54.960 系统信息 交易中心服务已经成功关闭。\\\\n10:06:54.960 系统信息 系统正常关闭\\\\n"
        );
        filebeatJson.put("appsystem", "test_appsystem");
        filebeatJson.put("appprogramname", "test_appprogramname");
        filebeatJson.put("logTypeName", "test_topic_log");
        filebeatJson.put("servicename", "test_servicename");
        filebeatJson.put("servicecode", "test_cdde");
        filebeatJson.put("collector_rule_id", "1");
        filebeatJson.put("@metadata", metadataJson);
        filebeatJson.put("input", inputJson);
        filebeatJson.put("beat", beatJson);
        filebeatJson.put("host", hostJson);
        filebeatJson.put("log", logJson);
        filebeatJson.put("prospector", inputJson);
        return filebeatJson.toJSONString();
    }

    private static void send(String message) {
        ProducerRecord<String, String> producerRecord = new ProducerRecord<>(topic, null, message);
        try {
            // Synchronous send: block until the broker acknowledges the record.
            producer.send(producerRecord).get();
        } catch (InterruptedException e) {
            e.printStackTrace();
        } catch (ExecutionException e) {
            e.printStackTrace();
        }
    }

    public static void main(String[] args) throws InterruptedException {
        topic = "test100";
        // brokerlist = "kafka01:9092,kafka02:9092,kafka03:9092";
        brokerlist = "kafka01:9092,kafka02:9092,kafka03:9092";
        init();
        int appSystemLength = 100;
        int appProgramNameLength = 10;
        int clusterNameLength = 10;
        int serviceNameLength = 10;
        int ipLength = 10;
        String[] appSystemArr = mockStrArr(APP_SYSTEM, appSystemLength);
        String[] appProgramNameArr = mockStrArr(APP_PROGRAM_NAME, appSystemLength);
        String[] clusterNameArr = mockStrArr(CLUSTER_NAME, appSystemLength);
        String[] serviceNameArr = mockStrArr(SERVICE_CODE, appSystemLength);
        String[] ipArr = mockStrArr(IP, appSystemLength);
        int count = 0;
        // 100 * 10 * 10 * 10 * 10 = 1,000,000 messages in total.
        for (int a = 0; a < appSystemLength; a++) {
            for (int b = 0; b < appProgramNameLength; b++) {
                for (int c = 0; c < clusterNameLength; c++) {
                    for (int d = 0; d < serviceNameLength; d++) {
                        for (int e = 0; e < ipLength; e++) {
                            String message = buildMsg(appSystemArr[a], appProgramNameArr[b], ipArr[e], clusterNameArr[d], serviceNameArr[c]);
                            System.out.println(message);
                            System.out.println(++count);
                            send(message);
                        }
                    }
                }
            }
        }
        // Thread.sleep(10000);
        // for (int i = 0; i <= 10; i++) {
        //     String message = "aaabb";
        //     send(message);
        //     System.out.println("\n" + i);
        //     // kafkaProducerUtil.sendAlarm(topic, message);
        // }
    }
}
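To spot-check what MockFilebeatDataToKafka wrote, a small consumer sketch can be used. This is not part of the commit; the topic name matches the main() above, the group id is made up, and poll(Duration) assumes kafka-clients 2.0+ (on 1.1.0 the long-millis overload would be used instead):

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

public class MockDataSpotCheck {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "kafka01:9092");
        props.put("group.id", "mock-data-spot-check");
        props.put("auto.offset.reset", "earliest");
        props.put("key.deserializer", StringDeserializer.class.getName());
        props.put("value.deserializer", StringDeserializer.class.getName());
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(Collections.singletonList("test100"));
            // Read one batch of the generated Filebeat-style JSON messages and print them.
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(5));
            for (ConsumerRecord<String, String> record : records) {
                System.out.println(record.value());
            }
        }
    }
}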
src/main/java/com/zorkdta/tools/mock/MockKafkaConnectAvro.java

@@ -2,7 +2,6 @@ package com.zorkdta.tools.mock;

import com.alibaba.fastjson.JSONObject;
import com.zorkdta.tools.utils.*;
-import top.xiesen.mock.kafka.utils.*;

import java.util.HashMap;
import java.util.Map;
src/main/java/com/zorkdta/tools/mock/MockKafkaConnectAvroTest.java

@@ -6,7 +6,6 @@ import org.apache.kafka.clients.producer.ProducerRecord;

import org.apache.kafka.common.serialization.ByteArraySerializer;
import com.zorkdta.tools.avro.AvroSerializer;
import com.zorkdta.tools.avro.AvroSerializerFactory;
-import top.xiesen.mock.kafka.utils.*;

import java.util.HashMap;
import java.util.Map;
src/main/java/com/zorkdta/tools/mock/MockKafkaConnectJson.java

@@ -5,7 +5,6 @@ import com.zorkdta.tools.utils.CustomerProducer;

import com.zorkdta.tools.utils.ProducerPool;
import com.zorkdta.tools.utils.PropertiesUtil;
import com.zorkdta.tools.utils.StringUtil;
-import top.xiesen.mock.kafka.utils.*;

import java.util.Properties;