廖鸣韬 / mock-data / Commits / a7c76f3f

Commit a7c76f3f, authored Jun 30, 2020 by DeleMing
Commit message: Optimize code (优化代码)
Parent: aa4ae227

Showing 12 changed files with 139 additions and 100 deletions (+139 −100)
src/main/java/com/zorkdata/tools/avro/AvroDeserializer.java (+11 −16)
src/main/java/com/zorkdata/tools/avro/AvroDeserializerFactory.java (+1 −0)
src/main/java/com/zorkdata/tools/avro/AvroSerializer.java (+22 −27)
src/main/java/com/zorkdata/tools/avro/AvroSerializerFactory.java (+1 −0)
src/main/java/com/zorkdata/tools/kafka/CommonProducer.java (+37 −32)
src/main/java/com/zorkdata/tools/kafka/CommonProducerPool.java (+2 −2)
src/main/java/com/zorkdata/tools/kafka/Config.java (+13 −11)
src/main/java/com/zorkdata/tools/kafka/Kafka.md (+36 −0)
src/main/java/com/zorkdata/tools/load/Test.java (+14 −0)
src/main/java/com/zorkdata/tools/mock/MockGrok.java (+0 −9)
src/main/java/com/zorkdata/tools/mock/MockStreamxLogAvro.java (+1 −3)
src/main/resources/config.properties (+1 −0)
src/main/java/com/zorkdata/tools/avro/AvroDeserializer.java

```diff
@@ -16,24 +16,22 @@ import org.slf4j.LoggerFactory;
  * @author DeleMing
  */
 public class AvroDeserializer {
-    private static final Logger LOGGER = LoggerFactory.getLogger(AvroDeserializer.class);
+    private static final Logger logger = LoggerFactory.getLogger(AvroDeserializer.class);

     public JSONObject jsonObject;
     public JSONArray jsonArray;
     public Schema schema;
     public String[] keys;

     public AvroDeserializer(String schema) {
-        getKeysFromjson(schema);
+        getKeysFromJson(schema);
     }

     /**
-     * @param schema the schema used for Avro serialization
-     * @return void
-     * @throws
-     * @Title: getKeysFromjson
-     * @Description: gets the Avro keys
+     * Gets the Avro keys.
+     *
+     * @param schema the schema used for Avro serialization
      */
-    void getKeysFromjson(String schema) {
+    void getKeysFromJson(String schema) {
         this.jsonObject = JSONObject.parseObject(schema);
         this.schema = new Schema.Parser().parse(schema);
         this.jsonArray = this.jsonObject.getJSONArray("fields");
@@ -44,14 +42,11 @@ public class AvroDeserializer {
     }

     /**
-     * @param body the Kafka message bytes
-     * @param @return
-     * @return String
-     * @throws
-     * @Title: deserializing
-     * @Description: Avro deserialization
+     * Avro deserialization.
+     *
+     * @param body the Kafka message bytes
+     * @return GenericRecord
      */
     public GenericRecord deserializing(byte[] body) {
         DatumReader<GenericData.Record> datumReader = new GenericDatumReader<GenericData.Record>(this.schema);
         Decoder decoder = DecoderFactory.get().binaryDecoder(body, null);
@@ -59,7 +54,7 @@ public class AvroDeserializer {
         try {
             result = datumReader.read(null, decoder);
         } catch (Exception e) {
-            LOGGER.error(String.format("error Avro反序列化"), e);
+            logger.error("error Avro反序列化", e);
         }
         return result;
     }
```
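For reference, deserializing() above is the standard Avro generic-datum decode path. Below is a minimal, self-contained sketch of the same writer/reader pairing as a full round trip; the toy schema, field name, and class name are illustrative only, not from this repo.

```java
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.EncoderFactory;

import java.io.ByteArrayOutputStream;

public class AvroRoundTripSketch {
    // A toy schema; the real schemas live elsewhere in this repo.
    private static final String SCHEMA_JSON =
            "{\"type\":\"record\",\"name\":\"Log\",\"fields\":["
            + "{\"name\":\"message\",\"type\":\"string\"}]}";

    public static void main(String[] args) throws Exception {
        Schema schema = new Schema.Parser().parse(SCHEMA_JSON);

        // Serialize: the same writer/encoder pairing AvroSerializer uses.
        GenericData.Record datum = new GenericData.Record(schema);
        datum.put("message", "hello");
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        GenericDatumWriter<GenericData.Record> writer = new GenericDatumWriter<>(schema);
        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
        writer.write(datum, encoder);
        encoder.flush();
        byte[] body = out.toByteArray();

        // Deserialize: mirrors AvroDeserializer.deserializing(byte[]).
        GenericDatumReader<GenericData.Record> reader = new GenericDatumReader<>(schema);
        Decoder decoder = DecoderFactory.get().binaryDecoder(body, null);
        GenericData.Record result = reader.read(null, decoder);
        System.out.println(result.get("message"));
    }
}
```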
src/main/java/com/zorkdata/tools/avro/AvroDeserializerFactory.java

```diff
@@ -4,6 +4,7 @@ package com.zorkdata.tools.avro;
  * @author DeleMing
  */
 public class AvroDeserializerFactory {
     private static AvroDeserializer logs = null;
     private static AvroDeserializer metrics = null;
```
src/main/java/com/zorkdata/tools/avro/AvroSerializer.java

```diff
@@ -24,46 +24,41 @@ public class AvroSerializer {
     public JSONObject jsonObject;
     public JSONArray jsonArray;
     public Schema schema;
-    public List<String> filedsArrayList = new ArrayList<String>();
+    public List<String> fieldsArrayList = new ArrayList<String>();

     public AvroSerializer(String schema) {
-        getKeysFromjson(schema);
+        getKeysFromJson(schema);
     }

     /**
-     * @param schema the schema used for Avro serialization
-     * @return void
-     * @throws
-     * @Title: getKeysFromjson
-     * @Description: gets the Avro keys
+     * Gets the Avro keys.
+     *
+     * @param schema the schema used for Avro serialization
      */
-    void getKeysFromjson(String schema) {
+    void getKeysFromJson(String schema) {
         this.jsonObject = JSONObject.parseObject(schema);
         this.schema = new Schema.Parser().parse(schema);
         this.jsonArray = this.jsonObject.getJSONArray("fields");
-        if (filedsArrayList != null && filedsArrayList.size() > 0) {
-            filedsArrayList.clear();
+        if (fieldsArrayList != null && fieldsArrayList.size() > 0) {
+            fieldsArrayList.clear();
         }
         for (int i = 0; i < this.jsonArray.size(); i++) {
-            filedsArrayList.add(this.jsonArray.getJSONObject(i).get("name").toString());
+            fieldsArrayList.add(this.jsonArray.getJSONObject(i).get("name").toString());
         }
     }

     /**
-     * @param @return
-     * @return String
-     * @throws
-     * @Title: serializing
-     * @Description: Avro serialization
+     * Avro serialization.
+     *
+     * @param temtuple
+     * @return
      */
     private synchronized byte[] serializing(List<String> temtuple) {
         byte[] returnstr = null;
         GenericRecord datum = new GenericData.Record(this.schema);
         // add the data to the datum
-        for (int i = 0; i < filedsArrayList.size(); i++) {
-            datum.put(filedsArrayList.get(i), temtuple.get(i));
+        for (int i = 0; i < fieldsArrayList.size(); i++) {
+            datum.put(fieldsArrayList.get(i), temtuple.get(i));
         }
         ByteArrayOutputStream out = new ByteArrayOutputStream();
         // the DatumWriter translates the data object into a type the Encoder understands
@@ -76,7 +71,7 @@ public class AvroSerializer {
             encoder.flush();
         } catch (IOException e) {
             System.out.println("序列化失败 " + e);
         } finally {
             if (out != null) {
                 try {
@@ -105,8 +100,8 @@ public class AvroSerializer {
         JSONObject jsonObject = (JSONObject) JSONObject.parse(json);
         // new TypeReference<Object>() {}
         GenericRecord datum = new GenericData.Record(this.schema);
         // add the data to the datum
-        for (int i = 0; i < filedsArrayList.size(); i++) {
-            datum.put(filedsArrayList.get(i), new Utf8(String.valueOf(jsonObject.get(filedsArrayList.get(i)))));
+        for (int i = 0; i < fieldsArrayList.size(); i++) {
+            datum.put(fieldsArrayList.get(i), new Utf8(String.valueOf(jsonObject.get(fieldsArrayList.get(i)))));
         }
         ByteArrayOutputStream out = new ByteArrayOutputStream();
         // the DatumWriter translates the data object into a type the Encoder understands
@@ -146,8 +141,8 @@ public class AvroSerializer {
         byte[] returnstr = null;
         GenericRecord datum = new GenericData.Record(this.schema);
         // add the data to the datum
-        for (int i = 0; i < filedsArrayList.size(); i++) {
-            datum.put(filedsArrayList.get(i), jsonObject.get(filedsArrayList.get(i)));
+        for (int i = 0; i < fieldsArrayList.size(); i++) {
+            datum.put(fieldsArrayList.get(i), jsonObject.get(fieldsArrayList.get(i)));
         }
         ByteArrayOutputStream out = new ByteArrayOutputStream();
         // the DatumWriter translates the data object into a type the Encoder understands
@@ -249,8 +244,8 @@ public class AvroSerializer {
         byte[] returnstr = null;
         GenericRecord datum = new GenericData.Record(this.schema);
         // add the data to the datum
-        for (int i = 0; i < filedsArrayList.size(); i++) {
-            datum.put(filedsArrayList.get(i), new Utf8(String.valueOf(genericRecord.get(key[i]))));
+        for (int i = 0; i < fieldsArrayList.size(); i++) {
+            datum.put(fieldsArrayList.get(i), new Utf8(String.valueOf(genericRecord.get(key[i]))));
         }
         ByteArrayOutputStream out = new ByteArrayOutputStream();
         // the DatumWriter translates the data object into a type the Encoder understands
```
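The substantive change in this file is the rename of the misspelled filedsArrayList to fieldsArrayList. Incidentally, the field names that getKeysFromJson() extracts with fastjson can also be read from the parsed Avro Schema itself; a hedged sketch of that alternative (not what this repo does), with a toy schema:

```java
import org.apache.avro.Schema;

import java.util.ArrayList;
import java.util.List;

public class SchemaFieldsSketch {
    public static void main(String[] args) {
        String schemaJson = "{\"type\":\"record\",\"name\":\"Log\",\"fields\":["
                + "{\"name\":\"timestamp\",\"type\":\"string\"},"
                + "{\"name\":\"source\",\"type\":\"string\"}]}";
        Schema schema = new Schema.Parser().parse(schemaJson);

        // Equivalent of getKeysFromJson(): collect the record's field names once.
        List<String> fieldsArrayList = new ArrayList<>();
        for (Schema.Field field : schema.getFields()) {
            fieldsArrayList.add(field.name());
        }
        System.out.println(fieldsArrayList); // [timestamp, source]
    }
}
```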
src/main/java/com/zorkdata/tools/avro/AvroSerializerFactory.java

```diff
@@ -4,6 +4,7 @@ package com.zorkdata.tools.avro;
  * @author DeleMing
  */
 public class AvroSerializerFactory {
     private static AvroSerializer metricMetadata = null;
     private static AvroSerializer logMetadata = null;
```
src/main/java/com/zorkdata/tools/kafka/CommonProducer.java

```diff
@@ -3,6 +3,8 @@ package com.zorkdata.tools.kafka;
 import com.zorkdata.tools.oldkafka.AvroSerializerFactory;
 import org.apache.kafka.clients.producer.KafkaProducer;
 import org.apache.kafka.clients.producer.ProducerRecord;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import java.util.Map;
 import java.util.Properties;
@@ -13,87 +15,90 @@ import java.util.Properties;
  */
 public class CommonProducer {
-    private static String kafkaServer;
-    private static int kafkaBathSize;
-    private static KafkaProducer<String, byte[]> producerByte;
-    private static KafkaProducer<String, String> producerString;
+    private static final Logger logger = LoggerFactory.getLogger(CommonProducer.class);
+    private String kafkaServer;
+    private int kafkaBathSize;
+    private KafkaProducer<String, byte[]> producerByte;
+    private KafkaProducer<String, String> producerString;

+    public void initConfig() {
+        kafkaServer = Config.INSTANCE.getKafkaServers();
+        kafkaBathSize = Config.INSTANCE.getKafkaBathSize();
+    }

     public CommonProducer() {
         try {
             initConfig();
             Properties props = new Properties();
             props.put("bootstrap.servers", kafkaServer);
-            props.put("client.id", "webAPI4LogGather");
+            // use auto-generated client ids rather than a custom one: props.put("client.id", "webAPI4LogGather");
             props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
             props.put("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer");
             props.put("batch.size", kafkaBathSize);
+            // enable compression
+            props.put("compression.type", "lz4");
-            producerByte = new KafkaProducer<String, byte[]>(props);
+            producerByte = new KafkaProducer<>(props);
             props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
-            producerString = new KafkaProducer<String, String>(props);
+            producerString = new KafkaProducer<>(props);
         } catch (Exception ex) {
-            ex.printStackTrace();
+            logger.error(ex.toString());
         }
     }

-    public void initConfig() {
-        kafkaServer = Config.getInstance().kafkaServers;
-        kafkaBathSize = Config.getInstance().kafkaBathSize;
-    }

     public void sendLog(String topic, String logTypeName, String timestamp, String source, String offset, Map<String, String> dimensions, Map<String, Double> metrics, Map<String, String> normalFields) {
         try {
             byte[] bytes = AvroSerializerFactory.getLogAvorSerializer().serializingLog(logTypeName, timestamp, source, offset, dimensions, metrics, normalFields);
-            producerByte.send(new ProducerRecord<String, byte[]>(topic, null, bytes));
+            producerByte.send(new ProducerRecord<>(topic, null, bytes));
         } catch (Exception e) {
-            e.printStackTrace();
+            logger.error(e.toString());
         }
     }

     public void sendLog(String topic, String logJson) {
         try {
-            producerString.send(new ProducerRecord<String, String>(topic, null, logJson));
+            producerString.send(new ProducerRecord<>(topic, null, logJson));
         } catch (Exception e) {
-            e.printStackTrace();
+            logger.error(e.toString());
         }
     }

-    public void sendErrorLog(String errorLogTopic, String logJson) {
+    public void sendMetric(String metricTopic, String metricSetName, String timestamp, Map<String, String> dimensions, Map<String, Double> metrics) {
         try {
-            producerString.send(new ProducerRecord<String, String>(errorLogTopic, null, logJson));
+            byte[] bytes = AvroSerializerFactory.getMetricAvorSerializer().serializingMetric(metricSetName, timestamp, dimensions, metrics);
+            producerByte.send(new ProducerRecord<>(metricTopic, null, bytes));
         } catch (Exception e) {
-            e.printStackTrace();
+            logger.error(e.toString());
         }
     }

-    public void sendErrorMetric(String errorMetricTopic, String logJson) {
+    public void sendErrorLog(String errorLogTopic, String logJson) {
         try {
-            producerString.send(new ProducerRecord<String, String>(errorMetricTopic, null, logJson));
+            producerString.send(new ProducerRecord<>(errorLogTopic, null, logJson));
         } catch (Exception e) {
-            e.printStackTrace();
+            logger.error(e.toString());
         }
     }

-    public void sendAlarm(String alarmTopic, String alarmJson) {
+    public void sendErrorMetric(String errorMetricTopic, String logJson) {
         try {
-            producerString.send(new ProducerRecord<String, String>(alarmTopic, null, alarmJson));
+            producerString.send(new ProducerRecord<>(errorMetricTopic, null, logJson));
         } catch (Exception e) {
-            e.printStackTrace();
+            logger.error(e.toString());
         }
     }

-    public void sendMetric(String metricTopic, String metricSetName, String timestamp, Map<String, String> dimensions, Map<String, Double> metrics) {
+    public void sendAlarm(String alarmTopic, String alarmJson) {
         try {
-            byte[] bytes = AvroSerializerFactory.getMetricAvorSerializer().serializingMetric(metricSetName, timestamp, dimensions, metrics);
-            producerByte.send(new ProducerRecord<String, byte[]>(metricTopic, null, bytes));
+            producerString.send(new ProducerRecord<>(alarmTopic, null, alarmJson));
         } catch (Exception e) {
-            e.printStackTrace();
+            logger.error(e.toString());
         }
     }
```
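After this refactor the producer fields are per-instance rather than static, and failures are logged via slf4j instead of printStackTrace(). A minimal usage sketch against the API visible in this diff; the topic names and JSON payloads are made up for illustration:

```java
import com.zorkdata.tools.kafka.CommonProducer;

public class CommonProducerSketch {
    public static void main(String[] args) {
        // The no-arg constructor pulls bootstrap servers and batch size from
        // Config and builds both the byte[] and String producers.
        CommonProducer producer = new CommonProducer();

        // String-payload overload of sendLog (topic and JSON are placeholders).
        producer.sendLog("test", "{\"message\":\"hello\"}");

        // Alarms go through the String producer as plain JSON.
        producer.sendAlarm("alarm-topic", "{\"level\":\"warn\"}");
    }
}
```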
src/main/java/com/zorkdata/tools/kafka/CommonProducerPool.java

```diff
@@ -32,7 +32,6 @@ public class CommonProducerPool implements Closeable {
         init();
     }

     public void init() {
         pool = new CommonProducer[threadNum];
         for (int i = 0; i < threadNum; i++) {
@@ -41,7 +40,8 @@ public class CommonProducerPool implements Closeable {
     }

     public CommonProducer getProducer() {
-        if (index > 65535) {
+        final int maxThread = 65535;
+        if (index > maxThread) {
             index = 0;
         }
         return pool[index++ % threadNum];
```
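The new maxThread constant only names the magic number; the selection logic is unchanged. A self-contained sketch of that round-robin walk, with a hypothetical pool size of 4, showing why the cursor reset matters:

```java
public class RoundRobinSketch {
    public static void main(String[] args) {
        int index = 0;
        final int threadNum = 4;      // pool size; hypothetical value
        final int maxThread = 65535;  // reset threshold from the diff above
        for (int call = 0; call < 10; call++) {
            // Resetting the cursor keeps index++ from ever overflowing int,
            // which would make index % threadNum negative and break the lookup.
            if (index > maxThread) {
                index = 0;
            }
            int slot = index++ % threadNum; // producers hand out as 0,1,2,3,0,1,...
            System.out.println("call " + call + " -> pool[" + slot + "]");
        }
    }
}
```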
src/main/java/com/zorkdata/tools/kafka/Config.java

```diff
 package com.zorkdata.tools.kafka;

 import com.zorkdata.tools.utils.PropertiesUtil;
+import lombok.Getter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import java.util.Properties;

 /**
  * @author MSI-Gaming
  */
+@Getter
 public class Config {
-    private static Config instance;
-
-    public static Config getInstance() {
-        if (instance == null) {
-            instance = new Config();
-        }
-        return instance;
-    }
-
     private static final Logger logger = LoggerFactory.getLogger(Config.class);
-    public String kafkaServers;
-    public int kafkaBathSize;
-    public String topicName;
+    private String kafkaServers;
+    protected int kafkaBathSize;
+    protected String topicName;
+
+    public static final Config INSTANCE = new Config();

     /**
      * Reads the configuration file.
      */
     private Config() {
         try {
             Properties properties = PropertiesUtil.getProperties("/config.properties");
@@ -29,7 +31,7 @@ public class Config {
             kafkaBathSize = Integer.parseInt(properties.getProperty("kafka.batch.size"));
             topicName = properties.getProperty("kafka.topic.name");
         } catch (Exception e) {
-            e.printStackTrace();
+            logger.error(e.toString());
             System.exit(1);
         }
     }
```
src/main/java/com/zorkdata/tools/kafka/Kafka.md (new file)

# Kafka Producer parameter settings and notes

```java
Properties props = new Properties();
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "hadoop:9092");
/**
 * Controls the size, in bytes, of the batches of records sent to the same partition:
 * when buffered data reaches this size, the batch is sent. Reaching the size is not
 * the only trigger, though — batches can also be sent before they are full.
 * The default is 16384, i.e. 16 KB.
 */
props.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
/**
 * Whether a message is sent is not controlled by batch.size alone; it is a trade-off
 * between throughput and latency. linger.ms controls the send-delay behavior; the
 * default is 0, meaning messages are sent immediately.
 */
props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
/**
 * The size, in bytes, of the buffer the producer uses for caching records.
 * The default is 33554432, i.e. 32 MB.
 */
props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
/**
 * Controls the producer's durability. acks has three values:
 * 0:   the producer ignores the broker's result entirely
 * all: the broker not only writes the message to its local disk but also waits for
 *      the other replicas to finish writing before returning
 * 1:   the broker returns as soon as the message is written to its local disk,
 *      without waiting for the other replicas
 */
props.put(ProducerConfig.ACKS_CONFIG, "1");
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
return props;
```
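To show where a Properties object like this ends up, here is a minimal self-contained sketch; the broker address and topic are placeholders rather than values from this repo:

```java
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.Properties;

public class ProducerConfigSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "hadoop:9092");
        props.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);        // 16 KB per-partition batches
        props.put(ProducerConfig.LINGER_MS_CONFIG, 1);             // wait up to 1 ms to fill a batch
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);  // 32 MB send buffer
        props.put(ProducerConfig.ACKS_CONFIG, "1");                // leader-only acknowledgement
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringSerializer");
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringSerializer");

        // The producer is Closeable; try-with-resources flushes and closes it.
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            producer.send(new ProducerRecord<>("test", null, "hello"));
        }
    }
}
```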
src/main/java/com/zorkdata/tools/load/Test.java (new file)

```java
package com.zorkdata.tools.load;

import com.zorkdata.tools.kafka.Config;

/**
 * @author: LiaoMingtao
 * @date: 2020/6/30
 */
public class Test {
    public static void main(String[] args) {
        System.out.println(Config.INSTANCE.getKafkaServers());
    }
}
```
src/main/java/com/zorkdata/tools/mock/MockGrok.java

```diff
@@ -3,18 +3,9 @@ package com.zorkdata.tools.mock;
 import com.alibaba.fastjson.JSONObject;
 import com.zorkdata.tools.kafka.CommonProducer;
 import com.zorkdata.tools.kafka.CommonProducerPool;
-import com.zorkdata.tools.oldkafka.Producer;
-import com.zorkdata.tools.oldkafka.ProducerPool;
-import org.apache.kafka.clients.producer.KafkaProducer;
-import org.apache.kafka.clients.producer.ProducerRecord;
-import org.apache.kafka.common.serialization.StringSerializer;
-import java.io.IOException;
 import java.text.SimpleDateFormat;
 import java.util.Date;
-import java.util.Properties;
 import java.util.Random;
-import java.util.concurrent.ExecutionException;
 import static java.lang.System.currentTimeMillis;
```
src/main/java/com/zorkdata/tools/mock/MockStreamxLogAvro.java

```diff
@@ -2,8 +2,6 @@ package com.zorkdata.tools.mock;
 import com.zorkdata.tools.kafka.CommonProducer;
 import com.zorkdata.tools.kafka.CommonProducerPool;
-import com.zorkdata.tools.oldkafka.Producer;
-import com.zorkdata.tools.oldkafka.ProducerPool;
 import com.zorkdata.tools.utils.DateUtil;
 import java.util.HashMap;
@@ -50,7 +48,7 @@ public class MockStreamxLogAvro {
     public static void main(String[] args) throws Exception {
         // long size = 10000000L * 1;
-        long size = 10;
+        long size = 100;
         // String topicName = "log2metric1y";
         String topicName = "test";
         for (int i = 0; i < size; i++) {
```
src/main/resources/config.properties

```diff
@@ -4,5 +4,6 @@ kafka.topic.name=a
 key.serializer=org.apache.kafka.common.serialization.StringSerializer
 value.serializer=org.apache.kafka.common.serialization.ByteArraySerializer
 #value.serializer=org.apache.kafka.common.serialization.StringSerializer
 producer.theard.num=15
 log.size=10000
+log.topic=flinkx
```