Commit e000f9b3 authored by DeleMing's avatar DeleMing

优化代码

parent 8873bbfa
package com.zorkdata.tools.kafka; package com.zorkdata.tools.kafka;
import com.zorkdata.tools.oldkafka.AvroSerializerFactory; import com.zorkdata.tools.oldkafka.AvroSerializerFactory;
import com.zorkdata.tools.oldkafka.Config;
import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.clients.producer.ProducerRecord;
...@@ -27,9 +26,10 @@ public class CommonProducer { ...@@ -27,9 +26,10 @@ public class CommonProducer {
props.put("bootstrap.servers", kafkaServer); props.put("bootstrap.servers", kafkaServer);
props.put("client.id", "webAPI4LogGather"); props.put("client.id", "webAPI4LogGather");
props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
props.put("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer"); props.put("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer");
props.put("batch.size", kafkaBathSize); props.put("batch.size", kafkaBathSize);
// 启用压缩
props.put("compression.type", "lz4");
producerByte = new KafkaProducer<String, byte[]>(props); producerByte = new KafkaProducer<String, byte[]>(props);
props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
producerString = new KafkaProducer<String, String>(props); producerString = new KafkaProducer<String, String>(props);
......
package com.zorkdata.tools.oldkafka; package com.zorkdata.tools.kafka;
import com.zorkdata.tools.utils.PropertiesUtil; import com.zorkdata.tools.utils.PropertiesUtil;
......
package com.zorkdata.tools.mock;
import com.alibaba.fastjson.JSONObject;
import com.zorkdata.tools.kafka.CommonProducer;
import com.zorkdata.tools.kafka.CommonProducerPool;
import com.zorkdata.tools.oldkafka.Producer;
import com.zorkdata.tools.oldkafka.ProducerPool;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Properties;
import java.util.Random;
import java.util.concurrent.ExecutionException;
import static java.lang.System.currentTimeMillis;
/**
 * Mock data generator: builds filebeat-6.8.1-style JSON envelopes around a
 * fake nginx access-log line and publishes them to the Kafka topic "test"
 * through the pooled {@link CommonProducer}.
 *
 * @author: LiaoMingtao
 * @date: 2020/6/29
 */
public class MockGrok {

    /**
     * Format template for a randomized access-log line. Placeholders, in order:
     * client IP, HTTP method, templateFlag value, status field.
     */
    private static final String MESSAGE = "%s - - [29/Jun/2020:16:09:23 +0800] \"%s /webserver/scene/getSceneList.do?menuItemId=1039&sceneGroupId=&templateFlag=%s HTTP/1.0\" %s 3167";

    /** Shared RNG; previously a new Random was allocated on every call. */
    private static final Random RANDOM = new Random();

    /**
     * Builds one randomized access-log line from {@link #MESSAGE}.
     *
     * @return a log line with a random last IP octet (0-9), POST method,
     *         a random flag i in [0,10) and a status field of i * 10
     */
    private static String buildMessage() {
        int i = RANDOM.nextInt(10);
        final String baseIp = "192.168.1.";
        // NOTE(review): i * 10 yields 0..90, which is not a valid HTTP status
        // code — confirm whether a real status (e.g. 200) was intended.
        return String.format(MESSAGE, baseIp + i, "POST", i, i * 10);
    }

    /**
     * Wraps a log line in a filebeat-style JSON envelope with fixed mock
     * metadata (beat/host/input/log sections) and randomized app fields.
     *
     * @param message the raw access-log line stored under "message"
     * @return the envelope serialized as a JSON string
     */
    private static String buildJson(String message) {
        JSONObject filebeatJson = new JSONObject();
        JSONObject metadataJson = new JSONObject();
        metadataJson.put("beat", "filebeat");
        metadataJson.put("type", "doc");
        metadataJson.put("version", "6.8.1");
        JSONObject inputJson = new JSONObject();
        inputJson.put("type", "log");
        JSONObject beatJson = new JSONObject();
        beatJson.put("name", "zorkdata-151");
        beatJson.put("hostname", "zorkdata-151");
        beatJson.put("version", "6.8.1");
        JSONObject hostJson = new JSONObject();
        hostJson.put("name", "zorkdata-151");
        hostJson.put("architecture", "x86_64");
        hostJson.put("id", "8e3dfc85999b4e02bae4adf4b92b909a");
        hostJson.put("containerized", "false");
        JSONObject logJson = new JSONObject();
        // NOTE(review): this value is a JSON-looking *string*, not a nested
        // object — consumers see an escaped string under "log.file"; confirm.
        logJson.put("file", "{ \"path\": \"/var/log/nginx/access.log\" }");
        filebeatJson.put("@timestamp", "2020-06-19T01:29:44.181Z");
        filebeatJson.put("source", "/var/log/nginx/access.log");
        filebeatJson.put("offset", String.valueOf(currentTimeMillis()));
        filebeatJson.put("message", message);
        int i = RANDOM.nextInt(10);
        filebeatJson.put("appsystem", "test_appsystem" + i * 2);
        filebeatJson.put("appprogramname", "test_appprogramname" + i * 3);
        filebeatJson.put("logTypeName", "test_topic_log" + i * 4);
        filebeatJson.put("servicename", "test_servicename" + i * 5);
        filebeatJson.put("servicecode", "test_cdde" + i * 6);
        filebeatJson.put("collectorruleid", "1");
        filebeatJson.put("@metadata", metadataJson);
        filebeatJson.put("input", inputJson);
        filebeatJson.put("beat", beatJson);
        filebeatJson.put("host", hostJson);
        filebeatJson.put("log", logJson);
        // "prospector" deliberately reuses the same object as "input".
        filebeatJson.put("prospector", inputJson);
        return filebeatJson.toJSONString();
    }

    /**
     * Entry point: sends {@code size} mock events to topic "test", then waits
     * briefly so the asynchronous producer can flush before the JVM exits.
     */
    public static void main(String[] args) throws IOException, InterruptedException {
        long size = 1;
        // long size = 10000000L * 10;  // bulk-load setting kept for reference
        // long index: an int counter would overflow (spin forever) against a
        // long bound larger than Integer.MAX_VALUE.
        for (long i = 0; i < size; i++) {
            String json = buildJson(buildMessage());
            CommonProducer producer = CommonProducerPool.getInstance().getProducer();
            producer.sendLog("test", json);
        }
        Thread.sleep(1000);
    }
}
package com.zorkdata.tools.oldmock;
/**
 * Empty placeholder class in the legacy {@code oldmock} package.
 * NOTE(review): appears superseded by {@code com.zorkdata.tools.mock.MockGrok};
 * confirm whether this stub can be deleted.
 */
public class MockGrok {
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment