Commit 9073a1fa authored by DeleMing

<dev>

1. Standardize parts of the code
parent d97a6281
@@ -62,7 +62,7 @@ public class MockProduct {
 zorkData.setNormalFields(normalFields);
 String msg = JSON.toJSONString(zorkData);
 System.out.println(msg);
-AvroSerializer avroSerializer = AvroSerializerFactory.getLogAvorSerializer();
+AvroSerializer avroSerializer = AvroSerializerFactory.getLogAvroSerializer();
 byte[] bytes = avroSerializer.serializingLog(logTypeName, timestamp, source, offset, dimensions, measures, normalFields);
 return bytes;
 }
@@ -96,7 +96,7 @@ public class MockProduct {
 zorkData.setNormalFields(normalFields);
 String msg = JSON.toJSONString(zorkData);
 System.out.println(msg);
-AvroSerializer avroSerializer = AvroSerializerFactory.getLogAvorSerializer();
+AvroSerializer avroSerializer = AvroSerializerFactory.getLogAvroSerializer();
 byte[] bytes = avroSerializer.serializingLog(logTypeName, timestamp, source, offset, dimensions, measures, normalFields);
 return bytes;
 }
......
@@ -13,11 +13,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 /**
- * todo
- *
- * @version V1.0
- * @Author XieSen
- * @Date 2019/4/3 11:00.
+ * @author DeleMing
 */
 public class AvroDeserializer {
 private static final Logger LOGGER = LoggerFactory.getLogger(AvroDeserializer.class);
......
 package com.zorkdta.tools.avro;
 /**
- * todo
- *
- * @version V1.0
- * @Author XieSen
- * @Date 2019/4/3 11:01.
+ * @author DeleMing
 */
 public class AvroDeserializerFactory {
 private static AvroDeserializer logs = null;
......
@@ -18,11 +18,7 @@ import java.util.List;
 import java.util.Map;
 /**
- * todo
- *
- * @version V1.0
- * @Author XieSen
- * @Date 2019/4/3 11:01.
+ * @author DeleMing
 */
 public class AvroSerializer {
 public JSONObject jsonObject;
......
 package com.zorkdta.tools.avro;
 /**
- * todo
- *
- * @version V1.0
- * @Author XieSen
- * @Date 2019/4/3 11:05.
+ * @author DeleMing
 */
 public class AvroSerializerFactory {
 private static AvroSerializer metricMetadata = null;
 private static AvroSerializer logMetadata = null;
-public static AvroSerializer getLogAvorSerializer() {
+public static AvroSerializer getLogAvroSerializer() {
 if (logMetadata == null) {
 logMetadata = new AvroSerializer(LogAvroMacroDef.metadata);
 }
@@ -19,7 +15,7 @@ public class AvroSerializerFactory {
 }
-public static AvroSerializer getMetricAvorSerializer() {
+public static AvroSerializer getMetricAvroSerializer() {
 if (metricMetadata == null) {
 metricMetadata = new AvroSerializer(MetricAvroMacroDef.metadata);
 }
......
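For reference, a minimal usage sketch of the two renamed factory methods. The method names and parameter lists are taken from the calls visible in this diff; the wrapper class, sample field values, and timestamp string below are illustrative assumptions, not part of the commit.

import com.zorkdta.tools.avro.AvroSerializer;
import com.zorkdta.tools.avro.AvroSerializerFactory;

import java.util.HashMap;
import java.util.Map;

// Illustrative sketch only: exercises the corrected factory method names.
// Signatures mirror the calls shown in the diff; sample values are hypothetical.
public class AvroSerializerUsageSketch {
    public static void main(String[] args) throws Exception {
        Map<String, String> dimensions = new HashMap<>();
        dimensions.put("hostname", "demo-host");
        Map<String, Double> measures = new HashMap<>();
        Map<String, String> normalFields = new HashMap<>();
        normalFields.put("message", "hello");

        // Log payload: serializingLog(logTypeName, timestamp, source, offset, dimensions, measures, normalFields)
        AvroSerializer logSerializer = AvroSerializerFactory.getLogAvroSerializer();
        byte[] logBytes = logSerializer.serializingLog("demo_log", "2020-01-01T00:00:00.000+08:00",
                "demo-source", "0", dimensions, measures, normalFields);

        // Metric payload: serializingMetric(metricSetName, timestamp, dimensions, metrics)
        Map<String, Double> metrics = new HashMap<>();
        metrics.put("cpu_usage", 0.42);
        AvroSerializer metricSerializer = AvroSerializerFactory.getMetricAvroSerializer();
        byte[] metricBytes = metricSerializer.serializingMetric("demo_metrics", "2020-01-01T00:00:00.000+08:00",
                dimensions, metrics);

        System.out.println(logBytes.length + " / " + metricBytes.length);
    }
}

Note that the lazy initialization in the hunk above is not synchronized, so concurrent first calls to these getters could construct more than one serializer instance.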
 package com.zorkdta.tools.avro;
 /**
- * todo
- *
- * @version V1.0
- * @Author XieSen
- * @Date 2019/4/3 10:59.
+ * @author DeleMing
 */
 public class LogAvroMacroDef {
 public static String metadata = "{\n" +
......
 package com.zorkdta.tools.avro;
 /**
- * todo
- *
- * @version V1.0
- * @Author XieSen
- * @Date 2019/4/3 11:00.
+ * @author DeleMing
 */
 public class MetricAvroMacroDef {
 public static String metadata = "{\n" +
......
@@ -7,15 +7,11 @@ import org.apache.kafka.clients.producer.ProducerRecord;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import javax.security.auth.login.Configuration;
 import java.util.Map;
 import java.util.Properties;
 /**
- * @author shaojiao
- * Date: 2020/1/8
- * Time: 10:12
- * Description: 当前kafkaUtil适用于1.1.0版本
+ * @author DeleMing
 */
 @Data
 public class KafkaProducerUtil {
@@ -149,7 +145,7 @@ public class KafkaProducerUtil {
 public void sendMetric(String metricSetName, String timestamp, Map<String, String> dimensions,
 Map<String, Double> metrics, String topic) {
 try {
-byte[] bytes = AvroSerializerFactory.getMetricAvorSerializer().serializingMetric(metricSetName, timestamp,
+byte[] bytes = AvroSerializerFactory.getMetricAvroSerializer().serializingMetric(metricSetName, timestamp,
 dimensions, metrics);
 producer.send(new ProducerRecord<String, byte[]>(topic, null, bytes));
 } catch (Exception e) {
@@ -160,7 +156,7 @@ public class KafkaProducerUtil {
 public void sendLog(String topic, String logTypeName, String timestamp, String source, String offset,
 Map<String, String> dimensions, Map<String, Double> metrics, Map<String, String> normalFields) {
 try {
-byte[] bytes = AvroSerializerFactory.getLogAvorSerializer().serializingLog(logTypeName, timestamp, source,
+byte[] bytes = AvroSerializerFactory.getLogAvroSerializer().serializingLog(logTypeName, timestamp, source,
 offset, dimensions, metrics, normalFields);
 producer.send(new ProducerRecord<String, byte[]>(topic, null, bytes));
 } catch (Exception e) {
......
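A hedged sketch of how these two helpers are called after the rename. The sendMetric/sendLog signatures are the ones shown above; the KafkaProducerUtil instance itself is assumed to be constructed and configured elsewhere (its setup is not part of this diff), and the topic names and sample values are placeholders.

import java.util.HashMap;
import java.util.Map;

// Usage sketch only; assumes KafkaProducerUtil is available (same package or imported).
public class KafkaProducerUtilUsageSketch {
    static void sendSamples(KafkaProducerUtil producerUtil) {
        Map<String, String> dimensions = new HashMap<>();
        dimensions.put("hostname", "demo-host");

        // sendMetric(metricSetName, timestamp, dimensions, metrics, topic)
        Map<String, Double> metrics = new HashMap<>();
        metrics.put("cpu_usage", 0.42);
        producerUtil.sendMetric("demo_metrics", "2020-01-01T00:00:00.000+08:00",
                dimensions, metrics, "zorkdata_metric");

        // sendLog(topic, logTypeName, timestamp, source, offset, dimensions, metrics, normalFields)
        Map<String, String> normalFields = new HashMap<>();
        normalFields.put("message", "hello");
        producerUtil.sendLog("demo_log_topic", "demo_log", "2020-01-01T00:00:00.000+08:00",
                "demo-source", "0", dimensions, metrics, normalFields);
    }
}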
@@ -12,9 +12,7 @@ import java.util.Properties;
 import java.util.concurrent.Future;
 /**
- * 生产数据(功能正常)
- * @author zhangwei (<a href="mailto:zhangwei@zork.com.cn">zhangwei@zork.com.cn</a>)
- * @date 2020-06-15 14:36
+ * @author DeleMing
 */
 public class MockConnectJsonData {
 private static String topic;
......
@@ -11,8 +11,7 @@ import java.util.Properties;
 import java.util.concurrent.ExecutionException;
 /**
- * @author: LiaoMingtao
- * @date: 2020/6/19
+ * @author DeleMing
 */
 public class MockFilebeatDataToKafka {
......
@@ -10,10 +10,7 @@ import java.util.Properties;
 import java.util.concurrent.ExecutionException;
 /**
- * @description:
- * @author: 谢森
- * @Email xiesen@zork.com.cn
- * @time: 2020/1/17 0017 10:57
+ * @author DeleMing
 */
 public class MockFlinkxJson {
 // private static String topic = "flinkx_json";
......
@@ -14,10 +14,7 @@ import java.util.Properties;
 import java.util.concurrent.ExecutionException;
 /**
- * @description:
- * @author: 谢森
- * @Email xiesen@zork.com.cn
- * @time: 2020/1/17 0017 10:57
+ * @author DeleMing
 */
 public class MockKafkaConnect {
 // private static String topic = "test";
@@ -57,7 +54,7 @@ public class MockKafkaConnect {
-AvroSerializer avroSerializer = AvroSerializerFactory.getLogAvorSerializer();
+AvroSerializer avroSerializer = AvroSerializerFactory.getLogAvroSerializer();
 byte[] bytes = avroSerializer.serializingLog(logTypeName, timestamp, source, offset, dimensions, measures, normalFields);
 return bytes;
 }
@@ -105,7 +102,7 @@ public class MockKafkaConnect {
 jsonObject.put("normalFields", normalFields);
 System.out.println(jsonObject.toJSONString());
-AvroSerializer avroSerializer = AvroSerializerFactory.getLogAvorSerializer();
+AvroSerializer avroSerializer = AvroSerializerFactory.getLogAvroSerializer();
 byte[] bytes = avroSerializer.serializingLog(logTypeName, timestamp, source, offset, dimensions, measures, normalFields);
 send(topic, bytes);
......
@@ -8,10 +8,7 @@ import java.util.Map;
 import java.util.Properties;
 /**
- * @description:
- * @author: 谢森
- * @Email xiesen@zork.com.cn
- * @time: 2020/1/17 0017 10:57
+ * @author DeleMing
 */
 public class MockKafkaConnectAvro {
 private static long getSize(String propertiesName) throws Exception {
......
@@ -12,10 +12,7 @@ import java.util.Map;
 import java.util.Properties;
 /**
- * @description:
- * @author: 谢森
- * @Email xiesen@zork.com.cn
- * @time: 2020/1/17 0017 10:57
+ * @author DeleMing
 */
 public class MockKafkaConnectAvroTest {
@@ -58,7 +55,7 @@ public class MockKafkaConnectAvroTest {
 normalFields.put("message", "成功处理");
 normalFields.put("id", String.valueOf(i));
-AvroSerializer avroSerializer = AvroSerializerFactory.getLogAvorSerializer();
+AvroSerializer avroSerializer = AvroSerializerFactory.getLogAvroSerializer();
 byte[] bytes = avroSerializer.serializingLog(logTypeName, timestamp, source, offset, dimensions, measures, normalFields);
 ProducerRecord<String, byte[]> producerRecord = new ProducerRecord<String, byte[]>(
......
@@ -9,10 +9,7 @@ import com.zorkdta.tools.utils.StringUtil;
 import java.util.Properties;
 /**
- * @description:
- * @author: 谢森
- * @Email xiesen@zork.com.cn
- * @time: 2020/1/17 0017 10:57
+ * @author DeleMing
 */
 public class MockKafkaConnectJson {
 private static long getSize(String propertiesName) throws Exception {
......
@@ -12,10 +12,7 @@ import java.util.Properties;
 import java.util.concurrent.ExecutionException;
 /**
- * @description:
- * @author: 谢森
- * @Email xiesen@zork.com.cn
- * @time: 2020/1/16 0016 9:28
+ * @author DeleMing
 */
 public class MockMetricEvent {
 private static String topic = "flink-metric";
......
@@ -10,10 +10,7 @@ import java.util.Date;
 import java.util.Properties;
 /**
- * @description:
- * @author: 谢森
- * @Email xiesen@zork.com.cn
- * @time: 2020/1/17 0017 10:57
+ * @author DeleMing
 */
 public class MockStreamxJson {
 private static String topic = "streamx_json_test";
......
@@ -9,10 +9,7 @@ import java.util.Properties;
 import java.util.Random;
 /**
- * @description:
- * @author: 谢森
- * @Email xiesen@zork.com.cn
- * @time: 2020/1/17 0017 10:57
+ * @author DeleMing
 */
 public class MockStreamxJson1 {
 private static String topic = "streamx_json";
......
@@ -12,11 +12,7 @@ import java.util.Properties;
 import java.util.Random;
 /**
- * @Description
- * @className top.xiesen.mock.kafka.mock.MockZorkMetric
- * @Author 谢森
- * @Email xiesen@zork.com.cn
- * @Date 2020/3/15 18:15
+ * @author DeleMing
 */
 public class MockZorkMetric {
 private static String topic = "zorkdata_metric";
@@ -49,7 +45,7 @@ public class MockZorkMetric {
 Map<String, Double> metrics = new HashMap<>();
 metrics.put("cpu_usage", random.nextDouble());
-AvroSerializer metricSerializer = AvroSerializerFactory.getMetricAvorSerializer();
+AvroSerializer metricSerializer = AvroSerializerFactory.getMetricAvroSerializer();
 byte[] bytes = metricSerializer.serializingMetric(metricSetName, timestamp, dimensions, metrics);
 return bytes;
 }
......
@@ -3,7 +3,6 @@ package com.zorkdta.tools.utils;
 import com.zorkdta.tools.avro.AvroSerializerFactory;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.kafka.clients.producer.KafkaProducer;
-import org.apache.kafka.clients.producer.Producer;
 import org.apache.kafka.clients.producer.ProducerConfig;
 import org.apache.kafka.clients.producer.ProducerRecord;
@@ -102,7 +101,7 @@ public class CustomerProducer {
 Map<String, String> dimensions, Map<String, Double> metrics, Map<String, String> normalFields) throws ExecutionException, InterruptedException {
 try {
 long l1 = System.currentTimeMillis();
-byte[] bytes = AvroSerializerFactory.getLogAvorSerializer().serializingLog(logTypeName, timestamp, source,
+byte[] bytes = AvroSerializerFactory.getLogAvroSerializer().serializingLog(logTypeName, timestamp, source,
 offset, dimensions, metrics, normalFields);
 long l2 = System.currentTimeMillis();
 // System.out.println("数据序列化需要的时间: " + (l2 - l1) + "ms");
......
@@ -7,6 +7,9 @@ import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.Date;
+/**
+ * @author DeleMing
+ */
 public class DateUtil {
 private static DateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS+08:00");
......
@@ -16,12 +16,9 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 /**
- * @Description
- * @className top.xiesen.mock.kafka.utils.JConsumerMutil
- * @Author 谢森
- * @Email xiesen@zork.com.cn
- * @Date 2020/4/19 23:05
+ * @author DeleMing
 */
 public class JConsumerMutil {
 private final static Logger log = LoggerFactory.getLogger(JConsumerMutil.class);
......
@@ -9,13 +9,8 @@ import java.util.Date;
 import java.util.Properties;
 /**
- * @Description 单线程实现一个 kafka 生产者客户端
- * @className top.xiesen.mock.kafka.utils.JProducer
- * @Author 谢森
- * @Email xiesen@zork.com.cn
- * @Date 2020/4/19 21:22
+ * @author DeleMing
 */
 public class JProducer extends Thread {
 private final Logger log = LoggerFactory.getLogger(JProducer.class);
......
@@ -11,11 +11,7 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 /**
- * @Description 实现一个多线程生产者应用客户端
- * @className top.xiesen.mock.kafka.utils.JProducerThread
- * @Author 谢森
- * @Email xiesen@zork.com.cn
- * @Date 2020/4/19 22:50
+ * @author DeleMing
 */
 public class JProducerThread extends Thread {
 private final Logger log = LoggerFactory.getLogger(JProducerThread.class);
......
@@ -4,11 +4,7 @@ import java.io.Closeable;
 import java.io.IOException;
 /**
- * @Description
- * @className top.xiesen.mock.kafka.utils.ProducerPool
- * @Author 谢森
- * @Email xiesen@zork.com.cn
- * @Date 2020/4/2 9:39
+ * @author DeleMing
 */
 public class ProducerPool implements Closeable {
......
@@ -7,11 +7,7 @@ import java.io.InputStreamReader;
 import java.util.Properties;
 /**
- * @Description
- * @className top.xiesen.mock.kafka.utils.PropertiesUtil
- * @Author 谢森
- * @Email xiesen@zork.com.cn
- * @Date 2020/4/2 9:41
+ * @author DeleMing
 */
 public class PropertiesUtil {
 /**
......
@@ -10,11 +10,7 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 /**
- * @Description
- * @className top.xiesen.mock.kafka.utils.StringUtil
- * @Author 谢森
- * @Email xiesen@zork.com.cn
- * @Date 2020/4/2 9:50
+ * @author DeleMing
 */
 public class StringUtil {
 public static void main(String[] args) {
@@ -102,8 +98,7 @@ public class StringUtil {
 return true;
 }
 str = str.trim();
-return str.equals("") || str.equalsIgnoreCase("NULL");
+return "".equals(str) || "NULL".equalsIgnoreCase(str);
 }
 public static boolean isDouble(String str) {
......
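Aside from the author-tag cleanup, the one functional-looking change in StringUtil swaps the comparison order to the constant-first idiom: "".equals(str) and "NULL".equalsIgnoreCase(str) simply return false when str is null instead of throwing a NullPointerException, whereas the old str.equals("") form would throw. In the method shown above an earlier branch appears to return before str could be null, so this reads as defensive style hardening rather than a behaviour change. A standalone sketch (not part of the commit, with a hypothetical helper name) illustrating the difference:

// Standalone illustration of the constant-first equals idiom adopted in StringUtil.
// Mirrors only the rewritten return expression; the helper name is hypothetical.
public class ConstantFirstEqualsDemo {
    static boolean isEmptyOrNullLiteral(String str) {
        // Constant-first: safe even when str is null.
        return "".equals(str) || "NULL".equalsIgnoreCase(str);
    }

    public static void main(String[] args) {
        System.out.println(isEmptyOrNullLiteral(""));     // true
        System.out.println(isEmptyOrNullLiteral("null")); // true
        System.out.println(isEmptyOrNullLiteral("abc"));  // false
        System.out.println(isEmptyOrNullLiteral(null));   // false; str.equals("") would throw NPE here
    }
}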