Merge branch 'master' into 'master'
Restructure the project layout to support automated packaging. See merge request !2
-package com.zorkdata.datamask.constants;
+package com.zorkdata.datamask.constant;

import java.util.Date;

/**
 * Description :
@@ -6,7 +8,8 @@ package com.zorkdata.datamask.constants;
 * @author : wanghaiying (<a href="wanghaiying@zorkdata.com.cn">wanghaiying@zorkdata.com.cn</a>)
 * Date : Create in 2020/10/20 15:32
 */
-public interface Constants {
+public interface ParamConstants {

    String SOURCE = "source";
    String HDFS_SRC = "hdfs_src";
    String HDFS_DEST = "hdfs_dest";
@@ -15,6 +18,12 @@ public interface Constants {
    String START_TIME = "start_time";
    String END_TIME = "end_time";
    String SERVERS = "servers";
    String ZOOKEEPER = "zookeeper";
    String TOPIC = "topic";
    String HDFS = "hdfs";
    String KAFKA = "kafka";
    String NAME_REG_EXP = "name_reg_exp";
    String MOBILE_REG_EXP = "mobile_reg_exp";
    String PHONE_REG_EXP = "phone_reg_exp";
...
package com.zorkdata.datamask.constant;

/**
 * @author 谢森
 * @Description Constant definitions
 * @Email xiesen310@163.com
 * @Date 2020/10/21 15:50
 */
public interface StrConstants {

    String FILE_SEPARATOR = "/";
    String AVRO_SUFFIX = ".avro";
    String EMPTY_STR = "";
}
package com.zorkdata.datamask.domain;

import lombok.Data;

/**
 * @author 谢森
 * @Description Parameter entity class
 * @Email xiesen310@163.com
 * @Date 2020/10/21 14:33
 */
@Data
public class HadoopParam {
    private String source;

    private String hdfsSrc;

    private String hdfsDest;

    private String core;

    private String date;

    private Long startTime;

    private Long endTime;

    public HadoopParam(String source, String hdfsSrc, String hdfsDest, String core, String date, Long startTime,
                       Long endTime) {
        this.source = source;
        this.hdfsSrc = hdfsSrc;
        this.hdfsDest = hdfsDest;
        this.core = core;
        this.date = date;
        this.startTime = startTime;
        this.endTime = endTime;
    }
}
package com.zorkdata.datamask.domain;

import lombok.Data;

/**
 * @author 谢森
 * @Description Kafka parameter entity class
 * @Email xiesen310@163.com
 * @Date 2020/10/21 15:07
 */
@Data
public class KafkaParam {
    private String servers;

    private String zookeeper;

    private String topic;

    private String hdfsDest;

    private String core;

    private String date;

    private Long startTime;

    private Long endTime;

    public KafkaParam(String servers, String zookeeper, String topic, String hdfsDest, String core, String date,
                      Long startTime, Long endTime) {
        this.servers = servers;
        this.zookeeper = zookeeper;
        this.topic = topic;
        this.hdfsDest = hdfsDest;
        this.core = core;
        this.date = date;
        this.startTime = startTime;
        this.endTime = endTime;
    }
}
package com.zorkdata.datamask.hadoop;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.TypeReference;
import com.zorkdata.datamask.constant.StrConstants;
import com.zorkdata.datamask.domain.LogData;
import com.zorkdata.datamask.domain.HadoopParam;
import com.zorkdata.datamask.domain.TransactionLog;
import com.zorkdata.datamask.util.DateUtils;
import com.zorkdata.datamask.util.MaskUtil;
import com.zorkdata.datamask.util.ParamUtils;
import org.apache.avro.mapred.AvroInputFormat;
import org.apache.avro.mapred.AvroKey;
import org.apache.avro.mapred.AvroOutputFormat;
import org.apache.avro.mapred.AvroWrapper;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.hadoop.mapred.HadoopInputFormat;
import org.apache.flink.api.java.hadoop.mapred.HadoopOutputFormat;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.operators.FlatMapOperator;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.util.Collector;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * @author 谢森
 * @Description Hadoop file data masking
 * @Email xiesen310@163.com
 * @Date 2020/10/21 14:29
 */
public class HadoopMask {

    private static final Logger logger = LoggerFactory.getLogger(HadoopMask.class);

    /**
     * Masks HDFS log files.
     *
     * @param conf request parameters
     */
    public static void maskHdfsLog(Map<String, String> conf) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        JobConf jobConf = new JobConf();
        jobConf.set("avro.output.schema", TransactionLog.SCHEMA$.toString(true));
        HadoopParam hadoopParam = ParamUtils.initHadoopConf(conf);
        ParameterTool parameterTool = ParameterTool.fromMap(conf);
        env.getConfig().setGlobalJobParameters(parameterTool);
        List<String> logFiles = filterHdfsLogFiles(hadoopParam.getHdfsSrc(), hadoopParam.getDate(),
                hadoopParam.getStartTime(), hadoopParam.getEndTime());
        for (String logFile : logFiles) {
            // Read the HDFS log file and deserialize it from Avro
            HadoopInputFormat<Object, Object> hadoopInputFormat = new HadoopInputFormat<Object, Object>
                    (new AvroInputFormat(), Object.class, Object.class, jobConf);
            AvroInputFormat.addInputPath(hadoopInputFormat.getJobConf(), new Path(logFile));
            DataSource<Tuple2<Object, Object>> hdfsLogInput = env.createInput(hadoopInputFormat);
            // Masking operator
            FlatMapOperator<Tuple2<Object, Object>, Object> maskFlatMapOperator =
                    hdfsLogInput.flatMap(new FlatMapFunction<Tuple2<Object, Object>, Object>() {
                        @Override
                        public void flatMap(Tuple2<Object, Object> value, Collector<Object> collector) throws Exception {
                            LogData logData = JSON.parseObject(value.getField(0).toString(),
                                    new TypeReference<LogData>() {
                                    });
                            // Filter on the log event's core information
                            if (null != hadoopParam.getCore() && logData.getDimensions().get("hostname").indexOf("c9") > -1) {
                                // Filter on the log event's timestamp
                                Long timestamp = DateUtils.utc2timestamp(logData.getTimestamp());
                                boolean flag = null != timestamp && timestamp > hadoopParam.getStartTime()
                                        && timestamp < hadoopParam.getEndTime() || Boolean.TRUE;
                                if (flag) {
                                    Map maskResult = MaskUtil.mask(logData.getNormalFields());
                                    logData.setNormalFields(maskResult);
                                    collector.collect(logData);
                                }
                            }
                        }
                    });
            // Build the output path on the destination HDFS
            String logFileName =
                    logFile.split(StrConstants.FILE_SEPARATOR)[logFile.split(StrConstants.FILE_SEPARATOR).length - 1];
            String filePath = hadoopParam.getHdfsSrc() + logFileName.replace(StrConstants.AVRO_SUFFIX,
                    StrConstants.EMPTY_STR);
            HadoopOutputFormat hadoopOutputFormat = new HadoopOutputFormat<>(new AvroOutputFormat(), jobConf);
            FileOutputFormat.setOutputPath(jobConf, new Path(filePath));
            // Avro serialization operator
            maskFlatMapOperator.map(new MapFunction<Object, Tuple2<AvroWrapper<LogData>, NullWritable>>() {
                @Override
                public Tuple2<AvroWrapper<LogData>, NullWritable> map(Object value) throws Exception {
                    AvroKey<LogData> key = new AvroKey<LogData>((LogData) value);
                    Tuple2<AvroWrapper<LogData>, NullWritable> tuple = new Tuple2<AvroWrapper<LogData>,
                            NullWritable>(key, NullWritable.get());
                    return tuple;
                }
            }).output(hadoopOutputFormat);
            try {
                env.execute("国泰交易日志脱敏job");
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Filters HDFS log files.
     *
     * @param hdfs      HDFS address
     * @param date      date
     * @param startTime start time
     * @param endTime   end time
     * @return list of HDFS file paths
     */
    private static List<String> filterHdfsLogFiles(String hdfs, String date, Long startTime, Long endTime) {
        if (!hdfs.endsWith(StrConstants.FILE_SEPARATOR)) {
            hdfs += StrConstants.FILE_SEPARATOR;
        }
        String path = hdfs;
        if (null != date) {
            path = hdfs + date;
        }
        Configuration conf = new Configuration();
        List<String> logFiles = new ArrayList<>();
        try {
            FileSystem fileSystem = null;
            try {
                fileSystem = FileSystem.get(new URI("hdfs://cdh-2:8020/"), conf, "hdfs");
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            RemoteIterator<LocatedFileStatus> locatedFileStatusRemoteIterator = fileSystem.listFiles(new Path(path),
                    false);
            while (locatedFileStatusRemoteIterator.hasNext()) {
                LocatedFileStatus next = locatedFileStatusRemoteIterator.next();
                long modificationTime = next.getModificationTime();
                // Filter by modification time to keep only files within the user-specified window
                if (modificationTime > startTime && modificationTime < endTime) {
                    Path hdfsFilePath = next.getPath();
                    logFiles.add(hdfsFilePath.toString());
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        } catch (URISyntaxException e) {
            e.printStackTrace();
        }
        return logFiles;
    }
}
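For orientation, a minimal driver sketch (not part of this merge request) showing how maskHdfsLog could be called with a conf map keyed by the ParamConstants values; the paths, host, and epoch-millisecond values below are illustrative placeholders:

```java
import com.zorkdata.datamask.hadoop.HadoopMask;

import java.util.HashMap;
import java.util.Map;

public class HadoopMaskExample {
    public static void main(String[] args) throws Exception {
        // Keys mirror ParamConstants; all values are placeholders, not taken from the project.
        Map<String, String> conf = new HashMap<>(16);
        conf.put("source", "hdfs");                               // assumed to select the input type
        conf.put("hdfs_src", "hdfs://cdh-2:8020/logs/raw/");
        conf.put("hdfs_dest", "hdfs://cdh-2:8020/logs/masked/");
        conf.put("core", "c9");
        conf.put("date", "2020-10-21");
        conf.put("start_time", "1603238400000");                  // parsed with Long.parseLong in ParamUtils
        conf.put("end_time", "1603324800000");

        HadoopMask.maskHdfsLog(conf);
    }
}
```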
package com.zorkdata.datamask.kafka;

import com.zorkdata.datamask.domain.HadoopParam;
import com.zorkdata.datamask.domain.KafkaParam;
import com.zorkdata.datamask.util.ParamUtils;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.fs.bucketing.BucketingSink;
import org.apache.flink.streaming.connectors.fs.bucketing.DateTimeBucketer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

import java.text.SimpleDateFormat;
import java.time.ZoneId;
import java.util.Date;
import java.util.Map;
import java.util.Properties;

/**
 * @author 谢森
 * @Description Kafka data masking
 * @Email xiesen310@163.com
 * @Date 2020/10/21 14:51
 */
public class KafkaMask {

    /**
     * Masks Kafka message data.
     *
     * @param conf request parameters
     */
    public static void maskKafkaMsg(Map<String, String> conf) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
        KafkaParam kafkaParam = ParamUtils.initKafkaConf(conf);
        ParameterTool parameterTool = ParameterTool.fromMap(conf);
        env.getConfig().setGlobalJobParameters(parameterTool);
        Properties props = new Properties();
        props.put("bootstrap.servers", kafkaParam.getServers());
        props.put("zookeeper.connect", kafkaParam.getZookeeper());
        props.put("group.id", "group1");
        props.put("enable.auto.commit", false);
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("auto.offset.reset", "earliest");
        props.put("max.poll.records", 1000);
        SingleOutputStreamOperator<String> dataStreamSource =
                env.addSource(new FlinkKafkaConsumer<>(kafkaParam.getTopic(),
                        new SimpleStringSchema(), props)).setParallelism(1);
        // TODO filter by date, startTime, and endTime
        BucketingSink<String> hdfsSink = new BucketingSink<>(kafkaParam.getHdfsDest());
        // Bucketer that creates time-based directories; the default is yyyy-MM-dd--HH in US time,
        // changed here to one directory per day in Shanghai time.
        hdfsSink.setBucketer(new DateTimeBucketer<String>("yyyy-MM-dd", ZoneId.of("Asia/Shanghai")));
        // Maximum size per file; the default is 384 MB (1024 * 1024 * 384)
        hdfsSink.setBatchSize(1024 * 1024 * 384);
        // Roll over to a new file after this interval
        hdfsSink.setBatchRolloverInterval(1000 * 60 * 60);
        hdfsSink.setPendingSuffix("ccc");
        hdfsSink.setInactiveBucketThreshold(60 * 1000L);
        hdfsSink.setInactiveBucketCheckInterval(60 * 1000L);
        hdfsSink.setAsyncTimeout(60 * 1000);
        dataStreamSource.addSink(hdfsSink);
        try {
            env.execute("国泰交易日志脱敏job");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
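A corresponding sketch for the Kafka path, again assuming the keys read by ParamUtils.initKafkaConf; broker, ZooKeeper, topic, and HDFS values are placeholders:

```java
import com.zorkdata.datamask.kafka.KafkaMask;

import java.util.HashMap;
import java.util.Map;

public class KafkaMaskExample {
    public static void main(String[] args) {
        // Keys mirror ParamConstants; all values are placeholders.
        Map<String, String> conf = new HashMap<>(16);
        conf.put("servers", "kafka-1:9092,kafka-2:9092");
        conf.put("zookeeper", "zk-1:2181");
        conf.put("topic", "transaction-log");
        conf.put("hdfs_dest", "hdfs://cdh-2:8020/logs/masked/");
        conf.put("core", "c9");
        conf.put("date", "2020-10-21");
        conf.put("start_time", "1603238400000");
        conf.put("end_time", "1603324800000");

        KafkaMask.maskKafkaMsg(conf);
    }
}
```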
@@ -6,9 +6,11 @@ package com.zorkdata.datamask.util;
 * @Email xiesen@zork.com.cn
 */
public class ConfigUtils {
+   public static final String EMPTY_STR = "";
+   public static final String NULL_STR = "null";

    public static String getString(String value, String defaultValue) {
-       String result = value == null || value.equals("") || value.equals("null") ? defaultValue : value;
+       String result = value == null || EMPTY_STR.equals(value) || NULL_STR.equals(value) ? defaultValue : value;
        return result;
    }
...
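A small sketch (not from the repository) of the intended getString behavior after the constant extraction:

```java
import com.zorkdata.datamask.util.ConfigUtils;

public class ConfigUtilsExample {
    public static void main(String[] args) {
        // Falls back to the default for null, the empty string, or the literal "null"
        System.out.println(ConfigUtils.getString(null, "fallback"));    // fallback
        System.out.println(ConfigUtils.getString("null", "fallback"));  // fallback
        System.out.println(ConfigUtils.getString("kafka", "fallback")); // kafka
    }
}
```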
package com.zorkdata.datamask.util;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

/**
 * @author 谢森
 * @Description Date/time utility class
 * @Email xiesen310@163.com
 * @Date 2020/10/21 14:39
 */
public class DateUtils {

    public static final Logger logger = LoggerFactory.getLogger(DateUtils.class);
    private static SimpleDateFormat utcFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS+08:00");

    /**
     * Converts a UTC time string to a Unix timestamp.
     *
     * @param utcTime UTC time string
     * @return Unix timestamp in milliseconds
     */
    public static Long utc2timestamp(String utcTime) {
        // Set the time zone, then parse the time (zone IDs are case-sensitive: "Asia/Shanghai")
        utcFormatter.setTimeZone(TimeZone.getTimeZone("Asia/Shanghai"));
        Date gpsUtcDate = null;
        try {
            gpsUtcDate = utcFormatter.parse(utcTime);
        } catch (ParseException e) {
            logger.error("时间戳格式转换异常:{} 原因: {}", utcTime, e.getMessage());
            return null;
        }
        return gpsUtcDate.getTime();
    }
}
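A usage sketch, assuming input timestamps follow the +08:00 pattern the formatter expects; the sample value is illustrative:

```java
import com.zorkdata.datamask.util.DateUtils;

public class DateUtilsExample {
    public static void main(String[] args) {
        // Parses a "+08:00"-suffixed timestamp string into epoch milliseconds
        Long ts = DateUtils.utc2timestamp("2020-10-21T15:32:00.000+08:00");
        System.out.println(ts); // 1603265520000 with the Asia/Shanghai zone applied
    }
}
```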
@@ -19,6 +19,7 @@ import java.util.*;
public class LoadConf {
    private static final Logger LOG = LoggerFactory.getLogger(com.zorkdata.datamask.util.LoadConf.class);
+   public static final int DEFAULT_MAP_CAPACITY = 16;

    public LoadConf() {
    }
@@ -62,7 +63,7 @@ public class LoadConf {
                throw new RuntimeException("Could not find config file on classpath " + name);
            }
        } else {
-           HashMap var19 = new HashMap();
+           HashMap var19 = new HashMap(DEFAULT_MAP_CAPACITY);
            return var19;
        }
    } catch (IOException var17) {
@@ -100,11 +101,11 @@ public class LoadConf {
        return getConfigFileInputStream(configFilePath, true);
    }

-   public static Map LoadYaml(String confPath) {
+   public static Map loadYaml(String confPath) {
        return findAndReadYaml(confPath, true, true);
    }

-   public static Map LoadProperty(String prop) {
+   public static Map loadProperty(String prop) {
        InputStream in = null;
        Properties properties = new Properties();
@@ -125,8 +126,10 @@ public class LoadConf {
            }
        }
-       Map ret = new HashMap();
+       Map ret = new HashMap(DEFAULT_MAP_CAPACITY);
        ret.putAll(properties);
        return ret;
    }
}
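A brief sketch of the renamed entry points, assuming YAML and properties files exist at the placeholder paths shown:

```java
import com.zorkdata.datamask.util.LoadConf;

import java.util.Map;

public class LoadConfExample {
    public static void main(String[] args) {
        // The entry points are now lowerCamelCase: loadYaml / loadProperty
        Map yamlConf = LoadConf.loadYaml("/opt/datamask/application.yml");
        Map propConf = LoadConf.loadProperty("/opt/datamask/datamask.properties");
        System.out.println(yamlConf.size() + " / " + propConf.size());
    }
}
```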
package com.zorkdata.datamask.util;

import com.zorkdata.datamask.constant.ParamConstants;
import com.zorkdata.datamask.domain.HadoopParam;
import com.zorkdata.datamask.domain.KafkaParam;

import java.util.Map;

/**
 * @author 谢森
 * @Description Parameter utility class
 * @Email xiesen310@163.com
 * @Date 2020/10/21 14:42
 */
public class ParamUtils {

    /**
     * Initializes the Hadoop configuration.
     *
     * @param conf configuration map
     */
    public static HadoopParam initHadoopConf(Map conf) {
        String source = String.valueOf(conf.get(ParamConstants.SOURCE)).trim();
        String hdfsSrc = String.valueOf(conf.get(ParamConstants.HDFS_SRC)).trim();
        String hdfsDest = String.valueOf(conf.get(ParamConstants.HDFS_DEST)).trim();
        String core = String.valueOf(conf.get(ParamConstants.CORE)).trim();
        String date = String.valueOf(conf.get(ParamConstants.DATE)).trim();
        Long startTime = Long.parseLong(String.valueOf(conf.get(ParamConstants.START_TIME)).trim());
        Long endTime = Long.parseLong(String.valueOf(conf.get(ParamConstants.END_TIME)).trim());
        return new HadoopParam(source, hdfsSrc, hdfsDest, core, date, startTime, endTime);
    }

    public static KafkaParam initKafkaConf(Map conf) {
        String servers = String.valueOf(conf.get(ParamConstants.SERVERS)).trim();
        String zookeeper = String.valueOf(conf.get(ParamConstants.ZOOKEEPER)).trim();
        String topic = String.valueOf(conf.get(ParamConstants.TOPIC)).trim();
        String hdfsDest = String.valueOf(conf.get(ParamConstants.HDFS_DEST)).trim();
        String core = String.valueOf(conf.get(ParamConstants.CORE)).trim();
        String date = String.valueOf(conf.get(ParamConstants.DATE)).trim();
        Long startTime = Long.parseLong(String.valueOf(conf.get(ParamConstants.START_TIME)).trim());
        Long endTime = Long.parseLong(String.valueOf(conf.get(ParamConstants.END_TIME)).trim());
        return new KafkaParam(servers, zookeeper, topic, hdfsDest, core, date, startTime, endTime);
    }
}
@@ -13,6 +13,7 @@ import java.util.Map;
 */
public class ZorkParameterUtil {
    private static final Logger logger = LoggerFactory.getLogger(com.zorkdata.datamask.util.ZorkParameterUtil.class);
+   public static final String YML_SUFFIX = "yml";

    /**
     * Reads the parameters.
@@ -26,22 +27,21 @@ public class ZorkParameterUtil {
        String configPath;
        try {
            ParameterTool parameterTool = ParameterTool.fromArgs(args);
-           configPath = parameterTool.get("configPath");
+           configPath = parameterTool.get("conf");
        } catch (Exception e) {
-           // configPath = "/etc/flinkConfig.yaml";
-           configPath = "D:\\zork\\transactionLogMask\\src\\main\\resources\\application.yml";
+           throw new RuntimeException("读取配置文件失败,请检查配置路径.");
        }
        logger.info("read config path is " + configPath);
-       if (!configPath.endsWith("yml")) {
+       if (!configPath.endsWith(YML_SUFFIX)) {
            System.err.println("Please input correct configuration file and flink run mode!");
-           System.exit(-1);
+           throw new RuntimeException("Please input correct configuration file and flink run mode!");
        } else {
-           conf = LoadConf.LoadYaml(configPath);
+           conf = LoadConf.loadYaml(configPath);
            if (conf == null) {
                logger.error("配置文件" + args[0] + "不存在,系统退出");
-               System.exit(-1);
+               throw new RuntimeException("配置文件" + args[0] + "不存在,系统退出");
            }
        }
        return conf;
...
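For context, a sketch of how the configuration path is now expected to be passed as a program argument; the path is a placeholder:

```java
import org.apache.flink.api.java.utils.ParameterTool;

public class ArgsExample {
    public static void main(String[] args) {
        // ParameterTool.fromArgs understands "--conf <path>", matching parameterTool.get("conf")
        String[] programArgs = {"--conf", "/opt/datamask/application.yml"};
        ParameterTool parameterTool = ParameterTool.fromArgs(programArgs);
        System.out.println(parameterTool.get("conf")); // /opt/datamask/application.yml
    }
}
```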
@@ -100,7 +100,7 @@ public class AvroSerializer {
     */
    public synchronized byte[] serializing(String json) {
        byte[] byteArray = null;
-       JSONObject jsonObject = (JSONObject) JSONObject.parse(json);// new TypeReference<Object>() {}
+       JSONObject jsonObject = (JSONObject) JSONObject.parse(json);
        GenericRecord genericRecord = new GenericData.Record(this.schema);
        // Add the data to the genericRecord
        for (int i = 0; i < filedsArrayList.size(); i++) {
@@ -209,7 +209,6 @@ public class AvroSerializer {
        } catch (Exception e) {
            System.out.println("序列化失败" + e);
        }
-       // GenericRecord s = AvroDeserializerFactory.getTopicmetadataDeserializer().deserializing(returnstr);
        return byteArray;
    }
...
SonarQube analysis reported 155 issues

- 🚫 17 critical
- ⚠ 90 major
- 🔽 47 minor
- ℹ 1 info

Watch the comments in this conversation to review them.

Top 30 extra issues

Note: The following issues were found on lines that were not modified in the commit. Because these issues can't be reported as line comments, they are summarized here (illustrative fixes for a few of the recurring rules are sketched after the list):

- 🚫 Add a nested comment explaining why this method is empty, throw an UnsupportedOperationException or complete the implementation.
- 🚫 Change this "try" to a try-with-resources. (sonar.java.source not set. Assuming 7 or greater.)
- 🚫 Refactor this code to not throw exceptions in finally blocks.
- 🚫 Refactor this code to not throw exceptions in finally blocks.
- 🚫 Define a constant instead of duplicating this literal "序列化失败" 15 times.
- 🚫 Define a constant instead of duplicating this literal " {\n" 7 times.
- 🚫 Define a constant instead of duplicating this literal " "type": \n" 7 times.
- 🚫 Define a constant instead of duplicating this literal " "string",\n" 4 times.
- 🚫 Define a constant instead of duplicating this literal " "null"\n" 4 times.
- 🚫 [Define a constant instead of duplicating this literal " ]\n" 7 times.](https://git.zorkdata.com/wanghaiying/transactionlogmask/blob/cee08142d022c594895d067889b7f18f5734e1c6/src/main/java/com/zorkdata/datamask/util/avro/LogAvroMacroDef.java#L20)
- 🚫 Define a constant instead of duplicating this literal " },\n" 6 times.
- 🚫 Define a constant instead of duplicating this literal " "null",\n" 3 times.
- 🚫 Define a constant instead of duplicating this literal " {\n" 3 times.
- 🚫 Define a constant instead of duplicating this literal " "type": "map",\n" 3 times.
- 🚫 Define a constant instead of duplicating this literal " }\n" 3 times.
- ⚠ Add a private constructor to hide the implicit public one.
- ⚠ Hide this public constructor.
- ⚠ Define and throw a dedicated exception instead of using a generic one.
- ⚠ Define and throw a dedicated exception instead of using a generic one.
- ⚠ Define and throw a dedicated exception instead of using a generic one.
- ⚠ Define and throw a dedicated exception instead of using a generic one.
- ⚠ Define and throw a dedicated exception instead of using a generic one.
- ⚠ Remove this throw statement from this finally block.
- ⚠ Use the URI class instead.
- ⚠ Define and throw a dedicated exception instead of using a generic one.
- ⚠ Define and throw a dedicated exception instead of using a generic one.
- ⚠ Define and throw a dedicated exception instead of using a generic one.
- ⚠ Remove this throw statement from this finally block.
- ⚠ Remove this unused "fieldsWhiteList" private field.
- ⚠ Remove this unused "nameRegExp" private field.
- ... 74 more
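Not from the repository: a hedged sketch of how a few of the recurring findings above (try-with-resources, hiding a utility class constructor, dedicated exceptions instead of generic ones) are typically resolved; the class, method, and path names here are hypothetical.

```java
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Properties;

/** Hypothetical utility class illustrating the SonarQube remediations; not project code. */
public final class ConfLoader {

    /** Dedicated exception instead of a generic RuntimeException. */
    public static class ConfLoadException extends RuntimeException {
        public ConfLoadException(String message, Throwable cause) {
            super(message, cause);
        }
    }

    /** Private constructor hides the implicit public one of this static utility class. */
    private ConfLoader() {
    }

    /** try-with-resources closes the stream automatically, even when loading fails. */
    public static Properties loadProperties(String path) {
        Properties properties = new Properties();
        try (InputStream in = Files.newInputStream(Paths.get(path))) {
            properties.load(in);
        } catch (IOException e) {
            throw new ConfLoadException("Failed to load properties from " + path, e);
        }
        return properties;
    }
}
```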