Commit f0d075cc authored by DeleMing's avatar DeleMing

Initial commit

parents
# Default ignored files
/shelf/
/workspace.xml
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml
# Editor-based HTTP Client requests
/httpRequests/
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="CompilerConfiguration">
<annotationProcessing>
<profile name="Maven default annotation processors profile" enabled="true">
<sourceOutputDir name="target/generated-sources/annotations" />
<sourceTestOutputDir name="target/generated-test-sources/test-annotations" />
<outputRelativeToContentRoot value="true" />
<module name="mock-data" />
</profile>
</annotationProcessing>
<bytecodeTargetLevel>
<module name="mock-data" target="1.8" />
</bytecodeTargetLevel>
</component>
<component name="JavacSettings">
<option name="ADDITIONAL_OPTIONS_OVERRIDE">
<module name="mock-data" options="-parameters" />
</option>
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="Encoding">
<file url="file://$PROJECT_DIR$/src/main/java" charset="UTF-8" />
</component>
</project>
\ No newline at end of file
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<inspection_tool class="JavaDoc" enabled="true" level="WARNING" enabled_by_default="true">
<option name="TOP_LEVEL_CLASS_OPTIONS">
<value>
<option name="ACCESS_JAVADOC_REQUIRED_FOR" value="none" />
<option name="REQUIRED_TAGS" value="" />
</value>
</option>
<option name="INNER_CLASS_OPTIONS">
<value>
<option name="ACCESS_JAVADOC_REQUIRED_FOR" value="none" />
<option name="REQUIRED_TAGS" value="" />
</value>
</option>
<option name="METHOD_OPTIONS">
<value>
<option name="ACCESS_JAVADOC_REQUIRED_FOR" value="none" />
<option name="REQUIRED_TAGS" value="@return@param@throws or @exception" />
</value>
</option>
<option name="FIELD_OPTIONS">
<value>
<option name="ACCESS_JAVADOC_REQUIRED_FOR" value="none" />
<option name="REQUIRED_TAGS" value="" />
</value>
</option>
<option name="IGNORE_DEPRECATED" value="false" />
<option name="IGNORE_JAVADOC_PERIOD" value="true" />
<option name="IGNORE_DUPLICATED_THROWS" value="false" />
<option name="IGNORE_POINT_TO_ITSELF" value="false" />
<option name="myAdditionalJavadocTags" value="date" />
</inspection_tool>
</profile>
</component>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="RemoteRepositoriesConfiguration">
<remote-repository>
<option name="id" value="central" />
<option name="name" value="Maven Central repository" />
<option name="url" value="https://repo1.maven.org/maven2" />
</remote-repository>
<remote-repository>
<option name="id" value="aliyun" />
<option name="name" value="Aliyun Central mirror" />
<option name="url" value="https://maven.aliyun.com/nexus/content/groups/public" />
</remote-repository>
<remote-repository>
<option name="id" value="user-release" />
<option name="name" value="Nexus Repository" />
<option name="url" value="http://nexus.zorkdata.com/repository/maven-public/" />
</remote-repository>
<remote-repository>
<option name="id" value="jboss.community" />
<option name="name" value="JBoss Community repository" />
<option name="url" value="https://repository.jboss.org/nexus/content/repositories/public/" />
</remote-repository>
<remote-repository>
<option name="id" value="zorkdata" />
<option name="name" value="zorkdata" />
<option name="url" value="http://nexus.zorkdata.com/repository/maven-public" />
</remote-repository>
</component>
</project>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: ch.qos.logback:logback-classic:1.2.3">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/ch/qos/logback/logback-classic/1.2.3/logback-classic-1.2.3.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/ch/qos/logback/logback-classic/1.2.3/logback-classic-1.2.3-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/ch/qos/logback/logback-classic/1.2.3/logback-classic-1.2.3-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: ch.qos.logback:logback-core:1.2.3">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/ch/qos/logback/logback-core/1.2.3/logback-core-1.2.3.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/ch/qos/logback/logback-core/1.2.3/logback-core-1.2.3-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/ch/qos/logback/logback-core/1.2.3/logback-core-1.2.3-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: com.101tec:zkclient:0.10">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/com/101tec/zkclient/0.10/zkclient-0.10.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/com/101tec/zkclient/0.10/zkclient-0.10-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/com/101tec/zkclient/0.10/zkclient-0.10-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: com.alibaba:fastjson:1.2.62">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/com/alibaba/fastjson/1.2.62/fastjson-1.2.62.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/com/alibaba/fastjson/1.2.62/fastjson-1.2.62-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/com/alibaba/fastjson/1.2.62/fastjson-1.2.62-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: com.fasterxml:classmate:1.4.0">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/com/fasterxml/classmate/1.4.0/classmate-1.4.0.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/com/fasterxml/classmate/1.4.0/classmate-1.4.0-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/com/fasterxml/classmate/1.4.0/classmate-1.4.0-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: com.fasterxml.jackson.core:jackson-annotations:2.9.0">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/com/fasterxml/jackson/core/jackson-annotations/2.9.0/jackson-annotations-2.9.0.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/com/fasterxml/jackson/core/jackson-annotations/2.9.0/jackson-annotations-2.9.0-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/com/fasterxml/jackson/core/jackson-annotations/2.9.0/jackson-annotations-2.9.0-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: com.fasterxml.jackson.core:jackson-core:2.9.9">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/com/fasterxml/jackson/core/jackson-core/2.9.9/jackson-core-2.9.9.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/com/fasterxml/jackson/core/jackson-core/2.9.9/jackson-core-2.9.9-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/com/fasterxml/jackson/core/jackson-core/2.9.9/jackson-core-2.9.9-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: com.fasterxml.jackson.core:jackson-databind:2.9.9">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/com/fasterxml/jackson/core/jackson-databind/2.9.9/jackson-databind-2.9.9.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/com/fasterxml/jackson/core/jackson-databind/2.9.9/jackson-databind-2.9.9-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/com/fasterxml/jackson/core/jackson-databind/2.9.9/jackson-databind-2.9.9-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.9.9">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/com/fasterxml/jackson/datatype/jackson-datatype-jdk8/2.9.9/jackson-datatype-jdk8-2.9.9.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/com/fasterxml/jackson/datatype/jackson-datatype-jdk8/2.9.9/jackson-datatype-jdk8-2.9.9-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/com/fasterxml/jackson/datatype/jackson-datatype-jdk8/2.9.9/jackson-datatype-jdk8-2.9.9-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.9.9">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/com/fasterxml/jackson/datatype/jackson-datatype-jsr310/2.9.9/jackson-datatype-jsr310-2.9.9.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/com/fasterxml/jackson/datatype/jackson-datatype-jsr310/2.9.9/jackson-datatype-jsr310-2.9.9-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/com/fasterxml/jackson/datatype/jackson-datatype-jsr310/2.9.9/jackson-datatype-jsr310-2.9.9-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: com.fasterxml.jackson.module:jackson-module-parameter-names:2.9.9">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/com/fasterxml/jackson/module/jackson-module-parameter-names/2.9.9/jackson-module-parameter-names-2.9.9.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/com/fasterxml/jackson/module/jackson-module-parameter-names/2.9.9/jackson-module-parameter-names-2.9.9-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/com/fasterxml/jackson/module/jackson-module-parameter-names/2.9.9/jackson-module-parameter-names-2.9.9-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: com.github.stephenc.findbugs:findbugs-annotations:1.3.9-1">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/com/github/stephenc/findbugs/findbugs-annotations/1.3.9-1/findbugs-annotations-1.3.9-1.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/com/github/stephenc/findbugs/findbugs-annotations/1.3.9-1/findbugs-annotations-1.3.9-1-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/com/github/stephenc/findbugs/findbugs-annotations/1.3.9-1/findbugs-annotations-1.3.9-1-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: com.jayway.jsonpath:json-path:2.4.0">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/com/jayway/jsonpath/json-path/2.4.0/json-path-2.4.0.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/com/jayway/jsonpath/json-path/2.4.0/json-path-2.4.0-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/com/jayway/jsonpath/json-path/2.4.0/json-path-2.4.0-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: com.thoughtworks.paranamer:paranamer:2.7">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/com/thoughtworks/paranamer/paranamer/2.7/paranamer-2.7.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/com/thoughtworks/paranamer/paranamer/2.7/paranamer-2.7-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/com/thoughtworks/paranamer/paranamer/2.7/paranamer-2.7-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: com.typesafe.scala-logging:scala-logging_2.11:3.8.0">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/com/typesafe/scala-logging/scala-logging_2.11/3.8.0/scala-logging_2.11-3.8.0.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/com/typesafe/scala-logging/scala-logging_2.11/3.8.0/scala-logging_2.11-3.8.0-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/com/typesafe/scala-logging/scala-logging_2.11/3.8.0/scala-logging_2.11-3.8.0-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: com.vaadin.external.google:android-json:0.0.20131108.vaadin1">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/com/vaadin/external/google/android-json/0.0.20131108.vaadin1/android-json-0.0.20131108.vaadin1.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/com/vaadin/external/google/android-json/0.0.20131108.vaadin1/android-json-0.0.20131108.vaadin1-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/com/vaadin/external/google/android-json/0.0.20131108.vaadin1/android-json-0.0.20131108.vaadin1-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: com.yammer.metrics:metrics-core:2.2.0">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/com/yammer/metrics/metrics-core/2.2.0/metrics-core-2.2.0.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/com/yammer/metrics/metrics-core/2.2.0/metrics-core-2.2.0-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/com/yammer/metrics/metrics-core/2.2.0/metrics-core-2.2.0-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: commons-cli:commons-cli:1.2">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/commons-cli/commons-cli/1.2/commons-cli-1.2.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/commons-cli/commons-cli/1.2/commons-cli-1.2-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/commons-cli/commons-cli/1.2/commons-cli-1.2-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: commons-codec:commons-codec:1.11">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/commons-codec/commons-codec/1.11/commons-codec-1.11.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/commons-codec/commons-codec/1.11/commons-codec-1.11-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/commons-codec/commons-codec/1.11/commons-codec-1.11-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: commons-collections:commons-collections:3.2.1">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/commons-collections/commons-collections/3.2.1/commons-collections-3.2.1.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/commons-collections/commons-collections/3.2.1/commons-collections-3.2.1-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/commons-collections/commons-collections/3.2.1/commons-collections-3.2.1-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: commons-httpclient:commons-httpclient:3.1">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/commons-httpclient/commons-httpclient/3.1/commons-httpclient-3.1.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/commons-httpclient/commons-httpclient/3.1/commons-httpclient-3.1-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/commons-httpclient/commons-httpclient/3.1/commons-httpclient-3.1-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: commons-lang:commons-lang:2.6">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/commons-lang/commons-lang/2.6/commons-lang-2.6.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/commons-lang/commons-lang/2.6/commons-lang-2.6-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/commons-lang/commons-lang/2.6/commons-lang-2.6-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: commons-logging:commons-logging:1.1.1">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/commons-logging/commons-logging/1.1.1/commons-logging-1.1.1.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/commons-logging/commons-logging/1.1.1/commons-logging-1.1.1-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/commons-logging/commons-logging/1.1.1/commons-logging-1.1.1-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: io.netty:netty:3.5.13.Final">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/io/netty/netty/3.5.13.Final/netty-3.5.13.Final.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/io/netty/netty/3.5.13.Final/netty-3.5.13.Final-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/io/netty/netty/3.5.13.Final/netty-3.5.13.Final-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: javax.activation:javax.activation-api:1.2.0">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/javax/activation/javax.activation-api/1.2.0/javax.activation-api-1.2.0.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/javax/activation/javax.activation-api/1.2.0/javax.activation-api-1.2.0-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/javax/activation/javax.activation-api/1.2.0/javax.activation-api-1.2.0-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: javax.annotation:javax.annotation-api:1.3.2">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/javax/annotation/javax.annotation-api/1.3.2/javax.annotation-api-1.3.2.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/javax/annotation/javax.annotation-api/1.3.2/javax.annotation-api-1.3.2-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/javax/annotation/javax.annotation-api/1.3.2/javax.annotation-api-1.3.2-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: javax.validation:validation-api:2.0.1.Final">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/javax/validation/validation-api/2.0.1.Final/validation-api-2.0.1.Final.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/javax/validation/validation-api/2.0.1.Final/validation-api-2.0.1.Final-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/javax/validation/validation-api/2.0.1.Final/validation-api-2.0.1.Final-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: javax.xml.bind:jaxb-api:2.3.1">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/javax/xml/bind/jaxb-api/2.3.1/jaxb-api-2.3.1.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/javax/xml/bind/jaxb-api/2.3.1/jaxb-api-2.3.1-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/javax/xml/bind/jaxb-api/2.3.1/jaxb-api-2.3.1-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: joda-time:joda-time:2.10.2">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/joda-time/joda-time/2.10.2/joda-time-2.10.2.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/joda-time/joda-time/2.10.2/joda-time-2.10.2-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/joda-time/joda-time/2.10.2/joda-time-2.10.2-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: junit:junit:4.12">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/junit/junit/4.12/junit-4.12.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/junit/junit/4.12/junit-4.12-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/junit/junit/4.12/junit-4.12-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: net.bytebuddy:byte-buddy:1.9.13">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/net/bytebuddy/byte-buddy/1.9.13/byte-buddy-1.9.13.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/net/bytebuddy/byte-buddy/1.9.13/byte-buddy-1.9.13-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/net/bytebuddy/byte-buddy/1.9.13/byte-buddy-1.9.13-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: net.bytebuddy:byte-buddy-agent:1.9.13">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/net/bytebuddy/byte-buddy-agent/1.9.13/byte-buddy-agent-1.9.13.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/net/bytebuddy/byte-buddy-agent/1.9.13/byte-buddy-agent-1.9.13-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/net/bytebuddy/byte-buddy-agent/1.9.13/byte-buddy-agent-1.9.13-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: net.minidev:accessors-smart:1.2">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/net/minidev/accessors-smart/1.2/accessors-smart-1.2.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/net/minidev/accessors-smart/1.2/accessors-smart-1.2-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/net/minidev/accessors-smart/1.2/accessors-smart-1.2-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: net.minidev:json-smart:2.3">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/net/minidev/json-smart/2.3/json-smart-2.3.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/net/minidev/json-smart/2.3/json-smart-2.3-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/net/minidev/json-smart/2.3/json-smart-2.3-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: net.sf.jopt-simple:jopt-simple:5.0.4">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/net/sf/jopt-simple/jopt-simple/5.0.4/jopt-simple-5.0.4.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/net/sf/jopt-simple/jopt-simple/5.0.4/jopt-simple-5.0.4-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/net/sf/jopt-simple/jopt-simple/5.0.4/jopt-simple-5.0.4-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.apache.avro:avro:1.8.2">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/avro/1.8.2/avro-1.8.2.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/avro/1.8.2/avro-1.8.2-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/avro/1.8.2/avro-1.8.2-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.apache.avro:avro-compiler:1.8.2">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/avro-compiler/1.8.2/avro-compiler-1.8.2.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/avro-compiler/1.8.2/avro-compiler-1.8.2-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/avro-compiler/1.8.2/avro-compiler-1.8.2-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.apache.avro:avro-ipc:1.8.2">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/avro-ipc/1.8.2/avro-ipc-1.8.2.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/avro-ipc/1.8.2/avro-ipc-1.8.2-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/avro-ipc/1.8.2/avro-ipc-1.8.2-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.apache.avro:avro-mapred:1.8.2">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/avro-mapred/1.8.2/avro-mapred-1.8.2.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/avro-mapred/1.8.2/avro-mapred-1.8.2-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/avro-mapred/1.8.2/avro-mapred-1.8.2-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.apache.avro:avro-mapred:hadoop2:1.8.2">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/avro-mapred/1.8.2/avro-mapred-1.8.2-hadoop2.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/avro-mapred/1.8.2/avro-mapred-1.8.2-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/avro-mapred/1.8.2/avro-mapred-1.8.2-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.apache.avro:avro-tools:1.8.2">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/avro-tools/1.8.2/avro-tools-1.8.2.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/avro-tools/1.8.2/avro-tools-1.8.2-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/avro-tools/1.8.2/avro-tools-1.8.2-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.apache.avro:trevni-avro:1.8.2">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/trevni-avro/1.8.2/trevni-avro-1.8.2.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/trevni-avro/1.8.2/trevni-avro-1.8.2-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/trevni-avro/1.8.2/trevni-avro-1.8.2-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.apache.avro:trevni-avro:tests:1.8.2">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/trevni-avro/1.8.2/trevni-avro-1.8.2-tests.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/trevni-avro/1.8.2/trevni-avro-1.8.2-test-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/trevni-avro/1.8.2/trevni-avro-1.8.2-test-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.apache.avro:trevni-core:1.8.2">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/trevni-core/1.8.2/trevni-core-1.8.2.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/trevni-core/1.8.2/trevni-core-1.8.2-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/trevni-core/1.8.2/trevni-core-1.8.2-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.apache.avro:trevni-core:tests:1.8.2">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/trevni-core/1.8.2/trevni-core-1.8.2-tests.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/trevni-core/1.8.2/trevni-core-1.8.2-test-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/trevni-core/1.8.2/trevni-core-1.8.2-test-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.apache.commons:commons-compress:1.8.1">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/commons/commons-compress/1.8.1/commons-compress-1.8.1.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/commons/commons-compress/1.8.1/commons-compress-1.8.1-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/commons/commons-compress/1.8.1/commons-compress-1.8.1-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.apache.kafka:kafka_2.11:1.1.1">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/kafka/kafka_2.11/1.1.1/kafka_2.11-1.1.1.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/kafka/kafka_2.11/1.1.1/kafka_2.11-1.1.1-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/kafka/kafka_2.11/1.1.1/kafka_2.11-1.1.1-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.apache.kafka:kafka-clients:2.0.1">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/kafka/kafka-clients/2.0.1/kafka-clients-2.0.1.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/kafka/kafka-clients/2.0.1/kafka-clients-2.0.1-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/kafka/kafka-clients/2.0.1/kafka-clients-2.0.1-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.apache.logging.log4j:log4j-api:2.11.2">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/logging/log4j/log4j-api/2.11.2/log4j-api-2.11.2.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/logging/log4j/log4j-api/2.11.2/log4j-api-2.11.2-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/logging/log4j/log4j-api/2.11.2/log4j-api-2.11.2-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.apache.logging.log4j:log4j-to-slf4j:2.11.2">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/logging/log4j/log4j-to-slf4j/2.11.2/log4j-to-slf4j-2.11.2.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/logging/log4j/log4j-to-slf4j/2.11.2/log4j-to-slf4j-2.11.2-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/logging/log4j/log4j-to-slf4j/2.11.2/log4j-to-slf4j-2.11.2-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.apache.tomcat.embed:tomcat-embed-core:9.0.21">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/tomcat/embed/tomcat-embed-core/9.0.21/tomcat-embed-core-9.0.21.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/tomcat/embed/tomcat-embed-core/9.0.21/tomcat-embed-core-9.0.21-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/tomcat/embed/tomcat-embed-core/9.0.21/tomcat-embed-core-9.0.21-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.apache.tomcat.embed:tomcat-embed-el:9.0.21">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/tomcat/embed/tomcat-embed-el/9.0.21/tomcat-embed-el-9.0.21.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/tomcat/embed/tomcat-embed-el/9.0.21/tomcat-embed-el-9.0.21-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/tomcat/embed/tomcat-embed-el/9.0.21/tomcat-embed-el-9.0.21-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.apache.tomcat.embed:tomcat-embed-websocket:9.0.21">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/tomcat/embed/tomcat-embed-websocket/9.0.21/tomcat-embed-websocket-9.0.21.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/tomcat/embed/tomcat-embed-websocket/9.0.21/tomcat-embed-websocket-9.0.21-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/tomcat/embed/tomcat-embed-websocket/9.0.21/tomcat-embed-websocket-9.0.21-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.apache.velocity:velocity:1.7">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/velocity/velocity/1.7/velocity-1.7.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/velocity/velocity/1.7/velocity-1.7-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/velocity/velocity/1.7/velocity-1.7-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.apache.zookeeper:zookeeper:3.4.10">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/zookeeper/zookeeper/3.4.10/zookeeper-3.4.10.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/zookeeper/zookeeper/3.4.10/zookeeper-3.4.10-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/apache/zookeeper/zookeeper/3.4.10/zookeeper-3.4.10-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.assertj:assertj-core:3.11.1">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/assertj/assertj-core/3.11.1/assertj-core-3.11.1.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/assertj/assertj-core/3.11.1/assertj-core-3.11.1-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/assertj/assertj-core/3.11.1/assertj-core-3.11.1-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.codehaus.jackson:jackson-core-asl:1.9.13">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/codehaus/jackson/jackson-core-asl/1.9.13/jackson-core-asl-1.9.13.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/codehaus/jackson/jackson-core-asl/1.9.13/jackson-core-asl-1.9.13-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/codehaus/jackson/jackson-core-asl/1.9.13/jackson-core-asl-1.9.13-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.codehaus.jackson:jackson-mapper-asl:1.9.13">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/codehaus/jackson/jackson-mapper-asl/1.9.13/jackson-mapper-asl-1.9.13.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/codehaus/jackson/jackson-mapper-asl/1.9.13/jackson-mapper-asl-1.9.13-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/codehaus/jackson/jackson-mapper-asl/1.9.13/jackson-mapper-asl-1.9.13-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.hamcrest:hamcrest-core:1.3">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.hamcrest:hamcrest-library:1.3">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/hamcrest/hamcrest-library/1.3/hamcrest-library-1.3.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/hamcrest/hamcrest-library/1.3/hamcrest-library-1.3-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/hamcrest/hamcrest-library/1.3/hamcrest-library-1.3-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.hibernate.validator:hibernate-validator:6.0.17.Final">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/hibernate/validator/hibernate-validator/6.0.17.Final/hibernate-validator-6.0.17.Final.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/hibernate/validator/hibernate-validator/6.0.17.Final/hibernate-validator-6.0.17.Final-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/hibernate/validator/hibernate-validator/6.0.17.Final/hibernate-validator-6.0.17.Final-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.jboss.logging:jboss-logging:3.3.2.Final">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/jboss/logging/jboss-logging/3.3.2.Final/jboss-logging-3.3.2.Final.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/jboss/logging/jboss-logging/3.3.2.Final/jboss-logging-3.3.2.Final-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/jboss/logging/jboss-logging/3.3.2.Final/jboss-logging-3.3.2.Final-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.lz4:lz4-java:1.4.1">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/lz4/lz4-java/1.4.1/lz4-java-1.4.1.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/lz4/lz4-java/1.4.1/lz4-java-1.4.1-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/lz4/lz4-java/1.4.1/lz4-java-1.4.1-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.mockito:mockito-core:2.23.4">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/mockito/mockito-core/2.23.4/mockito-core-2.23.4.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/mockito/mockito-core/2.23.4/mockito-core-2.23.4-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/mockito/mockito-core/2.23.4/mockito-core-2.23.4-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.mortbay.jetty:jetty:6.1.26">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/mortbay/jetty/jetty/6.1.26/jetty-6.1.26.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/mortbay/jetty/jetty/6.1.26/jetty-6.1.26-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/mortbay/jetty/jetty/6.1.26/jetty-6.1.26-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.mortbay.jetty:jetty-util:6.1.26">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/mortbay/jetty/jetty-util/6.1.26/jetty-util-6.1.26.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/mortbay/jetty/jetty-util/6.1.26/jetty-util-6.1.26-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/mortbay/jetty/jetty-util/6.1.26/jetty-util-6.1.26-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.mortbay.jetty:servlet-api:2.5-20081211">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/mortbay/jetty/servlet-api/2.5-20081211/servlet-api-2.5-20081211.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/mortbay/jetty/servlet-api/2.5-20081211/servlet-api-2.5-20081211-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/mortbay/jetty/servlet-api/2.5-20081211/servlet-api-2.5-20081211-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.objenesis:objenesis:2.6">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/objenesis/objenesis/2.6/objenesis-2.6.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/objenesis/objenesis/2.6/objenesis-2.6-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/objenesis/objenesis/2.6/objenesis-2.6-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.ow2.asm:asm:5.0.4">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/ow2/asm/asm/5.0.4/asm-5.0.4.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/ow2/asm/asm/5.0.4/asm-5.0.4-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/ow2/asm/asm/5.0.4/asm-5.0.4-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.projectlombok:lombok:1.18.8">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/projectlombok/lombok/1.18.8/lombok-1.18.8.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/projectlombok/lombok/1.18.8/lombok-1.18.8-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/projectlombok/lombok/1.18.8/lombok-1.18.8-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.scala-lang:scala-library:2.11.12">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/scala-lang/scala-library/2.11.12/scala-library-2.11.12.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/scala-lang/scala-library/2.11.12/scala-library-2.11.12-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/scala-lang/scala-library/2.11.12/scala-library-2.11.12-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.scala-lang:scala-reflect:2.11.12">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/scala-lang/scala-reflect/2.11.12/scala-reflect-2.11.12.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/scala-lang/scala-reflect/2.11.12/scala-reflect-2.11.12-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/scala-lang/scala-reflect/2.11.12/scala-reflect-2.11.12-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.skyscreamer:jsonassert:1.5.0">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/skyscreamer/jsonassert/1.5.0/jsonassert-1.5.0.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/skyscreamer/jsonassert/1.5.0/jsonassert-1.5.0-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/skyscreamer/jsonassert/1.5.0/jsonassert-1.5.0-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.slf4j:jul-to-slf4j:1.7.26">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/slf4j/jul-to-slf4j/1.7.26/jul-to-slf4j-1.7.26.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/slf4j/jul-to-slf4j/1.7.26/jul-to-slf4j-1.7.26-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/slf4j/jul-to-slf4j/1.7.26/jul-to-slf4j-1.7.26-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.slf4j:slf4j-api:1.7.26">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/slf4j/slf4j-api/1.7.26/slf4j-api-1.7.26.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/slf4j/slf4j-api/1.7.26/slf4j-api-1.7.26-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/slf4j/slf4j-api/1.7.26/slf4j-api-1.7.26-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.springframework.boot:spring-boot:2.1.6.RELEASE">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/boot/spring-boot/2.1.6.RELEASE/spring-boot-2.1.6.RELEASE.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/boot/spring-boot/2.1.6.RELEASE/spring-boot-2.1.6.RELEASE-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/boot/spring-boot/2.1.6.RELEASE/spring-boot-2.1.6.RELEASE-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.springframework.boot:spring-boot-autoconfigure:2.1.6.RELEASE">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/boot/spring-boot-autoconfigure/2.1.6.RELEASE/spring-boot-autoconfigure-2.1.6.RELEASE.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/boot/spring-boot-autoconfigure/2.1.6.RELEASE/spring-boot-autoconfigure-2.1.6.RELEASE-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/boot/spring-boot-autoconfigure/2.1.6.RELEASE/spring-boot-autoconfigure-2.1.6.RELEASE-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.springframework.boot:spring-boot-starter:2.1.6.RELEASE">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/boot/spring-boot-starter/2.1.6.RELEASE/spring-boot-starter-2.1.6.RELEASE.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/boot/spring-boot-starter/2.1.6.RELEASE/spring-boot-starter-2.1.6.RELEASE-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/boot/spring-boot-starter/2.1.6.RELEASE/spring-boot-starter-2.1.6.RELEASE-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.springframework.boot:spring-boot-starter-json:2.1.6.RELEASE">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/boot/spring-boot-starter-json/2.1.6.RELEASE/spring-boot-starter-json-2.1.6.RELEASE.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/boot/spring-boot-starter-json/2.1.6.RELEASE/spring-boot-starter-json-2.1.6.RELEASE-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/boot/spring-boot-starter-json/2.1.6.RELEASE/spring-boot-starter-json-2.1.6.RELEASE-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.springframework.boot:spring-boot-starter-logging:2.1.6.RELEASE">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/boot/spring-boot-starter-logging/2.1.6.RELEASE/spring-boot-starter-logging-2.1.6.RELEASE.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/boot/spring-boot-starter-logging/2.1.6.RELEASE/spring-boot-starter-logging-2.1.6.RELEASE-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/boot/spring-boot-starter-logging/2.1.6.RELEASE/spring-boot-starter-logging-2.1.6.RELEASE-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.springframework.boot:spring-boot-starter-test:2.1.6.RELEASE">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/boot/spring-boot-starter-test/2.1.6.RELEASE/spring-boot-starter-test-2.1.6.RELEASE.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/boot/spring-boot-starter-test/2.1.6.RELEASE/spring-boot-starter-test-2.1.6.RELEASE-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/boot/spring-boot-starter-test/2.1.6.RELEASE/spring-boot-starter-test-2.1.6.RELEASE-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.springframework.boot:spring-boot-starter-tomcat:2.1.6.RELEASE">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/boot/spring-boot-starter-tomcat/2.1.6.RELEASE/spring-boot-starter-tomcat-2.1.6.RELEASE.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/boot/spring-boot-starter-tomcat/2.1.6.RELEASE/spring-boot-starter-tomcat-2.1.6.RELEASE-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/boot/spring-boot-starter-tomcat/2.1.6.RELEASE/spring-boot-starter-tomcat-2.1.6.RELEASE-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.springframework.boot:spring-boot-starter-web:2.1.6.RELEASE">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/boot/spring-boot-starter-web/2.1.6.RELEASE/spring-boot-starter-web-2.1.6.RELEASE.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/boot/spring-boot-starter-web/2.1.6.RELEASE/spring-boot-starter-web-2.1.6.RELEASE-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/boot/spring-boot-starter-web/2.1.6.RELEASE/spring-boot-starter-web-2.1.6.RELEASE-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.springframework.boot:spring-boot-test:2.1.6.RELEASE">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/boot/spring-boot-test/2.1.6.RELEASE/spring-boot-test-2.1.6.RELEASE.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/boot/spring-boot-test/2.1.6.RELEASE/spring-boot-test-2.1.6.RELEASE-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/boot/spring-boot-test/2.1.6.RELEASE/spring-boot-test-2.1.6.RELEASE-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.springframework.boot:spring-boot-test-autoconfigure:2.1.6.RELEASE">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/boot/spring-boot-test-autoconfigure/2.1.6.RELEASE/spring-boot-test-autoconfigure-2.1.6.RELEASE.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/boot/spring-boot-test-autoconfigure/2.1.6.RELEASE/spring-boot-test-autoconfigure-2.1.6.RELEASE-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/boot/spring-boot-test-autoconfigure/2.1.6.RELEASE/spring-boot-test-autoconfigure-2.1.6.RELEASE-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.springframework:spring-aop:5.1.8.RELEASE">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/spring-aop/5.1.8.RELEASE/spring-aop-5.1.8.RELEASE.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/spring-aop/5.1.8.RELEASE/spring-aop-5.1.8.RELEASE-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/spring-aop/5.1.8.RELEASE/spring-aop-5.1.8.RELEASE-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.springframework:spring-beans:5.1.8.RELEASE">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/spring-beans/5.1.8.RELEASE/spring-beans-5.1.8.RELEASE.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/spring-beans/5.1.8.RELEASE/spring-beans-5.1.8.RELEASE-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/spring-beans/5.1.8.RELEASE/spring-beans-5.1.8.RELEASE-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.springframework:spring-context:5.1.8.RELEASE">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/spring-context/5.1.8.RELEASE/spring-context-5.1.8.RELEASE.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/spring-context/5.1.8.RELEASE/spring-context-5.1.8.RELEASE-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/spring-context/5.1.8.RELEASE/spring-context-5.1.8.RELEASE-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.springframework:spring-core:5.1.8.RELEASE">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/spring-core/5.1.8.RELEASE/spring-core-5.1.8.RELEASE.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/spring-core/5.1.8.RELEASE/spring-core-5.1.8.RELEASE-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/spring-core/5.1.8.RELEASE/spring-core-5.1.8.RELEASE-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.springframework:spring-expression:5.1.8.RELEASE">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/spring-expression/5.1.8.RELEASE/spring-expression-5.1.8.RELEASE.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/spring-expression/5.1.8.RELEASE/spring-expression-5.1.8.RELEASE-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/spring-expression/5.1.8.RELEASE/spring-expression-5.1.8.RELEASE-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.springframework:spring-jcl:5.1.8.RELEASE">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/spring-jcl/5.1.8.RELEASE/spring-jcl-5.1.8.RELEASE.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/spring-jcl/5.1.8.RELEASE/spring-jcl-5.1.8.RELEASE-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/spring-jcl/5.1.8.RELEASE/spring-jcl-5.1.8.RELEASE-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.springframework:spring-test:5.1.8.RELEASE">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/spring-test/5.1.8.RELEASE/spring-test-5.1.8.RELEASE.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/spring-test/5.1.8.RELEASE/spring-test-5.1.8.RELEASE-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/spring-test/5.1.8.RELEASE/spring-test-5.1.8.RELEASE-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.springframework:spring-web:5.1.8.RELEASE">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/spring-web/5.1.8.RELEASE/spring-web-5.1.8.RELEASE.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/spring-web/5.1.8.RELEASE/spring-web-5.1.8.RELEASE-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/spring-web/5.1.8.RELEASE/spring-web-5.1.8.RELEASE-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.springframework:spring-webmvc:5.1.8.RELEASE">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/spring-webmvc/5.1.8.RELEASE/spring-webmvc-5.1.8.RELEASE.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/spring-webmvc/5.1.8.RELEASE/spring-webmvc-5.1.8.RELEASE-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/springframework/spring-webmvc/5.1.8.RELEASE/spring-webmvc-5.1.8.RELEASE-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.tukaani:xz:1.5">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/tukaani/xz/1.5/xz-1.5.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/tukaani/xz/1.5/xz-1.5-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/tukaani/xz/1.5/xz-1.5-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.xerial.snappy:snappy-java:1.1.1.3">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/xerial/snappy/snappy-java/1.1.1.3/snappy-java-1.1.1.3.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/xerial/snappy/snappy-java/1.1.1.3/snappy-java-1.1.1.3-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/xerial/snappy/snappy-java/1.1.1.3/snappy-java-1.1.1.3-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.xmlunit:xmlunit-core:2.6.2">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/xmlunit/xmlunit-core/2.6.2/xmlunit-core-2.6.2.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/xmlunit/xmlunit-core/2.6.2/xmlunit-core-2.6.2-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/xmlunit/xmlunit-core/2.6.2/xmlunit-core-2.6.2-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<component name="libraryTable">
<library name="Maven: org.yaml:snakeyaml:1.23">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/yaml/snakeyaml/1.23/snakeyaml-1.23.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/yaml/snakeyaml/1.23/snakeyaml-1.23-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/yaml/snakeyaml/1.23/snakeyaml-1.23-sources.jar!/" />
</SOURCES>
</library>
</component>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="MarkdownEnhProjectSettings">
<AnnotatorSettings targetHasSpaces="true" linkCaseMismatch="true" wikiCaseMismatch="true" wikiLinkHasDashes="true" notUnderWikiHome="true" targetNotWikiPageExt="true" notUnderSourceWikiHome="true" targetNameHasAnchor="true" targetPathHasAnchor="true" wikiLinkHasSlash="true" wikiLinkHasSubdir="true" wikiLinkHasOnlyAnchor="true" linkTargetsWikiHasExt="true" linkTargetsWikiHasBadExt="true" notUnderSameRepo="true" targetNotUnderVcs="false" linkNeedsExt="true" linkHasBadExt="true" linkTargetNeedsExt="true" linkTargetHasBadExt="true" wikiLinkNotInWiki="true" imageTargetNotInRaw="true" repoRelativeAcrossVcsRoots="true" multipleWikiTargetsMatch="true" unresolvedLinkReference="true" linkIsIgnored="true" anchorIsIgnored="true" anchorIsUnresolved="true" anchorLineReferenceIsUnresolved="true" anchorLineReferenceFormat="true" anchorHasDuplicates="true" abbreviationDuplicates="true" abbreviationNotUsed="true" attributeIdDuplicateDefinition="true" attributeIdNotUsed="true" footnoteDuplicateDefinition="true" footnoteUnresolved="true" footnoteDuplicates="true" footnoteNotUsed="true" macroDuplicateDefinition="true" macroUnresolved="true" macroDuplicates="true" macroNotUsed="true" referenceDuplicateDefinition="true" referenceUnresolved="true" referenceDuplicates="true" referenceNotUsed="true" referenceUnresolvedNumericId="true" enumRefDuplicateDefinition="true" enumRefUnresolved="true" enumRefDuplicates="true" enumRefNotUsed="true" enumRefLinkUnresolved="true" enumRefLinkDuplicates="true" simTocUpdateNeeded="true" simTocTitleSpaceNeeded="true" />
<HtmlExportSettings updateOnSave="false" parentDir="" targetDir="" cssDir="css" scriptDir="js" plainHtml="false" imageDir="" copyLinkedImages="false" imagePathType="0" targetPathType="2" targetExt="" useTargetExt="false" noCssNoScripts="false" useElementStyleAttribute="false" linkToExportedHtml="true" exportOnSettingsChange="true" regenerateOnProjectOpen="false" linkFormatType="HTTP_ABSOLUTE" />
<LinkMapSettings>
<textMaps />
</LinkMapSettings>
</component>
<component name="MarkdownNavigatorHistory">
<PasteImageHistory checkeredTransparentBackground="false" filename="image" directory="" onPasteImageTargetRef="3" onPasteLinkText="0" onPasteImageElement="1" onPasteLinkElement="1" onPasteReferenceElement="2" cornerRadius="20" borderColor="0" transparentColor="16777215" borderWidth="1" trimTop="0" trimBottom="0" trimLeft="0" trimRight="0" transparent="false" roundCorners="false" showPreview="true" bordered="false" scaled="false" cropped="false" hideInapplicableOperations="false" preserveLinkFormat="false" scale="50" scalingInterpolation="1" transparentTolerance="0" saveAsDefaultOnOK="false" linkFormat="0" addHighlights="false" showHighlightCoordinates="true" showHighlights="false" mouseSelectionAddsHighlight="false" outerFilled="false" outerFillColor="0" outerFillTransparent="true" outerFillAlpha="30">
<highlightList />
<directories />
<filenames />
</PasteImageHistory>
<CopyImageHistory checkeredTransparentBackground="false" filename="image" directory="" onPasteImageTargetRef="3" onPasteLinkText="0" onPasteImageElement="1" onPasteLinkElement="1" onPasteReferenceElement="2" cornerRadius="20" borderColor="0" transparentColor="16777215" borderWidth="1" trimTop="0" trimBottom="0" trimLeft="0" trimRight="0" transparent="false" roundCorners="false" showPreview="true" bordered="false" scaled="false" cropped="false" hideInapplicableOperations="false" preserveLinkFormat="false" scale="50" scalingInterpolation="1" transparentTolerance="0" saveAsDefaultOnOK="false" linkFormat="0" addHighlights="false" showHighlightCoordinates="true" showHighlights="false" mouseSelectionAddsHighlight="false" outerFilled="false" outerFillColor="0" outerFillTransparent="true" outerFillAlpha="30">
<highlightList />
<directories />
<filenames />
</CopyImageHistory>
<PasteLinkHistory onPasteImageTargetRef="3" onPasteTargetRef="1" onPasteLinkText="0" onPasteImageElement="1" onPasteLinkElement="1" onPasteWikiElement="2" onPasteReferenceElement="2" hideInapplicableOperations="false" preserveLinkFormat="false" useHeadingForLinkText="false" linkFormat="0" saveAsDefaultOnOK="false" />
<TableToJsonHistory>
<entries />
</TableToJsonHistory>
<TableSortHistory>
<entries />
</TableSortHistory>
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<!-- IntelliJ Markdown/Flexmark plugin project settings (IDE-generated; safe to regenerate). -->
<project version="4">
<component name="FlexmarkProjectSettings">
<FlexmarkHtmlSettings flexmarkSpecExampleRendering="0" flexmarkSpecExampleRenderHtml="false">
<flexmarkSectionLanguages>
<option name="1" value="Markdown" />
<option name="2" value="HTML" />
<option name="3" value="flexmark-ast:1" />
</flexmarkSectionLanguages>
</FlexmarkHtmlSettings>
</component>
<component name="MarkdownProjectSettings">
<PreviewSettings splitEditorLayout="SPLIT" splitEditorPreview="PREVIEW" useGrayscaleRendering="false" zoomFactor="1.0" maxImageWidth="0" synchronizePreviewPosition="true" highlightPreviewType="LINE" highlightFadeOut="5" highlightOnTyping="true" synchronizeSourcePosition="true" verticallyAlignSourceAndPreviewSyncPosition="true" showSearchHighlightsInPreview="true" showSelectionInPreview="true" lastLayoutSetsDefault="false">
<PanelProvider>
<provider providerId="com.vladsch.md.nav.editor.swing.html.panel" providerName="Default - Swing" />
</PanelProvider>
</PreviewSettings>
<ParserSettings gitHubSyntaxChange="false" correctedInvalidSettings="false" emojiShortcuts="1" emojiImages="0">
<PegdownExtensions>
<option name="ANCHORLINKS" value="true" />
<option name="ATXHEADERSPACE" value="true" />
<option name="FENCED_CODE_BLOCKS" value="true" />
<option name="INTELLIJ_DUMMY_IDENTIFIER" value="true" />
<option name="RELAXEDHRULES" value="true" />
<option name="STRIKETHROUGH" value="true" />
<option name="TABLES" value="true" />
<option name="TASKLISTITEMS" value="true" />
</PegdownExtensions>
<ParserOptions>
<option name="COMMONMARK_LISTS" value="true" />
<option name="EMOJI_SHORTCUTS" value="true" />
<option name="GFM_TABLE_RENDERING" value="true" />
<option name="PRODUCTION_SPEC_PARSER" value="true" />
<option name="SIM_TOC_BLANK_LINE_SPACER" value="true" />
</ParserOptions>
</ParserSettings>
<HtmlSettings headerTopEnabled="false" headerBottomEnabled="false" bodyTopEnabled="false" bodyBottomEnabled="false" addPageHeader="false" imageUriSerials="false" addDocTypeHtml="true" noParaTags="false" plantUmlConversion="0">
<GeneratorProvider>
<provider providerId="com.vladsch.md.nav.editor.text.html.generator" providerName="Unmodified HTML Generator" />
</GeneratorProvider>
<headerTop />
<headerBottom />
<bodyTop />
<bodyBottom />
</HtmlSettings>
<CssSettings previewScheme="UI_SCHEME" cssUri="" isCssUriEnabled="false" isCssUriSerial="true" isCssTextEnabled="false" isDynamicPageWidth="true">
<StylesheetProvider>
<provider providerId="com.vladsch.md.nav.editor.text.html.css" providerName="No Stylesheet" />
</StylesheetProvider>
<ScriptProviders />
<cssText />
<cssUriHistory />
</CssSettings>
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<!-- IntelliJ project settings: Maven import roots and project SDK (JDK 1.8). IDE-generated. -->
<project version="4">
<component name="MavenProjectsManager">
<option name="originalFiles">
<list>
<option value="$PROJECT_DIR$/pom.xml" />
</list>
</option>
</component>
<component name="ProjectRootManager" version="2" languageLevel="JDK_1_8" project-jdk-name="1.8" project-jdk-type="JavaSDK" />
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<!-- IntelliJ module registry: single module backed by mock-data.iml. IDE-generated. -->
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/mock-data.iml" filepath="$PROJECT_DIR$/mock-data.iml" />
</modules>
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<!-- IntelliJ VCS mapping: project root is a Git working copy. IDE-generated. -->
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$" vcs="Git" />
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<!-- IntelliJ module file for the Maven module "mock-data": facets, source roots,
     and the Maven-resolved library classpath. IDE-generated from pom.xml;
     regenerated on Maven re-import, so do not edit the orderEntry list by hand. -->
<module org.jetbrains.idea.maven.project.MavenProjectsManager.isMavenModule="true" type="JAVA_MODULE" version="4">
<component name="FacetManager">
<facet type="Spring" name="Spring">
<configuration />
</facet>
<facet type="web" name="Web">
<configuration>
<webroots />
<sourceRoots>
<root url="file://$MODULE_DIR$/src/main/java" />
<root url="file://$MODULE_DIR$/src/main/resources" />
</sourceRoots>
</configuration>
</facet>
</component>
<component name="NewModuleRootManager" LANGUAGE_LEVEL="JDK_1_8">
<output url="file://$MODULE_DIR$/target/classes" />
<output-test url="file://$MODULE_DIR$/target/test-classes" />
<content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$/src/main/java" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/main/resources" type="java-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/test/java" isTestSource="true" />
<excludeFolder url="file://$MODULE_DIR$/target" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" name="Maven: org.springframework.boot:spring-boot-starter-web:2.1.6.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework.boot:spring-boot-starter:2.1.6.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework.boot:spring-boot:2.1.6.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework.boot:spring-boot-autoconfigure:2.1.6.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework.boot:spring-boot-starter-logging:2.1.6.RELEASE" level="project" />
<orderEntry type="library" name="Maven: ch.qos.logback:logback-classic:1.2.3" level="project" />
<orderEntry type="library" name="Maven: ch.qos.logback:logback-core:1.2.3" level="project" />
<orderEntry type="library" name="Maven: org.apache.logging.log4j:log4j-to-slf4j:2.11.2" level="project" />
<orderEntry type="library" name="Maven: org.apache.logging.log4j:log4j-api:2.11.2" level="project" />
<orderEntry type="library" name="Maven: org.slf4j:jul-to-slf4j:1.7.26" level="project" />
<orderEntry type="library" name="Maven: javax.annotation:javax.annotation-api:1.3.2" level="project" />
<orderEntry type="library" scope="RUNTIME" name="Maven: org.yaml:snakeyaml:1.23" level="project" />
<orderEntry type="library" name="Maven: org.springframework.boot:spring-boot-starter-json:2.1.6.RELEASE" level="project" />
<orderEntry type="library" name="Maven: com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.9.9" level="project" />
<orderEntry type="library" name="Maven: com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.9.9" level="project" />
<orderEntry type="library" name="Maven: com.fasterxml.jackson.module:jackson-module-parameter-names:2.9.9" level="project" />
<orderEntry type="library" name="Maven: org.springframework.boot:spring-boot-starter-tomcat:2.1.6.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.apache.tomcat.embed:tomcat-embed-core:9.0.21" level="project" />
<orderEntry type="library" name="Maven: org.apache.tomcat.embed:tomcat-embed-el:9.0.21" level="project" />
<orderEntry type="library" name="Maven: org.apache.tomcat.embed:tomcat-embed-websocket:9.0.21" level="project" />
<orderEntry type="library" name="Maven: org.hibernate.validator:hibernate-validator:6.0.17.Final" level="project" />
<orderEntry type="library" name="Maven: javax.validation:validation-api:2.0.1.Final" level="project" />
<orderEntry type="library" name="Maven: org.jboss.logging:jboss-logging:3.3.2.Final" level="project" />
<orderEntry type="library" name="Maven: com.fasterxml:classmate:1.4.0" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-web:5.1.8.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-beans:5.1.8.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-webmvc:5.1.8.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-aop:5.1.8.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-context:5.1.8.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-expression:5.1.8.RELEASE" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.springframework.boot:spring-boot-starter-test:2.1.6.RELEASE" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.springframework.boot:spring-boot-test:2.1.6.RELEASE" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.springframework.boot:spring-boot-test-autoconfigure:2.1.6.RELEASE" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: com.jayway.jsonpath:json-path:2.4.0" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: net.minidev:json-smart:2.3" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: net.minidev:accessors-smart:1.2" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.ow2.asm:asm:5.0.4" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.assertj:assertj-core:3.11.1" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.mockito:mockito-core:2.23.4" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: net.bytebuddy:byte-buddy:1.9.13" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: net.bytebuddy:byte-buddy-agent:1.9.13" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.objenesis:objenesis:2.6" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.hamcrest:hamcrest-core:1.3" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.hamcrest:hamcrest-library:1.3" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.skyscreamer:jsonassert:1.5.0" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: com.vaadin.external.google:android-json:0.0.20131108.vaadin1" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-core:5.1.8.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-jcl:5.1.8.RELEASE" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.springframework:spring-test:5.1.8.RELEASE" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.xmlunit:xmlunit-core:2.6.2" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: javax.xml.bind:jaxb-api:2.3.1" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: javax.activation:javax.activation-api:1.2.0" level="project" />
<orderEntry type="library" name="Maven: org.apache.kafka:kafka_2.11:1.1.1" level="project" />
<orderEntry type="library" name="Maven: org.apache.kafka:kafka-clients:2.0.1" level="project" />
<orderEntry type="library" name="Maven: org.lz4:lz4-java:1.4.1" level="project" />
<orderEntry type="library" name="Maven: com.fasterxml.jackson.core:jackson-databind:2.9.9" level="project" />
<orderEntry type="library" name="Maven: com.fasterxml.jackson.core:jackson-annotations:2.9.0" level="project" />
<orderEntry type="library" name="Maven: com.fasterxml.jackson.core:jackson-core:2.9.9" level="project" />
<orderEntry type="library" name="Maven: net.sf.jopt-simple:jopt-simple:5.0.4" level="project" />
<orderEntry type="library" name="Maven: com.yammer.metrics:metrics-core:2.2.0" level="project" />
<orderEntry type="library" name="Maven: org.scala-lang:scala-library:2.11.12" level="project" />
<orderEntry type="library" name="Maven: org.scala-lang:scala-reflect:2.11.12" level="project" />
<orderEntry type="library" name="Maven: com.typesafe.scala-logging:scala-logging_2.11:3.8.0" level="project" />
<orderEntry type="library" name="Maven: org.slf4j:slf4j-api:1.7.26" level="project" />
<orderEntry type="library" name="Maven: com.101tec:zkclient:0.10" level="project" />
<orderEntry type="library" name="Maven: org.apache.zookeeper:zookeeper:3.4.10" level="project" />
<orderEntry type="library" name="Maven: com.alibaba:fastjson:1.2.62" level="project" />
<orderEntry type="library" name="Maven: org.projectlombok:lombok:1.18.8" level="project" />
<orderEntry type="library" name="Maven: org.apache.avro:avro:1.8.2" level="project" />
<orderEntry type="library" name="Maven: org.codehaus.jackson:jackson-core-asl:1.9.13" level="project" />
<orderEntry type="library" name="Maven: org.codehaus.jackson:jackson-mapper-asl:1.9.13" level="project" />
<orderEntry type="library" name="Maven: com.thoughtworks.paranamer:paranamer:2.7" level="project" />
<orderEntry type="library" name="Maven: org.xerial.snappy:snappy-java:1.1.1.3" level="project" />
<orderEntry type="library" name="Maven: org.apache.commons:commons-compress:1.8.1" level="project" />
<orderEntry type="library" name="Maven: org.tukaani:xz:1.5" level="project" />
<orderEntry type="library" name="Maven: org.apache.avro:avro-tools:1.8.2" level="project" />
<orderEntry type="library" name="Maven: org.apache.avro:avro-compiler:1.8.2" level="project" />
<orderEntry type="library" name="Maven: commons-lang:commons-lang:2.6" level="project" />
<orderEntry type="library" name="Maven: org.apache.velocity:velocity:1.7" level="project" />
<orderEntry type="library" name="Maven: commons-collections:commons-collections:3.2.1" level="project" />
<orderEntry type="library" name="Maven: joda-time:joda-time:2.10.2" level="project" />
<orderEntry type="library" name="Maven: org.apache.avro:avro-ipc:1.8.2" level="project" />
<orderEntry type="library" name="Maven: org.mortbay.jetty:jetty:6.1.26" level="project" />
<orderEntry type="library" name="Maven: org.mortbay.jetty:jetty-util:6.1.26" level="project" />
<orderEntry type="library" name="Maven: io.netty:netty:3.5.13.Final" level="project" />
<orderEntry type="library" name="Maven: org.mortbay.jetty:servlet-api:2.5-20081211" level="project" />
<orderEntry type="library" name="Maven: org.apache.avro:avro-mapred:1.8.2" level="project" />
<orderEntry type="library" name="Maven: commons-codec:commons-codec:1.11" level="project" />
<orderEntry type="library" name="Maven: commons-cli:commons-cli:1.2" level="project" />
<orderEntry type="library" name="Maven: commons-logging:commons-logging:1.1.1" level="project" />
<orderEntry type="library" name="Maven: commons-httpclient:commons-httpclient:3.1" level="project" />
<orderEntry type="library" name="Maven: org.apache.avro:trevni-core:1.8.2" level="project" />
<orderEntry type="library" name="Maven: org.apache.avro:trevni-avro:1.8.2" level="project" />
<orderEntry type="library" name="Maven: org.apache.avro:avro-mapred:hadoop2:1.8.2" level="project" />
<orderEntry type="library" name="Maven: org.apache.avro:trevni-core:tests:1.8.2" level="project" />
<orderEntry type="library" name="Maven: org.apache.avro:trevni-avro:tests:1.8.2" level="project" />
<orderEntry type="library" name="Maven: com.github.stephenc.findbugs:findbugs-annotations:1.3.9-1" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: junit:junit:4.12" level="project" />
</component>
</module>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<!-- Maven build for the mock-data tool: a Spring Boot 2.1 application that
     produces mock Avro-encoded log events to Kafka. Packaged as a jar with a
     lib/-relative Class-Path plus an assembly zip (src/main/assembly/package.xml). -->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.zorkdata</groupId>
    <artifactId>mock-data</artifactId>
    <version>1.0-SNAPSHOT</version>
    <repositories>
        <!-- Using Local Nexus Maven Repository.
             NOTE(review): the repository URLs below use plain http://, so artifacts are
             fetched without transport security - confirm whether the Nexus host offers
             https before changing, as switching blindly could break the build. -->
        <repository>
            <id>user-release</id>
            <name>Nexus Repository</name>
            <url>http://nexus.zorkdata.com/repository/maven-public/</url>
            <releases>
                <enabled>true</enabled>
            </releases>
            <snapshots>
                <enabled>true</enabled>
            </snapshots>
        </repository>
        <!-- <repository>
            <id>oss</id>
            <name>oss</name>
            <url>https://oss.sonatype.org/content/groups/public</url>
        </repository>-->
        <!-- <repository>
            <id>spring-milestones</id>
            <name>Spring Milestones</name>
            <url>https://repo.spring.io/libs-milestone</url>
            <snapshots>
                <enabled>false</enabled>
            </snapshots>
        </repository>-->
    </repositories>
    <pluginRepositories>
        <pluginRepository>
            <id>nexus</id>
            <name>Nexus Repository</name>
            <url>http://nexus.zorkdata.com/repository/maven-public/</url>
            <releases>
                <enabled>true</enabled>
            </releases>
            <snapshots>
                <enabled>true</enabled>
            </snapshots>
        </pluginRepository>
    </pluginRepositories>
    <!-- Team CI deployment repositories (release / snapshot targets for mvn deploy). -->
    <distributionManagement>
        <repository>
            <id>user-release</id>
            <!-- Typo "Porject" fixed to "Project" (display name only). -->
            <name>User Project Release</name>
            <url>http://nexus.zorkdata.com/repository/releases</url>
        </repository>
        <snapshotRepository>
            <id>user-snapshot</id>
            <name>User Project Snapshot</name>
            <url>http://nexus.zorkdata.com/repository/snapshots</url>
        </snapshotRepository>
    </distributionManagement>
    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>2.1.6.RELEASE</version>
        <relativePath/> <!-- lookup parent from repository -->
    </parent>
    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>
        <!--<dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka_2.10</artifactId>
            <version>0.8.2.2</version>
        </dependency>-->
        <!--<dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka_2.10</artifactId>
            <version>0.10.2.2</version>
        </dependency>-->
        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka_2.11</artifactId>
            <version>1.1.1</version>
        </dependency>
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.62</version>
        </dependency>
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <version>1.18.8</version>
        </dependency>
        <dependency>
            <groupId>org.apache.avro</groupId>
            <artifactId>avro</artifactId>
            <version>1.8.2</version>
        </dependency>
        <dependency>
            <groupId>org.apache.avro</groupId>
            <artifactId>avro-tools</artifactId>
            <version>1.8.2</version>
        </dependency>
        <!-- NOTE(review): junit 4.12 is already pulled in transitively by
             spring-boot-starter-test; the explicit declaration below is redundant
             but harmless, so it is kept for clarity. -->
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.12</version>
            <scope>test</scope>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-jar-plugin</artifactId>
                <version>2.4</version>
                <configuration>
                    <archive>
                        <!-- Do not include pom.xml / pom.properties in the generated jar. -->
                        <addMavenDescriptor>false</addMavenDescriptor>
                        <manifest>
                            <!-- Put the third-party jars on the manifest Class-Path... -->
                            <addClasspath>true</addClasspath>
                            <!-- ...prefixed with lib/, because the assembly packages dependencies under lib/. -->
                            <classpathPrefix>lib/</classpathPrefix>
                            <!-- Application entry point. -->
                            <mainClass>com.zorkdata.tools.mock.MockFilebeatDataToKafka</mainClass>
                        </manifest>
                    </archive>
                    <!-- Exclude files that should not end up in the jar.
                         NOTE(review): jar-plugin excludes are matched relative to the
                         classes directory, so an absolute ${project.basedir}/xml/* pattern
                         likely never matches anything - verify the intent. -->
                    <excludes>
                        <exclude>${project.basedir}/xml/*</exclude>
                    </excludes>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-assembly-plugin</artifactId>
                <version>2.4</version>
                <configuration>
                    <descriptors>
                        <descriptor>src/main/assembly/package.xml</descriptor>
                    </descriptors>
                </configuration>
                <executions>
                    <execution>
                        <id>make-assembly</id>
                        <phase>package</phase>
                        <goals>
                            <goal>single</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>
</project>
\ No newline at end of file
<!-- Maven assembly descriptor: builds the distributable zip for mock-data. -->
<assembly>
<id>bin</id>
<!-- The final distributable artifact is a single zip file. -->
<formats>
<format>zip</format>
</formats>
<!-- Adds dependencies to zip package under lib directory -->
<dependencySets>
<dependencySet>
<!--
Exclude the project's own artifact; keep third-party jars unexploded
and package them into the zip's lib directory.
-->
<useProjectArtifact>false</useProjectArtifact>
<outputDirectory>lib</outputDirectory>
<unpack>false</unpack>
</dependencySet>
</dependencySets>
<fileSets>
<!-- Package project documentation files (README/LICENSE/NOTICE) into the zip root. -->
<fileSet>
<directory>${project.basedir}</directory>
<outputDirectory>/</outputDirectory>
<includes>
<include>README*</include>
<include>LICENSE*</include>
<include>NOTICE*</include>
</includes>
</fileSet>
<!-- Package configuration files into the zip's config directory.
     NOTE(review): the backslashes in this path are Windows-specific;
     confirm the assembly resolves it on *nix build hosts. -->
<fileSet>
<directory>${project.basedir}\src\main\config</directory>
<outputDirectory>config</outputDirectory>
<includes>
<include>*.xml</include>
<include>*.properties</include>
</includes>
</fileSet>
<!-- Package the startup script from src/main/scripts into the zip root. -->
<fileSet>
<directory>${project.build.scriptSourceDirectory}</directory>
<outputDirectory></outputDirectory>
<includes>
<include>startup.*</include>
</includes>
</fileSet>
<!-- Package the remaining scripts (everything except startup.*).
     NOTE(review): the original comment said these go to a "script" directory,
     but outputDirectory is empty, so they actually land in the zip root. -->
<fileSet>
<directory>${project.build.scriptSourceDirectory}</directory>
<outputDirectory></outputDirectory>
<excludes>
<exclude>startup.*</exclude>
</excludes>
</fileSet>
<!-- Package the project's own built jar(s) into the zip root. -->
<fileSet>
<directory>${project.build.directory}</directory>
<outputDirectory></outputDirectory>
<includes>
<include>*.jar</include>
</includes>
</fileSet>
</fileSets>
</assembly>
Manifest-Version: 1.0
Main-Class: com.zorkdata.tools.mock.MockFilebeatDataToKafka
package com.zorkdata.tools;
import com.alibaba.fastjson.JSON;
import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import com.zorkdata.tools.pojo.ZorkData;
import com.zorkdata.tools.utils.DateUtil;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ExecutionException;
/**
 * Sends mock filebeat-style request/response log events to a Kafka topic,
 * Avro-serialized with the shared log schema.
 *
 * <p>Fixes over the original: {@link #init()} no longer re-creates (and leaks)
 * a new KafkaProducer on every {@link #send()} call, the producer is closed in
 * {@code main}, and the two near-identical build methods share one helper.
 */
public class MockProduct {
    private static final Logger LOGGER = LoggerFactory.getLogger(MockProduct.class);

    private static String topic = "test1";
    private static String brokerAddr = "zork-poc103:9092";

    private static ProducerRecord<String, byte[]> producerRecord = null;
    private static KafkaProducer<String, byte[]> producer = null;

    /**
     * Initializes the shared Kafka producer. Idempotent: repeated calls reuse
     * the existing producer (the original built a new one per call and never
     * closed the previous instance).
     */
    public static void init() {
        if (producer != null) {
            return;
        }
        Properties props = new Properties();
        props.put("bootstrap.servers", brokerAddr);
        props.put("client.id", "test");
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", ByteArraySerializer.class.getName());
        // batch.size=1 pushes each mock record out immediately.
        props.put("batch.size", 1);
        producer = new KafkaProducer<String, byte[]>(props);
    }

    /**
     * Builds an Avro-encoded mock "response" event (logTypeName tc50_biz_filebeat).
     *
     * @return Avro-serialized event bytes
     */
    public static byte[] buildZorkDataResp() {
        return buildZorkData("tc50_biz_filebeat");
    }

    /**
     * Builds an Avro-encoded mock "request" event (logTypeName tc50_biz_filebeat_req).
     *
     * @return Avro-serialized event bytes
     */
    public static byte[] buildZorkDataReq() {
        return buildZorkData("tc50_biz_filebeat_req");
    }

    /**
     * Shared builder for the two mock events: the original methods were
     * line-for-line duplicates differing only in logTypeName.
     *
     * @param logTypeName log type written into the event
     * @return Avro-serialized event bytes
     */
    private static byte[] buildZorkData(String logTypeName) {
        ZorkData zorkData = new ZorkData();
        String timestamp = DateUtil.getUTCTimeStr();
        String source = "d:\\tc50\\log\\20191231.log";
        String offset = String.valueOf(6322587L);
        zorkData.setLogTypeName(logTypeName);
        zorkData.setOffset(offset);
        zorkData.setSource(source);
        zorkData.setTimestamp(timestamp);
        Map<String, String> dimensions = new HashMap<>();
        dimensions.put("hostname", "ZVTDX-TC50223");
        dimensions.put("appprogramname", "ZVTDX-TC50223_770");
        dimensions.put("appsystem", "TXJY");
        Map<String, Double> measures = new HashMap<>();
        measures.put("latence", 301.0);
        Map<String, String> normalFields = new HashMap<>();
        normalFields.put("message", "成功处理");
        zorkData.setDimensions(dimensions);
        zorkData.setMeasures(measures);
        zorkData.setNormalFields(normalFields);
        // Echo the JSON form for manual inspection, as the original did.
        String msg = JSON.toJSONString(zorkData);
        System.out.println(msg);
        AvroSerializer avroSerializer = AvroSerializerFactory.getLogAvroSerializer();
        return avroSerializer.serializingLog(logTypeName, timestamp, source, offset,
                dimensions, measures, normalFields);
    }

    /**
     * Sends one request event followed (after 200 ms) by one response event,
     * blocking on each send.
     *
     * @throws ExecutionException   if a Kafka send fails
     * @throws InterruptedException if interrupted while sleeping or waiting
     */
    public static void send() throws ExecutionException, InterruptedException {
        init();
        byte[] req = buildZorkDataReq();
        producerRecord = new ProducerRecord<String, byte[]>(topic, null, req);
        producer.send(producerRecord).get();
        Thread.sleep(200);
        byte[] resp = buildZorkDataResp();
        producerRecord = new ProducerRecord<String, byte[]>(topic, null, resp);
        producer.send(producerRecord).get();
    }

    public static void main(String[] args) throws ExecutionException, InterruptedException {
        try {
            for (int i = 0; i < 3; i++) {
                send();
            }
        } finally {
            // Release network resources; the original never closed the producer.
            if (producer != null) {
                producer.close();
            }
        }
    }
}
package com.zorkdata.tools.avro;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Deserializes Avro-encoded byte arrays back into {@link GenericRecord}s
 * using the schema (JSON text) supplied at construction time.
 *
 * @author DeleMing
 */
public class AvroDeserializer {
    private static final Logger LOGGER = LoggerFactory.getLogger(AvroDeserializer.class);

    // Fields are public for backward compatibility with existing callers.
    public JSONObject jsonObject;
    public JSONArray jsonArray;
    public Schema schema;
    public String[] keys;

    /**
     * @param schema Avro schema as a JSON string
     */
    public AvroDeserializer(String schema) {
        getKeysFromjson(schema);
    }

    /**
     * Parses the schema JSON, compiles the Avro {@link Schema}, and caches the
     * field names declared under the schema's "fields" array.
     *
     * @param schema Avro schema as a JSON string
     */
    void getKeysFromjson(String schema) {
        this.jsonObject = JSONObject.parseObject(schema);
        this.schema = new Schema.Parser().parse(schema);
        this.jsonArray = this.jsonObject.getJSONArray("fields");
        this.keys = new String[this.jsonArray.size()];
        for (int i = 0; i < this.jsonArray.size(); i++) {
            this.keys[i] = this.jsonArray.getJSONObject(i).get("name").toString();
        }
    }

    /**
     * Deserializes one Avro-encoded message body (e.g. a Kafka message value).
     *
     * @param body Avro-encoded bytes
     * @return the decoded record, or {@code null} if decoding failed
     */
    public GenericRecord deserializing(byte[] body) {
        DatumReader<GenericData.Record> datumReader = new GenericDatumReader<GenericData.Record>(this.schema);
        Decoder decoder = DecoderFactory.get().binaryDecoder(body, null);
        GenericData.Record result = null;
        try {
            result = datumReader.read(null, decoder);
        } catch (Exception e) {
            // Fix: the original wrapped this constant in a zero-argument
            // String.format(...), which was a pointless no-op.
            LOGGER.error("error Avro反序列化", e);
        }
        return result;
    }
}
package com.zorkdata.tools.avro;
/**
 * Lazily caches one {@link AvroDeserializer} per schema (logs, metrics).
 *
 * @author DeleMing
 */
public class AvroDeserializerFactory {
    private static AvroDeserializer logs = null;
    private static AvroDeserializer metrics = null;

    /** Drops both cached deserializers so they are rebuilt on next use. */
    public static void init() {
        logs = null;
        metrics = null;
    }

    /**
     * Returns the shared log-schema deserializer, creating it on first use.
     *
     * @return deserializer for the log schema
     */
    public static AvroDeserializer getLogsDeserializer() {
        if (logs != null) {
            return logs;
        }
        logs = new AvroDeserializer(LogAvroMacroDef.metadata);
        return logs;
    }

    /**
     * Returns the shared metric-schema deserializer, creating it on first use.
     *
     * @return deserializer for the metric schema
     */
    public static AvroDeserializer getMetricDeserializer() {
        if (metrics != null) {
            return metrics;
        }
        metrics = new AvroDeserializer(MetricAvroMacroDef.metadata);
        return metrics;
    }
}
package com.zorkdata.tools.avro;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.util.Utf8;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* @author DeleMing
*/
public class AvroSerializer {
public JSONObject jsonObject;
public JSONArray jsonArray;
public Schema schema;
public List<String> filedsArrayList = new ArrayList<String>();
public AvroSerializer(String schema) {
getKeysFromjson(schema);
}
/**
* @param schema
* :Avro序列化所使用的schema
* @return void 返回类型
* @throws
* @Title: getKeysFromjson
* @Description:用于获取Avro的keys
*/
void getKeysFromjson(String schema) {
this.jsonObject = JSONObject.parseObject(schema);
this.schema = new Schema.Parser().parse(schema);
this.jsonArray = this.jsonObject.getJSONArray("fields");
if (filedsArrayList != null && filedsArrayList.size() > 0) {
filedsArrayList.clear();
}
for (int i = 0; i < this.jsonArray.size(); i++) {
filedsArrayList.add(this.jsonArray.getJSONObject(i).get("name").toString());
}
}
/**
* @param
* @param @return 设定文件
* @return String 返回类型
* @throws
* @Title: serializing
* @Description: 用于Avro的序列化。
*/
private synchronized byte[] serializing(List<String> temtuple) {
byte[] returnstr = null;
GenericRecord datum = new GenericData.Record(this.schema);
// 将数据加到datum中
for (int i = 0; i < filedsArrayList.size(); i++) {
datum.put(filedsArrayList.get(i), temtuple.get(i));
}
ByteArrayOutputStream out = new ByteArrayOutputStream();
// DatumWriter 将数据对象翻译成Encoder对象可以理解的类型
DatumWriter<GenericRecord> write = new GenericDatumWriter<GenericRecord>(this.schema);
// 然后由Encoder写到数据流。
Encoder encoder = EncoderFactory.get().binaryEncoder(out, null);
try {
write.write(datum, encoder);
encoder.flush();
} catch (IOException e) {
System.out.println("序列化失败 " + e );
} finally {
if (out != null) {
try {
out.close();
} catch (IOException e) {
System.out.println("序列化失败" + e);
}
}
}
try {
returnstr = out.toByteArray();
} catch (Exception e) {
System.out.println("序列化失败" + e);
}
return returnstr;
}
/**
* 序列化json串
*
* @param json
* @return
*/
private synchronized byte[] serializing(String json) {
byte[] returnstr = null;
JSONObject jsonObject = (JSONObject) JSONObject.parse(json);// new TypeReference<Object>() {}
GenericRecord datum = new GenericData.Record(this.schema);
// 将数据加到datum中
for (int i = 0; i < filedsArrayList.size(); i++) {
datum.put(filedsArrayList.get(i), new Utf8(String.valueOf(jsonObject.get(filedsArrayList.get(i)))));
}
ByteArrayOutputStream out = new ByteArrayOutputStream();
// DatumWriter 将数据对象翻译成Encoder对象可以理解的类型
DatumWriter<GenericRecord> write = new GenericDatumWriter<GenericRecord>(this.schema);
// 然后由Encoder写到数据流。
Encoder encoder = EncoderFactory.get().binaryEncoder(out, null);
try {
write.write(datum, encoder);
encoder.flush();
} catch (IOException e) {
System.out.println("序列化失败" + e);
} finally {
if (out != null) {
try {
out.close();
} catch (IOException e) {
System.out.println("序列化失败" + e);
}
}
}
try {
returnstr = out.toByteArray();
} catch (Exception e) {
System.out.println("序列化失败" + e);
}
return returnstr;
}
/**
* 序列化json对象
*
* @param jsonObject
* @return
*/
private synchronized byte[] serializing(JSONObject jsonObject) {
byte[] returnstr = null;
GenericRecord datum = new GenericData.Record(this.schema);
// 将数据加到datum中
for (int i = 0; i < filedsArrayList.size(); i++) {
datum.put(filedsArrayList.get(i), jsonObject.get(filedsArrayList.get(i)));
}
ByteArrayOutputStream out = new ByteArrayOutputStream();
// DatumWriter 将数据对象翻译成Encoder对象可以理解的类型
DatumWriter<GenericRecord> write = new GenericDatumWriter<GenericRecord>(this.schema);
// 然后由Encoder写到数据流。
Encoder encoder = EncoderFactory.get().binaryEncoder(out, null);
try {
write.write(datum, encoder);
encoder.flush();
} catch (IOException e) {
System.out.println("序列化失败" + e);
} finally {
if (out != null) {
try {
out.close();
} catch (IOException e) {
System.out.println("序列化失败" + e);
}
}
}
try {
returnstr = out.toByteArray();
} catch (Exception e) {
System.out.println("序列化失败" + e);
}
return returnstr;
}
/**
* 序列化对象
*/
public synchronized byte[] serializing(GenericRecord datum) {
byte[] returnstr = null;
ByteArrayOutputStream out = new ByteArrayOutputStream();
// DatumWriter 将数据对象翻译成Encoder对象可以理解的类型
DatumWriter<GenericRecord> write = new GenericDatumWriter<GenericRecord>(this.schema);
// 然后由Encoder写到数据流。
Encoder encoder = EncoderFactory.get().binaryEncoder(out, null);
try {
write.write(datum, encoder);
encoder.flush();
} catch (IOException e) {
System.out.println("序列化失败" + e);
} finally {
if (out != null) {
try {
out.close();
} catch (IOException e) {
System.out.println("序列化失败" + e);
}
}
}
try {
returnstr = out.toByteArray();
} catch (Exception e) {
System.out.println("序列化失败" + e);
}
// GenericRecord s = AvroDeserializerFactory.getTopicmetadataDeserializer().deserializing(returnstr);
return returnstr;
}
/**
* 序列化对象
*/
public synchronized byte[] serializingLog(String logTypeName, String timestamp, String source, String offset, Map<String, String> dimensions, Map<String, Double> metrics,
Map<String, String> normalFields) {
GenericRecord datum = new GenericData.Record(this.schema);
// 将数据加到datum中
datum.put(0, logTypeName);
datum.put(1, timestamp);
datum.put(2, source);
datum.put(3, offset);
datum.put(4, dimensions);
datum.put(5, metrics);
datum.put(6, normalFields);
return serializing(datum);
}
/**
 * Builds a metric record from the individual fields and serializes it.
 * Values are assigned positionally in schema-declared order.
 */
public synchronized byte[] serializingMetric(String metricSetName, String timestamp, Map<String, String> dimensions, Map<String, Double> metrics) {
    GenericRecord datum = new GenericData.Record(this.schema);
    // Positional assignment: metricsetname, timestamp, dimensions, metrics.
    Object[] fieldValues = {metricSetName, timestamp, dimensions, metrics};
    for (int pos = 0; pos < fieldValues.length; pos++) {
        datum.put(pos, fieldValues[pos]);
    }
    return serializing(datum);
}
/**
 * Re-maps fields from an existing record into a new record for this schema
 * and serializes it.
 *
 * @param genericRecord source record to read values from
 * @param key           source field names, parallel to this schema's field
 *                      list: key[i] supplies the value for schema field i
 * @return the Avro binary encoding (empty/partial on write failure)
 */
private synchronized byte[] serializing(GenericRecord genericRecord, String key[]) {
byte[] returnstr = null;
GenericRecord datum = new GenericData.Record(this.schema);
// Copy each source value into datum as a Utf8 string, keyed by this
// schema's field names (every value is stringified, whatever its type).
for (int i = 0; i < filedsArrayList.size(); i++) {
datum.put(filedsArrayList.get(i), new Utf8(String.valueOf(genericRecord.get(key[i]))));
}
ByteArrayOutputStream out = new ByteArrayOutputStream();
// DatumWriter translates the data object into a form the Encoder understands.
DatumWriter<GenericRecord> write = new GenericDatumWriter<GenericRecord>(this.schema);
// The Encoder then writes the binary form to the output stream.
Encoder encoder = EncoderFactory.get().binaryEncoder(out, null);
try {
write.write(datum, encoder);
encoder.flush();
} catch (IOException e) {
System.out.println("序列化失败" + e);
} finally {
if (out != null) {
try {
out.close();
} catch (IOException e) {
System.out.println("序列化失败" + e);
}
}
}
try {
returnstr = out.toByteArray();
} catch (Exception e) {
System.out.println("序列化失败" + e);
}
return returnstr;
}
}
package com.zorkdata.tools.avro;
/**
* @author DeleMing
*/
/**
 * Lazily-initialized factory for shared {@link AvroSerializer} instances
 * (one for the log schema, one for the metric schema).
 */
public class AvroSerializerFactory {
    private static AvroSerializer metricMetadata = null;
    private static AvroSerializer logMetadata = null;
    /**
     * Returns the shared log serializer, creating it on first use.
     * Synchronized: the original unsynchronized null-check let concurrent
     * callers race and build duplicate serializers.
     */
    public static synchronized AvroSerializer getLogAvroSerializer() {
        if (logMetadata == null) {
            logMetadata = new AvroSerializer(LogAvroMacroDef.metadata);
        }
        return logMetadata;
    }
    /**
     * Returns the shared metric serializer, creating it on first use.
     */
    public static synchronized AvroSerializer getMetricAvroSerializer() {
        if (metricMetadata == null) {
            metricMetadata = new AvroSerializer(MetricAvroMacroDef.metadata);
        }
        return metricMetadata;
    }
}
package com.zorkdata.tools.avro;
/**
* @author DeleMing
*/
/**
 * Holds the Avro schema (as a JSON string) for log records:
 * four nullable strings (logTypeName, timestamp, source, offset) plus three
 * nullable maps (dimensions: string, measures: double, normalFields: string).
 * The literal must stay byte-identical — serializers/deserializers parse it.
 */
public class LogAvroMacroDef {
public static String metadata = "{\n" +
" \"namespace\": \"com.zork.logs\",\n" +
" \"type\": \"record\",\n" +
" \"name\": \"logs\",\n" +
" \"fields\": [\n" +
" {\n" +
" \"name\": \"logTypeName\",\n" +
" \"type\": [\n" +
" \"string\",\n" +
" \"null\"\n" +
" ]\n" +
" },\n" +
" {\n" +
" \"name\": \"timestamp\",\n" +
" \"type\": [\n" +
" \"string\",\n" +
" \"null\"\n" +
" ]\n" +
" },\n" +
" {\n" +
" \"name\": \"source\",\n" +
" \"type\": [\n" +
" \"string\",\n" +
" \"null\"\n" +
" ]\n" +
" },\n" +
" {\n" +
" \"name\": \"offset\",\n" +
" \"type\": [\n" +
" \"string\",\n" +
" \"null\"\n" +
" ]\n" +
" },\n" +
" {\n" +
" \"name\": \"dimensions\",\n" +
" \"type\": [\n" +
" \"null\",\n" +
" {\n" +
" \"type\": \"map\",\n" +
" \"values\": \"string\"\n" +
" }\n" +
" ]\n" +
" },\n" +
" {\n" +
" \"name\": \"measures\",\n" +
" \"type\": [\n" +
" \"null\",\n" +
" {\n" +
" \"type\": \"map\",\n" +
" \"values\": \"double\"\n" +
" }\n" +
" ]\n" +
" },\n" +
" {\n" +
" \"name\": \"normalFields\",\n" +
" \"type\": [\n" +
" \"null\",\n" +
" {\n" +
" \"type\": \"map\",\n" +
" \"values\": \"string\"\n" +
" }\n" +
" ]\n" +
" }\n" +
" ]\n" +
"}";
}
package com.zorkdata.tools.avro;
/**
* @author DeleMing
*/
/**
 * Holds the Avro schema (as a JSON string) for metric records:
 * two nullable strings (metricsetname, timestamp) plus two nullable maps
 * (dimensions: string, metrics: double). The literal must stay byte-identical.
 */
public class MetricAvroMacroDef {
public static String metadata = "{\n" +
" \"namespace\": \"com.zork.metrics\",\n" +
" \"type\": \"record\",\n" +
" \"name\": \"metrics\",\n" +
" \"fields\": [\n" +
" {\n" +
" \"name\": \"metricsetname\",\n" +
" \"type\": [\n" +
" \"string\",\n" +
" \"null\"\n" +
" ]\n" +
" },\n" +
" {\n" +
" \"name\": \"timestamp\",\n" +
" \"type\": [\n" +
" \"string\",\n" +
" \"null\"\n" +
" ]\n" +
" },\n" +
" {\n" +
" \"name\": \"dimensions\",\n" +
" \"type\": [\n" +
" \"null\",\n" +
" {\n" +
" \"type\": \"map\",\n" +
" \"values\": \"string\"\n" +
" }\n" +
" ]\n" +
" },\n" +
" {\n" +
" \"name\": \"metrics\",\n" +
" \"type\": [\n" +
" \"null\",\n" +
" {\n" +
" \"type\": \"map\",\n" +
" \"values\": \"double\"\n" +
" }\n" +
" ]\n" +
" }\n" +
" ]\n" +
"}";
}
package com.zorkdata.tools.kafka;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Created by zhuzhigang on 16/2/29.
*/
/**
 * Deserializes Avro binary payloads encoded with a known schema
 * (supplied as a JSON string at construction time).
 * Created by zhuzhigang on 16/2/29.
 */
public class AvroDeserializer {
    private static final Logger LOGGER = LoggerFactory.getLogger(AvroDeserializer.class);
    public JSONObject jsonObject;
    public JSONArray jsonArray;
    public Schema schema;
    // Ordered field names extracted from the schema's "fields" array.
    public String[] keys;

    public AvroDeserializer(String schema) {
        getKeysFromjson(schema);
    }

    /**
     * Parses the schema JSON and caches the parsed {@link Schema} plus the
     * ordered field names in {@link #keys}.
     *
     * @param schema Avro schema as a JSON string; must contain a "fields" array
     */
    void getKeysFromjson(String schema) {
        this.jsonObject = JSONObject.parseObject(schema);
        this.schema = new Schema.Parser().parse(schema);
        this.jsonArray = this.jsonObject.getJSONArray("fields");
        this.keys = new String[this.jsonArray.size()];
        for (int i = 0; i < this.jsonArray.size(); i++) {
            this.keys[i] = this.jsonArray.getJSONObject(i).get("name").toString();
        }
    }

    /**
     * Deserializes one Avro-encoded message body.
     *
     * @param body Avro binary payload (e.g. a Kafka message value)
     * @return the decoded record, or null if decoding failed
     */
    public GenericRecord deserializing(byte[] body) {
        DatumReader<GenericData.Record> datumReader = new GenericDatumReader<GenericData.Record>(this.schema);
        Decoder decoder = DecoderFactory.get().binaryDecoder(body, null);
        GenericData.Record result = null;
        try {
            result = datumReader.read(null, decoder);
        } catch (Exception e) {
            // Fix: String.format() with no format arguments was a pointless
            // call; log the literal message directly (same output).
            LOGGER.error("error Avro反序列化", e);
        }
        return result;
    }
}
package com.zorkdata.tools.kafka;
/**
* Created by zhuzhigang on 16/2/29.
*/
/**
 * Lazily-initialized factory for the shared log-schema {@link AvroDeserializer}.
 * Created by zhuzhigang on 16/2/29.
 */
public class AvroDeserializerFactory {
    private static AvroDeserializer bappLauch = null;
    /** Discards the cached deserializer; the next call rebuilds it. */
    public static synchronized void init() {
        bappLauch = null;
    }
    /**
     * Returns the shared deserializer for the log schema, creating it on
     * first use. Synchronized: the original unsynchronized null-check let
     * concurrent callers race and build duplicate instances.
     *
     * @return shared deserializer built from {@code LogAvroMacroDef.metadata}
     */
    public static synchronized AvroDeserializer getTopicmetadataDeserializer() {
        if (bappLauch == null) {
            bappLauch = new AvroDeserializer(LogAvroMacroDef.metadata);
        }
        return bappLauch;
    }
}
package com.zorkdata.tools.kafka;
/**
* @author zhuzhigang
* @since 1.0
*/
/**
 * Holds the Avro schema (as a JSON string) for metric records.
 * NOTE(review): this duplicates com.zorkdata.tools.avro.MetricAvroMacroDef —
 * presumably the two copies must stay in sync; confirm before editing either.
 * The literal must stay byte-identical — serializers parse it.
 */
public class AvroMacroDef {
public static String metadata = "{\n" +
" \"namespace\": \"com.zork.metrics\",\n" +
" \"type\": \"record\",\n" +
" \"name\": \"metrics\",\n" +
" \"fields\": [\n" +
" {\n" +
" \"name\": \"metricsetname\",\n" +
" \"type\": [\n" +
" \"string\",\n" +
" \"null\"\n" +
" ]\n" +
" },\n" +
" {\n" +
" \"name\": \"timestamp\",\n" +
" \"type\": [\n" +
" \"string\",\n" +
" \"null\"\n" +
" ]\n" +
" },\n" +
" {\n" +
" \"name\": \"dimensions\",\n" +
" \"type\": [\n" +
" \"null\",\n" +
" {\n" +
" \"type\": \"map\",\n" +
" \"values\": \"string\"\n" +
" }\n" +
" ]\n" +
" },\n" +
" {\n" +
" \"name\": \"metrics\",\n" +
" \"type\": [\n" +
" \"null\",\n" +
" {\n" +
" \"type\": \"map\",\n" +
" \"values\": \"double\"\n" +
" }\n" +
" ]\n" +
" }\n" +
" ]\n" +
"}";
}
package com.zorkdata.tools.kafka;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.util.Utf8;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* Created by zhuzhigang on 16/3/1.
*/
/**
 * Serializes data into Avro binary using a schema supplied as a JSON string.
 * Created by zhuzhigang on 16/3/1.
 */
public class AvroSerializer {
    public JSONObject jsonObject;
    public JSONArray jsonArray;
    public Schema schema;
    // Ordered field names extracted from the schema's "fields" array.
    public List<String> filedsArrayList = new ArrayList<String>();

    public AvroSerializer(String schema) {
        getKeysFromjson(schema);
    }

    /**
     * Parses the schema JSON and caches the parsed {@link Schema} plus the
     * ordered field names in {@link #filedsArrayList}.
     *
     * @param schema Avro schema as a JSON string; must contain a "fields" array
     */
    void getKeysFromjson(String schema) {
        this.jsonObject = JSONObject.parseObject(schema);
        this.schema = new Schema.Parser().parse(schema);
        this.jsonArray = this.jsonObject.getJSONArray("fields");
        if (filedsArrayList != null && filedsArrayList.size() > 0) {
            filedsArrayList.clear();
        }
        for (int i = 0; i < this.jsonArray.size(); i++) {
            filedsArrayList.add(this.jsonArray.getJSONObject(i).get("name").toString());
        }
    }

    /**
     * Encodes a populated record to Avro binary. Shared by every serializing
     * overload — previously each overload duplicated this logic AND silently
     * swallowed IOExceptions with empty catch blocks; errors are now reported
     * the same way the com.zorkdata.tools.avro.AvroSerializer sibling does.
     *
     * @param datum record conforming to {@code this.schema}
     * @return the Avro binary encoding; empty/partial if encoding failed
     */
    private byte[] encode(GenericRecord datum) {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        // DatumWriter translates the record into a form the Encoder understands.
        DatumWriter<GenericRecord> writer = new GenericDatumWriter<GenericRecord>(this.schema);
        // The Encoder then writes the binary form into the output stream.
        Encoder encoder = EncoderFactory.get().binaryEncoder(out, null);
        try {
            writer.write(datum, encoder);
            encoder.flush();
        } catch (IOException e) {
            System.out.println("序列化失败" + e);
        }
        // ByteArrayOutputStream.close() is a no-op and toByteArray() cannot
        // throw, so no finally/close bookkeeping is needed.
        return out.toByteArray();
    }

    /**
     * Serializes a tuple of stringified values, paired positionally with the
     * schema's field names.
     *
     * @param temtuple values in schema field order
     * @return Avro binary encoding
     */
    private synchronized byte[] serializing(List<String> temtuple) {
        GenericRecord datum = new GenericData.Record(this.schema);
        for (int i = 0; i < filedsArrayList.size(); i++) {
            datum.put(filedsArrayList.get(i), temtuple.get(i));
        }
        return encode(datum);
    }

    /**
     * Serializes a JSON string: each schema field is taken from the JSON
     * object by name and stringified into a Utf8 value.
     *
     * @param json flat JSON object as a string
     * @return Avro binary encoding
     */
    private synchronized byte[] serializing(String json) {
        JSONObject jsonObject = (JSONObject) JSONObject.parse(json);
        GenericRecord datum = new GenericData.Record(this.schema);
        for (int i = 0; i < filedsArrayList.size(); i++) {
            datum.put(filedsArrayList.get(i), new Utf8(String.valueOf(jsonObject.get(filedsArrayList.get(i)))));
        }
        return encode(datum);
    }

    /**
     * Serializes a JSON object: each schema field is taken from the object by
     * name as-is (no stringification, unlike the String overload).
     *
     * @param jsonObject flat JSON object
     * @return Avro binary encoding
     */
    private synchronized byte[] serializing(JSONObject jsonObject) {
        GenericRecord datum = new GenericData.Record(this.schema);
        for (int i = 0; i < filedsArrayList.size(); i++) {
            datum.put(filedsArrayList.get(i), jsonObject.get(filedsArrayList.get(i)));
        }
        return encode(datum);
    }

    /**
     * Serializes an already-populated record.
     *
     * @param datum record conforming to {@code this.schema}
     * @return Avro binary encoding
     */
    public synchronized byte[] serializing(GenericRecord datum) {
        return encode(datum);
    }

    /**
     * Builds a log record from the individual fields and serializes it.
     * Values are assigned positionally in schema order.
     */
    public synchronized byte[] serializingLog(String logTypeName, String timestamp, String source, String offset, Map<String, String> dimensions, Map<String, Double> metrics,
                                              Map<String, String> normalFields) {
        GenericRecord datum = new GenericData.Record(this.schema);
        datum.put(0, logTypeName);
        datum.put(1, timestamp);
        datum.put(2, source);
        datum.put(3, offset);
        datum.put(4, dimensions);
        datum.put(5, metrics);
        datum.put(6, normalFields);
        return serializing(datum);
    }

    /**
     * Builds a metric record from the individual fields and serializes it.
     * Values are assigned positionally in schema order.
     */
    public synchronized byte[] serializingMetric(String metricSetName, String timestamp, Map<String, String> dimensions, Map<String, Double> metrics) {
        GenericRecord datum = new GenericData.Record(this.schema);
        datum.put(0, metricSetName);
        datum.put(1, timestamp);
        datum.put(2, dimensions);
        datum.put(3, metrics);
        return serializing(datum);
    }

    /**
     * Re-maps fields from an existing record into a new record for this
     * schema (key[i] names the source field for schema field i) and
     * serializes it; every value is stringified into a Utf8.
     */
    private synchronized byte[] serializing(GenericRecord genericRecord, String key[]) {
        GenericRecord datum = new GenericData.Record(this.schema);
        for (int i = 0; i < filedsArrayList.size(); i++) {
            datum.put(filedsArrayList.get(i), new Utf8(String.valueOf(genericRecord.get(key[i]))));
        }
        return encode(datum);
    }
}
package com.zorkdata.tools.kafka;
/**
* @author zhuzhigang
* @since 1.0
*/
/**
 * Lazily-initialized factory for shared {@link AvroSerializer} instances
 * (kafka-package variant; the metric serializer uses {@code AvroMacroDef}).
 */
public class AvroSerializerFactory {
    private static AvroSerializer metricMetadata = null;
    private static AvroSerializer logMetadata = null;
    /**
     * Returns the shared log serializer, creating it on first use.
     * Synchronized: the original unsynchronized null-check let concurrent
     * callers race and build duplicate serializers.
     */
    public static synchronized AvroSerializer getLogAvorSerializer() {
        if (logMetadata == null) {
            logMetadata = new AvroSerializer(LogAvroMacroDef.metadata);
        }
        return logMetadata;
    }
    /**
     * Returns the shared metric serializer, creating it on first use.
     */
    public static synchronized AvroSerializer getMetricAvorSerializer() {
        if (metricMetadata == null) {
            metricMetadata = new AvroSerializer(AvroMacroDef.metadata);
        }
        return metricMetadata;
    }
}
package com.zorkdata.tools.kafka;
/**
* @author zhuzhigang
* @since 1.0
*/
/**
 * Holds the Avro schema (as a JSON string) for log records.
 * NOTE(review): this duplicates com.zorkdata.tools.avro.LogAvroMacroDef —
 * presumably the two copies must stay in sync; confirm before editing either.
 * The literal must stay byte-identical — serializers/deserializers parse it.
 */
public class LogAvroMacroDef {
public static String metadata = "{\n" +
" \"namespace\": \"com.zork.logs\",\n" +
" \"type\": \"record\",\n" +
" \"name\": \"logs\",\n" +
" \"fields\": [\n" +
" {\n" +
" \"name\": \"logTypeName\",\n" +
" \"type\": [\n" +
" \"string\",\n" +
" \"null\"\n" +
" ]\n" +
" },\n" +
" {\n" +
" \"name\": \"timestamp\",\n" +
" \"type\": [\n" +
" \"string\",\n" +
" \"null\"\n" +
" ]\n" +
" },\n" +
" {\n" +
" \"name\": \"source\",\n" +
" \"type\": [\n" +
" \"string\",\n" +
" \"null\"\n" +
" ]\n" +
" },\n" +
" {\n" +
" \"name\": \"offset\",\n" +
" \"type\": [\n" +
" \"string\",\n" +
" \"null\"\n" +
" ]\n" +
" },\n" +
" {\n" +
" \"name\": \"dimensions\",\n" +
" \"type\": [\n" +
" \"null\",\n" +
" {\n" +
" \"type\": \"map\",\n" +
" \"values\": \"string\"\n" +
" }\n" +
" ]\n" +
" },\n" +
" {\n" +
" \"name\": \"measures\",\n" +
" \"type\": [\n" +
" \"null\",\n" +
" {\n" +
" \"type\": \"map\",\n" +
" \"values\": \"double\"\n" +
" }\n" +
" ]\n" +
" },\n" +
" {\n" +
" \"name\": \"normalFields\",\n" +
" \"type\": [\n" +
" \"null\",\n" +
" {\n" +
" \"type\": \"map\",\n" +
" \"values\": \"string\"\n" +
" }\n" +
" ]\n" +
" }\n" +
" ]\n" +
"}";
}
package com.zorkdata.tools.kafka;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import javax.security.auth.login.Configuration;
import java.util.*;
/**
 * Thin Kafka producer wrapper: one Avro-byte producer for log/metric payloads
 * and one plain-string producer for error/alarm JSON.
 */
public class Producer {
    // NOTE(review): "kafka-data2" appears twice — presumably one should be
    // kafka-data3; confirm. Overwritten by initConfig() in any case.
    static String servers = "kafka-data1:9092,kafka-data2:9092,kafka-data2:9092";
    static int batchsize = 1;
    static Producer testProducer;
    static String metricTopic;
    static String errorLogTopic;
    static String errorMetricTopic;
    static String alarmTopic;
    static String defaultLogTopic;
    static List<String> cep_change_event_logtypename = new ArrayList<String>();
    static String cep_change_event_topic;
    public static Map<String, String> APPSYSTEM_TOPIC_MAP = new HashMap<String, String>();
    public static Map<String, Map<String, String>> APPSYSTEM_SERVICE_TOPIC_MAP = new HashMap<String, Map<String, String>>();
    static int partition = 6;
    // Fix: these were static but assigned in the constructor, so every new
    // Producer (ProducerPool builds 30) clobbered the clients shared by all
    // earlier instances and leaked the replaced KafkaProducers. They are
    // per-instance state; private, so the change is externally invisible.
    private KafkaProducer<String, byte[]> producer;
    private KafkaProducer<String, String> noAvroProducer;

    /**
     * Builds both Kafka clients. The same Properties object is reused for the
     * second client with only the value serializer swapped to String.
     */
    public Producer() {
        try {
            initConfig();
            Properties props = new Properties();
            props.put("bootstrap.servers", servers);
            props.put("client.id", "webAPI4LogGather");
            props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
            props.put("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer");
            props.put("batch.size", batchsize);
            producer = new KafkaProducer<String, byte[]>(props);
            props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
            noAvroProducer = new KafkaProducer<String, String>(props);
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }

    /** Overrides the default broker list and batch size. */
    public void initConfig() throws Exception {
        servers = "kafka01:9092,kafka02:9092,kafka03:9092";
        batchsize = 100000;
    }

    /** Avro-serializes one log record and sends it to the given topic. */
    public void sendLog(String topic, String logTypeName, String timestamp, String source, String offset,
                        Map<String, String> dimensions, Map<String, Double> metrics, Map<String, String> normalFields) {
        try {
            byte[] bytes = AvroSerializerFactory.getLogAvorSerializer().serializingLog(logTypeName, timestamp, source,
                    offset, dimensions, metrics, normalFields);
            producer.send(new ProducerRecord<String, byte[]>(topic, "", bytes));
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Sends a raw JSON string to the error-log topic. */
    public void sendErrorLog(String logJson) {
        try {
            noAvroProducer.send(new ProducerRecord<String, String>(errorLogTopic, null, logJson));
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Sends a raw JSON string to the error-metric topic. */
    public void sendErrorMetric(String logJson) {
        try {
            noAvroProducer.send(new ProducerRecord<String, String>(errorMetricTopic, null, logJson));
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Sends a raw JSON string to the alarm topic. */
    public void sendAlarm(String alarmJson) {
        try {
            noAvroProducer.send(new ProducerRecord<String, String>(alarmTopic, null, alarmJson));
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Avro-serializes one metric record and sends it to the metric topic. */
    public void sendMetric(String metricSetName, String timestamp, Map<String, String> dimensions,
                           Map<String, Double> metrics) {
        try {
            byte[] bytes = AvroSerializerFactory.getMetricAvorSerializer().serializingMetric(metricSetName, timestamp,
                    dimensions, metrics);
            producer.send(new ProducerRecord<String, byte[]>(metricTopic, "", bytes));
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
package com.zorkdata.tools.kafka;
import java.io.Closeable;
import java.io.IOException;
/**
* ClassName: ProducerPool
* Email: zhuzhigang@zork.com.cn
* Date: 2018\12\13 0013
*
* @author: zhuzhigang
**/
/**
 * Fixed-size round-robin pool of {@link Producer} instances.
 * ClassName: ProducerPool
 * Email: zhuzhigang@zork.com.cn
 * Date: 2018\12\13 0013
 *
 * @author: zhuzhigang
 **/
public class ProducerPool implements Closeable {
    private Producer[] pool;
    private int threadNum = 30;
    // Round-robin cursor; reset once it exceeds 65535 to bound growth.
    private int index = 0;
    private static ProducerPool _interance = null;

    /**
     * Returns the singleton pool, creating it on first use.
     * Synchronized: the original unsynchronized null-check let concurrent
     * callers race and construct two pools (60 Kafka producers).
     */
    public static synchronized ProducerPool getInstance() {
        if (_interance == null) {
            _interance = new ProducerPool();
        }
        return ProducerPool._interance;
    }

    private ProducerPool() {
        init();
    }

    /** Eagerly builds all pooled producers. */
    public void init() {
        pool = new Producer[threadNum];
        for (int i = 0; i < threadNum; i++) {
            pool[i] = new Producer();
        }
    }

    /**
     * Returns the next producer round-robin. The unsynchronized index++ is
     * preserved from the original: a race only skews the rotation, it cannot
     * index out of bounds thanks to the modulo.
     */
    public Producer getProducer() {
        if (index > 65535) {
            index = 0;
        }
        return pool[index++ % threadNum];
    }

    /**
     * Intentionally a no-op: {@link Producer} exposes no close method, so the
     * underlying Kafka clients cannot be released from here.
     *
     * @throws IOException never
     */
    @Override
    public void close() throws IOException {
    }
}
\ No newline at end of file
package com.zorkdata.tools.mock;
/**
* @author: LiaoMingtao
* @date: 2020/6/22
*/
/**
 * Minimal smoke-test entry point used to verify the packaged jar runs.
 */
public class BuildJarTest {
    public static void main(String[] args) {
        // Emit start/end markers so a successful run is visible in the log.
        String[] markers = {"buildJarTest--start", "buildJarTest--end"};
        for (String marker : markers) {
            System.out.println(marker);
        }
    }
}
package com.zorkdata.tools.mock;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import lombok.Data;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
import java.util.Properties;
/**
* @author DeleMing
*/
@Data
public class KafkaProducerUtil {
    private Logger log = LoggerFactory.getLogger(KafkaProducerUtil.class);
    /**
     * Kafka connection settings.
     */
    private String kafkaBootstrapServers;
    private Integer kafkaBatchSize;
    /**
     * SASL authentication settings.
     */
    private boolean kafkaSaslFlag;
    private String kafkaSaslAppkey;
    private String kafkaSaslSecretkey;
    private String kafkaSaslMechanism;
    /**
     * Kerberos authentication settings.
     */
    private boolean kafkaKerberosFlag;
    private String kafkaKerberosKrb5Conf;
    private String kafkaKerberosJaasConf;
    private String kafkaSecurityProtocol;
    private String kafkaSaslKerberosServiceName;
    // Fix: these were static but assigned in the constructors, so every new
    // KafkaProducerUtil clobbered the clients shared by earlier instances and
    // leaked the replaced KafkaProducers. They are per-instance state;
    // private, so the change is externally invisible.
    private KafkaProducer<String, byte[]> producer;
    private KafkaProducer<String, String> noAvroProducer;

    /** Plain connection: no SASL, no Kerberos. */
    public KafkaProducerUtil(String kafkaBootstrapServers, Integer kafkaBatchSize) {
        this.kafkaBootstrapServers = kafkaBootstrapServers;
        this.kafkaBatchSize = kafkaBatchSize;
        this.kafkaSaslFlag = false;
        this.kafkaKerberosFlag = false;
        createKafkaClient();
    }

    /** SASL-authenticated connection. */
    public KafkaProducerUtil(String kafkaBootstrapServers, Integer kafkaBatchSize, boolean kafkaSaslFlag, String kafkaSecurityProtocol,
                             String kafkaSaslMechanism, String kafkaSaslAppkey, String kafkaSaslSecretkey) {
        this.kafkaBootstrapServers = kafkaBootstrapServers;
        this.kafkaBatchSize = kafkaBatchSize;
        this.kafkaSaslFlag = kafkaSaslFlag;
        this.kafkaKerberosFlag = false;
        this.kafkaSecurityProtocol = kafkaSecurityProtocol;
        this.kafkaSaslMechanism = kafkaSaslMechanism;
        this.kafkaSaslAppkey = kafkaSaslAppkey;
        this.kafkaSaslSecretkey = kafkaSaslSecretkey;
        createKafkaClient();
    }

    /** Kerberos-authenticated connection. */
    public KafkaProducerUtil(String kafkaBootstrapServers, Integer kafkaBatchSize, boolean kafkaKerberosFlag, String kafkaKerberosKrb5Conf,
                             String kafkaKerberosJaasConf, String kafkaSecurityProtocol, String kafkaSaslKerberosServiceName, String kafkaSaslMechanism) {
        this.kafkaBootstrapServers = kafkaBootstrapServers;
        this.kafkaBatchSize = kafkaBatchSize;
        this.kafkaSaslFlag = false;
        this.kafkaKerberosFlag = kafkaKerberosFlag;
        this.kafkaSecurityProtocol = kafkaSecurityProtocol;
        this.kafkaSaslMechanism = kafkaSaslMechanism;
        this.kafkaKerberosKrb5Conf = kafkaKerberosKrb5Conf;
        this.kafkaKerberosJaasConf = kafkaKerberosJaasConf;
        this.kafkaSaslKerberosServiceName = kafkaSaslKerberosServiceName;
        createKafkaClient();
    }

    /** Fully-parameterized constructor: both SASL and Kerberos flags. */
    public KafkaProducerUtil(String kafkaBootstrapServers, Integer kafkaBatchSize, boolean kafkaSaslFlag, boolean kafkaKerberosFlag, String kafkaKerberosKrb5Conf,
                             String kafkaKerberosJaasConf, String kafkaSecurityProtocol, String kafkaSaslKerberosServiceName,
                             String kafkaSaslMechanism, String kafkaSaslAppkey, String kafkaSaslSecretkey) {
        this.kafkaBootstrapServers = kafkaBootstrapServers;
        this.kafkaBatchSize = kafkaBatchSize;
        this.kafkaSaslFlag = kafkaSaslFlag;
        this.kafkaKerberosFlag = kafkaKerberosFlag;
        this.kafkaSecurityProtocol = kafkaSecurityProtocol;
        this.kafkaSaslMechanism = kafkaSaslMechanism;
        this.kafkaKerberosKrb5Conf = kafkaKerberosKrb5Conf;
        this.kafkaKerberosJaasConf = kafkaKerberosJaasConf;
        this.kafkaSaslKerberosServiceName = kafkaSaslKerberosServiceName;
        this.kafkaSaslAppkey = kafkaSaslAppkey;
        this.kafkaSaslSecretkey = kafkaSaslSecretkey;
        createKafkaClient();
    }

    /**
     * Builds both Kafka clients from the configured fields.
     * NOTE(review): on any failure this calls System.exit(1) — preserved, but
     * terminating the whole JVM from a utility class is hostile to embedders;
     * consider throwing instead.
     */
    public void createKafkaClient() {
        try {
            Properties props = new Properties();
            props.put("bootstrap.servers", kafkaBootstrapServers);
            props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
            props.put("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer");
            props.put("batch.size", kafkaBatchSize);
            if (kafkaSaslFlag) {
                props.put("security.protocol", kafkaSecurityProtocol);
                props.put("sasl.mechanism", kafkaSaslMechanism);
                //Configuration.setConfiguration(new SaslConfig(kafkaSaslAppkey, kafkaSaslSecretkey));
            }
            if (kafkaKerberosFlag) {
                System.setProperty("java.security.krb5.conf", kafkaKerberosKrb5Conf);
                System.setProperty("java.security.auth.login.config", kafkaKerberosJaasConf);
                props.put("security.protocol", kafkaSecurityProtocol);
                props.put("sasl.kerberos.service.name", kafkaSaslKerberosServiceName);
                props.put("sasl.mechanism", kafkaSaslMechanism);
            }
            producer = new KafkaProducer<String, byte[]>(props);
            props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
            noAvroProducer = new KafkaProducer<String, String>(props);
        } catch (Exception ex) {
            ex.printStackTrace();
            log.error("初始化Kafka失败,系统自动退出! ", ex);
            System.exit(1);
        }
    }

    /** Sends a raw JSON string to the given alarm topic. */
    public void sendAlarm(String topic, String alarmJson) {
        try {
            noAvroProducer.send(new ProducerRecord<String, String>(topic, null, alarmJson));
        } catch (Exception e) {
            log.error("sendAlarm-插入Kafka失败", e);
        }
    }

    /** Avro-serializes one metric record and sends it to the given topic. */
    public void sendMetric(String metricSetName, String timestamp, Map<String, String> dimensions,
                           Map<String, Double> metrics, String topic) {
        try {
            byte[] bytes = AvroSerializerFactory.getMetricAvroSerializer().serializingMetric(metricSetName, timestamp,
                    dimensions, metrics);
            producer.send(new ProducerRecord<String, byte[]>(topic, null, bytes));
        } catch (Exception e) {
            log.error("sendMetric-插入Kafka失败", e);
        }
    }

    /** Avro-serializes one log record and sends it to the given topic. */
    public void sendLog(String topic, String logTypeName, String timestamp, String source, String offset,
                        Map<String, String> dimensions, Map<String, Double> metrics, Map<String, String> normalFields) {
        try {
            byte[] bytes = AvroSerializerFactory.getLogAvroSerializer().serializingLog(logTypeName, timestamp, source,
                    offset, dimensions, metrics, normalFields);
            producer.send(new ProducerRecord<String, byte[]>(topic, null, bytes));
        } catch (Exception e) {
            log.error("sendLog-插入Kafka失败", e);
        }
    }
}
package com.zorkdata.tools.mock;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.TypeReference;
import com.zorkdata.tools.kafka.Producer;
import com.zorkdata.tools.kafka.ProducerPool;
import com.zorkdata.tools.utils.StringUtil;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
/**
 * Mock data generator: builds synthetic log JSON and pushes it through the
 * pooled Avro producer.
 */
public class LogAvroProducer {
    public static final String MEASURES = "measures";
    public static final String DIMENSIONS = "dimensions";
    public static final String TIMESTAMP = "timestamp";
    public static final String LOGTYPENAME = "logTypeName";
    public static final String NORMALFIELDS = "normalFields";
    public static final String SOURCE = "source";
    public static final String OFFSET = "offset";
    public static final String APPSYSTEM = "appsystem";
    public static final String NULLSTR = "";

    public static void main(String[] args) {
        // 10 * 1000 * 1000 * 100 == 1,000,000,000 — within int range.
        int total = 10 * 1000 * 1000 * 100;
        int i = 0;
        Long begin = System.currentTimeMillis();
        while (i < total) {
            Date d = new Date();
            JSONObject logType = new JSONObject();
            logType.put(LOGTYPENAME, "streamx_log_avro");
            logType.put(TIMESTAMP, d.getTime());
            logType.put(SOURCE, "0");
            logType.put(OFFSET, "0");
            JSONObject dimensions = new JSONObject();
            dimensions.put("hostname", "zorkdata" + i);
            dimensions.put("appprogramname", "zorkdata" + i);
            dimensions.put("ip", "192.168.1.1");
            dimensions.put("appsystem", "zorkdata" + i);
            JSONObject normalFields = new JSONObject();
            normalFields.put("countryCode", "SZ" + i);
            normalFields.put("message", "ABCD" + i);
            JSONObject measures = new JSONObject();
            logType.put(DIMENSIONS, dimensions);
            logType.put(NORMALFIELDS, normalFields);
            logType.put(MEASURES, measures);
            sendWhileData("bigdata1", logType);
            i++;
        }
        Long end = System.currentTimeMillis();
        System.out.println("总耗时:" + (end - begin));
    }

    /**
     * Extracts the standard log fields from a JSON payload and sends them via
     * the pooled producer.
     *
     * @param topic destination Kafka topic
     * @param log   payload with logTypeName/timestamp/source/offset plus
     *              optional dimensions/normalFields/measures objects
     * @return "ok" on success, "faild" on any failure (spelling preserved —
     *         callers may compare against it)
     */
    public static String sendWhileData(String topic, JSONObject log) {
        try {
            String logTypeName = log.getString(LOGTYPENAME);
            String _timestamp = log.getString(TIMESTAMP);
            String timestamp = StringUtil.getISOTime(_timestamp);
            String source = log.getString(SOURCE);
            String offset = log.getString(OFFSET);
            if (source == null) {
                source = NULLSTR;
            }
            if (offset == null) {
                offset = NULLSTR;
            }
            JSONObject dimensions = log.getJSONObject(DIMENSIONS);
            JSONObject normalFields = log.getJSONObject(NORMALFIELDS);
            JSONObject measures = log.getJSONObject(MEASURES);
            // Fix: the original read dimensions.getString(APPSYSTEM) into an
            // unused local BEFORE the null-guards below, so a payload without
            // "dimensions" threw an NPE despite the guards. The unused read
            // is removed.
            Map<String, String> dimensionsMap = dimensions != null ? JSONObject.parseObject(dimensions.toJSONString(), new TypeReference<Map<String, String>>() {
            }) : new HashMap<>();
            Map<String, String> normalFieldsMap = normalFields != null ? JSONObject.parseObject(normalFields.toJSONString(), new TypeReference<Map<String, String>>() {
            }) : new HashMap<>();
            Map<String, Double> measuresMap = measures != null ? JSONObject.parseObject(measures.toJSONString(), new TypeReference<Map<String, Double>>() {
            }) : new HashMap<>();
            Producer producer = ProducerPool.getInstance().getProducer();
            producer.sendLog(topic, logTypeName, timestamp, source, offset, dimensionsMap,
                    measuresMap, normalFieldsMap);
            return "ok";
        } catch (Exception e) {
            System.out.println("Please define the log set. The data you sent through the whitelist is in an incorrect format");
            return "faild";
        }
    }
}
package com.zorkdata.tools.mock;
import com.alibaba.fastjson.JSONObject;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.serialization.StringSerializer;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Properties;
import java.util.concurrent.Future;
/**
* @author DeleMing
*/
public class MockConnectJsonData {
private static String topic;
private static String brokerlist;
private static KafkaProducer<String, String> producer;
private static void init() {
Properties props = new Properties();
props.put("bootstrap.servers", brokerlist);
props.put("acks", "-1");
props.put("retries", 1);
props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
props.put("value.serializer", StringSerializer.class.getName());
props.put("batch.size", 16384);
props.put("linger.ms", 1);
props.put("buffer.memory", 33554432);
producer = new KafkaProducer<>(props);
}
/**
* 获取当前采集时间
*
* @return String
*/
private static String getLogTime() {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
return sdf.format(new Date());
}
private static String getCollectTime() {
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd");
return sdf.format(new Date());
}
private static String buildMsg() {
JSONObject jsonObject = new JSONObject();
jsonObject.put("logtypename", "tdx_filebeat");
jsonObject.put("hostname", "kafkaproducer-connect-2");
jsonObject.put("appprogram", "tdx");
jsonObject.put("offset", String.valueOf(System.currentTimeMillis()));
jsonObject.put("message", "10:06:41.335 功能请求 IP:182.140.129.3 MAC:F8A963586DFF 线程:00004364 通道ID:4 事务ID:16 请求:(0-98)集成客户校验(*) 营业部:(0001)国金证券集中交易(*)\\n66650109|************|XshR9/S5SDE=|8|0||12|7.37.0||||||||||0||0|182.140.129.3;PENGKANG;Administrator;83025;Intel(R)Core(TM)i7-4510UCPU@2.00GHz*4;bfebfbff00040651-GenuineIntel;Windows7 Service Pack 1 (Build 7601);182.140.129.3,0.0.0.0,0.0.0.0;F8A963586DFF,00FF8C535532,A0A8CD0D00B0;TF655AWJ16NG2L,143116404707;07/15/2014;8DC03929-0822-453C-A2D5-EFBE95E359BE;182.140.129.3;;NTFS;0C17-8FD7;C:;113G;HTS725050A7E630;GH2Z;TF655AWJ16NG2L;|||||2,Mar 1 2018,10:22:32|0|||GETLOGINPARAM||7.37,6.01,Mar 1 2018,10:37:07|8噝\\\\5\\\\3||||\\n10:06:41.491 调用失败 IP:182.140.129.3 MAC:F8A963586DFF 线程:00004364 通道ID:4 事务ID:16 请求:(0-98)集成客户校验(*) 营业部:(0001)国金证券集中交易(*) 耗时A:156 耗时B:0 排队:0\\n-4|资金账号或密码错误!|0|||\\n10:06:52.678 系统信息 开始关闭交易中心服务。\\n10:06:53.303 系统信息 (HS_TCP2.dll)连接守护线程退出!\\n10:06:53.335 系统信息 (HS_TCP2.dll)\\\"刷新约定购回标的证券信息\\\"线程成功退出!(记录总条数:3536)\\n10:06:54.413 系统信息 港股行情服务: 保存代码表(港股)缓存。\\n10:06:54.678 系统信息 深沪行情服务: 保存代码表缓存。\\n10:06:54.960 系统信息 交易中心服务已经成功关闭。\\n10:06:54.960 系统信息 系统正常关闭\\n");
jsonObject.put("logdate", getLogTime());
jsonObject.put("source", "/opt/log_TDX/20180320.log");
jsonObject.put("collecttime", getCollectTime());
jsonObject.put("appsystem", "tdx");
jsonObject.put("logtimeflag", "true");
System.out.println(jsonObject.toJSONString());
return jsonObject.toString();
}
private static void send(String message) {
    // Fire-and-forget publish to the configured topic with a null key.
    producer.send(new ProducerRecord<>(topic, null, message));
}
/**
 * Publishes 10001 numbered messages ("0".."10000") to the "test" topic.
 */
public static void main(String[] args) {
    topic = "test";
    brokerlist = "kafka01:9092,kafka02:9092,kafka03:9092";
    init();
    for (int i = 0; i <= 10000; i++) {
        send(String.valueOf(i));
    }
    // FIX: flush and close the producer. With batch.size/linger.ms configured,
    // records still buffered at JVM exit were previously silently lost.
    producer.flush();
    producer.close();
}
}
package com.zorkdata.tools.mock;
import com.alibaba.fastjson.JSONObject;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Properties;
import java.util.concurrent.ExecutionException;
import static java.lang.System.*;
/**
* @author DeleMing
*/
/**
 * Interactive mock-data generator: prompts on stdin for topic / broker /
 * dimension counts, then publishes the cartesian product of mocked
 * appsystem x appprogramname x clustername x servicename x ip records
 * (filebeat-shaped JSON) to Kafka.
 *
 * @author DeleMing
 */
public class MockFilebeatDataToKafka {
    private static String topic;
    private static String brokerlist = "kafka01:9092,kafka02:9092,kafka03:9092";
    private static KafkaProducer<String, String> producer;
    private static String APP_SYSTEM = "TEST_JTY";
    private static String CLUSTER_NAME = "TEST_CLUSTER";
    private static String SERVICE_CODE = "TEST_SERVICE";
    private static String APP_PROGRAM_NAME = "TEST_SERVICE";
    private static String IP = "192.168.1.";
    private static String HOSTNAME = "zorkdata-";
    // Fixed pre-escaped TDX trading-center log sample used as the message body.
    private static String MESSAGE = "10:06:41.335 功能请求 IP:182.140.129.3 MAC:F8A963586DFF 线程:00004364 通道ID:4 事务ID:16 请求:(0-98)集成客户校验(*) 营业部:(0001)国金证券集中交易(*)\\n66650109|************|XshR9/S5SDE=|8|0||12|7.37.0||||||||||0||0|182.140.129.3;PENGKANG;Administrator;83025;Intel(R)Core(TM)i7-4510UCPU@2.00GHz*4;bfebfbff00040651-GenuineIntel;Windows7 Service Pack 1 (Build 7601);182.140.129.3,0.0.0.0,0.0.0.0;F8A963586DFF,00FF8C535532,A0A8CD0D00B0;TF655AWJ16NG2L,143116404707;07/15/2014;8DC03929-0822-453C-A2D5-EFBE95E359BE;182.140.129.3;;NTFS;0C17-8FD7;C:;113G;HTS725050A7E630;GH2Z;TF655AWJ16NG2L;|||||2,Mar 1 2018,10:22:32|0|||GETLOGINPARAM||7.37,6.01,Mar 1 2018,10:37:07|8噝\\\\5\\\\3||||\\n10:06:41.491 调用失败 IP:182.140.129.3 MAC:F8A963586DFF 线程:00004364 通道ID:4 事务ID:16 请求:(0-98)集成客户校验(*) 营业部:(0001)国金证券集中交易(*) 耗时A:156 耗时B:0 排队:0\\n-4|资金账号或密码错误!|0|||\\n10:06:52.678 系统信息 开始关闭交易中心服务。\\n10:06:53.303 系统信息 (HS_TCP2.dll)连接守护线程退出!\\n10:06:53.335 系统信息 (HS_TCP2.dll)\\\"刷新约定购回标的证券信息\\\"线程成功退出!(记录总条数:3536)\\n10:06:54.413 系统信息 港股行情服务: 保存代码表(港股)缓存。\\n10:06:54.678 系统信息 深沪行情服务: 保存代码表缓存。\\n10:06:54.960 系统信息 交易中心服务已经成功关闭。\\n10:06:54.960 系统信息 系统正常关闭\\n";

    /** Creates the singleton producer from the configured broker list. */
    private static void init() {
        Properties props = new Properties();
        props.put("bootstrap.servers", brokerlist);
        // acks=-1 (all replicas) with one retry: favors durability.
        props.put("acks", "-1");
        props.put("retries", 1);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", StringSerializer.class.getName());
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        producer = new KafkaProducer<>(props);
    }

    /**
     * @param length number of names to generate
     * @return {"JTY1", "JTY2", ...} of the requested length
     */
    public static String[] mockAppSystem(int length) {
        String[] strArr = new String[length];
        final String appSystem = "JTY";
        for (int i = 0; i < length; i++) {
            strArr[i] = appSystem + (i + 1);
        }
        return strArr;
    }

    /**
     * @param str    prefix for every generated name
     * @param length number of names to generate
     * @return {str+"1", str+"2", ...} of the requested length
     */
    public static String[] mockStrArr(String str, int length) {
        String[] strArr = new String[length];
        for (int i = 0; i < length; i++) {
            strArr[i] = str + (i + 1);
        }
        return strArr;
    }

    /**
     * 获取当前采集时间
     *
     * @return String
     */
    private static String getLogTime() {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
        return sdf.format(new Date());
    }

    /** @return current date as compact yyyyMMdd */
    private static String getCollectTime() {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd");
        return sdf.format(new Date());
    }

    /**
     * Builds one record from the base template, overriding the dimension
     * fields and appending the dimensions to the message body so every
     * combination produces a distinct payload.
     */
    private static String buildMsg(String appSystem, String appProgramName, String ip, String clusterName, String serviceCode) {
        JSONObject jsonObject = buildBaseMsg();
        jsonObject.put("appsystem", appSystem);
        jsonObject.put("appprogramname", appProgramName);
        jsonObject.put("clustername", clusterName);
        jsonObject.put("servicecode", serviceCode);
        StringBuilder str = new StringBuilder(MESSAGE);
        // .toString(): store an immutable String rather than the mutable builder.
        jsonObject.put("message", str.append(appSystem).append(appProgramName).append(clusterName).append(ip).toString());
        return jsonObject.toJSONString();
    }

    /** @return a filebeat-6.8.1-shaped record with fixed placeholder fields */
    private static JSONObject buildBaseMsg() {
        JSONObject filebeatJson = new JSONObject();
        JSONObject metadataJson = new JSONObject();
        metadataJson.put("beat", "filebeat");
        metadataJson.put("type", "doc");
        metadataJson.put("version", "6.8.1");
        JSONObject inputJson = new JSONObject();
        inputJson.put("type", "log");
        JSONObject beatJson = new JSONObject();
        beatJson.put("hostname", "zorkdata-151");
        beatJson.put("version", "6.8.1");
        JSONObject hostJson = new JSONObject();
        hostJson.put("name", "zorkdata-151");
        hostJson.put("architecture", "x86_64");
        hostJson.put("id", "8e3dfc85999b4e02bae4adf4b92b909a");
        hostJson.put("containerized", "false");
        JSONObject logJson = new JSONObject();
        logJson.put("file", "{ \"path\": \"/var/log/nginx/access.log\" }");
        filebeatJson.put("@timestamp", "2020-06-19T01:29:44.181Z");
        filebeatJson.put("source", "/var/log/nginx/access.log");
        filebeatJson.put("offset", String.valueOf(currentTimeMillis()));
        filebeatJson.put("message", "10:06:41.335 功能请求 IP:182.140.129.3 MAC:F8A963586DFF 线程:00004364 通道ID:4 事务ID:16 请求:(0-98)集成客户校验(*) 营业部:(0001)国金证券集中交易(*)\\n66650109|************|XshR9/S5SDE=|8|0||12|7.37.0||||||||||0||0|182.140.129.3;PENGKANG;Administrator;83025;Intel(R)Core(TM)i7-4510UCPU@2.00GHz*4;bfebfbff00040651-GenuineIntel;Windows7 Service Pack 1 (Build 7601);182.140.129.3,0.0.0.0,0.0.0.0;F8A963586DFF,00FF8C535532,A0A8CD0D00B0;TF655AWJ16NG2L,143116404707;07/15/2014;8DC03929-0822-453C-A2D5-EFBE95E359BE;182.140.129.3;;NTFS;0C17-8FD7;C:;113G;HTS725050A7E630;GH2Z;TF655AWJ16NG2L;|||||2,Mar 1 2018,10:22:32|0|||GETLOGINPARAM||7.37,6.01,Mar 1 2018,10:37:07|8噝\\\\\\\\5\\\\\\\\3||||\\\\n10:06:41.491 调用失败 IP:182.140.129.3 MAC:F8A963586DFF 线程:00004364 通道ID:4 事务ID:16 请求:(0-98)集成客户校验(*) 营业部:(0001)国金证券集中交易(*) 耗时A:156 耗时B:0 排队:0\\\\n-4|资金账号或密码错误!|0|||\\\\n10:06:52.678 系统信息 开始关闭交易中心服务。\\\\n10:06:53.303 系统信息 (HS_TCP2.dll)连接守护线程退出!\\\\n10:06:53.335 系统信息 (HS_TCP2.dll)\\\\\\\"刷新约定购回标的证券信息\\\\\\\"线程成功退出!(记录总条数:3536)\\\\n10:06:54.413 系统信息 港股行情服务: 保存代码表(港股)缓存。\\\\n10:06:54.678 系统信息 深沪行情服务: 保存代码表缓存。\\\\n10:06:54.960 系统信息 交易中心服务已经成功关闭。\\\\n10:06:54.960 系统信息 系统正常关闭\\\\n");
        filebeatJson.put("appsystem", "test_appsystem");
        filebeatJson.put("appprogramname", "test_appprogramname");
        filebeatJson.put("clustername", "test_clustername");
        filebeatJson.put("logTypeName", "test_topic_log");
        filebeatJson.put("servicename", "test_servicename");
        filebeatJson.put("servicecode", "test_cdde");
        filebeatJson.put("collectorruleid", "1");
        filebeatJson.put("@metadata", metadataJson);
        filebeatJson.put("input", inputJson);
        filebeatJson.put("beat", beatJson);
        filebeatJson.put("host", hostJson);
        filebeatJson.put("log", logJson);
        filebeatJson.put("prospector", inputJson);
        return filebeatJson;
    }

    /**
     * Standalone variant of {@link #buildBaseMsg()} kept for callers that want
     * a fully-fixed record (no dimension overrides) serialized directly.
     */
    private static String buildMsg() {
        JSONObject filebeatJson = new JSONObject();
        JSONObject metadataJson = new JSONObject();
        metadataJson.put("beat", "filebeat");
        metadataJson.put("type", "doc");
        metadataJson.put("version", "6.8.1");
        JSONObject inputJson = new JSONObject();
        inputJson.put("type", "log");
        JSONObject beatJson = new JSONObject();
        beatJson.put("name", "zorkdata-151");
        beatJson.put("hostname", "zorkdata-151");
        beatJson.put("version", "6.8.1");
        JSONObject hostJson = new JSONObject();
        hostJson.put("name", "zorkdata-151");
        hostJson.put("architecture", "x86_64");
        hostJson.put("id", "8e3dfc85999b4e02bae4adf4b92b909a");
        hostJson.put("containerized", "false");
        JSONObject logJson = new JSONObject();
        logJson.put("file", "{ \"path\": \"/var/log/nginx/access.log\" }");
        filebeatJson.put("@timestamp", "2020-06-19T01:29:44.181Z");
        filebeatJson.put("source", "/var/log/nginx/access.log");
        filebeatJson.put("offset", String.valueOf(currentTimeMillis()));
        filebeatJson.put("message", "10:06:41.335 功能请求 IP:182.140.129.3 MAC:F8A963586DFF 线程:00004364 通道ID:4 事务ID:16 请求:(0-98)集成客户校验(*) 营业部:(0001)国金证券集中交易(*)\\n66650109|************|XshR9/S5SDE=|8|0||12|7.37.0||||||||||0||0|182.140.129.3;PENGKANG;Administrator;83025;Intel(R)Core(TM)i7-4510UCPU@2.00GHz*4;bfebfbff00040651-GenuineIntel;Windows7 Service Pack 1 (Build 7601);182.140.129.3,0.0.0.0,0.0.0.0;F8A963586DFF,00FF8C535532,A0A8CD0D00B0;TF655AWJ16NG2L,143116404707;07/15/2014;8DC03929-0822-453C-A2D5-EFBE95E359BE;182.140.129.3;;NTFS;0C17-8FD7;C:;113G;HTS725050A7E630;GH2Z;TF655AWJ16NG2L;|||||2,Mar 1 2018,10:22:32|0|||GETLOGINPARAM||7.37,6.01,Mar 1 2018,10:37:07|8噝\\\\\\\\5\\\\\\\\3||||\\\\n10:06:41.491 调用失败 IP:182.140.129.3 MAC:F8A963586DFF 线程:00004364 通道ID:4 事务ID:16 请求:(0-98)集成客户校验(*) 营业部:(0001)国金证券集中交易(*) 耗时A:156 耗时B:0 排队:0\\\\n-4|资金账号或密码错误!|0|||\\\\n10:06:52.678 系统信息 开始关闭交易中心服务。\\\\n10:06:53.303 系统信息 (HS_TCP2.dll)连接守护线程退出!\\\\n10:06:53.335 系统信息 (HS_TCP2.dll)\\\\\\\"刷新约定购回标的证券信息\\\\\\\"线程成功退出!(记录总条数:3536)\\\\n10:06:54.413 系统信息 港股行情服务: 保存代码表(港股)缓存。\\\\n10:06:54.678 系统信息 深沪行情服务: 保存代码表缓存。\\\\n10:06:54.960 系统信息 交易中心服务已经成功关闭。\\\\n10:06:54.960 系统信息 系统正常关闭\\\\n");
        filebeatJson.put("appsystem", "test_appsystem");
        filebeatJson.put("appprogramname", "test_appprogramname");
        filebeatJson.put("logTypeName", "test_topic_log");
        filebeatJson.put("servicename", "test_servicename");
        filebeatJson.put("servicecode", "test_cdde");
        filebeatJson.put("collectorruleid", "1");
        filebeatJson.put("@metadata", metadataJson);
        filebeatJson.put("input", inputJson);
        filebeatJson.put("beat", beatJson);
        filebeatJson.put("host", hostJson);
        filebeatJson.put("log", logJson);
        filebeatJson.put("prospector", inputJson);
        return filebeatJson.toJSONString();
    }

    /** Synchronous send (blocks on the future) so failures surface per record. */
    private static void send(String message) {
        ProducerRecord<String, String> producerRecord = new ProducerRecord<>(topic, null, message);
        try {
            producer.send(producerRecord).get();
        } catch (InterruptedException e) {
            e.printStackTrace();
        } catch (ExecutionException e) {
            e.printStackTrace();
        }
    }

    public static void main(String[] args) throws IOException {
        InputStreamReader is = new InputStreamReader(in);
        BufferedReader br = new BufferedReader(is);
        topic = input(br, 1, "请输入数据输入的topic名称(例如:topic100w)", "请输入topic名称:");
        brokerlist = input(br, 2, "请输入kafka集群地址啊(例如:kafka01:9092,kafka02:9092,kafka03:9092),回车使用默认参数:", "已使用默认参数:" + brokerlist);
        APP_SYSTEM = input(br, 3, "请输入appSystem,回车使用默认参数:", "已使用默认参数:" + APP_SYSTEM);
        APP_PROGRAM_NAME = input(br, 4, "请输入appProgramName,回车使用默认参数:", "已使用默认参数:" + APP_PROGRAM_NAME);
        CLUSTER_NAME = input(br, 5, "请输入clusterName,回车使用默认参数:", "已使用默认参数:" + CLUSTER_NAME);
        SERVICE_CODE = input(br, 6, "请输入serviceName,回车使用默认参数:", "已使用默认参数:" + SERVICE_CODE);
        IP = input(br, 7, "请输入ip,回车使用默认参数:", "已使用默认参数:" + IP);
        int appSystemLength = Integer.parseInt(input(br, 8, "请输入模拟appsystem个数(例如:10)", "请输入大于零的数字"));
        int appProgramNameLength = Integer.parseInt(input(br, 9, "请输入模拟appProgramName个数(例如:10)", "请输入大于零的数字"));
        int clusterNameLength = Integer.parseInt(input(br, 10, "请输入模拟clusterName个数(例如:10)", "请输入大于零的数字"));
        int serviceNameLength = Integer.parseInt(input(br, 11, "请输入模拟serviceName个数(例如:10)", "请输入大于零的数字"));
        int ipLength = Integer.parseInt(input(br, 12, "请输入模拟ip个数(例如:10)", "请输入大于零的数字"));
        init();
        // FIX: each array is now sized with its OWN requested count. Previously
        // all five used appSystemLength, so any larger count caused an
        // ArrayIndexOutOfBoundsException in the loops below.
        String[] appSystemArr = mockStrArr(APP_SYSTEM, appSystemLength);
        String[] appProgramNameArr = mockStrArr(APP_PROGRAM_NAME, appProgramNameLength);
        String[] clusterNameArr = mockStrArr(CLUSTER_NAME, clusterNameLength);
        String[] serviceNameArr = mockStrArr(SERVICE_CODE, serviceNameLength);
        String[] ipArr = mockStrArr(IP, ipLength);
        int count = 0;
        for (int a = 0; a < appSystemLength; a++) {
            for (int b = 0; b < appProgramNameLength; b++) {
                for (int c = 0; c < clusterNameLength; c++) {
                    for (int d = 0; d < serviceNameLength; d++) {
                        for (int e = 0; e < ipLength; e++) {
                            // FIX: index each array with its own loop variable
                            // (c and d were swapped in the original).
                            String message = buildMsg(appSystemArr[a], appProgramNameArr[b], ipArr[e], clusterNameArr[c], serviceNameArr[d]);
                            out.println(message);
                            out.println(++count);
                            send(message);
                        }
                    }
                }
            }
        }
        // Release buffered records and the producer's network resources.
        producer.flush();
        producer.close();
    }

    /**
     * Prompts until an acceptable value is read.
     * Type 1 requires a non-empty value; types 2-7 fall back to the current
     * default on empty input; anything else just requires non-empty.
     *
     * @param br           reader wrapping stdin
     * @param type         which parameter is being asked for (1..12)
     * @param explain      prompt text
     * @param errorExplain message shown on empty input / fallback notice
     * @return the accepted value
     * @throws IOException if reading stdin fails
     */
    public static String input(BufferedReader br, Integer type, String explain, String errorExplain) throws IOException {
        out.println(explain);
        boolean success = false;
        String inputString = br.readLine();
        switch (type.intValue()) {
            case 1:
                if (null == inputString || "".equals(inputString)) {
                    out.println(errorExplain);
                } else {
                    success = true;
                }
                break;
            case 2:
                if (null == inputString || "".equals(inputString)) {
                    out.println(errorExplain);
                    inputString = brokerlist;
                }
                success = true;
                break;
            case 3:
                if (null == inputString || "".equals(inputString)) {
                    out.println(errorExplain);
                    inputString = APP_SYSTEM;
                }
                // FIX (cases 3-7): a non-empty custom value previously left
                // success == false, re-prompting forever; accept it like case 2.
                success = true;
                break;
            case 4:
                if (null == inputString || "".equals(inputString)) {
                    out.println(errorExplain);
                    inputString = APP_PROGRAM_NAME;
                }
                success = true;
                break;
            case 5:
                if (null == inputString || "".equals(inputString)) {
                    out.println(errorExplain);
                    inputString = CLUSTER_NAME;
                }
                success = true;
                break;
            case 6:
                if (null == inputString || "".equals(inputString)) {
                    out.println(errorExplain);
                    inputString = SERVICE_CODE;
                }
                success = true;
                break;
            case 7:
                if (null == inputString || "".equals(inputString)) {
                    out.println(errorExplain);
                    inputString = IP;
                }
                success = true;
                break;
            default:
                if (null == inputString || "".equals(inputString)) {
                    out.println(errorExplain);
                } else {
                    success = true;
                }
                break;
        }
        if (success) {
            return inputString;
        }
        return input(br, type, explain, errorExplain);
    }
}
package com.zorkdata.tools.mock;
import com.alibaba.fastjson.JSONObject;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.apache.kafka.common.serialization.StringSerializer;
import java.util.Properties;
import java.util.concurrent.ExecutionException;
/**
* @author DeleMing
*/
/**
 * Publishes 101 small JSON rows (id/user_id/name/content) to the
 * "flinkx_json" topic for flinkx ingestion tests.
 *
 * @author DeleMing
 */
public class MockFlinkxJson {
    private static String topic = "flinkx_json";
    private static String brokerAddr = "zorkdata-95:9092";
    private static ProducerRecord<String, String> producerRecord = null;
    private static KafkaProducer<String, String> producer = null;

    /** Creates the shared producer (String key/value serializers, acks=1). */
    public static void init() {
        Properties props = new Properties();
        props.put("bootstrap.servers", brokerAddr);
        props.put("acks", "1");
        props.put("retries", 0);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", StringSerializer.class.getName());
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        producer = new KafkaProducer<String, String>(props);
    }

    /**
     * Sends one message with a null key.
     * FIX: the producer is now created lazily ONCE; the original called
     * init() on every send, leaking a new unclosed KafkaProducer per record.
     */
    public static void send(String topic, String msg) throws ExecutionException, InterruptedException {
        if (producer == null) {
            init();
        }
        producerRecord = new ProducerRecord<String, String>(
                topic,
                null,
                msg
        );
        producer.send(producerRecord);
    }

    public static void main(String[] args) throws Exception {
        for (int i = 0; i <= 100; i++) {
            // {"user_id":"59","name":"xs-59","id":"59","content":"xd"}
            JSONObject jsonObject = new JSONObject();
            jsonObject.put("id", "" + i);
            jsonObject.put("user_id", "" + i);
            jsonObject.put("name", "jack" + i);
            jsonObject.put("content", "xxxx");
            String json = jsonObject.toJSONString();
            send(topic, json);
        }
        // Flush buffered records and release network resources before exit.
        producer.close();
    }
}
package com.zorkdata.tools.mock;
import com.alibaba.fastjson.JSONObject;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import com.zorkdata.tools.utils.DateUtil;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ExecutionException;
/**
* @author DeleMing
*/
/**
 * Publishes Avro-serialized mock log records (dimensions/measures/normal
 * fields) to the "zorkdata_log_test" topic, mimicking kafka-connect output.
 *
 * @author DeleMing
 */
public class MockKafkaConnect {
    // private static String topic = "test";
    private static String topic = "zorkdata_log_test";
    private static String brokerAddr = "zorkdata-95:9092";
    private static ProducerRecord<String, byte[]> producerRecord = null;
    private static KafkaProducer<String, byte[]> producer = null;

    /** Creates the shared producer (String key, byte[] value, acks=1). */
    public static void init() {
        Properties props = new Properties();
        props.put("bootstrap.servers", brokerAddr);
        props.put("acks", "1");
        props.put("retries", 0);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", ByteArraySerializer.class.getName());
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        producer = new KafkaProducer<String, byte[]>(props);
    }

    /**
     * @return one fixed sample record serialized with the project's Avro
     *         log serializer
     */
    public static byte[] buildKafkaConnect() {
        String logTypeName = "tc50_biz_filebeat";
        String timestamp = DateUtil.getUTCTimeStr();
        String source = "/opt/20191231.log";
        String offset = String.valueOf(6322587L);
        Map<String, String> dimensions = new HashMap<>();
        dimensions.put("hostname", "localhost");
        dimensions.put("appprogramname", "tc50");
        dimensions.put("appsystem", "TXJY");
        Map<String, Double> measures = new HashMap<>();
        measures.put("latence", 301.0);
        Map<String, String> normalFields = new HashMap<>();
        normalFields.put("message", "成功处理");
        AvroSerializer avroSerializer = AvroSerializerFactory.getLogAvroSerializer();
        byte[] bytes = avroSerializer.serializingLog(logTypeName, timestamp, source, offset, dimensions, measures, normalFields);
        return bytes;
    }

    /** Convenience overload: sends the fixed sample record. */
    public static void send(String topic) throws ExecutionException, InterruptedException {
        byte[] req = buildKafkaConnect();
        send(topic, req);
    }

    /**
     * Sends one byte[] payload with a null key.
     * FIX: the producer is now created lazily ONCE; the original called
     * init() on every send, leaking a new unclosed KafkaProducer per record.
     */
    public static void send(String topic, byte[] msg) throws ExecutionException, InterruptedException {
        if (producer == null) {
            init();
        }
        producerRecord = new ProducerRecord<String, byte[]>(
                topic,
                null,
                msg
        );
        producer.send(producerRecord);
    }

    public static void main(String[] args) throws Exception {
        for (int i = 0; i <= 100; i++) {
            String logTypeName = "tc50_biz_filebeat";
            String timestamp = DateUtil.getUTCTimeStr();
            String source = "/opt/20191231.log";
            String offset = String.valueOf(6322587L);
            Map<String, String> dimensions = new HashMap<>();
            dimensions.put("hostname", "localhost");
            dimensions.put("appprogramname", "tc50");
            dimensions.put("appsystem", "TXJY");
            Map<String, Double> measures = new HashMap<>();
            measures.put("latence", 301.0);
            Map<String, String> normalFields = new HashMap<>();
            normalFields.put("message", "成功处理");
            normalFields.put("id", String.valueOf(i));
            JSONObject jsonObject = new JSONObject();
            jsonObject.put("logTypeName", logTypeName);
            jsonObject.put("timestamp", timestamp);
            jsonObject.put("source", source);
            jsonObject.put("offset", offset);
            jsonObject.put("dimensions", dimensions);
            jsonObject.put("measures", measures);
            jsonObject.put("normalFields", normalFields);
            System.out.println(jsonObject.toJSONString());
            AvroSerializer avroSerializer = AvroSerializerFactory.getLogAvroSerializer();
            byte[] bytes = avroSerializer.serializingLog(logTypeName, timestamp, source, offset, dimensions, measures, normalFields);
            send(topic, bytes);
        }
        // Flush buffered records and release network resources before exit.
        if (producer != null) {
            producer.close();
        }
    }
}
package com.zorkdata.tools.mock;
import com.alibaba.fastjson.JSONObject;
import com.zorkdata.tools.utils.*;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
* @author DeleMing
*/
/**
 * Reads log.size from a properties file and pushes that many mock Avro log
 * records through the project's pooled producer, timing the run.
 *
 * @author DeleMing
 */
public class MockKafkaConnectAvro {
    /**
     * @param propertiesName properties file to load
     * @return value of log.size (default "5000"), parsed leniently
     */
    private static long getSize(String propertiesName) throws Exception {
        Properties config = PropertiesUtil.getProperties(propertiesName);
        return StringUtil.getLong(config.getProperty("log.size", "5000").trim(), 1);
    }

    /** Renders one record as JSON (for console echo only, not for sending). */
    public static String sum(String logTypeName, String timestamp, String source, String offset,
                             Map<String, String> dimensions, Map<String, Double> metrics, Map<String, String> normalFields) {
        JSONObject record = new JSONObject();
        record.put("logTypeName", logTypeName);
        record.put("timestamp", timestamp);
        record.put("source", source);
        record.put("offset", offset);
        record.put("dimensions", dimensions);
        record.put("metrics", metrics);
        record.put("normalFields", normalFields);
        return record.toString();
    }

    public static void main(String[] args) throws Exception {
        long start = System.currentTimeMillis();
        if (args.length == 0) {
            System.out.println("请指定配置文件");
            System.exit(-1);
        }
        String propertiesName = args[0];
        long size = getSize(propertiesName);
        for (int i = 0; i < size; i++) {
            String logTypeName = "tc50_biz_filebeat";
            String timestamp = DateUtil.getUTCTimeStr();
            String source = "/opt/20191231.log";
            String offset = String.valueOf(6322587L);
            Map<String, String> dimensions = new HashMap<>();
            dimensions.put("hostname", "localhost");
            dimensions.put("appprogramname", "tc50");
            dimensions.put("appsystem", "TXJY");
            Map<String, Double> measures = new HashMap<>();
            measures.put("latence", 301.0);
            Map<String, String> normalFields = new HashMap<>();
            normalFields.put("message", "成功处理");
            normalFields.put("id", String.valueOf(i));
            System.out.println(sum(logTypeName, timestamp, source, offset, dimensions, measures, normalFields));
            // Producers are pooled by the project; fetch one per record.
            CustomerProducer producer = ProducerPool.getInstance(propertiesName).getProducer();
            producer.sendLog(logTypeName, timestamp, source, offset, dimensions, measures, normalFields);
        }
        long end = System.currentTimeMillis();
        // Give async sends a moment to drain before the JVM exits.
        Thread.sleep(5000);
        System.out.println("写入 " + size + " 条数据,一共耗时 " + (end - start) + " ms");
    }
}
package com.zorkdata.tools.mock;
import com.zorkdata.tools.utils.DateUtil;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
* @author DeleMing
*/
/**
 * Standalone benchmark: writes `size` Avro-serialized mock log records to a
 * topic. Optional args: &lt;size&gt; &lt;topic&gt; &lt;brokerList&gt;.
 *
 * @author DeleMing
 */
public class MockKafkaConnectAvroTest {
    public static void main(String[] args) throws Exception {
        long start = System.currentTimeMillis();
        int size = 1000;
        String topic = "info";
        String brokerAddr = "kafka-1:9092,kafka-2:9092,kafka-3:9092";
        if (args.length == 3) {
            size = Integer.valueOf(args[0]);
            topic = args[1];
            brokerAddr = args[2];
            System.out.println("请输出 topic 以及 kafka 地址");
        }
        Properties props = new Properties();
        props.put("bootstrap.servers", brokerAddr);
        props.put("acks", "1");
        props.put("retries", 0);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", ByteArraySerializer.class.getName());
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        // FIX: create ONE producer for the whole run. The original constructed
        // and closed a brand-new KafkaProducer inside the loop for every
        // single record, which dominates the measured time.
        KafkaProducer<String, byte[]> producer = new KafkaProducer<String, byte[]>(props);
        for (int i = 0; i <= size; i++) {
            String logTypeName = "tc50_biz_filebeat";
            String timestamp = DateUtil.getUTCTimeStr();
            String source = "/opt/20191231.log";
            String offset = String.valueOf(6322587L);
            Map<String, String> dimensions = new HashMap<>();
            dimensions.put("hostname", "localhost");
            dimensions.put("appprogramname", "tc50");
            dimensions.put("appsystem", "TXJY");
            Map<String, Double> measures = new HashMap<>();
            measures.put("latence", 301.0);
            Map<String, String> normalFields = new HashMap<>();
            normalFields.put("message", "成功处理");
            normalFields.put("id", String.valueOf(i));
            AvroSerializer avroSerializer = AvroSerializerFactory.getLogAvroSerializer();
            byte[] bytes = avroSerializer.serializingLog(logTypeName, timestamp, source, offset, dimensions, measures, normalFields);
            ProducerRecord<String, byte[]> producerRecord = new ProducerRecord<String, byte[]>(
                    topic,
                    null,
                    bytes
            );
            producer.send(producerRecord);
        }
        // close() flushes all buffered records, then releases resources.
        producer.close();
        long end = System.currentTimeMillis();
        // FIX: print the summary once after the loop, not on every iteration.
        System.out.println("写入 " + size + " 条数据,一共耗时 " + (end - start) + " ms");
    }
}
package com.zorkdata.tools.mock;
import com.alibaba.fastjson.JSONObject;
import com.zorkdata.tools.utils.CustomerProducer;
import com.zorkdata.tools.utils.ProducerPool;
import com.zorkdata.tools.utils.PropertiesUtil;
import com.zorkdata.tools.utils.StringUtil;
import java.util.Properties;
/**
* @author DeleMing
*/
/**
 * Writes `log.size` JSON log records (className + a fixed ~450-char English
 * message) through the project's pooled producer, timing the run.
 *
 * @author DeleMing
 */
public class MockKafkaConnectJson {
    /**
     * @param propertiesName properties file to load
     * @return value of log.size (default "5000"), parsed leniently
     */
    private static long getSize(String propertiesName) throws Exception {
        Properties properties = PropertiesUtil.getProperties(propertiesName);
        long logSize = StringUtil.getLong(properties.getProperty("log.size", "5000").trim(), 1);
        return logSize;
    }

    /** @return serialized length of one sample record (diagnostic helper) */
    private static int dataSize() {
        JSONObject jsonObject = new JSONObject();
        String className = "MockKafkaConnectJson";
        String message = "Apache Flink is an open source platform for distributed stream and batch data processing. Flink’s core is a streaming dataflow engine that provides data distribution, communication, and fault tolerance for distributed computations over data streams. Flink builds batch processing on top of the streaming engine, overlaying native iteration support, managed memory, and program optimization. This documentation is for Apache Flink version 1.10. These p";
        jsonObject.put("className", className);
        jsonObject.put("message", message);
        int length = jsonObject.toJSONString().length();
        // FIX: the length was accidentally printed twice.
        System.out.println(length);
        return length;
    }

    public static void main(String[] args) throws Exception {
        long start = System.currentTimeMillis();
        if (args.length == 0) {
            System.out.println("请指定配置文件");
            System.exit(-1);
        }
        String propertiesName = args[0];
        long size = getSize(propertiesName);
        // FIX: i < size — the old i <= size wrote size+1 records while the
        // summary below (and the sibling MockKafkaConnectAvro) report `size`.
        for (int i = 0; i < size; i++) {
            JSONObject jsonObject = new JSONObject();
            String className = "MockKafkaConnectJson";
            String message = "Apache Flink is an open source platform for distributed stream and batch data processing. Flink’s core is a streaming dataflow engine that provides data distribution, communication, and fault tolerance for distributed computations over data streams. Flink builds batch processing on top of the streaming engine, overlaying native iteration support, managed memory, and program optimization. This documentation is for Apache Flink version 1.10. These p";
            jsonObject.put("className", className);
            jsonObject.put("message", message);
            int length = jsonObject.toJSONString().length();
            System.out.println(length);
            CustomerProducer producer = ProducerPool.getInstance(propertiesName).getProducer();
            producer.sendJsonLog(jsonObject.toJSONString());
        }
        long end = System.currentTimeMillis();
        System.out.println("写入 " + size + " 条数据,一共耗时 " + (end - start) + " ms");
    }
}
package com.zorkdata.tools.mock;
import com.alibaba.fastjson.JSON;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;
import com.zorkdata.tools.pojo.MetricEvent;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ExecutionException;
/**
* @author DeleMing
*/
/**
 * Publishes 300 MetricEvent samples (cpu/disk usage with a hostname tag) as
 * JSON to the "flink-metric" topic.
 *
 * @author DeleMing
 */
public class MockMetricEvent {
    private static String topic = "flink-metric";
    private static String brokerAddr = "zorkdata-91:9092";
    private static ProducerRecord<String, String> producerRecord = null;
    private static KafkaProducer<String, String> producer = null;

    /** Creates the shared producer (String key/value serializers, acks=1). */
    public static void init() {
        Properties props = new Properties();
        props.put("bootstrap.servers", brokerAddr);
        props.put("acks", "1");
        props.put("retries", 0);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", StringSerializer.class.getName());
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        // Parameterized type instead of the original raw KafkaProducer(props).
        producer = new KafkaProducer<String, String>(props);
    }

    /** @return one MetricEvent (fixed fields, current timestamp) as JSON */
    public static String buildMetricEvent() {
        String name = "metric";
        Long timestamp = System.currentTimeMillis();
        Map<String, Object> fields = new HashMap<>();
        fields.put("cpu_used", 0.6);
        fields.put("disk_used", 0.4);
        Map<String, String> tags = new HashMap<>();
        tags.put("hostname", "localhost");
        MetricEvent metricEvent = new MetricEvent(name, timestamp, fields, tags);
        return JSON.toJSONString(metricEvent);
    }

    /**
     * Builds and synchronously sends one event.
     * FIX: the producer is now created lazily ONCE; the original called
     * init() on every send, leaking 300 unclosed KafkaProducer instances.
     */
    public static void send() throws ExecutionException, InterruptedException {
        if (producer == null) {
            init();
        }
        String req = buildMetricEvent();
        System.out.println(req);
        producerRecord = new ProducerRecord<String, String>(
                topic,
                null,
                req
        );
        producer.send(producerRecord).get();
    }

    public static void main(String[] args) throws ExecutionException, InterruptedException {
        for (int i = 0; i < 300; i++) {
            send();
        }
        // Flush buffered records and release network resources before exit.
        producer.close();
    }
}
package com.zorkdata.tools.mock;
import com.alibaba.fastjson.JSONObject;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.apache.kafka.common.serialization.StringSerializer;
import java.util.Date;
import java.util.Properties;
/**
* @author DeleMing
*/
/**
 * Publishes 101 log4j-shaped JSON records (className/methodName/datetime) to
 * the "streamx_json_test" topic.
 *
 * @author DeleMing
 */
public class MockStreamxJson {
    private static String topic = "streamx_json_test";
    private static String brokerAddr = "zorkdata-95:9092";
    private static ProducerRecord<String, String> producerRecord = null;
    private static KafkaProducer<String, String> producer = null;

    /** Creates the shared producer (String key/value serializers, acks=1). */
    public static void init() {
        Properties props = new Properties();
        props.put("bootstrap.servers", brokerAddr);
        props.put("acks", "1");
        props.put("retries", 0);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", StringSerializer.class.getName());
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        producer = new KafkaProducer<String, String>(props);
    }

    /** @return one record shaped like a parsed log4j line */
    public static String buildMsg() {
        JSONObject jsonObject = new JSONObject();
        // 2020-03-08T12:35:02.659 [main] DEBUG org.apache.flink.streaming.api.graph.StreamGraphGenerator
        jsonObject.put("className", "org.apache.flink.streaming.api.graph.StreamGraphGenerator");
        jsonObject.put("methodName", "main");
        jsonObject.put("datetime", new Date().toString());
        return jsonObject.toString();
    }

    /**
     * Builds and sends one record with a null key.
     * FIX: the producer is now created lazily ONCE; the original called
     * init() on every send, leaking a new unclosed KafkaProducer per record.
     */
    public static void send(String topic) {
        if (producer == null) {
            init();
        }
        String req = buildMsg();
        producerRecord = new ProducerRecord<String, String>(
                topic,
                null,
                req
        );
        producer.send(producerRecord);
    }

    public static void main(String[] args) {
        for (int i = 0; i <= 100; i++) {
            send(topic);
        }
        // Flush buffered records and release network resources before exit.
        producer.close();
    }
}
package com.zorkdata.tools.mock;
import com.alibaba.fastjson.JSONObject;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;
import java.util.Properties;
import java.util.Random;
/**
* @author DeleMing
*/
/**
 * Publishes 101 nested-JSON records ({"name","obj":{"channel"},"pv","xctime"})
 * to the "streamx_json" topic.
 *
 * @author DeleMing
 */
public class MockStreamxJson1 {
    private static String topic = "streamx_json";
    private static String brokerAddr = "zorkdata-95:9092";
    private static ProducerRecord<String, String> producerRecord = null;
    private static KafkaProducer<String, String> producer = null;

    /** Creates the shared producer (String key/value serializers, acks=1). */
    public static void init() {
        Properties props = new Properties();
        props.put("bootstrap.servers", brokerAddr);
        props.put("acks", "1");
        props.put("retries", 0);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", StringSerializer.class.getName());
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        producer = new KafkaProducer<String, String>(props);
    }

    /** @return one record with a random pv and the current epoch-millis */
    public static String buildMsg() {
        JSONObject jsonObject = new JSONObject();
        // {"name":"tom","obj":{"channel":"root"},"pv":4,"xctime":1572932485}
        jsonObject.put("name", "tom");
        JSONObject jsonObject1 = new JSONObject();
        jsonObject1.put("channel", "root");
        jsonObject.put("obj", jsonObject1);
        jsonObject.put("pv", new Random().nextInt(100));
        jsonObject.put("xctime", System.currentTimeMillis());
        return jsonObject.toString();
    }

    /**
     * Builds and sends one record with a null key.
     * FIX: the producer is now created lazily ONCE; the original called
     * init() on every send, leaking a new unclosed KafkaProducer per record.
     */
    public static void send(String topic) {
        if (producer == null) {
            init();
        }
        String req = buildMsg();
        producerRecord = new ProducerRecord<String, String>(
                topic,
                null,
                req
        );
        producer.send(producerRecord);
    }

    public static void main(String[] args) {
        for (int i = 0; i <= 100; i++) {
            send(topic);
        }
        // Flush buffered records and release network resources before exit.
        producer.close();
    }
}
package com.zorkdata.tools.mock;
import com.alibaba.fastjson.JSONObject;
import com.zorkdata.tools.kafka.Producer;
import com.zorkdata.tools.kafka.ProducerPool;
import com.zorkdata.tools.utils.DateUtil;
import com.zorkdata.tools.utils.PropertiesUtil;
import com.zorkdata.tools.utils.StringUtil;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Random;
/**
* @description:
* @author: 谢森
* @Email xiesen@zork.com.cn
* @time: 2020/1/17 0017 10:57
*/
public class MockStreamxLogAvro {
    /**
     * Shared RNG. The original called {@code new Random(codes.length)} — a
     * constant seed — inside getRandomCountryCode(), so the "random" country
     * code was in fact the same value on every invocation.
     */
    private static final Random RANDOM = new Random();

    private static String[] codes = {
            "AO", "AF", "AL", "DZ", "AD", "AI", "AG", "AR", "AM", "AU",
            "AT", "AZ", "BS", "BH", "BD", "BB", "BY", "BE", "BZ", "BJ"
    };

    /**
     * Reads the {@code log.size} setting from the given properties file.
     *
     * @param propertiesName path of the properties file
     * @return configured size (defaults to 5000, minimum fallback 1)
     * @throws Exception when the properties file cannot be read
     */
    private static long getSize(String propertiesName) throws Exception {
        Properties properties = PropertiesUtil.getProperties(propertiesName);
        long logSize = StringUtil.getLong(properties.getProperty("log.size", "5000").trim(), 1);
        return logSize;
    }

    /**
     * Renders one log record as JSON (debug helper; note "metrics" is emitted
     * under the key "measures").
     */
    public static String printData(String logTypeName, String timestamp, String source, String offset,
                                   Map<String, String> dimensions, Map<String, Double> metrics, Map<String, String> normalFields) {
        JSONObject jsonObject = new JSONObject();
        jsonObject.put("logTypeName", logTypeName);
        jsonObject.put("timestamp", timestamp);
        jsonObject.put("source", source);
        jsonObject.put("offset", offset);
        jsonObject.put("dimensions", dimensions);
        jsonObject.put("measures", metrics);
        jsonObject.put("normalFields", normalFields);
        return jsonObject.toString();
    }

    /** @return a random offset in [0, 10000) as a string */
    private static String getRandomOffset() {
        return String.valueOf(RANDOM.nextInt(10000));
    }

    /** @return dimensions with a random host/ip suffix in [0, 10) */
    private static Map<String, String> getRandomDimensions() {
        int i = RANDOM.nextInt(10);
        Map<String, String> dimensions = new HashMap<>();
        dimensions.put("hostname", "zorkdata" + i);
        dimensions.put("ip", "192.168.1." + i);
        dimensions.put("appprogramname", "tc50");
        dimensions.put("appsystem", "tdx");
        return dimensions;
    }

    /** @return a uniformly random ISO country code from {@link #codes} */
    private static String getRandomCountryCode() {
        return codes[RANDOM.nextInt(codes.length)];
    }

    /** @return fixed message plus a random country code */
    private static Map<String, String> getRandomNormalFields() {
        Map<String, String> normalFields = new HashMap<>();
        normalFields.put("message", "data update success");
        normalFields.put("countryCode", getRandomCountryCode());
        return normalFields;
    }

    public static void main(String[] args) throws Exception {
        // 1,000,000,000 messages (int arithmetic; 10^9 fits in int before widening).
        long size = 10000 * 10000 * 10;
        for (int i = 0; i < size; i++) {
            String logTypeName = "streamx_log_avro";
            String timestamp = DateUtil.getUTCTimeStr();
            String source = "/var/log/xiesen.log";
            String offset = getRandomOffset();
            Map<String, String> dimensions = getRandomDimensions();
            Map<String, Double> measures = new HashMap<>();
            Map<String, String> normalFields = getRandomNormalFields();
            Producer producer = ProducerPool.getInstance().getProducer();
            producer.sendLog("logavro_1p", logTypeName, timestamp, source, offset, dimensions,
                    measures, normalFields);
        }
        // Give async sends a moment to drain before exit.
        Thread.sleep(1000);
    }
}
package com.zorkdata.tools.mock;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Random;
/**
* @author DeleMing
*/
public class MockZorkMetric {
    private static String topic = "zorkdata_metric";
    private static String brokerAddr = "zorkdata-95:9092";
    private static ProducerRecord<String, byte[]> producerRecord = null;
    private static KafkaProducer<String, byte[]> producer = null;

    /**
     * Creates the shared KafkaProducer on first call; later calls are no-ops.
     * The original recreated (and never closed) a producer for every send().
     */
    public static void init() {
        if (producer != null) {
            return;
        }
        Properties props = new Properties();
        props.put("bootstrap.servers", brokerAddr);
        props.put("acks", "1");
        props.put("retries", 0);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", ByteArraySerializer.class.getName());
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        producer = new KafkaProducer<String, byte[]>(props);
    }

    /**
     * Builds one Avro-serialized "influx_cpu" metric with a random cpu_usage.
     *
     * @return Avro-encoded metric bytes
     */
    public static byte[] buildMetric() {
        Random random = new Random();
        String metricSetName = "influx_cpu";
        String timestamp = String.valueOf(System.currentTimeMillis());
        Map<String, String> dimensions = new HashMap<>();
        dimensions.put("hostname", "localhost");
        dimensions.put("appprogramname", "tc50");
        dimensions.put("appsystem", "TXJY");
        Map<String, Double> metrics = new HashMap<>();
        metrics.put("cpu_usage", random.nextDouble());
        AvroSerializer metricSerializer = AvroSerializerFactory.getMetricAvroSerializer();
        byte[] bytes = metricSerializer.serializingMetric(metricSetName, timestamp, dimensions, metrics);
        return bytes;
    }

    /**
     * Sends one metric to the given topic (asynchronously).
     *
     * @param topic destination Kafka topic
     */
    public static void send(String topic) {
        init();
        byte[] req = buildMetric();
        producerRecord = new ProducerRecord<String, byte[]>(
                topic,
                null,
                req
        );
        producer.send(producerRecord);
    }

    public static void main(String[] args) throws InterruptedException {
        for (int i = 0; i <= 100; i++) {
            send(topic);
            Thread.sleep(1000);
        }
        // Flush any buffered records and release resources before exit.
        if (producer != null) {
            producer.close();
        }
    }
}
package com.zorkdata.tools.pojo;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.Map;
/**
 * POJO describing a single metric event: a named measurement with a
 * timestamp, free-form fields, and string tags (lombok generates the
 * accessors, builder, and constructors).
 *
 * @author 谢森
 * @Email xiesen@zork.com.cn
 * @time 2020/1/16 0016 9:27
 */
@Data
@Builder
@AllArgsConstructor
@NoArgsConstructor
public class MetricEvent {
    /**
     * Metric name
     */
    private String name;
    /**
     * Metric timestamp (presumably epoch millis — TODO confirm with producers)
     */
    private Long timestamp;
    /**
     * Metric fields (measurement values keyed by field name)
     */
    private Map<String, Object> fields;
    /**
     * Metric tags (string dimensions keyed by tag name)
     */
    private Map<String, String> tags;
}
package com.zorkdata.tools.pojo;
/**
* @description:
* @author: 谢森
* @Email xiesen@zork.com.cn
* @time: 2020/1/13 0013 10:19
*/
public class Rule implements Comparable<Rule> {
    /** Comparison operator, e.g. ">=", ">", "<=", "<". */
    private String operator;
    /** Numeric threshold, stored as a decimal string. */
    private String times;
    /** Alert level returned when this rule matches. */
    private String level;

    public String getOperator() {
        return operator;
    }

    public void setOperator(String operator) {
        this.operator = operator;
    }

    public String getTimes() {
        return times;
    }

    public void setTimes(String times) {
        this.times = times;
    }

    public String getLevel() {
        return level;
    }

    public void setLevel(String level) {
        this.level = level;
    }

    /**
     * Orders rules by numeric {@code times}, descending (largest first).
     *
     * @param o the rule to compare against
     * @return negative when this rule's times is larger than {@code o}'s
     */
    @Override
    public int compareTo(Rule o) {
        // Integer.compare avoids the overflow of the original subtraction
        // (e.g. -2000000000 - 2000000000 wraps positive and inverts the order).
        return Integer.compare(Integer.parseInt(o.times), Integer.parseInt(this.times));
    }
}
package com.zorkdata.tools.pojo;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.ToString;
import java.util.Map;
@Data
@ToString
@NoArgsConstructor
@AllArgsConstructor
public class ZorkData {
private String logTypeName;
private String source;
private String timestamp;
private String offset;
private Map<String, Double> measures;
private Map<String, String> normalFields;
private Map<String, String> dimensions;
}
package com.zorkdata.tools.test;
import com.zorkdata.tools.pojo.Rule;
import java.util.Collections;
import java.util.List;
/**
* @description:
* @author: 谢森
* @Email xiesen@zork.com.cn
* @time: 2020/1/13 0013 9:24
*/
public class LogicRule {
    /**
     * Returns the alert level of the first rule whose threshold matches
     * {@code times}, or null when no rule matches.
     * <p>
     * Rules sort descending by threshold (Rule.compareTo); for "<=" / "<"
     * operators the list is reversed so thresholds are checked ascending.
     *
     * @param list  rules to evaluate (sorted/reversed in place)
     * @param times observed count to test against the thresholds
     * @return matching rule's level, or null
     */
    public static String comp1(List<Rule> list, int times) {
        // Default order: descending by threshold.
        Collections.sort(list);
        List<Rule> newList = list;
        for (Rule rule : newList) {
            String operator = rule.getOperator();
            // For "<=" or "<" the thresholds must be ascending, so reverse.
            // (The original tested "<=" twice; the second operand must be "<".)
            if ("<=".equals(operator) || "<".equals(operator)) {
                Collections.reverse(newList);
            }
            if (">=".equals(operator)) {
                for (Rule r1 : newList) {
                    if (times >= Integer.parseInt(r1.getTimes())) {
                        return r1.getLevel();
                    }
                }
            }
            if (">".equals(operator)) {
                for (Rule r1 : newList) {
                    if (times > Integer.parseInt(r1.getTimes())) {
                        return r1.getLevel();
                    }
                }
            }
            if ("<=".equals(operator)) {
                for (Rule r1 : newList) {
                    if (times <= Integer.parseInt(r1.getTimes())) {
                        return r1.getLevel();
                    }
                }
            }
            if ("<".equals(operator)) {
                for (Rule r1 : newList) {
                    if (times < Integer.parseInt(r1.getTimes())) {
                        return r1.getLevel();
                    }
                }
            }
            // Only the first rule's operator drives the scan.
            break;
        }
        return null;
    }
}
package com.zorkdata.tools.test;
import com.zorkdata.tools.pojo.ZorkData;
import org.apache.commons.lang.StringUtils;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.*;
/**
* @Description
* @className top.xiesen.mock.kafka.test.ReflectionUtils
* @Author 谢森
* @Email xiesen@zork.com.cn
* @Date 2020/3/11 17:06
*/
public class ReflectionUtils {
    /**
     * Private constructor — utility class, not meant to be instantiated.
     */
    private ReflectionUtils() {
    }
    /**
     * Reads a property of {@code target} by invoking its JavaBean getter
     * ("name" -> getName()).
     *
     * @param fieldName property name (first letter is upper-cased to form the getter)
     * @param target    object to read from
     * @return the getter's return value
     * @throws Exception when no such getter exists or invocation fails
     */
    public static Object getFieldValueByName(String fieldName, Object target) throws Exception {
        String firstLetter = fieldName.substring(0, 1).toUpperCase();
        String getter = "get" + firstLetter + fieldName.substring(1);
        Method method = target.getClass().getMethod(getter, new Class[0]);
        Object e = method.invoke(target, new Object[0]);
        return e;
    }
    /**
     * Returns the names of all fields declared directly on the target's class
     * (inherited fields are NOT included — getDeclaredFields semantics).
     *
     * @param target object whose class is inspected
     * @return declared field names, in declaration order
     */
    public static String[] getFiledName(Object target) throws Exception {
        Field[] fields = target.getClass().getDeclaredFields();
        String[] fieldNames = new String[fields.length];
        for (int i = 0; i < fields.length; ++i) {
            fieldNames[i] = fields[i].getName();
        }
        return fieldNames;
    }
    /**
     * Returns the values of all declared fields, read via their getters
     * (so every field is assumed to have a matching JavaBean getter).
     *
     * @param target object to read
     * @return field values in the same order as {@link #getFiledName}
     * @throws Exception when a getter is missing or fails
     */
    public static Object[] getFiledValues(Object target) throws Exception {
        String[] fieldNames = getFiledName(target);
        Object[] value = new Object[fieldNames.length];
        for (int i = 0; i < fieldNames.length; ++i) {
            value[i] = getFieldValueByName(fieldNames[i], target);
        }
        return value;
    }
    /**
     * Extracts the map name from an expression like "dimensions['key']",
     * i.e. everything before the first '['; null when no '[' is present.
     */
    public static String getMapKey(String s) {
        int startindex = s.indexOf("[");
        if (startindex < 0) {
            return null;
        }
        String substr = s.substring(0, startindex);
        return substr;
    }
    /**
     * Extracts the quoted key from an expression like "dimensions['key']",
     * i.e. the text between the first pair of single quotes; null when the
     * quotes are missing.
     */
    public static String getMapValue(String s) {
        int startindex = s.indexOf("'");
        if (startindex < 0) {
            return null;
        }
        String substr = s.substring(startindex + 1);
        int endindex = substr.indexOf("'");
        if (endindex < 0) {
            return null;
        }
        String ret = substr.substring(0, endindex);
        return ret;
    }
    /**
     * Builds a "/a/b/c"-style partition path from a comma-separated spec.
     * Each spec entry is either a plain property name ("logTypeName") or a
     * map lookup ("dimensions['appsystem']"), resolved against zorkData.
     *
     * @param zorkData  record to read values from
     * @param partition comma-separated field spec; empty/null yields ""
     * @return concatenated "/value" segments
     * @throws Exception when reflection lookup fails (also NPE if a value is absent)
     */
    public static String getPartition(ZorkData zorkData, String partition) throws Exception {
        StringBuilder builder = new StringBuilder();
        if (StringUtils.isNotEmpty(partition)) {
            List<String> list = Arrays.asList(partition.split(","));
            for (int i = 0; i < list.size(); i++) {
                String value = list.get(i);
                if ("timestamp".equals(value)) {
                    // date formatting — intentionally not implemented yet
                }
                if (value.contains("[")) {
                    String mapKey = getMapKey(value);
                    Map map = (Map) getFieldValueByName(mapKey, zorkData);
                    Object o = map.get(getMapValue(value));
                    builder.append("/" + o.toString());
                } else {
                    Object o = getFieldValueByName(value, zorkData);
                    builder.append("/" + o.toString());
                }
            }
        }
        return builder.toString();
    }
    /**
     * Ad-hoc demo of the reflection helpers against a ZorkData instance.
     */
    public static void main(String[] args) throws Exception {
        ZorkData zorkData = new ZorkData();
        zorkData.setLogTypeName("test");
        zorkData.setTimestamp(new Date().toString());
        Map<String, String> dimensions = new HashMap<>();
        dimensions.put("appsystem", "tdx");
        zorkData.setDimensions(dimensions);
        Object logTypeName = getFieldValueByName("logTypeName", zorkData);
        System.out.println("logTypeName = " + logTypeName);
        Object dimensions1 = getFieldValueByName("dimensions", zorkData);
        System.out.println(dimensions1);
        Map map = (Map) dimensions1;
        System.out.println(map.get("appsystem"));
        String str = "logTypeName,timestamp,dimensions['appsystem']";
        System.out.println(getMapKey("dimensions['appsystem']"));
        System.out.println(getMapValue("dimensions['appsystem']"));
        String partition = getPartition(zorkData, str);
        System.out.println(partition);
    }
}
package com.zorkdata.tools.test;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* @Description
* @className top.xiesen.mock.kafka.test.RegularMatchTest
* @Author 谢森
* @Email xiesen@zork.com.cn
* @Date 2020/4/13 13:08
*/
public class RegularMatchTest {
    public static void main(String[] args) {
        String str = "{id=1, name=周军锋, password=1234, create_time=2020-04-02 17:04:04, message={\"id\":\"1\",\"name\":\"周军锋\",\"password\":\"1234\",\"create_time\":\"2020-04-02 17:04:04\"}}";
        Map<String, Object> map = regularMatchMessage(str);
        // Print every extracted entry.
        for (Map.Entry<String, Object> entry : map.entrySet()) {
            System.out.println(entry.getKey() + " = " + entry.getValue());
        }
        Object id = map.get("id");
        System.out.println("id: " + id);
        System.out.println("name: " + map.get("name"));
        System.out.println("password: " + map.get("password"));
        System.out.println("create_time: " + map.get("create_time"));
    }
    /**
     * Extracts the flat JSON object embedded after "message={...}" and parses
     * it into a map WITHOUT a JSON parser: pairs are split on ',' and ':',
     * then quotes are stripped by fixed substring offsets. This only works for
     * one level of nesting and values without ',' or '{' in them.
     *
     * @param data raw string containing "message={...}"
     * @return key/value pairs of the embedded object (empty when no match)
     */
    private static Map<String, Object> regularMatchMessage(String data) {
        Map<String, Object> map = new HashMap<>();
        // Lookbehind/lookahead capture the brace-free body of message={...}.
        String regex = "(?<=message=\\{)[^}]*(?=\\})";
        Pattern pattern = Pattern.compile(regex);
        Matcher matcher = pattern.matcher(data);
        String formatData;
        formatData = null;
        // Keep the LAST match if several occur.
        while (matcher.find()) {
            formatData = matcher.group();
        }
        if (null != formatData) {
            String[] split = formatData.split(",");
            for (int i = 0; i < split.length; i++) {
                String tmp = split[i];
                String[] split1 = tmp.split(":");
                String key = split1[0];
                StringBuilder str = new StringBuilder();
                // Re-join the value parts: colons inside the value (e.g. times
                // like 17:04:04) were split away, so append each part plus ':'.
                for (int j = 1; j <= split1.length - 1; j++) {
                    str.append(split1[j]);
                    str.append(":");
                }
                String value = str.toString();
                // key is "\"name\"" -> strip one char each side;
                // value is "\"x\":" -> strip leading quote and trailing quote+':'.
                map.put(key.substring(1, key.length() - 1), value.substring(1, value.length() - 2));
            }
        }
        return map;
    }
}
package com.zorkdata.tools.test;
import java.util.Properties;
/**
* @Description
* @className top.xiesen.mock.kafka.test.Test01
* @Author 谢森
* @Email xiesen@zork.com.cn
* @Date 2020/3/16 16:24
*/
public class Test01 {
    /**
     * One-off helper: flattens a multi-line Flink/streamx SQL script
     * (kafka source table, elasticsearch sink table, insert statement)
     * into a single line by stripping newlines and tabs, then prints it.
     */
    public static void main(String[] args) {
        String a = "CREATE TABLE myTable(\n" +
                "\tname varchar,\n" +
                "\tobj.channel varchar as channel,\n" +
                "\tpv INT,\n" +
                "\txctime bigint\n" +
                ")WITH(\n" +
                "\ttype='kafka11',\n" +
                "\tbootstrapServers='zorkdata-95:9092',\n" +
                "\tzookeeperQuorum='zorkdata-91:2181/kafka111,zorkdata-92:2181/kafka111,zorkdata-95:2181/kafka111',\n" +
                "\tkafka.key.deserializer='org.apache.kafka.common.serialization.StringDeserializer',\n" +
                "\tkafka.value.deserializer='org.apache.kafka.common.serialization.StringDeserializer',\n" +
                "\toffsetReset='earliest',\n" +
                "\tgroupId='streamx_sql_01',\n" +
                "\ttopic='streamx_json',\n" +
                "\tsourceDataType='json',\n" +
                "\tparallelism='2'\n" +
                ");\n" +
                "\n" +
                "CREATE TABLE MyResult(\n" +
                "    name varchar,\n" +
                "    pv INT\n" +
                "    )WITH(\n" +
                "    type ='elasticsearch',\n" +
                "    address ='192.168.70.25:9200',\n" +
                "    cluster='dev-es6',\n" +
                "    estype ='type1',\n" +
                "    index ='streamx_test',\n" +
                "    parallelism ='1',\n" +
                "\tid='0,1'\n" +
                "    );\n" +
                "\n" +
                "insert into MyResult select name,pv from myTable;";
        // Remove all line breaks and tabs so the script fits on one line.
        System.out.println(a.replaceAll("\n", "").replaceAll("\t", ""));
    }
}
package com.zorkdata.tools.test;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.springframework.util.ReflectionUtils;
import java.beans.PropertyDescriptor;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
public class TimeStampTest {
    /**
     * Parses a "yyyyMMddHHmmssSSS" string to a Date.
     *
     * @param str timestamp string
     * @return parsed Date, or null when parsing fails (a note is printed)
     */
    public static Date StrToDate(String str) {
        SimpleDateFormat format = new SimpleDateFormat("yyyyMMddHHmmssSSS");
        Date date = null;
        try {
            date = format.parse(str);
        } catch (ParseException e) {
            System.out.print("日期转换异常");
        }
        return date;
    }

    private static DateTimeFormatter dateFormat1 = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss.SSS");

    /**
     * Demo conversion of a compact timestamp string to Joda's ISO form.
     * NOTE(review): substring(0, 15) drops the last two millisecond digits of
     * the 17-char input; lenient SimpleDateFormat still parses it — confirm intended.
     */
    public static void test() {
        String mestime = String.valueOf("20190424092143333");
        String datetime = mestime.substring(0, 15);
        Date date = StrToDate(datetime);
        String times = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS").format(date).toString();
        DateTime dateTime2 = DateTime.parse(times, dateFormat1);
        String timestamp = dateTime2.toString();
        System.out.println(timestamp);
    }

    /**
     * Whole days between two dates, each truncated to midnight via a
     * format/parse round-trip.
     *
     * @param smdate earlier date
     * @param bdate  later date
     * @return bdate - smdate in days (negative when smdate is later)
     * @throws ParseException never in practice (re-parsing our own format)
     */
    public static int daysBetween(Date smdate, Date bdate) throws ParseException {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
        // Truncate both instants to local midnight.
        smdate = sdf.parse(sdf.format(smdate));
        bdate = sdf.parse(sdf.format(bdate));
        Calendar cal = Calendar.getInstance();
        cal.setTime(smdate);
        long time1 = cal.getTimeInMillis();
        cal.setTime(bdate);
        long time2 = cal.getTimeInMillis();
        // NOTE(review): raw millisecond division ignores DST transitions — confirm acceptable.
        long betweenDays = (time2 - time1) / (1000 * 3600 * 24);
        // toIntExact replaces the original's long -> String -> int round-trip
        // and fails loudly instead of mis-parsing on (impossible) overflow.
        return Math.toIntExact(betweenDays);
    }

    /**
     * Whole days between two "yyyy-MM-dd" strings.
     *
     * @param smdate earlier date string
     * @param bdate  later date string
     * @return bdate - smdate in days
     * @throws ParseException when either string is not "yyyy-MM-dd"
     */
    public static int daysBetween(String smdate, String bdate) throws ParseException {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
        Calendar cal = Calendar.getInstance();
        cal.setTime(sdf.parse(smdate));
        long time1 = cal.getTimeInMillis();
        cal.setTime(sdf.parse(bdate));
        long time2 = cal.getTimeInMillis();
        long betweenDays = (time2 - time1) / (1000 * 3600 * 24);
        return Math.toIntExact(betweenDays);
    }

    public static void main(String[] args) throws ParseException {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
        String today = sdf.format(new Date());
        System.out.println(daysBetween("2018-03-17", today));
    }
}
package com.zorkdata.tools.utils;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import java.util.*;
import java.util.concurrent.ExecutionException;
/**
* @Description
* @className top.xiesen.mock.kafka.utils.CustomerProducer
* @Author 谢森
* @Email xiesen@zork.com.cn
* @Date 2020/4/2 9:39
*/
@Slf4j
public class CustomerProducer {
    static String servers = "kafka-1:9092,kafka-2:9092,kafka-3:9092";
    static int batchSize = 1;
    static CustomerProducer testProducer;
    static String topics;
    public static long logSize;
    // Per-instance producers. These were static in the original, so every
    // CustomerProducer built by ProducerPool overwrote the same shared fields
    // and the "pool" effectively held a single producer (the rest leaked).
    private KafkaProducer<String, byte[]> producer;
    private KafkaProducer<String, String> noAvroProducer;

    /**
     * Lazily creates the process-wide singleton instance.
     *
     * @param propertiesName path of the properties file to configure from
     * @return the shared CustomerProducer
     */
    public static synchronized CustomerProducer getInstance(String propertiesName) {
        if (testProducer == null) {
            testProducer = new CustomerProducer(propertiesName);
        }
        return testProducer;
    }

    /**
     * Loads configuration and builds both producers: one for Avro byte[]
     * payloads, one for plain JSON strings. Terminates the JVM when Kafka
     * initialisation fails (original behavior, kept).
     *
     * @param propertiesName path of the properties file
     */
    public CustomerProducer(String propertiesName) {
        try {
            initConfig(propertiesName);
            Properties props = new Properties();
            props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
            props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
            props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArraySerializer");
            // Bytes per partition at which a batch is sent (Kafka default is
            // 16384 = 16 KB); batches can still go out earlier — see linger.ms.
            props.put(ProducerConfig.BATCH_SIZE_CONFIG, batchSize);
            // Retry count for transient send failures.
            props.put(ProducerConfig.RETRIES_CONFIG, 5);
            // Max time to wait for a batch to fill before sending
            // (default 0 = send immediately); trades latency for throughput.
            props.put(ProducerConfig.LINGER_MS_CONFIG, 100);
            // Max request size; Kafka default is 1048576 bytes (1 MB), raised here to 10 MB.
            props.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, 10485760);
            // How long to wait for the broker's response (default 30 s).
            props.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, 60000);
            // Producer-side buffer, in bytes (default 33554432 = 32 MB).
            props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
            // Durability: 0 = fire-and-forget, 1 = leader ack only, all = full ISR ack.
            props.put(ProducerConfig.ACKS_CONFIG, "1");
            producer = new KafkaProducer<String, byte[]>(props);
            props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
            noAvroProducer = new KafkaProducer<String, String>(props);
        } catch (Exception ex) {
            log.error("初始化Kafka失败,系统自动退出! ", ex);
            System.exit(1);
        }
    }

    /**
     * Reads topic/server/batch/log-size settings, with defaults.
     *
     * @param propertiesName path of the properties file
     * @throws Exception when the file cannot be read
     */
    public void initConfig(String propertiesName) throws Exception {
        Properties properties = PropertiesUtil.getProperties(propertiesName);
        topics = properties.getProperty("log.topic");
        servers = properties.getProperty("kafka.servers", "zorkdata-151:9092").trim();
        batchSize = StringUtil.getInt(properties.getProperty("kafka.batch.size", "5000").trim(), 1);
        logSize = StringUtil.getLong(properties.getProperty("log.size", "5000").trim(), 1);
    }

    /**
     * Serializes one log record with Avro and sends it asynchronously.
     * Failures are logged, never thrown.
     */
    public void sendLog(String logTypeName, String timestamp, String source, String offset,
                        Map<String, String> dimensions, Map<String, Double> metrics, Map<String, String> normalFields) throws ExecutionException, InterruptedException {
        try {
            byte[] bytes = AvroSerializerFactory.getLogAvroSerializer().serializingLog(logTypeName, timestamp, source,
                    offset, dimensions, metrics, normalFields);
            producer.send(new ProducerRecord<String, byte[]>(topics, null, bytes));
        } catch (Exception e) {
            log.error("sendLog-插入Kafka失败", e);
        }
    }

    /**
     * Sends an already-serialized JSON log line asynchronously.
     * Failures are logged, never thrown.
     */
    public void sendJsonLog(String logJson) {
        try {
            noAvroProducer.send(new ProducerRecord<String, String>(topics, null, logJson));
        } catch (Exception e) {
            log.error("send json Log-插入Kafka失败", e);
        }
    }
}
package com.zorkdata.tools.utils;
import org.joda.time.DateTime;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
/**
* @author DeleMing
*/
public class DateUtil {
    /**
     * Formats "now" as ISO-8601 with a hard-coded +08:00 offset.
     * SimpleDateFormat is not thread-safe, so — consistently with the other
     * formatters below — it is held in a ThreadLocal (the original kept this
     * one in a plain shared static field, risking corrupted output under
     * concurrent use).
     */
    private static ThreadLocal<SimpleDateFormat> format = new ThreadLocal<SimpleDateFormat>() {
        @Override
        protected SimpleDateFormat initialValue() {
            return new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS+08:00");
        }
    };
    /** "yyyy.MM.dd" formatter, one instance per thread. */
    private static ThreadLocal<SimpleDateFormat> sdf = new ThreadLocal<SimpleDateFormat>() {
        @Override
        protected SimpleDateFormat initialValue() {
            return new SimpleDateFormat("yyyy.MM.dd");
        }
    };
    /** "yyyy-MM-dd'T'HH:mm:ss" parser, one instance per thread. */
    private static ThreadLocal<SimpleDateFormat> utcSdf = new ThreadLocal<SimpleDateFormat>() {
        @Override
        protected SimpleDateFormat initialValue() {
            return new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
        }
    };

    /**
     * @param timestamp any string Joda's DateTime can parse
     * @return epoch millis of the parsed instant
     */
    public static Long timestamp(String timestamp) {
        return new DateTime(timestamp).toDate().getTime();
    }

    /**
     * @param timestamp any string Joda's DateTime can parse
     * @return the instant formatted as "yyyy.MM.dd"
     */
    public static String format(String timestamp) throws ParseException {
        return sdf.get().format(new DateTime(timestamp).toDate());
    }

    /**
     * @param utcDateStr "yyyy-MM-dd'T'HH:mm:ss" string
     * @return epoch millis
     * @throws ParseException when the string does not match the pattern
     */
    public static Long utcDate2Timestamp(String utcDateStr) throws ParseException {
        return utcSdf.get().parse(utcDateStr).getTime();
    }

    /** @return current time as "yyyy-MM-dd'T'HH:mm:ss.SSS+08:00" */
    public static String getUTCTimeStr() {
        return format.get().format(new Date());
    }

    /**
     * @param interval millis to add to "now" (may be negative)
     * @return shifted time as "yyyy-MM-dd'T'HH:mm:ss.SSS+08:00"
     */
    public static String getUTCTimeStr(long interval) {
        long currentTimeMillis = System.currentTimeMillis();
        return format.get().format(new Date(currentTimeMillis + interval));
    }

    public static void main(String[] args) {
        String timeStr = getUTCTimeStr();
        Date date = new DateTime(timeStr).toDate();
        System.out.println(sdf.get().format(date));
    }
}
package com.zorkdata.tools.utils;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
/**
* @author DeleMing
*/
public class JConsumerMutil {
    private final static Logger log = LoggerFactory.getLogger(JConsumerMutil.class);
    private final KafkaConsumer<String, String> consumer;
    private ExecutorService executorService;

    /** Builds an auto-committing consumer subscribed to test_kafka_game_x. */
    public JConsumerMutil() {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "hadoop:9092");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "ke1");
        // Enable offset auto-commit...
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
        // ...once per second.
        props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        consumer = new KafkaConsumer<String, String>(props);
        consumer.subscribe(Arrays.asList("test_kafka_game_x"));
    }

    /** Polls forever, handing each non-empty batch to the worker pool. */
    public void execute() {
        executorService = Executors.newFixedThreadPool(1);
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(100);
            // poll() never returns null — it returns an empty batch on timeout,
            // so test emptiness instead of null to avoid submitting no-op tasks.
            if (!records.isEmpty()) {
                executorService.submit(new KafkaConsumerThread(records, consumer));
            }
        }
    }

    /** Closes the consumer and drains the worker pool (10 s grace). */
    public void shutdown() {
        try {
            if (consumer != null) {
                consumer.close();
            }
            if (executorService != null) {
                executorService.shutdown();
                // awaitTermination must stay inside the null guard — the
                // original called it unconditionally and threw NPE when
                // execute() had never started the pool.
                if (!executorService.awaitTermination(10, TimeUnit.SECONDS)) {
                    log.error("shutdown kafka consumer thread timeout.");
                }
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }

    /**
     * Worker that prints one polled batch, partition by partition.
     */
    class KafkaConsumerThread implements Runnable {
        private ConsumerRecords<String, String> records;

        public KafkaConsumerThread(ConsumerRecords<String, String> records,
                                   KafkaConsumer<String, String> consumer) {
            // NOTE(review): the consumer argument is accepted but unused;
            // kept to preserve the constructor signature.
            this.records = records;
        }

        @Override
        public void run() {
            for (TopicPartition partition : records.partitions()) {
                List<ConsumerRecord<String, String>> partitionRecords = this.records.records(partition);
                log.info("Thread Id : " + Thread.currentThread().getId());
                for (ConsumerRecord<String, String> record : partitionRecords) {
                    System.out.println("offset =" + record.offset() + ", key=" + record.key() + ", value=" + record.value());
                }
            }
        }
    }

    public static void main(String[] args) {
        JConsumerMutil consumer = new JConsumerMutil();
        try {
            consumer.execute();
        } catch (Exception e) {
            log.error("mutil consumer from kafka has error , msg is " + e.getMessage());
            consumer.shutdown();
        }
    }
}
package com.zorkdata.tools.utils;
import com.alibaba.fastjson.JSONObject;
import org.apache.kafka.clients.producer.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Date;
import java.util.Properties;
/**
* @author DeleMing
*/
public class JProducer extends Thread {
    private final Logger log = LoggerFactory.getLogger(JProducer.class);

    /**
     * Builds the Kafka connection settings for this demo producer.
     *
     * @return producer configuration properties
     */
    public Properties configure() {
        Properties cfg = new Properties();
        cfg.put("bootstrap.servers", "hadoop:9092");
        cfg.put("batch.size", 16384);
        cfg.put("linger.ms", 1);
        cfg.put("buffer.memory", 33554432);
        cfg.put(ProducerConfig.ACKS_CONFIG, "1");
        cfg.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
        cfg.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
        return cfg;
    }

    /**
     * Sends 100 small JSON records to "test_kafka_game_x", logging each
     * acknowledgement, then waits briefly and closes the producer.
     */
    @Override
    public void run() {
        KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(configure());
        int seq = 0;
        while (seq < 100) {
            JSONObject payload = new JSONObject();
            payload.put("id", seq);
            payload.put("ip", "192.168.0." + seq);
            payload.put("date", new Date().toString());
            String recordKey = "key" + seq;
            ProducerRecord<String, String> record =
                    new ProducerRecord<String, String>("test_kafka_game_x", recordKey, payload.toJSONString());
            kafkaProducer.send(record, (recordMetadata, e) -> {
                if (null != e) {
                    log.error("send error,msg is " + e.getMessage());
                } else {
                    log.info("the offset of the record we just send is: " + recordMetadata.offset());
                }
            });
            seq++;
        }
        try {
            sleep(3000);
        } catch (InterruptedException e) {
            log.error("Interrupted thread error, msg is " + e.getMessage());
        }
        kafkaProducer.close();
    }

    public static void main(String[] args) {
        new JProducer().start();
    }
}
package com.zorkdata.tools.utils;
import com.alibaba.fastjson.JSONObject;
import org.apache.kafka.clients.producer.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Date;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
/**
* @author DeleMing
*/
public class JProducerThread extends Thread {
    private final Logger log = LoggerFactory.getLogger(JProducerThread.class);
    // Size of the executor pool below (only one task is actually submitted).
    private final static int MAX_THREAD_SIZE = 6;
    /**
     * Builds the Kafka producer configuration.
     *
     * @return producer configuration properties
     */
    public Properties configure() {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "hadoop:9092");
        /**
         * Bytes per partition at which a batch is sent (16384 = 16 KB, the
         * Kafka default); batches may still be sent before reaching this
         * size — see linger.ms below.
         */
        props.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
        /**
         * Max time to wait for a batch to fill before sending. Default is 0,
         * i.e. send immediately; throughput/latency trade-off with batch.size.
         */
        props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
        /**
         * Producer-side buffer size in bytes (33554432 = 32 MB, the default).
         */
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
        /**
         * Durability setting; acks has 3 values:
         * 0: producer ignores the broker's result entirely
         * all: broker waits for all replicas to persist before acknowledging
         * 1: broker acknowledges after the leader persists the message
         */
        props.put(ProducerConfig.ACKS_CONFIG, "1");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
        return props;
    }
    /**
     * Sends 100 small JSON records to "test_kafka_game_x", logging each
     * acknowledgement, then waits briefly and closes the producer.
     */
    @Override
    public void run() {
        KafkaProducer<String, String> producer = new KafkaProducer<>(configure());
        for (int i = 0; i < 100; i++) {
            JSONObject json = new JSONObject();
            json.put("id", i);
            json.put("ip", "192.168.0." + i);
            json.put("date", new Date().toString());
            String k = "key" + i;
            producer.send(new ProducerRecord<String, String>("test_kafka_game_x", k, json.toJSONString()), new Callback() {
                @Override
                public void onCompletion(RecordMetadata recordMetadata, Exception e) {
                    if (null != e) {
                        log.error("send error,msg is " + e.getMessage());
                    } else {
                        log.info("the offset of the record we just send is: " + recordMetadata.offset());
                    }
                }
            });
        }
        try {
            sleep(3000);
        } catch (InterruptedException e) {
            log.error("Interrupted thread error, msg is " + e.getMessage());
        }
        producer.close();
    }
    public static void main(String[] args) {
        // Fixed-size pool (although only a single producer task is submitted).
        ExecutorService executorService = Executors.newFixedThreadPool(MAX_THREAD_SIZE);
        // Submit the producer task.
        executorService.submit(new JProducerThread());
        // Stop accepting new tasks; running task finishes normally.
        executorService.shutdown();
    }
}
package com.zorkdata.tools.utils;
import java.io.Closeable;
import java.io.IOException;
/**
* @author DeleMing
*/
public class ProducerPool implements Closeable {
    private CustomerProducer[] pool;
    /** Number of pooled producers. */
    private int threadNum = 15;
    /** Round-robin cursor over the pool. */
    private int index = 0;
    private static ProducerPool producerInstance = null;

    /**
     * Lazily creates the singleton pool. Synchronized — matching
     * CustomerProducer.getInstance — so two threads cannot each build a
     * 15-producer pool during first use.
     *
     * @param propertiesName path of the properties file for the producers
     * @return the shared pool
     */
    public static synchronized ProducerPool getInstance(String propertiesName) {
        if (producerInstance == null) {
            producerInstance = new ProducerPool(propertiesName);
        }
        return ProducerPool.producerInstance;
    }

    private ProducerPool(String propertiesName) {
        init(propertiesName);
    }

    /** Fills the pool with {@link #threadNum} producers. */
    public void init(String propertiesName) {
        pool = new CustomerProducer[threadNum];
        for (int i = 0; i < threadNum; i++) {
            pool[i] = new CustomerProducer(propertiesName);
        }
    }

    /**
     * @return the next producer, round-robin; the cursor resets past 65535
     *         to avoid unbounded growth
     */
    public CustomerProducer getProducer() {
        if (index > 65535) {
            index = 0;
        }
        return pool[index++ % threadNum];
    }

    @Override
    public void close() throws IOException {
        // Intentionally a no-op: CustomerProducer exposes no close hook.
    }
}
package com.zorkdata.tools.utils;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.Properties;
/**
* @author DeleMing
*/
public class PropertiesUtil {
    /**
     * Loads a properties file from the filesystem using UTF-8.
     *
     * @param propertieFileName path of the properties file on disk
     * @return the loaded Properties
     * @throws Exception when the file is missing or unreadable
     */
    public static Properties getProperties(String propertieFileName) throws Exception {
        Properties properties = new Properties();
        // try-with-resources replaces the original's nested finally blocks
        // with their silently-swallowed close() exceptions; the reader closes
        // the underlying stream, and both are released in reverse order.
        try (InputStream inputStream = new FileInputStream(new File(propertieFileName));
             InputStreamReader inputStreamReader = new InputStreamReader(inputStream, "UTF-8")) {
            properties.load(inputStreamReader);
        }
        return properties;
    }
}
package com.zorkdata.tools.utils;
import org.joda.time.DateTime;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* @author DeleMing
*/
public class StringUtil {

    /** Ad-hoc demo entry point left by the author; prints sample conversions. */
    public static void main(String[] args) {
        SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        long a = Long.parseLong("1487810258000");
        System.out.println(df.format(new Date(a)));
        System.out.println(getMSTime("2017-02-23T00:37:38Z"));
    }

    /**
     * Normalizes a time string to ISO-8601 form (e.g. "2017-02-23T00:37:38.778+08:00").
     * Accepts epoch milliseconds (13 digits), epoch seconds (10 digits), or any
     * string joda-time's DateTime constructor can parse.
     *
     * @param str raw time string; may be null
     * @return ISO-8601 string, or null on null input or parse failure
     */
    public static String getISOTime(String str) {
        if (str == null) {
            return str;
        }
        str = str.trim();
        try {
            DateTime datetime;
            // A leading '1' covers epoch timestamps up to year 2033.
            if (str.length() == 13 && str.startsWith("1")) {
                // epoch milliseconds
                datetime = new DateTime(Long.parseLong(str));
            } else if (str.length() == 10 && str.startsWith("1")) {
                // epoch seconds
                datetime = new DateTime(Long.parseLong(str) * 1000);
            } else {
                datetime = new DateTime(str);
            }
            return datetime.toDateTimeISO().toString();
        } catch (Exception ex) {
            return null;
        }
    }

    /**
     * Converts a time string to epoch milliseconds. Accepts the same inputs
     * as {@link #getISOTime(String)}.
     *
     * @param str raw time string; may be null
     * @return epoch milliseconds, or -1 on null input or parse failure
     */
    public static long getMSTime(String str) {
        if (str == null) {
            return -1;
        }
        str = str.trim();
        try {
            // A leading '1' covers epoch timestamps up to year 2033.
            if (str.length() == 13 && str.startsWith("1")) {
                // already epoch milliseconds
                return Long.parseLong(str);
            } else if (str.length() == 10 && str.startsWith("1")) {
                // epoch seconds
                return Long.parseLong(str) * 1000;
            } else {
                return new DateTime(str).getMillis();
            }
        } catch (Exception ex) {
            return -1;
        }
    }

    /**
     * Strips/replaces characters that the downstream Spark job cannot handle
     * once metrics have been written to Kafka.
     *
     * @param str metric key to sanitize (must be non-null)
     * @return sanitized key
     */
    public static String replaceChar4MetricKey(String str) {
        return str.replaceAll("\"", "")
                .replaceAll(",", "_")
                .replaceAll("\\[", "")
                .replaceAll("]", "")
                .replaceAll("\\\\", "")
                .replaceAll(" ", "_")
                .replaceAll("=", "")
                .replaceAll(":", "")
                .replaceAll("\\.", "_");
    }

    /** Decimal digit strings "0".."9" (kept public/mutable for compatibility). */
    public static List<String> numbers = new ArrayList<String>();
    static {
        for (int i = 0; i <= 9; i++) {
            numbers.add(String.valueOf(i));
        }
    }

    /** Matches an optionally-negative integer or decimal number. */
    private static Pattern pattern = Pattern.compile("^(-?\\d+)(\\.\\d+)?$");

    /**
     * Tests whether the string is a plain (possibly negative, possibly
     * decimal) number. Null-safe: returns false for null, matching the
     * null-handling of the other predicates in this class (the original
     * threw NullPointerException on null).
     *
     * @param str candidate string; may be null
     * @return true if it matches the numeric pattern
     */
    public static boolean isNumeric(String str) {
        if (str == null) {
            return false;
        }
        Matcher isNum = pattern.matcher(str);
        return isNum.matches();
    }

    /**
     * @param str candidate string; may be null
     * @return true if null, blank after trimming, or the literal "NULL" (any case)
     */
    public static boolean isNull(String str) {
        if (str == null) {
            return true;
        }
        str = str.trim();
        return "".equals(str) || "NULL".equalsIgnoreCase(str);
    }

    /**
     * @param str candidate string; may be null
     * @return true if the trimmed string parses as a double
     */
    public static boolean isDouble(String str) {
        if (str == null) {
            return false;
        }
        str = str.trim();
        try {
            Double.parseDouble(str);
            return true;
        } catch (Exception ex) {
            return false;
        }
    }

    /**
     * @param str candidate string; may be null
     * @return parsed Double, or null on null input or parse failure
     */
    public static Double getDouble(String str) {
        if (str == null) {
            return null;
        }
        str = str.trim();
        try {
            return Double.valueOf(str);
        } catch (Exception ex) {
            return null;
        }
    }

    /**
     * @param str          candidate string; may be null
     * @param defaultValue value returned when parsing fails
     * @return parsed double, or {@code defaultValue}
     */
    public static double getDouble(String str, double defaultValue) {
        Double d = getDouble(str);
        return d == null ? defaultValue : d;
    }

    /**
     * @param str          candidate string; may be null
     * @param defaultValue value returned when parsing fails
     * @return parsed long, or {@code defaultValue}
     */
    public static long getLong(String str, long defaultValue) {
        if (str == null) {
            return defaultValue;
        }
        str = str.trim();
        try {
            // parseLong avoids the needless boxing of the original Long.valueOf
            return Long.parseLong(str);
        } catch (Exception ex) {
            return defaultValue;
        }
    }

    /**
     * @param str          candidate string; may be null
     * @param defaultValue value returned when parsing fails
     * @return parsed int, or {@code defaultValue}
     */
    public static int getInt(String str, int defaultValue) {
        if (str == null) {
            return defaultValue;
        }
        str = str.trim();
        try {
            // parseInt avoids the needless boxing of the original Integer.valueOf
            return Integer.parseInt(str);
        } catch (Exception ex) {
            return defaultValue;
        }
    }
}
#kafka.servers = kafka-1:19092,kafka-2:19092,kafka-3:19092
kafka.servers = zorkdata-95:9092
kafka.batch.size = 1
log.size = 10000
log.topic = flinkx
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment