Mock-Data · Commits · 39754b35

Commit 39754b35, authored May 08, 2021 by 屈庆涛
Commit message: 111
Parent: 66e5ae0d

Showing 34 changed files with 2009 additions and 112 deletions (+2009 / -112)
Changed files (34):

.idea/.gitignore  (+11, -0)
.idea/inspectionProfiles/Project_Default.xml  (+36, -0)
.idea/modules.xml  (+8, -0)
.idea/uiDesigner.xml  (+124, -0)
.idea/vcs.xml  (+6, -0)
mock-data.iml  (+121, -1)
src/main/java/com/zorkdata/tools/kafka/Producer.java  (+4, -4)
src/main/java/com/zorkdata/tools/mock/MetricNode1ShanDong.java  (+8, -6)
src/main/java/com/zorkdata/tools/mock/MockLogNode1.java  (+2, -2)
src/main/java/com/zorkdata/tools/mock/MockMetricNode3.java  (+6, -28)
src/main/java/com/zorkdata/tools/mock/SystemIndex/MockLogSI_Alarm_zork90_10.java  (+73, -0)
src/main/java/com/zorkdata/tools/mock/SystemIndex/MockLogSI_DevTest_node1.java  (+2, -2)
src/main/java/com/zorkdata/tools/mock/hostAlarm/MetricNoAlarmBeiYong.java  (+76, -0)
src/main/java/com/zorkdata/tools/mock/hostAlarm/MetricNoAlarmDM.java  (+77, -0)
src/main/java/com/zorkdata/tools/mock/hostAlarm/MetricNoAlarmZork9010.java  (+11, -22)
src/main/java/com/zorkdata/tools/mock/hostAlarm/MetricNode1LinuxModule.java  (+72, -0)
src/main/java/com/zorkdata/tools/mock/hostAlarm/MetricYf121DataServiceCluster.java  (+77, -0)
src/main/java/com/zorkdata/tools/mock/hostAlarm/MetricYf121JiChuJianKongCLuster.java  (+77, -0)
src/main/java/com/zorkdata/tools/mock/hostAlarm/MetricYf122DK.java  (+77, -0)
src/main/java/com/zorkdata/tools/mock/hostAlarm/MetricYf122JL.java  (+77, -0)
src/main/java/com/zorkdata/tools/mock/ruleScopeLog/MockLogNoAlarmDM.java  (+101, -0)
src/main/java/com/zorkdata/tools/mock/ruleScopeLog/MockLogNode1LinuxModule.java  (+100, -0)
src/main/java/com/zorkdata/tools/mock/ruleScopeLog/MockLogYf121JCJKCluster.java  (+9, -10)
src/main/java/com/zorkdata/tools/mock/ruleScopeLog/MockLogYf121PaasCluster.java  (+107, -0)
src/main/java/com/zorkdata/tools/mock/ruleScopeLog/MockLogYf122DataServiceClusterKafkaModule.java  (+108, -0)
src/main/java/com/zorkdata/tools/mock/ruleScopeLog/MockLogYf122JCJKClusterLinuxModule.java  (+108, -0)
src/main/java/com/zorkdata/tools/mock/ruleScopeMetric/MetricNoAlarmBeiYong.java  (+75, -0)
src/main/java/com/zorkdata/tools/mock/ruleScopeMetric/MetricNoAlarmDM.java  (+75, -0)
src/main/java/com/zorkdata/tools/mock/ruleScopeMetric/MetricNoAlarmZork9010.java  (+76, -0)
src/main/java/com/zorkdata/tools/mock/ruleScopeMetric/MetricNode1ShanDongModule.java  (+70, -0)
src/main/java/com/zorkdata/tools/mock/ruleScopeMetric/MetricYf121DataServiceCluster.java  (+10, -37)
src/main/java/com/zorkdata/tools/mock/ruleScopeMetric/MetricYf121JiChuJianKongCLuster.java  (+75, -0)
src/main/java/com/zorkdata/tools/mock/ruleScopeMetric/MetricYf122DK.java  (+75, -0)
src/main/java/com/zorkdata/tools/mock/ruleScopeMetric/MetricYf122JL.java  (+75, -0)
.idea/.gitignore  (new file, mode 100644)

# Default ignored files
/shelf/
/workspace.xml
# Datasource local storage ignored files
#/../../../../:\IdeaProjects\mock-data\.idea/dataSources/
/dataSources.local.xml
# Editor-based HTTP Client requests
/httpRequests/
.idea/inspectionProfiles/Project_Default.xml  (new file, mode 100644)

<component name="InspectionProjectProfileManager">
  <profile version="1.0">
    <option name="myName" value="Project Default" />
    <inspection_tool class="JavaDoc" enabled="true" level="WARNING" enabled_by_default="true">
      <option name="TOP_LEVEL_CLASS_OPTIONS">
        <value>
          <option name="ACCESS_JAVADOC_REQUIRED_FOR" value="none" />
          <option name="REQUIRED_TAGS" value="" />
        </value>
      </option>
      <option name="INNER_CLASS_OPTIONS">
        <value>
          <option name="ACCESS_JAVADOC_REQUIRED_FOR" value="none" />
          <option name="REQUIRED_TAGS" value="" />
        </value>
      </option>
      <option name="METHOD_OPTIONS">
        <value>
          <option name="ACCESS_JAVADOC_REQUIRED_FOR" value="none" />
          <option name="REQUIRED_TAGS" value="@return@param@throws or @exception" />
        </value>
      </option>
      <option name="FIELD_OPTIONS">
        <value>
          <option name="ACCESS_JAVADOC_REQUIRED_FOR" value="none" />
          <option name="REQUIRED_TAGS" value="" />
        </value>
      </option>
      <option name="IGNORE_DEPRECATED" value="false" />
      <option name="IGNORE_JAVADOC_PERIOD" value="true" />
      <option name="IGNORE_DUPLICATED_THROWS" value="false" />
      <option name="IGNORE_POINT_TO_ITSELF" value="false" />
      <option name="myAdditionalJavadocTags" value="date" />
    </inspection_tool>
  </profile>
</component>
.idea/modules.xml  (new file, mode 100644)

<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectModuleManager">
    <modules>
      <module fileurl="file://$PROJECT_DIR$/mock-data.iml" filepath="$PROJECT_DIR$/mock-data.iml" />
    </modules>
  </component>
</project>
.idea/uiDesigner.xml  (new file, mode 100644)

<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="Palette2">
    <group name="Swing">
      <item class="com.intellij.uiDesigner.HSpacer" tooltip-text="Horizontal Spacer" icon="/com/intellij/uiDesigner/icons/hspacer.png" removable="false" auto-create-binding="false" can-attach-label="false">
        <default-constraints vsize-policy="1" hsize-policy="6" anchor="0" fill="1" />
      </item>
      <item class="com.intellij.uiDesigner.VSpacer" tooltip-text="Vertical Spacer" icon="/com/intellij/uiDesigner/icons/vspacer.png" removable="false" auto-create-binding="false" can-attach-label="false">
        <default-constraints vsize-policy="6" hsize-policy="1" anchor="0" fill="2" />
      </item>
      <item class="javax.swing.JPanel" icon="/com/intellij/uiDesigner/icons/panel.png" removable="false" auto-create-binding="false" can-attach-label="false">
        <default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3" />
      </item>
      <item class="javax.swing.JScrollPane" icon="/com/intellij/uiDesigner/icons/scrollPane.png" removable="false" auto-create-binding="false" can-attach-label="true">
        <default-constraints vsize-policy="7" hsize-policy="7" anchor="0" fill="3" />
      </item>
      <item class="javax.swing.JButton" icon="/com/intellij/uiDesigner/icons/button.png" removable="false" auto-create-binding="true" can-attach-label="false">
        <default-constraints vsize-policy="0" hsize-policy="3" anchor="0" fill="1" />
        <initial-values>
          <property name="text" value="Button" />
        </initial-values>
      </item>
      <item class="javax.swing.JRadioButton" icon="/com/intellij/uiDesigner/icons/radioButton.png" removable="false" auto-create-binding="true" can-attach-label="false">
        <default-constraints vsize-policy="0" hsize-policy="3" anchor="8" fill="0" />
        <initial-values>
          <property name="text" value="RadioButton" />
        </initial-values>
      </item>
      <item class="javax.swing.JCheckBox" icon="/com/intellij/uiDesigner/icons/checkBox.png" removable="false" auto-create-binding="true" can-attach-label="false">
        <default-constraints vsize-policy="0" hsize-policy="3" anchor="8" fill="0" />
        <initial-values>
          <property name="text" value="CheckBox" />
        </initial-values>
      </item>
      <item class="javax.swing.JLabel" icon="/com/intellij/uiDesigner/icons/label.png" removable="false" auto-create-binding="false" can-attach-label="false">
        <default-constraints vsize-policy="0" hsize-policy="0" anchor="8" fill="0" />
        <initial-values>
          <property name="text" value="Label" />
        </initial-values>
      </item>
      <item class="javax.swing.JTextField" icon="/com/intellij/uiDesigner/icons/textField.png" removable="false" auto-create-binding="true" can-attach-label="true">
        <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
          <preferred-size width="150" height="-1" />
        </default-constraints>
      </item>
      <item class="javax.swing.JPasswordField" icon="/com/intellij/uiDesigner/icons/passwordField.png" removable="false" auto-create-binding="true" can-attach-label="true">
        <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
          <preferred-size width="150" height="-1" />
        </default-constraints>
      </item>
      <item class="javax.swing.JFormattedTextField" icon="/com/intellij/uiDesigner/icons/formattedTextField.png" removable="false" auto-create-binding="true" can-attach-label="true">
        <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
          <preferred-size width="150" height="-1" />
        </default-constraints>
      </item>
      <item class="javax.swing.JTextArea" icon="/com/intellij/uiDesigner/icons/textArea.png" removable="false" auto-create-binding="true" can-attach-label="true">
        <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
          <preferred-size width="150" height="50" />
        </default-constraints>
      </item>
      <item class="javax.swing.JTextPane" icon="/com/intellij/uiDesigner/icons/textPane.png" removable="false" auto-create-binding="true" can-attach-label="true">
        <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
          <preferred-size width="150" height="50" />
        </default-constraints>
      </item>
      <item class="javax.swing.JEditorPane" icon="/com/intellij/uiDesigner/icons/editorPane.png" removable="false" auto-create-binding="true" can-attach-label="true">
        <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
          <preferred-size width="150" height="50" />
        </default-constraints>
      </item>
      <item class="javax.swing.JComboBox" icon="/com/intellij/uiDesigner/icons/comboBox.png" removable="false" auto-create-binding="true" can-attach-label="true">
        <default-constraints vsize-policy="0" hsize-policy="2" anchor="8" fill="1" />
      </item>
      <item class="javax.swing.JTable" icon="/com/intellij/uiDesigner/icons/table.png" removable="false" auto-create-binding="true" can-attach-label="false">
        <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
          <preferred-size width="150" height="50" />
        </default-constraints>
      </item>
      <item class="javax.swing.JList" icon="/com/intellij/uiDesigner/icons/list.png" removable="false" auto-create-binding="true" can-attach-label="false">
        <default-constraints vsize-policy="6" hsize-policy="2" anchor="0" fill="3">
          <preferred-size width="150" height="50" />
        </default-constraints>
      </item>
      <item class="javax.swing.JTree" icon="/com/intellij/uiDesigner/icons/tree.png" removable="false" auto-create-binding="true" can-attach-label="false">
        <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
          <preferred-size width="150" height="50" />
        </default-constraints>
      </item>
      <item class="javax.swing.JTabbedPane" icon="/com/intellij/uiDesigner/icons/tabbedPane.png" removable="false" auto-create-binding="true" can-attach-label="false">
        <default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3">
          <preferred-size width="200" height="200" />
        </default-constraints>
      </item>
      <item class="javax.swing.JSplitPane" icon="/com/intellij/uiDesigner/icons/splitPane.png" removable="false" auto-create-binding="false" can-attach-label="false">
        <default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3">
          <preferred-size width="200" height="200" />
        </default-constraints>
      </item>
      <item class="javax.swing.JSpinner" icon="/com/intellij/uiDesigner/icons/spinner.png" removable="false" auto-create-binding="true" can-attach-label="true">
        <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1" />
      </item>
      <item class="javax.swing.JSlider" icon="/com/intellij/uiDesigner/icons/slider.png" removable="false" auto-create-binding="true" can-attach-label="false">
        <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1" />
      </item>
      <item class="javax.swing.JSeparator" icon="/com/intellij/uiDesigner/icons/separator.png" removable="false" auto-create-binding="false" can-attach-label="false">
        <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3" />
      </item>
      <item class="javax.swing.JProgressBar" icon="/com/intellij/uiDesigner/icons/progressbar.png" removable="false" auto-create-binding="true" can-attach-label="false">
        <default-constraints vsize-policy="0" hsize-policy="6" anchor="0" fill="1" />
      </item>
      <item class="javax.swing.JToolBar" icon="/com/intellij/uiDesigner/icons/toolbar.png" removable="false" auto-create-binding="false" can-attach-label="false">
        <default-constraints vsize-policy="0" hsize-policy="6" anchor="0" fill="1">
          <preferred-size width="-1" height="20" />
        </default-constraints>
      </item>
      <item class="javax.swing.JToolBar$Separator" icon="/com/intellij/uiDesigner/icons/toolbarSeparator.png" removable="false" auto-create-binding="false" can-attach-label="false">
        <default-constraints vsize-policy="0" hsize-policy="0" anchor="0" fill="1" />
      </item>
      <item class="javax.swing.JScrollBar" icon="/com/intellij/uiDesigner/icons/scrollbar.png" removable="false" auto-create-binding="true" can-attach-label="false">
        <default-constraints vsize-policy="6" hsize-policy="0" anchor="0" fill="2" />
      </item>
    </group>
  </component>
</project>
.idea/vcs.xml  (new file, mode 100644)

<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="VcsDirectoryMappings">
    <mapping directory="" vcs="Git" />
  </component>
</project>
mock-data.iml  (modified: the former single-line stub <module type="JAVA_MODULE" version="4" /> is replaced by the full Maven module definition below)

<?xml version="1.0" encoding="UTF-8"?>
<module org.jetbrains.idea.maven.project.MavenProjectsManager.isMavenModule="true" type="JAVA_MODULE" version="4">
  <component name="FacetManager">
    <facet type="web" name="Web">
      <configuration>
        <webroots />
      </configuration>
    </facet>
    <facet type="Spring" name="Spring">
      <configuration />
    </facet>
  </component>
  <component name="NewModuleRootManager" LANGUAGE_LEVEL="JDK_1_8">
    <output url="file://$MODULE_DIR$/target/classes" />
    <output-test url="file://$MODULE_DIR$/target/test-classes" />
    <content url="file://$MODULE_DIR$">
      <sourceFolder url="file://$MODULE_DIR$/src/main/java" isTestSource="false" />
      <sourceFolder url="file://$MODULE_DIR$/src/main/resources" type="java-resource" />
      <excludeFolder url="file://$MODULE_DIR$/target" />
    </content>
    <orderEntry type="inheritedJdk" />
    <orderEntry type="sourceFolder" forTests="false" />
    <orderEntry type="library" name="Maven: org.springframework.boot:spring-boot-starter-web:2.1.6.RELEASE" level="project" />
    <orderEntry type="library" name="Maven: org.springframework.boot:spring-boot-starter:2.1.6.RELEASE" level="project" />
    <orderEntry type="library" name="Maven: org.springframework.boot:spring-boot:2.1.6.RELEASE" level="project" />
    <orderEntry type="library" name="Maven: org.springframework.boot:spring-boot-autoconfigure:2.1.6.RELEASE" level="project" />
    <orderEntry type="library" name="Maven: org.springframework.boot:spring-boot-starter-logging:2.1.6.RELEASE" level="project" />
    <orderEntry type="library" name="Maven: ch.qos.logback:logback-classic:1.2.3" level="project" />
    <orderEntry type="library" name="Maven: ch.qos.logback:logback-core:1.2.3" level="project" />
    <orderEntry type="library" name="Maven: org.apache.logging.log4j:log4j-to-slf4j:2.11.2" level="project" />
    <orderEntry type="library" name="Maven: org.apache.logging.log4j:log4j-api:2.11.2" level="project" />
    <orderEntry type="library" name="Maven: org.slf4j:jul-to-slf4j:1.7.26" level="project" />
    <orderEntry type="library" name="Maven: javax.annotation:javax.annotation-api:1.3.2" level="project" />
    <orderEntry type="library" scope="RUNTIME" name="Maven: org.yaml:snakeyaml:1.23" level="project" />
    <orderEntry type="library" name="Maven: org.springframework.boot:spring-boot-starter-json:2.1.6.RELEASE" level="project" />
    <orderEntry type="library" name="Maven: com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.9.9" level="project" />
    <orderEntry type="library" name="Maven: com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.9.9" level="project" />
    <orderEntry type="library" name="Maven: com.fasterxml.jackson.module:jackson-module-parameter-names:2.9.9" level="project" />
    <orderEntry type="library" name="Maven: org.springframework.boot:spring-boot-starter-tomcat:2.1.6.RELEASE" level="project" />
    <orderEntry type="library" name="Maven: org.apache.tomcat.embed:tomcat-embed-core:9.0.21" level="project" />
    <orderEntry type="library" name="Maven: org.apache.tomcat.embed:tomcat-embed-el:9.0.21" level="project" />
    <orderEntry type="library" name="Maven: org.apache.tomcat.embed:tomcat-embed-websocket:9.0.21" level="project" />
    <orderEntry type="library" name="Maven: org.hibernate.validator:hibernate-validator:6.0.17.Final" level="project" />
    <orderEntry type="library" name="Maven: javax.validation:validation-api:2.0.1.Final" level="project" />
    <orderEntry type="library" name="Maven: org.jboss.logging:jboss-logging:3.3.2.Final" level="project" />
    <orderEntry type="library" name="Maven: com.fasterxml:classmate:1.4.0" level="project" />
    <orderEntry type="library" name="Maven: org.springframework:spring-web:5.1.8.RELEASE" level="project" />
    <orderEntry type="library" name="Maven: org.springframework:spring-beans:5.1.8.RELEASE" level="project" />
    <orderEntry type="library" name="Maven: org.springframework:spring-webmvc:5.1.8.RELEASE" level="project" />
    <orderEntry type="library" name="Maven: org.springframework:spring-aop:5.1.8.RELEASE" level="project" />
    <orderEntry type="library" name="Maven: org.springframework:spring-context:5.1.8.RELEASE" level="project" />
    <orderEntry type="library" name="Maven: org.springframework:spring-expression:5.1.8.RELEASE" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: org.springframework.boot:spring-boot-starter-test:2.1.6.RELEASE" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: org.springframework.boot:spring-boot-test:2.1.6.RELEASE" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: org.springframework.boot:spring-boot-test-autoconfigure:2.1.6.RELEASE" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: com.jayway.jsonpath:json-path:2.4.0" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: net.minidev:json-smart:2.3" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: net.minidev:accessors-smart:1.2" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: org.ow2.asm:asm:5.0.4" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: org.assertj:assertj-core:3.11.1" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: org.mockito:mockito-core:2.23.4" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: net.bytebuddy:byte-buddy:1.9.13" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: net.bytebuddy:byte-buddy-agent:1.9.13" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: org.objenesis:objenesis:2.6" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: org.hamcrest:hamcrest-core:1.3" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: org.hamcrest:hamcrest-library:1.3" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: org.skyscreamer:jsonassert:1.5.0" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: com.vaadin.external.google:android-json:0.0.20131108.vaadin1" level="project" />
    <orderEntry type="library" name="Maven: org.springframework:spring-core:5.1.8.RELEASE" level="project" />
    <orderEntry type="library" name="Maven: org.springframework:spring-jcl:5.1.8.RELEASE" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: org.springframework:spring-test:5.1.8.RELEASE" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: org.xmlunit:xmlunit-core:2.6.2" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: javax.xml.bind:jaxb-api:2.3.1" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: javax.activation:javax.activation-api:1.2.0" level="project" />
    <orderEntry type="library" name="Maven: org.apache.kafka:kafka_2.11:1.1.1" level="project" />
    <orderEntry type="library" name="Maven: org.apache.kafka:kafka-clients:2.0.1" level="project" />
    <orderEntry type="library" name="Maven: org.lz4:lz4-java:1.4.1" level="project" />
    <orderEntry type="library" name="Maven: com.fasterxml.jackson.core:jackson-databind:2.9.9" level="project" />
    <orderEntry type="library" name="Maven: com.fasterxml.jackson.core:jackson-annotations:2.9.0" level="project" />
    <orderEntry type="library" name="Maven: com.fasterxml.jackson.core:jackson-core:2.9.9" level="project" />
    <orderEntry type="library" name="Maven: net.sf.jopt-simple:jopt-simple:5.0.4" level="project" />
    <orderEntry type="library" name="Maven: com.yammer.metrics:metrics-core:2.2.0" level="project" />
    <orderEntry type="library" name="Maven: org.scala-lang:scala-library:2.11.12" level="project" />
    <orderEntry type="library" name="Maven: org.scala-lang:scala-reflect:2.11.12" level="project" />
    <orderEntry type="library" name="Maven: com.typesafe.scala-logging:scala-logging_2.11:3.8.0" level="project" />
    <orderEntry type="library" name="Maven: org.slf4j:slf4j-api:1.7.26" level="project" />
    <orderEntry type="library" name="Maven: com.101tec:zkclient:0.10" level="project" />
    <orderEntry type="library" name="Maven: org.apache.zookeeper:zookeeper:3.4.10" level="project" />
    <orderEntry type="library" name="Maven: com.alibaba:fastjson:1.2.62" level="project" />
    <orderEntry type="library" name="Maven: org.projectlombok:lombok:1.18.8" level="project" />
    <orderEntry type="library" name="Maven: org.apache.avro:avro:1.8.2" level="project" />
    <orderEntry type="library" name="Maven: org.codehaus.jackson:jackson-core-asl:1.9.13" level="project" />
    <orderEntry type="library" name="Maven: org.codehaus.jackson:jackson-mapper-asl:1.9.13" level="project" />
    <orderEntry type="library" name="Maven: com.thoughtworks.paranamer:paranamer:2.7" level="project" />
    <orderEntry type="library" name="Maven: org.xerial.snappy:snappy-java:1.1.1.3" level="project" />
    <orderEntry type="library" name="Maven: org.apache.commons:commons-compress:1.8.1" level="project" />
    <orderEntry type="library" name="Maven: org.tukaani:xz:1.5" level="project" />
    <orderEntry type="library" name="Maven: org.apache.avro:avro-tools:1.8.2" level="project" />
    <orderEntry type="library" name="Maven: org.apache.avro:avro-compiler:1.8.2" level="project" />
    <orderEntry type="library" name="Maven: commons-lang:commons-lang:2.6" level="project" />
    <orderEntry type="library" name="Maven: org.apache.velocity:velocity:1.7" level="project" />
    <orderEntry type="library" name="Maven: commons-collections:commons-collections:3.2.1" level="project" />
    <orderEntry type="library" name="Maven: joda-time:joda-time:2.10.2" level="project" />
    <orderEntry type="library" name="Maven: org.apache.avro:avro-ipc:1.8.2" level="project" />
    <orderEntry type="library" name="Maven: org.mortbay.jetty:jetty:6.1.26" level="project" />
    <orderEntry type="library" name="Maven: org.mortbay.jetty:jetty-util:6.1.26" level="project" />
    <orderEntry type="library" name="Maven: io.netty:netty:3.5.13.Final" level="project" />
    <orderEntry type="library" name="Maven: org.mortbay.jetty:servlet-api:2.5-20081211" level="project" />
    <orderEntry type="library" name="Maven: org.apache.avro:avro-mapred:1.8.2" level="project" />
    <orderEntry type="library" name="Maven: commons-codec:commons-codec:1.11" level="project" />
    <orderEntry type="library" name="Maven: commons-cli:commons-cli:1.2" level="project" />
    <orderEntry type="library" name="Maven: commons-logging:commons-logging:1.1.1" level="project" />
    <orderEntry type="library" name="Maven: commons-httpclient:commons-httpclient:3.1" level="project" />
    <orderEntry type="library" name="Maven: org.apache.avro:trevni-core:1.8.2" level="project" />
    <orderEntry type="library" name="Maven: org.apache.avro:trevni-avro:1.8.2" level="project" />
    <orderEntry type="library" name="Maven: org.apache.avro:avro-mapred:hadoop2:1.8.2" level="project" />
    <orderEntry type="library" name="Maven: org.apache.avro:trevni-core:tests:1.8.2" level="project" />
    <orderEntry type="library" name="Maven: org.apache.avro:trevni-avro:tests:1.8.2" level="project" />
    <orderEntry type="library" name="Maven: com.github.stephenc.findbugs:findbugs-annotations:1.3.9-1" level="project" />
    <orderEntry type="library" scope="TEST" name="Maven: junit:junit:4.12" level="project" />
  </component>
</module>
src/main/java/com/zorkdata/tools/kafka/Producer.java

@@ -9,8 +9,8 @@ import java.util.*;
 public class Producer {
     // static String servers = "yf122:9092,yf121:9092,yf120:9092";
-    // static String servers = "node1:9092,node2:9092,node3:9092";
-    static String servers = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
+    static String servers = "node1:9092,node2:9092,node3:9092";
+    // static String servers = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
     static int batchsize = 1;
     static Producer testProducer;
     static String metricTopic;

@@ -45,8 +45,8 @@ public class Producer {
     }

     public void initConfig() throws Exception {
-        // servers = "node1:9092,node2:9092,node3:9092";
-        servers = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
+        servers = "node1:9092,node2:9092,node3:9092";
+        // servers = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
         // servers = "kafka-1:19092,kafka-2:19092,kafka-3:19092";
         batchsize = 100000;
     }
src/main/java/com/zorkdata/tools/mock/MockMetricNode2.java → src/main/java/com/zorkdata/tools/mock/MetricNode1ShanDong.java

@@ -5,20 +5,21 @@ import com.zorkdata.tools.avro.AvroSerializerFactory;
 import org.apache.kafka.clients.producer.KafkaProducer;
 import org.apache.kafka.clients.producer.ProducerRecord;
 import org.apache.kafka.common.serialization.ByteArraySerializer;
+import org.joda.time.DateTime;

 import java.util.HashMap;
 import java.util.Map;
 import java.util.Properties;
 import java.util.Random;

 /**
  * @param brokerAddr
  * @param topic
  * @author DeleMing
  */
-public class MockMetricNode2 {
+public class MetricNode1ShanDong {
     private static String topic = "dwd_all_metric";
     private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
     // private static String brokerAddr = "autotest-1:9092,autotest-2:9092,autotest-3:9092";
     // private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
     // private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
     // private static String brokerAddr = "localhost:9092";
     private static ProducerRecord<String, byte[]> producerRecord = null;

@@ -39,7 +40,6 @@ public class MockMetricNode2 {
     public static void main(String[] args) throws InterruptedException {
         init();
         //MetricSet
         String metricSetName = "cpu_system_mb";
         //Dimensions

@@ -51,9 +51,11 @@ public class MockMetricNode2 {
         dimensions.put("ip", "192.168.70.212");
         for (int i = 0; i <= 30000; i++) {
             //MetricItem
             Map<String, Double> metrics = new HashMap<>();
-            metrics.put("user_pct", 0.4);
+            metrics.put("user_pct", 0.1);
             //timestamp
             long timestamp = System.currentTimeMillis();
             String timestampString = String.valueOf(timestamp);

@@ -64,7 +66,7 @@ public class MockMetricNode2 {
             //send
             producerRecord = new ProducerRecord<String, byte[]>(topic, null, bytes);
             producer.send(producerRecord);
-            Thread.sleep(30000);   // 210/30 = 7, critical
+            Thread.sleep(10000);
         }
     }
 }
src/main/java/com/zorkdata/tools/mock/MockLogNode1.java

@@ -50,12 +50,12 @@ public class MockLogNode1 {
         dimensions.put("ip", "192.168.70.212");
         dimensions.put("appsystem", "dev_test");
         dimensions.put("clustername", "基础监控");
-        dimensions.put("appprogramname", "ShanDong");
+        // dimensions.put("appprogramname", "ShanDong");
         // dimensions.put("servicename", "linux模块");
         // dimensions.put("servicecode", "linux模块");
         // dimensions.put("appsystem", "dev_test");
         // dimensions.put("clustername", "基础监控");
-        // dimensions.put("appprogramname", "linux模块");
+        dimensions.put("appprogramname", "linux模块");
         // dimensions.put("hostname", "host-11");
         // dimensions.put("ip", "192.168.13.11");
         return dimensions;
src/main/java/com/zorkdata/tools/mock/MockMetricNode3.java

@@ -15,9 +15,10 @@ import java.util.Properties;
  */
 public class MockMetricNode3 {
     private static String topic = "dwd_all_metric";
-    private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
+    // private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
     // private static String brokerAddr = "autotest-1:9092,autotest-2:9092,autotest-3:9092";
+    private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
     // private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
     // private static String brokerAddr = "localhost:9092";
     private static ProducerRecord<String, byte[]> producerRecord = null;
     private static KafkaProducer<String, byte[]> producer = null;

@@ -43,25 +44,17 @@ public class MockMetricNode3 {
         //Dimensions
         Map<String, String> dimensions = new HashMap<>();
-        // dimensions.put("appsystem", "dev_test");
-        // dimensions.put("clustername", "基础监控");
-        // dimensions.put("appprogramname", "ShanDong");
-        // dimensions.put("hostname", "shandong2");
-        // dimensions.put("ip", "192.168.70.220");
         dimensions.put("appsystem", "dev_test");
         dimensions.put("clustername", "基础监控");
         dimensions.put("appprogramname", "linux模块");
         dimensions.put("servicename", "linux模块");
-        dimensions.put("hostname", "yf121");
-        dimensions.put("ip", "192.168.70.121");
+        dimensions.put("hostname", "node3");
+        dimensions.put("ip", "192.168.70.214");
         for (int i = 0; i <= 30000; i++) {
             //MetricItem
             Map<String, Double> metrics = new HashMap<>();
-            metrics.put("user_pct", 0.115);
+            metrics.put("user_pct", 0.3);
             //timestamp
             long timestamp = System.currentTimeMillis();
             String timestampString = String.valueOf(timestamp);

@@ -76,21 +69,6 @@ public class MockMetricNode3 {
             Thread.sleep(30000);
             // 210/70 = 3, warning
         }
     }
-
-    public static double fun1(int i) {
-        double tmp = 0;
-        if (i == 0) {
-            tmp = 0.05;
-        }
-        if (i == 1) {
-            tmp = 0.2;
-        }
-        if (i == 2) {
-            tmp = 0.2;
-        }
-        return tmp;
-    }
 }
src/main/java/com/zorkdata/tools/mock/SystemIndex/MockLogSI_Alarm_zork90_10.java  (new file, mode 100644)

package com.zorkdata.tools.mock.SystemIndex;

import com.alibaba.fastjson.JSONObject;
import com.zorkdata.tools.kafka.Producer;
import com.zorkdata.tools.kafka.ProducerPool;
import com.zorkdata.tools.utils.DateUtil;
import com.zorkdata.tools.utils.PropertiesUtil;
import com.zorkdata.tools.utils.StringUtil;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Random;

public class MockLogSI_Alarm_zork90_10 {

    public static String printData(String logTypeName, String timestamp, String source, String offset,
                                    Map<String, String> dimensions, Map<String, Double> metrics,
                                    Map<String, String> normalFields) {
        JSONObject jsonObject = new JSONObject();
        jsonObject.put("logTypeName", logTypeName);
        jsonObject.put("timestamp", timestamp);
        jsonObject.put("source", source);
        jsonObject.put("offset", offset);
        jsonObject.put("dimensions", dimensions);
        jsonObject.put("measures", metrics);
        jsonObject.put("normalFields", normalFields);
        return jsonObject.toString();
    }

    private static String getRandomOffset() {
        Random random = new Random();
        long l = random.nextInt(10000);
        return String.valueOf(l);
    }

    private static Map<String, String> getRandomDimensions() {
        Random random = new Random();
        int i = random.nextInt(10);
        Map<String, String> dimensions = new HashMap<>();
        dimensions.put("appsystem", "alarm");
        dimensions.put("clustername", "告警集群");
        dimensions.put("appprogramname", "告警模块1");
        dimensions.put("hostname", "zork90-10");
        dimensions.put("ip", "192.168.90.10");
        return dimensions;
    }

    private static Map<String, String> getRandomNormalFieldsError() {
        Map<String, String> normalFields = new HashMap<>(5);
        normalFields.put("message", "qqt_alarm_index_message");
        return normalFields;
    }

    public static void main(String[] args) throws Exception {
        long size = 30000;
        for (int i = 0; i < size; i++) {
            if (i != 0) {
                Thread.sleep(5000);
            }
            String logTypeName = "default_analysis_template";
            String timestamp = DateUtil.getUTCTimeStr();
            System.out.println("timestamp=====" + timestamp);
            String source = "/var/log/test.log";
            String offset = getRandomOffset();
            Map<String, String> dimensions = getRandomDimensions();
            Map<String, Double> measures = new HashMap<>();
            Map<String, String> normalFields = getRandomNormalFieldsError();
            Producer producer = ProducerPool.getInstance().getProducer();
            producer.sendLog("dwd_default_log", logTypeName, timestamp, source, offset, dimensions, measures, normalFields);
        }
    }
}
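
The log-oriented mock classes added under SystemIndex and ruleScopeLog all build the same JSON envelope with fastjson and hand it to the project's ProducerPool. As a quick orientation, here is a minimal sketch, not part of the commit, that calls the public printData helper shown above to print one such record locally; the wrapper class name and the literal timestamp and offset values are illustrative assumptions.

import java.util.HashMap;
import java.util.Map;

// Minimal sketch: prints one log record in the same JSON envelope that the mock
// classes send to the dwd_default_log topic. The timestamp and offset literals are
// illustrative; the real classes take them from DateUtil.getUTCTimeStr() and a Random.
public class PrintOneMockLogRecord {
    public static void main(String[] args) {
        Map<String, String> dimensions = new HashMap<>();
        dimensions.put("appsystem", "alarm");
        dimensions.put("hostname", "zork90-10");
        Map<String, Double> measures = new HashMap<>();
        Map<String, String> normalFields = new HashMap<>();
        normalFields.put("message", "qqt_alarm_index_message");

        // printData is the public static helper defined in MockLogSI_Alarm_zork90_10 above.
        String json = com.zorkdata.tools.mock.SystemIndex.MockLogSI_Alarm_zork90_10.printData(
                "default_analysis_template", "2021-05-08T00:00:00.000Z", "/var/log/test.log", "42",
                dimensions, measures, normalFields);
        System.out.println(json);
        // Expected shape: {"logTypeName":...,"timestamp":...,"source":...,"offset":...,
        //                  "dimensions":{...},"measures":{},"normalFields":{"message":...}}
    }
}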
src/main/java/com/zorkdata/tools/mock/MockLogSI_node1.java → src/main/java/com/zorkdata/tools/mock/SystemIndex/MockLogSI_DevTest_node1.java

-package com.zorkdata.tools.mock;
+package com.zorkdata.tools.mock.SystemIndex;

 import com.alibaba.fastjson.JSONObject;
 import com.zorkdata.tools.kafka.Producer;

@@ -15,7 +15,7 @@ import java.util.Random;
 /**
  * @author zhuzhigang
  */
-public class MockLogSI_node1 {
+public class MockLogSI_DevTest_node1 {

     private static long getSize(String propertiesName) throws Exception {
         Properties properties = PropertiesUtil.getProperties(propertiesName);
src/main/java/com/zorkdata/tools/mock/hostAlarm/MetricNoAlarmBeiYong.java  (new file, mode 100644)

package com.zorkdata.tools.mock.hostAlarm;

import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * @author DeleMing
 */
public class MetricNoAlarmBeiYong {
    private static String topic = "dwd_all_metric";
    private static String brokerAddr = "shandong1:9092,shandong2:9092";
    // private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
    // private static String brokerAddr = "autotest-1:9092,autotest-2:9092,autotest-3:9092";
    // private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
    // private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
    // private static String brokerAddr = "localhost:9092";
    private static ProducerRecord<String, byte[]> producerRecord = null;
    private static KafkaProducer<String, byte[]> producer = null;

    public static void init() {
        Properties props = new Properties();
        props.put("bootstrap.servers", brokerAddr);
        props.put("acks", "1");
        props.put("retries", 0);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", ByteArraySerializer.class.getName());
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        producer = new KafkaProducer<String, byte[]>(props);
    }

    public static void main(String[] args) throws InterruptedException {
        init();
        //MetricSet
        String metricSetName = "original_agent_eb";
        //Dimensions
        Map<String, String> dimensions = new HashMap<>();
        dimensions.put("appsystem", "dev_test");
        dimensions.put("clustername", "备用");
        dimensions.put("appprogramname", "备用");
        dimensions.put("hostname", "ostemplate");
        dimensions.put("ip", "192.168.70.185");
        for (int i = 0; i <= 30000; i++) {
            //MetricItem
            Map<String, Double> metrics = new HashMap<>();
            metrics.put("status", 0d);
            //timestamp
            long timestamp = System.currentTimeMillis();
            String timestampString = String.valueOf(timestamp);
            System.out.println("时间:" + timestampString);
            //AvroSerializer
            AvroSerializer metricSerializer = AvroSerializerFactory.getMetricAvroSerializer();
            byte[] bytes = metricSerializer.serializingMetric(metricSetName, timestampString, dimensions, metrics);
            //send
            producerRecord = new ProducerRecord<String, byte[]>(topic, null, bytes);
            producer.send(producerRecord);
            Thread.sleep(10000);
            // 210/210 = 1, info
        }
    }
}
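
Every new hostAlarm metric mock follows the same shape as MetricNoAlarmBeiYong above: init() builds a KafkaProducer for byte[] values and main() Avro-serializes one record per interval onto the dwd_all_metric topic, varying only the dimensions and the sleep period. Below is a minimal consumer sketch for checking that the mock records actually arrive; it is not part of the commit, the broker list and group id are assumptions, and it only reports payload sizes because decoding the values would need the project's Avro schema.

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

// Minimal verification sketch (not part of the commit): polls dwd_all_metric and
// prints the size of each Avro-encoded payload produced by the mock classes above.
// Swap the broker list for whichever cluster the mock is pointed at.
public class CheckMockMetrics {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "node1:9092,node2:9092,node3:9092");
        props.put("group.id", "mock-data-check");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer");
        try (KafkaConsumer<String, byte[]> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(Collections.singletonList("dwd_all_metric"));
            while (true) {
                ConsumerRecords<String, byte[]> records = consumer.poll(Duration.ofSeconds(5));
                for (ConsumerRecord<String, byte[]> record : records) {
                    System.out.println("offset=" + record.offset() + " bytes=" + record.value().length);
                }
            }
        }
    }
}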
src/main/java/com/zorkdata/tools/mock/hostAlarm/MetricNoAlarmDM.java  (new file, mode 100644)

package com.zorkdata.tools.mock.hostAlarm;

import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * @author DeleMing
 */
public class MetricNoAlarmDM {
    private static String topic = "dwd_all_metric";
    // private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
    // private static String brokerAddr = "autotest-1:9092,autotest-2:9092,autotest-3:9092";
    private static String brokerAddr = "shandong1:9092,shandong2:9092";
    // private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
    // private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
    // private static String brokerAddr = "localhost:9092";
    private static ProducerRecord<String, byte[]> producerRecord = null;
    private static KafkaProducer<String, byte[]> producer = null;

    public static void init() {
        Properties props = new Properties();
        props.put("bootstrap.servers", brokerAddr);
        props.put("acks", "1");
        props.put("retries", 0);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", ByteArraySerializer.class.getName());
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        producer = new KafkaProducer<String, byte[]>(props);
    }

    public static void main(String[] args) throws InterruptedException {
        init();
        //MetricSet
        // String metricSetName = "cpu_system_mb";
        String metricSetName = "original_agent_eb";
        //Dimensions
        Map<String, String> dimensions = new HashMap<>();
        dimensions.put("appsystem", "dev_test");
        dimensions.put("clustername", " dataservice大数据服务");
        dimensions.put("appprogramname", "mysql");
        dimensions.put("hostname", "测试非正常机器");
        dimensions.put("ip", "192.168.122.123");
        for (int i = 0; i <= 30000; i++) {
            //MetricItem
            Map<String, Double> metrics = new HashMap<>();
            metrics.put("status", 0d);
            //timestamp
            long timestamp = System.currentTimeMillis();
            String timestampString = String.valueOf(timestamp);
            System.out.println("时间:" + timestampString);
            //AvroSerializer
            AvroSerializer metricSerializer = AvroSerializerFactory.getMetricAvroSerializer();
            byte[] bytes = metricSerializer.serializingMetric(metricSetName, timestampString, dimensions, metrics);
            //send
            producerRecord = new ProducerRecord<String, byte[]>(topic, null, bytes);
            producer.send(producerRecord);
            Thread.sleep(10000);
            // 210/210 = 1, info
        }
    }
}
src/main/java/com/zorkdata/tools/mock/QQTHostAlarm11.java → src/main/java/com/zorkdata/tools/mock/hostAlarm/MetricNoAlarmZork9010.java

-package com.zorkdata.tools.mock;
+package com.zorkdata.tools.mock.hostAlarm;

 import com.zorkdata.tools.avro.AvroSerializer;
 import com.zorkdata.tools.avro.AvroSerializerFactory;

@@ -15,9 +15,12 @@ import java.util.Properties;
  * Topology
  * Verifies an alarm host that only carries the appsystem, hostname and ip dimensions
  */
-public class QQTHostAlarm11 {
+public class MetricNoAlarmZork9010 {
     private static String topic = "dwd_all_metric";
-    private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
+    // private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
+    private static String brokerAddr = "shandong1:9092,shandong2:9092";
     // private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
     // private static String brokerAddr = "shandong1:9092,shandong2:9092,shandong3:9092";
     // private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
     // private static String brokerAddr = "localhost:9092";
     private static ProducerRecord<String, byte[]> producerRecord = null;

@@ -40,15 +43,16 @@ public class QQTHostAlarm11 {
         init();
         //MetricSet
         // String metricSetName = "cpu_system_mb";
         String metricSetName = "original_agent_eb";
         //Dimensions
         Map<String, String> dimensions = new HashMap<>();
         dimensions.put("appsystem", "alarm");
-        // dimensions.put("clustername", "jichujiankong");
-        // dimensions.put("appprogramname", "linuxmokuai");
-        dimensions.put("hostname", "host-11");
-        dimensions.put("ip", "192.168.13.11");
+        dimensions.put("clustername", "告警集群");
+        dimensions.put("appprogramname", "告警模块1");
+        dimensions.put("hostname", "zork90-10");   //"zorkdata" + i
+        dimensions.put("ip", "192.168.90.10");
         for (int i = 0; i <= 30000; i++) {

@@ -69,21 +73,6 @@ public class QQTHostAlarm11 {
             Thread.sleep(15000);
         }
     }
-
-    public static double fun1(int i) {
-        double tmp = 0;
-        if (i == 0) {
-            tmp = 0.05;
-        }
-        if (i == 1) {
-            tmp = 0.2;
-        }
-        if (i == 2) {
-            tmp = 0.2;
-        }
-        return tmp;
-    }
 }
src/main/java/com/zorkdata/tools/mock/hostAlarm/MetricNode1LinuxModule.java  (new file, mode 100644)

package com.zorkdata.tools.mock.hostAlarm;

import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class MetricNode1LinuxModule {
    private static String topic = "dwd_all_metric";
    // private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
    private static String brokerAddr = "shandong1:9092,shandong2:9092";
    // private static String brokerAddr = "cs42:9092,cs43:9092,cs44:9092";
    // private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
    // private static String brokerAddr = "autotest-1:9092,autotest-2:9092,autotest-3:9092";
    // private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
    // private static String brokerAddr = "localhost:9092";
    private static ProducerRecord<String, byte[]> producerRecord = null;
    private static KafkaProducer<String, byte[]> producer = null;

    public static void init() {
        Properties props = new Properties();
        props.put("bootstrap.servers", brokerAddr);
        props.put("acks", "1");
        props.put("retries", 0);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", ByteArraySerializer.class.getName());
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        producer = new KafkaProducer<String, byte[]>(props);
    }

    public static void main(String[] args) throws InterruptedException {
        init();
        //MetricSet
        // String metricSetName = "cpu_system_mb";
        String metricSetName = "original_agent_eb";
        //Dimensions
        Map<String, String> dimensions = new HashMap<>();
        dimensions.put("appsystem", "dev_test");
        dimensions.put("clustername", "基础监控");
        dimensions.put("appprogramname", "linux模块");
        dimensions.put("hostname", "node1");
        dimensions.put("ip", "192.168.70.212");
        for (int i = 0; i <= 30000; i++) {
            //MetricItem
            Map<String, Double> metrics = new HashMap<>();
            metrics.put("status", 0d);
            //timestamp
            long timestamp = System.currentTimeMillis();
            String timestampString = String.valueOf(timestamp);
            System.out.println("时间:" + timestampString);
            //AvroSerializer
            AvroSerializer metricSerializer = AvroSerializerFactory.getMetricAvroSerializer();
            byte[] bytes = metricSerializer.serializingMetric(metricSetName, timestampString, dimensions, metrics);
            //send
            producerRecord = new ProducerRecord<String, byte[]>(topic, null, bytes);
            producer.send(producerRecord);
            Thread.sleep(20000);
            // 210/30 = 7, critical
        }
    }
}
src/main/java/com/zorkdata/tools/mock/hostAlarm/MetricYf121DataServiceCluster.java  (new file, mode 100644)

package com.zorkdata.tools.mock.hostAlarm;

import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * @author DeleMing
 */
public class MetricYf121DataServiceCluster {
    private static String topic = "dwd_all_metric";
    // private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
    private static String brokerAddr = "shandong1:9092,shandong2:9092,shandong3:9092";
    // private static String brokerAddr = "autotest-1:9092,autotest-2:9092,autotest-3:9092";
    // private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
    // private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
    // private static String brokerAddr = "localhost:9092";
    private static ProducerRecord<String, byte[]> producerRecord = null;
    private static KafkaProducer<String, byte[]> producer = null;

    public static void init() {
        Properties props = new Properties();
        props.put("bootstrap.servers", brokerAddr);
        props.put("acks", "1");
        props.put("retries", 0);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", ByteArraySerializer.class.getName());
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        producer = new KafkaProducer<String, byte[]>(props);
    }

    public static void main(String[] args) throws InterruptedException {
        init();
        //MetricSet
        // String metricSetName = "cpu_system_mb";
        String metricSetName = "original_agent_eb";
        //Dimensions
        Map<String, String> dimensions = new HashMap<>();
        dimensions.put("appsystem", "dev_test");
        dimensions.put("clustername", "paas应用服务平台");
        dimensions.put("appprogramname", "linux模块");
        dimensions.put("hostname", "yf121");
        dimensions.put("ip", "192.168.70.121");
        for (int i = 0; i <= 30000; i++) {
            //MetricItem
            Map<String, Double> metrics = new HashMap<>();
            metrics.put("status", 0d);
            //timestamp
            long timestamp = System.currentTimeMillis();
            String timestampString = String.valueOf(timestamp);
            System.out.println("时间:" + timestampString);
            //AvroSerializer
            AvroSerializer metricSerializer = AvroSerializerFactory.getMetricAvroSerializer();
            byte[] bytes = metricSerializer.serializingMetric(metricSetName, timestampString, dimensions, metrics);
            //send
            producerRecord = new ProducerRecord<String, byte[]>(topic, null, bytes);
            producer.send(producerRecord);
            Thread.sleep(10000);
            // 210/210 = 1, info
        }
    }
}
src/main/java/com/zorkdata/tools/mock/hostAlarm/MetricYf121JiChuJianKongCLuster.java  (new file, mode 100644)

package com.zorkdata.tools.mock.hostAlarm;

import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * @author DeleMing
 */
public class MetricYf121JiChuJianKongCLuster {
    private static String topic = "dwd_all_metric";
    // private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
    private static String brokerAddr = "shandong1:9092,shandong2:9092";
    // private static String brokerAddr = "autotest-1:9092,autotest-2:9092,autotest-3:9092";
    // private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
    // private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
    // private static String brokerAddr = "localhost:9092";
    private static ProducerRecord<String, byte[]> producerRecord = null;
    private static KafkaProducer<String, byte[]> producer = null;

    public static void init() {
        Properties props = new Properties();
        props.put("bootstrap.servers", brokerAddr);
        props.put("acks", "1");
        props.put("retries", 0);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", ByteArraySerializer.class.getName());
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        producer = new KafkaProducer<String, byte[]>(props);
    }

    public static void main(String[] args) throws InterruptedException {
        init();
        //MetricSet
        // String metricSetName = "cpu_system_mb";
        String metricSetName = "original_agent_eb";
        //Dimensions
        Map<String, String> dimensions = new HashMap<>();
        dimensions.put("appsystem", "dev_test");
        dimensions.put("clustername", "基础监控");
        dimensions.put("appprogramname", "linux模块");
        dimensions.put("hostname", "yf121");
        dimensions.put("ip", "192.168.70.121");
        for (int i = 0; i <= 30000; i++) {
            //MetricItem
            Map<String, Double> metrics = new HashMap<>();
            metrics.put("status", 0d);
            //timestamp
            long timestamp = System.currentTimeMillis();
            String timestampString = String.valueOf(timestamp);
            System.out.println("时间:" + timestampString);
            //AvroSerializer
            AvroSerializer metricSerializer = AvroSerializerFactory.getMetricAvroSerializer();
            byte[] bytes = metricSerializer.serializingMetric(metricSetName, timestampString, dimensions, metrics);
            //send
            producerRecord = new ProducerRecord<String, byte[]>(topic, null, bytes);
            producer.send(producerRecord);
            Thread.sleep(10000);
            // 210/210 = 1, info
        }
    }
}
src/main/java/com/zorkdata/tools/mock/hostAlarm/MetricYf122DK.java  (new file, mode 100644)

package com.zorkdata.tools.mock.hostAlarm;

import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * @author DeleMing
 */
public class MetricYf122DK {
    private static String topic = "dwd_all_metric";
    // private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
    private static String brokerAddr = "shandong1:9092,shandong2:9092";
    // private static String brokerAddr = "autotest-1:9092,autotest-2:9092,autotest-3:9092";
    // private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
    // private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
    // private static String brokerAddr = "localhost:9092";
    private static ProducerRecord<String, byte[]> producerRecord = null;
    private static KafkaProducer<String, byte[]> producer = null;

    public static void init() {
        Properties props = new Properties();
        props.put("bootstrap.servers", brokerAddr);
        props.put("acks", "1");
        props.put("retries", 0);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", ByteArraySerializer.class.getName());
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        producer = new KafkaProducer<String, byte[]>(props);
    }

    public static void main(String[] args) throws InterruptedException {
        init();
        //MetricSet
        // String metricSetName = "cpu_system_mb";
        String metricSetName = "original_agent_eb";
        //Dimensions
        Map<String, String> dimensions = new HashMap<>();
        dimensions.put("appsystem", "dev_test");
        dimensions.put("clustername", "dataservice大数据服务");
        dimensions.put("appprogramname", "kafka");
        dimensions.put("hostname", "yf122");
        dimensions.put("ip", "192.168.70.122");
        for (int i = 0; i <= 30000; i++) {
            //MetricItem
            Map<String, Double> metrics = new HashMap<>();
            metrics.put("status", 0d);
            //timestamp
            long timestamp = System.currentTimeMillis();
            String timestampString = String.valueOf(timestamp);
            System.out.println("时间:" + timestampString);
            //AvroSerializer
            AvroSerializer metricSerializer = AvroSerializerFactory.getMetricAvroSerializer();
            byte[] bytes = metricSerializer.serializingMetric(metricSetName, timestampString, dimensions, metrics);
            //send
            producerRecord = new ProducerRecord<String, byte[]>(topic, null, bytes);
            producer.send(producerRecord);
            Thread.sleep(10000);
            // 210/210 = 1, info
        }
    }
}
src/main/java/com/zorkdata/tools/mock/hostAlarm/MetricYf122JL.java  (new file, mode 100644)

package com.zorkdata.tools.mock.hostAlarm;

import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * @author DeleMing
 */
public class MetricYf122JL {
    private static String topic = "dwd_all_metric";
    // private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
    private static String brokerAddr = "shandong1:9092,shandong2:9092";
    // private static String brokerAddr = "autotest-1:9092,autotest-2:9092,autotest-3:9092";
    // private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
    // private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
    // private static String brokerAddr = "localhost:9092";
    private static ProducerRecord<String, byte[]> producerRecord = null;
    private static KafkaProducer<String, byte[]> producer = null;

    public static void init() {
        Properties props = new Properties();
        props.put("bootstrap.servers", brokerAddr);
        props.put("acks", "1");
        props.put("retries", 0);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", ByteArraySerializer.class.getName());
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        producer = new KafkaProducer<String, byte[]>(props);
    }

    public static void main(String[] args) throws InterruptedException {
        init();
        //MetricSet
        // String metricSetName = "cpu_system_mb";
        String metricSetName = "original_agent_eb";
        //Dimensions
        Map<String, String> dimensions = new HashMap<>();
        dimensions.put("appsystem", "dev_test");
        dimensions.put("clustername", "基础监控");
        dimensions.put("appprogramname", "linux模块");
        dimensions.put("hostname", "yf122");
        dimensions.put("ip", "192.168.70.122");
        for (int i = 0; i <= 30000; i++) {
            //MetricItem
            Map<String, Double> metrics = new HashMap<>();
            metrics.put("status", 0d);
            //timestamp
            long timestamp = System.currentTimeMillis();
            String timestampString = String.valueOf(timestamp);
            System.out.println("时间:" + timestampString);
            //AvroSerializer
            AvroSerializer metricSerializer = AvroSerializerFactory.getMetricAvroSerializer();
            byte[] bytes = metricSerializer.serializingMetric(metricSetName, timestampString, dimensions, metrics);
            //send
            producerRecord = new ProducerRecord<String, byte[]>(topic, null, bytes);
            producer.send(producerRecord);
            Thread.sleep(10000);
            // 210/210 = 1, info
        }
    }
}
src/main/java/com/zorkdata/tools/mock/ruleScopeLog/MockLogNoAlarmDM.java
0 → 100644
package com.zorkdata.tools.mock.ruleScopeLog;

import com.alibaba.fastjson.JSONObject;
import com.zorkdata.tools.kafka.Producer;
import com.zorkdata.tools.kafka.ProducerPool;
import com.zorkdata.tools.utils.DateUtil;
import com.zorkdata.tools.utils.PropertiesUtil;
import com.zorkdata.tools.utils.StringUtil;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Random;

/**
 * @author zhuzhigang
 */
public class MockLogNoAlarmDM {
    private static long getSize(String propertiesName) throws Exception {
        Properties properties = PropertiesUtil.getProperties(propertiesName);
        long logSize = StringUtil.getLong(properties.getProperty("log.size", "5000").trim(), 1);
        return logSize;
    }

    public static String printData(String logTypeName, String timestamp, String source, String offset,
                                   Map<String, String> dimensions, Map<String, Double> metrics,
                                   Map<String, String> normalFields) {
        JSONObject jsonObject = new JSONObject();
        jsonObject.put("logTypeName", logTypeName);
        jsonObject.put("timestamp", timestamp);
        jsonObject.put("source", source);
        jsonObject.put("offset", offset);
        jsonObject.put("dimensions", dimensions);
        jsonObject.put("measures", metrics);
        jsonObject.put("normalFields", normalFields);
        return jsonObject.toString();
    }

    private static String getRandomOffset() {
        Random random = new Random();
        long l = random.nextInt(10000);
        return String.valueOf(l);
    }

    private static Map<String, String> getRandomDimensions() {
        Random random = new Random();
        int i = random.nextInt(10);
        Map<String, String> dimensions = new HashMap<>();
        dimensions.put("appsystem", "dev_test");
        dimensions.put("clustername", " dataservice大数据服务");
        dimensions.put("appprogramname", "mysql");
        dimensions.put("hostname", "测试非正常机器");
        dimensions.put("ip", "192.168.122.123");
        return dimensions;
    }

    private static String[] codes = {"AO", "AF", "AL", "DZ", "AD", "AI", "AG", "AR", "AM", "AU",
            "AT", "AZ", "BS", "BH", "BD", "BB", "BY", "BE", "BZ", "BJ"};

    private static String getRandomCountryCode() {
        Random random = new Random(codes.length);
        return codes[new Random(codes.length).nextInt(codes.length)];
    }

    private static Map<String, String> getRandomNormalFieldsError() {
        Map<String, String> normalFields = new HashMap<>();
        normalFields.put("message", "error");
        return normalFields;
    }

    private static Map<String, String> getRandomNormalFieldsSuccess() {
        Map<String, String> normalFields = new HashMap<>();
        normalFields.put("message", "data update success");
        return normalFields;
    }

    public static void main(String[] args) throws Exception {
        long size = 30000;
        for (int i = 0; i < size; i++) {
            if (i != 0) {
                Thread.sleep(5000);
            }
            String logTypeName = "default_analysis_template";
            String timestamp = DateUtil.getUTCTimeStr();
            System.out.println("timestamp=====" + timestamp);
            String source = "/var/log/test.log";
            String offset = getRandomOffset();
            Map<String, String> dimensions = getRandomDimensions();
            Map<String, Double> measures = new HashMap<>();
            Map<String, String> normalFields = null;
            normalFields = getRandomNormalFieldsError();
            Producer producer = ProducerPool.getInstance().getProducer();
            producer.sendLog("dwd_default_log", logTypeName, timestamp, source, offset, dimensions, measures, normalFields);
        }
    }
}
src/main/java/com/zorkdata/tools/mock/ruleScopeLog/MockLogNode1LinuxModule.java
0 → 100644
package com.zorkdata.tools.mock.ruleScopeLog;

import com.alibaba.fastjson.JSONObject;
import com.zorkdata.tools.kafka.Producer;
import com.zorkdata.tools.kafka.ProducerPool;
import com.zorkdata.tools.utils.DateUtil;
import com.zorkdata.tools.utils.PropertiesUtil;
import com.zorkdata.tools.utils.StringUtil;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Random;

/**
 * @author zhuzhigang
 */
public class MockLogNode1LinuxModule {
    private static long getSize(String propertiesName) throws Exception {
        Properties properties = PropertiesUtil.getProperties(propertiesName);
        long logSize = StringUtil.getLong(properties.getProperty("log.size", "5000").trim(), 1);
        return logSize;
    }

    public static String printData(String logTypeName, String timestamp, String source, String offset,
                                   Map<String, String> dimensions, Map<String, Double> metrics,
                                   Map<String, String> normalFields) {
        JSONObject jsonObject = new JSONObject();
        jsonObject.put("logTypeName", logTypeName);
        jsonObject.put("timestamp", timestamp);
        jsonObject.put("source", source);
        jsonObject.put("offset", offset);
        jsonObject.put("dimensions", dimensions);
        jsonObject.put("measures", metrics);
        jsonObject.put("normalFields", normalFields);
        return jsonObject.toString();
    }

    private static String getRandomOffset() {
        Random random = new Random();
        long l = random.nextInt(10000);
        return String.valueOf(l);
    }

    private static Map<String, String> getRandomDimensions() {
        Random random = new Random();
        int i = random.nextInt(10);
        Map<String, String> dimensions = new HashMap<>();
        dimensions.put("hostname", "node1"); //"zorkdata" + i);
        dimensions.put("ip", "192.168.70.212");
        dimensions.put("appsystem", "dev_test");
        dimensions.put("clustername", "基础监控");
        dimensions.put("appprogramname", "linux模块");
        return dimensions;
    }

    private static String[] codes = {"AO", "AF", "AL", "DZ", "AD", "AI", "AG", "AR", "AM", "AU",
            "AT", "AZ", "BS", "BH", "BD", "BB", "BY", "BE", "BZ", "BJ"};

    private static String getRandomCountryCode() {
        Random random = new Random(codes.length);
        return codes[new Random(codes.length).nextInt(codes.length)];
    }

    private static Map<String, String> getRandomNormalFieldsError() {
        Map<String, String> normalFields = new HashMap<>();
        normalFields.put("message", "error");
        return normalFields;
    }

    private static Map<String, String> getRandomNormalFieldsSuccess() {
        Map<String, String> normalFields = new HashMap<>();
        normalFields.put("message", "data update success");
        return normalFields;
    }

    public static void main(String[] args) throws Exception {
        long size = 30000;
        for (int i = 0; i < size; i++) {
            if (i != 0) {
                Thread.sleep(5000);
            }
            String logTypeName = "default_analysis_template";
            String timestamp = DateUtil.getUTCTimeStr();
            System.out.println("timestamp=====" + timestamp);
            String source = "/var/log/test.log";
            String offset = getRandomOffset();
            Map<String, String> dimensions = getRandomDimensions();
            Map<String, Double> measures = new HashMap<>();
            Map<String, String> normalFields = null;
            normalFields = getRandomNormalFieldsError();
            Producer producer = ProducerPool.getInstance().getProducer();
            producer.sendLog("dwd_default_log", logTypeName, timestamp, source, offset, dimensions, measures, normalFields);
        }
    }
}
src/main/java/com/zorkdata/tools/mock/MockLogSI_zork90_10.java → src/main/java/com/zorkdata/tools/mock/ruleScopeLog/MockLogYf121JCJKCluster.java
-package com.zorkdata.tools.mock;
+package com.zorkdata.tools.mock.ruleScopeLog;
 import com.alibaba.fastjson.JSONObject;
 import com.zorkdata.tools.kafka.Producer;
...
@@ -15,7 +15,7 @@ import java.util.Random;
 /**
  * @author zhuzhigang
  */
-public class MockLogSI_zork90_10 {
+public class MockLogYf121JCJKCluster {
     private static long getSize(String propertiesName) throws Exception {
         Properties properties = PropertiesUtil.getProperties(propertiesName);
...
@@ -46,16 +46,16 @@ public class MockLogSI_zork90_10 {
         Random random = new Random();
         int i = random.nextInt(10);
         Map<String, String> dimensions = new HashMap<>();
-        dimensions.put("appsystem", "alarm");
-        dimensions.put("clustername", "告警集群");
-        dimensions.put("appprogramname", "告警模块1");
-        dimensions.put("hostname", "zork90-10"); //"zorkdata" + i);
-        dimensions.put("ip", "192.168.90.10");
+        dimensions.put("hostname", "yf121");
+        dimensions.put("ip", "192.168.70.121");
+        dimensions.put("appsystem", "dev_test");
+        dimensions.put("clustername", "基础监控");
+        dimensions.put("appprogramname", "linux模块");
         // dimensions.put("appprogramname", "ShanDong");
         // dimensions.put("servicename", "linux模块");
         // dimensions.put("servicecode", "linux模块");
         // dimensions.put("appsystem", "dev_test");
         // dimensions.put("clustername", "基础监控");
         // dimensions.put("appprogramname", "linux模块");
         // dimensions.put("hostname", "host-11");
         // dimensions.put("ip", "192.168.13.11");
         return dimensions;
...
@@ -73,7 +73,7 @@ public class MockLogSI_zork90_10 {
     private static Map<String, String> getRandomNormalFieldsError() {
         Map<String, String> normalFields = new HashMap<>();
-        normalFields.put("message", "qqt_alarm_index_message");
+        normalFields.put("message", "error,基础监控,linux模块");
         return normalFields;
     }
...
@@ -96,7 +96,6 @@ public class MockLogSI_zork90_10 {
         String offset = getRandomOffset();
         Map<String, String> dimensions = getRandomDimensions();
         Map<String, Double> measures = new HashMap<>();
-        // measures.put("mdh",4d);
         Map<String, String> normalFields = null;
         normalFields = getRandomNormalFieldsError();
...
src/main/java/com/zorkdata/tools/mock/ruleScopeLog/MockLogYf121PaasCluster.java
0 → 100644
package com.zorkdata.tools.mock.ruleScopeLog;

import com.alibaba.fastjson.JSONObject;
import com.zorkdata.tools.kafka.Producer;
import com.zorkdata.tools.kafka.ProducerPool;
import com.zorkdata.tools.utils.DateUtil;
import com.zorkdata.tools.utils.PropertiesUtil;
import com.zorkdata.tools.utils.StringUtil;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Random;

/**
 * @author zhuzhigang
 */
public class MockLogYf121PaasCluster {
    private static long getSize(String propertiesName) throws Exception {
        Properties properties = PropertiesUtil.getProperties(propertiesName);
        long logSize = StringUtil.getLong(properties.getProperty("log.size", "5000").trim(), 1);
        return logSize;
    }

    public static String printData(String logTypeName, String timestamp, String source, String offset,
                                   Map<String, String> dimensions, Map<String, Double> metrics,
                                   Map<String, String> normalFields) {
        JSONObject jsonObject = new JSONObject();
        jsonObject.put("logTypeName", logTypeName);
        jsonObject.put("timestamp", timestamp);
        jsonObject.put("source", source);
        jsonObject.put("offset", offset);
        jsonObject.put("dimensions", dimensions);
        jsonObject.put("measures", metrics);
        jsonObject.put("normalFields", normalFields);
        return jsonObject.toString();
    }

    private static String getRandomOffset() {
        Random random = new Random();
        long l = random.nextInt(10000);
        return String.valueOf(l);
    }

    private static Map<String, String> getRandomDimensions() {
        Random random = new Random();
        int i = random.nextInt(10);
        Map<String, String> dimensions = new HashMap<>();
        dimensions.put("hostname", "yf121"); //"zorkdata" + i);
        dimensions.put("ip", "192.168.70.121");
        dimensions.put("appsystem", "dev_test");
        dimensions.put("clustername", "paas应用服务平台");
        dimensions.put("appprogramname", "linux模块");
        // dimensions.put("appprogramname", "ShanDong");
        // dimensions.put("servicename", "linux模块");
        // dimensions.put("servicecode", "linux模块");
        // dimensions.put("appsystem", "dev_test");
        // dimensions.put("clustername", "基础监控");
        // dimensions.put("hostname", "host-11");
        // dimensions.put("ip", "192.168.13.11");
        return dimensions;
    }

    private static String[] codes = {"AO", "AF", "AL", "DZ", "AD", "AI", "AG", "AR", "AM", "AU",
            "AT", "AZ", "BS", "BH", "BD", "BB", "BY", "BE", "BZ", "BJ"};

    private static String getRandomCountryCode() {
        Random random = new Random(codes.length);
        return codes[new Random(codes.length).nextInt(codes.length)];
    }

    private static Map<String, String> getRandomNormalFieldsError() {
        Map<String, String> normalFields = new HashMap<>();
        normalFields.put("message", "error,paas应用服务平台,linux模块");
        return normalFields;
    }

    private static Map<String, String> getRandomNormalFieldsSuccess() {
        Map<String, String> normalFields = new HashMap<>();
        normalFields.put("message", "data update success");
        return normalFields;
    }

    public static void main(String[] args) throws Exception {
        long size = 30000;
        for (int i = 0; i < size; i++) {
            if (i != 0) {
                Thread.sleep(5000);
            }
            String logTypeName = "default_analysis_template";
            String timestamp = DateUtil.getUTCTimeStr();
            System.out.println("timestamp=====" + timestamp);
            String source = "/var/log/test.log";
            String offset = getRandomOffset();
            Map<String, String> dimensions = getRandomDimensions();
            Map<String, Double> measures = new HashMap<>();
            Map<String, String> normalFields = null;
            normalFields = getRandomNormalFieldsError();
            Producer producer = ProducerPool.getInstance().getProducer();
            producer.sendLog("dwd_default_log", logTypeName, timestamp, source, offset, dimensions, measures, normalFields);
        }
    }
}
src/main/java/com/zorkdata/tools/mock/ruleScopeLog/MockLogYf122DataServiceClusterKafkaModule.java
0 → 100644
package com.zorkdata.tools.mock.ruleScopeLog;

import com.alibaba.fastjson.JSONObject;
import com.zorkdata.tools.kafka.Producer;
import com.zorkdata.tools.kafka.ProducerPool;
import com.zorkdata.tools.utils.DateUtil;
import com.zorkdata.tools.utils.PropertiesUtil;
import com.zorkdata.tools.utils.StringUtil;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Random;

/**
 * @author zhuzhigang
 */
public class MockLogYf122DataServiceClusterKafkaModule {
    private static long getSize(String propertiesName) throws Exception {
        Properties properties = PropertiesUtil.getProperties(propertiesName);
        long logSize = StringUtil.getLong(properties.getProperty("log.size", "5000").trim(), 1);
        return logSize;
    }

    public static String printData(String logTypeName, String timestamp, String source, String offset,
                                   Map<String, String> dimensions, Map<String, Double> metrics,
                                   Map<String, String> normalFields) {
        JSONObject jsonObject = new JSONObject();
        jsonObject.put("logTypeName", logTypeName);
        jsonObject.put("timestamp", timestamp);
        jsonObject.put("source", source);
        jsonObject.put("offset", offset);
        jsonObject.put("dimensions", dimensions);
        jsonObject.put("measures", metrics);
        jsonObject.put("normalFields", normalFields);
        return jsonObject.toString();
    }

    private static String getRandomOffset() {
        Random random = new Random();
        long l = random.nextInt(10000);
        return String.valueOf(l);
    }

    private static Map<String, String> getRandomDimensions() {
        Random random = new Random();
        int i = random.nextInt(10);
        Map<String, String> dimensions = new HashMap<>();
        dimensions.put("hostname", "yf122");
        dimensions.put("ip", "192.168.70.122");
        dimensions.put("appsystem", "dev_test");
        dimensions.put("clustername", "dataservice大数据服务");
        dimensions.put("appprogramname", "kafka");
        // dimensions.put("appprogramname", "ShanDong");
        // dimensions.put("servicename", "linux模块");
        // dimensions.put("servicecode", "linux模块");
        // dimensions.put("appsystem", "dev_test");
        // dimensions.put("clustername", "基础监控");
        // dimensions.put("hostname", "host-11");
        // dimensions.put("ip", "192.168.13.11");
        return dimensions;
    }

    private static String[] codes = {"AO", "AF", "AL", "DZ", "AD", "AI", "AG", "AR", "AM", "AU",
            "AT", "AZ", "BS", "BH", "BD", "BB", "BY", "BE", "BZ", "BJ"};

    private static String getRandomCountryCode() {
        Random random = new Random(codes.length);
        return codes[new Random(codes.length).nextInt(codes.length)];
    }

    private static Map<String, String> getRandomNormalFieldsError() {
        Map<String, String> normalFields = new HashMap<>();
        normalFields.put("message", "error");
        return normalFields;
    }

    private static Map<String, String> getRandomNormalFieldsSuccess() {
        Map<String, String> normalFields = new HashMap<>();
        normalFields.put("message", "data update success");
        return normalFields;
    }

    public static void main(String[] args) throws Exception {
        long size = 30000;
        for (int i = 0; i < size; i++) {
            if (i != 0) {
                Thread.sleep(5000);
            }
            String logTypeName = "default_analysis_template";
            String timestamp = DateUtil.getUTCTimeStr();
            System.out.println("timestamp=====" + timestamp);
            String source = "/var/log/test.log";
            String offset = getRandomOffset();
            Map<String, String> dimensions = getRandomDimensions();
            Map<String, Double> measures = new HashMap<>();
            Map<String, String> normalFields = null;
            normalFields = getRandomNormalFieldsError();
            Producer producer = ProducerPool.getInstance().getProducer();
            producer.sendLog("dwd_default_log", logTypeName, timestamp, source, offset, dimensions, measures, normalFields);
        }
    }
}
src/main/java/com/zorkdata/tools/mock/ruleScopeLog/MockLogYf122JCJKClusterLinuxModule.java
0 → 100644
package com.zorkdata.tools.mock.ruleScopeLog;

import com.alibaba.fastjson.JSONObject;
import com.zorkdata.tools.kafka.Producer;
import com.zorkdata.tools.kafka.ProducerPool;
import com.zorkdata.tools.utils.DateUtil;
import com.zorkdata.tools.utils.PropertiesUtil;
import com.zorkdata.tools.utils.StringUtil;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Random;

/**
 * @author zhuzhigang
 */
public class MockLogYf122JCJKClusterLinuxModule {
    private static long getSize(String propertiesName) throws Exception {
        Properties properties = PropertiesUtil.getProperties(propertiesName);
        long logSize = StringUtil.getLong(properties.getProperty("log.size", "5000").trim(), 1);
        return logSize;
    }

    public static String printData(String logTypeName, String timestamp, String source, String offset,
                                   Map<String, String> dimensions, Map<String, Double> metrics,
                                   Map<String, String> normalFields) {
        JSONObject jsonObject = new JSONObject();
        jsonObject.put("logTypeName", logTypeName);
        jsonObject.put("timestamp", timestamp);
        jsonObject.put("source", source);
        jsonObject.put("offset", offset);
        jsonObject.put("dimensions", dimensions);
        jsonObject.put("measures", metrics);
        jsonObject.put("normalFields", normalFields);
        return jsonObject.toString();
    }

    private static String getRandomOffset() {
        Random random = new Random();
        long l = random.nextInt(10000);
        return String.valueOf(l);
    }

    private static Map<String, String> getRandomDimensions() {
        Random random = new Random();
        int i = random.nextInt(10);
        Map<String, String> dimensions = new HashMap<>();
        dimensions.put("hostname", "yf122");
        dimensions.put("ip", "192.168.70.122");
        dimensions.put("appsystem", "dev_test");
        dimensions.put("clustername", "基础监控");
        dimensions.put("appprogramname", "linux模块");
        // dimensions.put("appprogramname", "ShanDong");
        // dimensions.put("servicename", "linux模块");
        // dimensions.put("servicecode", "linux模块");
        // dimensions.put("appsystem", "dev_test");
        // dimensions.put("clustername", "基础监控");
        // dimensions.put("hostname", "host-11");
        // dimensions.put("ip", "192.168.13.11");
        return dimensions;
    }

    private static String[] codes = {"AO", "AF", "AL", "DZ", "AD", "AI", "AG", "AR", "AM", "AU",
            "AT", "AZ", "BS", "BH", "BD", "BB", "BY", "BE", "BZ", "BJ"};

    private static String getRandomCountryCode() {
        Random random = new Random(codes.length);
        return codes[new Random(codes.length).nextInt(codes.length)];
    }

    private static Map<String, String> getRandomNormalFieldsError() {
        Map<String, String> normalFields = new HashMap<>();
        normalFields.put("message", "error");
        return normalFields;
    }

    private static Map<String, String> getRandomNormalFieldsSuccess() {
        Map<String, String> normalFields = new HashMap<>();
        normalFields.put("message", "data update success");
        return normalFields;
    }

    public static void main(String[] args) throws Exception {
        long size = 30000;
        for (int i = 0; i < size; i++) {
            if (i != 0) {
                Thread.sleep(5000);
            }
            String logTypeName = "default_analysis_template";
            String timestamp = DateUtil.getUTCTimeStr();
            System.out.println("timestamp=====" + timestamp);
            String source = "/var/log/test.log";
            String offset = getRandomOffset();
            Map<String, String> dimensions = getRandomDimensions();
            Map<String, Double> measures = new HashMap<>();
            Map<String, String> normalFields = null;
            normalFields = getRandomNormalFieldsError();
            Producer producer = ProducerPool.getInstance().getProducer();
            producer.sendLog("dwd_default_log", logTypeName, timestamp, source, offset, dimensions, measures, normalFields);
        }
    }
}
src/main/java/com/zorkdata/tools/mock/ruleScopeMetric/MetricNoAlarmBeiYong.java
0 → 100644
package com.zorkdata.tools.mock.ruleScopeMetric;

import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * @author DeleMing
 */
public class MetricNoAlarmBeiYong {
    private static String topic = "dwd_all_metric";
    // private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
    // private static String brokerAddr = "autotest-1:9092,autotest-2:9092,autotest-3:9092";
    private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
    // private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
    // private static String brokerAddr = "localhost:9092";
    private static ProducerRecord<String, byte[]> producerRecord = null;
    private static KafkaProducer<String, byte[]> producer = null;

    public static void init() {
        Properties props = new Properties();
        props.put("bootstrap.servers", brokerAddr);
        props.put("acks", "1");
        props.put("retries", 0);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", ByteArraySerializer.class.getName());
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        producer = new KafkaProducer<String, byte[]>(props);
    }

    public static void main(String[] args) throws InterruptedException {
        init();
        //MetricSet
        String metricSetName = "cpu_system_mb";
        //Dimensions
        Map<String, String> dimensions = new HashMap<>();
        dimensions.put("appsystem", "dev_test");
        dimensions.put("clustername", "备用");
        dimensions.put("appprogramname", "备用");
        dimensions.put("hostname", "ostemplate");
        dimensions.put("ip", "192.168.70.185");
        for (int i = 0; i <= 30000; i++) {
            //MetricItem
            Map<String, Double> metrics = new HashMap<>();
            metrics.put("user_pct", 0.1);
            //timestamp
            long timestamp = System.currentTimeMillis();
            String timestampString = String.valueOf(timestamp);
            System.out.println("时间:" + timestampString);
            //AvroSerializer
            AvroSerializer metricSerializer = AvroSerializerFactory.getMetricAvroSerializer();
            byte[] bytes = metricSerializer.serializingMetric(metricSetName, timestampString, dimensions, metrics);
            //send
            producerRecord = new ProducerRecord<String, byte[]>(topic, null, bytes);
            producer.send(producerRecord);
            Thread.sleep(10000);
            // 210/210 = 1, info
        }
    }
}
src/main/java/com/zorkdata/tools/mock/ruleScopeMetric/MetricNoAlarmDM.java
0 → 100644
package com.zorkdata.tools.mock.ruleScopeMetric;

import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * @author DeleMing
 */
public class MetricNoAlarmDM {
    private static String topic = "dwd_all_metric";
    // private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
    // private static String brokerAddr = "autotest-1:9092,autotest-2:9092,autotest-3:9092";
    private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
    // private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
    // private static String brokerAddr = "localhost:9092";
    private static ProducerRecord<String, byte[]> producerRecord = null;
    private static KafkaProducer<String, byte[]> producer = null;

    public static void init() {
        Properties props = new Properties();
        props.put("bootstrap.servers", brokerAddr);
        props.put("acks", "1");
        props.put("retries", 0);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", ByteArraySerializer.class.getName());
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        producer = new KafkaProducer<String, byte[]>(props);
    }

    public static void main(String[] args) throws InterruptedException {
        init();
        //MetricSet
        String metricSetName = "cpu_system_mb";
        //Dimensions
        Map<String, String> dimensions = new HashMap<>();
        dimensions.put("appsystem", "dev_test");
        dimensions.put("clustername", " dataservice大数据服务");
        dimensions.put("appprogramname", "mysql");
        dimensions.put("hostname", "测试非正常机器");
        dimensions.put("ip", "192.168.122.123");
        for (int i = 0; i <= 30000; i++) {
            //MetricItem
            Map<String, Double> metrics = new HashMap<>();
            metrics.put("user_pct", 0.1);
            //timestamp
            long timestamp = System.currentTimeMillis();
            String timestampString = String.valueOf(timestamp);
            System.out.println("时间:" + timestampString);
            //AvroSerializer
            AvroSerializer metricSerializer = AvroSerializerFactory.getMetricAvroSerializer();
            byte[] bytes = metricSerializer.serializingMetric(metricSetName, timestampString, dimensions, metrics);
            //send
            producerRecord = new ProducerRecord<String, byte[]>(topic, null, bytes);
            producer.send(producerRecord);
            Thread.sleep(10000);
            // 210/210 = 1, info
        }
    }
}
src/main/java/com/zorkdata/tools/mock/ruleScopeMetric/MetricNoAlarmZork9010.java
0 → 100644
package com.zorkdata.tools.mock.ruleScopeMetric;

import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * @author
 * Topology
 * Verifies an alarm host that only has the appsystem, hostname and ip dimensions
 */
public class MetricNoAlarmZork9010 {
    private static String topic = "dwd_all_metric";
    // private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
    private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
    // private static String brokerAddr = "shandong1:9092,shandong2:9092,shandong3:9092";
    // private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
    // private static String brokerAddr = "localhost:9092";
    private static ProducerRecord<String, byte[]> producerRecord = null;
    private static KafkaProducer<String, byte[]> producer = null;

    public static void init() {
        Properties props = new Properties();
        props.put("bootstrap.servers", brokerAddr);
        props.put("acks", "1");
        props.put("retries", 0);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", ByteArraySerializer.class.getName());
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        producer = new KafkaProducer<String, byte[]>(props);
    }

    public static void main(String[] args) throws InterruptedException {
        init();
        //MetricSet
        String metricSetName = "cpu_system_mb";
        //Dimensions
        Map<String, String> dimensions = new HashMap<>();
        dimensions.put("appsystem", "alarm");
        dimensions.put("clustername", "告警集群");
        dimensions.put("appprogramname", "告警模块1");
        dimensions.put("hostname", "zork90-10"); //"zorkdata" + i);
        dimensions.put("ip", "192.168.90.10");
        for (int i = 0; i <= 30000; i++) {
            //MetricItem
            Map<String, Double> metrics = new HashMap<>();
            metrics.put("user_pct", 0.9);
            //timestamp
            long timestamp = System.currentTimeMillis();
            String timestampString = String.valueOf(timestamp);
            System.out.println("时间:" + timestampString);
            //AvroSerializer
            AvroSerializer metricSerializer = AvroSerializerFactory.getMetricAvroSerializer();
            byte[] bytes = metricSerializer.serializingMetric(metricSetName, timestampString, dimensions, metrics);
            //send
            producerRecord = new ProducerRecord<String, byte[]>(topic, null, bytes);
            producer.send(producerRecord);
            Thread.sleep(15000);
        }
    }
}
src/main/java/com/zorkdata/tools/mock/ruleScopeMetric/MetricNode1ShanDongModule.java
0 → 100644
package com.zorkdata.tools.mock.ruleScopeMetric;

import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class MetricNode1ShanDongModule {
    private static String topic = "dwd_all_metric";
    private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
    // private static String brokerAddr = "cs42:9092,cs43:9092,cs44:9092";
    // private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
    // private static String brokerAddr = "autotest-1:9092,autotest-2:9092,autotest-3:9092";
    // private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
    // private static String brokerAddr = "localhost:9092";
    private static ProducerRecord<String, byte[]> producerRecord = null;
    private static KafkaProducer<String, byte[]> producer = null;

    public static void init() {
        Properties props = new Properties();
        props.put("bootstrap.servers", brokerAddr);
        props.put("acks", "1");
        props.put("retries", 0);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", ByteArraySerializer.class.getName());
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        producer = new KafkaProducer<String, byte[]>(props);
    }

    public static void main(String[] args) throws InterruptedException {
        init();
        //MetricSet
        String metricSetName = "cpu_system_mb";
        //Dimensions
        Map<String, String> dimensions = new HashMap<>();
        dimensions.put("appsystem", "dev_test");
        dimensions.put("clustername", "基础监控");
        dimensions.put("appprogramname", "ShanDong");
        dimensions.put("hostname", "node1");
        dimensions.put("ip", "192.168.70.212");
        for (int i = 0; i <= 30000; i++) {
            //MetricItem
            Map<String, Double> metrics = new HashMap<>();
            metrics.put("user_pct", 0.2);
            //timestamp
            long timestamp = System.currentTimeMillis();
            String timestampString = String.valueOf(timestamp);
            System.out.println("时间:" + timestampString);
            //AvroSerializer
            AvroSerializer metricSerializer = AvroSerializerFactory.getMetricAvroSerializer();
            byte[] bytes = metricSerializer.serializingMetric(metricSetName, timestampString, dimensions, metrics);
            //send
            producerRecord = new ProducerRecord<String, byte[]>(topic, null, bytes);
            producer.send(producerRecord);
            Thread.sleep(20000);
            // 210/30 = 7, critical
        }
    }
}
src/main/java/com/zorkdata/tools/mock/MockMetricNode1.java → src/main/java/com/zorkdata/tools/mock/ruleScopeMetric/MetricYf121DataServiceCluster.java
-package com.zorkdata.tools.mock;
+package com.zorkdata.tools.mock.ruleScopeMetric;
 import com.zorkdata.tools.avro.AvroSerializer;
 import com.zorkdata.tools.avro.AvroSerializerFactory;
 import org.apache.kafka.clients.producer.KafkaProducer;
 import org.apache.kafka.clients.producer.ProducerRecord;
 import org.apache.kafka.common.serialization.ByteArraySerializer;
 import org.joda.time.DateTime;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Properties;
 import java.util.Random;
 /**
  * @author DeleMing
  */
-public class MockMetricNode1 {
+public class MetricYf121DataServiceCluster {
     private static String topic = "dwd_all_metric";
     private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
     // private static String brokerAddr = "autotest-1:9092,autotest-2:9092,autotest-3:9092";
     // private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
     // private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
     // private static String brokerAddr = "localhost:9092";
     private static ProducerRecord<String, byte[]> producerRecord = null;
...
@@ -45,29 +44,18 @@ public class MockMetricNode1 {
         //Dimensions
         Map<String, String> dimensions = new HashMap<>();
         // dimensions.put("appsystem", "dev_test");
         // dimensions.put("clustername", "基础监控");
         // dimensions.put("appprogramname", "ShanDong");
         // dimensions.put("hostname", "shandong2");
         // dimensions.put("ip", "192.168.70.220");
         dimensions.put("appsystem", "dev_test");
-        dimensions.put("monitor_name", "");
-        dimensions.put("hostname", "shandong1");
-        dimensions.put("ip", "192.168.70.219");
-        dimensions.put("observer_hostname", "");
-        dimensions.put("observer_ip", "");
         // dimensions.put("clustername", "基础监控");
         // dimensions.put("appprogramname", "ShanDong");
+        dimensions.put("clustername", "paas应用服务平台");
+        dimensions.put("appprogramname", "linux模块");
+        dimensions.put("hostname", "yf121");
+        dimensions.put("ip", "192.168.70.121");
         for (int i = 0; i <= 30000; i++) {
             //MetricItem
             Map<String, Double> metrics = new HashMap<>();
-            metrics.put("monitor_duration_us", 0.5);
-            metrics.put("monitor_status", 0.5);
-            metrics.put("icmp_requests", 0.5);
-            metrics.put("icmp_rtt_us", 0.5);
+            metrics.put("user_pct", 0.1);
             //timestamp
             long timestamp = System.currentTimeMillis();
             String timestampString = String.valueOf(timestamp);
...
@@ -79,23 +67,8 @@ public class MockMetricNode1 {
             //send
             producerRecord = new ProducerRecord<String, byte[]>(topic, null, bytes);
             producer.send(producerRecord);
             Thread.sleep(10000);
             // 210/210=1 信息
         }
     }
-    public static double fun1(int i){
-        double tmp = 0;
-        if (i == 0){
-            tmp = 0.05;
-        }
-        if (i == 1){
-            tmp = 0.2;
-        }
-        if (i == 2){
-            tmp = 0.2;
-        }
-        return tmp;
-    }
 }
...
src/main/java/com/zorkdata/tools/mock/ruleScopeMetric/MetricYf121JiChuJianKongCLuster.java
0 → 100644
package com.zorkdata.tools.mock.ruleScopeMetric;

import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * @author DeleMing
 */
public class MetricYf121JiChuJianKongCLuster {
    private static String topic = "dwd_all_metric";
    private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
    // private static String brokerAddr = "autotest-1:9092,autotest-2:9092,autotest-3:9092";
    // private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
    // private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
    // private static String brokerAddr = "localhost:9092";
    private static ProducerRecord<String, byte[]> producerRecord = null;
    private static KafkaProducer<String, byte[]> producer = null;

    public static void init() {
        Properties props = new Properties();
        props.put("bootstrap.servers", brokerAddr);
        props.put("acks", "1");
        props.put("retries", 0);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", ByteArraySerializer.class.getName());
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        producer = new KafkaProducer<String, byte[]>(props);
    }

    public static void main(String[] args) throws InterruptedException {
        init();
        //MetricSet
        String metricSetName = "cpu_system_mb";
        //Dimensions
        Map<String, String> dimensions = new HashMap<>();
        dimensions.put("appsystem", "dev_test");
        dimensions.put("clustername", "基础监控");
        dimensions.put("appprogramname", "linux模块");
        dimensions.put("hostname", "yf121");
        dimensions.put("ip", "192.168.70.121");
        for (int i = 0; i <= 30000; i++) {
            //MetricItem
            Map<String, Double> metrics = new HashMap<>();
            metrics.put("user_pct", 0.1);
            //timestamp
            long timestamp = System.currentTimeMillis();
            String timestampString = String.valueOf(timestamp);
            System.out.println("时间:" + timestampString);
            //AvroSerializer
            AvroSerializer metricSerializer = AvroSerializerFactory.getMetricAvroSerializer();
            byte[] bytes = metricSerializer.serializingMetric(metricSetName, timestampString, dimensions, metrics);
            //send
            producerRecord = new ProducerRecord<String, byte[]>(topic, null, bytes);
            producer.send(producerRecord);
            Thread.sleep(10000);
            // 210/210 = 1, info
        }
    }
}
src/main/java/com/zorkdata/tools/mock/ruleScopeMetric/MetricYf122DK.java
0 → 100644
package com.zorkdata.tools.mock.ruleScopeMetric;

import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * @author DeleMing
 */
public class MetricYf122DK {
    private static String topic = "dwd_all_metric";
    private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
    // private static String brokerAddr = "autotest-1:9092,autotest-2:9092,autotest-3:9092";
    // private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
    // private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
    // private static String brokerAddr = "localhost:9092";
    private static ProducerRecord<String, byte[]> producerRecord = null;
    private static KafkaProducer<String, byte[]> producer = null;

    public static void init() {
        Properties props = new Properties();
        props.put("bootstrap.servers", brokerAddr);
        props.put("acks", "1");
        props.put("retries", 0);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", ByteArraySerializer.class.getName());
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        producer = new KafkaProducer<String, byte[]>(props);
    }

    public static void main(String[] args) throws InterruptedException {
        init();
        //MetricSet
        String metricSetName = "cpu_system_mb";
        //Dimensions
        Map<String, String> dimensions = new HashMap<>();
        dimensions.put("appsystem", "dev_test");
        dimensions.put("clustername", "dataservice大数据服务");
        dimensions.put("appprogramname", "kafka");
        dimensions.put("hostname", "yf122");
        dimensions.put("ip", "192.168.70.122");
        for (int i = 0; i <= 30000; i++) {
            //MetricItem
            Map<String, Double> metrics = new HashMap<>();
            metrics.put("user_pct", 0.1);
            //timestamp
            long timestamp = System.currentTimeMillis();
            String timestampString = String.valueOf(timestamp);
            System.out.println("时间:" + timestampString);
            //AvroSerializer
            AvroSerializer metricSerializer = AvroSerializerFactory.getMetricAvroSerializer();
            byte[] bytes = metricSerializer.serializingMetric(metricSetName, timestampString, dimensions, metrics);
            //send
            producerRecord = new ProducerRecord<String, byte[]>(topic, null, bytes);
            producer.send(producerRecord);
            Thread.sleep(10000);
            // 210/210 = 1, info
        }
    }
}
src/main/java/com/zorkdata/tools/mock/ruleScopeMetric/MetricYf122JL.java
0 → 100644
package com.zorkdata.tools.mock.ruleScopeMetric;

import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * @author DeleMing
 */
public class MetricYf122JL {
    private static String topic = "dwd_all_metric";
    private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
    // private static String brokerAddr = "autotest-1:9092,autotest-2:9092,autotest-3:9092";
    // private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
    // private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
    // private static String brokerAddr = "localhost:9092";
    private static ProducerRecord<String, byte[]> producerRecord = null;
    private static KafkaProducer<String, byte[]> producer = null;

    public static void init() {
        Properties props = new Properties();
        props.put("bootstrap.servers", brokerAddr);
        props.put("acks", "1");
        props.put("retries", 0);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", ByteArraySerializer.class.getName());
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        producer = new KafkaProducer<String, byte[]>(props);
    }

    public static void main(String[] args) throws InterruptedException {
        init();
        //MetricSet
        String metricSetName = "cpu_system_mb";
        //Dimensions
        Map<String, String> dimensions = new HashMap<>();
        dimensions.put("appsystem", "dev_test");
        dimensions.put("clustername", "基础监控");
        dimensions.put("appprogramname", "linux模块");
        dimensions.put("hostname", "yf122");
        dimensions.put("ip", "192.168.70.122");
        for (int i = 0; i <= 30000; i++) {
            //MetricItem
            Map<String, Double> metrics = new HashMap<>();
            metrics.put("user_pct", 0.1);
            //timestamp
            long timestamp = System.currentTimeMillis();
            String timestampString = String.valueOf(timestamp);
            System.out.println("时间:" + timestampString);
            //AvroSerializer
            AvroSerializer metricSerializer = AvroSerializerFactory.getMetricAvroSerializer();
            byte[] bytes = metricSerializer.serializingMetric(metricSetName, timestampString, dimensions, metrics);
            //send
            producerRecord = new ProducerRecord<String, byte[]>(topic, null, bytes);
            producer.send(producerRecord);
            Thread.sleep(10000);
            // 210/210 = 1, info
        }
    }
}