Commit 39754b35 authored by 屈庆涛

111

parent 66e5ae0d
# Default ignored files
/shelf/
/workspace.xml
# Datasource local storage ignored files
#/../../../../:\IdeaProjects\mock-data\.idea/dataSources/
/dataSources.local.xml
# Editor-based HTTP Client requests
/httpRequests/
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<inspection_tool class="JavaDoc" enabled="true" level="WARNING" enabled_by_default="true">
<option name="TOP_LEVEL_CLASS_OPTIONS">
<value>
<option name="ACCESS_JAVADOC_REQUIRED_FOR" value="none" />
<option name="REQUIRED_TAGS" value="" />
</value>
</option>
<option name="INNER_CLASS_OPTIONS">
<value>
<option name="ACCESS_JAVADOC_REQUIRED_FOR" value="none" />
<option name="REQUIRED_TAGS" value="" />
</value>
</option>
<option name="METHOD_OPTIONS">
<value>
<option name="ACCESS_JAVADOC_REQUIRED_FOR" value="none" />
<option name="REQUIRED_TAGS" value="@return@param@throws or @exception" />
</value>
</option>
<option name="FIELD_OPTIONS">
<value>
<option name="ACCESS_JAVADOC_REQUIRED_FOR" value="none" />
<option name="REQUIRED_TAGS" value="" />
</value>
</option>
<option name="IGNORE_DEPRECATED" value="false" />
<option name="IGNORE_JAVADOC_PERIOD" value="true" />
<option name="IGNORE_DUPLICATED_THROWS" value="false" />
<option name="IGNORE_POINT_TO_ITSELF" value="false" />
<option name="myAdditionalJavadocTags" value="date" />
</inspection_tool>
</profile>
</component>
\ No newline at end of file
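The profile above leaves Javadoc optional at every access level, but when a method comment is present it expects @param, @return, and @throws (or @exception) tags, skips the missing-period check, and registers "date" as an additional custom tag. A minimal sketch of a method comment that this profile would accept — the JavadocExample class and sendMetric method are hypothetical, shown only to illustrate the required tags:

/**
 * Hypothetical example class: not part of this commit.
 */
public class JavadocExample {
    /**
     * Serializes one metric record and hands it to the Kafka producer.
     *
     * @param topic   target Kafka topic
     * @param payload Avro-encoded metric bytes
     * @return true when the record was queued for sending
     * @throws IllegalStateException if the producer has not been initialized
     * @date yyyy-MM-dd
     */
    public boolean sendMetric(String topic, byte[] payload) {
        // illustration only: a real implementation would call producer.send(...)
        return true;
    }
}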
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/mock-data.iml" filepath="$PROJECT_DIR$/mock-data.iml" />
</modules>
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="Palette2">
<group name="Swing">
<item class="com.intellij.uiDesigner.HSpacer" tooltip-text="Horizontal Spacer" icon="/com/intellij/uiDesigner/icons/hspacer.png" removable="false" auto-create-binding="false" can-attach-label="false">
<default-constraints vsize-policy="1" hsize-policy="6" anchor="0" fill="1" />
</item>
<item class="com.intellij.uiDesigner.VSpacer" tooltip-text="Vertical Spacer" icon="/com/intellij/uiDesigner/icons/vspacer.png" removable="false" auto-create-binding="false" can-attach-label="false">
<default-constraints vsize-policy="6" hsize-policy="1" anchor="0" fill="2" />
</item>
<item class="javax.swing.JPanel" icon="/com/intellij/uiDesigner/icons/panel.png" removable="false" auto-create-binding="false" can-attach-label="false">
<default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3" />
</item>
<item class="javax.swing.JScrollPane" icon="/com/intellij/uiDesigner/icons/scrollPane.png" removable="false" auto-create-binding="false" can-attach-label="true">
<default-constraints vsize-policy="7" hsize-policy="7" anchor="0" fill="3" />
</item>
<item class="javax.swing.JButton" icon="/com/intellij/uiDesigner/icons/button.png" removable="false" auto-create-binding="true" can-attach-label="false">
<default-constraints vsize-policy="0" hsize-policy="3" anchor="0" fill="1" />
<initial-values>
<property name="text" value="Button" />
</initial-values>
</item>
<item class="javax.swing.JRadioButton" icon="/com/intellij/uiDesigner/icons/radioButton.png" removable="false" auto-create-binding="true" can-attach-label="false">
<default-constraints vsize-policy="0" hsize-policy="3" anchor="8" fill="0" />
<initial-values>
<property name="text" value="RadioButton" />
</initial-values>
</item>
<item class="javax.swing.JCheckBox" icon="/com/intellij/uiDesigner/icons/checkBox.png" removable="false" auto-create-binding="true" can-attach-label="false">
<default-constraints vsize-policy="0" hsize-policy="3" anchor="8" fill="0" />
<initial-values>
<property name="text" value="CheckBox" />
</initial-values>
</item>
<item class="javax.swing.JLabel" icon="/com/intellij/uiDesigner/icons/label.png" removable="false" auto-create-binding="false" can-attach-label="false">
<default-constraints vsize-policy="0" hsize-policy="0" anchor="8" fill="0" />
<initial-values>
<property name="text" value="Label" />
</initial-values>
</item>
<item class="javax.swing.JTextField" icon="/com/intellij/uiDesigner/icons/textField.png" removable="false" auto-create-binding="true" can-attach-label="true">
<default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
<preferred-size width="150" height="-1" />
</default-constraints>
</item>
<item class="javax.swing.JPasswordField" icon="/com/intellij/uiDesigner/icons/passwordField.png" removable="false" auto-create-binding="true" can-attach-label="true">
<default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
<preferred-size width="150" height="-1" />
</default-constraints>
</item>
<item class="javax.swing.JFormattedTextField" icon="/com/intellij/uiDesigner/icons/formattedTextField.png" removable="false" auto-create-binding="true" can-attach-label="true">
<default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
<preferred-size width="150" height="-1" />
</default-constraints>
</item>
<item class="javax.swing.JTextArea" icon="/com/intellij/uiDesigner/icons/textArea.png" removable="false" auto-create-binding="true" can-attach-label="true">
<default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
<preferred-size width="150" height="50" />
</default-constraints>
</item>
<item class="javax.swing.JTextPane" icon="/com/intellij/uiDesigner/icons/textPane.png" removable="false" auto-create-binding="true" can-attach-label="true">
<default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
<preferred-size width="150" height="50" />
</default-constraints>
</item>
<item class="javax.swing.JEditorPane" icon="/com/intellij/uiDesigner/icons/editorPane.png" removable="false" auto-create-binding="true" can-attach-label="true">
<default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
<preferred-size width="150" height="50" />
</default-constraints>
</item>
<item class="javax.swing.JComboBox" icon="/com/intellij/uiDesigner/icons/comboBox.png" removable="false" auto-create-binding="true" can-attach-label="true">
<default-constraints vsize-policy="0" hsize-policy="2" anchor="8" fill="1" />
</item>
<item class="javax.swing.JTable" icon="/com/intellij/uiDesigner/icons/table.png" removable="false" auto-create-binding="true" can-attach-label="false">
<default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
<preferred-size width="150" height="50" />
</default-constraints>
</item>
<item class="javax.swing.JList" icon="/com/intellij/uiDesigner/icons/list.png" removable="false" auto-create-binding="true" can-attach-label="false">
<default-constraints vsize-policy="6" hsize-policy="2" anchor="0" fill="3">
<preferred-size width="150" height="50" />
</default-constraints>
</item>
<item class="javax.swing.JTree" icon="/com/intellij/uiDesigner/icons/tree.png" removable="false" auto-create-binding="true" can-attach-label="false">
<default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
<preferred-size width="150" height="50" />
</default-constraints>
</item>
<item class="javax.swing.JTabbedPane" icon="/com/intellij/uiDesigner/icons/tabbedPane.png" removable="false" auto-create-binding="true" can-attach-label="false">
<default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3">
<preferred-size width="200" height="200" />
</default-constraints>
</item>
<item class="javax.swing.JSplitPane" icon="/com/intellij/uiDesigner/icons/splitPane.png" removable="false" auto-create-binding="false" can-attach-label="false">
<default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3">
<preferred-size width="200" height="200" />
</default-constraints>
</item>
<item class="javax.swing.JSpinner" icon="/com/intellij/uiDesigner/icons/spinner.png" removable="false" auto-create-binding="true" can-attach-label="true">
<default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1" />
</item>
<item class="javax.swing.JSlider" icon="/com/intellij/uiDesigner/icons/slider.png" removable="false" auto-create-binding="true" can-attach-label="false">
<default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1" />
</item>
<item class="javax.swing.JSeparator" icon="/com/intellij/uiDesigner/icons/separator.png" removable="false" auto-create-binding="false" can-attach-label="false">
<default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3" />
</item>
<item class="javax.swing.JProgressBar" icon="/com/intellij/uiDesigner/icons/progressbar.png" removable="false" auto-create-binding="true" can-attach-label="false">
<default-constraints vsize-policy="0" hsize-policy="6" anchor="0" fill="1" />
</item>
<item class="javax.swing.JToolBar" icon="/com/intellij/uiDesigner/icons/toolbar.png" removable="false" auto-create-binding="false" can-attach-label="false">
<default-constraints vsize-policy="0" hsize-policy="6" anchor="0" fill="1">
<preferred-size width="-1" height="20" />
</default-constraints>
</item>
<item class="javax.swing.JToolBar$Separator" icon="/com/intellij/uiDesigner/icons/toolbarSeparator.png" removable="false" auto-create-binding="false" can-attach-label="false">
<default-constraints vsize-policy="0" hsize-policy="0" anchor="0" fill="1" />
</item>
<item class="javax.swing.JScrollBar" icon="/com/intellij/uiDesigner/icons/scrollbar.png" removable="false" auto-create-binding="true" can-attach-label="false">
<default-constraints vsize-policy="6" hsize-policy="0" anchor="0" fill="2" />
</item>
</group>
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="" vcs="Git" />
</component>
</project>
\ No newline at end of file
@@ -9,8 +9,8 @@ import java.util.*;
 public class Producer {
 // static String servers = "yf122:9092,yf121:9092,yf120:9092";
-// static String servers = "node1:9092,node2:9092,node3:9092";
-static String servers = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
+static String servers = "node1:9092,node2:9092,node3:9092";
+// static String servers = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
 static int batchsize = 1;
 static Producer testProducer;
 static String metricTopic;
@@ -45,8 +45,8 @@ public class Producer {
 }
 public void initConfig() throws Exception {
-// servers = "node1:9092,node2:9092,node3:9092";
-servers = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
+servers = "node1:9092,node2:9092,node3:9092";
+// servers = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
 // servers = "kafka-1:19092,kafka-2:19092,kafka-3:19092";
 batchsize = 100000;
 }
...
@@ -5,20 +5,21 @@ import com.zorkdata.tools.avro.AvroSerializerFactory;
 import org.apache.kafka.clients.producer.KafkaProducer;
 import org.apache.kafka.clients.producer.ProducerRecord;
 import org.apache.kafka.common.serialization.ByteArraySerializer;
+import org.joda.time.DateTime;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Properties;
+import java.util.Random;
 /**
+ * @param brokerAddr
+ * @param topic
 * @author DeleMing
 */
-public class MockMetricNode2 {
+public class MetricNode1ShanDong {
 private static String topic = "dwd_all_metric";
 private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
 // private static String brokerAddr = "autotest-1:9092,autotest-2:9092,autotest-3:9092";
+// private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
 // private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
 // private static String brokerAddr = "localhost:9092";
 private static ProducerRecord<String, byte[]> producerRecord = null;
@@ -39,7 +40,6 @@ public class MockMetricNode2 {
 public static void main(String[] args) throws InterruptedException {
 init();
 //MetricSet
 String metricSetName = "cpu_system_mb";
 //Dimensions
@@ -51,9 +51,11 @@ public class MockMetricNode2 {
 dimensions.put("ip", "192.168.70.212");
 for (int i = 0; i <= 30000; i++) {
 //MetricItem
 Map<String, Double> metrics = new HashMap<>();
-metrics.put("user_pct", 0.4);
+metrics.put("user_pct", 0.1);
 //timestamp
 long timestamp = System.currentTimeMillis();
 String timestampString = String.valueOf(timestamp);
@@ -64,7 +66,7 @@ public class MockMetricNode2 {
 //send
 producerRecord = new ProducerRecord<String, byte[]>(topic, null, bytes);
 producer.send(producerRecord);
-Thread.sleep(30000); //210/30= 7 严重
+Thread.sleep(10000);
 }
 }
 }
...
@@ -50,12 +50,12 @@ public class MockLogNode1 {
 dimensions.put("ip", "192.168.70.212");
 dimensions.put("appsystem", "dev_test");
 dimensions.put("clustername", "基础监控");
-dimensions.put("appprogramname", "ShanDong");
+// dimensions.put("appprogramname", "ShanDong");
 // dimensions.put("servicename", "linux模块");
 // dimensions.put("servicecode", "linux模块");
 // dimensions.put("appsystem", "dev_test");
 // dimensions.put("clustername", "基础监控");
-// dimensions.put("appprogramname", "linux模块");
+dimensions.put("appprogramname", "linux模块");
 // dimensions.put("hostname", "host-11");
 // dimensions.put("ip", "192.168.13.11");
 return dimensions;
...
@@ -15,9 +15,10 @@ import java.util.Properties;
 */
 public class MockMetricNode3 {
 private static String topic = "dwd_all_metric";
-private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
+// private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
 // private static String brokerAddr = "autotest-1:9092,autotest-2:9092,autotest-3:9092";
+private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
 // private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
 // private static String brokerAddr = "localhost:9092";
 private static ProducerRecord<String, byte[]> producerRecord = null;
 private static KafkaProducer<String, byte[]> producer = null;
@@ -43,25 +44,17 @@ public class MockMetricNode3 {
 //Dimensions
 Map<String, String> dimensions = new HashMap<>();
-// dimensions.put("appsystem", "dev_test");
-// dimensions.put("clustername", "基础监控");
-// dimensions.put("appprogramname", "ShanDong");
-// dimensions.put("hostname", "shandong2");
-// dimensions.put("ip", "192.168.70.220");
 dimensions.put("appsystem", "dev_test");
 dimensions.put("clustername", "基础监控");
 dimensions.put("appprogramname", "linux模块");
-dimensions.put("servicename", "linux模块");
-dimensions.put("hostname", "yf121");
-dimensions.put("ip", "192.168.70.121");
+dimensions.put("hostname", "node3");
+dimensions.put("ip", "192.168.70.214");
 for (int i = 0; i <= 30000; i++) {
 //MetricItem
 Map<String, Double> metrics = new HashMap<>();
-metrics.put("user_pct", 0.115);
+metrics.put("user_pct", 0.3);
 //timestamp
 long timestamp = System.currentTimeMillis();
 String timestampString = String.valueOf(timestamp);
@@ -76,21 +69,6 @@ public class MockMetricNode3 {
 Thread.sleep(30000);// 210/70=3 警告
 }
 }
-public static double fun1(int i){
-double tmp = 0;
-if ( i ==0){
-tmp = 0.05;
-}
-if (i == 1){
-tmp = 0.2;
-}
-if (i == 2){
-tmp = 0.2;
-}
-return tmp;
-}
 }
package com.zorkdata.tools.mock.SystemIndex;
import com.alibaba.fastjson.JSONObject;
import com.zorkdata.tools.kafka.Producer;
import com.zorkdata.tools.kafka.ProducerPool;
import com.zorkdata.tools.utils.DateUtil;
import com.zorkdata.tools.utils.PropertiesUtil;
import com.zorkdata.tools.utils.StringUtil;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Random;
public class MockLogSI_Alarm_zork90_10 {
public static String printData(String logTypeName, String timestamp, String source, String offset,
Map<String, String> dimensions, Map<String, Double> metrics, Map<String, String> normalFields) {
JSONObject jsonObject = new JSONObject();
jsonObject.put("logTypeName", logTypeName);
jsonObject.put("timestamp", timestamp);
jsonObject.put("source", source);
jsonObject.put("offset", offset);
jsonObject.put("dimensions", dimensions);
jsonObject.put("measures", metrics);
jsonObject.put("normalFields", normalFields);
return jsonObject.toString();
}
private static String getRandomOffset() {
Random random = new Random();
long l = random.nextInt(10000);
return String.valueOf(l);
}
private static Map<String, String> getRandomDimensions() {
Random random = new Random();
int i = random.nextInt(10);
Map<String, String> dimensions = new HashMap<>();
dimensions.put("appsystem", "alarm");
dimensions.put("clustername", "告警集群");
dimensions.put("appprogramname", "告警模块1");
dimensions.put("hostname", "zork90-10");
dimensions.put("ip", "192.168.90.10");
return dimensions;
}
private static Map<String, String> getRandomNormalFieldsError() {
Map<String, String> normalFields = new HashMap<>(5);
normalFields.put("message", "qqt_alarm_index_message");
return normalFields;
}
public static void main(String[] args) throws Exception {
long size = 30000;
for (int i = 0; i < size; i++) {
if (i != 0) {
Thread.sleep(5000);
}
String logTypeName = "default_analysis_template";
String timestamp = DateUtil.getUTCTimeStr();
System.out.println("timestamp====="+timestamp);
String source = "/var/log/test.log";
String offset = getRandomOffset();
Map<String, String> dimensions = getRandomDimensions();
Map<String, Double> measures = new HashMap<>();
Map<String, String> normalFields = getRandomNormalFieldsError();
Producer producer = ProducerPool.getInstance().getProducer();
producer.sendLog("dwd_default_log", logTypeName, timestamp, source, offset, dimensions, measures, normalFields);
}
}
}
-package com.zorkdata.tools.mock;
+package com.zorkdata.tools.mock.SystemIndex;
 import com.alibaba.fastjson.JSONObject;
 import com.zorkdata.tools.kafka.Producer;
@@ -15,7 +15,7 @@ import java.util.Random;
 /**
 * @author zhuzhigang
 */
-public class MockLogSI_node1 {
+public class MockLogSI_DevTest_node1 {
 private static long getSize(String propertiesName) throws Exception {
 Properties properties = PropertiesUtil.getProperties(propertiesName);
...
package com.zorkdata.tools.mock.hostAlarm;
import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
* @author DeleMing
*/
public class MetricNoAlarmBeiYong {
private static String topic = "dwd_all_metric";
private static String brokerAddr = "shandong1:9092,shandong2:9092";
// private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
// private static String brokerAddr = "autotest-1:9092,autotest-2:9092,autotest-3:9092";
// private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
// private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
// private static String brokerAddr = "localhost:9092";
private static ProducerRecord<String, byte[]> producerRecord = null;
private static KafkaProducer<String, byte[]> producer = null;
public static void init() {
Properties props = new Properties();
props.put("bootstrap.servers", brokerAddr);
props.put("acks", "1");
props.put("retries", 0);
props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
props.put("value.serializer", ByteArraySerializer.class.getName());
props.put("batch.size", 16384);
props.put("linger.ms", 1);
props.put("buffer.memory", 33554432);
producer = new KafkaProducer<String, byte[]>(props);
}
public static void main(String[] args) throws InterruptedException {
init();
//MetricSet
String metricSetName = "original_agent_eb";
//Dimensions
Map<String, String> dimensions = new HashMap<>();
dimensions.put("appsystem", "dev_test");
dimensions.put("clustername", "备用");
dimensions.put("appprogramname", "备用");
dimensions.put("hostname", "ostemplate");
dimensions.put("ip", "192.168.70.185");
for (int i = 0; i <= 30000; i++) {
//MetricItem
Map<String, Double> metrics = new HashMap<>();
metrics.put("status", 0d);
//timestamp
long timestamp = System.currentTimeMillis();
String timestampString = String.valueOf(timestamp);
System.out.println("时间:"+timestampString);
//AvroSerializer
AvroSerializer metricSerializer = AvroSerializerFactory.getMetricAvroSerializer();
byte[] bytes = metricSerializer.serializingMetric(metricSetName, timestampString, dimensions, metrics);
//send
producerRecord = new ProducerRecord<String, byte[]>(topic, null, bytes);
producer.send(producerRecord);
Thread.sleep(10000); // 210/210=1 信息
}
}
}
package com.zorkdata.tools.mock.hostAlarm;
import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
* @author DeleMing
*/
public class MetricNoAlarmDM {
private static String topic = "dwd_all_metric";
// private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
// private static String brokerAddr = "autotest-1:9092,autotest-2:9092,autotest-3:9092";
private static String brokerAddr = "shandong1:9092,shandong2:9092";
// private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
// private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
// private static String brokerAddr = "localhost:9092";
private static ProducerRecord<String, byte[]> producerRecord = null;
private static KafkaProducer<String, byte[]> producer = null;
public static void init() {
Properties props = new Properties();
props.put("bootstrap.servers", brokerAddr);
props.put("acks", "1");
props.put("retries", 0);
props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
props.put("value.serializer", ByteArraySerializer.class.getName());
props.put("batch.size", 16384);
props.put("linger.ms", 1);
props.put("buffer.memory", 33554432);
producer = new KafkaProducer<String, byte[]>(props);
}
public static void main(String[] args) throws InterruptedException {
init();
//MetricSet
// String metricSetName = "cpu_system_mb";
String metricSetName = "original_agent_eb";
//Dimensions
Map<String, String> dimensions = new HashMap<>();
dimensions.put("appsystem", "dev_test");
dimensions.put("clustername", " dataservice大数据服务");
dimensions.put("appprogramname", "mysql");
dimensions.put("hostname", "测试非正常机器");
dimensions.put("ip", "192.168.122.123");
for (int i = 0; i <= 30000; i++) {
//MetricItem
Map<String, Double> metrics = new HashMap<>();
metrics.put("status", 0d);
//timestamp
long timestamp = System.currentTimeMillis();
String timestampString = String.valueOf(timestamp);
System.out.println("时间:"+timestampString);
//AvroSerializer
AvroSerializer metricSerializer = AvroSerializerFactory.getMetricAvroSerializer();
byte[] bytes = metricSerializer.serializingMetric(metricSetName, timestampString, dimensions, metrics);
//send
producerRecord = new ProducerRecord<String, byte[]>(topic, null, bytes);
producer.send(producerRecord);
Thread.sleep(10000); // 210/210=1 信息
}
}
}
-package com.zorkdata.tools.mock;
+package com.zorkdata.tools.mock.hostAlarm;
 import com.zorkdata.tools.avro.AvroSerializer;
 import com.zorkdata.tools.avro.AvroSerializerFactory;
@@ -15,9 +15,12 @@ import java.util.Properties;
 * 拓扑
 * 验证只有appsystem、hostname、ip维度的告警机器
 */
-public class QQTHostAlarm11 {
+public class MetricNoAlarmZork9010 {
 private static String topic = "dwd_all_metric";
-private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
+// private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
+private static String brokerAddr = "shandong1:9092,shandong2:9092";
+// private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
+// private static String brokerAddr = "shandong1:9092,shandong2:9092,shandong3:9092";
 // private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
 // private static String brokerAddr = "localhost:9092";
 private static ProducerRecord<String, byte[]> producerRecord = null;
@@ -40,15 +43,16 @@ public class QQTHostAlarm11 {
 init();
 //MetricSet
+// String metricSetName = "cpu_system_mb";
 String metricSetName = "original_agent_eb";
 //Dimensions
 Map<String, String> dimensions = new HashMap<>();
 dimensions.put("appsystem", "alarm");
-// dimensions.put("clustername", "jichujiankong");
-// dimensions.put("appprogramname", "linuxmokuai");
-dimensions.put("hostname", "host-11");
-dimensions.put("ip", "192.168.13.11");
+dimensions.put("clustername", "告警集群");
+dimensions.put("appprogramname", "告警模块1");
+dimensions.put("hostname", "zork90-10");//"zorkdata" + i);
+dimensions.put("ip", "192.168.90.10");
 for (int i = 0; i <= 30000; i++) {
@@ -69,21 +73,6 @@ public class QQTHostAlarm11 {
 Thread.sleep(15000);
 }
 }
-public static double fun1(int i){
-double tmp = 0;
-if ( i ==0){
-tmp = 0.05;
-}
-if (i == 1){
-tmp = 0.2;
-}
-if (i == 2){
-tmp = 0.2;
-}
-return tmp;
-}
 }
package com.zorkdata.tools.mock.hostAlarm;
import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
public class MetricNode1LinuxModule {
private static String topic = "dwd_all_metric";
// private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
private static String brokerAddr = "shandong1:9092,shandong2:9092";
// private static String brokerAddr = "cs42:9092,cs43:9092,cs44:9092";
// private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
// private static String brokerAddr = "autotest-1:9092,autotest-2:9092,autotest-3:9092";
// private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
// private static String brokerAddr = "localhost:9092";
private static ProducerRecord<String, byte[]> producerRecord = null;
private static KafkaProducer<String, byte[]> producer = null;
public static void init() {
Properties props = new Properties();
props.put("bootstrap.servers", brokerAddr);
props.put("acks", "1");
props.put("retries", 0);
props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
props.put("value.serializer", ByteArraySerializer.class.getName());
props.put("batch.size", 16384);
props.put("linger.ms", 1);
props.put("buffer.memory", 33554432);
producer = new KafkaProducer<String, byte[]>(props);
}
public static void main(String[] args) throws InterruptedException {
init();
//MetricSet
// String metricSetName = "cpu_system_mb";
String metricSetName = "original_agent_eb";
//Dimensions
Map<String, String> dimensions = new HashMap<>();
dimensions.put("appsystem", "dev_test");
dimensions.put("clustername", "基础监控");
dimensions.put("appprogramname", "linux模块");
dimensions.put("hostname", "node1");
dimensions.put("ip", "192.168.70.212");
for (int i = 0; i <= 30000; i++) {
//MetricItem
Map<String, Double> metrics = new HashMap<>();
metrics.put("status", 0d);
//timestamp
long timestamp = System.currentTimeMillis();
String timestampString = String.valueOf(timestamp);
System.out.println("时间:"+timestampString);
//AvroSerializer
AvroSerializer metricSerializer = AvroSerializerFactory.getMetricAvroSerializer();
byte[] bytes = metricSerializer.serializingMetric(metricSetName, timestampString, dimensions, metrics);
//send
producerRecord = new ProducerRecord<String, byte[]>(topic, null, bytes);
producer.send(producerRecord);
Thread.sleep(20000); //210/30= 7 严重
}
}
}
package com.zorkdata.tools.mock.hostAlarm;
import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
* @author DeleMing
*/
public class MetricYf121DataServiceCluster {
private static String topic = "dwd_all_metric";
// private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
private static String brokerAddr = "shandong1:9092,shandong2:9092,shandong3:9092";
// private static String brokerAddr = "autotest-1:9092,autotest-2:9092,autotest-3:9092";
// private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
// private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
// private static String brokerAddr = "localhost:9092";
private static ProducerRecord<String, byte[]> producerRecord = null;
private static KafkaProducer<String, byte[]> producer = null;
public static void init() {
Properties props = new Properties();
props.put("bootstrap.servers", brokerAddr);
props.put("acks", "1");
props.put("retries", 0);
props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
props.put("value.serializer", ByteArraySerializer.class.getName());
props.put("batch.size", 16384);
props.put("linger.ms", 1);
props.put("buffer.memory", 33554432);
producer = new KafkaProducer<String, byte[]>(props);
}
public static void main(String[] args) throws InterruptedException {
init();
//MetricSet
// String metricSetName = "cpu_system_mb";
String metricSetName = "original_agent_eb";
//Dimensions
Map<String, String> dimensions = new HashMap<>();
dimensions.put("appsystem", "dev_test");
dimensions.put("clustername", "paas应用服务平台");
dimensions.put("appprogramname", "linux模块");
dimensions.put("hostname", "yf121");
dimensions.put("ip", "192.168.70.121");
for (int i = 0; i <= 30000; i++) {
//MetricItem
Map<String, Double> metrics = new HashMap<>();
metrics.put("status", 0d);
//timestamp
long timestamp = System.currentTimeMillis();
String timestampString = String.valueOf(timestamp);
System.out.println("时间:"+timestampString);
//AvroSerializer
AvroSerializer metricSerializer = AvroSerializerFactory.getMetricAvroSerializer();
byte[] bytes = metricSerializer.serializingMetric(metricSetName, timestampString, dimensions, metrics);
//send
producerRecord = new ProducerRecord<String, byte[]>(topic, null, bytes);
producer.send(producerRecord);
Thread.sleep(10000); // 210/210=1 信息
}
}
}
package com.zorkdata.tools.mock.hostAlarm;
import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
* @author DeleMing
*/
public class MetricYf121JiChuJianKongCLuster {
private static String topic = "dwd_all_metric";
// private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
private static String brokerAddr = "shandong1:9092,shandong2:9092";
// private static String brokerAddr = "autotest-1:9092,autotest-2:9092,autotest-3:9092";
// private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
// private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
// private static String brokerAddr = "localhost:9092";
private static ProducerRecord<String, byte[]> producerRecord = null;
private static KafkaProducer<String, byte[]> producer = null;
public static void init() {
Properties props = new Properties();
props.put("bootstrap.servers", brokerAddr);
props.put("acks", "1");
props.put("retries", 0);
props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
props.put("value.serializer", ByteArraySerializer.class.getName());
props.put("batch.size", 16384);
props.put("linger.ms", 1);
props.put("buffer.memory", 33554432);
producer = new KafkaProducer<String, byte[]>(props);
}
public static void main(String[] args) throws InterruptedException {
init();
//MetricSet
// String metricSetName = "cpu_system_mb";
String metricSetName = "original_agent_eb";
//Dimensions
Map<String, String> dimensions = new HashMap<>();
dimensions.put("appsystem", "dev_test");
dimensions.put("clustername", "基础监控");
dimensions.put("appprogramname", "linux模块");
dimensions.put("hostname", "yf121");
dimensions.put("ip", "192.168.70.121");
for (int i = 0; i <= 30000; i++) {
//MetricItem
Map<String, Double> metrics = new HashMap<>();
metrics.put("status", 0d);
//timestamp
long timestamp = System.currentTimeMillis();
String timestampString = String.valueOf(timestamp);
System.out.println("时间:"+timestampString);
//AvroSerializer
AvroSerializer metricSerializer = AvroSerializerFactory.getMetricAvroSerializer();
byte[] bytes = metricSerializer.serializingMetric(metricSetName, timestampString, dimensions, metrics);
//send
producerRecord = new ProducerRecord<String, byte[]>(topic, null, bytes);
producer.send(producerRecord);
Thread.sleep(10000); // 210/210=1 信息
}
}
}
package com.zorkdata.tools.mock.hostAlarm;
import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
* @author DeleMing
*/
public class MetricYf122DK {
private static String topic = "dwd_all_metric";
// private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
private static String brokerAddr = "shandong1:9092,shandong2:9092";
// private static String brokerAddr = "autotest-1:9092,autotest-2:9092,autotest-3:9092";
// private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
// private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
// private static String brokerAddr = "localhost:9092";
private static ProducerRecord<String, byte[]> producerRecord = null;
private static KafkaProducer<String, byte[]> producer = null;
public static void init() {
Properties props = new Properties();
props.put("bootstrap.servers", brokerAddr);
props.put("acks", "1");
props.put("retries", 0);
props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
props.put("value.serializer", ByteArraySerializer.class.getName());
props.put("batch.size", 16384);
props.put("linger.ms", 1);
props.put("buffer.memory", 33554432);
producer = new KafkaProducer<String, byte[]>(props);
}
public static void main(String[] args) throws InterruptedException {
init();
//MetricSet
// String metricSetName = "cpu_system_mb";
String metricSetName = "original_agent_eb";
//Dimensions
Map<String, String> dimensions = new HashMap<>();
dimensions.put("appsystem", "dev_test");
dimensions.put("clustername", "dataservice大数据服务");
dimensions.put("appprogramname", "kafka");
dimensions.put("hostname", "yf122");
dimensions.put("ip", "192.168.70.122");
for (int i = 0; i <= 30000; i++) {
//MetricItem
Map<String, Double> metrics = new HashMap<>();
metrics.put("status", 0d);
//timestamp
long timestamp = System.currentTimeMillis();
String timestampString = String.valueOf(timestamp);
System.out.println("时间:"+timestampString);
//AvroSerializer
AvroSerializer metricSerializer = AvroSerializerFactory.getMetricAvroSerializer();
byte[] bytes = metricSerializer.serializingMetric(metricSetName, timestampString, dimensions, metrics);
//send
producerRecord = new ProducerRecord<String, byte[]>(topic, null, bytes);
producer.send(producerRecord);
Thread.sleep(10000); // 210/210=1 信息
}
}
}
package com.zorkdata.tools.mock.hostAlarm;
import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
* @author DeleMing
*/
public class MetricYf122JL {
private static String topic = "dwd_all_metric";
// private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
private static String brokerAddr = "shandong1:9092,shandong2:9092";
// private static String brokerAddr = "autotest-1:9092,autotest-2:9092,autotest-3:9092";
// private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
// private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
// private static String brokerAddr = "localhost:9092";
private static ProducerRecord<String, byte[]> producerRecord = null;
private static KafkaProducer<String, byte[]> producer = null;
public static void init() {
Properties props = new Properties();
props.put("bootstrap.servers", brokerAddr);
props.put("acks", "1");
props.put("retries", 0);
props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
props.put("value.serializer", ByteArraySerializer.class.getName());
props.put("batch.size", 16384);
props.put("linger.ms", 1);
props.put("buffer.memory", 33554432);
producer = new KafkaProducer<String, byte[]>(props);
}
public static void main(String[] args) throws InterruptedException {
init();
//MetricSet
// String metricSetName = "cpu_system_mb";
String metricSetName = "original_agent_eb";
//Dimensions
Map<String, String> dimensions = new HashMap<>();
dimensions.put("appsystem", "dev_test");
dimensions.put("clustername", "基础监控");
dimensions.put("appprogramname", "linux模块");
dimensions.put("hostname", "yf122");
dimensions.put("ip", "192.168.70.122");
for (int i = 0; i <= 30000; i++) {
//MetricItem
Map<String, Double> metrics = new HashMap<>();
metrics.put("status", 0d);
//timestamp
long timestamp = System.currentTimeMillis();
String timestampString = String.valueOf(timestamp);
System.out.println("时间:"+timestampString);
//AvroSerializer
AvroSerializer metricSerializer = AvroSerializerFactory.getMetricAvroSerializer();
byte[] bytes = metricSerializer.serializingMetric(metricSetName, timestampString, dimensions, metrics);
//send
producerRecord = new ProducerRecord<String, byte[]>(topic, null, bytes);
producer.send(producerRecord);
Thread.sleep(10000); // 210/210=1 信息
}
}
}
package com.zorkdata.tools.mock.ruleScopeLog;
import com.alibaba.fastjson.JSONObject;
import com.zorkdata.tools.kafka.Producer;
import com.zorkdata.tools.kafka.ProducerPool;
import com.zorkdata.tools.utils.DateUtil;
import com.zorkdata.tools.utils.PropertiesUtil;
import com.zorkdata.tools.utils.StringUtil;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Random;
/**
* @author zhuzhigang
*/
public class MockLogNoAlarmDM {
private static long getSize(String propertiesName) throws Exception {
Properties properties = PropertiesUtil.getProperties(propertiesName);
long logSize = StringUtil.getLong(properties.getProperty("log.size", "5000").trim(), 1);
return logSize;
}
public static String printData(String logTypeName, String timestamp, String source, String offset,
Map<String, String> dimensions, Map<String, Double> metrics, Map<String, String> normalFields) {
JSONObject jsonObject = new JSONObject();
jsonObject.put("logTypeName", logTypeName);
jsonObject.put("timestamp", timestamp);
jsonObject.put("source", source);
jsonObject.put("offset", offset);
jsonObject.put("dimensions", dimensions);
jsonObject.put("measures", metrics);
jsonObject.put("normalFields", normalFields);
return jsonObject.toString();
}
private static String getRandomOffset() {
Random random = new Random();
long l = random.nextInt(10000);
return String.valueOf(l);
}
private static Map<String, String> getRandomDimensions() {
Random random = new Random();
int i = random.nextInt(10);
Map<String, String> dimensions = new HashMap<>();
dimensions.put("appsystem", "dev_test");
dimensions.put("clustername", " dataservice大数据服务");
dimensions.put("appprogramname", "mysql");
dimensions.put("hostname", "测试非正常机器");
dimensions.put("ip", "192.168.122.123");
return dimensions;
}
private static String[] codes = {
"AO", "AF", "AL", "DZ", "AD", "AI", "AG", "AR", "AM", "AU",
"AT", "AZ", "BS", "BH", "BD", "BB", "BY", "BE", "BZ", "BJ"
};
private static String getRandomCountryCode() {
Random random = new Random(codes.length);
return codes[new Random(codes.length).nextInt(codes.length)];
}
private static Map<String, String> getRandomNormalFieldsError() {
Map<String, String> normalFields = new HashMap<>();
normalFields.put("message", "error");
return normalFields;
}
private static Map<String, String> getRandomNormalFieldsSuccess() {
Map<String, String> normalFields = new HashMap<>();
normalFields.put("message", "data update success");
return normalFields;
}
public static void main(String[] args) throws Exception {
long size = 30000;
for (int i = 0; i < size; i++) {
if (i != 0) {
Thread.sleep(5000);
}
String logTypeName = "default_analysis_template";
String timestamp = DateUtil.getUTCTimeStr();
System.out.println("timestamp====="+timestamp);
String source = "/var/log/test.log";
String offset = getRandomOffset();
Map<String, String> dimensions = getRandomDimensions();
Map<String, Double> measures = new HashMap<>();
Map<String, String> normalFields = null;
normalFields = getRandomNormalFieldsError();
Producer producer = ProducerPool.getInstance().getProducer();
producer.sendLog("dwd_default_log", logTypeName, timestamp, source, offset, dimensions, measures, normalFields);
}
}
}
package com.zorkdata.tools.mock.ruleScopeLog;
import com.alibaba.fastjson.JSONObject;
import com.zorkdata.tools.kafka.Producer;
import com.zorkdata.tools.kafka.ProducerPool;
import com.zorkdata.tools.utils.DateUtil;
import com.zorkdata.tools.utils.PropertiesUtil;
import com.zorkdata.tools.utils.StringUtil;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Random;
/**
* @author zhuzhigang
*/
public class MockLogNode1LinuxModule {
private static long getSize(String propertiesName) throws Exception {
Properties properties = PropertiesUtil.getProperties(propertiesName);
long logSize = StringUtil.getLong(properties.getProperty("log.size", "5000").trim(), 1);
return logSize;
}
public static String printData(String logTypeName, String timestamp, String source, String offset,
Map<String, String> dimensions, Map<String, Double> metrics, Map<String, String> normalFields) {
JSONObject jsonObject = new JSONObject();
jsonObject.put("logTypeName", logTypeName);
jsonObject.put("timestamp", timestamp);
jsonObject.put("source", source);
jsonObject.put("offset", offset);
jsonObject.put("dimensions", dimensions);
jsonObject.put("measures", metrics);
jsonObject.put("normalFields", normalFields);
return jsonObject.toString();
}
private static String getRandomOffset() {
Random random = new Random();
long l = random.nextInt(10000);
return String.valueOf(l);
}
private static Map<String, String> getRandomDimensions() {
Random random = new Random();
int i = random.nextInt(10);
Map<String, String> dimensions = new HashMap<>();
dimensions.put("hostname", "node1");//"zorkdata" + i);
dimensions.put("ip", "192.168.70.212");
dimensions.put("appsystem", "dev_test");
dimensions.put("clustername", "基础监控");
dimensions.put("appprogramname", "linux模块");
return dimensions;
}
private static String[] codes = {
"AO", "AF", "AL", "DZ", "AD", "AI", "AG", "AR", "AM", "AU",
"AT", "AZ", "BS", "BH", "BD", "BB", "BY", "BE", "BZ", "BJ"
};
private static String getRandomCountryCode() {
Random random = new Random(codes.length);
return codes[new Random(codes.length).nextInt(codes.length)];
}
private static Map<String, String> getRandomNormalFieldsError() {
Map<String, String> normalFields = new HashMap<>();
normalFields.put("message", "error");
return normalFields;
}
private static Map<String, String> getRandomNormalFieldsSuccess() {
Map<String, String> normalFields = new HashMap<>();
normalFields.put("message", "data update success");
return normalFields;
}
public static void main(String[] args) throws Exception {
long size = 30000;
for (int i = 0; i < size; i++) {
if (i != 0) {
Thread.sleep(5000);
}
String logTypeName = "default_analysis_template";
String timestamp = DateUtil.getUTCTimeStr();
System.out.println("timestamp====="+timestamp);
String source = "/var/log/test.log";
String offset = getRandomOffset();
Map<String, String> dimensions = getRandomDimensions();
Map<String, Double> measures = new HashMap<>();
Map<String, String> normalFields = null;
normalFields = getRandomNormalFieldsError();
Producer producer = ProducerPool.getInstance().getProducer();
producer.sendLog("dwd_default_log", logTypeName, timestamp, source, offset, dimensions, measures, normalFields);
}
}
}
-package com.zorkdata.tools.mock;
+package com.zorkdata.tools.mock.ruleScopeLog;
 import com.alibaba.fastjson.JSONObject;
 import com.zorkdata.tools.kafka.Producer;
@@ -15,7 +15,7 @@ import java.util.Random;
 /**
 * @author zhuzhigang
 */
-public class MockLogSI_zork90_10 {
+public class MockLogYf121JCJKCluster {
 private static long getSize(String propertiesName) throws Exception {
 Properties properties = PropertiesUtil.getProperties(propertiesName);
@@ -46,16 +46,16 @@ public class MockLogSI_zork90_10 {
 Random random = new Random();
 int i = random.nextInt(10);
 Map<String, String> dimensions = new HashMap<>();
-dimensions.put("appsystem", "alarm");
-dimensions.put("clustername", "告警集群");
-dimensions.put("appprogramname", "告警模块1");
-dimensions.put("hostname", "zork90-10");//"zorkdata" + i);
-dimensions.put("ip", "192.168.90.10");
+dimensions.put("hostname", "yf121");
+dimensions.put("ip", "192.168.70.121");
+dimensions.put("appsystem", "dev_test");
+dimensions.put("clustername", "基础监控");
+dimensions.put("appprogramname", "linux模块");
+// dimensions.put("appprogramname", "ShanDong");
 // dimensions.put("servicename", "linux模块");
 // dimensions.put("servicecode", "linux模块");
 // dimensions.put("appsystem", "dev_test");
 // dimensions.put("clustername", "基础监控");
-// dimensions.put("appprogramname", "linux模块");
 // dimensions.put("hostname", "host-11");
 // dimensions.put("ip", "192.168.13.11");
 return dimensions;
@@ -73,7 +73,7 @@ public class MockLogSI_zork90_10 {
 private static Map<String, String> getRandomNormalFieldsError() {
 Map<String, String> normalFields = new HashMap<>();
-normalFields.put("message", "qqt_alarm_index_message");
+normalFields.put("message", "error,基础监控,linux模块");
 return normalFields;
 }
@@ -96,7 +96,6 @@ public class MockLogSI_zork90_10 {
 String offset = getRandomOffset();
 Map<String, String> dimensions = getRandomDimensions();
 Map<String, Double> measures = new HashMap<>();
-// measures.put("mdh",4d);
 Map<String, String> normalFields = null;
 normalFields = getRandomNormalFieldsError();
...
package com.zorkdata.tools.mock.ruleScopeLog;
import com.alibaba.fastjson.JSONObject;
import com.zorkdata.tools.kafka.Producer;
import com.zorkdata.tools.kafka.ProducerPool;
import com.zorkdata.tools.utils.DateUtil;
import com.zorkdata.tools.utils.PropertiesUtil;
import com.zorkdata.tools.utils.StringUtil;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Random;
/**
* @author zhuzhigang
*/
public class MockLogYf121PaasCluster {
private static long getSize(String propertiesName) throws Exception {
Properties properties = PropertiesUtil.getProperties(propertiesName);
long logSize = StringUtil.getLong(properties.getProperty("log.size", "5000").trim(), 1);
return logSize;
}
public static String printData(String logTypeName, String timestamp, String source, String offset,
Map<String, String> dimensions, Map<String, Double> metrics, Map<String, String> normalFields) {
JSONObject jsonObject = new JSONObject();
jsonObject.put("logTypeName", logTypeName);
jsonObject.put("timestamp", timestamp);
jsonObject.put("source", source);
jsonObject.put("offset", offset);
jsonObject.put("dimensions", dimensions);
jsonObject.put("measures", metrics);
jsonObject.put("normalFields", normalFields);
return jsonObject.toString();
}
private static String getRandomOffset() {
Random random = new Random();
long l = random.nextInt(10000);
return String.valueOf(l);
}
private static Map<String, String> getRandomDimensions() {
Random random = new Random();
int i = random.nextInt(10);
Map<String, String> dimensions = new HashMap<>();
dimensions.put("hostname", "yf121");//"zorkdata" + i);
dimensions.put("ip", "192.168.70.121");
dimensions.put("appsystem", "dev_test");
dimensions.put("clustername", "paas应用服务平台");
dimensions.put("appprogramname", "linux模块");
// dimensions.put("appprogramname", "ShanDong");
// dimensions.put("servicename", "linux模块");
// dimensions.put("servicecode", "linux模块");
// dimensions.put("appsystem", "dev_test");
// dimensions.put("clustername", "基础监控");
// dimensions.put("hostname", "host-11");
// dimensions.put("ip", "192.168.13.11");
return dimensions;
}
private static String[] codes = {
"AO", "AF", "AL", "DZ", "AD", "AI", "AG", "AR", "AM", "AU",
"AT", "AZ", "BS", "BH", "BD", "BB", "BY", "BE", "BZ", "BJ"
};
private static String getRandomCountryCode() {
Random random = new Random(codes.length);
return codes[new Random(codes.length).nextInt(codes.length)];
}
private static Map<String, String> getRandomNormalFieldsError() {
Map<String, String> normalFields = new HashMap<>();
normalFields.put("message", "error,paas应用服务平台,linux模块");
return normalFields;
}
private static Map<String, String> getRandomNormalFieldsSuccess() {
Map<String, String> normalFields = new HashMap<>();
normalFields.put("message", "data update success");
return normalFields;
}
public static void main(String[] args) throws Exception {
long size = 30000;
for (int i = 0; i < size; i++) {
if (i != 0) {
Thread.sleep(5000);
}
String logTypeName = "default_analysis_template";
String timestamp = DateUtil.getUTCTimeStr();
System.out.println("timestamp====="+timestamp);
String source = "/var/log/test.log";
String offset = getRandomOffset();
Map<String, String> dimensions = getRandomDimensions();
Map<String, Double> measures = new HashMap<>();
Map<String, String> normalFields = null;
normalFields = getRandomNormalFieldsError();
Producer producer = ProducerPool.getInstance().getProducer();
producer.sendLog("dwd_default_log", logTypeName, timestamp, source, offset, dimensions, measures, normalFields);
}
}
}
package com.zorkdata.tools.mock.ruleScopeLog;
import com.alibaba.fastjson.JSONObject;
import com.zorkdata.tools.kafka.Producer;
import com.zorkdata.tools.kafka.ProducerPool;
import com.zorkdata.tools.utils.DateUtil;
import com.zorkdata.tools.utils.PropertiesUtil;
import com.zorkdata.tools.utils.StringUtil;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Random;
/**
* @author zhuzhigang
*/
public class MockLogYf122DataServiceClusterKafkaModule {
private static long getSize(String propertiesName) throws Exception {
Properties properties = PropertiesUtil.getProperties(propertiesName);
long logSize = StringUtil.getLong(properties.getProperty("log.size", "5000").trim(), 1);
return logSize;
}
public static String printData(String logTypeName, String timestamp, String source, String offset,
Map<String, String> dimensions, Map<String, Double> metrics, Map<String, String> normalFields) {
JSONObject jsonObject = new JSONObject();
jsonObject.put("logTypeName", logTypeName);
jsonObject.put("timestamp", timestamp);
jsonObject.put("source", source);
jsonObject.put("offset", offset);
jsonObject.put("dimensions", dimensions);
jsonObject.put("measures", metrics);
jsonObject.put("normalFields", normalFields);
return jsonObject.toString();
}
private static String getRandomOffset() {
Random random = new Random();
return String.valueOf(random.nextInt(10000));
}
private static Map<String, String> getRandomDimensions() {
Random random = new Random();
int i = random.nextInt(10);
Map<String, String> dimensions = new HashMap<>();
dimensions.put("hostname", "yf122");
dimensions.put("ip", "192.168.70.122");
dimensions.put("appsystem", "dev_test");
dimensions.put("clustername", "dataservice大数据服务");
dimensions.put("appprogramname", "kafka");
// dimensions.put("appprogramname", "ShanDong");
// dimensions.put("servicename", "linux模块");
// dimensions.put("servicecode", "linux模块");
// dimensions.put("appsystem", "dev_test");
// dimensions.put("clustername", "基础监控");
// dimensions.put("hostname", "host-11");
// dimensions.put("ip", "192.168.13.11");
return dimensions;
}
private static String[] codes = {
"AO", "AF", "AL", "DZ", "AD", "AI", "AG", "AR", "AM", "AU",
"AT", "AZ", "BS", "BH", "BD", "BB", "BY", "BE", "BZ", "BJ"
};
private static String getRandomCountryCode() {
// pick a uniformly random country code from the table
Random random = new Random();
return codes[random.nextInt(codes.length)];
}
private static Map<String, String> getRandomNormalFieldsError() {
Map<String, String> normalFields = new HashMap<>();
normalFields.put("message", "error");
return normalFields;
}
private static Map<String, String> getRandomNormalFieldsSuccess() {
Map<String, String> normalFields = new HashMap<>();
normalFields.put("message", "data update success");
return normalFields;
}
public static void main(String[] args) throws Exception {
long size = 30000;
for (int i = 0; i < size; i++) {
if (i != 0) {
Thread.sleep(5000);
}
String logTypeName = "default_analysis_template";
String timestamp = DateUtil.getUTCTimeStr();
System.out.println("timestamp====="+timestamp);
String source = "/var/log/test.log";
String offset = getRandomOffset();
Map<String, String> dimensions = getRandomDimensions();
Map<String, Double> measures = new HashMap<>();
Map<String, String> normalFields = getRandomNormalFieldsError();
Producer producer = ProducerPool.getInstance().getProducer();
producer.sendLog("dwd_default_log", logTypeName, timestamp, source, offset, dimensions, measures, normalFields);
}
}
}
package com.zorkdata.tools.mock.ruleScopeLog;
import com.alibaba.fastjson.JSONObject;
import com.zorkdata.tools.kafka.Producer;
import com.zorkdata.tools.kafka.ProducerPool;
import com.zorkdata.tools.utils.DateUtil;
import com.zorkdata.tools.utils.PropertiesUtil;
import com.zorkdata.tools.utils.StringUtil;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Random;
/**
* @author zhuzhigang
*/
public class MockLogYf122JCJKClusterLinuxModule {
private static long getSize(String propertiesName) throws Exception {
Properties properties = PropertiesUtil.getProperties(propertiesName);
long logSize = StringUtil.getLong(properties.getProperty("log.size", "5000").trim(), 1);
return logSize;
}
public static String printData(String logTypeName, String timestamp, String source, String offset,
Map<String, String> dimensions, Map<String, Double> metrics, Map<String, String> normalFields) {
JSONObject jsonObject = new JSONObject();
jsonObject.put("logTypeName", logTypeName);
jsonObject.put("timestamp", timestamp);
jsonObject.put("source", source);
jsonObject.put("offset", offset);
jsonObject.put("dimensions", dimensions);
jsonObject.put("measures", metrics);
jsonObject.put("normalFields", normalFields);
return jsonObject.toString();
}
private static String getRandomOffset() {
Random random = new Random();
return String.valueOf(random.nextInt(10000));
}
private static Map<String, String> getRandomDimensions() {
Random random = new Random();
int i = random.nextInt(10);
Map<String, String> dimensions = new HashMap<>();
dimensions.put("hostname", "yf122");
dimensions.put("ip", "192.168.70.122");
dimensions.put("appsystem", "dev_test");
dimensions.put("clustername", "基础监控");
dimensions.put("appprogramname", "linux模块");
// dimensions.put("appprogramname", "ShanDong");
// dimensions.put("servicename", "linux模块");
// dimensions.put("servicecode", "linux模块");
// dimensions.put("appsystem", "dev_test");
// dimensions.put("clustername", "基础监控");
// dimensions.put("hostname", "host-11");
// dimensions.put("ip", "192.168.13.11");
return dimensions;
}
private static String[] codes = {
"AO", "AF", "AL", "DZ", "AD", "AI", "AG", "AR", "AM", "AU",
"AT", "AZ", "BS", "BH", "BD", "BB", "BY", "BE", "BZ", "BJ"
};
private static String getRandomCountryCode() {
// pick a uniformly random country code from the table
Random random = new Random();
return codes[random.nextInt(codes.length)];
}
private static Map<String, String> getRandomNormalFieldsError() {
Map<String, String> normalFields = new HashMap<>();
normalFields.put("message", "error");
return normalFields;
}
private static Map<String, String> getRandomNormalFieldsSuccess() {
Map<String, String> normalFields = new HashMap<>();
normalFields.put("message", "data update success");
return normalFields;
}
public static void main(String[] args) throws Exception {
long size = 30000;
for (int i = 0; i < size; i++) {
if (i != 0) {
Thread.sleep(5000);
}
String logTypeName = "default_analysis_template";
String timestamp = DateUtil.getUTCTimeStr();
System.out.println("timestamp====="+timestamp);
String source = "/var/log/test.log";
String offset = getRandomOffset();
Map<String, String> dimensions = getRandomDimensions();
Map<String, Double> measures = new HashMap<>();
Map<String, String> normalFields = getRandomNormalFieldsError();
Producer producer = ProducerPool.getInstance().getProducer();
producer.sendLog("dwd_default_log", logTypeName, timestamp, source, offset, dimensions, measures, normalFields);
}
}
}
package com.zorkdata.tools.mock.ruleScopeMetric;
import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
* @author DeleMing
*/
public class MetricNoAlarmBeiYong {
private static String topic = "dwd_all_metric";
// private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
// private static String brokerAddr = "autotest-1:9092,autotest-2:9092,autotest-3:9092";
private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
// private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
// private static String brokerAddr = "localhost:9092";
private static ProducerRecord<String, byte[]> producerRecord = null;
private static KafkaProducer<String, byte[]> producer = null;
public static void init() {
Properties props = new Properties();
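// Plain Kafka producer settings: acks=1 waits for the partition leader only, retries=0 gives up
// on failed sends, batch.size (bytes) and linger.ms (ms) control batching, and buffer.memory
// caps the client-side send buffer.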
props.put("bootstrap.servers", brokerAddr);
props.put("acks", "1");
props.put("retries", 0);
props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
props.put("value.serializer", ByteArraySerializer.class.getName());
props.put("batch.size", 16384);
props.put("linger.ms", 1);
props.put("buffer.memory", 33554432);
producer = new KafkaProducer<String, byte[]>(props);
}
public static void main(String[] args) throws InterruptedException {
init();
//MetricSet
String metricSetName = "cpu_system_mb";
//Dimensions
Map<String, String> dimensions = new HashMap<>();
dimensions.put("appsystem", "dev_test");
dimensions.put("clustername", "备用");
dimensions.put("appprogramname", "备用");
dimensions.put("hostname", "ostemplate");
dimensions.put("ip", "192.168.70.185");
for (int i = 0; i <= 30000; i++) {
//MetricItem
Map<String, Double> metrics = new HashMap<>();
metrics.put("user_pct", 0.1);
//timestamp
long timestamp = System.currentTimeMillis();
String timestampString = String.valueOf(timestamp);
System.out.println("时间:"+timestampString);
//AvroSerializer
AvroSerializer metricSerializer = AvroSerializerFactory.getMetricAvroSerializer();
byte[] bytes = metricSerializer.serializingMetric(metricSetName, timestampString, dimensions, metrics);
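// serializingMetric packs (metricSetName, timestamp, dimensions, metrics) into an Avro-encoded
// byte[] (via AvroSerializerFactory); the bytes become the record value, sent with a null key.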
//send
producerRecord = new ProducerRecord<String, byte[]>(topic, null, bytes);
producer.send(producerRecord);
Thread.sleep(10000); // 210/210 = 1, "info" level
}
}
}
package com.zorkdata.tools.mock.ruleScopeMetric;
import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
* @author DeleMing
*/
public class MetricNoAlarmDM {
private static String topic = "dwd_all_metric";
// private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
// private static String brokerAddr = "autotest-1:9092,autotest-2:9092,autotest-3:9092";
private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
// private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
// private static String brokerAddr = "localhost:9092";
private static ProducerRecord<String, byte[]> producerRecord = null;
private static KafkaProducer<String, byte[]> producer = null;
public static void init() {
Properties props = new Properties();
props.put("bootstrap.servers", brokerAddr);
props.put("acks", "1");
props.put("retries", 0);
props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
props.put("value.serializer", ByteArraySerializer.class.getName());
props.put("batch.size", 16384);
props.put("linger.ms", 1);
props.put("buffer.memory", 33554432);
producer = new KafkaProducer<String, byte[]>(props);
}
public static void main(String[] args) throws InterruptedException {
init();
//MetricSet
String metricSetName = "cpu_system_mb";
//Dimensions
Map<String, String> dimensions = new HashMap<>();
dimensions.put("appsystem", "dev_test");
dimensions.put("clustername", " dataservice大数据服务");
dimensions.put("appprogramname", "mysql");
dimensions.put("hostname", "测试非正常机器");
dimensions.put("ip", "192.168.122.123");
for (int i = 0; i <= 30000; i++) {
//MetricItem
Map<String, Double> metrics = new HashMap<>();
metrics.put("user_pct", 0.1);
//timestamp
long timestamp = System.currentTimeMillis();
String timestampString = String.valueOf(timestamp);
System.out.println("时间:"+timestampString);
//AvroSerializer
AvroSerializer metricSerializer = AvroSerializerFactory.getMetricAvroSerializer();
byte[] bytes = metricSerializer.serializingMetric(metricSetName, timestampString, dimensions, metrics);
//send
producerRecord = new ProducerRecord<String, byte[]>(topic, null, bytes);
producer.send(producerRecord);
Thread.sleep(10000); // 210/210 = 1, "info" level
}
}
}
package com.zorkdata.tools.mock.ruleScopeMetric;
import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
 * Topology case:
 * verifies alarming for hosts that carry only the appsystem, hostname and ip dimensions.
 *
 * @author
 */
public class MetricNoAlarmZork9010 {
private static String topic = "dwd_all_metric";
// private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
// private static String brokerAddr = "shandong1:9092,shandong2:9092,shandong3:9092";
// private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
// private static String brokerAddr = "localhost:9092";
private static ProducerRecord<String, byte[]> producerRecord = null;
private static KafkaProducer<String, byte[]> producer = null;
public static void init() {
Properties props = new Properties();
props.put("bootstrap.servers", brokerAddr);
props.put("acks", "1");
props.put("retries", 0);
props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
props.put("value.serializer", ByteArraySerializer.class.getName());
props.put("batch.size", 16384);
props.put("linger.ms", 1);
props.put("buffer.memory", 33554432);
producer = new KafkaProducer<String, byte[]>(props);
}
public static void main(String[] args) throws InterruptedException {
init();
//MetricSet
String metricSetName = "cpu_system_mb";
//Dimensions
Map<String, String> dimensions = new HashMap<>();
dimensions.put("appsystem", "alarm");
dimensions.put("clustername", "告警集群");
dimensions.put("appprogramname", "告警模块1");
dimensions.put("hostname", "zork90-10");//"zorkdata" + i);
dimensions.put("ip", "192.168.90.10");
for (int i = 0; i <= 30000; i++) {
//MetricItem
Map<String, Double> metrics = new HashMap<>();
metrics.put("user_pct", 0.9);
//timestamp
long timestamp = System.currentTimeMillis();
String timestampString = String.valueOf(timestamp);
System.out.println("时间:"+timestampString);
//AvroSerializer
AvroSerializer metricSerializer = AvroSerializerFactory.getMetricAvroSerializer();
byte[] bytes = metricSerializer.serializingMetric(metricSetName, timestampString, dimensions, metrics);
//send
producerRecord = new ProducerRecord<String, byte[]>(topic, null, bytes);
producer.send(producerRecord);
Thread.sleep(15000);
}
}
}
package com.zorkdata.tools.mock.ruleScopeMetric;
import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
public class MetricNode1ShanDongModule {
private static String topic = "dwd_all_metric";
private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
// private static String brokerAddr = "cs42:9092,cs43:9092,cs44:9092";
// private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
// private static String brokerAddr = "autotest-1:9092,autotest-2:9092,autotest-3:9092";
// private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
// private static String brokerAddr = "localhost:9092";
private static ProducerRecord<String, byte[]> producerRecord = null;
private static KafkaProducer<String, byte[]> producer = null;
public static void init() {
Properties props = new Properties();
props.put("bootstrap.servers", brokerAddr);
props.put("acks", "1");
props.put("retries", 0);
props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
props.put("value.serializer", ByteArraySerializer.class.getName());
props.put("batch.size", 16384);
props.put("linger.ms", 1);
props.put("buffer.memory", 33554432);
producer = new KafkaProducer<String, byte[]>(props);
}
public static void main(String[] args) throws InterruptedException {
init();
//MetricSet
String metricSetName = "cpu_system_mb";
//Dimensions
Map<String, String> dimensions = new HashMap<>();
dimensions.put("appsystem", "dev_test");
dimensions.put("clustername", "基础监控");
dimensions.put("appprogramname", "ShanDong");
dimensions.put("hostname", "node1");
dimensions.put("ip", "192.168.70.212");
for (int i = 0; i <= 30000; i++) {
//MetricItem
Map<String, Double> metrics = new HashMap<>();
metrics.put("user_pct", 0.2);
//timestamp
long timestamp = System.currentTimeMillis();
String timestampString = String.valueOf(timestamp);
System.out.println("时间:"+timestampString);
//AvroSerializer
AvroSerializer metricSerializer = AvroSerializerFactory.getMetricAvroSerializer();
byte[] bytes = metricSerializer.serializingMetric(metricSetName, timestampString, dimensions, metrics);
//send
producerRecord = new ProducerRecord<String, byte[]>(topic, null, bytes);
producer.send(producerRecord);
Thread.sleep(20000); // 210/30 = 7, "critical" level
}
}
}
package com.zorkdata.tools.mock.ruleScopeMetric;
import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
 * @author DeleMing
 */
public class MetricYf121DataServiceCluster {
private static String topic = "dwd_all_metric";
private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
// private static String brokerAddr = "autotest-1:9092,autotest-2:9092,autotest-3:9092";
// private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
// private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
// private static String brokerAddr = "localhost:9092";
private static ProducerRecord<String, byte[]> producerRecord = null;
@@ -45,29 +44,18 @@
//Dimensions
Map<String, String> dimensions = new HashMap<>();
dimensions.put("appsystem", "dev_test");
dimensions.put("clustername", "paas应用服务平台");
dimensions.put("appprogramname", "linux模块");
dimensions.put("hostname", "yf121");
dimensions.put("ip", "192.168.70.121");
for (int i = 0; i <= 30000; i++) {
//MetricItem
Map<String, Double> metrics = new HashMap<>();
metrics.put("user_pct", 0.1);
//timestamp
long timestamp = System.currentTimeMillis();
String timestampString = String.valueOf(timestamp);
@@ -79,23 +67,8 @@
//send
producerRecord = new ProducerRecord<String, byte[]>(topic, null, bytes);
producer.send(producerRecord);
Thread.sleep(10000); // 210/210 = 1, "info" level
}
}
}
package com.zorkdata.tools.mock.ruleScopeMetric;
import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
* @author DeleMing
*/
public class MetricYf121JiChuJianKongCLuster {
private static String topic = "dwd_all_metric";
private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
// private static String brokerAddr = "autotest-1:9092,autotest-2:9092,autotest-3:9092";
// private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
// private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
// private static String brokerAddr = "localhost:9092";
private static ProducerRecord<String, byte[]> producerRecord = null;
private static KafkaProducer<String, byte[]> producer = null;
public static void init() {
Properties props = new Properties();
props.put("bootstrap.servers", brokerAddr);
props.put("acks", "1");
props.put("retries", 0);
props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
props.put("value.serializer", ByteArraySerializer.class.getName());
props.put("batch.size", 16384);
props.put("linger.ms", 1);
props.put("buffer.memory", 33554432);
producer = new KafkaProducer<String, byte[]>(props);
}
public static void main(String[] args) throws InterruptedException {
init();
//MetricSet
String metricSetName = "cpu_system_mb";
//Dimensions
Map<String, String> dimensions = new HashMap<>();
dimensions.put("appsystem", "dev_test");
dimensions.put("clustername", "基础监控");
dimensions.put("appprogramname", "linux模块");
dimensions.put("hostname", "yf121");
dimensions.put("ip", "192.168.70.121");
for (int i = 0; i <= 30000; i++) {
//MetricItem
Map<String, Double> metrics = new HashMap<>();
metrics.put("user_pct", 0.1);
//timestamp
long timestamp = System.currentTimeMillis();
String timestampString = String.valueOf(timestamp);
System.out.println("时间:"+timestampString);
//AvroSerializer
AvroSerializer metricSerializer = AvroSerializerFactory.getMetricAvroSerializer();
byte[] bytes = metricSerializer.serializingMetric(metricSetName, timestampString, dimensions, metrics);
//send
producerRecord = new ProducerRecord<String, byte[]>(topic, null, bytes);
producer.send(producerRecord);
Thread.sleep(10000); // 210/210 = 1, "info" level
}
}
}
package com.zorkdata.tools.mock.ruleScopeMetric;
import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
* @author DeleMing
*/
public class MetricYf122DK {
private static String topic = "dwd_all_metric";
private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
// private static String brokerAddr = "autotest-1:9092,autotest-2:9092,autotest-3:9092";
// private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
// private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
// private static String brokerAddr = "localhost:9092";
private static ProducerRecord<String, byte[]> producerRecord = null;
private static KafkaProducer<String, byte[]> producer = null;
public static void init() {
Properties props = new Properties();
props.put("bootstrap.servers", brokerAddr);
props.put("acks", "1");
props.put("retries", 0);
props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
props.put("value.serializer", ByteArraySerializer.class.getName());
props.put("batch.size", 16384);
props.put("linger.ms", 1);
props.put("buffer.memory", 33554432);
producer = new KafkaProducer<String, byte[]>(props);
}
public static void main(String[] args) throws InterruptedException {
init();
//MetricSet
String metricSetName = "cpu_system_mb";
//Dimensions
Map<String, String> dimensions = new HashMap<>();
dimensions.put("appsystem", "dev_test");
dimensions.put("clustername", "dataservice大数据服务");
dimensions.put("appprogramname", "kafka");
dimensions.put("hostname", "yf122");
dimensions.put("ip", "192.168.70.122");
for (int i = 0; i <= 30000; i++) {
//MetricItem
Map<String, Double> metrics = new HashMap<>();
metrics.put("user_pct", 0.1);
//timestamp
long timestamp = System.currentTimeMillis();
String timestampString = String.valueOf(timestamp);
System.out.println("时间:"+timestampString);
//AvroSerializer
AvroSerializer metricSerializer = AvroSerializerFactory.getMetricAvroSerializer();
byte[] bytes = metricSerializer.serializingMetric(metricSetName, timestampString, dimensions, metrics);
//send
producerRecord = new ProducerRecord<String, byte[]>(topic, null, bytes);
producer.send(producerRecord);
Thread.sleep(10000); // 210/210 = 1, "info" level
}
}
}
package com.zorkdata.tools.mock.ruleScopeMetric;
import com.zorkdata.tools.avro.AvroSerializer;
import com.zorkdata.tools.avro.AvroSerializerFactory;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
* @author DeleMing
*/
public class MetricYf122JL {
private static String topic = "dwd_all_metric";
private static String brokerAddr = "node1:9092,node2:9092,node3:9092";
// private static String brokerAddr = "autotest-1:9092,autotest-2:9092,autotest-3:9092";
// private static String brokerAddr = "noahtest-215:9092,noahtest-216:9092,noahtest-217:9092";
// private static String brokerAddr = "yf170:9092,yf171:9092,yf172:9092";
// private static String brokerAddr = "localhost:9092";
private static ProducerRecord<String, byte[]> producerRecord = null;
private static KafkaProducer<String, byte[]> producer = null;
public static void init() {
Properties props = new Properties();
props.put("bootstrap.servers", brokerAddr);
props.put("acks", "1");
props.put("retries", 0);
props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
props.put("value.serializer", ByteArraySerializer.class.getName());
props.put("batch.size", 16384);
props.put("linger.ms", 1);
props.put("buffer.memory", 33554432);
producer = new KafkaProducer<String, byte[]>(props);
}
public static void main(String[] args) throws InterruptedException {
init();
//MetricSet
String metricSetName = "cpu_system_mb";
//Dimensions
Map<String, String> dimensions = new HashMap<>();
dimensions.put("appsystem", "dev_test");
dimensions.put("clustername", "基础监控");
dimensions.put("appprogramname", "linux模块");
dimensions.put("hostname", "yf122");
dimensions.put("ip", "192.168.70.122");
for (int i = 0; i <= 30000; i++) {
//MetricItem
Map<String, Double> metrics = new HashMap<>();
metrics.put("user_pct", 0.1);
//timestamp
long timestamp = System.currentTimeMillis();
String timestampString = String.valueOf(timestamp);
System.out.println("时间:"+timestampString);
//AvroSerializer
AvroSerializer metricSerializer = AvroSerializerFactory.getMetricAvroSerializer();
byte[] bytes = metricSerializer.serializingMetric(metricSetName, timestampString, dimensions, metrics);
//send
producerRecord = new ProducerRecord<String, byte[]>(topic, null, bytes);
producer.send(producerRecord);
Thread.sleep(10000); // 210/210 = 1, "info" level
}
}
}