Commit 84edf9ce authored by xuli

Replace in-memory deduplication with tracking the maximum data timestamp

parent 770502d2
File mode changed from 100644 to 100755 (repeated for 33 files)
This diff is collapsed. (2 collapsed diffs not shown)
@@ -25,7 +25,40 @@ public class InfluxDBService {
     }

     public QueryResult query(String command) {
-        return influxDB.query(new Query(command, database));
+        if (command == null || command.trim().isEmpty()) {
+            throw new IllegalArgumentException("Query command must not be empty");
+        }
+        if (influxDB == null) {
+            throw new IllegalStateException("InfluxDB connection is not initialized");
+        }
+        log.info("Executing InfluxQL query: {}", command);
+        long startTime = System.currentTimeMillis();
+        try {
+            QueryResult result = influxDB.query(new Query(command, database));
+            long duration = System.currentTimeMillis() - startTime;
+            log.info("Query finished in {} ms, result rows: {}", duration, getResultCount(result));
+            return result;
+        } catch (Exception e) {
+            long duration = System.currentTimeMillis() - startTime;
+            log.error("Query failed after {} ms, SQL: {}, error: {}", duration, command, e.getMessage());
+            throw e;
+        }
+    }
+
+    private int getResultCount(QueryResult result) {
+        try {
+            if (result != null && result.getResults() != null && !result.getResults().isEmpty()) {
+                QueryResult.Result firstResult = result.getResults().get(0);
+                if (firstResult.getSeries() != null && !firstResult.getSeries().isEmpty()) {
+                    QueryResult.Series series = firstResult.getSeries().get(0);
+                    return series.getValues() != null ? series.getValues().size() : 0;
+                }
+            }
+            return 0;
+        } catch (Exception e) {
+            return -1;
+        }
     }

     public void close() {
...
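For reference, the components below unwrap query results through a static helper, getResult, imported from InfluxDBService. Its implementation is not part of this diff; the following is a minimal sketch of what it plausibly looks like, inferred from the call sites (callers null-check its return value), mirroring the defensive chain in getResultCount above:

    // Sketch only - the real getResult is not shown in this diff.
    // Returns the first series of the first result, or null when the query
    // came back empty, so callers can bail out with a simple null check.
    public static QueryResult.Series getResult(QueryResult queryResult) {
        if (queryResult == null || queryResult.getResults() == null || queryResult.getResults().isEmpty()) {
            return null;
        }
        QueryResult.Result first = queryResult.getResults().get(0);
        if (first.getSeries() == null || first.getSeries().isEmpty()) {
            return null;
        }
        return first.getSeries().get(0);
    }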
@@ -11,7 +11,6 @@ import org.springframework.stereotype.Component;
 import javax.annotation.PostConstruct;
 import javax.annotation.PreDestroy;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.List;
 import java.util.concurrent.ConcurrentLinkedQueue;
 import java.util.concurrent.atomic.AtomicBoolean;
@@ -32,8 +31,6 @@ public class BatchKafkaSender {
     @Value("${spring.kafka.producer.topic}")
     private String topic;
-    @Value("${count-file}")
-    private String countFile;
     @Value("${batch.size:1000}")
     private int batchSize;
@@ -149,7 +146,7 @@ public class BatchKafkaSender {
         try {
             // Send asynchronously via the reused Producer
-            producer.sendMessage(batch, countFile).whenComplete((result, throwable) -> {
+            producer.sendMessage(batch).whenComplete((result, throwable) -> {
                 if (throwable == null) {
                     // Thread-safe update of the statistics
                     long newTotal = totalSent.addAndGet(batch.size());
...
@@ -3,7 +3,6 @@ package com.zork.disorder.component;
 import com.zork.common.service.InfluxDBService;
 import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.concurrent.BasicThreadFactory;
 import org.influxdb.dto.QueryResult;
 import org.springframework.beans.factory.annotation.Value;
@@ -21,7 +20,6 @@ import java.util.function.Predicate;
 import static com.zork.common.constant.InfluxDBConstant.AUTOGEN;
 import static com.zork.common.service.InfluxDBService.getResult;
 import static com.zork.common.utils.DateUtil.yearMonthDayBar;
-import static com.zork.common.utils.FileUtil.appendMethodB;

 /**
  * @Author: Prock.Liy
@@ -56,8 +54,6 @@ public class CountComponent {
     @Value("${interval-minute}")
     private Integer intervalMinute;
-    @Value("${count-file}")
-    private String countFile;
     @Value("${missing-s}")
     private Integer missingS;
@@ -120,9 +116,8 @@ public class CountComponent {
             }
             tableCount.put(simpleDateFormat.format(new Date()), series.getValues().get(0).get(1).toString());
-            String ip = StringUtils.substringAfterLast(StringUtils.substringBeforeLast(url, ":"), "/").replaceAll("\\.", "_");
-            // Used to track the total count per table
-            appendMethodB(countFile + "\\" + ip + "_" + tableName + ".txt", tableCount.toString(), true);
+            // Per-table count statistics (logged only)
+            log.info("Count result for table {}: {}", tableName, tableCount.toString());
         });
         // Update the count file
         influxDBService.close();
@@ -144,7 +139,6 @@ public class CountComponent {
      * @return
      */
     public Date lastFewMinutesMissingS(int minutes) {
-        long time = missingS * 1000;
         Calendar beforeTime = Calendar.getInstance();
         beforeTime.add(Calendar.MINUTE, -minutes);
         beforeTime.add(Calendar.SECOND, -missingS);
...
@@ -12,15 +12,10 @@ import org.springframework.beans.factory.annotation.Value;
 import org.springframework.scheduling.annotation.Scheduled;
 import org.springframework.stereotype.Component;

-import java.text.SimpleDateFormat;
 import java.util.*;
-import java.util.Objects;
-import java.util.stream.Collectors;

-import static com.zork.common.constant.InfluxDBConstant.AUTOGEN;
 import static com.zork.common.service.InfluxDBService.getResult;
 import static com.zork.common.utils.DateUtil.yearMonthDayBar;
-import static com.zork.common.utils.FileUtil.appendMethodB;

 /**
  * @Author: Prock.Liy
@@ -34,8 +29,6 @@ public class InfluxDisorderComponent {
     @Value("${table-keywords}")
     private String tableKeywords;
-    @Value("${count-file}")
-    private String countFile;
     @Value("${missing-s:15}")
     private Integer missingS;
@@ -45,31 +38,16 @@ public class InfluxDisorderComponent {
     private MultiDbConnectionManager connectionManager;

     @Autowired
-    private MultiDbTimestampTracker timestampTracker;
-
-    @Autowired
-    private SmartDeduplicator deduplicator;
+    private DataTimestampIncrementalTracker dataTimestampTracker;

     @Autowired
     private BatchKafkaSender batchSender;

-    private final SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
-
-    /**
-     * Clear the day's accumulated count statistics at midnight every day
-     */
-    @Scheduled(cron = "${clearCountCron}")
-    public void clearCountData() {
-        try {
-            appendMethodB(countFile, "0", false);
-        } catch (Exception e) {
-            log.error("task Exception:{}", e.getMessage());
-        }
-    }
-
     /**
-     * Optimized: query InfluxDB metric data and send it to Kafka (multi-DB parallelism + deduplication + batch send)
+     * Optimized: query InfluxDB metric data and send it to Kafka (incremental query by data timestamp + batch send)
      */
     @Scheduled(cron = "${schedules}")
     public void optimizedExecute() {
@@ -79,16 +57,16 @@ public class InfluxDisorderComponent {
         List<String> urlList = connectionManager.getAvailableUrls();
         List<String> databaseList = connectionManager.getAvailableDatabases();

-        // Process every URL/database combination in parallel
-        urlList.parallelStream().forEach(url -> {
-            databaseList.parallelStream().forEach(database -> {
-                try {
-                    processUrlDatabase(url.trim(), database.trim());
-                } catch (Exception e) {
-                    log.error("Failed to process URL: {}, Database: {}: {}", url, database, e.getMessage());
-                }
-            });
-        });
+        // Process every URL/database combination sequentially to keep timestamp tracking accurate
+        for (String url : urlList) {
+            for (String database : databaseList) {
+                try {
+                    processUrlDatabase(url.trim(), database.trim());
+                } catch (Exception e) {
+                    log.error("Failed to process URL: {}, Database: {}: {}", url, database, e.getMessage());
+                }
+            }
+        }

         log.info("InfluxDB data collection task finished");
@@ -115,12 +93,14 @@ public class InfluxDisorderComponent {
         log.info("URL: {}, Database: {} found {} tables to process", url, database, tableNameList.size());

-        // Process each table in parallel
-        List<NormalFieldsDTO> allData = tableNameList.parallelStream()
-                .map(tableName -> processTable(influxDBService, url, database, tableName))
-                .filter(Objects::nonNull)
-                .flatMap(List::stream)
-                .collect(Collectors.toList());
+        // Process each table sequentially to keep timestamp updates atomic
+        List<NormalFieldsDTO> allData = new ArrayList<>();
+        for (String tableName : tableNameList) {
+            List<NormalFieldsDTO> tableData = processTable(influxDBService, url, database, tableName);
+            if (tableData != null && !tableData.isEmpty()) {
+                allData.addAll(tableData);
+            }
+        }

         // Batch send to Kafka
         if (!allData.isEmpty()) {
@@ -134,16 +114,12 @@ public class InfluxDisorderComponent {
     }

     /**
-     * Process the data of a single table
+     * Process the data of a single table (incremental query by data timestamp)
      */
     private List<NormalFieldsDTO> processTable(InfluxDBService influxDBService, String url, String database, String tableName) {
         try {
-            // Build the incremental query SQL
-            String sql = timestampTracker.buildDelayTolerantQuery(url, database, tableName);
-            if (sql == null) {
-                log.debug("Query time window for table {} is invalid, skipping", tableName);
-                return Collections.emptyList();
-            }
+            // Build the incremental query SQL based on the data timestamp
+            String sql = dataTimestampTracker.buildDataTimestampQuery(url, database, tableName);

             log.debug("Querying table {} with SQL: {}", tableName, sql);
             QueryResult.Series series = getResult(influxDBService.query(sql));
@@ -153,8 +129,8 @@ public class InfluxDisorderComponent {
                 return Collections.emptyList();
             }

-            // Process the query result
-            return processTableData(series, url, database, tableName);
+            // Process the query result and update the max timestamp
+            return processTableDataWithTimestampUpdate(series, url, database, tableName);
         } catch (Exception e) {
             log.error("Exception while processing table {}: {}", tableName, e.getMessage());
@@ -163,13 +139,14 @@ public class InfluxDisorderComponent {
     }

     /**
-     * Process table data, applying deduplication and conversion
+     * Process table data, convert it and update the timestamp (no deduplication needed; the incremental query naturally avoids duplicates)
      */
-    private List<NormalFieldsDTO> processTableData(QueryResult.Series series, String url, String database, String tableName) {
+    private List<NormalFieldsDTO> processTableDataWithTimestampUpdate(QueryResult.Series series, String url, String database, String tableName) {
         // Build the column name index map
         List<String> columns = series.getColumns();
         Map<String, Integer> columnIndexMap = buildColumnIndexMap(columns);
+        List<Atoota> atootaList = new ArrayList<>();
         List<NormalFieldsDTO> resultList = new ArrayList<>();

         for (List<Object> value : series.getValues()) {
@@ -180,10 +157,7 @@ public class InfluxDisorderComponent {
                 continue;
             }

-            // Apply smart deduplication
-            if (deduplicator.isDuplicate(url, database, tableName, atoota.getId())) {
-                continue; // Skip duplicate data
-            }
+            atootaList.add(atoota);

             // Convert to DTO
             NormalFieldsDTO dto = convertToDTO(atoota);
@@ -194,7 +168,10 @@ public class InfluxDisorderComponent {
             }
         }

-        log.debug("Table {} processed, valid rows: {}", tableName, resultList.size());
+        // Update the max timestamp (important: only after the data has been processed successfully)
+        int processedCount = dataTimestampTracker.updateMaxTimestamp(url, database, tableName, atootaList);
+        log.debug("Table {} processed, valid rows: {}", tableName, processedCount);
+
         return resultList;
     }
@@ -237,11 +214,11 @@ public class InfluxDisorderComponent {
     }

     /**
-     * Manually flush all caches (to restart synchronization)
+     * Manually reset the timestamps (to restart synchronization)
      */
-    public void resetAllCaches() {
-        deduplicator.clearAll();
-        log.info("All caches have been reset");
+    public void resetAllTimestamps() {
+        dataTimestampTracker.clearAllTimestamps();
+        log.info("All timestamps have been reset");
     }

     /**
@@ -250,8 +227,8 @@ public class InfluxDisorderComponent {
     public String getSystemStats() {
         return String.format("System status - %s, %s, %s",
                 connectionManager.getConnectionStats(),
-                timestampTracker.getStats(),
-                deduplicator.getCacheStats());
+                dataTimestampTracker.getStats(),
+                dataTimestampTracker.getCleanupStats());
     }

     /**
@@ -294,9 +271,10 @@ public class InfluxDisorderComponent {
     /**
      * Convert Atoota to DTO
      */
+    @SuppressWarnings("unchecked")
     private NormalFieldsDTO convertToDTO(Atoota atoota) {
         return NormalFieldsDTO.builder()
-                .normalFields(JSONObject.parseObject(JSONObject.toJSONString(atoota), Map.class))
+                .normalFields((Map<String, Object>) JSONObject.parseObject(JSONObject.toJSONString(atoota), Map.class))
                 .timestamp(System.currentTimeMillis())
                 .logTypeName(topic)
                 .build();
...
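The new DataTimestampIncrementalTracker that replaces both MultiDbTimestampTracker and SmartDeduplicator is not included in this diff. Below is a minimal sketch of what its contract might look like, inferred purely from the call sites above (buildDataTimestampQuery, updateMaxTimestamp, clearAllTimestamps); the key format, the SQL template, the timestamp unit, and the Atoota.getTime() accessor are all assumptions, with Atoota being the project's row type:

import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class DataTimestampIncrementalTracker {

    // url|database|table -> max data timestamp observed so far (unit assumed)
    private final Map<String, Long> maxTimestamps = new ConcurrentHashMap<>();

    // Build an incremental query that only fetches rows newer than the stored
    // max; on the first run (no stored timestamp) fall back to a full scan.
    public String buildDataTimestampQuery(String url, String database, String tableName) {
        Long last = maxTimestamps.get(key(url, database, tableName));
        String base = "SELECT * FROM \"" + tableName + "\"";
        return last == null
                ? base + " ORDER BY time ASC"
                : base + " WHERE time > " + last + " ORDER BY time ASC";
    }

    // Advance the stored max timestamp only after the batch was processed
    // successfully, so a failed run re-reads the same window instead of
    // silently losing data.
    public int updateMaxTimestamp(String url, String database, String tableName, List<Atoota> rows) {
        if (rows != null && !rows.isEmpty()) {
            long max = maxTimestamps.getOrDefault(key(url, database, tableName), Long.MIN_VALUE);
            for (Atoota row : rows) {
                max = Math.max(max, row.getTime()); // assumption: Atoota exposes its data timestamp
            }
            maxTimestamps.put(key(url, database, tableName), max);
        }
        return rows == null ? 0 : rows.size();
    }

    public void clearAllTimestamps() {
        maxTimestamps.clear();
    }

    private String key(String url, String database, String tableName) {
        return url + "|" + database + "|" + tableName;
    }
}

Storing one long per table bounds memory regardless of data volume, which the in-memory ID deduplicator could not guarantee; the trade-off is that points arriving with a data timestamp older than the stored max are skipped, which the missing-s lag setting in the configuration presumably exists to tolerate.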
@@ -91,7 +91,13 @@ public class MultiDbConnectionManager {
         // Check connection health
         if (!isConnectionHealthy(connectionKey, service)) {
             log.warn("Connection unhealthy, trying to recreate it - URL: {}, Database: {}", url, database);
-            service = recreateConnection(url, database);
+            synchronized (this) {
+                // Double-check to avoid concurrent duplicate re-creation
+                service = connectionPool.get(connectionKey);
+                if (service == null || !isConnectionHealthy(connectionKey, service)) {
+                    service = recreateConnection(url, database);
+                }
+            }
         }

         return service;
...
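The synchronized block with a re-read of the pool is classic double-checked locking. If connectionPool is a ConcurrentHashMap, a per-key alternative could lean on compute, which runs its remapping function atomically for each key; a sketch under that assumption, reusing the names from the diff above:

        // Sketch only: recreate at most once per connectionKey, without
        // serializing recreation of unrelated connections on `this`.
        service = connectionPool.compute(connectionKey, (key, existing) ->
                (existing != null && isConnectionHealthy(key, existing))
                        ? existing
                        : recreateConnection(url, database));

This variant assumes recreateConnection does not itself write to connectionPool, since mutating the map from inside compute is not allowed.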
@@ -50,7 +50,7 @@ public class Producer {
     /**
      * Asynchronously batch-send messages to Kafka - non-blocking
      */
-    public CompletableFuture<Void> sendMessage(List<NormalFieldsDTO> normalFieldsDTOList, String countFile) {
+    public CompletableFuture<Void> sendMessage(List<NormalFieldsDTO> normalFieldsDTOList) {
         if (normalFieldsDTOList == null || normalFieldsDTOList.isEmpty()) {
             return CompletableFuture.completedFuture(null);
         }
...
 server:
   port: 8190
   tomcat:
     uri-encoding: utf-8
     basedir: "log/tomcat"
     accesslog:
       enabled: true
       directory: "logs"
       encoding: utf-8
       file-date-format: _yyyy-MM-dd
       locale: zh_CN
       max-days: 30
       prefix: "tomcat_access_log"
       suffix: ".log"
       pattern: "[${spring.application.name}] ===> request time-%t client IP-%a client Host-%h
         client protocol-%H thread name-%I SessionId-%S request URL-%U request method-%m status code-%s duration(ms)-%D connection status-%X"
   shutdown: graceful
 spring:
   profiles:
     include:
       - datasource
       - kafka
-  jackson:
-    date-format: yyyy-MM-dd HH:mm:ss
-    time-zone: GMT+8
-  aop:
-    proxy-target-class: true
-  application:
-    name: "@artifactId@"
-    package-time: "@package-time@"
-
-# Optimized schedule: run every 30 seconds to give writes more time to land
-schedules: 0/30 * * * * ?
+
+schedules: 0/5 * * * * ?
+timestamp-file: ./data-timestamps.properties
+timestamp:
+  cleanup:
+    days: 3
+
 table-keywords: atoota
-send:
-  log-url:
-interval-minute: 1
-interval-second: 30
-initialDelay: 1
-# Optimized settings
 missing-s: 15
-count-file: E:\\version
-clearCountCron: 0 0 0 * * ?
-# Batch processing settings
 batch:
-  size: 1000 # batch size - lowers backlog risk
-  timeout: 2000 # batch timeout (ms) - faster response
\ No newline at end of file
+  size: 1000
+  timeout: 2000
+
+# Example multi-database InfluxDB setup
+# Separate multiple URLs and multiple databases with commas
+# influxdb:
+#   url: "http://influx1:8086,http://influx2:8086"
+#   database: "metrics1,metrics2"
+logging:
+  level:
+    org.springframework.jdbc.core.JdbcTemplate: DEBUG
\ No newline at end of file
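The new timestamp-file: ./data-timestamps.properties key suggests the tracker persists its per-table max timestamps across restarts. The persistence code is not part of this diff; a minimal sketch using java.util.Properties (the class name and the url|database|table key format are assumptions) might look like:

import java.io.*;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;

// Hypothetical helper: load/store "url|database|table=maxTimestamp" pairs
// from the file configured as timestamp-file.
public class TimestampStore {

    private final File file;

    public TimestampStore(String path) {
        this.file = new File(path);
    }

    // Restore the timestamps on startup; an absent file means a fresh start.
    public Map<String, Long> load() throws IOException {
        Map<String, Long> map = new ConcurrentHashMap<>();
        if (file.exists()) {
            Properties props = new Properties();
            try (InputStream in = new FileInputStream(file)) {
                props.load(in);
            }
            props.forEach((k, v) -> map.put(k.toString(), Long.parseLong(v.toString())));
        }
        return map;
    }

    // Flush the current timestamps, e.g. after each successful batch.
    public void save(Map<String, Long> map) throws IOException {
        Properties props = new Properties();
        map.forEach((k, v) -> props.setProperty(k, Long.toString(v)));
        try (OutputStream out = new FileOutputStream(file)) {
            props.store(out, "max data timestamp per url|database|table");
        }
    }
}

The accompanying timestamp.cleanup.days: 3 key presumably controls how long stale per-table entries are kept before the cleanup reported by getCleanupStats removes them.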
File mode changed from 100644 to 100755 (repeated for 5 files)
(A second configuration file carries the same diff as the one above.)
File mode changed from 100644 to 100755 (repeated for 59 more files, interleaved with 12 collapsed diffs that are not shown)