main
MonHun 2024-04-27 17:15:54 +09:00
commit 8eaef92801
21 changed files with 1036 additions and 0 deletions

Binary file not shown.

Binary file not shown.

95
pom.xml 100644
View File

@ -0,0 +1,95 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<!-- Spring Boot 2.7.x parent: supplies managed versions for the starter dependencies below -->
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.7.8</version>
<relativePath/> <!-- lookup parent from repository -->
</parent>
<groupId>kr.gmtc.gw</groupId>
<artifactId>elastic</artifactId>
<version>0.0.1-SNAPSHOT</version>
<name>EyeElastic</name>
<description>Demo project for Spring Boot</description>
<!-- Local file-based repository: jars checked into ./lib resolve from here -->
<repositories>
<repository>
<id>local-repository</id>
<name>local-repository</name>
<url>file:${project.basedir}/lib</url>
</repository>
</repositories>
<!-- Java 8 baseline; newer language features must not be used in this module -->
<properties>
<java.version>1.8</java.version>
</properties>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<!-- Spring Data Elasticsearch: provides ElasticsearchOperations / RestHighLevelClient support -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-elasticsearch</artifactId>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-client</artifactId>
<version>1.19</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.json</groupId>
<artifactId>json</artifactId>
<version>20231013</version>
</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>2.8.9</version>
</dependency>
<!-- <dependency>
<groupId>kr.gmt.so</groupId>
<artifactId>state-spring-boot-starter</artifactId>
<version>1.0.3</version>
</dependency> -->
<!-- <dependency>
<groupId>ipworks.local</groupId>
<artifactId>ipworks-local-1.0.0</artifactId>
<scope>system</scope>
<version>1.0.0</version>
<systemPath>${basedir}/lib/ipworks-local-1.0.0.jar</systemPath>
</dependency> -->
</dependencies>
<build>
<!-- Output artifact name: EyeELK.jar -->
<finalName>EyeELK</finalName>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<configuration>
<!-- Package system-scoped jars (e.g. the commented ipworks dependency) into the boot jar -->
<includeSystemScope>true</includeSystemScope>
</configuration>
</plugin>
</plugins>
</build>
</project>

View File

@ -0,0 +1,113 @@
package kr.gmtc.gw.comp.thread;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import kr.gmtc.gw.comp.thread.handler.CustomThreadOnTerminate;
import kr.gmtc.gw.comp.thread.handler.CustomThreadWork;
public class CustomThread extends Thread{

    /** No sleep between work iterations. */
    public static final long NO_SLEEP = 0;
    /** 1 millisecond. */
    public static final long SLEEP_MILLI_SEC = 1;
    /** 1 second. */
    public static final long SLEEP_SECOND = 1000;
    /** 30 seconds. */
    public static final long SLEEP_HALF_MINIUTE = 30000;
    /** 1 minute. */
    public static final long SLEEP_MINIUTE = 60000;

    /** Simple class name of the owning object, for diagnostics ("" when none given). */
    public final String controllClassName;
    /** Sleep interval between work iterations, in milliseconds (0 = no sleep). */
    public final long repeatMiliSec;
    /** Work executed on every loop iteration. */
    public final CustomThreadWork definedWork;
    /** Optional callback invoked once after the loop terminates (may be null). */
    public final CustomThreadOnTerminate definedTerminate;
    public final Logger logger;

    // BUGFIX: volatile — written by gracefulStop() from another thread and read
    // by run(); without volatile the worker may never observe the stop request.
    private volatile boolean running;

    /**
     * Creates a worker thread that repeatedly invokes {@code definedWork},
     * sleeping {@code repeatMiliSec} ms between iterations via
     * {@link Thread#sleep(long)}. Stop it cooperatively with {@link #gracefulStop()}
     * or by interrupting the thread.
     *
     * @param threadName       name assigned to this thread
     * @param controllClass    owning object (usually {@code this}); used only for diagnostics
     * @param repeatMiliSec    sleep between iterations in ms; values &lt;= 0 mean no sleep
     * @param definedWork      the work to run each iteration (required)
     * @param definedTerminate callback run once after the loop ends (may be null)
     * @param autoStart        when true, the thread is started from the constructor
     * @throws IllegalArgumentException if {@code definedWork} is null
     */
    public CustomThread(String threadName, Object controllClass, long repeatMiliSec,
            CustomThreadWork definedWork, CustomThreadOnTerminate definedTerminate, boolean autoStart) {
        if (definedWork == null) {
            throw new IllegalArgumentException("[CustomThread] - definedWork is null.");
        }
        this.definedWork = definedWork;
        this.definedTerminate = definedTerminate;
        this.controllClassName = controllClass == null ? "" : controllClass.getClass().getSimpleName();
        this.repeatMiliSec = repeatMiliSec > 0 ? repeatMiliSec : 0;
        this.logger = LoggerFactory.getLogger(CustomThread.class);
        this.running = false;
        setName(threadName);
        setDaemon(true);
        // NOTE(review): starting a thread from its own constructor publishes a
        // partially-constructed subclass instance; acceptable here because this
        // class is used directly, but confirm no subclassing relies on it.
        if (autoStart) {
            this.start();
        }
    }

    @Override
    public void run() {
        logger.info("[CustomThread] Started.");

        while (this.running && !this.isInterrupted()) {
            try {
                try {
                    this.definedWork.work();
                } finally {
                    // Sleep even when work() threw, to avoid a tight error loop.
                    if (this.repeatMiliSec > 0) {
                        Thread.sleep(this.repeatMiliSec);
                    }
                }
            } catch (InterruptedException e) {
                // Interrupt requests terminate the loop; restore the flag for callers.
                logger.error("[CustomThread] Interrupted. {}", e.toString());
                Thread.currentThread().interrupt();
                break;
            } catch (Exception e) {
                // Log-and-continue for unexpected exceptions; beware of exception-driven hot loops.
                logger.error("[CustomThread] Unknown Exception Occur. {}", e.toString());
            }
        }

        if (this.definedTerminate != null) {
            this.definedTerminate.onTerminate();
        }

        // BUGFIX: was logged at ERROR level with the typo "Stoped." — normal
        // termination is an informational event.
        logger.info("[CustomThread] Stopped.");
    }

    @Override
    public String toString() {
        // BUGFIX: "runnig" typo corrected.
        return "CustomThread [controllClass=" + this.controllClassName + ", threadName=" + getName() +
                ", running=" + this.running + ", alive=" + isAlive() + ", repeatMiliSec=" + this.repeatMiliSec +
                ", definedTerminate=" + (this.definedTerminate == null ? "no" : "yes") + "]";
    }

    @Override
    public synchronized void start() {
        this.running = true;
        super.start();
    }

    /**
     * Requests cooperative shutdown: the worker loop exits after the current
     * iteration (and its sleep) completes.
     */
    public void gracefulStop() {
        this.running = false;
    }
}

View File

@ -0,0 +1,6 @@
package kr.gmtc.gw.comp.thread.handler;
@FunctionalInterface
// Callback invoked exactly once by CustomThread.run() after its work loop exits.
public interface CustomThreadOnTerminate {
// Performs cleanup when the owning CustomThread terminates.
public void onTerminate();
}

View File

@ -0,0 +1,6 @@
package kr.gmtc.gw.comp.thread.handler;
@FunctionalInterface
// Unit of work executed by CustomThread on every loop iteration.
public interface CustomThreadWork {
// Runs one iteration of work; exceptions are caught and logged by CustomThread,
// except InterruptedException, which stops the loop.
public void work() throws Exception;
}

View File

@ -0,0 +1,29 @@
package kr.gmtc.gw.elastic;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.context.ApplicationPidFileWriter;
import org.springframework.boot.system.ApplicationHome;
import org.springframework.scheduling.annotation.EnableScheduling;
@EnableScheduling
@SpringBootApplication
public class ESApplication {

    /**
     * Application entry point: pins the working directory to the location the
     * jar was launched from, names the main thread, registers a PID file
     * writer, and boots the Spring context.
     */
    public static void main(String[] args) {
        // Resolve the launch directory and make it the process working directory.
        ApplicationHome appHome = new ApplicationHome(ESApplication.class);
        System.setProperty("user.dir", appHome.getDir().getPath());

        Thread.currentThread().setName("JVM - Main");

        SpringApplication app = new SpringApplication(ESApplication.class);
        // Writes ./application.pid on startup so external scripts can manage the process.
        app.addListeners(new ApplicationPidFileWriter("./application.pid"));
        app.run(args);
    }
}

View File

@ -0,0 +1,36 @@
package kr.gmtc.gw.elastic.config;
import org.elasticsearch.client.RestHighLevelClient;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.elasticsearch.client.ClientConfiguration;
import org.springframework.data.elasticsearch.client.RestClients;
import org.springframework.data.elasticsearch.config.AbstractElasticsearchConfiguration;
@Configuration
//@EnableElasticsearchRepositories(basePackages = "kr.gmtc.gw.elk.repositories")
@ComponentScan(basePackages = {"kr.gmtc.gw.elk"})
public class ElasticsearchClientConfig extends AbstractElasticsearchConfiguration {

    // NOTE(review): the component scan targets "kr.gmtc.gw.elk" while this code
    // lives under "kr.gmtc.gw.elastic" — confirm the package name is intentional.

    /** Elasticsearch host, injected from app.config.elastic.host. */
    @Value("${app.config.elastic.host}")
    private String gsEsHost;

    /** Elasticsearch HTTP port, injected from app.config.elastic.port. */
    @Value("${app.config.elastic.port}")
    private String gsEsPort;

    /**
     * Builds the high-level REST client pointed at the configured host:port.
     * The container closes it on shutdown via {@code destroyMethod = "close"}.
     */
    @Override
    @Bean(destroyMethod = "close")
    public RestHighLevelClient elasticsearchClient() {
        String endpoint = gsEsHost + ":" + gsEsPort;
        ClientConfiguration clientConfiguration = ClientConfiguration.builder()
                .connectedTo(endpoint)
                .build();
        return RestClients.create(clientConfiguration).rest();
    }
}

View File

@ -0,0 +1,132 @@
package kr.gmtc.gw.elastic.parser;
import java.util.ArrayList;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.LinkedBlockingQueue;
import kr.gmtc.gw.elastic.utils.LogMsgParserUtile;
import kr.gmtc.gw.elastic.vo.ElasticMsgHeader;
import kr.gmtc.gw.elastic.vo.ElasticResultMsgVO;
import kr.gmtc.gw.elastic.vo.LogMsgParserVO;
import kr.gmtc.gw.elastic.vo.ReplayLog.AiLocationVO;
import kr.gmtc.gw.elastic.vo.ReplayLog.StandRouteVO;
import kr.gmtc.gw.elastic.vo.ReplayLog.StandRouteVO.RoutePoints;
public class ReplayLogParser_AILocation {

    private List<ElasticResultMsgVO> logMsgs;

    /** Parsed output: one LogMsgParserVO per raw log message, in arrival order. */
    Queue<LogMsgParserVO> parserAiLocQ = new LinkedBlockingQueue<LogMsgParserVO>();

    LogMsgParserUtile pUtile;

    public ReplayLogParser_AILocation(){
        pUtile = new LogMsgParserUtile();
    }

    /**
     * Parses raw AI-location replay-log messages and queues one parsed VO per message.
     *
     * Message layout (same framing as ReplayLogParser_Route):
     *   header "log_dt,log_se,sw_se,server_se" then '|' then one or more data rows
     *   separated by '^'.
     *
     * @param recvMsgs raw messages fetched from Elasticsearch
     */
    public void logParser(List<ElasticResultMsgVO> recvMsgs){
        this.logMsgs = recvMsgs;

        for (ElasticResultMsgVO mv : logMsgs) {
            String sMsg = mv.getMessage();
            LogMsgParserVO logVO = new LogMsgParserVO();

            // Header and data sections are separated by '|'.
            String[] split_Div = pUtile.splitString(sMsg, pUtile.DIV_PIPE);

            // --- header: log_dt,log_se,sw_se,server_se ---
            String[] split_Header = pUtile.splitString(split_Div[0], pUtile.DIV_COMMA);
            ElasticMsgHeader logHeader = new ElasticMsgHeader();
            logHeader.setLog_dt(split_Header[0]);
            logHeader.setLog_se(split_Header[1]);
            logHeader.setSw_se(split_Header[2]);
            logHeader.setServer_se(split_Header[3]);
            logVO.setHeader(logHeader);

            // --- body: split multi-row payloads on '^'; otherwise a single row ---
            String sDatas = split_Div[1];
            String[] split_Data;
            if (pUtile.countDivInMsg(sDatas, pUtile.DIV_CARET) > 0) {
                split_Data = pUtile.splitString(sDatas, pUtile.DIV_CARET);
            } else {
                split_Data = new String[]{ sDatas };
            }

            // BUGFIX: the result list is now created per message. Previously a
            // single list was allocated before the loop, so every LogMsgParserVO
            // shared one list that accumulated rows from all prior messages.
            List<AiLocationVO> parserList = new ArrayList<AiLocationVO>();
            for (String loop1Data : split_Data) {
                AiLocationVO parserVO = new AiLocationVO();
                // TODO(review): field mapping is not implemented yet — rows are
                // queued as empty AiLocationVOs. See ReplayLogParser_Route for the
                // intended split pattern (',' fields, '$' list rows, '@' arrays).
                parserList.add(parserVO);
            }
            logVO.setData(parserList);

            // Hand the parsed message to consumers.
            parserAiLocQ.offer(logVO);
        }
    }
}

View File

@ -0,0 +1,127 @@
package kr.gmtc.gw.elastic.parser;
import java.util.ArrayList;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.LinkedBlockingQueue;
import kr.gmtc.gw.elastic.utils.LogMsgParserUtile;
import kr.gmtc.gw.elastic.vo.ElasticMsgHeader;
import kr.gmtc.gw.elastic.vo.ElasticResultMsgVO;
import kr.gmtc.gw.elastic.vo.LogMsgParserVO;
import kr.gmtc.gw.elastic.vo.ReplayLog.SacpStandRouteVO;
import kr.gmtc.gw.elastic.vo.ReplayLog.SacpStandRouteVO.RoutePoints;
public class ReplayLogParser_Route {

    private List<ElasticResultMsgVO> logMsgs;

    /** Parsed output: one LogMsgParserVO per raw log message, in arrival order. */
    Queue<LogMsgParserVO> parserRouteQ = new LinkedBlockingQueue<LogMsgParserVO>();

    LogMsgParserUtile pUtile;

    public ReplayLogParser_Route(){
        pUtile = new LogMsgParserUtile();
    }

    /**
     * Parses raw stand-route replay-log messages and queues one parsed VO per message.
     *
     * Message layout:
     *   "log_dt,log_se,sw_se,server_se|row[^row...]"
     * where each row is
     *   "mv_id,trgt_id,mv_type,dep_arr_ty,spot_sn@lat@lon[$spot_sn@lat@lon...]".
     *
     * @param recvMsgs raw messages fetched from Elasticsearch
     */
    public void logParser(List<ElasticResultMsgVO> recvMsgs){
        this.logMsgs = recvMsgs;

        for (ElasticResultMsgVO mv : logMsgs) {
            String sMsg = mv.getMessage();
            LogMsgParserVO logVO = new LogMsgParserVO();

            // Header and data sections are separated by '|'.
            String[] split_Div = pUtile.splitString(sMsg, pUtile.DIV_PIPE);

            // --- header: log_dt,log_se,sw_se,server_se ---
            String[] split_Header = pUtile.splitString(split_Div[0], pUtile.DIV_COMMA);
            ElasticMsgHeader logHeader = new ElasticMsgHeader();
            logHeader.setLog_dt(split_Header[0]);
            logHeader.setLog_se(split_Header[1]);
            logHeader.setSw_se(split_Header[2]);
            logHeader.setServer_se(split_Header[3]);
            logVO.setHeader(logHeader);

            // --- body: split multi-row payloads on '^'; otherwise a single row ---
            String sDatas = split_Div[1];
            String[] split_Data;
            if (pUtile.countDivInMsg(sDatas, pUtile.DIV_CARET) > 0) {
                split_Data = pUtile.splitString(sDatas, pUtile.DIV_CARET);
            } else {
                split_Data = new String[]{ sDatas };
            }

            // BUGFIX: the route list is now created per message. Previously a
            // single list was allocated before the loop, so every LogMsgParserVO
            // shared one list that accumulated routes from all prior messages.
            List<SacpStandRouteVO> routeList = new ArrayList<SacpStandRouteVO>();
            for (String loop1Data : split_Data) {
                SacpStandRouteVO routeVO = new SacpStandRouteVO();

                // Plain fields are comma-separated.
                String[] split_Field = pUtile.splitString(loop1Data, pUtile.DIV_COMMA);
                routeVO.setMv_id(split_Field[0]);
                routeVO.setTrgt_id(split_Field[1]);
                routeVO.setMv_type(split_Field[2]);
                routeVO.setDep_arr_ty(split_Field[3]);
                routeVO.setMv_point(new ArrayList<RoutePoints>());

                // The 5th field is a '$'-separated list of '@'-separated points.
                String[] split_List = pUtile.splitString(split_Field[4], pUtile.DIV_DOLLAR);
                for (String loopList : split_List) {
                    RoutePoints routePoints = new RoutePoints();
                    String[] split_Array = pUtile.splitString(loopList, pUtile.DIV_AT);
                    routePoints.setSpot_sn(split_Array[0]);
                    routePoints.setLat(split_Array[1]);
                    routePoints.setLon(split_Array[2]);
                    routeVO.getMv_point().add(routePoints);
                }
                routeList.add(routeVO);
            }
            logVO.setData(routeList);

            // Hand the parsed message to consumers.
            parserRouteQ.offer(logVO);
        }
    }
}

View File

@ -0,0 +1,140 @@
package kr.gmtc.gw.elastic.services;
import java.time.Duration;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.domain.Sort;
import org.springframework.data.elasticsearch.core.ElasticsearchOperations;
import org.springframework.data.elasticsearch.core.SearchHits;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.query.BaseQuery;
import org.springframework.data.elasticsearch.core.query.Query;
import org.springframework.data.elasticsearch.core.query.StringQuery;
import org.springframework.stereotype.Service;
import kr.gmtc.gw.elastic.parser.ReplayLogParser_Route;
import kr.gmtc.gw.elastic.vo.ElasticResultMsgVO;
@Service
public class ElkSearchService {

    /** Page size for search_after pagination (also the per-request max results). */
    private static final int PAGE_SIZE = 1000;

    private ElasticsearchOperations elasticsearchOperations;

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    private final String INDEX_NAME = "replay_log_*";

    /** Accumulates all hits of the current search; reset at the start of every call. */
    private List<ElasticResultMsgVO> logMsgMatches;

    private ReplayLogParser_Route logParser;

    public ElkSearchService(ElasticsearchOperations elasticsearchOperations){
        this.elasticsearchOperations = elasticsearchOperations;
        logMsgMatches = new ArrayList<ElasticResultMsgVO>();
        logParser = new ReplayLogParser_Route();
    }

    /**
     * Pages through the replay_log_* indices with search_after, collecting every
     * matching message, then hands the full result set to the route-log parser.
     *
     * @param sLogType replay-log S/W type id, matched against header.msg_type
     * @param sStartDt range start, e.g. "20240219160000.000"
     * @param sEndDt   range end, e.g. "20240219170000.000"
     * @return "OK" (results are delivered through the parser, not returned)
     */
    public String elkSearch_replay_log(String sLogType, String sStartDt, String sEndDt) {

        List<Object> sAfter = null; // search_after cursor: sort values of the last hit

        // BUGFIX: clear results of any previous call. This service is a Spring
        // singleton, so the field otherwise accumulates (and re-parses) old hits
        // on every request.
        logMsgMatches.clear();

        // NOTE(review): sLogType is spliced into the query unquoted — confirm
        // callers always pass a numeric type id.
        String esQuery = "{ " + //
                " \"bool\" : { " + //
                " \"filter\" : [ " + //
                " { \"terms\" : { " + //
                " \"header.msg_type.keyword\" : [ " + //
                " "+ sLogType +" ] } }, " + //
                " { \"range\" : { " + //
                " \"header.recv_time\" : { " + //
                " \"from\" : \""+ sStartDt +"\", " + //
                " \"to\" : \""+ sEndDt +"\" } } } ] } " + //
                "}";

        Query searchQuery = new StringQuery(esQuery);
        // Rows fetched per request.
        ((BaseQuery) searchQuery).setMaxResults(PAGE_SIZE);
        // Stable sort required for search_after paging.
        searchQuery.addSort(Sort.by(new Sort.Order(Sort.Direction.ASC, "header.recv_time.keyword")));

        LocalDateTime dt_before = LocalDateTime.now();

        while (true) {
            searchQuery.setSearchAfter(sAfter);

            // Execute the paged search.
            SearchHits<ElasticResultMsgVO> searchHits =
                    elasticsearchOperations.search(searchQuery, ElasticResultMsgVO.class, IndexCoordinates.of(INDEX_NAME));

            int pageSize = searchHits.toList().size();

            // BUGFIX: stop on an empty page. The old code checked getTotalHits()
            // (total matches, not page size), which indexed an empty list when the
            // result count was an exact multiple of PAGE_SIZE.
            if (pageSize == 0) {
                if (!logMsgMatches.isEmpty()) {
                    logParser.logParser(logMsgMatches);
                }
                break;
            }

            Duration diff = Duration.between(dt_before, LocalDateTime.now());
            logger.info("elastic Call {}건 {}msec", searchHits.getTotalHits(), diff.toMillis());

            // Advance the cursor to the last hit of this page.
            sAfter = searchHits.toList().get(pageSize - 1).getSortValues();

            // Collect page contents.
            searchHits.forEach(srcHit -> logMsgMatches.add(srcHit.getContent()));
            logger.info("map cnt {}건 ", logMsgMatches.size());

            // A short page means this was the final page.
            if (pageSize < PAGE_SIZE) {
                logParser.logParser(logMsgMatches);
                break;
            }
        }

        return "OK";
    }
}
// POST replay_log_*/_search
// {
// "size" : 10000,
// "track_total_hits": true,
// "query" : {
// "bool" : {
// "filter" : [
// {
// "terms" : {
// "header.msg_type.keyword" : [
// 10
// ]
// }
// },
// {
// "range" : {
// "header.recv_time" : {
// "from" : "20240416090000.000",
// "to" : "20240416103059.999"
// }
// }
// }
// ]
// }
// },
// "_source": ["@timestamp", "header", "message"],
// "sort" : [{"header.recv_time.keyword" : {"order" : "ASC"}}],
// "search_after" : ["0"]
// }

View File

@ -0,0 +1,40 @@
package kr.gmtc.gw.elastic.test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;
import kr.gmtc.gw.elastic.services.ElkSearchService;
@RestController
@RequestMapping("/")
public class SearchController {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    private ElkSearchService elkSearchService;

    /** Constructor injection of the Elasticsearch search service. */
    public SearchController(ElkSearchService elkSearchService){
        this.elkSearchService = elkSearchService;
    }

    /**
     * Triggers a replay-log search over the given index type and time range.
     * Example: /replay?idx=10&amp;s=20240416090000.000&amp;e=20240416103059.999
     */
    @GetMapping("/replay")
    @ResponseBody
    public String processSearch_replay_loop(@RequestParam(value = "idx", required = false) String sIndex,
            @RequestParam(value = "s", required = false) String sStartDt,
            @RequestParam(value = "e", required = false) String sEndDt) {
        logger.info("searching by Index {}", sIndex);
        return elkSearchService.elkSearch_replay_log(sIndex, sStartDt, sEndDt);
    }
}

View File

@ -0,0 +1,29 @@
package kr.gmtc.gw.elastic.utils;

import java.util.regex.Pattern;
public class LogMsgParserUtile {

    /** Delimiter table, indexed by the DIV_* constants below. */
    public String[] sSplitChar;

    public final int DIV_PIPE = 0;   // header/data separator (|)
    public final int DIV_CARET = 1;  // data row separator (^)
    public final int DIV_COMMA = 2;  // plain field separator (,)
    public final int DIV_DOLLAR = 3; // list row separator ($)
    public final int DIV_AT = 4;     // array field separator (@)

    public LogMsgParserUtile(){
        sSplitChar = new String[]{"|", "^", ",", "$", "@"};
    }

    /**
     * Splits s on the delimiter identified by div_num.
     * Pattern.quote makes the delimiter literal regardless of regex
     * metacharacters (|, ^, $) and works for multi-character delimiters too.
     */
    public String[] splitString(String s, int div_num ){
        return s.split(Pattern.quote(sSplitChar[div_num]));
    }

    /**
     * Returns the number of occurrences of the delimiter in s (0 when absent).
     * BUGFIX: previously returned String.indexOf(...) — a position, not a count —
     * so a delimiter at index 0 made callers' "&gt; 0 means present" check fail
     * and a multi-row payload was treated as a single row.
     */
    public int countDivInMsg(String s, int div_num){
        String div = sSplitChar[div_num];
        int count = 0;
        int idx = s.indexOf(div);
        while (idx >= 0) {
            count++;
            idx = s.indexOf(div, idx + div.length());
        }
        return count;
    }
}

View File

@ -0,0 +1,15 @@
package kr.gmtc.gw.elastic.vo;
import lombok.Getter;
import lombok.Setter;
@Setter
@Getter
// Common replay-log message header, populated by the ReplayLogParser_* classes
// from the comma-separated header section "log_dt,log_se,sw_se,server_se".
public class ElasticMsgHeader {
private String log_dt; // log datetime string
private String log_se; // presumably a log classification code — verify against producers
private String sw_se; // presumably a software classification code — verify against producers
private String server_se; // presumably a server classification code — verify against producers
}

View File

@ -0,0 +1,20 @@
package kr.gmtc.gw.elastic.vo;
import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.annotations.FieldType;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@Builder
@AllArgsConstructor
@NoArgsConstructor
// Projection of an Elasticsearch hit: only the raw "message" field is mapped;
// the parsers split this string into header and data sections.
public class ElasticResultMsgVO {
@Field(type = FieldType.Text, name = "message")
private String message;
}

View File

@ -0,0 +1,17 @@
package kr.gmtc.gw.elastic.vo;
import java.util.List;
import lombok.Getter;
import lombok.Setter;
@Setter
@Getter
// One parsed replay-log message: its header plus a list of row VOs
// (e.g. SacpStandRouteVO or AiLocationVO, hence the wildcard element type).
public class LogMsgParserVO {
private ElasticMsgHeader header;
private List<?> data;
}

View File

@ -0,0 +1,27 @@
package kr.gmtc.gw.elastic.vo.ReplayLog;
import lombok.Getter;
import lombok.Setter;
@Setter
@Getter
// AI camera location-detection row for replay logs. Field semantics are inferred
// from the abbreviated names and are not yet exercised by a parser
// (ReplayLogParser_AILocation's mapping is still commented out) — verify against
// the producing system before relying on them.
public class AiLocationVO {
private String arcrft_stnd; // presumably aircraft stand id — TODO confirm
private String arcrft_tp; // presumably aircraft type — TODO confirm
private String arln; // presumably airline code — TODO confirm
private String[] arr_bb_brp; // array field (split on '@') — meaning unconfirmed
private String[] arr_bb_tlp; // array field (split on '@') — meaning unconfirmed
private String[] arr_cc_id; // array field (split on '@') — meaning unconfirmed
private String[] arr_crd_frm; // array field (split on '@') — meaning unconfirmed
private String[] arr_crd_glbl; // array field (split on '@') — meaning unconfirmed
private String drctn; // presumably direction — TODO confirm
private String dtct_cls; // presumably detection class — TODO confirm
private String dtct_tm; // presumably detection time — TODO confirm
private String mta_tm; // meaning unconfirmed — TODO confirm
private String rw_tm; // meaning unconfirmed — TODO confirm
private String spd; // presumably speed — TODO confirm
private String trck_id; // presumably track id — TODO confirm
private String zn_id; // presumably zone id — TODO confirm
}

View File

@ -0,0 +1,26 @@
package kr.gmtc.gw.elastic.vo.ReplayLog;
import java.util.List;
import lombok.Getter;
import lombok.Setter;
@Setter
@Getter
// Stand-route row: a movement with its ordered list of route points.
// NOTE(review): structurally identical to SacpStandRouteVO (which the route
// parser actually uses) — confirm whether this duplicate type is still needed.
public class StandRouteVO {
private String mv_id; // movement id
private String trgt_id; // target id
private String mv_type; // movement type
private String dep_arr_ty; // presumably departure/arrival type — TODO confirm
private List<RoutePoints> mv_point; // ordered route points of this movement
@Setter
@Getter
public static class RoutePoints{
private String spot_sn; // spot sequence number
private String lat; // latitude (kept as string, as parsed)
private String lon; // longitude (kept as string, as parsed)
}
}

View File

@ -0,0 +1,42 @@
{"properties": [
{
"name": "root",
"type": "java.lang.String",
"description": "A description for 'root'"
},
{
"name": "asde.filepath1",
"type": "java.lang.String",
"description": "A description for 'asde.filepath1'"
},
{
"name": "asde.filepath2",
"type": "java.lang.String",
"description": "A description for 'asde.filepath2'"
},
{
"name": "asde.service",
"type": "java.lang.String",
"description": "A description for 'asde.service'"
},
{
"name": "asde.service.queueCount",
"type": "java.lang.String",
"description": "A description for 'asde.service.queueCount'"
},
{
"name": "asde.service.clearQ.max-count",
"type": "java.lang.String",
"description": "A description for 'asde.service.clearQ.max-count'"
},
{
"name": "asde.service.clearQ.diff-time",
"type": "java.lang.String",
"description": "A description for 'asde.service.clearQ.diff-time'"
},
{
"name": "asde.service.clearQ.clearTime",
"type": "java.lang.String",
"description": "A description for 'asde.service.clearQ.clearTime'"
}
]}

View File

@ -0,0 +1,33 @@
spring:
profiles:
active: default
group:
default:
- winTest
---
spring:
config:
activate:
on-profile: default
server:
port: 8080
---
spring:
config:
activate:
on-profile: real
---
spring:
config:
activate:
on-profile: winTest
app:
config:
elastic:
host: 10.200.31.129
port: 9200

View File

@ -0,0 +1,103 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="30 seconds">
<property name="PATH_SEPARATOR" value="/"/>
<conversionRule conversionWord="clr" converterClass="org.springframework.boot.logging.logback.ColorConverter" />
<!-- 로그 파일이 저장될 경로 -->
<property name="LOG_PATH" value=".${PATH_SEPARATOR}logs"/>
<!-- 로그 파일 이름 -->
<property name="LOG_FILE_NAME" value="trace"/>
<!-- 로그 출력 패턴 -->
<property name="LOG_PATTERN" value="[%d{yyyy-MM-dd HH:mm:ss.SSS}] %highlight([%.-1level]) %clr([%-40.40(%class{0}.%method)]){cyan} %msg%n"/>
<property name="FILE_LOG_PATTERN" value="[%d{yyyy-MM-dd HH:mm:ss.SSS}][%.-1level][%-40.40(%class{0}.%method)] %msg%n"/>
<!-- 로그 레벨 -->
<!--
1) ERROR : 오류 메시지 표시
2) WARN : 경고성 메시지 표시
3) INFO : 정보성 메시지 표시
4) DEBUG : 디버깅하기 위한 메시지 표시
5) TRACE : Debug보다 훨씬 상세한 메시지 표시
-->
<property name="LOG_LEVEL" value="INFO"/>
<!-- CONSOLE에 로그 출력 세팅 -->
<appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
<layout class="ch.qos.logback.classic.PatternLayout">
<pattern>${LOG_PATTERN}</pattern>
</layout>
</appender>
<!-- File에 로그 출력 세팅 -->
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- 로그파일은 Info부터 저장한다!! -->
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<!-- <level>INFO</level> -->
<level>INFO</level>
</filter>
<!-- 파일 경로 설정 -->
<file>${LOG_PATH}${PATH_SEPARATOR}${LOG_FILE_NAME}.log</file>
<!-- 출력패턴 설정-->
<layout class="ch.qos.logback.classic.PatternLayout">
<pattern>${FILE_LOG_PATTERN}</pattern>
</layout>
<!-- Rolling 정책 -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- .gz,.zip 등을 넣으면 자동 일자별 로그파일 압축 -->
<fileNamePattern>${LOG_PATH}${PATH_SEPARATOR}%d{yyyyMM,aux}${PATH_SEPARATOR}%d{yyyyMMdd}.log</fileNamePattern>
<!-- 로그파일 최대 보관주기, fileNamePattern 에 따라 일별, 월별, 년별-->
<maxHistory>10</maxHistory>
<!-- 아카이브 최대 용량 maxHistory 이후에 적용됨 -->
<totalSizeCap>10GB</totalSizeCap>
<!-- 시작시 정책 적용 여부 -->
<CleanHistoryOnStart>true</CleanHistoryOnStart>
</rollingPolicy>
</appender>
<!-- 비동기 파일(FILE)
includeCallerData: false, 비동기 로깅에서도 Method Name 및 Line Number 등 위치 정보를 출력하게 해주는 옵션
queueSize: 기본값은 256, Log4j2와 동일하게 1024로 설정
neverBlock: false(기본값)면 로그 발생시 Queue에 넣을 공간이 없으면 빈 공간이 생길 때 까지 블락킹 상태로 기다리며, 로그를 유실하지 않는다.
discardingThreshold: Queue에 남은 용량이 {해당 설정값 n}%이하가 되면, WARN 미만 로그가 유실되기 시작한다.
- 기본 값은 20이며, Queue 남은 용량이 20%이하가 되면 로그 유실이 발생한다.
- 0으로 세팅하면 Queue에 쌓인 로그를 드랍하지 않는다.
-->
<appender name="FILE-ASYNC" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="FILE" />
<includeCallerData>false</includeCallerData>
<discardingThreshold>0</discardingThreshold>
<queueSize>1024</queueSize>
<neverBlock>true</neverBlock>
</appender>
<!-- 로그 전역 세팅 -->
<root level="${LOG_LEVEL}">
<appender-ref ref="CONSOLE"/>
<appender-ref ref="FILE"/>
</root>
<!-- <logger name="kr.gmtc.gw" level="INFO">
<appender-ref ref="CONSOLE"/>
<appender-ref ref="FILE"/>
</logger>
<logger name="kr.gmt.so" level="INFO">
<appender-ref ref="CONSOLE"/>
<appender-ref ref="FILE"/>
</logger> -->
<!-- <logger name="kr.gmtc.comp.status" level="TRACE">
<appender-ref ref="CONSOLE"/>
<appender-ref ref="FILE"/>
</logger> -->
</configuration>