This commit is contained in:
gotoeasy 2023-11-15 19:58:49 +08:00
parent ac509f930f
commit 7df31233cf
13 changed files with 80 additions and 40 deletions

View File

@ -5,7 +5,7 @@
<groupId>top.gotoeasy</groupId>
<artifactId>glc-logback-appender</artifactId>
<version>0.13.0</version>
<version>0.14.0</version>
<description>logback appender for glogcenter</description>
<repositories>

View File

@ -25,11 +25,8 @@ public class GlcFilter implements Filter {
public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
throws IOException, ServletException {
// 设定日志中心相关的traceidclientip
HttpServletRequest httpServletRequest = (HttpServletRequest)request;
String traceid = httpServletRequest.getHeader(MdcUtil.TRACE_ID);
MdcUtil.setTraceId((traceid == null || traceid.length() == 0) ? MdcUtil.generateTraceId() : traceid);
MdcUtil.setClientIp(getIpAddr(httpServletRequest));
// 设定日志中心相关的traceidclientip若需要设定user可继承setMdcKeyValues添加相关逻辑
setMdcKeyValues(request);
chain.doFilter(request, response);
}
@ -38,7 +35,15 @@ public class GlcFilter implements Filter {
public void destroy() {
}
private static String getIpAddr(HttpServletRequest request) {
protected void setMdcKeyValues(ServletRequest request) {
// 设定日志中心相关的traceidclientip
HttpServletRequest httpServletRequest = (HttpServletRequest)request;
String traceid = httpServletRequest.getHeader(MdcUtil.TRACE_ID);
MdcUtil.setTraceId((traceid == null || traceid.length() == 0) ? MdcUtil.generateTraceId() : traceid);
MdcUtil.setClientIp(getIpAddr(httpServletRequest));
}
protected String getIpAddr(HttpServletRequest request) {
String[] headerNames = { "X-Forwarded-For", "X-Real-IP", "Proxy-Client-IP", "WL-Proxy-Client-IP",
"HTTP_CLIENT_IP", "HTTP_X_FORWARDED_FOR" };
for (String headerName : headerNames) {

View File

@ -97,6 +97,7 @@ public class GlcAmqpAppender extends AppenderBase<ILoggingEvent> {
String traceid = event.getMDCPropertyMap().get(MdcUtil.TRACE_ID);
String clientip = event.getMDCPropertyMap().get(MdcUtil.CLIENT_IP);
String user = event.getMDCPropertyMap().get(MdcUtil.USER);
String body = "{\"text\":" + Util.encodeStr(text.trim());
body += ",\"date\":\"" + Util.getDateString() + "\"";
@ -110,6 +111,9 @@ public class GlcAmqpAppender extends AppenderBase<ILoggingEvent> {
if (clientip != null && !"".equals(clientip)) {
body += ",\"clientip\":" + Util.encodeStr(clientip);
}
if (user != null && !"".equals(user)) {
body += ",\"user\":" + Util.encodeStr(user);
}
body += "}";
channel.basicPublish("", "glc-log-queue", null, body.getBytes("utf-8"));

View File

@ -67,6 +67,7 @@ public class GlcHttpJsonAppender extends AppenderBase<ILoggingEvent> {
try {
String traceid = event.getMDCPropertyMap().get(MdcUtil.TRACE_ID);
String clientip = event.getMDCPropertyMap().get(MdcUtil.CLIENT_IP);
String user = event.getMDCPropertyMap().get(MdcUtil.USER);
body = "{\"text\":" + Util.encodeStr(text.trim());
body += ",\"date\":\"" + Util.getDateString() + "\"";
@ -80,6 +81,9 @@ public class GlcHttpJsonAppender extends AppenderBase<ILoggingEvent> {
if (clientip != null && !"".equals(clientip)) {
body += ",\"clientip\":" + Util.encodeStr(clientip);
}
if (user != null && !"".equals(user)) {
body += ",\"user\":" + Util.encodeStr(user);
}
body += "}";
URL url = new URL(glcApiUrl);

View File

@ -8,6 +8,11 @@ public class MdcUtil {
public static final String TRACE_ID = "traceid";
public static final String CLIENT_IP = "clientip";
public static final String USER = "user";
public static void setUser(String user) {
MDC.put(USER, user);
}
public static void setClientIp(String clientip) {
MDC.put(CLIENT_IP, clientip);
@ -17,6 +22,10 @@ public class MdcUtil {
MDC.put(TRACE_ID, traceid);
}
public static void removeUser() {
MDC.remove(USER);
}
public static void removeClientIp() {
MDC.remove(CLIENT_IP);
}

View File

@ -52,7 +52,7 @@ func (e *Engine) Search(cond *search.SearchCondition) *search.SearchResult {
// 分词后检索
var adds []string
adds = append(adds, cond.OrgSystem, cond.Loglevel)
adds = append(adds, cond.OrgSystem, cond.Loglevel, cond.User)
kws := tokenizer.CutForSearchEx(cond.SearchKey, adds, nil) // 检索用关键词处理
// 【快速检查1】存在无索引数据的关键词时直接返回

View File

@ -24,6 +24,7 @@ type SearchCondition struct {
DatetimeTo string // 输入的日期范围to条件
Loglevel string // 输入的日志级别(单选条件)条件【内部会过滤修改】
Loglevels []string // 输入的日志级别(多选条件)条件【内部会过滤修改】
User string // 输入的用户条件
CurrentStoreName string // 隐藏条件当前日志文档ID所属的日志仓
CurrentId uint32 // 隐藏条件当前日志文档ID
Forward bool // 隐藏条件,是否向前检索(往下滚动查询)
@ -400,7 +401,7 @@ func findSame(cond *SearchCondition, minDocumentId uint32, maxDocumentId uint32,
idxdocStorage := indexdoc.NewDocIndexStorage(storeLogData.GetStoreName()) // 判断系统权限用
has := false
for j, max2 := 0, len(cond.Systems); j < max2; j++ {
if (idxdocStorage.GetWordDocSeq(cond.Systems[j], i)) > 0 {
if (idxdocStorage.GetWordDocSeq(cond.Systems[j], docId)) > 0 {
has = true
break
}
@ -410,12 +411,12 @@ func findSame(cond *SearchCondition, minDocumentId uint32, maxDocumentId uint32,
}
}
if cond.Loglevel == "" && len(cond.Loglevels) > 0 {
if flg && cond.Loglevel == "" && len(cond.Loglevels) > 0 {
// 日志级别范围内作过滤检查
idxdocStorage := indexdoc.NewDocIndexStorage(storeLogData.GetStoreName())
has := false
for j, max2 := 0, len(cond.Loglevels); j < max2; j++ {
if (idxdocStorage.GetWordDocSeq("!"+cond.Loglevels[j], i)) > 0 {
if (idxdocStorage.GetWordDocSeq("!"+cond.Loglevels[j], docId)) > 0 {
has = true
break
}
@ -471,7 +472,7 @@ func findSame(cond *SearchCondition, minDocumentId uint32, maxDocumentId uint32,
idxdocStorage := indexdoc.NewDocIndexStorage(storeLogData.GetStoreName()) // 判断系统权限用
has := false
for j, max2 := 0, len(cond.Systems); j < max2; j++ {
if (idxdocStorage.GetWordDocSeq(cond.Systems[j], i)) > 0 {
if (idxdocStorage.GetWordDocSeq(cond.Systems[j], docId)) > 0 {
has = true
break
}
@ -481,12 +482,12 @@ func findSame(cond *SearchCondition, minDocumentId uint32, maxDocumentId uint32,
}
}
if cond.Loglevel == "" && len(cond.Loglevels) > 0 {
if flg && cond.Loglevel == "" && len(cond.Loglevels) > 0 {
// 日志级别范围内作过滤检查
idxdocStorage := indexdoc.NewDocIndexStorage(storeLogData.GetStoreName())
has := false
for j, max2 := 0, len(cond.Loglevels); j < max2; j++ {
if (idxdocStorage.GetWordDocSeq("!"+cond.Loglevels[j], i)) > 0 {
if (idxdocStorage.GetWordDocSeq("!"+cond.Loglevels[j], docId)) > 0 {
has = true
break
}

View File

@ -14,23 +14,23 @@ import (
// 其中Tags是空格分隔的标签日期外各属性值会按空格分词
// 对应的json属性统一全小写
type LogDataModel struct {
Id string `json:"id,omitempty"` // 从1开始递增(36进制字符串)
Text string `json:"text,omitempty"` // 【必须】日志内容,多行时仅为首行,直接显示用,是全文检索对象
Date string `json:"date,omitempty"` // 日期格式YYYY-MM-DD HH:MM:SS.SSS
System string `json:"system,omitempty"` // 系统名
ServerName string `json:"servername,omitempty"` // 服务器名
ServerIp string `json:"serverip,omitempty"` // 服务器IP
ClientIp string `json:"clientip,omitempty"` // 客户端IP
TraceId string `json:"traceid,omitempty"` // 跟踪ID
LogType string `json:"logtype,omitempty"` // 日志类型1:登录日志、2:操作日志)
LogLevel string `json:"loglevel,omitempty"` // 日志级别debug、info、error
User string `json:"user,omitempty"` // 用户
Module string `json:"module,omitempty"` // 模块
Operation string `json:"action,omitempty"` // 操作
Detail string `json:"detail,omitempty"` // 多行时的详细日志信息,通常是包含错误堆栈等的日志内容(这部分内容不做索引处理)
Tags []string `json:"tags,omitempty"` // 自定义标签,都作为关键词看待处理
Keywords []string `json:"keywords,omitempty"` // 自定义的关键词
Sensitives []string `json:"sensitives,omitempty"` // 要删除的敏感词
Id string `json:"id,omitempty"` // 从1开始递增(36进制字符串)
Text string `json:"text,omitempty"` // 【必须】日志内容,多行时仅为首行,直接显示用,是全文检索对象
Date string `json:"date,omitempty"` // 日期格式YYYY-MM-DD HH:MM:SS.SSS
System string `json:"system,omitempty"` // 系统名
ServerName string `json:"servername,omitempty"` // 服务器名
ServerIp string `json:"serverip,omitempty"` // 服务器IP
ClientIp string `json:"clientip,omitempty"` // 客户端IP
TraceId string `json:"traceid,omitempty"` // 跟踪ID
// LogType string `json:"logtype,omitempty"` // 【X】日志类型1:登录日志、2:操作日志)
LogLevel string `json:"loglevel,omitempty"` // 日志级别debug、info、warn、error
User string `json:"user,omitempty"` // 用户
// Module string `json:"module,omitempty"` // 【X】模块
// Operation string `json:"action,omitempty"` // 【X】操作
Detail string `json:"detail,omitempty"` // 【内部字段】多行时的详细日志信息,通常是包含错误堆栈等的日志内容
// Tags []string `json:"tags,omitempty"` // 【X】自定义标签,都作为关键词看待处理
// Keywords []string `json:"keywords,omitempty"` // 【X】自定义的关键词
// Sensitives []string `json:"sensitives,omitempty"` // 【X】要删除的敏感词
}
func (d *LogDataModel) ToJson() string {

View File

@ -187,23 +187,24 @@ func (s *LogDataStorage) createInvertedIndex() int {
}
// 整理生成关键词
adds := docm.Keywords
adds = append(adds, docm.Tags...)
var adds []string
if docm.System != "" {
adds = append(adds, "~"+docm.System)
}
if docm.LogLevel != "" {
adds = append(adds, "!"+docm.LogLevel)
}
if docm.User != "" {
adds = append(adds, "@"+docm.User)
}
tgtStr := docm.System + " " + docm.ServerName + " " + docm.ServerIp +
" " + docm.ClientIp + " " + docm.TraceId + " " + docm.LogLevel + " " + docm.User + " " + docm.Module + " " + docm.Operation
tgtStr := docm.System + " " + docm.ServerName + " " + docm.ServerIp + " " + docm.ClientIp + " " + docm.TraceId + " " + docm.LogLevel + " " + docm.User
if docm.Detail != "" && conf.IsMulitLineSearch() {
tgtStr = tgtStr + " " + docm.Detail // 支持日志列全部行作为索引检索对象
} else {
tgtStr = tgtStr + " " + docm.Text // 日志列仅第一行作为索引检索对象
}
kws := tokenizer.CutForSearchEx(tgtStr, adds, docm.Sensitives) // 两数组参数的元素可以重复或空白,会被判断整理
kws := tokenizer.CutForSearchEx(tgtStr, adds, nil) // 两数组参数的元素可以重复或空白,会被判断整理
// 每个关键词都创建反向索引
for _, word := range kws {

View File

@ -1,4 +1,4 @@
package ver
// 版本号,升级版本时修改
const VERSION = "v0.13.0"
const VERSION = "v0.14.0"

View File

@ -28,6 +28,7 @@ func JsonLogAddTestDataController(req *gweb.HttpRequest) *gweb.HttpResult {
ClientIp: "127.0.0.1",
TraceId: traceId,
LogLevel: "INFO",
User: "tuser-" + cmn.RandomString(1),
}
addDataModelLog(md)
@ -44,6 +45,7 @@ func JsonLogAddTestDataController(req *gweb.HttpRequest) *gweb.HttpResult {
ClientIp: "127.0.0.1",
TraceId: traceId,
LogLevel: "DEBUG",
User: "tuser-" + cmn.RandomString(1),
}
addDataModelLog(md2)

View File

@ -34,7 +34,7 @@ func LogSearchController(req *gweb.HttpRequest) *gweb.HttpResult {
catchSession.Set(token, username) // 会话重新计时
}
// 准备好各种场景的检索条件
// 准备好各种场景的检索条件(系统【~】、日志级别【!】、用户【@】)
startTime := time.Now()
mnt := sysmnt.NewSysmntStorage()
cond := &search.SearchCondition{SearchSize: conf.GetPageSize()}
@ -46,8 +46,12 @@ func LogSearchController(req *gweb.HttpRequest) *gweb.HttpResult {
cond.DatetimeFrom = req.GetFormParameter("datetimeFrom") // 日期范围From
cond.DatetimeTo = req.GetFormParameter("datetimeTo") // 日期范围To
cond.OrgSystem = cmn.Trim(req.GetFormParameter("system")) // 系统
cond.Loglevel = req.GetFormParameter("loglevel") // 单选条件
cond.User = cmn.ToLower(cmn.Trim(req.GetFormParameter("user"))) // 用户
cond.Loglevel = cmn.ToLower(req.GetFormParameter("loglevel")) // 单选条件
cond.Loglevels = cmn.Split(cond.Loglevel, ",") // 多选条件
if cond.User != "" {
cond.User = "@" + cond.User // 有指定用户条件
}
if len(cond.Loglevels) <= 1 || len(cond.Loglevels) >= 4 {
cond.Loglevels = make([]string, 0) // 多选的单选或全选都清空单选走loglevel索引全选等于没选
}
@ -80,7 +84,7 @@ func LogSearchController(req *gweb.HttpRequest) *gweb.HttpResult {
cond.OrgSystem = "~" + cond.OrgSystem // 多个系统权限,按输入的系统作条件
}
} else {
ary := cmn.Split(user.Systems, ",")
ary := cmn.Split(cmn.ToLower(user.Systems), ",")
okSystem := false
for i := 0; i < len(ary); i++ {
cond.OrgSystems = append(cond.OrgSystems, "~"+ary[i]) // 仅设定的系统有访问权限

View File

@ -31,6 +31,10 @@
value-format="YYYY-MM-DD HH:mm:ss" start-placeholder="开始时间" end-placeholder="结束时间"
popper-class="c-datapicker" />
</el-form-item>
<el-form-item label="用户">
<el-input v-model="formData.user" :disabled="readonly" placeholder="请输入用户" maxlength="100"
style="width:420px;" />
</el-form-item>
</el-row>
</SearchForm>
</template>
@ -290,6 +294,7 @@ function search() {
data.loglevel = (formData.value.loglevel || []).join(',');
data.datetimeFrom = (formData.value.datetime || ['', ''])[0];
data.datetimeTo = (formData.value.datetime || ['', ''])[1];
data.user = formData.value.user;
//
moreConditon.value = data;
@ -419,6 +424,11 @@ function fnDownload() {
--el-input-width: 100%;
}
.c-search-form .el-form-item--small .el-form-item__label {
height: 30px;
line-height: 30px;
}
.c-datapicker.el-popper.is-pure {
margin-left: -100px;
}