Merge pull request #291 from AkkiaS7/fix-service-recover-notification

Clear the notification mute cache when the service status changes

Co-authored-by: AkkiaS7 <68485070+AkkiaS7@users.noreply.github.com>
naiba 2023-06-19 11:02:13 +08:00 committed by GitHub
commit 2edfff334f
4 changed files with 103 additions and 48 deletions
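
Note (illustrative, not part of the diff): the dashboard mutes repeat notifications by storing a cache key derived from a mute label plus the notification tag; SendNotification skips sending while that key exists, and the new UnMuteNotification helper deletes it so the next state transition can alert again. A minimal self-contained sketch of that pattern, using a plain map in place of the project's cache and a made-up example label:

package main

import "fmt"

// muted stands in for the dashboard's notification cache.
var muted = map[string]bool{}

// muteKey mimics appending the notification tag to the mute label (simplified format).
func muteKey(tag, label string) string { return label + "::" + tag }

// sendNotification skips sending while the mute key exists, then sets it,
// so repeated identical alerts are suppressed.
func sendNotification(tag, msg string, label *string) {
	if label != nil {
		key := muteKey(tag, *label)
		if muted[key] {
			return // already notified for this label, stay silent
		}
		muted[key] = true
	}
	fmt.Println("notify:", msg)
}

// unMuteNotification clears the mute key so the next state change notifies again.
func unMuteNotification(tag string, label *string) {
	delete(muted, muteKey(tag, *label))
}

func main() {
	label := "bf::ssc-1" // example label in the ServiceStateChanged style (exact format not shown here)
	sendNotification("ops", "[Down] service 1", &label)          // sends
	sendNotification("ops", "[Down] service 1 (repeat)", &label) // muted
	unMuteNotification("ops", &label)                            // state changed: clear the mute
	sendNotification("ops", "[Good] service 1", &label)          // sends again
}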


@@ -118,12 +118,16 @@ func (s *NezhaHandler) ReportSystemInfo(c context.Context, r *pb.Host) (*pb.Rece
host.IP != "" &&
singleton.ServerList[clientID].Host.IP != host.IP {
singleton.SendNotification(singleton.Conf.IPChangeNotificationTag, fmt.Sprintf(
"[%s] %s, %s => %s",
singleton.Localizer.MustLocalize(&i18n.LocalizeConfig{
MessageID: "IPChanged",
}),
singleton.ServerList[clientID].Name, singleton.IPDesensitize(singleton.ServerList[clientID].Host.IP), singleton.IPDesensitize(host.IP)), singleton.NotificationMuteLabel.IPChanged(clientID), nil)
singleton.SendNotification(singleton.Conf.IPChangeNotificationTag,
fmt.Sprintf(
"[%s] %s, %s => %s",
singleton.Localizer.MustLocalize(&i18n.LocalizeConfig{
MessageID: "IPChanged",
}),
singleton.ServerList[clientID].Name, singleton.IPDesensitize(singleton.ServerList[clientID].Host.IP),
singleton.IPDesensitize(host.IP),
),
nil)
}
// Check whether this is a machine restart; if so, the last recorded traffic needs to be carried over


@@ -165,8 +165,7 @@ func checkStatus() {
go SendTriggerTasks(alert.FailTriggerTasks, curServer.ID)
go SendNotification(alert.NotificationTag, message, NotificationMuteLabel.ServerIncident(server.ID, alert.ID), &curServer)
// Clear the mute cache of the recovery notification
resolvedMuteLabel := NotificationMuteLabel.AppendNotificationTag(NotificationMuteLabel.ServerIncidentResolved(server.ID, alert.ID), alert.NotificationTag)
Cache.Delete(*resolvedMuteLabel)
UnMuteNotification(alert.NotificationTag, NotificationMuteLabel.ServerIncidentResolved(server.ID, alert.ID))
}
} else {
// This check passed but the previous state was a failure, so send a recovery notification
@@ -177,8 +176,7 @@ func checkStatus() {
go SendTriggerTasks(alert.RecoverTriggerTasks, curServer.ID)
go SendNotification(alert.NotificationTag, message, NotificationMuteLabel.ServerIncidentResolved(server.ID, alert.ID), &curServer)
// Clear the mute cache of the failure notification
incidentMuteLabel := NotificationMuteLabel.AppendNotificationTag(NotificationMuteLabel.ServerIncident(server.ID, alert.ID), alert.NotificationTag)
Cache.Delete(*incidentMuteLabel)
UnMuteNotification(alert.NotificationTag, NotificationMuteLabel.ServerIncident(server.ID, alert.ID))
}
alertsPrevState[alert.ID][server.ID] = _RuleCheckPass
}
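
Note (illustrative, not part of the diff): the point of the two UnMuteNotification calls above is flap handling. Without them, a down -> up -> down sequence would leave the second "down" notice muted by the first. A minimal sketch under that assumption, with a plain map standing in for the cache and simplified label strings:

package main

import "fmt"

var muted = map[string]bool{}

// notifyOnce sends only if the key is not muted, then mutes it.
func notifyOnce(key, msg string) {
	if muted[key] {
		return
	}
	muted[key] = true
	fmt.Println("notify:", msg)
}

func onFail(serverID uint64) {
	notifyOnce(fmt.Sprintf("incident-%d", serverID), "server down")
	delete(muted, fmt.Sprintf("resolved-%d", serverID)) // clear the recovery mute (the fix in this PR)
}

func onRecover(serverID uint64) {
	notifyOnce(fmt.Sprintf("resolved-%d", serverID), "server recovered")
	delete(muted, fmt.Sprintf("incident-%d", serverID)) // clear the failure mute (the fix in this PR)
}

func main() {
	onFail(1)    // notify: server down
	onRecover(1) // notify: server recovered
	onFail(1)    // notify: server down (without the delete calls this would stay silent)
}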


@@ -102,6 +102,11 @@ func OnDeleteNotification(id uint64) {
delete(NotificationIDToTag, id)
}
func UnMuteNotification(notificationTag string, muteLabel *string) {
fullMuteLabel := *NotificationMuteLabel.AppendNotificationTag(muteLabel, notificationTag)
Cache.Delete(fullMuteLabel)
}
// SendNotification sends the notification through every notification method in the specified notification tag group
func SendNotification(notificationTag string, desc string, muteLabel *string, ext ...*model.Server) {
if muteLabel != nil {
@@ -199,7 +204,7 @@ func (_NotificationMuteLabel) ServiceStateChanged(serviceId uint64) *string {
return &label
}
func (_NotificationMuteLabel) ServiceSSL(serviceId uint64) *string {
label := fmt.Sprintf("bf::sssl-%d", serviceId)
func (_NotificationMuteLabel) ServiceSSL(serviceId uint64, extraInfo string) *string {
label := fmt.Sprintf("bf::sssl-%d-%s", serviceId, extraInfo)
return &label
}
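
Note (illustrative, not part of the diff): the added extraInfo parameter keeps SSL mute labels distinct per cause, so muting a network-fetch failure does not also silence an expiry warning for the same service. A small sketch of the resulting keys, using the "bf::sssl-" prefix from the hunk above and an example timestamp:

package main

import "fmt"

// serviceSSL mirrors the new label format shown above.
func serviceSSL(serviceId uint64, extraInfo string) string {
	return fmt.Sprintf("bf::sssl-%d-%s", serviceId, extraInfo)
}

func main() {
	fmt.Println(serviceSSL(7, "network"))                    // mute key for cert-fetch network errors
	fmt.Println(serviceSSL(7, "expire_2023-07-01 00:00:00")) // mute key tied to one expiration time
}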


@@ -382,103 +382,151 @@ func (ss *ServiceSentinel) worker() {
if mh.Delay > 0 {
ss.monitorsLock.RLock()
if ss.monitors[mh.GetId()].LatencyNotify {
notificationTag := ss.monitors[mh.GetId()].NotificationTag
minMuteLabel := NotificationMuteLabel.ServiceLatencyMin(mh.GetId())
maxMuteLabel := NotificationMuteLabel.ServiceLatencyMax(mh.GetId())
if mh.Delay > ss.monitors[mh.GetId()].MaxLatency {
// Latency exceeds the maximum value
ServerLock.RLock()
reporterServer := ServerList[r.Reporter]
msg := fmt.Sprintf("[Latency] %s %2f > %2f, Reporter: %s", ss.monitors[mh.GetId()].Name, mh.Delay, ss.monitors[mh.GetId()].MaxLatency, reporterServer.Name)
muteLabel := NotificationMuteLabel.ServiceLatencyMin(mh.GetId())
go SendNotification(ss.monitors[mh.GetId()].NotificationTag, msg, muteLabel)
go SendNotification(notificationTag, msg, minMuteLabel)
ServerLock.RUnlock()
}
if mh.Delay < ss.monitors[mh.GetId()].MinLatency {
} else if mh.Delay < ss.monitors[mh.GetId()].MinLatency {
// Latency is below the minimum value
ServerLock.RLock()
reporterServer := ServerList[r.Reporter]
go SendNotification(ss.monitors[mh.GetId()].NotificationTag, fmt.Sprintf("[Latency] %s %2f < %2f, Reporter: %s", ss.monitors[mh.GetId()].Name, mh.Delay, ss.monitors[mh.GetId()].MinLatency, reporterServer.Name), NotificationMuteLabel.ServiceLatencyMax(mh.GetId()))
msg := fmt.Sprintf("[Latency] %s %2f < %2f, Reporter: %s", ss.monitors[mh.GetId()].Name, mh.Delay, ss.monitors[mh.GetId()].MinLatency, reporterServer.Name)
go SendNotification(notificationTag, msg, maxMuteLabel)
ServerLock.RUnlock()
} else {
// Latency is normal; clear the mute cache
UnMuteNotification(notificationTag, minMuteLabel)
UnMuteNotification(notificationTag, maxMuteLabel)
}
}
ss.monitorsLock.RUnlock()
}
// Status change alert
// Status change alert + trigger task execution
if stateCode == StatusDown || stateCode != ss.lastStatus[mh.GetId()] {
ss.monitorsLock.Lock()
lastStatus := ss.lastStatus[mh.GetId()]
// Store the new status value
ss.lastStatus[mh.GetId()] = stateCode
// Determine whether a reminder needs to be sent
// Determine whether a notification needs to be sent
isNeedSendNotification := ss.monitors[mh.GetId()].Notify && (lastStatus != 0 || stateCode == StatusDown)
if isNeedSendNotification {
ServerLock.RLock()
reporterServer := ServerList[r.Reporter]
go SendNotification(ss.monitors[mh.GetId()].NotificationTag, fmt.Sprintf("[%s] %s Reporter: %s, Error: %s", StatusCodeToString(stateCode), ss.monitors[mh.GetId()].Name, reporterServer.Name, mh.Data), NotificationMuteLabel.ServiceStateChanged(mh.GetId()))
reporterServer := ServerList[r.Reporter]
notificationTag := ss.monitors[mh.GetId()].NotificationTag
notificationMsg := fmt.Sprintf("[%s] %s Reporter: %s, Error: %s", StatusCodeToString(stateCode), ss.monitors[mh.GetId()].Name, reporterServer.Name, mh.Data)
muteLabel := NotificationMuteLabel.ServiceStateChanged(mh.GetId())
// When the status changes, clear the mute cache
if stateCode != lastStatus {
UnMuteNotification(notificationTag, muteLabel)
}
go SendNotification(notificationTag, notificationMsg, muteLabel)
ServerLock.RUnlock()
}
// Determine whether tasks need to be triggered
if ss.monitors[mh.GetId()].EnableTriggerTask && lastStatus != 0 {
isNeedTriggerTask := ss.monitors[mh.GetId()].EnableTriggerTask && lastStatus != 0
if isNeedTriggerTask {
ServerLock.RLock()
reporterServer := ServerList[r.Reporter]
ServerLock.RUnlock()
if stateCode == StatusGood && lastStatus != stateCode {
// Current status normal, previous status abnormal: trigger recovery tasks
// Current status normal, previous status not normal: trigger recovery tasks
go SendTriggerTasks(ss.monitors[mh.GetId()].RecoverTriggerTasks, reporterServer.ID)
} else if lastStatus == StatusGood && lastStatus != stateCode {
// Previous status normal, current status abnormal: trigger failure tasks
// Previous status normal, current status not normal: trigger failure tasks
go SendTriggerTasks(ss.monitors[mh.GetId()].FailTriggerTasks, reporterServer.ID)
}
}
// Current status normal, previous status not normal: trigger recovery tasks
ss.monitorsLock.Unlock()
}
ss.serviceResponseDataStoreLock.Unlock()
// SSL certificate alerts
var errMsg string
if strings.HasPrefix(mh.Data, "SSL证书错误") {
// Exclude i/o timeout, connection timeout, and EOF errors
// i/o timeout, connection timeout, EOF errors
if !strings.HasSuffix(mh.Data, "timeout") &&
!strings.HasSuffix(mh.Data, "EOF") &&
!strings.HasSuffix(mh.Data, "timed out") {
errMsg = mh.Data
ss.monitorsLock.RLock()
if ss.monitors[mh.GetId()].Notify {
muteLabel := NotificationMuteLabel.ServiceSSL(mh.GetId(), "network")
go SendNotification(ss.monitors[mh.GetId()].NotificationTag, fmt.Sprintf("[SSL] Fetch cert info failed, %s %s", ss.monitors[mh.GetId()].Name, errMsg), muteLabel)
}
ss.monitorsLock.RUnlock()
}
} else {
// Clear the mute cache for network errors
UnMuteNotification(ss.monitors[mh.GetId()].NotificationTag, NotificationMuteLabel.ServiceSSL(mh.GetId(), "network"))
var newCert = strings.Split(mh.Data, "|")
if len(newCert) > 1 {
ss.monitorsLock.Lock()
enableNotify := ss.monitors[mh.GetId()].Notify
// Cache the certificate info when it is fetched for the first time
if ss.sslCertCache[mh.GetId()] == "" {
ss.sslCertCache[mh.GetId()] = mh.Data
}
oldCert := strings.Split(ss.sslCertCache[mh.GetId()], "|")
isCertChanged := false
expiresOld, _ := time.Parse("2006-01-02 15:04:05 -0700 MST", oldCert[1])
expiresNew, _ := time.Parse("2006-01-02 15:04:05 -0700 MST", newCert[1])
// Certificate expiration reminder
if expiresNew.Before(time.Now().AddDate(0, 0, 7)) {
errMsg = fmt.Sprintf(
"The SSL certificate will expire within seven days. Expiration time: %s",
expiresNew.Format("2006-01-02 15:04:05"))
}
// Certificate change reminder
var oldCert = strings.Split(ss.sslCertCache[mh.GetId()], "|")
var expiresOld time.Time
if len(oldCert) > 1 {
expiresOld, _ = time.Parse("2006-01-02 15:04:05 -0700 MST", oldCert[1])
}
// Update the cache when the certificate changes
if oldCert[0] != newCert[0] && !expiresNew.Equal(expiresOld) {
isCertChanged = true
ss.sslCertCache[mh.GetId()] = mh.Data
errMsg = fmt.Sprintf(
"SSL certificate changed, old: %s, %s expired; new: %s, %s expired.",
oldCert[0], expiresOld.Format("2006-01-02 15:04:05"), newCert[0], expiresNew.Format("2006-01-02 15:04:05"))
}
notificationTag := ss.monitors[mh.GetId()].NotificationTag
serviceName := ss.monitors[mh.GetId()].Name
ss.monitorsLock.Unlock()
// A reminder needs to be sent
if enableNotify {
// Certificate expiration reminder
if expiresNew.Before(time.Now().AddDate(0, 0, 7)) {
expiresTimeStr := expiresNew.Format("2006-01-02 15:04:05")
errMsg = fmt.Sprintf(
"The SSL certificate will expire within seven days. Expiration time: %s",
expiresTimeStr,
)
// Mute rule: service ID + certificate expiration time
// Used to prevent multiple monitoring points from alerting on the same certificate at the same time
muteLabel := NotificationMuteLabel.ServiceSSL(mh.GetId(), fmt.Sprintf("expire_%s", expiresTimeStr))
go SendNotification(notificationTag, fmt.Sprintf("[SSL] %s %s", serviceName, errMsg), muteLabel)
}
// Certificate change reminder
if isCertChanged {
errMsg = fmt.Sprintf(
"SSL certificate changed, old: %s, %s expired; new: %s, %s expired.",
oldCert[0], expiresOld.Format("2006-01-02 15:04:05"), newCert[0], expiresNew.Format("2006-01-02 15:04:05"))
// The cache is updated automatically after a certificate change, so no mute is needed
go SendNotification(notificationTag, fmt.Sprintf("[SSL] %s %s", serviceName, errMsg), nil)
}
}
}
}
if errMsg != "" {
ss.monitorsLock.RLock()
if ss.monitors[mh.GetId()].Notify {
go SendNotification(ss.monitors[mh.GetId()].NotificationTag, fmt.Sprintf("[SSL] %s %s", ss.monitors[mh.GetId()].Name, errMsg), NotificationMuteLabel.ServiceSSL(mh.GetId()))
}
ss.monitorsLock.RUnlock()
}
}
}