Reworked the logic for retrieving data from Zabbix API to make it more efficient and filter-aware. Message generation for Telegram bot was refactored and decoupled from data retrieval logic to improve structure, readability, and reuse. Signed-off-by: UdoChudo <stream@udochudo.ru>
104 lines
3.8 KiB
Python
104 lines
3.8 KiB
Python
import re
|
||
import time
|
||
|
||
from pyzabbix import ZabbixAPI, ZabbixAPIException
|
||
from telebot import logger
|
||
|
||
from config import ZABBIX_URL, ZABBIX_API_TOKEN, ZABBIX_VERIFY_SSL
|
||
|
||
|
||
def get_region_groups(region_id: str):
    """
    Return Zabbix host groups whose name contains ``region_id``,
    excluding groups whose name contains 'test' and groups that do not
    match the '<name>_<number>' pattern.

    :param region_id: substring to search for in host-group names.
    :return: list of dicts with ``groupid`` and ``name`` keys; an empty
             list on any API/connection error (errors are logged, not raised).
    """
    try:
        zapi = ZabbixAPI(ZABBIX_URL)
        # Set the SSL-verification policy BEFORE logging in, so the login
        # request itself honors ZABBIX_VERIFY_SSL. (Previously this was
        # assigned after login, so the login call used the default policy.)
        zapi.session.verify = ZABBIX_VERIFY_SSL
        zapi.login(api_token=ZABBIX_API_TOKEN)

        host_groups = zapi.hostgroup.get(output=["groupid", "name"], search={"name": region_id})

        # Group name must end with an underscore followed by digits, e.g. "msk_12".
        pattern = re.compile(r'.+_\d+$')

        filtered_groups = [
            group for group in host_groups
            if 'test' not in group['name'].lower() and pattern.match(group['name'])
        ]
        return filtered_groups

    except Exception as e:
        logger.error(f"[Zabbix] Error getting region groups for '{region_id}': {e}")
        return []
|
||
|
||
def get_all_groups_for_region(region_id: str):
    """
    Fetch every host group for a region.

    Thin alias over :func:`get_region_groups`, kept for a clearer
    call-site name; same filtering, same return value.
    """
    groups = get_region_groups(region_id)
    return groups
|
||
|
||
|
||
def fetch_triggers_data(group_id):
    """
    Return active high/disaster triggers for a host group that were
    delivered via one of the Pnet media types, sorted by the timestamp
    of each trigger's last event (oldest first).

    No message formatting happens here — callers build Telegram text
    from the returned trigger dicts themselves.

    :param group_id: Zabbix host-group id (or list of ids) to query.
    :return: sorted list of trigger dicts from ``trigger.get``; empty
             list when nothing matched or on any API error (logged).
    """
    # Media types whose alerts identify the Pnet integrations we care about.
    pnet_mediatypes = {"Pnet integration JS 2025", "Pnet integration JS 2024", "Pnet integration new2"}
    start_time = time.time()
    try:
        zapi = ZabbixAPI(ZABBIX_URL)
        # Apply the SSL policy before any request, consistent with
        # get_region_groups.
        zapi.session.verify = ZABBIX_VERIFY_SSL
        zapi.login(api_token=ZABBIX_API_TOKEN)

        # Unsuppressed, unacknowledged problems with high (4) or
        # disaster (5) severity.
        problems = zapi.problem.get(
            severities=[4, 5],
            suppressed=0,
            acknowledged=0,
            groupids=group_id
        )
        trigger_ids = [problem["objectid"] for problem in problems]

        if not trigger_ids:
            logger.info(f"No triggers found for group {group_id}")
            return []

        triggers = zapi.trigger.get(
            triggerids=trigger_ids,
            output=["triggerid", "description", "priority"],
            selectHosts=["hostid", "name"],
            monitored=1,
            expandDescription=1,
            expandComment=1,
            selectItems=["itemid", "lastvalue"],
            selectLastEvent=["clock", "eventid"]
        )

        events = zapi.event.get(
            severities=[4, 5],
            objectids=trigger_ids,
            select_alerts="mediatype"
        )

        event_dict = {event["objectid"]: event for event in events}

        pnet_triggers = []
        for trigger in triggers:
            event = event_dict.get(trigger["triggerid"])
            if not event:
                continue
            # .get() guards: an event may carry no "alerts" key at all,
            # and an alert may have no media types (e.g. internal actions).
            # Previously either case raised and zeroed out the whole group.
            for alert in event.get("alerts", []):
                mediatypes = alert.get("mediatypes") or []
                if mediatypes and mediatypes[0].get("name") in pnet_mediatypes:
                    pnet_triggers.append(trigger)
                    break

        # Oldest problems first. A trigger with a missing/empty lastEvent
        # sorts first (clock 0) instead of raising KeyError/TypeError.
        triggers_sorted = sorted(
            pnet_triggers,
            key=lambda t: int((t.get('lastEvent') or {}).get('clock', 0))
        )
        logger.debug(f"Found {len(pnet_triggers)} pnet triggers for group {group_id}")
        end_time = time.time()
        logger.info(f"[Zabbix] Fetched {len(triggers_sorted)} triggers for group {group_id} in {end_time - start_time:.2f} seconds.")
        return triggers_sorted

    except ZabbixAPIException as e:
        logger.error(f"[Zabbix] Zabbix API error for group {group_id}: {e}")
        return []
    except Exception as e:
        logger.error(f"[Zabbix] Unexpected error fetching triggers for group {group_id}: {e}")
        return []
|