Преглед изворни кода

Sprint 7: Zabbix and SNMPv3 Monitoring Integration

lanfr144 пре 2 недеља
родитељ
комит
e3f96b1f33
8 измењених фајлова са 151 додато и 27 уклоњено
  1. 4 2
      PROJECT_CONTEXT.md
  2. 6 1
      app.py
  3. 0 24
      docker/taiga/Dockerfile
  4. 38 0
      docker/zabbix/docker-compose.yml
  5. 5 0
      ingest_csv.py
  6. 1 0
      requirements.txt
  7. 60 0
      setup_sprint7_taiga.py
  8. 37 0
      snmp_notifier.py

+ 4 - 2
PROJECT_CONTEXT.md

@@ -30,10 +30,12 @@ The Ollama `mistral` model is fully integrated with Streamlit using **Tool Calli
 1. **Dynamic Tabular Analytics**: In the Clinical Search tab, users can click "Ask AI to Evaluate This Table" to grade database rows against their specific illnesses/diets.
 2. **Plate Builder & Unit Converter**: `unit_converter.py` parses natural language strings (e.g., "1.5 cups") and converts them to metric grams based on product density.
 3. **AI Meal Planner**: Multi-turn RAG loop where the AI queries the database for verified foods before outputting a strict Markdown menu table.
+4. **Enterprise Monitoring (Zabbix & SNMPv3)**: A Zabbix stack (`docker/zabbix`) continuously monitors the host, database, and application health. The Python components (`app.py`, `ingest_csv.py`) natively emit encrypted SNMPv3 traps on key events (logins, heavy SQL queries, ingestion milestones).
 
 ## 📝 Roadmap History
-- **Sprint 1-6 [COMPLETED]**: The project has successfully evolved from a foundation into a heavily optimized, vertically partitioned, RAG-integrated medical platform. All code is audited and documentation is finalized in the `docs/` folder.
-- **Future Work**: The system is in a stable state. Any future AI agents modifying this project should strictly adhere to the vertical partitioning structure and use `search_nutrition_db` for data fetching.
+- **Sprint 1-6 [COMPLETED]**: The project evolved from a foundation into a heavily optimized, vertically partitioned, RAG-integrated medical platform. All code is audited and documentation is finalized in the `docs/` folder.
+- **Sprint 7 [ACTIVE]**: SNMPv3 Enterprise Monitoring. Deployed Zabbix Docker containers relying on the host MySQL instance. Injected `pysnmp` logic into core Streamlit and backend workflows for real-time telemetry alerting.
+- **Future Work**: The system is in a stable, monitored state. Any future AI agents modifying this project should strictly adhere to the vertical partitioning structure and use `search_nutrition_db` for data fetching.
 
 ---
 *Generated by Antigravity.*

+ 6 - 1
app.py

@@ -10,6 +10,7 @@ import smtplib
 from email.message import EmailMessage
 import pandas as pd
 from unit_converter import UnitConverter
+from snmp_notifier import notifier
 
 def local_web_search(query: str) -> str:
     try:
@@ -245,9 +246,12 @@ with st.sidebar:
             l_pass = st.text_input("Password", type="password", key="l_pass")
             if st.button("Login"):
                 if verify_login(l_user, l_pass):
+                    notifier.send_alert(f"User Login Success: {l_user}")
                     st.session_state["authenticated_user"] = l_user
                     st.rerun()
-                else: st.error("Invalid login.")
+                else:
+                    notifier.send_alert(f"User Login Failed: {l_user}")
+                    st.error("Invalid login.")
         with tab2:
             r_user = st.text_input("Username", key="r_user")
             r_email = st.text_input("Email Address", key="r_email")
@@ -347,6 +351,7 @@ with tab_explore:
     limit_rc = cols[4].selectbox("Limit Results", opts, index=idx)
     
     if st.button("Search Database") and sq and conn_reader:
+        notifier.send_alert(f"Medical DB Search Executed: {sq}")
         with st.spinner("Processing massive clinical query..."):
             try:
                 with conn_reader.cursor() as cursor:

+ 0 - 24
docker/taiga/Dockerfile

@@ -1,24 +0,0 @@
-# Dockerfile for Taiga sync service
-FROM python:3.11-slim
-
-# Install system dependencies (if any)
-RUN apt-get update && apt-get install -y --no-install-recommends \
-    build-essential \
-    default-libmysqlclient-dev \
-    && rm -rf /var/lib/apt/lists/*
-
-WORKDIR /app
-
-COPY requirements.txt ./
-RUN pip install --no-cache-dir -r requirements.txt
-
-# Copy Taiga sync scripts (assumed to be in project root)
-COPY taiga_feed.py ./
-COPY generate_taiga_wiki.py ./
-COPY taiga_checker.py ./
-
-# Wrapper script to gracefully skip when no Taiga URL/token
-COPY run_sync.sh /app/run_sync.sh
-RUN chmod +x /app/run_sync.sh
-
-CMD ["/app/run_sync.sh"]

+ 38 - 0
docker/zabbix/docker-compose.yml

@@ -0,0 +1,38 @@
+# Zabbix 7.0 monitoring stack (Sprint 7). The server and web UI reuse an
+# existing external MySQL instance instead of a bundled database container.
+version: '3.5'
+services:
+  zabbix-server:
+    image: zabbix/zabbix-server-mysql:ubuntu-7.0-latest
+    # 10051 is the Zabbix trapper port that agents and senders connect to.
+    ports:
+      - "10051:10051"
+    environment:
+      - DB_SERVER_HOST=192.168.130.170 # Use the unified MySQL DB
+      - MYSQL_USER=zabbix
+      # NOTE(review): plaintext DB password committed to the repo — prefer
+      # an env file or Docker secrets.
+      - MYSQL_PASSWORD=zabbix_pwd
+      # Start the SNMP trapper process so the SNMPv3 traps emitted by
+      # snmp_notifier.py can be received.
+      - ZBX_SNMPTRAPPER=1
+    restart: always
+
+  zabbix-web:
+    image: zabbix/zabbix-web-nginx-mysql:ubuntu-7.0-latest
+    # Web frontend exposed on 8080 (HTTP) and 8443 (HTTPS).
+    ports:
+      - "8080:8080"
+      - "8443:8443"
+    environment:
+      - DB_SERVER_HOST=192.168.130.170
+      - MYSQL_USER=zabbix
+      - MYSQL_PASSWORD=zabbix_pwd
+      - ZBX_SERVER_HOST=zabbix-server
+      - PHP_TZ=Europe/Paris
+    depends_on:
+      - zabbix-server
+    restart: always
+
+  zabbix-agent:
+    image: zabbix/zabbix-agent:ubuntu-7.0-latest
+    environment:
+      # NOTE(review): presumably must match the host name configured in the
+      # Zabbix frontend ("Zabbix server" is the default host entry) — confirm.
+      - ZBX_HOSTNAME=Zabbix server
+      - ZBX_SERVER_HOST=zabbix-server
+    # Privileged + host PID namespace + /var/run mount let the agent see
+    # host-level processes and sockets rather than only the container's.
+    privileged: true
+    pid: "host"
+    volumes:
+      - /var/run:/var/run
+    restart: always

+ 5 - 0
ingest_csv.py

@@ -5,6 +5,7 @@ from sqlalchemy import create_engine, text
 from sqlalchemy.types import VARCHAR, TEXT, DOUBLE
 import os
 import sys
+from snmp_notifier import notifier
 
 def get_loader_engine():
     try:
@@ -80,9 +81,13 @@ def ingest_file(filename, engine):
 
             total_processed += len(df)
             print(f"   Successfully appended {total_processed} rows into grouped tables...", end="\r")
+            if total_processed % 50000 == 0:
+                notifier.send_alert(f"Ingestion Milestone: {total_processed} rows processed")
         except BaseException as e:
+            notifier.send_alert(f"Ingestion Exception: {str(e)}")
             print(f"\n   [Warning] Chunk skipped due to error: {e}")
             
+    notifier.send_alert(f"Ingestion Finished: {filename}")
 print(f"\n✅ Finished importing {filename}.")
     return True
 

+ 1 - 0
requirements.txt

@@ -4,3 +4,4 @@ myloginpath
 streamlit
 ollama
 bcrypt
+pysnmp

+ 60 - 0
setup_sprint7_taiga.py

@@ -0,0 +1,60 @@
+# One-shot helper that populates Taiga with the Sprint 7 backlog:
+# ensures the "Sprint 7" milestone exists, then creates three user
+# stories and one task per story under it.
+import requests
+import urllib3
+from datetime import datetime, timedelta
+
+# TLS verification is disabled on every request below (self-signed cert
+# on the Taiga host, presumably); silence the resulting warnings.
+urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+
+# Authenticate
+# NOTE(review): credentials are hard-coded in source and verify=False is
+# used throughout — acceptable for a one-off lab script, not shared code.
+# A failed login raises KeyError on auth["auth_token"] rather than a
+# clear error message.
+base_url = 'https://192.168.130.161/taiga/api/v1'
+auth_url = f'{base_url}/auth'
+auth = requests.post(auth_url, json={'type': 'normal', 'username': 'FrancoisLange', 'password': 'BTSai123'}, verify=False).json()
+headers = {'Authorization': f'Bearer {auth["auth_token"]}', 'Content-Type': 'application/json'}
+proj_id = 21
+
+print("Fetching Sprints...")
+# Look for an existing "Sprint 7" milestone in the project.
+milestones = requests.get(f'{base_url}/milestones?project={proj_id}', headers=headers, verify=False).json()
+sprint7 = next((m for m in milestones if m['name'] == 'Sprint 7'), None)
+
+if not sprint7:
+    print("Sprint 7 not found, creating it...")
+    # New sprint runs for one week starting today.
+    payload = {
+        "project": proj_id,
+        "name": "Sprint 7",
+        "estimated_start": datetime.now().strftime('%Y-%m-%d'),
+        "estimated_finish": (datetime.now() + timedelta(days=7)).strftime('%Y-%m-%d')
+    }
+    sprint7 = requests.post(f'{base_url}/milestones', json=payload, headers=headers, verify=False).json()
+    
+sprint_id = sprint7['id']
+print(f"Sprint 7 ID: {sprint_id}")
+
+# User stories to create under Sprint 7; each also gets an "Execute" task.
+stories = [
+    {"subject": "Zabbix Server Docker Setup", "description": "Deploy Zabbix server, Zabbix Web, and Zabbix Agent via Docker compose utilizing the host MySQL database."},
+    {"subject": "SNMPv3 Integration", "description": "Implement pysnmp to send AuthPriv SNMPv3 traps to Zabbix."},
+    {"subject": "Application Component Traps", "description": "Inject SNMP traps into Streamlit app.py and background ingestion processes."}
+]
+
+for s in stories:
+    payload = {
+        "project": proj_id,
+        "subject": s["subject"],
+        "description": s["description"],
+        "milestone": sprint_id
+    }
+    res = requests.post(f'{base_url}/userstories', json=payload, headers=headers, verify=False)
+    # Taiga returns 201 Created on success; anything else is reported and
+    # the story (and its task) is skipped.
+    if res.status_code == 201:
+        us = res.json()
+        print(f"Created US: {us['subject']}")
+        
+        # Create a task for it
+        t_payload = {
+            "project": proj_id,
+            "subject": f"Execute: {us['subject']}",
+            "user_story": us['id'],
+            "milestone": sprint_id
+        }
+        requests.post(f'{base_url}/tasks', json=t_payload, headers=headers, verify=False)
+    else:
+        print(f"Failed US: {res.text}")
+
+print("Sprint 7 populated!")

+ 37 - 0
snmp_notifier.py

@@ -0,0 +1,37 @@
+# SNMPv3 trap notifier, shared as a module-level singleton by app.py and
+# ingest_csv.py (Sprint 7 monitoring integration).
+import time  # NOTE(review): unused in this module — candidate for removal
+import socket
+from pysnmp.hlapi import *
+
+class SNMPNotifier:
+    """Sends authPriv SNMPv3 traps to the Zabbix SNMP trapper.
+
+    All failures are printed and swallowed so monitoring can never crash
+    the calling application.
+    """
+
+    def __init__(self, target_host='192.168.130.170', target_port=162):
+        # Destination defaults to the Zabbix host on the standard SNMP
+        # trap port (162).
+        self.target_host = target_host
+        self.target_port = target_port
+        # NOTE(review): SNMPv3 credentials are hard-coded here and must
+        # match the Zabbix trapper configuration — prefer environment
+        # variables so keys are not committed to source control.
+        self.user = 'zabbix_snmp'
+        self.auth_key = 'authkey123'
+        self.priv_key = 'privkey123'
+
+    def send_alert(self, message):
+        """Emit *message* as an SNMPv3 trap; never raises to the caller."""
+        try:
+            # A fresh SnmpEngine is created per call — simple, but adds
+            # per-alert overhead if traps become frequent.
+            errorIndication, errorStatus, errorIndex, varBinds = next(
+                sendNotification(
+                    SnmpEngine(),
+                    # authPriv security level: HMAC-SHA authentication with
+                    # AES-128 (CFB) privacy.
+                    UsmUserData(self.user, self.auth_key, self.priv_key,
+                                authProtocol=usmHMACSHAAuthProtocol,
+                                privProtocol=usmAesCfb128Protocol),
+                    UdpTransportTarget((self.target_host, self.target_port)),
+                    ContextData(),
+                    'trap',
+                    NotificationType(
+                        # NOTE(review): 1.3.6.1.4.1.8072 is the Net-SNMP
+                        # enterprise arc — confirm the Zabbix trap item
+                        # matches on this notification OID.
+                        ObjectIdentity('1.3.6.1.4.1.8072.3.2.10') # SNMP trap OID
+                    ).addVarBinds(
+                        # Payload varbind on sysDescr.0 (MIB-2), carrying
+                        # "[hostname] message".
+                        ('1.3.6.1.2.1.1.1.0', OctetString(f"[{socket.gethostname()}] {message}"))
+                    )
+                )
+            )
+            # errorIndication is set for transport/engine-level failures.
+            if errorIndication:
+                print(f"SNMP Trap Failed: {errorIndication}")
+        except Exception as e:
+            # Best-effort by design: log and continue.
+            print(f"Failed to send SNMPv3 trap: {e}")
+
+# Singleton instance
+notifier = SNMPNotifier()