Explorar o código

feat: Containerize components, configure Zabbix rules, sync Taiga

lanfr144 hai 1 semana
pai
achega
2a4f8d488f
Modificáronse 8 ficheiros con 319 adicións e 0 borrados
  1. 12 0
      .env
  2. 16 0
      app.py
  3. 86 0
      configure_zabbix_dependencies.py
  4. 19 0
      create_taiga_task.py
  5. 110 0
      docker-compose.yml
  6. 1 0
      docker/ingest/Dockerfile
  7. 10 0
      ingest_csv.py
  8. 65 0
      sync_current_sprint.py

+ 12 - 0
.env

@@ -0,0 +1,12 @@
+# DOPRO1 (Food AI) Configuration
+# Do not commit this file to version control.
+
+# Git Configuration (git.btshub.lu)
+GIT_TOKEN=your_btshub_git_token
+GIT_USERNAME=lanfr144
+
+# Taiga Configuration (Local Network: 192.168.130.161)
+# Note: Network currently unavailable as per status report.
+TAIGA_URL=http://192.168.130.161/taiga
+TAIGA_TOKEN=your_local_taiga_token
+TAIGA_PROJECT_SLUG=your_project_slug

+ 16 - 0
app.py

@@ -72,6 +72,21 @@ db_search_tool_schema = {
 
 def get_db_connection(login_path):
     try:
+        import os
+        db_host = os.environ.get('DB_HOST')
+        # Check if environment variables exist for this login path
+        db_user = os.environ.get(f'{login_path.upper()}_USER') or os.environ.get('DB_USER')
+        db_pass = os.environ.get(f'{login_path.upper()}_PASS') or os.environ.get('DB_PASS')
+
+        if db_host and db_user and db_pass:
+            return pymysql.connect(
+                host=db_host,
+                user=db_user,
+                password=db_pass,
+                database='food_db',
+                cursorclass=pymysql.cursors.DictCursor
+            )
+            
         conf = myloginpath.parse(login_path)
         if not conf or not conf.get('user'):
             st.error(f"⚠️ MySQL configuration missing for `{login_path}`. If you are testing locally on Windows, this app must be run on the Ubuntu server where `mysql_config_editor` is properly configured.")
@@ -684,6 +699,7 @@ with tab_planner:
             You MUST autonomously deduce what foods are recommended, forbidden, or accepted for these specific conditions and ensure the menu perfectly respects their medical requirements!
             CRITICAL INSTRUCTIONS:
             - YOU MUST USE the `search_nutrition_db` tool to find real products and their exact macros before constructing the menu!
+            - If you cannot find appropriate products in the local DB, you MUST use the `local_web_search` tool.
             - ALWAYS output exactly as a JSON array of objects. DO NOT OUTPUT MARKDOWN. DO NOT OUTPUT ANY TEXT EXCEPT JSON.
             - JSON Format required:
             [

+ 86 - 0
configure_zabbix_dependencies.py

@@ -0,0 +1,86 @@
+import requests
+import json
+import time
+
# Zabbix frontend JSON-RPC endpoint (zabbix-web container, published on host port 8080).
ZABBIX_API_URL = "http://localhost:8080/api_jsonrpc.php"
# NOTE(review): hardcoded default credentials — acceptable for bootstrapping a
# fresh local install, but should come from the environment for anything else.
ZABBIX_USER = "Admin"
ZABBIX_PASSWORD = "zabbix" # Default zabbix admin password
+
def authenticate():
    """Log in to the Zabbix JSON-RPC API.

    Returns:
        The session token string on success, or None on failure.

    Note:
        The compose stack runs Zabbix 7.0 images. Since Zabbix 6.4 the
        ``user.login`` parameter is ``username`` — the legacy ``user`` key
        is rejected — and the ``auth`` field must not be sent for login.
    """
    payload = {
        "jsonrpc": "2.0",
        "method": "user.login",
        "params": {
            "username": ZABBIX_USER,  # was "user"; removed in Zabbix 6.4
            "password": ZABBIX_PASSWORD
        },
        "id": 1
    }
    # Timeout so a wedged frontend cannot hang the bootstrap script forever.
    response = requests.post(ZABBIX_API_URL, json=payload, timeout=30).json()
    if 'result' in response:
        return response['result']
    print(f"Authentication failed: {response}")
    return None
+
def get_triggers(auth_token, description_search):
    """Return triggers whose description contains *description_search*.

    Args:
        auth_token: session token obtained from ``authenticate``.
        description_search: substring matched against trigger descriptions.

    Returns:
        A (possibly empty) list of dicts with ``triggerid`` and ``description``.
    """
    request_body = {
        "jsonrpc": "2.0",
        "method": "trigger.get",
        "params": {
            "output": ["triggerid", "description"],
            "search": {"description": description_search},
        },
        "id": 2,
        "auth": auth_token,
    }
    reply = requests.post(ZABBIX_API_URL, json=request_body).json()
    return reply.get('result', [])
+
def set_dependency(auth_token, trigger_id, depends_on_trigger_id):
    """Make trigger *trigger_id* depend on *depends_on_trigger_id*.

    NOTE(review): ``trigger.update`` replaces the whole dependency list, so
    any dependencies previously configured on the trigger are overwritten.
    Prints the outcome instead of raising (best-effort bootstrap helper).
    """
    request_body = {
        "jsonrpc": "2.0",
        "method": "trigger.update",
        "params": {
            "triggerid": trigger_id,
            "dependencies": [{"triggerid": depends_on_trigger_id}],
        },
        "id": 3,
        "auth": auth_token,
    }
    reply = requests.post(ZABBIX_API_URL, json=request_body).json()
    if 'result' not in reply:
        print(f"Failed to add dependency: {reply}")
    else:
        print(f"Successfully added dependency! Trigger {trigger_id} now depends on {depends_on_trigger_id}")
+
if __name__ == "__main__":
    print("Waiting for Zabbix server to start...")
    try:
        # Poll for the API instead of a single blind 10s sleep: on first boot
        # the Zabbix frontend regularly takes far longer than that to come up.
        auth_token = None
        for _ in range(12):  # up to ~2 minutes total
            time.sleep(10)
            try:
                auth_token = authenticate()
            except requests.exceptions.RequestException:
                auth_token = None  # frontend not reachable yet; keep waiting
            if auth_token:
                break
        if not auth_token:
            print("Cannot proceed without authentication.")
            exit(1)

        # Example logic to find DB and App triggers (Names will depend on actual Zabbix config)
        db_triggers = get_triggers(auth_token, "MySQL is down")
        app_triggers = get_triggers(auth_token, "Application Food AI Down")

        if not db_triggers or not app_triggers:
            print("Could not find the necessary triggers. They might need to be created first in Zabbix.")
            print(f"DB Triggers found: {db_triggers}")
            print(f"App Triggers found: {app_triggers}")
        else:
            # App alert depends on the DB alert: suppress "app down" noise
            # while the database itself is the root cause.
            db_trigger_id = db_triggers[0]['triggerid']
            app_trigger_id = app_triggers[0]['triggerid']
            set_dependency(auth_token, app_trigger_id, db_trigger_id)

    except Exception as e:
        print(f"Error configuring Zabbix: {e}")

+ 19 - 0
create_taiga_task.py

@@ -0,0 +1,19 @@
import os

import requests
import urllib3

# Taiga is served with a self-signed certificate on the local network; the
# verify=False calls below are deliberate, so silence the warning spam.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

base_url = 'https://192.168.130.161/taiga/api/v1'

# SECURITY NOTE(review): credentials were hardcoded. Allow .env overrides while
# keeping the old literals as fallback so existing usage is unchanged.
auth = requests.post(
    f'{base_url}/auth',
    json={
        'type': 'normal',
        'username': os.environ.get('TAIGA_USERNAME', 'FrancoisLange'),
        'password': os.environ.get('TAIGA_PASSWORD', 'BTSai123'),
    },
    verify=False,
).json()
if 'auth_token' not in auth:
    # Previously this crashed with a bare KeyError; fail with a clear message.
    raise SystemExit(f"Taiga authentication failed: {auth}")
headers = {'Authorization': f'Bearer {auth["auth_token"]}', 'Content-Type': 'application/json'}
proj_id = 21

# Attach the work to Sprint 8 when it exists; otherwise create it unscheduled.
milestones = requests.get(f'{base_url}/milestones?project={proj_id}', headers=headers, verify=False).json()
sprint8 = next((m for m in milestones if m['name'] == 'Sprint 8'), None)
sprint_id = sprint8['id'] if sprint8 else None

payload = {"project": proj_id, "subject": "Deep System Overhaul Phase 3", "description": "Fix Clinical Search Crash, Plate Builder UI, and AI Meal Planner JSON parsing.", "milestone": sprint_id}
res = requests.post(f'{base_url}/userstories', json=payload, headers=headers, verify=False).json()
if 'id' not in res:
    raise SystemExit(f"User story creation failed: {res}")
us_id = res['id']
print(f"Created US: TG-{res['ref']}")

t_payload = {"project": proj_id, "subject": "Execute Phase 3 Overhaul", "user_story": us_id, "milestone": sprint_id}
t_res = requests.post(f'{base_url}/tasks', json=t_payload, headers=headers, verify=False).json()
print(f"Created Task: TG-{t_res['ref']}")

+ 110 - 0
docker-compose.yml

@@ -0,0 +1,110 @@
# Compose stack for the Food AI platform: MySQL, Ollama (GPU), the Streamlit
# app, a one-shot CSV ingest job, and the Zabbix monitoring trio.
# NOTE: the top-level `version` key is obsolete (ignored by Compose v2) but is
# kept for compatibility with legacy docker-compose v1 installs.
version: '3.8'

services:
  mysql:
    build:
      context: ./docker/mysql
    ports:
      - "3306:3306"
    volumes:
      - mysql_data:/var/lib/mysql
      - ./my.cnf:/etc/mysql/conf.d/custom_ai_app.cnf
      # Init scripts run in lexical order: app schema first, then Zabbix DB.
      - ./init.sql:/docker-entrypoint-initdb.d/1-init.sql
      - ./init_zabbix_db.sh:/docker-entrypoint-initdb.d/2-init_zabbix.sh
    environment:
      - MYSQL_ROOT_PASSWORD=root_pass
    healthcheck:
      test: ["CMD", "mysqladmin", "ping", "-h", "localhost"]
      interval: 10s
      timeout: 5s
      retries: 5
    restart: always

  ollama:
    image: ollama/ollama:latest
    ports:
      - "11434:11434"
    volumes:
      - ollama_data:/root/.ollama
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: 1
              capabilities: [gpu]
    restart: always

  app:
    build:
      context: ./docker/app
    ports:
      - "8501:8501"
    environment:
      - DB_HOST=mysql
      - DB_USER=db_reader
      - DB_PASS=reader_pass
      - OLLAMA_HOST=http://ollama:11434
    depends_on:
      mysql:
        condition: service_healthy
      # FIX: the app talks to Ollama (OLLAMA_HOST above) but previously did
      # not declare the dependency, so it could start before Ollama exists.
      ollama:
        condition: service_started
    restart: always

  ingest:
    build:
      context: ./docker/ingest
    environment:
      - DB_HOST=mysql
      - DB_USER=db_loader
      - DB_PASS=loader_pass
    depends_on:
      mysql:
        condition: service_healthy
    profiles:
      - manual # Only runs when explicitly requested

  zabbix-server:
    image: zabbix/zabbix-server-mysql:ubuntu-7.0-latest
    ports:
      - "10051:10051"
    environment:
      - DB_SERVER_HOST=mysql
      - MYSQL_USER=zabbix
      - MYSQL_PASSWORD=zabbix_pwd
      - ZBX_SNMPTRAPPER=1
    depends_on:
      mysql:
        condition: service_healthy
    restart: always

  zabbix-web:
    image: zabbix/zabbix-web-nginx-mysql:ubuntu-7.0-latest
    ports:
      - "8080:8080"
      - "8443:8443"
    environment:
      - DB_SERVER_HOST=mysql
      - MYSQL_USER=zabbix
      - MYSQL_PASSWORD=zabbix_pwd
      - ZBX_SERVER_HOST=zabbix-server
      - PHP_TZ=Europe/Paris
    depends_on:
      - zabbix-server
    restart: always

  zabbix-agent:
    image: zabbix/zabbix-agent:ubuntu-7.0-latest
    environment:
      - ZBX_HOSTNAME=Zabbix server
      - ZBX_SERVER_HOST=zabbix-server
    # Privileged + host PID namespace so the agent can monitor host processes.
    privileged: true
    pid: "host"
    volumes:
      - /var/run:/var/run
    depends_on:
      - zabbix-server
    restart: always

volumes:
  mysql_data:
  ollama_data:

+ 1 - 0
docker/ingest/Dockerfile

@@ -5,6 +5,7 @@ FROM python:3.11-slim
 RUN apt-get update && apt-get install -y --no-install-recommends \
     build-essential \
     default-libmysqlclient-dev \
+    snmp \
     && rm -rf /var/lib/apt/lists/*
 
 # Set working directory

+ 10 - 0
ingest_csv.py

@@ -9,6 +9,16 @@ from snmp_notifier import notifier
 
 def get_loader_engine():
     try:
+        import os
+        db_host = os.environ.get('DB_HOST')
+        db_user = os.environ.get('DB_USER')
+        db_pass = os.environ.get('DB_PASS')
+
+        if db_host and db_user and db_pass:
+            password = urllib.parse.quote_plus(db_pass)
+            conn_str = f"mysql+pymysql://{db_user}:{password}@{db_host}/food_db?charset=utf8mb4"
+            return create_engine(conn_str)
+            
         conf = myloginpath.parse('app_loader')
         user = conf.get('user')
         password = urllib.parse.quote_plus(conf.get('password'))

+ 65 - 0
sync_current_sprint.py

@@ -0,0 +1,65 @@
+import requests
+import urllib3
+from datetime import datetime
+
+urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+base_url = 'https://192.168.130.161/taiga/api/v1'
+
def sync(sprint_length_days=14):
    """Create Sprint 9 (if missing), one user story, and its tasks in Taiga.

    Best-effort: any failure is reported on stdout instead of raised,
    matching the original behaviour of this ops helper.

    Args:
        sprint_length_days: length of a newly created sprint in days.
            BUG FIX: the original set ``estimated_finish`` equal to
            ``estimated_start``, producing a zero-length sprint.
    """
    import os
    from datetime import timedelta

    try:
        # Authenticate. SECURITY NOTE(review): credentials were hardcoded;
        # allow .env overrides while keeping the literals as a fallback.
        creds = {
            'type': 'normal',
            'username': os.environ.get('TAIGA_USERNAME', 'FrancoisLange'),
            'password': os.environ.get('TAIGA_PASSWORD', 'BTSai123'),
        }
        auth = requests.post(f'{base_url}/auth', json=creds, verify=False).json()
        if 'auth_token' not in auth:
            print(f"Error syncing to Taiga: authentication failed: {auth}")
            return
        headers = {'Authorization': f'Bearer {auth["auth_token"]}', 'Content-Type': 'application/json'}
        proj_id = 21

        # 1. Fetch Milestones; create Sprint 9 only if it does not exist yet.
        milestones = requests.get(f'{base_url}/milestones?project={proj_id}', headers=headers, verify=False).json()
        sprint9 = next((m for m in milestones if m['name'] == 'Sprint 9'), None)

        if not sprint9:
            sprint_start = datetime.now()
            sprint_end = sprint_start + timedelta(days=sprint_length_days)
            payload = {
                "project": proj_id,
                "name": "Sprint 9",
                "estimated_start": sprint_start.strftime('%Y-%m-%d'),
                # Previously identical to estimated_start (zero-length sprint).
                "estimated_finish": sprint_end.strftime('%Y-%m-%d'),
            }
            sprint9 = requests.post(f'{base_url}/milestones', json=payload, headers=headers, verify=False).json()
            print("Created Sprint 9")

        sprint_id = sprint9['id']

        # 2. Create the umbrella user story for this sprint.
        us_payload = {
            "project": proj_id,
            "subject": "Deep Containerization and Zabbix Telemetry Overhaul",
            "description": "Split the monolith into isolated Docker containers (App, MySQL, Ollama, Ingest) and configure Zabbix trigger dependencies (App Failure depends on DB Failure).",
            "milestone": sprint_id
        }
        res = requests.post(f'{base_url}/userstories', json=us_payload, headers=headers, verify=False).json()
        us_id = res['id']
        print(f"Created US: TG-{res['ref']}")

        # 3. Create one task per work item, attached to the user story.
        tasks = [
            "Centralize docker-compose.yml with individual component services",
            "Integrate NVIDIA GPU support for Ollama container",
            "Update App and Ingest Dockerfiles to include SNMP telemetry packages",
            "Write Zabbix API script to create App -> MySQL trigger dependencies",
            "Sync Git repository and update Taiga tracking"
        ]

        for task_subject in tasks:
            t_payload = {"project": proj_id, "subject": task_subject, "user_story": us_id, "milestone": sprint_id}
            t_res = requests.post(f'{base_url}/tasks', json=t_payload, headers=headers, verify=False).json()
            print(f"Created Task: TG-{t_res['ref']}")

        print("Successfully synchronized with Taiga.")

    except Exception as e:
        print(f"Error syncing to Taiga: {e}")

if __name__ == "__main__":
    sync()