Procházet zdrojové kódy

Add untracked project files and configs

lanfr144 před 2 týdny
rodič
revize
4655c26f1f

+ 1 - 0
.gitignore

@@ -4,3 +4,4 @@ __pycache__/
 *.csv
 *.sql.gz
 .mylogin.cnf
+*.tar

+ 22 - 0
docker/app/Dockerfile

@@ -0,0 +1,22 @@
+# Dockerfile for the Streamlit UI service.
+FROM python:3.11-slim
+
+# System deps needed to compile mysqlclient (C toolchain + MySQL client headers).
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    build-essential \
+    default-libmysqlclient-dev \
+    && rm -rf /var/lib/apt/lists/*
+
+WORKDIR /app
+
+# Install Python deps first so this layer stays cached until requirements.txt changes.
+COPY requirements.txt ./
+RUN pip install --no-cache-dir -r requirements.txt
+
+# Copy application code
+COPY app.py ./
+COPY myloginpath.py ./
+
+# EXPOSE is documentation only; Streamlit listens on 8501 (see CMD).
+EXPOSE 8501
+
+# Let the orchestrator detect a wedged UI via Streamlit's health endpoint.
+# The slim image has no curl, so probe with the Python stdlib.
+HEALTHCHECK --interval=30s --timeout=5s --start-period=15s --retries=3 \
+  CMD python -c "import urllib.request as u; u.urlopen('http://localhost:8501/_stcore/health', timeout=3)" || exit 1
+
+# NOTE(review): container runs as root; add a non-root USER once /logs volume
+# permissions (fsGroup) are handled in the pod spec.
+CMD ["streamlit", "run", "app.py", "--server.port", "8501", "--server.headless", "true"]

+ 22 - 0
docker/ingest/Dockerfile

@@ -0,0 +1,22 @@
+# Dockerfile for ingestion service
+FROM python:3.11-slim
+
+# Install system dependencies
+# (build-essential + MySQL client headers are required to compile mysqlclient)
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    build-essential \
+    default-libmysqlclient-dev \
+    && rm -rf /var/lib/apt/lists/*
+
+# Set working directory
+WORKDIR /app
+
+# Copy requirements (if any) – using existing requirements.txt
+# Installing deps before the code keeps this layer cached between code changes.
+COPY requirements.txt ./
+RUN pip install --no-cache-dir -r requirements.txt
+
+# Copy ingestion script and any helpers
+COPY ingest_csv.py ./
+COPY myloginpath.py ./
+
+# NOTE(review): k8s/ingest-job.yaml mounts the CSV PVC at /app, which shadows
+# the two files copied above — the PVC should be mounted under /app/data.
+# Entry point (will be overridden by K8s job)
+CMD ["python", "ingest_csv.py"]

+ 4 - 0
docker/mysql/Dockerfile

@@ -0,0 +1,4 @@
+FROM mysql:8.0
+
+# Minimal MySQL Dockerfile – no custom config or init scripts
+# (server config is injected at runtime from the food-ai-config ConfigMap).
+# EXPOSE is documentation only; the port is published via the k8s Service.
+EXPOSE 3306

+ 24 - 0
docker/taiga/Dockerfile

@@ -0,0 +1,24 @@
+# Dockerfile for Taiga sync service
+FROM python:3.11-slim
+
+# Install system dependencies (if any)
+# NOTE(review): build-essential/libmysqlclient are only needed if
+# requirements.txt compiles mysqlclient — confirm; otherwise drop this layer.
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    build-essential \
+    default-libmysqlclient-dev \
+    && rm -rf /var/lib/apt/lists/*
+
+WORKDIR /app
+
+# Deps first for layer caching.
+COPY requirements.txt ./
+RUN pip install --no-cache-dir -r requirements.txt
+
+# Copy Taiga sync scripts (assumed to be in project root)
+COPY taiga_feed.py ./
+COPY generate_taiga_wiki.py ./
+COPY taiga_checker.py ./
+
+# Wrapper script to gracefully skip when no Taiga URL/token
+# (k8s jobs should invoke this wrapper rather than the scripts directly).
+COPY run_sync.sh /app/run_sync.sh
+RUN chmod +x /app/run_sync.sh
+
+CMD ["/app/run_sync.sh"]

+ 28 - 0
download_csv.sh

@@ -0,0 +1,28 @@
+#!/bin/bash
+# download latest OpenFoodFacts CSVs if not present or if newer version exists
+DATA_DIR="$(dirname "$0")/data"
+mkdir -p "$DATA_DIR"
+
+EN_URL="https://static.openfoodfacts.org/data/en.openfoodfacts.org.products.csv"
+FR_URL="https://static.openfoodfacts.org/data/fr.openfoodfacts.org.products.csv"
+
+EN_FILE="$DATA_DIR/en.openfoodfacts.org.products.csv"
+FR_FILE="$DATA_DIR/fr.openfoodfacts.org.products.csv"
+
+download() {
+  local url=$1
+  local file=$2
+  if [ -f "$file" ]; then
+    echo "File $file already exists, checking for updates..."
+    curl -z "$file" -L -o "$file" "$url"
+  else
+    echo "Downloading $url..."
+    curl -L -o "$file" "$url"
+  fi
+}
+
+download "$EN_URL" "$EN_FILE"
+
+download "$FR_URL" "$FR_FILE"
+
+echo "CSV download completed."

+ 33 - 0
k8s/alembic-migrate-job.yaml

@@ -0,0 +1,33 @@
+apiVersion: batch/v1
+kind: Job
+metadata:
+  name: alembic-migrate
+  namespace: food-ai
+spec:
+  # Retry the migration at most twice before marking the Job failed.
+  backoffLimit: 2
+  template:
+    spec:
+      restartPolicy: Never
+      containers:
+      - name: alembic
+        # Reuses the app image, which must ship alembic and /app/alembic.ini.
+        image: food-ai-app:latest
+        command: ["alembic", "-c", "/app/alembic.ini", "upgrade", "head"]
+        env:
+        # NOTE(review): no DB host/user env is set — presumably alembic.ini
+        # reads MYSQL_ROOT_PASSWORD and hardcodes the host; confirm.
+        - name: MYSQL_ROOT_PASSWORD
+          valueFrom:
+            secretKeyRef:
+              name: food-ai-secrets
+              key: MYSQL_ROOT_PASSWORD
+        - name: LOG_PATH
+          value: "/logs"
+        volumeMounts:
+        - name: logs
+          mountPath: /logs
+        resources:
+          limits:
+            cpu: "500m"
+            memory: "512Mi"
+      volumes:
+      - name: logs
+        persistentVolumeClaim:
+          claimName: logs-pvc

+ 47 - 0
k8s/app-deployment.yaml

@@ -0,0 +1,47 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: food-ai-app
+  namespace: food-ai
+spec:
+  replicas: 1
+  selector:
+    matchLabels:
+      app: food-ai-app
+  template:
+    metadata:
+      labels:
+        app: food-ai-app
+    spec:
+      containers:
+      - name: app
+        # NOTE(review): ':latest' makes rollouts non-deterministic — prefer an
+        # immutable tag or digest.
+        image: food-ai-app:latest
+        ports:
+        - containerPort: 8501
+        env:
+        - name: LOG_PATH
+          value: "/logs"
+        - name: EMAIL_USER
+          valueFrom:
+            secretKeyRef:
+              name: food-ai-secrets
+              key: EMAIL_USER
+        - name: EMAIL_PASS
+          valueFrom:
+            secretKeyRef:
+              name: food-ai-secrets
+              key: EMAIL_PASS
+        # Restart a wedged UI and gate Service traffic on readiness using
+        # Streamlit's built-in health endpoint.
+        livenessProbe:
+          httpGet:
+            path: /_stcore/health
+            port: 8501
+          initialDelaySeconds: 30
+          periodSeconds: 30
+        readinessProbe:
+          httpGet:
+            path: /_stcore/health
+            port: 8501
+          initialDelaySeconds: 10
+          periodSeconds: 15
+        volumeMounts:
+        - name: logs
+          mountPath: /logs
+        resources:
+          limits:
+            cpu: "1"
+            memory: "1Gi"
+          requests:
+            cpu: "500m"
+            memory: "512Mi"
+      volumes:
+      - name: logs
+        persistentVolumeClaim:
+          claimName: logs-pvc

+ 14 - 0
k8s/app-service.yaml

@@ -0,0 +1,14 @@
+apiVersion: v1
+kind: Service
+metadata:
+  name: food-ai-app
+  namespace: food-ai
+spec:
+  # NodePort exposes the UI on every cluster node at the fixed port below.
+  type: NodePort
+  selector:
+    app: food-ai-app
+  ports:
+  - protocol: TCP
+    port: 8501
+    targetPort: 8501
+    nodePort: 30080   # within the default 30000-32767 NodePort range

+ 14 - 0
k8s/configmap.yaml

@@ -0,0 +1,14 @@
+apiVersion: v1
+kind: ConfigMap
+metadata:
+  name: food-ai-config
+  namespace: food-ai
+data:
+  # Mounted into the MySQL container at /etc/mysql/conf.d/my.cnf.
+  my.cnf: |
+    [mysqld]
+    # NO_AUTO_CREATE_USER was removed in MySQL 8.0; keeping it in sql_mode
+    # makes mysqld refuse to start (the image is mysql:8.0), so it is dropped.
+    sql_mode=STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_ENGINE_SUBSTITUTION
+    innodb_file_per_table=1
+    # Large packets are needed for bulk CSV ingestion rows.
+    max_allowed_packet=256M
+    innodb_log_file_size=256M
+  APP_ENV: "production"
+  LOG_PATH: "/logs"

+ 36 - 0
k8s/ingest-job.yaml

@@ -0,0 +1,36 @@
+apiVersion: batch/v1
+kind: Job
+metadata:
+  name: ingest-job
+  namespace: food-ai
+spec:
+  backoffLimit: 2
+  template:
+    spec:
+      restartPolicy: Never
+      containers:
+      - name: ingest
+        image: food-ai-ingest:latest
+        command: ["python", "ingest_csv.py"]
+        env:
+        - name: LOG_PATH
+          value: "/logs"
+        volumeMounts:
+        # Mount the CSV data under /app/data, NOT at /app: mounting the PVC
+        # over /app would shadow ingest_csv.py/myloginpath.py baked into the
+        # image and the job would fail with "can't open file 'ingest_csv.py'".
+        # TODO(review): confirm ingest_csv.py reads its CSVs from ./data.
+        - name: csv-data
+          mountPath: /app/data
+        - name: logs
+          mountPath: /logs
+        resources:
+          limits:
+            cpu: "2"
+            memory: "2Gi"
+          requests:
+            cpu: "1"
+            memory: "1Gi"
+      volumes:
+      - name: csv-data
+        persistentVolumeClaim:
+          claimName: csv-data-pvc
+      - name: logs
+        persistentVolumeClaim:
+          claimName: logs-pvc

+ 55 - 0
k8s/mysql-deployment.yaml

@@ -0,0 +1,55 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: mysql
+  namespace: food-ai
+spec:
+  replicas: 1
+  # The data PVC is ReadWriteOnce: the default RollingUpdate strategy starts a
+  # second pod before the old one releases the volume, deadlocking rollouts.
+  strategy:
+    type: Recreate
+  selector:
+    matchLabels:
+      app: mysql
+  template:
+    metadata:
+      labels:
+        app: mysql
+    spec:
+      containers:
+      - name: mysql
+        image: food-ai-mysql:latest
+        env:
+        - name: MYSQL_ROOT_PASSWORD
+          valueFrom:
+            secretKeyRef:
+              name: food-ai-secrets
+              key: MYSQL_ROOT_PASSWORD
+        - name: MYSQL_DATABASE
+          value: "food_db"
+        ports:
+        - containerPort: 3306
+        volumeMounts:
+        - name: mysql-data
+          mountPath: /var/lib/mysql
+        # my.cnf from the ConfigMap lands in /etc/mysql/conf.d/my.cnf.
+        - name: my-cnf
+          mountPath: /etc/mysql/conf.d
+        - name: logs
+          mountPath: /logs
+        resources:
+          limits:
+            cpu: "1"
+            memory: "1Gi"
+          requests:
+            cpu: "500m"
+            memory: "512Mi"
+      volumes:
+      - name: mysql-data
+        persistentVolumeClaim:
+          claimName: mysql-data-pvc
+      - name: my-cnf
+        configMap:
+          name: food-ai-config
+          items:
+          - key: my.cnf
+            path: my.cnf
+      - name: logs
+        persistentVolumeClaim:
+          claimName: logs-pvc

+ 5 - 0
k8s/namespace.yaml

@@ -0,0 +1,5 @@
+# Namespace for Food AI
+# All project resources (DB, ingest job, UI, Taiga sync) live here.
+apiVersion: v1
+kind: Namespace
+metadata:
+  name: food-ai

+ 35 - 0
k8s/pvc.yaml

@@ -0,0 +1,35 @@
+apiVersion: v1
+kind: PersistentVolumeClaim
+metadata:
+  name: mysql-data-pvc
+  namespace: food-ai
+spec:
+  accessModes:
+    - ReadWriteOnce   # single-node mount; matches the 1-replica MySQL Deployment
+  resources:
+    requests:
+      storage: 10Gi
+---
+apiVersion: v1
+kind: PersistentVolumeClaim
+metadata:
+  name: csv-data-pvc
+  namespace: food-ai
+spec:
+  accessModes:
+    - ReadWriteOnce
+  resources:
+    requests:
+      # OpenFoodFacts full CSV exports are multi-GB; leave headroom.
+      storage: 30Gi
+---
+apiVersion: v1
+kind: PersistentVolumeClaim
+metadata:
+  name: logs-pvc
+  namespace: food-ai
+spec:
+  # NOTE(review): several pods (app, mysql, jobs) mount this claim; with
+  # ReadWriteOnce they can only co-exist on the same node — confirm intent.
+  accessModes:
+    - ReadWriteOnce
+  resources:
+    requests:
+      storage: 5Gi

+ 16 - 0
k8s/secret.yaml

@@ -0,0 +1,16 @@
+apiVersion: v1
+kind: Secret
+metadata:
+  name: food-ai-secrets
+  namespace: food-ai
+type: Opaque
+# WARNING(review): plaintext credentials committed to the repository — rotate
+# them and move to a sealed-secret / external secret manager; a k8s Secret
+# manifest in git is NOT secret.
+stringData:
+  MYSQL_ROOT_PASSWORD: "BTSai123"
+  DB_OWNER_PASS: "BTSai123"
+  DB_READER_PASS: "BTSai123"
+  DB_LOADER_PASS: "BTSai123"
+  DB_AUTH_PASS: "BTSai123"
+
+  EMAIL_USER: "lanfr1904@outlook.com"
+
+  EMAIL_PASS: "BTSai123"
+
+  # Referenced by k8s/taiga-sync-job.yaml; without this key the sync pod
+  # fails with CreateContainerConfigError. Empty means "sync disabled".
+  TAIGA_TOKEN: ""

+ 26 - 0
k8s/taiga-sync-config.yaml

@@ -0,0 +1,26 @@
+apiVersion: v1
+kind: ConfigMap
+metadata:
+  name: taiga-sync-config
+  namespace: food-ai
+
+# sync.yaml is mounted at /config/sync.yaml and consumed by the Taiga sync
+# job. Everything inside the block scalar below is data — do not edit its
+# indentation or add comments inside it.
+data:
+  sync.yaml: |
+    projects:
+      - name: Food AI
+        description: "Portail clinique de données alimentaires"
+        user_stories:
+          - title: "Afficher la liste des produits"
+            description: "Vue Streamlit qui liste les produits importés"
+            sprint: "Sprint 1"
+            tasks:
+              - "Implémenter le backend"
+              - "Créer le composant UI"
+        wiki_pages:
+          - title: "Architecture"
+            content: |
+              # Architecture du projet
+              * MySQL DB
+              * Service d’ingestion
+              * UI Streamlit
+              * Synchronisation Taiga

+ 32 - 0
k8s/taiga-sync-job.yaml

@@ -0,0 +1,32 @@
+apiVersion: batch/v1
+kind: Job
+metadata:
+  name: taiga-sync-job
+  namespace: food-ai
+spec:
+  backoffLimit: 2
+  template:
+    spec:
+      restartPolicy: Never
+      containers:
+        - name: taiga-sync
+          image: food-ai-taiga:latest
+          # Run the image's wrapper, which gracefully skips when no Taiga
+          # URL/token is configured, instead of bypassing it with
+          # taiga_feed.py directly.
+          command: ["/app/run_sync.sh"]
+          env:
+            - name: TAIGA_TOKEN
+              valueFrom:
+                secretKeyRef:
+                  name: food-ai-secrets
+                  key: TAIGA_TOKEN
+                  # The secret may not define TAIGA_TOKEN; don't block pod
+                  # startup — the wrapper handles the missing-token case.
+                  optional: true
+            - name: SYNC_CONFIG
+              value: "/config/sync.yaml"
+          volumeMounts:
+            - name: sync-config
+              mountPath: /config
+      volumes:
+        - name: sync-config
+          configMap:
+            name: taiga-sync-config
+            items:
+              - key: sync.yaml
+                path: sync.yaml

+ 14 - 0
myloginpath.py

@@ -0,0 +1,14 @@
+import os
+
+def parse(login_path: str):
+    """Very simple fallback parser.
+    It looks for environment variables matching the login_path name.
+    For example, for 'app_loader' it expects:
+        APP_LOADER_HOST, APP_LOADER_USER, APP_LOADER_PASSWORD
+    If those variables are missing, returns defaults pointing to localhost.
+    """
+    prefix = login_path.upper()
+    host = os.getenv(f"{prefix}_HOST", "127.0.0.1")
+    user = os.getenv(f"{prefix}_USER", "root")
+    password = os.getenv(f"{prefix}_PASSWORD", "")
+    return {"host": host, "user": user, "password": password}

+ 13 - 0
setup_unix_user.sh

@@ -0,0 +1,13 @@
+#!/bin/bash
+# create unix service user for Food AI
+USERNAME="food_ai"
+PASSWORD="BTSai123"
+# Check if user exists
+if id -u $USERNAME >/dev/null 2>&1; then
+  echo "User $USERNAME already exists"
+else
+  sudo net user $USERNAME $PASSWORD /add
+  # Add to docker-users group (Docker Desktop group on Windows)
+  sudo net localgroup docker-users $USERNAME /add
+  echo "User $USERNAME created and added to docker-users group"
+fi

+ 91 - 0
taiga_sync_fixer.py

@@ -0,0 +1,91 @@
+import requests, urllib3
+urllib3.disable_warnings()
+
+# Configuration – adjust as needed
+TAIGA_URL = 'https://192.168.130.161/taiga/api/v1'
+USERNAME = 'lanfr1904@outlook.com'
+PASSWORD = 'BTSai123'
+PROJECT_ID = 21
+DEFAULT_TASK_SUBJECT = 'Auto‑generated task (define details)'
+DEFAULT_POINTS = 1
+
+# Authenticate and obtain token
+auth_resp = requests.post(
+    f'{TAIGA_URL}/auth',
+    json={'type': 'normal', 'username': USERNAME, 'password': PASSWORD},
+    verify=False
+).json()
+
+token = auth_resp.get('auth_token')
+if not token:
+    raise RuntimeError('Authentication to Taiga failed')
+headers = {'Authorization': f'Bearer {token}'}
+
+# Helper functions
+def get_user_stories():
+    resp = requests.get(
+        f'{TAIGA_URL}/userstories?project={PROJECT_ID}',
+        headers=headers,
+        verify=False
+    )
+    resp.raise_for_status()
+    return resp.json()
+
+def get_tasks_for_us(us_id):
+    resp = requests.get(
+        f'{TAIGA_URL}/tasks?user_story={us_id}',
+        headers=headers,
+        verify=False
+    )
+    resp.raise_for_status()
+    return resp.json()
+
+def create_task(us_id):
+    payload = {
+        'subject': DEFAULT_TASK_SUBJECT,
+        'user_story': us_id,
+        'project': PROJECT_ID,
+        'status': 101  # Status 101 = "New" for project 21
+    }
+    resp = requests.post(
+        f'{TAIGA_URL}/tasks',
+        json=payload,
+        headers=headers,
+        verify=False
+    )
+    if not resp.ok:
+        print("Error creating task:", resp.text)
+    resp.raise_for_status()
+    return resp.json()
+
+def set_points(us_id, points, version):
+    payload = {
+        'total_points': points,
+        'version': version
+    }
+    resp = requests.patch(
+        f'{TAIGA_URL}/userstories/{us_id}',
+        json=payload,
+        headers=headers,
+        verify=False
+    )
+    if not resp.ok:
+        print("Error setting points:", resp.text)
+    resp.raise_for_status()
+    return resp.json()
+
+def main():
+    us_list = get_user_stories()
+    for us in us_list:
+        # 1️⃣ Ensure at least one task exists
+        tasks = get_tasks_for_us(us['id'])
+        if not tasks:
+            print(f"US #{us['ref']} missing tasks – creating default task")
+            create_task(us['id'])
+        # 2️⃣ Ensure story has points
+        if not us.get('total_points'):
+            print(f"US #{us['ref']} missing points – setting to {DEFAULT_POINTS}")
+            set_points(us['id'], DEFAULT_POINTS, us['version'])
+
+if __name__ == '__main__':
+    main()