Init
Some checks failed
Build Master / build (controller) (push) Failing after 12s
Build Master / build (webserver) (push) Failing after 12s

This commit is contained in:
2025-12-10 14:30:44 +07:00
parent 3785aab857
commit 498468b3a2
26 changed files with 3109 additions and 0 deletions

View File

@@ -0,0 +1,40 @@
name: Build Dev
on:
  push:
    branches: [dev]  # Only trigger on dev branch
    paths:
      - 'controller/**'
      - 'webserver/**'
      - '.gitea/workflows/dev.yml'
jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        service: [controller, webserver]
    steps:
      - uses: actions/checkout@v4
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Login to Gitea Registry
        uses: docker/login-action@v3
        with:
          registry: gitea.lan
          username: ${{ github.actor }}
          password: ${{ secrets.GITEA_TOKEN }}
      - name: Build & Push Dev
        uses: docker/build-push-action@v5
        with:
          context: ./${{ matrix.service }}
          file: ./${{ matrix.service }}/Dockerfile
          push: true
          # Fix: Docker repository names must be all-lowercase; the previous
          # "gitea.lan/Imrayya/EpistineFiles/..." path fails with
          # "repository name must be lowercase" at push time.
          tags: |
            gitea.lan/imrayya/epistinefiles/${{ matrix.service }}:dev
          # NOTE(review): type=gha caching is backed by the GitHub Actions
          # cache service — confirm the Gitea act_runner exposes a compatible
          # endpoint, otherwise switch to type=registry or drop these.
          cache-from: type=gha
          cache-to: type=gha,mode=max

View File

@@ -0,0 +1,41 @@
name: Build Master
on:
  push:
    branches: [main, master]
    paths:
      - 'controller/**'   # Watch controller folder
      - 'webserver/**'    # Watch webserver folder
      - '.gitea/workflows/master.yml'
jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        service: [controller, webserver]  # Define your services here
    steps:
      - uses: actions/checkout@v4
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Login to Gitea Registry
        uses: docker/login-action@v3
        with:
          registry: gitea.lan
          username: ${{ github.actor }}  # Fixed: use github.* context
          password: ${{ secrets.GITEA_TOKEN }}
      - name: Build & Push ${{ matrix.service }}
        uses: docker/build-push-action@v5
        with:
          context: ./${{ matrix.service }}
          file: ./${{ matrix.service }}/Dockerfile
          push: true
          # Fix: Docker repository names must be all-lowercase; the previous
          # "gitea.lan/Imrayya/EpistineFiles/..." path makes docker push fail
          # with "repository name must be lowercase" (matches the failing
          # "Build Master" checks on this commit).
          tags: |
            gitea.lan/imrayya/epistinefiles/${{ matrix.service }}:latest
            gitea.lan/imrayya/epistinefiles/${{ matrix.service }}:${{ github.sha }}
          # NOTE(review): type=gha cache requires a GitHub-Actions-compatible
          # cache service; verify the Gitea runner provides one.
          cache-from: type=gha
          cache-to: type=gha,mode=max

41
.vscode/tasks.json vendored Normal file
View File

@@ -0,0 +1,41 @@
// VS Code build-and-run helper. tasks.json is JSONC, so comments are allowed.
// NOTE(review): the pickString values are capitalized (Controller, Webserver,
// ...) while the repository directories visible in CI are lowercase
// (controller/, webserver/). This works on Windows but fails to locate the
// build context on case-sensitive filesystems — confirm the intended layout.
{
    "version": "2.0.0",
    "tasks": [
        {
            "label": "🐳 Build & Run Docker Service",
            "type": "shell",
            // Inline PowerShell: build the selected service's image, then run
            // it detached. Controller is special-cased with a fixed 8000:8000
            // mapping, a Desktop/logs volume, and Controller/.env; the other
            // services map the prompted port and use their own .env file.
            "command": "powershell",
            "args": [
                "-Command",
                "$ErrorActionPreference='Stop'; $svc='${input:service}'; $port='${input:port}'; $name=$svc.ToLower(); Write-Host \"Building $svc...\" -f Cyan; docker build -t $name $svc; if($LASTEXITCODE){exit 1}; Write-Host \"Running $svc...\" -f Green; if($svc-eq'Controller'){docker run -d --rm --name $name -p 8000:8000 -v \"${env:USERPROFILE}/Desktop/logs:/logs\" --env-file \"${workspaceFolder}/Controller/.env\" $name}else{docker run -d --rm --name $name -p $port`:$port --env-file \"${workspaceFolder}/$svc/.env\" $name}; Write-Host \"✅ Container $name is up!\" -f Yellow"
            ],
            "group": "build",
            "presentation": {
                "clear": true,
                "echo": false,
                "reveal": "always",
                "panel": "shared"
            },
            "problemMatcher": []
        }
    ],
    "inputs": [
        {
            "id": "service",
            "type": "pickString",
            "description": "Select service to build & run",
            "options": [
                {"label": "🎮 Controller (Port 8000 + Logs)", "value": "Controller"},
                {"label": "🤖 Agent", "value": "Agent"},
                {"label": "📋 Task1", "value": "Task1"},
                {"label": "🌐 Webserver", "value": "Webserver"}
            ]
        },
        {
            "id": "port",
            "type": "promptString",
            "description": "Service port (for non-Controller)",
            "default": "8001"
        }
    ]
}

30
controller/.dockerignore Normal file
View File

@@ -0,0 +1,30 @@
# Paths excluded from the Docker build context.
# Git
.git
.gitignore
# Python
__pycache__/
*.py[cod]
*$py.class
*.so
.Python
env/
venv/
*.egg-info/
.pytest_cache/
.mypy_cache/
# IDE
.vscode/
.idea/
# OS
.DS_Store
Thumbs.db
# Docker
# NOTE: .env is excluded from the image, so runtime configuration must be
# supplied at container start (e.g. `docker run --env-file ...`).
Dockerfile
.dockerignore
.env
*.local

11
controller/.env.example Normal file
View File

@@ -0,0 +1,11 @@
# Example environment for the controller service. Copy to .env and fill in.
POSTGRES_SUPERUSER_NAME=your_superuser
POSTGRES_SUPERUSER_PASSWORD=your_secure_password
POSTGRES_IP=localhost
POSTGRES_PORT=5432
SCHEMA_FILE_PATH=./schema/version1.sql
# Fix: no spaces around '='. docker --env-file and most dotenv parsers do
# not trim them, so "LOG_DIRECTORY = /logs" defined a broken variable name.
LOG_DIRECTORY=/logs
MASTER_API_KEY=your_master_api_key
SSL_KEYFILE=/app/certs/controller.local-key.pem
SSL_CERTFILE=/app/certs/controller.local.pem
CONTROLLER_PORT=8000
STREAMLIT_ORIGIN=http://localhost:8501

63
controller/Dockerfile Normal file
View File

@@ -0,0 +1,63 @@
# === Stage 1: Builder ===
FROM python:3.12-alpine AS builder
WORKDIR /app
# Install openssl for cert generation
RUN apk add --no-cache openssl
# Generate self-signed certificates for development
RUN openssl req -x509 -newkey rsa:4096 \
    -keyout controller.local-key.pem \
    -out controller.local.pem \
    -days 365 -nodes \
    -subj "/CN=controller.local/O=Dev/C=US" && \
    chmod 600 controller.local-key.pem
# Copy requirements and install dependencies into an isolated venv.
# NOTE(review): packages with C extensions (e.g. bcrypt, psycopg2) may need
# build tools on Alpine (gcc, musl-dev, libffi-dev) — confirm
# requirements.txt installs cleanly on this base image.
COPY requirements.txt .
RUN python -m venv /opt/venv && \
    /opt/venv/bin/pip install --no-cache-dir -r requirements.txt
# === Stage 2: Final Image ===
FROM python:3.12-alpine
WORKDIR /app
# Install runtime dependencies (ca-certificates for SSL verification)
RUN apk add --no-cache ca-certificates
# Create non-root user
RUN addgroup -g 1001 -S appgroup && \
    adduser -u 1001 -S appuser -G appgroup
# Copy virtual environment
COPY --from=builder /opt/venv /opt/venv
# Copy application code
COPY . .
# Copy generated certificates from builder
COPY --from=builder /app/*.pem ./certs/
# Restrict the key material, then hand the app tree to the runtime user
RUN chmod 600 certs/* && \
    chown -R appuser:appgroup /app
# Activate venv
ENV PATH="/opt/venv/bin:$PATH" \
    PYTHONUNBUFFERED=1 \
    PYTHONDONTWRITEBYTECODE=1
# Use environment variables for cert paths (allows runtime override)
ENV SSL_KEYFILE=/app/certs/controller.local-key.pem
ENV SSL_CERTFILE=/app/certs/controller.local.pem
# Expose port
EXPOSE 8000
# Switch to non-root user
USER appuser
# Fix: exec form so python runs as PID 1 and receives SIGTERM directly.
# The previous shell form (`CMD python app.py`) wrapped the process in
# /bin/sh, which swallows signals; no shell variable expansion was actually
# used in the command, so the shell bought nothing.
CMD ["python", "app.py"]

16
controller/README.md Normal file
View File

@@ -0,0 +1,16 @@
# Welcome
This is the controller that handles the whole system
### Environment Variables
POSTGRES_SUPERUSER_NAME=your_superuser
POSTGRES_SUPERUSER_PASSWORD=your_secure_password
POSTGRES_IP=localhost
POSTGRES_PORT=5432
SCHEMA_FILE_PATH=./schema/version1.sql
MASTER_API_KEY=your_master_api_key
SSL_KEYFILE=/app/certs/controller.local-key.pem
SSL_CERTFILE=/app/certs/controller.local.pem
### Volumes
/path/to/your/certs:/app/certs:ro

View File

@@ -0,0 +1,153 @@
import secrets
import bcrypt
import os
from fastapi import Depends, HTTPException, status
from fastapi.security import APIKeyHeader
from typing import Optional, List
from logger import log_message
import confighelper
import dbhandler as dbh
from scopes import Scopes as sc
# Name of the HTTP header carrying the API key.
API_KEY_NAME = "FilesManager-API-Key"
# auto_error=False: a missing header resolves to None instead of an automatic
# error, so verify_api_key controls the failure response itself.
api_key_header = APIKeyHeader(name=API_KEY_NAME, auto_error=False)
def check_scopes(required_scopes: List[str]):
    """Factory returning a FastAPI dependency that enforces the given scopes.

    - **required_scopes**: scope names the caller's key must all hold.
    - Returns an async dependency usable as Depends(check_scopes([...])).
    """
    async def _check_scopes(api_key: str = Depends(api_key_header)):
        """Check if the provided API key has the required scopes"""
        # verify_api_key raises 401 itself for bad keys; the is_valid branch
        # below is a defensive double-check and normally never triggers.
        is_valid, key_scopes = await verify_api_key(api_key)
        if not is_valid:
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="Invalid API Key",
            )
        if "admin" in key_scopes:
            return  # Admin has all permissions
        # Every required scope must be present; fail on the first missing one.
        for scope in required_scopes:
            if scope not in key_scopes:
                log_message(
                    component="controller",
                    component_id=None,
                    level="WARNING",
                    log_type="security",
                    code="api.insufficient_scopes",
                    message=f"API key missing required scope: {scope}"
                )
                raise HTTPException(
                    status_code=status.HTTP_403_FORBIDDEN,
                    detail=f"Insufficient scopes. Missing: {scope}",
                )
        log_message(
            component="controller",
            component_id=None,
            level="DEBUG",
            log_type="security",
            code="api.scopes_verified",
            message="API key scopes verified successfully"
        )
    return _check_scopes
async def __check_scopes(required_scopes: List[str], api_key: str = Depends(api_key_header)):
    """Check if the provided API key has the required scopes
    - **required_scopes**: List of scopes required to access the endpoint
    - **api_key**: The API key provided in the request header
    - Raises HTTPException if scopes are insufficient
    """
    # NOTE(review): this looks like an earlier, non-factory version of the
    # scope check — check_scopes() above duplicates this logic line-for-line
    # and is what app.py imports. No caller of __check_scopes is visible in
    # this file; consider deleting it once confirmed unused.
    is_valid, key_scopes = await verify_api_key(api_key)
    if not is_valid:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid API Key",
        )
    if "admin" in key_scopes:
        return  # Admin has all permissions
    for scope in required_scopes:
        if scope not in key_scopes:
            log_message(
                component="controller",
                component_id=None,
                level="WARNING",
                log_type="security",
                code="api.insufficient_scopes",
                message=f"API key missing required scope: {scope}"
            )
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail=f"Insufficient scopes. Missing: {scope}",
            )
    log_message(
        component="controller",
        component_id=None,
        level="DEBUG",
        log_type="security",
        code="api.scopes_verified",
        message="API key scopes verified successfully"
    )
async def verify_api_key(api_key: str = Depends(api_key_header)):
    """Verify the provided API key against the master key and stored keys.

    - **api_key**: value of the FilesManager-API-Key header (None when the
      header is absent, because APIKeyHeader uses auto_error=False).
    - Returns a tuple: (True, scopes) for a valid key.
    - Raises HTTPException 401 for a missing or invalid key.
    """
    # Fix: a missing header (None) combined with an unset MASTER_API_KEY env
    # var (also None) used to satisfy `api_key == os.getenv(...)` and grant
    # admin silently. Reject empty/missing keys before any comparison.
    if not api_key:
        log_message(
            component="controller",
            component_id=None,
            level="WARNING",
            log_type="security",
            code="api.invalid_api_key",
            message="Invalid API key attempt"
        )
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid API Key",
        )
    # Allow master key for bootstrapping. Constant-time comparison avoids
    # leaking information about the master key via response timing.
    master_key = os.getenv("MASTER_API_KEY")
    if master_key and secrets.compare_digest(api_key, master_key):
        return (True, ["admin"])
    validated = dbh.validate_api_key(api_key)
    if validated[0] is False:
        log_message(
            component="controller",
            component_id=None,
            level="WARNING",
            log_type="security",
            code="api.invalid_api_key",
            message="Invalid API key attempt"
        )
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid API Key",
        )
    log_message(
        component="controller",
        component_id=None,
        level="DEBUG",
        log_type="security",
        code="api.api_key_verified",
        message="API key verified successfully"
    )
    return validated
async def create_api_key(
    name: str,
    created_by: Optional[str] = None,
    description: Optional[str] = None,
    scopes: Optional[List[sc]] = None
) -> str:
    """Generate, hash, and store a new API key; return the plaintext key.

    - **name**: human-readable key name.
    - **created_by**: optional creator identifier.
    - **description**: optional free-text description.
    - **scopes**: Scopes enum members or plain scope strings (app.py passes
      `scope.value` strings); stored as a JSON array.
    - Returns the plaintext key — only the bcrypt hash and an 8-character
      prefix are persisted, so this is the caller's single chance to see it.
    """
    import json

    prefix = 'sk_'
    secret = secrets.token_urlsafe(32)
    api_key = f"{prefix}{secret}"  # Removed extra underscore
    # Hash the API key; the plaintext never reaches the database.
    hashed = bcrypt.hashpw(api_key.encode(), bcrypt.gensalt())
    # Fix: json.dumps(scopes) raised TypeError when Scopes enum members were
    # passed (enums are not JSON-serializable). Normalize each entry to its
    # .value when present, otherwise keep the string as-is.
    scope_values = [getattr(s, "value", s) for s in (scopes or [])]
    scopes_json = json.dumps(scope_values)
    dbh.insert_api_key(
        name=name,
        key_hash=hashed.decode('utf-8'),
        key_prefix=api_key[:8],
        created_by=created_by,
        description=description,
        scopes=scopes_json
    )
    return api_key

465
controller/app.py Normal file
View File

@@ -0,0 +1,465 @@
import asyncio
from datetime import datetime, timezone
from typing import Dict, Optional, List, Any
from pydantic import BaseModel, Field
from fastapi import FastAPI, HTTPException, status, Query, Depends
from fastapi.responses import PlainTextResponse
from fastapi.middleware.cors import CORSMiddleware
from fastapi.routing import APIRouter
from fastapi.security import APIKeyHeader
from pathlib import Path
import secrets
import os
from scopes import Scopes as sc
from api_key_handler import verify_api_key, create_api_key, check_scopes
from logger import setup_logging, log_message, get_logs
from confighelper import load_config, sanitize_settings_toml
# Header name must match api_key_handler's APIKeyHeader.
API_KEY_NAME = "FilesManager-API-Key"
# auto_error=False: missing header becomes None instead of an automatic error.
api_key_header = APIKeyHeader(name=API_KEY_NAME, auto_error=False)
# Load config & setup logging (logging section is optional — defaults to {}).
CONFIG = load_config("settings.toml")
setup_logging(CONFIG.get("shared", {}).get("logging", {}))
# Pydantic models
# Shape of a single item in LogsResponse.entries (GET /api/v1/logs).
class LogEntryResponse(BaseModel):
    """Represents a single log entry in the response"""
    timestamp: str = Field(...,
                           description="ISO 8601 timestamp of the log entry")
    level: str = Field(...,
                       description="Log level (DEBUG, INFO, WARNING, ERROR, CRITICAL)")
    type: str = Field(...,
                      description="Log type (app, audit, security, system)")
    code: str = Field(..., description="Log code for categorization (e.g., db.createtable or api.health_check)")
    content: str = Field(..., description="The log message content")
# Response model for GET /api/v1/logs.
class LogsResponse(BaseModel):
    """Response model for log retrieval"""
    component: str = Field(...,
                           description="Component name (controller, agents, tasks)")
    component_id: Optional[str] = Field(
        None, description="ID of the component if applicable")
    entries: List[LogEntryResponse] = Field(...,
                                            description="List of log entries")
    total_count: int = Field(...,
                             description="Total number of log entries returned")
# Stored as the values of worker_registry; returned by GET /api/v1/workers.
class WorkerInfo(BaseModel):
    """Information about a registered worker"""
    name: str = Field(..., description="Worker name")
    version: str = Field(..., description="Worker version")
    last_seen: datetime = Field(..., description="Last seen timestamp in UTC")
# Request body for POST /api/v1/register.
class RegisterRequest(BaseModel):
    """Worker registration request model"""
    name: str = Field(..., description="Worker name", min_length=1)
    version: str = Field(..., description="Worker version", min_length=1)
# Returned by POST /api/v1/register and GET /api/v1/version.
class VersionResponse(BaseModel):
    """Response model for version check"""
    expected_version: str = Field(..., description="Expected version string")
    status: str = Field(..., description='Status - "ok" or "outdated"')
    update_command: Optional[str] = Field(
        None, description="Command to update if outdated")
# Request body for POST /api/v1/command.
class CommandRequest(BaseModel):
    """Command request model for sending commands to workers"""
    worker_name: str = Field(..., description="Target worker name")
    command: str = Field(..., description="Command to execute")
    payload: Optional[Dict[str, Any]] = Field(
        None, description="Additional command parameters")
# NOTE(review): no endpoint in this file references SettingRequest, and a
# `bytes` field in a JSON body requires the client to send base64 — confirm
# this model is still needed and correctly typed.
class SettingRequest(BaseModel):
    """Request model for providing the settings"""
    settings: bytes = Field(..., description="Settings file content in bytes")
# Request body for POST /api/v1/log; the pattern constraints mirror the
# Query() filters accepted by GET /api/v1/logs.
class WorkerLogEntry(BaseModel):
    """Log entry model sent by workers"""
    component: str = Field(..., pattern="^(agents|tasks)$",
                           description="Component type (agents or tasks)")
    component_id: str = Field(..., description="ID of the component")
    level: str = Field(..., pattern="^(DEBUG|INFO|WARNING|ERROR|CRITICAL)$",
                       description="Log level")
    log_type: str = Field(..., pattern="^(app|audit|security|system)$",
                          description="Log type")
    code: str = Field(..., description="Log code for categorization")
    message: str = Field(..., description="The log message content")
# Request body for POST /api/v1/createAPIKey.
class NewAPIKeyRequest(BaseModel):
    """Request model for creating a new API key"""
    name: str = Field(..., description="The name for the API key")
    created_by: str = Field(..., description="Who created the API key")
    description: str = Field(..., description="Description for the API key")
    scopes: List[sc] = Field(..., description="The scopes for the API key")
# API Router
api_router = APIRouter(prefix="/api/v1")
# In-memory worker registry (name -> WorkerInfo). Not persisted: cleared on
# restart, and not shared across multiple worker processes.
worker_registry: Dict[str, WorkerInfo] = {}
# Version workers are expected to run; compared in /register and /command.
EXPECTED_VERSION = "0.0.1"
@api_router.post("/createAPIKey", status_code=status.HTTP_201_CREATED, summary="Create a new API key")
async def create_new_api_key(
    entry: NewAPIKeyRequest,
    api_key: str = Depends(check_scopes([sc.CREATE_API_KEY]))
) -> Dict[str, str]:
    """
    Create and store a new API key.
    - **entry**: Details for the new API key
    - **api_key**: API key for authentication (requires CREATE_API_KEY scope)
    - Returns confirmation status and the new API key (shown only once)
    """
    try:
        # Scopes enum members are unwrapped to their string values before
        # being handed to the storage layer.
        new_key = await create_api_key(
            name=entry.name,
            created_by=entry.created_by,
            description=entry.description,
            scopes=[scope.value for scope in entry.scopes]
        )
        log_message(
            component="controller",
            component_id=None,
            level="INFO",
            log_type="security",
            code="api.api_key_created",
            message=f"New API key created: {entry.name} by {entry.created_by}"
        )
        return {"status": "api_key_created", "api_key": new_key}
    except Exception as e:
        # NOTE(review): str(e) is echoed to the client; consider logging it
        # server-side and returning a generic message to avoid leaking
        # internal details (DB errors, file paths).
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to create API key: {str(e)}"
        )
@api_router.post("/log", status_code=status.HTTP_201_CREATED, summary="Submit worker log")
async def receive_worker_log(entry: WorkerLogEntry, api_key: str = Depends(check_scopes([sc.LOGS_READ]))) -> Dict[str, str]:
    """
    Workers/agents POST their logs here for centralized storage.
    - **entry**: Complete log entry from the worker
    - **api_key**: API key for authentication
    - Returns confirmation status
    """
    # NOTE(review): this endpoint WRITES a log entry but is guarded by the
    # LOGS_READ scope — check the Scopes enum for a write-side scope
    # (e.g. LOGS_WRITE) and confirm this is intentional.
    log_message(
        component=entry.component,
        component_id=entry.component_id,
        level=entry.level,
        log_type=entry.log_type,
        code=entry.code,
        message=entry.message
    )
    return {"status": "logged"}
@api_router.post("/register", status_code=status.HTTP_202_ACCEPTED, response_model=VersionResponse, summary="Register a worker")
async def register_worker(data: RegisterRequest, api_key: str = Depends(check_scopes([sc.WORKER_REGISTER]))
) -> VersionResponse:
    """
    Worker announces itself to master and checks version compatibility.
    - **data**: Worker registration details
    - **api_key**: API key for authentication (requires WORKER_REGISTER scope)
    - Returns version compatibility status and update instructions if needed
    """
    is_current = data.version == EXPECTED_VERSION
    # Upsert into the in-memory registry; re-registration simply refreshes
    # the stored version and last_seen timestamp.
    worker_registry[data.name] = WorkerInfo(
        name=data.name,
        version=data.version,
        last_seen=datetime.now(timezone.utc)
    )
    log_message(
        component="controller",
        component_id=None,
        level="INFO",
        log_type="app",
        code="api.worker_registered",
        message=f"Worker {data.name} registered with version {data.version}"
    )
    # Outdated workers get a canned update command with the expected version.
    return VersionResponse(
        expected_version=EXPECTED_VERSION,
        status="ok" if is_current else "outdated",
        update_command=None if is_current else "git pull && restart"
    )
@api_router.get("/workers", response_model=Dict[str, List[WorkerInfo]], summary="List all workers")
async def list_workers(
    api_key: str = Depends(check_scopes([sc.WORKER_READ]))
) -> Dict[str, List[WorkerInfo]]:
    """
    Return every worker currently present in the in-memory registry.
    - **api_key**: API key for authentication (requires WORKER_READ scope)
    - Returns {"workers": [...]} with one WorkerInfo per registered worker
    """
    registered = [info for info in worker_registry.values()]
    return {"workers": registered}
@api_router.get("/version", response_model=VersionResponse, summary="Get expected version")
async def get_version() -> VersionResponse:
    """
    Report the version string the controller expects workers to run.
    No authentication dependency is declared on this endpoint.
    """
    # Record each access at DEBUG level for traceability.
    log_message(
        component="controller",
        component_id=None,
        level="DEBUG",
        log_type="app",
        code="api.version_accessed",
        message="Version endpoint accessed",
    )
    response = VersionResponse(
        expected_version=EXPECTED_VERSION,
        status="ok",
        update_command=None,
    )
    return response
@api_router.post("/initializeDB", status_code=status.HTTP_201_CREATED, summary="Initialize database")
async def initialize_db(
    api_key: str = Depends(verify_api_key)
) -> Dict[str, str]:
    """
    Initialize the database schema from the configured SQL file.
    This will execute the schema file specified in settings.toml
    - **api_key**: API key for authentication
    - Returns confirmation status
    """
    # Fix: the docstring always documented an API key, but no auth dependency
    # was declared, leaving a schema-executing endpoint callable anonymously.
    # verify_api_key matches the auth used by /command and /logs.
    from dbhandler import execute_sql_file
    from confighelper import get_db_config
    DB_CONFIG = get_db_config(CONFIG)
    # NOTE(review): this reads shared.postgresDB.schemaFile while confighelper
    # maps the SCHEMA_FILE_PATH env var onto server.postgresDB.schemaFile —
    # confirm which section is canonical.
    SQL_FILE_PATH = CONFIG["shared"]["postgresDB"]["schemaFile"]
    try:
        execute_sql_file(SQL_FILE_PATH, DB_CONFIG)
        log_message(
            component="controller",
            component_id=None,
            level="INFO",
            log_type="app",
            code="api.db_initialized",
            message="Database initialized successfully"
        )
        return {"status": "database_initialized"}
    except Exception as e:
        log_message(
            component="controller",
            component_id=None,
            level="ERROR",
            log_type="app",
            code="api.db_initialization_failed",
            message=f"Database initialization failed: {e}"
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Database initialization failed"
        )
@api_router.get(
    "/settings",
    status_code=status.HTTP_200_OK,
    summary="Download current settings file",
    response_description="The settings.toml file"
)
async def download_settings(api_key: str = Depends(check_scopes([sc.SETTINGS_READ]))
) -> PlainTextResponse:
    """
    Download the current settings.toml file.
    - **api_key**: API key for authentication (requires SETTINGS_READ scope)
    - Returns the sanitized settings file as a download
    - Raises 404 if the file is missing, 500 on read/sanitize failure
    """
    settings_path = Path("settings.toml").resolve()
    # Security: Ensure file is within allowed directory and exists
    if not settings_path.is_file():
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Settings file not found"
        )
    try:
        # Sensitive sections are stripped before anything leaves the server
        # (see confighelper.sanitize_settings_toml).
        sanitized_content = sanitize_settings_toml(settings_path)
        log_message(
            component="controller",
            component_id=None,
            level="INFO",
            log_type="app",
            code="api.settings_downloaded",
            message="Settings file downloaded"
        )
        # Return the sanitized content as a PlainTextResponse
        # Use headers to force the browser to download the file with the correct filename
        return PlainTextResponse(
            content=sanitized_content,
            media_type="application/toml",
            headers={"Content-Disposition": "attachment; filename=settings.toml"}
        )
    except Exception as e:
        log_message(
            component="controller",
            component_id=None,
            level="ERROR",
            log_type="app",
            code="api.settings_download_failed",
            message=f"Failed to download settings file: {e}"
        )
        # Catch exceptions from sanitize_settings_toml or other I/O errors
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to read or process settings file"
        )
@api_router.post("/command", response_model=Dict[str, Any], summary="Send command to worker")
async def send_command(cmd: CommandRequest, api_key: str = Depends(verify_api_key)
) -> Dict[str, Any]:
    """
    Send a command to a specific worker.
    - **cmd**: Command details including target worker and command type
    - **api_key**: API key for authentication
    - Raises 404 when the target worker is not in the registry
    """
    if cmd.worker_name not in worker_registry:
        log_message(
            component="controller",
            component_id=None,
            level="ERROR",
            log_type="app",
            code="api.command_unknown_worker",
            message=f"Command sent to unknown worker: {cmd.worker_name}"
        )
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND, detail="Worker not found")
    log_message(
        component="controller",
        component_id=None,
        level="INFO",
        log_type="app",
        code="api.command_sent",
        message=f"Command '{cmd.command}' sent to worker {cmd.worker_name}"
    )
    # "update" is handled entirely in the local registry (the stored version
    # is bumped to the expected one). NOTE(review): nothing is actually
    # delivered to the worker here — confirm workers poll for commands.
    if cmd.command == "update":
        worker_registry[cmd.worker_name].version = EXPECTED_VERSION
        return {"status": "update_acknowledged"}
    return {"status": "command_received", "command": cmd.command}
@api_router.get("/logs", response_model=LogsResponse, summary="Retrieve logs")
async def get_component_logs(
    component: str = Query(..., pattern="^(controller|agents|tasks)$",
                           description="Component name to retrieve logs from"),
    component_id: Optional[str] = Query(
        None, description="Component ID (required for agents/tasks)"),
    level: Optional[str] = Query(
        None, pattern="^(DEBUG|INFO|WARNING|ERROR|CRITICAL)$", description="Filter by log level"),
    log_type: Optional[str] = Query(
        None, pattern="^(app|audit|security|system)$", description="Filter by log type"),
    tail: int = Query(100, ge=1, le=10000,
                      description="Number of recent log entries to return (max 10,000)"), api_key: str = Depends(verify_api_key)
) -> LogsResponse:
    """
    Retrieve logs from a component with optional filtering.
    - **component**: Which component to get logs from
    - **component_id**: Required when component is 'agents' or 'tasks'
    - **level**: Optional log level filter
    - **log_type**: Optional log type filter
    - **tail**: Number of recent entries to return (1-10,000)
    - **api_key**: API key for authentication
    - Returns filtered log entries
    - Raises 400 for a missing component_id or invalid filter combination
    """
    # agents/tasks require an instance ID; controller logs do not.
    if component in ["agents", "tasks"] and not component_id:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"component_id required for {component}"
        )
    try:
        entries = get_logs(component, component_id, level, log_type, tail)
        return LogsResponse(
            component=component,
            component_id=component_id,
            entries=entries,
            total_count=len(entries)
        )
    except ValueError as e:
        # get_logs signals invalid filter input with ValueError -> 400.
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST, detail=str(e))
# Main app with metadata
app = FastAPI(
    title="Controller Server API",
    description="Centralized controller for agent/task management, logging, and database operations",
    version="0.0.1"
)
# CORS: only the Streamlit origin (overridable via STREAMLIT_ORIGIN) may make
# credentialed cross-origin requests.
# NOTE(review): allow_headers=["*"] already permits every header, so listing
# "FilesManager-API-Key" alongside it is redundant (harmless).
app.add_middleware(
    CORSMiddleware,
    allow_origins=[os.getenv("STREAMLIT_ORIGIN", "http://localhost:8501")],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*", "FilesManager-API-Key"],
)
app.include_router(api_router)
@app.get("/health", response_model=Dict[str, Any], summary="Health check")
async def health() -> Dict[str, Any]:
    """
    Liveness probe: always reports healthy along with the current UTC time.
    """
    log_message(
        component="controller",
        component_id="App",
        level="DEBUG",
        log_type="system",
        code="api.health_check",
        message="Health check accessed",
    )
    now_utc = datetime.now(timezone.utc)
    return {"status": "healthy", "timestamp": now_utc.isoformat()}
if __name__ == "__main__":
    import uvicorn
    log_message(
        component="controller",
        component_id="App",
        level="INFO",
        log_type="system",
        code="api.startup",
        message=f"Starting Master Server v{app.version}"
    )
    uvicorn.run(
        app,
        host="0.0.0.0",  # listen on all interfaces (container deployment)
        # Fix: honor the CONTROLLER_PORT env var (declared in .env.example)
        # instead of a hard-coded 8000; the old value remains the default.
        port=int(os.getenv("CONTROLLER_PORT", "8000")),
        # TLS material paths come from the environment (set in the Dockerfile);
        # None disables TLS when they are absent.
        ssl_keyfile=os.getenv("SSL_KEYFILE"),
        ssl_certfile=os.getenv("SSL_CERTFILE")
    )

View File

@@ -0,0 +1,81 @@
import os
from typing import Dict, Any
from functools import lru_cache
from pathlib import Path
# Python 3.11+ tomllib, older: pip install tomli
try:
import tomllib
except ImportError:
import tomli as tomllib
@lru_cache(maxsize=1)
def load_config(toml_path: str = "settings.toml") -> Dict[str, Any]:
"""Load TOML config with environment variable overrides"""
# Load base config
with open(toml_path, "rb") as f:
config = tomllib.load(f)
# Map env vars to nested config paths (env vars win)
overrides = {
"POSTGRES_SUPERUSER_NAME": ("server", "postgresDB", "superuserName"),
"POSTGRES_SUPERUSER_PASSWORD": ("server", "postgresDB", "superuserPassword"),
"POSTGRES_DATABASE_NAME": ("server", "postgresDB", "databaseName"),
"POSTGRES_IP": ("server", "postgresDB", "ip"),
"POSTGRES_PORT": ("server", "postgresDB", "port"),
"SCHEMA_FILE_PATH": ("server", "postgresDB", "schemaFile"),
}
# Apply overrides with type conversion
for env_var, path in overrides.items():
env_value = os.getenv(env_var)
if env_value is not None:
# Navigate to parent dict
target = config
for key in path[:-1]:
target = target[key]
# Preserve original type (e.g., port -> int)
current_value = target[path[-1]]
if isinstance(current_value, int):
env_value = int(env_value)
target[path[-1]] = env_value
return config
# Helper to extract DB config
# Helper to extract DB config
def get_db_config(config: Dict[str, Any]) -> Dict[str, Any]:
    """Build the psycopg2-style connection-parameter dict from the
    server.postgresDB section of a loaded config."""
    db_section = config["server"]["postgresDB"]
    return dict(
        host=db_section["ip"],
        database=db_section["databaseName"],
        user=db_section["superuserName"],
        password=db_section["superuserPassword"],
        port=db_section["port"],
    )
def sanitize_settings_toml(settings_path: Path) -> str:
"""
Reads a TOML file, removes sensitive sections, and returns the sanitized content as a string.
"""
try:
# Load the entire settings file
with open(settings_path, 'r') as f:
settings_data = tomllib.load(f)
# 1. Identify and remove sections that should not be exposed.
if 'server' in settings_data:
del settings_data['server']
sanitized_content = tomllib.dumps(settings_data)
return sanitized_content
except FileNotFoundError:
# This is handled in the main function, but good to have a specific error here
raise
except Exception as e:
raise RuntimeError("Failed to sanitize settings file content") from e

View File

@@ -0,0 +1,356 @@
-- ---------------------------------------------------------
-- 1. MACHINE MANAGEMENT
-- ---------------------------------------------------------
-- Registered worker machines; machine_name is the stable unique identity.
CREATE TABLE machines (
    machine_id SERIAL PRIMARY KEY,
    machine_name VARCHAR(100) NOT NULL UNIQUE,
    machine_type VARCHAR(50) CHECK (machine_type IN ('extractor', 'ocr', 'transcriber', 'tokenizer')),
    last_connected TIMESTAMPTZ DEFAULT NOW(),
    created_at TIMESTAMPTZ DEFAULT NOW()
);
-- Filter the fleet by role; find stale machines by last heartbeat.
CREATE INDEX idx_machines_type ON machines(machine_type);
CREATE INDEX idx_machines_last_connected ON machines(last_connected);
-- ---------------------------------------------------------
-- 2. EXTRACTION JOBS (Single table for all job states)
-- ---------------------------------------------------------
CREATE TYPE job_status AS ENUM ('pending', 'assigned', 'in_progress', 'completed', 'failed', 'cancelled');
CREATE TYPE job_type AS ENUM ('pdf_extract', 'ocr_process', 'audio_transcribe', 'video_extract', 'image_ocr', 'tokenize', 'metadata_enrich', 'scoring', 'summarize_process');
CREATE TABLE extraction_jobs (
    job_id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    document_id UUID NOT NULL, -- FK constraint added after documents table
    job_type job_type NOT NULL,
    extraction_version VARCHAR(6) NOT NULL,
    -- SET NULL keeps job history when a machine row is deleted.
    machine_id INTEGER REFERENCES machines(machine_id) ON DELETE SET NULL,
    status job_status DEFAULT 'pending',
    date_added TIMESTAMPTZ DEFAULT NOW(),
    assigned_at TIMESTAMPTZ, -- When a machine claimed the job
    start_date TIMESTAMPTZ, -- When actual processing began
    finished_date TIMESTAMPTZ,
    error_message TEXT,
    retry_count INTEGER DEFAULT 0,
    created_at TIMESTAMPTZ DEFAULT NOW(),
    updated_at TIMESTAMPTZ DEFAULT NOW()
);
-- Indexes for efficient job queue polling and monitoring
-- (status, date_added) serves the "oldest pending job" queue query.
CREATE INDEX idx_jobs_status_date ON extraction_jobs(status, date_added);
CREATE INDEX idx_jobs_machine_id ON extraction_jobs(machine_id);
CREATE INDEX idx_jobs_document_id ON extraction_jobs(document_id);
-- ---------------------------------------------------------
-- 3. CORE ENTITIES (People & Companies)
-- ---------------------------------------------------------
CREATE TABLE people (
    person_id SERIAL PRIMARY KEY,
    canonical_name TEXT NOT NULL UNIQUE, -- Primary name
    created_at TIMESTAMPTZ DEFAULT NOW()
);
-- NOTE(review): canonical_name is UNIQUE, which already creates a unique
-- index, so this second index is redundant overhead — consider dropping.
CREATE INDEX idx_people_name ON people(canonical_name);
-- People aliases (handles multiple aliases per person)
CREATE TABLE person_aliases (
    alias_id SERIAL PRIMARY KEY,
    person_id INTEGER REFERENCES people(person_id) ON DELETE CASCADE,
    alias_name TEXT NOT NULL,
    source TEXT, -- Where this alias was found: 'document_title', 'signature', 'metadata'
    confidence INTEGER CHECK (confidence BETWEEN 0 AND 100), -- Auto-extraction confidence
    created_at TIMESTAMPTZ DEFAULT NOW(),
    UNIQUE(person_id, alias_name)
);
CREATE INDEX idx_personaliases_name ON person_aliases(alias_name);
CREATE INDEX idx_personaliases_person ON person_aliases(person_id);
-- Core companies table
CREATE TABLE companies (
    company_id SERIAL PRIMARY KEY,
    canonical_name TEXT NOT NULL UNIQUE, -- Primary name (e.g., "Acme Corp, Inc.")
    entity_type VARCHAR(50), -- 'corporation', 'llc', 'non_profit', 'partnership'
    created_at TIMESTAMPTZ DEFAULT NOW()
);
-- NOTE(review): redundant with the UNIQUE index on canonical_name (see people).
CREATE INDEX idx_companies_name ON companies(canonical_name);
-- Company aliases (DBA, abbreviations, etc.)
CREATE TABLE company_aliases (
    alias_id SERIAL PRIMARY KEY,
    company_id INTEGER REFERENCES companies(company_id) ON DELETE CASCADE,
    alias_name TEXT NOT NULL, -- e.g., "ACI" for "Acme Corp, Inc."
    source TEXT,
    confidence INTEGER CHECK (confidence BETWEEN 0 AND 100),
    created_at TIMESTAMPTZ DEFAULT NOW(),
    UNIQUE(company_id, alias_name)
);
CREATE INDEX idx_companyaliases_name ON company_aliases(alias_name);
CREATE INDEX idx_companyaliases_company ON company_aliases(company_id);
-- Example query to find person with any alias
-- SELECT p.* FROM people p
-- LEFT JOIN person_aliases pa ON p.person_id = pa.person_id
-- WHERE p.canonical_name = 'John Doe' OR pa.alias_name = 'J. Doe';
-- ---------------------------------------------------------
-- 4. MAIN DOCUMENTS TABLE
-- ---------------------------------------------------------
CREATE TABLE documents (
    document_id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    original_filename TEXT NOT NULL,
    document_type VARCHAR(50) NOT NULL CHECK (document_type IN ('pdf', 'doc', 'docx', 'txt', 'audio', 'image', 'video', 'transcription', 'email', 'email_metadata')),
    -- Extraction tracking
    extraction_version VARCHAR(6),
    machine_id INTEGER REFERENCES machines(machine_id) ON DELETE SET NULL,
    extraction_score INTEGER CHECK (extraction_score BETWEEN 0 AND 100),
    extraction_status VARCHAR(20) DEFAULT 'pending' CHECK (extraction_status IN ('pending', 'success', 'failed', 'partial')),
    extraction_error TEXT,
    -- Content storage
    content_markdown TEXT, -- NULL for non-text docs; may be truncated for huge docs
    summary TEXT,
    key_points JSONB, -- Array of strings
    -- Tokenization data (flexible JSON for future expansion)
    token_count_json JSONB, -- {tokenizer: "cl100k_base", total_tokens: 12345, tokenization_machine_id: 1, chunk_tokens: [1000, 1500, ...]}
    -- File metadata
    file_size_bytes BIGINT,
    file_hash VARCHAR(64) UNIQUE, -- SHA-256 for deduplication & integrity
    storage_path TEXT NOT NULL, -- S3 path or local filepath
    file_type TEXT NOT NULL,
    -- Flexible metadata by document type
    metadata JSONB, -- Type-specific: page_count, duration_seconds, image_dimensions, etc.
    -- Timestamps
    created_at TIMESTAMPTZ DEFAULT NOW(),
    updated_at TIMESTAMPTZ DEFAULT NOW(),
    -- Full-text search vector
    fts TSVECTOR
);
-- BUG FIX: section 2 promised this FK "after documents table" but it was
-- never added. CASCADE removes a document's jobs with the document;
-- change to RESTRICT if job history must outlive documents.
ALTER TABLE extraction_jobs
    ADD CONSTRAINT fk_extraction_jobs_document
    FOREIGN KEY (document_id) REFERENCES documents(document_id) ON DELETE CASCADE;
-- Core indexes
CREATE INDEX idx_documents_type ON documents(document_type);
-- (idx_documents_hash removed: the UNIQUE constraint on file_hash already
-- creates a unique index, so a second index on the same column was pure
-- write overhead.)
CREATE INDEX idx_documents_extraction_status ON documents(extraction_status);
CREATE INDEX idx_documents_machine_id ON documents(machine_id);
CREATE INDEX idx_documents_metadata_gin ON documents USING GIN (metadata);
CREATE INDEX idx_token_count_json ON documents USING GIN (token_count_json);
-- FTS index
CREATE INDEX idx_fts ON documents USING GIN (fts);
-- Expression indexes for common metadata queries
-- NOTE(review): these index the ->> *text* value; numeric range queries must
-- use the identical expression (or cast, e.g. (metadata->>'page_count')::int,
-- and index that) for the planner to use them — confirm against real queries.
CREATE INDEX idx_pdf_pages ON documents((metadata->>'page_count')) WHERE document_type = 'pdf';
CREATE INDEX idx_audio_duration ON documents((metadata->>'duration_seconds')) WHERE document_type = 'audio';
-- ---------------------------------------------------------
-- 5. DOCUMENT CHUNKS (For large documents >10MB)
-- ---------------------------------------------------------
-- chunk_fts is auto-populated by the trigger in section 9.
CREATE TABLE document_chunks (
    chunk_id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    document_id UUID NOT NULL REFERENCES documents(document_id) ON DELETE CASCADE,
    chunk_order INTEGER NOT NULL,
    chunk_markdown TEXT NOT NULL,
    chunk_fts TSVECTOR,
    token_count INTEGER, -- Token count for this specific chunk
    created_at TIMESTAMPTZ DEFAULT NOW(),
    UNIQUE (document_id, chunk_order)
);
CREATE INDEX idx_chunks_document ON document_chunks(document_id);
CREATE INDEX idx_chunks_fts ON document_chunks USING GIN (chunk_fts);
-- ---------------------------------------------------------
-- 6. LEGAL-SPECIFIC METADATA (Normalized for fast querying)
-- ---------------------------------------------------------
-- 1:1 extension of documents (PK doubles as the FK); deleted with its document.
CREATE TABLE document_legal_metadata (
    document_id UUID PRIMARY KEY REFERENCES documents(document_id) ON DELETE CASCADE,
    case_number VARCHAR(100),
    court_name VARCHAR(200),
    filing_date DATE,
    document_category VARCHAR(50) CHECK (document_category IN ('motion', 'brief', 'transcript', 'evidence', 'exhibit', 'order', 'correspondence')),
    bates_start VARCHAR(50),
    bates_end VARCHAR(50),
    privilege_level VARCHAR(20) DEFAULT 'public' CHECK (privilege_level IN ('public', 'redacted', 'sealed', 'confidential')),
    filing_party TEXT,
    judge_name TEXT,
    exhibit_number VARCHAR(50),
    redaction_details JSONB -- Array of {page: int, reason: string, original_text: string}
);
CREATE INDEX idx_legal_case_number ON document_legal_metadata(case_number);
CREATE INDEX idx_legal_filing_date ON document_legal_metadata(filing_date);
CREATE INDEX idx_legal_privilege ON document_legal_metadata(privilege_level);
-- ---------------------------------------------------------
-- 7. JUNCTION TABLES (Many-to-Many Relationships)
-- ---------------------------------------------------------
-- Document-People (with context)
-- Composite PK gives fast document->people lookup; the extra index covers
-- the reverse (person->documents) direction.
CREATE TABLE document_people (
    document_id UUID REFERENCES documents(document_id) ON DELETE CASCADE,
    person_id INTEGER REFERENCES people(person_id) ON DELETE CASCADE,
    mention_context TEXT, -- Snippet where mentioned
    page_number INTEGER,
    PRIMARY KEY (document_id, person_id)
);
CREATE INDEX idx_docpeople_person ON document_people(person_id);
-- Document-Companies (with context)
CREATE TABLE document_companies (
    document_id UUID REFERENCES documents(document_id) ON DELETE CASCADE,
    company_id INTEGER REFERENCES companies(company_id) ON DELETE CASCADE,
    mention_context TEXT,
    page_number INTEGER,
    PRIMARY KEY (document_id, company_id)
);
CREATE INDEX idx_doccompanies_company ON document_companies(company_id);
-- Document-Tag relationships
CREATE TABLE tags (
    tag_id SERIAL PRIMARY KEY,
    tag_name VARCHAR(100) NOT NULL UNIQUE,
    tag_category VARCHAR(50), -- 'case', 'evidence_type', 'privilege', 'topic'
    created_by TEXT,
    created_at TIMESTAMPTZ DEFAULT NOW()
);
CREATE TABLE document_tags (
    document_id UUID REFERENCES documents(document_id) ON DELETE CASCADE,
    tag_id INTEGER REFERENCES tags(tag_id) ON DELETE CASCADE,
    confidence_score INTEGER CHECK (confidence_score BETWEEN 0 AND 100), -- Auto-tagging confidence
    PRIMARY KEY (document_id, tag_id)
);
CREATE INDEX idx_doctags_tag ON document_tags(tag_id);
-- Document relations (self-referential)
-- NOTE(review): only one row per (source, target) pair is allowed, so two
-- different relation types between the same documents cannot coexist —
-- confirm that is intended.
CREATE TABLE document_relations (
    source_doc_id UUID REFERENCES documents(document_id) ON DELETE CASCADE,
    target_doc_id UUID REFERENCES documents(document_id) ON DELETE CASCADE,
    relation_type VARCHAR(50) NOT NULL CHECK (relation_type IN ('cites', 'supersedes', 'appendix', 'exhibit', 'response-to', 'attachment', 'amends')),
    PRIMARY KEY (source_doc_id, target_doc_id)
);
CREATE INDEX idx_docrelations_target ON document_relations(target_doc_id);
-- ---------------------------------------------------------
-- 8. EXTRACTED ENTITIES (Locations & Dates)
-- ---------------------------------------------------------
CREATE TABLE locations (
    location_id SERIAL PRIMARY KEY,
    name TEXT NOT NULL UNIQUE,
    location_type VARCHAR(50), -- 'city', 'court', 'address', 'state'
    coordinates POINT, -- For mapping
    created_at TIMESTAMPTZ DEFAULT NOW()
);
CREATE TABLE document_locations (
    document_id UUID REFERENCES documents(document_id) ON DELETE CASCADE,
    location_id INTEGER REFERENCES locations(location_id) ON DELETE CASCADE,
    context_snippet TEXT,
    page_number INTEGER,
    PRIMARY KEY (document_id, location_id)
);
CREATE INDEX idx_doclocations_location ON document_locations(location_id);
-- Dates mentioned in documents
-- NOTE(review): PK (document_id, date_mentioned) stores at most one row per
-- date per document, so repeated mentions of the same date (with different
-- context/page/type) collapse to one — confirm that is acceptable.
CREATE TABLE document_dates (
    document_id UUID REFERENCES documents(document_id) ON DELETE CASCADE,
    date_mentioned DATE NOT NULL,
    context TEXT,
    page_number INTEGER,
    date_type VARCHAR(30), -- 'filing', 'incident', 'hearing', 'signature'
    PRIMARY KEY (document_id, date_mentioned)
);
CREATE INDEX idx_docdates_date ON document_dates(date_mentioned);
CREATE INDEX idx_docdates_type ON document_dates(date_type);
-- ---------------------------------------------------------
-- 9. TRIGGERS & AUTO-UPDATES
-- ---------------------------------------------------------
-- Touch updated_at on every row UPDATE.
CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = NOW();
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER update_documents_updated_at BEFORE UPDATE ON documents
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
CREATE TRIGGER update_jobs_updated_at BEFORE UPDATE ON extraction_jobs
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
-- Auto-populate fts vector
-- Indexes filename + markdown body together with the 'english' config.
CREATE OR REPLACE FUNCTION update_fts_column()
RETURNS TRIGGER AS $$
BEGIN
    NEW.fts = to_tsvector('english', COALESCE(NEW.original_filename, '') || ' ' || COALESCE(NEW.content_markdown, ''));
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER update_fts_before_insert_update BEFORE INSERT OR UPDATE ON documents
    FOR EACH ROW EXECUTE FUNCTION update_fts_column();
-- Chunk FTS trigger
CREATE OR REPLACE FUNCTION update_chunk_fts_column()
RETURNS TRIGGER AS $$
BEGIN
    NEW.chunk_fts = to_tsvector('english', COALESCE(NEW.chunk_markdown, ''));
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER update_chunk_fts_before_insert_update BEFORE INSERT OR UPDATE ON document_chunks
    FOR EACH ROW EXECUTE FUNCTION update_chunk_fts_column();
-- ---------------------------------------------------------
-- 10. API KEY MANAGEMENT
-- ---------------------------------------------------------
CREATE TABLE api_keys (
    key_id SERIAL PRIMARY KEY,
    name VARCHAR(100) NOT NULL,
    key_prefix VARCHAR(8) NOT NULL,
    key_hash VARCHAR(255) NOT NULL, -- bcrypt hash
    created_at TIMESTAMPTZ DEFAULT NOW(),
    last_used_at TIMESTAMPTZ,
    revoked_at TIMESTAMPTZ, -- NULL = active, set = revoked
    scopes JSONB,
    created_by TEXT, -- Who generated the key (audit trail)
    total_uses INTEGER DEFAULT 0,
    description TEXT -- Optional notes about key purpose
);
-- Fast lookup by key hash during authentication
-- NOTE(review): bcrypt hashes are salted, so auth cannot recompute a hash to
-- look it up by equality — lookup presumably goes prefix-first (index below)
-- followed by a bcrypt compare; this unique index then mainly enforces
-- uniqueness. Confirm against the auth code path.
CREATE UNIQUE INDEX idx_api_keys_key_hash ON api_keys(key_hash);
-- Fast lookup by prefix (first 8 chars) for indexing/organization
CREATE INDEX idx_api_keys_prefix ON api_keys(key_prefix);
-- Track keys by creator for audit/compliance
CREATE INDEX idx_api_keys_created_by ON api_keys(created_by);
-- Efficient querying of only active keys
CREATE INDEX idx_api_keys_active ON api_keys(revoked_at) WHERE revoked_at IS NULL;

1396
controller/dbhandler.py Normal file

File diff suppressed because it is too large Load Diff

155
controller/logger.py Normal file
View File

@@ -0,0 +1,155 @@
import os
import logging
from logging.handlers import RotatingFileHandler
from datetime import datetime
from pathlib import Path
from typing import List, Dict, Any, Optional
LOG_CONFIG: Dict[str, Any] = {}
class LogEntry:
    """A single structured log record parsed from a log file line."""

    def __init__(self,
                 timestamp: datetime,
                 level: str,
                 log_type: str,
                 code: str,
                 content: str):
        # Fields are stored verbatim; no validation happens here.
        self.timestamp = timestamp
        self.level = level
        self.log_type = log_type
        self.code = code
        self.content = content

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-friendly dict (timestamp as ISO-8601 string)."""
        fields = [
            ("timestamp", self.timestamp.isoformat()),
            ("level", self.level),
            ("type", self.log_type),
            ("code", self.code),
            ("content", self.content),
        ]
        return dict(fields)
class CustomFormatter(logging.Formatter):
    """Custom formatter: datetime~level~type~code~content"""

    def format(self, record):
        # Records logged without extra={'log_type': ..., 'code': ...} fall
        # back to generic values so the line stays parseable.
        fields = [
            datetime.fromtimestamp(record.created).isoformat(),
            record.levelname,
            getattr(record, 'log_type', 'app'),
            getattr(record, 'code', 'GENERAL'),
            # Escape literal '~' in the message so split('~', 4) round-trips.
            super().format(record).replace('~', '\\~'),
        ]
        return "~".join(fields)
def setup_logging(config: Dict[str, Any]) -> None:
    """Store the logging config module-wide and create the log directory.

    Must be called before get_logger()/log_message(), which read LOG_CONFIG.
    """
    global LOG_CONFIG
    LOG_CONFIG = config
    log_root = Path(config['log_directory'])
    log_root.mkdir(parents=True, exist_ok=True)
def get_logger(component: str, component_id: Optional[str] = None) -> logging.Logger:
    """Return the logger for a component, attaching a rotating file handler
    on first use (subsequent calls reuse the already-configured logger).

    Raises:
        ValueError: unknown component, or missing id for agents/tasks.
    """
    if component == "controller":
        logger_name = "controller"
        log_file = Path(LOG_CONFIG['log_directory']) / "controller.log"
    elif component == "agents":
        if not component_id:
            raise ValueError("Agent ID required")
        logger_name = f"agent.{component_id}"
        log_file = Path(LOG_CONFIG['log_directory']) / "agents" / f"{component_id}.log"
    elif component == "tasks":
        if not component_id:
            raise ValueError("Task ID required")
        logger_name = f"task.{component_id}"
        log_file = Path(LOG_CONFIG['log_directory']) / "tasks" / f"{component_id}.log"
    else:
        raise ValueError(f"Unknown component: {component}")

    logger = logging.getLogger(logger_name)
    if logger.handlers:
        # Already configured on a previous call.
        return logger

    logger.propagate = False
    logger.setLevel(logging.DEBUG)
    log_file.parent.mkdir(parents=True, exist_ok=True)
    handler = RotatingFileHandler(
        log_file,
        maxBytes=LOG_CONFIG['max_file_size_mb'] * 1024 * 1024,
        backupCount=5,
        encoding='utf-8')
    handler.setFormatter(CustomFormatter())
    logger.addHandler(handler)
    return logger
def log_message(component: str,
                component_id: Optional[str],
                level: str,
                log_type: str,
                code: str,
                message: str
                ) -> None:
    """Log a structured message
    Args:
        component (str): Which component is logging ~ controller, agents, tasks
        component_id (Optional[str]): ID of agent or task if applicable
        level (str): What level ~ DEBUG, INFO, WARNING, ERROR, CRITICAL
        log_type (str): What type ~ app, audit, security, system
        code (str): Code for categorizing the log ~ e.g., db.createtable or api.health_check
        message (str): The log message content
    """
    # Unrecognized level strings deliberately fall back to INFO.
    numeric_level = {
        "DEBUG": logging.DEBUG,
        "INFO": logging.INFO,
        "WARNING": logging.WARNING,
        "ERROR": logging.ERROR,
        "CRITICAL": logging.CRITICAL,
    }.get(level.upper(), logging.INFO)
    target_logger = get_logger(component, component_id)
    target_logger.log(numeric_level, message,
                      extra={"log_type": log_type, "code": code})
def get_logs(component: str, component_id: Optional[str] = None,
             level_filter: Optional[str] = None, type_filter: Optional[str] = None,
             tail: int = 100) -> List[Dict[str, Any]]:
    """Read the last `tail` lines of a component's log file and return the
    parseable entries as dicts, optionally filtered by level and/or type.

    Raises:
        ValueError: unknown component, or missing id for agents/tasks.
    """
    if component == "controller":
        log_file = Path(LOG_CONFIG['log_directory']) / "controller.log"
    elif component == "agents":
        if not component_id:
            raise ValueError("Agent ID required")
        log_file = Path(LOG_CONFIG['log_directory']) / "agents" / f"{component_id}.log"
    elif component == "tasks":
        if not component_id:
            raise ValueError("Task ID required")
        log_file = Path(LOG_CONFIG['log_directory']) / "tasks" / f"{component_id}.log"
    else:
        raise ValueError(f"Unknown component: {component}")

    if not log_file.exists():
        return []

    with open(log_file, 'r', encoding='utf-8') as handle:
        recent = handle.readlines()[-tail:]

    results: List[Dict[str, Any]] = []
    for raw in recent:
        fields = raw.strip().split('~', 4)
        if len(fields) != 5:
            continue  # malformed line — skip silently
        ts, level, log_type, code, content = fields
        content = content.replace('\\~', '~')  # undo CustomFormatter escaping
        if level_filter and level.upper() != level_filter.upper():
            continue
        if type_filter and log_type.lower() != type_filter.lower():
            continue
        try:
            parsed_ts = datetime.fromisoformat(ts)
        except ValueError:
            continue  # unparseable timestamp — skip
        results.append(LogEntry(parsed_ts, level, log_type, code, content).to_dict())
    return results

View File

@@ -0,0 +1,7 @@
fastapi==0.104.1
uvicorn[standard]==0.24.0
pydantic==2.5.0
requests==2.31.0
psycopg2-binary==2.9.11
tomli==2.3.0
bcrypt==5.0.0

33
controller/scopes.py Normal file
View File

@@ -0,0 +1,33 @@
from enum import Enum
class Scopes(str, Enum):
    """All available API scopes/permissions"""
    # Admin - grants all permissions
    ADMIN = "admin"
    # Worker operations
    WORKER_REGISTER = "worker:register"
    WORKER_READ = "worker:read"
    WORKER_COMMAND = "worker:command"
    # Logging
    LOGS_READ = "logs:read"
    LOGS_WRITE = "logs:write"
    # Database
    DB_INIT = "db:initialize"
    DB_MIGRATE = "db:migrate"  # For future use
    # Settings
    SETTINGS_READ = "settings:read"
    SETTINGS_WRITE = "settings:write"
    # System
    CREATE_API_KEY = "system:create_api_key"
    REVOKE_API_KEY = "system:revoke_api_key"


# Helper for all non-admin scopes
# BUG FIX: the previous comprehension iterated over every member, so "admin"
# ended up in ALL_SCOPES and appeared twice in ADMIN_SCOPES.
ALL_SCOPES = [scope.value for scope in Scopes if scope is not Scopes.ADMIN]
# Admin automatically gets all scopes (each exactly once)
ADMIN_SCOPES = ALL_SCOPES + [Scopes.ADMIN.value]

20
controller/settings.toml Normal file
View File

@@ -0,0 +1,20 @@
[shared]
controller_address = "http://100.73.204.34:8000"
[shared.logging]
log_directory = "/logs"
max_file_size_mb = 10
log_types = ["app", "audit", "security", "system"]
log_levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
[server]
[server.postgresDB]
databaseName = "epistine_controller_db"
superuserName = "Saturday6170"
superuserPassword = "Rule-Calorie9-Underhand"
ip = '100.103.185.76'
port = 5432
schemaFile = 'data/schema/version1.sql'
[server.security]
[agent]
[task]

30
webserver/.dockerignore Normal file
View File

@@ -0,0 +1,30 @@
# Git
.git
.gitignore
# Python
__pycache__/
*.py[cod]
*$py.class
*.so
.Python
env/
venv/
*.egg-info/
.pytest_cache/
.mypy_cache/
# IDE
.vscode/
.idea/
# OS
.DS_Store
Thumbs.db
# Docker
Dockerfile
.dockerignore
.env
*.local

3
webserver/.env.example Normal file
View File

@@ -0,0 +1,3 @@
API_KEY=your_api_key
CONTROLLER_URL=http://localhost:8000
API_VERSION=v1

29
webserver/Dockerfile Normal file
View File

@@ -0,0 +1,29 @@
# Use official Python slim image as base
FROM python:3.11-slim
# Set working directory inside container
WORKDIR /app
# Install system dependencies (only if needed later)
# RUN apt-get update && apt-get install -y --no-install-recommends \
#     gcc \
#     && rm -rf /var/lib/apt/lists/*
# Copy requirements first for better caching
COPY requirements.txt .
# Install Python dependencies
RUN pip install --no-cache-dir -r requirements.txt
# Copy entire application
COPY . .
# Expose Streamlit's default port
EXPOSE 8501
# Start Streamlit with proper configuration
# NOTE(review): XSRF protection is disabled below — tolerable on a trusted
# LAN behind a proxy, but confirm before exposing this port more widely.
CMD ["streamlit", "run", "Home.py", \
    "--server.port=8501", \
    "--server.address=0.0.0.0", \
    "--browser.gatherUsageStats=false", \
    "--server.enableXsrfProtection=false"]

5
webserver/Home.py Normal file
View File

@@ -0,0 +1,5 @@
import streamlit as st
import requests  # NOTE(review): unused in this page — candidate for removal
# Landing page: sets page chrome only; functionality lives in the pages/
# scripts Streamlit lists in the sidebar.
st.set_page_config(page_title="API Dashboard", layout="wide")
st.title("My API Dashboard")
st.info("Select a section from the sidebar")

8
webserver/Util.py Normal file
View File

@@ -0,0 +1,8 @@
import os
controller_url = os.getenv("CONTROLLER_URL", "http://localhost:8000")
api_version = os.getenv("API_VERSION", "v1")
# Base URL every page shares, e.g. http://localhost:8000/api/v1
combined_url = f"{controller_url}/api/{api_version}"


def get_endpoint(endpoint: str) -> str:
    """Return the absolute URL for a relative API endpoint name."""
    return "/".join((combined_url, endpoint))

View File

@@ -0,0 +1,6 @@
import streamlit as st
import requests
st.title("📊 Data Explorer")
# NOTE(review): "http://your-api.com/data" is a placeholder URL, and this GET
# runs on every page load with no timeout or error handling — replace with
# Util.get_endpoint(...) plus try/except like the other pages before shipping.
response = requests.get("http://your-api.com/data")
st.dataframe(response.json())  # Built-in search, filter, download

View File

@@ -0,0 +1,75 @@
import requests
import streamlit as st
import os
import Util

# Admin page: one form to trigger DB initialization on the controller and
# one form to mint a new machine API key.
st.title("🤖 Agent Control")
st.header("Initialize DB")
with st.form("Initialize DB"):
    if st.form_submit_button("Initialize Database"):
        try:
            api_key = os.getenv("API_KEY", "your_api_key")
            # NOTE(review): endpoint name "initializeDBinit" looks like a
            # typo/duplication — confirm against the controller's routes.
            response = requests.post(Util.get_endpoint("initializeDBinit"),
                                     headers={"FilesManager-API-Key": api_key},
                                     timeout=5  # Don't hang forever
                                     )
            response.raise_for_status()  # Raises error for 4xx/5xx
            st.success("✅ Database initialized!")
        except requests.exceptions.RequestException as e:
            st.error(f"❌ Failed: {e}")
            # NOTE(review): e.response is None for connection errors
            # (hasattr(None, 'json') is False, so that case is safe), but
            # when a response exists .json() can still raise on a non-JSON
            # error body — consider guarding with try/except.
            if hasattr(e.response, 'json'):
                st.json(e.response.json())  # Show API's error details
st.markdown("---")
st.header("Create New Machine API Key")
with st.form("new_machine"):
    name = st.text_input("Name", value="My Machine")
    created_by = st.text_input("Created By", value="Admin")
    description = st.text_area("Description", value="This is my machine.")
    # NOTE(review): these literals duplicate webserver/scopes.py — consider
    # importing from there so the two lists cannot drift apart.
    scopes = st.multiselect(
        "Scopes",
        options=[
            "worker:register",
            "worker:read",
            "worker:command",
            "logs:read",
            "logs:write",
            "db:initialize",
            "db:migrate",
            "settings:read",
            "settings:write",
            "system:create_api_key",
            "system:revoke_api_key",
        ],
        default=[
            "worker:register",
            "worker:read",
            "worker:command",
        ],
    )
    if st.form_submit_button("Create API Key"):
        try:
            api_key = os.getenv("API_KEY", "your_api_key")
            response = requests.post(Util.get_endpoint("createAPIKey"),
                                     json={"name": name, "created_by": created_by, "description": description, "scopes": scopes},
                                     headers={"FilesManager-API-Key": api_key},
                                     timeout=5  # Don't hang forever
                                     )
            response.raise_for_status()  # Raises error for 4xx/5xx
            result = response.json()
            st.success("✅ Key created!")
            # The plaintext key is only returned once at creation time.
            st.code(result['api_key'], language='text')  # Copyable format
            st.warning("⚠️ Save this key now - you won't see it again!")
        except requests.exceptions.RequestException as e:
            st.error(f"❌ Failed: {e}")
            if hasattr(e.response, 'json'):
                st.json(e.response.json())  # Show API's error details

View File

@@ -0,0 +1,9 @@
import requests
import streamlit as st
import os
import Util

st.title("📜 Logs")
# BUG FIX: this page referenced `combined_url`, which was never defined in
# this file, so it crashed with NameError on load. The base URL lives in
# Util, so build the endpoint the same way the other pages do.
# NOTE(review): the endpoint name "logs" and the {"component": ...} payload
# are carried over as best guesses — confirm against the controller's routes.
try:
    response = requests.get(Util.get_endpoint("logs"),
                            json={"component": "component"},
                            timeout=5  # Don't hang forever
                            )
    response.raise_for_status()
    st.code(response.text, language='log')  # Scrollable, copyable
except requests.exceptions.RequestException as e:
    st.error(f"❌ Failed to fetch logs: {e}")

View File

@@ -0,0 +1,3 @@
streamlit>=1.29.0
requests>=2.31.0
pandas>=2.0.0

33
webserver/scopes.py Normal file
View File

@@ -0,0 +1,33 @@
from enum import Enum
class Scopes(str, Enum):
    """All available API scopes/permissions"""
    # Admin - grants all permissions
    ADMIN = "admin"
    # Worker operations
    WORKER_REGISTER = "worker:register"
    WORKER_READ = "worker:read"
    WORKER_COMMAND = "worker:command"
    # Logging
    LOGS_READ = "logs:read"
    LOGS_WRITE = "logs:write"
    # Database
    DB_INIT = "db:initialize"
    DB_MIGRATE = "db:migrate"  # For future use
    # Settings
    SETTINGS_READ = "settings:read"
    SETTINGS_WRITE = "settings:write"
    # System
    CREATE_API_KEY = "system:create_api_key"
    REVOKE_API_KEY = "system:revoke_api_key"


# Helper for all non-admin scopes
# BUG FIX: the previous comprehension iterated over every member, so "admin"
# ended up in ALL_SCOPES and appeared twice in ADMIN_SCOPES.
ALL_SCOPES = [scope.value for scope in Scopes if scope is not Scopes.ADMIN]
# Admin automatically gets all scopes (each exactly once)
ADMIN_SCOPES = ALL_SCOPES + [Scopes.ADMIN.value]