chore: initialize repository with deployment baseline
This commit is contained in:
1
.gitattributes
vendored
Normal file
1
.gitattributes
vendored
Normal file
@@ -0,0 +1 @@
|
||||
*.sh text eol=lf
|
||||
99
.github/workflows/ci.yml
vendored
Normal file
99
.github/workflows/ci.yml
vendored
Normal file
@@ -0,0 +1,99 @@
|
||||
name: CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- "**"
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
frontend:
|
||||
name: Frontend Build + Lint
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 22
|
||||
cache: npm
|
||||
|
||||
- name: Install frontend dependencies
|
||||
run: npm ci
|
||||
|
||||
- name: Lint frontend
|
||||
run: npm run lint
|
||||
|
||||
- name: Build frontend
|
||||
run: npm run build
|
||||
|
||||
backend:
|
||||
name: Backend Build + Test + Prisma Checks
|
||||
runs-on: ubuntu-latest
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:16-alpine
|
||||
env:
|
||||
POSTGRES_USER: proxpanel
|
||||
POSTGRES_PASSWORD: proxpanel
|
||||
POSTGRES_DB: proxpanel
|
||||
ports:
|
||||
- 5432:5432
|
||||
options: >-
|
||||
--health-cmd "pg_isready -U proxpanel -d proxpanel"
|
||||
--health-interval 10s
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
env:
|
||||
DATABASE_URL: postgresql://proxpanel:proxpanel@localhost:5432/proxpanel?schema=public
|
||||
SHADOW_DATABASE_URL: postgresql://proxpanel:proxpanel@localhost:5432/proxpanel_shadow?schema=public
|
||||
JWT_SECRET: ci_super_secret_key_for_testing_12345
|
||||
JWT_REFRESH_SECRET: ci_super_refresh_secret_key_67890
|
||||
CORS_ORIGIN: http://localhost:5173
|
||||
NODE_ENV: test
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 22
|
||||
cache: npm
|
||||
cache-dependency-path: backend/package-lock.json
|
||||
|
||||
- name: Install backend dependencies
|
||||
working-directory: backend
|
||||
run: npm ci
|
||||
|
||||
- name: Prepare shadow database
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y postgresql-client
|
||||
PGPASSWORD=proxpanel psql -h localhost -U proxpanel -d proxpanel -c 'CREATE DATABASE proxpanel_shadow;'
|
||||
|
||||
- name: Prisma generate
|
||||
working-directory: backend
|
||||
run: npm run prisma:generate
|
||||
|
||||
- name: Prisma validate
|
||||
working-directory: backend
|
||||
run: npm run prisma:validate
|
||||
|
||||
- name: Prisma migrate deploy
|
||||
working-directory: backend
|
||||
run: npm run prisma:deploy
|
||||
|
||||
- name: Prisma migration drift check
|
||||
working-directory: backend
|
||||
run: npx prisma migrate diff --from-migrations prisma/migrations --to-schema-datamodel prisma/schema.prisma --shadow-database-url "$SHADOW_DATABASE_URL" --exit-code
|
||||
|
||||
- name: Build backend
|
||||
working-directory: backend
|
||||
run: npm run build
|
||||
|
||||
- name: Run backend tests
|
||||
working-directory: backend
|
||||
run: npm run test
|
||||
43
.gitignore
vendored
Normal file
43
.gitignore
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
# dependencies
|
||||
node_modules
|
||||
|
||||
# build outputs
|
||||
dist
|
||||
dist-ssr
|
||||
.vite
|
||||
backend/dist
|
||||
|
||||
# environment
|
||||
.env
|
||||
.env.*
|
||||
backend/.env
|
||||
backend/.env.*
|
||||
|
||||
# logs
|
||||
logs
|
||||
*.log
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
pnpm-debug.log*
|
||||
|
||||
# ide
|
||||
.vscode/*
|
||||
!.vscode/extensions.json
|
||||
.idea
|
||||
.DS_Store
|
||||
*.suo
|
||||
*.njsproj
|
||||
*.sln
|
||||
*.sw?
|
||||
|
||||
# deployment artifacts / backups
|
||||
backups/
|
||||
_deploy_bundle.tar.gz
|
||||
|
||||
# local secret/material files
|
||||
Proxmox_API_Token.txt
|
||||
myProx_template_ssh_key.txt
|
||||
more_dev_work.txt
|
||||
audit.md
|
||||
proxpanel-report.md
|
||||
5
.vscode/extensions.json
vendored
Normal file
5
.vscode/extensions.json
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"recommendations": [
|
||||
"codeium.codeium"
|
||||
]
|
||||
}
|
||||
72
API.md
Normal file
72
API.md
Normal file
@@ -0,0 +1,72 @@
|
||||
# API Documentation (Core)
|
||||
|
||||
Base URL: `http://<host>:8080`
|
||||
|
||||
## Health
|
||||
|
||||
- `GET /api/health`
|
||||
|
||||
## Auth
|
||||
|
||||
- `POST /api/auth/login`
|
||||
- Body: `{ "email": "user@example.com", "password": "..." }`
|
||||
- Returns: `{ token, refresh_token, user }`
|
||||
- `POST /api/auth/refresh`
|
||||
- Body: `{ "refresh_token": "..." }`
|
||||
- Returns: `{ token, refresh_token }`
|
||||
- `GET /api/auth/me` (Bearer token)
|
||||
|
||||
## Proxmox Operations
|
||||
|
||||
- `POST /api/proxmox/sync`
|
||||
- `POST /api/proxmox/vms/:id/actions/:action`
|
||||
- `POST /api/proxmox/vms/:id/migrate`
|
||||
- `PATCH /api/proxmox/vms/:id/config`
|
||||
- `PATCH /api/proxmox/vms/:id/network`
|
||||
- `POST /api/proxmox/vms/:id/disks`
|
||||
- `POST /api/proxmox/vms/:id/reinstall`
|
||||
- `GET /api/proxmox/vms/:id/console`
|
||||
- `GET /api/proxmox/vms/:id/usage-graphs?timeframe=hour|day|week|month|year`
|
||||
- `GET /api/proxmox/nodes/:id/usage-graphs?timeframe=hour|day|week|month|year`
|
||||
- `GET /api/proxmox/cluster/usage-graphs?timeframe=hour|day|week|month|year`
|
||||
|
||||
## Resources API
|
||||
|
||||
Generic secured resource endpoints:
|
||||
|
||||
- `GET /api/resources/:resource`
|
||||
- `GET /api/resources/:resource/:id`
|
||||
- `POST /api/resources/:resource`
|
||||
- `PATCH /api/resources/:resource/:id`
|
||||
- `DELETE /api/resources/:resource/:id`
|
||||
|
||||
Tenant scope protections are enforced for tenant-scoped resources.
|
||||
|
||||
## Client Area
|
||||
|
||||
- `GET /api/client/overview`
|
||||
- `GET /api/client/usage-trends`
|
||||
- `GET /api/client/machines`
|
||||
- `POST /api/client/machines`
|
||||
- `PATCH /api/client/machines/:vmId/resources`
|
||||
- `POST /api/client/machines/:vmId/power-schedules`
|
||||
- `POST /api/client/machines/:vmId/backup-schedules`
|
||||
- `GET /api/client/firewall/rules`
|
||||
- `POST /api/client/firewall/rules`
|
||||
- `PATCH /api/client/firewall/rules/:id`
|
||||
- `DELETE /api/client/firewall/rules/:id`
|
||||
|
||||
## Monitoring
|
||||
|
||||
- `GET /api/monitoring/overview`
|
||||
- `GET /api/monitoring/health-checks`
|
||||
- `POST /api/monitoring/health-checks`
|
||||
- `GET /api/monitoring/alerts/events`
|
||||
- `GET /api/monitoring/insights/faulty-deployments`
|
||||
- `GET /api/monitoring/insights/cluster-forecast`
|
||||
|
||||
## Rate Limiting
|
||||
|
||||
- Global API rate limiting is enabled.
|
||||
- Auth endpoints use stricter limits.
|
||||
- When exceeded, API returns HTTP `429`.
|
||||
98
DEPLOYMENT.md
Normal file
98
DEPLOYMENT.md
Normal file
@@ -0,0 +1,98 @@
|
||||
# ProxPanel Deployment Guide (Production Ubuntu)
|
||||
|
||||
## 1) Hands-Free Production Install (Recommended)
|
||||
|
||||
Run this on your Ubuntu server:
|
||||
|
||||
```bash
|
||||
sudo apt-get update -y
|
||||
sudo apt-get install -y git
|
||||
git clone <YOUR_REPO_URL> /opt/proxpanel
|
||||
cd /opt/proxpanel
|
||||
sudo bash infra/deploy/install-proxpanel.sh \
|
||||
--branch main \
|
||||
--public-url http://102.69.243.167 \
|
||||
--admin-email admin@yourdomain.com \
|
||||
--configure-ufw
|
||||
```
|
||||
|
||||
If the repo already exists on the server, just run:
|
||||
|
||||
```bash
|
||||
cd /opt/proxpanel
|
||||
sudo bash infra/deploy/install-proxpanel.sh \
|
||||
--branch main \
|
||||
--public-url http://102.69.243.167 \
|
||||
--admin-email admin@yourdomain.com \
|
||||
--configure-ufw
|
||||
```
|
||||
|
||||
Installer behavior:
|
||||
- Installs Docker + prerequisites.
|
||||
- Builds and starts PostgreSQL, backend, frontend.
|
||||
- Applies Prisma schema (`prisma:deploy`, fallback to `prisma:push`).
|
||||
- Seeds admin user.
|
||||
- Verifies API health and login.
|
||||
- Writes deployment summary to `/root/proxpanel-install-summary.txt`.
|
||||
|
||||
## 2) Fast Production Checks
|
||||
|
||||
```bash
|
||||
cd /opt/proxpanel
|
||||
docker compose --env-file .env.production -f infra/deploy/docker-compose.production.yml ps
|
||||
curl -fsS http://127.0.0.1:8080/api/health
|
||||
curl -I http://102.69.243.167
|
||||
```
|
||||
|
||||
## 3) Connect Proxmox Cluster In App
|
||||
|
||||
### A. Create Proxmox API token
|
||||
In Proxmox UI:
|
||||
1. Open `Datacenter -> Permissions -> API Tokens`.
|
||||
2. Select your user (for example `root@pam` or a dedicated service user).
|
||||
3. Click `Add`.
|
||||
4. Set `Token ID` (example: `proxpanel`).
|
||||
5. Copy the generated token secret immediately.
|
||||
|
||||
### B. Save credentials in ProxPanel
|
||||
In ProxPanel UI:
|
||||
1. Login as admin.
|
||||
2. Go to `Settings -> Proxmox`.
|
||||
3. Fill:
|
||||
- `Host`: Proxmox hostname or IP (no `https://` prefix)
|
||||
- `Port`: `8006`
|
||||
- `Username`: e.g. `root@pam`
|
||||
- `Token ID`: e.g. `proxpanel`
|
||||
- `Token Secret`: generated secret
|
||||
- `Verify SSL`: enabled if Proxmox cert is trusted; disable only if using self-signed cert temporarily
|
||||
4. Click `Save Proxmox`.
|
||||
|
||||
### C. Trigger first sync
|
||||
Use API once to import nodes/VMs:
|
||||
|
||||
```bash
|
||||
APP_URL="http://102.69.243.167"
|
||||
ADMIN_EMAIL="admin@yourdomain.com"
|
||||
ADMIN_PASSWORD="<YOUR_ADMIN_PASSWORD>"
|
||||
|
||||
TOKEN=$(curl -s -X POST "$APP_URL/api/auth/login" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d "{\"email\":\"$ADMIN_EMAIL\",\"password\":\"$ADMIN_PASSWORD\"}" | jq -r '.token')
|
||||
|
||||
curl -s -X POST "$APP_URL/api/proxmox/sync" \
|
||||
-H "Authorization: Bearer $TOKEN" \
|
||||
-H "Content-Type: application/json"
|
||||
```
|
||||
|
||||
Then confirm:
|
||||
- `Nodes` page shows imported nodes.
|
||||
- Dashboard cards and usage graphs populate.
|
||||
|
||||
## 4) Security Hardening Checklist
|
||||
|
||||
- Set a DNS name and terminate TLS (Nginx/Caddy/Cloudflare).
|
||||
- Change the seeded admin password immediately.
|
||||
- Keep `CORS_ORIGIN` set to your real public URL only.
|
||||
- Use a dedicated Proxmox API user/token with least privileges.
|
||||
- Keep backend bound to localhost (`127.0.0.1`) and expose only frontend port.
|
||||
- Enable off-host backups for DB and app config.
|
||||
15
Dockerfile
Normal file
15
Dockerfile
Normal file
@@ -0,0 +1,15 @@
|
||||
FROM node:22-alpine AS build
|
||||
WORKDIR /app
|
||||
COPY package.json package-lock.json* ./
|
||||
RUN npm ci
|
||||
COPY . .
|
||||
ARG VITE_API_BASE_URL=http://localhost:8080
|
||||
ENV VITE_API_BASE_URL=${VITE_API_BASE_URL}
|
||||
RUN npm run build
|
||||
|
||||
FROM nginx:1.27-alpine AS runtime
|
||||
WORKDIR /usr/share/nginx/html
|
||||
COPY --from=build /app/dist ./
|
||||
COPY infra/nginx/default.conf /etc/nginx/conf.d/default.conf
|
||||
EXPOSE 80
|
||||
CMD ["nginx", "-g", "daemon off;"]
|
||||
73
SETUP.md
Normal file
73
SETUP.md
Normal file
@@ -0,0 +1,73 @@
|
||||
# Setup Guide
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- Node.js 22+
|
||||
- npm 10+
|
||||
- PostgreSQL 15+
|
||||
|
||||
## 1) Install Dependencies
|
||||
|
||||
```bash
|
||||
npm install
|
||||
cd backend && npm install
|
||||
```
|
||||
|
||||
## 2) Configure Backend Environment
|
||||
|
||||
Copy the template and set real secrets:
|
||||
|
||||
```bash
|
||||
cd backend
|
||||
cp .env.example .env
|
||||
```
|
||||
|
||||
Required values:
|
||||
|
||||
- `DATABASE_URL`
|
||||
- `JWT_SECRET`
|
||||
- `JWT_REFRESH_SECRET`
|
||||
- `CORS_ORIGIN`
|
||||
|
||||
## 3) Prepare Database
|
||||
|
||||
Preferred (versioned migrations):
|
||||
|
||||
```bash
|
||||
cd backend
|
||||
npm run prisma:migrate
|
||||
npm run prisma:generate
|
||||
npm run prisma:seed
|
||||
```
|
||||
|
||||
Alternative (dev only):
|
||||
|
||||
```bash
|
||||
cd backend
|
||||
npm run prisma:push
|
||||
npm run prisma:seed
|
||||
```
|
||||
|
||||
## 4) Run Development Stack
|
||||
|
||||
Backend:
|
||||
|
||||
```bash
|
||||
cd backend
|
||||
npm run dev
|
||||
```
|
||||
|
||||
Frontend (new terminal):
|
||||
|
||||
```bash
|
||||
npm run dev
|
||||
```
|
||||
|
||||
## 5) Quality Gates
|
||||
|
||||
```bash
|
||||
cd backend && npm run build
|
||||
cd ..
|
||||
npm run lint
|
||||
npm run build
|
||||
```
|
||||
89
Upgrade-Implementation-Tracker.md
Normal file
89
Upgrade-Implementation-Tracker.md
Normal file
@@ -0,0 +1,89 @@
|
||||
# Enterprise Upgrade Implementation Tracker
|
||||
|
||||
This tracker maps the feature scope from Upgrade.md into implementation phases with delivered status.
|
||||
|
||||
## Phase 1 - Control Plane Foundation (Implemented)
|
||||
|
||||
### Admin Area
|
||||
- [x] Boot/Reboot/Stop/Shut Down server actions (/api/proxmox/vms/:id/actions/:action)
|
||||
- [x] Migrate server between nodes (/api/proxmox/vms/:id/migrate)
|
||||
- [x] Access noVNC console ticket (/api/proxmox/vms/:id/console)
|
||||
- [x] Reinstall workflow endpoint (/api/proxmox/vms/:id/reinstall)
|
||||
- [x] Change VM hostname/ISO/boot settings/SSH key (/api/proxmox/vms/:id/config)
|
||||
- [x] Reconfigure server network (/api/proxmox/vms/:id/network)
|
||||
- [x] Add additional disk storage (/api/proxmox/vms/:id/disks)
|
||||
- [x] Auto backup before reinstall flag (ackup_before_reinstall)
|
||||
|
||||
### Tasking / Queue / History
|
||||
- [x] Operations task history model (OperationTask)
|
||||
- [x] Operation status lifecycle: queued/running/success/failed
|
||||
- [x] Operations task list API (GET /api/operations/tasks)
|
||||
- [x] Queue summary stats for waiting/running/failed/success
|
||||
- [x] Audit logging linked with task IDs for critical operations
|
||||
|
||||
### Scheduled Automation
|
||||
- [x] VM power schedule model (PowerSchedule)
|
||||
- [x] Power schedule CRUD APIs (/api/operations/power-schedules)
|
||||
- [x] Run-now trigger for schedules (POST /run)
|
||||
- [x] Cron-based power schedule worker
|
||||
|
||||
### Frontend
|
||||
- [x] Operations Center page (/operations)
|
||||
- [x] Task history table + queue counters
|
||||
- [x] Power schedules list/create/toggle/delete/run-now
|
||||
|
||||
## Phase 2 - Provisioning & Templates (Implemented)
|
||||
- [x] App template catalog (KVM/LXC templates, ISO, archives)
|
||||
- [x] Application groups + template assignment policies
|
||||
- [x] VM ID range policies per server/group
|
||||
- [x] Auto-node and weighted placement engine
|
||||
- [x] Service create/suspend/unsuspend/terminate flows with package options
|
||||
- [x] Deep Proxmox template-clone/image-boot orchestration per template type
|
||||
|
||||
## Phase 3 - Backup, Restore, Snapshots (In Progress)
|
||||
- [x] PBS integration workflow for file-level restore tasks
|
||||
- [x] Backup limits (count/size) enforcement per tenant/product
|
||||
- [x] Backup protection flags and routing policies
|
||||
- [x] Snapshot jobs with recurring policies and retention
|
||||
- [x] Cross-VM restore from owned servers
|
||||
|
||||
## Phase 4 - Network & IPAM Enterprise (In Progress)
|
||||
- [x] Public/private IPAM across server/VLAN/tag/node/bridge (Prisma models + APIs)
|
||||
- [x] IPv4/IPv6/subnet import/return workflows (bulk import + assignment return endpoints)
|
||||
- [x] Additional IP assignment automation and audit logs
|
||||
- [x] SDN-aware private network attach/detach controls (API + UI wiring)
|
||||
- [x] IP subnet utilization dashboard APIs and admin UI
|
||||
- [x] Stricter pool policies (tenant quotas + reserved ranges + policy-based best-fit allocation)
|
||||
- [x] Subnet heatmap widgets + tenant-level utilization trend charts on dashboard
|
||||
|
||||
## Phase 5 - Monitoring, Alerts, Notifications (Implemented)
|
||||
- [x] Server health check definitions and result logs
|
||||
- [x] Threshold alerts (CPU/RAM/network/disk I/O) with notifications
|
||||
- [x] Faulty deployment insights and failed-task analytics
|
||||
- [x] Cluster remaining-resource forecasting
|
||||
|
||||
## Phase 6 - Client Area Enterprise (Implemented)
|
||||
- [x] Client machine create/manage with configurable limits
|
||||
- [x] Resource upgrade/downgrade workflows
|
||||
- [x] Firewall rule management and policy packs
|
||||
- [x] VM power schedules and backup schedules in tenant UI
|
||||
- [x] Console proxy per-node/per-cluster configuration
|
||||
|
||||
## Phase 7 - Platform Governance, Scheduler, Logs (Implemented)
|
||||
- [x] Cron scheduler policy settings with live runtime reconfiguration from Admin Settings
|
||||
- [x] Operation task repetition thresholds (retry attempts + backoff) with automated retry worker
|
||||
- [x] Failure notification policy for operation tasks (webhook + email gateway routing)
|
||||
- [x] Queue insights API for waiting/retrying/failed/stale tasks and due scheduled actions
|
||||
- [x] Settings UI upgraded from mock form to real backend-backed enterprise controls
|
||||
|
||||
## Phase 8 - Resource Graphs & Timescale Telemetry (Implemented)
|
||||
- [x] Proxmox VM usage graph API with time-scale controls (hour/day/week/month/year)
|
||||
- [x] Graph data includes CPU, memory, disk usage, network throughput, and disk I/O
|
||||
- [x] Admin VM panel updated with interactive usage graph dialogs
|
||||
- [x] Client Area updated with per-machine telemetry graphs and timescale selector
|
||||
- [x] Node-level resource graph API and Nodes page telemetry dialogs (CPU/RAM/Disk/I/O wait + network)
|
||||
- [x] Cluster MRTG-style dashboard views with selectable timescale windows and aggregate summaries
|
||||
|
||||
## Notes
|
||||
- This phase establishes the operational backbone required by most advanced features.
|
||||
- Remaining phases will build on the task engine + schedule worker + audited VM operation APIs implemented in Phase 1.
|
||||
257
Upgrade.md
Normal file
257
Upgrade.md
Normal file
@@ -0,0 +1,257 @@
|
||||
Features
|
||||
|
||||
Admin Area
|
||||
Create/Suspend/Unsuspend/Terminate Service:
|
||||
VPS Type Product With Single VM Machine
|
||||
Cloud Type Product With Multiple VM Machines Created Within Defined Limits
|
||||
Create/Terminate User Account
|
||||
Change Package - Supports Configurable Options
|
||||
Reconfigure Server Network
|
||||
Import/Detach VM Machine
|
||||
Boot/Reboot/Stop/Shut Down Server
|
||||
Change User Role
|
||||
Access noVNC, SPICE And Xterm.js Console
|
||||
Migrate Server Between Nodes In The Same Cluster
|
||||
Reinstall Server
|
||||
Change Server Hostname, ISO Image, Boot Devices And SSH Public Key
|
||||
View Server Status, Details And Statistics
|
||||
View Graphs - With Option To Change Time Scale of MRTG Graphs
|
||||
Display Disk And Bandwidth Usage Of Each Product
|
||||
Display CPU And Memory Usage Of Each Product
|
||||
Display IP Sets (KVM)
|
||||
Auto Node - Automatically Create VM On Node With Most Free Space
|
||||
Configure Client Area Features Per Product
|
||||
Configure Network And Private Network Settings With SDN Support
|
||||
Import IP Address To Hosting IP Addresses Table
|
||||
Return IP Address To IP Addresses Subnet
|
||||
Add Additional IP Address To VM
|
||||
Add Additional Disks Storage To VM (KVM)
|
||||
Enable Qemu Guest Agent (KVM)
|
||||
Enable Backups Routing
|
||||
Enable Auto VM Backups Before Reinstallation
|
||||
Enable Load Balancer
|
||||
Receive Notifications About VM Upgrades And Creation Failures
|
||||
Display Servers:
|
||||
|
||||
List Per VPS And Cloud
|
||||
VMs List
|
||||
Clusters List
|
||||
VM Cleaner - Manage VM Not Existing In Your WHMCS
|
||||
Templates - Convert KVM VPS To KVM Template
|
||||
Settings
|
||||
Groups
|
||||
Recovery VM Configurations List With Export To Backup File
|
||||
Task History
|
||||
Statuses, Resources Usage, IP Assignments And Details
|
||||
|
||||
Manage Public And Private IP Addresses Per Server/VLAN/Tag/Node/Bridge:
|
||||
|
||||
IPv4 Addresses
|
||||
IPv6 Addresses
|
||||
IPv6 Subnets
|
||||
|
||||
View Logs Of IP Assignment Changes
|
||||
Configure App Templates:
|
||||
|
||||
Applications
|
||||
KVM/LXC Templates
|
||||
ISO Images
|
||||
KVM/LXC Archives
|
||||
|
||||
Create And Manage Custom Cloud-Init Scripts Per App Template
|
||||
Configure High Availability Settings Per App Template
|
||||
Create Application Groups And Assign App Templates
|
||||
Assign Virtual Machines To Nodes Based On Selected Application Groups
|
||||
Define VM ID Ranges Per Server
|
||||
Set Minimum VM ID For Product Without ID Ranges Defined
|
||||
Configure Resource Weights For Load Balancer Prioritization
|
||||
Configure Tasks Repetition Threshold And Email Notifications
|
||||
Configure Backup Tasks Limitation And File Restoration Settings
|
||||
Configure Console Proxy For Multiple Or Per-Node Connections
|
||||
Set Admin Area And Proxmox VE Widget Features
|
||||
Configure Scheduled Backups And Firewall
|
||||
View And Manage Logs
|
||||
View Queue Of Scheduled Tasks
|
||||
Configure Cron Scheduler Settings
|
||||
Customize Module Language Files With "Translations" Tool
|
||||
Manage Media Library With Logotypes For App Templates
|
||||
View Backup Tasks, Virtual Machine And Resource Usage Statistics
|
||||
|
||||
View Faulty VM Deployments
|
||||
View Waiting And Failed Tasks
|
||||
View Cluster Remaining Resources
|
||||
View Node Resources
|
||||
|
||||
Configurable Options
|
||||
|
||||
KVM For "VPS" Product Type:
|
||||
Additional Disks Space (With Configurable Storage, Units And Size)
|
||||
Amount of RAM
|
||||
Application
|
||||
Backup Files
|
||||
Backups Size
|
||||
Bandwidth
|
||||
CPU Cores
|
||||
CPU Sockets
|
||||
CPU units for a VM
|
||||
Custom Cloud-Init Configuration
|
||||
Disk Space
|
||||
Download Backup Files
|
||||
IPv4 Addresses
|
||||
IPv6 Addresses
|
||||
IPv6 Subnets
|
||||
Limit Of CPU
|
||||
Managed View
|
||||
Network Rate
|
||||
OS Type
|
||||
Private Network
|
||||
Protected Backup Files
|
||||
Restore Backup Files
|
||||
Server Monitoring
|
||||
Snapshot Jobs
|
||||
Snapshots
|
||||
Storage Disk Space
|
||||
TPM
|
||||
Tag
|
||||
VCPUs
|
||||
KVM Limits For "Cloud" Product Type:
|
||||
Additional Disk Space
|
||||
Backups Files Limit
|
||||
Backups Size
|
||||
Bandwidth
|
||||
CPU Cores
|
||||
CPU Limit
|
||||
CPU Sockets
|
||||
CPU Units Limit
|
||||
IPv4 Addresses
|
||||
IPv6 Addresses
|
||||
IPv6 Subnets
|
||||
Memory
|
||||
Network Rate
|
||||
Snapshot Jobs
|
||||
Snapshots
|
||||
Storage
|
||||
Storage Disk Space
|
||||
VCPUs
|
||||
Virtual Networks
|
||||
LXC For "VPS" Product Type:
|
||||
Additional Disks Space (With Configurable Storage, Units And Size)
|
||||
Amount of RAM
|
||||
Amount of SWAP
|
||||
Application
|
||||
Backup Files
|
||||
Backups Size
|
||||
Bandwidth
|
||||
CPU Cores
|
||||
CPU units for a VM
|
||||
Disk Space
|
||||
Download Backup Files
|
||||
IPv4 Addresses
|
||||
IPv6 Addresses
|
||||
IPv6 Subnets
|
||||
Limit Of CPU
|
||||
Managed View
|
||||
Network Rate
|
||||
Private Network
|
||||
Protected Backup Files
|
||||
Restore Backup Files
|
||||
Server Monitoring
|
||||
Snapshot Jobs
|
||||
Snapshots
|
||||
Storage Disk Space
|
||||
Tag
|
||||
LXC Limits For "Cloud" Product Type:
|
||||
Additional Disk Space
|
||||
Backups Files Limit
|
||||
Backups Size
|
||||
Bandwidth
|
||||
CPU Limit
|
||||
CPU Units Limit
|
||||
IPv4 Addresses
|
||||
IPv6 Addresses
|
||||
IPv6 Subnets
|
||||
Memory
|
||||
Network Rate
|
||||
SWAP
|
||||
Snapshot Jobs
|
||||
Snapshots
|
||||
Storage
|
||||
Storage Disk Space
|
||||
VCPUs
|
||||
Virtual Networks
|
||||
|
||||
Client Area
|
||||
|
||||
Create/Manage/View Server Status, Details And Statistics:
|
||||
VPS Type Product With Single VM Machine
|
||||
Cloud Type Product With Multiple VM Machines Created Within Available Limits:
|
||||
Define Machine Settings:
|
||||
Name
|
||||
Type
|
||||
Description
|
||||
Define Machine Parameters:
|
||||
Location
|
||||
Sockets (KVM)
|
||||
Cores (LXC)
|
||||
vCPU (KVM)
|
||||
CPU Priority
|
||||
VM RAM
|
||||
SWAP (LXC)
|
||||
Disk Size
|
||||
Default User (KVM)
|
||||
Password
|
||||
SSH Key
|
||||
Search Domain (KVM)
|
||||
Name Servers (KVM)
|
||||
Add Virtual Networks
|
||||
Add Additional Disks
|
||||
Start/Reboot/Stop/Shut Down/Delete Server
|
||||
Reconfigure Server Network
|
||||
Access noVNC, SPICE And Xterm.js Console
|
||||
Change Server Hostname, ISO Image, Boot Devices And SSH Public Key
|
||||
View And Edit Public SSH Key (KVM)
|
||||
Download Public And Private SSH Keys (LXC)
|
||||
Create/Restore/Delete Backups Of Current Server
|
||||
Manage Backups Within Defined Limits (Max Number And Size Of Files)
|
||||
Restore Backups From:
|
||||
Any Owned Server
|
||||
Proxmox Backup Server (PBS)
|
||||
Restore Backups Of:
|
||||
Selected Single Files And Directories With Option To Download Them (PBS)
|
||||
Full Server Backups
|
||||
Manage Backup Schedules Within Defined Limits (Max Number And Size Of Files)
|
||||
Protect Selected Backups From Manual Deletion And Backups Routing
|
||||
Add And Manage Additional Disks
|
||||
Manage Firewall Rules And Options
|
||||
View Resources Usage Graphs - With Option To Change Time Scale of MRTG Graphs:
|
||||
CPU
|
||||
Memory
|
||||
Network Traffic
|
||||
Disk I/O
|
||||
View Network Devices, Manage Private Interface And Attach Servers
|
||||
Reinstall Server Using Templates (KVM) And ISO Images
|
||||
Send Email Notifications When Server Exceeds Resource Thresholds:
|
||||
Network Traffic
|
||||
CPU Usage
|
||||
Memory Usage
|
||||
Disk Read And Write Speed
|
||||
Monitor Server Health
|
||||
Create Server Monitoring Checks
|
||||
View Check Result Logs
|
||||
View Successful And Failed Checks Graphs
|
||||
Create Server Snapshots:
|
||||
Manually
|
||||
Automatically:
|
||||
Every Number Of Hours
|
||||
Each Specified Day
|
||||
View Task History
|
||||
Manage VM Power Tasks To Automatically Start/Stop/Reboot Server At Specified Time
|
||||
Display CPU, Memory, Disk And Bandwidth Usage
|
||||
Choose Server Resources While Ordering And Upgrade/Downgrade Them Freely
|
||||
Convert KVM VPS To KVM Template ("Cloud" Type Product)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
21
backend/Dockerfile
Normal file
21
backend/Dockerfile
Normal file
@@ -0,0 +1,21 @@
|
||||
FROM node:22-alpine AS deps
|
||||
WORKDIR /app
|
||||
COPY package.json package-lock.json* ./
|
||||
RUN npm install
|
||||
|
||||
FROM node:22-alpine AS build
|
||||
WORKDIR /app
|
||||
COPY --from=deps /app/node_modules ./node_modules
|
||||
COPY . .
|
||||
RUN npm run prisma:generate
|
||||
RUN npm run build
|
||||
|
||||
FROM node:22-alpine AS runtime
|
||||
WORKDIR /app
|
||||
ENV NODE_ENV=production
|
||||
COPY --from=build /app/node_modules ./node_modules
|
||||
COPY --from=build /app/dist ./dist
|
||||
COPY --from=build /app/prisma ./prisma
|
||||
COPY --from=build /app/package.json ./package.json
|
||||
EXPOSE 8080
|
||||
CMD ["sh", "-c", "npm run prisma:deploy && node dist/index.js"]
|
||||
2402
backend/package-lock.json
generated
Normal file
2402
backend/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
47
backend/package.json
Normal file
47
backend/package.json
Normal file
@@ -0,0 +1,47 @@
|
||||
{
|
||||
"name": "proxpanel-backend",
|
||||
"version": "1.0.0",
|
||||
"private": true,
|
||||
"description": "Production API for ProxPanel (Proxmox VE SaaS control panel)",
|
||||
"scripts": {
|
||||
"dev": "tsx watch src/index.ts",
|
||||
"build": "tsc -p tsconfig.json",
|
||||
"start": "node dist/index.js",
|
||||
"test": "node --test dist/tests/**/*.test.js",
|
||||
"prisma:generate": "prisma generate",
|
||||
"prisma:migrate": "prisma migrate dev",
|
||||
"prisma:deploy": "prisma migrate deploy",
|
||||
"prisma:push": "prisma db push",
|
||||
"prisma:seed": "prisma db seed",
|
||||
"prisma:validate": "prisma validate"
|
||||
},
|
||||
"prisma": {
|
||||
"seed": "tsx prisma/seed.ts"
|
||||
},
|
||||
"dependencies": {
|
||||
"@prisma/client": "^6.6.0",
|
||||
"axios": "^1.9.0",
|
||||
"bcryptjs": "^2.4.3",
|
||||
"compression": "^1.8.0",
|
||||
"cors": "^2.8.5",
|
||||
"dotenv": "^16.5.0",
|
||||
"express": "^4.21.2",
|
||||
"helmet": "^8.1.0",
|
||||
"jsonwebtoken": "^9.0.2",
|
||||
"morgan": "^1.10.0",
|
||||
"node-cron": "^4.0.7",
|
||||
"zod": "^3.24.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/bcryptjs": "^2.4.6",
|
||||
"@types/compression": "^1.7.5",
|
||||
"@types/cors": "^2.8.17",
|
||||
"@types/express": "^4.17.21",
|
||||
"@types/jsonwebtoken": "^9.0.9",
|
||||
"@types/morgan": "^1.9.9",
|
||||
"@types/node": "^22.15.2",
|
||||
"prisma": "^6.6.0",
|
||||
"tsx": "^4.19.4",
|
||||
"typescript": "^5.8.3"
|
||||
}
|
||||
}
|
||||
1352
backend/prisma/migrations/20260417120000_init/migration.sql
Normal file
1352
backend/prisma/migrations/20260417120000_init/migration.sql
Normal file
File diff suppressed because it is too large
Load Diff
1
backend/prisma/migrations/migration_lock.toml
Normal file
1
backend/prisma/migrations/migration_lock.toml
Normal file
@@ -0,0 +1 @@
|
||||
provider = "postgresql"
|
||||
1205
backend/prisma/schema.prisma
Normal file
1205
backend/prisma/schema.prisma
Normal file
File diff suppressed because it is too large
Load Diff
184
backend/prisma/seed.js
Normal file
184
backend/prisma/seed.js
Normal file
@@ -0,0 +1,184 @@
|
||||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const bcryptjs_1 = __importDefault(require("bcryptjs"));
|
||||
const client_1 = require("@prisma/client");
|
||||
const prisma = new client_1.PrismaClient();
|
||||
async function main() {
|
||||
const adminEmail = process.env.ADMIN_EMAIL ?? "admin@proxpanel.local";
|
||||
const adminPassword = process.env.ADMIN_PASSWORD ?? "ChangeMe123!";
|
||||
const password_hash = await bcryptjs_1.default.hash(adminPassword, 12);
|
||||
const tenant = await prisma.tenant.upsert({
|
||||
where: { slug: "default-tenant" },
|
||||
update: {},
|
||||
create: {
|
||||
name: "Default Tenant",
|
||||
slug: "default-tenant",
|
||||
owner_email: adminEmail,
|
||||
currency: client_1.Currency.NGN,
|
||||
payment_provider: client_1.PaymentProvider.PAYSTACK
|
||||
}
|
||||
});
|
||||
await prisma.user.upsert({
|
||||
where: { email: adminEmail },
|
||||
update: {
|
||||
role: client_1.Role.SUPER_ADMIN,
|
||||
password_hash,
|
||||
tenant_id: tenant.id
|
||||
},
|
||||
create: {
|
||||
email: adminEmail,
|
||||
full_name: "System Administrator",
|
||||
password_hash,
|
||||
role: client_1.Role.SUPER_ADMIN,
|
||||
tenant_id: tenant.id
|
||||
}
|
||||
});
|
||||
await prisma.setting.upsert({
|
||||
where: { key: "proxmox" },
|
||||
update: {},
|
||||
create: {
|
||||
key: "proxmox",
|
||||
type: "PROXMOX",
|
||||
value: {
|
||||
host: "",
|
||||
port: 8006,
|
||||
username: "root@pam",
|
||||
token_id: "",
|
||||
token_secret: "",
|
||||
verify_ssl: true
|
||||
}
|
||||
}
|
||||
});
|
||||
await prisma.setting.upsert({
|
||||
where: { key: "payment" },
|
||||
update: {},
|
||||
create: {
|
||||
key: "payment",
|
||||
type: "PAYMENT",
|
||||
value: {
|
||||
default_provider: "paystack",
|
||||
paystack_public: "",
|
||||
paystack_secret: "",
|
||||
flutterwave_public: "",
|
||||
flutterwave_secret: "",
|
||||
flutterwave_webhook_hash: "",
|
||||
callback_url: ""
|
||||
}
|
||||
}
|
||||
});
|
||||
await prisma.setting.upsert({
|
||||
where: { key: "provisioning" },
|
||||
update: {},
|
||||
create: {
|
||||
key: "provisioning",
|
||||
type: "GENERAL",
|
||||
value: {
|
||||
min_vmid: 100
|
||||
}
|
||||
}
|
||||
});
|
||||
await prisma.setting.upsert({
|
||||
where: { key: "backup" },
|
||||
update: {},
|
||||
create: {
|
||||
key: "backup",
|
||||
type: "GENERAL",
|
||||
value: {
|
||||
default_source: "local",
|
||||
default_storage: "local-lvm",
|
||||
max_restore_file_count: 100,
|
||||
pbs_enabled: false,
|
||||
pbs_host: "",
|
||||
pbs_datastore: "",
|
||||
pbs_namespace: "",
|
||||
pbs_verify_ssl: true
|
||||
}
|
||||
}
|
||||
});
|
||||
await prisma.billingPlan.upsert({
|
||||
where: { slug: "starter" },
|
||||
update: {},
|
||||
create: {
|
||||
name: "Starter",
|
||||
slug: "starter",
|
||||
description: "Entry plan for lightweight VM workloads",
|
||||
price_monthly: 12000,
|
||||
price_hourly: 12000 / 720,
|
||||
currency: client_1.Currency.NGN,
|
||||
cpu_cores: 2,
|
||||
ram_mb: 4096,
|
||||
disk_gb: 60,
|
||||
bandwidth_gb: 2000,
|
||||
features: ["basic-support", "daily-backups"]
|
||||
}
|
||||
});
|
||||
const ubuntuTemplate = await prisma.appTemplate.upsert({
|
||||
where: { slug: "ubuntu-22-04-golden" },
|
||||
update: {},
|
||||
create: {
|
||||
name: "Ubuntu 22.04 Golden",
|
||||
slug: "ubuntu-22-04-golden",
|
||||
template_type: "KVM_TEMPLATE",
|
||||
virtualization_type: "QEMU",
|
||||
source: "local:vztmpl/ubuntu-22.04-golden.qcow2",
|
||||
description: "Baseline hardened Ubuntu template",
|
||||
metadata: {
|
||||
os_family: "linux",
|
||||
os_version: "22.04"
|
||||
}
|
||||
}
|
||||
});
|
||||
const webGroup = await prisma.applicationGroup.upsert({
|
||||
where: { slug: "web-workloads" },
|
||||
update: {},
|
||||
create: {
|
||||
name: "Web Workloads",
|
||||
slug: "web-workloads",
|
||||
description: "HTTP-facing application services"
|
||||
}
|
||||
});
|
||||
await prisma.applicationGroupTemplate.upsert({
|
||||
where: {
|
||||
group_id_template_id: {
|
||||
group_id: webGroup.id,
|
||||
template_id: ubuntuTemplate.id
|
||||
}
|
||||
},
|
||||
update: {},
|
||||
create: {
|
||||
group_id: webGroup.id,
|
||||
template_id: ubuntuTemplate.id,
|
||||
priority: 10
|
||||
}
|
||||
});
|
||||
await prisma.backupPolicy.upsert({
|
||||
where: {
|
||||
id: "default-tenant-backup-policy"
|
||||
},
|
||||
update: {},
|
||||
create: {
|
||||
id: "default-tenant-backup-policy",
|
||||
tenant_id: tenant.id,
|
||||
max_files: 25,
|
||||
max_total_size_mb: 102400,
|
||||
max_protected_files: 5,
|
||||
allow_file_restore: true,
|
||||
allow_cross_vm_restore: true,
|
||||
allow_pbs_restore: true
|
||||
}
|
||||
});
|
||||
}
|
||||
main()
|
||||
.then(async () => {
|
||||
await prisma.$disconnect();
|
||||
})
|
||||
.catch(async (error) => {
|
||||
// eslint-disable-next-line no-console
|
||||
console.error("Seed failed:", error);
|
||||
await prisma.$disconnect();
|
||||
process.exit(1);
|
||||
});
|
||||
//# sourceMappingURL=seed.js.map
|
||||
1
backend/prisma/seed.js.map
Normal file
1
backend/prisma/seed.js.map
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"seed.js","sourceRoot":"","sources":["seed.ts"],"names":[],"mappings":";;;;;AAAA,wDAA8B;AAC9B,2CAA+E;AAE/E,MAAM,MAAM,GAAG,IAAI,qBAAY,EAAE,CAAC;AAElC,KAAK,UAAU,IAAI;IACjB,MAAM,UAAU,GAAG,OAAO,CAAC,GAAG,CAAC,WAAW,IAAI,uBAAuB,CAAC;IACtE,MAAM,aAAa,GAAG,OAAO,CAAC,GAAG,CAAC,cAAc,IAAI,cAAc,CAAC;IACnE,MAAM,aAAa,GAAG,MAAM,kBAAM,CAAC,IAAI,CAAC,aAAa,EAAE,EAAE,CAAC,CAAC;IAE3D,MAAM,MAAM,GAAG,MAAM,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC;QACxC,KAAK,EAAE,EAAE,IAAI,EAAE,gBAAgB,EAAE;QACjC,MAAM,EAAE,EAAE;QACV,MAAM,EAAE;YACN,IAAI,EAAE,gBAAgB;YACtB,IAAI,EAAE,gBAAgB;YACtB,WAAW,EAAE,UAAU;YACvB,QAAQ,EAAE,iBAAQ,CAAC,GAAG;YACtB,gBAAgB,EAAE,wBAAe,CAAC,QAAQ;SAC3C;KACF,CAAC,CAAC;IAEH,MAAM,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC;QACvB,KAAK,EAAE,EAAE,KAAK,EAAE,UAAU,EAAE;QAC5B,MAAM,EAAE;YACN,IAAI,EAAE,aAAI,CAAC,WAAW;YACtB,aAAa;YACb,SAAS,EAAE,MAAM,CAAC,EAAE;SACrB;QACD,MAAM,EAAE;YACN,KAAK,EAAE,UAAU;YACjB,SAAS,EAAE,sBAAsB;YACjC,aAAa;YACb,IAAI,EAAE,aAAI,CAAC,WAAW;YACtB,SAAS,EAAE,MAAM,CAAC,EAAE;SACrB;KACF,CAAC,CAAC;IAEH,MAAM,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC;QAC1B,KAAK,EAAE,EAAE,GAAG,EAAE,SAAS,EAAE;QACzB,MAAM,EAAE,EAAE;QACV,MAAM,EAAE;YACN,GAAG,EAAE,SAAS;YACd,IAAI,EAAE,SAAS;YACf,KAAK,EAAE;gBACL,IAAI,EAAE,EAAE;gBACR,IAAI,EAAE,IAAI;gBACV,QAAQ,EAAE,UAAU;gBACpB,QAAQ,EAAE,EAAE;gBACZ,YAAY,EAAE,EAAE;gBAChB,UAAU,EAAE,IAAI;aACjB;SACF;KACF,CAAC,CAAC;IAEH,MAAM,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC;QAC1B,KAAK,EAAE,EAAE,GAAG,EAAE,SAAS,EAAE;QACzB,MAAM,EAAE,EAAE;QACV,MAAM,EAAE;YACN,GAAG,EAAE,SAAS;YACd,IAAI,EAAE,SAAS;YACf,KAAK,EAAE;gBACL,gBAAgB,EAAE,UAAU;gBAC5B,eAAe,EAAE,EAAE;gBACnB,eAAe,EAAE,EAAE;gBACnB,kBAAkB,EAAE,EAAE;gBACtB,kBAAkB,EAAE,EAAE;gBACtB,wBAAwB,EAAE,EAAE;gBAC5B,YAAY,EAAE,EAAE;aACjB;SACF;KACF,CAAC,CAAC;IAEH,MAAM,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC;QAC1B,KAAK,EAAE,EAAE,GAAG,EAAE,cAAc,EAAE;QAC9B,MAAM,EAAE,EAAE;QACV,MAAM,EAAE;YACN,GAAG,EAAE,cAAc;YACnB,IAAI,EAAE,SAAS;YACf,KAAK,EAAE;gBACL,QAAQ,EAAE,GAAG;aACd;SACF;KACF,CAAC,CAAC;IAEH,MAAM,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC;QAC1B,KAAK,EAAE,EAAE,GAAG,EAAE,QAAQ,EAAE;QACxB,MAAM,EAAE,EAAE;Q
ACV,MAAM,EAAE;YACN,GAAG,EAAE,QAAQ;YACb,IAAI,EAAE,SAAS;YACf,KAAK,EAAE;gBACL,cAAc,EAAE,OAAO;gBACvB,eAAe,EAAE,WAAW;gBAC5B,sBAAsB,EAAE,GAAG;gBAC3B,WAAW,EAAE,KAAK;gBAClB,QAAQ,EAAE,EAAE;gBACZ,aAAa,EAAE,EAAE;gBACjB,aAAa,EAAE,EAAE;gBACjB,cAAc,EAAE,IAAI;aACrB;SACF;KACF,CAAC,CAAC;IAEH,MAAM,MAAM,CAAC,WAAW,CAAC,MAAM,CAAC;QAC9B,KAAK,EAAE,EAAE,IAAI,EAAE,SAAS,EAAE;QAC1B,MAAM,EAAE,EAAE;QACV,MAAM,EAAE;YACN,IAAI,EAAE,SAAS;YACf,IAAI,EAAE,SAAS;YACf,WAAW,EAAE,yCAAyC;YACtD,aAAa,EAAE,KAAK;YACpB,YAAY,EAAE,KAAK,GAAG,GAAG;YACzB,QAAQ,EAAE,iBAAQ,CAAC,GAAG;YACtB,SAAS,EAAE,CAAC;YACZ,MAAM,EAAE,IAAI;YACZ,OAAO,EAAE,EAAE;YACX,YAAY,EAAE,IAAI;YAClB,QAAQ,EAAE,CAAC,eAAe,EAAE,eAAe,CAAC;SAC7C;KACF,CAAC,CAAC;IAEH,MAAM,cAAc,GAAG,MAAM,MAAM,CAAC,WAAW,CAAC,MAAM,CAAC;QACrD,KAAK,EAAE,EAAE,IAAI,EAAE,qBAAqB,EAAE;QACtC,MAAM,EAAE,EAAE;QACV,MAAM,EAAE;YACN,IAAI,EAAE,qBAAqB;YAC3B,IAAI,EAAE,qBAAqB;YAC3B,aAAa,EAAE,cAAc;YAC7B,mBAAmB,EAAE,MAAM;YAC3B,MAAM,EAAE,wCAAwC;YAChD,WAAW,EAAE,mCAAmC;YAChD,QAAQ,EAAE;gBACR,SAAS,EAAE,OAAO;gBAClB,UAAU,EAAE,OAAO;aACpB;SACF;KACF,CAAC,CAAC;IAEH,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,gBAAgB,CAAC,MAAM,CAAC;QACpD,KAAK,EAAE,EAAE,IAAI,EAAE,eAAe,EAAE;QAChC,MAAM,EAAE,EAAE;QACV,MAAM,EAAE;YACN,IAAI,EAAE,eAAe;YACrB,IAAI,EAAE,eAAe;YACrB,WAAW,EAAE,kCAAkC;SAChD;KACF,CAAC,CAAC;IAEH,MAAM,MAAM,CAAC,wBAAwB,CAAC,MAAM,CAAC;QAC3C,KAAK,EAAE;YACL,oBAAoB,EAAE;gBACpB,QAAQ,EAAE,QAAQ,CAAC,EAAE;gBACrB,WAAW,EAAE,cAAc,CAAC,EAAE;aAC/B;SACF;QACD,MAAM,EAAE,EAAE;QACV,MAAM,EAAE;YACN,QAAQ,EAAE,QAAQ,CAAC,EAAE;YACrB,WAAW,EAAE,cAAc,CAAC,EAAE;YAC9B,QAAQ,EAAE,EAAE;SACb;KACF,CAAC,CAAC;IAEH,MAAM,MAAM,CAAC,YAAY,CAAC,MAAM,CAAC;QAC/B,KAAK,EAAE;YACL,EAAE,EAAE,8BAA8B;SACnC;QACD,MAAM,EAAE,EAAE;QACV,MAAM,EAAE;YACN,EAAE,EAAE,8BAA8B;YAClC,SAAS,EAAE,MAAM,CAAC,EAAE;YACpB,SAAS,EAAE,EAAE;YACb,iBAAiB,EAAE,MAAM;YACzB,mBAAmB,EAAE,CAAC;YACtB,kBAAkB,EAAE,IAAI;YACxB,sBAAsB,EAAE,IAAI;YAC5B,iBAAiB,EAAE,IAAI;SACxB;KACF,CAAC,CAAC;AACL,CAAC;AAED,IAAI,EAAE;KACH,IAAI,CAAC,KAAK,IAAI,EAAE;IACf,MAAM,MAAM,CAAC,WAAW,EAAE,CAAC;AAC7B,CAAC,CAAC;KACD,KAAK,CAAC,KAAK,EAAE,KAAK
,EAAE,EAAE;IACrB,sCAAsC;IACtC,OAAO,CAAC,KAAK,CAAC,cAAc,EAAE,KAAK,CAAC,CAAC;IACrC,MAAM,MAAM,CAAC,WAAW,EAAE,CAAC;IAC3B,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AAClB,CAAC,CAAC,CAAC"}
|
||||
192
backend/prisma/seed.ts
Normal file
192
backend/prisma/seed.ts
Normal file
@@ -0,0 +1,192 @@
|
||||
import bcrypt from "bcryptjs";
|
||||
import { PrismaClient, Role, Currency, PaymentProvider } from "@prisma/client";
|
||||
|
||||
const prisma = new PrismaClient();
|
||||
|
||||
/**
 * Idempotent database seed: creates the default tenant, the super-admin
 * account, baseline settings, one billing plan, a golden VM template, an
 * application group, and a default backup policy. Every write is an
 * `upsert`, so the script is safe to re-run.
 */
async function main() {
  // Admin credentials come from the environment so deployments can override
  // the development fallbacks below.
  const adminEmail = process.env.ADMIN_EMAIL ?? "admin@proxpanel.local";
  const adminPassword = process.env.ADMIN_PASSWORD ?? "ChangeMe123!";
  const password_hash = await bcrypt.hash(adminPassword, 12);

  // Default tenant that every other seeded record hangs off.
  const tenant = await prisma.tenant.upsert({
    where: { slug: "default-tenant" },
    update: {},
    create: {
      name: "Default Tenant",
      slug: "default-tenant",
      owner_email: adminEmail,
      currency: Currency.NGN,
      payment_provider: PaymentProvider.PAYSTACK
    }
  });

  // Super-admin user. Unlike the other upserts, the `update` branch is
  // populated so re-running the seed resets password/role/tenant binding.
  await prisma.user.upsert({
    where: { email: adminEmail },
    update: {
      role: Role.SUPER_ADMIN,
      password_hash,
      tenant_id: tenant.id
    },
    create: {
      email: adminEmail,
      full_name: "System Administrator",
      password_hash,
      role: Role.SUPER_ADMIN,
      tenant_id: tenant.id
    }
  });

  // Placeholder Proxmox connection settings; an operator fills these in later.
  await prisma.setting.upsert({
    where: { key: "proxmox" },
    update: {},
    create: {
      key: "proxmox",
      type: "PROXMOX",
      value: {
        host: "",
        port: 8006,
        username: "root@pam",
        token_id: "",
        token_secret: "",
        verify_ssl: true
      }
    }
  });

  // Payment-provider credentials (empty until configured via the UI).
  await prisma.setting.upsert({
    where: { key: "payment" },
    update: {},
    create: {
      key: "payment",
      type: "PAYMENT",
      value: {
        default_provider: "paystack",
        paystack_public: "",
        paystack_secret: "",
        flutterwave_public: "",
        flutterwave_secret: "",
        flutterwave_webhook_hash: "",
        callback_url: ""
      }
    }
  });

  // VM provisioning defaults (lowest VMID handed out by the allocator).
  await prisma.setting.upsert({
    where: { key: "provisioning" },
    update: {},
    create: {
      key: "provisioning",
      type: "GENERAL",
      value: {
        min_vmid: 100
      }
    }
  });

  // Backup defaults; PBS (Proxmox Backup Server) integration starts disabled.
  await prisma.setting.upsert({
    where: { key: "backup" },
    update: {},
    create: {
      key: "backup",
      type: "GENERAL",
      value: {
        default_source: "local",
        default_storage: "local-lvm",
        max_restore_file_count: 100,
        pbs_enabled: false,
        pbs_host: "",
        pbs_datastore: "",
        pbs_namespace: "",
        pbs_verify_ssl: true
      }
    }
  });

  // Entry-level billing plan. Hourly price is derived from the monthly price
  // over a 720-hour (30-day) month.
  await prisma.billingPlan.upsert({
    where: { slug: "starter" },
    update: {},
    create: {
      name: "Starter",
      slug: "starter",
      description: "Entry plan for lightweight VM workloads",
      price_monthly: 12000,
      price_hourly: 12000 / 720,
      currency: Currency.NGN,
      cpu_cores: 2,
      ram_mb: 4096,
      disk_gb: 60,
      bandwidth_gb: 2000,
      features: ["basic-support", "daily-backups"]
    }
  });

  // Golden KVM template used as the default provisioning image.
  // NOTE(review): the source path uses the `vztmpl` content type with a
  // .qcow2 image — confirm this matches the Proxmox storage layout.
  const ubuntuTemplate = await prisma.appTemplate.upsert({
    where: { slug: "ubuntu-22-04-golden" },
    update: {},
    create: {
      name: "Ubuntu 22.04 Golden",
      slug: "ubuntu-22-04-golden",
      template_type: "KVM_TEMPLATE",
      virtualization_type: "QEMU",
      source: "local:vztmpl/ubuntu-22.04-golden.qcow2",
      description: "Baseline hardened Ubuntu template",
      metadata: {
        os_family: "linux",
        os_version: "22.04"
      }
    }
  });

  // Application group the template is attached to below.
  const webGroup = await prisma.applicationGroup.upsert({
    where: { slug: "web-workloads" },
    update: {},
    create: {
      name: "Web Workloads",
      slug: "web-workloads",
      description: "HTTP-facing application services"
    }
  });

  // Join row linking the group and template (composite unique key).
  await prisma.applicationGroupTemplate.upsert({
    where: {
      group_id_template_id: {
        group_id: webGroup.id,
        template_id: ubuntuTemplate.id
      }
    },
    update: {},
    create: {
      group_id: webGroup.id,
      template_id: ubuntuTemplate.id,
      priority: 10
    }
  });

  // Default per-tenant backup quota/permission policy. Uses a fixed id so
  // re-seeding targets the same row.
  await prisma.backupPolicy.upsert({
    where: {
      id: "default-tenant-backup-policy"
    },
    update: {},
    create: {
      id: "default-tenant-backup-policy",
      tenant_id: tenant.id,
      max_files: 25,
      max_total_size_mb: 102400,
      max_protected_files: 5,
      allow_file_restore: true,
      allow_cross_vm_restore: true,
      allow_pbs_restore: true
    }
  });
}
|
||||
|
||||
main()
|
||||
.then(async () => {
|
||||
await prisma.$disconnect();
|
||||
})
|
||||
.catch(async (error) => {
|
||||
// eslint-disable-next-line no-console
|
||||
console.error("Seed failed:", error);
|
||||
await prisma.$disconnect();
|
||||
process.exit(1);
|
||||
});
|
||||
90
backend/src/app.ts
Normal file
90
backend/src/app.ts
Normal file
@@ -0,0 +1,90 @@
|
||||
import express from "express";
|
||||
import cors from "cors";
|
||||
import helmet from "helmet";
|
||||
import compression from "compression";
|
||||
import morgan from "morgan";
|
||||
import { env } from "./config/env";
|
||||
import authRoutes from "./routes/auth.routes";
|
||||
import healthRoutes from "./routes/health.routes";
|
||||
import dashboardRoutes from "./routes/dashboard.routes";
|
||||
import resourceRoutes from "./routes/resources.routes";
|
||||
import billingRoutes from "./routes/billing.routes";
|
||||
import paymentRoutes from "./routes/payment.routes";
|
||||
import proxmoxRoutes from "./routes/proxmox.routes";
|
||||
import settingsRoutes from "./routes/settings.routes";
|
||||
import operationsRoutes from "./routes/operations.routes";
|
||||
import provisioningRoutes from "./routes/provisioning.routes";
|
||||
import backupRoutes from "./routes/backup.routes";
|
||||
import networkRoutes from "./routes/network.routes";
|
||||
import monitoringRoutes from "./routes/monitoring.routes";
|
||||
import clientRoutes from "./routes/client.routes";
|
||||
import { errorHandler, notFoundHandler } from "./middleware/error-handler";
|
||||
import { createRateLimit } from "./middleware/rate-limit";
|
||||
|
||||
/**
 * Build and configure the Express application: security middleware, CORS,
 * body parsing (with raw-body capture), rate limiting, and all API routers.
 * Registration order below is load-bearing — see inline notes.
 */
export function createApp() {
  const app = express();
  // Honor X-Forwarded-* from one reverse-proxy hop so req.ip (used by the
  // rate limiter keys) reflects the real client address.
  app.set("trust proxy", 1);

  // Coarse limiter applied to every /api route.
  const globalRateLimit = createRateLimit({
    windowMs: env.RATE_LIMIT_WINDOW_MS,
    max: env.RATE_LIMIT_MAX
  });
  // Stricter limiter for credential endpoints, keyed by IP + submitted email
  // so one address cannot brute-force many accounts and one account cannot
  // be hammered from many addresses under a single bucket.
  const authRateLimit = createRateLimit({
    windowMs: env.AUTH_RATE_LIMIT_WINDOW_MS,
    max: env.AUTH_RATE_LIMIT_MAX,
    keyGenerator: (req) => {
      const email = typeof req.body?.email === "string" ? req.body.email.toLowerCase().trim() : "";
      return `${req.ip}:${email}`;
    }
  });

  // CORS: "*" collapses to `origin: true` (reflect request origin); otherwise
  // a comma-separated allow-list from the environment is honored.
  app.use(
    cors({
      origin: env.CORS_ORIGIN === "*" ? true : env.CORS_ORIGIN.split(",").map((item) => item.trim()),
      credentials: true
    })
  );
  app.use(helmet());
  app.use(compression());
  // JSON parsing keeps the exact raw payload on req.rawBody — required for
  // webhook signature verification (e.g. payment-provider callbacks).
  app.use(
    express.json({
      limit: "2mb",
      verify: (req, _res, buffer) => {
        const request = req as express.Request;
        request.rawBody = buffer.toString("utf8");
      }
    })
  );
  // NOTE(review): morgan("dev") is a development log format; consider a
  // production format gated on NODE_ENV.
  app.use(morgan("dev"));
  // Rate limiters must come AFTER express.json: the auth limiter reads
  // req.body.email in its key generator.
  app.use("/api", globalRateLimit);
  app.use("/api/auth/login", authRateLimit);
  app.use("/api/auth/refresh", authRateLimit);

  // Unauthenticated landing route with basic service identity.
  app.get("/", (_req, res) => {
    res.json({
      name: "ProxPanel API",
      version: "1.0.0",
      docs: "/api/health"
    });
  });

  // Feature routers, all mounted under /api.
  app.use("/api/health", healthRoutes);
  app.use("/api/auth", authRoutes);
  app.use("/api/dashboard", dashboardRoutes);
  app.use("/api/resources", resourceRoutes);
  app.use("/api/billing", billingRoutes);
  app.use("/api/payments", paymentRoutes);
  app.use("/api/proxmox", proxmoxRoutes);
  app.use("/api/settings", settingsRoutes);
  app.use("/api/operations", operationsRoutes);
  app.use("/api/provisioning", provisioningRoutes);
  app.use("/api/backups", backupRoutes);
  app.use("/api/network", networkRoutes);
  app.use("/api/monitoring", monitoringRoutes);
  app.use("/api/client", clientRoutes);

  // Fallback 404 and the centralized error translator must be last.
  app.use(notFoundHandler);
  app.use(errorHandler);

  return app;
}
|
||||
38
backend/src/config/env.ts
Normal file
38
backend/src/config/env.ts
Normal file
@@ -0,0 +1,38 @@
|
||||
import dotenv from "dotenv";
|
||||
import { z } from "zod";
|
||||
|
||||
dotenv.config();
|
||||
|
||||
const envSchema = z.object({
|
||||
NODE_ENV: z.enum(["development", "test", "production"]).default("development"),
|
||||
PORT: z.coerce.number().default(8080),
|
||||
DATABASE_URL: z.string().min(1, "DATABASE_URL is required"),
|
||||
JWT_SECRET: z.string().min(16, "JWT_SECRET must be at least 16 characters"),
|
||||
JWT_EXPIRES_IN: z.string().default("7d"),
|
||||
JWT_REFRESH_SECRET: z.string().min(16, "JWT_REFRESH_SECRET must be at least 16 characters").optional(),
|
||||
JWT_REFRESH_EXPIRES_IN: z.string().default("30d"),
|
||||
CORS_ORIGIN: z.string().default("*"),
|
||||
RATE_LIMIT_WINDOW_MS: z.coerce.number().int().positive().default(60_000),
|
||||
RATE_LIMIT_MAX: z.coerce.number().int().positive().default(600),
|
||||
AUTH_RATE_LIMIT_WINDOW_MS: z.coerce.number().int().positive().default(60_000),
|
||||
AUTH_RATE_LIMIT_MAX: z.coerce.number().int().positive().default(20),
|
||||
SCHEDULER_LEASE_MS: z.coerce.number().int().positive().default(180_000),
|
||||
SCHEDULER_HEARTBEAT_MS: z.coerce.number().int().positive().default(30_000),
|
||||
ENABLE_SCHEDULER: z.coerce.boolean().default(true),
|
||||
BILLING_CRON: z.string().default("0 * * * *"),
|
||||
BACKUP_CRON: z.string().default("*/15 * * * *"),
|
||||
POWER_SCHEDULE_CRON: z.string().default("* * * * *"),
|
||||
MONITORING_CRON: z.string().default("*/5 * * * *"),
|
||||
PROXMOX_TIMEOUT_MS: z.coerce.number().default(15000)
|
||||
});
|
||||
|
||||
const parsed = envSchema.parse(process.env);
|
||||
|
||||
if (parsed.NODE_ENV === "production" && parsed.CORS_ORIGIN === "*") {
|
||||
throw new Error("CORS_ORIGIN cannot be '*' in production");
|
||||
}
|
||||
|
||||
export const env = {
|
||||
...parsed,
|
||||
JWT_REFRESH_SECRET: parsed.JWT_REFRESH_SECRET ?? parsed.JWT_SECRET
|
||||
};
|
||||
23
backend/src/index.ts
Normal file
23
backend/src/index.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
import { createApp } from "./app";
|
||||
import { env } from "./config/env";
|
||||
import { prisma } from "./lib/prisma";
|
||||
import { startSchedulers } from "./services/scheduler.service";
|
||||
|
||||
/**
 * Server startup: connect the database, start the HTTP listener, then start
 * the background schedulers.
 */
async function bootstrap() {
  // Fail fast if the database is unreachable before accepting traffic.
  await prisma.$connect();

  const app = createApp();
  app.listen(env.PORT, () => {
    // eslint-disable-next-line no-console
    console.log(`ProxPanel API running on port ${env.PORT}`);
  });

  // Cron-style workers (billing, backups, power schedules, monitoring per
  // the env CRON settings) start after the listener is up.
  await startSchedulers();
}

// Entry point: any startup failure is logged, the DB connection released,
// and the process exits non-zero so a supervisor can restart it.
bootstrap().catch(async (error) => {
  // eslint-disable-next-line no-console
  console.error("Failed to start server:", error);
  await prisma.$disconnect();
  process.exit(1);
});
|
||||
12
backend/src/lib/http-error.ts
Normal file
12
backend/src/lib/http-error.ts
Normal file
@@ -0,0 +1,12 @@
|
||||
export class HttpError extends Error {
|
||||
status: number;
|
||||
code: string;
|
||||
details?: unknown;
|
||||
|
||||
constructor(status: number, message: string, code = "HTTP_ERROR", details?: unknown) {
|
||||
super(message);
|
||||
this.status = status;
|
||||
this.code = code;
|
||||
this.details = details;
|
||||
}
|
||||
}
|
||||
48
backend/src/lib/prisma-json.ts
Normal file
48
backend/src/lib/prisma-json.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
import type { Prisma } from "@prisma/client";
|
||||
|
||||
export function toPrismaJsonValue(value: unknown): Prisma.InputJsonValue {
|
||||
if (value === null) {
|
||||
return "null";
|
||||
}
|
||||
|
||||
if (typeof value === "string" || typeof value === "boolean") {
|
||||
return value;
|
||||
}
|
||||
|
||||
if (typeof value === "number") {
|
||||
return Number.isFinite(value) ? value : String(value);
|
||||
}
|
||||
|
||||
if (typeof value === "bigint") {
|
||||
return value.toString();
|
||||
}
|
||||
|
||||
if (value instanceof Date) {
|
||||
return value.toISOString();
|
||||
}
|
||||
|
||||
if (value instanceof Error) {
|
||||
return {
|
||||
name: value.name,
|
||||
message: value.message,
|
||||
stack: value.stack ?? ""
|
||||
};
|
||||
}
|
||||
|
||||
if (Array.isArray(value)) {
|
||||
return value.map((item) => toPrismaJsonValue(item));
|
||||
}
|
||||
|
||||
if (typeof value === "object") {
|
||||
const output: Record<string, Prisma.InputJsonValue> = {};
|
||||
|
||||
for (const [key, raw] of Object.entries(value as Record<string, unknown>)) {
|
||||
if (raw === undefined) continue;
|
||||
output[key] = toPrismaJsonValue(raw);
|
||||
}
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
return String(value);
|
||||
}
|
||||
3
backend/src/lib/prisma.ts
Normal file
3
backend/src/lib/prisma.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
import { PrismaClient } from "@prisma/client";

// Process-wide PrismaClient singleton. Import this instead of constructing
// new clients so the whole app shares one connection pool.
export const prisma = new PrismaClient();
|
||||
163
backend/src/middleware/auth.ts
Normal file
163
backend/src/middleware/auth.ts
Normal file
@@ -0,0 +1,163 @@
|
||||
import type { NextFunction, Request as ExpressRequest, Response } from "express";
|
||||
import jwt, { type JwtPayload, type SignOptions } from "jsonwebtoken";
|
||||
import type { Role } from "@prisma/client";
|
||||
import { env } from "../config/env";
|
||||
import { HttpError } from "../lib/http-error";
|
||||
|
||||
type Permission =
|
||||
| "vm:create"
|
||||
| "vm:read"
|
||||
| "vm:update"
|
||||
| "vm:delete"
|
||||
| "vm:start"
|
||||
| "vm:stop"
|
||||
| "node:manage"
|
||||
| "node:read"
|
||||
| "tenant:manage"
|
||||
| "tenant:read"
|
||||
| "billing:manage"
|
||||
| "billing:read"
|
||||
| "backup:manage"
|
||||
| "backup:read"
|
||||
| "rbac:manage"
|
||||
| "settings:manage"
|
||||
| "settings:read"
|
||||
| "audit:read"
|
||||
| "security:manage"
|
||||
| "security:read"
|
||||
| "user:manage"
|
||||
| "user:read";
|
||||
|
||||
const rolePermissions: Record<Role, Set<Permission>> = {
|
||||
SUPER_ADMIN: new Set<Permission>([
|
||||
"vm:create",
|
||||
"vm:read",
|
||||
"vm:update",
|
||||
"vm:delete",
|
||||
"vm:start",
|
||||
"vm:stop",
|
||||
"node:manage",
|
||||
"node:read",
|
||||
"tenant:manage",
|
||||
"tenant:read",
|
||||
"billing:manage",
|
||||
"billing:read",
|
||||
"backup:manage",
|
||||
"backup:read",
|
||||
"rbac:manage",
|
||||
"settings:manage",
|
||||
"settings:read",
|
||||
"audit:read",
|
||||
"security:manage",
|
||||
"security:read",
|
||||
"user:manage",
|
||||
"user:read"
|
||||
]),
|
||||
TENANT_ADMIN: new Set<Permission>([
|
||||
"vm:create",
|
||||
"vm:read",
|
||||
"vm:update",
|
||||
"vm:delete",
|
||||
"vm:start",
|
||||
"vm:stop",
|
||||
"node:read",
|
||||
"tenant:read",
|
||||
"billing:read",
|
||||
"backup:manage",
|
||||
"backup:read",
|
||||
"settings:read",
|
||||
"audit:read",
|
||||
"security:read",
|
||||
"user:read"
|
||||
]),
|
||||
OPERATOR: new Set<Permission>([
|
||||
"vm:read",
|
||||
"vm:start",
|
||||
"vm:stop",
|
||||
"node:manage",
|
||||
"node:read",
|
||||
"billing:read",
|
||||
"backup:manage",
|
||||
"backup:read",
|
||||
"audit:read",
|
||||
"security:manage",
|
||||
"security:read"
|
||||
]),
|
||||
VIEWER: new Set<Permission>([
|
||||
"vm:read",
|
||||
"node:read",
|
||||
"tenant:read",
|
||||
"billing:read",
|
||||
"backup:read",
|
||||
"audit:read",
|
||||
"security:read",
|
||||
"settings:read",
|
||||
"user:read"
|
||||
])
|
||||
};
|
||||
|
||||
export function createJwtToken(payload: Express.UserToken): string {
|
||||
const expiresIn = env.JWT_EXPIRES_IN as SignOptions["expiresIn"];
|
||||
return jwt.sign(payload, env.JWT_SECRET, {
|
||||
expiresIn
|
||||
});
|
||||
}
|
||||
|
||||
export function createRefreshToken(payload: Express.UserToken): string {
|
||||
const expiresIn = env.JWT_REFRESH_EXPIRES_IN as SignOptions["expiresIn"];
|
||||
return jwt.sign(payload, env.JWT_REFRESH_SECRET, {
|
||||
expiresIn
|
||||
});
|
||||
}
|
||||
|
||||
export function verifyRefreshToken(token: string): Express.UserToken | null {
|
||||
try {
|
||||
const decoded = jwt.verify(token, env.JWT_REFRESH_SECRET) as JwtPayload & Express.UserToken;
|
||||
if (!decoded?.id || !decoded?.email || !decoded?.role) {
|
||||
return null;
|
||||
}
|
||||
return {
|
||||
id: decoded.id,
|
||||
email: decoded.email,
|
||||
role: decoded.role,
|
||||
tenant_id: decoded.tenant_id
|
||||
};
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
export function requireAuth(req: ExpressRequest, _res: Response, next: NextFunction) {
|
||||
const authHeader = req.header("authorization");
|
||||
const token = authHeader?.startsWith("Bearer ") ? authHeader.slice(7) : null;
|
||||
|
||||
if (!token) {
|
||||
return next(new HttpError(401, "Missing bearer token", "AUTH_REQUIRED"));
|
||||
}
|
||||
|
||||
try {
|
||||
const decoded = jwt.verify(token, env.JWT_SECRET) as Express.UserToken;
|
||||
req.user = decoded;
|
||||
return next();
|
||||
} catch {
|
||||
return next(new HttpError(401, "Invalid or expired token", "INVALID_TOKEN"));
|
||||
}
|
||||
}
|
||||
|
||||
export function authorize(permission: Permission) {
|
||||
return (req: ExpressRequest, _res: Response, next: NextFunction) => {
|
||||
if (!req.user) {
|
||||
return next(new HttpError(401, "Unauthenticated", "AUTH_REQUIRED"));
|
||||
}
|
||||
const allowed = rolePermissions[req.user.role]?.has(permission);
|
||||
if (!allowed) {
|
||||
return next(new HttpError(403, "Insufficient permission", "FORBIDDEN"));
|
||||
}
|
||||
return next();
|
||||
};
|
||||
}
|
||||
|
||||
export function isTenantScopedUser(req: Pick<Express.Request, "user">): boolean {
|
||||
if (!req.user) return false;
|
||||
return req.user.role === "TENANT_ADMIN" || req.user.role === "VIEWER";
|
||||
}
|
||||
54
backend/src/middleware/error-handler.ts
Normal file
54
backend/src/middleware/error-handler.ts
Normal file
@@ -0,0 +1,54 @@
|
||||
import type { NextFunction, Request, Response } from "express";
|
||||
import { Prisma } from "@prisma/client";
|
||||
import { ZodError } from "zod";
|
||||
import { HttpError } from "../lib/http-error";
|
||||
|
||||
export function notFoundHandler(_req: Request, res: Response) {
|
||||
res.status(404).json({
|
||||
error: {
|
||||
code: "NOT_FOUND",
|
||||
message: "Resource not found"
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Centralized Express error translator (registered last in createApp).
 * Maps known error families onto a consistent
 * `{ error: { code, message, details } }` envelope; anything unrecognized
 * is logged server-side and returned as an opaque 500.
 */
export function errorHandler(error: unknown, _req: Request, res: Response, _next: NextFunction) {
  // Application-thrown errors carry their own status/code/details.
  if (error instanceof HttpError) {
    return res.status(error.status).json({
      error: {
        code: error.code,
        message: error.message,
        details: error.details
      }
    });
  }

  // Zod schema failures from request-payload parsing.
  if (error instanceof ZodError) {
    return res.status(400).json({
      error: {
        code: "VALIDATION_ERROR",
        message: "Payload validation failed",
        details: error.flatten()
      }
    });
  }

  // Known Prisma request errors (unique violations, missing rows, ...).
  // NOTE(review): this echoes the raw Prisma message and meta to clients,
  // which can leak schema/internal details — consider redacting in production.
  if (error instanceof Prisma.PrismaClientKnownRequestError) {
    return res.status(400).json({
      error: {
        code: "DATABASE_ERROR",
        message: error.message,
        details: error.meta
      }
    });
  }

  // Unknown failure: log it and return an opaque 500 to the client.
  // eslint-disable-next-line no-console
  console.error("Unhandled error:", error);
  return res.status(500).json({
    error: {
      code: "INTERNAL_SERVER_ERROR",
      message: "An unexpected server error occurred"
    }
  });
}
|
||||
60
backend/src/middleware/rate-limit.ts
Normal file
60
backend/src/middleware/rate-limit.ts
Normal file
@@ -0,0 +1,60 @@
|
||||
import type { NextFunction, Request, Response } from "express";
|
||||
|
||||
type RateLimitOptions = {
|
||||
windowMs: number;
|
||||
max: number;
|
||||
keyGenerator?: (req: Request) => string;
|
||||
};
|
||||
|
||||
type Bucket = {
|
||||
count: number;
|
||||
resetAt: number;
|
||||
};
|
||||
|
||||
export function createRateLimit(options: RateLimitOptions) {
|
||||
const windowMs = Math.max(1_000, options.windowMs);
|
||||
const max = Math.max(1, options.max);
|
||||
const buckets = new Map<string, Bucket>();
|
||||
|
||||
return (req: Request, res: Response, next: NextFunction) => {
|
||||
const key = options.keyGenerator?.(req) ?? req.ip ?? "unknown";
|
||||
const now = Date.now();
|
||||
const existing = buckets.get(key);
|
||||
|
||||
if (!existing || existing.resetAt <= now) {
|
||||
buckets.set(key, {
|
||||
count: 1,
|
||||
resetAt: now + windowMs
|
||||
});
|
||||
res.setHeader("X-RateLimit-Limit", String(max));
|
||||
res.setHeader("X-RateLimit-Remaining", String(max - 1));
|
||||
res.setHeader("X-RateLimit-Reset", String(Math.ceil((now + windowMs) / 1000)));
|
||||
return next();
|
||||
}
|
||||
|
||||
existing.count += 1;
|
||||
const remaining = Math.max(0, max - existing.count);
|
||||
res.setHeader("X-RateLimit-Limit", String(max));
|
||||
res.setHeader("X-RateLimit-Remaining", String(remaining));
|
||||
res.setHeader("X-RateLimit-Reset", String(Math.ceil(existing.resetAt / 1000)));
|
||||
|
||||
if (existing.count > max) {
|
||||
return res.status(429).json({
|
||||
error: {
|
||||
code: "RATE_LIMIT_EXCEEDED",
|
||||
message: "Too many requests. Please retry later."
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (buckets.size > 10_000) {
|
||||
for (const [bucketKey, bucketValue] of buckets.entries()) {
|
||||
if (bucketValue.resetAt <= now) {
|
||||
buckets.delete(bucketKey);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return next();
|
||||
};
|
||||
}
|
||||
123
backend/src/routes/auth.routes.ts
Normal file
123
backend/src/routes/auth.routes.ts
Normal file
@@ -0,0 +1,123 @@
|
||||
import { Router } from "express";
|
||||
import bcrypt from "bcryptjs";
|
||||
import { z } from "zod";
|
||||
import { prisma } from "../lib/prisma";
|
||||
import { HttpError } from "../lib/http-error";
|
||||
import { createJwtToken, createRefreshToken, requireAuth, verifyRefreshToken } from "../middleware/auth";
|
||||
|
||||
const router = Router();
|
||||
|
||||
// Request body for POST /login.
const loginSchema = z.object({
  email: z.string().email(),
  password: z.string().min(1)
});

// Request body for POST /refresh.
const refreshSchema = z.object({
  refresh_token: z.string().min(1)
});
|
||||
|
||||
router.post("/login", async (req, res, next) => {
|
||||
try {
|
||||
const payload = loginSchema.parse(req.body);
|
||||
const user = await prisma.user.findUnique({ where: { email: payload.email } });
|
||||
if (!user || !user.is_active) {
|
||||
throw new HttpError(401, "Invalid email or password", "INVALID_CREDENTIALS");
|
||||
}
|
||||
const matched = await bcrypt.compare(payload.password, user.password_hash);
|
||||
if (!matched) {
|
||||
throw new HttpError(401, "Invalid email or password", "INVALID_CREDENTIALS");
|
||||
}
|
||||
|
||||
await prisma.user.update({
|
||||
where: { id: user.id },
|
||||
data: { last_login_at: new Date() }
|
||||
});
|
||||
|
||||
const userPayload = {
|
||||
id: user.id,
|
||||
email: user.email,
|
||||
role: user.role,
|
||||
tenant_id: user.tenant_id
|
||||
};
|
||||
const token = createJwtToken(userPayload);
|
||||
const refreshToken = createRefreshToken(userPayload);
|
||||
|
||||
res.json({
|
||||
token,
|
||||
refresh_token: refreshToken,
|
||||
user: {
|
||||
id: user.id,
|
||||
email: user.email,
|
||||
full_name: user.full_name,
|
||||
role: user.role,
|
||||
tenant_id: user.tenant_id
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
/**
 * POST /refresh — rotate the token pair. Verifies the refresh token, then
 * re-loads the user from the database so deactivated accounts cannot keep
 * refreshing, and issues a new pair from the current DB state.
 */
router.post("/refresh", async (req, res, next) => {
  try {
    const payload = refreshSchema.parse(req.body ?? {});
    const decoded = verifyRefreshToken(payload.refresh_token);
    if (!decoded) {
      throw new HttpError(401, "Invalid refresh token", "INVALID_REFRESH_TOKEN");
    }

    // Re-check account status; do not trust stale claims in the token.
    const user = await prisma.user.findUnique({
      where: { id: decoded.id },
      select: {
        id: true,
        email: true,
        role: true,
        tenant_id: true,
        is_active: true
      }
    });
    if (!user || !user.is_active) {
      throw new HttpError(401, "Refresh token user is invalid", "INVALID_REFRESH_TOKEN");
    }

    // Build fresh claims from current DB values (role/tenant may have changed).
    const userPayload = {
      id: user.id,
      email: user.email,
      role: user.role,
      tenant_id: user.tenant_id
    };
    const token = createJwtToken(userPayload);
    const refreshToken = createRefreshToken(userPayload);

    res.json({
      token,
      refresh_token: refreshToken
    });
  } catch (error) {
    next(error);
  }
});
|
||||
|
||||
/**
 * GET /me — return the authenticated user's profile from the database.
 * 404 if the account was deleted after the token was issued; 401 if it was
 * deactivated.
 */
router.get("/me", requireAuth, async (req, res, next) => {
  try {
    const user = await prisma.user.findUnique({
      where: { id: req.user!.id },
      select: {
        id: true,
        email: true,
        full_name: true,
        role: true,
        tenant_id: true,
        is_active: true,
        created_at: true
      }
    });
    if (!user) throw new HttpError(404, "User not found", "USER_NOT_FOUND");
    if (!user.is_active) throw new HttpError(401, "User account is inactive", "USER_INACTIVE");
    res.json(user);
  } catch (error) {
    next(error);
  }
});
|
||||
|
||||
export default router;
|
||||
491
backend/src/routes/backup.routes.ts
Normal file
491
backend/src/routes/backup.routes.ts
Normal file
@@ -0,0 +1,491 @@
|
||||
import {
|
||||
BackupRestoreMode,
|
||||
BackupRestoreStatus,
|
||||
BackupSchedule,
|
||||
BackupSource,
|
||||
BackupStatus,
|
||||
BackupType,
|
||||
SnapshotFrequency
|
||||
} from "@prisma/client";
|
||||
import { Router } from "express";
|
||||
import { z } from "zod";
|
||||
import { HttpError } from "../lib/http-error";
|
||||
import { prisma } from "../lib/prisma";
|
||||
import { authorize, isTenantScopedUser, requireAuth } from "../middleware/auth";
|
||||
import { logAudit } from "../services/audit.service";
|
||||
import {
|
||||
createBackup,
|
||||
createRestoreTask,
|
||||
createSnapshotJob,
|
||||
deleteBackup,
|
||||
deleteSnapshotJob,
|
||||
listBackupPolicies,
|
||||
listBackups,
|
||||
listRestoreTasks,
|
||||
listSnapshotJobs,
|
||||
runRestoreTaskNow,
|
||||
runSnapshotJobNow,
|
||||
toggleBackupProtection,
|
||||
updateSnapshotJob,
|
||||
upsertBackupPolicy
|
||||
} from "../services/backup.service";
|
||||
|
||||
const router = Router();
|
||||
|
||||
// Body for POST / — request a new backup of a VM. Only vm_id is
// required; the backup service fills in defaults for the rest.
const createBackupSchema = z.object({
  vm_id: z.string().min(1),
  type: z.nativeEnum(BackupType).optional(),
  source: z.nativeEnum(BackupSource).optional(),
  schedule: z.nativeEnum(BackupSchedule).optional(),
  retention_days: z.number().int().positive().optional(),
  storage: z.string().optional(),
  route_key: z.string().optional(),
  is_protected: z.boolean().optional(),
  notes: z.string().optional(),
  requested_size_mb: z.number().positive().optional()
});

// Body for PATCH /:id/protection — toggle delete protection on a backup.
const protectionSchema = z.object({
  is_protected: z.boolean()
});

// Body for POST /restores — create (and by default immediately run) a
// restore task from an existing backup.
const createRestoreSchema = z.object({
  backup_id: z.string().min(1),
  // Optional alternate VM to restore into.
  target_vm_id: z.string().optional(),
  mode: z.nativeEnum(BackupRestoreMode),
  // Specific file paths for file-level restores.
  requested_files: z.array(z.string().min(1)).optional(),
  pbs_enabled: z.boolean().optional(),
  run_immediately: z.boolean().default(true)
});

// Body for POST /snapshot-jobs — create a recurring snapshot schedule.
// All schedule times are expressed in UTC.
const createSnapshotSchema = z.object({
  vm_id: z.string().min(1),
  name: z.string().min(2),
  frequency: z.nativeEnum(SnapshotFrequency),
  interval: z.number().int().positive().optional(),
  // 0-6 — presumably Sunday-Saturday; confirm against the scheduler.
  day_of_week: z.number().int().min(0).max(6).optional(),
  hour_utc: z.number().int().min(0).max(23).optional(),
  minute_utc: z.number().int().min(0).max(59).optional(),
  retention: z.number().int().positive().optional(),
  enabled: z.boolean().optional()
});

// Body for PATCH /snapshot-jobs/:id — every field optional; day_of_week
// additionally accepts null to clear a previously set value.
const updateSnapshotSchema = z.object({
  name: z.string().min(2).optional(),
  frequency: z.nativeEnum(SnapshotFrequency).optional(),
  interval: z.number().int().positive().optional(),
  day_of_week: z.number().int().min(0).max(6).nullable().optional(),
  hour_utc: z.number().int().min(0).max(23).optional(),
  minute_utc: z.number().int().min(0).max(59).optional(),
  retention: z.number().int().positive().optional(),
  enabled: z.boolean().optional()
});

// Body for POST /policies and PATCH /policies/:id — create or update a
// backup policy scoped by tenant and/or billing plan.
const upsertPolicySchema = z.object({
  tenant_id: z.string().optional(),
  billing_plan_id: z.string().optional(),
  max_files: z.number().int().positive().optional(),
  max_total_size_mb: z.number().positive().optional(),
  max_protected_files: z.number().int().positive().optional(),
  allow_file_restore: z.boolean().optional(),
  allow_cross_vm_restore: z.boolean().optional(),
  allow_pbs_restore: z.boolean().optional()
});
|
||||
|
||||
function parseOptionalBackupStatus(value: unknown) {
|
||||
if (typeof value !== "string") return undefined;
|
||||
const normalized = value.toUpperCase();
|
||||
return Object.values(BackupStatus).includes(normalized as BackupStatus)
|
||||
? (normalized as BackupStatus)
|
||||
: undefined;
|
||||
}
|
||||
|
||||
function parseOptionalRestoreStatus(value: unknown) {
|
||||
if (typeof value !== "string") return undefined;
|
||||
const normalized = value.toUpperCase();
|
||||
return Object.values(BackupRestoreStatus).includes(normalized as BackupRestoreStatus)
|
||||
? (normalized as BackupRestoreStatus)
|
||||
: undefined;
|
||||
}
|
||||
|
||||
async function ensureVmTenantScope(vmId: string, req: Express.Request) {
|
||||
const vm = await prisma.virtualMachine.findUnique({
|
||||
where: { id: vmId },
|
||||
select: {
|
||||
id: true,
|
||||
tenant_id: true,
|
||||
name: true
|
||||
}
|
||||
});
|
||||
|
||||
if (!vm) throw new HttpError(404, "VM not found", "VM_NOT_FOUND");
|
||||
|
||||
if (isTenantScopedUser(req) && req.user?.tenant_id && vm.tenant_id !== req.user.tenant_id) {
|
||||
throw new HttpError(403, "Access denied for tenant scope", "TENANT_SCOPE_VIOLATION");
|
||||
}
|
||||
|
||||
return vm;
|
||||
}
|
||||
|
||||
async function ensureBackupTenantScope(backupId: string, req: Express.Request) {
|
||||
const backup = await prisma.backup.findUnique({
|
||||
where: { id: backupId },
|
||||
include: {
|
||||
vm: {
|
||||
select: {
|
||||
id: true,
|
||||
tenant_id: true,
|
||||
name: true
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (!backup) throw new HttpError(404, "Backup not found", "BACKUP_NOT_FOUND");
|
||||
|
||||
const tenantId = backup.tenant_id ?? backup.vm.tenant_id;
|
||||
if (isTenantScopedUser(req) && req.user?.tenant_id && tenantId !== req.user.tenant_id) {
|
||||
throw new HttpError(403, "Access denied for tenant scope", "TENANT_SCOPE_VIOLATION");
|
||||
}
|
||||
|
||||
return backup;
|
||||
}
|
||||
|
||||
async function ensureRestoreTaskTenantScope(taskId: string, req: Express.Request) {
|
||||
const task = await prisma.backupRestoreTask.findUnique({
|
||||
where: { id: taskId },
|
||||
include: {
|
||||
source_vm: {
|
||||
select: {
|
||||
tenant_id: true
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (!task) throw new HttpError(404, "Restore task not found", "RESTORE_TASK_NOT_FOUND");
|
||||
if (isTenantScopedUser(req) && req.user?.tenant_id && task.source_vm.tenant_id !== req.user.tenant_id) {
|
||||
throw new HttpError(403, "Access denied for tenant scope", "TENANT_SCOPE_VIOLATION");
|
||||
}
|
||||
|
||||
return task;
|
||||
}
|
||||
|
||||
async function ensureSnapshotJobTenantScope(jobId: string, req: Express.Request) {
|
||||
const job = await prisma.snapshotJob.findUnique({
|
||||
where: { id: jobId },
|
||||
include: {
|
||||
vm: {
|
||||
select: {
|
||||
tenant_id: true
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (!job) throw new HttpError(404, "Snapshot job not found", "SNAPSHOT_JOB_NOT_FOUND");
|
||||
if (isTenantScopedUser(req) && req.user?.tenant_id && job.vm.tenant_id !== req.user.tenant_id) {
|
||||
throw new HttpError(403, "Access denied for tenant scope", "TENANT_SCOPE_VIOLATION");
|
||||
}
|
||||
return job;
|
||||
}
|
||||
|
||||
router.get("/", requireAuth, authorize("backup:read"), async (req, res, next) => {
|
||||
try {
|
||||
const status = parseOptionalBackupStatus(req.query.status);
|
||||
const vmId = typeof req.query.vm_id === "string" ? req.query.vm_id : undefined;
|
||||
const limit = typeof req.query.limit === "string" ? Number(req.query.limit) : undefined;
|
||||
const offset = typeof req.query.offset === "string" ? Number(req.query.offset) : undefined;
|
||||
|
||||
if (vmId) {
|
||||
await ensureVmTenantScope(vmId, req);
|
||||
}
|
||||
|
||||
const result = await listBackups({
|
||||
tenantId: isTenantScopedUser(req) ? req.user?.tenant_id ?? undefined : undefined,
|
||||
status,
|
||||
vmId,
|
||||
limit,
|
||||
offset
|
||||
});
|
||||
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/", requireAuth, authorize("backup:manage"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = createBackupSchema.parse(req.body ?? {});
|
||||
await ensureVmTenantScope(payload.vm_id, req);
|
||||
|
||||
const backup = await createBackup({
|
||||
vmId: payload.vm_id,
|
||||
type: payload.type,
|
||||
source: payload.source,
|
||||
schedule: payload.schedule,
|
||||
retentionDays: payload.retention_days,
|
||||
storage: payload.storage,
|
||||
routeKey: payload.route_key,
|
||||
isProtected: payload.is_protected,
|
||||
notes: payload.notes,
|
||||
requestedSizeMb: payload.requested_size_mb,
|
||||
createdBy: req.user?.email
|
||||
});
|
||||
|
||||
await logAudit({
|
||||
action: "backup.create",
|
||||
resource_type: "BACKUP",
|
||||
resource_id: backup.id,
|
||||
resource_name: backup.vm_name,
|
||||
actor_email: req.user!.email,
|
||||
actor_role: req.user!.role,
|
||||
details: payload,
|
||||
ip_address: req.ip
|
||||
});
|
||||
|
||||
res.status(201).json(backup);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.patch("/:id/protection", requireAuth, authorize("backup:manage"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = protectionSchema.parse(req.body ?? {});
|
||||
await ensureBackupTenantScope(req.params.id, req);
|
||||
|
||||
const backup = await toggleBackupProtection(req.params.id, payload.is_protected);
|
||||
res.json(backup);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.delete("/:id", requireAuth, authorize("backup:manage"), async (req, res, next) => {
|
||||
try {
|
||||
await ensureBackupTenantScope(req.params.id, req);
|
||||
const force = req.query.force === "true";
|
||||
await deleteBackup(req.params.id, force);
|
||||
res.status(204).send();
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.get("/restores", requireAuth, authorize("backup:read"), async (req, res, next) => {
|
||||
try {
|
||||
const status = parseOptionalRestoreStatus(req.query.status);
|
||||
const limit = typeof req.query.limit === "string" ? Number(req.query.limit) : undefined;
|
||||
const offset = typeof req.query.offset === "string" ? Number(req.query.offset) : undefined;
|
||||
|
||||
const result = await listRestoreTasks({
|
||||
tenantId: isTenantScopedUser(req) ? req.user?.tenant_id ?? undefined : undefined,
|
||||
status,
|
||||
limit,
|
||||
offset
|
||||
});
|
||||
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/restores", requireAuth, authorize("backup:manage"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = createRestoreSchema.parse(req.body ?? {});
|
||||
await ensureBackupTenantScope(payload.backup_id, req);
|
||||
|
||||
if (payload.target_vm_id) {
|
||||
await ensureVmTenantScope(payload.target_vm_id, req);
|
||||
}
|
||||
|
||||
const task = await createRestoreTask({
|
||||
backupId: payload.backup_id,
|
||||
targetVmId: payload.target_vm_id,
|
||||
mode: payload.mode,
|
||||
requestedFiles: payload.requested_files,
|
||||
pbsEnabled: payload.pbs_enabled,
|
||||
createdBy: req.user?.email,
|
||||
runImmediately: payload.run_immediately
|
||||
});
|
||||
|
||||
await logAudit({
|
||||
action: "backup.restore.create",
|
||||
resource_type: "BACKUP",
|
||||
resource_id: payload.backup_id,
|
||||
actor_email: req.user!.email,
|
||||
actor_role: req.user!.role,
|
||||
details: payload,
|
||||
ip_address: req.ip
|
||||
});
|
||||
|
||||
res.status(201).json(task);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/restores/:id/run", requireAuth, authorize("backup:manage"), async (req, res, next) => {
|
||||
try {
|
||||
await ensureRestoreTaskTenantScope(req.params.id, req);
|
||||
const task = await runRestoreTaskNow(req.params.id);
|
||||
res.json(task);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.get("/snapshot-jobs", requireAuth, authorize("backup:read"), async (req, res, next) => {
|
||||
try {
|
||||
const jobs = await listSnapshotJobs({
|
||||
tenantId: isTenantScopedUser(req) ? req.user?.tenant_id ?? undefined : undefined
|
||||
});
|
||||
res.json({ data: jobs });
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/snapshot-jobs", requireAuth, authorize("backup:manage"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = createSnapshotSchema.parse(req.body ?? {});
|
||||
await ensureVmTenantScope(payload.vm_id, req);
|
||||
|
||||
const job = await createSnapshotJob({
|
||||
vmId: payload.vm_id,
|
||||
name: payload.name,
|
||||
frequency: payload.frequency,
|
||||
interval: payload.interval,
|
||||
dayOfWeek: payload.day_of_week,
|
||||
hourUtc: payload.hour_utc,
|
||||
minuteUtc: payload.minute_utc,
|
||||
retention: payload.retention,
|
||||
enabled: payload.enabled,
|
||||
createdBy: req.user?.email
|
||||
});
|
||||
|
||||
await logAudit({
|
||||
action: "snapshot_job.create",
|
||||
resource_type: "BACKUP",
|
||||
resource_id: job.id,
|
||||
resource_name: job.name,
|
||||
actor_email: req.user!.email,
|
||||
actor_role: req.user!.role,
|
||||
details: payload,
|
||||
ip_address: req.ip
|
||||
});
|
||||
|
||||
res.status(201).json(job);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.patch("/snapshot-jobs/:id", requireAuth, authorize("backup:manage"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = updateSnapshotSchema.parse(req.body ?? {});
|
||||
await ensureSnapshotJobTenantScope(req.params.id, req);
|
||||
|
||||
const job = await updateSnapshotJob(req.params.id, {
|
||||
name: payload.name,
|
||||
frequency: payload.frequency,
|
||||
interval: payload.interval,
|
||||
dayOfWeek: payload.day_of_week,
|
||||
hourUtc: payload.hour_utc,
|
||||
minuteUtc: payload.minute_utc,
|
||||
retention: payload.retention,
|
||||
enabled: payload.enabled
|
||||
});
|
||||
|
||||
res.json(job);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.delete("/snapshot-jobs/:id", requireAuth, authorize("backup:manage"), async (req, res, next) => {
|
||||
try {
|
||||
await ensureSnapshotJobTenantScope(req.params.id, req);
|
||||
await deleteSnapshotJob(req.params.id);
|
||||
res.status(204).send();
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/snapshot-jobs/:id/run", requireAuth, authorize("backup:manage"), async (req, res, next) => {
|
||||
try {
|
||||
await ensureSnapshotJobTenantScope(req.params.id, req);
|
||||
const result = await runSnapshotJobNow(req.params.id);
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.get("/policies", requireAuth, authorize("backup:read"), async (_req, res, next) => {
|
||||
try {
|
||||
const all = await listBackupPolicies();
|
||||
const data =
|
||||
isTenantScopedUser(_req) && _req.user?.tenant_id
|
||||
? all.filter((item) => item.tenant_id === _req.user?.tenant_id)
|
||||
: all;
|
||||
res.json({ data });
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/policies", requireAuth, authorize("backup:manage"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = upsertPolicySchema.parse(req.body ?? {});
|
||||
const tenantId = isTenantScopedUser(req) ? req.user?.tenant_id ?? undefined : payload.tenant_id;
|
||||
if (isTenantScopedUser(req) && payload.tenant_id && req.user?.tenant_id && payload.tenant_id !== req.user.tenant_id) {
|
||||
throw new HttpError(403, "Access denied for tenant scope", "TENANT_SCOPE_VIOLATION");
|
||||
}
|
||||
const policy = await upsertBackupPolicy({
|
||||
tenantId,
|
||||
billingPlanId: payload.billing_plan_id,
|
||||
maxFiles: payload.max_files,
|
||||
maxTotalSizeMb: payload.max_total_size_mb,
|
||||
maxProtectedFiles: payload.max_protected_files,
|
||||
allowFileRestore: payload.allow_file_restore,
|
||||
allowCrossVmRestore: payload.allow_cross_vm_restore,
|
||||
allowPbsRestore: payload.allow_pbs_restore
|
||||
});
|
||||
|
||||
res.status(201).json(policy);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.patch("/policies/:id", requireAuth, authorize("backup:manage"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = upsertPolicySchema.parse(req.body ?? {});
|
||||
const tenantId = isTenantScopedUser(req) ? req.user?.tenant_id ?? undefined : payload.tenant_id;
|
||||
if (isTenantScopedUser(req) && payload.tenant_id && req.user?.tenant_id && payload.tenant_id !== req.user.tenant_id) {
|
||||
throw new HttpError(403, "Access denied for tenant scope", "TENANT_SCOPE_VIOLATION");
|
||||
}
|
||||
const policy = await upsertBackupPolicy({
|
||||
policyId: req.params.id,
|
||||
tenantId,
|
||||
billingPlanId: payload.billing_plan_id,
|
||||
maxFiles: payload.max_files,
|
||||
maxTotalSizeMb: payload.max_total_size_mb,
|
||||
maxProtectedFiles: payload.max_protected_files,
|
||||
allowFileRestore: payload.allow_file_restore,
|
||||
allowCrossVmRestore: payload.allow_cross_vm_restore,
|
||||
allowPbsRestore: payload.allow_pbs_restore
|
||||
});
|
||||
|
||||
res.json(policy);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
export default router;
|
||||
46
backend/src/routes/billing.routes.ts
Normal file
46
backend/src/routes/billing.routes.ts
Normal file
@@ -0,0 +1,46 @@
|
||||
import { Router } from "express";
|
||||
import { z } from "zod";
|
||||
import { authorize, requireAuth } from "../middleware/auth";
|
||||
import { generateInvoicesFromUnbilledUsage, markInvoicePaid, meterHourlyUsage } from "../services/billing.service";
|
||||
|
||||
const router = Router();
|
||||
|
||||
router.post("/meter/hourly", requireAuth, authorize("billing:manage"), async (req, res, next) => {
|
||||
try {
|
||||
const result = await meterHourlyUsage(req.user?.email ?? "system@proxpanel.local");
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/invoices/generate", requireAuth, authorize("billing:manage"), async (req, res, next) => {
|
||||
try {
|
||||
const result = await generateInvoicesFromUnbilledUsage(req.user?.email ?? "system@proxpanel.local");
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
// Body for POST /invoices/:id/pay — how an invoice was settled.
const markPaidSchema = z.object({
  // Settlement channel; manual settlement is the default.
  payment_provider: z.enum(["PAYSTACK", "FLUTTERWAVE", "MANUAL"]).default("MANUAL"),
  // External payment reference (e.g. provider transaction id).
  payment_reference: z.string().min(2)
});
|
||||
|
||||
router.post("/invoices/:id/pay", requireAuth, authorize("billing:manage"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = markPaidSchema.parse(req.body ?? {});
|
||||
const invoice = await markInvoicePaid(
|
||||
req.params.id,
|
||||
payload.payment_provider,
|
||||
payload.payment_reference,
|
||||
req.user?.email ?? "system@proxpanel.local"
|
||||
);
|
||||
res.json(invoice);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
export default router;
|
||||
1247
backend/src/routes/client.routes.ts
Normal file
1247
backend/src/routes/client.routes.ts
Normal file
File diff suppressed because it is too large
Load Diff
390
backend/src/routes/dashboard.routes.ts
Normal file
390
backend/src/routes/dashboard.routes.ts
Normal file
@@ -0,0 +1,390 @@
|
||||
import { Router } from "express";
|
||||
import { IpScope, IpVersion } from "@prisma/client";
|
||||
import { authorize, isTenantScopedUser, requireAuth } from "../middleware/auth";
|
||||
import { prisma } from "../lib/prisma";
|
||||
import { subnetUtilizationDashboard } from "../services/network.service";
|
||||
|
||||
const router = Router();
|
||||
|
||||
type HeatLevel = "critical" | "warning" | "elevated" | "healthy";
|
||||
|
||||
function clampInteger(value: unknown, min: number, max: number, fallback: number) {
|
||||
if (typeof value !== "string") return fallback;
|
||||
const parsed = Number(value);
|
||||
if (!Number.isInteger(parsed)) return fallback;
|
||||
return Math.min(Math.max(parsed, min), max);
|
||||
}
|
||||
|
||||
function toUtcDayStart(date: Date) {
|
||||
return new Date(Date.UTC(date.getUTCFullYear(), date.getUTCMonth(), date.getUTCDate()));
|
||||
}
|
||||
|
||||
function toDateKey(date: Date) {
|
||||
return date.toISOString().slice(0, 10);
|
||||
}
|
||||
|
||||
function resolveHeatLevel(pressurePct: number): HeatLevel {
|
||||
if (pressurePct >= 90) return "critical";
|
||||
if (pressurePct >= 75) return "warning";
|
||||
if (pressurePct >= 60) return "elevated";
|
||||
return "healthy";
|
||||
}
|
||||
|
||||
// GET /summary — dashboard headline metrics, hourly revenue for the
// last 7 days, the top 5 VMs by cost, and the 8 most recently created
// VMs. Tenant-scoped callers see only their tenant's VMs / invoices /
// usage; node and tenant counts are always global.
router.get("/summary", requireAuth, authorize("vm:read"), async (req, res, next) => {
  try {
    const tenantScoped = isTenantScopedUser(req) && req.user?.tenant_id;
    // Empty filter object = no tenant restriction (platform-wide view).
    const tenantWhere = tenantScoped ? { tenant_id: req.user!.tenant_id! } : {};

    // Independent count/aggregate queries run in parallel.
    const [vmTotal, vmRunning, nodeTotal, tenantTotal, invoicePaidAgg, invoicePendingAgg] = await Promise.all([
      prisma.virtualMachine.count({ where: tenantWhere }),
      prisma.virtualMachine.count({ where: { ...tenantWhere, status: "RUNNING" } }),
      prisma.proxmoxNode.count(),
      prisma.tenant.count(),
      prisma.invoice.aggregate({
        where: { ...tenantWhere, status: "PAID" },
        _sum: { amount: true }
      }),
      prisma.invoice.aggregate({
        where: { ...tenantWhere, status: "PENDING" },
        _sum: { amount: true }
      })
    ]);

    // Usage records from the last 7 days feed both charts below.
    const usage = await prisma.usageRecord.findMany({
      where: {
        ...tenantWhere,
        period_start: {
          gte: new Date(Date.now() - 7 * 24 * 60 * 60 * 1000)
        }
      },
      orderBy: { period_start: "asc" }
    });

    // Bucket revenue by hour: key is the ISO timestamp truncated to the
    // hour ("YYYY-MM-DDTHH" + ":00:00Z").
    const hourlyRevenueMap = new Map<string, number>();
    for (const record of usage) {
      const key = new Date(record.period_start).toISOString().slice(0, 13) + ":00:00Z";
      hourlyRevenueMap.set(key, (hourlyRevenueMap.get(key) ?? 0) + Number(record.total_cost));
    }

    // Accumulate per-VM cost totals over the same 7-day window.
    const topVmMap = new Map<string, { vm_name: string; total: number }>();
    for (const record of usage) {
      const current = topVmMap.get(record.vm_id) ?? { vm_name: record.vm_name, total: 0 };
      current.total += Number(record.total_cost);
      topVmMap.set(record.vm_id, current);
    }

    // Five most expensive VMs, descending by total cost.
    const topVms = Array.from(topVmMap.entries())
      .map(([vm_id, value]) => ({ vm_id, ...value }))
      .sort((a, b) => b.total - a.total)
      .slice(0, 5);

    const recentVms = await prisma.virtualMachine.findMany({
      where: tenantWhere,
      orderBy: { created_at: "desc" },
      take: 8,
      select: {
        id: true,
        name: true,
        status: true,
        node: true,
        tenant_id: true,
        cpu_usage: true,
        ram_usage: true,
        disk_usage: true,
        created_at: true
      }
    });

    res.json({
      metrics: {
        vm_total: vmTotal,
        vm_running: vmRunning,
        node_total: nodeTotal,
        tenant_total: tenantTotal,
        // Aggregate sums may be null (no rows); coerce to plain numbers.
        revenue_paid_total: Number(invoicePaidAgg._sum.amount ?? 0),
        revenue_pending_total: Number(invoicePendingAgg._sum.amount ?? 0)
      },
      hourly_revenue_7d: Array.from(hourlyRevenueMap.entries()).map(([time, value]) => ({
        time,
        value
      })),
      top_vms_by_cost: topVms,
      recent_vms: recentVms
    });
  } catch (error) {
    next(error);
  }
});
|
||||
|
||||
// GET /network-utilization — IP-capacity dashboard: a heatmap of the
// most-pressured subnets plus per-tenant daily IP-assignment trends.
// Query params: tenant_id (ignored for tenant-scoped users, who are
// pinned to their own tenant), scope, version, node_hostname, bridge,
// vlan_tag, days (7-60, default 14), max_tenants (1-10, default 5).
router.get("/network-utilization", requireAuth, authorize("vm:read"), async (req, res, next) => {
  try {
    const tenantScoped = isTenantScopedUser(req) && req.user?.tenant_id;
    // Tenant-scoped callers are forced onto their own tenant; platform
    // callers may choose one via ?tenant_id.
    const selectedTenantId =
      tenantScoped && req.user?.tenant_id
        ? req.user.tenant_id
        : typeof req.query.tenant_id === "string"
          ? req.query.tenant_id
          : undefined;

    const scopeQuery = typeof req.query.scope === "string" ? req.query.scope.toUpperCase() : undefined;
    const versionQuery = typeof req.query.version === "string" ? req.query.version.toUpperCase() : undefined;
    const rawVlanTag = typeof req.query.vlan_tag === "string" ? Number(req.query.vlan_tag) : undefined;
    const vlanTag = typeof rawVlanTag === "number" && Number.isInteger(rawVlanTag) ? rawVlanTag : undefined;

    // Only accept values that are actual members of the Prisma enums.
    const scope = Object.values(IpScope).includes(scopeQuery as IpScope) ? (scopeQuery as IpScope) : undefined;
    const version = Object.values(IpVersion).includes(versionQuery as IpVersion) ? (versionQuery as IpVersion) : undefined;

    const days = clampInteger(req.query.days, 7, 60, 14);
    const maxTenants = clampInteger(req.query.max_tenants, 1, 10, 5);

    const subnetDashboard = await subnetUtilizationDashboard({
      scope,
      version,
      node_hostname: typeof req.query.node_hostname === "string" ? req.query.node_hostname : undefined,
      bridge: typeof req.query.bridge === "string" ? req.query.bridge : undefined,
      vlan_tag: vlanTag,
      tenant_id: selectedTenantId
    });

    // The first 18 subnets (in the service's order) become heatmap cells.
    const heatmapCells = subnetDashboard.subnets.slice(0, 18).map((subnet, index) => ({
      rank: index + 1,
      subnet: subnet.subnet,
      scope: subnet.scope,
      version: subnet.version,
      node_hostname: subnet.node_hostname,
      bridge: subnet.bridge,
      vlan_tag: subnet.vlan_tag,
      total: subnet.total,
      assigned: subnet.assigned,
      reserved: subnet.reserved,
      available: subnet.available,
      utilization_pct: subnet.utilization_pct,
      pressure_pct: subnet.pressure_pct,
      heat_level: resolveHeatLevel(subnet.pressure_pct)
    }));

    // Summary counts span ALL subnets, not just the 18 rendered cells.
    const heatmapSummary = subnetDashboard.subnets.reduce(
      (acc, subnet) => {
        const level = resolveHeatLevel(subnet.pressure_pct);
        acc.total_subnets += 1;
        if (level === "critical") acc.critical += 1;
        if (level === "warning") acc.warning += 1;
        if (level === "elevated") acc.elevated += 1;
        if (level === "healthy") acc.healthy += 1;
        return acc;
      },
      {
        total_subnets: 0,
        critical: 0,
        warning: 0,
        elevated: 0,
        healthy: 0
      }
    );

    // Tenants to trend: the explicitly selected one, or the tenants with
    // the most active IP assignments.
    let tenantIds: string[] = [];
    if (selectedTenantId) {
      tenantIds = [selectedTenantId];
    } else {
      const groupedTenants = await prisma.ipAssignment.groupBy({
        by: ["tenant_id"],
        where: {
          is_active: true,
          tenant_id: {
            not: null
          }
        },
        _count: {
          _all: true
        },
        orderBy: {
          _count: {
            tenant_id: "desc"
          }
        },
        take: maxTenants
      });

      // Drop null ids (the type guard also narrows string | null to string).
      tenantIds = groupedTenants.map((item) => item.tenant_id).filter((item): item is string => Boolean(item));
    }

    // No candidate tenants: return the heatmap with an empty trend section.
    if (tenantIds.length === 0) {
      return res.json({
        generated_at: new Date().toISOString(),
        subnet_heatmap: {
          summary: heatmapSummary,
          cells: heatmapCells
        },
        tenant_trends: {
          window_days: days,
          series: [],
          chart_points: []
        }
      });
    }

    // Trend window: `days` whole UTC days ending today.
    const rangeEnd = new Date();
    rangeEnd.setUTCHours(23, 59, 59, 999);
    const rangeStart = toUtcDayStart(rangeEnd);
    rangeStart.setUTCDate(rangeStart.getUTCDate() - (days - 1));

    // One frame per day; `end` is the last millisecond of that UTC day.
    const dayFrames = Array.from({ length: days }, (_, index) => {
      const start = new Date(rangeStart);
      start.setUTCDate(rangeStart.getUTCDate() + index);
      const end = new Date(start);
      end.setUTCHours(23, 59, 59, 999);
      return {
        key: toDateKey(start),
        end
      };
    });

    // Fetch tenants, their quotas, and every assignment overlapping the
    // window (assigned before the window ends, released inside or after it).
    const [tenants, quotas, assignments] = await Promise.all([
      prisma.tenant.findMany({
        where: {
          id: {
            in: tenantIds
          }
        },
        select: {
          id: true,
          name: true
        }
      }),
      prisma.tenantIpQuota.findMany({
        where: {
          tenant_id: {
            in: tenantIds
          }
        },
        select: {
          tenant_id: true,
          ipv4_limit: true,
          ipv6_limit: true,
          burst_allowed: true
        }
      }),
      prisma.ipAssignment.findMany({
        where: {
          tenant_id: {
            in: tenantIds
          },
          assigned_at: {
            lte: rangeEnd
          },
          OR: [
            {
              released_at: null
            },
            {
              released_at: {
                gte: rangeStart
              }
            }
          ]
        },
        select: {
          tenant_id: true,
          assigned_at: true,
          released_at: true,
          ip_address: {
            select: {
              version: true
            }
          }
        }
      })
    ]);

    const tenantMap = new Map(tenants.map((tenant) => [tenant.id, tenant]));
    const quotaMap = new Map(quotas.map((quota) => [quota.tenant_id, quota]));
    const assignmentsByTenant = new Map<string, typeof assignments>();

    // Group assignments by tenant for the per-day scans below.
    for (const assignment of assignments) {
      if (!assignment.tenant_id) continue;
      if (!assignmentsByTenant.has(assignment.tenant_id)) {
        assignmentsByTenant.set(assignment.tenant_id, []);
      }
      assignmentsByTenant.get(assignment.tenant_id)!.push(assignment);
    }

    // Preserve the ranking order; drop ids with no matching tenant row.
    const orderedTenantIds = tenantIds.filter((tenantId) => tenantMap.has(tenantId));
    const series = orderedTenantIds.map((tenantId) => {
      const tenant = tenantMap.get(tenantId)!;
      const quota = quotaMap.get(tenantId);
      const tenantAssignments = assignmentsByTenant.get(tenantId) ?? [];

      const points = dayFrames.map((day) => {
        let assignedIpv4 = 0;
        let assignedIpv6 = 0;

        // An assignment counts toward a day if it was still active at
        // that day's last millisecond.
        for (const assignment of tenantAssignments) {
          const activeAtDayEnd =
            assignment.assigned_at <= day.end && (!assignment.released_at || assignment.released_at > day.end);
          if (!activeAtDayEnd) continue;
          if (assignment.ip_address.version === IpVersion.IPV4) assignedIpv4 += 1;
          if (assignment.ip_address.version === IpVersion.IPV6) assignedIpv6 += 1;
        }

        // Quota utilization is the worse of the IPv4/IPv6 ratios, only
        // considering families that have a positive limit.
        const quotaPressure: number[] = [];
        if (typeof quota?.ipv4_limit === "number" && quota.ipv4_limit > 0) {
          quotaPressure.push((assignedIpv4 / quota.ipv4_limit) * 100);
        }
        if (typeof quota?.ipv6_limit === "number" && quota.ipv6_limit > 0) {
          quotaPressure.push((assignedIpv6 / quota.ipv6_limit) * 100);
        }

        return {
          date: day.key,
          assigned_total: assignedIpv4 + assignedIpv6,
          assigned_ipv4: assignedIpv4,
          assigned_ipv6: assignedIpv6,
          quota_utilization_pct: quotaPressure.length > 0 ? Number(Math.max(...quotaPressure).toFixed(2)) : null
        };
      });

      // The last frame is "today", so its total is the current count.
      const lastPoint = points[points.length - 1];
      return {
        tenant_id: tenant.id,
        tenant_name: tenant.name,
        current_assigned: lastPoint?.assigned_total ?? 0,
        peak_assigned: points.reduce((peak, point) => (point.assigned_total > peak ? point.assigned_total : peak), 0),
        quota: {
          ipv4_limit: quota?.ipv4_limit ?? null,
          ipv6_limit: quota?.ipv6_limit ?? null,
          burst_allowed: quota?.burst_allowed ?? false
        },
        points
      };
    });

    // Chart-friendly shape: one row per day, one column per tenant id.
    const chartPoints = dayFrames.map((day, index) => {
      const point: Record<string, string | number> = {
        date: day.key
      };

      for (const tenant of series) {
        point[tenant.tenant_id] = tenant.points[index]?.assigned_total ?? 0;
      }

      return point;
    });

    return res.json({
      generated_at: new Date().toISOString(),
      subnet_heatmap: {
        summary: heatmapSummary,
        cells: heatmapCells
      },
      tenant_trends: {
        window_days: days,
        series,
        chart_points: chartPoints
      }
    });
  } catch (error) {
    return next(error);
  }
});
|
||||
|
||||
export default router;
|
||||
22
backend/src/routes/health.routes.ts
Normal file
22
backend/src/routes/health.routes.ts
Normal file
@@ -0,0 +1,22 @@
|
||||
import { Router } from "express";
|
||||
import { prisma } from "../lib/prisma";
|
||||
|
||||
const router = Router();
|
||||
|
||||
router.get("/", async (_req, res) => {
|
||||
let db = "ok";
|
||||
try {
|
||||
await prisma.$queryRaw`SELECT 1`;
|
||||
} catch {
|
||||
db = "error";
|
||||
}
|
||||
res.json({
|
||||
status: db === "ok" ? "ok" : "degraded",
|
||||
services: {
|
||||
database: db
|
||||
},
|
||||
timestamp: new Date().toISOString()
|
||||
});
|
||||
});
|
||||
|
||||
export default router;
|
||||
391
backend/src/routes/monitoring.routes.ts
Normal file
391
backend/src/routes/monitoring.routes.ts
Normal file
@@ -0,0 +1,391 @@
|
||||
import {
|
||||
AlertChannel,
|
||||
HealthCheckTargetType,
|
||||
HealthCheckType,
|
||||
MonitoringAlertStatus,
|
||||
Severity
|
||||
} from "@prisma/client";
|
||||
import { Router } from "express";
|
||||
import { z } from "zod";
|
||||
import { HttpError } from "../lib/http-error";
|
||||
import { prisma } from "../lib/prisma";
|
||||
import { toPrismaJsonValue } from "../lib/prisma-json";
|
||||
import { authorize, isTenantScopedUser, requireAuth } from "../middleware/auth";
|
||||
import { logAudit } from "../services/audit.service";
|
||||
import {
|
||||
clusterResourceForecast,
|
||||
createAlertRule,
|
||||
createHealthCheckDefinition,
|
||||
evaluateAlertRulesNow,
|
||||
faultyDeploymentInsights,
|
||||
listAlertEvents,
|
||||
listAlertNotifications,
|
||||
listAlertRules,
|
||||
listHealthCheckResults,
|
||||
listHealthChecks,
|
||||
monitoringOverview,
|
||||
runHealthCheckNow,
|
||||
updateAlertRule,
|
||||
updateHealthCheckDefinition
|
||||
} from "../services/monitoring.service";
|
||||
|
||||
const router = Router();
|
||||
|
||||
// Request body for creating a health-check definition (POST) — PATCH uses
// healthCheckSchema.partial(). Warn/critical pairs are per-metric thresholds;
// schedule_minutes is the evaluation cadence (1 minute to 24 hours).
// Percentage thresholds are bounded to 0–100; rate/IO thresholds are only
// bounded to be non-negative (units are defined by the service layer —
// NOTE(review): confirm units against monitoring.service).
const healthCheckSchema = z.object({
  name: z.string().min(2),
  description: z.string().optional(),
  target_type: z.nativeEnum(HealthCheckTargetType),
  check_type: z.nativeEnum(HealthCheckType).optional(),
  tenant_id: z.string().optional(),
  vm_id: z.string().optional(),
  node_id: z.string().optional(),
  cpu_warn_pct: z.number().min(0).max(100).optional(),
  cpu_critical_pct: z.number().min(0).max(100).optional(),
  ram_warn_pct: z.number().min(0).max(100).optional(),
  ram_critical_pct: z.number().min(0).max(100).optional(),
  disk_warn_pct: z.number().min(0).max(100).optional(),
  disk_critical_pct: z.number().min(0).max(100).optional(),
  disk_io_read_warn: z.number().min(0).optional(),
  disk_io_read_critical: z.number().min(0).optional(),
  disk_io_write_warn: z.number().min(0).optional(),
  disk_io_write_critical: z.number().min(0).optional(),
  network_in_warn: z.number().min(0).optional(),
  network_in_critical: z.number().min(0).optional(),
  network_out_warn: z.number().min(0).optional(),
  network_out_critical: z.number().min(0).optional(),
  latency_warn_ms: z.number().int().min(1).optional(),
  latency_critical_ms: z.number().int().min(1).optional(),
  schedule_minutes: z.number().int().min(1).max(1440).optional(),
  enabled: z.boolean().optional(),
  metadata: z.record(z.unknown()).optional()
});

// Request body for alert rules (POST; PATCH uses .partial()). Thresholds are
// optional per metric; consecutive_breaches (1–20) and
// evaluation_window_minutes (1 minute to 24 hours) control when a rule fires.
const alertRuleSchema = z.object({
  name: z.string().min(2),
  description: z.string().optional(),
  tenant_id: z.string().optional(),
  vm_id: z.string().optional(),
  node_id: z.string().optional(),
  cpu_threshold_pct: z.number().min(0).max(100).optional(),
  ram_threshold_pct: z.number().min(0).max(100).optional(),
  disk_threshold_pct: z.number().min(0).max(100).optional(),
  disk_io_read_threshold: z.number().min(0).optional(),
  disk_io_write_threshold: z.number().min(0).optional(),
  network_in_threshold: z.number().min(0).optional(),
  network_out_threshold: z.number().min(0).optional(),
  consecutive_breaches: z.number().int().min(1).max(20).optional(),
  evaluation_window_minutes: z.number().int().min(1).max(1440).optional(),
  severity: z.nativeEnum(Severity).optional(),
  channels: z.array(z.nativeEnum(AlertChannel)).optional(),
  enabled: z.boolean().optional(),
  metadata: z.record(z.unknown()).optional()
});
|
||||
|
||||
async function ensureVmTenantScope(vmId: string, req: Pick<Express.Request, "user">) {
|
||||
const vm = await prisma.virtualMachine.findUnique({
|
||||
where: { id: vmId },
|
||||
select: {
|
||||
id: true,
|
||||
tenant_id: true,
|
||||
name: true
|
||||
}
|
||||
});
|
||||
|
||||
if (!vm) {
|
||||
throw new HttpError(404, "VM not found", "VM_NOT_FOUND");
|
||||
}
|
||||
|
||||
if (isTenantScopedUser(req) && req.user?.tenant_id && vm.tenant_id !== req.user.tenant_id) {
|
||||
throw new HttpError(403, "Access denied for tenant scope", "TENANT_SCOPE_VIOLATION");
|
||||
}
|
||||
|
||||
return vm;
|
||||
}
|
||||
|
||||
function scopedTenantId(req: Pick<Express.Request, "user">) {
|
||||
return isTenantScopedUser(req) ? req.user?.tenant_id ?? undefined : undefined;
|
||||
}
|
||||
|
||||
function queryTenantId(req: { query?: Record<string, unknown> }) {
|
||||
return typeof req.query?.tenant_id === "string" ? req.query.tenant_id : undefined;
|
||||
}
|
||||
|
||||
router.get("/overview", requireAuth, authorize("security:read"), async (req, res, next) => {
|
||||
try {
|
||||
const data = await monitoringOverview({
|
||||
tenant_id: scopedTenantId(req)
|
||||
});
|
||||
return res.json(data);
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.get("/health-checks", requireAuth, authorize("security:read"), async (req, res, next) => {
|
||||
try {
|
||||
const data = await listHealthChecks({
|
||||
tenant_id: scopedTenantId(req) ?? queryTenantId(req),
|
||||
enabled: typeof req.query.enabled === "string" ? req.query.enabled === "true" : undefined
|
||||
});
|
||||
return res.json({ data });
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/health-checks", requireAuth, authorize("security:manage"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = healthCheckSchema.parse(req.body ?? {});
|
||||
|
||||
if (payload.vm_id) {
|
||||
await ensureVmTenantScope(payload.vm_id, req);
|
||||
}
|
||||
|
||||
const tenantId = scopedTenantId(req) ?? payload.tenant_id;
|
||||
const check = await createHealthCheckDefinition({
|
||||
...payload,
|
||||
tenant_id: tenantId,
|
||||
created_by: req.user?.email
|
||||
});
|
||||
|
||||
await logAudit({
|
||||
action: "monitoring.health_check.create",
|
||||
resource_type: "SECURITY",
|
||||
resource_id: check.id,
|
||||
resource_name: check.name,
|
||||
actor_email: req.user!.email,
|
||||
actor_role: req.user!.role,
|
||||
details: toPrismaJsonValue(payload),
|
||||
ip_address: req.ip
|
||||
});
|
||||
|
||||
return res.status(201).json(check);
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.patch("/health-checks/:id", requireAuth, authorize("security:manage"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = healthCheckSchema.partial().parse(req.body ?? {});
|
||||
const existing = await prisma.serverHealthCheck.findUnique({
|
||||
where: { id: req.params.id },
|
||||
select: {
|
||||
id: true,
|
||||
tenant_id: true
|
||||
}
|
||||
});
|
||||
|
||||
if (!existing) {
|
||||
throw new HttpError(404, "Health check not found", "HEALTH_CHECK_NOT_FOUND");
|
||||
}
|
||||
|
||||
if (isTenantScopedUser(req) && req.user?.tenant_id && existing.tenant_id && existing.tenant_id !== req.user.tenant_id) {
|
||||
throw new HttpError(403, "Access denied for tenant scope", "TENANT_SCOPE_VIOLATION");
|
||||
}
|
||||
|
||||
if (payload.vm_id) {
|
||||
await ensureVmTenantScope(payload.vm_id, req);
|
||||
}
|
||||
|
||||
const updated = await updateHealthCheckDefinition(req.params.id, {
|
||||
...payload,
|
||||
tenant_id: scopedTenantId(req) ?? payload.tenant_id
|
||||
});
|
||||
|
||||
return res.json(updated);
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/health-checks/:id/run", requireAuth, authorize("security:manage"), async (req, res, next) => {
|
||||
try {
|
||||
const existing = await prisma.serverHealthCheck.findUnique({
|
||||
where: { id: req.params.id },
|
||||
select: { id: true, tenant_id: true }
|
||||
});
|
||||
|
||||
if (!existing) {
|
||||
throw new HttpError(404, "Health check not found", "HEALTH_CHECK_NOT_FOUND");
|
||||
}
|
||||
|
||||
if (isTenantScopedUser(req) && req.user?.tenant_id && existing.tenant_id && existing.tenant_id !== req.user.tenant_id) {
|
||||
throw new HttpError(403, "Access denied for tenant scope", "TENANT_SCOPE_VIOLATION");
|
||||
}
|
||||
|
||||
const result = await runHealthCheckNow(existing.id);
|
||||
return res.json(result);
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.get("/health-checks/:id/results", requireAuth, authorize("security:read"), async (req, res, next) => {
|
||||
try {
|
||||
const existing = await prisma.serverHealthCheck.findUnique({
|
||||
where: { id: req.params.id },
|
||||
select: { id: true, tenant_id: true }
|
||||
});
|
||||
|
||||
if (!existing) {
|
||||
throw new HttpError(404, "Health check not found", "HEALTH_CHECK_NOT_FOUND");
|
||||
}
|
||||
|
||||
if (isTenantScopedUser(req) && req.user?.tenant_id && existing.tenant_id && existing.tenant_id !== req.user.tenant_id) {
|
||||
throw new HttpError(403, "Access denied for tenant scope", "TENANT_SCOPE_VIOLATION");
|
||||
}
|
||||
|
||||
const limit = typeof req.query.limit === "string" ? Number(req.query.limit) : undefined;
|
||||
const data = await listHealthCheckResults(existing.id, limit);
|
||||
return res.json({ data });
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.get("/alerts/rules", requireAuth, authorize("security:read"), async (req, res, next) => {
|
||||
try {
|
||||
const data = await listAlertRules({
|
||||
tenant_id: scopedTenantId(req) ?? queryTenantId(req),
|
||||
enabled: typeof req.query.enabled === "string" ? req.query.enabled === "true" : undefined
|
||||
});
|
||||
return res.json({ data });
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/alerts/rules", requireAuth, authorize("security:manage"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = alertRuleSchema.parse(req.body ?? {});
|
||||
|
||||
if (payload.vm_id) {
|
||||
await ensureVmTenantScope(payload.vm_id, req);
|
||||
}
|
||||
|
||||
const tenantId = scopedTenantId(req) ?? payload.tenant_id;
|
||||
const rule = await createAlertRule({
|
||||
...payload,
|
||||
tenant_id: tenantId,
|
||||
created_by: req.user?.email
|
||||
});
|
||||
|
||||
await logAudit({
|
||||
action: "monitoring.alert_rule.create",
|
||||
resource_type: "SECURITY",
|
||||
resource_id: rule.id,
|
||||
resource_name: rule.name,
|
||||
actor_email: req.user!.email,
|
||||
actor_role: req.user!.role,
|
||||
details: toPrismaJsonValue(payload),
|
||||
ip_address: req.ip
|
||||
});
|
||||
|
||||
return res.status(201).json(rule);
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.patch("/alerts/rules/:id", requireAuth, authorize("security:manage"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = alertRuleSchema.partial().parse(req.body ?? {});
|
||||
const existing = await prisma.monitoringAlertRule.findUnique({
|
||||
where: { id: req.params.id },
|
||||
select: {
|
||||
id: true,
|
||||
tenant_id: true
|
||||
}
|
||||
});
|
||||
|
||||
if (!existing) {
|
||||
throw new HttpError(404, "Alert rule not found", "ALERT_RULE_NOT_FOUND");
|
||||
}
|
||||
|
||||
if (isTenantScopedUser(req) && req.user?.tenant_id && existing.tenant_id && existing.tenant_id !== req.user.tenant_id) {
|
||||
throw new HttpError(403, "Access denied for tenant scope", "TENANT_SCOPE_VIOLATION");
|
||||
}
|
||||
|
||||
if (payload.vm_id) {
|
||||
await ensureVmTenantScope(payload.vm_id, req);
|
||||
}
|
||||
|
||||
const updated = await updateAlertRule(req.params.id, {
|
||||
...payload,
|
||||
tenant_id: scopedTenantId(req) ?? payload.tenant_id
|
||||
});
|
||||
return res.json(updated);
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.get("/alerts/events", requireAuth, authorize("security:read"), async (req, res, next) => {
|
||||
try {
|
||||
const statusRaw = typeof req.query.status === "string" ? req.query.status.toUpperCase() : undefined;
|
||||
const status = Object.values(MonitoringAlertStatus).includes(statusRaw as MonitoringAlertStatus)
|
||||
? (statusRaw as MonitoringAlertStatus)
|
||||
: undefined;
|
||||
|
||||
const limit = typeof req.query.limit === "string" ? Number(req.query.limit) : undefined;
|
||||
const data = await listAlertEvents({
|
||||
tenant_id: scopedTenantId(req) ?? queryTenantId(req),
|
||||
status,
|
||||
limit
|
||||
});
|
||||
return res.json({ data });
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.get("/alerts/notifications", requireAuth, authorize("security:read"), async (req, res, next) => {
|
||||
try {
|
||||
const limit = typeof req.query.limit === "string" ? Number(req.query.limit) : undefined;
|
||||
const data = await listAlertNotifications({
|
||||
tenant_id: scopedTenantId(req) ?? queryTenantId(req),
|
||||
limit
|
||||
});
|
||||
return res.json({ data });
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/alerts/evaluate", requireAuth, authorize("security:manage"), async (req, res, next) => {
|
||||
try {
|
||||
const result = await evaluateAlertRulesNow(scopedTenantId(req));
|
||||
return res.json(result);
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.get("/insights/faulty-deployments", requireAuth, authorize("security:read"), async (req, res, next) => {
|
||||
try {
|
||||
const days = typeof req.query.days === "string" ? Number(req.query.days) : undefined;
|
||||
const data = await faultyDeploymentInsights({
|
||||
days,
|
||||
tenant_id: scopedTenantId(req) ?? queryTenantId(req)
|
||||
});
|
||||
return res.json(data);
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.get("/insights/cluster-forecast", requireAuth, authorize("security:read"), async (req, res, next) => {
|
||||
try {
|
||||
const horizon = typeof req.query.horizon_days === "string" ? Number(req.query.horizon_days) : undefined;
|
||||
const data = await clusterResourceForecast({
|
||||
horizon_days: horizon,
|
||||
tenant_id: scopedTenantId(req) ?? queryTenantId(req)
|
||||
});
|
||||
return res.json(data);
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
export default router;
|
||||
636
backend/src/routes/network.routes.ts
Normal file
636
backend/src/routes/network.routes.ts
Normal file
@@ -0,0 +1,636 @@
|
||||
import { IpAddressStatus, IpAssignmentType, IpAllocationStrategy, IpScope, IpVersion, PrivateNetworkType } from "@prisma/client";
|
||||
import { Router } from "express";
|
||||
import { z } from "zod";
|
||||
import { HttpError } from "../lib/http-error";
|
||||
import { prisma } from "../lib/prisma";
|
||||
import { toPrismaJsonValue } from "../lib/prisma-json";
|
||||
import { authorize, isTenantScopedUser, requireAuth } from "../middleware/auth";
|
||||
import { logAudit } from "../services/audit.service";
|
||||
import {
|
||||
assignIpToVm,
|
||||
attachPrivateNetwork,
|
||||
createPrivateNetwork,
|
||||
detachPrivateNetwork,
|
||||
importIpAddresses,
|
||||
listIpAddresses,
|
||||
listIpAssignments,
|
||||
listIpPoolPolicies,
|
||||
listIpReservedRanges,
|
||||
listPrivateNetworks,
|
||||
listTenantIpQuotas,
|
||||
returnAssignedIp,
|
||||
subnetUtilizationDashboard,
|
||||
upsertIpPoolPolicy,
|
||||
upsertTenantIpQuota,
|
||||
createIpReservedRange,
|
||||
updateIpReservedRange
|
||||
} from "../services/network.service";
|
||||
|
||||
const router = Router();
|
||||
|
||||
// Request body for bulk-importing IPs: individual addresses and/or CIDR blocks,
// plus optional placement hints (node, bridge, VLAN, SDN zone, gateway/subnet).
const ipImportSchema = z.object({
  addresses: z.array(z.string().min(2)).optional(),
  cidr_blocks: z.array(z.string().min(3)).optional(),
  scope: z.nativeEnum(IpScope).optional(),
  server: z.string().optional(),
  node_id: z.string().optional(),
  node_hostname: z.string().optional(),
  bridge: z.string().optional(),
  vlan_tag: z.number().int().min(0).max(4094).optional(),
  sdn_zone: z.string().optional(),
  gateway: z.string().optional(),
  subnet: z.string().optional(),
  tags: z.array(z.string().min(1)).optional(),
  metadata: z.record(z.unknown()).optional()
});

// Request body for assigning an IP to a VM. Either a specific ip_address_id /
// address may be given, or scope/version hints for auto-allocation.
const ipAssignSchema = z.object({
  vm_id: z.string().min(1),
  ip_address_id: z.string().optional(),
  address: z.string().optional(),
  scope: z.nativeEnum(IpScope).optional(),
  version: z.nativeEnum(IpVersion).optional(),
  assignment_type: z.nativeEnum(IpAssignmentType).default(IpAssignmentType.ADDITIONAL),
  interface_name: z.string().optional(),
  notes: z.string().optional(),
  metadata: z.record(z.unknown()).optional()
});

// Request body for returning an assigned IP; at least one identifier is required.
const ipReturnSchema = z
  .object({
    assignment_id: z.string().optional(),
    ip_address_id: z.string().optional()
  })
  .refine((value) => value.assignment_id || value.ip_address_id, {
    message: "assignment_id or ip_address_id is required"
  });

// Request body for creating a private network (CIDR required; placement optional).
const privateNetworkCreateSchema = z.object({
  name: z.string().min(2),
  slug: z.string().optional(),
  network_type: z.nativeEnum(PrivateNetworkType).optional(),
  cidr: z.string().min(3),
  gateway: z.string().optional(),
  bridge: z.string().optional(),
  vlan_tag: z.number().int().min(0).max(4094).optional(),
  sdn_zone: z.string().optional(),
  server: z.string().optional(),
  node_hostname: z.string().optional(),
  metadata: z.record(z.unknown()).optional()
});

// Request body for attaching a VM to a private network.
const privateNetworkAttachSchema = z.object({
  network_id: z.string().min(1),
  vm_id: z.string().min(1),
  interface_name: z.string().optional(),
  requested_ip: z.string().optional(),
  metadata: z.record(z.unknown()).optional()
});

// Request body for upserting a tenant IP quota. Limits may be explicitly set
// to null to clear them (nullable), while omitting a field leaves it untouched.
const tenantQuotaSchema = z.object({
  tenant_id: z.string().min(1),
  ipv4_limit: z.number().int().positive().nullable().optional(),
  ipv6_limit: z.number().int().positive().nullable().optional(),
  reserved_ipv4: z.number().int().min(0).optional(),
  reserved_ipv6: z.number().int().min(0).optional(),
  burst_allowed: z.boolean().optional(),
  burst_ipv4_limit: z.number().int().positive().nullable().optional(),
  burst_ipv6_limit: z.number().int().positive().nullable().optional(),
  is_active: z.boolean().optional(),
  metadata: z.record(z.unknown()).optional()
});

// Request body for reserved IP ranges (CIDRs excluded from allocation),
// optionally pinned to a tenant and/or placement.
const reservedRangeSchema = z.object({
  name: z.string().min(2),
  cidr: z.string().min(3),
  scope: z.nativeEnum(IpScope).optional(),
  tenant_id: z.string().optional(),
  reason: z.string().optional(),
  node_hostname: z.string().optional(),
  bridge: z.string().optional(),
  vlan_tag: z.number().int().min(0).max(4094).optional(),
  sdn_zone: z.string().optional(),
  is_active: z.boolean().optional(),
  metadata: z.record(z.unknown()).optional()
});

// Request body for IP pool allocation policies. priority (1–1000) orders
// competing policies; semantics of ordering live in the service layer.
const ipPoolPolicySchema = z.object({
  name: z.string().min(2),
  tenant_id: z.string().optional(),
  scope: z.nativeEnum(IpScope).optional(),
  version: z.nativeEnum(IpVersion).optional(),
  node_hostname: z.string().optional(),
  bridge: z.string().optional(),
  vlan_tag: z.number().int().min(0).max(4094).optional(),
  sdn_zone: z.string().optional(),
  allocation_strategy: z.nativeEnum(IpAllocationStrategy).optional(),
  enforce_quota: z.boolean().optional(),
  disallow_reserved_use: z.boolean().optional(),
  is_active: z.boolean().optional(),
  priority: z.number().int().min(1).max(1000).optional(),
  metadata: z.record(z.unknown()).optional()
});
|
||||
|
||||
async function ensureVmTenantScope(vmId: string, req: Pick<Express.Request, "user">) {
|
||||
const vm = await prisma.virtualMachine.findUnique({
|
||||
where: { id: vmId },
|
||||
select: {
|
||||
id: true,
|
||||
tenant_id: true,
|
||||
name: true
|
||||
}
|
||||
});
|
||||
|
||||
if (!vm) {
|
||||
throw new HttpError(404, "VM not found", "VM_NOT_FOUND");
|
||||
}
|
||||
|
||||
if (isTenantScopedUser(req) && req.user?.tenant_id && vm.tenant_id !== req.user.tenant_id) {
|
||||
throw new HttpError(403, "Access denied for tenant scope", "TENANT_SCOPE_VIOLATION");
|
||||
}
|
||||
|
||||
return vm;
|
||||
}
|
||||
|
||||
router.get("/ip-addresses", requireAuth, authorize("node:read"), async (req, res, next) => {
|
||||
try {
|
||||
const status = typeof req.query.status === "string" ? req.query.status.toUpperCase() : undefined;
|
||||
const version = typeof req.query.version === "string" ? req.query.version.toUpperCase() : undefined;
|
||||
const scope = typeof req.query.scope === "string" ? req.query.scope.toUpperCase() : undefined;
|
||||
|
||||
const result = await listIpAddresses({
|
||||
status: Object.values(IpAddressStatus).includes(status as IpAddressStatus) ? (status as IpAddressStatus) : undefined,
|
||||
version: Object.values(IpVersion).includes(version as IpVersion) ? (version as IpVersion) : undefined,
|
||||
scope: Object.values(IpScope).includes(scope as IpScope) ? (scope as IpScope) : undefined,
|
||||
nodeHostname: typeof req.query.node_hostname === "string" ? req.query.node_hostname : undefined,
|
||||
bridge: typeof req.query.bridge === "string" ? req.query.bridge : undefined,
|
||||
vlanTag: typeof req.query.vlan_tag === "string" ? Number(req.query.vlan_tag) : undefined,
|
||||
assignedVmId: typeof req.query.assigned_vm_id === "string" ? req.query.assigned_vm_id : undefined,
|
||||
limit: typeof req.query.limit === "string" ? Number(req.query.limit) : undefined,
|
||||
offset: typeof req.query.offset === "string" ? Number(req.query.offset) : undefined
|
||||
});
|
||||
|
||||
if (isTenantScopedUser(req) && req.user?.tenant_id) {
|
||||
const tenantData = result.data.filter(
|
||||
(item) =>
|
||||
item.assigned_tenant_id === req.user?.tenant_id ||
|
||||
(item.status === IpAddressStatus.AVAILABLE && item.scope === IpScope.PRIVATE)
|
||||
);
|
||||
return res.json({
|
||||
data: tenantData,
|
||||
meta: {
|
||||
...result.meta,
|
||||
total: tenantData.length
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return res.json(result);
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/ip-addresses/import", requireAuth, authorize("node:manage"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = ipImportSchema.parse(req.body ?? {});
|
||||
const result = await importIpAddresses({
|
||||
...payload,
|
||||
imported_by: req.user?.email
|
||||
});
|
||||
|
||||
await logAudit({
|
||||
action: "ip_address.import",
|
||||
resource_type: "SYSTEM",
|
||||
actor_email: req.user!.email,
|
||||
actor_role: req.user!.role,
|
||||
details: toPrismaJsonValue({
|
||||
...payload,
|
||||
result
|
||||
}),
|
||||
ip_address: req.ip
|
||||
});
|
||||
|
||||
return res.status(201).json(result);
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.get("/subnet-utilization", requireAuth, authorize("node:read"), async (req, res, next) => {
|
||||
try {
|
||||
const scope = typeof req.query.scope === "string" ? req.query.scope.toUpperCase() : undefined;
|
||||
const version = typeof req.query.version === "string" ? req.query.version.toUpperCase() : undefined;
|
||||
|
||||
const dashboard = await subnetUtilizationDashboard({
|
||||
scope: Object.values(IpScope).includes(scope as IpScope) ? (scope as IpScope) : undefined,
|
||||
version: Object.values(IpVersion).includes(version as IpVersion) ? (version as IpVersion) : undefined,
|
||||
node_hostname: typeof req.query.node_hostname === "string" ? req.query.node_hostname : undefined,
|
||||
bridge: typeof req.query.bridge === "string" ? req.query.bridge : undefined,
|
||||
vlan_tag: typeof req.query.vlan_tag === "string" ? Number(req.query.vlan_tag) : undefined,
|
||||
tenant_id:
|
||||
isTenantScopedUser(req) && req.user?.tenant_id
|
||||
? req.user.tenant_id
|
||||
: typeof req.query.tenant_id === "string"
|
||||
? req.query.tenant_id
|
||||
: undefined
|
||||
});
|
||||
|
||||
return res.json(dashboard);
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.get("/ip-assignments", requireAuth, authorize("vm:read"), async (req, res, next) => {
|
||||
try {
|
||||
const data = await listIpAssignments({
|
||||
vm_id: typeof req.query.vm_id === "string" ? req.query.vm_id : undefined,
|
||||
tenant_id:
|
||||
isTenantScopedUser(req) && req.user?.tenant_id
|
||||
? req.user.tenant_id
|
||||
: typeof req.query.tenant_id === "string"
|
||||
? req.query.tenant_id
|
||||
: undefined,
|
||||
active_only: req.query.active_only === "true"
|
||||
});
|
||||
return res.json({ data });
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/ip-assignments", requireAuth, authorize("vm:update"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = ipAssignSchema.parse(req.body ?? {});
|
||||
await ensureVmTenantScope(payload.vm_id, req);
|
||||
|
||||
const assignment = await assignIpToVm({
|
||||
vm_id: payload.vm_id,
|
||||
ip_address_id: payload.ip_address_id,
|
||||
address: payload.address,
|
||||
scope: payload.scope,
|
||||
version: payload.version,
|
||||
assignment_type: payload.assignment_type,
|
||||
interface_name: payload.interface_name,
|
||||
notes: payload.notes,
|
||||
metadata: payload.metadata,
|
||||
actor_email: req.user?.email
|
||||
});
|
||||
|
||||
await logAudit({
|
||||
action: "ip_address.assign",
|
||||
resource_type: "VM",
|
||||
resource_id: payload.vm_id,
|
||||
resource_name: assignment.vm.name,
|
||||
actor_email: req.user!.email,
|
||||
actor_role: req.user!.role,
|
||||
details: toPrismaJsonValue({
|
||||
assignment_id: assignment.id,
|
||||
ip_address: assignment.ip_address.address,
|
||||
cidr: assignment.ip_address.cidr,
|
||||
assignment_type: assignment.assignment_type,
|
||||
interface_name: assignment.interface_name
|
||||
}),
|
||||
ip_address: req.ip
|
||||
});
|
||||
|
||||
return res.status(201).json(assignment);
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/ip-assignments/return", requireAuth, authorize("vm:update"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = ipReturnSchema.parse(req.body ?? {});
|
||||
if (payload.assignment_id) {
|
||||
const existing = await prisma.ipAssignment.findUnique({
|
||||
where: { id: payload.assignment_id },
|
||||
include: {
|
||||
vm: {
|
||||
select: {
|
||||
id: true
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (!existing) throw new HttpError(404, "IP assignment not found", "IP_ASSIGNMENT_NOT_FOUND");
|
||||
await ensureVmTenantScope(existing.vm.id, req);
|
||||
}
|
||||
|
||||
const assignment = await returnAssignedIp(payload);
|
||||
|
||||
await logAudit({
|
||||
action: "ip_address.return",
|
||||
resource_type: "VM",
|
||||
resource_id: assignment.vm_id,
|
||||
actor_email: req.user!.email,
|
||||
actor_role: req.user!.role,
|
||||
details: toPrismaJsonValue({
|
||||
assignment_id: assignment.id,
|
||||
ip_address_id: assignment.ip_address_id
|
||||
}),
|
||||
ip_address: req.ip
|
||||
});
|
||||
|
||||
return res.json(assignment);
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.get("/tenant-quotas", requireAuth, authorize("tenant:read"), async (req, res, next) => {
|
||||
try {
|
||||
const data = await listTenantIpQuotas(
|
||||
isTenantScopedUser(req) && req.user?.tenant_id ? req.user.tenant_id : typeof req.query.tenant_id === "string" ? req.query.tenant_id : undefined
|
||||
);
|
||||
return res.json({ data });
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/tenant-quotas", requireAuth, authorize("tenant:manage"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = tenantQuotaSchema.parse(req.body ?? {});
|
||||
if (isTenantScopedUser(req) && req.user?.tenant_id && payload.tenant_id !== req.user.tenant_id) {
|
||||
throw new HttpError(403, "Access denied for tenant scope", "TENANT_SCOPE_VIOLATION");
|
||||
}
|
||||
|
||||
const quota = await upsertTenantIpQuota({
|
||||
...payload,
|
||||
created_by: req.user?.email
|
||||
});
|
||||
|
||||
await logAudit({
|
||||
action: "ip_quota.upsert",
|
||||
resource_type: "TENANT",
|
||||
resource_id: quota.tenant_id,
|
||||
resource_name: quota.tenant.name,
|
||||
actor_email: req.user!.email,
|
||||
actor_role: req.user!.role,
|
||||
details: toPrismaJsonValue(payload),
|
||||
ip_address: req.ip
|
||||
});
|
||||
|
||||
return res.status(201).json(quota);
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.get("/reserved-ranges", requireAuth, authorize("node:read"), async (req, res, next) => {
|
||||
try {
|
||||
const all = await listIpReservedRanges();
|
||||
const data =
|
||||
isTenantScopedUser(req) && req.user?.tenant_id
|
||||
? all.filter((item) => !item.tenant_id || item.tenant_id === req.user?.tenant_id)
|
||||
: all;
|
||||
return res.json({ data });
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/reserved-ranges", requireAuth, authorize("node:manage"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = reservedRangeSchema.parse(req.body ?? {});
|
||||
if (isTenantScopedUser(req) && req.user?.tenant_id && payload.tenant_id && payload.tenant_id !== req.user.tenant_id) {
|
||||
throw new HttpError(403, "Access denied for tenant scope", "TENANT_SCOPE_VIOLATION");
|
||||
}
|
||||
|
||||
const range = await createIpReservedRange({
|
||||
...payload,
|
||||
created_by: req.user?.email
|
||||
});
|
||||
|
||||
await logAudit({
|
||||
action: "ip_reserved_range.create",
|
||||
resource_type: "NETWORK",
|
||||
resource_id: range.id,
|
||||
resource_name: range.name,
|
||||
actor_email: req.user!.email,
|
||||
actor_role: req.user!.role,
|
||||
details: toPrismaJsonValue(payload),
|
||||
ip_address: req.ip
|
||||
});
|
||||
|
||||
return res.status(201).json(range);
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.patch("/reserved-ranges/:id", requireAuth, authorize("node:manage"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = reservedRangeSchema.partial().parse(req.body ?? {});
|
||||
const existing = await prisma.ipReservedRange.findUnique({ where: { id: req.params.id } });
|
||||
if (!existing) throw new HttpError(404, "Reserved range not found", "RESERVED_RANGE_NOT_FOUND");
|
||||
if (isTenantScopedUser(req) && req.user?.tenant_id && existing.tenant_id && existing.tenant_id !== req.user.tenant_id) {
|
||||
throw new HttpError(403, "Access denied for tenant scope", "TENANT_SCOPE_VIOLATION");
|
||||
}
|
||||
|
||||
const updated = await updateIpReservedRange(req.params.id, payload);
|
||||
return res.json(updated);
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.get("/policies", requireAuth, authorize("node:read"), async (req, res, next) => {
|
||||
try {
|
||||
const all = await listIpPoolPolicies();
|
||||
const data =
|
||||
isTenantScopedUser(req) && req.user?.tenant_id
|
||||
? all.filter((item) => !item.tenant_id || item.tenant_id === req.user?.tenant_id)
|
||||
: all;
|
||||
return res.json({ data });
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/policies", requireAuth, authorize("node:manage"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = ipPoolPolicySchema.parse(req.body ?? {});
|
||||
if (isTenantScopedUser(req) && req.user?.tenant_id && payload.tenant_id && payload.tenant_id !== req.user.tenant_id) {
|
||||
throw new HttpError(403, "Access denied for tenant scope", "TENANT_SCOPE_VIOLATION");
|
||||
}
|
||||
|
||||
const policy = await upsertIpPoolPolicy({
|
||||
...payload,
|
||||
created_by: req.user?.email
|
||||
});
|
||||
|
||||
await logAudit({
|
||||
action: "ip_pool_policy.create",
|
||||
resource_type: "NETWORK",
|
||||
resource_id: policy.id,
|
||||
resource_name: policy.name,
|
||||
actor_email: req.user!.email,
|
||||
actor_role: req.user!.role,
|
||||
details: toPrismaJsonValue(payload),
|
||||
ip_address: req.ip
|
||||
});
|
||||
|
||||
return res.status(201).json(policy);
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.patch("/policies/:id", requireAuth, authorize("node:manage"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = ipPoolPolicySchema.partial().parse(req.body ?? {});
|
||||
const existing = await prisma.ipPoolPolicy.findUnique({ where: { id: req.params.id } });
|
||||
if (!existing) throw new HttpError(404, "IP pool policy not found", "IP_POOL_POLICY_NOT_FOUND");
|
||||
if (isTenantScopedUser(req) && req.user?.tenant_id && existing.tenant_id && existing.tenant_id !== req.user.tenant_id) {
|
||||
throw new HttpError(403, "Access denied for tenant scope", "TENANT_SCOPE_VIOLATION");
|
||||
}
|
||||
|
||||
const policy = await upsertIpPoolPolicy({
|
||||
policy_id: existing.id,
|
||||
name: payload.name ?? existing.name,
|
||||
tenant_id: payload.tenant_id ?? existing.tenant_id ?? undefined,
|
||||
scope: payload.scope ?? existing.scope ?? undefined,
|
||||
version: payload.version ?? existing.version ?? undefined,
|
||||
node_hostname: payload.node_hostname ?? existing.node_hostname ?? undefined,
|
||||
bridge: payload.bridge ?? existing.bridge ?? undefined,
|
||||
vlan_tag: payload.vlan_tag ?? existing.vlan_tag ?? undefined,
|
||||
sdn_zone: payload.sdn_zone ?? existing.sdn_zone ?? undefined,
|
||||
allocation_strategy: payload.allocation_strategy ?? existing.allocation_strategy,
|
||||
enforce_quota: payload.enforce_quota ?? existing.enforce_quota,
|
||||
disallow_reserved_use: payload.disallow_reserved_use ?? existing.disallow_reserved_use,
|
||||
is_active: payload.is_active ?? existing.is_active,
|
||||
priority: payload.priority ?? existing.priority,
|
||||
metadata: payload.metadata
|
||||
});
|
||||
|
||||
return res.json(policy);
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.get("/private-networks", requireAuth, authorize("node:read"), async (_req, res, next) => {
|
||||
try {
|
||||
const data = await listPrivateNetworks();
|
||||
return res.json({ data });
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/private-networks", requireAuth, authorize("node:manage"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = privateNetworkCreateSchema.parse(req.body ?? {});
|
||||
const network = await createPrivateNetwork({
|
||||
name: payload.name,
|
||||
slug: payload.slug,
|
||||
network_type: payload.network_type,
|
||||
cidr: payload.cidr,
|
||||
gateway: payload.gateway,
|
||||
bridge: payload.bridge,
|
||||
vlan_tag: payload.vlan_tag,
|
||||
sdn_zone: payload.sdn_zone,
|
||||
server: payload.server,
|
||||
node_hostname: payload.node_hostname,
|
||||
metadata: payload.metadata,
|
||||
created_by: req.user?.email
|
||||
});
|
||||
|
||||
await logAudit({
|
||||
action: "private_network.create",
|
||||
resource_type: "NETWORK",
|
||||
resource_id: network.id,
|
||||
resource_name: network.name,
|
||||
actor_email: req.user!.email,
|
||||
actor_role: req.user!.role,
|
||||
details: toPrismaJsonValue(payload),
|
||||
ip_address: req.ip
|
||||
});
|
||||
|
||||
return res.status(201).json(network);
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/private-networks/attach", requireAuth, authorize("vm:update"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = privateNetworkAttachSchema.parse(req.body ?? {});
|
||||
await ensureVmTenantScope(payload.vm_id, req);
|
||||
|
||||
const attachment = await attachPrivateNetwork({
|
||||
network_id: payload.network_id,
|
||||
vm_id: payload.vm_id,
|
||||
interface_name: payload.interface_name,
|
||||
requested_ip: payload.requested_ip,
|
||||
metadata: payload.metadata,
|
||||
actor_email: req.user?.email
|
||||
});
|
||||
|
||||
await logAudit({
|
||||
action: "private_network.attach",
|
||||
resource_type: "VM",
|
||||
resource_id: payload.vm_id,
|
||||
resource_name: attachment.vm.name,
|
||||
actor_email: req.user!.email,
|
||||
actor_role: req.user!.role,
|
||||
details: toPrismaJsonValue({
|
||||
attachment_id: attachment.id,
|
||||
network_id: payload.network_id,
|
||||
interface_name: attachment.interface_name,
|
||||
requested_ip: payload.requested_ip
|
||||
}),
|
||||
ip_address: req.ip
|
||||
});
|
||||
|
||||
return res.status(201).json(attachment);
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/private-networks/attachments/:id/detach", requireAuth, authorize("vm:update"), async (req, res, next) => {
|
||||
try {
|
||||
const existing = await prisma.privateNetworkAttachment.findUnique({
|
||||
where: { id: req.params.id },
|
||||
select: {
|
||||
id: true,
|
||||
vm_id: true
|
||||
}
|
||||
});
|
||||
|
||||
if (!existing) throw new HttpError(404, "Private network attachment not found", "PRIVATE_NETWORK_ATTACHMENT_NOT_FOUND");
|
||||
await ensureVmTenantScope(existing.vm_id, req);
|
||||
|
||||
const attachment = await detachPrivateNetwork({
|
||||
attachment_id: req.params.id,
|
||||
actor_email: req.user?.email
|
||||
});
|
||||
|
||||
await logAudit({
|
||||
action: "private_network.detach",
|
||||
resource_type: "VM",
|
||||
resource_id: attachment.vm.id,
|
||||
resource_name: attachment.vm.name,
|
||||
actor_email: req.user!.email,
|
||||
actor_role: req.user!.role,
|
||||
details: toPrismaJsonValue({
|
||||
attachment_id: attachment.id,
|
||||
network_id: attachment.network_id,
|
||||
interface_name: attachment.interface_name
|
||||
}),
|
||||
ip_address: req.ip
|
||||
});
|
||||
|
||||
return res.json(attachment);
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
export default router;
|
||||
275
backend/src/routes/operations.routes.ts
Normal file
275
backend/src/routes/operations.routes.ts
Normal file
@@ -0,0 +1,275 @@
|
||||
import { OperationTaskStatus, OperationTaskType, PowerScheduleAction } from "@prisma/client";
|
||||
import { Router } from "express";
|
||||
import { z } from "zod";
|
||||
import { HttpError } from "../lib/http-error";
|
||||
import { prisma } from "../lib/prisma";
|
||||
import { authorize, isTenantScopedUser, requireAuth } from "../middleware/auth";
|
||||
import {
|
||||
createPowerSchedule,
|
||||
deletePowerSchedule,
|
||||
executeVmPowerActionNow,
|
||||
listOperationTasks,
|
||||
operationQueueInsights,
|
||||
listPowerSchedules,
|
||||
updatePowerSchedule
|
||||
} from "../services/operations.service";
|
||||
import { logAudit } from "../services/audit.service";
|
||||
|
||||
const router = Router();
|
||||
|
||||
const scheduleCreateSchema = z.object({
|
||||
vm_id: z.string().min(1),
|
||||
action: z.nativeEnum(PowerScheduleAction),
|
||||
cron_expression: z.string().min(5),
|
||||
timezone: z.string().default("UTC")
|
||||
});
|
||||
|
||||
const scheduleUpdateSchema = z.object({
|
||||
action: z.nativeEnum(PowerScheduleAction).optional(),
|
||||
cron_expression: z.string().min(5).optional(),
|
||||
timezone: z.string().min(1).optional(),
|
||||
enabled: z.boolean().optional()
|
||||
});
|
||||
|
||||
function parseOptionalEnum<T extends Record<string, string>>(value: unknown, enumObject: T) {
|
||||
if (typeof value !== "string") return undefined;
|
||||
const candidate = value.toUpperCase();
|
||||
return Object.values(enumObject).includes(candidate as T[keyof T])
|
||||
? (candidate as T[keyof T])
|
||||
: undefined;
|
||||
}
|
||||
|
||||
async function ensureVmTenantAccess(vmId: string, req: Express.Request) {
|
||||
const vm = await prisma.virtualMachine.findUnique({
|
||||
where: { id: vmId },
|
||||
select: {
|
||||
id: true,
|
||||
name: true,
|
||||
node: true,
|
||||
tenant_id: true
|
||||
}
|
||||
});
|
||||
|
||||
if (!vm) {
|
||||
throw new HttpError(404, "VM not found", "VM_NOT_FOUND");
|
||||
}
|
||||
|
||||
if (isTenantScopedUser(req) && req.user?.tenant_id && vm.tenant_id !== req.user.tenant_id) {
|
||||
throw new HttpError(403, "Access denied for tenant scope", "TENANT_SCOPE_VIOLATION");
|
||||
}
|
||||
|
||||
return vm;
|
||||
}
|
||||
|
||||
router.get("/tasks", requireAuth, authorize("audit:read"), async (req, res, next) => {
|
||||
try {
|
||||
const status = parseOptionalEnum(req.query.status, OperationTaskStatus);
|
||||
const taskType = parseOptionalEnum(req.query.task_type, OperationTaskType);
|
||||
const vmId = typeof req.query.vm_id === "string" ? req.query.vm_id : undefined;
|
||||
const limit = typeof req.query.limit === "string" ? Number(req.query.limit) : undefined;
|
||||
const offset = typeof req.query.offset === "string" ? Number(req.query.offset) : undefined;
|
||||
|
||||
const result = await listOperationTasks({
|
||||
status,
|
||||
taskType,
|
||||
vmId,
|
||||
limit,
|
||||
offset,
|
||||
tenantId: isTenantScopedUser(req) ? req.user?.tenant_id : undefined
|
||||
});
|
||||
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.get("/queue-insights", requireAuth, authorize("audit:read"), async (req, res, next) => {
|
||||
try {
|
||||
const data = await operationQueueInsights(isTenantScopedUser(req) ? req.user?.tenant_id : undefined);
|
||||
return res.json(data);
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.get("/power-schedules", requireAuth, authorize("vm:read"), async (req, res, next) => {
|
||||
try {
|
||||
const schedules = await listPowerSchedules(isTenantScopedUser(req) ? req.user?.tenant_id : undefined);
|
||||
res.json({ data: schedules });
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/power-schedules", requireAuth, authorize("vm:update"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = scheduleCreateSchema.parse(req.body ?? {});
|
||||
const vm = await ensureVmTenantAccess(payload.vm_id, req);
|
||||
|
||||
const schedule = await createPowerSchedule({
|
||||
vmId: vm.id,
|
||||
action: payload.action,
|
||||
cronExpression: payload.cron_expression,
|
||||
timezone: payload.timezone,
|
||||
createdBy: req.user?.email
|
||||
});
|
||||
|
||||
await logAudit({
|
||||
action: "power_schedule.create",
|
||||
resource_type: "VM",
|
||||
resource_id: vm.id,
|
||||
resource_name: vm.name,
|
||||
actor_email: req.user!.email,
|
||||
actor_role: req.user!.role,
|
||||
details: {
|
||||
schedule_id: schedule.id,
|
||||
action: payload.action,
|
||||
cron_expression: payload.cron_expression
|
||||
},
|
||||
ip_address: req.ip
|
||||
});
|
||||
|
||||
res.status(201).json(schedule);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.patch("/power-schedules/:id", requireAuth, authorize("vm:update"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = scheduleUpdateSchema.parse(req.body ?? {});
|
||||
const existing = await prisma.powerSchedule.findUnique({
|
||||
where: { id: req.params.id },
|
||||
include: {
|
||||
vm: {
|
||||
select: {
|
||||
id: true,
|
||||
name: true,
|
||||
tenant_id: true
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (!existing) {
|
||||
throw new HttpError(404, "Power schedule not found", "POWER_SCHEDULE_NOT_FOUND");
|
||||
}
|
||||
|
||||
if (isTenantScopedUser(req) && req.user?.tenant_id && existing.vm.tenant_id !== req.user.tenant_id) {
|
||||
throw new HttpError(403, "Access denied for tenant scope", "TENANT_SCOPE_VIOLATION");
|
||||
}
|
||||
|
||||
const schedule = await updatePowerSchedule(existing.id, {
|
||||
action: payload.action,
|
||||
cronExpression: payload.cron_expression,
|
||||
timezone: payload.timezone,
|
||||
enabled: payload.enabled
|
||||
});
|
||||
|
||||
await logAudit({
|
||||
action: "power_schedule.update",
|
||||
resource_type: "VM",
|
||||
resource_id: existing.vm.id,
|
||||
resource_name: existing.vm.name,
|
||||
actor_email: req.user!.email,
|
||||
actor_role: req.user!.role,
|
||||
details: {
|
||||
schedule_id: existing.id,
|
||||
payload
|
||||
},
|
||||
ip_address: req.ip
|
||||
});
|
||||
|
||||
res.json(schedule);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.delete("/power-schedules/:id", requireAuth, authorize("vm:update"), async (req, res, next) => {
|
||||
try {
|
||||
const existing = await prisma.powerSchedule.findUnique({
|
||||
where: { id: req.params.id },
|
||||
include: {
|
||||
vm: {
|
||||
select: {
|
||||
id: true,
|
||||
name: true,
|
||||
tenant_id: true
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (!existing) {
|
||||
throw new HttpError(404, "Power schedule not found", "POWER_SCHEDULE_NOT_FOUND");
|
||||
}
|
||||
|
||||
if (isTenantScopedUser(req) && req.user?.tenant_id && existing.vm.tenant_id !== req.user.tenant_id) {
|
||||
throw new HttpError(403, "Access denied for tenant scope", "TENANT_SCOPE_VIOLATION");
|
||||
}
|
||||
|
||||
await deletePowerSchedule(existing.id);
|
||||
|
||||
await logAudit({
|
||||
action: "power_schedule.delete",
|
||||
resource_type: "VM",
|
||||
resource_id: existing.vm.id,
|
||||
resource_name: existing.vm.name,
|
||||
actor_email: req.user!.email,
|
||||
actor_role: req.user!.role,
|
||||
details: {
|
||||
schedule_id: existing.id
|
||||
},
|
||||
ip_address: req.ip
|
||||
});
|
||||
|
||||
res.status(204).send();
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/power-schedules/:id/run", requireAuth, authorize("vm:update"), async (req, res, next) => {
|
||||
try {
|
||||
const existing = await prisma.powerSchedule.findUnique({
|
||||
where: { id: req.params.id },
|
||||
include: {
|
||||
vm: {
|
||||
select: {
|
||||
id: true,
|
||||
name: true,
|
||||
tenant_id: true
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (!existing) {
|
||||
throw new HttpError(404, "Power schedule not found", "POWER_SCHEDULE_NOT_FOUND");
|
||||
}
|
||||
|
||||
if (isTenantScopedUser(req) && req.user?.tenant_id && existing.vm.tenant_id !== req.user.tenant_id) {
|
||||
throw new HttpError(403, "Access denied for tenant scope", "TENANT_SCOPE_VIOLATION");
|
||||
}
|
||||
|
||||
const execution = await executeVmPowerActionNow(existing.vm_id, existing.action, req.user!.email, {
|
||||
payload: {
|
||||
source: "manual_schedule_run",
|
||||
schedule_id: existing.id
|
||||
},
|
||||
scheduledFor: new Date()
|
||||
});
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
task_id: execution.task.id,
|
||||
upid: execution.upid
|
||||
});
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
export default router;
|
||||
71
backend/src/routes/payment.routes.ts
Normal file
71
backend/src/routes/payment.routes.ts
Normal file
@@ -0,0 +1,71 @@
|
||||
import { Router } from "express";
|
||||
import { z } from "zod";
|
||||
import { authorize, requireAuth } from "../middleware/auth";
|
||||
import {
|
||||
createInvoicePaymentLink,
|
||||
handleManualInvoicePayment,
|
||||
processFlutterwaveWebhook,
|
||||
processPaystackWebhook,
|
||||
verifyFlutterwaveSignature,
|
||||
verifyPaystackSignature
|
||||
} from "../services/payment.service";
|
||||
|
||||
const router = Router();
|
||||
|
||||
const createLinkSchema = z.object({
|
||||
provider: z.enum(["paystack", "flutterwave", "manual"]).optional()
|
||||
});
|
||||
|
||||
router.post("/invoices/:id/link", requireAuth, authorize("billing:manage"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = createLinkSchema.parse(req.body ?? {});
|
||||
const result = await createInvoicePaymentLink(req.params.id, payload.provider);
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
const manualSchema = z.object({
|
||||
payment_reference: z.string().min(2)
|
||||
});
|
||||
|
||||
router.post("/invoices/:id/manual-pay", requireAuth, authorize("billing:manage"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = manualSchema.parse(req.body ?? {});
|
||||
const invoice = await handleManualInvoicePayment(req.params.id, payload.payment_reference, req.user?.email ?? "manual@system");
|
||||
res.json(invoice);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/webhooks/paystack", async (req, res, next) => {
|
||||
try {
|
||||
const signature = req.header("x-paystack-signature");
|
||||
const valid = await verifyPaystackSignature(signature, req.rawBody);
|
||||
if (!valid) {
|
||||
return res.status(401).json({ error: { code: "INVALID_SIGNATURE", message: "Invalid signature" } });
|
||||
}
|
||||
const result = await processPaystackWebhook(req.body);
|
||||
return res.json(result);
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/webhooks/flutterwave", async (req, res, next) => {
|
||||
try {
|
||||
const signature = req.header("verif-hash");
|
||||
const valid = await verifyFlutterwaveSignature(signature);
|
||||
if (!valid) {
|
||||
return res.status(401).json({ error: { code: "INVALID_SIGNATURE", message: "Invalid signature" } });
|
||||
}
|
||||
const result = await processFlutterwaveWebhook(req.body);
|
||||
return res.json(result);
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
export default router;
|
||||
566
backend/src/routes/provisioning.routes.ts
Normal file
566
backend/src/routes/provisioning.routes.ts
Normal file
@@ -0,0 +1,566 @@
|
||||
import {
|
||||
ProductType,
|
||||
ServiceLifecycleStatus,
|
||||
TemplateType,
|
||||
VmType
|
||||
} from "@prisma/client";
|
||||
import { Router } from "express";
|
||||
import { z } from "zod";
|
||||
import { HttpError } from "../lib/http-error";
|
||||
import { toPrismaJsonValue } from "../lib/prisma-json";
|
||||
import { prisma } from "../lib/prisma";
|
||||
import { authorize, isTenantScopedUser, requireAuth } from "../middleware/auth";
|
||||
import { logAudit } from "../services/audit.service";
|
||||
import {
|
||||
createApplicationGroup,
|
||||
createPlacementPolicy,
|
||||
createProvisionedService,
|
||||
createTemplate,
|
||||
createVmIdRange,
|
||||
deleteApplicationGroup,
|
||||
deletePlacementPolicy,
|
||||
deleteTemplate,
|
||||
deleteVmIdRange,
|
||||
listApplicationGroups,
|
||||
listPlacementPolicies,
|
||||
listProvisionedServices,
|
||||
listTemplates,
|
||||
listVmIdRanges,
|
||||
setApplicationGroupTemplates,
|
||||
suspendProvisionedService,
|
||||
terminateProvisionedService,
|
||||
unsuspendProvisionedService,
|
||||
updateApplicationGroup,
|
||||
updatePlacementPolicy,
|
||||
updateProvisionedServicePackage,
|
||||
updateTemplate,
|
||||
updateVmIdRange
|
||||
} from "../services/provisioning.service";
|
||||
|
||||
const router = Router();
|
||||
|
||||
const templateCreateSchema = z.object({
|
||||
name: z.string().min(2),
|
||||
slug: z.string().optional(),
|
||||
template_type: z.nativeEnum(TemplateType),
|
||||
virtualization_type: z.nativeEnum(VmType).optional(),
|
||||
source: z.string().optional(),
|
||||
description: z.string().optional(),
|
||||
default_cloud_init: z.string().optional(),
|
||||
metadata: z.record(z.unknown()).optional()
|
||||
});
|
||||
|
||||
const templateUpdateSchema = z.object({
|
||||
name: z.string().min(2).optional(),
|
||||
slug: z.string().optional(),
|
||||
source: z.string().optional(),
|
||||
description: z.string().optional(),
|
||||
default_cloud_init: z.string().optional(),
|
||||
is_active: z.boolean().optional(),
|
||||
metadata: z.record(z.unknown()).optional()
|
||||
});
|
||||
|
||||
const groupCreateSchema = z.object({
|
||||
name: z.string().min(2),
|
||||
slug: z.string().optional(),
|
||||
description: z.string().optional()
|
||||
});
|
||||
|
||||
const groupUpdateSchema = z.object({
|
||||
name: z.string().min(2).optional(),
|
||||
slug: z.string().optional(),
|
||||
description: z.string().optional(),
|
||||
is_active: z.boolean().optional()
|
||||
});
|
||||
|
||||
const groupTemplatesSchema = z.object({
|
||||
templates: z
|
||||
.array(
|
||||
z.object({
|
||||
template_id: z.string().min(1),
|
||||
priority: z.number().int().positive().optional()
|
||||
})
|
||||
)
|
||||
.default([])
|
||||
});
|
||||
|
||||
const placementPolicySchema = z.object({
|
||||
group_id: z.string().optional(),
|
||||
node_id: z.string().optional(),
|
||||
product_type: z.nativeEnum(ProductType).optional(),
|
||||
cpu_weight: z.number().int().min(0).max(1000).optional(),
|
||||
ram_weight: z.number().int().min(0).max(1000).optional(),
|
||||
disk_weight: z.number().int().min(0).max(1000).optional(),
|
||||
vm_count_weight: z.number().int().min(0).max(1000).optional(),
|
||||
max_vms: z.number().int().positive().optional(),
|
||||
min_free_ram_mb: z.number().int().positive().optional(),
|
||||
min_free_disk_gb: z.number().int().positive().optional(),
|
||||
is_active: z.boolean().optional()
|
||||
});
|
||||
|
||||
const vmidRangeCreateSchema = z.object({
|
||||
node_id: z.string().optional(),
|
||||
node_hostname: z.string().min(1),
|
||||
application_group_id: z.string().optional(),
|
||||
range_start: z.number().int().positive(),
|
||||
range_end: z.number().int().positive(),
|
||||
next_vmid: z.number().int().positive().optional()
|
||||
});
|
||||
|
||||
const vmidRangeUpdateSchema = z.object({
|
||||
range_start: z.number().int().positive().optional(),
|
||||
range_end: z.number().int().positive().optional(),
|
||||
next_vmid: z.number().int().positive().optional(),
|
||||
is_active: z.boolean().optional()
|
||||
});
|
||||
|
||||
const serviceCreateSchema = z.object({
|
||||
name: z.string().min(2),
|
||||
tenant_id: z.string().min(1),
|
||||
product_type: z.nativeEnum(ProductType).default(ProductType.VPS),
|
||||
virtualization_type: z.nativeEnum(VmType).default(VmType.QEMU),
|
||||
vm_count: z.number().int().min(1).max(20).default(1),
|
||||
target_node: z.string().optional(),
|
||||
auto_node: z.boolean().default(true),
|
||||
application_group_id: z.string().optional(),
|
||||
template_id: z.string().optional(),
|
||||
billing_plan_id: z.string().optional(),
|
||||
package_options: z.record(z.unknown()).optional()
|
||||
});
|
||||
|
||||
const serviceSuspendSchema = z.object({
|
||||
reason: z.string().optional()
|
||||
});
|
||||
|
||||
const serviceTerminateSchema = z.object({
|
||||
reason: z.string().optional(),
|
||||
hard_delete: z.boolean().default(false)
|
||||
});
|
||||
|
||||
const servicePackageSchema = z.object({
|
||||
package_options: z.record(z.unknown())
|
||||
});
|
||||
|
||||
function parseOptionalLifecycleStatus(value: unknown) {
|
||||
if (typeof value !== "string") return undefined;
|
||||
const normalized = value.toUpperCase();
|
||||
return Object.values(ServiceLifecycleStatus).includes(normalized as ServiceLifecycleStatus)
|
||||
? (normalized as ServiceLifecycleStatus)
|
||||
: undefined;
|
||||
}
|
||||
|
||||
async function ensureServiceTenantScope(serviceId: string, req: Express.Request) {
|
||||
const service = await prisma.provisionedService.findUnique({
|
||||
where: { id: serviceId },
|
||||
include: {
|
||||
vm: {
|
||||
select: {
|
||||
id: true,
|
||||
tenant_id: true,
|
||||
name: true
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (!service) {
|
||||
throw new HttpError(404, "Provisioned service not found", "SERVICE_NOT_FOUND");
|
||||
}
|
||||
|
||||
if (isTenantScopedUser(req) && req.user?.tenant_id && service.tenant_id !== req.user.tenant_id) {
|
||||
throw new HttpError(403, "Access denied for tenant scope", "TENANT_SCOPE_VIOLATION");
|
||||
}
|
||||
|
||||
return service;
|
||||
}
|
||||
|
||||
router.get("/templates", requireAuth, authorize("vm:read"), async (req, res, next) => {
|
||||
try {
|
||||
const templateType = typeof req.query.template_type === "string" ? req.query.template_type.toUpperCase() : undefined;
|
||||
const isActive =
|
||||
typeof req.query.is_active === "string"
|
||||
? req.query.is_active === "true"
|
||||
: undefined;
|
||||
|
||||
const templates = await listTemplates({
|
||||
templateType,
|
||||
isActive
|
||||
});
|
||||
|
||||
res.json({ data: templates });
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/templates", requireAuth, authorize("vm:create"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = templateCreateSchema.parse(req.body ?? {});
|
||||
const template = await createTemplate({
|
||||
name: payload.name,
|
||||
slug: payload.slug,
|
||||
templateType: payload.template_type,
|
||||
virtualizationType: payload.virtualization_type,
|
||||
source: payload.source,
|
||||
description: payload.description,
|
||||
defaultCloudInit: payload.default_cloud_init,
|
||||
metadata: payload.metadata ? toPrismaJsonValue(payload.metadata) : undefined
|
||||
});
|
||||
|
||||
await logAudit({
|
||||
action: "template.create",
|
||||
resource_type: "SYSTEM",
|
||||
resource_id: template.id,
|
||||
resource_name: template.name,
|
||||
actor_email: req.user!.email,
|
||||
actor_role: req.user!.role,
|
||||
details: toPrismaJsonValue(payload),
|
||||
ip_address: req.ip
|
||||
});
|
||||
|
||||
res.status(201).json(template);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.patch("/templates/:id", requireAuth, authorize("vm:update"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = templateUpdateSchema.parse(req.body ?? {});
|
||||
const template = await updateTemplate(req.params.id, {
|
||||
name: payload.name,
|
||||
slug: payload.slug,
|
||||
source: payload.source,
|
||||
description: payload.description,
|
||||
defaultCloudInit: payload.default_cloud_init,
|
||||
isActive: payload.is_active,
|
||||
metadata: payload.metadata ? toPrismaJsonValue(payload.metadata) : undefined
|
||||
});
|
||||
|
||||
await logAudit({
|
||||
action: "template.update",
|
||||
resource_type: "SYSTEM",
|
||||
resource_id: template.id,
|
||||
resource_name: template.name,
|
||||
actor_email: req.user!.email,
|
||||
actor_role: req.user!.role,
|
||||
details: toPrismaJsonValue(payload),
|
||||
ip_address: req.ip
|
||||
});
|
||||
|
||||
res.json(template);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.delete("/templates/:id", requireAuth, authorize("vm:delete"), async (req, res, next) => {
|
||||
try {
|
||||
await deleteTemplate(req.params.id);
|
||||
res.status(204).send();
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.get("/application-groups", requireAuth, authorize("vm:read"), async (_req, res, next) => {
|
||||
try {
|
||||
const groups = await listApplicationGroups();
|
||||
res.json({ data: groups });
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/application-groups", requireAuth, authorize("vm:create"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = groupCreateSchema.parse(req.body ?? {});
|
||||
const group = await createApplicationGroup({
|
||||
name: payload.name,
|
||||
slug: payload.slug,
|
||||
description: payload.description
|
||||
});
|
||||
res.status(201).json(group);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.patch("/application-groups/:id", requireAuth, authorize("vm:update"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = groupUpdateSchema.parse(req.body ?? {});
|
||||
const group = await updateApplicationGroup(req.params.id, {
|
||||
name: payload.name,
|
||||
slug: payload.slug,
|
||||
description: payload.description,
|
||||
isActive: payload.is_active
|
||||
});
|
||||
res.json(group);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.delete("/application-groups/:id", requireAuth, authorize("vm:delete"), async (req, res, next) => {
|
||||
try {
|
||||
await deleteApplicationGroup(req.params.id);
|
||||
res.status(204).send();
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.put("/application-groups/:id/templates", requireAuth, authorize("vm:update"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = groupTemplatesSchema.parse(req.body ?? {});
|
||||
|
||||
const assignments = await setApplicationGroupTemplates(
|
||||
req.params.id,
|
||||
payload.templates.map((template) => ({
|
||||
templateId: template.template_id,
|
||||
priority: template.priority
|
||||
}))
|
||||
);
|
||||
|
||||
res.json({ data: assignments });
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.get("/placement-policies", requireAuth, authorize("node:read"), async (_req, res, next) => {
|
||||
try {
|
||||
const policies = await listPlacementPolicies();
|
||||
res.json({ data: policies });
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/placement-policies", requireAuth, authorize("node:manage"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = placementPolicySchema.parse(req.body ?? {});
|
||||
const policy = await createPlacementPolicy({
|
||||
groupId: payload.group_id,
|
||||
nodeId: payload.node_id,
|
||||
productType: payload.product_type,
|
||||
cpuWeight: payload.cpu_weight,
|
||||
ramWeight: payload.ram_weight,
|
||||
diskWeight: payload.disk_weight,
|
||||
vmCountWeight: payload.vm_count_weight,
|
||||
maxVms: payload.max_vms,
|
||||
minFreeRamMb: payload.min_free_ram_mb,
|
||||
minFreeDiskGb: payload.min_free_disk_gb
|
||||
});
|
||||
res.status(201).json(policy);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.patch("/placement-policies/:id", requireAuth, authorize("node:manage"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = placementPolicySchema.parse(req.body ?? {});
|
||||
const policy = await updatePlacementPolicy(req.params.id, {
|
||||
cpuWeight: payload.cpu_weight,
|
||||
ramWeight: payload.ram_weight,
|
||||
diskWeight: payload.disk_weight,
|
||||
vmCountWeight: payload.vm_count_weight,
|
||||
maxVms: payload.max_vms ?? null,
|
||||
minFreeRamMb: payload.min_free_ram_mb ?? null,
|
||||
minFreeDiskGb: payload.min_free_disk_gb ?? null,
|
||||
isActive: payload.is_active
|
||||
});
|
||||
res.json(policy);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.delete("/placement-policies/:id", requireAuth, authorize("node:manage"), async (req, res, next) => {
|
||||
try {
|
||||
await deletePlacementPolicy(req.params.id);
|
||||
res.status(204).send();
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.get("/vmid-ranges", requireAuth, authorize("node:read"), async (_req, res, next) => {
|
||||
try {
|
||||
const ranges = await listVmIdRanges();
|
||||
res.json({ data: ranges });
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/vmid-ranges", requireAuth, authorize("node:manage"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = vmidRangeCreateSchema.parse(req.body ?? {});
|
||||
const range = await createVmIdRange({
|
||||
nodeId: payload.node_id,
|
||||
nodeHostname: payload.node_hostname,
|
||||
applicationGroupId: payload.application_group_id,
|
||||
rangeStart: payload.range_start,
|
||||
rangeEnd: payload.range_end,
|
||||
nextVmid: payload.next_vmid
|
||||
});
|
||||
res.status(201).json(range);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.patch("/vmid-ranges/:id", requireAuth, authorize("node:manage"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = vmidRangeUpdateSchema.parse(req.body ?? {});
|
||||
const range = await updateVmIdRange(req.params.id, {
|
||||
rangeStart: payload.range_start,
|
||||
rangeEnd: payload.range_end,
|
||||
nextVmid: payload.next_vmid,
|
||||
isActive: payload.is_active
|
||||
});
|
||||
res.json(range);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.delete("/vmid-ranges/:id", requireAuth, authorize("node:manage"), async (req, res, next) => {
|
||||
try {
|
||||
await deleteVmIdRange(req.params.id);
|
||||
res.status(204).send();
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.get("/services", requireAuth, authorize("vm:read"), async (req, res, next) => {
|
||||
try {
|
||||
const lifecycleStatus = parseOptionalLifecycleStatus(req.query.lifecycle_status);
|
||||
const limit = typeof req.query.limit === "string" ? Number(req.query.limit) : undefined;
|
||||
const offset = typeof req.query.offset === "string" ? Number(req.query.offset) : undefined;
|
||||
|
||||
const result = await listProvisionedServices({
|
||||
tenantId: isTenantScopedUser(req) ? req.user?.tenant_id ?? undefined : undefined,
|
||||
lifecycleStatus,
|
||||
limit,
|
||||
offset
|
||||
});
|
||||
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/services", requireAuth, authorize("vm:create"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = serviceCreateSchema.parse(req.body ?? {});
|
||||
|
||||
if (isTenantScopedUser(req) && req.user?.tenant_id && payload.tenant_id !== req.user.tenant_id) {
|
||||
throw new HttpError(403, "Access denied for tenant scope", "TENANT_SCOPE_VIOLATION");
|
||||
}
|
||||
|
||||
const services = await createProvisionedService({
|
||||
name: payload.name,
|
||||
tenantId: payload.tenant_id,
|
||||
productType: payload.product_type,
|
||||
virtualizationType: payload.virtualization_type,
|
||||
vmCount: payload.vm_count,
|
||||
targetNode: payload.target_node,
|
||||
autoNode: payload.auto_node,
|
||||
applicationGroupId: payload.application_group_id,
|
||||
templateId: payload.template_id,
|
||||
billingPlanId: payload.billing_plan_id,
|
||||
packageOptions: payload.package_options ? toPrismaJsonValue(payload.package_options) : undefined,
|
||||
createdBy: req.user?.email
|
||||
});
|
||||
|
||||
await logAudit({
|
||||
action: "service.create",
|
||||
resource_type: "VM",
|
||||
actor_email: req.user!.email,
|
||||
actor_role: req.user!.role,
|
||||
details: {
|
||||
tenant_id: payload.tenant_id,
|
||||
product_type: payload.product_type,
|
||||
vm_count: payload.vm_count,
|
||||
created_services: services.map((service) => service.id)
|
||||
},
|
||||
ip_address: req.ip
|
||||
});
|
||||
|
||||
res.status(201).json({ data: services });
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/services/:id/suspend", requireAuth, authorize("vm:update"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = serviceSuspendSchema.parse(req.body ?? {});
|
||||
await ensureServiceTenantScope(req.params.id, req);
|
||||
|
||||
const service = await suspendProvisionedService({
|
||||
serviceId: req.params.id,
|
||||
actorEmail: req.user!.email,
|
||||
reason: payload.reason
|
||||
});
|
||||
|
||||
res.json(service);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/services/:id/unsuspend", requireAuth, authorize("vm:update"), async (req, res, next) => {
|
||||
try {
|
||||
await ensureServiceTenantScope(req.params.id, req);
|
||||
const service = await unsuspendProvisionedService({
|
||||
serviceId: req.params.id,
|
||||
actorEmail: req.user!.email
|
||||
});
|
||||
res.json(service);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/services/:id/terminate", requireAuth, authorize("vm:delete"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = serviceTerminateSchema.parse(req.body ?? {});
|
||||
await ensureServiceTenantScope(req.params.id, req);
|
||||
|
||||
const service = await terminateProvisionedService({
|
||||
serviceId: req.params.id,
|
||||
actorEmail: req.user!.email,
|
||||
reason: payload.reason,
|
||||
hardDelete: payload.hard_delete
|
||||
});
|
||||
|
||||
res.json(service);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.patch("/services/:id/package-options", requireAuth, authorize("vm:update"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = servicePackageSchema.parse(req.body ?? {});
|
||||
await ensureServiceTenantScope(req.params.id, req);
|
||||
|
||||
const service = await updateProvisionedServicePackage({
|
||||
serviceId: req.params.id,
|
||||
actorEmail: req.user!.email,
|
||||
packageOptions: toPrismaJsonValue(payload.package_options)
|
||||
});
|
||||
|
||||
res.json(service);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
export default router;
|
||||
637
backend/src/routes/proxmox.routes.ts
Normal file
637
backend/src/routes/proxmox.routes.ts
Normal file
@@ -0,0 +1,637 @@
|
||||
import { OperationTaskType, Prisma } from "@prisma/client";
|
||||
import { Router } from "express";
|
||||
import { z } from "zod";
|
||||
import { HttpError } from "../lib/http-error";
|
||||
import { prisma } from "../lib/prisma";
|
||||
import { authorize, requireAuth } from "../middleware/auth";
|
||||
import {
|
||||
addVmDisk,
|
||||
clusterUsageGraphs,
|
||||
deleteVm,
|
||||
migrateVm,
|
||||
nodeUsageGraphs,
|
||||
vmUsageGraphs,
|
||||
reinstallVm,
|
||||
reconfigureVmNetwork,
|
||||
restartVm,
|
||||
resumeVm,
|
||||
shutdownVm,
|
||||
startVm,
|
||||
stopVm,
|
||||
suspendVm,
|
||||
syncNodesAndVirtualMachines,
|
||||
updateVmConfiguration,
|
||||
vmConsoleTicket
|
||||
} from "../services/proxmox.service";
|
||||
import { logAudit } from "../services/audit.service";
|
||||
import {
|
||||
createOperationTask,
|
||||
markOperationTaskFailed,
|
||||
markOperationTaskRunning,
|
||||
markOperationTaskSuccess
|
||||
} from "../services/operations.service";
|
||||
|
||||
const router = Router();
|
||||
const consoleTypeSchema = z.enum(["novnc", "spice", "xterm"]);
|
||||
const graphTimeframeSchema = z.enum(["hour", "day", "week", "month", "year"]);
|
||||
|
||||
function vmRuntimeType(vm: { type: "QEMU" | "LXC" }) {
|
||||
return vm.type === "LXC" ? "lxc" : "qemu";
|
||||
}
|
||||
|
||||
function withUpid(payload: Prisma.InputJsonObject, upid?: string): Prisma.InputJsonObject {
|
||||
if (!upid) {
|
||||
return payload;
|
||||
}
|
||||
|
||||
return {
|
||||
...payload,
|
||||
upid
|
||||
};
|
||||
}
|
||||
|
||||
async function fetchVm(vmId: string) {
|
||||
const vm = await prisma.virtualMachine.findUnique({ where: { id: vmId } });
|
||||
if (!vm) {
|
||||
throw new HttpError(404, "VM not found", "VM_NOT_FOUND");
|
||||
}
|
||||
return vm;
|
||||
}
|
||||
|
||||
async function resolveConsoleProxyTarget(node: string, consoleType: "novnc" | "spice" | "xterm") {
|
||||
const setting = await prisma.setting.findUnique({
|
||||
where: {
|
||||
key: "console_proxy"
|
||||
}
|
||||
});
|
||||
|
||||
const raw = setting?.value as
|
||||
| {
|
||||
mode?: "cluster" | "per_node";
|
||||
cluster?: Record<string, unknown>;
|
||||
nodes?: Record<string, Record<string, unknown>>;
|
||||
}
|
||||
| undefined;
|
||||
|
||||
if (!raw) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const mode = raw.mode ?? "cluster";
|
||||
if (mode === "per_node") {
|
||||
const nodeConfig = raw.nodes?.[node];
|
||||
if (nodeConfig && typeof nodeConfig[consoleType] === "string") {
|
||||
return String(nodeConfig[consoleType]);
|
||||
}
|
||||
}
|
||||
|
||||
if (raw.cluster && typeof raw.cluster[consoleType] === "string") {
|
||||
return String(raw.cluster[consoleType]);
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
router.post("/sync", requireAuth, authorize("node:manage"), async (req, res, next) => {
|
||||
try {
|
||||
const task = await createOperationTask({
|
||||
taskType: OperationTaskType.SYSTEM_SYNC,
|
||||
requestedBy: req.user?.email,
|
||||
payload: { source: "manual_sync" }
|
||||
});
|
||||
|
||||
await markOperationTaskRunning(task.id);
|
||||
|
||||
try {
|
||||
const result = await syncNodesAndVirtualMachines();
|
||||
await markOperationTaskSuccess(task.id, {
|
||||
node_count: result.node_count
|
||||
});
|
||||
|
||||
await logAudit({
|
||||
action: "proxmox_sync",
|
||||
resource_type: "NODE",
|
||||
actor_email: req.user!.email,
|
||||
actor_role: req.user!.role,
|
||||
details: {
|
||||
node_count: result.node_count,
|
||||
task_id: task.id
|
||||
},
|
||||
ip_address: req.ip
|
||||
});
|
||||
|
||||
res.json({
|
||||
...result,
|
||||
task_id: task.id
|
||||
});
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Proxmox sync failed";
|
||||
await markOperationTaskFailed(task.id, message);
|
||||
throw error;
|
||||
}
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
const actionSchema = z.object({
|
||||
action: z.enum(["start", "stop", "restart", "shutdown", "suspend", "resume", "delete"])
|
||||
});
|
||||
|
||||
// Generic VM action endpoint: validates the action name, records an operation
// task, performs the Proxmox call, and mirrors the expected resulting state
// into the local VirtualMachine row.
// NOTE(review): the "delete" action runs under authorize("vm:update") while
// other delete-style endpoints in this codebase use vm:delete — confirm this
// is intentional.
router.post("/vms/:id/actions/:action", requireAuth, authorize("vm:update"), async (req, res, next) => {
  try {
    const { action } = actionSchema.parse(req.params);
    const vm = await fetchVm(req.params.id);
    const type = vmRuntimeType(vm);

    // Deletion gets its own task type; all other actions are power transitions.
    const taskType = action === "delete" ? OperationTaskType.VM_DELETE : OperationTaskType.VM_POWER;
    const task = await createOperationTask({
      taskType,
      vm: {
        id: vm.id,
        name: vm.name,
        node: vm.node
      },
      requestedBy: req.user?.email,
      payload: { action }
    });

    await markOperationTaskRunning(task.id);

    let upid: string | undefined;

    try {
      // Each branch fires the Proxmox call and then optimistically stores the
      // expected status; the returned UPID links the row to the Proxmox task.
      if (action === "start") {
        upid = await startVm(vm.node, vm.vmid, type);
        await prisma.virtualMachine.update({ where: { id: vm.id }, data: { status: "RUNNING", proxmox_upid: upid } });
      } else if (action === "stop") {
        upid = await stopVm(vm.node, vm.vmid, type);
        await prisma.virtualMachine.update({ where: { id: vm.id }, data: { status: "STOPPED", proxmox_upid: upid } });
      } else if (action === "restart") {
        upid = await restartVm(vm.node, vm.vmid, type);
        await prisma.virtualMachine.update({ where: { id: vm.id }, data: { status: "RUNNING", proxmox_upid: upid } });
      } else if (action === "shutdown") {
        upid = await shutdownVm(vm.node, vm.vmid, type);
        await prisma.virtualMachine.update({ where: { id: vm.id }, data: { status: "STOPPED", proxmox_upid: upid } });
      } else if (action === "suspend") {
        upid = await suspendVm(vm.node, vm.vmid, type);
        await prisma.virtualMachine.update({ where: { id: vm.id }, data: { status: "PAUSED", proxmox_upid: upid } });
      } else if (action === "resume") {
        upid = await resumeVm(vm.node, vm.vmid, type);
        await prisma.virtualMachine.update({ where: { id: vm.id }, data: { status: "RUNNING", proxmox_upid: upid } });
      } else {
        // action === "delete": remove from Proxmox, then drop the local row.
        upid = await deleteVm(vm.node, vm.vmid, type);
        await prisma.virtualMachine.delete({ where: { id: vm.id } });
      }

      const taskResult = withUpid(
        {
          vm_id: vm.id,
          action
        },
        upid
      );

      await markOperationTaskSuccess(task.id, taskResult, upid);

      await logAudit({
        action: `vm_${action}`,
        resource_type: "VM",
        resource_id: vm.id,
        resource_name: vm.name,
        actor_email: req.user!.email,
        actor_role: req.user!.role,
        details: {
          ...taskResult,
          task_id: task.id
        },
        ip_address: req.ip
      });

      res.json({ success: true, action, upid, task_id: task.id });
    } catch (error) {
      // Mark the task failed, then rethrow so the outer catch forwards the
      // original error to the error middleware.
      const message = error instanceof Error ? error.message : "VM action failed";
      await markOperationTaskFailed(task.id, message);
      throw error;
    }
  } catch (error) {
    next(error);
  }
});
|
||||
|
||||
const migrateSchema = z.object({
|
||||
target_node: z.string().min(1)
|
||||
});
|
||||
|
||||
router.post("/vms/:id/migrate", requireAuth, authorize("vm:update"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = migrateSchema.parse(req.body);
|
||||
const vm = await fetchVm(req.params.id);
|
||||
const type = vmRuntimeType(vm);
|
||||
|
||||
const task = await createOperationTask({
|
||||
taskType: OperationTaskType.VM_MIGRATION,
|
||||
vm: {
|
||||
id: vm.id,
|
||||
name: vm.name,
|
||||
node: vm.node
|
||||
},
|
||||
requestedBy: req.user?.email,
|
||||
payload
|
||||
});
|
||||
|
||||
await markOperationTaskRunning(task.id);
|
||||
|
||||
try {
|
||||
const upid = await migrateVm(vm.node, vm.vmid, payload.target_node, type);
|
||||
await prisma.virtualMachine.update({
|
||||
where: { id: vm.id },
|
||||
data: { node: payload.target_node, status: "MIGRATING", proxmox_upid: upid }
|
||||
});
|
||||
|
||||
const migrationResult = withUpid(
|
||||
{
|
||||
vm_id: vm.id,
|
||||
from_node: vm.node,
|
||||
target_node: payload.target_node
|
||||
},
|
||||
upid
|
||||
);
|
||||
|
||||
await markOperationTaskSuccess(task.id, migrationResult, upid);
|
||||
res.json({ success: true, upid, target_node: payload.target_node, task_id: task.id });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "VM migrate failed";
|
||||
await markOperationTaskFailed(task.id, message);
|
||||
throw error;
|
||||
}
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
const configSchema = z
|
||||
.object({
|
||||
hostname: z.string().min(1).optional(),
|
||||
iso_image: z.string().min(1).optional(),
|
||||
boot_order: z.string().min(1).optional(),
|
||||
ssh_public_key: z.string().min(10).optional(),
|
||||
qemu_guest_agent: z.boolean().optional()
|
||||
})
|
||||
.refine((value) => Object.keys(value).length > 0, {
|
||||
message: "At least one configuration field is required"
|
||||
});
|
||||
|
||||
router.patch("/vms/:id/config", requireAuth, authorize("vm:update"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = configSchema.parse(req.body ?? {});
|
||||
const vm = await fetchVm(req.params.id);
|
||||
const type = vmRuntimeType(vm);
|
||||
|
||||
const config: Record<string, string | number | boolean> = {};
|
||||
if (payload.hostname) config.name = payload.hostname;
|
||||
if (payload.boot_order) config.boot = payload.boot_order;
|
||||
if (payload.ssh_public_key) config.sshkeys = payload.ssh_public_key;
|
||||
if (payload.iso_image && vm.type === "QEMU") config.ide2 = `${payload.iso_image},media=cdrom`;
|
||||
if (typeof payload.qemu_guest_agent === "boolean" && vm.type === "QEMU") {
|
||||
config.agent = payload.qemu_guest_agent ? 1 : 0;
|
||||
}
|
||||
|
||||
const task = await createOperationTask({
|
||||
taskType: OperationTaskType.VM_CONFIG,
|
||||
vm: { id: vm.id, name: vm.name, node: vm.node },
|
||||
requestedBy: req.user?.email,
|
||||
payload
|
||||
});
|
||||
|
||||
await markOperationTaskRunning(task.id);
|
||||
|
||||
try {
|
||||
const upid = await updateVmConfiguration(vm.node, vm.vmid, type, config);
|
||||
const configResult = withUpid(
|
||||
{
|
||||
vm_id: vm.id,
|
||||
config: config as unknown as Prisma.InputJsonValue
|
||||
},
|
||||
upid
|
||||
);
|
||||
await markOperationTaskSuccess(task.id, configResult, upid);
|
||||
|
||||
await logAudit({
|
||||
action: "vm_config_update",
|
||||
resource_type: "VM",
|
||||
resource_id: vm.id,
|
||||
resource_name: vm.name,
|
||||
actor_email: req.user!.email,
|
||||
actor_role: req.user!.role,
|
||||
details: {
|
||||
config: config as unknown as Prisma.InputJsonValue,
|
||||
task_id: task.id,
|
||||
...(upid ? { upid } : {})
|
||||
},
|
||||
ip_address: req.ip
|
||||
});
|
||||
|
||||
res.json({ success: true, upid, task_id: task.id, config_applied: config });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "VM config update failed";
|
||||
await markOperationTaskFailed(task.id, message);
|
||||
throw error;
|
||||
}
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
const networkSchema = z.object({
|
||||
interface_name: z.string().optional(),
|
||||
bridge: z.string().min(1),
|
||||
vlan_tag: z.number().int().min(0).max(4094).optional(),
|
||||
rate_mbps: z.number().int().positive().optional(),
|
||||
firewall: z.boolean().optional(),
|
||||
ip_mode: z.enum(["dhcp", "static"]).default("dhcp"),
|
||||
ip_cidr: z.string().optional(),
|
||||
gateway: z.string().optional()
|
||||
});
|
||||
|
||||
router.patch("/vms/:id/network", requireAuth, authorize("vm:update"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = networkSchema.parse(req.body ?? {});
|
||||
if (payload.ip_mode === "static" && !payload.ip_cidr) {
|
||||
throw new HttpError(400, "ip_cidr is required when ip_mode=static", "INVALID_NETWORK_PAYLOAD");
|
||||
}
|
||||
|
||||
const vm = await fetchVm(req.params.id);
|
||||
const type = vmRuntimeType(vm);
|
||||
|
||||
const task = await createOperationTask({
|
||||
taskType: OperationTaskType.VM_NETWORK,
|
||||
vm: { id: vm.id, name: vm.name, node: vm.node },
|
||||
requestedBy: req.user?.email,
|
||||
payload
|
||||
});
|
||||
|
||||
await markOperationTaskRunning(task.id);
|
||||
|
||||
try {
|
||||
const networkInput: Parameters<typeof reconfigureVmNetwork>[3] = {
|
||||
interface_name: payload.interface_name,
|
||||
bridge: payload.bridge,
|
||||
vlan_tag: payload.vlan_tag,
|
||||
rate_mbps: payload.rate_mbps,
|
||||
firewall: payload.firewall,
|
||||
ip_mode: payload.ip_mode,
|
||||
ip_cidr: payload.ip_cidr,
|
||||
gateway: payload.gateway
|
||||
};
|
||||
const upid = await reconfigureVmNetwork(vm.node, vm.vmid, type, networkInput);
|
||||
const networkResult = withUpid(
|
||||
{
|
||||
vm_id: vm.id,
|
||||
network: payload as unknown as Prisma.InputJsonValue
|
||||
},
|
||||
upid
|
||||
);
|
||||
await markOperationTaskSuccess(task.id, networkResult, upid);
|
||||
res.json({ success: true, upid, task_id: task.id });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "VM network update failed";
|
||||
await markOperationTaskFailed(task.id, message);
|
||||
throw error;
|
||||
}
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
const diskSchema = z.object({
|
||||
storage: z.string().min(1),
|
||||
size_gb: z.number().int().positive(),
|
||||
bus: z.enum(["scsi", "sata", "virtio", "ide"]).default("scsi"),
|
||||
mount_point: z.string().optional()
|
||||
});
|
||||
|
||||
router.post("/vms/:id/disks", requireAuth, authorize("vm:update"), async (req, res, next) => {
|
||||
try {
|
||||
const payload = diskSchema.parse(req.body ?? {});
|
||||
const vm = await fetchVm(req.params.id);
|
||||
const type = vmRuntimeType(vm);
|
||||
|
||||
const task = await createOperationTask({
|
||||
taskType: OperationTaskType.VM_CONFIG,
|
||||
vm: { id: vm.id, name: vm.name, node: vm.node },
|
||||
requestedBy: req.user?.email,
|
||||
payload
|
||||
});
|
||||
|
||||
await markOperationTaskRunning(task.id);
|
||||
|
||||
try {
|
||||
const diskInput: Parameters<typeof addVmDisk>[3] = {
|
||||
storage: payload.storage,
|
||||
size_gb: payload.size_gb,
|
||||
bus: payload.bus,
|
||||
mount_point: payload.mount_point
|
||||
};
|
||||
const upid = await addVmDisk(vm.node, vm.vmid, type, diskInput);
|
||||
const diskResult = withUpid(
|
||||
{
|
||||
vm_id: vm.id,
|
||||
disk: payload as unknown as Prisma.InputJsonValue
|
||||
},
|
||||
upid
|
||||
);
|
||||
await markOperationTaskSuccess(task.id, diskResult, upid);
|
||||
res.status(201).json({ success: true, upid, task_id: task.id });
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "VM disk attach failed";
|
||||
await markOperationTaskFailed(task.id, message);
|
||||
throw error;
|
||||
}
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
// Reinstall payload: optional pre-reinstall backup plus ISO / SSH key overrides.
const reinstallSchema = z.object({
  backup_before_reinstall: z.boolean().default(false),
  iso_image: z.string().optional(),
  ssh_public_key: z.string().optional()
});

// Reinstall a VM/CT, optionally recording a full manual backup first; the
// whole operation is tracked as a VM_REINSTALL operation task.
router.post("/vms/:id/reinstall", requireAuth, authorize("vm:update"), async (req, res, next) => {
  try {
    const payload = reinstallSchema.parse(req.body ?? {});
    const vm = await fetchVm(req.params.id);
    const type = vmRuntimeType(vm);

    if (payload.backup_before_reinstall) {
      // Only a PENDING backup row is created here; presumably a background
      // worker executes it — TODO confirm against the backup scheduler.
      await prisma.backup.create({
        data: {
          vm_id: vm.id,
          vm_name: vm.name,
          node: vm.node,
          status: "PENDING",
          type: "FULL",
          schedule: "MANUAL",
          notes: "Auto-created before VM reinstall"
        }
      });
    }

    const task = await createOperationTask({
      taskType: OperationTaskType.VM_REINSTALL,
      vm: { id: vm.id, name: vm.name, node: vm.node },
      requestedBy: req.user?.email,
      payload
    });

    await markOperationTaskRunning(task.id);

    try {
      const upid = await reinstallVm(vm.node, vm.vmid, type, {
        iso_image: payload.iso_image,
        ssh_public_key: payload.ssh_public_key
      });

      // Optimistically mark the VM running again after the reinstall call.
      await prisma.virtualMachine.update({
        where: { id: vm.id },
        data: {
          status: "RUNNING",
          proxmox_upid: upid ?? undefined
        }
      });

      const reinstallResult = withUpid(
        {
          vm_id: vm.id,
          reinstall: payload as unknown as Prisma.InputJsonValue
        },
        upid
      );

      await markOperationTaskSuccess(task.id, reinstallResult, upid);

      res.json({ success: true, upid, task_id: task.id });
    } catch (error) {
      // Mark the task failed, then rethrow to the outer catch -> next(error).
      const message = error instanceof Error ? error.message : "VM reinstall failed";
      await markOperationTaskFailed(task.id, message);
      throw error;
    }
  } catch (error) {
    next(error);
  }
});
|
||||
|
||||
router.get("/vms/:id/console", requireAuth, authorize("vm:read"), async (req, res, next) => {
|
||||
try {
|
||||
const vm = await fetchVm(req.params.id);
|
||||
const type = vmRuntimeType(vm);
|
||||
const consoleType = consoleTypeSchema.parse(
|
||||
typeof req.query.console_type === "string"
|
||||
? req.query.console_type.toLowerCase()
|
||||
: "novnc"
|
||||
);
|
||||
const ticket = await vmConsoleTicket(vm.node, vm.vmid, type, consoleType);
|
||||
const proxyTarget = await resolveConsoleProxyTarget(vm.node, consoleType);
|
||||
|
||||
res.json({
|
||||
...ticket,
|
||||
console_type: consoleType,
|
||||
proxy_target: proxyTarget ?? null
|
||||
});
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.get("/vms/:id/usage-graphs", requireAuth, authorize("vm:read"), async (req, res, next) => {
|
||||
try {
|
||||
const vm = await fetchVm(req.params.id);
|
||||
const type = vmRuntimeType(vm);
|
||||
const timeframe = graphTimeframeSchema.parse(
|
||||
typeof req.query.timeframe === "string" ? req.query.timeframe.toLowerCase() : "day"
|
||||
);
|
||||
|
||||
const graph = await vmUsageGraphs(vm.node, vm.vmid, type, timeframe, {
|
||||
cpu_usage: vm.cpu_usage,
|
||||
ram_usage: vm.ram_usage,
|
||||
disk_usage: vm.disk_usage,
|
||||
network_in: vm.network_in,
|
||||
network_out: vm.network_out
|
||||
});
|
||||
|
||||
return res.json({
|
||||
vm_id: vm.id,
|
||||
vm_name: vm.name,
|
||||
vm_type: vm.type,
|
||||
node: vm.node,
|
||||
timeframe: graph.timeframe,
|
||||
source: graph.source,
|
||||
summary: graph.summary,
|
||||
points: graph.points
|
||||
});
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.get("/nodes/:id/usage-graphs", requireAuth, authorize("node:read"), async (req, res, next) => {
|
||||
try {
|
||||
const node = await prisma.proxmoxNode.findFirst({
|
||||
where: {
|
||||
OR: [{ id: req.params.id }, { hostname: req.params.id }, { name: req.params.id }]
|
||||
}
|
||||
});
|
||||
|
||||
if (!node) {
|
||||
throw new HttpError(404, "Node not found", "NODE_NOT_FOUND");
|
||||
}
|
||||
|
||||
const timeframe = graphTimeframeSchema.parse(
|
||||
typeof req.query.timeframe === "string" ? req.query.timeframe.toLowerCase() : "day"
|
||||
);
|
||||
|
||||
const graph = await nodeUsageGraphs(node.hostname, timeframe, {
|
||||
cpu_usage: node.cpu_usage,
|
||||
ram_used_mb: node.ram_used_mb,
|
||||
ram_total_mb: node.ram_total_mb,
|
||||
disk_used_gb: node.disk_used_gb,
|
||||
disk_total_gb: node.disk_total_gb
|
||||
});
|
||||
|
||||
return res.json({
|
||||
node_id: node.id,
|
||||
node_name: node.name,
|
||||
node_hostname: node.hostname,
|
||||
timeframe: graph.timeframe,
|
||||
source: graph.source,
|
||||
summary: graph.summary,
|
||||
points: graph.points
|
||||
});
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.get("/cluster/usage-graphs", requireAuth, authorize("node:read"), async (req, res, next) => {
|
||||
try {
|
||||
const timeframe = graphTimeframeSchema.parse(
|
||||
typeof req.query.timeframe === "string" ? req.query.timeframe.toLowerCase() : "day"
|
||||
);
|
||||
const graph = await clusterUsageGraphs(timeframe);
|
||||
|
||||
return res.json({
|
||||
timeframe: graph.timeframe,
|
||||
source: graph.source,
|
||||
node_count: graph.node_count,
|
||||
nodes: graph.nodes,
|
||||
summary: graph.summary,
|
||||
points: graph.points
|
||||
});
|
||||
} catch (error) {
|
||||
return next(error);
|
||||
}
|
||||
});
|
||||
|
||||
export default router;
|
||||
723
backend/src/routes/resources.routes.ts
Normal file
723
backend/src/routes/resources.routes.ts
Normal file
@@ -0,0 +1,723 @@
|
||||
import { Router } from "express";
|
||||
import { authorize, isTenantScopedUser, requireAuth } from "../middleware/auth";
|
||||
import { HttpError } from "../lib/http-error";
|
||||
import { toPrismaJsonValue } from "../lib/prisma-json";
|
||||
import { logAudit } from "../services/audit.service";
|
||||
import { prisma } from "../lib/prisma";
|
||||
const router = Router();
|
||||
|
||||
// Static description of one generic CRUD resource served by this router.
type ResourceMeta = {
  // Prisma model name (values like "tenant", "virtualMachine" below) —
  // presumably used to index the Prisma client; confirm in the handlers.
  model: string;
  // Permission string required for read/list access.
  readPermission: Parameters<typeof authorize>[0];
  // Write permissions; optional — absent for read-only resources
  // (e.g. audit-logs below declares only readPermission).
  createPermission?: Parameters<typeof authorize>[0];
  updatePermission?: Parameters<typeof authorize>[0];
  deletePermission?: Parameters<typeof authorize>[0];
  // Whether rows belong to a tenant and should be scoped to the caller's tenant.
  tenantScoped: boolean;
  // Columns matched by free-text search, when the resource supports it.
  searchFields?: string[];
};
|
||||
|
||||
// Registry of every resource exposed through the generic CRUD routes, keyed
// by URL segment. Permission strings match those consumed by authorize();
// resources without a create/update/delete permission are read-only (or, for
// monitoring-alert-events, update-only beyond read).
const resourceMap: Record<string, ResourceMeta> = {
  tenants: {
    model: "tenant",
    readPermission: "tenant:read",
    createPermission: "tenant:manage",
    updatePermission: "tenant:manage",
    deletePermission: "tenant:manage",
    tenantScoped: false,
    searchFields: ["name", "owner_email", "slug"]
  },
  "virtual-machines": {
    model: "virtualMachine",
    readPermission: "vm:read",
    createPermission: "vm:create",
    updatePermission: "vm:update",
    deletePermission: "vm:delete",
    tenantScoped: true,
    searchFields: ["name", "ip_address", "node"]
  },
  nodes: {
    model: "proxmoxNode",
    readPermission: "node:read",
    createPermission: "node:manage",
    updatePermission: "node:manage",
    deletePermission: "node:manage",
    tenantScoped: false,
    searchFields: ["name", "hostname"]
  },
  "billing-plans": {
    model: "billingPlan",
    readPermission: "billing:read",
    createPermission: "billing:manage",
    updatePermission: "billing:manage",
    deletePermission: "billing:manage",
    tenantScoped: false,
    searchFields: ["name", "slug", "description"]
  },
  invoices: {
    model: "invoice",
    readPermission: "billing:read",
    createPermission: "billing:manage",
    updatePermission: "billing:manage",
    deletePermission: "billing:manage",
    tenantScoped: true,
    searchFields: ["invoice_number", "tenant_name", "payment_reference"]
  },
  "usage-records": {
    model: "usageRecord",
    readPermission: "billing:read",
    createPermission: "billing:manage",
    updatePermission: "billing:manage",
    deletePermission: "billing:manage",
    tenantScoped: true,
    searchFields: ["vm_name", "tenant_name", "plan_name"]
  },
  backups: {
    model: "backup",
    readPermission: "backup:read",
    createPermission: "backup:manage",
    updatePermission: "backup:manage",
    deletePermission: "backup:manage",
    tenantScoped: true,
    searchFields: ["vm_name", "node", "storage"]
  },
  "backup-policies": {
    model: "backupPolicy",
    readPermission: "backup:read",
    createPermission: "backup:manage",
    updatePermission: "backup:manage",
    deletePermission: "backup:manage",
    tenantScoped: true
  },
  "backup-restore-tasks": {
    model: "backupRestoreTask",
    readPermission: "backup:read",
    createPermission: "backup:manage",
    updatePermission: "backup:manage",
    deletePermission: "backup:manage",
    tenantScoped: true
  },
  "snapshot-jobs": {
    model: "snapshotJob",
    readPermission: "backup:read",
    createPermission: "backup:manage",
    updatePermission: "backup:manage",
    deletePermission: "backup:manage",
    tenantScoped: true
  },
  // Read-only: audit logs are never created/edited through this router.
  "audit-logs": {
    model: "auditLog",
    readPermission: "audit:read",
    tenantScoped: false,
    searchFields: ["action", "resource_name", "actor_email"]
  },
  "security-events": {
    model: "securityEvent",
    readPermission: "security:read",
    createPermission: "security:manage",
    updatePermission: "security:manage",
    deletePermission: "security:manage",
    tenantScoped: false,
    searchFields: ["event_type", "source_ip", "description"]
  },
  "firewall-rules": {
    model: "firewallRule",
    readPermission: "security:read",
    createPermission: "security:manage",
    updatePermission: "security:manage",
    deletePermission: "security:manage",
    tenantScoped: false,
    searchFields: ["name", "source_ip", "destination_ip", "description"]
  },
  users: {
    model: "user",
    readPermission: "user:read",
    createPermission: "user:manage",
    updatePermission: "user:manage",
    deletePermission: "user:manage",
    tenantScoped: true,
    searchFields: ["email", "full_name"]
  },
  "app-templates": {
    model: "appTemplate",
    readPermission: "vm:read",
    createPermission: "vm:create",
    updatePermission: "vm:update",
    deletePermission: "vm:delete",
    tenantScoped: false,
    searchFields: ["name", "slug", "description", "source"]
  },
  "application-groups": {
    model: "applicationGroup",
    readPermission: "vm:read",
    createPermission: "vm:create",
    updatePermission: "vm:update",
    deletePermission: "vm:delete",
    tenantScoped: false,
    searchFields: ["name", "slug", "description"]
  },
  "placement-policies": {
    model: "nodePlacementPolicy",
    readPermission: "node:read",
    createPermission: "node:manage",
    updatePermission: "node:manage",
    deletePermission: "node:manage",
    tenantScoped: false
  },
  "vmid-ranges": {
    model: "vmIdRange",
    readPermission: "node:read",
    createPermission: "node:manage",
    updatePermission: "node:manage",
    deletePermission: "node:manage",
    tenantScoped: false
  },
  "provisioned-services": {
    model: "provisionedService",
    readPermission: "vm:read",
    createPermission: "vm:create",
    updatePermission: "vm:update",
    deletePermission: "vm:delete",
    tenantScoped: true
  },
  "ip-addresses": {
    model: "ipAddressPool",
    readPermission: "node:read",
    createPermission: "node:manage",
    updatePermission: "node:manage",
    deletePermission: "node:manage",
    tenantScoped: true,
    searchFields: ["address", "subnet", "node_hostname", "bridge", "sdn_zone"]
  },
  "ip-assignments": {
    model: "ipAssignment",
    readPermission: "vm:read",
    createPermission: "vm:update",
    updatePermission: "vm:update",
    deletePermission: "vm:update",
    tenantScoped: true
  },
  "private-networks": {
    model: "privateNetwork",
    readPermission: "node:read",
    createPermission: "node:manage",
    updatePermission: "node:manage",
    deletePermission: "node:manage",
    tenantScoped: false,
    searchFields: ["name", "slug", "cidr", "bridge", "sdn_zone", "node_hostname"]
  },
  "private-network-attachments": {
    model: "privateNetworkAttachment",
    readPermission: "vm:read",
    createPermission: "vm:update",
    updatePermission: "vm:update",
    deletePermission: "vm:update",
    tenantScoped: true
  },
  "tenant-ip-quotas": {
    model: "tenantIpQuota",
    readPermission: "tenant:read",
    createPermission: "tenant:manage",
    updatePermission: "tenant:manage",
    deletePermission: "tenant:manage",
    tenantScoped: true
  },
  "ip-reserved-ranges": {
    model: "ipReservedRange",
    readPermission: "node:read",
    createPermission: "node:manage",
    updatePermission: "node:manage",
    deletePermission: "node:manage",
    tenantScoped: true,
    searchFields: ["name", "cidr", "reason", "node_hostname", "bridge", "sdn_zone"]
  },
  "ip-pool-policies": {
    model: "ipPoolPolicy",
    readPermission: "node:read",
    createPermission: "node:manage",
    updatePermission: "node:manage",
    deletePermission: "node:manage",
    tenantScoped: true,
    searchFields: ["name", "node_hostname", "bridge", "sdn_zone"]
  },
  "server-health-checks": {
    model: "serverHealthCheck",
    readPermission: "security:read",
    createPermission: "security:manage",
    updatePermission: "security:manage",
    deletePermission: "security:manage",
    tenantScoped: true,
    searchFields: ["name", "description"]
  },
  // Read-only: results are produced by the health-check runner.
  "server-health-check-results": {
    model: "serverHealthCheckResult",
    readPermission: "security:read",
    tenantScoped: true
  },
  "monitoring-alert-rules": {
    model: "monitoringAlertRule",
    readPermission: "security:read",
    createPermission: "security:manage",
    updatePermission: "security:manage",
    deletePermission: "security:manage",
    tenantScoped: true,
    searchFields: ["name", "description"]
  },
  // Events can be read and updated (e.g. acknowledged) but not created/deleted here.
  "monitoring-alert-events": {
    model: "monitoringAlertEvent",
    readPermission: "security:read",
    updatePermission: "security:manage",
    tenantScoped: true,
    searchFields: ["title", "message", "metric_key"]
  },
  // Read-only: notifications are produced by the alerting pipeline.
  "monitoring-alert-notifications": {
    model: "monitoringAlertNotification",
    readPermission: "security:read",
    tenantScoped: true,
    searchFields: ["destination", "provider_message"]
  }
};
|
||||
|
||||
function toEnumUpper(value: unknown): unknown {
|
||||
if (typeof value !== "string") return value;
|
||||
return value.replace(/-/g, "_").toUpperCase();
|
||||
}
|
||||
|
||||
function normalizePayload(resource: string, input: Record<string, unknown>) {
|
||||
const data = { ...input };
|
||||
const enumFieldsByResource: Record<string, string[]> = {
|
||||
tenants: ["status", "currency", "payment_provider"],
|
||||
"virtual-machines": ["status", "type"],
|
||||
nodes: ["status"],
|
||||
"billing-plans": ["currency"],
|
||||
invoices: ["status", "currency", "payment_provider"],
|
||||
"usage-records": ["currency"],
|
||||
backups: ["status", "type", "schedule", "source"],
|
||||
"backup-restore-tasks": ["mode", "status"],
|
||||
"snapshot-jobs": ["frequency"],
|
||||
"audit-logs": ["resource_type", "severity"],
|
||||
"security-events": ["severity", "status"],
|
||||
"firewall-rules": ["direction", "action", "protocol", "applies_to"],
|
||||
users: ["role"],
|
||||
"app-templates": ["template_type", "virtualization_type"],
|
||||
"placement-policies": ["product_type"],
|
||||
"provisioned-services": ["product_type", "lifecycle_status"],
|
||||
"server-health-checks": ["target_type", "check_type"],
|
||||
"server-health-check-results": ["status", "severity"],
|
||||
"monitoring-alert-rules": ["severity"],
|
||||
"monitoring-alert-events": ["status", "severity"],
|
||||
"monitoring-alert-notifications": ["channel", "status"]
|
||||
};
|
||||
|
||||
for (const field of enumFieldsByResource[resource] ?? []) {
|
||||
if (field in data && data[field] !== undefined && data[field] !== null) {
|
||||
data[field] = toEnumUpper(data[field]);
|
||||
}
|
||||
}
|
||||
|
||||
if (resource === "billing-plans") {
|
||||
const monthly = data.price_monthly;
|
||||
if (monthly !== undefined && (data.price_hourly === undefined || data.price_hourly === null)) {
|
||||
const monthlyNumber = Number(monthly);
|
||||
data.price_hourly = Number((monthlyNumber / 720).toFixed(4));
|
||||
}
|
||||
if (typeof data.features === "string") {
|
||||
try {
|
||||
data.features = JSON.parse(data.features);
|
||||
} catch {
|
||||
data.features = [];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (resource === "tenants" && typeof data.member_emails === "string") {
|
||||
try {
|
||||
data.member_emails = JSON.parse(data.member_emails);
|
||||
} catch {
|
||||
data.member_emails = [];
|
||||
}
|
||||
}
|
||||
|
||||
if (resource === "invoices" && !data.invoice_number) {
|
||||
data.invoice_number = `INV-${Date.now()}-${Math.floor(1000 + Math.random() * 9000)}`;
|
||||
}
|
||||
|
||||
if (resource === "invoices" && data.due_date && typeof data.due_date === "string") {
|
||||
data.due_date = new Date(data.due_date);
|
||||
}
|
||||
if (resource === "invoices" && data.paid_date && typeof data.paid_date === "string") {
|
||||
data.paid_date = new Date(data.paid_date);
|
||||
}
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
function getModel(meta: ResourceMeta) {
|
||||
return (prisma as any)[meta.model];
|
||||
}
|
||||
|
||||
function normalizeSortField(field: string) {
|
||||
const aliases: Record<string, string> = {
|
||||
created_date: "created_at",
|
||||
updated_date: "updated_at"
|
||||
};
|
||||
return aliases[field] ?? field;
|
||||
}
|
||||
|
||||
function parseOrder(sort?: string) {
|
||||
if (!sort) return { created_at: "desc" as const };
|
||||
if (sort.startsWith("-")) return { [normalizeSortField(sort.slice(1))]: "desc" as const };
|
||||
return { [normalizeSortField(sort)]: "asc" as const };
|
||||
}
|
||||
|
||||
function attachTenantWhere(req: Express.Request, meta: ResourceMeta, where: Record<string, unknown>) {
|
||||
if (!meta.tenantScoped || !isTenantScopedUser(req)) return;
|
||||
const tenantId = req.user?.tenant_id;
|
||||
if (!tenantId) return;
|
||||
|
||||
if (meta.model === "backup") {
|
||||
where.OR = [{ tenant_id: tenantId }, { vm: { tenant_id: tenantId } }];
|
||||
return;
|
||||
}
|
||||
|
||||
if (meta.model === "backupRestoreTask") {
|
||||
where.source_vm = { tenant_id: tenantId };
|
||||
return;
|
||||
}
|
||||
|
||||
if (meta.model === "snapshotJob") {
|
||||
where.vm = { tenant_id: tenantId };
|
||||
return;
|
||||
}
|
||||
|
||||
if (meta.model === "backupPolicy") {
|
||||
where.tenant_id = tenantId;
|
||||
return;
|
||||
}
|
||||
|
||||
if (meta.model === "ipAddressPool") {
|
||||
where.OR = [{ assigned_tenant_id: tenantId }, { status: "AVAILABLE", scope: "PRIVATE" }];
|
||||
return;
|
||||
}
|
||||
|
||||
if (meta.model === "ipAssignment") {
|
||||
where.tenant_id = tenantId;
|
||||
return;
|
||||
}
|
||||
|
||||
if (meta.model === "privateNetworkAttachment") {
|
||||
where.tenant_id = tenantId;
|
||||
return;
|
||||
}
|
||||
|
||||
if (meta.model === "tenantIpQuota") {
|
||||
where.tenant_id = tenantId;
|
||||
return;
|
||||
}
|
||||
|
||||
if (meta.model === "ipReservedRange" || meta.model === "ipPoolPolicy") {
|
||||
where.OR = [{ tenant_id: tenantId }, { tenant_id: null }];
|
||||
return;
|
||||
}
|
||||
|
||||
if (meta.model === "serverHealthCheck") {
|
||||
where.OR = [{ tenant_id: tenantId }, { tenant_id: null }];
|
||||
return;
|
||||
}
|
||||
|
||||
if (meta.model === "serverHealthCheckResult") {
|
||||
where.check = {
|
||||
OR: [{ tenant_id: tenantId }, { tenant_id: null }]
|
||||
};
|
||||
return;
|
||||
}
|
||||
|
||||
if (meta.model === "monitoringAlertRule" || meta.model === "monitoringAlertEvent") {
|
||||
where.OR = [{ tenant_id: tenantId }, { tenant_id: null }];
|
||||
return;
|
||||
}
|
||||
|
||||
if (meta.model === "monitoringAlertNotification") {
|
||||
where.event = {
|
||||
OR: [{ tenant_id: tenantId }, { tenant_id: null }]
|
||||
};
|
||||
return;
|
||||
}
|
||||
|
||||
where.tenant_id = tenantId;
|
||||
}
|
||||
|
||||
function attachSearchWhere(
|
||||
where: Record<string, unknown>,
|
||||
search: string,
|
||||
searchFields: string[] | undefined
|
||||
) {
|
||||
if (!search || !searchFields?.length) {
|
||||
return;
|
||||
}
|
||||
|
||||
const searchFilter = {
|
||||
OR: searchFields.map((field) => ({
|
||||
[field]: { contains: search, mode: "insensitive" }
|
||||
}))
|
||||
};
|
||||
|
||||
if (Array.isArray(where.OR)) {
|
||||
const existingOr = where.OR;
|
||||
delete where.OR;
|
||||
const existingAnd = Array.isArray(where.AND) ? where.AND : [];
|
||||
where.AND = [...existingAnd, { OR: existingOr }, searchFilter];
|
||||
return;
|
||||
}
|
||||
|
||||
if (Array.isArray(where.AND)) {
|
||||
where.AND = [...where.AND, searchFilter];
|
||||
return;
|
||||
}
|
||||
|
||||
where.AND = [searchFilter];
|
||||
}
|
||||
|
||||
/**
 * Verify that a single record already fetched from the DB is visible to the
 * requesting tenant-scoped user. Mirrors the list-query rules applied by
 * attachTenantWhere, but resolves ownership per item (following relations
 * with extra lookups where needed). Throws HttpError 403 on a mismatch.
 *
 * Platform-level users, non-tenant-scoped resources, and users without a
 * tenant_id bypass the check entirely.
 */
async function ensureItemTenantScope(req: Express.Request, meta: ResourceMeta, item: Record<string, unknown>) {
  if (!meta.tenantScoped || !isTenantScopedUser(req) || !req.user?.tenant_id) {
    return;
  }

  const tenantId = req.user.tenant_id;
  let ownerTenantId: string | null | undefined;

  if (meta.model === "backup") {
    // Direct owner first; otherwise fall back to the owning VM's tenant.
    ownerTenantId = (item.tenant_id as string | null | undefined) ?? null;
    if (!ownerTenantId && typeof item.vm_id === "string") {
      const vm = await prisma.virtualMachine.findUnique({
        where: { id: item.vm_id },
        select: { tenant_id: true }
      });
      ownerTenantId = vm?.tenant_id;
    }
  } else if (meta.model === "backupRestoreTask") {
    // Ownership flows through the restore source VM.
    // NOTE(review): if source_vm_id is absent, ownerTenantId stays undefined
    // and the request is rejected below — presumably intentional fail-closed.
    if (typeof item.source_vm_id === "string") {
      const vm = await prisma.virtualMachine.findUnique({
        where: { id: item.source_vm_id },
        select: { tenant_id: true }
      });
      ownerTenantId = vm?.tenant_id;
    }
  } else if (meta.model === "snapshotJob") {
    // Ownership flows through the snapshotted VM.
    if (typeof item.vm_id === "string") {
      const vm = await prisma.virtualMachine.findUnique({
        where: { id: item.vm_id },
        select: { tenant_id: true }
      });
      ownerTenantId = vm?.tenant_id;
    }
  } else if (meta.model === "ipAddressPool") {
    ownerTenantId = item.assigned_tenant_id as string | null | undefined;
    // Unassigned private pools are visible to every tenant (claimable).
    if (!ownerTenantId && item.status === "AVAILABLE" && item.scope === "PRIVATE") {
      return;
    }
  } else if (meta.model === "ipAssignment" || meta.model === "privateNetworkAttachment") {
    ownerTenantId = item.tenant_id as string | null | undefined;
  } else if (meta.model === "tenantIpQuota" || meta.model === "ipReservedRange" || meta.model === "ipPoolPolicy") {
    // Rows with tenant_id = null are global and readable by any tenant.
    ownerTenantId = (item.tenant_id as string | null | undefined) ?? null;
    if (!ownerTenantId) return;
  } else if (meta.model === "serverHealthCheck") {
    // Same global-row rule as above.
    ownerTenantId = (item.tenant_id as string | null | undefined) ?? null;
    if (!ownerTenantId) return;
  } else if (meta.model === "serverHealthCheckResult") {
    // Ownership flows through the parent health check; a null check tenant
    // means the check is global and the result is visible.
    if (typeof item.check_id === "string") {
      const check = await prisma.serverHealthCheck.findUnique({
        where: { id: item.check_id },
        select: { tenant_id: true }
      });
      ownerTenantId = check?.tenant_id;
      if (!ownerTenantId) return;
    }
  } else if (meta.model === "monitoringAlertRule" || meta.model === "monitoringAlertEvent") {
    ownerTenantId = (item.tenant_id as string | null | undefined) ?? null;
    if (!ownerTenantId) return;
  } else if (meta.model === "monitoringAlertNotification") {
    // Ownership flows through the parent alert event.
    if (typeof item.alert_event_id === "string") {
      const event = await prisma.monitoringAlertEvent.findUnique({
        where: { id: item.alert_event_id },
        select: { tenant_id: true }
      });
      ownerTenantId = event?.tenant_id;
      if (!ownerTenantId) return;
    }
  } else {
    // Conventional case: the record carries its own tenant_id column.
    ownerTenantId = item.tenant_id as string | null | undefined;
  }

  if (ownerTenantId !== tenantId) {
    throw new HttpError(403, "Access denied for tenant scope", "TENANT_SCOPE_VIOLATION");
  }
}
|
||||
|
||||
// GET /:resource — paginated, filterable, searchable list endpoint for every
// resource registered in resourceMap. Applies the resource's read permission,
// tenant scoping, optional query filters, search, and sort.
router.get("/:resource", requireAuth, async (req, res, next) => {
  try {
    const resource = req.params.resource;
    const meta = resourceMap[resource];
    if (!meta) throw new HttpError(404, "Unknown resource", "UNKNOWN_RESOURCE");
    // authorize() is Express middleware; promisify its next() callback so the
    // permission check can be awaited per-resource inside the handler.
    await new Promise<void>((resolve, reject) => authorize(meta.readPermission)(req, res, (error) => (error ? reject(error) : resolve())));

    const model = getModel(meta);
    // Clamp pagination: limit defaults to 100, hard cap 500; offset >= 0.
    const rawLimit = Number(req.query.limit ?? 100);
    const rawOffset = Number(req.query.offset ?? 0);
    const limit = Number.isFinite(rawLimit) && rawLimit > 0 ? Math.min(Math.floor(rawLimit), 500) : 100;
    const offset = Number.isFinite(rawOffset) && rawOffset >= 0 ? Math.floor(rawOffset) : 0;
    const where: Record<string, unknown> = {};

    attachTenantWhere(req, meta, where);

    // Ad-hoc equality filters supported across resources.
    // NOTE(review): these columns are assumed to exist on every filtered
    // model; passing e.g. ?status= to a model without a status column would
    // surface as a Prisma error — confirm callers only use valid filters.
    if (typeof req.query.status === "string") where.status = toEnumUpper(req.query.status);
    // Only platform-level users may filter by arbitrary tenant_id.
    if (typeof req.query.tenant_id === "string" && !isTenantScopedUser(req)) where.tenant_id = req.query.tenant_id;
    if (typeof req.query.vm_id === "string") where.vm_id = req.query.vm_id;
    if (typeof req.query.node === "string") where.node = req.query.node;

    const search = typeof req.query.search === "string" ? req.query.search.trim() : "";
    attachSearchWhere(where, search, meta.searchFields);

    // Page of rows and total count fetched in parallel for pagination meta.
    const [data, total] = await Promise.all([
      model.findMany({
        where,
        orderBy: parseOrder(typeof req.query.sort === "string" ? req.query.sort : undefined),
        take: limit,
        skip: offset
      }),
      model.count({ where })
    ]);

    res.json({
      data,
      meta: { total, limit, offset }
    });
  } catch (error) {
    next(error);
  }
});
|
||||
|
||||
// GET /:resource/:id — fetch a single record by primary key, enforcing the
// resource's read permission and per-item tenant scoping.
router.get("/:resource/:id", requireAuth, async (req, res, next) => {
  try {
    const resource = req.params.resource;
    const meta = resourceMap[resource];
    if (!meta) throw new HttpError(404, "Unknown resource", "UNKNOWN_RESOURCE");
    // Promisified authorize() middleware (permission varies per resource).
    await new Promise<void>((resolve, reject) => authorize(meta.readPermission)(req, res, (error) => (error ? reject(error) : resolve())));

    const model = getModel(meta);
    const item = await model.findUnique({ where: { id: req.params.id } });
    if (!item) throw new HttpError(404, "Record not found", "NOT_FOUND");
    // Throws 403 if the record belongs to another tenant.
    await ensureItemTenantScope(req, meta, item);
    res.json(item);
  } catch (error) {
    next(error);
  }
});
|
||||
|
||||
// POST /:resource — create a record. Resources without a createPermission are
// read-only (405). Tenant-scoped users have tenant_id forced onto the payload
// (except models scoped through a VM relation). Writes an audit-log entry.
router.post("/:resource", requireAuth, async (req, res, next) => {
  try {
    const resource = req.params.resource;
    const meta = resourceMap[resource];
    if (!meta) throw new HttpError(404, "Unknown resource", "UNKNOWN_RESOURCE");
    if (!meta.createPermission) throw new HttpError(405, "Resource is read-only", "READ_ONLY");
    // Promisified authorize() middleware for the create permission.
    await new Promise<void>((resolve, reject) => authorize(meta.createPermission!)(req, res, (error) => (error ? reject(error) : resolve())));

    const model = getModel(meta);
    const payload = normalizePayload(resource, req.body ?? {});

    if (meta.tenantScoped && isTenantScopedUser(req) && req.user?.tenant_id) {
      // Force ownership to the caller's tenant so a tenant user cannot
      // create rows for another tenant. Restore tasks and snapshot jobs
      // have no tenant_id column — they are scoped via their VM.
      if (
        meta.model !== "backupRestoreTask" &&
        meta.model !== "snapshotJob"
      ) {
        payload.tenant_id = req.user.tenant_id;
      }
    }

    const created = await model.create({ data: payload });

    // Best-available display name: name, then invoice number, then id.
    await logAudit({
      action: `${resource}.create`,
      resource_type: resource === "virtual-machines" ? "VM" : "SYSTEM",
      resource_id: created.id,
      resource_name: created.name ?? created.invoice_number ?? created.id,
      actor_email: req.user!.email,
      actor_role: req.user!.role,
      details: toPrismaJsonValue({ resource, payload: created }),
      ip_address: req.ip
    });

    res.status(201).json(created);
  } catch (error) {
    next(error);
  }
});
|
||||
|
||||
// PATCH /:resource/:id — partial update. Checks update permission, verifies
// the existing record is within the caller's tenant scope before writing,
// and records the applied payload in the audit log.
router.patch("/:resource/:id", requireAuth, async (req, res, next) => {
  try {
    const resource = req.params.resource;
    const meta = resourceMap[resource];
    if (!meta) throw new HttpError(404, "Unknown resource", "UNKNOWN_RESOURCE");
    if (!meta.updatePermission) throw new HttpError(405, "Resource is read-only", "READ_ONLY");
    // Promisified authorize() middleware for the update permission.
    await new Promise<void>((resolve, reject) => authorize(meta.updatePermission!)(req, res, (error) => (error ? reject(error) : resolve())));

    const model = getModel(meta);
    // Fetch first so tenant scope is checked against the *current* owner.
    const existing = await model.findUnique({ where: { id: req.params.id } });
    if (!existing) throw new HttpError(404, "Record not found", "NOT_FOUND");
    await ensureItemTenantScope(req, meta, existing);

    // NOTE(review): unlike POST, tenant_id is not stripped/forced here, so a
    // tenant user could include tenant_id in the body — confirm Prisma schema
    // or validation elsewhere prevents re-homing a record.
    const payload = normalizePayload(resource, req.body ?? {});
    const updated = await model.update({
      where: { id: req.params.id },
      data: payload
    });

    await logAudit({
      action: `${resource}.update`,
      resource_type: resource === "virtual-machines" ? "VM" : "SYSTEM",
      resource_id: updated.id,
      resource_name: updated.name ?? updated.invoice_number ?? updated.id,
      actor_email: req.user!.email,
      actor_role: req.user!.role,
      details: toPrismaJsonValue({ resource, payload }),
      ip_address: req.ip
    });

    res.json(updated);
  } catch (error) {
    next(error);
  }
});
|
||||
|
||||
// DELETE /:resource/:id — hard delete. Checks delete permission and tenant
// scope against the existing record, then logs the deletion. Returns 204.
router.delete("/:resource/:id", requireAuth, async (req, res, next) => {
  try {
    const resource = req.params.resource;
    const meta = resourceMap[resource];
    if (!meta) throw new HttpError(404, "Unknown resource", "UNKNOWN_RESOURCE");
    if (!meta.deletePermission) throw new HttpError(405, "Resource is read-only", "READ_ONLY");
    // Promisified authorize() middleware for the delete permission.
    await new Promise<void>((resolve, reject) => authorize(meta.deletePermission!)(req, res, (error) => (error ? reject(error) : resolve())));

    const model = getModel(meta);
    // Fetch first: needed both for the tenant-scope check and to preserve a
    // display name for the audit entry after the row is gone.
    const existing = await model.findUnique({ where: { id: req.params.id } });
    if (!existing) throw new HttpError(404, "Record not found", "NOT_FOUND");
    await ensureItemTenantScope(req, meta, existing);

    await model.delete({ where: { id: req.params.id } });

    await logAudit({
      action: `${resource}.delete`,
      resource_type: resource === "virtual-machines" ? "VM" : "SYSTEM",
      resource_id: req.params.id,
      resource_name: existing.name ?? existing.invoice_number ?? existing.id,
      actor_email: req.user!.email,
      actor_role: req.user!.role,
      details: toPrismaJsonValue({ resource }),
      ip_address: req.ip
    });

    res.status(204).send();
  } catch (error) {
    next(error);
  }
});
|
||||
|
||||
export default router;
|
||||
280
backend/src/routes/settings.routes.ts
Normal file
280
backend/src/routes/settings.routes.ts
Normal file
@@ -0,0 +1,280 @@
|
||||
import { Router } from "express";
|
||||
import { z } from "zod";
|
||||
import { authorize, requireAuth } from "../middleware/auth";
|
||||
import { prisma } from "../lib/prisma";
|
||||
import { getOperationsPolicy } from "../services/operations.service";
|
||||
import { getSchedulerRuntimeSnapshot, reconfigureSchedulers, schedulerDefaults } from "../services/scheduler.service";
|
||||
|
||||
const router = Router();
|
||||
|
||||
// Connection settings for the Proxmox VE API (token-based auth).
const proxmoxSchema = z.object({
  host: z.string().min(1),
  port: z.number().int().positive().default(8006),
  username: z.string().min(1),
  token_id: z.string().min(1),
  token_secret: z.string().min(1),
  verify_ssl: z.boolean().default(true)
});

// Payment-gateway credentials; all provider keys are optional so a single
// provider can be configured at a time.
const paymentSchema = z.object({
  default_provider: z.enum(["paystack", "flutterwave", "manual"]).default("paystack"),
  paystack_public: z.string().optional(),
  paystack_secret: z.string().optional(),
  flutterwave_public: z.string().optional(),
  flutterwave_secret: z.string().optional(),
  flutterwave_webhook_hash: z.string().optional(),
  callback_url: z.string().optional()
});

// Backup defaults, including optional Proxmox Backup Server (PBS) target.
const backupSchema = z.object({
  default_source: z.enum(["local", "pbs", "remote"]).default("local"),
  default_storage: z.string().default("local-lvm"),
  max_restore_file_count: z.number().int().positive().default(100),
  pbs_enabled: z.boolean().default(false),
  pbs_host: z.string().optional(),
  pbs_datastore: z.string().optional(),
  pbs_namespace: z.string().optional(),
  pbs_verify_ssl: z.boolean().default(true)
});

// Per-node console endpoint URLs (all console types optional).
const consoleProxyNodeSchema = z.object({
  novnc: z.string().url().optional(),
  spice: z.string().url().optional(),
  xterm: z.string().url().optional()
});

// Console proxy routing: one cluster-wide endpoint set, or one per node.
const consoleProxySchema = z.object({
  mode: z.enum(["cluster", "per_node"]).default("cluster"),
  cluster: consoleProxyNodeSchema.optional(),
  nodes: z.record(consoleProxyNodeSchema).optional()
});

// Cron expressions for the background schedulers; all fields optional so a
// PUT may override only some of the defaults from schedulerDefaults().
const schedulerSchema = z.object({
  enable_scheduler: z.boolean().optional(),
  billing_cron: z.string().min(5).optional(),
  backup_cron: z.string().min(5).optional(),
  power_schedule_cron: z.string().min(5).optional(),
  monitoring_cron: z.string().min(5).optional(),
  operation_retry_cron: z.string().min(5).optional()
});

// Retry/notification policy for failed background operations.
const operationsPolicySchema = z.object({
  max_retry_attempts: z.number().int().min(0).max(10).optional(),
  retry_backoff_minutes: z.number().int().min(1).max(720).optional(),
  notify_on_task_failure: z.boolean().optional(),
  notification_email: z.string().email().optional(),
  notification_webhook_url: z.string().url().optional(),
  email_gateway_url: z.string().url().optional()
});

// Alert-channel toggles and webhook/email destinations.
const notificationsSchema = z.object({
  email_alerts: z.boolean().optional(),
  backup_alerts: z.boolean().optional(),
  billing_alerts: z.boolean().optional(),
  vm_alerts: z.boolean().optional(),
  monitoring_webhook_url: z.string().url().optional(),
  alert_webhook_url: z.string().url().optional(),
  email_gateway_url: z.string().url().optional(),
  notification_email_webhook: z.string().url().optional(),
  ops_email: z.string().email().optional()
});
|
||||
|
||||
// All routes below follow the same pattern: GET reads a singleton row from
// the `setting` table by key; PUT validates the body with the matching Zod
// schema and upserts it. Reads require settings:read, writes settings:manage.

// GET /proxmox — stored Proxmox connection settings (empty object if unset).
// NOTE(review): the row is created with is_encrypted: true but the secret is
// returned verbatim here — confirm masking happens elsewhere if required.
router.get("/proxmox", requireAuth, authorize("settings:read"), async (_req, res, next) => {
  try {
    const setting = await prisma.setting.findUnique({ where: { key: "proxmox" } });
    res.json(setting?.value ?? {});
  } catch (error) {
    next(error);
  }
});

// PUT /proxmox — validate and persist Proxmox connection settings.
router.put("/proxmox", requireAuth, authorize("settings:manage"), async (req, res, next) => {
  try {
    const payload = proxmoxSchema.parse(req.body);
    const setting = await prisma.setting.upsert({
      where: { key: "proxmox" },
      update: { value: payload },
      create: { key: "proxmox", type: "PROXMOX", value: payload, is_encrypted: true }
    });
    res.json(setting.value);
  } catch (error) {
    next(error);
  }
});

// GET /payment — stored payment-provider settings.
router.get("/payment", requireAuth, authorize("settings:read"), async (_req, res, next) => {
  try {
    const setting = await prisma.setting.findUnique({ where: { key: "payment" } });
    res.json(setting?.value ?? {});
  } catch (error) {
    next(error);
  }
});

// PUT /payment — validate and persist payment-provider settings.
router.put("/payment", requireAuth, authorize("settings:manage"), async (req, res, next) => {
  try {
    const payload = paymentSchema.parse(req.body);
    const setting = await prisma.setting.upsert({
      where: { key: "payment" },
      update: { value: payload },
      create: { key: "payment", type: "PAYMENT", value: payload, is_encrypted: true }
    });
    res.json(setting.value);
  } catch (error) {
    next(error);
  }
});

// GET /backup — stored backup defaults.
router.get("/backup", requireAuth, authorize("settings:read"), async (_req, res, next) => {
  try {
    const setting = await prisma.setting.findUnique({ where: { key: "backup" } });
    res.json(setting?.value ?? {});
  } catch (error) {
    next(error);
  }
});

// PUT /backup — validate and persist backup defaults.
router.put("/backup", requireAuth, authorize("settings:manage"), async (req, res, next) => {
  try {
    const payload = backupSchema.parse(req.body);
    const setting = await prisma.setting.upsert({
      where: { key: "backup" },
      update: { value: payload },
      create: { key: "backup", type: "GENERAL", value: payload, is_encrypted: false }
    });
    res.json(setting.value);
  } catch (error) {
    next(error);
  }
});

// GET /console-proxy — console proxy routing; defaults to an empty
// cluster-wide configuration when no row exists.
router.get("/console-proxy", requireAuth, authorize("settings:read"), async (_req, res, next) => {
  try {
    const setting = await prisma.setting.findUnique({ where: { key: "console_proxy" } });
    res.json(
      setting?.value ?? {
        mode: "cluster",
        cluster: {},
        nodes: {}
      }
    );
  } catch (error) {
    next(error);
  }
});

// PUT /console-proxy — validate and persist console proxy routing.
router.put("/console-proxy", requireAuth, authorize("settings:manage"), async (req, res, next) => {
  try {
    const payload = consoleProxySchema.parse(req.body);
    const setting = await prisma.setting.upsert({
      where: { key: "console_proxy" },
      update: { value: payload },
      create: { key: "console_proxy", type: "PROXMOX", value: payload, is_encrypted: false }
    });
    res.json(setting.value);
  } catch (error) {
    next(error);
  }
});

// GET /scheduler — effective scheduler config (defaults overlaid with any
// persisted overrides) plus a live runtime snapshot of the schedulers.
router.get("/scheduler", requireAuth, authorize("settings:read"), async (_req, res, next) => {
  try {
    const setting = await prisma.setting.findUnique({ where: { key: "scheduler" } });
    const defaults = schedulerDefaults();
    // Only merge when the stored value is a plain object (guards against
    // legacy/corrupt rows holding arrays or scalars).
    const persisted =
      setting?.value && typeof setting.value === "object" && !Array.isArray(setting.value)
        ? (setting.value as Record<string, unknown>)
        : {};
    const config = {
      ...defaults,
      ...persisted
    };
    return res.json({
      config,
      runtime: getSchedulerRuntimeSnapshot()
    });
  } catch (error) {
    return next(error);
  }
});

// PUT /scheduler — persist overrides, then re-apply them to the running
// schedulers immediately; responds with the stored config and new runtime.
router.put("/scheduler", requireAuth, authorize("settings:manage"), async (req, res, next) => {
  try {
    const payload = schedulerSchema.parse(req.body);
    const setting = await prisma.setting.upsert({
      where: { key: "scheduler" },
      update: { value: payload },
      create: { key: "scheduler", type: "GENERAL", value: payload, is_encrypted: false }
    });

    const runtime = await reconfigureSchedulers(payload);
    return res.json({
      config: setting.value,
      runtime
    });
  } catch (error) {
    return next(error);
  }
});

// GET /operations-policy — effective retry/notification policy (resolution
// of defaults vs stored values is delegated to getOperationsPolicy()).
router.get("/operations-policy", requireAuth, authorize("settings:read"), async (_req, res, next) => {
  try {
    const policy = await getOperationsPolicy();
    return res.json(policy);
  } catch (error) {
    return next(error);
  }
});

// PUT /operations-policy — persist the raw payload, then respond with the
// re-resolved effective policy rather than the payload itself.
router.put("/operations-policy", requireAuth, authorize("settings:manage"), async (req, res, next) => {
  try {
    const payload = operationsPolicySchema.parse(req.body);
    await prisma.setting.upsert({
      where: { key: "operations_policy" },
      update: { value: payload },
      create: { key: "operations_policy", type: "GENERAL", value: payload, is_encrypted: false }
    });

    const policy = await getOperationsPolicy();
    return res.json(policy);
  } catch (error) {
    return next(error);
  }
});

// GET /notifications — stored alert-channel settings; defaults enable all
// alert categories with empty destinations.
router.get("/notifications", requireAuth, authorize("settings:read"), async (_req, res, next) => {
  try {
    const setting = await prisma.setting.findUnique({ where: { key: "notifications" } });
    return res.json(
      setting?.value ?? {
        email_alerts: true,
        backup_alerts: true,
        billing_alerts: true,
        vm_alerts: true,
        monitoring_webhook_url: "",
        alert_webhook_url: "",
        email_gateway_url: "",
        notification_email_webhook: "",
        ops_email: ""
      }
    );
  } catch (error) {
    return next(error);
  }
});

// PUT /notifications — validate and persist alert-channel settings.
router.put("/notifications", requireAuth, authorize("settings:manage"), async (req, res, next) => {
  try {
    const payload = notificationsSchema.parse(req.body);
    const setting = await prisma.setting.upsert({
      where: { key: "notifications" },
      update: { value: payload },
      create: { key: "notifications", type: "EMAIL", value: payload, is_encrypted: false }
    });
    return res.json(setting.value);
  } catch (error) {
    return next(error);
  }
});
|
||||
|
||||
export default router;
|
||||
30
backend/src/services/audit.service.ts
Normal file
30
backend/src/services/audit.service.ts
Normal file
@@ -0,0 +1,30 @@
|
||||
import type { Prisma, ResourceType, Severity } from "@prisma/client";
|
||||
import { prisma } from "../lib/prisma";
|
||||
|
||||
// Input contract for logAudit(). Mirrors the auditLog table's columns.
type AuditInput = {
  // Machine-readable action identifier, e.g. "invoices.create".
  action: string;
  // Prisma ResourceType enum value (VM, SYSTEM, BILLING, INVOICE, ...).
  resource_type: ResourceType;
  resource_id?: string;
  // Human-readable label for the affected record.
  resource_name?: string;
  actor_email: string;
  actor_role?: string;
  // Defaults to "INFO" when omitted (applied in logAudit).
  severity?: Severity;
  // Arbitrary structured context stored as JSON.
  details?: Prisma.InputJsonValue;
  ip_address?: string;
};
|
||||
|
||||
export async function logAudit(input: AuditInput) {
|
||||
await prisma.auditLog.create({
|
||||
data: {
|
||||
action: input.action,
|
||||
resource_type: input.resource_type,
|
||||
resource_id: input.resource_id,
|
||||
resource_name: input.resource_name,
|
||||
actor_email: input.actor_email,
|
||||
actor_role: input.actor_role,
|
||||
severity: input.severity ?? "INFO",
|
||||
details: input.details,
|
||||
ip_address: input.ip_address
|
||||
}
|
||||
});
|
||||
}
|
||||
1086
backend/src/services/backup.service.ts
Normal file
1086
backend/src/services/backup.service.ts
Normal file
File diff suppressed because it is too large
Load Diff
245
backend/src/services/billing.service.ts
Normal file
245
backend/src/services/billing.service.ts
Normal file
@@ -0,0 +1,245 @@
|
||||
import { Prisma, InvoiceStatus, PaymentProvider } from "@prisma/client";
|
||||
import { prisma } from "../lib/prisma";
|
||||
import { logAudit } from "./audit.service";
|
||||
|
||||
function startOfHour(date = new Date()) {
|
||||
const d = new Date(date);
|
||||
d.setMinutes(0, 0, 0);
|
||||
return d;
|
||||
}
|
||||
|
||||
/**
 * Record one hour of usage for every RUNNING VM that has a billing plan,
 * for the current wall-clock hour. Idempotent per (vm, hour): an existing
 * usage record for the same period is skipped, so the scheduler can safely
 * re-run within the hour. Writes a summary audit-log entry.
 *
 * @param actorEmail attributed in the audit log (defaults to the system actor)
 * @returns number of records created and the metered period start (ISO string)
 */
export async function meterHourlyUsage(actorEmail = "system@proxpanel.local") {
  const periodStart = startOfHour();
  const periodEnd = new Date(periodStart.getTime() + 60 * 60 * 1000);

  const vms = await prisma.virtualMachine.findMany({
    where: { status: "RUNNING" },
    include: {
      tenant: true,
      billing_plan: true
    }
  });

  let created = 0;
  for (const vm of vms) {
    // VMs without a plan are not billed.
    if (!vm.billing_plan) continue;

    // Idempotency check: one usage record per VM per hour.
    // NOTE(review): this is an N+1 query pattern (one findFirst per VM);
    // acceptable for small fleets, worth batching if VM counts grow.
    const exists = await prisma.usageRecord.findFirst({
      where: {
        vm_id: vm.id,
        period_start: periodStart,
        period_end: periodEnd
      }
    });
    if (exists) continue;

    // Decimal arithmetic throughout to avoid float rounding on money.
    const hoursUsed = new Prisma.Decimal(1);
    const pricePerHour = vm.billing_plan.price_hourly;
    const totalCost = pricePerHour.mul(hoursUsed);

    await prisma.usageRecord.create({
      data: {
        vm_id: vm.id,
        // Names are denormalized onto the record so history survives
        // renames/deletions of the VM, tenant, or plan.
        vm_name: vm.name,
        tenant_id: vm.tenant_id,
        tenant_name: vm.tenant.name,
        billing_plan_id: vm.billing_plan_id ?? undefined,
        plan_name: vm.billing_plan.name,
        hours_used: hoursUsed,
        price_per_hour: pricePerHour,
        currency: vm.billing_plan.currency,
        total_cost: totalCost,
        period_start: periodStart,
        period_end: periodEnd,
        cpu_hours: new Prisma.Decimal(vm.cpu_cores),
        ram_gb_hours: new Prisma.Decimal(vm.ram_mb / 1024),
        disk_gb_hours: new Prisma.Decimal(vm.disk_gb)
      }
    });
    created += 1;
  }

  await logAudit({
    action: "hourly_usage_metering",
    resource_type: "BILLING",
    actor_email: actorEmail,
    severity: "INFO",
    details: { period_start: periodStart.toISOString(), created_records: created }
  });

  return { created_records: created, period_start: periodStart.toISOString() };
}
|
||||
|
||||
function invoiceNumber() {
|
||||
const rand = Math.floor(1000 + Math.random() * 9000);
|
||||
return `INV-${Date.now()}-${rand}`;
|
||||
}
|
||||
|
||||
/**
 * Roll all unbilled usage records into invoices, one invoice per
 * (tenant, currency) pair. Each processed usage record is marked billed and
 * linked to its invoice; a summary audit-log entry is written at the end.
 *
 * @param actorEmail attributed in the audit log (defaults to the system actor)
 * @returns count and id/tenant/amount of the invoices created
 */
export async function generateInvoicesFromUnbilledUsage(actorEmail = "system@proxpanel.local") {
  const usageRecords = await prisma.usageRecord.findMany({
    where: { billed: false },
    orderBy: { created_at: "asc" }
  });
  if (usageRecords.length === 0) {
    return { generated: 0, invoices: [] as Array<{ id: string; tenant_id: string; amount: string }> };
  }

  // Group records by tenant and currency so mixed-currency usage for one
  // tenant produces separate invoices.
  // NOTE(review): the "tenantId:currency" key assumes tenant ids contain no
  // ":" (true for UUIDs) — confirm if the id format ever changes.
  const grouped = new Map<string, typeof usageRecords>();
  for (const item of usageRecords) {
    const key = `${item.tenant_id}:${item.currency}`;
    const current = grouped.get(key) ?? [];
    current.push(item);
    grouped.set(key, current);
  }

  const createdInvoices: Array<{ id: string; tenant_id: string; amount: string }> = [];

  for (const [key, records] of grouped.entries()) {
    const [tenantId] = key.split(":");
    // Decimal sum to avoid float rounding on money.
    const amount = records.reduce((sum, record) => sum.add(record.total_cost), new Prisma.Decimal(0));
    const tenant = await prisma.tenant.findUniqueOrThrow({ where: { id: tenantId } });

    // NOTE(review): the invoice create and the updateMany below are not in a
    // transaction — a crash in between would leave records unbilled but the
    // invoice created; confirm whether that at-least-once behavior is intended.
    const invoice = await prisma.invoice.create({
      data: {
        invoice_number: invoiceNumber(),
        tenant_id: tenantId,
        tenant_name: tenant.name,
        status: InvoiceStatus.PENDING,
        amount,
        currency: records[0].currency,
        // Net-7 payment terms.
        due_date: new Date(Date.now() + 7 * 24 * 60 * 60 * 1000),
        payment_provider: tenant.payment_provider,
        // Line items are snapshotted as JSON (strings for Decimal fields)
        // so the invoice stays stable even if usage records change.
        line_items: records.map((r) => ({
          usage_record_id: r.id,
          vm_name: r.vm_name,
          period_start: r.period_start,
          period_end: r.period_end,
          hours_used: r.hours_used.toString(),
          unit_price: r.price_per_hour.toString(),
          amount: r.total_cost.toString()
        }))
      }
    });

    await prisma.usageRecord.updateMany({
      where: { id: { in: records.map((r) => r.id) } },
      data: {
        billed: true,
        invoice_id: invoice.id
      }
    });

    createdInvoices.push({
      id: invoice.id,
      tenant_id: invoice.tenant_id,
      amount: invoice.amount.toString()
    });
  }

  await logAudit({
    action: "invoice_batch_generation",
    resource_type: "BILLING",
    actor_email: actorEmail,
    severity: "INFO",
    details: {
      generated_invoices: createdInvoices.length
    }
  });

  return { generated: createdInvoices.length, invoices: createdInvoices };
}
|
||||
|
||||
export async function markInvoicePaid(
|
||||
invoiceId: string,
|
||||
paymentProvider: PaymentProvider,
|
||||
paymentReference: string,
|
||||
actorEmail: string
|
||||
) {
|
||||
const invoice = await prisma.invoice.update({
|
||||
where: { id: invoiceId },
|
||||
data: {
|
||||
status: "PAID",
|
||||
paid_date: new Date(),
|
||||
payment_provider: paymentProvider,
|
||||
payment_reference: paymentReference
|
||||
}
|
||||
});
|
||||
|
||||
await logAudit({
|
||||
action: "invoice_mark_paid",
|
||||
resource_type: "INVOICE",
|
||||
resource_id: invoice.id,
|
||||
resource_name: invoice.invoice_number,
|
||||
actor_email: actorEmail,
|
||||
severity: "INFO",
|
||||
details: { payment_provider: paymentProvider, payment_reference: paymentReference }
|
||||
});
|
||||
|
||||
return invoice;
|
||||
}
|
||||
|
||||
export async function updateOverdueInvoices(actorEmail = "system@proxpanel.local") {
|
||||
const result = await prisma.invoice.updateMany({
|
||||
where: {
|
||||
status: "PENDING",
|
||||
due_date: { lt: new Date() }
|
||||
},
|
||||
data: { status: "OVERDUE" }
|
||||
});
|
||||
|
||||
if (result.count > 0) {
|
||||
await logAudit({
|
||||
action: "invoice_overdue_scan",
|
||||
resource_type: "BILLING",
|
||||
actor_email: actorEmail,
|
||||
severity: "WARNING",
|
||||
details: { marked_overdue: result.count }
|
||||
});
|
||||
}
|
||||
|
||||
return result.count;
|
||||
}
|
||||
|
||||
function nextRunDate(schedule: "DAILY" | "WEEKLY" | "MONTHLY" | "MANUAL") {
|
||||
const now = new Date();
|
||||
if (schedule === "DAILY") return new Date(now.getTime() + 24 * 60 * 60 * 1000);
|
||||
if (schedule === "WEEKLY") return new Date(now.getTime() + 7 * 24 * 60 * 60 * 1000);
|
||||
if (schedule === "MONTHLY") return new Date(now.getTime() + 30 * 24 * 60 * 60 * 1000);
|
||||
return null;
|
||||
}
|
||||
|
||||
export async function processBackupSchedule(actorEmail = "system@proxpanel.local") {
|
||||
const now = new Date();
|
||||
const dueBackups = await prisma.backup.findMany({
|
||||
where: {
|
||||
schedule: { not: "MANUAL" },
|
||||
next_run_at: { lte: now },
|
||||
status: { in: ["PENDING", "COMPLETED", "FAILED"] }
|
||||
}
|
||||
});
|
||||
|
||||
for (const backup of dueBackups) {
|
||||
const nextRunAt = nextRunDate(backup.schedule);
|
||||
await prisma.backup.update({
|
||||
where: { id: backup.id },
|
||||
data: {
|
||||
status: "PENDING",
|
||||
started_at: null,
|
||||
completed_at: null,
|
||||
next_run_at: nextRunAt
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (dueBackups.length > 0) {
|
||||
await logAudit({
|
||||
action: "backup_scheduler_run",
|
||||
resource_type: "BACKUP",
|
||||
actor_email: actorEmail,
|
||||
severity: "INFO",
|
||||
details: { queued_backups: dueBackups.length }
|
||||
});
|
||||
}
|
||||
|
||||
return dueBackups.length;
|
||||
}
|
||||
1454
backend/src/services/monitoring.service.ts
Normal file
1454
backend/src/services/monitoring.service.ts
Normal file
File diff suppressed because it is too large
Load Diff
1402
backend/src/services/network.service.ts
Normal file
1402
backend/src/services/network.service.ts
Normal file
File diff suppressed because it is too large
Load Diff
954
backend/src/services/operations.service.ts
Normal file
954
backend/src/services/operations.service.ts
Normal file
@@ -0,0 +1,954 @@
|
||||
import {
|
||||
OperationTaskStatus,
|
||||
OperationTaskType,
|
||||
PowerScheduleAction,
|
||||
Prisma,
|
||||
VmStatus
|
||||
} from "@prisma/client";
|
||||
import axios from "axios";
|
||||
import { prisma } from "../lib/prisma";
|
||||
import { HttpError } from "../lib/http-error";
|
||||
import { restartVm, shutdownVm, startVm, stopVm } from "./proxmox.service";
|
||||
|
||||
/** Parameters for persisting a new operation task (see createOperationTask). */
type TaskCreateInput = {
  taskType: OperationTaskType;
  // Email of the user (or system identity) that requested the task.
  requestedBy?: string;
  // Minimal VM linkage; omitted for tasks not tied to a specific VM.
  vm?: {
    id: string;
    name: string;
    node: string;
  };
  // Free-form JSON parameters stored with the task.
  payload?: Prisma.InputJsonValue;
  // When the task should run; null/undefined means immediately.
  scheduledFor?: Date | null;
  // Initial status; defaults to QUEUED when omitted.
  status?: OperationTaskStatus;
};

/** Filters and paging options for listOperationTasks. */
type TaskListInput = {
  status?: OperationTaskStatus;
  taskType?: OperationTaskType;
  vmId?: string;
  limit?: number; // clamped to 1..200 by the consumer
  offset?: number; // clamped to >= 0 by the consumer
  tenantId?: string | null; // scopes results to one tenant's VMs
};

/** Parameters for creating a recurring VM power schedule. */
type PowerScheduleCreateInput = {
  vmId: string;
  action: PowerScheduleAction;
  cronExpression: string; // standard 5-field cron
  timezone?: string; // defaults to "UTC" on create
  createdBy?: string;
};

/** Partial update for an existing power schedule; omitted fields are kept. */
type PowerScheduleUpdateInput = {
  action?: PowerScheduleAction;
  cronExpression?: string;
  timezone?: string;
  enabled?: boolean;
};

/** Extra options when executing a power action immediately. */
type ExecutePowerOptions = {
  scheduledFor?: Date | null;
  payload?: Prisma.InputJsonValue;
};

/** Effective operations policy resolved from settings (see getOperationsPolicy). */
export type OperationsPolicy = {
  max_retry_attempts: number; // 0 disables retries; capped at 10
  retry_backoff_minutes: number; // clamped to 1..720 minutes
  notify_on_task_failure: boolean;
  notification_email: string | null;
  notification_webhook_url: string | null;
  email_gateway_url: string | null;
};

// Fallback values used when the settings rows are missing or malformed.
const DEFAULT_OPERATIONS_POLICY: OperationsPolicy = {
  max_retry_attempts: 2,
  retry_backoff_minutes: 10,
  notify_on_task_failure: true,
  notification_email: null,
  notification_webhook_url: null,
  email_gateway_url: null
};
|
||||
|
||||
function numberRange(min: number, max: number) {
|
||||
return Array.from({ length: max - min + 1 }, (_, idx) => min + idx);
|
||||
}
|
||||
|
||||
function parseSingleToken(token: string, min: number, max: number): number[] {
|
||||
if (token === "*") {
|
||||
return numberRange(min, max);
|
||||
}
|
||||
|
||||
if (token.includes("/")) {
|
||||
const [baseToken, stepToken] = token.split("/");
|
||||
const step = Number(stepToken);
|
||||
if (!Number.isInteger(step) || step <= 0) {
|
||||
throw new Error(`Invalid cron step: ${token}`);
|
||||
}
|
||||
|
||||
const baseValues = parseSingleToken(baseToken, min, max);
|
||||
const startValue = Math.min(...baseValues);
|
||||
return baseValues.filter((value) => (value - startValue) % step === 0);
|
||||
}
|
||||
|
||||
if (token.includes("-")) {
|
||||
const [startToken, endToken] = token.split("-");
|
||||
const start = Number(startToken);
|
||||
const end = Number(endToken);
|
||||
if (!Number.isInteger(start) || !Number.isInteger(end) || start > end) {
|
||||
throw new Error(`Invalid cron range: ${token}`);
|
||||
}
|
||||
if (start < min || end > max) {
|
||||
throw new Error(`Cron range out of bounds: ${token}`);
|
||||
}
|
||||
return numberRange(start, end);
|
||||
}
|
||||
|
||||
const value = Number(token);
|
||||
if (!Number.isInteger(value) || value < min || value > max) {
|
||||
throw new Error(`Invalid cron value: ${token}`);
|
||||
}
|
||||
return [value];
|
||||
}
|
||||
|
||||
function parseCronField(field: string, min: number, max: number): Set<number> {
|
||||
const values = new Set<number>();
|
||||
for (const rawToken of field.split(",")) {
|
||||
const token = rawToken.trim();
|
||||
if (!token) continue;
|
||||
for (const value of parseSingleToken(token, min, max)) {
|
||||
values.add(value);
|
||||
}
|
||||
}
|
||||
if (values.size === 0) {
|
||||
throw new Error(`Invalid cron field: ${field}`);
|
||||
}
|
||||
return values;
|
||||
}
|
||||
|
||||
function parseCronExpression(expression: string) {
|
||||
const parts = expression.trim().split(/\s+/);
|
||||
if (parts.length !== 5) {
|
||||
throw new Error("Cron expression must contain exactly 5 fields");
|
||||
}
|
||||
|
||||
return {
|
||||
minute: parseCronField(parts[0], 0, 59),
|
||||
hour: parseCronField(parts[1], 0, 23),
|
||||
dayOfMonth: parseCronField(parts[2], 1, 31),
|
||||
month: parseCronField(parts[3], 1, 12),
|
||||
dayOfWeek: parseCronField(parts[4], 0, 6)
|
||||
};
|
||||
}
|
||||
|
||||
function cronMatchesParsed(date: Date, parsed: ReturnType<typeof parseCronExpression>) {
|
||||
return (
|
||||
parsed.minute.has(date.getMinutes()) &&
|
||||
parsed.hour.has(date.getHours()) &&
|
||||
parsed.dayOfMonth.has(date.getDate()) &&
|
||||
parsed.month.has(date.getMonth() + 1) &&
|
||||
parsed.dayOfWeek.has(date.getDay())
|
||||
);
|
||||
}
|
||||
|
||||
export function nextRunAt(cronExpression: string, fromDate = new Date()): Date | null {
|
||||
const parsed = parseCronExpression(cronExpression);
|
||||
const base = new Date(fromDate);
|
||||
base.setSeconds(0, 0);
|
||||
|
||||
const maxChecks = 60 * 24 * 365;
|
||||
for (let index = 1; index <= maxChecks; index += 1) {
|
||||
const candidate = new Date(base.getTime() + index * 60 * 1000);
|
||||
if (cronMatchesParsed(candidate, parsed)) {
|
||||
return candidate;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
export function validateCronExpression(cronExpression: string) {
|
||||
parseCronExpression(cronExpression);
|
||||
}
|
||||
|
||||
export async function createOperationTask(input: TaskCreateInput) {
|
||||
return prisma.operationTask.create({
|
||||
data: {
|
||||
task_type: input.taskType,
|
||||
status: input.status ?? OperationTaskStatus.QUEUED,
|
||||
vm_id: input.vm?.id,
|
||||
vm_name: input.vm?.name,
|
||||
node: input.vm?.node,
|
||||
requested_by: input.requestedBy,
|
||||
payload: input.payload,
|
||||
scheduled_for: input.scheduledFor ?? undefined
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export async function markOperationTaskRunning(taskId: string) {
|
||||
return prisma.operationTask.update({
|
||||
where: { id: taskId },
|
||||
data: {
|
||||
status: OperationTaskStatus.RUNNING,
|
||||
started_at: new Date(),
|
||||
error_message: null
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export async function markOperationTaskSuccess(taskId: string, result?: Prisma.InputJsonValue, proxmoxUpid?: string) {
|
||||
return prisma.operationTask.update({
|
||||
where: { id: taskId },
|
||||
data: {
|
||||
status: OperationTaskStatus.SUCCESS,
|
||||
result,
|
||||
proxmox_upid: proxmoxUpid,
|
||||
completed_at: new Date()
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export async function markOperationTaskFailed(taskId: string, errorMessage: string) {
|
||||
return prisma.operationTask.update({
|
||||
where: { id: taskId },
|
||||
data: {
|
||||
status: OperationTaskStatus.FAILED,
|
||||
error_message: errorMessage,
|
||||
completed_at: new Date()
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function asPlainObject(value: Prisma.JsonValue | Prisma.InputJsonValue | null | undefined): Record<string, unknown> {
|
||||
if (!value || typeof value !== "object" || Array.isArray(value)) return {};
|
||||
return value as Record<string, unknown>;
|
||||
}
|
||||
|
||||
function toPowerAction(value: unknown): PowerScheduleAction | null {
|
||||
if (typeof value !== "string") return null;
|
||||
const candidate = value.toUpperCase();
|
||||
return Object.values(PowerScheduleAction).includes(candidate as PowerScheduleAction)
|
||||
? (candidate as PowerScheduleAction)
|
||||
: null;
|
||||
}
|
||||
|
||||
function asStringOrNull(value: unknown) {
|
||||
return typeof value === "string" && value.trim().length > 0 ? value.trim() : null;
|
||||
}
|
||||
|
||||
function addMinutes(date: Date, minutes: number) {
|
||||
const copy = new Date(date);
|
||||
copy.setMinutes(copy.getMinutes() + minutes);
|
||||
return copy;
|
||||
}
|
||||
|
||||
/**
 * Resolves the effective operations policy from two settings rows
 * ("operations_policy" and "notifications"), falling back to
 * DEFAULT_OPERATIONS_POLICY for anything missing or malformed.
 *
 * Numeric fields are accepted only as integers within bounds and clamped
 * (retries <= 10, backoff 1..720 minutes). Notification destinations fall
 * back through legacy keys on the "notifications" row in the exact order of
 * the ?? chains below — do not reorder them.
 */
export async function getOperationsPolicy(): Promise<OperationsPolicy> {
  // Fetch both settings rows in parallel.
  const [setting, notificationsSetting] = await Promise.all([
    prisma.setting.findUnique({
      where: { key: "operations_policy" },
      select: { value: true }
    }),
    prisma.setting.findUnique({
      where: { key: "notifications" },
      select: { value: true }
    })
  ]);

  // Settings values are JSON; only plain objects are usable, anything else
  // degrades to an empty object so every field falls back to defaults.
  const value =
    setting?.value && typeof setting.value === "object" && !Array.isArray(setting.value)
      ? (setting.value as Record<string, unknown>)
      : {};
  const notificationsValue =
    notificationsSetting?.value && typeof notificationsSetting.value === "object" && !Array.isArray(notificationsSetting.value)
      ? (notificationsSetting.value as Record<string, unknown>)
      : {};

  const maxRetryAttemptsRaw = Number(value.max_retry_attempts);
  const retryBackoffRaw = Number(value.retry_backoff_minutes);

  return {
    // 0 is a valid configured value (retries disabled); cap at 10.
    max_retry_attempts:
      Number.isInteger(maxRetryAttemptsRaw) && maxRetryAttemptsRaw >= 0
        ? Math.min(maxRetryAttemptsRaw, 10)
        : DEFAULT_OPERATIONS_POLICY.max_retry_attempts,
    // Backoff must be at least 1 minute; cap at 12 hours.
    retry_backoff_minutes:
      Number.isInteger(retryBackoffRaw) && retryBackoffRaw >= 1
        ? Math.min(retryBackoffRaw, 720)
        : DEFAULT_OPERATIONS_POLICY.retry_backoff_minutes,
    notify_on_task_failure:
      typeof value.notify_on_task_failure === "boolean"
        ? value.notify_on_task_failure
        : DEFAULT_OPERATIONS_POLICY.notify_on_task_failure,
    // Destination fallbacks: explicit policy value first, then legacy keys
    // from the "notifications" setting.
    notification_email: asStringOrNull(value.notification_email) ?? asStringOrNull(notificationsValue.ops_email),
    notification_webhook_url:
      asStringOrNull(value.notification_webhook_url) ??
      asStringOrNull(notificationsValue.monitoring_webhook_url) ??
      asStringOrNull(notificationsValue.alert_webhook_url),
    email_gateway_url:
      asStringOrNull(value.email_gateway_url) ??
      asStringOrNull(notificationsValue.email_gateway_url) ??
      asStringOrNull(notificationsValue.notification_email_webhook)
  };
}
|
||||
|
||||
/**
 * Sends failure notifications for a permanently failed operation task over
 * the channels configured in the operations policy (webhook and/or email
 * gateway), then records one audit-log row per dispatch attempt.
 *
 * Both channels are best-effort: a dispatch error is captured as a FAILED
 * notification entry instead of being thrown, so notification problems
 * never mask the original task failure.
 */
async function dispatchTaskFailureNotifications(input: {
  task: {
    id: string;
    task_type: OperationTaskType;
    vm_name: string | null;
    vm_id: string | null;
    node: string | null;
    retry_count: number;
    error_message: string | null;
    created_at: Date;
    completed_at: Date | null;
    requested_by: string | null;
  };
  policy: OperationsPolicy;
  // Whether the task exhausted its retries or was never retryable.
  stage: "retry_exhausted" | "non_retryable";
}) {
  const destinationEmail = input.policy.notification_email;
  const emailGatewayUrl = input.policy.email_gateway_url;
  const webhookUrl = input.policy.notification_webhook_url;
  // Shared event body posted to the webhook and embedded in the email payload.
  const eventPayload = {
    type: "operations.task_failure",
    stage: input.stage,
    task_id: input.task.id,
    task_type: input.task.task_type,
    vm_id: input.task.vm_id,
    vm_name: input.task.vm_name,
    node: input.task.node,
    retry_count: input.task.retry_count,
    error_message: input.task.error_message,
    created_at: input.task.created_at.toISOString(),
    completed_at: input.task.completed_at?.toISOString() ?? null,
    requested_by: input.task.requested_by
  };

  // Outcome of every dispatch attempt, flushed to the audit log at the end.
  const notifications: Array<{
    channel: "WEBHOOK" | "EMAIL";
    destination: string | null;
    status: "SENT" | "FAILED";
    provider_message: string;
    sent_at: Date | null;
  }> = [];

  if (webhookUrl) {
    try {
      const response = await axios.post(webhookUrl, eventPayload, { timeout: 10_000 });
      notifications.push({
        channel: "WEBHOOK",
        destination: webhookUrl,
        status: "SENT",
        provider_message: `HTTP ${response.status}`,
        sent_at: new Date()
      });
    } catch (error) {
      const message = error instanceof Error ? error.message : "Webhook dispatch failed";
      notifications.push({
        channel: "WEBHOOK",
        destination: webhookUrl,
        status: "FAILED",
        // Truncate provider errors so audit details stay bounded.
        provider_message: message.slice(0, 240),
        sent_at: null
      });
    }
  }

  // Email needs both a gateway endpoint and a destination address.
  if (emailGatewayUrl && destinationEmail) {
    try {
      const response = await axios.post(
        emailGatewayUrl,
        {
          type: "operations.task_failure.email",
          to: destinationEmail,
          subject: `[Task Failure] ${input.task.task_type} ${input.task.vm_name ?? input.task.vm_id ?? ""}`.trim(),
          message: input.task.error_message ?? "Operation task failed",
          payload: eventPayload
        },
        { timeout: 10_000 }
      );
      notifications.push({
        channel: "EMAIL",
        destination: destinationEmail,
        status: "SENT",
        provider_message: `HTTP ${response.status}`,
        sent_at: new Date()
      });
    } catch (error) {
      const message = error instanceof Error ? error.message : "Email dispatch failed";
      notifications.push({
        channel: "EMAIL",
        destination: destinationEmail,
        status: "FAILED",
        provider_message: message.slice(0, 240),
        sent_at: null
      });
    }
  }

  if (notifications.length > 0) {
    // One audit row per channel attempt; failed dispatches logged as ERROR.
    await prisma.auditLog.createMany({
      data: notifications.map((notification) => ({
        action: "operations.task_failure_notification",
        resource_type: "SYSTEM",
        resource_id: input.task.id,
        resource_name: input.task.vm_name ?? input.task.id,
        actor_email: "system@proxpanel.local",
        actor_role: "SYSTEM",
        severity: notification.status === "FAILED" ? "ERROR" : "INFO",
        details: {
          channel: notification.channel,
          destination: notification.destination,
          dispatch_status: notification.status,
          provider_message: notification.provider_message,
          task_id: input.task.id,
          stage: input.stage
        }
      }))
    });
  }
}
|
||||
|
||||
/**
 * Central failure handler for an operation task. Depending on the policy it
 * either schedules another retry (VM_POWER tasks only, until
 * max_retry_attempts is reached) or finalises the task as FAILED and fires
 * failure notifications.
 *
 * @returns a discriminated result: "missing" (task no longer exists),
 *          "retrying" (with next_retry_at), or "failed".
 */
async function handleOperationTaskFailure(taskId: string, errorMessage: string) {
  const policy = await getOperationsPolicy();
  const existing = await prisma.operationTask.findUnique({ where: { id: taskId } });

  if (!existing) {
    return { status: "missing" as const, retry_scheduled: false };
  }

  // Only VM power actions are retryable, and only while attempts remain.
  const canRetry =
    existing.task_type === OperationTaskType.VM_POWER &&
    existing.retry_count < policy.max_retry_attempts &&
    policy.max_retry_attempts > 0;

  if (canRetry) {
    // Park the task in RETRYING with a backoff-delayed scheduled_for;
    // processDueOperationRetries will pick it up later.
    const nextRetryAt = addMinutes(new Date(), policy.retry_backoff_minutes);
    await prisma.operationTask.update({
      where: { id: existing.id },
      data: {
        status: OperationTaskStatus.RETRYING,
        error_message: errorMessage,
        completed_at: new Date(),
        retry_count: existing.retry_count + 1,
        scheduled_for: nextRetryAt
      }
    });
    return { status: "retrying" as const, retry_scheduled: true, next_retry_at: nextRetryAt };
  }

  // Terminal failure: clear scheduled_for so the retry worker ignores it.
  const failed = await prisma.operationTask.update({
    where: { id: existing.id },
    data: {
      status: OperationTaskStatus.FAILED,
      error_message: errorMessage,
      completed_at: new Date(),
      scheduled_for: null
    }
  });

  if (policy.notify_on_task_failure) {
    await dispatchTaskFailureNotifications({
      task: failed,
      policy,
      // VM_POWER tasks reach here only after exhausting their retries.
      stage: existing.task_type === OperationTaskType.VM_POWER ? "retry_exhausted" : "non_retryable"
    });
  }

  return { status: "failed" as const, retry_scheduled: false };
}
|
||||
|
||||
/**
 * Paginated task listing with optional status/type/VM/tenant filters.
 * Returns the page data plus meta containing the total matching count and a
 * queue summary (task counts per status). Note the queue summary honours
 * only the tenant filter, not the other filters — it describes the whole
 * queue for that scope.
 */
export async function listOperationTasks(input: TaskListInput) {
  const where: Prisma.OperationTaskWhereInput = {};

  if (input.status) where.status = input.status;
  if (input.taskType) where.task_type = input.taskType;
  if (input.vmId) where.vm_id = input.vmId;
  if (input.tenantId) {
    // Tenant scoping goes through the task's VM relation.
    where.vm = { tenant_id: input.tenantId };
  }

  // Clamp paging: 1..200 items per page, non-negative offset.
  const limit = Math.min(Math.max(input.limit ?? 50, 1), 200);
  const offset = Math.max(input.offset ?? 0, 0);

  // Page query and total count run in parallel.
  const [data, total] = await Promise.all([
    prisma.operationTask.findMany({
      where,
      include: {
        vm: {
          select: {
            id: true,
            name: true,
            tenant_id: true,
            node: true
          }
        }
      },
      orderBy: { created_at: "desc" },
      take: limit,
      skip: offset
    }),
    prisma.operationTask.count({ where })
  ]);

  const queue = await prisma.operationTask.groupBy({
    by: ["status"],
    _count: { status: true },
    where: input.tenantId ? { vm: { tenant_id: input.tenantId } } : undefined
  });

  return {
    data,
    meta: {
      total,
      limit,
      offset,
      // Flatten groupBy buckets into a { STATUS: count } map.
      queue_summary: queue.reduce<Record<string, number>>((acc, item) => {
        acc[item.status] = item._count.status;
        return acc;
      }, {})
    }
  };
}
|
||||
|
||||
function vmStatusFromPowerAction(action: PowerScheduleAction): VmStatus {
|
||||
if (action === PowerScheduleAction.START || action === PowerScheduleAction.RESTART) {
|
||||
return VmStatus.RUNNING;
|
||||
}
|
||||
return VmStatus.STOPPED;
|
||||
}
|
||||
|
||||
async function fetchVmForAction(vmId: string) {
|
||||
const vm = await prisma.virtualMachine.findUnique({
|
||||
where: { id: vmId },
|
||||
select: {
|
||||
id: true,
|
||||
name: true,
|
||||
node: true,
|
||||
vmid: true,
|
||||
type: true,
|
||||
tenant_id: true
|
||||
}
|
||||
});
|
||||
|
||||
if (!vm) {
|
||||
throw new HttpError(404, "VM not found", "VM_NOT_FOUND");
|
||||
}
|
||||
|
||||
return vm;
|
||||
}
|
||||
|
||||
async function runPowerAction(vm: Awaited<ReturnType<typeof fetchVmForAction>>, action: PowerScheduleAction) {
|
||||
const type = vm.type === "LXC" ? "lxc" : "qemu";
|
||||
|
||||
if (action === PowerScheduleAction.START) {
|
||||
return startVm(vm.node, vm.vmid, type);
|
||||
}
|
||||
|
||||
if (action === PowerScheduleAction.STOP) {
|
||||
return stopVm(vm.node, vm.vmid, type);
|
||||
}
|
||||
|
||||
if (action === PowerScheduleAction.RESTART) {
|
||||
return restartVm(vm.node, vm.vmid, type);
|
||||
}
|
||||
|
||||
return shutdownVm(vm.node, vm.vmid, type);
|
||||
}
|
||||
|
||||
/**
 * Executes a VM power action immediately, wrapped in an operation task for
 * tracking: creates the task, marks it RUNNING, performs the Proxmox call,
 * optimistically updates the VM's status/UPID, and finalises the task as
 * SUCCESS. On error the shared failure handler decides between retry and
 * terminal failure, and the original error is rethrown to the caller.
 */
export async function executeVmPowerActionNow(
  vmId: string,
  action: PowerScheduleAction,
  actorEmail: string,
  options?: ExecutePowerOptions
) {
  const vm = await fetchVmForAction(vmId); // throws 404 if the VM is gone
  const rawPayload = asPlainObject(options?.payload ?? null);
  // Caller-supplied payload is preserved, but action/vm_id always win so
  // the retry worker can reconstruct the action later.
  const taskPayload: Prisma.InputJsonObject = {
    ...rawPayload,
    action,
    vm_id: vm.id
  };

  const task = await createOperationTask({
    taskType: OperationTaskType.VM_POWER,
    vm: {
      id: vm.id,
      name: vm.name,
      node: vm.node
    },
    requestedBy: actorEmail,
    payload: taskPayload,
    scheduledFor: options?.scheduledFor
  });

  await markOperationTaskRunning(task.id);

  try {
    const upid = await runPowerAction(vm, action);
    // Optimistic status update; proxmox_upid is only set when returned.
    await prisma.virtualMachine.update({
      where: { id: vm.id },
      data: {
        status: vmStatusFromPowerAction(action),
        proxmox_upid: upid ?? undefined
      }
    });

    // Only include "upid" in the result when Proxmox returned one.
    const resultPayload: Prisma.InputJsonObject = upid
      ? {
          vm_id: vm.id,
          action,
          upid
        }
      : {
          vm_id: vm.id,
          action
        };

    const updatedTask = await markOperationTaskSuccess(task.id, resultPayload, upid ?? undefined);
    return { task: updatedTask, upid };
  } catch (error) {
    const message = error instanceof Error ? error.message : "Unknown power action error";
    await handleOperationTaskFailure(task.id, message);
    throw error; // propagate to the caller after bookkeeping
  }
}
|
||||
|
||||
export async function listPowerSchedules(tenantId?: string | null) {
|
||||
const where: Prisma.PowerScheduleWhereInput = tenantId
|
||||
? {
|
||||
vm: {
|
||||
tenant_id: tenantId
|
||||
}
|
||||
}
|
||||
: {};
|
||||
|
||||
return prisma.powerSchedule.findMany({
|
||||
where,
|
||||
include: {
|
||||
vm: {
|
||||
select: {
|
||||
id: true,
|
||||
name: true,
|
||||
node: true,
|
||||
tenant_id: true,
|
||||
status: true
|
||||
}
|
||||
}
|
||||
},
|
||||
orderBy: [
|
||||
{ enabled: "desc" },
|
||||
{ next_run_at: "asc" },
|
||||
{ created_at: "desc" }
|
||||
]
|
||||
});
|
||||
}
|
||||
|
||||
export async function createPowerSchedule(input: PowerScheduleCreateInput) {
|
||||
validateCronExpression(input.cronExpression);
|
||||
const vm = await fetchVmForAction(input.vmId);
|
||||
|
||||
const nextRun = nextRunAt(input.cronExpression, new Date());
|
||||
|
||||
return prisma.powerSchedule.create({
|
||||
data: {
|
||||
vm_id: vm.id,
|
||||
action: input.action,
|
||||
cron_expression: input.cronExpression,
|
||||
timezone: input.timezone ?? "UTC",
|
||||
next_run_at: nextRun,
|
||||
created_by: input.createdBy
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export async function updatePowerSchedule(scheduleId: string, input: PowerScheduleUpdateInput) {
|
||||
const existing = await prisma.powerSchedule.findUnique({ where: { id: scheduleId } });
|
||||
if (!existing) {
|
||||
throw new HttpError(404, "Power schedule not found", "POWER_SCHEDULE_NOT_FOUND");
|
||||
}
|
||||
|
||||
if (input.cronExpression) {
|
||||
validateCronExpression(input.cronExpression);
|
||||
}
|
||||
|
||||
const cronExpression = input.cronExpression ?? existing.cron_expression;
|
||||
const enabled = input.enabled ?? existing.enabled;
|
||||
const nextRun = enabled ? nextRunAt(cronExpression, new Date()) : null;
|
||||
|
||||
return prisma.powerSchedule.update({
|
||||
where: { id: scheduleId },
|
||||
data: {
|
||||
action: input.action,
|
||||
cron_expression: input.cronExpression,
|
||||
timezone: input.timezone,
|
||||
enabled: input.enabled,
|
||||
next_run_at: nextRun
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export async function deletePowerSchedule(scheduleId: string) {
|
||||
return prisma.powerSchedule.delete({ where: { id: scheduleId } });
|
||||
}
|
||||
|
||||
/**
 * Scheduler tick for VM power schedules. Finds up to 100 enabled schedules
 * whose next_run_at has elapsed and executes each one.
 *
 * Concurrency: before executing, each schedule is "claimed" via a
 * conditional updateMany that simultaneously advances next_run_at; a
 * concurrent worker that loses the race sees count === 0 and skips the
 * schedule. Schedules whose cron yields no future run are disabled.
 * Returns counters for scanned/executed/failed/skipped.
 */
export async function processDuePowerSchedules(actorEmail = "system@proxpanel.local") {
  const now = new Date();
  const dueSchedules = await prisma.powerSchedule.findMany({
    where: {
      enabled: true,
      next_run_at: {
        lte: now
      }
    },
    include: {
      vm: {
        select: {
          id: true,
          name: true,
          node: true,
          vmid: true,
          type: true
        }
      }
    },
    orderBy: {
      next_run_at: "asc"
    },
    take: 100 // bound each tick's workload
  });

  let executed = 0;
  let failed = 0;
  let skipped = 0;

  for (const schedule of dueSchedules) {
    const nextRun = nextRunAt(schedule.cron_expression, now);
    // Atomic claim: only succeeds if the schedule is still enabled and due.
    const claim = await prisma.powerSchedule.updateMany({
      where: {
        id: schedule.id,
        enabled: true,
        next_run_at: {
          lte: now
        }
      },
      data: {
        last_run_at: now,
        next_run_at: nextRun,
        // No computable next run -> schedule is exhausted; disable it.
        enabled: nextRun ? schedule.enabled : false
      }
    });

    if (claim.count === 0) {
      skipped += 1; // another worker claimed it first
      continue;
    }

    const payload: Prisma.InputJsonValue = {
      source: "power_schedule",
      schedule_id: schedule.id,
      action: schedule.action
    };

    try {
      await executeVmPowerActionNow(schedule.vm_id, schedule.action, actorEmail, {
        payload,
        // Uses the pre-claim next_run_at from the in-memory row.
        scheduledFor: schedule.next_run_at
      });
      executed += 1;
    } catch {
      // executeVmPowerActionNow already recorded the failure on its task.
      failed += 1;
    }
  }

  return {
    scanned: dueSchedules.length,
    executed,
    failed,
    skipped
  };
}
|
||||
|
||||
/**
 * Worker tick that re-executes VM power tasks previously parked in RETRYING
 * state. Each due task is atomically claimed (a conditional updateMany flips
 * it to RUNNING), its action is reconstructed from the stored payload, and
 * the power action is re-run. Failures are routed back through
 * handleOperationTaskFailure, which may reschedule yet another retry.
 * A summary audit-log row is written when anything happened.
 */
export async function processDueOperationRetries(actorEmail = "system@proxpanel.local") {
  const now = new Date();
  const dueRetries = await prisma.operationTask.findMany({
    where: {
      task_type: OperationTaskType.VM_POWER,
      status: OperationTaskStatus.RETRYING,
      scheduled_for: {
        lte: now
      }
    },
    orderBy: { scheduled_for: "asc" },
    take: 100 // bound each tick's workload
  });

  let executed = 0;
  let succeeded = 0;
  let failed = 0;
  let rescheduled = 0;
  let invalidPayload = 0;
  let skipped = 0;

  for (const task of dueRetries) {
    const claimedAt = new Date();
    // Atomic claim: only one worker can flip RETRYING -> RUNNING.
    const claim = await prisma.operationTask.updateMany({
      where: {
        id: task.id,
        task_type: OperationTaskType.VM_POWER,
        status: OperationTaskStatus.RETRYING,
        scheduled_for: {
          lte: now
        }
      },
      data: {
        status: OperationTaskStatus.RUNNING,
        started_at: claimedAt,
        error_message: null,
        completed_at: null
      }
    });

    if (claim.count === 0) {
      skipped += 1; // claimed by a concurrent worker
      continue;
    }

    executed += 1;
    // The original action was embedded in the task payload at creation time.
    const payload = asPlainObject(task.payload as Prisma.JsonValue | null);
    const action = toPowerAction(payload.action);

    if (!task.vm_id || !action) {
      // Cannot reconstruct what to do -> terminal failure for this task.
      invalidPayload += 1;
      await handleOperationTaskFailure(task.id, "Retry payload missing actionable power action");
      continue;
    }

    try {
      const vm = await fetchVmForAction(task.vm_id);
      const upid = await runPowerAction(vm, action);
      // Optimistic VM status update mirroring executeVmPowerActionNow.
      await prisma.virtualMachine.update({
        where: { id: vm.id },
        data: {
          status: vmStatusFromPowerAction(action),
          proxmox_upid: upid ?? undefined
        }
      });

      // Only include "upid" when Proxmox returned one.
      const resultPayload: Prisma.InputJsonObject = upid
        ? {
            retry_of_task: task.id,
            vm_id: vm.id,
            action,
            upid
          }
        : {
            retry_of_task: task.id,
            vm_id: vm.id,
            action
          };

      await markOperationTaskSuccess(task.id, resultPayload, upid ?? undefined);
      succeeded += 1;
    } catch (error) {
      const message = error instanceof Error ? error.message : "Retry power action failed";
      const failureResult = await handleOperationTaskFailure(task.id, message);
      failed += 1;
      if (failureResult.retry_scheduled) {
        rescheduled += 1;
      }
    }
  }

  if (dueRetries.length > 0 || failed > 0 || rescheduled > 0) {
    // One summary audit row per cycle that did any work.
    await prisma.auditLog.create({
      data: {
        action: "operations.retry_cycle",
        resource_type: "SYSTEM",
        resource_name: "Operation Retry Worker",
        actor_email: actorEmail,
        actor_role: "SYSTEM",
        severity: failed > 0 ? "WARNING" : "INFO",
        details: {
          scanned: dueRetries.length,
          executed,
          succeeded,
          failed,
          rescheduled,
          invalid_payload: invalidPayload,
          skipped
        }
      }
    });
  }

  return {
    scanned: dueRetries.length,
    executed,
    succeeded,
    failed,
    rescheduled,
    invalid_payload: invalidPayload,
    skipped
  };
}
|
||||
|
||||
/**
 * Builds a point-in-time health snapshot of the operation queue, optionally
 * scoped to one tenant (scoping goes through the task's VM -> tenant relation).
 *
 * Reports: task counts per status, queued tasks older than 15 minutes
 * ("stale"), failures completed within the last 24 hours, retries whose
 * scheduled time has passed, and enabled power schedules that are due.
 *
 * @param tenantId - when provided, restrict all counts to that tenant's VMs
 */
export async function operationQueueInsights(tenantId?: string | null) {
  const now = new Date();
  // Queued tasks created before this cutoff are considered stuck.
  const staleThreshold = addMinutes(now, -15);
  const dayAgo = addMinutes(now, -24 * 60);

  const tenantWhere: Prisma.OperationTaskWhereInput = tenantId ? { vm: { tenant_id: tenantId } } : {};

  // The five aggregates are independent, so they run in parallel.
  const [statusBuckets, staleQueued, failed24h, dueRetries, powerSchedulesDue] = await Promise.all([
    prisma.operationTask.groupBy({
      by: ["status"],
      _count: { status: true },
      where: tenantWhere
    }),
    prisma.operationTask.count({
      where: {
        ...tenantWhere,
        status: OperationTaskStatus.QUEUED,
        created_at: { lte: staleThreshold }
      }
    }),
    prisma.operationTask.count({
      where: {
        ...tenantWhere,
        status: OperationTaskStatus.FAILED,
        completed_at: { gte: dayAgo }
      }
    }),
    prisma.operationTask.count({
      where: {
        ...tenantWhere,
        status: OperationTaskStatus.RETRYING,
        scheduled_for: { lte: now }
      }
    }),
    prisma.powerSchedule.count({
      where: {
        enabled: true,
        next_run_at: { lte: now },
        ...(tenantId ? { vm: { tenant_id: tenantId } } : {})
      }
    })
  ]);

  // Flatten groupBy buckets into { STATUS: count } for easy consumption.
  const queueSummary = statusBuckets.reduce<Record<string, number>>((acc, bucket) => {
    acc[bucket.status] = bucket._count.status;
    return acc;
  }, {});

  return {
    generated_at: now.toISOString(),
    queue_summary: queueSummary,
    stale_queued_tasks: staleQueued,
    failed_tasks_24h: failed24h,
    due_retries: dueRetries,
    due_power_schedules: powerSchedulesDue
  };
}
|
||||
182
backend/src/services/payment.service.ts
Normal file
182
backend/src/services/payment.service.ts
Normal file
@@ -0,0 +1,182 @@
|
||||
import axios from "axios";
|
||||
import crypto from "crypto";
|
||||
import { PaymentProvider } from "@prisma/client";
|
||||
import { prisma } from "../lib/prisma";
|
||||
import { HttpError } from "../lib/http-error";
|
||||
import { markInvoicePaid } from "./billing.service";
|
||||
|
||||
type PaymentSettings = {
|
||||
default_provider?: "paystack" | "flutterwave" | "manual";
|
||||
paystack_public?: string;
|
||||
paystack_secret?: string;
|
||||
flutterwave_public?: string;
|
||||
flutterwave_secret?: string;
|
||||
flutterwave_webhook_hash?: string;
|
||||
callback_url?: string;
|
||||
};
|
||||
|
||||
async function getPaymentSettings(): Promise<PaymentSettings> {
|
||||
const setting = await prisma.setting.findUnique({
|
||||
where: { key: "payment" }
|
||||
});
|
||||
return (setting?.value as PaymentSettings) ?? {};
|
||||
}
|
||||
|
||||
function normalizeProvider(provider: string | undefined, fallback: string): PaymentProvider {
|
||||
const value = (provider ?? fallback).toLowerCase();
|
||||
if (value === "paystack") return PaymentProvider.PAYSTACK;
|
||||
if (value === "flutterwave") return PaymentProvider.FLUTTERWAVE;
|
||||
return PaymentProvider.MANUAL;
|
||||
}
|
||||
|
||||
export async function createInvoicePaymentLink(invoiceId: string, requestedProvider?: string) {
|
||||
const invoice = await prisma.invoice.findUnique({
|
||||
where: { id: invoiceId },
|
||||
include: { tenant: true }
|
||||
});
|
||||
if (!invoice) {
|
||||
throw new HttpError(404, "Invoice not found", "INVOICE_NOT_FOUND");
|
||||
}
|
||||
|
||||
const settings = await getPaymentSettings();
|
||||
const provider = normalizeProvider(requestedProvider, settings.default_provider ?? "manual");
|
||||
if (provider === PaymentProvider.MANUAL) {
|
||||
throw new HttpError(400, "Manual payment provider cannot generate online links", "MANUAL_PROVIDER");
|
||||
}
|
||||
|
||||
const reference = invoice.payment_reference ?? `PAY-${invoice.invoice_number}-${Date.now()}`;
|
||||
|
||||
if (provider === PaymentProvider.PAYSTACK) {
|
||||
if (!settings.paystack_secret) {
|
||||
throw new HttpError(400, "Paystack secret key is missing", "PAYSTACK_CONFIG_MISSING");
|
||||
}
|
||||
const response = await axios.post(
|
||||
"https://api.paystack.co/transaction/initialize",
|
||||
{
|
||||
email: invoice.tenant.owner_email,
|
||||
amount: Math.round(Number(invoice.amount) * 100),
|
||||
reference,
|
||||
currency: invoice.currency,
|
||||
callback_url: settings.callback_url,
|
||||
metadata: {
|
||||
invoice_id: invoice.id,
|
||||
tenant_id: invoice.tenant_id
|
||||
}
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${settings.paystack_secret}`,
|
||||
"Content-Type": "application/json"
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
const paymentUrl = response.data?.data?.authorization_url as string | undefined;
|
||||
await prisma.invoice.update({
|
||||
where: { id: invoice.id },
|
||||
data: {
|
||||
status: "PENDING",
|
||||
payment_provider: provider,
|
||||
payment_reference: reference,
|
||||
payment_url: paymentUrl
|
||||
}
|
||||
});
|
||||
return { provider: "paystack", payment_url: paymentUrl, reference };
|
||||
}
|
||||
|
||||
if (!settings.flutterwave_secret) {
|
||||
throw new HttpError(400, "Flutterwave secret key is missing", "FLUTTERWAVE_CONFIG_MISSING");
|
||||
}
|
||||
const response = await axios.post(
|
||||
"https://api.flutterwave.com/v3/payments",
|
||||
{
|
||||
tx_ref: reference,
|
||||
amount: Number(invoice.amount),
|
||||
currency: invoice.currency,
|
||||
redirect_url: settings.callback_url,
|
||||
customer: {
|
||||
email: invoice.tenant.owner_email,
|
||||
name: invoice.tenant.name
|
||||
},
|
||||
customizations: {
|
||||
title: "ProxPanel Invoice Payment",
|
||||
description: `Invoice ${invoice.invoice_number}`
|
||||
},
|
||||
meta: {
|
||||
invoice_id: invoice.id,
|
||||
tenant_id: invoice.tenant_id
|
||||
}
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${settings.flutterwave_secret}`,
|
||||
"Content-Type": "application/json"
|
||||
}
|
||||
}
|
||||
);
|
||||
const paymentUrl = response.data?.data?.link as string | undefined;
|
||||
await prisma.invoice.update({
|
||||
where: { id: invoice.id },
|
||||
data: {
|
||||
status: "PENDING",
|
||||
payment_provider: provider,
|
||||
payment_reference: reference,
|
||||
payment_url: paymentUrl
|
||||
}
|
||||
});
|
||||
return { provider: "flutterwave", payment_url: paymentUrl, reference };
|
||||
}
|
||||
|
||||
export async function handleManualInvoicePayment(invoiceId: string, reference: string, actorEmail: string) {
|
||||
return markInvoicePaid(invoiceId, PaymentProvider.MANUAL, reference, actorEmail);
|
||||
}
|
||||
|
||||
export async function verifyPaystackSignature(signature: string | undefined, rawBody: string | undefined) {
|
||||
if (!signature || !rawBody) return false;
|
||||
const settings = await getPaymentSettings();
|
||||
if (!settings.paystack_secret) return false;
|
||||
const expected = crypto
|
||||
.createHmac("sha512", settings.paystack_secret)
|
||||
.update(rawBody)
|
||||
.digest("hex");
|
||||
return expected === signature;
|
||||
}
|
||||
|
||||
export async function verifyFlutterwaveSignature(signature: string | undefined) {
|
||||
const settings = await getPaymentSettings();
|
||||
if (!settings.flutterwave_webhook_hash) return false;
|
||||
return settings.flutterwave_webhook_hash === signature;
|
||||
}
|
||||
|
||||
export async function processPaystackWebhook(payload: any) {
|
||||
if (payload?.event !== "charge.success") return { handled: false };
|
||||
const reference = payload?.data?.reference as string | undefined;
|
||||
if (!reference) return { handled: false };
|
||||
|
||||
const invoice = await prisma.invoice.findFirst({
|
||||
where: { payment_reference: reference }
|
||||
});
|
||||
if (!invoice) return { handled: false };
|
||||
|
||||
if (invoice.status !== "PAID") {
|
||||
await markInvoicePaid(invoice.id, PaymentProvider.PAYSTACK, reference, "webhook@paystack");
|
||||
}
|
||||
return { handled: true, invoice_id: invoice.id };
|
||||
}
|
||||
|
||||
export async function processFlutterwaveWebhook(payload: any) {
|
||||
const status = payload?.status?.toLowerCase();
|
||||
if (status !== "successful") return { handled: false };
|
||||
const reference = (payload?.txRef ?? payload?.tx_ref) as string | undefined;
|
||||
if (!reference) return { handled: false };
|
||||
|
||||
const invoice = await prisma.invoice.findFirst({
|
||||
where: { payment_reference: reference }
|
||||
});
|
||||
if (!invoice) return { handled: false };
|
||||
|
||||
if (invoice.status !== "PAID") {
|
||||
await markInvoicePaid(invoice.id, PaymentProvider.FLUTTERWAVE, reference, "webhook@flutterwave");
|
||||
}
|
||||
return { handled: true, invoice_id: invoice.id };
|
||||
}
|
||||
1123
backend/src/services/provisioning.service.ts
Normal file
1123
backend/src/services/provisioning.service.ts
Normal file
File diff suppressed because it is too large
Load Diff
1451
backend/src/services/proxmox.service.ts
Normal file
1451
backend/src/services/proxmox.service.ts
Normal file
File diff suppressed because it is too large
Load Diff
495
backend/src/services/scheduler.service.ts
Normal file
495
backend/src/services/scheduler.service.ts
Normal file
@@ -0,0 +1,495 @@
|
||||
import cron, { type ScheduledTask } from "node-cron";
|
||||
import os from "os";
|
||||
import { SettingType } from "@prisma/client";
|
||||
import { env } from "../config/env";
|
||||
import { prisma } from "../lib/prisma";
|
||||
import { meterHourlyUsage, generateInvoicesFromUnbilledUsage, processBackupSchedule, updateOverdueInvoices } from "./billing.service";
|
||||
import { processDuePowerSchedules, processDueOperationRetries } from "./operations.service";
|
||||
import { processDueSnapshotJobs, processPendingBackups } from "./backup.service";
|
||||
import { evaluateAlertRulesNow, processDueHealthChecks } from "./monitoring.service";
|
||||
|
||||
export type SchedulerConfig = {
|
||||
enable_scheduler: boolean;
|
||||
billing_cron: string;
|
||||
backup_cron: string;
|
||||
power_schedule_cron: string;
|
||||
monitoring_cron: string;
|
||||
operation_retry_cron: string;
|
||||
};
|
||||
|
||||
type WorkerKey = "billing" | "backup" | "power" | "monitoring" | "operation_retry";
|
||||
type WorkerStatus = "disabled" | "scheduled" | "running" | "success" | "failed";
|
||||
|
||||
type WorkerState = {
|
||||
worker: WorkerKey;
|
||||
cron: string;
|
||||
status: WorkerStatus;
|
||||
last_run_at: string | null;
|
||||
last_duration_ms: number | null;
|
||||
last_message: string | null;
|
||||
last_error: string | null;
|
||||
};
|
||||
|
||||
type SchedulerLeasePayload = {
|
||||
owner_id: string;
|
||||
lease_until: string;
|
||||
acquired_at: string;
|
||||
heartbeat_at: string;
|
||||
worker: WorkerKey;
|
||||
};
|
||||
|
||||
type SchedulerState = {
|
||||
started_at: string | null;
|
||||
config: SchedulerConfig;
|
||||
workers: Record<WorkerKey, WorkerState>;
|
||||
};
|
||||
|
||||
const DEFAULT_SCHEDULER_CONFIG: SchedulerConfig = {
|
||||
enable_scheduler: env.ENABLE_SCHEDULER,
|
||||
billing_cron: env.BILLING_CRON,
|
||||
backup_cron: env.BACKUP_CRON,
|
||||
power_schedule_cron: env.POWER_SCHEDULE_CRON,
|
||||
monitoring_cron: env.MONITORING_CRON,
|
||||
operation_retry_cron: "*/5 * * * *"
|
||||
};
|
||||
|
||||
let scheduledJobs: Partial<Record<WorkerKey, ScheduledTask>> = {};
|
||||
const activeWorkerRuns = new Set<WorkerKey>();
|
||||
const schedulerInstanceId = `${os.hostname()}:${process.pid}:${Math.random().toString(36).slice(2, 10)}`;
|
||||
|
||||
const schedulerState: SchedulerState = {
|
||||
started_at: null,
|
||||
config: DEFAULT_SCHEDULER_CONFIG,
|
||||
workers: {
|
||||
billing: {
|
||||
worker: "billing",
|
||||
cron: DEFAULT_SCHEDULER_CONFIG.billing_cron,
|
||||
status: DEFAULT_SCHEDULER_CONFIG.enable_scheduler ? "scheduled" : "disabled",
|
||||
last_run_at: null,
|
||||
last_duration_ms: null,
|
||||
last_message: null,
|
||||
last_error: null
|
||||
},
|
||||
backup: {
|
||||
worker: "backup",
|
||||
cron: DEFAULT_SCHEDULER_CONFIG.backup_cron,
|
||||
status: DEFAULT_SCHEDULER_CONFIG.enable_scheduler ? "scheduled" : "disabled",
|
||||
last_run_at: null,
|
||||
last_duration_ms: null,
|
||||
last_message: null,
|
||||
last_error: null
|
||||
},
|
||||
power: {
|
||||
worker: "power",
|
||||
cron: DEFAULT_SCHEDULER_CONFIG.power_schedule_cron,
|
||||
status: DEFAULT_SCHEDULER_CONFIG.enable_scheduler ? "scheduled" : "disabled",
|
||||
last_run_at: null,
|
||||
last_duration_ms: null,
|
||||
last_message: null,
|
||||
last_error: null
|
||||
},
|
||||
monitoring: {
|
||||
worker: "monitoring",
|
||||
cron: DEFAULT_SCHEDULER_CONFIG.monitoring_cron,
|
||||
status: DEFAULT_SCHEDULER_CONFIG.enable_scheduler ? "scheduled" : "disabled",
|
||||
last_run_at: null,
|
||||
last_duration_ms: null,
|
||||
last_message: null,
|
||||
last_error: null
|
||||
},
|
||||
operation_retry: {
|
||||
worker: "operation_retry",
|
||||
cron: DEFAULT_SCHEDULER_CONFIG.operation_retry_cron,
|
||||
status: DEFAULT_SCHEDULER_CONFIG.enable_scheduler ? "scheduled" : "disabled",
|
||||
last_run_at: null,
|
||||
last_duration_ms: null,
|
||||
last_message: null,
|
||||
last_error: null
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
function normalizeCronExpression(value: unknown, fallback: string) {
|
||||
if (typeof value !== "string") return fallback;
|
||||
const trimmed = value.trim();
|
||||
if (trimmed.length === 0) return fallback;
|
||||
return cron.validate(trimmed) ? trimmed : fallback;
|
||||
}
|
||||
|
||||
function normalizeSchedulerConfig(raw?: unknown): SchedulerConfig {
|
||||
const record = raw && typeof raw === "object" && !Array.isArray(raw) ? (raw as Record<string, unknown>) : {};
|
||||
|
||||
const enabled =
|
||||
typeof record.enable_scheduler === "boolean" ? record.enable_scheduler : DEFAULT_SCHEDULER_CONFIG.enable_scheduler;
|
||||
|
||||
return {
|
||||
enable_scheduler: enabled,
|
||||
billing_cron: normalizeCronExpression(record.billing_cron, DEFAULT_SCHEDULER_CONFIG.billing_cron),
|
||||
backup_cron: normalizeCronExpression(record.backup_cron, DEFAULT_SCHEDULER_CONFIG.backup_cron),
|
||||
power_schedule_cron: normalizeCronExpression(record.power_schedule_cron, DEFAULT_SCHEDULER_CONFIG.power_schedule_cron),
|
||||
monitoring_cron: normalizeCronExpression(record.monitoring_cron, DEFAULT_SCHEDULER_CONFIG.monitoring_cron),
|
||||
operation_retry_cron: normalizeCronExpression(record.operation_retry_cron, DEFAULT_SCHEDULER_CONFIG.operation_retry_cron)
|
||||
};
|
||||
}
|
||||
|
||||
function lockSettingKey(worker: WorkerKey) {
|
||||
return `scheduler_lock:${worker}`;
|
||||
}
|
||||
|
||||
function nextLeaseDeadline(from = new Date()) {
|
||||
return new Date(from.getTime() + env.SCHEDULER_LEASE_MS);
|
||||
}
|
||||
|
||||
function parseLeasePayload(value: unknown): SchedulerLeasePayload | null {
|
||||
if (!value || typeof value !== "object" || Array.isArray(value)) return null;
|
||||
const record = value as Record<string, unknown>;
|
||||
if (
|
||||
typeof record.owner_id !== "string" ||
|
||||
typeof record.lease_until !== "string" ||
|
||||
typeof record.acquired_at !== "string" ||
|
||||
typeof record.heartbeat_at !== "string" ||
|
||||
typeof record.worker !== "string"
|
||||
) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
owner_id: record.owner_id,
|
||||
lease_until: record.lease_until,
|
||||
acquired_at: record.acquired_at,
|
||||
heartbeat_at: record.heartbeat_at,
|
||||
worker: record.worker as WorkerKey
|
||||
};
|
||||
}
|
||||
|
||||
function leasePayload(worker: WorkerKey, now = new Date(), acquiredAt?: string): SchedulerLeasePayload {
|
||||
return {
|
||||
owner_id: schedulerInstanceId,
|
||||
lease_until: nextLeaseDeadline(now).toISOString(),
|
||||
acquired_at: acquiredAt ?? now.toISOString(),
|
||||
heartbeat_at: now.toISOString(),
|
||||
worker
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Attempts to acquire the cross-instance lease for `worker`, using the
 * settings table as the lock store. Returns true when this instance now
 * holds the lease.
 *
 * Concurrency safety relies on two primitives:
 *  - the unique `key` constraint on Setting: when two instances race to
 *    create the row, exactly one create succeeds; the loser's create throws
 *    and we return false, and
 *  - an optimistic compare-and-swap on `updated_at` via updateMany: if
 *    another instance wrote the row between our read and our update, the
 *    update matches nothing (count 0) and we lose the race.
 */
async function acquireWorkerLease(worker: WorkerKey) {
  const now = new Date();
  const key = lockSettingKey(worker);
  const existing = await prisma.setting.findUnique({
    where: { key },
    select: {
      id: true,
      value: true,
      updated_at: true
    }
  });

  // No lock row yet: try to create it; a unique-constraint violation means
  // another instance created it first.
  if (!existing) {
    try {
      await prisma.setting.create({
        data: {
          key,
          type: SettingType.GENERAL,
          is_encrypted: false,
          value: leasePayload(worker, now)
        }
      });
      return true;
    } catch {
      return false;
    }
  }

  const parsed = parseLeasePayload(existing.value);
  const leaseUntilMs = parsed ? Date.parse(parsed.lease_until) : 0;
  // The lease blocks us only when held by a DIFFERENT instance and not yet
  // expired; our own lease, an expired lease, or a malformed payload is
  // reclaimable.
  const activeOwner =
    parsed &&
    parsed.owner_id &&
    parsed.owner_id !== schedulerInstanceId &&
    Number.isFinite(leaseUntilMs) &&
    leaseUntilMs > now.getTime();

  if (activeOwner) {
    return false;
  }

  // CAS takeover: only applies if the row is unchanged since our read.
  // NOTE(review): the prior acquired_at is carried forward even when taking
  // over another instance's expired lease — confirm that is intentional.
  const updated = await prisma.setting.updateMany({
    where: {
      id: existing.id,
      updated_at: existing.updated_at
    },
    data: {
      value: leasePayload(worker, now, parsed?.acquired_at)
    }
  });

  return updated.count === 1;
}
|
||||
|
||||
/**
 * Extends this instance's lease for `worker` (heartbeat). Returns false
 * when the lock row is gone, the lease is owned by another instance, or the
 * row was concurrently modified (updated_at CAS failed) — callers should
 * treat false as having lost the lease.
 */
async function renewWorkerLease(worker: WorkerKey) {
  const now = new Date();
  const key = lockSettingKey(worker);
  const existing = await prisma.setting.findUnique({
    where: { key },
    select: {
      id: true,
      value: true,
      updated_at: true
    }
  });

  if (!existing) {
    return false;
  }

  // Only the current owner may renew; a malformed payload also fails.
  const parsed = parseLeasePayload(existing.value);
  if (!parsed || parsed.owner_id !== schedulerInstanceId) {
    return false;
  }

  // Optimistic CAS on updated_at; acquired_at is preserved so lease age
  // remains meaningful across heartbeats.
  const updated = await prisma.setting.updateMany({
    where: {
      id: existing.id,
      updated_at: existing.updated_at
    },
    data: {
      value: leasePayload(worker, now, parsed.acquired_at)
    }
  });

  return updated.count === 1;
}
|
||||
|
||||
/**
 * Voluntarily releases this instance's lease for `worker` by blanking the
 * owner and back-dating lease_until, so any instance can acquire it
 * immediately. A no-op when the row is missing or held by someone else.
 * Uses the same updated_at CAS as acquire/renew; a lost race is harmless —
 * the lease will simply expire on its own.
 */
async function releaseWorkerLease(worker: WorkerKey) {
  const key = lockSettingKey(worker);
  const existing = await prisma.setting.findUnique({
    where: { key },
    select: {
      id: true,
      value: true,
      updated_at: true
    }
  });

  if (!existing) {
    return;
  }

  const parsed = parseLeasePayload(existing.value);
  if (!parsed || parsed.owner_id !== schedulerInstanceId) {
    return;
  }

  const now = new Date();
  // One second in the past guarantees the lease reads as expired.
  const leaseExpired = new Date(now.getTime() - 1000).toISOString();
  await prisma.setting.updateMany({
    where: {
      id: existing.id,
      updated_at: existing.updated_at
    },
    data: {
      value: {
        ...parsed,
        owner_id: "",
        lease_until: leaseExpired,
        heartbeat_at: now.toISOString()
      }
    }
  });
}
|
||||
|
||||
function stopAllScheduledJobs() {
|
||||
const entries = Object.entries(scheduledJobs) as Array<[WorkerKey, ScheduledTask]>;
|
||||
for (const [, task] of entries) {
|
||||
try {
|
||||
task.stop();
|
||||
task.destroy();
|
||||
} catch {
|
||||
task.stop();
|
||||
}
|
||||
}
|
||||
scheduledJobs = {};
|
||||
}
|
||||
|
||||
function setWorkerDisabledState(config: SchedulerConfig) {
|
||||
schedulerState.workers.billing = {
|
||||
...schedulerState.workers.billing,
|
||||
cron: config.billing_cron,
|
||||
status: "disabled"
|
||||
};
|
||||
schedulerState.workers.backup = {
|
||||
...schedulerState.workers.backup,
|
||||
cron: config.backup_cron,
|
||||
status: "disabled"
|
||||
};
|
||||
schedulerState.workers.power = {
|
||||
...schedulerState.workers.power,
|
||||
cron: config.power_schedule_cron,
|
||||
status: "disabled"
|
||||
};
|
||||
schedulerState.workers.monitoring = {
|
||||
...schedulerState.workers.monitoring,
|
||||
cron: config.monitoring_cron,
|
||||
status: "disabled"
|
||||
};
|
||||
schedulerState.workers.operation_retry = {
|
||||
...schedulerState.workers.operation_retry,
|
||||
cron: config.operation_retry_cron,
|
||||
status: "disabled"
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Runs one scheduled execution of `worker` under both an in-process guard
 * (activeWorkerRuns) and the cross-instance lease. The outcome is recorded
 * into schedulerState; this function never throws — failures are captured
 * as the worker's "failed" status.
 */
async function runWorker(worker: WorkerKey, execute: () => Promise<string>) {
  // In-process re-entrancy guard: skip if the previous tick is still running.
  if (activeWorkerRuns.has(worker)) {
    schedulerState.workers[worker] = {
      ...schedulerState.workers[worker],
      status: "scheduled",
      last_message: "Skipped: worker already running in this process"
    };
    return;
  }

  // Cross-instance guard: only the lease holder may execute this tick.
  const acquired = await acquireWorkerLease(worker);
  if (!acquired) {
    schedulerState.workers[worker] = {
      ...schedulerState.workers[worker],
      status: "scheduled",
      last_message: "Skipped: lease held by another scheduler instance"
    };
    return;
  }

  activeWorkerRuns.add(worker);
  const startedAt = Date.now();
  schedulerState.workers[worker] = {
    ...schedulerState.workers[worker],
    status: "running",
    last_error: null
  };

  // Heartbeat at most every half lease interval (and no faster than once a
  // second) so the lease cannot expire while a long job is still running.
  const heartbeatEveryMs = Math.max(1_000, Math.min(env.SCHEDULER_HEARTBEAT_MS, Math.floor(env.SCHEDULER_LEASE_MS / 2)));
  const heartbeat = setInterval(() => {
    // Fire-and-forget: a failed renewal surfaces as a lost lease next tick.
    void renewWorkerLease(worker);
  }, heartbeatEveryMs);

  try {
    const message = await execute();
    schedulerState.workers[worker] = {
      ...schedulerState.workers[worker],
      status: "success",
      last_run_at: new Date().toISOString(),
      last_duration_ms: Date.now() - startedAt,
      last_message: message,
      last_error: null
    };
  } catch (error) {
    const message = error instanceof Error ? error.message : "Unknown scheduler error";
    schedulerState.workers[worker] = {
      ...schedulerState.workers[worker],
      status: "failed",
      last_run_at: new Date().toISOString(),
      last_duration_ms: Date.now() - startedAt,
      last_error: message
    };
  } finally {
    // Always stop heartbeating and hand the lease back, even on failure.
    clearInterval(heartbeat);
    activeWorkerRuns.delete(worker);
    await releaseWorkerLease(worker);
  }
}
|
||||
|
||||
function registerWorker(worker: WorkerKey, cronExpression: string, execute: () => Promise<string>) {
|
||||
schedulerState.workers[worker] = {
|
||||
...schedulerState.workers[worker],
|
||||
cron: cronExpression,
|
||||
status: "scheduled",
|
||||
last_error: null
|
||||
};
|
||||
|
||||
const task = cron.schedule(cronExpression, () => {
|
||||
void runWorker(worker, execute);
|
||||
});
|
||||
|
||||
scheduledJobs[worker] = task;
|
||||
}
|
||||
|
||||
async function readSchedulerConfigSetting() {
|
||||
const setting = await prisma.setting.findUnique({
|
||||
where: { key: "scheduler" },
|
||||
select: { value: true }
|
||||
});
|
||||
return normalizeSchedulerConfig(setting?.value);
|
||||
}
|
||||
|
||||
function applyRuntimeConfig(config: SchedulerConfig) {
|
||||
schedulerState.config = config;
|
||||
schedulerState.started_at = new Date().toISOString();
|
||||
}
|
||||
|
||||
/**
 * (Re)configures all scheduler workers. Stops any existing cron jobs,
 * applies the resolved config (explicit argument, else the persisted
 * setting), and — unless the scheduler is disabled — registers one cron job
 * per worker. Returns a runtime snapshot of the resulting state.
 */
export async function configureSchedulers(config?: SchedulerConfig) {
  const resolvedConfig = config ?? (await readSchedulerConfigSetting());
  applyRuntimeConfig(resolvedConfig);

  // Always tear down first so reconfiguration never double-schedules.
  stopAllScheduledJobs();

  if (!resolvedConfig.enable_scheduler) {
    setWorkerDisabledState(resolvedConfig);
    return getSchedulerRuntimeSnapshot();
  }

  // Billing: meter usage, roll unbilled usage into invoices, flag overdue ones.
  registerWorker("billing", resolvedConfig.billing_cron, async () => {
    await meterHourlyUsage();
    await generateInvoicesFromUnbilledUsage();
    await updateOverdueInvoices();
    return "Billing cycle completed";
  });

  // Backup: queue scheduled backups, drain pending ones, run snapshot jobs.
  registerWorker("backup", resolvedConfig.backup_cron, async () => {
    const queued = await processBackupSchedule();
    const backupResult = await processPendingBackups();
    const snapshotResult = await processDueSnapshotJobs();
    return `Backup queue=${queued}, backups_completed=${backupResult.completed}, backups_skipped=${backupResult.skipped}, snapshot_scanned=${snapshotResult.scanned}, snapshot_executed=${snapshotResult.executed}, snapshot_failed=${snapshotResult.failed}, snapshot_pruned=${snapshotResult.pruned}, snapshot_skipped=${snapshotResult.skipped}`;
  });

  // Power: execute VM power schedules that are due.
  registerWorker("power", resolvedConfig.power_schedule_cron, async () => {
    const result = await processDuePowerSchedules();
    return `Power schedules scanned=${result.scanned}, executed=${result.executed}, failed=${result.failed}, skipped=${result.skipped}`;
  });

  // Monitoring: run due health checks, then evaluate alert rules.
  registerWorker("monitoring", resolvedConfig.monitoring_cron, async () => {
    const checkResult = await processDueHealthChecks();
    const alertResult = await evaluateAlertRulesNow();
    return `Checks scanned=${checkResult.scanned}, executed=${checkResult.executed}, failed=${checkResult.failed}, skipped=${checkResult.skipped}; alerts evaluated=${alertResult.evaluated}, triggered=${alertResult.triggered}, resolved=${alertResult.resolved}`;
  });

  // Operation retry: re-run failed operation tasks whose retry is due.
  registerWorker("operation_retry", resolvedConfig.operation_retry_cron, async () => {
    const retryResult = await processDueOperationRetries();
    return `Retry tasks scanned=${retryResult.scanned}, executed=${retryResult.executed}, succeeded=${retryResult.succeeded}, failed=${retryResult.failed}, rescheduled=${retryResult.rescheduled}, invalid_payload=${retryResult.invalid_payload}, skipped=${retryResult.skipped}`;
  });

  return getSchedulerRuntimeSnapshot();
}
|
||||
|
||||
/**
 * Boot-time entry point: loads the persisted scheduler settings and starts
 * all cron workers (registration is skipped when the scheduler is disabled).
 */
export async function startSchedulers() {
  await configureSchedulers();
}
|
||||
|
||||
export async function reconfigureSchedulers(config?: Partial<SchedulerConfig>) {
|
||||
const persisted = await readSchedulerConfigSetting();
|
||||
const merged = normalizeSchedulerConfig({
|
||||
...persisted,
|
||||
...(config ?? {})
|
||||
});
|
||||
return configureSchedulers(merged);
|
||||
}
|
||||
|
||||
export function getSchedulerRuntimeSnapshot() {
|
||||
return {
|
||||
generated_at: new Date().toISOString(),
|
||||
...schedulerState
|
||||
};
|
||||
}
|
||||
|
||||
export function schedulerDefaults() {
|
||||
return { ...DEFAULT_SCHEDULER_CONFIG };
|
||||
}
|
||||
20
backend/src/tests/operations.test.ts
Normal file
20
backend/src/tests/operations.test.ts
Normal file
@@ -0,0 +1,20 @@
|
||||
import test from "node:test";
|
||||
import assert from "node:assert/strict";
|
||||
import { nextRunAt, validateCronExpression } from "../services/operations.service";
|
||||
|
||||
// nextRunAt must schedule strictly after the supplied reference time.
test("nextRunAt returns a future date for a valid cron expression", () => {
  const base = new Date("2026-01-01T00:00:00.000Z");
  const next = nextRunAt("*/5 * * * *", base);
  assert.ok(next instanceof Date);
  assert.ok(next.getTime() > base.getTime());
});

// Standard 5-field cron expressions (including steps/ranges/lists) pass.
test("validateCronExpression accepts valid expressions", () => {
  assert.doesNotThrow(() => validateCronExpression("0 * * * *"));
  assert.doesNotThrow(() => validateCronExpression("*/10 1-23 * * 1,3,5"));
});

// Garbage and 6-field (seconds-resolution) expressions must be rejected.
test("validateCronExpression rejects invalid expressions", () => {
  assert.throws(() => validateCronExpression("invalid-cron"));
  assert.throws(() => validateCronExpression("* * * * * *"));
});
|
||||
19
backend/src/types/express.d.ts
vendored
Normal file
19
backend/src/types/express.d.ts
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
import type { Role } from "@prisma/client";
|
||||
|
||||
declare global {
  namespace Express {
    // Decoded JWT claims attached to authenticated requests.
    interface UserToken {
      id: string;
      email: string;
      role: Role;
      // Null/undefined for platform-level (non-tenant) accounts.
      tenant_id?: string | null;
    }

    // Augments Express.Request with auth context and the raw request body
    // (rawBody is needed for webhook signature verification, which must hash
    // the exact bytes received).
    interface Request {
      user?: UserToken;
      rawBody?: string;
    }
  }
}
|
||||
|
||||
export {};
|
||||
20
backend/tsconfig.json
Normal file
20
backend/tsconfig.json
Normal file
@@ -0,0 +1,20 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2022",
|
||||
"lib": ["ES2022"],
|
||||
"module": "CommonJS",
|
||||
"moduleResolution": "Node",
|
||||
"rootDir": "src",
|
||||
"outDir": "dist",
|
||||
"strict": true,
|
||||
"esModuleInterop": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"skipLibCheck": true,
|
||||
"resolveJsonModule": true,
|
||||
"declaration": false,
|
||||
"sourceMap": true,
|
||||
"types": ["node"]
|
||||
},
|
||||
"include": ["src/**/*.ts", "src/**/*.d.ts"],
|
||||
"exclude": ["node_modules", "dist"]
|
||||
}
|
||||
21
components.json
Normal file
21
components.json
Normal file
@@ -0,0 +1,21 @@
|
||||
{
|
||||
"$schema": "https://ui.shadcn.com/schema.json",
|
||||
"style": "new-york",
|
||||
"rsc": false,
|
||||
"tsx": false,
|
||||
"tailwind": {
|
||||
"config": "tailwind.config.js",
|
||||
"css": "src/index.css",
|
||||
"baseColor": "neutral",
|
||||
"cssVariables": true,
|
||||
"prefix": ""
|
||||
},
|
||||
"aliases": {
|
||||
"components": "@/components",
|
||||
"utils": "@/lib/utils",
|
||||
"ui": "@/components/ui",
|
||||
"lib": "@/lib",
|
||||
"hooks": "@/hooks"
|
||||
},
|
||||
"iconLibrary": "lucide"
|
||||
}
|
||||
63
docker-compose.yml
Normal file
63
docker-compose.yml
Normal file
@@ -0,0 +1,63 @@
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:16-alpine
|
||||
container_name: proxpanel-postgres
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
POSTGRES_USER: proxpanel
|
||||
POSTGRES_PASSWORD: proxpanel
|
||||
POSTGRES_DB: proxpanel
|
||||
ports:
|
||||
- "5432:5432"
|
||||
volumes:
|
||||
- postgres_data:/var/lib/postgresql/data
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "pg_isready -U proxpanel -d proxpanel"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
|
||||
backend:
|
||||
build:
|
||||
context: ./backend
|
||||
container_name: proxpanel-backend
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
environment:
|
||||
NODE_ENV: production
|
||||
PORT: 8080
|
||||
DATABASE_URL: postgresql://proxpanel:proxpanel@postgres:5432/proxpanel
|
||||
JWT_SECRET: change_this_to_a_long_secret_key_please
|
||||
JWT_REFRESH_SECRET: change_this_to_another_long_secret_key
|
||||
JWT_EXPIRES_IN: 15m
|
||||
JWT_REFRESH_EXPIRES_IN: 30d
|
||||
CORS_ORIGIN: http://localhost:80
|
||||
RATE_LIMIT_WINDOW_MS: 60000
|
||||
RATE_LIMIT_MAX: 600
|
||||
AUTH_RATE_LIMIT_WINDOW_MS: 60000
|
||||
AUTH_RATE_LIMIT_MAX: 20
|
||||
ENABLE_SCHEDULER: "true"
|
||||
BILLING_CRON: "0 * * * *"
|
||||
BACKUP_CRON: "*/15 * * * *"
|
||||
POWER_SCHEDULE_CRON: "* * * * *"
|
||||
MONITORING_CRON: "*/5 * * * *"
|
||||
PROXMOX_TIMEOUT_MS: 15000
|
||||
ports:
|
||||
- "8080:8080"
|
||||
|
||||
frontend:
|
||||
build:
|
||||
context: .
|
||||
args:
|
||||
VITE_API_BASE_URL: http://localhost:8080
|
||||
container_name: proxpanel-frontend
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
- backend
|
||||
ports:
|
||||
- "80:80"
|
||||
|
||||
volumes:
|
||||
postgres_data:
|
||||
80
entities/AuditLog.json
Normal file
80
entities/AuditLog.json
Normal file
@@ -0,0 +1,80 @@
|
||||
{
|
||||
"name": "Backup",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"vm_id": {
|
||||
"type": "string",
|
||||
"title": "VM ID"
|
||||
},
|
||||
"vm_name": {
|
||||
"type": "string",
|
||||
"title": "VM Name"
|
||||
},
|
||||
"node": {
|
||||
"type": "string",
|
||||
"title": "Node"
|
||||
},
|
||||
"status": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"pending",
|
||||
"running",
|
||||
"completed",
|
||||
"failed",
|
||||
"expired"
|
||||
],
|
||||
"title": "Status"
|
||||
},
|
||||
"type": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"full",
|
||||
"incremental",
|
||||
"snapshot"
|
||||
],
|
||||
"title": "Backup Type"
|
||||
},
|
||||
"size_mb": {
|
||||
"type": "number",
|
||||
"title": "Size (MB)"
|
||||
},
|
||||
"storage": {
|
||||
"type": "string",
|
||||
"title": "Storage Location"
|
||||
},
|
||||
"schedule": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"manual",
|
||||
"daily",
|
||||
"weekly",
|
||||
"monthly"
|
||||
],
|
||||
"title": "Schedule"
|
||||
},
|
||||
"retention_days": {
|
||||
"type": "number",
|
||||
"title": "Retention Days"
|
||||
},
|
||||
"started_at": {
|
||||
"type": "string",
|
||||
"format": "date-time",
|
||||
"title": "Started At"
|
||||
},
|
||||
"completed_at": {
|
||||
"type": "string",
|
||||
"format": "date-time",
|
||||
"title": "Completed At"
|
||||
},
|
||||
"notes": {
|
||||
"type": "string",
|
||||
"title": "Notes"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"vm_id",
|
||||
"vm_name",
|
||||
"status",
|
||||
"type"
|
||||
]
|
||||
}
|
||||
72
entities/Backup.json
Normal file
72
entities/Backup.json
Normal file
@@ -0,0 +1,72 @@
|
||||
{
|
||||
"name": "BillingPlan",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"title": "Plan Name"
|
||||
},
|
||||
"slug": {
|
||||
"type": "string",
|
||||
"title": "Slug"
|
||||
},
|
||||
"description": {
|
||||
"type": "string",
|
||||
"title": "Description"
|
||||
},
|
||||
"price_monthly": {
|
||||
"type": "number",
|
||||
"title": "Monthly Price"
|
||||
},
|
||||
"price_hourly": {
|
||||
"type": "number",
|
||||
"title": "Hourly Price"
|
||||
},
|
||||
"currency": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"NGN",
|
||||
"USD",
|
||||
"GHS",
|
||||
"KES",
|
||||
"ZAR"
|
||||
],
|
||||
"title": "Currency"
|
||||
},
|
||||
"cpu_cores": {
|
||||
"type": "number",
|
||||
"title": "CPU Cores"
|
||||
},
|
||||
"ram_mb": {
|
||||
"type": "number",
|
||||
"title": "RAM (MB)"
|
||||
},
|
||||
"disk_gb": {
|
||||
"type": "number",
|
||||
"title": "Disk (GB)"
|
||||
},
|
||||
"bandwidth_gb": {
|
||||
"type": "number",
|
||||
"title": "Bandwidth (GB)"
|
||||
},
|
||||
"is_active": {
|
||||
"type": "boolean",
|
||||
"title": "Active"
|
||||
},
|
||||
"features": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"title": "Features"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"name",
|
||||
"price_monthly",
|
||||
"currency",
|
||||
"cpu_cores",
|
||||
"ram_mb",
|
||||
"disk_gb"
|
||||
]
|
||||
}
|
||||
83
entities/BillingPlan.json
Normal file
83
entities/BillingPlan.json
Normal file
@@ -0,0 +1,83 @@
|
||||
{
|
||||
"name": "Invoice",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"invoice_number": {
|
||||
"type": "string",
|
||||
"title": "Invoice Number"
|
||||
},
|
||||
"tenant_id": {
|
||||
"type": "string",
|
||||
"title": "Tenant ID"
|
||||
},
|
||||
"tenant_name": {
|
||||
"type": "string",
|
||||
"title": "Tenant Name"
|
||||
},
|
||||
"status": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"draft",
|
||||
"pending",
|
||||
"paid",
|
||||
"overdue",
|
||||
"cancelled",
|
||||
"refunded"
|
||||
],
|
||||
"title": "Status"
|
||||
},
|
||||
"amount": {
|
||||
"type": "number",
|
||||
"title": "Amount"
|
||||
},
|
||||
"currency": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"NGN",
|
||||
"USD",
|
||||
"GHS",
|
||||
"KES",
|
||||
"ZAR"
|
||||
],
|
||||
"title": "Currency"
|
||||
},
|
||||
"due_date": {
|
||||
"type": "string",
|
||||
"format": "date",
|
||||
"title": "Due Date"
|
||||
},
|
||||
"paid_date": {
|
||||
"type": "string",
|
||||
"format": "date",
|
||||
"title": "Paid Date"
|
||||
},
|
||||
"payment_provider": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"paystack",
|
||||
"flutterwave",
|
||||
"manual"
|
||||
],
|
||||
"title": "Payment Provider"
|
||||
},
|
||||
"payment_reference": {
|
||||
"type": "string",
|
||||
"title": "Payment Reference"
|
||||
},
|
||||
"line_items": {
|
||||
"type": "string",
|
||||
"title": "Line Items JSON"
|
||||
},
|
||||
"notes": {
|
||||
"type": "string",
|
||||
"title": "Notes"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"invoice_number",
|
||||
"tenant_id",
|
||||
"status",
|
||||
"amount",
|
||||
"currency"
|
||||
]
|
||||
}
|
||||
95
entities/FirewallRule.json
Normal file
95
entities/FirewallRule.json
Normal file
@@ -0,0 +1,95 @@
|
||||
{
|
||||
"name": "UsageRecord",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"vm_id": {
|
||||
"type": "string",
|
||||
"title": "VM ID"
|
||||
},
|
||||
"vm_name": {
|
||||
"type": "string",
|
||||
"title": "VM Name"
|
||||
},
|
||||
"tenant_id": {
|
||||
"type": "string",
|
||||
"title": "Tenant ID"
|
||||
},
|
||||
"tenant_name": {
|
||||
"type": "string",
|
||||
"title": "Tenant Name"
|
||||
},
|
||||
"billing_plan_id": {
|
||||
"type": "string",
|
||||
"title": "Billing Plan ID"
|
||||
},
|
||||
"plan_name": {
|
||||
"type": "string",
|
||||
"title": "Plan Name"
|
||||
},
|
||||
"hours_used": {
|
||||
"type": "number",
|
||||
"title": "Hours Used"
|
||||
},
|
||||
"price_per_hour": {
|
||||
"type": "number",
|
||||
"title": "Price Per Hour"
|
||||
},
|
||||
"currency": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"NGN",
|
||||
"USD",
|
||||
"GHS",
|
||||
"KES",
|
||||
"ZAR"
|
||||
],
|
||||
"title": "Currency"
|
||||
},
|
||||
"total_cost": {
|
||||
"type": "number",
|
||||
"title": "Total Cost"
|
||||
},
|
||||
"period_start": {
|
||||
"type": "string",
|
||||
"format": "date-time",
|
||||
"title": "Period Start"
|
||||
},
|
||||
"period_end": {
|
||||
"type": "string",
|
||||
"format": "date-time",
|
||||
"title": "Period End"
|
||||
},
|
||||
"billed": {
|
||||
"type": "boolean",
|
||||
"title": "Billed"
|
||||
},
|
||||
"invoice_id": {
|
||||
"type": "string",
|
||||
"title": "Invoice ID"
|
||||
},
|
||||
"cpu_hours": {
|
||||
"type": "number",
|
||||
"title": "CPU Hours"
|
||||
},
|
||||
"ram_gb_hours": {
|
||||
"type": "number",
|
||||
"title": "RAM GB-Hours"
|
||||
},
|
||||
"disk_gb_hours": {
|
||||
"type": "number",
|
||||
"title": "Disk GB-Hours"
|
||||
},
|
||||
"network_gb": {
|
||||
"type": "number",
|
||||
"title": "Network GB Used"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"vm_id",
|
||||
"vm_name",
|
||||
"hours_used",
|
||||
"price_per_hour",
|
||||
"currency",
|
||||
"total_cost"
|
||||
]
|
||||
}
|
||||
83
entities/Invoice.json
Normal file
83
entities/Invoice.json
Normal file
@@ -0,0 +1,83 @@
|
||||
{
|
||||
"name": "Invoice",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"invoice_number": {
|
||||
"type": "string",
|
||||
"title": "Invoice Number"
|
||||
},
|
||||
"tenant_id": {
|
||||
"type": "string",
|
||||
"title": "Tenant ID"
|
||||
},
|
||||
"tenant_name": {
|
||||
"type": "string",
|
||||
"title": "Tenant Name"
|
||||
},
|
||||
"status": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"draft",
|
||||
"pending",
|
||||
"paid",
|
||||
"overdue",
|
||||
"cancelled",
|
||||
"refunded"
|
||||
],
|
||||
"title": "Status"
|
||||
},
|
||||
"amount": {
|
||||
"type": "number",
|
||||
"title": "Amount"
|
||||
},
|
||||
"currency": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"NGN",
|
||||
"USD",
|
||||
"GHS",
|
||||
"KES",
|
||||
"ZAR"
|
||||
],
|
||||
"title": "Currency"
|
||||
},
|
||||
"due_date": {
|
||||
"type": "string",
|
||||
"format": "date",
|
||||
"title": "Due Date"
|
||||
},
|
||||
"paid_date": {
|
||||
"type": "string",
|
||||
"format": "date",
|
||||
"title": "Paid Date"
|
||||
},
|
||||
"payment_provider": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"paystack",
|
||||
"flutterwave",
|
||||
"manual"
|
||||
],
|
||||
"title": "Payment Provider"
|
||||
},
|
||||
"payment_reference": {
|
||||
"type": "string",
|
||||
"title": "Payment Reference"
|
||||
},
|
||||
"line_items": {
|
||||
"type": "string",
|
||||
"title": "Line Items JSON"
|
||||
},
|
||||
"notes": {
|
||||
"type": "string",
|
||||
"title": "Notes"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"invoice_number",
|
||||
"tenant_id",
|
||||
"status",
|
||||
"amount",
|
||||
"currency"
|
||||
]
|
||||
}
|
||||
95
entities/ProxmoxNode.json
Normal file
95
entities/ProxmoxNode.json
Normal file
@@ -0,0 +1,95 @@
|
||||
{
|
||||
"name": "Tenant",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"title": "Organization Name"
|
||||
},
|
||||
"slug": {
|
||||
"type": "string",
|
||||
"title": "Slug"
|
||||
},
|
||||
"status": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"active",
|
||||
"suspended",
|
||||
"trial",
|
||||
"cancelled"
|
||||
],
|
||||
"title": "Status"
|
||||
},
|
||||
"plan": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"starter",
|
||||
"professional",
|
||||
"enterprise",
|
||||
"custom"
|
||||
],
|
||||
"title": "Plan"
|
||||
},
|
||||
"owner_email": {
|
||||
"type": "string",
|
||||
"title": "Owner Email"
|
||||
},
|
||||
"member_emails": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"title": "Member Emails"
|
||||
},
|
||||
"vm_limit": {
|
||||
"type": "number",
|
||||
"title": "VM Limit"
|
||||
},
|
||||
"cpu_limit": {
|
||||
"type": "number",
|
||||
"title": "CPU Limit"
|
||||
},
|
||||
"ram_limit_mb": {
|
||||
"type": "number",
|
||||
"title": "RAM Limit (MB)"
|
||||
},
|
||||
"disk_limit_gb": {
|
||||
"type": "number",
|
||||
"title": "Disk Limit (GB)"
|
||||
},
|
||||
"balance": {
|
||||
"type": "number",
|
||||
"title": "Balance"
|
||||
},
|
||||
"currency": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"NGN",
|
||||
"USD",
|
||||
"GHS",
|
||||
"KES",
|
||||
"ZAR"
|
||||
],
|
||||
"title": "Currency"
|
||||
},
|
||||
"payment_provider": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"paystack",
|
||||
"flutterwave",
|
||||
"manual"
|
||||
],
|
||||
"title": "Payment Provider"
|
||||
},
|
||||
"metadata": {
|
||||
"type": "string",
|
||||
"title": "Metadata JSON"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"name",
|
||||
"status",
|
||||
"plan",
|
||||
"owner_email"
|
||||
]
|
||||
}
|
||||
88
entities/SecurityEvent.json
Normal file
88
entities/SecurityEvent.json
Normal file
@@ -0,0 +1,88 @@
|
||||
{
|
||||
"name": "FirewallRule",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"title": "Rule Name"
|
||||
},
|
||||
"direction": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"inbound",
|
||||
"outbound",
|
||||
"both"
|
||||
],
|
||||
"title": "Direction"
|
||||
},
|
||||
"action": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"allow",
|
||||
"deny",
|
||||
"rate_limit",
|
||||
"log"
|
||||
],
|
||||
"title": "Action"
|
||||
},
|
||||
"protocol": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"tcp",
|
||||
"udp",
|
||||
"icmp",
|
||||
"any"
|
||||
],
|
||||
"title": "Protocol"
|
||||
},
|
||||
"source_ip": {
|
||||
"type": "string",
|
||||
"title": "Source IP / CIDR"
|
||||
},
|
||||
"destination_ip": {
|
||||
"type": "string",
|
||||
"title": "Destination IP / CIDR"
|
||||
},
|
||||
"port_range": {
|
||||
"type": "string",
|
||||
"title": "Port Range"
|
||||
},
|
||||
"priority": {
|
||||
"type": "number",
|
||||
"title": "Priority"
|
||||
},
|
||||
"enabled": {
|
||||
"type": "boolean",
|
||||
"title": "Enabled"
|
||||
},
|
||||
"applies_to": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"all_nodes",
|
||||
"all_vms",
|
||||
"specific_node",
|
||||
"specific_vm"
|
||||
],
|
||||
"title": "Applies To"
|
||||
},
|
||||
"target_id": {
|
||||
"type": "string",
|
||||
"title": "Target Node/VM ID"
|
||||
},
|
||||
"hit_count": {
|
||||
"type": "number",
|
||||
"title": "Hit Count"
|
||||
},
|
||||
"description": {
|
||||
"type": "string",
|
||||
"title": "Description"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"name",
|
||||
"direction",
|
||||
"action",
|
||||
"protocol",
|
||||
"enabled"
|
||||
]
|
||||
}
|
||||
109
entities/Tenant.json
Normal file
109
entities/Tenant.json
Normal file
@@ -0,0 +1,109 @@
|
||||
{
|
||||
"name": "VirtualMachine",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"title": "VM Name"
|
||||
},
|
||||
"vmid": {
|
||||
"type": "number",
|
||||
"title": "VM ID"
|
||||
},
|
||||
"status": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"running",
|
||||
"stopped",
|
||||
"paused",
|
||||
"migrating",
|
||||
"error"
|
||||
],
|
||||
"title": "Status"
|
||||
},
|
||||
"type": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"qemu",
|
||||
"lxc"
|
||||
],
|
||||
"title": "Type"
|
||||
},
|
||||
"node": {
|
||||
"type": "string",
|
||||
"title": "Proxmox Node"
|
||||
},
|
||||
"tenant_id": {
|
||||
"type": "string",
|
||||
"title": "Tenant ID"
|
||||
},
|
||||
"os_template": {
|
||||
"type": "string",
|
||||
"title": "OS Template"
|
||||
},
|
||||
"cpu_cores": {
|
||||
"type": "number",
|
||||
"title": "CPU Cores"
|
||||
},
|
||||
"ram_mb": {
|
||||
"type": "number",
|
||||
"title": "RAM (MB)"
|
||||
},
|
||||
"disk_gb": {
|
||||
"type": "number",
|
||||
"title": "Disk (GB)"
|
||||
},
|
||||
"ip_address": {
|
||||
"type": "string",
|
||||
"title": "IP Address"
|
||||
},
|
||||
"cpu_usage": {
|
||||
"type": "number",
|
||||
"title": "CPU Usage %"
|
||||
},
|
||||
"ram_usage": {
|
||||
"type": "number",
|
||||
"title": "RAM Usage %"
|
||||
},
|
||||
"disk_usage": {
|
||||
"type": "number",
|
||||
"title": "Disk Usage %"
|
||||
},
|
||||
"network_in": {
|
||||
"type": "number",
|
||||
"title": "Network In (MB)"
|
||||
},
|
||||
"network_out": {
|
||||
"type": "number",
|
||||
"title": "Network Out (MB)"
|
||||
},
|
||||
"uptime_seconds": {
|
||||
"type": "number",
|
||||
"title": "Uptime (seconds)"
|
||||
},
|
||||
"billing_plan_id": {
|
||||
"type": "string",
|
||||
"title": "Billing Plan ID"
|
||||
},
|
||||
"tags": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"title": "Tags"
|
||||
},
|
||||
"notes": {
|
||||
"type": "string",
|
||||
"title": "Notes"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"name",
|
||||
"status",
|
||||
"type",
|
||||
"node",
|
||||
"cpu_cores",
|
||||
"ram_mb",
|
||||
"disk_gb"
|
||||
]
|
||||
}
|
||||
37
entities/UsageRecord.json
Normal file
37
entities/UsageRecord.json
Normal file
@@ -0,0 +1,37 @@
|
||||
import { useEffect } from 'react';
|
||||
import { Outlet } from 'react-router-dom';
|
||||
import { useAuth } from '@/lib/AuthContext';
|
||||
import UserNotRegisteredError from '@/components/UserNotRegisteredError';
|
||||
|
||||
const DefaultFallback = () => (
|
||||
<div className="fixed inset-0 flex items-center justify-center">
|
||||
<div className="w-8 h-8 border-4 border-slate-200 border-t-slate-800 rounded-full animate-spin"></div>
|
||||
</div>
|
||||
);
|
||||
|
||||
export default function ProtectedRoute({ fallback = <DefaultFallback />, unauthenticatedElement }) {
|
||||
const { isAuthenticated, isLoadingAuth, authChecked, authError, checkUserAuth } = useAuth();
|
||||
|
||||
useEffect(() => {
|
||||
if (!authChecked && !isLoadingAuth) {
|
||||
checkUserAuth();
|
||||
}
|
||||
}, [authChecked, isLoadingAuth, checkUserAuth]);
|
||||
|
||||
if (isLoadingAuth || !authChecked) {
|
||||
return fallback;
|
||||
}
|
||||
|
||||
if (authError) {
|
||||
if (authError.type === 'user_not_registered') {
|
||||
return <UserNotRegisteredError />;
|
||||
}
|
||||
return unauthenticatedElement;
|
||||
}
|
||||
|
||||
if (!isAuthenticated) {
|
||||
return unauthenticatedElement;
|
||||
}
|
||||
|
||||
return <Outlet />;
|
||||
}
|
||||
63
entities/VirtualMachine.json
Normal file
63
entities/VirtualMachine.json
Normal file
@@ -0,0 +1,63 @@
|
||||
{
|
||||
"name": "AuditLog",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"action": {
|
||||
"type": "string",
|
||||
"title": "Action"
|
||||
},
|
||||
"resource_type": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"vm",
|
||||
"tenant",
|
||||
"user",
|
||||
"backup",
|
||||
"invoice",
|
||||
"node",
|
||||
"system"
|
||||
],
|
||||
"title": "Resource Type"
|
||||
},
|
||||
"resource_id": {
|
||||
"type": "string",
|
||||
"title": "Resource ID"
|
||||
},
|
||||
"resource_name": {
|
||||
"type": "string",
|
||||
"title": "Resource Name"
|
||||
},
|
||||
"actor_email": {
|
||||
"type": "string",
|
||||
"title": "Actor Email"
|
||||
},
|
||||
"actor_role": {
|
||||
"type": "string",
|
||||
"title": "Actor Role"
|
||||
},
|
||||
"severity": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"info",
|
||||
"warning",
|
||||
"error",
|
||||
"critical"
|
||||
],
|
||||
"title": "Severity"
|
||||
},
|
||||
"details": {
|
||||
"type": "string",
|
||||
"title": "Details JSON"
|
||||
},
|
||||
"ip_address": {
|
||||
"type": "string",
|
||||
"title": "IP Address"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"action",
|
||||
"resource_type",
|
||||
"actor_email",
|
||||
"severity"
|
||||
]
|
||||
}
|
||||
42
eslint.config.js
Normal file
42
eslint.config.js
Normal file
@@ -0,0 +1,42 @@
|
||||
import globals from "globals";
|
||||
import pluginJs from "@eslint/js";
|
||||
import pluginReact from "eslint-plugin-react";
|
||||
import pluginReactHooks from "eslint-plugin-react-hooks";
|
||||
import pluginUnusedImports from "eslint-plugin-unused-imports";
|
||||
import reactRefresh from "eslint-plugin-react-refresh";
|
||||
|
||||
export default [
|
||||
{ ignores: ["dist", "node_modules", "backend/dist"] },
|
||||
{
|
||||
files: ["src/**/*.{js,jsx}"],
|
||||
languageOptions: {
|
||||
ecmaVersion: 2022,
|
||||
sourceType: "module",
|
||||
globals: globals.browser,
|
||||
parserOptions: { ecmaFeatures: { jsx: true } }
|
||||
},
|
||||
plugins: {
|
||||
react: pluginReact,
|
||||
"react-hooks": pluginReactHooks,
|
||||
"react-refresh": reactRefresh,
|
||||
"unused-imports": pluginUnusedImports
|
||||
},
|
||||
settings: {
|
||||
react: { version: "detect" }
|
||||
},
|
||||
rules: {
|
||||
...pluginJs.configs.recommended.rules,
|
||||
...pluginReact.configs.recommended.rules,
|
||||
...pluginReactHooks.configs.recommended.rules,
|
||||
"react/react-in-jsx-scope": "off",
|
||||
"react/prop-types": "off",
|
||||
"no-unused-vars": "off",
|
||||
"unused-imports/no-unused-imports": "error",
|
||||
"unused-imports/no-unused-vars": [
|
||||
"warn",
|
||||
{ vars: "all", varsIgnorePattern: "^_", args: "after-used", argsIgnorePattern: "^_" }
|
||||
],
|
||||
"react-refresh/only-export-components": ["warn", { allowConstantExport: true }]
|
||||
}
|
||||
}
|
||||
];
|
||||
12
index.html
Normal file
12
index.html
Normal file
@@ -0,0 +1,12 @@
|
||||
<!doctype html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>ProxPanel</title>
|
||||
</head>
|
||||
<body>
|
||||
<div id="root"></div>
|
||||
<script type="module" src="/src/main.jsx"></script>
|
||||
</body>
|
||||
</html>
|
||||
76
infra/deploy/docker-compose.production.yml
Normal file
76
infra/deploy/docker-compose.production.yml
Normal file
@@ -0,0 +1,76 @@
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:16-alpine
|
||||
container_name: proxpanel-postgres
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
POSTGRES_USER: ${POSTGRES_USER:-proxpanel}
|
||||
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
|
||||
POSTGRES_DB: ${POSTGRES_DB:-proxpanel}
|
||||
volumes:
|
||||
- postgres_data:/var/lib/postgresql/data
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-proxpanel} -d ${POSTGRES_DB:-proxpanel}"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 10
|
||||
|
||||
backend:
|
||||
build:
|
||||
context: ../../backend
|
||||
container_name: proxpanel-backend
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
environment:
|
||||
NODE_ENV: production
|
||||
PORT: 8080
|
||||
DATABASE_URL: postgresql://${POSTGRES_USER:-proxpanel}:${POSTGRES_PASSWORD}@postgres:5432/${POSTGRES_DB:-proxpanel}
|
||||
JWT_SECRET: ${JWT_SECRET}
|
||||
JWT_REFRESH_SECRET: ${JWT_REFRESH_SECRET}
|
||||
JWT_EXPIRES_IN: ${JWT_EXPIRES_IN:-15m}
|
||||
JWT_REFRESH_EXPIRES_IN: ${JWT_REFRESH_EXPIRES_IN:-30d}
|
||||
CORS_ORIGIN: ${CORS_ORIGIN}
|
||||
RATE_LIMIT_WINDOW_MS: ${RATE_LIMIT_WINDOW_MS:-60000}
|
||||
RATE_LIMIT_MAX: ${RATE_LIMIT_MAX:-600}
|
||||
AUTH_RATE_LIMIT_WINDOW_MS: ${AUTH_RATE_LIMIT_WINDOW_MS:-60000}
|
||||
AUTH_RATE_LIMIT_MAX: ${AUTH_RATE_LIMIT_MAX:-20}
|
||||
ENABLE_SCHEDULER: ${ENABLE_SCHEDULER:-true}
|
||||
BILLING_CRON: ${BILLING_CRON:-0 * * * *}
|
||||
BACKUP_CRON: ${BACKUP_CRON:-*/15 * * * *}
|
||||
POWER_SCHEDULE_CRON: ${POWER_SCHEDULE_CRON:-* * * * *}
|
||||
MONITORING_CRON: ${MONITORING_CRON:-*/5 * * * *}
|
||||
PROXMOX_TIMEOUT_MS: ${PROXMOX_TIMEOUT_MS:-15000}
|
||||
ADMIN_EMAIL: ${ADMIN_EMAIL}
|
||||
ADMIN_PASSWORD: ${ADMIN_PASSWORD}
|
||||
expose:
|
||||
- "8080"
|
||||
ports:
|
||||
- "127.0.0.1:${BACKEND_PORT:-8080}:8080"
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "wget -q -O - http://localhost:8080/api/health >/dev/null 2>&1 || exit 1"]
|
||||
interval: 15s
|
||||
timeout: 5s
|
||||
retries: 10
|
||||
|
||||
frontend:
|
||||
build:
|
||||
context: ../../
|
||||
args:
|
||||
VITE_API_BASE_URL: ""
|
||||
container_name: proxpanel-frontend
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
backend:
|
||||
condition: service_healthy
|
||||
ports:
|
||||
- "127.0.0.1:${FRONTEND_PORT:-80}:80"
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "nginx -t >/dev/null 2>&1 || exit 1"]
|
||||
interval: 15s
|
||||
timeout: 5s
|
||||
retries: 10
|
||||
|
||||
volumes:
|
||||
postgres_data:
|
||||
345
infra/deploy/install-proxpanel.sh
Normal file
345
infra/deploy/install-proxpanel.sh
Normal file
@@ -0,0 +1,345 @@
|
||||
#!/usr/bin/env bash
|
||||
set -Eeuo pipefail
|
||||
|
||||
APP_DIR="/opt/proxpanel"
|
||||
REPO_URL=""
|
||||
BRANCH="main"
|
||||
PUBLIC_URL=""
|
||||
ADMIN_EMAIL="admin@proxpanel.local"
|
||||
ADMIN_PASSWORD=""
|
||||
POSTGRES_PASSWORD=""
|
||||
FRONTEND_PORT="80"
|
||||
BACKEND_PORT="8080"
|
||||
CONFIGURE_UFW="false"
|
||||
|
||||
log() {
|
||||
printf '\n[%s] %s\n' "$(date +'%Y-%m-%d %H:%M:%S')" "$*"
|
||||
}
|
||||
|
||||
die() {
|
||||
printf '\n[ERROR] %s\n' "$*" >&2
|
||||
exit 1
|
||||
}
|
||||
|
||||
usage() {
|
||||
cat <<'EOF'
|
||||
Usage:
|
||||
bash infra/deploy/install-proxpanel.sh [options]
|
||||
|
||||
Options:
|
||||
--repo-url <url> Git repository URL (required if app is not already in /opt/proxpanel)
|
||||
--branch <name> Git branch to deploy (default: main)
|
||||
--app-dir <path> Deployment directory (default: /opt/proxpanel)
|
||||
--public-url <url> Public base URL (example: http://102.69.243.167)
|
||||
--admin-email <email> Initial admin email (default: admin@proxpanel.local)
|
||||
--admin-password <pass> Initial admin password (auto-generated if omitted)
|
||||
--postgres-password <pass> Postgres password (auto-generated if omitted)
|
||||
--frontend-port <port> Public frontend port (default: 80)
|
||||
--backend-port <port> Local backend bind port (default: 8080 on 127.0.0.1)
|
||||
--configure-ufw Allow OpenSSH + frontend port via UFW (if available)
|
||||
-h, --help Show this help
|
||||
|
||||
Examples:
|
||||
bash infra/deploy/install-proxpanel.sh \
|
||||
--repo-url https://github.com/your-org/proxpanel.git \
|
||||
--branch main \
|
||||
--public-url http://102.69.243.167 \
|
||||
--admin-email admin@yourdomain.com
|
||||
EOF
|
||||
}
|
||||
|
||||
random_secret() {
|
||||
openssl rand -base64 72 | tr -d '\n'
|
||||
}
|
||||
|
||||
random_db_password() {
|
||||
# URL-safe hex string to avoid DATABASE_URL parsing issues.
|
||||
openssl rand -hex 32 | tr -d '\n'
|
||||
}
|
||||
|
||||
parse_args() {
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case "$1" in
|
||||
--repo-url)
|
||||
REPO_URL="${2:-}"
|
||||
shift 2
|
||||
;;
|
||||
--branch)
|
||||
BRANCH="${2:-}"
|
||||
shift 2
|
||||
;;
|
||||
--app-dir)
|
||||
APP_DIR="${2:-}"
|
||||
shift 2
|
||||
;;
|
||||
--public-url)
|
||||
PUBLIC_URL="${2:-}"
|
||||
shift 2
|
||||
;;
|
||||
--admin-email)
|
||||
ADMIN_EMAIL="${2:-}"
|
||||
shift 2
|
||||
;;
|
||||
--admin-password)
|
||||
ADMIN_PASSWORD="${2:-}"
|
||||
shift 2
|
||||
;;
|
||||
--postgres-password)
|
||||
POSTGRES_PASSWORD="${2:-}"
|
||||
shift 2
|
||||
;;
|
||||
--frontend-port)
|
||||
FRONTEND_PORT="${2:-}"
|
||||
shift 2
|
||||
;;
|
||||
--backend-port)
|
||||
BACKEND_PORT="${2:-}"
|
||||
shift 2
|
||||
;;
|
||||
--configure-ufw)
|
||||
CONFIGURE_UFW="true"
|
||||
shift
|
||||
;;
|
||||
-h|--help)
|
||||
usage
|
||||
exit 0
|
||||
;;
|
||||
*)
|
||||
die "Unknown argument: $1"
|
||||
;;
|
||||
esac
|
||||
done
|
||||
}
|
||||
|
||||
ensure_root() {
|
||||
if [[ "${EUID:-$(id -u)}" -ne 0 ]]; then
|
||||
die "Run as root (or with sudo)."
|
||||
fi
|
||||
}
|
||||
|
||||
install_prereqs() {
|
||||
log "Installing OS prerequisites..."
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
apt-get update -y
|
||||
apt-get install -y ca-certificates curl git openssl jq rsync
|
||||
}
|
||||
|
||||
install_docker_if_needed() {
|
||||
if command -v docker >/dev/null 2>&1; then
|
||||
log "Docker already installed."
|
||||
else
|
||||
log "Installing Docker..."
|
||||
curl -fsSL https://get.docker.com | sh
|
||||
fi
|
||||
|
||||
systemctl enable docker >/dev/null 2>&1 || true
|
||||
systemctl start docker
|
||||
|
||||
docker compose version >/dev/null 2>&1 || die "Docker Compose plugin is required but not available."
|
||||
}
|
||||
|
||||
sync_source() {
|
||||
if [[ -d "${APP_DIR}/.git" ]]; then
|
||||
log "Updating existing repository in ${APP_DIR}..."
|
||||
git -C "${APP_DIR}" fetch --all --prune
|
||||
git -C "${APP_DIR}" checkout "${BRANCH}"
|
||||
git -C "${APP_DIR}" pull --ff-only origin "${BRANCH}"
|
||||
return
|
||||
fi
|
||||
|
||||
if [[ -n "${REPO_URL}" ]]; then
|
||||
log "Cloning repository into ${APP_DIR}..."
|
||||
mkdir -p "$(dirname "${APP_DIR}")"
|
||||
git clone --branch "${BRANCH}" --single-branch "${REPO_URL}" "${APP_DIR}"
|
||||
return
|
||||
fi
|
||||
|
||||
if [[ -f "./package.json" && -d "./backend" && -d "./infra" ]]; then
|
||||
log "No repo URL provided; copying current directory into ${APP_DIR}..."
|
||||
mkdir -p "${APP_DIR}"
|
||||
rsync -a --delete --exclude .git --exclude node_modules --exclude backend/node_modules ./ "${APP_DIR}/"
|
||||
return
|
||||
fi
|
||||
|
||||
die "Could not determine source. Provide --repo-url or run this script from project root."
|
||||
}
|
||||
|
||||
validate_project_layout() {
|
||||
[[ -f "${APP_DIR}/infra/deploy/docker-compose.production.yml" ]] || die "Missing infra/deploy/docker-compose.production.yml"
|
||||
[[ -f "${APP_DIR}/backend/Dockerfile" ]] || die "Missing backend/Dockerfile"
|
||||
[[ -f "${APP_DIR}/Dockerfile" ]] || die "Missing frontend Dockerfile"
|
||||
}
|
||||
|
||||
infer_public_url() {
|
||||
if [[ -n "${PUBLIC_URL}" ]]; then
|
||||
return
|
||||
fi
|
||||
|
||||
local ip
|
||||
ip="$(hostname -I | awk '{print $1}')"
|
||||
[[ -n "${ip}" ]] || ip="127.0.0.1"
|
||||
PUBLIC_URL="http://${ip}"
|
||||
}
|
||||
|
||||
write_env_file() {
|
||||
[[ -n "${ADMIN_PASSWORD}" ]] || ADMIN_PASSWORD="$(openssl rand -base64 18 | tr -d '\n' | tr '/+' 'ab')A9!"
|
||||
[[ -n "${POSTGRES_PASSWORD}" ]] || POSTGRES_PASSWORD="$(random_db_password)"
|
||||
|
||||
local jwt_secret jwt_refresh_secret env_file
|
||||
jwt_secret="$(random_secret)"
|
||||
jwt_refresh_secret="$(random_secret)"
|
||||
env_file="${APP_DIR}/.env.production"
|
||||
|
||||
log "Writing production env file..."
|
||||
umask 077
|
||||
cat > "${env_file}" <<EOF
|
||||
POSTGRES_USER=proxpanel
|
||||
POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
|
||||
POSTGRES_DB=proxpanel
|
||||
|
||||
JWT_SECRET=${jwt_secret}
|
||||
JWT_REFRESH_SECRET=${jwt_refresh_secret}
|
||||
JWT_EXPIRES_IN=15m
|
||||
JWT_REFRESH_EXPIRES_IN=30d
|
||||
|
||||
CORS_ORIGIN=${PUBLIC_URL}
|
||||
ADMIN_EMAIL=${ADMIN_EMAIL}
|
||||
ADMIN_PASSWORD=${ADMIN_PASSWORD}
|
||||
|
||||
ENABLE_SCHEDULER=true
|
||||
RATE_LIMIT_WINDOW_MS=60000
|
||||
RATE_LIMIT_MAX=600
|
||||
AUTH_RATE_LIMIT_WINDOW_MS=60000
|
||||
AUTH_RATE_LIMIT_MAX=20
|
||||
PROXMOX_TIMEOUT_MS=15000
|
||||
|
||||
FRONTEND_PORT=${FRONTEND_PORT}
|
||||
BACKEND_PORT=${BACKEND_PORT}
|
||||
EOF
|
||||
}
|
||||
|
||||
deploy_stack() {
|
||||
log "Building and starting production stack..."
|
||||
cd "${APP_DIR}"
|
||||
docker compose --env-file .env.production -f infra/deploy/docker-compose.production.yml pull || true
|
||||
docker compose --env-file .env.production -f infra/deploy/docker-compose.production.yml up -d --build
|
||||
}
|
||||
|
||||
wait_for_health() {
|
||||
log "Waiting for API health..."
|
||||
local max_tries=60
|
||||
local i
|
||||
for ((i=1; i<=max_tries; i++)); do
|
||||
if curl -fsS "http://127.0.0.1:${BACKEND_PORT}/api/health" >/dev/null 2>&1; then
|
||||
log "Backend is healthy."
|
||||
return
|
||||
fi
|
||||
sleep 3
|
||||
done
|
||||
die "Backend health check failed."
|
||||
}
|
||||
|
||||
apply_database_schema() {
|
||||
log "Applying database schema..."
|
||||
cd "${APP_DIR}"
|
||||
|
||||
if docker compose --env-file .env.production -f infra/deploy/docker-compose.production.yml exec -T backend npm run prisma:deploy; then
|
||||
log "Schema migration deploy completed."
|
||||
return
|
||||
fi
|
||||
|
||||
log "prisma:deploy failed or no migrations found. Falling back to prisma:push..."
|
||||
docker compose --env-file .env.production -f infra/deploy/docker-compose.production.yml exec -T backend npm run prisma:push
|
||||
log "Schema push completed."
|
||||
}
|
||||
|
||||
seed_database() {
|
||||
log "Running Prisma seed (idempotent)..."
|
||||
cd "${APP_DIR}"
|
||||
docker compose --env-file .env.production -f infra/deploy/docker-compose.production.yml exec -T backend npm run prisma:seed
|
||||
}
|
||||
|
||||
verify_login() {
|
||||
log "Verifying login endpoint with seeded admin credentials..."
|
||||
local status
|
||||
status="$(curl -s -o /tmp/proxpanel-login.json -w "%{http_code}" \
|
||||
-X POST "http://127.0.0.1:${FRONTEND_PORT}/api/auth/login" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d "{\"email\":\"${ADMIN_EMAIL}\",\"password\":\"${ADMIN_PASSWORD}\"}")"
|
||||
|
||||
if [[ "${status}" != "200" ]]; then
|
||||
cat /tmp/proxpanel-login.json >&2 || true
|
||||
die "Login verification failed with status ${status}."
|
||||
fi
|
||||
|
||||
jq -e '.token and .refresh_token' /tmp/proxpanel-login.json >/dev/null 2>&1 || die "Login response missing token."
|
||||
log "Login verification passed."
|
||||
}
|
||||
|
||||
configure_ufw_if_requested() {
|
||||
if [[ "${CONFIGURE_UFW}" != "true" ]]; then
|
||||
return
|
||||
fi
|
||||
|
||||
if ! command -v ufw >/dev/null 2>&1; then
|
||||
log "UFW not installed; skipping firewall configuration."
|
||||
return
|
||||
fi
|
||||
|
||||
log "Configuring UFW rules..."
|
||||
ufw allow OpenSSH >/dev/null 2>&1 || true
|
||||
ufw allow "${FRONTEND_PORT}/tcp" >/dev/null 2>&1 || true
|
||||
ufw --force enable >/dev/null 2>&1 || true
|
||||
}
|
||||
|
||||
write_summary() {
|
||||
local summary_file="/root/proxpanel-install-summary.txt"
|
||||
cat > "${summary_file}" <<EOF
|
||||
ProxPanel production deployment completed.
|
||||
|
||||
Public URL: ${PUBLIC_URL}
|
||||
Server IP: $(echo "${PUBLIC_URL}" | sed -E 's#^https?://##')
|
||||
Admin Email: ${ADMIN_EMAIL}
|
||||
Admin Password: ${ADMIN_PASSWORD}
|
||||
|
||||
Deployment Directory: ${APP_DIR}
|
||||
Compose File: infra/deploy/docker-compose.production.yml
|
||||
Env File: ${APP_DIR}/.env.production
|
||||
|
||||
Quick checks:
|
||||
curl -fsS http://127.0.0.1:${BACKEND_PORT}/api/health
|
||||
docker compose --env-file .env.production -f infra/deploy/docker-compose.production.yml ps
|
||||
|
||||
IMPORTANT:
|
||||
1) Change admin password immediately after first login.
|
||||
2) Configure Proxmox credentials in Settings -> Proxmox.
|
||||
3) Add TLS/reverse-proxy (Nginx/Caddy) if exposing publicly.
|
||||
EOF
|
||||
chmod 600 "${summary_file}"
|
||||
log "Summary saved to ${summary_file}"
|
||||
}
|
||||
|
||||
main() {
|
||||
parse_args "$@"
|
||||
ensure_root
|
||||
install_prereqs
|
||||
install_docker_if_needed
|
||||
sync_source
|
||||
validate_project_layout
|
||||
infer_public_url
|
||||
write_env_file
|
||||
deploy_stack
|
||||
wait_for_health
|
||||
apply_database_schema
|
||||
seed_database
|
||||
verify_login
|
||||
configure_ufw_if_requested
|
||||
write_summary
|
||||
|
||||
log "Deployment finished successfully."
|
||||
printf '\nOpen: %s\n' "${PUBLIC_URL}"
|
||||
printf 'Admin email: %s\n' "${ADMIN_EMAIL}"
|
||||
printf 'Admin password: %s\n' "${ADMIN_PASSWORD}"
|
||||
}
|
||||
|
||||
main "$@"
|
||||
29
infra/nginx/default.conf
Normal file
29
infra/nginx/default.conf
Normal file
@@ -0,0 +1,29 @@
|
||||
# Frontend vhost: serves the built SPA bundle and proxies API traffic
# to the backend container (compose service "backend") on port 8080.
server {
    listen 80;
    server_name _;
    root /usr/share/nginx/html;
    index index.html;

    # Forward all API calls to the backend, preserving client identity
    # via X-Forwarded-* headers for logging/auth behind the proxy.
    location /api/ {
        proxy_pass http://backend:8080/api/;
        proxy_http_version 1.1;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
        proxy_read_timeout 90s;
    }

    # Exact-match health endpoint. NOTE(review): duplicates the /api/
    # proxy config (minus read timeout) — presumably split so it can be
    # tuned independently; confirm intent before merging the two.
    location = /api/health {
        proxy_pass http://backend:8080/api/health;
        proxy_http_version 1.1;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
    }

    # SPA fallback: unknown paths serve index.html so client-side
    # routing (react-router) can resolve the URL.
    location / {
        try_files $uri /index.html;
    }
}
|
||||
16
jsconfig.json
Normal file
16
jsconfig.json
Normal file
@@ -0,0 +1,16 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2020",
|
||||
"module": "ESNext",
|
||||
"moduleResolution": "Bundler",
|
||||
"jsx": "react-jsx",
|
||||
"allowJs": true,
|
||||
"checkJs": false,
|
||||
"baseUrl": ".",
|
||||
"paths": {
|
||||
"@/*": ["src/*"]
|
||||
},
|
||||
"types": ["vite/client"]
|
||||
},
|
||||
"include": ["src", "vite.config.js"]
|
||||
}
|
||||
8290
package-lock.json
generated
Normal file
8290
package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
85
package.json
Normal file
85
package.json
Normal file
@@ -0,0 +1,85 @@
|
||||
{
|
||||
"name": "proxpanel-app",
|
||||
"private": true,
|
||||
"version": "1.0.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
"dev:api": "npm --prefix backend run dev",
|
||||
"build": "vite build",
|
||||
"build:api": "npm --prefix backend run build",
|
||||
"lint": "eslint . --quiet",
|
||||
"preview": "vite preview"
|
||||
},
|
||||
"dependencies": {
|
||||
"@hookform/resolvers": "^4.1.2",
|
||||
"@radix-ui/react-accordion": "^1.2.3",
|
||||
"@radix-ui/react-alert-dialog": "^1.1.6",
|
||||
"@radix-ui/react-aspect-ratio": "^1.1.2",
|
||||
"@radix-ui/react-avatar": "^1.1.3",
|
||||
"@radix-ui/react-checkbox": "^1.1.4",
|
||||
"@radix-ui/react-collapsible": "^1.1.3",
|
||||
"@radix-ui/react-context-menu": "^2.2.6",
|
||||
"@radix-ui/react-dialog": "^1.1.6",
|
||||
"@radix-ui/react-dropdown-menu": "^2.1.6",
|
||||
"@radix-ui/react-hover-card": "^1.1.6",
|
||||
"@radix-ui/react-label": "^2.1.2",
|
||||
"@radix-ui/react-menubar": "^1.1.6",
|
||||
"@radix-ui/react-navigation-menu": "^1.2.5",
|
||||
"@radix-ui/react-popover": "^1.1.6",
|
||||
"@radix-ui/react-progress": "^1.1.2",
|
||||
"@radix-ui/react-radio-group": "^1.2.3",
|
||||
"@radix-ui/react-scroll-area": "^1.2.3",
|
||||
"@radix-ui/react-select": "^2.1.6",
|
||||
"@radix-ui/react-separator": "^1.1.2",
|
||||
"@radix-ui/react-slider": "^1.2.3",
|
||||
"@radix-ui/react-slot": "^1.1.2",
|
||||
"@radix-ui/react-switch": "^1.1.3",
|
||||
"@radix-ui/react-tabs": "^1.1.3",
|
||||
"@radix-ui/react-toast": "^1.2.2",
|
||||
"@radix-ui/react-toggle": "^1.1.2",
|
||||
"@radix-ui/react-toggle-group": "^1.1.2",
|
||||
"@radix-ui/react-tooltip": "^1.1.8",
|
||||
"@tanstack/react-query": "^5.84.1",
|
||||
"axios": "^1.9.0",
|
||||
"class-variance-authority": "^0.7.1",
|
||||
"clsx": "^2.1.1",
|
||||
"cmdk": "^1.0.0",
|
||||
"date-fns": "^3.6.0",
|
||||
"embla-carousel-react": "^8.5.2",
|
||||
"framer-motion": "^11.16.4",
|
||||
"input-otp": "^1.4.2",
|
||||
"lucide-react": "^0.475.0",
|
||||
"moment": "^2.30.1",
|
||||
"next-themes": "^0.4.4",
|
||||
"react": "^18.2.0",
|
||||
"react-day-picker": "^8.10.1",
|
||||
"react-dom": "^18.2.0",
|
||||
"react-hook-form": "^7.54.2",
|
||||
"react-resizable-panels": "^2.1.7",
|
||||
"react-router-dom": "^6.26.0",
|
||||
"recharts": "^2.15.4",
|
||||
"sonner": "^2.0.1",
|
||||
"tailwind-merge": "^3.0.2",
|
||||
"tailwindcss-animate": "^1.0.7",
|
||||
"vaul": "^1.1.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/js": "^9.19.0",
|
||||
"@types/node": "^22.13.5",
|
||||
"@types/react": "^18.2.66",
|
||||
"@types/react-dom": "^18.2.22",
|
||||
"@vitejs/plugin-react": "^4.3.4",
|
||||
"autoprefixer": "^10.4.20",
|
||||
"eslint": "^9.19.0",
|
||||
"eslint-plugin-react": "^7.37.4",
|
||||
"eslint-plugin-react-hooks": "^5.0.0",
|
||||
"eslint-plugin-react-refresh": "^0.4.18",
|
||||
"eslint-plugin-unused-imports": "^4.3.0",
|
||||
"globals": "^15.14.0",
|
||||
"postcss": "^8.5.3",
|
||||
"tailwindcss": "^3.4.17",
|
||||
"typescript": "^5.8.2",
|
||||
"vite": "^6.1.0"
|
||||
}
|
||||
}
|
||||
6
postcss.config.js
Normal file
6
postcss.config.js
Normal file
@@ -0,0 +1,6 @@
|
||||
// PostCSS pipeline: run Tailwind first, then Autoprefixer to add
// vendor prefixes to the generated utility CSS.
export default {
  plugins: {
    tailwindcss: {},
    autoprefixer: {}
  }
};
|
||||
83
src/App.jsx
Normal file
83
src/App.jsx
Normal file
@@ -0,0 +1,83 @@
|
||||
import { Toaster } from "@/components/ui/toaster"
|
||||
import { QueryClientProvider } from '@tanstack/react-query'
|
||||
import { queryClientInstance } from '@/lib/query-client'
|
||||
import { BrowserRouter as Router, Navigate, Route, Routes } from 'react-router-dom';
|
||||
import PageNotFound from './lib/PageNotFound';
|
||||
import { AuthProvider, useAuth } from '@/lib/AuthContext';
|
||||
import UserNotRegisteredError from '@/components/UserNotRegisteredError';
|
||||
import AppLayout from './components/layout/AppLayout';
|
||||
import Login from './pages/Login';
|
||||
import Dashboard from './pages/Dashboard';
|
||||
import VirtualMachines from './pages/VirtualMachines';
|
||||
import Nodes from './pages/Nodes';
|
||||
import Tenants from './pages/Tenants';
|
||||
import Billing from './pages/Billing';
|
||||
import Backups from './pages/Backups';
|
||||
import Monitoring from './pages/Monitoring';
|
||||
import AuditLogs from './pages/AuditLogs';
|
||||
import RBAC from './pages/RBAC';
|
||||
import Settings from './pages/Settings';
|
||||
import Operations from './pages/Operations';
|
||||
import Provisioning from './pages/Provisioning';
|
||||
import NetworkIpam from './pages/NetworkIpam';
|
||||
import ClientArea from './pages/ClientArea';
|
||||
import Security from './pages/Security';
|
||||
|
||||
// Top-level route switch, rendered inside Router/AuthProvider.
// Gating order: loading spinner -> unregistered-user error -> routes.
const AuthenticatedApp = () => {
  const { isLoadingAuth, isLoadingPublicSettings, authError, isAuthenticated } = useAuth();

  // Show loading spinner while checking app public settings or auth
  if (isLoadingPublicSettings || isLoadingAuth) {
    return (
      <div className="fixed inset-0 flex items-center justify-center bg-background">
        <div className="h-9 w-9 rounded-full border-4 border-muted border-t-primary animate-spin" />
      </div>
    );
  }

  // Handle authentication errors
  // Authenticated-but-unauthorized accounts get a dedicated error screen.
  if (authError?.type === 'user_not_registered') {
    return <UserNotRegisteredError />;
  }

  return (
    <Routes>
      {/* /login redirects home once authenticated */}
      <Route path="/login" element={isAuthenticated ? <Navigate to="/" replace /> : <Login />} />
      {/* All app pages share AppLayout and require authentication */}
      <Route element={isAuthenticated ? <AppLayout /> : <Navigate to="/login" replace />}>
        <Route path="/" element={<Dashboard />} />
        <Route path="/vms" element={<VirtualMachines />} />
        <Route path="/nodes" element={<Nodes />} />
        <Route path="/tenants" element={<Tenants />} />
        <Route path="/billing" element={<Billing />} />
        <Route path="/backups" element={<Backups />} />
        <Route path="/monitoring" element={<Monitoring />} />
        <Route path="/provisioning" element={<Provisioning />} />
        <Route path="/network" element={<NetworkIpam />} />
        <Route path="/security" element={<Security />} />
        <Route path="/client" element={<ClientArea />} />
        <Route path="/operations" element={<Operations />} />
        <Route path="/audit-logs" element={<AuditLogs />} />
        <Route path="/rbac" element={<RBAC />} />
        <Route path="/settings" element={<Settings />} />
      </Route>
      {/* Unknown paths: 404 when signed in, otherwise bounce to login */}
      <Route path="*" element={isAuthenticated ? <PageNotFound /> : <Navigate to="/login" replace />} />
    </Routes>
  );
};
|
||||
|
||||
|
||||
// Root component: wires the auth context, react-query client, router,
// and the global toast outlet around the authenticated route switch.
// Provider order matters: AuthProvider wraps the query client so
// queries can read auth state.
function App() {

  return (
    <AuthProvider>
      <QueryClientProvider client={queryClientInstance}>
        <Router>
          <AuthenticatedApp />
        </Router>
        <Toaster />
      </QueryClientProvider>
    </AuthProvider>
  )
}

export default App
|
||||
1129
src/api/appClient.js
Normal file
1129
src/api/appClient.js
Normal file
File diff suppressed because it is too large
Load Diff
34
src/components/ProtectedRoute.jsx
Normal file
34
src/components/ProtectedRoute.jsx
Normal file
@@ -0,0 +1,34 @@
|
||||
import { useEffect } from "react";
|
||||
import { Outlet } from "react-router-dom";
|
||||
import { useAuth } from "@/lib/AuthContext";
|
||||
import UserNotRegisteredError from "@/components/UserNotRegisteredError";
|
||||
|
||||
// Full-screen spinner used while auth state is still resolving.
const DefaultFallback = () => (
  <div className="fixed inset-0 flex items-center justify-center bg-background">
    <div className="h-9 w-9 rounded-full border-4 border-muted border-t-primary animate-spin" />
  </div>
);

// Route guard: renders nested routes (<Outlet />) only when the user is
// authenticated. While auth is loading it renders `fallback`;
// unauthenticated users get `unauthenticatedElement` (null by default);
// a "user_not_registered" auth error shows the dedicated error screen.
export default function ProtectedRoute({ fallback = <DefaultFallback />, unauthenticatedElement = null }) {
  const { isAuthenticated, isLoadingAuth, authError, checkAppState } = useAuth();

  // Re-check app/auth state whenever we settle into an unauthenticated,
  // non-loading state (e.g. after token expiry).
  useEffect(() => {
    if (!isAuthenticated && !isLoadingAuth) {
      checkAppState();
    }
  }, [isAuthenticated, isLoadingAuth, checkAppState]);

  if (isLoadingAuth) {
    return fallback;
  }

  if (authError?.type === "user_not_registered") {
    return <UserNotRegisteredError />;
  }

  if (!isAuthenticated) {
    return unauthenticatedElement;
  }

  return <Outlet />;
}
|
||||
34
src/components/UserNotRegisteredError.jsx
Normal file
34
src/components/UserNotRegisteredError.jsx
Normal file
@@ -0,0 +1,34 @@
|
||||
import React from "react";
|
||||
|
||||
// Static error screen for users who are authenticated but have no
// application access (no tenant membership / RBAC role). Purely
// presentational — no actions or state.
export default function UserNotRegisteredError() {
  return (
    <div className="app-shell-bg flex min-h-screen items-center justify-center px-4 py-8">
      <div className="w-full max-w-lg rounded-2xl border border-border bg-card p-8 shadow-[0_16px_45px_rgba(15,23,42,0.12)]">
        <div className="text-center">
          {/* Inline warning-triangle icon (no icon-library dependency here) */}
          <div className="mb-6 inline-flex h-16 w-16 items-center justify-center rounded-2xl bg-amber-50 text-amber-600 ring-1 ring-amber-200">
            <svg className="h-8 w-8" fill="none" stroke="currentColor" viewBox="0 0 24 24">
              <path
                strokeLinecap="round"
                strokeLinejoin="round"
                strokeWidth="2"
                d="M12 9v2m0 4h.01m-6.938 4h13.856c1.54 0 2.502-1.667 1.732-3L13.732 4c-.77-1.333-2.694-1.333-3.464 0L3.34 16c-.77 1.333.192 3 1.732 3z"
              />
            </svg>
          </div>
          <h1 className="mb-3 text-3xl font-semibold tracking-tight text-foreground">Access Restricted</h1>
          <p className="mb-7 text-sm text-muted-foreground">
            Your account is authenticated but does not have application access. Contact an administrator to grant the correct role.
          </p>
          <div className="rounded-xl border border-border bg-muted/50 p-4 text-left text-sm text-muted-foreground">
            <p className="font-medium text-foreground">Quick checks:</p>
            <ul className="mt-2 list-inside list-disc space-y-1">
              <li>Confirm you signed in with the expected organization account.</li>
              <li>Request tenant membership or RBAC role assignment.</li>
              <li>Retry login after the admin updates permissions.</li>
            </ul>
          </div>
        </div>
      </div>
    </div>
  );
}
|
||||
54
src/components/layout/AppLayout.jsx
Normal file
54
src/components/layout/AppLayout.jsx
Normal file
@@ -0,0 +1,54 @@
|
||||
import { Bell, ChevronRight, Search } from "lucide-react";
|
||||
import { Outlet, useLocation } from "react-router-dom";
|
||||
import { Input } from "@/components/ui/input";
|
||||
import Sidebar from "./Sidebar";
|
||||
import { resolveNavigation } from "./nav-config";
|
||||
|
||||
// Authenticated app chrome: sidebar plus a sticky header (breadcrumb,
// global search, notifications) wrapped around the routed page content.
export default function AppLayout() {
  const location = useLocation();
  // Breadcrumb source: nav entry matching the current URL (may be null).
  const currentNav = resolveNavigation(location.pathname);

  return (
    <div className="min-h-screen bg-background text-foreground app-shell-bg flex">
      <Sidebar />
      <div className="relative flex-1 min-w-0">
        <header className="sticky top-0 z-30 border-b border-border/80 bg-background/85 backdrop-blur-xl">
          <div className="mx-auto flex h-16 w-full max-w-[1680px] items-center gap-3 px-4 md:px-6">
            {/* Breadcrumb (desktop only); falls back to Workspace/Overview
                when the current path has no nav entry */}
            <div className="hidden lg:flex items-center gap-1.5 text-xs text-muted-foreground">
              <span className="font-medium">Control Plane</span>
              <ChevronRight className="h-3.5 w-3.5" />
              <span>{currentNav?.group ?? "Workspace"}</span>
              <ChevronRight className="h-3.5 w-3.5" />
              <span className="font-semibold text-foreground">{currentNav?.label ?? "Overview"}</span>
            </div>

            {/* Global search input with a decorative Ctrl-K hint.
                NOTE(review): no change/keyboard handler is wired here yet. */}
            <div className="relative ml-auto w-full max-w-sm">
              <Search className="pointer-events-none absolute left-3 top-1/2 h-4 w-4 -translate-y-1/2 text-muted-foreground" />
              <Input
                aria-label="Global search"
                placeholder="Search resources, tenants, events..."
                className="h-9 rounded-lg border-border bg-card/70 pl-9 pr-16 text-sm"
              />
              <span className="pointer-events-none absolute right-2 top-1/2 hidden -translate-y-1/2 rounded-md border border-border bg-background px-1.5 py-0.5 font-mono text-[10px] text-muted-foreground md:block">
                Ctrl K
              </span>
            </div>

            {/* Notifications button. NOTE(review): no onClick handler yet. */}
            <button
              type="button"
              className="inline-flex h-9 w-9 items-center justify-center rounded-lg border border-border bg-card/70 text-muted-foreground transition-colors hover:text-foreground"
            >
              <Bell className="h-4 w-4" />
            </button>
          </div>
        </header>

        {/* Routed page content renders through the Outlet */}
        <main className="mx-auto w-full max-w-[1680px] px-4 pb-8 pt-5 md:px-6 md:pb-10 md:pt-6">
          <div className="page-enter">
            <Outlet />
          </div>
        </main>
      </div>
    </div>
  );
}
|
||||
164
src/components/layout/Sidebar.jsx
Normal file
164
src/components/layout/Sidebar.jsx
Normal file
@@ -0,0 +1,164 @@
|
||||
import { useState } from "react";
|
||||
import { Link, useLocation } from "react-router-dom";
|
||||
import {
|
||||
Activity,
|
||||
Boxes,
|
||||
ChevronLeft,
|
||||
ChevronRight,
|
||||
CreditCard,
|
||||
Database,
|
||||
FileText,
|
||||
HardDrive,
|
||||
LayoutDashboard,
|
||||
ListChecks,
|
||||
LogOut,
|
||||
Menu,
|
||||
Network,
|
||||
Server,
|
||||
Settings,
|
||||
Shield,
|
||||
Users
|
||||
} from "lucide-react";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { appClient } from "@/api/appClient";
|
||||
import { navigationGroups } from "./nav-config";
|
||||
|
||||
// Maps nav-config `iconKey` strings to lucide-react icon components.
// Unknown keys fall back to LayoutDashboard at the use site.
const iconMap = {
  dashboard: LayoutDashboard,
  monitoring: Activity,
  operations: ListChecks,
  audit: FileText,
  vms: Server,
  nodes: HardDrive,
  provisioning: Boxes,
  backups: Database,
  network: Network,
  security: Shield,
  tenants: Users,
  client: Users,
  billing: CreditCard,
  rbac: Shield,
  settings: Settings
};
|
||||
|
||||
// App navigation sidebar. Renders grouped nav links from nav-config,
// a brand header, workspace card, sign-out and collapse controls.
// Responsive behavior: sticky fixed-width panel on desktop (md+),
// slide-over overlay toggled by a hamburger button on mobile.
export default function Sidebar() {
  const location = useLocation();
  // collapsed: desktop icon-only mode; mobileOpen: mobile overlay visibility.
  const [collapsed, setCollapsed] = useState(false);
  const [mobileOpen, setMobileOpen] = useState(false);

  // "/" matches only exactly; other paths match by prefix.
  const isActive = (path) => {
    if (path === "/") return location.pathname === "/";
    return location.pathname.startsWith(path);
  };

  // Shared panel markup, rendered twice below (mobile overlay + desktop).
  const sidebarContent = (
    <aside
      className={cn(
        "flex h-full flex-col border-r border-sidebar-border bg-sidebar/95 backdrop-blur-xl transition-[width] duration-300",
        collapsed ? "w-[88px]" : "w-[276px]"
      )}
    >
      {/* Brand header; text hidden when collapsed */}
      <div className="border-b border-sidebar-border px-4 py-4">
        <div className="flex min-w-0 items-center gap-3">
          <div className="flex h-10 w-10 items-center justify-center rounded-xl bg-primary/10 text-primary ring-1 ring-primary/20">
            <Server className="h-5 w-5" />
          </div>
          {!collapsed && (
            <div className="min-w-0">
              <p className="truncate text-sm font-semibold text-foreground tracking-tight">ProxPanel Cloud</p>
              <p className="truncate text-[11px] text-muted-foreground">Enterprise Control Console</p>
            </div>
          )}
        </div>
      </div>

      {/* Grouped navigation links */}
      <nav className="flex-1 overflow-y-auto px-3 py-3">
        <div className="space-y-5">
          {navigationGroups.map((group) => (
            <div key={group.id} className="space-y-1.5">
              {!collapsed && (
                <p className="px-2 text-[10px] font-semibold uppercase tracking-[0.14em] text-muted-foreground/90">
                  {group.label}
                </p>
              )}

              {group.items.map((item) => {
                const Icon = iconMap[item.iconKey] ?? LayoutDashboard;
                const active = isActive(item.path);

                return (
                  <Link
                    key={item.path}
                    to={item.path}
                    title={collapsed ? item.label : undefined}
                    onClick={() => setMobileOpen(false)}
                    className={cn(
                      "group relative flex items-center gap-3 rounded-xl px-3 py-2.5 text-sm transition-all",
                      active
                        ? "bg-primary/12 text-primary ring-1 ring-primary/20"
                        : "text-sidebar-foreground hover:bg-sidebar-accent hover:text-foreground"
                    )}
                  >
                    {/* Active-route accent bar on the left edge */}
                    {active && <span className="absolute left-0 top-2.5 bottom-2.5 w-[3px] rounded-r-full bg-primary" />}
                    <Icon className={cn("h-[17px] w-[17px] shrink-0", active ? "text-primary" : "text-muted-foreground group-hover:text-foreground")} />
                    {!collapsed && <span className="truncate font-medium">{item.label}</span>}
                  </Link>
                );
              })}
            </div>
          ))}
        </div>
      </nav>

      {/* Workspace card + sign-out */}
      <div className="border-t border-sidebar-border p-2">
        {!collapsed && (
          <div className="mb-2 rounded-xl border border-border/80 bg-card/65 px-3 py-2">
            <p className="text-[11px] font-semibold text-foreground">Production Workspace</p>
            <p className="text-[11px] text-muted-foreground">Latency target: 99.95% SLA</p>
          </div>
        )}

        {/* Logout delegates to the API client, redirecting to /login */}
        <button
          type="button"
          onClick={() => appClient.auth.logout("/login")}
          className="flex w-full items-center gap-2.5 rounded-xl px-3 py-2.5 text-sm text-muted-foreground transition-colors hover:bg-rose-50 hover:text-rose-700"
        >
          <LogOut className="h-[17px] w-[17px] shrink-0" />
          {!collapsed && <span className="font-medium">Sign Out</span>}
        </button>
      </div>

      {/* Collapse toggle (desktop only) */}
      <div className="hidden border-t border-sidebar-border p-2 md:block">
        <button
          type="button"
          onClick={() => setCollapsed((value) => !value)}
          className="inline-flex h-9 w-full items-center justify-center rounded-xl border border-border/80 bg-card/60 text-muted-foreground transition-colors hover:text-foreground"
        >
          {collapsed ? <ChevronRight className="h-4 w-4" /> : <ChevronLeft className="h-4 w-4" />}
        </button>
      </div>
    </aside>
  );

  return (
    <>
      {/* Mobile hamburger to open the overlay */}
      <button
        type="button"
        onClick={() => setMobileOpen(true)}
        className="fixed left-4 top-3 z-40 inline-flex h-9 w-9 items-center justify-center rounded-lg border border-border bg-background/90 text-foreground shadow-sm backdrop-blur md:hidden"
      >
        <Menu className="h-5 w-5" />
      </button>

      {/* Mobile overlay: backdrop click closes; clicks inside the panel
          are stopped so the sidebar stays open */}
      {mobileOpen && (
        <div className="fixed inset-0 z-40 bg-slate-950/45 backdrop-blur-[2px] md:hidden" onClick={() => setMobileOpen(false)}>
          <div className="h-full max-w-[290px]" onClick={(event) => event.stopPropagation()}>
            {sidebarContent}
          </div>
        </div>
      )}

      {/* Desktop: sticky full-height panel */}
      <div className="sticky top-0 hidden h-screen md:flex">{sidebarContent}</div>
    </>
  );
}
|
||||
54
src/components/layout/nav-config.js
Normal file
54
src/components/layout/nav-config.js
Normal file
@@ -0,0 +1,54 @@
|
||||
// Sidebar navigation model: grouped route entries. `iconKey` maps to an
// icon in the Sidebar's icon table; `path` doubles as the active-route
// matching prefix.
export const navigationGroups = [
  {
    id: "overview",
    label: "Overview",
    items: [
      { path: "/", label: "Dashboard", iconKey: "dashboard" },
      { path: "/monitoring", label: "Monitoring", iconKey: "monitoring" },
      { path: "/operations", label: "Operations", iconKey: "operations" },
      { path: "/audit-logs", label: "Audit Logs", iconKey: "audit" }
    ]
  },
  {
    id: "compute",
    label: "Compute",
    items: [
      { path: "/vms", label: "Virtual Machines", iconKey: "vms" },
      { path: "/nodes", label: "Nodes", iconKey: "nodes" },
      { path: "/provisioning", label: "Provisioning", iconKey: "provisioning" },
      { path: "/backups", label: "Backups", iconKey: "backups" }
    ]
  },
  {
    id: "network",
    label: "Network",
    items: [
      { path: "/network", label: "IPAM & Pools", iconKey: "network" },
      { path: "/security", label: "Security", iconKey: "security" }
    ]
  },
  {
    id: "tenant",
    label: "Tenants",
    items: [
      { path: "/tenants", label: "Tenants", iconKey: "tenants" },
      { path: "/client", label: "Client Area", iconKey: "client" },
      { path: "/billing", label: "Billing", iconKey: "billing" },
      { path: "/rbac", label: "RBAC", iconKey: "rbac" },
      { path: "/settings", label: "Settings", iconKey: "settings" }
    ]
  }
];

// Flattened view of every entry, each annotated with its group label.
export const flatNavigation = navigationGroups.reduce(
  (entries, group) =>
    entries.concat(group.items.map((item) => ({ ...item, group: group.label }))),
  []
);

// Resolve the nav entry for a pathname. "/" (or a falsy path) maps to
// the dashboard entry; otherwise the longest matching path prefix wins.
// Returns null when nothing matches.
export function resolveNavigation(pathname) {
  if (!pathname || pathname === "/") {
    return flatNavigation.find((item) => item.path === "/") ?? null;
  }

  const matches = flatNavigation.filter(
    (item) => item.path !== "/" && pathname.startsWith(item.path)
  );
  // Stable sort: ties between equal-length prefixes keep declaration order.
  matches.sort((a, b) => b.path.length - a.path.length);
  return matches[0] ?? null;
}
|
||||
14
src/components/shared/EmptyState.jsx
Normal file
14
src/components/shared/EmptyState.jsx
Normal file
@@ -0,0 +1,14 @@
|
||||
export default function EmptyState({ icon: Icon, title, description, action }) {
|
||||
return (
|
||||
<div className="surface-card p-10 text-center">
|
||||
{Icon ? (
|
||||
<div className="mx-auto mb-3 flex h-14 w-14 items-center justify-center rounded-2xl bg-muted text-muted-foreground">
|
||||
<Icon className="h-6 w-6" />
|
||||
</div>
|
||||
) : null}
|
||||
<h3 className="text-base font-semibold text-foreground">{title}</h3>
|
||||
{description ? <p className="mx-auto mt-1 max-w-md text-sm text-muted-foreground">{description}</p> : null}
|
||||
{action ? <div className="mt-5 flex justify-center">{action}</div> : null}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
12
src/components/shared/PageHeader.jsx
Normal file
12
src/components/shared/PageHeader.jsx
Normal file
@@ -0,0 +1,12 @@
|
||||
export default function PageHeader({ title, description, children }) {
|
||||
return (
|
||||
<div className="flex flex-col gap-4 lg:flex-row lg:items-start lg:justify-between">
|
||||
<div className="min-w-0 space-y-1">
|
||||
<p className="text-[11px] font-semibold uppercase tracking-[0.12em] text-muted-foreground">Enterprise Console</p>
|
||||
<h1 className="text-2xl font-semibold tracking-tight text-foreground sm:text-[30px]">{title}</h1>
|
||||
{description ? <p className="max-w-3xl text-sm text-muted-foreground">{description}</p> : null}
|
||||
</div>
|
||||
{children ? <div className="flex flex-wrap items-center gap-2">{children}</div> : null}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
25
src/components/shared/ResourceBar.jsx
Normal file
25
src/components/shared/ResourceBar.jsx
Normal file
@@ -0,0 +1,25 @@
|
||||
import { cn } from "@/lib/utils";
|
||||
|
||||
export default function ResourceBar({ label, used = 0, total = 0, unit = "", percentage }) {
|
||||
const computed = percentage ?? (total > 0 ? (used / total) * 100 : 0);
|
||||
const safePercentage = Math.max(0, Math.min(100, Number.isFinite(computed) ? computed : 0));
|
||||
|
||||
return (
|
||||
<div className="space-y-1.5">
|
||||
<div className="flex items-center justify-between text-xs text-muted-foreground">
|
||||
<span className="font-medium">{label}</span>
|
||||
<span className="font-mono text-[11px]">
|
||||
{used}
|
||||
{unit} / {total}
|
||||
{unit}
|
||||
</span>
|
||||
</div>
|
||||
<div className="h-2.5 w-full overflow-hidden rounded-full bg-muted/80">
|
||||
<div
|
||||
className={cn("h-full rounded-full bg-primary/85 transition-all")}
|
||||
style={{ width: `${safePercentage}%` }}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
28
src/components/shared/StatCard.jsx
Normal file
28
src/components/shared/StatCard.jsx
Normal file
@@ -0,0 +1,28 @@
|
||||
import { cn } from "@/lib/utils";
|
||||
|
||||
// Icon-badge tints keyed by semantic color; unknown values fall back
// to `primary` at the use site below.
const colorMap = {
  primary: "text-primary bg-primary/12 ring-primary/20",
  success: "text-emerald-700 bg-emerald-50 ring-emerald-200",
  warning: "text-amber-700 bg-amber-50 ring-amber-200",
  danger: "text-rose-700 bg-rose-50 ring-rose-200"
};

// KPI tile: uppercase label, large value, optional subtitle and trend
// pill, plus an optional tinted icon badge on the right.
export default function StatCard({ icon: Icon, label, value, subtitle, trend, color = "primary" }) {
  return (
    <div className="surface-card p-5">
      <div className="flex items-start justify-between gap-3">
        <div className="min-w-0">
          <p className="text-[11px] font-semibold uppercase tracking-[0.11em] text-muted-foreground">{label}</p>
          <p className="mt-1 truncate text-2xl font-semibold tracking-tight text-foreground">{value}</p>
          {subtitle ? <p className="mt-1 text-xs text-muted-foreground">{subtitle}</p> : null}
          {trend ? <p className="mt-2 inline-flex rounded-full bg-muted px-2 py-0.5 text-[11px] font-medium text-muted-foreground">{trend}</p> : null}
        </div>
        {Icon ? (
          <div className={cn("flex h-10 w-10 shrink-0 items-center justify-center rounded-xl ring-1", colorMap[color] ?? colorMap.primary)}>
            <Icon className="h-4.5 w-4.5" />
          </div>
        ) : null}
      </div>
    </div>
  );
}
|
||||
35
src/components/shared/StatusBadge.jsx
Normal file
35
src/components/shared/StatusBadge.jsx
Normal file
@@ -0,0 +1,35 @@
|
||||
import { cn } from "@/lib/utils";
|
||||
|
||||
const statusColors = {
|
||||
running: "bg-emerald-50 text-emerald-700 border-emerald-200",
|
||||
active: "bg-emerald-50 text-emerald-700 border-emerald-200",
|
||||
online: "bg-emerald-50 text-emerald-700 border-emerald-200",
|
||||
paid: "bg-emerald-50 text-emerald-700 border-emerald-200",
|
||||
completed: "bg-emerald-50 text-emerald-700 border-emerald-200",
|
||||
success: "bg-emerald-50 text-emerald-700 border-emerald-200",
|
||||
pending: "bg-amber-50 text-amber-700 border-amber-200",
|
||||
warning: "bg-amber-50 text-amber-700 border-amber-200",
|
||||
stopped: "bg-slate-100 text-slate-700 border-slate-200",
|
||||
offline: "bg-slate-100 text-slate-700 border-slate-200",
|
||||
failed: "bg-rose-50 text-rose-700 border-rose-200",
|
||||
critical: "bg-rose-50 text-rose-700 border-rose-200",
|
||||
error: "bg-rose-50 text-rose-700 border-rose-200",
|
||||
default: "bg-muted text-muted-foreground border-border"
|
||||
};
|
||||
|
||||
export default function StatusBadge({ status, size = "sm" }) {
|
||||
const normalized = String(status ?? "").toLowerCase();
|
||||
const sizeClass = size === "lg" ? "px-2.5 py-1 text-xs" : "px-2 py-0.5 text-[11px]";
|
||||
|
||||
return (
|
||||
<span
|
||||
className={cn(
|
||||
"inline-flex items-center rounded-full border font-semibold capitalize tracking-wide",
|
||||
sizeClass,
|
||||
statusColors[normalized] ?? statusColors.default
|
||||
)}
|
||||
>
|
||||
{normalized || "unknown"}
|
||||
</span>
|
||||
);
|
||||
}
|
||||
19
src/components/ui/UserNotRegisteredError.jsx
Normal file
19
src/components/ui/UserNotRegisteredError.jsx
Normal file
@@ -0,0 +1,19 @@
|
||||
import * as React from "react"
|
||||
|
||||
// NOTE(review): the commit lists this file as
// src/components/ui/UserNotRegisteredError.jsx, but it exports the
// useIsMobile hook — looks like a misplaced use-mobile module; confirm
// the intended filename.

// Viewport width (px) below which the hook reports "mobile".
const MOBILE_BREAKPOINT = 768

// Returns true when window.innerWidth < MOBILE_BREAKPOINT, updating on
// media-query changes. Initial state is undefined until the effect runs,
// coerced to false by the return.
export function useIsMobile() {
  const [isMobile, setIsMobile] = React.useState(undefined)

  React.useEffect(() => {
    const mql = window.matchMedia(`(max-width: ${MOBILE_BREAKPOINT - 1}px)`)
    const onChange = () => {
      setIsMobile(window.innerWidth < MOBILE_BREAKPOINT)
    }
    mql.addEventListener("change", onChange)
    // Seed the value immediately so consumers don't wait for a resize.
    setIsMobile(window.innerWidth < MOBILE_BREAKPOINT)
    return () => mql.removeEventListener("change", onChange);
  }, [])

  return !!isMobile
}
|
||||
97
src/components/ui/accordion.jsx
Normal file
97
src/components/ui/accordion.jsx
Normal file
@@ -0,0 +1,97 @@
|
||||
import * as React from "react"
|
||||
import * as AlertDialogPrimitive from "@radix-ui/react-alert-dialog"
|
||||
|
||||
import { cn } from "@/lib/utils"
|
||||
import { buttonVariants } from "@/components/ui/button"
|
||||
|
||||
const AlertDialog = AlertDialogPrimitive.Root
|
||||
|
||||
const AlertDialogTrigger = AlertDialogPrimitive.Trigger
|
||||
|
||||
const AlertDialogPortal = AlertDialogPrimitive.Portal
|
||||
|
||||
const AlertDialogOverlay = React.forwardRef(({ className, ...props }, ref) => (
|
||||
<AlertDialogPrimitive.Overlay
|
||||
className={cn(
|
||||
"fixed inset-0 z-50 bg-black/80 data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0",
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
ref={ref} />
|
||||
))
|
||||
AlertDialogOverlay.displayName = AlertDialogPrimitive.Overlay.displayName
|
||||
|
||||
const AlertDialogContent = React.forwardRef(({ className, ...props }, ref) => (
|
||||
<AlertDialogPortal>
|
||||
<AlertDialogOverlay />
|
||||
<AlertDialogPrimitive.Content
|
||||
ref={ref}
|
||||
className={cn(
|
||||
"fixed left-[50%] top-[50%] z-50 grid w-full max-w-lg translate-x-[-50%] translate-y-[-50%] gap-4 border bg-background p-6 shadow-lg duration-200 data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[state=closed]:slide-out-to-left-1/2 data-[state=closed]:slide-out-to-top-[48%] data-[state=open]:slide-in-from-left-1/2 data-[state=open]:slide-in-from-top-[48%] sm:rounded-lg",
|
||||
className
|
||||
)}
|
||||
{...props} />
|
||||
</AlertDialogPortal>
|
||||
))
|
||||
AlertDialogContent.displayName = AlertDialogPrimitive.Content.displayName
|
||||
|
||||
const AlertDialogHeader = ({
|
||||
className,
|
||||
...props
|
||||
}) => (
|
||||
<div
|
||||
className={cn("flex flex-col space-y-2 text-center sm:text-left", className)}
|
||||
{...props} />
|
||||
)
|
||||
AlertDialogHeader.displayName = "AlertDialogHeader"
|
||||
|
||||
const AlertDialogFooter = ({
|
||||
className,
|
||||
...props
|
||||
}) => (
|
||||
<div
|
||||
className={cn("flex flex-col-reverse sm:flex-row sm:justify-end sm:space-x-2", className)}
|
||||
{...props} />
|
||||
)
|
||||
AlertDialogFooter.displayName = "AlertDialogFooter"
|
||||
|
||||
const AlertDialogTitle = React.forwardRef(({ className, ...props }, ref) => (
|
||||
<AlertDialogPrimitive.Title ref={ref} className={cn("text-lg font-semibold", className)} {...props} />
|
||||
))
|
||||
AlertDialogTitle.displayName = AlertDialogPrimitive.Title.displayName
|
||||
|
||||
const AlertDialogDescription = React.forwardRef(({ className, ...props }, ref) => (
|
||||
<AlertDialogPrimitive.Description
|
||||
ref={ref}
|
||||
className={cn("text-sm text-muted-foreground", className)}
|
||||
{...props} />
|
||||
))
|
||||
AlertDialogDescription.displayName =
|
||||
AlertDialogPrimitive.Description.displayName
|
||||
|
||||
const AlertDialogAction = React.forwardRef(({ className, ...props }, ref) => (
|
||||
<AlertDialogPrimitive.Action ref={ref} className={cn(buttonVariants(), className)} {...props} />
|
||||
))
|
||||
AlertDialogAction.displayName = AlertDialogPrimitive.Action.displayName
|
||||
|
||||
const AlertDialogCancel = React.forwardRef(({ className, ...props }, ref) => (
|
||||
<AlertDialogPrimitive.Cancel
|
||||
ref={ref}
|
||||
className={cn(buttonVariants({ variant: "outline" }), "mt-2 sm:mt-0", className)}
|
||||
{...props} />
|
||||
))
|
||||
AlertDialogCancel.displayName = AlertDialogPrimitive.Cancel.displayName
|
||||
|
||||
// Public surface of the alert-dialog module (shadcn/ui-style re-exports).
export {
  AlertDialog,
  AlertDialogPortal,
  AlertDialogOverlay,
  AlertDialogTrigger,
  AlertDialogContent,
  AlertDialogHeader,
  AlertDialogFooter,
  AlertDialogTitle,
  AlertDialogDescription,
  AlertDialogAction,
  AlertDialogCancel,
}
|
||||
47
src/components/ui/alert-dialog.jsx
Normal file
47
src/components/ui/alert-dialog.jsx
Normal file
@@ -0,0 +1,47 @@
|
||||
import * as React from "react"
|
||||
import { cva } from "class-variance-authority";
|
||||
|
||||
import { cn } from "@/lib/utils"
|
||||
|
||||
// class-variance-authority recipe for the <Alert> container.
// Base classes absolutely position a leading <svg> icon and pad sibling
// content ([&>svg~*]:pl-7) so text clears the icon.
const alertVariants = cva(
  "relative w-full rounded-lg border px-4 py-3 text-sm [&>svg+div]:translate-y-[-3px] [&>svg]:absolute [&>svg]:left-4 [&>svg]:top-4 [&>svg]:text-foreground [&>svg~*]:pl-7",
  {
    variants: {
      variant: {
        default: "bg-background text-foreground",
        // Destructive tone: red border/text; dark mode keeps the
        // full-strength border and tints the icon as well.
        destructive:
          "border-destructive/50 text-destructive dark:border-destructive [&>svg]:text-destructive",
      },
    },
    defaultVariants: {
      variant: "default",
    },
  }
)
|
||||
|
||||
const Alert = React.forwardRef(({ className, variant, ...props }, ref) => (
|
||||
<div
|
||||
ref={ref}
|
||||
role="alert"
|
||||
className={cn(alertVariants({ variant }), className)}
|
||||
{...props} />
|
||||
))
|
||||
Alert.displayName = "Alert"
|
||||
|
||||
const AlertTitle = React.forwardRef(({ className, ...props }, ref) => (
|
||||
<h5
|
||||
ref={ref}
|
||||
className={cn("mb-1 font-medium leading-none tracking-tight", className)}
|
||||
{...props} />
|
||||
))
|
||||
AlertTitle.displayName = "AlertTitle"
|
||||
|
||||
const AlertDescription = React.forwardRef(({ className, ...props }, ref) => (
|
||||
<div
|
||||
ref={ref}
|
||||
className={cn("text-sm [&_p]:leading-relaxed", className)}
|
||||
{...props} />
|
||||
))
|
||||
AlertDescription.displayName = "AlertDescription"
|
||||
|
||||
export { Alert, AlertTitle, AlertDescription }
|
||||
5
src/components/ui/alert.jsx
Normal file
5
src/components/ui/alert.jsx
Normal file
@@ -0,0 +1,5 @@
|
||||
import * as AspectRatioPrimitive from "@radix-ui/react-aspect-ratio"

// Thin re-export of Radix's AspectRatio root; no local styling is added.
const AspectRatio = AspectRatioPrimitive.Root

export { AspectRatio }
|
||||
35
src/components/ui/aspect-ratio.jsx
Normal file
35
src/components/ui/aspect-ratio.jsx
Normal file
@@ -0,0 +1,35 @@
|
||||
"use client"
|
||||
|
||||
import * as React from "react"
|
||||
import * as AvatarPrimitive from "@radix-ui/react-avatar"
|
||||
|
||||
import { cn } from "@/lib/utils"
|
||||
|
||||
const Avatar = React.forwardRef(({ className, ...props }, ref) => (
|
||||
<AvatarPrimitive.Root
|
||||
ref={ref}
|
||||
className={cn("relative flex h-10 w-10 shrink-0 overflow-hidden rounded-full", className)}
|
||||
{...props} />
|
||||
))
|
||||
Avatar.displayName = AvatarPrimitive.Root.displayName
|
||||
|
||||
const AvatarImage = React.forwardRef(({ className, ...props }, ref) => (
|
||||
<AvatarPrimitive.Image
|
||||
ref={ref}
|
||||
className={cn("aspect-square h-full w-full", className)}
|
||||
{...props} />
|
||||
))
|
||||
AvatarImage.displayName = AvatarPrimitive.Image.displayName
|
||||
|
||||
const AvatarFallback = React.forwardRef(({ className, ...props }, ref) => (
|
||||
<AvatarPrimitive.Fallback
|
||||
ref={ref}
|
||||
className={cn(
|
||||
"flex h-full w-full items-center justify-center rounded-full bg-muted",
|
||||
className
|
||||
)}
|
||||
{...props} />
|
||||
))
|
||||
AvatarFallback.displayName = AvatarPrimitive.Fallback.displayName
|
||||
|
||||
export { Avatar, AvatarImage, AvatarFallback }
|
||||
34
src/components/ui/avatar.jsx
Normal file
34
src/components/ui/avatar.jsx
Normal file
@@ -0,0 +1,34 @@
|
||||
import * as React from "react"
|
||||
import { cva } from "class-variance-authority";
|
||||
|
||||
import { cn } from "@/lib/utils"
|
||||
|
||||
// class-variance-authority recipe for the cva-based <Badge>.
const badgeVariants = cva(
  "inline-flex items-center rounded-md border px-2.5 py-0.5 text-xs font-semibold transition-colors focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2",
  {
    variants: {
      variant: {
        default:
          "border-transparent bg-primary text-primary-foreground shadow hover:bg-primary/80",
        secondary:
          "border-transparent bg-secondary text-secondary-foreground hover:bg-secondary/80",
        destructive:
          "border-transparent bg-destructive text-destructive-foreground shadow hover:bg-destructive/80",
        // Outline keeps the base border and only sets text color.
        outline: "text-foreground",
      },
    },
    defaultVariants: {
      variant: "default",
    },
  }
)
|
||||
|
||||
function Badge({
|
||||
className,
|
||||
variant,
|
||||
...props
|
||||
}) {
|
||||
return (<div className={cn(badgeVariants({ variant }), className)} {...props} />);
|
||||
}
|
||||
|
||||
export { Badge, badgeVariants }
|
||||
13
src/components/ui/badge.jsx
Normal file
13
src/components/ui/badge.jsx
Normal file
@@ -0,0 +1,13 @@
|
||||
import * as React from "react";
|
||||
import { cn } from "@/lib/utils";
|
||||
|
||||
function Badge({ className, ...props }) {
|
||||
return (
|
||||
<span
|
||||
className={cn("inline-flex items-center rounded-full border border-border px-2 py-0.5 text-xs font-medium", className)}
|
||||
{...props}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
export { Badge };
|
||||
48
src/components/ui/breadcrumb.jsx
Normal file
48
src/components/ui/breadcrumb.jsx
Normal file
@@ -0,0 +1,48 @@
|
||||
import * as React from "react"
|
||||
import { Slot } from "@radix-ui/react-slot"
|
||||
import { cva } from "class-variance-authority";
|
||||
|
||||
import { cn } from "@/lib/utils"
|
||||
|
||||
// class-variance-authority recipe for the Slot-based <Button>.
// Base classes also normalize any child <svg> (fixed size, no pointer events).
const buttonVariants = cva(
  "inline-flex items-center justify-center gap-2 whitespace-nowrap rounded-md text-sm font-medium transition-colors focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring disabled:pointer-events-none disabled:opacity-50 [&_svg]:pointer-events-none [&_svg]:size-4 [&_svg]:shrink-0",
  {
    variants: {
      variant: {
        default:
          "bg-primary text-primary-foreground shadow hover:bg-primary/90",
        destructive:
          "bg-destructive text-destructive-foreground shadow-sm hover:bg-destructive/90",
        outline:
          "border border-input bg-transparent shadow-sm hover:bg-accent hover:text-accent-foreground",
        secondary:
          "bg-secondary text-secondary-foreground shadow-sm hover:bg-secondary/80",
        ghost: "hover:bg-accent hover:text-accent-foreground",
        link: "text-primary underline-offset-4 hover:underline",
      },
      size: {
        default: "h-9 px-4 py-2",
        sm: "h-8 rounded-md px-3 text-xs",
        lg: "h-10 rounded-md px-8",
        icon: "h-9 w-9",
      },
    },
    defaultVariants: {
      variant: "default",
      size: "default",
    },
  }
)
|
||||
|
||||
const Button = React.forwardRef(({ className, variant, size, asChild = false, ...props }, ref) => {
|
||||
const Comp = asChild ? Slot : "button"
|
||||
return (
|
||||
(<Comp
|
||||
className={cn(buttonVariants({ variant, size, className }))}
|
||||
ref={ref}
|
||||
{...props} />)
|
||||
);
|
||||
})
|
||||
Button.displayName = "Button"
|
||||
|
||||
export { Button, buttonVariants }
|
||||
37
src/components/ui/button.jsx
Normal file
37
src/components/ui/button.jsx
Normal file
@@ -0,0 +1,37 @@
|
||||
import * as React from "react";
|
||||
import { cn } from "@/lib/utils";
|
||||
|
||||
// Visual tone → Tailwind classes for the hand-rolled <Button> below.
const variants = {
  default: "border border-primary bg-primary text-primary-foreground shadow-sm hover:bg-primary/95",
  destructive: "border border-destructive bg-destructive text-destructive-foreground shadow-sm hover:bg-destructive/90",
  outline: "border border-border bg-card text-foreground hover:bg-muted",
  ghost: "border border-transparent text-muted-foreground hover:bg-muted hover:text-foreground",
  secondary: "border border-transparent bg-secondary text-secondary-foreground hover:bg-secondary/80"
};

// Size token → height/padding classes.
const sizes = {
  default: "h-10 px-4 py-2 text-sm",
  sm: "h-8 px-3 text-xs",
  lg: "h-11 px-6 text-sm",
  icon: "h-10 w-10"
};
|
||||
|
||||
const Button = React.forwardRef(({ className, variant = "default", size = "default", type = "button", ...props }, ref) => {
|
||||
return (
|
||||
<button
|
||||
ref={ref}
|
||||
type={type}
|
||||
className={cn(
|
||||
"inline-flex items-center justify-center gap-2 rounded-lg font-medium transition-[background-color,color,border-color,box-shadow,transform] duration-150 focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring/35 focus-visible:ring-offset-1 disabled:pointer-events-none disabled:opacity-45 disabled:saturate-50 active:translate-y-px",
|
||||
variants[variant] ?? variants.default,
|
||||
sizes[size] ?? sizes.default,
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
);
|
||||
});
|
||||
|
||||
Button.displayName = "Button";
|
||||
|
||||
export { Button };
|
||||
50
src/components/ui/calendar.jsx
Normal file
50
src/components/ui/calendar.jsx
Normal file
@@ -0,0 +1,50 @@
|
||||
import * as React from "react"
|
||||
|
||||
import { cn } from "@/lib/utils"
|
||||
|
||||
const Card = React.forwardRef(({ className, ...props }, ref) => (
|
||||
<div
|
||||
ref={ref}
|
||||
className={cn("rounded-xl border bg-card text-card-foreground shadow", className)}
|
||||
{...props} />
|
||||
))
|
||||
Card.displayName = "Card"
|
||||
|
||||
const CardHeader = React.forwardRef(({ className, ...props }, ref) => (
|
||||
<div
|
||||
ref={ref}
|
||||
className={cn("flex flex-col space-y-1.5 p-6", className)}
|
||||
{...props} />
|
||||
))
|
||||
CardHeader.displayName = "CardHeader"
|
||||
|
||||
const CardTitle = React.forwardRef(({ className, ...props }, ref) => (
|
||||
<div
|
||||
ref={ref}
|
||||
className={cn("font-semibold leading-none tracking-tight", className)}
|
||||
{...props} />
|
||||
))
|
||||
CardTitle.displayName = "CardTitle"
|
||||
|
||||
const CardDescription = React.forwardRef(({ className, ...props }, ref) => (
|
||||
<div
|
||||
ref={ref}
|
||||
className={cn("text-sm text-muted-foreground", className)}
|
||||
{...props} />
|
||||
))
|
||||
CardDescription.displayName = "CardDescription"
|
||||
|
||||
const CardContent = React.forwardRef(({ className, ...props }, ref) => (
|
||||
<div ref={ref} className={cn("p-6 pt-0", className)} {...props} />
|
||||
))
|
||||
CardContent.displayName = "CardContent"
|
||||
|
||||
const CardFooter = React.forwardRef(({ className, ...props }, ref) => (
|
||||
<div
|
||||
ref={ref}
|
||||
className={cn("flex items-center p-6 pt-0", className)}
|
||||
{...props} />
|
||||
))
|
||||
CardFooter.displayName = "CardFooter"
|
||||
|
||||
export { Card, CardHeader, CardFooter, CardTitle, CardDescription, CardContent }
|
||||
193
src/components/ui/card.jsx
Normal file
193
src/components/ui/card.jsx
Normal file
@@ -0,0 +1,193 @@
|
||||
import * as React from "react"
|
||||
import useEmblaCarousel from "embla-carousel-react";
|
||||
import { ArrowLeft, ArrowRight } from "lucide-react"
|
||||
|
||||
import { cn } from "@/lib/utils"
|
||||
import { Button } from "@/components/ui/button"
|
||||
|
||||
const CarouselContext = React.createContext(null)
|
||||
|
||||
// Reads the carousel context; throws when used outside a <Carousel>.
function useCarousel() {
  const ctx = React.useContext(CarouselContext);
  if (ctx) {
    return ctx;
  }
  throw new Error("useCarousel must be used within a <Carousel />");
}
|
||||
|
||||
const Carousel = React.forwardRef((
|
||||
{
|
||||
orientation = "horizontal",
|
||||
opts,
|
||||
setApi,
|
||||
plugins,
|
||||
className,
|
||||
children,
|
||||
...props
|
||||
},
|
||||
ref
|
||||
) => {
|
||||
const [carouselRef, api] = useEmblaCarousel({
|
||||
...opts,
|
||||
axis: orientation === "horizontal" ? "x" : "y",
|
||||
}, plugins)
|
||||
const [canScrollPrev, setCanScrollPrev] = React.useState(false)
|
||||
const [canScrollNext, setCanScrollNext] = React.useState(false)
|
||||
|
||||
const onSelect = React.useCallback((api) => {
|
||||
if (!api) {
|
||||
return
|
||||
}
|
||||
|
||||
setCanScrollPrev(api.canScrollPrev())
|
||||
setCanScrollNext(api.canScrollNext())
|
||||
}, [])
|
||||
|
||||
const scrollPrev = React.useCallback(() => {
|
||||
api?.scrollPrev()
|
||||
}, [api])
|
||||
|
||||
const scrollNext = React.useCallback(() => {
|
||||
api?.scrollNext()
|
||||
}, [api])
|
||||
|
||||
const handleKeyDown = React.useCallback((event) => {
|
||||
if (event.key === "ArrowLeft") {
|
||||
event.preventDefault()
|
||||
scrollPrev()
|
||||
} else if (event.key === "ArrowRight") {
|
||||
event.preventDefault()
|
||||
scrollNext()
|
||||
}
|
||||
}, [scrollPrev, scrollNext])
|
||||
|
||||
React.useEffect(() => {
|
||||
if (!api || !setApi) {
|
||||
return
|
||||
}
|
||||
|
||||
setApi(api)
|
||||
}, [api, setApi])
|
||||
|
||||
React.useEffect(() => {
|
||||
if (!api) {
|
||||
return
|
||||
}
|
||||
|
||||
onSelect(api)
|
||||
api.on("reInit", onSelect)
|
||||
api.on("select", onSelect)
|
||||
|
||||
return () => {
|
||||
api?.off("select", onSelect)
|
||||
};
|
||||
}, [api, onSelect])
|
||||
|
||||
return (
|
||||
(<CarouselContext.Provider
|
||||
value={{
|
||||
carouselRef,
|
||||
api: api,
|
||||
opts,
|
||||
orientation:
|
||||
orientation || (opts?.axis === "y" ? "vertical" : "horizontal"),
|
||||
scrollPrev,
|
||||
scrollNext,
|
||||
canScrollPrev,
|
||||
canScrollNext,
|
||||
}}>
|
||||
<div
|
||||
ref={ref}
|
||||
onKeyDownCapture={handleKeyDown}
|
||||
className={cn("relative", className)}
|
||||
role="region"
|
||||
aria-roledescription="carousel"
|
||||
{...props}>
|
||||
{children}
|
||||
</div>
|
||||
</CarouselContext.Provider>)
|
||||
);
|
||||
})
|
||||
Carousel.displayName = "Carousel"
|
||||
|
||||
const CarouselContent = React.forwardRef(({ className, ...props }, ref) => {
|
||||
const { carouselRef, orientation } = useCarousel()
|
||||
|
||||
return (
|
||||
(<div ref={carouselRef} className="overflow-hidden">
|
||||
<div
|
||||
ref={ref}
|
||||
className={cn(
|
||||
"flex",
|
||||
orientation === "horizontal" ? "-ml-4" : "-mt-4 flex-col",
|
||||
className
|
||||
)}
|
||||
{...props} />
|
||||
</div>)
|
||||
);
|
||||
})
|
||||
CarouselContent.displayName = "CarouselContent"
|
||||
|
||||
const CarouselItem = React.forwardRef(({ className, ...props }, ref) => {
|
||||
const { orientation } = useCarousel()
|
||||
|
||||
return (
|
||||
(<div
|
||||
ref={ref}
|
||||
role="group"
|
||||
aria-roledescription="slide"
|
||||
className={cn(
|
||||
"min-w-0 shrink-0 grow-0 basis-full",
|
||||
orientation === "horizontal" ? "pl-4" : "pt-4",
|
||||
className
|
||||
)}
|
||||
{...props} />)
|
||||
);
|
||||
})
|
||||
CarouselItem.displayName = "CarouselItem"
|
||||
|
||||
const CarouselPrevious = React.forwardRef(({ className, variant = "outline", size = "icon", ...props }, ref) => {
|
||||
const { orientation, scrollPrev, canScrollPrev } = useCarousel()
|
||||
|
||||
return (
|
||||
(<Button
|
||||
ref={ref}
|
||||
variant={variant}
|
||||
size={size}
|
||||
className={cn("absolute h-8 w-8 rounded-full", orientation === "horizontal"
|
||||
? "-left-12 top-1/2 -translate-y-1/2"
|
||||
: "-top-12 left-1/2 -translate-x-1/2 rotate-90", className)}
|
||||
disabled={!canScrollPrev}
|
||||
onClick={scrollPrev}
|
||||
{...props}>
|
||||
<ArrowLeft className="h-4 w-4" />
|
||||
<span className="sr-only">Previous slide</span>
|
||||
</Button>)
|
||||
);
|
||||
})
|
||||
CarouselPrevious.displayName = "CarouselPrevious"
|
||||
|
||||
const CarouselNext = React.forwardRef(({ className, variant = "outline", size = "icon", ...props }, ref) => {
|
||||
const { orientation, scrollNext, canScrollNext } = useCarousel()
|
||||
|
||||
return (
|
||||
(<Button
|
||||
ref={ref}
|
||||
variant={variant}
|
||||
size={size}
|
||||
className={cn("absolute h-8 w-8 rounded-full", orientation === "horizontal"
|
||||
? "-right-12 top-1/2 -translate-y-1/2"
|
||||
: "-bottom-12 left-1/2 -translate-x-1/2 rotate-90", className)}
|
||||
disabled={!canScrollNext}
|
||||
onClick={scrollNext}
|
||||
{...props}>
|
||||
<ArrowRight className="h-4 w-4" />
|
||||
<span className="sr-only">Next slide</span>
|
||||
</Button>)
|
||||
);
|
||||
})
|
||||
CarouselNext.displayName = "CarouselNext"
|
||||
|
||||
export { Carousel, CarouselContent, CarouselItem, CarouselPrevious, CarouselNext };
|
||||
309
src/components/ui/carousel.jsx
Normal file
309
src/components/ui/carousel.jsx
Normal file
@@ -0,0 +1,309 @@
|
||||
"use client";
|
||||
import * as React from "react"
|
||||
import * as RechartsPrimitive from "recharts"
|
||||
|
||||
import { cn } from "@/lib/utils"
|
||||
|
||||
// Format: { THEME_NAME: CSS_SELECTOR }
// The selector prefixes the per-chart CSS emitted by ChartStyle, so the
// "dark" colors only apply under a `.dark` ancestor.
const THEMES = {
  light: "",
  dark: ".dark"
}

// Holds { config } for the nearest <ChartContainer>; read via useChart().
const ChartContext = React.createContext(null)
|
||||
|
||||
// Reads the chart context; throws when used outside a <ChartContainer>.
function useChart() {
  const ctx = React.useContext(ChartContext);
  if (ctx) {
    return ctx;
  }
  throw new Error("useChart must be used within a <ChartContainer />");
}
|
||||
|
||||
// Root wrapper for every chart. Publishes `config` through ChartContext,
// injects the per-chart color CSS (<ChartStyle>), and sizes the chart with
// Recharts' ResponsiveContainer. The long class string restyles Recharts'
// internal SVG elements to match the design system's tokens.
const ChartContainer = React.forwardRef(({ id, className, children, config, ...props }, ref) => {
  const uniqueId = React.useId()
  // useId values contain ":" which is invalid in attribute selectors; strip them.
  const chartId = `chart-${id || uniqueId.replace(/:/g, "")}`

  return (
    (<ChartContext.Provider value={{ config }}>
      <div
        data-chart={chartId}
        ref={ref}
        className={cn(
          "flex aspect-video justify-center text-xs [&_.recharts-cartesian-axis-tick_text]:fill-muted-foreground [&_.recharts-cartesian-grid_line[stroke='#ccc']]:stroke-border/50 [&_.recharts-curve.recharts-tooltip-cursor]:stroke-border [&_.recharts-dot[stroke='#fff']]:stroke-transparent [&_.recharts-layer]:outline-none [&_.recharts-polar-grid_[stroke='#ccc']]:stroke-border [&_.recharts-radial-bar-background-sector]:fill-muted [&_.recharts-rectangle.recharts-tooltip-cursor]:fill-muted [&_.recharts-reference-line_[stroke='#ccc']]:stroke-border [&_.recharts-sector[stroke='#fff']]:stroke-transparent [&_.recharts-sector]:outline-none [&_.recharts-surface]:outline-none",
          className
        )}
        {...props}>
        <ChartStyle id={chartId} config={config} />
        <RechartsPrimitive.ResponsiveContainer>
          {children}
        </RechartsPrimitive.ResponsiveContainer>
      </div>
    </ChartContext.Provider>)
  );
})
ChartContainer.displayName = "Chart"
|
||||
|
||||
// Emits a <style> tag defining one CSS custom property (--color-<key>) per
// configured series, scoped to [data-chart=<id>] and repeated for each
// THEMES entry. The injected CSS is built only from the local `config`
// object, not from external input.
const ChartStyle = ({
  id,
  config
}) => {
  // Only series that declare a color (static or per-theme) need variables.
  const colorConfig = Object.entries(config).filter(([, config]) => config.theme || config.color)

  if (!colorConfig.length) {
    return null
  }

  return (
    (<style
      dangerouslySetInnerHTML={{
        __html: Object.entries(THEMES)
          .map(([theme, prefix]) => `
${prefix} [data-chart=${id}] {
${colorConfig
  .map(([key, itemConfig]) => {
    // A theme-specific color wins over the static `color` field.
    const color =
      itemConfig.theme?.[theme] ||
      itemConfig.color
    return color ? `  --color-${key}: ${color};` : null
  })
  .join("\n")}
}
`)
          .join("\n"),
      }} />)
  );
}
|
||||
|
||||
// Re-export of Recharts' Tooltip; pair its `content` with <ChartTooltipContent>.
const ChartTooltip = RechartsPrimitive.Tooltip
|
||||
|
||||
// Custom tooltip body for ChartTooltip. Resolves labels, icons and colors
// from the chart config and renders one row per payload series.
// `active`/`payload`/`label` are injected by Recharts when used as the
// Tooltip's `content` — assumed to follow Recharts' tooltip payload shape.
const ChartTooltipContent = React.forwardRef((
  {
    active,
    payload,
    className,
    indicator = "dot",      // indicator shape: "dot" | "line" | "dashed"
    hideLabel = false,
    hideIndicator = false,
    label,
    labelFormatter,
    labelClassName,
    formatter,
    color,                  // forces one indicator color for all rows
    nameKey,
    labelKey,
  },
  ref
) => {
  const { config } = useChart()

  // Header label: resolved from config (via the first payload item's key),
  // optionally transformed by labelFormatter.
  const tooltipLabel = React.useMemo(() => {
    if (hideLabel || !payload?.length) {
      return null
    }

    const [item] = payload
    const key = `${labelKey || item.dataKey || item.name || "value"}`
    const itemConfig = getPayloadConfigFromPayload(config, item, key)
    const value =
      !labelKey && typeof label === "string"
        ? config[label]?.label || label
        : itemConfig?.label

    if (labelFormatter) {
      return (
        (<div className={cn("font-medium", labelClassName)}>
          {labelFormatter(value, payload)}
        </div>)
      );
    }

    if (!value) {
      return null
    }

    return <div className={cn("font-medium", labelClassName)}>{value}</div>;
  }, [
    label,
    labelFormatter,
    payload,
    hideLabel,
    labelClassName,
    config,
    labelKey,
  ])

  if (!active || !payload?.length) {
    return null
  }

  // With a single non-dot row the label nests inside the row itself.
  const nestLabel = payload.length === 1 && indicator !== "dot"

  return (
    (<div
      ref={ref}
      className={cn(
        "grid min-w-[8rem] items-start gap-1.5 rounded-lg border border-border/50 bg-background px-2.5 py-1.5 text-xs shadow-xl",
        className
      )}>
      {!nestLabel ? tooltipLabel : null}
      <div className="grid gap-1.5">
        {payload.map((item, index) => {
          const key = `${nameKey || item.name || item.dataKey || "value"}`
          const itemConfig = getPayloadConfigFromPayload(config, item, key)
          const indicatorColor = color || item.payload.fill || item.color

          return (
            (<div
              key={item.dataKey}
              className={cn(
                "flex w-full flex-wrap items-stretch gap-2 [&>svg]:h-2.5 [&>svg]:w-2.5 [&>svg]:text-muted-foreground",
                indicator === "dot" && "items-center"
              )}>
              {formatter && item?.value !== undefined && item.name ? (
                // Caller-supplied formatter fully replaces the row body.
                formatter(item.value, item.name, item, index, item.payload)
              ) : (
                <>
                  {itemConfig?.icon ? (
                    <itemConfig.icon />
                  ) : (
                    !hideIndicator && (
                      // Indicator swatch; colored via CSS custom properties.
                      <div
                        className={cn("shrink-0 rounded-[2px] border-[--color-border] bg-[--color-bg]", {
                          "h-2.5 w-2.5": indicator === "dot",
                          "w-1": indicator === "line",
                          "w-0 border-[1.5px] border-dashed bg-transparent":
                            indicator === "dashed",
                          "my-0.5": nestLabel && indicator === "dashed",
                        })}
                        style={
                          {
                            "--color-bg": indicatorColor,
                            "--color-border": indicatorColor
                          }
                        } />
                    )
                  )}
                  <div
                    className={cn(
                      "flex flex-1 justify-between leading-none",
                      nestLabel ? "items-end" : "items-center"
                    )}>
                    <div className="grid gap-1.5">
                      {nestLabel ? tooltipLabel : null}
                      <span className="text-muted-foreground">
                        {itemConfig?.label || item.name}
                      </span>
                    </div>
                    {/* NOTE(review): truthiness check hides a value of 0 —
                        confirm whether zero datapoints should be shown. */}
                    {item.value && (
                      <span className="font-mono font-medium tabular-nums text-foreground">
                        {item.value.toLocaleString()}
                      </span>
                    )}
                  </div>
                </>
              )}
            </div>)
          );
        })}
      </div>
    </div>)
  );
})
ChartTooltipContent.displayName = "ChartTooltip"
|
||||
|
||||
// Re-export of Recharts' Legend; pair its `content` with <ChartLegendContent>.
const ChartLegend = RechartsPrimitive.Legend
|
||||
|
||||
// Custom legend body for ChartLegend. Renders one entry per payload item,
// showing either the configured icon or a small color swatch.
// `payload`/`verticalAlign` are injected by Recharts when used as the
// Legend's `content` — assumed to follow Recharts' legend payload shape.
const ChartLegendContent = React.forwardRef((
  { className, hideIcon = false, payload, verticalAlign = "bottom", nameKey },
  ref
) => {
  const { config } = useChart()

  if (!payload?.length) {
    return null
  }

  return (
    (<div
      ref={ref}
      className={cn(
        "flex items-center justify-center gap-4",
        verticalAlign === "top" ? "pb-3" : "pt-3",
        className
      )}>
      {payload.map((item) => {
        const key = `${nameKey || item.dataKey || "value"}`
        const itemConfig = getPayloadConfigFromPayload(config, item, key)

        return (
          (<div
            key={item.value}
            className={cn(
              "flex items-center gap-1.5 [&>svg]:h-3 [&>svg]:w-3 [&>svg]:text-muted-foreground"
            )}>
            {itemConfig?.icon && !hideIcon ? (
              <itemConfig.icon />
            ) : (
              <div
                className="h-2 w-2 shrink-0 rounded-[2px]"
                style={{
                  backgroundColor: item.color,
                }} />
            )}
            {itemConfig?.label}
          </div>)
        );
      })}
    </div>)
  );
})
ChartLegendContent.displayName = "ChartLegend"
|
||||
|
||||
// Helper to extract item config from a payload.
// Resolution order: a string at payload[key] wins, then a string at
// payload.payload[key] (Recharts nests the original datum there), then
// `key` itself. Falls back to config[key] when the resolved key is absent.
function getPayloadConfigFromPayload(config, payload, key) {
  if (typeof payload !== "object" || payload === null) {
    return undefined;
  }

  const inner =
    "payload" in payload &&
    typeof payload.payload === "object" &&
    payload.payload !== null
      ? payload.payload
      : undefined;

  let lookupKey = key;
  if (key in payload && typeof payload[key] === "string") {
    lookupKey = payload[key];
  } else if (inner && key in inner && typeof inner[key] === "string") {
    lookupKey = inner[key];
  }

  return lookupKey in config ? config[lookupKey] : config[key];
}
|
||||
|
||||
// Public chart API (shadcn/ui-style wrapper around Recharts).
export {
  ChartContainer,
  ChartTooltip,
  ChartTooltipContent,
  ChartLegend,
  ChartLegendContent,
  ChartStyle,
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user