Compare commits
9 Commits
1eff047fb6
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| ec14a7dfb4 | |||
|
|
5fdb56e498 | ||
|
|
fc64391cfb | ||
|
|
f1448d512c | ||
|
|
5021d46ba0 | ||
|
|
97659421b5 | ||
|
|
8677820864 | ||
|
|
0c461d0656 | ||
| 3c4cb03df4 |
Binary file not shown.
118
backend/internal/backup/handler.go
Normal file
118
backend/internal/backup/handler.go
Normal file
@@ -0,0 +1,118 @@
|
|||||||
|
package backup
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
"github.com/atlasos/calypso/internal/common/logger"
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Handler handles backup-related API requests
type Handler struct {
	service *Service       // backup job business logic (list/get/create, Bacula sync)
	logger  *logger.Logger // structured logger shared with the rest of the app
}
|
||||||
|
|
||||||
|
// NewHandler creates a new backup handler
|
||||||
|
func NewHandler(service *Service, log *logger.Logger) *Handler {
|
||||||
|
return &Handler{
|
||||||
|
service: service,
|
||||||
|
logger: log,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListJobs lists backup jobs with optional filters
|
||||||
|
func (h *Handler) ListJobs(c *gin.Context) {
|
||||||
|
opts := ListJobsOptions{
|
||||||
|
Status: c.Query("status"),
|
||||||
|
JobType: c.Query("job_type"),
|
||||||
|
ClientName: c.Query("client_name"),
|
||||||
|
JobName: c.Query("job_name"),
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse pagination
|
||||||
|
var limit, offset int
|
||||||
|
if limitStr := c.Query("limit"); limitStr != "" {
|
||||||
|
if _, err := fmt.Sscanf(limitStr, "%d", &limit); err == nil {
|
||||||
|
opts.Limit = limit
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if offsetStr := c.Query("offset"); offsetStr != "" {
|
||||||
|
if _, err := fmt.Sscanf(offsetStr, "%d", &offset); err == nil {
|
||||||
|
opts.Offset = offset
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
jobs, totalCount, err := h.service.ListJobs(c.Request.Context(), opts)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to list jobs", "error", err)
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to list jobs"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if jobs == nil {
|
||||||
|
jobs = []Job{}
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"jobs": jobs,
|
||||||
|
"total": totalCount,
|
||||||
|
"limit": opts.Limit,
|
||||||
|
"offset": opts.Offset,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetJob retrieves a job by ID
|
||||||
|
func (h *Handler) GetJob(c *gin.Context) {
|
||||||
|
id := c.Param("id")
|
||||||
|
|
||||||
|
job, err := h.service.GetJob(c.Request.Context(), id)
|
||||||
|
if err != nil {
|
||||||
|
if err.Error() == "job not found" {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "job not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
h.logger.Error("Failed to get job", "error", err)
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to get job"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, job)
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateJob creates a new backup job
|
||||||
|
func (h *Handler) CreateJob(c *gin.Context) {
|
||||||
|
var req CreateJobRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate job type
|
||||||
|
validJobTypes := map[string]bool{
|
||||||
|
"Backup": true, "Restore": true, "Verify": true, "Copy": true, "Migrate": true,
|
||||||
|
}
|
||||||
|
if !validJobTypes[req.JobType] {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid job_type"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate job level
|
||||||
|
validJobLevels := map[string]bool{
|
||||||
|
"Full": true, "Incremental": true, "Differential": true, "Since": true,
|
||||||
|
}
|
||||||
|
if !validJobLevels[req.JobLevel] {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid job_level"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
job, err := h.service.CreateJob(c.Request.Context(), req)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to create job", "error", err)
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to create job"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusCreated, job)
|
||||||
|
}
|
||||||
781
backend/internal/backup/service.go
Normal file
781
backend/internal/backup/service.go
Normal file
@@ -0,0 +1,781 @@
|
|||||||
|
package backup
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"database/sql"
|
||||||
|
"fmt"
|
||||||
|
"os/exec"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/atlasos/calypso/internal/common/config"
|
||||||
|
"github.com/atlasos/calypso/internal/common/database"
|
||||||
|
"github.com/atlasos/calypso/internal/common/logger"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Service handles backup job operations
type Service struct {
	db       *database.DB   // Calypso application database (backup_jobs table)
	baculaDB *database.DB   // Direct connection to Bacula database; nil until SetBaculaDatabase is called
	logger   *logger.Logger // structured logger
}
|
||||||
|
|
||||||
|
// NewService creates a new backup service
|
||||||
|
func NewService(db *database.DB, log *logger.Logger) *Service {
|
||||||
|
return &Service{
|
||||||
|
db: db,
|
||||||
|
logger: log,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetBaculaDatabase sets up direct connection to Bacula database
|
||||||
|
func (s *Service) SetBaculaDatabase(cfg config.DatabaseConfig, baculaDBName string) error {
|
||||||
|
// Create new database config for Bacula database
|
||||||
|
baculaCfg := cfg
|
||||||
|
baculaCfg.Database = baculaDBName // Override database name
|
||||||
|
|
||||||
|
// Create connection to Bacula database
|
||||||
|
baculaDB, err := database.NewConnection(baculaCfg)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to connect to Bacula database: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
s.baculaDB = baculaDB
|
||||||
|
s.logger.Info("Connected to Bacula database", "database", baculaDBName, "host", cfg.Host, "port", cfg.Port)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Job represents a backup job
type Job struct {
	ID           string     `json:"id"`       // Calypso row identifier (backup_jobs.id)
	JobID        int        `json:"job_id"`   // Bacula JobId (unique per job in the catalog)
	JobName      string     `json:"job_name"`
	ClientName   string     `json:"client_name"` // "unknown" when the client could not be resolved
	JobType      string     `json:"job_type"`    // "Backup", "Restore", "Verify", "Copy", "Migrate"
	JobLevel     string     `json:"job_level"`   // "Full", "Incremental", "Differential", "Since"
	Status       string     `json:"status"`      // "Running", "Completed", "Failed", "Canceled", "Waiting"
	BytesWritten int64      `json:"bytes_written"`
	FilesWritten int        `json:"files_written"`
	// Pointer fields are nil (and omitted from JSON) when the underlying
	// database column is NULL.
	DurationSeconds *int       `json:"duration_seconds,omitempty"`
	StartedAt       *time.Time `json:"started_at,omitempty"`
	EndedAt         *time.Time `json:"ended_at,omitempty"`
	ErrorMessage    *string    `json:"error_message,omitempty"`
	StorageName     *string    `json:"storage_name,omitempty"`
	PoolName        *string    `json:"pool_name,omitempty"`
	VolumeName      *string    `json:"volume_name,omitempty"`
	CreatedAt       time.Time  `json:"created_at"`
	UpdatedAt       time.Time  `json:"updated_at"`
}
|
||||||
|
|
||||||
|
// ListJobsOptions represents filtering and pagination options
type ListJobsOptions struct {
	Status     string // Filter by exact status: "Running", "Completed", "Failed", etc.
	JobType    string // Filter by exact job type: "Backup", "Restore", etc.
	ClientName string // Filter by client name (case-insensitive substring match)
	JobName    string // Filter by job name (case-insensitive substring match)
	Limit      int    // Number of results to return (service defaults to 50, caps at 100)
	Offset     int    // Offset for pagination
}
|
||||||
|
|
||||||
|
// SyncJobsFromBacula syncs jobs from Bacula/Bareos to the database
|
||||||
|
// Tries to query Bacula database directly first, falls back to bconsole if database access fails
|
||||||
|
func (s *Service) SyncJobsFromBacula(ctx context.Context) error {
|
||||||
|
s.logger.Info("Starting sync from Bacula database", "bacula_db_configured", s.baculaDB != nil)
|
||||||
|
|
||||||
|
// Check if Bacula database connection is configured
|
||||||
|
if s.baculaDB == nil {
|
||||||
|
s.logger.Warn("Bacula database connection not configured, trying bconsole fallback")
|
||||||
|
return s.syncFromBconsole(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try to query Bacula database directly (if user has access)
|
||||||
|
jobs, err := s.queryBaculaDatabase(ctx)
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Warn("Failed to query Bacula database directly, trying bconsole", "error", err)
|
||||||
|
// Fallback to bconsole
|
||||||
|
return s.syncFromBconsole(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
s.logger.Info("Queried Bacula database", "jobs_found", len(jobs))
|
||||||
|
|
||||||
|
if len(jobs) == 0 {
|
||||||
|
s.logger.Debug("No jobs found in Bacula database")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Upsert jobs to Calypso database
|
||||||
|
successCount := 0
|
||||||
|
errorCount := 0
|
||||||
|
for _, job := range jobs {
|
||||||
|
err := s.upsertJob(ctx, job)
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Error("Failed to upsert job", "job_id", job.JobID, "job_name", job.JobName, "error", err)
|
||||||
|
errorCount++
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
successCount++
|
||||||
|
s.logger.Debug("Upserted job", "job_id", job.JobID, "job_name", job.JobName)
|
||||||
|
}
|
||||||
|
|
||||||
|
s.logger.Info("Synced jobs from Bacula database", "total", len(jobs), "success", successCount, "errors", errorCount)
|
||||||
|
|
||||||
|
if errorCount > 0 {
|
||||||
|
return fmt.Errorf("failed to sync %d out of %d jobs", errorCount, len(jobs))
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// queryBaculaDatabase queries Bacula database directly
|
||||||
|
// Uses direct connection to Bacula database (no dblink needed)
|
||||||
|
func (s *Service) queryBaculaDatabase(ctx context.Context) ([]Job, error) {
|
||||||
|
// Use direct connection to Bacula database
|
||||||
|
if s.baculaDB == nil {
|
||||||
|
return nil, fmt.Errorf("Bacula database connection not configured")
|
||||||
|
}
|
||||||
|
|
||||||
|
return s.queryBaculaDirect(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
// queryBaculaDirect queries Job table directly (Bacularis approach)
|
||||||
|
// Assumes Bacula tables are in same database or accessible via search_path
|
||||||
|
func (s *Service) queryBaculaDirect(ctx context.Context) ([]Job, error) {
|
||||||
|
// Bacularis-style query: direct query to Job table with JOIN to Client
|
||||||
|
// This is the standard way Bacularis queries Bacula database
|
||||||
|
query := `
|
||||||
|
SELECT
|
||||||
|
j.JobId as job_id,
|
||||||
|
j.Name as job_name,
|
||||||
|
COALESCE(c.Name, 'unknown') as client_name,
|
||||||
|
CASE
|
||||||
|
WHEN j.Type = 'B' THEN 'Backup'
|
||||||
|
WHEN j.Type = 'R' THEN 'Restore'
|
||||||
|
WHEN j.Type = 'V' THEN 'Verify'
|
||||||
|
WHEN j.Type = 'C' THEN 'Copy'
|
||||||
|
WHEN j.Type = 'M' THEN 'Migrate'
|
||||||
|
ELSE 'Backup'
|
||||||
|
END as job_type,
|
||||||
|
CASE
|
||||||
|
WHEN j.Level = 'F' THEN 'Full'
|
||||||
|
WHEN j.Level = 'I' THEN 'Incremental'
|
||||||
|
WHEN j.Level = 'D' THEN 'Differential'
|
||||||
|
WHEN j.Level = 'S' THEN 'Since'
|
||||||
|
ELSE 'Full'
|
||||||
|
END as job_level,
|
||||||
|
CASE
|
||||||
|
WHEN j.JobStatus = 'T' THEN 'Running'
|
||||||
|
WHEN j.JobStatus = 'C' THEN 'Completed'
|
||||||
|
WHEN j.JobStatus = 'f' OR j.JobStatus = 'F' THEN 'Failed'
|
||||||
|
WHEN j.JobStatus = 'A' THEN 'Canceled'
|
||||||
|
WHEN j.JobStatus = 'W' THEN 'Waiting'
|
||||||
|
ELSE 'Waiting'
|
||||||
|
END as status,
|
||||||
|
COALESCE(j.JobBytes, 0) as bytes_written,
|
||||||
|
COALESCE(j.JobFiles, 0) as files_written,
|
||||||
|
j.StartTime as started_at,
|
||||||
|
j.EndTime as ended_at
|
||||||
|
FROM Job j
|
||||||
|
LEFT JOIN Client c ON j.ClientId = c.ClientId
|
||||||
|
ORDER BY j.StartTime DESC
|
||||||
|
LIMIT 1000
|
||||||
|
`
|
||||||
|
|
||||||
|
// Use direct connection to Bacula database
|
||||||
|
rows, err := s.baculaDB.QueryContext(ctx, query)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to query Bacula Job table: %w", err)
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var jobs []Job
|
||||||
|
for rows.Next() {
|
||||||
|
var job Job
|
||||||
|
var startedAt, endedAt sql.NullTime
|
||||||
|
|
||||||
|
err := rows.Scan(
|
||||||
|
&job.JobID, &job.JobName, &job.ClientName,
|
||||||
|
&job.JobType, &job.JobLevel, &job.Status,
|
||||||
|
&job.BytesWritten, &job.FilesWritten, &startedAt, &endedAt,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Error("Failed to scan Bacula job", "error", err)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if startedAt.Valid {
|
||||||
|
job.StartedAt = &startedAt.Time
|
||||||
|
}
|
||||||
|
if endedAt.Valid {
|
||||||
|
job.EndedAt = &endedAt.Time
|
||||||
|
// Calculate duration if both start and end times are available
|
||||||
|
if job.StartedAt != nil {
|
||||||
|
duration := int(endedAt.Time.Sub(*job.StartedAt).Seconds())
|
||||||
|
job.DurationSeconds = &duration
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
jobs = append(jobs, job)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := rows.Err(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(jobs) > 0 {
|
||||||
|
s.logger.Info("Successfully queried Bacula database (direct)", "count", len(jobs))
|
||||||
|
return jobs, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return jobs, nil // Return empty list, not an error
|
||||||
|
}
|
||||||
|
|
||||||
|
// syncFromBconsole syncs jobs using bconsole command (fallback method)
|
||||||
|
func (s *Service) syncFromBconsole(ctx context.Context) error {
|
||||||
|
// Execute bconsole command to list jobs
|
||||||
|
cmd := exec.CommandContext(ctx, "sh", "-c", "echo -e 'list jobs\nquit' | bconsole")
|
||||||
|
|
||||||
|
output, err := cmd.CombinedOutput()
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Debug("Failed to execute bconsole", "error", err, "output", string(output))
|
||||||
|
return nil // Don't fail, just return empty
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(output) == 0 {
|
||||||
|
s.logger.Debug("bconsole returned empty output")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse bconsole output
|
||||||
|
jobs := s.parseBconsoleOutput(ctx, string(output))
|
||||||
|
|
||||||
|
if len(jobs) == 0 {
|
||||||
|
s.logger.Debug("No jobs found in bconsole output")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Upsert jobs to database
|
||||||
|
successCount := 0
|
||||||
|
for _, job := range jobs {
|
||||||
|
err := s.upsertJob(ctx, job)
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Error("Failed to upsert job", "job_id", job.JobID, "error", err)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
successCount++
|
||||||
|
}
|
||||||
|
|
||||||
|
s.logger.Info("Synced jobs from bconsole", "total", len(jobs), "success", successCount)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseBconsoleOutput parses bconsole "list jobs" output
|
||||||
|
func (s *Service) parseBconsoleOutput(ctx context.Context, output string) []Job {
|
||||||
|
var jobs []Job
|
||||||
|
lines := strings.Split(output, "\n")
|
||||||
|
|
||||||
|
// Skip header lines until we find the data rows
|
||||||
|
inDataSection := false
|
||||||
|
for _, line := range lines {
|
||||||
|
line = strings.TrimSpace(line)
|
||||||
|
|
||||||
|
// Skip empty lines and separators
|
||||||
|
if line == "" || strings.HasPrefix(line, "+") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start data section when we see header
|
||||||
|
if strings.HasPrefix(line, "| jobid") {
|
||||||
|
inDataSection = true
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Stop at footer separator
|
||||||
|
if strings.HasPrefix(line, "*") {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
if !inDataSection {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse data row: | jobid | name | starttime | type | level | jobfiles | jobbytes | jobstatus |
|
||||||
|
if strings.HasPrefix(line, "|") {
|
||||||
|
parts := strings.Split(line, "|")
|
||||||
|
if len(parts) < 9 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract fields (skip first empty part)
|
||||||
|
jobIDStr := strings.TrimSpace(parts[1])
|
||||||
|
jobName := strings.TrimSpace(parts[2])
|
||||||
|
startTimeStr := strings.TrimSpace(parts[3])
|
||||||
|
jobTypeChar := strings.TrimSpace(parts[4])
|
||||||
|
jobLevelChar := strings.TrimSpace(parts[5])
|
||||||
|
jobFilesStr := strings.TrimSpace(parts[6])
|
||||||
|
jobBytesStr := strings.TrimSpace(parts[7])
|
||||||
|
jobStatusChar := strings.TrimSpace(parts[8])
|
||||||
|
|
||||||
|
// Parse job ID
|
||||||
|
jobID, err := strconv.Atoi(jobIDStr)
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Warn("Failed to parse job ID", "value", jobIDStr, "error", err)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse start time
|
||||||
|
var startedAt *time.Time
|
||||||
|
if startTimeStr != "" && startTimeStr != "-" {
|
||||||
|
// Format: 2025-12-27 23:05:02
|
||||||
|
parsedTime, err := time.Parse("2006-01-02 15:04:05", startTimeStr)
|
||||||
|
if err == nil {
|
||||||
|
startedAt = &parsedTime
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Map job type
|
||||||
|
jobType := "Backup"
|
||||||
|
switch jobTypeChar {
|
||||||
|
case "B":
|
||||||
|
jobType = "Backup"
|
||||||
|
case "R":
|
||||||
|
jobType = "Restore"
|
||||||
|
case "V":
|
||||||
|
jobType = "Verify"
|
||||||
|
case "C":
|
||||||
|
jobType = "Copy"
|
||||||
|
case "M":
|
||||||
|
jobType = "Migrate"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Map job level
|
||||||
|
jobLevel := "Full"
|
||||||
|
switch jobLevelChar {
|
||||||
|
case "F":
|
||||||
|
jobLevel = "Full"
|
||||||
|
case "I":
|
||||||
|
jobLevel = "Incremental"
|
||||||
|
case "D":
|
||||||
|
jobLevel = "Differential"
|
||||||
|
case "S":
|
||||||
|
jobLevel = "Since"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse files and bytes
|
||||||
|
filesWritten := 0
|
||||||
|
if jobFilesStr != "" && jobFilesStr != "-" {
|
||||||
|
if f, err := strconv.Atoi(jobFilesStr); err == nil {
|
||||||
|
filesWritten = f
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
bytesWritten := int64(0)
|
||||||
|
if jobBytesStr != "" && jobBytesStr != "-" {
|
||||||
|
if b, err := strconv.ParseInt(jobBytesStr, 10, 64); err == nil {
|
||||||
|
bytesWritten = b
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Map job status
|
||||||
|
status := "Waiting"
|
||||||
|
switch strings.ToLower(jobStatusChar) {
|
||||||
|
case "t", "T":
|
||||||
|
status = "Running"
|
||||||
|
case "c", "C":
|
||||||
|
status = "Completed"
|
||||||
|
case "f", "F":
|
||||||
|
status = "Failed"
|
||||||
|
case "A":
|
||||||
|
status = "Canceled"
|
||||||
|
case "W":
|
||||||
|
status = "Waiting"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try to extract client name from job name (common pattern: JobName-ClientName)
|
||||||
|
clientName := "unknown"
|
||||||
|
// For now, use job name as client name if it looks like a client name
|
||||||
|
// In real implementation, we'd query job details from Bacula
|
||||||
|
if jobName != "" {
|
||||||
|
// Try to get client name from job details
|
||||||
|
clientNameFromJob := s.getClientNameFromJob(ctx, jobID)
|
||||||
|
if clientNameFromJob != "" {
|
||||||
|
clientName = clientNameFromJob
|
||||||
|
} else {
|
||||||
|
// Fallback: use job name as client name
|
||||||
|
clientName = jobName
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
job := Job{
|
||||||
|
JobID: jobID,
|
||||||
|
JobName: jobName,
|
||||||
|
ClientName: clientName,
|
||||||
|
JobType: jobType,
|
||||||
|
JobLevel: jobLevel,
|
||||||
|
Status: status,
|
||||||
|
BytesWritten: bytesWritten,
|
||||||
|
FilesWritten: filesWritten,
|
||||||
|
StartedAt: startedAt,
|
||||||
|
}
|
||||||
|
|
||||||
|
jobs = append(jobs, job)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return jobs
|
||||||
|
}
|
||||||
|
|
||||||
|
// getClientNameFromJob gets client name from job details using bconsole
|
||||||
|
func (s *Service) getClientNameFromJob(ctx context.Context, jobID int) string {
|
||||||
|
// Execute bconsole to get job details
|
||||||
|
cmd := exec.CommandContext(ctx, "sh", "-c", fmt.Sprintf("echo -e 'list job jobid=%d\nquit' | bconsole", jobID))
|
||||||
|
|
||||||
|
output, err := cmd.CombinedOutput()
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Debug("Failed to get job details", "job_id", jobID, "error", err)
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse output to find Client line
|
||||||
|
lines := strings.Split(string(output), "\n")
|
||||||
|
for _, line := range lines {
|
||||||
|
line = strings.TrimSpace(line)
|
||||||
|
if strings.HasPrefix(line, "Client:") {
|
||||||
|
parts := strings.Split(line, ":")
|
||||||
|
if len(parts) >= 2 {
|
||||||
|
return strings.TrimSpace(parts[1])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// upsertJob inserts or updates a job in the database.
// The Bacula job_id is the conflict key, so re-syncing the same job refreshes
// its mutable fields (status, counters, timestamps) in place.
func (s *Service) upsertJob(ctx context.Context, job Job) error {
	query := `
		INSERT INTO backup_jobs (
			job_id, job_name, client_name, job_type, job_level, status,
			bytes_written, files_written, started_at, ended_at, duration_seconds, updated_at
		) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, NOW())
		ON CONFLICT (job_id) DO UPDATE SET
			job_name = EXCLUDED.job_name,
			client_name = EXCLUDED.client_name,
			job_type = EXCLUDED.job_type,
			job_level = EXCLUDED.job_level,
			status = EXCLUDED.status,
			bytes_written = EXCLUDED.bytes_written,
			files_written = EXCLUDED.files_written,
			started_at = EXCLUDED.started_at,
			ended_at = EXCLUDED.ended_at,
			duration_seconds = EXCLUDED.duration_seconds,
			updated_at = NOW()
	`

	// Use job name as client name if client_name is empty (we'll improve this later)
	clientName := job.ClientName
	if clientName == "" {
		clientName = "unknown"
	}

	result, err := s.db.ExecContext(ctx, query,
		job.JobID, job.JobName, clientName, job.JobType, job.JobLevel, job.Status,
		job.BytesWritten, job.FilesWritten, job.StartedAt, job.EndedAt, job.DurationSeconds,
	)

	if err != nil {
		s.logger.Error("Database error in upsertJob", "job_id", job.JobID, "error", err)
		return err
	}

	// RowsAffected error deliberately ignored: the value is log-only.
	rowsAffected, _ := result.RowsAffected()
	s.logger.Debug("Upserted job to database", "job_id", job.JobID, "rows_affected", rowsAffected)

	return nil
}
|
||||||
|
|
||||||
|
// ListJobs lists backup jobs with optional filters
|
||||||
|
func (s *Service) ListJobs(ctx context.Context, opts ListJobsOptions) ([]Job, int, error) {
|
||||||
|
// Try to sync jobs from Bacula first (non-blocking - if it fails, continue with database)
|
||||||
|
// Don't return error if sync fails, just log it and continue
|
||||||
|
// This allows the API to work even if bconsole is not available
|
||||||
|
s.logger.Info("ListJobs called, syncing from Bacula first")
|
||||||
|
syncErr := s.SyncJobsFromBacula(ctx)
|
||||||
|
if syncErr != nil {
|
||||||
|
s.logger.Warn("Failed to sync jobs from Bacula, using database only", "error", syncErr)
|
||||||
|
// Continue anyway - we'll use whatever is in the database
|
||||||
|
} else {
|
||||||
|
s.logger.Info("Successfully synced jobs from Bacula")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build WHERE clause
|
||||||
|
whereClauses := []string{"1=1"}
|
||||||
|
args := []interface{}{}
|
||||||
|
argIndex := 1
|
||||||
|
|
||||||
|
if opts.Status != "" {
|
||||||
|
whereClauses = append(whereClauses, fmt.Sprintf("status = $%d", argIndex))
|
||||||
|
args = append(args, opts.Status)
|
||||||
|
argIndex++
|
||||||
|
}
|
||||||
|
|
||||||
|
if opts.JobType != "" {
|
||||||
|
whereClauses = append(whereClauses, fmt.Sprintf("job_type = $%d", argIndex))
|
||||||
|
args = append(args, opts.JobType)
|
||||||
|
argIndex++
|
||||||
|
}
|
||||||
|
|
||||||
|
if opts.ClientName != "" {
|
||||||
|
whereClauses = append(whereClauses, fmt.Sprintf("client_name ILIKE $%d", argIndex))
|
||||||
|
args = append(args, "%"+opts.ClientName+"%")
|
||||||
|
argIndex++
|
||||||
|
}
|
||||||
|
|
||||||
|
if opts.JobName != "" {
|
||||||
|
whereClauses = append(whereClauses, fmt.Sprintf("job_name ILIKE $%d", argIndex))
|
||||||
|
args = append(args, "%"+opts.JobName+"%")
|
||||||
|
argIndex++
|
||||||
|
}
|
||||||
|
|
||||||
|
whereClause := ""
|
||||||
|
if len(whereClauses) > 0 {
|
||||||
|
whereClause = "WHERE " + whereClauses[0]
|
||||||
|
for i := 1; i < len(whereClauses); i++ {
|
||||||
|
whereClause += " AND " + whereClauses[i]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get total count
|
||||||
|
countQuery := fmt.Sprintf("SELECT COUNT(*) FROM backup_jobs %s", whereClause)
|
||||||
|
var totalCount int
|
||||||
|
err := s.db.QueryRowContext(ctx, countQuery, args...).Scan(&totalCount)
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, fmt.Errorf("failed to count jobs: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set default limit
|
||||||
|
limit := opts.Limit
|
||||||
|
if limit <= 0 {
|
||||||
|
limit = 50
|
||||||
|
}
|
||||||
|
if limit > 100 {
|
||||||
|
limit = 100
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build query with pagination
|
||||||
|
query := fmt.Sprintf(`
|
||||||
|
SELECT id, job_id, job_name, client_name, job_type, job_level, status,
|
||||||
|
bytes_written, files_written, duration_seconds,
|
||||||
|
started_at, ended_at, error_message,
|
||||||
|
storage_name, pool_name, volume_name,
|
||||||
|
created_at, updated_at
|
||||||
|
FROM backup_jobs
|
||||||
|
%s
|
||||||
|
ORDER BY started_at DESC NULLS LAST, created_at DESC
|
||||||
|
LIMIT $%d OFFSET $%d
|
||||||
|
`, whereClause, argIndex, argIndex+1)
|
||||||
|
|
||||||
|
args = append(args, limit, opts.Offset)
|
||||||
|
|
||||||
|
rows, err := s.db.QueryContext(ctx, query, args...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, fmt.Errorf("failed to query jobs: %w", err)
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var jobs []Job
|
||||||
|
for rows.Next() {
|
||||||
|
var job Job
|
||||||
|
var durationSeconds sql.NullInt64
|
||||||
|
var startedAt, endedAt sql.NullTime
|
||||||
|
var errorMessage, storageName, poolName, volumeName sql.NullString
|
||||||
|
|
||||||
|
err := rows.Scan(
|
||||||
|
&job.ID, &job.JobID, &job.JobName, &job.ClientName,
|
||||||
|
&job.JobType, &job.JobLevel, &job.Status,
|
||||||
|
&job.BytesWritten, &job.FilesWritten, &durationSeconds,
|
||||||
|
&startedAt, &endedAt, &errorMessage,
|
||||||
|
&storageName, &poolName, &volumeName,
|
||||||
|
&job.CreatedAt, &job.UpdatedAt,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Error("Failed to scan job", "error", err)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if durationSeconds.Valid {
|
||||||
|
dur := int(durationSeconds.Int64)
|
||||||
|
job.DurationSeconds = &dur
|
||||||
|
}
|
||||||
|
if startedAt.Valid {
|
||||||
|
job.StartedAt = &startedAt.Time
|
||||||
|
}
|
||||||
|
if endedAt.Valid {
|
||||||
|
job.EndedAt = &endedAt.Time
|
||||||
|
}
|
||||||
|
if errorMessage.Valid {
|
||||||
|
job.ErrorMessage = &errorMessage.String
|
||||||
|
}
|
||||||
|
if storageName.Valid {
|
||||||
|
job.StorageName = &storageName.String
|
||||||
|
}
|
||||||
|
if poolName.Valid {
|
||||||
|
job.PoolName = &poolName.String
|
||||||
|
}
|
||||||
|
if volumeName.Valid {
|
||||||
|
job.VolumeName = &volumeName.String
|
||||||
|
}
|
||||||
|
|
||||||
|
jobs = append(jobs, job)
|
||||||
|
}
|
||||||
|
|
||||||
|
return jobs, totalCount, rows.Err()
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetJob retrieves a job by ID
|
||||||
|
func (s *Service) GetJob(ctx context.Context, id string) (*Job, error) {
|
||||||
|
query := `
|
||||||
|
SELECT id, job_id, job_name, client_name, job_type, job_level, status,
|
||||||
|
bytes_written, files_written, duration_seconds,
|
||||||
|
started_at, ended_at, error_message,
|
||||||
|
storage_name, pool_name, volume_name,
|
||||||
|
created_at, updated_at
|
||||||
|
FROM backup_jobs
|
||||||
|
WHERE id = $1
|
||||||
|
`
|
||||||
|
|
||||||
|
var job Job
|
||||||
|
var durationSeconds sql.NullInt64
|
||||||
|
var startedAt, endedAt sql.NullTime
|
||||||
|
var errorMessage, storageName, poolName, volumeName sql.NullString
|
||||||
|
|
||||||
|
err := s.db.QueryRowContext(ctx, query, id).Scan(
|
||||||
|
&job.ID, &job.JobID, &job.JobName, &job.ClientName,
|
||||||
|
&job.JobType, &job.JobLevel, &job.Status,
|
||||||
|
&job.BytesWritten, &job.FilesWritten, &durationSeconds,
|
||||||
|
&startedAt, &endedAt, &errorMessage,
|
||||||
|
&storageName, &poolName, &volumeName,
|
||||||
|
&job.CreatedAt, &job.UpdatedAt,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
if err == sql.ErrNoRows {
|
||||||
|
return nil, fmt.Errorf("job not found")
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("failed to get job: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if durationSeconds.Valid {
|
||||||
|
dur := int(durationSeconds.Int64)
|
||||||
|
job.DurationSeconds = &dur
|
||||||
|
}
|
||||||
|
if startedAt.Valid {
|
||||||
|
job.StartedAt = &startedAt.Time
|
||||||
|
}
|
||||||
|
if endedAt.Valid {
|
||||||
|
job.EndedAt = &endedAt.Time
|
||||||
|
}
|
||||||
|
if errorMessage.Valid {
|
||||||
|
job.ErrorMessage = &errorMessage.String
|
||||||
|
}
|
||||||
|
if storageName.Valid {
|
||||||
|
job.StorageName = &storageName.String
|
||||||
|
}
|
||||||
|
if poolName.Valid {
|
||||||
|
job.PoolName = &poolName.String
|
||||||
|
}
|
||||||
|
if volumeName.Valid {
|
||||||
|
job.VolumeName = &volumeName.String
|
||||||
|
}
|
||||||
|
|
||||||
|
return &job, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateJobRequest represents a request to create a new backup job
type CreateJobRequest struct {
	JobName    string `json:"job_name" binding:"required"`
	ClientName string `json:"client_name" binding:"required"`
	JobType    string `json:"job_type" binding:"required"`  // 'Backup', 'Restore', 'Verify', 'Copy', 'Migrate'
	JobLevel   string `json:"job_level" binding:"required"` // 'Full', 'Incremental', 'Differential', 'Since'
	// Optional placement hints; stored as-is when provided.
	StorageName *string `json:"storage_name,omitempty"`
	PoolName    *string `json:"pool_name,omitempty"`
}
|
||||||
|
|
||||||
|
// CreateJob creates a new backup job.
// The job is inserted with status "Waiting" and zero counters, and the full
// row (including database-generated fields) is returned.
func (s *Service) CreateJob(ctx context.Context, req CreateJobRequest) (*Job, error) {
	// Generate a unique job ID (in real implementation, this would come from Bareos).
	// NOTE(review): MAX(job_id)+1 is racy — two concurrent CreateJob calls can
	// pick the same id and one will fail on the unique constraint; a sequence
	// would be safer. Confirm before relying on concurrent creation.
	var jobID int
	err := s.db.QueryRowContext(ctx, `
		SELECT COALESCE(MAX(job_id), 0) + 1 FROM backup_jobs
	`).Scan(&jobID)
	if err != nil {
		return nil, fmt.Errorf("failed to generate job ID: %w", err)
	}

	// Insert the job and read back the full row in one round trip.
	query := `
		INSERT INTO backup_jobs (
			job_id, job_name, client_name, job_type, job_level,
			status, bytes_written, files_written,
			storage_name, pool_name, started_at
		) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, NOW())
		RETURNING id, job_id, job_name, client_name, job_type, job_level, status,
		          bytes_written, files_written, duration_seconds,
		          started_at, ended_at, error_message,
		          storage_name, pool_name, volume_name,
		          created_at, updated_at
	`

	var job Job
	var durationSeconds sql.NullInt64
	var startedAt, endedAt sql.NullTime
	var errorMessage, storageName, poolName, volumeName sql.NullString

	// NOTE(review): started_at is set to NOW() even though the job is created
	// as "Waiting" — presumably overwritten on sync; confirm intended.
	err = s.db.QueryRowContext(ctx, query,
		jobID, req.JobName, req.ClientName, req.JobType, req.JobLevel,
		"Waiting", 0, 0,
		req.StorageName, req.PoolName,
	).Scan(
		&job.ID, &job.JobID, &job.JobName, &job.ClientName,
		&job.JobType, &job.JobLevel, &job.Status,
		&job.BytesWritten, &job.FilesWritten, &durationSeconds,
		&startedAt, &endedAt, &errorMessage,
		&storageName, &poolName, &volumeName,
		&job.CreatedAt, &job.UpdatedAt,
	)
	if err != nil {
		return nil, fmt.Errorf("failed to create job: %w", err)
	}

	// Unwrap nullable columns into the Job's optional pointer fields.
	if durationSeconds.Valid {
		dur := int(durationSeconds.Int64)
		job.DurationSeconds = &dur
	}
	if startedAt.Valid {
		job.StartedAt = &startedAt.Time
	}
	if endedAt.Valid {
		job.EndedAt = &endedAt.Time
	}
	if errorMessage.Valid {
		job.ErrorMessage = &errorMessage.String
	}
	if storageName.Valid {
		job.StorageName = &storageName.String
	}
	if poolName.Valid {
		job.PoolName = &poolName.String
	}
	if volumeName.Valid {
		job.VolumeName = &volumeName.String
	}

	s.logger.Info("Backup job created",
		"job_id", job.JobID,
		"job_name", job.JobName,
		"client_name", job.ClientName,
		"job_type", job.JobType,
	)

	return &job, nil
}
|
||||||
@@ -59,7 +59,7 @@ func RunMigrations(ctx context.Context, db *DB) error {
|
|||||||
|
|
||||||
if _, err := tx.ExecContext(ctx, string(sql)); err != nil {
|
if _, err := tx.ExecContext(ctx, string(sql)); err != nil {
|
||||||
tx.Rollback()
|
tx.Rollback()
|
||||||
return fmt.Errorf("failed to execute migration %s: %w", migration.Version, err)
|
return fmt.Errorf("failed to execute migration %d: %w", migration.Version, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Record migration
|
// Record migration
|
||||||
@@ -68,11 +68,11 @@ func RunMigrations(ctx context.Context, db *DB) error {
|
|||||||
migration.Version,
|
migration.Version,
|
||||||
); err != nil {
|
); err != nil {
|
||||||
tx.Rollback()
|
tx.Rollback()
|
||||||
return fmt.Errorf("failed to record migration %s: %w", migration.Version, err)
|
return fmt.Errorf("failed to record migration %d: %w", migration.Version, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := tx.Commit(); err != nil {
|
if err := tx.Commit(); err != nil {
|
||||||
return fmt.Errorf("failed to commit migration %s: %w", migration.Version, err)
|
return fmt.Errorf("failed to commit migration %d: %w", migration.Version, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
log.Info("Migration applied successfully", "version", migration.Version)
|
log.Info("Migration applied successfully", "version", migration.Version)
|
||||||
|
|||||||
@@ -0,0 +1,3 @@
|
|||||||
|
-- Add vendor column to virtual_tape_libraries table
|
||||||
|
ALTER TABLE virtual_tape_libraries ADD COLUMN IF NOT EXISTS vendor VARCHAR(255);
|
||||||
|
|
||||||
@@ -0,0 +1,45 @@
|
|||||||
|
-- Add user groups feature
|
||||||
|
-- Groups table
|
||||||
|
CREATE TABLE IF NOT EXISTS groups (
|
||||||
|
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||||
|
name VARCHAR(255) NOT NULL UNIQUE,
|
||||||
|
description TEXT,
|
||||||
|
is_system BOOLEAN NOT NULL DEFAULT false,
|
||||||
|
created_at TIMESTAMP NOT NULL DEFAULT NOW(),
|
||||||
|
updated_at TIMESTAMP NOT NULL DEFAULT NOW()
|
||||||
|
);
|
||||||
|
|
||||||
|
-- User groups junction table
|
||||||
|
CREATE TABLE IF NOT EXISTS user_groups (
|
||||||
|
user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
|
||||||
|
group_id UUID NOT NULL REFERENCES groups(id) ON DELETE CASCADE,
|
||||||
|
assigned_at TIMESTAMP NOT NULL DEFAULT NOW(),
|
||||||
|
assigned_by UUID REFERENCES users(id),
|
||||||
|
PRIMARY KEY (user_id, group_id)
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Group roles junction table (groups can have roles)
|
||||||
|
CREATE TABLE IF NOT EXISTS group_roles (
|
||||||
|
group_id UUID NOT NULL REFERENCES groups(id) ON DELETE CASCADE,
|
||||||
|
role_id UUID NOT NULL REFERENCES roles(id) ON DELETE CASCADE,
|
||||||
|
granted_at TIMESTAMP NOT NULL DEFAULT NOW(),
|
||||||
|
PRIMARY KEY (group_id, role_id)
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Indexes
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_groups_name ON groups(name);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_user_groups_user_id ON user_groups(user_id);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_user_groups_group_id ON user_groups(group_id);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_group_roles_group_id ON group_roles(group_id);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_group_roles_role_id ON group_roles(role_id);
|
||||||
|
|
||||||
|
-- Insert default system groups
|
||||||
|
INSERT INTO groups (name, description, is_system) VALUES
|
||||||
|
('wheel', 'System administrators group', true),
|
||||||
|
('operators', 'System operators group', true),
|
||||||
|
('backup', 'Backup operators group', true),
|
||||||
|
('auditors', 'Auditors group', true),
|
||||||
|
('storage_admins', 'Storage administrators group', true),
|
||||||
|
('services', 'Service accounts group', true)
|
||||||
|
ON CONFLICT (name) DO NOTHING;
|
||||||
|
|
||||||
@@ -0,0 +1,34 @@
|
|||||||
|
-- AtlasOS - Calypso
|
||||||
|
-- Backup Jobs Schema
|
||||||
|
-- Version: 9.0
|
||||||
|
|
||||||
|
-- Backup jobs table
|
||||||
|
CREATE TABLE IF NOT EXISTS backup_jobs (
|
||||||
|
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||||
|
job_id INTEGER NOT NULL UNIQUE, -- Bareos job ID
|
||||||
|
job_name VARCHAR(255) NOT NULL,
|
||||||
|
client_name VARCHAR(255) NOT NULL,
|
||||||
|
job_type VARCHAR(50) NOT NULL, -- 'Backup', 'Restore', 'Verify', 'Copy', 'Migrate'
|
||||||
|
job_level VARCHAR(50) NOT NULL, -- 'Full', 'Incremental', 'Differential', 'Since'
|
||||||
|
status VARCHAR(50) NOT NULL, -- 'Running', 'Completed', 'Failed', 'Canceled', 'Waiting'
|
||||||
|
bytes_written BIGINT NOT NULL DEFAULT 0,
|
||||||
|
files_written INTEGER NOT NULL DEFAULT 0,
|
||||||
|
duration_seconds INTEGER,
|
||||||
|
started_at TIMESTAMP,
|
||||||
|
ended_at TIMESTAMP,
|
||||||
|
error_message TEXT,
|
||||||
|
storage_name VARCHAR(255),
|
||||||
|
pool_name VARCHAR(255),
|
||||||
|
volume_name VARCHAR(255),
|
||||||
|
created_at TIMESTAMP NOT NULL DEFAULT NOW(),
|
||||||
|
updated_at TIMESTAMP NOT NULL DEFAULT NOW()
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Indexes for performance
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_backup_jobs_job_id ON backup_jobs(job_id);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_backup_jobs_job_name ON backup_jobs(job_name);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_backup_jobs_client_name ON backup_jobs(client_name);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_backup_jobs_status ON backup_jobs(status);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_backup_jobs_started_at ON backup_jobs(started_at DESC);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_backup_jobs_job_type ON backup_jobs(job_type);
|
||||||
|
|
||||||
@@ -0,0 +1,39 @@
|
|||||||
|
-- AtlasOS - Calypso
|
||||||
|
-- Add Backup Permissions
|
||||||
|
-- Version: 10.0
|
||||||
|
|
||||||
|
-- Insert backup permissions
|
||||||
|
INSERT INTO permissions (name, resource, action, description) VALUES
|
||||||
|
('backup:read', 'backup', 'read', 'View backup jobs and history'),
|
||||||
|
('backup:write', 'backup', 'write', 'Create and manage backup jobs'),
|
||||||
|
('backup:manage', 'backup', 'manage', 'Full backup management')
|
||||||
|
ON CONFLICT (name) DO NOTHING;
|
||||||
|
|
||||||
|
-- Assign backup permissions to roles
|
||||||
|
|
||||||
|
-- Admin gets all backup permissions (explicitly assign since admin query in 001 only runs once)
|
||||||
|
INSERT INTO role_permissions (role_id, permission_id)
|
||||||
|
SELECT r.id, p.id
|
||||||
|
FROM roles r, permissions p
|
||||||
|
WHERE r.name = 'admin'
|
||||||
|
AND p.resource = 'backup'
|
||||||
|
ON CONFLICT DO NOTHING;
|
||||||
|
|
||||||
|
-- Operator gets read and write permissions for backup
|
||||||
|
INSERT INTO role_permissions (role_id, permission_id)
|
||||||
|
SELECT r.id, p.id
|
||||||
|
FROM roles r, permissions p
|
||||||
|
WHERE r.name = 'operator'
|
||||||
|
AND p.resource = 'backup'
|
||||||
|
AND p.action IN ('read', 'write')
|
||||||
|
ON CONFLICT DO NOTHING;
|
||||||
|
|
||||||
|
-- ReadOnly gets only read permission for backup
|
||||||
|
INSERT INTO role_permissions (role_id, permission_id)
|
||||||
|
SELECT r.id, p.id
|
||||||
|
FROM roles r, permissions p
|
||||||
|
WHERE r.name = 'readonly'
|
||||||
|
AND p.resource = 'backup'
|
||||||
|
AND p.action = 'read'
|
||||||
|
ON CONFLICT DO NOTHING;
|
||||||
|
|
||||||
@@ -0,0 +1,209 @@
|
|||||||
|
-- AtlasOS - Calypso
|
||||||
|
-- PostgreSQL Function to Sync Jobs from Bacula to Calypso
|
||||||
|
-- Version: 11.0
|
||||||
|
--
|
||||||
|
-- This function syncs jobs from Bacula database (Job table) to Calypso database (backup_jobs table)
|
||||||
|
-- Uses dblink extension to query Bacula database from Calypso database
|
||||||
|
--
|
||||||
|
-- Prerequisites:
|
||||||
|
-- 1. dblink extension must be installed: CREATE EXTENSION IF NOT EXISTS dblink;
|
||||||
|
-- 2. User must have access to both databases
|
||||||
|
-- 3. Connection parameters must be configured in the function
|
||||||
|
|
||||||
|
-- Create function to sync jobs from Bacula to Calypso
|
||||||
|
CREATE OR REPLACE FUNCTION sync_bacula_jobs(
|
||||||
|
bacula_db_name TEXT DEFAULT 'bacula',
|
||||||
|
bacula_host TEXT DEFAULT 'localhost',
|
||||||
|
bacula_port INTEGER DEFAULT 5432,
|
||||||
|
bacula_user TEXT DEFAULT 'calypso',
|
||||||
|
bacula_password TEXT DEFAULT ''
|
||||||
|
)
|
||||||
|
RETURNS TABLE(
|
||||||
|
jobs_synced INTEGER,
|
||||||
|
jobs_inserted INTEGER,
|
||||||
|
jobs_updated INTEGER,
|
||||||
|
errors INTEGER
|
||||||
|
) AS $$
|
||||||
|
DECLARE
|
||||||
|
conn_str TEXT;
|
||||||
|
jobs_count INTEGER := 0;
|
||||||
|
inserted_count INTEGER := 0;
|
||||||
|
updated_count INTEGER := 0;
|
||||||
|
error_count INTEGER := 0;
|
||||||
|
job_record RECORD;
|
||||||
|
BEGIN
|
||||||
|
-- Build dblink connection string
|
||||||
|
conn_str := format(
|
||||||
|
'dbname=%s host=%s port=%s user=%s password=%s',
|
||||||
|
bacula_db_name,
|
||||||
|
bacula_host,
|
||||||
|
bacula_port,
|
||||||
|
bacula_user,
|
||||||
|
bacula_password
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Query jobs from Bacula database using dblink
|
||||||
|
FOR job_record IN
|
||||||
|
SELECT * FROM dblink(
|
||||||
|
conn_str,
|
||||||
|
$QUERY$
|
||||||
|
SELECT
|
||||||
|
j.JobId,
|
||||||
|
j.Name as job_name,
|
||||||
|
COALESCE(c.Name, 'unknown') as client_name,
|
||||||
|
CASE
|
||||||
|
WHEN j.Type = 'B' THEN 'Backup'
|
||||||
|
WHEN j.Type = 'R' THEN 'Restore'
|
||||||
|
WHEN j.Type = 'V' THEN 'Verify'
|
||||||
|
WHEN j.Type = 'C' THEN 'Copy'
|
||||||
|
WHEN j.Type = 'M' THEN 'Migrate'
|
||||||
|
ELSE 'Backup'
|
||||||
|
END as job_type,
|
||||||
|
CASE
|
||||||
|
WHEN j.Level = 'F' THEN 'Full'
|
||||||
|
WHEN j.Level = 'I' THEN 'Incremental'
|
||||||
|
WHEN j.Level = 'D' THEN 'Differential'
|
||||||
|
WHEN j.Level = 'S' THEN 'Since'
|
||||||
|
ELSE 'Full'
|
||||||
|
END as job_level,
|
||||||
|
CASE
|
||||||
|
WHEN j.JobStatus = 'T' THEN 'Running'
|
||||||
|
WHEN j.JobStatus = 'C' THEN 'Completed'
|
||||||
|
WHEN j.JobStatus = 'f' OR j.JobStatus = 'F' THEN 'Failed'
|
||||||
|
WHEN j.JobStatus = 'A' THEN 'Canceled'
|
||||||
|
WHEN j.JobStatus = 'W' THEN 'Waiting'
|
||||||
|
ELSE 'Waiting'
|
||||||
|
END as status,
|
||||||
|
COALESCE(j.JobBytes, 0) as bytes_written,
|
||||||
|
COALESCE(j.JobFiles, 0) as files_written,
|
||||||
|
j.StartTime as started_at,
|
||||||
|
j.EndTime as ended_at,
|
||||||
|
CASE
|
||||||
|
WHEN j.EndTime IS NOT NULL AND j.StartTime IS NOT NULL
|
||||||
|
THEN EXTRACT(EPOCH FROM (j.EndTime - j.StartTime))::INTEGER
|
||||||
|
ELSE NULL
|
||||||
|
END as duration_seconds
|
||||||
|
FROM Job j
|
||||||
|
LEFT JOIN Client c ON j.ClientId = c.ClientId
|
||||||
|
ORDER BY j.StartTime DESC
|
||||||
|
LIMIT 1000
|
||||||
|
$QUERY$
|
||||||
|
) AS t(
|
||||||
|
job_id INTEGER,
|
||||||
|
job_name TEXT,
|
||||||
|
client_name TEXT,
|
||||||
|
job_type TEXT,
|
||||||
|
job_level TEXT,
|
||||||
|
status TEXT,
|
||||||
|
bytes_written BIGINT,
|
||||||
|
files_written INTEGER,
|
||||||
|
started_at TIMESTAMP,
|
||||||
|
ended_at TIMESTAMP,
|
||||||
|
duration_seconds INTEGER
|
||||||
|
)
|
||||||
|
LOOP
|
||||||
|
BEGIN
|
||||||
|
-- Check if job already exists (before insert/update)
|
||||||
|
IF EXISTS (SELECT 1 FROM backup_jobs WHERE job_id = job_record.job_id) THEN
|
||||||
|
updated_count := updated_count + 1;
|
||||||
|
ELSE
|
||||||
|
inserted_count := inserted_count + 1;
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
-- Upsert job to backup_jobs table
|
||||||
|
INSERT INTO backup_jobs (
|
||||||
|
job_id, job_name, client_name, job_type, job_level, status,
|
||||||
|
bytes_written, files_written, started_at, ended_at, duration_seconds,
|
||||||
|
updated_at
|
||||||
|
) VALUES (
|
||||||
|
job_record.job_id,
|
||||||
|
job_record.job_name,
|
||||||
|
job_record.client_name,
|
||||||
|
job_record.job_type,
|
||||||
|
job_record.job_level,
|
||||||
|
job_record.status,
|
||||||
|
job_record.bytes_written,
|
||||||
|
job_record.files_written,
|
||||||
|
job_record.started_at,
|
||||||
|
job_record.ended_at,
|
||||||
|
job_record.duration_seconds,
|
||||||
|
NOW()
|
||||||
|
)
|
||||||
|
ON CONFLICT (job_id) DO UPDATE SET
|
||||||
|
job_name = EXCLUDED.job_name,
|
||||||
|
client_name = EXCLUDED.client_name,
|
||||||
|
job_type = EXCLUDED.job_type,
|
||||||
|
job_level = EXCLUDED.job_level,
|
||||||
|
status = EXCLUDED.status,
|
||||||
|
bytes_written = EXCLUDED.bytes_written,
|
||||||
|
files_written = EXCLUDED.files_written,
|
||||||
|
started_at = EXCLUDED.started_at,
|
||||||
|
ended_at = EXCLUDED.ended_at,
|
||||||
|
duration_seconds = EXCLUDED.duration_seconds,
|
||||||
|
updated_at = NOW();
|
||||||
|
|
||||||
|
jobs_count := jobs_count + 1;
|
||||||
|
EXCEPTION
|
||||||
|
WHEN OTHERS THEN
|
||||||
|
error_count := error_count + 1;
|
||||||
|
-- Log error but continue with next job
|
||||||
|
RAISE WARNING 'Error syncing job %: %', job_record.job_id, SQLERRM;
|
||||||
|
END;
|
||||||
|
END LOOP;
|
||||||
|
|
||||||
|
-- Return summary
|
||||||
|
RETURN QUERY SELECT jobs_count, inserted_count, updated_count, error_count;
|
||||||
|
END;
|
||||||
|
$$ LANGUAGE plpgsql;
|
||||||
|
|
||||||
|
-- Create a simpler version that uses current database connection settings
|
||||||
|
-- This version assumes Bacula is on same host/port with same user
|
||||||
|
CREATE OR REPLACE FUNCTION sync_bacula_jobs_simple()
|
||||||
|
RETURNS TABLE(
|
||||||
|
jobs_synced INTEGER,
|
||||||
|
jobs_inserted INTEGER,
|
||||||
|
jobs_updated INTEGER,
|
||||||
|
errors INTEGER
|
||||||
|
) AS $$
|
||||||
|
DECLARE
|
||||||
|
current_user_name TEXT;
|
||||||
|
current_host TEXT;
|
||||||
|
current_port INTEGER;
|
||||||
|
current_db TEXT;
|
||||||
|
BEGIN
|
||||||
|
-- Get current connection info
|
||||||
|
SELECT
|
||||||
|
current_user,
|
||||||
|
COALESCE(inet_server_addr()::TEXT, 'localhost'),
|
||||||
|
COALESCE(inet_server_port(), 5432),
|
||||||
|
current_database()
|
||||||
|
INTO
|
||||||
|
current_user_name,
|
||||||
|
current_host,
|
||||||
|
current_port,
|
||||||
|
current_db;
|
||||||
|
|
||||||
|
-- Call main function with current connection settings
|
||||||
|
-- Note: password needs to be passed or configured in .pgpass
|
||||||
|
RETURN QUERY
|
||||||
|
SELECT * FROM sync_bacula_jobs(
|
||||||
|
'bacula', -- Try 'bacula' first
|
||||||
|
current_host,
|
||||||
|
current_port,
|
||||||
|
current_user_name,
|
||||||
|
'' -- Empty password - will use .pgpass or peer authentication
|
||||||
|
);
|
||||||
|
END;
|
||||||
|
$$ LANGUAGE plpgsql;
|
||||||
|
|
||||||
|
-- Grant execute permission to calypso user
|
||||||
|
GRANT EXECUTE ON FUNCTION sync_bacula_jobs(TEXT, TEXT, INTEGER, TEXT, TEXT) TO calypso;
|
||||||
|
GRANT EXECUTE ON FUNCTION sync_bacula_jobs_simple() TO calypso;
|
||||||
|
|
||||||
|
-- Create index if not exists (should already exist from migration 009)
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_backup_jobs_job_id ON backup_jobs(job_id);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_backup_jobs_updated_at ON backup_jobs(updated_at);
|
||||||
|
|
||||||
|
COMMENT ON FUNCTION sync_bacula_jobs IS 'Syncs jobs from Bacula database to Calypso backup_jobs table using dblink';
|
||||||
|
COMMENT ON FUNCTION sync_bacula_jobs_simple IS 'Simplified version that uses current connection settings (requires .pgpass for password)';
|
||||||
|
|
||||||
@@ -0,0 +1,209 @@
|
|||||||
|
-- AtlasOS - Calypso
|
||||||
|
-- PostgreSQL Function to Sync Jobs from Bacula to Calypso
|
||||||
|
-- Version: 11.0
|
||||||
|
--
|
||||||
|
-- This function syncs jobs from Bacula database (Job table) to Calypso database (backup_jobs table)
|
||||||
|
-- Uses dblink extension to query Bacula database from Calypso database
|
||||||
|
--
|
||||||
|
-- Prerequisites:
|
||||||
|
-- 1. dblink extension must be installed: CREATE EXTENSION IF NOT EXISTS dblink;
|
||||||
|
-- 2. User must have access to both databases
|
||||||
|
-- 3. Connection parameters must be configured in the function
|
||||||
|
|
||||||
|
-- Create function to sync jobs from Bacula to Calypso
|
||||||
|
CREATE OR REPLACE FUNCTION sync_bacula_jobs(
|
||||||
|
bacula_db_name TEXT DEFAULT 'bacula',
|
||||||
|
bacula_host TEXT DEFAULT 'localhost',
|
||||||
|
bacula_port INTEGER DEFAULT 5432,
|
||||||
|
bacula_user TEXT DEFAULT 'calypso',
|
||||||
|
bacula_password TEXT DEFAULT ''
|
||||||
|
)
|
||||||
|
RETURNS TABLE(
|
||||||
|
jobs_synced INTEGER,
|
||||||
|
jobs_inserted INTEGER,
|
||||||
|
jobs_updated INTEGER,
|
||||||
|
errors INTEGER
|
||||||
|
) AS $$
|
||||||
|
DECLARE
|
||||||
|
conn_str TEXT;
|
||||||
|
jobs_count INTEGER := 0;
|
||||||
|
inserted_count INTEGER := 0;
|
||||||
|
updated_count INTEGER := 0;
|
||||||
|
error_count INTEGER := 0;
|
||||||
|
job_record RECORD;
|
||||||
|
BEGIN
|
||||||
|
-- Build dblink connection string
|
||||||
|
conn_str := format(
|
||||||
|
'dbname=%s host=%s port=%s user=%s password=%s',
|
||||||
|
bacula_db_name,
|
||||||
|
bacula_host,
|
||||||
|
bacula_port,
|
||||||
|
bacula_user,
|
||||||
|
bacula_password
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Query jobs from Bacula database using dblink
|
||||||
|
FOR job_record IN
|
||||||
|
SELECT * FROM dblink(
|
||||||
|
conn_str,
|
||||||
|
$$
|
||||||
|
SELECT
|
||||||
|
j.JobId,
|
||||||
|
j.Name as job_name,
|
||||||
|
COALESCE(c.Name, 'unknown') as client_name,
|
||||||
|
CASE
|
||||||
|
WHEN j.Type = 'B' THEN 'Backup'
|
||||||
|
WHEN j.Type = 'R' THEN 'Restore'
|
||||||
|
WHEN j.Type = 'V' THEN 'Verify'
|
||||||
|
WHEN j.Type = 'C' THEN 'Copy'
|
||||||
|
WHEN j.Type = 'M' THEN 'Migrate'
|
||||||
|
ELSE 'Backup'
|
||||||
|
END as job_type,
|
||||||
|
CASE
|
||||||
|
WHEN j.Level = 'F' THEN 'Full'
|
||||||
|
WHEN j.Level = 'I' THEN 'Incremental'
|
||||||
|
WHEN j.Level = 'D' THEN 'Differential'
|
||||||
|
WHEN j.Level = 'S' THEN 'Since'
|
||||||
|
ELSE 'Full'
|
||||||
|
END as job_level,
|
||||||
|
CASE
|
||||||
|
WHEN j.JobStatus = 'T' THEN 'Running'
|
||||||
|
WHEN j.JobStatus = 'C' THEN 'Completed'
|
||||||
|
WHEN j.JobStatus = 'f' OR j.JobStatus = 'F' THEN 'Failed'
|
||||||
|
WHEN j.JobStatus = 'A' THEN 'Canceled'
|
||||||
|
WHEN j.JobStatus = 'W' THEN 'Waiting'
|
||||||
|
ELSE 'Waiting'
|
||||||
|
END as status,
|
||||||
|
COALESCE(j.JobBytes, 0) as bytes_written,
|
||||||
|
COALESCE(j.JobFiles, 0) as files_written,
|
||||||
|
j.StartTime as started_at,
|
||||||
|
j.EndTime as ended_at,
|
||||||
|
CASE
|
||||||
|
WHEN j.EndTime IS NOT NULL AND j.StartTime IS NOT NULL
|
||||||
|
THEN EXTRACT(EPOCH FROM (j.EndTime - j.StartTime))::INTEGER
|
||||||
|
ELSE NULL
|
||||||
|
END as duration_seconds
|
||||||
|
FROM Job j
|
||||||
|
LEFT JOIN Client c ON j.ClientId = c.ClientId
|
||||||
|
ORDER BY j.StartTime DESC
|
||||||
|
LIMIT 1000
|
||||||
|
$$
|
||||||
|
) AS t(
|
||||||
|
job_id INTEGER,
|
||||||
|
job_name TEXT,
|
||||||
|
client_name TEXT,
|
||||||
|
job_type TEXT,
|
||||||
|
job_level TEXT,
|
||||||
|
status TEXT,
|
||||||
|
bytes_written BIGINT,
|
||||||
|
files_written INTEGER,
|
||||||
|
started_at TIMESTAMP,
|
||||||
|
ended_at TIMESTAMP,
|
||||||
|
duration_seconds INTEGER
|
||||||
|
)
|
||||||
|
LOOP
|
||||||
|
BEGIN
|
||||||
|
-- Check if job already exists (before insert/update)
|
||||||
|
IF EXISTS (SELECT 1 FROM backup_jobs WHERE job_id = job_record.job_id) THEN
|
||||||
|
updated_count := updated_count + 1;
|
||||||
|
ELSE
|
||||||
|
inserted_count := inserted_count + 1;
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
-- Upsert job to backup_jobs table
|
||||||
|
INSERT INTO backup_jobs (
|
||||||
|
job_id, job_name, client_name, job_type, job_level, status,
|
||||||
|
bytes_written, files_written, started_at, ended_at, duration_seconds,
|
||||||
|
updated_at
|
||||||
|
) VALUES (
|
||||||
|
job_record.job_id,
|
||||||
|
job_record.job_name,
|
||||||
|
job_record.client_name,
|
||||||
|
job_record.job_type,
|
||||||
|
job_record.job_level,
|
||||||
|
job_record.status,
|
||||||
|
job_record.bytes_written,
|
||||||
|
job_record.files_written,
|
||||||
|
job_record.started_at,
|
||||||
|
job_record.ended_at,
|
||||||
|
job_record.duration_seconds,
|
||||||
|
NOW()
|
||||||
|
)
|
||||||
|
ON CONFLICT (job_id) DO UPDATE SET
|
||||||
|
job_name = EXCLUDED.job_name,
|
||||||
|
client_name = EXCLUDED.client_name,
|
||||||
|
job_type = EXCLUDED.job_type,
|
||||||
|
job_level = EXCLUDED.job_level,
|
||||||
|
status = EXCLUDED.status,
|
||||||
|
bytes_written = EXCLUDED.bytes_written,
|
||||||
|
files_written = EXCLUDED.files_written,
|
||||||
|
started_at = EXCLUDED.started_at,
|
||||||
|
ended_at = EXCLUDED.ended_at,
|
||||||
|
duration_seconds = EXCLUDED.duration_seconds,
|
||||||
|
updated_at = NOW();
|
||||||
|
|
||||||
|
jobs_count := jobs_count + 1;
|
||||||
|
EXCEPTION
|
||||||
|
WHEN OTHERS THEN
|
||||||
|
error_count := error_count + 1;
|
||||||
|
-- Log error but continue with next job
|
||||||
|
RAISE WARNING 'Error syncing job %: %', job_record.job_id, SQLERRM;
|
||||||
|
END;
|
||||||
|
END LOOP;
|
||||||
|
|
||||||
|
-- Return summary
|
||||||
|
RETURN QUERY SELECT jobs_count, inserted_count, updated_count, error_count;
|
||||||
|
END;
|
||||||
|
$$ LANGUAGE plpgsql;
|
||||||
|
|
||||||
|
-- Create a simpler version that uses current database connection settings
|
||||||
|
-- This version assumes Bacula is on same host/port with same user
|
||||||
|
CREATE OR REPLACE FUNCTION sync_bacula_jobs_simple()
|
||||||
|
RETURNS TABLE(
|
||||||
|
jobs_synced INTEGER,
|
||||||
|
jobs_inserted INTEGER,
|
||||||
|
jobs_updated INTEGER,
|
||||||
|
errors INTEGER
|
||||||
|
) AS $$
|
||||||
|
DECLARE
|
||||||
|
current_user_name TEXT;
|
||||||
|
current_host TEXT;
|
||||||
|
current_port INTEGER;
|
||||||
|
current_db TEXT;
|
||||||
|
BEGIN
|
||||||
|
-- Get current connection info
|
||||||
|
SELECT
|
||||||
|
current_user,
|
||||||
|
COALESCE(inet_server_addr()::TEXT, 'localhost'),
|
||||||
|
COALESCE(inet_server_port(), 5432),
|
||||||
|
current_database()
|
||||||
|
INTO
|
||||||
|
current_user_name,
|
||||||
|
current_host,
|
||||||
|
current_port,
|
||||||
|
current_db;
|
||||||
|
|
||||||
|
-- Call main function with current connection settings
|
||||||
|
-- Note: password needs to be passed or configured in .pgpass
|
||||||
|
RETURN QUERY
|
||||||
|
SELECT * FROM sync_bacula_jobs(
|
||||||
|
'bacula', -- Try 'bacula' first
|
||||||
|
current_host,
|
||||||
|
current_port,
|
||||||
|
current_user_name,
|
||||||
|
'' -- Empty password - will use .pgpass or peer authentication
|
||||||
|
);
|
||||||
|
END;
|
||||||
|
$$ LANGUAGE plpgsql;
|
||||||
|
|
||||||
|
-- Grant execute permission to calypso user
|
||||||
|
GRANT EXECUTE ON FUNCTION sync_bacula_jobs(TEXT, TEXT, INTEGER, TEXT, TEXT) TO calypso;
|
||||||
|
GRANT EXECUTE ON FUNCTION sync_bacula_jobs_simple() TO calypso;
|
||||||
|
|
||||||
|
-- Create index if not exists (should already exist from migration 009)
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_backup_jobs_job_id ON backup_jobs(job_id);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_backup_jobs_updated_at ON backup_jobs(updated_at);
|
||||||
|
|
||||||
|
COMMENT ON FUNCTION sync_bacula_jobs IS 'Syncs jobs from Bacula database to Calypso backup_jobs table using dblink';
|
||||||
|
COMMENT ON FUNCTION sync_bacula_jobs_simple IS 'Simplified version that uses current connection settings (requires .pgpass for password)';
|
||||||
|
|
||||||
@@ -6,6 +6,7 @@ import (
|
|||||||
|
|
||||||
"github.com/atlasos/calypso/internal/audit"
|
"github.com/atlasos/calypso/internal/audit"
|
||||||
"github.com/atlasos/calypso/internal/auth"
|
"github.com/atlasos/calypso/internal/auth"
|
||||||
|
"github.com/atlasos/calypso/internal/backup"
|
||||||
"github.com/atlasos/calypso/internal/common/cache"
|
"github.com/atlasos/calypso/internal/common/cache"
|
||||||
"github.com/atlasos/calypso/internal/common/config"
|
"github.com/atlasos/calypso/internal/common/config"
|
||||||
"github.com/atlasos/calypso/internal/common/database"
|
"github.com/atlasos/calypso/internal/common/database"
|
||||||
@@ -207,8 +208,21 @@ func NewRouter(cfg *config.Config, db *database.DB, log *logger.Logger) *gin.Eng
|
|||||||
scstGroup.POST("/targets", scstHandler.CreateTarget)
|
scstGroup.POST("/targets", scstHandler.CreateTarget)
|
||||||
scstGroup.POST("/targets/:id/luns", scstHandler.AddLUN)
|
scstGroup.POST("/targets/:id/luns", scstHandler.AddLUN)
|
||||||
scstGroup.POST("/targets/:id/initiators", scstHandler.AddInitiator)
|
scstGroup.POST("/targets/:id/initiators", scstHandler.AddInitiator)
|
||||||
|
scstGroup.POST("/targets/:id/enable", scstHandler.EnableTarget)
|
||||||
|
scstGroup.POST("/targets/:id/disable", scstHandler.DisableTarget)
|
||||||
|
scstGroup.GET("/initiators", scstHandler.ListAllInitiators)
|
||||||
|
scstGroup.GET("/initiators/:id", scstHandler.GetInitiator)
|
||||||
|
scstGroup.DELETE("/initiators/:id", scstHandler.RemoveInitiator)
|
||||||
|
scstGroup.GET("/extents", scstHandler.ListExtents)
|
||||||
|
scstGroup.POST("/extents", scstHandler.CreateExtent)
|
||||||
|
scstGroup.DELETE("/extents/:device", scstHandler.DeleteExtent)
|
||||||
scstGroup.POST("/config/apply", scstHandler.ApplyConfig)
|
scstGroup.POST("/config/apply", scstHandler.ApplyConfig)
|
||||||
scstGroup.GET("/handlers", scstHandler.ListHandlers)
|
scstGroup.GET("/handlers", scstHandler.ListHandlers)
|
||||||
|
scstGroup.GET("/portals", scstHandler.ListPortals)
|
||||||
|
scstGroup.GET("/portals/:id", scstHandler.GetPortal)
|
||||||
|
scstGroup.POST("/portals", scstHandler.CreatePortal)
|
||||||
|
scstGroup.PUT("/portals/:id", scstHandler.UpdatePortal)
|
||||||
|
scstGroup.DELETE("/portals/:id", scstHandler.DeletePortal)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Physical Tape Libraries
|
// Physical Tape Libraries
|
||||||
@@ -258,16 +272,67 @@ func NewRouter(cfg *config.Config, db *database.DB, log *logger.Logger) *gin.Eng
|
|||||||
systemGroup.GET("/interfaces", systemHandler.ListNetworkInterfaces)
|
systemGroup.GET("/interfaces", systemHandler.ListNetworkInterfaces)
|
||||||
}
|
}
|
||||||
|
|
||||||
// IAM (admin only)
|
// IAM routes - GetUser can be accessed by user viewing own profile or admin
|
||||||
iamHandler := iam.NewHandler(db, cfg, log)
|
iamHandler := iam.NewHandler(db, cfg, log)
|
||||||
|
protected.GET("/iam/users/:id", iamHandler.GetUser)
|
||||||
|
|
||||||
|
// IAM admin routes
|
||||||
iamGroup := protected.Group("/iam")
|
iamGroup := protected.Group("/iam")
|
||||||
iamGroup.Use(requireRole("admin"))
|
iamGroup.Use(requireRole("admin"))
|
||||||
{
|
{
|
||||||
iamGroup.GET("/users", iamHandler.ListUsers)
|
iamGroup.GET("/users", iamHandler.ListUsers)
|
||||||
iamGroup.GET("/users/:id", iamHandler.GetUser)
|
|
||||||
iamGroup.POST("/users", iamHandler.CreateUser)
|
iamGroup.POST("/users", iamHandler.CreateUser)
|
||||||
iamGroup.PUT("/users/:id", iamHandler.UpdateUser)
|
iamGroup.PUT("/users/:id", iamHandler.UpdateUser)
|
||||||
iamGroup.DELETE("/users/:id", iamHandler.DeleteUser)
|
iamGroup.DELETE("/users/:id", iamHandler.DeleteUser)
|
||||||
|
// Roles routes
|
||||||
|
iamGroup.GET("/roles", iamHandler.ListRoles)
|
||||||
|
iamGroup.GET("/roles/:id", iamHandler.GetRole)
|
||||||
|
iamGroup.POST("/roles", iamHandler.CreateRole)
|
||||||
|
iamGroup.PUT("/roles/:id", iamHandler.UpdateRole)
|
||||||
|
iamGroup.DELETE("/roles/:id", iamHandler.DeleteRole)
|
||||||
|
iamGroup.GET("/roles/:id/permissions", iamHandler.GetRolePermissions)
|
||||||
|
iamGroup.POST("/roles/:id/permissions", iamHandler.AssignPermissionToRole)
|
||||||
|
iamGroup.DELETE("/roles/:id/permissions", iamHandler.RemovePermissionFromRole)
|
||||||
|
|
||||||
|
// Permissions routes
|
||||||
|
iamGroup.GET("/permissions", iamHandler.ListPermissions)
|
||||||
|
|
||||||
|
// User role/group assignment
|
||||||
|
iamGroup.POST("/users/:id/roles", iamHandler.AssignRoleToUser)
|
||||||
|
iamGroup.DELETE("/users/:id/roles", iamHandler.RemoveRoleFromUser)
|
||||||
|
iamGroup.POST("/users/:id/groups", iamHandler.AssignGroupToUser)
|
||||||
|
iamGroup.DELETE("/users/:id/groups", iamHandler.RemoveGroupFromUser)
|
||||||
|
|
||||||
|
// Groups routes
|
||||||
|
iamGroup.GET("/groups", iamHandler.ListGroups)
|
||||||
|
iamGroup.GET("/groups/:id", iamHandler.GetGroup)
|
||||||
|
iamGroup.POST("/groups", iamHandler.CreateGroup)
|
||||||
|
iamGroup.PUT("/groups/:id", iamHandler.UpdateGroup)
|
||||||
|
iamGroup.DELETE("/groups/:id", iamHandler.DeleteGroup)
|
||||||
|
iamGroup.POST("/groups/:id/users", iamHandler.AddUserToGroup)
|
||||||
|
iamGroup.DELETE("/groups/:id/users/:user_id", iamHandler.RemoveUserFromGroup)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Backup Jobs
|
||||||
|
backupService := backup.NewService(db, log)
|
||||||
|
// Set up direct connection to Bacula database
|
||||||
|
// Try common Bacula database names
|
||||||
|
baculaDBName := "bacula" // Default
|
||||||
|
if err := backupService.SetBaculaDatabase(cfg.Database, baculaDBName); err != nil {
|
||||||
|
log.Warn("Failed to connect to Bacula database, trying 'bareos'", "error", err)
|
||||||
|
// Try 'bareos' as alternative
|
||||||
|
if err := backupService.SetBaculaDatabase(cfg.Database, "bareos"); err != nil {
|
||||||
|
log.Error("Failed to connect to Bacula database", "error", err, "tried", []string{"bacula", "bareos"})
|
||||||
|
// Continue anyway - will fallback to bconsole
|
||||||
|
}
|
||||||
|
}
|
||||||
|
backupHandler := backup.NewHandler(backupService, log)
|
||||||
|
backupGroup := protected.Group("/backup")
|
||||||
|
backupGroup.Use(requirePermission("backup", "read"))
|
||||||
|
{
|
||||||
|
backupGroup.GET("/jobs", backupHandler.ListJobs)
|
||||||
|
backupGroup.GET("/jobs/:id", backupHandler.GetJob)
|
||||||
|
backupGroup.POST("/jobs", requirePermission("backup", "write"), backupHandler.CreateJob)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Monitoring
|
// Monitoring
|
||||||
|
|||||||
218
backend/internal/iam/group.go
Normal file
218
backend/internal/iam/group.go
Normal file
@@ -0,0 +1,218 @@
|
|||||||
|
package iam
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/atlasos/calypso/internal/common/database"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Group models a row of the groups table, optionally annotated with
// how many users and roles are attached to it.
type Group struct {
	ID          string
	Name        string
	Description string
	IsSystem    bool // presumably marks built-in groups — confirm against schema
	CreatedAt   time.Time
	UpdatedAt   time.Time
	UserCount   int // members in the group (filled by lookups that compute it)
	RoleCount   int // roles granted to the group (filled by lookups that compute it)
}
|
||||||
|
|
||||||
|
// GetGroupByID retrieves a group by ID
|
||||||
|
func GetGroupByID(db *database.DB, groupID string) (*Group, error) {
|
||||||
|
query := `
|
||||||
|
SELECT id, name, description, is_system, created_at, updated_at
|
||||||
|
FROM groups
|
||||||
|
WHERE id = $1
|
||||||
|
`
|
||||||
|
|
||||||
|
var group Group
|
||||||
|
err := db.QueryRow(query, groupID).Scan(
|
||||||
|
&group.ID, &group.Name, &group.Description, &group.IsSystem,
|
||||||
|
&group.CreatedAt, &group.UpdatedAt,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get user count
|
||||||
|
var userCount int
|
||||||
|
db.QueryRow("SELECT COUNT(*) FROM user_groups WHERE group_id = $1", groupID).Scan(&userCount)
|
||||||
|
group.UserCount = userCount
|
||||||
|
|
||||||
|
// Get role count
|
||||||
|
var roleCount int
|
||||||
|
db.QueryRow("SELECT COUNT(*) FROM group_roles WHERE group_id = $1", groupID).Scan(&roleCount)
|
||||||
|
group.RoleCount = roleCount
|
||||||
|
|
||||||
|
return &group, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetGroupByName retrieves a group by name
|
||||||
|
func GetGroupByName(db *database.DB, name string) (*Group, error) {
|
||||||
|
query := `
|
||||||
|
SELECT id, name, description, is_system, created_at, updated_at
|
||||||
|
FROM groups
|
||||||
|
WHERE name = $1
|
||||||
|
`
|
||||||
|
|
||||||
|
var group Group
|
||||||
|
err := db.QueryRow(query, name).Scan(
|
||||||
|
&group.ID, &group.Name, &group.Description, &group.IsSystem,
|
||||||
|
&group.CreatedAt, &group.UpdatedAt,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return &group, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetUserGroups retrieves all groups for a user
|
||||||
|
func GetUserGroups(db *database.DB, userID string) ([]string, error) {
|
||||||
|
query := `
|
||||||
|
SELECT g.name
|
||||||
|
FROM groups g
|
||||||
|
INNER JOIN user_groups ug ON g.id = ug.group_id
|
||||||
|
WHERE ug.user_id = $1
|
||||||
|
ORDER BY g.name
|
||||||
|
`
|
||||||
|
|
||||||
|
rows, err := db.Query(query, userID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var groups []string
|
||||||
|
for rows.Next() {
|
||||||
|
var groupName string
|
||||||
|
if err := rows.Scan(&groupName); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
groups = append(groups, groupName)
|
||||||
|
}
|
||||||
|
|
||||||
|
return groups, rows.Err()
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetGroupUsers retrieves all users in a group
|
||||||
|
func GetGroupUsers(db *database.DB, groupID string) ([]string, error) {
|
||||||
|
query := `
|
||||||
|
SELECT u.id
|
||||||
|
FROM users u
|
||||||
|
INNER JOIN user_groups ug ON u.id = ug.user_id
|
||||||
|
WHERE ug.group_id = $1
|
||||||
|
ORDER BY u.username
|
||||||
|
`
|
||||||
|
|
||||||
|
rows, err := db.Query(query, groupID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var userIDs []string
|
||||||
|
for rows.Next() {
|
||||||
|
var userID string
|
||||||
|
if err := rows.Scan(&userID); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
userIDs = append(userIDs, userID)
|
||||||
|
}
|
||||||
|
|
||||||
|
return userIDs, rows.Err()
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetGroupRoles retrieves all roles for a group
|
||||||
|
func GetGroupRoles(db *database.DB, groupID string) ([]string, error) {
|
||||||
|
query := `
|
||||||
|
SELECT r.name
|
||||||
|
FROM roles r
|
||||||
|
INNER JOIN group_roles gr ON r.id = gr.role_id
|
||||||
|
WHERE gr.group_id = $1
|
||||||
|
ORDER BY r.name
|
||||||
|
`
|
||||||
|
|
||||||
|
rows, err := db.Query(query, groupID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var roles []string
|
||||||
|
for rows.Next() {
|
||||||
|
var role string
|
||||||
|
if err := rows.Scan(&role); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
roles = append(roles, role)
|
||||||
|
}
|
||||||
|
|
||||||
|
return roles, rows.Err()
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddUserToGroup adds a user to a group
|
||||||
|
func AddUserToGroup(db *database.DB, userID, groupID, assignedBy string) error {
|
||||||
|
query := `
|
||||||
|
INSERT INTO user_groups (user_id, group_id, assigned_by)
|
||||||
|
VALUES ($1, $2, $3)
|
||||||
|
ON CONFLICT (user_id, group_id) DO NOTHING
|
||||||
|
`
|
||||||
|
_, err := db.Exec(query, userID, groupID, assignedBy)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// RemoveUserFromGroup removes a user from a group
|
||||||
|
func RemoveUserFromGroup(db *database.DB, userID, groupID string) error {
|
||||||
|
query := `DELETE FROM user_groups WHERE user_id = $1 AND group_id = $2`
|
||||||
|
_, err := db.Exec(query, userID, groupID)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddRoleToGroup adds a role to a group
|
||||||
|
func AddRoleToGroup(db *database.DB, groupID, roleID string) error {
|
||||||
|
query := `
|
||||||
|
INSERT INTO group_roles (group_id, role_id)
|
||||||
|
VALUES ($1, $2)
|
||||||
|
ON CONFLICT (group_id, role_id) DO NOTHING
|
||||||
|
`
|
||||||
|
_, err := db.Exec(query, groupID, roleID)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// RemoveRoleFromGroup removes a role from a group
|
||||||
|
func RemoveRoleFromGroup(db *database.DB, groupID, roleID string) error {
|
||||||
|
query := `DELETE FROM group_roles WHERE group_id = $1 AND role_id = $2`
|
||||||
|
_, err := db.Exec(query, groupID, roleID)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetUserRolesFromGroups retrieves all roles for a user via groups
|
||||||
|
func GetUserRolesFromGroups(db *database.DB, userID string) ([]string, error) {
|
||||||
|
query := `
|
||||||
|
SELECT DISTINCT r.name
|
||||||
|
FROM roles r
|
||||||
|
INNER JOIN group_roles gr ON r.id = gr.role_id
|
||||||
|
INNER JOIN user_groups ug ON gr.group_id = ug.group_id
|
||||||
|
WHERE ug.user_id = $1
|
||||||
|
ORDER BY r.name
|
||||||
|
`
|
||||||
|
|
||||||
|
rows, err := db.Query(query, userID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var roles []string
|
||||||
|
for rows.Next() {
|
||||||
|
var role string
|
||||||
|
if err := rows.Scan(&role); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
roles = append(roles, role)
|
||||||
|
}
|
||||||
|
|
||||||
|
return roles, rows.Err()
|
||||||
|
}
|
||||||
File diff suppressed because it is too large
Load Diff
237
backend/internal/iam/role.go
Normal file
237
backend/internal/iam/role.go
Normal file
@@ -0,0 +1,237 @@
|
|||||||
|
package iam
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/atlasos/calypso/internal/common/database"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Role models a row of the roles table.
type Role struct {
	ID          string
	Name        string
	Description string
	IsSystem    bool // presumably marks built-in roles — confirm against schema
	CreatedAt   time.Time
	UpdatedAt   time.Time
}
|
||||||
|
|
||||||
|
// GetRoleByID retrieves a role by ID
|
||||||
|
func GetRoleByID(db *database.DB, roleID string) (*Role, error) {
|
||||||
|
query := `
|
||||||
|
SELECT id, name, description, is_system, created_at, updated_at
|
||||||
|
FROM roles
|
||||||
|
WHERE id = $1
|
||||||
|
`
|
||||||
|
|
||||||
|
var role Role
|
||||||
|
err := db.QueryRow(query, roleID).Scan(
|
||||||
|
&role.ID, &role.Name, &role.Description, &role.IsSystem,
|
||||||
|
&role.CreatedAt, &role.UpdatedAt,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return &role, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetRoleByName retrieves a role by name
|
||||||
|
func GetRoleByName(db *database.DB, name string) (*Role, error) {
|
||||||
|
query := `
|
||||||
|
SELECT id, name, description, is_system, created_at, updated_at
|
||||||
|
FROM roles
|
||||||
|
WHERE name = $1
|
||||||
|
`
|
||||||
|
|
||||||
|
var role Role
|
||||||
|
err := db.QueryRow(query, name).Scan(
|
||||||
|
&role.ID, &role.Name, &role.Description, &role.IsSystem,
|
||||||
|
&role.CreatedAt, &role.UpdatedAt,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return &role, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListRoles retrieves all roles
|
||||||
|
func ListRoles(db *database.DB) ([]*Role, error) {
|
||||||
|
query := `
|
||||||
|
SELECT id, name, description, is_system, created_at, updated_at
|
||||||
|
FROM roles
|
||||||
|
ORDER BY name
|
||||||
|
`
|
||||||
|
|
||||||
|
rows, err := db.Query(query)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var roles []*Role
|
||||||
|
for rows.Next() {
|
||||||
|
var role Role
|
||||||
|
if err := rows.Scan(
|
||||||
|
&role.ID, &role.Name, &role.Description, &role.IsSystem,
|
||||||
|
&role.CreatedAt, &role.UpdatedAt,
|
||||||
|
); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
roles = append(roles, &role)
|
||||||
|
}
|
||||||
|
|
||||||
|
return roles, rows.Err()
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateRole creates a new role
|
||||||
|
func CreateRole(db *database.DB, name, description string) (*Role, error) {
|
||||||
|
query := `
|
||||||
|
INSERT INTO roles (name, description)
|
||||||
|
VALUES ($1, $2)
|
||||||
|
RETURNING id, name, description, is_system, created_at, updated_at
|
||||||
|
`
|
||||||
|
|
||||||
|
var role Role
|
||||||
|
err := db.QueryRow(query, name, description).Scan(
|
||||||
|
&role.ID, &role.Name, &role.Description, &role.IsSystem,
|
||||||
|
&role.CreatedAt, &role.UpdatedAt,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return &role, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateRole updates an existing role
|
||||||
|
func UpdateRole(db *database.DB, roleID, name, description string) error {
|
||||||
|
query := `
|
||||||
|
UPDATE roles
|
||||||
|
SET name = $1, description = $2, updated_at = NOW()
|
||||||
|
WHERE id = $3
|
||||||
|
`
|
||||||
|
_, err := db.Exec(query, name, description, roleID)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteRole deletes a role
|
||||||
|
func DeleteRole(db *database.DB, roleID string) error {
|
||||||
|
query := `DELETE FROM roles WHERE id = $1`
|
||||||
|
_, err := db.Exec(query, roleID)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetRoleUsers retrieves all users with a specific role
|
||||||
|
func GetRoleUsers(db *database.DB, roleID string) ([]string, error) {
|
||||||
|
query := `
|
||||||
|
SELECT u.id
|
||||||
|
FROM users u
|
||||||
|
INNER JOIN user_roles ur ON u.id = ur.user_id
|
||||||
|
WHERE ur.role_id = $1
|
||||||
|
ORDER BY u.username
|
||||||
|
`
|
||||||
|
|
||||||
|
rows, err := db.Query(query, roleID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var userIDs []string
|
||||||
|
for rows.Next() {
|
||||||
|
var userID string
|
||||||
|
if err := rows.Scan(&userID); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
userIDs = append(userIDs, userID)
|
||||||
|
}
|
||||||
|
|
||||||
|
return userIDs, rows.Err()
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetRolePermissions retrieves all permissions for a role
|
||||||
|
func GetRolePermissions(db *database.DB, roleID string) ([]string, error) {
|
||||||
|
query := `
|
||||||
|
SELECT p.name
|
||||||
|
FROM permissions p
|
||||||
|
INNER JOIN role_permissions rp ON p.id = rp.permission_id
|
||||||
|
WHERE rp.role_id = $1
|
||||||
|
ORDER BY p.name
|
||||||
|
`
|
||||||
|
|
||||||
|
rows, err := db.Query(query, roleID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var permissions []string
|
||||||
|
for rows.Next() {
|
||||||
|
var perm string
|
||||||
|
if err := rows.Scan(&perm); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
permissions = append(permissions, perm)
|
||||||
|
}
|
||||||
|
|
||||||
|
return permissions, rows.Err()
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddPermissionToRole assigns a permission to a role
|
||||||
|
func AddPermissionToRole(db *database.DB, roleID, permissionID string) error {
|
||||||
|
query := `
|
||||||
|
INSERT INTO role_permissions (role_id, permission_id)
|
||||||
|
VALUES ($1, $2)
|
||||||
|
ON CONFLICT (role_id, permission_id) DO NOTHING
|
||||||
|
`
|
||||||
|
_, err := db.Exec(query, roleID, permissionID)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// RemovePermissionFromRole removes a permission from a role
|
||||||
|
func RemovePermissionFromRole(db *database.DB, roleID, permissionID string) error {
|
||||||
|
query := `DELETE FROM role_permissions WHERE role_id = $1 AND permission_id = $2`
|
||||||
|
_, err := db.Exec(query, roleID, permissionID)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetPermissionIDByName retrieves a permission ID by name
|
||||||
|
func GetPermissionIDByName(db *database.DB, permissionName string) (string, error) {
|
||||||
|
var permissionID string
|
||||||
|
err := db.QueryRow("SELECT id FROM permissions WHERE name = $1", permissionName).Scan(&permissionID)
|
||||||
|
return permissionID, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListPermissions retrieves all permissions
|
||||||
|
func ListPermissions(db *database.DB) ([]map[string]interface{}, error) {
|
||||||
|
query := `
|
||||||
|
SELECT id, name, resource, action, description
|
||||||
|
FROM permissions
|
||||||
|
ORDER BY resource, action
|
||||||
|
`
|
||||||
|
|
||||||
|
rows, err := db.Query(query)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var permissions []map[string]interface{}
|
||||||
|
for rows.Next() {
|
||||||
|
var id, name, resource, action, description string
|
||||||
|
if err := rows.Scan(&id, &name, &resource, &action, &description); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
permissions = append(permissions, map[string]interface{}{
|
||||||
|
"id": id,
|
||||||
|
"name": name,
|
||||||
|
"resource": resource,
|
||||||
|
"action": action,
|
||||||
|
"description": description,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return permissions, rows.Err()
|
||||||
|
}
|
||||||
@@ -126,3 +126,27 @@ func GetUserPermissions(db *database.DB, userID string) ([]string, error) {
|
|||||||
return permissions, rows.Err()
|
return permissions, rows.Err()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// AddUserRole assigns a role to a user
|
||||||
|
func AddUserRole(db *database.DB, userID, roleID, assignedBy string) error {
|
||||||
|
query := `
|
||||||
|
INSERT INTO user_roles (user_id, role_id, assigned_by)
|
||||||
|
VALUES ($1, $2, $3)
|
||||||
|
ON CONFLICT (user_id, role_id) DO NOTHING
|
||||||
|
`
|
||||||
|
_, err := db.Exec(query, userID, roleID, assignedBy)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// RemoveUserRole removes a role from a user
|
||||||
|
func RemoveUserRole(db *database.DB, userID, roleID string) error {
|
||||||
|
query := `DELETE FROM user_roles WHERE user_id = $1 AND role_id = $2`
|
||||||
|
_, err := db.Exec(query, userID, roleID)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetRoleIDByName retrieves a role ID by name
|
||||||
|
func GetRoleIDByName(db *database.DB, roleName string) (string, error) {
|
||||||
|
var roleID string
|
||||||
|
err := db.QueryRow("SELECT id FROM roles WHERE name = $1", roleName).Scan(&roleID)
|
||||||
|
return roleID, err
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,10 +1,14 @@
|
|||||||
package monitoring
|
package monitoring
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"bufio"
|
||||||
"context"
|
"context"
|
||||||
"database/sql"
|
"database/sql"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"os"
|
||||||
"runtime"
|
"runtime"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/atlasos/calypso/internal/common/database"
|
"github.com/atlasos/calypso/internal/common/database"
|
||||||
@@ -13,14 +17,14 @@ import (
|
|||||||
|
|
||||||
// Metrics represents system metrics
|
// Metrics represents system metrics
|
||||||
type Metrics struct {
|
type Metrics struct {
|
||||||
System SystemMetrics `json:"system"`
|
System SystemMetrics `json:"system"`
|
||||||
Storage StorageMetrics `json:"storage"`
|
Storage StorageMetrics `json:"storage"`
|
||||||
SCST SCSTMetrics `json:"scst"`
|
SCST SCSTMetrics `json:"scst"`
|
||||||
Tape TapeMetrics `json:"tape"`
|
Tape TapeMetrics `json:"tape"`
|
||||||
VTL VTLMetrics `json:"vtl"`
|
VTL VTLMetrics `json:"vtl"`
|
||||||
Tasks TaskMetrics `json:"tasks"`
|
Tasks TaskMetrics `json:"tasks"`
|
||||||
API APIMetrics `json:"api"`
|
API APIMetrics `json:"api"`
|
||||||
CollectedAt time.Time `json:"collected_at"`
|
CollectedAt time.Time `json:"collected_at"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// SystemMetrics represents system-level metrics
|
// SystemMetrics represents system-level metrics
|
||||||
@@ -37,11 +41,11 @@ type SystemMetrics struct {
|
|||||||
|
|
||||||
// StorageMetrics represents storage metrics
|
// StorageMetrics represents storage metrics
|
||||||
type StorageMetrics struct {
|
type StorageMetrics struct {
|
||||||
TotalDisks int `json:"total_disks"`
|
TotalDisks int `json:"total_disks"`
|
||||||
TotalRepositories int `json:"total_repositories"`
|
TotalRepositories int `json:"total_repositories"`
|
||||||
TotalCapacityBytes int64 `json:"total_capacity_bytes"`
|
TotalCapacityBytes int64 `json:"total_capacity_bytes"`
|
||||||
UsedCapacityBytes int64 `json:"used_capacity_bytes"`
|
UsedCapacityBytes int64 `json:"used_capacity_bytes"`
|
||||||
AvailableBytes int64 `json:"available_bytes"`
|
AvailableBytes int64 `json:"available_bytes"`
|
||||||
UsagePercent float64 `json:"usage_percent"`
|
UsagePercent float64 `json:"usage_percent"`
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -72,28 +76,43 @@ type VTLMetrics struct {
|
|||||||
|
|
||||||
// TaskMetrics represents task execution metrics
|
// TaskMetrics represents task execution metrics
|
||||||
type TaskMetrics struct {
|
type TaskMetrics struct {
|
||||||
TotalTasks int `json:"total_tasks"`
|
TotalTasks int `json:"total_tasks"`
|
||||||
PendingTasks int `json:"pending_tasks"`
|
PendingTasks int `json:"pending_tasks"`
|
||||||
RunningTasks int `json:"running_tasks"`
|
RunningTasks int `json:"running_tasks"`
|
||||||
CompletedTasks int `json:"completed_tasks"`
|
CompletedTasks int `json:"completed_tasks"`
|
||||||
FailedTasks int `json:"failed_tasks"`
|
FailedTasks int `json:"failed_tasks"`
|
||||||
AvgDurationSec float64 `json:"avg_duration_seconds"`
|
AvgDurationSec float64 `json:"avg_duration_seconds"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// APIMetrics represents API metrics
|
// APIMetrics represents API metrics
|
||||||
type APIMetrics struct {
|
type APIMetrics struct {
|
||||||
TotalRequests int64 `json:"total_requests"`
|
TotalRequests int64 `json:"total_requests"`
|
||||||
RequestsPerSec float64 `json:"requests_per_second"`
|
RequestsPerSec float64 `json:"requests_per_second"`
|
||||||
ErrorRate float64 `json:"error_rate"`
|
ErrorRate float64 `json:"error_rate"`
|
||||||
AvgLatencyMs float64 `json:"avg_latency_ms"`
|
AvgLatencyMs float64 `json:"avg_latency_ms"`
|
||||||
ActiveConnections int `json:"active_connections"`
|
ActiveConnections int `json:"active_connections"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// MetricsService collects and provides system metrics
|
// MetricsService collects and provides system metrics
|
||||||
type MetricsService struct {
|
type MetricsService struct {
|
||||||
db *database.DB
|
db *database.DB
|
||||||
logger *logger.Logger
|
logger *logger.Logger
|
||||||
startTime time.Time
|
startTime time.Time
|
||||||
|
lastCPU *cpuStats // For CPU usage calculation
|
||||||
|
lastCPUTime time.Time
|
||||||
|
}
|
||||||
|
|
||||||
|
// cpuStats represents CPU statistics from /proc/stat
|
||||||
|
type cpuStats struct {
|
||||||
|
user uint64
|
||||||
|
nice uint64
|
||||||
|
system uint64
|
||||||
|
idle uint64
|
||||||
|
iowait uint64
|
||||||
|
irq uint64
|
||||||
|
softirq uint64
|
||||||
|
steal uint64
|
||||||
|
guest uint64
|
||||||
}
|
}
|
||||||
|
|
||||||
// NewMetricsService creates a new metrics service
|
// NewMetricsService creates a new metrics service
|
||||||
@@ -115,6 +134,8 @@ func (s *MetricsService) CollectMetrics(ctx context.Context) (*Metrics, error) {
|
|||||||
sysMetrics, err := s.collectSystemMetrics(ctx)
|
sysMetrics, err := s.collectSystemMetrics(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
s.logger.Error("Failed to collect system metrics", "error", err)
|
s.logger.Error("Failed to collect system metrics", "error", err)
|
||||||
|
// Set default/zero values if collection fails
|
||||||
|
metrics.System = SystemMetrics{}
|
||||||
} else {
|
} else {
|
||||||
metrics.System = *sysMetrics
|
metrics.System = *sysMetrics
|
||||||
}
|
}
|
||||||
@@ -167,21 +188,17 @@ func (s *MetricsService) CollectMetrics(ctx context.Context) (*Metrics, error) {
|
|||||||
|
|
||||||
// collectSystemMetrics collects system-level metrics
|
// collectSystemMetrics collects system-level metrics
|
||||||
func (s *MetricsService) collectSystemMetrics(ctx context.Context) (*SystemMetrics, error) {
|
func (s *MetricsService) collectSystemMetrics(ctx context.Context) (*SystemMetrics, error) {
|
||||||
var m runtime.MemStats
|
// Get system memory from /proc/meminfo
|
||||||
runtime.ReadMemStats(&m)
|
memoryTotal, memoryUsed, memoryPercent := s.getSystemMemory()
|
||||||
|
|
||||||
// Get memory info
|
// Get CPU usage from /proc/stat
|
||||||
memoryUsed := int64(m.Alloc)
|
cpuUsage := s.getCPUUsage()
|
||||||
memoryTotal := int64(m.Sys)
|
|
||||||
memoryPercent := float64(memoryUsed) / float64(memoryTotal) * 100
|
|
||||||
|
|
||||||
// Uptime
|
// Get system uptime from /proc/uptime
|
||||||
uptime := time.Since(s.startTime).Seconds()
|
uptime := s.getSystemUptime()
|
||||||
|
|
||||||
// CPU and disk would require external tools or system calls
|
|
||||||
// For now, we'll use placeholders
|
|
||||||
metrics := &SystemMetrics{
|
metrics := &SystemMetrics{
|
||||||
CPUUsagePercent: 0.0, // Would need to read from /proc/stat
|
CPUUsagePercent: cpuUsage,
|
||||||
MemoryUsed: memoryUsed,
|
MemoryUsed: memoryUsed,
|
||||||
MemoryTotal: memoryTotal,
|
MemoryTotal: memoryTotal,
|
||||||
MemoryPercent: memoryPercent,
|
MemoryPercent: memoryPercent,
|
||||||
@@ -268,7 +285,7 @@ func (s *MetricsService) collectSCSTMetrics(ctx context.Context) (*SCSTMetrics,
|
|||||||
TotalTargets: totalTargets,
|
TotalTargets: totalTargets,
|
||||||
TotalLUNs: totalLUNs,
|
TotalLUNs: totalLUNs,
|
||||||
TotalInitiators: totalInitiators,
|
TotalInitiators: totalInitiators,
|
||||||
ActiveTargets: activeTargets,
|
ActiveTargets: activeTargets,
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -403,3 +420,232 @@ func (s *MetricsService) collectTaskMetrics(ctx context.Context) (*TaskMetrics,
|
|||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// getSystemUptime reads system uptime from /proc/uptime
|
||||||
|
// Returns uptime in seconds, or service uptime as fallback
|
||||||
|
func (s *MetricsService) getSystemUptime() float64 {
|
||||||
|
file, err := os.Open("/proc/uptime")
|
||||||
|
if err != nil {
|
||||||
|
// Fallback to service uptime if /proc/uptime is not available
|
||||||
|
s.logger.Warn("Failed to read /proc/uptime, using service uptime", "error", err)
|
||||||
|
return time.Since(s.startTime).Seconds()
|
||||||
|
}
|
||||||
|
defer file.Close()
|
||||||
|
|
||||||
|
scanner := bufio.NewScanner(file)
|
||||||
|
if !scanner.Scan() {
|
||||||
|
// Fallback to service uptime if file is empty
|
||||||
|
s.logger.Warn("Failed to read /proc/uptime content, using service uptime")
|
||||||
|
return time.Since(s.startTime).Seconds()
|
||||||
|
}
|
||||||
|
|
||||||
|
line := strings.TrimSpace(scanner.Text())
|
||||||
|
fields := strings.Fields(line)
|
||||||
|
if len(fields) == 0 {
|
||||||
|
// Fallback to service uptime if no data
|
||||||
|
s.logger.Warn("No data in /proc/uptime, using service uptime")
|
||||||
|
return time.Since(s.startTime).Seconds()
|
||||||
|
}
|
||||||
|
|
||||||
|
// First field is system uptime in seconds
|
||||||
|
uptimeSeconds, err := strconv.ParseFloat(fields[0], 64)
|
||||||
|
if err != nil {
|
||||||
|
// Fallback to service uptime if parsing fails
|
||||||
|
s.logger.Warn("Failed to parse /proc/uptime, using service uptime", "error", err)
|
||||||
|
return time.Since(s.startTime).Seconds()
|
||||||
|
}
|
||||||
|
|
||||||
|
return uptimeSeconds
|
||||||
|
}
|
||||||
|
|
||||||
|
// getSystemMemory reads system memory from /proc/meminfo
|
||||||
|
// Returns total, used (in bytes), and usage percentage
|
||||||
|
func (s *MetricsService) getSystemMemory() (int64, int64, float64) {
|
||||||
|
file, err := os.Open("/proc/meminfo")
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Warn("Failed to read /proc/meminfo, using Go runtime memory", "error", err)
|
||||||
|
var m runtime.MemStats
|
||||||
|
runtime.ReadMemStats(&m)
|
||||||
|
memoryUsed := int64(m.Alloc)
|
||||||
|
memoryTotal := int64(m.Sys)
|
||||||
|
memoryPercent := float64(memoryUsed) / float64(memoryTotal) * 100
|
||||||
|
return memoryTotal, memoryUsed, memoryPercent
|
||||||
|
}
|
||||||
|
defer file.Close()
|
||||||
|
|
||||||
|
var memTotal, memAvailable, memFree, buffers, cached int64
|
||||||
|
scanner := bufio.NewScanner(file)
|
||||||
|
|
||||||
|
for scanner.Scan() {
|
||||||
|
line := strings.TrimSpace(scanner.Text())
|
||||||
|
if line == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse line like "MemTotal: 16375596 kB"
|
||||||
|
// or "MemTotal: 16375596" (some systems don't have unit)
|
||||||
|
colonIdx := strings.Index(line, ":")
|
||||||
|
if colonIdx == -1 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
key := strings.TrimSpace(line[:colonIdx])
|
||||||
|
valuePart := strings.TrimSpace(line[colonIdx+1:])
|
||||||
|
|
||||||
|
// Split value part to get number (ignore unit like "kB")
|
||||||
|
fields := strings.Fields(valuePart)
|
||||||
|
if len(fields) == 0 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
value, err := strconv.ParseInt(fields[0], 10, 64)
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Values in /proc/meminfo are in KB, convert to bytes
|
||||||
|
valueBytes := value * 1024
|
||||||
|
|
||||||
|
switch key {
|
||||||
|
case "MemTotal":
|
||||||
|
memTotal = valueBytes
|
||||||
|
case "MemAvailable":
|
||||||
|
memAvailable = valueBytes
|
||||||
|
case "MemFree":
|
||||||
|
memFree = valueBytes
|
||||||
|
case "Buffers":
|
||||||
|
buffers = valueBytes
|
||||||
|
case "Cached":
|
||||||
|
cached = valueBytes
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := scanner.Err(); err != nil {
|
||||||
|
s.logger.Warn("Error scanning /proc/meminfo", "error", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if memTotal == 0 {
|
||||||
|
s.logger.Warn("Failed to get MemTotal from /proc/meminfo, using Go runtime memory", "memTotal", memTotal)
|
||||||
|
var m runtime.MemStats
|
||||||
|
runtime.ReadMemStats(&m)
|
||||||
|
memoryUsed := int64(m.Alloc)
|
||||||
|
memoryTotal := int64(m.Sys)
|
||||||
|
memoryPercent := float64(memoryUsed) / float64(memoryTotal) * 100
|
||||||
|
return memoryTotal, memoryUsed, memoryPercent
|
||||||
|
}
|
||||||
|
|
||||||
|
// Calculate used memory
|
||||||
|
// If MemAvailable exists (kernel 3.14+), use it for more accurate calculation
|
||||||
|
var memoryUsed int64
|
||||||
|
if memAvailable > 0 {
|
||||||
|
memoryUsed = memTotal - memAvailable
|
||||||
|
} else {
|
||||||
|
// Fallback: MemTotal - MemFree - Buffers - Cached
|
||||||
|
memoryUsed = memTotal - memFree - buffers - cached
|
||||||
|
if memoryUsed < 0 {
|
||||||
|
memoryUsed = memTotal - memFree
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
memoryPercent := float64(memoryUsed) / float64(memTotal) * 100
|
||||||
|
|
||||||
|
s.logger.Debug("System memory stats",
|
||||||
|
"memTotal", memTotal,
|
||||||
|
"memAvailable", memAvailable,
|
||||||
|
"memoryUsed", memoryUsed,
|
||||||
|
"memoryPercent", memoryPercent)
|
||||||
|
|
||||||
|
return memTotal, memoryUsed, memoryPercent
|
||||||
|
}
|
||||||
|
|
||||||
|
// getCPUUsage reads CPU usage from /proc/stat
|
||||||
|
// Requires two readings to calculate percentage
|
||||||
|
func (s *MetricsService) getCPUUsage() float64 {
|
||||||
|
currentCPU, err := s.readCPUStats()
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Warn("Failed to read CPU stats", "error", err)
|
||||||
|
return 0.0
|
||||||
|
}
|
||||||
|
|
||||||
|
// If this is the first reading, store it and return 0
|
||||||
|
if s.lastCPU == nil {
|
||||||
|
s.lastCPU = currentCPU
|
||||||
|
s.lastCPUTime = time.Now()
|
||||||
|
return 0.0
|
||||||
|
}
|
||||||
|
|
||||||
|
// Calculate time difference
|
||||||
|
timeDiff := time.Since(s.lastCPUTime).Seconds()
|
||||||
|
if timeDiff < 0.1 {
|
||||||
|
// Too soon, return previous value or 0
|
||||||
|
return 0.0
|
||||||
|
}
|
||||||
|
|
||||||
|
// Calculate total CPU time
|
||||||
|
prevTotal := s.lastCPU.user + s.lastCPU.nice + s.lastCPU.system + s.lastCPU.idle +
|
||||||
|
s.lastCPU.iowait + s.lastCPU.irq + s.lastCPU.softirq + s.lastCPU.steal + s.lastCPU.guest
|
||||||
|
currTotal := currentCPU.user + currentCPU.nice + currentCPU.system + currentCPU.idle +
|
||||||
|
currentCPU.iowait + currentCPU.irq + currentCPU.softirq + currentCPU.steal + currentCPU.guest
|
||||||
|
|
||||||
|
// Calculate idle time
|
||||||
|
prevIdle := s.lastCPU.idle + s.lastCPU.iowait
|
||||||
|
currIdle := currentCPU.idle + currentCPU.iowait
|
||||||
|
|
||||||
|
// Calculate used time
|
||||||
|
totalDiff := currTotal - prevTotal
|
||||||
|
idleDiff := currIdle - prevIdle
|
||||||
|
|
||||||
|
if totalDiff == 0 {
|
||||||
|
return 0.0
|
||||||
|
}
|
||||||
|
|
||||||
|
// Calculate CPU usage percentage
|
||||||
|
usagePercent := 100.0 * (1.0 - float64(idleDiff)/float64(totalDiff))
|
||||||
|
|
||||||
|
// Update last CPU stats
|
||||||
|
s.lastCPU = currentCPU
|
||||||
|
s.lastCPUTime = time.Now()
|
||||||
|
|
||||||
|
return usagePercent
|
||||||
|
}
|
||||||
|
|
||||||
|
// readCPUStats reads CPU statistics from /proc/stat
|
||||||
|
func (s *MetricsService) readCPUStats() (*cpuStats, error) {
|
||||||
|
file, err := os.Open("/proc/stat")
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to open /proc/stat: %w", err)
|
||||||
|
}
|
||||||
|
defer file.Close()
|
||||||
|
|
||||||
|
scanner := bufio.NewScanner(file)
|
||||||
|
if !scanner.Scan() {
|
||||||
|
return nil, fmt.Errorf("failed to read /proc/stat")
|
||||||
|
}
|
||||||
|
|
||||||
|
line := strings.TrimSpace(scanner.Text())
|
||||||
|
if !strings.HasPrefix(line, "cpu ") {
|
||||||
|
return nil, fmt.Errorf("invalid /proc/stat format")
|
||||||
|
}
|
||||||
|
|
||||||
|
fields := strings.Fields(line)
|
||||||
|
if len(fields) < 8 {
|
||||||
|
return nil, fmt.Errorf("insufficient CPU stats fields")
|
||||||
|
}
|
||||||
|
|
||||||
|
stats := &cpuStats{}
|
||||||
|
stats.user, _ = strconv.ParseUint(fields[1], 10, 64)
|
||||||
|
stats.nice, _ = strconv.ParseUint(fields[2], 10, 64)
|
||||||
|
stats.system, _ = strconv.ParseUint(fields[3], 10, 64)
|
||||||
|
stats.idle, _ = strconv.ParseUint(fields[4], 10, 64)
|
||||||
|
stats.iowait, _ = strconv.ParseUint(fields[5], 10, 64)
|
||||||
|
stats.irq, _ = strconv.ParseUint(fields[6], 10, 64)
|
||||||
|
stats.softirq, _ = strconv.ParseUint(fields[7], 10, 64)
|
||||||
|
|
||||||
|
if len(fields) > 8 {
|
||||||
|
stats.steal, _ = strconv.ParseUint(fields[8], 10, 64)
|
||||||
|
}
|
||||||
|
if len(fields) > 9 {
|
||||||
|
stats.guest, _ = strconv.ParseUint(fields[9], 10, 64)
|
||||||
|
}
|
||||||
|
|
||||||
|
return stats, nil
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
package scst
|
package scst
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"fmt"
|
||||||
"net/http"
|
"net/http"
|
||||||
|
|
||||||
"github.com/atlasos/calypso/internal/common/database"
|
"github.com/atlasos/calypso/internal/common/database"
|
||||||
@@ -11,19 +12,19 @@ import (
|
|||||||
|
|
||||||
// Handler handles SCST-related API requests
|
// Handler handles SCST-related API requests
|
||||||
type Handler struct {
|
type Handler struct {
|
||||||
service *Service
|
service *Service
|
||||||
taskEngine *tasks.Engine
|
taskEngine *tasks.Engine
|
||||||
db *database.DB
|
db *database.DB
|
||||||
logger *logger.Logger
|
logger *logger.Logger
|
||||||
}
|
}
|
||||||
|
|
||||||
// NewHandler creates a new SCST handler
|
// NewHandler creates a new SCST handler
|
||||||
func NewHandler(db *database.DB, log *logger.Logger) *Handler {
|
func NewHandler(db *database.DB, log *logger.Logger) *Handler {
|
||||||
return &Handler{
|
return &Handler{
|
||||||
service: NewService(db, log),
|
service: NewService(db, log),
|
||||||
taskEngine: tasks.NewEngine(db, log),
|
taskEngine: tasks.NewEngine(db, log),
|
||||||
db: db,
|
db: db,
|
||||||
logger: log,
|
logger: log,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -55,21 +56,34 @@ func (h *Handler) GetTarget(c *gin.Context) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Get LUNs
|
// Get LUNs
|
||||||
luns, _ := h.service.GetTargetLUNs(c.Request.Context(), targetID)
|
luns, err := h.service.GetTargetLUNs(c.Request.Context(), targetID)
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Warn("Failed to get LUNs", "target_id", targetID, "error", err)
|
||||||
|
// Return empty array instead of nil
|
||||||
|
luns = []LUN{}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get initiator groups
|
||||||
|
groups, err2 := h.service.GetTargetInitiatorGroups(c.Request.Context(), targetID)
|
||||||
|
if err2 != nil {
|
||||||
|
h.logger.Warn("Failed to get initiator groups", "target_id", targetID, "error", err2)
|
||||||
|
groups = []InitiatorGroup{}
|
||||||
|
}
|
||||||
|
|
||||||
c.JSON(http.StatusOK, gin.H{
|
c.JSON(http.StatusOK, gin.H{
|
||||||
"target": target,
|
"target": target,
|
||||||
"luns": luns,
|
"luns": luns,
|
||||||
|
"initiator_groups": groups,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
// CreateTargetRequest represents a target creation request
|
// CreateTargetRequest represents a target creation request
|
||||||
type CreateTargetRequest struct {
|
type CreateTargetRequest struct {
|
||||||
IQN string `json:"iqn" binding:"required"`
|
IQN string `json:"iqn" binding:"required"`
|
||||||
TargetType string `json:"target_type" binding:"required"`
|
TargetType string `json:"target_type" binding:"required"`
|
||||||
Name string `json:"name" binding:"required"`
|
Name string `json:"name" binding:"required"`
|
||||||
Description string `json:"description"`
|
Description string `json:"description"`
|
||||||
SingleInitiatorOnly bool `json:"single_initiator_only"`
|
SingleInitiatorOnly bool `json:"single_initiator_only"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// CreateTarget creates a new SCST target
|
// CreateTarget creates a new SCST target
|
||||||
@@ -83,13 +97,13 @@ func (h *Handler) CreateTarget(c *gin.Context) {
|
|||||||
userID, _ := c.Get("user_id")
|
userID, _ := c.Get("user_id")
|
||||||
|
|
||||||
target := &Target{
|
target := &Target{
|
||||||
IQN: req.IQN,
|
IQN: req.IQN,
|
||||||
TargetType: req.TargetType,
|
TargetType: req.TargetType,
|
||||||
Name: req.Name,
|
Name: req.Name,
|
||||||
Description: req.Description,
|
Description: req.Description,
|
||||||
IsActive: true,
|
IsActive: true,
|
||||||
SingleInitiatorOnly: req.SingleInitiatorOnly || req.TargetType == "vtl" || req.TargetType == "physical_tape",
|
SingleInitiatorOnly: req.SingleInitiatorOnly || req.TargetType == "vtl" || req.TargetType == "physical_tape",
|
||||||
CreatedBy: userID.(string),
|
CreatedBy: userID.(string),
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := h.service.CreateTarget(c.Request.Context(), target); err != nil {
|
if err := h.service.CreateTarget(c.Request.Context(), target); err != nil {
|
||||||
@@ -103,9 +117,9 @@ func (h *Handler) CreateTarget(c *gin.Context) {
|
|||||||
|
|
||||||
// AddLUNRequest represents a LUN addition request
|
// AddLUNRequest represents a LUN addition request
|
||||||
type AddLUNRequest struct {
|
type AddLUNRequest struct {
|
||||||
DeviceName string `json:"device_name" binding:"required"`
|
DeviceName string `json:"device_name" binding:"required"`
|
||||||
DevicePath string `json:"device_path" binding:"required"`
|
DevicePath string `json:"device_path" binding:"required"`
|
||||||
LUNNumber int `json:"lun_number" binding:"required"`
|
LUNNumber int `json:"lun_number" binding:"required"`
|
||||||
HandlerType string `json:"handler_type" binding:"required"`
|
HandlerType string `json:"handler_type" binding:"required"`
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -121,7 +135,15 @@ func (h *Handler) AddLUN(c *gin.Context) {
|
|||||||
|
|
||||||
var req AddLUNRequest
|
var req AddLUNRequest
|
||||||
if err := c.ShouldBindJSON(&req); err != nil {
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid request"})
|
h.logger.Error("Failed to bind AddLUN request", "error", err)
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("invalid request: %v", err)})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate required fields
|
||||||
|
if req.DeviceName == "" || req.DevicePath == "" || req.HandlerType == "" {
|
||||||
|
h.logger.Error("Missing required fields in AddLUN request", "device_name", req.DeviceName, "device_path", req.DevicePath, "handler_type", req.HandlerType)
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "device_name, device_path, and handler_type are required"})
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -164,6 +186,110 @@ func (h *Handler) AddInitiator(c *gin.Context) {
|
|||||||
c.JSON(http.StatusOK, gin.H{"message": "Initiator added successfully"})
|
c.JSON(http.StatusOK, gin.H{"message": "Initiator added successfully"})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ListAllInitiators lists all initiators across all targets
|
||||||
|
func (h *Handler) ListAllInitiators(c *gin.Context) {
|
||||||
|
initiators, err := h.service.ListAllInitiators(c.Request.Context())
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to list initiators", "error", err)
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to list initiators"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if initiators == nil {
|
||||||
|
initiators = []InitiatorWithTarget{}
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"initiators": initiators})
|
||||||
|
}
|
||||||
|
|
||||||
|
// RemoveInitiator removes an initiator
|
||||||
|
func (h *Handler) RemoveInitiator(c *gin.Context) {
|
||||||
|
initiatorID := c.Param("id")
|
||||||
|
|
||||||
|
if err := h.service.RemoveInitiator(c.Request.Context(), initiatorID); err != nil {
|
||||||
|
if err.Error() == "initiator not found" {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "initiator not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
h.logger.Error("Failed to remove initiator", "error", err)
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"message": "Initiator removed successfully"})
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetInitiator retrieves an initiator by ID
|
||||||
|
func (h *Handler) GetInitiator(c *gin.Context) {
|
||||||
|
initiatorID := c.Param("id")
|
||||||
|
|
||||||
|
initiator, err := h.service.GetInitiator(c.Request.Context(), initiatorID)
|
||||||
|
if err != nil {
|
||||||
|
if err.Error() == "initiator not found" {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "initiator not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
h.logger.Error("Failed to get initiator", "error", err)
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to get initiator"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, initiator)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListExtents lists all device extents
|
||||||
|
func (h *Handler) ListExtents(c *gin.Context) {
|
||||||
|
extents, err := h.service.ListExtents(c.Request.Context())
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to list extents", "error", err)
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to list extents"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if extents == nil {
|
||||||
|
extents = []Extent{}
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"extents": extents})
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateExtentRequest represents a request to create an extent
|
||||||
|
type CreateExtentRequest struct {
|
||||||
|
DeviceName string `json:"device_name" binding:"required"`
|
||||||
|
DevicePath string `json:"device_path" binding:"required"`
|
||||||
|
HandlerType string `json:"handler_type" binding:"required"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateExtent creates a new device extent
|
||||||
|
func (h *Handler) CreateExtent(c *gin.Context) {
|
||||||
|
var req CreateExtentRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid request"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.service.CreateExtent(c.Request.Context(), req.DeviceName, req.DevicePath, req.HandlerType); err != nil {
|
||||||
|
h.logger.Error("Failed to create extent", "error", err)
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusCreated, gin.H{"message": "Extent created successfully"})
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteExtent deletes a device extent
|
||||||
|
func (h *Handler) DeleteExtent(c *gin.Context) {
|
||||||
|
deviceName := c.Param("device")
|
||||||
|
|
||||||
|
if err := h.service.DeleteExtent(c.Request.Context(), deviceName); err != nil {
|
||||||
|
h.logger.Error("Failed to delete extent", "error", err)
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"message": "Extent deleted successfully"})
|
||||||
|
}
|
||||||
|
|
||||||
// ApplyConfig applies SCST configuration
|
// ApplyConfig applies SCST configuration
|
||||||
func (h *Handler) ApplyConfig(c *gin.Context) {
|
func (h *Handler) ApplyConfig(c *gin.Context) {
|
||||||
userID, _ := c.Get("user_id")
|
userID, _ := c.Get("user_id")
|
||||||
@@ -209,3 +335,142 @@ func (h *Handler) ListHandlers(c *gin.Context) {
|
|||||||
c.JSON(http.StatusOK, gin.H{"handlers": handlers})
|
c.JSON(http.StatusOK, gin.H{"handlers": handlers})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ListPortals lists all iSCSI portals
|
||||||
|
func (h *Handler) ListPortals(c *gin.Context) {
|
||||||
|
portals, err := h.service.ListPortals(c.Request.Context())
|
||||||
|
if err != nil {
|
||||||
|
h.logger.Error("Failed to list portals", "error", err)
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to list portals"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ensure we return an empty array instead of null
|
||||||
|
if portals == nil {
|
||||||
|
portals = []Portal{}
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"portals": portals})
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreatePortal creates a new portal
|
||||||
|
func (h *Handler) CreatePortal(c *gin.Context) {
|
||||||
|
var portal Portal
|
||||||
|
if err := c.ShouldBindJSON(&portal); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid request"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.service.CreatePortal(c.Request.Context(), &portal); err != nil {
|
||||||
|
h.logger.Error("Failed to create portal", "error", err)
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusCreated, portal)
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdatePortal updates a portal
|
||||||
|
func (h *Handler) UpdatePortal(c *gin.Context) {
|
||||||
|
id := c.Param("id")
|
||||||
|
|
||||||
|
var portal Portal
|
||||||
|
if err := c.ShouldBindJSON(&portal); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid request"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.service.UpdatePortal(c.Request.Context(), id, &portal); err != nil {
|
||||||
|
if err.Error() == "portal not found" {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "portal not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
h.logger.Error("Failed to update portal", "error", err)
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, portal)
|
||||||
|
}
|
||||||
|
|
||||||
|
// EnableTarget enables a target
|
||||||
|
func (h *Handler) EnableTarget(c *gin.Context) {
|
||||||
|
targetID := c.Param("id")
|
||||||
|
|
||||||
|
target, err := h.service.GetTarget(c.Request.Context(), targetID)
|
||||||
|
if err != nil {
|
||||||
|
if err.Error() == "target not found" {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "target not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
h.logger.Error("Failed to get target", "error", err)
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to get target"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.service.EnableTarget(c.Request.Context(), target.IQN); err != nil {
|
||||||
|
h.logger.Error("Failed to enable target", "error", err)
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"message": "Target enabled successfully"})
|
||||||
|
}
|
||||||
|
|
||||||
|
// DisableTarget disables a target
|
||||||
|
func (h *Handler) DisableTarget(c *gin.Context) {
|
||||||
|
targetID := c.Param("id")
|
||||||
|
|
||||||
|
target, err := h.service.GetTarget(c.Request.Context(), targetID)
|
||||||
|
if err != nil {
|
||||||
|
if err.Error() == "target not found" {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "target not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
h.logger.Error("Failed to get target", "error", err)
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to get target"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.service.DisableTarget(c.Request.Context(), target.IQN); err != nil {
|
||||||
|
h.logger.Error("Failed to disable target", "error", err)
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"message": "Target disabled successfully"})
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeletePortal deletes a portal
|
||||||
|
func (h *Handler) DeletePortal(c *gin.Context) {
|
||||||
|
id := c.Param("id")
|
||||||
|
|
||||||
|
if err := h.service.DeletePortal(c.Request.Context(), id); err != nil {
|
||||||
|
if err.Error() == "portal not found" {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "portal not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
h.logger.Error("Failed to delete portal", "error", err)
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"message": "Portal deleted successfully"})
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetPortal retrieves a portal by ID
|
||||||
|
func (h *Handler) GetPortal(c *gin.Context) {
|
||||||
|
id := c.Param("id")
|
||||||
|
|
||||||
|
portal, err := h.service.GetPortal(c.Request.Context(), id)
|
||||||
|
if err != nil {
|
||||||
|
if err.Error() == "portal not found" {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "portal not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
h.logger.Error("Failed to get portal", "error", err)
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to get portal"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, portal)
|
||||||
|
}
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -67,7 +67,7 @@ func (m *MHVTLMonitor) Stop() {
|
|||||||
|
|
||||||
// syncMHVTL parses mhvtl configuration and syncs to database
|
// syncMHVTL parses mhvtl configuration and syncs to database
|
||||||
func (m *MHVTLMonitor) syncMHVTL(ctx context.Context) {
|
func (m *MHVTLMonitor) syncMHVTL(ctx context.Context) {
|
||||||
m.logger.Debug("Running MHVTL configuration sync")
|
m.logger.Info("Running MHVTL configuration sync")
|
||||||
|
|
||||||
deviceConfPath := filepath.Join(m.configPath, "device.conf")
|
deviceConfPath := filepath.Join(m.configPath, "device.conf")
|
||||||
if _, err := os.Stat(deviceConfPath); os.IsNotExist(err) {
|
if _, err := os.Stat(deviceConfPath); os.IsNotExist(err) {
|
||||||
@@ -84,6 +84,11 @@ func (m *MHVTLMonitor) syncMHVTL(ctx context.Context) {
|
|||||||
|
|
||||||
m.logger.Info("Parsed MHVTL configuration", "libraries", len(libraries), "drives", len(drives))
|
m.logger.Info("Parsed MHVTL configuration", "libraries", len(libraries), "drives", len(drives))
|
||||||
|
|
||||||
|
// Log parsed drives for debugging
|
||||||
|
for _, drive := range drives {
|
||||||
|
m.logger.Debug("Parsed drive", "drive_id", drive.DriveID, "library_id", drive.LibraryID, "slot", drive.Slot)
|
||||||
|
}
|
||||||
|
|
||||||
// Sync libraries to database
|
// Sync libraries to database
|
||||||
for _, lib := range libraries {
|
for _, lib := range libraries {
|
||||||
if err := m.syncLibrary(ctx, lib); err != nil {
|
if err := m.syncLibrary(ctx, lib); err != nil {
|
||||||
@@ -94,7 +99,9 @@ func (m *MHVTLMonitor) syncMHVTL(ctx context.Context) {
|
|||||||
// Sync drives to database
|
// Sync drives to database
|
||||||
for _, drive := range drives {
|
for _, drive := range drives {
|
||||||
if err := m.syncDrive(ctx, drive); err != nil {
|
if err := m.syncDrive(ctx, drive); err != nil {
|
||||||
m.logger.Error("Failed to sync drive", "drive_id", drive.DriveID, "error", err)
|
m.logger.Error("Failed to sync drive", "drive_id", drive.DriveID, "library_id", drive.LibraryID, "slot", drive.Slot, "error", err)
|
||||||
|
} else {
|
||||||
|
m.logger.Debug("Synced drive", "drive_id", drive.DriveID, "library_id", drive.LibraryID, "slot", drive.Slot)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -106,7 +113,7 @@ func (m *MHVTLMonitor) syncMHVTL(ctx context.Context) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
m.logger.Debug("MHVTL configuration sync completed")
|
m.logger.Info("MHVTL configuration sync completed")
|
||||||
}
|
}
|
||||||
|
|
||||||
// LibraryInfo represents a library from device.conf
|
// LibraryInfo represents a library from device.conf
|
||||||
@@ -189,6 +196,7 @@ func (m *MHVTLMonitor) parseDeviceConf(ctx context.Context, path string) ([]Libr
|
|||||||
Target: matches[3],
|
Target: matches[3],
|
||||||
LUN: matches[4],
|
LUN: matches[4],
|
||||||
}
|
}
|
||||||
|
// Library ID and Slot might be on the same line or next line
|
||||||
if matches := libraryIDRegex.FindStringSubmatch(line); matches != nil {
|
if matches := libraryIDRegex.FindStringSubmatch(line); matches != nil {
|
||||||
libID, _ := strconv.Atoi(matches[1])
|
libID, _ := strconv.Atoi(matches[1])
|
||||||
slot, _ := strconv.Atoi(matches[2])
|
slot, _ := strconv.Atoi(matches[2])
|
||||||
@@ -198,34 +206,63 @@ func (m *MHVTLMonitor) parseDeviceConf(ctx context.Context, path string) ([]Libr
|
|||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
// Parse library fields
|
// Parse library fields (only if we're in a library section and not in a drive section)
|
||||||
if currentLibrary != nil {
|
if currentLibrary != nil && currentDrive == nil {
|
||||||
if strings.HasPrefix(line, "Vendor identification:") {
|
// Handle both "Vendor identification:" and " Vendor identification:" (with leading space)
|
||||||
currentLibrary.Vendor = strings.TrimSpace(strings.TrimPrefix(line, "Vendor identification:"))
|
if strings.Contains(line, "Vendor identification:") {
|
||||||
} else if strings.HasPrefix(line, "Product identification:") {
|
parts := strings.Split(line, "Vendor identification:")
|
||||||
currentLibrary.Product = strings.TrimSpace(strings.TrimPrefix(line, "Product identification:"))
|
if len(parts) > 1 {
|
||||||
} else if strings.HasPrefix(line, "Unit serial number:") {
|
currentLibrary.Vendor = strings.TrimSpace(parts[1])
|
||||||
currentLibrary.SerialNumber = strings.TrimSpace(strings.TrimPrefix(line, "Unit serial number:"))
|
m.logger.Debug("Parsed vendor", "vendor", currentLibrary.Vendor, "library_id", currentLibrary.LibraryID)
|
||||||
} else if strings.HasPrefix(line, "Home directory:") {
|
}
|
||||||
currentLibrary.HomeDirectory = strings.TrimSpace(strings.TrimPrefix(line, "Home directory:"))
|
} else if strings.Contains(line, "Product identification:") {
|
||||||
|
parts := strings.Split(line, "Product identification:")
|
||||||
|
if len(parts) > 1 {
|
||||||
|
currentLibrary.Product = strings.TrimSpace(parts[1])
|
||||||
|
m.logger.Info("Parsed library product", "product", currentLibrary.Product, "library_id", currentLibrary.LibraryID)
|
||||||
|
}
|
||||||
|
} else if strings.Contains(line, "Unit serial number:") {
|
||||||
|
parts := strings.Split(line, "Unit serial number:")
|
||||||
|
if len(parts) > 1 {
|
||||||
|
currentLibrary.SerialNumber = strings.TrimSpace(parts[1])
|
||||||
|
}
|
||||||
|
} else if strings.Contains(line, "Home directory:") {
|
||||||
|
parts := strings.Split(line, "Home directory:")
|
||||||
|
if len(parts) > 1 {
|
||||||
|
currentLibrary.HomeDirectory = strings.TrimSpace(parts[1])
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Parse drive fields
|
// Parse drive fields
|
||||||
if currentDrive != nil {
|
if currentDrive != nil {
|
||||||
if strings.HasPrefix(line, "Vendor identification:") {
|
// Check for Library ID and Slot first (can be on separate line)
|
||||||
currentDrive.Vendor = strings.TrimSpace(strings.TrimPrefix(line, "Vendor identification:"))
|
if strings.Contains(line, "Library ID:") && strings.Contains(line, "Slot:") {
|
||||||
} else if strings.HasPrefix(line, "Product identification:") {
|
|
||||||
currentDrive.Product = strings.TrimSpace(strings.TrimPrefix(line, "Product identification:"))
|
|
||||||
} else if strings.HasPrefix(line, "Unit serial number:") {
|
|
||||||
currentDrive.SerialNumber = strings.TrimSpace(strings.TrimPrefix(line, "Unit serial number:"))
|
|
||||||
} else if strings.HasPrefix(line, "Library ID:") && strings.Contains(line, "Slot:") {
|
|
||||||
matches := libraryIDRegex.FindStringSubmatch(line)
|
matches := libraryIDRegex.FindStringSubmatch(line)
|
||||||
if matches != nil {
|
if matches != nil {
|
||||||
libID, _ := strconv.Atoi(matches[1])
|
libID, _ := strconv.Atoi(matches[1])
|
||||||
slot, _ := strconv.Atoi(matches[2])
|
slot, _ := strconv.Atoi(matches[2])
|
||||||
currentDrive.LibraryID = libID
|
currentDrive.LibraryID = libID
|
||||||
currentDrive.Slot = slot
|
currentDrive.Slot = slot
|
||||||
|
m.logger.Debug("Parsed drive Library ID and Slot", "drive_id", currentDrive.DriveID, "library_id", libID, "slot", slot)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Handle both "Vendor identification:" and " Vendor identification:" (with leading space)
|
||||||
|
if strings.Contains(line, "Vendor identification:") {
|
||||||
|
parts := strings.Split(line, "Vendor identification:")
|
||||||
|
if len(parts) > 1 {
|
||||||
|
currentDrive.Vendor = strings.TrimSpace(parts[1])
|
||||||
|
}
|
||||||
|
} else if strings.Contains(line, "Product identification:") {
|
||||||
|
parts := strings.Split(line, "Product identification:")
|
||||||
|
if len(parts) > 1 {
|
||||||
|
currentDrive.Product = strings.TrimSpace(parts[1])
|
||||||
|
}
|
||||||
|
} else if strings.Contains(line, "Unit serial number:") {
|
||||||
|
parts := strings.Split(line, "Unit serial number:")
|
||||||
|
if len(parts) > 1 {
|
||||||
|
currentDrive.SerialNumber = strings.TrimSpace(parts[1])
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -255,9 +292,17 @@ func (m *MHVTLMonitor) syncLibrary(ctx context.Context, libInfo LibraryInfo) err
|
|||||||
libInfo.LibraryID,
|
libInfo.LibraryID,
|
||||||
).Scan(&existingID)
|
).Scan(&existingID)
|
||||||
|
|
||||||
|
m.logger.Debug("Syncing library", "library_id", libInfo.LibraryID, "vendor", libInfo.Vendor, "product", libInfo.Product)
|
||||||
|
|
||||||
|
// Use product identification for library name (without library ID)
|
||||||
libraryName := fmt.Sprintf("VTL-%d", libInfo.LibraryID)
|
libraryName := fmt.Sprintf("VTL-%d", libInfo.LibraryID)
|
||||||
if libInfo.Product != "" {
|
if libInfo.Product != "" {
|
||||||
libraryName = fmt.Sprintf("%s-%d", libInfo.Product, libInfo.LibraryID)
|
// Use only product name, without library ID
|
||||||
|
libraryName = libInfo.Product
|
||||||
|
m.logger.Info("Using product for library name", "product", libInfo.Product, "library_id", libInfo.LibraryID, "name", libraryName)
|
||||||
|
} else if libInfo.Vendor != "" {
|
||||||
|
libraryName = libInfo.Vendor
|
||||||
|
m.logger.Info("Using vendor for library name (product not available)", "vendor", libInfo.Vendor, "library_id", libInfo.LibraryID)
|
||||||
}
|
}
|
||||||
|
|
||||||
if err == sql.ErrNoRows {
|
if err == sql.ErrNoRows {
|
||||||
@@ -275,23 +320,41 @@ func (m *MHVTLMonitor) syncLibrary(ctx context.Context, libInfo LibraryInfo) err
|
|||||||
_, err = m.service.db.ExecContext(ctx, `
|
_, err = m.service.db.ExecContext(ctx, `
|
||||||
INSERT INTO virtual_tape_libraries (
|
INSERT INTO virtual_tape_libraries (
|
||||||
name, description, mhvtl_library_id, backing_store_path,
|
name, description, mhvtl_library_id, backing_store_path,
|
||||||
slot_count, drive_count, is_active
|
vendor, slot_count, drive_count, is_active
|
||||||
) VALUES ($1, $2, $3, $4, $5, $6, $7)
|
) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
|
||||||
`, libraryName, fmt.Sprintf("MHVTL Library %d (%s)", libInfo.LibraryID, libInfo.Product),
|
`, libraryName, fmt.Sprintf("MHVTL Library %d (%s)", libInfo.LibraryID, libInfo.Product),
|
||||||
libInfo.LibraryID, backingStorePath, slotCount, driveCount, true)
|
libInfo.LibraryID, backingStorePath, libInfo.Vendor, slotCount, driveCount, true)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("failed to insert library: %w", err)
|
return fmt.Errorf("failed to insert library: %w", err)
|
||||||
}
|
}
|
||||||
m.logger.Info("Created virtual library from MHVTL", "library_id", libInfo.LibraryID, "name", libraryName)
|
m.logger.Info("Created virtual library from MHVTL", "library_id", libInfo.LibraryID, "name", libraryName)
|
||||||
} else if err == nil {
|
} else if err == nil {
|
||||||
// Update existing library
|
// Update existing library - also update name if product is available
|
||||||
|
updateName := libraryName
|
||||||
|
// If product exists and current name doesn't match, update it
|
||||||
|
if libInfo.Product != "" {
|
||||||
|
var currentName string
|
||||||
|
err := m.service.db.QueryRowContext(ctx,
|
||||||
|
"SELECT name FROM virtual_tape_libraries WHERE id = $1", existingID,
|
||||||
|
).Scan(¤tName)
|
||||||
|
if err == nil {
|
||||||
|
// Use only product name, without library ID
|
||||||
|
expectedName := libInfo.Product
|
||||||
|
if currentName != expectedName {
|
||||||
|
updateName = expectedName
|
||||||
|
m.logger.Info("Updating library name", "old", currentName, "new", updateName, "product", libInfo.Product)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
m.logger.Info("Updating existing library", "library_id", libInfo.LibraryID, "product", libInfo.Product, "vendor", libInfo.Vendor, "old_name", libraryName, "new_name", updateName)
|
||||||
_, err = m.service.db.ExecContext(ctx, `
|
_, err = m.service.db.ExecContext(ctx, `
|
||||||
UPDATE virtual_tape_libraries SET
|
UPDATE virtual_tape_libraries SET
|
||||||
name = $1, description = $2, backing_store_path = $3,
|
name = $1, description = $2, backing_store_path = $3,
|
||||||
is_active = $4, updated_at = NOW()
|
vendor = $4, is_active = $5, updated_at = NOW()
|
||||||
WHERE id = $5
|
WHERE id = $6
|
||||||
`, libraryName, fmt.Sprintf("MHVTL Library %d (%s)", libInfo.LibraryID, libInfo.Product),
|
`, updateName, fmt.Sprintf("MHVTL Library %d (%s)", libInfo.LibraryID, libInfo.Product),
|
||||||
libInfo.HomeDirectory, true, existingID)
|
libInfo.HomeDirectory, libInfo.Vendor, true, existingID)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("failed to update library: %w", err)
|
return fmt.Errorf("failed to update library: %w", err)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -33,6 +33,7 @@ type VirtualTapeLibrary struct {
|
|||||||
Description string `json:"description"`
|
Description string `json:"description"`
|
||||||
MHVTLibraryID int `json:"mhvtl_library_id"`
|
MHVTLibraryID int `json:"mhvtl_library_id"`
|
||||||
BackingStorePath string `json:"backing_store_path"`
|
BackingStorePath string `json:"backing_store_path"`
|
||||||
|
Vendor string `json:"vendor,omitempty"`
|
||||||
SlotCount int `json:"slot_count"`
|
SlotCount int `json:"slot_count"`
|
||||||
DriveCount int `json:"drive_count"`
|
DriveCount int `json:"drive_count"`
|
||||||
IsActive bool `json:"is_active"`
|
IsActive bool `json:"is_active"`
|
||||||
@@ -223,6 +224,7 @@ func (s *Service) createTape(ctx context.Context, tape *VirtualTape) error {
|
|||||||
func (s *Service) ListLibraries(ctx context.Context) ([]VirtualTapeLibrary, error) {
|
func (s *Service) ListLibraries(ctx context.Context) ([]VirtualTapeLibrary, error) {
|
||||||
query := `
|
query := `
|
||||||
SELECT id, name, description, mhvtl_library_id, backing_store_path,
|
SELECT id, name, description, mhvtl_library_id, backing_store_path,
|
||||||
|
COALESCE(vendor, '') as vendor,
|
||||||
slot_count, drive_count, is_active, created_at, updated_at, created_by
|
slot_count, drive_count, is_active, created_at, updated_at, created_by
|
||||||
FROM virtual_tape_libraries
|
FROM virtual_tape_libraries
|
||||||
ORDER BY name
|
ORDER BY name
|
||||||
@@ -247,6 +249,7 @@ func (s *Service) ListLibraries(ctx context.Context) ([]VirtualTapeLibrary, erro
|
|||||||
var createdBy sql.NullString
|
var createdBy sql.NullString
|
||||||
err := rows.Scan(
|
err := rows.Scan(
|
||||||
&lib.ID, &lib.Name, &description, &lib.MHVTLibraryID, &lib.BackingStorePath,
|
&lib.ID, &lib.Name, &description, &lib.MHVTLibraryID, &lib.BackingStorePath,
|
||||||
|
&lib.Vendor,
|
||||||
&lib.SlotCount, &lib.DriveCount, &lib.IsActive,
|
&lib.SlotCount, &lib.DriveCount, &lib.IsActive,
|
||||||
&lib.CreatedAt, &lib.UpdatedAt, &createdBy,
|
&lib.CreatedAt, &lib.UpdatedAt, &createdBy,
|
||||||
)
|
)
|
||||||
@@ -284,6 +287,7 @@ func (s *Service) ListLibraries(ctx context.Context) ([]VirtualTapeLibrary, erro
|
|||||||
func (s *Service) GetLibrary(ctx context.Context, id string) (*VirtualTapeLibrary, error) {
|
func (s *Service) GetLibrary(ctx context.Context, id string) (*VirtualTapeLibrary, error) {
|
||||||
query := `
|
query := `
|
||||||
SELECT id, name, description, mhvtl_library_id, backing_store_path,
|
SELECT id, name, description, mhvtl_library_id, backing_store_path,
|
||||||
|
COALESCE(vendor, '') as vendor,
|
||||||
slot_count, drive_count, is_active, created_at, updated_at, created_by
|
slot_count, drive_count, is_active, created_at, updated_at, created_by
|
||||||
FROM virtual_tape_libraries
|
FROM virtual_tape_libraries
|
||||||
WHERE id = $1
|
WHERE id = $1
|
||||||
@@ -294,6 +298,7 @@ func (s *Service) GetLibrary(ctx context.Context, id string) (*VirtualTapeLibrar
|
|||||||
var createdBy sql.NullString
|
var createdBy sql.NullString
|
||||||
err := s.db.QueryRowContext(ctx, query, id).Scan(
|
err := s.db.QueryRowContext(ctx, query, id).Scan(
|
||||||
&lib.ID, &lib.Name, &description, &lib.MHVTLibraryID, &lib.BackingStorePath,
|
&lib.ID, &lib.Name, &description, &lib.MHVTLibraryID, &lib.BackingStorePath,
|
||||||
|
&lib.Vendor,
|
||||||
&lib.SlotCount, &lib.DriveCount, &lib.IsActive,
|
&lib.SlotCount, &lib.DriveCount, &lib.IsActive,
|
||||||
&lib.CreatedAt, &lib.UpdatedAt, &createdBy,
|
&lib.CreatedAt, &lib.UpdatedAt, &createdBy,
|
||||||
)
|
)
|
||||||
|
|||||||
25
deploy/systemd/calypso-logger.service
Normal file
25
deploy/systemd/calypso-logger.service
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
[Unit]
|
||||||
|
Description=Calypso Stack Log Aggregator
|
||||||
|
Documentation=https://github.com/atlasos/calypso
|
||||||
|
After=network.target
|
||||||
|
Wants=calypso-api.service calypso-frontend.service
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
Type=simple
|
||||||
|
# Run as root to access journald and write to /var/syslog
|
||||||
|
# Format: timestamp [service] message
|
||||||
|
ExecStart=/bin/bash -c '/usr/bin/journalctl -u calypso-api.service -u calypso-frontend.service -f --no-pager -o short-iso >> /var/syslog/calypso.log 2>&1'
|
||||||
|
Restart=always
|
||||||
|
RestartSec=5
|
||||||
|
|
||||||
|
# Security hardening
|
||||||
|
NoNewPrivileges=false
|
||||||
|
PrivateTmp=true
|
||||||
|
ReadWritePaths=/var/syslog
|
||||||
|
|
||||||
|
# Resource limits
|
||||||
|
LimitNOFILE=65536
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=multi-user.target
|
||||||
|
|
||||||
126
docs/services.md
Normal file
126
docs/services.md
Normal file
@@ -0,0 +1,126 @@
|
|||||||
|
# Calypso Appliance Services Documentation
|
||||||
|
|
||||||
|
This document provides an overview of all services that form the Calypso backup appliance.
|
||||||
|
|
||||||
|
## Core Calypso Services
|
||||||
|
|
||||||
|
### calypso-api.service
|
||||||
|
**Status**: Running
|
||||||
|
**Description**: AtlasOS Calypso API Service (Development)
|
||||||
|
**Purpose**: Main REST API backend for the Calypso appliance, handles all business logic and database operations.
|
||||||
|
**Binary**: `/development/calypso/backend/bin/calypso-api`
|
||||||
|
**Config**: `/development/calypso/backend/config.yaml.example`
|
||||||
|
|
||||||
|
### calypso-frontend.service
|
||||||
|
**Status**: Running
|
||||||
|
**Description**: Calypso Frontend Development Server
|
||||||
|
**Purpose**: Web UI for managing backups, storage, and monitoring the appliance.
|
||||||
|
**Port**: 3000
|
||||||
|
**Technology**: React + Vite (development mode)
|
||||||
|
|
||||||
|
## Backup Services (Bacula)
|
||||||
|
|
||||||
|
### bacula-director.service
|
||||||
|
**Status**: Running
|
||||||
|
**Description**: Bacula Director Daemon
|
||||||
|
**Purpose**: Central management daemon that orchestrates all backup, restore, and verify operations.
|
||||||
|
**Config**: `/etc/bacula/bacula-dir.conf`
|
||||||
|
**Docs**: `man:bacula-dir(8)`
|
||||||
|
|
||||||
|
### bacula-sd.service
|
||||||
|
**Status**: Running
|
||||||
|
**Description**: Bacula Storage Daemon
|
||||||
|
**Purpose**: Manages physical backup storage devices (disks, tapes, virtual tape libraries).
|
||||||
|
**Config**: `/etc/bacula/bacula-sd.conf`
|
||||||
|
|
||||||
|
### bacula-fd.service
|
||||||
|
**Status**: Running
|
||||||
|
**Description**: Bacula File Daemon
|
||||||
|
**Purpose**: Runs on systems being backed up, manages file access and metadata.
|
||||||
|
**Config**: `/etc/bacula/bacula-fd.conf`
|
||||||
|
|
||||||
|
## Storage/iSCSI Services (SCST)
|
||||||
|
|
||||||
|
### scst.service
|
||||||
|
**Status**: Active (exited)
|
||||||
|
**Description**: SCST - A Generic SCSI Target Subsystem
|
||||||
|
**Purpose**: Kernel-level SCSI target framework providing high-performance storage exports.
|
||||||
|
**Type**: One-shot service that loads SCST kernel modules
|
||||||
|
|
||||||
|
### iscsi-scstd.service
|
||||||
|
**Status**: Running
|
||||||
|
**Description**: iSCSI SCST Target Daemon
|
||||||
|
**Purpose**: Provides iSCSI protocol support for SCST, allowing network block storage exports.
|
||||||
|
**Port**: 3260 (standard iSCSI port)
|
||||||
|
**Configured Targets**:
|
||||||
|
- `iqn.2025-12.id.atlas:lun01` (enabled)
|
||||||
|
|
||||||
|
### iscsid.service
|
||||||
|
**Status**: Inactive
|
||||||
|
**Description**: iSCSI initiator daemon
|
||||||
|
**Purpose**: Client-side iSCSI service (not currently in use)
|
||||||
|
|
||||||
|
### open-iscsi.service
|
||||||
|
**Status**: Inactive
|
||||||
|
**Description**: Login to default iSCSI targets
|
||||||
|
**Purpose**: Automatic iSCSI target login (not currently in use)
|
||||||
|
|
||||||
|
## Virtual Tape Library
|
||||||
|
|
||||||
|
### mhvtl-load-modules.service
|
||||||
|
**Status**: Active (exited)
|
||||||
|
**Description**: Load mhvtl modules
|
||||||
|
**Purpose**: Loads mhVTL (virtual tape library) kernel modules for tape emulation.
|
||||||
|
**Type**: One-shot service that runs at boot
|
||||||
|
**Docs**: `man:vtltape(1)`, `man:vtllibrary(1)`
|
||||||
|
|
||||||
|
## Database
|
||||||
|
|
||||||
|
### postgresql.service
|
||||||
|
**Status**: Active (exited)
|
||||||
|
**Description**: PostgreSQL RDBMS
|
||||||
|
**Purpose**: Parent service for PostgreSQL database management
|
||||||
|
|
||||||
|
### postgresql@16-main.service
|
||||||
|
**Status**: Running
|
||||||
|
**Description**: PostgreSQL Cluster 16-main
|
||||||
|
**Purpose**: Main database for Calypso API, stores configuration, jobs, and metadata.
|
||||||
|
**Version**: PostgreSQL 16
|
||||||
|
|
||||||
|
## Service Management
|
||||||
|
|
||||||
|
### Check All Services Status
|
||||||
|
```bash
|
||||||
|
systemctl status calypso-api calypso-frontend bacula-director bacula-sd bacula-fd scst iscsi-scstd mhvtl-load-modules postgresql
|
||||||
|
```
|
||||||
|
|
||||||
|
### Rebuild and Restart Core Services
|
||||||
|
```bash
|
||||||
|
/development/calypso/scripts/rebuild-and-restart.sh
|
||||||
|
```
|
||||||
|
|
||||||
|
### Restart Individual Services
|
||||||
|
```bash
|
||||||
|
systemctl restart calypso-api.service
|
||||||
|
systemctl restart calypso-frontend.service
|
||||||
|
systemctl restart bacula-director.service
|
||||||
|
```
|
||||||
|
|
||||||
|
## Service Dependencies
|
||||||
|
|
||||||
|
```
|
||||||
|
PostgreSQL
|
||||||
|
└── Calypso API
|
||||||
|
└── Calypso Frontend
|
||||||
|
|
||||||
|
SCST
|
||||||
|
└── iSCSI SCST Target Daemon
|
||||||
|
|
||||||
|
mhVTL
|
||||||
|
└── Bacula Storage Daemon
|
||||||
|
└── Bacula Director
|
||||||
|
└── Bacula File Daemon
|
||||||
|
```
|
||||||
|
|
||||||
|
## Total Service Count
|
||||||
|
**11 services** forming the complete Calypso backup appliance stack.
|
||||||
@@ -11,6 +11,9 @@ import VTLDetailPage from '@/pages/VTLDetail'
|
|||||||
import ISCSITargetsPage from '@/pages/ISCSITargets'
|
import ISCSITargetsPage from '@/pages/ISCSITargets'
|
||||||
import ISCSITargetDetailPage from '@/pages/ISCSITargetDetail'
|
import ISCSITargetDetailPage from '@/pages/ISCSITargetDetail'
|
||||||
import SystemPage from '@/pages/System'
|
import SystemPage from '@/pages/System'
|
||||||
|
import BackupManagementPage from '@/pages/BackupManagement'
|
||||||
|
import IAMPage from '@/pages/IAM'
|
||||||
|
import ProfilePage from '@/pages/Profile'
|
||||||
import Layout from '@/components/Layout'
|
import Layout from '@/components/Layout'
|
||||||
|
|
||||||
// Create a client
|
// Create a client
|
||||||
@@ -55,8 +58,12 @@ function App() {
|
|||||||
<Route path="tape/vtl/:id" element={<VTLDetailPage />} />
|
<Route path="tape/vtl/:id" element={<VTLDetailPage />} />
|
||||||
<Route path="iscsi" element={<ISCSITargetsPage />} />
|
<Route path="iscsi" element={<ISCSITargetsPage />} />
|
||||||
<Route path="iscsi/:id" element={<ISCSITargetDetailPage />} />
|
<Route path="iscsi/:id" element={<ISCSITargetDetailPage />} />
|
||||||
|
<Route path="backup" element={<BackupManagementPage />} />
|
||||||
<Route path="alerts" element={<AlertsPage />} />
|
<Route path="alerts" element={<AlertsPage />} />
|
||||||
<Route path="system" element={<SystemPage />} />
|
<Route path="system" element={<SystemPage />} />
|
||||||
|
<Route path="iam" element={<IAMPage />} />
|
||||||
|
<Route path="profile" element={<ProfilePage />} />
|
||||||
|
<Route path="profile/:id" element={<ProfilePage />} />
|
||||||
</Route>
|
</Route>
|
||||||
</Routes>
|
</Routes>
|
||||||
<Toaster />
|
<Toaster />
|
||||||
|
|||||||
75
frontend/src/api/backup.ts
Normal file
75
frontend/src/api/backup.ts
Normal file
@@ -0,0 +1,75 @@
|
|||||||
|
import apiClient from './client'
|
||||||
|
|
||||||
|
export interface BackupJob {
|
||||||
|
id: string
|
||||||
|
job_id: number
|
||||||
|
job_name: string
|
||||||
|
client_name: string
|
||||||
|
job_type: string
|
||||||
|
job_level: string
|
||||||
|
status: 'Running' | 'Completed' | 'Failed' | 'Canceled' | 'Waiting'
|
||||||
|
bytes_written: number
|
||||||
|
files_written: number
|
||||||
|
duration_seconds?: number
|
||||||
|
started_at?: string
|
||||||
|
ended_at?: string
|
||||||
|
error_message?: string
|
||||||
|
storage_name?: string
|
||||||
|
pool_name?: string
|
||||||
|
volume_name?: string
|
||||||
|
created_at: string
|
||||||
|
updated_at: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ListJobsResponse {
|
||||||
|
jobs: BackupJob[]
|
||||||
|
total: number
|
||||||
|
limit: number
|
||||||
|
offset: number
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ListJobsParams {
|
||||||
|
status?: string
|
||||||
|
job_type?: string
|
||||||
|
client_name?: string
|
||||||
|
job_name?: string
|
||||||
|
limit?: number
|
||||||
|
offset?: number
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface CreateJobRequest {
|
||||||
|
job_name: string
|
||||||
|
client_name: string
|
||||||
|
job_type: string
|
||||||
|
job_level: string
|
||||||
|
storage_name?: string
|
||||||
|
pool_name?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export const backupAPI = {
|
||||||
|
listJobs: async (params?: ListJobsParams): Promise<ListJobsResponse> => {
|
||||||
|
const queryParams = new URLSearchParams()
|
||||||
|
if (params?.status) queryParams.append('status', params.status)
|
||||||
|
if (params?.job_type) queryParams.append('job_type', params.job_type)
|
||||||
|
if (params?.client_name) queryParams.append('client_name', params.client_name)
|
||||||
|
if (params?.job_name) queryParams.append('job_name', params.job_name)
|
||||||
|
if (params?.limit) queryParams.append('limit', params.limit.toString())
|
||||||
|
if (params?.offset) queryParams.append('offset', params.offset.toString())
|
||||||
|
|
||||||
|
const response = await apiClient.get<ListJobsResponse>(
|
||||||
|
`/backup/jobs${queryParams.toString() ? `?${queryParams.toString()}` : ''}`
|
||||||
|
)
|
||||||
|
return response.data
|
||||||
|
},
|
||||||
|
|
||||||
|
getJob: async (id: string): Promise<BackupJob> => {
|
||||||
|
const response = await apiClient.get<BackupJob>(`/backup/jobs/${id}`)
|
||||||
|
return response.data
|
||||||
|
},
|
||||||
|
|
||||||
|
createJob: async (data: CreateJobRequest): Promise<BackupJob> => {
|
||||||
|
const response = await apiClient.post<BackupJob>('/backup/jobs', data)
|
||||||
|
return response.data
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
191
frontend/src/api/iam.ts
Normal file
191
frontend/src/api/iam.ts
Normal file
@@ -0,0 +1,191 @@
|
|||||||
|
import apiClient from './client'
|
||||||
|
|
||||||
|
export interface User {
|
||||||
|
id: string
|
||||||
|
username: string
|
||||||
|
email: string
|
||||||
|
full_name: string
|
||||||
|
is_active: boolean
|
||||||
|
is_system: boolean
|
||||||
|
created_at: string
|
||||||
|
updated_at: string
|
||||||
|
last_login_at: string | null
|
||||||
|
roles?: string[]
|
||||||
|
permissions?: string[]
|
||||||
|
groups?: string[]
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Group {
|
||||||
|
id: string
|
||||||
|
name: string
|
||||||
|
description?: string
|
||||||
|
is_system: boolean
|
||||||
|
user_count: number
|
||||||
|
role_count: number
|
||||||
|
created_at: string
|
||||||
|
updated_at: string
|
||||||
|
users?: string[]
|
||||||
|
roles?: string[]
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface CreateGroupRequest {
|
||||||
|
name: string
|
||||||
|
description?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface UpdateGroupRequest {
|
||||||
|
name?: string
|
||||||
|
description?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface AddUserToGroupRequest {
|
||||||
|
user_id: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface CreateUserRequest {
|
||||||
|
username: string
|
||||||
|
email: string
|
||||||
|
password: string
|
||||||
|
full_name?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface UpdateUserRequest {
|
||||||
|
email?: string
|
||||||
|
full_name?: string
|
||||||
|
is_active?: boolean
|
||||||
|
roles?: string[]
|
||||||
|
groups?: string[]
|
||||||
|
}
|
||||||
|
|
||||||
|
export const iamApi = {
|
||||||
|
listUsers: async (): Promise<User[]> => {
|
||||||
|
const response = await apiClient.get<{ users: User[] }>('/iam/users')
|
||||||
|
return response.data.users || []
|
||||||
|
},
|
||||||
|
|
||||||
|
getUser: async (id: string): Promise<User> => {
|
||||||
|
const response = await apiClient.get<{
|
||||||
|
id: string
|
||||||
|
username: string
|
||||||
|
email: string
|
||||||
|
full_name: string
|
||||||
|
is_active: boolean
|
||||||
|
is_system: boolean
|
||||||
|
roles: string[]
|
||||||
|
permissions: string[]
|
||||||
|
groups: string[]
|
||||||
|
created_at: string
|
||||||
|
updated_at: string
|
||||||
|
last_login_at: string | null
|
||||||
|
}>(`/iam/users/${id}`)
|
||||||
|
return response.data
|
||||||
|
},
|
||||||
|
|
||||||
|
createUser: async (data: CreateUserRequest): Promise<{ id: string; username: string }> => {
|
||||||
|
const response = await apiClient.post<{ id: string; username: string }>('/iam/users', data)
|
||||||
|
return response.data
|
||||||
|
},
|
||||||
|
|
||||||
|
updateUser: async (id: string, data: UpdateUserRequest): Promise<void> => {
|
||||||
|
await apiClient.put(`/iam/users/${id}`, data)
|
||||||
|
},
|
||||||
|
|
||||||
|
deleteUser: async (id: string): Promise<void> => {
|
||||||
|
await apiClient.delete(`/iam/users/${id}`)
|
||||||
|
},
|
||||||
|
|
||||||
|
// Groups API
|
||||||
|
listGroups: async (): Promise<Group[]> => {
|
||||||
|
const response = await apiClient.get<{ groups: Group[] }>('/iam/groups')
|
||||||
|
return response.data.groups || []
|
||||||
|
},
|
||||||
|
|
||||||
|
getGroup: async (id: string): Promise<Group> => {
|
||||||
|
const response = await apiClient.get<Group>(`/iam/groups/${id}`)
|
||||||
|
return response.data
|
||||||
|
},
|
||||||
|
|
||||||
|
createGroup: async (data: CreateGroupRequest): Promise<{ id: string; name: string }> => {
|
||||||
|
const response = await apiClient.post<{ id: string; name: string }>('/iam/groups', data)
|
||||||
|
return response.data
|
||||||
|
},
|
||||||
|
|
||||||
|
updateGroup: async (id: string, data: UpdateGroupRequest): Promise<void> => {
|
||||||
|
await apiClient.put(`/iam/groups/${id}`, data)
|
||||||
|
},
|
||||||
|
|
||||||
|
deleteGroup: async (id: string): Promise<void> => {
|
||||||
|
await apiClient.delete(`/iam/groups/${id}`)
|
||||||
|
},
|
||||||
|
|
||||||
|
addUserToGroup: async (groupId: string, userId: string): Promise<void> => {
|
||||||
|
await apiClient.post(`/iam/groups/${groupId}/users`, { user_id: userId })
|
||||||
|
},
|
||||||
|
|
||||||
|
removeUserFromGroup: async (groupId: string, userId: string): Promise<void> => {
|
||||||
|
await apiClient.delete(`/iam/groups/${groupId}/users/${userId}`)
|
||||||
|
},
|
||||||
|
|
||||||
|
// User role assignment
|
||||||
|
assignRoleToUser: async (userId: string, roleName: string): Promise<void> => {
|
||||||
|
await apiClient.post(`/iam/users/${userId}/roles`, { role_name: roleName })
|
||||||
|
},
|
||||||
|
|
||||||
|
removeRoleFromUser: async (userId: string, roleName: string): Promise<void> => {
|
||||||
|
await apiClient.delete(`/iam/users/${userId}/roles?role_name=${encodeURIComponent(roleName)}`)
|
||||||
|
},
|
||||||
|
|
||||||
|
// User group assignment
|
||||||
|
assignGroupToUser: async (userId: string, groupName: string): Promise<void> => {
|
||||||
|
await apiClient.post(`/iam/users/${userId}/groups`, { group_name: groupName })
|
||||||
|
},
|
||||||
|
|
||||||
|
removeGroupFromUser: async (userId: string, groupName: string): Promise<void> => {
|
||||||
|
await apiClient.delete(`/iam/users/${userId}/groups?group_name=${encodeURIComponent(groupName)}`)
|
||||||
|
},
|
||||||
|
|
||||||
|
// List all available roles
|
||||||
|
listRoles: async (): Promise<Array<{ id: string; name: string; description?: string; is_system: boolean; user_count?: number; created_at?: string; updated_at?: string }>> => {
|
||||||
|
const response = await apiClient.get<{ roles: Array<{ id: string; name: string; description?: string; is_system: boolean; user_count?: number; created_at?: string; updated_at?: string }> }>('/iam/roles')
|
||||||
|
return response.data.roles
|
||||||
|
},
|
||||||
|
|
||||||
|
getRole: async (id: string): Promise<{ id: string; name: string; description?: string; is_system: boolean; user_count?: number; created_at?: string; updated_at?: string }> => {
|
||||||
|
const response = await apiClient.get<{ id: string; name: string; description?: string; is_system: boolean; user_count?: number; created_at?: string; updated_at?: string }>(`/iam/roles/${id}`)
|
||||||
|
return response.data
|
||||||
|
},
|
||||||
|
|
||||||
|
createRole: async (data: { name: string; description?: string }): Promise<{ id: string; name: string }> => {
|
||||||
|
const response = await apiClient.post<{ id: string; name: string }>('/iam/roles', data)
|
||||||
|
return response.data
|
||||||
|
},
|
||||||
|
|
||||||
|
updateRole: async (id: string, data: { name?: string; description?: string }): Promise<void> => {
|
||||||
|
await apiClient.put(`/iam/roles/${id}`, data)
|
||||||
|
},
|
||||||
|
|
||||||
|
deleteRole: async (id: string): Promise<void> => {
|
||||||
|
await apiClient.delete(`/iam/roles/${id}`)
|
||||||
|
},
|
||||||
|
|
||||||
|
// Role permissions
|
||||||
|
getRolePermissions: async (roleId: string): Promise<string[]> => {
|
||||||
|
const response = await apiClient.get<{ permissions: string[] }>(`/iam/roles/${roleId}/permissions`)
|
||||||
|
return response.data.permissions
|
||||||
|
},
|
||||||
|
|
||||||
|
assignPermissionToRole: async (roleId: string, permissionName: string): Promise<void> => {
|
||||||
|
await apiClient.post(`/iam/roles/${roleId}/permissions`, { permission_name: permissionName })
|
||||||
|
},
|
||||||
|
|
||||||
|
removePermissionFromRole: async (roleId: string, permissionName: string): Promise<void> => {
|
||||||
|
await apiClient.delete(`/iam/roles/${roleId}/permissions?permission_name=${encodeURIComponent(permissionName)}`)
|
||||||
|
},
|
||||||
|
|
||||||
|
// Permissions
|
||||||
|
listPermissions: async (): Promise<Array<{ id: string; name: string; resource: string; action: string; description?: string }>> => {
|
||||||
|
const response = await apiClient.get<{ permissions: Array<{ id: string; name: string; resource: string; action: string; description?: string }> }>('/iam/permissions')
|
||||||
|
return response.data.permissions
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
@@ -9,6 +9,7 @@ export interface SCSTTarget {
|
|||||||
iqn: string
|
iqn: string
|
||||||
alias?: string
|
alias?: string
|
||||||
is_active: boolean
|
is_active: boolean
|
||||||
|
lun_count?: number
|
||||||
created_at: string
|
created_at: string
|
||||||
updated_at: string
|
updated_at: string
|
||||||
}
|
}
|
||||||
@@ -31,7 +32,11 @@ export interface SCSTInitiator {
|
|||||||
iqn: string
|
iqn: string
|
||||||
is_active: boolean
|
is_active: boolean
|
||||||
created_at: string
|
created_at: string
|
||||||
updated_at: string
|
updated_at?: string
|
||||||
|
target_id?: string
|
||||||
|
target_iqn?: string
|
||||||
|
target_name?: string
|
||||||
|
group_name?: string
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface SCSTInitiatorGroup {
|
export interface SCSTInitiatorGroup {
|
||||||
@@ -45,9 +50,19 @@ export interface SCSTInitiatorGroup {
|
|||||||
|
|
||||||
export interface SCSTHandler {
|
export interface SCSTHandler {
|
||||||
name: string
|
name: string
|
||||||
|
label: string
|
||||||
description?: string
|
description?: string
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface SCSTPortal {
|
||||||
|
id: string
|
||||||
|
ip_address: string
|
||||||
|
port: number
|
||||||
|
is_active: boolean
|
||||||
|
created_at: string
|
||||||
|
updated_at: string
|
||||||
|
}
|
||||||
|
|
||||||
export interface CreateTargetRequest {
|
export interface CreateTargetRequest {
|
||||||
iqn: string
|
iqn: string
|
||||||
target_type: string
|
target_type: string
|
||||||
@@ -80,6 +95,7 @@ export const scstAPI = {
|
|||||||
getTarget: async (id: string): Promise<{
|
getTarget: async (id: string): Promise<{
|
||||||
target: SCSTTarget
|
target: SCSTTarget
|
||||||
luns: SCSTLUN[]
|
luns: SCSTLUN[]
|
||||||
|
initiator_groups?: SCSTInitiatorGroup[]
|
||||||
}> => {
|
}> => {
|
||||||
const response = await apiClient.get(`/scst/targets/${id}`)
|
const response = await apiClient.get(`/scst/targets/${id}`)
|
||||||
return response.data
|
return response.data
|
||||||
@@ -87,7 +103,8 @@ export const scstAPI = {
|
|||||||
|
|
||||||
createTarget: async (data: CreateTargetRequest): Promise<SCSTTarget> => {
|
createTarget: async (data: CreateTargetRequest): Promise<SCSTTarget> => {
|
||||||
const response = await apiClient.post('/scst/targets', data)
|
const response = await apiClient.post('/scst/targets', data)
|
||||||
return response.data.target
|
// Backend returns target directly, not wrapped in { target: ... }
|
||||||
|
return response.data
|
||||||
},
|
},
|
||||||
|
|
||||||
addLUN: async (targetId: string, data: AddLUNRequest): Promise<{ task_id: string }> => {
|
addLUN: async (targetId: string, data: AddLUNRequest): Promise<{ task_id: string }> => {
|
||||||
@@ -109,5 +126,81 @@ export const scstAPI = {
|
|||||||
const response = await apiClient.get('/scst/handlers')
|
const response = await apiClient.get('/scst/handlers')
|
||||||
return response.data.handlers || []
|
return response.data.handlers || []
|
||||||
},
|
},
|
||||||
|
|
||||||
|
listPortals: async (): Promise<SCSTPortal[]> => {
|
||||||
|
const response = await apiClient.get('/scst/portals')
|
||||||
|
return response.data.portals || []
|
||||||
|
},
|
||||||
|
|
||||||
|
getPortal: async (id: string): Promise<SCSTPortal> => {
|
||||||
|
const response = await apiClient.get(`/scst/portals/${id}`)
|
||||||
|
return response.data
|
||||||
|
},
|
||||||
|
|
||||||
|
createPortal: async (data: { ip_address: string; port?: number; is_active?: boolean }): Promise<SCSTPortal> => {
|
||||||
|
const response = await apiClient.post('/scst/portals', data)
|
||||||
|
return response.data
|
||||||
|
},
|
||||||
|
|
||||||
|
updatePortal: async (id: string, data: { ip_address: string; port?: number; is_active?: boolean }): Promise<SCSTPortal> => {
|
||||||
|
const response = await apiClient.put(`/scst/portals/${id}`, data)
|
||||||
|
return response.data
|
||||||
|
},
|
||||||
|
|
||||||
|
deletePortal: async (id: string): Promise<void> => {
|
||||||
|
await apiClient.delete(`/scst/portals/${id}`)
|
||||||
|
},
|
||||||
|
|
||||||
|
enableTarget: async (targetId: string): Promise<{ message: string }> => {
|
||||||
|
const response = await apiClient.post(`/scst/targets/${targetId}/enable`)
|
||||||
|
return response.data
|
||||||
|
},
|
||||||
|
|
||||||
|
disableTarget: async (targetId: string): Promise<{ message: string }> => {
|
||||||
|
const response = await apiClient.post(`/scst/targets/${targetId}/disable`)
|
||||||
|
return response.data
|
||||||
|
},
|
||||||
|
|
||||||
|
listInitiators: async (): Promise<SCSTInitiator[]> => {
|
||||||
|
const response = await apiClient.get('/scst/initiators')
|
||||||
|
return response.data.initiators || []
|
||||||
|
},
|
||||||
|
|
||||||
|
getInitiator: async (id: string): Promise<SCSTInitiator> => {
|
||||||
|
const response = await apiClient.get(`/scst/initiators/${id}`)
|
||||||
|
return response.data
|
||||||
|
},
|
||||||
|
|
||||||
|
removeInitiator: async (id: string): Promise<void> => {
|
||||||
|
await apiClient.delete(`/scst/initiators/${id}`)
|
||||||
|
},
|
||||||
|
|
||||||
|
listExtents: async (): Promise<SCSTExtent[]> => {
|
||||||
|
const response = await apiClient.get('/scst/extents')
|
||||||
|
return response.data.extents || []
|
||||||
|
},
|
||||||
|
|
||||||
|
createExtent: async (extent: CreateExtentRequest): Promise<{ message: string }> => {
|
||||||
|
const response = await apiClient.post('/scst/extents', extent)
|
||||||
|
return response.data
|
||||||
|
},
|
||||||
|
|
||||||
|
deleteExtent: async (deviceName: string): Promise<void> => {
|
||||||
|
await apiClient.delete(`/scst/extents/${deviceName}`)
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface SCSTExtent {
|
||||||
|
handler_type: string
|
||||||
|
device_name: string
|
||||||
|
device_path: string
|
||||||
|
is_in_use: boolean
|
||||||
|
lun_count: number
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface CreateExtentRequest {
|
||||||
|
device_name: string
|
||||||
|
device_path: string
|
||||||
|
handler_type: string
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -26,6 +26,7 @@ export interface VirtualTapeLibrary {
|
|||||||
name: string
|
name: string
|
||||||
mhvtl_library_id: number
|
mhvtl_library_id: number
|
||||||
storage_path: string
|
storage_path: string
|
||||||
|
vendor?: string
|
||||||
slot_count: number
|
slot_count: number
|
||||||
drive_count: number
|
drive_count: number
|
||||||
is_active: boolean
|
is_active: boolean
|
||||||
|
|||||||
@@ -10,7 +10,8 @@ import {
|
|||||||
Settings,
|
Settings,
|
||||||
Bell,
|
Bell,
|
||||||
Server,
|
Server,
|
||||||
Users
|
Users,
|
||||||
|
Archive
|
||||||
} from 'lucide-react'
|
} from 'lucide-react'
|
||||||
import { useState, useEffect } from 'react'
|
import { useState, useEffect } from 'react'
|
||||||
|
|
||||||
@@ -44,14 +45,15 @@ export default function Layout() {
|
|||||||
{ name: 'Dashboard', href: '/', icon: LayoutDashboard },
|
{ name: 'Dashboard', href: '/', icon: LayoutDashboard },
|
||||||
{ name: 'Storage', href: '/storage', icon: HardDrive },
|
{ name: 'Storage', href: '/storage', icon: HardDrive },
|
||||||
{ name: 'Tape Libraries', href: '/tape', icon: Database },
|
{ name: 'Tape Libraries', href: '/tape', icon: Database },
|
||||||
{ name: 'iSCSI Targets', href: '/iscsi', icon: Network },
|
{ name: 'iSCSI Management', href: '/iscsi', icon: Network },
|
||||||
|
{ name: 'Backup Management', href: '/backup', icon: Archive },
|
||||||
{ name: 'Tasks', href: '/tasks', icon: Settings },
|
{ name: 'Tasks', href: '/tasks', icon: Settings },
|
||||||
{ name: 'Alerts', href: '/alerts', icon: Bell },
|
{ name: 'Alerts', href: '/alerts', icon: Bell },
|
||||||
{ name: 'System', href: '/system', icon: Server },
|
{ name: 'System', href: '/system', icon: Server },
|
||||||
]
|
]
|
||||||
|
|
||||||
if (user?.roles.includes('admin')) {
|
if (user?.roles.includes('admin')) {
|
||||||
navigation.push({ name: 'IAM', href: '/iam', icon: Users })
|
navigation.push({ name: 'User Management', href: '/iam', icon: Users })
|
||||||
}
|
}
|
||||||
|
|
||||||
const isActive = (href: string) => {
|
const isActive = (href: string) => {
|
||||||
@@ -62,7 +64,7 @@ export default function Layout() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="min-h-screen bg-background-dark">
|
<div className="h-screen bg-background-dark flex overflow-hidden">
|
||||||
{/* Mobile backdrop overlay */}
|
{/* Mobile backdrop overlay */}
|
||||||
{sidebarOpen && (
|
{sidebarOpen && (
|
||||||
<div
|
<div
|
||||||
@@ -135,12 +137,15 @@ export default function Layout() {
|
|||||||
|
|
||||||
{/* Footer */}
|
{/* Footer */}
|
||||||
<div className="p-4 border-t border-border-dark bg-[#0d1419]">
|
<div className="p-4 border-t border-border-dark bg-[#0d1419]">
|
||||||
<div className="mb-3 px-2">
|
<Link
|
||||||
|
to="/profile"
|
||||||
|
className="mb-3 px-2 py-2 rounded-lg hover:bg-card-dark transition-colors block"
|
||||||
|
>
|
||||||
<p className="text-sm font-semibold text-white mb-0.5">{user?.username}</p>
|
<p className="text-sm font-semibold text-white mb-0.5">{user?.username}</p>
|
||||||
<p className="text-xs text-text-secondary font-mono">
|
<p className="text-xs text-text-secondary font-mono">
|
||||||
{user?.roles.join(', ').toUpperCase()}
|
{user?.roles.join(', ').toUpperCase()}
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</Link>
|
||||||
<button
|
<button
|
||||||
onClick={handleLogout}
|
onClick={handleLogout}
|
||||||
className="w-full flex items-center gap-2 px-4 py-2.5 rounded-lg text-text-secondary hover:bg-card-dark hover:text-white transition-colors border border-border-dark"
|
className="w-full flex items-center gap-2 px-4 py-2.5 rounded-lg text-text-secondary hover:bg-card-dark hover:text-white transition-colors border border-border-dark"
|
||||||
@@ -153,9 +158,9 @@ export default function Layout() {
|
|||||||
</div>
|
</div>
|
||||||
|
|
||||||
{/* Main content */}
|
{/* Main content */}
|
||||||
<div className={`transition-all duration-300 ${sidebarOpen ? 'lg:ml-64' : 'ml-0'} bg-background-dark`}>
|
<div className={`transition-all duration-300 flex-1 flex flex-col overflow-hidden ${sidebarOpen ? 'lg:ml-64' : 'ml-0'} bg-background-dark`}>
|
||||||
{/* Top bar with burger menu button */}
|
{/* Top bar with burger menu button */}
|
||||||
<div className="sticky top-0 z-30 lg:hidden bg-background-dark border-b border-border-dark px-4 py-3">
|
<div className="flex-none lg:hidden bg-background-dark border-b border-border-dark px-4 py-3">
|
||||||
<button
|
<button
|
||||||
onClick={() => setSidebarOpen(true)}
|
onClick={() => setSidebarOpen(true)}
|
||||||
className="text-text-secondary hover:text-white transition-colors"
|
className="text-text-secondary hover:text-white transition-colors"
|
||||||
@@ -166,7 +171,7 @@ export default function Layout() {
|
|||||||
</div>
|
</div>
|
||||||
|
|
||||||
{/* Page content */}
|
{/* Page content */}
|
||||||
<main className="min-h-screen">
|
<main className="flex-1 overflow-y-auto">
|
||||||
<Outlet />
|
<Outlet />
|
||||||
</main>
|
</main>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@@ -69,6 +69,7 @@
|
|||||||
|
|
||||||
.custom-scrollbar::-webkit-scrollbar-track {
|
.custom-scrollbar::-webkit-scrollbar-track {
|
||||||
background: #111a22;
|
background: #111a22;
|
||||||
|
border-radius: 4px;
|
||||||
}
|
}
|
||||||
|
|
||||||
.custom-scrollbar::-webkit-scrollbar-thumb {
|
.custom-scrollbar::-webkit-scrollbar-thumb {
|
||||||
@@ -80,6 +81,24 @@
|
|||||||
background: #476685;
|
background: #476685;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.custom-scrollbar {
|
||||||
|
-webkit-overflow-scrolling: touch;
|
||||||
|
overscroll-behavior: contain;
|
||||||
|
scroll-behavior: smooth;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Ensure mouse wheel scrolling works */
|
||||||
|
.custom-scrollbar,
|
||||||
|
.custom-scrollbar * {
|
||||||
|
touch-action: pan-y;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Firefox scrollbar */
|
||||||
|
.custom-scrollbar {
|
||||||
|
scrollbar-width: thin;
|
||||||
|
scrollbar-color: #324d67 #111a22;
|
||||||
|
}
|
||||||
|
|
||||||
/* Electric glow animation for buttons */
|
/* Electric glow animation for buttons */
|
||||||
@keyframes electric-glow {
|
@keyframes electric-glow {
|
||||||
0%, 100% {
|
0%, 100% {
|
||||||
|
|||||||
811
frontend/src/pages/BackupManagement.tsx
Normal file
811
frontend/src/pages/BackupManagement.tsx
Normal file
@@ -0,0 +1,811 @@
|
|||||||
|
import { useState } from 'react'
|
||||||
|
import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'
|
||||||
|
import { backupAPI } from '@/api/backup'
|
||||||
|
import { Search, X } from 'lucide-react'
|
||||||
|
|
||||||
|
export default function BackupManagement() {
|
||||||
|
const [activeTab, setActiveTab] = useState<'dashboard' | 'jobs' | 'clients' | 'storage' | 'restore'>('dashboard')
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="flex-1 flex flex-col h-full overflow-hidden relative bg-background-dark">
|
||||||
|
{/* Page Heading */}
|
||||||
|
<header className="flex-none px-6 py-5 border-b border-border-dark bg-background-dark/95 backdrop-blur z-10">
|
||||||
|
<div className="max-w-[1200px] mx-auto flex flex-wrap justify-between items-end gap-4">
|
||||||
|
<div className="flex flex-col gap-2">
|
||||||
|
<div className="flex items-center gap-3">
|
||||||
|
<h1 className="text-white text-3xl md:text-4xl font-black leading-tight tracking-tight">
|
||||||
|
Calypso Backup Manager
|
||||||
|
</h1>
|
||||||
|
<span className="flex h-3 w-3 rounded-full bg-green-500 shadow-[0_0_8px_rgba(34,197,94,0.6)]"></span>
|
||||||
|
</div>
|
||||||
|
<p className="text-text-secondary text-base font-normal max-w-2xl">
|
||||||
|
Manage backup jobs, configure clients, and monitor storage pools from a central director console.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<div className="flex gap-3">
|
||||||
|
<button className="flex items-center gap-2 cursor-pointer justify-center rounded-lg h-10 px-4 bg-[#1c2936] border border-border-dark text-white text-sm font-bold hover:bg-[#2a3c50] transition-colors">
|
||||||
|
<span className="material-symbols-outlined text-base">terminal</span>
|
||||||
|
<span>Console</span>
|
||||||
|
</button>
|
||||||
|
<button className="flex items-center gap-2 cursor-pointer justify-center rounded-lg h-10 px-4 bg-primary text-white text-sm font-bold shadow-lg shadow-primary/20 hover:bg-primary/90 transition-colors">
|
||||||
|
<span className="material-symbols-outlined text-base">refresh</span>
|
||||||
|
<span>Restart Director</span>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</header>
|
||||||
|
|
||||||
|
{/* Scrollable Content */}
|
||||||
|
<div className="flex-1 overflow-y-auto bg-background-dark">
|
||||||
|
<div className="max-w-[1200px] mx-auto p-6 md:p-8 flex flex-col gap-6">
|
||||||
|
{/* Navigation Tabs */}
|
||||||
|
<div className="w-full overflow-x-auto">
|
||||||
|
<div className="flex border-b border-border-dark gap-8 min-w-max">
|
||||||
|
<button
|
||||||
|
onClick={() => setActiveTab('dashboard')}
|
||||||
|
className={`flex items-center gap-2 border-b-[3px] pb-3 pt-2 transition-colors ${
|
||||||
|
activeTab === 'dashboard'
|
||||||
|
? 'border-primary text-white'
|
||||||
|
: 'border-transparent text-text-secondary hover:text-white'
|
||||||
|
}`}
|
||||||
|
>
|
||||||
|
<span className="material-symbols-outlined text-base">dashboard</span>
|
||||||
|
<p className="text-sm font-bold tracking-wide">Dashboard</p>
|
||||||
|
</button>
|
||||||
|
<button
|
||||||
|
onClick={() => setActiveTab('jobs')}
|
||||||
|
className={`flex items-center gap-2 border-b-[3px] pb-3 pt-2 transition-colors ${
|
||||||
|
activeTab === 'jobs'
|
||||||
|
? 'border-primary text-white'
|
||||||
|
: 'border-transparent text-text-secondary hover:text-white'
|
||||||
|
}`}
|
||||||
|
>
|
||||||
|
<span className="material-symbols-outlined text-base">task</span>
|
||||||
|
<p className="text-sm font-bold tracking-wide">Jobs</p>
|
||||||
|
</button>
|
||||||
|
<button
|
||||||
|
onClick={() => setActiveTab('clients')}
|
||||||
|
className={`flex items-center gap-2 border-b-[3px] pb-3 pt-2 transition-colors ${
|
||||||
|
activeTab === 'clients'
|
||||||
|
? 'border-primary text-white'
|
||||||
|
: 'border-transparent text-text-secondary hover:text-white'
|
||||||
|
}`}
|
||||||
|
>
|
||||||
|
<span className="material-symbols-outlined text-base">devices</span>
|
||||||
|
<p className="text-sm font-bold tracking-wide">Clients</p>
|
||||||
|
</button>
|
||||||
|
<button
|
||||||
|
onClick={() => setActiveTab('storage')}
|
||||||
|
className={`flex items-center gap-2 border-b-[3px] pb-3 pt-2 transition-colors ${
|
||||||
|
activeTab === 'storage'
|
||||||
|
? 'border-primary text-white'
|
||||||
|
: 'border-transparent text-text-secondary hover:text-white'
|
||||||
|
}`}
|
||||||
|
>
|
||||||
|
<span className="material-symbols-outlined text-base">storage</span>
|
||||||
|
<p className="text-sm font-bold tracking-wide">Storage</p>
|
||||||
|
</button>
|
||||||
|
<button
|
||||||
|
onClick={() => setActiveTab('restore')}
|
||||||
|
className={`flex items-center gap-2 border-b-[3px] pb-3 pt-2 transition-colors ${
|
||||||
|
activeTab === 'restore'
|
||||||
|
? 'border-primary text-white'
|
||||||
|
: 'border-transparent text-text-secondary hover:text-white'
|
||||||
|
}`}
|
||||||
|
>
|
||||||
|
<span className="material-symbols-outlined text-base">history</span>
|
||||||
|
<p className="text-sm font-bold tracking-wide">Restore</p>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Conditional Content Based on Active Tab */}
|
||||||
|
{activeTab === 'dashboard' && (
|
||||||
|
<>
|
||||||
|
{/* Stats Dashboard */}
|
||||||
|
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-4 gap-4">
|
||||||
|
{/* Service Status Card */}
|
||||||
|
<div className="flex flex-col justify-between rounded-lg p-5 bg-[#1c2936] border border-border-dark relative overflow-hidden group">
|
||||||
|
<div className="absolute right-0 top-0 p-4 opacity-10 group-hover:opacity-20 transition-opacity">
|
||||||
|
<span className="material-symbols-outlined text-6xl">health_and_safety</span>
|
||||||
|
</div>
|
||||||
|
<div className="flex flex-col gap-1 z-10">
|
||||||
|
<p className="text-text-secondary text-sm font-medium uppercase tracking-wider">Director Status</p>
|
||||||
|
<div className="flex items-center gap-2 mt-1">
|
||||||
|
<span className="material-symbols-outlined text-green-500">check_circle</span>
|
||||||
|
<p className="text-white text-2xl font-bold">Active</p>
|
||||||
|
</div>
|
||||||
|
<p className="text-green-500 text-xs font-mono mt-1">Uptime: 14d 2h 12m</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Last Backup Card */}
|
||||||
|
<div className="flex flex-col justify-between rounded-lg p-5 bg-[#1c2936] border border-border-dark relative overflow-hidden group">
|
||||||
|
<div className="absolute right-0 top-0 p-4 opacity-10 group-hover:opacity-20 transition-opacity">
|
||||||
|
<span className="material-symbols-outlined text-6xl">schedule</span>
|
||||||
|
</div>
|
||||||
|
<div className="flex flex-col gap-1 z-10">
|
||||||
|
<p className="text-text-secondary text-sm font-medium uppercase tracking-wider">Last Job</p>
|
||||||
|
<div className="flex items-center gap-2 mt-1">
|
||||||
|
<p className="text-white text-2xl font-bold">Success</p>
|
||||||
|
</div>
|
||||||
|
<p className="text-text-secondary text-xs mt-1">DailyBackup • 2h 15m ago</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Active Jobs Card */}
|
||||||
|
<div className="flex flex-col justify-between rounded-lg p-5 bg-[#1c2936] border border-border-dark relative overflow-hidden group">
|
||||||
|
<div className="absolute right-0 top-0 p-4 opacity-10 group-hover:opacity-20 transition-opacity">
|
||||||
|
<span className="material-symbols-outlined text-6xl">pending_actions</span>
|
||||||
|
</div>
|
||||||
|
<div className="flex flex-col gap-1 z-10">
|
||||||
|
<p className="text-text-secondary text-sm font-medium uppercase tracking-wider">Active Jobs</p>
|
||||||
|
<div className="flex items-center gap-2 mt-1">
|
||||||
|
<p className="text-primary text-2xl font-bold">3 Running</p>
|
||||||
|
</div>
|
||||||
|
<div className="w-full bg-[#111a22] h-1.5 rounded-full mt-3 overflow-hidden">
|
||||||
|
<div className="bg-primary h-full rounded-full animate-pulse w-2/3"></div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Storage Pool Card */}
|
||||||
|
<div className="flex flex-col justify-between rounded-lg p-5 bg-[#1c2936] border border-border-dark relative overflow-hidden group">
|
||||||
|
<div className="absolute right-0 top-0 p-4 opacity-10 group-hover:opacity-20 transition-opacity">
|
||||||
|
<span className="material-symbols-outlined text-6xl">hard_drive</span>
|
||||||
|
</div>
|
||||||
|
<div className="flex flex-col gap-1 z-10 w-full">
|
||||||
|
<div className="flex justify-between items-center">
|
||||||
|
<p className="text-text-secondary text-sm font-medium uppercase tracking-wider">Default Pool</p>
|
||||||
|
<span className="text-white text-xs font-bold">78%</span>
|
||||||
|
</div>
|
||||||
|
<div className="flex items-end gap-1 mt-1">
|
||||||
|
<p className="text-white text-2xl font-bold">9.4 TB</p>
|
||||||
|
<p className="text-text-secondary text-sm mb-1">/ 12 TB</p>
|
||||||
|
</div>
|
||||||
|
<div className="w-full bg-[#111a22] h-2 rounded-full mt-2 overflow-hidden">
|
||||||
|
<div className="bg-gradient-to-r from-primary to-blue-400 h-full rounded-full" style={{ width: '78%' }}></div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Recent Jobs Section */}
|
||||||
|
<div className="flex flex-col gap-4">
|
||||||
|
<div className="flex items-center justify-between px-1">
|
||||||
|
<h3 className="text-white text-lg font-bold">Recent Job History</h3>
|
||||||
|
<button className="text-primary text-sm font-bold hover:text-blue-300 transition-colors">
|
||||||
|
View All History
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
<div className="rounded-lg border border-border-dark bg-[#1c2936] overflow-hidden shadow-sm">
|
||||||
|
<div className="overflow-x-auto">
|
||||||
|
<table className="w-full text-left border-collapse">
|
||||||
|
<thead>
|
||||||
|
<tr className="bg-[#111a22] border-b border-border-dark text-text-secondary text-xs uppercase tracking-wider">
|
||||||
|
<th className="px-6 py-4 font-semibold">Status</th>
|
||||||
|
<th className="px-6 py-4 font-semibold">Job ID</th>
|
||||||
|
<th className="px-6 py-4 font-semibold">Job Name</th>
|
||||||
|
<th className="px-6 py-4 font-semibold">Client</th>
|
||||||
|
<th className="px-6 py-4 font-semibold">Type</th>
|
||||||
|
<th className="px-6 py-4 font-semibold">Level</th>
|
||||||
|
<th className="px-6 py-4 font-semibold">Duration</th>
|
||||||
|
<th className="px-6 py-4 font-semibold">Bytes</th>
|
||||||
|
<th className="px-6 py-4 font-semibold text-right">Actions</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody className="divide-y divide-border-dark text-sm">
|
||||||
|
{/* Running Job */}
|
||||||
|
<tr className="hover:bg-[#111a22]/50 transition-colors">
|
||||||
|
<td className="px-6 py-4">
|
||||||
|
<span className="inline-flex items-center gap-1.5 rounded px-2 py-1 text-xs font-medium bg-blue-500/10 text-blue-400 border border-blue-500/20">
|
||||||
|
<span className="block h-1.5 w-1.5 rounded-full bg-blue-400 animate-pulse"></span>
|
||||||
|
Running
|
||||||
|
</span>
|
||||||
|
</td>
|
||||||
|
<td className="px-6 py-4 text-text-secondary font-mono">10423</td>
|
||||||
|
<td className="px-6 py-4 text-white font-medium">WeeklyArchive</td>
|
||||||
|
<td className="px-6 py-4 text-text-secondary">filesrv-02</td>
|
||||||
|
<td className="px-6 py-4 text-text-secondary">Backup</td>
|
||||||
|
<td className="px-6 py-4 text-text-secondary">Full</td>
|
||||||
|
<td className="px-6 py-4 text-text-secondary font-mono">00:45:12</td>
|
||||||
|
<td className="px-6 py-4 text-text-secondary font-mono">142 GB</td>
|
||||||
|
<td className="px-6 py-4 text-right">
|
||||||
|
<button className="text-text-secondary hover:text-white p-1 rounded hover:bg-[#111a22] transition-colors">
|
||||||
|
<span className="material-symbols-outlined text-[20px]">cancel</span>
|
||||||
|
</button>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
{/* Successful Job */}
|
||||||
|
<tr className="hover:bg-[#111a22]/50 transition-colors">
|
||||||
|
<td className="px-6 py-4">
|
||||||
|
<span className="inline-flex items-center gap-1.5 rounded px-2 py-1 text-xs font-medium bg-green-500/10 text-green-400 border border-green-500/20">
|
||||||
|
<span className="material-symbols-outlined text-[14px]">check</span>
|
||||||
|
OK
|
||||||
|
</span>
|
||||||
|
</td>
|
||||||
|
<td className="px-6 py-4 text-text-secondary font-mono">10422</td>
|
||||||
|
<td className="px-6 py-4 text-white font-medium">DailyBackup</td>
|
||||||
|
<td className="px-6 py-4 text-text-secondary">web-srv-01</td>
|
||||||
|
<td className="px-6 py-4 text-text-secondary">Backup</td>
|
||||||
|
<td className="px-6 py-4 text-text-secondary">Incr</td>
|
||||||
|
<td className="px-6 py-4 text-text-secondary font-mono">00:12:05</td>
|
||||||
|
<td className="px-6 py-4 text-text-secondary font-mono">4.2 GB</td>
|
||||||
|
<td className="px-6 py-4 text-right">
|
||||||
|
<button className="text-text-secondary hover:text-white p-1 rounded hover:bg-[#111a22] transition-colors">
|
||||||
|
<span className="material-symbols-outlined text-[20px]">more_vert</span>
|
||||||
|
</button>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
{/* Failed Job */}
|
||||||
|
<tr className="hover:bg-[#111a22]/50 transition-colors">
|
||||||
|
<td className="px-6 py-4">
|
||||||
|
<span className="inline-flex items-center gap-1.5 rounded px-2 py-1 text-xs font-medium bg-red-500/10 text-red-400 border border-red-500/20">
|
||||||
|
<span className="material-symbols-outlined text-[14px]">error</span>
|
||||||
|
Error
|
||||||
|
</span>
|
||||||
|
</td>
|
||||||
|
<td className="px-6 py-4 text-text-secondary font-mono">10421</td>
|
||||||
|
<td className="px-6 py-4 text-white font-medium">DB_Snapshot</td>
|
||||||
|
<td className="px-6 py-4 text-text-secondary">db-prod-01</td>
|
||||||
|
<td className="px-6 py-4 text-text-secondary">Backup</td>
|
||||||
|
<td className="px-6 py-4 text-text-secondary">Diff</td>
|
||||||
|
<td className="px-6 py-4 text-text-secondary font-mono">00:00:04</td>
|
||||||
|
<td className="px-6 py-4 text-text-secondary font-mono">0 B</td>
|
||||||
|
<td className="px-6 py-4 text-right">
|
||||||
|
<button className="text-text-secondary hover:text-white p-1 rounded hover:bg-[#111a22] transition-colors">
|
||||||
|
<span className="material-symbols-outlined text-[20px]">replay</span>
|
||||||
|
</button>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
{/* Another Success */}
|
||||||
|
<tr className="hover:bg-[#111a22]/50 transition-colors">
|
||||||
|
<td className="px-6 py-4">
|
||||||
|
<span className="inline-flex items-center gap-1.5 rounded px-2 py-1 text-xs font-medium bg-green-500/10 text-green-400 border border-green-500/20">
|
||||||
|
<span className="material-symbols-outlined text-[14px]">check</span>
|
||||||
|
OK
|
||||||
|
</span>
|
||||||
|
</td>
|
||||||
|
<td className="px-6 py-4 text-text-secondary font-mono">10420</td>
|
||||||
|
<td className="px-6 py-4 text-white font-medium">CatalogBackup</td>
|
||||||
|
<td className="px-6 py-4 text-text-secondary">backup-srv-01</td>
|
||||||
|
<td className="px-6 py-4 text-text-secondary">Backup</td>
|
||||||
|
<td className="px-6 py-4 text-text-secondary">Full</td>
|
||||||
|
<td className="px-6 py-4 text-text-secondary font-mono">00:05:30</td>
|
||||||
|
<td className="px-6 py-4 text-text-secondary font-mono">850 MB</td>
|
||||||
|
<td className="px-6 py-4 text-right">
|
||||||
|
<button className="text-text-secondary hover:text-white p-1 rounded hover:bg-[#111a22] transition-colors">
|
||||||
|
<span className="material-symbols-outlined text-[20px]">more_vert</span>
|
||||||
|
</button>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
{/* Pagination/Footer */}
|
||||||
|
<div className="bg-[#111a22] border-t border-border-dark px-6 py-3 flex items-center justify-between">
|
||||||
|
<p className="text-text-secondary text-xs">Showing 4 of 128 jobs</p>
|
||||||
|
<div className="flex gap-2">
|
||||||
|
<button className="p-1 rounded text-text-secondary hover:text-white disabled:opacity-50 hover:bg-[#1c2936]">
|
||||||
|
<span className="material-symbols-outlined text-base">chevron_left</span>
|
||||||
|
</button>
|
||||||
|
<button className="p-1 rounded text-text-secondary hover:text-white hover:bg-[#1c2936]">
|
||||||
|
<span className="material-symbols-outlined text-base">chevron_right</span>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Footer Console Widget */}
|
||||||
|
<div className="mt-auto pt-8">
|
||||||
|
<div className="rounded-lg bg-[#0d131a] border border-border-dark p-4 font-mono text-xs text-text-secondary shadow-inner h-32 overflow-y-auto">
|
||||||
|
<div className="flex items-center justify-between mb-2 text-gray-500 border-b border-white/5 pb-1">
|
||||||
|
<span>Console Log (tail -f)</span>
|
||||||
|
<span className="flex items-center gap-1">
|
||||||
|
<span className="w-2 h-2 bg-green-500 rounded-full animate-pulse"></span> Connected
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
<p className="text-blue-400">[14:22:01] bareos-dir: Connected to Storage at backup-srv-01:9103</p>
|
||||||
|
<p>[14:22:02] bareos-sd: Volume "Vol-0012" selected for appending</p>
|
||||||
|
<p>[14:22:05] bareos-fd: Client "filesrv-02" starting backup of /var/www/html</p>
|
||||||
|
<p className="text-yellow-500">[14:23:10] warning: /var/www/html/cache/tmp locked by another process, skipping</p>
|
||||||
|
<p>[14:23:45] bareos-dir: JobId 10423: Sending Accurate information.</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{activeTab === 'jobs' && (
|
||||||
|
<JobsManagementTab />
|
||||||
|
)}
|
||||||
|
|
||||||
|
{activeTab === 'clients' && (
|
||||||
|
<div className="p-8 text-center text-text-secondary">
|
||||||
|
Clients tab coming soon
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{activeTab === 'storage' && (
|
||||||
|
<div className="p-8 text-center text-text-secondary">
|
||||||
|
Storage tab coming soon
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{activeTab === 'restore' && (
|
||||||
|
<div className="p-8 text-center text-text-secondary">
|
||||||
|
Restore tab coming soon
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Jobs Management Tab Component
|
||||||
|
function JobsManagementTab() {
|
||||||
|
const queryClient = useQueryClient()
|
||||||
|
const [searchQuery, setSearchQuery] = useState('')
|
||||||
|
const [statusFilter, setStatusFilter] = useState<string>('')
|
||||||
|
const [jobTypeFilter, setJobTypeFilter] = useState<string>('')
|
||||||
|
const [page, setPage] = useState(1)
|
||||||
|
const [showCreateForm, setShowCreateForm] = useState(false)
|
||||||
|
const limit = 20
|
||||||
|
|
||||||
|
const { data, isLoading, error } = useQuery({
|
||||||
|
queryKey: ['backup-jobs', statusFilter, jobTypeFilter, searchQuery, page],
|
||||||
|
queryFn: () => backupAPI.listJobs({
|
||||||
|
status: statusFilter || undefined,
|
||||||
|
job_type: jobTypeFilter || undefined,
|
||||||
|
job_name: searchQuery || undefined,
|
||||||
|
limit,
|
||||||
|
offset: (page - 1) * limit,
|
||||||
|
}),
|
||||||
|
})
|
||||||
|
|
||||||
|
const jobs = data?.jobs || []
|
||||||
|
const total = data?.total || 0
|
||||||
|
const totalPages = Math.ceil(total / limit)
|
||||||
|
|
||||||
|
const formatBytes = (bytes: number): string => {
|
||||||
|
if (bytes === 0) return '0 B'
|
||||||
|
const k = 1024
|
||||||
|
const sizes = ['B', 'KB', 'MB', 'GB', 'TB']
|
||||||
|
const i = Math.floor(Math.log(bytes) / Math.log(k))
|
||||||
|
return `${(bytes / Math.pow(k, i)).toFixed(2)} ${sizes[i]}`
|
||||||
|
}
|
||||||
|
|
||||||
|
const formatDuration = (seconds?: number): string => {
|
||||||
|
if (!seconds) return '-'
|
||||||
|
const hours = Math.floor(seconds / 3600)
|
||||||
|
const minutes = Math.floor((seconds % 3600) / 60)
|
||||||
|
const secs = seconds % 60
|
||||||
|
if (hours > 0) {
|
||||||
|
return `${hours}:${minutes.toString().padStart(2, '0')}:${secs.toString().padStart(2, '0')}`
|
||||||
|
}
|
||||||
|
return `${minutes}:${secs.toString().padStart(2, '0')}`
|
||||||
|
}
|
||||||
|
|
||||||
|
const getStatusBadge = (status: string) => {
|
||||||
|
const statusMap: Record<string, { bg: string; text: string; border: string; icon: string }> = {
|
||||||
|
Running: {
|
||||||
|
bg: 'bg-blue-500/10',
|
||||||
|
text: 'text-blue-400',
|
||||||
|
border: 'border-blue-500/20',
|
||||||
|
icon: 'pending_actions',
|
||||||
|
},
|
||||||
|
Completed: {
|
||||||
|
bg: 'bg-green-500/10',
|
||||||
|
text: 'text-green-400',
|
||||||
|
border: 'border-green-500/20',
|
||||||
|
icon: 'check_circle',
|
||||||
|
},
|
||||||
|
Failed: {
|
||||||
|
bg: 'bg-red-500/10',
|
||||||
|
text: 'text-red-400',
|
||||||
|
border: 'border-red-500/20',
|
||||||
|
icon: 'error',
|
||||||
|
},
|
||||||
|
Canceled: {
|
||||||
|
bg: 'bg-yellow-500/10',
|
||||||
|
text: 'text-yellow-400',
|
||||||
|
border: 'border-yellow-500/20',
|
||||||
|
icon: 'cancel',
|
||||||
|
},
|
||||||
|
Waiting: {
|
||||||
|
bg: 'bg-gray-500/10',
|
||||||
|
text: 'text-gray-400',
|
||||||
|
border: 'border-gray-500/20',
|
||||||
|
icon: 'schedule',
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
const config = statusMap[status] || statusMap.Waiting
|
||||||
|
|
||||||
|
return (
|
||||||
|
<span className={`inline-flex items-center gap-1.5 rounded px-2 py-1 text-xs font-medium ${config.bg} ${config.text} border ${config.border}`}>
|
||||||
|
{status === 'Running' && (
|
||||||
|
<span className="block h-1.5 w-1.5 rounded-full bg-blue-400 animate-pulse"></span>
|
||||||
|
)}
|
||||||
|
{status !== 'Running' && (
|
||||||
|
<span className="material-symbols-outlined text-[14px]">{config.icon}</span>
|
||||||
|
)}
|
||||||
|
{status}
|
||||||
|
</span>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="space-y-6">
|
||||||
|
{/* Header */}
|
||||||
|
<div className="flex items-center justify-between">
|
||||||
|
<div>
|
||||||
|
<h2 className="text-white text-2xl font-bold">Backup Jobs</h2>
|
||||||
|
<p className="text-text-secondary text-sm mt-1">Manage and monitor backup job executions</p>
|
||||||
|
</div>
|
||||||
|
<button
|
||||||
|
onClick={() => setShowCreateForm(true)}
|
||||||
|
className="flex items-center gap-2 cursor-pointer justify-center rounded-lg h-10 px-4 bg-primary text-white text-sm font-bold shadow-lg shadow-primary/20 hover:bg-primary/90 transition-colors"
|
||||||
|
>
|
||||||
|
<span className="material-symbols-outlined text-base">add</span>
|
||||||
|
<span>Create Job</span>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Filters */}
|
||||||
|
<div className="flex flex-wrap items-center gap-4 p-4 bg-[#1c2936] border border-border-dark rounded-lg">
|
||||||
|
{/* Search */}
|
||||||
|
<div className="flex-1 min-w-[200px] relative">
|
||||||
|
<Search className="absolute left-3 top-1/2 transform -translate-y-1/2 text-text-secondary" size={18} />
|
||||||
|
<input
|
||||||
|
type="text"
|
||||||
|
placeholder="Search by job name..."
|
||||||
|
value={searchQuery}
|
||||||
|
onChange={(e) => {
|
||||||
|
setSearchQuery(e.target.value)
|
||||||
|
setPage(1)
|
||||||
|
}}
|
||||||
|
className="w-full pl-10 pr-4 py-2 bg-[#111a22] border border-border-dark rounded-lg text-white text-sm placeholder-text-secondary focus:outline-none focus:ring-2 focus:ring-primary focus:border-transparent"
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Status Filter */}
|
||||||
|
<select
|
||||||
|
value={statusFilter}
|
||||||
|
onChange={(e) => {
|
||||||
|
setStatusFilter(e.target.value)
|
||||||
|
setPage(1)
|
||||||
|
}}
|
||||||
|
className="px-4 py-2 bg-[#111a22] border border-border-dark rounded-lg text-white text-sm focus:outline-none focus:ring-2 focus:ring-primary focus:border-transparent appearance-none cursor-pointer"
|
||||||
|
style={{
|
||||||
|
backgroundImage: `url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='12' height='12' viewBox='0 0 12 12'%3E%3Cpath fill='%23ffffff' d='M6 9L1 4h10z'/%3E%3C/svg%3E")`,
|
||||||
|
backgroundRepeat: 'no-repeat',
|
||||||
|
backgroundPosition: 'right 0.75rem center',
|
||||||
|
paddingRight: '2.5rem',
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
<option value="">All Status</option>
|
||||||
|
<option value="Running">Running</option>
|
||||||
|
<option value="Completed">Completed</option>
|
||||||
|
<option value="Failed">Failed</option>
|
||||||
|
<option value="Canceled">Canceled</option>
|
||||||
|
<option value="Waiting">Waiting</option>
|
||||||
|
</select>
|
||||||
|
|
||||||
|
{/* Job Type Filter */}
|
||||||
|
<select
|
||||||
|
value={jobTypeFilter}
|
||||||
|
onChange={(e) => {
|
||||||
|
setJobTypeFilter(e.target.value)
|
||||||
|
setPage(1)
|
||||||
|
}}
|
||||||
|
className="px-4 py-2 bg-[#111a22] border border-border-dark rounded-lg text-white text-sm focus:outline-none focus:ring-2 focus:ring-primary focus:border-transparent appearance-none cursor-pointer"
|
||||||
|
style={{
|
||||||
|
backgroundImage: `url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='12' height='12' viewBox='0 0 12 12'%3E%3Cpath fill='%23ffffff' d='M6 9L1 4h10z'/%3E%3C/svg%3E")`,
|
||||||
|
backgroundRepeat: 'no-repeat',
|
||||||
|
backgroundPosition: 'right 0.75rem center',
|
||||||
|
paddingRight: '2.5rem',
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
<option value="">All Types</option>
|
||||||
|
<option value="Backup">Backup</option>
|
||||||
|
<option value="Restore">Restore</option>
|
||||||
|
<option value="Verify">Verify</option>
|
||||||
|
<option value="Copy">Copy</option>
|
||||||
|
<option value="Migrate">Migrate</option>
|
||||||
|
</select>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Jobs Table */}
|
||||||
|
<div className="rounded-lg border border-border-dark bg-[#1c2936] overflow-hidden shadow-sm">
|
||||||
|
{isLoading ? (
|
||||||
|
<div className="p-8 text-center text-text-secondary">Loading jobs...</div>
|
||||||
|
) : error ? (
|
||||||
|
<div className="p-8 text-center text-red-400">Failed to load jobs</div>
|
||||||
|
) : jobs.length === 0 ? (
|
||||||
|
<div className="p-12 text-center">
|
||||||
|
<p className="text-text-secondary">No jobs found</p>
|
||||||
|
</div>
|
||||||
|
) : (
|
||||||
|
<>
|
||||||
|
<div className="overflow-x-auto">
|
||||||
|
<table className="w-full text-left border-collapse">
|
||||||
|
<thead>
|
||||||
|
<tr className="bg-[#111a22] border-b border-border-dark text-text-secondary text-xs uppercase tracking-wider">
|
||||||
|
<th className="px-6 py-4 font-semibold">Status</th>
|
||||||
|
<th className="px-6 py-4 font-semibold">Job ID</th>
|
||||||
|
<th className="px-6 py-4 font-semibold">Job Name</th>
|
||||||
|
<th className="px-6 py-4 font-semibold">Client</th>
|
||||||
|
<th className="px-6 py-4 font-semibold">Type</th>
|
||||||
|
<th className="px-6 py-4 font-semibold">Level</th>
|
||||||
|
<th className="px-6 py-4 font-semibold">Duration</th>
|
||||||
|
<th className="px-6 py-4 font-semibold">Bytes</th>
|
||||||
|
<th className="px-6 py-4 font-semibold">Files</th>
|
||||||
|
<th className="px-6 py-4 font-semibold text-right">Actions</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody className="divide-y divide-border-dark text-sm">
|
||||||
|
{jobs.map((job) => (
|
||||||
|
<tr key={job.id} className="hover:bg-[#111a22]/50 transition-colors">
|
||||||
|
<td className="px-6 py-4">{getStatusBadge(job.status)}</td>
|
||||||
|
<td className="px-6 py-4 text-text-secondary font-mono">{job.job_id}</td>
|
||||||
|
<td className="px-6 py-4 text-white font-medium">{job.job_name}</td>
|
||||||
|
<td className="px-6 py-4 text-text-secondary">{job.client_name}</td>
|
||||||
|
<td className="px-6 py-4 text-text-secondary">{job.job_type}</td>
|
||||||
|
<td className="px-6 py-4 text-text-secondary">{job.job_level}</td>
|
||||||
|
<td className="px-6 py-4 text-text-secondary font-mono">{formatDuration(job.duration_seconds)}</td>
|
||||||
|
<td className="px-6 py-4 text-text-secondary font-mono">{formatBytes(job.bytes_written)}</td>
|
||||||
|
<td className="px-6 py-4 text-text-secondary">{job.files_written.toLocaleString()}</td>
|
||||||
|
<td className="px-6 py-4 text-right">
|
||||||
|
<button className="text-text-secondary hover:text-white p-1 rounded hover:bg-[#111a22] transition-colors">
|
||||||
|
<span className="material-symbols-outlined text-[20px]">more_vert</span>
|
||||||
|
</button>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
))}
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
{/* Pagination */}
|
||||||
|
<div className="bg-[#111a22] border-t border-border-dark px-6 py-3 flex items-center justify-between">
|
||||||
|
<p className="text-text-secondary text-xs">
|
||||||
|
Showing {(page - 1) * limit + 1}-{Math.min(page * limit, total)} of {total} jobs
|
||||||
|
</p>
|
||||||
|
<div className="flex gap-2">
|
||||||
|
<button
|
||||||
|
onClick={() => setPage(p => Math.max(1, p - 1))}
|
||||||
|
disabled={page === 1}
|
||||||
|
className="p-1 rounded text-text-secondary hover:text-white disabled:opacity-50 hover:bg-[#1c2936]"
|
||||||
|
>
|
||||||
|
<span className="material-symbols-outlined text-base">chevron_left</span>
|
||||||
|
</button>
|
||||||
|
<button
|
||||||
|
onClick={() => setPage(p => Math.min(totalPages, p + 1))}
|
||||||
|
disabled={page >= totalPages}
|
||||||
|
className="p-1 rounded text-text-secondary hover:text-white disabled:opacity-50 hover:bg-[#1c2936]"
|
||||||
|
>
|
||||||
|
<span className="material-symbols-outlined text-base">chevron_right</span>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Create Job Form Modal */}
|
||||||
|
{showCreateForm && (
|
||||||
|
<CreateJobForm
|
||||||
|
onClose={() => setShowCreateForm(false)}
|
||||||
|
onSuccess={async () => {
|
||||||
|
setShowCreateForm(false)
|
||||||
|
await queryClient.invalidateQueries({ queryKey: ['backup-jobs'] })
|
||||||
|
await queryClient.refetchQueries({ queryKey: ['backup-jobs'] })
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create Job Form Component
|
||||||
|
interface CreateJobFormProps {
|
||||||
|
onClose: () => void
|
||||||
|
onSuccess: () => void
|
||||||
|
}
|
||||||
|
|
||||||
|
function CreateJobForm({ onClose, onSuccess }: CreateJobFormProps) {
|
||||||
|
const [formData, setFormData] = useState({
|
||||||
|
job_name: '',
|
||||||
|
client_name: '',
|
||||||
|
job_type: 'Backup',
|
||||||
|
job_level: 'Full',
|
||||||
|
storage_name: '',
|
||||||
|
pool_name: '',
|
||||||
|
})
|
||||||
|
const [error, setError] = useState<string | null>(null)
|
||||||
|
|
||||||
|
const createJobMutation = useMutation({
|
||||||
|
mutationFn: backupAPI.createJob,
|
||||||
|
onSuccess: () => {
|
||||||
|
onSuccess()
|
||||||
|
},
|
||||||
|
onError: (err: any) => {
|
||||||
|
setError(err.response?.data?.error || 'Failed to create job')
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
const handleSubmit = (e: React.FormEvent) => {
|
||||||
|
e.preventDefault()
|
||||||
|
setError(null)
|
||||||
|
|
||||||
|
const payload: any = {
|
||||||
|
job_name: formData.job_name,
|
||||||
|
client_name: formData.client_name,
|
||||||
|
job_type: formData.job_type,
|
||||||
|
job_level: formData.job_level,
|
||||||
|
}
|
||||||
|
|
||||||
|
if (formData.storage_name) {
|
||||||
|
payload.storage_name = formData.storage_name
|
||||||
|
}
|
||||||
|
if (formData.pool_name) {
|
||||||
|
payload.pool_name = formData.pool_name
|
||||||
|
}
|
||||||
|
|
||||||
|
createJobMutation.mutate(payload)
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="fixed inset-0 bg-black/50 backdrop-blur-sm z-50 flex items-center justify-center p-4">
|
||||||
|
<div className="bg-[#1c2936] border border-border-dark rounded-lg shadow-xl w-full max-w-2xl max-h-[90vh] overflow-y-auto">
|
||||||
|
{/* Header */}
|
||||||
|
<div className="flex items-center justify-between p-6 border-b border-border-dark">
|
||||||
|
<h2 className="text-white text-xl font-bold">Create Backup Job</h2>
|
||||||
|
<button
|
||||||
|
onClick={onClose}
|
||||||
|
className="text-text-secondary hover:text-white transition-colors"
|
||||||
|
>
|
||||||
|
<X size={20} />
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Form */}
|
||||||
|
<form onSubmit={handleSubmit} className="p-6 space-y-4">
|
||||||
|
{error && (
|
||||||
|
<div className="p-3 bg-red-500/10 border border-red-500/20 rounded-lg text-red-400 text-sm">
|
||||||
|
{error}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Job Name */}
|
||||||
|
<div>
|
||||||
|
<label className="block text-white text-sm font-semibold mb-2">
|
||||||
|
Job Name <span className="text-red-400">*</span>
|
||||||
|
</label>
|
||||||
|
<input
|
||||||
|
type="text"
|
||||||
|
required
|
||||||
|
value={formData.job_name}
|
||||||
|
onChange={(e) => setFormData({ ...formData, job_name: e.target.value })}
|
||||||
|
className="w-full px-4 py-2 bg-[#111a22] border border-border-dark rounded-lg text-white text-sm focus:outline-none focus:ring-2 focus:ring-primary focus:border-transparent"
|
||||||
|
placeholder="e.g., DailyBackup"
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Client Name */}
|
||||||
|
<div>
|
||||||
|
<label className="block text-white text-sm font-semibold mb-2">
|
||||||
|
Client Name <span className="text-red-400">*</span>
|
||||||
|
</label>
|
||||||
|
<input
|
||||||
|
type="text"
|
||||||
|
required
|
||||||
|
value={formData.client_name}
|
||||||
|
onChange={(e) => setFormData({ ...formData, client_name: e.target.value })}
|
||||||
|
className="w-full px-4 py-2 bg-[#111a22] border border-border-dark rounded-lg text-white text-sm focus:outline-none focus:ring-2 focus:ring-primary focus:border-transparent"
|
||||||
|
placeholder="e.g., filesrv-02"
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Job Type & Level */}
|
||||||
|
<div className="grid grid-cols-2 gap-4">
|
||||||
|
<div>
|
||||||
|
<label className="block text-white text-sm font-semibold mb-2">
|
||||||
|
Job Type <span className="text-red-400">*</span>
|
||||||
|
</label>
|
||||||
|
<select
|
||||||
|
required
|
||||||
|
value={formData.job_type}
|
||||||
|
onChange={(e) => setFormData({ ...formData, job_type: e.target.value })}
|
||||||
|
className="w-full px-4 py-2 bg-[#111a22] border border-border-dark rounded-lg text-white text-sm focus:outline-none focus:ring-2 focus:ring-primary focus:border-transparent appearance-none cursor-pointer"
|
||||||
|
style={{
|
||||||
|
backgroundImage: `url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='12' height='12' viewBox='0 0 12 12'%3E%3Cpath fill='%23ffffff' d='M6 9L1 4h10z'/%3E%3C/svg%3E")`,
|
||||||
|
backgroundRepeat: 'no-repeat',
|
||||||
|
backgroundPosition: 'right 0.75rem center',
|
||||||
|
paddingRight: '2.5rem',
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
<option value="Backup">Backup</option>
|
||||||
|
<option value="Restore">Restore</option>
|
||||||
|
<option value="Verify">Verify</option>
|
||||||
|
<option value="Copy">Copy</option>
|
||||||
|
<option value="Migrate">Migrate</option>
|
||||||
|
</select>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div>
|
||||||
|
<label className="block text-white text-sm font-semibold mb-2">
|
||||||
|
Job Level <span className="text-red-400">*</span>
|
||||||
|
</label>
|
||||||
|
<select
|
||||||
|
required
|
||||||
|
value={formData.job_level}
|
||||||
|
onChange={(e) => setFormData({ ...formData, job_level: e.target.value })}
|
||||||
|
className="w-full px-4 py-2 bg-[#111a22] border border-border-dark rounded-lg text-white text-sm focus:outline-none focus:ring-2 focus:ring-primary focus:border-transparent appearance-none cursor-pointer"
|
||||||
|
style={{
|
||||||
|
backgroundImage: `url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='12' height='12' viewBox='0 0 12 12'%3E%3Cpath fill='%23ffffff' d='M6 9L1 4h10z'/%3E%3C/svg%3E")`,
|
||||||
|
backgroundRepeat: 'no-repeat',
|
||||||
|
backgroundPosition: 'right 0.75rem center',
|
||||||
|
paddingRight: '2.5rem',
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
<option value="Full">Full</option>
|
||||||
|
<option value="Incremental">Incremental</option>
|
||||||
|
<option value="Differential">Differential</option>
|
||||||
|
<option value="Since">Since</option>
|
||||||
|
</select>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Storage Name */}
|
||||||
|
<div>
|
||||||
|
<label className="block text-white text-sm font-semibold mb-2">
|
||||||
|
Storage Name (Optional)
|
||||||
|
</label>
|
||||||
|
<input
|
||||||
|
type="text"
|
||||||
|
value={formData.storage_name}
|
||||||
|
onChange={(e) => setFormData({ ...formData, storage_name: e.target.value })}
|
||||||
|
className="w-full px-4 py-2 bg-[#111a22] border border-border-dark rounded-lg text-white text-sm focus:outline-none focus:ring-2 focus:ring-primary focus:border-transparent"
|
||||||
|
placeholder="e.g., backup-srv-01"
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Pool Name */}
|
||||||
|
<div>
|
||||||
|
<label className="block text-white text-sm font-semibold mb-2">
|
||||||
|
Pool Name (Optional)
|
||||||
|
</label>
|
||||||
|
<input
|
||||||
|
type="text"
|
||||||
|
value={formData.pool_name}
|
||||||
|
onChange={(e) => setFormData({ ...formData, pool_name: e.target.value })}
|
||||||
|
className="w-full px-4 py-2 bg-[#111a22] border border-border-dark rounded-lg text-white text-sm focus:outline-none focus:ring-2 focus:ring-primary focus:border-transparent"
|
||||||
|
placeholder="e.g., Default"
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Actions */}
|
||||||
|
<div className="flex items-center justify-end gap-3 pt-4 border-t border-border-dark">
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={onClose}
|
||||||
|
className="px-4 py-2 bg-[#111a22] border border-border-dark rounded-lg text-white text-sm font-semibold hover:bg-[#1c2936] transition-colors"
|
||||||
|
>
|
||||||
|
Cancel
|
||||||
|
</button>
|
||||||
|
<button
|
||||||
|
type="submit"
|
||||||
|
disabled={createJobMutation.isPending}
|
||||||
|
className="px-4 py-2 bg-primary text-white rounded-lg text-sm font-semibold hover:bg-primary/90 transition-colors disabled:opacity-50 disabled:cursor-not-allowed"
|
||||||
|
>
|
||||||
|
{createJobMutation.isPending ? 'Creating...' : 'Create Job'}
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</form>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
@@ -25,6 +25,34 @@ import {
|
|||||||
ResponsiveContainer,
|
ResponsiveContainer,
|
||||||
} from 'recharts'
|
} from 'recharts'
|
||||||
|
|
||||||
|
// Mock data - moved outside component to prevent re-creation on every render
|
||||||
|
const MOCK_ACTIVE_JOBS = [
|
||||||
|
{
|
||||||
|
id: '1',
|
||||||
|
name: 'Daily Backup: VM-Cluster-01',
|
||||||
|
type: 'Replication',
|
||||||
|
progress: 45,
|
||||||
|
speed: '145 MB/s',
|
||||||
|
status: 'running',
|
||||||
|
eta: '1h 12m',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: '2',
|
||||||
|
name: 'ZFS Scrub: Pool-01',
|
||||||
|
type: 'Maintenance',
|
||||||
|
progress: 78,
|
||||||
|
speed: '1.2 GB/s',
|
||||||
|
status: 'running',
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
const MOCK_SYSTEM_LOGS = [
|
||||||
|
{ time: '10:45:22', level: 'INFO', source: 'systemd', message: 'Started User Manager for UID 1000.' },
|
||||||
|
{ time: '10:45:15', level: 'WARN', source: 'smartd', message: 'Device: /dev/ada5, SMART Usage Attribute: 194 Temperature_Celsius changed from 38 to 41' },
|
||||||
|
{ time: '10:44:58', level: 'INFO', source: 'kernel', message: 'ix0: link state changed to UP' },
|
||||||
|
{ time: '10:42:10', level: 'INFO', source: 'zfs', message: 'zfs_arc_reclaim_thread: reclaiming 157286400 bytes ...' },
|
||||||
|
]
|
||||||
|
|
||||||
export default function Dashboard() {
|
export default function Dashboard() {
|
||||||
const [activeTab, setActiveTab] = useState<'jobs' | 'logs' | 'alerts'>('jobs')
|
const [activeTab, setActiveTab] = useState<'jobs' | 'logs' | 'alerts'>('jobs')
|
||||||
const [networkDataPoints, setNetworkDataPoints] = useState<Array<{ time: string; inbound: number; outbound: number }>>([])
|
const [networkDataPoints, setNetworkDataPoints] = useState<Array<{ time: string; inbound: number; outbound: number }>>([])
|
||||||
@@ -37,63 +65,83 @@ export default function Dashboard() {
|
|||||||
return response.data
|
return response.data
|
||||||
},
|
},
|
||||||
refetchInterval: refreshInterval * 1000,
|
refetchInterval: refreshInterval * 1000,
|
||||||
|
staleTime: refreshInterval * 1000 * 2, // Consider data fresh for 2x the interval
|
||||||
|
refetchOnWindowFocus: false, // Don't refetch on window focus
|
||||||
|
refetchOnMount: false, // Don't refetch on mount if data is fresh
|
||||||
|
notifyOnChangeProps: ['data', 'error'],
|
||||||
|
structuralSharing: (oldData, newData) => {
|
||||||
|
// Only update if data actually changed
|
||||||
|
if (JSON.stringify(oldData) === JSON.stringify(newData)) {
|
||||||
|
return oldData
|
||||||
|
}
|
||||||
|
return newData
|
||||||
|
},
|
||||||
})
|
})
|
||||||
|
|
||||||
const { data: metrics } = useQuery({
|
const { data: metrics } = useQuery({
|
||||||
queryKey: ['metrics'],
|
queryKey: ['metrics'],
|
||||||
queryFn: monitoringApi.getMetrics,
|
queryFn: monitoringApi.getMetrics,
|
||||||
refetchInterval: refreshInterval * 1000,
|
refetchInterval: refreshInterval * 1000,
|
||||||
|
staleTime: refreshInterval * 1000 * 2,
|
||||||
|
refetchOnWindowFocus: false,
|
||||||
|
refetchOnMount: false,
|
||||||
|
notifyOnChangeProps: ['data', 'error'],
|
||||||
|
structuralSharing: (oldData, newData) => {
|
||||||
|
if (JSON.stringify(oldData) === JSON.stringify(newData)) {
|
||||||
|
return oldData
|
||||||
|
}
|
||||||
|
return newData
|
||||||
|
},
|
||||||
})
|
})
|
||||||
|
|
||||||
const { data: alerts } = useQuery({
|
const { data: alerts } = useQuery({
|
||||||
queryKey: ['alerts', 'dashboard'],
|
queryKey: ['alerts', 'dashboard'],
|
||||||
queryFn: () => monitoringApi.listAlerts({ is_acknowledged: false, limit: 10 }),
|
queryFn: () => monitoringApi.listAlerts({ is_acknowledged: false, limit: 10 }),
|
||||||
refetchInterval: refreshInterval * 1000,
|
refetchInterval: refreshInterval * 1000,
|
||||||
|
staleTime: refreshInterval * 1000 * 2,
|
||||||
|
refetchOnWindowFocus: false,
|
||||||
|
refetchOnMount: false,
|
||||||
|
notifyOnChangeProps: ['data', 'error'],
|
||||||
|
structuralSharing: (oldData, newData) => {
|
||||||
|
if (JSON.stringify(oldData) === JSON.stringify(newData)) {
|
||||||
|
return oldData
|
||||||
|
}
|
||||||
|
return newData
|
||||||
|
},
|
||||||
})
|
})
|
||||||
|
|
||||||
const { data: repositories = [] } = useQuery({
|
const { data: repositories = [] } = useQuery({
|
||||||
queryKey: ['storage', 'repositories'],
|
queryKey: ['storage', 'repositories'],
|
||||||
queryFn: storageApi.listRepositories,
|
queryFn: storageApi.listRepositories,
|
||||||
|
staleTime: 60 * 1000, // Consider repositories fresh for 60 seconds
|
||||||
|
refetchOnWindowFocus: false,
|
||||||
|
refetchOnMount: false,
|
||||||
|
notifyOnChangeProps: ['data', 'error'],
|
||||||
|
structuralSharing: (oldData, newData) => {
|
||||||
|
if (JSON.stringify(oldData) === JSON.stringify(newData)) {
|
||||||
|
return oldData
|
||||||
|
}
|
||||||
|
return newData
|
||||||
|
},
|
||||||
})
|
})
|
||||||
|
|
||||||
// Calculate uptime (mock for now, would come from metrics)
|
// Memoize uptime calculations to prevent recalculation on every render
|
||||||
const uptime = metrics?.system?.uptime_seconds || 0
|
const { days, hours, minutes } = useMemo(() => {
|
||||||
const days = Math.floor(uptime / 86400)
|
const uptimeValue = metrics?.system?.uptime_seconds || 0
|
||||||
const hours = Math.floor((uptime % 86400) / 3600)
|
return {
|
||||||
const minutes = Math.floor((uptime % 3600) / 60)
|
days: Math.floor(uptimeValue / 86400),
|
||||||
|
hours: Math.floor((uptimeValue % 86400) / 3600),
|
||||||
|
minutes: Math.floor((uptimeValue % 3600) / 60),
|
||||||
|
}
|
||||||
|
}, [metrics?.system?.uptime_seconds])
|
||||||
|
|
||||||
// Mock active jobs (would come from tasks API)
|
// Use memoized storage calculations to prevent unnecessary recalculations
|
||||||
const activeJobs = [
|
const { totalStorage, usedStorage, storagePercent } = useMemo(() => {
|
||||||
{
|
const total = Array.isArray(repositories) ? repositories.reduce((sum, repo) => sum + (repo?.size_bytes || 0), 0) : 0
|
||||||
id: '1',
|
const used = Array.isArray(repositories) ? repositories.reduce((sum, repo) => sum + (repo?.used_bytes || 0), 0) : 0
|
||||||
name: 'Daily Backup: VM-Cluster-01',
|
const percent = total > 0 ? (used / total) * 100 : 0
|
||||||
type: 'Replication',
|
return { totalStorage: total, usedStorage: used, storagePercent: percent }
|
||||||
progress: 45,
|
}, [repositories])
|
||||||
speed: '145 MB/s',
|
|
||||||
status: 'running',
|
|
||||||
eta: '1h 12m',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: '2',
|
|
||||||
name: 'ZFS Scrub: Pool-01',
|
|
||||||
type: 'Maintenance',
|
|
||||||
progress: 78,
|
|
||||||
speed: '1.2 GB/s',
|
|
||||||
status: 'running',
|
|
||||||
},
|
|
||||||
]
|
|
||||||
|
|
||||||
// Mock system logs
|
|
||||||
const systemLogs = [
|
|
||||||
{ time: '10:45:22', level: 'INFO', source: 'systemd', message: 'Started User Manager for UID 1000.' },
|
|
||||||
{ time: '10:45:15', level: 'WARN', source: 'smartd', message: 'Device: /dev/ada5, SMART Usage Attribute: 194 Temperature_Celsius changed from 38 to 41' },
|
|
||||||
{ time: '10:44:58', level: 'INFO', source: 'kernel', message: 'ix0: link state changed to UP' },
|
|
||||||
{ time: '10:42:10', level: 'INFO', source: 'zfs', message: 'zfs_arc_reclaim_thread: reclaiming 157286400 bytes ...' },
|
|
||||||
]
|
|
||||||
|
|
||||||
const totalStorage = Array.isArray(repositories) ? repositories.reduce((sum, repo) => sum + (repo?.size_bytes || 0), 0) : 0
|
|
||||||
const usedStorage = Array.isArray(repositories) ? repositories.reduce((sum, repo) => sum + (repo?.used_bytes || 0), 0) : 0
|
|
||||||
const storagePercent = totalStorage > 0 ? (usedStorage / totalStorage) * 100 : 0
|
|
||||||
|
|
||||||
// Initialize network data
|
// Initialize network data
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
@@ -157,11 +205,15 @@ export default function Dashboard() {
|
|||||||
return Math.max(...networkDataPoints.map((d) => d.inbound + d.outbound))
|
return Math.max(...networkDataPoints.map((d) => d.inbound + d.outbound))
|
||||||
}, [networkDataPoints])
|
}, [networkDataPoints])
|
||||||
|
|
||||||
const systemStatus = health?.status === 'healthy' ? 'System Healthy' : 'System Degraded'
|
// Memoize system status to prevent recalculation
|
||||||
const isHealthy = health?.status === 'healthy'
|
const { systemStatus, isHealthy } = useMemo(() => {
|
||||||
|
const status = health?.status === 'healthy' ? 'System Healthy' : 'System Degraded'
|
||||||
|
const healthy = health?.status === 'healthy'
|
||||||
|
return { systemStatus: status, isHealthy: healthy }
|
||||||
|
}, [health?.status])
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="min-h-screen bg-background-dark text-white overflow-hidden">
|
<div className="h-full bg-background-dark text-white">
|
||||||
{/* Header */}
|
{/* Header */}
|
||||||
<header className="flex-none px-6 py-5 border-b border-border-dark bg-background-dark/95 backdrop-blur z-10">
|
<header className="flex-none px-6 py-5 border-b border-border-dark bg-background-dark/95 backdrop-blur z-10">
|
||||||
<div className="flex flex-wrap justify-between items-end gap-3 max-w-[1600px] mx-auto">
|
<div className="flex flex-wrap justify-between items-end gap-3 max-w-[1600px] mx-auto">
|
||||||
@@ -420,9 +472,9 @@ export default function Dashboard() {
|
|||||||
}`}
|
}`}
|
||||||
>
|
>
|
||||||
Active Jobs{' '}
|
Active Jobs{' '}
|
||||||
{activeJobs.length > 0 && (
|
{MOCK_ACTIVE_JOBS.length > 0 && (
|
||||||
<span className="ml-2 bg-primary/20 text-primary px-1.5 py-0.5 rounded text-xs">
|
<span className="ml-2 bg-primary/20 text-primary px-1.5 py-0.5 rounded text-xs">
|
||||||
{activeJobs.length}
|
{MOCK_ACTIVE_JOBS.length}
|
||||||
</span>
|
</span>
|
||||||
)}
|
)}
|
||||||
</button>
|
</button>
|
||||||
@@ -473,7 +525,7 @@ export default function Dashboard() {
|
|||||||
</tr>
|
</tr>
|
||||||
</thead>
|
</thead>
|
||||||
<tbody className="text-sm divide-y divide-border-dark">
|
<tbody className="text-sm divide-y divide-border-dark">
|
||||||
{activeJobs.map((job) => (
|
{MOCK_ACTIVE_JOBS.map((job) => (
|
||||||
<tr key={job.id} className="group hover:bg-[#233648] transition-colors">
|
<tr key={job.id} className="group hover:bg-[#233648] transition-colors">
|
||||||
<td className="px-6 py-4 font-medium text-white">{job.name}</td>
|
<td className="px-6 py-4 font-medium text-white">{job.name}</td>
|
||||||
<td className="px-6 py-4 text-text-secondary">{job.type}</td>
|
<td className="px-6 py-4 text-text-secondary">{job.type}</td>
|
||||||
@@ -519,7 +571,7 @@ export default function Dashboard() {
|
|||||||
<div className="flex-1 overflow-y-auto custom-scrollbar bg-[#111a22]">
|
<div className="flex-1 overflow-y-auto custom-scrollbar bg-[#111a22]">
|
||||||
<table className="w-full text-left border-collapse">
|
<table className="w-full text-left border-collapse">
|
||||||
<tbody className="text-sm font-mono divide-y divide-border-dark/50">
|
<tbody className="text-sm font-mono divide-y divide-border-dark/50">
|
||||||
{systemLogs.map((log, idx) => (
|
{MOCK_SYSTEM_LOGS.map((log, idx) => (
|
||||||
<tr key={idx} className="group hover:bg-[#233648] transition-colors">
|
<tr key={idx} className="group hover:bg-[#233648] transition-colors">
|
||||||
<td className="px-6 py-2 text-text-secondary w-32 whitespace-nowrap">
|
<td className="px-6 py-2 text-text-secondary w-32 whitespace-nowrap">
|
||||||
{log.time}
|
{log.time}
|
||||||
|
|||||||
2021
frontend/src/pages/IAM.tsx
Normal file
2021
frontend/src/pages/IAM.tsx
Normal file
File diff suppressed because it is too large
Load Diff
@@ -4,7 +4,7 @@ import { scstAPI, type SCSTHandler } from '@/api/scst'
|
|||||||
import { Card, CardContent, CardHeader, CardTitle, CardDescription } from '@/components/ui/card'
|
import { Card, CardContent, CardHeader, CardTitle, CardDescription } from '@/components/ui/card'
|
||||||
import { Button } from '@/components/ui/button'
|
import { Button } from '@/components/ui/button'
|
||||||
import { ArrowLeft, Plus, RefreshCw, HardDrive, Users } from 'lucide-react'
|
import { ArrowLeft, Plus, RefreshCw, HardDrive, Users } from 'lucide-react'
|
||||||
import { useState } from 'react'
|
import { useState, useEffect } from 'react'
|
||||||
|
|
||||||
export default function ISCSITargetDetail() {
|
export default function ISCSITargetDetail() {
|
||||||
const { id } = useParams<{ id: string }>()
|
const { id } = useParams<{ id: string }>()
|
||||||
@@ -13,6 +13,10 @@ export default function ISCSITargetDetail() {
|
|||||||
const [showAddLUN, setShowAddLUN] = useState(false)
|
const [showAddLUN, setShowAddLUN] = useState(false)
|
||||||
const [showAddInitiator, setShowAddInitiator] = useState(false)
|
const [showAddInitiator, setShowAddInitiator] = useState(false)
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
console.log('showAddLUN state:', showAddLUN)
|
||||||
|
}, [showAddLUN])
|
||||||
|
|
||||||
const { data, isLoading } = useQuery({
|
const { data, isLoading } = useQuery({
|
||||||
queryKey: ['scst-target', id],
|
queryKey: ['scst-target', id],
|
||||||
queryFn: () => scstAPI.getTarget(id!),
|
queryFn: () => scstAPI.getTarget(id!),
|
||||||
@@ -22,6 +26,8 @@ export default function ISCSITargetDetail() {
|
|||||||
const { data: handlers } = useQuery<SCSTHandler[]>({
|
const { data: handlers } = useQuery<SCSTHandler[]>({
|
||||||
queryKey: ['scst-handlers'],
|
queryKey: ['scst-handlers'],
|
||||||
queryFn: scstAPI.listHandlers,
|
queryFn: scstAPI.listHandlers,
|
||||||
|
staleTime: 0, // Always fetch fresh data
|
||||||
|
refetchOnMount: true,
|
||||||
})
|
})
|
||||||
|
|
||||||
if (isLoading) {
|
if (isLoading) {
|
||||||
@@ -33,6 +39,8 @@ export default function ISCSITargetDetail() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
const { target, luns } = data
|
const { target, luns } = data
|
||||||
|
// Ensure luns is always an array, not null
|
||||||
|
const lunsArray = luns || []
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="space-y-6 min-h-screen bg-background-dark p-6">
|
<div className="space-y-6 min-h-screen bg-background-dark p-6">
|
||||||
@@ -91,12 +99,12 @@ export default function ISCSITargetDetail() {
|
|||||||
<div className="space-y-2">
|
<div className="space-y-2">
|
||||||
<div className="flex justify-between">
|
<div className="flex justify-between">
|
||||||
<span className="text-text-secondary">Total LUNs:</span>
|
<span className="text-text-secondary">Total LUNs:</span>
|
||||||
<span className="font-medium text-white">{luns.length}</span>
|
<span className="font-medium text-white">{lunsArray.length}</span>
|
||||||
</div>
|
</div>
|
||||||
<div className="flex justify-between">
|
<div className="flex justify-between">
|
||||||
<span className="text-text-secondary">Active:</span>
|
<span className="text-text-secondary">Active:</span>
|
||||||
<span className="font-medium text-white">
|
<span className="font-medium text-white">
|
||||||
{luns.filter((l) => l.is_active).length}
|
{lunsArray.filter((l) => l.is_active).length}
|
||||||
</span>
|
</span>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@@ -140,14 +148,22 @@ export default function ISCSITargetDetail() {
|
|||||||
<CardTitle>LUNs (Logical Unit Numbers)</CardTitle>
|
<CardTitle>LUNs (Logical Unit Numbers)</CardTitle>
|
||||||
<CardDescription>Storage devices exported by this target</CardDescription>
|
<CardDescription>Storage devices exported by this target</CardDescription>
|
||||||
</div>
|
</div>
|
||||||
<Button variant="outline" size="sm" onClick={() => setShowAddLUN(true)}>
|
<Button
|
||||||
|
variant="outline"
|
||||||
|
size="sm"
|
||||||
|
onClick={(e) => {
|
||||||
|
e.stopPropagation()
|
||||||
|
console.log('Add LUN button clicked, setting showAddLUN to true')
|
||||||
|
setShowAddLUN(true)
|
||||||
|
}}
|
||||||
|
>
|
||||||
<Plus className="h-4 w-4 mr-2" />
|
<Plus className="h-4 w-4 mr-2" />
|
||||||
Add LUN
|
Add LUN
|
||||||
</Button>
|
</Button>
|
||||||
</div>
|
</div>
|
||||||
</CardHeader>
|
</CardHeader>
|
||||||
<CardContent>
|
<CardContent>
|
||||||
{luns.length > 0 ? (
|
{lunsArray.length > 0 ? (
|
||||||
<div className="overflow-x-auto">
|
<div className="overflow-x-auto">
|
||||||
<table className="min-w-full divide-y divide-gray-200">
|
<table className="min-w-full divide-y divide-gray-200">
|
||||||
<thead className="bg-[#1a2632]">
|
<thead className="bg-[#1a2632]">
|
||||||
@@ -170,7 +186,7 @@ export default function ISCSITargetDetail() {
|
|||||||
</tr>
|
</tr>
|
||||||
</thead>
|
</thead>
|
||||||
<tbody className="bg-card-dark divide-y divide-border-dark">
|
<tbody className="bg-card-dark divide-y divide-border-dark">
|
||||||
{luns.map((lun) => (
|
{lunsArray.map((lun) => (
|
||||||
<tr key={lun.id} className="hover:bg-[#233648]">
|
<tr key={lun.id} className="hover:bg-[#233648]">
|
||||||
<td className="px-6 py-4 whitespace-nowrap text-sm font-medium text-white">
|
<td className="px-6 py-4 whitespace-nowrap text-sm font-medium text-white">
|
||||||
{lun.lun_number}
|
{lun.lun_number}
|
||||||
@@ -204,7 +220,14 @@ export default function ISCSITargetDetail() {
|
|||||||
<div className="text-center py-8">
|
<div className="text-center py-8">
|
||||||
<HardDrive className="h-12 w-12 text-gray-400 mx-auto mb-4" />
|
<HardDrive className="h-12 w-12 text-gray-400 mx-auto mb-4" />
|
||||||
<p className="text-sm text-text-secondary mb-4">No LUNs configured</p>
|
<p className="text-sm text-text-secondary mb-4">No LUNs configured</p>
|
||||||
<Button variant="outline" onClick={() => setShowAddLUN(true)}>
|
<Button
|
||||||
|
variant="outline"
|
||||||
|
onClick={(e) => {
|
||||||
|
e.stopPropagation()
|
||||||
|
console.log('Add First LUN button clicked, setting showAddLUN to true')
|
||||||
|
setShowAddLUN(true)
|
||||||
|
}}
|
||||||
|
>
|
||||||
<Plus className="h-4 w-4 mr-2" />
|
<Plus className="h-4 w-4 mr-2" />
|
||||||
Add First LUN
|
Add First LUN
|
||||||
</Button>
|
</Button>
|
||||||
@@ -254,12 +277,21 @@ function AddLUNForm({ targetId, handlers, onClose, onSuccess }: AddLUNFormProps)
|
|||||||
const [deviceName, setDeviceName] = useState('')
|
const [deviceName, setDeviceName] = useState('')
|
||||||
const [lunNumber, setLunNumber] = useState(0)
|
const [lunNumber, setLunNumber] = useState(0)
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
console.log('AddLUNForm mounted, targetId:', targetId, 'handlers:', handlers)
|
||||||
|
}, [targetId, handlers])
|
||||||
|
|
||||||
const addLUNMutation = useMutation({
|
const addLUNMutation = useMutation({
|
||||||
mutationFn: (data: { device_name: string; device_path: string; lun_number: number; handler_type: string }) =>
|
mutationFn: (data: { device_name: string; device_path: string; lun_number: number; handler_type: string }) =>
|
||||||
scstAPI.addLUN(targetId, data),
|
scstAPI.addLUN(targetId, data),
|
||||||
onSuccess: () => {
|
onSuccess: () => {
|
||||||
onSuccess()
|
onSuccess()
|
||||||
},
|
},
|
||||||
|
onError: (error: any) => {
|
||||||
|
console.error('Failed to add LUN:', error)
|
||||||
|
const errorMessage = error.response?.data?.error || error.message || 'Failed to add LUN'
|
||||||
|
alert(errorMessage)
|
||||||
|
},
|
||||||
})
|
})
|
||||||
|
|
||||||
const handleSubmit = (e: React.FormEvent) => {
|
const handleSubmit = (e: React.FormEvent) => {
|
||||||
@@ -278,35 +310,62 @@ function AddLUNForm({ targetId, handlers, onClose, onSuccess }: AddLUNFormProps)
|
|||||||
}
|
}
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<Card>
|
<div className="fixed inset-0 bg-black/50 z-50 flex items-center justify-center p-4">
|
||||||
<CardHeader>
|
<div className="bg-card-dark border border-border-dark rounded-xl max-w-2xl w-full max-h-[90vh] overflow-y-auto">
|
||||||
<CardTitle>Add LUN</CardTitle>
|
<div className="p-6 border-b border-border-dark">
|
||||||
<CardDescription>Add a storage device to this target</CardDescription>
|
<h2 className="text-xl font-bold text-white">Add LUN</h2>
|
||||||
</CardHeader>
|
<p className="text-sm text-text-secondary mt-1">Bind a ZFS volume or storage device to this target</p>
|
||||||
<CardContent>
|
</div>
|
||||||
<form onSubmit={handleSubmit} className="space-y-4">
|
<form onSubmit={handleSubmit} className="p-6 space-y-4">
|
||||||
<div>
|
<div>
|
||||||
<label htmlFor="handlerType" className="block text-sm font-medium text-gray-700 mb-1">
|
<label htmlFor="handlerType" className="block text-sm font-medium text-white mb-1">
|
||||||
Handler Type *
|
Handler Type *
|
||||||
</label>
|
</label>
|
||||||
<select
|
<select
|
||||||
id="handlerType"
|
id="handlerType"
|
||||||
value={handlerType}
|
value={handlerType}
|
||||||
onChange={(e) => setHandlerType(e.target.value)}
|
onChange={(e) => setHandlerType(e.target.value)}
|
||||||
className="w-full px-3 py-2 border border-gray-300 rounded-md shadow-sm focus:outline-none focus:ring-blue-500 focus:border-blue-500"
|
className="w-full px-3 py-2 bg-[#0f161d] border border-border-dark rounded-lg text-white text-sm focus:outline-none focus:border-primary focus:ring-1 focus:ring-primary"
|
||||||
required
|
required
|
||||||
>
|
>
|
||||||
<option value="">Select a handler</option>
|
<option value="">Select a handler</option>
|
||||||
{handlers.map((h) => (
|
{handlers.map((h) => (
|
||||||
<option key={h.name} value={h.name}>
|
<option key={h.name} value={h.name}>
|
||||||
{h.name} {h.description && `- ${h.description}`}
|
{h.label || h.name}
|
||||||
</option>
|
</option>
|
||||||
))}
|
))}
|
||||||
</select>
|
</select>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div>
|
<div>
|
||||||
<label htmlFor="deviceName" className="block text-sm font-medium text-gray-700 mb-1">
|
<label htmlFor="devicePath" className="block text-sm font-medium text-white mb-1">
|
||||||
|
ZFS Volume Path *
|
||||||
|
</label>
|
||||||
|
<input
|
||||||
|
id="devicePath"
|
||||||
|
type="text"
|
||||||
|
value={devicePath}
|
||||||
|
onChange={(e) => {
|
||||||
|
const path = e.target.value.trim()
|
||||||
|
setDevicePath(path)
|
||||||
|
// Auto-generate device name from path (e.g., /dev/zvol/pool/volume -> volume)
|
||||||
|
if (path && !deviceName) {
|
||||||
|
const parts = path.split('/')
|
||||||
|
const name = parts[parts.length - 1] || parts[parts.length - 2] || 'device'
|
||||||
|
setDeviceName(name)
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
placeholder="/dev/zvol/pool/volume or /dev/sda"
|
||||||
|
className="w-full px-3 py-2 bg-[#0f161d] border border-border-dark rounded-lg text-white text-sm focus:outline-none focus:border-primary focus:ring-1 focus:ring-primary font-mono"
|
||||||
|
required
|
||||||
|
/>
|
||||||
|
<p className="mt-1 text-xs text-text-secondary">
|
||||||
|
Enter ZFS volume path (e.g., /dev/zvol/pool/volume) or block device path
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div>
|
||||||
|
<label htmlFor="deviceName" className="block text-sm font-medium text-white mb-1">
|
||||||
Device Name *
|
Device Name *
|
||||||
</label>
|
</label>
|
||||||
<input
|
<input
|
||||||
@@ -315,28 +374,16 @@ function AddLUNForm({ targetId, handlers, onClose, onSuccess }: AddLUNFormProps)
|
|||||||
value={deviceName}
|
value={deviceName}
|
||||||
onChange={(e) => setDeviceName(e.target.value)}
|
onChange={(e) => setDeviceName(e.target.value)}
|
||||||
placeholder="device1"
|
placeholder="device1"
|
||||||
className="w-full px-3 py-2 border border-gray-300 rounded-md shadow-sm focus:outline-none focus:ring-blue-500 focus:border-blue-500"
|
className="w-full px-3 py-2 bg-[#0f161d] border border-border-dark rounded-lg text-white text-sm focus:outline-none focus:border-primary focus:ring-1 focus:ring-primary"
|
||||||
required
|
required
|
||||||
/>
|
/>
|
||||||
|
<p className="mt-1 text-xs text-text-secondary">
|
||||||
|
Logical name for this device in SCST (auto-filled from volume path)
|
||||||
|
</p>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div>
|
<div>
|
||||||
<label htmlFor="devicePath" className="block text-sm font-medium text-gray-700 mb-1">
|
<label htmlFor="lunNumber" className="block text-sm font-medium text-white mb-1">
|
||||||
Device Path *
|
|
||||||
</label>
|
|
||||||
<input
|
|
||||||
id="devicePath"
|
|
||||||
type="text"
|
|
||||||
value={devicePath}
|
|
||||||
onChange={(e) => setDevicePath(e.target.value)}
|
|
||||||
placeholder="/dev/sda or /dev/calypso/vg1/lv1"
|
|
||||||
className="w-full px-3 py-2 border border-gray-300 rounded-md shadow-sm focus:outline-none focus:ring-blue-500 focus:border-blue-500 font-mono text-sm"
|
|
||||||
required
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div>
|
|
||||||
<label htmlFor="lunNumber" className="block text-sm font-medium text-gray-700 mb-1">
|
|
||||||
LUN Number *
|
LUN Number *
|
||||||
</label>
|
</label>
|
||||||
<input
|
<input
|
||||||
@@ -345,12 +392,15 @@ function AddLUNForm({ targetId, handlers, onClose, onSuccess }: AddLUNFormProps)
|
|||||||
value={lunNumber}
|
value={lunNumber}
|
||||||
onChange={(e) => setLunNumber(parseInt(e.target.value) || 0)}
|
onChange={(e) => setLunNumber(parseInt(e.target.value) || 0)}
|
||||||
min="0"
|
min="0"
|
||||||
className="w-full px-3 py-2 border border-gray-300 rounded-md shadow-sm focus:outline-none focus:ring-blue-500 focus:border-blue-500"
|
className="w-full px-3 py-2 bg-[#0f161d] border border-border-dark rounded-lg text-white text-sm focus:outline-none focus:border-primary focus:ring-1 focus:ring-primary"
|
||||||
required
|
required
|
||||||
/>
|
/>
|
||||||
|
<p className="mt-1 text-xs text-text-secondary">
|
||||||
|
Logical Unit Number (0-255, typically start from 0)
|
||||||
|
</p>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div className="flex justify-end gap-2">
|
<div className="flex justify-end gap-2 pt-4 border-t border-border-dark">
|
||||||
<Button type="button" variant="outline" onClick={onClose}>
|
<Button type="button" variant="outline" onClick={onClose}>
|
||||||
Cancel
|
Cancel
|
||||||
</Button>
|
</Button>
|
||||||
@@ -359,8 +409,8 @@ function AddLUNForm({ targetId, handlers, onClose, onSuccess }: AddLUNFormProps)
|
|||||||
</Button>
|
</Button>
|
||||||
</div>
|
</div>
|
||||||
</form>
|
</form>
|
||||||
</CardContent>
|
</div>
|
||||||
</Card>
|
</div>
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
376
frontend/src/pages/Profile.tsx
Normal file
376
frontend/src/pages/Profile.tsx
Normal file
@@ -0,0 +1,376 @@
|
|||||||
|
import { useState, useEffect } from 'react'
|
||||||
|
import { useParams, useNavigate } from 'react-router-dom'
|
||||||
|
import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'
|
||||||
|
import { useAuthStore } from '@/store/auth'
|
||||||
|
import { iamApi, type User, type UpdateUserRequest } from '@/api/iam'
|
||||||
|
import { Button } from '@/components/ui/button'
|
||||||
|
import { ArrowLeft, Save, Mail, User as UserIcon, Shield, Calendar, Clock, Edit2, X } from 'lucide-react'
|
||||||
|
|
||||||
|
export default function Profile() {
|
||||||
|
const { id } = useParams<{ id?: string }>()
|
||||||
|
const navigate = useNavigate()
|
||||||
|
const { user: currentUser } = useAuthStore()
|
||||||
|
const queryClient = useQueryClient()
|
||||||
|
const [isEditing, setIsEditing] = useState(false)
|
||||||
|
const [editForm, setEditForm] = useState({
|
||||||
|
email: '',
|
||||||
|
full_name: '',
|
||||||
|
})
|
||||||
|
|
||||||
|
// Determine which user to show
|
||||||
|
const targetUserId = id || currentUser?.id
|
||||||
|
|
||||||
|
// Check permission: only allow if viewing own profile or user is admin
|
||||||
|
const canView = !!currentUser && !!targetUserId && (
|
||||||
|
targetUserId === currentUser.id ||
|
||||||
|
currentUser.roles.includes('admin')
|
||||||
|
)
|
||||||
|
|
||||||
|
const { data: profileUser, isLoading } = useQuery<User>({
|
||||||
|
queryKey: ['iam-user', targetUserId],
|
||||||
|
queryFn: () => iamApi.getUser(targetUserId!),
|
||||||
|
enabled: canView,
|
||||||
|
})
|
||||||
|
|
||||||
|
const updateMutation = useMutation({
|
||||||
|
mutationFn: (data: UpdateUserRequest) => iamApi.updateUser(targetUserId!, data),
|
||||||
|
onSuccess: () => {
|
||||||
|
queryClient.invalidateQueries({ queryKey: ['iam-user', targetUserId] })
|
||||||
|
queryClient.invalidateQueries({ queryKey: ['iam-users'] })
|
||||||
|
setIsEditing(false)
|
||||||
|
// If updating own profile, refresh auth store
|
||||||
|
if (targetUserId === currentUser?.id) {
|
||||||
|
queryClient.invalidateQueries({ queryKey: ['auth-me'] })
|
||||||
|
}
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
if (profileUser) {
|
||||||
|
setEditForm({
|
||||||
|
email: profileUser.email || '',
|
||||||
|
full_name: profileUser.full_name || '',
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}, [profileUser])
|
||||||
|
|
||||||
|
if (!canView) {
|
||||||
|
return (
|
||||||
|
<div className="flex-1 overflow-y-auto p-8">
|
||||||
|
<div className="max-w-[1200px] mx-auto">
|
||||||
|
<div className="bg-red-500/10 border border-red-500/20 rounded-lg p-6 text-center">
|
||||||
|
<p className="text-red-400 font-semibold">Access Denied</p>
|
||||||
|
<p className="text-text-secondary text-sm mt-2">
|
||||||
|
You don't have permission to view this profile.
|
||||||
|
</p>
|
||||||
|
<Button
|
||||||
|
variant="outline"
|
||||||
|
onClick={() => navigate(-1)}
|
||||||
|
className="mt-4"
|
||||||
|
>
|
||||||
|
<ArrowLeft className="h-4 w-4 mr-2" />
|
||||||
|
Go Back
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isLoading) {
|
||||||
|
return (
|
||||||
|
<div className="flex-1 overflow-y-auto p-8">
|
||||||
|
<div className="max-w-[1200px] mx-auto">
|
||||||
|
<p className="text-text-secondary">Loading profile...</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!profileUser) {
|
||||||
|
return (
|
||||||
|
<div className="flex-1 overflow-y-auto p-8">
|
||||||
|
<div className="max-w-[1200px] mx-auto">
|
||||||
|
<div className="bg-card-dark border border-border-dark rounded-lg p-6 text-center">
|
||||||
|
<p className="text-text-secondary">User not found</p>
|
||||||
|
<Button
|
||||||
|
variant="outline"
|
||||||
|
onClick={() => navigate(-1)}
|
||||||
|
className="mt-4"
|
||||||
|
>
|
||||||
|
<ArrowLeft className="h-4 w-4 mr-2" />
|
||||||
|
Go Back
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const isOwnProfile = targetUserId === currentUser?.id
|
||||||
|
const canEdit = isOwnProfile || currentUser?.roles.includes('admin')
|
||||||
|
|
||||||
|
const handleSave = () => {
|
||||||
|
updateMutation.mutate({
|
||||||
|
email: editForm.email,
|
||||||
|
full_name: editForm.full_name,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
const formatDate = (dateString: string) => {
|
||||||
|
return new Date(dateString).toLocaleString()
|
||||||
|
}
|
||||||
|
|
||||||
|
const formatLastLogin = (lastLoginAt: string | null) => {
|
||||||
|
if (!lastLoginAt) return 'Never'
|
||||||
|
return formatDate(lastLoginAt)
|
||||||
|
}
|
||||||
|
|
||||||
|
const getAvatarInitials = () => {
|
||||||
|
if (profileUser?.full_name) {
|
||||||
|
return profileUser.full_name
|
||||||
|
.split(' ')
|
||||||
|
.map((n: string) => n[0])
|
||||||
|
.join('')
|
||||||
|
.substring(0, 2)
|
||||||
|
.toUpperCase()
|
||||||
|
}
|
||||||
|
return profileUser?.username?.substring(0, 2).toUpperCase() || 'U'
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="flex-1 overflow-y-auto p-8">
|
||||||
|
<div className="max-w-[1200px] mx-auto flex flex-col gap-6">
|
||||||
|
{/* Header */}
|
||||||
|
<div className="flex items-center justify-between">
|
||||||
|
<div className="flex items-center gap-4">
|
||||||
|
<Button
|
||||||
|
variant="ghost"
|
||||||
|
size="sm"
|
||||||
|
onClick={() => navigate(-1)}
|
||||||
|
className="text-text-secondary hover:text-white"
|
||||||
|
>
|
||||||
|
<ArrowLeft className="h-4 w-4 mr-2" />
|
||||||
|
Back
|
||||||
|
</Button>
|
||||||
|
<div>
|
||||||
|
<h1 className="text-3xl font-black text-white leading-tight">User Profile</h1>
|
||||||
|
<p className="text-text-secondary text-sm mt-1">
|
||||||
|
{isOwnProfile ? 'Your profile information' : `Viewing profile for ${profileUser.username}`}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{canEdit && (
|
||||||
|
<div className="flex gap-2">
|
||||||
|
{isEditing ? (
|
||||||
|
<>
|
||||||
|
<Button
|
||||||
|
variant="outline"
|
||||||
|
onClick={() => {
|
||||||
|
setIsEditing(false)
|
||||||
|
setEditForm({
|
||||||
|
email: profileUser.email || '',
|
||||||
|
full_name: profileUser.full_name || '',
|
||||||
|
})
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
<X className="h-4 w-4 mr-2" />
|
||||||
|
Cancel
|
||||||
|
</Button>
|
||||||
|
<Button
|
||||||
|
onClick={handleSave}
|
||||||
|
disabled={updateMutation.isPending}
|
||||||
|
>
|
||||||
|
<Save className="h-4 w-4 mr-2" />
|
||||||
|
{updateMutation.isPending ? 'Saving...' : 'Save Changes'}
|
||||||
|
</Button>
|
||||||
|
</>
|
||||||
|
) : (
|
||||||
|
<Button onClick={() => setIsEditing(true)}>
|
||||||
|
<Edit2 className="h-4 w-4 mr-2" />
|
||||||
|
Edit Profile
|
||||||
|
</Button>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Profile Card */}
|
||||||
|
<div className="bg-card-dark border border-border-dark rounded-xl overflow-hidden">
|
||||||
|
{/* Profile Header */}
|
||||||
|
<div className="bg-gradient-to-r from-primary/20 to-blue-600/20 p-8 border-b border-border-dark">
|
||||||
|
<div className="flex items-center gap-6">
|
||||||
|
<div className="w-24 h-24 rounded-full bg-gradient-to-br from-blue-500 to-indigo-600 flex items-center justify-center text-white text-3xl font-bold">
|
||||||
|
{getAvatarInitials()}
|
||||||
|
</div>
|
||||||
|
<div className="flex-1">
|
||||||
|
<h2 className="text-2xl font-bold text-white">
|
||||||
|
{profileUser.full_name || profileUser.username}
|
||||||
|
</h2>
|
||||||
|
<p className="text-text-secondary mt-1">@{profileUser.username}</p>
|
||||||
|
<div className="flex items-center gap-4 mt-3">
|
||||||
|
<div className={`inline-flex items-center gap-2 px-3 py-1 rounded-full text-xs font-bold ${
|
||||||
|
profileUser.is_active
|
||||||
|
? 'bg-green-500/10 text-green-400 border border-green-500/20'
|
||||||
|
: 'bg-red-500/10 text-red-400 border border-red-500/20'
|
||||||
|
}`}>
|
||||||
|
<span className={`w-2 h-2 rounded-full ${profileUser.is_active ? 'bg-green-400' : 'bg-red-400'}`}></span>
|
||||||
|
{profileUser.is_active ? 'Active' : 'Inactive'}
|
||||||
|
</div>
|
||||||
|
{profileUser.is_system && (
|
||||||
|
<div className="inline-flex items-center gap-2 px-3 py-1 rounded-full text-xs font-bold bg-purple-500/10 text-purple-400 border border-purple-500/20">
|
||||||
|
<Shield size={12} />
|
||||||
|
System User
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Profile Content */}
|
||||||
|
<div className="p-8">
|
||||||
|
<div className="grid grid-cols-1 md:grid-cols-2 gap-6">
|
||||||
|
{/* Basic Information */}
|
||||||
|
<div className="space-y-6">
|
||||||
|
<div>
|
||||||
|
<h3 className="text-lg font-bold text-white mb-4 flex items-center gap-2">
|
||||||
|
<UserIcon className="h-5 w-5 text-primary" />
|
||||||
|
Basic Information
|
||||||
|
</h3>
|
||||||
|
<div className="space-y-4">
|
||||||
|
<div>
|
||||||
|
<label className="block text-xs font-bold text-text-secondary uppercase tracking-wider mb-2">
|
||||||
|
Username
|
||||||
|
</label>
|
||||||
|
<div className="bg-[#0f161d] border border-border-dark rounded-lg px-4 py-3 text-white font-mono">
|
||||||
|
{profileUser.username}
|
||||||
|
</div>
|
||||||
|
<p className="text-xs text-text-secondary mt-1">Username cannot be changed</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div>
|
||||||
|
<label className="block text-xs font-bold text-text-secondary uppercase tracking-wider mb-2">
|
||||||
|
Email Address
|
||||||
|
</label>
|
||||||
|
{isEditing ? (
|
||||||
|
<input
|
||||||
|
type="email"
|
||||||
|
value={editForm.email}
|
||||||
|
onChange={(e) => setEditForm({ ...editForm, email: e.target.value })}
|
||||||
|
className="w-full bg-[#0f161d] border border-border-dark rounded-lg px-4 py-3 text-white focus:outline-none focus:ring-2 focus:ring-primary focus:border-transparent"
|
||||||
|
placeholder="email@example.com"
|
||||||
|
/>
|
||||||
|
) : (
|
||||||
|
<div className="bg-[#0f161d] border border-border-dark rounded-lg px-4 py-3 text-white flex items-center gap-2">
|
||||||
|
<Mail className="h-4 w-4 text-text-secondary" />
|
||||||
|
{profileUser.email || '-'}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div>
|
||||||
|
<label className="block text-xs font-bold text-text-secondary uppercase tracking-wider mb-2">
|
||||||
|
Full Name
|
||||||
|
</label>
|
||||||
|
{isEditing ? (
|
||||||
|
<input
|
||||||
|
type="text"
|
||||||
|
value={editForm.full_name}
|
||||||
|
onChange={(e) => setEditForm({ ...editForm, full_name: e.target.value })}
|
||||||
|
className="w-full bg-[#0f161d] border border-border-dark rounded-lg px-4 py-3 text-white focus:outline-none focus:ring-2 focus:ring-primary focus:border-transparent"
|
||||||
|
placeholder="Full Name"
|
||||||
|
/>
|
||||||
|
) : (
|
||||||
|
<div className="bg-[#0f161d] border border-border-dark rounded-lg px-4 py-3 text-white">
|
||||||
|
{profileUser.full_name || '-'}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Account Details */}
|
||||||
|
<div className="space-y-6">
|
||||||
|
<div>
|
||||||
|
<h3 className="text-lg font-bold text-white mb-4 flex items-center gap-2">
|
||||||
|
<Shield className="h-5 w-5 text-primary" />
|
||||||
|
Account Details
|
||||||
|
</h3>
|
||||||
|
<div className="space-y-4">
|
||||||
|
<div>
|
||||||
|
<label className="block text-xs font-bold text-text-secondary uppercase tracking-wider mb-2">
|
||||||
|
Roles
|
||||||
|
</label>
|
||||||
|
<div className="bg-[#0f161d] border border-border-dark rounded-lg px-4 py-3">
|
||||||
|
{profileUser.roles && profileUser.roles.length > 0 ? (
|
||||||
|
<div className="flex flex-wrap gap-2">
|
||||||
|
{profileUser.roles.map((role) => (
|
||||||
|
<span
|
||||||
|
key={role}
|
||||||
|
className="inline-flex items-center gap-1.5 px-2.5 py-1 rounded-md bg-primary/10 text-primary text-xs font-medium border border-primary/20"
|
||||||
|
>
|
||||||
|
<Shield size={12} />
|
||||||
|
{role}
|
||||||
|
</span>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
) : (
|
||||||
|
<span className="text-text-secondary text-sm">No roles assigned</span>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div>
|
||||||
|
<label className="block text-xs font-bold text-text-secondary uppercase tracking-wider mb-2">
|
||||||
|
Permissions
|
||||||
|
</label>
|
||||||
|
<div className="bg-[#0f161d] border border-border-dark rounded-lg px-4 py-3">
|
||||||
|
{profileUser.permissions && profileUser.permissions.length > 0 ? (
|
||||||
|
<div className="flex flex-wrap gap-2">
|
||||||
|
{profileUser.permissions.map((perm) => (
|
||||||
|
<span
|
||||||
|
key={perm}
|
||||||
|
className="inline-flex items-center px-2 py-1 rounded-md bg-slate-700 text-slate-300 text-xs font-medium"
|
||||||
|
>
|
||||||
|
{perm}
|
||||||
|
</span>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
) : (
|
||||||
|
<span className="text-text-secondary text-sm">No permissions assigned</span>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div>
|
||||||
|
<label className="block text-xs font-bold text-text-secondary uppercase tracking-wider mb-2">
|
||||||
|
Last Login
|
||||||
|
</label>
|
||||||
|
<div className="bg-[#0f161d] border border-border-dark rounded-lg px-4 py-3 text-white flex items-center gap-2">
|
||||||
|
<Clock className="h-4 w-4 text-text-secondary" />
|
||||||
|
{formatLastLogin(profileUser.last_login_at)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div>
|
||||||
|
<label className="block text-xs font-bold text-text-secondary uppercase tracking-wider mb-2">
|
||||||
|
Account Created
|
||||||
|
</label>
|
||||||
|
<div className="bg-[#0f161d] border border-border-dark rounded-lg px-4 py-3 text-white flex items-center gap-2">
|
||||||
|
<Calendar className="h-4 w-4 text-text-secondary" />
|
||||||
|
{formatDate(profileUser.created_at)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
@@ -365,7 +365,11 @@ export default function TapeLibraries() {
|
|||||||
</td>
|
</td>
|
||||||
<td className="py-4 px-6">
|
<td className="py-4 px-6">
|
||||||
<p className="text-white text-sm font-medium">
|
<p className="text-white text-sm font-medium">
|
||||||
{isVTL ? 'MHVTL' : 'physical' in library ? (library as PhysicalTapeLibrary).vendor : 'N/A'}
|
{isVTL
|
||||||
|
? (library as VirtualTapeLibrary).vendor || 'MHVTL'
|
||||||
|
: 'physical' in library
|
||||||
|
? (library as PhysicalTapeLibrary).vendor
|
||||||
|
: 'N/A'}
|
||||||
</p>
|
</p>
|
||||||
<p className="text-text-secondary text-xs">
|
<p className="text-text-secondary text-xs">
|
||||||
LTO-8 • {library.drive_count} {library.drive_count === 1 ? 'Drive' : 'Drives'}
|
LTO-8 • {library.drive_count} {library.drive_count === 1 ? 'Drive' : 'Drives'}
|
||||||
@@ -390,7 +394,13 @@ export default function TapeLibraries() {
|
|||||||
</div>
|
</div>
|
||||||
</td>
|
</td>
|
||||||
<td className="py-4 px-6">
|
<td className="py-4 px-6">
|
||||||
<div className="flex items-center gap-2 group/copy cursor-pointer">
|
<div
|
||||||
|
className="flex items-center gap-2 group/copy cursor-pointer"
|
||||||
|
onClick={() => {
|
||||||
|
const iqn = `iqn.2023-10.com.vtl:${library.name.toLowerCase().replace(/\s+/g, '')}`
|
||||||
|
navigator.clipboard.writeText(iqn)
|
||||||
|
}}
|
||||||
|
>
|
||||||
<code className="text-xs text-text-secondary font-mono bg-[#111a22] px-2 py-1 rounded border border-border-dark group-hover/copy:text-white transition-colors">
|
<code className="text-xs text-text-secondary font-mono bg-[#111a22] px-2 py-1 rounded border border-border-dark group-hover/copy:text-white transition-colors">
|
||||||
iqn.2023-10.com.vtl:{library.name.toLowerCase().replace(/\s+/g, '')}
|
iqn.2023-10.com.vtl:{library.name.toLowerCase().replace(/\s+/g, '')}
|
||||||
</code>
|
</code>
|
||||||
|
|||||||
@@ -46,6 +46,8 @@ export default {
|
|||||||
"card-dark": "#192633",
|
"card-dark": "#192633",
|
||||||
"border-dark": "#233648",
|
"border-dark": "#233648",
|
||||||
"text-secondary": "#92adc9",
|
"text-secondary": "#92adc9",
|
||||||
|
"surface-dark": "#111a22",
|
||||||
|
"surface-highlight": "#1c2936",
|
||||||
},
|
},
|
||||||
fontFamily: {
|
fontFamily: {
|
||||||
display: ["Manrope", "sans-serif"],
|
display: ["Manrope", "sans-serif"],
|
||||||
|
|||||||
25
scripts/rebuild-and-restart.sh
Executable file
25
scripts/rebuild-and-restart.sh
Executable file
@@ -0,0 +1,25 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
echo "🔨 Rebuilding Calypso API..."
|
||||||
|
cd /development/calypso/backend
|
||||||
|
make build
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "🔨 Rebuilding Calypso Frontend..."
|
||||||
|
cd /development/calypso/frontend
|
||||||
|
npm run build
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "🔄 Restarting API service..."
|
||||||
|
systemctl restart calypso-api.service
|
||||||
|
|
||||||
|
echo "🔄 Restarting Frontend service..."
|
||||||
|
systemctl restart calypso-frontend.service
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "✅ Build and restart complete!"
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
systemctl status calypso-api.service calypso-frontend.service --no-pager
|
||||||
Reference in New Issue
Block a user