Initial commit: Open sourcing all of the Maple Open Technologies code.

This commit is contained in:
Bartlomiej Mika 2025-12-02 14:33:08 -05:00
commit 755d54a99d
2010 changed files with 448675 additions and 0 deletions

View file

@ -0,0 +1,444 @@
package app
import (
	"encoding/json"
	"fmt"
	"time"
	"unicode/utf8"

	"go.uber.org/zap"

	"codeberg.org/mapleopentech/monorepo/cloud/maplefile-backend/pkg/maplefile/client"
	"codeberg.org/mapleopentech/monorepo/cloud/maplefile-backend/pkg/maplefile/e2ee"
	"codeberg.org/mapleopentech/monorepo/native/desktop/maplefile/internal/domain/file"
)
// DashboardData contains the formatted dashboard data for the frontend.
// All nested values are already human-formatted strings (sizes, dates), so
// the frontend can render them without further conversion.
type DashboardData struct {
	Summary           DashboardSummary      `json:"summary"`             // headline counts and storage usage
	StorageUsageTrend StorageUsageTrend     `json:"storage_usage_trend"` // usage-over-time data points
	RecentFiles       []DashboardRecentFile `json:"recent_files"`        // recent files, local-first with cloud fallback
}
// DashboardSummary contains summary statistics for the account.
type DashboardSummary struct {
	TotalFiles             int    `json:"total_files"`              // total file count reported by the backend
	TotalFolders           int    `json:"total_folders"`            // total folder count reported by the backend
	StorageUsed            string `json:"storage_used"`             // human-readable, via formatStorageAmount (e.g. "1.25 GB")
	StorageLimit           string `json:"storage_limit"`            // human-readable plan limit, via formatStorageAmount
	StorageUsagePercentage int    `json:"storage_usage_percentage"` // percentage as reported by the backend (not recomputed here)
}
// StorageUsageTrend contains storage usage trend data over a backend-defined
// period.
type StorageUsageTrend struct {
	Period     string                  `json:"period"`      // period label passed through from the backend
	DataPoints []StorageTrendDataPoint `json:"data_points"` // one entry per sampled date
}
// StorageTrendDataPoint represents a single data point in the storage trend.
type StorageTrendDataPoint struct {
	Date  string `json:"date"`  // date string passed through from the backend unmodified
	Usage string `json:"usage"` // human-readable usage, via formatStorageAmount
}
// DashboardRecentFile represents a recent file for dashboard display.
// Name, MimeType and size fields prefer local repository data when the file
// exists locally; otherwise they come from on-the-fly decryption of the cloud
// metadata, falling back to placeholders when decryption is not possible.
type DashboardRecentFile struct {
	ID              string `json:"id"`                // backend file ID
	CollectionID    string `json:"collection_id"`     // owning collection ID
	Name            string `json:"name"`              // decrypted filename, or "Encrypted File" when unavailable
	Size            string `json:"size"`              // human-readable size, via formatFileSize
	SizeInBytes     int64  `json:"size_in_bytes"`     // decrypted size when known locally, else encrypted size
	MimeType        string `json:"mime_type"`         // decrypted mime type, or "application/octet-stream" fallback
	CreatedAt       string `json:"created_at"`        // RFC3339 timestamp
	IsDecrypted     bool   `json:"is_decrypted"`      // true when Name (and mime type) were successfully decrypted
	SyncStatus      string `json:"sync_status"`       // file.SyncStatus string; defaults to cloud-only
	HasLocalContent bool   `json:"has_local_content"` // true when the file's content is present locally
}
// GetDashboardData fetches and formats dashboard data from the backend.
//
// Flow:
//  1. Validate the current session and restore its tokens into the API client
//     (important after app restarts or hot reloads).
//  2. Call the backend dashboard endpoint; the client is expected to refresh
//     the access token itself on a 401.
//  3. Format the summary and storage-trend sections.
//  4. Pre-decrypt the collection keys shipped in the dashboard response, then
//     resolve each recent file: prefer local repository data (already
//     decrypted), otherwise attempt on-the-fly metadata decryption.
//
// Per-file decryption failures are logged and the file falls back to the
// placeholder name "Encrypted File"; only session-level or API-level failures
// abort the whole call.
func (a *Application) GetDashboardData() (*DashboardData, error) {
	// Get API client from auth service
	apiClient := a.authService.GetAPIClient()
	if apiClient == nil {
		return nil, fmt.Errorf("API client not available")
	}
	// Ensure we have a valid session with tokens
	session, err := a.authService.GetCurrentSession(a.ctx)
	if err != nil || session == nil {
		return nil, fmt.Errorf("no active session - please log in")
	}
	if !session.IsValid() {
		return nil, fmt.Errorf("session expired - please log in again")
	}
	// Ensure tokens are set in the API client
	// This is important after app restarts or hot reloads
	apiClient.SetTokens(session.AccessToken, session.RefreshToken)
	a.logger.Debug("Restored tokens to API client for dashboard request",
		zap.String("user_id", session.UserID),
		zap.Time("token_expires_at", session.ExpiresAt))
	// Check if access token is about to expire or already expired.
	// These checks only log — actual refresh is delegated to the API client.
	timeUntilExpiry := time.Until(session.ExpiresAt)
	now := time.Now()
	sessionAge := now.Sub(session.CreatedAt)
	a.logger.Debug("Token status check",
		zap.Time("now", now),
		zap.Time("expires_at", session.ExpiresAt),
		zap.Duration("time_until_expiry", timeUntilExpiry),
		zap.Duration("session_age", sessionAge))
	if timeUntilExpiry < 0 {
		a.logger.Warn("Access token already expired, refresh should happen automatically",
			zap.Duration("expired_since", -timeUntilExpiry))
	} else if timeUntilExpiry < 2*time.Minute {
		a.logger.Info("Access token expiring soon, refresh may be needed",
			zap.Duration("time_until_expiry", timeUntilExpiry))
	}
	// If session is very old (more than 1 day), recommend fresh login
	if sessionAge > 24*time.Hour {
		a.logger.Warn("Session is very old, consider logging out and logging in again",
			zap.Duration("session_age", sessionAge))
	}
	// Fetch dashboard data from backend
	// The client will automatically refresh the token if it gets a 401
	a.logger.Debug("Calling backend API for dashboard data")
	resp, err := apiClient.GetDashboard(a.ctx)
	if err != nil {
		a.logger.Error("Failed to fetch dashboard data",
			zap.Error(err),
			zap.String("error_type", fmt.Sprintf("%T", err)))
		// Check if this is an unauthorized error that should trigger token refresh
		if apiErr, ok := err.(*client.APIError); ok {
			a.logger.Error("API Error details",
				zap.Int("status", apiErr.Status),
				zap.String("title", apiErr.Title),
				zap.String("detail", apiErr.Detail))
		}
		return nil, fmt.Errorf("failed to fetch dashboard: %w", err)
	}
	if resp.Dashboard == nil {
		return nil, fmt.Errorf("dashboard data is empty")
	}
	dashboard := resp.Dashboard
	// Format summary data
	summary := DashboardSummary{
		TotalFiles:             dashboard.Summary.TotalFiles,
		TotalFolders:           dashboard.Summary.TotalFolders,
		StorageUsed:            formatStorageAmount(dashboard.Summary.StorageUsed),
		StorageLimit:           formatStorageAmount(dashboard.Summary.StorageLimit),
		StorageUsagePercentage: dashboard.Summary.StorageUsagePercentage,
	}
	// Format storage usage trend
	dataPoints := make([]StorageTrendDataPoint, len(dashboard.StorageUsageTrend.DataPoints))
	for i, dp := range dashboard.StorageUsageTrend.DataPoints {
		dataPoints[i] = StorageTrendDataPoint{
			Date:  dp.Date,
			Usage: formatStorageAmount(dp.Usage),
		}
	}
	trend := StorageUsageTrend{
		Period:     dashboard.StorageUsageTrend.Period,
		DataPoints: dataPoints,
	}
	// Get master key for decryption (needed for cloud-only files).
	// A missing master key is non-fatal: cloud-only files simply stay encrypted.
	masterKey, cleanup, masterKeyErr := a.keyCache.GetMasterKey(session.Email)
	if masterKeyErr != nil {
		a.logger.Warn("Master key not available for dashboard file decryption",
			zap.Error(masterKeyErr))
	} else {
		// cleanup presumably wipes/releases the cached key material — only
		// registered when the key was actually obtained.
		defer cleanup()
	}
	// Build a cache of collection keys for efficient decryption
	// First, pre-populate from the dashboard response's collection_keys (if available)
	// This avoids making additional API calls for each collection
	collectionKeyCache := make(map[string][]byte) // collectionID -> decrypted collection key
	if masterKeyErr == nil && len(dashboard.CollectionKeys) > 0 {
		a.logger.Debug("Pre-populating collection key cache from dashboard response",
			zap.Int("collection_keys_count", len(dashboard.CollectionKeys)))
		for _, ck := range dashboard.CollectionKeys {
			// Decode the encrypted collection key. Any failure skips this
			// collection (continue) rather than aborting the dashboard.
			collKeyCiphertext, decodeErr := tryDecodeBase64(ck.EncryptedCollectionKey)
			if decodeErr != nil {
				a.logger.Warn("Failed to decode collection key ciphertext from dashboard",
					zap.String("collection_id", ck.CollectionID),
					zap.Error(decodeErr))
				continue
			}
			collKeyNonce, decodeErr := tryDecodeBase64(ck.EncryptedCollectionKeyNonce)
			if decodeErr != nil {
				a.logger.Warn("Failed to decode collection key nonce from dashboard",
					zap.String("collection_id", ck.CollectionID),
					zap.Error(decodeErr))
				continue
			}
			// Handle combined ciphertext format (nonce prepended to ciphertext)
			actualCollKeyCiphertext := extractActualCiphertext(collKeyCiphertext, collKeyNonce)
			// Decrypt the collection key with the master key
			collectionKey, decryptErr := e2ee.DecryptCollectionKey(&e2ee.EncryptedKey{
				Ciphertext: actualCollKeyCiphertext,
				Nonce:      collKeyNonce,
			}, masterKey)
			if decryptErr != nil {
				a.logger.Warn("Failed to decrypt collection key from dashboard",
					zap.String("collection_id", ck.CollectionID),
					zap.Error(decryptErr))
				continue
			}
			// Cache the decrypted collection key
			collectionKeyCache[ck.CollectionID] = collectionKey
			a.logger.Debug("Cached collection key from dashboard response",
				zap.String("collection_id", ck.CollectionID))
		}
		a.logger.Info("Pre-populated collection key cache from dashboard",
			zap.Int("cached_keys", len(collectionKeyCache)))
	}
	// Format recent files (use local data if available, otherwise decrypt from cloud)
	recentFiles := make([]DashboardRecentFile, 0, len(dashboard.RecentFiles))
	for _, cloudFile := range dashboard.RecentFiles {
		// Debug: Log what we received from the API
		a.logger.Debug("Processing dashboard recent file",
			zap.String("file_id", cloudFile.ID),
			zap.String("collection_id", cloudFile.CollectionID),
			zap.Int("encrypted_file_key_ciphertext_len", len(cloudFile.EncryptedFileKey.Ciphertext)),
			zap.Int("encrypted_file_key_nonce_len", len(cloudFile.EncryptedFileKey.Nonce)),
			zap.String("encrypted_file_key_ciphertext_preview", truncateForLog(cloudFile.EncryptedFileKey.Ciphertext, 50)),
			zap.Int("encrypted_metadata_len", len(cloudFile.EncryptedMetadata)))
		// Default values for files not in local repository
		filename := "Encrypted File"
		isDecrypted := false
		syncStatus := file.SyncStatusCloudOnly // Default: cloud only
		hasLocalContent := false
		sizeInBytes := cloudFile.EncryptedFileSizeInBytes
		mimeType := "application/octet-stream"
		// Check local repository for this file to get decrypted name and sync status
		localFile, err := a.mustGetFileRepo().Get(cloudFile.ID)
		if err == nil && localFile != nil && localFile.State != file.StateDeleted {
			// File exists locally - use local data
			syncStatus = localFile.SyncStatus
			hasLocalContent = localFile.HasLocalContent()
			// Use decrypted filename if available
			if localFile.Name != "" {
				filename = localFile.Name
				isDecrypted = true
			}
			// Use decrypted mime type if available
			if localFile.MimeType != "" {
				mimeType = localFile.MimeType
			}
			// Use local size (decrypted) if available
			if localFile.DecryptedSizeInBytes > 0 {
				sizeInBytes = localFile.DecryptedSizeInBytes
			}
		} else if masterKeyErr == nil && cloudFile.EncryptedMetadata != "" {
			// File not in local repo, but we have the master key - try to decrypt from cloud data
			decryptedFilename, decryptedMimeType, decryptErr := a.decryptDashboardFileMetadata(
				cloudFile, masterKey, collectionKeyCache, apiClient)
			if decryptErr != nil {
				// Log at Warn level for better visibility during troubleshooting
				a.logger.Warn("Failed to decrypt dashboard file metadata",
					zap.String("file_id", cloudFile.ID),
					zap.String("collection_id", cloudFile.CollectionID),
					zap.Int("encrypted_file_key_ciphertext_len", len(cloudFile.EncryptedFileKey.Ciphertext)),
					zap.Int("encrypted_file_key_nonce_len", len(cloudFile.EncryptedFileKey.Nonce)),
					zap.Error(decryptErr))
			} else {
				filename = decryptedFilename
				mimeType = decryptedMimeType
				isDecrypted = true
			}
		}
		recentFiles = append(recentFiles, DashboardRecentFile{
			ID:              cloudFile.ID,
			CollectionID:    cloudFile.CollectionID,
			Name:            filename,
			Size:            formatFileSize(sizeInBytes),
			SizeInBytes:     sizeInBytes,
			MimeType:        mimeType,
			CreatedAt:       cloudFile.CreatedAt.Format(time.RFC3339),
			IsDecrypted:     isDecrypted,
			SyncStatus:      syncStatus.String(),
			HasLocalContent: hasLocalContent,
		})
	}
	dashboardData := &DashboardData{
		Summary:           summary,
		StorageUsageTrend: trend,
		RecentFiles:       recentFiles,
	}
	a.logger.Info("Dashboard data fetched successfully",
		zap.Int("total_files", summary.TotalFiles),
		zap.Int("recent_files", len(recentFiles)))
	return dashboardData, nil
}
// formatStorageAmount renders a client.StorageAmount as a human-readable
// string such as "12.50 GB", using the unit supplied by the backend.
// A zero amount is shown as the fixed label "0 B" rather than "0.00 <unit>".
func formatStorageAmount(amount client.StorageAmount) string {
	if amount.Value != 0 {
		return fmt.Sprintf("%.2f %s", amount.Value, amount.Unit)
	}
	return "0 B"
}
// formatFileSize converts a byte count to a human-readable string using
// binary (1024-based) units, e.g. "512 B", "1.5 KB", "2.0 MB".
//
// Bug fix: values of a petabyte or more previously indexed past the end of
// the units slice ("TB" was the last entry) and panicked with an index out of
// range. The table now covers the full int64 range (int64 max is ~8 EB) and
// the exponent is clamped defensively.
func formatFileSize(bytes int64) string {
	if bytes == 0 {
		return "0 B"
	}
	const unit = 1024
	if bytes < unit {
		return fmt.Sprintf("%d B", bytes)
	}
	// units[0] ("B") is unreachable here — the sub-KB case returned above —
	// but keeping it makes the exp+1 indexing below line up naturally.
	units := []string{"B", "KB", "MB", "GB", "TB", "PB", "EB"}
	div, exp := int64(unit), 0
	for n := bytes / unit; n >= unit; n /= unit {
		div *= unit
		exp++
	}
	if exp+1 >= len(units) {
		// Defensive clamp; unreachable for int64 input but cheap insurance.
		exp = len(units) - 2
	}
	return fmt.Sprintf("%.1f %s", float64(bytes)/float64(div), units[exp+1])
}
// decryptDashboardFileMetadata decrypts the name and mime type of a dashboard
// recent file that has no local copy.
//
// Collection keys should already be pre-populated in collectionKeyCache from
// the dashboard API response; a missing entry is returned as an error rather
// than triggering an extra collection fetch. The decryption chain is:
//
//	collection key (cached) -> file key -> metadata JSON ({name, mime_type})
//
// masterKey is currently unused here (collection keys were already decrypted
// with it by the caller); it is retained so the call signature stays stable.
// If the dashboard payload carried an empty encrypted_file_key, the file is
// re-fetched from the file endpoint, which properly deserializes the key.
//
// Returns the decrypted filename and mime type, or an error describing the
// first failing step.
//
// Fix: the local variable holding the re-fetched file was named "file",
// shadowing the imported domain "file" package; renamed to "fetched".
func (a *Application) decryptDashboardFileMetadata(
	cloudFile client.RecentFileDashboard,
	masterKey []byte,
	collectionKeyCache map[string][]byte,
	apiClient *client.Client,
) (filename string, mimeType string, err error) {
	// Step 1: Get the collection key from cache (should be pre-populated from dashboard API response)
	collectionKey, exists := collectionKeyCache[cloudFile.CollectionID]
	if !exists {
		// Collection key was not provided by the dashboard API - this shouldn't happen
		// but we log a warning for debugging
		a.logger.Warn("Collection key not found in cache - dashboard API should have provided it",
			zap.String("collection_id", cloudFile.CollectionID),
			zap.String("file_id", cloudFile.ID))
		return "", "", fmt.Errorf("collection key not available for collection %s", cloudFile.CollectionID)
	}
	// Step 2: Get the file's encrypted_file_key
	// First try using the dashboard data, but if empty, fetch from the file endpoint directly
	var fileKeyCiphertext, fileKeyNonce []byte
	if cloudFile.EncryptedFileKey.Ciphertext != "" && cloudFile.EncryptedFileKey.Nonce != "" {
		// Use data from dashboard response
		var decodeErr error
		fileKeyCiphertext, decodeErr = tryDecodeBase64(cloudFile.EncryptedFileKey.Ciphertext)
		if decodeErr != nil {
			return "", "", fmt.Errorf("failed to decode file key ciphertext: %w", decodeErr)
		}
		fileKeyNonce, decodeErr = tryDecodeBase64(cloudFile.EncryptedFileKey.Nonce)
		if decodeErr != nil {
			return "", "", fmt.Errorf("failed to decode file key nonce: %w", decodeErr)
		}
	} else {
		// Dashboard response has empty encrypted_file_key, fetch from file endpoint
		// This endpoint properly deserializes the encrypted_file_key through the repository
		a.logger.Debug("Dashboard encrypted_file_key is empty, fetching from file endpoint",
			zap.String("file_id", cloudFile.ID))
		fetched, fetchErr := apiClient.GetFile(a.ctx, cloudFile.ID)
		if fetchErr != nil {
			return "", "", fmt.Errorf("failed to fetch file %s: %w", cloudFile.ID, fetchErr)
		}
		if fetched.EncryptedFileKey.Ciphertext == "" || fetched.EncryptedFileKey.Nonce == "" {
			return "", "", fmt.Errorf("file endpoint also returned empty encrypted_file_key for file %s", cloudFile.ID)
		}
		var decodeErr error
		fileKeyCiphertext, decodeErr = tryDecodeBase64(fetched.EncryptedFileKey.Ciphertext)
		if decodeErr != nil {
			return "", "", fmt.Errorf("failed to decode file key ciphertext from file endpoint: %w", decodeErr)
		}
		fileKeyNonce, decodeErr = tryDecodeBase64(fetched.EncryptedFileKey.Nonce)
		if decodeErr != nil {
			return "", "", fmt.Errorf("failed to decode file key nonce from file endpoint: %w", decodeErr)
		}
	}
	// Handle combined ciphertext format for file key (nonce may be prepended)
	actualFileKeyCiphertext := extractActualCiphertext(fileKeyCiphertext, fileKeyNonce)
	fileKey, err := e2ee.DecryptFileKey(&e2ee.EncryptedKey{
		Ciphertext: actualFileKeyCiphertext,
		Nonce:      fileKeyNonce,
	}, collectionKey)
	if err != nil {
		return "", "", fmt.Errorf("failed to decrypt file key: %w", err)
	}
	// Step 3: Decrypt the file metadata with the file key
	// Use tryDecodeBase64 to handle multiple base64 encoding formats
	encryptedMetadataBytes, err := tryDecodeBase64(cloudFile.EncryptedMetadata)
	if err != nil {
		return "", "", fmt.Errorf("failed to decode encrypted metadata: %w", err)
	}
	// Split nonce and ciphertext from the combined metadata (auto-detect nonce size)
	metadataNonce, metadataCiphertext, err := e2ee.SplitNonceAndCiphertextAuto(encryptedMetadataBytes)
	if err != nil {
		return "", "", fmt.Errorf("failed to split metadata nonce/ciphertext: %w", err)
	}
	decryptedMetadata, err := e2ee.DecryptWithAlgorithm(metadataCiphertext, metadataNonce, fileKey)
	if err != nil {
		return "", "", fmt.Errorf("failed to decrypt metadata: %w", err)
	}
	// Step 4: Parse the decrypted metadata JSON (only name/mime_type are used)
	var metadata struct {
		Name     string `json:"name"`
		MimeType string `json:"mime_type"`
	}
	if err := json.Unmarshal(decryptedMetadata, &metadata); err != nil {
		return "", "", fmt.Errorf("failed to parse metadata JSON: %w", err)
	}
	return metadata.Name, metadata.MimeType, nil
}
// truncateForLog shortens s to at most maxLen bytes for log output, appending
// "..." when anything was removed.
//
// Bug fix: the previous byte-slice truncation could split a multi-byte UTF-8
// rune, emitting an invalid sequence into the logs, and panicked on a
// negative maxLen. The cut point is now backed off to the nearest rune
// boundary and maxLen is clamped to zero.
func truncateForLog(s string, maxLen int) string {
	if len(s) <= maxLen {
		return s
	}
	if maxLen < 0 {
		maxLen = 0
	}
	// Walk back over UTF-8 continuation bytes so we never split a rune.
	cut := maxLen
	for cut > 0 && !utf8.RuneStart(s[cut]) {
		cut--
	}
	return s[:cut] + "..."
}