Initial commit: Open sourcing all of the Maple Open Technologies code.
This commit is contained in:
commit
755d54a99d
2010 changed files with 448675 additions and 0 deletions
451
native/desktop/maplefile/internal/app/app_export.go
Normal file
451
native/desktop/maplefile/internal/app/app_export.go
Normal file
|
|
@ -0,0 +1,451 @@
|
|||
package app
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
sysRuntime "runtime"
|
||||
"strings"
|
||||
"time"
|
||||
"unicode"
|
||||
|
||||
"github.com/wailsapp/wails/v2/pkg/runtime"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
// =============================================================================
|
||||
// EXPORT TYPES AND UTILITIES
|
||||
// =============================================================================
|
||||
|
||||
// ExportError represents an error that occurred during export.
// Instances are collected into ExportSummary.Errors so per-file failures
// can be reported after the export operation finishes.
type ExportError struct {
	FileID       string `json:"file_id"`       // ID of the file that failed to export
	Filename     string `json:"filename"`      // Filename for display purposes
	CollectionID string `json:"collection_id"` // Collection the file belongs to
	ErrorMessage string `json:"error_message"` // Description of what went wrong
	Timestamp    string `json:"timestamp"`     // When the error occurred (formatted string)
}
|
||||
|
||||
// ExportEstimate provides an estimate of what will be exported.
// Produced by GetExportEstimate before the user commits to an export.
type ExportEstimate struct {
	TotalCollections  int    `json:"total_collections"`  // Owned + shared collections combined
	OwnedCollections  int    `json:"owned_collections"`  // Collections the user owns
	SharedCollections int    `json:"shared_collections"` // Collections shared with the user
	TotalFiles        int    `json:"total_files"`        // Sum of file counts across all collections
	TotalSizeBytes    int64  `json:"total_size_bytes"`   // Sum of decrypted sizes of locally available files
	LocalFilesCount   int    `json:"local_files_count"`  // Files whose decrypted content is already on disk
	CloudOnlyCount    int    `json:"cloud_only_count"`   // Files that would need to be downloaded
	EstimatedTime     string `json:"estimated_time"`     // Human-readable duration estimate
}
|
||||
|
||||
// UserProfileExport represents exported user profile data.
// Optional fields (phone, country, timezone) are omitted from the JSON
// output when empty.
type UserProfileExport struct {
	ID         string `json:"id"`
	Email      string `json:"email"`
	FirstName  string `json:"first_name"`
	LastName   string `json:"last_name"`
	Name       string `json:"name"`
	Phone      string `json:"phone,omitempty"`
	Country    string `json:"country,omitempty"`
	Timezone   string `json:"timezone,omitempty"`
	CreatedAt  string `json:"created_at"`  // Account creation time (formatted string)
	ExportedAt string `json:"exported_at"` // When this export snapshot was taken
}
|
||||
|
||||
// CollectionExportData represents a single collection in the export.
type CollectionExportData struct {
	ID             string `json:"id"`
	Name           string `json:"name"`
	CollectionType string `json:"collection_type"`
	ParentID       string `json:"parent_id,omitempty"` // Empty for top-level collections
	FileCount      int    `json:"file_count"`
	CreatedAt      string `json:"created_at"`
	ModifiedAt     string `json:"modified_at"`
	IsShared       bool   `json:"is_shared"` // Distinguishes shared from owned collections
}
|
||||
|
||||
// CollectionsExport represents all exported collections, split into those
// the user owns and those shared with them.
type CollectionsExport struct {
	OwnedCollections  []*CollectionExportData `json:"owned_collections"`
	SharedCollections []*CollectionExportData `json:"shared_collections"`
	TotalCount        int                     `json:"total_count"` // presumably owned + shared combined — confirm at the write site
	ExportedAt        string                  `json:"exported_at"`
}
|
||||
|
||||
// FileExportData represents a single file's metadata in the export.
// The collection name is denormalized alongside the ID so exported
// metadata is readable without cross-referencing collections.
type FileExportData struct {
	ID             string `json:"id"`
	Filename       string `json:"filename"`
	MimeType       string `json:"mime_type"`
	SizeBytes      int64  `json:"size_bytes"`
	CreatedAt      string `json:"created_at"`
	ModifiedAt     string `json:"modified_at"`
	CollectionID   string `json:"collection_id"`
	CollectionName string `json:"collection_name"`
}
|
||||
|
||||
// FilesMetadataExport represents all exported file metadata.
type FilesMetadataExport struct {
	Files      []*FileExportData `json:"files"`
	TotalCount int               `json:"total_count"`
	TotalSize  int64             `json:"total_size_bytes"` // Aggregate size in bytes of all listed files
	ExportedAt string            `json:"exported_at"`
}
|
||||
|
||||
// FileExportResult represents the result of exporting a single file.
type FileExportResult struct {
	FileID     string `json:"file_id"`
	Filename   string `json:"filename"`
	SourceType string `json:"source_type"` // NOTE(review): likely local-copy vs. cloud-download (cf. ExportSummary counters) — confirm producer
	SizeBytes  int64  `json:"size_bytes"`
	DestPath   string `json:"dest_path"` // Where the file was written inside the export directory
	Success    bool   `json:"success"`
	// ErrorMessage is populated only when Success is false.
	ErrorMessage string `json:"error_message,omitempty"`
}
|
||||
|
||||
// ExportSummary is the final summary of the export operation, returned to
// the UI once all collections and files have been processed.
type ExportSummary struct {
	ExportedAt        string        `json:"exported_at"`
	ExportPath        string        `json:"export_path"` // Root directory the export was written to
	TotalCollections  int           `json:"total_collections"`
	OwnedCollections  int           `json:"owned_collections"`
	SharedCollections int           `json:"shared_collections"`
	TotalFiles        int           `json:"total_files"`
	FilesExported     int           `json:"files_exported"`
	FilesCopiedLocal  int           `json:"files_copied_local"` // Files copied from local decrypted storage
	FilesDownloaded   int           `json:"files_downloaded"`   // Files fetched from the cloud during export
	FilesFailed       int           `json:"files_failed"`
	TotalSizeBytes    int64         `json:"total_size_bytes"`
	Errors            []ExportError `json:"errors,omitempty"` // Per-file failure details; empty on full success
}
|
||||
|
||||
// =============================================================================
|
||||
// EXPORT SETUP OPERATIONS
|
||||
// =============================================================================
|
||||
|
||||
// SelectExportDirectory opens a dialog for the user to select an export directory
|
||||
func (a *Application) SelectExportDirectory() (string, error) {
|
||||
// Get user's home directory as default
|
||||
homeDir, err := os.UserHomeDir()
|
||||
if err != nil {
|
||||
homeDir = ""
|
||||
}
|
||||
|
||||
dir, err := runtime.OpenDirectoryDialog(a.ctx, runtime.OpenDialogOptions{
|
||||
DefaultDirectory: homeDir,
|
||||
Title: "Select Export Directory",
|
||||
CanCreateDirectories: true,
|
||||
ShowHiddenFiles: false,
|
||||
TreatPackagesAsDirectories: false,
|
||||
})
|
||||
if err != nil {
|
||||
a.logger.Error("Failed to open directory dialog", zap.Error(err))
|
||||
return "", fmt.Errorf("failed to open directory dialog: %w", err)
|
||||
}
|
||||
|
||||
return dir, nil
|
||||
}
|
||||
|
||||
// GetExportEstimate returns an estimate of what will be exported
|
||||
func (a *Application) GetExportEstimate() (*ExportEstimate, error) {
|
||||
a.logger.Info("Getting export estimate")
|
||||
|
||||
// Get current session
|
||||
session, err := a.authService.GetCurrentSession(a.ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("not authenticated: %w", err)
|
||||
}
|
||||
|
||||
if !session.IsValid() {
|
||||
return nil, fmt.Errorf("session expired - please log in again")
|
||||
}
|
||||
|
||||
apiClient := a.authService.GetAPIClient()
|
||||
apiClient.SetTokens(session.AccessToken, session.RefreshToken)
|
||||
|
||||
// Get dashboard for storage stats
|
||||
dashResp, err := apiClient.GetDashboard(a.ctx)
|
||||
if err != nil {
|
||||
a.logger.Warn("Failed to get dashboard for estimate", zap.Error(err))
|
||||
}
|
||||
|
||||
// Get owned collections
|
||||
ownedCollections, err := a.ListCollections()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to list owned collections: %w", err)
|
||||
}
|
||||
|
||||
// Get shared collections
|
||||
sharedCollections, err := a.listSharedCollections()
|
||||
if err != nil {
|
||||
a.logger.Warn("Failed to list shared collections", zap.Error(err))
|
||||
sharedCollections = []*CollectionData{}
|
||||
}
|
||||
|
||||
// Count files and check local availability
|
||||
totalFiles := 0
|
||||
localFilesCount := 0
|
||||
cloudOnlyCount := 0
|
||||
var totalSizeBytes int64 = 0
|
||||
|
||||
allCollections := append(ownedCollections, sharedCollections...)
|
||||
for _, coll := range allCollections {
|
||||
totalFiles += coll.TotalFiles
|
||||
}
|
||||
|
||||
// Check local file repository for files with decrypted content available
|
||||
// We check for FilePath (decrypted file) since that's what we copy during export
|
||||
localFiles, err := a.mustGetFileRepo().List()
|
||||
if err == nil {
|
||||
for _, f := range localFiles {
|
||||
if f.FilePath != "" {
|
||||
localFilesCount++
|
||||
totalSizeBytes += f.DecryptedSizeInBytes
|
||||
}
|
||||
}
|
||||
}
|
||||
cloudOnlyCount = totalFiles - localFilesCount
|
||||
if cloudOnlyCount < 0 {
|
||||
cloudOnlyCount = 0
|
||||
}
|
||||
|
||||
// Note: Dashboard has storage in formatted units (e.g., "1.5 GB")
|
||||
// We use our calculated totalSizeBytes instead for accuracy
|
||||
_ = dashResp // Suppress unused variable warning if dashboard call failed
|
||||
|
||||
// Estimate time based on file count and sizes
|
||||
estimatedTime := "Less than a minute"
|
||||
if cloudOnlyCount > 0 {
|
||||
// Rough estimate: 1 file per second for cloud downloads
|
||||
seconds := cloudOnlyCount
|
||||
if seconds > 60 {
|
||||
minutes := seconds / 60
|
||||
if minutes > 60 {
|
||||
estimatedTime = fmt.Sprintf("About %d hours", minutes/60)
|
||||
} else {
|
||||
estimatedTime = fmt.Sprintf("About %d minutes", minutes)
|
||||
}
|
||||
} else {
|
||||
estimatedTime = fmt.Sprintf("About %d seconds", seconds)
|
||||
}
|
||||
}
|
||||
|
||||
estimate := &ExportEstimate{
|
||||
TotalCollections: len(allCollections),
|
||||
OwnedCollections: len(ownedCollections),
|
||||
SharedCollections: len(sharedCollections),
|
||||
TotalFiles: totalFiles,
|
||||
TotalSizeBytes: totalSizeBytes,
|
||||
LocalFilesCount: localFilesCount,
|
||||
CloudOnlyCount: cloudOnlyCount,
|
||||
EstimatedTime: estimatedTime,
|
||||
}
|
||||
|
||||
a.logger.Info("Export estimate calculated",
|
||||
zap.Int("total_collections", estimate.TotalCollections),
|
||||
zap.Int("total_files", estimate.TotalFiles),
|
||||
zap.Int("local_files", estimate.LocalFilesCount),
|
||||
zap.Int("cloud_only", estimate.CloudOnlyCount))
|
||||
|
||||
return estimate, nil
|
||||
}
|
||||
|
||||
// CreateExportDirectory creates the export directory with timestamp
|
||||
func (a *Application) CreateExportDirectory(basePath string) (string, error) {
|
||||
timestamp := time.Now().Format("2006-01-02_15-04-05")
|
||||
exportDir := filepath.Join(basePath, fmt.Sprintf("MapleFile_Export_%s", timestamp))
|
||||
|
||||
if err := os.MkdirAll(exportDir, 0755); err != nil {
|
||||
return "", fmt.Errorf("failed to create export directory: %w", err)
|
||||
}
|
||||
|
||||
// Create subdirectories
|
||||
filesDir := filepath.Join(exportDir, "files")
|
||||
if err := os.MkdirAll(filesDir, 0755); err != nil {
|
||||
return "", fmt.Errorf("failed to create files directory: %w", err)
|
||||
}
|
||||
|
||||
return exportDir, nil
|
||||
}
|
||||
|
||||
// OpenExportFolder opens the export folder in the system file manager
|
||||
func (a *Application) OpenExportFolder(path string) error {
|
||||
// Security: Validate the path before passing to exec.Command
|
||||
if path == "" {
|
||||
return fmt.Errorf("path cannot be empty")
|
||||
}
|
||||
|
||||
// Get absolute path and clean it
|
||||
absPath, err := filepath.Abs(path)
|
||||
if err != nil {
|
||||
return fmt.Errorf("invalid path: %w", err)
|
||||
}
|
||||
absPath = filepath.Clean(absPath)
|
||||
|
||||
// Verify the path exists and is a directory
|
||||
info, err := os.Stat(absPath)
|
||||
if err != nil {
|
||||
if os.IsNotExist(err) {
|
||||
return fmt.Errorf("path does not exist: %s", absPath)
|
||||
}
|
||||
return fmt.Errorf("failed to access path: %w", err)
|
||||
}
|
||||
|
||||
if !info.IsDir() {
|
||||
return fmt.Errorf("path is not a directory: %s", absPath)
|
||||
}
|
||||
|
||||
a.logger.Info("Opening export folder",
|
||||
zap.String("path", absPath))
|
||||
|
||||
var cmd *exec.Cmd
|
||||
switch sysRuntime.GOOS {
|
||||
case "darwin":
|
||||
cmd = exec.Command("open", absPath)
|
||||
case "windows":
|
||||
cmd = exec.Command("explorer", absPath)
|
||||
case "linux":
|
||||
cmd = exec.Command("xdg-open", absPath)
|
||||
default:
|
||||
return fmt.Errorf("unsupported operating system: %s", sysRuntime.GOOS)
|
||||
}
|
||||
|
||||
return cmd.Start()
|
||||
}
|
||||
|
||||
// =============================================================================
|
||||
// HELPER FUNCTIONS
|
||||
// =============================================================================
|
||||
|
||||
// sanitizeFilename removes or replaces characters that are invalid in filenames.
|
||||
// This function provides defense-in-depth against path traversal attacks by:
|
||||
// 1. Extracting only the base filename (removing any path components)
|
||||
// 2. Handling special directory references (. and ..)
|
||||
// 3. Removing control characters
|
||||
// 4. Replacing invalid filesystem characters
|
||||
// 5. Handling Windows reserved names
|
||||
// 6. Limiting filename length
|
||||
func sanitizeFilename(name string) string {
|
||||
// Step 1: Extract only the base filename to prevent path traversal
|
||||
// This handles cases like "../../../etc/passwd" -> "passwd"
|
||||
name = filepath.Base(name)
|
||||
|
||||
// Step 2: Handle special directory references
|
||||
if name == "." || name == ".." || name == "" {
|
||||
return "unnamed"
|
||||
}
|
||||
|
||||
// Step 3: Trim leading/trailing whitespace and dots
|
||||
// Windows doesn't allow filenames ending with dots or spaces
|
||||
name = strings.TrimSpace(name)
|
||||
name = strings.Trim(name, ".")
|
||||
|
||||
if name == "" {
|
||||
return "unnamed"
|
||||
}
|
||||
|
||||
// Step 4: Remove control characters (ASCII 0-31)
|
||||
result := make([]rune, 0, len(name))
|
||||
for _, r := range name {
|
||||
if r < 32 || !unicode.IsPrint(r) {
|
||||
continue // Skip control characters
|
||||
}
|
||||
result = append(result, r)
|
||||
}
|
||||
name = string(result)
|
||||
|
||||
// Step 5: Replace invalid filesystem characters
|
||||
// These are invalid on Windows: \ / : * ? " < > |
|
||||
// Forward/back slashes are also dangerous for path traversal
|
||||
replacer := map[rune]rune{
|
||||
'/': '-',
|
||||
'\\': '-',
|
||||
':': '-',
|
||||
'*': '-',
|
||||
'?': '-',
|
||||
'"': '\'',
|
||||
'<': '(',
|
||||
'>': ')',
|
||||
'|': '-',
|
||||
}
|
||||
|
||||
result = make([]rune, 0, len(name))
|
||||
for _, r := range name {
|
||||
if replacement, ok := replacer[r]; ok {
|
||||
result = append(result, replacement)
|
||||
} else {
|
||||
result = append(result, r)
|
||||
}
|
||||
}
|
||||
name = string(result)
|
||||
|
||||
// Step 6: Handle Windows reserved names
|
||||
// These names are reserved regardless of extension: CON, PRN, AUX, NUL,
|
||||
// COM1-COM9, LPT1-LPT9
|
||||
upperName := strings.ToUpper(name)
|
||||
// Extract name without extension for comparison
|
||||
nameWithoutExt := upperName
|
||||
if idx := strings.LastIndex(upperName, "."); idx > 0 {
|
||||
nameWithoutExt = upperName[:idx]
|
||||
}
|
||||
|
||||
reservedNames := map[string]bool{
|
||||
"CON": true, "PRN": true, "AUX": true, "NUL": true,
|
||||
"COM1": true, "COM2": true, "COM3": true, "COM4": true,
|
||||
"COM5": true, "COM6": true, "COM7": true, "COM8": true, "COM9": true,
|
||||
"LPT1": true, "LPT2": true, "LPT3": true, "LPT4": true,
|
||||
"LPT5": true, "LPT6": true, "LPT7": true, "LPT8": true, "LPT9": true,
|
||||
}
|
||||
|
||||
if reservedNames[nameWithoutExt] {
|
||||
name = "_" + name
|
||||
}
|
||||
|
||||
// Step 7: Limit filename length
|
||||
// Most filesystems support 255 bytes; we use 200 to leave room for path
|
||||
const maxFilenameLength = 200
|
||||
if len(name) > maxFilenameLength {
|
||||
// Try to preserve the extension
|
||||
ext := filepath.Ext(name)
|
||||
if len(ext) < maxFilenameLength-10 {
|
||||
nameWithoutExt := name[:len(name)-len(ext)]
|
||||
if len(nameWithoutExt) > maxFilenameLength-len(ext) {
|
||||
nameWithoutExt = nameWithoutExt[:maxFilenameLength-len(ext)]
|
||||
}
|
||||
name = nameWithoutExt + ext
|
||||
} else {
|
||||
name = name[:maxFilenameLength]
|
||||
}
|
||||
}
|
||||
|
||||
// Final check
|
||||
if name == "" {
|
||||
return "unnamed"
|
||||
}
|
||||
|
||||
return name
|
||||
}
|
||||
|
||||
// copyFile copies a file from src to dst
|
||||
func copyFile(src, dst string) error {
|
||||
sourceFile, err := os.Open(src)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer sourceFile.Close()
|
||||
|
||||
destFile, err := os.Create(dst)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer destFile.Close()
|
||||
|
||||
_, err = io.Copy(destFile, sourceFile)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return destFile.Sync()
|
||||
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue