FileCatalyst Profiles
# Python SDK
from filecatalyst import Profile, Orchestrator
Profile: "Video_Transcode_Sync"
├── Transfers (24h): 1,247
├── Total Data: 3.2 TB
├── Avg Speed: 245 Mbps
├── Success Rate: 99.87%
├── Bottleneck: Disk I/O on source (42% of delay)
└── Recommendations:
    • Enable local caching on source
    • Increase thread count from 4 to 8

Advanced rule engine for file selection:
# Include only if ALL conditions match
include_if:
  extension: [".mp4", ".mov", ".mxf"]
  min_size: "10MB"
  max_size: "50GB"
  regex: "/project_*/final_cut/*"
  metadata:
    bitrate: "> 5000"
    codec: ["h264", "hevc"]

FileCatalyst profiles
# CLI examples
filecatalyst profile create --from-template backup \
  --source s3://my-bucket/ \
  --dest /backup/ \
  --schedule "0 3 * * *"

filecatalyst profile apply --profile marketing_sync --override bandwidth=200Mbps
notifications:
  on_success: "slack:#backups - Success: size transferred"
  on_failure:
    - "pagerduty:Database Backup Failed"
    - "email:db-admin@company.com"

# Python SDK
from filecatalyst import Profile, Orchestrator
source:
  type: "postgresql"
  connection: "pg://backup-user@primary/db"
  dump_before_transfer: true
transfer:
  adaptive_bandwidth: true
  min_bandwidth: "20Mbps"
  max_bandwidth: "200Mbps"
  compression: "zstd"
  encryption: "AES-256-GCM"
  parallel_chunks: 8
  verify_checksum: "SHA-256"
  # NOTE(review): the source text was truncated here ('metadata: "bitrate": ">');
  # reconstructed from the parallel rule on the include_if snippet — confirm.
  metadata:
    bitrate: "> 5000"
# Exclude patterns (globs must be quoted — a bare * is a YAML alias sigil)
exclude:
  - "*.tmp"
  - "*.partial"
  - "/.*/"  # hidden files

Per-profile bandwidth management with priority queuing:
