# FileCatalyst Workload Automation (May 2026)

```bash
# Retry up to 3 times
RETRIES=3
for i in $(seq 1 $RETRIES); do
  fta-cli --put critical_file.dat --target /incoming/ && break || sleep 10
done
```

| Tool | Integration Method |
|------|--------------------|
| Apache Airflow | Use BashOperator with fta-cli or SimpleHttpOperator for REST API |
| Jenkins | Execute shell script step calling fta-cli |
| Rundeck | Create a job step: "Command" → fta-cli ... |
| Control-M | FileCatalyst provides a Control-M plugin (File Transfer Hub) |
| Apache NiFi | Use ExecuteProcess processor to call fta-cli |

```python
import requests
import time

API_BASE = "http://fc-server:8080/api"
API_KEY = "your-api-key"

def run_transfer(local_path, remote_path):
    payload = {
        "source": local_path,
        "destination": remote_path,
        "server": "destination-host",
        "username": "transfer_user",
        "password": "secret",
    }
```
<!-- NOTE: the rest of run_transfer (the request/polling logic) is missing from the source page -->

```powershell
# PowerShell example
$md5 = (Get-FileHash "data.bin" -Algorithm MD5).Hash
if ($md5 -eq "expected_hash") {
    fta-cli --put data.bin --target /secure/
} else {
    Write-EventLog -LogName Application -Source FileCatalyst -EntryType Error -EventId 100 -Message "Hash mismatch"
}
```

```bash
# Send 10 files in parallel
ls /data/to_send/*.dat | xargs -P 10 -I {} fta-cli --put {} --target /remote/
```

Check the file hash before transfer.