...
```sh
#!/bin/sh
#
# This script copies recent DataCore Swarm logs (castor, cloudgateway, haproxy) to a
# bucket configured to temporarily and privately make logs accessible to Support staff
# without relying on techsupport-bundle-grab.sh and uploading bundles to a ticket.
#
# Copy this into /etc/cron.hourly/dcsupport-upload-logs and verify "systemctl status crond".
# The keys, endpoint, and bucket will be provided by DataCore Support. They are embedded in
# this self-contained script so it does not rely on ~/.rclone.conf.

S3_ENDPOINT=https://customer-demo.cloud.datacore.com
BUCKET=logs
# Expires 2024-05-15
S3_ACCESS_KEY=ec1246520a9574bf278d376732abcfc7
S3_SECRET_KEY=secret
# Set higher if you want all current log files uploaded
MAX_AGE=7d
SYSLOG_HOST=127.0.0.1

# Use "timeout" to prevent multiple copies of rclone from running at the same time
timeout 55m rclone -vv copy --transfers 1 --s3-no-head --s3-upload-cutoff 1G --s3-chunk-size 100M \
    --max-age "${MAX_AGE}" --max-depth 1 \
    --include "cloudgateway_*.gz" --include "castor*.gz" --include "haproxy.log*.gz" /var/log/datacore/ \
    ":s3,provider=Other,endpoint='${S3_ENDPOINT}',access_key_id=${S3_ACCESS_KEY},secret_access_key=${S3_SECRET_KEY}:${BUCKET}"

EXITVALUE=$?
if [ $EXITVALUE -ne 0 ]; then
    # Logs to /var/log/messages
    logger -n "${SYSLOG_HOST}" -p user.notice -t dcsupport-upload-logs "ALERT: the DataCore support rclone cron job exited abnormally with [$EXITVALUE]"
fi
exit 0
```
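To sanity-check the configuration before waiting for the hourly cron run, you can execute the script once by hand and then list the bucket with the same inline connection-string syntax the script uses. This is a minimal sketch, assuming the endpoint, bucket, and keys above are the ones DataCore Support provided and are still valid:

```sh
# Run the cron script manually and watch the verbose rclone output
sh /etc/cron.hourly/dcsupport-upload-logs

# List what has landed in the bucket (same self-contained credentials, no ~/.rclone.conf)
rclone lsl ":s3,provider=Other,endpoint='https://customer-demo.cloud.datacore.com',access_key_id=ec1246520a9574bf278d376732abcfc7,secret_access_key=secret:logs"
```

If the listing is empty or the manual run fails, check /var/log/messages for the ALERT line written by logger.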
...