Merge pull request #2 from offen/mc-extra-flags

Allow passing custom arguments to minio client
Frederik Ring 2021-05-25 07:37:00 +02:00 committed by GitHub
commit 84afc43fd8
3 changed files with 16 additions and 5 deletions


@@ -52,6 +52,15 @@ AWS_S3_BUCKET_NAME="<xxx>"
# Backups can be encrypted using gpg in case a passphrase is given
# GPG_PASSPHRASE="<xxx>"
########### MINIO CLIENT CONFIGURATION
# Pass these additional flags to all MinIO client `mc` invocations.
# This can be used, for example, to pass `--insecure` when using
# self-signed certificates, or `--debug` to gain insight into
# unexpected behavior.
# MC_GLOBAL_OPTIONS="<xxx>"
```
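
For instance, to accept a self-signed certificate on the storage endpoint, the variable could be set like this (an illustrative value, not part of this change):

```
MC_GLOBAL_OPTIONS="--insecure"
```
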
## Example in a docker-compose setup


@@ -58,7 +58,7 @@ fi
if [ ! -z "$AWS_S3_BUCKET_NAME" ]; then
info "Uploading backup to remote storage"
echo "Will upload to bucket \"$AWS_S3_BUCKET_NAME\"."
mc cp $MC_GLOBAL_OPTIONS "$BACKUP_FILENAME" "backup-target/$AWS_S3_BUCKET_NAME"
echo "Upload finished." echo "Upload finished."
fi fi
@@ -76,14 +76,14 @@ if [ ! -z "$BACKUP_RETENTION_DAYS" ]; then
sleep "$BACKUP_PRUNING_LEEWAY" sleep "$BACKUP_PRUNING_LEEWAY"
bucket=$AWS_S3_BUCKET_NAME bucket=$AWS_S3_BUCKET_NAME
rule_applies_to=$(mc rm $MC_GLOBAL_OPTIONS --fake --recursive -force --older-than "${BACKUP_RETENTION_DAYS}d" "backup-target/$bucket" | wc -l)
if [ "$rule_applies_to" == "0" ]; then if [ "$rule_applies_to" == "0" ]; then
echo "No backups found older than the configured retention period of $BACKUP_RETENTION_DAYS days." echo "No backups found older than the configured retention period of $BACKUP_RETENTION_DAYS days."
echo "Doing nothing." echo "Doing nothing."
exit 0 exit 0
fi fi
total=$(mc ls $MC_GLOBAL_OPTIONS "backup-target/$bucket" | wc -l)
if [ "$rule_applies_to" == "$total" ]; then if [ "$rule_applies_to" == "$total" ]; then
echo "Using a retention of ${BACKUP_RETENTION_DAYS} days would prune all currently existing backups, will not continue." echo "Using a retention of ${BACKUP_RETENTION_DAYS} days would prune all currently existing backups, will not continue."
@@ -91,6 +91,6 @@ if [ ! -z "$BACKUP_RETENTION_DAYS" ]; then
exit 1
fi
mc rm $MC_GLOBAL_OPTIONS --recursive -force --older-than "${BACKUP_RETENTION_DAYS}d" "backup-target/$bucket"
echo "Successfully pruned ${rule_applies_to} backups older than ${BACKUP_RETENTION_DAYS} days." echo "Successfully pruned ${rule_applies_to} backups older than ${BACKUP_RETENTION_DAYS} days."
fi fi
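
Note that `$MC_GLOBAL_OPTIONS` is deliberately expanded unquoted: the shell word-splits the value, so several space-separated flags arrive at `mc` as separate arguments. A minimal sketch with assumed values (flags and bucket name are illustrative):

```
MC_GLOBAL_OPTIONS="--insecure --debug"
# After word-splitting, the line below runs as:
#   mc ls --insecure --debug "backup-target/my-bucket" | wc -l
total=$(mc ls $MC_GLOBAL_OPTIONS "backup-target/my-bucket" | wc -l)
```
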


@@ -21,11 +21,13 @@ AWS_S3_BUCKET_NAME="${AWS_S3_BUCKET_NAME:-}"
AWS_ENDPOINT="${AWS_ENDPOINT:-s3.amazonaws.com}"
GPG_PASSPHRASE="${GPG_PASSPHRASE:-}"
MC_GLOBAL_OPTIONS="${MC_GLOBAL_OPTIONS:-}"
EOF
chmod a+x env.sh
source env.sh
mc $MC_GLOBAL_OPTIONS alias set backup-target "https://$AWS_ENDPOINT" "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY"
# Add our cron entry, and direct stdout & stderr to Docker commands stdout
echo "Installing cron.d entry with expression $BACKUP_CRON_EXPRESSION."