diff --git a/SETUP.md b/SETUP.md
index 21ded55..e34312e 100644
--- a/SETUP.md
+++ b/SETUP.md
@@ -602,6 +602,8 @@ To access mqtt channel, user needs credentials to access it.
    # mosquitto_passwd -c /etc/mosquitto/credentials/passwd
    Password:
    Reenter password:
+
+   # chmod 644 /etc/mosquitto/credentials/passwd
    ```
 3. Close the connection to mqtts (Ctrl+D).
diff --git a/apiserver/Dockerfile b/apiserver/Dockerfile
index 0c6c98f..9f6d64d 100644
--- a/apiserver/Dockerfile
+++ b/apiserver/Dockerfile
@@ -4,7 +4,7 @@
 # Build the APISERVER using phusion base image
-FROM phusion/baseimage:master-amd64
+FROM phusion/baseimage:jammy-1.0.1
 # Enabling SSH service
 RUN rm -f /etc/service/sshd/down
diff --git a/cron-backup/Dockerfile b/backup/Dockerfile
old mode 100755
new mode 100644
similarity index 71%
rename from cron-backup/Dockerfile
rename to backup/Dockerfile
index 9f5b137..5f58303
--- a/cron-backup/Dockerfile
+++ b/backup/Dockerfile
@@ -7,24 +7,26 @@
 # 5. mongodb
 # To find the version of installed Mongodb service
-FROM mongo:latest AS mongodb
+FROM mongo:5.0.11 AS mongodb
 RUN env | grep MON > /root/env
 # Building cron-backup instance
-FROM phusion/baseimage:master-amd64
+FROM phusion/baseimage:jammy-1.0.1
 # Copying mongodb's version
 COPY --from=mongodb /root/env /root/env
-# Installing same Mongodb's tools as in the copied version here in the cron-backup instance
 RUN set -x \
    && export $(xargs < /root/env) \
-   && echo "deb http://$MONGO_REPO/apt/ubuntu focal/${MONGO_PACKAGE%-unstable}/$MONGO_MAJOR multiverse" | tee "/etc/apt/sources.list.d/${MONGO_PACKAGE%-unstable}.list" \
-   && apt-key adv --keyserver keyserver.ubuntu.com --recv-keys B00A0BD1E2C63C11 \
-   && export DEBIAN_FRONTEND=noninteractive && apt-get update && ln -s /bin/true /usr/local/bin/systemctl && apt-get install -y \
-   ${MONGO_PACKAGE}=$MONGO_VERSION \
-   ${MONGO_PACKAGE}-tools=$MONGO_VERSION
-
+   && echo "deb http://security.ubuntu.com/ubuntu focal-security main" | tee /etc/apt/sources.list.d/focal-security.list \
+   && apt-get update \
+   && apt-get install -y gpg curl \
+   && curl -fsSL https://pgp.mongodb.com/server-7.0.asc | \
+      gpg -o /usr/share/keyrings/mongodb-server-7.0.gpg \
+      --dearmor \
+   && echo "deb [ arch=amd64,arm64 signed-by=/usr/share/keyrings/mongodb-server-7.0.gpg ] https://repo.mongodb.org/apt/ubuntu jammy/mongodb-org/7.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-7.0.list \
+   && apt-get update \
+   && apt-get install -y mongodb-org mongodb-org-database mongodb-org-server mongodb-org-shell mongodb-org-mongos mongodb-org-tools
 # some basic package installation for troubleshooting
 RUN apt-get update && apt-get install -y \
@@ -80,16 +81,16 @@ RUN chmod +x /bin/nginx_backup.sh
 COPY mqtts_backup.sh /bin/mqtts_backup.sh
 RUN chmod +x /bin/mqtts_backup.sh
+# Scheduler service that runs the backup scripts at fixed times (see startup.sh)
+COPY startup.sh /etc/service/startup/run
+RUN chmod +x /etc/service/startup/run
+
 # Backup script for mongodb
 COPY mongodb_backup.sh /bin/mongodb_backup.sh
 RUN chmod +x /bin/mongodb_backup.sh
+
 # Start the postfix daemon during container startup
+RUN mkdir -p /etc/my_init.d
 COPY postfix.sh /etc/my_init.d/postfix.sh
 RUN chmod +x /etc/my_init.d/postfix.sh
-# To Enable crontab
-RUN mkdir -p /etc/my_init.d
-COPY cron.sh /etc/my_init.d/cron.sh
-RUN chmod +x /etc/my_init.d/cron.sh
 # end of file
diff --git a/backup/README.md b/backup/README.md
new file mode 100644
index 0000000..f859142
--- /dev/null
+++ b/backup/README.md
@@ -0,0 +1,56 @@
+# [backup](../backup) Docker Container Usage
+
+This instance provides backup support for the `Nginx`, `Node-red`, `Grafana` and `Mqtts` containers and pushes the backed-up data to S3-compatible storage.
+
+## Shell script
+
+For backing up the directory data:
+
+- It uses [`grafana_backup.sh`](./grafana_backup.sh) for the `Grafana` container.
+- It uses [`nodered_backup.sh`](./nodered_backup.sh) for the `Node-red` container.
+- It uses [`nginx_backup.sh`](./nginx_backup.sh) for the `Nginx` container.
+- It uses [`mqtts_backup.sh`](./mqtts_backup.sh) for the `Mqtts` container.
+
+## Scheduling backups with a daemon process
+
+The backup jobs are scheduled by [`startup.sh`](./startup.sh), which runs as a service inside the container and fires each job at a fixed time of day:
+
+``` bash
+
+# Scheduling loop (see startup.sh)
+while true
+do
+    HOUR="$(date +'%H')"
+    MINUTE="$(date +'%M')"
+
+    if [ "$HOUR" = "06" ] && [ "$MINUTE" = "35" ]
+    then
+        /bin/nodered_backup.sh
+        sleep 60
+    fi
+    if [ "$HOUR" = "07" ] && [ "$MINUTE" = "35" ]
+    then
+        /bin/grafana_backup.sh
+        sleep 60
+    fi
+    if [ "$HOUR" = "08" ] && [ "$MINUTE" = "35" ]
+    then
+        /bin/nginx_backup.sh
+        sleep 60
+    fi
+    if [ "$HOUR" = "09" ] && [ "$MINUTE" = "35" ]
+    then
+        /bin/mqtts_backup.sh
+        sleep 60
+    fi
+    if [ "$HOUR" = "10" ] && [ "$MINUTE" = "35" ]
+    then
+        /bin/mongodb_backup.sh
+        sleep 60
+    fi
+done
+```
+
+## Mail Alert
+
+The above backup shell scripts are configured to send mail on both successful and unsuccessful runs.
diff --git a/backup/grafana_backup.sh b/backup/grafana_backup.sh
new file mode 100644
index 0000000..f1cb98f
--- /dev/null
+++ b/backup/grafana_backup.sh
@@ -0,0 +1,144 @@
+#!/bin/bash
+#Purpose: Take a backup, upload it to the S3 bucket, and prune old data in the S3 bucket.
+#Version:v0.1
+#Created Date:2022-08-26
+#Modified Date:12-10-2022
+#Reviewer: Terry Moore.
+#Author: Shashi, VishnuNambi.
+
+a=$(date +%b)
+b=Mar
+c=Jun
+d=Sep
+e=Dec
+DATE1=$(date +%Y%m%d%H%M)
+DATE=$(date +%d-%m-%y_%H-%M)
+
+mkdir -p /var/lib/backup/grafana
+
+grafana_src='/grafana'
+
+if [ ! -d $grafana_src ]; then
+   {
+    echo "DATE:" "$DATE"
+    echo ""
+    echo "DESCRIPTION: ${SOURCE_NAME}_Grafana backup"
+    echo ""
+    echo "STATUS: Grafana backup failed"
+    echo ""
+    echo "The source backup directory: grafana_src is not available"
+   }>> /tmp/grafana.txt
+    < /tmp/grafana.txt mail -s "${SOURCE_NAME}: Grafana Data Backup" "${BACKUP_MAIL}"
+    exit
+else
+    tar cvzf /var/lib/backup/grafana/"${SOURCE_NAME}"_grafana_data_backup_"${DATE1}".tgz ${grafana_src}/
+fi
+
+# Moving the backup to S3 bucket (Daily backup)
+if s3cmd put -r --no-mime-magic /var/lib/backup/grafana/ s3://"${S3_BUCKET_GRAFANA}"/grafana/;
+then
+   {
+    echo "DATE:" "$DATE"
+    echo ""
+    echo "DESCRIPTION: ${SOURCE_NAME}_Grafana Daily backup"
+    echo ""
+    echo "STATUS: Grafana Daily backup succeeded."
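+    # Report body: list the bucket contents, keep only this host's grafana
+    # archives, trim the leading date/time columns, reverse so the newest
+    # entries come first, keep the ten most recent, and strip the bucket
+    # prefix so the mail shows bare object names.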
+ echo "" + echo "******* Grafana Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_GRAFANA}"/grafana/ --human-readable | grep -i "${SOURCE_NAME}"_grafana_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_GRAFANA}""\/,,g" &>> /tmp/grafana.txt + echo "" + echo "************** END **************************" + } >> /tmp/grafana.txt +else +{ echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Grafana Daily backup" + echo "" + echo "STATUS: Grafana Daily backup failed" + echo "" + echo "Something went wrong, please check it" + } >> /tmp/grafana.txt + < /tmp/grafana.txt mail -s "${SOURCE_NAME}: Grafana Data Backup" "${BACKUP_MAIL}" +fi + + +# Moving the backup to S3 bucket (Monthly backup) +if [ "$(date -d +1day +%d)" -eq 01 ]; then +if s3cmd put -r --no-mime-magic /var/lib/backup/grafana/ s3://"${S3_BUCKET_GRAFANA}"/monthly_backup/grafana/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Grafana Monthly backup" + echo "" + echo "STATUS: Grafana Monthly backup succeeded." + echo "" >> /tmp/grafana.txt + echo "******* Grafana Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_GRAFANA}"/monthly_backup/grafana/ --human-readable | grep -i "${SOURCE_NAME}"_grafana_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_GRAFANA}""/monthly_backup/grafana/\/,,g" &>> /tmp/grafana.txt + echo "" + echo "************** END **************************" + } >> /tmp/grafana.txt +else + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Grafana Monthly backup" + echo "" + echo "STATUS: Grafana Monthly backup failed" + echo "" + echo "Something went wrong, please check it" + }>> /tmp/grafana.txt + < /tmp/grafana.txt mail -s "${SOURCE_NAME}: Grafana Data Backup" "${BACKUP_MAIL}" +fi +fi + + +# Moving the backup to S3 bucket (Yearly backup) +if [ "$a" == "$b" ] || [ "$a" == "$c" ] || [ "$a" == "$d" ] || [ "$a" == "$e" ] && [ "$(date -d +1day +%d)" -eq 01 ]; then +if s3cmd put -r --no-mime-magic /var/lib/backup/grafana/ s3://"${S3_BUCKET_GRAFANA}"/yearly_backup/grafana/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Grafana Yearly backup" + echo "" + echo "STATUS: Grafana Yearly backup succeeded." 
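+    # Retention tiers used by this script:
+    #   grafana/                 - daily upload, pruned after 31 days
+    #   monthly_backup/grafana/  - uploaded on the last day of each month, pruned after 366 days
+    #   yearly_backup/grafana/   - uploaded on the last day of Mar/Jun/Sep/Dec, never pruned automatically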
+ echo "" + echo "******* Grafana Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_GRAFANA}"/yearly_backup/grafana/ --human-readable | grep -i "${SOURCE_NAME}"_grafana_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_GRAFANA}""/yearly_backup/grafana/\/,,g" &>> /tmp/grafana.txt + echo "" + echo "************** END **************************" + } >> /tmp/grafana.txt +else + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Grafana Yearly backup" + echo "" + echo "STATUS: Grafana Yearly backup failed" + echo "" + echo "Something went wrong, please check it" + }>> /tmp/grafana.txt + < /tmp/grafana.txt mail -s "${SOURCE_NAME}: Grafana Data Backup" "${BACKUP_MAIL}" +fi +fi + + +< /tmp/grafana.txt mail -s "${SOURCE_NAME}: Grafana Data Backup" "${BACKUP_MAIL}" + +# Remove the old backup data in local directory to avoid excessive storage use +find /var/lib/backup/grafana/ -type f -exec rm {} \; +rm /tmp/grafana.txt +###PRUNE### + +# prune the old backup data in S3 bucket to avoid excessive storage use(Daily backup) +s3cmd ls -r s3://"${S3_BUCKET_GRAFANA}"/grafana/ | awk -v DEL="$(date +%F -d "31 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done + + +if [ "$(date -d +1day +%d)" -eq 01 ]; then +# prune the old backup data in S3 bucket to avoid excessive storage use(Monthly backup) +s3cmd ls -r s3://"${S3_BUCKET_GRAFANA}"/monthly_backup/grafana/ | awk -v DEL="$(date +%F -d "366 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done +fi diff --git a/backup/mongodb_backup.sh b/backup/mongodb_backup.sh new file mode 100644 index 0000000..14dc310 --- /dev/null +++ b/backup/mongodb_backup.sh @@ -0,0 +1,78 @@ +#!/bin/bash +# Shell script for MongoDB backup with pruning and S3 strategy + +DATE1=$(date +%Y%m%d%H%M) +DATE=$(date +%d-%m-%y_%H-%M) +MONTH=$(date +%b) +backup_dir="/var/lib/backup/mongodb" +mongodb_backup_dir="/var/lib/mongodb-backup" +mongodb_data_dir="/root/mongodb_data" +report_file="/tmp/mongodbbackup.txt" + +mkdir -p $backup_dir $mongodb_backup_dir $mongodb_data_dir + +# Full MongoDB backup +mongodump --host mongodb:27017 --authenticationDatabase admin -u "$MONGO_INITDB_ROOT_USERNAME" -p "$MONGO_INITDB_ROOT_PASSWORD" -o $mongodb_backup_dir + +# List databases +mongosh --quiet --host mongodb:27017 --eval "printjson(db.adminCommand('listDatabases'))" \ + -u "$MONGO_INITDB_ROOT_USERNAME" -p "$MONGO_INITDB_ROOT_PASSWORD" \ + | grep -i name | awk -F'"' '{print $4}' > /mongo_dbs.txt + +# Backup listed databases +while read -r db; do + echo "Creating backup for $db" + mongodump --host mongodb:27017 --db "$db" --authenticationDatabase admin \ + -u "$MONGO_INITDB_ROOT_USERNAME" -p "$MONGO_INITDB_ROOT_PASSWORD" -o $mongodb_backup_dir +done < /mongo_dbs.txt + +# Archive backups +tar czf "$backup_dir/${SOURCE_NAME}_mongodb_db_backup_${DATE1}.tgz" $mongodb_backup_dir/. \ + && rsync -avr /var/lib/mongodb/ $mongodb_data_dir/ \ + && tar czf "$backup_dir/${SOURCE_NAME}_mongodb_data_backup_${DATE1}.tgz" $mongodb_data_dir/. + +# Upload backups to S3 (Daily Backup) +if s3cmd put -r --no-mime-magic $backup_dir/ s3://"${S3_BUCKET_MONGODB}"/daily_backup/; then + echo "Daily backup succeeded." >> $report_file +else + echo "Daily backup failed." >> $report_file +fi + +# Monthly Backup +if [ "$(date -d +1day +%d)" -eq 1 ]; then + if s3cmd put -r --no-mime-magic $backup_dir/ s3://"${S3_BUCKET_MONGODB}"/monthly_backup/; then + echo "Monthly backup succeeded." 
>> $report_file
+    else
+        echo "Monthly backup failed." >> $report_file
+    fi
+fi
+
+# Quarterly Backup
+if [[ "$MONTH" == "Mar" || "$MONTH" == "Jun" || "$MONTH" == "Sep" || "$MONTH" == "Dec" ]] && [ "$(date -d +1day +%d)" -eq 1 ]; then
+    if s3cmd put -r --no-mime-magic $backup_dir/ s3://"${S3_BUCKET_MONGODB}"/quarterly_backup/; then
+        echo "Quarterly backup succeeded." >> $report_file
+    else
+        echo "Quarterly backup failed." >> $report_file
+    fi
+fi
+
+# Prune old backups from S3
+# Daily Backup Prune (31 days)
+s3cmd ls -r s3://"${S3_BUCKET_MONGODB}"/daily_backup/ | \
+    awk -v DEL="$(date +%F -d "31 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done
+
+# Monthly Backup Prune (3 months)
+s3cmd ls -r s3://"${S3_BUCKET_MONGODB}"/monthly_backup/ | \
+    awk -v DEL="$(date +%F -d "3 months ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done
+
+# Quarterly Backup Prune (3 years)
+s3cmd ls -r s3://"${S3_BUCKET_MONGODB}"/quarterly_backup/ | \
+    awk -v DEL="$(date +%F -d "3 years ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done
+
+# Send report via email
+< $report_file mail -s "${SOURCE_NAME}: MongoDB Backup Report" "${BACKUP_MAIL}"
+
+# Clean up local backup data
+find $backup_dir $mongodb_data_dir $mongodb_backup_dir -type f -exec rm {} \;
+rm $report_file
+
diff --git a/backup/mqtts_backup.sh b/backup/mqtts_backup.sh
new file mode 100644
index 0000000..8225492
--- /dev/null
+++ b/backup/mqtts_backup.sh
@@ -0,0 +1,145 @@
+#!/bin/bash
+#Purpose: Take a backup, upload it to the S3 bucket, and prune old data in the S3 bucket.
+#Version:v0.1
+#Created Date:2022-08-26
+#Modified Date:12-10-2022
+#Reviewer: Terry Moore.
+#Author: Shashi, VishnuNambi.
+
+a=$(date +%b)
+b=Mar
+c=Jun
+d=Sep
+e=Dec
+DATE1=$(date +%Y%m%d%H%M)
+DATE=$(date +%d-%m-%y_%H-%M)
+
+
+mkdir -p /var/lib/backup/mqtts
+
+mqtts_src='/mqtts'
+
+if [ ! -d $mqtts_src ]; then
+   {
+    echo "DATE:" "$DATE"
+    echo ""
+    echo "DESCRIPTION: ${SOURCE_NAME}_Mqtts backup"
+    echo ""
+    echo "STATUS: Mqtts backup failed"
+    echo ""
+    echo "The source backup directory: mqtts_src is not available"
+   }>> /tmp/mqtts.txt
+    < /tmp/mqtts.txt mail -s "${SOURCE_NAME}: Mqtts Data Backup" "${BACKUP_MAIL}"
+    exit
+else
+    tar cvzf /var/lib/backup/mqtts/"${SOURCE_NAME}"_mqtts_data_backup_"${DATE1}".tgz ${mqtts_src}/
+fi
+
+# Moving the backup to S3 bucket (Daily backup)
+if s3cmd put -r --no-mime-magic /var/lib/backup/mqtts/ s3://"${S3_BUCKET_MQTTS}"/mqtts/;
+then
+   {
+    echo "DATE:" "$DATE"
+    echo ""
+    echo "DESCRIPTION: ${SOURCE_NAME}_Mqtts Daily backup"
+    echo ""
+    echo "STATUS: Mqtts Daily backup succeeded."
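+    # Reporting idiom used throughout: `< /tmp/mqtts.txt mail -s ...` feeds the
+    # accumulated report file to mail(1) on stdin, so the file becomes the body
+    # of the alert message.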
+ echo "" + echo "******* Mqtts Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_MQTTS}"/mqtts/ --human-readable | grep -i "${SOURCE_NAME}"_mqtts_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_MQTTS}""\/,,g" &>> /tmp/mqtts.txt + echo "" + echo "************** END **************************" + } >> /tmp/mqtts.txt +else +{ echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Mqtts Daily backup" + echo "" + echo "STATUS: Mqtts Daily backup failed" + echo "" + echo "Something went wrong, please check it" + } >> /tmp/mqtts.txt + < /tmp/mqtts.txt mail -s "${SOURCE_NAME}: Mqtts Data Backup" "${BACKUP_MAIL}" +fi + + +# Moving the backup to S3 bucket (Monthly backup) +if [ "$(date -d +1day +%d)" -eq 01 ]; then +if s3cmd put -r --no-mime-magic /var/lib/backup/mqtts/ s3://"${S3_BUCKET_MQTTS}"/monthly_backup/mqtts/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Mqtts Monthly backup" + echo "" + echo "STATUS: Mqtts Monthly backup succeeded." + echo "" >> /tmp/mqtts.txt + echo "******* Mqtts Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_MQTTS}"/monthly_backup/mqtts/ --human-readable | grep -i "${SOURCE_NAME}"_mqtts_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_MQTTS}""/monthly_backup/mqtts/\/,,g" &>> /tmp/mqtts.txt + echo "" + echo "************** END **************************" + } >> /tmp/mqtts.txt +else + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Mqtts Monthly backup" + echo "" + echo "STATUS: Mqtts Monthly backup failed" + echo "" + echo "Something went wrong, please check it" + }>> /tmp/mqtts.txt + < /tmp/mqtts.txt mail -s "${SOURCE_NAME}: Mqtts Data Backup" "${BACKUP_MAIL}" +fi +fi + + +# Moving the backup to S3 bucket (Yearly backup) +if [ "$a" == "$b" ] || [ "$a" == "$c" ] || [ "$a" == "$d" ] || [ "$a" == "$e" ] && [ "$(date -d +1day +%d)" -eq 01 ]; then +if s3cmd put -r --no-mime-magic /var/lib/backup/mqtts/ s3://"${S3_BUCKET_MQTTS}"/yearly_backup/mqtts/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Mqtts Yearly backup" + echo "" + echo "STATUS: Mqtts Yearly backup succeeded." 
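+    # Scheduling note: "$(date -d +1day +%d)" is tomorrow's day of the month;
+    # it equals 01 only on the last day of a month, which is the condition that
+    # gates the monthly and yearly uploads in this script.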
+ echo "" + echo "******* Mqtts Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_MQTTS}"/yearly_backup/mqtts/ --human-readable | grep -i "${SOURCE_NAME}"_mqtts_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_MQTTS}""/yearly_backup/mqtts/\/,,g" &>> /tmp/mqtts.txt + echo "" + echo "************** END **************************" + } >> /tmp/mqtts.txt +else + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Mqtts Yearly backup" + echo "" + echo "STATUS: Mqtts Yearly backup failed" + echo "" + echo "Something went wrong, please check it" + }>> /tmp/mqtts.txt + < /tmp/mqtts.txt mail -s "${SOURCE_NAME}: Mqtts Data Backup" "${BACKUP_MAIL}" +fi +fi + + +< /tmp/mqtts.txt mail -s "${SOURCE_NAME}: Mqtts Data Backup" "${BACKUP_MAIL}" + +# Remove the old backup data in local directory to avoid excessive storage use +find /var/lib/backup/mqtts/ -type f -exec rm {} \; +rm /tmp/mqtts.txt +###PRUNE### + +# prune the old backup data in S3 bucket to avoid excessive storage use(Daily backup) +s3cmd ls -r s3://"${S3_BUCKET_MQTTS}"/mqtts/ | awk -v DEL="$(date +%F -d "31 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done + + +if [ "$(date -d +1day +%d)" -eq 01 ]; then +# prune the old backup data in S3 bucket to avoid excessive storage use(Monthly backup) +s3cmd ls -r s3://"${S3_BUCKET_MQTTS}"/monthly_backup/mqtts/ | awk -v DEL="$(date +%F -d "366 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done +fi \ No newline at end of file diff --git a/backup/nginx_backup.sh b/backup/nginx_backup.sh new file mode 100644 index 0000000..44beb0b --- /dev/null +++ b/backup/nginx_backup.sh @@ -0,0 +1,144 @@ +#!/bin/bash +#Purpose: The Shell script will be used for taking backup and send it to S3 bucket and Prune Old Data in S3 Bucket. +#Version:v0.1 +#Created Date:2022-08-26 +#Modified Date:12-10-2022 +#Reviewer: Terry Moore. +#Author: Shashi, VishnuNambi. + +a=$(date +%b) +b=Mar +c=Jun +d=Sep +e=Dec +DATE1=$(date +%Y%m%d%H%M) +DATE=$(date +%d-%m-%y_%H-%M) + +mkdir -p /var/lib/backup/nginx + +nginx_src='/nginx' + +if [ ! -d $nginx_src ]; then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nginx backup" + echo "" + echo "STATUS: Nginx backup failed" + echo "" + echo "The source backup directory: nginx_src is not available" + }>> /tmp/nginx.txt + < /tmp/nginx.txt mail -s "${SOURCE_NAME}: Nginx Data Backup" "${BACKUP_MAIL}" + exit +else + tar cvzf /var/lib/backup/nginx/"${SOURCE_NAME}"_nginx_data_backup_"${DATE1}".tgz ${nginx_src}/ +fi + +# Moving the backup to S3 bucket (Daily backup) +if s3cmd put -r --no-mime-magic /var/lib/backup/nginx/ s3://"${S3_BUCKET_NGINX}"/nginx/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nginx Daily backup" + echo "" + echo "STATUS: Nginx Daily backup succeeded." 
+ echo "" + echo "******* Nginx Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_NGINX}"/nginx/ --human-readable | grep -i "${SOURCE_NAME}"_nginx_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_NGINX}""\/,,g" &>> /tmp/nginx.txt + echo "" + echo "************** END **************************" + } >> /tmp/nginx.txt +else +{ echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nginx Daily backup" + echo "" + echo "STATUS: Nginx Daily backup failed" + echo "" + echo "Something went wrong, please check it" + } >> /tmp/nginx.txt + < /tmp/nginx.txt mail -s "${SOURCE_NAME}: Nginx Data Backup" "${BACKUP_MAIL}" +fi + + +# Moving the backup to S3 bucket (Monthly backup) +if [ "$(date -d +1day +%d)" -eq 01 ]; then +if s3cmd put -r --no-mime-magic /var/lib/backup/nginx/ s3://"${S3_BUCKET_NGINX}"/monthly_backup/nginx/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nginx Monthly backup" + echo "" + echo "STATUS: Nginx Monthly backup succeeded." + echo "" >> /tmp/nginx.txt + echo "******* Nginx Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_NGINX}"/monthly_backup/nginx/ --human-readable | grep -i "${SOURCE_NAME}"_nginx_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_NGINX}""/monthly_backup/nginx/\/,,g" &>> /tmp/nginx.txt + echo "" + echo "************** END **************************" + } >> /tmp/nginx.txt +else + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nginx Monthly backup" + echo "" + echo "STATUS: Nginx Monthly backup failed" + echo "" + echo "Something went wrong, please check it" + }>> /tmp/nginx.txt + < /tmp/nginx.txt mail -s "${SOURCE_NAME}: Nginx Data Backup" "${BACKUP_MAIL}" +fi +fi + + +# Moving the backup to S3 bucket (Yearly backup) +if [ "$a" == "$b" ] || [ "$a" == "$c" ] || [ "$a" == "$d" ] || [ "$a" == "$e" ] && [ "$(date -d +1day +%d)" -eq 01 ]; then +if s3cmd put -r --no-mime-magic /var/lib/backup/nginx/ s3://"${S3_BUCKET_NGINX}"/yearly_backup/nginx/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nginx Yearly backup" + echo "" + echo "STATUS: Nginx Yearly backup succeeded." 
+ echo "" + echo "******* Nginx Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_NGINX}"/yearly_backup/nginx/ --human-readable | grep -i "${SOURCE_NAME}"_nginx_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_NGINX}""/yearly_backup/nginx/\/,,g" &>> /tmp/nginx.txt + echo "" + echo "************** END **************************" + } >> /tmp/nginx.txt +else + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nginx Yearly backup" + echo "" + echo "STATUS: Nginx Yearly backup failed" + echo "" + echo "Something went wrong, please check it" + }>> /tmp/nginx.txt + < /tmp/nginx.txt mail -s "${SOURCE_NAME}: Nginx Data Backup" "${BACKUP_MAIL}" +fi +fi + + +< /tmp/nginx.txt mail -s "${SOURCE_NAME}: Nginx Data Backup" "${BACKUP_MAIL}" + +# Remove the old backup data in local directory to avoid excessive storage use +find /var/lib/backup/nginx/ -type f -exec rm {} \; +rm /tmp/nginx.txt +###PRUNE### + +# prune the old backup data in S3 bucket to avoid excessive storage use(Daily backup) +s3cmd ls -r s3://"${S3_BUCKET_NGINX}"/nginx/ | awk -v DEL="$(date +%F -d "31 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done + + +if [ "$(date -d +1day +%d)" -eq 01 ]; then +# prune the old backup data in S3 bucket to avoid excessive storage use(Monthly backup) +s3cmd ls -r s3://"${S3_BUCKET_NGINX}"/monthly_backup/nginx/ | awk -v DEL="$(date +%F -d "366 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done +fi \ No newline at end of file diff --git a/backup/nodered_backup.sh b/backup/nodered_backup.sh new file mode 100644 index 0000000..fa48477 --- /dev/null +++ b/backup/nodered_backup.sh @@ -0,0 +1,145 @@ +#!/bin/bash +#Purpose: The Shell script will be used for taking backup and send it to S3 bucket and Prune Old Data in S3 Bucket. +#Version:v0.1 +#Created Date:2022-08-26 +#Modified Date:12-10-2022 +#Reviewer: Terry Moore. +#Author: Shashi, VishnuNambi. + +a=$(date +%b) +b=Mar +c=Jun +d=Sep +e=Dec +DATE1=$(date +%Y%m%d%H%M) +DATE=$(date +%d-%m-%y_%H-%M) + +mkdir -p /var/lib/backup/nodered + +nodered_src='/nodered' + +if [ ! -d $nodered_src ]; then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nodered backup" + echo "" + echo "STATUS: Nodered backup failed" + echo "" + echo "The source backup directory: nodered_src is not available" + }>> /tmp/nodered.txt + < /tmp/nodered.txt mail -s "${SOURCE_NAME}: Nodered Data Backup" "${BACKUP_MAIL}" + exit +else + tar cvzf /var/lib/backup/nodered/"${SOURCE_NAME}"_nodered_data_backup_"${DATE1}".tgz ${nodered_src}/ +fi + +# Moving the backup to S3 bucket (Daily backup) +if s3cmd put -r --no-mime-magic /var/lib/backup/nodered/ s3://"${S3_BUCKET_NODERED}"/nodered/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nodered Daily backup" + echo "" + echo "STATUS: Nodered Daily backup succeeded." 
+ echo "" + echo "******* Nodered Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_NODERED}"/nodered/ --human-readable | grep -i "${SOURCE_NAME}"_nodered_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_NODERED}""\/,,g" &>> /tmp/nodered.txt + echo "" + echo "************** END **************************" + } >> /tmp/nodered.txt +else +{ echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nodered Daily backup" + echo "" + echo "STATUS: Nodered Daily backup failed" + echo "" + echo "Something went wrong, please check it" + } >> /tmp/nodered.txt + < /tmp/nodered.txt mail -s "${SOURCE_NAME}: Nodered Data Backup" "${BACKUP_MAIL}" +fi + + +# Moving the backup to S3 bucket (Monthly backup) +if [ "$(date -d +1day +%d)" -eq 01 ]; then +if s3cmd put -r --no-mime-magic /var/lib/backup/nodered/ s3://"${S3_BUCKET_NODERED}"/monthly_backup/nodered/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nodered Monthly backup" + echo "" + echo "STATUS: Nodered Monthly backup succeeded." + echo "" >> /tmp/nodered.txt + echo "******* Nodered Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_NODERED}"/monthly_backup/nodered/ --human-readable | grep -i "${SOURCE_NAME}"_nodered_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_NODERED}""/monthly_backup/nodered/\/,,g" &>> /tmp/nodered.txt + echo "" + echo "************** END **************************" + } >> /tmp/nodered.txt +else + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nodered Monthly backup" + echo "" + echo "STATUS: Nodered Monthly backup failed" + echo "" + echo "Something went wrong, please check it" + }>> /tmp/nodered.txt + < /tmp/nodered.txt mail -s "${SOURCE_NAME}: Nodered Data Backup" "${BACKUP_MAIL}" +fi +fi + + +# Moving the backup to S3 bucket (Yearly backup) +if [ "$a" == "$b" ] || [ "$a" == "$c" ] || [ "$a" == "$d" ] || [ "$a" == "$e" ] && [ "$(date -d +1day +%d)" -eq 01 ]; then +if s3cmd put -r --no-mime-magic /var/lib/backup/nodered/ s3://"${S3_BUCKET_NODERED}"/yearly_backup/nodered/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nodered Yearly backup" + echo "" + echo "STATUS: Nodered Yearly backup succeeded." 
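+    # Cleanup note: the `find ... -type f -exec rm {} \;` step at the end of the
+    # script removes the local archives but keeps the staging directory in place
+    # for the next run.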
+ echo "" + echo "******* Nodered Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_NODERED}"/yearly_backup/nodered/ --human-readable | grep -i "${SOURCE_NAME}"_nodered_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_NODERED}""/yearly_backup/nodered/\/,,g" &>> /tmp/nodered.txt + echo "" + echo "************** END **************************" + } >> /tmp/nodered.txt +else + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nodered Yearly backup" + echo "" + echo "STATUS: Nodered Yearly backup failed" + echo "" + echo "Something went wrong, please check it" + }>> /tmp/nodered.txt + < /tmp/nodered.txt mail -s "${SOURCE_NAME}: Nodered Data Backup" "${BACKUP_MAIL}" +fi +fi + + +< /tmp/nodered.txt mail -s "${SOURCE_NAME}: Nodered Data Backup" "${BACKUP_MAIL}" + +# Remove the old backup data in local directory to avoid excessive storage use +find /var/lib/backup/nodered/ -type f -exec rm {} \; +rm /tmp/nodered.txt + +###PRUNE### + +# prune the old backup data in S3 bucket to avoid excessive storage use(Daily backup) +s3cmd ls -r s3://"${S3_BUCKET_NODERED}"/nodered/ | awk -v DEL="$(date +%F -d "31 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done + + +if [ "$(date -d +1day +%d)" -eq 01 ]; then +# prune the old backup data in S3 bucket to avoid excessive storage use(Monthly backup) +s3cmd ls -r s3://"${S3_BUCKET_NODERED}"/monthly_backup/nodered/ | awk -v DEL="$(date +%F -d "366 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done +fi diff --git a/cron-backup/postfix.sh b/backup/postfix.sh old mode 100755 new mode 100644 similarity index 100% rename from cron-backup/postfix.sh rename to backup/postfix.sh diff --git a/backup/startup.sh b/backup/startup.sh new file mode 100644 index 0000000..25afc6b --- /dev/null +++ b/backup/startup.sh @@ -0,0 +1,32 @@ +#!/bin/bash +while true +do + HOUR="$(date +'%H')" + MINUTE="$(date +'%M')" + + if [ "$HOUR" = "06" ] && [ "$MINUTE" = "35" ] + then + /bin/nodered_backup.sh + sleep 60 + fi + if [ "$HOUR" = "07" ] && [ "$MINUTE" = "35" ] + then + /bin/grafana_backup.sh + sleep 60 + fi + if [ "$HOUR" = "08" ] && [ "$MINUTE" = "35" ] + then + /bin/nginx_backup.sh + sleep 60 + fi + if [ "$HOUR" = "10" ] && [ "$MINUTE" = "35" ] + then + /bin/mongodb_backup.sh + sleep 60 + fi + if [ "$HOUR" = "09" ] && [ "$MINUTE" = "35" ] + then + /bin/mqtts_backup.sh + sleep 60 + fi +done diff --git a/cron-backup/README.md b/cron-backup/README.md deleted file mode 100644 index 116f926..0000000 --- a/cron-backup/README.md +++ /dev/null @@ -1,30 +0,0 @@ -# [cron-backup](./cron-backup) Docker Container Usage - -This instance provides backup support for the `Nginx`, `Node-red` and `Grafana` containers and pushed the backed up data to S3-compatible storage. - -## Shell script - -For backing up the directory data - -- It uses [`grafana_backup.sh`](cron-backup\grafana_backup.sh) for `Grafana` container. -- It uses [`nodered_backup.sh`](cron-backup\nodered_backup.sh) for `Node-red` container. -- It uses [`nginx_backup.sh`](cron-backup\nginx_backup.sh) for `Nginx` container. - -## Scheduling backup using `crontab` - -The following backup jobs are added to run at specific time. 
- -``` bash - -# echo new cron into cron file -{ - echo "35 6 * * * /bin/bash -l -c '/bin/nodered_backup.sh'" - echo "35 7 * * * /bin/bash -l -c '/bin/grafana_backup.sh'" - echo "35 8 * * * /bin/bash -l -c '/bin/nginx_backup.sh'" -} >> mycron - -``` - -## Mail Alert - -The above backup shell scripts were configured to send mail for the both successful/unsuccessful run. diff --git a/cron-backup/cron.sh b/cron-backup/cron.sh deleted file mode 100755 index 0225d7f..0000000 --- a/cron-backup/cron.sh +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh - -# exit on unchecked errors -set -e - -# backups are scheduled via the root crontab. Start by heading there -cd /root - -# write out current crontab -crontab -l > mycron || echo "no crontab for root, going on" - -# echo new cron into cron file -{ - echo "35 6 * * * /bin/bash -l -c '/bin/nodered_backup.sh'" - echo "35 7 * * * /bin/bash -l -c '/bin/grafana_backup.sh'" - echo "35 8 * * * /bin/bash -l -c '/bin/nginx_backup.sh'" - echo "35 9 * * * /bin/bash -l -c '/bin/mqtts_backup.sh'" -} >> mycron - -# delete duplicated lines -sort -u -o mycron mycron - -# install new cron file -crontab mycron diff --git a/cron-backup/grafana_backup.sh b/cron-backup/grafana_backup.sh deleted file mode 100755 index 33ca0bd..0000000 --- a/cron-backup/grafana_backup.sh +++ /dev/null @@ -1,55 +0,0 @@ -#!/bin/bash -#The Shell script will be used for taking backup and send it to S3 bucket. - -DATE1=$(date +%Y%m%d%H%M) -DATE=$(date +%d-%m-%y_%H-%M) - -mkdir -p /var/lib/backup/grafana - -grafana_src='/grafana' - -if [ ! -d $grafana_src ]; then - - echo "DATE:" "$DATE" > /tmp/grafana.txt - echo "" >> /tmp/grafana.txt - echo "DESCRIPTION: ${SOURCE_NAME}_Grafana backup" >> /tmp/grafana.txt - echo "" >> /tmp/grafana.txt - echo "STATUS: Grafana backup failed" >> /tmp/grafana.txt - echo "" >> /tmp/grafana.txt - echo "The source backup directory: grafana_src is not available" >> /tmp/grafana.txt - < /tmp/grafana.txt mail -s "${SOURCE_NAME}: Grafana Data Backup" "${CRON_BACKUP_MAIL}" - exit -else - tar cvzf /var/lib/backup/grafana/"${SOURCE_NAME}"_grafana_data_backup_"${DATE1}".tgz ${grafana_src}/ -fi - -# Moving the backup to S3 bucket -if s3cmd put -r --no-mime-magic /var/lib/backup/grafana/ s3://"${S3_BUCKET_GRAFANA}"/; -then - echo "DATE:" "$DATE" > /tmp/grafana.txt - echo "" >> /tmp/grafana.txt - echo "DESCRIPTION: ${SOURCE_NAME}_Grafana backup" >> /tmp/grafana.txt - echo "" >> /tmp/grafana.txt - echo "STATUS: Grafana backup succeeded." 
>> /tmp/grafana.txt - echo "" >> /tmp/grafana.txt - echo "******* Grafana Data Backup ****************" >> /tmp/grafana.txt - echo "" >> /tmp/grafana.txt - s3cmd ls --no-mime-magic s3://"${S3_BUCKET_GRAFANA}"/ --human-readable | grep -i "${SOURCE_NAME}"_grafana_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/${S3_BUCKET_GRAFANA}\/,,g" &>> /tmp/grafana.txt - echo "" >> /tmp/grafana.txt - echo "************** END **************************" >> /tmp/grafana.txt -else - echo "DATE:" "$DATE" > /tmp/grafana.txt - echo "" >> /tmp/grafana.txt - echo "DESCRIPTION: ${SOURCE_NAME}_Grafana backup" >> /tmp/grafana.txt - echo "" >> /tmp/grafana.txt - echo "STATUS: Grafana backup failed" >> /tmp/grafana.txt - echo "" >> /tmp/grafana.txt - echo "Something went wrong, please check it" >> /tmp/grafana.txt - < /tmp/grafana.txt mail -s "${SOURCE_NAME}: Grafana Data Backup" "${CRON_BACKUP_MAIL}" -fi -< /tmp/grafana.txt mail -s "${SOURCE_NAME}: Grafana Data Backup" "${CRON_BACKUP_MAIL}" - -# Remove the old backup data in local directory to avoid excessive storage use -find /var/lib/backup/grafana/ -type f -exec rm {} \; - -exit diff --git a/cron-backup/mongodb_backup.sh b/cron-backup/mongodb_backup.sh deleted file mode 100644 index f8105ff..0000000 --- a/cron-backup/mongodb_backup.sh +++ /dev/null @@ -1,65 +0,0 @@ -#!/bin/bash -#The Shell script will be used for taking backup and send it to S3 bucket. - -# TO list all Databases in mongodb databases -DATE1=$(date +%Y%m%d%H%M) -DATE=$(date +%d-%m-%y_%H-%M) - -mkdir -p /var/lib/backup/mongodb - -#Full Mongodb backup - -mongodump --host mongodb:27017 --authenticationDatabase admin -u "$MONGO_INITDB_ROOT_USERNAME" -p "$MONGO_INITDB_ROOT_PASSWORD" -o /var/lib/mongodb-backup/dump - - -showdb(){ -mongo --quiet --host mongodb:27017 --eval "printjson(db.adminCommand('listDatabases'))" -u "$MONGO_INITDB_ROOT_USERNAME" -p "$MONGO_INITDB_ROOT_PASSWORD" | grep -i name | awk -F'"' '{print $4}' -} - - -showdb > /mongo_dbs.txt - -#Backing up the databases listed. -while read -r db -do - echo "Creating backup for $db" - mongodump --host mongodb:27017 --db "$db" --authenticationDatabase admin -u "$MONGO_INITDB_ROOT_USERNAME" -p "$MONGO_INITDB_ROOT_PASSWORD" -o /var/lib/mongodb-backup/ -done < "/mongo_dbs.txt" - -tar czf /var/lib/backup/mongodb/"${SOURCE_NAME}"_mongodb_db_backup_"${DATE1}".tgz /var/lib/mongodb-backup/. && rsync -avr /var/lib/mongodb/ /root/mongodb_data/ && tar czf /var/lib/backup/mongodb/"${SOURCE_NAME}"_mongodb_data_backup_"${DATE1}".tgz /root/mongodb_data/. - -# Moving the backup to S3 bucket -if s3cmd put -r --no-mime-magic /var/lib/backup/mongodb/ s3://"${S3_BUCKET_MONGODB}"/; -then - echo "DATE:" "$DATE" > /tmp/mongodbbackup.txt - echo " " >> /tmp/mongodbbackup.txt - echo "DESCRIPTION: ${SOURCE_NAME}_Mongodb backup" >> /tmp/mongodbbackup.txt - echo " " >> /tmp/mongodbbackup.txt - echo "STATUS: mongodb backup is Successful." 
>> /tmp/mongodbbackup.txt - echo " " >> /tmp/mongodbbackup.txt - echo "******* Mongodb Database Backup ****************" >> /tmp/mongodbbackup.txt - echo " " >> /tmp/mongodbbackup.txt - s3cmd ls --no-mime-magic s3://"${S3_BUCKET_MONGODB}"/ --human-readable | grep -i "${SOURCE_NAME}"_mongodb_db | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/${S3_BUCKET_MONGODB}\/,,g" &>> /tmp/mongodbbackup.txt - echo " " >> /tmp/mongodbbackup.txt - echo "************** Mongodb data Backup *************" >> /tmp/mongodbbackup.txt - echo " " >> /tmp/mongodbbackup.txt - s3cmd ls --no-mime-magic s3://"${S3_BUCKET_MONGODB}"/ --human-readable | grep -i "${SOURCE_NAME}"_mongodb_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/${S3_BUCKET_MONGODB}\/,,g" &>> /tmp/mongodbbackup.txt - echo " " >> /tmp/mongodbbackup.txt - echo "********************** END *********************" >> /tmp/mongodbbackup.txt -else - echo "DATE:" "$DATE" > /tmp/mongodbbackup.txt - echo " " >> /tmp/mongodbbackup.txt - echo "DESCRIPTION: ${SOURCE_NAME}_Mongodb backup" >> /tmp/mongodbbackup.txt - echo " " >> /tmp/mongodbbackup.txt - echo "STATUS: mongodb backup is Failed." >> /tmp/mongodbbackup.txt - echo " " >> /tmp/mongodbbackup.txt - echo "Something went wrong, Please check it" >> /tmp/mongodbbackup.txt - < /tmp/mongodbbackup.txt mail -s "${SOURCE_NAME}: mongodb backup" "${CRON_BACKUP_MAIL}" -fi - -# Remove the old backup data in local directory to avoid excessive storage use -find /var/lib/backup/mongodb/ -type f -exec rm {} \; -find /root/mongodb_data/ -type f -exec rm {} \; -find /var/lib/mongodb-backup/ -type f -exec rm {} \; - -< /tmp/mongodbbackup.txt mail -s "${SOURCE_NAME}: mongodb backup" "${CRON_BACKUP_MAIL}" diff --git a/cron-backup/mqtts_backup.sh b/cron-backup/mqtts_backup.sh deleted file mode 100755 index 1b42070..0000000 --- a/cron-backup/mqtts_backup.sh +++ /dev/null @@ -1,55 +0,0 @@ -#!/bin/bash -#The Shell script will be used for taking backup and send it to S3 bucket. - -DATE1=$(date +%Y%m%d%H%M) -DATE=$(date +%d-%m-%y_%H-%M) - -mkdir -p /var/lib/backup/mqtts - -mqtts_src='/mqtts' - -if [ ! -d $mqtts_src ]; then - - echo "DATE:" "$DATE" > /tmp/mqtts.txt - echo "" >> /tmp/mqtts.txt - echo "DESCRIPTION: ${SOURCE_NAME}_MQTTs backup" >> /tmp/mqtts.txt - echo "" >> /tmp/mqtts.txt - echo "STATUS: MQTTs backup failed." >> /tmp/mqtts.txt - echo "" >> /tmp/mqtts.txt - echo "The source backup directory: mqtts_src is not available" >> /tmp/mqtts.txt - < /tmp/mqtts.txt mail -s "${SOURCE_NAME}: MQTTs Data Backup" "${CRON_BACKUP_MAIL}" - exit -else - tar cvzf /var/lib/backup/mqtts/"${SOURCE_NAME}"_mqtts_data_backup_"${DATE1}".tgz ${mqtts_src}/ -fi - -# Moving the backup to S3 bucket -if s3cmd put -r --no-mime-magic /var/lib/backup/mqtts/ s3://"${S3_BUCKET_MQTTS}"/; -then - echo "DATE:" "$DATE" > /tmp/mqtts.txt - echo "" >> /tmp/mqtts.txt - echo "DESCRIPTION: ${SOURCE_NAME}_MQTTs backup" >> /tmp/mqtts.txt - echo "" >> /tmp/mqtts.txt - echo "STATUS: MQTTs backup succeeded." 
>> /tmp/mqtts.txt - echo "" >> /tmp/mqtts.txt - echo "******* MQTTs Data Backup ****************" >> /tmp/mqtts.txt - echo "" >> /tmp/mqtts.txt - s3cmd ls --no-mime-magic s3://"${S3_BUCKET_MQTTS}"/ --human-readable | grep -i "${SOURCE_NAME}"_mqtts_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/${S3_BUCKET_MQTTS}\/,,g" &>> /tmp/mqtts.txt - echo "" >> /tmp/mqtts.txt - echo "************** END **************************" >> /tmp/mqtts.txt -else - echo "DATE:" "$DATE" > /tmp/mqtts.txt - echo "" >> /tmp/mqtts.txt - echo "DESCRIPTION: ${SOURCE_NAME}_MQTTs backup" >> /tmp/mqtts.txt - echo "" >> /tmp/mqtts.txt - echo "STATUS: MQTTs backup failed." >> /tmp/mqtts.txt - echo "" >> /tmp/mqtts.txt - echo "Something went wrong, please check it" >> /tmp/mqtts.txt - < /tmp/mqtts.txt mail -s "${SOURCE_NAME}: MQTTs Data Backup" "${CRON_BACKUP_MAIL}" -fi -< /tmp/mqtts.txt mail -s "${SOURCE_NAME}: MQTTs Data Backup" "${CRON_BACKUP_MAIL}" - -# Remove the old backup data in local directory to avoid excessive storage use -find /var/lib/backup/mqtts/ -type f -exec rm {} \; - -exit diff --git a/cron-backup/nginx_backup.sh b/cron-backup/nginx_backup.sh deleted file mode 100755 index 1db5260..0000000 --- a/cron-backup/nginx_backup.sh +++ /dev/null @@ -1,55 +0,0 @@ -#!/bin/bash -#The Shell script will be used for taking backup and send it to S3 bucket. - -DATE1=$(date +%Y%m%d%H%M) -DATE=$(date +%d-%m-%y_%H-%M) - -mkdir -p /var/lib/backup/nginx - -nginx_src='/nginx' - -if [ ! -d $nginx_src ]; then - - echo "DATE:" "$DATE" > /tmp/nginx.txt - echo "" >> /tmp/nginx.txt - echo "DESCRIPTION: ${SOURCE_NAME}_Nginx backup" >> /tmp/nginx.txt - echo "" >> /tmp/nginx.txt - echo "STATUS: Nginx backup failed." >> /tmp/nginx.txt - echo "" >> /tmp/nginx.txt - echo "The source backup directory: nginx_src is not available" >> /tmp/nginx.txt - < /tmp/nginx.txt mail -s "${SOURCE_NAME}: Nginx Data Backup" "${CRON_BACKUP_MAIL}" - exit -else - tar cvzf /var/lib/backup/nginx/"${SOURCE_NAME}"_nginx_data_backup_"${DATE1}".tgz ${nginx_src}/ -fi - -# Moving the backup to S3 bucket -if s3cmd put -r --no-mime-magic /var/lib/backup/nginx/ s3://"${S3_BUCKET_NGINX}"/; -then - echo "DATE:" "$DATE" > /tmp/nginx.txt - echo "" >> /tmp/nginx.txt - echo "DESCRIPTION: ${SOURCE_NAME}_Nginx backup" >> /tmp/nginx.txt - echo "" >> /tmp/nginx.txt - echo "STATUS: Nginx backup succeeded." >> /tmp/nginx.txt - echo "" >> /tmp/nginx.txt - echo "******* Nginx Data Backup ****************" >> /tmp/nginx.txt - echo "" >> /tmp/nginx.txt - s3cmd ls --no-mime-magic s3://"${S3_BUCKET_NGINX}"/ --human-readable | grep -i "${SOURCE_NAME}"_nginx_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/${S3_BUCKET_NGINX}\/,,g" &>> /tmp/nginx.txt - echo "" >> /tmp/nginx.txt - echo "************** END **************************" >> /tmp/nginx.txt -else - echo "DATE:" "$DATE" > /tmp/nginx.txt - echo "" >> /tmp/nginx.txt - echo "DESCRIPTION: ${SOURCE_NAME}_Nginx backup" >> /tmp/nginx.txt - echo "" >> /tmp/nginx.txt - echo "STATUS: Nginx backup failed." 
>> /tmp/nginx.txt - echo "" >> /tmp/nginx.txt - echo "Something went wrong, please check it" >> /tmp/nginx.txt - < /tmp/nginx.txt mail -s "${SOURCE_NAME}: Nginx Data Backup" "${CRON_BACKUP_MAIL}" -fi -< /tmp/nginx.txt mail -s "${SOURCE_NAME}: Nginx Data Backup" "${CRON_BACKUP_MAIL}" - -# Remove the old backup data in local directory to avoid excessive storage use -find /var/lib/backup/nginx/ -type f -exec rm {} \; - -exit diff --git a/cron-backup/nodered_backup.sh b/cron-backup/nodered_backup.sh deleted file mode 100755 index 8232eee..0000000 --- a/cron-backup/nodered_backup.sh +++ /dev/null @@ -1,55 +0,0 @@ -#!/bin/bash -#The Shell script will be used for taking backup and send it to S3 bucket. - -DATE1=$(date +%Y%m%d%H%M) -DATE=$(date +%d-%m-%y_%H-%M) - -mkdir -p /var/lib/backup/nodered - -nodered_src='/nodered' - -if [ ! -d $nodered_src ]; then - - echo "DATE:" "$DATE" > /tmp/nodered.txt - echo "" >> /tmp/nodered.txt - echo "DESCRIPTION: ${SOURCE_NAME}_Nodered backup" >> /tmp/nodered.txt - echo "" >> /tmp/nodered.txt - echo "STATUS: Nodered backup failed." >> /tmp/nodered.txt - echo "" >> /tmp/nodered.txt - echo "The source backup directory: nodered_src is not available" >> /tmp/nodered.txt - < /tmp/nodered.txt mail -s "${SOURCE_NAME}: Nodered Data Backup" "${CRON_BACKUP_MAIL}" - exit -else - tar cvzf /var/lib/backup/nodered/"${SOURCE_NAME}"_nodered_data_backup_"${DATE1}".tgz ${nodered_src}/ -fi - -# Moving the backup to S3 bucket -if s3cmd put -r --no-mime-magic /var/lib/backup/nodered/ s3://"${S3_BUCKET_NODERED}"/; -then - echo "DATE:" "$DATE" > /tmp/nodered.txt - echo "" >> /tmp/nodered.txt - echo "DESCRIPTION: ${SOURCE_NAME}_Nodered backup" >> /tmp/nodered.txt - echo "" >> /tmp/nodered.txt - echo "STATUS: Node-red backup succeeded." >> /tmp/nodered.txt - echo "" >> /tmp/nodered.txt - echo "******* Node-red Data Backup ****************" >> /tmp/nodered.txt - echo "" >> /tmp/nodered.txt - s3cmd ls --no-mime-magic s3://"${S3_BUCKET_NODERED}"/ --human-readable | grep -i "${SOURCE_NAME}"_nodered_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/${S3_BUCKET_NODERED}\/,,g" &>> /tmp/nodered.txt - echo "" >> /tmp/nodered.txt - echo "************** END **************************" >> /tmp/nodered.txt -else - echo "DATE:" "$DATE" > /tmp/nodered.txt - echo "" >> /tmp/nodered.txt - echo "DESCRIPTION: ${SOURCE_NAME}_Nodered backup" >> /tmp/nodered.txt - echo "" >> /tmp/nodered.txt - echo "STATUS: Nodered backup failed." >> /tmp/nodered.txt - echo "" >> /tmp/nodered.txt - echo "Something went wrong, please check it" >> /tmp/nodered.txt - < /tmp/nodered.txt mail -s "${SOURCE_NAME}: Nodered Data Backup" "${CRON_BACKUP_MAIL}" -fi -< /tmp/nodered.txt mail -s "${SOURCE_NAME}: Nodered Data Backup" "${CRON_BACKUP_MAIL}" - -# Remove the old backup data in local directory to avoid excessive storage use -find /var/lib/backup/nodered/ -type f -exec rm {} \; - -exit diff --git a/docker-compose.yml b/docker-compose.yml index 5fe81e8..d3d27d7 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -183,8 +183,8 @@ # IOT_DASHBOARD_INFLUXDB_BACKUP_EMAIL # To send backup mail in Influxdb container. Use "space" to delimit the MAIL IDs. # -# IOT_DASHBOARD_CRON_BACKUP_EMAIL -# To send backup mail in cron-backup container. Use "space" to delimit the MAIL IDs. +# IOT_DASHBOARD_BACKUP_EMAIL +# To send backup mail in backup container. Use "space" to delimit the MAIL IDs. # # IOT_DASHBOARD_MONGO_INITDB_ROOT_USERNAME # The Username to be used for accessing Mongodb. 
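For reference, `IOT_DASHBOARD_BACKUP_EMAIL` takes the same space-delimited recipient list as the old `IOT_DASHBOARD_CRON_BACKUP_EMAIL`; a hypothetical example (the addresses are placeholders):

``` bash
# two recipients for the backup container's report mails (placeholder addresses)
export IOT_DASHBOARD_BACKUP_EMAIL="ops@example.com admin@example.com"
```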
@@ -254,6 +254,7 @@ services: - "${IOT_DASHBOARD_DATA}node-red:/data" environment: TZ: "${IOT_DASHBOARD_TIMEZONE:-GMT}" + NODE_RED_ENABLE_PROJECTS: "true" # nodered opens ports on influxdb and postfix so it needs to be able to talk to it. links: - influxdb @@ -348,13 +349,13 @@ services: - "2525:25" hostname: "${IOT_DASHBOARD_MAIL_HOST_NAME:-iotmail}" - cron-backup: + backup: restart: unless-stopped build: - context: ./cron-backup + context: ./backup dockerfile: Dockerfile args: - hostname: "${IOT_DASHBOARD_CRONBACKUP_MAIL_HOST_NAME:-cron-backup}" + hostname: "${IOT_DASHBOARD_CRONBACKUP_MAIL_HOST_NAME:-backup}" relay_ip: "postfix:25" domain: "${IOT_DASHBOARD_MAIL_DOMAIN:-example.com}" # Moving backup data to S3 Bucket @@ -363,7 +364,7 @@ services: AWS_DEFAULT_REGION: "${IOT_DASHBOARD_AWS_DEFAULT_REGION:-.}" AWS_HOST_BASE: "${IOT_DASHBOARD_AWS_HOST_BASE:-.}" AWS_HOST_BUCKET: "${IOT_DASHBOARD_AWS_HOST_BUCKET:-.}" - hostname: "${IOT_DASHBOARD_CRONBACKUP_MAIL_HOST_NAME:-cron-backup}" + hostname: "${IOT_DASHBOARD_CRONBACKUP_MAIL_HOST_NAME:-backup}" volumes: - "${IOT_DASHBOARD_DATA}grafana:/grafana" - "${IOT_DASHBOARD_DATA}node-red:/nodered" @@ -372,7 +373,7 @@ services: - "${IOT_DASHBOARD_DATA}mongodb/mongodb_data:/var/lib/mongodb" - "${IOT_DASHBOARD_DATA}mongodb/mongodb-S3-bucket:/var/lib/mongodb-S3-bucket" environment: - CRON_BACKUP_MAIL: "${IOT_DASHBOARD_CRON_BACKUP_EMAIL:-}" + BACKUP_MAIL: "${IOT_DASHBOARD_BACKUP_EMAIL:-}" MONGO_INITDB_ROOT_USERNAME: "${IOT_DASHBOARD_MONGO_INITDB_ROOT_USERNAME:-}" MONGO_INITDB_ROOT_PASSWORD: "${IOT_DASHBOARD_MONGO_INITDB_ROOT_PASSWORD:-}" SOURCE_NAME: "${IOT_DASHBOARD_CERTBOT_FQDN}" @@ -406,7 +407,7 @@ services: mongodb: restart: unless-stopped - image: mongo:5.0 + image: mongo:5.0.11 environment: MONGO_INITDB_ROOT_USERNAME: "${IOT_DASHBOARD_MONGO_INITDB_ROOT_USERNAME:-}" MONGO_INITDB_ROOT_PASSWORD: "${IOT_DASHBOARD_MONGO_INITDB_ROOT_PASSWORD:-}" diff --git a/expo/Dockerfile b/expo/Dockerfile index d5da83d..f37691c 100644 --- a/expo/Dockerfile +++ b/expo/Dockerfile @@ -4,7 +4,7 @@ # Build the EXPO using phusion base image -FROM phusion/baseimage:master-amd64 +FROM phusion/baseimage:jammy-1.0.1 # Enabling SSH service RUN rm -f /etc/service/sshd/down diff --git a/influxdb/Dockerfile b/influxdb/Dockerfile index 90252c4..26bfe63 100644 --- a/influxdb/Dockerfile +++ b/influxdb/Dockerfile @@ -2,7 +2,7 @@ # Dockerfile for building the influxdb instance with S3-backup and Mail alert setup # -FROM phusion/baseimage:master-amd64 +FROM phusion/baseimage:jammy-1.0.1 # Default InfluxDB host ENV INFLUX_HOST=influxdb @@ -13,7 +13,7 @@ ARG distrib_id ARG distrib_codename RUN echo "${distrib_id}" -RUN wget -qO- https://repos.influxdata.com/influxdb.key | apt-key add - +RUN wget -qO- https://repos.influxdata.com/influxdata-archive_compat.key | apt-key add - RUN /bin/bash -c "source /etc/lsb-release" RUN echo "deb https://repos.influxdata.com/${distrib_id} ${distrib_codename} stable" | tee /etc/apt/sources.list.d/influxdb.list @@ -61,8 +61,6 @@ COPY influxdb.conf /etc/influxdb/influxdb.conf # Enable influxdb database automatic backup crontab RUN mkdir -p /etc/my_init.d -COPY influxdb_cron.sh /etc/my_init.d/influxdb_cron.sh -RUN chmod +x /etc/my_init.d/influxdb_cron.sh # Start the postfix daemon during container startup COPY postfix.sh /etc/my_init.d/postfix.sh @@ -73,4 +71,8 @@ RUN mkdir /etc/service/influx COPY influx.sh /etc/service/influx/run RUN chmod +x /etc/service/influx/run +# Backup script for influxdb +COPY influxstart.sh /etc/service/influxstart/run +RUN chmod +x 
/etc/service/influxstart/run + # end of file diff --git a/influxdb/backup.sh b/influxdb/backup.sh index d4b93aa..3e61fe4 100755 --- a/influxdb/backup.sh +++ b/influxdb/backup.sh @@ -1,6 +1,16 @@ #!/bin/bash #The Shell script will be used for taking backup and send it to S3 bucket. +#Version:v0.1 +#Created Date:2022-08-26 +#Modified Date:12-10-2022 +#Reviewer: Terry Moore. +#Author: Shashi, VishnuNambi. +a=$(date +%b) +b=Mar +c=Jun +d=Sep +e=Dec # TO list all Databases in influxdb databases DATE=$(date +%d-%m-%y_%H-%M) DATE1=$(date +%Y%m%d%H%M) @@ -31,37 +41,120 @@ done < "/tmp/data.txt" tar czf /var/lib/backup/influxdb/"${SOURCE_NAME}"_influxdb_metdata_db_backup_"${DATE1}".tgz /var/lib/influxdb-backup/ && tar czf /var/lib/backup/influxdb/"${SOURCE_NAME}"_influxdb_data_backup_"${DATE1}".tgz /var/lib/influxdb/ -# Moving the backup to S3 bucket -if s3cmd put -r --no-mime-magic /var/lib/backup/influxdb/ s3://"${S3_BUCKET_INFLUXDB}"/; +# Moving the backup to S3 bucket (Daily Backup) +if s3cmd put -r --no-mime-magic /var/lib/backup/influxdb/ s3://"${S3_BUCKET_INFLUXDB}"/influxdb/; then - echo "DATE:" "$DATE" > /tmp/influxbackup.txt - echo "" >> /tmp/influxbackup.txt - echo "DESCRIPTION: ${SOURCE_NAME}_Influxdb backup" >> /tmp/influxbackup.txt - echo "" >> /tmp/influxbackup.txt - echo "STATUS: Influxdb backup succeeded." >> /tmp/influxbackup.txt - echo "" >> /tmp/influxbackup.txt - echo "******* Influxdb Database & metadata Backup ********" >> /tmp/influxbackup.txt - echo "" >> /tmp/influxbackup.txt - s3cmd ls --no-mime-magic s3://"${S3_BUCKET_INFLUXDB}"/ --human-readable | grep -i "${SOURCE_NAME}"_influxdb_metdata_db | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/${S3_BUCKET_INFLUXDB}\/,,g" &>> /tmp/influxbackup.txt - echo "" >> /tmp/influxbackup.txt - echo "************** Influxdb data Backup ****************" >> /tmp/influxbackup.txt - echo "" >> /tmp/influxbackup.txt - s3cmd ls --no-mime-magic s3://"${S3_BUCKET_INFLUXDB}"/ --human-readable | grep -i "${SOURCE_NAME}"_influxdb_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/${S3_BUCKET_INFLUXDB}\/,,g" &>> /tmp/influxbackup.txt - echo "" >> /tmp/influxbackup.txt - echo "********************** END ********************* " >> /tmp/influxbackup.txt + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Influxdb Daily backup" + echo "" + echo "STATUS: Influxdb Daily backup succeeded." + echo "" + echo "******* Influxdb Database & metadata Backup ********" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_INFLUXDB}"/influxdb/ --human-readable | grep -i "${SOURCE_NAME}"_influxdb_metdata_db | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/${S3_BUCKET_INFLUXDB}\/,,g" &>> /tmp/influxbackup.txt + echo "" + echo "************** Influxdb data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_INFLUXDB}"/influxdb/ --human-readable | grep -i "${SOURCE_NAME}"_influxdb_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/${S3_BUCKET_INFLUXDB}\/,,g" &>> /tmp/influxbackup.txt + echo "" + echo "********************** END ********************* " + }>> /tmp/influxbackup.txt else - echo "DATE:" "$DATE" > /tmp/influxbackup.txt - echo "" >> /tmp/influxbackup.txt - echo "DESCRIPTION: ${SOURCE_NAME}_Influxdb backup" >> /tmp/influxbackup.txt - echo "" >> /tmp/influxbackup.txt - echo "STATUS: Influxdb backup failed." 
>> /tmp/influxbackup.txt - echo "" >> /tmp/influxbackup.txt - echo "Something went wrong, please check it" >> /tmp/influxbackup.txt + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Influxdb Daily backup" + echo "" + echo "STATUS: Influxdb Daily backup failed." + echo "" + echo "Something went wrong, please check it" + }>> /tmp/influxbackup.txt < /tmp/influxbackup.txt mail -s "${SOURCE_NAME}: Influxdb backup" "${INFLUXDB_BACKUP_MAIL}" fi +# Moving the backup to S3 bucket (Monthly backup) +if [ "$(date -d +1day +%d)" -eq 01 ]; then +if s3cmd put -r --no-mime-magic /var/lib/backup/influxdb/ s3://"${S3_BUCKET_INFLUXDB}"/monthly_backup/influxdb/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Influxdb Monthly backup" + echo "" + echo "STATUS: Influxdb Monthly backup succeeded." + echo "" + echo "******* Influxdb Database & metadata Backup ********" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_INFLUXDB}"/monthly_backup/influxdb/ --human-readable | grep -i "${SOURCE_NAME}"_influxdb_metdata_db | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/${S3_BUCKET_INFLUXDB}\/,,g" &>> /tmp/influxbackup.txt + echo "" + echo "************** Influxdb data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_INFLUXDB}"/monthly_backup/influxdb/ --human-readable | grep -i "${SOURCE_NAME}"_influxdb_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/${S3_BUCKET_INFLUXDB}\/,,g" &>> /tmp/influxbackup.txt + echo "" + echo "********************** END ********************* " + }>> /tmp/influxbackup.txt +else + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Influxdb Monthly backup" + echo "" + echo "STATUS: Influxdb Monthly backup failed." + echo "" + echo "Something went wrong, please check it" + }>> /tmp/influxbackup.txt + < /tmp/influxbackup.txt mail -s "${SOURCE_NAME}: Influxdb backup" "${INFLUXDB_BACKUP_MAIL}" +fi +fi + +# Moving the backup to S3 bucket (Yearly backup) +if [ "$a" == "$b" ] || [ "$a" == "$c" ] || [ "$a" == "$d" ] || [ "$a" == "$e" ] && [ "$(date -d +1day +%d)" -eq 01 ]; then +if s3cmd put -r --no-mime-magic /var/lib/backup/influxdb/ s3://"${S3_BUCKET_INFLUXDB}"/yearly_backup/influxdb/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Influxdb Yearly backup" + echo "" + echo "STATUS: Influxdb Yearly backup succeeded." + echo "" + echo "******* Influxdb Database & metadata Backup ********" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_INFLUXDB}"/yearly_backup/influxdb/ --human-readable | grep -i "${SOURCE_NAME}"_influxdb_metdata_db | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/${S3_BUCKET_INFLUXDB}\/,,g" &>> /tmp/influxbackup.txt + echo "" + echo "************** Influxdb data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_INFLUXDB}"/yearly_backup/influxdb/ --human-readable | grep -i "${SOURCE_NAME}"_influxdb_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/${S3_BUCKET_INFLUXDB}\/,,g" &>> /tmp/influxbackup.txt + echo "" + echo "********************** END ********************* " + }>> /tmp/influxbackup.txt +else + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Influxdb Yearly backup" + echo "" + echo "STATUS: Influxdb Yearly backup failed." 
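+    # Every branch above appends to the shared report file /tmp/influxbackup.txt;
+    # failure branches mail it immediately, and the accumulated report is mailed
+    # once more near the end of the script before the file is removed.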
+ echo "" + echo "Something went wrong, please check it" + }>> /tmp/influxbackup.txt + < /tmp/influxbackup.txt mail -s "${SOURCE_NAME}: Influxdb backup" "${INFLUXDB_BACKUP_MAIL}" +fi +fi # Remove the old backup data in local directory to avoid excessive storage use find /var/lib/backup/influxdb/ -type f -exec rm {} \; find /var/lib/influxdb-backup/ -type f -exec rm {} \; < /tmp/influxbackup.txt mail -s "${SOURCE_NAME}: Influxdb backup" "${INFLUXDB_BACKUP_MAIL}" +###PRUNE### +rm /tmp/influxbackup.txt +# prune the old backup data in S3 bucket to avoid excessive storage use(Daily backup) +s3cmd ls -r s3://"${S3_BUCKET_INFLUXDB}"/influxdb/ | awk -v DEL="$(date +%F -d "31 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done + + +if [ "$(date -d +1day +%d)" -eq 01 ]; then +# prune the old backup data in S3 bucket to avoid excessive storage use(Monthly backup) +s3cmd ls -r s3://"${S3_BUCKET_INFLUXDB}"/monthly_backup/influxdb/ | awk -v DEL="$(date +%F -d "366 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done +fi diff --git a/influxdb/influxdb_cron.sh b/influxdb/influxdb_cron.sh deleted file mode 100644 index 4c13808..0000000 --- a/influxdb/influxdb_cron.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/sh - -# exit on unchecked errors -set -e - -# backups are scheduled via the root crontab. Start by heading there -cd /root - -#write out current crontab -crontab -l > mycron || echo "no crontab for root, going on" - -#echo new cron into cron file -echo "35 6 * * * /bin/bash -l -c '/bin/backup.sh'" >> mycron - -#delete duplicated lines -sort -u -o mycron mycron - -#install new cron file -crontab mycron diff --git a/influxdb/influxstart.sh b/influxdb/influxstart.sh new file mode 100644 index 0000000..5c222f5 --- /dev/null +++ b/influxdb/influxstart.sh @@ -0,0 +1,12 @@ +#!/bin/bash +while true +do + HOUR="$(date +'%H')" + MINUTE="$(date +'%M')" + + if [ "$HOUR" = "06" ] && [ "$MINUTE" = "35" ] + then + /bin/backup.sh + sleep 60 + fi + done \ No newline at end of file diff --git a/mqtts/Dockerfile b/mqtts/Dockerfile index 7d1544c..eee617e 100644 --- a/mqtts/Dockerfile +++ b/mqtts/Dockerfile @@ -3,8 +3,7 @@ # # Build the MQTTS using phusion base image -FROM phusion/baseimage:master-amd64 - +FROM phusion/baseimage:jammy-1.0.1 # Installing mosquitto packages and certbot RUN apt-add-repository ppa:mosquitto-dev/mosquitto-ppa RUN apt-get update && apt-get install -y \ diff --git a/nginx/Dockerfile b/nginx/Dockerfile index 8bcb60c..8eeff99 100644 --- a/nginx/Dockerfile +++ b/nginx/Dockerfile @@ -3,8 +3,8 @@ # # Start from Phusion. -FROM phusion/baseimage:master-amd64 +FROM phusion/baseimage:jammy-1.0.1 # Installing the required packages RUN apt-get update && apt-get install -y \ software-properties-common \ diff --git a/node-red/Dockerfile b/node-red/Dockerfile index a50c865..e985b67 100644 --- a/node-red/Dockerfile +++ b/node-red/Dockerfile @@ -5,7 +5,7 @@ # build the node red image using the offical node red distribution # passing arguments to build specific image ARG node_red_version -FROM nodered/node-red:${node_red_version} +FROM nodered/node-red:2.2.3 # To avoid SSL certification issue ENV NODE_TLS_REJECT_UNAUTHORIZED=0 @@ -35,4 +35,4 @@ COPY settings.js /usr/src/node-red/.node-red/ # change the startup command to be sure to use our settings. 
CMD ["npm", "start", "--", "--userDir", "/data", "--settings", "/usr/src/node-red/.node-red/settings.js"] -# end of file \ No newline at end of file +# end of file diff --git a/postfix/Dockerfile b/postfix/Dockerfile index d90a589..34abb9b 100644 --- a/postfix/Dockerfile +++ b/postfix/Dockerfile @@ -2,8 +2,8 @@ # Dockerfile for building POSTFIX # # Build the Postfix using phusion base image -FROM phusion/baseimage:master-amd64 +FROM phusion/baseimage:jammy-1.0.1 # some basic package installation for troubleshooting RUN apt-get update && apt-get install -y \ iputils-ping \
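A note on the prune idiom shared by the new backup scripts: each daily/monthly prune step relies on the fact that `s3cmd ls -r` prints one `DATE TIME SIZE URL` line per object and that ISO dates (`YYYY-MM-DD`) compare chronologically as plain strings. A minimal annotated sketch of the idiom, with `BUCKET`, `PREFIX`, and the 31-day cutoff standing in for the per-service values used above:

``` bash
#!/bin/bash
# Prune sketch: remove objects older than a cutoff (BUCKET/PREFIX are placeholders).
CUTOFF="$(date +%F -d "31 days ago")"   # ISO date, e.g. 2022-09-10

# awk compares the first column (object date) with the cutoff as strings,
# which works because YYYY-MM-DD sorts chronologically; column 4 is the URL.
s3cmd ls -r "s3://${BUCKET}/${PREFIX}/" |
  awk -v DEL="$CUTOFF" '$1 < DEL {print $4}' |
  while read -r file; do
    s3cmd rm "$file"
  done
```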