Build 13.0.3

This commit is contained in:
Eduardo Fraga
2023-08-23 07:33:42 -03:00
commit 95a1b068b1
70 changed files with 5420 additions and 0 deletions

docker/.gitignore vendored Normal file

@@ -0,0 +1,2 @@
working/*
etc/baculum/Config-web-apache/session.dump


@@ -0,0 +1,21 @@
FROM debian:11
ENV BACULA_VERSION 13.0.3
ENV DISTRO bullseye
ENV DEBIAN_FRONTEND noninteractive
# get your key at: https://www.bacula.org/bacula-binary-package-download/
ENV BACULA_KEY 5cee4d079821e
ENV EMAIL suporte@eftech.com.br
RUN apt update && \
apt -y install gnupg2 curl && \
curl https://bacula.org/downloads/Bacula-4096-Distribution-Verification-key.asc | apt-key add - && \
echo "deb https://www.bacula.org/packages/${BACULA_KEY}/debs/${BACULA_VERSION} ${DISTRO} main" > /etc/apt/sources.list.d/bacula-community.list && \
apt update && \
apt -y install dbconfig-pgsql bacula-postgresql
RUN for i in `ls /opt/bacula/bin`; do if ! test -e /usr/sbin/$i; then ln -s /opt/bacula/bin/$i /usr/sbin/$i; fi; done
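
A minimal build sketch for this base image, assuming it lives in docker/bacula-base/ as the build-style compose file later in this commit suggests (build: bacula-base/); the tag matches the FROM lines of the other Dockerfiles:
# run from the docker/ directory of this repository
docker build -t eftechcombr/bacula:13.0.3-base bacula-base/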


@@ -0,0 +1,6 @@
[Bacula-Community]
name=CentOS - Bacula - Community
baseurl=https://www.bacula.org/packages/BACULA_KEY/rpms/BACULA_VERSION/el7/
enabled=1
protect=0
gpgcheck=0


@@ -0,0 +1,32 @@
FROM eftechcombr/bacula:13.0.3-base AS base
FROM postgres:13.7
ENV POSTGRES_PASSWORD bacula
ENV POSTGRES_USER bacula
ENV POSTGRES_DB bacula
ENV POSTGRES_INITDB_ARGS '--encoding=SQL_ASCII --lc-collate=C --lc-ctype=C'
COPY --from=base /opt/bacula/scripts/make_postgresql_tables /docker-entrypoint-initdb.d/make_postgresql_tables
COPY --from=base /opt/bacula/scripts/grant_postgresql_privileges /docker-entrypoint-initdb.d/grant_postgresql_privileges
RUN { \
echo '#!/bin/bash'; \
echo 'sh /docker-entrypoint-initdb.d/make_postgresql_tables --username=$POSTGRES_USER'; \
echo 'sh /docker-entrypoint-initdb.d/grant_postgresql_privileges --username=$POSTGRES_USER'; \
} >> /docker-entrypoint-initdb.d/deploy_database.sh \
&& chmod +x /docker-entrypoint-initdb.d/deploy_database.sh \
&& chown postgres. /docker-entrypoint-initdb.d/deploy_database.sh
# COPY 01-make_postgresql_tables.sql 02-grant_postgresql_privileges.sql /docker-entrypoint-initdb.d/
RUN chown -R postgres. /docker-entrypoint-initdb.d/*
VOLUME ["/var/lib/postgresql/data"]
EXPOSE 5432/tcp
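
A hedged sketch of running the catalog image on its own; the environment variables and port mirror the db service in the compose files below:
docker run -d --name bacula-db \
  -e POSTGRES_PASSWORD=bacula -e POSTGRES_USER=bacula -e POSTGRES_DB=bacula \
  -p 5432:5432 \
  eftechcombr/bacula:13.0.3-catalog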


@@ -0,0 +1,9 @@
FROM eftechcombr/bacula:13.0.3-base
VOLUME ["/opt/bacula/etc"]
EXPOSE 9101/tcp
ENTRYPOINT ["/opt/bacula/bin/bacula-dir"]
CMD ["-f", "-c", "/opt/bacula/etc/bacula-dir.conf"]


@@ -0,0 +1,9 @@
FROM eftechcombr/bacula:13.0.3-base
VOLUME ["/opt/bacula/etc"]
EXPOSE 9102/tcp
ENTRYPOINT ["/opt/bacula/bin/bacula-fd"]
CMD ["-f", "-c", "/opt/bacula/etc/bacula-fd.conf"]


@@ -0,0 +1,21 @@
FROM eftechcombr/bacula:13.0.3-base
ENV USERNAME ${USERNAME}
ENV TOKEN ${TOKEN}
ENV ORG ${ORG}
ADD scripts/*.sh /usr/local/bin/
RUN chmod a+x /usr/local/bin/*.sh
RUN apt -y install git curl jq
VOLUME ["/opt/bacula/etc"]
EXPOSE 9102/tcp
ENTRYPOINT ["/opt/bacula/bin/bacula-fd"]
CMD ["-f", "-c", "/opt/bacula/etc/bacula-fd.conf"]


@@ -0,0 +1,26 @@
FROM eftechcombr/bacula:13.0.3-base
ENV AWS_S3_ACCESS_KEY_ID "${AWS_S3_ACCESS_KEY_ID}"
ENV AWS_S3_BUCKET "${AWS_S3_BUCKET}"
ENV AWS_S3_SECRET_ACCESS_KEY "${AWS_S3_SECRET_ACCESS_KEY}"
RUN apt -y install s3fs
RUN { \
echo '#!/bin/sh' ; \
echo ; \
echo 'echo ${AWS_S3_ACCESS_KEY_ID}:${AWS_S3_SECRET_ACCESS_KEY} > ${HOME}/.passwd-s3fs' ; \
echo 'chmod 0600 ${HOME}/.passwd-s3fs' ; \
echo 'mkdir -p /${AWS_S3_BUCKET}' ; \
echo 's3fs ${AWS_S3_BUCKET} /${AWS_S3_BUCKET} -o passwd_file=${HOME}/.passwd-s3fs' ; \
echo '/opt/bacula/bin/bacula-fd -f -c /opt/bacula/etc/bacula-fd.conf' ; \
echo ; \
} > /entrypoint.sh && chmod +x /entrypoint.sh
VOLUME ["/opt/bacula/etc"]
EXPOSE 9102/tcp
CMD ["/entrypoint.sh"]


@@ -0,0 +1,24 @@
#!/bin/bash
# Author: Sarav AK
# Email: hello@gritfy.com
# Created Date: 19 Aug 2021
# Updated by Eduardo Fraga <eduardo@eftech.com.br> on 27 Dec 2022
#
#
if [ -z "$USERNAME" ] || [ -z "$TOKEN" ] || [ -z "$ORG" ]; then
echo "Missing environment variables USERNAME, TOKEN, ORG"
exit 1;
fi
# Number of repositories per page - maximum limit is 100
PERPAGE=100
# Change the BASEURL to your Org or User based
# Org base URL
BASEURL="https://api.github.com/orgs/${ORG}/repos"
# User base URL
# BASEURL="https://api.github.com/users/<your_github_username>/repos"
curl -s -u $USERNAME:$TOKEN -H 'Accept: application/vnd.github.v3+json' "${BASEURL}?per_page=${PERPAGE}&page=1" 2>&1 | jq '.[].name' | tr -d '\\"'
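
A usage sketch (values are placeholders); note that with per_page=100 and page=1 hard-coded, organizations with more than 100 repositories would need a loop over the page parameter:
USERNAME=<user> TOKEN=<token> ORG=<org> /usr/local/bin/GitRepoList.sh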


@@ -0,0 +1,6 @@
#!/bin/bash
# for i in `/usr/local/bin/GitRepoList.sh`; do /usr/local/bin/backup-github.sh $i /github/$i.bundle || exit 1; done
for i in `/usr/local/bin/GitRepoList.sh`; do /usr/local/bin/backup-github.sh $i /github/$i.bundle; done


@@ -0,0 +1,31 @@
#!/bin/bash
# Updated by Eduardo Fraga <eduardo@eftech.com.br> on 27 Dec 2022
#
#
# Required: git
# Required environment variables above:
if [ -z "$USERNAME" ] || [ -z "$TOKEN" ] || [ -z "$ORG" ]; then
echo "Missing environment variables USERNAME, TOKEN, ORG"
exit 1;
fi
# Args
if [ -z $1 ] || [ -z $2 ]; then
echo "Required $0 <repo> <destdir>"
exit 2
fi
DIR=`mktemp -d`
cd $DIR
git clone https://$USERNAME:$TOKEN@github.com/$ORG/$1
cd $1
git bundle create $2 --all
git bundle verify $2 || exit 3
rm -rf $DIR
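
A usage sketch for a single repository (names are placeholders); a bundle is a self-contained clone source, so restoring it is an ordinary git clone:
USERNAME=<user> TOKEN=<token> ORG=<org> /usr/local/bin/backup-github.sh myrepo /github/myrepo.bundle
git clone /github/myrepo.bundle myrepo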


@@ -0,0 +1,11 @@
FROM eftechcombr/bacula:13.0.3-base
RUN apt -y install bacula-cloud-storage-common bacula-cloud-storage-s3 bacula-aligned
VOLUME ["/opt/bacula/etc"]
EXPOSE 9103/tcp
ENTRYPOINT ["/opt/bacula/bin/bacula-sd"]
CMD ["-f", "-c", "/opt/bacula/etc/bacula-sd.conf"]


@@ -0,0 +1,34 @@
[api]
auth_type = "basic"
debug = "0"
lang = "en"
[db]
enabled = "1"
type = "pgsql"
name = "bacula"
login = "bacula"
password = "bacula"
ip_addr = "db"
port = "5432"
path = ""
[bconsole]
enabled = "1"
bin_path = "/opt/bacula/bin/bconsole"
cfg_path = "/opt/bacula/etc/bconsole.conf"
use_sudo = "1"
[jsontools]
enabled = "1"
use_sudo = "1"
bconfig_dir = "/opt/bacula/etc"
bdirjson_path = "/opt/bacula/bin/bdirjson"
dir_cfg_path = "/opt/bacula/etc/bacula-dir.conf"
bsdjson_path = "/opt/bacula/bin/bsdjson"
sd_cfg_path = "/opt/bacula/etc/bacula-sd.conf"
bfdjson_path = "/opt/bacula/bin/bfdjson"
fd_cfg_path = "/opt/bacula/etc/bacula-fd.conf"
bbconsjson_path = "/opt/bacula/bin/bbconsjson"
bcons_cfg_path = "/opt/bacula/etc/bconsole.conf"


@@ -0,0 +1 @@
admin:YWG41BPzVAkN6


@@ -0,0 +1,38 @@
FROM eftechcombr/bacula:13.0.3-base
RUN curl https://www.bacula.org/downloads/baculum/baculum.pub | apt-key add -
COPY baculum.list /etc/apt/sources.list.d/baculum.list
RUN apt update && \
apt -y install \
php-bcmath \
php*-mbstring \
php-fpm \
baculum-api \
baculum-api-apache2 \
baculum-common \
bacula-console \
baculum-web \
baculum-web-apache2 \
supervisor
COPY sudoers-baculum /etc/sudoers.d/sudoers-baculum
RUN usermod -aG bacula www-data && \
chown -R www-data:bacula /opt/bacula/working /opt/bacula/etc && \
chmod -R g+rwx /opt/bacula/working /opt/bacula/etc && \
a2enmod rewrite && \
a2ensite baculum-api
COPY timezone.ini /etc/php.d/timezone.ini
COPY confs/supervisord.conf /etc/supervisord.conf
VOLUME ["/opt/bacula/etc","/etc/baculum"]
EXPOSE 9096/tcp
RUN mkdir -p /run/php
CMD ["/usr/bin/supervisord"]


@@ -0,0 +1,2 @@
deb [ arch=amd64 ] https://www.bacula.org/downloads/baculum/stable-11/debian bullseye main
deb-src https://www.bacula.org/downloads/baculum/stable-11/debian bullseye main


@@ -0,0 +1,31 @@
[unix_http_server]
file=/var/run/supervisor.sock ; the path to the socket file
[supervisord]
logfile=/var/log/supervisord.log
loglevel=info
pidfile=/var/run/supervisord.pid
nodaemon=true
logfile_backups=10
logfile_maxbytes=50MB
[rpcinterface:supervisor]
supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface
[supervisorctl]
serverurl=unix:///var/run/supervisor.sock
[program:php-fpm7.4]
command=/usr/sbin/php-fpm7.4 --nodaemonize -c /etc/php/7.4/fpm/php-fpm.conf
priority=50
autorestart=true
[program:apache2]
command=/usr/sbin/apache2ctl -D FOREGROUND
priority=50
autorestart=true


@@ -0,0 +1,16 @@
Defaults:apache !requiretty
www-data ALL=NOPASSWD: /usr/sbin/bconsole
www-data ALL=NOPASSWD: /usr/sbin/bdirjson
www-data ALL=NOPASSWD: /usr/sbin/bsdjson
www-data ALL=NOPASSWD: /usr/sbin/bfdjson
www-data ALL=NOPASSWD: /usr/sbin/bbconsjson
www-data ALL=(root) NOPASSWD: /usr/bin/systemctl start bacula-dir
www-data ALL=(root) NOPASSWD: /usr/bin/systemctl stop bacula-dir
www-data ALL=(root) NOPASSWD: /usr/bin/systemctl restart bacula-dir
www-data ALL=(root) NOPASSWD: /usr/bin/systemctl start bacula-sd
www-data ALL=(root) NOPASSWD: /usr/bin/systemctl stop bacula-sd
www-data ALL=(root) NOPASSWD: /usr/bin/systemctl restart bacula-sd
www-data ALL=(root) NOPASSWD: /usr/bin/systemctl start bacula-fd
www-data ALL=(root) NOPASSWD: /usr/bin/systemctl stop bacula-fd
www-data ALL=(root) NOPASSWD: /usr/bin/systemctl restart bacula-fd
www-data ALL=(root) NOPASSWD: /opt/bacula/bin/mtx-changer


@@ -0,0 +1,2 @@
[Date]
date.timezone = Etc/UTC ;


@@ -0,0 +1 @@
admin:YWG41BPzVAkN6


@@ -0,0 +1,13 @@
[Main]
auth_type = "basic"
login = "admin"
password = "admin"
client_id = ""
client_secret = ""
redirect_uri = ""
scope = ""
protocol = "http"
address = "baculum-api"
port = "9096"
url_prefix = ""


@@ -0,0 +1 @@
a:1:{s:11:"host_params";a:1:{i:0;a:12:{s:4:"host";s:9:"localhost";s:8:"protocol";s:4:"http";s:7:"address";s:9:"localhost";s:4:"port";s:4:"9096";s:10:"url_prefix";s:0:"";s:9:"auth_type";s:5:"basic";s:5:"login";s:5:"admin";s:8:"password";s:5:"admin";s:9:"client_id";N;s:13:"client_secret";N;s:12:"redirect_uri";N;s:5:"scope";N;}}}


@@ -0,0 +1,7 @@
[baculum]
login = "admin"
debug = "0"
lang = "en"
[users]


@@ -0,0 +1,42 @@
FROM eftechcombr/bacula:13.0.3-base
RUN curl https://www.bacula.org/downloads/baculum/baculum.pub | apt-key add -
COPY baculum.list /etc/apt/sources.list.d/baculum.list
RUN apt update && \
apt -y install \
php-bcmath \
php*-mbstring \
php-fpm \
libapache2-mod-php7.4 \
baculum-api \
baculum-api-apache2 \
baculum-common \
bacula-console \
baculum-web \
baculum-web-apache2 \
supervisor
COPY sudoers-baculum /etc/sudoers.d/sudoers-baculum
RUN usermod -aG bacula www-data && \
chown -R www-data:bacula /opt/bacula/working /opt/bacula/etc && \
chmod -R g+rwx /opt/bacula/working /opt/bacula/etc && \
a2enmod rewrite && \
a2enmod php7.4 && \
a2ensite baculum-web
COPY timezone.ini /etc/php.d/timezone.ini
COPY confs/supervisord.conf /etc/supervisord.conf
VOLUME ["/opt/bacula/etc","/etc/baculum"]
EXPOSE 9095/tcp
RUN mkdir -p /run/php
# RUN yum -y clean all && rm -rf /var/cache/yum
CMD ["/usr/bin/supervisord"]


@@ -0,0 +1,2 @@
deb [ arch=amd64 ] https://www.bacula.org/downloads/baculum/stable-11/debian bullseye main
deb-src https://www.bacula.org/downloads/baculum/stable-11/debian bullseye main


@@ -0,0 +1,31 @@
[unix_http_server]
file=/var/run/supervisor.sock ; the path to the socket file
[supervisord]
logfile=/var/log/supervisord.log
loglevel=info
pidfile=/var/run/supervisord.pid
nodaemon=true
logfile_backups=10
logfile_maxbytes=50MB
[rpcinterface:supervisor]
supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface
[supervisorctl]
serverurl=unix:///var/run/supervisor.sock
[program:php-fpm7.4]
command=/usr/sbin/php-fpm7.4 --nodaemonize -c /etc/php/7.4/fpm/php-fpm.conf
priority=50
autorestart=true
[program:apache2]
command=/usr/sbin/apache2ctl -D FOREGROUND
priority=50
autorestart=true


@@ -0,0 +1,16 @@
Defaults:apache !requiretty
www-data ALL=NOPASSWD: /usr/sbin/bconsole
www-data ALL=NOPASSWD: /usr/sbin/bdirjson
www-data ALL=NOPASSWD: /usr/sbin/bsdjson
www-data ALL=NOPASSWD: /usr/sbin/bfdjson
www-data ALL=NOPASSWD: /usr/sbin/bbconsjson
www-data ALL=(root) NOPASSWD: /usr/bin/systemctl start bacula-dir
www-data ALL=(root) NOPASSWD: /usr/bin/systemctl stop bacula-dir
www-data ALL=(root) NOPASSWD: /usr/bin/systemctl restart bacula-dir
www-data ALL=(root) NOPASSWD: /usr/bin/systemctl start bacula-sd
www-data ALL=(root) NOPASSWD: /usr/bin/systemctl stop bacula-sd
www-data ALL=(root) NOPASSWD: /usr/bin/systemctl restart bacula-sd
www-data ALL=(root) NOPASSWD: /usr/bin/systemctl start bacula-fd
www-data ALL=(root) NOPASSWD: /usr/bin/systemctl stop bacula-fd
www-data ALL=(root) NOPASSWD: /usr/bin/systemctl restart bacula-fd
www-data ALL=(root) NOPASSWD: /opt/bacula/bin/mtx-changer


@@ -0,0 +1,2 @@
[Date]
date.timezone = Etc/UTC ;


@@ -0,0 +1,150 @@
version: '3.1'
#
services:
base:
build: bacula-base/
image: eftechcombr/bacula:13.0.3-base
#
db:
build: bacula-catalog/
image: eftechcombr/bacula:13.0.3-catalog
restart: unless-stopped
environment:
POSTGRES_PASSWORD: bacula
POSTGRES_USER: bacula
POSTGRES_DB: bacula
# volumes:
# - pgdata:/var/lib/postgresql/data:rw
ports:
- 5432:5432
#
bacula-dir:
build: bacula-dir/
image: eftechcombr/bacula:13.0.3-director
restart: unless-stopped
volumes:
- ./etc:/opt/bacula/etc:ro
depends_on:
- db
ports:
- 9101:9101
#
bacula-sd:
build: bacula-sd/
image: eftechcombr/bacula:13.0.3-storage
restart: unless-stopped
depends_on:
- bacula-dir
- db
volumes:
- ./etc:/opt/bacula/etc:ro
ports:
- 9103:9103
#
bacula-fd:
build: bacula-fd/
image: eftechcombr/bacula:13.0.3-client
restart: unless-stopped
depends_on:
- bacula-sd
- bacula-dir
- db
volumes:
- ./etc:/opt/bacula/etc:ro
ports:
- 9102:9102
#
# bacula-fd-git:
# build:
# context: bacula-fd/
# dockerfile: Dockerfile-git
# image: eftechcombr/bacula:13.0.3-client-git
# restart: unless-stopped
# environment:
# USERNAME: <username>
# TOKEN: <token>
# ORG: <org>
# volumes:
# - ./etc:/opt/bacula/etc:ro
# ports:
# - 9202:9102
# bacula-fd-s3fs:
# build:
# context: bacula-fd/
# dockerfile: Dockerfile-s3fs
# image: eftechcombr/bacula:13.0.3-client-s3fs
# restart: unless-stopped
# cap_add:
# - SYS_ADMIN
# # security_opt:
# # - 'apparmor:unconfined'
# devices:
# - /dev/fuse
# # volumes:
# # - bucket:/opt/s3fs/bucket
# environment:
# AWS_S3_BUCKET: ${AWS_S3_BUCKET}
# AWS_S3_ACCESS_KEY_ID: ${AWS_S3_ACCESS_KEY_ID}
# AWS_S3_SECRET_ACCESS_KEY: ${AWS_S3_SECRET_ACCESS_KEY}
# volumes:
# - ./etc:/opt/bacula/etc:ro
# ports:
# - 9112:9102
#
baculum-api:
build: baculum-api/
image: eftechcombr/baculum:11.0.6-api
restart: unless-stopped
depends_on:
- db
- bacula-dir
volumes:
- ./etc/bconsole.conf:/opt/bacula/etc/bconsole.conf:ro
- ./etc/baculum:/etc/baculum:rw
ports:
- 9096:9096
#
baculum-web:
build: baculum-web/
image: eftechcombr/baculum:11.0.6-web
restart: unless-stopped
depends_on:
- baculum-api
volumes:
- ./etc/bconsole.conf:/opt/bacula/etc/bconsole.conf:ro
- ./etc/baculum:/etc/baculum:rw
ports:
- 9095:9095
#
#volumes:
# pgdata:
#
# gmail:
# image: eftechcombr/postfix:gmail
# restart: unless-stopped
# depends_on:
# - bacula-dir
# # ports:
# # - 30025:25
# environment:
# GMAIL_USER: xxxxxxxx
# GMAIL_PASS: xxxxxxxx
#
# smtp2tg:
# image: b3vis/docker-smtp2tg
# restart: unless-stopped
# volumes:
# - ./etc/smtp2tg.toml:/config/smtp2tg.toml:ro
# # ports:
# # - "31025:25"
# depends_on:
# - bacula-dir
#
#
# volumes:
# pgdata:
# bucket:

docker/docker-compose.yml Normal file

@@ -0,0 +1,101 @@
version: '3.1'
#
services:
db:
image: eftechcombr/bacula:13.0.3-catalog
restart: unless-stopped
environment:
POSTGRES_PASSWORD: bacula
POSTGRES_USER: bacula
POSTGRES_DB: bacula
# volumes:
# - pgdata:/var/lib/postgresql/data:rw
ports:
- 5432:5432
#
bacula-dir:
image: eftechcombr/bacula:13.0.3-director
restart: unless-stopped
volumes:
- ./etc:/opt/bacula/etc:ro
depends_on:
- db
ports:
- 9101:9101
#
bacula-sd:
image: eftechcombr/bacula:13.0.3-storage
restart: unless-stopped
depends_on:
- bacula-dir
- db
volumes:
- ./etc:/opt/bacula/etc:ro
ports:
- 9103:9103
#
bacula-fd:
image: eftechcombr/bacula:13.0.3-client
restart: unless-stopped
depends_on:
- bacula-sd
- bacula-dir
- db
volumes:
- ./etc:/opt/bacula/etc:ro
ports:
- 9102:9102
#
# baculum-api:
# image: eftechcombr/baculum:11.0.6-api
# restart: unless-stopped
# depends_on:
# - db
# - bacula-dir
# volumes:
# - ./etc/bconsole.conf:/opt/bacula/etc/bconsole.conf:ro
# - ./etc/baculum:/etc/baculum:rw
# ports:
# - 9096:9096
# #
# baculum-web:
# image: eftechcombr/baculum:11.0.6-web
# restart: unless-stopped
# depends_on:
# - baculum-api
# volumes:
# - ./etc/bconsole.conf:/opt/bacula/etc/bconsole.conf:ro
# - ./etc/baculum:/etc/baculum:rw
# ports:
# - 9095:9095
#
#volumes:
# pgdata:
#
# gmail:
# image: eftechcombr/postfix:gmail
# restart: unless-stopped
# depends_on:
# - bacula-dir
# # ports:
# # - 30025:25
# environment:
# GMAIL_USER: xxxxxxxx
# GMAIL_PASS: xxxxxxxx
#
# smtp2tg:
# image: b3vis/docker-smtp2tg
# restart: unless-stopped
# volumes:
# - ./etc/smtp2tg.toml:/config/smtp2tg.toml:ro
# # ports:
# # - "31025:25"
# depends_on:
# - bacula-dir
#
#
#volumes:
# pgdata:
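
A minimal sketch of bringing the stack up with this compose file, run from the docker/ directory:
docker compose up -d
docker compose ps
docker compose logs -f bacula-dir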


@@ -0,0 +1,72 @@
# bacula-dir-cloud.conf
#
# JobDefs
# Job
# Restore
# Pool
# Autochanger
#
# Template to store in cloud
JobDefs {
Name = "DefaultJobToCloudAWS"
Type = Backup
Level = Incremental
Client = bacula-fd
FileSet = "Full Set"
Schedule = "WeeklyCycle"
Storage = "CloudS3AWS"
Messages = Standard
Pool = CloudAWS
SpoolAttributes = yes
Priority = 10
Write Bootstrap = "/opt/bacula/working/%c.bsr"
}
# Jobs
Job {
Name = "BackupClient1ToCloudAWS"
JobDefs = "DefaultJobToCloudAWS"
}
# Restore
Job {
Name = "RestoreFromCloudAWS"
Type = Restore
Client=bacula-fd
Storage = CloudS3AWS
FileSet="Full Set"
Pool = CloudAWS
Messages = Standard
Where = /tmp/bacula-restores
}
# Cloud Pool definition
Pool {
Name = CloudAWS
Pool Type = Backup
Recycle = no # Bacula can automatically recycle Volumes
AutoPrune = yes # Prune expired volumes
Volume Retention = 365 days # one year
Maximum Volume Jobs = 1 #
# Maximum Volume Bytes = 100M # Limit Volume size to something reasonable
Label Format = "Vol-JobId-${JobId}" # Auto label
}
# Autochanger definition
Autochanger {
Name = "CloudS3AWS"
# Do not use "localhost" here
Address = bacula-sd # N.B. Use a fully qualified name here
SDPort = 9103
Password = "TS8EQJ99iLFSK39oJy33YqkZ98v4ZapjRcA+j1N6ED1n"
Device = "CloudAutoChangerS3"
Media Type = "CloudType"
Maximum Concurrent Jobs = 10 # run up to 10 jobs at the same time
}
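
A hedged sketch of exercising these cloud resources from bconsole inside the director container (resource names as defined above):
docker compose exec -T bacula-dir /opt/bacula/bin/bconsole <<'EOF'
run job=BackupClient1ToCloudAWS yes
list volumes pool=CloudAWS
EOF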


@@ -0,0 +1,96 @@
# bacula-dir-cloud.conf
#
# JobDefs
# Job
# Restore
# Pool
# Autochanger
#
# Template to store in cloud
JobDefs {
Name = "DefaultJobToCloud"
Type = Backup
Level = Incremental
Client = bacula-fd
FileSet = "Full Set"
Schedule = "WeeklyCycle"
Storage = "CloudS3"
Messages = Standard
Pool = Cloud
SpoolAttributes = yes
Priority = 10
Write Bootstrap = "/opt/bacula/working/%c.bsr"
}
# Jobs
Job {
Name = "BackupClient1ToCloud"
JobDefs = "DefaultJobToCloud"
}
# Restore
Job {
Name = "RestoreFromCloud"
Type = Restore
Client=bacula-fd
Storage = CloudS3
FileSet="Full Set"
Pool = Cloud
Messages = Standard
Where = /tmp/bacula-restores
}
# Cloud Pool definition
Pool {
Name = Cloud
Pool Type = Backup
Recycle = no # Bacula can automatically recycle Volumes
AutoPrune = yes # Prune expired volumes
Volume Retention = 365 days # one year
Maximum Volume Jobs = 1 #
# Maximum Volume Bytes = 100M # Limit Volume size to something reasonable
Label Format = "Vol-JobId-${JobId}" # Auto label
}
# Autochanger definition
Autochanger {
Name = "CloudS3"
# Do not use "localhost" here
Address = bacula-sd # N.B. Use a fully qualified name here
SDPort = 9103
Password = "TS8EQJ99iLFSK39oJy33YqkZ98v4ZapjRcA+j1N6ED1n"
Device = "CloudAutoChanger1"
Media Type = "CloudType"
Maximum Concurrent Jobs = 10 # run up to 10 jobs at the same time
}
#
#Autochanger {
# Name = "CloudS3-2"
## Do not use "localhost" here
# Address = bacula-sd # N.B. Use a fully qualified name here
# SDPort = 9103
# Password = "TS8EQJ99iLFSK39oJy33YqkZ98v4ZapjRcA+j1N6ED1n"
# Device = "CloudAutoChanger2"
# Media Type = "CloudType"
# Maximum Concurrent Jobs = 10 # run up to 10 jobs at the same time
#}
#
#Autochanger {
# Name = "CloudS3-3"
## Do not use "localhost" here
# Address = bacula-sd # N.B. Use a fully qualified name here
# SDPort = 9103
# Password = "TS8EQJ99iLFSK39oJy33YqkZ98v4ZapjRcA+j1N6ED1n"
# Device = "CloudAutoChanger3"
# Media Type = "CloudType"
# Maximum Concurrent Jobs = 10 # run up to 10 jobs at the same time
#}
#

docker/etc/bacula-dir.conf Executable file

@@ -0,0 +1,354 @@
#
# Default Bacula Director Configuration file
#
# The only thing that MUST be changed is to add one or more
# file or directory names in the Include directive of the
# FileSet resource.
#
# For Bacula release 9.4.4 (28 May 2019) -- redhat Enterprise release
#
# You might also want to change the default email address
# from root to your address. See the "mail" and "operator"
# directives in the Messages resource.
#
# Copyright (C) 2000-2017 Kern Sibbald
# License: BSD 2-Clause; see file LICENSE-FOSS
#
Director { # define myself
Name = bacula-dir
DIRport = 9101 # where we listen for UA connections
QueryFile = "/opt/bacula/scripts/query.sql"
WorkingDirectory = "/opt/bacula/working"
PidDirectory = "/opt/bacula/working"
Maximum Concurrent Jobs = 20
Password = "XDnaVZYU9F4QhqUGMPxiOXsJaji23mNG3FaAM9Z2q1c/" # Console password
Messages = Daemon
}
JobDefs {
Name = "DefaultJob"
Type = Backup
Level = Incremental
Client = bacula-fd
FileSet = "Full Set"
Schedule = "WeeklyCycle"
Storage = File1
Messages = Standard
Pool = File
SpoolAttributes = yes
Priority = 10
Write Bootstrap = "/opt/bacula/working/%c.bsr"
}
#
# Define the main nightly save backup job
# By default, this job will back up to disk in /tmp
Job {
Name = "BackupClient1"
JobDefs = "DefaultJob"
}
#Job {
# Name = "BackupClient2"
# Client = bacula2-fd
# JobDefs = "DefaultJob"
#}
#Job {
# Name = "BackupClient1-to-Tape"
# JobDefs = "DefaultJob"
# Storage = LTO-4
# Spool Data = yes # Avoid shoe-shine
# Pool = Default
#}
#}
# Backup the catalog database (after the nightly save)
Job {
Name = "BackupCatalog"
JobDefs = "DefaultJob"
Level = Full
FileSet="Catalog"
Schedule = "WeeklyCycleAfterBackup"
# This creates an ASCII copy of the catalog
# Arguments to make_catalog_backup.pl are:
# make_catalog_backup.pl <catalog-name>
ClientRunBeforeJob = "/opt/bacula/scripts/make_catalog_backup.pl MyCatalog"
# This deletes the copy of the catalog
ClientRunAfterJob = "/opt/bacula/scripts/delete_catalog_backup"
Write Bootstrap = "/opt/bacula/working/%n.bsr"
Priority = 11 # run after main backup
}
#
# Standard Restore template, to be changed by Console program
# Only one such job is needed for all Jobs/Clients/Storage ...
#
Job {
Name = "RestoreFiles"
Type = Restore
Client=bacula-fd
Storage = File1
# The FileSet and Pool directives are not used by Restore Jobs
# but must not be removed
FileSet="Full Set"
Pool = File
Messages = Standard
Where = /tmp/bacula-restores
}
# List of files to be backed up
FileSet {
Name = "Full Set"
Include {
Options {
signature = MD5
}
#
# Put your list of files here, preceded by 'File =', one per line
# or include an external list with:
#
# File = <file-name
#
# Note: / backs up everything on the root partition.
# if you have other partitions such as /usr or /home
# you will probably want to add them too.
#
# By default this is defined to point to the Bacula binary
# directory to give a reasonable FileSet to backup to
# disk storage during initial testing.
#
File = /opt/bacula/bin
File = /opt/bacula
File = /opt/bacula/etc
}
#
# If you backup the root directory, the following two excluded
# files can be useful
#
Exclude {
File = /opt/bacula/working
File = /tmp
File = /proc
File = /tmp
File = /sys
File = /.journal
File = /.fsck
}
}
#
# When to do the backups, full backup on first sunday of the month,
# differential (i.e. incremental since full) every other sunday,
# and incremental backups other days
Schedule {
Name = "WeeklyCycle"
Run = Full 1st sun at 23:05
Run = Differential 2nd-5th sun at 23:05
Run = Incremental mon-sat at 23:05
}
# This schedule does the catalog. It starts after the WeeklyCycle
Schedule {
Name = "WeeklyCycleAfterBackup"
Run = Full sun-sat at 23:10
}
# This is the backup of the catalog
FileSet {
Name = "Catalog"
Include {
Options {
signature = MD5
}
File = "/opt/bacula/working/bacula.sql"
}
}
# Client (File Services) to backup
Client {
Name = bacula-fd
# Address = bacula-standalone
Address = bacula-fd
FDPort = 9102
Catalog = MyCatalog
Password = "eso80TrxzhXkRgaQVI6ZYrSzAZ4E9KFNp0Y+T1HHVWBi" # password for FileDaemon
File Retention = 60 days # 60 days
Job Retention = 6 months # six months
AutoPrune = yes # Prune expired Jobs/Files
}
#
# Second Client (File Services) to backup
# You should change Name, Address, and Password before using
#
#Client {
# Name = bacula2-fd
# Address = bacula2
# FDPort = 9102
# Catalog = MyCatalog
# Password = "eso80TrxzhXkRgaQVI6ZYrSzAZ4E9KFNp0Y+T1HHVWBi2" # password for FileDaemon 2
# File Retention = 60 days # 60 days
# Job Retention = 6 months # six months
# AutoPrune = yes # Prune expired Jobs/Files
#}
# Definition of file Virtual Autochanger device
Autochanger {
Name = File1
# Do not use "localhost" here
# Address = bacula-standalone
Address = bacula-sd # N.B. Use a fully qualified name here
SDPort = 9103
Password = "TS8EQJ99iLFSK39oJy33YqkZ98v4ZapjRcA+j1N6ED1n"
Device = FileChgr1
Media Type = File1
Maximum Concurrent Jobs = 10 # run up to 10 jobs at the same time
Autochanger = File1 # point to ourself
}
# Definition of a second file Virtual Autochanger device
# Possibly pointing to a different disk drive
Autochanger {
Name = File2
# Do not use "localhost" here
# Address = bacula-standalone # N.B. Use a fully qualified name here
Address = bacula-sd # N.B. Use a fully qualified name here
SDPort = 9103
Password = "TS8EQJ99iLFSK39oJy33YqkZ98v4ZapjRcA+j1N6ED1n"
Device = FileChgr2
Media Type = File2
Autochanger = File2 # point to ourself
Maximum Concurrent Jobs = 10 # run up to 10 jobs at the same time
}
# Definition of LTO-4 tape Autochanger device
#Autochanger {
# Name = LTO-4
# Do not use "localhost" here
# Address = bacula # N.B. Use a fully qualified name here
# SDPort = 9103
# Password = "TS8EQJ99iLFSK39oJy33YqkZ98v4ZapjRcA+j1N6ED1n" # password for Storage daemon
# Device = LTO-4 # must be same as Device in Storage daemon
# Media Type = LTO-4 # must be same as MediaType in Storage daemon
# Autochanger = LTO-4 # enable for autochanger device
# Maximum Concurrent Jobs = 10
#}
# Generic catalog service
Catalog {
Name = MyCatalog
dbname = "bacula"
dbuser = "bacula"
dbpassword = "bacula"
DB address = "db"
}
# Reasonable message delivery -- send most everything to email address
# and to the console
Messages {
Name = Standard
#
# NOTE! If you send to two email or more email addresses, you will need
# to replace the %r in the from field (-f part) with a single valid
# email address in both the mailcommand and the operatorcommand.
# What this does is, it sets the email address that emails would display
# in the FROM field, which is by default the same email as they're being
# sent to. However, if you send email to more than one address, then
# you'll have to set the FROM address manually, to a single address.
# for example, a 'no-reply@mydomain.com', is better since that tends to
# tell (most) people that it's coming from an automated source.
#
# mailcommand = "/opt/bacula/bin/bsmtp -h localhost -f \"\(Bacula\) \<%r\>\" -s \"Bacula: %t %e of %c %l\" %r"
# operatorcommand = "/opt/bacula/bin/bsmtp -h localhost -f \"\(Bacula\) \<%r\>\" -s \"Bacula: Intervention needed for %j\" %r"
# mail = root@localhost = all, !skipped
operator = root@localhost = mount
console = all, !skipped, !saved
#
# WARNING! the following will create a file that you must cycle from
# time to time as it will grow indefinitely. However, it will
# also keep all your messages if they scroll off the console.
#
# append = "/opt/bacula/log/bacula.log" = all, !skipped
stdout = all, !skipped
catalog = all
# Telegram
# mailcommand = "/opt/bacula/bin/bsmtp -h smtp2tg -f \"\(Bacula\) \<%r\>\" -s \"Bacula: %t %e of %c %l\" %r"
# mail = eduardo@smtp2tg = all, !skipped
}
#
# Message delivery for daemon messages (no job).
Messages {
Name = Daemon
# mailcommand = "/opt/bacula/bin/bsmtp -h localhost -f \"\(Bacula\) \<%r\>\" -s \"Bacula daemon message\" %r"
# mail = root@localhost = all, !skipped
console = all, !skipped, !saved
stdout = all, !skipped
# append = "/opt/bacula/log/bacula.log" = all, !skipped
# Telegram
# mailcommand = "/opt/bacula/bin/bsmtp -h smtp2tg -f \"\(Bacula\) \<%r\>\" -s \"Bacula: %t %e of %c %l\" %r"
# mail = eduardo@smtp2tg = all, !skipped
}
# Default pool definition
Pool {
Name = Default
Pool Type = Backup
Recycle = yes # Bacula can automatically recycle Volumes
AutoPrune = yes # Prune expired volumes
Volume Retention = 365 days # one year
Maximum Volume Bytes = 50G # Limit Volume size to something reasonable
Maximum Volumes = 100 # Limit number of Volumes in Pool
}
# File Pool definition
Pool {
Name = File
Pool Type = Backup
Recycle = yes # Bacula can automatically recycle Volumes
AutoPrune = yes # Prune expired volumes
Volume Retention = 365 days # one year
Maximum Volume Bytes = 50G # Limit Volume size to something reasonable
Maximum Volumes = 100 # Limit number of Volumes in Pool
Label Format = "Vol-" # Auto label
}
# Scratch pool definition
Pool {
Name = Scratch
Pool Type = Backup
}
#
# Restricted console used by tray-monitor to get the status of the director
#
Console {
Name = bacula-mon
Password = "r0V/Hx0TUwQ4TlnX1lyUHf8J8v9XvRBqnHTRW9+CB614"
CommandACL = status, .status
}
# Include bacula-dir-cloud.conf for Wasabi cloud provider
# @/opt/bacula/etc/bacula-dir-cloud.conf
# Include bacula-dir-cloud-aws.conf for AWS S3 cloud provider
@/opt/bacula/etc/bacula-dir-cloud-aws.conf
# Include subfiles associated with configuration of clients.
# # They define the bulk of the Clients, Jobs, and FileSets.
# # Remember to "reload" the Director after adding a client file.
# @|"sh -c 'for f in /opt/bacula/etc/clientdefs/*.conf ; do echo @${f} ; done'"

docker/etc/bacula-fd.conf Executable file

@@ -0,0 +1,48 @@
#
# Default Bacula File Daemon Configuration file
#
# For Bacula release 9.4.4 (28 May 2019) -- redhat Enterprise release
#
# There is not much to change here except perhaps the
# File daemon Name to
#
#
# Copyright (C) 2000-2015 Kern Sibbald
# License: BSD 2-Clause; see file LICENSE-FOSS
#
#
# List Directors who are permitted to contact this File daemon
#
Director {
Name = bacula-dir
Password = "eso80TrxzhXkRgaQVI6ZYrSzAZ4E9KFNp0Y+T1HHVWBi"
}
#
# Restricted Director, used by tray-monitor to get the
# status of the file daemon
#
Director {
Name = bacula-mon
Password = "nm6na6cCh3NymDV6JteWL0Fir71A5uhrdRjmnRKjnHn5"
Monitor = yes
}
#
# "Global" File daemon configuration specifications
#
FileDaemon { # this is me
Name = bacula-fd
FDport = 9102 # where we listen for the director
WorkingDirectory = /opt/bacula/working
Pid Directory = /opt/bacula/working
Maximum Concurrent Jobs = 20
Plugin Directory = /opt/bacula/plugins
}
# Send all messages except skipped files back to Director
Messages {
Name = Standard
director = bacula-dir = all, !skipped, !restored
}
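
A quick sketch of confirming the Director can reach this file daemon, using the client name defined in bacula-dir.conf:
docker compose exec -T bacula-dir /opt/bacula/bin/bconsole <<'EOF'
status client=bacula-fd
EOF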

docker/etc/bacula-sd-s3.conf Executable file

@@ -0,0 +1,50 @@
# Define a virtual autochanger for AWS S3
#
# Change AccessKey and SecretKey on Cloud resource
#
# Autochangers
Autochanger {
Name = "CloudAutoChangerS3"
Device = CloudStorageS3
Changer Command = ""
Changer Device = /dev/null
}
# Devices
Device {
Name = "CloudStorageS3"
Device Type = "Cloud"
Cloud = "S3-cloud-us-west-2"
Maximum Part Size = 2M
Maximum File Size = 2M
Media Type = "CloudType"
Archive Device = "/tmp"
LabelMedia = yes
Random Access = yes
AutomaticMount = yes
RemovableMedia = no
AlwaysOpen = no
}
# Cloud providers
# Hostname see https://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region
Cloud {
Name = "S3-cloud-us-west-2"
Driver = "S3"
HostName = "s3.us-west-2.amazonaws.com"
BucketName = "eftechcombr-bacula"
AccessKey = "AKIA33MZDCX3OLN7A6DD"
SecretKey = "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"
Protocol = HTTPS
UriStyle = "VirtualHost"
Truncate Cache = "AfterUpload"
Upload = "EachPart"
Region = "us-west-2"
MaximumUploadBandwidth = 10MB/s
}
#
#

docker/etc/bacula-sd-wasabi.conf Executable file

@@ -0,0 +1,129 @@
# Define a virtual autochanger for Wasabi Cloud
#
# CloudStorage1 => us-east-2
# CloudStorage2 => eu-central-1
# CloudStorage3 => us-west-1
#
# Change AccessKey and SecretKey on Cloud resource
#
# Autochangers
Autochanger {
Name = "CloudAutoChanger1"
Device = CloudStorage1
Changer Command = ""
Changer Device = /dev/null
}
#
#Autochanger {
# Name = "CloudAutoChanger2"
# Device = CloudStorage2
# Changer Command = ""
# Changer Device = /dev/null
#}
#
#Autochanger {
# Name = "CloudAutoChanger3"
# Device = CloudStorage3
# Changer Command = ""
# Changer Device = /dev/null
#}
#
# Devices
Device {
Name = "CloudStorage1"
Device Type = "Cloud"
Cloud = "WasabiS3-cloud-us-east-2"
Maximum Part Size = 2M
Maximum File Size = 2M
Media Type = "CloudType"
Archive Device = "/tmp"
LabelMedia = yes
Random Access = yes
AutomaticMount = yes
RemovableMedia = no
AlwaysOpen = no
}
#
#Device {
# Name = "CloudStorage2"
# Device Type = "Cloud"
# Cloud = "WasabiS3-cloud-eu-central-1"
# Maximum Part Size = 2M
# Maximum File Size = 2M
# Media Type = "CloudType"
# Archive Device = "/tmp"
# LabelMedia = yes
# Random Access = yes
# AutomaticMount = yes
# RemovableMedia = no
# AlwaysOpen = no
#}
#
#
#Device {
# Name = "CloudStorage3"
# Device Type = "Cloud"
# Cloud = "WasabiS3-cloud-us-west-1"
# Maximum Part Size = 2M
# Maximum File Size = 2M
# Media Type = "CloudType"
# Archive Device = "/tmp"
# LabelMedia = yes
# Random Access = yes
# AutomaticMount = yes
# RemovableMedia = no
# AlwaysOpen = no
#}
#
# Cloud providers
Cloud {
Name = "WasabiS3-cloud-us-east-2"
Driver = "S3"
HostName = "s3.us-east-2.wasabisys.com"
BucketName = "eftechcombr-backup"
AccessKey = "ABC"
SecretKey = "DEF"
Protocol = HTTPS
UriStyle = "VirtualHost"
Truncate Cache = "AfterUpload"
Upload = "EachPart"
MaximumUploadBandwidth = 10MB/s
}
#
#Cloud {
# Name = "WasabiS3-cloud-eu-central-1"
# Driver = "S3"
# HostName = "s3.eu-central-1.wasabisys.com"
# BucketName = "eftechcombr-backup2"
# AccessKey = "ABC"
# SecretKey = "DEF"
# Protocol = HTTPS
# UriStyle = "VirtualHost"
# Truncate Cache = "AfterUpload"
# Upload = "EachPart"
# MaximumUploadBandwidth = 10MB/s
#}
#Cloud {
# Name = "WasabiS3-cloud-us-west-1"
# Driver = "S3"
# HostName = "s3.us-west-1.wasabisys.com"
# BucketName = "eftechcombr-backup3"
# AccessKey = "ABC"
# SecretKey = "DEF"
# Protocol = HTTPS
# UriStyle = "VirtualHost"
# Truncate Cache = "AfterUpload"
# Upload = "EachPart"
# MaximumUploadBandwidth = 10MB/s
#}
#

docker/etc/bacula-sd.conf Executable file

@@ -0,0 +1,344 @@
#
# Default Bacula Storage Daemon Configuration file
#
# For Bacula release 9.4.4 (28 May 2019) -- redhat Enterprise release
#
# You may need to change the name of your tape drive
# on the "Archive Device" directive in the Device
# resource. If you change the Name and/or the
# "Media Type" in the Device resource, please ensure
# that dird.conf has corresponding changes.
#
#
# Copyright (C) 2000-2017 Kern Sibbald
# License: BSD 2-Clause; see file LICENSE-FOSS
#
Storage { # definition of myself
Name = bacula-sd
SDPort = 9103 # Director's port
WorkingDirectory = "/opt/bacula/working"
Pid Directory = "/opt/bacula/working"
Plugin Directory = "/opt/bacula/plugins"
Maximum Concurrent Jobs = 20
}
#
# List Directors who are permitted to contact Storage daemon
#
Director {
Name = bacula-dir
Password = "TS8EQJ99iLFSK39oJy33YqkZ98v4ZapjRcA+j1N6ED1n"
}
#
# Restricted Director, used by tray-monitor to get the
# status of the storage daemon
#
Director {
Name = bacula-mon
Password = "5p+emSGBrRv7sdsOJjlXxOjIDIzvivTLzY8ywWCjz02x"
Monitor = yes
}
#
# Note, for a list of additional Device templates please
# see the directory <bacula-source>/examples/devices
# Or follow the following link:
# http://www.bacula.org/git/cgit.cgi/bacula/tree/bacula/examples/devices?h=Branch-7.4
#
#
# Devices supported by this Storage daemon
# To connect, the Director's bacula-dir.conf must have the
# same Name and MediaType.
#
#
# Define a Virtual autochanger
#
Autochanger {
Name = FileChgr1
Device = FileChgr1-Dev1, FileChgr1-Dev2
Changer Command = ""
Changer Device = /dev/null
}
Device {
Name = FileChgr1-Dev1
Media Type = File1
Archive Device = /tmp
LabelMedia = yes; # lets Bacula label unlabeled media
Random Access = Yes;
AutomaticMount = yes; # when device opened, read it
RemovableMedia = no;
AlwaysOpen = no;
Maximum Concurrent Jobs = 5
}
Device {
Name = FileChgr1-Dev2
Media Type = File1
Archive Device = /tmp
LabelMedia = yes; # lets Bacula label unlabeled media
Random Access = Yes;
AutomaticMount = yes; # when device opened, read it
RemovableMedia = no;
AlwaysOpen = no;
Maximum Concurrent Jobs = 5
}
#
# Define a second Virtual autochanger
#
Autochanger {
Name = FileChgr2
Device = FileChgr2-Dev1, FileChgr2-Dev2
Changer Command = ""
Changer Device = /dev/null
}
Device {
Name = FileChgr2-Dev1
Media Type = File2
Archive Device = /tmp
LabelMedia = yes; # lets Bacula label unlabeled media
Random Access = Yes;
AutomaticMount = yes; # when device opened, read it
RemovableMedia = no;
AlwaysOpen = no;
Maximum Concurrent Jobs = 5
}
Device {
Name = FileChgr2-Dev2
Media Type = File2
Archive Device = /tmp
LabelMedia = yes; # lets Bacula label unlabeled media
Random Access = Yes;
AutomaticMount = yes; # when device opened, read it
RemovableMedia = no;
AlwaysOpen = no;
Maximum Concurrent Jobs = 5
}
#
# An autochanger device with two drives
#
#Autochanger {
# Name = Autochanger
# Device = Drive-1
# Device = Drive-2
# Changer Command = "/opt/bacula/scripts/mtx-changer %c %o %S %a %d"
# Changer Device = /dev/sg0
#}
#Device {
# Name = Drive-1 #
# Drive Index = 0
# Media Type = DLT-8000
# Archive Device = /dev/nst0
# AutomaticMount = yes; # when device opened, read it
# AlwaysOpen = yes;
# RemovableMedia = yes;
# RandomAccess = no;
# AutoChanger = yes
# #
# # New alert command in Bacula 9.0.0
# # Note: you must have the sg3_utils (rpms) or the
# # sg3-utils (deb) installed on your system.
# # and you must set the correct control device that
# # corresponds to the Archive Device
# Control Device = /dev/sg?? # must be SCSI ctl for /dev/nst0
# Alert Command = "/opt/bacula/scripts/tapealert %l"
#
# #
# # Enable the Alert command only if you have the mtx package loaded
# # Note, apparently on some systems, tapeinfo resets the SCSI controller
# # thus if you turn this on, make sure it does not reset your SCSI
# # controller. I have never had any problems, and smartctl does
# # not seem to cause such problems.
# #
# Alert Command = "sh -c 'tapeinfo -f %c |grep TapeAlert|cat'"
# If you have smartctl, enable this, it has more info than tapeinfo
# Alert Command = "sh -c 'smartctl -H -l error %c'"
#}
#Device {
# Name = Drive-2 #
# Drive Index = 1
# Media Type = DLT-8000
# Archive Device = /dev/nst1
# AutomaticMount = yes; # when device opened, read it
# AlwaysOpen = yes;
# RemovableMedia = yes;
# RandomAccess = no;
# AutoChanger = yes
# # Enable the Alert command only if you have the mtx package loaded
# Alert Command = "sh -c 'tapeinfo -f %c |grep TapeAlert|cat'"
# If you have smartctl, enable this, it has more info than tapeinfo
# Alert Command = "sh -c 'smartctl -H -l error %c'"
#}
#
# A Linux or Solaris LTO-2 tape drive
#
#Device {
# Name = LTO-2
# Media Type = LTO-2
# Archive Device = /dev/nst0
# AutomaticMount = yes; # when device opened, read it
# AlwaysOpen = yes;
# RemovableMedia = yes;
# RandomAccess = no;
# Maximum File Size = 3GB
## Changer Command = "/opt/bacula/scripts/mtx-changer %c %o %S %a %d"
## Changer Device = /dev/sg0
## AutoChanger = yes
# # Enable the Alert command only if you have the mtx package loaded
## Alert Command = "sh -c 'tapeinfo -f %c |grep TapeAlert|cat'"
## If you have smartctl, enable this, it has more info than tapeinfo
## Alert Command = "sh -c 'smartctl -H -l error %c'"
#}
#
# A Linux or Solaris LTO-3 tape drive
#
#Device {
# Name = LTO-3
# Media Type = LTO-3
# Archive Device = /dev/nst0
# AutomaticMount = yes; # when device opened, read it
# AlwaysOpen = yes;
# RemovableMedia = yes;
# RandomAccess = no;
# Maximum File Size = 4GB
# Changer Command = "/opt/bacula/scripts/mtx-changer %c %o %S %a %d"
# Changer Device = /dev/sg0
# AutoChanger = yes
# #
# # New alert command in Bacula 9.0.0
# # Note: you must have the sg3_utils (rpms) or the
# # sg3-utils (deb) installed on your system.
# # and you must set the correct control device that
# # corresponds to the Archive Device
# Control Device = /dev/sg?? # must be SCSI ctl for /dev/nst0
# Alert Command = "/opt/bacula/scripts/tapealert %l"
#
# # Enable the Alert command only if you have the mtx package loaded
## Alert Command = "sh -c 'tapeinfo -f %c |grep TapeAlert|cat'"
## If you have smartctl, enable this, it has more info than tapeinfo
## Alert Command = "sh -c 'smartctl -H -l error %c'"
#}
#
# A Linux or Solaris LTO-4 tape drive
#
#Device {
# Name = LTO-4
# Media Type = LTO-4
# Archive Device = /dev/nst0
# AutomaticMount = yes; # when device opened, read it
# AlwaysOpen = yes;
# RemovableMedia = yes;
# RandomAccess = no;
# Maximum File Size = 5GB
# Changer Command = "/opt/bacula/scripts/mtx-changer %c %o %S %a %d"
# Changer Device = /dev/sg0
# AutoChanger = yes
# #
# # New alert command in Bacula 9.0.0
# # Note: you must have the sg3_utils (rpms) or the
# # sg3-utils (deb) installed on your system.
# # and you must set the correct control device that
# # corresponds to the Archive Device
# Control Device = /dev/sg?? # must be SCSI ctl for /dev/nst0
# Alert Command = "/opt/bacula/scripts/tapealert %l"
#
# # Enable the Alert command only if you have the mtx package loaded
## Alert Command = "sh -c 'tapeinfo -f %c |grep TapeAlert|cat'"
## If you have smartctl, enable this, it has more info than tapeinfo
## Alert Command = "sh -c 'smartctl -H -l error %c'"
#}
#
# An HP-UX tape drive
#
#Device {
# Name = Drive-1 #
# Drive Index = 0
# Media Type = DLT-8000
# Archive Device = /dev/rmt/1mnb
# AutomaticMount = yes; # when device opened, read it
# AlwaysOpen = yes;
# RemovableMedia = yes;
# RandomAccess = no;
# AutoChanger = no
# Two EOF = yes
# Hardware End of Medium = no
# Fast Forward Space File = no
# #
# # New alert command in Bacula 9.0.0
# # Note: you must have the sg3_utils (rpms) or the
# # sg3-utils (deb) installed on your system.
# # and you must set the correct control device that
# # corresponds to the Archive Device
# Control Device = /dev/sg?? # must be SCSI ctl for /dev/rmt/1mnb
# Alert Command = "/opt/bacula/scripts/tapealert %l"
#
# #
# # Enable the Alert command only if you have the mtx package loaded
# Alert Command = "sh -c 'tapeinfo -f %c |grep TapeAlert|cat'"
# If you have smartctl, enable this, it has more info than tapeinfo
# Alert Command = "sh -c 'smartctl -H -l error %c'"
#}
#
# A FreeBSD tape drive
#
#Device {
# Name = DDS-4
# Description = "DDS-4 for FreeBSD"
# Media Type = DDS-4
# Archive Device = /dev/nsa1
# AutomaticMount = yes; # when device opened, read it
# AlwaysOpen = yes
# Offline On Unmount = no
# Hardware End of Medium = no
# BSF at EOM = yes
# Backward Space Record = no
# Fast Forward Space File = no
# TWO EOF = yes
# #
# # New alert command in Bacula 9.0.0
# # Note: you must have the sg3_utils (rpms) or the
# # sg3-utils (deb) installed on your system.
# # and you must set the correct control device that
# # corresponds to the Archive Device
# Control Device = /dev/sg?? # must be SCSI ctl for /dev/nsa1
# Alert Command = "/opt/bacula/scripts/tapealert %l"
#
# If you have smartctl, enable this, it has more info than tapeinfo
# Alert Command = "sh -c 'smartctl -H -l error %c'"
#}
#
# Send all messages to the Director,
# mount messages also are sent to the email address
#
Messages {
Name = Standard
director = bacula-dir = all
}
# # Include bacula-sd-wasabi.conf for Wasabi Cloud provider
# @/opt/bacula/etc/bacula-sd-wasabi.conf
#
# # Include bacula-sd-s3.conf for AWS S3 Cloud provider
@/opt/bacula/etc/bacula-sd-s3.conf
#


@@ -0,0 +1,34 @@
[api]
auth_type = "basic"
debug = "0"
lang = "en"
[db]
enabled = "1"
type = "pgsql"
name = "bacula"
login = "bacula"
password = "bacula"
ip_addr = "db"
port = "5432"
path = ""
[bconsole]
enabled = "1"
bin_path = "/opt/bacula/bin/bconsole"
cfg_path = "/opt/bacula/etc/bconsole.conf"
use_sudo = "0"
[jsontools]
enabled = "1"
use_sudo = "0"
bconfig_dir = "/etc/baculum/"
bdirjson_path = "/opt/bacula/bin/bdirjson"
dir_cfg_path = "/opt/bacula/etc/bacula-dir.conf"
bsdjson_path = "/opt/bacula/bin/bsdjson"
sd_cfg_path = "/opt/bacula/etc/bacula-sd.conf"
bfdjson_path = "/opt/bacula/bin/bfdjson"
fd_cfg_path = "/opt/bacula/etc/bacula-fd.conf"
bbconsjson_path = "/opt/bacula/bin/bbconsjson"
bcons_cfg_path = "/opt/bacula/etc/bconsole.conf"


@@ -0,0 +1 @@
admin:YWG41BPzVAkN6


@@ -0,0 +1 @@
admin:YWG41BPzVAkN6


@@ -0,0 +1,13 @@
[Main]
auth_type = "basic"
login = "admin"
password = "admin"
client_id = ""
client_secret = ""
redirect_uri = ""
scope = ""
protocol = "http"
address = "baculum-api"
port = "9096"
url_prefix = ""


@@ -0,0 +1,23 @@
[baculum]
debug = "0"
lang = "en"
max_jobs = "15000"
size_values_unit = "decimal"
time_in_job_log = "0"
date_time_format = "Y-M-D R"
enable_messages_log = "1"
login = "admin"
[users]
[auth_basic]
allow_manage_users = "1"
user_file = "/usr/share/baculum/htdocs/protected/Web/Config/baculum.users"
hash_alg = "apr-md5"
[security]
auth_method = "basic"
def_access = "default_settings"
def_role = "normal"
def_api_host = "Main"


@@ -0,0 +1,9 @@
[admin]
long_name = ""
description = ""
email = ""
roles = "admin"
api_hosts = ""
enabled = "1"
ips = ""

docker/etc/bconsole.conf Executable file

@@ -0,0 +1,14 @@
#
# Bacula User Agent (or Console) Configuration File
#
# Copyright (C) 2000-2015 Kern Sibbald
# License: BSD 2-Clause; see file LICENSE-FOSS
#
Director {
Name = bacula-dir
DIRport = 9101
# address = bacula-standalone
address = bacula-dir
Password = "XDnaVZYU9F4QhqUGMPxiOXsJaji23mNG3FaAM9Z2q1c/"
}
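
A sketch of opening an interactive console with this configuration from inside the director container:
docker compose exec bacula-dir /opt/bacula/bin/bconsole -c /opt/bacula/etc/bconsole.conf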


@@ -0,0 +1,32 @@
Job {
Name = "BackupUbuntu"
JobDefs = "DefaultJobToCloudAWS"
FileSet = "ubuntu-fs"
Client = "ubuntu-fd"
}
Client {
Name = ubuntu-fd
Address = 128.199.45.192
FDPort = 9102
Catalog = MyCatalog
Password = "lE-6z_tYeiRRnNLt_5L4w8KplM9Qb43z7"
File Retention = 60 days
Job Retention = 6 months
AutoPrune = yes
}
FileSet {
Name = "ubuntu-fs"
Include {
Options {
Compression=GZIP
signature=MD5
}
File = /home
File = /var/lib/mysql
File = /root
File = /share
File = /etc
}
}


@@ -0,0 +1,38 @@
Job {
Name = "BackupW2k16ToAWS"
JobDefs = "DefaultJobToCloudAWS"
Client = "w2k16-fd"
Fileset = "w2k16-fs"
}
FileSet {
Name = "w2k16-fs"
Include {
Options {
signature = MD5
compression = GZIP
IgnoreCase = yes
}
File = "C:/documents and settings"
File = "C:/Users"
File = "C:/share"
}
Exclude {
File = "*.mp3"
File = "*.mp4"
File = "*.dll"
File = "*.exe"
File = "*.bin"
}
}
Client {
Name = w2k16-fd
Address = 40.71.101.166
FDPort = 9102
Catalog = MyCatalog
Password = "abc123cde456fgh789"
File Retention = 60 days
Job Retention = 6 months
AutoPrune = yes
}


@@ -0,0 +1,72 @@
# bacula-dir-cloud.conf
#
# JobDefs
# Job
# Restore
# Pool
# Autochanger
#
# Template to store in cloud
JobDefs {
Name = "DefaultJobToCloudAWS"
Type = Backup
Level = Incremental
Client = bacula-fd
FileSet = "Full Set"
Schedule = "WeeklyCycle"
Storage = "CloudS3AWS"
Messages = Standard
Pool = CloudAWS
SpoolAttributes = yes
Priority = 10
Write Bootstrap = "/opt/bacula/working/%c.bsr"
}
# Jobs
Job {
Name = "BackupClient1ToCloudAWS"
JobDefs = "DefaultJobToCloudAWS"
}
# Restore
Job {
Name = "RestoreFromCloudAWS"
Type = Restore
Client=bacula-fd
Storage = CloudS3AWS
FileSet="Full Set"
Pool = CloudAWS
Messages = Standard
Where = /tmp/bacula-restores
}
# Cloud Pool definition
Pool {
Name = CloudAWS
Pool Type = Backup
Recycle = no # Bacula can automatically recycle Volumes
AutoPrune = yes # Prune expired volumes
Volume Retention = 365 days # one year
Maximum Volume Jobs = 1 #
# Maximum Volume Bytes = 100M # Limit Volume size to something reasonable
Label Format = "Vol-JobId-${JobId}" # Auto label
}
# Autochanger definition
Autochanger {
Name = "CloudS3AWS"
# Do not use "localhost" here
Address = bacula-sd # N.B. Use a fully qualified name here
SDPort = 9103
Password = "TS8EQJ99iLFSK39oJy33YqkZ98v4ZapjRcA+j1N6ED1n"
Device = "CloudAutoChangerS3"
Media Type = "CloudType"
Maximum Concurrent Jobs = 10 # run up to 10 jobs at the same time
}


@@ -0,0 +1,96 @@
# bacula-dir-cloud.conf
#
# JobDefs
# Job
# Restore
# Pool
# Autochanger
#
# Template to store in cloud
JobDefs {
Name = "DefaultJobToCloud"
Type = Backup
Level = Incremental
Client = bacula-fd
FileSet = "Full Set"
Schedule = "WeeklyCycle"
Storage = "CloudS3"
Messages = Standard
Pool = Cloud
SpoolAttributes = yes
Priority = 10
Write Bootstrap = "/opt/bacula/working/%c.bsr"
}
# Jobs
Job {
Name = "BackupClient1ToCloud"
JobDefs = "DefaultJobToCloud"
}
# Restore
Job {
Name = "RestoreFromCloud"
Type = Restore
Client=bacula-fd
Storage = CloudS3
FileSet="Full Set"
Pool = Cloud
Messages = Standard
Where = /tmp/bacula-restores
}
# Cloud Pool definition
Pool {
Name = Cloud
Pool Type = Backup
Recycle = no # Bacula can automatically recycle Volumes
AutoPrune = yes # Prune expired volumes
Volume Retention = 365 days # one year
Maximum Volume Jobs = 1 #
# Maximum Volume Bytes = 100M # Limit Volume size to something reasonable
Label Format = "Vol-JobId-${JobId}" # Auto label
}
# Autochanger definition
Autochanger {
Name = "CloudS3"
# Do not use "localhost" here
Address = bacula-sd # N.B. Use a fully qualified name here
SDPort = 9103
Password = "TS8EQJ99iLFSK39oJy33YqkZ98v4ZapjRcA+j1N6ED1n"
Device = "CloudAutoChanger1"
Media Type = "CloudType"
Maximum Concurrent Jobs = 10 # run up to 10 jobs at the same time
}
#
#Autochanger {
# Name = "CloudS3-2"
## Do not use "localhost" here
# Address = bacula-sd # N.B. Use a fully qualified name here
# SDPort = 9103
# Password = "TS8EQJ99iLFSK39oJy33YqkZ98v4ZapjRcA+j1N6ED1n"
# Device = "CloudAutoChanger2"
# Media Type = "CloudType"
# Maximum Concurrent Jobs = 10 # run up to 10 jobs at the same time
#}
#
#Autochanger {
# Name = "CloudS3-3"
## Do not use "localhost" here
# Address = bacula-sd # N.B. Use a fully qualified name here
# SDPort = 9103
# Password = "TS8EQJ99iLFSK39oJy33YqkZ98v4ZapjRcA+j1N6ED1n"
# Device = "CloudAutoChanger3"
# Media Type = "CloudType"
# Maximum Concurrent Jobs = 10 # run up to 10 jobs at the same time
#}
#


@@ -0,0 +1,50 @@
# Define a virtual autochanger for AWS S3
#
# Change AccessKey and SecretKey on Cloud resource
#
# Autochangers
Autochanger {
Name = "CloudAutoChangerS3"
Device = CloudStorageS3
Changer Command = ""
Changer Device = /dev/null
}
# Devices
Device {
Name = "CloudStorageS3"
Device Type = "Cloud"
Cloud = "S3-cloud-us-west-2"
Maximum Part Size = 2M
Maximum File Size = 2M
Media Type = "CloudType"
Archive Device = "/tmp"
LabelMedia = yes
Random Access = yes
AutomaticMount = yes
RemovableMedia = no
AlwaysOpen = no
}
# Cloud providers
# Hostname see https://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region
Cloud {
Name = "S3-cloud-us-west-2"
Driver = "S3"
HostName = "s3.us-west-2.amazonaws.com"
BucketName = "eftechcombr-backup"
AccessKey = "ABC"
SecretKey = "DEF"
Protocol = HTTPS
UriStyle = "VirtualHost"
Truncate Cache = "AfterUpload"
Upload = "EachPart"
Region = "us-west-2"
MaximumUploadBandwidth = 10MB/s
}
#
#


@@ -0,0 +1,129 @@
# Define a virtual autochanger for Wasabi Cloud
#
# CloudStorage1 => us-east-2
# CloudStorage2 => eu-central-1
# CloudStorage3 => us-west-1
#
# Change AccessKey and SecretKey on Cloud resource
#
# Autochangers
Autochanger {
Name = "CloudAutoChanger1"
Device = CloudStorage1
Changer Command = ""
Changer Device = /dev/null
}
#
#Autochanger {
# Name = "CloudAutoChanger2"
# Device = CloudStorage2
# Changer Command = ""
# Changer Device = /dev/null
#}
#
#Autochanger {
# Name = "CloudAutoChanger3"
# Device = CloudStorage3
# Changer Command = ""
# Changer Device = /dev/null
#}
#
# Devices
Device {
Name = "CloudStorage1"
Device Type = "Cloud"
Cloud = "WasabiS3-cloud-us-east-2"
Maximum Part Size = 2M
Maximum File Size = 2M
Media Type = "CloudType"
Archive Device = "/tmp"
LabelMedia = yes
Random Access = yes
AutomaticMount = yes
RemovableMedia = no
AlwaysOpen = no
}
#
#Device {
# Name = "CloudStorage2"
# Device Type = "Cloud"
# Cloud = "WasabiS3-cloud-eu-central-1"
# Maximum Part Size = 2M
# Maximum File Size = 2M
# Media Type = "CloudType"
# Archive Device = "/tmp"
# LabelMedia = yes
# Random Access = yes
# AutomaticMount = yes
# RemovableMedia = no
# AlwaysOpen = no
#}
#
#
#Device {
# Name = "CloudStorage3"
# Device Type = "Cloud"
# Cloud = "WasabiS3-cloud-us-west-1"
# Maximum Part Size = 2M
# Maximum File Size = 2M
# Media Type = "CloudType"
# Archive Device = "/tmp"
# LabelMedia = yes
# Random Access = yes
# AutomaticMount = yes
# RemovableMedia = no
# AlwaysOpen = no
#}
#
# Cloud providers
Cloud {
Name = "WasabiS3-cloud-us-east-2"
Driver = "S3"
HostName = "s3.us-east-2.wasabisys.com"
BucketName = "eftechcombr-backup"
AccessKey = "ABC"
SecretKey = "DEF"
Protocol = HTTPS
UriStyle = "VirtualHost"
Truncate Cache = "AfterUpload"
Upload = "EachPart"
MaximumUploadBandwidth = 10MB/s
}
#
#Cloud {
# Name = "WasabiS3-cloud-eu-central-1"
# Driver = "S3"
# HostName = "s3.eu-central-1.wasabisys.com"
# BucketName = "eftechcombr-backup2"
# AccessKey = "ABC"
# SecretKey = "DEF"
# Protocol = HTTPS
# UriStyle = "VirtualHost"
# Truncate Cache = "AfterUpload"
# Upload = "EachPart"
# MaximumUploadBandwidth = 10MB/s
#}
#Cloud {
# Name = "WasabiS3-cloud-us-west-1"
# Driver = "S3"
# HostName = "s3.us-west-1.wasabisys.com"
# BucketName = "eftechcombr-backup3"
# AccessKey = "ABC"
# SecretKey = "DEF"
# Protocol = HTTPS
# UriStyle = "VirtualHost"
# Truncate Cache = "AfterUpload"
# Upload = "EachPart"
# MaximumUploadBandwidth = 10MB/s
#}
#


@@ -0,0 +1,32 @@
Job {
Name = "BackupUbuntu"
JobDefs = "DefaultJobToCloudAWS"
FileSet = "ubuntu-fs"
Client = "ubuntu-fd"
}
Client {
Name = ubuntu-fd
Address = 128.199.45.192
FDPort = 9102
Catalog = MyCatalog
Password = "lE-6z_tYeiRRnNLt_5L4w8KplM9Qb43z7"
File Retention = 60 days
Job Retention = 6 months
AutoPrune = yes
}
FileSet {
Name = "ubuntu-fs"
Include {
Options {
Compression=GZIP
signature=MD5
}
File = /home
File = /var/lib/mysql
File = /root
File = /share
File = /etc
}
}


@@ -0,0 +1,38 @@
Job {
Name = "BackupW2k16ToAWS"
JobDefs = "DefaultJobToCloudAWS"
Client = "w2k16-fd"
Fileset = "w2k16-fs"
}
FileSet {
Name = "w2k16-fs"
Include {
Options {
signature = MD5
compression = GZIP
IgnoreCase = yes
}
File = "C:/documents and settings"
File = "C:/Users"
File = "C:/share"
}
Exclude {
File = "*.mp3"
File = "*.mp4"
File = "*.dll"
File = "*.exe"
File = "*.bin"
}
}
Client {
Name = w2k16-fd
Address = 40.71.101.166
FDPort = 9102
Catalog = MyCatalog
Password = "abc123cde456fgh789"
File Retention = 60 days
Job Retention = 6 months
AutoPrune = yes
}

docker/etc/smtp2tg.toml Executable file

@@ -0,0 +1,12 @@
[bot]
token = "xxxxxTokenxxxxx"
[receivers]
"*" = "xxxxxChatIDxxxxx"
[smtp]
listen = "0.0.0.0:25"
name = "smtp2tg"
[logging]
debug = true
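
A hedged sketch of pushing a test message through the smtp2tg relay with bsmtp, matching the commented mailcommand/mail lines in bacula-dir.conf (addresses are placeholders):
echo "test message" | /opt/bacula/bin/bsmtp -h smtp2tg -f bacula@example.com -s "Bacula test" eduardo@smtp2tg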