Can't connect to AWS S3 instance
Summary
Reproducibility | Platform | OS | OS Version | Product Version |
---|---|---|---|---|
always | Linux | RHEL | 9 | 11.0.1 |
Description
Despite trying lots of examples found online, I cannot get Bacula to connect to the AWS Instance we have set up. AWS is set up correctly, as far as I can see, with the 9103 port, etc. But when Bacula tries to connect, I get: "Warning: bsockcore.c:258 Could not connect to Storage daemon on ec2-.us-west-1.compute.amazonaws.com:9103. ERR=Connection refused"
See my config files, as presently configured, below (I've tried many iterations, based on examples found online). How can I get this working? Thanks for any help!
Steps to Reproduce
- bconsole / "cloud" command
Additional Information
config files: bacula-fd.conf:
#
# Bacula File Daemon Configuration file
#
#
# List Directors who are permitted to contact this File daemon
#
# Password must match the Password in the Director's Client resource for
# this FD (Client "server.humortimes.com-fd" in bacula-dir.conf).
Director {
Name = backup-dir
Password = "<pw>"
}
#
# Restricted Director, used by tray-monitor
#
# Monitor = yes restricts this Director connection to status-only access.
Director {
Name = backup-mon
Password = "<pw>"
Monitor = yes
}
# Cloud Director
# NOTE(review): no Director resource named "dir.clouddir" appears in
# bacula-dir.conf below -- confirm a second (cloud) Director actually
# exists; otherwise this entry is unused.
Director {
Name = dir.clouddir
Password = "<pw>"
}
#
# "Global" File daemon configuration specifications
#
# FDport 9102 is where the Director connects; it must match FDPort in the
# Director's Client resource for this host.
FileDaemon {
Name = server.humortimes.com-fd
FDport = 9102
WorkingDirectory = /var/lib/bacula
Pid Directory = /var/run
Maximum Concurrent Jobs = 8
}
# Send all messages back to the Director.
# NOTE(review): the original comment said "except skipped files", but the
# directive is "all" (not "all, !skipped") -- every message class is sent.
Messages {
Name = Standard
director = backup-dir = all
}
bacula-sd.conf:
#
# Bacula Storage Daemon Configuration file
#
# NOTE(review): the Director's "Connection refused" on port 9103 is a
# network-level failure: either this SD is not running/listening on 9103
# on the EC2 host, or the AWS security group / OS firewall blocks the
# port. Verify with "ss -ltn" on the instance and confirm the security
# group allows inbound 9103 from the Director's address.
Storage {
Name = sd.cloudsd2
SDPort = 9103 # this SD's own listen port (not the Director's)
WorkingDirectory = "/var/lib/bacula"
Pid Directory = "/var/run"
Maximum Concurrent Jobs = 8
}
#
# List Directors who are permitted to contact Storage daemon
#
# Password must match the Password in the Director's Storage/Autochanger
# resources that point at this SD (see bacula-dir.conf).
Director {
Name = backup-dir
Password = "<pw>"
}
#
# Restricted Director, used by tray-monitor to get the
# status of the storage daemon
#
# Monitor = yes restricts this connection to status-only access.
Director {
Name = backup-mon
Password = "<pw>"
Monitor = yes
}
# Cloud director
# NOTE(review): as in bacula-fd.conf, no Director named "dir.clouddir" is
# visible in bacula-dir.conf -- confirm this entry is actually used.
Director {
Name = dir.clouddir
Password = "<pw>"
}
# Autochanger wrapping the cloud device so the Director can address it as
# "chgr.cloudsd2".
# NOTE(review): Changer Command / Changer Device look like placeholders
# here -- no physical tape changer is involved for a cloud device; confirm
# mtx-changer is never actually invoked in this setup.
Autochanger {
Name = chgr.cloudsd2
Device = dev.cloud1
Changer Command = "/usr/libexec/bacula/mtx-changer %c %o %S %a %d"
Changer Device = /dev/null
}
#
# Devices supported by this Storage daemon
#
# Amazon Cloud storage
#
# Cloud-backed device: volume parts are staged under the Archive Device
# directory and handed to the Cloud resource "S3Cloud" defined below.
Device {
  Name = dev.cloud1
  Device Type = Cloud
  Cloud = S3Cloud
  Archive Device = /opt/bacula/backups
  Maximum Part Size = 10 MB
  Media Type = CloudType
  Label Media = yes
  Random Access = yes
  Automatic Mount = yes
  Removable Media = no
  Always Open = no
  Autochanger = yes
}
# S3 endpoint used by Device "dev.cloud1" (Cloud = S3Cloud).
# NOTE(review): Region is us-west-1 but HostName is the global endpoint
# "s3.amazonaws.com"; if the bucket lives in us-west-1, the regional
# endpoint "s3.us-west-1.amazonaws.com" may be required -- confirm.
Cloud{
Name=S3Cloud
Driver="S3"
HostName="s3.amazonaws.com"
BucketName="htsync"
AccessKey="<key>" #server-sync IAM user
SecretKey="<key>"
Protocol=HTTPS
UriStyle=VirtualHost
TruncateCache=No
Upload=EachPart
Region="us-west-1"
MaximumUploadBandwidth=5MB/s
}
#
# Send all messages to the Director,
#
# "backup-dir = all" forwards every message class to the Director.
Messages {
Name = Standard
director = backup-dir = all
}
bacula-dir.conf (minus pool and message setups):
#
# Bacula Director Configuration file
#
# DirAddress = 127.0.0.1 binds the console port to localhost only, so
# bconsole must run on this same host.
# NOTE(review): "@@DIR_PASSWORD@@" looks like a template placeholder --
# confirm it is substituted with a real password at deploy time.
Director {
Name = backup-dir
DIRport = 9101
QueryFile = "/etc/bacula/scripts/query.sql"
WorkingDirectory = "/var/spool/bacula"
PidDirectory = "/var/run"
Maximum Concurrent Jobs = 8
Password = "@@DIR_PASSWORD@@" # Console password
Messages = Daemon
FD Connect Timeout = 2min
DirAddress = 127.0.0.1
}
#
#Job Defs
#
# Defaults inherited by the Jobs below. Storage references the
# Autochanger resource "chgr.cloudsd2" defined later in this file.
JobDefs {
Name = "DefaultJob"
Type = Backup
Level = Incremental
Client = server.humortimes.com-fd
FileSet = "Full Set"
Schedule = "WeeklyCycle"
Storage = chgr.cloudsd2 # autochanger
Messages = "Standard"
Pool = Default
Full Backup Pool = Full-Pool
Differential Backup Pool = Diff-Pool
Incremental Backup Pool = Inc-Pool
Prefer Mounted Volumes = no
Maximum Concurrent Jobs = 8
Prune Files = yes
Prune Jobs = yes
Prune Volumes = yes
Priority = 10
}
# Backup job
# Uses every default from "DefaultJob" unchanged.
Job {
Name = "BackupLocalFiles"
JobDefs = "DefaultJob"
}
# Backup the catalog database (after the nightly save)
Job {
Name = "BackupCatalog"
JobDefs = "DefaultJob"
Level = Full
FileSet="MyCatalog"
Schedule = "WeeklyCycleAfterBackup"
# make_catalog_backup.pl is a Perl script and must be executed directly
# with the Catalog resource name as its argument. The previous line ran it
# via "awk -f ...", which cannot work: awk cannot interpret a Perl program,
# so the catalog dump would never be produced.
RunBeforeJob = "/etc/bacula/scripts/make_catalog_backup.pl MyCatalog"
RunAfterJob = "/etc/bacula/scripts/delete_catalog_backup"
Priority = 11 # run after main backup
}
# Restore job
# Restores into /bacula/restore on the client rather than original paths.
Job {
Name = "RestoreLocalFiles"
Type = Restore
Client = server.humortimes.com-fd
FileSet = "Full Set"
Storage = chgr.cloudsd2 # autochanger
Pool = Default
Messages = Standard
Where = /bacula/restore
}
#
# List of files to be backed up
#
FileSet {
Name = "Full Set"
Include {
Options {
signature = MD5
compression = GZIP6
}
# single file for testing
File = /etc/bacula/test.txt
}
# Closing brace for the FileSet resource was missing: only Options and
# Include were closed, so the parser would swallow the following Schedule
# resource into this FileSet and fail.
}
#
# When to do the backups
#
# Full on the first Sunday, Differential on remaining Sundays,
# Incremental every weekday -- all at 01:05.
Schedule {
Name = "WeeklyCycle"
Run = Full 1st sun at 1:05
Run = Differential 2nd-5th sun at 1:05
Run = Incremental mon-sat at 1:05
}
#
# Client (File Services) to backup. Use gen_cli.pl to add new server!!!
#
# FD runs on the same host as the Director (Address = 127.0.0.1).
# Password must match the Director "backup-dir" entry in bacula-fd.conf.
Client {
Name = server.humortimes.com-fd ##The file daemon name of the client
Address = 127.0.0.1
FDPort = 9102
Catalog = MyCatalog
Password = "<pw>"
File Retention = 1 year
Job Retention = 1 year
AutoPrune = yes
}
#
# autochanger for Amazon AWS
# DNS string "Address" is from AWS account:
#
# Device names the Autochanger resource in bacula-sd.conf; Password must
# match the SD's Director "backup-dir" entry.
Autochanger{
Name=chgr.cloudsd2
Address=ec2-54-176-177-246.us-west-1.compute.amazonaws.com
SDPort=9103
Password="<pw>"
Device=chgr.cloudsd2
# Media Type must match the Media Type of the SD's Device resource, which
# is "CloudType" (and which the sibling Storage resource below also uses).
# The previous value "Cloud" mismatched, so the Director could not pair
# volumes/jobs with this storage.
MediaType=CloudType
MaximumConcurrentJobs=8
Autochanger=chgr.cloudsd2
}
#
# Definition of file storage device
#
# Second route to the same SD, grouped under the autochanger.
# MediaType "CloudType" matches the SD Device's Media Type.
# NOTE(review): both this Storage resource and the Autochanger resource
# above point at the same SD on 9103; jobs reference "chgr.cloudsd2".
Storage {
Name = sd.cloudsd2
SDPort = 9103
Address = ec2-<dns>.us-west-1.compute.amazonaws.com
Password = "<pw>"
Device = dev.cloud1
MediaType = CloudType
Autochanger = chgr.cloudsd2
MaximumConcurrentJobs = 8
}
#
# Generic catalog service
#
# Semicolons separate multiple directives placed on one line.
Catalog {
Name = MyCatalog
dbname = "bacula"; dbuser = "bacula"; dbpassword = "<pw>"
}
#
# Restricted console used by tray-monitor to get the status of the director
#
# NOTE(review): ClientACL should list client names (or *all*), but here it
# lists console command names (status, .clients, run, ...). Those look like
# they belong in CommandACL; as written, no client name will match this
# ACL -- confirm intent.
Console {
Name = backup-mon
Password = "<pw>"
JobACL = *all*
ClientACL = status, .clients, .jobs, .pools, .storage, .filesets, .messages, .defaults, run, restore, cloud, wait
StorageACL = *all*
ScheduleACL = *all*
PoolACL = *all*
FileSetACL = *all*
CatalogACL = *all*
CommandACL = *all*
WhereACL = *all*
}