#!/usr/bin/env ruby
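# danbooru/script/donmai/backup_db_to_s3
#
# Uploads the newest local database dump (db-*.dump) to the danbooru-backup
# S3 bucket, naming the object after the dump's mtime, then prunes the bucket
# down to MAX_BACKUPS objects. Presumably run periodically (e.g. from cron)
# on the database host; the schedule itself is not part of this script.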
require 'rubygems'
require 'aws-sdk' # v2-era gem; defines Aws::Credentials and Aws::S3::Client
MAX_BACKUPS = 30

home = File.expand_path("~")

if !File.exist?("#{home}/.s3/access_key")
  STDERR.puts "Access key not found"
  exit 1
end

if !File.exist?("#{home}/.s3/secret_access_key")
  STDERR.puts "Secret access key not found"
  exit 1
end
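
# Build AWS credentials from the key pair stored under ~/.s3.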
access_key = File.read("#{home}/.s3/access_key").strip
secret_access_key = File.read("#{home}/.s3/secret_access_key").strip
credentials = Aws::Credentials.new(access_key, secret_access_key)

Aws.config.update({
  region: "us-east-1",
  credentials: credentials
})
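
# All S3 calls below go through a single client against the backup bucket.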
client = Aws::S3::Client.new
bucket = "danbooru-backup"
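
# List the existing db-* backups, newest first. list_objects returns at most
# 1,000 keys per call, which is more than enough at 30 retained backups.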
current_backups = client.list_objects(bucket: bucket).contents.map {|x| x.key}.select {|x| x =~ /^db-/}.sort.reverse

if current_backups.size > MAX_BACKUPS
  current_backups[MAX_BACKUPS..-1].each do |old_backup|
    client.delete_object(bucket: bucket, key: old_backup)
    puts "Deleted old backup #{old_backup}"
  end
end
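
# Pick the newest local dump and name the S3 object after its mtime.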
backup = Dir["/var/www/danbooru2/shared/backup/db-*.dump"].sort.last

if backup.nil?
  STDERR.puts "No database dump found"
  exit 1
end

data = File.open(backup, "rb")
filename = data.mtime.strftime("db-%Y-%m-%d-%H-%M")
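
# Upload the dump, retrying up to three times if the connection breaks.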
tries = 0

begin
  client.put_object(bucket: bucket, key: filename, body: data, acl: "private")
rescue Errno::EPIPE
  tries += 1
  if tries > 3
    raise
  else
    data.rewind # re-send the file from the start; a failed attempt leaves the IO mid-stream
    retry
  end
end
puts "Uploaded #{backup} as #{filename}"