Merge pull request #3030 from evazion/feat-backups

Backup posts to S3 on creation/replacement
This commit is contained in:
Albert Yi
2017-05-08 13:39:41 -07:00
committed by GitHub
5 changed files with 79 additions and 0 deletions

View File

@@ -0,0 +1,5 @@
# Abstract base class for image backup strategies.
#
# Concrete subclasses (e.g. S3BackupService, NullBackupService) must
# override #backup. The strategy in use is chosen by
# Danbooru.config.backup_service.
class BackupService
  # Back up the file at file_path. options are strategy-specific
  # (callers pass e.g. id: and type:).
  #
  # Raises NotImplementedError unless a subclass overrides this.
  def backup(file_path, options = {})
    raise NotImplementedError, "#{self.class}.backup not implemented"
  end
end

View File

@@ -0,0 +1,5 @@
# No-op backup strategy, used when backups are disabled (the
# non-production default in Danbooru.config#backup_service).
#
# Inherits from BackupService so it satisfies the same interface as
# S3BackupService and can be swapped in transparently.
class NullBackupService < BackupService
  # Intentionally does nothing; returns nil.
  def backup(file_path, options = {})
    # do nothing
  end
end

View File

@@ -0,0 +1,35 @@
# Backs up image files to an Amazon S3 bucket.
#
# Credentials and the default bucket come from Danbooru.config
# (aws_access_key_id, aws_secret_access_key, aws_s3_bucket_name).
class S3BackupService < BackupService
  attr_reader :client, :bucket

  # client: an injectable Aws::S3::Client (useful for tests). When nil,
  #   a real client is built from the configured credentials.
  # bucket: destination S3 bucket name; must already exist and be writable.
  def initialize(client: nil, bucket: Danbooru.config.aws_s3_bucket_name)
    @credentials = Aws::Credentials.new(Danbooru.config.aws_access_key_id, Danbooru.config.aws_secret_access_key)
    @client = client || Aws::S3::Client.new(credentials: @credentials, region: "us-east-1", logger: Logger.new(STDOUT))
    @bucket = bucket
  end

  # Upload file_path to S3 under a key derived from its basename and
  # type (:original, :preview, or :large).
  #
  # Raises ArgumentError for an unknown type.
  def backup(file_path, type: nil, **options)
    key = s3_key(file_path, type)
    upload_to_s3(key, file_path)
  end

  protected

  # Map a local path + variant type to its S3 object key: originals go
  # at the bucket root, previews/larges under a matching prefix.
  def s3_key(file_path, type)
    basename = File.basename(file_path)

    case type
    when :original
      basename
    when :preview
      "preview/#{basename}"
    when :large
      "large/#{basename}"
    else
      raise ArgumentError, "Unknown type: #{type}"
    end
  end

  # Upload with a Content-MD5 header so S3 verifies the payload
  # integrity server-side.
  def upload_to_s3(key, file_path)
    # Digest::MD5.file hashes the file in chunks; the original read the
    # whole file into memory (File.read) just to compute the digest.
    base64_md5 = Digest::MD5.file(file_path).base64digest

    File.open(file_path, "rb") do |body|
      client.put_object(bucket: bucket, key: key, body: body, content_md5: base64_md5)
    end
  end
end

View File

@@ -23,6 +23,7 @@ class Post < ActiveRecord::Base
before_save :set_tag_counts
before_save :set_pool_category_pseudo_tags
before_create :autoban
after_save :queue_backup, if: :md5_changed?
after_save :create_version
after_save :update_parent_on_save
after_save :apply_post_metatags
@@ -229,6 +230,23 @@ class Post < ActiveRecord::Base
end
end
# Backup of a post's image files. Mixed into Post; #queue_backup is run
# by an after_save callback whenever the md5 changes (i.e. on post
# creation or file replacement).
module BackupMethods
extend ActiveSupport::Concern
# Enqueue low-priority delayed_job tasks to back up the original file,
# plus the large/preview variants when the post has them.
def queue_backup
Post.delay(queue: "default", priority: -1).backup_file(file_path, id: id, type: :original)
Post.delay(queue: "default", priority: -1).backup_file(large_file_path, id: id, type: :large) if has_large?
Post.delay(queue: "default", priority: -1).backup_file(preview_file_path, id: id, type: :preview) if has_preview?
end
module ClassMethods
# Runs inside the delayed_job worker: delegates the upload to whatever
# backup strategy Danbooru.config.backup_service returns.
def backup_file(file_path, options = {})
backup_service = Danbooru.config.backup_service
backup_service.backup(file_path, options)
end
end
end
module ImageMethods
def device_scale
if large_image_width > 320
@@ -1695,6 +1713,7 @@ class Post < ActiveRecord::Base
end
include FileMethods
include BackupMethods
include ImageMethods
include ApprovalMethods
include PresenterMethods

View File

@@ -90,6 +90,20 @@ module Danbooru
true
end
# What method to use to backup images.
#
# NullBackupService: Don't backup images at all.
#
# S3BackupService: Backup to Amazon S3. Must configure aws_access_key_id,
# aws_secret_access_key, and aws_s3_bucket_name. Bucket must exist and be writable.
# Returns the image backup strategy for this environment: real S3
# uploads in production, a no-op service everywhere else.
def backup_service
Rails.env.production? ? S3BackupService.new : NullBackupService.new
end
# What method to use to store images.
# local_flat: Store every image in one directory.
# local_hierarchy: Store every image in a hierarchical directory, based on the post's MD5 hash. On some file systems this may be faster.
@@ -477,6 +491,7 @@ module Danbooru
false
end
# Used for backing up images to S3. Must be changed to your own S3 bucket.
def aws_s3_bucket_name
# NOTE(review): placeholder default — deployments must override this
# with a bucket they own; the bucket must exist and be writable.
"danbooru"
end