posts: use storage manager to back up files.
* Perform backups synchronously inside `distribute_files` instead of asynchronously in `queue_backup`. Asynchronous backups assumed that files are stored on the local filesystem, which isn't true in general. * Remove obsolete backup service classes.
This commit is contained in:
@@ -1,9 +0,0 @@
|
||||
# Abstract base class for file backup strategies.
#
# Subclasses must implement #backup and #delete. Instances are obtained via
# Danbooru.config.backup_service.
class BackupService
  # Backs up the file at file_path. options are strategy-specific
  # (e.g. type: :original / :preview / :large).
  #
  # Raises NotImplementedError unless overridden by a subclass.
  def backup(file_path, options = {})
    # Idiomatic raise: pass class + message rather than constructing the
    # exception with .new.
    raise NotImplementedError, "#{self.class}.backup not implemented"
  end

  # Deletes the backed-up copy of the file at file_path.
  #
  # Raises NotImplementedError unless overridden by a subclass.
  def delete(file_path, options = {})
    raise NotImplementedError, "#{self.class}.delete not implemented"
  end
end
||||
@@ -1,9 +0,0 @@
|
||||
# A no-op backup strategy: every operation silently succeeds without
# touching any storage. Used when backups are disabled.
class NullBackupService
  # Intentionally a no-op; returns nil.
  def backup(file_path, options = {}); end

  # Intentionally a no-op; returns nil.
  def delete(file_path, options = {}); end
end
@@ -1,52 +0,0 @@
|
||||
# Backup strategy that mirrors post files to an Amazon S3 bucket.
#
# Credentials and the default bucket come from Danbooru.config
# (aws_access_key_id, aws_secret_access_key, aws_s3_bucket_name).
class S3BackupService < BackupService
  attr_reader :client, :bucket

  # client: an Aws::S3::Client (injectable for tests); when omitted, a
  #   default client is built from the configured credentials.
  # bucket: the target S3 bucket name. The bucket must already exist and
  #   be writable.
  def initialize(client: nil, bucket: Danbooru.config.aws_s3_bucket_name)
    @credentials = Aws::Credentials.new(Danbooru.config.aws_access_key_id, Danbooru.config.aws_secret_access_key)
    @client = client || Aws::S3::Client.new(credentials: @credentials, region: "us-east-1", logger: Logger.new(STDOUT))
    @bucket = bucket
  end

  # Uploads the file at file_path under every S3 key derived from its
  # variant type (:original, :preview, or :large).
  def backup(file_path, type: nil, **options)
    s3_keys(file_path, type).each do |key|
      upload_to_s3(key, file_path)
    end
  end

  # Deletes every S3 object corresponding to file_path for the given type.
  def delete(file_path, type: nil)
    s3_keys(file_path, type).each do |key|
      delete_from_s3(key)
    end
  end

  protected

  # Maps a local file path and variant type to the S3 key(s) the file is
  # stored under. Raises ArgumentError for an unrecognized type.
  def s3_keys(file_path, type)
    name = File.basename(file_path)

    case type
    when :original
      [name]
    when :preview
      ["preview/#{name}"]
    when :large
      ["sample/#{name}"]
    else
      raise ArgumentError, "Unknown type: #{type}"
    end
  end

  # Removes a single object; a missing key is treated as already deleted.
  def delete_from_s3(key)
    client.delete_object(bucket: bucket, key: key)
  rescue Aws::S3::Errors::NoSuchKey
    # Object is already gone; nothing to do.
  end

  # Uploads a single file to S3. The Content-MD5 header lets S3 verify the
  # payload's integrity end-to-end.
  def upload_to_s3(key, file_path)
    # Digest::MD5.file streams the file in chunks instead of loading the
    # whole file into memory (the previous File.read-based digest read the
    # entire file a second time alongside the open body handle).
    base64_md5 = Digest::MD5.file(file_path).base64digest
    File.open(file_path, "rb") do |body|
      client.put_object(acl: "public-read", bucket: bucket, key: key, body: body, content_md5: base64_md5)
    end
  end
end
|
||||
@@ -29,7 +29,6 @@ class Post < ApplicationRecord
|
||||
before_save :set_tag_counts
|
||||
before_save :set_pool_category_pseudo_tags
|
||||
before_create :autoban
|
||||
after_save :queue_backup, if: :md5_changed?
|
||||
after_save :create_version
|
||||
after_save :update_parent_on_save
|
||||
after_save :apply_post_metatags
|
||||
@@ -134,6 +133,10 @@ class Post < ApplicationRecord
|
||||
storage_manager.store_file(file, self, :original)
|
||||
storage_manager.store_file(sample_file, self, :large) if sample_file.present?
|
||||
storage_manager.store_file(preview_file, self, :preview) if preview_file.present?
|
||||
|
||||
backup_storage_manager.store_file(file, self, :original)
|
||||
backup_storage_manager.store_file(sample_file, self, :large) if sample_file.present?
|
||||
backup_storage_manager.store_file(preview_file, self, :preview) if preview_file.present?
|
||||
end
|
||||
|
||||
def file_path_prefix
|
||||
@@ -168,6 +171,10 @@ class Post < ApplicationRecord
|
||||
"#{file_path_prefix}#{md5}.#{file_ext}"
|
||||
end
|
||||
|
||||
# Storage manager used to write backup copies of this post's files.
# Delegates to the application configuration (Danbooru.config).
def backup_storage_manager
  Danbooru.config.backup_storage_manager
end
|
||||
|
||||
# Storage manager used to store and serve this post's files.
# Delegates to the application configuration (Danbooru.config).
def storage_manager
  Danbooru.config.storage_manager
end
|
||||
@@ -263,23 +270,6 @@ class Post < ApplicationRecord
|
||||
end
|
||||
end
|
||||
|
||||
# Legacy asynchronous backup support: backups are enqueued as delayed jobs,
# which assumes the post's files live on the local filesystem.
# NOTE(review): per the commit message, this module is obsolete — backups
# are now performed synchronously via the backup storage manager when
# files are distributed.
module BackupMethods
  extend ActiveSupport::Concern

  # Enqueues one low-priority backup job per existing file variant
  # (original, large sample, preview) for this post.
  def queue_backup
    Post.delay(queue: "default", priority: -1).backup_file(file_path, id: id, type: :original)
    Post.delay(queue: "default", priority: -1).backup_file(large_file_path, id: id, type: :large) if has_large?
    Post.delay(queue: "default", priority: -1).backup_file(preview_file_path, id: id, type: :preview) if has_preview?
  end

  module ClassMethods
    # Backs up a single file using the configured backup service.
    # Runs inside a delayed job, so file_path must be valid on the worker.
    def backup_file(file_path, options = {})
      backup_service = Danbooru.config.backup_service
      backup_service.backup(file_path, options)
    end
  end
end
|
||||
|
||||
module ImageMethods
|
||||
def twitter_card_supported?
|
||||
image_width.to_i >= 280 && image_height.to_i >= 150
|
||||
@@ -1800,7 +1790,6 @@ class Post < ApplicationRecord
|
||||
end
|
||||
|
||||
include FileMethods
|
||||
include BackupMethods
|
||||
include ImageMethods
|
||||
include ApprovalMethods
|
||||
include PresenterMethods
|
||||
|
||||
@@ -71,8 +71,6 @@ class PostReplacement < ApplicationRecord
|
||||
|
||||
if md5_changed
|
||||
post.comments.create!({creator: User.system, body: comment_replacement_message, do_not_bump_post: true}, without_protection: true)
|
||||
else
|
||||
post.queue_backup
|
||||
end
|
||||
|
||||
save!
|
||||
|
||||
@@ -99,20 +99,6 @@ module Danbooru
|
||||
true
|
||||
end
|
||||
|
||||
# What method to use to backup images.
|
||||
#
|
||||
# NullBackupService: Don't backup images at all.
|
||||
#
|
||||
# S3BackupService: Backup to Amazon S3. Must configure aws_access_key_id,
|
||||
# aws_secret_access_key, and aws_s3_bucket_name. Bucket must exist and be writable.
|
||||
# Returns the BackupService used for backing up images: S3 in production,
# a no-op service in every other environment.
def backup_service
  Rails.env.production? ? S3BackupService.new : NullBackupService.new
end
|
||||
|
||||
# What method to use to store images.
|
||||
# local_flat: Store every image in one directory.
|
||||
# local_hierarchy: Store every image in a hierarchical directory, based on the post's MD5 hash. On some file systems this may be faster.
|
||||
@@ -257,6 +243,24 @@ module Danbooru
|
||||
# end
|
||||
end
|
||||
|
||||
# The method to use for backing up image files.
|
||||
# Returns the StorageManager used to write backup copies of post files.
# Called synchronously when a post's files are stored (see
# Post#distribute_files), so it must not assume files are local.
def backup_storage_manager
  # Don't perform any backups.
  StorageManager::Null.new

  # Backup files to /mnt/backup on the local filesystem.
  # StorageManager::Local.new(base_dir: "/mnt/backup", hierarchical: false)

  # Backup files to /mnt/backup on a remote system. Configure SSH settings
  # in ~/.ssh_config or in the ssh_options param (ref: http://net-ssh.github.io/net-ssh/Net/SSH.html#method-c-start)
  # StorageManager::SFTP.new("www.example.com", base_dir: "/mnt/backup", ssh_options: {})

  # Backup files to an S3 bucket. The bucket must already exist and be
  # writable by you. Configure your S3 settings in aws_region and
  # aws_credentials below, or in the s3_options param (ref:
  # https://docs.aws.amazon.com/sdkforruby/api/Aws/S3/Client.html#initialize-instance_method)
  # StorageManager::S3.new("my_s3_bucket_name", s3_options: {})
end
|
||||
|
||||
#TAG CONFIGURATION
|
||||
|
||||
|
||||
@@ -41,6 +41,7 @@ class ActiveSupport::TestCase
|
||||
|
||||
storage_manager = StorageManager::Local.new(base_dir: "#{Rails.root}/public/data/test")
|
||||
Danbooru.config.stubs(:storage_manager).returns(storage_manager)
|
||||
Danbooru.config.stubs(:backup_storage_manager).returns(StorageManager::Null.new)
|
||||
end
|
||||
|
||||
teardown do
|
||||
|
||||
Reference in New Issue
Block a user