storage manager: remove S3 backend.

Remove in favor of the rclone S3 backend.
evazion
2019-12-13 01:49:42 -06:00
parent 92aca9ed2d
commit 316690f393
5 changed files with 0 additions and 168 deletions
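
For context, the replacement named in the commit message is rclone's S3 backend, i.e. the app stops talking to S3 through aws-sdk-s3 and defers to external tooling. A hypothetical sketch only, not code from this commit: one way a CLI-based shim around rclone could look. The class, the "s3remote" remote (defined via `rclone config`), and all names here are assumptions.

    class StorageManager::Rclone < StorageManager
      def initialize(remote, bucket, **options)
        @remote = remote   # e.g. "s3remote" in rclone.conf
        @bucket = bucket
        super(**options)
      end

      # Same key scheme as the removed StorageManager::S3 below.
      def key(path)
        path.sub(/^.+?data\//, "")
      end

      def store(io, path)
        # `rclone rcat` streams stdin up to the remote object.
        IO.popen(["rclone", "rcat", "#{@remote}:#{@bucket}/#{key(path)}"], "w") do |upload|
          IO.copy_stream(io, upload)
        end
      end

      def delete(path)
        # `rclone deletefile` removes a single object; a missing key is not fatal here.
        system("rclone", "deletefile", "#{@remote}:#{@bucket}/#{key(path)}")
      end

      def open(path)
        # `rclone cat` streams the object to stdout; buffer it into a Tempfile.
        file = Tempfile.new(binmode: true)
        IO.popen(["rclone", "cat", "#{@remote}:#{@bucket}/#{key(path)}"]) do |download|
          IO.copy_stream(download, file)
        end
        file.tap(&:rewind)
      end
    end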

Gemfile

@@ -22,7 +22,6 @@ gem 'streamio-ffmpeg'
gem 'rubyzip', :require => "zip"
gem 'stripe'
gem 'twitter'
gem 'aws-sdk-s3', '~> 1'
gem 'aws-sdk-sqs', '~> 1'
gem 'responders'
gem 'dtext_rb', git: "https://github.com/evazion/dtext_rb.git", require: "dtext"

Gemfile.lock

@@ -86,13 +86,6 @@ GEM
aws-partitions (~> 1, >= 1.239.0)
aws-sigv4 (~> 1.1)
jmespath (~> 1.0)
aws-sdk-kms (1.26.0)
aws-sdk-core (~> 3, >= 3.71.0)
aws-sigv4 (~> 1.1)
aws-sdk-s3 (1.59.0)
aws-sdk-core (~> 3, >= 3.83.0)
aws-sdk-kms (~> 1)
aws-sigv4 (~> 1.1)
aws-sdk-sqs (1.23.1)
aws-sdk-core (~> 3, >= 3.71.0)
aws-sigv4 (~> 1.1)
@@ -424,7 +417,6 @@ DEPENDENCIES
activemodel-serializers-xml
addressable
awesome_print
aws-sdk-s3 (~> 1)
aws-sdk-sqs (~> 1)
bcrypt
bootsnap

app/logical/storage_manager/s3.rb

@@ -1,47 +0,0 @@
class StorageManager::S3 < StorageManager
  # https://docs.aws.amazon.com/sdkforruby/api/Aws/S3/Client.html#initialize-instance_method
  DEFAULT_S3_OPTIONS = {
    region: Danbooru.config.aws_region,
    credentials: Danbooru.config.aws_credentials,
    logger: Rails.logger,
  }

  # https://docs.aws.amazon.com/sdkforruby/api/Aws/S3/Client.html#put_object-instance_method
  DEFAULT_PUT_OPTIONS = {
    acl: "public-read",
    storage_class: "STANDARD", # STANDARD, STANDARD_IA, REDUCED_REDUNDANCY
    cache_control: "public, max-age=#{1.year.to_i}",
    #content_type: "image/jpeg" # XXX should set content type
  }

  attr_reader :bucket, :client, :s3_options

  def initialize(bucket, client: nil, s3_options: {}, **options)
    @bucket = bucket
    @s3_options = DEFAULT_S3_OPTIONS.merge(s3_options)
    @client = client || Aws::S3::Client.new(**@s3_options)
    super(**options)
  end

  def key(path)
    path.sub(/^.+?data\//, "")
  end

  def store(io, path)
    data = io.read
    base64_md5 = Digest::MD5.base64digest(data)
    client.put_object(bucket: bucket, key: key(path), body: data, content_md5: base64_md5, **DEFAULT_PUT_OPTIONS)
  end

  def delete(path)
    client.delete_object(bucket: bucket, key: key(path))
  rescue Aws::S3::Errors::NoSuchKey
    # ignore
  end

  def open(path)
    file = Tempfile.new(binmode: true)
    client.get_object(bucket: bucket, key: key(path), response_target: file)
    file
  end
end
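
For context, the deleted backend was driven through the common StorageManager interface. A hedged usage sketch; the bucket, path, and filename are illustrative, while the method signatures come from the class above:

    sm = StorageManager::S3.new("my_s3_bucket", base_url: "https://my_s3_bucket.s3.amazonaws.com/")

    path = "/var/www/danbooru2/shared/data/d34e03c2.jpg"
    sm.store(File.open(path, "rb"), path)  # key() strips through ".../data/", so this PUTs "d34e03c2.jpg"
    sm.open(path)                          # => Tempfile holding the object's bytes
    sm.delete(path)                        # NoSuchKey errors are deliberately swallowed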

config/danbooru_default_config.rb

@@ -204,12 +204,6 @@ module Danbooru
    # ~/.ssh_config or in the ssh_options param (ref: http://net-ssh.github.io/net-ssh/Net/SSH.html#method-c-start)
    # StorageManager::SFTP.new("i1.example.com", "i2.example.com", base_dir: "/mnt/backup", hierarchical: false, ssh_options: {})

    # Store files in an S3 bucket. The bucket must already exist and be
    # writable by you. Configure your S3 settings in aws_region and
    # aws_credentials below, or in the s3_options param (ref:
    # https://docs.aws.amazon.com/sdkforruby/api/Aws/S3/Client.html#initialize-instance_method)
    # StorageManager::S3.new("my_s3_bucket", base_url: "https://my_s3_bucket.s3.amazonaws.com/", s3_options: {})

    # Select the storage method based on the post's id and type (preview, large, or original).
    # StorageManager::Hybrid.new do |id, md5, file_ext, type|
    #   ssh_options = { user: "danbooru" }
@@ -237,12 +231,6 @@ module Danbooru
    # Backup files to /mnt/backup on a remote system. Configure SSH settings
    # in ~/.ssh_config or in the ssh_options param (ref: http://net-ssh.github.io/net-ssh/Net/SSH.html#method-c-start)
    # StorageManager::SFTP.new("www.example.com", base_dir: "/mnt/backup", ssh_options: {})

    # Backup files to an S3 bucket. The bucket must already exist and be
    # writable by you. Configure your S3 settings in aws_region and
    # aws_credentials below, or in the s3_options param (ref:
    # https://docs.aws.amazon.com/sdkforruby/api/Aws/S3/Client.html#initialize-instance_method)
    # StorageManager::S3.new("my_s3_bucket_name", s3_options: {})
  end
  #TAG CONFIGURATION
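
With these examples gone, an S3-backed install presumably configures rclone outside the app. One hedged possibility, in the same comment style as the examples above (the mount point and remote name are made up): keep serving files through the existing StorageManager::Local and let `rclone mount` expose the bucket as a directory.

    # Hypothetical: the bucket is mounted with `rclone mount s3remote:my_s3_bucket /mnt/danbooru-s3`
    # StorageManager::Local.new(base_url: "https://my_s3_bucket.s3.amazonaws.com/", base_dir: "/mnt/danbooru-s3")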

lib/tasks/images.rake

@@ -1,104 +1,4 @@
namespace :images do
  desc "Distribute posts to all servers via SFTP"
  task :distribute, [:min_id, :max_id] => :environment do |t, args|
    min_id = args[:min_id]
    max_id = args[:max_id]

    lsm = StorageManager::Local.new(base_url: "https://danbooru.donmai.us/data", base_dir: "/var/www/danbooru2/shared/data", hierarchical: false)
    sftpsm = StorageManager::SFTP.new(*Danbooru.config.all_server_hosts, base_url: "https://danbooru.donmai.us/data")

    Post.where("id between ? and ?", min_id, max_id).find_each do |post|
      sftpsm.store_file(lsm.open_file(post, :original), post, :original)
      sftpsm.store_file(lsm.open_file(post, :large), post, :large) if post.has_large?
      sftpsm.store_file(lsm.open_file(post, :preview), post, :preview) if post.has_preview?
    end
  end
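
Bracketed rake arguments map to min_id and max_id, so this task would have been invoked along these lines (the ID range is illustrative):

    rake images:distribute[1,1000]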
desc "Reset S3 + Storage Class"
task :reset_s3, [:min_id, :max_id] => :environment do |t, args|
min_id = args[:min_id] # 1
max_id = args[:max_id] # 50_000
credentials = Aws::Credentials.new(Danbooru.config.aws_access_key_id, Danbooru.config.aws_secret_access_key)
Aws.config.update({
region: "us-east-1",
credentials: credentials
})
client = Aws::S3::Client.new
bucket = Danbooru.config.aws_s3_bucket_name
Post.where("id >= ? and id <= ?", min_id, max_id).find_each do |post|
key = File.basename(post.file_path)
begin
client.copy_object(bucket: bucket, key: key, acl: "public-read", storage_class: "STANDARD", copy_source: "/#{bucket}/#{key}", metadata_directive: "COPY")
puts "copied #{post.id}"
rescue Aws::S3::Errors::InvalidObjectState
puts "invalid state #{post.id}"
rescue Aws::S3::Errors::NoSuchKey
puts "missing #{post.id}"
end
end
end
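
Note the trick here: copy_object copies each object onto itself with metadata_directive: "COPY", which is the standard S3 way to rewrite the storage class and ACL without re-uploading the data. Going by the inline comments, an illustrative invocation:

    rake images:reset_s3[1,50000]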
desc "restore from glacier"
task :restore_glacier, [:min_id, :max_id] => :environment do |t, args|
min_id = args[:min_id] # 10_001
max_id = args[:max_id] # 50_000
credentials = Aws::Credentials.new(Danbooru.config.aws_access_key_id, Danbooru.config.aws_secret_access_key)
Aws.config.update({
region: "us-east-1",
credentials: credentials
})
client = Aws::S3::Client.new
bucket = Danbooru.config.aws_s3_bucket_name
Post.where("id >= ? and id <= ?", min_id, max_id).find_each do |post|
key = "preview/" + File.basename(post.preview_file_path)
begin
client.restore_object(
bucket: bucket,
key: key,
restore_request: {
days: 1,
glacier_job_parameters: {
tier: "Bulk"
}
}
)
puts "restored #{post.id}"
rescue Aws::S3::Errors::InvalidObjectState
puts "already glaciered #{post.id}"
rescue Aws::S3::Errors::NoSuchKey
puts "missing #{post.id}"
rescue Aws::S3::Errors::RestoreAlreadyInProgress
puts "already restoring #{post.id}"
end
end
end
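
restore_object requests a temporary one-day, Bulk-tier restore of each Glacier-archived preview. Using the range hinted at by the inline comments, an illustrative invocation:

    rake images:restore_glacier[10001,50000]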
desc "Redownload an image from Pixiv"
task :download_pixiv => :environment do
post_id = ENV["id"]
if post_id !~ /\d+/
raise "Usage: regen_img.rb POST_ID"
end
post = Post.find(post_id)
post.source =~ /(\d{5,})/
if illust_id = $1
response = PixivApiClient.new.work(illust_id)
upload = Upload.new
upload.source = response.pages.first
upload.file_ext = post.file_ext
upload.image_width = post.image_width
upload.image_height = post.image_height
upload.md5 = post.md5
upload.download_from_source(post.file_path)
post.distribute_files
end
end
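
Unlike the tasks above, this one reads the post ID from the environment rather than task arguments, so it would presumably have been run as (the ID is illustrative):

    rake images:download_pixiv id=123456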
desc "Regenerates all images for a post id"
task :regen => :environment do
post_id = ENV["id"]