fixing jobs

This commit is contained in:
albert
2011-08-15 19:41:50 -04:00
parent 1c2ec028d3
commit c8067a4691
17 changed files with 73 additions and 124 deletions


@@ -1079,7 +1079,7 @@ footer#page-footer {
   margin: 1em;
   text-align: center;
   padding-top: 1em;
-  border-top: 2px solid #CCC;
+  border-top: 1px solid #EEE;
 }

 /*** news ticker ***/


@@ -0,0 +1,44 @@
require 'base64'
require 'digest/md5'

class AmazonBackup < ActiveRecord::Base
  def self.last_id
    first.last_id
  end

  def self.update_id(new_id)
    first.update_column(:last_id, new_id)
  end

  def self.execute
    last_id = AmazonBackup.last_id

    Post.where("id > ?", last_id).limit(200).order("id").each do |post|
      AWS::S3::Base.establish_connection!(
        :access_key_id => Danbooru.config.amazon_s3_access_key_id,
        :secret_access_key => Danbooru.config.amazon_s3_secret_access_key
      )

      if File.exists?(post.file_path)
        base64_md5 = Base64.encode64(Digest::MD5.digest(File.read(post.file_path)))
        AWS::S3::S3Object.store(post.file_name, open(post.file_path, "rb"), Danbooru.config.amazon_s3_bucket_name, "Content-MD5" => base64_md5)
      end

      if post.image? && File.exists?(post.preview_file_path)
        AWS::S3::S3Object.store("preview/#{post.md5}.jpg", open(post.preview_file_path, "rb"), Danbooru.config.amazon_s3_bucket_name)
      end

      if File.exists?(post.medium_file_path)
        AWS::S3::S3Object.store("medium/#{post.md5}.jpg", open(post.medium_file_path, "rb"), Danbooru.config.amazon_s3_bucket_name)
      end

      if File.exists?(post.large_file_path)
        AWS::S3::S3Object.store("large/#{post.md5}.jpg", open(post.large_file_path, "rb"), Danbooru.config.amazon_s3_bucket_name)
      end

      # Advance the cursor so the next run resumes after this post.
      AmazonBackup.update_id(post.id)
    end
  rescue Exception => x
    # probably some network error, retry next time
  end
end
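
A minimal console sketch of driving the new model (hypothetical usage, not part of the commit; note that self.last_id calls first.last_id, so it raises if the amazon_backups table is empty):

# Seed the singleton bookkeeping row once, then run a batch by hand.
AmazonBackup.create(:last_id => 0) if AmazonBackup.first.nil?
AmazonBackup.execute  # uploads up to 200 posts past the cursor to S3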


@@ -1,27 +0,0 @@
module Jobs
  class BackupToS3 < Struct.new(:last_id)
    def perform
      Post.find(:all, :conditions => ["id > ?", last_id], :limit => 200, :order => "id").each do |post|
        AWS::S3::Base.establish_connection!(:access_key_id => CONFIG["amazon_s3_access_key_id"], :secret_access_key => CONFIG["amazon_s3_secret_access_key"])

        if File.exists?(post.file_path)
          base64_md5 = Base64.encode64(Digest::MD5.digest(File.read(post.file_path)))
          AWS::S3::S3Object.store(post.file_name, open(post.file_path, "rb"), CONFIG["amazon_s3_bucket_name"], "Content-MD5" => base64_md5)
        end

        if post.image? && File.exists?(post.preview_path)
          AWS::S3::S3Object.store("preview/#{post.md5}.jpg", open(post.preview_path, "rb"), CONFIG["amazon_s3_bucket_name"])
        end

        if File.exists?(post.sample_path)
          AWS::S3::S3Object.store("sample/" + CONFIG["sample_filename_prefix"] + "#{post.md5}.jpg", open(post.sample_path, "rb"), CONFIG["amazon_s3_bucket_name"])
        end

        self.last_id = post.id
      end

      Delayed::Job.enqueue(BackupToS3.new(last_id))
    rescue Exception => x
      # probably some network error, retry next time
    end
  end
end
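
For context, these Struct-based classes follow delayed_job's convention that any enqueued object responding to #perform is called in a worker. A sketch of how this job would have been started (the initial id of 0 is illustrative):

# delayed_job serializes the struct and calls #perform in a worker;
# the job then re-enqueues itself with the last processed id.
Delayed::Job.enqueue(Jobs::BackupToS3.new(0))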


@@ -1,7 +0,0 @@
module Jobs
  class CalculatePostCount < Struct.new(:tag_name)
    def perform
      Tag.recalculate_post_count(tag_name)
    end
  end
end


@@ -1,12 +0,0 @@
module Jobs
  class CalculateRelatedTags < Struct.new(:tag_id)
    def perform
      tag = Tag.find_by_id(tag_id)

      if tag
        tag.update_related
        tag.save
      end
    end
  end
end


@@ -1,13 +0,0 @@
module Jobs
  class CalculateUploadedTags < Struct.new(:user_id)
    def perform
      tags = []
      user = User.find(user_id)

      CONFIG["tag_types"].values.uniq.each do |tag_type|
        tags += user.calculate_uploaded_tags(tag_type)
      end

      user.update_column(:uploaded_tags, tags.join("\n"))
    end
  end
end


@@ -1,12 +0,0 @@
module Jobs
  class CreateTagAlias < Struct.new(:antecedent_name, :consequent_name, :creator_id, :creator_ip_addr)
    # NOTE: delayed_job invokes #perform on enqueued objects, so this
    # #execute method was never called by a worker.
    def execute
      TagAlias.create(
        :antecedent_name => antecedent_name,
        :consequent_name => consequent_name,
        :creator_id => creator_id,
        :creator_ip_addr => creator_ip_addr
      )
    end
  end
end


@@ -1,12 +0,0 @@
module Jobs
  class CreateTagImplication < Struct.new(:antecedent_name, :consequent_name, :creator_id, :creator_ip_addr)
    def perform
      TagImplication.create(
        :antecedent_name => antecedent_name,
        :consequent_name => consequent_name,
        :creator_id => creator_id,
        :creator_ip_addr => creator_ip_addr
      )
    end
  end
end


@@ -1,13 +0,0 @@
module Jobs
  class FixPixivUploads < Struct.new(:last_post_id)
    def perform
      post_id = nil

      Post.find_each(:conditions => ["GREATEST(width, height) IN (150, 600) AND source LIKE ? AND id > ?", "%pixiv%", last_post_id]) do |post|
        post_id = post.id
      end

      # NOTE: Struct defines no update_attributes, so this call would
      # raise NoMethodError when the job ran.
      update_attributes(:data => {:last_post_id => post_id})
    end
  end
end


@@ -1,7 +0,0 @@
module Jobs
  class MassTagEdit < Struct.new(:start_tags, :result_tags, :updater_id, :updater_ip_addr)
    def perform
      Tag.mass_edit(start_tags, result_tags, updater_id, updater_ip_addr)
    end
  end
end


@@ -1,10 +0,0 @@
module Jobs
  class ProcessTagSubscriptions < Struct.new(:last_run)
    def perform
      if last_run.nil? || last_run < 20.minutes.ago
        TagSubscription.process_all
        Delayed::Job.enqueue(ProcessTagSubscriptions.new(Time.now))
      end
    end
  end
end


@@ -1,9 +0,0 @@
module Jobs
  class ProcessUploads
    def perform
      Upload.find_each(:conditions => ["status = ?", "pending"]) do |upload|
        upload.process!
      end
    end
  end
end


@@ -37,7 +37,7 @@ module PostSetPresenters
     elsif max_date - min_date == 6
       min_date.strftime("Week of %B %d, %Y")
     else
-      date.strftime("Month of %B %Y")
+      date.strftime("%B %Y")
     end
   end
 end
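
For reference, the trimmed format simply drops the "Month of" prefix:

Date.new(2011, 8, 15).strftime("%B %Y")  # => "August 2011"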


@@ -3,3 +3,7 @@ set :output, "/var/log/whenever.log"
 every 1.hour do
   TagSubscription.process_all
 end
+
+every 1.hour do
+  AmazonBackup.execute
+end
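
Note that whenever defers Ruby calls through a job type such as runner, which shells out to the app from the generated crontab; a bare method call inside the block would run when schedule.rb is evaluated rather than on the cron schedule. A sketch of the runner-based form of the new entry (assuming whenever's standard runner job type):

every 1.hour do
  runner "AmazonBackup.execute"
end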


@@ -0,0 +1,9 @@
class CreateAmazonBackups < ActiveRecord::Migration
  def change
    create_table :amazon_backups do |t|
      t.integer :last_id

      t.timestamps
    end
  end
end

test/fixtures/amazon_backups.yml

@@ -0,0 +1,7 @@
# Read about fixtures at http://api.rubyonrails.org/classes/Fixtures.html

one:
  last_id: 1

two:
  last_id: 1


@@ -0,0 +1,7 @@
require 'test_helper'

class AmazonBackupTest < ActiveSupport::TestCase
  # test "the truth" do
  #   assert true
  # end
end