Remove mod-only bulk revert system (#4178).

The mass undo system added in #4178 is a replacement for the mod-only
bulk revert system.
evazion
2019-09-27 20:51:56 -05:00
parent 1341dbb262
commit f2dccf8cf1
12 changed files with 1 additions and 353 deletions


@@ -1,67 +0,0 @@
class BulkRevert
  BIG_QUERY_LIMIT = 5_000

  attr_reader :constraints

  class ConstraintTooGeneralError < Exception; end

  def process(creator, constraints = {})
    @constraints = constraints

    ModAction.log("Processed bulk revert for #{constraints.inspect} by #{creator.name}", :bulk_revert)

    CurrentUser.scoped(creator) do
      ApplicationRecord.without_timeout do
        find_post_versions.order("updated_at, id").each do |version|
          version.undo!
        end
      end
    end
  end

  def initialize
    @constraints = {}
  end

  def preview
    @_preview ||= find_post_versions
  end

  def query_gbq(user_id, added_tags, removed_tags, min_version_id, max_version_id)
    GoogleBigQuery::PostVersion.new.find(user_id, added_tags, removed_tags, min_version_id, max_version_id, BIG_QUERY_LIMIT)
  end

  def find_post_versions
    q = PostArchive.where("true")

    if constraints[:user_name]
      constraints[:user_id] = User.find_by_name(constraints[:user_name]).try(:id)
    end

    if constraints[:user_id]
      q = q.where("post_versions.updater_id = ?", constraints[:user_id])
    end

    if constraints[:added_tags] || constraints[:removed_tags]
      hash = CityHash.hash64("#{constraints[:added_tags]} #{constraints[:removed_tags]} #{constraints[:min_version_id]} #{constraints[:max_version_id]}").to_s(36)
      sub_ids = Cache.get("br/fpv/#{hash}", 300) do
        query_gbq(constraints[:user_id], constraints[:added_tags], constraints[:removed_tags], constraints[:min_version_id], constraints[:max_version_id])
      end

      if sub_ids.size >= BIG_QUERY_LIMIT
        raise ConstraintTooGeneralError.new
      end

      q = q.where("post_versions.id in (?)", sub_ids)
    end

    if constraints[:min_version_id].present?
      q = q.where("post_versions.id >= ?", constraints[:min_version_id])
    end

    if constraints[:max_version_id].present?
      q = q.where("post_versions.id <= ?", constraints[:max_version_id])
    end

    q
  end
end
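
For reference, the removed class was driven roughly like this. The sketch below is inferred only from the code above; the constraint values and the calling context are hypothetical and are not part of this diff.

# Hypothetical invocation of the removed BulkRevert class (constraint values made up).
reverter = BulkRevert.new
begin
  # Undo every matching post version on behalf of the acting moderator.
  reverter.process(CurrentUser.user, user_name: "some_uploader", added_tags: "bad_tag")
rescue BulkRevert::ConstraintTooGeneralError
  # Raised when the BigQuery lookup returns BIG_QUERY_LIMIT (5,000) or more
  # matching versions; the constraints have to be narrowed before reverting.
end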


@@ -1,44 +0,0 @@
require "big_query"
module GoogleBigQuery
class Base
def self.enabled?
File.exists?(Danbooru.config.google_api_json_key_path)
end
def initialize
raise NotImplementedError.new("Google Big Query is not configured.") unless GoogleBigQuery::Base.enabled?
end
def query(q)
client.query(q)
end
def escape(s)
Regexp.escape(s).gsub(/\\/, '\0\0').gsub(/['"]/, '\\\\\0')
end
def client
@_client ||= BigQuery::Client.new(
"json_key" => client_options[:google_key_path],
"project_id" => google_config["project_id"],
"dataset" => client_options[:google_data_set]
)
end
def client_options
@_client_options ||= {
google_key_path: Danbooru.config.google_api_json_key_path,
google_data_set: "danbooru_#{Rails.env}"
}
end
def google_config
@_google_config ||= JSON.parse(File.read(client_options[:google_key_path]))
end
def data_set
client_options[:google_data_set]
end
end
end
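
The base class above only handles credentials and a shared query helper; the concrete lookups live in subclasses such as GoogleBigQuery::PostVersion below. A minimal sketch of how a caller would guard on configuration (the tag and limit are example values, not taken from this diff):

# Only touch BigQuery when a JSON key file is configured; otherwise
# GoogleBigQuery::Base#initialize raises NotImplementedError.
if GoogleBigQuery::Base.enabled?
  # Raw BigQuery response for the 10 most recent versions that added the tag.
  result = GoogleBigQuery::PostVersion.new.find_added("example_tag", 10)
end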


@@ -1,66 +0,0 @@
module GoogleBigQuery
  class PostVersion < Base
    def find_removed(tag, limit = 1_000)
      limit = limit.to_i
      query("select id, post_id, updated_at, updater_id, updater_ip_addr, tags, added_tags, removed_tags, parent_id, rating, source from [#{data_set}.post_versions] where #{remove_tag_condition(tag)} order by updated_at desc limit #{limit}")
    end

    def find_added(tag, limit = 1_000)
      limit = limit.to_i
      query("select id, post_id, updated_at, updater_id, updater_ip_addr, tags, added_tags, removed_tags, parent_id, rating, source from [#{data_set}.post_versions] where #{add_tag_condition(tag)} order by updated_at desc limit #{limit}")
    end

    def add_tag_condition(t)
      es = escape(t)
      "regexp_match(added_tags, \"(?:^| )#{es}(?:$| )\")"
    end

    def remove_tag_condition(t)
      es = escape(t)
      "regexp_match(removed_tags, \"(?:^| )#{es}(?:$| )\")"
    end

    def find_for_post(post_id, created_at)
      post_id = post_id.to_i
      btime = created_at.strftime("%Y-%m-%d 00:00:00")
      etime = 1.day.from_now(created_at).strftime("%Y-%m-%d 00:00:00")
      "select updater_id, added_tag from [danbooru_#{Rails.env}.post_versions_flat_part] where _partitiontime >= #{btime} and _partitiontime <= #{etime} and post_id = #{post_id}"
    end

    def find(user_id, added_tags, removed_tags, min_version_id, max_version_id, limit = 1_000)
      constraints = []
      constraints << "updater_id = #{user_id.to_i}"

      if added_tags
        added_tags.split.each do |tag|
          constraints << add_tag_condition(tag)
        end
      end

      if removed_tags
        removed_tags.split.each do |tag|
          constraints << remove_tag_condition(tag)
        end
      end

      if min_version_id
        constraints << "id >= #{min_version_id.to_i}"
      end

      if max_version_id
        constraints << "id <= #{max_version_id.to_i}"
      end

      limit = limit.to_i
      sql = "select id from [#{data_set}.post_versions] where " + constraints.join(" and ") + " order by updated_at desc limit #{limit}"
      result = query(sql)

      if result["rows"]
        result["rows"].map {|x| x["f"][0]["v"].to_i}
      else
        []
      end
    end
  end
end
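
BulkRevert#query_gbq above ends up in PostVersion#find, whose last step unpacks the legacy BigQuery response format: each row is a hash with an "f" (fields) array whose entries carry the value under "v". A sketch of the shape being consumed, with made-up version ids:

# Illustrative response shape (ids are invented); find maps it to plain integers.
result = {
  "rows" => [
    { "f" => [{ "v" => "123456" }] },
    { "f" => [{ "v" => "123789" }] }
  ]
}
result["rows"].map { |x| x["f"][0]["v"].to_i }  # => [123456, 123789]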