Merge branch 'master' into fix-pixiv-profile-url

This commit is contained in:
evazion
2020-06-24 00:06:55 -05:00
committed by GitHub
103 changed files with 1639 additions and 2247 deletions

View File

@@ -28,11 +28,11 @@ module Explore
def searches
@date, @scale, @min_date, @max_date = parse_date(params)
@search_service = ReportbooruService.new
@searches = ReportbooruService.new.popular_searches(@date)
end
def missed_searches
@search_service = ReportbooruService.new
@missed_searches = ReportbooruService.new.missed_search_rankings
end
private

View File

@@ -0,0 +1,48 @@
# Stub endpoints that stand in for Danbooru's external microservices
# (recommender, reportbooru, iqdbs) in non-production environments.
# Each action renders randomly generated data shaped like the real
# service's response.
class MockServicesController < ApplicationController
  skip_forgery_protection
  respond_to :json

  # The mock services must never be reachable in production.
  before_action do
    raise User::PrivilegeError if Rails.env.production?
  end

  def recommender_recommend
    @data = posts.map { |p| [p.id, rand(0.0..1.0)] }
    render json: @data
  end

  def recommender_similar
    @data = posts.map { |p| [p.id, rand(0.0..1.0)] }
    render json: @data
  end

  def reportbooru_missed_searches
    # The real service returns newline-separated "tag count" lines.
    lines = tags.map { |t| "#{t.name} #{rand(1.0..1000.0)}" }
    @data = lines.join("\n")
    render json: @data
  end

  def reportbooru_post_searches
    @data = tags.map { |t| [t.name, rand(1..1000)] }
    render json: @data
  end

  def reportbooru_post_views
    @data = posts.map { |p| [p.id, rand(1..1000)] }
    render json: @data
  end

  def iqdbs_similar
    @data = posts.map { |p| { post_id: p.id, score: rand(0..100) } }
    render json: @data
  end

  private

  # A small sample of recent posts to fake results for.
  def posts(limit = 10)
    Post.last(limit)
  end

  # The most-used tags, to fake search rankings for.
  def tags(limit = 10)
    Tag.order(post_count: :desc).limit(limit)
  end
end

View File

@@ -21,7 +21,7 @@ class UploadsController < ApplicationController
def image_proxy
authorize Upload
resp = ImageProxy.get_image(params[:url])
send_data resp.body, :type => resp.content_type, :disposition => "inline"
send_data resp.body, type: resp.mime_type, disposition: "inline"
end
def index

View File

@@ -9,6 +9,7 @@ Autocomplete.ORDER_METATAGS = <%= PostQueryBuilder::ORDER_METATAGS.to_json.html_
Autocomplete.DISAPPROVAL_REASONS = <%= PostDisapproval::REASONS.to_json.html_safe %>;
/* eslint-enable */
Autocomplete.MISC_STATUSES = ["deleted", "active", "pending", "flagged", "banned", "modqueue", "unmoderated"];
Autocomplete.TAG_PREFIXES = "-|~|" + Object.keys(Autocomplete.TAG_CATEGORIES).map(category => category + ":").join("|");
Autocomplete.METATAGS_REGEX = Autocomplete.METATAGS.concat(Object.keys(Autocomplete.TAG_CATEGORIES)).join("|");
Autocomplete.TERM_REGEX = new RegExp(`([-~]*)(?:(${Autocomplete.METATAGS_REGEX}):)?(\\S*)$`, "i");
@@ -268,9 +269,7 @@ Autocomplete.render_item = function(list, item) {
Autocomplete.static_metatags = {
order: Autocomplete.ORDER_METATAGS,
status: [
"any", "deleted", "active", "pending", "flagged", "banned", "modqueue", "unmoderated"
],
status: ["any"].concat(Autocomplete.MISC_STATUSES),
rating: [
"safe", "questionable", "explicit"
],
@@ -280,12 +279,8 @@ Autocomplete.static_metatags = {
embedded: [
"true", "false"
],
child: [
"any", "none"
],
parent: [
"any", "none"
],
child: ["any", "none"].concat(Autocomplete.MISC_STATUSES),
parent: ["any", "none"].concat(Autocomplete.MISC_STATUSES),
filetype: [
"jpg", "png", "gif", "swf", "zip", "webm", "mp4"
],

View File

@@ -300,10 +300,10 @@ Post.initialize_favlist = function() {
});
}
Post.view_original = function(e) {
Post.view_original = function(e = null) {
if (Utility.test_max_width(660)) {
// Do the default behavior (navigate to image)
return false;
return;
}
var $image = $("#image");
@@ -316,13 +316,13 @@ Post.view_original = function(e) {
});
Note.Box.scale_all();
$("body").attr("data-post-current-image-size", "original");
return false;
e?.preventDefault();
}
Post.view_large = function(e) {
Post.view_large = function(e = null) {
if (Utility.test_max_width(660)) {
// Do the default behavior (navigate to image)
return false;
return;
}
var $image = $("#image");
@@ -335,7 +335,7 @@ Post.view_large = function(e) {
});
Note.Box.scale_all();
$("body").attr("data-post-current-image-size", "large");
return false;
e?.preventDefault();
}
Post.toggle_fit_window = function(e) {

View File

@@ -9,15 +9,6 @@ class CloudflareService
api_token.present? && zone.present?
end
def ips(expiry: 24.hours)
response = Danbooru::Http.cache(expiry).get("https://api.cloudflare.com/client/v4/ips")
return [] if response.code != 200
result = response.parse["result"]
ips = result["ipv4_cidrs"] + result["ipv6_cidrs"]
ips.map { |ip| IPAddr.new(ip) }
end
def purge_cache(urls)
return unless enabled?

View File

@@ -24,15 +24,6 @@ class CurrentUser
scoped(user, &block)
end
def self.as_system(&block)
if block_given?
scoped(::User.system, "127.0.0.1", &block)
else
self.user = User.system
self.ip_addr = "127.0.0.1"
end
end
def self.user
RequestStore[:current_user]
end

View File

@@ -1,18 +1,43 @@
require "danbooru/http/html_adapter"
require "danbooru/http/xml_adapter"
require "danbooru/http/cache"
require "danbooru/http/redirector"
require "danbooru/http/retriable"
require "danbooru/http/session"
module Danbooru
class Http
DEFAULT_TIMEOUT = 3
class DownloadError < StandardError; end
class FileTooLargeError < StandardError; end
DEFAULT_TIMEOUT = 10
MAX_REDIRECTS = 5
attr_writer :cache, :http
attr_accessor :max_size, :http
class << self
delegate :get, :put, :post, :delete, :cache, :follow, :timeout, :auth, :basic_auth, :headers, to: :new
delegate :get, :head, :put, :post, :delete, :cache, :follow, :max_size, :timeout, :auth, :basic_auth, :headers, :cookies, :use, :public_only, :download_media, to: :new
end
def initialize
@http ||=
::Danbooru::Http::ApplicationClient.new
.timeout(DEFAULT_TIMEOUT)
.headers("Accept-Encoding" => "gzip")
.headers("User-Agent": "#{Danbooru.config.canonical_app_name}/#{Rails.application.config.x.git_hash}")
.use(:auto_inflate)
.use(redirector: { max_redirects: MAX_REDIRECTS })
.use(:session)
end
def get(url, **options)
request(:get, url, **options)
end
def head(url, **options)
request(:head, url, **options)
end
# Perform an HTTP PUT request.
# BUG FIX: this previously dispatched `request(:get, ...)` — a
# copy-paste error that made `put` silently issue a GET instead.
def put(url, **options)
  request(:put, url, **options)
end
@@ -25,14 +50,14 @@ module Danbooru
request(:delete, url, **options)
end
def cache(expiry)
dup.tap { |o| o.cache = expiry.to_i }
end
def follow(*args)
dup.tap { |o| o.http = o.http.follow(*args) }
end
def max_size(size)
dup.tap { |o| o.max_size = size }
end
def timeout(*args)
dup.tap { |o| o.http = o.http.timeout(*args) }
end
@@ -49,43 +74,72 @@ module Danbooru
dup.tap { |o| o.http = o.http.headers(*args) }
end
def cookies(*args)
dup.tap { |o| o.http = o.http.cookies(*args) }
end
def use(*args)
dup.tap { |o| o.http = o.http.use(*args) }
end
def cache(expires_in)
use(cache: { expires_in: expires_in })
end
# allow requests only to public IPs, not to local or private networks.
def public_only
dup.tap do |o|
o.http = o.http.dup.tap do |http|
http.default_options = http.default_options.with_socket_class(ValidatingSocket)
end
end
end
concerning :DownloadMethods do
def download_media(url, no_polish: true, **options)
url = Addressable::URI.heuristic_parse(url)
response = headers(Referer: url.origin).get(url)
# prevent Cloudflare Polish from modifying images.
if no_polish && response.headers["CF-Polished"].present?
url.query_values = url.query_values.to_h.merge(danbooru_no_polish: SecureRandom.uuid)
return download_media(url, no_polish: false)
end
file = download_response(response, **options)
[response, MediaFile.open(file)]
end
def download_response(response, file: Tempfile.new("danbooru-download-", binmode: true))
raise DownloadError, "Downloading #{response.uri} failed with code #{response.status}" if response.status != 200
raise FileTooLargeError, response if @max_size && response.content_length.to_i > @max_size
size = 0
response.body.each do |chunk|
size += chunk.size
raise FileTooLargeError if @max_size && size > @max_size
file.write(chunk)
end
file.rewind
file
end
end
protected
def request(method, url, **options)
if @cache.present?
cached_request(method, url, **options)
else
raw_request(method, url, **options)
end
rescue HTTP::Redirector::TooManyRedirectsError
::HTTP::Response.new(status: 598, body: "", version: "1.1")
rescue HTTP::TimeoutError
# return a synthetic http error on connection timeouts
::HTTP::Response.new(status: 599, body: "", version: "1.1")
end
def cached_request(method, url, **options)
key = Cache.hash({ method: method, url: url, headers: http.default_options.headers.to_h, **options }.to_json)
cached_response = Cache.get(key, @cache) do
response = raw_request(method, url, **options)
{ status: response.status, body: response.to_s, headers: response.headers.to_h, version: "1.1" }
end
::HTTP::Response.new(**cached_response)
end
def raw_request(method, url, **options)
http.send(method, url, **options)
rescue ValidatingSocket::ProhibitedIpError
fake_response(597, "")
rescue HTTP::Redirector::TooManyRedirectsError
fake_response(598, "")
rescue HTTP::TimeoutError
fake_response(599, "")
end
def http
@http ||= ::HTTP.
follow(strict: false, max_hops: MAX_REDIRECTS).
timeout(DEFAULT_TIMEOUT).
use(:auto_inflate).
headers(Danbooru.config.http_headers).
headers("Accept-Encoding" => "gzip")
def fake_response(status, body)
::HTTP::Response.new(status: status, version: "1.1", body: ::HTTP::Response::Body.new(body))
end
end
end

View File

@@ -0,0 +1,31 @@
# An extension to HTTP::Client that lets us write Rack-style middlewares that
# hook into the request/response cycle and override how requests are made. This
# works by extending http.rb's concept of features (HTTP::Feature) to give them
# a `perform` method that takes a http request and returns a http response.
# This can be used to intercept and modify requests and return arbitrary responses.
module Danbooru
  class Http
    class ApplicationClient < HTTP::Client
      # Override `perform` to call the `perform` method on features first.
      #
      # Builds a middleware chain: every registered feature that responds to
      # `perform` wraps the next link, and the innermost link performs the
      # real request via `super`. Features are reduced in reverse order so
      # the first-registered feature becomes the outermost wrapper.
      def perform(request, options)
        features = options.features.values.reverse.select do |feature|
          feature.respond_to?(:perform)
        end

        # Innermost link: actually send the request over the wire.
        perform = proc { |req| super(req, options) }

        callback_chain = features.reduce(perform) do |callback_chain, feature|
          proc { |req| feature.perform(req, &callback_chain) }
        end

        callback_chain.call(request)
      end

      # Override `branch` to return an ApplicationClient instead of a
      # HTTP::Client so that chaining works.
      def branch(...)
        ApplicationClient.new(...)
      end
    end
  end
end

View File

@@ -0,0 +1,30 @@
module Danbooru
  class Http
    # A HTTP::Feature that caches responses in the app's ::Cache store for a
    # fixed expiry. Usage: `Danbooru::Http.use(cache: { expires_in: 1.minute })`.
    class Cache < HTTP::Feature
      HTTP::Options.register_feature :cache, self

      attr_reader :expires_in

      # @param expires_in [ActiveSupport::Duration] how long cached responses live
      def initialize(expires_in:)
        @expires_in = expires_in
      end

      # Return a cached response when present; otherwise perform the request
      # and cache whatever response comes back (errors included).
      def perform(request, &block)
        ::Cache.get(cache_key(request), expires_in) do
          response = yield request

          # XXX hack to remove connection state from response body so we can serialize it for caching.
          response.flush
          response.body.instance_variable_set(:@connection, nil)
          response.body.instance_variable_set(:@stream, nil)

          response
        end
      end

      # Cache key derived from verb + URL + headers, so requests differing
      # only by headers are cached separately.
      def cache_key(request)
        "http:" + ::Cache.hash({ method: request.verb, url: request.uri.to_s, headers: request.headers.sort }.to_json)
      end
    end
  end
end

View File

@@ -0,0 +1,12 @@
module Danbooru
  class Http
    # Registers a text/html MIME adapter with http.rb so that calling
    # `response.parse` on an HTML response yields a parsed Nokogiri document.
    class HtmlAdapter < HTTP::MimeType::Adapter
      HTTP::MimeType.register_adapter "text/html", self
      HTTP::MimeType.register_alias "text/html", :html

      # Parse an HTML body string into a Nokogiri::HTML5 document.
      def decode(body)
        Nokogiri::HTML5(body)
      end
    end
  end
end

View File

@@ -0,0 +1,40 @@
# A HTTP::Feature that automatically follows HTTP redirects.
# https://developer.mozilla.org/en-US/docs/Web/HTTP/Redirections
module Danbooru
  class Http
    class Redirector < HTTP::Feature
      HTTP::Options.register_feature :redirector, self

      attr_reader :max_redirects

      # @param max_redirects [Integer] maximum redirects to follow before raising
      def initialize(max_redirects: 5)
        @max_redirects = max_redirects
      end

      # Perform the request, following 3xx responses until a non-redirect
      # response arrives or the redirect budget is exhausted.
      #
      # @raise [HTTP::Redirector::TooManyRedirectsError] if more than max_redirects redirects occur
      def perform(request, &block)
        response = yield request

        redirects = max_redirects
        while response.status.redirect?
          raise HTTP::Redirector::TooManyRedirectsError if redirects <= 0
          response = yield build_redirect(request, response)
          redirects -= 1
        end

        response
      end

      # Build the follow-up request for a redirect response.
      #
      # A 303 (See Other) downgrades non-GET/HEAD requests to GET; all other
      # redirect codes keep the original verb.
      # NOTE(review): the Location header is parsed as-is — this presumably
      # relies on http.rb's `request.redirect` to resolve relative Locations
      # against the original URI; confirm against HTTP::Request#redirect.
      def build_redirect(request, response)
        location = response.headers["Location"].to_s
        uri = HTTP::URI.parse(location)

        verb = request.verb
        verb = :get if response.status == 303 && !request.verb.in?([:get, :head])

        request.redirect(uri, verb)
      end
    end
  end
end

View File

@@ -0,0 +1,54 @@
# A HTTP::Feature that automatically retries requests that return a 429 error
# or a Retry-After header. Usage: `Danbooru::Http.use(:retriable).get(url)`.
#
# https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/429
# https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After
module Danbooru
  class Http
    class Retriable < HTTP::Feature
      HTTP::Options.register_feature :retriable, self

      attr_reader :max_retries, :max_delay

      # @param max_retries [Integer] how many times to retry before giving up
      # @param max_delay [ActiveSupport::Duration] don't retry if the server asks us to wait longer than this
      def initialize(max_retries: 2, max_delay: 5.seconds)
        @max_retries = max_retries
        @max_delay = max_delay
      end

      # Perform the request, retrying while the response is retriable.
      def perform(request, &block)
        response = yield request
        retries = max_retries

        while retriable?(response) && retries > 0
          # Compute the delay once per attempt so the delay we log, the
          # delay we compare to max_delay, and the delay we sleep are all
          # the same value (previously retry_delay was recomputed three
          # times per iteration and could disagree).
          delay = retry_delay(response)
          break if delay > max_delay

          DanbooruLogger.info "Retrying url=#{request.uri} status=#{response.status} retries=#{retries} delay=#{delay}"
          retries -= 1
          sleep(delay)
          response = yield request
        end

        response
      end

      # A response is retriable if it was rate limited or explicitly asked
      # us to retry later.
      def retriable?(response)
        response.status == 429 || response.headers["Retry-After"].present?
      end

      # How long the server asked us to wait. Retry-After may be either a
      # number of seconds or an HTTP date (RFC 7231 section 7.1.3); an
      # absent or unparseable header means no delay.
      def retry_delay(response, current_time: Time.zone.now)
        retry_after = response.headers["Retry-After"]

        if retry_after.blank?
          0.seconds
        elsif retry_after =~ /\A\d+\z/
          retry_after.to_i.seconds
        else
          retry_at = Time.zone.parse(retry_after)
          return 0.seconds if retry_at.blank?
          [retry_at - current_time, 0].max.seconds
        end
      end
    end
  end
end

View File

@@ -0,0 +1,37 @@
module Danbooru
  class Http
    # A HTTP::Feature that persists cookies across requests, like a browser
    # session: cookies set by one response are sent on subsequent requests.
    # Usage: `Danbooru::Http.use(:session)`.
    class Session < HTTP::Feature
      HTTP::Options.register_feature :session, self

      attr_reader :cookie_jar

      # @param cookie_jar [HTTP::CookieJar] the jar holding session cookies
      def initialize(cookie_jar: HTTP::CookieJar.new)
        @cookie_jar = cookie_jar
      end

      # Attach saved cookies to the outgoing request, then record any
      # cookies set by the response.
      def perform(request)
        add_cookies(request)
        response = yield request
        save_cookies(response)
        response
      end

      # Set the request's Cookie header from the jar (merged with any
      # cookies already on the request).
      def add_cookies(request)
        cookies = cookies_for_request(request)
        request.headers["Cookie"] = cookies if cookies.present?
      end

      # Merge jar cookies for this URI with cookies explicitly set on the
      # request; explicit request cookies win over saved ones.
      def cookies_for_request(request)
        saved_cookies = cookie_jar.each(request.uri).map { |c| [c.name, c.value] }.to_h
        request_cookies = HTTP::Cookie.cookie_value_to_hash(request.headers["Cookie"].to_s)
        saved_cookies.merge(request_cookies).map { |name, value| "#{name}=#{value}" }.join("; ")
      end

      # Store every cookie from the response's Set-Cookie headers in the jar.
      def save_cookies(response)
        response.cookies.each do |cookie|
          cookie_jar.add(cookie)
        end
      end
    end
  end
end

View File

@@ -0,0 +1,12 @@
module Danbooru
  class Http
    # Registers an application/xml MIME adapter with http.rb so that calling
    # `response.parse` on an XML response yields an indifferent-access hash.
    class XmlAdapter < HTTP::MimeType::Adapter
      HTTP::MimeType.register_adapter "application/xml", self
      HTTP::MimeType.register_alias "application/xml", :xml

      # Decode an XML body string into a HashWithIndifferentAccess.
      def decode(body)
        parsed = Hash.from_xml(body)
        parsed.with_indifferent_access
      end
    end
  end
end

View File

@@ -1,123 +0,0 @@
require 'resolv'
module Downloads
  # Downloads a file from a source URL using the site-specific source
  # strategy for headers and URLs, streaming to a tempfile with a size
  # limit, retrying transient errors, and refusing banned (non-public) IPs.
  class File
    include ActiveModel::Validations

    class Error < StandardError; end

    # Transient network failures worth retrying.
    RETRIABLE_ERRORS = [Errno::ECONNRESET, Errno::ETIMEDOUT, Errno::EIO, Errno::EHOSTUNREACH, Errno::ECONNREFUSED, Timeout::Error, IOError]

    delegate :data, to: :strategy
    attr_reader :url, :referer
    validate :validate_url

    # @param url [String] the URL to download
    # @param referer [String, nil] optional referer passed to the source strategy
    # @raise [ActiveModel::ValidationError] if the URL fails validate_url
    def initialize(url, referer = nil)
      @url = Addressable::URI.parse(url) rescue nil
      @referer = referer
      validate!
    end

    # Size in bytes of the remote file, determined via a HEAD request.
    def size
      res = HTTParty.head(uncached_url, **httparty_options, timeout: 3)

      if res.success?
        res.content_length
      else
        raise HTTParty::ResponseError.new(res)
      end
    end

    # Download the file, retrying transient errors up to `tries` times.
    # @return [Array(Tempfile, Sources::Strategies::Base)] the downloaded file and the strategy used
    def download!(url: uncached_url, tries: 3, **options)
      Retriable.retriable(on: RETRIABLE_ERRORS, tries: tries, base_interval: 0) do
        file = http_get_streaming(url, headers: strategy.headers, **options)
        return [file, strategy]
      end
    end

    # Require a non-blank, absolute http(s) URL.
    # NOTE(review): if @url is nil (blank or unparseable), the later
    # `url.host` / `url.scheme` calls raise NoMethodError on nil rather
    # than adding a validation error — confirm blank URLs can't reach them.
    def validate_url
      errors[:base] << "URL must not be blank" if url.blank?
      errors[:base] << "'#{url}' is not a valid url" if !url.host.present?
      errors[:base] << "'#{url}' is not a valid url. Did you mean 'http://#{url}'?" if !url.scheme.in?(%w[http https])
    end

    # Stream the response body into a tempfile, enforcing max_size as the
    # bytes arrive (a max_size of 0 disables the limit).
    # NOTE(review): the `headers:` keyword is accepted but unused here —
    # headers come from httparty_options via strategy.headers; confirm.
    def http_get_streaming(url, file: Tempfile.new(binmode: true), headers: {}, max_size: Danbooru.config.max_file_size)
      size = 0

      res = HTTParty.get(url, httparty_options) do |chunk|
        # Skip redirect bodies; only write chunks of the final response.
        next if chunk.code == 302
        size += chunk.size
        raise Error.new("File is too large (max size: #{max_size})") if size > max_size && max_size > 0
        file.write(chunk)
      end

      if res.success?
        file.rewind
        return file
      else
        raise Error.new("HTTP error code: #{res.code} #{res.message}")
      end
    end

    # Prevent Cloudflare from potentially mangling the image. See issue #3528.
    # Appends a random cache-busting query param when the host is Cloudflare.
    def uncached_url
      return file_url unless is_cloudflare?(file_url)

      url = file_url.dup
      url.query_values = url.query_values.to_h.merge(danbooru_no_cache: SecureRandom.uuid)
      url
    end

    def preview_url
      @preview_url ||= Addressable::URI.parse(strategy.preview_url)
    end

    def file_url
      @file_url ||= Addressable::URI.parse(strategy.image_url)
    end

    # The site-specific source strategy for this URL.
    def strategy
      @strategy ||= Sources::Strategies.find(url.to_s, referer)
    end

    # Options shared by all HTTParty calls: streaming, strategy headers, and
    # a connection adapter that rejects banned IPs before connecting.
    def httparty_options
      {
        timeout: 10,
        stream_body: true,
        headers: strategy.headers,
        connection_adapter: ValidatingConnectionAdapter
      }.deep_merge(Danbooru.config.httparty_options)
    end

    # True if the URL's host resolves to an IP inside Cloudflare's ranges.
    def is_cloudflare?(url)
      ip_addr = IPAddr.new(Resolv.getaddress(url.hostname))
      CloudflareService.new.ips.any? { |subnet| subnet.include?(ip_addr) }
    end

    # True for loopback, link-local, multicast, private/unique-local, or
    # unspecified addresses — i.e. IPs downloads must never touch.
    def self.banned_ip?(ip)
      ip = IPAddress.parse(ip.to_s) unless ip.is_a?(IPAddress)

      if ip.ipv4?
        ip.loopback? || ip.link_local? || ip.multicast? || ip.private?
      elsif ip.ipv6?
        ip.loopback? || ip.link_local? || ip.unique_local? || ip.unspecified?
      end
    end
  end

  # Hook into HTTParty to validate the IP before following redirects.
  # https://www.rubydoc.info/github/jnunemaker/httparty/HTTParty/ConnectionAdapter
  class ValidatingConnectionAdapter < HTTParty::ConnectionAdapter
    # Resolve the target host and refuse the connection if its IP is banned.
    def self.call(uri, options)
      ip_addr = IPAddress.parse(::Resolv.getaddress(uri.hostname))

      if Downloads::File.banned_ip?(ip_addr)
        raise Downloads::File::Error, "Downloads from #{ip_addr} are not allowed"
      end

      super(uri, options)
    end
  end
end

View File

@@ -1,4 +1,6 @@
class ImageProxy
class Error < StandardError; end
def self.needs_proxy?(url)
fake_referer_for(url).present?
end
@@ -8,16 +10,13 @@ class ImageProxy
end
def self.get_image(url)
if url.blank?
raise "Must specify url"
end
raise Error, "URL not present" unless url.present?
raise Error, "Proxy not allowed for this url (url=#{url})" unless needs_proxy?(url)
if !needs_proxy?(url)
raise "Proxy not allowed for this site"
end
referer = fake_referer_for(url)
response = Danbooru::Http.headers(Referer: referer).get(url)
raise Error, "Couldn't proxy image (code=#{response.status}, url=#{url})" unless response.status.success?
response = HTTParty.get(url, Danbooru.config.httparty_options.deep_merge(headers: {"Referer" => fake_referer_for(url)}))
raise "HTTP error code: #{response.code} #{response.message}" unless response.success?
response
end
end

View File

@@ -12,8 +12,9 @@ class IqdbProxy
end
def download(url, type)
download = Downloads::File.new(url)
file, strategy = download.download!(url: download.send(type))
strategy = Sources::Strategies.find(url)
download_url = strategy.send(type)
file = strategy.download_file!(download_url)
file
end
@@ -32,7 +33,7 @@ class IqdbProxy
file = download(params[:image_url], :url)
results = query(file: file, limit: limit)
elsif params[:file_url].present?
file = download(params[:file_url], :file_url)
file = download(params[:file_url], :image_url)
results = query(file: file, limit: limit)
elsif params[:post_id].present?
url = Post.find(params[:post_id]).preview_file_url
@@ -50,9 +51,12 @@ class IqdbProxy
file.try(:close)
end
def query(params)
def query(file: nil, url: nil, limit: 20)
raise NotImplementedError, "the IQDBs service isn't configured" unless enabled?
response = http.post("#{iqdbs_server}/similar", body: params)
file = HTTP::FormData::File.new(file) if file
form = { file: file, url: url, limit: limit }.compact
response = http.timeout(30).post("#{iqdbs_server}/similar", form: form)
raise Error, "IQDB error: #{response.status}" if response.status != 200
raise Error, "IQDB error: #{response.parse["error"]}" if response.parse.is_a?(Hash)

View File

@@ -43,6 +43,8 @@ class MediaFile
else
:bin
end
rescue EOFError
:bin
end
def self.videos_enabled?

View File

@@ -4,8 +4,7 @@ class NicoSeigaApiClient
attr_reader :http
# XXX temp disable following redirects.
def initialize(work_id:, type:, http: Danbooru::Http.follow(nil))
def initialize(work_id:, type:, http: Danbooru::Http.new)
@work_id = work_id
@work_type = type
@http = http
@@ -80,28 +79,19 @@ class NicoSeigaApiClient
end
def get(url)
cookie_header = Cache.get("nicoseiga-cookie-header") || regenerate_cookie_header
resp = http.headers({Cookie: cookie_header}).cache(1.minute).get(url)
if resp.headers["Location"] =~ %r{seiga\.nicovideo\.jp/login/}i
cookie_header = regenerate_cookie_header
resp = http.headers({Cookie: cookie_header}).cache(1.minute).get(url)
end
resp
end
def regenerate_cookie_header
form = {
mail_tel: Danbooru.config.nico_seiga_login,
password: Danbooru.config.nico_seiga_password
}
resp = http.post("https://account.nicovideo.jp/api/v1/login", form: form)
cookies = resp.cookies.map { |c| c.name + "=" + c.value }
cookies << "accept_fetish_warning=2"
Cache.put("nicoseiga-cookie-header", cookies.join(";"), 1.week)
# XXX should fail gracefully instead of raising exception
resp = http.cache(1.hour).post("https://account.nicovideo.jp/login/redirector?site=seiga", form: form)
raise RuntimeError, "NicoSeiga login failed (status=#{resp.status} url=#{url})" if resp.status != 200
resp = http.cache(1.minute).get(url)
#raise RuntimeError, "NicoSeiga get failed (status=#{resp.status} url=#{url})" if resp.status != 200
resp
end
memoize :api_response, :manga_api_response, :user_api_response

View File

@@ -1,5 +1,3 @@
require 'resolv-replace'
class PixivApiClient
extend Memoist
@@ -8,6 +6,21 @@ class PixivApiClient
CLIENT_SECRET = "HP3RmkgAmEGro0gn1x9ioawQE8WMfvLXDz3ZqxpK"
CLIENT_HASH_SALT = "28c1fdd170a5204386cb1313c7077b34f83e4aaf4aa829ce78c231e05b0bae2c"
# Tools to not include in the tags list. We don't tag digital media, so
# including these results in bad translated tags suggestions.
TOOLS_BLACKLIST = %w[
Photoshop Illustrator Fireworks Flash Painter PaintShopPro pixiv\ Sketch
CLIP\ STUDIO\ PAINT IllustStudio ComicStudio RETAS\ STUDIO SAI PhotoStudio
Pixia NekoPaint PictBear openCanvas ArtRage Expression Inkscape GIMP
CGillust COMICWORKS MS_Paint EDGE AzPainter AzPainter2 AzDrawing
PicturePublisher SketchBookPro Processing 4thPaint GraphicsGale mdiapp
Paintgraphic AfterEffects drawr CLIP\ PAINT\ Lab FireAlpaca Pixelmator
AzDrawing2 MediBang\ Paint Krita ibisPaint Procreate Live2D
Lightwave3D Shade Poser STRATA AnimationMaster XSI CARRARA CINEMA4D Maya
3dsMax Blender ZBrush Metasequoia Sunny3D Bryce Vue Hexagon\ King SketchUp
VistaPro Sculptris Comi\ Po! modo DAZ\ Studio 3D-Coat
]
class Error < StandardError; end
class BadIDError < Error; end
@@ -24,7 +37,7 @@ class PixivApiClient
@artist_commentary_title = json["title"].to_s
@artist_commentary_desc = json["caption"].to_s
@tags = json["tags"].reject {|x| x =~ /^http:/}
@tags += json["tools"]
@tags += json["tools"] - TOOLS_BLACKLIST
if json["metadata"]
if json["metadata"]["zip_urls"]
@@ -99,66 +112,13 @@ class PixivApiClient
end
end
class FanboxResponse
attr_reader :json
def initialize(json)
@json = json
end
def name
json["body"]["user"]["name"]
end
def user_id
json["body"]["user"]["userId"]
end
def moniker
""
end
def page_count
json["body"]["body"]["images"].size
end
def artist_commentary_title
json["body"]["title"]
end
def artist_commentary_desc
json["body"]["body"]["text"]
end
def tags
[]
end
def pages
if json["body"]["body"]
json["body"]["body"]["images"].map {|x| x["originalUrl"]}
else
[]
end
end
end
def work(illust_id)
headers = Danbooru.config.http_headers.merge(
"Referer" => "http://www.pixiv.net",
"Content-Type" => "application/x-www-form-urlencoded",
"Authorization" => "Bearer #{access_token}"
)
params = {
"image_sizes" => "large",
"include_stats" => "true"
}
params = { image_sizes: "large", include_stats: "true" }
url = "https://public-api.secure.pixiv.net/v#{API_VERSION}/works/#{illust_id.to_i}.json"
response = Danbooru::Http.cache(1.minute).headers(headers).get(url, params: params)
response = api_client.cache(1.minute).get(url, params: params)
json = response.parse
if response.code == 200
if response.status == 200
WorkResponse.new(json["response"][0])
elsif json["status"] == "failure" && json.dig("errors", "system", "message") =~ /対象のイラストは見つかりませんでした。/
raise BadIDError.new("Pixiv ##{illust_id} not found: work was deleted, made private, or ID is invalid.")
@@ -169,32 +129,12 @@ class PixivApiClient
raise Error.new("Pixiv API call failed (status=#{response.code} body=#{response.body})")
end
def fanbox(fanbox_id)
url = "https://www.pixiv.net/ajax/fanbox/post?postId=#{fanbox_id.to_i}"
resp = agent.get(url)
json = JSON.parse(resp.body)
if resp.code == "200"
FanboxResponse.new(json)
elsif json["status"] == "failure"
raise Error.new("Pixiv API call failed (status=#{resp.code} body=#{body})")
end
rescue JSON::ParserError
raise Error.new("Pixiv API call failed (status=#{resp.code} body=#{body})")
end
def novel(novel_id)
headers = Danbooru.config.http_headers.merge(
"Referer" => "http://www.pixiv.net",
"Content-Type" => "application/x-www-form-urlencoded",
"Authorization" => "Bearer #{access_token}"
)
url = "https://public-api.secure.pixiv.net/v#{API_VERSION}/novels/#{novel_id.to_i}.json"
resp = HTTParty.get(url, Danbooru.config.httparty_options.deep_merge(headers: headers))
body = resp.body.force_encoding("utf-8")
json = JSON.parse(body)
resp = api_client.cache(1.minute).get(url)
json = resp.parse
if resp.success?
if resp.status == 200
NovelResponse.new(json["response"][0])
elsif json["status"] == "failure" && json.dig("errors", "system", "message") =~ /対象のイラストは見つかりませんでした。/
raise Error.new("Pixiv API call failed (status=#{resp.code} body=#{body})")
@@ -204,42 +144,41 @@ class PixivApiClient
end
def access_token
Cache.get("pixiv-papi-access-token", 3000) do
access_token = nil
# truncate timestamp to 1-hour resolution so that it doesn't break caching.
client_time = Time.zone.now.utc.change(min: 0).rfc3339
client_hash = Digest::MD5.hexdigest(client_time + CLIENT_HASH_SALT)
client_time = Time.now.rfc3339
client_hash = Digest::MD5.hexdigest(client_time + CLIENT_HASH_SALT)
headers = {
"Referer": "http://www.pixiv.net",
"X-Client-Time": client_time,
"X-Client-Hash": client_hash
}
headers = {
"Referer": "http://www.pixiv.net",
"X-Client-Time": client_time,
"X-Client-Hash": client_hash
}
params = {
username: Danbooru.config.pixiv_login,
password: Danbooru.config.pixiv_password,
grant_type: "password",
client_id: CLIENT_ID,
client_secret: CLIENT_SECRET
}
url = "https://oauth.secure.pixiv.net/auth/token"
params = {
username: Danbooru.config.pixiv_login,
password: Danbooru.config.pixiv_password,
grant_type: "password",
client_id: CLIENT_ID,
client_secret: CLIENT_SECRET
}
resp = HTTParty.post(url, Danbooru.config.httparty_options.deep_merge(body: params, headers: headers))
body = resp.body.force_encoding("utf-8")
resp = http.headers(headers).cache(1.hour).post("https://oauth.secure.pixiv.net/auth/token", form: params)
return nil unless resp.status == 200
if resp.success?
json = JSON.parse(body)
access_token = json["response"]["access_token"]
else
raise Error.new("Pixiv API access token call failed (status=#{resp.code} body=#{body})")
end
access_token
end
resp.parse.dig("response", "access_token")
end
def agent
PixivWebAgent.build
def api_client
http.headers(
"Referer": "http://www.pixiv.net",
"Content-Type": "application/x-www-form-urlencoded",
"Authorization": "Bearer #{access_token}"
)
end
memoize :agent
def http
Danbooru::Http.new
end
memoize :access_token, :api_client, :http
end

View File

@@ -1,74 +0,0 @@
# Builds a Mechanize agent logged in to pixiv.net, caching the session
# cookies so repeated builds don't have to log in again.
class PixivWebAgent
  SESSION_CACHE_KEY = "pixiv-phpsessid"
  COMIC_SESSION_CACHE_KEY = "pixiv-comicsessid"
  SESSION_COOKIE_KEY = "PHPSESSID"
  COMIC_SESSION_COOKIE_KEY = "_pixiv-comic_session"

  # The pixiv session id currently in the agent's cookie jar, if any.
  def self.phpsessid(agent)
    agent.cookies.select { |cookie| cookie.name == SESSION_COOKIE_KEY }.first.try(:value)
  end

  # Build a Mechanize agent. Reuses cached session cookies when available;
  # otherwise performs a full login with the configured pixiv credentials
  # and caches the resulting cookies for a week.
  def self.build
    mech = Mechanize.new
    mech.keep_alive = false
    phpsessid = Cache.get(SESSION_CACHE_KEY)
    comicsessid = Cache.get(COMIC_SESSION_CACHE_KEY)

    if phpsessid
      # Reuse the cached main-site session cookie.
      cookie = Mechanize::Cookie.new(SESSION_COOKIE_KEY, phpsessid)
      cookie.domain = ".pixiv.net"
      cookie.path = "/"
      mech.cookie_jar.add(cookie)

      # Also restore the comic.pixiv.net session cookie, if cached.
      if comicsessid
        cookie = Mechanize::Cookie.new(COMIC_SESSION_COOKIE_KEY, comicsessid)
        cookie.domain = ".pixiv.net"
        cookie.path = "/"
        mech.cookie_jar.add(cookie)
      end
    else
      # NOTE(review): "^source=pc" in this Referer looks like a typo for
      # "&source=pc" — confirm before changing (it's a runtime string).
      headers = {
        "Origin" => "https://accounts.pixiv.net",
        "Referer" => "https://accounts.pixiv.net/login?lang=en^source=pc&view_type=page&ref=wwwtop_accounts_index"
      }
      params = {
        pixiv_id: Danbooru.config.pixiv_login,
        password: Danbooru.config.pixiv_password,
        captcha: nil,
        g_captcha_response: nil,
        source: "pc",
        post_key: nil
      }

      # Scrape the CSRF post_key from the login page's embedded JSON config.
      mech.get("https://accounts.pixiv.net/login?lang=en&source=pc&view_type=page&ref=wwwtop_accounts_index") do |page|
        json = page.search("input#init-config").first.attr("value")

        if json =~ /pixivAccount\.postKey":"([a-f0-9]+)/
          params[:post_key] = $1
        end
      end

      mech.post("https://accounts.pixiv.net/api/login?lang=en", params, headers)

      # On a successful login, cache the session cookie for later builds.
      if mech.current_page.body =~ /"error":false/
        cookie = mech.cookies.select {|x| x.name == SESSION_COOKIE_KEY}.first

        if cookie
          Cache.put(SESSION_CACHE_KEY, cookie.value, 1.week)
        end
      end

      # Best-effort: also pick up the comic.pixiv.net session cookie.
      begin
        mech.get("https://comic.pixiv.net") do
          cookie = mech.cookies.select {|x| x.name == COMIC_SESSION_COOKIE_KEY}.first

          if cookie
            Cache.put(COMIC_SESSION_CACHE_KEY, cookie.value, 1.week)
          end
        end
      rescue Net::HTTPServiceUnavailable
        # ignore
      end
    end

    mech
  end
end

View File

@@ -307,6 +307,8 @@ class PostQueryBuilder
Post.where(parent: nil)
when "any"
Post.where.not(parent: nil)
when /pending|flagged|modqueue|deleted|banned|active|unmoderated/
Post.where.not(parent: nil).where(parent: status_matches(parent))
when /\A\d+\z/
Post.where(id: parent).or(Post.where(parent: parent))
else
@@ -320,6 +322,8 @@ class PostQueryBuilder
Post.where(has_children: false)
when "any"
Post.where(has_children: true)
when /pending|flagged|modqueue|deleted|banned|active|unmoderated/
Post.where(has_children: true).where(children: status_matches(child))
else
Post.none
end

View File

@@ -20,29 +20,30 @@ class ReportbooruService
body.lines.map(&:split).map { [_1, _2.to_i] }
end
def post_search_rankings(date = Date.today, expires_in: 1.minutes)
return [] unless enabled?
response = http.cache(expires_in).get("#{reportbooru_server}/post_searches/rank?date=#{date}")
return [] if response.status != 200
JSON.parse(response.to_s.force_encoding("utf-8"))
def post_search_rankings(date, expires_in: 1.minutes)
request("#{reportbooru_server}/post_searches/rank?date=#{date}", expires_in)
end
def post_view_rankings(date = Date.today, expires_in: 1.minutes)
return [] unless enabled?
response = http.get("#{reportbooru_server}/post_views/rank?date=#{date}")
return [] if response.status != 200
JSON.parse(response.to_s.force_encoding("utf-8"))
def post_view_rankings(date, expires_in: 1.minutes)
request("#{reportbooru_server}/post_views/rank?date=#{date}", expires_in)
end
def popular_searches(date = Date.today, limit: 100)
def popular_searches(date, limit: 100)
ranking = post_search_rankings(date)
ranking.take(limit).map(&:first)
end
def popular_posts(date = Date.today, limit: 100)
def popular_posts(date, limit: 100)
ranking = post_view_rankings(date)
ranking = post_view_rankings(date.yesterday) if ranking.blank?
ranking.take(limit).map { |x| Post.find(x[0]) }
end
def request(url, expires_in)
return [] unless enabled?
response = http.cache(expires_in).get(url)
return [] if response.status != 200
JSON.parse(response.to_s.force_encoding("utf-8"))
end
end

View File

@@ -147,7 +147,7 @@ module Sources::Strategies
urls = urls.reverse
end
chosen_url = urls.find { |url| http_exists?(url, headers) }
chosen_url = urls.find { |url| http_exists?(url) }
chosen_url || url
end
end

View File

@@ -14,6 +14,8 @@
module Sources
module Strategies
class Base
class DownloadError < StandardError; end
attr_reader :url, :referer_url, :urls, :parsed_url, :parsed_referer, :parsed_urls
extend Memoist
@@ -35,9 +37,9 @@ module Sources
# <tt>referrer_url</tt> so the strategy can discover the HTML
# page and other information.
def initialize(url, referer_url = nil)
@url = url
@referer_url = referer_url
@urls = [url, referer_url].select(&:present?)
@url = url.to_s
@referer_url = referer_url&.to_s
@urls = [@url, @referer_url].select(&:present?)
@parsed_url = Addressable::URI.heuristic_parse(url) rescue nil
@parsed_referer = Addressable::URI.heuristic_parse(referer_url) rescue nil
@@ -139,15 +141,28 @@ module Sources
# Subclasses should merge in any required headers needed to access resources
# on the site.
def headers
Danbooru.config.http_headers
{}
end
# Returns the size of the image resource without actually downloading the file.
def size
Downloads::File.new(image_url).size
http.head(image_url).content_length.to_i
end
memoize :size
# Download the file at the given url, or at the main image url by default.
# Download the file at `download_url`, defaulting to the main image url.
# Returns the downloaded file.
# Raises DownloadError when no url is known or the server returns a non-200.
def download_file!(download_url = image_url)
  if download_url.blank?
    raise DownloadError, "Download failed: couldn't find download url for #{url}"
  end

  response, file = http.download_media(download_url)
  unless response.status == 200
    raise DownloadError, "Download failed: #{download_url} returned error #{response.status}"
  end
  file
end
# Shared HTTP client for source strategies: presumably restricted to public
# hosts (per `public_only` — confirm against Danbooru::Http), with a 30 second
# timeout and responses capped at the configured maximum file size.
def http
  client = Danbooru::Http.public_only
  client.timeout(30).max_size(Danbooru.config.max_file_size)
end
memoize :http
# The url to use for artist finding purposes. This will be stored in the
# artist entry. Normally this will be the profile url.
def normalize_for_artist_finder
@@ -274,9 +289,8 @@ module Sources
to_h.to_json
end
def http_exists?(url, headers)
res = HTTParty.head(url, Danbooru.config.httparty_options.deep_merge(headers: headers))
res.success?
# True if a HEAD request to `url` (with optional extra headers) returns a
# success status.
def http_exists?(url, headers = {})
  response = http.headers(headers).head(url)
  response.status.success?
end
# Convert commentary to dtext by stripping html tags. Sites can override

View File

@@ -64,11 +64,10 @@ module Sources
def page
return nil if page_url.blank?
doc = Cache.get("hentai-foundry:#{page_url}", 1.minute) do
HTTParty.get("#{page_url}?enterAgree=1").body
end
response = Danbooru::Http.new.cache(1.minute).get("#{page_url}?enterAgree=1")
return nil unless response.status == 200
Nokogiri::HTML(doc)
response.parse
end
def tags

View File

@@ -73,8 +73,7 @@ module Sources
end
def image_url
return if image_urls.blank?
return url if api_client.blank?
return url if image_urls.blank? || api_client.blank?
img = case url
when DIRECT || CDN_DIRECT then "https://seiga.nicovideo.jp/image/source/#{image_id_from_url(url)}"
@@ -83,7 +82,7 @@ module Sources
end
resp = api_client.get(img)
if resp.headers["Location"] =~ %r{https?://.+/(\w+/\d+/\d+)\z}i
if resp.uri.to_s =~ %r{https?://.+/(\w+/\d+/\d+)\z}i
"https://lohas.nicoseiga.jp/priv/#{$1}"
else
img
@@ -181,12 +180,12 @@ module Sources
def api_client
if illust_id.present?
NicoSeigaApiClient.new(work_id: illust_id, type: "illust")
NicoSeigaApiClient.new(work_id: illust_id, type: "illust", http: http)
elsif manga_id.present?
NicoSeigaApiClient.new(work_id: manga_id, type: "manga")
NicoSeigaApiClient.new(work_id: manga_id, type: "manga", http: http)
elsif image_id.present?
# We default to the illust type and attempt to query the api anyway
NicoSeigaApiClient.new(work_id: image_id, type: "illust")
NicoSeigaApiClient.new(work_id: image_id, type: "illust", http: http)
end
end
memoize :api_client

View File

@@ -178,54 +178,21 @@ module Sources
def page
return nil if page_url.blank?
doc = agent.get(page_url)
http = Danbooru::Http.new
form = { email: Danbooru.config.nijie_login, password: Danbooru.config.nijie_password }
if doc.search("div#header-login-container").any?
# Session cache is invalid, clear it and log in normally.
Cache.delete("nijie-session")
doc = agent.get(page_url)
end
# XXX `retriable` must come after `cache` so that retries don't return cached error responses.
response = http.cache(1.hour).use(retriable: { max_retries: 20 }).post("https://nijie.info/login_int.php", form: form)
DanbooruLogger.info "Nijie login failed (#{url}, #{response.status})" if response.status != 200
return nil unless response.status == 200
doc
rescue Mechanize::ResponseCodeError => e
return nil if e.response_code.to_i == 404
raise
response = http.cookies(R18: 1).cache(1.minute).get(page_url)
return nil unless response.status == 200
response&.parse
end
memoize :page
def agent
mech = Mechanize.new
session = Cache.get("nijie-session")
if session
cookie = Mechanize::Cookie.new("NIJIEIJIEID", session)
cookie.domain = ".nijie.info"
cookie.path = "/"
mech.cookie_jar.add(cookie)
else
mech.get("https://nijie.info/login.php") do |page|
page.form_with(:action => "/login_int.php") do |form|
form['email'] = Danbooru.config.nijie_login
form['password'] = Danbooru.config.nijie_password
end.click_button
end
session = mech.cookie_jar.cookies.select {|c| c.name == "NIJIEIJIEID"}.first
Cache.put("nijie-session", session.value, 1.day) if session
end
# This cookie needs to be set to allow viewing of adult works while anonymous
cookie = Mechanize::Cookie.new("R18", "1")
cookie.domain = ".nijie.info"
cookie.path = "/"
mech.cookie_jar.add(cookie)
mech
rescue Mechanize::ResponseCodeError => e
raise unless e.response_code.to_i == 429
sleep(5)
retry
end
memoize :agent
end
end
end

View File

@@ -47,7 +47,7 @@ module Sources
when %r{\Ahttps?://c(?:s|han|[1-4])\.sankakucomplex\.com/data(?:/sample)?/(?:[a-f0-9]{2}/){2}(?:sample-|preview)?([a-f0-9]{32})}i
"https://chan.sankakucomplex.com/en/post/show?md5=#{$1}"
when %r{\Ahttps?://(?:www|s(?:tatic|[1-4]))\.zerochan\.net/.+(?:\.|\/)(\d+)(?:\.(?:jpe?g?))?\z}i
when %r{\Ahttps?://(?:www|s(?:tatic|[1-4]))\.zerochan\.net/.+(?:\.|\/)(\d+)(?:\.(?:jpe?g?|png))?\z}i
"https://www.zerochan.net/#{$1}#full"
when %r{\Ahttps?://static[1-6]?\.minitokyo\.net/(?:downloads|view)/(?:\d{2}/){2}(\d+)}i

View File

@@ -64,9 +64,6 @@ module Sources
ORIG_IMAGE = %r{#{PXIMG}/img-original/img/#{DATE}/(?<illust_id>\d+)_p(?<page>\d+)\.#{EXT}\z}i
STACC_PAGE = %r{\A#{WEB}/stacc/#{MONIKER}/?\z}i
NOVEL_PAGE = %r{(?:\Ahttps?://www\.pixiv\.net/novel/show\.php\?id=(\d+))}
FANBOX_ACCOUNT = %r{(?:\Ahttps?://www\.pixiv\.net/fanbox/creator/\d+\z)}
FANBOX_IMAGE = %r{(?:\Ahttps?://fanbox\.pixiv\.net/images/post/(\d+))}
FANBOX_PAGE = %r{(?:\Ahttps?://www\.pixiv\.net/fanbox/creator/\d+/post/(\d+))}
def self.to_dtext(text)
if text.nil?
@@ -127,14 +124,6 @@ module Sources
return "https://www.pixiv.net/novel/show.php?id=#{novel_id}&mode=cover"
end
if fanbox_id.present?
return "https://www.pixiv.net/fanbox/creator/#{metadata.user_id}/post/#{fanbox_id}"
end
if fanbox_account_id.present?
return "https://www.pixiv.net/fanbox/creator/#{fanbox_account_id}"
end
if illust_id.present?
return "https://www.pixiv.net/artworks/#{illust_id}"
end
@@ -192,17 +181,7 @@ module Sources
end
def headers
if fanbox_id.present?
# need the session to download fanbox images
return {
"Referer" => "https://www.pixiv.net/fanbox",
"Cookie" => HTTP::Cookie.cookie_value(agent.cookies)
}
end
{
"Referer" => "https://www.pixiv.net"
}
{ "Referer" => "https://www.pixiv.net" }
end
def normalize_for_source
@@ -242,10 +221,6 @@ module Sources
end
def image_urls_sub
if url =~ FANBOX_IMAGE
return [url]
end
# Raw urls require too much normalization to handle reliably, so just
# fetch the canonical url from the api every time.
@@ -265,7 +240,7 @@ module Sources
# even though it makes sense to reference page_url here, it will only look
# at (url, referer_url).
def illust_id
return nil if novel_id.present? || fanbox_id.present?
return nil if novel_id.present?
parsed_urls.each do |url|
# http://www.pixiv.net/member_illust.php?mode=medium&illust_id=18557054
@@ -328,46 +303,11 @@ module Sources
end
memoize :novel_id
def fanbox_id
[url, referer_url].each do |x|
if x =~ FANBOX_PAGE
return $1
end
if x =~ FANBOX_IMAGE
return $1
end
end
nil
end
memoize :fanbox_id
def fanbox_account_id
[url, referer_url].each do |x|
if x =~ FANBOX_ACCOUNT
return x
end
end
nil
end
memoize :fanbox_account_id
def agent
PixivWebAgent.build
end
memoize :agent
def metadata
if novel_id.present?
return PixivApiClient.new.novel(novel_id)
end
if fanbox_id.present?
return PixivApiClient.new.fanbox(fanbox_id)
end
PixivApiClient.new.work(illust_id)
end
memoize :metadata

View File

@@ -23,7 +23,7 @@ module Sources::Strategies
OLD_IMAGE = %r{\Ahttps?://#{DOMAIN}/(?<dir>#{MD5}/)?#{FILENAME}_(?<size>\w+)\.#{EXT}\z}i
IMAGE = %r{\Ahttps?://#{DOMAIN}/}i
VIDEO = %r{\Ahttps?://(?:vtt|ve\.media)\.tumblr\.com/}i
VIDEO = %r{\Ahttps?://(?:vtt|ve|va\.media)\.tumblr\.com/}i
POST = %r{\Ahttps?://(?<blog_name>[^.]+)\.tumblr\.com/(?:post|image)/(?<post_id>\d+)}i
def self.enabled?
@@ -168,7 +168,7 @@ module Sources::Strategies
end
candidates.find do |candidate|
http_exists?(candidate, headers)
http_exists?(candidate)
end
end

View File

@@ -200,7 +200,7 @@ module Sources::Strategies
end
def api_response
return {} unless self.class.enabled?
return {} unless self.class.enabled? && status_id.present?
api_client.status(status_id)
end

View File

@@ -11,14 +11,6 @@ module TagRelationshipRetirementService
"This topic deals with tag relationships created two or more years ago that have not been used since. They will be retired. This topic will be updated as an automated system retires expired relationships."
end
def dry_run
[TagAlias, TagImplication].each do |model|
each_candidate(model) do |rel|
puts "#{rel.relationship} #{rel.antecedent_name} -> #{rel.consequent_name} retired"
end
end
end
def forum_topic
topic = ForumTopic.where(title: forum_topic_title).first
if topic.nil?

View File

@@ -7,13 +7,10 @@ class UploadService
# this gets called from UploadsController#new so we need to preprocess async
UploadPreprocessorDelayedStartJob.perform_later(url, ref, CurrentUser.user)
begin
download = Downloads::File.new(url, ref)
remote_size = download.size
rescue Exception
end
strategy = Sources::Strategies.find(url, ref)
remote_size = strategy.size
[upload, remote_size]
return [upload, remote_size]
end
if file

View File

@@ -71,13 +71,13 @@ class UploadService
return file if file.present?
raise "No file or source URL provided" if upload.source_url.blank?
download = Downloads::File.new(upload.source_url, upload.referer_url)
file, strategy = download.download!
strategy = Sources::Strategies.find(upload.source_url, upload.referer_url)
file = strategy.download_file!
if download.data[:ugoira_frame_data].present?
if strategy.data[:ugoira_frame_data].present?
upload.context = {
"ugoira" => {
"frame_data" => download.data[:ugoira_frame_data],
"frame_data" => strategy.data[:ugoira_frame_data],
"content_type" => "image/jpeg"
}
}

View File

@@ -0,0 +1,27 @@
# A TCPSocket wrapper that disallows connections to local or private IPs. Used for SSRF protection.
# https://owasp.org/www-community/attacks/Server_Side_Request_Forgery
require "resolv"
# A TCPSocket subclass that resolves the hostname up front and refuses to
# connect when the resolved address falls in an internal/prohibited range.
class ValidatingSocket < TCPSocket
  # Raised when the resolved IP is in a prohibited (internal) range.
  class ProhibitedIpError < StandardError; end

  # hostname - the remote host; resolved and validated before connecting.
  # port     - the TCP port to connect to.
  #
  # Raises ProhibitedIpError if `hostname` resolves to a prohibited IP.
  def initialize(hostname, port)
    ip = validate_hostname!(hostname)
    super(ip, port)
  end

  # Resolves `hostname` via DNS and returns the resulting IP as a string.
  # Raises ProhibitedIpError when the resolved address is prohibited.
  def validate_hostname!(hostname)
    ip = IPAddress.parse(::Resolv.getaddress(hostname))
    raise ProhibitedIpError, "Connection to #{hostname} failed; #{ip} is a prohibited IP" if prohibited_ip?(ip)
    ip.to_s
  end

  # Returns truthy if `ip` (an IPAddress object) is in a range that
  # server-initiated requests should never reach.
  # NOTE(review): returns nil (falsy) when `ip` is neither ipv4? nor ipv6?
  # per the IPAddress gem's predicates — confirm that case cannot occur.
  def prohibited_ip?(ip)
    if ip.ipv4?
      ip.loopback? || ip.link_local? || ip.multicast? || ip.private?
    elsif ip.ipv6?
      ip.loopback? || ip.link_local? || ip.unique_local? || ip.unspecified?
    end
  end
end

View File

@@ -34,7 +34,7 @@ class ModerationReport < ApplicationRecord
def forum_topic
topic = ForumTopic.find_by_title(forum_topic_title)
if topic.nil?
CurrentUser.as_system do
CurrentUser.scoped(User.system) do
topic = ForumTopic.create!(creator: User.system, title: forum_topic_title, category_id: 0, min_level: User::Levels::MODERATOR)
forum_post = ForumPost.create!(creator: User.system, body: forum_topic_body, topic: topic)
end

View File

@@ -33,7 +33,7 @@ class PostVersion < ApplicationRecord
end
def tag_matches(string)
tag = string.split(/\S+/)[0]
tag = string.match(/\S+/)[0]
return all if tag.nil?
tag = "*#{tag}*" unless tag =~ /\*/
where_ilike(:tags, tag)

View File

@@ -18,8 +18,7 @@ class SavedSearch < ApplicationRecord
post_ids = Set.new
queries.each do |query|
redis_key = "search:#{query}"
# XXX change to `exists?` (ref: https://github.com/sds/mock_redis/pull/188)
if redis.exists(redis_key)
if redis.exists?(redis_key)
sub_ids = redis.smembers(redis_key).map(&:to_i)
post_ids.merge(sub_ids)
else
@@ -116,7 +115,7 @@ class SavedSearch < ApplicationRecord
def populate(query, timeout: 10_000)
redis_key = "search:#{query}"
return if redis.exists(redis_key)
return if redis.exists?(redis_key)
post_ids = Post.with_timeout(timeout, [], query: query) do
Post.system_tag_match(query).limit(QUERY_LIMIT).pluck(:id)

View File

@@ -53,7 +53,11 @@ class WikiPage < ApplicationRecord
end
def linked_to(title)
where(id: DtextLink.wiki_page.wiki_link.where(link_target: title).select(:model_id))
where(dtext_links: DtextLink.wiki_page.wiki_link.where(link_target: normalize_title(title)))
end
# Scope: wiki pages that do NOT contain a dtext wiki link to `title`.
def not_linked_to(title)
  links = DtextLink.wiki_page.wiki_link.where(link_target: normalize_title(title))
  where.not(dtext_links: links)
end
def default_order
@@ -82,6 +86,10 @@ class WikiPage < ApplicationRecord
q = q.linked_to(params[:linked_to])
end
if params[:not_linked_to].present?
q = q.not_linked_to(params[:not_linked_to])
end
if params[:hide_deleted].to_s.truthy?
q = q.where("is_deleted = false")
end
@@ -146,6 +154,7 @@ class WikiPage < ApplicationRecord
end
# Normalize a wiki page title: lowercase, strip a leading "~", collapse runs
# of whitespace and doubled underscores into single underscores, and trim
# leading/trailing underscores. Returns nil for a blank title.
def self.normalize_title(title)
  return if title.blank?

  name = title.downcase.delete_prefix("~")
  name = name.gsub(/[[:space:]]+/, "_")
  name = name.gsub(/__/, "_")
  name.gsub(/\A_|_\z/, "")
end

View File

@@ -24,7 +24,7 @@ class ForumPostPolicy < ApplicationPolicy
end
def votable?
unbanned? && show? && record.bulk_update_request.present? && record.bulk_update_request.is_pending?
unbanned? && show? && record.bulk_update_request.present? && record.bulk_update_request.is_pending? && record.bulk_update_request.user_id != user.id
end
def reportable?

View File

@@ -8,12 +8,14 @@
<% if @artist.is_banned? && !policy(@artist).can_view_banned? %>
<p>The artist requested removal of this page.</p>
<% else %>
<% if @artist.wiki_page.present? %>
<div class="prose">
<%= format_text(@artist.wiki_page.body, :disable_mentions => true) %>
</div>
<% if @artist.wiki_page.present? && !@artist.wiki_page.is_deleted? %>
<div class="artist-wiki">
<div class="prose">
<%= format_text(@artist.wiki_page.body, :disable_mentions => true) %>
</div>
<p><%= link_to "View wiki page", @artist.wiki_page %></p>
<p><%= link_to "View wiki page", @artist.wiki_page %></p>
</div>
<% end %>
<%= yield %>

View File

@@ -15,7 +15,7 @@
</tr>
</thead>
<tbody>
<% @search_service.missed_search_rankings.each do |tags, count| %>
<% @missed_searches.each do |tags, count| %>
<tr class="tag-type-<%= Tag.category_for(tags) %>">
<td><%= link_to tags, posts_path(:tags => tags) %></td>
<td>

View File

@@ -13,7 +13,7 @@
</tr>
</thead>
<tbody>
<% @search_service.post_search_rankings(@date).each do |tags, count| %>
<% @searches.each do |tags, count| %>
<tr class="tag-type-<%= Tag.category_for(tags) %>">
<td><%= link_to tags, posts_path(:tags => tags) %></td>
<td style="text-align: right;"><%= count.to_i %></td>

View File

@@ -4,6 +4,8 @@
<%= f.input :title_normalize, label: "Title", hint: "Use * for wildcard searches", input_html: { "data-autocomplete": "wiki-page" } %>
<%= f.input :other_names_match, label: "Other names", hint: "Use * for wildcard searches" %>
<%= f.input :body_matches, label: "Body" %>
<%= f.input :linked_to, hint: "Which wikis link to the specified wiki.", input_html: { "data-autocomplete": "wiki-page" } %>
<%= f.input :not_linked_to, hint: "Which wikis do not link to the specified wiki.", input_html: { "data-autocomplete": "wiki-page" } %>
<%= f.input :other_names_present, as: :select %>
<%= f.input :hide_deleted, as: :select, include_blank: false %>
<%= f.input :order, collection: [%w[Name title], %w[Date time], %w[Posts post_count]], include_blank: false %>

View File

@@ -28,7 +28,7 @@
<%= format_text(@wiki_page.body) %>
<% end %>
<% if @wiki_page.artist %>
<% if @wiki_page.artist.present? && !@wiki_page.artist.is_deleted? %>
<p><%= link_to "View artist", @wiki_page.artist %></p>
<% end %>