switch to httparty

This commit is contained in:
r888888888
2017-06-29 17:10:07 -07:00
parent ed7b80c016
commit eb6c5e3af5
16 changed files with 108 additions and 182 deletions

View File

@@ -31,12 +31,8 @@ module Downloads
Old:
        }
        @source, headers, @data = before_download(@source, headers, @data)
        url = URI.parse(@source)
        Net::HTTP.start(url.host, url.port, :use_ssl => url.is_a?(URI::HTTPS)) do |http|
          http.read_timeout = 3
          http.request_head(url.request_uri, headers) do |res|
            return res.content_length
          end
        end
      end

New:
        }
        @source, headers, @data = before_download(@source, headers, @data)
        url = URI.parse(@source)
        res = HTTParty.head(url, timeout: 3)
        res.content_length
      end

      def download!
@@ -94,30 +90,17 @@ module Downloads
Old:
      validate_local_hosts(url)

      begin
        Net::HTTP.start(url.host, url.port, :use_ssl => url.is_a?(URI::HTTPS)) do |http|
          http.read_timeout = 10
          http.request_get(url.request_uri, headers) do |res|
            case res
            when Net::HTTPSuccess then
              if max_size
                len = res["Content-Length"]
                raise Error.new("File is too large (#{len} bytes)") if len && len.to_i > max_size
              end
              yield(res)
              return [src, datums]
            when Net::HTTPRedirection then
              if limit == 0 then
                raise Error.new("Too many redirects")
              end
              src = res["location"]
              limit -= 1
            else
              raise Error.new("HTTP error code: #{res.code} #{res.message}")
            end
          end # http.request_get
        end # http.start
      rescue Errno::ECONNRESET, Errno::ETIMEDOUT, Errno::EIO, Errno::EHOSTUNREACH, Errno::ECONNREFUSED, IOError => x
        tries += 1
        if tries < 3

New:
      validate_local_hosts(url)

      begin
        res = HTTParty.get(url, timeout: 10, headers: headers)
        if res.success?
          if max_size
            len = res["Content-Length"]
            raise Error.new("File is too large (#{len} bytes)") if len && len.to_i > max_size
          end
          yield(res)
          return [src, datums]
        else
          raise Error.new("HTTP error code: #{res.code} #{res.message}")
        end
      rescue Errno::ECONNRESET, Errno::ETIMEDOUT, Errno::EIO, Errno::EHOSTUNREACH, Errno::ECONNREFUSED, IOError => x
        tries += 1
        if tries < 3

View File

@@ -18,7 +18,7 @@ module Downloads
Old:
      protected

      def test_original(url)
        res = http_head_request(url, {})
        res.is_a?(Net::HTTPSuccess)
      end

New:
      protected

      def test_original(url)
        res = http_head_request(url, {})
        res.success?
      end

      def rewrite_html_url(url, headers)

View File

@@ -21,23 +21,12 @@ module Downloads
Old:
      protected

      def http_head_request(url, headers)
        uri = URI.parse(url)
        http = Net::HTTP.new(uri.host, uri.port)
        if uri.scheme == "https"
          http.use_ssl = true
        end
        http.request_head(uri.request_uri, headers) do |res|
          return res
        end
      end

      def http_exists?(url, headers)
        exists = false
        res = http_head_request(url, headers)
        if res.is_a?(Net::HTTPSuccess)
          exists = true
        end
        exists
      end
    end
  end

New:
      protected

      def http_head_request(url, headers)
        HTTParty.head(url, headers: headers)
      end

      def http_exists?(url, headers)
        res = http_head_request(url, headers)
        res.success?
      end
    end
  end

View File

@@ -16,19 +16,15 @@ class ImageProxy
Old:
      raise "Proxy not allowed for this site"
    end

    uri = URI.parse(url)
    headers = {
      "Referer" => fake_referer_for(url),
      "User-Agent" => "#{Danbooru.config.safe_app_name}/#{Danbooru.config.version}"
    }

    Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.is_a?(URI::HTTPS)) do |http|
      resp = http.request_get(uri.request_uri, headers)
      if resp.is_a?(Net::HTTPSuccess)
        return resp
      else
        raise "HTTP error code: #{resp.code} #{resp.message}"
      end
    end
  end
end

New:
      raise "Proxy not allowed for this site"
    end

    headers = {
      "Referer" => fake_referer_for(url),
      "User-Agent" => "#{Danbooru.config.safe_app_name}/#{Danbooru.config.version}"
    }
    response = HTTParty.get(url, headers: headers)
    if response.success?
      return response
    else
      raise "HTTP error code: #{response.code} #{response.message}"
    end
  end
end

View File

@@ -28,9 +28,8 @@ module Iqdb
Old:
      uri = URI.parse("#{Danbooru.config.iqdbs_server}/similar")
      uri.query = URI.encode_www_form(params)
      Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.is_a?(URI::HTTPS)) do |http|
        resp = http.request_get(uri.request_uri)
        if resp.is_a?(Net::HTTPSuccess)
          json = JSON.parse(resp.body)
          if json.is_a?(Array)
            post_ids = json.map { |match| match["post_id"] }

New:
      uri = URI.parse("#{Danbooru.config.iqdbs_server}/similar")
      uri.query = URI.encode_www_form(params)
      resp = HTTParty.get(uri)
      if resp.success?
        json = JSON.parse(resp.body)
        if json.is_a?(Array)
          post_ids = json.map { |match| match["post_id"] }
@@ -46,7 +45,6 @@ module Iqdb
Old:
        else
          raise "HTTP error code: #{resp.code} #{resp.message}"
        end
      end
    else
      raise NotImplementedError
    end

New:
      else
        raise "HTTP error code: #{resp.code} #{resp.message}"
      end
    else
      raise NotImplementedError
    end

View File

@@ -17,14 +17,11 @@ class MissedSearchService
Old:
  def fetch_data
    Cache.get("ms", 1.minute) do
      url = URI.parse("#{Danbooru.config.reportbooru_server}/missed_searches")
      response = ""
      Net::HTTP.start(url.host, url.port, :use_ssl => url.is_a?(URI::HTTPS)) do |http|
        http.read_timeout = 1
        http.request_get(url.request_uri) do |res|
          if res.is_a?(Net::HTTPSuccess)
            response = res.body
          end
        end
      end
      response.force_encoding("utf-8")
    end

New:
  def fetch_data
    Cache.get("ms", 1.minute) do
      url = URI.parse("#{Danbooru.config.reportbooru_server}/missed_searches")
      response = HTTParty.get(url, timeout: 6)
      if response.success?
        response = response.body
      else
        response = ""
      end
      response.force_encoding("utf-8")
    end

View File

@@ -8,24 +8,20 @@ class NicoSeigaApiClient
Old:
  end

  def get_illust(id)
    uri = URI.parse("#{BASE_URL}/illust/info?id=#{id}")
    Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.is_a?(URI::HTTPS)) do |http|
      resp = http.request_get(uri.request_uri)
      if resp.is_a?(Net::HTTPSuccess)
        parse_illust_xml_response(resp.body)
      end
    end
  end

  def get_artist(id)
    uri = URI.parse("#{BASE_URL}/user/info?id=#{id}")
    Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.is_a?(URI::HTTPS)) do |http|
      resp = http.request_get(uri.request_uri)
      if resp.is_a?(Net::HTTPSuccess)
        parse_artist_xml_response(resp.body)
      end
    end
  end

  def parse_artist_xml_response(text)
    doc = Hash.from_xml(text)

New:
  end

  def get_illust(id)
    uri = "#{BASE_URL}/illust/info?id=#{id}"
    resp = HTTParty.get(uri)
    if resp.success?
      parse_illust_xml_response(resp.body)
    end
  end

  def get_artist(id)
    uri = "#{BASE_URL}/user/info?id=#{id}"
    resp = HTTParty.get(uri)
    if resp.success?
      parse_artist_xml_response(resp.body)
    end
  end

  def parse_artist_xml_response(text)
    doc = Hash.from_xml(text)

View File

@@ -137,14 +137,12 @@ class PixivApiClient
Old:
      url.query = URI.encode_www_form(params)
      json = nil
      Net::HTTP.start(url.host, url.port, :use_ssl => true) do |http|
        resp = http.request_get(url.request_uri, headers)
        if resp.is_a?(Net::HTTPSuccess)
          json = parse_api_json(resp.body)
        else
          raise Error.new("Pixiv API call failed (status=#{resp.code} body=#{resp.body})")
        end
      end
      WorksResponse.new(json["response"][0])
    end

New:
      url.query = URI.encode_www_form(params)
      json = nil
      resp = HTTParty.get(url)
      if resp.success?
        json = parse_api_json(resp.body)
      else
        raise Error.new("Pixiv API call failed (status=#{resp.code} body=#{resp.body})")
      end
      WorksResponse.new(json["response"][0])
    end
@@ -173,18 +171,15 @@ private
Old:
        "client_id" => CLIENT_ID,
        "client_secret" => CLIENT_SECRET
      }
      url = URI.parse("https://oauth.secure.pixiv.net/auth/token")
      Net::HTTP.start(url.host, url.port, :use_ssl => true) do |http|
        resp = http.request_post(url.request_uri, URI.encode_www_form(params), headers)
        if resp.is_a?(Net::HTTPSuccess)
          json = JSON.parse(resp.body)
          access_token = json["response"]["access_token"]
        else
          raise Error.new("Pixiv API access token call failed (status=#{resp.code} body=#{resp.body})")
        end
      end
      access_token
    end

New:
        "client_id" => CLIENT_ID,
        "client_secret" => CLIENT_SECRET
      }
      url = "https://oauth.secure.pixiv.net/auth/token"
      resp = HTTParty.post(url, body: params, headers: headers)
      if resp.success?
        json = JSON.parse(resp.body)
        access_token = json["response"]["access_token"]
      else
        raise Error.new("Pixiv API access token call failed (status=#{resp.code} body=#{resp.body})")
      end
      access_token
    end

View File

@@ -27,15 +27,12 @@ class PopularSearchService
Old:
    dates = date.strftime("%Y-%m-%d")
    Cache.get("ps-#{scale}-#{dates}", 1.minute) do
      url = URI.parse("#{Danbooru.config.reportbooru_server}/hits/#{scale}?date=#{dates}")
      response = ""
      Net::HTTP.start(url.host, url.port, :use_ssl => url.is_a?(URI::HTTPS)) do |http|
        http.read_timeout = 1
        http.request_get(url.request_uri) do |res|
          if res.is_a?(Net::HTTPSuccess)
            response = res.body
          end
        end
      end
      response
    end.to_s.force_encoding("utf-8")

New:
    dates = date.strftime("%Y-%m-%d")
    Cache.get("ps-#{scale}-#{dates}", 1.minute) do
      url = "#{Danbooru.config.reportbooru_server}/hits/#{scale}?date=#{dates}"
      response = HTTParty.get(url, timeout: 3)
      if response.success?
        response = response.body
      else
        response = ""
      end
      response
    end.to_s.force_encoding("utf-8")

View File

@@ -33,14 +33,12 @@ module Reports
Old:
      uri = URI.parse("#{Danbooru.config.reportbooru_server}/reports/#{endpoint}")
      uri.query = URI.encode_www_form(params)
      Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.is_a?(URI::HTTPS)) do |http|
        resp = http.request_get(uri.request_uri)
        if resp.is_a?(Net::HTTPSuccess)
          resp.body
        else
          raise "HTTP error code: #{resp.code} #{resp.message}"
        end
      end
    end
  end
end

New:
      uri = URI.parse("#{Danbooru.config.reportbooru_server}/reports/#{endpoint}")
      uri.query = URI.encode_www_form(params)
      resp = HTTParty.get(uri)
      if resp.success?
        resp.body
      else
        raise "HTTP error code: #{resp.code} #{resp.message}"
      end
    end
  end
end

View File

@@ -46,11 +46,9 @@ module Sources::Strategies
Old:
    end

    def get
      uri = URI.parse(api_url)
      Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.is_a?(URI::HTTPS)) do |http|
        resp = http.request_get(uri.request_uri)
        image_url_rewriter = Downloads::RewriteStrategies::ArtStation.new
        if resp.is_a?(Net::HTTPSuccess)
          @json = JSON.parse(resp.body)
          @artist_name = json["user"]["username"]
          @profile_url = json["user"]["permalink"]

New:
    end

    def get
      resp = HTTParty.get(api_url)
      image_url_rewriter = Downloads::RewriteStrategies::ArtStation.new
      if resp.success?
        @json = JSON.parse(resp.body)
        @artist_name = json["user"]["username"]
        @profile_url = json["user"]["permalink"]
@@ -67,5 +65,4 @@ module Sources::Strategies
Old (five closing ends — the extra one closed the Net::HTTP.start block):
          end
        end
      end
    end
  end

New (four closing ends):
        end
      end
    end
  end

View File

@@ -22,16 +22,13 @@ class TagAliasCorrection
Old:
  end

  def fill_hash!
    Net::HTTP.start(hostname, 80) do |http|
      http.request_get("/tag_aliases/#{tag_alias_id}/correction.json") do |res|
        if res === Net::HTTPSuccess
          json = JSON.parse(res.body)
          statistics_hash["antecedent_cache"] = json["antecedent_cache"]
          statistics_hash["consequent_cache"] = json["consequent_cache"]
        end
      end
    end
  end

  def each_server
    Danbooru.config.all_server_hosts.each do |host|

New:
  end

  def fill_hash!
    res = HTTParty.get("http://#{hostname}/tag_aliases/#{tag_alias_id}/correction.json")
    if res.success?
      json = JSON.parse(res.body)
      statistics_hash["antecedent_cache"] = json["antecedent_cache"]
      statistics_hash["consequent_cache"] = json["consequent_cache"]
    end
  end

  def each_server
    Danbooru.config.all_server_hosts.each do |host|

View File

@@ -19,16 +19,13 @@ class TagCorrection
Old:
  end

  def fill_hash!
    Net::HTTP.start(hostname, 80) do |http|
      http.request_get("/tags/#{tag_id}/correction.json") do |res|
        if res === Net::HTTPSuccess
          json = JSON.parse(res.body)
          statistics_hash["category_cache"] = json["category_cache"]
          statistics_hash["post_fast_count_cache"] = json["post_fast_count_cache"]
        end
      end
    end
  end

  def each_server
    Danbooru.config.all_server_hosts.each do |host|

New:
  end

  def fill_hash!
    res = HTTParty.get("http://#{hostname}/tags/#{tag_id}/correction.json")
    if res.success?
      json = JSON.parse(res.body)
      statistics_hash["category_cache"] = json["category_cache"]
      statistics_hash["post_fast_count_cache"] = json["post_fast_count_cache"]
    end
  end

  def each_server
    Danbooru.config.all_server_hosts.each do |host|

View File

@@ -32,24 +32,14 @@ class TwitterService
Old:
    end.compact
  end

  def extract_og_image_from_page(url, n = 5)
    raise "too many redirects" if n == 0

    Net::HTTP.start(url.host, url.port, :use_ssl => (url.normalized_scheme == "https")) do |http|
      resp = http.request_get(url.request_uri)
      if resp.is_a?(Net::HTTPMovedPermanently) && resp["Location"]
        redirect_url = Addressable::URI.parse(resp["Location"])
        redirect_url.host = url.host if redirect_url.host.nil?
        redirect_url.scheme = url.scheme if redirect_url.scheme.nil?
        redirect_url.port = url.port if redirect_url.port.nil?
        return extract_og_image_from_page(redirect_url, n - 1)
      elsif resp.is_a?(Net::HTTPSuccess)
        doc = Nokogiri::HTML(resp.body)
        images = doc.css("meta[property='og:image']")
        return images.first.attr("content").sub(":large", ":orig")
      end
    end
  end

  def extract_urls_for_card(attrs)
    urls = attrs.urls.map {|x| x.expanded_url}

New:
    end.compact
  end

  def extract_og_image_from_page(url)
    resp = HTTParty.get(url)
    if resp.success?
      doc = Nokogiri::HTML(resp.body)
      images = doc.css("meta[property='og:image']")
      return images.first.attr("content").sub(":large", ":orig")
    end
  end

  def extract_urls_for_card(attrs)
    urls = attrs.urls.map {|x| x.expanded_url}

View File

@@ -28,19 +28,15 @@ class SavedSearch < ApplicationRecord
Old:
          "queries" => queries
        }.to_json
        uri = URI.parse("#{Danbooru.config.listbooru_server}/v2/search")
        body = Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.is_a?(URI::HTTPS)) do |http|
          resp = http.request_post(uri.request_uri, json)
          if resp.is_a?(Net::HTTPSuccess)
            resp.body
          else
            raise "HTTP error code: #{resp.code} #{resp.message}"
          end
        end
        body.to_s.scan(/\d+/).map(&:to_i)
      end
    end
  end
end

New:
          "queries" => queries
        }.to_json
        uri = "#{Danbooru.config.listbooru_server}/v2/search"
        resp = HTTParty.post(uri, body: json)
        if resp.success?
          resp.body.to_s.scan(/\d+/).map(&:to_i)
        else
          raise "HTTP error code: #{resp.code} #{resp.message}"
        end
      end
    end
  end
end

View File

@@ -174,7 +174,7 @@ class User < ApplicationRecord
Old:
  def update_remote_cache
    if name_changed?
      Danbooru.config.other_server_hosts.each do |server|
        Net::HTTP.delete(URI.parse("http://#{server}/users/#{id}/cache"))
      end
    end
  rescue Exception

New:
  def update_remote_cache
    if name_changed?
      Danbooru.config.other_server_hosts.each do |server|
        HTTParty.delete("http://#{server}/users/#{id}/cache")
      end
    end
  rescue Exception