switch to httparty

Author: r888888888
Date: 2017-06-29 17:10:07 -07:00
parent ed7b80c016
commit eb6c5e3af5
16 changed files with 108 additions and 182 deletions
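
One pattern repeats across all sixteen files: a hand-rolled Net::HTTP block (explicit SSL setup, timeouts, status-class checks, sometimes a manual redirect loop) collapses into a single HTTParty call. A minimal before/after sketch of the pattern, with a hypothetical endpoint URL:

    require "net/http"
    require "httparty"

    url = "https://example.com/posts.json" # hypothetical endpoint

    # Before: Net::HTTP wants explicit SSL, timeout, and status handling.
    uri = URI.parse(url)
    Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.is_a?(URI::HTTPS)) do |http|
      http.read_timeout = 10
      res = http.request_get(uri.request_uri)
      raise "HTTP error code: #{res.code} #{res.message}" unless res.is_a?(Net::HTTPSuccess)
      puts res.body
    end

    # After: HTTParty infers SSL from the URL scheme and follows redirects;
    # success? is true for any 2xx response.
    res = HTTParty.get(url, timeout: 10)
    raise "HTTP error code: #{res.code} #{res.message}" unless res.success?
    puts res.body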

@@ -31,12 +31,8 @@ module Downloads
       }
       @source, headers, @data = before_download(@source, headers, @data)
       url = URI.parse(@source)
-      Net::HTTP.start(url.host, url.port, :use_ssl => url.is_a?(URI::HTTPS)) do |http|
-        http.read_timeout = 3
-        http.request_head(url.request_uri, headers) do |res|
-          return res.content_length
-        end
-      end
+      res = HTTParty.head(url, timeout: 3)
+      res.content_length
     end

     def download!
@@ -94,30 +90,17 @@ module Downloads
       validate_local_hosts(url)
       begin
-        Net::HTTP.start(url.host, url.port, :use_ssl => url.is_a?(URI::HTTPS)) do |http|
-          http.read_timeout = 10
-          http.request_get(url.request_uri, headers) do |res|
-            case res
-            when Net::HTTPSuccess then
-              if max_size
-                len = res["Content-Length"]
-                raise Error.new("File is too large (#{len} bytes)") if len && len.to_i > max_size
-              end
-              yield(res)
-              return [src, datums]
-            when Net::HTTPRedirection then
-              if limit == 0 then
-                raise Error.new("Too many redirects")
-              end
-              src = res["location"]
-              limit -= 1
-            else
-              raise Error.new("HTTP error code: #{res.code} #{res.message}")
-            end
-          end # http.request_get
-        end # http.start
+        res = HTTParty.get(url, timeout: 10, headers: headers)
+        if res.success?
+          if max_size
+            len = res["Content-Length"]
+            raise Error.new("File is too large (#{len} bytes)") if len && len.to_i > max_size
+          end
+          yield(res)
+          return [src, datums]
+        else
+          raise Error.new("HTTP error code: #{res.code} #{res.message}")
+        end
       rescue Errno::ECONNRESET, Errno::ETIMEDOUT, Errno::EIO, Errno::EHOSTUNREACH, Errno::ECONNREFUSED, IOError => x
         tries += 1
         if tries < 3
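
The Net::HTTPRedirection branch and its limit counter disappear above because HTTParty follows redirects on its own, raising HTTParty::RedirectionTooDeep once its hop limit runs out. Should manual handling ever be wanted back, HTTParty can opt out; a small sketch with a hypothetical URL:

    require "httparty"

    # follow_redirects: false returns the 3xx response instead of chasing it,
    # so the Location header can be inspected by hand as before.
    res = HTTParty.get("https://example.com/file", follow_redirects: false)
    puts res.headers["location"] if res.code.between?(300, 399)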

@@ -18,7 +18,7 @@ module Downloads
     protected

     def test_original(url)
       res = http_head_request(url, {})
-      res.is_a?(Net::HTTPSuccess)
+      res.success?
     end

     def rewrite_html_url(url, headers)

@@ -21,23 +21,12 @@ module Downloads
     protected

     def http_head_request(url, headers)
-      uri = URI.parse(url)
-      http = Net::HTTP.new(uri.host, uri.port)
-      if uri.scheme == "https"
-        http.use_ssl = true
-      end
-      http.request_head(uri.request_uri, headers) do |res|
-        return res
-      end
+      HTTParty.head(url, headers: headers)
     end

     def http_exists?(url, headers)
-      exists = false
       res = http_head_request(url, headers)
-      if res.is_a?(Net::HTTPSuccess)
-        exists = true
-      end
-      exists
+      res.success?
     end
   end
 end

@@ -16,19 +16,15 @@ class ImageProxy
raise "Proxy not allowed for this site"
end
uri = URI.parse(url)
headers = {
"Referer" => fake_referer_for(url),
"User-Agent" => "#{Danbooru.config.safe_app_name}/#{Danbooru.config.version}"
}
Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.is_a?(URI::HTTPS)) do |http|
resp = http.request_get(uri.request_uri, headers)
if resp.is_a?(Net::HTTPSuccess)
return resp
else
raise "HTTP error code: #{resp.code} #{resp.message}"
end
response = HTTParty.get(url, headers: headers)
if response.success?
return response
else
raise "HTTP error code: #{response.code} #{response.message}"
end
end
end
end

@@ -28,24 +28,22 @@ module Iqdb
         uri = URI.parse("#{Danbooru.config.iqdbs_server}/similar")
         uri.query = URI.encode_www_form(params)
-        Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.is_a?(URI::HTTPS)) do |http|
-          resp = http.request_get(uri.request_uri)
-          if resp.is_a?(Net::HTTPSuccess)
-            json = JSON.parse(resp.body)
-            if json.is_a?(Array)
-              post_ids = json.map { |match| match["post_id"] }
-              posts = Post.find(post_ids)
-              @matches = json.map do |match|
-                post = posts.find { |post| post.id == match["post_id"] }
-                match.with_indifferent_access.merge({ post: post })
-              end
-            else
-              @matches = []
-            end
-          else
-            raise "HTTP error code: #{resp.code} #{resp.message}"
-          end
-        end
+        resp = HTTParty.get(uri)
+        if resp.success?
+          json = JSON.parse(resp.body)
+          if json.is_a?(Array)
+            post_ids = json.map { |match| match["post_id"] }
+            posts = Post.find(post_ids)
+            @matches = json.map do |match|
+              post = posts.find { |post| post.id == match["post_id"] }
+              match.with_indifferent_access.merge({ post: post })
+            end
+          else
+            @matches = []
+          end
+        else
+          raise "HTTP error code: #{resp.code} #{resp.message}"
+        end
       else
         raise NotImplementedError

@@ -17,14 +17,11 @@ class MissedSearchService
   def fetch_data
     Cache.get("ms", 1.minute) do
       url = URI.parse("#{Danbooru.config.reportbooru_server}/missed_searches")
-      response = ""
-      Net::HTTP.start(url.host, url.port, :use_ssl => url.is_a?(URI::HTTPS)) do |http|
-        http.read_timeout = 1
-        http.request_get(url.request_uri) do |res|
-          if res.is_a?(Net::HTTPSuccess)
-            response = res.body
-          end
-        end
-      end
+      response = HTTParty.get(url, timeout: 6)
+      if response.success?
+        response = response.body
+      else
+        response = ""
+      end
       response.force_encoding("utf-8")
     end
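
A timeout-semantics note on this hunk: Net::HTTP's read_timeout only bounded reads, while HTTParty's timeout: option sets both the open and read timeouts. The empty-string fallback covers only non-2xx responses; in both versions a timeout raises (Net::OpenTimeout / Net::ReadTimeout), so a caller that must never raise would rescue them. A hedged sketch with a hypothetical helper name:

    require "httparty"

    # fetch_or_blank is hypothetical, not part of this commit.
    def fetch_or_blank(url)
      res = HTTParty.get(url, timeout: 6) # timeout: bounds connect and read
      res.success? ? res.body : ""
    rescue Net::OpenTimeout, Net::ReadTimeout
      ""
    end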

@@ -8,22 +8,18 @@ class NicoSeigaApiClient
   end

   def get_illust(id)
-    uri = URI.parse("#{BASE_URL}/illust/info?id=#{id}")
-    Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.is_a?(URI::HTTPS)) do |http|
-      resp = http.request_get(uri.request_uri)
-      if resp.is_a?(Net::HTTPSuccess)
-        parse_illust_xml_response(resp.body)
-      end
-    end
+    uri = "#{BASE_URL}/illust/info?id=#{id}"
+    resp = HTTParty.get(uri)
+    if resp.success?
+      parse_illust_xml_response(resp.body)
+    end
   end

   def get_artist(id)
-    uri = URI.parse("#{BASE_URL}/user/info?id=#{id}")
-    Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.is_a?(URI::HTTPS)) do |http|
-      resp = http.request_get(uri.request_uri)
-      if resp.is_a?(Net::HTTPSuccess)
-        parse_artist_xml_response(resp.body)
-      end
-    end
+    uri = "#{BASE_URL}/user/info?id=#{id}"
+    resp = HTTParty.get(uri)
+    if resp.success?
+      parse_artist_xml_response(resp.body)
+    end
   end

@@ -137,13 +137,11 @@ class PixivApiClient
     url.query = URI.encode_www_form(params)
     json = nil
-    Net::HTTP.start(url.host, url.port, :use_ssl => true) do |http|
-      resp = http.request_get(url.request_uri, headers)
-      if resp.is_a?(Net::HTTPSuccess)
-        json = parse_api_json(resp.body)
-      else
-        raise Error.new("Pixiv API call failed (status=#{resp.code} body=#{resp.body})")
-      end
-    end
+    resp = HTTParty.get(url, headers: headers) # keep the auth headers the old request_get sent
+    if resp.success?
+      json = parse_api_json(resp.body)
+    else
+      raise Error.new("Pixiv API call failed (status=#{resp.code} body=#{resp.body})")
+    end
     WorksResponse.new(json["response"][0])
@@ -173,17 +171,14 @@ private
"client_id" => CLIENT_ID,
"client_secret" => CLIENT_SECRET
}
url = URI.parse("https://oauth.secure.pixiv.net/auth/token")
url = "https://oauth.secure.pixiv.net/auth/token"
Net::HTTP.start(url.host, url.port, :use_ssl => true) do |http|
resp = http.request_post(url.request_uri, URI.encode_www_form(params), headers)
if resp.is_a?(Net::HTTPSuccess)
json = JSON.parse(resp.body)
access_token = json["response"]["access_token"]
else
raise Error.new("Pixiv API access token call failed (status=#{resp.code} body=#{resp.body})")
end
resp = HTTParty.post(url, body: params, headers: headers)
if resp.success?
json = JSON.parse(resp.body)
access_token = json["response"]["access_token"]
else
raise Error.new("Pixiv API access token call failed (status=#{resp.code} body=#{resp.body})")
end
access_token
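
A Hash passed as body: is form-encoded by HTTParty, so this request stays application/x-www-form-urlencoded just as the explicit URI.encode_www_form call made it before; JSON would have to be serialized by hand. A minimal sketch (credential values hypothetical):

    require "httparty"

    params = { "username" => "name", "password" => "secret" } # hypothetical values
    # A Hash body is form-encoded, matching the old request_post behavior.
    HTTParty.post("https://oauth.secure.pixiv.net/auth/token", body: params)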

@@ -27,15 +27,12 @@ class PopularSearchService
     dates = date.strftime("%Y-%m-%d")
     Cache.get("ps-#{scale}-#{dates}", 1.minute) do
-      url = URI.parse("#{Danbooru.config.reportbooru_server}/hits/#{scale}?date=#{dates}")
-      response = ""
-      Net::HTTP.start(url.host, url.port, :use_ssl => url.is_a?(URI::HTTPS)) do |http|
-        http.read_timeout = 1
-        http.request_get(url.request_uri) do |res|
-          if res.is_a?(Net::HTTPSuccess)
-            response = res.body
-          end
-        end
-      end
+      url = "#{Danbooru.config.reportbooru_server}/hits/#{scale}?date=#{dates}"
+      response = HTTParty.get(url, timeout: 3)
+      if response.success?
+        response = response.body
+      else
+        response = ""
+      end
       response
     end.to_s.force_encoding("utf-8")

@@ -33,13 +33,11 @@ module Reports
       uri = URI.parse("#{Danbooru.config.reportbooru_server}/reports/#{endpoint}")
       uri.query = URI.encode_www_form(params)
-      Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.is_a?(URI::HTTPS)) do |http|
-        resp = http.request_get(uri.request_uri)
-        if resp.is_a?(Net::HTTPSuccess)
-          resp.body
-        else
-          raise "HTTP error code: #{resp.code} #{resp.message}"
-        end
-      end
+      resp = HTTParty.get(uri)
+      if resp.success?
+        resp.body
+      else
+        raise "HTTP error code: #{resp.code} #{resp.message}"
+      end
     end
   end

@@ -46,25 +46,22 @@ module Sources::Strategies
     end

     def get
-      uri = URI.parse(api_url)
-      Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.is_a?(URI::HTTPS)) do |http|
-        resp = http.request_get(uri.request_uri)
-        image_url_rewriter = Downloads::RewriteStrategies::ArtStation.new
-        if resp.is_a?(Net::HTTPSuccess)
-          @json = JSON.parse(resp.body)
-          @artist_name = json["user"]["username"]
-          @profile_url = json["user"]["permalink"]
-          images = json["assets"].select { |asset| asset["asset_type"] == "image" }
-          @image_urls = images.map do |x|
-            y, _, _ = image_url_rewriter.rewrite(x["image_url"], nil)
-            y
-          end
-          @tags = json["tags"].map {|x| [x.downcase.tr(" ", "_"), "https://www.artstation.com/search?q=" + CGI.escape(x)]} if json["tags"]
-          @artist_commentary_title = json["title"]
-          @artist_commentary_desc = ActionView::Base.full_sanitizer.sanitize(json["description"])
-        else
-          raise "HTTP error code: #{resp.code} #{resp.message}"
-        end
-      end
+      resp = HTTParty.get(api_url)
+      image_url_rewriter = Downloads::RewriteStrategies::ArtStation.new
+      if resp.success?
+        @json = JSON.parse(resp.body)
+        @artist_name = json["user"]["username"]
+        @profile_url = json["user"]["permalink"]
+        images = json["assets"].select { |asset| asset["asset_type"] == "image" }
+        @image_urls = images.map do |x|
+          y, _, _ = image_url_rewriter.rewrite(x["image_url"], nil)
+          y
+        end
+        @tags = json["tags"].map {|x| [x.downcase.tr(" ", "_"), "https://www.artstation.com/search?q=" + CGI.escape(x)]} if json["tags"]
+        @artist_commentary_title = json["title"]
+        @artist_commentary_desc = ActionView::Base.full_sanitizer.sanitize(json["description"])
+      else
+        raise "HTTP error code: #{resp.code} #{resp.message}"
+      end
     end
   end

@@ -22,14 +22,11 @@ class TagAliasCorrection
   end

   def fill_hash!
-    Net::HTTP.start(hostname, 80) do |http|
-      http.request_get("/tag_aliases/#{tag_alias_id}/correction.json") do |res|
-        if res === Net::HTTPSuccess
-          json = JSON.parse(res.body)
-          statistics_hash["antecedent_cache"] = json["antecedent_cache"]
-          statistics_hash["consequent_cache"] = json["consequent_cache"]
-        end
-      end
-    end
+    res = HTTParty.get("http://#{hostname}/tag_aliases/#{tag_alias_id}/correction.json")
+    if res.success?
+      json = JSON.parse(res.body)
+      statistics_hash["antecedent_cache"] = json["antecedent_cache"]
+      statistics_hash["consequent_cache"] = json["consequent_cache"]
+    end
   end

@@ -19,14 +19,11 @@ class TagCorrection
   end

   def fill_hash!
-    Net::HTTP.start(hostname, 80) do |http|
-      http.request_get("/tags/#{tag_id}/correction.json") do |res|
-        if res === Net::HTTPSuccess
-          json = JSON.parse(res.body)
-          statistics_hash["category_cache"] = json["category_cache"]
-          statistics_hash["post_fast_count_cache"] = json["post_fast_count_cache"]
-        end
-      end
-    end
+    res = HTTParty.get("http://#{hostname}/tags/#{tag_id}/correction.json")
+    if res.success?
+      json = JSON.parse(res.body)
+      statistics_hash["category_cache"] = json["category_cache"]
+      statistics_hash["post_fast_count_cache"] = json["post_fast_count_cache"]
+    end
   end

@@ -32,22 +32,12 @@ class TwitterService
     end.compact
   end

-  def extract_og_image_from_page(url, n = 5)
-    raise "too many redirects" if n == 0
-    Net::HTTP.start(url.host, url.port, :use_ssl => (url.normalized_scheme == "https")) do |http|
-      resp = http.request_get(url.request_uri)
-      if resp.is_a?(Net::HTTPMovedPermanently) && resp["Location"]
-        redirect_url = Addressable::URI.parse(resp["Location"])
-        redirect_url.host = url.host if redirect_url.host.nil?
-        redirect_url.scheme = url.scheme if redirect_url.scheme.nil?
-        redirect_url.port = url.port if redirect_url.port.nil?
-        return extract_og_image_from_page(redirect_url, n - 1)
-      elsif resp.is_a?(Net::HTTPSuccess)
-        doc = Nokogiri::HTML(resp.body)
-        images = doc.css("meta[property='og:image']")
-        return images.first.attr("content").sub(":large", ":orig")
-      end
-    end
-  end
+  def extract_og_image_from_page(url)
+    # url arrives as an Addressable::URI; HTTParty's URI adapter takes strings.
+    resp = HTTParty.get(url.to_s)
+    if resp.success?
+      doc = Nokogiri::HTML(resp.body)
+      images = doc.css("meta[property='og:image']")
+      return images.first.attr("content").sub(":large", ":orig")
+    end
+  end

@@ -28,18 +28,14 @@ class SavedSearch < ApplicationRecord
"queries" => queries
}.to_json
uri = URI.parse("#{Danbooru.config.listbooru_server}/v2/search")
uri = "#{Danbooru.config.listbooru_server}/v2/search"
body = Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.is_a?(URI::HTTPS)) do |http|
resp = http.request_post(uri.request_uri, json)
if resp.is_a?(Net::HTTPSuccess)
resp.body
else
raise "HTTP error code: #{resp.code} #{resp.message}"
end
resp = HTTParty.post(uri, body: json)
if resp.success?
resp.body.to_s.scan(/\d+/).map(&:to_i)
else
raise "HTTP error code: #{resp.code} #{resp.message}"
end
body.to_s.scan(/\d+/).map(&:to_i)
end
end
end

@@ -174,7 +174,7 @@ class User < ApplicationRecord
   def update_remote_cache
     if name_changed?
       Danbooru.config.other_server_hosts.each do |server|
-        Net::HTTP.delete(URI.parse("http://#{server}/users/#{id}/cache"))
+        HTTParty.delete("http://#{server}/users/#{id}/cache")
       end
     end
   rescue Exception