gems: drop Mechanize, HTTParty, and Sinatra.

evazion
2020-06-21 12:00:57 -05:00
parent 5c7843bd3d
commit a4efeb2260
7 changed files with 2 additions and 122 deletions

Gemfile

@@ -7,7 +7,6 @@ gem "pg"
gem "delayed_job"
gem "delayed_job_active_record"
gem "simple_form"
gem "mechanize"
gem "whenever", :require => false
gem "sanitize"
gem 'ruby-vips'
@@ -28,7 +27,6 @@ gem 'daemons'
gem 'oauth2'
gem 'bootsnap'
gem 'addressable'
gem 'httparty'
gem 'rakismet'
gem 'recaptcha', require: "recaptcha/rails"
gem 'activemodel-serializers-xml'
@@ -63,7 +61,6 @@ end
group :development do
gem 'rubocop'
gem 'rubocop-rails'
gem 'sinatra'
gem 'meta_request'
gem 'rack-mini-profiler'
gem 'stackprof'

Gemfile.lock

@@ -122,7 +122,6 @@ GEM
chronic (0.10.2)
coderay (1.1.3)
concurrent-ruby (1.1.6)
connection_pool (2.2.3)
crass (1.0.6)
daemons (1.3.1)
delayed_job (4.1.8)
@@ -164,9 +163,6 @@ GEM
http-form_data (2.3.0)
http-parser (1.2.1)
ffi-compiler (>= 1.0, < 2.0)
httparty (0.18.1)
mime-types (~> 3.0)
multi_xml (>= 0.5.2)
i18n (1.8.3)
concurrent-ruby (~> 1.0)
ipaddress_2 (0.13.0)
@@ -188,24 +184,12 @@ GEM
mini_mime (>= 0.1.1)
marcel (0.3.3)
mimemagic (~> 0.3.2)
mechanize (2.7.6)
domain_name (~> 0.5, >= 0.5.1)
http-cookie (~> 1.0)
mime-types (>= 1.17.2)
net-http-digest_auth (~> 1.1, >= 1.1.1)
net-http-persistent (>= 2.5.2)
nokogiri (~> 1.6)
ntlm-http (~> 0.1, >= 0.1.1)
webrobots (>= 0.0.9, < 0.2)
memoist (0.16.2)
memory_profiler (0.9.14)
meta_request (0.7.2)
rack-contrib (>= 1.1, < 3)
railties (>= 3.0.0, < 7)
method_source (1.0.0)
mime-types (3.3.1)
mime-types-data (~> 3.2015)
mime-types-data (3.2020.0512)
mimemagic (0.3.5)
mini_mime (1.0.2)
mini_portile2 (2.5.0)
@@ -224,11 +208,6 @@ GEM
multi_json (1.14.1)
multi_xml (0.6.0)
multipart-post (2.1.1)
mustermann (1.1.1)
ruby2_keywords (~> 0.0.1)
net-http-digest_auth (1.4.1)
net-http-persistent (4.0.0)
connection_pool (~> 2.2)
net-scp (3.0.0)
net-ssh (>= 2.6.5, < 7.0.0)
net-sftp (3.0.0)
@@ -241,7 +220,6 @@ GEM
nokogiri (1.11.0.rc2-x64-mingw32)
nokogumbo (2.0.2)
nokogiri (~> 1.8, >= 1.8.4)
ntlm-http (0.1.1)
oauth2 (1.4.4)
faraday (>= 0.8, < 2.0)
jwt (>= 1.0, < 3.0)
@@ -271,8 +249,6 @@ GEM
rack (~> 2.0)
rack-mini-profiler (2.0.2)
rack (>= 1.2.0)
rack-protection (2.0.8.1)
rack
rack-proxy (0.6.5)
rack
rack-test (1.1.0)
@@ -339,9 +315,7 @@ GEM
ruby-progressbar (1.10.1)
ruby-vips (2.0.17)
ffi (~> 1.9)
ruby2_keywords (0.0.2)
rubyzip (2.3.0)
safe_yaml (1.0.5)
sanitize (5.2.1)
crass (~> 1.0.2)
nokogiri (>= 1.8.0)
@@ -364,11 +338,6 @@ GEM
json (>= 1.8, < 3)
simplecov-html (~> 0.10.0)
simplecov-html (0.10.2)
sinatra (2.0.8.1)
mustermann (~> 1.0)
rack (~> 2.0)
rack-protection (= 2.0.8.1)
tilt (~> 2.0)
sprockets (4.0.2)
concurrent-ruby (~> 1.0)
rack (> 1, < 3)
@@ -385,7 +354,6 @@ GEM
stripe (5.22.0)
thor (1.0.1)
thread_safe (0.3.6)
tilt (2.0.10)
tzinfo (1.2.7)
thread_safe (~> 0.1)
unf (0.1.4)
@@ -404,7 +372,6 @@ GEM
rack-proxy (>= 0.6.1)
railties (>= 5.2)
semantic_range (>= 2.3.0)
webrobots (0.1.2)
websocket-driver (0.7.2)
websocket-extensions (>= 0.1.0)
websocket-extensions (0.1.5)
@@ -442,12 +409,10 @@ DEPENDENCIES
ffaker
flamegraph
http
httparty
ipaddress_2
jquery-rails
listen
mail
mechanize
memoist
memory_profiler
meta_request
@@ -484,7 +449,6 @@ DEPENDENCIES
shoulda-matchers
simple_form
simplecov (~> 0.17.0)
sinatra
stackprof
streamio-ffmpeg
stripe

Danbooru::Http client

@@ -22,8 +22,8 @@ module Danbooru
@http ||=
::Danbooru::Http::ApplicationClient.new
.timeout(DEFAULT_TIMEOUT)
.headers(Danbooru.config.http_headers)
.headers("Accept-Encoding" => "gzip")
.headers("User-Agent": "#{Danbooru.config.canonical_app_name}/#{Rails.application.config.x.git_hash}")
.use(:auto_inflate)
.use(:retriable)
.use(redirector: { max_redirects: MAX_REDIRECTS })

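For context: the `http` gem remains under DEPENDENCIES, and the client above is built on its chainable interface, which is what lets HTTParty and Mechanize go. A minimal sketch of the difference at a call site, with a hypothetical URL and headers not taken from this diff:

require "httparty"
require "http"

url = "https://example.com/posts.json"

# HTTParty (dropped): options are passed per call.
HTTParty.get(url, headers: { "User-Agent" => "Danbooru" })

# http gem (kept): a chainable client, the same style ApplicationClient
# uses above to bake in the timeout, User-Agent, and gzip handling once.
HTTP.headers("User-Agent" => "Danbooru").timeout(30).get(url)
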
PixivApiClient

@@ -155,7 +155,6 @@ class PixivApiClient
def api_client
http.headers(
**Danbooru.config.http_headers,
"Referer": "http://www.pixiv.net",
"Content-Type": "application/x-www-form-urlencoded",
"Authorization": "Bearer #{access_token}"

Sources strategy base class

@@ -141,7 +141,7 @@ module Sources
# Subclasses should merge in any required headers needed to access resources
# on the site.
def headers
Danbooru.config.http_headers
{}
end
# Returns the size of the image resource without actually downloading the file.

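With the base implementation now returning an empty hash, each strategy supplies only the headers its site actually needs, as the PixivApiClient hunk above does with its Referer. A hypothetical subclass for illustration (ExampleSite and the Referer value are not from this commit):

module Sources
  module Strategies
    class ExampleSite < Base
      # Some hosts refuse image requests that lack a Referer; merge it in here.
      def headers
        super.merge("Referer" => "https://www.example.com")
      end
    end
  end
end
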
Danbooru default config

@@ -340,22 +340,6 @@ module Danbooru
def twitter_api_secret
end
# The default headers to be sent with outgoing http requests. Some external
# services will fail if you don't set a valid User-Agent.
def http_headers
{
"User-Agent" => "#{Danbooru.config.canonical_app_name}/#{Rails.application.config.x.git_hash}"
}
end
def httparty_options
# proxy example:
# {http_proxyaddr: "", http_proxyport: "", http_proxyuser: nil, http_proxypass: nil}
{
headers: Danbooru.config.http_headers
}
end
# you should override this
def email_key
"zDMSATq0W3hmA5p3rKTgD"

Mechanize monkey patch (entire file deleted)

@@ -1,64 +0,0 @@
require 'mechanize'
if Rails.env.test?
# Something about the root certs on the Travis CI image causes Mechanize
# to intermittently fail. This is a monkey patch to reset the connection
# after every request to avoid dealing with the issue.
#
# from http://scottwb.com/blog/2013/11/09/defeating-the-infamous-mechanize-too-many-connection-resets-bug/
class Mechanize::HTTP::Agent
MAX_RESET_RETRIES = 10
# We need to replace the core Mechanize HTTP method:
#
# Mechanize::HTTP::Agent#fetch
#
# with a wrapper that handles the infamous "too many connection resets"
# Mechanize bug that is described here:
#
# https://github.com/sparklemotion/mechanize/issues/123
#
# The wrapper shuts down the persistent HTTP connection when it fails with
# this error, and simply tries again. In practice, this only ever needs to
# be retried once, but I am going to let it retry a few times
# (MAX_RESET_RETRIES), just in case.
#
def fetch_with_retry(
uri,
method = :get,
headers = {},
params = [],
referer = current_page,
redirects = 0
)
action = "#{method.to_s.upcase} #{uri}"
retry_count = 0
begin
fetch_without_retry(uri, method, headers, params, referer, redirects)
rescue Net::HTTP::Persistent::Error => e
# Pass on any other type of error.
raise unless e.message =~ /too many connection resets/
# Pass on the error if we've tried too many times.
if retry_count >= MAX_RESET_RETRIES
print "R"
# puts "**** WARN: Mechanize retried connection reset #{MAX_RESET_RETRIES} times and never succeeded: #{action}"
raise
end
# Otherwise, shut down the persistent HTTP connection and try again.
print "R"
# puts "**** WARN: Mechanize retrying connection reset error: #{action}"
retry_count += 1
self.http.shutdown
retry
end
end
# Alias so #fetch actually uses our new #fetch_with_retry to wrap the
# old one aliased as #fetch_without_retry.
alias fetch_without_retry fetch
alias fetch fetch_with_retry
end
end