Downloads::File: fix SSRF inside is_cloudflare? (#2498).

Fixes the banned IP check not being applied to the HEAD request sent by
is_cloudflare?.
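
For context, a banned-IP guard of this kind resolves the request's host and refuses
loopback/private addresses before any connection is made. A minimal standalone sketch
(hypothetical validate_remote_ip! helper and error message, not Danbooru's actual
implementation):

require "addressable/uri"
require "ipaddr"
require "resolv"

# Illustrative only: resolve the host and refuse loopback, private, and
# link-local addresses instead of connecting.
def validate_remote_ip!(url)
  host = Addressable::URI.parse(url).host
  ip = IPAddr.new(Resolv.getaddress(host))
  raise "refusing to connect to banned IP #{ip}" if ip.loopback? || ip.private? || ip.link_local?
end

validate_remote_ip!("http://localhost/image.png") # raises; localhost resolves to a loopback address

In the diff below, the HEAD request is switched to the instance-level httparty_options
that #size already uses, instead of the raw Danbooru.config.httparty_options.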

Also fixes the `#size` method not using the uncached URL, which meant the
bookmarklet could report the wrong filesize for ArtStation uploads.
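
The cache bypass itself is just a unique query parameter appended when the host is
behind Cloudflare (see uncached_url in the diff below), so the HEAD request sees the
original file rather than the polished copy. Roughly, with an illustrative URL:

require "addressable/uri"
require "securerandom"

# Illustrative: a throwaway query parameter makes Cloudflare treat the
# request as a cache miss and serve the unmodified original.
url = Addressable::URI.parse("https://cdn.example.com/image.jpg")
url.query_values = (url.query_values || {}).merge(danbooru_no_cache: SecureRandom.uuid)
puts url # e.g. https://cdn.example.com/image.jpg?danbooru_no_cache=<uuid>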
Author: evazion
Date:   2018-09-19 20:11:53 -05:00
Parent: 9e1e73ec4c
Commit: f4e08ef30d
3 changed files with 23 additions and 12 deletions

View File

@@ -11,19 +11,19 @@ module Downloads
     validate :validate_url
 
     # Prevent Cloudflare from potentially mangling the image. See issue #3528.
-    def self.uncached_url(url, headers = {})
-      url = Addressable::URI.parse(url)
+    def uncached_url
+      url = file_url.dup
 
-      if is_cloudflare?(url, headers)
+      if is_cloudflare?
        url.query_values = (url.query_values || {}).merge(danbooru_no_cache: SecureRandom.uuid)
      end
 
      url
    end
 
-    def self.is_cloudflare?(url, headers = {})
-      Cache.get("is_cloudflare:#{url.origin}", 4.hours) do
-        res = HTTParty.head(url, { headers: headers }.deep_merge(Danbooru.config.httparty_options))
+    def is_cloudflare?
+      Cache.get("is_cloudflare:#{file_url.origin}", 4.hours) do
+        res = HTTParty.head(file_url, httparty_options)
        raise Error.new("HTTP error code: #{res.code} #{res.message}") unless res.success?
        res.key?("CF-Ray")
@@ -37,7 +37,7 @@ module Downloads
    end
 
    def size
-      res = HTTParty.head(strategy.file_url, **httparty_options, timeout: 3)
+      res = HTTParty.head(uncached_url, **httparty_options, timeout: 3)
 
      if res.success?
        res.content_length
@@ -47,10 +47,8 @@ module Downloads
    end
 
    def download!(tries: 3, **options)
-      url = self.class.uncached_url(strategy.file_url, strategy.headers)
-
      Retriable.retriable(on: RETRIABLE_ERRORS, tries: tries, base_interval: 0) do
-        file = http_get_streaming(url, headers: strategy.headers, **options)
+        file = http_get_streaming(uncached_url, headers: strategy.headers, **options)
        return [file, strategy]
      end
    end
@@ -79,6 +77,10 @@ module Downloads
      end
    end # def
 
+    def file_url
+      @file_url ||= Addressable::URI.parse(strategy.image_url)
+    end
+
    def strategy
      @strategy ||= Sources::Strategies.find(url.to_s, referer)
    end

View File

@@ -34,6 +34,10 @@ module Downloads
      should "return the original file, not the polished file" do
        assert_downloaded(517_706, @asset) # polished size: 502_052
      end
+
+      should "return the original filesize, not the polished filesize" do
+        assert_equal(517_706, Downloads::File.new(@asset).size)
+      end
    end
 
    context "a download for a https://$artist.artstation.com/projects/$id page" do

View File

@@ -9,12 +9,12 @@ module Downloads
    end
 
    context "for a banned IP" do
-      should "prevent downloads" do
+      should "not try to download the file" do
        Resolv.expects(:getaddress).returns("127.0.0.1")
        assert_raise(Downloads::File::Error) { Downloads::File.new("http://evil.com").download! }
      end
 
-      should "prevent fetching the size" do
+      should "not try to fetch the size" do
        Resolv.expects(:getaddress).returns("127.0.0.1")
        assert_raise(Downloads::File::Error) { Downloads::File.new("http://evil.com").size }
      end
@@ -33,6 +33,11 @@ module Downloads
        assert_raise(Downloads::File::Error) { Downloads::File.new(url).download! }
      end
+
+      should "not send a HEAD request when checking for cloudflare" do
+        Resolv.expects(:getaddress).with("www.google.com").returns("127.0.0.1")
+        assert_raise(Downloads::File::Error) { @download.is_cloudflare? }
+      end
    end
 
    context "that fails" do