tests: fix broken Danbooru::Http redirect tests.

Switch from https://httpbin.org to https://nghttp2.org/httpbin.
Redirects on https://httpbin.org have been broken for months, with no
resolution in sight.
Author: evazion
Date:   2021-01-11 20:29:15 -06:00
Parent: ef177a09cf
Commit: aab9fd710b
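
The change routes every request in the test file through a small URL helper, so the httpbin host is defined in exactly one place and the previously skipped redirect tests can be re-enabled. A minimal sketch of the pattern, mirroring the httpbin_url helper and one of the redirect tests as they appear in the diff below:

    # Build a test URL against the httpbin instance mirrored at nghttp2.org.
    def httpbin_url(path = "")
      "https://nghttp2.org/httpbin/#{path}"
    end

    # A previously skipped redirect test, now exercised against the new host.
    should "follow redirects" do
      response = Danbooru::Http.get(httpbin_url("absolute-redirect/3"))
      assert_equal(200, response.status)
    end

Centralizing the host this way also means a future move off nghttp2.org only needs to touch the helper, not every test.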


@@ -1,36 +1,38 @@
 require 'test_helper'
 class DanbooruHttpTest < ActiveSupport::TestCase
+  def httpbin_url(path = "")
+    "https://nghttp2.org/httpbin/#{path}"
+  end
   context "Danbooru::Http" do
     context "#get method" do
       should "work for all basic methods" do
         %i[get head put post delete].each do |method|
-          response = Danbooru::Http.send(method, "https://httpbin.org/status/200")
+          response = Danbooru::Http.send(method, httpbin_url("status/200"))
           assert_equal(200, response.status)
         end
       end
       should "follow redirects" do
-        skip "Skipping test (https://github.com/postmanlabs/httpbin/issues/617)"
-        response = Danbooru::Http.get("https://httpbin.org/absolute-redirect/3")
+        response = Danbooru::Http.get(httpbin_url("absolute-redirect/3"))
         assert_equal(200, response.status)
       end
       should "fail if redirected too many times" do
-        skip "Skipping test (https://github.com/postmanlabs/httpbin/issues/617)"
-        response = Danbooru::Http.get("https://httpbin.org/absolute-redirect/10")
+        response = Danbooru::Http.get(httpbin_url("absolute-redirect/10"))
         assert_equal(596, response.status)
       end
       should "fail if the request takes too long to connect" do
-        response = Danbooru::Http.timeout(1).get("https://httpbin.org/delay/5")
+        response = Danbooru::Http.timeout(1).get(httpbin_url("delay/5"))
         assert_equal(597, response.status)
       end
       should "fail if the request takes too long to download" do
         # XXX should return status 597 instead
         assert_raises(HTTP::TimeoutError) do
-          response = Danbooru::Http.timeout(1).get("https://httpbin.org/drip?duration=10&numbytes=10").flush
+          response = Danbooru::Http.timeout(1).get(httpbin_url("drip?duration=10&numbytes=10")).flush
         end
       end
@@ -45,20 +47,20 @@ class DanbooruHttpTest < ActiveSupport::TestCase
       end
       should "automatically decompress gzipped responses" do
-        response = Danbooru::Http.get("https://httpbin.org/gzip")
+        response = Danbooru::Http.get(httpbin_url("gzip"))
         assert_equal(200, response.status)
         assert_equal(true, response.parse["gzipped"])
       end
       should "automatically parse html responses" do
-        response = Danbooru::Http.get("https://httpbin.org/html")
+        response = Danbooru::Http.get(httpbin_url("html"))
         assert_equal(200, response.status)
         assert_instance_of(Nokogiri::HTML5::Document, response.parse)
         assert_equal("Herman Melville - Moby-Dick", response.parse.css("h1").text)
       end
       should "automatically parse xml responses" do
-        response = Danbooru::Http.get("https://httpbin.org/xml")
+        response = Danbooru::Http.get(httpbin_url("xml"))
         assert_equal(200, response.status)
         assert_equal(true, response.parse[:slideshow].present?)
       end
@@ -66,12 +68,12 @@ class DanbooruHttpTest < ActiveSupport::TestCase
should "track cookies between requests" do
http = Danbooru::Http.use(:session)
resp1 = http.get("https://httpbin.org/cookies/set/abc/1")
resp2 = http.get("https://httpbin.org/cookies/set/def/2")
resp3 = http.get("https://httpbin.org/cookies")
resp1 = http.get(httpbin_url("cookies/set/abc/1"))
resp2 = http.get(httpbin_url("cookies/set/def/2"))
resp3 = http.get(httpbin_url("cookies"))
assert_equal({ abc: "1", def: "2" }, resp3.parse["cookies"].symbolize_keys)
resp4 = http.cookies(def: 3, ghi: 4).get("https://httpbin.org/cookies")
resp4 = http.cookies(def: 3, ghi: 4).get(httpbin_url("cookies"))
assert_equal({ abc: "1", def: "3", ghi: "4" }, resp4.parse["cookies"].symbolize_keys)
end
end
@@ -80,10 +82,10 @@ class DanbooruHttpTest < ActiveSupport::TestCase
should "cache multiple requests to the same url" do
http = Danbooru::Http.cache(1.hour)
response1 = http.get("https://httpbin.org/uuid")
response1 = http.get(httpbin_url("uuid"))
assert_equal(200, response1.status)
response2 = http.get("https://httpbin.org/uuid")
response2 = http.get(httpbin_url("uuid"))
assert_equal(200, response2.status)
assert_equal(response2.to_s, response1.to_s)
end
@@ -91,10 +93,10 @@ class DanbooruHttpTest < ActiveSupport::TestCase
should "cache cookies correctly" do
http = Danbooru::Http.cache(1.hour)
resp1 = http.get("https://httpbin.org/cookies")
resp2 = http.get("https://httpbin.org/cookies/set/abc/1")
resp3 = http.get("https://httpbin.org/cookies/set/def/2")
resp4 = http.get("https://httpbin.org/cookies")
resp1 = http.get(httpbin_url("cookies"))
resp2 = http.get(httpbin_url("cookies/set/abc/1"))
resp3 = http.get(httpbin_url("cookies/set/def/2"))
resp4 = http.get(httpbin_url("cookies"))
assert_equal(200, resp1.status)
assert_equal(200, resp2.status)
@@ -114,7 +116,7 @@ class DanbooruHttpTest < ActiveSupport::TestCase
         response_200 = ::HTTP::Response.new(status: 200, version: "1.1", body: "")
         HTTP::Client.any_instance.expects(:perform).times(2).returns(response_429, response_200)
-        response = Danbooru::Http.use(:retriable).get("https://httpbin.org/status/429")
+        response = Danbooru::Http.use(:retriable).get(httpbin_url("status/429"))
         assert_equal(200, response.status)
       end
@@ -123,7 +125,7 @@ class DanbooruHttpTest < ActiveSupport::TestCase
         response_200 = ::HTTP::Response.new(status: 200, version: "1.1", body: "")
         HTTP::Client.any_instance.expects(:perform).times(2).returns(response_503, response_200)
-        response = Danbooru::Http.use(:retriable).get("https://httpbin.org/status/503")
+        response = Danbooru::Http.use(:retriable).get(httpbin_url("status/503"))
         assert_equal(200, response.status)
       end
@@ -132,17 +134,17 @@ class DanbooruHttpTest < ActiveSupport::TestCase
         response_200 = ::HTTP::Response.new(status: 200, version: "1.1", body: "")
         HTTP::Client.any_instance.expects(:perform).times(2).returns(response_503, response_200)
-        response = Danbooru::Http.use(:retriable).get("https://httpbin.org/status/503")
+        response = Danbooru::Http.use(:retriable).get(httpbin_url("status/503"))
         assert_equal(200, response.status)
       end
     end
     context "spoof referrer feature" do
       should "spoof the referer" do
-        response = Danbooru::Http.use(:spoof_referrer).get("https://httpbin.org/anything")
+        response = Danbooru::Http.use(:spoof_referrer).get(httpbin_url("anything"))
         assert_equal(200, response.status)
-        assert_equal("https://httpbin.org", response.parse.dig("headers", "Referer"))
+        assert_equal("https://nghttp2.org", response.parse.dig("headers", "Referer"))
       end
     end
@@ -158,15 +160,14 @@ class DanbooruHttpTest < ActiveSupport::TestCase
context "#download method" do
should "download files" do
response, file = Danbooru::Http.download_media("https://httpbin.org/bytes/1000")
response, file = Danbooru::Http.download_media(httpbin_url("bytes/1000"))
assert_equal(200, response.status)
assert_equal(1000, file.size)
end
should "follow redirects when downloading files" do
skip "Skipping test (https://github.com/postmanlabs/httpbin/issues/617)"
response, file = Danbooru::Http.download_media("https://httpbin.org/redirect-to?url=https://httpbin.org/bytes/1000")
response, file = Danbooru::Http.download_media(httpbin_url("/redirect-to?url=#{httpbin_url("bytes/1000")}"))
assert_equal(200, response.status)
assert_equal(1000, file.size)
@@ -180,19 +181,19 @@ class DanbooruHttpTest < ActiveSupport::TestCase
should "fail if the url redirects to a private IP" do
assert_raises(Danbooru::Http::DownloadError) do
Danbooru::Http.public_only.download_media("https://httpbin.org/redirect-to?url=https://127.0.0.1.xip.io")
Danbooru::Http.public_only.download_media(httpbin_url("redirect-to?url=https://127.0.0.1.xip.io"))
end
end
should "fail if a download is too large" do
assert_raises(Danbooru::Http::FileTooLargeError) do
response, file = Danbooru::Http.max_size(500).download_media("https://httpbin.org/bytes/1000")
response, file = Danbooru::Http.max_size(500).download_media(httpbin_url("bytes/1000"))
end
end
should "fail if a streaming download is too large" do
assert_raises(Danbooru::Http::FileTooLargeError) do
response, file = Danbooru::Http.max_size(500).download_media("https://httpbin.org/stream-bytes/1000")
response, file = Danbooru::Http.max_size(500).download_media(httpbin_url("stream-bytes/1000"))
end
end
end