integrate ugoiras into zip+webm+preview

This commit is contained in:
r888888888
2014-10-09 17:05:47 -07:00
parent 0a61aac231
commit 3bb06c2be4
28 changed files with 1800 additions and 1125 deletions

View File

@@ -98,7 +98,7 @@ div#c-comments {
} }
div.post-preview { div.post-preview {
&[data-tags~=animated], &[data-file-ext=swf], &[data-file-ext=webm] { &[data-tags~=animated], &[data-file-ext=swf], &[data-file-ext=webm], &[data-file-ext=zip] {
div.preview { div.preview {
position: relative; position: relative;

View File

@@ -22,7 +22,7 @@ article.post-preview {
margin: auto; margin: auto;
} }
&[data-tags~=animated]:before, &[data-file-ext=swf]:before, &[data-file-ext=webm]:before { &[data-tags~=animated]:before, &[data-file-ext=swf]:before, &[data-file-ext=webm]:before, &[data-file-ext=zip]:before {
content: ""; content: "";
position: absolute; position: absolute;
width: 20px; width: 20px;

View File

@@ -2,45 +2,49 @@ module Downloads
class File class File
class Error < Exception ; end class Error < Exception ; end
attr_reader :tries attr_reader :data
attr_accessor :source, :content_type, :file_path attr_accessor :source, :content_type, :file_path
def initialize(source, file_path) def initialize(source, file_path, options = {})
# source can potentially get rewritten in the course
# of downloading a file, so check it again
@source = source @source = source
# where to save the download
@file_path = file_path @file_path = file_path
@tries = 0
# we sometimes need to capture data from the source page
@data = {:is_ugoira => options[:is_ugoira]}
end end
def download! def download!
http_get_streaming do |response| @source, @data = http_get_streaming(@source, @data) do |response|
self.content_type = response["Content-Type"] self.content_type = response["Content-Type"]
::File.open(file_path, "wb") do |out| ::File.open(@file_path, "wb") do |out|
response.read_body(out) response.read_body(out)
end end
end end
after_download @source = after_download(@source)
end end
def before_download(url, headers) def before_download(url, headers, datums)
RewriteStrategies::Base.strategies.each do |strategy| RewriteStrategies::Base.strategies.each do |strategy|
url, headers = strategy.new.rewrite(url, headers) url, headers, datums = strategy.new.rewrite(url, headers, datums)
end end
return [url, headers] return [url, headers, datums]
end end
def after_download def after_download(src)
fix_image_board_sources fix_image_board_sources(src)
end end
def url def http_get_streaming(src, datums = {}, options = {})
URI.parse(source)
end
def http_get_streaming(options = {})
max_size = options[:max_size] || Danbooru.config.max_file_size max_size = options[:max_size] || Danbooru.config.max_file_size
max_size = nil if max_size == 0 # unlimited max_size = nil if max_size == 0 # unlimited
limit = 4 limit = 4
tries = 0
url = URI.parse(src)
while true while true
unless url.is_a?(URI::HTTP) || url.is_a?(URI::HTTPS) unless url.is_a?(URI::HTTP) || url.is_a?(URI::HTTPS)
@@ -50,7 +54,8 @@ module Downloads
headers = { headers = {
"User-Agent" => "#{Danbooru.config.safe_app_name}/#{Danbooru.config.version}" "User-Agent" => "#{Danbooru.config.safe_app_name}/#{Danbooru.config.version}"
} }
@source, headers = before_download(source, headers) src, headers, datums = before_download(src, headers, datums)
url = URI.parse(src)
begin begin
Net::HTTP.start(url.host, url.port, :use_ssl => url.is_a?(URI::HTTPS)) do |http| Net::HTTP.start(url.host, url.port, :use_ssl => url.is_a?(URI::HTTPS)) do |http|
@@ -63,13 +68,13 @@ module Downloads
raise Error.new("File is too large (#{len} bytes)") if len && len.to_i > max_size raise Error.new("File is too large (#{len} bytes)") if len && len.to_i > max_size
end end
yield(res) yield(res)
return return [src, datums]
when Net::HTTPRedirection then when Net::HTTPRedirection then
if limit == 0 then if limit == 0 then
raise Error.new("Too many redirects") raise Error.new("Too many redirects")
end end
@source = res["location"] src = res["location"]
limit -= 1 limit -= 1
else else
@@ -78,19 +83,23 @@ module Downloads
end # http.request_get end # http.request_get
end # http.start end # http.start
rescue Errno::ECONNRESET, Errno::ETIMEDOUT, Errno::EIO, Errno::EHOSTUNREACH, Errno::ECONNREFUSED, IOError => x rescue Errno::ECONNRESET, Errno::ETIMEDOUT, Errno::EIO, Errno::EHOSTUNREACH, Errno::ECONNREFUSED, IOError => x
@tries += 1 tries += 1
if @tries < 3 if tries < 3
retry retry
else else
raise raise
end end
end end
end # while end # while
[src, datums]
end # def end # def
def fix_image_board_sources def fix_image_board_sources(src)
if source =~ /i\.4cdn\.org|\/src\/\d{12,}|urnc\.yi\.org|yui\.cynthia\.bne\.jp/ if src =~ /i\.4cdn\.org|\/src\/\d{12,}|urnc\.yi\.org|yui\.cynthia\.bne\.jp/
@source = "Image board" "Image board"
else
src
end end
end end
end end

View File

@@ -5,8 +5,8 @@ module Downloads
[Pixiv, NicoSeiga, Twitpic, DeviantArt, Tumblr, Moebooru] [Pixiv, NicoSeiga, Twitpic, DeviantArt, Tumblr, Moebooru]
end end
def rewrite(url, headers) def rewrite(url, headers, data = {})
return [url, headers] return [url, headers, data]
end end
protected protected

View File

@@ -1,13 +1,13 @@
module Downloads module Downloads
module RewriteStrategies module RewriteStrategies
class DeviantArt < Base class DeviantArt < Base
def rewrite(url, headers) def rewrite(url, headers, data = {})
if url =~ /https?:\/\/(?:.+?\.)?deviantart\.(?:com|net)/ if url =~ /https?:\/\/(?:.+?\.)?deviantart\.(?:com|net)/
url, headers = rewrite_html_pages(url, headers) url, headers = rewrite_html_pages(url, headers)
url, headers = rewrite_thumbnails(url, headers) url, headers = rewrite_thumbnails(url, headers)
end end
return [url, headers] return [url, headers, data]
end end
protected protected

View File

@@ -3,12 +3,12 @@ module Downloads
class Moebooru < Base class Moebooru < Base
DOMAINS = '(?:[^.]+\.)?yande\.re|konachan\.com' DOMAINS = '(?:[^.]+\.)?yande\.re|konachan\.com'
def rewrite(url, headers) def rewrite(url, headers, data = {})
if url =~ %r{https?://(?:#{DOMAINS})} if url =~ %r{https?://(?:#{DOMAINS})}
url, headers = rewrite_jpeg_versions(url, headers) url, headers = rewrite_jpeg_versions(url, headers)
end end
return [url, headers] return [url, headers, data]
end end
protected protected

View File

@@ -1,14 +1,14 @@
module Downloads module Downloads
module RewriteStrategies module RewriteStrategies
class NicoSeiga < Base class NicoSeiga < Base
def rewrite(url, headers) def rewrite(url, headers, data = {})
if url =~ %r{https?://lohas\.nicoseiga\.jp} || url =~ %r{https?://seiga\.nicovideo\.jp} if url =~ %r{https?://lohas\.nicoseiga\.jp} || url =~ %r{https?://seiga\.nicovideo\.jp}
url, headers = rewrite_headers(url, headers) url, headers = rewrite_headers(url, headers)
url, headers = rewrite_html_pages(url, headers) url, headers = rewrite_html_pages(url, headers)
url, headers = rewrite_thumbnails(url, headers) url, headers = rewrite_thumbnails(url, headers)
end end
return [url, headers] return [url, headers, data]
end end
protected protected

View File

@@ -1,16 +1,16 @@
module Downloads module Downloads
module RewriteStrategies module RewriteStrategies
class Pixiv < Base class Pixiv < Base
def rewrite(url, headers) def rewrite(url, headers, data = {})
if url =~ /https?:\/\/(?:\w+\.)?pixiv\.net/ if url =~ /https?:\/\/(?:\w+\.)?pixiv\.net/
url, headers = rewrite_headers(url, headers) url, headers = rewrite_headers(url, headers)
url, headers = rewrite_cdn(url, headers) url, headers = rewrite_cdn(url, headers)
url, headers = rewrite_html_pages(url, headers) url, headers, data = rewrite_html_pages(url, headers, data)
url, headers = rewrite_thumbnails(url, headers) url, headers = rewrite_thumbnails(url, headers)
url, headers = rewrite_old_small_manga_pages(url, headers) url, headers = rewrite_old_small_manga_pages(url, headers)
end end
return [url, headers] return [url, headers, data]
end end
protected protected
@@ -31,9 +31,12 @@ module Downloads
if url =~ /illust_id=\d+/i || url =~ %r!pixiv\.net/img-inf/img/!i if url =~ /illust_id=\d+/i || url =~ %r!pixiv\.net/img-inf/img/!i
source = ::Sources::Strategies::Pixiv.new(url) source = ::Sources::Strategies::Pixiv.new(url)
source.get source.get
return [source.image_url, headers] data[:ugoira_frame_data] = source.ugoira_frame_data
data[:ugoira_width] = source.ugoira_width
data[:ugoira_height] = source.ugoira_height
return [source.file_url, headers, data]
else else
return [url, headers] return [url, headers, data]
end end
end end

View File

@@ -1,13 +1,13 @@
module Downloads module Downloads
module RewriteStrategies module RewriteStrategies
class Tumblr < Base class Tumblr < Base
def rewrite(url, headers) def rewrite(url, headers, data = {})
if url =~ %r{^https?://.*tumblr\.com} if url =~ %r{^https?://.*tumblr\.com}
url, headers = rewrite_cdn(url, headers) url, headers = rewrite_cdn(url, headers)
url, headers = rewrite_thumbnails(url, headers) url, headers = rewrite_thumbnails(url, headers)
end end
return [url, headers] return [url, headers, data]
end end
protected protected

View File

@@ -1,13 +1,13 @@
module Downloads module Downloads
module RewriteStrategies module RewriteStrategies
class Twitpic < Base class Twitpic < Base
def rewrite(url, headers) def rewrite(url, headers, data = {})
if url =~ %r{https?://twitpic\.com} || url =~ %r{^https?://d3j5vwomefv46c\.cloudfront\.net} if url =~ %r{https?://twitpic\.com} || url =~ %r{^https?://d3j5vwomefv46c\.cloudfront\.net}
url, headers = rewrite_html_pages(url, headers) url, headers = rewrite_html_pages(url, headers)
url, headers = rewrite_thumbnails(url, headers) url, headers = rewrite_thumbnails(url, headers)
end end
return [url, headers] return [url, headers, data]
end end
protected protected

View File

@@ -1,43 +1,11 @@
class PixivUgoiraConverter class PixivUgoiraConverter
attr_reader :agent, :url, :write_path, :format def convert(source_path, output_path, preview_path, frame_data)
folder = unpack(File.open(source_path))
def initialize(url, write_path, format) write_webm(folder, output_path, frame_data)
@url = url write_preview(folder, preview_path)
@write_path = write_path
@format = format
end end
def process! def write_webm(folder, write_path, frame_data)
folder = unpack(fetch_zipped_body)
if format == :gif
write_gif(folder)
elsif format == :webm
write_webm(folder)
elsif format == :apng
write_apng(folder)
end
end
def write_gif(folder)
anim = Magick::ImageList.new
delay_sum = 0
folder.each_with_index do |file, i|
image_blob = file.get_input_stream.read
image = Magick::Image.from_blob(image_blob).first
image.ticks_per_second = 1000
delay = @frame_data[i]["delay"]
rounded_delay = (delay_sum + delay).round(-1) - delay_sum.round(-1)
image.delay = rounded_delay
delay_sum += delay
anim << image
end
anim = anim.optimize_layers(Magick::OptimizeTransLayer)
anim.write("gif:" + write_path)
end
def write_webm(folder)
Dir.mktmpdir do |tmpdir| Dir.mktmpdir do |tmpdir|
FileUtils.mkdir_p("#{tmpdir}/images") FileUtils.mkdir_p("#{tmpdir}/images")
folder.each_with_index do |file, i| folder.each_with_index do |file, i|
@@ -62,7 +30,7 @@ class PixivUgoiraConverter
timecodes_path = File.join(tmpdir, "timecodes.tc") timecodes_path = File.join(tmpdir, "timecodes.tc")
File.open(timecodes_path, "w+") do |f| File.open(timecodes_path, "w+") do |f|
f.write("# timecode format v2\n") f.write("# timecode format v2\n")
@frame_data.each do |img| frame_data.each do |img|
f.write("#{delay_sum}\n") f.write("#{delay_sum}\n")
delay_sum += img["delay"] delay_sum += img["delay"]
end end
@@ -71,68 +39,21 @@ class PixivUgoiraConverter
end end
ext = folder.first.name.match(/\.(\w{,4})$/)[1] ext = folder.first.name.match(/\.(\w{,4})$/)[1]
system("ffmpeg -i #{tmpdir}/images/%06d.#{ext} -codec:v libvpx -crf 4 -b:v 5000k -an #{tmpdir}/tmp.webm") system("ffmpeg -loglevel quiet -i #{tmpdir}/images/%06d.#{ext} -codec:v libvpx -crf 4 -b:v 5000k -an #{tmpdir}/tmp.webm")
system("mkvmerge -o #{write_path} --webm --timecodes 0:#{tmpdir}/timecodes.tc #{tmpdir}/tmp.webm") system("mkvmerge -q -o #{write_path} --webm --timecodes 0:#{tmpdir}/timecodes.tc #{tmpdir}/tmp.webm")
end end
end end
def write_apng(folder) def write_preview(folder, path)
Dir.mktmpdir do |tmpdir| file = folder.first
folder.each_with_index do |file, i| image_blob = file.get_input_stream.read
frame_path = File.join(tmpdir, "frame#{"%03d" % i}.png") image = Magick::Image.from_blob(image_blob).first
delay_path = File.join(tmpdir, "frame#{"%03d" % i}.txt") image.write(path)
image_blob = file.get_input_stream.read
delay = @frame_data[i]["delay"]
image = Magick::Image.from_blob(image_blob).first
image.format = "PNG"
image.write(frame_path)
File.open(delay_path, "wb") do |f|
f.write("delay=#{delay}/1000")
end
end
system("apngasm -o -F #{write_path} #{tmpdir}/frame*.png")
end
end end
def unpack(zipped_body) def unpack(zip_file)
folder = Zip::CentralDirectory.new folder = Zip::CentralDirectory.new
folder.read_from_stream(StringIO.new(zipped_body)) folder.read_from_stream(zip_file)
folder folder
end end
def fetch_zipped_body
zip_body = nil
zip_url, @frame_data = fetch_frames
Downloads::File.new(zip_url, nil).http_get_streaming do |response|
zip_body = response.body
end
zip_body
end
def agent
@agent ||= Sources::Strategies::Pixiv.new(url).agent
end
def fetch_frames
agent.get(url) do |page|
# Get the zip url and frame delay by parsing javascript contained in a <script> tag on the page.
# Not a neat solution, but I haven't found any other location that has the frame delays listed.
scripts = page.search("body script").find_all do |node|
node.text =~ /_ugoira600x600\.zip/
end
if scripts.any?
javascript = scripts.first.text
json = javascript.match(/;pixiv\.context\.ugokuIllustData\s+=\s+(\{.+?\});(?:$|pixiv\.context)/)[1]
data = JSON.parse(json)
zip_url = data["src"].sub("_ugoira600x600.zip", "_ugoira1920x1080.zip")
frame_data = data["frames"]
return [zip_url, frame_data]
else
raise "Can't find javascript with frame data"
end
end
end
end end

View File

@@ -0,0 +1,21 @@
class PixivUgoiraService
attr_reader :width, :height, :frame_data
def process(post)
save_frame_data(post)
end
def save_frame_data(post)
PixivUgoiraFrameData.create(:data => @frame_data, :post_id => post.id)
end
def generate_resizes(source_path, output_path, preview_path)
PixivUgoiraConverter.new.convert(source_path, output_path, preview_path, @frame_data)
end
def load(data)
@frame_data = data[:ugoira_frame_data]
@width = data[:ugoira_width]
@height = data[:ugoira_height]
end
end

View File

@@ -0,0 +1,24 @@
class PixivWebAgent
def self.build
mech = Mechanize.new
phpsessid = Cache.get("pixiv-phpsessid")
if phpsessid
cookie = Mechanize::Cookie.new("PHPSESSID", phpsessid)
cookie.domain = ".pixiv.net"
cookie.path = "/"
mech.cookie_jar.add(cookie)
else
mech.get("http://www.pixiv.net") do |page|
page.form_with(:action => "/login.php") do |form|
form['pixiv_id'] = Danbooru.config.pixiv_login
form['pass'] = Danbooru.config.pixiv_password
end.click_button
end
phpsessid = mech.cookie_jar.cookies.select{|c| c.name == "PHPSESSID"}.first
Cache.put("pixiv-phpsessid", phpsessid.value, 1.month) if phpsessid
end
mech
end
end

View File

@@ -5,7 +5,7 @@ module Sources
class Site class Site
attr_reader :url, :strategy attr_reader :url, :strategy
delegate :get, :referer_url, :site_name, :artist_name, :profile_url, :image_url, :tags, :artist_record, :unique_id, :page_count, :to => :strategy delegate :get, :referer_url, :site_name, :artist_name, :profile_url, :image_url, :tags, :artist_record, :unique_id, :page_count, :file_url, :ugoira_frame_data, :ugoira_width, :ugoira_height, :to => :strategy
def self.strategies def self.strategies
[Strategies::Pixiv, Strategies::NicoSeiga, Strategies::DeviantArt, Strategies::Nijie] [Strategies::Pixiv, Strategies::NicoSeiga, Strategies::DeviantArt, Strategies::Nijie]

View File

@@ -5,6 +5,8 @@ require 'csv'
module Sources module Sources
module Strategies module Strategies
class Pixiv < Base class Pixiv < Base
attr_reader :zip_url, :ugoira_frame_data, :ugoira_width, :ugoira_height
def self.url_match?(url) def self.url_match?(url)
url =~ /^https?:\/\/(?:\w+\.)?pixiv\.net/ url =~ /^https?:\/\/(?:\w+\.)?pixiv\.net/
end end
@@ -43,6 +45,8 @@ module Sources
agent.get(URI.parse(normalized_url)) do |page| agent.get(URI.parse(normalized_url)) do |page|
@artist_name, @profile_url = get_profile_from_page(page) @artist_name, @profile_url = get_profile_from_page(page)
@pixiv_moniker = get_moniker_from_page(page) @pixiv_moniker = get_moniker_from_page(page)
@image_url = get_image_url_from_page(page)
@zip_url, @ugoira_frame_data, @ugoira_width, @ugoira_height = get_zip_url_from_page(page)
@tags = get_tags_from_page(page) @tags = get_tags_from_page(page)
@page_count = get_page_count_from_page(page) @page_count = get_page_count_from_page(page)
@@ -58,28 +62,11 @@ module Sources
end end
def agent def agent
@agent ||= begin @agent ||= PixivWebAgent.build
mech = Mechanize.new end
phpsessid = Cache.get("pixiv-phpsessid") def file_url
if phpsessid image_url || zip_url
cookie = Mechanize::Cookie.new("PHPSESSID", phpsessid)
cookie.domain = ".pixiv.net"
cookie.path = "/"
mech.cookie_jar.add(cookie)
else
mech.get("http://www.pixiv.net") do |page|
page.form_with(:action => "/login.php") do |form|
form['pixiv_id'] = Danbooru.config.pixiv_login
form['pass'] = Danbooru.config.pixiv_password
end.click_button
end
phpsessid = mech.cookie_jar.cookies.select{|c| c.name == "PHPSESSID"}.first
Cache.put("pixiv-phpsessid", phpsessid.value, 1.month) if phpsessid
end
mech
end
end end
protected protected
@@ -191,6 +178,31 @@ module Sources
end end
end end
def get_zip_url_from_page(page)
scripts = page.search("body script").find_all do |node|
node.text =~ /_ugoira600x600\.zip/
end
if scripts.any?
javascript = scripts.first.text
json = javascript.match(/;pixiv\.context\.ugokuIllustData\s+=\s+(\{.+?\});(?:$|pixiv\.context)/)[1]
data = JSON.parse(json)
zip_url = data["src"].sub("_ugoira600x600.zip", "_ugoira1920x1080.zip")
frame_data = data["frames"]
if javascript =~ /illustSize\s*=\s*\[\s*(\d+)\s*,\s*(\d+)\s*\]/
image_width = $1.to_i
image_height = $2.to_i
else
image_width = 600
image_height = 600
end
return [zip_url, frame_data, image_width, image_height]
end
end
def get_tags_from_page(page) def get_tags_from_page(page)
# puts page.root.to_xhtml # puts page.root.to_xhtml

View File

@@ -0,0 +1,4 @@
class PixivUgoiraFrameData < ActiveRecord::Base
attr_accessible :post_id, :data
serialize :data
end

View File

@@ -26,6 +26,7 @@ class Post < ActiveRecord::Base
belongs_to :parent, :class_name => "Post" belongs_to :parent, :class_name => "Post"
has_one :upload, :dependent => :destroy has_one :upload, :dependent => :destroy
has_one :artist_commentary, :dependent => :destroy has_one :artist_commentary, :dependent => :destroy
has_one :pixiv_ugoira_frame_data, :class_name => "PixivUgoiraFrameData"
has_many :flags, :class_name => "PostFlag", :dependent => :destroy has_many :flags, :class_name => "PostFlag", :dependent => :destroy
has_many :appeals, :class_name => "PostAppeal", :dependent => :destroy has_many :appeals, :class_name => "PostAppeal", :dependent => :destroy
has_many :versions, lambda {order("post_versions.updated_at ASC, post_versions.id ASC")}, :class_name => "PostVersion", :dependent => :destroy has_many :versions, lambda {order("post_versions.updated_at ASC, post_versions.id ASC")}, :class_name => "PostVersion", :dependent => :destroy
@@ -70,12 +71,20 @@ class Post < ActiveRecord::Base
def large_file_path def large_file_path
if has_large? if has_large?
"#{Rails.root}/public/data/sample/#{file_path_prefix}#{Danbooru.config.large_image_prefix}#{md5}.jpg" "#{Rails.root}/public/data/sample/#{file_path_prefix}#{Danbooru.config.large_image_prefix}#{md5}.#{large_file_ext}"
else else
file_path file_path
end end
end end
def large_file_ext
if is_ugoira?
"webm"
else
"jpg"
end
end
def preview_file_path def preview_file_path
"#{Rails.root}/public/data/preview/#{file_path_prefix}#{md5}.jpg" "#{Rails.root}/public/data/preview/#{file_path_prefix}#{md5}.jpg"
end end
@@ -129,7 +138,7 @@ class Post < ActiveRecord::Base
end end
def is_video? def is_video?
file_ext =~ /webm/i file_ext =~ /webm|zip/i
end end
def has_preview? def has_preview?

View File

@@ -112,6 +112,7 @@ class Upload < ActiveRecord::Base
post.distribute_files post.distribute_files
if post.save if post.save
CurrentUser.increment!(:post_upload_count) CurrentUser.increment!(:post_upload_count)
ugoira_service.process(post)
update_attributes(:status => "completed", :post_id => post.id) update_attributes(:status => "completed", :post_id => post.id)
else else
update_attribute(:status, "error: " + post.errors.full_messages.join(", ")) update_attribute(:status, "error: " + post.errors.full_messages.join(", "))
@@ -140,6 +141,10 @@ class Upload < ActiveRecord::Base
delete_temp_file delete_temp_file
end end
def ugoira_service
@ugoira_service ||= PixivUgoiraService.new
end
def convert_to_post def convert_to_post
Post.new.tap do |p| Post.new.tap do |p|
p.tag_string = tag_string p.tag_string = tag_string
@@ -190,16 +195,32 @@ class Upload < ActiveRecord::Base
def is_video? def is_video?
%w(webm).include?(file_ext) %w(webm).include?(file_ext)
end end
def is_ugoira?
%w(zip).include?(file_ext)
end
end end
module ResizerMethods module ResizerMethods
def generate_resizes(source_path) def generate_resizes(source_path)
generate_resize_for(Danbooru.config.small_image_width, Danbooru.config.small_image_width, source_path, 85) generate_resize_for(Danbooru.config.small_image_width, Danbooru.config.small_image_width, source_path, 85)
if is_image? && image_width > Danbooru.config.large_image_width if is_image? && image_width > Danbooru.config.large_image_width
generate_resize_for(Danbooru.config.large_image_width, nil, source_path) generate_resize_for(Danbooru.config.large_image_width, nil, source_path)
end end
end end
def generate_video_preview_for(width, height, output_path)
dimension_ratio = image_width.to_f / image_height
if dimension_ratio > 1
height = (width / dimension_ratio).to_i
else
width = (height * dimension_ratio).to_i
end
video.screenshot(output_path, {:seek_time => 0, :resolution => "#{width}x#{height}"})
FileUtils.chmod(0664, output_path)
end
def generate_resize_for(width, height, source_path, quality = 90) def generate_resize_for(width, height, source_path, quality = 90)
unless File.exists?(source_path) unless File.exists?(source_path)
raise Error.new("file not found") raise Error.new("file not found")
@@ -208,15 +229,10 @@ class Upload < ActiveRecord::Base
output_path = resized_file_path_for(width) output_path = resized_file_path_for(width)
if is_image? if is_image?
Danbooru.resize(source_path, output_path, width, height, quality) Danbooru.resize(source_path, output_path, width, height, quality)
elsif is_ugoira?
ugoira_service.generate_resizes(source_path, resized_file_path_for(Danbooru.config.large_image_width), resized_file_path_for(Danbooru.config.small_image_width))
elsif is_video? elsif is_video?
dimension_ratio = image_width.to_f / image_height generate_video_preview_for(width, height, output_path)
if dimension_ratio > 1
height = (width / dimension_ratio).to_i
else
width = (height * dimension_ratio).to_i
end
video.screenshot(output_path, {:seek_time => 0, :resolution => "#{width}x#{height}"})
FileUtils.chmod(0664, output_path)
end end
end end
end end
@@ -227,6 +243,9 @@ class Upload < ActiveRecord::Base
if is_video? if is_video?
self.image_width = video.width self.image_width = video.width
self.image_height = video.height self.image_height = video.height
elsif is_ugoira?
self.image_width = ugoira_service.width
self.image_height = ugoira_service.height
else else
File.open(file_path, "rb") do |file| File.open(file_path, "rb") do |file|
image_size = ImageSpec.new(file) image_size = ImageSpec.new(file)
@@ -238,13 +257,13 @@ class Upload < ActiveRecord::Base
# Does this file have image dimensions? # Does this file have image dimensions?
def has_dimensions? def has_dimensions?
%w(jpg gif png swf webm).include?(file_ext) %w(jpg gif png swf webm zip).include?(file_ext)
end end
end end
module ContentTypeMethods module ContentTypeMethods
def is_valid_content_type? def is_valid_content_type?
file_ext =~ /jpg|gif|png|swf|webm/ file_ext =~ /jpg|gif|png|swf|webm|zip/
end end
def content_type_to_file_ext(content_type) def content_type_to_file_ext(content_type)
@@ -264,6 +283,9 @@ class Upload < ActiveRecord::Base
when "video/webm" when "video/webm"
"webm" "webm"
when "application/zip"
"zip"
else else
"bin" "bin"
end end
@@ -286,6 +308,9 @@ class Upload < ActiveRecord::Base
when /^\x1a\x45\xdf\xa3/ when /^\x1a\x45\xdf\xa3/
"video/webm" "video/webm"
when /^PK\x03\x04/
"application/zip"
else else
"application/octet-stream" "application/octet-stream"
end end
@@ -321,23 +346,17 @@ class Upload < ActiveRecord::Base
source =~ /^https?:\/\// && file_path.blank? source =~ /^https?:\/\// && file_path.blank?
end end
def is_ugoira? def has_ugoira_tag?
tag_string =~ /\bugoira\b/i tag_string =~ /\bugoira\b/i
end end
# Downloads the file to destination_path # Downloads the file to destination_path
def download_from_source(destination_path) def download_from_source(destination_path)
self.file_path = destination_path self.file_path = destination_path
download = Downloads::File.new(source, destination_path, :is_ugoira => has_ugoira_tag?)
if is_ugoira? download.download!
converter = PixivUgoiraConverter.new(source, destination_path, :webm) self.source = download.source
converter.process! ugoira_service.load(download.data)
self.source = source
else
download = Downloads::File.new(source, destination_path)
download.download!
self.source = download.source
end
end end
end end

View File

@@ -0,0 +1,11 @@
class CreatePixivUgoiraFrameData < ActiveRecord::Migration
def change
create_table :pixiv_ugoira_frame_data do |t|
t.integer :post_id
t.text :data
t.timestamps
end
add_index :pixiv_ugoira_frame_data, :post_id, :unique => true
end
end

View File

@@ -2360,6 +2360,38 @@ CREATE SEQUENCE notes_id_seq
ALTER SEQUENCE notes_id_seq OWNED BY notes.id; ALTER SEQUENCE notes_id_seq OWNED BY notes.id;
--
-- Name: pixiv_ugoira_frame_data; Type: TABLE; Schema: public; Owner: -; Tablespace:
--
CREATE TABLE pixiv_ugoira_frame_data (
id integer NOT NULL,
post_id integer,
data text,
created_at timestamp without time zone,
updated_at timestamp without time zone
);
--
-- Name: pixiv_ugoira_frame_data_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
CREATE SEQUENCE pixiv_ugoira_frame_data_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: pixiv_ugoira_frame_data_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE pixiv_ugoira_frame_data_id_seq OWNED BY pixiv_ugoira_frame_data.id;
-- --
-- Name: pool_versions; Type: TABLE; Schema: public; Owner: -; Tablespace: -- Name: pool_versions; Type: TABLE; Schema: public; Owner: -; Tablespace:
-- --
@@ -4056,6 +4088,13 @@ ALTER TABLE ONLY note_versions ALTER COLUMN id SET DEFAULT nextval('note_version
ALTER TABLE ONLY notes ALTER COLUMN id SET DEFAULT nextval('notes_id_seq'::regclass); ALTER TABLE ONLY notes ALTER COLUMN id SET DEFAULT nextval('notes_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY pixiv_ugoira_frame_data ALTER COLUMN id SET DEFAULT nextval('pixiv_ugoira_frame_data_id_seq'::regclass);
-- --
-- Name: id; Type: DEFAULT; Schema: public; Owner: - -- Name: id; Type: DEFAULT; Schema: public; Owner: -
-- --
@@ -4419,6 +4458,14 @@ ALTER TABLE ONLY notes
ADD CONSTRAINT notes_pkey PRIMARY KEY (id); ADD CONSTRAINT notes_pkey PRIMARY KEY (id);
--
-- Name: pixiv_ugoira_frame_data_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
--
ALTER TABLE ONLY pixiv_ugoira_frame_data
ADD CONSTRAINT pixiv_ugoira_frame_data_pkey PRIMARY KEY (id);
-- --
-- Name: pool_versions_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace: -- Name: pool_versions_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
-- --
@@ -6351,6 +6398,13 @@ CREATE INDEX index_notes_on_creator_id_and_post_id ON notes USING btree (creator
CREATE INDEX index_notes_on_post_id ON notes USING btree (post_id); CREATE INDEX index_notes_on_post_id ON notes USING btree (post_id);
--
-- Name: index_pixiv_ugoira_frame_data_on_post_id; Type: INDEX; Schema: public; Owner: -; Tablespace:
--
CREATE UNIQUE INDEX index_pixiv_ugoira_frame_data_on_post_id ON pixiv_ugoira_frame_data USING btree (post_id);
-- --
-- Name: index_pool_versions_on_pool_id; Type: INDEX; Schema: public; Owner: -; Tablespace: -- Name: index_pool_versions_on_pool_id; Type: INDEX; Schema: public; Owner: -; Tablespace:
-- --
@@ -7055,3 +7109,5 @@ INSERT INTO schema_migrations (version) VALUES ('20140722225753');
INSERT INTO schema_migrations (version) VALUES ('20140725003232'); INSERT INTO schema_migrations (version) VALUES ('20140725003232');
INSERT INTO schema_migrations (version) VALUES ('20141009231234');

BIN
test/fixtures/ugoira.zip vendored Normal file

Binary file not shown.

File diff suppressed because it is too large Load Diff

View File

@@ -20,9 +20,8 @@ module Downloads
should "retry three times" do should "retry three times" do
assert_raises(Errno::ETIMEDOUT) do assert_raises(Errno::ETIMEDOUT) do
@download.http_get_streaming {} @download.http_get_streaming(@source) {}
end end
assert_equal(3, @download.tries)
end end
end end

View File

@@ -27,7 +27,27 @@ module Downloads
assert_rewritten(source, source) assert_rewritten(source, source)
end end
context "An ugoira site for pixiv" do
setup do
@tempfile = Tempfile.new("danbooru-test")
@download = Downloads::File.new("http://www.pixiv.net/member_illust.php?mode=medium&illust_id=46378654", @tempfile.path)
VCR.use_cassette("ugoira-converter", :record => :new_episodes) do
@download.download!
end
end
teardown do
@tempfile.unlink
end
should "download the zip file and update the source" do
assert_equal("http://i1.pixiv.net/img-zip-ugoira/img/2014/10/05/23/42/23/46378654_ugoira1920x1080.zip", @download.source)
end
should "capture the frame data" do
assert_equal([{"file"=>"000000.jpg", "delay"=>200}, {"file"=>"000001.jpg", "delay"=>200}, {"file"=>"000002.jpg", "delay"=>200}, {"file"=>"000003.jpg", "delay"=>200}, {"file"=>"000004.jpg", "delay"=>250}], @download.data[:ugoira_frame_data])
end
end
# Test an old illustration (one uploaded before 2014-09-16). New # Test an old illustration (one uploaded before 2014-09-16). New
# /img-original/ and /img-master/ URLs currently don't work for images # /img-original/ and /img-master/ URLs currently don't work for images
@@ -80,6 +100,7 @@ module Downloads
assert_downloaded(42, @new_medium_thumbnail, "download-pixiv-old-png-new-medium-thumbnail") assert_downloaded(42, @new_medium_thumbnail, "download-pixiv-old-png-new-medium-thumbnail")
assert_downloaded(@file_size, @new_full_size_image, "download-pixiv-old-png-new-full-size") assert_downloaded(@file_size, @new_full_size_image, "download-pixiv-old-png-new-full-size")
end end
end end

View File

@@ -3,36 +3,28 @@ require "test_helper"
class PixivUgoiraConverterTest < ActiveSupport::TestCase class PixivUgoiraConverterTest < ActiveSupport::TestCase
context "An ugoira converter" do context "An ugoira converter" do
setup do setup do
@url = "http://www.pixiv.net/member_illust.php?mode=medium&illust_id=46378654" @zipped_body = "#{Rails.root}/test/fixtures/ugoira.zip"
@write_file = Tempfile.new("output") @write_file = Tempfile.new("converted")
@preview_write_file = Tempfile.new("preview")
@frame_data = [
{"file" => "000000.jpg", "delay" => 200},
{"file" => "000001.jpg", "delay" => 200},
{"file" => "000002.jpg", "delay" => 200},
{"file" => "000003.jpg", "delay" => 200},
{"file" => "000004.jpg", "delay" => 250}
]
end end
teardown do teardown do
@write_file.unlink @write_file.unlink
end @preview_write_file.unlink
should "output to gif" do
@converter = PixivUgoiraConverter.new(@url, @write_file.path, :gif)
VCR.use_cassette("ugoira-converter", :record => :new_episodes) do
@converter.process!
end
assert_operator(File.size(@converter.write_path), :>, 1_000)
end end
should "output to webm" do should "output to webm" do
@converter = PixivUgoiraConverter.new(@url, @write_file.path, :webm) @converter = PixivUgoiraConverter.new
VCR.use_cassette("ugoira-converter", :record => :new_episodes) do @converter.convert(@zipped_body, @write_file.path, @preview_write_file.path, @frame_data)
@converter.process! assert_operator(File.size(@write_file.path), :>, 1_000)
end assert_operator(File.size(@preview_write_file.path), :>, 0)
assert_operator(File.size(@converter.write_path), :>, 1_000)
end end
# should "output to apng" do
# @converter = PixivUgoiraConverter.new(@url, @write_file.path, :apng)
# VCR.use_cassette("ugoira-converter", :record => :new_episodes) do
# @converter.process!
# end
# assert_operator(File.size(@converter.write_path), :>, 1_000)
# end
end end
end end

View File

@@ -12,6 +12,28 @@ module Sources
end end
end end
context "An ugoira source site for pixiv" do
setup do
VCR.use_cassette("ugoira-converter") do
@site = Sources::Site.new("http://www.pixiv.net/member_illust.php?mode=medium&illust_id=46378654")
@site.get
end
end
should "get the file url" do
assert_equal("http://i1.pixiv.net/img-zip-ugoira/img/2014/10/05/23/42/23/46378654_ugoira1920x1080.zip", @site.file_url)
end
should "capture the frame data" do
assert_equal([{"file"=>"000000.jpg", "delay"=>200}, {"file"=>"000001.jpg", "delay"=>200}, {"file"=>"000002.jpg", "delay"=>200}, {"file"=>"000003.jpg", "delay"=>200}, {"file"=>"000004.jpg", "delay"=>250}], @site.ugoira_frame_data)
end
should "capture the image dimensions" do
assert_equal(60, @site.ugoira_width)
assert_equal(60, @site.ugoira_height)
end
end
context "fetching source data for a new manga image" do context "fetching source data for a new manga image" do
setup do setup do
get_source("http://www.pixiv.net/member_illust.php?mode=medium&illust_id=46324488", "source-pixiv-new-manga") get_source("http://www.pixiv.net/member_illust.php?mode=medium&illust_id=46324488", "source-pixiv-new-manga")

View File

@@ -105,14 +105,20 @@ class UploadTest < ActiveSupport::TestCase
setup do setup do
@url = "http://www.pixiv.net/member_illust.php?mode=medium&illust_id=46378654" @url = "http://www.pixiv.net/member_illust.php?mode=medium&illust_id=46378654"
@upload = FactoryGirl.create(:source_upload, :source => @url, :tag_string => "ugoira") @upload = FactoryGirl.create(:source_upload, :source => @url, :tag_string => "ugoira")
@output_path = "#{Rails.root}/tmp/test.download.webm" @output_file = Tempfile.new("download")
end
teardown do
@output_file.unlink
end end
should "process successfully" do should "process successfully" do
VCR.use_cassette("ugoira-converter", :record => :new_episodes) do VCR.use_cassette("ugoira-converter", :record => :new_episodes) do
@upload.download_from_source(@output_path) @upload.download_from_source(@output_file.path)
end end
assert_operator(File.size(@output_path), :>, 1_000) assert_operator(File.size(@output_file.path), :>, 1_000)
assert_equal("application/zip", @upload.file_header_to_content_type(@output_file.path))
assert_equal("zip", @upload.content_type_to_file_ext(@upload.file_header_to_content_type(@output_file.path)))
end end
end end
@@ -229,6 +235,27 @@ class UploadTest < ActiveSupport::TestCase
end end
end end
should "process completely for an ugoira" do
@upload = FactoryGirl.create(:source_upload,
:source => "http://www.pixiv.net/member_illust.php?mode=medium&illust_id=46378654",
:rating => "s",
:uploader_ip_addr => "127.0.0.1",
:tag_string => "hoge foo"
)
VCR.use_cassette("ugoira-converter", :record => :new_episodes) do
assert_difference(["Post.count", "PixivUgoiraFrameData.count"]) do
@upload.process!
end
post = Post.last
assert_not_nil(post.pixiv_ugoira_frame_data)
assert_equal("0d94800c4b520bf3d8adda08f95d31e2", post.md5)
assert_equal(60, post.image_width)
assert_equal(60, post.image_height)
assert_operator(File.size(post.large_file_path), :>, 0)
assert_operator(File.size(post.preview_file_path), :>, 0)
end
end
should "process completely for an uploaded image" do should "process completely for an uploaded image" do
@upload = FactoryGirl.create(:jpg_upload, @upload = FactoryGirl.create(:jpg_upload,
:rating => "s", :rating => "s",

View File

@@ -0,0 +1,521 @@
// Source: https://github.com/pixiv/zip_player
// Required for iOS <6, where Blob URLs are not available. This is slow...
// Source: https://gist.github.com/jonleighton/958841
// Encode `byteLength` bytes of `arrayBuffer`, starting at `off`, as base64.
// Fallback for iOS <6, where Blob URLs are not available. This is slow...
// Adapted from: https://gist.github.com/jonleighton/958841
function base64ArrayBuffer(arrayBuffer, off, byteLength) {
    var alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
    var octets = new Uint8Array(arrayBuffer);
    var leftover = byteLength % 3;
    var tripletEnd = off + byteLength - leftover;
    var pieces = [];
    var triplet;

    // Main loop: each complete 3-byte group becomes 4 base64 characters.
    for (var i = off; i < tripletEnd; i += 3) {
        triplet = (octets[i] << 16) | (octets[i + 1] << 8) | octets[i + 2];
        pieces.push(alphabet[(triplet >> 18) & 63],
                    alphabet[(triplet >> 12) & 63],
                    alphabet[(triplet >> 6) & 63],
                    alphabet[triplet & 63]);
    }

    // Tail: encode the 1 or 2 remaining bytes and pad with '='.
    if (leftover == 1) {
        triplet = octets[tripletEnd];
        pieces.push(alphabet[(triplet >> 2) & 63],
                    alphabet[(triplet & 3) << 4],
                    '==');
    } else if (leftover == 2) {
        triplet = (octets[tripletEnd] << 8) | octets[tripletEnd + 1];
        pieces.push(alphabet[(triplet >> 10) & 63],
                    alphabet[(triplet >> 4) & 63],
                    alphabet[(triplet & 15) << 2],
                    '=');
    }

    return pieces.join('');
}
// Plays a pixiv ugoira animation: streams an uncompressed ZIP of frame images
// over XHR and draws each frame onto a <canvas> element.
// options (as used by this file):
//   canvas    - the <canvas> to draw on (required)
//   source    - URL of the ZIP; when absent, frames load as individual URLs
//   metadata  - {frames: [{file, delay}, ...], mime_type} describing the frames
//   chunkSize - byte size of each ranged fetch
//   loop, autoStart, autosize, debug - behavior flags
function ZipImagePlayer(options) {
    this.op = options;
    // Resolve vendor-prefixed APIs; any of these may be undefined on old browsers.
    this._URL = (window.URL || window.webkitURL || window.MozURL
        || window.MSURL);
    this._Blob = (window.Blob || window.WebKitBlob || window.MozBlob
        || window.MSBlob);
    this._BlobBuilder = (window.BlobBuilder || window.WebKitBlobBuilder
        || window.MozBlobBuilder || window.MSBlobBuilder);
    this._Uint8Array = (window.Uint8Array || window.WebKitUint8Array
        || window.MozUint8Array || window.MSUint8Array);
    this._DataView = (window.DataView || window.WebKitDataView
        || window.MozDataView || window.MSDataView);
    this._ArrayBuffer = (window.ArrayBuffer || window.WebKitArrayBuffer
        || window.MozArrayBuffer || window.MSArrayBuffer);
    this._maxLoadAhead = 0;
    if (!this._URL) {
        this._debugLog("No URL support! Will use slower data: URLs.");
        // Throttle loading to avoid making playback stalling completely while
        // loading images...
        this._maxLoadAhead = 10;
    }
    // Hard requirements: _error() throws, aborting construction.
    if (!this._Blob) {
        this._error("No Blob support");
    }
    if (!this._Uint8Array) {
        this._error("No Uint8Array support");
    }
    if (!this._DataView) {
        this._error("No DataView support");
    }
    if (!this._ArrayBuffer) {
        this._error("No ArrayBuffer support");
    }
    // Safari needs cache-busting on Range requests (see _load); detect it via
    // its "Constructor" suffix on HTMLElement's toString.
    this._isSafari = Object.prototype.toString.call(
        window.HTMLElement).indexOf('Constructor') > 0;
    // Internal state must be initialized before _startLoad(), whose callbacks
    // read these fields.
    this._loadingState = 0;       // 0 = stalled, 1 = loading, 2 = fully loaded
    this._dead = false;           // set by stop(); callbacks bail when true
    this._context = options.canvas.getContext("2d");
    this._files = {};             // ZIP central-directory entries by file name
    this._frameCount = this.op.metadata.frames.length;
    this._debugLog("Frame count: " + this._frameCount);
    this._frame = 0;              // currently displayed frame index
    this._loadFrame = 0;          // next frame index to decode
    this._frameImages = [];       // decoded Image objects by frame index
    this._paused = false;
    this._loadTimer = null;
    this._startLoad();
    if (this.op.autoStart) {
        this.play();
    } else {
        this._paused = true;
    }
}
ZipImagePlayer.prototype = {
    // Size of the initial ranged fetch from the end of the file; must be large
    // enough to contain the ZIP End of Central Directory record (22 bytes plus
    // the central directory itself for small files).
    _trailerBytes: 30000,
    _failed: false,
    // Returns a callback that reports `msg` via _error (for event listeners).
    _mkerr: function(msg) {
        var _this = this;
        return function() {
            _this._error(msg);
        }
    },
    // Record failure and abort by throwing. hasError() exposes the flag.
    _error: function(msg) {
        this._failed = true;
        throw Error("ZipImagePlayer error: " + msg);
    },
    _debugLog: function(msg) {
        if (this.op.debug) {
            console.log(msg);
        }
    },
    // Fetch `length` bytes at `offset` from op.source into this._bytes, then
    // invoke callback(offset, length). When offset/length are null, fetches the
    // whole file. A 200 response means the server ignored the Range header, so
    // the full body replaces the buffer; 206 is a partial response copied into
    // place; anything else is an error.
    _load: function(offset, length, callback) {
        var _this = this;
        // Unfortunately JQuery doesn't support ArrayBuffer XHR
        var xhr = new XMLHttpRequest();
        xhr.addEventListener("load", function(ev) {
            if (_this._dead) {
                return;
            }
            _this._debugLog("Load: " + offset + " " + length + " status=" +
                xhr.status);
            if (xhr.status == 200) {
                _this._debugLog("Range disabled or unsupported, complete load");
                offset = 0;
                length = xhr.response.byteLength;
                _this._len = length;
                _this._buf = xhr.response;
                _this._bytes = new _this._Uint8Array(_this._buf);
            } else {
                if (xhr.status != 206) {
                    _this._error("Unexpected HTTP status " + xhr.status);
                }
                if (xhr.response.byteLength != length) {
                    _this._error("Unexpected length " +
                        xhr.response.byteLength +
                        " (expected " + length + ")");
                }
                _this._bytes.set(new _this._Uint8Array(xhr.response), offset);
            }
            if (callback) {
                callback.apply(_this, [offset, length]);
            }
        }, false);
        xhr.addEventListener("error", this._mkerr("Fetch failed"), false);
        xhr.open("GET", this.op.source);
        xhr.responseType = "arraybuffer";
        if (offset != null && length != null) {
            var end = offset + length;
            xhr.setRequestHeader("Range", "bytes=" + offset + "-" + (end - 1));
            if (this._isSafari) {
                // Range request caching is broken in Safari
                // https://bugs.webkit.org/show_bug.cgi?id=82672
                xhr.setRequestHeader("Cache-control", "no-cache");
                xhr.setRequestHeader("If-None-Match", Math.random().toString());
            }
        }
        /*this._debugLog("Load: " + offset + " " + length);*/
        xhr.send();
    },
    // Kick off loading. A HEAD request discovers the file length so the tail
    // (central directory) can be fetched first; if the length is unavailable,
    // falls back to downloading the entire file in one request.
    // Download progress pointers used throughout:
    //   _pHead  - bytes contiguously available from the start of the file
    //   _pTail  - start offset of the already-loaded tail region
    //   _pFetch - next offset to request in _loadNextChunk
    _startLoad: function() {
        var _this = this;
        if (!this.op.source) {
            // Unpacked mode (individiual frame URLs) - just load the frames.
            this._loadNextFrame();
            return;
        }
        $.ajax({
            url: this.op.source,
            type: "HEAD"
        }).done(function(data, status, xhr) {
            if (_this._dead) {
                return;
            }
            _this._pHead = 0;
            _this._pNextHead = 0;
            _this._pFetch = 0;
            var len = parseInt(xhr.getResponseHeader("Content-Length"));
            if (!len) {
                _this._debugLog("HEAD request failed: invalid file length.");
                _this._debugLog("Falling back to full file mode.");
                _this._load(null, null, function(off, len) {
                    _this._pTail = 0;
                    _this._pHead = len;
                    _this._findCentralDirectory();
                });
                return;
            }
            _this._debugLog("Len: " + len);
            _this._len = len;
            _this._buf = new _this._ArrayBuffer(len);
            _this._bytes = new _this._Uint8Array(_this._buf);
            // Grab the last _trailerBytes (or the whole file if smaller), which
            // should contain the central directory.
            var off = len - _this._trailerBytes;
            if (off < 0) {
                off = 0;
            }
            _this._pTail = len;
            _this._load(off, len - off, function(off, len) {
                _this._pTail = off;
                _this._findCentralDirectory();
            });
        }).fail(this._mkerr("Length fetch failed"));
    },
    // Locate the End of Central Directory record (signature 0x06054b50, fixed
    // 22 bytes when there is no ZIP comment) and fetch any missing part of the
    // central directory before parsing it.
    _findCentralDirectory: function() {
        // No support for ZIP file comment
        var dv = new this._DataView(this._buf, this._len - 22, 22);
        if (dv.getUint32(0, true) != 0x06054b50) {
            this._error("End of Central Directory signature not found");
        }
        var cd_count = dv.getUint16(10, true);
        var cd_size = dv.getUint32(12, true);
        var cd_off = dv.getUint32(16, true);
        if (cd_off < this._pTail) {
            this._load(cd_off, this._pTail - cd_off, function() {
                this._pTail = cd_off;
                this._readCentralDirectory(cd_off, cd_size, cd_count);
            });
        } else {
            this._readCentralDirectory(cd_off, cd_size, cd_count);
        }
    },
    // Parse `count` central directory entries starting at `offset` into
    // this._files (name -> {off, len}), then start streaming file data.
    // Only stored (method 0, uncompressed) entries are supported.
    _readCentralDirectory: function(offset, size, count) {
        var dv = new this._DataView(this._buf, offset, size);
        var p = 0;
        for (var i = 0; i < count; i++ ) {
            if (dv.getUint32(p, true) != 0x02014b50) {
                this._error("Invalid Central Directory signature");
            }
            var compMethod = dv.getUint16(p + 10, true);
            var uncompSize = dv.getUint32(p + 24, true);
            var nameLen = dv.getUint16(p + 28, true);
            var extraLen = dv.getUint16(p + 30, true);
            var cmtLen = dv.getUint16(p + 32, true);
            var off = dv.getUint32(p + 42, true);
            if (compMethod != 0) {
                this._error("Unsupported compression method");
            }
            p += 46;
            var nameView = new this._Uint8Array(this._buf, offset + p, nameLen);
            var name = "";
            for (var j = 0; j < nameLen; j++) {
                name += String.fromCharCode(nameView[j]);
            }
            p += nameLen + extraLen + cmtLen;
            /*this._debugLog("File: " + name + " (" + uncompSize +
                " bytes @ " + off + ")");*/
            this._files[name] = {off: off, len: uncompSize};
        }
        // Two outstanding fetches at any given time.
        // Note: the implementation does not support more than two.
        if (this._pHead >= this._pTail) {
            // Head and tail regions already meet: the whole file is loaded.
            this._pHead = this._len;
            $(this).triggerHandler("loadProgress", [this._pHead / this._len]);
            this._loadNextFrame();
        } else {
            this._loadNextChunk();
            this._loadNextChunk();
        }
    },
    // Request the next chunkSize-byte chunk and, on arrival, advance _pHead.
    // Because two chunks are in flight, an out-of-order arrival is parked in
    // _pNextHead until its predecessor lands.
    _loadNextChunk: function() {
        if (this._pFetch >= this._pTail) {
            return;
        }
        var off = this._pFetch;
        var len = this.op.chunkSize;
        if (this._pFetch + len > this._pTail) {
            len = this._pTail - this._pFetch;
        }
        this._pFetch += len;
        this._load(off, len, function() {
            if (off == this._pHead) {
                if (this._pNextHead) {
                    this._pHead = this._pNextHead;
                    this._pNextHead = 0;
                } else {
                    this._pHead = off + len;
                }
                if (this._pHead >= this._pTail) {
                    this._pHead = this._len;
                }
                /*this._debugLog("New pHead: " + this._pHead);*/
                $(this).triggerHandler("loadProgress",
                    [this._pHead / this._len]);
                if (!this._loadTimer) {
                    this._loadNextFrame();
                }
            } else {
                this._pNextHead = off + len;
            }
            this._loadNextChunk();
        });
    },
    // Offset where a local file entry's data begins: 30-byte local header plus
    // its variable-length name and extra fields.
    // NOTE(review): uses bare DataView rather than this._DataView like the
    // rest of the file — harmless where DataView is unprefixed, but
    // inconsistent with the vendor-prefix fallbacks resolved in the
    // constructor.
    _fileDataStart: function(offset) {
        var dv = new DataView(this._buf, offset, 30);
        var nameLen = dv.getUint16(26, true);
        var extraLen = dv.getUint16(28, true);
        return offset + 30 + nameLen + extraLen;
    },
    // True when the named file's data lies entirely within the contiguously
    // loaded head region. Errors out if the name is not in the ZIP at all.
    _isFileAvailable: function(name) {
        var info = this._files[name];
        if (!info) {
            this._error("File " + name + " not found in ZIP");
        }
        // Need the 30-byte local header before _fileDataStart can parse it.
        if (this._pHead < (info.off + 30)) {
            return false;
        }
        return this._pHead >= (this._fileDataStart(info.off) + info.len);
    },
    // Decode the next frame whose bytes are available. In packed mode the
    // frame's ZIP slice becomes a Blob URL (or a data: URL when Blob URLs are
    // unsupported); in unpacked mode meta.file is itself the image URL.
    _loadNextFrame: function() {
        if (this._dead) {
            return;
        }
        var frame = this._loadFrame;
        if (frame >= this._frameCount) {
            return;
        }
        var meta = this.op.metadata.frames[frame];
        if (!this.op.source) {
            // Unpacked mode (individiual frame URLs)
            this._loadFrame += 1;
            this._loadImage(frame, meta.file, false);
            return;
        }
        if (!this._isFileAvailable(meta.file)) {
            return;
        }
        this._loadFrame += 1;
        var off = this._fileDataStart(this._files[meta.file].off);
        var end = off + this._files[meta.file].len;
        var url;
        var mime_type = this.op.metadata.mime_type || "image/png";
        if (this._URL) {
            var slice;
            if (!this._buf.slice) {
                // Old browsers without ArrayBuffer.slice: copy manually.
                slice = new this._ArrayBuffer(this._files[meta.file].len);
                var view = new this._Uint8Array(slice);
                view.set(this._bytes.subarray(off, end));
            } else {
                slice = this._buf.slice(off, end);
            }
            var blob;
            try {
                blob = new this._Blob([slice], {type: mime_type});
            }
            catch (err) {
                this._debugLog("Blob constructor failed. Trying BlobBuilder..."
                    + " (" + err.message + ")");
                var bb = new this._BlobBuilder();
                bb.append(slice);
                blob = bb.getBlob();
            }
            /*_this._debugLog("Loading " + meta.file + " to frame " + frame);*/
            url = this._URL.createObjectURL(blob);
            this._loadImage(frame, url, true);
        } else {
            url = ("data:" + mime_type + ";base64,"
                + base64ArrayBuffer(this._buf, off, end - off));
            this._loadImage(frame, url, false);
        }
    },
    // Create an Image for `frame` from `url`. On load: revoke Blob URLs,
    // stash the image, emit "frameLoaded", resume display if stalled, and
    // either finish (last frame — drop the ZIP buffers) or schedule the next
    // frame, throttled via _maxLoadAhead when data: URLs are in use.
    _loadImage: function(frame, url, isBlob) {
        var _this = this;
        var image = new Image();
        var meta = this.op.metadata.frames[frame];
        image.addEventListener('load', function() {
            _this._debugLog("Loaded " + meta.file + " to frame " + frame);
            if (isBlob) {
                _this._URL.revokeObjectURL(url);
            }
            if (_this._dead) {
                return;
            }
            _this._frameImages[frame] = image;
            $(_this).triggerHandler("frameLoaded", frame);
            if (_this._loadingState == 0) {
                _this._displayFrame.apply(_this);
            }
            if (frame >= (_this._frameCount - 1)) {
                _this._setLoadingState(2);
                _this._buf = null;
                _this._bytes = null;
            } else {
                if (!_this._maxLoadAhead ||
                    (frame - _this._frame) < _this._maxLoadAhead) {
                    _this._loadNextFrame();
                } else if (!_this._loadTimer) {
                    _this._loadTimer = setTimeout(function() {
                        _this._loadTimer = null;
                        _this._loadNextFrame();
                    }, 200);
                }
            }
        });
        image.src = url;
    },
    // Update the loading state (0 stalled / 1 loading / 2 done) and notify
    // listeners only on actual transitions.
    _setLoadingState: function(state) {
        if (this._loadingState != state) {
            this._loadingState = state;
            $(this).triggerHandler("loadingStateChanged", [state]);
        }
    },
    // Draw the current frame onto the canvas and, unless paused, schedule the
    // next frame after this frame's metadata delay. If the image is not yet
    // decoded, stall (state 0) — _loadImage's load handler will resume us.
    _displayFrame: function() {
        if (this._dead) {
            return;
        }
        var _this = this;
        var meta = this.op.metadata.frames[this._frame];
        this._debugLog("Displaying frame: " + this._frame + " " + meta.file);
        var image = this._frameImages[this._frame];
        if (!image) {
            this._debugLog("Image not available!");
            this._setLoadingState(0);
            return;
        }
        if (this._loadingState != 2) {
            this._setLoadingState(1);
        }
        if (this.op.autosize) {
            if (this._context.canvas.width != image.width || this._context.canvas.height != image.height) {
                // make the canvas autosize itself according to the images drawn on it
                // should set it once, since we don't have variable sized frames
                this._context.canvas.width = image.width;
                this._context.canvas.height = image.height;
            }
        }; // NOTE(review): stray semicolon (harmless empty statement)
        this._context.clearRect(0, 0, this.op.canvas.width,
            this.op.canvas.height);
        this._context.drawImage(image, 0, 0);
        $(this).triggerHandler("frame", this._frame);
        if (!this._paused) {
            this._timer = setTimeout(function() {
                _this._timer = null;
                _this._nextFrame.apply(_this);
            }, meta.delay);
        }
    },
    // Advance to the next frame, wrapping when op.loop is set, otherwise
    // pausing at the end. NOTE(review): the `frame` parameter is never used.
    _nextFrame: function(frame) {
        if (this._frame >= (this._frameCount - 1)) {
            if (this.op.loop) {
                this._frame = 0;
            } else {
                this.pause();
                return;
            }
        } else {
            this._frame += 1;
        }
        this._displayFrame();
    },
    // Resume playback from the current frame; no-op if already playing or
    // stopped. Emits "play".
    play: function() {
        if (this._dead) {
            return;
        }
        if (this._paused) {
            $(this).triggerHandler("play", [this._frame]);
            this._paused = false;
            this._displayFrame();
        }
    },
    // Halt playback, cancelling any pending frame timer. Emits "pause".
    pause: function() {
        if (this._dead) {
            return;
        }
        if (!this._paused) {
            if (this._timer) {
                clearTimeout(this._timer);
            }
            this._paused = true;
            $(this).triggerHandler("pause", [this._frame]);
        }
    },
    // Jump back to frame 0 and redraw (playback state is unchanged).
    rewind: function() {
        if (this._dead) {
            return;
        }
        this._frame = 0;
        if (this._timer) {
            clearTimeout(this._timer);
        }
        this._displayFrame();
    },
    // Permanently shut down: cancel timers, drop image and ZIP buffers, and
    // mark the player dead so in-flight callbacks bail out. Emits "stop".
    stop: function() {
        this._debugLog("Stopped!");
        this._dead = true;
        if (this._timer) {
            clearTimeout(this._timer);
        }
        if (this._loadTimer) {
            clearTimeout(this._loadTimer);
        }
        this._frameImages = null;
        this._buf = null;
        this._bytes = null;
        $(this).triggerHandler("stop");
    },
    getCurrentFrame: function() {
        return this._frame;
    },
    getLoadedFrames: function() {
        return this._frameImages.length;
    },
    getFrameCount: function() {
        return this._frameCount;
    },
    hasError: function() {
        return this._failed;
    }
}