reports: add ability to group reports by column.
Add the ability to group reports by one or more columns. For example, you can see the posts made by the top 10 uploaders over time, or posts grouped by rating over time.
This commit is contained in:
@@ -3,60 +3,88 @@
|
||||
module Aggregatable
|
||||
extend ActiveSupport::Concern
|
||||
|
||||
def timeseries(period: "day", date_column: :created_at, from: first[date_column], to: Time.now.utc, columns: { count: "COUNT(*)" })
|
||||
def timeseries(period: "day", date_column: :created_at, from: first[date_column], to: Time.now.utc, groups: [], group_limit: 10, columns: { count: "COUNT(*)" })
|
||||
raise ArgumentError, "invalid period: #{period}" if !period.in?(%w[second minute hour day week month quarter year])
|
||||
raise ArgumentError if all.group_values.present?
|
||||
|
||||
from = from.to_date
|
||||
to = to.to_date
|
||||
|
||||
# SELECT
|
||||
# date_trunc('day', posts.created_at) AS date
|
||||
# COUNT(*) AS count
|
||||
# FROM posts
|
||||
# WHERE posts.created_at BETWEEN from AND to
|
||||
# GROUP BY date
|
||||
group_associations = groups.map { |name| reflections[name.to_s] }.compact_blank
|
||||
group_fields = groups.map { |name| reflections[name.to_s]&.foreign_key || name }
|
||||
|
||||
# SELECT date_trunc('day', posts.created_at) AS date FROM posts WHERE created_at BETWEEN from AND to GROUP BY date
|
||||
subquery = select(date_trunc(period, date_column).as("date")).where(date_column => (from..to)).group("date").reorder(nil)
|
||||
|
||||
group_fields.each do |name|
|
||||
# SELECT date_trunc('day', posts.created_at) AS date, uploader_id FROM posts WHERE created_at BETWEEN from AND to GROUP BY date, uploader_id
|
||||
subquery = subquery.select(name).group(name)
|
||||
end
|
||||
|
||||
columns.each do |name, sql|
|
||||
# SELECT COUNT(*) AS count
|
||||
# SELECT date_trunc('day', posts.created_at) AS date, uploader_id, COUNT(*) AS count FROM posts WHERE created_at BETWEEN from AND to GROUP BY date, uploader_id
|
||||
subquery = subquery.select(Arel.sql(sql).as(name.to_s).to_sql)
|
||||
end
|
||||
|
||||
# SELECT date_trunc('day', dates) AS date FROM generate_series('2022-01-01', '2022-02-15', '1 day'::interval) AS dates
|
||||
# SELECT date_trunc('day', dates) AS date FROM generate_series(from, to, '1 day'::interval) AS dates
|
||||
dates = "SELECT #{date_trunc(period, Arel.sql("dates")).to_sql} AS date FROM #{generate_timeseries(from, to, period).to_sql} AS dates"
|
||||
|
||||
# SELECT
|
||||
# date_trunc('day', dates.date) AS date,
|
||||
# COALESCE(subquery.count, 0) AS count
|
||||
# FROM (
|
||||
# SELECT date_trunc('day', dates) AS date
|
||||
# FROM generate_series(from, to, '1 day'::interval) AS dates
|
||||
# ) AS dates
|
||||
# LEFT OUTER JOIN (
|
||||
# SELECT
|
||||
# date_trunc('day', posts.created_at) AS date,
|
||||
# COUNT(*) AS count
|
||||
# FROM posts
|
||||
# WHERE posts.created_at BETWEEN from AND to
|
||||
# GROUP BY date
|
||||
# ) AS subquery
|
||||
# ORDER BY date DESC
|
||||
query =
|
||||
unscoped.
|
||||
select(date_trunc(period, Arel.sql("dates.date")).as("date")).
|
||||
from("(#{dates}) AS dates").
|
||||
joins("LEFT OUTER JOIN (#{subquery.to_sql}) AS subquery ON subquery.date = dates.date").
|
||||
order("date DESC")
|
||||
# SELECT dates.date FROM (SELECT date_trunc('day', dates) AS date FROM generate_series(from, to, '1 day'::interval) AS dates) AS dates
|
||||
query = unscoped.select("dates.date").from("(#{dates}) AS dates")
|
||||
|
||||
group_fields.each do |field|
|
||||
# CROSS JOIN (SELECT uploader_id FROM posts WHERE created_at BETWEEN from AND to AND uploader_id IS NOT NULL GROUP BY uploader_id ORDER BY COUNT(*) DESC LIMIT 10) AS uploader_ids.uploader_id
|
||||
join = select(field).where(date_column => (from..to)).where.not(field => nil).group(field).reorder(Arel.sql("COUNT(*) DESC")).limit(group_limit)
|
||||
|
||||
# SELECT dates.date, uploader_ids.uploader_id
|
||||
# FROM (SELECT date_trunc('day', dates) AS date FROM generate_series('2022-01-01', '2022-02-15', '1 day'::interval) AS dates) AS dates
|
||||
# CROSS JOIN (SELECT uploader_id FROM posts WHERE created_at BETWEEN from AND to GROUP BY uploader_ids ORDER BY COUNT(*) DESC LIMIT 10) AS uploader_ids.uploader_id
|
||||
query = query.select("#{connection.quote_table_name(field.to_s.pluralize)}.#{connection.quote_column_name(field)}")
|
||||
query = query.joins("CROSS JOIN (#{join.to_sql}) AS #{connection.quote_column_name(field.to_s.pluralize)}")
|
||||
end
|
||||
|
||||
# on_clause = "subquery.date = dates.date AND subquery.uploader_id = uploader_ids.uploader_id"
|
||||
on_clause = ["date", *group_fields].map { |group| "subquery.#{connection.quote_column_name(group)} = #{connection.quote_table_name(group.to_s.pluralize)}.#{connection.quote_column_name(group)}" }.join(" AND ")
|
||||
query = query.joins("LEFT OUTER JOIN (#{subquery.to_sql}) AS subquery ON #{on_clause}")
|
||||
query = query.reorder("date DESC")
|
||||
|
||||
columns.each do |name, sql|
|
||||
# SELECT COALESCE(subquery.count, 0) AS count
|
||||
# SELECT dates.date, uploader_ids.uploader_id, COALESCE(subquery.count, 0) AS count FROM ...
|
||||
query = query.select(coalesce(Arel.sql("subquery.#{connection.quote_column_name(name)}"), 0).as(name.to_s))
|
||||
end
|
||||
|
||||
query.select_all
|
||||
# query =
|
||||
# SELECT
|
||||
# dates.date,
|
||||
# uploader_ids.uploader_id,
|
||||
# COALESCE(subquery.count, 0) AS count
|
||||
# FROM (
|
||||
# SELECT date_trunc('day', dates) AS date FROM generate_series(from, to, '1 day'::interval) AS dates
|
||||
# ) AS dates
|
||||
# CROSS JOIN (
|
||||
# SELECT uploader_id FROM posts WHERE created_at BETWEEN from AND to AND uploader_id IS NOT NULL GROUP BY uploader_id ORDER BY COUNT(*) DESC LIMIT 10
|
||||
# ) AS uploader_ids.uploader_id
|
||||
# LEFT OUTER JOIN (
|
||||
# SELECT
|
||||
# date_trunc('day', posts.created_at) AS date,
|
||||
# uploader_id,
|
||||
# COUNT(*) AS count
|
||||
# FROM posts
|
||||
# WHERE created_at BETWEEN from AND to
|
||||
# GROUP BY date, uploader_id
|
||||
# ) subquery ON subquery.date = dates.date AND subquery.uploader_id = uploader_ids.uploader_id
|
||||
# ORDER BY date DESC
|
||||
|
||||
results = query.select_all
|
||||
types = results.columns.map { |column| [column, :object] }.to_h
|
||||
|
||||
dataframe = Danbooru::DataFrame.new(results.to_a, types: types)
|
||||
dataframe = dataframe.preload_associations(group_associations)
|
||||
dataframe
|
||||
end
|
||||
|
||||
def group_by_period(period = "day", column = :created_at)
|
||||
select(date_trunc(period, column).as("date")).group("date").order(Arel.sql("date DESC"))
|
||||
select(date_trunc(period, column).as("date")).group("date").reorder(Arel.sql("date DESC"))
|
||||
end
|
||||
|
||||
def select_all
|
||||
|
||||
47
app/logical/danbooru/data_frame.rb
Normal file
47
app/logical/danbooru/data_frame.rb
Normal file
@@ -0,0 +1,47 @@
|
||||
# A wrapper around Rover::DataFrame that adds some extra utility methods.
#
# @see https://github.com/ankane/rover
module Danbooru
  class DataFrame
    # The wrapped Rover::DataFrame instance.
    attr_reader :df

    # Forward common dataframe operations directly to the underlying Rover::DataFrame.
    delegate :head, :shape, :types, :rename, :each_row, :[], :[]=, to: :df

    # Build a dataframe. All arguments are forwarded verbatim to Rover::DataFrame.new.
    def initialize(...)
      @df = Rover::DataFrame.new(...)
    end

    # Replace ID columns with the actual object. For example, replace the `user_id` column with a `user` column containing User objects.
    #
    # @param associations [Array] ActiveRecord reflections; each supplies the foreign key
    #   column to replace, the association name, and the klass used to load the records.
    # @return [Danbooru::DataFrame] a copy where each foreign key column is renamed to the
    #   association name and its IDs replaced by loaded records (nil when no match).
    def preload_associations(associations)
      associations.reduce(dup) do |table, association|
        primary_key = association.association_primary_key
        foreign_key = association.foreign_key
        name = association.name.to_s

        # Load every referenced record in a single query, keyed by primary key for O(1) lookup.
        ids = table[foreign_key].to_a.uniq.compact_blank
        records = association.klass.where(primary_key => ids).index_by(&primary_key.to_sym)

        # NOTE(review): the return value of #rename is discarded, so this assumes Rover's
        # #rename mutates the receiver in place — confirm against the Rover API.
        table.rename({ foreign_key => name })
        table[name] = table[name].map { |id| records[id] }
        table
      end
    end

    # Pivot the table: keep `index` as the row key and spread each distinct value of the
    # `pivot` column into new columns, one per (pivot value, remaining data column) pair.
    def crosstab(index, pivot)
      new_df = DataFrame.new(index => df[index].uniq)

      df[pivot].uniq.to_a.each do |value|
        columns = df.types.keys.without(index, pivot)
        columns.each do |column|
          # With a single data column, name the new column after the pivot value alone;
          # otherwise disambiguate with "<value>_<column>".
          name = columns.one? ? value.to_s : "#{value}_#{column}"
          new_df[name] = df[df[pivot] == value][column]
        end
      end

      new_df
    end

    # Serialize as an array of row hashes; any JSON options are accepted but ignored.
    def as_json(*options)
      df.to_a
    end
  end
end
|
||||
Reference in New Issue
Block a user