APIv2: Comparisons for breakdowns, timeseries, time_on_page (#4647)

* Refactor comparisons to a new options format

Prerequisite for APIv2 comparison work

* Experiment with default include deduplication

* WIP

Oops, breaks `include.total_rows`

* WIP

* Refactor breakdown.ex

* Pagination fix: don't paginate split subqueries

* Timeseries tests pass

* Aggregate tests use QueryExecutor

* Simplify QueryExecutor

* Handle legacy time-on-page metric in query_executor.ex

No behavioral changes

* Remove keep_requested_metrics

* Clean up imports

* Refactor aggregate.ex to be more straightforward in output format building

* top stats: compute comparison via apiv2

* Minor cleanups

* WIP: Pipelines

* WIP: refactor for code cleanliness

* QueryExecutor to QueryRunner

* Make compilable

* Comparisons for timeseries works

Except for comparisons where comparison window is bigger than source query window

* Add special case for timeseries

* JSON schema tests for comparisons

* Test comparisons with the new API

* comparison date range parsing improvement

* Make comparisons api internal-only

* typegen

* credo

* Different schemata

* get_comparison_query

* Add comment on timeseries result format

* comparisons typegen

* Percent change for revenue metrics fix

* Use defstruct for query_runner over map

* Remove preloading atoms
This commit is contained in:
Karl-Aksel Puulmann 2024-10-08 13:13:04 +03:00 committed by GitHub
parent e7332d95b2
commit 5ad743c8d3
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
23 changed files with 1056 additions and 601 deletions

View File

@ -143,6 +143,28 @@ export interface QueryApiSchema {
* If set, returns the total number of result rows before pagination under `meta.total_rows`
*/
total_rows?: boolean;
comparisons?:
| {
mode: "previous_period" | "year_over_year";
/**
* If set and using time:day dimensions, day-of-week of comparison query is matched
*/
match_day_of_week?: boolean;
}
| {
mode: "custom";
/**
* If set and using time:day dimensions, day-of-week of comparison query is matched
*/
match_day_of_week?: boolean;
/**
* Used when mode is "custom". A list of two ISO8601 dates or timestamps to compare against.
*
* @minItems 2
* @maxItems 2
*/
date_range: [string, string];
};
};
pagination?: {
/**

View File

@ -68,13 +68,17 @@ defmodule Plausible.Stats.Goal.Revenue do
def cast_revenue_metrics_to_money(results, currency) when is_map(results) do
for {metric, value} <- results, into: %{} do
if metric in @revenue_metrics && currency do
{metric, Money.new!(value || 0, currency)}
else
{metric, value}
end
{metric, maybe_cast_metric_to_money(value, metric, currency)}
end
end
def cast_revenue_metrics_to_money(results, _), do: results
def maybe_cast_metric_to_money(value, metric, currency) do
if currency && metric in @revenue_metrics do
Money.new!(value || 0, currency)
else
value
end
end
end

View File

@ -1,6 +1,5 @@
defmodule Plausible.Stats do
use Plausible
alias Plausible.Stats.QueryResult
use Plausible.ClickhouseRepo
alias Plausible.Stats.{
@ -9,8 +8,7 @@ defmodule Plausible.Stats do
Timeseries,
CurrentVisitors,
FilterSuggestions,
QueryOptimizer,
SQL
QueryRunner
}
use Plausible.DebugReplayInfo
@ -18,12 +16,7 @@ defmodule Plausible.Stats do
def query(site, query) do
include_sentry_replay_info()
optimized_query = QueryOptimizer.optimize(query)
optimized_query
|> SQL.QueryBuilder.build(site)
|> ClickhouseRepo.all(query: query)
|> QueryResult.from(site, optimized_query)
QueryRunner.run(site, query)
end
def breakdown(site, query, metrics, pagination) do

View File

@ -7,9 +7,7 @@ defmodule Plausible.Stats.Aggregate do
use Plausible.ClickhouseRepo
use Plausible
import Plausible.Stats.Base
import Ecto.Query
alias Plausible.Stats.{Query, Util, SQL, Filters}
alias Plausible.Stats.{Query, QueryRunner}
def aggregate(site, query, metrics) do
{currency, metrics} =
@ -21,91 +19,59 @@ defmodule Plausible.Stats.Aggregate do
Query.trace(query, metrics)
query_with_metrics = %Query{query | metrics: metrics}
query = %Query{query | metrics: metrics}
query_result = QueryRunner.run(site, query)
q = Plausible.Stats.SQL.QueryBuilder.build(query_with_metrics, site)
[entry] = query_result.results
time_on_page_task =
if :time_on_page in query_with_metrics.metrics do
fn -> aggregate_time_on_page(site, query) end
else
fn -> %{} end
end
Plausible.ClickhouseRepo.parallel_tasks([
run_query_task(q, query),
time_on_page_task
])
|> Enum.reduce(%{}, fn aggregate, task_result -> Map.merge(aggregate, task_result) end)
|> Util.keep_requested_metrics(metrics)
|> cast_revenue_metrics_to_money(currency)
|> Enum.map(&maybe_round_value/1)
|> Enum.map(fn {metric, value} -> {metric, %{value: value}} end)
query.metrics
|> Enum.with_index()
|> Enum.map(fn {metric, index} ->
{
metric,
metric_map(entry, index, metric, currency)
}
end)
|> Enum.into(%{})
end
defp run_query_task(nil, _query), do: fn -> %{} end
defp run_query_task(q, query), do: fn -> ClickhouseRepo.one(q, query: query) end
def metric_map(
%{metrics: metrics, comparison: %{metrics: comparison_metrics, change: change}},
index,
metric,
currency
) do
%{
value: get_value(metrics, index, metric, currency),
comparison_value: get_value(comparison_metrics, index, metric, currency),
change: Enum.at(change, index)
}
end
defp aggregate_time_on_page(site, query) do
windowed_pages_q =
from e in base_event_query(site, Query.remove_top_level_filters(query, ["event:page"])),
where: e.name != "pageleave",
select: %{
next_timestamp: over(fragment("leadInFrame(?)", e.timestamp), :event_horizon),
next_pathname: over(fragment("leadInFrame(?)", e.pathname), :event_horizon),
timestamp: e.timestamp,
pathname: e.pathname,
session_id: e.session_id
},
windows: [
event_horizon: [
partition_by: e.session_id,
order_by: e.timestamp,
frame: fragment("ROWS BETWEEN CURRENT ROW AND 1 FOLLOWING")
]
]
def metric_map(%{metrics: metrics}, index, metric, currency) do
%{
value: get_value(metrics, index, metric, currency)
}
end
event_page_filter = Filters.get_toplevel_filter(query, "event:page")
timed_page_transitions_q =
from e in Ecto.Query.subquery(windowed_pages_q),
group_by: [e.pathname, e.next_pathname, e.session_id],
where: ^SQL.WhereBuilder.build_condition(:pathname, event_page_filter),
where: e.next_timestamp != 0,
select: %{
pathname: e.pathname,
transition: e.next_pathname != e.pathname,
duration: sum(e.next_timestamp - e.timestamp)
}
avg_time_per_page_transition_q =
from e in Ecto.Query.subquery(timed_page_transitions_q),
select: %{avg: fragment("sum(?)/countIf(?)", e.duration, e.transition)},
group_by: e.pathname
time_on_page_q =
from e in Ecto.Query.subquery(avg_time_per_page_transition_q),
select: fragment("avg(ifNotFinite(?,NULL))", e.avg)
%{time_on_page: ClickhouseRepo.one(time_on_page_q, query: query)}
def get_value(metric_list, index, metric, currency) do
metric_list
|> Enum.at(index)
|> maybe_round_value(metric)
|> maybe_cast_metric_to_money(metric, currency)
end
@metrics_to_round [:bounce_rate, :time_on_page, :visit_duration, :sample_percent]
defp maybe_round_value({metric, nil}), do: {metric, nil}
defp maybe_round_value({metric, value}) when metric in @metrics_to_round do
{metric, round(value)}
end
defp maybe_round_value(entry), do: entry
defp maybe_round_value(nil, _metric), do: nil
defp maybe_round_value(value, metric) when metric in @metrics_to_round, do: round(value)
defp maybe_round_value(value, _metric), do: value
on_ee do
defp cast_revenue_metrics_to_money(results, revenue_goals) do
Plausible.Stats.Goal.Revenue.cast_revenue_metrics_to_money(results, revenue_goals)
defp maybe_cast_metric_to_money(value, metric, currency) do
Plausible.Stats.Goal.Revenue.maybe_cast_metric_to_money(value, metric, currency)
end
else
defp cast_revenue_metrics_to_money(results, _revenue_goals), do: results
defp maybe_cast_metric_to_money(value, _metric, _currency), do: value
end
end

View File

@ -9,15 +9,13 @@ defmodule Plausible.Stats.Breakdown do
use Plausible
use Plausible.Stats.SQL.Fragments
import Plausible.Stats.Base
import Ecto.Query
alias Plausible.Stats.{Query, QueryOptimizer, QueryResult, SQL}
alias Plausible.Stats.{Query, QueryOptimizer, QueryRunner}
def breakdown(
site,
%Query{dimensions: [dimension], order_by: order_by} = query,
metrics,
pagination,
{limit, page},
_opts \\ []
) do
transformed_metrics = transform_metrics(metrics, dimension)
@ -33,20 +31,15 @@ defmodule Plausible.Stats.Breakdown do
|> Enum.uniq_by(&elem(&1, 0)),
dimensions: transform_dimensions(dimension),
filters: query.filters ++ dimension_filters(dimension),
pagination: %{limit: limit, offset: (page - 1) * limit},
v2: true,
# Allow pageview and event metrics to be queried off of sessions table
legacy_breakdown: true
)
|> QueryOptimizer.optimize()
q = SQL.QueryBuilder.build(query_with_metrics, site)
q
|> apply_pagination(pagination)
|> ClickhouseRepo.all(query: query)
|> QueryResult.from(site, query_with_metrics)
QueryRunner.run(site, query_with_metrics)
|> build_breakdown_result(query_with_metrics, metrics)
|> maybe_add_time_on_page(site, query_with_metrics, metrics)
|> update_currency_metrics(site, query_with_metrics)
end
@ -67,109 +60,6 @@ defmodule Plausible.Stats.Breakdown do
defp result_key("visit:" <> key), do: key |> String.to_existing_atom()
defp result_key(dimension), do: dimension
defp maybe_add_time_on_page(event_results, site, query, metrics) do
if query.dimensions == ["event:page"] and :time_on_page in metrics do
pages = Enum.map(event_results, & &1[:page])
time_on_page_result = breakdown_time_on_page(site, query, pages)
event_results
|> Enum.map(fn row ->
Map.put(row, :time_on_page, time_on_page_result[row[:page]])
end)
else
event_results
end
end
defp breakdown_time_on_page(_site, _query, []) do
%{}
end
defp breakdown_time_on_page(site, query, pages) do
import Ecto.Query
windowed_pages_q =
from e in base_event_query(
site,
Query.remove_top_level_filters(query, ["event:page", "event:props"])
),
where: e.name != "pageleave",
select: %{
next_timestamp: over(fragment("leadInFrame(?)", e.timestamp), :event_horizon),
next_pathname: over(fragment("leadInFrame(?)", e.pathname), :event_horizon),
timestamp: e.timestamp,
pathname: e.pathname,
session_id: e.session_id
},
windows: [
event_horizon: [
partition_by: e.session_id,
order_by: e.timestamp,
frame: fragment("ROWS BETWEEN CURRENT ROW AND 1 FOLLOWING")
]
]
timed_page_transitions_q =
from e in subquery(windowed_pages_q),
group_by: [e.pathname, e.next_pathname, e.session_id],
where: e.pathname in ^pages,
where: e.next_timestamp != 0,
select: %{
pathname: e.pathname,
transition: e.next_pathname != e.pathname,
duration: sum(e.next_timestamp - e.timestamp)
}
no_select_timed_pages_q =
from e in subquery(timed_page_transitions_q),
group_by: e.pathname
date_range = Query.date_range(query)
timed_pages_q =
if query.include_imported do
# Imported page views have pre-calculated values
imported_timed_pages_q =
from i in "imported_pages",
group_by: i.page,
where: i.site_id == ^site.id,
where: i.date >= ^date_range.first and i.date <= ^date_range.last,
where: i.page in ^pages,
select: %{
page: i.page,
time_on_page: sum(i.time_on_page),
visits: sum(i.pageviews) - sum(i.exits)
}
timed_pages_q =
from e in no_select_timed_pages_q,
select: %{
page: e.pathname,
time_on_page: sum(e.duration),
visits: fragment("countIf(?)", e.transition)
}
"timed_pages"
|> with_cte("timed_pages", as: ^timed_pages_q)
|> with_cte("imported_timed_pages", as: ^imported_timed_pages_q)
|> join(:full, [t], i in "imported_timed_pages", on: t.page == i.page)
|> select(
[t, i],
{
fragment("if(empty(?),?,?)", t.page, i.page, t.page),
(t.time_on_page + i.time_on_page) / (t.visits + i.visits)
}
)
else
from e in no_select_timed_pages_q,
select: {e.pathname, fragment("sum(?)/countIf(?)", e.duration, e.transition)}
end
timed_pages_q
|> Plausible.ClickhouseRepo.all(query: query)
|> Map.new()
end
defp maybe_remap_to_group_conversion_rate(metric, dimension) do
case {metric, dimension} do
{:conversion_rate, "event:props:" <> _} -> :conversion_rate
@ -233,14 +123,6 @@ defmodule Plausible.Stats.Breakdown do
defp dimension_filters(_), do: []
defp apply_pagination(q, {limit, page}) do
offset = (page - 1) * limit
q
|> limit(^limit)
|> offset(^offset)
end
on_ee do
defp update_currency_metrics(results, site, %Query{dimensions: ["event:goal"]}) do
site = Plausible.Repo.preload(site, :goals)

View File

@ -15,8 +15,12 @@ defmodule Plausible.Stats.Compare do
def percent_change(_old_count, nil), do: nil
def percent_change(%Money{} = old_count, %Money{} = new_count) do
old_count = old_count |> Money.to_decimal() |> Decimal.to_float()
new_count = new_count |> Money.to_decimal() |> Decimal.to_float()
percent_change(old_count |> Money.to_decimal(), new_count |> Money.to_decimal())
end
def percent_change(%Decimal{} = old_count, %Decimal{} = new_count) do
old_count = old_count |> Decimal.to_float()
new_count = new_count |> Decimal.to_float()
percent_change(old_count, new_count)
end

View File

@ -10,122 +10,86 @@ defmodule Plausible.Stats.Comparisons do
alias Plausible.Stats
alias Plausible.Stats.{Query, DateTimeRange}
@modes ~w(previous_period year_over_year custom)
@disallowed_periods ~w(realtime all)
@type mode() :: String.t() | nil
@typep option() :: {:from, String.t()} | {:to, String.t()} | {:now, NaiveDateTime.t()}
@spec compare(Plausible.Site.t(), Stats.Query.t(), mode(), [option()]) ::
{:ok, Stats.Query.t()} | {:error, :not_supported} | {:error, :invalid_dates}
@spec get_comparison_query(Stats.Query.t(), map()) :: Stats.Query.t()
@doc """
Generates a comparison query based on the source query and comparison mode.
Currently only historical periods are supported for comparisons (not `realtime`
and `30m` periods).
The mode parameter specifies the type of comparison and can be one of the
## Options
* `mode` (required) - specifies the type of comparison and can be one of the
following:
* `"previous_period"` - shifts back the query by the same number of days the
source query has.
* `"previous_period"` - shifts back the query by the same number of days the
source query has.
* `"year_over_year"` - shifts back the query by 1 year.
* `"year_over_year"` - shifts back the query by 1 year.
* `"custom"` - compares the query using a custom date range. See options for
more details.
* `"custom"` - compares the query using a custom date range. See `date_range` for
more details.
The comparison query returned by the function has its end date restricted to
the current day. This can be overridden by the `now` option, described below.
* `:date_range` - an ISO-8601 date string pair used when mode is `"custom"`.
## Options
* `:now` - a `NaiveDateTime` struct with the current date and time. This is
optional and used for testing purposes.
* `:from` - a ISO-8601 date string used when mode is `"custom"`.
* `:to` - a ISO-8601 date string used when mode is `"custom"`. Must be
after `from`.
* `:match_day_of_week?` - determines whether the comparison query should be
* `:match_day_of_week` - determines whether the comparison query should be
adjusted to match the day of the week of the source query. When this option
is set to true, the comparison query is shifted to start on the same day of
the week as the source query, rather than on the exact same date. For
example, if the source query starts on Sunday, January 1st, 2023 and the
`year_over_year` comparison query is configured to `match_day_of_week?`,
`year_over_year` comparison query is configured to `match_day_of_week`,
it will be shifted to start on Sunday, January 2nd, 2022 instead of
January 1st. Defaults to false.
"""
def compare(%Plausible.Site{} = site, %Stats.Query{} = source_query, mode, opts \\ []) do
opts =
opts
|> Keyword.put_new(:now, DateTime.now!(site.timezone))
|> Keyword.put_new(:match_day_of_week?, false)
def get_comparison_query(%Stats.Query{} = source_query, options) do
comparison_date_range = get_comparison_date_range(source_query, options)
new_range =
DateTimeRange.new!(
comparison_date_range.first,
comparison_date_range.last,
source_query.timezone
)
|> DateTimeRange.to_timezone("Etc/UTC")
source_query
|> Query.set(utc_time_range: new_range)
|> maybe_include_imported(source_query)
end
defp get_comparison_date_range(source_query, %{mode: "year_over_year"} = options) do
source_date_range = Query.date_range(source_query)
with :ok <- validate_mode(source_query, mode),
{:ok, comparison_date_range} <- get_comparison_date_range(source_date_range, mode, opts) do
new_range =
DateTimeRange.new!(comparison_date_range.first, comparison_date_range.last, site.timezone)
|> DateTimeRange.to_timezone("Etc/UTC")
comparison_query =
source_query
|> Query.set(utc_time_range: new_range)
|> maybe_include_imported(source_query)
{:ok, comparison_query}
end
end
defp get_comparison_date_range(source_date_range, "year_over_year", opts) do
now = Keyword.fetch!(opts, :now)
start_date = Date.add(source_date_range.first, -365)
end_date = earliest(source_date_range.last, now) |> Date.add(-365)
end_date = earliest(source_date_range.last, source_query.now) |> Date.add(-365)
comparison_date_range =
Date.range(start_date, end_date)
|> maybe_match_day_of_week(source_date_range, opts)
{:ok, comparison_date_range}
Date.range(start_date, end_date)
|> maybe_match_day_of_week(source_date_range, options)
end
defp get_comparison_date_range(source_date_range, "previous_period", opts) do
now = Keyword.fetch!(opts, :now)
defp get_comparison_date_range(source_query, %{mode: "previous_period"} = options) do
source_date_range = Query.date_range(source_query)
last = earliest(source_date_range.last, now)
last = earliest(source_date_range.last, source_query.now)
diff_in_days = Date.diff(source_date_range.first, last) - 1
new_first = Date.add(source_date_range.first, diff_in_days)
new_last = Date.add(last, diff_in_days)
comparison_date_range =
Date.range(new_first, new_last)
|> maybe_match_day_of_week(source_date_range, opts)
{:ok, comparison_date_range}
Date.range(new_first, new_last)
|> maybe_match_day_of_week(source_date_range, options)
end
defp get_comparison_date_range(_source_date_range, "custom", opts) do
with {:ok, from} <- opts |> Keyword.fetch!(:from) |> Date.from_iso8601(),
{:ok, to} <- opts |> Keyword.fetch!(:to) |> Date.from_iso8601(),
result when result in [:eq, :lt] <- Date.compare(from, to) do
{:ok, Date.range(from, to)}
else
_error -> {:error, :invalid_dates}
end
defp get_comparison_date_range(source_query, %{mode: "custom"} = options) do
DateTimeRange.to_date_range(options.date_range, source_query.timezone)
end
defp earliest(a, b) do
if Date.compare(a, b) in [:eq, :lt], do: a, else: b
end
defp maybe_match_day_of_week(comparison_date_range, source_date_range, opts) do
if Keyword.fetch!(opts, :match_day_of_week?) do
defp maybe_match_day_of_week(comparison_date_range, source_date_range, options) do
if options[:match_day_of_week] do
day_to_match = Date.day_of_week(source_date_range.first)
new_first =
@ -188,12 +152,4 @@ defmodule Plausible.Stats.Comparisons do
)
end
end
defp validate_mode(%Stats.Query{period: period}, mode) do
if mode in @modes && period not in @disallowed_periods do
:ok
else
{:error, :not_supported}
end
end
end

View File

@ -26,8 +26,8 @@ defmodule Plausible.Stats.EmailReport do
end
defp with_comparisons(stats, site, query, metrics) do
{:ok, prev_query} = Comparisons.compare(site, query, "previous_period")
prev_period_stats = Stats.aggregate(site, prev_query, metrics)
comparison_query = Comparisons.get_comparison_query(query, %{mode: "previous_period"})
prev_period_stats = Stats.aggregate(site, comparison_query, metrics)
stats
|> Enum.map(fn {metric, %{value: value}} ->

View File

@ -6,7 +6,8 @@ defmodule Plausible.Stats.Filters.QueryParser do
@default_include %{
imports: false,
time_labels: false,
total_rows: false
total_rows: false,
comparisons: nil
}
@default_pagination %{
@ -14,6 +15,8 @@ defmodule Plausible.Stats.Filters.QueryParser do
offset: 0
}
def default_include(), do: @default_include
def parse(site, schema_type, params, now \\ nil) when is_map(params) do
{now, date} =
if now do
@ -31,7 +34,7 @@ defmodule Plausible.Stats.Filters.QueryParser do
{:ok, filters} <- parse_filters(Map.get(params, "filters", [])),
{:ok, dimensions} <- parse_dimensions(Map.get(params, "dimensions", [])),
{:ok, order_by} <- parse_order_by(Map.get(params, "order_by")),
{:ok, include} <- parse_include(Map.get(params, "include", %{})),
{:ok, include} <- parse_include(site, Map.get(params, "include", %{})),
{:ok, pagination} <- parse_pagination(Map.get(params, "pagination", %{})),
preloaded_goals <- preload_goals_if_needed(site, filters, dimensions),
query = %{
@ -56,6 +59,14 @@ defmodule Plausible.Stats.Filters.QueryParser do
end
end
def parse_date_range_pair(site, [from, to]) when is_binary(from) and is_binary(to) do
with {:ok, date_range} <- date_range_from_date_strings(site, from, to) do
{:ok, date_range |> DateTimeRange.to_timezone("Etc/UTC")}
end
end
def parse_date_range_pair(_site, unknown), do: {:error, "Invalid date_range '#{i(unknown)}'."}
defp parse_metrics(metrics) when is_list(metrics) do
parse_list(metrics, &parse_metric/1)
end
@ -228,8 +239,7 @@ defmodule Plausible.Stats.Filters.QueryParser do
{:ok, DateTimeRange.new!(start_date, date, site.timezone)}
end
defp parse_time_range(site, [from, to], _date, _now)
when is_binary(from) and is_binary(to) do
defp parse_time_range(site, [from, to], _date, _now) when is_binary(from) and is_binary(to) do
case date_range_from_date_strings(site, from, to) do
{:ok, date_range} -> {:ok, date_range}
{:error, _} -> date_range_from_timestamps(from, to)
@ -313,16 +323,37 @@ defmodule Plausible.Stats.Filters.QueryParser do
defp parse_order_direction([_, "desc"]), do: {:ok, :desc}
defp parse_order_direction(entry), do: {:error, "Invalid order_by entry '#{i(entry)}'."}
defp parse_include(include) when is_map(include) do
{:ok, Map.merge(@default_include, atomize_keys(include))}
defp parse_include(site, include) when is_map(include) do
parsed =
include
|> atomize_keys()
|> update_comparisons_date_range(site)
with {:ok, include} <- parsed do
{:ok, Map.merge(@default_include, include)}
end
end
defp update_comparisons_date_range(%{comparisons: %{date_range: date_range}} = include, site) do
with {:ok, parsed_date_range} <- parse_date_range_pair(site, date_range) do
{:ok, put_in(include, [:comparisons, :date_range], parsed_date_range)}
end
end
defp update_comparisons_date_range(include, _site), do: {:ok, include}
defp parse_pagination(pagination) when is_map(pagination) do
{:ok, Map.merge(@default_pagination, atomize_keys(pagination))}
end
defp atomize_keys(map),
do: Map.new(map, fn {key, value} -> {String.to_existing_atom(key), value} end)
defp atomize_keys(map) when is_map(map) do
Map.new(map, fn {key, value} ->
key = String.to_existing_atom(key)
{key, atomize_keys(value)}
end)
end
defp atomize_keys(value), do: value
defp parse_filter_key_string(filter_key, error_message \\ "") do
case filter_key do

View File

@ -0,0 +1,162 @@
defmodule Plausible.Stats.Legacy.TimeOnPage do
  @moduledoc """
  Calculation methods for legacy time_on_page metric. Note the metric
  has its own limitations and quirks.
  Closely coupled with Plausible.Stats.QueryRunner.
  """

  use Plausible.ClickhouseRepo
  import Ecto.Query

  alias Plausible.Stats.{Base, Filters, Query, SQL, Util}

  @doc """
  Computes time_on_page for the given query, returning a map keyed by
  dimension values.

  Only two query shapes are handled:
  * no dimensions -> a single aggregate value, keyed by `[]`
  * `["event:page"]` dimension -> one value per page, keyed by `[page]`

  Any other shape (or a query without the :time_on_page metric) yields `%{}`.
  `ch_results` are the main query's clickhouse rows, used only to collect
  the set of pages to break down by.
  """
  def calculate(site, query, ch_results) do
    case {:time_on_page in query.metrics, query.dimensions} do
      {true, []} ->
        aggregate_time_on_page(site, query)

      {true, ["event:page"]} ->
        # Restrict the breakdown to pages present in the main query result.
        pages =
          Enum.map(ch_results, fn entry -> Map.get(entry, Util.shortname(query, "event:page")) end)

        breakdown_time_on_page(site, query, pages)

      _ ->
        %{}
    end
  end

  # Single aggregate time_on_page across all pages matching the query's
  # event:page filter. Returns %{[] => value} to match the dimension-keyed
  # result shape used by calculate/3.
  defp aggregate_time_on_page(site, query) do
    # Window query: for every event, look up the next event in the same
    # session (leadInFrame over a 1-row-following frame) to derive how long
    # the visitor stayed on the current pathname.
    windowed_pages_q =
      from e in Base.base_event_query(site, Query.remove_top_level_filters(query, ["event:page"])),
        where: e.name != "pageleave",
        select: %{
          next_timestamp: over(fragment("leadInFrame(?)", e.timestamp), :event_horizon),
          next_pathname: over(fragment("leadInFrame(?)", e.pathname), :event_horizon),
          timestamp: e.timestamp,
          pathname: e.pathname,
          session_id: e.session_id
        },
        windows: [
          event_horizon: [
            partition_by: e.session_id,
            order_by: e.timestamp,
            frame: fragment("ROWS BETWEEN CURRENT ROW AND 1 FOLLOWING")
          ]
        ]

    # The event:page filter was removed from the base query above so the
    # window can see transitions to/from other pages; re-apply it here.
    event_page_filter = Filters.get_toplevel_filter(query, "event:page")

    # next_timestamp == 0 marks the last event of a session (no lead row);
    # those rows carry no duration and are dropped.
    timed_page_transitions_q =
      from e in Ecto.Query.subquery(windowed_pages_q),
        group_by: [e.pathname, e.next_pathname, e.session_id],
        where: ^SQL.WhereBuilder.build_condition(:pathname, event_page_filter),
        where: e.next_timestamp != 0,
        select: %{
          pathname: e.pathname,
          transition: e.next_pathname != e.pathname,
          duration: sum(e.next_timestamp - e.timestamp)
        }

    # Per-page average: total duration divided by the number of transitions
    # away from the page (countIf of transition).
    avg_time_per_page_transition_q =
      from e in Ecto.Query.subquery(timed_page_transitions_q),
        select: %{avg: fragment("sum(?)/countIf(?)", e.duration, e.transition)},
        group_by: e.pathname

    # ifNotFinite guards against pages with zero transitions (division by 0).
    time_on_page_q =
      from e in Ecto.Query.subquery(avg_time_per_page_transition_q),
        select: fragment("avg(ifNotFinite(?,NULL))", e.avg)

    %{[] => ClickhouseRepo.one(time_on_page_q, query: query)}
  end

  # No pages in the main result -> nothing to compute.
  defp breakdown_time_on_page(_site, _query, []) do
    %{}
  end

  # Per-page time_on_page for the given list of pages. Returns a map of
  # [page] => value. Optionally merges in pre-aggregated imported stats.
  defp breakdown_time_on_page(site, query, pages) do
    import Ecto.Query

    # Same session-window technique as aggregate_time_on_page/2; event:props
    # filters are also removed here before re-filtering by page below.
    windowed_pages_q =
      from e in Base.base_event_query(
             site,
             Query.remove_top_level_filters(query, ["event:page", "event:props"])
           ),
        where: e.name != "pageleave",
        select: %{
          next_timestamp: over(fragment("leadInFrame(?)", e.timestamp), :event_horizon),
          next_pathname: over(fragment("leadInFrame(?)", e.pathname), :event_horizon),
          timestamp: e.timestamp,
          pathname: e.pathname,
          session_id: e.session_id
        },
        windows: [
          event_horizon: [
            partition_by: e.session_id,
            order_by: e.timestamp,
            frame: fragment("ROWS BETWEEN CURRENT ROW AND 1 FOLLOWING")
          ]
        ]

    timed_page_transitions_q =
      from e in subquery(windowed_pages_q),
        group_by: [e.pathname, e.next_pathname, e.session_id],
        where: e.pathname in ^pages,
        where: e.next_timestamp != 0,
        select: %{
          pathname: e.pathname,
          transition: e.next_pathname != e.pathname,
          duration: sum(e.next_timestamp - e.timestamp)
        }

    # Select clause is added per-branch below, depending on imports.
    no_select_timed_pages_q =
      from e in subquery(timed_page_transitions_q),
        group_by: e.pathname

    date_range = Query.date_range(query)

    timed_pages_q =
      if query.include_imported do
        # Imported page views have pre-calculated values
        imported_timed_pages_q =
          from i in "imported_pages",
            group_by: i.page,
            where: i.site_id == ^site.id,
            where: i.date >= ^date_range.first and i.date <= ^date_range.last,
            where: i.page in ^pages,
            select: %{
              page: i.page,
              time_on_page: sum(i.time_on_page),
              visits: sum(i.pageviews) - sum(i.exits)
            }

        timed_pages_q =
          from e in no_select_timed_pages_q,
            select: %{
              page: e.pathname,
              time_on_page: sum(e.duration),
              visits: fragment("countIf(?)", e.transition)
            }

        # FULL JOIN native and imported stats per page; a page may exist in
        # only one side, hence the if(empty(...)) page coalescing.
        "timed_pages"
        |> with_cte("timed_pages", as: ^timed_pages_q)
        |> with_cte("imported_timed_pages", as: ^imported_timed_pages_q)
        |> join(:full, [t], i in "imported_timed_pages", on: t.page == i.page)
        |> select(
          [t, i],
          {
            fragment("if(empty(?),?,?)", t.page, i.page, t.page),
            (t.time_on_page + i.time_on_page) / (t.visits + i.visits)
          }
        )
      else
        from e in no_select_timed_pages_q,
          select: {e.pathname, fragment("sum(?)/countIf(?)", e.duration, e.transition)}
      end

    timed_pages_q
    |> Plausible.ClickhouseRepo.all(query: query)
    # Wrap each page in a list to match the dimension-keyed result shape.
    |> Map.new(fn {path, value} -> {[path], value} end)
  end
end

View File

@ -18,11 +18,7 @@ defmodule Plausible.Stats.Query do
v2: false,
legacy_breakdown: false,
preloaded_goals: [],
include: %{
imports: false,
time_labels: false,
total_rows: false
},
include: Plausible.Stats.Filters.QueryParser.default_include(),
debug_metadata: %{},
pagination: nil
@ -76,6 +72,10 @@ defmodule Plausible.Stats.Query do
end
end
def set_include(query, key, value) do
struct!(query, include: Map.put(query.include, key, value))
end
def add_filter(query, filter) do
query
|> struct!(filters: query.filters ++ [filter])

View File

@ -33,7 +33,11 @@ defmodule Plausible.Stats.QueryOptimizer do
|> TableDecider.partition_metrics(query)
{
Query.set(query, metrics: event_metrics, include_imported: query.include_imported),
Query.set(query,
metrics: event_metrics,
include_imported: query.include_imported,
pagination: nil
),
split_sessions_query(query, sessions_metrics)
}
end
@ -164,7 +168,8 @@ defmodule Plausible.Stats.QueryOptimizer do
filters: filters,
metrics: session_metrics,
dimensions: dimensions,
include_imported: query.include_imported
include_imported: query.include_imported,
pagination: nil
)
end
end

View File

@ -7,26 +7,22 @@ defmodule Plausible.Stats.QueryResult do
produced by Jason.encode(query_result) is ordered.
"""
alias Plausible.Stats.{Util, Filters}
alias Plausible.Stats.DateTimeRange
defstruct results: [],
meta: %{},
query: nil
def from(results, site, query) do
results_list =
results
|> Enum.map(fn entry ->
%{
dimensions: Enum.map(query.dimensions, &dimension_label(&1, entry, query)),
metrics: Enum.map(query.metrics, &Map.get(entry, &1))
}
end)
@doc """
Builds full JSON-serializable query response.
`results` should already be built by Plausible.Stats.QueryRunner
"""
def from(results, site, query, meta_extra) do
struct!(
__MODULE__,
results: results_list,
meta: meta(query, results),
results: results,
meta: meta(query, meta_extra),
query:
Jason.OrderedObject.new(
site_id: site.domain,
@ -38,40 +34,18 @@ defmodule Plausible.Stats.QueryResult do
filters: query.filters,
dimensions: query.dimensions,
order_by: query.order_by |> Enum.map(&Tuple.to_list/1),
include: query.include |> Map.filter(fn {_key, val} -> val end),
include: include(query) |> Map.filter(fn {_key, val} -> val end),
pagination: query.pagination
)
)
end
defp dimension_label("event:goal", entry, query) do
{events, paths} = Filters.Utils.split_goals(query.preloaded_goals)
goal_index = Map.get(entry, Util.shortname(query, "event:goal"))
# Closely coupled logic with Plausible.Stats.SQL.Expression.event_goal_join/2
cond do
goal_index < 0 -> Enum.at(events, -goal_index - 1) |> Plausible.Goal.display_name()
goal_index > 0 -> Enum.at(paths, goal_index - 1) |> Plausible.Goal.display_name()
end
end
defp dimension_label("time:" <> _ = time_dimension, entry, query) do
datetime = Map.get(entry, Util.shortname(query, time_dimension))
Plausible.Stats.Time.format_datetime(datetime)
end
defp dimension_label(dimension, entry, query) do
Map.get(entry, Util.shortname(query, dimension))
end
@imports_unsupported_query_warning "Imported stats are not included in the results because query parameters are not supported. " <>
"For more information, see: https://plausible.io/docs/stats-api#filtering-imported-stats"
@imports_unsupported_interval_warning "Imported stats are not included because the time dimension (i.e. the interval) is too short."
defp meta(query, results) do
defp meta(query, meta_extra) do
%{
imports_included: if(query.include.imports, do: query.include_imported, else: nil),
imports_skip_reason:
@ -86,14 +60,25 @@ defmodule Plausible.Stats.QueryResult do
end,
time_labels:
if(query.include.time_labels, do: Plausible.Stats.Time.time_labels(query), else: nil),
total_rows: if(query.include.total_rows, do: total_rows(results), else: nil)
total_rows: if(query.include.total_rows, do: meta_extra.total_rows, else: nil)
}
|> Enum.reject(fn {_, value} -> is_nil(value) end)
|> Enum.into(%{})
end
defp total_rows([]), do: 0
defp total_rows([first_row | _rest]), do: first_row.total_rows
defp include(query) do
case get_in(query.include, [:comparisons, :date_range]) do
%DateTimeRange{first: first, last: last} ->
query.include
|> put_in([:comparisons, :date_range], [
to_iso8601(first, query.timezone),
to_iso8601(last, query.timezone)
])
nil ->
query.include
end
end
defp to_iso8601(datetime, timezone) do
datetime

View File

@ -0,0 +1,251 @@
defmodule Plausible.Stats.QueryRunner do
  @moduledoc """
  This module is responsible for executing a Plausible.Stats.Query
  and gathering results.

  Some secondary responsibilities are:
  1. Dealing with comparison queries and combining their results with the main query's
  2. Dealing with time-on-page
  3. Passing total_rows from clickhouse to QueryResult meta
  """
  use Plausible.ClickhouseRepo

  alias Plausible.Stats.{
    Comparisons,
    Compare,
    QueryOptimizer,
    QueryResult,
    Legacy,
    Filters,
    SQL,
    Util,
    Time
  }

  # Pipeline accumulator for run/2: each add_*/execute_*/build_* step below
  # reads fields filled in by earlier steps and fills in one or more of its own.
  defstruct [
    :query,
    :site,
    :comparison_query,
    :comparison_results,
    :main_results_list,
    :ch_results,
    :meta_extra,
    :time_lookup,
    :results_list
  ]

  @doc """
  Optimizes and executes `query` (plus its comparison query, if
  `query.include.comparisons` is set) against ClickHouse and returns a
  `Plausible.Stats.QueryResult` built from the merged results.
  """
  def run(site, query) do
    optimized_query = QueryOptimizer.optimize(query)

    run_results =
      %__MODULE__{query: optimized_query, site: site}
      |> add_comparison_query()
      |> execute_comparison()
      |> execute_main_query()
      |> add_meta_extra()
      |> add_time_lookup()
      |> build_results_list()

    QueryResult.from(run_results.results_list, site, optimized_query, run_results.meta_extra)
  end

  # Derives the comparison query from `query.include.comparisons` options.
  # Only matches when comparisons were requested (the options are a map).
  defp add_comparison_query(%__MODULE__{query: query} = run_results)
       when is_map(query.include.comparisons) do
    comparison_query = Comparisons.get_comparison_query(query, query.include.comparisons)
    struct!(run_results, comparison_query: comparison_query)
  end

  defp add_comparison_query(run_results), do: run_results

  # Runs the comparison query (if one was built) and stores its rows in the
  # same `%{dimensions: ..., metrics: ...}` shape as the main results.
  defp execute_comparison(
         %__MODULE__{comparison_query: comparison_query, site: site} = run_results
       ) do
    if comparison_query do
      {ch_results, time_on_page} = execute_query(comparison_query, site)

      comparison_results =
        build_from_ch(
          ch_results,
          comparison_query,
          time_on_page
        )

      struct!(run_results, comparison_results: comparison_results)
    else
      run_results
    end
  end

  # When the query has a time dimension AND a comparison query, builds a map
  # from each main-query time label to the positionally-corresponding
  # comparison-query time label (via Enum.zip). Used to look up which
  # comparison row matches a given main-query time bucket.
  defp add_time_lookup(run_results) do
    time_lookup =
      if Time.time_dimension(run_results.query) && run_results.comparison_query do
        Enum.zip(
          Time.time_labels(run_results.query),
          Time.time_labels(run_results.comparison_query)
        )
        |> Map.new()
      else
        %{}
      end

    struct!(run_results, time_lookup: time_lookup)
  end

  # Runs the main query; keeps the raw ClickHouse rows around as well
  # (add_meta_extra/1 reads total_rows off them).
  defp execute_main_query(%__MODULE__{query: query, site: site} = run_results) do
    {ch_results, time_on_page} = execute_query(query, site)

    struct!(
      run_results,
      main_results_list: build_from_ch(ch_results, query, time_on_page),
      ch_results: ch_results
    )
  end

  # Extra metadata forwarded to QueryResult: pre-pagination row count when
  # `include.total_rows` was requested, nil otherwise.
  defp add_meta_extra(%__MODULE__{query: query, ch_results: ch_results} = run_results) do
    struct!(run_results,
      meta_extra: %{
        total_rows: if(query.include.total_rows, do: total_rows(ch_results), else: nil)
      }
    )
  end

  # Final results: for a single time dimension, pad out missing time buckets
  # with zero rows first; then attach comparison data to every row.
  defp build_results_list(
         %__MODULE__{query: query, main_results_list: main_results_list} = run_results
       ) do
    results_list =
      case query.dimensions do
        ["time:" <> _] -> main_results_list |> add_empty_timeseries_rows(run_results)
        _ -> main_results_list
      end
      |> merge_with_comparison_results(run_results)

    struct!(run_results, results_list: results_list)
  end

  # Builds SQL for `query`, runs it, and computes the legacy time-on-page
  # metric separately (it is not produced by the main ClickHouse query).
  defp execute_query(query, site) do
    ch_results =
      query
      |> SQL.QueryBuilder.build(site)
      |> ClickhouseRepo.all(query: query)

    time_on_page = Legacy.TimeOnPage.calculate(site, query, ch_results)

    {ch_results, time_on_page}
  end

  # Converts raw ClickHouse rows into `%{dimensions: [...], metrics: [...]}`
  # entries, resolving dimension labels and merging in time-on-page values.
  defp build_from_ch(ch_results, query, time_on_page) do
    ch_results
    |> Enum.map(fn entry ->
      dimensions = Enum.map(query.dimensions, &dimension_label(&1, entry, query))

      %{
        dimensions: dimensions,
        metrics: Enum.map(query.metrics, &get_metric(entry, &1, dimensions, time_on_page))
      }
    end)
  end

  # Resolves the stored goal index back to a goal display name.
  # Negative indexes address event goals, positive indexes address page goals.
  defp dimension_label("event:goal", entry, query) do
    {events, paths} = Filters.Utils.split_goals(query.preloaded_goals)

    goal_index = Map.get(entry, Util.shortname(query, "event:goal"))

    # Closely coupled logic with SQL.Expression.event_goal_join/2
    # NOTE(review): there is no clause for goal_index == 0 — a zero value
    # would raise CondClauseError. Presumably never emitted by the join;
    # confirm against SQL.Expression.event_goal_join/2.
    cond do
      goal_index < 0 -> Enum.at(events, -goal_index - 1) |> Plausible.Goal.display_name()
      goal_index > 0 -> Enum.at(paths, goal_index - 1) |> Plausible.Goal.display_name()
    end
  end

  # Time dimensions are formatted into label strings (see Time.format_datetime/1).
  defp dimension_label("time:" <> _ = time_dimension, entry, query) do
    datetime = Map.get(entry, Util.shortname(query, time_dimension))
    Time.format_datetime(datetime)
  end

  # All other dimensions are returned as stored in the result row.
  defp dimension_label(dimension, entry, query) do
    Map.get(entry, Util.shortname(query, dimension))
  end

  # time_on_page is computed separately (keyed by dimensions), all other
  # metrics come straight from the ClickHouse row.
  defp get_metric(_entry, :time_on_page, dimensions, time_on_page),
    do: Map.get(time_on_page, dimensions)

  defp get_metric(entry, metric, _dimensions, _time_on_page), do: Map.get(entry, metric)

  # Special case: If comparison and single time dimension, add 0 rows - otherwise
  # comparisons would not be shown for timeseries with 0 values.
  defp add_empty_timeseries_rows(results_list, %__MODULE__{query: query})
       when is_map(query.include.comparisons) do
    indexed_results = index_by_dimensions(results_list)

    empty_timeseries_rows =
      Time.time_labels(query)
      |> Enum.reject(fn dimension_value -> Map.has_key?(indexed_results, [dimension_value]) end)
      |> Enum.map(fn dimension_value ->
        %{
          metrics: empty_metrics(query),
          dimensions: [dimension_value]
        }
      end)

    results_list ++ empty_timeseries_rows
  end

  defp add_empty_timeseries_rows(results_list, _), do: results_list

  # Attaches comparison data to each main-query row. No-op (identity per row)
  # when comparisons were not requested — see add_comparison_results/4 clauses.
  defp merge_with_comparison_results(results_list, run_results) do
    comparison_map = (run_results.comparison_results || []) |> index_by_dimensions()
    time_lookup = run_results.time_lookup || %{}

    Enum.map(
      results_list,
      &add_comparison_results(&1, run_results.query, comparison_map, time_lookup)
    )
  end

  # Adds a `:comparison` key to the row, containing the comparison query's
  # dimensions, metric values and per-metric change percentages.
  defp add_comparison_results(row, query, comparison_map, time_lookup)
       when is_map(query.include.comparisons) do
    dimensions = get_comparison_dimensions(row.dimensions, query, time_lookup)
    comparison_metrics = get_comparison_metrics(comparison_map, dimensions, query)

    change =
      Enum.zip([query.metrics, row.metrics, comparison_metrics])
      |> Enum.map(fn {metric, metric_value, comparison_value} ->
        Compare.calculate_change(metric, comparison_value, metric_value)
      end)

    Map.merge(row, %{
      comparison: %{
        dimensions: dimensions,
        metrics: comparison_metrics,
        change: change
      }
    })
  end

  defp add_comparison_results(row, _, _, _), do: row

  # Translates a main-query row's dimension values into the comparison query's
  # coordinate space: time values are mapped through time_lookup, all other
  # dimension values are shared as-is.
  defp get_comparison_dimensions(dimensions, query, time_lookup) do
    query.dimensions
    |> Enum.zip(dimensions)
    |> Enum.map(fn
      {"time:" <> _, value} -> time_lookup[value]
      {_, value} -> value
    end)
  end

  # Indexes result entries by their dimensions list for O(1) lookup.
  defp index_by_dimensions(results_list) do
    results_list
    |> Map.new(fn entry -> {entry.dimensions, entry.metrics} end)
  end

  # Missing comparison rows default to all-zero metrics so change
  # calculation still works (e.g. rows present only in the main period).
  defp get_comparison_metrics(comparison_map, dimensions, query) do
    Map.get_lazy(comparison_map, dimensions, fn -> empty_metrics(query) end)
  end

  defp empty_metrics(query), do: List.duplicate(0, length(query.metrics))

  # `total_rows` is emitted on every ClickHouse row (window over the full
  # unpaginated result), so reading it off the first row suffices.
  defp total_rows([]), do: 0
  defp total_rows([first_row | _rest]), do: first_row.total_rows
end

View File

@ -32,13 +32,13 @@ defmodule Plausible.Stats.Time do
# Realtime graphs return numbers
def format_datetime(other), do: other
@doc """
Returns list of time bucket labels for the given query.
"""
def time_dimension(query) do
Enum.find(query.dimensions, &String.starts_with?(&1, "time"))
end
@doc """
Returns list of time bucket labels for the given query.
"""
def time_labels(query) do
time_labels_for_dimension(time_dimension(query), query)
end

View File

@ -7,7 +7,7 @@ defmodule Plausible.Stats.Timeseries do
use Plausible
use Plausible.ClickhouseRepo
alias Plausible.Stats.{Query, QueryOptimizer, QueryResult, SQL}
alias Plausible.Stats.{Comparisons, Query, QueryRunner, Time}
@time_dimension %{
"month" => "time:month",
@ -25,48 +25,54 @@ defmodule Plausible.Stats.Timeseries do
{nil, metrics}
end
query_with_metrics =
query =
Query.set(
query,
metrics: transform_metrics(metrics, %{conversion_rate: :group_conversion_rate}),
dimensions: [time_dimension(query)],
order_by: [{time_dimension(query), :asc}],
v2: true,
include: %{time_labels: true, imports: query.include.imports, total_rows: false}
v2: true
)
|> QueryOptimizer.optimize()
q = SQL.QueryBuilder.build(query_with_metrics, site)
comparison_query =
if(query.include.comparisons,
do: Comparisons.get_comparison_query(query, query.include.comparisons),
else: nil
)
query_result =
q
|> ClickhouseRepo.all(query: query)
|> QueryResult.from(site, query_with_metrics)
query_result = QueryRunner.run(site, query)
timeseries_result =
query_result
|> build_timeseries_result(query_with_metrics, currency)
|> transform_keys(%{group_conversion_rate: :conversion_rate})
{timeseries_result, query_result.meta}
{
build_result(query_result, query, currency, fn entry -> entry end),
build_result(query_result, comparison_query, currency, fn entry -> entry.comparison end),
query_result.meta
}
end
defp time_dimension(query), do: Map.fetch!(@time_dimension, query.interval)
defp build_timeseries_result(query_result, query, currency) do
results_map =
query_result.results
|> Enum.map(fn %{dimensions: [time_dimension_value], metrics: entry_metrics} ->
metrics_map = Enum.zip(query.metrics, entry_metrics) |> Enum.into(%{})
# Given a query result, build a legacy timeseries result
# Format is %{ date => %{ date: date_string, [metric] => value } } with a bunch of special cases for the UI
defp build_result(query_result, %Query{} = query, currency, extract_entry) do
query_result.results
|> Enum.map(&extract_entry.(&1))
|> Enum.map(fn %{dimensions: [time_dimension_value], metrics: metrics} ->
metrics_map = Enum.zip(query.metrics, metrics) |> Map.new()
{
time_dimension_value,
Map.put(metrics_map, :date, time_dimension_value)
}
end)
|> Enum.into(%{})
{
time_dimension_value,
Map.put(metrics_map, :date, time_dimension_value)
}
end)
|> Map.new()
|> add_labels(query, currency)
end
query_result.meta.time_labels
defp build_result(_, _, _, _), do: nil
defp add_labels(results_map, query, currency) do
query
|> Time.time_labels()
|> Enum.map(fn key ->
Map.get(
results_map,
@ -76,6 +82,7 @@ defmodule Plausible.Stats.Timeseries do
|> cast_revenue_metrics_to_money(currency)
end)
|> transform_realtime_labels(query)
|> transform_keys(%{group_conversion_rate: :conversion_rate})
end
defp empty_row(date, metrics) do

View File

@ -3,36 +3,6 @@ defmodule Plausible.Stats.Util do
Utilities for modifying stat results
"""
@manually_removable_metrics [
:__internal_visits,
:visitors,
:total_visitors
]
@doc """
Sometimes we need to manually add metrics in order to calculate the value for
other metrics. E.g:
* `__internal_visits` is fetched when querying bounce rate, visit duration,
or views_per_visit, as it is needed to calculate these from imported data.
* `visitors` metric might be added manually via `maybe_add_visitors_metric/1`,
in order to be able to calculate conversion rate.
This function can be used for stripping those metrics from a breakdown (list),
or an aggregate (map) result. We do not want to return metrics that we're not
requested.
"""
def keep_requested_metrics(results, requested_metrics) when is_list(results) do
Enum.map(results, fn results_map ->
keep_requested_metrics(results_map, requested_metrics)
end)
end
def keep_requested_metrics(results, requested_metrics) do
Map.drop(results, @manually_removable_metrics -- requested_metrics)
end
@doc """
This function adds the `visitors` metric into the list of
given metrics if it's not already there and if it is needed

View File

@ -22,11 +22,11 @@ defmodule PlausibleWeb.Api.ExternalStatsController do
:ok <- ensure_custom_props_access(site, query) do
results =
if params["compare"] == "previous_period" do
{:ok, prev_query} = Comparisons.compare(site, query, "previous_period")
comparison_query = Comparisons.get_comparison_query(query, %{mode: "previous_period"})
[prev_result, curr_result] =
Plausible.ClickhouseRepo.parallel_tasks([
fn -> Plausible.Stats.aggregate(site, prev_query, metrics) end,
fn -> Plausible.Stats.aggregate(site, comparison_query, metrics) end,
fn -> Plausible.Stats.aggregate(site, query, metrics) end
])
@ -273,7 +273,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController do
:ok <- validate_filters(site, query.filters),
{:ok, metrics} <- parse_and_validate_metrics(params, query),
:ok <- ensure_custom_props_access(site, query) do
{results, meta} = Plausible.Stats.timeseries(site, query, metrics)
{results, _, meta} = Plausible.Stats.timeseries(site, query, metrics)
payload =
case meta[:imports_warning] do

View File

@ -105,20 +105,11 @@ defmodule PlausibleWeb.Api.StatsController do
:ok <- validate_interval(params),
:ok <- validate_interval_granularity(site, params, dates),
params <- realtime_period_to_30m(params),
query = Query.from(site, params, debug_metadata(conn)),
query =
Query.from(site, params, debug_metadata(conn))
|> Query.set_include(:comparisons, parse_comparison_options(site, params)),
{:ok, metric} <- parse_and_validate_graph_metric(params, query) do
{timeseries_result, _meta} = Stats.timeseries(site, query, [metric])
comparison_opts = parse_comparison_opts(params)
comparison_result =
case Comparisons.compare(site, query, params["comparison"], comparison_opts) do
{:ok, comparison_query} ->
Stats.timeseries(site, comparison_query, [metric]) |> elem(0)
{:error, :not_supported} ->
nil
end
{timeseries_result, comparison_result, _meta} = Stats.timeseries(site, query, [metric])
labels = label_timeseries(timeseries_result, comparison_result)
present_index = present_index_for(site, query, labels)
@ -207,17 +198,12 @@ defmodule PlausibleWeb.Api.StatsController do
params = realtime_period_to_30m(params)
query = Query.from(site, params, debug_metadata(conn))
query =
Query.from(site, params, debug_metadata(conn))
|> Query.set_include(:comparisons, parse_comparison_options(site, params))
comparison_opts = parse_comparison_opts(params)
comparison_query =
case Stats.Comparisons.compare(site, query, params["comparison"], comparison_opts) do
{:ok, query} -> query
{:error, _cause} -> nil
end
{top_stats, sample_percent} = fetch_top_stats(site, query, comparison_query)
{top_stats, sample_percent} = fetch_top_stats(site, query)
comparison_query = comparison_query(site, query, params)
json(conn, %{
top_stats: top_stats,
@ -311,25 +297,27 @@ defmodule PlausibleWeb.Api.StatsController do
end
end
defp fetch_top_stats(site, query, comparison_query) do
defp fetch_top_stats(site, query) do
goal_filter? = Filters.filtering_on_dimension?(query, "event:goal")
cond do
query.period == "30m" && goal_filter? ->
fetch_goal_realtime_top_stats(site, query, comparison_query)
fetch_goal_realtime_top_stats(site, query)
query.period == "30m" ->
fetch_realtime_top_stats(site, query, comparison_query)
fetch_realtime_top_stats(site, query)
goal_filter? ->
fetch_goal_top_stats(site, query, comparison_query)
fetch_goal_top_stats(site, query)
true ->
fetch_other_top_stats(site, query, comparison_query)
fetch_other_top_stats(site, query)
end
end
defp fetch_goal_realtime_top_stats(site, query, _comparison_query) do
defp fetch_goal_realtime_top_stats(site, query) do
query = Query.set_include(query, :comparisons, nil)
%{
visitors: %{value: unique_conversions},
events: %{value: total_conversions}
@ -355,7 +343,9 @@ defmodule PlausibleWeb.Api.StatsController do
{stats, 100}
end
defp fetch_realtime_top_stats(site, query, _comparison_query) do
defp fetch_realtime_top_stats(site, query) do
query = Query.set_include(query, :comparisons, nil)
%{
visitors: %{value: visitors},
pageviews: %{value: pageviews}
@ -381,36 +371,35 @@ defmodule PlausibleWeb.Api.StatsController do
{stats, 100}
end
defp fetch_goal_top_stats(site, query, comparison_query) do
defp fetch_goal_top_stats(site, query) do
metrics =
[:total_visitors, :visitors, :events, :conversion_rate] ++ @revenue_metrics
results = Stats.aggregate(site, query, metrics)
comparison = if comparison_query, do: Stats.aggregate(site, comparison_query, metrics)
[
top_stats_entry(results, comparison, "Unique visitors", :total_visitors),
top_stats_entry(results, comparison, "Unique conversions", :visitors, graphable?: true),
top_stats_entry(results, comparison, "Total conversions", :events, graphable?: true),
top_stats_entry(results, "Unique visitors", :total_visitors),
top_stats_entry(results, "Unique conversions", :visitors, graphable?: true),
top_stats_entry(results, "Total conversions", :events, graphable?: true),
on_ee do
top_stats_entry(results, comparison, "Average revenue", :average_revenue,
top_stats_entry(results, "Average revenue", :average_revenue,
formatter: &format_money/1,
graphable?: true
)
end,
on_ee do
top_stats_entry(results, comparison, "Total revenue", :total_revenue,
top_stats_entry(results, "Total revenue", :total_revenue,
formatter: &format_money/1,
graphable?: true
)
end,
top_stats_entry(results, comparison, "Conversion rate", :conversion_rate, graphable?: true)
top_stats_entry(results, "Conversion rate", :conversion_rate, graphable?: true)
]
|> Enum.reject(&is_nil/1)
|> then(&{&1, 100})
end
defp fetch_other_top_stats(site, query, comparison_query) do
defp fetch_other_top_stats(site, query) do
page_filter? = Filters.filtering_on_dimension?(query, "event:page")
metrics = [:visitors, :visits, :pageviews, :sample_percent]
@ -423,27 +412,16 @@ defmodule PlausibleWeb.Api.StatsController do
end
current_results = Stats.aggregate(site, query, metrics)
prev_results = comparison_query && Stats.aggregate(site, comparison_query, metrics)
stats =
[
top_stats_entry(current_results, prev_results, "Unique visitors", :visitors,
graphable?: true
),
top_stats_entry(current_results, prev_results, "Total visits", :visits, graphable?: true),
top_stats_entry(current_results, prev_results, "Total pageviews", :pageviews,
graphable?: true
),
top_stats_entry(current_results, prev_results, "Views per visit", :views_per_visit,
graphable?: true
),
top_stats_entry(current_results, prev_results, "Bounce rate", :bounce_rate,
graphable?: true
),
top_stats_entry(current_results, prev_results, "Visit duration", :visit_duration,
graphable?: true
),
top_stats_entry(current_results, prev_results, "Time on page", :time_on_page,
top_stats_entry(current_results, "Unique visitors", :visitors, graphable?: true),
top_stats_entry(current_results, "Total visits", :visits, graphable?: true),
top_stats_entry(current_results, "Total pageviews", :pageviews, graphable?: true),
top_stats_entry(current_results, "Views per visit", :views_per_visit, graphable?: true),
top_stats_entry(current_results, "Bounce rate", :bounce_rate, graphable?: true),
top_stats_entry(current_results, "Visit duration", :visit_duration, graphable?: true),
top_stats_entry(current_results, "Time on page", :time_on_page,
formatter: fn
nil -> 0
value -> value
@ -455,14 +433,14 @@ defmodule PlausibleWeb.Api.StatsController do
{stats, current_results[:sample_percent][:value]}
end
defp top_stats_entry(current_results, prev_results, name, key, opts \\ []) do
defp top_stats_entry(current_results, name, key, opts \\ []) do
if current_results[key] do
formatter = Keyword.get(opts, :formatter, & &1)
value = get_in(current_results, [key, :value])
%{name: name, value: formatter.(value)}
|> maybe_put_graph_metric(opts, key)
|> maybe_put_comparison(prev_results, key, value, formatter)
|> maybe_put_comparison(current_results, key, formatter)
end
end
@ -474,11 +452,11 @@ defmodule PlausibleWeb.Api.StatsController do
end
end
defp maybe_put_comparison(entry, prev_results, key, value, formatter) do
if prev_results do
prev_value = get_in(prev_results, [key, :value])
change = Stats.Compare.calculate_change(key, prev_value, value)
defp maybe_put_comparison(entry, results, key, formatter) do
prev_value = get_in(results, [key, :comparison_value])
change = get_in(results, [key, :change])
if prev_value do
entry
|> Map.put(:comparison_value, formatter.(prev_value))
|> Map.put(:change, change)
@ -1551,14 +1529,41 @@ defmodule PlausibleWeb.Api.StatsController do
|> halt()
end
defp parse_comparison_opts(params) do
[
from: params["compare_from"],
to: params["compare_to"],
match_day_of_week?: params["match_day_of_week"] == "true"
]
def comparison_query(site, query, params) do
options = parse_comparison_options(site, params)
if options do
Comparisons.get_comparison_query(query, options)
end
end
def parse_comparison_options(_site, %{"period" => period}) when period in ~w(realtime all),
do: nil
def parse_comparison_options(_site, %{"comparison" => mode} = params)
when mode in ["previous_period", "year_over_year"] do
%{
mode: mode,
match_day_of_week: params["match_day_of_week"] == "true"
}
end
def parse_comparison_options(site, %{"comparison" => "custom"} = params) do
{:ok, date_range} =
Filters.QueryParser.parse_date_range_pair(site, [
params["compare_from"],
params["compare_to"]
])
%{
mode: "custom",
date_range: date_range,
match_day_of_week: params["match_day_of_week"] == "true"
}
end
def parse_comparison_options(_site, _options), do: nil
defp includes_imported?(source_query, comparison_query) do
cond do
source_query.include_imported -> true

View File

@ -62,6 +62,55 @@
"type": "boolean",
"default": false,
"description": "If set, returns the total number of result rows before pagination under `meta.total_rows`"
},
"comparisons": {
"$comment": "only :internal",
"type": "object",
"oneOf": [
{
"properties": {
"mode": {
"type": "string",
"enum": ["previous_period", "year_over_year"]
},
"match_day_of_week": {
"type": "boolean",
"default": false,
"description": "If set and using time:day dimensions, day-of-week of comparison query is matched"
}
},
"required": ["mode"],
"additionalProperties": false
},
{
"properties": {
"mode": {
"const": "custom"
},
"match_day_of_week": {
"type": "boolean",
"default": false,
"description": "If set and using time:day dimensions, day-of-week of comparison query is matched"
},
"date_range": {
"type": "array",
"additionalItems": false,
"minItems": 2,
"maxItems": 2,
"items": {
"type": "string",
"format": "date"
},
"description": "Only used when mode is \"custom\": a list of two ISO8601 dates or timestamps to compare against.",
"examples": [
["2024-01-01", "2024-01-31"]
]
}
},
"required": ["mode", "date_range"],
"additionalProperties": false
}
]
}
}
},

View File

@ -1,147 +1,174 @@
defmodule Plausible.Stats.ComparisonsTest do
use Plausible.DataCase
alias Plausible.Stats.{Query, Comparisons}
alias Plausible.Stats.{DateTimeRange, Query, Comparisons}
import Plausible.TestUtils
def build_query(site, params, now) do
query = Query.from(site, params)
Map.put(query, :now, now)
end
describe "with period set to this month" do
test "shifts back this month period when mode is previous_period" do
site = insert(:site)
query = Query.from(site, %{"period" => "month", "date" => "2023-03-02"})
now = ~N[2023-03-02 14:00:00]
{:ok, comparison} = Comparisons.compare(site, query, "previous_period", now: now)
query =
build_query(site, %{"period" => "month", "date" => "2023-03-02"}, ~N[2023-03-02 14:00:00])
assert comparison.utc_time_range.first == ~U[2023-02-27 00:00:00Z]
assert comparison.utc_time_range.last == ~U[2023-02-28 23:59:59Z]
comparison_query = Comparisons.get_comparison_query(query, %{mode: "previous_period"})
assert comparison_query.utc_time_range.first == ~U[2023-02-27 00:00:00Z]
assert comparison_query.utc_time_range.last == ~U[2023-02-28 23:59:59Z]
end
test "shifts back this month period when it's the first day of the month and mode is previous_period" do
site = insert(:site)
query = Query.from(site, %{"period" => "month", "date" => "2023-03-01"})
now = ~N[2023-03-01 14:00:00]
{:ok, comparison} = Comparisons.compare(site, query, "previous_period", now: now)
query =
build_query(site, %{"period" => "month", "date" => "2023-03-01"}, ~N[2023-03-01 14:00:00])
assert comparison.utc_time_range.first == ~U[2023-02-28 00:00:00Z]
assert comparison.utc_time_range.last == ~U[2023-02-28 23:59:59Z]
comparison_query = Comparisons.get_comparison_query(query, %{mode: "previous_period"})
assert comparison_query.utc_time_range.first == ~U[2023-02-28 00:00:00Z]
assert comparison_query.utc_time_range.last == ~U[2023-02-28 23:59:59Z]
end
test "matches the day of the week when nearest day is original query start date and mode is previous_period" do
site = insert(:site)
query = Query.from(site, %{"period" => "month", "date" => "2023-03-02"})
now = ~N[2023-03-02 14:00:00]
{:ok, comparison} =
Comparisons.compare(site, query, "previous_period", now: now, match_day_of_week?: true)
query =
build_query(site, %{"period" => "month", "date" => "2023-03-02"}, ~N[2023-03-02 14:00:00])
assert comparison.utc_time_range.first == ~U[2023-02-22 00:00:00Z]
assert comparison.utc_time_range.last == ~U[2023-02-23 23:59:59Z]
comparison_query =
Comparisons.get_comparison_query(query, %{
mode: "previous_period",
match_day_of_week: true
})
assert comparison_query.utc_time_range.first == ~U[2023-02-22 00:00:00Z]
assert comparison_query.utc_time_range.last == ~U[2023-02-23 23:59:59Z]
end
test "custom time zone sets timezone to UTC" do
site = insert(:site, timezone: "US/Eastern")
query = Query.from(site, %{"period" => "month", "date" => "2023-03-02"})
now = ~N[2023-03-02 14:00:00]
{:ok, comparison} = Comparisons.compare(site, query, "previous_period", now: now)
query =
build_query(site, %{"period" => "month", "date" => "2023-03-02"}, ~N[2023-03-02 14:00:00])
assert comparison.utc_time_range.first == ~U[2023-02-27 05:00:00Z]
assert comparison.utc_time_range.last == ~U[2023-03-01 04:59:59Z]
comparison_query = Comparisons.get_comparison_query(query, %{mode: "previous_period"})
assert comparison_query.utc_time_range.first == ~U[2023-02-27 05:00:00Z]
assert comparison_query.utc_time_range.last == ~U[2023-03-01 04:59:59Z]
end
end
describe "with period set to previous month" do
test "shifts back using the same number of days when mode is previous_period" do
site = insert(:site)
query = Query.from(site, %{"period" => "month", "date" => "2023-02-01"})
now = ~N[2023-03-01 14:00:00]
{:ok, comparison} = Comparisons.compare(site, query, "previous_period", now: now)
query =
build_query(site, %{"period" => "month", "date" => "2023-02-01"}, ~N[2023-03-01 14:00:00])
assert comparison.utc_time_range.first == ~U[2023-01-04 00:00:00Z]
assert comparison.utc_time_range.last == ~U[2023-01-31 23:59:59Z]
comparison_query = Comparisons.get_comparison_query(query, %{mode: "previous_period"})
assert comparison_query.utc_time_range.first == ~U[2023-01-04 00:00:00Z]
assert comparison_query.utc_time_range.last == ~U[2023-01-31 23:59:59Z]
end
test "shifts back the full month when mode is year_over_year" do
site = insert(:site)
query = Query.from(site, %{"period" => "month", "date" => "2023-02-01"})
now = ~N[2023-03-01 14:00:00]
{:ok, comparison} = Comparisons.compare(site, query, "year_over_year", now: now)
query =
build_query(site, %{"period" => "month", "date" => "2023-02-01"}, ~N[2023-03-01 14:00:00])
assert comparison.utc_time_range.first == ~U[2022-02-01 00:00:00Z]
assert comparison.utc_time_range.last == ~U[2022-02-28 23:59:59Z]
comparison_query = Comparisons.get_comparison_query(query, %{mode: "year_over_year"})
assert comparison_query.utc_time_range.first == ~U[2022-02-01 00:00:00Z]
assert comparison_query.utc_time_range.last == ~U[2022-02-28 23:59:59Z]
end
test "shifts back whole month plus one day when mode is year_over_year and a leap year" do
site = insert(:site)
query = Query.from(site, %{"period" => "month", "date" => "2020-02-01"})
now = ~N[2023-03-01 14:00:00]
{:ok, comparison} = Comparisons.compare(site, query, "year_over_year", now: now)
query =
build_query(site, %{"period" => "month", "date" => "2020-02-01"}, ~N[2023-03-01 14:00:00])
assert comparison.utc_time_range.first == ~U[2019-02-01 00:00:00Z]
assert comparison.utc_time_range.last == ~U[2019-03-01 23:59:59Z]
comparison_query = Comparisons.get_comparison_query(query, %{mode: "year_over_year"})
assert comparison_query.utc_time_range.first == ~U[2019-02-01 00:00:00Z]
assert comparison_query.utc_time_range.last == ~U[2019-03-01 23:59:59Z]
end
test "matches the day of the week when mode is previous_period keeping the same day" do
site = insert(:site)
query = Query.from(site, %{"period" => "month", "date" => "2023-02-01"})
now = ~N[2023-03-01 14:00:00]
{:ok, comparison} =
Comparisons.compare(site, query, "previous_period", now: now, match_day_of_week?: true)
query =
build_query(site, %{"period" => "month", "date" => "2023-02-01"}, ~N[2023-03-01 14:00:00])
assert comparison.utc_time_range.first == ~U[2023-01-04 00:00:00Z]
assert comparison.utc_time_range.last == ~U[2023-01-31 23:59:59Z]
comparison_query =
Comparisons.get_comparison_query(query, %{
mode: "previous_period",
match_day_of_week: true
})
assert comparison_query.utc_time_range.first == ~U[2023-01-04 00:00:00Z]
assert comparison_query.utc_time_range.last == ~U[2023-01-31 23:59:59Z]
end
test "matches the day of the week when mode is previous_period" do
site = insert(:site)
query = Query.from(site, %{"period" => "month", "date" => "2023-01-01"})
now = ~N[2023-03-01 14:00:00]
{:ok, comparison} =
Comparisons.compare(site, query, "previous_period", now: now, match_day_of_week?: true)
query =
build_query(site, %{"period" => "month", "date" => "2023-01-01"}, ~N[2023-03-01 14:00:00])
assert comparison.utc_time_range.first == ~U[2022-12-04 00:00:00Z]
assert comparison.utc_time_range.last == ~U[2023-01-03 23:59:59Z]
comparison_query =
Comparisons.get_comparison_query(query, %{
mode: "previous_period",
match_day_of_week: true
})
assert comparison_query.utc_time_range.first == ~U[2022-12-04 00:00:00Z]
assert comparison_query.utc_time_range.last == ~U[2023-01-03 23:59:59Z]
end
end
describe "with period set to year to date" do
test "shifts back by the same number of days when mode is previous_period" do
site = insert(:site)
query = Query.from(site, %{"period" => "year", "date" => "2023-03-01"})
now = ~N[2023-03-01 14:00:00]
{:ok, comparison} = Comparisons.compare(site, query, "previous_period", now: now)
query =
build_query(site, %{"period" => "year", "date" => "2023-03-01"}, ~N[2023-03-01 14:00:00])
assert comparison.utc_time_range.first == ~U[2022-11-02 00:00:00Z]
assert comparison.utc_time_range.last == ~U[2022-12-31 23:59:59Z]
comparison_query = Comparisons.get_comparison_query(query, %{mode: "previous_period"})
assert comparison_query.utc_time_range.first == ~U[2022-11-02 00:00:00Z]
assert comparison_query.utc_time_range.last == ~U[2022-12-31 23:59:59Z]
end
test "shifts back by the same number of days when mode is year_over_year" do
site = insert(:site)
query = Query.from(site, %{"period" => "year", "date" => "2023-03-01"})
now = ~N[2023-03-01 14:00:00]
{:ok, comparison} = Comparisons.compare(site, query, "year_over_year", now: now)
query =
build_query(site, %{"period" => "year", "date" => "2023-03-01"}, ~N[2023-03-01 14:00:00])
assert comparison.utc_time_range.first == ~U[2022-01-01 00:00:00Z]
assert comparison.utc_time_range.last == ~U[2022-03-01 23:59:59Z]
comparison_query = Comparisons.get_comparison_query(query, %{mode: "year_over_year"})
assert comparison_query.utc_time_range.first == ~U[2022-01-01 00:00:00Z]
assert comparison_query.utc_time_range.last == ~U[2022-03-01 23:59:59Z]
end
test "matches the day of the week when mode is year_over_year" do
site = insert(:site)
query = Query.from(site, %{"period" => "year", "date" => "2023-03-01"})
now = ~N[2023-03-01 14:00:00]
{:ok, comparison} =
Comparisons.compare(site, query, "year_over_year", now: now, match_day_of_week?: true)
query =
build_query(site, %{"period" => "year", "date" => "2023-03-01"}, ~N[2023-03-01 14:00:00])
assert comparison.utc_time_range.first == ~U[2022-01-02 00:00:00Z]
assert comparison.utc_time_range.last == ~U[2022-03-02 23:59:59Z]
comparison_query =
Comparisons.get_comparison_query(query, %{mode: "year_over_year", match_day_of_week: true})
assert comparison_query.utc_time_range.first == ~U[2022-01-02 00:00:00Z]
assert comparison_query.utc_time_range.last == ~U[2022-03-02 23:59:59Z]
end
end
@ -150,20 +177,20 @@ defmodule Plausible.Stats.ComparisonsTest do
site = insert(:site)
query = Query.from(site, %{"period" => "year", "date" => "2022-03-02"})
{:ok, comparison} = Comparisons.compare(site, query, "year_over_year")
comparison_query = Comparisons.get_comparison_query(query, %{mode: "year_over_year"})
assert comparison.utc_time_range.first == ~U[2021-01-01 00:00:00Z]
assert comparison.utc_time_range.last == ~U[2021-12-31 23:59:59Z]
assert comparison_query.utc_time_range.first == ~U[2021-01-01 00:00:00Z]
assert comparison_query.utc_time_range.last == ~U[2021-12-31 23:59:59Z]
end
test "shifts back a whole year when mode is previous_period" do
site = insert(:site)
query = Query.from(site, %{"period" => "year", "date" => "2022-03-02"})
{:ok, comparison} = Comparisons.compare(site, query, "previous_period")
comparison_query = Comparisons.get_comparison_query(query, %{mode: "previous_period"})
assert comparison.utc_time_range.first == ~U[2021-01-01 00:00:00Z]
assert comparison.utc_time_range.last == ~U[2021-12-31 23:59:59Z]
assert comparison_query.utc_time_range.first == ~U[2021-01-01 00:00:00Z]
assert comparison_query.utc_time_range.last == ~U[2021-12-31 23:59:59Z]
end
end
@@ -172,20 +199,20 @@ defmodule Plausible.Stats.ComparisonsTest do
site = insert(:site)
query = Query.from(site, %{"period" => "custom", "date" => "2023-01-01,2023-01-07"})
{:ok, comparison} = Comparisons.compare(site, query, "previous_period")
comparison_query = Comparisons.get_comparison_query(query, %{mode: "previous_period"})
assert comparison.utc_time_range.first == ~U[2022-12-25 00:00:00Z]
assert comparison.utc_time_range.last == ~U[2022-12-31 23:59:59Z]
assert comparison_query.utc_time_range.first == ~U[2022-12-25 00:00:00Z]
assert comparison_query.utc_time_range.last == ~U[2022-12-31 23:59:59Z]
end
test "shifts back to last year when mode is year_over_year" do
site = insert(:site)
query = Query.from(site, %{"period" => "custom", "date" => "2023-01-01,2023-01-07"})
{:ok, comparison} = Comparisons.compare(site, query, "year_over_year")
comparison_query = Comparisons.get_comparison_query(query, %{mode: "year_over_year"})
assert comparison.utc_time_range.first == ~U[2022-01-01 00:00:00Z]
assert comparison.utc_time_range.last == ~U[2022-01-07 23:59:59Z]
assert comparison_query.utc_time_range.first == ~U[2022-01-01 00:00:00Z]
assert comparison_query.utc_time_range.last == ~U[2022-01-07 23:59:59Z]
end
end
@@ -194,22 +221,14 @@ defmodule Plausible.Stats.ComparisonsTest do
site = insert(:site)
query = Query.from(site, %{"period" => "custom", "date" => "2023-01-01,2023-01-07"})
{:ok, comparison} =
Comparisons.compare(site, query, "custom", from: "2022-05-25", to: "2022-05-30")
comparison_query =
Comparisons.get_comparison_query(query, %{
mode: "custom",
date_range: DateTimeRange.new!(~U[2022-05-25 00:00:00Z], ~U[2022-05-30 23:59:59Z])
})
assert comparison.utc_time_range.first == ~U[2022-05-25 00:00:00Z]
assert comparison.utc_time_range.last == ~U[2022-05-30 23:59:59Z]
end
test "validates from and to dates" do
site = insert(:site)
query = Query.from(site, %{"period" => "custom", "date" => "2023-01-01,2023-01-07"})
assert {:error, :invalid_dates} ==
Comparisons.compare(site, query, "custom", from: "2022-05-41", to: "2022-05-30")
assert {:error, :invalid_dates} ==
Comparisons.compare(site, query, "custom", from: "2022-05-30", to: "2022-05-25")
assert comparison_query.utc_time_range.first == ~U[2022-05-25 00:00:00Z]
assert comparison_query.utc_time_range.last == ~U[2022-05-30 23:59:59Z]
end
end
@@ -220,7 +239,7 @@ defmodule Plausible.Stats.ComparisonsTest do
query = Query.from(site, %{"period" => "day", "date" => "2023-01-01"})
assert query.include_imported == false
{:ok, comparison_query} = Comparisons.compare(site, query, "previous_period")
comparison_query = Comparisons.get_comparison_query(query, %{mode: "previous_period"})
assert comparison_query.include_imported == false
end
end

View File

@@ -68,7 +68,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: [],
order_by: nil,
timezone: site.timezone,
include: %{imports: false, time_labels: false, total_rows: false},
include: %{imports: false, time_labels: false, total_rows: false, comparisons: nil},
pagination: %{limit: 10_000, offset: 0},
preloaded_goals: []
}
@@ -91,7 +91,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: [],
order_by: nil,
timezone: site.timezone,
include: %{imports: false, time_labels: false, total_rows: false},
include: %{imports: false, time_labels: false, total_rows: false, comparisons: nil},
pagination: %{limit: 10_000, offset: 0},
preloaded_goals: []
})
@@ -133,7 +133,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: [],
order_by: nil,
timezone: site.timezone,
include: %{imports: false, time_labels: false, total_rows: false},
include: %{imports: false, time_labels: false, total_rows: false, comparisons: nil},
pagination: %{limit: 10_000, offset: 0},
preloaded_goals: []
},
@@ -200,7 +200,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: [],
order_by: nil,
timezone: site.timezone,
include: %{imports: false, time_labels: false, total_rows: false},
include: %{imports: false, time_labels: false, total_rows: false, comparisons: nil},
pagination: %{limit: 10_000, offset: 0},
preloaded_goals: []
},
@@ -326,7 +326,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: [],
order_by: nil,
timezone: site.timezone,
include: %{imports: false, time_labels: false, total_rows: false},
include: %{imports: false, time_labels: false, total_rows: false, comparisons: nil},
pagination: %{limit: 10_000, offset: 0},
preloaded_goals: []
})
@@ -352,7 +352,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: [],
order_by: nil,
timezone: site.timezone,
include: %{imports: false, time_labels: false, total_rows: false},
include: %{imports: false, time_labels: false, total_rows: false, comparisons: nil},
pagination: %{limit: 10_000, offset: 0},
preloaded_goals: []
})
@@ -379,7 +379,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: [],
order_by: nil,
timezone: site.timezone,
include: %{imports: false, time_labels: false, total_rows: false},
include: %{imports: false, time_labels: false, total_rows: false, comparisons: nil},
pagination: %{limit: 10_000, offset: 0},
preloaded_goals: []
})
@@ -446,7 +446,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: [],
order_by: nil,
timezone: site.timezone,
include: %{imports: false, time_labels: false, total_rows: false},
include: %{imports: false, time_labels: false, total_rows: false, comparisons: nil},
pagination: %{limit: 10_000, offset: 0},
preloaded_goals: []
})
@@ -466,7 +466,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: [],
order_by: nil,
timezone: site.timezone,
include: %{imports: false, time_labels: false, total_rows: false},
include: %{imports: false, time_labels: false, total_rows: false, comparisons: nil},
pagination: %{limit: 10_000, offset: 0},
preloaded_goals: []
})
@@ -527,7 +527,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: [],
order_by: nil,
timezone: site.timezone,
include: %{imports: false, time_labels: false, total_rows: false},
include: %{imports: false, time_labels: false, total_rows: false, comparisons: nil},
pagination: %{limit: 10_000, offset: 0},
preloaded_goals: []
})
@@ -575,7 +575,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: [],
order_by: nil,
timezone: site.timezone,
include: %{imports: false, time_labels: false, total_rows: false},
include: %{imports: false, time_labels: false, total_rows: false, comparisons: nil},
pagination: %{limit: 10_000, offset: 0},
preloaded_goals: []
})
@@ -611,7 +611,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: ["time"],
order_by: nil,
timezone: site.timezone,
include: %{imports: true, time_labels: true, total_rows: true},
include: %{imports: true, time_labels: true, total_rows: true, comparisons: nil},
pagination: %{limit: 10_000, offset: 0},
preloaded_goals: []
})
@@ -638,6 +638,150 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
end
end
describe "include.comparisons" do
test "not allowed in public API", %{site: site} do
%{
"site_id" => site.domain,
"metrics" => ["visitors"],
"date_range" => "all",
"include" => %{"comparisons" => %{"mode" => "previous_period"}}
}
|> check_error(
site,
"#/include/comparisons: Schema does not allow additional properties."
)
end
test "mode=previous_period", %{site: site} do
%{
"site_id" => site.domain,
"metrics" => ["visitors"],
"date_range" => "all",
"include" => %{"comparisons" => %{"mode" => "previous_period"}}
}
|> check_success(
site,
%{
metrics: [:visitors],
utc_time_range: @date_range_day,
filters: [],
dimensions: [],
order_by: nil,
timezone: site.timezone,
include: %{
comparisons: %{
mode: "previous_period"
},
imports: false,
time_labels: false,
total_rows: false
},
pagination: %{limit: 10_000, offset: 0},
preloaded_goals: []
},
:internal
)
end
test "mode=year_over_year", %{site: site} do
%{
"site_id" => site.domain,
"metrics" => ["visitors"],
"date_range" => "all",
"include" => %{"comparisons" => %{"mode" => "year_over_year"}}
}
|> check_success(
site,
%{
metrics: [:visitors],
utc_time_range: @date_range_day,
filters: [],
dimensions: [],
order_by: nil,
timezone: site.timezone,
include: %{
comparisons: %{
mode: "year_over_year"
},
imports: false,
time_labels: false,
total_rows: false
},
pagination: %{limit: 10_000, offset: 0},
preloaded_goals: []
},
:internal
)
end
test "mode=custom", %{site: site} do
%{
"site_id" => site.domain,
"metrics" => ["visitors"],
"date_range" => "all",
"include" => %{
"comparisons" => %{"mode" => "custom", "date_range" => ["2021-04-05", "2021-05-05"]}
}
}
|> check_success(
site,
%{
metrics: [:visitors],
utc_time_range: @date_range_day,
filters: [],
dimensions: [],
order_by: nil,
timezone: site.timezone,
include: %{
comparisons: %{
mode: "custom",
date_range: @date_range_30d
},
imports: false,
time_labels: false,
total_rows: false
},
pagination: %{limit: 10_000, offset: 0},
preloaded_goals: []
},
:internal
)
end
test "mode=custom without date_range is invalid", %{site: site} do
%{
"site_id" => site.domain,
"metrics" => ["visitors"],
"date_range" => "all",
"include" => %{"comparisons" => %{"mode" => "custom"}}
}
|> check_error(
site,
"#/include/comparisons: Expected exactly one of the schemata to match, but none of them did.",
:internal
)
end
test "mode=previous_period with date_range is invalid", %{site: site} do
%{
"site_id" => site.domain,
"metrics" => ["visitors"],
"date_range" => "all",
"include" => %{
"comparisons" => %{
"mode" => "previous_period",
"date_range" => ["2024-01-01", "2024-01-31"]
}
}
}
|> check_error(
site,
"#/include/comparisons: Expected exactly one of the schemata to match, but none of them did.",
:internal
)
end
end
describe "pagination validation" do
test "setting pagination values", %{site: site} do
%{
@@ -654,7 +798,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: ["time"],
order_by: nil,
timezone: site.timezone,
include: %{imports: false, time_labels: false, total_rows: false},
include: %{imports: false, time_labels: false, total_rows: false, comparisons: nil},
pagination: %{limit: 100, offset: 200},
preloaded_goals: []
})
@@ -707,7 +851,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: [],
order_by: nil,
timezone: ^expected_timezone,
include: %{imports: false, time_labels: false, total_rows: false},
include: %{imports: false, time_labels: false, total_rows: false, comparisons: nil},
pagination: %{limit: 10_000, offset: 0},
preloaded_goals: [
%Plausible.Goal{page_path: "/thank-you"},
@@ -980,7 +1124,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: ["event:#{unquote(dimension)}"],
order_by: nil,
timezone: site.timezone,
include: %{imports: false, time_labels: false, total_rows: false},
include: %{imports: false, time_labels: false, total_rows: false, comparisons: nil},
pagination: %{limit: 10_000, offset: 0},
preloaded_goals: []
})
@@ -1002,7 +1146,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: ["visit:#{unquote(dimension)}"],
order_by: nil,
timezone: site.timezone,
include: %{imports: false, time_labels: false, total_rows: false},
include: %{imports: false, time_labels: false, total_rows: false, comparisons: nil},
pagination: %{limit: 10_000, offset: 0},
preloaded_goals: []
})
@@ -1023,7 +1167,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: ["event:props:foobar"],
order_by: nil,
timezone: site.timezone,
include: %{imports: false, time_labels: false, total_rows: false},
include: %{imports: false, time_labels: false, total_rows: false, comparisons: nil},
pagination: %{limit: 10_000, offset: 0},
preloaded_goals: []
})
@@ -1085,7 +1229,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: [],
order_by: [{:events, :desc}, {:visitors, :asc}],
timezone: site.timezone,
include: %{imports: false, time_labels: false, total_rows: false},
include: %{imports: false, time_labels: false, total_rows: false, comparisons: nil},
pagination: %{limit: 10_000, offset: 0},
preloaded_goals: []
})
@@ -1106,7 +1250,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: ["event:name"],
order_by: [{"event:name", :desc}],
timezone: site.timezone,
include: %{imports: false, time_labels: false, total_rows: false},
include: %{imports: false, time_labels: false, total_rows: false, comparisons: nil},
pagination: %{limit: 10_000, offset: 0},
preloaded_goals: []
})
@@ -1257,7 +1401,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: ["event:goal"],
order_by: nil,
timezone: site.timezone,
include: %{imports: false, time_labels: false, total_rows: false},
include: %{imports: false, time_labels: false, total_rows: false, comparisons: nil},
pagination: %{limit: 10_000, offset: 0},
preloaded_goals: []
})
@@ -1341,7 +1485,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: ["visit:device"],
order_by: nil,
timezone: site.timezone,
include: %{imports: false, time_labels: false, total_rows: false},
include: %{imports: false, time_labels: false, total_rows: false, comparisons: nil},
pagination: %{limit: 10_000, offset: 0},
preloaded_goals: []
})
@@ -1374,7 +1518,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: ["event:page"],
order_by: nil,
timezone: site.timezone,
include: %{imports: false, time_labels: false, total_rows: false},
include: %{imports: false, time_labels: false, total_rows: false, comparisons: nil},
pagination: %{limit: 10_000, offset: 0},
preloaded_goals: []
})
@@ -1394,7 +1538,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: [],
order_by: nil,
timezone: site.timezone,
include: %{imports: false, time_labels: false, total_rows: false},
include: %{imports: false, time_labels: false, total_rows: false, comparisons: nil},
pagination: %{limit: 10_000, offset: 0},
preloaded_goals: []
})

View File

@@ -34,7 +34,7 @@ defmodule Plausible.Stats.QueryResultTest do
query = QueryOptimizer.optimize(query)
query_result_json =
QueryResult.from([], site, query)
QueryResult.from([], site, query, %{})
|> Jason.encode!(pretty: true)
|> String.replace(site.domain, "dummy.site")