APIv2: TimeSeries using QueryBuilder, release experimental_session_count (#4305)

* Move fragments module under Plausible.Stats.SQL

* Introduce select_merge_as macro

This simplifies some select_merge calls

* Simplify select_join_fields

* Remove a needless dynamic

* wrap_select_columns macro

* Move metrics from base.ex to expression.ex

* Move WhereBuilder under Plausible.Stats.SQL

* Moduledoc

* Improved macros

* Wrap more code

* select_merge_as more

* Move defp to the end

* include.time_labels parsing

* include.time_labels in result

Note that the previous implementation of the labels from TimeSeries.ex was broken

* Apply consistent function in imports and timeseries.ex

* Remove boilerplate

* WIP: Limited support for timeseries-with-querybuilder

* time:week dimension

* cleanup: property -> dimension

* Make querying with time series work

* Refactor: Move special metrics (percentage, conversion rate) to own module

* Explicitly format datetimes

* Consistent include_imported in special metrics

* Solve week-related crash

* conversion_rate hacking

* Keep include_imported consistent after splitting the query

* Simplify do_decide_tables

* Handle time dimensions in imports cleaner

* Allow time dimensions in custom property queries

* time:week handling continued

* cast_revenue_metrics_to_money

* fix `full_intervals` support

* Handle minute/realtime graphs

* experimental_session_count? with timeseries

This becomes required as we try to include visits from sessions by default

* Support hourly data in imports

* Update bounce_rate in more csv tests

* Update some time-series query tests

* Fix for meta.warning being included incorrectly

* Simplify imported.ex

* experimental_session_count flag removal

* moduledoc

* Split interval and time modules
This commit is contained in:
Karl-Aksel Puulmann 2024-07-09 14:25:02 +03:00 committed by GitHub
parent 0da3517502
commit a181f3eab3
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
33 changed files with 991 additions and 824 deletions

View File

@ -14,6 +14,7 @@ All notable changes to this project will be documented in this file.
### Changed
- Realtime and hourly graphs now show visits lasting their whole duration instead when specific events occur
- Increase hourly request limit for API keys in CE from 600 to 1000000 (practically removing the limit) plausible/analytics#4200
- Make TCP connections try IPv6 first with IPv4 fallback in CE plausible/analytics#4245
- `is` and `is not` filters in dashboard no longer support wildcards. Use contains/does not contain filter instead.

View File

@ -38,7 +38,6 @@ export function serializeQuery(query, extraQuery = []) {
if (query.from) { queryObj.from = formatISO(query.from) }
if (query.to) { queryObj.to = formatISO(query.to) }
if (query.filters) { queryObj.filters = serializeApiFilters(query.filters) }
if (query.experimental_session_count) { queryObj.experimental_session_count = query.experimental_session_count }
if (query.with_imported) { queryObj.with_imported = query.with_imported }
if (SHARED_LINK_AUTH) { queryObj.auth = SHARED_LINK_AUTH }

View File

@ -42,7 +42,6 @@ export function parseQuery(querystring, site) {
to: q.get('to') ? dayjs.utc(q.get('to')) : undefined,
match_day_of_week: matchDayOfWeek == 'true',
with_imported: q.get('with_imported') ? q.get('with_imported') === 'true' : true,
experimental_session_count: q.get('experimental_session_count'),
filters: parseJsonUrl(q.get('filters'), []),
labels: parseJsonUrl(q.get('labels'), {})
}

View File

@ -139,84 +139,4 @@ defmodule Plausible.Stats.Base do
"^#{escaped}$"
end
defp total_visitors(site, query) do
base_event_query(site, query)
|> select([e],
total_visitors: fragment("toUInt64(round(uniq(?) * any(_sample_factor)))", e.user_id)
)
end
# `total_visitors_subquery` returns a subquery which selects `total_visitors` -
# the number used as the denominator in the calculation of `conversion_rate` and
# `percentage` metrics.
# Usually, when calculating the totals, a new query is passed into this function,
# where certain filters (e.g. goal, props) are removed. That might make the query
# able to include imported data. However, we always want to include imported data
# only if it's included in the base query - otherwise the total will be based on
# a different data set, making the metric inaccurate. This is why we're using an
# explicit `include_imported` argument here.
def total_visitors_subquery(site, query, include_imported)
def total_visitors_subquery(site, query, true = _include_imported) do
wrap_alias([], %{
total_visitors:
subquery(total_visitors(site, query)) +
subquery(Plausible.Stats.Imported.total_imported_visitors(site, query))
})
end
def total_visitors_subquery(site, query, false = _include_imported) do
wrap_alias([], %{
total_visitors: subquery(total_visitors(site, query))
})
end
def add_percentage_metric(q, site, query, metrics) do
if :percentage in metrics do
total_query = Query.set_dimensions(query, [])
q
|> select_merge_as([], total_visitors_subquery(site, total_query, query.include_imported))
|> select_merge_as([], %{
percentage:
fragment(
"if(? > 0, round(? / ? * 100, 1), null)",
selected_as(:total_visitors),
selected_as(:visitors),
selected_as(:total_visitors)
)
})
else
q
end
end
# Adds conversion_rate metric to query, calculated as
# X / Y where Y is the same breakdown value without goal or props
# filters.
def maybe_add_conversion_rate(q, site, query, metrics) do
if :conversion_rate in metrics do
total_query =
query
|> Query.remove_filters(["event:goal", "event:props"])
|> Query.set_dimensions([])
# :TRICKY: Subquery is used due to event:goal breakdown above doing an UNION ALL
subquery(q)
|> select_merge_as([], total_visitors_subquery(site, total_query, query.include_imported))
|> select_merge_as([e], %{
conversion_rate:
fragment(
"if(? > 0, round(? / ? * 100, 1), 0)",
selected_as(:total_visitors),
e.visitors,
selected_as(:total_visitors)
)
})
else
q
end
end
end

View File

@ -163,21 +163,21 @@ defmodule Plausible.Stats.Comparisons do
end
defp maybe_include_imported(query, source_query) do
requested? = source_query.imported_data_requested
requested? = source_query.include.imports
case Query.ensure_include_imported(query, requested?) do
:ok ->
struct!(query,
imported_data_requested: true,
include_imported: true,
skip_imported_reason: nil
skip_imported_reason: nil,
include: Map.put(query.include, :imports, true)
)
{:error, reason} ->
struct!(query,
imported_data_requested: requested?,
include_imported: false,
skip_imported_reason: reason
skip_imported_reason: reason,
include: Map.put(query.include, :imports, requested?)
)
end
end

View File

@ -83,8 +83,8 @@ defmodule Plausible.Stats.Filters do
def parse(_), do: []
def without_prefix(property) do
property
def without_prefix(dimension) do
dimension
|> String.split(":")
|> List.last()
|> String.to_existing_atom()

View File

@ -6,6 +6,11 @@ defmodule Plausible.Stats.Filters.QueryParser do
alias Plausible.Stats.Query
alias Plausible.Stats.Metrics
@default_include %{
imports: false,
time_labels: false
}
def parse(site, params, now \\ nil) when is_map(params) do
with {:ok, metrics} <- parse_metrics(Map.get(params, "metrics", [])),
{:ok, filters} <- parse_filters(Map.get(params, "filters", [])),
@ -22,13 +27,14 @@ defmodule Plausible.Stats.Filters.QueryParser do
dimensions: dimensions,
order_by: order_by,
timezone: site.timezone,
imported_data_requested: Map.get(include, :imports, false),
preloaded_goals: preloaded_goals
preloaded_goals: preloaded_goals,
include: include
},
:ok <- validate_order_by(query),
:ok <- validate_goal_filters(query),
:ok <- validate_custom_props_access(site, query),
:ok <- validate_metrics(query) do
:ok <- validate_metrics(query),
:ok <- validate_include(query) do
{:ok, query}
end
end
@ -219,6 +225,7 @@ defmodule Plausible.Stats.Filters.QueryParser do
defp parse_time("time"), do: {:ok, "time"}
defp parse_time("time:hour"), do: {:ok, "time:hour"}
defp parse_time("time:day"), do: {:ok, "time:day"}
defp parse_time("time:week"), do: {:ok, "time:week"}
defp parse_time("time:month"), do: {:ok, "time:month"}
defp parse_time(_), do: :error
@ -226,9 +233,24 @@ defmodule Plausible.Stats.Filters.QueryParser do
defp parse_order_direction([_, "desc"]), do: {:ok, :desc}
defp parse_order_direction(entry), do: {:error, "Invalid order_by entry '#{inspect(entry)}'"}
defp parse_include(%{"imports" => value}) when is_boolean(value), do: {:ok, %{imports: value}}
defp parse_include(%{}), do: {:ok, %{}}
defp parse_include(include), do: {:error, "Invalid include passed '#{inspect(include)}'"}
defp parse_include(include) when is_map(include) do
with {:ok, parsed_include_list} <- parse_list(include, &parse_include_value/1) do
include = Map.merge(@default_include, Enum.into(parsed_include_list, %{}))
{:ok, include}
end
end
defp parse_include(entry), do: {:error, "Invalid include passed '#{inspect(entry)}'"}
defp parse_include_value({"imports", value}) when is_boolean(value),
do: {:ok, {:imports, value}}
defp parse_include_value({"time_labels", value}) when is_boolean(value),
do: {:ok, {:time_labels, value}}
defp parse_include_value({key, value}),
do: {:error, "Invalid include entry '#{inspect(%{key => value})}'"}
defp parse_filter_key_string(filter_key, error_message \\ "") do
case filter_key do
@ -386,7 +408,7 @@ defmodule Plausible.Stats.Filters.QueryParser do
end
end
def event_dimensions_not_allowing_session_metrics?(dimensions) do
defp event_dimensions_not_allowing_session_metrics?(dimensions) do
Enum.any?(dimensions, fn
"event:page" -> false
"event:" <> _ -> true
@ -394,6 +416,16 @@ defmodule Plausible.Stats.Filters.QueryParser do
end)
end
defp validate_include(query) do
time_dimension? = Enum.any?(query.dimensions, &String.starts_with?(&1, "time"))
if query.include.time_labels and not time_dimension? do
{:error, "Invalid include.time_labels: requires a time dimension"}
else
:ok
end
end
defp parse_list(list, parser_function) do
Enum.reduce_while(list, {:ok, []}, fn value, {:ok, results} ->
case parser_function.(value) do

View File

@ -29,11 +29,19 @@ defmodule Plausible.Stats.Imported.Base do
"event:page" => "imported_pages",
"event:name" => "imported_custom_events",
# NOTE: these properties can be only filtered by
# NOTE: these dimensions can be only filtered by
"visit:screen" => "imported_devices",
"event:hostname" => "imported_pages"
"event:hostname" => "imported_pages",
# NOTE: These dimensions are only used in group by
"time:month" => "imported_visitors",
"time:week" => "imported_visitors",
"time:day" => "imported_visitors",
"time:hour" => "imported_visitors"
}
@queriable_time_dimensions ["time:month", "time:week", "time:day", "time:hour"]
@imported_custom_props Imported.imported_custom_props()
@db_field_mappings %{
@ -121,9 +129,10 @@ defmodule Plausible.Stats.Imported.Base do
do_decide_custom_prop_table(query, dimension)
end
@queriable_custom_prop_dimensions ["event:goal", "event:name"] ++ @queriable_time_dimensions
defp do_decide_custom_prop_table(%{dimensions: dimensions} = query) do
if dimensions == [] or
(length(dimensions) == 1 and hd(dimensions) in ["event:goal", "event:name"]) do
(length(dimensions) == 1 and hd(dimensions) in @queriable_custom_prop_dimensions) do
custom_prop_filters =
query.filters
|> Enum.map(&Enum.at(&1, 1))
@ -169,14 +178,6 @@ defmodule Plausible.Stats.Imported.Base do
["imported_pages", "imported_custom_events"]
end
defp do_decide_tables(%Query{filters: [], dimensions: [dimension]}) do
if Map.has_key?(@property_to_table_mappings, dimension) do
[@property_to_table_mappings[dimension]]
else
[]
end
end
defp do_decide_tables(%Query{filters: filters, dimensions: ["event:goal"]}) do
filter_props = Enum.map(filters, &Enum.at(&1, 1))
@ -197,13 +198,15 @@ defmodule Plausible.Stats.Imported.Base do
filters
|> Enum.map(fn [_, filter_key | _] -> filter_key end)
|> Enum.concat(dimensions)
|> Enum.map(fn
"visit:screen" -> "visit:device"
dimension -> dimension
|> Enum.reject(&(&1 in @queriable_time_dimensions))
|> Enum.flat_map(fn
"visit:screen" -> ["visit:device"]
dimension -> [dimension]
end)
|> Enum.map(&@property_to_table_mappings[&1])
case Enum.uniq(table_candidates) do
[] -> ["imported_visitors"]
[nil] -> []
[candidate] -> [candidate]
_ -> []

View File

@ -16,9 +16,6 @@ defmodule Plausible.Stats.Imported do
@property_to_table_mappings Imported.Base.property_to_table_mappings()
@imported_dimensions Map.keys(@property_to_table_mappings) ++
Plausible.Imported.imported_custom_props()
@goals_with_url Plausible.Imported.goals_with_url()
def goals_with_url(), do: @goals_with_url
@ -225,47 +222,6 @@ defmodule Plausible.Stats.Imported do
{table, db_field}
end
def merge_imported_timeseries(native_q, _, %Plausible.Stats.Query{include_imported: false}, _),
do: native_q
def merge_imported_timeseries(
native_q,
site,
query,
metrics
) do
imported_q =
site
|> Imported.Base.query_imported(query)
|> select_imported_metrics(metrics)
|> apply_interval(query, site)
from(s in Ecto.Query.subquery(native_q),
full_join: i in subquery(imported_q),
on: s.date == i.date,
select: %{date: fragment("greatest(?, ?)", s.date, i.date)}
)
|> select_joined_metrics(metrics)
end
defp apply_interval(imported_q, %Plausible.Stats.Query{interval: "month"}, _site) do
imported_q
|> group_by([i], fragment("toStartOfMonth(?)", i.date))
|> select_merge([i], %{date: fragment("toStartOfMonth(?)", i.date)})
end
defp apply_interval(imported_q, %Plausible.Stats.Query{interval: "week"} = query, _site) do
imported_q
|> group_by([i], weekstart_not_before(i.date, ^query.date_range.first))
|> select_merge([i], %{date: weekstart_not_before(i.date, ^query.date_range.first)})
end
defp apply_interval(imported_q, _query, _site) do
imported_q
|> group_by([i], i.date)
|> select_merge([i], %{date: i.date})
end
def merge_imported(q, _, %Query{include_imported: false}, _), do: q
def merge_imported(q, site, %Query{dimensions: []} = query, metrics) do
@ -320,8 +276,8 @@ defmodule Plausible.Stats.Imported do
end)
end
def merge_imported(q, site, %Query{dimensions: dimensions} = query, metrics) do
if merge_imported_dimensions?(dimensions) do
def merge_imported(q, site, query, metrics) do
if schema_supports_query?(query) do
imported_q =
site
|> Imported.Base.query_imported(query)
@ -341,13 +297,6 @@ defmodule Plausible.Stats.Imported do
end
end
def merge_imported(q, _, _, _), do: q
defp merge_imported_dimensions?(dimensions) do
dimensions in [["visit:browser", "visit:browser_version"], ["visit:os", "visit:os_version"]] or
(length(dimensions) == 1 and hd(dimensions) in @imported_dimensions)
end
def total_imported_visitors(site, query) do
site
|> Imported.Base.query_imported(query)
@ -552,11 +501,11 @@ defmodule Plausible.Stats.Imported do
Enum.reduce(query.dimensions, q, fn dimension, q ->
dim = Plausible.Stats.Filters.without_prefix(dimension)
group_imported_by(q, dim, shortname(query, dimension))
group_imported_by(q, dim, shortname(query, dimension), query)
end)
end
defp group_imported_by(q, dim, key) when dim in [:source, :referrer] do
defp group_imported_by(q, dim, key, _query) when dim in [:source, :referrer] do
q
|> group_by([i], field(i, ^dim))
|> select_merge_as([i], %{
@ -564,7 +513,7 @@ defmodule Plausible.Stats.Imported do
})
end
defp group_imported_by(q, dim, key)
defp group_imported_by(q, dim, key, _query)
when dim in [:utm_source, :utm_medium, :utm_campaign, :utm_term, :utm_content] do
q
|> group_by([i], field(i, ^dim))
@ -572,34 +521,34 @@ defmodule Plausible.Stats.Imported do
|> select_merge_as([i], %{key => field(i, ^dim)})
end
defp group_imported_by(q, :page, key) do
defp group_imported_by(q, :page, key, _query) do
q
|> group_by([i], i.page)
|> select_merge_as([i], %{key => i.page, time_on_page: sum(i.time_on_page)})
end
defp group_imported_by(q, :country, key) do
defp group_imported_by(q, :country, key, _query) do
q
|> group_by([i], i.country)
|> where([i], i.country != "ZZ")
|> select_merge_as([i], %{key => i.country})
end
defp group_imported_by(q, :region, key) do
defp group_imported_by(q, :region, key, _query) do
q
|> group_by([i], i.region)
|> where([i], i.region != "")
|> select_merge_as([i], %{key => i.region})
end
defp group_imported_by(q, :city, key) do
defp group_imported_by(q, :city, key, _query) do
q
|> group_by([i], i.city)
|> where([i], i.city != 0 and not is_nil(i.city))
|> select_merge_as([i], %{key => i.city})
end
defp group_imported_by(q, dim, key) when dim in [:device, :browser] do
defp group_imported_by(q, dim, key, _query) when dim in [:device, :browser] do
q
|> group_by([i], field(i, ^dim))
|> select_merge_as([i], %{
@ -607,7 +556,7 @@ defmodule Plausible.Stats.Imported do
})
end
defp group_imported_by(q, :browser_version, key) do
defp group_imported_by(q, :browser_version, key, _query) do
q
|> group_by([i], [i.browser_version])
|> select_merge_as([i], %{
@ -615,7 +564,7 @@ defmodule Plausible.Stats.Imported do
})
end
defp group_imported_by(q, :os, key) do
defp group_imported_by(q, :os, key, _query) do
q
|> group_by([i], i.operating_system)
|> select_merge_as([i], %{
@ -623,7 +572,7 @@ defmodule Plausible.Stats.Imported do
})
end
defp group_imported_by(q, :os_version, key) do
defp group_imported_by(q, :os_version, key, _query) do
q
|> group_by([i], [i.operating_system_version])
|> select_merge_as([i], %{
@ -637,19 +586,19 @@ defmodule Plausible.Stats.Imported do
})
end
defp group_imported_by(q, dim, key) when dim in [:entry_page, :exit_page] do
defp group_imported_by(q, dim, key, _query) when dim in [:entry_page, :exit_page] do
q
|> group_by([i], field(i, ^dim))
|> select_merge_as([i], %{key => field(i, ^dim)})
end
defp group_imported_by(q, :name, key) do
defp group_imported_by(q, :name, key, _query) do
q
|> group_by([i], i.name)
|> select_merge_as([i], %{key => i.name})
end
defp group_imported_by(q, :url, key) do
defp group_imported_by(q, :url, key, _query) do
q
|> group_by([i], i.link_url)
|> select_merge_as([i], %{
@ -657,7 +606,7 @@ defmodule Plausible.Stats.Imported do
})
end
defp group_imported_by(q, :path, key) do
defp group_imported_by(q, :path, key, _query) do
q
|> group_by([i], i.path)
|> select_merge_as([i], %{
@ -665,6 +614,32 @@ defmodule Plausible.Stats.Imported do
})
end
defp group_imported_by(q, :month, key, _query) do
q
|> group_by([i], fragment("toStartOfMonth(?)", i.date))
|> select_merge_as([i], %{key => fragment("toStartOfMonth(?)", i.date)})
end
defp group_imported_by(q, :hour, key, _query) do
q
|> group_by([i], i.date)
|> select_merge_as([i], %{key => i.date})
end
defp group_imported_by(q, :week, key, query) do
q
|> group_by([i], weekstart_not_before(i.date, ^query.date_range.first))
|> select_merge_as([i], %{
key => weekstart_not_before(i.date, ^query.date_range.first)
})
end
defp group_imported_by(q, :day, key, _query) do
q
|> group_by([i], i.date)
|> select_merge_as([i], %{key => i.date})
end
defp select_joined_dimensions(q, query) do
Enum.reduce(query.dimensions, q, fn dimension, q ->
select_joined_dimension(q, dimension, shortname(query, dimension))
@ -677,6 +652,12 @@ defmodule Plausible.Stats.Imported do
})
end
defp select_joined_dimension(q, "time:" <> _, key) do
select_merge_as(q, [s, i], %{
key => fragment("greatest(?, ?)", field(i, ^key), field(s, ^key))
})
end
defp select_joined_dimension(q, _dimension, key) do
select_merge_as(q, [s, i], %{
key => fragment("if(empty(?), ?, ?)", field(s, ^key), field(i, ^key), field(s, ^key))

View File

@ -7,11 +7,9 @@ defmodule Plausible.Stats.Query do
dimensions: [],
filters: [],
sample_threshold: 20_000_000,
imported_data_requested: false,
include_imported: false,
skip_imported_reason: nil,
now: nil,
experimental_session_count?: false,
experimental_reduced_joins?: false,
latest_import_end_date: nil,
metrics: [],
@ -36,8 +34,7 @@ defmodule Plausible.Stats.Query do
query =
__MODULE__
|> struct!(now: now)
|> put_experimental_session_count(site, params)
|> struct!(now: now, timezone: site.timezone)
|> put_experimental_reduced_joins(site, params)
|> put_period(site, params)
|> put_dimensions(params)
@ -57,7 +54,6 @@ defmodule Plausible.Stats.Query do
query =
struct!(__MODULE__, Map.to_list(query_data))
|> put_imported_opts(site, %{})
|> put_experimental_session_count(site, params)
|> put_experimental_reduced_joins(site, params)
|> struct!(v2: true)
@ -65,18 +61,6 @@ defmodule Plausible.Stats.Query do
end
end
defp put_experimental_session_count(query, site, params) do
if Map.has_key?(params, "experimental_session_count") do
struct!(query,
experimental_session_count?: Map.get(params, "experimental_session_count") == "true"
)
else
struct!(query,
experimental_session_count?: FunWithFlags.enabled?(:experimental_session_count, for: site)
)
end
end
defp put_experimental_reduced_joins(query, site, params) do
if Map.has_key?(params, "experimental_reduced_joins") do
struct!(query,
@ -231,9 +215,13 @@ defmodule Plausible.Stats.Query do
end
def set(query, keywords) do
query
|> struct!(keywords)
|> refresh_imported_opts()
new_query = struct!(query, keywords)
if Keyword.has_key?(keywords, :include_imported) do
new_query
else
refresh_imported_opts(new_query)
end
end
@spec set_dimensions(t(), list(String.t())) :: t()
@ -314,7 +302,7 @@ defmodule Plausible.Stats.Query do
end
defp put_imported_opts(query, site, params) do
requested? = params["with_imported"] == "true" || query.imported_data_requested
requested? = params["with_imported"] == "true" || query.include.imports
latest_import_end_date =
if site do
@ -328,15 +316,15 @@ defmodule Plausible.Stats.Query do
case ensure_include_imported(query, requested?) do
:ok ->
struct!(query,
imported_data_requested: true,
include_imported: true
include_imported: true,
include: Map.put(query.include, :imports, true)
)
{:error, reason} ->
struct!(query,
imported_data_requested: requested?,
include_imported: false,
skip_imported_reason: reason
skip_imported_reason: reason,
include: Map.put(query.include, :imports, requested?)
)
end
end

View File

@ -33,7 +33,7 @@ defmodule Plausible.Stats.QueryOptimizer do
|> TableDecider.partition_metrics(query)
{
Query.set_metrics(query, event_metrics),
Query.set(query, metrics: event_metrics, include_imported: query.include_imported),
split_sessions_query(query, sessions_metrics)
}
end
@ -80,6 +80,7 @@ defmodule Plausible.Stats.QueryOptimizer do
cond do
Timex.diff(last, first, :hours) <= 48 -> "time:hour"
Timex.diff(last, first, :days) <= 40 -> "time:day"
Timex.diff(last, first, :weeks) <= 52 -> "time:week"
true -> "time:month"
end
end
@ -160,6 +161,11 @@ defmodule Plausible.Stats.QueryOptimizer do
query.filters
end
Query.set(query, filters: filters, metrics: session_metrics, dimensions: dimensions)
Query.set(query,
filters: filters,
metrics: session_metrics,
dimensions: dimensions,
include_imported: query.include_imported
)
end
end

View File

@ -3,7 +3,6 @@ defmodule Plausible.Stats.QueryResult do
alias Plausible.Stats.Util
alias Plausible.Stats.Filters
alias Plausible.Stats.Query
@derive Jason.Encoder
defstruct results: [],
@ -34,16 +33,6 @@ defmodule Plausible.Stats.QueryResult do
)
end
defp meta(%Query{skip_imported_reason: :unsupported_query}) do
%{
warning:
"Imported stats are not included in the results because query parameters are not supported. " <>
"For more information, see: https://plausible.io/docs/stats-api#filtering-imported-stats"
}
end
defp meta(_), do: %{}
defp dimension_label("event:goal", entry, query) do
{events, paths} = Filters.Utils.split_goals(query.preloaded_goals)
@ -56,6 +45,12 @@ defmodule Plausible.Stats.QueryResult do
end
end
defp dimension_label("time:" <> _ = time_dimension, entry, query) do
datetime = Map.get(entry, Util.shortname(query, time_dimension))
Plausible.Stats.Time.format_datetime(datetime)
end
defp dimension_label(dimension, entry, query) do
Map.get(entry, Util.shortname(query, dimension))
end
@ -65,4 +60,21 @@ defmodule Plausible.Stats.QueryResult do
end
defp serializable_filter(filter), do: filter
@imports_unsupported_query_warning "Imported stats are not included in the results because query parameters are not supported. " <>
"For more information, see: https://plausible.io/docs/stats-api#filtering-imported-stats"
defp meta(query) do
%{
warning:
case query.skip_imported_reason do
:unsupported_query -> @imports_unsupported_query_warning
_ -> nil
end,
time_labels:
if(query.include.time_labels, do: Plausible.Stats.Time.time_labels(query), else: nil)
}
|> Enum.reject(fn {_, value} -> is_nil(value) end)
|> Enum.into(%{})
end
end

View File

@ -26,34 +26,98 @@ defmodule Plausible.Stats.SQL.Expression do
end
end
def dimension(key, "time:hour", query) do
wrap_alias([t], %{
key => fragment("toStartOfHour(toTimeZone(?, ?))", t.timestamp, ^query.timezone)
})
defmacrop regular_time_slots(query, period_in_seconds) do
quote do
fragment(
"arrayJoin(timeSlots(toTimeZone(?, ?), toUInt32(timeDiff(?, ?)), toUInt32(?)))",
s.start,
^unquote(query).timezone,
s.start,
s.timestamp,
^unquote(period_in_seconds)
)
end
end
def dimension(key, "time:day", query) do
wrap_alias([t], %{
key => fragment("toDate(toTimeZone(?, ?))", t.timestamp, ^query.timezone)
})
end
def dimension(key, "time:month", query) do
def dimension(key, "time:month", _table, query) do
wrap_alias([t], %{
key => fragment("toStartOfMonth(toTimeZone(?, ?))", t.timestamp, ^query.timezone)
})
end
def dimension(key, "event:name", _query),
def dimension(key, "time:week", _table, query) do
wrap_alias([t], %{
key =>
weekstart_not_before(
to_timezone(t.timestamp, ^query.timezone),
^query.date_range.first
)
})
end
def dimension(key, "time:day", _table, query) do
wrap_alias([t], %{
key => fragment("toDate(toTimeZone(?, ?))", t.timestamp, ^query.timezone)
})
end
def dimension(key, "time:hour", :sessions, query) do
wrap_alias([s], %{
key => regular_time_slots(query, 3600)
})
end
def dimension(key, "time:hour", _table, query) do
wrap_alias([t], %{
key => fragment("toStartOfHour(toTimeZone(?, ?))", t.timestamp, ^query.timezone)
})
end
# :NOTE: This is not exposed in Query APIv2
def dimension(key, "time:minute", :sessions, %Query{
period: "30m"
}) do
wrap_alias([s], %{
key =>
fragment(
"arrayJoin(range(dateDiff('minute', now(), ?), dateDiff('minute', now(), ?) + 1))",
s.start,
s.timestamp
)
})
end
# :NOTE: This is not exposed in Query APIv2
def dimension(key, "time:minute", _table, %Query{period: "30m"}) do
wrap_alias([t], %{
key => fragment("dateDiff('minute', now(), ?)", t.timestamp)
})
end
# :NOTE: This is not exposed in Query APIv2
def dimension(key, "time:minute", :sessions, query) do
wrap_alias([s], %{
key => regular_time_slots(query, 60)
})
end
# :NOTE: This is not exposed in Query APIv2
def dimension(key, "time:minute", _table, query) do
wrap_alias([t], %{
key => fragment("toStartOfMinute(toTimeZone(?, ?))", t.timestamp, ^query.timezone)
})
end
def dimension(key, "event:name", _table, _query),
do: wrap_alias([t], %{key => t.name})
def dimension(key, "event:page", _query),
def dimension(key, "event:page", _table, _query),
do: wrap_alias([t], %{key => t.pathname})
def dimension(key, "event:hostname", _query),
def dimension(key, "event:hostname", _table, _query),
do: wrap_alias([t], %{key => t.hostname})
def dimension(key, "event:props:" <> property_name, _query) do
def dimension(key, "event:props:" <> property_name, _table, _query) do
wrap_alias([t], %{
key =>
fragment(
@ -64,55 +128,55 @@ defmodule Plausible.Stats.SQL.Expression do
})
end
def dimension(key, "visit:entry_page", _query),
def dimension(key, "visit:entry_page", _table, _query),
do: wrap_alias([t], %{key => t.entry_page})
def dimension(key, "visit:exit_page", _query),
def dimension(key, "visit:exit_page", _table, _query),
do: wrap_alias([t], %{key => t.exit_page})
def dimension(key, "visit:utm_medium", _query),
def dimension(key, "visit:utm_medium", _table, _query),
do: field_or_blank_value(key, t.utm_medium, @not_set)
def dimension(key, "visit:utm_source", _query),
def dimension(key, "visit:utm_source", _table, _query),
do: field_or_blank_value(key, t.utm_source, @not_set)
def dimension(key, "visit:utm_campaign", _query),
def dimension(key, "visit:utm_campaign", _table, _query),
do: field_or_blank_value(key, t.utm_campaign, @not_set)
def dimension(key, "visit:utm_content", _query),
def dimension(key, "visit:utm_content", _table, _query),
do: field_or_blank_value(key, t.utm_content, @not_set)
def dimension(key, "visit:utm_term", _query),
def dimension(key, "visit:utm_term", _table, _query),
do: field_or_blank_value(key, t.utm_term, @not_set)
def dimension(key, "visit:source", _query),
def dimension(key, "visit:source", _table, _query),
do: field_or_blank_value(key, t.source, @no_ref)
def dimension(key, "visit:referrer", _query),
def dimension(key, "visit:referrer", _table, _query),
do: field_or_blank_value(key, t.referrer, @no_ref)
def dimension(key, "visit:device", _query),
def dimension(key, "visit:device", _table, _query),
do: field_or_blank_value(key, t.device, @not_set)
def dimension(key, "visit:os", _query),
def dimension(key, "visit:os", _table, _query),
do: field_or_blank_value(key, t.os, @not_set)
def dimension(key, "visit:os_version", _query),
def dimension(key, "visit:os_version", _table, _query),
do: field_or_blank_value(key, t.os_version, @not_set)
def dimension(key, "visit:browser", _query),
def dimension(key, "visit:browser", _table, _query),
do: field_or_blank_value(key, t.browser, @not_set)
def dimension(key, "visit:browser_version", _query),
def dimension(key, "visit:browser_version", _table, _query),
do: field_or_blank_value(key, t.browser_version, @not_set)
def dimension(key, "visit:country", _query),
def dimension(key, "visit:country", _table, _query),
do: wrap_alias([t], %{key => t.country})
def dimension(key, "visit:region", _query),
def dimension(key, "visit:region", _table, _query),
do: wrap_alias([t], %{key => t.region})
def dimension(key, "visit:city", _query),
def dimension(key, "visit:city", _table, _query),
do: wrap_alias([t], %{key => t.city})
def event_metric(:pageviews) do

View File

@ -97,19 +97,6 @@ defmodule Plausible.Stats.SQL.Fragments do
end
end
@doc """
Same as Plausible.Stats.SQL.Fragments.weekstart_not_before/2 but converts dates to
the specified timezone.
"""
defmacro weekstart_not_before(date, not_before, timezone) do
quote do
weekstart_not_before(
to_timezone(unquote(date), unquote(timezone)),
to_timezone(unquote(not_before), unquote(timezone))
)
end
end
@doc """
Returns whether a key (usually property) exists under `meta.key` array or similar.
@ -192,6 +179,21 @@ defmodule Plausible.Stats.SQL.Fragments do
end
end
@doc """
Macro that helps join two Ecto queries by selecting fields from either one
"""
defmacro select_join_fields(q, query, list, table_name) do
quote do
Enum.reduce(unquote(list), unquote(q), fn metric_or_dimension, q ->
key = shortname(unquote(query), metric_or_dimension)
select_merge_as(q, [e, s], %{
key => field(unquote(table_name), ^key)
})
end)
end
end
defp update_literal_map_values({:%{}, ctx, keyword_list}, mapper_fn) do
{
:%{},

View File

@ -41,11 +41,9 @@ defmodule Plausible.Stats.SQL.QueryBuilder do
q
|> join_sessions_if_needed(site, events_query)
|> build_group_by(events_query)
|> build_group_by(:events, events_query)
|> merge_imported(site, events_query, events_query.metrics)
|> maybe_add_global_conversion_rate(site, events_query)
|> maybe_add_group_conversion_rate(site, events_query)
|> Base.add_percentage_metric(site, events_query, events_query.metrics)
|> SQL.SpecialMetrics.add(site, events_query)
end
defp join_sessions_if_needed(q, site, query) do
@ -84,11 +82,9 @@ defmodule Plausible.Stats.SQL.QueryBuilder do
q
|> join_events_if_needed(site, sessions_query)
|> build_group_by(sessions_query)
|> build_group_by(:sessions, sessions_query)
|> merge_imported(site, sessions_query, sessions_query.metrics)
|> maybe_add_global_conversion_rate(site, sessions_query)
|> maybe_add_group_conversion_rate(site, sessions_query)
|> Base.add_percentage_metric(site, sessions_query, sessions_query.metrics)
|> SQL.SpecialMetrics.add(site, sessions_query)
end
def join_events_if_needed(q, site, query) do
@ -115,11 +111,11 @@ defmodule Plausible.Stats.SQL.QueryBuilder do
end
end
defp build_group_by(q, query) do
Enum.reduce(query.dimensions, q, &dimension_group_by(&2, query, &1))
defp build_group_by(q, table, query) do
Enum.reduce(query.dimensions, q, &dimension_group_by(&2, table, query, &1))
end
defp dimension_group_by(q, query, "event:goal" = dimension) do
defp dimension_group_by(q, _table, query, "event:goal" = dimension) do
{events, page_regexes} = Filters.Utils.split_goals_query_expressions(query.preloaded_goals)
from(e in q,
@ -132,11 +128,11 @@ defmodule Plausible.Stats.SQL.QueryBuilder do
)
end
defp dimension_group_by(q, query, dimension) do
defp dimension_group_by(q, table, query, dimension) do
key = shortname(query, dimension)
q
|> select_merge_as([], Expression.dimension(key, dimension, query))
|> select_merge_as([], Expression.dimension(key, dimension, table, query))
|> group_by([], selected_as(^key))
end
@ -155,87 +151,6 @@ defmodule Plausible.Stats.SQL.QueryBuilder do
)
end
defmacrop select_join_fields(q, query, list, table_name) do
quote do
Enum.reduce(unquote(list), unquote(q), fn metric_or_dimension, q ->
key = shortname(unquote(query), metric_or_dimension)
select_merge_as(q, [e, s], %{
key => field(unquote(table_name), ^key)
})
end)
end
end
# Adds conversion_rate metric to query, calculated as
# X / Y where Y is the same breakdown value without goal or props
# filters.
def maybe_add_global_conversion_rate(q, site, query) do
if :conversion_rate in query.metrics do
total_query =
query
|> Query.remove_filters(["event:goal", "event:props"])
|> Query.set_dimensions([])
q
|> select_merge_as(
[],
Base.total_visitors_subquery(site, total_query, query.include_imported)
)
|> select_merge_as([e], %{
conversion_rate:
fragment(
"if(? > 0, round(? / ? * 100, 1), 0)",
selected_as(:total_visitors),
selected_as(:visitors),
selected_as(:total_visitors)
)
})
else
q
end
end
# This function injects a group_conversion_rate metric into
# a dimensional query. It is calculated as X / Y, where:
#
# * X is the number of conversions for a set of dimensions
# result (conversion = number of visitors who
# completed the filtered goal with the filtered
# custom properties).
#
# * Y is the number of all visitors for this set of dimensions
# result without the `event:goal` and `event:props:*`
# filters.
def maybe_add_group_conversion_rate(q, site, query) do
if :group_conversion_rate in query.metrics do
group_totals_query =
query
|> Query.remove_filters(["event:goal", "event:props"])
|> Query.set_metrics([:visitors])
|> Query.set_order_by([])
from(e in subquery(q),
left_join: c in subquery(build(group_totals_query, site)),
on: ^build_group_by_join(query)
)
|> select_merge_as([e, c], %{
total_visitors: c.visitors,
group_conversion_rate:
fragment(
"if(? > 0, round(? / ? * 100, 1), 0)",
c.visitors,
e.visitors,
c.visitors
)
})
|> select_join_fields(query, query.dimensions, e)
|> select_join_fields(query, List.delete(query.metrics, :group_conversion_rate), e)
else
q
end
end
defp join_query_results({nil, _}, {nil, _}), do: nil
defp join_query_results({events_q, events_query}, {nil, _}),

View File

@ -0,0 +1,152 @@
defmodule Plausible.Stats.SQL.SpecialMetrics do
  @moduledoc """
  This module defines how special metrics like `conversion_rate` and
  `percentage` are calculated.

  Special metrics are computed as ratios against a "totals" variant of the
  original query (same query with some filters and/or dimensions removed),
  which is injected into the Ecto query as a subquery or join.
  """

  use Plausible.Stats.SQL.Fragments

  alias Plausible.Stats.{Base, Query, SQL}

  import Ecto.Query
  import Plausible.Stats.Util

  @doc """
  Adds the SQL for any special metrics requested in `query.metrics`
  (`:percentage`, `:conversion_rate`, `:group_conversion_rate`) to `q`.

  Metrics that are not requested leave `q` unchanged.
  """
  def add(q, site, query) do
    q
    |> maybe_add_percentage_metric(site, query)
    |> maybe_add_global_conversion_rate(site, query)
    |> maybe_add_group_conversion_rate(site, query)
  end

  # Adds the `percentage` metric, calculated as X / Y where:
  # * X is `visitors` for the current dimensional group, and
  # * Y is `total_visitors` for the same query without dimensions.
  # Yields `null` (per the SQL fragment) when the total is 0.
  defp maybe_add_percentage_metric(q, site, query) do
    if :percentage in query.metrics do
      # Drop dimensions but keep include_imported in sync with the base query
      # so numerator and denominator cover the same data set.
      total_query =
        Query.set(query,
          dimensions: [],
          include_imported: query.include_imported
        )

      q
      |> select_merge_as([], total_visitors_subquery(site, total_query, query.include_imported))
      |> select_merge_as([], %{
        percentage:
          fragment(
            "if(? > 0, round(? / ? * 100, 1), null)",
            selected_as(:total_visitors),
            selected_as(:visitors),
            selected_as(:total_visitors)
          )
      })
    else
      q
    end
  end

  # Adds conversion_rate metric to query, calculated as
  # X / Y where Y is the same breakdown value without goal or props
  # filters. Yields 0 (not null) when the total is 0.
  def maybe_add_global_conversion_rate(q, site, query) do
    if :conversion_rate in query.metrics do
      total_query =
        query
        |> Query.remove_filters(["event:goal", "event:props"])
        |> Query.set(
          dimensions: [],
          include_imported: query.include_imported
        )

      q
      |> select_merge_as(
        [],
        total_visitors_subquery(site, total_query, query.include_imported)
      )
      |> select_merge_as([e], %{
        conversion_rate:
          fragment(
            "if(? > 0, round(? / ? * 100, 1), 0)",
            selected_as(:total_visitors),
            selected_as(:visitors),
            selected_as(:total_visitors)
          )
      })
    else
      q
    end
  end

  # This function injects a group_conversion_rate metric into
  # a dimensional query. It is calculated as X / Y, where:
  #
  #   * X is the number of conversions for a set of dimensions
  #     result (conversion = number of visitors who
  #     completed the filtered goal with the filtered
  #     custom properties).
  #
  #   * Y is the number of all visitors for this set of dimensions
  #     result without the `event:goal` and `event:props:*`
  #     filters.
  def maybe_add_group_conversion_rate(q, site, query) do
    if :group_conversion_rate in query.metrics do
      group_totals_query =
        query
        |> Query.remove_filters(["event:goal", "event:props"])
        |> Query.set(
          metrics: [:visitors],
          order_by: [],
          include_imported: query.include_imported
        )

      # Join the per-group totals onto the goal-filtered results by the
      # query's dimensions (see SQL.QueryBuilder.build_group_by_join/1).
      from(e in subquery(q),
        left_join: c in subquery(SQL.QueryBuilder.build(group_totals_query, site)),
        on: ^SQL.QueryBuilder.build_group_by_join(query)
      )
      |> select_merge_as([e, c], %{
        total_visitors: c.visitors,
        group_conversion_rate:
          fragment(
            "if(? > 0, round(? / ? * 100, 1), 0)",
            c.visitors,
            e.visitors,
            c.visitors
          )
      })
      # Re-select the outer query's dimensions and remaining metrics, which
      # would otherwise be lost when wrapping `q` in a subquery.
      |> select_join_fields(query, query.dimensions, e)
      |> select_join_fields(query, List.delete(query.metrics, :group_conversion_rate), e)
    else
      q
    end
  end

  # Counts unique visitors (sample-adjusted) over the base event query.
  defp total_visitors(site, query) do
    Base.base_event_query(site, query)
    |> select([e],
      total_visitors: fragment("toUInt64(round(uniq(?) * any(_sample_factor)))", e.user_id)
    )
  end

  # `total_visitors_subquery` returns a subquery which selects `total_visitors` -
  # the number used as the denominator in the calculation of `conversion_rate` and
  # `percentage` metrics.

  # Usually, when calculating the totals, a new query is passed into this function,
  # where certain filters (e.g. goal, props) are removed. That might make the query
  # able to include imported data. However, we always want to include imported data
  # only if it's included in the base query - otherwise the total will be based on
  # a different data set, making the metric inaccurate. This is why we're using an
  # explicit `include_imported` argument here.
  defp total_visitors_subquery(site, query, include_imported)

  defp total_visitors_subquery(site, query, true = _include_imported) do
    # Total is the sum of native and imported unique visitors.
    wrap_alias([], %{
      total_visitors:
        subquery(total_visitors(site, query)) +
          subquery(Plausible.Stats.Imported.total_imported_visitors(site, query))
    })
  end

  defp total_visitors_subquery(site, query, false = _include_imported) do
    wrap_alias([], %{
      total_visitors: subquery(total_visitors(site, query))
    })
  end
end

View File

@ -51,7 +51,7 @@ defmodule Plausible.Stats.SQL.WhereBuilder do
)
end
defp filter_site_time_range(:sessions, site, %Query{experimental_session_count?: true} = query) do
defp filter_site_time_range(:sessions, site, query) do
{first_datetime, last_datetime} = utc_boundaries(query, site)
# Counts each _active_ session in time range even if they started before
@ -61,15 +61,6 @@ defmodule Plausible.Stats.SQL.WhereBuilder do
)
end
defp filter_site_time_range(:sessions, site, query) do
{first_datetime, last_datetime} = utc_boundaries(query, site)
dynamic(
[s],
s.site_id == ^site.id and s.start >= ^first_datetime and s.start < ^last_datetime
)
end
defp add_filter(:events, _query, [:is, "event:name", list]) do
dynamic([e], e.name in ^list)
end

118
lib/plausible/stats/time.ex Normal file
View File

@ -0,0 +1,118 @@
defmodule Plausible.Stats.Time do
  @moduledoc """
  Collection of functions to work with time in queries.
  """

  alias Plausible.Stats.Query

  # Formats a bucket key for use as a time label:
  # dates as "YYYY-MM-DD", datetimes as "YYYY-MM-DD HH:MM:SS".
  def format_datetime(%Date{} = date), do: Date.to_string(date)

  def format_datetime(%DateTime{} = datetime),
    do: Timex.format!(datetime, "{YYYY}-{0M}-{0D} {h24}:{m}:{s}")

  # Realtime graphs return numbers (minute offsets), which pass through as-is.
  def format_datetime(other), do: other

  @doc """
  Returns the query's time dimension (e.g. `"time:day"`), i.e. the first
  dimension whose name starts with `"time"`, or `nil` if there is none.
  """
  def time_dimension(query) do
    Enum.find(query.dimensions, &String.starts_with?(&1, "time"))
  end

  @doc """
  Returns list of time bucket labels for the given query.

  One label per bucket of the query's time dimension over `query.date_range`,
  formatted with `format_datetime/1`.
  """
  def time_labels(query) do
    time_labels_for_dimension(time_dimension(query), query)
  end

  defp time_labels_for_dimension("time:month", query) do
    n_buckets =
      Timex.diff(
        query.date_range.last,
        Date.beginning_of_month(query.date_range.first),
        :months
      )

    # Walk backwards from the last month so the first label is the month
    # containing `date_range.first`.
    Enum.map(n_buckets..0, fn shift ->
      query.date_range.last
      |> Date.beginning_of_month()
      |> Timex.shift(months: -shift)
      |> format_datetime()
    end)
  end

  defp time_labels_for_dimension("time:week", query) do
    n_buckets =
      Timex.diff(
        query.date_range.last,
        Date.beginning_of_week(query.date_range.first),
        :weeks
      )

    # The first bucket may start mid-week, so it is labelled with
    # `date_range.first` itself rather than the week start (see
    # date_or_weekstart/2).
    Enum.map(0..n_buckets, fn shift ->
      query.date_range.first
      |> Timex.shift(weeks: shift)
      |> date_or_weekstart(query)
      |> format_datetime()
    end)
  end

  defp time_labels_for_dimension("time:day", query) do
    # One label per calendar day, inclusive of both range ends.
    query.date_range
    |> Enum.into([])
    |> Enum.map(&format_datetime/1)
  end

  # Hour offsets 0..23 cover a full single day.
  @full_day_in_hours 23
  defp time_labels_for_dimension("time:hour", query) do
    n_buckets =
      if query.date_range.first == query.date_range.last do
        @full_day_in_hours
      else
        end_time =
          query.date_range.last
          |> Timex.to_datetime()
          |> Timex.end_of_day()

        # NOTE(review): diffs a DateTime against a Date; presumably Timex
        # coerces `first` to midnight — confirm.
        Timex.diff(end_time, query.date_range.first, :hours)
      end

    Enum.map(0..n_buckets, fn step ->
      query.date_range.first
      |> Timex.to_datetime()
      |> Timex.shift(hours: step)
      |> DateTime.truncate(:second)
      |> format_datetime()
    end)
  end

  # Only supported in dashboards not via API
  defp time_labels_for_dimension("time:minute", %Query{period: "30m"}) do
    # Realtime labels are minute offsets relative to now.
    Enum.into(-30..-1, [])
  end

  # Minute offsets 0..1439 cover a full single day.
  @full_day_in_minutes 24 * 60 - 1
  defp time_labels_for_dimension("time:minute", query) do
    n_buckets =
      if query.date_range.first == query.date_range.last do
        @full_day_in_minutes
      else
        Timex.diff(query.date_range.last, query.date_range.first, :minutes)
      end

    Enum.map(0..n_buckets, fn step ->
      query.date_range.first
      |> Timex.to_datetime()
      |> Timex.shift(minutes: step)
      |> format_datetime()
    end)
  end

  # Labels a week bucket with its week start, unless that week start falls
  # outside the queried range (the first, partial week), in which case the
  # date itself is used.
  defp date_or_weekstart(date, query) do
    weekstart = Timex.beginning_of_week(date)

    if Enum.member?(query.date_range, weekstart) do
      weekstart
    else
      date
    end
  end
end

View File

@ -1,290 +1,68 @@
defmodule Plausible.Stats.Timeseries do
use Plausible.ClickhouseRepo
use Plausible
alias Plausible.Stats.{Query, Util, Imported}
import Plausible.Stats.{Base}
import Ecto.Query
use Plausible.Stats.SQL.Fragments
use Plausible.ClickhouseRepo
alias Plausible.Stats.{Query, QueryOptimizer, QueryResult, SQL}
@typep metric ::
:pageviews
| :events
| :visitors
| :visits
| :bounce_rate
| :visit_duration
| :average_revenue
| :total_revenue
@typep value :: nil | integer() | float()
@type results :: nonempty_list(%{required(:date) => Date.t(), required(metric()) => value()})
@time_dimension %{
"month" => "time:month",
"week" => "time:week",
"date" => "time:day",
"hour" => "time:hour",
"minute" => "time:minute"
}
def timeseries(site, query, metrics) do
steps = buckets(query)
{event_metrics, session_metrics, _} =
Plausible.Stats.TableDecider.partition_metrics(metrics, query)
{currency, event_metrics} =
{currency, metrics} =
on_ee do
Plausible.Stats.Goal.Revenue.get_revenue_tracking_currency(site, query, event_metrics)
Plausible.Stats.Goal.Revenue.get_revenue_tracking_currency(site, query, metrics)
else
{nil, event_metrics}
{nil, metrics}
end
Query.trace(query, metrics)
query_with_metrics =
Query.set(
query,
metrics: transform_metrics(metrics, %{conversion_rate: :group_conversion_rate}),
dimensions: [time_dimension(query)],
order_by: [{time_dimension(query), :asc}],
v2: true,
include: %{time_labels: true, imports: query.include.imports}
)
|> QueryOptimizer.optimize()
[event_result, session_result] =
Plausible.ClickhouseRepo.parallel_tasks([
fn -> events_timeseries(site, query, event_metrics) end,
fn -> sessions_timeseries(site, query, session_metrics) end
])
q = SQL.QueryBuilder.build(query_with_metrics, site)
Enum.map(steps, fn step ->
empty_row(step, metrics)
|> Map.merge(Enum.find(event_result, fn row -> date_eq(row[:date], step) end) || %{})
|> Map.merge(Enum.find(session_result, fn row -> date_eq(row[:date], step) end) || %{})
|> Map.update!(:date, &date_format/1)
q
|> ClickhouseRepo.all()
|> QueryResult.from(query_with_metrics)
|> build_timeseries_result(query_with_metrics, currency)
|> transform_keys(%{group_conversion_rate: :conversion_rate})
end
defp time_dimension(query), do: Map.fetch!(@time_dimension, query.interval)
defp build_timeseries_result(query_result, query, currency) do
results_map =
query_result.results
|> Enum.map(fn %{dimensions: [time_dimension_value], metrics: entry_metrics} ->
metrics_map = Enum.zip(query.metrics, entry_metrics) |> Enum.into(%{})
{
time_dimension_value,
Map.put(metrics_map, :date, time_dimension_value)
}
end)
|> Enum.into(%{})
query_result.meta.time_labels
|> Enum.map(fn key ->
Map.get(
results_map,
key,
empty_row(key, query.metrics)
)
|> cast_revenue_metrics_to_money(currency)
end)
|> Util.keep_requested_metrics(metrics)
end
defp events_timeseries(_, _, []), do: []
defp events_timeseries(site, query, metrics) do
metrics = Util.maybe_add_visitors_metric(metrics)
from(e in base_event_query(site, query), select: ^select_event_metrics(metrics))
|> select_bucket(:events, site, query)
|> Imported.merge_imported_timeseries(site, query, metrics)
|> maybe_add_timeseries_conversion_rate(site, query, metrics)
|> ClickhouseRepo.all()
end
defp sessions_timeseries(_, _, []), do: []
defp sessions_timeseries(site, query, metrics) do
from(e in query_sessions(site, query), select: ^select_session_metrics(metrics, query))
|> filter_converted_sessions(site, query)
|> select_bucket(:sessions, site, query)
|> Imported.merge_imported_timeseries(site, query, metrics)
|> ClickhouseRepo.all()
|> Util.keep_requested_metrics(metrics)
end
defp buckets(%Query{interval: "month"} = query) do
n_buckets = Timex.diff(query.date_range.last, query.date_range.first, :months)
Enum.map(n_buckets..0, fn shift ->
query.date_range.last
|> Timex.beginning_of_month()
|> Timex.shift(months: -shift)
end)
end
defp buckets(%Query{interval: "week"} = query) do
n_buckets = Timex.diff(query.date_range.last, query.date_range.first, :weeks)
Enum.map(0..n_buckets, fn shift ->
query.date_range.first
|> Timex.shift(weeks: shift)
|> date_or_weekstart(query)
end)
end
defp buckets(%Query{interval: "date"} = query) do
Enum.into(query.date_range, [])
end
@full_day_in_hours 23
defp buckets(%Query{interval: "hour"} = query) do
n_buckets =
if query.date_range.first == query.date_range.last do
@full_day_in_hours
else
Timex.diff(query.date_range.last, query.date_range.first, :hours)
end
Enum.map(0..n_buckets, fn step ->
query.date_range.first
|> Timex.to_datetime()
|> Timex.shift(hours: step)
end)
end
defp buckets(%Query{period: "30m", interval: "minute"}) do
Enum.into(-30..-1, [])
end
@full_day_in_minutes 1439
defp buckets(%Query{interval: "minute"} = query) do
n_buckets =
if query.date_range.first == query.date_range.last do
@full_day_in_minutes
else
Timex.diff(query.date_range.last, query.date_range.first, :minutes)
end
Enum.map(0..n_buckets, fn step ->
query.date_range.first
|> Timex.to_datetime()
|> Timex.shift(minutes: step)
end)
end
defp date_eq(%DateTime{} = left, %DateTime{} = right) do
NaiveDateTime.compare(left, right) == :eq
end
defp date_eq(%Date{} = left, %Date{} = right) do
Date.compare(left, right) == :eq
end
defp date_eq(left, right) do
left == right
end
defp date_format(%DateTime{} = date) do
Timex.format!(date, "{YYYY}-{0M}-{0D} {h24}:{m}:{s}")
end
defp date_format(date) do
date
end
defp select_bucket(q, _table, site, %Query{interval: "month"}) do
from(
e in q,
group_by: fragment("toStartOfMonth(toTimeZone(?, ?))", e.timestamp, ^site.timezone),
order_by: fragment("toStartOfMonth(toTimeZone(?, ?))", e.timestamp, ^site.timezone),
select_merge: %{
date: fragment("toStartOfMonth(toTimeZone(?, ?))", e.timestamp, ^site.timezone)
}
)
end
defp select_bucket(q, _table, site, %Query{interval: "week"} = query) do
{first_datetime, _} = utc_boundaries(query, site)
from(
e in q,
select_merge: %{date: weekstart_not_before(e.timestamp, ^first_datetime, ^site.timezone)},
group_by: weekstart_not_before(e.timestamp, ^first_datetime, ^site.timezone),
order_by: weekstart_not_before(e.timestamp, ^first_datetime, ^site.timezone)
)
end
defp select_bucket(q, _table, site, %Query{interval: "date"}) do
from(
e in q,
group_by: fragment("toDate(toTimeZone(?, ?))", e.timestamp, ^site.timezone),
order_by: fragment("toDate(toTimeZone(?, ?))", e.timestamp, ^site.timezone),
select_merge: %{
date: fragment("toDate(toTimeZone(?, ?))", e.timestamp, ^site.timezone)
}
)
end
defp select_bucket(q, :sessions, site, %Query{
interval: "hour",
experimental_session_count?: true
}) do
bucket_with_timeslots(q, site, 3600)
end
defp select_bucket(q, _table, site, %Query{interval: "hour"}) do
from(
e in q,
group_by: fragment("toStartOfHour(toTimeZone(?, ?))", e.timestamp, ^site.timezone),
order_by: fragment("toStartOfHour(toTimeZone(?, ?))", e.timestamp, ^site.timezone),
select_merge: %{
date: fragment("toStartOfHour(toTimeZone(?, ?))", e.timestamp, ^site.timezone)
}
)
end
defp select_bucket(q, :sessions, _site, %Query{
interval: "minute",
period: "30m",
experimental_session_count?: true
}) do
from(
s in q,
array_join:
bucket in fragment(
"timeSlots(?, toUInt32(timeDiff(?, ?)), ?)",
s.start,
s.start,
s.timestamp,
60
),
group_by: fragment("dateDiff('minute', now(), ?)", bucket),
order_by: fragment("dateDiff('minute', now(), ?)", bucket),
select_merge: %{
date: fragment("dateDiff('minute', now(), ?)", bucket)
}
)
end
defp select_bucket(q, _table, _site, %Query{interval: "minute", period: "30m"}) do
from(
e in q,
group_by: fragment("dateDiff('minute', now(), ?)", e.timestamp),
order_by: fragment("dateDiff('minute', now(), ?)", e.timestamp),
select_merge: %{
date: fragment("dateDiff('minute', now(), ?)", e.timestamp)
}
)
end
defp select_bucket(q, _table, site, %Query{interval: "minute"}) do
from(
e in q,
group_by: fragment("toStartOfMinute(toTimeZone(?, ?))", e.timestamp, ^site.timezone),
order_by: fragment("toStartOfMinute(toTimeZone(?, ?))", e.timestamp, ^site.timezone),
select_merge: %{
date: fragment("toStartOfMinute(toTimeZone(?, ?))", e.timestamp, ^site.timezone)
}
)
end
defp select_bucket(q, :sessions, site, %Query{
interval: "minute",
experimental_session_count?: true
}) do
bucket_with_timeslots(q, site, 60)
end
# Includes session in _every_ time bucket it was active in.
# Only done in hourly and minute graphs for performance reasons.
defp bucket_with_timeslots(q, site, period_in_seconds) do
from(
s in q,
array_join:
bucket in fragment(
"timeSlots(toTimeZone(?, ?), toUInt32(timeDiff(?, ?)), toUInt32(?))",
s.start,
^site.timezone,
s.start,
s.timestamp,
^period_in_seconds
),
group_by: bucket,
order_by: bucket,
select_merge: %{
date: fragment("?", bucket)
}
)
end
defp date_or_weekstart(date, query) do
weekstart = Timex.beginning_of_week(date)
if Enum.member?(query.date_range, weekstart) do
weekstart
else
date
end
end
defp empty_row(date, metrics) do
@ -296,7 +74,8 @@ defmodule Plausible.Stats.Timeseries do
:visits -> Map.merge(row, %{visits: 0})
:views_per_visit -> Map.merge(row, %{views_per_visit: 0.0})
:conversion_rate -> Map.merge(row, %{conversion_rate: 0.0})
:bounce_rate -> Map.merge(row, %{bounce_rate: nil})
:group_conversion_rate -> Map.merge(row, %{group_conversion_rate: 0.0})
:bounce_rate -> Map.merge(row, %{bounce_rate: 0.0})
:visit_duration -> Map.merge(row, %{visit_duration: nil})
:average_revenue -> Map.merge(row, %{average_revenue: nil})
:total_revenue -> Map.merge(row, %{total_revenue: nil})
@ -304,6 +83,19 @@ defmodule Plausible.Stats.Timeseries do
end)
end
defp transform_metrics(metrics, to_replace) do
Enum.map(metrics, &Map.get(to_replace, &1, &1))
end
defp transform_keys(results, keys_to_replace) do
Enum.map(results, fn map ->
Enum.map(map, fn {key, val} ->
{Map.get(keys_to_replace, key, key), val}
end)
|> Enum.into(%{})
end)
end
on_ee do
defp cast_revenue_metrics_to_money(results, revenue_goals) do
Plausible.Stats.Goal.Revenue.cast_revenue_metrics_to_money(results, revenue_goals)
@ -311,41 +103,4 @@ defmodule Plausible.Stats.Timeseries do
else
defp cast_revenue_metrics_to_money(results, _revenue_goals), do: results
end
defp maybe_add_timeseries_conversion_rate(q, site, query, metrics) do
if :conversion_rate in metrics do
# Having removed some filters, the query might become eligible
# for including imported data. However, we still want to make
# sure that that include_imported is in sync between original
# and the totals query.
totals_query =
query
|> Query.remove_filters(["event:goal", "event:props"])
|> struct!(include_imported: query.include_imported)
totals_timeseries_q =
from(e in base_event_query(site, totals_query),
select: ^select_event_metrics([:visitors])
)
|> select_bucket(:events, site, totals_query)
|> Imported.merge_imported_timeseries(site, totals_query, [:visitors])
from(e in subquery(q),
left_join: c in subquery(totals_timeseries_q),
on: e.date == c.date,
select_merge: %{
total_visitors: c.visitors,
conversion_rate:
fragment(
"if(? > 0, round(? / ? * 100, 1), 0)",
c.visitors,
e.visitors,
c.visitors
)
}
)
else
q
end
end
end

View File

@ -6,7 +6,6 @@ defmodule Plausible.Stats.Util do
@manually_removable_metrics [
:__internal_visits,
:visitors,
:__breakdown_value,
:total_visitors
]

View File

@ -174,8 +174,10 @@ defmodule PlausibleWeb.Api.StatsController do
defp build_full_intervals(%{interval: "week", date_range: range}, labels) do
for label <- labels, into: %{} do
interval_start = Timex.beginning_of_week(label)
interval_end = Timex.end_of_week(label)
date = Date.from_iso8601!(label)
interval_start = Timex.beginning_of_week(date)
interval_end = Timex.end_of_week(date)
within_interval? = Enum.member?(range, interval_start) && Enum.member?(range, interval_end)
@ -185,8 +187,10 @@ defmodule PlausibleWeb.Api.StatsController do
defp build_full_intervals(%{interval: "month", date_range: range}, labels) do
for label <- labels, into: %{} do
interval_start = Timex.beginning_of_month(label)
interval_end = Timex.end_of_month(label)
date = Date.from_iso8601!(label)
interval_start = Timex.beginning_of_month(date)
interval_end = Timex.end_of_month(date)
within_interval? = Enum.member?(range, interval_start) && Enum.member?(range, interval_end)

View File

@ -74,17 +74,17 @@ defmodule Plausible.Stats.QueryOptimizerTest do
assert perform(%{
date_range: Date.range(~D[2022-01-01], ~D[2022-02-16]),
dimensions: ["time"]
}).dimensions == ["time:month"]
}).dimensions == ["time:week"]
assert perform(%{
date_range: Date.range(~D[2022-01-01], ~D[2022-03-16]),
dimensions: ["time"]
}).dimensions == ["time:month"]
}).dimensions == ["time:week"]
assert perform(%{
date_range: Date.range(~D[2022-01-01], ~D[2022-03-16]),
dimensions: ["time"]
}).dimensions == ["time:month"]
}).dimensions == ["time:week"]
assert perform(%{
date_range: Date.range(~D[2022-01-01], ~D[2023-11-16]),

View File

@ -27,7 +27,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: [],
order_by: nil,
timezone: site.timezone,
imported_data_requested: false,
include: %{imports: false, time_labels: false},
preloaded_goals: []
})
end
@ -47,7 +47,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: [],
order_by: nil,
timezone: site.timezone,
imported_data_requested: false,
include: %{imports: false, time_labels: false},
preloaded_goals: []
})
end
@ -85,7 +85,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: [],
order_by: nil,
timezone: site.timezone,
imported_data_requested: false,
include: %{imports: false, time_labels: false},
preloaded_goals: []
})
end
@ -115,7 +115,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: [],
order_by: nil,
timezone: site.timezone,
imported_data_requested: false,
include: %{imports: false, time_labels: false},
preloaded_goals: []
})
end
@ -160,7 +160,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: [],
order_by: nil,
timezone: site.timezone,
imported_data_requested: false,
include: %{imports: false, time_labels: false},
preloaded_goals: []
})
end
@ -184,7 +184,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: [],
order_by: nil,
timezone: site.timezone,
imported_data_requested: false,
include: %{imports: false, time_labels: false},
preloaded_goals: []
})
end
@ -209,7 +209,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: [],
order_by: nil,
timezone: site.timezone,
imported_data_requested: false,
include: %{imports: false, time_labels: false},
preloaded_goals: []
})
end
@ -248,20 +248,21 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
end
describe "include validation" do
test "setting include.imports", %{site: site} do
test "setting include values", %{site: site} do
%{
"metrics" => ["visitors"],
"date_range" => "all",
"include" => %{"imports" => true}
"dimensions" => ["time"],
"include" => %{"imports" => true, "time_labels" => true}
}
|> check_success(site, %{
metrics: [:visitors],
date_range: @date_range,
filters: [],
dimensions: [],
dimensions: ["time"],
order_by: nil,
timezone: site.timezone,
imported_data_requested: true,
include: %{imports: true, time_labels: true},
preloaded_goals: []
})
end
@ -274,6 +275,15 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
}
|> check_error(site, ~r/Invalid include passed/)
end
test "setting include.time_labels without time dimension", %{site: site} do
%{
"metrics" => ["visitors"],
"date_range" => "all",
"include" => %{"time_labels" => true}
}
|> check_error(site, ~r/Invalid include.time_labels: requires a time dimension/)
end
end
describe "event:goal filter validation" do
@ -297,7 +307,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: [],
order_by: nil,
timezone: site.timezone,
imported_data_requested: false,
include: %{imports: false, time_labels: false},
preloaded_goals: [{:page, "/thank-you"}, {:event, "Signup"}]
})
end
@ -379,7 +389,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: ["event:#{unquote(dimension)}"],
order_by: nil,
timezone: site.timezone,
imported_data_requested: false,
include: %{imports: false, time_labels: false},
preloaded_goals: []
})
end
@ -399,7 +409,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: ["visit:#{unquote(dimension)}"],
order_by: nil,
timezone: site.timezone,
imported_data_requested: false,
include: %{imports: false, time_labels: false},
preloaded_goals: []
})
end
@ -418,7 +428,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: ["event:props:foobar"],
order_by: nil,
timezone: site.timezone,
imported_data_requested: false,
include: %{imports: false, time_labels: false},
preloaded_goals: []
})
end
@ -474,7 +484,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: [],
order_by: [{:events, :desc}, {:visitors, :asc}],
timezone: site.timezone,
imported_data_requested: false,
include: %{imports: false, time_labels: false},
preloaded_goals: []
})
end
@ -493,7 +503,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: ["event:name"],
order_by: [{"event:name", :desc}],
timezone: site.timezone,
imported_data_requested: false,
include: %{imports: false, time_labels: false},
preloaded_goals: []
})
end
@ -589,7 +599,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: [],
order_by: nil,
timezone: site.timezone,
imported_data_requested: false,
include: %{imports: false, time_labels: false},
preloaded_goals: [event: "Signup"]
})
end
@ -609,7 +619,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: ["event:goal"],
order_by: nil,
timezone: site.timezone,
imported_data_requested: false,
include: %{imports: false, time_labels: false},
preloaded_goals: [event: "Signup"]
})
end
@ -631,7 +641,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: [],
order_by: nil,
timezone: site.timezone,
imported_data_requested: false,
include: %{imports: false, time_labels: false},
preloaded_goals: [event: "Signup"]
})
end
@ -675,7 +685,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: ["visit:device"],
order_by: nil,
timezone: site.timezone,
imported_data_requested: false,
include: %{imports: false, time_labels: false},
preloaded_goals: []
})
end
@ -705,7 +715,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: ["event:page"],
order_by: nil,
timezone: site.timezone,
imported_data_requested: false,
include: %{imports: false, time_labels: false},
preloaded_goals: []
})
end
@ -723,7 +733,7 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
dimensions: [],
order_by: nil,
timezone: site.timezone,
imported_data_requested: false,
include: %{imports: false, time_labels: false},
preloaded_goals: []
})
end

View File

@ -0,0 +1,157 @@
defmodule Plausible.Stats.TimeTest do
  use Plausible.DataCase, async: true

  import Plausible.Stats.Time

  describe "time_labels/1" do
    test "with time:month dimension" do
      query = %{
        dimensions: ["visit:device", "time:month"],
        date_range: Date.range(~D[2022-01-17], ~D[2022-02-01])
      }

      # Partial months are labelled with the first day of the month.
      assert time_labels(query) == ["2022-01-01", "2022-02-01"]

      query = %{query | date_range: Date.range(~D[2022-01-01], ~D[2022-03-07])}

      assert time_labels(query) == ["2022-01-01", "2022-02-01", "2022-03-01"]
    end

    test "with time:week dimension" do
      query = %{
        dimensions: ["time:week"],
        date_range: Date.range(~D[2020-12-20], ~D[2021-01-08])
      }

      # The first, partial week is labelled with the range start itself;
      # subsequent weeks are labelled with their Monday.
      assert time_labels(query) == [
               "2020-12-20",
               "2020-12-21",
               "2020-12-28",
               "2021-01-04"
             ]

      query = %{query | date_range: Date.range(~D[2020-12-21], ~D[2021-01-03])}

      assert time_labels(query) == ["2020-12-21", "2020-12-28"]
    end

    test "with time:day dimension" do
      date_range = Date.range(~D[2022-01-17], ~D[2022-02-02])

      query = %{dimensions: ["time:day"], date_range: date_range}

      # One "YYYY-MM-DD" label per day, inclusive of both range ends.
      assert time_labels(query) == Enum.map(date_range, &Date.to_string/1)
    end

    test "with time:hour dimension" do
      query = %{
        dimensions: ["time:hour"],
        date_range: Date.range(~D[2022-01-17], ~D[2022-01-17])
      }

      # A single-day range still yields all 24 hourly buckets.
      assert time_labels(query) == hourly_labels(["2022-01-17"])

      query = %{query | date_range: Date.range(~D[2022-01-17], ~D[2022-01-18])}

      assert time_labels(query) == hourly_labels(["2022-01-17", "2022-01-18"])
    end
  end

  # Expected hourly labels: every full hour of each given day, formatted
  # as "YYYY-MM-DD HH:00:00".
  defp hourly_labels(days) do
    for day <- days, hour <- 0..23 do
      hh = hour |> Integer.to_string() |> String.pad_leading(2, "0")
      "#{day} #{hh}:00:00"
    end
  end
end

View File

@ -1,32 +1,32 @@
date,visitors,pageviews,visits,views_per_visit,bounce_rate,visit_duration
2021-09-20,0,0,0,0.0,,
2021-09-21,0,0,0,0.0,,
2021-09-22,0,0,0,0.0,,
2021-09-23,0,0,0,0.0,,
2021-09-24,0,0,0,0.0,,
2021-09-25,0,0,0,0.0,,
2021-09-26,0,0,0,0.0,,
2021-09-27,0,0,0,0.0,,
2021-09-28,0,0,0,0.0,,
2021-09-29,0,0,0,0.0,,
2021-09-30,0,0,0,0.0,,
2021-10-01,0,0,0,0.0,,
2021-10-02,0,0,0,0.0,,
2021-10-03,0,0,0,0.0,,
2021-10-04,0,0,0,0.0,,
2021-10-05,0,0,0,0.0,,
2021-10-06,0,0,0,0.0,,
2021-10-07,0,0,0,0.0,,
2021-10-08,0,0,0,0.0,,
2021-10-09,0,0,0,0.0,,
2021-10-10,0,0,0,0.0,,
2021-10-11,0,0,0,0.0,,
2021-10-12,0,0,0,0.0,,
2021-10-13,0,0,0,0.0,,
2021-10-14,0,0,0,0.0,,
2021-10-15,0,0,0,0.0,,
2021-10-16,0,0,0,0.0,,
2021-10-17,0,0,0,0.0,,
2021-10-18,0,0,0,0.0,,
2021-10-19,0,0,0,0.0,,
2021-09-20,0,0,0,0.0,0.0,
2021-09-21,0,0,0,0.0,0.0,
2021-09-22,0,0,0,0.0,0.0,
2021-09-23,0,0,0,0.0,0.0,
2021-09-24,0,0,0,0.0,0.0,
2021-09-25,0,0,0,0.0,0.0,
2021-09-26,0,0,0,0.0,0.0,
2021-09-27,0,0,0,0.0,0.0,
2021-09-28,0,0,0,0.0,0.0,
2021-09-29,0,0,0,0.0,0.0,
2021-09-30,0,0,0,0.0,0.0,
2021-10-01,0,0,0,0.0,0.0,
2021-10-02,0,0,0,0.0,0.0,
2021-10-03,0,0,0,0.0,0.0,
2021-10-04,0,0,0,0.0,0.0,
2021-10-05,0,0,0,0.0,0.0,
2021-10-06,0,0,0,0.0,0.0,
2021-10-07,0,0,0,0.0,0.0,
2021-10-08,0,0,0,0.0,0.0,
2021-10-09,0,0,0,0.0,0.0,
2021-10-10,0,0,0,0.0,0.0,
2021-10-11,0,0,0,0.0,0.0,
2021-10-12,0,0,0,0.0,0.0,
2021-10-13,0,0,0,0.0,0.0,
2021-10-14,0,0,0,0.0,0.0,
2021-10-15,0,0,0,0.0,0.0,
2021-10-16,0,0,0,0.0,0.0,
2021-10-17,0,0,0,0.0,0.0,
2021-10-18,0,0,0,0.0,0.0,
2021-10-19,0,0,0,0.0,0.0,
2021-10-20,1,1,1,2.0,0,60

1 date visitors pageviews visits views_per_visit bounce_rate visit_duration
2 2021-09-20 0 0 0 0.0 0.0
3 2021-09-21 0 0 0 0.0 0.0
4 2021-09-22 0 0 0 0.0 0.0
5 2021-09-23 0 0 0 0.0 0.0
6 2021-09-24 0 0 0 0.0 0.0
7 2021-09-25 0 0 0 0.0 0.0
8 2021-09-26 0 0 0 0.0 0.0
9 2021-09-27 0 0 0 0.0 0.0
10 2021-09-28 0 0 0 0.0 0.0
11 2021-09-29 0 0 0 0.0 0.0
12 2021-09-30 0 0 0 0.0 0.0
13 2021-10-01 0 0 0 0.0 0.0
14 2021-10-02 0 0 0 0.0 0.0
15 2021-10-03 0 0 0 0.0 0.0
16 2021-10-04 0 0 0 0.0 0.0
17 2021-10-05 0 0 0 0.0 0.0
18 2021-10-06 0 0 0 0.0 0.0
19 2021-10-07 0 0 0 0.0 0.0
20 2021-10-08 0 0 0 0.0 0.0
21 2021-10-09 0 0 0 0.0 0.0
22 2021-10-10 0 0 0 0.0 0.0
23 2021-10-11 0 0 0 0.0 0.0
24 2021-10-12 0 0 0 0.0 0.0
25 2021-10-13 0 0 0 0.0 0.0
26 2021-10-14 0 0 0 0.0 0.0
27 2021-10-15 0 0 0 0.0 0.0
28 2021-10-16 0 0 0 0.0 0.0
29 2021-10-17 0 0 0 0.0 0.0
30 2021-10-18 0 0 0 0.0 0.0
31 2021-10-19 0 0 0 0.0 0.0
32 2021-10-20 1 1 1 2.0 0 60

View File

@ -1,32 +1,32 @@
date,visitors,pageviews,visits,views_per_visit,bounce_rate,visit_duration
2021-09-20,1,1,1,1.0,100,0
2021-09-21,0,0,0,0.0,,
2021-09-22,0,0,0,0.0,,
2021-09-23,0,0,0,0.0,,
2021-09-24,0,0,0,0.0,,
2021-09-25,0,0,0,0.0,,
2021-09-26,0,0,0,0.0,,
2021-09-27,0,0,0,0.0,,
2021-09-28,0,0,0,0.0,,
2021-09-29,0,0,0,0.0,,
2021-09-30,0,0,0,0.0,,
2021-10-01,0,0,0,0.0,,
2021-10-02,0,0,0,0.0,,
2021-10-03,0,0,0,0.0,,
2021-10-04,0,0,0,0.0,,
2021-10-05,0,0,0,0.0,,
2021-10-06,0,0,0,0.0,,
2021-10-07,0,0,0,0.0,,
2021-10-08,0,0,0,0.0,,
2021-10-09,0,0,0,0.0,,
2021-10-10,0,0,0,0.0,,
2021-10-11,0,0,0,0.0,,
2021-10-12,0,0,0,0.0,,
2021-10-13,0,0,0,0.0,,
2021-10-14,0,0,0,0.0,,
2021-10-15,0,0,0,0.0,,
2021-10-16,0,0,0,0.0,,
2021-10-17,0,0,0,0.0,,
2021-10-18,0,0,0,0.0,,
2021-09-21,0,0,0,0.0,0.0,
2021-09-22,0,0,0,0.0,0.0,
2021-09-23,0,0,0,0.0,0.0,
2021-09-24,0,0,0,0.0,0.0,
2021-09-25,0,0,0,0.0,0.0,
2021-09-26,0,0,0,0.0,0.0,
2021-09-27,0,0,0,0.0,0.0,
2021-09-28,0,0,0,0.0,0.0,
2021-09-29,0,0,0,0.0,0.0,
2021-09-30,0,0,0,0.0,0.0,
2021-10-01,0,0,0,0.0,0.0,
2021-10-02,0,0,0,0.0,0.0,
2021-10-03,0,0,0,0.0,0.0,
2021-10-04,0,0,0,0.0,0.0,
2021-10-05,0,0,0,0.0,0.0,
2021-10-06,0,0,0,0.0,0.0,
2021-10-07,0,0,0,0.0,0.0,
2021-10-08,0,0,0,0.0,0.0,
2021-10-09,0,0,0,0.0,0.0,
2021-10-10,0,0,0,0.0,0.0,
2021-10-11,0,0,0,0.0,0.0,
2021-10-12,0,0,0,0.0,0.0,
2021-10-13,0,0,0,0.0,0.0,
2021-10-14,0,0,0,0.0,0.0,
2021-10-15,0,0,0,0.0,0.0,
2021-10-16,0,0,0,0.0,0.0,
2021-10-17,0,0,0,0.0,0.0,
2021-10-18,0,0,0,0.0,0.0,
2021-10-19,2,2,2,1.0,50,30
2021-10-20,1,2,1,2.0,0,60

1 date visitors pageviews visits views_per_visit bounce_rate visit_duration
2 2021-09-20 1 1 1 1.0 100 0
3 2021-09-21 0 0 0 0.0 0.0
4 2021-09-22 0 0 0 0.0 0.0
5 2021-09-23 0 0 0 0.0 0.0
6 2021-09-24 0 0 0 0.0 0.0
7 2021-09-25 0 0 0 0.0 0.0
8 2021-09-26 0 0 0 0.0 0.0
9 2021-09-27 0 0 0 0.0 0.0
10 2021-09-28 0 0 0 0.0 0.0
11 2021-09-29 0 0 0 0.0 0.0
12 2021-09-30 0 0 0 0.0 0.0
13 2021-10-01 0 0 0 0.0 0.0
14 2021-10-02 0 0 0 0.0 0.0
15 2021-10-03 0 0 0 0.0 0.0
16 2021-10-04 0 0 0 0.0 0.0
17 2021-10-05 0 0 0 0.0 0.0
18 2021-10-06 0 0 0 0.0 0.0
19 2021-10-07 0 0 0 0.0 0.0
20 2021-10-08 0 0 0 0.0 0.0
21 2021-10-09 0 0 0 0.0 0.0
22 2021-10-10 0 0 0 0.0 0.0
23 2021-10-11 0 0 0 0.0 0.0
24 2021-10-12 0 0 0 0.0 0.0
25 2021-10-13 0 0 0 0.0 0.0
26 2021-10-14 0 0 0 0.0 0.0
27 2021-10-15 0 0 0 0.0 0.0
28 2021-10-16 0 0 0 0.0 0.0
29 2021-10-17 0 0 0 0.0 0.0
30 2021-10-18 0 0 0 0.0 0.0
31 2021-10-19 2 2 2 1.0 50 30
32 2021-10-20 1 2 1 2.0 0 60

View File

@ -1,7 +1,7 @@
date,visitors,pageviews,visits,views_per_visit,bounce_rate,visit_duration
2021-05-01,1,1,1,1.0,100,0
2021-06-01,0,0,0,0.0,,
2021-07-01,0,0,0,0.0,,
2021-08-01,0,0,0,0.0,,
2021-06-01,0,0,0,0.0,0.0,
2021-07-01,0,0,0,0.0,0.0,
2021-08-01,0,0,0,0.0,0.0,
2021-09-01,1,1,1,1.0,100,0
2021-10-01,3,4,3,1.33,33,40

1 date visitors pageviews visits views_per_visit bounce_rate visit_duration
2 2021-05-01 1 1 1 1.0 100 0
3 2021-06-01 0 0 0 0.0 0.0
4 2021-07-01 0 0 0 0.0 0.0
5 2021-08-01 0 0 0 0.0 0.0
6 2021-09-01 1 1 1 1.0 100 0
7 2021-10-01 3 4 3 1.33 33 40

View File

@ -1096,7 +1096,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController.QueryTest do
end
describe "timeseries" do
test "shows hourly data for a certain date", %{conn: conn, site: site} do
test "shows hourly data for a certain date with time_labels", %{conn: conn, site: site} do
populate_stats(site, [
build(:pageview, user_id: @user_id, timestamp: ~N[2021-01-01 00:00:00]),
build(:pageview, user_id: @user_id, timestamp: ~N[2021-01-01 00:10:00]),
@ -1108,16 +1108,44 @@ defmodule PlausibleWeb.Api.ExternalStatsController.QueryTest do
"site_id" => site.domain,
"metrics" => ["visitors", "pageviews", "visits", "visit_duration", "bounce_rate"],
"date_range" => ["2021-01-01", "2021-01-01"],
"dimensions" => ["time:hour"]
"dimensions" => ["time:hour"],
"include" => %{"time_labels" => true}
})
assert json_response(conn, 200)["results"] == [
%{"dimensions" => ["2021-01-01T00:00:00Z"], "metrics" => [1, 2, 1, 600, 0]},
%{"dimensions" => ["2021-01-01T23:00:00Z"], "metrics" => [1, 1, 1, 0, 100]}
%{"dimensions" => ["2021-01-01 00:00:00"], "metrics" => [1, 2, 1, 600, 0]},
%{"dimensions" => ["2021-01-01 23:00:00"], "metrics" => [1, 1, 1, 0, 100]}
]
assert json_response(conn, 200)["meta"]["time_labels"] == [
"2021-01-01 00:00:00",
"2021-01-01 01:00:00",
"2021-01-01 02:00:00",
"2021-01-01 03:00:00",
"2021-01-01 04:00:00",
"2021-01-01 05:00:00",
"2021-01-01 06:00:00",
"2021-01-01 07:00:00",
"2021-01-01 08:00:00",
"2021-01-01 09:00:00",
"2021-01-01 10:00:00",
"2021-01-01 11:00:00",
"2021-01-01 12:00:00",
"2021-01-01 13:00:00",
"2021-01-01 14:00:00",
"2021-01-01 15:00:00",
"2021-01-01 16:00:00",
"2021-01-01 17:00:00",
"2021-01-01 18:00:00",
"2021-01-01 19:00:00",
"2021-01-01 20:00:00",
"2021-01-01 21:00:00",
"2021-01-01 22:00:00",
"2021-01-01 23:00:00"
]
end
test "shows last 7 days of visitors", %{conn: conn, site: site} do
test "shows last 7 days of visitors with time labels", %{conn: conn, site: site} do
populate_stats(site, [
build(:pageview, timestamp: ~N[2021-01-01 00:00:00]),
build(:pageview, timestamp: ~N[2021-01-07 23:59:00])
@ -1128,13 +1156,53 @@ defmodule PlausibleWeb.Api.ExternalStatsController.QueryTest do
"site_id" => site.domain,
"metrics" => ["visitors"],
"date_range" => ["2021-01-01", "2021-01-07"],
"dimensions" => ["time"]
"dimensions" => ["time"],
"include" => %{"time_labels" => true}
})
assert json_response(conn, 200)["results"] == [
%{"dimensions" => ["2021-01-01"], "metrics" => [1]},
%{"dimensions" => ["2021-01-07"], "metrics" => [1]}
]
assert json_response(conn, 200)["meta"]["time_labels"] == [
"2021-01-01",
"2021-01-02",
"2021-01-03",
"2021-01-04",
"2021-01-05",
"2021-01-06",
"2021-01-07"
]
end
test "shows weekly data with time labels", %{conn: conn, site: site} do
populate_stats(site, [
build(:pageview, timestamp: ~N[2021-01-01 00:00:00]),
build(:pageview, timestamp: ~N[2021-01-03 23:59:00]),
build(:pageview, timestamp: ~N[2021-01-07 23:59:00])
])
conn =
post(conn, "/api/v2/query", %{
"site_id" => site.domain,
"metrics" => ["visitors"],
"date_range" => ["2020-12-20", "2021-01-07"],
"dimensions" => ["time:week"],
"include" => %{"time_labels" => true}
})
assert json_response(conn, 200)["results"] == [
%{"dimensions" => ["2020-12-28"], "metrics" => [2]},
%{"dimensions" => ["2021-01-04"], "metrics" => [1]}
]
assert json_response(conn, 200)["meta"]["time_labels"] == [
"2020-12-20",
"2020-12-21",
"2020-12-28",
"2021-01-04"
]
end
test "shows last 6 months of visitors", %{conn: conn, site: site} do
@ -1150,7 +1218,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController.QueryTest do
"site_id" => site.domain,
"metrics" => ["visitors"],
"date_range" => ["2020-07-01", "2021-01-31"],
"dimensions" => ["time"]
"dimensions" => ["time:month"]
})
assert json_response(conn, 200)["results"] == [
@ -1173,7 +1241,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController.QueryTest do
"site_id" => site.domain,
"metrics" => ["visitors"],
"date_range" => ["2020-01-01", "2021-01-01"],
"dimensions" => ["time"]
"dimensions" => ["time:month"]
})
assert json_response(conn, 200)["results"] == [
@ -1196,7 +1264,8 @@ defmodule PlausibleWeb.Api.ExternalStatsController.QueryTest do
"site_id" => site.domain,
"metrics" => ["visitors"],
"date_range" => ["2020-01-01", "2021-01-07"],
"dimensions" => ["time:day"]
"dimensions" => ["time:day"],
"include" => %{"time_labels" => true}
})
assert json_response(conn, 200)["results"] == [
@ -1204,6 +1273,8 @@ defmodule PlausibleWeb.Api.ExternalStatsController.QueryTest do
%{"dimensions" => ["2020-12-31"], "metrics" => [1]},
%{"dimensions" => ["2021-01-01"], "metrics" => [2]}
]
assert length(json_response(conn, 200)["meta"]["time_labels"]) == 373
end
test "shows a custom range with daily interval", %{conn: conn, site: site} do
@ -3982,11 +4053,11 @@ defmodule PlausibleWeb.Api.ExternalStatsController.QueryTest do
})
assert json_response(conn, 200)["results"] == [
%{"dimensions" => ["2021-01-01T00:00:00Z", "Google"], "metrics" => [1]},
%{"dimensions" => ["2021-01-02T00:00:00Z", "Google"], "metrics" => [1]},
%{"dimensions" => ["2021-01-02T00:00:00Z", "Direct / None"], "metrics" => [1]},
%{"dimensions" => ["2021-01-03T00:00:00Z", "Direct / None"], "metrics" => [1]},
%{"dimensions" => ["2021-01-03T00:00:00Z", "Twitter"], "metrics" => [1]}
%{"dimensions" => ["2021-01-01 00:00:00", "Google"], "metrics" => [1]},
%{"dimensions" => ["2021-01-02 00:00:00", "Google"], "metrics" => [1]},
%{"dimensions" => ["2021-01-02 00:00:00", "Direct / None"], "metrics" => [1]},
%{"dimensions" => ["2021-01-03 00:00:00", "Direct / None"], "metrics" => [1]},
%{"dimensions" => ["2021-01-03 00:00:00", "Twitter"], "metrics" => [1]}
]
end
end

View File

@ -131,7 +131,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController.TimeseriesTest do
"visits" => 0,
"pageviews" => 0,
"visit_duration" => nil,
"bounce_rate" => nil
"bounce_rate" => 0
},
%{
"date" => "2021-01-01 02:00:00",
@ -139,7 +139,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController.TimeseriesTest do
"visits" => 0,
"pageviews" => 0,
"visit_duration" => nil,
"bounce_rate" => nil
"bounce_rate" => 0
},
%{
"date" => "2021-01-01 03:00:00",
@ -147,7 +147,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController.TimeseriesTest do
"visits" => 0,
"pageviews" => 0,
"visit_duration" => nil,
"bounce_rate" => nil
"bounce_rate" => 0
},
%{
"date" => "2021-01-01 04:00:00",
@ -155,7 +155,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController.TimeseriesTest do
"visits" => 0,
"pageviews" => 0,
"visit_duration" => nil,
"bounce_rate" => nil
"bounce_rate" => 0
},
%{
"date" => "2021-01-01 05:00:00",
@ -163,7 +163,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController.TimeseriesTest do
"visits" => 0,
"pageviews" => 0,
"visit_duration" => nil,
"bounce_rate" => nil
"bounce_rate" => 0
},
%{
"date" => "2021-01-01 06:00:00",
@ -171,7 +171,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController.TimeseriesTest do
"visits" => 0,
"pageviews" => 0,
"visit_duration" => nil,
"bounce_rate" => nil
"bounce_rate" => 0
},
%{
"date" => "2021-01-01 07:00:00",
@ -179,7 +179,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController.TimeseriesTest do
"visits" => 0,
"pageviews" => 0,
"visit_duration" => nil,
"bounce_rate" => nil
"bounce_rate" => 0
},
%{
"date" => "2021-01-01 08:00:00",
@ -187,7 +187,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController.TimeseriesTest do
"visits" => 0,
"pageviews" => 0,
"visit_duration" => nil,
"bounce_rate" => nil
"bounce_rate" => 0
},
%{
"date" => "2021-01-01 09:00:00",
@ -195,7 +195,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController.TimeseriesTest do
"visits" => 0,
"pageviews" => 0,
"visit_duration" => nil,
"bounce_rate" => nil
"bounce_rate" => 0
},
%{
"date" => "2021-01-01 10:00:00",
@ -203,7 +203,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController.TimeseriesTest do
"visits" => 0,
"pageviews" => 0,
"visit_duration" => nil,
"bounce_rate" => nil
"bounce_rate" => 0
},
%{
"date" => "2021-01-01 11:00:00",
@ -211,7 +211,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController.TimeseriesTest do
"visits" => 0,
"pageviews" => 0,
"visit_duration" => nil,
"bounce_rate" => nil
"bounce_rate" => 0
},
%{
"date" => "2021-01-01 12:00:00",
@ -219,7 +219,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController.TimeseriesTest do
"visits" => 0,
"pageviews" => 0,
"visit_duration" => nil,
"bounce_rate" => nil
"bounce_rate" => 0
},
%{
"date" => "2021-01-01 13:00:00",
@ -227,7 +227,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController.TimeseriesTest do
"visits" => 0,
"pageviews" => 0,
"visit_duration" => nil,
"bounce_rate" => nil
"bounce_rate" => 0
},
%{
"date" => "2021-01-01 14:00:00",
@ -235,7 +235,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController.TimeseriesTest do
"visits" => 0,
"pageviews" => 0,
"visit_duration" => nil,
"bounce_rate" => nil
"bounce_rate" => 0
},
%{
"date" => "2021-01-01 15:00:00",
@ -243,7 +243,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController.TimeseriesTest do
"visits" => 0,
"pageviews" => 0,
"visit_duration" => nil,
"bounce_rate" => nil
"bounce_rate" => 0
},
%{
"date" => "2021-01-01 16:00:00",
@ -251,7 +251,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController.TimeseriesTest do
"visits" => 0,
"pageviews" => 0,
"visit_duration" => nil,
"bounce_rate" => nil
"bounce_rate" => 0
},
%{
"date" => "2021-01-01 17:00:00",
@ -259,7 +259,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController.TimeseriesTest do
"visits" => 0,
"pageviews" => 0,
"visit_duration" => nil,
"bounce_rate" => nil
"bounce_rate" => 0
},
%{
"date" => "2021-01-01 18:00:00",
@ -267,7 +267,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController.TimeseriesTest do
"visits" => 0,
"pageviews" => 0,
"visit_duration" => nil,
"bounce_rate" => nil
"bounce_rate" => 0
},
%{
"date" => "2021-01-01 19:00:00",
@ -275,7 +275,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController.TimeseriesTest do
"visits" => 0,
"pageviews" => 0,
"visit_duration" => nil,
"bounce_rate" => nil
"bounce_rate" => 0
},
%{
"date" => "2021-01-01 20:00:00",
@ -283,7 +283,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController.TimeseriesTest do
"visits" => 0,
"pageviews" => 0,
"visit_duration" => nil,
"bounce_rate" => nil
"bounce_rate" => 0
},
%{
"date" => "2021-01-01 21:00:00",
@ -291,7 +291,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController.TimeseriesTest do
"visits" => 0,
"pageviews" => 0,
"visit_duration" => nil,
"bounce_rate" => nil
"bounce_rate" => 0
},
%{
"date" => "2021-01-01 22:00:00",
@ -299,7 +299,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController.TimeseriesTest do
"visits" => 0,
"pageviews" => 0,
"visit_duration" => nil,
"bounce_rate" => nil
"bounce_rate" => 0
},
%{
"date" => "2021-01-01 23:00:00",
@ -1712,7 +1712,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController.TimeseriesTest do
"visits" => 1
},
%{
"bounce_rate" => nil,
"bounce_rate" => 0,
"date" => "2021-01-02",
"events" => 0,
"pageviews" => 0,

View File

@ -480,13 +480,7 @@ defmodule PlausibleWeb.Api.StatsController.MainGraphTest do
assert %{"plot" => plot} = json_response(conn, 200)
expected_plot =
if FunWithFlags.enabled?(:experimental_session_count) do
~w[1 1 1 1 1 2 2 2 2 2 2 1 1 1 1 1 0 0 0 0 0 0 0 0 0 1 1 1 0 0]
else
~w[0 0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 1 0 1 0 0]
end
expected_plot = ~w[1 1 1 1 1 2 2 2 2 2 2 1 1 1 1 1 0 0 0 0 0 0 0 0 0 1 1 1 0 0]
assert plot == Enum.map(expected_plot, &String.to_integer/1)
end
@ -565,11 +559,7 @@ defmodule PlausibleWeb.Api.StatsController.MainGraphTest do
assert %{"plot" => plot} = json_response(conn, 200)
if FunWithFlags.enabled?(:experimental_session_count) do
assert plot == [1, 1, 0, 0, 0]
else
assert plot == [1, 1, 0, 0, 1]
end
assert plot == [1, 1, 0, 0, 0]
end
end

View File

@ -85,7 +85,7 @@ defmodule PlausibleWeb.Api.StatsController.TopStatsTest do
conn =
get(
conn,
"/api/stats/#{site.domain}/top-stats?period=day&date=2021-01-01&experimental_session_count=true"
"/api/stats/#{site.domain}/top-stats?period=day&date=2021-01-01"
)
res = json_response(conn, 200)

View File

@ -323,9 +323,9 @@ defmodule PlausibleWeb.StatsControllerTest do
"visit_duration"
],
["2021-09-20", "1", "1", "1", "1.0", "100", "0"],
["2021-09-27", "0", "0", "0", "0.0", "", ""],
["2021-10-04", "0", "0", "0", "0.0", "", ""],
["2021-10-11", "0", "0", "0", "0.0", "", ""],
["2021-09-27", "0", "0", "0", "0.0", "0.0", ""],
["2021-10-04", "0", "0", "0", "0.0", "0.0", ""],
["2021-10-11", "0", "0", "0", "0.0", "0.0", ""],
["2021-10-18", "3", "4", "3", "1.33", "33", "40"],
[""]
]

View File

@ -11,10 +11,8 @@ Application.ensure_all_started(:double)
# Temporary flag to test `experimental_reduced_joins` flag on all tests.
if System.get_env("TEST_EXPERIMENTAL_REDUCED_JOINS") == "1" do
FunWithFlags.enable(:experimental_reduced_joins)
FunWithFlags.enable(:experimental_session_count)
else
FunWithFlags.disable(:experimental_reduced_joins)
FunWithFlags.disable(:experimental_session_count)
end
Ecto.Adapters.SQL.Sandbox.mode(Plausible.Repo, :manual)