Average Scroll Depth Metric: put scroll depth on the dashboard under a feature flag (#4832)

* migration: add scroll_depth to events_v2

* (cherry-pick) ingest scroll depth

* replace convoluted test with more concise ones

* QueryParser: parse internal scroll_depth metric + validation

* turn QueryComparisonsTest into QueryInternalTest

* rename file

* (cherry-pick) query scroll depth 15b14d3

...and move the tests into `internal_query_test.exs`

* review feedback

* Get rid of unnecessary separation between aggregate and group scroll depth
* Drop irrelevant other metrics in tests

* add test ensuring scroll depth unavailable in Stats API v1

* Put scroll depth on the dashboard

* Top Stats
* Main Graph
* Top Pages > Details

* feature flag for dashboard scroll depth access

* ignore credo warning

* enable scroll_depth flag in tests

* remove duplication

* write timestamps explicitly in a test

* revert moving tests around

* Add query_comparisons_test back
* Move scroll_depth tests into query_test
* Delete query_internal_test

* rename setup util (got updated on master)

* use pageleave_factory where applicable

* Use the correct generated query-api.d.ts

* npm format

RobertJoonas 2024-11-20 14:13:04 +01:00 committed by GitHub
parent a29eb3d3ca
commit 6822b29016
26 changed files with 711 additions and 67 deletions
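
Before the diffs, a brief sketch of the aggregation this PR implements (the module below is illustrative only, not part of the change): every `pageleave` event carries a `scroll_depth` percentage, the query layer takes the maximum per visitor within each dimension group, and those per-visitor maximums are averaged and rounded. The sample numbers mirror the internal query tests further down (40 and 60 for one visitor, 80 for another, averaging to 70).

defmodule ScrollDepthSketch do
  # Illustrative helper only: mirrors what the internal query does in
  # ClickHouse (max scroll depth per visitor, then the rounded average of
  # those maximums, falling back to 0 when there is no pageleave data).
  def average_scroll_depth(pageleaves) do
    per_visitor_max =
      pageleaves
      |> Enum.group_by(fn {user_id, _depth} -> user_id end, fn {_user_id, depth} -> depth end)
      |> Enum.map(fn {_user_id, depths} -> Enum.max(depths) end)

    case per_visitor_max do
      [] -> 0
      maxes -> round(Enum.sum(maxes) / length(maxes))
    end
  end
end

# Visitor 123 leaves at 40% and 60% (max 60), visitor 456 at 80%:
ScrollDepthSketch.average_scroll_depth([{123, 40}, {123, 60}, {456, 80}])
# => 70, the value the scroll_depth query tests below expect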

View File

@ -15,7 +15,8 @@ export function getGraphableMetrics(query, site) {
} else if (isGoalFilter) {
return ["visitors", "events", "conversion_rate"]
} else if (isPageFilter) {
return ["visitors", "visits", "pageviews", "bounce_rate"]
const pageFilterMetrics = ["visitors", "visits", "pageviews", "bounce_rate"]
return site.flags.scroll_depth ? [...pageFilterMetrics, "scroll_depth"] : pageFilterMetrics
} else {
return ["visitors", "visits", "pageviews", "views_per_visit", "bounce_rate", "visit_duration"]
}

View File

@ -47,12 +47,14 @@ function PagesModal() {
]
}
return [
const defaultMetrics = [
metrics.createVisitors({renderLabel: (_query) => "Visitors" }),
metrics.createPageviews(),
metrics.createBounceRate(),
metrics.createTimeOnPage()
]
return site.flags.scroll_depth ? [...defaultMetrics, metrics.createScrollDepth()] : defaultMetrics
}
return (

View File

@ -38,6 +38,7 @@ export const MetricFormatterShort: Record<
bounce_rate: percentageFormatter,
conversion_rate: percentageFormatter,
scroll_depth: percentageFormatter,
exit_rate: percentageFormatter,
group_conversion_rate: percentageFormatter,
percentage: percentageFormatter,
@ -65,6 +66,7 @@ export const MetricFormatterLong: Record<
bounce_rate: percentageFormatter,
conversion_rate: percentageFormatter,
scroll_depth: percentageFormatter,
exit_rate: percentageFormatter,
group_conversion_rate: percentageFormatter,
percentage: percentageFormatter,

View File

@ -172,7 +172,7 @@ export const createVisitDuration = (props) => {
export const createBounceRate = (props) => {
const renderLabel = (_query) => 'Bounce Rate'
return new Metric({
width: 'w-32',
width: 'w-28',
...props,
key: 'bounce_rate',
renderLabel,
@ -194,7 +194,7 @@ export const createPageviews = (props) => {
export const createTimeOnPage = (props) => {
const renderLabel = (_query) => 'Time on Page'
return new Metric({
width: 'w-32',
width: 'w-28',
...props,
key: 'time_on_page',
renderLabel,
@ -212,3 +212,14 @@ export const createExitRate = (props) => {
sortable: false
})
}
export const createScrollDepth = (props) => {
const renderLabel = (_query) => 'Scroll Depth'
return new Metric({
width: 'w-28',
...props,
key: 'scroll_depth',
renderLabel,
sortable: false
})
}

View File

@ -18,7 +18,8 @@ export type Metric =
| "group_conversion_rate"
| "time_on_page"
| "total_revenue"
| "average_revenue";
| "average_revenue"
| "scroll_depth";
export type DateRangeShorthand = "30m" | "realtime" | "all" | "day" | "7d" | "30d" | "month" | "6mo" | "12mo" | "year";
/**
* @minItems 2

View File

@ -541,6 +541,17 @@ defmodule Plausible.Stats.Filters.QueryParser do
end
end
defp validate_metric(:scroll_depth = metric, query) do
page_dimension? = Enum.member?(query.dimensions, "event:page")
toplevel_page_filter? = not is_nil(Filters.get_toplevel_filter(query, "event:page"))
if page_dimension? or toplevel_page_filter? do
:ok
else
{:error, "Metric `#{metric}` can only be queried with event:page filters or dimensions."}
end
end
defp validate_metric(:views_per_visit = metric, query) do
cond do
Filters.filtering_on_dimension?(query, "event:page") ->

View File

@ -18,7 +18,8 @@ defmodule Plausible.Stats.Metrics do
:conversion_rate,
:group_conversion_rate,
:time_on_page,
:percentage
:percentage,
:scroll_depth
] ++ on_ee(do: Plausible.Stats.Goal.Revenue.revenue_metrics(), else: [])
@metric_mappings Enum.into(@all_metrics, %{}, fn metric -> {to_string(metric), metric} end)

View File

@ -245,6 +245,7 @@ defmodule Plausible.Stats.SQL.Expression do
def event_metric(:percentage), do: %{}
def event_metric(:conversion_rate), do: %{}
def event_metric(:scroll_depth), do: %{}
def event_metric(:group_conversion_rate), do: %{}
def event_metric(:total_visitors), do: %{}

View File

@ -126,7 +126,7 @@ defmodule Plausible.Stats.SQL.QueryBuilder do
|> Enum.reduce(%{}, &Map.merge/2)
end
defp build_group_by(q, table, query) do
def build_group_by(q, table, query) do
Enum.reduce(query.dimensions, q, &dimension_group_by(&2, table, query, &1))
end

View File

@ -16,6 +16,7 @@ defmodule Plausible.Stats.SQL.SpecialMetrics do
|> maybe_add_percentage_metric(site, query)
|> maybe_add_global_conversion_rate(site, query)
|> maybe_add_group_conversion_rate(site, query)
|> maybe_add_scroll_depth(site, query)
end
defp maybe_add_percentage_metric(q, site, query) do
@ -121,6 +122,55 @@ defmodule Plausible.Stats.SQL.SpecialMetrics do
end
end
def maybe_add_scroll_depth(q, site, query) do
if :scroll_depth in query.metrics do
max_per_visitor_q =
Base.base_event_query(site, query)
|> where([e], e.name == "pageleave")
|> select([e], %{
user_id: e.user_id,
max_scroll_depth: max(e.scroll_depth)
})
|> SQL.QueryBuilder.build_group_by(:events, query)
|> group_by([e], e.user_id)
dim_shortnames = Enum.map(query.dimensions, fn dim -> shortname(query, dim) end)
dim_select =
dim_shortnames
|> Enum.map(fn dim -> {dim, dynamic([p], field(p, ^dim))} end)
|> Map.new()
dim_group_by =
dim_shortnames
|> Enum.map(fn dim -> dynamic([p], field(p, ^dim)) end)
scroll_depth_q =
subquery(max_per_visitor_q)
|> select([p], %{
scroll_depth: fragment("toUInt8(round(ifNotFinite(avg(?), 0)))", p.max_scroll_depth)
})
|> select_merge(^dim_select)
|> group_by(^dim_group_by)
join_on_dim_condition =
if dim_shortnames == [] do
true
else
dim_shortnames
|> Enum.map(fn dim -> dynamic([_e, ..., s], selected_as(^dim) == field(s, ^dim)) end)
# credo:disable-for-next-line Credo.Check.Refactor.Nesting
|> Enum.reduce(fn condition, acc -> dynamic([], ^acc and ^condition) end)
end
q
|> join(:left, [e], s in subquery(scroll_depth_q), on: ^join_on_dim_condition)
|> select_merge_as([_e, ..., s], %{scroll_depth: fragment("any(?)", s.scroll_depth)})
else
q
end
end
# `total_visitors_subquery` returns a subquery which selects `total_visitors` -
# the number used as the denominator in the calculation of `conversion_rate` and
# `percentage` metrics.
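
A rough picture of what `maybe_add_scroll_depth/3` above adds, expressed with plain maps instead of Ecto dynamics (names and data are illustrative, borrowed from the breakdown tests below): the subquery yields one pre-averaged scroll depth per dimension group, and the left join folds that value into the main result rows, with groups lacking pageleave data ending up at 0.

# Illustrative only: the Ecto query above performs this merge as a left join in ClickHouse.
main_rows = [%{page: "/blog", visitors: 3}, %{page: "/another", visitors: 2}]
scroll_by_page = %{"/blog" => 60, "/another" => 25}

Enum.map(main_rows, fn row ->
  Map.put(row, :scroll_depth, Map.get(scroll_by_page, row.page, 0))
end)
# => [%{page: "/blog", visitors: 3, scroll_depth: 60},
#     %{page: "/another", visitors: 2, scroll_depth: 25}]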

View File

@ -74,6 +74,7 @@ defmodule Plausible.Stats.TableDecider do
defp metric_partitioner(_, :average_revenue), do: :event
defp metric_partitioner(_, :total_revenue), do: :event
defp metric_partitioner(_, :scroll_depth), do: :event
defp metric_partitioner(_, :pageviews), do: :event
defp metric_partitioner(_, :events), do: :event
defp metric_partitioner(_, :bounce_rate), do: :session

View File

@ -87,6 +87,7 @@ defmodule Plausible.Stats.Timeseries do
:views_per_visit -> Map.merge(row, %{views_per_visit: 0.0})
:conversion_rate -> Map.merge(row, %{conversion_rate: 0.0})
:group_conversion_rate -> Map.merge(row, %{group_conversion_rate: 0.0})
:scroll_depth -> Map.merge(row, %{scroll_depth: 0})
:bounce_rate -> Map.merge(row, %{bounce_rate: 0.0})
:visit_duration -> Map.merge(row, %{visit_duration: nil})
:average_revenue -> Map.merge(row, %{average_revenue: nil})

View File

@ -193,12 +193,13 @@ defmodule PlausibleWeb.Api.StatsController do
def top_stats(conn, params) do
site = conn.assigns[:site]
current_user = conn.assigns[:current_user]
params = realtime_period_to_30m(params)
query = Query.from(site, params, debug_metadata(conn))
{top_stats, sample_percent} = fetch_top_stats(site, query)
{top_stats, sample_percent} = fetch_top_stats(site, query, current_user)
comparison_query = comparison_query(query)
json(conn, %{
@ -293,7 +294,7 @@ defmodule PlausibleWeb.Api.StatsController do
end
end
defp fetch_top_stats(site, query) do
defp fetch_top_stats(site, query, current_user) do
goal_filter? = Filters.filtering_on_dimension?(query, "event:goal")
cond do
@ -307,7 +308,7 @@ defmodule PlausibleWeb.Api.StatsController do
fetch_goal_top_stats(site, query)
true ->
fetch_other_top_stats(site, query)
fetch_other_top_stats(site, query, current_user)
end
end
@ -391,16 +392,24 @@ defmodule PlausibleWeb.Api.StatsController do
|> then(&{&1, 100})
end
defp fetch_other_top_stats(site, query) do
defp fetch_other_top_stats(site, query, current_user) do
page_filter? = Filters.filtering_on_dimension?(query, "event:page")
metrics = [:visitors, :visits, :pageviews, :sample_percent]
metrics =
cond do
page_filter? && query.include_imported -> metrics
page_filter? -> metrics ++ [:bounce_rate, :time_on_page]
true -> metrics ++ [:views_per_visit, :bounce_rate, :visit_duration]
page_filter? && query.include_imported ->
metrics
page_filter? && scroll_depth_enabled?(site, current_user) ->
metrics ++ [:bounce_rate, :scroll_depth, :time_on_page]
page_filter? ->
metrics ++ [:bounce_rate, :time_on_page]
true ->
metrics ++ [:views_per_visit, :bounce_rate, :visit_duration]
end
current_results = Stats.aggregate(site, query, metrics)
@ -418,7 +427,8 @@ defmodule PlausibleWeb.Api.StatsController do
nil -> 0
value -> value
end
)
),
top_stats_entry(current_results, "Scroll depth", :scroll_depth)
]
|> Enum.filter(& &1)
@ -819,13 +829,22 @@ defmodule PlausibleWeb.Api.StatsController do
def pages(conn, params) do
site = conn.assigns[:site]
current_user = conn.assigns[:current_user]
params = Map.put(params, "property", "event:page")
query = Query.from(site, params, debug_metadata(conn))
extra_metrics =
if params["detailed"],
do: [:pageviews, :bounce_rate, :time_on_page],
else: []
cond do
params["detailed"] && !query.include_imported && scroll_depth_enabled?(site, current_user) ->
[:pageviews, :bounce_rate, :time_on_page, :scroll_depth]
params["detailed"] ->
[:pageviews, :bounce_rate, :time_on_page]
true ->
[]
end
metrics = breakdown_metrics(query, extra_metrics)
pagination = parse_pagination(params)
@ -1532,11 +1551,20 @@ defmodule PlausibleWeb.Api.StatsController do
end
requires_goal_filter? = metric in [:conversion_rate, :events]
has_goal_filter? = Filters.filtering_on_dimension?(query, "event:goal")
if requires_goal_filter? and !Filters.filtering_on_dimension?(query, "event:goal") do
{:error, "Metric `#{metric}` can only be queried with a goal filter"}
else
{:ok, metric}
requires_page_filter? = metric == :scroll_depth
has_page_filter? = Filters.filtering_on_dimension?(query, "event:page")
cond do
requires_goal_filter? and not has_goal_filter? ->
{:error, "Metric `#{metric}` can only be queried with a goal filter"}
requires_page_filter? and not has_page_filter? ->
{:error, "Metric `#{metric}` can only be queried with a page filter"}
true ->
{:ok, metric}
end
end
@ -1588,4 +1616,9 @@ defmodule PlausibleWeb.Api.StatsController do
end
defp realtime_period_to_30m(params), do: params
defp scroll_depth_enabled?(site, user) do
FunWithFlags.enabled?(:scroll_depth, for: user) ||
FunWithFlags.enabled?(:scroll_depth, for: site)
end
end
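
The new `scroll_depth_enabled?/2` helper mirrors the gating in `get_flags/2` in the next file: the metric is exposed when the `:scroll_depth` flag is enabled for either the current user or the site. A hedged sketch of toggling it per actor (assuming, as the `for:` checks imply, that `FunWithFlags.Actor` is implemented for these structs):

# Illustrative only: enable the flag for a single site or user rather than globally.
FunWithFlags.enable(:scroll_depth, for_actor: site)
FunWithFlags.enable(:scroll_depth, for_actor: user)

# The checks above then pass for that actor:
FunWithFlags.enabled?(:scroll_depth, for: site)
# => true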

View File

@ -375,7 +375,7 @@ defmodule PlausibleWeb.StatsController do
defp get_flags(user, site),
do:
[:channels, :saved_segments]
[:channels, :saved_segments, :scroll_depth]
|> Enum.map(fn flag ->
{flag, FunWithFlags.enabled?(flag, for: user) || FunWithFlags.enabled?(flag, for: site)}
end)

View File

@ -269,6 +269,10 @@
{
"const": "average_revenue",
"$comment": "only :internal"
},
{
"const": "scroll_depth",
"$comment": "only :internal"
}
]
},

View File

@ -721,7 +721,7 @@ defmodule Plausible.Billing.QuotaTest do
populate_stats(site, [
build(:event, timestamp: Timex.shift(now, days: -8), name: "custom"),
build(:pageview, user_id: 199, timestamp: Timex.shift(now, days: -5, minutes: -2)),
build(:event, user_id: 199, timestamp: Timex.shift(now, days: -5), name: "pageleave")
build(:pageleave, user_id: 199, timestamp: Timex.shift(now, days: -5))
])
assert %{

View File

@ -63,7 +63,7 @@ defmodule Plausible.Stats.GoalSuggestionsTest do
user_id: 1,
timestamp: NaiveDateTime.utc_now() |> NaiveDateTime.add(-1, :minute)
),
build(:event, name: "pageleave", user_id: 1, timestamp: NaiveDateTime.utc_now())
build(:pageleave, user_id: 1, timestamp: NaiveDateTime.utc_now())
])
assert GoalSuggestions.suggest_event_names(site, "") == ["Signup"]

View File

@ -1416,6 +1416,81 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
end
end
describe "scroll_depth metric" do
test "fails validation on its own", %{site: site} do
%{
"site_id" => site.domain,
"metrics" => ["scroll_depth"],
"date_range" => "all"
}
|> check_error(
site,
"Metric `scroll_depth` can only be queried with event:page filters or dimensions.",
:internal
)
end
test "fails with only a non-top-level event:page filter", %{site: site} do
%{
"site_id" => site.domain,
"metrics" => ["scroll_depth"],
"date_range" => "all",
"filters" => [["not", ["is", "event:page", ["/"]]]]
}
|> check_error(
site,
"Metric `scroll_depth` can only be queried with event:page filters or dimensions.",
:internal
)
end
test "succeeds with top-level event:page filter", %{site: site} do
%{
"site_id" => site.domain,
"metrics" => ["scroll_depth"],
"date_range" => "all",
"filters" => [["is", "event:page", ["/"]]]
}
|> check_success(
site,
%{
metrics: [:scroll_depth],
utc_time_range: @date_range_day,
filters: [[:is, "event:page", ["/"]]],
dimensions: [],
order_by: nil,
timezone: site.timezone,
include: %{imports: false, time_labels: false, total_rows: false, comparisons: nil},
pagination: %{limit: 10_000, offset: 0}
},
:internal
)
end
test "succeeds with event:page dimension", %{site: site} do
%{
"site_id" => site.domain,
"metrics" => ["scroll_depth"],
"date_range" => "all",
"dimensions" => ["event:page"]
}
|> check_success(
site,
%{
metrics: [:scroll_depth],
utc_time_range: @date_range_day,
filters: [],
dimensions: ["event:page"],
order_by: nil,
timezone: site.timezone,
include: %{imports: false, time_labels: false, total_rows: false, comparisons: nil},
pagination: %{limit: 10_000, offset: 0}
},
:internal
)
end
end
describe "views_per_visit metric" do
test "succeeds with normal filters", %{site: site} do
insert(:goal, %{site: site, event_name: "Signup"})

View File

@ -126,6 +126,20 @@ defmodule PlausibleWeb.Api.ExternalStatsController.AggregateTest do
}
end
test "scroll depth metric is not recognized in the legacy API v1", %{conn: conn, site: site} do
conn =
get(conn, "/api/v1/stats/aggregate", %{
"site_id" => site.domain,
"period" => "30d",
"metrics" => "scroll_depth"
})
assert json_response(conn, 400) == %{
"error" =>
"The metric `scroll_depth` is not recognized. Find valid metrics from the documentation: https://plausible.io/docs/stats-api#metrics"
}
end
for property <- ["event:name", "event:goal", "event:props:custom_prop"] do
test "validates that session metrics cannot be used with #{property} filter", %{
conn: conn,
@ -1628,12 +1642,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController.AggregateTest do
populate_stats(site, [
build(:pageview, user_id: 1234, timestamp: ~N[2021-01-01 12:00:00], pathname: "/1"),
build(:pageview, user_id: 1234, timestamp: ~N[2021-01-01 12:00:05], pathname: "/2"),
build(:event,
name: "pageleave",
user_id: 1234,
timestamp: ~N[2021-01-01 12:01:00],
pathname: "/1"
)
build(:pageleave, user_id: 1234, timestamp: ~N[2021-01-01 12:01:00], pathname: "/1")
])
conn =

View File

@ -2604,12 +2604,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController.BreakdownTest do
populate_stats(site, [
build(:pageview, user_id: 1234, timestamp: ~N[2021-01-01 12:00:00], pathname: "/1"),
build(:pageview, user_id: 1234, timestamp: ~N[2021-01-01 12:00:05], pathname: "/2"),
build(:event,
name: "pageleave",
user_id: 1234,
timestamp: ~N[2021-01-01 12:01:00],
pathname: "/1"
)
build(:pageleave, user_id: 1234, timestamp: ~N[2021-01-01 12:01:00], pathname: "/1")
])
conn =

View File

@ -105,7 +105,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController.QueryTest do
%{conn: conn, site: site} do
populate_stats(site, [
build(:pageview, user_id: 234, timestamp: ~N[2021-01-01 00:00:00]),
build(:event, user_id: 234, name: "pageleave", timestamp: ~N[2021-01-01 00:00:01])
build(:pageleave, user_id: 234, timestamp: ~N[2021-01-01 00:00:01])
])
conn =
@ -126,7 +126,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController.QueryTest do
} do
populate_stats(site, [
build(:pageview, user_id: 123, timestamp: ~N[2021-01-01 00:00:00]),
build(:event, user_id: 123, name: "pageleave", timestamp: ~N[2021-01-01 00:00:03])
build(:pageleave, user_id: 123, timestamp: ~N[2021-01-01 00:00:03])
])
conn =
@ -3426,4 +3426,298 @@ defmodule PlausibleWeb.Api.ExternalStatsController.QueryTest do
assert json_response(conn4, 200)["results"] == []
end
end
describe "scroll_depth" do
setup [:create_user, :create_site, :create_api_key, :use_api_key]
test "scroll depth is (not yet) available in public API", %{conn: conn, site: site} do
conn =
post(conn, "/api/v2/query", %{
"site_id" => site.domain,
"filters" => [["is", "event:page", ["/"]]],
"date_range" => "all",
"metrics" => ["scroll_depth"]
})
assert json_response(conn, 400)["error"] =~ "Invalid metric \"scroll_depth\""
end
test "can query scroll_depth metric with a page filter", %{conn: conn, site: site} do
populate_stats(site, [
build(:pageview, user_id: 123, timestamp: ~N[2021-01-01 00:00:00]),
build(:pageleave, user_id: 123, timestamp: ~N[2021-01-01 00:00:10], scroll_depth: 40),
build(:pageview, user_id: 123, timestamp: ~N[2021-01-01 00:00:10]),
build(:pageleave, user_id: 123, timestamp: ~N[2021-01-01 00:00:20], scroll_depth: 60),
build(:pageview, user_id: 456, timestamp: ~N[2021-01-01 00:00:00]),
build(:pageleave, user_id: 456, timestamp: ~N[2021-01-01 00:00:10], scroll_depth: 80)
])
conn =
post(conn, "/api/v2/query-internal-test", %{
"site_id" => site.domain,
"filters" => [["is", "event:page", ["/"]]],
"date_range" => "all",
"metrics" => ["scroll_depth"]
})
assert json_response(conn, 200)["results"] == [
%{"metrics" => [70], "dimensions" => []}
]
end
test "scroll depth is 0 when no pageleave data in range", %{conn: conn, site: site} do
populate_stats(site, [
build(:pageview, timestamp: ~N[2021-01-01 00:00:00])
])
conn =
post(conn, "/api/v2/query-internal-test", %{
"site_id" => site.domain,
"filters" => [["is", "event:page", ["/"]]],
"date_range" => "all",
"metrics" => ["visitors", "scroll_depth"]
})
assert json_response(conn, 200)["results"] == [
%{"metrics" => [1, 0], "dimensions" => []}
]
end
test "scroll depth is 0 when no data at all in range", %{conn: conn, site: site} do
conn =
post(conn, "/api/v2/query-internal-test", %{
"site_id" => site.domain,
"filters" => [["is", "event:page", ["/"]]],
"date_range" => "all",
"metrics" => ["scroll_depth"]
})
assert json_response(conn, 200)["results"] == [
%{"metrics" => [0], "dimensions" => []}
]
end
test "scroll_depth metric in a time:day breakdown", %{conn: conn, site: site} do
t0 = ~N[2020-01-01 00:00:00]
[t1, t2, t3] = for i <- 1..3, do: NaiveDateTime.add(t0, i, :minute)
populate_stats(site, [
build(:pageview, user_id: 12, timestamp: t0),
build(:pageleave, user_id: 12, timestamp: t1, scroll_depth: 20),
build(:pageview, user_id: 34, timestamp: t0),
build(:pageleave, user_id: 34, timestamp: t1, scroll_depth: 17),
build(:pageview, user_id: 34, timestamp: t2),
build(:pageleave, user_id: 34, timestamp: t3, scroll_depth: 60),
build(:pageview, user_id: 56, timestamp: NaiveDateTime.add(t0, 1, :day)),
build(:pageleave,
user_id: 56,
timestamp: NaiveDateTime.add(t1, 1, :day),
scroll_depth: 20
)
])
conn =
post(conn, "/api/v2/query-internal-test", %{
"site_id" => site.domain,
"metrics" => ["scroll_depth"],
"date_range" => "all",
"dimensions" => ["time:day"],
"filters" => [["is", "event:page", ["/"]]]
})
assert json_response(conn, 200)["results"] == [
%{"dimensions" => ["2020-01-01"], "metrics" => [40]},
%{"dimensions" => ["2020-01-02"], "metrics" => [20]}
]
end
test "breakdown by event:page with scroll_depth metric", %{conn: conn, site: site} do
t0 = ~N[2020-01-01 00:00:00]
[t1, t2, t3] = for i <- 1..3, do: NaiveDateTime.add(t0, i, :minute)
populate_stats(site, [
build(:pageview, user_id: 12, pathname: "/blog", timestamp: t0),
build(:pageleave, user_id: 12, pathname: "/blog", timestamp: t1, scroll_depth: 20),
build(:pageview, user_id: 12, pathname: "/another", timestamp: t1),
build(:pageleave, user_id: 12, pathname: "/another", timestamp: t2, scroll_depth: 24),
build(:pageview, user_id: 34, pathname: "/blog", timestamp: t0),
build(:pageleave, user_id: 34, pathname: "/blog", timestamp: t1, scroll_depth: 17),
build(:pageview, user_id: 34, pathname: "/another", timestamp: t1),
build(:pageleave, user_id: 34, pathname: "/another", timestamp: t2, scroll_depth: 26),
build(:pageview, user_id: 34, pathname: "/blog", timestamp: t2),
build(:pageleave, user_id: 34, pathname: "/blog", timestamp: t3, scroll_depth: 60),
build(:pageview, user_id: 56, pathname: "/blog", timestamp: t0),
build(:pageleave, user_id: 56, pathname: "/blog", timestamp: t1, scroll_depth: 100)
])
conn =
post(conn, "/api/v2/query-internal-test", %{
"site_id" => site.domain,
"metrics" => ["scroll_depth"],
"date_range" => "all",
"dimensions" => ["event:page"]
})
assert json_response(conn, 200)["results"] == [
%{"dimensions" => ["/blog"], "metrics" => [60]},
%{"dimensions" => ["/another"], "metrics" => [25]}
]
end
test "breakdown by event:page + visit:source with scroll_depth metric", %{
conn: conn,
site: site
} do
populate_stats(site, [
build(:pageview,
referrer_source: "Google",
user_id: 12,
pathname: "/blog",
timestamp: ~N[2020-01-01 00:00:00]
),
build(:pageleave,
referrer_source: "Google",
user_id: 12,
pathname: "/blog",
timestamp: ~N[2020-01-01 00:00:00] |> NaiveDateTime.add(1, :minute),
scroll_depth: 20
),
build(:pageview,
referrer_source: "Google",
user_id: 34,
pathname: "/blog",
timestamp: ~N[2020-01-01 00:00:00]
),
build(:pageleave,
referrer_source: "Google",
user_id: 34,
pathname: "/blog",
timestamp: ~N[2020-01-01 00:00:00] |> NaiveDateTime.add(1, :minute),
scroll_depth: 17
),
build(:pageview,
referrer_source: "Google",
user_id: 34,
pathname: "/blog",
timestamp: ~N[2020-01-01 00:00:00] |> NaiveDateTime.add(2, :minute)
),
build(:pageleave,
referrer_source: "Google",
user_id: 34,
pathname: "/blog",
timestamp: ~N[2020-01-01 00:00:00] |> NaiveDateTime.add(3, :minute),
scroll_depth: 60
),
build(:pageview,
referrer_source: "Twitter",
user_id: 56,
pathname: "/blog",
timestamp: ~N[2020-01-01 00:00:00]
),
build(:pageleave,
referrer_source: "Twitter",
user_id: 56,
pathname: "/blog",
timestamp: ~N[2020-01-01 00:00:00] |> NaiveDateTime.add(1, :minute),
scroll_depth: 20
),
build(:pageview,
referrer_source: "Twitter",
user_id: 56,
pathname: "/another",
timestamp: ~N[2020-01-01 00:00:00] |> NaiveDateTime.add(1, :minute)
),
build(:pageleave,
referrer_source: "Twitter",
user_id: 56,
pathname: "/another",
timestamp: ~N[2020-01-01 00:00:00] |> NaiveDateTime.add(2, :minute),
scroll_depth: 24
)
])
conn =
post(conn, "/api/v2/query-internal-test", %{
"site_id" => site.domain,
"metrics" => ["scroll_depth"],
"date_range" => "all",
"dimensions" => ["event:page", "visit:source"]
})
assert json_response(conn, 200)["results"] == [
%{"dimensions" => ["/blog", "Google"], "metrics" => [40]},
%{"dimensions" => ["/another", "Twitter"], "metrics" => [24]},
%{"dimensions" => ["/blog", "Twitter"], "metrics" => [20]}
]
end
test "breakdown by event:page + time:day with scroll_depth metric", %{conn: conn, site: site} do
populate_stats(site, [
build(:pageview, user_id: 12, pathname: "/blog", timestamp: ~N[2020-01-01 00:00:00]),
build(:pageleave,
user_id: 12,
pathname: "/blog",
timestamp: ~N[2020-01-01 00:01:00],
scroll_depth: 20
),
build(:pageview, user_id: 12, pathname: "/another", timestamp: ~N[2020-01-01 00:01:00]),
build(:pageleave,
user_id: 12,
pathname: "/another",
timestamp: ~N[2020-01-01 00:02:00],
scroll_depth: 24
),
build(:pageview, user_id: 34, pathname: "/blog", timestamp: ~N[2020-01-01 00:00:00]),
build(:pageleave,
user_id: 34,
pathname: "/blog",
timestamp: ~N[2020-01-01 00:01:00],
scroll_depth: 17
),
build(:pageview, user_id: 34, pathname: "/another", timestamp: ~N[2020-01-01 00:01:00]),
build(:pageleave,
user_id: 34,
pathname: "/another",
timestamp: ~N[2020-01-01 00:02:00],
scroll_depth: 26
),
build(:pageview, user_id: 34, pathname: "/blog", timestamp: ~N[2020-01-01 00:02:00]),
build(:pageleave,
user_id: 34,
pathname: "/blog",
timestamp: ~N[2020-01-01 00:03:00],
scroll_depth: 60
),
build(:pageview, user_id: 56, pathname: "/blog", timestamp: ~N[2020-01-02 00:00:00]),
build(:pageleave,
user_id: 56,
pathname: "/blog",
timestamp: ~N[2020-01-02 00:01:00],
scroll_depth: 20
),
build(:pageview, user_id: 56, pathname: "/another", timestamp: ~N[2020-01-02 00:01:00]),
build(:pageleave,
user_id: 56,
pathname: "/another",
timestamp: ~N[2020-01-02 00:02:00],
scroll_depth: 24
)
])
conn =
post(conn, "/api/v2/query-internal-test", %{
"site_id" => site.domain,
"metrics" => ["scroll_depth"],
"date_range" => "all",
"dimensions" => ["event:page", "time:day"]
})
assert json_response(conn, 200)["results"] == [
%{"dimensions" => ["/blog", "2020-01-01"], "metrics" => [40]},
%{"dimensions" => ["/another", "2020-01-01"], "metrics" => [25]},
%{"dimensions" => ["/another", "2020-01-02"], "metrics" => [24]},
%{"dimensions" => ["/blog", "2020-01-02"], "metrics" => [20]}
]
end
end
end

View File

@ -584,6 +584,56 @@ defmodule PlausibleWeb.Api.StatsController.MainGraphTest do
end
end
describe "GET /api/stats/main-graph - scroll_depth plot" do
setup [:create_user, :log_in, :create_site]
test "returns 400 when scroll_depth is queried without a page filter", %{
conn: conn,
site: site
} do
conn =
get(
conn,
"/api/stats/#{site.domain}/main-graph?period=month&date=2021-01-01&metric=scroll_depth"
)
assert %{"error" => error} = json_response(conn, 400)
assert error =~ "can only be queried with a page filter"
end
test "returns scroll depth per day", %{conn: conn, site: site} do
t0 = ~N[2020-01-01 00:00:00]
[t1, t2, t3] = for i <- 1..3, do: NaiveDateTime.add(t0, i, :minute)
populate_stats(site, [
build(:pageview, user_id: 12, timestamp: t0),
build(:pageleave, user_id: 12, timestamp: t1, scroll_depth: 20),
build(:pageview, user_id: 34, timestamp: t0),
build(:pageleave, user_id: 34, timestamp: t1, scroll_depth: 17),
build(:pageview, user_id: 34, timestamp: t2),
build(:pageleave, user_id: 34, timestamp: t3, scroll_depth: 60),
build(:pageview, user_id: 56, timestamp: NaiveDateTime.add(t0, 1, :day)),
build(:pageleave,
user_id: 56,
timestamp: NaiveDateTime.add(t1, 1, :day),
scroll_depth: 20
)
])
filters = Jason.encode!(%{page: "/"})
conn =
get(
conn,
"/api/stats/#{site.domain}/main-graph?period=7d&date=2020-01-07&metric=scroll_depth&filters=#{filters}"
)
assert %{"plot" => plot} = json_response(conn, 200)
assert plot == [40, 20, 0, 0, 0, 0, 0]
end
end
describe "GET /api/stats/main-graph - conversion_rate plot" do
setup [:create_user, :log_in, :create_site]

View File

@ -272,14 +272,16 @@ defmodule PlausibleWeb.Api.StatsController.PagesTest do
"visitors" => 2,
"pageviews" => 2,
"bounce_rate" => 0,
"time_on_page" => 600
"time_on_page" => 600,
"scroll_depth" => 0
},
%{
"name" => "/blog/john-1",
"visitors" => 1,
"pageviews" => 1,
"bounce_rate" => 0,
"time_on_page" => 60
"time_on_page" => 60,
"scroll_depth" => 0
}
]
end
@ -334,14 +336,16 @@ defmodule PlausibleWeb.Api.StatsController.PagesTest do
"visitors" => 2,
"pageviews" => 2,
"bounce_rate" => 0,
"time_on_page" => 120.0
"time_on_page" => 120.0,
"scroll_depth" => 0
},
%{
"name" => "/blog/other-post",
"visitors" => 1,
"pageviews" => 1,
"bounce_rate" => 0,
"time_on_page" => nil
"time_on_page" => nil,
"scroll_depth" => 0
}
]
end
@ -386,14 +390,16 @@ defmodule PlausibleWeb.Api.StatsController.PagesTest do
"visitors" => 2,
"pageviews" => 2,
"bounce_rate" => 50,
"time_on_page" => 60
"time_on_page" => 60,
"scroll_depth" => 0
},
%{
"name" => "/blog/other-post",
"visitors" => 1,
"pageviews" => 1,
"bounce_rate" => 0,
"time_on_page" => nil
"time_on_page" => nil,
"scroll_depth" => 0
}
]
end
@ -442,14 +448,16 @@ defmodule PlausibleWeb.Api.StatsController.PagesTest do
"visitors" => 2,
"pageviews" => 2,
"bounce_rate" => 100,
"time_on_page" => nil
"time_on_page" => nil,
"scroll_depth" => 0
},
%{
"name" => "/blog/john-1",
"visitors" => 1,
"pageviews" => 1,
"bounce_rate" => 0,
"time_on_page" => 60
"time_on_page" => 60,
"scroll_depth" => 0
}
]
end
@ -584,7 +592,49 @@ defmodule PlausibleWeb.Api.StatsController.PagesTest do
"visitors" => 2,
"pageviews" => 3,
"bounce_rate" => 50,
"time_on_page" => 60
"time_on_page" => 60,
"scroll_depth" => 0
}
]
end
test "calculates scroll_depth", %{conn: conn, site: site} do
t0 = ~N[2020-01-01 00:00:00]
[t1, t2, t3] = for i <- 1..3, do: NaiveDateTime.add(t0, i, :minute)
populate_stats(site, [
build(:pageview, user_id: 12, pathname: "/blog", timestamp: t0),
build(:pageleave, user_id: 12, pathname: "/blog", timestamp: t1, scroll_depth: 20),
build(:pageview, user_id: 12, pathname: "/another", timestamp: t1),
build(:pageleave, user_id: 12, pathname: "/another", timestamp: t2, scroll_depth: 24),
build(:pageview, user_id: 34, pathname: "/blog", timestamp: t0),
build(:pageleave, user_id: 34, pathname: "/blog", timestamp: t1, scroll_depth: 17),
build(:pageview, user_id: 34, pathname: "/another", timestamp: t1),
build(:pageleave, user_id: 34, pathname: "/another", timestamp: t2, scroll_depth: 26),
build(:pageview, user_id: 34, pathname: "/blog", timestamp: t2),
build(:pageleave, user_id: 34, pathname: "/blog", timestamp: t3, scroll_depth: 60),
build(:pageview, user_id: 56, pathname: "/blog", timestamp: t0),
build(:pageleave, user_id: 56, pathname: "/blog", timestamp: t1, scroll_depth: 100)
])
conn = get(conn, "/api/stats/#{site.domain}/pages?period=day&date=2020-01-01&detailed=true")
assert json_response(conn, 200)["results"] == [
%{
"name" => "/blog",
"visitors" => 3,
"pageviews" => 4,
"bounce_rate" => 33,
"time_on_page" => 60,
"scroll_depth" => 60
},
%{
"name" => "/another",
"visitors" => 2,
"pageviews" => 2,
"bounce_rate" => 0,
"time_on_page" => 60,
"scroll_depth" => 25
}
]
end
@ -631,14 +681,16 @@ defmodule PlausibleWeb.Api.StatsController.PagesTest do
"visitors" => 2,
"pageviews" => 3,
"bounce_rate" => 50,
"time_on_page" => 60
"time_on_page" => 60,
"scroll_depth" => 0
},
%{
"name" => "/about",
"visitors" => 1,
"pageviews" => 1,
"bounce_rate" => 100,
"time_on_page" => nil
"time_on_page" => nil,
"scroll_depth" => 0
}
]
end
@ -685,7 +737,8 @@ defmodule PlausibleWeb.Api.StatsController.PagesTest do
"visitors" => 2,
"pageviews" => 3,
"bounce_rate" => 50,
"time_on_page" => 60
"time_on_page" => 60,
"scroll_depth" => 0
}
]
end
@ -731,21 +784,24 @@ defmodule PlausibleWeb.Api.StatsController.PagesTest do
"visitors" => 2,
"pageviews" => 2,
"bounce_rate" => 100,
"time_on_page" => nil
"time_on_page" => nil,
"scroll_depth" => 0
},
%{
"name" => "/blog/post-1",
"visitors" => 1,
"pageviews" => 1,
"bounce_rate" => 0,
"time_on_page" => 60
"time_on_page" => 60,
"scroll_depth" => 0
},
%{
"name" => "/blog/post-2",
"visitors" => 1,
"pageviews" => 1,
"bounce_rate" => 0,
"time_on_page" => nil
"time_on_page" => nil,
"scroll_depth" => 0
}
]
end
@ -783,14 +839,16 @@ defmodule PlausibleWeb.Api.StatsController.PagesTest do
"visitors" => 1,
"pageviews" => 1,
"bounce_rate" => 0,
"time_on_page" => 60
"time_on_page" => 60,
"scroll_depth" => 0
},
%{
"name" => "/blog/(/post-2",
"visitors" => 1,
"pageviews" => 1,
"bounce_rate" => 0,
"time_on_page" => nil
"time_on_page" => nil,
"scroll_depth" => 0
}
]
end
@ -836,14 +894,16 @@ defmodule PlausibleWeb.Api.StatsController.PagesTest do
"visitors" => 2,
"pageviews" => 2,
"bounce_rate" => 50,
"time_on_page" => 600
"time_on_page" => 600,
"scroll_depth" => 0
},
%{
"name" => "/about",
"visitors" => 1,
"pageviews" => 1,
"bounce_rate" => 0,
"time_on_page" => nil
"time_on_page" => nil,
"scroll_depth" => 0
}
]
end
@ -937,14 +997,16 @@ defmodule PlausibleWeb.Api.StatsController.PagesTest do
"time_on_page" => 900.0,
"visitors" => 2,
"pageviews" => 2,
"name" => "/"
"name" => "/",
"scroll_depth" => 0
},
%{
"bounce_rate" => 0,
"time_on_page" => nil,
"visitors" => 1,
"pageviews" => 1,
"name" => "/some-other-page"
"name" => "/some-other-page",
"scroll_depth" => 0
}
]
end
@ -984,7 +1046,8 @@ defmodule PlausibleWeb.Api.StatsController.PagesTest do
"name" => "/about",
"pageviews" => 2,
"time_on_page" => nil,
"visitors" => 2
"visitors" => 2,
"scroll_depth" => 0
}
]
end
@ -1063,14 +1126,16 @@ defmodule PlausibleWeb.Api.StatsController.PagesTest do
"name" => "/about-blog",
"pageviews" => 3,
"time_on_page" => 1140.0,
"visitors" => 2
"visitors" => 2,
"scroll_depth" => 0
},
%{
"bounce_rate" => 0,
"name" => "/exit-blog",
"pageviews" => 1,
"time_on_page" => nil,
"visitors" => 1
"visitors" => 1,
"scroll_depth" => 0
}
]
end
@ -1418,17 +1483,20 @@ defmodule PlausibleWeb.Api.StatsController.PagesTest do
"pageviews" => 0,
"time_on_page" => 0,
"visitors" => 0,
"scroll_depth" => 0,
"change" => %{
"bounce_rate" => nil,
"pageviews" => 100,
"time_on_page" => nil,
"visitors" => 100
"visitors" => 100,
"scroll_depth" => 0
}
},
"name" => "/page2",
"pageviews" => 2,
"time_on_page" => nil,
"visitors" => 2
"visitors" => 2,
"scroll_depth" => 0
},
%{
"bounce_rate" => 100,
@ -1436,16 +1504,19 @@ defmodule PlausibleWeb.Api.StatsController.PagesTest do
"pageviews" => 1,
"time_on_page" => nil,
"visitors" => 1,
"scroll_depth" => 0,
"comparison" => %{
"bounce_rate" => 100,
"pageviews" => 1,
"time_on_page" => nil,
"visitors" => 1,
"scroll_depth" => 0,
"change" => %{
"bounce_rate" => 0,
"pageviews" => 0,
"time_on_page" => nil,
"visitors" => 0
"visitors" => 0,
"scroll_depth" => 0
}
}
}

View File

@ -892,7 +892,7 @@ defmodule PlausibleWeb.Api.StatsController.TopStatsTest do
} do
filters = Jason.encode!(%{page: "/A"})
[visitors, visits, pageviews, bounce_rate, time_on_page] =
[visitors, visits, pageviews, bounce_rate, time_on_page, scroll_depth] =
conn
|> get("/api/stats/#{site.domain}/top-stats?filters=#{filters}")
|> json_response(200)
@ -903,6 +903,7 @@ defmodule PlausibleWeb.Api.StatsController.TopStatsTest do
assert %{"graph_metric" => "pageviews"} = pageviews
assert %{"graph_metric" => "bounce_rate"} = bounce_rate
assert %{"graph_metric" => "time_on_page"} = time_on_page
assert %{"graph_metric" => "scroll_depth"} = scroll_depth
end
test "returns graph_metric key for top stats with a goal filter", %{
@ -960,6 +961,31 @@ defmodule PlausibleWeb.Api.StatsController.TopStatsTest do
]
end
test "returns scroll_depth with a page filter", %{conn: conn, site: site} do
populate_stats(site, [
build(:pageview, user_id: 123, timestamp: ~N[2021-01-01 00:00:00]),
build(:pageleave, user_id: 123, timestamp: ~N[2021-01-01 00:00:10], scroll_depth: 40),
build(:pageview, user_id: 123, timestamp: ~N[2021-01-01 00:00:10]),
build(:pageleave, user_id: 123, timestamp: ~N[2021-01-01 00:00:20], scroll_depth: 60),
build(:pageview, user_id: 456, timestamp: ~N[2021-01-01 00:00:00]),
build(:pageleave, user_id: 456, timestamp: ~N[2021-01-01 00:00:10], scroll_depth: 80)
])
filters = Jason.encode!(%{page: "/"})
conn =
get(
conn,
"/api/stats/#{site.domain}/top-stats?period=day&date=2021-01-01&filters=#{filters}"
)
res = json_response(conn, 200)
assert %{"name" => "Scroll depth", "value" => 70, "graph_metric" => "scroll_depth"} in res[
"top_stats"
]
end
test "page glob filter", %{conn: conn, site: site} do
populate_stats(site, [
build(:pageview, pathname: "/index"),

View File

@ -140,6 +140,10 @@ defmodule Plausible.Factory do
Map.put(event_factory(attrs), :name, "pageview")
end
def pageleave_factory(attrs) do
Map.put(event_factory(attrs), :name, "pageleave")
end
def event_factory(attrs) do
if Map.get(attrs, :acquisition_channel) do
raise "Acquisition channel cannot be written directly since it's a materialized column."

View File

@ -7,6 +7,7 @@ Mox.defmock(Plausible.HTTPClient.Mock, for: Plausible.HTTPClient.Interface)
Application.ensure_all_started(:double)
FunWithFlags.enable(:channels)
FunWithFlags.enable(:scroll_depth)
# Temporary flag to test `read_team_schemas` flag on all tests.
if System.get_env("TEST_READ_TEAM_SCHEMAS") == "1" do
IO.puts("READS TEAM SCHEMAS")