2019-11-19 07:30:42 +03:00
|
|
|
defmodule PlausibleWeb.Api.StatsController do
|
|
|
|
use PlausibleWeb, :controller
|
|
|
|
use Plausible.Repo
|
2021-03-25 12:55:15 +03:00
|
|
|
use Plug.ErrorHandler
|
2021-07-23 13:44:05 +03:00
|
|
|
alias Plausible.Stats
|
|
|
|
alias Plausible.Stats.{Query, Filters}
|
2019-11-19 07:30:42 +03:00
|
|
|
|
|
|
|
# Renders the main dashboard graph: a visitor timeseries plus the "top stats"
# summary boxes, in a single JSON payload.
#
# The timeseries and the top stats are fetched concurrently: the timeseries
# query runs in a Task while fetch_top_stats/2 runs in the request process.
# For the "realtime" period the graph is plotted over the trailing 30 minutes
# (the top stats still use the original realtime query).
def main_graph(conn, params) do
  site = conn.assigns[:site]
  query = Query.from(site.timezone, params) |> Filters.add_prefix()

  timeseries_query =
    if query.period == "realtime" do
      # Realtime graphs plot the last 30 minutes of data
      %Query{query | period: "30m"}
    else
      query
    end

  # Kick off the timeseries query in the background while top stats compute
  timeseries = Task.async(fn -> Stats.timeseries(site, timeseries_query, ["visitors"]) end)
  {top_stats, sample_percent} = fetch_top_stats(site, query)
  timeseries_result = Task.await(timeseries)
  plot = Enum.map(timeseries_result, fn row -> row["visitors"] end)
  labels = Enum.map(timeseries_result, fn row -> row["date"] end)
  # Index of "now" within the labels, so the frontend can mark the
  # still-incomplete data point; nil when not applicable.
  present_index = present_index_for(site, query, labels)

  json(conn, %{
    plot: plot,
    labels: labels,
    present_index: present_index,
    top_stats: top_stats,
    interval: query.interval,
    sample_percent: sample_percent
  })
end
|
|
|
|
|
2021-07-23 13:44:05 +03:00
|
|
|
# Locates the position of the current moment within the graph labels so the
# frontend can mark the last (still-incomplete) data point. Returns nil for
# "minute" intervals, or when "now" does not appear among the labels.
defp present_index_for(site, query, dates) do
  current_label =
    case query.interval do
      "hour" ->
        site.timezone
        |> Timex.now()
        |> Timex.format!("{YYYY}-{0M}-{0D} {h24}:00:00")

      "date" ->
        site.timezone
        |> Timex.now()
        |> Timex.to_date()

      "month" ->
        site.timezone
        |> Timex.now()
        |> Timex.to_date()
        |> Timex.beginning_of_month()

      "minute" ->
        nil
    end

  if current_label do
    Enum.find_index(dates, &(&1 == current_label))
  end
end
|
|
|
|
|
2021-08-19 15:41:08 +03:00
|
|
|
# Top stats for the realtime dashboard: current visitors plus unique
# visitors and pageviews over the trailing 30 minutes. Realtime data is
# never sampled, hence the fixed sample percent of 100.
defp fetch_top_stats(site, %Query{period: "realtime"} = query) do
  %{"visitors" => %{"value" => visitors}, "pageviews" => %{"value" => pageviews}} =
    Stats.aggregate(site, %Query{query | period: "30m"}, ["visitors", "pageviews"])

  stats = [
    %{name: "Current visitors", value: Stats.current_visitors(site)},
    %{name: "Unique visitors (last 30 min)", value: visitors},
    %{name: "Pageviews (last 30 min)", value: pageviews}
  ]

  {stats, 100}
end
|
|
|
|
|
2021-08-18 14:49:39 +03:00
|
|
|
# Top stats when a goal filter is active: visitor totals, conversions and
# conversion rate, each with a change indicator versus the previous period.
#
# Four aggregates are needed: totals with/without the goal filter, for both
# the current and the shifted-back (previous) period. The goal-less query
# provides the denominator for the conversion rate.
defp fetch_top_stats(site, %Query{filters: %{"event:goal" => _goal}} = query) do
  # Same period without the goal filter — denominator for conversion rate
  total_q = Query.remove_goal(query)
  prev_query = Query.shift_back(query, site)
  prev_total_query = Query.shift_back(total_q, site)

  %{
    "visitors" => %{"value" => unique_visitors}
  } = Stats.aggregate(site, total_q, ["visitors"])

  %{
    "visitors" => %{"value" => prev_unique_visitors}
  } = Stats.aggregate(site, prev_total_query, ["visitors"])

  %{
    "visitors" => %{"value" => converted_visitors},
    "events" => %{"value" => completions}
  } = Stats.aggregate(site, query, ["visitors", "events"])

  %{
    "visitors" => %{"value" => prev_converted_visitors},
    "events" => %{"value" => prev_completions}
  } = Stats.aggregate(site, prev_query, ["visitors", "events"])

  conversion_rate = calculate_cr(unique_visitors, converted_visitors)
  prev_conversion_rate = calculate_cr(prev_unique_visitors, prev_converted_visitors)

  stats = [
    %{
      name: "Unique visitors",
      value: unique_visitors,
      change: percent_change(prev_unique_visitors, unique_visitors)
    },
    %{
      name: "Unique conversions",
      value: converted_visitors,
      change: percent_change(prev_converted_visitors, converted_visitors)
    },
    %{
      name: "Total conversions",
      value: completions,
      change: percent_change(prev_completions, completions)
    },
    %{
      name: "Conversion rate",
      value: conversion_rate,
      change: percent_change(prev_conversion_rate, conversion_rate)
    }
  ]

  # NOTE(review): sample percent is hard-coded to 100 here, unlike the
  # default clause which reads it from the aggregate — presumably intended.
  {stats, 100}
end
|
|
|
|
|
|
|
|
# Default top stats (no goal filter, not realtime): visitors, pageviews,
# bounce rate, and either time-on-page (when a page filter is active) or
# visit duration, each with a change versus the previous period.
defp fetch_top_stats(site, query) do
  prev_query = Query.shift_back(query, site)

  metrics =
    if query.filters["event:page"] do
      # With a page filter, visit duration is not meaningful; show
      # time-on-page for the filtered page instead.
      ["visitors", "pageviews", "bounce_rate", "time_on_page", "sample_percent"]
    else
      ["visitors", "pageviews", "bounce_rate", "visit_duration", "sample_percent"]
    end

  current_results = Stats.aggregate(site, query, metrics)
  prev_results = Stats.aggregate(site, prev_query, metrics)

  # top_stats_entry/4 returns nil for metrics absent from the aggregate,
  # so only one of visit_duration / time_on_page survives the filter.
  stats =
    [
      top_stats_entry(current_results, prev_results, "Unique visitors", "visitors"),
      top_stats_entry(current_results, prev_results, "Total pageviews", "pageviews"),
      top_stats_entry(current_results, prev_results, "Bounce rate", "bounce_rate"),
      top_stats_entry(current_results, prev_results, "Visit duration", "visit_duration"),
      top_stats_entry(current_results, prev_results, "Time on page", "time_on_page")
    ]
    |> Enum.filter(& &1)

  {stats, current_results["sample_percent"]["value"]}
end
|
|
|
|
|
|
|
|
# Builds one top-stat map for `key`, or nil when that metric is missing
# from the aggregate results (nil entries are filtered out by the caller).
defp top_stats_entry(current_results, prev_results, name, key) do
  metric = current_results[key]

  if metric do
    current_value = metric["value"]

    %{
      name: name,
      value: current_value,
      change: calculate_change(key, prev_results[key]["value"], current_value)
    }
  end
end
|
|
|
|
|
2021-08-17 15:21:12 +03:00
|
|
|
# Bounce rate is already a percentage, so its change is reported as an
# absolute difference in percentage points; nil when there is no previous
# bounce rate to compare against.
defp calculate_change("bounce_rate", old_count, new_count) do
  if old_count > 0 do
    new_count - old_count
  else
    nil
  end
end
|
|
|
|
|
|
|
|
# Every other metric is a raw count, so its change is relative (percent).
defp calculate_change(_metric, old_count, new_count),
  do: percent_change(old_count, new_count)
|
|
|
|
|
|
|
|
# Relative change between two counts, rounded to an integer percent.
# Growth from zero is capped at 100%; zero-to-zero reads as 0%.
defp percent_change(old_count, new_count) do
  cond do
    old_count == 0 and new_count == 0 -> 0
    old_count == 0 and new_count > 0 -> 100
    true -> round((new_count - old_count) / old_count * 100)
  end
end
|
|
|
|
|
2020-09-28 11:29:24 +03:00
|
|
|
# Traffic-sources breakdown. Returns JSON, or CSV when `params["csv"]`
# is set; with a goal filter the CSV reports conversions instead of
# visitor counts. `detailed` adds bounce rate and visit duration.
def sources(conn, params) do
  site = conn.assigns[:site]

  query =
    Query.from(site.timezone, params)
    |> Filters.add_prefix()
    # Optionally drops direct/no-referrer traffic — see maybe_hide_noref/3
    |> maybe_hide_noref("visit:source", params)

  pagination = parse_pagination(params)

  metrics =
    if params["detailed"], do: ["visitors", "bounce_rate", "visit_duration"], else: ["visitors"]

  res =
    Stats.breakdown(site, query, "visit:source", metrics, pagination)
    # presumably adds "conversion_rate" when a goal filter is set — confirm in helper
    |> maybe_add_cr(site, query, pagination, "source", "visit:source")
    |> transform_keys(%{"source" => "name"})

  if params["csv"] do
    if Map.has_key?(query.filters, "event:goal") do
      res
      |> transform_keys(%{"visitors" => "conversions"})
      |> to_csv(["name", "conversions", "conversion_rate"])
    else
      res |> to_csv(["name", "visitors", "bounce_rate", "visit_duration"])
    end
  else
    json(conn, res)
  end
end
|
|
|
|
|
2020-09-28 11:29:24 +03:00
|
|
|
# UTM-medium breakdown; JSON by default, CSV when requested (with goal
# filters the CSV reports conversions rather than visitor counts).
def utm_mediums(conn, params) do
  site = conn.assigns[:site]

  query =
    site.timezone
    |> Query.from(params)
    |> Filters.add_prefix()
    |> maybe_hide_noref("visit:utm_medium", params)

  pagination = parse_pagination(params)
  metrics = ["visitors", "bounce_rate", "visit_duration"]

  mediums =
    site
    |> Stats.breakdown(query, "visit:utm_medium", metrics, pagination)
    |> maybe_add_cr(site, query, pagination, "utm_medium", "visit:utm_medium")
    |> transform_keys(%{"utm_medium" => "name"})

  cond do
    params["csv"] && Map.has_key?(query.filters, "event:goal") ->
      mediums
      |> transform_keys(%{"visitors" => "conversions"})
      |> to_csv(["name", "conversions", "conversion_rate"])

    params["csv"] ->
      to_csv(mediums, ["name", "visitors", "bounce_rate", "visit_duration"])

    true ->
      json(conn, mediums)
  end
end
|
|
|
|
|
|
|
|
# UTM-campaign breakdown. Returns JSON, or CSV when `params["csv"]` is set;
# with a goal filter the CSV reports conversions instead of visitor counts.
def utm_campaigns(conn, params) do
  site = conn.assigns[:site]

  query =
    Query.from(site.timezone, params)
    |> Filters.add_prefix()
    # Optionally drops direct/no-referrer traffic — see maybe_hide_noref/3
    |> maybe_hide_noref("visit:utm_campaign", params)

  pagination = parse_pagination(params)
  metrics = ["visitors", "bounce_rate", "visit_duration"]

  res =
    Stats.breakdown(site, query, "visit:utm_campaign", metrics, pagination)
    # presumably adds "conversion_rate" when a goal filter is set — confirm in helper
    |> maybe_add_cr(site, query, pagination, "utm_campaign", "visit:utm_campaign")
    |> transform_keys(%{"utm_campaign" => "name"})

  if params["csv"] do
    if Map.has_key?(query.filters, "event:goal") do
      res
      |> transform_keys(%{"visitors" => "conversions"})
      |> to_csv(["name", "conversions", "conversion_rate"])
    else
      res |> to_csv(["name", "visitors", "bounce_rate", "visit_duration"])
    end
  else
    json(conn, res)
  end
end
|
2020-01-16 16:40:06 +03:00
|
|
|
|
2021-12-16 12:02:09 +03:00
|
|
|
# UTM-content breakdown; JSON by default, CSV when requested (with goal
# filters the CSV reports conversions rather than visitor counts).
def utm_contents(conn, params) do
  site = conn.assigns[:site]

  query =
    site.timezone
    |> Query.from(params)
    |> Filters.add_prefix()
    |> maybe_hide_noref("visit:utm_content", params)

  pagination = parse_pagination(params)
  metrics = ["visitors", "bounce_rate", "visit_duration"]

  contents =
    site
    |> Stats.breakdown(query, "visit:utm_content", metrics, pagination)
    |> maybe_add_cr(site, query, pagination, "utm_content", "visit:utm_content")
    |> transform_keys(%{"utm_content" => "name"})

  cond do
    params["csv"] && Map.has_key?(query.filters, "event:goal") ->
      contents
      |> transform_keys(%{"visitors" => "conversions"})
      |> to_csv(["name", "conversions", "conversion_rate"])

    params["csv"] ->
      to_csv(contents, ["name", "visitors", "bounce_rate", "visit_duration"])

    true ->
      json(conn, contents)
  end
end
|
|
|
|
|
|
|
|
# UTM-term breakdown. Returns JSON, or CSV when `params["csv"]` is set;
# with a goal filter the CSV reports conversions instead of visitor counts.
def utm_terms(conn, params) do
  site = conn.assigns[:site]

  query =
    Query.from(site.timezone, params)
    |> Filters.add_prefix()
    # Optionally drops direct/no-referrer traffic — see maybe_hide_noref/3
    |> maybe_hide_noref("visit:utm_term", params)

  pagination = parse_pagination(params)
  metrics = ["visitors", "bounce_rate", "visit_duration"]

  res =
    Stats.breakdown(site, query, "visit:utm_term", metrics, pagination)
    # presumably adds "conversion_rate" when a goal filter is set — confirm in helper
    |> maybe_add_cr(site, query, pagination, "utm_term", "visit:utm_term")
    |> transform_keys(%{"utm_term" => "name"})

  if params["csv"] do
    if Map.has_key?(query.filters, "event:goal") do
      res
      |> transform_keys(%{"visitors" => "conversions"})
      |> to_csv(["name", "conversions", "conversion_rate"])
    else
      res |> to_csv(["name", "visitors", "bounce_rate", "visit_duration"])
    end
  else
    json(conn, res)
  end
end
|
|
|
|
|
2020-09-28 11:29:24 +03:00
|
|
|
# UTM-source breakdown; JSON by default, CSV when requested (with goal
# filters the CSV reports conversions rather than visitor counts).
def utm_sources(conn, params) do
  site = conn.assigns[:site]

  query =
    site.timezone
    |> Query.from(params)
    |> Filters.add_prefix()
    |> maybe_hide_noref("visit:utm_source", params)

  pagination = parse_pagination(params)
  metrics = ["visitors", "bounce_rate", "visit_duration"]

  sources =
    site
    |> Stats.breakdown(query, "visit:utm_source", metrics, pagination)
    |> maybe_add_cr(site, query, pagination, "utm_source", "visit:utm_source")
    |> transform_keys(%{"utm_source" => "name"})

  cond do
    params["csv"] && Map.has_key?(query.filters, "event:goal") ->
      sources
      |> transform_keys(%{"visitors" => "conversions"})
      |> to_csv(["name", "conversions", "conversion_rate"])

    params["csv"] ->
      to_csv(sources, ["name", "visitors", "bounce_rate", "visit_duration"])

    true ->
      json(conn, sources)
  end
end
|
2019-11-20 11:42:45 +03:00
|
|
|
|
2019-11-19 07:30:42 +03:00
|
|
|
# Drilldown for Google as a referrer: when Search Console is connected,
# fetches the actual search terms via the Google API; otherwise reports
# the integration as not configured.
#
# FIX: the goal-filter guard previously read `query.filters["goal"]`, but
# `Filters.add_prefix/1` stores filters under prefixed keys — every other
# clause in this controller reads `"event:goal"`. The unprefixed key is
# never present, so search terms were fetched even with a goal filter
# applied. Now reads the prefixed key.
def referrer_drilldown(conn, %{"referrer" => "Google"} = params) do
  site = conn.assigns[:site] |> Repo.preload(:google_auth)

  query =
    Query.from(site.timezone, params)
    |> Query.put_filter("source", "Google")
    |> Filters.add_prefix()

  # Search terms are only available without a goal filter and with a
  # Search Console property configured.
  search_terms =
    if site.google_auth && site.google_auth.property && !query.filters["event:goal"] do
      google_api().fetch_stats(site, query, params["limit"] || 9)
    end

  %{"visitors" => %{"value" => total_visitors}} = Stats.aggregate(site, query, ["visitors"])

  case search_terms do
    nil ->
      # Not configured: tell the frontend whether the current user could
      # set up the integration themselves.
      user_id = get_session(conn, :current_user_id)
      is_admin = user_id && Plausible.Sites.has_admin_access?(user_id, site)
      json(conn, %{not_configured: true, is_admin: is_admin, total_visitors: total_visitors})

    {:ok, terms} ->
      json(conn, %{search_terms: terms, total_visitors: total_visitors})

    {:error, e} ->
      put_status(conn, 500)
      |> json(%{error: e})
  end
end
|
|
|
|
|
|
|
|
# Drilldown for an arbitrary referrer source: breakdown of individual
# referring URLs within that source, plus the source's visitor total.
def referrer_drilldown(conn, %{"referrer" => referrer} = params) do
  site = conn.assigns[:site]

  query =
    Query.from(site.timezone, params)
    # "source" is prefixed to "visit:source" by add_prefix below
    |> Query.put_filter("source", referrer)
    |> Filters.add_prefix()

  pagination = parse_pagination(params)

  metrics =
    if params["detailed"], do: ["visitors", "bounce_rate", "visit_duration"], else: ["visitors"]

  referrers =
    Stats.breakdown(site, query, "visit:referrer", metrics, pagination)
    # presumably adds "conversion_rate" when a goal filter is set — confirm in helper
    |> maybe_add_cr(site, query, pagination, "referrer", "visit:referrer")
    |> transform_keys(%{"referrer" => "name"})

  %{"visitors" => %{"value" => total_visitors}} = Stats.aggregate(site, query, ["visitors"])
  json(conn, %{referrers: referrers, total_visitors: total_visitors})
end
|
|
|
|
|
2019-11-19 07:30:42 +03:00
|
|
|
# Top-pages breakdown. Returns JSON, or CSV when `params["csv"]` is set;
# with a goal filter the CSV reports conversions instead of visitor counts.
# `detailed` adds pageviews, bounce rate and time-on-page.
def pages(conn, params) do
  site = conn.assigns[:site]
  query = Query.from(site.timezone, params) |> Filters.add_prefix()

  metrics =
    if params["detailed"],
      do: ["visitors", "pageviews", "bounce_rate", "time_on_page"],
      else: ["visitors"]

  pagination = parse_pagination(params)

  pages =
    Stats.breakdown(site, query, "event:page", metrics, pagination)
    # presumably adds "conversion_rate" when a goal filter is set — confirm in helper
    |> maybe_add_cr(site, query, pagination, "page", "event:page")
    |> transform_keys(%{"page" => "name"})

  if params["csv"] do
    if Map.has_key?(query.filters, "event:goal") do
      pages
      |> transform_keys(%{"visitors" => "conversions"})
      |> to_csv(["name", "conversions", "conversion_rate"])
    else
      pages |> to_csv(["name", "visitors", "bounce_rate", "time_on_page"])
    end
  else
    json(conn, pages)
  end
end
|
|
|
|
|
2020-07-30 11:18:28 +03:00
|
|
|
# Entry-pages breakdown: pages on which sessions began, with unique and
# total entrance counts and visit duration. JSON by default, CSV when
# `params["csv"]` is set (conversions variant under a goal filter).
def entry_pages(conn, params) do
  site = conn.assigns[:site]
  query = Query.from(site.timezone, params) |> Filters.add_prefix()
  pagination = parse_pagination(params)
  metrics = ["visitors", "visits", "visit_duration"]

  entry_pages =
    Stats.breakdown(site, query, "visit:entry_page", metrics, pagination)
    # presumably adds "conversion_rate" when a goal filter is set — confirm in helper
    |> maybe_add_cr(site, query, pagination, "entry_page", "visit:entry_page")
    |> transform_keys(%{
      "entry_page" => "name",
      "visitors" => "unique_entrances",
      "visits" => "total_entrances"
    })

  if params["csv"] do
    if Map.has_key?(query.filters, "event:goal") do
      entry_pages
      |> transform_keys(%{"unique_entrances" => "conversions"})
      |> to_csv(["name", "conversions", "conversion_rate"])
    else
      entry_pages |> to_csv(["name", "unique_entrances", "total_entrances", "visit_duration"])
    end
  else
    json(conn, entry_pages)
  end
end
|
|
|
|
|
|
|
|
# Exit-pages breakdown: pages on which sessions ended, with an exit rate
# (exits as a percentage of that page's total pageviews). JSON by default,
# CSV when `params["csv"]` is set (conversions variant under a goal filter).
def exit_pages(conn, params) do
  site = conn.assigns[:site]
  query = Query.from(site.timezone, params) |> Filters.add_prefix()
  {limit, page} = parse_pagination(params)
  metrics = ["visitors", "visits"]

  exit_pages =
    Stats.breakdown(site, query, "visit:exit_page", metrics, {limit, page})
    # presumably adds "conversion_rate" when a goal filter is set — confirm in helper
    |> maybe_add_cr(site, query, {limit, page}, "exit_page", "visit:exit_page")
    |> transform_keys(%{
      "exit_page" => "name",
      "visitors" => "unique_exits",
      "visits" => "total_exits"
    })

  pages = Enum.map(exit_pages, & &1["name"])

  # Second query: total pageviews for exactly the pages on this result page,
  # used as the denominator of the exit rate. The goal/page filters are moved
  # from the event scope to the visit scope so that all pageviews of matching
  # sessions are counted, not just the filtered events.
  total_visits_query =
    Query.put_filter(query, "event:page", {:member, pages})
    |> Query.put_filter("event:goal", nil)
    |> Query.put_filter("event:name", {:is, "pageview"})
    |> Query.put_filter("visit:goal", query.filters["event:goal"])
    |> Query.put_filter("visit:page", query.filters["event:page"])

  total_pageviews =
    Stats.breakdown(site, total_visits_query, "event:page", ["pageviews"], {limit, 1})

  exit_pages =
    Enum.map(exit_pages, fn exit_page ->
      exit_rate =
        case Enum.find(total_pageviews, &(&1["page"] == exit_page["name"])) do
          %{"pageviews" => pageviews} ->
            # Exits / pageviews as a floored percentage
            Float.floor(exit_page["total_exits"] / pageviews * 100)

          nil ->
            # No pageview data for this page — rate is unknown
            nil
        end

      Map.put(exit_page, "exit_rate", exit_rate)
    end)

  if params["csv"] do
    if Map.has_key?(query.filters, "event:goal") do
      exit_pages
      |> transform_keys(%{"unique_exits" => "conversions"})
      |> to_csv(["name", "conversions", "conversion_rate"])
    else
      exit_pages |> to_csv(["name", "unique_exits", "total_exits", "exit_rate"])
    end
  else
    json(conn, exit_pages)
  end
end
|
|
|
|
|
2019-11-19 07:30:42 +03:00
|
|
|
# Countries breakdown. JSON responses carry full country metadata (name,
# flag, ISO alpha-2/alpha-3 codes); CSV carries name plus counts, with the
# conversions variant under a goal filter.
#
# FIX: maybe_add_cr was called with a hard-coded `{300, 1}` pagination while
# the breakdown itself used the user-supplied `pagination` — every sibling
# action passes the same pagination to both calls. When the caller paginated
# differently, the conversion-rate join operated on a mismatched result
# window. Now passes `pagination` consistently.
def countries(conn, params) do
  site = conn.assigns[:site]
  query = Query.from(site.timezone, params) |> Filters.add_prefix()
  pagination = parse_pagination(params)

  countries =
    Stats.breakdown(site, query, "visit:country", ["visitors"], pagination)
    |> maybe_add_cr(site, query, pagination, "country", "visit:country")
    |> transform_keys(%{"country" => "code"})
    |> maybe_add_percentages(query)

  if params["csv"] do
    # CSV only needs the human-readable country name
    countries =
      countries
      |> Enum.map(fn country ->
        country_info = get_country(country["code"])
        Map.put(country, "name", country_info.name)
      end)

    if Map.has_key?(query.filters, "event:goal") do
      countries
      |> transform_keys(%{"visitors" => "conversions"})
      |> to_csv(["name", "conversions", "conversion_rate"])
    else
      countries |> to_csv(["name", "visitors"])
    end
  else
    # JSON gets the full metadata the map/list UI needs
    countries =
      Enum.map(countries, fn row ->
        country = get_country(row["code"])

        Map.merge(row, %{
          "name" => country.name,
          "flag" => country.flag,
          "alpha_3" => country.alpha_3,
          "code" => country.alpha_2
        })
      end)

    json(conn, countries)
  end
end
|
|
|
|
|
2021-11-23 12:39:09 +03:00
|
|
|
# Regions (subdivisions) breakdown. Region codes are resolved to names via
# the Location library; unknown codes are reported to Sentry and fall back
# to showing the raw code. JSON by default, CSV when `params["csv"]` is set.
def regions(conn, params) do
  site = conn.assigns[:site]
  query = Query.from(site.timezone, params) |> Filters.add_prefix()
  pagination = parse_pagination(params)

  regions =
    Stats.breakdown(site, query, "visit:region", ["visitors"], pagination)
    |> transform_keys(%{"region" => "code"})
    |> Enum.map(fn region ->
      region_entry = Location.get_subdivision(region["code"])

      if region_entry do
        country_entry = get_country(region_entry.country_code)
        Map.merge(region, %{"name" => region_entry.name, "country_flag" => country_entry.flag})
      else
        # Unknown subdivision code: report it but keep the row usable
        Sentry.capture_message("Could not find region info", extra: %{code: region["code"]})
        Map.merge(region, %{"name" => region["code"]})
      end
    end)

  if params["csv"] do
    if Map.has_key?(query.filters, "event:goal") do
      regions
      |> transform_keys(%{"visitors" => "conversions"})
      |> to_csv(["name", "conversions", "conversion_rate"])
    else
      regions |> to_csv(["name", "visitors"])
    end
  else
    json(conn, regions)
  end
end
|
|
|
|
|
|
|
|
# Cities breakdown. City (geoname) codes are resolved to names via the
# Location library; unknown codes are reported to Sentry and shown as "N/A".
# JSON by default, CSV when `params["csv"]` is set.
def cities(conn, params) do
  site = conn.assigns[:site]
  query = Query.from(site.timezone, params) |> Filters.add_prefix()
  pagination = parse_pagination(params)

  cities =
    Stats.breakdown(site, query, "visit:city", ["visitors"], pagination)
    |> transform_keys(%{"city" => "code"})
    |> Enum.map(fn city ->
      city_info = Location.get_city(city["code"])

      if city_info do
        country_info = get_country(city_info.country_code)

        Map.merge(city, %{
          "name" => city_info.name,
          "country_flag" => country_info.flag
        })
      else
        # Unknown city code: report it but keep the row usable
        Sentry.capture_message("Could not find city info", extra: %{code: city["code"]})

        Map.merge(city, %{"name" => "N/A"})
      end
    end)

  if params["csv"] do
    if Map.has_key?(query.filters, "event:goal") do
      cities
      |> transform_keys(%{"visitors" => "conversions"})
      |> to_csv(["name", "conversions", "conversion_rate"])
    else
      cities |> to_csv(["name", "visitors"])
    end
  else
    json(conn, cities)
  end
end
|
|
|
|
|
2019-11-19 07:30:42 +03:00
|
|
|
# Browser breakdown; JSON by default, CSV when requested (with goal
# filters the CSV reports conversions rather than visitor counts).
def browsers(conn, params) do
  site = conn.assigns[:site]
  query = site.timezone |> Query.from(params) |> Filters.add_prefix()
  pagination = parse_pagination(params)

  results =
    site
    |> Stats.breakdown(query, "visit:browser", ["visitors"], pagination)
    |> maybe_add_cr(site, query, pagination, "browser", "visit:browser")
    |> transform_keys(%{"browser" => "name"})
    |> maybe_add_percentages(query)

  cond do
    params["csv"] && Map.has_key?(query.filters, "event:goal") ->
      results
      |> transform_keys(%{"visitors" => "conversions"})
      |> to_csv(["name", "conversions", "conversion_rate"])

    params["csv"] ->
      to_csv(results, ["name", "visitors"])

    true ->
      json(conn, results)
  end
end
|
|
|
|
|
2020-11-10 16:18:59 +03:00
|
|
|
def browser_versions(conn, params) do
|
|
|
|
site = conn.assigns[:site]
|
2021-07-23 13:44:05 +03:00
|
|
|
query = Query.from(site.timezone, params) |> Filters.add_prefix()
|
|
|
|
pagination = parse_pagination(params)
|
|
|
|
|
|
|
|
versions =
|
|
|
|
Stats.breakdown(site, query, "visit:browser_version", ["visitors"], pagination)
|
2021-09-20 17:17:11 +03:00
|
|
|
|> maybe_add_cr(site, query, pagination, "browser_version", "visit:browser_version")
|
2021-11-04 15:20:39 +03:00
|
|
|
|> transform_keys(%{"browser_version" => "name"})
|
2021-11-12 16:18:35 +03:00
|
|
|
|> maybe_add_percentages(query)
|
2020-11-10 16:18:59 +03:00
|
|
|
|
2021-07-23 13:44:05 +03:00
|
|
|
json(conn, versions)
|
2020-11-10 16:18:59 +03:00
|
|
|
end
|
|
|
|
|
2019-11-19 07:30:42 +03:00
|
|
|
def operating_systems(conn, params) do
|
|
|
|
site = conn.assigns[:site]
|
2021-07-23 13:44:05 +03:00
|
|
|
query = Query.from(site.timezone, params) |> Filters.add_prefix()
|
|
|
|
pagination = parse_pagination(params)
|
2019-11-19 07:30:42 +03:00
|
|
|
|
2021-07-23 13:44:05 +03:00
|
|
|
systems =
|
|
|
|
Stats.breakdown(site, query, "visit:os", ["visitors"], pagination)
|
2021-09-20 17:17:11 +03:00
|
|
|
|> maybe_add_cr(site, query, pagination, "os", "visit:os")
|
2021-11-04 15:20:39 +03:00
|
|
|
|> transform_keys(%{"os" => "name"})
|
2021-11-12 16:18:35 +03:00
|
|
|
|> maybe_add_percentages(query)
|
2021-07-23 13:44:05 +03:00
|
|
|
|
2021-10-26 16:54:50 +03:00
|
|
|
if params["csv"] do
|
2021-11-12 16:18:35 +03:00
|
|
|
if Map.has_key?(query.filters, "event:goal") do
|
|
|
|
systems
|
|
|
|
|> transform_keys(%{"visitors" => "conversions"})
|
|
|
|
|> to_csv(["name", "conversions", "conversion_rate"])
|
|
|
|
else
|
|
|
|
systems |> to_csv(["name", "visitors"])
|
|
|
|
end
|
2021-10-26 16:54:50 +03:00
|
|
|
else
|
|
|
|
json(conn, systems)
|
|
|
|
end
|
2019-11-19 07:30:42 +03:00
|
|
|
end
|
|
|
|
|
2020-11-10 16:18:59 +03:00
|
|
|
def operating_system_versions(conn, params) do
|
|
|
|
site = conn.assigns[:site]
|
2021-07-23 13:44:05 +03:00
|
|
|
query = Query.from(site.timezone, params) |> Filters.add_prefix()
|
|
|
|
pagination = parse_pagination(params)
|
|
|
|
|
|
|
|
versions =
|
|
|
|
Stats.breakdown(site, query, "visit:os_version", ["visitors"], pagination)
|
2021-09-20 17:17:11 +03:00
|
|
|
|> maybe_add_cr(site, query, pagination, "os_version", "visit:os_version")
|
2021-11-04 15:20:39 +03:00
|
|
|
|> transform_keys(%{"os_version" => "name"})
|
2021-11-12 16:18:35 +03:00
|
|
|
|> maybe_add_percentages(query)
|
2020-11-10 16:18:59 +03:00
|
|
|
|
2021-07-23 13:44:05 +03:00
|
|
|
json(conn, versions)
|
2020-11-10 16:18:59 +03:00
|
|
|
end
|
|
|
|
|
2019-11-19 07:30:42 +03:00
|
|
|
def screen_sizes(conn, params) do
|
|
|
|
site = conn.assigns[:site]
|
2021-07-23 13:44:05 +03:00
|
|
|
query = Query.from(site.timezone, params) |> Filters.add_prefix()
|
|
|
|
pagination = parse_pagination(params)
|
|
|
|
|
|
|
|
sizes =
|
|
|
|
Stats.breakdown(site, query, "visit:device", ["visitors"], pagination)
|
2021-09-20 17:17:11 +03:00
|
|
|
|> maybe_add_cr(site, query, pagination, "device", "visit:device")
|
2021-11-04 15:20:39 +03:00
|
|
|
|> transform_keys(%{"device" => "name"})
|
2021-11-12 16:18:35 +03:00
|
|
|
|> maybe_add_percentages(query)
|
2019-11-19 07:30:42 +03:00
|
|
|
|
2021-10-26 16:54:50 +03:00
|
|
|
if params["csv"] do
|
2021-11-12 16:18:35 +03:00
|
|
|
if Map.has_key?(query.filters, "event:goal") do
|
|
|
|
sizes
|
|
|
|
|> transform_keys(%{"visitors" => "conversions"})
|
|
|
|
|> to_csv(["name", "conversions", "conversion_rate"])
|
|
|
|
else
|
|
|
|
sizes |> to_csv(["name", "visitors"])
|
|
|
|
end
|
2021-10-26 16:54:50 +03:00
|
|
|
else
|
|
|
|
json(conn, sizes)
|
|
|
|
end
|
2019-11-19 07:30:42 +03:00
|
|
|
end
|
|
|
|
|
2021-11-15 18:13:37 +03:00
|
|
|
defp calculate_cr(nil, _converted_visitors), do: 100.0
|
2021-11-15 18:06:39 +03:00
|
|
|
|
2020-10-30 12:26:16 +03:00
|
|
|
defp calculate_cr(unique_visitors, converted_visitors) do
|
|
|
|
if unique_visitors > 0,
|
|
|
|
do: Float.round(converted_visitors / unique_visitors * 100, 1),
|
2020-11-03 12:20:11 +03:00
|
|
|
else: 0.0
|
2020-10-30 12:26:16 +03:00
|
|
|
end
|
|
|
|
|
2019-11-19 07:30:42 +03:00
|
|
|
def conversions(conn, params) do
|
|
|
|
site = conn.assigns[:site]
|
2021-07-23 13:44:05 +03:00
|
|
|
query = Query.from(site.timezone, params) |> Filters.add_prefix()
|
2021-08-19 15:41:08 +03:00
|
|
|
|
|
|
|
query =
|
|
|
|
if query.period == "realtime" do
|
|
|
|
%Query{query | period: "30m"}
|
|
|
|
else
|
|
|
|
query
|
|
|
|
end
|
|
|
|
|
2021-08-19 11:03:41 +03:00
|
|
|
total_q = Query.remove_goal(query)
|
2021-07-23 13:44:05 +03:00
|
|
|
|
2021-08-19 11:03:41 +03:00
|
|
|
%{"visitors" => %{"value" => total_visitors}} = Stats.aggregate(site, total_q, ["visitors"])
|
2021-07-23 13:44:05 +03:00
|
|
|
|
2021-09-24 12:03:20 +03:00
|
|
|
prop_names =
|
|
|
|
if query.filters["event:goal"] do
|
|
|
|
Stats.props(site, query)
|
|
|
|
else
|
|
|
|
%{}
|
|
|
|
end
|
2020-11-03 12:20:11 +03:00
|
|
|
|
|
|
|
conversions =
|
2021-08-23 11:28:19 +03:00
|
|
|
Stats.breakdown(site, query, "event:goal", ["visitors", "events"], {100, 1})
|
2021-11-04 15:20:39 +03:00
|
|
|
|> transform_keys(%{
|
|
|
|
"goal" => "name",
|
2021-11-05 16:18:59 +03:00
|
|
|
"visitors" => "unique_conversions",
|
2021-11-04 15:20:39 +03:00
|
|
|
"events" => "total_conversions"
|
|
|
|
})
|
2020-11-03 12:20:11 +03:00
|
|
|
|> Enum.map(fn goal ->
|
|
|
|
goal
|
2021-07-23 13:44:05 +03:00
|
|
|
|> Map.put(:prop_names, prop_names[goal["name"]])
|
2021-11-12 16:18:35 +03:00
|
|
|
|> Map.put("conversion_rate", calculate_cr(total_visitors, goal["unique_conversions"]))
|
2020-11-03 12:20:11 +03:00
|
|
|
end)
|
2019-11-19 07:30:42 +03:00
|
|
|
|
2021-10-26 16:54:50 +03:00
|
|
|
if params["csv"] do
|
2021-11-04 15:20:39 +03:00
|
|
|
conversions |> to_csv(["name", "unique_conversions", "total_conversions"])
|
2021-10-26 16:54:50 +03:00
|
|
|
else
|
|
|
|
json(conn, conversions)
|
|
|
|
end
|
2020-10-28 12:09:04 +03:00
|
|
|
end
|
|
|
|
|
2020-10-30 11:49:41 +03:00
|
|
|
def prop_breakdown(conn, params) do
|
2020-10-28 12:09:04 +03:00
|
|
|
site = conn.assigns[:site]
|
2021-07-23 13:44:05 +03:00
|
|
|
query = Query.from(site.timezone, params) |> Filters.add_prefix()
|
2021-10-14 10:45:47 +03:00
|
|
|
pagination = parse_pagination(params)
|
2021-07-23 13:44:05 +03:00
|
|
|
|
2021-08-19 11:03:41 +03:00
|
|
|
total_q = Query.remove_goal(query)
|
2021-07-23 13:44:05 +03:00
|
|
|
|
2021-08-19 11:03:41 +03:00
|
|
|
%{"visitors" => %{"value" => unique_visitors}} = Stats.aggregate(site, total_q, ["visitors"])
|
2021-07-23 13:44:05 +03:00
|
|
|
|
|
|
|
prop_name = "event:props:" <> params["prop_name"]
|
2020-11-03 12:20:11 +03:00
|
|
|
|
|
|
|
props =
|
2021-10-14 10:45:47 +03:00
|
|
|
Stats.breakdown(site, query, prop_name, ["visitors", "events"], pagination)
|
2021-07-23 13:44:05 +03:00
|
|
|
|> transform_keys(%{
|
|
|
|
params["prop_name"] => "name",
|
2021-11-10 17:53:38 +03:00
|
|
|
"events" => "total_conversions",
|
|
|
|
"visitors" => "unique_conversions"
|
2021-07-23 13:44:05 +03:00
|
|
|
})
|
2020-11-03 12:20:11 +03:00
|
|
|
|> Enum.map(fn prop ->
|
2021-11-10 17:53:38 +03:00
|
|
|
Map.put(
|
|
|
|
prop,
|
|
|
|
"conversion_rate",
|
|
|
|
calculate_cr(unique_visitors, prop["unique_conversions"])
|
|
|
|
)
|
|
|
|
end)
|
|
|
|
|
|
|
|
if params["csv"] do
|
|
|
|
props
|
|
|
|
else
|
|
|
|
json(conn, props)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
def all_props_breakdown(conn, params) do
|
|
|
|
site = conn.assigns[:site]
|
|
|
|
query = Query.from(site.timezone, params) |> Filters.add_prefix()
|
|
|
|
|
|
|
|
headers = ["prop", "name", "unique_conversions", "total_conversions"]
|
|
|
|
|
|
|
|
prop_names =
|
|
|
|
if query.filters["event:goal"] do
|
|
|
|
{_, _, goal} = query.filters["event:goal"]
|
|
|
|
|
|
|
|
Stats.props(site, query)
|
|
|
|
|> Map.get(goal, [])
|
|
|
|
else
|
|
|
|
[]
|
|
|
|
end
|
|
|
|
|
|
|
|
values =
|
|
|
|
prop_names
|
|
|
|
|> Enum.map(fn prop ->
|
|
|
|
prop_breakdown(conn, Map.put(params, "prop_name", prop))
|
|
|
|
|> Enum.map(&Map.put(&1, "prop", prop))
|
2020-11-03 12:20:11 +03:00
|
|
|
end)
|
2021-11-10 17:53:38 +03:00
|
|
|
|> Enum.concat()
|
2020-10-28 12:09:04 +03:00
|
|
|
|
2021-11-10 17:53:38 +03:00
|
|
|
to_csv(values, headers)
|
2019-11-19 07:30:42 +03:00
|
|
|
end
|
|
|
|
|
|
|
|
def current_visitors(conn, _) do
|
2020-07-30 11:18:28 +03:00
|
|
|
site = conn.assigns[:site]
|
2021-07-23 13:44:05 +03:00
|
|
|
json(conn, Stats.current_visitors(site))
|
2019-11-19 07:30:42 +03:00
|
|
|
end
|
2021-01-07 16:16:04 +03:00
|
|
|
|
|
|
|
defp google_api(), do: Application.fetch_env!(:plausible, :google_api)
|
2021-03-25 12:55:15 +03:00
|
|
|
|
|
|
|
def handle_errors(conn, %{kind: kind, reason: reason}) do
|
|
|
|
json(conn, %{error: Exception.format_banner(kind, reason)})
|
|
|
|
end
|
2021-06-21 14:42:16 +03:00
|
|
|
|
|
|
|
def filter_suggestions(conn, params) do
|
|
|
|
site = conn.assigns[:site]
|
2021-08-18 12:33:08 +03:00
|
|
|
query = Query.from(site.timezone, params) |> Filters.add_prefix()
|
2021-06-21 14:42:16 +03:00
|
|
|
|
2021-07-23 13:44:05 +03:00
|
|
|
json(conn, Stats.filter_suggestions(site, query, params["filter_name"], params["q"]))
|
|
|
|
end
|
|
|
|
|
|
|
|
defp transform_keys(results, keys_to_replace) do
|
|
|
|
Enum.map(results, fn map ->
|
|
|
|
Enum.map(map, fn {key, val} ->
|
|
|
|
{Map.get(keys_to_replace, key, key), val}
|
|
|
|
end)
|
|
|
|
|> Enum.into(%{})
|
|
|
|
end)
|
|
|
|
end
|
|
|
|
|
|
|
|
defp parse_pagination(params) do
|
|
|
|
limit = if params["limit"], do: String.to_integer(params["limit"]), else: 9
|
|
|
|
page = if params["page"], do: String.to_integer(params["page"]), else: 1
|
|
|
|
{limit, page}
|
|
|
|
end
|
|
|
|
|
2021-11-12 16:18:35 +03:00
|
|
|
defp maybe_add_percentages(stat_list, query) do
|
|
|
|
if Map.has_key?(query.filters, "event:goal") do
|
|
|
|
stat_list
|
|
|
|
else
|
|
|
|
total = Enum.reduce(stat_list, 0, fn %{"visitors" => count}, total -> total + count end)
|
2021-07-23 13:44:05 +03:00
|
|
|
|
2021-11-12 16:18:35 +03:00
|
|
|
Enum.map(stat_list, fn stat ->
|
|
|
|
Map.put(stat, "percentage", round(stat["visitors"] / total * 100))
|
|
|
|
end)
|
|
|
|
end
|
2021-07-23 13:44:05 +03:00
|
|
|
end
|
|
|
|
|
2021-08-04 10:50:23 +03:00
|
|
|
defp maybe_hide_noref(query, property, params) do
|
2021-07-23 13:44:05 +03:00
|
|
|
cond do
|
2021-08-04 10:50:23 +03:00
|
|
|
is_nil(query.filters[property]) and params["show_noref"] != "true" ->
|
|
|
|
new_filters = Map.put(query.filters, property, {:is_not, "Direct / None"})
|
2021-07-23 13:44:05 +03:00
|
|
|
%Query{query | filters: new_filters}
|
|
|
|
|
|
|
|
true ->
|
|
|
|
query
|
|
|
|
end
|
2021-06-21 14:42:16 +03:00
|
|
|
end
|
2021-09-20 17:17:11 +03:00
|
|
|
|
|
|
|
defp add_cr(list, list_without_goals, key_name) do
|
|
|
|
Enum.map(list, fn item ->
|
|
|
|
without_goal = Enum.find(list_without_goals, fn s -> s[key_name] === item[key_name] end)
|
|
|
|
|
|
|
|
item
|
2021-09-29 14:28:29 +03:00
|
|
|
|> Map.put(:total_visitors, without_goal["visitors"])
|
2021-11-12 16:18:35 +03:00
|
|
|
|> Map.put("conversion_rate", calculate_cr(without_goal["visitors"], item["visitors"]))
|
2021-09-20 17:17:11 +03:00
|
|
|
end)
|
|
|
|
end
|
|
|
|
|
2021-11-09 12:51:32 +03:00
|
|
|
defp maybe_add_cr([], _site, _query, _pagination, _key_name, _filter_name), do: []
|
|
|
|
|
2021-09-20 17:17:11 +03:00
|
|
|
defp maybe_add_cr(list, site, query, pagination, key_name, filter_name) do
|
|
|
|
if Map.has_key?(query.filters, "event:goal") do
|
|
|
|
items = Enum.map(list, fn item -> item[key_name] end)
|
|
|
|
|
|
|
|
query_without_goal =
|
|
|
|
query
|
|
|
|
|> Query.put_filter(filter_name, {:member, items})
|
|
|
|
|> Query.remove_goal()
|
|
|
|
|
|
|
|
res_without_goal =
|
|
|
|
Stats.breakdown(site, query_without_goal, filter_name, ["visitors"], pagination)
|
|
|
|
|
|
|
|
list
|
|
|
|
|> add_cr(res_without_goal, key_name)
|
|
|
|
else
|
|
|
|
list
|
|
|
|
end
|
|
|
|
end
|
2021-10-26 16:54:50 +03:00
|
|
|
|
|
|
|
defp to_csv(list, headers) do
|
|
|
|
list
|
|
|
|
|> Enum.map(fn row -> Enum.map(headers, &row[&1]) end)
|
|
|
|
|> (fn res -> [headers | res] end).()
|
|
|
|
|> CSV.encode()
|
|
|
|
|> Enum.join()
|
|
|
|
end
|
2022-01-18 19:41:15 +03:00
|
|
|
|
|
|
|
defp get_country(code) do
|
|
|
|
case Location.get_country(code) do
|
|
|
|
nil ->
|
|
|
|
Sentry.capture_message("Could not find country info", extra: %{code: code})
|
|
|
|
|
|
|
|
%Location.Country{
|
|
|
|
alpha_2: code,
|
|
|
|
alpha_3: "N/A",
|
|
|
|
name: code,
|
|
|
|
flag: nil
|
|
|
|
}
|
|
|
|
|
|
|
|
country ->
|
|
|
|
country
|
|
|
|
end
|
|
|
|
end
|
2019-11-19 07:30:42 +03:00
|
|
|
end
|