2019-11-19 07:30:42 +03:00
|
|
|
defmodule PlausibleWeb.Api.StatsController do
|
|
|
|
use PlausibleWeb, :controller
|
|
|
|
use Plausible.Repo
|
2020-05-21 18:59:07 +03:00
|
|
|
alias Plausible.Stats.Clickhouse, as: Stats
|
2019-11-25 12:17:18 +03:00
|
|
|
alias Plausible.Stats.Query
|
2020-04-14 14:04:35 +03:00
|
|
|
plug PlausibleWeb.AuthorizeStatsPlug
|
2019-11-19 07:30:42 +03:00
|
|
|
|
|
|
|
# Renders the dashboard's main graph payload: the timeseries plot plus the
# headline ("top") stats for the query parsed out of the request params.
def main_graph(conn, params) do
  site = conn.assigns[:site]
  query = Query.from(site.timezone, params)

  # Compute the plot concurrently while the top stats run in this process.
  graph_task = Task.async(fn -> Stats.calculate_plot(site, query) end)
  top_stats = fetch_top_stats(site, query)
  {plot, labels, present_index} = Task.await(graph_task)

  response = %{
    plot: plot,
    labels: labels,
    present_index: present_index,
    top_stats: top_stats,
    interval: query.step_type
  }

  json(conn, response)
end
|
|
|
|
|
2020-07-14 16:52:26 +03:00
|
|
|
# Headline stats for the "realtime" period: current visitor count and
# pageviews in the last 30 minutes. No previous-period comparison applies.
defp fetch_top_stats(site, %Query{period: "realtime"} = query) do
  current_visitors = %{
    name: "Current visitors",
    count: Stats.current_visitors(site, query)
  }

  recent_pageviews = %{
    name: "Pageviews (last 30 min)",
    count: Stats.total_pageviews(site, query)
  }

  [current_visitors, recent_pageviews]
end
|
|
|
|
|
2019-11-25 12:17:18 +03:00
|
|
|
# Headline stats when the dashboard is filtered by a goal: unique visitors,
# unique/total conversions, and conversion rate — each compared against the
# previous period of the same length.
defp fetch_top_stats(site, %Query{filters: %{"goal" => goal}} = query) when is_binary(goal) do
  # "Unique visitors" must ignore the goal/props filters so the conversion
  # rate is relative to all traffic in the period, not just converting traffic.
  total_filter = Map.merge(query.filters, %{"goal" => nil, "props" => nil})
  prev_query = Query.shift_back(query)

  unique_visitors = Stats.unique_visitors(site, %{query | filters: total_filter})
  prev_unique_visitors = Stats.unique_visitors(site, %{prev_query | filters: total_filter})

  converted_visitors = Stats.unique_visitors(site, query)
  prev_converted_visitors = Stats.unique_visitors(site, prev_query)

  completions = Stats.total_events(site, query)
  prev_completions = Stats.total_events(site, prev_query)

  cr = calculate_cr(unique_visitors, converted_visitors)
  prev_cr = calculate_cr(prev_unique_visitors, prev_converted_visitors)

  [
    %{
      name: "Unique visitors",
      count: unique_visitors,
      change: percent_change(prev_unique_visitors, unique_visitors)
    },
    %{
      name: "Unique conversions",
      count: converted_visitors,
      change: percent_change(prev_converted_visitors, converted_visitors)
    },
    %{
      name: "Total conversions",
      count: completions,
      change: percent_change(prev_completions, completions)
    },
    %{
      name: "Conversion rate",
      percentage: cr,
      change: percent_change(prev_cr, cr)
    }
  ]
end
|
|
|
|
|
|
|
|
# Default headline stats: visitors, pageviews, bounce rate and (when not
# filtered to a single page) visit duration, each compared to the previous
# period.
defp fetch_top_stats(site, query) do
  prev_query = Query.shift_back(query)

  {pageviews, visitors} = Stats.pageviews_and_visitors(site, query)
  {prev_pageviews, prev_visitors} = Stats.pageviews_and_visitors(site, prev_query)

  bounce_rate = Stats.bounce_rate(site, query)
  prev_bounce_rate = Stats.bounce_rate(site, prev_query)
  # Bounce-rate change is expressed in percentage points; `nil` when there is
  # no previous-period rate to compare against.
  bounce_change = if prev_bounce_rate > 0, do: bounce_rate - prev_bounce_rate

  base_stats = [
    %{
      name: "Unique visitors",
      count: visitors,
      change: percent_change(prev_visitors, visitors)
    },
    %{
      name: "Total pageviews",
      count: pageviews,
      change: percent_change(prev_pageviews, pageviews)
    },
    %{name: "Bounce rate", percentage: bounce_rate, change: bounce_change}
  ]

  # Visit duration is omitted when a "page" filter is active (it is only
  # computed for unfiltered-by-page queries in the original logic).
  if query.filters["page"] do
    base_stats
  else
    duration = Stats.visit_duration(site, query)
    prev_duration = Stats.visit_duration(site, prev_query)

    base_stats ++
      [
        %{
          name: "Visit duration",
          count: duration,
          change: percent_change(prev_duration, duration)
        }
      ]
  end
end
|
|
|
|
|
|
|
|
# Percent change from `old_count` to `new_count`, rounded to the nearest
# integer. A rise from zero is reported as 100%; zero-to-zero is 0%.
# Note: callers pass both integer counts and float rates, so the zero check
# must stay a `==` comparison (0.0 == 0 is true).
defp percent_change(old_count, new_count) do
  if old_count == 0 do
    # No baseline: any growth counts as +100%, otherwise no change.
    if new_count > 0, do: 100, else: 0
  else
    round((new_count - old_count) / old_count * 100)
  end
end
|
|
|
|
|
2020-09-28 11:29:24 +03:00
|
|
|
# Top referrer sources for the site, paginated and optionally including
# extra computed fields requested via the comma-separated "include" param.
def sources(conn, params) do
  site = conn.assigns[:site]
  query = Query.from(site.timezone, params)

  include =
    case params["include"] do
      nil -> []
      value -> String.split(value, ",")
    end

  limit = if params["limit"], do: String.to_integer(params["limit"]), else: 9
  page = if params["page"], do: String.to_integer(params["page"]), else: 1
  show_noref = params["show_noref"] == "true"

  json(conn, Stats.top_sources(site, query, limit, page, show_noref, include))
end
|
|
|
|
|
2020-09-28 11:29:24 +03:00
|
|
|
# UTM medium breakdown, paginated. Defaults: 9 rows, first page.
def utm_mediums(conn, params) do
  site = conn.assigns[:site]
  query = Query.from(site.timezone, params)

  limit = if params["limit"], do: String.to_integer(params["limit"]), else: 9
  page = if params["page"], do: String.to_integer(params["page"]), else: 1
  show_noref = params["show_noref"] == "true"

  json(conn, Stats.utm_mediums(site, query, limit, page, show_noref))
end
|
|
|
|
|
|
|
|
# UTM campaign breakdown, paginated. Defaults: 9 rows, first page.
def utm_campaigns(conn, params) do
  site = conn.assigns[:site]
  query = Query.from(site.timezone, params)

  limit = if params["limit"], do: String.to_integer(params["limit"]), else: 9
  page = if params["page"], do: String.to_integer(params["page"]), else: 1
  show_noref = params["show_noref"] == "true"

  json(conn, Stats.utm_campaigns(site, query, limit, page, show_noref))
end
|
2020-01-16 16:40:06 +03:00
|
|
|
|
2020-09-28 11:29:24 +03:00
|
|
|
# UTM source breakdown, paginated. Defaults: 9 rows, first page.
def utm_sources(conn, params) do
  site = conn.assigns[:site]
  query = Query.from(site.timezone, params)

  limit = if params["limit"], do: String.to_integer(params["limit"]), else: 9
  page = if params["page"], do: String.to_integer(params["page"]), else: 1
  show_noref = params["show_noref"] == "true"

  json(conn, Stats.utm_sources(site, query, limit, page, show_noref))
end
|
2019-11-20 11:42:45 +03:00
|
|
|
|
|
|
|
@google_api Application.fetch_env!(:plausible, :google_api)
|
|
|
|
|
2019-11-19 07:30:42 +03:00
|
|
|
# Drilldown for the "Google" referrer. When the site has Google Search
# Console connected (google_auth with a property) and no goal filter is
# active, we fetch actual search terms from the Google API; otherwise we
# tell the frontend the integration is not configured.
def referrer_drilldown(conn, %{"referrer" => "Google"} = params) do
  site = conn.assigns[:site] |> Repo.preload(:google_auth)
  query = Query.from(site.timezone, params)

  # FIX: params["limit"] arrives as a string (e.g. "10"); every other
  # endpoint converts it with String.to_integer/1 before use. Previously the
  # raw string was forwarded to the Google API client.
  limit = if params["limit"], do: String.to_integer(params["limit"]), else: 9

  # `search_terms` is nil when the integration is not usable for this query
  # (the `if` without `else` yields nil).
  search_terms =
    if site.google_auth && site.google_auth.property && !query.filters["goal"] do
      @google_api.fetch_stats(site, query, limit)
    end

  case search_terms do
    nil ->
      {_, total_visitors} = Stats.pageviews_and_visitors(site, query)
      user_id = get_session(conn, :current_user_id)
      # Only owners get the "connect Search Console" call to action.
      is_owner = user_id && Plausible.Sites.is_owner?(user_id, site)
      json(conn, %{not_configured: true, is_owner: is_owner, total_visitors: total_visitors})

    {:ok, terms} ->
      {_, total_visitors} = Stats.pageviews_and_visitors(site, query)
      json(conn, %{search_terms: terms, total_visitors: total_visitors})

    {:error, e} ->
      conn
      |> put_status(500)
      |> json(%{error: e})
  end
end
|
|
|
|
|
|
|
|
# Drilldown for an arbitrary referrer: individual referring URLs under the
# given source, plus the total visitor count for the period.
def referrer_drilldown(conn, %{"referrer" => referrer} = params) do
  site = conn.assigns[:site]
  query = Query.from(site.timezone, params)
  include = if params["include"], do: String.split(params["include"], ","), else: []

  # FIX: params["limit"] is a string; previously it was forwarded raw as the
  # limit. Convert with String.to_integer/1 for consistency with the other
  # list endpoints (sources/2, pages/2, ...).
  limit = if params["limit"], do: String.to_integer(params["limit"]), else: 9

  referrers = Stats.referrer_drilldown(site, query, referrer, include, limit)
  {_, total_visitors} = Stats.pageviews_and_visitors(site, query)
  json(conn, %{referrers: referrers, total_visitors: total_visitors})
end
|
|
|
|
|
2020-01-16 16:40:06 +03:00
|
|
|
# Referrer drilldown scoped to a goal filter: referring URLs plus the total
# number of conversions attributed to this referrer.
def referrer_drilldown_for_goal(conn, %{"referrer" => referrer} = params) do
  site = conn.assigns[:site]
  query = Query.from(site.timezone, params)

  referrers = Stats.referrer_drilldown_for_goal(site, query, referrer)
  total_visitors = Stats.conversions_from_referrer(site, query, referrer)

  json(conn, %{referrers: referrers, total_visitors: total_visitors})
end
|
|
|
|
|
2019-11-19 07:30:42 +03:00
|
|
|
# Top pages for the site; honours optional "include" and "limit" params.
def pages(conn, params) do
  site = conn.assigns[:site]
  query = Query.from(site.timezone, params)

  include =
    case params["include"] do
      nil -> []
      value -> String.split(value, ",")
    end

  limit = if params["limit"], do: String.to_integer(params["limit"]), else: 9

  json(conn, Stats.top_pages(site, query, limit, include))
end
|
|
|
|
|
2020-07-30 11:18:28 +03:00
|
|
|
# Top entry pages for the site; honours optional "include" and "limit" params.
def entry_pages(conn, params) do
  site = conn.assigns[:site]
  query = Query.from(site.timezone, params)

  include =
    case params["include"] do
      nil -> []
      value -> String.split(value, ",")
    end

  limit = if params["limit"], do: String.to_integer(params["limit"]), else: 9

  json(conn, Stats.entry_pages(site, query, limit, include))
end
|
|
|
|
|
2019-11-19 07:30:42 +03:00
|
|
|
# Visitor breakdown by country for the queried period.
def countries(conn, params) do
  site = conn.assigns[:site]

  countries =
    site.timezone
    |> Query.from(params)
    |> then_countries(site)

  json(conn, countries)
end

# One-line helper keeping the pipeline readable: runs the country query.
defp then_countries(query, site), do: Stats.countries(site, query)
|
|
|
|
|
|
|
|
# Visitor breakdown by browser, limited to the top N rows.
def browsers(conn, params) do
  site = conn.assigns[:site]
  query = Query.from(site.timezone, params)

  # FIX: params["limit"] is a string; previously `params["limit"] || 9`
  # forwarded it raw. Convert to integer, consistent with sources/2, pages/2.
  limit = if params["limit"], do: String.to_integer(params["limit"]), else: 9

  json(conn, Stats.browsers(site, query, limit))
end
|
|
|
|
|
|
|
|
# Visitor breakdown by operating system, limited to the top N rows.
def operating_systems(conn, params) do
  site = conn.assigns[:site]
  query = Query.from(site.timezone, params)

  # FIX: params["limit"] is a string; previously `params["limit"] || 9`
  # forwarded it raw. Convert to integer, consistent with sources/2, pages/2.
  limit = if params["limit"], do: String.to_integer(params["limit"]), else: 9

  json(conn, Stats.operating_systems(site, query, limit))
end
|
|
|
|
|
|
|
|
# Visitor breakdown by screen-size class for the queried period.
def screen_sizes(conn, params) do
  site = conn.assigns[:site]
  query = Query.from(site.timezone, params)
  sizes = Stats.top_screen_sizes(site, query)

  json(conn, sizes)
end
|
|
|
|
|
2020-10-30 12:26:16 +03:00
|
|
|
# Conversion rate as a percentage with one decimal place. Returns 0.0 when
# there were no visitors, avoiding a division by zero.
defp calculate_cr(unique_visitors, converted_visitors) do
  if unique_visitors > 0 do
    Float.round(converted_visitors / unique_visitors * 100, 1)
  else
    0.0
  end
end
|
|
|
|
|
2019-11-19 07:30:42 +03:00
|
|
|
# Goal conversions for the period, each annotated with its available custom
# property names and its conversion rate relative to all traffic.
def conversions(conn, params) do
  site = conn.assigns[:site]
  query = Query.from(site.timezone, params)

  # Visitors are counted with goal/props filters removed so the conversion
  # rate is relative to all traffic in the period.
  total_filter = Map.merge(query.filters, %{"goal" => nil, "props" => nil})
  unique_visitors = Stats.unique_visitors(site, %{query | filters: total_filter})
  prop_names = Stats.all_props(site, query)

  conversions =
    for goal <- Stats.goal_conversions(site, query) do
      goal
      |> Map.put(:prop_names, prop_names[goal[:name]])
      |> Map.put(:conversion_rate, calculate_cr(unique_visitors, goal[:count]))
    end

  json(conn, conversions)
end
|
|
|
|
|
2020-10-30 11:49:41 +03:00
|
|
|
# Breakdown of a single custom property (params["prop_name"]) for the active
# goal, with each row annotated with its conversion rate over all traffic.
def prop_breakdown(conn, params) do
  site = conn.assigns[:site]
  query = Query.from(site.timezone, params)

  # Conversion rate is computed against all traffic, so strip goal/props.
  total_filter = Map.merge(query.filters, %{"goal" => nil, "props" => nil})
  unique_visitors = Stats.unique_visitors(site, %{query | filters: total_filter})

  props =
    for prop <- Stats.property_breakdown(site, query, params["prop_name"]) do
      Map.put(prop, :conversion_rate, calculate_cr(unique_visitors, prop[:count]))
    end

  json(conn, props)
end
|
|
|
|
|
|
|
|
# Bare current-visitor count; always uses a fixed "realtime" period query,
# ignoring any request params.
def current_visitors(conn, _) do
  site = conn.assigns[:site]
  realtime_query = Query.from(site.timezone, %{"period" => "realtime"})

  json(conn, Stats.current_visitors(site, realtime_query))
end
|
|
|
|
end
|