defmodule PlausibleWeb.Api.ExternalController do
  use PlausibleWeb, :controller
  require Logger
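
  # Event ingestion endpoint: parses the request body, attempts to record the
  # event(s) and replies with 202 when accepted or 400 when the payload does
  # not produce a valid event.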
  def event(conn, _params) do
    params = parse_body(conn)
    Sentry.Context.set_extra_context(%{request: params})

    case create_event(conn, params) do
      :ok ->
        conn |> send_resp(202, "")

      :error ->
        conn |> send_resp(400, "")
    end
  end
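
  # Reports a client-side snippet error to Sentry; always responds with 200.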
  def error(conn, _params) do
    Sentry.capture_message("JS snippet error")
    send_resp(conn, 200, "")
  end
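
  # Health check: runs `SELECT 1` against both Postgres and ClickHouse and
  # returns 200 only when both repositories respond, 500 otherwise.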
  def health(conn, _params) do
    postgres_health =
      case Ecto.Adapters.SQL.query(Plausible.Repo, "SELECT 1", []) do
        {:ok, _} -> "ok"
        e -> "error: #{inspect(e)}"
      end

    clickhouse_health =
      case Ecto.Adapters.SQL.query(Plausible.ClickhouseRepo, "SELECT 1", []) do
        {:ok, _} -> "ok"
        e -> "error: #{inspect(e)}"
      end

    status =
      case {postgres_health, clickhouse_health} do
        {"ok", "ok"} -> 200
        _ -> 500
      end

    conn
    |> put_status(status)
    |> json(%{
      postgres: postgres_health,
      clickhouse: clickhouse_health
    })
  end
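
  # Parses the user-agent header with UAInspector, memoising results per UA
  # string in the `:user_agents` Cachex cache. Returns nil when the header is
  # missing.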
  defp parse_user_agent(conn) do
    user_agent = Plug.Conn.get_req_header(conn, "user-agent") |> List.first()

    if user_agent do
      Cachex.fetch!(:user_agents, user_agent, fn ua ->
        {:commit, UAInspector.parse(ua)}
      end)
    end
  end
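
  # Normalises the short/long parameter name variants, ignores known bot
  # traffic, builds the ClickHouse event attributes and writes one event per
  # target domain. Returns :ok when every event was accepted and :error as
  # soon as one changeset is invalid.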
  defp create_event(conn, params) do
    params = %{
      "name" => params["n"] || params["name"],
      "url" => params["u"] || params["url"],
      "referrer" => params["r"] || params["referrer"],
      "domain" => params["d"] || params["domain"],
      "screen_width" => params["w"] || params["screen_width"],
      "hash_mode" => params["h"] || params["hashMode"],
      "meta" => parse_meta(params)
    }

    ua = parse_user_agent(conn)

    if is_bot?(ua) do
      :ok
    else
      uri = params["url"] && URI.parse(params["url"])
      query = if uri && uri.query, do: URI.decode_query(uri.query), else: %{}

      ref = parse_referrer(uri, params["referrer"])
      country_code = visitor_country(conn)
      salts = Plausible.Session.Salts.fetch()

      event_attrs = %{
        timestamp: NaiveDateTime.utc_now() |> NaiveDateTime.truncate(:second),
        name: params["name"],
        hostname: strip_www(uri && uri.host),
        pathname: get_pathname(uri, params["hash_mode"]),
        referrer_source: get_referrer_source(query, ref),
        referrer: clean_referrer(ref),
        utm_medium: query["utm_medium"],
        utm_source: query["utm_source"],
        utm_campaign: query["utm_campaign"],
        country_code: country_code,
        operating_system: ua && os_name(ua),
        operating_system_version: ua && os_version(ua),
        browser: ua && browser_name(ua),
        browser_version: ua && browser_version(ua),
        screen_size: calculate_screen_size(params["screen_width"]),
        "meta.key": Map.keys(params["meta"]),
        "meta.value": Map.values(params["meta"]) |> Enum.map(&Kernel.to_string/1)
      }

      Enum.reduce_while(get_domains(params, uri), :error, fn domain, _res ->
        user_id = generate_user_id(conn, domain, event_attrs[:hostname], salts[:current])

        previous_user_id =
          salts[:previous] &&
            generate_user_id(conn, domain, event_attrs[:hostname], salts[:previous])

        changeset =
          event_attrs
          |> Map.merge(%{domain: domain, user_id: user_id})
          |> Plausible.ClickhouseEvent.new()

        if changeset.valid? do
          event = Ecto.Changeset.apply_changes(changeset)
          session_id = Plausible.Session.Store.on_event(event, previous_user_id)

          event
          |> Map.put(:session_id, session_id)
          |> Plausible.Event.WriteBuffer.insert()

          {:cont, :ok}
        else
          {:halt, :error}
        end
      end)
    end
  end
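
  # UAInspector reports most crawlers as a Bot result; Headless Chrome is
  # matched as a regular client, so it is filtered out explicitly here.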
  defp is_bot?(%UAInspector.Result.Bot{}), do: true

  defp is_bot?(%UAInspector.Result{client: %UAInspector.Result.Client{name: "Headless Chrome"}}),
    do: true

  defp is_bot?(_), do: false
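
  # Custom properties arrive JSON-encoded under "m"/"meta"/"p"/"props";
  # anything that does not decode to a JSON object is dropped.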
  defp parse_meta(params) do
    raw_meta = params["m"] || params["meta"] || params["p"] || params["props"]

    if raw_meta do
      case Jason.decode(raw_meta) do
        {:ok, props} when is_map(props) -> props
        _ -> %{}
      end
    else
      %{}
    end
  end
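
  # The domain parameter may hold a comma-separated list, allowing one
  # pageview to be recorded against several sites; otherwise the host of the
  # page URL is used.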
  defp get_domains(params, uri) do
    if params["domain"] do
      String.split(params["domain"], ",")
      |> Enum.map(&String.trim/1)
      |> Enum.map(&strip_www/1)
    else
      List.wrap(strip_www(uri && uri.host))
    end
  end
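
  # With hash-based routing enabled the URL fragment is appended to the
  # pathname, so e.g. "/app#login" and "/app#signup" count as distinct pages.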
  defp get_pathname(nil, _), do: "/"

  defp get_pathname(uri, hash_mode) do
    pathname =
      (uri.path || "/")
      |> URI.decode()

    if hash_mode && uri.fragment do
      pathname <> "#" <> URI.decode(uri.fragment)
    else
      pathname
    end
  end
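
  # Geolocates the client IP via Geolix and returns the two-letter ISO country
  # code, or nil when the lookup yields nothing.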
  defp visitor_country(conn) do
    result =
      PlausibleWeb.RemoteIp.get(conn)
      |> Geolix.lookup()
      |> Map.get(:country)

    if result && result.country do
      result.country.iso_code
    end
  end
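
  # Referrals from the same site (or from localhost) are ignored; external
  # referrers are parsed with RefInspector.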
  defp parse_referrer(_, nil), do: nil

  defp parse_referrer(uri, referrer_str) do
    referrer_uri = URI.parse(referrer_str)

    if strip_www(referrer_uri.host) !== strip_www(uri.host) && referrer_uri.host !== "localhost" do
      RefInspector.parse(referrer_str)
    end
  end
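
  # Anonymous visitor ID: a SipHash of user-agent <> IP <> domain <> root
  # domain, keyed with a rotating salt from Plausible.Session.Salts.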
  defp generate_user_id(conn, domain, hostname, salt) do
    user_agent = List.first(Plug.Conn.get_req_header(conn, "user-agent")) || ""
    ip_address = PlausibleWeb.RemoteIp.get(conn)
    root_domain = get_root_domain(hostname)

    if domain && root_domain do
      SipHash.hash!(salt, user_agent <> ip_address <> domain <> root_domain)
    end
  end

  defp get_root_domain(hostname) when is_binary(hostname) do
    PublicSuffix.registrable_domain(hostname)
  end

  defp get_root_domain(hostname), do: hostname
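
  # Buckets the reported screen width: below 576px is "Mobile", below 992px
  # "Tablet", below 1440px "Laptop", anything wider "Desktop".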
  defp calculate_screen_size(nil), do: nil
  defp calculate_screen_size(width) when width < 576, do: "Mobile"
  defp calculate_screen_size(width) when width < 992, do: "Tablet"
  defp calculate_screen_size(width) when width < 1440, do: "Laptop"
  defp calculate_screen_size(width) when width >= 1440, do: "Desktop"
  defp clean_referrer(nil), do: nil

  defp clean_referrer(ref) do
    uri = URI.parse(ref.referer)

    if right_uri?(uri) do
      host = String.replace_prefix(uri.host, "www.", "")
      path = uri.path || ""
      host <> String.trim_trailing(path, "/")
    end
  end
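
  # The payload may arrive as a raw JSON body that Plug has not fetched; in
  # that case the body is read and decoded manually, otherwise the
  # already-parsed params are used as-is.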
  defp parse_body(conn) do
    case conn.body_params do
      %Plug.Conn.Unfetched{} ->
        {:ok, body, _conn} = Plug.Conn.read_body(conn)
        Jason.decode!(body)

      params ->
        params
    end
  end

  defp strip_www(nil), do: nil

  defp strip_www(hostname) do
    String.replace_prefix(hostname, "www.", "")
  end
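
  # Collapses mobile browser variants into their desktop family and reports
  # webviews / native clients as "Mobile App".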
  defp browser_name(ua) do
    case ua.client do
      :unknown -> ""
      %UAInspector.Result.Client{name: "Mobile Safari"} -> "Safari"
      %UAInspector.Result.Client{name: "Chrome Mobile"} -> "Chrome"
      %UAInspector.Result.Client{name: "Chrome Mobile iOS"} -> "Chrome"
      %UAInspector.Result.Client{name: "Firefox Mobile"} -> "Firefox"
      %UAInspector.Result.Client{name: "Firefox Mobile iOS"} -> "Firefox"
      %UAInspector.Result.Client{name: "Opera Mobile"} -> "Opera"
      %UAInspector.Result.Client{name: "Chrome Webview"} -> "Mobile App"
      %UAInspector.Result.Client{type: "mobile app"} -> "Mobile App"
      client -> client.name
    end
  end
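
  # Truncates a version string to major.minor, e.g. "89.0.4389.82" -> "89.0".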
  defp major_minor(:unknown), do: ""

  defp major_minor(version) do
    version
    |> String.split(".")
    |> Enum.take(2)
    |> Enum.join(".")
  end

  defp browser_version(ua) do
    case ua.client do
      :unknown -> ""
      %UAInspector.Result.Client{type: "mobile app"} -> ""
      client -> major_minor(client.version)
    end
  end

  defp os_name(ua) do
    case ua.os do
      :unknown -> ""
      os -> os.name
    end
  end

  defp os_version(ua) do
    case ua.os do
      :unknown -> ""
      os -> major_minor(os.version)
    end
  end
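
  # Explicit utm_source/source/ref query parameters take precedence over the
  # referrer header.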
  defp get_referrer_source(query, ref) do
    source = query["utm_source"] || query["source"] || query["ref"]
    source || get_source_from_referrer(ref)
  end

  defp get_source_from_referrer(nil), do: nil

  defp get_source_from_referrer(ref) do
    case ref.source do
      :unknown ->
        clean_uri(ref.referer)

      source ->
        source
    end
  end

  defp clean_uri(uri) do
    uri = URI.parse(String.trim(uri))

    if right_uri?(uri) do
      String.replace_leading(uri.host, "www.", "")
    end
  end
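
  # Only absolute http(s) URIs with a non-empty host are considered usable
  # referrers.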
  defp right_uri?(%URI{host: nil}), do: false

  defp right_uri?(%URI{host: host, scheme: scheme})
       when scheme in ["http", "https"] and byte_size(host) > 0,
       do: true

  defp right_uri?(_), do: false
end