defmodule PlausibleWeb.AuthController do
  use PlausibleWeb, :controller
  use Plausible.Repo
  alias Plausible.Auth
  require Logger

  plug PlausibleWeb.RequireLoggedOutPlug
       when action in [
              :register_form,
              :register,
              :register_from_invitation_form,
              :register_from_invitation,
              :login_form,
              :login
            ]

  plug PlausibleWeb.RequireAccountPlug
       when action in [
              :user_settings,
              :save_settings,
              :delete_me,
              :password_form,
              :set_password,
              :activate_form
            ]
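
  # Registration can be disabled on self-hosted instances via the
  # :disable_registration key under the :selfhost application env. When it is
  # enabled, the register actions below simply redirect to the login form.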
  def register_form(conn, _params) do
    if Keyword.fetch!(Application.get_env(:plausible, :selfhost), :disable_registration) != false do
      redirect(conn, to: Routes.auth_path(conn, :login_form))
    else
      changeset = Plausible.Auth.User.changeset(%Plausible.Auth.User{})

      render(conn, "register_form.html",
        changeset: changeset,
        layout: {PlausibleWeb.LayoutView, "focus.html"}
      )
    end
  end

  def register(conn, params) do
    if Keyword.fetch!(Application.get_env(:plausible, :selfhost), :disable_registration) != false do
      redirect(conn, to: Routes.auth_path(conn, :login_form))
    else
      user = Plausible.Auth.User.new(params["user"])

      if PlausibleWeb.Captcha.verify(params["h-captcha-response"]) do
        case Repo.insert(user) do
          {:ok, user} ->
            conn = set_user_session(conn, user)

            case user.email_verified do
              false ->
                send_email_verification(user)
                redirect(conn, to: Routes.auth_path(conn, :activate_form))

              true ->
                redirect(conn, to: Routes.site_path(conn, :new))
            end

          {:error, changeset} ->
            render(conn, "register_form.html",
              changeset: changeset,
              layout: {PlausibleWeb.LayoutView, "focus.html"}
            )
        end
      else
        render(conn, "register_form.html",
          changeset: user,
          captcha_error: "Please complete the captcha to register",
          layout: {PlausibleWeb.LayoutView, "focus.html"}
        )
      end
    end
  end

  def register_from_invitation_form(conn, %{"invitation_id" => invitation_id}) do
    if Keyword.fetch!(Application.get_env(:plausible, :selfhost), :disable_registration) == true do
      redirect(conn, to: Routes.auth_path(conn, :login_form))
    else
      invitation = Repo.get_by(Plausible.Auth.Invitation, invitation_id: invitation_id)
      changeset = Plausible.Auth.User.changeset(%Plausible.Auth.User{})

      if invitation do
        render(conn, "register_from_invitation_form.html",
          changeset: changeset,
          invitation: invitation,
          layout: {PlausibleWeb.LayoutView, "focus.html"},
          skip_plausible_tracking: true
        )
      else
        render(conn, "invitation_expired.html",
          layout: {PlausibleWeb.LayoutView, "focus.html"},
          skip_plausible_tracking: true
        )
      end
    end
  end

  def register_from_invitation(conn, %{"invitation_id" => invitation_id} = params) do
    if Keyword.fetch!(Application.get_env(:plausible, :selfhost), :disable_registration) == true do
      redirect(conn, to: Routes.auth_path(conn, :login_form))
    else
      invitation = Repo.get_by(Plausible.Auth.Invitation, invitation_id: invitation_id)
      user = Plausible.Auth.User.new(params["user"])

      user =
        case invitation.role do
          :owner -> user
          _ -> Plausible.Auth.User.remove_trial_expiry(user)
        end

      if PlausibleWeb.Captcha.verify(params["h-captcha-response"]) do
        case Repo.insert(user) do
          {:ok, user} ->
            conn = set_user_session(conn, user)

            case user.email_verified do
              false ->
                send_email_verification(user)
                redirect(conn, to: Routes.auth_path(conn, :activate_form))

              true ->
                redirect(conn, to: Routes.site_path(conn, :index))
            end

          {:error, changeset} ->
            render(conn, "register_from_invitation_form.html",
              invitation: invitation,
              changeset: changeset,
              layout: {PlausibleWeb.LayoutView, "focus.html"},
              skip_plausible_tracking: true
            )
        end
      else
        render(conn, "register_from_invitation_form.html",
          invitation: invitation,
          changeset: user,
          captcha_error: "Please complete the captcha to register",
          layout: {PlausibleWeb.LayoutView, "focus.html"},
          skip_plausible_tracking: true
        )
      end
    end
  end

  defp send_email_verification(user) do
    code = Auth.issue_email_verification(user)
    Logger.info("VERIFICATION CODE: #{code}")
    email_template = PlausibleWeb.Email.activation_email(user, code)
    Plausible.Mailer.send_email(email_template)
  end
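
  # Stores the user id in the session and sets a long-lived "logged_in"
  # cookie. The cookie is deliberately not HTTP-only, presumably so that
  # client-side code can detect the signed-in state.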
  defp set_user_session(conn, user) do
    conn
    |> put_session(:current_user_id, user.id)
    |> put_resp_cookie("logged_in", "true",
      http_only: false,
      max_age: 60 * 60 * 24 * 365 * 5000
    )
  end
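
  # Email verification: the activate form shows a PIN entry when a
  # verification code has been issued for the user, and notes whether the
  # account originates from a pending invitation, which changes where the
  # user is redirected after activation.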
  def activate_form(conn, _params) do
    user = conn.assigns[:current_user]

    has_invitation =
      Repo.exists?(
        from i in Plausible.Auth.Invitation,
          where: i.email == ^user.email
      )

    has_code =
      Repo.exists?(
        from c in "email_verification_codes",
          where: c.user_id == ^user.id
      )

    render(conn, "activate.html",
      has_pin: has_code,
      has_invitation: has_invitation,
      layout: {PlausibleWeb.LayoutView, "focus.html"}
    )
  end

  def activate(conn, %{"code" => code}) do
    user = conn.assigns[:current_user]

    has_invitation =
      Repo.exists?(
        from i in Plausible.Auth.Invitation,
          where: i.email == ^user.email
      )

    {code, ""} = Integer.parse(code)

    case Auth.verify_email(user, code) do
      :ok ->
        if has_invitation do
          redirect(conn, to: Routes.site_path(conn, :index))
        else
          redirect(conn, to: Routes.site_path(conn, :new))
        end

      {:error, :incorrect} ->
        render(conn, "activate.html",
          error: "Incorrect activation code",
          has_pin: true,
          has_invitation: has_invitation,
          layout: {PlausibleWeb.LayoutView, "focus.html"}
        )

      {:error, :expired} ->
        render(conn, "activate.html",
          error: "Code is expired, please request another one",
          has_pin: false,
          has_invitation: has_invitation,
          layout: {PlausibleWeb.LayoutView, "focus.html"}
        )
    end
  end

  def request_activation_code(conn, _params) do
    user = conn.assigns[:current_user]
    code = Auth.issue_email_verification(user)

    email_template = PlausibleWeb.Email.activation_email(user, code)
    Plausible.Mailer.send_email(email_template)

    conn
    |> put_flash(:success, "Activation code was sent to #{user.email}")
    |> redirect(to: Routes.auth_path(conn, :activate_form))
  end

  def password_reset_request_form(conn, _) do
    render(conn, "password_reset_request_form.html",
      layout: {PlausibleWeb.LayoutView, "focus.html"}
    )
  end

  def password_reset_request(conn, %{"email" => ""}) do
    render(conn, "password_reset_request_form.html",
      error: "Please enter an email address",
      layout: {PlausibleWeb.LayoutView, "focus.html"}
    )
  end
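
  # The success page is rendered whether or not the email belongs to an
  # account, so the endpoint avoids revealing which addresses are registered.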
  def password_reset_request(conn, %{"email" => email} = params) do
    if PlausibleWeb.Captcha.verify(params["h-captcha-response"]) do
      user = Repo.get_by(Plausible.Auth.User, email: email)

      if user do
        token = Auth.Token.sign_password_reset(email)
        url = PlausibleWeb.Endpoint.url() <> "/password/reset?token=#{token}"
        Logger.debug("PASSWORD RESET LINK: " <> url)
        email_template = PlausibleWeb.Email.password_reset_email(email, url)
        Plausible.Mailer.deliver_later(email_template)

        render(conn, "password_reset_request_success.html",
          email: email,
          layout: {PlausibleWeb.LayoutView, "focus.html"}
        )
      else
        render(conn, "password_reset_request_success.html",
          email: email,
          layout: {PlausibleWeb.LayoutView, "focus.html"}
        )
      end
    else
      render(conn, "password_reset_request_form.html",
        error: "Please complete the captcha to reset your password",
        layout: {PlausibleWeb.LayoutView, "focus.html"}
      )
    end
  end

  def password_reset_form(conn, %{"token" => token}) do
    case Auth.Token.verify_password_reset(token) do
      {:ok, _} ->
        render(conn, "password_reset_form.html",
          token: token,
          layout: {PlausibleWeb.LayoutView, "focus.html"}
        )

      {:error, :expired} ->
        render_error(
          conn,
          401,
          "Your token has expired. Please request another password reset link."
        )

      {:error, _} ->
        render_error(
          conn,
          401,
          "Your token is invalid. Please request another password reset link."
        )
    end
  end
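
  # Verifies the signed reset token again on submission, updates the password,
  # then clears the session and the "logged_in" cookie so the user has to log
  # in with the new credentials.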
  def password_reset(conn, %{"token" => token, "password" => pw}) do
    case Auth.Token.verify_password_reset(token) do
      {:ok, %{email: email}} ->
        user = Repo.get_by(Auth.User, email: email)
        changeset = Auth.User.set_password(user, pw)

        case Repo.update(changeset) do
          {:ok, _updated} ->
            conn
            |> put_flash(:login_title, "Password updated successfully")
            |> put_flash(:login_instructions, "Please log in with your new credentials")
            |> put_session(:current_user_id, nil)
            |> delete_resp_cookie("logged_in")
            |> redirect(to: Routes.auth_path(conn, :login_form))

          {:error, changeset} ->
            render(conn, "password_reset_form.html",
              changeset: changeset,
              token: token,
              layout: {PlausibleWeb.LayoutView, "focus.html"}
            )
        end

      {:error, :expired} ->
        render_error(
          conn,
          401,
          "Your token has expired. Please request another password reset link."
        )

      {:error, _} ->
        render_error(
          conn,
          401,
          "Your token is invalid. Please request another password reset link."
        )
    end
  end
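
  # Login goes through a `with` chain: rate limit by IP, look the user up,
  # rate limit by user id, then check the password. When the email is unknown,
  # a dummy hash calculation is still performed to keep response timing
  # comparable to a real password check.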
  def login(conn, %{"email" => email, "password" => password}) do
    with :ok <- check_ip_rate_limit(conn),
         {:ok, user} <- find_user(email),
         :ok <- check_user_rate_limit(user),
         :ok <- check_password(user, password) do
      login_dest = get_session(conn, :login_dest) || Routes.site_path(conn, :index)

      conn
      |> put_session(:current_user_id, user.id)
      |> put_resp_cookie("logged_in", "true",
        http_only: false,
        max_age: 60 * 60 * 24 * 365 * 5000
      )
      |> put_session(:login_dest, nil)
      |> redirect(to: login_dest)
    else
      :wrong_password ->
        render(conn, "login_form.html",
          error: "Wrong email or password. Please try again.",
          layout: {PlausibleWeb.LayoutView, "focus.html"}
        )

      :user_not_found ->
        Plausible.Auth.Password.dummy_calculation()

        render(conn, "login_form.html",
          error: "Wrong email or password. Please try again.",
          layout: {PlausibleWeb.LayoutView, "focus.html"}
        )

      {:rate_limit, _} ->
        render_error(
          conn,
          429,
          "Too many login attempts. Wait a minute before trying again."
        )
    end
  end
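
  # Hammer-based rate limiting: at most @login_limit attempts per
  # @login_interval (5 per minute), tracked per IP address and per user.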
  @login_interval 60_000
  @login_limit 5
  defp check_ip_rate_limit(conn) do
    ip_address = PlausibleWeb.RemoteIp.get(conn)

    case Hammer.check_rate("login:ip:#{ip_address}", @login_interval, @login_limit) do
      {:allow, _} -> :ok
      {:deny, _} -> {:rate_limit, :ip_address}
    end
  end

  defp find_user(email) do
    user =
      Repo.one(
        from u in Plausible.Auth.User,
          where: u.email == ^email
      )

    if user, do: {:ok, user}, else: :user_not_found
  end

  defp check_user_rate_limit(user) do
    case Hammer.check_rate("login:user:#{user.id}", @login_interval, @login_limit) do
      {:allow, _} -> :ok
      {:deny, _} -> {:rate_limit, :user}
    end
  end

  defp check_password(user, password) do
    if Plausible.Auth.Password.match?(password, user.password_hash || "") do
      :ok
    else
      :wrong_password
    end
  end

  def login_form(conn, _params) do
    render(conn, "login_form.html", layout: {PlausibleWeb.LayoutView, "focus.html"})
  end

  def password_form(conn, _params) do
    render(conn, "password_form.html",
      layout: {PlausibleWeb.LayoutView, "focus.html"},
      skip_plausible_tracking: true
    )
  end

  def set_password(conn, %{"password" => pw}) do
    changeset = Auth.User.set_password(conn.assigns[:current_user], pw)

    case Repo.update(changeset) do
      {:ok, _user} ->
        redirect(conn, to: "/sites/new")

      {:error, changeset} ->
        render(conn, "password_form.html",
          changeset: changeset,
          layout: {PlausibleWeb.LayoutView, "focus.html"}
        )
    end
  end

  def user_settings(conn, _params) do
    user = conn.assigns[:current_user]
    changeset = Auth.User.changeset(user)

    {usage_pageviews, usage_custom_events} = Plausible.Billing.usage_breakdown(user)

    render(conn, "user_settings.html",
      user: user |> Repo.preload(:api_keys),
      changeset: changeset,
      subscription: user.subscription,
      invoices: Plausible.Billing.paddle_api().get_invoices(user.subscription),
      theme: user.theme || "system",
      usage_pageviews: usage_pageviews,
      usage_custom_events: usage_custom_events
    )
  end

  def save_settings(conn, %{"user" => user_params}) do
    changes = Auth.User.changeset(conn.assigns[:current_user], user_params)

    case Repo.update(changes) do
      {:ok, _user} ->
        conn
        |> put_flash(:success, "Account settings saved successfully")
        |> redirect(to: Routes.auth_path(conn, :user_settings))

      {:error, changeset} ->
        render(conn, "user_settings.html",
          changeset: changeset,
          subscription: conn.assigns[:current_user].subscription
        )
    end
  end
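
  # API keys: 64 random bytes are generated, URL-safe Base64 encoded and
  # truncated to 64 characters. Any user_id in the form params is dropped so
  # a key can only be created for the currently logged-in user.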
  def new_api_key(conn, _params) do
    key = :crypto.strong_rand_bytes(64) |> Base.url_encode64() |> binary_part(0, 64)
    changeset = Auth.ApiKey.changeset(%Auth.ApiKey{}, %{key: key})

    render(conn, "new_api_key.html",
      changeset: changeset,
      layout: {PlausibleWeb.LayoutView, "focus.html"}
    )
  end

  def create_api_key(conn, %{"api_key" => key_params}) do
    api_key = %Auth.ApiKey{user_id: conn.assigns[:current_user].id}
    key_params = Map.delete(key_params, "user_id")
    changeset = Auth.ApiKey.changeset(api_key, key_params)

    case Repo.insert(changeset) do
      {:ok, _api_key} ->
        conn
        |> put_flash(:success, "API key created successfully")
        |> redirect(to: "/settings#api-keys")

      {:error, changeset} ->
        render(conn, "new_api_key.html",
          changeset: changeset,
          layout: {PlausibleWeb.LayoutView, "focus.html"}
        )
    end
  end

  def delete_api_key(conn, %{"id" => id}) do
    query =
      from k in Auth.ApiKey,
        where: k.id == ^id and k.user_id == ^conn.assigns[:current_user].id

    query
    |> Repo.one!()
    |> Repo.delete!()

    conn
    |> put_flash(:success, "API key revoked successfully")
    |> redirect(to: "/settings#api-keys")
  end
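
  # Account deletion removes every site membership, deletes the sites the
  # user owned, drops any subscription record, deletes the user and finally
  # logs them out.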
  def delete_me(conn, params) do
    user =
      conn.assigns[:current_user]
      |> Repo.preload(site_memberships: :site)
      |> Repo.preload(:subscription)

    for membership <- user.site_memberships do
      Repo.delete!(membership)

      if membership.role == :owner do
        Plausible.Sites.delete!(membership.site)
      end
    end

    if user.subscription, do: Repo.delete!(user.subscription)
    Repo.delete!(user)

    logout(conn, params)
  end

  def logout(conn, params) do
    redirect_to = Map.get(params, "redirect", "/")

    conn
    |> configure_session(drop: true)
    |> delete_resp_cookie("logged_in")
    |> redirect(to: redirect_to)
  end
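
  # OAuth callback for Google integrations. The `state` parameter carries the
  # site id and the destination ("import" for Google Analytics imports,
  # otherwise a settings sub-page). The import branch passes the tokens along
  # in the redirect; the other branch persists a GoogleAuth record for the site.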
  def google_auth_callback(conn, %{"code" => code, "state" => state}) do
    res = Plausible.Google.HTTP.fetch_access_token(code)
    [site_id, redirect_to] = Jason.decode!(state)
    site = Repo.get(Plausible.Site, site_id)
    expires_at = NaiveDateTime.add(NaiveDateTime.utc_now(), res["expires_in"])

    case redirect_to do
      "import" ->
        redirect(conn,
          to:
            Routes.site_path(conn, :import_from_google_view_id_form, site.domain,
              access_token: res["access_token"],
              refresh_token: res["refresh_token"],
              expires_at: NaiveDateTime.to_iso8601(expires_at)
            )
        )

      _ ->
        id_token = res["id_token"]
        [_, body, _] = String.split(id_token, ".")
        id = body |> Base.decode64!(padding: false) |> Jason.decode!()

        Plausible.Site.GoogleAuth.changeset(%Plausible.Site.GoogleAuth{}, %{
          email: id["email"],
          refresh_token: res["refresh_token"],
          access_token: res["access_token"],
          expires_at: expires_at,
          user_id: conn.assigns[:current_user].id,
          site_id: site_id
        })
        |> Repo.insert!()

        site = Repo.get(Plausible.Site, site_id)

        redirect(conn, to: "/#{URI.encode_www_form(site.domain)}/settings/#{redirect_to}")
    end
  end
end