2019-09-02 14:29:19 +03:00
defmodule PlausibleWeb.AuthController do
use PlausibleWeb , :controller
use Plausible.Repo
2023-12-06 14:01:19 +03:00
2023-12-06 17:07:37 +03:00
alias Plausible . { Auth , RateLimit }
2023-11-30 15:30:04 +03:00
alias Plausible.Billing.Quota
2023-12-06 14:01:19 +03:00
alias PlausibleWeb.TwoFactor
2019-09-02 14:29:19 +03:00
require Logger
2023-09-25 11:27:29 +03:00
# These actions are only reachable by logged-out visitors: registration,
# login, and the interactive 2FA verification steps (which run before a
# full session exists).
plug(
  PlausibleWeb.RequireLoggedOutPlug
  when action in [
         :register,
         :register_from_invitation,
         :login_form,
         :login,
         :verify_2fa_form,
         :verify_2fa,
         :verify_2fa_recovery_code_form,
         :verify_2fa_recovery_code
       ]
)
# These actions require a fully authenticated account (settings,
# email management, API keys, account deletion and 2FA management).
plug(
  PlausibleWeb.RequireAccountPlug
  when action in [
         :user_settings,
         :save_settings,
         :update_email,
         :cancel_update_email,
         :new_api_key,
         :create_api_key,
         :delete_api_key,
         :delete_me,
         :activate_form,
         :activate,
         :request_activation_code,
         :initiate_2fa,
         :verify_2fa_setup_form,
         :verify_2fa_setup,
         :disable_2fa,
         :generate_2fa_recovery_codes
       ]
)
2023-12-06 14:01:19 +03:00
# Navigating anywhere outside the 2FA verification flow drops the
# pending (partially authenticated) 2FA user from the session, forcing
# a fresh login attempt.
plug(
  :clear_2fa_user
  when action not in [
         :verify_2fa_form,
         :verify_2fa,
         :verify_2fa_recovery_code_form,
         :verify_2fa_recovery_code
       ]
)

# Function plug backing the directive above; delegates session cleanup
# to TwoFactor.Session.
defp clear_2fa_user(conn, _opts) do
  TwoFactor.Session.clear_2fa_user(conn)
end
2023-09-25 11:27:29 +03:00
# Completes registration by logging the new user in. Verified users go
# straight to new-site onboarding; unverified users are sent an email
# activation code first. On login failure, `login_user/3` has already
# rendered a response, which falls through the `with` unchanged.
def register(conn, %{"user" => %{"email" => email, "password" => password}}) do
  with {:ok, user} <- login_user(conn, email, password) do
    conn = set_user_session(conn, user)

    if user.email_verified do
      redirect(conn, to: Routes.site_path(conn, :new))
    else
      Auth.EmailVerification.issue_code(user)
      redirect(conn, to: Routes.auth_path(conn, :activate_form))
    end
  end
end
2023-09-25 11:27:29 +03:00
# Same as register/2 but for users created via a site invitation: a
# verified user lands on the site index (where pending invitations are
# shown) instead of the new-site flow.
def register_from_invitation(conn, %{"user" => %{"email" => email, "password" => password}}) do
  with {:ok, user} <- login_user(conn, email, password) do
    conn = set_user_session(conn, user)

    if user.email_verified do
      redirect(conn, to: Routes.site_path(conn, :index))
    else
      Auth.EmailVerification.issue_code(user)
      redirect(conn, to: Routes.auth_path(conn, :activate_form))
    end
  end
end
2020-12-15 12:30:45 +03:00
# Renders the email activation form. The flags tell the template whether
# a code has already been issued and how to route the user after
# successful activation (see activate/2 for the same flags).
def activate_form(conn, _params) do
  user = conn.assigns.current_user

  render(conn, "activate.html",
    has_email_code?: Plausible.Users.has_email_code?(user),
    has_any_invitations?: Plausible.Site.Memberships.pending?(user.email),
    has_any_memberships?: Plausible.Site.Memberships.any?(user),
    layout: {PlausibleWeb.LayoutView, "focus.html"}
  )
end
2020-06-08 10:35:13 +03:00
2020-12-15 12:30:45 +03:00
# Verifies the activation `code` submitted by the current user.
#
# On success the user is routed by account state, checked in this order:
#   * already a site member  -> treated as a completed email change
#   * has pending invitations -> site index (to accept them)
#   * otherwise (new account) -> new-site onboarding
# On failure the activation form is re-rendered with an error message;
# `has_email_code?` is false only for the expired case, so the template
# can offer to resend.
def activate(conn, %{"code" => code}) do
  user = conn.assigns[:current_user]

  has_any_invitations? = Plausible.Site.Memberships.pending?(user.email)
  has_any_memberships? = Plausible.Site.Memberships.any?(user)

  case Auth.EmailVerification.verify_code(user, code) do
    :ok ->
      cond do
        has_any_memberships? ->
          handle_email_updated(conn)

        has_any_invitations? ->
          redirect(conn, to: Routes.site_path(conn, :index))

        true ->
          redirect(conn, to: Routes.site_path(conn, :new))
      end

    {:error, :incorrect} ->
      render(conn, "activate.html",
        error: "Incorrect activation code",
        has_email_code?: true,
        has_any_invitations?: has_any_invitations?,
        has_any_memberships?: has_any_memberships?,
        layout: {PlausibleWeb.LayoutView, "focus.html"}
      )

    {:error, :expired} ->
      render(conn, "activate.html",
        error: "Code is expired, please request another one",
        has_email_code?: false,
        has_any_invitations?: has_any_invitations?,
        has_any_memberships?: has_any_memberships?,
        layout: {PlausibleWeb.LayoutView, "focus.html"}
      )
  end
end
2020-12-15 12:30:45 +03:00
# Issues a fresh activation code for the current user and returns them
# to the activation form with a confirmation flash.
def request_activation_code(conn, _params) do
  user = conn.assigns.current_user
  Auth.EmailVerification.issue_code(user)

  conn
  |> put_flash(:success, "Activation code was sent to #{user.email}")
  |> redirect(to: Routes.auth_path(conn, :activate_form))
end
2019-09-02 14:29:19 +03:00
# Shows the "request a password reset" form.
def password_reset_request_form(conn, _) do
  opts = [layout: {PlausibleWeb.LayoutView, "focus.html"}]
  render(conn, "password_reset_request_form.html", opts)
end
# Guard clause: a blank email re-renders the request form with an error.
def password_reset_request(conn, %{"email" => ""}) do
  render(conn, "password_reset_request_form.html",
    error: "Please enter an email address",
    layout: {PlausibleWeb.LayoutView, "focus.html"}
  )
end
2020-08-28 15:00:16 +03:00
# Handles a password-reset request. Requires a valid captcha; when the
# email belongs to a known account a signed reset link is mailed out.
def password_reset_request(conn, %{"email" => email} = params) do
  if PlausibleWeb.Captcha.verify(params["h-captcha-response"]) do
    user = Repo.get_by(Plausible.Auth.User, email: email)

    if user do
      token = Auth.Token.sign_password_reset(email)
      url = PlausibleWeb.Endpoint.url() <> "/password/reset?token=#{token}"
      email_template = PlausibleWeb.Email.password_reset_email(email, url)
      Plausible.Mailer.deliver_later(email_template)

      Logger.debug(
        "Password reset e-mail sent. In dev environment GET /sent-emails for details."
      )
    end

    # Render the same success page whether or not the account exists,
    # so this endpoint cannot be used to enumerate registered emails.
    render(conn, "password_reset_request_success.html",
      email: email,
      layout: {PlausibleWeb.LayoutView, "focus.html"}
    )
  else
    render(conn, "password_reset_request_form.html",
      error: "Please complete the captcha to reset your password",
      layout: {PlausibleWeb.LayoutView, "focus.html"}
    )
  end
end
2023-10-02 16:11:59 +03:00
# Renders the password-reset form after verifying the signed token from
# the emailed link. Expired and otherwise-invalid tokens produce a 401
# page asking the user to request a new link.
def password_reset_form(conn, params) do
  case Auth.Token.verify_password_reset(params["token"]) do
    {:ok, %{email: email}} ->
      render(conn, "password_reset_form.html",
        # LiveView socket is needed for the interactive password field
        connect_live_socket: true,
        email: email,
        layout: {PlausibleWeb.LayoutView, "focus.html"}
      )

    {:error, :expired} ->
      render_error(
        conn,
        401,
        "Your token has expired. Please request another password reset link."
      )

    {:error, _} ->
      render_error(
        conn,
        401,
        "Your token is invalid. Please request another password reset link."
      )
  end
end
2023-09-25 11:27:29 +03:00
# Called after a successful password change: drops the current session
# (user id and "logged_in" cookie) and sends the user back to the login
# form with a confirmation notice.
def password_reset(conn, _params) do
  conn
  |> put_flash(:login_title, "Password updated successfully")
  |> put_flash(:login_instructions, "Please log in with your new credentials")
  |> put_session(:current_user_id, nil)
  |> delete_resp_cookie("logged_in")
  |> redirect(to: Routes.auth_path(conn, :login_form))
end
2020-06-08 10:35:13 +03:00
2023-09-25 11:27:29 +03:00
# Logs a user in with email/password. When TOTP is enabled and this
# device is not remembered, the user is parked in the 2FA session and
# redirected to code verification instead of getting a full session.
# On login failure, `login_user/3` has already rendered a response,
# which falls through the `with` unchanged.
def login(conn, %{"email" => email, "password" => password}) do
  with {:ok, user} <- login_user(conn, email, password) do
    if Auth.TOTP.enabled?(user) and not TwoFactor.Session.remember_2fa?(conn, user) do
      conn
      |> TwoFactor.Session.set_2fa_user(user)
      |> redirect(to: Routes.auth_path(conn, :verify_2fa))
    else
      set_user_session_and_redirect(conn, user)
    end
  end
end
2023-09-25 11:27:29 +03:00
# Authenticates the credentials, rate limiting by both client IP and
# user id. Returns `{:ok, user}` on success.
#
# NOTE: on failure this does NOT return an error tuple — it renders a
# response and returns the resulting conn. Callers rely on that conn
# falling through their `with {:ok, user} <- ...` clause as-is.
# Wrong-password and unknown-user cases render the same message to
# avoid revealing which emails are registered.
defp login_user(conn, email, password) do
  with :ok <- check_ip_rate_limit(conn),
       {:ok, user} <- find_user(email),
       :ok <- check_user_rate_limit(user),
       :ok <- check_password(user, password) do
    {:ok, user}
  else
    :wrong_password ->
      maybe_log_failed_login_attempts("wrong password for #{email}")

      render(conn, "login_form.html",
        error: "Wrong email or password. Please try again.",
        layout: {PlausibleWeb.LayoutView, "focus.html"}
      )

    :user_not_found ->
      maybe_log_failed_login_attempts("user not found for #{email}")
      # Burn comparable CPU time to a real password check so response
      # timing does not reveal whether the account exists.
      Plausible.Auth.Password.dummy_calculation()

      render(conn, "login_form.html",
        error: "Wrong email or password. Please try again.",
        layout: {PlausibleWeb.LayoutView, "focus.html"}
      )

    {:rate_limit, _} ->
      maybe_log_failed_login_attempts("too many logging attempts for #{email}")

      render_error(
        conn,
        429,
        "Too many login attempts. Wait a minute before trying again."
      )
  end
end
2023-12-06 14:01:19 +03:00
# Sends the visitor to the login form.
defp redirect_to_login(conn),
  do: redirect(conn, to: Routes.auth_path(conn, :login_form))
# Establishes the full session and redirects to the destination stored
# before login (e.g. a deep link), defaulting to the site index. The
# stored destination is cleared so it is only used once.
defp set_user_session_and_redirect(conn, user) do
  login_dest = get_session(conn, :login_dest) || Routes.site_path(conn, :index)

  conn
  |> set_user_session(user)
  |> put_session(:login_dest, nil)
  # `external:` because login_dest may be a full URL, not just a path
  |> redirect(external: login_dest)
end
2023-09-25 11:27:29 +03:00
# Writes the authenticated user id into the session, clearing any
# pending 2FA state first. The non-http-only "logged_in" cookie is a
# long-lived hint readable by frontend JS; it carries no credentials.
defp set_user_session(conn, user) do
  conn
  |> TwoFactor.Session.clear_2fa_user()
  |> put_session(:current_user_id, user.id)
  |> put_resp_cookie("logged_in", "true",
    http_only: false,
    max_age: 60 * 60 * 24 * 365 * 5000
  )
end
2023-05-25 10:37:10 +03:00
# Logs a failed login attempt, but only when the
# :log_failed_login_attempts application flag is enabled.
defp maybe_log_failed_login_attempts(message) do
  enabled? = Application.get_env(:plausible, :log_failed_login_attempts)

  if enabled?, do: Logger.warning("[login] #{message}")
end
2021-05-25 11:32:54 +03:00
# Login attempts are limited to @login_limit per @login_interval (ms),
# tracked separately per client IP and per user (see below).
@login_interval 60_000
@login_limit 5

# Rate-limits login attempts by the requesting IP address.
# Returns :ok when allowed, {:rate_limit, :ip_address} when denied.
defp check_ip_rate_limit(conn) do
  ip_address = PlausibleWeb.RemoteIp.get(conn)

  case RateLimit.check_rate("login:ip:#{ip_address}", @login_interval, @login_limit) do
    {:allow, _} -> :ok
    {:deny, _} -> {:rate_limit, :ip_address}
  end
end
2023-12-06 14:01:19 +03:00
# Rate-limits login attempts against a specific account, independent of
# the attacker's IP. Returns :ok or {:rate_limit, :user}.
defp check_user_rate_limit(user) do
  case RateLimit.check_rate("login:user:#{user.id}", @login_interval, @login_limit) do
    {:allow, _} -> :ok
    {:deny, _} -> {:rate_limit, :user}
  end
end
2021-05-25 11:32:54 +03:00
# Looks up a user by email address.
#
# Returns `{:ok, user}` when found and the bare atom `:user_not_found`
# otherwise, so `login_user/3`'s `with` can tell the failure apart.
defp find_user(email) do
  # Repo.get_by/2 is the idiomatic single-row lookup and matches how
  # password_reset_request/2 fetches users elsewhere in this module;
  # behavior is identical to the previous Repo.one(from ...) query.
  case Repo.get_by(Plausible.Auth.User, email: email) do
    nil -> :user_not_found
    user -> {:ok, user}
  end
end
# Compares the candidate password against the stored hash.
# An empty-string fallback keeps the comparison well-defined for
# accounts without a password hash. Returns :ok or :wrong_password.
defp check_password(user, password) do
  case Plausible.Auth.Password.match?(password, user.password_hash || "") do
    true -> :ok
    false -> :wrong_password
  end
end
# Renders the login form.
def login_form(conn, _params),
  do: render(conn, "login_form.html", layout: {PlausibleWeb.LayoutView, "focus.html"})
# Renders the account settings page with fresh changesets for the
# general-settings form and the change-email form.
def user_settings(conn, _params) do
  user = conn.assigns.current_user
  settings_changeset = Auth.User.settings_changeset(user)
  # Fix: the email form must be backed by an email changeset, not a
  # second copy of the settings changeset — update_email/2 and the
  # error paths that re-render this page all use email-specific data.
  email_changeset = Auth.User.email_changeset(user)

  render_settings(conn,
    settings_changeset: settings_changeset,
    email_changeset: email_changeset
  )
end
2023-12-06 14:01:19 +03:00
# Starts TOTP setup: generates a secret and provisioning URI for the
# authenticator app. If 2FA is already configured, bounces back to the
# settings page with an error flash.
def initiate_2fa_setup(conn, _params) do
  case Auth.TOTP.initiate(conn.assigns.current_user) do
    {:ok, user, %{totp_uri: totp_uri, secret: secret}} ->
      render(conn, "initiate_2fa_setup.html", user: user, totp_uri: totp_uri, secret: secret)

    {:error, :already_setup} ->
      conn
      |> put_flash(:error, "Two-Factor Authentication is already setup for this account.")
      |> redirect(to: Routes.auth_path(conn, :user_settings) <> "#setup-2fa")
  end
end
# Shows the code-entry step of TOTP setup; only valid if setup was
# actually initiated first, otherwise returns to the settings anchor.
def verify_2fa_setup_form(conn, _params) do
  if Auth.TOTP.initiated?(conn.assigns.current_user) do
    render(conn, "verify_2fa_setup.html")
  else
    redirect(conn, to: Routes.auth_path(conn, :user_settings) <> "#setup-2fa")
  end
end
# Confirms the TOTP code to finish enabling 2FA. On success the freshly
# generated recovery codes are shown (the only time they are visible).
def verify_2fa_setup(conn, %{"code" => code}) do
  case Auth.TOTP.enable(conn.assigns.current_user, code) do
    {:ok, _, %{recovery_codes: codes}} ->
      conn
      |> put_flash(:success, "Two-Factor Authentication is fully enabled")
      |> render("generate_2fa_recovery_codes.html", recovery_codes: codes, from_setup: true)

    {:error, :invalid_code} ->
      conn
      |> put_flash(:error, "The provided code is invalid. Please try again")
      |> render("verify_2fa_setup.html")

    {:error, :not_initiated} ->
      conn
      |> put_flash(:error, "Please enable Two-Factor Authentication for this account first.")
      |> redirect(to: Routes.auth_path(conn, :user_settings) <> "#setup-2fa")
  end
end
# Disables 2FA after re-confirming the account password; also clears
# the "remember this device" 2FA cookie so stale state can't linger.
def disable_2fa(conn, %{"password" => password}) do
  case Auth.TOTP.disable(conn.assigns.current_user, password) do
    {:ok, _} ->
      conn
      |> TwoFactor.Session.clear_remember_2fa()
      |> put_flash(:success, "Two-Factor Authentication is disabled")
      |> redirect(to: Routes.auth_path(conn, :user_settings) <> "#setup-2fa")

    {:error, :invalid_password} ->
      conn
      |> put_flash(:error, "Incorrect password provided")
      |> redirect(to: Routes.auth_path(conn, :user_settings) <> "#setup-2fa")
  end
end
# Regenerates 2FA recovery codes after password confirmation. Requires
# 2FA to be enabled; invalidates previously issued codes (per TOTP
# module contract) and displays the new set once.
def generate_2fa_recovery_codes(conn, %{"password" => password}) do
  case Auth.TOTP.generate_recovery_codes(conn.assigns.current_user, password) do
    {:ok, codes} ->
      conn
      |> put_flash(:success, "New Recovery Codes generated")
      |> render("generate_2fa_recovery_codes.html", recovery_codes: codes, from_setup: false)

    {:error, :invalid_password} ->
      conn
      |> put_flash(:error, "Incorrect password provided")
      |> redirect(to: Routes.auth_path(conn, :user_settings) <> "#setup-2fa")

    {:error, :not_enabled} ->
      conn
      |> put_flash(:error, "Please enable Two-Factor Authentication for this account first.")
      |> redirect(to: Routes.auth_path(conn, :user_settings) <> "#setup-2fa")
  end
end
# Renders the 2FA code-entry form for a user parked in the 2FA session
# by login/2. If there is no pending 2FA user, or 2FA has been disabled
# in the meantime, the visitor is sent back to the login form.
def verify_2fa_form(conn, _) do
  case TwoFactor.Session.get_2fa_user(conn) do
    {:ok, user} ->
      if Auth.TOTP.enabled?(user) do
        render(conn, "verify_2fa.html",
          remember_2fa_days: TwoFactor.Session.remember_2fa_days(),
          layout: {PlausibleWeb.LayoutView, "focus.html"}
        )
      else
        redirect_to_login(conn)
      end

    {:error, :not_found} ->
      redirect_to_login(conn)
  end
end
# Validates the submitted TOTP code for the pending 2FA user (fetched
# under the same rate limits as password login). On success, optionally
# remembers this device and completes the session. If 2FA turns out to
# be disabled, the login simply completes. On rate limit / missing 2FA
# user, get_2fa_user_limited/1 has already produced a response conn
# that falls through the `with`.
def verify_2fa(conn, %{"code" => code} = params) do
  with {:ok, user} <- get_2fa_user_limited(conn) do
    case Auth.TOTP.validate_code(user, code) do
      {:ok, user} ->
        conn
        |> TwoFactor.Session.maybe_set_remember_2fa(user, params["remember_2fa"])
        |> set_user_session_and_redirect(user)

      {:error, :invalid_code} ->
        maybe_log_failed_login_attempts(
          "wrong 2FA verification code provided for #{user.email}"
        )

        conn
        |> put_flash(:error, "The provided code is invalid. Please try again")
        |> render("verify_2fa.html",
          remember_2fa_days: TwoFactor.Session.remember_2fa_days(),
          layout: {PlausibleWeb.LayoutView, "focus.html"}
        )

      {:error, :not_enabled} ->
        set_user_session_and_redirect(conn, user)
    end
  end
end
# Renders the recovery-code entry form for a user parked in the 2FA
# session. Without a pending 2FA user, or with 2FA disabled meanwhile,
# the visitor is bounced back to the login form.
def verify_2fa_recovery_code_form(conn, _params) do
  with {:ok, user} <- TwoFactor.Session.get_2fa_user(conn),
       true <- Auth.TOTP.enabled?(user) do
    render(conn, "verify_2fa_recovery_code.html",
      layout: {PlausibleWeb.LayoutView, "focus.html"}
    )
  else
    _not_pending_or_disabled -> redirect_to_login(conn)
  end
end
# Consumes a single-use recovery code in place of a TOTP code. Same
# flow as verify_2fa/2: rate-limited user fetch, session completion on
# success, error flash on a bad code, and plain login completion when
# 2FA is not actually enabled.
def verify_2fa_recovery_code(conn, %{"recovery_code" => recovery_code}) do
  with {:ok, user} <- get_2fa_user_limited(conn) do
    case Auth.TOTP.use_recovery_code(user, recovery_code) do
      :ok ->
        set_user_session_and_redirect(conn, user)

      {:error, :invalid_code} ->
        maybe_log_failed_login_attempts("wrong 2FA recovery code provided for #{user.email}")

        conn
        |> put_flash(:error, "The provided recovery code is invalid. Please try another one")
        |> render("verify_2fa_recovery_code.html",
          layout: {PlausibleWeb.LayoutView, "focus.html"}
        )

      {:error, :not_enabled} ->
        set_user_session_and_redirect(conn, user)
    end
  end
end
# Fetches the pending 2FA user while enforcing the same IP and per-user
# rate limits as password login, so 2FA codes can't be brute-forced.
#
# Returns `{:ok, user}`; on rate limit it clears the pending 2FA state
# and returns a rendered 429 conn; with no pending user it returns a
# redirect conn. Callers' `with {:ok, user} <- ...` pass those conns
# through unchanged.
defp get_2fa_user_limited(conn) do
  case TwoFactor.Session.get_2fa_user(conn) do
    {:ok, user} ->
      with :ok <- check_ip_rate_limit(conn),
           :ok <- check_user_rate_limit(user) do
        {:ok, user}
      else
        {:rate_limit, _} ->
          maybe_log_failed_login_attempts("too many logging attempts for #{user.email}")

          conn
          |> TwoFactor.Session.clear_2fa_user()
          |> render_error(
            429,
            "Too many login attempts. Wait a minute before trying again."
          )
      end

    {:error, :not_found} ->
      conn
      |> redirect(to: Routes.auth_path(conn, :login_form))
  end
end
2019-09-02 14:29:19 +03:00
# Persists the general account settings form. On validation failure the
# settings page is re-rendered with the invalid settings changeset plus
# a fresh changeset for the (untouched) email form.
def save_settings(conn, %{"user" => user_params}) do
  user = conn.assigns.current_user
  changes = Auth.User.settings_changeset(user, user_params)

  case Repo.update(changes) do
    {:ok, _user} ->
      conn
      |> put_flash(:success, "Account settings saved successfully")
      |> redirect(to: Routes.auth_path(conn, :user_settings))

    {:error, changeset} ->
      # Fix: the email form needs an email changeset; previously a
      # settings changeset was passed here, unlike update_email/2's
      # mirror-image error path.
      email_changeset = Auth.User.email_changeset(user)

      render_settings(conn,
        settings_changeset: changeset,
        email_changeset: email_changeset
      )
  end
end
2023-10-11 11:25:00 +03:00
# Handles the change-email form. A successful update either completes
# immediately (already-verified email) or issues a verification code
# and sends the user to the activation form. On validation failure the
# settings page is re-rendered with the invalid email changeset.
def update_email(conn, %{"user" => user_params}) do
  user = conn.assigns.current_user
  changes = Auth.User.email_changeset(user, user_params)

  case Repo.update(changes) do
    {:ok, user} ->
      if user.email_verified do
        handle_email_updated(conn)
      else
        Auth.EmailVerification.issue_code(user)
        redirect(conn, to: Routes.auth_path(conn, :activate_form))
      end

    {:error, changeset} ->
      settings_changeset = Auth.User.settings_changeset(user)

      render_settings(conn,
        settings_changeset: settings_changeset,
        email_changeset: changeset
      )
  end
end
# Reverts a pending email change back to the previous address. This can
# fail if the old address has since been claimed by another account, in
# which case the user stays in the activation flow with an error flash.
def cancel_update_email(conn, _params) do
  changeset = Auth.User.cancel_email_changeset(conn.assigns.current_user)

  case Repo.update(changeset) do
    {:ok, user} ->
      conn
      |> put_flash(:success, "Email changed back to #{user.email}")
      |> redirect(to: Routes.auth_path(conn, :user_settings) <> "#change-email-address")

    {:error, _} ->
      conn
      |> put_flash(
        :error,
        "Could not cancel email update because previous email has already been taken"
      )
      |> redirect(to: Routes.auth_path(conn, :activate_form))
  end
end
2023-10-11 16:12:57 +03:00
# Completes an email change: success flash, then back to the settings
# page anchored at the change-email section.
defp handle_email_updated(conn) do
  target = Routes.auth_path(conn, :user_settings) <> "#change-email-address"

  conn
  |> put_flash(:success, "Email updated successfully")
  |> redirect(to: target)
end
2023-10-11 11:25:00 +03:00
# Renders the settings page. `opts` must contain :settings_changeset
# and :email_changeset (Keyword.fetch!/2 asserts both). Loads the
# user's subscription, API keys, invoices and all quota/usage figures
# the template displays.
defp render_settings(conn, opts) do
  settings_changeset = Keyword.fetch!(opts, :settings_changeset)
  email_changeset = Keyword.fetch!(opts, :email_changeset)

  user = Plausible.Users.with_subscription(conn.assigns[:current_user])

  render(conn, "user_settings.html",
    user: user |> Repo.preload(:api_keys),
    settings_changeset: settings_changeset,
    email_changeset: email_changeset,
    subscription: user.subscription,
    invoices: Plausible.Billing.paddle_api().get_invoices(user.subscription),
    theme: user.theme || "system",
    team_member_limit: Quota.team_member_limit(user),
    team_member_usage: Quota.team_member_usage(user),
    site_limit: Quota.site_limit(user),
    site_usage: Quota.site_usage(user),
    pageview_limit: Quota.monthly_pageview_limit(user.subscription),
    pageview_usage: Quota.monthly_pageview_usage(user),
    totp_enabled?: Auth.TOTP.enabled?(user)
  )
end
2021-02-05 12:23:30 +03:00
# Renders the new-API-key form with an empty changeset.
def new_api_key(conn, _params) do
  changeset = Auth.ApiKey.changeset(%Auth.ApiKey{})

  render(conn, "new_api_key.html",
    changeset: changeset,
    layout: {PlausibleWeb.LayoutView, "focus.html"}
  )
end
2023-10-11 23:24:16 +03:00
# Creates an API key for the current user. On validation failure the
# form is re-rendered with the invalid changeset.
def create_api_key(conn, %{"api_key" => %{"name" => name, "key" => key}}) do
  case Auth.create_api_key(conn.assigns.current_user, name, key) do
    {:ok, _api_key} ->
      conn
      |> put_flash(:success, "API key created successfully")
      |> redirect(to: "/settings#api-keys")

    {:error, changeset} ->
      render(conn, "new_api_key.html",
        changeset: changeset,
        layout: {PlausibleWeb.LayoutView, "focus.html"}
      )
  end
end
# Revokes one of the current user's API keys by id. The Auth context
# scopes the lookup to the user, so keys belonging to others surface
# as :not_found rather than being deletable.
def delete_api_key(conn, %{"id" => id}) do
  case Auth.delete_api_key(conn.assigns.current_user, id) do
    :ok ->
      conn
      |> put_flash(:success, "API key revoked successfully")
      |> redirect(to: "/settings#api-keys")

    {:error, :not_found} ->
      conn
      |> put_flash(:error, "Could not find API Key to delete")
      |> redirect(to: "/settings#api-keys")
  end
end
2020-03-31 15:47:34 +03:00
# Deletes the current user's account, then reuses logout/2 to tear down
# the session and redirect.
def delete_me(conn, params) do
  Plausible.Auth.delete_user(conn.assigns[:current_user])

  logout(conn, params)
end
2021-04-01 10:43:32 +03:00
# Ends the session: drops all session data and the "logged_in" cookie,
# then redirects to the optional "redirect" param (default "/").
def logout(conn, params) do
  destination = Map.get(params, "redirect", "/")

  conn
  |> configure_session(drop: true)
  |> delete_resp_cookie("logged_in")
  |> redirect(to: destination)
end
2022-12-08 05:32:14 +03:00
# Handles Google OAuth error callbacks (the "error" param is present).
# The signed `state` carries [site_id, redirect_to]; the user is sent
# back to the site's general settings with an error flash tailored to
# the failure: access denied, Google-side outage, or anything else
# (which is also reported to Sentry).
def google_auth_callback(conn, %{"error" => error, "state" => state} = params) do
  [site_id, _redirect_to] = Jason.decode!(state)
  site = Repo.get(Plausible.Site, site_id)

  case error do
    "access_denied" ->
      conn
      |> put_flash(
        :error,
        "We were unable to authenticate your Google Analytics account. Please check that you have granted us permission to 'See and download your Google Analytics data' and try again."
      )
      |> redirect(external: Routes.site_path(conn, :settings_general, site.domain))

    message when message in ["server_error", "temporarily_unavailable"] ->
      conn
      |> put_flash(
        :error,
        "We are unable to authenticate your Google Analytics account because Google's authentication service is temporarily unavailable. Please try again in a few moments."
      )
      |> redirect(external: Routes.site_path(conn, :settings_general, site.domain))

    _any ->
      Sentry.capture_message("Google OAuth callback failed. Reason: #{inspect(params)}")

      conn
      |> put_flash(
        :error,
        "We were unable to authenticate your Google Analytics account. If the problem persists, please contact support for assistance."
      )
      |> redirect(external: Routes.site_path(conn, :settings_general, site.domain))
  end
end
[Continued] Google Analytics import (#1753)
* Add has_imported_stats boolean to Site
* Add Google Analytics import panel to general settings
* Get GA profiles to display in import settings panel
* Add import_from_google method as entrypoint to import data
* Add imported_visitors table
* Remove conflicting code from migration
* Import visitors data into clickhouse database
* Pass another dataset to main graph for rendering in red
This adds another entry to the JSON data returned via the main graph API
called `imported_plot`, which is similar to `plot` in form but will be
completed with previously imported data. Currently it simply returns
the values from `plot` / 2. The data is rendered in the main graph in
red without fill, and without an indicator for the present. Rationale:
imported data will not continue to grow so there is no projection
forward, only backwards.
* Hook imported GA data to dashboard timeseries plot
* Add settings option to forget imported data
* Import sources from google analytics
* Merge imported sources when queried
* Merge imported source data native data when querying sources
* Start converting metrics to atoms so they can be subqueried
This changes "visitors" and in some places "sources" to atoms. This does
not change the behaviour of the functions - the tests all pass unchanged
following this commit. This is necessary as joining subqueries requires
that the keys in `select` statements be atoms and not strings.
* Convery GA (direct) source to empty string
* Import utm campaign and utm medium from GA
* format
* Import all data types from GA into new tables
* Handle large amounts of more data more safely
* Fix some mistakes in tables
* Make GA requests in chunks of 5 queries
* Only display imported timeseries when there is no filter
* Correctly show last 30 minutes timeseries when 'realtime'
* Add with_imported key to Query struct
* Account for injected :is_not filter on sources from dashboard
* Also add tentative imported_utm_sources table
This needs a bit more work on the google import side, as GA do not
report sources and utm sources as distinct things.
* Return imported data to dashboard for rest of Sources panel
This extends the merge_imported function definition for sources to
utm_sources, utm_mediums and utm_campaigns too. This appears to be
working on the DB side but something is incomplete on the client side.
* Clear imported stats from all tables when requested
* Merge entry pages and exit pages from imported data into unfiltered dashboard view
This requires converting the `"visits"` and `"visit_duration"` metrics
to atoms so that they can be used in ecto subqueries.
* Display imported devices, browsers and OSs on dashboard
* Display imported country data on dashboard
* Add more metrics to entries/exits for modals
* make sure data is returned via API with correct keys
* Import regions and cities from GA
* Capitalize device upon import to match native data
* Leave query limits/offsets until after possibly joining with imported data
* Also import timeOnPage and pageviews for pages from GA
* imported_countries -> imported_locations
* Get timeOnPage and pageviews for pages from GA
These are needed for the pages modal, and for calculating exit rates for
exit pages.
* Add indicator to dashboard when imported data is being used
* Don't show imported data as separately line on main graph
* "bounce_rate" -> :bounce_rate, so it works in subqueries
* Drop imported browser and OS versions
These are not needed.
* Toggle displaying imported data by clicking indicator
* Parse referrers with RefInspector
- Use 'ga:fullReferrer' instead of 'ga:source'. This provides the actual
referrer host + path, whereas 'ga:source' includes utm_mediums and
other values when relevant.
- 'ga:fullReferror' does however include search engine names directly,
so they are manually checked for as RefInspector won't pick up on
these.
* Keep imported data indicator on dashboard and strikethrough when hidden
* Add unlink google button to import panel
* Rename some GA browsers and OSes to plausible versions
* Get main top pages and exit pages panels working correctly with imported data
* mix format
* Fetch time_on_pages for imported data when needed
* entry pages need to fetch bounces from GA
* "sample_percent" -> :sample_percent as only atoms can be used in subqueries
* Calculate bounce_rate for joined native and imported data for top pages modal
* Flip some query bindings around to be less misleading
* Fixup entry page modal visit durations
* mix format
* Fetch bounces and visit_duration for sources from GA
* add more source metrics used for data in modals
* Make sources modals display correct values
* imported_visitors: bounce_rate -> bounces, avg_visit_duration -> visit_duration
* Merge imported data into aggregate stats
* Reformat top graph side icons
* Ensure sample_percent is yielded from aggregate data
* filter event_props should be strings
* Hide imported data from frontend when using filter
* Fix existing tests
* fix tests
* Fix imported indicator appearing when filtering
* comma needed, lost when rebasing
* Import utm_terms and utm_content from GA
* Merge imported utm_term and utm_content
* Rename imported Countries data as Locations
* Set imported city schema field to int
* Remove utm_terms and utm_content when clearing imported
* Clean locations import from Google Analytics
- Country and region should be set to "" when GA provides "(not set)"
- City should be set to 0 for "unknown", as we cannot reliably import
city data from GA.
* Display imported region and city in dashboard
* os -> operating_system in some parts of code
The inconsistency of using os in some places and operating_system in
others causes trouble with subqueries and joins for the native and
imported data, which would require additional logic to account for. The
simplest solution is the just use a consistent word for all uses. This
doesn't make any user-facing or database changes.
* to_atom -> to_existing_atom
* format
* "events" metric -> :events
* ignore imported data when "events" in metrics
* update "bounce_rate"
* atomise some more metrics from new city and region api
* atomise some more metrics for email handlers
* "conversion_rate" -> :conversion_rate during csv export
* Move imported data stats code to own module
* Move imported timeseries function to Stats.Imported
* Use Timex.parse to import dates from GA
* has_imported_stats -> imported_source
* "time_on_page" -> :time_on_page
* Convert imported GA data to UTC
* Clean up GA request code a bit
There was some weird logic here with two separate lists that really
ought to be together, so this merges those.
* Fail sooner if GA timezone can't be identified
* Link imported tables to site by id
* imported_utm_content -> imported_utm_contents
* Imported GA from all of time
* Reorganise GA data fetch logic
- Fetch data from the start of time (2005)
- Check whether no data was fetched, and if so, inform user and don't
consider data to be imported.
* Clarify removal of "visits" data when it isn't in metrics
* Apply location filters from API
This makes it consistent with the sources etc which filter out 'Direct /
None' on the API side. These filters are used by both the native and
imported data handling code, which would otherwise both duplicate the
filters in their `where` clauses.
* Do not use changeset for setting site.imported_source
* Add all metrics to all dimensions
* Run GA import in the background
* Send email when GA import completes
* Add handler to insert imported data into tests and imported_browsers_factory
* Add remaining import data test factories
* Add imported location data to test
* Test main graph with imported data
* Add imported data to operating systems tests
* Add imported data to pages tests
* Add imported data to entry pages tests
* Add imported data to exit pages tests
* Add imported data to devices tests
* Add imported data to sources tests
* Add imported data to UTM tests
* Add new test module for the data import step
* Test import of sources GA data
* Test import of utm_mediums GA data
* Test import of utm_campaigns GA data
* Add tests for UTM terms
* Add tests for UTM contents
* Add test for importing pages and entry pages data from GA
* Add test for importing exit page data
* Fix module file name typo
* Add test for importing location data from GA
* Add test for importing devices data from GA
* Add test for importing browsers data from GA
* Add test for importing OS data from GA
* Paginate GA requests to download all data
* Bump clickhouse_ecto version
* Move RefInspector wrapper function into module
* Drop timezone transform on import
* Order imported by side_id then date
* More strings -> atoms
Also changes a conditional to be a bit nicer
* Remove parallelisation of data import
* Split sources and UTM sources from fetched GA data
GA has only a "source" dimension and no "UTM source" dimension. Instead
it returns these combined. The logic herein to tease these apart is:
1. "(direct)" -> it's a direct source
2. if the source is a domain -> it's a source
3. "google" -> it's from adwords; let's make this a UTM source "adwords"
4. else -> just a UTM source
* Keep prop names in queries as strings
* fix typo
* Fix import
* Insert data to clickhouse in batches
* Fix link when removing imported data
* Merge source tables
* Import hostname as well as pathname
* Record start and end time of imported data
* Track import progress
* Fix month interval with imported data
* Do not JOIN when imported date range has no overlap
* Fix time on page using exits
Co-authored-by: mcol <mcol@posteo.net>
2022-03-11 00:04:59 +03:00
# Handles the OAuth2 callback from Google after the user grants access.
#
# `code` is the one-time authorization code exchanged for tokens; `state` is
# the JSON-encoded `[site_id, redirect_to]` pair we sent when initiating the
# OAuth flow.
#
# Depending on `redirect_to`:
#   * "import" - forwards the freshly obtained tokens to the GA view-id form
#     so the user can pick a profile to import from (tokens travel as query
#     params; they are not persisted here).
#   * anything else - persists a `Plausible.Site.GoogleAuth` record (Search
#     Console integration) and sends the user back to the site's integrations
#     settings page.
def google_auth_callback(conn, %{"code" => code, "state" => state}) do
  res = Plausible.Google.HTTP.fetch_access_token(code)
  [site_id, redirect_to] = Jason.decode!(state)
  site = Repo.get(Plausible.Site, site_id)

  # Google reports token lifetime in seconds; NaiveDateTime.add/2 defaults
  # to the :second unit.
  expires_at = NaiveDateTime.add(NaiveDateTime.utc_now(), res["expires_in"])

  case redirect_to do
    "import" ->
      redirect(conn,
        external:
          Routes.site_path(conn, :import_from_google_view_id_form, site.domain,
            access_token: res["access_token"],
            refresh_token: res["refresh_token"],
            expires_at: NaiveDateTime.to_iso8601(expires_at)
          )
      )

    _ ->
      # The id_token is a JWT; its payload (second dot-separated segment) is
      # base64url-encoded JSON carrying the Google account's email.
      id_token = res["id_token"]
      [_, body, _] = String.split(id_token, ".")
      id = body |> Base.decode64!(padding: false) |> Jason.decode!()

      Plausible.Site.GoogleAuth.changeset(%Plausible.Site.GoogleAuth{}, %{
        email: id["email"],
        refresh_token: res["refresh_token"],
        access_token: res["access_token"],
        expires: expires_at,
        user_id: conn.assigns[:current_user].id,
        site_id: site_id
      })
      |> Repo.insert!()

      # The site fetched above is still current - inserting the GoogleAuth
      # record does not modify the sites table - so no refetch is needed.
      redirect(conn, external: "/#{URI.encode_www_form(site.domain)}/settings/integrations")
  end
end
end