defmodule PlausibleWeb.Email do
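  @moduledoc """
  Email builders for Plausible's transactional and notification emails,
  composed with Bamboo and delivered via Postmark.
  """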

  use Plausible
  use Bamboo.Phoenix, view: PlausibleWeb.EmailView

  import Bamboo.PostmarkHelper

  def mailer_email_from do
    Application.get_env(:plausible, :mailer_email)
  end
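
  # The sender address is read from application config. An illustrative value
  # (assumed, not taken from this repo's config):
  #
  #     config :plausible, :mailer_email, {"Plausible", "noreply@example.com"}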

  def activation_email(user, code) do
    priority_email()
    |> to(user)
    |> tag("activation-email")
    |> subject("#{code} is your Plausible email verification code")
    |> render("activation_email.html", user: user, code: code)
  end

  def welcome_email(user) do
    base_email()
    |> to(user)
    |> tag("welcome-email")
    |> subject("Welcome to Plausible")
    |> render("welcome_email.html", user: user)
  end

  def create_site_email(user) do
    base_email()
    |> to(user)
    |> tag("create-site-email")
    |> subject("Your Plausible setup: Add your website details")
    |> render("create_site_email.html", user: user)
  end

  def site_setup_help(user, site) do
    base_email()
    |> to(user)
    |> tag("help-email")
    |> subject("Your Plausible setup: Waiting for the first page views")
    |> render("site_setup_help_email.html",
      user: user,
      site: site
    )
  end

  def site_setup_success(user, site) do
    base_email()
    |> to(user)
    |> tag("setup-success-email")
    |> subject("Plausible is now tracking your website stats")
    |> render("site_setup_success_email.html",
      user: user,
      site: site
    )
  end

  def check_stats_email(user) do
    base_email()
    |> to(user)
    |> tag("check-stats-email")
    |> subject("Check your Plausible website stats")
    |> render("check_stats_email.html", user: user)
  end

  def password_reset_email(email, reset_link) do
    priority_email(%{layout: nil})
    |> to(email)
    |> tag("password-reset-email")
    |> subject("Plausible password reset")
    |> render("password_reset_email.html", reset_link: reset_link)
  end

  def two_factor_enabled_email(user) do
    priority_email()
    |> to(user)
    |> tag("two-factor-enabled-email")
    |> subject("Plausible Two-Factor Authentication enabled")
    |> render("two_factor_enabled_email.html", user: user)
  end

  def two_factor_disabled_email(user) do
    priority_email()
    |> to(user)
    |> tag("two-factor-disabled-email")
    |> subject("Plausible Two-Factor Authentication disabled")
    |> render("two_factor_disabled_email.html", user: user)
  end

  def trial_one_week_reminder(user) do
    base_email()
    |> to(user)
    |> tag("trial-one-week-reminder")
    |> subject("Your Plausible trial expires next week")
    |> render("trial_one_week_reminder.html", user: user)
  end

  def trial_upgrade_email(user, day, usage) do
    suggested_plan = Plausible.Billing.Plans.suggest(user, usage.total)

    base_email()
    |> to(user)
    |> tag("trial-upgrade-email")
    |> subject("Your Plausible trial ends #{day}")
    |> render("trial_upgrade_email.html",
      user: user,
      day: day,
      custom_events: usage.custom_events,
      usage: usage.total,
      suggested_plan: suggested_plan
    )
  end

  def trial_over_email(user) do
    base_email()
    |> to(user)
    |> tag("trial-over-email")
    |> subject("Your Plausible trial has ended")
    |> render("trial_over_email.html",
      user: user,
      extra_offset: Plausible.Auth.User.trial_accept_traffic_until_offset_days()
    )
  end

  def stats_report(email, assigns) do
    base_email(%{layout: nil})
    |> to(email)
    |> tag("#{assigns.type}-report")
    |> subject("#{assigns.name} report for #{assigns.site.domain}")
    |> html_body(PlausibleWeb.MJML.StatsReport.render(assigns))
  end

  def spike_notification(email, site, current_visitors, sources, dashboard_link) do
    base_email()
    |> to(email)
    |> tag("spike-notification")
    |> subject("Traffic Spike on #{site.domain}")
    |> render("spike_notification.html", %{
      site: site,
      current_visitors: current_visitors,
      sources: sources,
      link: dashboard_link
    })
  end

  def over_limit_email(user, usage, suggested_plan) do
    priority_email()
    |> to(user)
    |> tag("over-limit")
    |> subject("[Action required] You have outgrown your Plausible subscription tier")
    |> render("over_limit.html", %{
      user: user,
      usage: usage,
      suggested_plan: suggested_plan
    })
  end

  def enterprise_over_limit_internal_email(user, pageview_usage, site_usage, site_allowance) do
    base_email(%{layout: nil})
    |> to("enterprise@plausible.io")
    |> tag("enterprise-over-limit")
    |> subject("#{user.email} has outgrown their enterprise plan")
    |> render("enterprise_over_limit_internal.html", %{
      user: user,
      pageview_usage: pageview_usage,
      site_usage: site_usage,
      site_allowance: site_allowance
    })
  end

  def dashboard_locked(user, usage, suggested_plan) do
    priority_email()
    |> to(user)
    |> tag("dashboard-locked")
    |> subject("[Action required] Your Plausible dashboard is now locked")
    |> render("dashboard_locked.html", %{
      user: user,
      usage: usage,
      suggested_plan: suggested_plan
    })
  end

  def yearly_renewal_notification(user) do
    date = Timex.format!(user.subscription.next_bill_date, "{Mfull} {D}, {YYYY}")

    priority_email()
    |> to(user)
    |> tag("yearly-renewal")
    |> subject("Your Plausible subscription is up for renewal")
    |> render("yearly_renewal_notification.html", %{
      user: user,
      date: date,
      next_bill_amount: user.subscription.next_bill_amount,
      currency: user.subscription.currency_code
    })
  end

  def yearly_expiration_notification(user) do
    next_bill_date = Timex.format!(user.subscription.next_bill_date, "{Mfull} {D}, {YYYY}")

    accept_traffic_until =
      user
      |> Plausible.Users.accept_traffic_until()
      |> Timex.format!("{Mfull} {D}, {YYYY}")

    priority_email()
    |> to(user)
    |> tag("yearly-expiration")
    |> subject("Your Plausible subscription is about to expire")
    |> render("yearly_expiration_notification.html", %{
      user: user,
      next_bill_date: next_bill_date,
      accept_traffic_until: accept_traffic_until
    })
  end

  def cancellation_email(user) do
    base_email()
    |> to(user.email)
    |> tag("cancelled-email")
    |> subject("Mind sharing your thoughts on Plausible?")
    |> render("cancellation_email.html", user: user)
  end

  def new_user_invitation(invitation) do
    priority_email()
    |> to(invitation.email)
    |> tag("new-user-invitation")
    |> subject("[#{Plausible.product_name()}] You've been invited to #{invitation.site.domain}")
    |> render("new_user_invitation.html",
      invitation: invitation
    )
  end

  def existing_user_invitation(invitation) do
    priority_email()
    |> to(invitation.email)
    |> tag("existing-user-invitation")
    |> subject("[#{Plausible.product_name()}] You've been invited to #{invitation.site.domain}")
    |> render("existing_user_invitation.html",
      invitation: invitation
    )
  end

  def ownership_transfer_request(invitation, new_owner_account) do
    priority_email()
    |> to(invitation.email)
    |> tag("ownership-transfer-request")
    |> subject(
      "[#{Plausible.product_name()}] Request to transfer ownership of #{invitation.site.domain}"
    )
    |> render("ownership_transfer_request.html",
      invitation: invitation,
      new_owner_account: new_owner_account
    )
  end

  def invitation_accepted(invitation) do
    priority_email()
    |> to(invitation.inviter.email)
    |> tag("invitation-accepted")
    |> subject(
      "[#{Plausible.product_name()}] #{invitation.email} accepted your invitation to #{invitation.site.domain}"
    )
    |> render("invitation_accepted.html",
      user: invitation.inviter,
      invitation: invitation
    )
  end

  def invitation_rejected(invitation) do
    priority_email()
    |> to(invitation.inviter.email)
    |> tag("invitation-rejected")
    |> subject(
      "[#{Plausible.product_name()}] #{invitation.email} rejected your invitation to #{invitation.site.domain}"
    )
    |> render("invitation_rejected.html",
      user: invitation.inviter,
      invitation: invitation
    )
  end

  def ownership_transfer_accepted(invitation) do
    priority_email()
    |> to(invitation.inviter.email)
    |> tag("ownership-transfer-accepted")
    |> subject(
      "[#{Plausible.product_name()}] #{invitation.email} accepted the ownership transfer of #{invitation.site.domain}"
    )
    |> render("ownership_transfer_accepted.html",
      user: invitation.inviter,
      invitation: invitation
    )
  end

  def ownership_transfer_rejected(invitation) do
    priority_email()
    |> to(invitation.inviter.email)
    |> tag("ownership-transfer-rejected")
    |> subject(
      "[#{Plausible.product_name()}] #{invitation.email} rejected the ownership transfer of #{invitation.site.domain}"
    )
    |> render("ownership_transfer_rejected.html",
      user: invitation.inviter,
      invitation: invitation
    )
  end

  def site_member_removed(membership) do
    priority_email()
    |> to(membership.user.email)
    |> tag("site-member-removed")
    |> subject(
      "[#{Plausible.product_name()}] Your access to #{membership.site.domain} has been revoked"
    )
    |> render("site_member_removed.html",
      user: membership.user,
      membership: membership
    )
  end

  def import_success(site_import, user) do
    import_api = Plausible.Imported.ImportSources.by_name(site_import.source)
    label = import_api.label()

    priority_email()
    |> to(user)
    |> tag("import-success-email")
    |> subject("#{label} data imported for #{site_import.site.domain}")
    |> render(import_api.email_template(), %{
      site_import: site_import,
      label: label,
      link: PlausibleWeb.Endpoint.url() <> "/" <> URI.encode_www_form(site_import.site.domain),
      user: user,
      success: true
    })
  end

  def import_failure(site_import, user) do
    import_api = Plausible.Imported.ImportSources.by_name(site_import.source)
    label = import_api.label()

    priority_email()
    |> to(user)
    |> tag("import-failure-email")
    |> subject("#{label} import failed for #{site_import.site.domain}")
    |> render(import_api.email_template(), %{
      site_import: site_import,
      label: label,
      user: user,
      success: false
    })
  end

  def export_success(user, site, expires_at) do
    expires_in =
      if expires_at do
        Timex.Format.DateTime.Formatters.Relative.format!(
          expires_at,
          "{relative}"
        )
      end

    download_url =
      PlausibleWeb.Router.Helpers.site_url(
        PlausibleWeb.Endpoint,
        :download_export,
        site.domain
      )

    priority_email()
    |> to(user)
    |> tag("export-success")
    |> subject("[#{Plausible.product_name()}] Your export is now ready for download")
    |> render("export_success.html",
      user: user,
      site: site,
      download_url: download_url,
      expires_in: expires_in
    )
  end

  def export_failure(user, site) do
    priority_email()
    |> to(user)
    |> subject("[#{Plausible.product_name()}] Your export has failed")
    |> render("export_failure.html", user: user, site: site)
  end

  # Sent without an HTML layout; Map.new() |> Map.put(:layout, nil) is
  # equivalent to starting from base_email(%{layout: nil}).
  def error_report(reported_by, trace_id, feedback) do
    Map.new()
    |> Map.put(:layout, nil)
    |> base_email()
    |> to("bugs@plausible.io")
    |> put_param("ReplyTo", reported_by)
    |> tag("sentry")
    |> subject("Feedback to Sentry Trace #{trace_id}")
    |> render("error_report_email.html", %{
      reported_by: reported_by,
      feedback: feedback,
      trace_id: trace_id
    })
  end

  def approaching_accept_traffic_until(notification) do
    base_email()
    |> to(notification.email)
    |> tag("drop-traffic-warning-first")
    |> subject("We'll stop counting your stats")
    |> render("approaching_accept_traffic_until.html",
      time: "next week",
      user: %{email: notification.email, name: notification.name}
    )
  end

  def approaching_accept_traffic_until_tomorrow(notification) do
    base_email()
    |> to(notification.email)
    |> tag("drop-traffic-warning-final")
    |> subject("A reminder that we'll stop counting your stats tomorrow")
    |> render("approaching_accept_traffic_until.html",
      time: "tomorrow",
      user: %{email: notification.email, name: notification.name}
    )
  end

  @doc """
  Unlike the default 'base' emails, priority emails cannot be unsubscribed from. This is achieved
  by sending them through a dedicated 'priority' message stream in Postmark.
  """
  def priority_email(), do: priority_email(%{layout: "priority_email.html"})

  def priority_email(%{layout: layout}) do
    base_email(%{layout: layout})
    |> put_param("MessageStream", "priority")
  end
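
  # A minimal sketch of how a new email in this module is typically assembled
  # (the "example-email" tag and "example_email.html" template are hypothetical):
  #
  #     def example_email(user) do
  #       priority_email()
  #       |> to(user)
  #       |> tag("example-email")
  #       |> subject("An example subject")
  #       |> render("example_email.html", user: user)
  #     end
  #
  # The struct returned by these builders is handed to the application's Bamboo
  # mailer for delivery; that mailer module is not defined in this file.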

  def base_email(), do: base_email(%{layout: "base_email.html"})

  def base_email(%{layout: layout}) do
    mailer_from = Application.get_env(:plausible, :mailer_email)

    new_email()
    |> put_param("TrackOpens", false)
    |> from(mailer_from)
    |> maybe_put_layout(layout)
  end

  # When callers pass `%{layout: nil}` (e.g. password_reset_email/2, stats_report/2),
  # no HTML layout is applied to the email body.
  defp maybe_put_layout(email, nil), do: email

  defp maybe_put_layout(email, layout) do
    put_html_layout(email, {PlausibleWeb.LayoutView, layout})
  end
end