2019-09-02 14:29:19 +03:00
|
|
|
defmodule PlausibleWeb.SiteController do
|
|
|
|
use PlausibleWeb, :controller
|
|
|
|
use Plausible.Repo
|
2019-10-31 08:39:51 +03:00
|
|
|
alias Plausible.{Sites, Goals}
|
2019-09-02 14:29:19 +03:00
|
|
|
|
|
|
|
plug PlausibleWeb.RequireAccountPlug
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
plug PlausibleWeb.AuthorizeSiteAccess,
|
2022-02-23 22:48:33 +03:00
|
|
|
[:owner, :admin, :super_admin] when action not in [:index, :new, :create_site]
|
2021-06-16 15:00:07 +03:00
|
|
|
|
2021-05-05 16:17:59 +03:00
|
|
|
# Lists the current user's sites alongside any pending invitations.
#
# Sites the user has been invited to (but not yet joined) are shown in a
# separate "invitations" section, so they are excluded from the paginated
# membership query below to avoid showing them twice.
def index(conn, params) do
  user = conn.assigns[:current_user]

  # Invitations are matched by the user's email address; :site is preloaded
  # because both the id-exclusion below and the template need it.
  invitations =
    Repo.all(
      from i in Plausible.Auth.Invitation,
        where: i.email == ^user.email
    )
    |> Repo.preload(:site)

  invitation_site_ids = Enum.map(invitations, & &1.site.id)

  # Paginated list of sites the user is a member of, excluding invited-to
  # sites. `params` carries the pagination options (e.g. page number).
  {sites, pagination} =
    Repo.paginate(
      from(s in Plausible.Site,
        join: sm in Plausible.Site.Membership,
        on: sm.site_id == s.id,
        where: sm.user_id == ^user.id,
        where: s.id not in ^invitation_site_ids,
        order_by: s.domain,
        preload: [memberships: sm]
      ),
      params
    )

  # Ownership check: first look at the current page's preloaded memberships
  # (each site carries exactly the current user's membership from the join),
  # then fall back to a DB-wide check for sites not on this page.
  user_owns_sites =
    Enum.any?(sites, fn site -> List.first(site.memberships).role == :owner end) ||
      Plausible.Auth.user_owns_sites?(user)

  # Last-24h visitor counts for every listed site, invited ones included.
  visitors =
    Plausible.Stats.Clickhouse.last_24h_visitors(sites ++ Enum.map(invitations, & &1.site))

  render(conn, "index.html",
    invitations: invitations,
    sites: sites,
    visitors: visitors,
    pagination: pagination,
    # Upgrade banner is only relevant to owners; non-owners never pay.
    needs_to_upgrade: user_owns_sites && Plausible.Billing.needs_to_upgrade?(user)
  )
end
|
|
|
|
|
2019-09-02 14:29:19 +03:00
|
|
|
# Renders the site creation form ("focus" layout, no site chrome).
# When the user's plan limit is reached the template shows an upgrade
# prompt instead of the form.
def new(conn, _params) do
  user = conn.assigns[:current_user]

  owned_count = Plausible.Sites.owned_sites_count(user)
  limit = Plausible.Billing.sites_limit(user)

  render(conn, "new.html",
    changeset: Plausible.Site.changeset(%Plausible.Site{}),
    is_first_site: owned_count == 0,
    # `limit` may be nil (unlimited plan), in which case the user is never at the limit.
    is_at_limit: limit && owned_count >= limit,
    site_limit: limit,
    layout: {PlausibleWeb.LayoutView, "focus.html"}
  )
end
|
|
|
|
|
|
|
|
# Creates a new site owned by the current user.
#
# On success: sends a welcome email if this is the user's first site, flags
# the session so the dashboard can offer email reports later, and redirects
# to the tracking-snippet onboarding step.
# On validation failure: re-renders the form with the invalid changeset.
# On plan-limit failure: responds 400 with a plain-text message.
def create_site(conn, %{"site" => site_params}) do
  user = conn.assigns[:current_user]
  site_count = Plausible.Sites.owned_sites_count(user)
  is_first_site = site_count == 0

  case Sites.create(user, site_params) do
    {:ok, %{site: site}} ->
      if is_first_site do
        PlausibleWeb.Email.welcome_email(user)
        |> Plausible.Mailer.send_email()
      end

      conn
      # Per-site session flag read later to offer weekly email reports.
      |> put_session(site.domain <> "_offer_email_report", true)
      |> redirect(to: Routes.site_path(conn, :add_snippet, site.domain))

    # Sites.create appears to run in a multi: this clause matches a failure
    # on the :site step and carries its changeset.
    {:error, :site, changeset, _} ->
      render(conn, "new.html",
        changeset: changeset,
        is_first_site: is_first_site,
        # Reaching this branch means creation was attempted, so the limit
        # check passed; the form is shown rather than the upgrade prompt.
        is_at_limit: false,
        layout: {PlausibleWeb.LayoutView, "focus.html"}
      )

    {:error, :limit, _limit} ->
      send_resp(conn, 400, "Site limit reached")
  end
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Onboarding step that shows the JS tracking snippet for the current site.
def add_snippet(conn, _params) do
  user = conn.assigns[:current_user]
  site = conn.assigns[:site] |> Repo.preload(:custom_domain)

  # "First site" here means the user has no membership in any *other* site —
  # note this differs from the owned-site count used in new/create_site.
  is_first_site =
    !Repo.exists?(
      from sm in Plausible.Site.Membership,
        where:
          sm.user_id == ^user.id and
            sm.site_id != ^site.id
    )

  conn
  # Don't self-track visits to the onboarding pages.
  |> assign(:skip_plausible_tracking, true)
  |> render("snippet.html",
    site: site,
    is_first_site: is_first_site,
    layout: {PlausibleWeb.LayoutView, "focus.html"}
  )
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Renders the form for defining a new goal on the current site.
def new_goal(conn, _params) do
  site = conn.assigns[:site]
  empty_goal = Plausible.Goal.changeset(%Plausible.Goal{})

  conn
  |> assign(:skip_plausible_tracking, true)
  |> render("new_goal.html",
    site: site,
    changeset: empty_goal,
    layout: {PlausibleWeb.LayoutView, "focus.html"}
  )
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Creates a goal for the current site from the submitted form params.
# On success redirects back to the goals settings tab; on validation
# failure re-renders the form with the invalid changeset.
def create_goal(conn, %{"goal" => goal}) do
  site = conn.assigns[:site]

  case Plausible.Goals.create(site, goal) do
    {:ok, _} ->
      conn
      |> put_flash(:success, "Goal created successfully")
      |> redirect(to: Routes.site_path(conn, :settings_goals, site.domain))

    {:error, changeset} ->
      conn
      |> assign(:skip_plausible_tracking, true)
      |> render("new_goal.html",
        site: site,
        changeset: changeset,
        layout: {PlausibleWeb.LayoutView, "focus.html"}
      )
  end
end
|
|
|
|
|
2019-10-31 09:20:45 +03:00
|
|
|
# Deletes a goal by id and redirects back to the goals settings tab.
#
# NOTE(review): `goal_id` is taken straight from the request and passed to
# Goals.delete/1 without being scoped to the current site — unless
# Goals.delete/1 verifies ownership internally, a site admin could delete
# goals belonging to other sites by guessing ids. Worth confirming.
def delete_goal(conn, %{"website" => website, "id" => goal_id}) do
  Plausible.Goals.delete(goal_id)

  conn
  |> put_flash(:success, "Goal deleted successfully")
  |> redirect(to: Routes.site_path(conn, :settings_goals, website))
end
|
|
|
|
|
2019-09-02 14:29:19 +03:00
|
|
|
# Legacy settings entrypoint: forwards to the "general" settings tab.
def settings(conn, %{"website" => website}) do
  general_path = Routes.site_path(conn, :settings_general, website)
  redirect(conn, to: general_path)
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Shows the "General" settings tab, including the Google Analytics import
# panel (imported pageview count is displayed when data has been imported).
def settings_general(conn, _params) do
  site =
    conn.assigns[:site]
    |> Repo.preload([:custom_domain])

  # Only query ClickHouse when an import exists; 0 otherwise.
  imported_pageviews =
    if site.imported_data do
      Plausible.Stats.Clickhouse.imported_pageview_count(site)
    else
      0
    end

  conn
  |> assign(:skip_plausible_tracking, true)
  |> render("settings_general.html",
    site: site,
    imported_pageviews: imported_pageviews,
    # Fresh empty changeset backing the "edit site" form.
    changeset: Plausible.Site.changeset(site, %{}),
    layout: {PlausibleWeb.LayoutView, "site_settings.html"}
  )
end
|
2020-11-16 16:38:44 +03:00
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Shows the "People" settings tab: site memberships (with their users)
# and pending invitations.
def settings_people(conn, _params) do
  preloads = [memberships: :user, invitations: [], custom_domain: []]
  site = Repo.preload(conn.assigns[:site], preloads)

  conn
  |> assign(:skip_plausible_tracking, true)
  |> render("settings_people.html",
    site: site,
    layout: {PlausibleWeb.LayoutView, "site_settings.html"}
  )
end
|
|
|
|
|
|
|
|
# Shows the "Visibility" settings tab: public-dashboard toggle and the
# site's shared links.
def settings_visibility(conn, _params) do
  site = Repo.preload(conn.assigns[:site], :custom_domain)

  links_query = from(l in Plausible.Site.SharedLink, where: l.site_id == ^site.id)
  shared_links = Repo.all(links_query)

  conn
  |> assign(:skip_plausible_tracking, true)
  |> render("settings_visibility.html",
    site: site,
    shared_links: shared_links,
    layout: {PlausibleWeb.LayoutView, "site_settings.html"}
  )
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Shows the "Goals" settings tab listing the site's configured goals.
def settings_goals(conn, _params) do
  site = Repo.preload(conn.assigns[:site], :custom_domain)

  conn
  |> assign(:skip_plausible_tracking, true)
  |> render("settings_goals.html",
    site: site,
    goals: Goals.for_site(site.domain),
    layout: {PlausibleWeb.LayoutView, "site_settings.html"}
  )
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Shows the "Search Console" settings tab for linking a Google property.
def settings_search_console(conn, _params) do
  site =
    conn.assigns[:site]
    |> Repo.preload([:google_auth, :custom_domain])

  # nil when no Google account is linked (the `if` has no else branch);
  # the template is expected to handle both nil and a list of properties.
  search_console_domains =
    if site.google_auth do
      Plausible.Google.Api.fetch_verified_properties(site.google_auth)
    end

  conn
  |> assign(:skip_plausible_tracking, true)
  |> render("settings_search_console.html",
    site: site,
    search_console_domains: search_console_domains,
    layout: {PlausibleWeb.LayoutView, "site_settings.html"}
  )
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Shows the "Email reports" settings tab: weekly/monthly report and traffic
# spike notification configuration. Each lookup may return nil when the
# corresponding report type is not enabled for this site.
def settings_email_reports(conn, _params) do
  site = conn.assigns[:site] |> Repo.preload(:custom_domain)

  conn
  |> assign(:skip_plausible_tracking, true)
  |> render("settings_email_reports.html",
    site: site,
    weekly_report: Repo.get_by(Plausible.Site.WeeklyReport, site_id: site.id),
    monthly_report: Repo.get_by(Plausible.Site.MonthlyReport, site_id: site.id),
    spike_notification: Repo.get_by(Plausible.Site.SpikeNotification, site_id: site.id),
    layout: {PlausibleWeb.LayoutView, "site_settings.html"}
  )
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Shows the "Custom domain" settings tab.
def settings_custom_domain(conn, _params) do
  site = Repo.preload(conn.assigns[:site], :custom_domain)

  conn
  |> assign(:skip_plausible_tracking, true)
  |> render("settings_custom_domain.html",
    site: site,
    layout: {PlausibleWeb.LayoutView, "site_settings.html"}
  )
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Shows the "Danger zone" settings tab (stats reset, site deletion).
def settings_danger_zone(conn, _params) do
  site = Repo.preload(conn.assigns[:site], :custom_domain)

  conn
  |> assign(:skip_plausible_tracking, true)
  |> render("settings_danger_zone.html",
    site: site,
    layout: {PlausibleWeb.LayoutView, "site_settings.html"}
  )
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Saves the selected Google Search Console property on the site's existing
# google_auth record, then returns to the Search Console settings tab.
# Repo.update!/1 will raise on an invalid changeset; this action assumes the
# form only submits valid property values.
def update_google_auth(conn, %{"google_auth" => attrs}) do
  site = conn.assigns[:site] |> Repo.preload(:google_auth)

  # NOTE(review): assumes google_auth is present — set_property/2 would fail
  # on nil if this action is reachable before a Google account is linked.
  Plausible.Site.GoogleAuth.set_property(site.google_auth, attrs)
  |> Repo.update!()

  conn
  |> put_flash(:success, "Google integration saved successfully")
  |> redirect(to: Routes.site_path(conn, :settings_search_console, site.domain))
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Unlinks the site's Google account by deleting its google_auth record,
# then redirects back to whichever settings panel the request came from
# (Search Console or the GA import panel on General settings).
def delete_google_auth(conn, _params) do
  site =
    conn.assigns[:site]
    |> Repo.preload(:google_auth)

  # Raises if no google_auth exists; reaching this action without a linked
  # account would be a bug.
  Repo.delete!(site.google_auth)

  conn = put_flash(conn, :success, "Google account unlinked from Plausible")

  # Derive the originating panel from the request path: take the last path
  # segment and then its last "-"-separated word.
  # NOTE(review): this couples the dispatch below to route paths whose final
  # segment ends in "-search" / "-import" — fragile; confirm against the
  # router before changing any of those routes.
  panel =
    conn.path_info
    |> List.last()
    |> String.split("-")
    |> List.last()

  case panel do
    "search" ->
      redirect(conn, to: Routes.site_path(conn, :settings_search_console, site.domain))

    "import" ->
      redirect(conn, to: Routes.site_path(conn, :settings_general, site.domain))
  end
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Persists site settings submitted from the general settings form.
#
# On success the per-site session authorization key is cleared so it gets
# re-derived on the next request, and the user is sent back to the general
# settings page. On validation failure the form is re-rendered with the
# invalid changeset.
def update_settings(conn, %{"site" => site_params}) do
  site = conn.assigns[:site]

  case Repo.update(Plausible.Site.changeset(site, site_params)) do
    {:ok, updated_site} ->
      conn
      |> put_session("authorized_site__" <> updated_site.domain, nil)
      |> put_flash(:success, "Your site settings have been saved")
      |> redirect(to: Routes.site_path(conn, :settings_general, updated_site.domain))

    {:error, changeset} ->
      render(conn, "settings_general.html", site: site, changeset: changeset)
  end
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Queues deletion of all ClickHouse stats recorded for the current site.
# The deletion is asynchronous on the ClickHouse side, hence the
# "in a few minutes" wording.
def reset_stats(conn, _params) do
  %{domain: domain} = conn.assigns[:site]

  Plausible.ClickhouseRepo.clear_stats_for(domain)

  conn
  |> put_flash(:success, "#{domain} stats will be reset in a few minutes")
  |> redirect(to: "/#{URI.encode_www_form(domain)}/settings/danger-zone")
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Deletes the current site along with all of its pageview data, then sends
# the user back to their site list.
def delete_site(conn, _params) do
  Plausible.Sites.delete!(conn.assigns[:site])

  conn
  |> put_flash(:success, "Site deleted successfully along with all pageviews")
  |> redirect(to: "/sites")
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Switches the site's dashboard to public visibility.
def make_public(conn, _params) do
  site = Repo.update!(Plausible.Site.make_public(conn.assigns[:site]))

  conn
  |> put_flash(:success, "Stats for #{site.domain} are now public.")
  |> redirect(to: Routes.site_path(conn, :settings_visibility, site.domain))
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Switches the site's dashboard back to private visibility.
def make_private(conn, _params) do
  site = Repo.update!(Plausible.Site.make_private(conn.assigns[:site]))

  conn
  |> put_flash(:success, "Stats for #{site.domain} are now private.")
  |> redirect(to: Routes.site_path(conn, :settings_visibility, site.domain))
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Creates a weekly email report subscription for the site, seeded with the
# current user's email as the only recipient.
def enable_weekly_report(conn, _params) do
  site = conn.assigns[:site]
  initial_recipient = conn.assigns[:current_user].email

  %Plausible.Site.WeeklyReport{}
  |> Plausible.Site.WeeklyReport.changeset(%{
    site_id: site.id,
    recipients: [initial_recipient]
  })
  |> Repo.insert!()

  conn
  |> put_flash(:success, "You will receive an email report every Monday going forward")
  |> redirect(to: "/#{URI.encode_www_form(site.domain)}/settings/email-reports")
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Removes the site's weekly email report subscription (and its recipients).
def disable_weekly_report(conn, _params) do
  site = conn.assigns[:site]

  from(wr in Plausible.Site.WeeklyReport, where: wr.site_id == ^site.id)
  |> Repo.delete_all()

  conn
  |> put_flash(:success, "You will not receive weekly email reports going forward")
  |> redirect(to: "/#{URI.encode_www_form(site.domain)}/settings/email-reports")
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Adds an email address to the site's weekly report recipient list.
def add_weekly_report_recipient(conn, %{"recipient" => recipient}) do
  site = conn.assigns[:site]

  Plausible.Site.WeeklyReport
  |> Repo.get_by(site_id: site.id)
  |> Plausible.Site.WeeklyReport.add_recipient(recipient)
  |> Repo.update!()

  conn
  |> put_flash(:success, "Added #{recipient} as a recipient for the weekly report")
  |> redirect(to: "/#{URI.encode_www_form(site.domain)}/settings/email-reports")
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Removes an email address from the site's weekly report recipient list.
def remove_weekly_report_recipient(conn, %{"recipient" => recipient}) do
  site = conn.assigns[:site]

  Plausible.Site.WeeklyReport
  |> Repo.get_by(site_id: site.id)
  |> Plausible.Site.WeeklyReport.remove_recipient(recipient)
  |> Repo.update!()

  conn
  |> put_flash(:success, "Removed #{recipient} as a recipient for the weekly report")
  |> redirect(to: "/#{URI.encode_www_form(site.domain)}/settings/email-reports")
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Creates a monthly email report subscription for the site, seeded with the
# current user's email as the only recipient.
def enable_monthly_report(conn, _params) do
  site = conn.assigns[:site]
  initial_recipient = conn.assigns[:current_user].email

  %Plausible.Site.MonthlyReport{}
  |> Plausible.Site.MonthlyReport.changeset(%{
    site_id: site.id,
    recipients: [initial_recipient]
  })
  |> Repo.insert!()

  conn
  |> put_flash(:success, "You will receive an email report every month going forward")
  |> redirect(to: "/#{URI.encode_www_form(site.domain)}/settings/email-reports")
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Removes the site's monthly email report subscription (and its recipients).
def disable_monthly_report(conn, _params) do
  site = conn.assigns[:site]

  from(mr in Plausible.Site.MonthlyReport, where: mr.site_id == ^site.id)
  |> Repo.delete_all()

  conn
  |> put_flash(:success, "You will not receive monthly email reports going forward")
  |> redirect(to: "/#{URI.encode_www_form(site.domain)}/settings/email-reports")
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Adds an email address to the site's monthly report recipient list.
def add_monthly_report_recipient(conn, %{"recipient" => recipient}) do
  site = conn.assigns[:site]

  Plausible.Site.MonthlyReport
  |> Repo.get_by(site_id: site.id)
  |> Plausible.Site.MonthlyReport.add_recipient(recipient)
  |> Repo.update!()

  conn
  |> put_flash(:success, "Added #{recipient} as a recipient for the monthly report")
  |> redirect(to: "/#{URI.encode_www_form(site.domain)}/settings/email-reports")
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Removes an email address from the site's monthly report recipient list.
def remove_monthly_report_recipient(conn, %{"recipient" => recipient}) do
  site = conn.assigns[:site]

  Plausible.Site.MonthlyReport
  |> Repo.get_by(site_id: site.id)
  |> Plausible.Site.MonthlyReport.remove_recipient(recipient)
  |> Repo.update!()

  conn
  |> put_flash(:success, "Removed #{recipient} as a recipient for the monthly report")
  |> redirect(to: "/#{URI.encode_www_form(site.domain)}/settings/email-reports")
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Creates a traffic-spike notification for the site with a default threshold
# of 10 and the current user's email as the initial recipient.
#
# `Repo.insert/1` is used (not the bang variant) because insertion can fail,
# e.g. when a notification already exists for the site; that case is surfaced
# to the user via an error flash instead of a crash.
def enable_spike_notification(conn, _params) do
  site = conn.assigns[:site]

  res =
    Plausible.Site.SpikeNotification.changeset(%Plausible.Site.SpikeNotification{}, %{
      site_id: site.id,
      threshold: 10,
      recipients: [conn.assigns[:current_user].email]
    })
    |> Repo.insert()

  case res do
    {:ok, _} ->
      conn
      # Fixed flash copy: previously read "You will a notification with …".
      |> put_flash(:success, "You will receive a notification with traffic spikes going forward")
      |> redirect(to: "/#{URI.encode_www_form(site.domain)}/settings/email-reports")

    {:error, _} ->
      conn
      |> put_flash(:error, "Unable to create a spike notification")
      |> redirect(to: "/#{URI.encode_www_form(site.domain)}/settings/email-reports")
  end
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Deletes the site's traffic-spike notification configuration.
def disable_spike_notification(conn, _params) do
  site = conn.assigns[:site]

  from(sn in Plausible.Site.SpikeNotification, where: sn.site_id == ^site.id)
  |> Repo.delete_all()

  conn
  |> put_flash(:success, "Spike notification disabled")
  |> redirect(to: "/#{URI.encode_www_form(site.domain)}/settings/email-reports")
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Applies submitted changes (e.g. threshold) to the site's existing
# traffic-spike notification.
def update_spike_notification(conn, %{"spike_notification" => params}) do
  site = conn.assigns[:site]

  Plausible.Site.SpikeNotification
  |> Repo.get_by(site_id: site.id)
  |> Plausible.Site.SpikeNotification.changeset(params)
  |> Repo.update!()

  conn
  |> put_flash(:success, "Notification settings updated")
  |> redirect(to: "/#{URI.encode_www_form(site.domain)}/settings/email-reports")
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Adds an email address to the site's spike-notification recipient list.
def add_spike_notification_recipient(conn, %{"recipient" => recipient}) do
  site = conn.assigns[:site]

  Plausible.Site.SpikeNotification
  |> Repo.get_by(site_id: site.id)
  |> Plausible.Site.SpikeNotification.add_recipient(recipient)
  |> Repo.update!()

  conn
  |> put_flash(:success, "Added #{recipient} as a recipient for the traffic spike notification")
  |> redirect(to: "/#{URI.encode_www_form(site.domain)}/settings/email-reports")
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Removes an email address from the site's spike-notification recipient list.
def remove_spike_notification_recipient(conn, %{"recipient" => recipient}) do
  site = conn.assigns[:site]

  Repo.get_by(Plausible.Site.SpikeNotification, site_id: site.id)
  |> Plausible.Site.SpikeNotification.remove_recipient(recipient)
  |> Repo.update!()

  conn
  |> put_flash(
    :success,
    # Fixed copy/paste from the monthly-report handler: this flash previously
    # said "monthly report" instead of "traffic spike notification".
    "Removed #{recipient} as a recipient for the traffic spike notification"
  )
  |> redirect(to: "/#{URI.encode_www_form(site.domain)}/settings/email-reports")
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Renders the form for creating a new (optionally password-protected)
# shared dashboard link.
def new_shared_link(conn, _params) do
  empty_changeset = Plausible.Site.SharedLink.changeset(%Plausible.Site.SharedLink{}, %{})

  conn
  |> assign(:skip_plausible_tracking, true)
  |> render("new_shared_link.html",
    site: conn.assigns[:site],
    changeset: empty_changeset,
    layout: {PlausibleWeb.LayoutView, "focus.html"}
  )
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Creates a shared dashboard link for the site. On success the user is sent
# back to the visibility settings; on failure the form is re-rendered with
# the invalid changeset.
def create_shared_link(conn, %{"shared_link" => link_params}) do
  site = conn.assigns[:site]

  case Sites.create_shared_link(site, link_params["name"], link_params["password"]) do
    {:ok, _link} ->
      redirect(conn, to: "/#{URI.encode_www_form(site.domain)}/settings/visibility")

    {:error, changeset} ->
      conn
      |> assign(:skip_plausible_tracking, true)
      |> render("new_shared_link.html",
        site: site,
        changeset: changeset,
        layout: {PlausibleWeb.LayoutView, "focus.html"}
      )
  end
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Renders the edit form for an existing shared link.
#
# SECURITY: the lookup is scoped to the current site via `site_id`. Looking
# the link up by slug alone would let a member authorized for one site load
# another site's shared link through this endpoint.
def edit_shared_link(conn, %{"slug" => slug}) do
  site = conn.assigns[:site]
  shared_link = Repo.get_by(Plausible.Site.SharedLink, slug: slug, site_id: site.id)
  changeset = Plausible.Site.SharedLink.changeset(shared_link, %{})

  conn
  |> assign(:skip_plausible_tracking, true)
  |> render("edit_shared_link.html",
    site: site,
    changeset: changeset,
    layout: {PlausibleWeb.LayoutView, "focus.html"}
  )
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Applies submitted changes to an existing shared link.
#
# SECURITY: the lookup is scoped to the current site via `site_id` so a
# member of one site cannot modify another site's shared link by guessing
# its slug.
def update_shared_link(conn, %{"slug" => slug, "shared_link" => params}) do
  site = conn.assigns[:site]
  shared_link = Repo.get_by(Plausible.Site.SharedLink, slug: slug, site_id: site.id)
  changeset = Plausible.Site.SharedLink.changeset(shared_link, params)

  case Repo.update(changeset) do
    {:ok, _updated} ->
      redirect(conn, to: "/#{URI.encode_www_form(site.domain)}/settings/visibility")

    {:error, changeset} ->
      conn
      |> assign(:skip_plausible_tracking, true)
      |> render("edit_shared_link.html",
        site: site,
        changeset: changeset,
        layout: {PlausibleWeb.LayoutView, "focus.html"}
      )
  end
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Deletes a shared link belonging to the current site.
#
# SECURITY: the lookup is scoped to the current site via `site_id`;
# previously any site's link could be deleted by slug alone.
def delete_shared_link(conn, %{"slug" => slug}) do
  site = conn.assigns[:site]

  Repo.get_by(Plausible.Site.SharedLink, slug: slug, site_id: site.id)
  |> Repo.delete!()

  redirect(conn, to: "/#{URI.encode_www_form(site.domain)}/settings/visibility")
end
|
|
|
|
|
2021-06-16 15:00:07 +03:00
|
|
|
# Deletes the site's custom domain, if one exists.
#
# The nil check avoids a 500: `Repo.delete!(nil)` raises when the site has
# no custom domain (e.g. a double submit or a stale settings page).
def delete_custom_domain(conn, _params) do
  site = Repo.preload(conn.assigns[:site], :custom_domain)

  if site.custom_domain do
    Repo.delete!(site.custom_domain)
  end

  conn
  |> put_flash(:success, "Custom domain deleted successfully")
  |> redirect(to: "/#{URI.encode_www_form(site.domain)}/settings/general")
end
|
[Continued] Google Analytics import (#1753)
* Add has_imported_stats boolean to Site
* Add Google Analytics import panel to general settings
* Get GA profiles to display in import settings panel
* Add import_from_google method as entrypoint to import data
* Add imported_visitors table
* Remove conflicting code from migration
* Import visitors data into clickhouse database
* Pass another dataset to main graph for rendering in red
This adds another entry to the JSON data returned via the main graph API
called `imported_plot`, which is similar to `plot` in form but will be
completed with previously imported data. Currently it simply returns
the values from `plot` / 2. The data is rendered in the main graph in
red without fill, and without an indicator for the present. Rationale:
imported data will not continue to grow so there is no projection
forward, only backwards.
* Hook imported GA data to dashboard timeseries plot
* Add settings option to forget imported data
* Import sources from google analytics
* Merge imported sources when queried
* Merge imported source data native data when querying sources
* Start converting metrics to atoms so they can be subqueried
This changes "visitors" and in some places "sources" to atoms. This does
not change the behaviour of the functions - the tests all pass unchanged
following this commit. This is necessary as joining subqueries requires
that the keys in `select` statements be atoms and not strings.
* Convert GA (direct) source to empty string
* Import utm campaign and utm medium from GA
* format
* Import all data types from GA into new tables
* Handle large amounts of more data more safely
* Fix some mistakes in tables
* Make GA requests in chunks of 5 queries
* Only display imported timeseries when there is no filter
* Correctly show last 30 minutes timeseries when 'realtime'
* Add with_imported key to Query struct
* Account for injected :is_not filter on sources from dashboard
* Also add tentative imported_utm_sources table
This needs a bit more work on the google import side, as GA do not
report sources and utm sources as distinct things.
* Return imported data to dashboard for rest of Sources panel
This extends the merge_imported function definition for sources to
utm_sources, utm_mediums and utm_campaigns too. This appears to be
working on the DB side but something is incomplete on the client side.
* Clear imported stats from all tables when requested
* Merge entry pages and exit pages from imported data into unfiltered dashboard view
This requires converting the `"visits"` and `"visit_duration"` metrics
to atoms so that they can be used in ecto subqueries.
* Display imported devices, browsers and OSs on dashboard
* Display imported country data on dashboard
* Add more metrics to entries/exits for modals
* make sure data is returned via API with correct keys
* Import regions and cities from GA
* Capitalize device upon import to match native data
* Leave query limits/offsets until after possibly joining with imported data
* Also import timeOnPage and pageviews for pages from GA
* imported_countries -> imported_locations
* Get timeOnPage and pageviews for pages from GA
These are needed for the pages modal, and for calculating exit rates for
exit pages.
* Add indicator to dashboard when imported data is being used
* Don't show imported data as separately line on main graph
* "bounce_rate" -> :bounce_rate, so it works in subqueries
* Drop imported browser and OS versions
These are not needed.
* Toggle displaying imported data by clicking indicator
* Parse referrers with RefInspector
- Use 'ga:fullReferrer' instead of 'ga:source'. This provides the actual
referrer host + path, whereas 'ga:source' includes utm_mediums and
other values when relevant.
- 'ga:fullReferrer' does however include search engine names directly,
so they are manually checked for as RefInspector won't pick up on
these.
* Keep imported data indicator on dashboard and strikethrough when hidden
* Add unlink google button to import panel
* Rename some GA browsers and OSes to plausible versions
* Get main top pages and exit pages panels working correctly with imported data
* mix format
* Fetch time_on_pages for imported data when needed
* entry pages need to fetch bounces from GA
* "sample_percent" -> :sample_percent as only atoms can be used in subqueries
* Calculate bounce_rate for joined native and imported data for top pages modal
* Flip some query bindings around to be less misleading
* Fixup entry page modal visit durations
* mix format
* Fetch bounces and visit_duration for sources from GA
* add more source metrics used for data in modals
* Make sources modals display correct values
* imported_visitors: bounce_rate -> bounces, avg_visit_duration -> visit_duration
* Merge imported data into aggregate stats
* Reformat top graph side icons
* Ensure sample_percent is yielded from aggregate data
* filter event_props should be strings
* Hide imported data from frontend when using filter
* Fix existing tests
* fix tests
* Fix imported indicator appearing when filtering
* comma needed, lost when rebasing
* Import utm_terms and utm_content from GA
* Merge imported utm_term and utm_content
* Rename imported Countries data as Locations
* Set imported city schema field to int
* Remove utm_terms and utm_content when clearing imported
* Clean locations import from Google Analytics
- Country and region should be set to "" when GA provides "(not set)"
- City should be set to 0 for "unknown", as we cannot reliably import
city data from GA.
* Display imported region and city in dashboard
* os -> operating_system in some parts of code
The inconsistency of using os in some places and operating_system in
others causes trouble with subqueries and joins for the native and
imported data, which would require additional logic to account for. The
simplest solution is the just use a consistent word for all uses. This
doesn't make any user-facing or database changes.
* to_atom -> to_existing_atom
* format
* "events" metric -> :events
* ignore imported data when "events" in metrics
* update "bounce_rate"
* atomise some more metrics from new city and region api
* atomise some more metrics for email handlers
* "conversion_rate" -> :conversion_rate during csv export
* Move imported data stats code to own module
* Move imported timeseries function to Stats.Imported
* Use Timex.parse to import dates from GA
* has_imported_stats -> imported_source
* "time_on_page" -> :time_on_page
* Convert imported GA data to UTC
* Clean up GA request code a bit
There was some weird logic here with two separate lists that really
ought to be together, so this merges those.
* Fail sooner if GA timezone can't be identified
* Link imported tables to site by id
* imported_utm_content -> imported_utm_contents
* Imported GA from all of time
* Reorganise GA data fetch logic
- Fetch data from the start of time (2005)
- Check whether no data was fetched, and if so, inform user and don't
consider data to be imported.
* Clarify removal of "visits" data when it isn't in metrics
* Apply location filters from API
This makes it consistent with the sources etc which filter out 'Direct /
None' on the API side. These filters are used by both the native and
imported data handling code, which would otherwise both duplicate the
filters in their `where` clauses.
* Do not use changeset for setting site.imported_source
* Add all metrics to all dimensions
* Run GA import in the background
* Send email when GA import completes
* Add handler to insert imported data into tests and imported_browsers_factory
* Add remaining import data test factories
* Add imported location data to test
* Test main graph with imported data
* Add imported data to operating systems tests
* Add imported data to pages tests
* Add imported data to entry pages tests
* Add imported data to exit pages tests
* Add imported data to devices tests
* Add imported data to sources tests
* Add imported data to UTM tests
* Add new test module for the data import step
* Test import of sources GA data
* Test import of utm_mediums GA data
* Test import of utm_campaigns GA data
* Add tests for UTM terms
* Add tests for UTM contents
* Add test for importing pages and entry pages data from GA
* Add test for importing exit page data
* Fix module file name typo
* Add test for importing location data from GA
* Add test for importing devices data from GA
* Add test for importing browsers data from GA
* Add test for importing OS data from GA
* Paginate GA requests to download all data
* Bump clickhouse_ecto version
* Move RefInspector wrapper function into module
* Drop timezone transform on import
* Order imported by site_id then date
* More strings -> atoms
Also changes a conditional to be a bit nicer
* Remove parallelisation of data import
* Split sources and UTM sources from fetched GA data
GA has only a "source" dimension and no "UTM source" dimension. Instead
it returns these combined. The logic herein to tease these apart is:
1. "(direct)" -> it's a direct source
2. if the source is a domain -> it's a source
3. "google" -> it's from adwords; let's make this a UTM source "adwords"
4. else -> just a UTM source
* Keep prop names in queries as strings
* fix typo
* Fix import
* Insert data to clickhouse in batches
* Fix link when removing imported data
* Merge source tables
* Import hostname as well as pathname
* Record start and end time of imported data
* Track import progress
* Fix month interval with imported data
* Do not JOIN when imported date range has no overlap
* Fix time on page using exits
Co-authored-by: mcol <mcol@posteo.net>
2022-03-11 00:04:59 +03:00
|
|
|
|
2022-03-24 12:49:45 +03:00
|
|
|
# Renders the notice shown before a Google Analytics import when the chosen
# view predates GA's new user metric; the OAuth tokens and view id are
# threaded through to the form so the import flow can continue.
def import_from_google_user_metric_notice(conn, %{
      "view_id" => view_id,
      "access_token" => access_token,
      "refresh_token" => refresh_token,
      "expires_at" => expires_at
    }) do
  conn
  |> assign(:skip_plausible_tracking, true)
  |> render("import_from_google_user_metric_form.html",
    site: conn.assigns[:site],
    view_id: view_id,
    access_token: access_token,
    refresh_token: refresh_token,
    expires_at: expires_at,
    layout: {PlausibleWeb.LayoutView, "focus.html"}
  )
end
|
|
|
|
|
2022-09-26 12:29:56 +03:00
|
|
|
# Renders the Google Analytics view picker, listing the views available to
# the OAuth token so the user can choose which one to import from.
def import_from_google_view_id_form(conn, %{
      "access_token" => access_token,
      "refresh_token" => refresh_token,
      "expires_at" => expires_at
    }) do
  site = conn.assigns[:site]
  available_views = Plausible.Google.Api.list_views(access_token)

  conn
  |> assign(:skip_plausible_tracking, true)
  |> render("import_from_google_view_id_form.html",
    access_token: access_token,
    refresh_token: refresh_token,
    expires_at: expires_at,
    site: site,
    view_ids: available_views,
    layout: {PlausibleWeb.LayoutView, "focus.html"}
  )
end
|
|
|
|
|
2022-03-24 12:49:45 +03:00
|
|
|
# Cutoff date used by the GA import flow; views with data before this date
# trigger the user-metric notice. See https://stackoverflow.com/a/57416769
@google_analytics_new_user_metric_date ~D[2016-08-24]
|
2022-09-26 12:29:56 +03:00
|
|
|
def import_from_google_view_id(conn, %{
|
|
|
|
"view_id" => view_id,
|
|
|
|
"access_token" => access_token,
|
|
|
|
"refresh_token" => refresh_token,
|
|
|
|
"expires_at" => expires_at
|
|
|
|
}) do
|
2022-03-22 17:09:45 +03:00
|
|
|
site = conn.assigns[:site]
|
2022-08-03 12:25:50 +03:00
|
|
|
start_date = Plausible.Google.HTTP.get_analytics_start_date(view_id, access_token)
|
[Continued] Google Analytics import (#1753)
* Add has_imported_stats boolean to Site
* Add Google Analytics import panel to general settings
* Get GA profiles to display in import settings panel
* Add import_from_google method as entrypoint to import data
* Add imported_visitors table
* Remove conflicting code from migration
* Import visitors data into clickhouse database
* Pass another dataset to main graph for rendering in red
This adds another entry to the JSON data returned via the main graph API
called `imported_plot`, which is similar to `plot` in form but will be
completed with previously imported data. Currently it simply returns
the values from `plot` / 2. The data is rendered in the main graph in
red without fill, and without an indicator for the present. Rationale:
imported data will not continue to grow so there is no projection
forward, only backwards.
* Hook imported GA data to dashboard timeseries plot
* Add settings option to forget imported data
* Import sources from google analytics
* Merge imported sources when queried
* Merge imported source data native data when querying sources
* Start converting metrics to atoms so they can be subqueried
This changes "visitors" and in some places "sources" to atoms. This does
not change the behaviour of the functions - the tests all pass unchanged
following this commit. This is necessary as joining subqueries requires
that the keys in `select` statements be atoms and not strings.
* Convery GA (direct) source to empty string
* Import utm campaign and utm medium from GA
* format
* Import all data types from GA into new tables
* Handle large amounts of more data more safely
* Fix some mistakes in tables
* Make GA requests in chunks of 5 queries
* Only display imported timeseries when there is no filter
* Correctly show last 30 minutes timeseries when 'realtime'
* Add with_imported key to Query struct
* Account for injected :is_not filter on sources from dashboard
* Also add tentative imported_utm_sources table
This needs a bit more work on the google import side, as GA do not
report sources and utm sources as distinct things.
* Return imported data to dashboard for rest of Sources panel
This extends the merge_imported function definition for sources to
utm_sources, utm_mediums and utm_campaigns too. This appears to be
working on the DB side but something is incomplete on the client side.
* Clear imported stats from all tables when requested
* Merge entry pages and exit pages from imported data into unfiltered dashboard view
This requires converting the `"visits"` and `"visit_duration"` metrics
to atoms so that they can be used in ecto subqueries.
* Display imported devices, browsers and OSs on dashboard
* Display imported country data on dashboard
* Add more metrics to entries/exits for modals
* make sure data is returned via API with correct keys
* Import regions and cities from GA
* Capitalize device upon import to match native data
* Leave query limits/offsets until after possibly joining with imported data
* Also import timeOnPage and pageviews for pages from GA
* imported_countries -> imported_locations
* Get timeOnPage and pageviews for pages from GA
These are needed for the pages modal, and for calculating exit rates for
exit pages.
* Add indicator to dashboard when imported data is being used
* Don't show imported data as separately line on main graph
* "bounce_rate" -> :bounce_rate, so it works in subqueries
* Drop imported browser and OS versions
These are not needed.
* Toggle displaying imported data by clicking indicator
* Parse referrers with RefInspector
- Use 'ga:fullReferrer' instead of 'ga:source'. This provides the actual
referrer host + path, whereas 'ga:source' includes utm_mediums and
other values when relevant.
- 'ga:fullReferror' does however include search engine names directly,
so they are manually checked for as RefInspector won't pick up on
these.
* Keep imported data indicator on dashboard and strikethrough when hidden
* Add unlink google button to import panel
* Rename some GA browsers and OSes to plausible versions
* Get main top pages and exit pages panels working correctly with imported data
* mix format
* Fetch time_on_pages for imported data when needed
* entry pages need to fetch bounces from GA
* "sample_percent" -> :sample_percent as only atoms can be used in subqueries
* Calculate bounce_rate for joined native and imported data for top pages modal
* Flip some query bindings around to be less misleading
* Fixup entry page modal visit durations
* mix format
* Fetch bounces and visit_duration for sources from GA
* add more source metrics used for data in modals
* Make sources modals display correct values
* imported_visitors: bounce_rate -> bounces, avg_visit_duration -> visit_duration
* Merge imported data into aggregate stats
* Reformat top graph side icons
* Ensure sample_percent is yielded from aggregate data
* filter event_props should be strings
* Hide imported data from frontend when using filter
* Fix existing tests
* fix tests
* Fix imported indicator appearing when filtering
* comma needed, lost when rebasing
* Import utm_terms and utm_content from GA
* Merge imported utm_term and utm_content
* Rename imported Countries data as Locations
* Set imported city schema field to int
* Remove utm_terms and utm_content when clearing imported
* Clean locations import from Google Analytics
- Country and region should be set to "" when GA provides "(not set)"
- City should be set to 0 for "unknown", as we cannot reliably import
city data from GA.
* Display imported region and city in dashboard
* os -> operating_system in some parts of code
The inconsistency of using os in some places and operating_system in
others causes trouble with subqueries and joins for the native and
imported data, which would require additional logic to account for. The
simplest solution is the just use a consistent word for all uses. This
doesn't make any user-facing or database changes.
* to_atom -> to_existing_atom
* format
* "events" metric -> :events
* ignore imported data when "events" in metrics
* update "bounce_rate"
* atomise some more metrics from new city and region api
* atomise some more metrics for email handlers
* "conversion_rate" -> :conversion_rate during csv export
* Move imported data stats code to own module
* Move imported timeseries function to Stats.Imported
* Use Timex.parse to import dates from GA
* has_imported_stats -> imported_source
* "time_on_page" -> :time_on_page
* Convert imported GA data to UTC
* Clean up GA request code a bit
There was some weird logic here with two separate lists that really
ought to be together, so this merges those.
* Fail sooner if GA timezone can't be identified
* Link imported tables to site by id
* imported_utm_content -> imported_utm_contents
* Imported GA from all of time
* Reorganise GA data fetch logic
- Fetch data from the start of time (2005)
- Check whether no data was fetched, and if so, inform user and don't
consider data to be imported.
* Clarify removal of "visits" data when it isn't in metrics
* Apply location filters from API
This makes it consistent with the sources etc which filter out 'Direct /
None' on the API side. These filters are used by both the native and
imported data handling code, which would otherwise both duplicate the
filters in their `where` clauses.
* Do not use changeset for setting site.imported_source
* Add all metrics to all dimensions
* Run GA import in the background
* Send email when GA import completes
* Add handler to insert imported data into tests and imported_browsers_factory
* Add remaining import data test factories
* Add imported location data to test
* Test main graph with imported data
* Add imported data to operating systems tests
* Add imported data to pages tests
* Add imported data to entry pages tests
* Add imported data to exit pages tests
* Add imported data to devices tests
* Add imported data to sources tests
* Add imported data to UTM tests
* Add new test module for the data import step
* Test import of sources GA data
* Test import of utm_mediums GA data
* Test import of utm_campaigns GA data
* Add tests for UTM terms
* Add tests for UTM contents
* Add test for importing pages and entry pages data from GA
* Add test for importing exit page data
* Fix module file name typo
* Add test for importing location data from GA
* Add test for importing devices data from GA
* Add test for importing browsers data from GA
* Add test for importing OS data from GA
* Paginate GA requests to download all data
* Bump clickhouse_ecto version
* Move RefInspector wrapper function into module
* Drop timezone transform on import
* Order imported by side_id then date
* More strings -> atoms
Also changes a conditional to be a bit nicer
* Remove parallelisation of data import
* Split sources and UTM sources from fetched GA data
GA has only a "source" dimension and no "UTM source" dimension. Instead
it returns these combined. The logic herein to tease these apart is:
1. "(direct)" -> it's a direct source
2. if the source is a domain -> it's a source
3. "google" -> it's from adwords; let's make this a UTM source "adwords"
4. else -> just a UTM source
* Keep prop names in queries as strings
* fix typo
* Fix import
* Insert data to clickhouse in batches
* Fix link when removing imported data
* Merge source tables
* Import hostname as well as pathname
* Record start and end time of imported data
* Track import progress
* Fix month interval with imported data
* Do not JOIN when imported date range has no overlap
* Fix time on page using exits
Co-authored-by: mcol <mcol@posteo.net>
2022-03-11 00:04:59 +03:00
|
|
|
|
2022-03-24 12:49:45 +03:00
|
|
|
case start_date do
|
2022-06-02 14:40:52 +03:00
|
|
|
{:ok, nil} ->
|
|
|
|
site = conn.assigns[:site]
|
2022-09-08 21:02:17 +03:00
|
|
|
view_ids = Plausible.Google.Api.list_views(access_token)
|
2022-06-02 14:40:52 +03:00
|
|
|
|
|
|
|
conn
|
|
|
|
|> assign(:skip_plausible_tracking, true)
|
|
|
|
|> render("import_from_google_view_id_form.html",
|
|
|
|
access_token: access_token,
|
2022-09-26 12:29:56 +03:00
|
|
|
refresh_token: refresh_token,
|
|
|
|
expires_at: expires_at,
|
2022-06-02 14:40:52 +03:00
|
|
|
site: site,
|
|
|
|
view_ids: view_ids,
|
|
|
|
selected_view_id_error: "No data found. Nothing to import",
|
|
|
|
layout: {PlausibleWeb.LayoutView, "focus.html"}
|
|
|
|
)
|
|
|
|
|
2022-03-24 12:49:45 +03:00
|
|
|
{:ok, date} ->
|
|
|
|
if Timex.before?(date, @google_analytics_new_user_metric_date) do
|
|
|
|
redirect(conn,
|
|
|
|
to:
|
|
|
|
Routes.site_path(conn, :import_from_google_user_metric_notice, site.domain,
|
|
|
|
view_id: view_id,
|
2022-09-26 12:29:56 +03:00
|
|
|
access_token: access_token,
|
|
|
|
refresh_token: refresh_token,
|
|
|
|
expires_at: expires_at
|
2022-03-24 12:49:45 +03:00
|
|
|
)
|
|
|
|
)
|
|
|
|
else
|
|
|
|
redirect(conn,
|
|
|
|
to:
|
|
|
|
Routes.site_path(conn, :import_from_google_confirm, site.domain,
|
|
|
|
view_id: view_id,
|
2022-09-26 12:29:56 +03:00
|
|
|
access_token: access_token,
|
|
|
|
refresh_token: refresh_token,
|
|
|
|
expires_at: expires_at
|
2022-03-24 12:49:45 +03:00
|
|
|
)
|
|
|
|
)
|
|
|
|
end
|
|
|
|
end
|
2022-03-22 17:09:45 +03:00
|
|
|
end
|
[Continued] Google Analytics import (#1753)
* Add has_imported_stats boolean to Site
* Add Google Analytics import panel to general settings
* Get GA profiles to display in import settings panel
* Add import_from_google method as entrypoint to import data
* Add imported_visitors table
* Remove conflicting code from migration
* Import visitors data into clickhouse database
* Pass another dataset to main graph for rendering in red
This adds another entry to the JSON data returned via the main graph API
called `imported_plot`, which is similar to `plot` in form but will be
completed with previously imported data. Currently it simply returns
the values from `plot` / 2. The data is rendered in the main graph in
red without fill, and without an indicator for the present. Rationale:
imported data will not continue to grow so there is no projection
forward, only backwards.
* Hook imported GA data to dashboard timeseries plot
* Add settings option to forget imported data
* Import sources from google analytics
* Merge imported sources when queried
* Merge imported source data native data when querying sources
* Start converting metrics to atoms so they can be subqueried
This changes "visitors" and in some places "sources" to atoms. This does
not change the behaviour of the functions - the tests all pass unchanged
following this commit. This is necessary as joining subqueries requires
that the keys in `select` statements be atoms and not strings.
* Convery GA (direct) source to empty string
* Import utm campaign and utm medium from GA
* format
* Import all data types from GA into new tables
* Handle large amounts of more data more safely
* Fix some mistakes in tables
* Make GA requests in chunks of 5 queries
* Only display imported timeseries when there is no filter
* Correctly show last 30 minutes timeseries when 'realtime'
* Add with_imported key to Query struct
* Account for injected :is_not filter on sources from dashboard
* Also add tentative imported_utm_sources table
This needs a bit more work on the google import side, as GA do not
report sources and utm sources as distinct things.
* Return imported data to dashboard for rest of Sources panel
This extends the merge_imported function definition for sources to
utm_sources, utm_mediums and utm_campaigns too. This appears to be
working on the DB side but something is incomplete on the client side.
* Clear imported stats from all tables when requested
* Merge entry pages and exit pages from imported data into unfiltered dashboard view
This requires converting the `"visits"` and `"visit_duration"` metrics
to atoms so that they can be used in ecto subqueries.
* Display imported devices, browsers and OSs on dashboard
* Display imported country data on dashboard
* Add more metrics to entries/exits for modals
* make sure data is returned via API with correct keys
* Import regions and cities from GA
* Capitalize device upon import to match native data
* Leave query limits/offsets until after possibly joining with imported data
* Also import timeOnPage and pageviews for pages from GA
* imported_countries -> imported_locations
* Get timeOnPage and pageviews for pages from GA
These are needed for the pages modal, and for calculating exit rates for
exit pages.
* Add indicator to dashboard when imported data is being used
* Don't show imported data as separately line on main graph
* "bounce_rate" -> :bounce_rate, so it works in subqueries
* Drop imported browser and OS versions
These are not needed.
* Toggle displaying imported data by clicking indicator
* Parse referrers with RefInspector
- Use 'ga:fullReferrer' instead of 'ga:source'. This provides the actual
referrer host + path, whereas 'ga:source' includes utm_mediums and
other values when relevant.
- 'ga:fullReferror' does however include search engine names directly,
so they are manually checked for as RefInspector won't pick up on
these.
* Keep imported data indicator on dashboard and strikethrough when hidden
* Add unlink google button to import panel
* Rename some GA browsers and OSes to plausible versions
* Get main top pages and exit pages panels working correctly with imported data
* mix format
* Fetch time_on_pages for imported data when needed
* entry pages need to fetch bounces from GA
* "sample_percent" -> :sample_percent as only atoms can be used in subqueries
* Calculate bounce_rate for joined native and imported data for top pages modal
* Flip some query bindings around to be less misleading
* Fixup entry page modal visit durations
* mix format
* Fetch bounces and visit_duration for sources from GA
* add more source metrics used for data in modals
* Make sources modals display correct values
* imported_visitors: bounce_rate -> bounces, avg_visit_duration -> visit_duration
* Merge imported data into aggregate stats
* Reformat top graph side icons
* Ensure sample_percent is yielded from aggregate data
* filter event_props should be strings
* Hide imported data from frontend when using filter
* Fix existing tests
* fix tests
* Fix imported indicator appearing when filtering
* comma needed, lost when rebasing
* Import utm_terms and utm_content from GA
* Merge imported utm_term and utm_content
* Rename imported Countries data as Locations
* Set imported city schema field to int
* Remove utm_terms and utm_content when clearing imported
* Clean locations import from Google Analytics
- Country and region should be set to "" when GA provides "(not set)"
- City should be set to 0 for "unknown", as we cannot reliably import
city data from GA.
* Display imported region and city in dashboard
* os -> operating_system in some parts of code
The inconsistency of using os in some places and operating_system in
others causes trouble with subqueries and joins for the native and
imported data, which would require additional logic to account for. The
simplest solution is the just use a consistent word for all uses. This
doesn't make any user-facing or database changes.
* to_atom -> to_existing_atom
* format
* "events" metric -> :events
* ignore imported data when "events" in metrics
* update "bounce_rate"
* atomise some more metrics from new city and region api
* atomise some more metrics for email handlers
* "conversion_rate" -> :conversion_rate during csv export
* Move imported data stats code to own module
* Move imported timeseries function to Stats.Imported
* Use Timex.parse to import dates from GA
* has_imported_stats -> imported_source
* "time_on_page" -> :time_on_page
* Convert imported GA data to UTC
* Clean up GA request code a bit
There was some weird logic here with two separate lists that really
ought to be together, so this merges those.
* Fail sooner if GA timezone can't be identified
* Link imported tables to site by id
* imported_utm_content -> imported_utm_contents
* Imported GA from all of time
* Reorganise GA data fetch logic
- Fetch data from the start of time (2005)
- Check whether no data was fetched, and if so, inform user and don't
consider data to be imported.
* Clarify removal of "visits" data when it isn't in metrics
* Apply location filters from API
This makes it consistent with the sources etc which filter out 'Direct /
None' on the API side. These filters are used by both the native and
imported data handling code, which would otherwise both duplicate the
filters in their `where` clauses.
* Do not use changeset for setting site.imported_source
* Add all metrics to all dimensions
* Run GA import in the background
* Send email when GA import completes
* Add handler to insert imported data into tests and imported_browsers_factory
* Add remaining import data test factories
* Add imported location data to test
* Test main graph with imported data
* Add imported data to operating systems tests
* Add imported data to pages tests
* Add imported data to entry pages tests
* Add imported data to exit pages tests
* Add imported data to devices tests
* Add imported data to sources tests
* Add imported data to UTM tests
* Add new test module for the data import step
* Test import of sources GA data
* Test import of utm_mediums GA data
* Test import of utm_campaigns GA data
* Add tests for UTM terms
* Add tests for UTM contents
* Add test for importing pages and entry pages data from GA
* Add test for importing exit page data
* Fix module file name typo
* Add test for importing location data from GA
* Add test for importing devices data from GA
* Add test for importing browsers data from GA
* Add test for importing OS data from GA
* Paginate GA requests to download all data
* Bump clickhouse_ecto version
* Move RefInspector wrapper function into module
* Drop timezone transform on import
* Order imported by side_id then date
* More strings -> atoms
Also changes a conditional to be a bit nicer
* Remove parallelisation of data import
* Split sources and UTM sources from fetched GA data
GA has only a "source" dimension and no "UTM source" dimension. Instead
it returns these combined. The logic herein to tease these apart is:
1. "(direct)" -> it's a direct source
2. if the source is a domain -> it's a source
3. "google" -> it's from adwords; let's make this a UTM source "adwords"
4. else -> just a UTM source
* Keep prop names in queries as strings
* fix typo
* Fix import
* Insert data to clickhouse in batches
* Fix link when removing imported data
* Merge source tables
* Import hostname as well as pathname
* Record start and end time of imported data
* Track import progress
* Fix month interval with imported data
* Do not JOIN when imported date range has no overlap
* Fix time on page using exits
Co-authored-by: mcol <mcol@posteo.net>
2022-03-11 00:04:59 +03:00
|
|
|
|
2022-09-26 12:29:56 +03:00
|
|
|
@doc false
# Renders the final confirmation page of the GA import flow, showing the
# resolved view name and the date range that would be imported.
def import_from_google_confirm(conn, %{
      "view_id" => view_id,
      "access_token" => access_token,
      "refresh_token" => refresh_token,
      "expires_at" => expires_at
    }) do
  site = conn.assigns[:site]

  # NOTE(review): `get_analytics_start_date/2` returns an {:ok, date}-shaped
  # tuple in `import_from_google_view_id/2`; here its raw result is passed
  # to the template as `start_date` - confirm the template expects the tuple.
  start_date = Plausible.Google.HTTP.get_analytics_start_date(view_id, access_token)
  # Import only up to where native Plausible stats begin (or today, for a
  # site that has recorded no stats yet).
  end_date = Plausible.Sites.stats_start_date(site) || Timex.today(site.timezone)

  # Assertive match: crash (let it crash) if the view can't be resolved.
  # Rebinds `view_id` to the value returned by the API.
  {:ok, {view_name, view_id}} = Plausible.Google.Api.get_view(access_token, view_id)

  conn
  |> assign(:skip_plausible_tracking, true)
  |> render("import_from_google_confirm.html",
    access_token: access_token,
    refresh_token: refresh_token,
    expires_at: expires_at,
    site: site,
    selected_view_id: view_id,
    selected_view_id_name: view_name,
    start_date: start_date,
    end_date: end_date,
    layout: {PlausibleWeb.LayoutView, "focus.html"}
  )
end
|
|
|
|
|
|
|
|
def import_from_google(conn, %{
|
|
|
|
"view_id" => view_id,
|
|
|
|
"start_date" => start_date,
|
|
|
|
"end_date" => end_date,
|
2022-09-26 12:29:56 +03:00
|
|
|
"access_token" => access_token,
|
|
|
|
"refresh_token" => refresh_token,
|
|
|
|
"expires_at" => expires_at
|
2022-03-22 17:09:45 +03:00
|
|
|
}) do
|
|
|
|
site = conn.assigns[:site]
|
|
|
|
|
|
|
|
job =
|
|
|
|
Plausible.Workers.ImportGoogleAnalytics.new(%{
|
|
|
|
"site_id" => site.id,
|
|
|
|
"view_id" => view_id,
|
|
|
|
"start_date" => start_date,
|
|
|
|
"end_date" => end_date,
|
2022-09-26 12:29:56 +03:00
|
|
|
"access_token" => access_token,
|
|
|
|
"refresh_token" => refresh_token,
|
|
|
|
"token_expires_at" => expires_at
|
2022-03-22 17:09:45 +03:00
|
|
|
})
|
|
|
|
|
|
|
|
Ecto.Multi.new()
|
|
|
|
|> Ecto.Multi.update(
|
|
|
|
:update_site,
|
|
|
|
Plausible.Site.start_import(site, start_date, end_date, "Google Analytics")
|
|
|
|
)
|
|
|
|
|> Oban.insert(:oban_job, job)
|
|
|
|
|> Repo.transaction()
|
|
|
|
|
|
|
|
conn
|
|
|
|
|> put_flash(:success, "Import scheduled. An email will be sent when it completes.")
|
|
|
|
|> redirect(to: Routes.site_path(conn, :settings_general, site.domain))
|
[Continued] Google Analytics import (#1753)
* Add has_imported_stats boolean to Site
* Add Google Analytics import panel to general settings
* Get GA profiles to display in import settings panel
* Add import_from_google method as entrypoint to import data
* Add imported_visitors table
* Remove conflicting code from migration
* Import visitors data into clickhouse database
* Pass another dataset to main graph for rendering in red
This adds another entry to the JSON data returned via the main graph API
called `imported_plot`, which is similar to `plot` in form but will be
completed with previously imported data. Currently it simply returns
the values from `plot` / 2. The data is rendered in the main graph in
red without fill, and without an indicator for the present. Rationale:
imported data will not continue to grow so there is no projection
forward, only backwards.
* Hook imported GA data to dashboard timeseries plot
* Add settings option to forget imported data
* Import sources from google analytics
* Merge imported sources when queried
* Merge imported source data native data when querying sources
* Start converting metrics to atoms so they can be subqueried
This changes "visitors" and in some places "sources" to atoms. This does
not change the behaviour of the functions - the tests all pass unchanged
following this commit. This is necessary as joining subqueries requires
that the keys in `select` statements be atoms and not strings.
* Convery GA (direct) source to empty string
* Import utm campaign and utm medium from GA
* format
* Import all data types from GA into new tables
* Handle large amounts of more data more safely
* Fix some mistakes in tables
* Make GA requests in chunks of 5 queries
* Only display imported timeseries when there is no filter
* Correctly show last 30 minutes timeseries when 'realtime'
* Add with_imported key to Query struct
* Account for injected :is_not filter on sources from dashboard
* Also add tentative imported_utm_sources table
This needs a bit more work on the google import side, as GA do not
report sources and utm sources as distinct things.
* Return imported data to dashboard for rest of Sources panel
This extends the merge_imported function definition for sources to
utm_sources, utm_mediums and utm_campaigns too. This appears to be
working on the DB side but something is incomplete on the client side.
* Clear imported stats from all tables when requested
* Merge entry pages and exit pages from imported data into unfiltered dashboard view
This requires converting the `"visits"` and `"visit_duration"` metrics
to atoms so that they can be used in ecto subqueries.
* Display imported devices, browsers and OSs on dashboard
* Display imported country data on dashboard
* Add more metrics to entries/exits for modals
* make sure data is returned via API with correct keys
* Import regions and cities from GA
* Capitalize device upon import to match native data
* Leave query limits/offsets until after possibly joining with imported data
* Also import timeOnPage and pageviews for pages from GA
* imported_countries -> imported_locations
* Get timeOnPage and pageviews for pages from GA
These are needed for the pages modal, and for calculating exit rates for
exit pages.
* Add indicator to dashboard when imported data is being used
* Don't show imported data as separately line on main graph
* "bounce_rate" -> :bounce_rate, so it works in subqueries
* Drop imported browser and OS versions
These are not needed.
* Toggle displaying imported data by clicking indicator
* Parse referrers with RefInspector
- Use 'ga:fullReferrer' instead of 'ga:source'. This provides the actual
referrer host + path, whereas 'ga:source' includes utm_mediums and
other values when relevant.
- 'ga:fullReferror' does however include search engine names directly,
so they are manually checked for as RefInspector won't pick up on
these.
* Keep imported data indicator on dashboard and strikethrough when hidden
* Add unlink google button to import panel
* Rename some GA browsers and OSes to plausible versions
* Get main top pages and exit pages panels working correctly with imported data
* mix format
* Fetch time_on_pages for imported data when needed
* entry pages need to fetch bounces from GA
* "sample_percent" -> :sample_percent as only atoms can be used in subqueries
* Calculate bounce_rate for joined native and imported data for top pages modal
* Flip some query bindings around to be less misleading
* Fixup entry page modal visit durations
* mix format
* Fetch bounces and visit_duration for sources from GA
* add more source metrics used for data in modals
* Make sources modals display correct values
* imported_visitors: bounce_rate -> bounces, avg_visit_duration -> visit_duration
* Merge imported data into aggregate stats
* Reformat top graph side icons
* Ensure sample_percent is yielded from aggregate data
* filter event_props should be strings
* Hide imported data from frontend when using filter
* Fix existing tests
* fix tests
* Fix imported indicator appearing when filtering
* comma needed, lost when rebasing
* Import utm_terms and utm_content from GA
* Merge imported utm_term and utm_content
* Rename imported Countries data as Locations
* Set imported city schema field to int
* Remove utm_terms and utm_content when clearing imported
* Clean locations import from Google Analytics
- Country and region should be set to "" when GA provides "(not set)"
- City should be set to 0 for "unknown", as we cannot reliably import
city data from GA.
* Display imported region and city in dashboard
* os -> operating_system in some parts of code
The inconsistency of using os in some places and operating_system in
others causes trouble with subqueries and joins for the native and
imported data, which would require additional logic to account for. The
simplest solution is the just use a consistent word for all uses. This
doesn't make any user-facing or database changes.
* to_atom -> to_existing_atom
* format
* "events" metric -> :events
* ignore imported data when "events" in metrics
* update "bounce_rate"
* atomise some more metrics from new city and region api
* atomise some more metrics for email handlers
* "conversion_rate" -> :conversion_rate during csv export
* Move imported data stats code to own module
* Move imported timeseries function to Stats.Imported
* Use Timex.parse to import dates from GA
* has_imported_stats -> imported_source
* "time_on_page" -> :time_on_page
* Convert imported GA data to UTC
* Clean up GA request code a bit
There was some weird logic here with two separate lists that really
ought to be together, so this merges those.
* Fail sooner if GA timezone can't be identified
* Link imported tables to site by id
* imported_utm_content -> imported_utm_contents
* Imported GA from all of time
* Reorganise GA data fetch logic
- Fetch data from the start of time (2005)
- Check whether no data was fetched, and if so, inform user and don't
consider data to be imported.
* Clarify removal of "visits" data when it isn't in metrics
* Apply location filters from API
This makes it consistent with the sources etc which filter out 'Direct /
None' on the API side. These filters are used by both the native and
imported data handling code, which would otherwise both duplicate the
filters in their `where` clauses.
* Do not use changeset for setting site.imported_source
* Add all metrics to all dimensions
* Run GA import in the background
* Send email when GA import completes
* Add handler to insert imported data into tests and imported_browsers_factory
* Add remaining import data test factories
* Add imported location data to test
* Test main graph with imported data
* Add imported data to operating systems tests
* Add imported data to pages tests
* Add imported data to entry pages tests
* Add imported data to exit pages tests
* Add imported data to devices tests
* Add imported data to sources tests
* Add imported data to UTM tests
* Add new test module for the data import step
* Test import of sources GA data
* Test import of utm_mediums GA data
* Test import of utm_campaigns GA data
* Add tests for UTM terms
* Add tests for UTM contents
* Add test for importing pages and entry pages data from GA
* Add test for importing exit page data
* Fix module file name typo
* Add test for importing location data from GA
* Add test for importing devices data from GA
* Add test for importing browsers data from GA
* Add test for importing OS data from GA
* Paginate GA requests to download all data
* Bump clickhouse_ecto version
* Move RefInspector wrapper function into module
* Drop timezone transform on import
* Order imported by side_id then date
* More strings -> atoms
Also changes a conditional to be a bit nicer
* Remove parallelisation of data import
* Split sources and UTM sources from fetched GA data
GA has only a "source" dimension and no "UTM source" dimension. Instead
it returns these combined. The logic herein to tease these apart is:
1. "(direct)" -> it's a direct source
2. if the source is a domain -> it's a source
3. "google" -> it's from adwords; let's make this a UTM source "adwords"
4. else -> just a UTM source
* Keep prop names in queries as strings
* fix typo
* Fix import
* Insert data to clickhouse in batches
* Fix link when removing imported data
* Merge source tables
* Import hostname as well as pathname
* Record start and end time of imported data
* Track import progress
* Fix month interval with imported data
* Do not JOIN when imported date range has no overlap
* Fix time on page using exits
Co-authored-by: mcol <mcol@posteo.net>
2022-03-11 00:04:59 +03:00
|
|
|
end
|
|
|
|
|
|
|
|
def forget_imported(conn, _params) do
|
|
|
|
site = conn.assigns[:site]
|
|
|
|
|
|
|
|
cond do
|
|
|
|
site.imported_data ->
|
2022-03-23 12:58:36 +03:00
|
|
|
Oban.cancel_all_jobs(
|
|
|
|
from(j in Oban.Job,
|
|
|
|
where:
|
|
|
|
j.queue == "google_analytics_imports" and
|
|
|
|
fragment("(? ->> 'site_id')::int", j.args) == ^site.id
|
|
|
|
)
|
|
|
|
)
|
|
|
|
|
[Continued] Google Analytics import (#1753)
* Add has_imported_stats boolean to Site
* Add Google Analytics import panel to general settings
* Get GA profiles to display in import settings panel
* Add import_from_google method as entrypoint to import data
* Add imported_visitors table
* Remove conflicting code from migration
* Import visitors data into clickhouse database
* Pass another dataset to main graph for rendering in red
This adds another entry to the JSON data returned via the main graph API
called `imported_plot`, which is similar to `plot` in form but will be
completed with previously imported data. Currently it simply returns
the values from `plot` / 2. The data is rendered in the main graph in
red without fill, and without an indicator for the present. Rationale:
imported data will not continue to grow so there is no projection
forward, only backwards.
* Hook imported GA data to dashboard timeseries plot
* Add settings option to forget imported data
* Import sources from google analytics
* Merge imported sources when queried
* Merge imported source data native data when querying sources
* Start converting metrics to atoms so they can be subqueried
This changes "visitors" and in some places "sources" to atoms. This does
not change the behaviour of the functions - the tests all pass unchanged
following this commit. This is necessary as joining subqueries requires
that the keys in `select` statements be atoms and not strings.
* Convery GA (direct) source to empty string
* Import utm campaign and utm medium from GA
* format
* Import all data types from GA into new tables
* Handle large amounts of more data more safely
* Fix some mistakes in tables
* Make GA requests in chunks of 5 queries
* Only display imported timeseries when there is no filter
* Correctly show last 30 minutes timeseries when 'realtime'
* Add with_imported key to Query struct
* Account for injected :is_not filter on sources from dashboard
* Also add tentative imported_utm_sources table
This needs a bit more work on the google import side, as GA do not
report sources and utm sources as distinct things.
* Return imported data to dashboard for rest of Sources panel
This extends the merge_imported function definition for sources to
utm_sources, utm_mediums and utm_campaigns too. This appears to be
working on the DB side but something is incomplete on the client side.
* Clear imported stats from all tables when requested
* Merge entry pages and exit pages from imported data into unfiltered dashboard view
This requires converting the `"visits"` and `"visit_duration"` metrics
to atoms so that they can be used in ecto subqueries.
* Display imported devices, browsers and OSs on dashboard
* Display imported country data on dashboard
* Add more metrics to entries/exits for modals
* make sure data is returned via API with correct keys
* Import regions and cities from GA
* Capitalize device upon import to match native data
* Leave query limits/offsets until after possibly joining with imported data
* Also import timeOnPage and pageviews for pages from GA
* imported_countries -> imported_locations
* Get timeOnPage and pageviews for pages from GA
These are needed for the pages modal, and for calculating exit rates for
exit pages.
* Add indicator to dashboard when imported data is being used
* Don't show imported data as separately line on main graph
* "bounce_rate" -> :bounce_rate, so it works in subqueries
* Drop imported browser and OS versions
These are not needed.
* Toggle displaying imported data by clicking indicator
* Parse referrers with RefInspector
- Use 'ga:fullReferrer' instead of 'ga:source'. This provides the actual
referrer host + path, whereas 'ga:source' includes utm_mediums and
other values when relevant.
- 'ga:fullReferror' does however include search engine names directly,
so they are manually checked for as RefInspector won't pick up on
these.
* Keep imported data indicator on dashboard and strikethrough when hidden
* Add unlink google button to import panel
* Rename some GA browsers and OSes to plausible versions
* Get main top pages and exit pages panels working correctly with imported data
* mix format
* Fetch time_on_pages for imported data when needed
* entry pages need to fetch bounces from GA
* "sample_percent" -> :sample_percent as only atoms can be used in subqueries
* Calculate bounce_rate for joined native and imported data for top pages modal
* Flip some query bindings around to be less misleading
* Fixup entry page modal visit durations
* mix format
* Fetch bounces and visit_duration for sources from GA
* add more source metrics used for data in modals
* Make sources modals display correct values
* imported_visitors: bounce_rate -> bounces, avg_visit_duration -> visit_duration
* Merge imported data into aggregate stats
* Reformat top graph side icons
* Ensure sample_percent is yielded from aggregate data
* filter event_props should be strings
* Hide imported data from frontend when using filter
* Fix existing tests
* fix tests
* Fix imported indicator appearing when filtering
* comma needed, lost when rebasing
* Import utm_terms and utm_content from GA
* Merge imported utm_term and utm_content
* Rename imported Countries data as Locations
* Set imported city schema field to int
* Remove utm_terms and utm_content when clearing imported
* Clean locations import from Google Analytics
- Country and region should be set to "" when GA provides "(not set)"
- City should be set to 0 for "unknown", as we cannot reliably import
city data from GA.
* Display imported region and city in dashboard
* os -> operating_system in some parts of code
The inconsistency of using os in some places and operating_system in
others causes trouble with subqueries and joins for the native and
imported data, which would require additional logic to account for. The
simplest solution is the just use a consistent word for all uses. This
doesn't make any user-facing or database changes.
* to_atom -> to_existing_atom
* format
* "events" metric -> :events
* ignore imported data when "events" in metrics
* update "bounce_rate"
* atomise some more metrics from new city and region api
* atomise some more metrics for email handlers
* "conversion_rate" -> :conversion_rate during csv export
* Move imported data stats code to own module
* Move imported timeseries function to Stats.Imported
* Use Timex.parse to import dates from GA
* has_imported_stats -> imported_source
* "time_on_page" -> :time_on_page
* Convert imported GA data to UTC
* Clean up GA request code a bit
There was some weird logic here with two separate lists that really
ought to be together, so this merges those.
* Fail sooner if GA timezone can't be identified
* Link imported tables to site by id
* imported_utm_content -> imported_utm_contents
* Imported GA from all of time
* Reorganise GA data fetch logic
- Fetch data from the start of time (2005)
- Check whether no data was fetched, and if so, inform user and don't
consider data to be imported.
* Clarify removal of "visits" data when it isn't in metrics
* Apply location filters from API
This makes it consistent with the sources etc which filter out 'Direct /
None' on the API side. These filters are used by both the native and
imported data handling code, which would otherwise both duplicate the
filters in their `where` clauses.
* Do not use changeset for setting site.imported_source
* Add all metrics to all dimensions
* Run GA import in the background
* Send email when GA import completes
* Add handler to insert imported data into tests and imported_browsers_factory
* Add remaining import data test factories
* Add imported location data to test
* Test main graph with imported data
* Add imported data to operating systems tests
* Add imported data to pages tests
* Add imported data to entry pages tests
* Add imported data to exit pages tests
* Add imported data to devices tests
* Add imported data to sources tests
* Add imported data to UTM tests
* Add new test module for the data import step
* Test import of sources GA data
* Test import of utm_mediums GA data
* Test import of utm_campaigns GA data
* Add tests for UTM terms
* Add tests for UTM contents
* Add test for importing pages and entry pages data from GA
* Add test for importing exit page data
* Fix module file name typo
* Add test for importing location data from GA
* Add test for importing devices data from GA
* Add test for importing browsers data from GA
* Add test for importing OS data from GA
* Paginate GA requests to download all data
* Bump clickhouse_ecto version
* Move RefInspector wrapper function into module
* Drop timezone transform on import
* Order imported by side_id then date
* More strings -> atoms
Also changes a conditional to be a bit nicer
* Remove parallelisation of data import
* Split sources and UTM sources from fetched GA data
GA has only a "source" dimension and no "UTM source" dimension. Instead
it returns these combined. The logic herein to tease these apart is:
1. "(direct)" -> it's a direct source
2. if the source is a domain -> it's a source
3. "google" -> it's from adwords; let's make this a UTM source "adwords"
4. else -> just a UTM source
* Keep prop names in queries as strings
* fix typo
* Fix import
* Insert data to clickhouse in batches
* Fix link when removing imported data
* Merge source tables
* Import hostname as well as pathname
* Record start and end time of imported data
* Track import progress
* Fix month interval with imported data
* Do not JOIN when imported date range has no overlap
* Fix time on page using exits
Co-authored-by: mcol <mcol@posteo.net>
2022-03-11 00:04:59 +03:00
|
|
|
Plausible.Imported.forget(site)
|
|
|
|
|
|
|
|
site
|
|
|
|
|> Plausible.Site.remove_imported_data()
|
|
|
|
|> Repo.update!()
|
|
|
|
|
|
|
|
conn
|
2022-03-23 12:58:36 +03:00
|
|
|
|> put_flash(:success, "Imported data has been cleared")
|
[Continued] Google Analytics import (#1753)
* Add has_imported_stats boolean to Site
* Add Google Analytics import panel to general settings
* Get GA profiles to display in import settings panel
* Add import_from_google method as entrypoint to import data
* Add imported_visitors table
* Remove conflicting code from migration
* Import visitors data into clickhouse database
* Pass another dataset to main graph for rendering in red
This adds another entry to the JSON data returned via the main graph API
called `imported_plot`, which is similar to `plot` in form but will be
completed with previously imported data. Currently it simply returns
the values from `plot` / 2. The data is rendered in the main graph in
red without fill, and without an indicator for the present. Rationale:
imported data will not continue to grow so there is no projection
forward, only backwards.
* Hook imported GA data to dashboard timeseries plot
* Add settings option to forget imported data
* Import sources from google analytics
* Merge imported sources when queried
* Merge imported source data native data when querying sources
* Start converting metrics to atoms so they can be subqueried
This changes "visitors" and in some places "sources" to atoms. This does
not change the behaviour of the functions - the tests all pass unchanged
following this commit. This is necessary as joining subqueries requires
that the keys in `select` statements be atoms and not strings.
* Convery GA (direct) source to empty string
* Import utm campaign and utm medium from GA
* format
* Import all data types from GA into new tables
* Handle large amounts of more data more safely
* Fix some mistakes in tables
* Make GA requests in chunks of 5 queries
* Only display imported timeseries when there is no filter
* Correctly show last 30 minutes timeseries when 'realtime'
* Add with_imported key to Query struct
* Account for injected :is_not filter on sources from dashboard
* Also add tentative imported_utm_sources table
This needs a bit more work on the google import side, as GA do not
report sources and utm sources as distinct things.
* Return imported data to dashboard for rest of Sources panel
This extends the merge_imported function definition for sources to
utm_sources, utm_mediums and utm_campaigns too. This appears to be
working on the DB side but something is incomplete on the client side.
* Clear imported stats from all tables when requested
* Merge entry pages and exit pages from imported data into unfiltered dashboard view
This requires converting the `"visits"` and `"visit_duration"` metrics
to atoms so that they can be used in ecto subqueries.
* Display imported devices, browsers and OSs on dashboard
* Display imported country data on dashboard
* Add more metrics to entries/exits for modals
* make sure data is returned via API with correct keys
* Import regions and cities from GA
* Capitalize device upon import to match native data
* Leave query limits/offsets until after possibly joining with imported data
* Also import timeOnPage and pageviews for pages from GA
* imported_countries -> imported_locations
* Get timeOnPage and pageviews for pages from GA
These are needed for the pages modal, and for calculating exit rates for
exit pages.
* Add indicator to dashboard when imported data is being used
* Don't show imported data as separately line on main graph
* "bounce_rate" -> :bounce_rate, so it works in subqueries
* Drop imported browser and OS versions
These are not needed.
* Toggle displaying imported data by clicking indicator
* Parse referrers with RefInspector
- Use 'ga:fullReferrer' instead of 'ga:source'. This provides the actual
referrer host + path, whereas 'ga:source' includes utm_mediums and
other values when relevant.
- 'ga:fullReferror' does however include search engine names directly,
so they are manually checked for as RefInspector won't pick up on
these.
* Keep imported data indicator on dashboard and strikethrough when hidden
* Add unlink google button to import panel
* Rename some GA browsers and OSes to plausible versions
* Get main top pages and exit pages panels working correctly with imported data
* mix format
* Fetch time_on_pages for imported data when needed
* entry pages need to fetch bounces from GA
* "sample_percent" -> :sample_percent as only atoms can be used in subqueries
* Calculate bounce_rate for joined native and imported data for top pages modal
* Flip some query bindings around to be less misleading
* Fixup entry page modal visit durations
* mix format
* Fetch bounces and visit_duration for sources from GA
* add more source metrics used for data in modals
* Make sources modals display correct values
* imported_visitors: bounce_rate -> bounces, avg_visit_duration -> visit_duration
* Merge imported data into aggregate stats
* Reformat top graph side icons
* Ensure sample_percent is yielded from aggregate data
* filter event_props should be strings
* Hide imported data from frontend when using filter
* Fix existing tests
* fix tests
* Fix imported indicator appearing when filtering
* comma needed, lost when rebasing
* Import utm_terms and utm_content from GA
* Merge imported utm_term and utm_content
* Rename imported Countries data as Locations
* Set imported city schema field to int
* Remove utm_terms and utm_content when clearing imported
* Clean locations import from Google Analytics
- Country and region should be set to "" when GA provides "(not set)"
- City should be set to 0 for "unknown", as we cannot reliably import
city data from GA.
* Display imported region and city in dashboard
* os -> operating_system in some parts of code
The inconsistency of using os in some places and operating_system in
others causes trouble with subqueries and joins for the native and
imported data, which would require additional logic to account for. The
simplest solution is the just use a consistent word for all uses. This
doesn't make any user-facing or database changes.
* to_atom -> to_existing_atom
* format
* "events" metric -> :events
* ignore imported data when "events" in metrics
* update "bounce_rate"
* atomise some more metrics from new city and region api
* atomise some more metrics for email handlers
* "conversion_rate" -> :conversion_rate during csv export
* Move imported data stats code to own module
* Move imported timeseries function to Stats.Imported
* Use Timex.parse to import dates from GA
* has_imported_stats -> imported_source
* "time_on_page" -> :time_on_page
* Convert imported GA data to UTC
* Clean up GA request code a bit
There was some weird logic here with two separate lists that really
ought to be together, so this merges those.
* Fail sooner if GA timezone can't be identified
* Link imported tables to site by id
* imported_utm_content -> imported_utm_contents
* Imported GA from all of time
* Reorganise GA data fetch logic
- Fetch data from the start of time (2005)
- Check whether no data was fetched, and if so, inform user and don't
consider data to be imported.
* Clarify removal of "visits" data when it isn't in metrics
* Apply location filters from API
This makes it consistent with the sources etc which filter out 'Direct /
None' on the API side. These filters are used by both the native and
imported data handling code, which would otherwise both duplicate the
filters in their `where` clauses.
* Do not use changeset for setting site.imported_source
* Add all metrics to all dimensions
* Run GA import in the background
* Send email when GA import completes
* Add handler to insert imported data into tests and imported_browsers_factory
* Add remaining import data test factories
* Add imported location data to test
* Test main graph with imported data
* Add imported data to operating systems tests
* Add imported data to pages tests
* Add imported data to entry pages tests
* Add imported data to exit pages tests
* Add imported data to devices tests
* Add imported data to sources tests
* Add imported data to UTM tests
* Add new test module for the data import step
* Test import of sources GA data
* Test import of utm_mediums GA data
* Test import of utm_campaigns GA data
* Add tests for UTM terms
* Add tests for UTM contents
* Add test for importing pages and entry pages data from GA
* Add test for importing exit page data
* Fix module file name typo
* Add test for importing location data from GA
* Add test for importing devices data from GA
* Add test for importing browsers data from GA
* Add test for importing OS data from GA
* Paginate GA requests to download all data
* Bump clickhouse_ecto version
* Move RefInspector wrapper function into module
* Drop timezone transform on import
* Order imported by side_id then date
* More strings -> atoms
Also changes a conditional to be a bit nicer
* Remove parallelisation of data import
* Split sources and UTM sources from fetched GA data
GA has only a "source" dimension and no "UTM source" dimension. Instead
it returns these combined. The logic herein to tease these apart is:
1. "(direct)" -> it's a direct source
2. if the source is a domain -> it's a source
3. "google" -> it's from adwords; let's make this a UTM source "adwords"
4. else -> just a UTM source
* Keep prop names in queries as strings
* fix typo
* Fix import
* Insert data to clickhouse in batches
* Fix link when removing imported data
* Merge source tables
* Import hostname as well as pathname
* Record start and end time of imported data
* Track import progress
* Fix month interval with imported data
* Do not JOIN when imported date range has no overlap
* Fix time on page using exits
Co-authored-by: mcol <mcol@posteo.net>
2022-03-11 00:04:59 +03:00
|
|
|
|> redirect(to: Routes.site_path(conn, :settings_general, site.domain))
|
|
|
|
|
|
|
|
true ->
|
|
|
|
conn
|
|
|
|
|> put_flash(:error, "No data has been imported")
|
|
|
|
|> redirect(to: Routes.site_path(conn, :settings_general, site.domain))
|
|
|
|
end
|
|
|
|
end
|
2019-09-02 14:29:19 +03:00
|
|
|
end
|