defmodule PlausibleWeb.StatsController do
  @moduledoc """
  This controller is responsible for rendering stats dashboards.

  The stats dashboards are currently the only part of the app that uses client-side
  rendering. Since the dashboards are heavily interactive, they are built with React,
  which is an appropriate choice for highly interactive browser UIs.

  <div class="mermaid">
  sequenceDiagram
    Browser->>StatsController: GET /mydomain.com
    StatsController-->>Browser: StatsView.render("stats.html")
    Note left of Browser: ReactDom.render(Dashboard)

    Browser -) Api.StatsController: GET /api/stats/mydomain.com/top-stats
    Api.StatsController --) Browser: {"top_stats": [...]}
    Note left of Browser: TopStats.render()

    Browser -) Api.StatsController: GET /api/stats/mydomain.com/main-graph
    Api.StatsController --) Browser: [{"plot": [...], "labels": [...]}, ...]
    Note left of Browser: VisitorGraph.render()

    Browser -) Api.StatsController: GET /api/stats/mydomain.com/sources
    Api.StatsController --) Browser: [{"name": "Google", "visitors": 292150}, ...]
    Note left of Browser: Sources.render()

    Note over Browser,StatsController: And so on, for all reports in the viewport
  </div>

  The reasoning for this sequence is as follows:

  1. First paint is fast because it doesn't do any data aggregation yet - good UX
  2. The basic structure of the dashboard is rendered with spinners before reports are ready - good UX
  3. Rendering on the frontend allows for maximum interactivity. Re-rendering and re-fetching can be as granular as needed.
  4. Routing on the frontend allows the user to navigate the dashboard without reloading the page and losing context.
  5. Rendering on the frontend allows caching results in the browser to reduce pressure on backends and storage.
     5.1. No client-side caching has been implemented yet. This is still theoretical. See https://github.com/plausible/analytics/discussions/1278
     5.2. This is a big potential opportunity, because analytics data is mostly immutable. Clients can cache all historical data.
  6. Since frontend rendering & navigation are harder to build and maintain than regular server-rendered HTML, we don't use
     SPA-style rendering anywhere else. The only place currently where the benefits outweigh the costs is the dashboard.
  """

  use PlausibleWeb, :controller
  use Plausible.Repo

  alias Plausible.Sites
  alias Plausible.Stats.{Query, Filters}
  alias PlausibleWeb.Api

  plug(PlausibleWeb.AuthorizeSiteAccess when action in [:stats, :csv_export])
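
  # Renders one of three screens depending on the site's state: the dashboard
  # itself, a "waiting for first pageview" screen, or a "site locked" notice.
  # Note that super admins can view stats even when the site is locked.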
  def stats(%{assigns: %{site: site}} = conn, _params) do
    stats_start_date = Plausible.Sites.stats_start_date(site)
    can_see_stats? = not Sites.locked?(site) or conn.assigns[:current_user_role] == :super_admin

    cond do
      stats_start_date && can_see_stats? ->
        demo = site.domain == PlausibleWeb.Endpoint.host()
        offer_email_report = get_session(conn, site.domain <> "_offer_email_report")

        conn
        |> assign(:skip_plausible_tracking, !demo)
        |> remove_email_report_banner(site)
        |> put_resp_header("x-robots-tag", "noindex")
        |> render("stats.html",
          site: site,
          has_goals: Plausible.Sites.has_goals?(site),
          stats_start_date: stats_start_date,
          native_stats_start_date: NaiveDateTime.to_date(site.native_stats_start_at),
          title: "Plausible · " <> site.domain,
          offer_email_report: offer_email_report,
          demo: demo,
          flags: get_flags(conn.assigns[:current_user]),
          is_dbip: is_dbip()
        )

      !stats_start_date && can_see_stats? ->
        conn
        |> assign(:skip_plausible_tracking, true)
        |> render("waiting_first_pageview.html", site: site)

      Sites.locked?(site) ->
        owner = Sites.owner_for(site)

        conn
        |> assign(:skip_plausible_tracking, true)
        |> render("site_locked.html", owner: owner, site: site)
    end
  end

  @doc """
  The export is limited to 100 entries for pages and 300 entries for all other reports, because bigger
  result sets start causing failures. Since we request data like time on page or bounce_rate for pages
  in a separate query using the IN filter, bigger result sets make those requests balloon in payload size.
  """
  def csv_export(conn, params) do
    site = conn.assigns[:site]
    query = Query.from(site, params) |> Filters.add_prefix()

    metrics =
      if query.filters["event:goal"] do
        [:visitors]
      else
        [:visitors, :pageviews, :visits, :views_per_visit, :bounce_rate, :visit_duration]
      end

    graph = Plausible.Stats.timeseries(site, query, metrics)
    columns = [:date | metrics]

    column_headers =
      if query.filters["event:goal"] do
        [:date, :unique_conversions]
      else
        columns
      end

    visitors =
      Enum.map(graph, fn row -> Enum.map(columns, &row[&1]) end)
      |> (fn data -> [column_headers | data] end).()
      |> CSV.encode()
      |> Enum.join()

    # :zip.create/3 works with Erlang charlists, hence the single-quoted
    # literals for the archive and entry names.
    filename =
      'Plausible export #{params["domain"]} #{Timex.format!(query.date_range.first, "{ISOdate} ")} to #{Timex.format!(query.date_range.last, "{ISOdate} ")}.zip'

    params = Map.merge(params, %{"limit" => "300", "csv" => "True", "detailed" => "True"})
    limited_params = Map.merge(params, %{"limit" => "100"})

    csvs = %{
      'sources.csv' => fn -> Api.StatsController.sources(conn, params) end,
      'utm_mediums.csv' => fn -> Api.StatsController.utm_mediums(conn, params) end,
      'utm_sources.csv' => fn -> Api.StatsController.utm_sources(conn, params) end,
      'utm_campaigns.csv' => fn -> Api.StatsController.utm_campaigns(conn, params) end,
      'utm_contents.csv' => fn -> Api.StatsController.utm_contents(conn, params) end,
      'utm_terms.csv' => fn -> Api.StatsController.utm_terms(conn, params) end,
      'pages.csv' => fn -> Api.StatsController.pages(conn, limited_params) end,
      'entry_pages.csv' => fn -> Api.StatsController.entry_pages(conn, params) end,
      'exit_pages.csv' => fn -> Api.StatsController.exit_pages(conn, limited_params) end,
      'countries.csv' => fn -> Api.StatsController.countries(conn, params) end,
      'regions.csv' => fn -> Api.StatsController.regions(conn, params) end,
      'cities.csv' => fn -> Api.StatsController.cities(conn, params) end,
      'browsers.csv' => fn -> Api.StatsController.browsers(conn, params) end,
      'operating_systems.csv' => fn -> Api.StatsController.operating_systems(conn, params) end,
      'devices.csv' => fn -> Api.StatsController.screen_sizes(conn, params) end,
      'conversions.csv' => fn -> Api.StatsController.conversions(conn, params) end,
      'prop_breakdown.csv' => fn -> Api.StatsController.all_props_breakdown(conn, params) end
    }

    # Execute all report queries concurrently
    csv_values =
      Map.values(csvs)
      |> Plausible.ClickhouseRepo.parallel_tasks()

    csvs =
      Map.keys(csvs)
      |> Enum.zip(csv_values)

    csvs = [{'visitors.csv', visitors} | csvs]

    {:ok, {_, zip_content}} = :zip.create(filename, csvs, [:memory])

    conn
    |> put_resp_content_type("application/zip")
    |> put_resp_header("content-disposition", "attachment; filename=\"#{filename}\"")
    |> delete_resp_cookie("exporting")
    |> send_resp(200, zip_content)
  end

  @doc """
  Authorizes and renders a shared link:

  1. Shared link with no password protection: just needs to make sure the shared link entry is still
  in our database. This check makes sure shared link access can be revoked by the site admins. If the
  shared link exists, render it directly.

  2. Shared link with password protection: Same checks as without the password, but an extra step is taken to
  protect the page with a password. When the user passes the password challenge, a cookie is set with Plausible.Auth.Token.sign_shared_link().
  The cookie allows the user to access the dashboard for 24 hours without entering the password again.

  ### Backwards compatibility

  The URL format for shared links was changed in [this pull request](https://github.com/plausible/analytics/pull/752) in order
  to make the URLs easier to bookmark. The old format is still supported alongside the new one so as not to break old links.

  See: https://plausible.io/docs/shared-links
  """
  def shared_link(conn, %{"domain" => domain, "auth" => auth}) do
    case find_shared_link(domain, auth) do
      {:password_protected, shared_link} ->
        render_password_protected_shared_link(conn, shared_link)

      {:unlisted, shared_link} ->
        render_shared_link(conn, shared_link)

      :not_found ->
        render_error(conn, 404)
    end
  end

  @old_format_deprecation_date ~N[2022-01-01 00:00:00]

  # Links created before the deprecation date may still be bookmarked in the
  # old /share/:slug format, so the "domain" path segment actually holds the
  # slug here; look it up by slug alone and redirect to the new format.
  def shared_link(conn, %{"domain" => slug}) do
    shared_link =
      Repo.one(
        from(l in Plausible.Site.SharedLink,
          where: l.slug == ^slug and l.inserted_at < ^@old_format_deprecation_date,
          preload: :site
        )
      )

    if shared_link do
      new_link_format = Routes.stats_path(conn, :shared_link, shared_link.site.domain, auth: slug)
      redirect(conn, to: new_link_format)
    else
      render_error(conn, 404)
    end
  end

  def shared_link(conn, _) do
    render_error(conn, 400)
  end

  defp render_password_protected_shared_link(conn, shared_link) do
    # Render the dashboard only when the request carries a valid signed cookie
    # for this exact slug; otherwise fall back to the password prompt.
    with conn <- Plug.Conn.fetch_cookies(conn),
         {:ok, token} <- Map.fetch(conn.req_cookies, shared_link_cookie_name(shared_link.slug)),
         {:ok, %{slug: token_slug}} <- Plausible.Auth.Token.verify_shared_link(token),
         true <- token_slug == shared_link.slug do
      render_shared_link(conn, shared_link)
    else
      _e ->
        conn
        |> assign(:skip_plausible_tracking, true)
        |> render("shared_link_password.html",
          link: shared_link,
          layout: {PlausibleWeb.LayoutView, "focus.html"}
        )
    end
  end

  defp find_shared_link(domain, auth) do
    link_query =
      from(link in Plausible.Site.SharedLink,
        inner_join: site in assoc(link, :site),
        where: link.slug == ^auth,
        where: site.domain == ^domain,
        limit: 1,
        preload: [site: site]
      )

    case Repo.one(link_query) do
      %Plausible.Site.SharedLink{password_hash: hash} = link when not is_nil(hash) ->
        {:password_protected, link}

      %Plausible.Site.SharedLink{} = link ->
        {:unlisted, link}

      nil ->
        :not_found
    end
  end

  def authenticate_shared_link(conn, %{"slug" => slug, "password" => password}) do
    shared_link =
      Repo.get_by(Plausible.Site.SharedLink, slug: slug)
      |> Repo.preload(:site)

    if shared_link do
      if Plausible.Auth.Password.match?(password, shared_link.password_hash) do
        token = Plausible.Auth.Token.sign_shared_link(slug)

        conn
        |> put_resp_cookie(shared_link_cookie_name(slug), token)
        |> redirect(to: "/share/#{URI.encode_www_form(shared_link.site.domain)}?auth=#{slug}")
      else
        conn
        |> assign(:skip_plausible_tracking, true)
        |> render("shared_link_password.html",
          link: shared_link,
          error: "Incorrect password. Please try again.",
          layout: {PlausibleWeb.LayoutView, "focus.html"}
        )
      end
    else
      render_error(conn, 404)
    end
  end

  defp render_shared_link(conn, shared_link) do
    cond do
      !shared_link.site.locked ->
        conn
        |> assign(:skip_plausible_tracking, true)
        |> put_resp_header("x-robots-tag", "noindex")
        # Allow the shared dashboard to be embedded in an iframe
        |> delete_resp_header("x-frame-options")
        |> render("stats.html",
          site: shared_link.site,
          has_goals: Sites.has_goals?(shared_link.site),
          stats_start_date: shared_link.site.stats_start_date,
          native_stats_start_date: NaiveDateTime.to_date(shared_link.site.native_stats_start_at),
          title: "Plausible · " <> shared_link.site.domain,
          offer_email_report: false,
          demo: false,
          skip_plausible_tracking: true,
          shared_link_auth: shared_link.slug,
          embedded: conn.params["embed"] == "true",
          background: conn.params["background"],
          theme: conn.params["theme"],
          flags: get_flags(conn.assigns[:current_user]),
          is_dbip: is_dbip()
        )

      Sites.locked?(shared_link.site) ->
        owner = Sites.owner_for(shared_link.site)

        conn
        |> assign(:skip_plausible_tracking, true)
        |> render("site_locked.html", owner: owner, site: shared_link.site)
    end
  end

  defp remove_email_report_banner(conn, site) do
    if conn.assigns[:current_user] do
      delete_session(conn, site.domain <> "_offer_email_report")
    else
      conn
    end
  end

  defp shared_link_cookie_name(slug), do: "shared-link-" <> slug

  defp get_flags(user) do
    %{
      custom_dimension_filter: FunWithFlags.enabled?(:custom_dimension_filter, for: user),
      views_per_visit_metric: FunWithFlags.enabled?(:views_per_visit_metric, for: user)
    }
  end

  defp is_dbip() do
    is_or_nil =
      if Application.get_env(:plausible, :is_selfhost) do
        if type = Plausible.Geo.database_type() do
          String.starts_with?(type, "DBIP")
        end
      end

    # The nested ifs yield nil when not self-hosted or when no geo database
    # is configured, so coerce the result to a strict boolean.
    !!is_or_nil
  end
end