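// Query-string helpers for the dashboard: parsing the current query into a
// query object, navigating to a new query, and Link/Button components that
// update it in place.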
import React from 'react'
import { Link, withRouter } from 'react-router-dom'
import {formatDay, formatMonthYYYY, nowForSite, parseUTCDate} from './util/date'
import * as storage from './util/storage'
import { COMPARISON_DISABLED_PERIODS } from './comparison-input'
const PERIODS = ['realtime', 'day', 'month', '7d', '30d', '6mo', '12mo', 'year', 'all', 'custom']
export function parseQuery(querystring, site) {
  const q = new URLSearchParams(querystring)
  let period = q.get('period')
  const periodKey = `period__${site.domain}`

  // Remember the last non-transient period per site so the dashboard reopens on it;
  // otherwise fall back to the stored value, then to the 30-day default.
  if (PERIODS.includes(period)) {
    if (period !== 'custom' && period !== 'realtime') storage.setItem(periodKey, period)
  } else if (storage.getItem(periodKey)) {
    period = storage.getItem(periodKey)
  } else {
    period = '30d'
  }

  let comparison = q.get('comparison')
  if (COMPARISON_DISABLED_PERIODS.includes(period)) comparison = null

  return {
    period,
    date: q.get('date') ? parseUTCDate(q.get('date')) : nowForSite(site),
    from: q.get('from') ? parseUTCDate(q.get('from')) : undefined,
    to: q.get('to') ? parseUTCDate(q.get('to')) : undefined,
    with_imported: q.get('with_imported') ? q.get('with_imported') === 'true' : true,
    comparison: comparison,
    filters: {
      'goal': q.get('goal'),
      'props': JSON.parse(q.get('props')),
      'source': q.get('source'),
      'utm_medium': q.get('utm_medium'),
      'utm_source': q.get('utm_source'),
      'utm_campaign': q.get('utm_campaign'),
      'utm_content': q.get('utm_content'),
      'utm_term': q.get('utm_term'),
      'referrer': q.get('referrer'),
      'screen': q.get('screen'),
      'browser': q.get('browser'),
      'browser_version': q.get('browser_version'),
      'os': q.get('os'),
      'os_version': q.get('os_version'),
      'country': q.get('country'),
      'region': q.get('region'),
      'city': q.get('city'),
      'page': q.get('page'),
      'entry_page': q.get('entry_page'),
      'exit_page': q.get('exit_page')
    }
  }
}
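
// Illustrative sketch of parseQuery (hypothetical values, not executed here):
//   const query = parseQuery('?period=7d&goal=Signup', { domain: 'example.com' })
//   query.period          // '7d' (also persisted under 'period__example.com')
//   query.filters.goal    // 'Signup'
//   query.with_imported   // true (default when the param is absent)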
export function appliedFilters(query) {
  return Object.keys(query.filters)
    .map((key) => [key, query.filters[key]])
    .filter(([_key, value]) => !!value);
}
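
// For example, with filters { goal: 'Signup', page: null, country: null } this
// returns [['goal', 'Signup']] — only the filters that are actually set.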
// Merges `data` into the current location's query string. Keys with falsy
// values are removed from the query string rather than serialized.
function generateQueryString(data) {
  const query = new URLSearchParams(window.location.search)

  Object.keys(data).forEach(key => {
    if (!data[key]) {
      query.delete(key)
      return
    }

    query.set(key, data[key])
  })

  return query.toString()
}
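
// Sketch (assumes the page is currently at '?period=month'):
//   generateQueryString({ period: '7d', goal: null })
//   // => 'period=7d' — 'period' is overwritten, the falsy 'goal' key is dropped.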
export function navigateToQuery(history, queryFrom, newData) {
  // If we update any data that we also store in localStorage, make sure going
  // back in history reverts it.
  if (newData.period && newData.period !== queryFrom.period) {
    const replaceQuery = new URLSearchParams(window.location.search)
    replaceQuery.set('period', queryFrom.period)
    history.replace({ search: replaceQuery.toString() })
  }

  // Then push the new query to the history.
  history.push({ search: generateQueryString(newData) })
}
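
// Typical call site (illustrative): switching the dashboard to the last month
//   navigateToQuery(this.props.history, currentQuery, { period: 'month' })
// The current period is first written back via history.replace so the browser's
// Back button restores it, then the new query is pushed.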
class QueryLink extends React.Component {
  constructor(props) {
    super(props)
    this.onClick = this.onClick.bind(this)
  }

  onClick(e) {
    e.preventDefault()
    navigateToQuery(this.props.history, this.props.query, this.props.to)
    if (this.props.onClick) this.props.onClick(e)
  }

  render() {
    const { to, ...props } = this.props
    return (
      <Link
        {...props}
        to={{ pathname: window.location.pathname, search: generateQueryString(to) }}
        onClick={this.onClick}
      />
    )
  }
}
const QueryLinkWithRouter = withRouter(QueryLink)
export { QueryLinkWithRouter as QueryLink };
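
// Illustrative JSX usage (hypothetical props):
//   <QueryLink query={query} to={{ period: 'month' }} className="button">
//     This month
//   </QueryLink>
// Clicking navigates via navigateToQuery, keeping the current pathname and
// merging `to` into the existing query string.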
function QueryButton({history, query, to, disabled, className, children, onClick}) {
  return (
    <button
      className={className}
      onClick={(event) => {
        event.preventDefault()
        navigateToQuery(history, query, to)
        if (onClick) onClick(event)
        history.push({ pathname: window.location.pathname, search: generateQueryString(to) })
      }}
      type="button"
      disabled={disabled}
    >
      {children}
    </button>
  )
}
const QueryButtonWithRouter = withRouter(QueryButton)
export { QueryButtonWithRouter as QueryButton };
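
// Illustrative JSX usage (hypothetical props):
//   <QueryButton query={query} to={{ page: null }} className="button">
//     Remove page filter
//   </QueryButton>
// Clicking drops the `page` parameter from the query string.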
export function toHuman(query) {
  if (query.period === 'day') {
    return `on ${formatDay(query.date)}`
  } if (query.period === 'month') {
    return `in ${formatMonthYYYY(query.date)}`
  } if (query.period === '7d') {
    return 'in the last 7 days'
  } if (query.period === '30d') {
    return 'in the last 30 days'
  } if (query.period === '6mo') {
    return 'in the last 6 months'
  } if (query.period === '12mo') {
    return 'in the last 12 months'
  }
  return ''
}
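
// e.g. toHuman({ period: '7d' }) === 'in the last 7 days';
//      toHuman({ period: 'realtime' }) === '' (periods without a phrase fall through).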
export function eventName(query) {
  if (query.filters.goal) {
    if (query.filters.goal.startsWith('Visit ')) {
      return 'pageviews'
    }
    return 'events'
  }
  return 'pageviews'
}
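
// e.g. eventName({ filters: { goal: 'Visit /register' } }) === 'pageviews'
//      eventName({ filters: { goal: 'Signup' } })          === 'events'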
export const formattedFilters = {
  'goal': 'Goal',
  'props': 'Property',
  'prop_key': 'Property',
  'prop_value': 'Value',
  'source': 'Source',
  'utm_medium': 'UTM Medium',
  'utm_source': 'UTM Source',
  'utm_campaign': 'UTM Campaign',
  'utm_content': 'UTM Content',
  'utm_term': 'UTM Term',
  'referrer': 'Referrer URL',
  'screen': 'Screen size',
  'browser': 'Browser',
  'browser_version': 'Browser Version',
  'os': 'Operating System',
  'os_version': 'Operating System Version',
  'country': 'Country',
  'region': 'Region',
  'city': 'City',
  'page': 'Page',
  'entry_page': 'Entry Page',
  'exit_page': 'Exit Page'
}
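
// Lookup table for human-readable filter labels,
// e.g. formattedFilters['utm_medium'] === 'UTM Medium'.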