WIP graph to API v2

This commit is contained in:
Robert Joonas 2024-09-05 12:46:57 +03:00
parent 09999f8b0c
commit b4853422d8
11 changed files with 1693 additions and 36 deletions

View File

@ -1,5 +1,6 @@
import { formatISO } from './util/date'
import { serializeApiFilters } from './util/filters'
import { addPrefixes, serializeApiFilters } from './util/filters'
import { apiPath } from './util/url'
let abortController = new AbortController()
let SHARED_LINK_AUTH = null
@ -74,3 +75,44 @@ export function put(url, body) {
body: JSON.stringify(body)
})
}
// Fetches main-graph data from the v2 stats API.
// `params` carries `interval` (time bucket) and `metrics` (list of metric names).
// Returns the raw fetch() promise; callers unwrap the JSON themselves.
export function fetchGraph(site, query, params) {
  const url = apiPath(site, '/main-graph-v2')
  const { interval, metrics } = params

  // Map the dashboard period onto the v2 `date_range` format:
  // realtime -> fixed "30m" shorthand, custom -> [from, to] ISO pair,
  // anything else -> the period keyword as-is.
  let dateRange
  if (query.period === 'realtime') {
    dateRange = '30m'
  } else if (query.period === 'custom') {
    // was `==`; use strict equality consistently with the branch above
    dateRange = [formatISO(query.from), formatISO(query.to)]
  } else {
    dateRange = query.period
  }

  const body = {
    dimensions: [`time:${interval}`],
    metrics,
    date_range: dateRange,
    date: formatISO(query.date),
    filters: addPrefixes(query.filters),
    // Only a literal `true` opts into imported data
    include: { imports: query.with_imported === true }
  }

  if (query.comparison) {
    const comparisonParams = {
      mode: query.comparison,
      match_day_of_week: query.match_day_of_week
    }
    if (query.comparison === 'custom') {
      comparisonParams.from = formatISO(query.compare_from)
      comparisonParams.to = formatISO(query.compare_to)
    }
    body.comparison_params = comparisonParams
  }

  return fetch(url, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(body)
  })
}

View File

@ -27,8 +27,13 @@ function fetchTopStats(site, query) {
}
// Fetches the main-graph series for a single metric, routing through the
// v2 API when the site's `dashboard_api_v2` feature flag is on.
// NOTE: the previous revision's lines were left interleaved here, redeclaring
// `params` and returning before the flag check — removed.
function fetchMainGraph(site, query, metric, interval) {
  if (site.flags.dashboard_api_v2) {
    // v2 takes a list of metrics; the dashboard graph plots exactly one.
    const params = { metrics: [metric], interval }
    return api.fetchGraph(site, query, params).then((res) => { return res.json() })
  } else {
    const params = { metric, interval }
    return api.get(url.apiPath(site, '/main-graph'), query, params)
  }
}
export default function VisitorGraph({ updateImportedDataInView }) {

View File

@ -184,16 +184,18 @@ export function cleanLabels(filters, labels, mergedFilterKey, mergedLabels) {
const EVENT_FILTER_KEYS = new Set(["name", "page", "goal", "hostname"])
// Prefixes each dashboard filter key with its API scope: `event:` for
// event-level keys (name/page/goal/hostname and custom props), `visit:`
// for everything else. Input/output shape: [operation, key, clauses] triples.
export function addPrefixes(filters) {
  return filters.map(([operation, filterKey, clauses]) => {
    let apiFilterKey = `visit:${filterKey}`
    if (filterKey.startsWith(EVENT_PROPS_PREFIX) || EVENT_FILTER_KEYS.has(filterKey)) {
      apiFilterKey = `event:${filterKey}`
    }
    return [operation, apiFilterKey, clauses]
  })
}

// JSON-encodes the prefixed filters for the legacy (v1) query string.
export function serializeApiFilters(filters) {
  return JSON.stringify(addPrefixes(filters))
}
export function fetchSuggestions(apiPath, query, input, additionalFilter) {

View File

@ -55,6 +55,17 @@ defmodule Plausible.Stats.Goal.Revenue do
end
end
@doc """
Two-arity variant for v2 queries: delegates to
`get_revenue_tracking_currency/3` using the query's own metrics and returns
`{currency, query}` (with the returned metrics written back into the query)
rather than `{currency, metrics}`.
"""
def get_revenue_tracking_currency(site, %Query{v2: true} = query) do
  {currency, updated_metrics} = get_revenue_tracking_currency(site, query, query.metrics)
  {currency, struct!(query, metrics: updated_metrics)}
end
def cast_revenue_metrics_to_money([%{goal: _goal} | _rest] = results, revenue_goals)
when is_list(revenue_goals) do
for result <- results do

View File

@ -24,6 +24,8 @@ defmodule Plausible.Stats.Filters.QueryParser do
{:ok, dimensions} <- parse_dimensions(Map.get(params, "dimensions", [])),
{:ok, order_by} <- parse_order_by(Map.get(params, "order_by")),
{:ok, include} <- parse_include(Map.get(params, "include", %{})),
include <-
maybe_include_realtime_labels(include, dimensions, Map.get(params, "date_range")),
preloaded_goals <- preload_goals_if_needed(site, filters, dimensions),
query = %{
metrics: metrics,
@ -274,6 +276,7 @@ defmodule Plausible.Stats.Filters.QueryParser do
end
defp parse_time("time"), do: {:ok, "time"}
defp parse_time("time:minute"), do: {:ok, "time:minute"}
defp parse_time("time:hour"), do: {:ok, "time:hour"}
defp parse_time("time:day"), do: {:ok, "time:day"}
defp parse_time("time:week"), do: {:ok, "time:week"}
@ -298,6 +301,12 @@ defmodule Plausible.Stats.Filters.QueryParser do
defp parse_include_value({"time_labels", value}) when is_boolean(value),
do: {:ok, {:time_labels, value}}
# A realtime query (30m range bucketed by minute) implicitly opts into
# realtime labels; every other combination leaves `include` untouched.
defp maybe_include_realtime_labels(include, ["time:minute"], "30m"),
  do: Map.put(include, :realtime_labels, true)

defp maybe_include_realtime_labels(include, _dimensions, _date_range), do: include
defp parse_filter_key_string(filter_key, error_message \\ "") do
case filter_key do
"event:props:" <> property_name ->

View File

@ -27,13 +27,21 @@ defmodule Plausible.Stats.JSONSchema do
|> JSONPointer.add!("#/definitions/metric/oneOf/0", %{
"const" => "time_on_page"
})
|> JSONPointer.add!("#/definitions/metric/oneOf/0", %{
"const" => "total_revenue"
})
|> JSONPointer.add!("#/definitions/metric/oneOf/0", %{
"const" => "average_revenue"
})
|> JSONPointer.add!("#/definitions/date_range/oneOf/0", %{
"const" => "30m"
})
|> JSONPointer.add!("#/definitions/date_range/oneOf/0", %{
"const" => "realtime"
})
|> JSONPointer.add!("#/definitions/time_dimensions/enum/0", "time:minute")
|> JSONPointer.add!("#/properties/date", %{"type" => "string"})
|> JSONPointer.add!("#/properties/domain", %{"type" => "string"})
|> ExJsonSchema.Schema.resolve()
def validate(schema_type, params) do

View File

@ -1,13 +1,8 @@
defmodule Plausible.Stats.Timeseries do
@moduledoc """
Builds timeseries results for v1 of our stats API and dashboards.
Avoid adding new logic here - update QueryBuilder etc instead.
"""
use Plausible
use Plausible.ClickhouseRepo
alias Plausible.Stats.{Query, QueryOptimizer, QueryResult, SQL}
alias Plausible.Stats.Goal.Revenue
@time_dimension %{
"month" => "time:month",
@ -17,35 +12,43 @@ defmodule Plausible.Stats.Timeseries do
"minute" => "time:minute"
}
# Legacy (v1) entry point: lifts the call into a v2 query and delegates to
# `timeseries/2`. The pre-diff head and dangling `{currency, metrics} =`
# binding that were interleaved here have been removed.
def timeseries(site, %Query{v2: false} = query, metrics) do
  v2_query =
    query
    |> Query.set(metrics: metrics)
    |> Query.set(dimensions: [time_dimension(query)])
    |> Query.set(v2: true)

  timeseries(site, v2_query)
end
# v2 entry point: resolves the revenue currency (EE only), forces ascending
# time ordering and time labels, remaps conversion_rate to its grouped
# variant for the query, then builds/executes the SQL and reverses the
# metric remapping on the way out. Residual pre-diff lines interleaved in
# this clause have been removed.
def timeseries(site, %Query{v2: true} = query) do
  {currency, query} =
    on_ee do
      Revenue.get_revenue_tracking_currency(site, query)
    else
      {nil, query}
    end

  # Exactly one time dimension is expected on a timeseries query.
  [time_dimension] = query.dimensions

  query =
    query
    |> Query.set(order_by: [{time_dimension, :asc}])
    |> Query.set(include: Map.put(query.include, :time_labels, true))
    |> transform_metrics(%{conversion_rate: :group_conversion_rate})
    |> QueryOptimizer.optimize()

  q = SQL.QueryBuilder.build(query, site)

  query_result =
    q
    |> ClickhouseRepo.all(query: query)
    |> QueryResult.from(site, query)

  timeseries_result =
    query_result
    |> build_timeseries_result(query, currency)
    |> transform_keys(%{group_conversion_rate: :conversion_rate})

  {timeseries_result, query_result.meta}
@ -96,8 +99,9 @@ defmodule Plausible.Stats.Timeseries do
end)
end
# Rewrites the query's metrics through the `to_replace` mapping
# (e.g. %{conversion_rate: :group_conversion_rate}); metrics not present in
# the mapping pass through unchanged. The stale two-arity head left over
# from the previous revision has been removed.
defp transform_metrics(%Query{metrics: metrics} = query, to_replace) do
  new_metrics = Enum.map(metrics, &Map.get(to_replace, &1, &1))
  Query.set(query, metrics: new_metrics)
end
defp transform_keys(results, keys_to_replace) do
@ -109,13 +113,15 @@ defmodule Plausible.Stats.Timeseries do
end)
end
# For realtime results (legacy "30m" period or the v2 :realtime_labels
# include), relabels each row's date as a minute offset from now
# (-30 .. -1). The superseded pattern-matched clause pair from the previous
# revision has been removed; non-realtime results pass through untouched.
defp transform_realtime_labels(results, query) do
  if query.period == "30m" or query.include[:realtime_labels] == true do
    results
    |> Enum.with_index()
    |> Enum.map(fn {entry, index} -> %{entry | date: -30 + index} end)
  else
    results
  end
end
on_ee do
defp cast_revenue_metrics_to_money(results, revenue_goals) do
Plausible.Stats.Goal.Revenue.cast_revenue_metrics_to_money(results, revenue_goals)

View File

@ -0,0 +1,147 @@
defmodule PlausibleWeb.Api.GraphController do
  @moduledoc """
  Serves the dashboard main graph through the v2 query engine
  (`POST /api/stats/:domain/main-graph-v2`).
  """

  use PlausibleWeb, :controller

  alias Plausible.Stats
  alias Plausible.Stats.{Query, Comparisons, DateTimeRange}

  @doc """
  Builds a v2 query from `params`, runs the timeseries (plus an optional
  comparison series) and renders the plot/labels payload the dashboard
  expects. Responds with HTTP 400 and the parser's error message when the
  params are invalid.
  """
  def graph(conn, params) do
    site = conn.assigns[:site]
    params = Map.put(params, "site_id", to_string(site.id))

    case Query.build(site, :internal, params, debug_metadata(conn)) do
      {:ok, query} ->
        # The dashboard graph plots exactly one metric at a time.
        [metric] = query.metrics

        {timeseries_result, _} = Stats.Timeseries.timeseries(site, query)

        comparison_result =
          get_comparison_result(site, query, Map.get(params, "comparison_params"))

        time_labels = label_timeseries(timeseries_result, comparison_result)
        present_index = present_index_for(site, query, time_labels)
        full_intervals = build_full_intervals(query, time_labels)

        json(conn, %{
          metric: metric,
          plot: plot_timeseries(timeseries_result, metric),
          labels: time_labels,
          comparison_plot: comparison_result && plot_timeseries(comparison_result, metric),
          comparison_labels: comparison_result && label_timeseries(comparison_result, nil),
          present_index: present_index,
          full_intervals: full_intervals
        })

      {:error, message} ->
        conn
        |> put_status(400)
        |> json(message)
        |> halt()
    end
  end

  defp get_comparison_result(_site, _query, nil), do: nil

  # Runs the timeseries for the comparison period; returns nil when the
  # comparison query cannot be built.
  defp get_comparison_result(site, query, %{} = comparison_params) do
    # Plain assignment instead of a `with {..} = ..` clause — a bare match
    # in `with` can never fall through to `else`, so it was misleading there.
    {comparison_mode, comparison_opts} = parse_comparison_params(comparison_params)

    case Comparisons.compare(site, query, comparison_mode, comparison_opts) do
      {:ok, comparison_query} ->
        site
        |> Stats.Timeseries.timeseries(comparison_query)
        |> elem(0)

      _ ->
        nil
    end
  end

  # Extracts a flat list of plottable numbers for `metric`: missing values
  # become 0, Money structs are flattened to floats.
  defp plot_timeseries(results, metric) do
    Enum.map(results, fn row ->
      case row[metric] do
        nil -> 0
        %Money{} = money -> Decimal.to_float(money.amount)
        value -> value
      end
    end)
  end

  defp label_timeseries(main_result, nil) do
    Enum.map(main_result, & &1.date)
  end

  @blank_value "__blank__"

  # When the comparison series is longer than the main one, pad the main
  # labels with blanks so both series stay aligned on the x-axis.
  defp label_timeseries(main_result, comparison_result) do
    blanks_to_fill = Enum.count(comparison_result) - Enum.count(main_result)

    if blanks_to_fill > 0 do
      blanks = List.duplicate(@blank_value, blanks_to_fill)
      Enum.map(main_result, & &1.date) ++ blanks
    else
      Enum.map(main_result, & &1.date)
    end
  end

  # Index of the label representing "now" in the site's timezone, or nil when
  # the current bucket is not part of the plotted range. Raises (MatchError /
  # CaseClauseError) on an unexpected dimension — such a query should not
  # reach this point.
  defp present_index_for(site, query, time_labels) do
    now = DateTime.now!(site.timezone)
    ["time:" <> interval] = query.dimensions

    current_time_label =
      case interval do
        "hour" -> Calendar.strftime(now, "%Y-%m-%d %H:00:00")
        "day" -> DateTime.to_date(now) |> Date.to_string()
        "week" -> DateTime.to_date(now) |> date_or_weekstart(query) |> Date.to_string()
        "month" -> DateTime.to_date(now) |> Date.beginning_of_month() |> Date.to_string()
        "minute" -> Calendar.strftime(now, "%Y-%m-%d %H:%M:00")
      end

    Enum.find_index(time_labels, &(&1 == current_time_label))
  end

  defp build_full_intervals(%{dimensions: ["time:week"], date_range: date_range}, labels) do
    date_range = DateTimeRange.to_date_range(date_range)
    build_intervals(labels, date_range, &Date.beginning_of_week/1, &Date.end_of_week/1)
  end

  defp build_full_intervals(%{dimensions: ["time:month"], date_range: date_range}, labels) do
    date_range = DateTimeRange.to_date_range(date_range)
    build_intervals(labels, date_range, &Date.beginning_of_month/1, &Date.end_of_month/1)
  end

  defp build_full_intervals(_query, _labels) do
    nil
  end

  @doc false
  # Maps each date label to whether its whole interval (week/month) falls
  # inside the queried date range; unparseable labels map to false.
  def build_intervals(labels, date_range, start_fn, end_fn) do
    for label <- labels, into: %{} do
      case Date.from_iso8601(label) do
        {:ok, date} ->
          interval_start = start_fn.(date)
          interval_end = end_fn.(date)

          within_interval? =
            Enum.member?(date_range, interval_start) && Enum.member?(date_range, interval_end)

          {label, within_interval?}

        _ ->
          {label, false}
      end
    end
  end

  # Snaps `date` to the start of its week, unless that start lies outside the
  # queried range (first partial week), in which case the date is kept.
  defp date_or_weekstart(date, query) do
    weekstart = Date.beginning_of_week(date)
    date_range = DateTimeRange.to_date_range(query.date_range)

    if Enum.member?(date_range, weekstart) do
      weekstart
    else
      date
    end
  end

  # Splits raw comparison params into {mode, opts} for Comparisons.compare/4.
  defp parse_comparison_params(params) do
    options = [
      from: params["from"],
      to: params["to"],
      match_day_of_week?: params["match_day_of_week"]
    ]

    {params["mode"], options}
  end
end

View File

@ -362,7 +362,9 @@ defmodule PlausibleWeb.StatsController do
defp shared_link_cookie_name(slug), do: "shared-link-" <> slug
# Feature flags exposed to the dashboard frontend for this user.
# The previous revision's catch-all clause (`do: %{}`) was left in front of
# this one, making it unreachable — removed.
defp get_flags(user, _site) do
  %{dashboard_api_v2: FunWithFlags.enabled?(:dashboard_api_v2, for: user)}
end
defp is_dbip() do
on_ee do

View File

@ -170,6 +170,8 @@ defmodule PlausibleWeb.Router do
get "/:domain/conversions", StatsController, :conversions
get "/:domain/custom-prop-values/:prop_key", StatsController, :custom_prop_values
get "/:domain/suggestions/:filter_name", StatsController, :filter_suggestions
post "/:domain/main-graph-v2", GraphController, :graph
end
scope "/api/v1/stats", PlausibleWeb.Api, assigns: %{api_scope: "stats:read:*"} do

File diff suppressed because it is too large Load Diff