Mirror of https://github.com/plausible/analytics.git, synced 2024-12-26 11:02:52 +03:00, at commit f8b4d5066a.
* Clean up references to no longer active `google_analytics_imports` Oban queue * Stub CSV importer * Add SiteImport schema * Rename `Plausible.Imported` module file to match module name * Add `import_id` column to `Imported.*` CH schemas * Implement Importer behavior and manage imports state using new entities * Implement importer callbacks and maintain site.imported_data for UA * Keep imports in sync when forgetting all imports * Scope imported data queries to completed import IDs * Mark newly imported data with respective import ID * Clean up Importer implementation a bit * Test querying legacy and new imported data * Send Oban notifications on import worker failure too * Fix checking for forgettable imports and remove redundant function * Fix UA integration test * Change site import source to atom enum and add source label * Add typespecs and reduce repetition in `Plausible.Imported` * Improve documentation and typespecs * Add test for purging particular import * Switch email notification templates depending on import source * Document running import synchronously * Fix UA importer args parsing and ensure it's covered by tests * Clear `site.stats_start_date` on complete import to force recalculation * Test Oban notifications (h/t @ruslandoga) * Purge stats on import failure right away to reduce a chance of leaving debris behind * Fix typos Co-authored-by: hq1 <hq@mtod.org> * Fix another typo * Refactor fetching earliest import and earliest stats start date * Use `Date.after?` instead of `Timex.after?` * Cache import data in site virtual fields and limit queried imports to 5 * Ensure always current `stats_start_date` is used * Work around broken typespec in Timex * Make `SiteController.forget_imported` action idempotent * Discard irrecoverably failed import tasks * Use macros for site import statuses There's also a fix ensuring only complete imports are considered where relevant - couldn't isolate it as it was in a common hunk * Use `import_id` as worker job 
uniqueness criterion * Do not load imported stats data in plugins API context --------- Co-authored-by: hq1 <hq@mtod.org>
116 lines · 3.4 KiB · Elixir
defmodule PlausibleWeb.AuthorizeStatsApiPlug do
  @moduledoc """
  Plug that authorizes Stats API requests.

  A request must carry a valid API key as a `Bearer` token in the
  `authorization` header. The plug then rate-limits the key, verifies the
  key's access to the site given by the `site_id` parameter, and on success
  assigns the (import-data-enriched) site to the connection. Any failure is
  rendered as the appropriate API error response.
  """

  import Plug.Conn
  use Plausible.Repo

  alias Plausible.Auth
  alias Plausible.Sites
  alias Plausible.RateLimit
  alias PlausibleWeb.Api.Helpers, as: H

  def init(opts), do: opts

  def call(conn, _opts) do
    with {:ok, token} <- get_bearer_token(conn),
         {:ok, api_key} <- Auth.find_api_key(token),
         :ok <- check_api_key_rate_limit(api_key),
         {:ok, site} <- verify_access(api_key, conn.params["site_id"]) do
      Plausible.OpenTelemetry.add_site_attributes(site)

      # Enrich the site with import data before handing it to downstream plugs.
      site = Plausible.Imported.load_import_data(site)

      assign(conn, :site, site)
    else
      {:error, :missing_api_key} ->
        H.unauthorized(
          conn,
          "Missing API key. Please use a valid Plausible API key as a Bearer Token."
        )

      {:error, :missing_site_id} ->
        H.bad_request(
          conn,
          "Missing site ID. Please provide the required site_id parameter with your request."
        )

      {:error, :rate_limit, limit} ->
        H.too_many_requests(
          conn,
          "Too many API requests. Your API key is limited to #{limit} requests per hour. Please contact us to request more capacity."
        )

      {:error, :invalid_api_key} ->
        H.unauthorized(
          conn,
          "Invalid API key or site ID. Please make sure you're using a valid API key with access to the site you've requested."
        )

      {:error, :upgrade_required} ->
        H.payment_required(
          conn,
          "The account that owns this API key does not have access to Stats API. Please make sure you're using the API key of a subscriber account and that the subscription plan includes Stats API"
        )

      {:error, :site_locked} ->
        H.payment_required(
          conn,
          "This Plausible site is locked due to missing active subscription. In order to access it, the site owner should subscribe to a suitable plan"
        )
    end
  end

  # No site_id parameter at all is a distinct, caller-fixable error.
  defp verify_access(_api_key, nil), do: {:error, :missing_site_id}

  # Looks the site up by current domain or the domain it was changed from,
  # then decides access in strict precedence order: super admins always get
  # in, locked sites reject everyone else, the Stats API feature must be
  # available to the key's owner, and finally plain membership grants access.
  defp verify_access(api_key, site_id) do
    site_query =
      from s in Plausible.Site,
        where: s.domain == ^site_id or s.domain_changed_from == ^site_id

    case Repo.one(site_query) do
      nil ->
        # Deliberately indistinguishable from a bad key, so callers can't
        # probe which site domains exist.
        {:error, :invalid_api_key}

      %Plausible.Site{} = site ->
        super_admin? = Plausible.Auth.is_super_admin?(api_key.user_id)
        member? = Sites.is_member?(api_key.user_id, site)

        cond do
          super_admin? ->
            {:ok, site}

          Sites.locked?(site) ->
            {:error, :site_locked}

          Plausible.Billing.Feature.StatsAPI.check_availability(api_key.user) !== :ok ->
            {:error, :upgrade_required}

          member? ->
            {:ok, site}

          true ->
            {:error, :invalid_api_key}
        end
    end
  end

  # Extracts the token from an `Authorization: Bearer <token>` header.
  # Only the first authorization header is considered; anything else
  # (absent header, wrong scheme) maps to :missing_api_key.
  defp get_bearer_token(conn) do
    case get_req_header(conn, "authorization") do
      ["Bearer " <> token | _] -> {:ok, String.trim(token)}
      _ -> {:error, :missing_api_key}
    end
  end

  # Rate-limit window in milliseconds.
  @one_hour 60 * 60 * 1000

  # Enforces the per-key hourly request quota; the limit itself comes from
  # the key record (`hourly_request_limit`).
  defp check_api_key_rate_limit(api_key) do
    bucket = "api_request:#{api_key.id}"

    case RateLimit.check_rate(bucket, @one_hour, api_key.hourly_request_limit) do
      {:allow, _count} -> :ok
      {:deny, _limit} -> {:error, :rate_limit, api_key.hourly_request_limit}
    end
  end
end
|