analytics/test/plausible_web/plugs/no_robots_test.exs
hq1 fea9bb32ee
Prevent robots from looking too much into public dashboards (#3118)
* Add noindex,nofollow to dashboard pages

* Implement NoRobots plug

* Enable NoRobots plug in the router

* Fixup internal route

* Fix double slash in the router

* Add special bot treatment to plausible.io live demo page

* Revert aggressive protection with agent detection
2023-07-11 10:47:03 +02:00

37 lines
1.1 KiB
Elixir

defmodule PlausibleWeb.Plugs.NoRobotsTest do
  @moduledoc false
  use Plausible.DataCase, async: true
  use Plug.Test
  alias PlausibleWeb.Plugs.NoRobots

  # The NoRobots plug should tag every ordinary response with a
  # "noindex, nofollow" x-robots-tag header and let the request through
  # (not halted, no status set), regardless of the user agent.

  test "non-bots pass - when no user agent is supplied" do
    conn = :get |> conn("/") |> NoRobots.call()

    assert get_resp_header(conn, "x-robots-tag") == ["noindex, nofollow"]
    assert get_resp_header(conn, "x-plausible-forbidden-reason") == []
    assert conn.private.robots == "noindex, nofollow"
    refute conn.halted
    refute conn.status
  end

  test "non-bots pass - when user agent is supplied" do
    # FIX: this test previously set no user agent at all, making it an exact
    # duplicate of the test above despite its name. Supply a real browser UA
    # so the "with user agent" path is actually exercised.
    conn =
      :get
      |> conn("/")
      |> put_req_header("user-agent", "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36")
      |> NoRobots.call()

    assert get_resp_header(conn, "x-robots-tag") == ["noindex, nofollow"]
    assert get_resp_header(conn, "x-plausible-forbidden-reason") == []
    assert conn.private.robots == "noindex, nofollow"
    refute conn.halted
    refute conn.status
  end

  test "writes index, nofollow for plausible.io live demo" do
    # The live demo dashboard is the one page that may be indexed,
    # though links from it should still not be followed.
    conn = :get |> conn("/plausible.io") |> NoRobots.call()

    assert get_resp_header(conn, "x-robots-tag") == ["index, nofollow"]
    assert get_resp_header(conn, "x-plausible-forbidden-reason") == []
    refute conn.halted
    refute conn.status
  end
end