/*
 * Copyright (c) 2018-2023, Andreas Kling <kling@serenityos.org>
 * Copyright (c) 2022, Dex♪ <dexes.ttp@gmail.com>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#include <AK/Debug.h>
#include <AK/StringBuilder.h>
#include <LibCore/DateTime.h>
#include <LibCore/Directory.h>
#include <LibCore/ElapsedTimer.h>
#include <LibCore/File.h>
#include <LibCore/MimeData.h>
#include <LibCore/Resource.h>
#include <LibCore/System.h>
#include <LibWeb/Cookie/Cookie.h>
#include <LibWeb/Cookie/ParsedCookie.h>
#include <LibWeb/Fetch/Infrastructure/URL.h>
#include <LibWeb/Loader/ContentFilter.h>
#include <LibWeb/Loader/GeneratedPagesLoader.h>
#include <LibWeb/Loader/LoadRequest.h>
#include <LibWeb/Loader/ProxyMappings.h>
#include <LibWeb/Loader/Resource.h>
#include <LibWeb/Loader/ResourceLoader.h>
#include <LibWeb/Loader/UserAgent.h>
#include <LibWeb/Platform/EventLoopPlugin.h>
#include <LibWeb/Platform/Timer.h>

namespace Web {

ResourceLoaderConnectorRequest::ResourceLoaderConnectorRequest() = default;

ResourceLoaderConnectorRequest::~ResourceLoaderConnectorRequest() = default;

ResourceLoaderConnector::ResourceLoaderConnector() = default;

ResourceLoaderConnector::~ResourceLoaderConnector() = default;

static RefPtr<ResourceLoader> s_resource_loader;

void ResourceLoader::initialize(RefPtr<ResourceLoaderConnector> connector)
{
    if (connector)
        s_resource_loader = ResourceLoader::try_create(connector.release_nonnull()).release_value_but_fixme_should_propagate_errors();
}

ResourceLoader& ResourceLoader::the()
{
    if (!s_resource_loader) {
        dbgln("Web::ResourceLoader was not initialized");
        VERIFY_NOT_REACHED();
    }
    return *s_resource_loader;
}
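
// A minimal usage sketch (MyConnector stands in for a hypothetical
// ResourceLoaderConnector subclass; real embedders install their
// RequestServer-backed connector once at process startup):
//
//     ResourceLoader::initialize(MyConnector::try_create());
//     auto& loader = ResourceLoader::the();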

ErrorOr<NonnullRefPtr<ResourceLoader>> ResourceLoader::try_create(NonnullRefPtr<ResourceLoaderConnector> connector)
{
    return adopt_nonnull_ref_or_enomem(new (nothrow) ResourceLoader(move(connector)));
}

ResourceLoader::ResourceLoader(NonnullRefPtr<ResourceLoaderConnector> connector)
    : m_connector(move(connector))
    , m_user_agent(MUST(String::from_utf8(default_user_agent)))
    , m_platform(MUST(String::from_utf8(default_platform)))
{
}

void ResourceLoader::prefetch_dns(URL::URL const& url)
{
    if (url.scheme().is_one_of("file"sv, "data"sv))
        return;

    if (ContentFilter::the().is_filtered(url)) {
        dbgln("ResourceLoader: Refusing to prefetch DNS for '{}': \033[31;1mURL was filtered\033[0m", url);
        return;
    }

    m_connector->prefetch_dns(url);
}

void ResourceLoader::preconnect(URL::URL const& url)
{
    if (url.scheme().is_one_of("file"sv, "data"sv))
        return;

    if (ContentFilter::the().is_filtered(url)) {
        dbgln("ResourceLoader: Refusing to pre-connect to '{}': \033[31;1mURL was filtered\033[0m", url);
        return;
    }

    m_connector->preconnect(url);
}
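
// Note that the cache below is keyed on the whole LoadRequest, not just on the
// URL. Responses for file:// URLs are deliberately never cached; see
// load_resource() below.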

static HashMap<LoadRequest, NonnullRefPtr<Resource>> s_resource_cache;

RefPtr<Resource> ResourceLoader::load_resource(Resource::Type type, LoadRequest& request)
{
    if (!request.is_valid())
        return nullptr;

    bool use_cache = request.url().scheme() != "file";

    if (use_cache) {
        auto it = s_resource_cache.find(request);
        if (it != s_resource_cache.end()) {
            if (it->value->type() != type) {
                dbgln("FIXME: Not using cached resource for {} since there's a type mismatch.", request.url());
            } else {
                dbgln_if(CACHE_DEBUG, "Reusing cached resource for: {}", request.url());
                return it->value;
            }
        }
    }

    auto resource = Resource::create({}, type, request);

    if (use_cache)
        s_resource_cache.set(request, resource);

    load(
        request,
        [=](auto data, auto& headers, auto status_code) {
            const_cast<Resource&>(*resource).did_load({}, data, headers, status_code);
        },
        [=](auto& error, auto status_code, auto, auto) {
            const_cast<Resource&>(*resource).did_fail({}, error, status_code);
        });

    return resource;
}
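
// A sketch of a typical call site (hypothetical names; callers usually build
// the request with a LoadRequest helper and keep the returned Resource alive
// in a ResourceClient):
//
//     auto request = LoadRequest::create_for_url_on_page(url, &page);
//     auto resource = ResourceLoader::the().load_resource(Resource::Type::Generic, request);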

static ByteString sanitized_url_for_logging(URL::URL const& url)
{
    if (url.scheme() == "data"sv)
        return "[data URL]"sv;
    return url.to_byte_string();
}
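
// Signpost emission is stubbed out in this build: the arguments are discarded
// on purpose so that the logging helpers below can call it unconditionally.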

static void emit_signpost(ByteString const& message, int id)
{
    (void)message;
    (void)id;
}

static void store_response_cookies(Page& page, URL::URL const& url, ByteString const& set_cookie_entry)
{
    auto cookie = Cookie::parse_cookie(set_cookie_entry);
    if (!cookie.has_value())
        return;
    page.client().page_did_set_cookie(url, cookie.value(), Cookie::Source::Http); // FIXME: Determine cookie source correctly
}

static HTTP::HeaderMap response_headers_for_file(StringView path, Optional<time_t> const& modified_time)
{
    // For file:// and resource:// URLs, we have to guess the MIME type, since there's no HTTP header to tell us what
    // it is. We insert a fake Content-Type header here, so that clients can use it to learn the MIME type.
    auto mime_type = Core::guess_mime_type_based_on_filename(path);

    HTTP::HeaderMap response_headers;
    response_headers.set("Content-Type"sv, mime_type);

    if (modified_time.has_value()) {
        auto const datetime = Core::DateTime::from_timestamp(modified_time.value());
        response_headers.set("Last-Modified"sv, datetime.to_byte_string("%a, %d %b %Y %H:%M:%S GMT"sv, Core::DateTime::LocalTime::No));
    }

    return response_headers;
}
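
// As an illustration, response_headers_for_file("/tmp/index.html"sv, ...) for a
// file last modified on 2024-04-01 would produce headers roughly like:
//
//     Content-Type: text/html
//     Last-Modified: Mon, 01 Apr 2024 00:00:00 GMT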

static void log_request_start(LoadRequest const& request)
{
    auto url_for_logging = sanitized_url_for_logging(request.url());

    emit_signpost(ByteString::formatted("Starting load: {}", url_for_logging), request.id());
    dbgln_if(SPAM_DEBUG, "ResourceLoader: Starting load of: \"{}\"", url_for_logging);
}

static void log_success(LoadRequest const& request)
{
    auto url_for_logging = sanitized_url_for_logging(request.url());
    auto load_time_ms = request.load_time().to_milliseconds();

    emit_signpost(ByteString::formatted("Finished load: {}", url_for_logging), request.id());
    dbgln_if(SPAM_DEBUG, "ResourceLoader: Finished load of: \"{}\", Duration: {}ms", url_for_logging, load_time_ms);
}

template<typename ErrorType>
static void log_failure(LoadRequest const& request, ErrorType const& error)
{
    auto url_for_logging = sanitized_url_for_logging(request.url());
    auto load_time_ms = request.load_time().to_milliseconds();

    emit_signpost(ByteString::formatted("Failed load: {}", url_for_logging), request.id());
    dbgln("ResourceLoader: Failed load of: \"{}\", \033[31;1mError: {}\033[0m, Duration: {}ms", url_for_logging, error, load_time_ms);
}
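
// The port blocklist below closely tracks the "bad ports" that the Fetch
// specification tells browsers to refuse (https://fetch.spec.whatwg.org/#port-blocking):
// well-known non-HTTP services such as SMTP (25), NFS (2049), X11 (6000), and
// IRC (6665-6669).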

static bool should_block_request(LoadRequest const& request)
{
    auto const& url = request.url();

    auto is_port_blocked = [](int port) {
        static constexpr auto ports = to_array({ 1, 7, 9, 11, 13, 15, 17, 19, 20, 21, 22, 23, 25, 37, 42,
            43, 53, 77, 79, 87, 95, 101, 102, 103, 104, 109, 110, 111, 113, 115, 117, 119, 123, 135, 139,
            143, 179, 389, 465, 512, 513, 514, 515, 526, 530, 531, 532, 540, 556, 563, 587, 601, 636,
            993, 995, 2049, 3659, 4045, 6000, 6379, 6665, 6666, 6667, 6668, 6669 });

        return ports.first_index_of(port).has_value();
    };

    if (is_port_blocked(url.port_or_default())) {
        log_failure(request, ByteString::formatted("Port #{} is blocked", url.port_or_default()));
        return true;
    }

    if (ContentFilter::the().is_filtered(url)) {
        log_failure(request, "URL was filtered"sv);
        return true;
    }

    return false;
}

void ResourceLoader::load(LoadRequest& request, SuccessCallback success_callback, ErrorCallback error_callback, Optional<u32> timeout, TimeoutCallback timeout_callback)
{
    auto const& url = request.url();

    log_request_start(request);
    request.start_timer();

    if (should_block_request(request)) {
        error_callback("Request was blocked", {}, {}, {});
        return;
    }

    auto respond_directory_page = [](LoadRequest const& request, URL::URL const& url, SuccessCallback const& success_callback, ErrorCallback const& error_callback) {
        auto maybe_response = load_file_directory_page(url);
        if (maybe_response.is_error()) {
            log_failure(request, maybe_response.error());
            if (error_callback)
                error_callback(ByteString::formatted("{}", maybe_response.error()), 500u, {}, {});
            return;
        }

        log_success(request);
        HTTP::HeaderMap response_headers;
        response_headers.set("Content-Type"sv, "text/html"sv);
        success_callback(maybe_response.release_value().bytes(), response_headers, {});
    };
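
    // From here on, load() dispatches on the URL scheme: about:, data:,
    // resource:, file:, and http(s) each get a dedicated branch, and any other
    // scheme falls through to the "Protocol not implemented" failure at the
    // bottom.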

    if (url.scheme() == "about") {
        dbgln_if(SPAM_DEBUG, "Loading about: URL {}", url);
        log_success(request);

        HTTP::HeaderMap response_headers;
        response_headers.set("Content-Type", "text/html; charset=UTF-8");

        // About version page
        if (url.path_segment_at_index(0) == "version") {
            success_callback(MUST(load_about_version_page()).bytes(), response_headers, {});
            return;
        }

        // Other about static HTML pages
        auto resource = Core::Resource::load_from_uri(MUST(String::formatted("resource://ladybird/{}.html", url.path_segment_at_index(0))));
        if (!resource.is_error()) {
            auto data = resource.value()->data();
            success_callback(data, response_headers, {});
            return;
        }

        Platform::EventLoopPlugin::the().deferred_invoke([success_callback = move(success_callback), response_headers = move(response_headers)] {
            success_callback(ByteString::empty().to_byte_buffer(), response_headers, {});
        });
        return;
    }

    if (url.scheme() == "data") {
        auto data_url_or_error = Fetch::Infrastructure::process_data_url(url);
        if (data_url_or_error.is_error()) {
            auto error_message = data_url_or_error.error().string_literal();
            log_failure(request, error_message);
            error_callback(error_message, {}, {}, {});
            return;
        }

        auto data_url = data_url_or_error.release_value();

        dbgln_if(SPAM_DEBUG, "ResourceLoader loading a data URL with mime-type: '{}', payload='{}'",
            MUST(data_url.mime_type.serialized()),
            StringView(data_url.body.bytes()));

        HTTP::HeaderMap response_headers;
        response_headers.set("Content-Type", MUST(data_url.mime_type.serialized()).to_byte_string());

        log_success(request);

        Platform::EventLoopPlugin::the().deferred_invoke([data = move(data_url.body), response_headers = move(response_headers), success_callback = move(success_callback)] {
            success_callback(data, response_headers, {});
        });
        return;
    }
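
    // For example, "data:text/plain;base64,aGVsbG8=" is handled by the branch
    // above: the body decodes to "hello" and the Content-Type becomes
    // "text/plain".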

    if (url.scheme() == "resource") {
        auto resource = Core::Resource::load_from_uri(url.serialize());
        if (resource.is_error()) {
            log_failure(request, resource.error());
            return;
        }

        // When the resource URI is a directory, use the file directory loader to generate a response
        if (resource.value()->is_directory()) {
            respond_directory_page(request, resource.value()->file_url(), success_callback, error_callback);
            return;
        }

        auto data = resource.value()->data();
        auto response_headers = response_headers_for_file(url.serialize_path(), resource.value()->modified_time());

        log_success(request);
        success_callback(data, response_headers, {});

        return;
    }
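
    // Local files are not opened in-process: the open is brokered through the
    // PageClient via request_file(), which is why the callback below receives a
    // ready-made file descriptor instead of a path. This lets the embedder
    // decide whether a (possibly sandboxed) renderer may read the file at all.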

    if (url.scheme() == "file") {
        if (request.page())
            m_page = request.page();

        if (!m_page.has_value()) {
            log_failure(request, "INTERNAL ERROR: No Page for request");
            return;
        }

        FileRequest file_request(url.serialize_path(), [this, success_callback = move(success_callback), error_callback = move(error_callback), request, respond_directory_page](ErrorOr<i32> file_or_error) {
            --m_pending_loads;
            if (on_load_counter_change)
                on_load_counter_change();

            if (file_or_error.is_error()) {
                log_failure(request, file_or_error.error());
                if (error_callback) {
                    auto status = file_or_error.error().code() == ENOENT ? 404u : 500u;
                    error_callback(ByteString::formatted("{}", file_or_error.error()), status, {}, {});
                }
                return;
            }

            auto const fd = file_or_error.value();

            // When the local file is a directory, use the file directory loader to generate a response
            auto maybe_is_valid_directory = Core::Directory::is_valid_directory(fd);
            if (!maybe_is_valid_directory.is_error() && maybe_is_valid_directory.value()) {
                respond_directory_page(request, request.url(), success_callback, error_callback);
                return;
            }

            auto st_or_error = Core::System::fstat(fd);
            if (st_or_error.is_error()) {
                log_failure(request, st_or_error.error());
                if (error_callback)
                    error_callback(ByteString::formatted("{}", st_or_error.error()), 500u, {}, {});
                return;
            }

            // Try to read the file normally
            auto maybe_file = Core::File::adopt_fd(fd, Core::File::OpenMode::Read);
            if (maybe_file.is_error()) {
                log_failure(request, maybe_file.error());
                if (error_callback)
                    error_callback(ByteString::formatted("{}", maybe_file.error()), 500u, {}, {});
                return;
            }

            auto file = maybe_file.release_value();
            auto maybe_data = file->read_until_eof();
            if (maybe_data.is_error()) {
                log_failure(request, maybe_data.error());
                if (error_callback)
                    error_callback(ByteString::formatted("{}", maybe_data.error()), 500u, {}, {});
                return;
            }

            auto data = maybe_data.release_value();
            auto response_headers = response_headers_for_file(request.url().serialize_path(), st_or_error.value().st_mtime);

            log_success(request);
            success_callback(data, response_headers, {});
        });

        (*m_page)->client().request_file(move(file_request));

        ++m_pending_loads;
        if (on_load_counter_change)
            on_load_counter_change();

        return;
    }

    if (url.scheme() == "http" || url.scheme() == "https") {
        auto protocol_request = start_network_request(request);
        if (!protocol_request) {
            if (error_callback)
                error_callback("Failed to start network request"sv, {}, {}, {});
            return;
        }

        if (timeout.has_value() && timeout.value() > 0) {
            auto timer = Platform::Timer::create_single_shot(timeout.value(), nullptr);
            timer->on_timeout = [timer, protocol_request, timeout_callback = move(timeout_callback)] {
                protocol_request->stop();
                if (timeout_callback)
                    timeout_callback();
            };
            timer->start();
        }

        auto on_buffered_request_finished = [this, success_callback = move(success_callback), error_callback = move(error_callback), request, &protocol_request = *protocol_request](bool success, auto, auto& response_headers, auto status_code, ReadonlyBytes payload) mutable {
            handle_network_response_headers(request, response_headers);
            finish_network_request(protocol_request);

            if (!success || (status_code.has_value() && *status_code >= 400 && *status_code <= 599 && (payload.is_empty() || !request.is_main_resource()))) {
                StringBuilder error_builder;
                if (status_code.has_value())
                    error_builder.appendff("Load failed: {}", *status_code);
                else
                    error_builder.append("Load failed"sv);
                log_failure(request, error_builder.string_view());
                if (error_callback)
                    error_callback(error_builder.to_byte_string(), status_code, payload, response_headers);
                return;
            }

            log_success(request);
            success_callback(payload, response_headers, status_code);
        };

        protocol_request->set_buffered_request_finished_callback(move(on_buffered_request_finished));
        return;
    }

    auto not_implemented_error = ByteString::formatted("Protocol not implemented: {}", url.scheme());
    log_failure(request, not_implemented_error);
    if (error_callback)
        error_callback(not_implemented_error, {}, {}, {});
}
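
// Unlike load(), which buffers the entire response before invoking a single
// success callback, load_unbuffered() below streams the response: headers,
// data chunks, and completion are delivered through three separate callbacks.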

void ResourceLoader::load_unbuffered(LoadRequest& request, OnHeadersReceived on_headers_received, OnDataReceived on_data_received, OnComplete on_complete)
{
    auto const& url = request.url();

    log_request_start(request);
    request.start_timer();

    if (should_block_request(request)) {
        on_complete(false, "Request was blocked"sv);
        return;
    }

    if (!url.scheme().is_one_of("http"sv, "https"sv)) {
        // FIXME: Non-network requests from fetch should not go through this path.
        on_complete(false, "Cannot establish connection for non-network scheme"sv);
        return;
    }

    auto protocol_request = start_network_request(request);
    if (!protocol_request) {
        on_complete(false, "Failed to start network request"sv);
        return;
    }

    auto protocol_headers_received = [this, on_headers_received = move(on_headers_received), request](auto const& response_headers, auto status_code) {
        handle_network_response_headers(request, response_headers);
        on_headers_received(response_headers, move(status_code));
    };

    auto protocol_data_received = [on_data_received = move(on_data_received)](auto data) {
        on_data_received(data);
    };

    auto protocol_complete = [this, on_complete = move(on_complete), request, &protocol_request = *protocol_request](bool success, u64) {
        finish_network_request(protocol_request);

        if (success) {
            log_success(request);
            on_complete(true, {});
        } else {
            log_failure(request, "Request finished with error"sv);
            on_complete(false, "Request finished with error"sv);
        }
    };

    protocol_request->set_unbuffered_request_callbacks(move(protocol_headers_received), move(protocol_data_received), move(protocol_complete));
}
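
// start_network_request() sets default User-Agent and Accept-Encoding headers
// first and then applies every header carried by the LoadRequest on top of
// them, before handing the request over to the connector.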

RefPtr<ResourceLoaderConnectorRequest> ResourceLoader::start_network_request(LoadRequest const& request)
{
    auto proxy = ProxyMappings::the().proxy_for_url(request.url());

    HTTP::HeaderMap headers;
    headers.set("User-Agent", m_user_agent.to_byte_string());
    headers.set("Accept-Encoding", "gzip, deflate, br");

    for (auto const& it : request.headers()) {
        headers.set(it.key, it.value);
    }

    auto protocol_request = m_connector->start_request(request.method(), request.url(), headers, request.body(), proxy);
    if (!protocol_request) {
        log_failure(request, "Failed to initiate load"sv);
        return nullptr;
    }

    protocol_request->on_certificate_requested = []() -> ResourceLoaderConnectorRequest::CertificateAndKey {
        return {};
    };

    ++m_pending_loads;
    if (on_load_counter_change)
        on_load_counter_change();

    m_active_requests.set(*protocol_request);
    return protocol_request;
}

void ResourceLoader::handle_network_response_headers(LoadRequest const& request, HTTP::HeaderMap const& response_headers)
{
    if (!request.page())
        return;

    for (auto const& [header, value] : response_headers.headers()) {
        if (header.equals_ignoring_ascii_case("Set-Cookie"sv)) {
            store_response_cookies(*request.page(), request.url(), value);
        }
    }

    if (auto cache_control = response_headers.get("Cache-Control"); cache_control.has_value()) {
        if (cache_control.value().contains("no-store"sv))
            s_resource_cache.remove(request);
    }
}
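
// Removal from m_active_requests is deferred to the next event loop turn,
// presumably so the ResourceLoaderConnectorRequest is not destroyed while one
// of its own completion callbacks is still running.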

void ResourceLoader::finish_network_request(NonnullRefPtr<ResourceLoaderConnectorRequest> const& protocol_request)
{
    --m_pending_loads;
    if (on_load_counter_change)
        on_load_counter_change();

    Platform::EventLoopPlugin::the().deferred_invoke([this, protocol_request] {
        m_active_requests.remove(protocol_request);
    });
}

void ResourceLoader::clear_cache()
{
    dbgln_if(CACHE_DEBUG, "Clearing {} items from ResourceLoader cache", s_resource_cache.size());
    s_resource_cache.clear();
}

void ResourceLoader::evict_from_cache(LoadRequest const& request)
{
    dbgln_if(CACHE_DEBUG, "Removing resource {} from cache", request.url());
    s_resource_cache.remove(request);
}

}