Mirror of https://github.com/makeworld-the-better-one/amfora.git (synced 2024-11-29 12:03:22 +03:00)
🎨 Rename page cache funcs
commit edd128e7c5
parent f3b7437046

cache/cache.go (vendored): 26 lines changed
@@ -42,12 +42,12 @@ func removeUrl(url string) {
 	}
 }
 
-// Add adds a page to the cache, removing earlier pages as needed
+// AddPage adds a page to the cache, removing earlier pages as needed
 // to keep the cache inside its limits.
 //
 // If your page is larger than the max cache size, the provided page
 // will silently not be added to the cache.
-func Add(p *structs.Page) {
+func AddPage(p *structs.Page) {
 	if p.Url == "" || strings.HasPrefix(p.Url, "about:") {
 		// Just in case, these pages shouldn't be cached
 		return
@@ -62,11 +62,11 @@ func Add(p *structs.Page) {
 	// There should only ever be 1 page to remove at most,
 	// but this handles more just in case.
 	for NumPages() >= maxPages && maxPages > 0 {
-		Remove(urls[0])
+		RemovePage(urls[0])
 	}
 	// Do the same but for cache size
-	for Size()+p.Size() > maxSize && maxSize > 0 {
-		Remove(urls[0])
+	for SizePages()+p.Size() > maxSize && maxSize > 0 {
+		RemovePage(urls[0])
 	}
 
 	lock.Lock()
@@ -77,25 +77,25 @@ func Add(p *structs.Page) {
 	urls = append(urls, p.Url)
 }
 
-// Remove will remove a page from the cache.
+// RemovePage will remove a page from the cache.
 // Even if the page doesn't exist there will be no error.
-func Remove(url string) {
+func RemovePage(url string) {
 	lock.Lock()
 	defer lock.Unlock()
 	delete(pages, url)
 	removeUrl(url)
 }
 
-// Clear removes all pages from the cache.
-func Clear() {
+// ClearPages removes all pages from the cache.
+func ClearPages() {
 	lock.Lock()
 	defer lock.Unlock()
 	pages = make(map[string]*structs.Page)
 	urls = make([]string, 0)
 }
 
-// Size returns the approx. current size of the cache in bytes.
-func Size() int {
+// SizePages returns the approx. current size of the cache in bytes.
+func SizePages() int {
 	lock.RLock()
 	defer lock.RUnlock()
 	n := 0
@@ -111,9 +111,9 @@ func NumPages() int {
 	return len(pages)
 }
 
-// Get returns the page struct, and a bool indicating if the page was in the cache or not.
+// GetPage returns the page struct, and a bool indicating if the page was in the cache or not.
 // An empty page struct is returned if the page isn't in the cache.
-func Get(url string) (*structs.Page, bool) {
+func GetPage(url string) (*structs.Page, bool) {
 	lock.RLock()
 	defer lock.RUnlock()
 	p, ok := pages[url]
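
For reference, here is a minimal sketch of how a caller might use the renamed page-cache API after this commit. The import paths assume the repository's module path, and the structs.Page literal only sets the Url field visible in the diff; everything else is illustrative rather than an actual Amfora call site.

package main

import (
	"fmt"

	"github.com/makeworld-the-better-one/amfora/cache"
	"github.com/makeworld-the-better-one/amfora/structs"
)

func main() {
	// Assumed: structs.Page has more fields than Url, but Url is all the cache keys on.
	p := &structs.Page{Url: "gemini://example.com/"}

	cache.AddPage(p) // was cache.Add

	if cached, ok := cache.GetPage(p.Url); ok { // was cache.Get
		fmt.Println("cached:", cached.Url)
	}

	fmt.Println("pages:", cache.NumPages(), "approx. bytes:", cache.SizePages()) // Size is now SizePages

	cache.RemovePage(p.Url) // was cache.Remove
	cache.ClearPages()      // was cache.Clear
}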

cache/cache_test.go (vendored): 28 lines changed
@@ -12,7 +12,7 @@ var p2 = structs.Page{Url: "example.org"}
 var queryPage = structs.Page{Url: "gemini://example.com/test?query"}
 
 func reset() {
-	Clear()
+	ClearPages()
 	SetMaxPages(0)
 	SetMaxSize(0)
 }
@@ -20,8 +20,8 @@ func reset() {
 func TestMaxPages(t *testing.T) {
 	reset()
 	SetMaxPages(1)
-	Add(&p)
-	Add(&p2)
+	AddPage(&p)
+	AddPage(&p2)
 	assert.Equal(t, 1, NumPages(), "there should only be one page")
 }
 
@@ -29,24 +29,24 @@ func TestMaxSize(t *testing.T) {
 	reset()
 	assert := assert.New(t)
 	SetMaxSize(p.Size())
-	Add(&p)
+	AddPage(&p)
 	assert.Equal(1, NumPages(), "one page should be added")
-	Add(&p2)
+	AddPage(&p2)
 	assert.Equal(1, NumPages(), "there should still be just one page due to cache size limits")
 	assert.Equal(p2.Url, urls[0], "the only page url should be the second page one")
 }
 
 func TestRemove(t *testing.T) {
 	reset()
-	Add(&p)
-	Remove(p.Url)
+	AddPage(&p)
+	RemovePage(p.Url)
 	assert.Equal(t, 0, NumPages(), "there shouldn't be any pages after the removal")
 }
 
 func TestClearAndNumPages(t *testing.T) {
 	reset()
-	Add(&p)
-	Clear()
+	AddPage(&p)
+	ClearPages()
 	assert.Equal(t, 0, len(pages), "map should be empty")
 	assert.Equal(t, 0, len(urls), "urls slice shoulde be empty")
 	assert.Equal(t, 0, NumPages(), "NumPages should report empty too")
@@ -54,15 +54,15 @@ func TestClearAndNumPages(t *testing.T) {
 
 func TestSize(t *testing.T) {
 	reset()
-	Add(&p)
-	assert.Equal(t, p.Size(), Size(), "sizes should match")
+	AddPage(&p)
+	assert.Equal(t, p.Size(), SizePages(), "sizes should match")
 }
 
 func TestGet(t *testing.T) {
 	reset()
-	Add(&p)
-	Add(&p2)
-	page, ok := Get(p.Url)
+	AddPage(&p)
+	AddPage(&p2)
+	page, ok := GetPage(p.Url)
 	if !ok {
 		t.Fatal("Get should say that the page was found")
 	}

cache/redir.go (vendored): 4 lines changed
@@ -1,8 +1,6 @@
 package cache
 
-import (
-	"sync"
-)
+import "sync"
 
 // Functions for caching redirects.
 

@@ -9,6 +9,7 @@
 # example.com:123
 
 [a-general]
+# Press Ctrl-H to access it
 home = "gemini://gemini.circumlunar.space"
 
 # What command to run to open a HTTP URL. Set to "default" to try to guess the browser,
@@ -16,21 +17,36 @@ home = "gemini://gemini.circumlunar.space"
 # If a command is set, than the URL will be added (in quotes) to the end of the command.
 # A space will be prepended if necessary.
 http = "default"
-search = "gemini://gus.guru/search" # Any URL that will accept a query string can be put here
-color = true # Whether colors will be used in the terminal
-bullets = true # Whether to replace list asterisks with unicode bullets
+
+# Any URL that will accept a query string can be put here
+search = "gemini://gus.guru/search"
+
+# Whether colors will be used in the terminal
+color = true
+
+# Whether to replace list asterisks with unicode bullets
+bullets = true
+
 # A number from 0 to 1, indicating what percentage of the terminal width the left margin should take up.
 left_margin = 0.15
-max_width = 100 # The max number of columns to wrap a page's text to. Preformatted blocks are not wrapped.
+
+# The max number of columns to wrap a page's text to. Preformatted blocks are not wrapped.
+max_width = 100
+
 # 'downloads' is the path to a downloads folder.
 # An empty value means the code will find the default downloads folder for your system.
 # If the path does not exist it will be created.
 downloads = ""
+
 # Max size for displayable content in bytes - after that size a download window pops up
 page_max_size = 2097152 # 2 MiB
 # Max time it takes to load a page in seconds - after that a download window pops up
 page_max_time = 10
 
+# Whether to replace tab numbers with emoji favicons, which are cached.
+emoji_favicons = false
+
+
 # Options for page cache - which is only for text/gemini pages
 # Increase the cache size to speed up browsing at the expense of memory
 [cache]
@@ -38,15 +54,16 @@ page_max_time = 10
 max_size = 0 # Size in bytes
 max_pages = 30 # The maximum number of pages the cache will store
 
 
 [theme]
 # This section is for changing the COLORS used in Amfora.
-# These colors only apply if color is enabled above.
-# Colors can be set using a W3C color name, or a hex value such as #ffffff".
+# These colors only apply if 'color' is enabled above.
+# Colors can be set using a W3C color name, or a hex value such as "#ffffff".
+
 # Note that not all colors will work on terminals that do not have truecolor support.
 # If you want to stick to the standard 16 or 256 colors, you can get
 # a list of those here: https://jonasjacek.github.io/colors/
-# Do NOT use the names from that site, just the hex codes.
+# DO NOT use the names from that site, just the hex codes.
 
 # Definitions:
 # bg = background
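
The TOML hunks above reorganize Amfora's default configuration, including the [cache] section whose max_size and max_pages values back the page cache. As a hedged sketch only, the snippet below shows one way such values could be read with viper (the library visible in the later display hunk) and handed to the SetMaxPages/SetMaxSize setters used in the tests; this wiring is an assumption, not necessarily how Amfora's config package actually does it.

package main

import (
	"strings"

	"github.com/spf13/viper"

	"github.com/makeworld-the-better-one/amfora/cache"
)

// defaultConf mirrors a fragment of the [cache] section from the diff above.
const defaultConf = `
[cache]
max_size = 0    # Size in bytes
max_pages = 30  # The maximum number of pages the cache will store
`

func main() {
	viper.SetConfigType("toml")
	if err := viper.ReadConfig(strings.NewReader(defaultConf)); err != nil {
		panic(err)
	}

	// Assumed wiring: pass the configured limits to the cache package's setters.
	cache.SetMaxPages(viper.GetInt("cache.max_pages"))
	cache.SetMaxSize(viper.GetInt("cache.max_size"))
}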

@@ -180,11 +180,11 @@ func Init() {
 	// Detect if it's a search or URL
 	if strings.Contains(query, " ") || (!strings.Contains(query, "//") && !strings.Contains(query, ".") && !strings.HasPrefix(query, "about:")) {
 		u := viper.GetString("a-general.search") + "?" + queryEscape(query)
-		cache.Remove(u) // Don't use the cached version of the search
+		cache.RemovePage(u) // Don't use the cached version of the search
 		URL(u)
 	} else {
 		// Full URL
-		cache.Remove(query) // Don't use cached version for manually entered URL
+		cache.RemovePage(query) // Don't use cached version for manually entered URL
 		URL(query)
 	}
 	return
@@ -550,7 +550,7 @@ func Reload() {
 		return
 	}
 
-	go cache.Remove(tabs[curTab].page.Url)
+	go cache.RemovePage(tabs[curTab].page.Url)
 	go func(t *tab) {
 		handleURL(t, t.page.Url) // goURL is not used bc history shouldn't be added to
 		if t == tabs[curTab] {
@@ -221,7 +221,7 @@ func handleURL(t *tab, u string) (string, bool) {
 	// Gemini URL
 
 	// Load page from cache if possible
-	page, ok := cache.Get(u)
+	page, ok := cache.GetPage(u)
 	if ok {
 		setPage(t, page)
 		return ret(u, true)
@@ -285,7 +285,7 @@ func handleURL(t *tab, u string) (string, bool) {
 	}
 
 	page.Width = termW
-	go cache.Add(page)
+	go cache.AddPage(page)
 	setPage(t, page)
 	return ret(u, true)
 }
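
Taken together, the handleURL hunks show the load path these renames touch: look a URL up with GetPage and, on a miss, fetch the page and AddPage it in the background. Below is a condensed sketch of that pattern under the same import-path assumption as above; fetchPage is a hypothetical stand-in for Amfora's real fetch-and-render code, not a function from the repository.

package main

import (
	"fmt"

	"github.com/makeworld-the-better-one/amfora/cache"
	"github.com/makeworld-the-better-one/amfora/structs"
)

// fetchPage is a hypothetical placeholder for fetching and rendering a Gemini page.
func fetchPage(url string) *structs.Page {
	return &structs.Page{Url: url}
}

// loadPage condenses the cache-or-fetch pattern from the hunks above.
func loadPage(url string) *structs.Page {
	// Load page from cache if possible
	if page, ok := cache.GetPage(url); ok {
		return page
	}
	page := fetchPage(url)
	go cache.AddPage(page) // cache in the background, as the diff does
	return page
}

func main() {
	fmt.Println(loadPage("gemini://example.com/").Url)
}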