author    zeripath <art27@cantab.net>  2021-07-10 22:54:15 +0100
committer GitHub <noreply@github.com>  2021-07-10 23:54:15 +0200
commit    2f725cbc9e836e1d11b8645ee11ed2c82da8c0b7 (patch)
tree      4528863d2c546dc3126970bff029334694d80e80 /modules/setting
parent    07284792d40c556ec4c9e1c92379a922e746e12b (diff)
Add LRU mem cache implementation (#16226)
The current default memory cache implementation is unbounded in both size and number of cached objects, which is hardly ideal. This PR proposes creating a TwoQueue LRU cache as the underlying cache for Gitea. For simplicity, the cache is limited by the number of objects it stores rather than by their size. The default limit is 50000 objects, which is perhaps too small: most of the objects we cache are well under 1kB, so a larger cap would still keep memory use modest. It may be worth considering a different LRU implementation that actively limits sizes or avoids GC pressure, but this is just a first implementation.

Signed-off-by: Andrew Thornton <art27@cantab.net>
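As a rough illustration of the approach, below is a minimal, self-contained sketch of a two-queue (2Q) LRU cache bounded by object count, built on the hashicorp/golang-lru package. The 50000 limit mirrors the default described above; the keys, values, and package layout are illustrative assumptions, not the code added by this PR.

package main

import (
	"fmt"

	lru "github.com/hashicorp/golang-lru"
)

func main() {
	// Bound the cache by the number of stored objects (50000 here),
	// not by their byte size - simpler, at the cost of coarser memory control.
	cache, err := lru.New2Q(50000)
	if err != nil {
		panic(err)
	}

	// Recently and frequently used entries survive; cold entries are evicted
	// once the object limit is reached.
	cache.Add("repo:1:description", "an example cached value")
	if v, ok := cache.Get("repo:1:description"); ok {
		fmt.Println(v)
	}
}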
Diffstat (limited to 'modules/setting')
-rw-r--r--  modules/setting/cache.go | 7
1 file changed, 6 insertions(+), 1 deletion(-)
diff --git a/modules/setting/cache.go b/modules/setting/cache.go
index 7bfea91961..2bfe2318f5 100644
--- a/modules/setting/cache.go
+++ b/modules/setting/cache.go
@@ -58,11 +58,16 @@ func newCacheService() {
 		log.Fatal("Failed to map Cache settings: %v", err)
 	}
-	CacheService.Adapter = sec.Key("ADAPTER").In("memory", []string{"memory", "redis", "memcache"})
+	CacheService.Adapter = sec.Key("ADAPTER").In("memory", []string{"memory", "redis", "memcache", "twoqueue"})
 	switch CacheService.Adapter {
 	case "memory":
 	case "redis", "memcache":
 		CacheService.Conn = strings.Trim(sec.Key("HOST").String(), "\" ")
+	case "twoqueue":
+		CacheService.Conn = strings.TrimSpace(sec.Key("HOST").String())
+		if CacheService.Conn == "" {
+			CacheService.Conn = "50000"
+		}
 	case "": // disable cache
 		CacheService.Enabled = false
 	default:
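For context, here is a hedged sketch of how the Conn value configured above (a bare object count such as the default "50000") might later be turned into the bounded cache. The helper name and error handling are hypothetical and are not part of this diff.

package cache

import (
	"fmt"
	"strconv"
	"strings"

	lru "github.com/hashicorp/golang-lru"
)

// newTwoQueueCache is a hypothetical helper showing how the HOST/Conn value
// ("50000" by default) could be parsed into an object limit for a 2Q cache.
func newTwoQueueCache(conn string) (*lru.TwoQueueCache, error) {
	limit, err := strconv.Atoi(strings.TrimSpace(conn))
	if err != nil {
		return nil, fmt.Errorf("invalid twoqueue cache size %q: %v", conn, err)
	}
	// New2Q rejects non-positive sizes, so no extra bounds check is needed here.
	return lru.New2Q(limit)
}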