author     wxiaoguang <wxiaoguang@gmail.com>    2023-05-22 06:35:11 +0800
committer  GitHub <noreply@github.com>          2023-05-21 22:35:11 +0000
commit     4647660776436f0a83129b4ceb8426b1fb0599bb
tree       465ae0fc7cb7c4b0b30f6600a701c240cdf2a3a6  /modules
parent     65dff8e36459a7852c40ebb4732a598ddb9bf915
Rewrite logger system (#24726)
## ⚠️ Breaking

The `log.<mode>.<logger>` style config has been dropped. If you used it, please check the new config manual & app.example.ini to make your instance output logs as expected.

Although many legacy options still work, it's encouraged to upgrade to the new options.

The SMTP logger is deleted because SMTP is not suitable for collecting logs.

If you have manually configured Gitea log options, please confirm the logger system works as expected after upgrading.

## Description

Close #12082 and maybe more log-related issues, and resolve some related FIXMEs in old code (which seemed unfixable before).

Just like the queue rewrite (#24505): make the code maintainable, clear legacy bugs, and add the ability to support more writers (eg: JSON, structured log).

There is a new document (with examples): `logging-config.en-us.md`

This PR is safer than the queue rewrite, because it's just for logging; it won't break other logic.

## The old problems

The logging system is quite old and difficult to maintain:

* Unclear concepts: Logger, NamedLogger, MultiChannelledLogger, SubLogger, EventLogger, WriterLogger, etc.
* For some code it is difficult to know whether it is right: `log.DelNamedLogger("console")` vs `log.DelNamedLogger(log.DEFAULT)` vs `log.DelLogger("console")`
* The old system heavily depends on the ini config system; it's difficult to create a new logger for a different purpose, and it's very fragile.
* The "color" trick is difficult to use and read, many colors are unnecessary, and in the future structured logging could help.
* It's difficult to add other log formats, eg: JSON format.
* The log outputter doesn't have full control of its goroutine, so it's difficult to give outputters advanced behaviors.
* Logs could be lost in some cases, eg: no Fatal error when using the CLI.
* Config options are passed by JSON, which is quite fragile.
* The INI package makes the keys in the `[log]` section visible in `[log.sub1]` and `[log.sub1.subA]`; this behavior is quite fragile, causes more unclear problems, and there is no strong requirement to support the `log.<mode>.<logger>` syntax.

## The new design

See `logger.go` for documents.

## Screenshot

<details>

![image](https://github.com/go-gitea/gitea/assets/2114189/4462d713-ba39-41f5-bb08-de912e67e1ff)
![image](https://github.com/go-gitea/gitea/assets/2114189/b188035e-f691-428b-8b2d-ff7b2199b2f9)
![image](https://github.com/go-gitea/gitea/assets/2114189/132e9745-1c3b-4e00-9e0d-15eaea495dee)

</details>

## TODO

* [x] add some new tests
* [x] fix some tests
* [x] test some sub-commands (manually ....)

---------

Co-authored-by: Jason Song <i@wolfogre.com>
Co-authored-by: delvh <dev.lh@web.de>
Co-authored-by: Giteabot <teabot@gitea.io>
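For orientation, here is a minimal sketch (not part of this commit) of how callers in this diff use the reworked `log` package: the default logger is fetched with `log.GetLogger(log.DEFAULT)`, leveled printf-style methods replace the old `SendLog` calls, and `log.PrintfLogger` adapts a level method for libraries that expect a `Printf`-style logger (as done for the elasticsearch clients below). The names are taken from the changes in this diff; the authoritative documentation is in `logger.go`.

```go
package main

import (
	"code.gitea.io/gitea/modules/log"
)

func main() {
	// The default logger is still fetched by name; several callers below use it directly.
	logger := log.GetLogger(log.DEFAULT)

	// Leveled, printf-style logging replaces the old SendLog(...) call in access_log.go.
	logger.Info("request handled in %dms", 42)

	// PrintfLogger adapts a level method to a Printf-style interface,
	// as done for the elasticsearch client options in this diff.
	traceLog := &log.PrintfLogger{Logf: logger.Trace}
	traceLog.Printf("elastic trace: %s", "GET /_cluster/health")
}
```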
Diffstat (limited to 'modules')
-rw-r--r--  modules/context/access_log.go  5
-rw-r--r--  modules/doctor/doctor.go  74
-rw-r--r--  modules/git/git_test.go  3
-rw-r--r--  modules/graceful/manager.go  2
-rw-r--r--  modules/graceful/manager_unix.go  3
-rw-r--r--  modules/graceful/releasereopen/releasereopen.go  61
-rw-r--r--  modules/graceful/releasereopen/releasereopen_test.go  43
-rw-r--r--  modules/indexer/code/elastic_search.go  20
-rw-r--r--  modules/indexer/issues/elastic_search.go  21
-rw-r--r--  modules/lfs/pointer.go  12
-rw-r--r--  modules/log/color.go  115
-rw-r--r--  modules/log/color_console.go  14
-rw-r--r--  modules/log/color_console_other.go (renamed from modules/log/console_other.go)  0
-rw-r--r--  modules/log/color_console_windows.go (renamed from modules/log/console_windows.go)  2
-rw-r--r--  modules/log/color_router.go (renamed from modules/log/colors_router.go)  60
-rw-r--r--  modules/log/colors.go  435
-rw-r--r--  modules/log/conn.go  137
-rw-r--r--  modules/log/conn_test.go  230
-rw-r--r--  modules/log/console.go  93
-rw-r--r--  modules/log/console_test.go  137
-rw-r--r--  modules/log/errors.go  61
-rw-r--r--  modules/log/event.go  460
-rw-r--r--  modules/log/event_format.go  246
-rw-r--r--  modules/log/event_format_test.go  57
-rw-r--r--  modules/log/event_writer.go  54
-rw-r--r--  modules/log/event_writer_base.go  160
-rw-r--r--  modules/log/event_writer_conn.go  111
-rw-r--r--  modules/log/event_writer_conn_test.go  75
-rw-r--r--  modules/log/event_writer_console.go  40
-rw-r--r--  modules/log/event_writer_file.go  48
-rw-r--r--  modules/log/file.go  283
-rw-r--r--  modules/log/file_test.go  235
-rw-r--r--  modules/log/flags.go  134
-rw-r--r--  modules/log/flags_test.go  30
-rw-r--r--  modules/log/init.go  35
-rw-r--r--  modules/log/level.go  99
-rw-r--r--  modules/log/log.go  305
-rw-r--r--  modules/log/log_test.go  152
-rw-r--r--  modules/log/logger.go  163
-rw-r--r--  modules/log/logger_global.go  83
-rw-r--r--  modules/log/logger_impl.go  239
-rw-r--r--  modules/log/logger_test.go  145
-rw-r--r--  modules/log/manager.go  142
-rw-r--r--  modules/log/manager_test.go  42
-rw-r--r--  modules/log/misc.go  78
-rw-r--r--  modules/log/multichannel.go  104
-rw-r--r--  modules/log/provider.go  25
-rw-r--r--  modules/log/smtp.go  114
-rw-r--r--  modules/log/smtp_test.go  85
-rw-r--r--  modules/log/stack.go  6
-rw-r--r--  modules/log/writer.go  269
-rw-r--r--  modules/log/writer_test.go  275
-rw-r--r--  modules/private/manager.go  14
-rw-r--r--  modules/setting/config_provider.go  52
-rw-r--r--  modules/setting/config_provider_test.go  66
-rw-r--r--  modules/setting/database.go  2
-rw-r--r--  modules/setting/log.go  487
-rw-r--r--  modules/setting/log_test.go  387
-rw-r--r--  modules/setting/repository.go  1
-rw-r--r--  modules/setting/server.go  1
-rw-r--r--  modules/setting/setting.go  15
-rw-r--r--  modules/ssh/ssh.go  16
-rw-r--r--  modules/templates/htmlrenderer.go  2
-rw-r--r--  modules/test/logchecker.go  67
-rw-r--r--  modules/test/logchecker_test.go  2
-rw-r--r--  modules/testlogger/testlogger.go  98
-rw-r--r--  modules/util/rotatingfilewriter/writer.go  246
-rw-r--r--  modules/util/rotatingfilewriter/writer_test.go  48
-rw-r--r--  modules/web/routing/logger.go  32
69 files changed, 3162 insertions, 4196 deletions
diff --git a/modules/context/access_log.go b/modules/context/access_log.go
index b6468d139b..9b649a6a01 100644
--- a/modules/context/access_log.go
+++ b/modules/context/access_log.go
@@ -95,10 +95,7 @@ func AccessLogger() func(http.Handler) http.Handler {
log.Error("Could not set up chi access logger: %v", err.Error())
}
- err = logger.SendLog(log.INFO, "", "", 0, buf.String(), "")
- if err != nil {
- log.Error("Could not set up chi access logger: %v", err.Error())
- }
+ logger.Info("%s", buf.String())
})
}
}
diff --git a/modules/doctor/doctor.go b/modules/doctor/doctor.go
index 32eb5938c3..10838a7512 100644
--- a/modules/doctor/doctor.go
+++ b/modules/doctor/doctor.go
@@ -6,6 +6,7 @@ package doctor
import (
"context"
"fmt"
+ "os"
"sort"
"strings"
@@ -26,27 +27,9 @@ type Check struct {
Priority int
}
-type wrappedLevelLogger struct {
- log.LevelLogger
-}
-
-func (w *wrappedLevelLogger) Log(skip int, level log.Level, format string, v ...interface{}) error {
- return w.LevelLogger.Log(
- skip+1,
- level,
- " - %s "+format,
- append(
- []interface{}{
- log.NewColoredValueBytes(
- fmt.Sprintf("[%s]", strings.ToUpper(level.String()[0:1])),
- level.Color()),
- }, v...)...)
-}
-
-func initDBDisableConsole(ctx context.Context, disableConsole bool) error {
+func initDBSkipLogger(ctx context.Context) error {
setting.Init(&setting.Options{})
setting.LoadDBSetting()
- setting.InitSQLLog(disableConsole)
if err := db.InitEngine(ctx); err != nil {
return fmt.Errorf("db.InitEngine: %w", err)
}
@@ -57,30 +40,61 @@ func initDBDisableConsole(ctx context.Context, disableConsole bool) error {
return nil
}
+type doctorCheckLogger struct {
+ colorize bool
+}
+
+var _ log.BaseLogger = (*doctorCheckLogger)(nil)
+
+func (d *doctorCheckLogger) Log(skip int, level log.Level, format string, v ...any) {
+ _, _ = fmt.Fprintf(os.Stdout, format+"\n", v...)
+}
+
+func (d *doctorCheckLogger) GetLevel() log.Level {
+ return log.TRACE
+}
+
+type doctorCheckStepLogger struct {
+ colorize bool
+}
+
+var _ log.BaseLogger = (*doctorCheckStepLogger)(nil)
+
+func (d *doctorCheckStepLogger) Log(skip int, level log.Level, format string, v ...any) {
+ levelChar := fmt.Sprintf("[%s]", strings.ToUpper(level.String()[0:1]))
+ var levelArg any = levelChar
+ if d.colorize {
+ levelArg = log.NewColoredValue(levelChar, level.ColorAttributes()...)
+ }
+ args := append([]any{levelArg}, v...)
+ _, _ = fmt.Fprintf(os.Stdout, " - %s "+format+"\n", args...)
+}
+
+func (d *doctorCheckStepLogger) GetLevel() log.Level {
+ return log.TRACE
+}
+
// Checks is the list of available commands
var Checks []*Check
// RunChecks runs the doctor checks for the provided list
-func RunChecks(ctx context.Context, logger log.Logger, autofix bool, checks []*Check) error {
- wrappedLogger := log.LevelLoggerLogger{
- LevelLogger: &wrappedLevelLogger{logger},
- }
-
+func RunChecks(ctx context.Context, colorize, autofix bool, checks []*Check) error {
+ // the checks output logs by a special logger, they do not use the default logger
+ logger := log.BaseLoggerToGeneralLogger(&doctorCheckLogger{colorize: colorize})
+ loggerStep := log.BaseLoggerToGeneralLogger(&doctorCheckStepLogger{colorize: colorize})
dbIsInit := false
for i, check := range checks {
if !dbIsInit && !check.SkipDatabaseInitialization {
// Only open database after the most basic configuration check
- setting.Log.EnableXORMLog = false
- if err := initDBDisableConsole(ctx, true); err != nil {
+ if err := initDBSkipLogger(ctx); err != nil {
logger.Error("Error whilst initializing the database: %v", err)
logger.Error("Check if you are using the right config file. You can use a --config directive to specify one.")
return nil
}
dbIsInit = true
}
- logger.Info("[%d] %s", log.NewColoredIDValue(i+1), check.Title)
- logger.Flush()
- if err := check.Run(ctx, &wrappedLogger, autofix); err != nil {
+ logger.Info("\n[%d] %s", i+1, check.Title)
+ if err := check.Run(ctx, loggerStep, autofix); err != nil {
if check.AbortIfFailed {
logger.Critical("FAIL")
return err
@@ -88,9 +102,9 @@ func RunChecks(ctx context.Context, logger log.Logger, autofix bool, checks []*C
logger.Error("ERROR")
} else {
logger.Info("OK")
- logger.Flush()
}
}
+ logger.Info("\nAll done.")
return nil
}
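The doctor checks now log through small `log.BaseLogger` implementations instead of a wrapped `log.Logger`, and `RunChecks` takes a `colorize` flag rather than an injected logger. A hedged sketch of calling the new signature (the CLI wiring itself is not part of this hunk, so the surrounding code is illustrative only):

```go
package main

import (
	"context"
	"fmt"

	"code.gitea.io/gitea/modules/doctor"
	"code.gitea.io/gitea/modules/log"
)

func main() {
	// colorize replaces the injected logger of the old signature; autofix stays a plain bool.
	if err := doctor.RunChecks(context.Background(), log.CanColorStdout, false, doctor.Checks); err != nil {
		fmt.Println("doctor checks failed:", err)
	}
}
```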
diff --git a/modules/git/git_test.go b/modules/git/git_test.go
index e3bfe496da..25eb308531 100644
--- a/modules/git/git_test.go
+++ b/modules/git/git_test.go
@@ -10,7 +10,6 @@ import (
"strings"
"testing"
- "code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
@@ -18,8 +17,6 @@ import (
)
func testRun(m *testing.M) error {
- _ = log.NewLogger(1000, "console", "console", `{"level":"trace","stacktracelevel":"NONE","stderr":true}`)
-
gitHomePath, err := os.MkdirTemp(os.TempDir(), "git-home")
if err != nil {
return fmt.Errorf("unable to create temp dir: %w", err)
diff --git a/modules/graceful/manager.go b/modules/graceful/manager.go
index c7b4c101ef..d32788092d 100644
--- a/modules/graceful/manager.go
+++ b/modules/graceful/manager.go
@@ -30,7 +30,7 @@ const (
// * HTTP redirection fallback
// * Builtin SSH listener
//
-// If you add an additional place you must increment this number
+// If you add a new place you must increment this number
// and add a function to call manager.InformCleanup if it's not going to be used
const numberOfServersToCreate = 4
diff --git a/modules/graceful/manager_unix.go b/modules/graceful/manager_unix.go
index 5d72111bff..d89f6fc725 100644
--- a/modules/graceful/manager_unix.go
+++ b/modules/graceful/manager_unix.go
@@ -16,6 +16,7 @@ import (
"syscall"
"time"
+ "code.gitea.io/gitea/modules/graceful/releasereopen"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/process"
"code.gitea.io/gitea/modules/setting"
@@ -185,7 +186,7 @@ func (g *Manager) handleSignals(ctx context.Context) {
case syscall.SIGUSR1:
log.Warn("PID %d. Received SIGUSR1. Releasing and reopening logs", pid)
g.notify(statusMsg("Releasing and reopening logs"))
- if err := log.ReleaseReopen(); err != nil {
+ if err := releasereopen.GetManager().ReleaseReopen(); err != nil {
log.Error("Error whilst releasing and reopening logs: %v", err)
}
case syscall.SIGUSR2:
diff --git a/modules/graceful/releasereopen/releasereopen.go b/modules/graceful/releasereopen/releasereopen.go
new file mode 100644
index 0000000000..de5b07c0a6
--- /dev/null
+++ b/modules/graceful/releasereopen/releasereopen.go
@@ -0,0 +1,61 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package releasereopen
+
+import (
+ "errors"
+ "sync"
+)
+
+type ReleaseReopener interface {
+ ReleaseReopen() error
+}
+
+type Manager struct {
+ mu sync.Mutex
+ counter int64
+
+ releaseReopeners map[int64]ReleaseReopener
+}
+
+func (r *Manager) Register(rr ReleaseReopener) (cancel func()) {
+ r.mu.Lock()
+ defer r.mu.Unlock()
+
+ r.counter++
+ currentCounter := r.counter
+ r.releaseReopeners[r.counter] = rr
+
+ return func() {
+ r.mu.Lock()
+ defer r.mu.Unlock()
+
+ delete(r.releaseReopeners, currentCounter)
+ }
+}
+
+func (r *Manager) ReleaseReopen() error {
+ r.mu.Lock()
+ defer r.mu.Unlock()
+
+ var errs []error
+ for _, rr := range r.releaseReopeners {
+ if err := rr.ReleaseReopen(); err != nil {
+ errs = append(errs, err)
+ }
+ }
+ return errors.Join(errs...)
+}
+
+func GetManager() *Manager {
+ return manager
+}
+
+func NewManager() *Manager {
+ return &Manager{
+ releaseReopeners: make(map[int64]ReleaseReopener),
+ }
+}
+
+var manager = NewManager()
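A hedged usage sketch of the new `releasereopen` package: a writer registers itself so that the SIGUSR1 handler in `manager_unix.go` (which now calls `releasereopen.GetManager().ReleaseReopen()`) can ask it to reopen its output. The `fileWriter` type here is hypothetical; only the `releasereopen` calls come from this diff.

```go
package main

import (
	"os"

	"code.gitea.io/gitea/modules/graceful/releasereopen"
)

// fileWriter is a hypothetical writer that keeps a log file open.
type fileWriter struct {
	name string
	f    *os.File
}

// ReleaseReopen implements releasereopen.ReleaseReopener: close and reopen the
// file so an external rotation (e.g. logrotate plus SIGUSR1) takes effect.
func (w *fileWriter) ReleaseReopen() error {
	if w.f != nil {
		_ = w.f.Close()
	}
	f, err := os.OpenFile(w.name, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0o644)
	if err != nil {
		return err
	}
	w.f = f
	return nil
}

func main() {
	w := &fileWriter{name: "gitea.log"}
	cancel := releasereopen.GetManager().Register(w) // register with the global manager
	defer cancel()                                   // deregister when the writer shuts down

	// Elsewhere (e.g. on SIGUSR1) the manager fans out to every registered writer:
	_ = releasereopen.GetManager().ReleaseReopen()
}
```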
diff --git a/modules/graceful/releasereopen/releasereopen_test.go b/modules/graceful/releasereopen/releasereopen_test.go
new file mode 100644
index 0000000000..0e8b48257d
--- /dev/null
+++ b/modules/graceful/releasereopen/releasereopen_test.go
@@ -0,0 +1,43 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package releasereopen
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+type testReleaseReopener struct {
+ count int
+}
+
+func (t *testReleaseReopener) ReleaseReopen() error {
+ t.count++
+ return nil
+}
+
+func TestManager(t *testing.T) {
+ m := NewManager()
+
+ t1 := &testReleaseReopener{}
+ t2 := &testReleaseReopener{}
+ t3 := &testReleaseReopener{}
+
+ _ = m.Register(t1)
+ c2 := m.Register(t2)
+ _ = m.Register(t3)
+
+ assert.NoError(t, m.ReleaseReopen())
+ assert.EqualValues(t, 1, t1.count)
+ assert.EqualValues(t, 1, t2.count)
+ assert.EqualValues(t, 1, t3.count)
+
+ c2()
+
+ assert.NoError(t, m.ReleaseReopen())
+ assert.EqualValues(t, 2, t1.count)
+ assert.EqualValues(t, 1, t2.count)
+ assert.EqualValues(t, 2, t3.count)
+}
diff --git a/modules/indexer/code/elastic_search.go b/modules/indexer/code/elastic_search.go
index 6097538009..0e56a86588 100644
--- a/modules/indexer/code/elastic_search.go
+++ b/modules/indexer/code/elastic_search.go
@@ -49,14 +49,6 @@ type ElasticSearchIndexer struct {
lock sync.RWMutex
}
-type elasticLogger struct {
- log.Logger
-}
-
-func (l elasticLogger) Printf(format string, args ...interface{}) {
- _ = l.Logger.Log(2, l.Logger.GetLevel(), format, args...)
-}
-
// NewElasticSearchIndexer creates a new elasticsearch indexer
func NewElasticSearchIndexer(url, indexerName string) (*ElasticSearchIndexer, bool, error) {
opts := []elastic.ClientOptionFunc{
@@ -66,15 +58,11 @@ func NewElasticSearchIndexer(url, indexerName string) (*ElasticSearchIndexer, bo
elastic.SetGzip(false),
}
- logger := elasticLogger{log.GetLogger(log.DEFAULT)}
+ logger := log.GetLogger(log.DEFAULT)
- if logger.GetLevel() == log.TRACE || logger.GetLevel() == log.DEBUG {
- opts = append(opts, elastic.SetTraceLog(logger))
- } else if logger.GetLevel() == log.ERROR || logger.GetLevel() == log.CRITICAL || logger.GetLevel() == log.FATAL {
- opts = append(opts, elastic.SetErrorLog(logger))
- } else if logger.GetLevel() == log.INFO || logger.GetLevel() == log.WARN {
- opts = append(opts, elastic.SetInfoLog(logger))
- }
+ opts = append(opts, elastic.SetTraceLog(&log.PrintfLogger{Logf: logger.Trace}))
+ opts = append(opts, elastic.SetInfoLog(&log.PrintfLogger{Logf: logger.Info}))
+ opts = append(opts, elastic.SetErrorLog(&log.PrintfLogger{Logf: logger.Error}))
client, err := elastic.NewClient(opts...)
if err != nil {
diff --git a/modules/indexer/issues/elastic_search.go b/modules/indexer/issues/elastic_search.go
index fd1dd4b452..ec62f857ad 100644
--- a/modules/indexer/issues/elastic_search.go
+++ b/modules/indexer/issues/elastic_search.go
@@ -29,14 +29,6 @@ type ElasticSearchIndexer struct {
lock sync.RWMutex
}
-type elasticLogger struct {
- log.LevelLogger
-}
-
-func (l elasticLogger) Printf(format string, args ...interface{}) {
- _ = l.Log(2, l.GetLevel(), format, args...)
-}
-
// NewElasticSearchIndexer creates a new elasticsearch indexer
func NewElasticSearchIndexer(url, indexerName string) (*ElasticSearchIndexer, error) {
opts := []elastic.ClientOptionFunc{
@@ -46,15 +38,10 @@ func NewElasticSearchIndexer(url, indexerName string) (*ElasticSearchIndexer, er
elastic.SetGzip(false),
}
- logger := elasticLogger{log.GetLogger(log.DEFAULT)}
-
- if logger.GetLevel() == log.TRACE || logger.GetLevel() == log.DEBUG {
- opts = append(opts, elastic.SetTraceLog(logger))
- } else if logger.GetLevel() == log.ERROR || logger.GetLevel() == log.CRITICAL || logger.GetLevel() == log.FATAL {
- opts = append(opts, elastic.SetErrorLog(logger))
- } else if logger.GetLevel() == log.INFO || logger.GetLevel() == log.WARN {
- opts = append(opts, elastic.SetInfoLog(logger))
- }
+ logger := log.GetLogger(log.DEFAULT)
+ opts = append(opts, elastic.SetTraceLog(&log.PrintfLogger{Logf: logger.Trace}))
+ opts = append(opts, elastic.SetInfoLog(&log.PrintfLogger{Logf: logger.Info}))
+ opts = append(opts, elastic.SetErrorLog(&log.PrintfLogger{Logf: logger.Error}))
client, err := elastic.NewClient(opts...)
if err != nil {
diff --git a/modules/lfs/pointer.go b/modules/lfs/pointer.go
index f7f225bf1c..d7653e836c 100644
--- a/modules/lfs/pointer.go
+++ b/modules/lfs/pointer.go
@@ -13,8 +13,6 @@ import (
"strconv"
"strings"
- "code.gitea.io/gitea/modules/log"
-
"github.com/minio/sha256-simd"
)
@@ -113,15 +111,11 @@ func (p Pointer) RelativePath() string {
return path.Join(p.Oid[0:2], p.Oid[2:4], p.Oid[4:])
}
-// ColorFormat provides a basic color format for a Team
-func (p Pointer) ColorFormat(s fmt.State) {
+func (p Pointer) LogString() string {
if p.Oid == "" && p.Size == 0 {
- log.ColorFprintf(s, "<empty>")
- return
+ return "<LFSPointer empty>"
}
- log.ColorFprintf(s, "%s:%d",
- log.NewColoredIDValue(p.Oid),
- p.Size)
+ return fmt.Sprintf("<LFSPointer %s:%d>", p.Oid, p.Size)
}
// GeneratePointer generates a pointer for arbitrary content
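`ColorFormat(fmt.State)` implementations are replaced by a plain `LogString() string`. How the logger discovers this method (presumably via an interface along the lines of `LogStringer`) is an assumption here; the interface itself is not shown in this hunk. A sketch of applying the same pattern to a hypothetical type:

```go
package main

import "fmt"

// Commit is a stand-in type; the pattern mirrors Pointer.LogString above.
type Commit struct {
	ID      string
	Message string
}

// LogString returns a short, color-free representation for log output.
// The consuming interface is assumed, not shown in this diff.
func (c Commit) LogString() string {
	if c.ID == "" {
		return "<Commit empty>"
	}
	return fmt.Sprintf("<Commit %s>", c.ID)
}

func main() {
	fmt.Println(Commit{ID: "4647660"}.LogString()) // <Commit 4647660>
}
```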
diff --git a/modules/log/color.go b/modules/log/color.go
new file mode 100644
index 0000000000..dcbba5f6d6
--- /dev/null
+++ b/modules/log/color.go
@@ -0,0 +1,115 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package log
+
+import (
+ "fmt"
+ "strconv"
+)
+
+const escape = "\033"
+
+// ColorAttribute defines a single SGR Code
+type ColorAttribute int
+
+// Base ColorAttributes
+const (
+ Reset ColorAttribute = iota
+ Bold
+ Faint
+ Italic
+ Underline
+ BlinkSlow
+ BlinkRapid
+ ReverseVideo
+ Concealed
+ CrossedOut
+)
+
+// Foreground text colors
+const (
+ FgBlack ColorAttribute = iota + 30
+ FgRed
+ FgGreen
+ FgYellow
+ FgBlue
+ FgMagenta
+ FgCyan
+ FgWhite
+)
+
+// Foreground Hi-Intensity text colors
+const (
+ FgHiBlack ColorAttribute = iota + 90
+ FgHiRed
+ FgHiGreen
+ FgHiYellow
+ FgHiBlue
+ FgHiMagenta
+ FgHiCyan
+ FgHiWhite
+)
+
+// Background text colors
+const (
+ BgBlack ColorAttribute = iota + 40
+ BgRed
+ BgGreen
+ BgYellow
+ BgBlue
+ BgMagenta
+ BgCyan
+ BgWhite
+)
+
+// Background Hi-Intensity text colors
+const (
+ BgHiBlack ColorAttribute = iota + 100
+ BgHiRed
+ BgHiGreen
+ BgHiYellow
+ BgHiBlue
+ BgHiMagenta
+ BgHiCyan
+ BgHiWhite
+)
+
+var (
+ resetBytes = ColorBytes(Reset)
+ fgCyanBytes = ColorBytes(FgCyan)
+ fgGreenBytes = ColorBytes(FgGreen)
+)
+
+type ColoredValue struct {
+ v any
+ colors []ColorAttribute
+}
+
+func (c *ColoredValue) Format(f fmt.State, verb rune) {
+ _, _ = f.Write(ColorBytes(c.colors...))
+ s := fmt.Sprintf(fmt.FormatString(f, verb), c.v)
+ _, _ = f.Write([]byte(s))
+ _, _ = f.Write(resetBytes)
+}
+
+func NewColoredValue(v any, color ...ColorAttribute) *ColoredValue {
+ return &ColoredValue{v: v, colors: color}
+}
+
+// ColorBytes converts a list of ColorAttributes to a byte array
+func ColorBytes(attrs ...ColorAttribute) []byte {
+ bytes := make([]byte, 0, 20)
+ bytes = append(bytes, escape[0], '[')
+ if len(attrs) > 0 {
+ bytes = append(bytes, strconv.Itoa(int(attrs[0]))...)
+ for _, a := range attrs[1:] {
+ bytes = append(bytes, ';')
+ bytes = append(bytes, strconv.Itoa(int(a))...)
+ }
+ } else {
+ bytes = append(bytes, strconv.Itoa(int(Bold))...)
+ }
+ bytes = append(bytes, 'm')
+ return bytes
+}
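The new `ColoredValue` is simply a value plus a list of `ColorAttribute`s, rendered through `fmt.Formatter`. A short sketch (not part of this commit) of using it together with `ColorBytes`:

```go
package main

import (
	"fmt"

	"code.gitea.io/gitea/modules/log"
)

func main() {
	// ColoredValue implements fmt.Formatter: it emits the SGR codes, the
	// formatted value, then a reset sequence.
	ok := log.NewColoredValue("OK", log.Bold, log.FgGreen)
	fmt.Printf("status: %s\n", ok) // "OK" in bold green on an ANSI terminal

	// ColorBytes builds the raw escape sequence if you need it directly.
	fmt.Printf("%s plain red %s\n", log.ColorBytes(log.FgRed), log.ColorBytes(log.Reset))
}
```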
diff --git a/modules/log/color_console.go b/modules/log/color_console.go
new file mode 100644
index 0000000000..2658652ec6
--- /dev/null
+++ b/modules/log/color_console.go
@@ -0,0 +1,14 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package log
+
+// CanColorStdout reports if we can color the Stdout
+// Although we could do terminal sniffing and the like - in reality
+// most tools on *nix are happy to display ansi colors.
+// We will terminal sniff on Windows in console_windows.go
+var CanColorStdout = true
+
+// CanColorStderr reports if we can color the Stderr
+var CanColorStderr = true
diff --git a/modules/log/console_other.go b/modules/log/color_console_other.go
index c30be41544..c30be41544 100644
--- a/modules/log/console_other.go
+++ b/modules/log/color_console_other.go
diff --git a/modules/log/console_windows.go b/modules/log/color_console_windows.go
index 54dac12fa0..3f59e934da 100644
--- a/modules/log/console_windows.go
+++ b/modules/log/color_console_windows.go
@@ -20,7 +20,7 @@ func enableVTMode(console windows.Handle) bool {
// EnableVirtualTerminalProcessing is the console mode to allow ANSI code
// interpretation on the console. See:
// https://docs.microsoft.com/en-us/windows/console/setconsolemode
- // It only works on windows 10. Earlier terminals will fail with an err which we will
+ // It only works on Windows 10. Earlier terminals will fail with an err which we will
// handle to say don't color
mode |= windows.ENABLE_VIRTUAL_TERMINAL_PROCESSING
err = windows.SetConsoleMode(console, mode)
diff --git a/modules/log/colors_router.go b/modules/log/color_router.go
index efc7337b6b..80e7e02079 100644
--- a/modules/log/colors_router.go
+++ b/modules/log/color_router.go
@@ -8,15 +8,15 @@ import (
"time"
)
-var statusToColor = map[int][]byte{
- 100: ColorBytes(Bold),
- 200: ColorBytes(FgGreen),
- 300: ColorBytes(FgYellow),
- 304: ColorBytes(FgCyan),
- 400: ColorBytes(Bold, FgRed),
- 401: ColorBytes(Bold, FgMagenta),
- 403: ColorBytes(Bold, FgMagenta),
- 500: ColorBytes(Bold, BgRed),
+var statusToColor = map[int][]ColorAttribute{
+ 100: {Bold},
+ 200: {FgGreen},
+ 300: {FgYellow},
+ 304: {FgCyan},
+ 400: {Bold, FgRed},
+ 401: {Bold, FgMagenta},
+ 403: {Bold, FgMagenta},
+ 500: {Bold, BgRed},
}
// ColoredStatus adds colors for HTTP status
@@ -26,30 +26,30 @@ func ColoredStatus(status int, s ...string) *ColoredValue {
color, ok = statusToColor[(status/100)*100]
}
if !ok {
- color = fgBoldBytes
+ color = []ColorAttribute{Bold}
}
if len(s) > 0 {
- return NewColoredValueBytes(s[0], &color)
+ return NewColoredValue(s[0], color...)
}
- return NewColoredValueBytes(status, &color)
+ return NewColoredValue(status, color...)
}
-var methodToColor = map[string][]byte{
- "GET": ColorBytes(FgBlue),
- "POST": ColorBytes(FgGreen),
- "DELETE": ColorBytes(FgRed),
- "PATCH": ColorBytes(FgCyan),
- "PUT": ColorBytes(FgYellow, Faint),
- "HEAD": ColorBytes(FgBlue, Faint),
+var methodToColor = map[string][]ColorAttribute{
+ "GET": {FgBlue},
+ "POST": {FgGreen},
+ "DELETE": {FgRed},
+ "PATCH": {FgCyan},
+ "PUT": {FgYellow, Faint},
+ "HEAD": {FgBlue, Faint},
}
// ColoredMethod adds colors for HTTP methods on log
func ColoredMethod(method string) *ColoredValue {
color, ok := methodToColor[method]
if !ok {
- return NewColoredValueBytes(method, &fgBoldBytes)
+ return NewColoredValue(method, Bold)
}
- return NewColoredValueBytes(method, &color)
+ return NewColoredValue(method, color...)
}
var (
@@ -61,15 +61,15 @@ var (
10 * time.Second,
}
- durationColors = [][]byte{
- ColorBytes(FgGreen),
- ColorBytes(Bold),
- ColorBytes(FgYellow),
- ColorBytes(FgRed, Bold),
- ColorBytes(BgRed),
+ durationColors = [][]ColorAttribute{
+ {FgGreen},
+ {Bold},
+ {FgYellow},
+ {FgRed, Bold},
+ {BgRed},
}
- wayTooLong = ColorBytes(BgMagenta)
+ wayTooLong = BgMagenta
)
// ColoredTime converts the provided time to a ColoredValue for logging. The duration is always formatted in milliseconds.
@@ -80,8 +80,8 @@ func ColoredTime(duration time.Duration) *ColoredValue {
str := fmt.Sprintf("%.1fms", float64(duration.Microseconds())/1000)
for i, k := range durations {
if duration < k {
- return NewColoredValueBytes(str, &durationColors[i])
+ return NewColoredValue(str, durationColors[i]...)
}
}
- return NewColoredValueBytes(str, &wayTooLong)
+ return NewColoredValue(str, wayTooLong)
}
diff --git a/modules/log/colors.go b/modules/log/colors.go
deleted file mode 100644
index 85e205cb67..0000000000
--- a/modules/log/colors.go
+++ /dev/null
@@ -1,435 +0,0 @@
-// Copyright 2019 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package log
-
-import (
- "fmt"
- "io"
- "reflect"
- "strconv"
- "strings"
-)
-
-const escape = "\033"
-
-// ColorAttribute defines a single SGR Code
-type ColorAttribute int
-
-// Base ColorAttributes
-const (
- Reset ColorAttribute = iota
- Bold
- Faint
- Italic
- Underline
- BlinkSlow
- BlinkRapid
- ReverseVideo
- Concealed
- CrossedOut
-)
-
-// Foreground text colors
-const (
- FgBlack ColorAttribute = iota + 30
- FgRed
- FgGreen
- FgYellow
- FgBlue
- FgMagenta
- FgCyan
- FgWhite
-)
-
-// Foreground Hi-Intensity text colors
-const (
- FgHiBlack ColorAttribute = iota + 90
- FgHiRed
- FgHiGreen
- FgHiYellow
- FgHiBlue
- FgHiMagenta
- FgHiCyan
- FgHiWhite
-)
-
-// Background text colors
-const (
- BgBlack ColorAttribute = iota + 40
- BgRed
- BgGreen
- BgYellow
- BgBlue
- BgMagenta
- BgCyan
- BgWhite
-)
-
-// Background Hi-Intensity text colors
-const (
- BgHiBlack ColorAttribute = iota + 100
- BgHiRed
- BgHiGreen
- BgHiYellow
- BgHiBlue
- BgHiMagenta
- BgHiCyan
- BgHiWhite
-)
-
-var colorAttributeToString = map[ColorAttribute]string{
- Reset: "Reset",
- Bold: "Bold",
- Faint: "Faint",
- Italic: "Italic",
- Underline: "Underline",
- BlinkSlow: "BlinkSlow",
- BlinkRapid: "BlinkRapid",
- ReverseVideo: "ReverseVideo",
- Concealed: "Concealed",
- CrossedOut: "CrossedOut",
- FgBlack: "FgBlack",
- FgRed: "FgRed",
- FgGreen: "FgGreen",
- FgYellow: "FgYellow",
- FgBlue: "FgBlue",
- FgMagenta: "FgMagenta",
- FgCyan: "FgCyan",
- FgWhite: "FgWhite",
- FgHiBlack: "FgHiBlack",
- FgHiRed: "FgHiRed",
- FgHiGreen: "FgHiGreen",
- FgHiYellow: "FgHiYellow",
- FgHiBlue: "FgHiBlue",
- FgHiMagenta: "FgHiMagenta",
- FgHiCyan: "FgHiCyan",
- FgHiWhite: "FgHiWhite",
- BgBlack: "BgBlack",
- BgRed: "BgRed",
- BgGreen: "BgGreen",
- BgYellow: "BgYellow",
- BgBlue: "BgBlue",
- BgMagenta: "BgMagenta",
- BgCyan: "BgCyan",
- BgWhite: "BgWhite",
- BgHiBlack: "BgHiBlack",
- BgHiRed: "BgHiRed",
- BgHiGreen: "BgHiGreen",
- BgHiYellow: "BgHiYellow",
- BgHiBlue: "BgHiBlue",
- BgHiMagenta: "BgHiMagenta",
- BgHiCyan: "BgHiCyan",
- BgHiWhite: "BgHiWhite",
-}
-
-func (c *ColorAttribute) String() string {
- return colorAttributeToString[*c]
-}
-
-var colorAttributeFromString = map[string]ColorAttribute{}
-
-// ColorAttributeFromString will return a ColorAttribute given a string
-func ColorAttributeFromString(from string) ColorAttribute {
- lowerFrom := strings.TrimSpace(strings.ToLower(from))
- return colorAttributeFromString[lowerFrom]
-}
-
-// ColorString converts a list of ColorAttributes to a color string
-func ColorString(attrs ...ColorAttribute) string {
- return string(ColorBytes(attrs...))
-}
-
-// ColorBytes converts a list of ColorAttributes to a byte array
-func ColorBytes(attrs ...ColorAttribute) []byte {
- bytes := make([]byte, 0, 20)
- bytes = append(bytes, escape[0], '[')
- if len(attrs) > 0 {
- bytes = append(bytes, strconv.Itoa(int(attrs[0]))...)
- for _, a := range attrs[1:] {
- bytes = append(bytes, ';')
- bytes = append(bytes, strconv.Itoa(int(a))...)
- }
- } else {
- bytes = append(bytes, strconv.Itoa(int(Bold))...)
- }
- bytes = append(bytes, 'm')
- return bytes
-}
-
-var levelToColor = map[Level][]byte{
- TRACE: ColorBytes(Bold, FgCyan),
- DEBUG: ColorBytes(Bold, FgBlue),
- INFO: ColorBytes(Bold, FgGreen),
- WARN: ColorBytes(Bold, FgYellow),
- ERROR: ColorBytes(Bold, FgRed),
- CRITICAL: ColorBytes(Bold, BgMagenta),
- FATAL: ColorBytes(Bold, BgRed),
- NONE: ColorBytes(Reset),
-}
-
-var (
- resetBytes = ColorBytes(Reset)
- fgCyanBytes = ColorBytes(FgCyan)
- fgGreenBytes = ColorBytes(FgGreen)
- fgBoldBytes = ColorBytes(Bold)
-)
-
-type protectedANSIWriterMode int
-
-const (
- escapeAll protectedANSIWriterMode = iota
- allowColor
- removeColor
-)
-
-type protectedANSIWriter struct {
- w io.Writer
- mode protectedANSIWriterMode
-}
-
-// Write will protect against unusual characters
-func (c *protectedANSIWriter) Write(bytes []byte) (int, error) {
- end := len(bytes)
- totalWritten := 0
-normalLoop:
- for i := 0; i < end; {
- lasti := i
-
- if c.mode == escapeAll {
- for i < end && (bytes[i] >= ' ' || bytes[i] == '\n' || bytes[i] == '\t') {
- i++
- }
- } else {
- // Allow tabs if we're not escaping everything
- for i < end && (bytes[i] >= ' ' || bytes[i] == '\t') {
- i++
- }
- }
-
- if i > lasti {
- written, err := c.w.Write(bytes[lasti:i])
- totalWritten += written
- if err != nil {
- return totalWritten, err
- }
-
- }
- if i >= end {
- break
- }
-
- // If we're not just escaping all we should prefix all newlines with a \t
- if c.mode != escapeAll {
- if bytes[i] == '\n' {
- written, err := c.w.Write([]byte{'\n', '\t'})
- if written > 0 {
- totalWritten++
- }
- if err != nil {
- return totalWritten, err
- }
- i++
- continue normalLoop
- }
-
- if bytes[i] == escape[0] && i+1 < end && bytes[i+1] == '[' {
- for j := i + 2; j < end; j++ {
- if bytes[j] >= '0' && bytes[j] <= '9' {
- continue
- }
- if bytes[j] == ';' {
- continue
- }
- if bytes[j] == 'm' {
- if c.mode == allowColor {
- written, err := c.w.Write(bytes[i : j+1])
- totalWritten += written
- if err != nil {
- return totalWritten, err
- }
- } else {
- totalWritten = j
- }
- i = j + 1
- continue normalLoop
- }
- break
- }
- }
- }
-
- // Process naughty character
- if _, err := fmt.Fprintf(c.w, `\%#03o`, bytes[i]); err != nil {
- return totalWritten, err
- }
- i++
- totalWritten++
- }
- return totalWritten, nil
-}
-
-// ColorSprintf returns a colored string from a format and arguments
-// arguments will be wrapped in ColoredValues to protect against color spoofing
-func ColorSprintf(format string, args ...interface{}) string {
- if len(args) > 0 {
- v := make([]interface{}, len(args))
- for i := 0; i < len(v); i++ {
- v[i] = NewColoredValuePointer(&args[i])
- }
- return fmt.Sprintf(format, v...)
- }
- return format
-}
-
-// ColorFprintf will write to the provided writer similar to ColorSprintf
-func ColorFprintf(w io.Writer, format string, args ...interface{}) (int, error) {
- if len(args) > 0 {
- v := make([]interface{}, len(args))
- for i := 0; i < len(v); i++ {
- v[i] = NewColoredValuePointer(&args[i])
- }
- return fmt.Fprintf(w, format, v...)
- }
- return fmt.Fprint(w, format)
-}
-
-// ColorFormatted structs provide their own colored string when formatted with ColorSprintf
-type ColorFormatted interface {
- // ColorFormat provides the colored representation of the value
- ColorFormat(s fmt.State)
-}
-
-var colorFormattedType = reflect.TypeOf((*ColorFormatted)(nil)).Elem()
-
-// ColoredValue will Color the provided value
-type ColoredValue struct {
- colorBytes *[]byte
- resetBytes *[]byte
- Value *interface{}
-}
-
-// NewColoredValue is a helper function to create a ColoredValue from a Value
-// If no color is provided it defaults to Bold with standard Reset
-// If a ColoredValue is provided it is not changed
-func NewColoredValue(value interface{}, color ...ColorAttribute) *ColoredValue {
- return NewColoredValuePointer(&value, color...)
-}
-
-// NewColoredValuePointer is a helper function to create a ColoredValue from a Value Pointer
-// If no color is provided it defaults to Bold with standard Reset
-// If a ColoredValue is provided it is not changed
-func NewColoredValuePointer(value *interface{}, color ...ColorAttribute) *ColoredValue {
- if val, ok := (*value).(*ColoredValue); ok {
- return val
- }
- if len(color) > 0 {
- bytes := ColorBytes(color...)
- return &ColoredValue{
- colorBytes: &bytes,
- resetBytes: &resetBytes,
- Value: value,
- }
- }
- return &ColoredValue{
- colorBytes: &fgBoldBytes,
- resetBytes: &resetBytes,
- Value: value,
- }
-}
-
-// NewColoredValueBytes creates a value from the provided value with color bytes
-// If a ColoredValue is provided it is not changed
-func NewColoredValueBytes(value interface{}, colorBytes *[]byte) *ColoredValue {
- if val, ok := value.(*ColoredValue); ok {
- return val
- }
- return &ColoredValue{
- colorBytes: colorBytes,
- resetBytes: &resetBytes,
- Value: &value,
- }
-}
-
-// NewColoredIDValue is a helper function to create a ColoredValue from a Value
-// The Value will be colored with FgCyan
-// If a ColoredValue is provided it is not changed
-func NewColoredIDValue(value interface{}) *ColoredValue {
- return NewColoredValueBytes(value, &fgCyanBytes)
-}
-
-// Format will format the provided value and protect against ANSI color spoofing within the value
-// If the wrapped value is ColorFormatted and the format is "%-v" then its ColorString will
-// be used. It is presumed that this ColorString is safe.
-func (cv *ColoredValue) Format(s fmt.State, c rune) {
- if c == 'v' && s.Flag('-') {
- if val, ok := (*cv.Value).(ColorFormatted); ok {
- val.ColorFormat(s)
- return
- }
- v := reflect.ValueOf(*cv.Value)
- t := v.Type()
-
- if reflect.PtrTo(t).Implements(colorFormattedType) {
- vp := reflect.New(t)
- vp.Elem().Set(v)
- val := vp.Interface().(ColorFormatted)
- val.ColorFormat(s)
- return
- }
- }
- s.Write(*cv.colorBytes)
- fmt.Fprintf(&protectedANSIWriter{w: s}, fmtString(s, c), *(cv.Value))
- s.Write(*cv.resetBytes)
-}
-
-// ColorFormatAsString returns the result of the ColorFormat without the color
-func ColorFormatAsString(colorVal ColorFormatted) string {
- s := new(strings.Builder)
- _, _ = ColorFprintf(&protectedANSIWriter{w: s, mode: removeColor}, "%-v", colorVal)
- return s.String()
-}
-
-// SetColorBytes will allow a user to set the colorBytes of a colored value
-func (cv *ColoredValue) SetColorBytes(colorBytes []byte) {
- cv.colorBytes = &colorBytes
-}
-
-// SetColorBytesPointer will allow a user to set the colorBytes pointer of a colored value
-func (cv *ColoredValue) SetColorBytesPointer(colorBytes *[]byte) {
- cv.colorBytes = colorBytes
-}
-
-// SetResetBytes will allow a user to set the resetBytes pointer of a colored value
-func (cv *ColoredValue) SetResetBytes(resetBytes []byte) {
- cv.resetBytes = &resetBytes
-}
-
-// SetResetBytesPointer will allow a user to set the resetBytes pointer of a colored value
-func (cv *ColoredValue) SetResetBytesPointer(resetBytes *[]byte) {
- cv.resetBytes = resetBytes
-}
-
-func fmtString(s fmt.State, c rune) string {
- var width, precision string
- base := make([]byte, 0, 8)
- base = append(base, '%')
- for _, c := range []byte(" +-#0") {
- if s.Flag(int(c)) {
- base = append(base, c)
- }
- }
- if w, ok := s.Width(); ok {
- width = strconv.Itoa(w)
- }
- if p, ok := s.Precision(); ok {
- precision = "." + strconv.Itoa(p)
- }
- return fmt.Sprintf("%s%s%s%c", base, width, precision, c)
-}
-
-func init() {
- for attr, from := range colorAttributeToString {
- colorAttributeFromString[strings.ToLower(from)] = attr
- }
-}
diff --git a/modules/log/conn.go b/modules/log/conn.go
deleted file mode 100644
index b21a744037..0000000000
--- a/modules/log/conn.go
+++ /dev/null
@@ -1,137 +0,0 @@
-// Copyright 2014 The Gogs Authors. All rights reserved.
-// Copyright 2019 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package log
-
-import (
- "fmt"
- "io"
- "net"
-
- "code.gitea.io/gitea/modules/json"
-)
-
-type connWriter struct {
- innerWriter io.WriteCloser
- ReconnectOnMsg bool `json:"reconnectOnMsg"`
- Reconnect bool `json:"reconnect"`
- Net string `json:"net"`
- Addr string `json:"addr"`
-}
-
-// Close the inner writer
-func (i *connWriter) Close() error {
- if i.innerWriter != nil {
- return i.innerWriter.Close()
- }
- return nil
-}
-
-// Write the data to the connection
-func (i *connWriter) Write(p []byte) (int, error) {
- if i.neededConnectOnMsg() {
- if err := i.connect(); err != nil {
- return 0, err
- }
- }
-
- if i.ReconnectOnMsg {
- defer i.innerWriter.Close()
- }
-
- return i.innerWriter.Write(p)
-}
-
-func (i *connWriter) neededConnectOnMsg() bool {
- if i.Reconnect {
- i.Reconnect = false
- return true
- }
-
- if i.innerWriter == nil {
- return true
- }
-
- return i.ReconnectOnMsg
-}
-
-func (i *connWriter) connect() error {
- if i.innerWriter != nil {
- i.innerWriter.Close()
- i.innerWriter = nil
- }
-
- conn, err := net.Dial(i.Net, i.Addr)
- if err != nil {
- return err
- }
-
- if tcpConn, ok := conn.(*net.TCPConn); ok {
- err = tcpConn.SetKeepAlive(true)
- if err != nil {
- return err
- }
- }
-
- i.innerWriter = conn
- return nil
-}
-
-func (i *connWriter) releaseReopen() error {
- if i.innerWriter != nil {
- return i.connect()
- }
- return nil
-}
-
-// ConnLogger implements LoggerProvider.
-// it writes messages in keep-live tcp connection.
-type ConnLogger struct {
- WriterLogger
- ReconnectOnMsg bool `json:"reconnectOnMsg"`
- Reconnect bool `json:"reconnect"`
- Net string `json:"net"`
- Addr string `json:"addr"`
-}
-
-// NewConn creates new ConnLogger returning as LoggerProvider.
-func NewConn() LoggerProvider {
- conn := new(ConnLogger)
- conn.Level = TRACE
- return conn
-}
-
-// Init inits connection writer with json config.
-// json config only need key "level".
-func (log *ConnLogger) Init(jsonconfig string) error {
- err := json.Unmarshal([]byte(jsonconfig), log)
- if err != nil {
- return fmt.Errorf("Unable to parse JSON: %w", err)
- }
- log.NewWriterLogger(&connWriter{
- ReconnectOnMsg: log.ReconnectOnMsg,
- Reconnect: log.Reconnect,
- Net: log.Net,
- Addr: log.Addr,
- }, log.Level)
- return nil
-}
-
-// Flush does nothing for this implementation
-func (log *ConnLogger) Flush() {
-}
-
-// GetName returns the default name for this implementation
-func (log *ConnLogger) GetName() string {
- return "conn"
-}
-
-// ReleaseReopen causes the ConnLogger to reconnect to the server
-func (log *ConnLogger) ReleaseReopen() error {
- return log.out.(*connWriter).releaseReopen()
-}
-
-func init() {
- Register("conn", NewConn)
-}
diff --git a/modules/log/conn_test.go b/modules/log/conn_test.go
deleted file mode 100644
index 445bd77653..0000000000
--- a/modules/log/conn_test.go
+++ /dev/null
@@ -1,230 +0,0 @@
-// Copyright 2019 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package log
-
-import (
- "fmt"
- "io"
- "net"
- "strings"
- "sync"
- "testing"
- "time"
-
- "github.com/stretchr/testify/assert"
-)
-
-func listenReadAndClose(t *testing.T, l net.Listener, expected string) {
- conn, err := l.Accept()
- assert.NoError(t, err)
- defer conn.Close()
- written, err := io.ReadAll(conn)
-
- assert.NoError(t, err)
- assert.Equal(t, expected, string(written))
-}
-
-func TestConnLogger(t *testing.T) {
- protocol := "tcp"
- address := ":3099"
-
- l, err := net.Listen(protocol, address)
- if err != nil {
- t.Fatal(err)
- }
- defer l.Close()
-
- prefix := "TestPrefix "
- level := INFO
- flags := LstdFlags | LUTC | Lfuncname
-
- logger := NewConn()
- connLogger := logger.(*ConnLogger)
-
- logger.Init(fmt.Sprintf("{\"prefix\":\"%s\",\"level\":\"%s\",\"flags\":%d,\"reconnectOnMsg\":%t,\"reconnect\":%t,\"net\":\"%s\",\"addr\":\"%s\"}", prefix, level.String(), flags, true, true, protocol, address))
-
- assert.Equal(t, flags, connLogger.Flags)
- assert.Equal(t, level, connLogger.Level)
- assert.Equal(t, level, logger.GetLevel())
-
- location, _ := time.LoadLocation("EST")
-
- date := time.Date(2019, time.January, 13, 22, 3, 30, 15, location)
-
- dateString := date.UTC().Format("2006/01/02 15:04:05")
-
- event := Event{
- level: INFO,
- msg: "TEST MSG",
- caller: "CALLER",
- filename: "FULL/FILENAME",
- line: 1,
- time: date,
- }
- expected := fmt.Sprintf("%s%s %s:%d:%s [%c] %s\n", prefix, dateString, event.filename, event.line, event.caller, strings.ToUpper(event.level.String())[0], event.msg)
- var wg sync.WaitGroup
- wg.Add(2)
- go func() {
- defer wg.Done()
- listenReadAndClose(t, l, expected)
- }()
- go func() {
- defer wg.Done()
- err := logger.LogEvent(&event)
- assert.NoError(t, err)
- }()
- wg.Wait()
-
- event.level = WARN
- expected = fmt.Sprintf("%s%s %s:%d:%s [%c] %s\n", prefix, dateString, event.filename, event.line, event.caller, strings.ToUpper(event.level.String())[0], event.msg)
- wg.Add(2)
- go func() {
- defer wg.Done()
- listenReadAndClose(t, l, expected)
- }()
- go func() {
- defer wg.Done()
- err := logger.LogEvent(&event)
- assert.NoError(t, err)
- }()
- wg.Wait()
-
- logger.Close()
-}
-
-func TestConnLoggerBadConfig(t *testing.T) {
- logger := NewConn()
-
- err := logger.Init("{")
- assert.Error(t, err)
- assert.Contains(t, err.Error(), "Unable to parse JSON")
- logger.Close()
-}
-
-func TestConnLoggerCloseBeforeSend(t *testing.T) {
- protocol := "tcp"
- address := ":3099"
-
- prefix := "TestPrefix "
- level := INFO
- flags := LstdFlags | LUTC | Lfuncname
-
- logger := NewConn()
-
- logger.Init(fmt.Sprintf("{\"prefix\":\"%s\",\"level\":\"%s\",\"flags\":%d,\"reconnectOnMsg\":%t,\"reconnect\":%t,\"net\":\"%s\",\"addr\":\"%s\"}", prefix, level.String(), flags, false, false, protocol, address))
- logger.Close()
-}
-
-func TestConnLoggerFailConnect(t *testing.T) {
- protocol := "tcp"
- address := ":3099"
-
- prefix := "TestPrefix "
- level := INFO
- flags := LstdFlags | LUTC | Lfuncname
-
- logger := NewConn()
-
- logger.Init(fmt.Sprintf("{\"prefix\":\"%s\",\"level\":\"%s\",\"flags\":%d,\"reconnectOnMsg\":%t,\"reconnect\":%t,\"net\":\"%s\",\"addr\":\"%s\"}", prefix, level.String(), flags, false, false, protocol, address))
-
- assert.Equal(t, level, logger.GetLevel())
-
- location, _ := time.LoadLocation("EST")
-
- date := time.Date(2019, time.January, 13, 22, 3, 30, 15, location)
-
- // dateString := date.UTC().Format("2006/01/02 15:04:05")
-
- event := Event{
- level: INFO,
- msg: "TEST MSG",
- caller: "CALLER",
- filename: "FULL/FILENAME",
- line: 1,
- time: date,
- }
-
- err := logger.LogEvent(&event)
- assert.Error(t, err)
-
- logger.Close()
-}
-
-func TestConnLoggerClose(t *testing.T) {
- protocol := "tcp"
- address := ":3099"
-
- l, err := net.Listen(protocol, address)
- if err != nil {
- t.Fatal(err)
- }
- defer l.Close()
-
- prefix := "TestPrefix "
- level := INFO
- flags := LstdFlags | LUTC | Lfuncname
-
- logger := NewConn()
- connLogger := logger.(*ConnLogger)
-
- logger.Init(fmt.Sprintf("{\"prefix\":\"%s\",\"level\":\"%s\",\"flags\":%d,\"reconnectOnMsg\":%t,\"reconnect\":%t,\"net\":\"%s\",\"addr\":\"%s\"}", prefix, level.String(), flags, false, false, protocol, address))
-
- assert.Equal(t, flags, connLogger.Flags)
- assert.Equal(t, level, connLogger.Level)
- assert.Equal(t, level, logger.GetLevel())
- location, _ := time.LoadLocation("EST")
-
- date := time.Date(2019, time.January, 13, 22, 3, 30, 15, location)
-
- dateString := date.UTC().Format("2006/01/02 15:04:05")
-
- event := Event{
- level: INFO,
- msg: "TEST MSG",
- caller: "CALLER",
- filename: "FULL/FILENAME",
- line: 1,
- time: date,
- }
- expected := fmt.Sprintf("%s%s %s:%d:%s [%c] %s\n", prefix, dateString, event.filename, event.line, event.caller, strings.ToUpper(event.level.String())[0], event.msg)
- var wg sync.WaitGroup
- wg.Add(2)
- go func() {
- defer wg.Done()
- err := logger.LogEvent(&event)
- assert.NoError(t, err)
- logger.Close()
- }()
- go func() {
- defer wg.Done()
- listenReadAndClose(t, l, expected)
- }()
- wg.Wait()
-
- logger = NewConn()
- connLogger = logger.(*ConnLogger)
-
- logger.Init(fmt.Sprintf("{\"prefix\":\"%s\",\"level\":\"%s\",\"flags\":%d,\"reconnectOnMsg\":%t,\"reconnect\":%t,\"net\":\"%s\",\"addr\":\"%s\"}", prefix, level.String(), flags, false, true, protocol, address))
-
- assert.Equal(t, flags, connLogger.Flags)
- assert.Equal(t, level, connLogger.Level)
- assert.Equal(t, level, logger.GetLevel())
-
- event.level = WARN
- expected = fmt.Sprintf("%s%s %s:%d:%s [%c] %s\n", prefix, dateString, event.filename, event.line, event.caller, strings.ToUpper(event.level.String())[0], event.msg)
- wg.Add(2)
- go func() {
- defer wg.Done()
- listenReadAndClose(t, l, expected)
- }()
- go func() {
- defer wg.Done()
- err := logger.LogEvent(&event)
- assert.NoError(t, err)
- logger.Close()
- }()
- wg.Wait()
- logger.Flush()
- logger.Close()
-}
diff --git a/modules/log/console.go b/modules/log/console.go
deleted file mode 100644
index ce0415d139..0000000000
--- a/modules/log/console.go
+++ /dev/null
@@ -1,93 +0,0 @@
-// Copyright 2014 The Gogs Authors. All rights reserved.
-// Copyright 2019 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package log
-
-import (
- "fmt"
- "io"
- "os"
-
- "code.gitea.io/gitea/modules/json"
-)
-
-// CanColorStdout reports if we can color the Stdout
-// Although we could do terminal sniffing and the like - in reality
-// most tools on *nix are happy to display ansi colors.
-// We will terminal sniff on Windows in console_windows.go
-var CanColorStdout = true
-
-// CanColorStderr reports if we can color the Stderr
-var CanColorStderr = true
-
-type nopWriteCloser struct {
- w io.WriteCloser
-}
-
-func (n *nopWriteCloser) Write(p []byte) (int, error) {
- return n.w.Write(p)
-}
-
-func (n *nopWriteCloser) Close() error {
- return nil
-}
-
-// ConsoleLogger implements LoggerProvider and writes messages to terminal.
-type ConsoleLogger struct {
- WriterLogger
- Stderr bool `json:"stderr"`
-}
-
-// NewConsoleLogger create ConsoleLogger returning as LoggerProvider.
-func NewConsoleLogger() LoggerProvider {
- log := &ConsoleLogger{}
- log.NewWriterLogger(&nopWriteCloser{
- w: os.Stdout,
- })
- return log
-}
-
-// Init inits connection writer with json config.
-// json config only need key "level".
-func (log *ConsoleLogger) Init(config string) error {
- err := json.Unmarshal([]byte(config), log)
- if err != nil {
- return fmt.Errorf("Unable to parse JSON: %w", err)
- }
- if log.Stderr {
- log.NewWriterLogger(&nopWriteCloser{
- w: os.Stderr,
- })
- } else {
- log.NewWriterLogger(log.out)
- }
- return nil
-}
-
-// Flush when log should be flushed
-func (log *ConsoleLogger) Flush() {
-}
-
-// ReleaseReopen causes the console logger to reconnect to os.Stdout
-func (log *ConsoleLogger) ReleaseReopen() error {
- if log.Stderr {
- log.NewWriterLogger(&nopWriteCloser{
- w: os.Stderr,
- })
- } else {
- log.NewWriterLogger(&nopWriteCloser{
- w: os.Stdout,
- })
- }
- return nil
-}
-
-// GetName returns the default name for this implementation
-func (log *ConsoleLogger) GetName() string {
- return "console"
-}
-
-func init() {
- Register("console", NewConsoleLogger)
-}
diff --git a/modules/log/console_test.go b/modules/log/console_test.go
deleted file mode 100644
index e4c3882d4f..0000000000
--- a/modules/log/console_test.go
+++ /dev/null
@@ -1,137 +0,0 @@
-// Copyright 2019 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package log
-
-import (
- "fmt"
- "strings"
- "testing"
- "time"
-
- "github.com/stretchr/testify/assert"
-)
-
-func TestConsoleLoggerBadConfig(t *testing.T) {
- logger := NewConsoleLogger()
-
- err := logger.Init("{")
- assert.Error(t, err)
- assert.Contains(t, err.Error(), "Unable to parse JSON")
- logger.Close()
-}
-
-func TestConsoleLoggerMinimalConfig(t *testing.T) {
- for _, level := range Levels() {
- var written []byte
- var closed bool
-
- c := CallbackWriteCloser{
- callback: func(p []byte, close bool) {
- written = p
- closed = close
- },
- }
- prefix := ""
- flags := LstdFlags
-
- cw := NewConsoleLogger()
- realCW := cw.(*ConsoleLogger)
- cw.Init(fmt.Sprintf("{\"level\":\"%s\"}", level))
- nwc := realCW.out.(*nopWriteCloser)
- nwc.w = c
-
- assert.Equal(t, flags, realCW.Flags)
- assert.Equal(t, FromString(level), realCW.Level)
- assert.Equal(t, FromString(level), cw.GetLevel())
- assert.Equal(t, prefix, realCW.Prefix)
- assert.Equal(t, "", string(written))
- cw.Close()
- assert.False(t, closed)
-
- }
-}
-
-func TestConsoleLogger(t *testing.T) {
- var written []byte
- var closed bool
-
- c := CallbackWriteCloser{
- callback: func(p []byte, close bool) {
- written = p
- closed = close
- },
- }
- prefix := "TestPrefix "
- level := INFO
- flags := LstdFlags | LUTC | Lfuncname
-
- cw := NewConsoleLogger()
- realCW := cw.(*ConsoleLogger)
- realCW.Colorize = false
- nwc := realCW.out.(*nopWriteCloser)
- nwc.w = c
-
- cw.Init(fmt.Sprintf("{\"expression\":\"FILENAME\",\"prefix\":\"%s\",\"level\":\"%s\",\"flags\":%d}", prefix, level.String(), flags))
-
- assert.Equal(t, flags, realCW.Flags)
- assert.Equal(t, level, realCW.Level)
- assert.Equal(t, level, cw.GetLevel())
-
- location, _ := time.LoadLocation("EST")
-
- date := time.Date(2019, time.January, 13, 22, 3, 30, 15, location)
-
- dateString := date.UTC().Format("2006/01/02 15:04:05")
-
- event := Event{
- level: INFO,
- msg: "TEST MSG",
- caller: "CALLER",
- filename: "FULL/FILENAME",
- line: 1,
- time: date,
- }
-
- expected := fmt.Sprintf("%s%s %s:%d:%s [%c] %s\n", prefix, dateString, event.filename, event.line, event.caller, strings.ToUpper(event.level.String())[0], event.msg)
- cw.LogEvent(&event)
- assert.Equal(t, expected, string(written))
- assert.False(t, closed)
- written = written[:0]
-
- event.level = DEBUG
- expected = ""
- cw.LogEvent(&event)
- assert.Equal(t, expected, string(written))
- assert.False(t, closed)
-
- event.level = TRACE
- expected = ""
- cw.LogEvent(&event)
- assert.Equal(t, expected, string(written))
- assert.False(t, closed)
-
- nonMatchEvent := Event{
- level: INFO,
- msg: "TEST MSG",
- caller: "CALLER",
- filename: "FULL/FI_LENAME",
- line: 1,
- time: date,
- }
- event.level = INFO
- expected = ""
- cw.LogEvent(&nonMatchEvent)
- assert.Equal(t, expected, string(written))
- assert.False(t, closed)
-
- event.level = WARN
- expected = fmt.Sprintf("%s%s %s:%d:%s [%c] %s\n", prefix, dateString, event.filename, event.line, event.caller, strings.ToUpper(event.level.String())[0], event.msg)
- cw.LogEvent(&event)
- assert.Equal(t, expected, string(written))
- assert.False(t, closed)
- written = written[:0]
-
- cw.Close()
- assert.False(t, closed)
-}
diff --git a/modules/log/errors.go b/modules/log/errors.go
deleted file mode 100644
index 942639a434..0000000000
--- a/modules/log/errors.go
+++ /dev/null
@@ -1,61 +0,0 @@
-// Copyright 2019 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package log
-
-import "fmt"
-
-// ErrTimeout represents a "Timeout" kind of error.
-type ErrTimeout struct {
- Name string
- Provider string
-}
-
-// IsErrTimeout checks if an error is a ErrTimeout.
-func IsErrTimeout(err error) bool {
- if err == nil {
- return false
- }
- _, ok := err.(ErrTimeout)
- return ok
-}
-
-func (err ErrTimeout) Error() string {
- return fmt.Sprintf("Log Timeout for %s (%s)", err.Name, err.Provider)
-}
-
-// ErrUnknownProvider represents a "Unknown Provider" kind of error.
-type ErrUnknownProvider struct {
- Provider string
-}
-
-// IsErrUnknownProvider checks if an error is a ErrUnknownProvider.
-func IsErrUnknownProvider(err error) bool {
- if err == nil {
- return false
- }
- _, ok := err.(ErrUnknownProvider)
- return ok
-}
-
-func (err ErrUnknownProvider) Error() string {
- return fmt.Sprintf("Unknown Log Provider \"%s\" (Was it registered?)", err.Provider)
-}
-
-// ErrDuplicateName represents a Duplicate Name error
-type ErrDuplicateName struct {
- Name string
-}
-
-// IsErrDuplicateName checks if an error is a ErrDuplicateName.
-func IsErrDuplicateName(err error) bool {
- if err == nil {
- return false
- }
- _, ok := err.(ErrDuplicateName)
- return ok
-}
-
-func (err ErrDuplicateName) Error() string {
- return fmt.Sprintf("Duplicate named logger: %s", err.Name)
-}
diff --git a/modules/log/event.go b/modules/log/event.go
deleted file mode 100644
index 723c8810bc..0000000000
--- a/modules/log/event.go
+++ /dev/null
@@ -1,460 +0,0 @@
-// Copyright 2019 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package log
-
-import (
- "context"
- "fmt"
- "runtime/pprof"
- "sync"
- "time"
-
- "code.gitea.io/gitea/modules/process"
-)
-
-// Event represents a logging event
-type Event struct {
- level Level
- msg string
- caller string
- filename string
- line int
- time time.Time
- stacktrace string
-}
-
-// EventLogger represents the behaviours of a logger
-type EventLogger interface {
- LogEvent(event *Event) error
- Close()
- Flush()
- GetLevel() Level
- GetStacktraceLevel() Level
- GetName() string
- ReleaseReopen() error
-}
-
-// ChannelledLog represents a cached channel to a LoggerProvider
-type ChannelledLog struct {
- ctx context.Context
- finished context.CancelFunc
- name string
- provider string
- queue chan *Event
- loggerProvider LoggerProvider
- flush chan bool
- close chan bool
- closed chan bool
-}
-
-// NewChannelledLog a new logger instance with given logger provider and config.
-func NewChannelledLog(parent context.Context, name, provider, config string, bufferLength int64) (*ChannelledLog, error) {
- if log, ok := providers[provider]; ok {
-
- l := &ChannelledLog{
- queue: make(chan *Event, bufferLength),
- flush: make(chan bool),
- close: make(chan bool),
- closed: make(chan bool),
- }
- l.loggerProvider = log()
- if err := l.loggerProvider.Init(config); err != nil {
- return nil, err
- }
- l.name = name
- l.provider = provider
- l.ctx, _, l.finished = process.GetManager().AddTypedContext(parent, fmt.Sprintf("Logger: %s(%s)", l.name, l.provider), process.SystemProcessType, false)
- go l.Start()
- return l, nil
- }
- return nil, ErrUnknownProvider{provider}
-}
-
-// Start processing the ChannelledLog
-func (l *ChannelledLog) Start() {
- pprof.SetGoroutineLabels(l.ctx)
- defer l.finished()
- for {
- select {
- case event, ok := <-l.queue:
- if !ok {
- l.closeLogger()
- return
- }
- l.loggerProvider.LogEvent(event) //nolint:errcheck
- case _, ok := <-l.flush:
- if !ok {
- l.closeLogger()
- return
- }
- l.emptyQueue()
- l.loggerProvider.Flush()
- case <-l.close:
- l.emptyQueue()
- l.closeLogger()
- return
- }
- }
-}
-
-// LogEvent logs an event to this ChannelledLog
-func (l *ChannelledLog) LogEvent(event *Event) error {
- select {
- case l.queue <- event:
- return nil
- case <-time.After(60 * time.Second):
- // We're blocked!
- return ErrTimeout{
- Name: l.name,
- Provider: l.provider,
- }
- }
-}
-
-func (l *ChannelledLog) emptyQueue() bool {
- for {
- select {
- case event, ok := <-l.queue:
- if !ok {
- return false
- }
- l.loggerProvider.LogEvent(event) //nolint:errcheck
- default:
- return true
- }
- }
-}
-
-func (l *ChannelledLog) closeLogger() {
- l.loggerProvider.Flush()
- l.loggerProvider.Close()
- l.closed <- true
-}
-
-// Close this ChannelledLog
-func (l *ChannelledLog) Close() {
- l.close <- true
- <-l.closed
-}
-
-// Flush this ChannelledLog
-func (l *ChannelledLog) Flush() {
- l.flush <- true
-}
-
-// ReleaseReopen this ChannelledLog
-func (l *ChannelledLog) ReleaseReopen() error {
- return l.loggerProvider.ReleaseReopen()
-}
-
-// GetLevel gets the level of this ChannelledLog
-func (l *ChannelledLog) GetLevel() Level {
- return l.loggerProvider.GetLevel()
-}
-
-// GetStacktraceLevel gets the level of this ChannelledLog
-func (l *ChannelledLog) GetStacktraceLevel() Level {
- return l.loggerProvider.GetStacktraceLevel()
-}
-
-// GetName returns the name of this ChannelledLog
-func (l *ChannelledLog) GetName() string {
- return l.name
-}
-
-// MultiChannelledLog represents a cached channel to a LoggerProvider
-type MultiChannelledLog struct {
- ctx context.Context
- finished context.CancelFunc
- name string
- bufferLength int64
- queue chan *Event
- rwmutex sync.RWMutex
- loggers map[string]EventLogger
- flush chan bool
- close chan bool
- started bool
- level Level
- stacktraceLevel Level
- closed chan bool
- paused chan bool
-}
-
-// NewMultiChannelledLog a new logger instance with given logger provider and config.
-func NewMultiChannelledLog(name string, bufferLength int64) *MultiChannelledLog {
- ctx, _, finished := process.GetManager().AddTypedContext(context.Background(), fmt.Sprintf("Logger: %s", name), process.SystemProcessType, false)
-
- m := &MultiChannelledLog{
- ctx: ctx,
- finished: finished,
- name: name,
- queue: make(chan *Event, bufferLength),
- flush: make(chan bool),
- bufferLength: bufferLength,
- loggers: make(map[string]EventLogger),
- level: NONE,
- stacktraceLevel: NONE,
- close: make(chan bool),
- closed: make(chan bool),
- paused: make(chan bool),
- }
- return m
-}
-
-// AddLogger adds a logger to this MultiChannelledLog
-func (m *MultiChannelledLog) AddLogger(logger EventLogger) error {
- m.rwmutex.Lock()
- name := logger.GetName()
- if _, has := m.loggers[name]; has {
- m.rwmutex.Unlock()
- return ErrDuplicateName{name}
- }
- m.loggers[name] = logger
- if logger.GetLevel() < m.level {
- m.level = logger.GetLevel()
- }
- if logger.GetStacktraceLevel() < m.stacktraceLevel {
- m.stacktraceLevel = logger.GetStacktraceLevel()
- }
- m.rwmutex.Unlock()
- go m.Start()
- return nil
-}
-
-// DelLogger removes a sub logger from this MultiChannelledLog
-// NB: If you delete the last sublogger this logger will simply drop
-// log events
-func (m *MultiChannelledLog) DelLogger(name string) bool {
- m.rwmutex.Lock()
- logger, has := m.loggers[name]
- if !has {
- m.rwmutex.Unlock()
- return false
- }
- delete(m.loggers, name)
- m.internalResetLevel()
- m.rwmutex.Unlock()
- logger.Flush()
- logger.Close()
- return true
-}
-
-// GetEventLogger returns a sub logger from this MultiChannelledLog
-func (m *MultiChannelledLog) GetEventLogger(name string) EventLogger {
- m.rwmutex.RLock()
- defer m.rwmutex.RUnlock()
- return m.loggers[name]
-}
-
-// GetEventLoggerNames returns a list of names
-func (m *MultiChannelledLog) GetEventLoggerNames() []string {
- m.rwmutex.RLock()
- defer m.rwmutex.RUnlock()
- var keys []string
- for k := range m.loggers {
- keys = append(keys, k)
- }
- return keys
-}
-
-func (m *MultiChannelledLog) closeLoggers() {
- m.rwmutex.Lock()
- for _, logger := range m.loggers {
- logger.Flush()
- logger.Close()
- }
- m.rwmutex.Unlock()
- m.closed <- true
-}
-
-// Pause pauses this Logger
-func (m *MultiChannelledLog) Pause() {
- m.paused <- true
-}
-
-// Resume resumes this Logger
-func (m *MultiChannelledLog) Resume() {
- m.paused <- false
-}
-
-// ReleaseReopen causes this logger to tell its subloggers to release and reopen
-func (m *MultiChannelledLog) ReleaseReopen() error {
- m.rwmutex.Lock()
- defer m.rwmutex.Unlock()
- var accumulatedErr error
- for _, logger := range m.loggers {
- if err := logger.ReleaseReopen(); err != nil {
- if accumulatedErr == nil {
- accumulatedErr = fmt.Errorf("Error whilst reopening: %s Error: %w", logger.GetName(), err)
- } else {
- accumulatedErr = fmt.Errorf("Error whilst reopening: %s Error: %v & %w", logger.GetName(), err, accumulatedErr)
- }
- }
- }
- return accumulatedErr
-}
-
-// Start processing the MultiChannelledLog
-func (m *MultiChannelledLog) Start() {
- m.rwmutex.Lock()
- if m.started {
- m.rwmutex.Unlock()
- return
- }
- pprof.SetGoroutineLabels(m.ctx)
- defer m.finished()
-
- m.started = true
- m.rwmutex.Unlock()
- paused := false
- for {
- if paused {
- select {
- case paused = <-m.paused:
- if !paused {
- m.ResetLevel()
- }
- case _, ok := <-m.flush:
- if !ok {
- m.closeLoggers()
- return
- }
- m.rwmutex.RLock()
- for _, logger := range m.loggers {
- logger.Flush()
- }
- m.rwmutex.RUnlock()
- case <-m.close:
- m.closeLoggers()
- return
- }
- continue
- }
- select {
- case paused = <-m.paused:
- if paused && m.level < INFO {
- m.level = INFO
- }
- case event, ok := <-m.queue:
- if !ok {
- m.closeLoggers()
- return
- }
- m.rwmutex.RLock()
- for _, logger := range m.loggers {
- err := logger.LogEvent(event)
- if err != nil {
- fmt.Println(err) //nolint:forbidigo
- }
- }
- m.rwmutex.RUnlock()
- case _, ok := <-m.flush:
- if !ok {
- m.closeLoggers()
- return
- }
- m.emptyQueue()
- m.rwmutex.RLock()
- for _, logger := range m.loggers {
- logger.Flush()
- }
- m.rwmutex.RUnlock()
- case <-m.close:
- m.emptyQueue()
- m.closeLoggers()
- return
- }
- }
-}
-
-func (m *MultiChannelledLog) emptyQueue() bool {
- for {
- select {
- case event, ok := <-m.queue:
- if !ok {
- return false
- }
- m.rwmutex.RLock()
- for _, logger := range m.loggers {
- err := logger.LogEvent(event)
- if err != nil {
- fmt.Println(err) //nolint:forbidigo
- }
- }
- m.rwmutex.RUnlock()
- default:
- return true
- }
- }
-}
-
-// LogEvent logs an event to this MultiChannelledLog
-func (m *MultiChannelledLog) LogEvent(event *Event) error {
- select {
- case m.queue <- event:
- return nil
- case <-time.After(100 * time.Millisecond):
- // We're blocked!
- return ErrTimeout{
- Name: m.name,
- Provider: "MultiChannelledLog",
- }
- }
-}
-
-// Close this MultiChannelledLog
-func (m *MultiChannelledLog) Close() {
- m.close <- true
- <-m.closed
-}
-
-// Flush this ChannelledLog
-func (m *MultiChannelledLog) Flush() {
- m.flush <- true
-}
-
-// GetLevel gets the level of this MultiChannelledLog
-func (m *MultiChannelledLog) GetLevel() Level {
- m.rwmutex.RLock()
- defer m.rwmutex.RUnlock()
- return m.level
-}
-
-// GetStacktraceLevel gets the level of this MultiChannelledLog
-func (m *MultiChannelledLog) GetStacktraceLevel() Level {
- m.rwmutex.RLock()
- defer m.rwmutex.RUnlock()
- return m.stacktraceLevel
-}
-
-func (m *MultiChannelledLog) internalResetLevel() Level {
- m.level = NONE
- for _, logger := range m.loggers {
- level := logger.GetLevel()
- if level < m.level {
- m.level = level
- }
- level = logger.GetStacktraceLevel()
- if level < m.stacktraceLevel {
- m.stacktraceLevel = level
- }
- }
- return m.level
-}
-
-// ResetLevel will reset the level of this MultiChannelledLog
-func (m *MultiChannelledLog) ResetLevel() Level {
- m.rwmutex.Lock()
- defer m.rwmutex.Unlock()
- return m.internalResetLevel()
-}
-
-// GetName gets the name of this MultiChannelledLog
-func (m *MultiChannelledLog) GetName() string {
- return m.name
-}
-
-func (e *Event) GetMsg() string {
- return e.msg
-}
diff --git a/modules/log/event_format.go b/modules/log/event_format.go
new file mode 100644
index 0000000000..524ca3dd87
--- /dev/null
+++ b/modules/log/event_format.go
@@ -0,0 +1,246 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package log
+
+import (
+ "bytes"
+ "fmt"
+ "strings"
+ "time"
+)
+
+type Event struct {
+ Time time.Time
+
+ GoroutinePid string
+ Caller string
+ Filename string
+ Line int
+
+ Level Level
+
+ MsgSimpleText string
+
+	msgFormat string // the format and args are only valid in the caller's goroutine
+ msgArgs []any // they are discarded before the event is passed to the writer's channel
+
+ Stacktrace string
+}
+
+type EventFormatted struct {
+ Origin *Event
+ Msg any // the message formatted by the writer's formatter, the writer knows its type
+}
+
+type EventFormatter func(mode *WriterMode, event *Event, msgFormat string, msgArgs ...any) []byte
+
+type logStringFormatter struct {
+ v LogStringer
+}
+
+var _ fmt.Formatter = logStringFormatter{}
+
+func (l logStringFormatter) Format(f fmt.State, verb rune) {
+ if f.Flag('#') && verb == 'v' {
+ _, _ = fmt.Fprintf(f, "%#v", l.v)
+ return
+ }
+ _, _ = f.Write([]byte(l.v.LogString()))
+}
+
+// itoa is a copy of the cheap integer-to-fixed-width-decimal-to-ASCII conversion from the logger.
+// TODO: legacy bugs: it doesn't support negative numbers, and it overflows if wid is too large.
+func itoa(buf []byte, i, wid int) []byte {
+ var s [20]byte
+ bp := len(s) - 1
+ for i >= 10 || wid > 1 {
+ wid--
+ q := i / 10
+ s[bp] = byte('0' + i - q*10)
+ bp--
+ i = q
+ }
+ // i < 10
+ s[bp] = byte('0' + i)
+ return append(buf, s[bp:]...)
+}
+
+func colorSprintf(colorize bool, format string, args ...any) string {
+ hasColorValue := false
+ for _, v := range args {
+ if _, hasColorValue = v.(*ColoredValue); hasColorValue {
+ break
+ }
+ }
+ if colorize || !hasColorValue {
+ return fmt.Sprintf(format, args...)
+ }
+
+ noColors := make([]any, len(args))
+ copy(noColors, args)
+ for i, v := range args {
+ if cv, ok := v.(*ColoredValue); ok {
+ noColors[i] = cv.v
+ }
+ }
+ return fmt.Sprintf(format, noColors...)
+}
+
+// EventFormatTextMessage makes the log message for a writer with its mode. This function is a copy of the formatting code from the original logger package.
+func EventFormatTextMessage(mode *WriterMode, event *Event, msgFormat string, msgArgs ...any) []byte {
+ buf := make([]byte, 0, 1024)
+ buf = append(buf, mode.Prefix...)
+ t := event.Time
+ flags := mode.Flags.Bits()
+ if flags&(Ldate|Ltime|Lmicroseconds) != 0 {
+ if mode.Colorize {
+ buf = append(buf, fgCyanBytes...)
+ }
+ if flags&LUTC != 0 {
+ t = t.UTC()
+ }
+ if flags&Ldate != 0 {
+ year, month, day := t.Date()
+ buf = itoa(buf, year, 4)
+ buf = append(buf, '/')
+ buf = itoa(buf, int(month), 2)
+ buf = append(buf, '/')
+ buf = itoa(buf, day, 2)
+ buf = append(buf, ' ')
+ }
+ if flags&(Ltime|Lmicroseconds) != 0 {
+ hour, min, sec := t.Clock()
+ buf = itoa(buf, hour, 2)
+ buf = append(buf, ':')
+ buf = itoa(buf, min, 2)
+ buf = append(buf, ':')
+ buf = itoa(buf, sec, 2)
+ if flags&Lmicroseconds != 0 {
+ buf = append(buf, '.')
+ buf = itoa(buf, t.Nanosecond()/1e3, 6)
+ }
+ buf = append(buf, ' ')
+ }
+ if mode.Colorize {
+ buf = append(buf, resetBytes...)
+ }
+ }
+ if flags&(Lshortfile|Llongfile) != 0 {
+ if mode.Colorize {
+ buf = append(buf, fgGreenBytes...)
+ }
+ file := event.Filename
+ if flags&Lmedfile == Lmedfile {
+ startIndex := len(file) - 20
+ if startIndex > 0 {
+ file = "..." + file[startIndex:]
+ }
+ } else if flags&Lshortfile != 0 {
+ startIndex := strings.LastIndexByte(file, '/')
+ if startIndex > 0 && startIndex < len(file) {
+ file = file[startIndex+1:]
+ }
+ }
+ buf = append(buf, file...)
+ buf = append(buf, ':')
+ buf = itoa(buf, event.Line, -1)
+ if flags&(Lfuncname|Lshortfuncname) != 0 {
+ buf = append(buf, ':')
+ } else {
+ if mode.Colorize {
+ buf = append(buf, resetBytes...)
+ }
+ buf = append(buf, ' ')
+ }
+ }
+ if flags&(Lfuncname|Lshortfuncname) != 0 {
+ if mode.Colorize {
+ buf = append(buf, fgGreenBytes...)
+ }
+ funcname := event.Caller
+ if flags&Lshortfuncname != 0 {
+ lastIndex := strings.LastIndexByte(funcname, '.')
+ if lastIndex > 0 && len(funcname) > lastIndex+1 {
+ funcname = funcname[lastIndex+1:]
+ }
+ }
+ buf = append(buf, funcname...)
+ if mode.Colorize {
+ buf = append(buf, resetBytes...)
+ }
+ buf = append(buf, ' ')
+ }
+
+ if flags&(Llevel|Llevelinitial) != 0 {
+ level := strings.ToUpper(event.Level.String())
+ if mode.Colorize {
+ buf = append(buf, ColorBytes(levelToColor[event.Level]...)...)
+ }
+ buf = append(buf, '[')
+ if flags&Llevelinitial != 0 {
+ buf = append(buf, level[0])
+ } else {
+ buf = append(buf, level...)
+ }
+ buf = append(buf, ']')
+ if mode.Colorize {
+ buf = append(buf, resetBytes...)
+ }
+ buf = append(buf, ' ')
+ }
+
+ var msg []byte
+
+ // if the log needs colorizing, do it
+ if mode.Colorize && len(msgArgs) > 0 {
+ hasColorValue := false
+ for _, v := range msgArgs {
+ if _, hasColorValue = v.(*ColoredValue); hasColorValue {
+ break
+ }
+ }
+ if hasColorValue {
+ msg = []byte(fmt.Sprintf(msgFormat, msgArgs...))
+ }
+ }
+ // try to re-use the pre-formatted simple text message
+ if len(msg) == 0 {
+ msg = []byte(event.MsgSimpleText)
+ }
+ // if still no message, do the normal Sprintf for the message
+ if len(msg) == 0 {
+ msg = []byte(colorSprintf(mode.Colorize, msgFormat, msgArgs...))
+ }
+ // remove at most one trailing new line
+ if len(msg) > 0 && msg[len(msg)-1] == '\n' {
+ msg = msg[:len(msg)-1]
+ }
+
+ if flags&Lgopid == Lgopid {
+ if event.GoroutinePid != "" {
+ buf = append(buf, '[')
+ if mode.Colorize {
+ buf = append(buf, ColorBytes(FgHiYellow)...)
+ }
+ buf = append(buf, event.GoroutinePid...)
+ if mode.Colorize {
+ buf = append(buf, resetBytes...)
+ }
+ buf = append(buf, ']', ' ')
+ }
+ }
+ buf = append(buf, msg...)
+
+ if event.Stacktrace != "" && mode.StacktraceLevel <= event.Level {
+ lines := bytes.Split([]byte(event.Stacktrace), []byte("\n"))
+ for _, line := range lines {
+ buf = append(buf, "\n\t"...)
+ buf = append(buf, line...)
+ }
+ buf = append(buf, '\n')
+ }
+ buf = append(buf, '\n')
+ return buf
+}
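
Taken on its own, the new formatter can be exercised directly. A minimal sketch, assuming the import path of this module and using FlagsFromBits from the reworked flags.go later in this diff; the event field values (file name, caller, message) are illustrative only:

package main

import (
	"os"
	"time"

	log "code.gitea.io/gitea/modules/log"
)

func main() {
	mode := &log.WriterMode{
		Prefix:   "[gitea] ",
		Colorize: false,
		Flags:    log.FlagsFromBits(log.LstdFlags),
	}
	e := &log.Event{
		Time:          time.Now(),
		Filename:      "modules/web/route.go", // made-up example values
		Line:          42,
		Caller:        "web.Routes",
		Level:         log.INFO,
		MsgSimpleText: "listening on :3000",
	}
	// the returned byte slice already ends with a newline
	line := log.EventFormatTextMessage(mode, e, "listening on %s", ":3000")
	_, _ = os.Stdout.Write(line)
}
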
diff --git a/modules/log/event_format_test.go b/modules/log/event_format_test.go
new file mode 100644
index 0000000000..7c299a607d
--- /dev/null
+++ b/modules/log/event_format_test.go
@@ -0,0 +1,57 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package log
+
+import (
+ "testing"
+ "time"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestItoa(t *testing.T) {
+ b := itoa(nil, 0, 0)
+ assert.Equal(t, "0", string(b))
+
+ b = itoa(nil, 0, 1)
+ assert.Equal(t, "0", string(b))
+
+ b = itoa(nil, 0, 2)
+ assert.Equal(t, "00", string(b))
+}
+
+func TestEventFormatTextMessage(t *testing.T) {
+ res := EventFormatTextMessage(&WriterMode{Prefix: "[PREFIX] ", Colorize: false, Flags: Flags{defined: true, flags: 0xffffffff}},
+ &Event{
+ Time: time.Date(2020, 1, 2, 3, 4, 5, 6, time.UTC),
+ Caller: "caller",
+ Filename: "filename",
+ Line: 123,
+ GoroutinePid: "pid",
+ Level: ERROR,
+ Stacktrace: "stacktrace",
+ },
+ "msg format: %v %v", "arg0", NewColoredValue("arg1", FgBlue),
+ )
+
+ assert.Equal(t, `[PREFIX] 2020/01/02 03:04:05.000000 filename:123:caller [E] [pid] msg format: arg0 arg1
+ stacktrace
+
+`, string(res))
+
+ res = EventFormatTextMessage(&WriterMode{Prefix: "[PREFIX] ", Colorize: true, Flags: Flags{defined: true, flags: 0xffffffff}},
+ &Event{
+ Time: time.Date(2020, 1, 2, 3, 4, 5, 6, time.UTC),
+ Caller: "caller",
+ Filename: "filename",
+ Line: 123,
+ GoroutinePid: "pid",
+ Level: ERROR,
+ Stacktrace: "stacktrace",
+ },
+ "msg format: %v %v", "arg0", NewColoredValue("arg1", FgBlue),
+ )
+
+ assert.Equal(t, "[PREFIX] \x1b[36m2020/01/02 03:04:05.000000 \x1b[0m\x1b[32mfilename:123:\x1b[32mcaller\x1b[0m \x1b[1;31m[E]\x1b[0m [\x1b[93mpid\x1b[0m] msg format: arg0 \x1b[34marg1\x1b[0m\n\tstacktrace\n\n", string(res))
+}
diff --git a/modules/log/event_writer.go b/modules/log/event_writer.go
new file mode 100644
index 0000000000..4b77e488de
--- /dev/null
+++ b/modules/log/event_writer.go
@@ -0,0 +1,54 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package log
+
+import (
+ "fmt"
+)
+
+// EventWriter is the general interface for all event writers.
+// EventWriterBase is only used as its base interface.
+// A writer implementation could override the default EventWriterBase functions,
+// e.g. a writer can override Run to handle events in its own way with its own goroutine.
+type EventWriter interface {
+ EventWriterBase
+}
+
+// WriterMode is the mode for creating a new EventWriter; it contains common options for all writers.
+// Its WriterOption field holds the writer-specific options; it should be passed by value, not by pointer.
+type WriterMode struct {
+ BufferLen int
+
+ Level Level
+ Prefix string
+ Colorize bool
+ Flags Flags
+
+ Expression string
+
+ StacktraceLevel Level
+
+ WriterOption any
+}
+
+// EventWriterProvider is the function for creating a new EventWriter
+type EventWriterProvider func(writerName string, writerMode WriterMode) EventWriter
+
+var eventWriterProviders = map[string]EventWriterProvider{}
+
+func RegisterEventWriter(writerType string, p EventWriterProvider) {
+ eventWriterProviders[writerType] = p
+}
+
+func HasEventWriter(writerType string) bool {
+ _, ok := eventWriterProviders[writerType]
+ return ok
+}
+
+func NewEventWriter(name, writerType string, mode WriterMode) (EventWriter, error) {
+ if p, ok := eventWriterProviders[writerType]; ok {
+ return p(name, mode), nil
+ }
+ return nil, fmt.Errorf("unknown event writer type %q for writer %q", writerType, name)
+}
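
A hedged sketch of the registry above from a caller's point of view: the writer name "my-console" is arbitrary, the "console" type and WriterConsoleOption are defined further down in this diff, and the returned writer would normally be handed to a logger rather than used directly:

package main

import (
	"fmt"

	log "code.gitea.io/gitea/modules/log"
)

func main() {
	// "console" is registered by the init() in event_writer_console.go
	if !log.HasEventWriter("console") {
		panic("console writer type not registered")
	}
	w, err := log.NewEventWriter("my-console", "console", log.WriterMode{
		Level:        log.INFO,
		Flags:        log.FlagsFromString("stdflags"),
		WriterOption: log.WriterConsoleOption{Stderr: true},
	})
	if err != nil {
		panic(err)
	}
	fmt.Println(w.GetWriterType(), w.GetWriterName(), w.GetLevel())
}
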
diff --git a/modules/log/event_writer_base.go b/modules/log/event_writer_base.go
new file mode 100644
index 0000000000..f61d9a7b9d
--- /dev/null
+++ b/modules/log/event_writer_base.go
@@ -0,0 +1,160 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package log
+
+import (
+ "context"
+ "fmt"
+ "io"
+ "regexp"
+ "time"
+)
+
+// EventWriterBase is the base interface for most event writers
+// It provides default implementations for most methods
+type EventWriterBase interface {
+ Base() *EventWriterBaseImpl
+ GetWriterType() string
+ GetWriterName() string
+ GetLevel() Level
+
+ Run(ctx context.Context)
+}
+
+type EventWriterBaseImpl struct {
+ writerType string
+
+ Name string
+ Mode *WriterMode
+ Queue chan *EventFormatted
+
+	FormatMessage     EventFormatter // formats the Event into the message that is written to the output
+ OutputWriteCloser io.WriteCloser // it will be closed when the event writer is stopped
+ GetPauseChan func() chan struct{}
+
+ shared bool
+ stopped chan struct{}
+}
+
+var _ EventWriterBase = (*EventWriterBaseImpl)(nil)
+
+func (b *EventWriterBaseImpl) Base() *EventWriterBaseImpl {
+ return b
+}
+
+func (b *EventWriterBaseImpl) GetWriterType() string {
+ return b.writerType
+}
+
+func (b *EventWriterBaseImpl) GetWriterName() string {
+ return b.Name
+}
+
+func (b *EventWriterBaseImpl) GetLevel() Level {
+ return b.Mode.Level
+}
+
+// Run is the default implementation for EventWriter.Run
+func (b *EventWriterBaseImpl) Run(ctx context.Context) {
+ defer b.OutputWriteCloser.Close()
+
+ var exprRegexp *regexp.Regexp
+ if b.Mode.Expression != "" {
+ var err error
+ if exprRegexp, err = regexp.Compile(b.Mode.Expression); err != nil {
+ FallbackErrorf("unable to compile expression %q for writer %q: %v", b.Mode.Expression, b.Name, err)
+ }
+ }
+
+ for {
+ if b.GetPauseChan != nil {
+ pause := b.GetPauseChan()
+ if pause != nil {
+ select {
+ case <-pause:
+ case <-ctx.Done():
+ return
+ }
+ }
+ }
+
+ select {
+ case <-ctx.Done():
+ return
+ case event, ok := <-b.Queue:
+ if !ok {
+ return
+ }
+
+ if exprRegexp != nil {
+ fileLineCaller := fmt.Sprintf("%s:%d:%s", event.Origin.Filename, event.Origin.Line, event.Origin.Caller)
+ matched := exprRegexp.Match([]byte(fileLineCaller)) || exprRegexp.Match([]byte(event.Origin.MsgSimpleText))
+ if !matched {
+ continue
+ }
+ }
+
+ var err error
+ switch msg := event.Msg.(type) {
+ case string:
+ _, err = b.OutputWriteCloser.Write([]byte(msg))
+ case []byte:
+ _, err = b.OutputWriteCloser.Write(msg)
+ case io.WriterTo:
+ _, err = msg.WriteTo(b.OutputWriteCloser)
+ default:
+ _, err = b.OutputWriteCloser.Write([]byte(fmt.Sprint(msg)))
+ }
+ if err != nil {
+ FallbackErrorf("unable to write log message of %q (%v): %v", b.Name, err, event.Msg)
+ }
+ }
+ }
+}
+
+func NewEventWriterBase(name, writerType string, mode WriterMode) *EventWriterBaseImpl {
+ if mode.BufferLen == 0 {
+ mode.BufferLen = 1000
+ }
+ if mode.Level == UNDEFINED {
+ mode.Level = INFO
+ }
+ if mode.StacktraceLevel == UNDEFINED {
+ mode.StacktraceLevel = NONE
+ }
+ b := &EventWriterBaseImpl{
+ writerType: writerType,
+
+ Name: name,
+ Mode: &mode,
+ Queue: make(chan *EventFormatted, mode.BufferLen),
+
+ GetPauseChan: GetManager().GetPauseChan, // by default, use the global pause channel
+ FormatMessage: EventFormatTextMessage,
+ }
+ return b
+}
+
+// eventWriterStartGo uses "go" to start an event writer's Run method in its own goroutine
+func eventWriterStartGo(ctx context.Context, w EventWriter, shared bool) {
+ if w.Base().stopped != nil {
+ return // already started
+ }
+ w.Base().shared = shared
+ w.Base().stopped = make(chan struct{})
+ go func() {
+ defer close(w.Base().stopped)
+ w.Run(ctx)
+ }()
+}
+
+// eventWriterStopWait stops an event writer and waits for it to finish flushing (with a timeout)
+func eventWriterStopWait(w EventWriter) {
+ close(w.Base().Queue)
+ select {
+ case <-w.Base().stopped:
+ case <-time.After(2 * time.Second):
+ FallbackErrorf("unable to stop log writer %q in time, skip", w.GetWriterName())
+ }
+}
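
The base implementation above is meant to be embedded. A hedged sketch of a hypothetical in-memory writer; the "buffer" type name, eventWriterBuffer and nopWriteCloser are illustrative only, following the same pattern the built-in conn/console/file writers below use:

package main

import (
	"bytes"
	"io"

	log "code.gitea.io/gitea/modules/log"
)

// eventWriterBuffer keeps formatted log lines in memory, e.g. for tests.
type eventWriterBuffer struct {
	*log.EventWriterBaseImpl
	buf *bytes.Buffer
}

// nopWriteCloser adapts an io.Writer that has nothing to close.
type nopWriteCloser struct{ io.Writer }

func (nopWriteCloser) Close() error { return nil }

// NewEventWriterBuffer builds the writer on top of the default base implementation;
// the default Run then writes every formatted event into the buffer.
func NewEventWriterBuffer(name string, mode log.WriterMode) log.EventWriter {
	w := &eventWriterBuffer{
		EventWriterBaseImpl: log.NewEventWriterBase(name, "buffer", mode),
		buf:                 &bytes.Buffer{},
	}
	w.OutputWriteCloser = nopWriteCloser{w.buf}
	return w
}

func main() {
	// make the hypothetical "buffer" type available to NewEventWriter and config-driven setup
	log.RegisterEventWriter("buffer", NewEventWriterBuffer)
	w, err := log.NewEventWriter("test-buffer", "buffer", log.WriterMode{Level: log.DEBUG})
	if err != nil {
		panic(err)
	}
	_ = w // the logger's manager would normally start Run and feed its Queue
}
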
diff --git a/modules/log/event_writer_conn.go b/modules/log/event_writer_conn.go
new file mode 100644
index 0000000000..022206aa4d
--- /dev/null
+++ b/modules/log/event_writer_conn.go
@@ -0,0 +1,111 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package log
+
+import (
+ "io"
+ "net"
+)
+
+type WriterConnOption struct {
+ Addr string
+ Protocol string
+ Reconnect bool
+ ReconnectOnMsg bool
+}
+
+type eventWriterConn struct {
+ *EventWriterBaseImpl
+ connWriter connWriter
+}
+
+var _ EventWriter = (*eventWriterConn)(nil)
+
+func NewEventWriterConn(writerName string, writerMode WriterMode) EventWriter {
+ w := &eventWriterConn{EventWriterBaseImpl: NewEventWriterBase(writerName, "conn", writerMode)}
+ opt := writerMode.WriterOption.(WriterConnOption)
+ w.connWriter = connWriter{
+ ReconnectOnMsg: opt.ReconnectOnMsg,
+ Reconnect: opt.Reconnect,
+ Net: opt.Protocol,
+ Addr: opt.Addr,
+ }
+ w.OutputWriteCloser = &w.connWriter
+ return w
+}
+
+func init() {
+ RegisterEventWriter("conn", NewEventWriterConn)
+}
+
+// below is copied from old code
+
+type connWriter struct {
+ innerWriter io.WriteCloser
+
+ ReconnectOnMsg bool
+ Reconnect bool
+ Net string `json:"net"`
+ Addr string `json:"addr"`
+}
+
+var _ io.WriteCloser = (*connWriter)(nil)
+
+// Close the inner writer
+func (i *connWriter) Close() error {
+ if i.innerWriter != nil {
+ return i.innerWriter.Close()
+ }
+ return nil
+}
+
+// Write the data to the connection
+func (i *connWriter) Write(p []byte) (int, error) {
+ if i.neededConnectOnMsg() {
+ if err := i.connect(); err != nil {
+ return 0, err
+ }
+ }
+
+ if i.ReconnectOnMsg {
+ defer i.innerWriter.Close()
+ }
+
+ return i.innerWriter.Write(p)
+}
+
+func (i *connWriter) neededConnectOnMsg() bool {
+ if i.Reconnect {
+ i.Reconnect = false
+ return true
+ }
+
+ if i.innerWriter == nil {
+ return true
+ }
+
+ return i.ReconnectOnMsg
+}
+
+func (i *connWriter) connect() error {
+ if i.innerWriter != nil {
+ _ = i.innerWriter.Close()
+ i.innerWriter = nil
+ }
+
+ conn, err := net.Dial(i.Net, i.Addr)
+ if err != nil {
+ return err
+ }
+
+ if tcpConn, ok := conn.(*net.TCPConn); ok {
+ err = tcpConn.SetKeepAlive(true)
+ if err != nil {
+ return err
+ }
+ }
+
+ i.innerWriter = conn
+ return nil
+}
diff --git a/modules/log/event_writer_conn_test.go b/modules/log/event_writer_conn_test.go
new file mode 100644
index 0000000000..e08ec025a3
--- /dev/null
+++ b/modules/log/event_writer_conn_test.go
@@ -0,0 +1,75 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package log
+
+import (
+ "context"
+ "fmt"
+ "io"
+ "net"
+ "strings"
+ "sync"
+ "testing"
+ "time"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func listenReadAndClose(t *testing.T, l net.Listener, expected string) {
+ conn, err := l.Accept()
+ assert.NoError(t, err)
+ defer conn.Close()
+ written, err := io.ReadAll(conn)
+
+ assert.NoError(t, err)
+ assert.Equal(t, expected, string(written))
+}
+
+func TestConnLogger(t *testing.T) {
+ protocol := "tcp"
+ address := ":3099"
+
+ l, err := net.Listen(protocol, address)
+ if err != nil {
+ t.Fatal(err)
+ }
+ defer l.Close()
+
+ prefix := "TestPrefix "
+ level := INFO
+ flags := LstdFlags | LUTC | Lfuncname
+
+ logger := NewLoggerWithWriters(context.Background(), NewEventWriterConn("test-conn", WriterMode{
+ Level: level,
+ Prefix: prefix,
+ Flags: FlagsFromBits(flags),
+ WriterOption: WriterConnOption{Addr: address, Protocol: protocol, Reconnect: true, ReconnectOnMsg: true},
+ }))
+
+ location, _ := time.LoadLocation("EST")
+
+ date := time.Date(2019, time.January, 13, 22, 3, 30, 15, location)
+
+ dateString := date.UTC().Format("2006/01/02 15:04:05")
+
+ event := Event{
+ Level: INFO,
+ MsgSimpleText: "TEST MSG",
+ Caller: "CALLER",
+ Filename: "FULL/FILENAME",
+ Line: 1,
+ Time: date,
+ }
+ expected := fmt.Sprintf("%s%s %s:%d:%s [%c] %s\n", prefix, dateString, event.Filename, event.Line, event.Caller, strings.ToUpper(event.Level.String())[0], event.MsgSimpleText)
+ var wg sync.WaitGroup
+ wg.Add(1)
+ go func() {
+ defer wg.Done()
+ listenReadAndClose(t, l, expected)
+ }()
+ logger.SendLogEvent(&event)
+ wg.Wait()
+
+ logger.Close()
+}
diff --git a/modules/log/event_writer_console.go b/modules/log/event_writer_console.go
new file mode 100644
index 0000000000..78183de644
--- /dev/null
+++ b/modules/log/event_writer_console.go
@@ -0,0 +1,40 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package log
+
+import (
+ "io"
+ "os"
+)
+
+type WriterConsoleOption struct {
+ Stderr bool
+}
+
+type eventWriterConsole struct {
+ *EventWriterBaseImpl
+}
+
+var _ EventWriter = (*eventWriterConsole)(nil)
+
+type nopCloser struct {
+ io.Writer
+}
+
+func (nopCloser) Close() error { return nil }
+
+func NewEventWriterConsole(name string, mode WriterMode) EventWriter {
+ w := &eventWriterConsole{EventWriterBaseImpl: NewEventWriterBase(name, "console", mode)}
+ opt := mode.WriterOption.(WriterConsoleOption)
+ if opt.Stderr {
+ w.OutputWriteCloser = nopCloser{os.Stderr}
+ } else {
+ w.OutputWriteCloser = nopCloser{os.Stdout}
+ }
+ return w
+}
+
+func init() {
+ RegisterEventWriter("console", NewEventWriterConsole)
+}
diff --git a/modules/log/event_writer_file.go b/modules/log/event_writer_file.go
new file mode 100644
index 0000000000..4f41b96453
--- /dev/null
+++ b/modules/log/event_writer_file.go
@@ -0,0 +1,48 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package log
+
+import (
+ "code.gitea.io/gitea/modules/util/rotatingfilewriter"
+)
+
+type WriterFileOption struct {
+ FileName string
+ MaxSize int64
+ LogRotate bool
+ DailyRotate bool
+ MaxDays int
+ Compress bool
+ CompressionLevel int
+}
+
+type eventWriterFile struct {
+ *EventWriterBaseImpl
+ fileWriter *rotatingfilewriter.RotatingFileWriter
+}
+
+var _ EventWriter = (*eventWriterFile)(nil)
+
+func NewEventWriterFile(name string, mode WriterMode) EventWriter {
+ w := &eventWriterFile{EventWriterBaseImpl: NewEventWriterBase(name, "file", mode)}
+ opt := mode.WriterOption.(WriterFileOption)
+ var err error
+ w.fileWriter, err = rotatingfilewriter.Open(opt.FileName, &rotatingfilewriter.Options{
+ Rotate: opt.LogRotate,
+ MaximumSize: opt.MaxSize,
+ RotateDaily: opt.DailyRotate,
+ KeepDays: opt.MaxDays,
+ Compress: opt.Compress,
+ CompressionLevel: opt.CompressionLevel,
+ })
+ if err != nil {
+ FallbackErrorf("unable to open log file %q: %v", opt.FileName, err)
+ }
+ w.OutputWriteCloser = w.fileWriter
+ return w
+}
+
+func init() {
+ RegisterEventWriter("file", NewEventWriterFile)
+}
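
A hedged sketch of how the options above map onto a writer instance; the file name and limits are placeholder values, and in practice they would be filled from Gitea's log configuration rather than written by hand:

package main

import (
	"compress/gzip"

	log "code.gitea.io/gitea/modules/log"
)

func main() {
	// size- and day-based rotation, with rotated files gzip-compressed
	w := log.NewEventWriterFile("file", log.WriterMode{
		Level: log.INFO,
		Flags: log.FlagsFromString("stdflags"),
		WriterOption: log.WriterFileOption{
			FileName:         "gitea.log", // placeholder path
			MaxSize:          256 * 1024 * 1024,
			LogRotate:        true,
			DailyRotate:      true,
			MaxDays:          7,
			Compress:         true,
			CompressionLevel: gzip.DefaultCompression,
		},
	})
	_ = w // normally started by the logger; its Run writes through the RotatingFileWriter
}
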
diff --git a/modules/log/file.go b/modules/log/file.go
deleted file mode 100644
index 2ec6de450c..0000000000
--- a/modules/log/file.go
+++ /dev/null
@@ -1,283 +0,0 @@
-// Copyright 2014 The Gogs Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package log
-
-import (
- "bufio"
- "compress/gzip"
- "errors"
- "fmt"
- "os"
- "path/filepath"
- "strings"
- "sync"
- "time"
-
- "code.gitea.io/gitea/modules/json"
- "code.gitea.io/gitea/modules/util"
-)
-
-// FileLogger implements LoggerProvider.
-// It writes messages by lines limit, file size limit, or time frequency.
-type FileLogger struct {
- WriterLogger
- mw *MuxWriter
- // The opened file
- Filename string `json:"filename"`
-
- // Rotate at size
- Maxsize int `json:"maxsize"`
- maxsizeCursize int
-
- // Rotate daily
- Daily bool `json:"daily"`
- Maxdays int64 `json:"maxdays"`
- dailyOpenDate int
-
- Rotate bool `json:"rotate"`
-
- Compress bool `json:"compress"`
- CompressionLevel int `json:"compressionLevel"`
-
- startLock sync.Mutex // Only one log can write to the file
-}
-
-// MuxWriter an *os.File writer with locker.
-type MuxWriter struct {
- mu sync.Mutex
- fd *os.File
- owner *FileLogger
-}
-
-// Write writes to os.File.
-func (mw *MuxWriter) Write(b []byte) (int, error) {
- mw.mu.Lock()
- defer mw.mu.Unlock()
- mw.owner.docheck(len(b))
- return mw.fd.Write(b)
-}
-
-// Close the internal writer
-func (mw *MuxWriter) Close() error {
- return mw.fd.Close()
-}
-
-// SetFd sets os.File in writer.
-func (mw *MuxWriter) SetFd(fd *os.File) {
- if mw.fd != nil {
- mw.fd.Close()
- }
- mw.fd = fd
-}
-
-// NewFileLogger create a FileLogger returning as LoggerProvider.
-func NewFileLogger() LoggerProvider {
- log := &FileLogger{
- Filename: "",
- Maxsize: 1 << 28, // 256 MB
- Daily: true,
- Maxdays: 7,
- Rotate: true,
- Compress: true,
- CompressionLevel: gzip.DefaultCompression,
- }
- log.Level = TRACE
- // use MuxWriter instead direct use os.File for lock write when rotate
- log.mw = new(MuxWriter)
- log.mw.owner = log
-
- return log
-}
-
-// Init file logger with json config.
-// config like:
-//
-// {
-// "filename":"log/gogs.log",
-// "maxsize":1<<30,
-// "daily":true,
-// "maxdays":15,
-// "rotate":true
-// }
-func (log *FileLogger) Init(config string) error {
- if err := json.Unmarshal([]byte(config), log); err != nil {
- return fmt.Errorf("Unable to parse JSON: %w", err)
- }
- if len(log.Filename) == 0 {
- return errors.New("config must have filename")
- }
- // set MuxWriter as Logger's io.Writer
- log.NewWriterLogger(log.mw)
- return log.StartLogger()
-}
-
-// StartLogger start file logger. create log file and set to locker-inside file writer.
-func (log *FileLogger) StartLogger() error {
- fd, err := log.createLogFile()
- if err != nil {
- return err
- }
- log.mw.SetFd(fd)
- return log.initFd()
-}
-
-func (log *FileLogger) docheck(size int) {
- log.startLock.Lock()
- defer log.startLock.Unlock()
- if log.Rotate && ((log.Maxsize > 0 && log.maxsizeCursize >= log.Maxsize) ||
- (log.Daily && time.Now().Day() != log.dailyOpenDate)) {
- if err := log.DoRotate(); err != nil {
- fmt.Fprintf(os.Stderr, "FileLogger(%q): %s\n", log.Filename, err)
- return
- }
- }
- log.maxsizeCursize += size
-}
-
-func (log *FileLogger) createLogFile() (*os.File, error) {
- // Open the log file
- return os.OpenFile(log.Filename, os.O_WRONLY|os.O_APPEND|os.O_CREATE, 0o660)
-}
-
-func (log *FileLogger) initFd() error {
- fd := log.mw.fd
- finfo, err := fd.Stat()
- if err != nil {
- return fmt.Errorf("get stat: %w", err)
- }
- log.maxsizeCursize = int(finfo.Size())
- log.dailyOpenDate = time.Now().Day()
- return nil
-}
-
-// DoRotate means it need to write file in new file.
-// new file name like xx.log.2013-01-01.2
-func (log *FileLogger) DoRotate() error {
- _, err := os.Lstat(log.Filename)
- if err == nil { // file exists
- // Find the next available number
- num := 1
- fname := ""
- for ; err == nil && num <= 999; num++ {
- fname = log.Filename + fmt.Sprintf(".%s.%03d", time.Now().Format("2006-01-02"), num)
- _, err = os.Lstat(fname)
- if log.Compress && err != nil {
- _, err = os.Lstat(fname + ".gz")
- }
- }
- // return error if the last file checked still existed
- if err == nil {
- return fmt.Errorf("rotate: cannot find free log number to rename %s", log.Filename)
- }
-
- fd := log.mw.fd
- fd.Close()
-
- // close fd before rename
- // Rename the file to its newfound home
- if err = util.Rename(log.Filename, fname); err != nil {
- return fmt.Errorf("Rotate: %w", err)
- }
-
- if log.Compress {
- go compressOldLogFile(fname, log.CompressionLevel) //nolint:errcheck
- }
-
- // re-start logger
- if err = log.StartLogger(); err != nil {
- return fmt.Errorf("Rotate StartLogger: %w", err)
- }
-
- go log.deleteOldLog()
- }
-
- return nil
-}
-
-func compressOldLogFile(fname string, compressionLevel int) error {
- reader, err := os.Open(fname)
- if err != nil {
- return err
- }
- defer reader.Close()
- buffer := bufio.NewReader(reader)
- fw, err := os.OpenFile(fname+".gz", os.O_WRONLY|os.O_CREATE, 0o660)
- if err != nil {
- return err
- }
- defer fw.Close()
- zw, err := gzip.NewWriterLevel(fw, compressionLevel)
- if err != nil {
- return err
- }
- defer zw.Close()
- _, err = buffer.WriteTo(zw)
- if err != nil {
- zw.Close()
- fw.Close()
- util.Remove(fname + ".gz") //nolint:errcheck
- return err
- }
- reader.Close()
- return util.Remove(fname)
-}
-
-func (log *FileLogger) deleteOldLog() {
- dir := filepath.Dir(log.Filename)
- _ = filepath.WalkDir(dir, func(path string, d os.DirEntry, err error) (returnErr error) {
- defer func() {
- if r := recover(); r != nil {
- returnErr = fmt.Errorf("Unable to delete old log '%s', error: %+v", path, r)
- }
- }()
-
- if err != nil {
- return err
- }
- if d.IsDir() {
- return nil
- }
- info, err := d.Info()
- if err != nil {
- return err
- }
- if info.ModTime().Unix() < (time.Now().Unix() - 60*60*24*log.Maxdays) {
- if strings.HasPrefix(filepath.Base(path), filepath.Base(log.Filename)) {
- if err := util.Remove(path); err != nil {
- returnErr = fmt.Errorf("Failed to remove %s: %w", path, err)
- }
- }
- }
- return returnErr
- })
-}
-
-// Flush flush file logger.
-// there are no buffering messages in file logger in memory.
-// flush file means sync file from disk.
-func (log *FileLogger) Flush() {
- _ = log.mw.fd.Sync()
-}
-
-// ReleaseReopen releases and reopens log files
-func (log *FileLogger) ReleaseReopen() error {
- closingErr := log.mw.fd.Close()
- startingErr := log.StartLogger()
- if startingErr != nil {
- if closingErr != nil {
- return fmt.Errorf("Error during closing: %v Error during starting: %v", closingErr, startingErr)
- }
- return startingErr
- }
- return closingErr
-}
-
-// GetName returns the default name for this implementation
-func (log *FileLogger) GetName() string {
- return "file"
-}
-
-func init() {
- Register("file", NewFileLogger)
-}
diff --git a/modules/log/file_test.go b/modules/log/file_test.go
deleted file mode 100644
index 34f7459806..0000000000
--- a/modules/log/file_test.go
+++ /dev/null
@@ -1,235 +0,0 @@
-// Copyright 2019 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package log
-
-import (
- "compress/gzip"
- "fmt"
- "io"
- "os"
- "path/filepath"
- "strings"
- "testing"
- "time"
-
- "github.com/stretchr/testify/assert"
-)
-
-func TestFileLoggerFails(t *testing.T) {
- tmpDir := t.TempDir()
-
- prefix := "TestPrefix "
- level := INFO
- flags := LstdFlags | LUTC | Lfuncname
- // filename := filepath.Join(tmpDir, "test.log")
-
- fileLogger := NewFileLogger()
- // realFileLogger, ok := fileLogger.(*FileLogger)
- // assert.True(t, ok)
-
- // Fail if there is bad json
- err := fileLogger.Init("{")
- assert.Error(t, err)
-
- // Fail if there is no filename
- err = fileLogger.Init(fmt.Sprintf("{\"prefix\":\"%s\",\"level\":\"%s\",\"flags\":%d,\"filename\":\"%s\"}", prefix, level.String(), flags, ""))
- assert.Error(t, err)
-
- // Fail if the file isn't a filename
- err = fileLogger.Init(fmt.Sprintf("{\"prefix\":\"%s\",\"level\":\"%s\",\"flags\":%d,\"filename\":\"%s\"}", prefix, level.String(), flags, filepath.ToSlash(tmpDir)))
- assert.Error(t, err)
-}
-
-func TestFileLogger(t *testing.T) {
- tmpDir := t.TempDir()
-
- prefix := "TestPrefix "
- level := INFO
- flags := LstdFlags | LUTC | Lfuncname
- filename := filepath.Join(tmpDir, "test.log")
-
- fileLogger := NewFileLogger()
- realFileLogger, ok := fileLogger.(*FileLogger)
- assert.True(t, ok)
-
- location, _ := time.LoadLocation("EST")
-
- date := time.Date(2019, time.January, 13, 22, 3, 30, 15, location)
-
- dateString := date.UTC().Format("2006/01/02 15:04:05")
-
- event := Event{
- level: INFO,
- msg: "TEST MSG",
- caller: "CALLER",
- filename: "FULL/FILENAME",
- line: 1,
- time: date,
- }
-
- expected := fmt.Sprintf("%s%s %s:%d:%s [%c] %s\n", prefix, dateString, event.filename, event.line, event.caller, strings.ToUpper(event.level.String())[0], event.msg)
-
- fileLogger.Init(fmt.Sprintf("{\"prefix\":\"%s\",\"level\":\"%s\",\"flags\":%d,\"filename\":\"%s\",\"maxsize\":%d,\"compress\":false}", prefix, level.String(), flags, filepath.ToSlash(filename), len(expected)*2))
-
- assert.Equal(t, flags, realFileLogger.Flags)
- assert.Equal(t, level, realFileLogger.Level)
- assert.Equal(t, level, fileLogger.GetLevel())
-
- fileLogger.LogEvent(&event)
- fileLogger.Flush()
- logData, err := os.ReadFile(filename)
- assert.NoError(t, err)
- assert.Equal(t, expected, string(logData))
-
- event.level = DEBUG
- fileLogger.LogEvent(&event)
- fileLogger.Flush()
- logData, err = os.ReadFile(filename)
- assert.NoError(t, err)
- assert.Equal(t, expected, string(logData))
-
- event.level = TRACE
- fileLogger.LogEvent(&event)
- fileLogger.Flush()
- logData, err = os.ReadFile(filename)
- assert.NoError(t, err)
- assert.Equal(t, expected, string(logData))
-
- event.level = WARN
- expected += fmt.Sprintf("%s%s %s:%d:%s [%c] %s\n", prefix, dateString, event.filename, event.line, event.caller, strings.ToUpper(event.level.String())[0], event.msg)
- fileLogger.LogEvent(&event)
- fileLogger.Flush()
- logData, err = os.ReadFile(filename)
- assert.NoError(t, err)
- assert.Equal(t, expected, string(logData))
-
- // Should rotate
- fileLogger.LogEvent(&event)
- fileLogger.Flush()
- logData, err = os.ReadFile(filename + fmt.Sprintf(".%s.%03d", time.Now().Format("2006-01-02"), 1))
- assert.NoError(t, err)
- assert.Equal(t, expected, string(logData))
-
- logData, err = os.ReadFile(filename)
- assert.NoError(t, err)
- expected = fmt.Sprintf("%s%s %s:%d:%s [%c] %s\n", prefix, dateString, event.filename, event.line, event.caller, strings.ToUpper(event.level.String())[0], event.msg)
- assert.Equal(t, expected, string(logData))
-
- for num := 2; num <= 999; num++ {
- file, err := os.OpenFile(filename+fmt.Sprintf(".%s.%03d", time.Now().Format("2006-01-02"), num), os.O_RDONLY|os.O_CREATE, 0o666)
- assert.NoError(t, err)
- file.Close()
- }
- err = realFileLogger.DoRotate()
- assert.Error(t, err)
-
- expected += fmt.Sprintf("%s%s %s:%d:%s [%c] %s\n", prefix, dateString, event.filename, event.line, event.caller, strings.ToUpper(event.level.String())[0], event.msg)
- fileLogger.LogEvent(&event)
- fileLogger.Flush()
- logData, err = os.ReadFile(filename)
- assert.NoError(t, err)
- assert.Equal(t, expected, string(logData))
-
- // Should fail to rotate
- expected += fmt.Sprintf("%s%s %s:%d:%s [%c] %s\n", prefix, dateString, event.filename, event.line, event.caller, strings.ToUpper(event.level.String())[0], event.msg)
- fileLogger.LogEvent(&event)
- fileLogger.Flush()
- logData, err = os.ReadFile(filename)
- assert.NoError(t, err)
- assert.Equal(t, expected, string(logData))
-
- fileLogger.Close()
-}
-
-func TestCompressFileLogger(t *testing.T) {
- tmpDir := t.TempDir()
-
- prefix := "TestPrefix "
- level := INFO
- flags := LstdFlags | LUTC | Lfuncname
- filename := filepath.Join(tmpDir, "test.log")
-
- fileLogger := NewFileLogger()
- realFileLogger, ok := fileLogger.(*FileLogger)
- assert.True(t, ok)
-
- location, _ := time.LoadLocation("EST")
-
- date := time.Date(2019, time.January, 13, 22, 3, 30, 15, location)
-
- dateString := date.UTC().Format("2006/01/02 15:04:05")
-
- event := Event{
- level: INFO,
- msg: "TEST MSG",
- caller: "CALLER",
- filename: "FULL/FILENAME",
- line: 1,
- time: date,
- }
-
- expected := fmt.Sprintf("%s%s %s:%d:%s [%c] %s\n", prefix, dateString, event.filename, event.line, event.caller, strings.ToUpper(event.level.String())[0], event.msg)
-
- fileLogger.Init(fmt.Sprintf("{\"prefix\":\"%s\",\"level\":\"%s\",\"flags\":%d,\"filename\":\"%s\",\"maxsize\":%d,\"compress\":true}", prefix, level.String(), flags, filepath.ToSlash(filename), len(expected)*2))
-
- fileLogger.LogEvent(&event)
- fileLogger.Flush()
- logData, err := os.ReadFile(filename)
- assert.NoError(t, err)
- assert.Equal(t, expected, string(logData))
-
- event.level = WARN
- expected += fmt.Sprintf("%s%s %s:%d:%s [%c] %s\n", prefix, dateString, event.filename, event.line, event.caller, strings.ToUpper(event.level.String())[0], event.msg)
- fileLogger.LogEvent(&event)
- fileLogger.Flush()
- logData, err = os.ReadFile(filename)
- assert.NoError(t, err)
- assert.Equal(t, expected, string(logData))
-
- // Should rotate
- fileLogger.LogEvent(&event)
- fileLogger.Flush()
-
- for num := 2; num <= 999; num++ {
- file, err := os.OpenFile(filename+fmt.Sprintf(".%s.%03d.gz", time.Now().Format("2006-01-02"), num), os.O_RDONLY|os.O_CREATE, 0o666)
- assert.NoError(t, err)
- file.Close()
- }
- err = realFileLogger.DoRotate()
- assert.Error(t, err)
-}
-
-func TestCompressOldFile(t *testing.T) {
- tmpDir := t.TempDir()
- fname := filepath.Join(tmpDir, "test")
- nonGzip := filepath.Join(tmpDir, "test-nonGzip")
-
- f, err := os.OpenFile(fname, os.O_CREATE|os.O_WRONLY, 0o660)
- assert.NoError(t, err)
- ng, err := os.OpenFile(nonGzip, os.O_CREATE|os.O_WRONLY, 0o660)
- assert.NoError(t, err)
-
- for i := 0; i < 999; i++ {
- f.WriteString("This is a test file\n")
- ng.WriteString("This is a test file\n")
- }
- f.Close()
- ng.Close()
-
- err = compressOldLogFile(fname, -1)
- assert.NoError(t, err)
-
- _, err = os.Lstat(fname + ".gz")
- assert.NoError(t, err)
-
- f, err = os.Open(fname + ".gz")
- assert.NoError(t, err)
- zr, err := gzip.NewReader(f)
- assert.NoError(t, err)
- data, err := io.ReadAll(zr)
- assert.NoError(t, err)
- original, err := os.ReadFile(nonGzip)
- assert.NoError(t, err)
- assert.Equal(t, original, data)
-}
diff --git a/modules/log/flags.go b/modules/log/flags.go
index 4a3732600b..f025159d53 100644
--- a/modules/log/flags.go
+++ b/modules/log/flags.go
@@ -1,9 +1,14 @@
-// Copyright 2019 The Gitea Authors. All rights reserved.
+// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package log
-import "strings"
+import (
+ "sort"
+ "strings"
+
+ "code.gitea.io/gitea/modules/json"
+)
// These flags define which text to prefix to each log entry generated
// by the Logger. Bits are or'ed together to control what's printed.
@@ -15,26 +20,30 @@ import "strings"
// The standard is:
// 2009/01/23 01:23:23 ...a/logger/c/d.go:23:runtime.Caller() [I]: message
const (
- Ldate = 1 << iota // the date in the local time zone: 2009/01/23
- Ltime // the time in the local time zone: 01:23:23
- Lmicroseconds // microsecond resolution: 01:23:23.123123. assumes Ltime.
- Llongfile // full file name and line number: /a/logger/c/d.go:23
- Lshortfile // final file name element and line number: d.go:23. overrides Llongfile
- Lfuncname // function name of the caller: runtime.Caller()
- Lshortfuncname // last part of the function name
- LUTC // if Ldate or Ltime is set, use UTC rather than the local time zone
- Llevelinitial // Initial character of the provided level in brackets eg. [I] for info
- Llevel // Provided level in brackets [INFO]
-
- // Last 20 characters of the filename
- Lmedfile = Lshortfile | Llongfile
-
- // LstdFlags is the initial value for the standard logger
- LstdFlags = Ldate | Ltime | Lmedfile | Lshortfuncname | Llevelinitial
+ Ldate uint32 = 1 << iota // the date in the local time zone: 2009/01/23
+ Ltime // the time in the local time zone: 01:23:23
+ Lmicroseconds // microsecond resolution: 01:23:23.123123. assumes Ltime.
+ Llongfile // full file name and line number: /a/logger/c/d.go:23
+ Lshortfile // final file name element and line number: d.go:23. overrides Llongfile
+ Lfuncname // function name of the caller: runtime.Caller()
+ Lshortfuncname // last part of the function name
+ LUTC // if Ldate or Ltime is set, use UTC rather than the local time zone
+ Llevelinitial // Initial character of the provided level in brackets, eg. [I] for info
+ Llevel // Provided level in brackets [INFO]
+ Lgopid // the Goroutine-PID of the context
+
+ Lmedfile = Lshortfile | Llongfile // last 20 characters of the filename
+ LstdFlags = Ldate | Ltime | Lmedfile | Lshortfuncname | Llevelinitial // default
)
-var flagFromString = map[string]int{
- "none": 0,
+const Ldefault = LstdFlags
+
+type Flags struct {
+ defined bool
+ flags uint32
+}
+
+var flagFromString = map[string]uint32{
"date": Ldate,
"time": Ltime,
"microseconds": Lmicroseconds,
@@ -45,22 +54,81 @@ var flagFromString = map[string]int{
"utc": LUTC,
"levelinitial": Llevelinitial,
"level": Llevel,
- "medfile": Lmedfile,
- "stdflags": LstdFlags,
+ "gopid": Lgopid,
+
+ "medfile": Lmedfile,
+ "stdflags": LstdFlags,
}
-// FlagsFromString takes a comma separated list of flags and returns
-// the flags for this string
-func FlagsFromString(from string) int {
- flags := 0
- for _, flag := range strings.Split(strings.ToLower(from), ",") {
- f, ok := flagFromString[strings.TrimSpace(flag)]
- if ok {
- flags |= f
+var flagComboToString = []struct {
+ flag uint32
+ name string
+}{
+ // name with more bits comes first
+ {LstdFlags, "stdflags"},
+ {Lmedfile, "medfile"},
+
+ {Ldate, "date"},
+ {Ltime, "time"},
+ {Lmicroseconds, "microseconds"},
+ {Llongfile, "longfile"},
+ {Lshortfile, "shortfile"},
+ {Lfuncname, "funcname"},
+ {Lshortfuncname, "shortfuncname"},
+ {LUTC, "utc"},
+ {Llevelinitial, "levelinitial"},
+ {Llevel, "level"},
+ {Lgopid, "gopid"},
+}
+
+func (f Flags) Bits() uint32 {
+ if !f.defined {
+ return Ldefault
+ }
+ return f.flags
+}
+
+func (f Flags) String() string {
+ flags := f.Bits()
+ var flagNames []string
+ for _, it := range flagComboToString {
+ if flags&it.flag == it.flag {
+ flags &^= it.flag
+ flagNames = append(flagNames, it.name)
}
}
- if flags == 0 {
- return -1
+ if len(flagNames) == 0 {
+ return "none"
+ }
+ sort.Strings(flagNames)
+ return strings.Join(flagNames, ",")
+}
+
+func (f *Flags) UnmarshalJSON(bytes []byte) error {
+ var s string
+ if err := json.Unmarshal(bytes, &s); err != nil {
+ return err
}
- return flags
+ *f = FlagsFromString(s)
+ return nil
+}
+
+func (f Flags) MarshalJSON() ([]byte, error) {
+ return []byte(`"` + f.String() + `"`), nil
+}
+
+func FlagsFromString(from string, def ...uint32) Flags {
+ from = strings.TrimSpace(from)
+ if from == "" && len(def) > 0 {
+ return Flags{defined: true, flags: def[0]}
+ }
+ flags := uint32(0)
+ for _, flag := range strings.Split(strings.ToLower(from), ",") {
+ flags |= flagFromString[strings.TrimSpace(flag)]
+ }
+ return Flags{defined: true, flags: flags}
+}
+
+func FlagsFromBits(flags uint32) Flags {
+ return Flags{defined: true, flags: flags}
}
diff --git a/modules/log/flags_test.go b/modules/log/flags_test.go
new file mode 100644
index 0000000000..03972a9fb0
--- /dev/null
+++ b/modules/log/flags_test.go
@@ -0,0 +1,30 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package log
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/modules/json"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestFlags(t *testing.T) {
+ assert.EqualValues(t, Ldefault, Flags{}.Bits())
+ assert.EqualValues(t, 0, FlagsFromString("").Bits())
+ assert.EqualValues(t, Lgopid, FlagsFromString("", Lgopid).Bits())
+ assert.EqualValues(t, 0, FlagsFromString("none", Lgopid).Bits())
+ assert.EqualValues(t, Ldate|Ltime, FlagsFromString("date,time", Lgopid).Bits())
+
+ assert.EqualValues(t, "stdflags", FlagsFromString("stdflags").String())
+ assert.EqualValues(t, "medfile", FlagsFromString("medfile").String())
+
+ bs, err := json.Marshal(FlagsFromString("utc,level"))
+ assert.NoError(t, err)
+ assert.EqualValues(t, `"level,utc"`, string(bs))
+ var flags Flags
+ assert.NoError(t, json.Unmarshal(bs, &flags))
+ assert.EqualValues(t, LUTC|Llevel, flags.Bits())
+}
diff --git a/modules/log/init.go b/modules/log/init.go
new file mode 100644
index 0000000000..798ba86410
--- /dev/null
+++ b/modules/log/init.go
@@ -0,0 +1,35 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package log
+
+import (
+ "runtime"
+ "strings"
+
+ "code.gitea.io/gitea/modules/process"
+ "code.gitea.io/gitea/modules/util/rotatingfilewriter"
+)
+
+var projectPackagePrefix string
+
+func init() {
+ _, filename, _, _ := runtime.Caller(0)
+ projectPackagePrefix = strings.TrimSuffix(filename, "modules/log/init.go")
+ if projectPackagePrefix == filename {
+		// in case the source code file is moved, we cannot trim the suffix; the code above should also be updated.
+ panic("unable to detect correct package prefix, please update file: " + filename)
+ }
+
+ rotatingfilewriter.ErrorPrintf = FallbackErrorf
+
+ process.Trace = func(start bool, pid process.IDType, description string, parentPID process.IDType, typ string) {
+ if start && parentPID != "" {
+ Log(1, TRACE, "Start %s: %s (from %s) (%s)", NewColoredValue(pid, FgHiYellow), description, NewColoredValue(parentPID, FgYellow), NewColoredValue(typ, Reset))
+ } else if start {
+ Log(1, TRACE, "Start %s: %s (%s)", NewColoredValue(pid, FgHiYellow), description, NewColoredValue(typ, Reset))
+ } else {
+ Log(1, TRACE, "Done %s: %s", NewColoredValue(pid, FgHiYellow), NewColoredValue(description, Reset))
+ }
+ }
+}
diff --git a/modules/log/level.go b/modules/log/level.go
index 3c8a736b30..01fa3f5e46 100644
--- a/modules/log/level.go
+++ b/modules/log/level.go
@@ -5,8 +5,6 @@ package log
import (
"bytes"
- "fmt"
- "os"
"strings"
"code.gitea.io/gitea/modules/json"
@@ -16,53 +14,53 @@ import (
type Level int
const (
- // TRACE represents the lowest log level
- TRACE Level = iota
- // DEBUG is for debug logging
+ UNDEFINED Level = iota
+ TRACE
DEBUG
- // INFO is for information
INFO
- // WARN is for warning information
WARN
- // ERROR is for error reporting
ERROR
- // CRITICAL is for critical errors
- CRITICAL
- // FATAL is for fatal errors
FATAL
- // NONE is for no logging
NONE
)
+const CRITICAL = ERROR // most logger frameworks don't support CRITICAL, and it doesn't seem useful
+
var toString = map[Level]string{
- TRACE: "trace",
- DEBUG: "debug",
- INFO: "info",
- WARN: "warn",
- ERROR: "error",
- CRITICAL: "critical",
- FATAL: "fatal",
- NONE: "none",
+ UNDEFINED: "undefined",
+
+ TRACE: "trace",
+ DEBUG: "debug",
+ INFO: "info",
+ WARN: "warn",
+ ERROR: "error",
+
+ FATAL: "fatal",
+ NONE: "none",
}
var toLevel = map[string]Level{
- "trace": TRACE,
- "debug": DEBUG,
- "info": INFO,
- "warn": WARN,
- "error": ERROR,
- "critical": CRITICAL,
- "fatal": FATAL,
- "none": NONE,
+ "undefined": UNDEFINED,
+
+ "trace": TRACE,
+ "debug": DEBUG,
+ "info": INFO,
+ "warn": WARN,
+ "warning": WARN,
+ "error": ERROR,
+
+ "fatal": FATAL,
+ "none": NONE,
}
-// Levels returns all the possible logging levels
-func Levels() []string {
- keys := make([]string, 0)
- for key := range toLevel {
- keys = append(keys, key)
- }
- return keys
+var levelToColor = map[Level][]ColorAttribute{
+ TRACE: {Bold, FgCyan},
+ DEBUG: {Bold, FgBlue},
+ INFO: {Bold, FgGreen},
+ WARN: {Bold, FgYellow},
+ ERROR: {Bold, FgRed},
+ FATAL: {Bold, BgRed},
+ NONE: {Reset},
}
func (l Level) String() string {
@@ -73,14 +71,13 @@ func (l Level) String() string {
return "info"
}
-// Color returns the color string for this Level
-func (l Level) Color() *[]byte {
+func (l Level) ColorAttributes() []ColorAttribute {
color, ok := levelToColor[l]
if ok {
- return &(color)
+ return color
}
none := levelToColor[NONE]
- return &none
+ return none
}
// MarshalJSON takes a Level and turns it into text
@@ -91,31 +88,29 @@ func (l Level) MarshalJSON() ([]byte, error) {
return buffer.Bytes(), nil
}
-// FromString takes a level string and returns a Level
-func FromString(level string) Level {
- temp, ok := toLevel[strings.ToLower(level)]
- if !ok {
- return INFO
- }
- return temp
-}
-
// UnmarshalJSON takes text and turns it into a Level
func (l *Level) UnmarshalJSON(b []byte) error {
- var tmp interface{}
+ var tmp any
err := json.Unmarshal(b, &tmp)
if err != nil {
- fmt.Fprintf(os.Stderr, "Err: %v", err)
return err
}
switch v := tmp.(type) {
case string:
- *l = FromString(v)
+ *l = LevelFromString(v)
case int:
- *l = FromString(Level(v).String())
+ *l = LevelFromString(Level(v).String())
default:
*l = INFO
}
return nil
}
+
+// LevelFromString takes a level string and returns a Level
+func LevelFromString(level string) Level {
+ if l, ok := toLevel[strings.ToLower(level)]; ok {
+ return l
+ }
+ return INFO
+}
diff --git a/modules/log/log.go b/modules/log/log.go
deleted file mode 100644
index eee2728bf6..0000000000
--- a/modules/log/log.go
+++ /dev/null
@@ -1,305 +0,0 @@
-// Copyright 2014 The Gogs Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package log
-
-import (
- "fmt"
- "os"
- "runtime"
- "strings"
- "sync"
-
- "code.gitea.io/gitea/modules/process"
-)
-
-type loggerMap struct {
- sync.Map
-}
-
-func (m *loggerMap) Load(k string) (*MultiChannelledLogger, bool) {
- v, ok := m.Map.Load(k)
- if !ok {
- return nil, false
- }
- l, ok := v.(*MultiChannelledLogger)
- return l, ok
-}
-
-func (m *loggerMap) Store(k string, v *MultiChannelledLogger) {
- m.Map.Store(k, v)
-}
-
-func (m *loggerMap) Delete(k string) {
- m.Map.Delete(k)
-}
-
-var (
- // DEFAULT is the name of the default logger
- DEFAULT = "default"
- // NamedLoggers map of named loggers
- NamedLoggers loggerMap
- prefix string
-)
-
-// NewLogger create a logger for the default logger
-func NewLogger(bufLen int64, name, provider, config string) *MultiChannelledLogger {
- err := NewNamedLogger(DEFAULT, bufLen, name, provider, config)
- if err != nil {
- CriticalWithSkip(1, "Unable to create default logger: %v", err)
- panic(err)
- }
- l, _ := NamedLoggers.Load(DEFAULT)
- return l
-}
-
-// NewNamedLogger creates a new named logger for a given configuration
-func NewNamedLogger(name string, bufLen int64, subname, provider, config string) error {
- logger, ok := NamedLoggers.Load(name)
- if !ok {
- logger = newLogger(name, bufLen)
- NamedLoggers.Store(name, logger)
- }
-
- return logger.SetLogger(subname, provider, config)
-}
-
-// DelNamedLogger closes and deletes the named logger
-func DelNamedLogger(name string) {
- l, ok := NamedLoggers.Load(name)
- if ok {
- NamedLoggers.Delete(name)
- l.Close()
- }
-}
-
-// DelLogger removes the named sublogger from the default logger
-func DelLogger(name string) error {
- logger, _ := NamedLoggers.Load(DEFAULT)
- found, err := logger.DelLogger(name)
- if !found {
- Trace("Log %s not found, no need to delete", name)
- }
- return err
-}
-
-// GetLogger returns either a named logger or the default logger
-func GetLogger(name string) *MultiChannelledLogger {
- logger, ok := NamedLoggers.Load(name)
- if ok {
- return logger
- }
- logger, _ = NamedLoggers.Load(DEFAULT)
- return logger
-}
-
-// GetLevel returns the minimum logger level
-func GetLevel() Level {
- l, _ := NamedLoggers.Load(DEFAULT)
- return l.GetLevel()
-}
-
-// GetStacktraceLevel returns the minimum logger level
-func GetStacktraceLevel() Level {
- l, _ := NamedLoggers.Load(DEFAULT)
- return l.GetStacktraceLevel()
-}
-
-// Trace records trace log
-func Trace(format string, v ...interface{}) {
- Log(1, TRACE, format, v...)
-}
-
-// IsTrace returns true if at least one logger is TRACE
-func IsTrace() bool {
- return GetLevel() <= TRACE
-}
-
-// Debug records debug log
-func Debug(format string, v ...interface{}) {
- Log(1, DEBUG, format, v...)
-}
-
-// IsDebug returns true if at least one logger is DEBUG
-func IsDebug() bool {
- return GetLevel() <= DEBUG
-}
-
-// Info records info log
-func Info(format string, v ...interface{}) {
- Log(1, INFO, format, v...)
-}
-
-// IsInfo returns true if at least one logger is INFO
-func IsInfo() bool {
- return GetLevel() <= INFO
-}
-
-// Warn records warning log
-func Warn(format string, v ...interface{}) {
- Log(1, WARN, format, v...)
-}
-
-// IsWarn returns true if at least one logger is WARN
-func IsWarn() bool {
- return GetLevel() <= WARN
-}
-
-// Error records error log
-func Error(format string, v ...interface{}) {
- Log(1, ERROR, format, v...)
-}
-
-// ErrorWithSkip records error log from "skip" calls back from this function
-func ErrorWithSkip(skip int, format string, v ...interface{}) {
- Log(skip+1, ERROR, format, v...)
-}
-
-// IsError returns true if at least one logger is ERROR
-func IsError() bool {
- return GetLevel() <= ERROR
-}
-
-// Critical records critical log
-func Critical(format string, v ...interface{}) {
- Log(1, CRITICAL, format, v...)
-}
-
-// CriticalWithSkip records critical log from "skip" calls back from this function
-func CriticalWithSkip(skip int, format string, v ...interface{}) {
- Log(skip+1, CRITICAL, format, v...)
-}
-
-// IsCritical returns true if at least one logger is CRITICAL
-func IsCritical() bool {
- return GetLevel() <= CRITICAL
-}
-
-// Fatal records fatal log and exit process
-func Fatal(format string, v ...interface{}) {
- Log(1, FATAL, format, v...)
- Close()
- os.Exit(1)
-}
-
-// FatalWithSkip records fatal log from "skip" calls back from this function
-func FatalWithSkip(skip int, format string, v ...interface{}) {
- Log(skip+1, FATAL, format, v...)
- Close()
- os.Exit(1)
-}
-
-// IsFatal returns true if at least one logger is FATAL
-func IsFatal() bool {
- return GetLevel() <= FATAL
-}
-
-// Pause pauses all the loggers
-func Pause() {
- NamedLoggers.Range(func(key, value interface{}) bool {
- logger := value.(*MultiChannelledLogger)
- logger.Pause()
- logger.Flush()
- return true
- })
-}
-
-// Resume resumes all the loggers
-func Resume() {
- NamedLoggers.Range(func(key, value interface{}) bool {
- logger := value.(*MultiChannelledLogger)
- logger.Resume()
- return true
- })
-}
-
-// ReleaseReopen releases and reopens logging files
-func ReleaseReopen() error {
- var accumulatedErr error
- NamedLoggers.Range(func(key, value interface{}) bool {
- logger := value.(*MultiChannelledLogger)
- if err := logger.ReleaseReopen(); err != nil {
- if accumulatedErr == nil {
- accumulatedErr = fmt.Errorf("Error reopening %s: %w", key.(string), err)
- } else {
- accumulatedErr = fmt.Errorf("Error reopening %s: %v & %w", key.(string), err, accumulatedErr)
- }
- }
- return true
- })
- return accumulatedErr
-}
-
-// Close closes all the loggers
-func Close() {
- l, ok := NamedLoggers.Load(DEFAULT)
- if !ok {
- return
- }
- NamedLoggers.Delete(DEFAULT)
- l.Close()
-}
-
-// Log a message with defined skip and at logging level
-// A skip of 0 refers to the caller of this command
-func Log(skip int, level Level, format string, v ...interface{}) {
- l, ok := NamedLoggers.Load(DEFAULT)
- if ok {
- l.Log(skip+1, level, format, v...) //nolint:errcheck
- }
-}
-
-// LoggerAsWriter is a io.Writer shim around the gitea log
-type LoggerAsWriter struct {
- ourLoggers []*MultiChannelledLogger
- level Level
-}
-
-// NewLoggerAsWriter creates a Writer representation of the logger with setable log level
-func NewLoggerAsWriter(level string, ourLoggers ...*MultiChannelledLogger) *LoggerAsWriter {
- if len(ourLoggers) == 0 {
- l, _ := NamedLoggers.Load(DEFAULT)
- ourLoggers = []*MultiChannelledLogger{l}
- }
- l := &LoggerAsWriter{
- ourLoggers: ourLoggers,
- level: FromString(level),
- }
- return l
-}
-
-// Write implements the io.Writer interface to allow spoofing of chi
-func (l *LoggerAsWriter) Write(p []byte) (int, error) {
- for _, logger := range l.ourLoggers {
- // Skip = 3 because this presumes that we have been called by log.Println()
- // If the caller has used log.Output or the like this will be wrong
- logger.Log(3, l.level, string(p)) //nolint:errcheck
- }
- return len(p), nil
-}
-
-// Log takes a given string and logs it at the set log-level
-func (l *LoggerAsWriter) Log(msg string) {
- for _, logger := range l.ourLoggers {
- // Set the skip to reference the call just above this
- _ = logger.Log(1, l.level, msg)
- }
-}
-
-func init() {
- process.Trace = func(start bool, pid process.IDType, description string, parentPID process.IDType, typ string) {
- if start && parentPID != "" {
- Log(1, TRACE, "Start %s: %s (from %s) (%s)", NewColoredValue(pid, FgHiYellow), description, NewColoredValue(parentPID, FgYellow), NewColoredValue(typ, Reset))
- } else if start {
- Log(1, TRACE, "Start %s: %s (%s)", NewColoredValue(pid, FgHiYellow), description, NewColoredValue(typ, Reset))
- } else {
- Log(1, TRACE, "Done %s: %s", NewColoredValue(pid, FgHiYellow), NewColoredValue(description, Reset))
- }
- }
- _, filename, _, _ := runtime.Caller(0)
- prefix = strings.TrimSuffix(filename, "modules/log/log.go")
- if prefix == filename {
- // in case the source code file is moved, we can not trim the suffix, the code above should also be updated.
- panic("unable to detect correct package prefix, please update file: " + filename)
- }
-}
diff --git a/modules/log/log_test.go b/modules/log/log_test.go
deleted file mode 100644
index 819cdb521f..0000000000
--- a/modules/log/log_test.go
+++ /dev/null
@@ -1,152 +0,0 @@
-// Copyright 2019 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package log
-
-import (
- "fmt"
- "testing"
-
- "github.com/stretchr/testify/assert"
-)
-
-func baseConsoleTest(t *testing.T, logger *MultiChannelledLogger) (chan []byte, chan bool) {
- written := make(chan []byte)
- closed := make(chan bool)
-
- c := CallbackWriteCloser{
- callback: func(p []byte, close bool) {
- written <- p
- closed <- close
- },
- }
- m := logger.MultiChannelledLog
-
- channelledLog := m.GetEventLogger("console")
- assert.NotEmpty(t, channelledLog)
- realChanLog, ok := channelledLog.(*ChannelledLog)
- assert.True(t, ok)
- realCL, ok := realChanLog.loggerProvider.(*ConsoleLogger)
- assert.True(t, ok)
- assert.Equal(t, INFO, realCL.Level)
- realCL.out = c
-
- format := "test: %s"
- args := []interface{}{"A"}
-
- logger.Log(0, INFO, format, args...)
- line := <-written
- assert.Contains(t, string(line), fmt.Sprintf(format, args...))
- assert.False(t, <-closed)
-
- format = "test2: %s"
- logger.Warn(format, args...)
- line = <-written
-
- assert.Contains(t, string(line), fmt.Sprintf(format, args...))
- assert.False(t, <-closed)
-
- format = "testerror: %s"
- logger.Error(format, args...)
- line = <-written
- assert.Contains(t, string(line), fmt.Sprintf(format, args...))
- assert.False(t, <-closed)
- return written, closed
-}
-
-func TestNewLoggerUnexported(t *testing.T) {
- level := INFO
- logger := newLogger("UNEXPORTED", 0)
- err := logger.SetLogger("console", "console", fmt.Sprintf(`{"level":"%s"}`, level.String()))
- assert.NoError(t, err)
- out := logger.MultiChannelledLog.GetEventLogger("console")
- assert.NotEmpty(t, out)
- chanlog, ok := out.(*ChannelledLog)
- assert.True(t, ok)
- assert.Equal(t, "console", chanlog.provider)
- assert.Equal(t, INFO, logger.GetLevel())
- baseConsoleTest(t, logger)
-}
-
-func TestNewLoggger(t *testing.T) {
- level := INFO
- logger := NewLogger(0, "console", "console", fmt.Sprintf(`{"level":"%s"}`, level.String()))
-
- assert.Equal(t, INFO, GetLevel())
- assert.False(t, IsTrace())
- assert.False(t, IsDebug())
- assert.True(t, IsInfo())
- assert.True(t, IsWarn())
- assert.True(t, IsError())
-
- written, closed := baseConsoleTest(t, logger)
-
- format := "test: %s"
- args := []interface{}{"A"}
-
- Log(0, INFO, format, args...)
- line := <-written
- assert.Contains(t, string(line), fmt.Sprintf(format, args...))
- assert.False(t, <-closed)
-
- Info(format, args...)
- line = <-written
- assert.Contains(t, string(line), fmt.Sprintf(format, args...))
- assert.False(t, <-closed)
-
- go DelLogger("console")
- line = <-written
- assert.Equal(t, "", string(line))
- assert.True(t, <-closed)
-}
-
-func TestNewLogggerRecreate(t *testing.T) {
- level := INFO
- NewLogger(0, "console", "console", fmt.Sprintf(`{"level":"%s"}`, level.String()))
-
- assert.Equal(t, INFO, GetLevel())
- assert.False(t, IsTrace())
- assert.False(t, IsDebug())
- assert.True(t, IsInfo())
- assert.True(t, IsWarn())
- assert.True(t, IsError())
-
- format := "test: %s"
- args := []interface{}{"A"}
-
- Log(0, INFO, format, args...)
-
- NewLogger(0, "console", "console", fmt.Sprintf(`{"level":"%s"}`, level.String()))
-
- assert.Equal(t, INFO, GetLevel())
- assert.False(t, IsTrace())
- assert.False(t, IsDebug())
- assert.True(t, IsInfo())
- assert.True(t, IsWarn())
- assert.True(t, IsError())
-
- Log(0, INFO, format, args...)
-
- assert.Panics(t, func() {
- NewLogger(0, "console", "console", fmt.Sprintf(`{"level":"%s"`, level.String()))
- })
-
- go DelLogger("console")
-
- // We should be able to redelete without a problem
- go DelLogger("console")
-}
-
-func TestNewNamedLogger(t *testing.T) {
- level := INFO
- err := NewNamedLogger("test", 0, "console", "console", fmt.Sprintf(`{"level":"%s"}`, level.String()))
- assert.NoError(t, err)
- logger, _ := NamedLoggers.Load("test")
- assert.Equal(t, level, logger.GetLevel())
-
- written, closed := baseConsoleTest(t, logger)
- go DelNamedLogger("test")
- line := <-written
- assert.Equal(t, "", string(line))
- assert.True(t, <-closed)
-}
diff --git a/modules/log/logger.go b/modules/log/logger.go
index 71949e29b8..a833b6ef0f 100644
--- a/modules/log/logger.go
+++ b/modules/log/logger.go
@@ -1,141 +1,50 @@
-// Copyright 2019 The Gitea Authors. All rights reserved.
+// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
+// Package log provides logging capabilities for Gitea.
+// Concepts:
+//
+// * Logger: a Logger provides logging functions and dispatches log events to all its writers
+//
+// * EventWriter: writes log events to a destination (eg: file, console)
+// - EventWriterBase: the base struct of a writer, it contains common fields and functions for all writers
+// - WriterType: the type name of a writer, eg: console, file
+// - WriterName: aka Mode Name in the documentation, the name of a writer instance, usually defined by the config file.
+// It is called "mode name" because the old code used MODE as the config key; the concept is kept for compatibility.
+//
+// * WriterMode: the common options for all writers, eg: log level.
+// - WriterConsoleOption and others: the specified options for a writer, eg: file path, remote address.
+//
+// Call graph:
+// -> log.Info()
+// -> LoggerImpl.Log()
+// -> LoggerImpl.SendLogEvent, then the event goes into writer's goroutines
+// -> EventWriter.Run() handles the events
package log
-import "os"
-
-// Logger is the basic interface for logging
-type Logger interface {
- LevelLogger
- Trace(format string, v ...interface{})
- IsTrace() bool
- Debug(format string, v ...interface{})
- IsDebug() bool
- Info(format string, v ...interface{})
- IsInfo() bool
- Warn(format string, v ...interface{})
- IsWarn() bool
- Error(format string, v ...interface{})
- ErrorWithSkip(skip int, format string, v ...interface{})
- IsError() bool
- Critical(format string, v ...interface{})
- CriticalWithSkip(skip int, format string, v ...interface{})
- IsCritical() bool
- Fatal(format string, v ...interface{})
- FatalWithSkip(skip int, format string, v ...interface{})
- IsFatal() bool
-}
-
-// LevelLogger is the simplest logging interface
-type LevelLogger interface {
- Flush()
- Close()
+// BaseLogger provides the basic logging functions
+type BaseLogger interface {
+ Log(skip int, level Level, format string, v ...any)
GetLevel() Level
- Log(skip int, level Level, format string, v ...interface{}) error
}
-// SettableLogger is the interface of loggers which have subloggers
-type SettableLogger interface {
- SetLogger(name, provider, config string) error
- DelLogger(name string) (bool, error)
-}
+// LevelLogger provides level-related logging functions
+type LevelLogger interface {
+ LevelEnabled(level Level) bool
-// StacktraceLogger is a logger that can log stacktraces
-type StacktraceLogger interface {
- GetStacktraceLevel() Level
+ Trace(format string, v ...any)
+ Debug(format string, v ...any)
+ Info(format string, v ...any)
+ Warn(format string, v ...any)
+ Error(format string, v ...any)
+ Critical(format string, v ...any)
}
-// LevelLoggerLogger wraps a LevelLogger as a Logger
-type LevelLoggerLogger struct {
+type Logger interface {
+ BaseLogger
LevelLogger
}
-// Trace records trace log
-func (l *LevelLoggerLogger) Trace(format string, v ...interface{}) {
- l.Log(1, TRACE, format, v...) //nolint:errcheck
-}
-
-// IsTrace returns true if the logger is TRACE
-func (l *LevelLoggerLogger) IsTrace() bool {
- return l.GetLevel() <= TRACE
-}
-
-// Debug records debug log
-func (l *LevelLoggerLogger) Debug(format string, v ...interface{}) {
- l.Log(1, DEBUG, format, v...) //nolint:errcheck
-}
-
-// IsDebug returns true if the logger is DEBUG
-func (l *LevelLoggerLogger) IsDebug() bool {
- return l.GetLevel() <= DEBUG
-}
-
-// Info records information log
-func (l *LevelLoggerLogger) Info(format string, v ...interface{}) {
- l.Log(1, INFO, format, v...) //nolint:errcheck
-}
-
-// IsInfo returns true if the logger is INFO
-func (l *LevelLoggerLogger) IsInfo() bool {
- return l.GetLevel() <= INFO
-}
-
-// Warn records warning log
-func (l *LevelLoggerLogger) Warn(format string, v ...interface{}) {
- l.Log(1, WARN, format, v...) //nolint:errcheck
-}
-
-// IsWarn returns true if the logger is WARN
-func (l *LevelLoggerLogger) IsWarn() bool {
- return l.GetLevel() <= WARN
-}
-
-// Error records error log
-func (l *LevelLoggerLogger) Error(format string, v ...interface{}) {
- l.Log(1, ERROR, format, v...) //nolint:errcheck
-}
-
-// ErrorWithSkip records error log from "skip" calls back from this function
-func (l *LevelLoggerLogger) ErrorWithSkip(skip int, format string, v ...interface{}) {
- l.Log(skip+1, ERROR, format, v...) //nolint:errcheck
-}
-
-// IsError returns true if the logger is ERROR
-func (l *LevelLoggerLogger) IsError() bool {
- return l.GetLevel() <= ERROR
-}
-
-// Critical records critical log
-func (l *LevelLoggerLogger) Critical(format string, v ...interface{}) {
- l.Log(1, CRITICAL, format, v...) //nolint:errcheck
-}
-
-// CriticalWithSkip records critical log from "skip" calls back from this function
-func (l *LevelLoggerLogger) CriticalWithSkip(skip int, format string, v ...interface{}) {
- l.Log(skip+1, CRITICAL, format, v...) //nolint:errcheck
-}
-
-// IsCritical returns true if the logger is CRITICAL
-func (l *LevelLoggerLogger) IsCritical() bool {
- return l.GetLevel() <= CRITICAL
-}
-
-// Fatal records fatal log and exit the process
-func (l *LevelLoggerLogger) Fatal(format string, v ...interface{}) {
- l.Log(1, FATAL, format, v...) //nolint:errcheck
- l.Close()
- os.Exit(1)
-}
-
-// FatalWithSkip records fatal log from "skip" calls back from this function and exits the process
-func (l *LevelLoggerLogger) FatalWithSkip(skip int, format string, v ...interface{}) {
- l.Log(skip+1, FATAL, format, v...) //nolint:errcheck
- l.Close()
- os.Exit(1)
-}
-
-// IsFatal returns true if the logger is FATAL
-func (l *LevelLoggerLogger) IsFatal() bool {
- return l.GetLevel() <= FATAL
+type LogStringer interface { //nolint:revive
+ LogString() string
}
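
The package comment above outlines the new concepts (Logger, EventWriter, WriterMode) and the call graph. As a rough illustration (not part of the patch), the sketch below wires a console EventWriter into a Logger using constructors that appear later in this diff (NewEventWriterConsole, NewLoggerWithWriters); the WriterMode fields are taken from SetConsoleLogger in logger_global.go.

package main

import (
	"context"

	"code.gitea.io/gitea/modules/log"
)

func main() {
	// Build a console writer with its WriterMode (level, flags, colorize).
	writer := log.NewEventWriterConsole("console", log.WriterMode{
		Level:    log.INFO,
		Flags:    log.FlagsFromBits(log.LstdFlags),
		Colorize: log.CanColorStdout,
	})

	// A Logger dispatches each log event to all of its writers.
	logger := log.NewLoggerWithWriters(context.Background(), writer)
	defer logger.Close() // non-shared writers are stopped and flushed here

	logger.Info("hello from the new logger, answer=%d", 42)
}
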
diff --git a/modules/log/logger_global.go b/modules/log/logger_global.go
new file mode 100644
index 0000000000..f100341254
--- /dev/null
+++ b/modules/log/logger_global.go
@@ -0,0 +1,83 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package log
+
+import (
+ "fmt"
+ "os"
+)
+
+// FallbackErrorf is the last chance to show an error if the logger has internal errors
+func FallbackErrorf(format string, args ...any) {
+ _, _ = fmt.Fprintf(os.Stderr, format+"\n", args...)
+}
+
+func GetLevel() Level {
+ return GetLogger(DEFAULT).GetLevel()
+}
+
+func Log(skip int, level Level, format string, v ...any) {
+ GetLogger(DEFAULT).Log(skip+1, level, format, v...)
+}
+
+func Trace(format string, v ...any) {
+ Log(1, TRACE, format, v...)
+}
+
+func IsTrace() bool {
+ return GetLevel() <= TRACE
+}
+
+func Debug(format string, v ...any) {
+ Log(1, DEBUG, format, v...)
+}
+
+func IsDebug() bool {
+ return GetLevel() <= DEBUG
+}
+
+func Info(format string, v ...any) {
+ Log(1, INFO, format, v...)
+}
+
+func Warn(format string, v ...any) {
+ Log(1, WARN, format, v...)
+}
+
+func Error(format string, v ...any) {
+ Log(1, ERROR, format, v...)
+}
+
+func ErrorWithSkip(skip int, format string, v ...any) {
+ Log(skip+1, ERROR, format, v...)
+}
+
+func Critical(format string, v ...any) {
+ Log(1, CRITICAL, format, v...)
+}
+
+// Fatal records fatal log and exit process
+func Fatal(format string, v ...any) {
+ Log(1, FATAL, format, v...)
+ GetManager().Close()
+ os.Exit(1)
+}
+
+func GetLogger(name string) Logger {
+ return GetManager().GetLogger(name)
+}
+
+func IsLoggerEnabled(name string) bool {
+ return GetManager().GetLogger(name).IsEnabled()
+}
+
+func SetConsoleLogger(loggerName, writerName string, level Level) {
+ writer := NewEventWriterConsole(writerName, WriterMode{
+ Level: level,
+ Flags: FlagsFromBits(LstdFlags),
+ Colorize: CanColorStdout,
+ WriterOption: WriterConsoleOption{},
+ })
+ GetManager().GetLogger(loggerName).RemoveAllWriters().AddWriters(writer)
+}
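
A hedged sketch of how these global helpers are meant to be used; it relies only on functions visible in this file (SetConsoleLogger, Info, Debug, IsDebug, GetLogger) plus the DEFAULT constant from manager.go, and assumes a normal program importing modules/log.

package main

import "code.gitea.io/gitea/modules/log"

func main() {
	// Replace all writers of the default logger with a single console writer at DEBUG level.
	log.SetConsoleLogger(log.DEFAULT, "console", log.DEBUG)

	// The package-level helpers forward to the "default" logger.
	log.Info("server started on %s", ":3000")
	if log.IsDebug() {
		log.Debug("debug details: %v", []int{1, 2, 3})
	}

	// A named logger is created on demand and can be routed separately, e.g. an access log.
	access := log.GetLogger("access")
	access.Info("GET / 200")
}
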
diff --git a/modules/log/logger_impl.go b/modules/log/logger_impl.go
new file mode 100644
index 0000000000..c7e8fde3c0
--- /dev/null
+++ b/modules/log/logger_impl.go
@@ -0,0 +1,239 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package log
+
+import (
+ "context"
+ "runtime"
+ "strings"
+ "sync"
+ "sync/atomic"
+ "time"
+
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/util"
+)
+
+type LoggerImpl struct {
+ LevelLogger
+
+ ctx context.Context
+ ctxCancel context.CancelFunc
+
+ level atomic.Int32
+ stacktraceLevel atomic.Int32
+
+ eventWriterMu sync.RWMutex
+ eventWriters map[string]EventWriter
+}
+
+var (
+ _ BaseLogger = (*LoggerImpl)(nil)
+ _ LevelLogger = (*LoggerImpl)(nil)
+)
+
+// SendLogEvent sends a log event to all writers
+func (l *LoggerImpl) SendLogEvent(event *Event) {
+ l.eventWriterMu.RLock()
+ defer l.eventWriterMu.RUnlock()
+
+ if len(l.eventWriters) == 0 {
+ FallbackErrorf("[no logger writer]: %s", event.MsgSimpleText)
+ return
+ }
+
+ // the writers have their own goroutines; the message arguments (which may implement Stringer) must not be used in other goroutines,
+ // so the event message must be formatted here
+ msgFormat, msgArgs := event.msgFormat, event.msgArgs
+ event.msgFormat, event.msgArgs = "(already processed by formatters)", nil
+
+ for _, w := range l.eventWriters {
+ if event.Level < w.GetLevel() {
+ continue
+ }
+ formatted := &EventFormatted{
+ Origin: event,
+ Msg: w.Base().FormatMessage(w.Base().Mode, event, msgFormat, msgArgs...),
+ }
+ select {
+ case w.Base().Queue <- formatted:
+ default:
+ bs, _ := json.Marshal(event)
+ FallbackErrorf("log writer %q queue is full, event: %v", w.GetWriterName(), string(bs))
+ }
+ }
+}
+
+// syncLevelInternal syncs the level of the logger with the levels of the writers
+func (l *LoggerImpl) syncLevelInternal() {
+ lowestLevel := NONE
+ for _, w := range l.eventWriters {
+ if w.GetLevel() < lowestLevel {
+ lowestLevel = w.GetLevel()
+ }
+ }
+ l.level.Store(int32(lowestLevel))
+
+ lowestLevel = NONE
+ for _, w := range l.eventWriters {
+ if w.Base().Mode.StacktraceLevel < lowestLevel {
+ lowestLevel = w.Base().Mode.StacktraceLevel
+ }
+ }
+ l.stacktraceLevel.Store(int32(lowestLevel))
+}
+
+// removeWriterInternal removes a writer from the logger, and stops it if it's not shared
+func (l *LoggerImpl) removeWriterInternal(w EventWriter) {
+ if !w.Base().shared {
+ eventWriterStopWait(w) // only stop non-shared writers, shared writers are managed by the manager
+ }
+ delete(l.eventWriters, w.GetWriterName())
+}
+
+// AddWriters adds writers to the logger and starts them. An existing writer with the same name is replaced by the new one.
+func (l *LoggerImpl) AddWriters(writer ...EventWriter) {
+ l.eventWriterMu.Lock()
+ defer l.eventWriterMu.Unlock()
+
+ for _, w := range writer {
+ if old, ok := l.eventWriters[w.GetWriterName()]; ok {
+ l.removeWriterInternal(old)
+ }
+ }
+
+ for _, w := range writer {
+ l.eventWriters[w.GetWriterName()] = w
+ eventWriterStartGo(l.ctx, w, false)
+ }
+
+ l.syncLevelInternal()
+}
+
+// RemoveWriter removes a writer from the logger, and the writer is closed and flushed if it is not shared
+func (l *LoggerImpl) RemoveWriter(modeName string) error {
+ l.eventWriterMu.Lock()
+ defer l.eventWriterMu.Unlock()
+
+ w, ok := l.eventWriters[modeName]
+ if !ok {
+ return util.ErrNotExist
+ }
+
+ l.removeWriterInternal(w)
+ l.syncLevelInternal()
+ return nil
+}
+
+// RemoveAllWriters removes all writers from the logger, non-shared writers are closed and flushed
+func (l *LoggerImpl) RemoveAllWriters() *LoggerImpl {
+ l.eventWriterMu.Lock()
+ defer l.eventWriterMu.Unlock()
+
+ for _, w := range l.eventWriters {
+ l.removeWriterInternal(w)
+ }
+ l.eventWriters = map[string]EventWriter{}
+ l.syncLevelInternal()
+ return l
+}
+
+// DumpWriters dumps the writers as a JSON map, it's used for debugging and display purposes.
+func (l *LoggerImpl) DumpWriters() map[string]any {
+ l.eventWriterMu.RLock()
+ defer l.eventWriterMu.RUnlock()
+
+ writers := make(map[string]any, len(l.eventWriters))
+ for k, w := range l.eventWriters {
+ bs, err := json.Marshal(w.Base().Mode)
+ if err != nil {
+ FallbackErrorf("marshal writer %q to dump failed: %v", k, err)
+ continue
+ }
+ m := map[string]any{}
+ _ = json.Unmarshal(bs, &m)
+ m["WriterType"] = w.GetWriterType()
+ writers[k] = m
+ }
+ return writers
+}
+
+// Close closes the logger, non-shared writers are closed and flushed
+func (l *LoggerImpl) Close() {
+ l.RemoveAllWriters()
+ l.ctxCancel()
+}
+
+// IsEnabled returns true if the logger is enabled: it has a working level and at least one writer
+// Fatal is not considered enabled, because it's a special case and the process just exits
+func (l *LoggerImpl) IsEnabled() bool {
+ l.eventWriterMu.RLock()
+ defer l.eventWriterMu.RUnlock()
+ return l.level.Load() < int32(FATAL) && len(l.eventWriters) > 0
+}
+
+// Log prepares the log event, if the level matches, the event will be sent to the writers
+func (l *LoggerImpl) Log(skip int, level Level, format string, logArgs ...any) {
+ if Level(l.level.Load()) > level {
+ return
+ }
+
+ event := &Event{
+ Time: time.Now(),
+ Level: level,
+ Caller: "?()",
+ }
+
+ pc, filename, line, ok := runtime.Caller(skip + 1)
+ if ok {
+ fn := runtime.FuncForPC(pc)
+ if fn != nil {
+ event.Caller = fn.Name() + "()"
+ }
+ }
+ event.Filename, event.Line = strings.TrimPrefix(filename, projectPackagePrefix), line
+
+ if l.stacktraceLevel.Load() <= int32(level) {
+ event.Stacktrace = Stack(skip + 1)
+ }
+
+ labels := getGoroutineLabels()
+ if labels != nil {
+ event.GoroutinePid = labels["pid"]
+ }
+
+ // get a simple text message without color
+ msgArgs := make([]any, len(logArgs))
+ copy(msgArgs, logArgs)
+
+ // handle LogStringer values
+ for i, v := range msgArgs {
+ if cv, ok := v.(*ColoredValue); ok {
+ if s, ok := cv.v.(LogStringer); ok {
+ cv.v = logStringFormatter{v: s}
+ }
+ } else if s, ok := v.(LogStringer); ok {
+ msgArgs[i] = logStringFormatter{v: s}
+ }
+ }
+
+ event.MsgSimpleText = colorSprintf(false, format, msgArgs...)
+ event.msgFormat = format
+ event.msgArgs = msgArgs
+ l.SendLogEvent(event)
+}
+
+func (l *LoggerImpl) GetLevel() Level {
+ return Level(l.level.Load())
+}
+
+func NewLoggerWithWriters(ctx context.Context, writer ...EventWriter) *LoggerImpl {
+ l := &LoggerImpl{}
+ l.ctx, l.ctxCancel = context.WithCancel(ctx)
+ l.LevelLogger = BaseLoggerToGeneralLogger(l)
+ l.eventWriters = map[string]EventWriter{}
+ l.syncLevelInternal()
+ l.AddWriters(writer...)
+ return l
+}
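
To make the level-syncing behaviour above concrete, here is a small sketch (assuming the console writer constructor from logger_global.go, not part of the patch): the logger's effective level is the lowest level among its writers, and it is re-synced whenever writers are added or removed.

package main

import (
	"context"
	"fmt"

	"code.gitea.io/gitea/modules/log"
)

func main() {
	logger := log.NewLoggerWithWriters(context.Background())
	fmt.Println(logger.GetLevel(), logger.IsEnabled()) // NONE false: no writers yet

	logger.AddWriters(log.NewEventWriterConsole("console-warn", log.WriterMode{
		Level: log.WARN, Flags: log.FlagsFromBits(log.LstdFlags),
	}))
	fmt.Println(logger.GetLevel()) // WARN

	logger.AddWriters(log.NewEventWriterConsole("console-debug", log.WriterMode{
		Level: log.DEBUG, Flags: log.FlagsFromBits(log.LstdFlags),
	}))
	fmt.Println(logger.GetLevel()) // DEBUG: the lowest writer level wins

	_ = logger.RemoveWriter("console-debug") // removing a writer re-syncs the level
	fmt.Println(logger.GetLevel())           // WARN again

	logger.Close() // stops and flushes the remaining non-shared writer
}
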
diff --git a/modules/log/logger_test.go b/modules/log/logger_test.go
new file mode 100644
index 0000000000..1fb63bf629
--- /dev/null
+++ b/modules/log/logger_test.go
@@ -0,0 +1,145 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package log
+
+import (
+ "context"
+ "sync"
+ "testing"
+ "time"
+
+ "github.com/stretchr/testify/assert"
+)
+
+type dummyWriter struct {
+ *EventWriterBaseImpl
+
+ delay time.Duration
+
+ mu sync.Mutex
+ logs []string
+}
+
+func (d *dummyWriter) Write(p []byte) (n int, err error) {
+ if d.delay > 0 {
+ time.Sleep(d.delay)
+ }
+ d.mu.Lock()
+ defer d.mu.Unlock()
+ d.logs = append(d.logs, string(p))
+ return len(p), nil
+}
+
+func (d *dummyWriter) Close() error {
+ return nil
+}
+
+func (d *dummyWriter) GetLogs() []string {
+ d.mu.Lock()
+ defer d.mu.Unlock()
+ logs := make([]string, len(d.logs))
+ copy(logs, d.logs)
+ return logs
+}
+
+func newDummyWriter(name string, level Level, delay time.Duration) *dummyWriter {
+ w := &dummyWriter{
+ EventWriterBaseImpl: NewEventWriterBase(name, "dummy", WriterMode{Level: level, Flags: FlagsFromBits(0)}),
+ }
+ w.delay = delay
+ w.Base().OutputWriteCloser = w
+ return w
+}
+
+func TestLogger(t *testing.T) {
+ logger := NewLoggerWithWriters(context.Background())
+
+ dump := logger.DumpWriters()
+ assert.EqualValues(t, 0, len(dump))
+ assert.EqualValues(t, NONE, logger.GetLevel())
+ assert.False(t, logger.IsEnabled())
+
+ w1 := newDummyWriter("dummy-1", DEBUG, 0)
+ logger.AddWriters(w1)
+ assert.EqualValues(t, DEBUG, logger.GetLevel())
+
+ w2 := newDummyWriter("dummy-2", WARN, 200*time.Millisecond)
+ logger.AddWriters(w2)
+ assert.EqualValues(t, DEBUG, logger.GetLevel())
+
+ dump = logger.DumpWriters()
+ assert.EqualValues(t, 2, len(dump))
+
+ logger.Trace("trace-level") // this level is not logged
+ logger.Debug("debug-level")
+ logger.Error("error-level")
+
+ // w2 is slow, so only w1 has logs
+ time.Sleep(100 * time.Millisecond)
+ assert.Equal(t, []string{"debug-level\n", "error-level\n"}, w1.GetLogs())
+ assert.Equal(t, []string{}, w2.GetLogs())
+
+ logger.Close()
+
+ // after Close, all logs are flushed
+ assert.Equal(t, []string{"debug-level\n", "error-level\n"}, w1.GetLogs())
+ assert.Equal(t, []string{"error-level\n"}, w2.GetLogs())
+}
+
+func TestLoggerPause(t *testing.T) {
+ logger := NewLoggerWithWriters(context.Background())
+
+ w1 := newDummyWriter("dummy-1", DEBUG, 0)
+ logger.AddWriters(w1)
+
+ GetManager().PauseAll()
+
+ logger.Info("info-level")
+ time.Sleep(100 * time.Millisecond)
+ assert.Equal(t, []string{}, w1.GetLogs())
+
+ GetManager().ResumeAll()
+
+ time.Sleep(100 * time.Millisecond)
+ assert.Equal(t, []string{"info-level\n"}, w1.GetLogs())
+
+ logger.Close()
+}
+
+type testLogString struct {
+ Field string
+}
+
+func (t testLogString) LogString() string {
+ return "log-string"
+}
+
+func TestLoggerLogString(t *testing.T) {
+ logger := NewLoggerWithWriters(context.Background())
+
+ w1 := newDummyWriter("dummy-1", DEBUG, 0)
+ w1.Mode.Colorize = true
+ logger.AddWriters(w1)
+
+ logger.Info("%s %s %#v %v", testLogString{}, &testLogString{}, testLogString{Field: "detail"}, NewColoredValue(testLogString{}, FgRed))
+ logger.Close()
+
+ assert.Equal(t, []string{"log-string log-string log.testLogString{Field:\"detail\"} \x1b[31mlog-string\x1b[0m\n"}, w1.GetLogs())
+}
+
+func TestLoggerExpressionFilter(t *testing.T) {
+ logger := NewLoggerWithWriters(context.Background())
+
+ w1 := newDummyWriter("dummy-1", DEBUG, 0)
+ w1.Mode.Expression = "foo.*"
+ logger.AddWriters(w1)
+
+ logger.Info("foo")
+ logger.Info("bar")
+ logger.Info("foo bar")
+ logger.SendLogEvent(&Event{Level: INFO, Filename: "foo.go", MsgSimpleText: "by filename"})
+ logger.Close()
+
+ assert.Equal(t, []string{"foo\n", "foo bar\n", "by filename\n"}, w1.GetLogs())
+}
diff --git a/modules/log/manager.go b/modules/log/manager.go
new file mode 100644
index 0000000000..bbaef7eb20
--- /dev/null
+++ b/modules/log/manager.go
@@ -0,0 +1,142 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package log
+
+import (
+ "context"
+ "fmt"
+ "sync"
+ "sync/atomic"
+)
+
+const DEFAULT = "default"
+
+// LoggerManager manages loggers and shared event writers
+type LoggerManager struct {
+ ctx context.Context
+ ctxCancel context.CancelFunc
+
+ mu sync.Mutex
+ writers map[string]EventWriter
+ loggers map[string]*LoggerImpl
+ defaultLogger atomic.Pointer[LoggerImpl]
+
+ pauseMu sync.RWMutex
+ pauseChan chan struct{}
+}
+
+// GetLogger returns a logger with the given name. If the logger doesn't exist, a new empty one will be created.
+func (m *LoggerManager) GetLogger(name string) *LoggerImpl {
+ if name == DEFAULT {
+ if logger := m.defaultLogger.Load(); logger != nil {
+ return logger
+ }
+ }
+
+ m.mu.Lock()
+ defer m.mu.Unlock()
+
+ logger := m.loggers[name]
+ if logger == nil {
+ logger = NewLoggerWithWriters(m.ctx)
+ m.loggers[name] = logger
+ if name == DEFAULT {
+ m.defaultLogger.Store(logger)
+ }
+ }
+
+ return logger
+}
+
+// PauseAll pauses all event writers
+func (m *LoggerManager) PauseAll() {
+ m.pauseMu.Lock()
+ m.pauseChan = make(chan struct{})
+ m.pauseMu.Unlock()
+}
+
+// ResumeAll resumes all event writers
+func (m *LoggerManager) ResumeAll() {
+ m.pauseMu.Lock()
+ close(m.pauseChan)
+ m.pauseChan = nil
+ m.pauseMu.Unlock()
+}
+
+// GetPauseChan returns a channel for writer pausing
+func (m *LoggerManager) GetPauseChan() chan struct{} {
+ m.pauseMu.RLock()
+ defer m.pauseMu.RUnlock()
+ return m.pauseChan
+}
+
+// Close closes the logger manager; all loggers and writers are closed, and pending messages are flushed.
+func (m *LoggerManager) Close() {
+ m.mu.Lock()
+ defer m.mu.Unlock()
+
+ for _, logger := range m.loggers {
+ logger.Close()
+ }
+ m.loggers = map[string]*LoggerImpl{}
+
+ for _, writer := range m.writers {
+ eventWriterStopWait(writer)
+ }
+ m.writers = map[string]EventWriter{}
+
+ m.ctxCancel()
+}
+
+// DumpLoggers returns a map of all loggers and their event writers, for debugging and display purposes.
+func (m *LoggerManager) DumpLoggers() map[string]any {
+ m.mu.Lock()
+ defer m.mu.Unlock()
+
+ dump := map[string]any{}
+ for name, logger := range m.loggers {
+ loggerDump := map[string]any{
+ "IsEnabled": logger.IsEnabled(),
+ "EventWriters": logger.DumpWriters(),
+ }
+ dump[name] = loggerDump
+ }
+ return dump
+}
+
+// NewSharedWriter creates a new shared event writer. It can be used by multiple loggers, and it won't be closed when a logger is closed.
+func (m *LoggerManager) NewSharedWriter(writerName, writerType string, mode WriterMode) (writer EventWriter, err error) {
+ m.mu.Lock()
+ defer m.mu.Unlock()
+
+ if _, ok := m.writers[writerName]; ok {
+ return nil, fmt.Errorf("log event writer %q has been added before", writerName)
+ }
+
+ if writer, err = NewEventWriter(writerName, writerType, mode); err != nil {
+ return nil, err
+ }
+
+ m.writers[writerName] = writer
+ eventWriterStartGo(m.ctx, writer, true)
+ return writer, nil
+}
+
+func (m *LoggerManager) GetSharedWriter(writerName string) EventWriter {
+ m.mu.Lock()
+ defer m.mu.Unlock()
+ return m.writers[writerName]
+}
+
+var loggerManager = NewManager()
+
+func GetManager() *LoggerManager {
+ return loggerManager
+}
+
+func NewManager() *LoggerManager {
+ m := &LoggerManager{writers: map[string]EventWriter{}, loggers: map[string]*LoggerImpl{}}
+ m.ctx, m.ctxCancel = context.WithCancel(context.Background())
+ return m
+}
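
A minimal sketch of the shared-writer flow (it mirrors TestSharedWorker below, but is not part of the patch); the "console" writer type is assumed to be registered elsewhere in the package, everything else comes from this file.

package main

import "code.gitea.io/gitea/modules/log"

func main() {
	m := log.GetManager()

	// A shared writer is owned by the manager; closing a logger does not close it.
	shared, err := m.NewSharedWriter("shared-console", "console", log.WriterMode{
		Level: log.INFO,
		Flags: log.FlagsFromBits(log.LstdFlags),
	})
	if err != nil {
		log.Fatal("cannot create shared writer: %v", err)
	}

	// Two loggers reuse the same output.
	m.GetLogger("router").AddWriters(shared)
	m.GetLogger("access").AddWriters(shared)

	m.GetLogger("router").Info("routed request")
	m.GetLogger("access").Info("GET / 200")

	m.Close() // stops all loggers and shared writers, flushing pending events
}
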
diff --git a/modules/log/manager_test.go b/modules/log/manager_test.go
new file mode 100644
index 0000000000..aa01f79980
--- /dev/null
+++ b/modules/log/manager_test.go
@@ -0,0 +1,42 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package log
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestSharedWorker(t *testing.T) {
+ RegisterEventWriter("dummy", func(writerName string, writerMode WriterMode) EventWriter {
+ return newDummyWriter(writerName, writerMode.Level, 0)
+ })
+
+ m := NewManager()
+ _, err := m.NewSharedWriter("dummy-1", "dummy", WriterMode{Level: DEBUG, Flags: FlagsFromBits(0)})
+ assert.NoError(t, err)
+
+ w := m.GetSharedWriter("dummy-1")
+ assert.NotNil(t, w)
+ loggerTest := m.GetLogger("test")
+ loggerTest.AddWriters(w)
+ loggerTest.Info("msg-1")
+ loggerTest.RemoveAllWriters() // the shared writer is not closed here
+ loggerTest.Info("never seen")
+
+ // the shared writer can still be used later
+ w = m.GetSharedWriter("dummy-1")
+ assert.NotNil(t, w)
+ loggerTest.AddWriters(w)
+ loggerTest.Info("msg-2")
+
+ m.GetLogger("test-another").AddWriters(w)
+ m.GetLogger("test-another").Info("msg-3")
+
+ m.Close()
+
+ logs := w.(*dummyWriter).GetLogs()
+ assert.Equal(t, []string{"msg-1\n", "msg-2\n", "msg-3\n"}, logs)
+}
diff --git a/modules/log/misc.go b/modules/log/misc.go
new file mode 100644
index 0000000000..ae4ce04cf3
--- /dev/null
+++ b/modules/log/misc.go
@@ -0,0 +1,78 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package log
+
+import (
+ "io"
+)
+
+type baseToLogger struct {
+ base BaseLogger
+}
+
+// BaseLoggerToGeneralLogger wraps a BaseLogger (which only has the Log() function) into a Logger (which also has Info() and the other level functions)
+func BaseLoggerToGeneralLogger(b BaseLogger) Logger {
+ l := &baseToLogger{base: b}
+ return l
+}
+
+var _ Logger = (*baseToLogger)(nil)
+
+func (s *baseToLogger) Log(skip int, level Level, format string, v ...any) {
+ s.base.Log(skip+1, level, format, v...)
+}
+
+func (s *baseToLogger) GetLevel() Level {
+ return s.base.GetLevel()
+}
+
+func (s *baseToLogger) LevelEnabled(level Level) bool {
+ return s.base.GetLevel() <= level
+}
+
+func (s *baseToLogger) Trace(format string, v ...any) {
+ s.base.Log(1, TRACE, format, v...)
+}
+
+func (s *baseToLogger) Debug(format string, v ...any) {
+ s.base.Log(1, DEBUG, format, v...)
+}
+
+func (s *baseToLogger) Info(format string, v ...any) {
+ s.base.Log(1, INFO, format, v...)
+}
+
+func (s *baseToLogger) Warn(format string, v ...any) {
+ s.base.Log(1, WARN, format, v...)
+}
+
+func (s *baseToLogger) Error(format string, v ...any) {
+ s.base.Log(1, ERROR, format, v...)
+}
+
+func (s *baseToLogger) Critical(format string, v ...any) {
+ s.base.Log(1, CRITICAL, format, v...)
+}
+
+type PrintfLogger struct {
+ Logf func(format string, args ...any)
+}
+
+func (p *PrintfLogger) Printf(format string, args ...any) {
+ p.Logf(format, args...)
+}
+
+type loggerToWriter struct {
+ logf func(format string, args ...any)
+}
+
+func (p *loggerToWriter) Write(bs []byte) (int, error) {
+ p.logf("%s", string(bs))
+ return len(bs), nil
+}
+
+// LoggerToWriter wraps a log function to an io.Writer
+func LoggerToWriter(logf func(format string, args ...any)) io.Writer {
+ return &loggerToWriter{logf: logf}
+}
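
A short usage sketch for the adapters above (illustrative, not part of the patch): PrintfLogger bridges to APIs that expect a Printf-style logger, and LoggerToWriter bridges to APIs that expect an io.Writer, such as the standard library logger. Only names from this file plus the package-level Debug/Info helpers are used.

package main

import (
	stdlog "log"

	"code.gitea.io/gitea/modules/log"
)

func main() {
	// Bridge to an API that wants a Printf-style logger.
	p := &log.PrintfLogger{Logf: log.Debug}
	p.Printf("debug via PrintfLogger: %d", 42)

	// Bridge to an API that wants an io.Writer, e.g. the standard library logger.
	stdLogger := stdlog.New(log.LoggerToWriter(log.Info), "", 0)
	stdLogger.Println("forwarded through LoggerToWriter")
}
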
diff --git a/modules/log/multichannel.go b/modules/log/multichannel.go
deleted file mode 100644
index 6b8a9b8246..0000000000
--- a/modules/log/multichannel.go
+++ /dev/null
@@ -1,104 +0,0 @@
-// Copyright 2020 The Gogs Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package log
-
-import (
- "fmt"
- "runtime"
- "strings"
- "time"
-)
-
-// MultiChannelledLogger is default logger in the Gitea application.
-// it can contain several providers and log message into all providers.
-type MultiChannelledLogger struct {
- LevelLoggerLogger
- *MultiChannelledLog
- bufferLength int64
-}
-
-// newLogger initializes and returns a new logger.
-func newLogger(name string, buffer int64) *MultiChannelledLogger {
- l := &MultiChannelledLogger{
- MultiChannelledLog: NewMultiChannelledLog(name, buffer),
- bufferLength: buffer,
- }
- l.LevelLogger = l
- return l
-}
-
-// SetLogger sets new logger instance with given logger provider and config.
-func (l *MultiChannelledLogger) SetLogger(name, provider, config string) error {
- eventLogger, err := NewChannelledLog(l.ctx, name, provider, config, l.bufferLength)
- if err != nil {
- return fmt.Errorf("failed to create sublogger (%s): %w", name, err)
- }
-
- l.MultiChannelledLog.DelLogger(name)
-
- err = l.MultiChannelledLog.AddLogger(eventLogger)
- if err != nil {
- if IsErrDuplicateName(err) {
- return fmt.Errorf("%w other names: %v", err, l.MultiChannelledLog.GetEventLoggerNames())
- }
- return fmt.Errorf("failed to add sublogger (%s): %w", name, err)
- }
-
- return nil
-}
-
-// DelLogger deletes a sublogger from this logger.
-func (l *MultiChannelledLogger) DelLogger(name string) (bool, error) {
- return l.MultiChannelledLog.DelLogger(name), nil
-}
-
-// Log msg at the provided level with the provided caller defined by skip (0 being the function that calls this function)
-func (l *MultiChannelledLogger) Log(skip int, level Level, format string, v ...interface{}) error {
- if l.GetLevel() > level {
- return nil
- }
- caller := "?()"
- pc, filename, line, ok := runtime.Caller(skip + 1)
- if ok {
- // Get caller function name.
- fn := runtime.FuncForPC(pc)
- if fn != nil {
- caller = fn.Name() + "()"
- }
- }
- msg := format
- if len(v) > 0 {
- msg = ColorSprintf(format, v...)
- }
- labels := getGoroutineLabels()
- if labels != nil {
- pid, ok := labels["pid"]
- if ok {
- msg = "[" + ColorString(FgHiYellow) + pid + ColorString(Reset) + "] " + msg
- }
- }
- stack := ""
- if l.GetStacktraceLevel() <= level {
- stack = Stack(skip + 1)
- }
- return l.SendLog(level, caller, strings.TrimPrefix(filename, prefix), line, msg, stack)
-}
-
-// SendLog sends a log event at the provided level with the information given
-func (l *MultiChannelledLogger) SendLog(level Level, caller, filename string, line int, msg, stack string) error {
- if l.GetLevel() > level {
- return nil
- }
- event := &Event{
- level: level,
- caller: caller,
- filename: filename,
- line: line,
- msg: msg,
- time: time.Now(),
- stacktrace: stack,
- }
- l.LogEvent(event) //nolint:errcheck
- return nil
-}
diff --git a/modules/log/provider.go b/modules/log/provider.go
deleted file mode 100644
index b5058139d7..0000000000
--- a/modules/log/provider.go
+++ /dev/null
@@ -1,25 +0,0 @@
-// Copyright 2019 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package log
-
-// LoggerProvider represents behaviors of a logger provider.
-type LoggerProvider interface {
- Init(config string) error
- EventLogger
-}
-
-type loggerProvider func() LoggerProvider
-
-var providers = make(map[string]loggerProvider)
-
-// Register registers given logger provider to providers.
-func Register(name string, log loggerProvider) {
- if log == nil {
- panic("log: register provider is nil")
- }
- if _, dup := providers[name]; dup {
- panic("log: register called twice for provider \"" + name + "\"")
- }
- providers[name] = log
-}
diff --git a/modules/log/smtp.go b/modules/log/smtp.go
deleted file mode 100644
index 4e896496d7..0000000000
--- a/modules/log/smtp.go
+++ /dev/null
@@ -1,114 +0,0 @@
-// Copyright 2014 The Gogs Authors. All rights reserved.
-// Copyright 2019 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package log
-
-import (
- "fmt"
- "net/smtp"
- "strings"
-
- "code.gitea.io/gitea/modules/json"
-)
-
-type smtpWriter struct {
- owner *SMTPLogger
-}
-
-// Write sends the message as an email
-func (s *smtpWriter) Write(p []byte) (int, error) {
- return s.owner.sendMail(p)
-}
-
-// Close does nothing
-func (s *smtpWriter) Close() error {
- return nil
-}
-
-// SMTPLogger implements LoggerProvider and is used to send emails via given SMTP-server.
-type SMTPLogger struct {
- WriterLogger
- Username string `json:"Username"`
- Password string `json:"password"`
- Host string `json:"host"`
- Subject string `json:"subject"`
- RecipientAddresses []string `json:"sendTos"`
- sendMailFn func(string, smtp.Auth, string, []string, []byte) error
-}
-
-// NewSMTPLogger creates smtp writer.
-func NewSMTPLogger() LoggerProvider {
- s := &SMTPLogger{}
- s.Level = TRACE
- s.sendMailFn = smtp.SendMail
- return s
-}
-
-// Init smtp writer with json config.
-// config like:
-//
-// {
-// "Username":"example@gmail.com",
-// "password:"password",
-// "host":"smtp.gmail.com:465",
-// "subject":"email title",
-// "sendTos":["email1","email2"],
-// "level":LevelError
-// }
-func (log *SMTPLogger) Init(jsonconfig string) error {
- err := json.Unmarshal([]byte(jsonconfig), log)
- if err != nil {
- return fmt.Errorf("Unable to parse JSON: %w", err)
- }
- log.NewWriterLogger(&smtpWriter{
- owner: log,
- })
- log.sendMailFn = smtp.SendMail
- return nil
-}
-
-// WriteMsg writes message in smtp writer.
-// it will send an email with subject and only this message.
-func (log *SMTPLogger) sendMail(p []byte) (int, error) {
- hp := strings.Split(log.Host, ":")
-
- // Set up authentication information.
- auth := smtp.PlainAuth(
- "",
- log.Username,
- log.Password,
- hp[0],
- )
- // Connect to the server, authenticate, set the sender and recipient,
- // and send the email all in one step.
- contentType := "Content-Type: text/plain" + "; charset=UTF-8"
- mailmsg := []byte("To: " + strings.Join(log.RecipientAddresses, ";") + "\r\nFrom: " + log.Username + "<" + log.Username +
- ">\r\nSubject: " + log.Subject + "\r\n" + contentType + "\r\n\r\n")
- mailmsg = append(mailmsg, p...)
- return len(p), log.sendMailFn(
- log.Host,
- auth,
- log.Username,
- log.RecipientAddresses,
- mailmsg,
- )
-}
-
-// Flush when log should be flushed
-func (log *SMTPLogger) Flush() {
-}
-
-// ReleaseReopen does nothing
-func (log *SMTPLogger) ReleaseReopen() error {
- return nil
-}
-
-// GetName returns the default name for this implementation
-func (log *SMTPLogger) GetName() string {
- return "smtp"
-}
-
-func init() {
- Register("smtp", NewSMTPLogger)
-}
diff --git a/modules/log/smtp_test.go b/modules/log/smtp_test.go
deleted file mode 100644
index d7d28f28f8..0000000000
--- a/modules/log/smtp_test.go
+++ /dev/null
@@ -1,85 +0,0 @@
-// Copyright 2019 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package log
-
-import (
- "fmt"
- "net/smtp"
- "strings"
- "testing"
- "time"
-
- "github.com/stretchr/testify/assert"
-)
-
-func TestSMTPLogger(t *testing.T) {
- prefix := "TestPrefix "
- level := INFO
- flags := LstdFlags | LUTC | Lfuncname
- username := "testuser"
- password := "testpassword"
- host := "testhost"
- subject := "testsubject"
- sendTos := []string{"testto1", "testto2"}
-
- logger := NewSMTPLogger()
- smtpLogger, ok := logger.(*SMTPLogger)
- assert.True(t, ok)
-
- err := logger.Init(fmt.Sprintf("{\"prefix\":\"%s\",\"level\":\"%s\",\"flags\":%d,\"username\":\"%s\",\"password\":\"%s\",\"host\":\"%s\",\"subject\":\"%s\",\"sendTos\":[\"%s\",\"%s\"]}", prefix, level.String(), flags, username, password, host, subject, sendTos[0], sendTos[1]))
- assert.NoError(t, err)
-
- assert.Equal(t, flags, smtpLogger.Flags)
- assert.Equal(t, level, smtpLogger.Level)
- assert.Equal(t, level, logger.GetLevel())
-
- location, _ := time.LoadLocation("EST")
-
- date := time.Date(2019, time.January, 13, 22, 3, 30, 15, location)
-
- dateString := date.UTC().Format("2006/01/02 15:04:05")
-
- event := Event{
- level: INFO,
- msg: "TEST MSG",
- caller: "CALLER",
- filename: "FULL/FILENAME",
- line: 1,
- time: date,
- }
-
- expected := fmt.Sprintf("%s%s %s:%d:%s [%c] %s\n", prefix, dateString, event.filename, event.line, event.caller, strings.ToUpper(event.level.String())[0], event.msg)
-
- var envToHost string
- var envFrom string
- var envTo []string
- var envMsg []byte
- smtpLogger.sendMailFn = func(addr string, a smtp.Auth, from string, to []string, msg []byte) error {
- envToHost = addr
- envFrom = from
- envTo = to
- envMsg = msg
- return nil
- }
-
- err = logger.LogEvent(&event)
- assert.NoError(t, err)
- assert.Equal(t, host, envToHost)
- assert.Equal(t, username, envFrom)
- assert.Equal(t, sendTos, envTo)
- assert.Contains(t, string(envMsg), expected)
-
- logger.Flush()
-
- event.level = WARN
- expected = fmt.Sprintf("%s%s %s:%d:%s [%c] %s\n", prefix, dateString, event.filename, event.line, event.caller, strings.ToUpper(event.level.String())[0], event.msg)
- err = logger.LogEvent(&event)
- assert.NoError(t, err)
- assert.Equal(t, host, envToHost)
- assert.Equal(t, username, envFrom)
- assert.Equal(t, sendTos, envTo)
- assert.Contains(t, string(envMsg), expected)
-
- logger.Close()
-}
diff --git a/modules/log/stack.go b/modules/log/stack.go
index d4496cff03..9b22e92867 100644
--- a/modules/log/stack.go
+++ b/modules/log/stack.go
@@ -32,19 +32,19 @@ func Stack(skip int) string {
}
// Print equivalent of debug.Stack()
- fmt.Fprintf(buf, "%s:%d (0x%x)\n", filename, lineNumber, programCounter)
+ _, _ = fmt.Fprintf(buf, "%s:%d (0x%x)\n", filename, lineNumber, programCounter)
// Now try to print the offending line
if filename != lastFilename {
data, err := os.ReadFile(filename)
if err != nil {
- // can't read this sourcefile
+ // can't read this source file
// likely we don't have the sourcecode available
continue
}
lines = bytes.Split(data, []byte{'\n'})
lastFilename = filename
}
- fmt.Fprintf(buf, "\t%s: %s\n", functionName(programCounter), source(lines, lineNumber))
+ _, _ = fmt.Fprintf(buf, "\t%s: %s\n", functionName(programCounter), source(lines, lineNumber))
}
return buf.String()
}
diff --git a/modules/log/writer.go b/modules/log/writer.go
deleted file mode 100644
index 61f1d866ee..0000000000
--- a/modules/log/writer.go
+++ /dev/null
@@ -1,269 +0,0 @@
-// Copyright 2019 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package log
-
-import (
- "bytes"
- "fmt"
- "io"
- "regexp"
- "strings"
- "sync"
-)
-
-type byteArrayWriter []byte
-
-func (b *byteArrayWriter) Write(p []byte) (int, error) {
- *b = append(*b, p...)
- return len(p), nil
-}
-
-// WriterLogger represent a basic logger for Gitea
-type WriterLogger struct {
- out io.WriteCloser
- mu sync.Mutex
-
- Level Level `json:"level"`
- StacktraceLevel Level `json:"stacktraceLevel"`
- Flags int `json:"flags"`
- Prefix string `json:"prefix"`
- Colorize bool `json:"colorize"`
- Expression string `json:"expression"`
- regexp *regexp.Regexp
-}
-
-// NewWriterLogger creates a new WriterLogger from the provided WriteCloser.
-// Optionally the level can be changed at the same time.
-func (logger *WriterLogger) NewWriterLogger(out io.WriteCloser, level ...Level) {
- logger.mu.Lock()
- defer logger.mu.Unlock()
- logger.out = out
- switch logger.Flags {
- case 0:
- logger.Flags = LstdFlags
- case -1:
- logger.Flags = 0
- }
- if len(level) > 0 {
- logger.Level = level[0]
- }
- logger.createExpression()
-}
-
-func (logger *WriterLogger) createExpression() {
- if len(logger.Expression) > 0 {
- var err error
- logger.regexp, err = regexp.Compile(logger.Expression)
- if err != nil {
- logger.regexp = nil
- }
- }
-}
-
-// GetLevel returns the logging level for this logger
-func (logger *WriterLogger) GetLevel() Level {
- return logger.Level
-}
-
-// GetStacktraceLevel returns the stacktrace logging level for this logger
-func (logger *WriterLogger) GetStacktraceLevel() Level {
- return logger.StacktraceLevel
-}
-
-// Copy of cheap integer to fixed-width decimal to ascii from logger.
-func itoa(buf *[]byte, i, wid int) {
- var logger [20]byte
- bp := len(logger) - 1
- for i >= 10 || wid > 1 {
- wid--
- q := i / 10
- logger[bp] = byte('0' + i - q*10)
- bp--
- i = q
- }
- // i < 10
- logger[bp] = byte('0' + i)
- *buf = append(*buf, logger[bp:]...)
-}
-
-func (logger *WriterLogger) createMsg(buf *[]byte, event *Event) {
- *buf = append(*buf, logger.Prefix...)
- t := event.time
- if logger.Flags&(Ldate|Ltime|Lmicroseconds) != 0 {
- if logger.Colorize {
- *buf = append(*buf, fgCyanBytes...)
- }
- if logger.Flags&LUTC != 0 {
- t = t.UTC()
- }
- if logger.Flags&Ldate != 0 {
- year, month, day := t.Date()
- itoa(buf, year, 4)
- *buf = append(*buf, '/')
- itoa(buf, int(month), 2)
- *buf = append(*buf, '/')
- itoa(buf, day, 2)
- *buf = append(*buf, ' ')
- }
- if logger.Flags&(Ltime|Lmicroseconds) != 0 {
- hour, min, sec := t.Clock()
- itoa(buf, hour, 2)
- *buf = append(*buf, ':')
- itoa(buf, min, 2)
- *buf = append(*buf, ':')
- itoa(buf, sec, 2)
- if logger.Flags&Lmicroseconds != 0 {
- *buf = append(*buf, '.')
- itoa(buf, t.Nanosecond()/1e3, 6)
- }
- *buf = append(*buf, ' ')
- }
- if logger.Colorize {
- *buf = append(*buf, resetBytes...)
- }
-
- }
- if logger.Flags&(Lshortfile|Llongfile) != 0 {
- if logger.Colorize {
- *buf = append(*buf, fgGreenBytes...)
- }
- file := event.filename
- if logger.Flags&Lmedfile == Lmedfile {
- startIndex := len(file) - 20
- if startIndex > 0 {
- file = "..." + file[startIndex:]
- }
- } else if logger.Flags&Lshortfile != 0 {
- startIndex := strings.LastIndexByte(file, '/')
- if startIndex > 0 && startIndex < len(file) {
- file = file[startIndex+1:]
- }
- }
- *buf = append(*buf, file...)
- *buf = append(*buf, ':')
- itoa(buf, event.line, -1)
- if logger.Flags&(Lfuncname|Lshortfuncname) != 0 {
- *buf = append(*buf, ':')
- } else {
- if logger.Colorize {
- *buf = append(*buf, resetBytes...)
- }
- *buf = append(*buf, ' ')
- }
- }
- if logger.Flags&(Lfuncname|Lshortfuncname) != 0 {
- if logger.Colorize {
- *buf = append(*buf, fgGreenBytes...)
- }
- funcname := event.caller
- if logger.Flags&Lshortfuncname != 0 {
- lastIndex := strings.LastIndexByte(funcname, '.')
- if lastIndex > 0 && len(funcname) > lastIndex+1 {
- funcname = funcname[lastIndex+1:]
- }
- }
- *buf = append(*buf, funcname...)
- if logger.Colorize {
- *buf = append(*buf, resetBytes...)
- }
- *buf = append(*buf, ' ')
-
- }
- if logger.Flags&(Llevel|Llevelinitial) != 0 {
- level := strings.ToUpper(event.level.String())
- if logger.Colorize {
- *buf = append(*buf, levelToColor[event.level]...)
- }
- *buf = append(*buf, '[')
- if logger.Flags&Llevelinitial != 0 {
- *buf = append(*buf, level[0])
- } else {
- *buf = append(*buf, level...)
- }
- *buf = append(*buf, ']')
- if logger.Colorize {
- *buf = append(*buf, resetBytes...)
- }
- *buf = append(*buf, ' ')
- }
-
- msg := []byte(event.msg)
- if len(msg) > 0 && msg[len(msg)-1] == '\n' {
- msg = msg[:len(msg)-1]
- }
-
- pawMode := allowColor
- if !logger.Colorize {
- pawMode = removeColor
- }
-
- baw := byteArrayWriter(*buf)
- (&protectedANSIWriter{
- w: &baw,
- mode: pawMode,
- }).Write(msg) //nolint:errcheck
- *buf = baw
-
- if event.stacktrace != "" && logger.StacktraceLevel <= event.level {
- lines := bytes.Split([]byte(event.stacktrace), []byte("\n"))
- if len(lines) > 1 {
- for _, line := range lines {
- *buf = append(*buf, "\n\t"...)
- *buf = append(*buf, line...)
- }
- }
- *buf = append(*buf, '\n')
- }
- *buf = append(*buf, '\n')
-}
-
-// LogEvent logs the event to the internal writer
-func (logger *WriterLogger) LogEvent(event *Event) error {
- if logger.Level > event.level {
- return nil
- }
-
- logger.mu.Lock()
- defer logger.mu.Unlock()
- if !logger.Match(event) {
- return nil
- }
- var buf []byte
- logger.createMsg(&buf, event)
- _, err := logger.out.Write(buf)
- return err
-}
-
-// Match checks if the given event matches the logger's regexp expression
-func (logger *WriterLogger) Match(event *Event) bool {
- if logger.regexp == nil {
- return true
- }
- if logger.regexp.Match([]byte(fmt.Sprintf("%s:%d:%s", event.filename, event.line, event.caller))) {
- return true
- }
- // Match on the non-colored msg - therefore strip out colors
- var msg []byte
- baw := byteArrayWriter(msg)
- (&protectedANSIWriter{
- w: &baw,
- mode: removeColor,
- }).Write([]byte(event.msg)) //nolint:errcheck
- msg = baw
- return logger.regexp.Match(msg)
-}
-
-// Close the base logger
-func (logger *WriterLogger) Close() {
- logger.mu.Lock()
- defer logger.mu.Unlock()
- if logger.out != nil {
- logger.out.Close()
- }
-}
-
-// GetName returns empty for these provider loggers
-func (logger *WriterLogger) GetName() string {
- return ""
-}
diff --git a/modules/log/writer_test.go b/modules/log/writer_test.go
deleted file mode 100644
index 8c03f87d90..0000000000
--- a/modules/log/writer_test.go
+++ /dev/null
@@ -1,275 +0,0 @@
-// Copyright 2019 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package log
-
-import (
- "fmt"
- "strings"
- "testing"
- "time"
-
- "github.com/stretchr/testify/assert"
-)
-
-type CallbackWriteCloser struct {
- callback func([]byte, bool)
-}
-
-func (c CallbackWriteCloser) Write(p []byte) (int, error) {
- c.callback(p, false)
- return len(p), nil
-}
-
-func (c CallbackWriteCloser) Close() error {
- c.callback(nil, true)
- return nil
-}
-
-func TestBaseLogger(t *testing.T) {
- var written []byte
- var closed bool
-
- c := CallbackWriteCloser{
- callback: func(p []byte, close bool) {
- written = p
- closed = close
- },
- }
- prefix := "TestPrefix "
- b := WriterLogger{
- out: c,
- Level: INFO,
- Flags: LstdFlags | LUTC,
- Prefix: prefix,
- }
- location, _ := time.LoadLocation("EST")
-
- date := time.Date(2019, time.January, 13, 22, 3, 30, 15, location)
-
- dateString := date.UTC().Format("2006/01/02 15:04:05")
-
- event := Event{
- level: INFO,
- msg: "TEST MSG",
- caller: "CALLER",
- filename: "FULL/FILENAME",
- line: 1,
- time: date,
- }
-
- assert.Equal(t, INFO, b.GetLevel())
-
- expected := fmt.Sprintf("%s%s %s:%d:%s [%c] %s\n", prefix, dateString, event.filename, event.line, event.caller, strings.ToUpper(event.level.String())[0], event.msg)
- b.LogEvent(&event)
- assert.Equal(t, expected, string(written))
- assert.False(t, closed)
- written = written[:0]
-
- event.level = DEBUG
- expected = ""
- b.LogEvent(&event)
- assert.Equal(t, expected, string(written))
- assert.False(t, closed)
-
- event.level = TRACE
- expected = ""
- b.LogEvent(&event)
- assert.Equal(t, expected, string(written))
- assert.False(t, closed)
-
- event.level = WARN
- expected = fmt.Sprintf("%s%s %s:%d:%s [%c] %s\n", prefix, dateString, event.filename, event.line, event.caller, strings.ToUpper(event.level.String())[0], event.msg)
- b.LogEvent(&event)
- assert.Equal(t, expected, string(written))
- assert.False(t, closed)
- written = written[:0]
-
- event.level = ERROR
- expected = fmt.Sprintf("%s%s %s:%d:%s [%c] %s\n", prefix, dateString, event.filename, event.line, event.caller, strings.ToUpper(event.level.String())[0], event.msg)
- b.LogEvent(&event)
- assert.Equal(t, expected, string(written))
- assert.False(t, closed)
- written = written[:0]
-
- event.level = CRITICAL
- expected = fmt.Sprintf("%s%s %s:%d:%s [%c] %s\n", prefix, dateString, event.filename, event.line, event.caller, strings.ToUpper(event.level.String())[0], event.msg)
- b.LogEvent(&event)
- assert.Equal(t, expected, string(written))
- assert.False(t, closed)
- written = written[:0]
-
- b.Close()
- assert.True(t, closed)
-}
-
-func TestBaseLoggerDated(t *testing.T) {
- var written []byte
- var closed bool
-
- c := CallbackWriteCloser{
- callback: func(p []byte, close bool) {
- written = p
- closed = close
- },
- }
- prefix := ""
- b := WriterLogger{
- out: c,
- Level: WARN,
- Flags: Ldate | Ltime | Lmicroseconds | Lshortfile | Llevel,
- Prefix: prefix,
- }
-
- location, _ := time.LoadLocation("EST")
-
- date := time.Date(2019, time.January, 13, 22, 3, 30, 115, location)
-
- dateString := date.Format("2006/01/02 15:04:05.000000")
-
- event := Event{
- level: WARN,
- msg: "TEST MESSAGE TEST\n",
- caller: "CALLER",
- filename: "FULL/FILENAME",
- line: 1,
- time: date,
- }
-
- assert.Equal(t, WARN, b.GetLevel())
-
- expected := fmt.Sprintf("%s%s %s:%d [%s] %s", prefix, dateString, "FILENAME", event.line, strings.ToUpper(event.level.String()), event.msg)
- b.LogEvent(&event)
- assert.Equal(t, expected, string(written))
- assert.False(t, closed)
- written = written[:0]
-
- event.level = INFO
- expected = ""
- b.LogEvent(&event)
- assert.Equal(t, expected, string(written))
- assert.False(t, closed)
- written = written[:0]
-
- event.level = ERROR
- expected = fmt.Sprintf("%s%s %s:%d [%s] %s", prefix, dateString, "FILENAME", event.line, strings.ToUpper(event.level.String()), event.msg)
- b.LogEvent(&event)
- assert.Equal(t, expected, string(written))
- assert.False(t, closed)
- written = written[:0]
-
- event.level = DEBUG
- expected = ""
- b.LogEvent(&event)
- assert.Equal(t, expected, string(written))
- assert.False(t, closed)
- written = written[:0]
-
- event.level = CRITICAL
- expected = fmt.Sprintf("%s%s %s:%d [%s] %s", prefix, dateString, "FILENAME", event.line, strings.ToUpper(event.level.String()), event.msg)
- b.LogEvent(&event)
- assert.Equal(t, expected, string(written))
- assert.False(t, closed)
- written = written[:0]
-
- event.level = TRACE
- expected = ""
- b.LogEvent(&event)
- assert.Equal(t, expected, string(written))
- assert.False(t, closed)
- written = written[:0]
-
- b.Close()
- assert.True(t, closed)
-}
-
-func TestBaseLoggerMultiLineNoFlagsRegexp(t *testing.T) {
- var written []byte
- var closed bool
-
- c := CallbackWriteCloser{
- callback: func(p []byte, close bool) {
- written = p
- closed = close
- },
- }
- prefix := ""
- b := WriterLogger{
- Level: DEBUG,
- StacktraceLevel: ERROR,
- Flags: -1,
- Prefix: prefix,
- Expression: "FILENAME",
- }
- b.NewWriterLogger(c)
-
- location, _ := time.LoadLocation("EST")
-
- date := time.Date(2019, time.January, 13, 22, 3, 30, 115, location)
-
- event := Event{
- level: DEBUG,
- msg: "TEST\nMESSAGE\nTEST",
- caller: "CALLER",
- filename: "FULL/FILENAME",
- line: 1,
- time: date,
- }
-
- assert.Equal(t, DEBUG, b.GetLevel())
-
- expected := "TEST\n\tMESSAGE\n\tTEST\n"
- b.LogEvent(&event)
- assert.Equal(t, expected, string(written))
- assert.False(t, closed)
- written = written[:0]
-
- event.filename = "ELSEWHERE"
-
- b.LogEvent(&event)
- assert.Equal(t, "", string(written))
- assert.False(t, closed)
- written = written[:0]
-
- event.caller = "FILENAME"
- b.LogEvent(&event)
- assert.Equal(t, expected, string(written))
- assert.False(t, closed)
- written = written[:0]
-
- event = Event{
- level: DEBUG,
- msg: "TEST\nFILENAME\nTEST",
- caller: "CALLER",
- filename: "FULL/ELSEWHERE",
- line: 1,
- time: date,
- }
- expected = "TEST\n\tFILENAME\n\tTEST\n"
- b.LogEvent(&event)
- assert.Equal(t, expected, string(written))
- assert.False(t, closed)
- written = written[:0]
-}
-
-func TestBrokenRegexp(t *testing.T) {
- var closed bool
-
- c := CallbackWriteCloser{
- callback: func(p []byte, close bool) {
- closed = close
- },
- }
-
- b := WriterLogger{
- Level: DEBUG,
- StacktraceLevel: ERROR,
- Flags: -1,
- Prefix: prefix,
- Expression: "\\",
- }
- b.NewWriterLogger(c)
- assert.Empty(t, b.regexp)
- b.Close()
- assert.True(t, closed)
-}
diff --git a/modules/private/manager.go b/modules/private/manager.go
index 5853db34e4..3448f2e34c 100644
--- a/modules/private/manager.go
+++ b/modules/private/manager.go
@@ -75,18 +75,18 @@ func SetLogSQL(ctx context.Context, on bool) ResponseExtra {
// LoggerOptions represents the options for the add logger call
type LoggerOptions struct {
- Group string
- Name string
+ Logger string
+ Writer string
Mode string
Config map[string]interface{}
}
// AddLogger adds a logger
-func AddLogger(ctx context.Context, group, name, mode string, config map[string]interface{}) ResponseExtra {
+func AddLogger(ctx context.Context, logger, writer, mode string, config map[string]interface{}) ResponseExtra {
reqURL := setting.LocalURL + "api/internal/manager/add-logger"
req := newInternalRequest(ctx, reqURL, "POST", LoggerOptions{
- Group: group,
- Name: name,
+ Logger: logger,
+ Writer: writer,
Mode: mode,
Config: config,
})
@@ -94,8 +94,8 @@ func AddLogger(ctx context.Context, group, name, mode string, config map[string]
}
// RemoveLogger removes a logger
-func RemoveLogger(ctx context.Context, group, name string) ResponseExtra {
- reqURL := setting.LocalURL + fmt.Sprintf("api/internal/manager/remove-logger/%s/%s", url.PathEscape(group), url.PathEscape(name))
+func RemoveLogger(ctx context.Context, logger, writer string) ResponseExtra {
+ reqURL := setting.LocalURL + fmt.Sprintf("api/internal/manager/remove-logger/%s/%s", url.PathEscape(logger), url.PathEscape(writer))
req := newInternalRequest(ctx, reqURL, "POST")
return requestJSONUserMsg(req, "Removed")
}
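
For orientation, a minimal sketch (not part of this patch) of how the renamed AddLogger/RemoveLogger API is expected to be called after this change. The logger name, writer name, and the keys in the config map below are illustrative assumptions, not values taken from this diff:

```go
package main

import (
	"context"

	"code.gitea.io/gitea/modules/private"
)

func addTemporaryRouterWriter(ctx context.Context) {
	// Attach an extra "console" writer named "console-extra" to the "router" logger.
	// The keys accepted in the config map are an assumption; check the manager API for the supported options.
	_ = private.AddLogger(ctx, "router", "console-extra", "console", map[string]interface{}{
		"level": "trace",
	})
	// Detach the same writer again, addressed by its (logger, writer) pair.
	_ = private.RemoveLogger(ctx, "router", "console-extra")
}
```
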
diff --git a/modules/setting/config_provider.go b/modules/setting/config_provider.go
index ce9ef72248..37f5754ffd 100644
--- a/modules/setting/config_provider.go
+++ b/modules/setting/config_provider.go
@@ -33,6 +33,58 @@ type ConfigProvider interface {
Save() error
}
+// ConfigSectionKey only searches the keys defined in the given section itself; note that it is O(n).
+// The ini package has a special behavior: with "[sec] a=1" and an empty "[sec.sub]",
+// Key()/HasKey() in "[sec.sub]" can always see "a=1" because they always fall back to parent sections.
+// ConfigSectionKey returns nil if the key doesn't exist in the given section.
+func ConfigSectionKey(sec ConfigSection, key string) *ini.Key {
+ if sec == nil {
+ return nil
+ }
+ for _, k := range sec.Keys() {
+ if k.Name() == key {
+ return k
+ }
+ }
+ return nil
+}
+
+func ConfigSectionKeyString(sec ConfigSection, key string, def ...string) string {
+ k := ConfigSectionKey(sec, key)
+ if k != nil && k.String() != "" {
+ return k.String()
+ }
+ if len(def) > 0 {
+ return def[0]
+ }
+ return ""
+}
+
+// ConfigInheritedKey works like ini.Section.Key(), but it always returns a new key instance; it is O(n) because NewKey is O(n).
+// The returned key is safe to use with "MustXxx" because it doesn't change the parent section's values;
+// otherwise, ini.Section.Key().MustXxx would pollute the parent section's keys.
+// It never returns nil.
+func ConfigInheritedKey(sec ConfigSection, key string) *ini.Key {
+ k := sec.Key(key)
+ if k != nil && k.String() != "" {
+ newKey, _ := sec.NewKey(k.Name(), k.String())
+ return newKey
+ }
+ newKey, _ := sec.NewKey(key, "")
+ return newKey
+}
+
+func ConfigInheritedKeyString(sec ConfigSection, key string, def ...string) string {
+ k := sec.Key(key)
+ if k != nil && k.String() != "" {
+ return k.String()
+ }
+ if len(def) > 0 {
+ return def[0]
+ }
+ return ""
+}
+
type iniFileConfigProvider struct {
opts *Options
*ini.File
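
A minimal sketch of the difference between the two helpers added above (the new config_provider_test.go below exercises the same behavior); the section and key names here are arbitrary:

```go
package main

import (
	"fmt"

	"code.gitea.io/gitea/modules/setting"
)

func demoConfigKeyHelpers() {
	cfg, _ := setting.NewConfigProviderFromData("[foo]\nkey = 123\n")
	sub := cfg.Section("foo.bar")

	// ConfigSectionKey only sees keys defined directly in [foo.bar], so there is nothing here.
	fmt.Println(setting.ConfigSectionKey(sub, "key") == nil) // true

	// ConfigInheritedKey falls back to the parent [foo] and returns a fresh key,
	// so MustXxx on it cannot pollute the parent section.
	fmt.Println(setting.ConfigInheritedKey(sub, "key").MustString("def")) // 123
}
```
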
diff --git a/modules/setting/config_provider_test.go b/modules/setting/config_provider_test.go
new file mode 100644
index 0000000000..76f7048d59
--- /dev/null
+++ b/modules/setting/config_provider_test.go
@@ -0,0 +1,66 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestConfigProviderBehaviors(t *testing.T) {
+ t.Run("BuggyKeyOverwritten", func(t *testing.T) {
+ cfg, _ := NewConfigProviderFromData(`
+[foo]
+key =
+`)
+ sec := cfg.Section("foo")
+ secSub := cfg.Section("foo.bar")
+ secSub.Key("key").MustString("1") // try to read a key from subsection
+ assert.Equal(t, "1", sec.Key("key").String()) // TODO: BUGGY! the key in [foo] is overwritten
+ })
+
+ t.Run("SubsectionSeeParentKeys", func(t *testing.T) {
+ cfg, _ := NewConfigProviderFromData(`
+[foo]
+key = 123
+`)
+ secSub := cfg.Section("foo.bar.xxx")
+ assert.Equal(t, "123", secSub.Key("key").String())
+ })
+}
+
+func TestConfigProviderHelper(t *testing.T) {
+ cfg, _ := NewConfigProviderFromData(`
+[foo]
+empty =
+key = 123
+`)
+
+ sec := cfg.Section("foo")
+ secSub := cfg.Section("foo.bar")
+
+ // test empty key
+ assert.Equal(t, "def", ConfigSectionKeyString(sec, "empty", "def"))
+ assert.Equal(t, "xyz", ConfigSectionKeyString(secSub, "empty", "xyz"))
+
+ // test non-inherited key, only see the keys in current section
+ assert.NotNil(t, ConfigSectionKey(sec, "key"))
+ assert.Nil(t, ConfigSectionKey(secSub, "key"))
+
+ // test default behavior
+ assert.Equal(t, "123", ConfigSectionKeyString(sec, "key"))
+ assert.Equal(t, "", ConfigSectionKeyString(secSub, "key"))
+ assert.Equal(t, "def", ConfigSectionKeyString(secSub, "key", "def"))
+
+ assert.Equal(t, "123", ConfigInheritedKeyString(secSub, "key"))
+
+ // Workaround for ini package's BuggyKeyOverwritten behavior
+ assert.Equal(t, "", ConfigSectionKeyString(sec, "empty"))
+ assert.Equal(t, "", ConfigSectionKeyString(secSub, "empty"))
+ assert.Equal(t, "def", ConfigInheritedKey(secSub, "empty").MustString("def"))
+ assert.Equal(t, "def", ConfigInheritedKey(secSub, "empty").MustString("xyz"))
+ assert.Equal(t, "", ConfigSectionKeyString(sec, "empty"))
+ assert.Equal(t, "def", ConfigSectionKeyString(secSub, "empty"))
+}
diff --git a/modules/setting/database.go b/modules/setting/database.go
index 8c4dfb21d7..7a7c7029a4 100644
--- a/modules/setting/database.go
+++ b/modules/setting/database.go
@@ -92,7 +92,7 @@ func loadDBSetting(rootCfg ConfigProvider) {
Database.MaxOpenConns = sec.Key("MAX_OPEN_CONNS").MustInt(0)
Database.IterateBufferSize = sec.Key("ITERATE_BUFFER_SIZE").MustInt(50)
- Database.LogSQL = sec.Key("LOG_SQL").MustBool(true)
+ Database.LogSQL = sec.Key("LOG_SQL").MustBool(false)
Database.DBConnectRetries = sec.Key("DB_RETRIES").MustInt(10)
Database.DBConnectBackoff = sec.Key("DB_RETRY_BACKOFF").MustDuration(3 * time.Second)
Database.AutoMigration = sec.Key("AUTO_MIGRATION").MustBool(true)
diff --git a/modules/setting/log.go b/modules/setting/log.go
index d9a9e5af8f..af64ea8d85 100644
--- a/modules/setting/log.go
+++ b/modules/setting/log.go
@@ -10,384 +10,251 @@ import (
"path"
"path/filepath"
"strings"
- "sync"
- "code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/util"
)
-var (
- filenameSuffix = ""
- descriptionLock = sync.RWMutex{}
- logDescriptions = make(map[string]*LogDescription)
-)
+type LogGlobalConfig struct {
+ RootPath string
-// Log settings
-var Log struct {
+ Mode string
Level log.Level
- StacktraceLogLevel string
- RootPath string
- EnableSSHLog bool
- EnableXORMLog bool
+ StacktraceLogLevel log.Level
+ BufferLen int
- DisableRouterLog bool
+ EnableSSHLog bool
- EnableAccessLog bool
AccessLogTemplate string
- BufferLength int64
RequestIDHeaders []string
}
-// GetLogDescriptions returns a race safe set of descriptions
-func GetLogDescriptions() map[string]*LogDescription {
- descriptionLock.RLock()
- defer descriptionLock.RUnlock()
- descs := make(map[string]*LogDescription, len(logDescriptions))
- for k, v := range logDescriptions {
- subLogDescriptions := make([]SubLogDescription, len(v.SubLogDescriptions))
- copy(subLogDescriptions, v.SubLogDescriptions)
-
- descs[k] = &LogDescription{
- Name: v.Name,
- SubLogDescriptions: subLogDescriptions,
- }
- }
- return descs
-}
-
-// AddLogDescription adds a set of descriptions to the complete description
-func AddLogDescription(key string, description *LogDescription) {
- descriptionLock.Lock()
- defer descriptionLock.Unlock()
- logDescriptions[key] = description
-}
+var Log LogGlobalConfig
-// AddSubLogDescription adds a sub log description
-func AddSubLogDescription(key string, subLogDescription SubLogDescription) bool {
- descriptionLock.Lock()
- defer descriptionLock.Unlock()
- desc, ok := logDescriptions[key]
- if !ok {
- return false
- }
- for i, sub := range desc.SubLogDescriptions {
- if sub.Name == subLogDescription.Name {
- desc.SubLogDescriptions[i] = subLogDescription
- return true
- }
- }
- desc.SubLogDescriptions = append(desc.SubLogDescriptions, subLogDescription)
- return true
-}
+const accessLogTemplateDefault = `{{.Ctx.RemoteHost}} - {{.Identity}} {{.Start.Format "[02/Jan/2006:15:04:05 -0700]" }} "{{.Ctx.Req.Method}} {{.Ctx.Req.URL.RequestURI}} {{.Ctx.Req.Proto}}" {{.ResponseWriter.Status}} {{.ResponseWriter.Size}} "{{.Ctx.Req.Referer}}" "{{.Ctx.Req.UserAgent}}"`
-// RemoveSubLogDescription removes a sub log description
-func RemoveSubLogDescription(key, name string) bool {
- descriptionLock.Lock()
- defer descriptionLock.Unlock()
- desc, ok := logDescriptions[key]
- if !ok {
- return false
- }
- for i, sub := range desc.SubLogDescriptions {
- if sub.Name == name {
- desc.SubLogDescriptions = append(desc.SubLogDescriptions[:i], desc.SubLogDescriptions[i+1:]...)
- return true
- }
- }
- return false
-}
+func loadLogGlobalFrom(rootCfg ConfigProvider) {
+ sec := rootCfg.Section("log")
-type defaultLogOptions struct {
- levelName string // LogLevel
- flags string
- filename string // path.Join(LogRootPath, "gitea.log")
- bufferLength int64
- disableConsole bool
-}
+ Log.Level = log.LevelFromString(sec.Key("LEVEL").MustString(log.INFO.String()))
+ Log.StacktraceLogLevel = log.LevelFromString(sec.Key("STACKTRACE_LEVEL").MustString(log.NONE.String()))
+ Log.BufferLen = sec.Key("BUFFER_LEN").MustInt(10000)
+ Log.Mode = sec.Key("MODE").MustString("console")
-func newDefaultLogOptions() defaultLogOptions {
- return defaultLogOptions{
- levelName: Log.Level.String(),
- flags: "stdflags",
- filename: filepath.Join(Log.RootPath, "gitea.log"),
- bufferLength: 10000,
- disableConsole: false,
+ Log.RootPath = sec.Key("ROOT_PATH").MustString(path.Join(AppWorkPath, "log"))
+ if !filepath.IsAbs(Log.RootPath) {
+ Log.RootPath = filepath.Join(AppWorkPath, Log.RootPath)
}
-}
+ Log.RootPath = util.FilePathJoinAbs(Log.RootPath)
-// SubLogDescription describes a sublogger
-type SubLogDescription struct {
- Name string
- Provider string
- Config string
-}
+ Log.EnableSSHLog = sec.Key("ENABLE_SSH_LOG").MustBool(false)
-// LogDescription describes a named logger
-type LogDescription struct {
- Name string
- SubLogDescriptions []SubLogDescription
+ Log.AccessLogTemplate = sec.Key("ACCESS_LOG_TEMPLATE").MustString(accessLogTemplateDefault)
+ Log.RequestIDHeaders = sec.Key("REQUEST_ID_HEADERS").Strings(",")
}
-func getLogLevel(section ConfigSection, key string, defaultValue log.Level) log.Level {
- value := section.Key(key).MustString(defaultValue.String())
- return log.FromString(value)
-}
+func prepareLoggerConfig(rootCfg ConfigProvider) {
+ sec := rootCfg.Section("log")
-func getStacktraceLogLevel(section ConfigSection, key, defaultValue string) string {
- value := section.Key(key).MustString(defaultValue)
- return log.FromString(value).String()
-}
+ if !sec.HasKey("logger.default.MODE") {
+ sec.Key("logger.default.MODE").MustString(",")
+ }
-func loadLogFrom(rootCfg ConfigProvider) {
- sec := rootCfg.Section("log")
- Log.Level = getLogLevel(sec, "LEVEL", log.INFO)
- Log.StacktraceLogLevel = getStacktraceLogLevel(sec, "STACKTRACE_LEVEL", "None")
- Log.RootPath = sec.Key("ROOT_PATH").MustString(path.Join(AppWorkPath, "log"))
- forcePathSeparator(Log.RootPath)
- Log.BufferLength = sec.Key("BUFFER_LEN").MustInt64(10000)
+ deprecatedSetting(rootCfg, "log", "ACCESS", "log", "logger.access.MODE", "1.21")
+ deprecatedSetting(rootCfg, "log", "ENABLE_ACCESS_LOG", "log", "logger.access.MODE", "1.21")
+ if val := sec.Key("ACCESS").String(); val != "" {
+ sec.Key("logger.access.MODE").MustString(val)
+ }
+ if sec.HasKey("ENABLE_ACCESS_LOG") && !sec.Key("ENABLE_ACCESS_LOG").MustBool() {
+ sec.Key("logger.access.MODE").SetValue("")
+ }
- Log.EnableSSHLog = sec.Key("ENABLE_SSH_LOG").MustBool(false)
- Log.EnableAccessLog = sec.Key("ENABLE_ACCESS_LOG").MustBool(false)
- Log.AccessLogTemplate = sec.Key("ACCESS_LOG_TEMPLATE").MustString(
- `{{.Ctx.RemoteHost}} - {{.Identity}} {{.Start.Format "[02/Jan/2006:15:04:05 -0700]" }} "{{.Ctx.Req.Method}} {{.Ctx.Req.URL.RequestURI}} {{.Ctx.Req.Proto}}" {{.ResponseWriter.Status}} {{.ResponseWriter.Size}} "{{.Ctx.Req.Referer}}" "{{.Ctx.Req.UserAgent}}"`,
- )
- Log.RequestIDHeaders = sec.Key("REQUEST_ID_HEADERS").Strings(",")
- // the `MustString` updates the default value, and `log.ACCESS` is used by `generateNamedLogger("access")` later
- _ = rootCfg.Section("log").Key("ACCESS").MustString("file")
+ deprecatedSetting(rootCfg, "log", "ROUTER", "log", "logger.router.MODE", "1.21")
+ deprecatedSetting(rootCfg, "log", "DISABLE_ROUTER_LOG", "log", "logger.router.MODE", "1.21")
+ if val := sec.Key("ROUTER").String(); val != "" {
+ sec.Key("logger.router.MODE").MustString(val)
+ }
+ if !sec.HasKey("logger.router.MODE") {
+ sec.Key("logger.router.MODE").MustString(",") // use default logger
+ }
+ if sec.HasKey("DISABLE_ROUTER_LOG") && sec.Key("DISABLE_ROUTER_LOG").MustBool() {
+ sec.Key("logger.router.MODE").SetValue("")
+ }
- sec.Key("ROUTER").MustString("console")
- // Allow [log] DISABLE_ROUTER_LOG to override [server] DISABLE_ROUTER_LOG
- Log.DisableRouterLog = sec.Key("DISABLE_ROUTER_LOG").MustBool(Log.DisableRouterLog)
+ deprecatedSetting(rootCfg, "log", "XORM", "log", "logger.xorm.MODE", "1.21")
+ deprecatedSetting(rootCfg, "log", "ENABLE_XORM_LOG", "log", "logger.xorm.MODE", "1.21")
+ if val := sec.Key("XORM").String(); val != "" {
+ sec.Key("logger.xorm.MODE").MustString(val)
+ }
+ if !sec.HasKey("logger.xorm.MODE") {
+ sec.Key("logger.xorm.MODE").MustString(",") // use default logger
+ }
+ if sec.HasKey("ENABLE_XORM_LOG") && !sec.Key("ENABLE_XORM_LOG").MustBool() {
+ sec.Key("logger.xorm.MODE").SetValue("")
+ }
+}
- Log.EnableXORMLog = rootCfg.Section("log").Key("ENABLE_XORM_LOG").MustBool(true)
+func LogPrepareFilenameForWriter(fileName, defaultFileName string) string {
+ if fileName == "" {
+ fileName = defaultFileName
+ }
+ if !filepath.IsAbs(fileName) {
+ fileName = filepath.Join(Log.RootPath, fileName)
+ } else {
+ fileName = filepath.Clean(fileName)
+ }
+ if err := os.MkdirAll(filepath.Dir(fileName), os.ModePerm); err != nil {
+ panic(fmt.Sprintf("unable to create directory for log %q: %v", fileName, err.Error()))
+ }
+ return fileName
}
-func generateLogConfig(sec ConfigSection, name string, defaults defaultLogOptions) (mode, jsonConfig, levelName string) {
- level := getLogLevel(sec, "LEVEL", Log.Level)
- levelName = level.String()
- stacktraceLevelName := getStacktraceLogLevel(sec, "STACKTRACE_LEVEL", Log.StacktraceLogLevel)
- stacktraceLevel := log.FromString(stacktraceLevelName)
- mode = name
- keys := sec.Keys()
- logPath := defaults.filename
- flags := log.FlagsFromString(defaults.flags)
- expression := ""
- prefix := ""
- for _, key := range keys {
- switch key.Name() {
- case "MODE":
- mode = key.MustString(name)
- case "FILE_NAME":
- logPath = key.MustString(defaults.filename)
- forcePathSeparator(logPath)
- if !filepath.IsAbs(logPath) {
- logPath = path.Join(Log.RootPath, logPath)
- }
- case "FLAGS":
- flags = log.FlagsFromString(key.MustString(defaults.flags))
- case "EXPRESSION":
- expression = key.MustString("")
- case "PREFIX":
- prefix = key.MustString("")
- }
+func loadLogModeByName(rootCfg ConfigProvider, loggerName, modeName string) (writerName, writerType string, writerMode log.WriterMode, err error) {
+ sec := rootCfg.Section("log." + modeName)
+
+ writerMode = log.WriterMode{}
+ writerType = ConfigSectionKeyString(sec, "MODE")
+ if writerType == "" {
+ writerType = modeName
}
- logConfig := map[string]interface{}{
- "level": level.String(),
- "expression": expression,
- "prefix": prefix,
- "flags": flags,
- "stacktraceLevel": stacktraceLevel.String(),
+ writerName = modeName
+ defaultFlags := "stdflags"
+ defaultFileName := "gitea.log"
+ if loggerName == "access" {
+ // "access" logger is special, by default it doesn't have output flags, so it also needs a new writer name to avoid conflicting with other writers.
+ // so "access" logger's writer name is usually "file.access" or "console.access"
+ writerName += ".access"
+ defaultFlags = "none"
+ defaultFileName = "access.log"
}
- // Generate log configuration.
- switch mode {
+ writerMode.Level = log.LevelFromString(ConfigInheritedKeyString(sec, "LEVEL", Log.Level.String()))
+ writerMode.StacktraceLevel = log.LevelFromString(ConfigInheritedKeyString(sec, "STACKTRACE_LEVEL", Log.StacktraceLogLevel.String()))
+ writerMode.Prefix = ConfigInheritedKeyString(sec, "PREFIX")
+ writerMode.Expression = ConfigInheritedKeyString(sec, "EXPRESSION")
+ writerMode.Flags = log.FlagsFromString(ConfigInheritedKeyString(sec, "FLAGS", defaultFlags))
+
+ switch writerType {
case "console":
- useStderr := sec.Key("STDERR").MustBool(false)
- logConfig["stderr"] = useStderr
+ useStderr := ConfigInheritedKey(sec, "STDERR").MustBool(false)
+ defaultCanColor := log.CanColorStdout
if useStderr {
- logConfig["colorize"] = sec.Key("COLORIZE").MustBool(log.CanColorStderr)
- } else {
- logConfig["colorize"] = sec.Key("COLORIZE").MustBool(log.CanColorStdout)
+ defaultCanColor = log.CanColorStderr
}
-
+ writerOption := log.WriterConsoleOption{Stderr: useStderr}
+ writerMode.Colorize = ConfigInheritedKey(sec, "COLORIZE").MustBool(defaultCanColor)
+ writerMode.WriterOption = writerOption
case "file":
- if err := os.MkdirAll(path.Dir(logPath), os.ModePerm); err != nil {
- panic(err.Error())
- }
-
- logConfig["filename"] = logPath + filenameSuffix
- logConfig["rotate"] = sec.Key("LOG_ROTATE").MustBool(true)
- logConfig["maxsize"] = 1 << uint(sec.Key("MAX_SIZE_SHIFT").MustInt(28))
- logConfig["daily"] = sec.Key("DAILY_ROTATE").MustBool(true)
- logConfig["maxdays"] = sec.Key("MAX_DAYS").MustInt(7)
- logConfig["compress"] = sec.Key("COMPRESS").MustBool(true)
- logConfig["compressionLevel"] = sec.Key("COMPRESSION_LEVEL").MustInt(-1)
+ fileName := LogPrepareFilenameForWriter(ConfigInheritedKey(sec, "FILE_NAME").String(), defaultFileName)
+ writerOption := log.WriterFileOption{}
+ writerOption.FileName = fileName + filenameSuffix // FIXME: the suffix doesn't seem right, see its related comments
+ writerOption.LogRotate = ConfigInheritedKey(sec, "LOG_ROTATE").MustBool(true)
+ writerOption.MaxSize = 1 << uint(ConfigInheritedKey(sec, "MAX_SIZE_SHIFT").MustInt(28))
+ writerOption.DailyRotate = ConfigInheritedKey(sec, "DAILY_ROTATE").MustBool(true)
+ writerOption.MaxDays = ConfigInheritedKey(sec, "MAX_DAYS").MustInt(7)
+ writerOption.Compress = ConfigInheritedKey(sec, "COMPRESS").MustBool(true)
+ writerOption.CompressionLevel = ConfigInheritedKey(sec, "COMPRESSION_LEVEL").MustInt(-1)
+ writerMode.WriterOption = writerOption
case "conn":
- logConfig["reconnectOnMsg"] = sec.Key("RECONNECT_ON_MSG").MustBool()
- logConfig["reconnect"] = sec.Key("RECONNECT").MustBool()
- logConfig["net"] = sec.Key("PROTOCOL").In("tcp", []string{"tcp", "unix", "udp"})
- logConfig["addr"] = sec.Key("ADDR").MustString(":7020")
- case "smtp":
- logConfig["username"] = sec.Key("USER").MustString("example@example.com")
- logConfig["password"] = sec.Key("PASSWD").MustString("******")
- logConfig["host"] = sec.Key("HOST").MustString("127.0.0.1:25")
- sendTos := strings.Split(sec.Key("RECEIVERS").MustString(""), ",")
- for i, address := range sendTos {
- sendTos[i] = strings.TrimSpace(address)
+ writerOption := log.WriterConnOption{}
+ writerOption.ReconnectOnMsg = ConfigInheritedKey(sec, "RECONNECT_ON_MSG").MustBool()
+ writerOption.Reconnect = ConfigInheritedKey(sec, "RECONNECT").MustBool()
+ writerOption.Protocol = ConfigInheritedKey(sec, "PROTOCOL").In("tcp", []string{"tcp", "unix", "udp"})
+ writerOption.Addr = ConfigInheritedKey(sec, "ADDR").MustString(":7020")
+ writerMode.WriterOption = writerOption
+ default:
+ if !log.HasEventWriter(writerType) {
+ return "", "", writerMode, fmt.Errorf("invalid log writer type (mode): %s", writerType)
}
- logConfig["sendTos"] = sendTos
- logConfig["subject"] = sec.Key("SUBJECT").MustString("Diagnostic message from Gitea")
}
- logConfig["colorize"] = sec.Key("COLORIZE").MustBool(false)
- byteConfig, err := json.Marshal(logConfig)
- if err != nil {
- log.Error("Failed to marshal log configuration: %v %v", logConfig, err)
- return
- }
- jsonConfig = string(byteConfig)
- return mode, jsonConfig, levelName
+ return writerName, writerType, writerMode, nil
}
-func generateNamedLogger(rootCfg ConfigProvider, key string, options defaultLogOptions) *LogDescription {
- description := LogDescription{
- Name: key,
- }
-
- sections := strings.Split(rootCfg.Section("log").Key(strings.ToUpper(key)).MustString(""), ",")
-
- for i := 0; i < len(sections); i++ {
- sections[i] = strings.TrimSpace(sections[i])
- }
+var filenameSuffix = ""
- for _, name := range sections {
- if len(name) == 0 || (name == "console" && options.disableConsole) {
- continue
- }
- sec, err := rootCfg.GetSection("log." + name + "." + key)
- if err != nil {
- sec, _ = rootCfg.NewSection("log." + name + "." + key)
- }
+// RestartLogsWithPIDSuffix restarts the logs with a PID suffix on files
+// FIXME: this doesn't seem right, it breaks log rotation and log collectors
+func RestartLogsWithPIDSuffix() {
+ filenameSuffix = fmt.Sprintf(".%d", os.Getpid())
+ initAllLoggers() // when forking, before restarting, rename logger file and re-init all loggers
+}
- provider, config, levelName := generateLogConfig(sec, name, options)
+func InitLoggersForTest() {
+ initAllLoggers()
+}
- if err := log.NewNamedLogger(key, options.bufferLength, name, provider, config); err != nil {
- // Maybe panic here?
- log.Error("Could not create new named logger: %v", err.Error())
- }
+// initAllLoggers creates all the log services
+func initAllLoggers() {
+ initManagedLoggers(log.GetManager(), CfgProvider)
- description.SubLogDescriptions = append(description.SubLogDescriptions, SubLogDescription{
- Name: name,
- Provider: provider,
- Config: config,
- })
- log.Info("%s Log: %s(%s:%s)", util.ToTitleCase(key), util.ToTitleCase(name), provider, levelName)
- }
+ golog.SetFlags(0)
+ golog.SetPrefix("")
+ golog.SetOutput(log.LoggerToWriter(log.GetLogger(log.DEFAULT).Info))
+}
- AddLogDescription(key, &description)
+func initManagedLoggers(manager *log.LoggerManager, cfg ConfigProvider) {
+ loadLogGlobalFrom(cfg)
+ prepareLoggerConfig(cfg)
- return &description
+ initLoggerByName(manager, cfg, log.DEFAULT) // default
+ initLoggerByName(manager, cfg, "access")
+ initLoggerByName(manager, cfg, "router")
+ initLoggerByName(manager, cfg, "xorm")
}
-// initLogFrom initializes logging with settings from configuration provider
-func initLogFrom(rootCfg ConfigProvider) {
+func initLoggerByName(manager *log.LoggerManager, rootCfg ConfigProvider, loggerName string) {
sec := rootCfg.Section("log")
- options := newDefaultLogOptions()
- options.bufferLength = Log.BufferLength
+ keyPrefix := "logger." + loggerName
- description := LogDescription{
- Name: log.DEFAULT,
+ disabled := sec.HasKey(keyPrefix+".MODE") && sec.Key(keyPrefix+".MODE").String() == ""
+ if disabled {
+ return
}
- sections := strings.Split(sec.Key("MODE").MustString("console"), ",")
+ modeVal := sec.Key(keyPrefix + ".MODE").String()
+ if modeVal == "," {
+ modeVal = Log.Mode
+ }
- useConsole := false
- for _, name := range sections {
- name = strings.TrimSpace(name)
- if name == "" {
+ var eventWriters []log.EventWriter
+ modes := strings.Split(modeVal, ",")
+ for _, modeName := range modes {
+ modeName = strings.TrimSpace(modeName)
+ if modeName == "" {
continue
}
- if name == "console" {
- useConsole = true
- }
-
- sec, err := rootCfg.GetSection("log." + name + ".default")
+ writerName, writerType, writerMode, err := loadLogModeByName(rootCfg, loggerName, modeName)
if err != nil {
- sec, err = rootCfg.GetSection("log." + name)
+ log.FallbackErrorf("Failed to load writer mode %q for logger %s: %v", modeName, loggerName, err)
+ continue
+ }
+ if writerMode.BufferLen == 0 {
+ writerMode.BufferLen = Log.BufferLen
+ }
+ eventWriter := manager.GetSharedWriter(writerName)
+ if eventWriter == nil {
+ eventWriter, err = manager.NewSharedWriter(writerName, writerType, writerMode)
if err != nil {
- sec, _ = rootCfg.NewSection("log." + name)
+ log.FallbackErrorf("Failed to create event writer for logger %s: %v", loggerName, err)
+ continue
}
}
-
- provider, config, levelName := generateLogConfig(sec, name, options)
- log.NewLogger(options.bufferLength, name, provider, config)
- description.SubLogDescriptions = append(description.SubLogDescriptions, SubLogDescription{
- Name: name,
- Provider: provider,
- Config: config,
- })
- log.Info("Gitea Log Mode: %s(%s:%s)", util.ToTitleCase(name), util.ToTitleCase(provider), levelName)
- }
-
- AddLogDescription(log.DEFAULT, &description)
-
- if !useConsole {
- log.Info("According to the configuration, subsequent logs will not be printed to the console")
- if err := log.DelLogger("console"); err != nil {
- log.Fatal("Cannot delete console logger: %v", err)
- }
+ eventWriters = append(eventWriters, eventWriter)
}
- // Finally redirect the default golog to here
- golog.SetFlags(0)
- golog.SetPrefix("")
- golog.SetOutput(log.NewLoggerAsWriter("INFO", log.GetLogger(log.DEFAULT)))
+ manager.GetLogger(loggerName).RemoveAllWriters().AddWriters(eventWriters...)
}
-// RestartLogsWithPIDSuffix restarts the logs with a PID suffix on files
-func RestartLogsWithPIDSuffix() {
- filenameSuffix = fmt.Sprintf(".%d", os.Getpid())
- InitLogs(false)
+func InitSQLLoggersForCli(level log.Level) {
+ log.SetConsoleLogger("xorm", "console", level)
}
-// InitLogs creates all the log services
-func InitLogs(disableConsole bool) {
- initLogFrom(CfgProvider)
-
- if !Log.DisableRouterLog {
- options := newDefaultLogOptions()
- options.filename = filepath.Join(Log.RootPath, "router.log")
- options.flags = "date,time" // For the router we don't want any prefixed flags
- options.bufferLength = Log.BufferLength
- generateNamedLogger(CfgProvider, "router", options)
- }
-
- if Log.EnableAccessLog {
- options := newDefaultLogOptions()
- options.filename = filepath.Join(Log.RootPath, "access.log")
- options.flags = "" // For the router we don't want any prefixed flags
- options.bufferLength = Log.BufferLength
- generateNamedLogger(CfgProvider, "access", options)
- }
-
- initSQLLogFrom(CfgProvider, disableConsole)
-}
-
-// InitSQLLog initializes xorm logger setting
-func InitSQLLog(disableConsole bool) {
- initSQLLogFrom(CfgProvider, disableConsole)
+func IsAccessLogEnabled() bool {
+ return log.IsLoggerEnabled("access")
}
-func initSQLLogFrom(rootCfg ConfigProvider, disableConsole bool) {
- if Log.EnableXORMLog {
- options := newDefaultLogOptions()
- options.filename = filepath.Join(Log.RootPath, "xorm.log")
- options.bufferLength = Log.BufferLength
- options.disableConsole = disableConsole
-
- rootCfg.Section("log").Key("XORM").MustString(",")
- generateNamedLogger(rootCfg, "xorm", options)
- }
+func IsRouteLogEnabled() bool {
+ return log.IsLoggerEnabled("router")
}
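
For orientation, a minimal sketch of what the new config scheme boils down to at runtime: each entry in `logger.<name>.MODE` is resolved to a `[log.<mode>]` section, turned into a log.WriterMode, and attached to the named logger as a shared event writer (the new log_test.go below exercises the full mapping). The writer name, level, and options here are chosen purely for illustration:

```go
package main

import (
	"code.gitea.io/gitea/modules/log"
)

func attachExtraXormWriter() {
	manager := log.GetManager()

	// Roughly what "logger.xorm.MODE = console-1" together with
	// "[log.console-1] MODE=console, LEVEL=error, STDERR=true" ends up producing.
	mode := log.WriterMode{
		Level:        log.ERROR,
		Flags:        log.FlagsFromString("stdflags"),
		BufferLen:    10000,
		WriterOption: log.WriterConsoleOption{Stderr: true},
	}
	writer, err := manager.NewSharedWriter("console-1", "console", mode)
	if err != nil {
		log.FallbackErrorf("cannot create writer: %v", err)
		return
	}
	manager.GetLogger("xorm").AddWriters(writer)
}
```
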
diff --git a/modules/setting/log_test.go b/modules/setting/log_test.go
new file mode 100644
index 0000000000..c07651f548
--- /dev/null
+++ b/modules/setting/log_test.go
@@ -0,0 +1,387 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func initLoggersByConfig(t *testing.T, config string) (*log.LoggerManager, func()) {
+ oldLogConfig := Log
+ Log = LogGlobalConfig{}
+ defer func() {
+ Log = oldLogConfig
+ }()
+
+ cfg, err := NewConfigProviderFromData(config)
+ assert.NoError(t, err)
+
+ manager := log.NewManager()
+ initManagedLoggers(manager, cfg)
+ return manager, manager.Close
+}
+
+func toJSON(v interface{}) string {
+ b, _ := json.MarshalIndent(v, "", "\t")
+ return string(b)
+}
+
+func TestLogConfigDefault(t *testing.T) {
+ manager, managerClose := initLoggersByConfig(t, ``)
+ defer managerClose()
+
+ writerDump := `
+{
+ "console": {
+ "BufferLen": 10000,
+ "Colorize": false,
+ "Expression": "",
+ "Flags": "stdflags",
+ "Level": "info",
+ "Prefix": "",
+ "StacktraceLevel": "none",
+ "WriterOption": {
+ "Stderr": false
+ },
+ "WriterType": "console"
+ }
+}
+`
+
+ dump := manager.GetLogger(log.DEFAULT).DumpWriters()
+ require.JSONEq(t, writerDump, toJSON(dump))
+
+ dump = manager.GetLogger("access").DumpWriters()
+ require.JSONEq(t, "{}", toJSON(dump))
+
+ dump = manager.GetLogger("router").DumpWriters()
+ require.JSONEq(t, writerDump, toJSON(dump))
+
+ dump = manager.GetLogger("xorm").DumpWriters()
+ require.JSONEq(t, writerDump, toJSON(dump))
+}
+
+func TestLogConfigDisable(t *testing.T) {
+ manager, managerClose := initLoggersByConfig(t, `
+[log]
+logger.router.MODE =
+logger.xorm.MODE =
+`)
+ defer managerClose()
+
+ writerDump := `
+{
+ "console": {
+ "BufferLen": 10000,
+ "Colorize": false,
+ "Expression": "",
+ "Flags": "stdflags",
+ "Level": "info",
+ "Prefix": "",
+ "StacktraceLevel": "none",
+ "WriterOption": {
+ "Stderr": false
+ },
+ "WriterType": "console"
+ }
+}
+`
+
+ dump := manager.GetLogger(log.DEFAULT).DumpWriters()
+ require.JSONEq(t, writerDump, toJSON(dump))
+
+ dump = manager.GetLogger("access").DumpWriters()
+ require.JSONEq(t, "{}", toJSON(dump))
+
+ dump = manager.GetLogger("router").DumpWriters()
+ require.JSONEq(t, "{}", toJSON(dump))
+
+ dump = manager.GetLogger("xorm").DumpWriters()
+ require.JSONEq(t, "{}", toJSON(dump))
+}
+
+func TestLogConfigLegacyDefault(t *testing.T) {
+ manager, managerClose := initLoggersByConfig(t, `
+[log]
+MODE = console
+`)
+ defer managerClose()
+
+ writerDump := `
+{
+ "console": {
+ "BufferLen": 10000,
+ "Colorize": false,
+ "Expression": "",
+ "Flags": "stdflags",
+ "Level": "info",
+ "Prefix": "",
+ "StacktraceLevel": "none",
+ "WriterOption": {
+ "Stderr": false
+ },
+ "WriterType": "console"
+ }
+}
+`
+
+ dump := manager.GetLogger(log.DEFAULT).DumpWriters()
+ require.JSONEq(t, writerDump, toJSON(dump))
+
+ dump = manager.GetLogger("access").DumpWriters()
+ require.JSONEq(t, "{}", toJSON(dump))
+
+ dump = manager.GetLogger("router").DumpWriters()
+ require.JSONEq(t, writerDump, toJSON(dump))
+
+ dump = manager.GetLogger("xorm").DumpWriters()
+ require.JSONEq(t, writerDump, toJSON(dump))
+}
+
+func TestLogConfigLegacyMode(t *testing.T) {
+ tempDir := t.TempDir()
+
+ tempPath := func(file string) string {
+ return filepath.Join(tempDir, file)
+ }
+
+ manager, managerClose := initLoggersByConfig(t, `
+[log]
+ROOT_PATH = `+tempDir+`
+MODE = file
+ROUTER = file
+ACCESS = file
+`)
+ defer managerClose()
+
+ writerDump := `
+{
+ "file": {
+ "BufferLen": 10000,
+ "Colorize": false,
+ "Expression": "",
+ "Flags": "stdflags",
+ "Level": "info",
+ "Prefix": "",
+ "StacktraceLevel": "none",
+ "WriterOption": {
+ "Compress": true,
+ "CompressionLevel": -1,
+ "DailyRotate": true,
+ "FileName": "$FILENAME",
+ "LogRotate": true,
+ "MaxDays": 7,
+ "MaxSize": 268435456
+ },
+ "WriterType": "file"
+ }
+}
+`
+ writerDumpAccess := `
+{
+ "file.access": {
+ "BufferLen": 10000,
+ "Colorize": false,
+ "Expression": "",
+ "Flags": "none",
+ "Level": "info",
+ "Prefix": "",
+ "StacktraceLevel": "none",
+ "WriterOption": {
+ "Compress": true,
+ "CompressionLevel": -1,
+ "DailyRotate": true,
+ "FileName": "$FILENAME",
+ "LogRotate": true,
+ "MaxDays": 7,
+ "MaxSize": 268435456
+ },
+ "WriterType": "file"
+ }
+}
+`
+ dump := manager.GetLogger(log.DEFAULT).DumpWriters()
+ require.JSONEq(t, strings.ReplaceAll(writerDump, "$FILENAME", tempPath("gitea.log")), toJSON(dump))
+
+ dump = manager.GetLogger("access").DumpWriters()
+ require.JSONEq(t, strings.ReplaceAll(writerDumpAccess, "$FILENAME", tempPath("access.log")), toJSON(dump))
+
+ dump = manager.GetLogger("router").DumpWriters()
+ require.JSONEq(t, strings.ReplaceAll(writerDump, "$FILENAME", tempPath("gitea.log")), toJSON(dump))
+}
+
+func TestLogConfigLegacyModeDisable(t *testing.T) {
+ manager, managerClose := initLoggersByConfig(t, `
+[log]
+ROUTER = file
+ACCESS = file
+DISABLE_ROUTER_LOG = true
+ENABLE_ACCESS_LOG = false
+`)
+ defer managerClose()
+
+ dump := manager.GetLogger("access").DumpWriters()
+ require.JSONEq(t, "{}", toJSON(dump))
+
+ dump = manager.GetLogger("router").DumpWriters()
+ require.JSONEq(t, "{}", toJSON(dump))
+}
+
+func TestLogConfigNewConfig(t *testing.T) {
+ manager, managerClose := initLoggersByConfig(t, `
+[log]
+logger.access.MODE = console
+logger.xorm.MODE = console, console-1
+
+[log.console]
+LEVEL = warn
+
+[log.console-1]
+MODE = console
+LEVEL = error
+STDERR = true
+`)
+ defer managerClose()
+
+ writerDump := `
+{
+ "console": {
+ "BufferLen": 10000,
+ "Colorize": false,
+ "Expression": "",
+ "Flags": "stdflags",
+ "Level": "warn",
+ "Prefix": "",
+ "StacktraceLevel": "none",
+ "WriterOption": {
+ "Stderr": false
+ },
+ "WriterType": "console"
+ },
+ "console-1": {
+ "BufferLen": 10000,
+ "Colorize": false,
+ "Expression": "",
+ "Flags": "stdflags",
+ "Level": "error",
+ "Prefix": "",
+ "StacktraceLevel": "none",
+ "WriterOption": {
+ "Stderr": true
+ },
+ "WriterType": "console"
+ }
+}
+`
+ writerDumpAccess := `
+{
+ "console.access": {
+ "BufferLen": 10000,
+ "Colorize": false,
+ "Expression": "",
+ "Flags": "none",
+ "Level": "warn",
+ "Prefix": "",
+ "StacktraceLevel": "none",
+ "WriterOption": {
+ "Stderr": false
+ },
+ "WriterType": "console"
+ }
+}
+`
+ dump := manager.GetLogger("xorm").DumpWriters()
+ require.JSONEq(t, writerDump, toJSON(dump))
+
+ dump = manager.GetLogger("access").DumpWriters()
+ require.JSONEq(t, writerDumpAccess, toJSON(dump))
+}
+
+func TestLogConfigModeFile(t *testing.T) {
+ tempDir := t.TempDir()
+
+ tempPath := func(file string) string {
+ return filepath.Join(tempDir, file)
+ }
+
+ manager, managerClose := initLoggersByConfig(t, `
+[log]
+ROOT_PATH = `+tempDir+`
+BUFFER_LEN = 10
+MODE = file, file1
+
+[log.file1]
+MODE = file
+LEVEL = error
+STACKTRACE_LEVEL = fatal
+EXPRESSION = filter
+FLAGS = medfile
+PREFIX = "[Prefix] "
+FILE_NAME = file-xxx.log
+LOG_ROTATE = false
+MAX_SIZE_SHIFT = 1
+DAILY_ROTATE = false
+MAX_DAYS = 90
+COMPRESS = false
+COMPRESSION_LEVEL = 4
+`)
+ defer managerClose()
+
+ writerDump := `
+{
+ "file": {
+ "BufferLen": 10,
+ "Colorize": false,
+ "Expression": "",
+ "Flags": "stdflags",
+ "Level": "info",
+ "Prefix": "",
+ "StacktraceLevel": "none",
+ "WriterOption": {
+ "Compress": true,
+ "CompressionLevel": -1,
+ "DailyRotate": true,
+ "FileName": "$FILENAME-0",
+ "LogRotate": true,
+ "MaxDays": 7,
+ "MaxSize": 268435456
+ },
+ "WriterType": "file"
+ },
+ "file1": {
+ "BufferLen": 10,
+ "Colorize": false,
+ "Expression": "filter",
+ "Flags": "medfile",
+ "Level": "error",
+ "Prefix": "[Prefix] ",
+ "StacktraceLevel": "fatal",
+ "WriterOption": {
+ "Compress": false,
+ "CompressionLevel": 4,
+ "DailyRotate": false,
+ "FileName": "$FILENAME-1",
+ "LogRotate": false,
+ "MaxDays": 90,
+ "MaxSize": 2
+ },
+ "WriterType": "file"
+ }
+}
+`
+
+ dump := manager.GetLogger(log.DEFAULT).DumpWriters()
+ expected := writerDump
+ expected = strings.ReplaceAll(expected, "$FILENAME-0", tempPath("gitea.log"))
+ expected = strings.ReplaceAll(expected, "$FILENAME-1", tempPath("file-xxx.log"))
+ require.JSONEq(t, expected, toJSON(dump))
+}
diff --git a/modules/setting/repository.go b/modules/setting/repository.go
index 900b56cc52..5520c992b9 100644
--- a/modules/setting/repository.go
+++ b/modules/setting/repository.go
@@ -278,7 +278,6 @@ func loadRepositoryFrom(rootCfg ConfigProvider) {
Repository.MaxCreationLimit = sec.Key("MAX_CREATION_LIMIT").MustInt(-1)
Repository.DefaultBranch = sec.Key("DEFAULT_BRANCH").MustString(Repository.DefaultBranch)
RepoRootPath = sec.Key("ROOT").MustString(path.Join(AppDataPath, "gitea-repositories"))
- forcePathSeparator(RepoRootPath)
if !filepath.IsAbs(RepoRootPath) {
RepoRootPath = filepath.Join(AppWorkPath, RepoRootPath)
} else {
diff --git a/modules/setting/server.go b/modules/setting/server.go
index 1839062685..d937faca10 100644
--- a/modules/setting/server.go
+++ b/modules/setting/server.go
@@ -317,7 +317,6 @@ func loadServerFrom(rootCfg ConfigProvider) {
PortToRedirect = sec.Key("PORT_TO_REDIRECT").MustString("80")
RedirectorUseProxyProtocol = sec.Key("REDIRECTOR_USE_PROXY_PROTOCOL").MustBool(UseProxyProtocol)
OfflineMode = sec.Key("OFFLINE_MODE").MustBool()
- Log.DisableRouterLog = sec.Key("DISABLE_ROUTER_LOG").MustBool()
if len(StaticRootPath) == 0 {
StaticRootPath = AppWorkPath
}
diff --git a/modules/setting/setting.go b/modules/setting/setting.go
index b085a7b321..8f20ef0856 100644
--- a/modules/setting/setting.go
+++ b/modules/setting/setting.go
@@ -115,7 +115,7 @@ func init() {
// We can rely on log.CanColorStdout being set properly because modules/log/console_windows.go comes before modules/setting/setting.go lexicographically
// By default set this logger at Info - we'll change it later, but we need to start with something.
- log.NewLogger(0, "console", "console", fmt.Sprintf(`{"level": "info", "colorize": %t, "stacktraceLevel": "none"}`, log.CanColorStdout))
+ log.SetConsoleLogger(log.DEFAULT, "console", log.INFO)
var err error
if AppPath, err = getAppPath(); err != nil {
@@ -124,12 +124,6 @@ func init() {
AppWorkPath = getWorkPath(AppPath)
}
-func forcePathSeparator(path string) {
- if strings.Contains(path, "\\") {
- log.Fatal("Do not use '\\' or '\\\\' in paths, instead, please use '/' in all places")
- }
-}
-
// IsRunUserMatchCurrentUser returns false if configured run user does not match
// actual user that runs the app. The first return value is the actual user name.
// This check is ignored under Windows since SSH remote login is not the main
@@ -218,9 +212,9 @@ func Init(opts *Options) {
// loadCommonSettingsFrom loads common configurations from a configuration provider.
func loadCommonSettingsFrom(cfg ConfigProvider) {
- // WARNNING: don't change the sequence except you know what you are doing.
+ // WARNING: don't change the sequence except you know what you are doing.
loadRunModeFrom(cfg)
- loadLogFrom(cfg)
+ loadLogGlobalFrom(cfg)
loadServerFrom(cfg)
loadSSHFrom(cfg)
@@ -282,10 +276,11 @@ func mustCurrentRunUserMatch(rootCfg ConfigProvider) {
// LoadSettings initializes the settings for normal start up
func LoadSettings() {
+ initAllLoggers()
+
loadDBSetting(CfgProvider)
loadServiceFrom(CfgProvider)
loadOAuth2ClientFrom(CfgProvider)
- InitLogs(false)
loadCacheFrom(CfgProvider)
loadSessionFrom(CfgProvider)
loadCorsFrom(CfgProvider)
diff --git a/modules/ssh/ssh.go b/modules/ssh/ssh.go
index 9ec14f2caa..4bf57eafb7 100644
--- a/modules/ssh/ssh.go
+++ b/modules/ssh/ssh.go
@@ -223,9 +223,7 @@ func publicKeyHandler(ctx ssh.Context, key ssh.PublicKey) bool {
// validate the cert for this principal
if err := c.CheckCert(principal, cert); err != nil {
// User is presenting an invalid certificate - STOP any further processing
- if log.IsError() {
- log.Error("Invalid Certificate KeyID %s with Signature Fingerprint %s presented for Principal: %s from %s", cert.KeyId, gossh.FingerprintSHA256(cert.SignatureKey), principal, ctx.RemoteAddr())
- }
+ log.Error("Invalid Certificate KeyID %s with Signature Fingerprint %s presented for Principal: %s from %s", cert.KeyId, gossh.FingerprintSHA256(cert.SignatureKey), principal, ctx.RemoteAddr())
log.Warn("Failed authentication attempt from %s", ctx.RemoteAddr())
return false
@@ -239,10 +237,8 @@ func publicKeyHandler(ctx ssh.Context, key ssh.PublicKey) bool {
return true
}
- if log.IsWarn() {
- log.Warn("From %s Fingerprint: %s is a certificate, but no valid principals found", ctx.RemoteAddr(), gossh.FingerprintSHA256(key))
- log.Warn("Failed authentication attempt from %s", ctx.RemoteAddr())
- }
+ log.Warn("From %s Fingerprint: %s is a certificate, but no valid principals found", ctx.RemoteAddr(), gossh.FingerprintSHA256(key))
+ log.Warn("Failed authentication attempt from %s", ctx.RemoteAddr())
return false
}
@@ -253,10 +249,8 @@ func publicKeyHandler(ctx ssh.Context, key ssh.PublicKey) bool {
pkey, err := asymkey_model.SearchPublicKeyByContent(ctx, strings.TrimSpace(string(gossh.MarshalAuthorizedKey(key))))
if err != nil {
if asymkey_model.IsErrKeyNotExist(err) {
- if log.IsWarn() {
- log.Warn("Unknown public key: %s from %s", gossh.FingerprintSHA256(key), ctx.RemoteAddr())
- log.Warn("Failed authentication attempt from %s", ctx.RemoteAddr())
- }
+ log.Warn("Unknown public key: %s from %s", gossh.FingerprintSHA256(key), ctx.RemoteAddr())
+ log.Warn("Failed authentication attempt from %s", ctx.RemoteAddr())
return false
}
log.Error("SearchPublicKeyByContent: %v", err)
diff --git a/modules/templates/htmlrenderer.go b/modules/templates/htmlrenderer.go
index d60be88727..311e5b741d 100644
--- a/modules/templates/htmlrenderer.go
+++ b/modules/templates/htmlrenderer.go
@@ -126,7 +126,7 @@ func wrapFatal(msg string) {
if msg == "" {
return
}
- log.FatalWithSkip(1, "Unable to compile templates, %s", msg)
+ log.Fatal("Unable to compile templates, %s", msg)
}
type templateErrorPrettier struct {
diff --git a/modules/test/logchecker.go b/modules/test/logchecker.go
index 8f8c753c76..7bf234f560 100644
--- a/modules/test/logchecker.go
+++ b/modules/test/logchecker.go
@@ -4,7 +4,8 @@
package test
import (
- "strconv"
+ "context"
+ "fmt"
"strings"
"sync"
"sync/atomic"
@@ -14,9 +15,7 @@ import (
)
type LogChecker struct {
- logger *log.MultiChannelledLogger
- loggerName string
- eventLoggerName string
+ *log.EventWriterBaseImpl
filterMessages []string
filtered []bool
@@ -27,54 +26,44 @@ type LogChecker struct {
mu sync.Mutex
}
-func (lc *LogChecker) LogEvent(event *log.Event) error {
+func (lc *LogChecker) Run(ctx context.Context) {
+ for {
+ select {
+ case <-ctx.Done():
+ return
+ case event, ok := <-lc.Queue:
+ if !ok {
+ return
+ }
+ lc.checkLogEvent(event)
+ }
+ }
+}
+
+func (lc *LogChecker) checkLogEvent(event *log.EventFormatted) {
lc.mu.Lock()
defer lc.mu.Unlock()
for i, msg := range lc.filterMessages {
- if strings.Contains(event.GetMsg(), msg) {
+ if strings.Contains(event.Origin.MsgSimpleText, msg) {
lc.filtered[i] = true
}
}
- if strings.Contains(event.GetMsg(), lc.stopMark) {
+ if strings.Contains(event.Origin.MsgSimpleText, lc.stopMark) {
lc.stopped = true
}
- return nil
-}
-
-func (lc *LogChecker) Close() {}
-
-func (lc *LogChecker) Flush() {}
-
-func (lc *LogChecker) GetLevel() log.Level {
- return log.TRACE
-}
-
-func (lc *LogChecker) GetStacktraceLevel() log.Level {
- return log.NONE
-}
-
-func (lc *LogChecker) GetName() string {
- return lc.eventLoggerName
-}
-
-func (lc *LogChecker) ReleaseReopen() error {
- return nil
}
var checkerIndex int64
-func NewLogChecker(loggerName string) (logChecker *LogChecker, cancel func()) {
- logger := log.GetLogger(loggerName)
+func NewLogChecker(namePrefix string) (logChecker *LogChecker, cancel func()) {
+ logger := log.GetManager().GetLogger(namePrefix)
newCheckerIndex := atomic.AddInt64(&checkerIndex, 1)
- lc := &LogChecker{
- logger: logger,
- loggerName: loggerName,
- eventLoggerName: "TestLogChecker-" + strconv.FormatInt(newCheckerIndex, 10),
- }
- if err := logger.AddLogger(lc); err != nil {
- panic(err) // it's impossible
- }
- return lc, func() { _, _ = logger.DelLogger(lc.GetName()) }
+ writerName := namePrefix + "-" + fmt.Sprint(newCheckerIndex)
+
+ lc := &LogChecker{}
+ lc.EventWriterBaseImpl = log.NewEventWriterBase(writerName, "test-log-checker", log.WriterMode{})
+ logger.AddWriters(lc)
+ return lc, func() { _ = logger.RemoveWriter(writerName) }
}
// Filter makes the `Check` function check whether these log messages have been output.
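
The new LogChecker above also serves as a template for custom event writers: embed log.EventWriterBaseImpl, drain the Queue in Run, and attach the writer to a logger. A minimal sketch under that assumption (the countingWriter type and its names are hypothetical):

```go
package main

import (
	"context"
	"sync/atomic"

	"code.gitea.io/gitea/modules/log"
)

// countingWriter counts every formatted event it receives; purely illustrative.
type countingWriter struct {
	*log.EventWriterBaseImpl
	count int64
}

func newCountingWriter(name string) *countingWriter {
	w := &countingWriter{}
	w.EventWriterBaseImpl = log.NewEventWriterBase(name, "counting-writer", log.WriterMode{})
	return w
}

func (w *countingWriter) Run(ctx context.Context) {
	for {
		select {
		case <-ctx.Done():
			return
		case _, ok := <-w.Queue:
			if !ok {
				return
			}
			atomic.AddInt64(&w.count, 1)
		}
	}
}

func attachCountingWriter() (cancel func()) {
	logger := log.GetManager().GetLogger(log.DEFAULT)
	logger.AddWriters(newCountingWriter("counting-1"))
	return func() { _ = logger.RemoveWriter("counting-1") }
}
```
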
diff --git a/modules/test/logchecker_test.go b/modules/test/logchecker_test.go
index 4dfea8c3e3..6b093ab1b3 100644
--- a/modules/test/logchecker_test.go
+++ b/modules/test/logchecker_test.go
@@ -13,8 +13,6 @@ import (
)
func TestLogChecker(t *testing.T) {
- _ = log.NewLogger(1000, "console", "console", `{"level":"info","stacktracelevel":"NONE","stderr":true}`)
-
lc, cleanup := NewLogChecker(log.DEFAULT)
defer cleanup()
diff --git a/modules/testlogger/testlogger.go b/modules/testlogger/testlogger.go
index cc80e86c81..b4275e6005 100644
--- a/modules/testlogger/testlogger.go
+++ b/modules/testlogger/testlogger.go
@@ -13,7 +13,6 @@ import (
"testing"
"time"
- "code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/queue"
)
@@ -24,19 +23,14 @@ var (
SlowFlush = 5 * time.Second
)
-// TestLogger is a logger which will write to the testing log
-type TestLogger struct {
- log.WriterLogger
-}
-
var WriterCloser = &testLoggerWriterCloser{}
type testLoggerWriterCloser struct {
sync.RWMutex
- t []*testing.TB
+ t []testing.TB
}
-func (w *testLoggerWriterCloser) pushT(t *testing.TB) {
+func (w *testLoggerWriterCloser) pushT(t testing.TB) {
w.Lock()
w.t = append(w.t, t)
w.Unlock()
@@ -48,7 +42,7 @@ func (w *testLoggerWriterCloser) Write(p []byte) (int, error) {
w.RLock()
defer w.RUnlock()
- var t *testing.TB
+ var t testing.TB
if len(w.t) > 0 {
t = w.t[len(w.t)-1]
}
@@ -57,33 +51,13 @@ func (w *testLoggerWriterCloser) Write(p []byte) (int, error) {
p = p[:len(p)-1]
}
- if t == nil || *t == nil {
+ if t == nil {
		// if there is no running test, the log message should be output to the console, to avoid losing important information.
// the "???" prefix is used to match the "===" and "+++" in PrintCurrentTest
return fmt.Fprintf(os.Stdout, "??? [TestLogger] %s\n", p)
}
- defer func() {
- err := recover()
- if err == nil {
- return
- }
- var errString string
- errErr, ok := err.(error)
- if ok {
- errString = errErr.Error()
- } else {
- errString, ok = err.(string)
- }
- if !ok {
- panic(err)
- }
- if !strings.HasPrefix(errString, "Log in goroutine after ") {
- panic(err)
- }
- }()
-
- (*t).Log(string(p))
+ t.Log(string(p))
return len(p), nil
}
@@ -106,8 +80,8 @@ func (w *testLoggerWriterCloser) Reset() {
if t == nil {
continue
}
- fmt.Fprintf(os.Stdout, "Unclosed logger writer in test: %s", (*t).Name())
- (*t).Errorf("Unclosed logger writer in test: %s", (*t).Name())
+ _, _ = fmt.Fprintf(os.Stdout, "Unclosed logger writer in test: %s", t.Name())
+ t.Errorf("Unclosed logger writer in test: %s", t.Name())
}
w.t = nil
}
@@ -124,25 +98,25 @@ func PrintCurrentTest(t testing.TB, skip ...int) func() {
_, filename, line, _ := runtime.Caller(actualSkip)
if log.CanColorStdout {
- fmt.Fprintf(os.Stdout, "=== %s (%s:%d)\n", fmt.Formatter(log.NewColoredValue(t.Name())), strings.TrimPrefix(filename, prefix), line)
+ _, _ = fmt.Fprintf(os.Stdout, "=== %s (%s:%d)\n", fmt.Formatter(log.NewColoredValue(t.Name())), strings.TrimPrefix(filename, prefix), line)
} else {
- fmt.Fprintf(os.Stdout, "=== %s (%s:%d)\n", t.Name(), strings.TrimPrefix(filename, prefix), line)
+ _, _ = fmt.Fprintf(os.Stdout, "=== %s (%s:%d)\n", t.Name(), strings.TrimPrefix(filename, prefix), line)
}
- WriterCloser.pushT(&t)
+ WriterCloser.pushT(t)
return func() {
took := time.Since(start)
if took > SlowTest {
if log.CanColorStdout {
- fmt.Fprintf(os.Stdout, "+++ %s is a slow test (took %v)\n", fmt.Formatter(log.NewColoredValue(t.Name(), log.Bold, log.FgYellow)), fmt.Formatter(log.NewColoredValue(took, log.Bold, log.FgYellow)))
+ _, _ = fmt.Fprintf(os.Stdout, "+++ %s is a slow test (took %v)\n", fmt.Formatter(log.NewColoredValue(t.Name(), log.Bold, log.FgYellow)), fmt.Formatter(log.NewColoredValue(took, log.Bold, log.FgYellow)))
} else {
- fmt.Fprintf(os.Stdout, "+++ %s is a slow test (took %v)\n", t.Name(), took)
+ _, _ = fmt.Fprintf(os.Stdout, "+++ %s is a slow test (took %v)\n", t.Name(), took)
}
}
timer := time.AfterFunc(SlowFlush, func() {
if log.CanColorStdout {
- fmt.Fprintf(os.Stdout, "+++ %s ... still flushing after %v ...\n", fmt.Formatter(log.NewColoredValue(t.Name(), log.Bold, log.FgRed)), SlowFlush)
+ _, _ = fmt.Fprintf(os.Stdout, "+++ %s ... still flushing after %v ...\n", fmt.Formatter(log.NewColoredValue(t.Name(), log.Bold, log.FgRed)), SlowFlush)
} else {
- fmt.Fprintf(os.Stdout, "+++ %s ... still flushing after %v ...\n", t.Name(), SlowFlush)
+ _, _ = fmt.Fprintf(os.Stdout, "+++ %s ... still flushing after %v ...\n", t.Name(), SlowFlush)
}
})
if err := queue.GetManager().FlushAll(context.Background(), time.Minute); err != nil {
@@ -152,9 +126,9 @@ func PrintCurrentTest(t testing.TB, skip ...int) func() {
flushTook := time.Since(start) - took
if flushTook > SlowFlush {
if log.CanColorStdout {
- fmt.Fprintf(os.Stdout, "+++ %s had a slow clean-up flush (took %v)\n", fmt.Formatter(log.NewColoredValue(t.Name(), log.Bold, log.FgRed)), fmt.Formatter(log.NewColoredValue(flushTook, log.Bold, log.FgRed)))
+ _, _ = fmt.Fprintf(os.Stdout, "+++ %s had a slow clean-up flush (took %v)\n", fmt.Formatter(log.NewColoredValue(t.Name(), log.Bold, log.FgRed)), fmt.Formatter(log.NewColoredValue(flushTook, log.Bold, log.FgRed)))
} else {
- fmt.Fprintf(os.Stdout, "+++ %s had a slow clean-up flush (took %v)\n", t.Name(), flushTook)
+ _, _ = fmt.Fprintf(os.Stdout, "+++ %s had a slow clean-up flush (took %v)\n", t.Name(), flushTook)
}
}
WriterCloser.popT()
@@ -168,40 +142,20 @@ func Printf(format string, args ...interface{}) {
args[i] = log.NewColoredValue(args[i])
}
}
- fmt.Fprintf(os.Stdout, "\t"+format, args...)
-}
-
-// NewTestLogger creates a TestLogger as a log.LoggerProvider
-func NewTestLogger() log.LoggerProvider {
- logger := &TestLogger{}
- logger.Colorize = log.CanColorStdout
- logger.Level = log.TRACE
- return logger
-}
-
-// Init inits connection writer with json config.
-// json config only need key "level".
-func (log *TestLogger) Init(config string) error {
- err := json.Unmarshal([]byte(config), log)
- if err != nil {
- return err
- }
- log.NewWriterLogger(WriterCloser)
- return nil
-}
-
-// Flush when log should be flushed
-func (log *TestLogger) Flush() {
+ _, _ = fmt.Fprintf(os.Stdout, "\t"+format, args...)
}
-// ReleaseReopen does nothing
-func (log *TestLogger) ReleaseReopen() error {
- return nil
+// TestLogEventWriter is an event writer which writes to the testing log
+type TestLogEventWriter struct {
+ *log.EventWriterBaseImpl
}
-// GetName returns the default name for this implementation
-func (log *TestLogger) GetName() string {
- return "test"
+// NewTestLoggerWriter creates a TestLogEventWriter as a log.EventWriter
+func NewTestLoggerWriter(name string, mode log.WriterMode) log.EventWriter {
+ w := &TestLogEventWriter{}
+ w.EventWriterBaseImpl = log.NewEventWriterBase(name, "test-log-writer", mode)
+ w.OutputWriteCloser = WriterCloser
+ return w
}
func init() {
diff --git a/modules/util/rotatingfilewriter/writer.go b/modules/util/rotatingfilewriter/writer.go
new file mode 100644
index 0000000000..5243bfe353
--- /dev/null
+++ b/modules/util/rotatingfilewriter/writer.go
@@ -0,0 +1,246 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package rotatingfilewriter
+
+import (
+ "bufio"
+ "compress/gzip"
+ "errors"
+ "fmt"
+ "os"
+ "path/filepath"
+ "strings"
+ "sync"
+ "time"
+
+ "code.gitea.io/gitea/modules/graceful/releasereopen"
+ "code.gitea.io/gitea/modules/util"
+)
+
+type Options struct {
+ Rotate bool
+ MaximumSize int64
+ RotateDaily bool
+ KeepDays int
+ Compress bool
+ CompressionLevel int
+}
+
+type RotatingFileWriter struct {
+ mu sync.Mutex
+ fd *os.File
+
+ currentSize int64
+ openDate int
+
+ options Options
+
+ cancelReleaseReopen func()
+}
+
+var ErrorPrintf func(format string, args ...interface{})
+
+// errorf tries to print error messages. Since this writer could be used by a logger system, this is the last chance to show the error in some cases
+func errorf(format string, args ...interface{}) {
+ if ErrorPrintf != nil {
+ ErrorPrintf("rotatingfilewriter: "+format+"\n", args...)
+ }
+}
+
+// Open creates a new rotating file writer.
+// Notice: if a file is opened by two rotators, there will be conflicts when rotating.
+// In the future, there should be a "rotating file manager".
+func Open(filename string, options *Options) (*RotatingFileWriter, error) {
+ if options == nil {
+ options = &Options{}
+ }
+
+ rfw := &RotatingFileWriter{
+ options: *options,
+ }
+
+ if err := rfw.open(filename); err != nil {
+ return nil, err
+ }
+
+ rfw.cancelReleaseReopen = releasereopen.GetManager().Register(rfw)
+ return rfw, nil
+}
+
+func (rfw *RotatingFileWriter) Write(b []byte) (int, error) {
+ if rfw.options.Rotate && ((rfw.options.MaximumSize > 0 && rfw.currentSize >= rfw.options.MaximumSize) || (rfw.options.RotateDaily && time.Now().Day() != rfw.openDate)) {
+ if err := rfw.DoRotate(); err != nil {
+ // if this writer is used by a logger system, it's the logger system's responsibility to handle/show the error
+ return 0, err
+ }
+ }
+
+ n, err := rfw.fd.Write(b)
+ if err == nil {
+ rfw.currentSize += int64(n)
+ }
+ return n, err
+}
+
+func (rfw *RotatingFileWriter) Flush() error {
+ return rfw.fd.Sync()
+}
+
+func (rfw *RotatingFileWriter) Close() error {
+ rfw.mu.Lock()
+ if rfw.cancelReleaseReopen != nil {
+ rfw.cancelReleaseReopen()
+ rfw.cancelReleaseReopen = nil
+ }
+ rfw.mu.Unlock()
+ return rfw.fd.Close()
+}
+
+func (rfw *RotatingFileWriter) open(filename string) error {
+ fd, err := os.OpenFile(filename, os.O_WRONLY|os.O_APPEND|os.O_CREATE, 0o660)
+ if err != nil {
+ return err
+ }
+
+ rfw.fd = fd
+
+ finfo, err := fd.Stat()
+ if err != nil {
+ return err
+ }
+ rfw.currentSize = finfo.Size()
+ rfw.openDate = finfo.ModTime().Day()
+
+ return nil
+}
+
+func (rfw *RotatingFileWriter) ReleaseReopen() error {
+ return errors.Join(
+ rfw.fd.Close(),
+ rfw.open(rfw.fd.Name()),
+ )
+}
+
+// DoRotate the log file creating a backup like xx.2013-01-01.2
+func (rfw *RotatingFileWriter) DoRotate() error {
+ if !rfw.options.Rotate {
+ return nil
+ }
+
+ rfw.mu.Lock()
+ defer rfw.mu.Unlock()
+
+ prefix := fmt.Sprintf("%s.%s.", rfw.fd.Name(), time.Now().Format("2006-01-02"))
+
+ var err error
+ fname := ""
+ for i := 1; err == nil && i <= 999; i++ {
+ fname = prefix + fmt.Sprintf("%03d", i)
+ _, err = os.Lstat(fname)
+ if rfw.options.Compress && err != nil {
+ _, err = os.Lstat(fname + ".gz")
+ }
+ }
+ // return an error if the last candidate file still exists
+ if err == nil {
+ return fmt.Errorf("cannot find free file to rename %s", rfw.fd.Name())
+ }
+
+ fd := rfw.fd
+ if err := fd.Close(); err != nil { // close file before rename
+ return err
+ }
+
+ if err := util.Rename(fd.Name(), fname); err != nil {
+ return err
+ }
+
+ if rfw.options.Compress {
+ go func() {
+ err := compressOldFile(fname, rfw.options.CompressionLevel)
+ if err != nil {
+ errorf("DoRotate: %v", err)
+ }
+ }()
+ }
+
+ if err := rfw.open(fd.Name()); err != nil {
+ return err
+ }
+
+ go deleteOldFiles(
+ filepath.Dir(fd.Name()),
+ filepath.Base(fd.Name()),
+ time.Now().AddDate(0, 0, -rfw.options.KeepDays),
+ )
+
+ return nil
+}
+
+func compressOldFile(fname string, compressionLevel int) error {
+ reader, err := os.Open(fname)
+ if err != nil {
+ return fmt.Errorf("compressOldFile: failed to open existing file %s: %w", fname, err)
+ }
+ defer reader.Close()
+
+ buffer := bufio.NewReader(reader)
+ fnameGz := fname + ".gz"
+ fw, err := os.OpenFile(fnameGz, os.O_WRONLY|os.O_CREATE, 0o660)
+ if err != nil {
+ return fmt.Errorf("compressOldFile: failed to open new file %s: %w", fnameGz, err)
+ }
+ defer fw.Close()
+
+ zw, err := gzip.NewWriterLevel(fw, compressionLevel)
+ if err != nil {
+ return fmt.Errorf("compressOldFile: failed to create gzip writer: %w", err)
+ }
+ defer zw.Close()
+
+ _, err = buffer.WriteTo(zw)
+ if err != nil {
+ _ = zw.Close()
+ _ = fw.Close()
+ _ = util.Remove(fname + ".gz")
+ return fmt.Errorf("compressOldFile: failed to write to gz file: %w", err)
+ }
+ _ = reader.Close()
+
+ err = util.Remove(fname)
+ if err != nil {
+ return fmt.Errorf("compressOldFile: failed to delete old file: %w", err)
+ }
+ return nil
+}
+
+func deleteOldFiles(dir, prefix string, removeBefore time.Time) {
+ err := filepath.WalkDir(dir, func(path string, d os.DirEntry, err error) (returnErr error) {
+ defer func() {
+ if r := recover(); r != nil {
+ returnErr = fmt.Errorf("unable to delete old file '%s', error: %+v", path, r)
+ }
+ }()
+
+ if err != nil {
+ return err
+ }
+ if d.IsDir() {
+ return nil
+ }
+ info, err := d.Info()
+ if err != nil {
+ return err
+ }
+ if info.ModTime().Before(removeBefore) {
+ if strings.HasPrefix(filepath.Base(path), prefix) {
+ return util.Remove(path)
+ }
+ }
+ return nil
+ })
+ if err != nil {
+ errorf("deleteOldFiles: failed to delete old file: %v", err)
+ }
+}
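
A minimal usage sketch of the new rotating writer added above; the path and rotation limits are arbitrary examples, not defaults taken from this patch:

```go
package main

import (
	"compress/gzip"

	"code.gitea.io/gitea/modules/util/rotatingfilewriter"
)

func writeRotatedLog() error {
	rfw, err := rotatingfilewriter.Open("/var/lib/gitea/log/app.log", &rotatingfilewriter.Options{
		Rotate:           true,
		MaximumSize:      32 << 20, // rotate after ~32 MiB
		RotateDaily:      true,
		KeepDays:         7,
		Compress:         true,
		CompressionLevel: gzip.DefaultCompression,
	})
	if err != nil {
		return err
	}
	defer rfw.Close()

	if _, err := rfw.Write([]byte("hello rotating writer\n")); err != nil {
		return err
	}
	return rfw.Flush()
}
```
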
diff --git a/modules/util/rotatingfilewriter/writer_test.go b/modules/util/rotatingfilewriter/writer_test.go
new file mode 100644
index 0000000000..88392797b3
--- /dev/null
+++ b/modules/util/rotatingfilewriter/writer_test.go
@@ -0,0 +1,48 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package rotatingfilewriter
+
+import (
+ "compress/gzip"
+ "io"
+ "os"
+ "path/filepath"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestCompressOldFile(t *testing.T) {
+ tmpDir := t.TempDir()
+ fname := filepath.Join(tmpDir, "test")
+ nonGzip := filepath.Join(tmpDir, "test-nonGzip")
+
+ f, err := os.OpenFile(fname, os.O_CREATE|os.O_WRONLY, 0o660)
+ assert.NoError(t, err)
+ ng, err := os.OpenFile(nonGzip, os.O_CREATE|os.O_WRONLY, 0o660)
+ assert.NoError(t, err)
+
+ for i := 0; i < 999; i++ {
+ f.WriteString("This is a test file\n")
+ ng.WriteString("This is a test file\n")
+ }
+ f.Close()
+ ng.Close()
+
+ err = compressOldFile(fname, gzip.DefaultCompression)
+ assert.NoError(t, err)
+
+ _, err = os.Lstat(fname + ".gz")
+ assert.NoError(t, err)
+
+ f, err = os.Open(fname + ".gz")
+ assert.NoError(t, err)
+ zr, err := gzip.NewReader(f)
+ assert.NoError(t, err)
+ data, err := io.ReadAll(zr)
+ assert.NoError(t, err)
+ original, err := os.ReadFile(nonGzip)
+ assert.NoError(t, err)
+ assert.Equal(t, original, data)
+}
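
(Example, not part of the patch.) For orientation, a hedged caller-side sketch of how the options exercised by the code above fit together. Only `Compress`, `CompressionLevel` and `KeepDays` appear in these hunks; the `Open` constructor and the `Write`/`Close` methods are assumed for illustration and may differ from the real package surface.

package main

import (
	"compress/gzip"

	"code.gitea.io/gitea/modules/util/rotatingfilewriter"
)

func main() {
	// Open, Write and Close are assumptions for this sketch; the Options
	// fields below are the ones referenced by DoRotate, compressOldFile
	// and deleteOldFiles in the hunks above.
	w, err := rotatingfilewriter.Open("/var/lib/gitea/log/gitea.log", &rotatingfilewriter.Options{
		Compress:         true,                    // gzip rotated files in a background goroutine
		CompressionLevel: gzip.DefaultCompression, // forwarded to compressOldFile
		KeepDays:         7,                       // cutoff passed to deleteOldFiles
	})
	if err != nil {
		panic(err)
	}
	defer w.Close()

	_, _ = w.Write([]byte("hello rotating writer\n"))
}
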
diff --git a/modules/web/routing/logger.go b/modules/web/routing/logger.go
index d1b0ff0cda..b58065aa73 100644
--- a/modules/web/routing/logger.go
+++ b/modules/web/routing/logger.go
@@ -5,6 +5,7 @@ package routing
import (
"net/http"
+ "strings"
"time"
"code.gitea.io/gitea/modules/context"
@@ -25,18 +26,18 @@ func NewLoggerHandler() func(next http.Handler) http.Handler {
}
var (
- startMessage = log.NewColoredValueBytes("started ", log.DEBUG.Color())
- slowMessage = log.NewColoredValueBytes("slow ", log.WARN.Color())
- pollingMessage = log.NewColoredValueBytes("polling ", log.INFO.Color())
- failedMessage = log.NewColoredValueBytes("failed ", log.WARN.Color())
- completedMessage = log.NewColoredValueBytes("completed", log.INFO.Color())
- unknownHandlerMessage = log.NewColoredValueBytes("completed", log.ERROR.Color())
+ startMessage = log.NewColoredValue("started ", log.DEBUG.ColorAttributes()...)
+ slowMessage = log.NewColoredValue("slow ", log.WARN.ColorAttributes()...)
+ pollingMessage = log.NewColoredValue("polling ", log.INFO.ColorAttributes()...)
+ failedMessage = log.NewColoredValue("failed ", log.WARN.ColorAttributes()...)
+ completedMessage = log.NewColoredValue("completed", log.INFO.ColorAttributes()...)
+ unknownHandlerMessage = log.NewColoredValue("completed", log.ERROR.ColorAttributes()...)
)
func logPrinter(logger log.Logger) func(trigger Event, record *requestRecord) {
return func(trigger Event, record *requestRecord) {
if trigger == StartEvent {
- if !logger.IsTrace() {
+ if !logger.LevelEnabled(log.TRACE) {
// for performance, if the "started" message shouldn't be logged, we just return as early as possible
// developers can set the router log level to TRACE to get the "started" request messages.
return
@@ -59,12 +60,12 @@ func logPrinter(logger log.Logger) func(trigger Event, record *requestRecord) {
if trigger == StillExecutingEvent {
message := slowMessage
- level := log.WARN
+ logf := logger.Warn
if isLongPolling {
- level = log.INFO
+ logf = logger.Info
message = pollingMessage
}
- _ = logger.Log(0, level, "router: %s %v %s for %s, elapsed %v @ %s",
+ logf("router: %s %v %s for %s, elapsed %v @ %s",
message,
log.ColoredMethod(req.Method), req.RequestURI, req.RemoteAddr,
log.ColoredTime(time.Since(record.startTime)),
@@ -74,7 +75,7 @@ func logPrinter(logger log.Logger) func(trigger Event, record *requestRecord) {
}
if panicErr != nil {
- _ = logger.Log(0, log.WARN, "router: %s %v %s for %s, panic in %v @ %s, err=%v",
+ logger.Warn("router: %s %v %s for %s, panic in %v @ %s, err=%v",
failedMessage,
log.ColoredMethod(req.Method), req.RequestURI, req.RemoteAddr,
log.ColoredTime(time.Since(record.startTime)),
@@ -88,14 +89,17 @@ func logPrinter(logger log.Logger) func(trigger Event, record *requestRecord) {
if v, ok := record.responseWriter.(context.ResponseWriter); ok {
status = v.Status()
}
- level := log.INFO
+ logf := log.Info
+ if strings.HasPrefix(req.RequestURI, "/assets/") {
+ logf = log.Trace
+ }
message := completedMessage
if isUnknownHandler {
- level = log.ERROR
+ logf = log.Error
message = unknownHandlerMessage
}
- _ = logger.Log(0, level, "router: %s %v %s for %s, %v %v in %v @ %s",
+ logf("router: %s %v %s for %s, %v %v in %v @ %s",
message,
log.ColoredMethod(req.Method), req.RequestURI, req.RemoteAddr,
log.ColoredStatus(status), log.ColoredStatus(status, http.StatusText(status)), log.ColoredTime(time.Since(record.startTime)),