* Removed unused SanitizeBytes method.
* Do not apply the media URL prefix to data: URIs in img src attributes.
* Added a regression test to prevent data-URI images from regressing.
}
case html.ElementNode:
if node.Data == "img" {
- attrs := node.Attr
- for idx, attr := range attrs {
+ for idx, attr := range node.Attr {
if attr.Key != "src" {
continue
}
- link := []byte(attr.Val)
- if len(link) > 0 && !IsLink(link) {
+ if len(attr.Val) > 0 && !isLinkStr(attr.Val) && !strings.HasPrefix(attr.Val, "data:image/") {
prefix := ctx.URLPrefix
if ctx.IsWiki {
prefix = util.URLJoin(prefix, "wiki", "raw")
}
prefix = strings.Replace(prefix, "/src/", "/media/", 1)
- lnk := string(link)
- lnk = util.URLJoin(prefix, lnk)
- link = []byte(lnk)
+ // attr is a copy of the slice element (Go range semantics), so
+ // mutating attr.Val here is only visible once it is written back.
+ attr.Val = util.URLJoin(prefix, attr.Val)
}
- node.Attr[idx].Val = string(link)
+ // Write the (possibly updated) attribute back through the index;
+ // without this, the URLJoin result above is silently discarded.
+ node.Attr[idx] = attr
}
} else if node.Data == "a" {
visitText = false
assert.NoError(t, err)
assert.NotContains(t, res.String(), "<html")
}
+
+// TestIssue16020 is a regression test: an <img> whose src is a
+// data: URI must pass through PostProcess byte-for-byte, with no
+// repository media URL prefix applied to it.
+func TestIssue16020(t *testing.T) {
+ setting.AppURL = AppURL
+ setting.AppSubURL = AppSubURL
+
+ // Repository metas as they would appear in production; not used
+ // for data: URIs but required for a realistic PostProcess call.
+ var localMetas = map[string]string{
+ "user": "go-gitea",
+ "repo": "gitea",
+ }
+
+ data := `<img src="data:image/png;base64,i//V"/>`
+
+ var res strings.Builder
+ err := PostProcess(&RenderContext{
+ URLPrefix: "https://example.com",
+ Metas: localMetas,
+ }, strings.NewReader(data), &res)
+ assert.NoError(t, err)
+ // Output must equal input exactly — the data URI was left untouched.
+ assert.Equal(t, data, res.String())
+}
NewSanitizer()
return sanitizer.policy.SanitizeReader(r)
}
-
-// SanitizeBytes takes a []byte slice that contains a HTML fragment or document and applies policy whitelist.
-func SanitizeBytes(b []byte) []byte {
- if len(b) == 0 {
- // nothing to sanitize
- return b
- }
- NewSanitizer()
- return sanitizer.policy.SanitizeBytes(b)
-}
for i := 0; i < len(testCases); i += 2 {
assert.Equal(t, testCases[i+1], Sanitize(testCases[i]))
- assert.Equal(t, testCases[i+1], string(SanitizeBytes([]byte(testCases[i]))))
}
}