author    Lunny Xiao <xiaolunwen@gmail.com>  2017-09-17 01:17:57 +0800
committer Lauris BH <lauris@nix.lv>          2017-09-16 20:17:57 +0300
commit    52e11b24bf5e395d83ea58c1b0fd6922efe16add (patch)
tree      f00c9da35c1f2afc3446b8607217e4d4315959ec /modules/markdown
parent    911ca0215377b34559f2304a22dce863e219b255 (diff)
download  gitea-52e11b24bf5e395d83ea58c1b0fd6922efe16add.tar.gz
          gitea-52e11b24bf5e395d83ea58c1b0fd6922efe16add.zip
Restructure markup & markdown to prepare for multiple markup language… (#2411)
* restructure markup & markdown to prepare for multiple markup languages support
* adjust some functions between markdown and markup
* fix tests
* improve the comments
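
The net effect of the restructure is that modules/markdown keeps only the blackfriday renderer plus thin wrappers, while the shared link/reference handling moves into a new modules/markup package. A minimal caller-side sketch of the new entry points added at the bottom of markdown.go (the input string and URL prefix below are made-up illustration values):

    package main

    import (
        "fmt"

        "code.gitea.io/gitea/modules/markdown"
    )

    func main() {
        // Callers keep using the markdown package; internally Render/RenderString/
        // RenderWiki now delegate to modules/markup, which selects the parser
        // registered under MarkupName ("markdown") via the placeholder name "a.md".
        html := markdown.RenderString("see #1234 and [[WikiPage]]",
            "https://try.gitea.io/user/repo/src/master", nil)
        fmt.Println(html)
    }
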
Diffstat (limited to 'modules/markdown')
-rw-r--r--  modules/markdown/markdown.go        574
-rw-r--r--  modules/markdown/markdown_test.go   566
-rw-r--r--  modules/markdown/sanitizer.go        58
-rw-r--r--  modules/markdown/sanitizer_test.go   44
4 files changed, 112 insertions, 1130 deletions
diff --git a/modules/markdown/markdown.go b/modules/markdown/markdown.go
index ed673f2056..6cf2d9eaa1 100644
--- a/modules/markdown/markdown.go
+++ b/modules/markdown/markdown.go
@@ -6,107 +6,14 @@ package markdown
import (
"bytes"
- "fmt"
- "io"
- "net/url"
- "path"
- "path/filepath"
- "regexp"
"strings"
- "code.gitea.io/gitea/modules/base"
- "code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/markup"
"code.gitea.io/gitea/modules/setting"
- "github.com/Unknwon/com"
"github.com/russross/blackfriday"
- "golang.org/x/net/html"
)
-// Issue name styles
-const (
- IssueNameStyleNumeric = "numeric"
- IssueNameStyleAlphanumeric = "alphanumeric"
-)
-
-// IsMarkdownFile reports whether name looks like a Markdown file
-// based on its extension.
-func IsMarkdownFile(name string) bool {
- extension := strings.ToLower(filepath.Ext(name))
- for _, ext := range setting.Markdown.FileExtensions {
- if strings.ToLower(ext) == extension {
- return true
- }
- }
- return false
-}
-
-var (
- // NOTE: All below regex matching do not perform any extra validation.
- // Thus a link is produced even if the user does not exist, the issue does not exist, the commit does not exist, etc.
- // While fast, this is also incorrect and lead to false positives.
-
- // MentionPattern matches string that mentions someone, e.g. @Unknwon
- MentionPattern = regexp.MustCompile(`(\s|^|\W)@[0-9a-zA-Z-_\.]+`)
-
- // IssueNumericPattern matches string that references to a numeric issue, e.g. #1287
- IssueNumericPattern = regexp.MustCompile(`( |^|\()#[0-9]+\b`)
- // IssueAlphanumericPattern matches string that references to an alphanumeric issue, e.g. ABC-1234
- IssueAlphanumericPattern = regexp.MustCompile(`( |^|\()[A-Z]{1,10}-[1-9][0-9]*\b`)
- // CrossReferenceIssueNumericPattern matches string that references a numeric issue in a different repository
- // e.g. gogits/gogs#12345
- CrossReferenceIssueNumericPattern = regexp.MustCompile(`( |^)[0-9a-zA-Z]+/[0-9a-zA-Z]+#[0-9]+\b`)
-
- // Sha1CurrentPattern matches string that represents a commit SHA, e.g. d8a994ef243349f321568f9e36d5c3f444b99cae
- // Although SHA1 hashes are 40 chars long, the regex matches the hash from 7 to 40 chars in length
- // so that abbreviated hash links can be used as well. This matches git and github useability.
- Sha1CurrentPattern = regexp.MustCompile(`(?:^|\s|\()([0-9a-f]{7,40})\b`)
-
- // ShortLinkPattern matches short but difficult to parse [[name|link|arg=test]] syntax
- ShortLinkPattern = regexp.MustCompile(`(\[\[.*?\]\]\w*)`)
-
- // AnySHA1Pattern allows to split url containing SHA into parts
- AnySHA1Pattern = regexp.MustCompile(`(http\S*)://(\S+)/(\S+)/(\S+)/(\S+)/([0-9a-f]{40})(?:/?([^#\s]+)?(?:#(\S+))?)?`)
-
- validLinksPattern = regexp.MustCompile(`^[a-z][\w-]+://`)
-)
-
-// regexp for full links to issues/pulls
-var issueFullPattern *regexp.Regexp
-
-// InitMarkdown initialize regexps for markdown parsing
-func InitMarkdown() {
- getIssueFullPattern()
-}
-
-func getIssueFullPattern() *regexp.Regexp {
- if issueFullPattern == nil {
- appURL := setting.AppURL
- if len(appURL) > 0 && appURL[len(appURL)-1] != '/' {
- appURL += "/"
- }
- issueFullPattern = regexp.MustCompile(appURL +
- `\w+/\w+/(?:issues|pulls)/((?:\w{1,10}-)?[1-9][0-9]*)([\?|#]\S+.(\S+)?)?\b`)
- }
- return issueFullPattern
-}
-
-// isLink reports whether link fits valid format.
-func isLink(link []byte) bool {
- return validLinksPattern.Match(link)
-}
-
-// FindAllMentions matches mention patterns in given content
-// and returns a list of found user names without @ prefix.
-func FindAllMentions(content string) []string {
- mentions := MentionPattern.FindAllString(content, -1)
- for i := range mentions {
- mentions[i] = mentions[i][strings.Index(mentions[i], "@")+1:] // Strip @ character
- }
- return mentions
-}
-
// Renderer is a extended version of underlying render object.
type Renderer struct {
blackfriday.Renderer
@@ -116,13 +23,13 @@ type Renderer struct {
// Link defines how formal links should be processed to produce corresponding HTML elements.
func (r *Renderer) Link(out *bytes.Buffer, link []byte, title []byte, content []byte) {
- if len(link) > 0 && !isLink(link) {
+ if len(link) > 0 && !markup.IsLink(link) {
if link[0] != '#' {
lnk := string(link)
if r.isWikiMarkdown {
- lnk = URLJoin("wiki", lnk)
+ lnk = markup.URLJoin("wiki", lnk)
}
- mLink := URLJoin(r.urlPrefix, lnk)
+ mLink := markup.URLJoin(r.urlPrefix, lnk)
link = []byte(mLink)
}
}
@@ -190,11 +97,11 @@ var (
func (r *Renderer) Image(out *bytes.Buffer, link []byte, title []byte, alt []byte) {
prefix := r.urlPrefix
if r.isWikiMarkdown {
- prefix = URLJoin(prefix, "wiki", "src")
+ prefix = markup.URLJoin(prefix, "wiki", "src")
}
prefix = strings.Replace(prefix, "/src/", "/raw/", 1)
if len(link) > 0 {
- if isLink(link) {
+ if markup.IsLink(link) {
// External link with .svg suffix usually means CI status.
// TODO: define a keyword to allow non-svg images render as external link.
if bytes.HasSuffix(link, svgSuffix) || bytes.Contains(link, svgSuffixWithMark) {
@@ -203,7 +110,7 @@ func (r *Renderer) Image(out *bytes.Buffer, link []byte, title []byte, alt []byt
}
} else {
lnk := string(link)
- lnk = URLJoin(prefix, lnk)
+ lnk = markup.URLJoin(prefix, lnk)
lnk = strings.Replace(lnk, " ", "+", -1)
link = []byte(lnk)
}
@@ -216,351 +123,6 @@ func (r *Renderer) Image(out *bytes.Buffer, link []byte, title []byte, alt []byt
out.WriteString("</a>")
}
-// cutoutVerbosePrefix cutouts URL prefix including sub-path to
-// return a clean unified string of request URL path.
-func cutoutVerbosePrefix(prefix string) string {
- if len(prefix) == 0 || prefix[0] != '/' {
- return prefix
- }
- count := 0
- for i := 0; i < len(prefix); i++ {
- if prefix[i] == '/' {
- count++
- }
- if count >= 3+setting.AppSubURLDepth {
- return prefix[:i]
- }
- }
- return prefix
-}
-
-// URLJoin joins url components, like path.Join, but preserving contents
-func URLJoin(base string, elems ...string) string {
- u, err := url.Parse(base)
- if err != nil {
- log.Error(4, "URLJoin: Invalid base URL %s", base)
- return ""
- }
- joinArgs := make([]string, 0, len(elems)+1)
- joinArgs = append(joinArgs, u.Path)
- joinArgs = append(joinArgs, elems...)
- u.Path = path.Join(joinArgs...)
- return u.String()
-}
-
-// RenderIssueIndexPattern renders issue indexes to corresponding links.
-func RenderIssueIndexPattern(rawBytes []byte, urlPrefix string, metas map[string]string) []byte {
- urlPrefix = cutoutVerbosePrefix(urlPrefix)
-
- pattern := IssueNumericPattern
- if metas["style"] == IssueNameStyleAlphanumeric {
- pattern = IssueAlphanumericPattern
- }
-
- ms := pattern.FindAll(rawBytes, -1)
- for _, m := range ms {
- if m[0] == ' ' || m[0] == '(' {
- m = m[1:] // ignore leading space or opening parentheses
- }
- var link string
- if metas == nil {
- link = fmt.Sprintf(`<a href="%s">%s</a>`, URLJoin(urlPrefix, "issues", string(m[1:])), m)
- } else {
- // Support for external issue tracker
- if metas["style"] == IssueNameStyleAlphanumeric {
- metas["index"] = string(m)
- } else {
- metas["index"] = string(m[1:])
- }
- link = fmt.Sprintf(`<a href="%s">%s</a>`, com.Expand(metas["format"], metas), m)
- }
- rawBytes = bytes.Replace(rawBytes, m, []byte(link), 1)
- }
- return rawBytes
-}
-
-// IsSameDomain checks if given url string has the same hostname as current Gitea instance
-func IsSameDomain(s string) bool {
- if strings.HasPrefix(s, "/") {
- return true
- }
- if uapp, err := url.Parse(setting.AppURL); err == nil {
- if u, err := url.Parse(s); err == nil {
- return u.Host == uapp.Host
- }
- return false
- }
- return false
-}
-
-// renderFullSha1Pattern renders SHA containing URLs
-func renderFullSha1Pattern(rawBytes []byte, urlPrefix string) []byte {
- ms := AnySHA1Pattern.FindAllSubmatch(rawBytes, -1)
- for _, m := range ms {
- all := m[0]
- protocol := string(m[1])
- paths := string(m[2])
- path := protocol + "://" + paths
- author := string(m[3])
- repoName := string(m[4])
- path = URLJoin(path, author, repoName)
- ltype := "src"
- itemType := m[5]
- if IsSameDomain(paths) {
- ltype = string(itemType)
- } else if string(itemType) == "commit" {
- ltype = "commit"
- }
- sha := m[6]
- var subtree string
- if len(m) > 7 && len(m[7]) > 0 {
- subtree = string(m[7])
- }
- var line []byte
- if len(m) > 8 && len(m[8]) > 0 {
- line = m[8]
- }
- urlSuffix := ""
- text := base.ShortSha(string(sha))
- if subtree != "" {
- urlSuffix = "/" + subtree
- text += urlSuffix
- }
- if line != nil {
- value := string(line)
- urlSuffix += "#"
- urlSuffix += value
- text += " ("
- text += value
- text += ")"
- }
- rawBytes = bytes.Replace(rawBytes, all, []byte(fmt.Sprintf(
- `<a href="%s">%s</a>`, URLJoin(path, ltype, string(sha))+urlSuffix, text)), -1)
- }
- return rawBytes
-}
-
-// RenderFullIssuePattern renders issues-like URLs
-func RenderFullIssuePattern(rawBytes []byte) []byte {
- ms := getIssueFullPattern().FindAllSubmatch(rawBytes, -1)
- for _, m := range ms {
- all := m[0]
- id := string(m[1])
- text := "#" + id
- // TODO if m[2] is not nil, then link is to a comment,
- // and we should indicate that in the text somehow
- rawBytes = bytes.Replace(rawBytes, all, []byte(fmt.Sprintf(
- `<a href="%s">%s</a>`, string(all), text)), -1)
- }
- return rawBytes
-}
-
-func firstIndexOfByte(sl []byte, target byte) int {
- for i := 0; i < len(sl); i++ {
- if sl[i] == target {
- return i
- }
- }
- return -1
-}
-
-func lastIndexOfByte(sl []byte, target byte) int {
- for i := len(sl) - 1; i >= 0; i-- {
- if sl[i] == target {
- return i
- }
- }
- return -1
-}
-
-// RenderShortLinks processes [[syntax]]
-//
-// noLink flag disables making link tags when set to true
-// so this function just replaces the whole [[...]] with the content text
-//
-// isWikiMarkdown is a flag to choose linking url prefix
-func RenderShortLinks(rawBytes []byte, urlPrefix string, noLink bool, isWikiMarkdown bool) []byte {
- ms := ShortLinkPattern.FindAll(rawBytes, -1)
- for _, m := range ms {
- orig := bytes.TrimSpace(m)
- m = orig[2:]
- tailPos := lastIndexOfByte(m, ']') + 1
- tail := []byte{}
- if tailPos < len(m) {
- tail = m[tailPos:]
- m = m[:tailPos-1]
- }
- m = m[:len(m)-2]
- props := map[string]string{}
-
- // MediaWiki uses [[link|text]], while GitHub uses [[text|link]]
- // It makes page handling terrible, but we prefer GitHub syntax
- // And fall back to MediaWiki only when it is obvious from the look
- // Of text and link contents
- sl := bytes.Split(m, []byte("|"))
- for _, v := range sl {
- switch bytes.Count(v, []byte("=")) {
-
- // Piped args without = sign, these are mandatory arguments
- case 0:
- {
- sv := string(v)
- if props["name"] == "" {
- if isLink(v) {
- // If we clearly see it is a link, we save it so
-
- // But first we need to ensure, that if both mandatory args provided
- // look like links, we stick to GitHub syntax
- if props["link"] != "" {
- props["name"] = props["link"]
- }
-
- props["link"] = strings.TrimSpace(sv)
- } else {
- props["name"] = sv
- }
- } else {
- props["link"] = strings.TrimSpace(sv)
- }
- }
-
- // Piped args with = sign, these are optional arguments
- case 1:
- {
- sep := firstIndexOfByte(v, '=')
- key, val := string(v[:sep]), html.UnescapeString(string(v[sep+1:]))
- lastCharIndex := len(val) - 1
- if (val[0] == '"' || val[0] == '\'') && (val[lastCharIndex] == '"' || val[lastCharIndex] == '\'') {
- val = val[1:lastCharIndex]
- }
- props[key] = val
- }
- }
- }
-
- var name string
- var link string
- if props["link"] != "" {
- link = props["link"]
- } else if props["name"] != "" {
- link = props["name"]
- }
- if props["title"] != "" {
- name = props["title"]
- } else if props["name"] != "" {
- name = props["name"]
- } else {
- name = link
- }
-
- name += string(tail)
- image := false
- ext := filepath.Ext(string(link))
- if ext != "" {
- switch ext {
- case ".jpg", ".jpeg", ".png", ".tif", ".tiff", ".webp", ".gif", ".bmp", ".ico", ".svg":
- {
- image = true
- }
- }
- }
- absoluteLink := isLink([]byte(link))
- if !absoluteLink {
- link = strings.Replace(link, " ", "+", -1)
- }
- if image {
- if !absoluteLink {
- if IsSameDomain(urlPrefix) {
- urlPrefix = strings.Replace(urlPrefix, "/src/", "/raw/", 1)
- }
- if isWikiMarkdown {
- link = URLJoin("wiki", "raw", link)
- }
- link = URLJoin(urlPrefix, link)
- }
- title := props["title"]
- if title == "" {
- title = props["alt"]
- }
- if title == "" {
- title = path.Base(string(name))
- }
- alt := props["alt"]
- if alt == "" {
- alt = name
- }
- if alt != "" {
- alt = `alt="` + alt + `"`
- }
- name = fmt.Sprintf(`<img src="%s" %s title="%s" />`, link, alt, title)
- } else if !absoluteLink {
- if isWikiMarkdown {
- link = URLJoin("wiki", link)
- }
- link = URLJoin(urlPrefix, link)
- }
- if noLink {
- rawBytes = bytes.Replace(rawBytes, orig, []byte(name), -1)
- } else {
- rawBytes = bytes.Replace(rawBytes, orig,
- []byte(fmt.Sprintf(`<a href="%s">%s</a>`, link, name)), -1)
- }
- }
- return rawBytes
-}
-
-// RenderCrossReferenceIssueIndexPattern renders issue indexes from other repositories to corresponding links.
-func RenderCrossReferenceIssueIndexPattern(rawBytes []byte, urlPrefix string, metas map[string]string) []byte {
- ms := CrossReferenceIssueNumericPattern.FindAll(rawBytes, -1)
- for _, m := range ms {
- if m[0] == ' ' || m[0] == '(' {
- m = m[1:] // ignore leading space or opening parentheses
- }
-
- repo := string(bytes.Split(m, []byte("#"))[0])
- issue := string(bytes.Split(m, []byte("#"))[1])
-
- link := fmt.Sprintf(`<a href="%s">%s</a>`, URLJoin(setting.AppURL, repo, "issues", issue), m)
- rawBytes = bytes.Replace(rawBytes, m, []byte(link), 1)
- }
- return rawBytes
-}
-
-// renderSha1CurrentPattern renders SHA1 strings to corresponding links that assumes in the same repository.
-func renderSha1CurrentPattern(rawBytes []byte, urlPrefix string) []byte {
- ms := Sha1CurrentPattern.FindAllSubmatch(rawBytes, -1)
- for _, m := range ms {
- hash := m[1]
- // The regex does not lie, it matches the hash pattern.
- // However, a regex cannot know if a hash actually exists or not.
- // We could assume that a SHA1 hash should probably contain alphas AND numerics
- // but that is not always the case.
- // Although unlikely, deadbeef and 1234567 are valid short forms of SHA1 hash
- // as used by git and github for linking and thus we have to do similar.
- rawBytes = bytes.Replace(rawBytes, hash, []byte(fmt.Sprintf(
- `<a href="%s">%s</a>`, URLJoin(urlPrefix, "commit", string(hash)), base.ShortSha(string(hash)))), -1)
- }
- return rawBytes
-}
-
-// RenderSpecialLink renders mentions, indexes and SHA1 strings to corresponding links.
-func RenderSpecialLink(rawBytes []byte, urlPrefix string, metas map[string]string, isWikiMarkdown bool) []byte {
- ms := MentionPattern.FindAll(rawBytes, -1)
- for _, m := range ms {
- m = m[bytes.Index(m, []byte("@")):]
- rawBytes = bytes.Replace(rawBytes, m,
- []byte(fmt.Sprintf(`<a href="%s">%s</a>`, URLJoin(setting.AppURL, string(m[1:])), m)), -1)
- }
-
- rawBytes = RenderFullIssuePattern(rawBytes)
- rawBytes = RenderShortLinks(rawBytes, urlPrefix, false, isWikiMarkdown)
- rawBytes = RenderIssueIndexPattern(rawBytes, urlPrefix, metas)
- rawBytes = RenderCrossReferenceIssueIndexPattern(rawBytes, urlPrefix, metas)
- rawBytes = renderFullSha1Pattern(rawBytes, urlPrefix)
- rawBytes = renderSha1CurrentPattern(rawBytes, urlPrefix)
- return rawBytes
-}
-
// RenderRaw renders Markdown to HTML without handling special links.
func RenderRaw(body []byte, urlPrefix string, wikiMarkdown bool) []byte {
htmlFlags := 0
@@ -589,107 +151,6 @@ func RenderRaw(body []byte, urlPrefix string, wikiMarkdown bool) []byte {
}
var (
- leftAngleBracket = []byte("</")
- rightAngleBracket = []byte(">")
-)
-
-var noEndTags = []string{"img", "input", "br", "hr"}
-
-// PostProcess treats different types of HTML differently,
-// and only renders special links for plain text blocks.
-func PostProcess(rawHTML []byte, urlPrefix string, metas map[string]string, isWikiMarkdown bool) []byte {
- startTags := make([]string, 0, 5)
- var buf bytes.Buffer
- tokenizer := html.NewTokenizer(bytes.NewReader(rawHTML))
-
-OUTER_LOOP:
- for html.ErrorToken != tokenizer.Next() {
- token := tokenizer.Token()
- switch token.Type {
- case html.TextToken:
- buf.Write(RenderSpecialLink([]byte(token.String()), urlPrefix, metas, isWikiMarkdown))
-
- case html.StartTagToken:
- buf.WriteString(token.String())
- tagName := token.Data
- // If this is an excluded tag, we skip processing all output until a close tag is encountered.
- if strings.EqualFold("a", tagName) || strings.EqualFold("code", tagName) || strings.EqualFold("pre", tagName) {
- stackNum := 1
- for html.ErrorToken != tokenizer.Next() {
- token = tokenizer.Token()
-
- // Copy the token to the output verbatim
- buf.Write(RenderShortLinks([]byte(token.String()), urlPrefix, true, isWikiMarkdown))
-
- if token.Type == html.StartTagToken && !com.IsSliceContainsStr(noEndTags, token.Data) {
- stackNum++
- }
-
- // If this is the close tag to the outer-most, we are done
- if token.Type == html.EndTagToken {
- stackNum--
-
- if stackNum <= 0 && strings.EqualFold(tagName, token.Data) {
- break
- }
- }
- }
- continue OUTER_LOOP
- }
-
- if !com.IsSliceContainsStr(noEndTags, tagName) {
- startTags = append(startTags, tagName)
- }
-
- case html.EndTagToken:
- if len(startTags) == 0 {
- buf.WriteString(token.String())
- break
- }
-
- buf.Write(leftAngleBracket)
- buf.WriteString(startTags[len(startTags)-1])
- buf.Write(rightAngleBracket)
- startTags = startTags[:len(startTags)-1]
- default:
- buf.WriteString(token.String())
- }
- }
-
- if io.EOF == tokenizer.Err() {
- return buf.Bytes()
- }
-
- // If we are not at the end of the input, then some other parsing error has occurred,
- // so return the input verbatim.
- return rawHTML
-}
-
-// Render renders Markdown to HTML with all specific handling stuff.
-func render(rawBytes []byte, urlPrefix string, metas map[string]string, isWikiMarkdown bool) []byte {
- urlPrefix = strings.Replace(urlPrefix, " ", "+", -1)
- result := RenderRaw(rawBytes, urlPrefix, isWikiMarkdown)
- result = PostProcess(result, urlPrefix, metas, isWikiMarkdown)
- result = SanitizeBytes(result)
- return result
-}
-
-// Render renders Markdown to HTML with all specific handling stuff.
-func Render(rawBytes []byte, urlPrefix string, metas map[string]string) []byte {
- return render(rawBytes, urlPrefix, metas, false)
-}
-
-// RenderString renders Markdown to HTML with special links and returns string type.
-func RenderString(raw, urlPrefix string, metas map[string]string) string {
- return string(render([]byte(raw), urlPrefix, metas, false))
-}
-
-// RenderWiki renders markdown wiki page to HTML and return HTML string
-func RenderWiki(rawBytes []byte, urlPrefix string, metas map[string]string) string {
- return string(render(rawBytes, urlPrefix, metas, true))
-}
-
-var (
// MarkupName describes markup's name
MarkupName = "markdown"
)
@@ -714,5 +175,26 @@ func (Parser) Extensions() []string {
// Render implements markup.Parser
func (Parser) Render(rawBytes []byte, urlPrefix string, metas map[string]string, isWiki bool) []byte {
- return render(rawBytes, urlPrefix, metas, isWiki)
+ return RenderRaw(rawBytes, urlPrefix, isWiki)
+}
+
+// Render renders Markdown to HTML with all specific handling stuff.
+func Render(rawBytes []byte, urlPrefix string, metas map[string]string) []byte {
+ return markup.Render("a.md", rawBytes, urlPrefix, metas)
+}
+
+// RenderString renders Markdown to HTML with special links and returns string type.
+func RenderString(raw, urlPrefix string, metas map[string]string) string {
+ return markup.RenderString("a.md", raw, urlPrefix, metas)
+}
+
+// RenderWiki renders markdown wiki page to HTML and return HTML string
+func RenderWiki(rawBytes []byte, urlPrefix string, metas map[string]string) string {
+ return markup.RenderWiki("a.md", rawBytes, urlPrefix, metas)
+}
+
+// IsMarkdownFile reports whether name looks like a Markdown file
+// based on its extension.
+func IsMarkdownFile(name string) bool {
+ return markup.IsMarkupFile(name, MarkupName)
}
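
URLJoin, isLink and the issue/SHA regexps removed above now live in modules/markup (the replacement calls in the hunks are markup.URLJoin and markup.IsLink). A sketch of the expected URLJoin behaviour, reconstructed from the TestURLJoin cases removed from markdown_test.go below; it assumes markup.URLJoin keeps the old semantics (url.Parse on the base, path.Join on the path components):

    package main

    import (
        "fmt"

        "code.gitea.io/gitea/modules/markup"
    )

    func main() {
        // Expected results taken from the removed TestURLJoin table.
        fmt.Println(markup.URLJoin("https://try.gitea.io", "a/b", "c"))           // https://try.gitea.io/a/b/c
        fmt.Println(markup.URLJoin("https://try.gitea.io/", "/a/./b/", "../c/"))  // https://try.gitea.io/a/c
        fmt.Println(markup.URLJoin("a/", "b/c/", "/../d/"))                       // a/b/d
    }
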
diff --git a/modules/markdown/markdown_test.go b/modules/markdown/markdown_test.go
index 4506a29b1e..1b57e4f203 100644
--- a/modules/markdown/markdown_test.go
+++ b/modules/markdown/markdown_test.go
@@ -7,12 +7,13 @@ package markdown_test
import (
"fmt"
"strconv"
- "testing"
-
"strings"
+ "testing"
. "code.gitea.io/gitea/modules/markdown"
+ "code.gitea.io/gitea/modules/markup"
"code.gitea.io/gitea/modules/setting"
+
"github.com/stretchr/testify/assert"
)
@@ -24,24 +25,24 @@ var numericMetas = map[string]string{
"format": "https://someurl.com/{user}/{repo}/{index}",
"user": "someUser",
"repo": "someRepo",
- "style": IssueNameStyleNumeric,
+ "style": markup.IssueNameStyleNumeric,
}
var alphanumericMetas = map[string]string{
"format": "https://someurl.com/{user}/{repo}/{index}",
"user": "someUser",
"repo": "someRepo",
- "style": IssueNameStyleAlphanumeric,
+ "style": markup.IssueNameStyleAlphanumeric,
}
// numericLink an HTML to a numeric-style issue
func numericIssueLink(baseURL string, index int) string {
- return link(URLJoin(baseURL, strconv.Itoa(index)), fmt.Sprintf("#%d", index))
+ return link(markup.URLJoin(baseURL, strconv.Itoa(index)), fmt.Sprintf("#%d", index))
}
// alphanumLink an HTML link to an alphanumeric-style issue
func alphanumIssueLink(baseURL string, name string) string {
- return link(URLJoin(baseURL, name), name)
+ return link(markup.URLJoin(baseURL, name), name)
}
// urlContentsLink an HTML link whose contents is the target URL
@@ -56,175 +57,7 @@ func link(href, contents string) string {
func testRenderIssueIndexPattern(t *testing.T, input, expected string, metas map[string]string) {
assert.Equal(t, expected,
- string(RenderIssueIndexPattern([]byte(input), AppSubURL, metas)))
-}
-
-func TestURLJoin(t *testing.T) {
- type test struct {
- Expected string
- Base string
- Elements []string
- }
- newTest := func(expected, base string, elements ...string) test {
- return test{Expected: expected, Base: base, Elements: elements}
- }
- for _, test := range []test{
- newTest("https://try.gitea.io/a/b/c",
- "https://try.gitea.io", "a/b", "c"),
- newTest("https://try.gitea.io/a/b/c",
- "https://try.gitea.io/", "/a/b/", "/c/"),
- newTest("https://try.gitea.io/a/c",
- "https://try.gitea.io/", "/a/./b/", "../c/"),
- newTest("a/b/c",
- "a", "b/c/"),
- newTest("a/b/d",
- "a/", "b/c/", "/../d/"),
- } {
- assert.Equal(t, test.Expected, URLJoin(test.Base, test.Elements...))
- }
-}
-
-func TestRender_IssueIndexPattern(t *testing.T) {
- // numeric: render inputs without valid mentions
- test := func(s string) {
- testRenderIssueIndexPattern(t, s, s, nil)
- testRenderIssueIndexPattern(t, s, s, numericMetas)
- }
-
- // should not render anything when there are no mentions
- test("")
- test("this is a test")
- test("test 123 123 1234")
- test("#")
- test("# # #")
- test("# 123")
- test("#abcd")
- test("##1234")
- test("test#1234")
- test("#1234test")
- test(" test #1234test")
-
- // should not render issue mention without leading space
- test("test#54321 issue")
-
- // should not render issue mention without trailing space
- test("test #54321issue")
-}
-
-func TestRender_IssueIndexPattern2(t *testing.T) {
- setting.AppURL = AppURL
- setting.AppSubURL = AppSubURL
-
- // numeric: render inputs with valid mentions
- test := func(s, expectedFmt string, indices ...int) {
- links := make([]interface{}, len(indices))
- for i, index := range indices {
- links[i] = numericIssueLink(URLJoin(setting.AppSubURL, "issues"), index)
- }
- expectedNil := fmt.Sprintf(expectedFmt, links...)
- testRenderIssueIndexPattern(t, s, expectedNil, nil)
-
- for i, index := range indices {
- links[i] = numericIssueLink("https://someurl.com/someUser/someRepo/", index)
- }
- expectedNum := fmt.Sprintf(expectedFmt, links...)
- testRenderIssueIndexPattern(t, s, expectedNum, numericMetas)
- }
-
- // should render freestanding mentions
- test("#1234 test", "%s test", 1234)
- test("test #8 issue", "test %s issue", 8)
- test("test issue #1234", "test issue %s", 1234)
-
- // should render mentions in parentheses
- test("(#54321 issue)", "(%s issue)", 54321)
- test("test (#9801 extra) issue", "test (%s extra) issue", 9801)
- test("test (#1)", "test (%s)", 1)
-
- // should render multiple issue mentions in the same line
- test("#54321 #1243", "%s %s", 54321, 1243)
- test("wow (#54321 #1243)", "wow (%s %s)", 54321, 1243)
- test("(#4)(#5)", "(%s)(%s)", 4, 5)
- test("#1 (#4321) test", "%s (%s) test", 1, 4321)
-}
-
-func TestRender_IssueIndexPattern3(t *testing.T) {
- setting.AppURL = AppURL
- setting.AppSubURL = AppSubURL
-
- // alphanumeric: render inputs without valid mentions
- test := func(s string) {
- testRenderIssueIndexPattern(t, s, s, alphanumericMetas)
- }
- test("")
- test("this is a test")
- test("test 123 123 1234")
- test("#")
- test("##1234")
- test("# 123")
- test("#abcd")
- test("test #123")
- test("abc-1234") // issue prefix must be capital
- test("ABc-1234") // issue prefix must be _all_ capital
- test("ABCDEFGHIJK-1234") // the limit is 10 characters in the prefix
- test("ABC1234") // dash is required
- test("test ABC- test") // number is required
- test("test -1234 test") // prefix is required
- test("testABC-123 test") // leading space is required
- test("test ABC-123test") // trailing space is required
- test("ABC-0123") // no leading zero
-}
-
-func TestRender_IssueIndexPattern4(t *testing.T) {
- setting.AppURL = AppURL
- setting.AppSubURL = AppSubURL
-
- // alphanumeric: render inputs with valid mentions
- test := func(s, expectedFmt string, names ...string) {
- links := make([]interface{}, len(names))
- for i, name := range names {
- links[i] = alphanumIssueLink("https://someurl.com/someUser/someRepo/", name)
- }
- expected := fmt.Sprintf(expectedFmt, links...)
- testRenderIssueIndexPattern(t, s, expected, alphanumericMetas)
- }
- test("OTT-1234 test", "%s test", "OTT-1234")
- test("test T-12 issue", "test %s issue", "T-12")
- test("test issue ABCDEFGHIJ-1234567890", "test issue %s", "ABCDEFGHIJ-1234567890")
-}
-
-func TestRender_AutoLink(t *testing.T) {
- setting.AppURL = AppURL
- setting.AppSubURL = AppSubURL
-
- test := func(input, expected string) {
- buffer := RenderSpecialLink([]byte(input), setting.AppSubURL, nil, false)
- assert.Equal(t, strings.TrimSpace(expected), strings.TrimSpace(string(buffer)))
- buffer = RenderSpecialLink([]byte(input), setting.AppSubURL, nil, true)
- assert.Equal(t, strings.TrimSpace(expected), strings.TrimSpace(string(buffer)))
- }
-
- // render valid issue URLs
- test(URLJoin(setting.AppSubURL, "issues", "3333"),
- numericIssueLink(URLJoin(setting.AppSubURL, "issues"), 3333))
-
- // render external issue URLs
- for _, externalURL := range []string{
- "http://1111/2222/ssss-issues/3333?param=blah&blahh=333",
- "http://test.com/issues/33333",
- "https://issues/333"} {
- test(externalURL, externalURL)
- }
-
- // render valid commit URLs
- tmp := URLJoin(AppSubURL, "commit", "d8a994ef243349f321568f9e36d5c3f444b99cae")
- test(tmp, "<a href=\""+tmp+"\">d8a994ef24</a>")
- tmp += "#diff-2"
- test(tmp, "<a href=\""+tmp+"\">d8a994ef24 (diff-2)</a>")
-
- // render other commit URLs
- tmp = "https://external-link.gogs.io/gogs/gogs/commit/d8a994ef243349f321568f9e36d5c3f444b99cae#diff-2"
- test(tmp, "<a href=\""+tmp+"\">d8a994ef24 (diff-2)</a>")
+ string(markup.RenderIssueIndexPattern([]byte(input), AppSubURL, metas)))
}
func TestRender_StandardLinks(t *testing.T) {
@@ -241,8 +74,8 @@ func TestRender_StandardLinks(t *testing.T) {
googleRendered := `<p><a href="https://google.com/" rel="nofollow">https://google.com/</a></p>`
test("<https://google.com/>", googleRendered, googleRendered)
- lnk := URLJoin(AppSubURL, "WikiPage")
- lnkWiki := URLJoin(AppSubURL, "wiki", "WikiPage")
+ lnk := markup.URLJoin(AppSubURL, "WikiPage")
+ lnkWiki := markup.URLJoin(AppSubURL, "wiki", "WikiPage")
test("[WikiPage](WikiPage)",
`<p><a href="`+lnk+`" rel="nofollow">WikiPage</a></p>`,
`<p><a href="`+lnkWiki+`" rel="nofollow">WikiPage</a></p>`)
@@ -251,7 +84,7 @@ func TestRender_StandardLinks(t *testing.T) {
func TestRender_ShortLinks(t *testing.T) {
setting.AppURL = AppURL
setting.AppSubURL = AppSubURL
- tree := URLJoin(AppSubURL, "src", "master")
+ tree := markup.URLJoin(AppSubURL, "src", "master")
test := func(input, expected, expectedWiki string) {
buffer := RenderString(input, tree, nil)
@@ -260,13 +93,13 @@ func TestRender_ShortLinks(t *testing.T) {
assert.Equal(t, strings.TrimSpace(expectedWiki), strings.TrimSpace(string(buffer)))
}
- rawtree := URLJoin(AppSubURL, "raw", "master")
- url := URLJoin(tree, "Link")
- otherUrl := URLJoin(tree, "OtherLink")
- imgurl := URLJoin(rawtree, "Link.jpg")
- urlWiki := URLJoin(AppSubURL, "wiki", "Link")
- otherUrlWiki := URLJoin(AppSubURL, "wiki", "OtherLink")
- imgurlWiki := URLJoin(AppSubURL, "wiki", "raw", "Link.jpg")
+ rawtree := markup.URLJoin(AppSubURL, "raw", "master")
+ url := markup.URLJoin(tree, "Link")
+ otherUrl := markup.URLJoin(tree, "OtherLink")
+ imgurl := markup.URLJoin(rawtree, "Link.jpg")
+ urlWiki := markup.URLJoin(AppSubURL, "wiki", "Link")
+ otherUrlWiki := markup.URLJoin(AppSubURL, "wiki", "OtherLink")
+ imgurlWiki := markup.URLJoin(AppSubURL, "wiki", "raw", "Link.jpg")
favicon := "http://google.com/favicon.ico"
test(
@@ -311,27 +144,26 @@ func TestRender_ShortLinks(t *testing.T) {
`<p><a href="`+urlWiki+`" rel="nofollow">Link</a> <a href="`+otherUrlWiki+`" rel="nofollow">OtherLink</a></p>`)
}
-func TestRender_Commits(t *testing.T) {
- setting.AppURL = AppURL
- setting.AppSubURL = AppSubURL
-
- test := func(input, expected string) {
- buffer := RenderString(input, setting.AppSubURL, nil)
- assert.Equal(t, strings.TrimSpace(expected), strings.TrimSpace(string(buffer)))
+func TestMisc_IsMarkdownFile(t *testing.T) {
+ setting.Markdown.FileExtensions = []string{".md", ".markdown", ".mdown", ".mkd"}
+ trueTestCases := []string{
+ "test.md",
+ "wow.MARKDOWN",
+ "LOL.mDoWn",
+ }
+ falseTestCases := []string{
+ "test",
+ "abcdefg",
+ "abcdefghijklmnopqrstuvwxyz",
+ "test.md.test",
}
- var sha = "b6dd6210eaebc915fd5be5579c58cce4da2e2579"
- var commit = URLJoin(AppSubURL, "commit", sha)
- var subtree = URLJoin(commit, "src")
- var tree = strings.Replace(subtree, "/commit/", "/tree/", -1)
- var src = strings.Replace(subtree, "/commit/", "/src/", -1)
-
- test(sha, `<p><a href="`+commit+`" rel="nofollow">b6dd6210ea</a></p>`)
- test(sha[:7], `<p><a href="`+commit[:len(commit)-(40-7)]+`" rel="nofollow">b6dd621</a></p>`)
- test(sha[:39], `<p><a href="`+commit[:len(commit)-(40-39)]+`" rel="nofollow">b6dd6210ea</a></p>`)
- test(commit, `<p><a href="`+commit+`" rel="nofollow">b6dd6210ea</a></p>`)
- test(tree, `<p><a href="`+src+`" rel="nofollow">b6dd6210ea/src</a></p>`)
- test("commit "+sha, `<p>commit <a href="`+commit+`" rel="nofollow">b6dd6210ea</a></p>`)
+ for _, testCase := range trueTestCases {
+ assert.True(t, IsMarkdownFile(testCase))
+ }
+ for _, testCase := range falseTestCases {
+ assert.False(t, IsMarkdownFile(testCase))
+ }
}
func TestRender_Images(t *testing.T) {
@@ -345,7 +177,7 @@ func TestRender_Images(t *testing.T) {
url := "../../.images/src/02/train.jpg"
title := "Train"
- result := URLJoin(AppSubURL, url)
+ result := markup.URLJoin(AppSubURL, url)
test(
"!["+title+"]("+url+")",
@@ -356,143 +188,6 @@ func TestRender_Images(t *testing.T) {
`<p><a href="`+result+`" rel="nofollow"><img src="`+result+`" alt="`+title+`" title="`+title+`"/></a></p>`)
}
-func TestRender_CrossReferences(t *testing.T) {
- setting.AppURL = AppURL
- setting.AppSubURL = AppSubURL
-
- test := func(input, expected string) {
- buffer := RenderString(input, setting.AppSubURL, nil)
- assert.Equal(t, strings.TrimSpace(expected), strings.TrimSpace(string(buffer)))
- }
-
- test(
- "gogits/gogs#12345",
- `<p><a href="`+URLJoin(AppURL, "gogits", "gogs", "issues", "12345")+`" rel="nofollow">gogits/gogs#12345</a></p>`)
-}
-
-func TestRender_FullIssueURLs(t *testing.T) {
- setting.AppURL = AppURL
- setting.AppSubURL = AppSubURL
-
- test := func(input, expected string) {
- result := RenderFullIssuePattern([]byte(input))
- assert.Equal(t, expected, string(result))
- }
- test("Here is a link https://git.osgeo.org/gogs/postgis/postgis/pulls/6",
- "Here is a link https://git.osgeo.org/gogs/postgis/postgis/pulls/6")
- test("Look here http://localhost:3000/person/repo/issues/4",
- `Look here <a href="http://localhost:3000/person/repo/issues/4">#4</a>`)
- test("http://localhost:3000/person/repo/issues/4#issuecomment-1234",
- `<a href="http://localhost:3000/person/repo/issues/4#issuecomment-1234">#4</a>`)
-}
-
-func TestRegExp_MentionPattern(t *testing.T) {
- trueTestCases := []string{
- "@Unknwon",
- "@ANT_123",
- "@xxx-DiN0-z-A..uru..s-xxx",
- " @lol ",
- " @Te/st",
- }
- falseTestCases := []string{
- "@ 0",
- "@ ",
- "@",
- "",
- "ABC",
- }
-
- for _, testCase := range trueTestCases {
- res := MentionPattern.MatchString(testCase)
- if !res {
- println()
- println(testCase)
- }
- assert.True(t, res)
- }
- for _, testCase := range falseTestCases {
- res := MentionPattern.MatchString(testCase)
- if res {
- println()
- println(testCase)
- }
- assert.False(t, res)
- }
-}
-
-func TestRegExp_IssueNumericPattern(t *testing.T) {
- trueTestCases := []string{
- "#1234",
- "#0",
- "#1234567890987654321",
- }
- falseTestCases := []string{
- "# 1234",
- "# 0",
- "# ",
- "#",
- "#ABC",
- "#1A2B",
- "",
- "ABC",
- }
-
- for _, testCase := range trueTestCases {
- assert.True(t, IssueNumericPattern.MatchString(testCase))
- }
- for _, testCase := range falseTestCases {
- assert.False(t, IssueNumericPattern.MatchString(testCase))
- }
-}
-
-func TestRegExp_IssueAlphanumericPattern(t *testing.T) {
- trueTestCases := []string{
- "ABC-1234",
- "A-1",
- "RC-80",
- "ABCDEFGHIJ-1234567890987654321234567890",
- }
- falseTestCases := []string{
- "RC-08",
- "PR-0",
- "ABCDEFGHIJK-1",
- "PR_1",
- "",
- "#ABC",
- "",
- "ABC",
- "GG-",
- "rm-1",
- }
-
- for _, testCase := range trueTestCases {
- assert.True(t, IssueAlphanumericPattern.MatchString(testCase))
- }
- for _, testCase := range falseTestCases {
- assert.False(t, IssueAlphanumericPattern.MatchString(testCase))
- }
-}
-
-func TestRegExp_Sha1CurrentPattern(t *testing.T) {
- trueTestCases := []string{
- "d8a994ef243349f321568f9e36d5c3f444b99cae",
- "abcdefabcdefabcdefabcdefabcdefabcdefabcd",
- }
- falseTestCases := []string{
- "test",
- "abcdefg",
- "abcdefghijklmnopqrstuvwxyzabcdefghijklmn",
- "abcdefghijklmnopqrstuvwxyzabcdefghijklmO",
- }
-
- for _, testCase := range trueTestCases {
- assert.True(t, Sha1CurrentPattern.MatchString(testCase))
- }
- for _, testCase := range falseTestCases {
- assert.False(t, Sha1CurrentPattern.MatchString(testCase))
- }
-}
-
func TestRegExp_ShortLinkPattern(t *testing.T) {
trueTestCases := []string{
"[[stuff]]",
@@ -510,139 +205,13 @@ func TestRegExp_ShortLinkPattern(t *testing.T) {
}
for _, testCase := range trueTestCases {
- assert.True(t, ShortLinkPattern.MatchString(testCase))
+ assert.True(t, markup.ShortLinkPattern.MatchString(testCase))
}
for _, testCase := range falseTestCases {
- assert.False(t, ShortLinkPattern.MatchString(testCase))
+ assert.False(t, markup.ShortLinkPattern.MatchString(testCase))
}
}
-func TestRegExp_AnySHA1Pattern(t *testing.T) {
- testCases := map[string][]string{
- "https://github.com/jquery/jquery/blob/a644101ed04d0beacea864ce805e0c4f86ba1cd1/test/unit/event.js#L2703": {
- "https",
- "github.com",
- "jquery",
- "jquery",
- "blob",
- "a644101ed04d0beacea864ce805e0c4f86ba1cd1",
- "test/unit/event.js",
- "L2703",
- },
- "https://github.com/jquery/jquery/blob/a644101ed04d0beacea864ce805e0c4f86ba1cd1/test/unit/event.js": {
- "https",
- "github.com",
- "jquery",
- "jquery",
- "blob",
- "a644101ed04d0beacea864ce805e0c4f86ba1cd1",
- "test/unit/event.js",
- "",
- },
- "https://github.com/jquery/jquery/commit/0705be475092aede1eddae01319ec931fb9c65fc": {
- "https",
- "github.com",
- "jquery",
- "jquery",
- "commit",
- "0705be475092aede1eddae01319ec931fb9c65fc",
- "",
- "",
- },
- "https://github.com/jquery/jquery/tree/0705be475092aede1eddae01319ec931fb9c65fc/src": {
- "https",
- "github.com",
- "jquery",
- "jquery",
- "tree",
- "0705be475092aede1eddae01319ec931fb9c65fc",
- "src",
- "",
- },
- "https://try.gogs.io/gogs/gogs/commit/d8a994ef243349f321568f9e36d5c3f444b99cae#diff-2": {
- "https",
- "try.gogs.io",
- "gogs",
- "gogs",
- "commit",
- "d8a994ef243349f321568f9e36d5c3f444b99cae",
- "",
- "diff-2",
- },
- }
-
- for k, v := range testCases {
- assert.Equal(t, AnySHA1Pattern.FindStringSubmatch(k)[1:], v)
- }
-}
-
-func TestMisc_IsMarkdownFile(t *testing.T) {
- setting.Markdown.FileExtensions = []string{".md", ".markdown", ".mdown", ".mkd"}
- trueTestCases := []string{
- "test.md",
- "wow.MARKDOWN",
- "LOL.mDoWn",
- }
- falseTestCases := []string{
- "test",
- "abcdefg",
- "abcdefghijklmnopqrstuvwxyz",
- "test.md.test",
- }
-
- for _, testCase := range trueTestCases {
- assert.True(t, IsMarkdownFile(testCase))
- }
- for _, testCase := range falseTestCases {
- assert.False(t, IsMarkdownFile(testCase))
- }
-}
-
-func TestMisc_IsSameDomain(t *testing.T) {
- setting.AppURL = AppURL
- setting.AppSubURL = AppSubURL
-
- var sha = "b6dd6210eaebc915fd5be5579c58cce4da2e2579"
- var commit = URLJoin(AppSubURL, "commit", sha)
-
- assert.True(t, IsSameDomain(commit))
- assert.False(t, IsSameDomain("http://google.com/ncr"))
- assert.False(t, IsSameDomain("favicon.ico"))
-}
-
-// Test cases without ambiguous links
-var sameCases = []string{
- // dear imgui wiki markdown extract: special wiki syntax
- `Wiki! Enjoy :)
-- [[Links, Language bindings, Engine bindings|Links]]
-- [[Tips]]
-
-Ideas and codes
-
-- Bezier widget (by @r-lyeh) ` + AppURL + `ocornut/imgui/issues/786
-- Node graph editors https://github.com/ocornut/imgui/issues/306
-- [[Memory Editor|memory_editor_example]]
-- [[Plot var helper|plot_var_example]]`,
- // wine-staging wiki home extract: tables, special wiki syntax, images
- `## What is Wine Staging?
-**Wine Staging** on website [wine-staging.com](http://wine-staging.com).
-
-## Quick Links
-Here are some links to the most important topics. You can find the full list of pages at the sidebar.
-
-| [[images/icon-install.png]] | [[Installation]] |
-|--------------------------------|----------------------------------------------------------|
-| [[images/icon-usage.png]] | [[Usage]] |
-`,
- // libgdx wiki page: inline images with special syntax
- `[Excelsior JET](http://www.excelsiorjet.com/) allows you to create native executables for Windows, Linux and Mac OS X.
-
-1. [Package your libGDX application](https://github.com/libgdx/libgdx/wiki/Gradle-on-the-Commandline#packaging-for-the-desktop)
-[[images/1.png]]
-2. Perform a test run by hitting the Run! button.
-[[images/2.png]]`,
-}
-
func testAnswers(baseURLContent, baseURLImages string) []string {
return []string{
`<p>Wiki! Enjoy :)</p>
@@ -697,24 +266,41 @@ func testAnswers(baseURLContent, baseURLImages string) []string {
}
}
-func TestTotal_RenderString(t *testing.T) {
- answers := testAnswers(URLJoin(AppSubURL, "src", "master/"), URLJoin(AppSubURL, "raw", "master/"))
+// Test cases without ambiguous links
+var sameCases = []string{
+ // dear imgui wiki markdown extract: special wiki syntax
+ `Wiki! Enjoy :)
+- [[Links, Language bindings, Engine bindings|Links]]
+- [[Tips]]
- for i := 0; i < len(sameCases); i++ {
- line := RenderString(sameCases[i], URLJoin(AppSubURL, "src", "master/"), nil)
- assert.Equal(t, answers[i], line)
- }
+Ideas and codes
- testCases := []string{}
+- Bezier widget (by @r-lyeh) ` + AppURL + `ocornut/imgui/issues/786
+- Node graph editors https://github.com/ocornut/imgui/issues/306
+- [[Memory Editor|memory_editor_example]]
+- [[Plot var helper|plot_var_example]]`,
+ // wine-staging wiki home extract: tables, special wiki syntax, images
+ `## What is Wine Staging?
+**Wine Staging** on website [wine-staging.com](http://wine-staging.com).
- for i := 0; i < len(testCases); i += 2 {
- line := RenderString(testCases[i], AppSubURL, nil)
- assert.Equal(t, testCases[i+1], line)
- }
+## Quick Links
+Here are some links to the most important topics. You can find the full list of pages at the sidebar.
+
+| [[images/icon-install.png]] | [[Installation]] |
+|--------------------------------|----------------------------------------------------------|
+| [[images/icon-usage.png]] | [[Usage]] |
+`,
+ // libgdx wiki page: inline images with special syntax
+ `[Excelsior JET](http://www.excelsiorjet.com/) allows you to create native executables for Windows, Linux and Mac OS X.
+
+1. [Package your libGDX application](https://github.com/libgdx/libgdx/wiki/Gradle-on-the-Commandline#packaging-for-the-desktop)
+[[images/1.png]]
+2. Perform a test run by hitting the Run! button.
+[[images/2.png]]`,
}
func TestTotal_RenderWiki(t *testing.T) {
- answers := testAnswers(URLJoin(AppSubURL, "wiki/"), URLJoin(AppSubURL, "wiki", "raw/"))
+ answers := testAnswers(markup.URLJoin(AppSubURL, "wiki/"), markup.URLJoin(AppSubURL, "wiki", "raw/"))
for i := 0; i < len(sameCases); i++ {
line := RenderWiki([]byte(sameCases[i]), AppSubURL, nil)
@@ -739,3 +325,19 @@ func TestTotal_RenderWiki(t *testing.T) {
assert.Equal(t, testCases[i+1], line)
}
}
+
+func TestTotal_RenderString(t *testing.T) {
+ answers := testAnswers(markup.URLJoin(AppSubURL, "src", "master/"), markup.URLJoin(AppSubURL, "raw", "master/"))
+
+ for i := 0; i < len(sameCases); i++ {
+ line := RenderString(sameCases[i], markup.URLJoin(AppSubURL, "src", "master/"), nil)
+ assert.Equal(t, answers[i], line)
+ }
+
+ testCases := []string{}
+
+ for i := 0; i < len(testCases); i += 2 {
+ line := RenderString(testCases[i], AppSubURL, nil)
+ assert.Equal(t, testCases[i+1], line)
+ }
+}
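
The issue-index, mention and SHA pattern tests move out of this file; the remaining helper simply calls markup.RenderIssueIndexPattern. A hypothetical test sketch of the external-tracker case, reusing the numericMetas map kept above — the expected HTML is inferred from the removed markdown implementation (com.Expand of the "format" meta), not copied from the patch:

    func TestRenderIssueIndexPattern_ExternalTracker(t *testing.T) {
        // numericMetas carries format "https://someurl.com/{user}/{repo}/{index}".
        actual := string(markup.RenderIssueIndexPattern([]byte("fixes #1234"), AppSubURL, numericMetas))
        assert.Equal(t,
            `fixes <a href="https://someurl.com/someUser/someRepo/1234">#1234</a>`,
            actual)
    }
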
diff --git a/modules/markdown/sanitizer.go b/modules/markdown/sanitizer.go
deleted file mode 100644
index cc00c9a1a3..0000000000
--- a/modules/markdown/sanitizer.go
+++ /dev/null
@@ -1,58 +0,0 @@
-// Copyright 2017 The Gitea Authors. All rights reserved.
-// Copyright 2017 The Gogs Authors. All rights reserved.
-// Use of this source code is governed by a MIT-style
-// license that can be found in the LICENSE file.
-
-package markdown
-
-import (
- "regexp"
- "sync"
-
- "code.gitea.io/gitea/modules/setting"
-
- "github.com/microcosm-cc/bluemonday"
-)
-
-// Sanitizer is a protection wrapper of *bluemonday.Policy which does not allow
-// any modification to the underlying policies once it's been created.
-type Sanitizer struct {
- policy *bluemonday.Policy
- init sync.Once
-}
-
-var sanitizer = &Sanitizer{}
-
-// NewSanitizer initializes sanitizer with allowed attributes based on settings.
-// Multiple calls to this function will only create one instance of Sanitizer during
-// entire application lifecycle.
-func NewSanitizer() {
- sanitizer.init.Do(func() {
- sanitizer.policy = bluemonday.UGCPolicy()
- // We only want to allow HighlightJS specific classes for code blocks
- sanitizer.policy.AllowAttrs("class").Matching(regexp.MustCompile(`^language-\w+$`)).OnElements("code")
-
- // Checkboxes
- sanitizer.policy.AllowAttrs("type").Matching(regexp.MustCompile(`^checkbox$`)).OnElements("input")
- sanitizer.policy.AllowAttrs("checked", "disabled").OnElements("input")
-
- // Custom URL-Schemes
- sanitizer.policy.AllowURLSchemes(setting.Markdown.CustomURLSchemes...)
- })
-}
-
-// Sanitize takes a string that contains a HTML fragment or document and applies policy whitelist.
-func Sanitize(s string) string {
- NewSanitizer()
- return sanitizer.policy.Sanitize(s)
-}
-
-// SanitizeBytes takes a []byte slice that contains a HTML fragment or document and applies policy whitelist.
-func SanitizeBytes(b []byte) []byte {
- if len(b) == 0 {
- // nothing to sanitize
- return b
- }
- NewSanitizer()
- return sanitizer.policy.SanitizeBytes(b)
-}
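
sanitizer.go is deleted here with no replacement in this directory; an equivalent bluemonday-based Sanitize/SanitizeBytes presumably lives in modules/markup after this patch (that move is not shown in this diff, so treat the package path as an assumption). The policy itself — UGCPolicy plus language-* classes on <code>, checkbox inputs, and the configured custom URL schemes — behaves like the first case in the deleted sanitizer_test.go:

    package main

    import (
        "fmt"

        "code.gitea.io/gitea/modules/markup" // assumed new home of Sanitize
    )

    func main() {
        // The UGC policy strips the onblur handler and adds rel="nofollow".
        fmt.Println(markup.Sanitize(`<a onblur="alert(secret)" href="http://www.google.com">Google</a>`))
        // <a href="http://www.google.com" rel="nofollow">Google</a>
    }
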
diff --git a/modules/markdown/sanitizer_test.go b/modules/markdown/sanitizer_test.go
deleted file mode 100644
index 77a4b33c84..0000000000
--- a/modules/markdown/sanitizer_test.go
+++ /dev/null
@@ -1,44 +0,0 @@
-// Copyright 2017 The Gitea Authors. All rights reserved.
-// Copyright 2017 The Gogs Authors. All rights reserved.
-// Use of this source code is governed by a MIT-style
-// license that can be found in the LICENSE file.
-
-package markdown
-
-import (
- "testing"
-
- "github.com/stretchr/testify/assert"
-)
-
-func Test_Sanitizer(t *testing.T) {
- NewSanitizer()
- testCases := []string{
- // Regular
- `<a onblur="alert(secret)" href="http://www.google.com">Google</a>`, `<a href="http://www.google.com" rel="nofollow">Google</a>`,
-
- // Code highlighting class
- `<code class="random string"></code>`, `<code></code>`,
- `<code class="language-random ui tab active menu attached animating sidebar following bar center"></code>`, `<code></code>`,
- `<code class="language-go"></code>`, `<code class="language-go"></code>`,
-
- // Input checkbox
- `<input type="hidden">`, ``,
- `<input type="checkbox">`, `<input type="checkbox">`,
- `<input checked disabled autofocus>`, `<input checked="" disabled="">`,
-
- // Code highlight injection
- `<code class="language-random&#32;ui&#32;tab&#32;active&#32;menu&#32;attached&#32;animating&#32;sidebar&#32;following&#32;bar&#32;center"></code>`, `<code></code>`,
- `<code class="language-lol&#32;ui&#32;tab&#32;active&#32;menu&#32;attached&#32;animating&#32;sidebar&#32;following&#32;bar&#32;center">
-<code class="language-lol&#32;ui&#32;container&#32;input&#32;huge&#32;basic&#32;segment&#32;center">&nbsp;</code>
-<img src="https://try.gogs.io/img/favicon.png" width="200" height="200">
-<code class="language-lol&#32;ui&#32;container&#32;input&#32;massive&#32;basic&#32;segment">Hello there! Something has gone wrong, we are working on it.</code>
-<code class="language-lol&#32;ui&#32;container&#32;input&#32;huge&#32;basic&#32;segment">In the meantime, play a game with us at&nbsp;<a href="http://example.com/">example.com</a>.</code>
-</code>`, "<code>\n<code>\u00a0</code>\n<img src=\"https://try.gogs.io/img/favicon.png\" width=\"200\" height=\"200\">\n<code>Hello there! Something has gone wrong, we are working on it.</code>\n<code>In the meantime, play a game with us at\u00a0<a href=\"http://example.com/\" rel=\"nofollow\">example.com</a>.</code>\n</code>",
- }
-
- for i := 0; i < len(testCases); i += 2 {
- assert.Equal(t, testCases[i+1], Sanitize(testCases[i]))
- assert.Equal(t, testCases[i+1], string(SanitizeBytes([]byte(testCases[i]))))
- }
-}
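
For reference, the largest removed piece, PostProcess, is what keeps special-link rendering out of <a>, <code> and <pre> blocks by walking the rendered HTML with the x/net/html tokenizer. A condensed, self-contained sketch of that idea (simplified relative to the removed implementation, and not the code that replaces it in modules/markup; transform stands in for RenderSpecialLink):

    package main

    import (
        "bytes"
        "fmt"
        "io"
        "strings"

        "golang.org/x/net/html"
    )

    // Tags that never get a matching end tag (the original noEndTags list).
    var voidTags = map[string]bool{"img": true, "input": true, "br": true, "hr": true}

    // postProcess rewrites only plain-text tokens; anything inside <a>, <code>
    // or <pre> is copied through untouched, mirroring the removed PostProcess.
    func postProcess(rawHTML []byte, transform func([]byte) []byte) []byte {
        var buf bytes.Buffer
        skipDepth := 0
        tokenizer := html.NewTokenizer(bytes.NewReader(rawHTML))
        for tokenizer.Next() != html.ErrorToken {
            token := tokenizer.Token()
            switch token.Type {
            case html.TextToken:
                if skipDepth > 0 {
                    buf.WriteString(token.String())
                } else {
                    buf.Write(transform([]byte(token.String())))
                }
            case html.StartTagToken:
                name := token.Data
                if (skipDepth > 0 || name == "a" || name == "code" || name == "pre") && !voidTags[name] {
                    skipDepth++
                }
                buf.WriteString(token.String())
            case html.EndTagToken:
                if skipDepth > 0 {
                    skipDepth--
                }
                buf.WriteString(token.String())
            default:
                buf.WriteString(token.String())
            }
        }
        if tokenizer.Err() != io.EOF {
            return rawHTML // unexpected tokenizer error: return the input verbatim
        }
        return buf.Bytes()
    }

    func main() {
        out := postProcess([]byte("<p>deadbeef</p><code>deadbeef</code>"), func(b []byte) []byte {
            return []byte(strings.ToUpper(string(b))) // stand-in for RenderSpecialLink
        })
        fmt.Println(string(out)) // <p>DEADBEEF</p><code>deadbeef</code>
    }
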