forkjo/modules/base/markdown.go

355 lines
9.4 KiB
Go
Raw Normal View History

2014-04-10 20:20:58 +02:00
// Copyright 2014 The Gogs Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package base
import (
"bytes"
"fmt"
"io"
2014-04-10 20:20:58 +02:00
"net/http"
"path"
"path/filepath"
"regexp"
"strings"
2015-11-20 11:37:51 +01:00
"github.com/Unknwon/com"
2014-10-04 23:15:22 +02:00
"github.com/russross/blackfriday"
"golang.org/x/net/html"
2014-10-04 23:15:22 +02:00
"github.com/gogits/gogs/modules/setting"
2014-04-10 20:20:58 +02:00
)
// TODO: put this into 'markdown' module.
2014-04-10 20:20:58 +02:00
// isletter reports whether c is an ASCII letter (a-z or A-Z).
func isletter(c byte) bool {
	if c >= 'a' && c <= 'z' {
		return true
	}
	return c >= 'A' && c <= 'Z'
}
// isalnum reports whether c is an ASCII letter or decimal digit.
func isalnum(c byte) bool {
	if c >= '0' && c <= '9' {
		return true
	}
	return isletter(c)
}
2016-01-14 07:20:03 +01:00
// validLinksPattern matches absolute URLs of the form "scheme://...",
// where the scheme starts with a lowercase letter followed by word
// characters or dashes.
var validLinksPattern = regexp.MustCompile(`^[a-z][\w-]+://`)

// isLink reports whether link starts with a URL scheme, i.e. whether it
// is an absolute URL rather than a repository-relative path.
func isLink(link []byte) bool {
	return validLinksPattern.Match(link)
}
// IsMarkdownFile reports whether the file name carries a markdown
// extension (case-insensitive).
func IsMarkdownFile(name string) bool {
	ext := filepath.Ext(strings.ToLower(name))
	for _, markdownExt := range []string{".md", ".markdown", ".mdown", ".mkd"} {
		if ext == markdownExt {
			return true
		}
	}
	return false
}
// IsTextFile sniffs the content type of data and reports whether it is
// textual. It returns the detected content type (as produced by
// http.DetectContentType, e.g. "text/plain; charset=utf-8") along with
// the result.
func IsTextFile(data []byte) (string, bool) {
	contentType := http.DetectContentType(data)
	// strings.Contains replaces the non-idiomatic Index(...) != -1 check.
	return contentType, strings.Contains(contentType, "text/")
}
// IsImageFile sniffs the content type of data and reports whether it is
// an image. It returns the detected content type (e.g. "image/png")
// along with the result.
func IsImageFile(data []byte) (string, bool) {
	contentType := http.DetectContentType(data)
	// strings.Contains replaces the non-idiomatic Index(...) != -1 check.
	return contentType, strings.Contains(contentType, "image/")
}
// IsReadmeFile reports whether the given file name is supposed to be a
// README file: exactly "readme" or any name starting with "readme.",
// compared case-insensitively.
func IsReadmeFile(name string) bool {
	name = strings.ToLower(name)
	// HasPrefix replaces the original manual length checks and slicing;
	// "readme" covers the extension-less form, "readme." any extension.
	return name == "readme" || strings.HasPrefix(name, "readme.")
}
2016-01-09 03:59:04 +01:00
var (
	// MentionPattern matches "@username" mentions preceded by the start
	// of text or whitespace (the leading character is captured too).
	MentionPattern = regexp.MustCompile(`(\s|^)@[0-9a-zA-Z_\.]+`)

	// commitPattern matches full http(s) commit URLs, optionally with a
	// "#fragment" suffix.
	commitPattern = regexp.MustCompile(`(\s|^)https?.*commit/[0-9a-zA-Z]+(#+[0-9a-zA-Z-]*)?`)
	// issueFullPattern matches full http(s) issue URLs, optionally with
	// a "#fragment" suffix.
	issueFullPattern = regexp.MustCompile(`(\s|^)https?.*issues/[0-9]+(#+[0-9a-zA-Z-]*)?`)
	// issueIndexPattern matches short issue references like "#123",
	// preceded by a space, start of text, or "(".
	issueIndexPattern = regexp.MustCompile(`( |^|\()#[0-9]+\b`)
	// sha1CurrentPattern matches bare 40-hex-digit SHA1 hashes.
	sha1CurrentPattern = regexp.MustCompile(`\b[0-9a-f]{40}\b`)
)
2014-04-10 20:20:58 +02:00
// CustomRender is a blackfriday renderer customized for Gogs: it
// resolves repository-relative links and images against urlPrefix and
// renders commit/issue URLs in compact form.
type CustomRender struct {
	blackfriday.Renderer // embedded base renderer; callbacks not overridden here fall through to it

	// urlPrefix is the repository URL prefix used to resolve relative
	// links; it contains a "/src/" segment (see Image, which rewrites
	// it to "/raw/").
	urlPrefix string
}
2016-01-09 03:59:04 +01:00
func (r *CustomRender) Link(out *bytes.Buffer, link []byte, title []byte, content []byte) {
2014-04-10 20:20:58 +02:00
if len(link) > 0 && !isLink(link) {
if link[0] == '#' {
// link = append([]byte(options.urlPrefix), link...)
} else {
2016-01-09 03:59:04 +01:00
link = []byte(path.Join(r.urlPrefix, string(link)))
}
}
r.Renderer.Link(out, link, title, content)
}
// AutoLink renders an auto-detected link. Commit URLs are shown as a
// shortened SHA inside <code>, issue URLs as "#<number>"; anything else
// is delegated to the embedded renderer.
// NOTE(review): kind 1 is presumably blackfriday's LINK_TYPE_NORMAL —
// confirm against the blackfriday version in use.
func (r *CustomRender) AutoLink(out *bytes.Buffer, link []byte, kind int) {
	if kind != 1 {
		r.Renderer.AutoLink(out, link, kind)
		return
	}

	// This method could only possibly serve one link at a time, no need to find all.
	m := commitPattern.Find(link)
	if m != nil {
		m = bytes.TrimSpace(m)
		// Slice out the hash between "commit/" and "#" (or end of match).
		i := strings.Index(string(m), "commit/")
		j := strings.Index(string(m), "#")
		if j == -1 {
			j = len(m)
		}
		out.WriteString(fmt.Sprintf(` <code><a href="%s">%s</a></code>`, m, ShortSha(string(m[i+7:j]))))
		return
	}

	m = issueFullPattern.Find(link)
	if m != nil {
		m = bytes.TrimSpace(m)
		// Slice out the issue number between "issues/" and "#" (or end).
		i := strings.Index(string(m), "issues/")
		j := strings.Index(string(m), "#")
		if j == -1 {
			j = len(m)
		}
		// NOTE(review): ShortSha is applied to the issue index here; it is
		// harmless for realistic issue numbers but looks like a copy-paste
		// from the commit branch above — verify intent.
		out.WriteString(fmt.Sprintf(` <a href="%s">#%s</a>`, m, ShortSha(string(m[i+7:j]))))
		return
	}

	r.Renderer.AutoLink(out, link, kind)
}
2016-01-13 13:25:52 +01:00
// ListItem renders a markdown list item, converting GitHub-style task
// list markers ("[ ] " / "[x] ") into disabled checkbox inputs.
// Receiver renamed options -> r for consistency with the other
// CustomRender methods.
func (r *CustomRender) ListItem(out *bytes.Buffer, text []byte, flags int) {
	switch {
	case bytes.HasPrefix(text, []byte("[ ] ")):
		// text[3:] keeps the space after the marker as a separator.
		text = append([]byte(`<input type="checkbox" disabled="" />`), text[3:]...)
	case bytes.HasPrefix(text, []byte("[x] ")):
		text = append([]byte(`<input type="checkbox" disabled="" checked="" />`), text[3:]...)
	}
	r.Renderer.ListItem(out, text, flags)
}
2015-11-20 11:37:51 +01:00
var (
	// Suffixes used by Image to detect external .svg links (with or
	// without a query string), which usually mean CI status badges.
	svgSuffix         = []byte(".svg")
	svgSuffixWithMark = []byte(".svg?")

	// Space and its percent-encoding, used to escape spaces in image
	// URLs built from relative paths.
	spaceBytes        = []byte(" ")
	spaceEncodedBytes = []byte("%20")
)
2016-01-09 03:59:04 +01:00
func (r *CustomRender) Image(out *bytes.Buffer, link []byte, title []byte, alt []byte) {
prefix := strings.Replace(r.urlPrefix, "/src/", "/raw/", 1)
2015-11-20 11:37:51 +01:00
if len(link) > 0 {
if isLink(link) {
// External link with .svg suffix usually means CI status.
if bytes.HasSuffix(link, svgSuffix) || bytes.Contains(link, svgSuffixWithMark) {
2016-01-09 03:59:04 +01:00
r.Renderer.Image(out, link, title, alt)
2015-11-20 11:37:51 +01:00
return
}
} else {
if link[0] != '/' {
prefix += "/"
}
2016-02-05 04:51:40 +01:00
link = bytes.Replace([]byte((prefix + string(link))), spaceBytes, spaceEncodedBytes, -1)
fmt.Println(333, string(link))
}
2014-10-15 05:44:34 +02:00
}
out.WriteString(`<a href="`)
out.Write(link)
out.WriteString(`">`)
2016-01-09 03:59:04 +01:00
r.Renderer.Image(out, link, title, alt)
out.WriteString("</a>")
2014-10-15 05:44:34 +02:00
}
2015-11-15 23:37:26 +01:00
func cutoutVerbosePrefix(prefix string) string {
count := 0
for i := 0; i < len(prefix); i++ {
if prefix[i] == '/' {
count++
}
if count >= 3+setting.AppSubUrlDepth {
2015-11-15 23:37:26 +01:00
return prefix[:i]
}
}
2015-11-15 23:37:26 +01:00
return prefix
}
2015-12-05 03:30:33 +01:00
// RenderIssueIndexPattern renders short issue references (e.g. "#123")
// in rawBytes as HTML links. With metas == nil, links point to
// "<urlPrefix>/issues/<index>"; otherwise links target an external
// issue tracker whose URL is built by expanding metas["format"].
func RenderIssueIndexPattern(rawBytes []byte, urlPrefix string, metas map[string]string) []byte {
	urlPrefix = cutoutVerbosePrefix(urlPrefix)
	ms := issueIndexPattern.FindAll(rawBytes, -1)
	for _, m := range ms {
		var space string
		m2 := m
		// The pattern may capture one leading space or "(" before "#";
		// split it off so it can be re-emitted in front of the link.
		if m2[0] != '#' {
			space = string(m2[0])
			m2 = m2[1:]
		}
		if metas == nil {
			// m2[1:] is the numeric index without the leading "#".
			rawBytes = bytes.Replace(rawBytes, m, []byte(fmt.Sprintf(`%s<a href="%s/issues/%s">%s</a>`,
				space, urlPrefix, m2[1:], m2)), 1)
		} else {
			// Support for external issue tracker
			// The index is substituted into the tracker URL template via
			// com.Expand over the metas map.
			metas["index"] = string(m2[1:])
			rawBytes = bytes.Replace(rawBytes, m, []byte(fmt.Sprintf(`%s<a href="%s">%s</a>`,
				space, com.Expand(metas["format"], metas), m2)), 1)
		}
	}
	return rawBytes
}
// RenderSpecialLink expands Gogs-specific references in rawBytes into
// HTML links: "@user" mentions, "#123" issue references, and bare
// 40-character SHA1 hashes.
func RenderSpecialLink(rawBytes []byte, urlPrefix string, metas map[string]string) []byte {
	for _, mention := range MentionPattern.FindAll(rawBytes, -1) {
		mention = bytes.TrimSpace(mention)
		anchor := fmt.Sprintf(`<a href="%s/%s">%s</a>`, setting.AppSubUrl, mention[1:], mention)
		rawBytes = bytes.Replace(rawBytes, mention, []byte(anchor), -1)
	}
	return RenderSha1CurrentPattern(RenderIssueIndexPattern(rawBytes, urlPrefix, metas), urlPrefix)
}
func RenderSha1CurrentPattern(rawBytes []byte, urlPrefix string) []byte {
ms := sha1CurrentPattern.FindAll(rawBytes, -1)
for _, m := range ms {
rawBytes = bytes.Replace(rawBytes, m, []byte(fmt.Sprintf(
`<a href="%s/commit/%s"><code>%s</code></a>`, urlPrefix, m, ShortSha(string(m)))), -1)
2014-04-10 20:20:58 +02:00
}
return rawBytes
}
2014-05-05 19:08:01 +02:00
// RenderRawMarkdown renders body as markdown HTML using a CustomRender
// bound to urlPrefix. The output is NOT sanitized or post-processed;
// RenderMarkdown layers those steps on top of this function.
func RenderRawMarkdown(body []byte, urlPrefix string) []byte {
	// HTML renderer flags: skip <style> content and omit the table of
	// contents block.
	htmlFlags := 0
	htmlFlags |= blackfriday.HTML_SKIP_STYLE
	htmlFlags |= blackfriday.HTML_OMIT_CONTENTS
	renderer := &CustomRender{
		Renderer:  blackfriday.HtmlRenderer(htmlFlags, "", ""),
		urlPrefix: urlPrefix,
	}

	// set up the parser
	extensions := 0
	extensions |= blackfriday.EXTENSION_NO_INTRA_EMPHASIS
	extensions |= blackfriday.EXTENSION_TABLES
	extensions |= blackfriday.EXTENSION_FENCED_CODE
	extensions |= blackfriday.EXTENSION_AUTOLINK
	extensions |= blackfriday.EXTENSION_STRIKETHROUGH
	extensions |= blackfriday.EXTENSION_SPACE_HEADERS
	extensions |= blackfriday.EXTENSION_NO_EMPTY_LINE_BEFORE_BLOCK
	// Hard line breaks are configurable per installation.
	if setting.Markdown.EnableHardLineBreak {
		extensions |= blackfriday.EXTENSION_HARD_LINE_BREAK
	}

	body = blackfriday.Markdown(body, renderer, extensions)
	return body
}
2015-11-20 07:52:11 +01:00
var (
	// Pieces of a synthesized closing tag ("</" + name + ">") emitted
	// by PostProcessMarkdown when it rebuilds end tags from its stack.
	// Note: "leftAngleBracket" actually holds "</", not "<".
	leftAngleBracket  = []byte("</")
	rightAngleBracket = []byte(">")
)

// noEndTags lists void elements that never receive a matching end tag
// and therefore are not tracked on the start-tag stack.
var noEndTags = []string{"img", "input", "br", "hr"}
// PostProcessMarkdown treats different types of HTML differently,
// and only renders special links for plain text blocks.
// Content inside <a>, <code> and <pre> is copied verbatim; end tags are
// rebuilt from a stack of seen start tags. If tokenization stops on
// anything other than EOF, the input is returned unchanged.
func PostProcessMarkdown(rawHtml []byte, urlPrefix string, metas map[string]string) []byte {
	startTags := make([]string, 0, 5) // stack of open tag names awaiting their end tags
	var buf bytes.Buffer
	tokenizer := html.NewTokenizer(bytes.NewReader(rawHtml))

OUTER_LOOP:
	for html.ErrorToken != tokenizer.Next() {
		token := tokenizer.Token()
		switch token.Type {
		case html.TextToken:
			// Plain text: expand mentions, issue refs and SHA1 links.
			buf.Write(RenderSpecialLink([]byte(token.String()), urlPrefix, metas))

		case html.StartTagToken:
			buf.WriteString(token.String())
			tagName := token.Data
			// If this is an excluded tag, we skip processing all output until a close tag is encountered.
			if strings.EqualFold("a", tagName) || strings.EqualFold("code", tagName) || strings.EqualFold("pre", tagName) {
				stackNum := 1
				for html.ErrorToken != tokenizer.Next() {
					token = tokenizer.Token()

					// Copy the token to the output verbatim
					buf.WriteString(token.String())

					// NOTE(review): stackNum counts every nested start tag,
					// not only those matching tagName — verify mixed nesting
					// (e.g. <pre><a>…</a></pre>) behaves as intended.
					if token.Type == html.StartTagToken {
						stackNum++
					}

					// If this is the close tag to the outer-most, we are done
					if token.Type == html.EndTagToken {
						stackNum--
						if stackNum <= 0 && strings.EqualFold(tagName, token.Data) {
							break
						}
					}
				}
				continue OUTER_LOOP
			}

			// Void elements (see noEndTags) never get an end tag, so they
			// are not pushed onto the stack.
			if !com.IsSliceContainsStr(noEndTags, token.Data) {
				startTags = append(startTags, token.Data)
			}

		case html.EndTagToken:
			// End tag with no recorded start tag: emit it verbatim.
			if len(startTags) == 0 {
				buf.WriteString(token.String())
				break
			}

			// Re-emit the close tag using the name on top of the stack
			// ("</" + name + ">"), then pop it.
			buf.Write(leftAngleBracket)
			buf.WriteString(startTags[len(startTags)-1])
			buf.Write(rightAngleBracket)
			startTags = startTags[:len(startTags)-1]
		default:
			buf.WriteString(token.String())
		}
	}

	if io.EOF == tokenizer.Err() {
		return buf.Bytes()
	}

	// If we are not at the end of the input, then some other parsing error has occurred,
	// so return the input verbatim.
	return rawHtml
}
2015-12-05 03:30:33 +01:00
func RenderMarkdown(rawBytes []byte, urlPrefix string, metas map[string]string) []byte {
2016-01-09 03:59:04 +01:00
result := RenderRawMarkdown(rawBytes, urlPrefix)
2015-12-05 03:30:33 +01:00
result = PostProcessMarkdown(result, urlPrefix, metas)
result = Sanitizer.SanitizeBytes(result)
return result
}
2015-12-05 03:30:33 +01:00
// RenderMarkdownString is the string-based convenience wrapper around
// RenderMarkdown.
func RenderMarkdownString(raw, urlPrefix string, metas map[string]string) string {
	rendered := RenderMarkdown([]byte(raw), urlPrefix, metas)
	return string(rendered)
}