forked from kevadesu/forgejo
format with gofumpt (#18184)
* gofumpt -w -l .
* gofumpt -w -l -extra .
* Add linter
* manual fix
* change make fmt
This commit is contained in: parent 1d98d205f5, commit 54e9ee37a7
423 changed files with 1585 additions and 1758 deletions
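
The hunks below are mechanical formatting changes in the style the commit message describes: empty struct types collapse onto one line, in-function `var x = y` declarations become `x := y`, adjacent top-level `var`/`const` declarations are grouped into blocks, `//comment` gains a space after the slashes, and stray blank lines before closing braces are dropped. As a rough orientation, here is an invented snippet (not taken from this commit, and without claiming which rewrites come from `gofumpt -extra` and which from the manual fix step) already in the target style:

    package example

    // Adjacent top-level declarations are grouped into one block, with aligned values.
    var (
    	protoHTTP  = []byte("http:")
    	protoHTTPS = []byte("https:")
    )

    // An empty struct type is written on a single line.
    type noopParser struct{}

    // Comments start with "// ", short assignment replaces `var n = 21`,
    // and no blank line is left before the closing brace.
    func double() int {
    	n := 21
    	return n * 2
    }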
@@ -178,8 +178,7 @@ func NewFootnoteList() *FootnoteList {
 var footnoteListKey = parser.NewContextKey()
 
-type footnoteBlockParser struct {
-}
+type footnoteBlockParser struct{}
 
 var defaultFootnoteBlockParser = &footnoteBlockParser{}

@@ -265,8 +264,7 @@ func (b *footnoteBlockParser) CanAcceptIndentedLine() bool {
 	return false
 }
 
-type footnoteParser struct {
-}
+type footnoteParser struct{}
 
 var defaultFootnoteParser = &footnoteParser{}

@@ -337,8 +335,7 @@ func (s *footnoteParser) Parse(parent ast.Node, block text.Reader, pc parser.Con
 	return NewFootnoteLink(index, name)
 }
 
-type footnoteASTTransformer struct {
-}
+type footnoteASTTransformer struct{}
 
 var defaultFootnoteASTTransformer = &footnoteASTTransformer{}

@@ -357,7 +354,7 @@ func (a *footnoteASTTransformer) Transform(node *ast.Document, reader text.Reade
 	}
 	pc.Set(footnoteListKey, nil)
 	for footnote := list.FirstChild(); footnote != nil; {
-		var container ast.Node = footnote
+		container := footnote
 		next := footnote.NextSibling()
 		if fc := container.LastChild(); fc != nil && ast.IsParagraph(fc) {
 			container = fc

@@ -8,12 +8,10 @@ import (
 	"mvdan.cc/xurls/v2"
 )
 
-var (
-	// NOTE: All below regex matching do not perform any extra validation.
-	// Thus a link is produced even if the linked entity does not exist.
-	// While fast, this is also incorrect and lead to false positives.
-	// TODO: fix invalid linking issue
-
-	// LinkRegex is a regexp matching a valid link
-	LinkRegex, _ = xurls.StrictMatchingScheme("https?://")
-)
+// NOTE: All below regex matching do not perform any extra validation.
+// Thus a link is produced even if the linked entity does not exist.
+// While fast, this is also incorrect and lead to false positives.
+// TODO: fix invalid linking issue
+
+// LinkRegex is a regexp matching a valid link
+var LinkRegex, _ = xurls.StrictMatchingScheme("https?://")

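The NOTE kept in this hunk says the match is purely syntactic: a link is produced even when the target does not exist. A minimal usage sketch of that behavior, with an invented input string and assuming only what the hunk itself shows (that xurls.StrictMatchingScheme returns a *regexp.Regexp and an error):

    package main

    import (
    	"fmt"

    	"mvdan.cc/xurls/v2"
    )

    func main() {
    	linkRegex, err := xurls.StrictMatchingScheme("https?://")
    	if err != nil {
    		panic(err)
    	}
    	// The regexp only checks URL shape, so this prints the URL even though
    	// the host is not a real, reachable site.
    	fmt.Println(linkRegex.FindString("docs live at https://example.invalid/manual now"))
    }
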
@@ -20,8 +20,7 @@ import (
 
 var wwwURLRegxp = regexp.MustCompile(`^www\.[-a-zA-Z0-9@:%._\+~#=]{2,256}\.[a-z]{2,6}((?:/|[#?])[-a-zA-Z0-9@:%_\+.~#!?&//=\(\);,'">\^{}\[\]` + "`" + `]*)?`)
 
-type linkifyParser struct {
-}
+type linkifyParser struct{}
 
 var defaultLinkifyParser = &linkifyParser{}

@@ -36,10 +35,12 @@ func (s *linkifyParser) Trigger() []byte {
 	return []byte{' ', '*', '_', '~', '('}
 }
 
-var protoHTTP = []byte("http:")
-var protoHTTPS = []byte("https:")
-var protoFTP = []byte("ftp:")
-var domainWWW = []byte("www.")
+var (
+	protoHTTP  = []byte("http:")
+	protoHTTPS = []byte("https:")
+	protoFTP   = []byte("ftp:")
+	domainWWW  = []byte("www.")
+)
 
 func (s *linkifyParser) Parse(parent ast.Node, block text.Reader, pc parser.Context) ast.Node {
 	if pc.IsInLinkLabel() {

@@ -58,7 +59,7 @@ func (s *linkifyParser) Parse(parent ast.Node, block text.Reader, pc parser.Cont
 
 	var m []int
 	var protocol []byte
-	var typ ast.AutoLinkType = ast.AutoLinkURL
+	typ := ast.AutoLinkURL
 	if bytes.HasPrefix(line, protoHTTP) || bytes.HasPrefix(line, protoHTTPS) || bytes.HasPrefix(line, protoFTP) {
 		m = LinkRegex.FindSubmatchIndex(line)
 	}

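The rewrite above is equivalent only because ast.AutoLinkURL is already a typed constant, so the short form infers the same ast.AutoLinkType. A small illustration of that rule using standard-library types (an invented example, not from this commit):

    package main

    import (
    	"fmt"
    	"time"
    )

    func main() {
    	// time.Second is a typed constant, so := infers time.Duration,
    	// just as `typ := ast.AutoLinkURL` infers ast.AutoLinkType.
    	d := time.Second
    	fmt.Printf("%T\n", d) // time.Duration

    	// With an untyped constant the two spellings are not equivalent:
    	n := 42          // n is int, the default type
    	var m int64 = 42 // m is int64; `m := 42` would not produce an int64
    	fmt.Printf("%T %T\n", n, m) // int int64
    }
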
@@ -139,8 +140,7 @@ func (s *linkifyParser) CloseBlock(parent ast.Node, pc parser.Context) {
 	// nothing to do
 }
 
-type linkify struct {
-}
+type linkify struct{}
 
 // Linkify is an extension that allow you to parse text that seems like a URL.
 var Linkify = &linkify{}

@@ -22,8 +22,7 @@ func init() {
 }
 
 // Renderer implements markup.Renderer for csv files
-type Renderer struct {
-}
+type Renderer struct{}
 
 // Name implements markup.Renderer
 func (Renderer) Name() string {

@@ -83,7 +82,7 @@ func writeField(w io.Writer, element, class, field string) error {
 
 // Render implements markup.Renderer
 func (Renderer) Render(ctx *markup.RenderContext, input io.Reader, output io.Writer) error {
-	var tmpBlock = bufio.NewWriter(output)
+	tmpBlock := bufio.NewWriter(output)
 
 	// FIXME: don't read all to memory
 	rawBytes, err := io.ReadAll(input)

@@ -15,7 +15,7 @@ import (
 
 func TestRenderCSV(t *testing.T) {
 	var render Renderer
-	var kases = map[string]string{
+	kases := map[string]string{
 		"a": "<table class=\"data-table\"><tr><th class=\"line-num\">1</th><th>a</th></tr></table>",
 		"1,2": "<table class=\"data-table\"><tr><th class=\"line-num\">1</th><th>1</th><th>2</th></tr></table>",
 		"1;2\n3;4": "<table class=\"data-table\"><tr><th class=\"line-num\">1</th><th>1</th><th>2</th></tr><tr><td class=\"line-num\">2</td><td>3</td><td>4</td></tr></table>",

@@ -202,7 +202,7 @@ func RenderCommitMessage(
 	ctx *RenderContext,
 	content string,
 ) (string, error) {
-	var procs = commitMessageProcessors
+	procs := commitMessageProcessors
 	if ctx.DefaultLink != "" {
 		// we don't have to fear data races, because being
 		// commitMessageProcessors of fixed len and cap, every time we append

@@ -238,7 +238,7 @@ func RenderCommitMessageSubject(
 	ctx *RenderContext,
 	content string,
 ) (string, error) {
-	var procs = commitMessageSubjectProcessors
+	procs := commitMessageSubjectProcessors
 	if ctx.DefaultLink != "" {
 		// we don't have to fear data races, because being
 		// commitMessageSubjectProcessors of fixed len and cap, every time we

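The comments preserved in the two hunks above rely on a slice property worth spelling out: when a slice's length equals its capacity, append must allocate a fresh backing array, so appending to a local copy never mutates the shared package-level slice. A minimal sketch of that reasoning, with invented names rather than the real processor types:

    package main

    import "fmt"

    // processors stands in for commitMessageProcessors: a package-level slice
    // whose len equals its cap.
    var processors = []string{"p1", "p2"}

    func withExtra(extra string) []string {
    	procs := processors
    	// len(procs) == cap(procs), so append copies into new storage instead of
    	// writing past the end of the shared backing array.
    	procs = append(procs, extra)
    	return procs
    }

    func main() {
    	a := withExtra("A")
    	b := withExtra("B")
    	fmt.Println(a[2], b[2], len(processors)) // A B 2: processors is untouched
    }
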
@@ -291,8 +291,10 @@ func RenderEmoji(
 	return renderProcessString(&RenderContext{}, emojiProcessors, content)
 }
 
-var tagCleaner = regexp.MustCompile(`<((?:/?\w+/\w+)|(?:/[\w ]+/)|(/?[hH][tT][mM][lL]\b)|(/?[hH][eE][aA][dD]\b))`)
-var nulCleaner = strings.NewReplacer("\000", "")
+var (
+	tagCleaner = regexp.MustCompile(`<((?:/?\w+/\w+)|(?:/[\w ]+/)|(/?[hH][tT][mM][lL]\b)|(/?[hH][eE][aA][dD]\b))`)
+	nulCleaner = strings.NewReplacer("\000", "")
+)
 
 func postProcess(ctx *RenderContext, procs []processor, input io.Reader, output io.Writer) error {
 	defer ctx.Cancel()

@@ -15,9 +15,11 @@ import (
 	"github.com/stretchr/testify/assert"
 )
 
-const TestAppURL = "http://localhost:3000/"
-const TestOrgRepo = "gogits/gogs"
-const TestRepoURL = TestAppURL + TestOrgRepo + "/"
+const (
+	TestAppURL  = "http://localhost:3000/"
+	TestOrgRepo = "gogits/gogs"
+	TestRepoURL = TestAppURL + TestOrgRepo + "/"
+)
 
 // alphanumLink an HTML link to an alphanumeric-style issue
 func alphanumIssueLink(baseURL, class, name string) string {

@@ -38,17 +38,17 @@ func TestRender_Commits(t *testing.T) {
 		assert.Equal(t, strings.TrimSpace(expected), strings.TrimSpace(buffer))
 	}
 
-	var sha = "65f1bf27bc3bf70f64657658635e66094edbcb4d"
-	var repo = TestRepoURL
-	var commit = util.URLJoin(repo, "commit", sha)
-	var tree = util.URLJoin(repo, "tree", sha, "src")
+	sha := "65f1bf27bc3bf70f64657658635e66094edbcb4d"
+	repo := TestRepoURL
+	commit := util.URLJoin(repo, "commit", sha)
+	tree := util.URLJoin(repo, "tree", sha, "src")
 
-	var file = util.URLJoin(repo, "commit", sha, "example.txt")
-	var fileWithExtra = file + ":"
-	var fileWithHash = file + "#L2"
-	var fileWithHasExtra = file + "#L2:"
-	var commitCompare = util.URLJoin(repo, "compare", sha+"..."+sha)
-	var commitCompareWithHash = commitCompare + "#L2"
+	file := util.URLJoin(repo, "commit", sha, "example.txt")
+	fileWithExtra := file + ":"
+	fileWithHash := file + "#L2"
+	fileWithHasExtra := file + "#L2:"
+	commitCompare := util.URLJoin(repo, "compare", sha+"..."+sha)
+	commitCompareWithHash := commitCompare + "#L2"
 
 	test(sha, `<p><a href="`+commit+`" rel="nofollow"><code>65f1bf27bc</code></a></p>`)
 	test(sha[:7], `<p><a href="`+commit[:len(commit)-(40-7)]+`" rel="nofollow"><code>65f1bf2</code></a></p>`)

@@ -102,8 +102,8 @@ func TestRender_CrossReferences(t *testing.T) {
 func TestMisc_IsSameDomain(t *testing.T) {
 	setting.AppURL = TestAppURL
 
-	var sha = "b6dd6210eaebc915fd5be5579c58cce4da2e2579"
-	var commit = util.URLJoin(TestRepoURL, "commit", sha)
+	sha := "b6dd6210eaebc915fd5be5579c58cce4da2e2579"
+	commit := util.URLJoin(TestRepoURL, "commit", sha)
 
 	assert.True(t, IsSameDomain(commit))
 	assert.False(t, IsSameDomain("http://google.com/ncr"))

@@ -291,7 +291,7 @@ func TestRender_emoji(t *testing.T) {
 			`<p><span class="emoji" aria-label="`+emoji.GemojiData[i].Description+`">`+emoji.GemojiData[i].Emoji+`</span></p>`)
 	}
 
-	//Text that should be turned into or recognized as emoji
+	// Text that should be turned into or recognized as emoji
 	test(
 		":gitea:",
 		`<p><span class="emoji" aria-label="gitea"><img alt=":gitea:" src="`+setting.StaticURLPrefix+`/assets/img/emoji/gitea.png"/></span></p>`)

@@ -472,7 +472,7 @@ func TestRender_RelativeImages(t *testing.T) {
 func Test_ParseClusterFuzz(t *testing.T) {
 	setting.AppURL = TestAppURL
 
-	var localMetas = map[string]string{
+	localMetas := map[string]string{
 		"user": "go-gitea",
 		"repo": "gitea",
 	}

@@ -502,7 +502,7 @@ func Test_ParseClusterFuzz(t *testing.T) {
 func TestIssue16020(t *testing.T) {
 	setting.AppURL = TestAppURL
 
-	var localMetas = map[string]string{
+	localMetas := map[string]string{
 		"user": "go-gitea",
 		"repo": "gitea",
 	}

@@ -42,7 +42,7 @@ func (g *ASTTransformer) Transform(node *ast.Document, reader text.Reader, pc pa
 	metaData := meta.GetItems(pc)
 	firstChild := node.FirstChild()
 	createTOC := false
-	var toc = []Header{}
+	toc := []Header{}
 	rc := &RenderConfig{
 		Meta: "table",
 		Icon: "table",

@@ -28,12 +28,16 @@ import (
 	"github.com/yuin/goldmark/util"
 )
 
-var converter goldmark.Markdown
-var once = sync.Once{}
+var (
+	converter goldmark.Markdown
+	once      = sync.Once{}
+)
 
-var urlPrefixKey = parser.NewContextKey()
-var isWikiKey = parser.NewContextKey()
-var renderMetasKey = parser.NewContextKey()
+var (
+	urlPrefixKey   = parser.NewContextKey()
+	isWikiKey      = parser.NewContextKey()
+	renderMetasKey = parser.NewContextKey()
+)
 
 type limitWriter struct {
 	w io.Writer

@@ -134,7 +138,6 @@ func actualRender(ctx *markup.RenderContext, input io.Reader, output io.Writer)
 				util.Prioritized(NewHTMLRenderer(), 10),
 			),
 		)
-
 	})
 
 	lw := &limitWriter{

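The two hunks above touch the converter's lazy one-time setup: converter is built inside once.Do on first use. A minimal sketch of that pattern, with invented names and a placeholder value rather than the real goldmark configuration:

    package example

    import "sync"

    var (
    	converter string // placeholder for the real goldmark.Markdown value
    	once      sync.Once
    )

    // getConverter configures the converter exactly once; every later call
    // returns the value cached by the first one.
    func getConverter() string {
    	once.Do(func() {
    		converter = "configured converter"
    	})
    	return converter
    }
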
@@ -190,10 +193,8 @@ func render(ctx *markup.RenderContext, input io.Reader, output io.Writer) error
 	return actualRender(ctx, input, output)
 }
 
-var (
-	// MarkupName describes markup's name
-	MarkupName = "markdown"
-)
+// MarkupName describes markup's name
+var MarkupName = "markdown"
 
 func init() {
 	markup.RegisterRenderer(Renderer{})

@@ -18,9 +18,11 @@ import (
 	"github.com/stretchr/testify/assert"
 )
 
-const AppURL = "http://localhost:3000/"
-const Repo = "gogits/gogs"
-const AppSubURL = AppURL + Repo + "/"
+const (
+	AppURL    = "http://localhost:3000/"
+	Repo      = "gogits/gogs"
+	AppSubURL = AppURL + Repo + "/"
+)
 
 // these values should match the Repo const above
 var localMetas = map[string]string{

@@ -120,7 +122,6 @@ func TestRender_Images(t *testing.T) {
 	test(
 		"[]("+href+")",
 		`<p><a href="`+href+`" rel="nofollow"><img src="`+result+`" alt="`+title+`"/></a></p>`)
-
 }
 
 func testAnswers(baseURLContent, baseURLImages string) []string {

@@ -147,8 +147,10 @@ func StripMarkdown(rawBytes []byte) (string, []string) {
 	return string(buf), links
 }
 
-var stripParser parser.Parser
-var once = sync.Once{}
+var (
+	stripParser parser.Parser
+	once        = sync.Once{}
+)
 
 // StripMarkdownBytes parses markdown content by removing all markup and code blocks
 // in order to extract links and other references

@@ -52,7 +52,8 @@ A HIDDEN ` + "`" + `GHOST` + "`" + ` IN THIS LINE.
 		},
 		[]string{
 			"link",
-		}},
+		},
+	},
 	{
 		"Simply closes: #29 yes",
 		[]string{

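The hunk above only changes brace placement in a nested test-case literal: each closing brace now sits on its own line at its own indentation level. The same shape on a small invented example:

    package example

    type stripCase struct {
    	input string
    	links []string
    }

    var cases = []stripCase{
    	{
    		"Closes: #1",
    		[]string{
    			"link",
    		},
    	},
    }
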
@@ -27,8 +27,7 @@ func init() {
 }
 
 // Renderer implements markup.Renderer for orgmode
-type Renderer struct {
-}
+type Renderer struct{}
 
 // Name implements markup.Renderer
 func (Renderer) Name() string {

@@ -15,9 +15,11 @@ import (
 	"github.com/stretchr/testify/assert"
 )
 
-const AppURL = "http://localhost:3000/"
-const Repo = "gogits/gogs"
-const AppSubURL = AppURL + Repo + "/"
+const (
+	AppURL    = "http://localhost:3000/"
+	Repo      = "gogits/gogs"
+	AppSubURL = AppURL + Repo + "/"
+)
 
 func TestRender_StandardLinks(t *testing.T) {
 	setting.AppURL = AppURL

@@ -86,7 +86,8 @@ func createDefaultPolicy() *bluemonday.Policy {
 	policy.AllowAttrs("class").Matching(regexp.MustCompile(`^((icon(\s+[\p{L}\p{N}_-]+)+)|(emoji))$|^([a-z][a-z0-9]{0,2})$|^` + keywordClass + `$`)).OnElements("span")
 
 	// Allow generally safe attributes
-	generalSafeAttrs := []string{"abbr", "accept", "accept-charset",
+	generalSafeAttrs := []string{
+		"abbr", "accept", "accept-charset",
 		"accesskey", "action", "align", "alt",
 		"aria-describedby", "aria-hidden", "aria-label", "aria-labelledby",
 		"axis", "border", "cellpadding", "cellspacing", "char",

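The hunk above moves the first elements of a long string-slice literal onto their own line, so every element line is indented equally and the literal ends with a trailing comma before the closing brace. A much shorter invented list in the same shape:

    package example

    var safeAttrs = []string{
    	"abbr", "accept", "accept-charset",
    	"align", "alt",
    }
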
@@ -59,5 +59,4 @@ func TestSanitizeNonEscape(t *testing.T) {
 	if strings.Contains(string(output), "<script>") {
 		t.Errorf("un-escaped <script> in output: %q", output)
 	}
-
 }