Mirror of https://github.com/kovetskiy/mark.git, synced 2025-06-02 03:32:42 +08:00
Restructure code for more testability

Move a number of funcs/files in the top-level `main` package into a new `util` package, so test logic can directly invoke functions like RunMark(), etc. main.go has been trimmed down to a minimal size, with the former supporting funcs moved into the `util` package so they can be exercised by unit tests.

Signed-off-by: Rich Scott <richscott@sent.com>
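The gain shows up in the test hunks further down: with SetLogLevel and RunMark exported from the new util package, a test can build a cli.Context by hand and call them directly instead of going through main(). A minimal sketch of such a test, modelled on the Test_setLogLevel hunk below; the test name, package name, and the invalid level value are illustrative, not part of this commit:

package util_test

import (
	"flag"
	"testing"

	"github.com/kovetskiy/mark/util"
	"github.com/stretchr/testify/assert"
	"github.com/urfave/cli/v2"
)

// Calls util.SetLogLevel directly through a hand-built cli.Context.
func TestSetLogLevelRejectsUnknownLevel(t *testing.T) {
	set := flag.NewFlagSet("test", flag.ContinueOnError)
	set.String("log-level", "NOPE", "") // illustrative invalid level
	cliCtx := cli.NewContext(nil, set, nil)

	err := util.SetLogLevel(cliCtx)
	assert.EqualError(t, err, "unknown log level: NOPE")
}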
This commit is contained in:
parent 87160e8dd6
commit ddc0ab9fbf

main.go (496 changed lines)
@@ -1,29 +1,9 @@
package main

import (
	"bytes"
	"crypto/sha1"
	"encoding/hex"
	"fmt"
	"os"
	"path/filepath"
	"regexp"
	"slices"
	"strings"
	"time"

	"github.com/bmatcuk/doublestar/v4"
	"github.com/kovetskiy/lorg"
	"github.com/kovetskiy/mark/attachment"
	"github.com/kovetskiy/mark/confluence"
	"github.com/kovetskiy/mark/includes"
	"github.com/kovetskiy/mark/macro"
	mark "github.com/kovetskiy/mark/markdown"
	"github.com/kovetskiy/mark/metadata"
	"github.com/kovetskiy/mark/page"
	"github.com/kovetskiy/mark/stdlib"
	"github.com/kovetskiy/mark/vfs"
	"github.com/reconquest/karma-go"
	"github.com/kovetskiy/mark/util"
	"github.com/reconquest/pkg/log"
	"github.com/urfave/cli/v2"
	"github.com/urfave/cli/v2/altsrc"
@@ -151,7 +131,7 @@ var flags = []cli.Flag{
	&cli.StringFlag{
		Name: "config",
		Aliases: []string{"c"},
		Value: configFilePath(),
		Value: util.ConfigFilePath(),
		Usage: "use the specified configuration file.",
		TakesFile: true,
		EnvVars: []string{"MARK_CONFIG"},
@@ -221,485 +201,19 @@ func main() {
				return altsrc.NewTomlSourceFromFile(filePath)
			} else {
				// Fall back to default if config is unset and path exists
				_, err := os.Stat(configFilePath())
				_, err := os.Stat(util.ConfigFilePath())
				if os.IsNotExist(err) {
					return &altsrc.MapInputSource{}, nil
				}
				return altsrc.NewTomlSourceFromFile(configFilePath())
				return altsrc.NewTomlSourceFromFile(util.ConfigFilePath())
			}
		}),
		EnableBashCompletion: true,
		HideHelpCommand: true,
		Action: RunMark,
		Action: util.RunMark,
	}

	if err := app.Run(os.Args); err != nil {
		log.Fatal(err)
	}
}

func RunMark(cCtx *cli.Context) error {
	if err := setLogLevel(cCtx); err != nil {
		return err
	}

	if cCtx.String("color") == "never" {
		log.GetLogger().SetFormat(
			lorg.NewFormat(
				`${time:2006-01-02 15:04:05.000} ${level:%s:left:true} ${prefix}%s`,
			),
		)
		log.GetLogger().SetOutput(os.Stderr)
	}

	creds, err := GetCredentials(cCtx.String("username"), cCtx.String("password"), cCtx.String("target-url"), cCtx.String("base-url"), cCtx.Bool("compile-only"))
	if err != nil {
		return err
	}

	api := confluence.NewAPI(creds.BaseURL, creds.Username, creds.Password)

	files, err := doublestar.FilepathGlob(cCtx.String("files"))
	if err != nil {
		return err
	}
	if len(files) == 0 {
		msg := "No files matched"
		if cCtx.Bool("ci") {
			log.Warning(msg)
		} else {
			log.Fatal(msg)
		}
	}

	log.Debug("config:")
	for _, f := range cCtx.Command.Flags {
		flag := f.Names()
		if flag[0] == "password" {
			log.Debugf(nil, "%20s: %v", flag[0], "******")
		} else {
			log.Debugf(nil, "%20s: %v", flag[0], cCtx.Value(flag[0]))
		}
	}

	fatalErrorHandler := NewErrorHandler(cCtx.Bool("continue-on-error"))

	// Loop through files matched by glob pattern
	for _, file := range files {
		log.Infof(
			nil,
			"processing %s",
			file,
		)

		target := processFile(file, api, cCtx, creds.PageID, creds.Username, fatalErrorHandler)

		if target != nil { // on dry-run or compile-only, the target is nil
			log.Infof(
				nil,
				"page successfully updated: %s",
				creds.BaseURL+target.Links.Full,
			)
			fmt.Println(creds.BaseURL + target.Links.Full)
		}
	}
	return nil
}

func processFile(
	file string,
	api *confluence.API,
	cCtx *cli.Context,
	pageID string,
	username string,
	fatalErrorHandler *FatalErrorHandler,
) *confluence.PageInfo {
	markdown, err := os.ReadFile(file)
	if err != nil {
		fatalErrorHandler.Handle(err, "unable to read file %q", file)
		return nil
	}

	markdown = bytes.ReplaceAll(markdown, []byte("\r\n"), []byte("\n"))

	parents := strings.Split(cCtx.String("parents"), cCtx.String("parents-delimiter"))

	meta, markdown, err := metadata.ExtractMeta(markdown, cCtx.String("space"), cCtx.Bool("title-from-h1"), parents, cCtx.Bool("title-append-generated-hash"))
	if err != nil {
		fatalErrorHandler.Handle(err, "unable to extract metadata from file %q", file)
		return nil
	}

	if pageID != "" && meta != nil {
		log.Warning(
			`specified file contains metadata, ` +
				`but it will be ignored due specified command line URL`,
		)

		meta = nil
	}

	if pageID == "" && meta == nil {
		fatalErrorHandler.Handle(nil, "specified file doesn't contain metadata and URL is not specified via command line or doesn't contain pageId GET-parameter")
		return nil
	}

	if meta.Space == "" {
		fatalErrorHandler.Handle(nil, "space is not set ('Space' header is not set and '--space' option is not set)")
		return nil
	}

	if meta.Title == "" {
		fatalErrorHandler.Handle(nil, "page title is not set ('Title' header is not set and '--title-from-h1' option and 'h1_title' config is not set or there is no H1 in the file)")
		return nil
	}

	stdlib, err := stdlib.New(api)
	if err != nil {
		fatalErrorHandler.Handle(err, "unable to retrieve standard library")
		return nil
	}

	templates := stdlib.Templates

	var recurse bool

	for {
		templates, markdown, recurse, err = includes.ProcessIncludes(
			filepath.Dir(file),
			cCtx.String("include-path"),
			markdown,
			templates,
		)
		if err != nil {
			fatalErrorHandler.Handle(err, "unable to process includes")
			return nil
		}

		if !recurse {
			break
		}
	}

	macros, markdown, err := macro.ExtractMacros(
		filepath.Dir(file),
		cCtx.String("include-path"),
		markdown,
		templates,
	)
	if err != nil {
		fatalErrorHandler.Handle(err, "unable to extract macros")
		return nil
	}

	macros = append(macros, stdlib.Macros...)

	for _, macro := range macros {
		markdown, err = macro.Apply(markdown)
		if err != nil {
			fatalErrorHandler.Handle(err, "unable to apply macro")
			return nil
		}
	}

	links, err := page.ResolveRelativeLinks(api, meta, markdown, filepath.Dir(file), cCtx.String("space"), cCtx.Bool("title-from-h1"), parents, cCtx.Bool("title-append-generated-hash"))
	if err != nil {
		fatalErrorHandler.Handle(err, "unable to resolve relative links")
		return nil
	}

	markdown = page.SubstituteLinks(markdown, links)

	if cCtx.Bool("dry-run") {
		_, _, err := page.ResolvePage(cCtx.Bool("dry-run"), api, meta)
		if err != nil {
			fatalErrorHandler.Handle(err, "unable to resolve page location")
			return nil
		}
	}

	if cCtx.Bool("compile-only") || cCtx.Bool("dry-run") {
		if cCtx.Bool("drop-h1") {
			log.Info(
				"the leading H1 heading will be excluded from the Confluence output",
			)
		}
		html, _ := mark.CompileMarkdown(markdown, stdlib, file, cCtx.String("mermaid-provider"), cCtx.Float64("mermaid-scale"), cCtx.Bool("drop-h1"), cCtx.Bool("strip-linebreaks"))
		fmt.Println(html)
		return nil
	}

	var target *confluence.PageInfo

	if meta != nil {
		parent, page, err := page.ResolvePage(cCtx.Bool("dry-run"), api, meta)
		if err != nil {
			fatalErrorHandler.Handle(karma.Describe("title", meta.Title).Reason(err), "unable to resolve %s", meta.Type)
			return nil
		}

		if page == nil {
			page, err = api.CreatePage(
				meta.Space,
				meta.Type,
				parent,
				meta.Title,
				``,
			)
			if err != nil {
				fatalErrorHandler.Handle(err, "can't create %s %q", meta.Type, meta.Title)
				return nil
			}
			// (issues/139): A delay between the create and update call
			// helps mitigate a 409 conflict that can occur when attempting
			// to update a page just after it was created.
			time.Sleep(1 * time.Second)
		}

		target = page
	} else {
		if pageID == "" {
			fatalErrorHandler.Handle(nil, "URL should provide 'pageId' GET-parameter")
			return nil
		}

		page, err := api.GetPageByID(pageID)
		if err != nil {
			fatalErrorHandler.Handle(err, "unable to retrieve page by id")
			return nil
		}

		target = page
	}

	// Resolve attachments created from <!-- Attachment: --> directive
	localAttachments, err := attachment.ResolveLocalAttachments(vfs.LocalOS, filepath.Dir(file), meta.Attachments)
	if err != nil {
		fatalErrorHandler.Handle(err, "unable to locate attachments")
		return nil
	}

	attaches, err := attachment.ResolveAttachments(
		api,
		target,
		localAttachments,
	)
	if err != nil {
		fatalErrorHandler.Handle(err, "unable to create/update attachments")
		return nil
	}

	markdown = attachment.CompileAttachmentLinks(markdown, attaches)

	if cCtx.Bool("drop-h1") {
		log.Info(
			"the leading H1 heading will be excluded from the Confluence output",
		)
	}

	html, inlineAttachments := mark.CompileMarkdown(markdown, stdlib, file, cCtx.String("mermaid-provider"), cCtx.Float64("mermaid-scale"), cCtx.Bool("drop-h1"), cCtx.Bool("strip-linebreaks"))

	// Resolve attachements detected from markdown
	_, err = attachment.ResolveAttachments(
		api,
		target,
		inlineAttachments,
	)
	if err != nil {
		fatalErrorHandler.Handle(err, "unable to create/update attachments")
		return nil
	}

	{
		var buffer bytes.Buffer

		err := stdlib.Templates.ExecuteTemplate(
			&buffer,
			"ac:layout",
			struct {
				Layout string
				Sidebar string
				Body string
			}{
				Layout: meta.Layout,
				Sidebar: meta.Sidebar,
				Body: html,
			},
		)
		if err != nil {
			fatalErrorHandler.Handle(err, "unable to execute layout template")
			return nil
		}

		html = buffer.String()
	}

	var finalVersionMessage string
	var shouldUpdatePage bool = true

	if cCtx.Bool("changes-only") {
		contentHash := getSHA1Hash(html)

		log.Debugf(
			nil,
			"content hash: %s",
			contentHash,
		)

		versionPattern := `\[v([a-f0-9]{40})]$`
		re := regexp.MustCompile(versionPattern)

		matches := re.FindStringSubmatch(target.Version.Message)

		if len(matches) > 1 {
			log.Debugf(
				nil,
				"previous content hash: %s",
				matches[1],
			)

			if matches[1] == contentHash {
				log.Infof(
					nil,
					"page %q is already up to date",
					target.Title,
				)
				shouldUpdatePage = false
			}
		}

		finalVersionMessage = fmt.Sprintf("%s [v%s]", cCtx.String("version-message"), contentHash)
	} else {
		finalVersionMessage = cCtx.String("version-message")
	}

	if shouldUpdatePage {
		err = api.UpdatePage(target, html, cCtx.Bool("minor-edit"), finalVersionMessage, meta.Labels, meta.ContentAppearance, meta.Emoji)
		if err != nil {
			fatalErrorHandler.Handle(err, "unable to update page")
			return nil
		}
	}

	if !updateLabels(api, target, meta, fatalErrorHandler) { // on error updating labels, return nil
		return nil
	}

	if cCtx.Bool("edit-lock") {
		log.Infof(
			nil,
			`edit locked on page %q by user %q to prevent manual edits`,
			target.Title,
			username,
		)

		err := api.RestrictPageUpdates(target, username)
		if err != nil {
			fatalErrorHandler.Handle(err, "unable to restrict page updates")
			return nil
		}
	}

	return target
}

func updateLabels(api *confluence.API, target *confluence.PageInfo, meta *metadata.Meta, fatalErrorHandler *FatalErrorHandler) bool {
	labelInfo, err := api.GetPageLabels(target, "global")
	if err != nil {
		log.Fatal(err)
	}

	log.Debug("Page Labels:")
	log.Debug(labelInfo.Labels)

	log.Debug("Meta Labels:")
	log.Debug(meta.Labels)

	delLabels := determineLabelsToRemove(labelInfo, meta)
	log.Debug("Del Labels:")
	log.Debug(delLabels)

	addLabels := determineLabelsToAdd(meta, labelInfo)
	log.Debug("Add Labels:")
	log.Debug(addLabels)

	if len(addLabels) > 0 {
		_, err = api.AddPageLabels(target, addLabels)
		if err != nil {
			fatalErrorHandler.Handle(err, "error adding labels")
			return false
		}
	}

	for _, label := range delLabels {
		_, err = api.DeletePageLabel(target, label)
		if err != nil {
			fatalErrorHandler.Handle(err, "error deleting labels")
			return false
		}
	}
	return true
}

// Page has label but label not in Metadata
func determineLabelsToRemove(labelInfo *confluence.LabelInfo, meta *metadata.Meta) []string {
	var labels []string
	for _, label := range labelInfo.Labels {
		if !slices.ContainsFunc(meta.Labels, func(metaLabel string) bool {
			return strings.EqualFold(metaLabel, label.Name)
		}) {
			labels = append(labels, label.Name)
		}
	}
	return labels
}

// Metadata has label but Page does not have it
func determineLabelsToAdd(meta *metadata.Meta, labelInfo *confluence.LabelInfo) []string {
	var labels []string
	for _, metaLabel := range meta.Labels {
		if !slices.ContainsFunc(labelInfo.Labels, func(label confluence.Label) bool {
			return strings.EqualFold(label.Name, metaLabel)
		}) {
			labels = append(labels, metaLabel)
		}
	}
	return labels
}

func configFilePath() string {
	fp, err := os.UserConfigDir()
	if err != nil {
		log.Fatal(err)
	}
	return filepath.Join(fp, "mark")
}

func setLogLevel(cCtx *cli.Context) error {
	logLevel := cCtx.String("log-level")
	switch strings.ToUpper(logLevel) {
	case lorg.LevelTrace.String():
		log.SetLevel(lorg.LevelTrace)
	case lorg.LevelDebug.String():
		log.SetLevel(lorg.LevelDebug)
	case lorg.LevelInfo.String():
		log.SetLevel(lorg.LevelInfo)
	case lorg.LevelWarning.String():
		log.SetLevel(lorg.LevelWarning)
	case lorg.LevelError.String():
		log.SetLevel(lorg.LevelError)
	case lorg.LevelFatal.String():
		log.SetLevel(lorg.LevelFatal)
	default:
		return fmt.Errorf("unknown log level: %s", logLevel)
	}
	log.GetLevel()

	return nil
}

func getSHA1Hash(input string) string {
	hash := sha1.New()
	hash.Write([]byte(input))
	return hex.EncodeToString(hash.Sum(nil))
}
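The changes-only branch above embeds a SHA-1 of the rendered HTML in the Confluence version message and compares it with the hash recovered from the previous version message before deciding whether to update. A small self-contained illustration of that convention, reusing the same message format and regexp as the code above; the sample HTML and version message are made up:

package main

import (
	"crypto/sha1"
	"encoding/hex"
	"fmt"
	"regexp"
)

func main() {
	html := "<p>example page body</p>" // stand-in for the compiled page HTML
	sum := sha1.Sum([]byte(html))
	contentHash := hex.EncodeToString(sum[:])

	// Version message as composed above: "<message> [v<sha1>]".
	message := fmt.Sprintf("%s [v%s]", "automated update", contentHash)

	// On the next run, the previous hash is pulled back out of the stored
	// version message and compared with the freshly computed one.
	re := regexp.MustCompile(`\[v([a-f0-9]{40})]$`)
	if m := re.FindStringSubmatch(message); len(m) > 1 && m[1] == contentHash {
		fmt.Println("page is already up to date, skipping update")
	}
}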
@@ -4,6 +4,7 @@ import (
	"flag"
	"testing"

	"github.com/kovetskiy/mark/util"
	"github.com/reconquest/pkg/log"
	"github.com/stretchr/testify/assert"
	"github.com/urfave/cli/v2"
@@ -33,7 +34,7 @@ func Test_setLogLevel(t *testing.T) {
			set.String("log-level", tt.args.lvl, "")
			cliCtx := cli.NewContext(nil, set, nil)

			err := setLogLevel(cliCtx)
			err := util.SetLogLevel(cliCtx)
			if tt.expectedErr != "" {
				assert.EqualError(t, err, tt.expectedErr)
			} else {
@@ -1,4 +1,4 @@
package mark
package mark_test

import (
	"os"
@@ -8,7 +8,9 @@ import (
	"strings"
	"testing"

	mark "github.com/kovetskiy/mark/markdown"
	"github.com/kovetskiy/mark/stdlib"
	"github.com/kovetskiy/mark/util"
	"github.com/stretchr/testify/assert"
	"github.com/urfave/cli/v2"
	"github.com/urfave/cli/v2/altsrc"
@@ -53,7 +55,7 @@ func TestCompileMarkdown(t *testing.T) {
			panic(err)
		}
		markdown, htmlname, html := loadData(t, filename, "")
		actual, _ := CompileMarkdown(markdown, lib, filename, "", 1.0, false, false)
		actual, _ := mark.CompileMarkdown(markdown, lib, filename, "", 1.0, false, false)
		test.EqualValues(string(html), actual, filename+" vs "+htmlname)
	}
}
@@ -86,7 +88,7 @@ func TestCompileMarkdownDropH1(t *testing.T) {
			variant = ""
		}
		markdown, htmlname, html := loadData(t, filename, variant)
		actual, _ := CompileMarkdown(markdown, lib, filename, "", 1.0, true, false)
		actual, _ := mark.CompileMarkdown(markdown, lib, filename, "", 1.0, true, false)
		test.EqualValues(string(html), actual, filename+" vs "+htmlname)
	}
}
@@ -120,7 +122,7 @@ func TestCompileMarkdownStripNewlines(t *testing.T) {
		}

		markdown, htmlname, html := loadData(t, filename, variant)
		actual, _ := CompileMarkdown(markdown, lib, filename, "", 1.0, false, true)
		actual, _ := mark.CompileMarkdown(markdown, lib, filename, "", 1.0, false, true)
		test.EqualValues(string(html), actual, filename+" vs "+htmlname)
	}
}
@@ -158,7 +160,7 @@ func TestContinueOnError(t *testing.T) {
		Flags: flags,
		EnableBashCompletion: true,
		HideHelpCommand: true,
		Action: RunMark,
		Action: util.RunMark,
	}

	filePath := filepath.Join("testdata", "batch-tests", "*.md")
@@ -1,4 +1,4 @@
package main
package util

import (
	"errors"
util/cli.go (new, 495 changed lines, normal file)
@@ -0,0 +1,495 @@
package util

import (
	"bytes"
	"crypto/sha1"
	"encoding/hex"
	"fmt"
	"os"
	"path/filepath"
	"regexp"
	"slices"
	"strings"
	"time"

	"github.com/bmatcuk/doublestar/v4"
	"github.com/kovetskiy/lorg"
	"github.com/kovetskiy/mark/attachment"
	"github.com/kovetskiy/mark/confluence"
	"github.com/kovetskiy/mark/includes"
	"github.com/kovetskiy/mark/macro"
	mark "github.com/kovetskiy/mark/markdown"
	"github.com/kovetskiy/mark/metadata"
	"github.com/kovetskiy/mark/page"
	"github.com/kovetskiy/mark/stdlib"
	"github.com/kovetskiy/mark/vfs"
	"github.com/reconquest/karma-go"
	"github.com/reconquest/pkg/log"
	"github.com/urfave/cli/v2"
)

func RunMark(cCtx *cli.Context) error {
	if err := SetLogLevel(cCtx); err != nil {
		return err
	}

	if cCtx.String("color") == "never" {
		log.GetLogger().SetFormat(
			lorg.NewFormat(
				`${time:2006-01-02 15:04:05.000} ${level:%s:left:true} ${prefix}%s`,
			),
		)
		log.GetLogger().SetOutput(os.Stderr)
	}

	creds, err := GetCredentials(cCtx.String("username"), cCtx.String("password"), cCtx.String("target-url"), cCtx.String("base-url"), cCtx.Bool("compile-only"))
	if err != nil {
		return err
	}

	api := confluence.NewAPI(creds.BaseURL, creds.Username, creds.Password)

	files, err := doublestar.FilepathGlob(cCtx.String("files"))
	if err != nil {
		return err
	}
	if len(files) == 0 {
		msg := "No files matched"
		if cCtx.Bool("ci") {
			log.Warning(msg)
		} else {
			log.Fatal(msg)
		}
	}

	log.Debug("config:")
	for _, f := range cCtx.Command.Flags {
		flag := f.Names()
		if flag[0] == "password" {
			log.Debugf(nil, "%20s: %v", flag[0], "******")
		} else {
			log.Debugf(nil, "%20s: %v", flag[0], cCtx.Value(flag[0]))
		}
	}

	fatalErrorHandler := NewErrorHandler(cCtx.Bool("continue-on-error"))

	// Loop through files matched by glob pattern
	for _, file := range files {
		log.Infof(
			nil,
			"processing %s",
			file,
		)

		target := processFile(file, api, cCtx, creds.PageID, creds.Username, fatalErrorHandler)

		if target != nil { // on dry-run or compile-only, the target is nil
			log.Infof(
				nil,
				"page successfully updated: %s",
				creds.BaseURL+target.Links.Full,
			)
			fmt.Println(creds.BaseURL + target.Links.Full)
		}
	}
	return nil
}

func processFile(
	file string,
	api *confluence.API,
	cCtx *cli.Context,
	pageID string,
	username string,
	fatalErrorHandler *FatalErrorHandler,
) *confluence.PageInfo {
	markdown, err := os.ReadFile(file)
	if err != nil {
		fatalErrorHandler.Handle(err, "unable to read file %q", file)
		return nil
	}

	markdown = bytes.ReplaceAll(markdown, []byte("\r\n"), []byte("\n"))

	parents := strings.Split(cCtx.String("parents"), cCtx.String("parents-delimiter"))

	meta, markdown, err := metadata.ExtractMeta(markdown, cCtx.String("space"), cCtx.Bool("title-from-h1"), parents, cCtx.Bool("title-append-generated-hash"))
	if err != nil {
		fatalErrorHandler.Handle(err, "unable to extract metadata from file %q", file)
		return nil
	}

	if pageID != "" && meta != nil {
		log.Warning(
			`specified file contains metadata, ` +
				`but it will be ignored due specified command line URL`,
		)

		meta = nil
	}

	if pageID == "" && meta == nil {
		fatalErrorHandler.Handle(nil, "specified file doesn't contain metadata and URL is not specified via command line or doesn't contain pageId GET-parameter")
		return nil
	}

	if meta.Space == "" {
		fatalErrorHandler.Handle(nil, "space is not set ('Space' header is not set and '--space' option is not set)")
		return nil
	}

	if meta.Title == "" {
		fatalErrorHandler.Handle(nil, "page title is not set ('Title' header is not set and '--title-from-h1' option and 'h1_title' config is not set or there is no H1 in the file)")
		return nil
	}

	stdlib, err := stdlib.New(api)
	if err != nil {
		fatalErrorHandler.Handle(err, "unable to retrieve standard library")
		return nil
	}

	templates := stdlib.Templates

	var recurse bool

	for {
		templates, markdown, recurse, err = includes.ProcessIncludes(
			filepath.Dir(file),
			cCtx.String("include-path"),
			markdown,
			templates,
		)
		if err != nil {
			fatalErrorHandler.Handle(err, "unable to process includes")
			return nil
		}

		if !recurse {
			break
		}
	}

	macros, markdown, err := macro.ExtractMacros(
		filepath.Dir(file),
		cCtx.String("include-path"),
		markdown,
		templates,
	)
	if err != nil {
		fatalErrorHandler.Handle(err, "unable to extract macros")
		return nil
	}

	macros = append(macros, stdlib.Macros...)

	for _, macro := range macros {
		markdown, err = macro.Apply(markdown)
		if err != nil {
			fatalErrorHandler.Handle(err, "unable to apply macro")
			return nil
		}
	}

	links, err := page.ResolveRelativeLinks(api, meta, markdown, filepath.Dir(file), cCtx.String("space"), cCtx.Bool("title-from-h1"), parents, cCtx.Bool("title-append-generated-hash"))
	if err != nil {
		fatalErrorHandler.Handle(err, "unable to resolve relative links")
		return nil
	}

	markdown = page.SubstituteLinks(markdown, links)

	if cCtx.Bool("dry-run") {
		_, _, err := page.ResolvePage(cCtx.Bool("dry-run"), api, meta)
		if err != nil {
			fatalErrorHandler.Handle(err, "unable to resolve page location")
			return nil
		}
	}

	if cCtx.Bool("compile-only") || cCtx.Bool("dry-run") {
		if cCtx.Bool("drop-h1") {
			log.Info(
				"the leading H1 heading will be excluded from the Confluence output",
			)
		}
		html, _ := mark.CompileMarkdown(markdown, stdlib, file, cCtx.String("mermaid-provider"), cCtx.Float64("mermaid-scale"), cCtx.Bool("drop-h1"), cCtx.Bool("strip-linebreaks"))
		fmt.Println(html)
		return nil
	}

	var target *confluence.PageInfo

	if meta != nil {
		parent, page, err := page.ResolvePage(cCtx.Bool("dry-run"), api, meta)
		if err != nil {
			fatalErrorHandler.Handle(karma.Describe("title", meta.Title).Reason(err), "unable to resolve %s", meta.Type)
			return nil
		}

		if page == nil {
			page, err = api.CreatePage(
				meta.Space,
				meta.Type,
				parent,
				meta.Title,
				``,
			)
			if err != nil {
				fatalErrorHandler.Handle(err, "can't create %s %q", meta.Type, meta.Title)
				return nil
			}
			// (issues/139): A delay between the create and update call
			// helps mitigate a 409 conflict that can occur when attempting
			// to update a page just after it was created.
			time.Sleep(1 * time.Second)
		}

		target = page
	} else {
		if pageID == "" {
			fatalErrorHandler.Handle(nil, "URL should provide 'pageId' GET-parameter")
			return nil
		}

		page, err := api.GetPageByID(pageID)
		if err != nil {
			fatalErrorHandler.Handle(err, "unable to retrieve page by id")
			return nil
		}

		target = page
	}

	// Resolve attachments created from <!-- Attachment: --> directive
	localAttachments, err := attachment.ResolveLocalAttachments(vfs.LocalOS, filepath.Dir(file), meta.Attachments)
	if err != nil {
		fatalErrorHandler.Handle(err, "unable to locate attachments")
		return nil
	}

	attaches, err := attachment.ResolveAttachments(
		api,
		target,
		localAttachments,
	)
	if err != nil {
		fatalErrorHandler.Handle(err, "unable to create/update attachments")
		return nil
	}

	markdown = attachment.CompileAttachmentLinks(markdown, attaches)

	if cCtx.Bool("drop-h1") {
		log.Info(
			"the leading H1 heading will be excluded from the Confluence output",
		)
	}

	html, inlineAttachments := mark.CompileMarkdown(markdown, stdlib, file, cCtx.String("mermaid-provider"), cCtx.Float64("mermaid-scale"), cCtx.Bool("drop-h1"), cCtx.Bool("strip-linebreaks"))

	// Resolve attachements detected from markdown
	_, err = attachment.ResolveAttachments(
		api,
		target,
		inlineAttachments,
	)
	if err != nil {
		fatalErrorHandler.Handle(err, "unable to create/update attachments")
		return nil
	}

	{
		var buffer bytes.Buffer

		err := stdlib.Templates.ExecuteTemplate(
			&buffer,
			"ac:layout",
			struct {
				Layout string
				Sidebar string
				Body string
			}{
				Layout: meta.Layout,
				Sidebar: meta.Sidebar,
				Body: html,
			},
		)
		if err != nil {
			fatalErrorHandler.Handle(err, "unable to execute layout template")
			return nil
		}

		html = buffer.String()
	}

	var finalVersionMessage string
	var shouldUpdatePage bool = true

	if cCtx.Bool("changes-only") {
		contentHash := getSHA1Hash(html)

		log.Debugf(
			nil,
			"content hash: %s",
			contentHash,
		)

		versionPattern := `\[v([a-f0-9]{40})]$`
		re := regexp.MustCompile(versionPattern)

		matches := re.FindStringSubmatch(target.Version.Message)

		if len(matches) > 1 {
			log.Debugf(
				nil,
				"previous content hash: %s",
				matches[1],
			)

			if matches[1] == contentHash {
				log.Infof(
					nil,
					"page %q is already up to date",
					target.Title,
				)
				shouldUpdatePage = false
			}
		}

		finalVersionMessage = fmt.Sprintf("%s [v%s]", cCtx.String("version-message"), contentHash)
	} else {
		finalVersionMessage = cCtx.String("version-message")
	}

	if shouldUpdatePage {
		err = api.UpdatePage(target, html, cCtx.Bool("minor-edit"), finalVersionMessage, meta.Labels, meta.ContentAppearance, meta.Emoji)
		if err != nil {
			fatalErrorHandler.Handle(err, "unable to update page")
			return nil
		}
	}

	if !updateLabels(api, target, meta, fatalErrorHandler) { // on error updating labels, return nil
		return nil
	}

	if cCtx.Bool("edit-lock") {
		log.Infof(
			nil,
			`edit locked on page %q by user %q to prevent manual edits`,
			target.Title,
			username,
		)

		err := api.RestrictPageUpdates(target, username)
		if err != nil {
			fatalErrorHandler.Handle(err, "unable to restrict page updates")
			return nil
		}
	}

	return target
}

func updateLabels(api *confluence.API, target *confluence.PageInfo, meta *metadata.Meta, fatalErrorHandler *FatalErrorHandler) bool {
	labelInfo, err := api.GetPageLabels(target, "global")
	if err != nil {
		log.Fatal(err)
	}

	log.Debug("Page Labels:")
	log.Debug(labelInfo.Labels)

	log.Debug("Meta Labels:")
	log.Debug(meta.Labels)

	delLabels := determineLabelsToRemove(labelInfo, meta)
	log.Debug("Del Labels:")
	log.Debug(delLabels)

	addLabels := determineLabelsToAdd(meta, labelInfo)
	log.Debug("Add Labels:")
	log.Debug(addLabels)

	if len(addLabels) > 0 {
		_, err = api.AddPageLabels(target, addLabels)
		if err != nil {
			fatalErrorHandler.Handle(err, "error adding labels")
			return false
		}
	}

	for _, label := range delLabels {
		_, err = api.DeletePageLabel(target, label)
		if err != nil {
			fatalErrorHandler.Handle(err, "error deleting labels")
			return false
		}
	}
	return true
}

// Page has label but label not in Metadata
func determineLabelsToRemove(labelInfo *confluence.LabelInfo, meta *metadata.Meta) []string {
	var labels []string
	for _, label := range labelInfo.Labels {
		if !slices.ContainsFunc(meta.Labels, func(metaLabel string) bool {
			return strings.EqualFold(metaLabel, label.Name)
		}) {
			labels = append(labels, label.Name)
		}
	}
	return labels
}

// Metadata has label but Page does not have it
func determineLabelsToAdd(meta *metadata.Meta, labelInfo *confluence.LabelInfo) []string {
	var labels []string
	for _, metaLabel := range meta.Labels {
		if !slices.ContainsFunc(labelInfo.Labels, func(label confluence.Label) bool {
			return strings.EqualFold(label.Name, metaLabel)
		}) {
			labels = append(labels, metaLabel)
		}
	}
	return labels
}

func ConfigFilePath() string {
	fp, err := os.UserConfigDir()
	if err != nil {
		log.Fatal(err)
	}
	return filepath.Join(fp, "mark")
}

func SetLogLevel(cCtx *cli.Context) error {
	logLevel := cCtx.String("log-level")
	switch strings.ToUpper(logLevel) {
	case lorg.LevelTrace.String():
		log.SetLevel(lorg.LevelTrace)
	case lorg.LevelDebug.String():
		log.SetLevel(lorg.LevelDebug)
	case lorg.LevelInfo.String():
		log.SetLevel(lorg.LevelInfo)
	case lorg.LevelWarning.String():
		log.SetLevel(lorg.LevelWarning)
	case lorg.LevelError.String():
		log.SetLevel(lorg.LevelError)
	case lorg.LevelFatal.String():
		log.SetLevel(lorg.LevelFatal)
	default:
		return fmt.Errorf("unknown log level: %s", logLevel)
	}
	log.GetLevel()

	return nil
}

func getSHA1Hash(input string) string {
	hash := sha1.New()
	hash.Write([]byte(input))
	return hex.EncodeToString(hash.Sum(nil))
}
@@ -1,4 +1,4 @@
package main
package util

import (
	"fmt"
@@ -7,28 +7,28 @@ import (
)

type FatalErrorHandler struct {
	ContinueOnError bool
}

func NewErrorHandler(continueOnError bool) *FatalErrorHandler {
	return &FatalErrorHandler{
		ContinueOnError: continueOnError,
	}
}

func (h *FatalErrorHandler) Handle(err error, format string, args ...interface{}) {
	if err == nil {
		if h.ContinueOnError {
			log.Error(fmt.Sprintf(format, args...))
			return
		}
		log.Fatal(fmt.Sprintf(format, args...))
	}

	if h.ContinueOnError {
		log.Errorf(err, format, args...)
		return
	}
	log.Fatalf(err, format, args...)
}
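The handler above centralizes the continue-on-error behaviour used throughout processFile: with ContinueOnError set, Handle only logs and returns so the batch loop can move on to the next file, otherwise it exits via log.Fatal/log.Fatalf. A hedged sketch of how calling code can use it now that the type lives in util; the error value and file name are invented for illustration:

package main

import (
	"errors"

	"github.com/kovetskiy/mark/util"
)

func main() {
	// With continue-on-error enabled, Handle logs the failure and returns,
	// so a batch run keeps processing the remaining files.
	handler := util.NewErrorHandler(true)
	handler.Handle(errors.New("simulated failure"), "unable to process %q", "docs/example.md")

	// With the default (false), the same call would log fatally and exit.
}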