Compare commits

..

No commits in common. "master" and "v16.0.0" have entirely different histories.

39 changed files with 601 additions and 2276 deletions

View File

@ -39,7 +39,7 @@ jobs:
- name: Check out code into the Go module directory
uses: actions/checkout@v6
- name: markdownlint-cli2-action
uses: DavidAnson/markdownlint-cli2-action@v23
uses: DavidAnson/markdownlint-cli2-action@v22
# Executes Unit Tests
ci-unit-tests:

View File

@ -1,4 +1,4 @@
FROM golang:1.26.2 AS builder
FROM golang:1.26.1 AS builder
ENV GOPATH="/go"
WORKDIR /go/src/github.com/kovetskiy/mark
COPY / .

101
README.md
View File

@ -56,12 +56,11 @@ Also, optional following headers are supported:
* blogpost: [Blog post](https://confluence.atlassian.com/doc/blog-posts-834222533.html) in `Space`. Cannot have `Parent`(s)
```markdown
<!-- Content-Appearance: (full-width|fixed|default) -->
<!-- Content-Appearance: (full-width|fixed) -->
```
* (default) full-width: content will fill the full page width
* fixed: content will be rendered in a fixed narrow view
* default: sets the Confluence property value to `"default"`, which is the narrow layout as set by the Confluence UI. Note: `fixed` maps to a different Confluence property value and can cause misaligned page title and body content — use `default` instead for the narrow layout.
```markdown
<!-- Sidebar: <h2>Test</h2> -->
@ -282,56 +281,21 @@ More details at Confluence [Code Block Macro](https://confluence.atlassian.com/d
### Block Quotes
#### GitHub Alerts Support
You can now use GitHub-style alert syntax in your markdown, and Mark will automatically convert them to Confluence macros:
```markdown
> [!NOTE]
> This creates a blue info box - perfect for helpful information!
> [!TIP]
> This creates a green tip box - great for best practices and suggestions!
> [!IMPORTANT]
> This creates a blue info box - ideal for critical information!
> [!WARNING]
> This creates a yellow warning box - use for important warnings!
> [!CAUTION]
> This creates a red warning box - perfect for dangerous situations!
```
#### Technical Details
Block Quotes are converted to Confluence Info/Warn/Note box when the following conditions are met:
Block Quotes are converted to Confluence Info/Warn/Note box when the following conditions are met
1. The BlockQuote is on the root level of the document (not nested)
2. The first line of the BlockQuote contains one of the following patterns `Info/Warn/Note` or [GitHub MD Alerts style](https://docs.github.com/en/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax#alerts) `[!NOTE]/[!TIP]/[!IMPORTANT]/[!WARNING]/[!CAUTION]`
1. The first line of the BlockQuote contains one of the following patterns `Info/Warn/Note` or [Github MD Alerts style](https://docs.github.com/en/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax#alerts) `[!NOTE]/[!TIP]/[!IMPORTANT]/[!WARNING]/[!CAUTION]`
| GitHub Alerts | Confluence | Description |
| --------------- | ------------ | ------------- |
| `[!TIP]` (green lightbulb) | Tip (green checkmark in circle) | Helpful suggestions and best practices |
| `[!NOTE]` (blue I in circle) | Info (blue I in circle) | General information and notes |
| `[!IMPORTANT]` (purple exclamation mark in speech bubble) | Info (blue I in circle) | Critical information that needs attention |
| `[!WARNING]` (yellow exclamation mark in triangle) | Note (yellow exclamation mark in triangle) | Important warnings and cautions |
| `[!CAUTION]` (red exclamation mark in hexagon) | Warning (red exclamation mark in hexagon) | Dangerous situations requiring immediate attention |
| Github Alerts | Confluence |
| --- | --- |
| Tip (green lightbulb) | Tip (green checkmark in circle) |
| Note (blue I in circle) | Info (blue I in circle) |
| Important (purple exclamation mark in speech bubble) | Info (blue I in circle) |
| Warning (yellow exclamation mark in triangle) | Note (yellow exclamation mark in triangle) |
| Caution (red exclamation mark in hexagon) | Warning (red exclamation mark in hexagon) |
In any other case the default behaviour will be resumed and html `<blockquote>` tag will be used
### Task Lists
Mark supports [GitHub Flavored Markdown task lists](https://github.github.com/gfm/#task-list-items-extension-).
Task lists are automatically converted to Confluence `ac:task-list` elements.
```markdown
- [x] Finished task
- [ ] Unfinished task
```
If a list is "mixed" (contains both tasks and regular list items), it will fall back to a standard HTML list with textual markers like `[x]` or `[ ]` to ensure validity in Confluence storage format.
## Template & Macros
By default, mark provides several built-in templates and macros:
@ -774,7 +738,7 @@ Currently this is not compatible with the automated upload of inline images.
### Render Mermaid Diagram
Confluence doesn't provide [mermaid.js](https://github.com/mermaid-js/mermaid) support natively. Mark provides a convenient way to enable the feature like [GitHub does](https://github.blog/2022-02-14-include-diagrams-markdown-files-mermaid/).
Confluence doesn't provide [mermaid.js](https://github.com/mermaid-js/mermaid) support natively. Mark provides a convenient way to enable the feature like [Github does](https://github.blog/2022-02-14-include-diagrams-markdown-files-mermaid/).
As long as you have a code block marked as "mermaid", mark will automatically render it as a PNG image and attach it to the page as a rendered version of the code block.
```mermaid title diagrams_example
@ -811,12 +775,18 @@ brew tap kovetskiy/mark
brew install mark
```
### Go Install
### Go Install / Go Get
```bash
go install github.com/kovetskiy/mark/v16/cmd/mark@latest
```
For older versions
```bash
go get -v github.com/kovetskiy/mark/v16/cmd/mark
```
### Releases
[Download a release from the Releases page](https://github.com/kovetskiy/mark/releases)
@ -848,7 +818,7 @@ USAGE:
mark [global options]
VERSION:
v16.x.x
v15.5.0@9a26f657c7f2d708ae53722cad03b048c72c9db2
DESCRIPTION:
Mark is a tool to update Atlassian Confluence pages from markdown. Documentation is available here: https://github.com/kovetskiy/mark
@ -877,11 +847,10 @@ GLOBAL OPTIONS:
--space string use specified space key. If the space key is not specified, it must be set in the page metadata. [$MARK_SPACE]
--parents string A list containing the parents of the document separated by parents-delimiter (default: '/'). These will be prepended to the ones defined in the document itself. [$MARK_PARENTS]
--parents-delimiter string The delimiter used for the parents list (default: "/") [$MARK_PARENTS_DELIMITER]
--content-appearance string default content appearance for pages without a Content-Appearance header. Possible values: full-width, fixed, default. [$MARK_CONTENT_APPEARANCE]
--content-appearance string default content appearance for pages without a Content-Appearance header. Possible values: full-width, fixed. [$MARK_CONTENT_APPEARANCE]
--mermaid-scale float defines the scaling factor for mermaid renderings. (default: 1) [$MARK_MERMAID_SCALE]
--include-path string Path for shared includes, used as a fallback if the include doesn't exist in the current directory. [$MARK_INCLUDE_PATH]
--changes-only Avoids re-uploading pages that haven't changed since the last run. [$MARK_CHANGES_ONLY]
--preserve-comments Fetch and preserve inline comments on existing Confluence pages. [$MARK_PRESERVE_COMMENTS]
--d2-scale float defines the scaling factor for d2 renderings. (default: 1) [$MARK_D2_SCALE]
--features string [ --features string ] Enables optional features. Current features: d2, mermaid, mention, mkdocsadmonitions (default: "mermaid", "mention") [$MARK_FEATURES]
--insecure-skip-tls-verify skip TLS certificate verification (useful for self-signed certificates) [$MARK_INSECURE_SKIP_TLS_VERIFY]
@ -905,8 +874,6 @@ image-align = "center"
**NOTE**: Labels aren't supported when using `minor-edit`!
**NOTE**: See [Preserving Inline Comments](#preserving-inline-comments) for a detailed description of the `--preserve-comments` flag.
**NOTE**: The system specific locations are described in here:
<https://pkg.go.dev/os#UserConfigDir>.
Currently, these are:
@ -977,34 +944,6 @@ mark -f "**/docs/*.md"
We recommend to lint your markdown files with [markdownlint-cli2](https://github.com/DavidAnson/markdownlint-cli2) before publishing them to confluence to catch any conversion errors early.
### Preserving Inline Comments
When collaborators leave inline comments on a Confluence page, updating the page via `mark` will normally erase those comments because the stored body is fully replaced. The `--preserve-comments` flag re-attaches inline comment markers to the new page body before uploading, so existing review threads survive updates.
```bash
mark --preserve-comments -f docs/page.md
```
Or via environment variable:
```bash
MARK_PRESERVE_COMMENTS=true mark -f docs/page.md
```
**How it works:**
1. Before uploading, `mark` fetches the current page body and all inline comment markers from the Confluence API.
2. For each existing `<ac:inline-comment-marker>` tag it records the content wrapped by that marker plus a short context window immediately before the opening tag and immediately after the closing tag in the old body (not around the raw selection text, so the context is stable even when the marker wraps additional inline markup such as `<strong>`).
3. It searches the new body for the same selected text and picks the occurrence whose surrounding context best matches the original (using Levenshtein distance), so the marker lands in the right place even if nearby text has shifted.
4. The updated body—with all markers re-embedded—is then uploaded as normal.
**Limitations:**
* If the commented text was deleted from the document, the inline comment cannot be relocated and will be lost. `mark` logs a warning in this case.
* Overlapping selections (two comments anchored to the same stretch of text) are detected; the earlier overlapping match is dropped with a warning, and the later one (higher byte offset) is kept, rather than producing malformed markup.
* `--preserve-comments` is automatically skipped for newly created pages (there are no comments to preserve yet).
* When combined with `--changes-only`, the comment-preservation API calls are skipped entirely on runs where the page content has not changed, avoiding unnecessary round-trips.
## Issues, Bugs & Contributions
I've started the project to solve my own problem and open sourced the solution so anyone who has a problem like me can solve it too.

View File

@ -4,7 +4,6 @@ import (
"bytes"
"crypto/sha256"
"encoding/hex"
"fmt"
"image"
_ "image/gif"
_ "image/jpeg"
@ -13,14 +12,14 @@ import (
"net/url"
"path"
"path/filepath"
"cmp"
"slices"
"sort"
"strconv"
"strings"
"github.com/kovetskiy/mark/v16/confluence"
"github.com/kovetskiy/mark/v16/vfs"
"github.com/rs/zerolog/log"
"github.com/reconquest/karma-go"
"github.com/reconquest/pkg/log"
)
const (
@ -49,16 +48,12 @@ func ResolveAttachments(
attachments []Attachment,
) ([]Attachment, error) {
for i := range attachments {
// Skip checksum computation if already set (e.g. by mermaid/d2 renderers
// which use the source content as the stable checksum rather than the
// rendered PNG bytes, which may be non-deterministic across environments).
if attachments[i].Checksum != "" {
continue
}
checksum, err := GetChecksum(bytes.NewReader(attachments[i].FileBytes))
if err != nil {
return nil, fmt.Errorf("unable to get checksum for attachment %q: %w", attachments[i].Name, err)
return nil, karma.Format(
err,
"unable to get checksum for attachment: %q", attachments[i].Name,
)
}
attachments[i].Checksum = checksum
@ -66,7 +61,7 @@ func ResolveAttachments(
remotes, err := api.GetAttachments(page.ID)
if err != nil {
return nil, fmt.Errorf("unable to get attachments for page %s: %w", page.ID, err)
return nil, karma.Format(err, "unable to get attachments for page %s", page.ID)
}
existing := []Attachment{}
@ -106,7 +101,7 @@ func ResolveAttachments(
}
for i, attachment := range creating {
log.Info().Msgf("creating attachment: %q", attachment.Name)
log.Infof(nil, "creating attachment: %q", attachment.Name)
info, err := api.CreateAttachment(
page.ID,
@ -115,7 +110,11 @@ func ResolveAttachments(
bytes.NewReader(attachment.FileBytes),
)
if err != nil {
return nil, fmt.Errorf("unable to create attachment %q: %w", attachment.Name, err)
return nil, karma.Format(
err,
"unable to create attachment %q",
attachment.Name,
)
}
attachment.ID = info.ID
@ -128,7 +127,7 @@ func ResolveAttachments(
}
for i, attachment := range updating {
log.Info().Msgf("updating attachment: %q", attachment.Name)
log.Infof(nil, "updating attachment: %q", attachment.Name)
info, err := api.UpdateAttachment(
page.ID,
@ -138,7 +137,11 @@ func ResolveAttachments(
bytes.NewReader(attachment.FileBytes),
)
if err != nil {
return nil, fmt.Errorf("unable to update attachment %q: %w", attachment.Name, err)
return nil, karma.Format(
err,
"unable to update attachment %q",
attachment.Name,
)
}
attachment.Link = path.Join(
@ -150,7 +153,7 @@ func ResolveAttachments(
}
for i := range existing {
log.Info().Msgf("keeping unmodified attachment: %q", existing[i].Name)
log.Infof(nil, "keeping unmodified attachment: %q", existing[i].Name)
}
attachments = []Attachment{}
@ -170,7 +173,10 @@ func ResolveLocalAttachments(opener vfs.Opener, base string, replacements []stri
for i := range attachments {
checksum, err := GetChecksum(bytes.NewReader(attachments[i].FileBytes))
if err != nil {
return nil, fmt.Errorf("unable to get checksum for attachment %q: %w", attachments[i].Name, err)
return nil, karma.Format(
err,
"unable to get checksum for attachment: %q", attachments[i].Name,
)
}
attachments[i].Checksum = checksum
@ -198,7 +204,7 @@ func prepareAttachment(opener vfs.Opener, base, name string) (Attachment, error)
attachmentPath := filepath.Join(base, name)
file, err := opener.Open(attachmentPath)
if err != nil {
return Attachment{}, fmt.Errorf("unable to open file %q: %w", attachmentPath, err)
return Attachment{}, karma.Format(err, "unable to open file: %q", attachmentPath)
}
defer func() {
_ = file.Close()
@ -206,7 +212,7 @@ func prepareAttachment(opener vfs.Opener, base, name string) (Attachment, error)
fileBytes, err := io.ReadAll(file)
if err != nil {
return Attachment{}, fmt.Errorf("unable to read file %q: %w", attachmentPath, err)
return Attachment{}, karma.Format(err, "unable to read file: %q", attachmentPath)
}
attachment := Attachment{
@ -243,8 +249,8 @@ func CompileAttachmentLinks(markdown []byte, attachments []Attachment) []byte {
// attachments/a.jpg
// attachments/a.jpg.jpg
// so we replace longer and then shorter
slices.SortStableFunc(replaces, func(a, b string) int {
return cmp.Compare(len(b), len(a))
sort.SliceStable(replaces, func(i, j int) bool {
return len(replaces[i]) > len(replaces[j])
})
for _, replace := range replaces {
@ -254,7 +260,7 @@ func CompileAttachmentLinks(markdown []byte, attachments []Attachment) []byte {
if bytes.Contains(markdown, []byte("attachment://"+replace)) {
from := "attachment://" + replace
log.Debug().Msgf("replacing legacy link: %q -> %q", from, to)
log.Debugf(nil, "replacing legacy link: %q -> %q", from, to)
markdown = bytes.ReplaceAll(
markdown,
@ -268,7 +274,7 @@ func CompileAttachmentLinks(markdown []byte, attachments []Attachment) []byte {
if bytes.Contains(markdown, []byte(replace)) {
from := replace
log.Debug().Msgf("replacing link: %q -> %q", from, to)
log.Debugf(nil, "replacing link: %q -> %q", from, to)
markdown = bytes.ReplaceAll(
markdown,
@ -280,7 +286,7 @@ func CompileAttachmentLinks(markdown []byte, attachments []Attachment) []byte {
}
if !found {
log.Warn().Msgf("unused attachment: %s", replace)
log.Warningf(nil, "unused attachment: %s", replace)
}
}

View File

@ -6,7 +6,7 @@ import (
"os"
"github.com/kovetskiy/mark/v16/util"
"github.com/rs/zerolog/log"
"github.com/reconquest/pkg/log"
"github.com/urfave/cli/v3"
)
@ -34,6 +34,6 @@ func main() {
}
if err := cmd.Run(context.TODO(), os.Args); err != nil {
log.Fatal().Msg(err.Error())
log.Fatal(err)
}
}

View File

@ -13,8 +13,9 @@ import (
"unicode/utf8"
"github.com/kovetskiy/gopencils"
"github.com/rs/zerolog"
"github.com/rs/zerolog/log"
"github.com/kovetskiy/lorg"
"github.com/reconquest/karma-go"
"github.com/reconquest/pkg/log"
)
type User struct {
@ -58,12 +59,6 @@ type PageInfo struct {
Title string `json:"title"`
} `json:"ancestors"`
Body struct {
Storage struct {
Value string `json:"value"`
} `json:"storage"`
} `json:"body"`
Links struct {
Full string `json:"webui"`
Base string `json:"-"` // Not from JSON; populated from response _links.base
@ -91,29 +86,6 @@ type LabelInfo struct {
Labels []Label `json:"results"`
Size int `json:"number"`
}
type InlineCommentProperties struct {
OriginalSelection string `json:"originalSelection"`
MarkerRef string `json:"markerRef"`
}
type InlineCommentExtensions struct {
Location string `json:"location"`
InlineProperties InlineCommentProperties `json:"inlineProperties"`
}
type InlineCommentResult struct {
Extensions InlineCommentExtensions `json:"extensions"`
}
type InlineComments struct {
Links struct {
Context string `json:"context"`
Next string `json:"next"`
} `json:"_links"`
Results []InlineCommentResult `json:"results"`
}
type form struct {
buffer io.Reader
writer *multipart.Writer
@ -123,8 +95,8 @@ type tracer struct {
prefix string
}
func (tracer *tracer) Printf(format string, args ...any) {
log.Trace().Msgf(tracer.prefix+" "+format, args...)
func (tracer *tracer) Printf(format string, args ...interface{}) {
log.Tracef(nil, tracer.prefix+" "+format, args...)
}
func NewAPI(baseURL string, username string, password string, insecureSkipVerify bool) *API {
@ -160,7 +132,7 @@ func NewAPI(baseURL string, username string, password string, insecureSkipVerify
json := gopencils.Api(baseURL+"/rpc/json-rpc/confluenceservice-v2", auth, httpClient, 3)
if zerolog.GlobalLevel() == zerolog.TraceLevel {
if log.GetLevel() == lorg.LevelTrace {
rest.Logger = &tracer{"rest:"}
json.Logger = &tracer{"json-rpc:"}
}
@ -175,7 +147,11 @@ func NewAPI(baseURL string, username string, password string, insecureSkipVerify
func (api *API) FindRootPage(space string) (*PageInfo, error) {
page, err := api.FindPage(space, ``, "page")
if err != nil {
return nil, fmt.Errorf("can't obtain first page from space %q: %w", space, err)
return nil, karma.Format(
err,
"can't obtain first page from space %q",
space,
)
}
if page == nil {
@ -377,7 +353,11 @@ func (api *API) UpdateAttachment(
err = json.Unmarshal(result, &extendedResponse)
if err != nil {
return info, fmt.Errorf("unable to unmarshal JSON response as full response format (JSON=%s): %w", string(result), err)
return info, karma.Format(
err,
"unable to unmarshal JSON response as full response format: %s",
string(result),
)
}
if len(extendedResponse.Results) > 0 {
@ -397,7 +377,11 @@ func (api *API) UpdateAttachment(
var shortResponse AttachmentInfo
err = json.Unmarshal(result, &shortResponse)
if err != nil {
return info, fmt.Errorf("unable to unmarshal JSON response as short response format (JSON=%s): %w", string(result), err)
return info, karma.Format(
err,
"unable to unmarshal JSON response as short response format: %s",
string(result),
)
}
return shortResponse, nil
@ -411,27 +395,42 @@ func getAttachmentPayload(name, comment string, reader io.Reader) (*form, error)
content, err := writer.CreateFormFile("file", name)
if err != nil {
return nil, fmt.Errorf("unable to create form file: %w", err)
return nil, karma.Format(
err,
"unable to create form file",
)
}
_, err = io.Copy(content, reader)
if err != nil {
return nil, fmt.Errorf("unable to copy i/o between form-file and file: %w", err)
return nil, karma.Format(
err,
"unable to copy i/o between form-file and file",
)
}
commentWriter, err := writer.CreateFormField("comment")
if err != nil {
return nil, fmt.Errorf("unable to create form field for comment: %w", err)
return nil, karma.Format(
err,
"unable to create form field for comment",
)
}
_, err = commentWriter.Write([]byte(comment))
if err != nil {
return nil, fmt.Errorf("unable to write comment in form-field: %w", err)
return nil, karma.Format(
err,
"unable to write comment in form-field",
)
}
err = writer.Close()
if err != nil {
return nil, fmt.Errorf("unable to close form-writer: %w", err)
return nil, karma.Format(
err,
"unable to close form-writer",
)
}
return &form{
@ -493,13 +492,9 @@ func (api *API) GetAttachments(pageID string) ([]AttachmentInfo, error) {
}
func (api *API) GetPageByID(pageID string) (*PageInfo, error) {
return api.GetPageByIDExpanded(pageID, "ancestors,version")
}
func (api *API) GetPageByIDExpanded(pageID string, expand string) (*PageInfo, error) {
request, err := api.rest.Res(
"content/"+pageID, &PageInfo{},
).Get(map[string]string{"expand": expand})
).Get(map[string]string{"expand": "ancestors,version"})
if err != nil {
return nil, err
}
@ -511,44 +506,6 @@ func (api *API) GetPageByIDExpanded(pageID string, expand string) (*PageInfo, er
return request.Response.(*PageInfo), nil
}
func (api *API) GetInlineComments(pageID string) (*InlineComments, error) {
const pageSize = 100
all := &InlineComments{}
start := 0
for {
result := &InlineComments{}
request, err := api.rest.Res(
"content/"+pageID+"/child/comment", result,
).Get(map[string]string{
"expand": "extensions.inlineProperties",
"limit": fmt.Sprintf("%d", pageSize),
"start": fmt.Sprintf("%d", start),
})
if err != nil {
return nil, err
}
if request.Raw.StatusCode != http.StatusOK {
return nil, newErrorStatusNotOK(request)
}
if all.Links.Context == "" {
all.Links = result.Links
}
all.Results = append(all.Results, result.Results...)
if len(result.Results) < pageSize || result.Links.Next == "" {
break
}
start += len(result.Results)
}
return all, nil
}
func (api *API) CreatePage(
space string,
pageType string,
@ -556,21 +513,21 @@ func (api *API) CreatePage(
title string,
body string,
) (*PageInfo, error) {
payload := map[string]any{
payload := map[string]interface{}{
"type": pageType,
"title": title,
"space": map[string]any{
"space": map[string]interface{}{
"key": space,
},
"body": map[string]any{
"storage": map[string]any{
"body": map[string]interface{}{
"storage": map[string]interface{}{
"representation": "storage",
"value": body,
},
},
"metadata": map[string]any{
"properties": map[string]any{
"editor": map[string]any{
"metadata": map[string]interface{}{
"properties": map[string]interface{}{
"editor": map[string]interface{}{
"value": "v2",
},
},
@ -578,7 +535,7 @@ func (api *API) CreatePage(
}
if parent != nil {
payload["ancestors"] = []map[string]any{
payload["ancestors"] = []map[string]interface{}{
{"id": parent.ID},
}
}
@ -599,20 +556,20 @@ func (api *API) CreatePage(
func (api *API) UpdatePage(page *PageInfo, newContent string, minorEdit bool, versionMessage string, appearance string, emojiString string) error {
nextPageVersion := page.Version.Number + 1
oldAncestors := []map[string]any{}
oldAncestors := []map[string]interface{}{}
if page.Type != "blogpost" && len(page.Ancestors) > 0 {
// picking only the last one, which is required by confluence
oldAncestors = []map[string]any{
oldAncestors = []map[string]interface{}{
{"id": page.Ancestors[len(page.Ancestors)-1].ID},
}
}
properties := map[string]any{
properties := map[string]interface{}{
// Fix to set full-width as has changed on Confluence APIs again.
// https://jira.atlassian.com/browse/CONFCLOUD-65447
//
"content-appearance-published": map[string]any{
"content-appearance-published": map[string]interface{}{
"value": appearance,
},
// content-appearance-draft should not be set as this is impacted by
@ -626,37 +583,37 @@ func (api *API) UpdatePage(page *PageInfo, newContent string, minorEdit bool, ve
}
unicodeHex := fmt.Sprintf("%x", r)
properties["emoji-title-draft"] = map[string]any{
properties["emoji-title-draft"] = map[string]interface{}{
"value": unicodeHex,
}
properties["emoji-title-published"] = map[string]any{
properties["emoji-title-published"] = map[string]interface{}{
"value": unicodeHex,
}
}
payload := map[string]any{
payload := map[string]interface{}{
"id": page.ID,
"type": page.Type,
"title": page.Title,
"version": map[string]any{
"version": map[string]interface{}{
"number": nextPageVersion,
"minorEdit": minorEdit,
"message": versionMessage,
},
"ancestors": oldAncestors,
"body": map[string]any{
"storage": map[string]any{
"body": map[string]interface{}{
"storage": map[string]interface{}{
"value": newContent,
"representation": "storage",
},
},
"metadata": map[string]any{
"metadata": map[string]interface{}{
"properties": properties,
},
}
request, err := api.rest.Res(
"content/"+page.ID, &map[string]any{},
"content/"+page.ID, &map[string]interface{}{},
).Put(payload)
if err != nil {
return err
@ -671,10 +628,10 @@ func (api *API) UpdatePage(page *PageInfo, newContent string, minorEdit bool, ve
func (api *API) AddPageLabels(page *PageInfo, newLabels []string) (*LabelInfo, error) {
labels := []map[string]any{}
labels := []map[string]interface{}{}
for _, label := range newLabels {
if label != "" {
item := map[string]any{
item := map[string]interface{}{
"prefix": "global",
"name": label,
}
@ -778,10 +735,13 @@ func (api *API) GetUserByName(name string) (*User, error) {
if err != nil {
return nil, err
}
if request.Raw.StatusCode != http.StatusOK {
return nil, newErrorStatusNotOK(request)
}
// Try old path
if request.Raw.StatusCode != http.StatusOK || len(response.Results) == 0 {
request, err = api.rest.
if len(response.Results) == 0 {
request, err := api.rest.
Res("search", &response).
Get(map[string]string{
"cql": fmt.Sprintf("user.fullname~%q", name),
@ -796,7 +756,11 @@ func (api *API) GetUserByName(name string) (*User, error) {
if len(response.Results) == 0 {
return nil, fmt.Errorf("user with name %q is not found", name)
return nil, karma.
Describe("name", name).
Reason(
"user with given name is not found",
)
}
return &response.Results[0].User, nil
@ -836,17 +800,17 @@ func (api *API) RestrictPageUpdatesCloud(
user = currentUser
}
var result any
var result interface{}
request, err := api.rest.
Res("content").
Id(page.ID).
Res("restriction", &result).
Post([]map[string]any{
Post([]map[string]interface{}{
{
"operation": "update",
"restrictions": map[string]any{
"user": []map[string]any{
"restrictions": map[string]interface{}{
"user": []map[string]interface{}{
{
"type": "known",
"accountId": user.AccountID,
@ -872,15 +836,15 @@ func (api *API) RestrictPageUpdatesServer(
) error {
var (
err error
result any
result interface{}
)
request, err := api.json.Res(
"setContentPermissions", &result,
).Post([]any{
).Post([]interface{}{
page.ID,
"Edit",
[]map[string]any{
[]map[string]interface{}{
{
"userName": allowedUser,
},

View File

@ -14,7 +14,7 @@ import (
"github.com/chromedp/chromedp"
"github.com/kovetskiy/mark/v16/attachment"
"github.com/rs/zerolog/log"
"github.com/reconquest/pkg/log"
"oss.terrastruct.com/d2/d2graph"
"oss.terrastruct.com/d2/d2layouts/d2dagrelayout"
@ -59,7 +59,7 @@ func ProcessD2(title string, d2Diagram []byte, scale float64) (attachment.Attach
return attachment.Attachment{}, err
}
log.Debug().Msgf("Rendering: %q", title)
log.Debugf(nil, "Rendering: %q", title)
pngBytes, boxModel, err := convertSVGtoPNG(ctx, out, scale)
if err != nil {
return attachment.Attachment{}, err
@ -73,7 +73,7 @@ func ProcessD2(title string, d2Diagram []byte, scale float64) (attachment.Attach
checkSum, err := attachment.GetChecksum(bytes.NewReader(d2Bytes))
log.Debug().Msgf("Checksum: %q -> %s", title, checkSum)
log.Debugf(nil, "Checksum: %q -> %s", title, checkSum)
if err != nil {
return attachment.Attachment{}, err

29
go.mod
View File

@ -1,21 +1,24 @@
module github.com/kovetskiy/mark/v16
go 1.26
go 1.25.0
require (
github.com/bmatcuk/doublestar/v4 v4.10.0
github.com/chromedp/cdproto v0.0.0-20260321001828-e3e3800016bc
github.com/chromedp/chromedp v0.15.1
github.com/dreampuf/mermaid.go v0.2.0
github.com/chromedp/cdproto v0.0.0-20250803210736-d308e07a266d
github.com/chromedp/chromedp v0.14.2
github.com/dreampuf/mermaid.go v0.0.39
github.com/kovetskiy/gopencils v0.0.0-20250404051442-0b776066936a
github.com/rs/zerolog v1.35.1
github.com/kovetskiy/lorg v1.2.1-0.20240830111423-ba4fe8b6f7c4
github.com/reconquest/karma-go v1.5.0
github.com/reconquest/pkg v1.3.1-0.20240901105413-68c2adbf2b64
github.com/reconquest/regexputil-go v0.0.0-20160905154124-38573e70c1f4
github.com/stefanfritsch/goldmark-admonitions v1.1.1
github.com/stretchr/testify v1.11.1
github.com/urfave/cli-altsrc/v3 v3.1.0
github.com/urfave/cli/v3 v3.8.0
github.com/yuin/goldmark v1.8.2
github.com/urfave/cli/v3 v3.7.0
github.com/yuin/goldmark v1.7.17
go.yaml.in/yaml/v3 v3.0.4
golang.org/x/text v0.36.0
golang.org/x/text v0.35.0
oss.terrastruct.com/d2 v0.7.1
oss.terrastruct.com/util-go v0.0.0-20250213174338-243d8661088a
)
@ -30,7 +33,7 @@ require (
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/dlclark/regexp2 v1.11.4 // indirect
github.com/dop251/goja v0.0.0-20240927123429-241b342198c2 // indirect
github.com/go-json-experiment/json v0.0.0-20260214004413-d219187c3433 // indirect
github.com/go-json-experiment/json v0.0.0-20250725192818-e39067aee2d2 // indirect
github.com/go-sourcemap/sourcemap v2.1.4+incompatible // indirect
github.com/gobwas/httphead v0.1.0 // indirect
github.com/gobwas/pool v0.2.1 // indirect
@ -39,17 +42,17 @@ require (
github.com/google/pprof v0.0.0-20240927180334-d43a67379298 // indirect
github.com/kr/pretty v0.3.1 // indirect
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mazznoer/csscolorparser v0.1.5 // indirect
github.com/orisano/pixelmatch v0.0.0-20230914042517-fa304d1dc785 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/reconquest/cog v0.0.0-20240830113510-c7ba12d0beeb // indirect
github.com/rivo/uniseg v0.4.7 // indirect
github.com/rogpeppe/go-internal v1.11.0 // indirect
github.com/zazab/zhash v0.0.0-20221031090444-2b0d50417446 // indirect
golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 // indirect
golang.org/x/image v0.38.0 // indirect
golang.org/x/image v0.20.0 // indirect
golang.org/x/net v0.44.0 // indirect
golang.org/x/sys v0.42.0 // indirect
golang.org/x/sys v0.36.0 // indirect
golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect

54
go.sum
View File

@ -18,10 +18,10 @@ github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsVi
github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU=
github.com/bmatcuk/doublestar/v4 v4.10.0 h1:zU9WiOla1YA122oLM6i4EXvGW62DvKZVxIe6TYWexEs=
github.com/bmatcuk/doublestar/v4 v4.10.0/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc=
github.com/chromedp/cdproto v0.0.0-20260321001828-e3e3800016bc h1:wkN/LMi5vc60pBRWx6qpbk/aEvq3/ZVNpnMvsw8PVVU=
github.com/chromedp/cdproto v0.0.0-20260321001828-e3e3800016bc/go.mod h1:cbyjALe67vDvlvdiG9369P8w5U2w6IshwtyD2f2Tvag=
github.com/chromedp/chromedp v0.15.1 h1:EJWiPm7BNqDqjYy6U0lTSL5wNH+iNt9GjC3a4gfjNyQ=
github.com/chromedp/chromedp v0.15.1/go.mod h1:CdTHtUqD/dqaFw/cvFWtTydoEQS44wLBuwbMR9EkOY4=
github.com/chromedp/cdproto v0.0.0-20250803210736-d308e07a266d h1:ZtA1sedVbEW7EW80Iz2GR3Ye6PwbJAJXjv7D74xG6HU=
github.com/chromedp/cdproto v0.0.0-20250803210736-d308e07a266d/go.mod h1:NItd7aLkcfOA/dcMXvl8p1u+lQqioRMq/SqDp71Pb/k=
github.com/chromedp/chromedp v0.14.2 h1:r3b/WtwM50RsBZHMUm9fsNhhzRStTHrKdr2zmwbZSzM=
github.com/chromedp/chromedp v0.14.2/go.mod h1:rHzAv60xDE7VNy/MYtTUrYreSc0ujt2O1/C3bzctYBo=
github.com/chromedp/sysutil v1.1.0 h1:PUFNv5EcprjqXZD9nJb9b/c9ibAbxiYo4exNWZyipwM=
github.com/chromedp/sysutil v1.1.0/go.mod h1:WiThHUdltqCNKGc4gaU50XgYjwjYIhKWoHGPTUfWTJ8=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
@ -31,10 +31,10 @@ github.com/dlclark/regexp2 v1.11.4 h1:rPYF9/LECdNymJufQKmri9gV604RvvABwgOA8un7yA
github.com/dlclark/regexp2 v1.11.4/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
github.com/dop251/goja v0.0.0-20240927123429-241b342198c2 h1:Ux9RXuPQmTB4C1MKagNLme0krvq8ulewfor+ORO/QL4=
github.com/dop251/goja v0.0.0-20240927123429-241b342198c2/go.mod h1:MxLav0peU43GgvwVgNbLAj1s/bSGboKkhuULvq/7hx4=
github.com/dreampuf/mermaid.go v0.2.0 h1:dghdUGw7zoeISIHRMOzHdQ/A7gpHv+dKtVO/ntPXFeo=
github.com/dreampuf/mermaid.go v0.2.0/go.mod h1:9jSzOKzV59UX8Gc9EJ5xuiJeldHpTEmKxF2pwu42r2g=
github.com/go-json-experiment/json v0.0.0-20260214004413-d219187c3433 h1:vymEbVwYFP/L05h5TKQxvkXoKxNvTpjxYKdF1Nlwuao=
github.com/go-json-experiment/json v0.0.0-20260214004413-d219187c3433/go.mod h1:tphK2c80bpPhMOI4v6bIc2xWywPfbqi1Z06+RcrMkDg=
github.com/dreampuf/mermaid.go v0.0.39 h1:K7R+FaAOxKd32/yic9SVz0u9bedS5nV/6nUgGnKdJuY=
github.com/dreampuf/mermaid.go v0.0.39/go.mod h1:xBmQWWnPFQl7HIfEz+KnZ+BpXPJl9qXe9aISIPJGsAM=
github.com/go-json-experiment/json v0.0.0-20250725192818-e39067aee2d2 h1:iizUGZ9pEquQS5jTGkh4AqeeHCMbfbjeb0zMt0aEFzs=
github.com/go-json-experiment/json v0.0.0-20250725192818-e39067aee2d2/go.mod h1:TiCD2a1pcmjd7YnhGH0f/zKNcCD06B029pHhzV23c2M=
github.com/go-sourcemap/sourcemap v2.1.4+incompatible h1:a+iTbH5auLKxaNwQFg0B+TCYl6lbukKPc7b5x0n1s6Q=
github.com/go-sourcemap/sourcemap v2.1.4+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg=
github.com/gobwas/httphead v0.1.0 h1:exrUm0f4YX0L7EBwZHuCF4GDp8aJfVeBrlLQrs6NqWU=
@ -51,6 +51,8 @@ github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUq
github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
github.com/kovetskiy/gopencils v0.0.0-20250404051442-0b776066936a h1:OPt6gCghZXQ/WZpT6EhGkA7v+YMAYzcCb8SPQWmsb/8=
github.com/kovetskiy/gopencils v0.0.0-20250404051442-0b776066936a/go.mod h1:gRW37oDEg9LzOHApv31YzxKBICcCmPtDogaImsxZ6xc=
github.com/kovetskiy/lorg v1.2.1-0.20240830111423-ba4fe8b6f7c4 h1:2eV8tF1u58dqRJMlFUD/Df26BxcIlGVy71rZHN+aNoI=
github.com/kovetskiy/lorg v1.2.1-0.20240830111423-ba4fe8b6f7c4/go.mod h1:p1RuSvyflTF/G4ubeATGurCRKWkULOrN/4PUAEFRq0s=
github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
@ -62,10 +64,6 @@ github.com/ledongthuc/pdf v0.0.0-20220302134840-0c2507a12d80 h1:6Yzfa6GP0rIo/kUL
github.com/ledongthuc/pdf v0.0.0-20220302134840-0c2507a12d80/go.mod h1:imJHygn/1yfhB7XSJJKlFZKl/J+dCPAknuiaGOshXAs=
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE=
github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mazznoer/csscolorparser v0.1.5 h1:Wr4uNIE+pHWN3TqZn2SGpA2nLRG064gB7WdSfSS5cz4=
github.com/mazznoer/csscolorparser v0.1.5/go.mod h1:OQRVvgCyHDCAquR1YWfSwwaDcM0LhnSffGnlbOew/3I=
github.com/orisano/pixelmatch v0.0.0-20230914042517-fa304d1dc785 h1:J1//5K/6QF10cZ59zLcVNFGmBfiSrH8Cho/lNrViK9s=
@ -73,26 +71,34 @@ github.com/orisano/pixelmatch v0.0.0-20230914042517-fa304d1dc785/go.mod h1:nZgzb
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/reconquest/cog v0.0.0-20240830113510-c7ba12d0beeb h1:hJ1ExqE2lTMgTRmjmSiC2hm+sMXCCjjbyiGo3irbEW8=
github.com/reconquest/cog v0.0.0-20240830113510-c7ba12d0beeb/go.mod h1:n+lvvNLeoQmYVvYTFGCtLvoyD9Wz46RO3yCk6GKyZ/4=
github.com/reconquest/karma-go v1.5.0 h1:Chn4LtauwnvKfz13ZbmGNrRLKO1NciExHQSOBOsQqt4=
github.com/reconquest/karma-go v1.5.0/go.mod h1:52XRXXa2ec/VNrlCirwasdJfNmjI1O87q098gmqILh0=
github.com/reconquest/pkg v1.3.1-0.20240901105413-68c2adbf2b64 h1:OBNLiZay5PYLmGRXGIMEgWSIgbSjOj8nHZxqwLbSsF4=
github.com/reconquest/pkg v1.3.1-0.20240901105413-68c2adbf2b64/go.mod h1:r1Z1JNh3in9xLWbhv5u7cdox9vvGFjlKp89VI10Jrdo=
github.com/reconquest/regexputil-go v0.0.0-20160905154124-38573e70c1f4 h1:bcDXaTFC09IIg13Z8gfQHk4gSu001ET7ssW/wKRvPzg=
github.com/reconquest/regexputil-go v0.0.0-20160905154124-38573e70c1f4/go.mod h1:OI1di2iiFSwX3D70iZjzdmCPPfssjOl+HX40tI3VaXA=
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M=
github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA=
github.com/rs/zerolog v1.35.1 h1:m7xQeoiLIiV0BCEY4Hs+j2NG4Gp2o2KPKmhnnLiazKI=
github.com/rs/zerolog v1.35.1/go.mod h1:EjML9kdfa/RMA7h/6z6pYmq1ykOuA8/mjWaEvGI+jcw=
github.com/stefanfritsch/goldmark-admonitions v1.1.1 h1:SncsICdQrIYYaq02Ta+zyc9gNmMfYqQH2qwLSCJYxA4=
github.com/stefanfritsch/goldmark-admonitions v1.1.1/go.mod h1:cOZK5O0gE6eWfpxTdjGUmeONW2IL9j3Zujv3KlZWlLo=
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
github.com/urfave/cli-altsrc/v3 v3.1.0 h1:6E5+kXeAWmRxXlPgdEVf9VqVoTJ2MJci0UMpUi/w/bA=
github.com/urfave/cli-altsrc/v3 v3.1.0/go.mod h1:VcWVTGXcL3nrXUDJZagHAeUX702La3PKeWav7KpISqA=
github.com/urfave/cli/v3 v3.8.0 h1:XqKPrm0q4P0q5JpoclYoCAv0/MIvH/jZ2umzuf8pNTI=
github.com/urfave/cli/v3 v3.8.0/go.mod h1:ysVLtOEmg2tOy6PknnYVhDoouyC/6N42TMeoMzskhso=
github.com/urfave/cli/v3 v3.7.0 h1:AGSnbUyjtLiM+WJUb4dzXKldl/gL+F8OwmRDtVr6g2U=
github.com/urfave/cli/v3 v3.7.0/go.mod h1:ysVLtOEmg2tOy6PknnYVhDoouyC/6N42TMeoMzskhso=
github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU=
github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
github.com/yuin/goldmark v1.8.2 h1:kEGpgqJXdgbkhcOgBxkC0X0PmoPG1ZyoZ117rDVp4zE=
github.com/yuin/goldmark v1.8.2/go.mod h1:ip/1k0VRfGynBgxOz0yCqHrbZXhcjxyuS66Brc7iBKg=
github.com/yuin/goldmark v1.7.17 h1:p36OVWwRb246iHxA/U4p8OPEpOTESm4n+g+8t0EE5uA=
github.com/yuin/goldmark v1.7.17/go.mod h1:ip/1k0VRfGynBgxOz0yCqHrbZXhcjxyuS66Brc7iBKg=
github.com/zazab/zhash v0.0.0-20221031090444-2b0d50417446 h1:75pcOSsb40+ub185cJI7g5uykl9Uu76rD5ONzK/4s40=
github.com/zazab/zhash v0.0.0-20221031090444-2b0d50417446/go.mod h1:NtepZ8TEXErPsmQDMUoN72f8aIy4+xNinSJ3f1giess=
go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc=
@ -101,8 +107,8 @@ golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACk
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 h1:e66Fs6Z+fZTbFBAxKfP3PALWBtpfqks2bwGcexMxgtk=
golang.org/x/exp v0.0.0-20240909161429-701f63a606c0/go.mod h1:2TbTHSBQa924w8M6Xs1QcRcFwyucIwBGpK1p2f1YFFY=
golang.org/x/image v0.38.0 h1:5l+q+Y9JDC7mBOMjo4/aPhMDcxEptsX+Tt3GgRQRPuE=
golang.org/x/image v0.38.0/go.mod h1:/3f6vaXC+6CEanU4KJxbcUZyEePbyKbaLoDOe4ehFYY=
golang.org/x/image v0.20.0 h1:7cVCUjQwfL18gyBJOmYvptfSHS8Fb3YUDtfLIZ7Nbpw=
golang.org/x/image v0.20.0/go.mod h1:0a88To4CYVBAHp5FXJm8o7QbUl37Vd85ply1vyD8auM=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
@ -123,8 +129,8 @@ golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.42.0 h1:omrd2nAlyT5ESRdCLYdm3+fMfNFE/+Rf4bDIQImRJeo=
golang.org/x/sys v0.42.0/go.mod h1:4GL1E5IUh+htKOUEOaiffhrAeqysfVGipDYzABqnCmw=
golang.org/x/sys v0.36.0 h1:KVRy2GtZBrk1cBYA7MKu5bEZFxQk4NIDV6RLVcC8o0k=
golang.org/x/sys v0.36.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
@ -134,8 +140,8 @@ golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.36.0 h1:JfKh3XmcRPqZPKevfXVpI1wXPTqbkE5f7JA92a55Yxg=
golang.org/x/text v0.36.0/go.mod h1:NIdBknypM8iqVmPiuco0Dh6P5Jcdk8lJL0CUebqK164=
golang.org/x/text v0.35.0 h1:JOVx6vVDFokkpaq1AEptVzLTpDe9KGpj5tR4/X+ybL8=
golang.org/x/text v0.35.0/go.mod h1:khi/HExzZJ2pGnjenulevKNX1W67CUy0AsXcNubPGCA=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=

View File

@ -11,7 +11,8 @@ import (
"go.yaml.in/yaml/v3"
"github.com/rs/zerolog/log"
"github.com/reconquest/karma-go"
"github.com/reconquest/pkg/log"
)
// <!-- Include: <template path>
@ -35,6 +36,7 @@ func LoadTemplate(
) (*template.Template, error) {
var (
name = strings.TrimSuffix(path, filepath.Ext(path))
facts = karma.Describe("name", name)
)
if template := templates.Lookup(name); template != nil {
@ -49,7 +51,11 @@ func LoadTemplate(
body, err = os.ReadFile(filepath.Join(includePath, path))
}
if err != nil {
return nil, fmt.Errorf("unable to read template file %q: %w", path, err)
err = facts.Format(
err,
"unable to read template file",
)
return nil, err
}
}
@ -62,7 +68,12 @@ func LoadTemplate(
templates, err = templates.New(name).Delims(left, right).Parse(string(body))
if err != nil {
return nil, fmt.Errorf("unable to parse template %q: %w", name, err)
err = facts.Format(
err,
"unable to parse template",
)
return nil, err
}
return templates, nil
@ -74,15 +85,23 @@ func ProcessIncludes(
contents []byte,
templates *template.Template,
) (*template.Template, []byte, bool, error) {
formatVardump := func(
data map[string]any,
) string {
var parts []string
vardump := func(
facts *karma.Context,
data map[string]interface{},
) *karma.Context {
for key, value := range data {
parts = append(parts, fmt.Sprintf("%s=%v", key, value))
key = "var " + key
facts = facts.Describe(
key,
strings.ReplaceAll(
fmt.Sprint(value),
"\n",
"\n"+strings.Repeat(" ", len(key)+2),
),
)
}
return strings.Join(parts, ", ")
return facts
}
var (
@ -105,7 +124,9 @@ func ProcessIncludes(
left = string(groups[3])
right = string(groups[4])
config = groups[5]
data = map[string]any{}
data = map[string]interface{}{}
facts = karma.Describe("path", path)
)
if delimsNone == "none" {
@ -115,16 +136,21 @@ func ProcessIncludes(
err = yaml.Unmarshal(config, &data)
if err != nil {
err = fmt.Errorf("unable to unmarshal template data config (path=%q, config=%q): %w", path, string(config), err)
err = facts.
Describe("config", string(config)).
Format(
err,
"unable to unmarshal template data config",
)
return spec
}
log.Trace().Interface("vardump", data).Msgf("including template %q", path)
log.Tracef(vardump(facts, data), "including template %q", path)
templates, err = LoadTemplate(base, includePath, path, left, right, templates)
if err != nil {
err = fmt.Errorf("unable to load template %q: %w", path, err)
err = facts.Format(err, "unable to load template")
return spec
}
@ -132,7 +158,10 @@ func ProcessIncludes(
err = templates.Execute(&buffer, data)
if err != nil {
err = fmt.Errorf("unable to execute template %q (vars: %s): %w", path, formatVardump(data), err)
err = vardump(facts, data).Format(
err,
"unable to execute template",
)
return spec
}

View File

@ -8,7 +8,9 @@ import (
"text/template"
"github.com/kovetskiy/mark/v16/includes"
"github.com/rs/zerolog/log"
"github.com/reconquest/karma-go"
"github.com/reconquest/pkg/log"
"github.com/reconquest/regexputil-go"
"go.yaml.in/yaml/v3"
)
@ -37,11 +39,14 @@ func (macro *Macro) Apply(
content = macro.Regexp.ReplaceAllFunc(
content,
func(match []byte) []byte {
config := map[string]any{}
config := map[string]interface{}{}
err = yaml.Unmarshal([]byte(macro.Config), &config)
if err != nil {
err = fmt.Errorf("unable to unmarshal macros config template: %w", err)
err = karma.Format(
err,
"unable to unmarshal macros config template",
)
return match
}
@ -52,7 +57,10 @@ func (macro *Macro) Apply(
macro.Regexp.FindSubmatch(match),
))
if err != nil {
err = fmt.Errorf("unable to execute macros template: %w", err)
err = karma.Format(
err,
"unable to execute macros template",
)
return match
}
@ -63,21 +71,21 @@ func (macro *Macro) Apply(
return content, err
}
func (macro *Macro) configure(node any, groups [][]byte) any {
func (macro *Macro) configure(node interface{}, groups [][]byte) interface{} {
switch node := node.(type) {
case map[any]any:
case map[interface{}]interface{}:
for key, value := range node {
node[key] = macro.configure(value, groups)
}
return node
case map[string]any:
case map[string]interface{}:
for key, value := range node {
node[key] = macro.configure(value, groups)
}
return node
case []any:
case []interface{}:
for key, value := range node {
node[key] = macro.configure(value, groups)
}
@ -118,19 +126,26 @@ func ExtractMacros(
groups := reMacroDirective.FindStringSubmatch(string(spec))
var (
expr = groups[reMacroDirective.SubexpIndex("expr")]
template = groups[reMacroDirective.SubexpIndex("template")]
config = groups[reMacroDirective.SubexpIndex("config")]
expr = regexputil.Subexp(reMacroDirective, groups, "expr")
template = regexputil.Subexp(
reMacroDirective,
groups,
"template",
)
config = regexputil.Subexp(reMacroDirective, groups, "config")
)
var macro Macro
if strings.HasPrefix(template, "#") {
cfg := map[string]any{}
cfg := map[string]interface{}{}
err = yaml.Unmarshal([]byte(config), &cfg)
if err != nil {
err = fmt.Errorf("unable to unmarshal macros config template: %w", err)
err = karma.Format(
err,
"unable to unmarshal macros config template",
)
return nil
}
@ -147,35 +162,44 @@ func ExtractMacros(
macro.Template, err = templates.New(template).Parse(body)
if err != nil {
err = fmt.Errorf("unable to parse template: %w", err)
err = karma.Format(
err,
"unable to parse template",
)
return nil
}
} else {
macro.Template, err = includes.LoadTemplate(base, includePath, template, "{{", "}}", templates)
if err != nil {
err = fmt.Errorf("unable to load template: %w", err)
err = karma.Format(err, "unable to load template")
return nil
}
}
facts := karma.
Describe("template", template).
Describe("expr", expr)
macro.Regexp, err = regexp.Compile(expr)
if err != nil {
err = fmt.Errorf("unable to compile macros regexp (expr=%q, template=%q): %w", expr, template, err)
err = facts.
Format(
err,
"unable to compile macros regexp",
)
return nil
}
macro.Config = config
log.Trace().
Interface("vardump", map[string]any{
"expr": expr,
"template": template,
"config": macro.Config,
}).
Msgf("loaded macro %q", expr)
log.Tracef(
facts.Describe("config", macro.Config),
"loaded macro %q",
expr,
)
macros = append(macros, macro)

399
mark.go
View File

@ -4,9 +4,7 @@ import (
"bytes"
"crypto/sha1"
"encoding/hex"
"errors"
"fmt"
stdhtml "html"
"io"
"os"
"path/filepath"
@ -14,7 +12,6 @@ import (
"slices"
"strings"
"time"
"unicode/utf8"
"github.com/bmatcuk/doublestar/v4"
"github.com/kovetskiy/mark/v16/attachment"
@ -27,11 +24,10 @@ import (
"github.com/kovetskiy/mark/v16/stdlib"
"github.com/kovetskiy/mark/v16/types"
"github.com/kovetskiy/mark/v16/vfs"
"github.com/rs/zerolog/log"
"github.com/reconquest/karma-go"
"github.com/reconquest/pkg/log"
)
var markerRegex = regexp.MustCompile(`(?s)<ac:inline-comment-marker ac:ref="([^"]+)">(.*?)</ac:inline-comment-marker>`)
// Config holds all configuration options for running Mark.
type Config struct {
// Connection settings
@ -63,7 +59,6 @@ type Config struct {
VersionMessage string
EditLock bool
ChangesOnly bool
PreserveComments bool
// Rendering
DropH1 bool
@ -101,20 +96,20 @@ func Run(config Config) error {
if len(files) == 0 {
msg := "no files matched"
if config.CI {
log.Warn().Msg(msg)
log.Warning(msg)
} else {
return errors.New(msg)
return fmt.Errorf("%s", msg)
}
}
var hasErrors bool
for _, file := range files {
log.Info().Msgf("processing %s", file)
log.Infof(nil, "processing %s", file)
target, err := ProcessFile(file, api, config)
if err != nil {
if config.ContinueOnError {
log.Error().Err(err).Msgf("processing %s", file)
log.Errorf(err, "processing %s", file)
hasErrors = true
continue
}
@ -122,7 +117,7 @@ func Run(config Config) error {
}
if target != nil {
log.Info().Msgf("page successfully updated: %s", api.BaseURL+target.Links.Full)
log.Infof(nil, "page successfully updated: %s", api.BaseURL+target.Links.Full)
if _, err := fmt.Fprintln(config.output(), api.BaseURL+target.Links.Full); err != nil {
return err
}
@ -161,7 +156,7 @@ func ProcessFile(file string, api *confluence.API, config Config) (*confluence.P
}
if config.PageID != "" && meta != nil {
log.Warn().Msg(
log.Warning(
`specified file contains metadata, ` +
`but it will be ignored due specified command line URL`,
)
@ -260,7 +255,7 @@ func ProcessFile(file string, api *confluence.API, config Config) (*confluence.P
if config.CompileOnly || config.DryRun {
if config.DropH1 {
log.Info().Msg("the leading H1 heading will be excluded from the Confluence output")
log.Info("the leading H1 heading will be excluded from the Confluence output")
}
imageAlign, err := getImageAlign(config.ImageAlign, meta)
@ -287,12 +282,11 @@ func ProcessFile(file string, api *confluence.API, config Config) (*confluence.P
}
var target *confluence.PageInfo
var pageCreated bool
if meta != nil {
parent, pg, err := page.ResolvePage(false, api, meta)
if err != nil {
return nil, fmt.Errorf("error resolving page %q: %w", meta.Title, err)
return nil, karma.Describe("title", meta.Title).Reason(err)
}
if pg == nil {
@ -304,7 +298,6 @@ func ProcessFile(file string, api *confluence.API, config Config) (*confluence.P
// conflict that can occur when attempting to update a page just
// after it was created. See issues/139.
time.Sleep(1 * time.Second)
pageCreated = true
}
target = pg
@ -342,7 +335,7 @@ func ProcessFile(file string, api *confluence.API, config Config) (*confluence.P
markdown = attachment.CompileAttachmentLinks(markdown, attaches)
if config.DropH1 {
log.Info().Msg("the leading H1 heading will be excluded from the Confluence output")
log.Info("the leading H1 heading will be excluded from the Confluence output")
}
imageAlign, err := getImageAlign(config.ImageAlign, meta)
@ -406,13 +399,13 @@ func ProcessFile(file string, api *confluence.API, config Config) (*confluence.P
if config.ChangesOnly {
contentHash := sha1Hash(html)
log.Debug().Msgf("content hash: %s", contentHash)
log.Debugf(nil, "content hash: %s", contentHash)
re := regexp.MustCompile(`\[v([a-f0-9]{40})]$`)
if matches := re.FindStringSubmatch(target.Version.Message); len(matches) > 1 {
log.Debug().Msgf("previous content hash: %s", matches[1])
log.Debugf(nil, "previous content hash: %s", matches[1])
if matches[1] == contentHash {
log.Info().Msgf("page %q is already up to date", target.Title)
log.Infof(nil, "page %q is already up to date", target.Title)
shouldUpdatePage = false
}
}
@ -422,27 +415,6 @@ func ProcessFile(file string, api *confluence.API, config Config) (*confluence.P
finalVersionMessage = config.VersionMessage
}
// Only fetch the old body and inline comments when we know the page will
// actually be updated. This avoids unnecessary API round-trips for no-op
// runs (e.g. when --changes-only determines the content is unchanged).
if shouldUpdatePage && config.PreserveComments && !pageCreated {
pg, err := api.GetPageByIDExpanded(target.ID, "ancestors,version,body.storage")
if err != nil {
return nil, fmt.Errorf("unable to retrieve page body for comments: %w", err)
}
target = pg
comments, err := api.GetInlineComments(target.ID)
if err != nil {
return nil, fmt.Errorf("unable to retrieve inline comments: %w", err)
}
html, err = mergeComments(html, target.Body.Storage.Value, comments)
if err != nil {
return nil, fmt.Errorf("unable to merge inline comments: %w", err)
}
}
if shouldUpdatePage {
err = api.UpdatePage(
target,
@ -464,7 +436,8 @@ func ProcessFile(file string, api *confluence.API, config Config) (*confluence.P
}
if config.EditLock {
log.Info().Msgf(
log.Infof(
nil,
`edit locked on page %q by user %q to prevent manual edits`,
target.Title,
config.Username,
@ -483,18 +456,18 @@ func updateLabels(api *confluence.API, target *confluence.PageInfo, metaLabels [
return err
}
log.Debug().Msg("Page Labels:")
log.Debug().Interface("labels", labelInfo.Labels).Send()
log.Debug().Msg("Meta Labels:")
log.Debug().Interface("labels", metaLabels).Send()
log.Debug("Page Labels:")
log.Debug(labelInfo.Labels)
log.Debug("Meta Labels:")
log.Debug(metaLabels)
delLabels := determineLabelsToRemove(labelInfo, metaLabels)
log.Debug().Msg("Del Labels:")
log.Debug().Interface("labels", delLabels).Send()
log.Debug("Del Labels:")
log.Debug(delLabels)
addLabels := determineLabelsToAdd(metaLabels, labelInfo)
log.Debug().Msg("Add Labels:")
log.Debug().Interface("labels", addLabels).Send()
log.Debug("Add Labels:")
log.Debug(addLabels)
if len(addLabels) > 0 {
if _, err = api.AddPageLabels(target, addLabels); err != nil {
@ -559,327 +532,3 @@ func sha1Hash(input string) string {
h.Write([]byte(input))
return hex.EncodeToString(h.Sum(nil))
}
// htmlTextReplacer encodes the three characters (&, <, >) that Confluence
// storage HTML always escapes inside text nodes. Quotes are deliberately
// left alone: the Confluence editor and mark's own renderer frequently
// leave them unescaped in text nodes, so encoding them here would stop the
// selection-search from finding a valid match.
var htmlTextReplacer = strings.NewReplacer("&", "&amp;", "<", "&lt;", ">", "&gt;")

// htmlEscapeText returns s with &, < and > encoded as HTML entities,
// leaving single- and double-quotes untouched (unlike html.EscapeString).
func htmlEscapeText(s string) string {
	if !strings.ContainsAny(s, "&<>") {
		// Fast path: nothing needs encoding.
		return s
	}
	return htmlTextReplacer.Replace(s)
}
// truncateSelection shortens s to at most maxRunes runes for use in log
// messages, appending an ellipsis when anything was cut off.
func truncateSelection(s string, maxRunes int) string {
	if utf8.RuneCountInString(s) <= maxRunes {
		return s
	}
	var out strings.Builder
	taken := 0
	for _, r := range s {
		if taken == maxRunes {
			break
		}
		out.WriteRune(r)
		taken++
	}
	out.WriteString("…")
	return out.String()
}
// contextBefore returns up to maxBytes of s ending at byteEnd. The window's
// left edge is nudged forward to the nearest UTF-8 rune start so the result
// never begins in the middle of a multi-byte sequence.
func contextBefore(s string, byteEnd, maxBytes int) string {
	from := max(0, byteEnd-maxBytes)
	for from < byteEnd && !utf8.RuneStart(s[from]) {
		from++
	}
	return s[from:byteEnd]
}
// contextAfter returns up to maxBytes of s starting at byteStart. The
// window's right edge is pulled back to the nearest UTF-8 rune start so the
// result never ends in the middle of a multi-byte sequence.
func contextAfter(s string, byteStart, maxBytes int) string {
	if limit := byteStart + maxBytes; limit < len(s) {
		for limit > byteStart && !utf8.RuneStart(s[limit]) {
			limit--
		}
		return s[byteStart:limit]
	}
	return s[byteStart:]
}
// levenshteinDistance returns the edit distance between s1 and s2, counted
// in runes (insertions, deletions and substitutions all cost 1).
func levenshteinDistance(s1, s2 string) int {
	a := []rune(s1)
	b := []rune(s2)
	switch {
	case len(a) == 0:
		return len(b)
	case len(b) == 0:
		return len(a)
	}

	// Two rolling rows replace the full DP matrix, cutting allocations from
	// O(m×n) to O(n). Ensure b is the shorter string so the row width
	// (len(b)+1) stays as small as possible.
	if len(a) < len(b) {
		a, b = b, a
	}

	row := make([]int, len(b)+1)
	next := make([]int, len(b)+1)
	for j := range row {
		row[j] = j
	}

	for i := 1; i <= len(a); i++ {
		next[0] = i
		for j := 1; j <= len(b); j++ {
			sub := row[j-1] // substitution (free if the runes match)
			if a[i-1] != b[j-1] {
				sub++
			}
			next[j] = min(
				row[j]+1,  // deletion
				next[j-1]+1, // insertion
				sub,
			)
		}
		row, next = next, row
	}

	return row[len(b)]
}
// commentContext captures the storage-HTML text immediately surrounding an
// inline-comment marker in the old page body. It is used to disambiguate
// between multiple occurrences of the same selection text in the new body.
type commentContext struct {
	before string // up to contextWindowBytes bytes preceding the marker
	after  string // up to contextWindowBytes bytes following the marker
}

// mergeComments re-embeds inline comment markers from the Confluence API into
// newBody (the updated storage HTML about to be uploaded). It extracts context
// from each existing marker in oldBody and uses Levenshtein distance to
// relocate each marker to the best-matching position in newBody, so comment
// threads survive page edits even when the surrounding text has shifted.
//
// At most maxCandidates occurrences of each selection are evaluated with
// Levenshtein distance; further occurrences are ignored to bound CPU cost on
// pages where a selection is short or very common.
const maxCandidates = 100

// contextWindowBytes is the number of bytes of surrounding text captured as
// context around each inline-comment marker. It is used both when extracting
// context from oldBody and when scoring candidates in newBody.
const contextWindowBytes = 100
// mergeComments returns newBody with the <ac:inline-comment-marker> tags
// found in oldBody re-inserted around the best-matching occurrence of each
// comment's original selection, so existing inline-comment threads survive a
// page update. A nil comments value is a no-op. Markers whose selection
// cannot be located in newBody, or whose placement would overlap an
// already-placed marker, are dropped with a warning. The function itself
// never fails: the error result is always nil.
func mergeComments(newBody string, oldBody string, comments *confluence.InlineComments) (string, error) {
	if comments == nil {
		return newBody, nil
	}

	// 1. Extract context for each comment from oldBody
	contexts := make(map[string]commentContext)
	matches := markerRegex.FindAllStringSubmatchIndex(oldBody, -1)
	for _, match := range matches {
		// match[2]:match[3] is the regexp's first capture group: the ac:ref
		// attribute value identifying this marker.
		ref := oldBody[match[2]:match[3]]
		// context around the tag
		before := contextBefore(oldBody, match[0], contextWindowBytes)
		after := contextAfter(oldBody, match[1], contextWindowBytes)
		contexts[ref] = commentContext{
			before: before,
			after:  after,
		}
	}

	// replacement records that newBody[start:end] should be wrapped in a
	// marker tag carrying ref; selection is kept only for log messages.
	type replacement struct {
		start     int
		end       int
		ref       string
		selection string
	}

	var replacements []replacement
	seenRefs := make(map[string]bool)

	// 2. For each inline comment, locate the best occurrence of its original
	// selection in newBody, using the saved context to disambiguate.
	for _, comment := range comments.Results {
		if comment.Extensions.Location != "inline" {
			log.Debug().
				Str("location", comment.Extensions.Location).
				Str("ref", comment.Extensions.InlineProperties.MarkerRef).
				Msg("comment ignored during inline marker merge: not an inline comment")
			continue
		}
		ref := comment.Extensions.InlineProperties.MarkerRef
		selection := comment.Extensions.InlineProperties.OriginalSelection

		if seenRefs[ref] {
			// Multiple results share the same MarkerRef (e.g. threaded replies).
			// The marker only needs to be inserted once; skip duplicates.
			continue
		}
		// Mark ref as seen immediately so subsequent results for the same ref
		// (threaded replies) are always deduplicated, even if this one is dropped.
		seenRefs[ref] = true

		if selection == "" {
			log.Warn().
				Str("ref", ref).
				Msg("inline comment skipped: original selection is empty; comment will be lost")
			continue
		}

		ctx, hasCtx := contexts[ref]

		// Build the list of forms to search for in newBody. The escaped form
		// is tried first (normal XML text nodes). The raw form is appended as a
		// fallback for text inside CDATA-backed macro bodies (e.g. ac:code),
		// where < and > are stored unescaped inside <![CDATA[...]]>.
		escapedSelection := htmlEscapeText(selection)
		searchForms := []string{escapedSelection}
		if selection != escapedSelection {
			searchForms = append(searchForms, selection)
		}

		var bestStart = -1
		var bestEnd = -1
		var minDistance = 1000000

		// Iterate over search forms; stop as soon as we have a definitive best.
		candidates := 0
		stopSearch := false
		for _, form := range searchForms {
			if stopSearch {
				break
			}
			currentPos := 0
			for {
				index := strings.Index(newBody[currentPos:], form)
				if index == -1 {
					break
				}
				start := currentPos + index
				end := start + len(form)

				// Skip candidates that start or end in the middle of a multi-byte
				// UTF-8 rune; such a match would produce invalid UTF-8 output.
				if !utf8.RuneStart(newBody[start]) || (end < len(newBody) && !utf8.RuneStart(newBody[end])) {
					currentPos = start + 1
					continue
				}

				candidates++
				if candidates > maxCandidates {
					stopSearch = true
					break
				}

				if !hasCtx {
					// No context available; use the first occurrence.
					bestStart = start
					bestEnd = end
					stopSearch = true
					break
				}

				newBefore := contextBefore(newBody, start, contextWindowBytes)
				newAfter := contextAfter(newBody, end, contextWindowBytes)

				// Fast path: exact context match is the best possible result.
				if newBefore == ctx.before && newAfter == ctx.after {
					bestStart = start
					bestEnd = end
					stopSearch = true
					break
				}

				// Lower-bound pruning: Levenshtein distance is at least the
				// absolute difference in rune counts. Use rune counts (not byte
				// lengths) to match the unit levenshteinDistance operates on,
				// avoiding false skips for multibyte UTF-8 content.
				lbBefore := utf8.RuneCountInString(ctx.before) - utf8.RuneCountInString(newBefore)
				if lbBefore < 0 {
					lbBefore = -lbBefore
				}
				lbAfter := utf8.RuneCountInString(ctx.after) - utf8.RuneCountInString(newAfter)
				if lbAfter < 0 {
					lbAfter = -lbAfter
				}
				if lbBefore+lbAfter >= minDistance {
					currentPos = start + 1
					continue
				}

				distance := levenshteinDistance(ctx.before, newBefore) + levenshteinDistance(ctx.after, newAfter)
				if distance < minDistance {
					minDistance = distance
					bestStart = start
					bestEnd = end
				}
				currentPos = start + 1
			}
		}

		if bestStart != -1 {
			replacements = append(replacements, replacement{
				start:     bestStart,
				end:       bestEnd,
				ref:       ref,
				selection: selection,
			})
		} else {
			log.Warn().
				Str("ref", ref).
				Str("selection_preview", truncateSelection(selection, 50)).
				Msg("inline comment dropped: selected text not found in new body; comment will be lost")
		}
	}

	// 3. Sort replacements from back to front to avoid offset issues.
	// Use a stable sort with ref as a tie-breaker so the ordering is
	// deterministic when two markers resolve to the same start offset.
	slices.SortStableFunc(replacements, func(a, b replacement) int {
		if a.start != b.start {
			return b.start - a.start
		}
		if a.ref < b.ref {
			return -1
		}
		if a.ref > b.ref {
			return 1
		}
		return 0
	})

	// 4. Apply replacements back-to-front. Track the minimum start of any
	// applied replacement so that overlapping candidates (whose end exceeds
	// that boundary) are dropped rather than producing nested or malformed
	// <ac:inline-comment-marker> tags.
	minAppliedStart := len(newBody)
	for _, r := range replacements {
		if r.end > minAppliedStart {
			// This replacement overlaps with an already-applied one.
			// Drop it and warn so the user knows the comment was skipped.
			log.Warn().
				Str("ref", r.ref).
				Str("selection_preview", truncateSelection(r.selection, 50)).
				Int("start", r.start).
				Int("end", r.end).
				Int("conflicting_start", minAppliedStart).
				Msg("inline comment marker dropped: selection overlaps an already-placed marker")
			continue
		}
		minAppliedStart = r.start
		selection := newBody[r.start:r.end]
		withComment := fmt.Sprintf(
			`<ac:inline-comment-marker ac:ref="%s">%s</ac:inline-comment-marker>`,
			stdhtml.EscapeString(r.ref),
			selection,
		)
		newBody = newBody[:r.start] + withComment + newBody[r.end:]
	}

	return newBody, nil
}

View File

@ -1,369 +0,0 @@
package mark
import (
"testing"
"github.com/kovetskiy/mark/v16/confluence"
"github.com/stretchr/testify/assert"
)
// ---------------------------------------------------------------------------
// Helper function unit tests
// ---------------------------------------------------------------------------
// TestTruncateSelection exercises truncateSelection across ASCII, empty and
// multibyte inputs, verifying that the limit is counted in runes and that an
// ellipsis is appended whenever the input is cut short.
func TestTruncateSelection(t *testing.T) {
	cases := []struct {
		in    string
		limit int
		want  string
	}{
		{"hello", 10, "hello"},
		{"hello", 5, "hello"},
		{"hello", 4, "hell…"},
		{"", 5, ""},
		// Multibyte runes count as single units.
		{"世界 is the world", 2, "世界…"},
	}
	for _, c := range cases {
		assert.Equal(t, c.want, truncateSelection(c.in, c.limit))
	}
}
// TestLevenshteinDistance verifies the edit-distance helper on empty strings,
// identical strings, single-edit cases, the classic kitten/sitting pair and a
// multibyte input (distances are measured in runes, not bytes).
func TestLevenshteinDistance(t *testing.T) {
	cases := []struct {
		a, b string
		want int
	}{
		{"", "", 0},
		{"abc", "", 3},
		{"", "abc", 3},
		{"abc", "abc", 0},
		{"abc", "axc", 1}, // one substitution
		{"abc", "ab", 1},  // one deletion
		{"ab", "abc", 1},  // one insertion
		{"kitten", "sitting", 3},
		// Multibyte: é is one rune, so distance from "héllo" to "hello" is 1.
		{"héllo", "hello", 1},
	}
	for _, tc := range cases {
		t.Run(tc.a+"/"+tc.b, func(t *testing.T) {
			assert.Equal(t, tc.want, levenshteinDistance(tc.a, tc.b))
		})
	}
}
// TestContextBefore checks that contextBefore returns at most maxBytes bytes
// of text preceding the given position, and that a cut landing inside a
// multibyte rune is advanced to the next rune boundary.
func TestContextBefore(t *testing.T) {
	for _, tc := range []struct {
		s        string
		pos, max int
		want     string
	}{
		{"hello", 0, 10, ""},
		{"hello", 5, 10, "hello"},
		{"hello", 5, 3, "llo"},
		// "héllo" is 6 bytes (h=1, é=2, l=1, l=1, o=1). maxBytes=4 gives a
		// raw start of 2, which lands mid-rune on é's continuation byte; the
		// helper must advance to byte 3, the first 'l'.
		{"héllo", 6, 4, "llo"},
	} {
		assert.Equal(t, tc.want, contextBefore(tc.s, tc.pos, tc.max))
	}
}
// TestContextAfter checks that contextAfter returns at most maxBytes bytes of
// text following the given position, backing up when the raw cut point would
// split a multibyte rune.
func TestContextAfter(t *testing.T) {
	for _, tc := range []struct {
		s        string
		pos, max int
		want     string
	}{
		{"hello", 5, 10, ""},
		{"hello", 0, 10, "hello"},
		{"hello", 0, 3, "hel"},
		// "héllo" is 6 bytes. contextAfter(s, 0, 2) yields a raw end of 2
		// (é's continuation byte), which is not a rune start; the helper must
		// back up to 1, returning just "h".
		{"héllo", 0, 2, "h"},
	} {
		assert.Equal(t, tc.want, contextAfter(tc.s, tc.pos, tc.max))
	}
}
// makeComments builds an InlineComments value from alternating
// (selection, markerRef) pairs; every result is given location "inline".
// A dangling odd element at the end of pairs is ignored.
func makeComments(pairs ...string) *confluence.InlineComments {
	out := &confluence.InlineComments{}
	for len(pairs) >= 2 {
		out.Results = append(out.Results, confluence.InlineCommentResult{
			Extensions: confluence.InlineCommentExtensions{
				Location: "inline",
				InlineProperties: confluence.InlineCommentProperties{
					OriginalSelection: pairs[0],
					MarkerRef:         pairs[1],
				},
			},
		})
		pairs = pairs[2:]
	}
	return out
}
// TestMergeComments covers the happy path: a single comment whose selection
// appears verbatim in the new body is re-wrapped with its original marker.
func TestMergeComments(t *testing.T) {
	const marked = `<p>Hello <ac:inline-comment-marker ac:ref="uuid-123">world</ac:inline-comment-marker></p>`

	result, err := mergeComments("<p>Hello world</p>", marked, makeComments("world", "uuid-123"))
	assert.NoError(t, err)
	assert.Equal(t, marked, result)
}
// TestMergeComments_Escaping verifies that a selection containing "&" — which
// the stored page body holds as the entity &amp; — is still located and
// re-wrapped with its marker.
func TestMergeComments_Escaping(t *testing.T) {
	body := "<p>Hello &amp; world</p>"
	oldBody := `<p>Hello <ac:inline-comment-marker ac:ref="uuid-456">&amp;</ac:inline-comment-marker> world</p>`
	comments := makeComments("&", "uuid-456")
	result, err := mergeComments(body, oldBody, comments)
	assert.NoError(t, err)
	assert.Equal(t, `<p>Hello <ac:inline-comment-marker ac:ref="uuid-456">&amp;</ac:inline-comment-marker> world</p>`, result)
}

// TestMergeComments_Disambiguation verifies that when the selected text occurs
// more than once in the new body, context recovered from the old body's marker
// position is used to choose the correct occurrence.
func TestMergeComments_Disambiguation(t *testing.T) {
	body := "<p>Item one. Item two. Item one.</p>"
	// Comment is on the second "Item one."
	oldBody := `<p>Item one. Item two. <ac:inline-comment-marker ac:ref="uuid-1">Item one.</ac:inline-comment-marker></p>`
	comments := makeComments("Item one.", "uuid-1")
	result, err := mergeComments(body, oldBody, comments)
	assert.NoError(t, err)
	// Context should correctly pick the second occurrence
	assert.Equal(t, `<p>Item one. Item two. <ac:inline-comment-marker ac:ref="uuid-1">Item one.</ac:inline-comment-marker></p>`, result)
}

// TestMergeComments_SelectionMissing verifies that a comment whose selection
// no longer appears in the new body is dropped without returning an error or panicking.
// A warning is logged so the user knows the comment was not relocated.
func TestMergeComments_SelectionMissing(t *testing.T) {
	body := "<p>Completely different content</p>"
	oldBody := `<p><ac:inline-comment-marker ac:ref="uuid-gone">old text</ac:inline-comment-marker></p>`
	comments := makeComments("old text", "uuid-gone")
	result, err := mergeComments(body, oldBody, comments)
	assert.NoError(t, err)
	// Comment is dropped; body is returned unchanged.
	assert.Equal(t, body, result)
}

// TestMergeComments_OverlappingSelections verifies that when two comments
// reference overlapping text regions the later one (by position) is kept and
// the earlier overlapping one is dropped rather than corrupting the body.
func TestMergeComments_OverlappingSelections(t *testing.T) {
	body := "<p>foo bar baz</p>"
	// Neither comment has a marker in oldBody, so no positional context is
	// available; the algorithm falls back to a plain string search.
	oldBody := "<p>foo bar baz</p>"
	// "foo bar" starts at 3, ends at 10; "bar baz" starts at 7, ends at 14.
	// They overlap on "bar". The later match (uuid-B at position 7) wins.
	comments := makeComments("foo bar", "uuid-A", "bar baz", "uuid-B")
	result, err := mergeComments(body, oldBody, comments)
	assert.NoError(t, err)
	assert.Equal(t, `<p>foo <ac:inline-comment-marker ac:ref="uuid-B">bar baz</ac:inline-comment-marker></p>`, result)
}

// TestMergeComments_NilComments verifies that a nil comments pointer is
// handled gracefully and the new body is returned unchanged.
func TestMergeComments_NilComments(t *testing.T) {
	body := "<p>Hello world</p>"
	result, err := mergeComments(body, "", nil)
	assert.NoError(t, err)
	assert.Equal(t, body, result)
}

// TestMergeComments_HTMLEntities verifies that selections containing HTML
// entities (&lt;, &gt;) are matched correctly. The API returns raw (unescaped)
// text for OriginalSelection; htmlEscapeText encodes &, < and > to their
// entity forms before searching.
func TestMergeComments_HTMLEntities(t *testing.T) {
	body := `<p>Hello &lt;world&gt; it&#39;s me</p>`
	oldBody := `<p>Hello <ac:inline-comment-marker ac:ref="uuid-ent">&lt;world&gt;</ac:inline-comment-marker> it&#39;s me</p>`
	// The API returns the raw (unescaped) selection text.
	comments := makeComments("<world>", "uuid-ent")
	result, err := mergeComments(body, oldBody, comments)
	assert.NoError(t, err)
	assert.Equal(t, `<p>Hello <ac:inline-comment-marker ac:ref="uuid-ent">&lt;world&gt;</ac:inline-comment-marker> it&#39;s me</p>`, result)
}

// TestMergeComments_ApostropheEncoded verifies the known limitation: when a
// selection includes an apostrophe that Confluence stores as the numeric
// entity &#39; in the page body, mergeComments cannot locate the selection
// (htmlEscapeText does not encode ' to &#39;) and the comment is dropped with
// a warning rather than panicking or producing invalid output.
func TestMergeComments_ApostropheEncoded(t *testing.T) {
	// New body uses &#39; entity (as Confluence sometimes stores apostrophes).
	body := `<p>Hello &lt;world&gt; it&#39;s me</p>`
	// Old body has the comment marker around a selection that includes an apostrophe.
	oldBody := `<p>Hello <ac:inline-comment-marker ac:ref="uuid-apos-enc">&lt;world&gt; it&#39;s</ac:inline-comment-marker> me</p>`
	// The API returns the raw unescaped selection including a literal apostrophe.
	comments := makeComments("<world> it's", "uuid-apos-enc")
	result, err := mergeComments(body, oldBody, comments)
	assert.NoError(t, err)
	// The comment is dropped (body unchanged) because htmlEscapeText("it's")
	// produces "it's", which doesn't match "it&#39;s" in the new body.
	assert.Equal(t, body, result)
}

// TestMergeComments_ApostropheSelection verifies that a selection containing a
// literal apostrophe is found when the new body also contains a literal
// apostrophe (as mark's renderer typically emits). This exercises the
// htmlEscapeText path which intentionally does not encode ' or ".
func TestMergeComments_ApostropheSelection(t *testing.T) {
	body := `<p>Hello it's a test</p>`
	oldBody := `<p>Hello <ac:inline-comment-marker ac:ref="uuid-apos">it's</ac:inline-comment-marker> a test</p>`
	// The API returns the raw (unescaped) selection text with a literal apostrophe.
	comments := makeComments("it's", "uuid-apos")
	result, err := mergeComments(body, oldBody, comments)
	assert.NoError(t, err)
	assert.Equal(t, `<p>Hello <ac:inline-comment-marker ac:ref="uuid-apos">it's</ac:inline-comment-marker> a test</p>`, result)
}
// TestMergeComments_NestedTags verifies that a marker whose stored content
// contains nested inline tags (e.g. <strong>) is still recognised by
// markerRegex and the comment is correctly relocated into the new body.
func TestMergeComments_NestedTags(t *testing.T) {
	// The new body contains plain bold text (no marker yet).
	body := "<p>Hello <strong>world</strong></p>"
	// The old body already has the marker wrapping the bold tag.
	oldBody := `<p>Hello <ac:inline-comment-marker ac:ref="uuid-nested"><strong>world</strong></ac:inline-comment-marker></p>`
	// The API returns the raw selected text without markup.
	comments := makeComments("world", "uuid-nested")
	result, err := mergeComments(body, oldBody, comments)
	assert.NoError(t, err)
	// Note: the relocated marker ends up inside <strong>, not around it.
	assert.Equal(t, `<p>Hello <strong><ac:inline-comment-marker ac:ref="uuid-nested">world</ac:inline-comment-marker></strong></p>`, result)
}

// TestMergeComments_EmptySelection verifies that a comment with an empty
// OriginalSelection is skipped without panicking and the body is returned
// unchanged.
func TestMergeComments_EmptySelection(t *testing.T) {
	body := "<p>Hello world</p>"
	comments := makeComments("", "uuid-empty")
	result, err := mergeComments(body, body, comments)
	assert.NoError(t, err)
	assert.Equal(t, body, result)
}

// TestMergeComments_DuplicateMarkerRef verifies that multiple comment results
// sharing the same MarkerRef (e.g. threaded replies) produce exactly one
// <ac:inline-comment-marker> insertion rather than nested duplicates.
func TestMergeComments_DuplicateMarkerRef(t *testing.T) {
	body := "<p>Hello world</p>"
	oldBody := `<p>Hello <ac:inline-comment-marker ac:ref="uuid-dup">world</ac:inline-comment-marker></p>`
	// Two results with identical ref — simulates threaded replies.
	comments := makeComments("world", "uuid-dup", "world", "uuid-dup")
	result, err := mergeComments(body, oldBody, comments)
	assert.NoError(t, err)
	assert.Equal(t, `<p>Hello <ac:inline-comment-marker ac:ref="uuid-dup">world</ac:inline-comment-marker></p>`, result)
}

// ---------------------------------------------------------------------------
// Additional mergeComments scenario tests
// ---------------------------------------------------------------------------

// TestMergeComments_MultipleComments verifies that two non-overlapping comments
// are both correctly re-embedded via back-to-front replacement.
func TestMergeComments_MultipleComments(t *testing.T) {
	body := "<p>Hello world and foo bar</p>"
	oldBody := `<p>Hello <ac:inline-comment-marker ac:ref="uuid-1">world</ac:inline-comment-marker> and foo <ac:inline-comment-marker ac:ref="uuid-2">bar</ac:inline-comment-marker></p>`
	comments := makeComments("world", "uuid-1", "bar", "uuid-2")
	result, err := mergeComments(body, oldBody, comments)
	assert.NoError(t, err)
	assert.Equal(t, `<p>Hello <ac:inline-comment-marker ac:ref="uuid-1">world</ac:inline-comment-marker> and foo <ac:inline-comment-marker ac:ref="uuid-2">bar</ac:inline-comment-marker></p>`, result)
}

// TestMergeComments_EmptyResults verifies that an InlineComments value with a
// non-nil but empty Results slice is handled gracefully.
func TestMergeComments_EmptyResults(t *testing.T) {
	body := "<p>Hello world</p>"
	result, err := mergeComments(body, body, &confluence.InlineComments{})
	assert.NoError(t, err)
	assert.Equal(t, body, result)
}

// TestMergeComments_NonInlineLocation verifies that page-level comments
// (location != "inline") are silently skipped and the body is unchanged.
func TestMergeComments_NonInlineLocation(t *testing.T) {
	body := "<p>Hello world</p>"
	// Built literally (not via makeComments) so Location can be set to "page".
	comments := &confluence.InlineComments{
		Results: []confluence.InlineCommentResult{
			{
				Extensions: confluence.InlineCommentExtensions{
					Location: "page",
					InlineProperties: confluence.InlineCommentProperties{
						OriginalSelection: "Hello",
						MarkerRef:         "uuid-page",
					},
				},
			},
		},
	}
	result, err := mergeComments(body, body, comments)
	assert.NoError(t, err)
	assert.Equal(t, body, result)
}

// TestMergeComments_NoContext verifies that when a comment's MarkerRef has no
// corresponding marker in oldBody (no context available) the first occurrence
// of the selection in the new body is used.
func TestMergeComments_NoContext(t *testing.T) {
	body := "<p>foo bar foo</p>"
	oldBody := "<p>foo bar foo</p>" // no markers → no context
	comments := makeComments("foo", "uuid-noctx")
	result, err := mergeComments(body, oldBody, comments)
	assert.NoError(t, err)
	// First occurrence of "foo" is at position 3.
	assert.Equal(t, `<p><ac:inline-comment-marker ac:ref="uuid-noctx">foo</ac:inline-comment-marker> bar foo</p>`, result)
}

// TestMergeComments_UTF8 verifies that selections and bodies containing
// multibyte UTF-8 characters are handled correctly.
func TestMergeComments_UTF8(t *testing.T) {
	body := "<p>こんにちは世界</p>"
	oldBody := `<p>こんにちは<ac:inline-comment-marker ac:ref="uuid-jp">世界</ac:inline-comment-marker></p>`
	comments := makeComments("世界", "uuid-jp")
	result, err := mergeComments(body, oldBody, comments)
	assert.NoError(t, err)
	assert.Equal(t, `<p>こんにちは<ac:inline-comment-marker ac:ref="uuid-jp">世界</ac:inline-comment-marker></p>`, result)
}

// TestMergeComments_SelectionWithQuotes verifies that a selection containing
// apostrophes or double-quotes is found correctly in the new body even though
// html.EscapeString would encode those characters. Only &, <, > should be
// escaped when searching.
func TestMergeComments_SelectionWithQuotes(t *testing.T) {
	body := `<p>It's a "test" page</p>`
	oldBody := `<p>It's a <ac:inline-comment-marker ac:ref="uuid-q">"test"</ac:inline-comment-marker> page</p>`
	comments := makeComments(`"test"`, "uuid-q")
	result, err := mergeComments(body, oldBody, comments)
	assert.NoError(t, err)
	assert.Equal(t, `<p>It's a <ac:inline-comment-marker ac:ref="uuid-q">"test"</ac:inline-comment-marker> page</p>`, result)
}

// TestMergeComments_DuplicateMarkerRefDropped verifies that when multiple
// comment results share the same MarkerRef and the selection cannot be found,
// only a single warning is emitted (not one per result).
func TestMergeComments_DuplicateMarkerRefDropped(t *testing.T) {
	body := "<p>Hello world</p>"
	// Duplicate refs, but selection "gone" is not present in body or oldBody.
	comments := makeComments("gone", "uuid-dup2", "gone", "uuid-dup2")
	result, err := mergeComments(body, body, comments)
	assert.NoError(t, err)
	assert.Equal(t, body, result) // body unchanged, single warning logged
}

// TestMergeComments_CDATASelection verifies that a selection inside a
// CDATA-backed macro body (e.g. ac:code) is matched even though < and > are
// stored as raw characters rather than HTML entities. The raw form is tried as
// a fallback when the escaped form is not found.
func TestMergeComments_CDATASelection(t *testing.T) {
	// New body contains a code macro with CDATA — raw < and > in the content.
	body := `<ac:structured-macro ac:name="code"><ac:plain-text-body><![CDATA[func foo() { return <nil> }]]></ac:plain-text-body></ac:structured-macro>`
	// Old body has the marker around the raw selection inside CDATA.
	oldBody := `<ac:structured-macro ac:name="code"><ac:plain-text-body><![CDATA[func foo() { return <ac:inline-comment-marker ac:ref="uuid-cdata"><nil></ac:inline-comment-marker> }]]></ac:plain-text-body></ac:structured-macro>`
	// The API returns the raw (unescaped) selection.
	comments := makeComments("<nil>", "uuid-cdata")
	result, err := mergeComments(body, oldBody, comments)
	assert.NoError(t, err)
	// The raw selection "<nil>" should be found and wrapped with a marker.
	assert.Equal(t, `<ac:structured-macro ac:name="code"><ac:plain-text-body><![CDATA[func foo() { return <ac:inline-comment-marker ac:ref="uuid-cdata"><nil></ac:inline-comment-marker> }]]></ac:plain-text-body></ac:structured-macro>`, result)
}

View File

@ -8,9 +8,8 @@ import (
cparser "github.com/kovetskiy/mark/v16/parser"
crenderer "github.com/kovetskiy/mark/v16/renderer"
"github.com/kovetskiy/mark/v16/stdlib"
ctransformer "github.com/kovetskiy/mark/v16/transformer"
"github.com/kovetskiy/mark/v16/types"
"github.com/rs/zerolog/log"
"github.com/reconquest/pkg/log"
mkDocsParser "github.com/stefanfritsch/goldmark-admonitions"
"github.com/yuin/goldmark"
@ -21,9 +20,8 @@ import (
"github.com/yuin/goldmark/util"
)
// ConfluenceLegacyExtension is the original goldmark extension without GitHub Alerts support
// This extension is preserved for backward compatibility and testing purposes
type ConfluenceLegacyExtension struct {
// Renderer renders anchor [Node]s.
type ConfluenceExtension struct {
html.Config
Stdlib *stdlib.Lib
Path string
@ -31,9 +29,9 @@ type ConfluenceLegacyExtension struct {
Attachments []attachment.Attachment
}
// NewConfluenceLegacyExtension creates a new instance of the legacy ConfluenceRenderer
func NewConfluenceLegacyExtension(stdlib *stdlib.Lib, path string, cfg types.MarkConfig) *ConfluenceLegacyExtension {
return &ConfluenceLegacyExtension{
// NewConfluenceRenderer creates a new instance of the ConfluenceRenderer
func NewConfluenceExtension(stdlib *stdlib.Lib, path string, cfg types.MarkConfig) *ConfluenceExtension {
return &ConfluenceExtension{
Config: html.NewConfig(),
Stdlib: stdlib,
Path: path,
@ -42,14 +40,14 @@ func NewConfluenceLegacyExtension(stdlib *stdlib.Lib, path string, cfg types.Mar
}
}
func (c *ConfluenceLegacyExtension) Attach(a attachment.Attachment) {
func (c *ConfluenceExtension) Attach(a attachment.Attachment) {
c.Attachments = append(c.Attachments, a)
}
func (c *ConfluenceLegacyExtension) Extend(m goldmark.Markdown) {
func (c *ConfluenceExtension) Extend(m goldmark.Markdown) {
m.Renderer().AddOptions(renderer.WithNodeRenderers(
util.Prioritized(crenderer.NewConfluenceTextLegacyRenderer(c.MarkConfig.StripNewlines), 100),
util.Prioritized(crenderer.NewConfluenceTextRenderer(c.MarkConfig.StripNewlines), 100),
util.Prioritized(crenderer.NewConfluenceBlockQuoteRenderer(), 100),
util.Prioritized(crenderer.NewConfluenceCodeBlockRenderer(c.Stdlib, c.Path), 100),
util.Prioritized(crenderer.NewConfluenceFencedCodeBlockRenderer(c.Stdlib, c, c.MarkConfig), 100),
@ -92,10 +90,10 @@ func (c *ConfluenceLegacyExtension) Extend(m goldmark.Markdown) {
))
}
// compileMarkdownWithExtension is a shared helper to eliminate code duplication
// between different compilation approaches
func compileMarkdownWithExtension(markdown []byte, ext goldmark.Extender, logMessage string) (string, error) {
log.Trace().Msgf(logMessage, string(markdown))
func CompileMarkdown(markdown []byte, stdlib *stdlib.Lib, path string, cfg types.MarkConfig) (string, []attachment.Attachment, error) {
log.Tracef(nil, "rendering markdown:\n%s", string(markdown))
confluenceExtension := NewConfluenceExtension(stdlib, path, cfg)
converter := goldmark.New(
goldmark.WithExtensions(
@ -104,7 +102,7 @@ func compileMarkdownWithExtension(markdown []byte, ext goldmark.Extender, logMes
extension.NewTable(
extension.WithTableCellAlignMethod(extension.TableCellAlignStyle),
),
ext,
confluenceExtension,
extension.GFM,
),
goldmark.WithParserOptions(
@ -121,128 +119,12 @@ func compileMarkdownWithExtension(markdown []byte, ext goldmark.Extender, logMes
err := converter.Convert(markdown, &buf, parser.WithContext(ctx))
if err != nil {
return "", err
return "", nil, err
}
html := buf.Bytes()
log.Trace().Msgf("rendered markdown to html:\n%s", string(html))
return string(html), nil
}
// CompileMarkdown compiles markdown to Confluence Storage Format with GitHub Alerts support
// This is the main function that now uses the enhanced GitHub Alerts transformer by default
// for superior processing of [!NOTE], [!TIP], [!WARNING], [!CAUTION], [!IMPORTANT] syntax.
// Note: This is a breaking change from previous versions which rendered these markers literally.
func CompileMarkdown(markdown []byte, stdlib *stdlib.Lib, path string, cfg types.MarkConfig) (string, []attachment.Attachment, error) {
// Use the enhanced GitHub Alerts extension for better processing
ghAlertsExtension := NewConfluenceExtension(stdlib, path, cfg)
html, err := compileMarkdownWithExtension(markdown, ghAlertsExtension, "rendering markdown with GitHub Alerts support:\n%s")
return html, ghAlertsExtension.Attachments, err
}
// CompileMarkdownLegacy compiles markdown using the legacy approach without GitHub Alerts transformer
// This function is preserved for backward compatibility and testing purposes
func CompileMarkdownLegacy(markdown []byte, stdlib *stdlib.Lib, path string, cfg types.MarkConfig) (string, []attachment.Attachment, error) {
confluenceExtension := NewConfluenceLegacyExtension(stdlib, path, cfg)
html, err := compileMarkdownWithExtension(markdown, confluenceExtension, "rendering markdown with legacy renderer:\n%s")
return html, confluenceExtension.Attachments, err
}
// ConfluenceExtension is a goldmark extension for GitHub Alerts with Transformer approach
// This extension provides superior GitHub Alert processing by transforming [!NOTE], [!TIP], etc.
// into proper Confluence macros while maintaining full compatibility with existing functionality.
// This is now the primary/default extension.
type ConfluenceExtension struct {
html.Config
Stdlib *stdlib.Lib
Path string
MarkConfig types.MarkConfig
Attachments []attachment.Attachment
}
// NewConfluenceExtension creates a new instance of the GitHub Alerts extension
// This is the improved standalone version that doesn't depend on feature flags
func NewConfluenceExtension(stdlib *stdlib.Lib, path string, cfg types.MarkConfig) *ConfluenceExtension {
return &ConfluenceExtension{
Config: html.NewConfig(),
Stdlib: stdlib,
Path: path,
MarkConfig: cfg,
Attachments: []attachment.Attachment{},
}
}
func (c *ConfluenceExtension) Attach(a attachment.Attachment) {
c.Attachments = append(c.Attachments, a)
}
// Extend extends the Goldmark processor with GitHub Alerts transformer and renderers
// This method registers all necessary components for GitHub Alert processing:
// 1. Core renderers for standard markdown elements
// 2. GitHub Alerts specific renderers (blockquote and text) with higher priority
// 3. GitHub Alerts AST transformer for preprocessing
func (c *ConfluenceExtension) Extend(m goldmark.Markdown) {
// Register core renderers (excluding blockquote and text which we'll replace)
m.Renderer().AddOptions(renderer.WithNodeRenderers(
util.Prioritized(crenderer.NewConfluenceCodeBlockRenderer(c.Stdlib, c.Path), 100),
util.Prioritized(crenderer.NewConfluenceFencedCodeBlockRenderer(c.Stdlib, c, c.MarkConfig), 100),
util.Prioritized(crenderer.NewConfluenceHTMLBlockRenderer(c.Stdlib), 100),
util.Prioritized(crenderer.NewConfluenceHeadingRenderer(c.MarkConfig.DropFirstH1), 100),
util.Prioritized(crenderer.NewConfluenceImageRenderer(c.Stdlib, c, c.Path, c.MarkConfig.ImageAlign), 100),
util.Prioritized(crenderer.NewConfluenceParagraphRenderer(), 100),
util.Prioritized(crenderer.NewConfluenceLinkRenderer(), 100),
util.Prioritized(crenderer.NewConfluenceTaskListRenderer(), 100),
))
// Add GitHub Alerts specific renderers with higher priority to override defaults
// These renderers handle both GitHub Alerts and legacy blockquote syntax
m.Renderer().AddOptions(renderer.WithNodeRenderers(
util.Prioritized(crenderer.NewConfluenceGHAlertsBlockQuoteRenderer(), 200),
util.Prioritized(crenderer.NewConfluenceTextRenderer(c.MarkConfig.StripNewlines), 200),
))
// Add the GitHub Alerts AST transformer that preprocesses [!TYPE] syntax
m.Parser().AddOptions(parser.WithASTTransformers(
util.Prioritized(ctransformer.NewGHAlertsTransformer(), 100),
))
// Add mkdocsadmonitions support if requested
if slices.Contains(c.MarkConfig.Features, "mkdocsadmonitions") {
m.Parser().AddOptions(
parser.WithBlockParsers(
util.Prioritized(mkDocsParser.NewAdmonitionParser(), 100),
),
)
m.Renderer().AddOptions(renderer.WithNodeRenderers(
util.Prioritized(crenderer.NewConfluenceMkDocsAdmonitionRenderer(), 100),
))
}
// Add mention support if requested
if slices.Contains(c.MarkConfig.Features, "mention") {
m.Parser().AddOptions(
parser.WithInlineParsers(
util.Prioritized(cparser.NewMentionParser(), 99),
),
)
m.Renderer().AddOptions(renderer.WithNodeRenderers(
util.Prioritized(crenderer.NewConfluenceMentionRenderer(c.Stdlib), 100),
))
}
// Add confluence tag parser for <ac:*/> tags
m.Parser().AddOptions(parser.WithInlineParsers(
util.Prioritized(cparser.NewConfluenceTagParser(), 199),
))
}
// CompileMarkdownWithTransformer compiles markdown using the transformer approach for GitHub Alerts
// This function provides enhanced GitHub Alert processing while maintaining full compatibility
// with existing markdown functionality. It transforms [!NOTE], [!TIP], etc. into proper titles.
// This is an alias for CompileMarkdown for backward compatibility.
func CompileMarkdownWithTransformer(markdown []byte, stdlib *stdlib.Lib, path string, cfg types.MarkConfig) (string, []attachment.Attachment, error) {
return CompileMarkdown(markdown, stdlib, path, cfg)
log.Tracef(nil, "rendered markdown to html:\n%s", string(html))
return string(html), confluenceExtension.Attachments, nil
}

View File

@ -1,312 +0,0 @@
package mark_test
import (
"testing"
mark "github.com/kovetskiy/mark/v16/markdown"
"github.com/kovetskiy/mark/v16/stdlib"
"github.com/kovetskiy/mark/v16/types"
"github.com/stretchr/testify/assert"
)
// TestGHAlertsTransformerVsLegacyRenderer runs the same markdown through both
// the transformer-based pipeline (CompileMarkdown) and the legacy renderer
// (CompileMarkdownLegacy) and compares their output: which pipeline emits a
// Confluence structured macro, and whether the GitHub Alert [!TYPE] marker is
// cleaned from the rendered HTML.
func TestGHAlertsTransformerVsLegacyRenderer(t *testing.T) {
	testCases := []struct {
		name        string
		markdown    string
		expectMacro bool
		expectClean bool // Whether the [!TYPE] syntax should be cleaned up
		description string
	}{
		{
			name:        "GitHub Alert NOTE",
			markdown:    "> [!NOTE]\n> This is a test note.",
			expectMacro: true,
			expectClean: true,
			description: "GitHub Alert [!NOTE] syntax should be converted to Confluence info macro",
		},
		{
			name:        "GitHub Alert TIP",
			markdown:    "> [!TIP]\n> This is a helpful tip.",
			expectMacro: true,
			expectClean: true,
			description: "GitHub Alert [!TIP] syntax should be converted to Confluence tip macro",
		},
		{
			name:        "GitHub Alert WARNING",
			markdown:    "> [!WARNING]\n> This is a warning message.",
			expectMacro: true,
			expectClean: true,
			description: "GitHub Alert [!WARNING] syntax should be converted to Confluence note macro",
		},
		{
			name:        "GitHub Alert CAUTION",
			markdown:    "> [!CAUTION]\n> Be very careful here.",
			expectMacro: true,
			expectClean: true,
			description: "GitHub Alert [!CAUTION] syntax should be converted to Confluence warning macro",
		},
		{
			name:        "GitHub Alert IMPORTANT",
			markdown:    "> [!IMPORTANT]\n> This is very important.",
			expectMacro: true,
			expectClean: true,
			description: "GitHub Alert [!IMPORTANT] syntax should be converted to Confluence info macro",
		},
		{
			name:        "Legacy blockquote with info",
			markdown:    "> info: This is legacy info syntax.",
			expectMacro: true,
			expectClean: false,
			description: "Legacy info: syntax should be converted to Confluence info macro",
		},
		{
			name:        "Regular blockquote",
			markdown:    "> This is just a regular blockquote.",
			expectMacro: false,
			expectClean: false,
			description: "Regular blockquotes should remain as HTML blockquote elements",
		},
	}

	stdlib, err := stdlib.New(nil)
	if err != nil {
		t.Fatalf("Failed to create stdlib: %v", err)
	}

	cfg := types.MarkConfig{
		Features:      []string{},
		StripNewlines: false,
		DropFirstH1:   false,
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			t.Logf("Testing: %s", tc.description)

			// Test with GitHub Alerts transformer (primary approach)
			transformerResult, transformerAttachments, err := mark.CompileMarkdown([]byte(tc.markdown), stdlib, "/test", cfg)
			assert.NoError(t, err)

			// Test with legacy renderer
			legacyResult, legacyAttachments, err := mark.CompileMarkdownLegacy([]byte(tc.markdown), stdlib, "/test", cfg)
			assert.NoError(t, err)

			// Basic checks
			assert.NotEmpty(t, transformerResult, "Transformer result should not be empty")
			assert.NotEmpty(t, legacyResult, "Legacy result should not be empty")
			assert.Empty(t, transformerAttachments, "Should have no attachments")
			assert.Empty(t, legacyAttachments, "Should have no attachments")

			// Check for Confluence macro presence
			if tc.expectMacro {
				assert.Contains(t, transformerResult, "structured-macro", "Transformer should produce Confluence macro")
				// Legacy renderer should NOT handle GitHub Alert syntax - it should treat as plain blockquote
				if tc.expectClean {
					// This is a GitHub Alert case - legacy should produce blockquote, transformer should produce macro
					assert.Contains(t, legacyResult, "<blockquote>", "Legacy renderer should treat GitHub Alerts as regular blockquotes")
				} else {
					// This is a legacy syntax case (like "info:") - both should produce macro
					assert.Contains(t, legacyResult, "structured-macro", "Legacy renderer should produce Confluence macro for legacy syntax")
				}
			} else {
				assert.Contains(t, transformerResult, "<blockquote>", "Regular blockquote should use HTML blockquote")
				assert.Contains(t, legacyResult, "<blockquote>", "Regular blockquote should use HTML blockquote")
			}

			// Check for GitHub Alert syntax cleanup (only for transformer with GitHub Alert syntax)
			if tc.expectClean {
				// Transformer should clean up the [!TYPE] syntax
				assert.NotContains(t, transformerResult, "[!", "Transformer should remove GitHub Alert syntax markers")
				// Legacy renderer might not clean it up (depending on implementation)
				// We'll just log what it produces for comparison
				t.Logf("Transformer result: %s", transformerResult)
				t.Logf("Legacy result: %s", legacyResult)
			} else {
				// For non-GitHub Alert cases, both should behave similarly
				t.Logf("Transformer result: %s", transformerResult)
				t.Logf("Legacy result: %s", legacyResult)
			}
		})
	}
}
// TestBasicTransformerFunctionality smoke-tests the GitHub Alerts pipeline: a
// [!NOTE] blockquote must render to a Confluence structured macro and the
// alert marker itself must not leak into the output HTML.
func TestBasicTransformerFunctionality(t *testing.T) {
	lib, err := stdlib.New(nil)
	if err != nil {
		t.Fatalf("Failed to create stdlib: %v", err)
	}

	cfg := types.MarkConfig{
		Features:      []string{},
		StripNewlines: false,
		DropFirstH1:   false,
	}

	result, attachments, err := mark.CompileMarkdown(
		[]byte("> [!NOTE]\n> This is a test note."),
		lib,
		"/test",
		cfg,
	)
	assert.NoError(t, err)

	// Basic checks
	assert.NotEmpty(t, result)
	assert.Empty(t, attachments)
	assert.Contains(t, result, "structured-macro")

	// This test should now pass because we fixed the transformer
	assert.NotContains(t, result, "[!NOTE]", "The GitHub Alert syntax should be cleaned up")

	t.Logf("Transformer result: %s", result)
}
// TestCompatibilityWithExistingFeatures tests that the transformer approach is fully compatible
// with existing non-blockquote functionality from the original markdown tests.
// For every fixture the transformer output must be byte-identical to the
// legacy renderer's output; any divergence is reported with diagnostic logs.
func TestCompatibilityWithExistingFeatures(t *testing.T) {
	testCases := []struct {
		name        string
		markdown    string
		config      types.MarkConfig
		description string
	}{
		{
			name: "Headers Basic",
			markdown: `# Header 1
## Header 2
### Header 3`,
			config: types.MarkConfig{
				Features:      []string{},
				StripNewlines: false,
				DropFirstH1:   false,
			},
			description: "Basic header rendering should be identical",
		},
		{
			name: "Headers with DropFirstH1",
			markdown: `# Header 1
## Header 2
### Header 3`,
			config: types.MarkConfig{
				Features:      []string{},
				StripNewlines: false,
				DropFirstH1:   true,
			},
			description: "Header rendering with DropFirstH1 should be identical",
		},
		{
			name:     "Code Blocks",
			markdown: "`inline code`\n\n```bash\necho \"hello\"\n```",
			config: types.MarkConfig{
				Features:      []string{},
				StripNewlines: false,
				DropFirstH1:   false,
			},
			description: "Code block rendering should be identical",
		},
		{
			name: "Links and Images",
			markdown: `[Link](https://example.com)
![Image](test.png)
[Page Link](ac:Page)`,
			config: types.MarkConfig{
				Features:      []string{},
				StripNewlines: false,
				DropFirstH1:   false,
			},
			description: "Links and images should be rendered identically",
		},
		{
			name: "Tables",
			markdown: `| Header 1 | Header 2 |
|----------|----------|
| Row 1 | Row 2 |`,
			config: types.MarkConfig{
				Features:      []string{},
				StripNewlines: false,
				DropFirstH1:   false,
			},
			description: "Table rendering should be identical",
		},
		{
			name: "Mixed Content",
			markdown: `# Title
Some **bold** and *italic* text.
- List item 1
- List item 2
` + "`inline code`" + ` and:
` + "```javascript\nconsole.log(\"test\");\n```" + `
[Link](https://example.com)`,
			config: types.MarkConfig{
				Features:      []string{},
				StripNewlines: false,
				DropFirstH1:   false,
			},
			description: "Mixed content should be rendered identically",
		},
		{
			name: "Strip Newlines",
			markdown: `Line 1
Line 2
Line 3`,
			config: types.MarkConfig{
				Features:      []string{},
				StripNewlines: true,
				DropFirstH1:   false,
			},
			description: "StripNewlines functionality should work identically",
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			t.Logf("Testing: %s", tc.description)

			stdlib, err := stdlib.New(nil)
			if err != nil {
				t.Fatalf("Failed to create stdlib: %v", err)
			}

			// Test with GitHub Alerts transformer (primary approach)
			transformerResult, transformerAttachments, err := mark.CompileMarkdown([]byte(tc.markdown), stdlib, "/test", tc.config)
			assert.NoError(t, err)

			// Test with legacy renderer (original approach)
			legacyResult, legacyAttachments, err := mark.CompileMarkdownLegacy([]byte(tc.markdown), stdlib, "/test", tc.config)
			assert.NoError(t, err)

			// Basic checks
			assert.NotEmpty(t, transformerResult, "Transformer result should not be empty")
			assert.NotEmpty(t, legacyResult, "Legacy result should not be empty")
			assert.Equal(t, len(transformerAttachments), len(legacyAttachments), "Attachment counts should match")

			// The key compatibility test: results should be identical for non-blockquote content
			if transformerResult != legacyResult {
				t.Errorf("COMPATIBILITY ISSUE: Results differ for %s\n"+
					"Transformer result:\n%s\n\n"+
					"Legacy result:\n%s\n\n"+
					"Diff (transformer vs legacy):",
					tc.name, transformerResult, legacyResult)

				// Log the differences for debugging
				t.Logf("Transformer length: %d", len(transformerResult))
				t.Logf("Legacy length: %d", len(legacyResult))

				// Character-by-character comparison for debugging
				for i := 0; i < len(transformerResult) && i < len(legacyResult); i++ {
					if transformerResult[i] != legacyResult[i] {
						t.Logf("First difference at position %d: transformer='%c'(%d) vs legacy='%c'(%d)",
							i, transformerResult[i], transformerResult[i], legacyResult[i], legacyResult[i])
						break
					}
				}
			} else {
				t.Logf("✅ Perfect compatibility for %s", tc.name)
			}
		})
	}
}

View File

@ -10,7 +10,7 @@ import (
mermaid "github.com/dreampuf/mermaid.go"
"github.com/kovetskiy/mark/v16/attachment"
"github.com/rs/zerolog/log"
"github.com/reconquest/pkg/log"
)
var renderTimeout = 120 * time.Second
@ -19,15 +19,14 @@ func ProcessMermaidLocally(title string, mermaidDiagram []byte, scale float64) (
ctx, cancel := context.WithTimeout(context.TODO(), renderTimeout)
defer cancel()
log.Debug().Msgf("Setting up Mermaid renderer: %q", title)
renderer, err := mermaid.NewRenderEngine(ctx, nil)
log.Debugf(nil, "Setting up Mermaid renderer: %q", title)
renderer, err := mermaid.NewRenderEngine(ctx)
if err != nil {
return attachment.Attachment{}, err
}
defer renderer.Cancel()
log.Debug().Msgf("Rendering: %q", title)
log.Debugf(nil, "Rendering: %q", title)
pngBytes, boxModel, err := renderer.RenderAsScaledPng(string(mermaidDiagram), scale)
if err != nil {
return attachment.Attachment{}, err
@ -40,7 +39,7 @@ func ProcessMermaidLocally(title string, mermaidDiagram []byte, scale float64) (
mermaidBytes := append(mermaidDiagram, scaleAsBytes...)
checkSum, err := attachment.GetChecksum(bytes.NewReader(mermaidBytes))
log.Debug().Msgf("Checksum: %q -> %s", title, checkSum)
log.Debugf(nil, "Checksum: %q -> %s", title, checkSum)
if err != nil {
return attachment.Attachment{}, err

View File

@ -9,7 +9,7 @@ import (
"regexp"
"strings"
"github.com/rs/zerolog/log"
"github.com/reconquest/pkg/log"
"golang.org/x/text/cases"
"golang.org/x/text/language"
)
@ -46,7 +46,6 @@ type Meta struct {
const (
FullWidthContentAppearance = "full-width"
FixedContentAppearance = "fixed"
DefaultContentAppearance = "default"
)
var (
@ -123,12 +122,9 @@ func ExtractMeta(data []byte, spaceFromCli string, titleFromH1 bool, titleFromFi
continue
case ContentAppearance:
switch strings.TrimSpace(value) {
case FixedContentAppearance:
if strings.TrimSpace(value) == FixedContentAppearance {
meta.ContentAppearance = FixedContentAppearance
case DefaultContentAppearance:
meta.ContentAppearance = DefaultContentAppearance
default:
} else {
meta.ContentAppearance = FullWidthContentAppearance
}
@ -136,9 +132,12 @@ func ExtractMeta(data []byte, spaceFromCli string, titleFromH1 bool, titleFromFi
meta.ImageAlign = strings.ToLower(strings.TrimSpace(value))
default:
log.Error().
Err(nil).
Msgf(`encountered unknown header %q line: %#v`, header, line)
log.Errorf(
nil,
`encountered unknown header %q line: %#v`,
header,
line,
)
continue
}
@ -170,12 +169,9 @@ func ExtractMeta(data []byte, spaceFromCli string, titleFromH1 bool, titleFromFi
// Use the global content appearance flag if the header is not set in the document
if meta != nil && defaultContentAppearance != "" && meta.ContentAppearance == "" {
switch strings.TrimSpace(defaultContentAppearance) {
case FixedContentAppearance:
if strings.TrimSpace(defaultContentAppearance) == FixedContentAppearance {
meta.ContentAppearance = FixedContentAppearance
case DefaultContentAppearance:
meta.ContentAppearance = DefaultContentAppearance
default:
} else {
meta.ContentAppearance = FullWidthContentAppearance
}
} else if meta != nil && meta.ContentAppearance == "" {
@ -197,7 +193,11 @@ func ExtractMeta(data []byte, spaceFromCli string, titleFromH1 bool, titleFromFi
pathHash := sha256.Sum256([]byte(path))
// postfix is an 8-character hexadecimal string representation of the first 4 out of 32 bytes of the hash
meta.Title = fmt.Sprintf("%s - %x", meta.Title, pathHash[0:4])
log.Debug().Msgf("appended hash to page title: %s", meta.Title)
log.Debugf(
nil,
"appended hash to page title: %s",
meta.Title,
)
}
// Remove trailing spaces from title

View File

@ -88,22 +88,4 @@ func TestExtractMetaContentAppearance(t *testing.T) {
assert.NotNil(t, meta)
assert.Equal(t, FullWidthContentAppearance, meta.ContentAppearance)
})
t.Run("default appearance via cli flag", func(t *testing.T) {
data := []byte("<!-- Space: DOC -->\n<!-- Title: Example -->\n\nbody\n")
meta, _, err := ExtractMeta(data, "", false, false, "", nil, false, DefaultContentAppearance)
assert.NoError(t, err)
assert.NotNil(t, meta)
assert.Equal(t, DefaultContentAppearance, meta.ContentAppearance)
})
t.Run("default appearance via header", func(t *testing.T) {
data := []byte("<!-- Space: DOC -->\n<!-- Title: Example -->\n<!-- Content-Appearance: default -->\n\nbody\n")
meta, _, err := ExtractMeta(data, "", false, false, "", nil, false, "")
assert.NoError(t, err)
assert.NotNil(t, meta)
assert.Equal(t, DefaultContentAppearance, meta.ContentAppearance)
})
}

View File

@ -5,7 +5,8 @@ import (
"strings"
"github.com/kovetskiy/mark/v16/confluence"
"github.com/rs/zerolog/log"
"github.com/reconquest/karma-go"
"github.com/reconquest/pkg/log"
)
func EnsureAncestry(
@ -21,14 +22,18 @@ func EnsureAncestry(
for i, title := range ancestry {
page, err := api.FindPage(space, title, "page")
if err != nil {
return nil, fmt.Errorf("error during finding parent page with title %q: %w", title, err)
return nil, karma.Format(
err,
`error during finding parent page with title %q`,
title,
)
}
if page == nil {
break
}
log.Debug().Msgf("parent page %q exists: %s", title, page.Links.Full)
log.Debugf(nil, "parent page %q exists: %s", title, page.Links.Full)
rest = ancestry[i:]
parent = page
@ -39,7 +44,11 @@ func EnsureAncestry(
} else {
page, err := api.FindRootPage(space)
if err != nil {
return nil, fmt.Errorf("can't find root page for space %q: %w", space, err)
return nil, karma.Format(
err,
"can't find root page for space %q",
space,
)
}
parent = page
@ -48,8 +57,8 @@ func EnsureAncestry(
return parent, nil
}
log.Debug().
Msgf(
log.Debugf(
nil,
"empty pages under %q to be created: %s",
parent.Title,
strings.Join(rest, ` > `),
@ -59,14 +68,18 @@ func EnsureAncestry(
for _, title := range rest {
page, err := api.CreatePage(space, "page", parent, title, ``)
if err != nil {
return nil, fmt.Errorf("error during creating parent page with title %q: %w", title, err)
return nil, karma.Format(
err,
`error during creating parent page with title %q`,
title,
)
}
parent = page
}
} else {
log.Info().
Msgf(
log.Infof(
nil,
"skipping page creation due to enabled dry-run mode, "+
"need to create %d pages: %v",
len(rest),
@ -95,11 +108,15 @@ func ValidateAncestry(
if len(page.Ancestors) < 1 {
homepage, err := api.FindHomePage(space)
if err != nil {
return nil, fmt.Errorf("can't obtain home page from space %q: %w", space, err)
return nil, karma.Format(
err,
"can't obtain home page from space %q",
space,
)
}
if page.ID == homepage.ID {
log.Debug().Msgf("page is homepage for space %q", space)
log.Debugf(nil, "page is homepage for space %q", space)
isHomepage = true
} else {
return nil, fmt.Errorf(`page %q has no parents`, page.Title)
@ -131,10 +148,10 @@ func ValidateAncestry(
}
if !valid {
return nil, fmt.Errorf(
"the page has fewer parents than expected: title=%q, actual=[%s], expected=[%s]",
page.Title, strings.Join(actual, " > "), strings.Join(ancestry, " > "),
)
return nil, karma.Describe("title", page.Title).
Describe("actual", strings.Join(actual, " > ")).
Describe("expected", strings.Join(ancestry, " > ")).
Format(nil, "the page has fewer parents than expected")
}
}
@ -156,9 +173,11 @@ func ValidateAncestry(
list = append(list, ancestor.Title)
}
return nil, fmt.Errorf(
"unexpected ancestry tree, did not find expected parent page %q in the tree: actual=[%s]",
parent, strings.Join(list, "; "),
return nil, karma.Describe("expected parent", parent).
Describe("list", strings.Join(list, "; ")).
Format(
nil,
"unexpected ancestry tree, did not find expected parent page in the tree",
)
}
}

View File

@ -14,7 +14,8 @@ import (
"github.com/kovetskiy/mark/v16/confluence"
"github.com/kovetskiy/mark/v16/metadata"
"github.com/rs/zerolog/log"
"github.com/reconquest/karma-go"
"github.com/reconquest/pkg/log"
)
type LinkSubstitution struct {
@ -50,8 +51,8 @@ func ResolveRelativeLinks(
links := []LinkSubstitution{}
for _, match := range matches {
log.Trace().
Msgf(
log.Tracef(
nil,
"found a relative link: full=%s filename=%s hash=%s",
match.full,
match.filename,
@ -59,7 +60,7 @@ func ResolveRelativeLinks(
)
resolved, err := resolveLink(api, base, match, spaceForLinks, titleFromH1, titleFromFilename, parents, titleAppendGeneratedHash)
if err != nil {
return nil, fmt.Errorf("resolve link %q: %w", match.full, err)
return nil, karma.Format(err, "resolve link: %q", match.full)
}
if resolved == "" {
@ -90,7 +91,7 @@ func resolveLink(
if len(link.filename) > 0 {
filepath := filepath.Join(base, link.filename)
log.Trace().Msgf("filepath: %s", filepath)
log.Tracef(nil, "filepath: %s", filepath)
stat, err := os.Stat(filepath)
if err != nil {
return "", nil
@ -102,13 +103,13 @@ func resolveLink(
linkContents, err := os.ReadFile(filepath)
if err != nil {
return "", fmt.Errorf("read file %s: %w", filepath, err)
return "", karma.Format(err, "read file: %s", filepath)
}
contentType := http.DetectContentType(linkContents)
// Check if the MIME type starts with "text/"
if !strings.HasPrefix(contentType, "text/") {
log.Debug().Msgf("Ignoring link to file %q: detected content type %v", filepath, contentType)
log.Debugf(nil, "Ignoring link to file %q: detected content type %v", filepath, contentType)
return "", nil
}
@ -122,9 +123,8 @@ func resolveLink(
// not markdown or have mark required metadata
linkMeta, _, err := metadata.ExtractMeta(linkContents, spaceForLinks, titleFromH1, titleFromFilename, filepath, parents, titleAppendGeneratedHash, "")
if err != nil {
log.Error().
Err(err).
Msgf(
log.Errorf(
err,
"unable to extract metadata from %q; ignoring the relative link",
filepath,
)
@ -136,8 +136,8 @@ func resolveLink(
return "", nil
}
log.Trace().
Msgf(
log.Tracef(
nil,
"extracted metadata: space=%s title=%s",
linkMeta.Space,
linkMeta.Title,
@ -145,7 +145,13 @@ func resolveLink(
result, err = getConfluenceLink(api, linkMeta.Space, linkMeta.Title)
if err != nil {
return "", fmt.Errorf("find confluence page (file=%s, space=%s, title=%s): %w", filepath, linkMeta.Space, linkMeta.Title, err)
return "", karma.Format(
err,
"find confluence page: %s / %s / %s",
filepath,
linkMeta.Space,
linkMeta.Title,
)
}
if result == "" {
@ -166,7 +172,7 @@ func SubstituteLinks(markdown []byte, links []LinkSubstitution) []byte {
continue
}
log.Trace().Msgf("substitute link: %q -> %q", link.From, link.To)
log.Tracef(nil, "substitute link: %q -> %q", link.From, link.To)
markdown = bytes.ReplaceAll(
markdown,
@ -210,14 +216,14 @@ func getConfluenceLink(
// Try to find as a page first
page, err := api.FindPage(space, title, "page")
if err != nil {
return "", fmt.Errorf("api: find page %q in space %q: %w", title, space, err)
return "", karma.Format(err, "api: find page")
}
// If not found as a page, try to find as a blog post
if page == nil {
page, err = api.FindPage(space, title, "blogpost")
if err != nil {
return "", fmt.Errorf("api: find blogpost %q in space %q: %w", title, space, err)
return "", karma.Format(err, "api: find blogpost")
}
}
@ -235,7 +241,7 @@ func getConfluenceLink(
tiny, err := GenerateTinyLink(baseURL, page.ID)
if err != nil {
return "", fmt.Errorf("generate tiny link for page %s: %w", page.ID, err)
return "", karma.Format(err, "generate tiny link for page %s", page.ID)
}
return tiny, nil

View File

@ -1,12 +1,12 @@
package page
import (
"fmt"
"strings"
"github.com/kovetskiy/mark/v16/confluence"
"github.com/kovetskiy/mark/v16/metadata"
"github.com/rs/zerolog/log"
"github.com/reconquest/karma-go"
"github.com/reconquest/pkg/log"
)
func ResolvePage(
@ -15,16 +15,20 @@ func ResolvePage(
meta *metadata.Meta,
) (*confluence.PageInfo, *confluence.PageInfo, error) {
if meta == nil {
return nil, nil, fmt.Errorf("metadata is empty")
return nil, nil, karma.Format(nil, "metadata is empty")
}
page, err := api.FindPage(meta.Space, meta.Title, meta.Type)
if err != nil {
return nil, nil, fmt.Errorf("error while finding page %q: %w", meta.Title, err)
return nil, nil, karma.Format(
err,
"error while finding page %q",
meta.Title,
)
}
if meta.Type == "blogpost" {
log.Info().
Msgf(
log.Infof(
nil,
"blog post will be stored as: %s",
meta.Title,
)
@ -35,7 +39,11 @@ func ResolvePage(
// check to see if home page is in Parents
homepage, err := api.FindHomePage(meta.Space)
if err != nil {
return nil, nil, fmt.Errorf("can't obtain home page from space %q: %w", meta.Space, err)
return nil, nil, karma.Format(
err,
"can't obtain home page from space %q",
meta.Space,
)
}
skipHomeAncestry := false
@ -61,8 +69,8 @@ func ResolvePage(
}
if page == nil {
log.Warn().
Msgf(
log.Warningf(
nil,
"page %q is not found ",
ancestry[len(ancestry)-1],
)
@ -71,8 +79,8 @@ func ResolvePage(
path := meta.Parents
path = append(path, meta.Title)
log.Debug().
Msgf(
log.Debugf(
nil,
"resolving page path: ??? > %s",
strings.Join(path, ` > `),
)
@ -85,7 +93,11 @@ func ResolvePage(
meta.Parents,
)
if err != nil {
return nil, nil, fmt.Errorf("can't create ancestry tree %q: %w", strings.Join(meta.Parents, ` > `), err)
return nil, nil, karma.Format(
err,
"can't create ancestry tree: %s",
strings.Join(meta.Parents, ` > `),
)
}
titles := []string{}
@ -95,7 +107,8 @@ func ResolvePage(
titles = append(titles, parent.Title)
log.Info().Msgf(
log.Infof(
nil,
"page will be stored under path: %s > %s",
strings.Join(titles, ` > `),
meta.Title,

View File

@ -64,9 +64,18 @@ func LegacyBlockQuoteClassifier() BlockQuoteClassifier {
}
}
// GHAlertsBlockQuoteClassifier builds a classifier whose patterns recognize
// GitHub alert markers (the text between '[' and ']', e.g. "!NOTE") and map
// them onto the Confluence macro types used for rendering.
func GHAlertsBlockQuoteClassifier() BlockQuoteClassifier {
	patterns := map[string]*regexp.Regexp{
		"info": regexp.MustCompile(`(?i)^\!(note|important)`),
		"note": regexp.MustCompile(`(?i)^\!warning`),
		"warn": regexp.MustCompile(`(?i)^\!caution`),
		"tip":  regexp.MustCompile(`(?i)^\!tip`),
	}
	return BlockQuoteClassifier{patternMap: patterns}
}
// ClassifyingBlockQuote compares a string against a set of patterns and returns a BlockQuoteType
// Note: GitHub Alerts ([!NOTE], [!TIP], etc.) are now handled by the superior transformer approach
// in the GitHub Alerts extension, not by this legacy blockquote renderer
func (classifier BlockQuoteClassifier) ClassifyingBlockQuote(literal string) BlockQuoteType {
var t = None
@ -84,11 +93,10 @@ func (classifier BlockQuoteClassifier) ClassifyingBlockQuote(literal string) Blo
}
// ParseBlockQuoteType parses the first line of a blockquote and returns its type
// Note: This legacy function only handles traditional "info:", "note:", etc. syntax
// GitHub Alerts ([!NOTE], [!TIP], etc.) are handled by the GitHub Alerts transformer
func ParseBlockQuoteType(node ast.Node, source []byte) BlockQuoteType {
var t = None
var legacyClassifier = LegacyBlockQuoteClassifier()
var ghAlertsClassifier = GHAlertsBlockQuoteClassifier()
countParagraphs := 0
_ = ast.Walk(node, func(node ast.Node, entering bool) (ast.WalkStatus, error) {
@ -101,6 +109,27 @@ func ParseBlockQuoteType(node ast.Node, source []byte) BlockQuoteType {
if node.Kind() == ast.KindText {
n := node.(*ast.Text)
t = legacyClassifier.ClassifyingBlockQuote(string(n.Value(source)))
// If the node is a text node but classification returned none do not give up!
// Find the next two sibling nodes midNode and rightNode,
// 1. If both are also a text node
// 2. and the original node (node) text value is '['
// 3. and the rightNode text value is ']'
// It means with high degree of confidence that the original md doc contains a Github alert type of blockquote
// Classifying the next text type node (midNode) will confirm that.
if t == None {
midNode := node.NextSibling()
if midNode != nil && midNode.Kind() == ast.KindText {
rightNode := midNode.NextSibling()
midTextNode := midNode.(*ast.Text)
if rightNode != nil && rightNode.Kind() == ast.KindText {
rightTextNode := rightNode.(*ast.Text)
if string(n.Value(source)) == "[" && string(rightTextNode.Value(source)) == "]" {
t = ghAlertsClassifier.ClassifyingBlockQuote(string(midTextNode.Value(source)))
}
}
}
}
countParagraphs += 1
}
if node.Kind() == ast.KindHTMLBlock {

View File

@ -11,6 +11,7 @@ import (
"github.com/kovetskiy/mark/v16/mermaid"
"github.com/kovetskiy/mark/v16/stdlib"
"github.com/kovetskiy/mark/v16/types"
"github.com/reconquest/pkg/log"
"github.com/yuin/goldmark/ast"
"github.com/yuin/goldmark/renderer"
@ -134,8 +135,8 @@ func (r *ConfluenceFencedCodeBlockRenderer) renderFencedCodeBlock(writer util.Bu
if lang == "d2" && slices.Contains(r.MarkConfig.Features, "d2") {
attachment, err := d2.ProcessD2(title, lval, r.MarkConfig.D2Scale)
if err != nil {
line, col := GetLineCol(source, node.Pos())
return ast.WalkStop, fmt.Errorf("line %d, col %d: d2 rendering failed: %v", line, col, err)
log.Debugf(nil, "error: %v", err)
return ast.WalkStop, err
}
r.Attachments.Attach(attachment)
@ -178,8 +179,8 @@ func (r *ConfluenceFencedCodeBlockRenderer) renderFencedCodeBlock(writer util.Bu
} else if lang == "mermaid" && slices.Contains(r.MarkConfig.Features, "mermaid") {
attachment, err := mermaid.ProcessMermaidLocally(title, lval, r.MarkConfig.MermaidScale)
if err != nil {
line, col := GetLineCol(source, node.Pos())
return ast.WalkStop, fmt.Errorf("line %d, col %d: mermaid rendering failed: %v", line, col, err)
log.Debugf(nil, "error: %v", err)
return ast.WalkStop, err
}
r.Attachments.Attach(attachment)

View File

@ -1,150 +0,0 @@
package renderer
import (
"fmt"
"github.com/yuin/goldmark/ast"
"github.com/yuin/goldmark/renderer"
"github.com/yuin/goldmark/renderer/html"
"github.com/yuin/goldmark/util"
)
// ConfluenceGHAlertsBlockQuoteRenderer renders blockquote nodes, turning
// blockquotes tagged as GitHub alerts into Confluence structured macros and
// delegating everything else to the legacy blockquote rendering path.
type ConfluenceGHAlertsBlockQuoteRenderer struct {
	html.Config                       // embedded goldmark HTML renderer configuration
	LevelMap       BlockQuoteLevelMap // nesting level per blockquote node; built lazily on first render
	BlockQuoteNode ast.Node           // root-level blockquote currently open, matched again on its closing visit
}
// NewConfluenceGHAlertsBlockQuoteRenderer creates a new instance of the
// renderer for GitHub Alerts.
//
// opts are standard goldmark HTML options (e.g. html.WithXHTML()).
func NewConfluenceGHAlertsBlockQuoteRenderer(opts ...html.Option) renderer.NodeRenderer {
	// Bug fix: opts were previously accepted but silently ignored; apply
	// each option to the config before constructing the renderer.
	cfg := html.NewConfig()
	for _, opt := range opts {
		opt.SetHTMLOption(&cfg)
	}
	return &ConfluenceGHAlertsBlockQuoteRenderer{
		Config:         cfg,
		LevelMap:       nil,
		BlockQuoteNode: nil,
	}
}
// RegisterFuncs implements NodeRenderer.RegisterFuncs.
// This renderer handles only blockquote nodes.
func (r *ConfluenceGHAlertsBlockQuoteRenderer) RegisterFuncs(reg renderer.NodeRendererFuncRegisterer) {
	reg.Register(ast.KindBlockquote, r.renderBlockQuote)
}
// getConfluenceMacroName maps a GitHub alert type to the name of the
// Confluence macro used to render it. Unknown types fall back to "info".
func (r *ConfluenceGHAlertsBlockQuoteRenderer) getConfluenceMacroName(alertType string) string {
	macroByAlert := map[string]string{
		"note":      "info",
		"tip":       "tip",
		"important": "info",
		"warning":   "note",
		"caution":   "warning",
	}
	if macro, known := macroByAlert[alertType]; known {
		return macro
	}
	return "info"
}
// renderBlockQuote dispatches a blockquote node: blockquotes carrying the
// "gh-alert-type" attribute (set by the GHAlerts transformer) are rendered
// as Confluence macros; all others go through the legacy path.
func (r *ConfluenceGHAlertsBlockQuoteRenderer) renderBlockQuote(writer util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
	// Lazily compute nesting levels for the blockquote tree on first visit.
	if r.LevelMap == nil {
		r.LevelMap = GenerateBlockQuoteLevel(node)
	}
	attr, found := node.Attribute([]byte("gh-alert-type"))
	if found && attr != nil {
		if raw, isBytes := attr.([]byte); isBytes {
			return r.renderGHAlert(writer, source, node, entering, string(raw))
		}
	}
	// No usable alert marker: fall back to legacy blockquote rendering.
	return r.renderLegacyBlockQuote(writer, source, node, entering)
}
// renderGHAlert wraps a root-level blockquote tagged as a GitHub alert in the
// matching Confluence structured macro. Nested blockquotes inside the alert
// are emitted as plain <blockquote> elements. Returns WalkStop only on a
// writer error.
func (r *ConfluenceGHAlertsBlockQuoteRenderer) renderGHAlert(writer util.BufWriter, source []byte, node ast.Node, entering bool, alertType string) (ast.WalkStatus, error) {
	quoteLevel := r.LevelMap.Level(node)
	if quoteLevel == 0 && entering {
		// Remember which node opened the macro so the closing tag is
		// written only when leaving this same node.
		r.BlockQuoteNode = node
		macroName := r.getConfluenceMacroName(alertType)
		prefix := fmt.Sprintf("<ac:structured-macro ac:name=\"%s\"><ac:parameter ac:name=\"icon\">true</ac:parameter><ac:rich-text-body>\n", macroName)
		if _, err := writer.Write([]byte(prefix)); err != nil {
			return ast.WalkStop, err
		}
		return ast.WalkContinue, nil
	}
	if quoteLevel == 0 && !entering && node == r.BlockQuoteNode {
		// Leaving the tracked root blockquote: close the macro.
		suffix := "</ac:rich-text-body></ac:structured-macro>\n"
		if _, err := writer.Write([]byte(suffix)); err != nil {
			return ast.WalkStop, err
		}
		return ast.WalkContinue, nil
	}
	// For nested blockquotes or continuing the content, use default rendering
	if quoteLevel > 0 {
		if entering {
			if _, err := writer.WriteString("<blockquote>\n"); err != nil {
				return ast.WalkStop, err
			}
		} else {
			if _, err := writer.WriteString("</blockquote>\n"); err != nil {
				return ast.WalkStop, err
			}
		}
	} else if quoteLevel == 0 && alertType == "" {
		// This handles the fallback case for non-alert blockquotes if called accidentally
		if entering {
			if _, err := writer.WriteString("<blockquote>\n"); err != nil {
				return ast.WalkStop, err
			}
		} else {
			if _, err := writer.WriteString("</blockquote>\n"); err != nil {
				return ast.WalkStop, err
			}
		}
	}
	return ast.WalkContinue, nil
}
// renderLegacyBlockQuote renders a blockquote using the traditional
// classification ("info:", "note:", ... prefixes via ParseBlockQuoteType).
// A classified root-level blockquote becomes a Confluence macro; anything
// else becomes a plain <blockquote>.
func (r *ConfluenceGHAlertsBlockQuoteRenderer) renderLegacyBlockQuote(writer util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
	// Legacy blockquote handling (same as original ParseBlockQuoteType logic)
	quoteType := ParseBlockQuoteType(node, source)
	quoteLevel := r.LevelMap.Level(node)
	if quoteLevel == 0 && entering && quoteType != None {
		// Track the opening node so only its closing visit emits the suffix.
		r.BlockQuoteNode = node
		prefix := fmt.Sprintf("<ac:structured-macro ac:name=\"%s\"><ac:parameter ac:name=\"icon\">true</ac:parameter><ac:rich-text-body>\n", quoteType)
		if _, err := writer.Write([]byte(prefix)); err != nil {
			return ast.WalkStop, err
		}
		return ast.WalkContinue, nil
	}
	if quoteLevel == 0 && !entering && node == r.BlockQuoteNode {
		suffix := "</ac:rich-text-body></ac:structured-macro>\n"
		if _, err := writer.Write([]byte(suffix)); err != nil {
			return ast.WalkStop, err
		}
		return ast.WalkContinue, nil
	}
	// For nested blockquotes or regular blockquotes (at root level with no macro type)
	if quoteLevel > 0 || (quoteLevel == 0 && quoteType == None) {
		if entering {
			if _, err := writer.WriteString("<blockquote>\n"); err != nil {
				return ast.WalkStop, err
			}
		} else {
			if _, err := writer.WriteString("</blockquote>\n"); err != nil {
				return ast.WalkStop, err
			}
		}
	}
	return ast.WalkContinue, nil
}

View File

@ -145,8 +145,7 @@ func (r *ConfluenceImageRenderer) renderImage(writer util.BufWriter, source []by
)
} else {
if len(attachments) == 0 {
line, col := GetLineCol(source, node.Pos())
return ast.WalkStop, fmt.Errorf("line %d, col %d: no attachment resolved for %q", line, col, string(n.Destination))
return ast.WalkStop, fmt.Errorf("no attachment resolved for %q", string(n.Destination))
}
r.Attachments.Attach(attachments[0])

View File

@ -10,15 +10,23 @@ import (
"github.com/yuin/goldmark/util"
)
// ConfluenceTextRenderer slightly alters the default goldmark behavior for
// inline text block. It allows for soft breaks
// (c.f. https://spec.commonmark.org/0.30/#softbreak)
// to be rendered into HTML as either '\n' (the goldmark default)
// or as ' '.
// This latter option is useful for Confluence,
// which inserts <br> tags into uploaded HTML where it sees '\n'.
// See also https://sembr.org/ for partial motivation.
type ConfluenceTextRenderer struct {
html.Config
softBreak rune
}
// NewConfluenceTextRenderer creates a new instance of the renderer with GitHub Alerts support
func NewConfluenceTextRenderer(stripNewlines bool, opts ...html.Option) renderer.NodeRenderer {
// NewConfluenceTextRenderer creates a new instance of the ConfluenceTextRenderer
func NewConfluenceTextRenderer(stripNL bool, opts ...html.Option) renderer.NodeRenderer {
sb := '\n'
if stripNewlines {
if stripNL {
sb = ' '
}
return &ConfluenceTextRenderer{
@ -27,36 +35,18 @@ func NewConfluenceTextRenderer(stripNewlines bool, opts ...html.Option) renderer
}
}
// RegisterFuncs implements NodeRenderer.RegisterFuncs
// RegisterFuncs implements NodeRenderer.RegisterFuncs .
func (r *ConfluenceTextRenderer) RegisterFuncs(reg renderer.NodeRendererFuncRegisterer) {
reg.Register(ast.KindText, r.renderText)
}
// renderText handles text rendering and supports GitHub Alerts replacement content.
// This is an enhanced version of the default goldmark text renderer that checks
// for replacement-content attributes before falling back to standard behavior.
// Note: This logic is partially duplicated from ConfluenceTextLegacyRenderer.renderText
// but includes additional GitHub Alerts support. We keep them separate to maintain
// clean legacy vs enhanced implementation paths.
// This is taken from https://github.com/yuin/goldmark/blob/v1.6.0/renderer/html/html.go#L719
// with the hardcoded '\n' for soft breaks swapped for the configurable r.softBreak
func (r *ConfluenceTextRenderer) renderText(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
if !entering {
return ast.WalkContinue, nil
}
n := node.(*ast.Text)
// Check if this text node has replacement content from the GHAlerts transformer
if replacementContent, hasAttribute := node.Attribute([]byte("replacement-content")); hasAttribute && replacementContent != nil {
if contentBytes, ok := replacementContent.([]byte); ok {
_, err := w.Write(contentBytes)
if err != nil {
return ast.WalkStop, err
}
return ast.WalkContinue, nil
}
}
// Default text rendering behavior (same as original ConfluenceTextRenderer)
segment := n.Segment
if n.IsRaw() {
r.Writer.RawWrite(w, segment.Value(source))
@ -97,7 +87,6 @@ func (r *ConfluenceTextRenderer) renderText(w util.BufWriter, source []byte, nod
}
}
}
return ast.WalkContinue, nil
}

View File

@ -1,90 +0,0 @@
package renderer
import (
"unicode/utf8"
"github.com/yuin/goldmark/ast"
"github.com/yuin/goldmark/renderer"
"github.com/yuin/goldmark/renderer/html"
"github.com/yuin/goldmark/util"
)
// ConfluenceTextLegacyRenderer slightly alters the default goldmark behavior for
// inline text block. It allows for soft breaks
// (c.f. https://spec.commonmark.org/0.30/#softbreak)
// to be rendered into HTML as either '\n' (the goldmark default)
// or as ' '.
// This latter option is useful for Confluence,
// which inserts <br> tags into uploaded HTML where it sees '\n'.
// See also https://sembr.org/ for partial motivation.
type ConfluenceTextLegacyRenderer struct {
	html.Config      // embedded goldmark HTML renderer configuration
	softBreak rune   // character emitted on a soft line break: '\n' by default, ' ' when stripping newlines
}
// NewConfluenceTextLegacyRenderer creates a new instance of the ConfluenceTextRenderer (legacy version).
//
// stripNL selects ' ' instead of '\n' as the soft-break character, which
// keeps Confluence from inserting spurious <br> tags.
// opts are standard goldmark HTML options.
func NewConfluenceTextLegacyRenderer(stripNL bool, opts ...html.Option) renderer.NodeRenderer {
	sb := '\n'
	if stripNL {
		sb = ' '
	}
	// Bug fix: opts were previously accepted but silently ignored; apply
	// each option to the config before constructing the renderer.
	cfg := html.NewConfig()
	for _, opt := range opts {
		opt.SetHTMLOption(&cfg)
	}
	return &ConfluenceTextLegacyRenderer{
		Config:    cfg,
		softBreak: sb,
	}
}
// RegisterFuncs implements NodeRenderer.RegisterFuncs.
// This renderer handles only inline text nodes.
func (r *ConfluenceTextLegacyRenderer) RegisterFuncs(reg renderer.NodeRendererFuncRegisterer) {
	reg.Register(ast.KindText, r.renderText)
}
// This is taken from https://github.com/yuin/goldmark/blob/v1.6.0/renderer/html/html.go#L719
// with the hardcoded '\n' for soft breaks swapped for the configurable r.softBreak
//
// renderText writes an inline text node, handling hard line breaks, soft
// line breaks, and optional East-Asian soft-break suppression.
func (r *ConfluenceTextLegacyRenderer) renderText(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
	if !entering {
		return ast.WalkContinue, nil
	}
	n := node.(*ast.Text)
	segment := n.Segment
	if n.IsRaw() {
		// Raw text (e.g. inside code spans) is written without escaping.
		r.Writer.RawWrite(w, segment.Value(source))
	} else {
		value := segment.Value(source)
		r.Writer.Write(w, value)
		if n.HardLineBreak() || (n.SoftLineBreak() && r.HardWraps) {
			// Hard break (or soft break under HardWraps): emit an explicit <br>.
			if r.XHTML {
				_, _ = w.WriteString("<br />\n")
			} else {
				_, _ = w.WriteString("<br>\n")
			}
		} else if n.SoftLineBreak() {
			if r.EastAsianLineBreaks != html.EastAsianLineBreaksNone && len(value) != 0 {
				sibling := node.NextSibling()
				if sibling != nil && sibling.Kind() == ast.KindText {
					if siblingText := sibling.(*ast.Text).Value(source); len(siblingText) != 0 {
						// Decide whether a break belongs between the last rune
						// of this node and the first rune of the next text node.
						thisLastRune := util.ToRune(value, len(value)-1)
						siblingFirstRune, _ := utf8.DecodeRune(siblingText)
						// Inline the softLineBreak function as it's not public
						writeLineBreak := false
						switch r.EastAsianLineBreaks {
						case html.EastAsianLineBreaksNone:
							writeLineBreak = false
						case html.EastAsianLineBreaksSimple:
							writeLineBreak = !util.IsEastAsianWideRune(thisLastRune) || !util.IsEastAsianWideRune(siblingFirstRune)
						case html.EastAsianLineBreaksCSS3Draft:
							writeLineBreak = eastAsianLineBreaksCSS3DraftSoftLineBreak(thisLastRune, siblingFirstRune)
						}
						if writeLineBreak {
							// NOTE(review): softBreak is emitted as one byte;
							// fine for '\n'/' ' but would truncate a
							// multi-byte rune — confirm if ever extended.
							_ = w.WriteByte(byte(r.softBreak))
						}
					}
				}
			} else {
				// Plain soft break: emit the configured character ('\n' or ' ').
				_ = w.WriteByte(byte(r.softBreak))
			}
		}
	}
	return ast.WalkContinue, nil
}

View File

@ -1,19 +0,0 @@
package renderer
// GetLineCol converts a byte offset in source into a 1-based (line, column)
// pair. Offsets outside [0, len(source)] are clamped, so the result is
// always well-defined.
func GetLineCol(source []byte, offset int) (line, col int) {
	if offset < 0 {
		offset = 0
	} else if offset > len(source) {
		offset = len(source)
	}
	line, col = 1, 1
	for _, ch := range source[:offset] {
		if ch == '\n' {
			line++
			col = 1
			continue
		}
		col++
	}
	return line, col
}

View File

@ -1,13 +1,14 @@
package stdlib
import (
"fmt"
"html"
"strings"
"text/template"
"github.com/kovetskiy/mark/v16/confluence"
"github.com/rs/zerolog/log"
"github.com/reconquest/pkg/log"
"github.com/reconquest/karma-go"
)
type Lib struct {
@ -41,7 +42,7 @@ func templates(api *confluence.API) (*template.Template, error) {
}
user, err := api.GetUserByName(name)
if err != nil {
log.Error().Err(err).Send()
log.Error(err)
}
return user
@ -444,7 +445,12 @@ func templates(api *confluence.API) (*template.Template, error) {
} {
templates, err = templates.New(name).Parse(body)
if err != nil {
return nil, fmt.Errorf("unable to parse template %q (body=%s): %w", name, body, err)
return nil, karma.
Describe("template", body).
Format(
err,
"unable to parse template",
)
}
}

View File

@ -48,7 +48,7 @@ b</p>
<h2 id="GH-Alerts-Heading">GH Alerts Heading</h2>
<h3 id="Note-Type-Alert-Heading">Note Type Alert Heading</h3>
<ac:structured-macro ac:name="info"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
<p>Note</p>
<p>[!NOTE]</p>
<ul>
<li>Note bullet 1</li>
<li>Note bullet 2</li>
@ -56,7 +56,7 @@ b</p>
</ac:rich-text-body></ac:structured-macro>
<h3 id="Tip-Type-Alert-Heading">Tip Type Alert Heading</h3>
<ac:structured-macro ac:name="tip"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
<p>Tip</p>
<p>[!TIP]</p>
<ul>
<li>Tip bullet 1</li>
<li>Tip bullet 2</li>
@ -64,7 +64,7 @@ b</p>
</ac:rich-text-body></ac:structured-macro>
<h3 id="Warning-Type-Alert-Heading">Warning Type Alert Heading</h3>
<ac:structured-macro ac:name="note"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
<p>Warning</p>
<p>[!WARNING]</p>
<ul>
<li>Warning bullet 1</li>
<li>Warning bullet 2</li>
@ -72,14 +72,14 @@ b</p>
</ac:rich-text-body></ac:structured-macro>
<h3 id="Important/Caution-Type-Alert-Heading">Important/Caution Type Alert Heading</h3>
<ac:structured-macro ac:name="info"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
<p>Important</p>
<p>[!IMPORTANT]</p>
<ul>
<li>Important bullet 1</li>
<li>Important bullet 2</li>
</ul>
</ac:rich-text-body></ac:structured-macro>
<ac:structured-macro ac:name="warning"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
<p>Caution</p>
<p>[!CAUTION]</p>
<ul>
<li>Important bullet 1</li>
<li>Important bullet 2</li>

View File

@ -46,7 +46,7 @@
<h2 id="GH-Alerts-Heading">GH Alerts Heading</h2>
<h3 id="Note-Type-Alert-Heading">Note Type Alert Heading</h3>
<ac:structured-macro ac:name="info"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
<p>Note</p>
<p>[!NOTE]</p>
<ul>
<li>Note bullet 1</li>
<li>Note bullet 2</li>
@ -54,7 +54,7 @@
</ac:rich-text-body></ac:structured-macro>
<h3 id="Tip-Type-Alert-Heading">Tip Type Alert Heading</h3>
<ac:structured-macro ac:name="tip"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
<p>Tip</p>
<p>[!TIP]</p>
<ul>
<li>Tip bullet 1</li>
<li>Tip bullet 2</li>
@ -62,7 +62,7 @@
</ac:rich-text-body></ac:structured-macro>
<h3 id="Warning-Type-Alert-Heading">Warning Type Alert Heading</h3>
<ac:structured-macro ac:name="note"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
<p>Warning</p>
<p>[!WARNING]</p>
<ul>
<li>Warning bullet 1</li>
<li>Warning bullet 2</li>
@ -70,14 +70,14 @@
</ac:rich-text-body></ac:structured-macro>
<h3 id="Important/Caution-Type-Alert-Heading">Important/Caution Type Alert Heading</h3>
<ac:structured-macro ac:name="info"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
<p>Important</p>
<p>[!IMPORTANT]</p>
<ul>
<li>Important bullet 1</li>
<li>Important bullet 2</li>
</ul>
</ac:rich-text-body></ac:structured-macro>
<ac:structured-macro ac:name="warning"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
<p>Caution</p>
<p>[!CAUTION]</p>
<ul>
<li>Important bullet 1</li>
<li>Important bullet 2</li>

10
testdata/quotes.html vendored
View File

@ -49,7 +49,7 @@ b</p>
<h2 id="GH-Alerts-Heading">GH Alerts Heading</h2>
<h3 id="Note-Type-Alert-Heading">Note Type Alert Heading</h3>
<ac:structured-macro ac:name="info"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
<p>Note</p>
<p>[!NOTE]</p>
<ul>
<li>Note bullet 1</li>
<li>Note bullet 2</li>
@ -57,7 +57,7 @@ b</p>
</ac:rich-text-body></ac:structured-macro>
<h3 id="Tip-Type-Alert-Heading">Tip Type Alert Heading</h3>
<ac:structured-macro ac:name="tip"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
<p>Tip</p>
<p>[!TIP]</p>
<ul>
<li>Tip bullet 1</li>
<li>Tip bullet 2</li>
@ -65,7 +65,7 @@ b</p>
</ac:rich-text-body></ac:structured-macro>
<h3 id="Warning-Type-Alert-Heading">Warning Type Alert Heading</h3>
<ac:structured-macro ac:name="note"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
<p>Warning</p>
<p>[!WARNING]</p>
<ul>
<li>Warning bullet 1</li>
<li>Warning bullet 2</li>
@ -73,14 +73,14 @@ b</p>
</ac:rich-text-body></ac:structured-macro>
<h3 id="Important/Caution-Type-Alert-Heading">Important/Caution Type Alert Heading</h3>
<ac:structured-macro ac:name="info"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
<p>Important</p>
<p>[!IMPORTANT]</p>
<ul>
<li>Important bullet 1</li>
<li>Important bullet 2</li>
</ul>
</ac:rich-text-body></ac:structured-macro>
<ac:structured-macro ac:name="warning"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
<p>Caution</p>
<p>[!CAUTION]</p>
<ul>
<li>Important bullet 1</li>
<li>Important bullet 2</li>

View File

@ -1,99 +0,0 @@
# GitHub Alerts Transformer
This directory contains the GitHub Alerts transformer that enables Mark to convert GitHub-style alert syntax into Confluence macros.
## Overview
The GitHub Alerts transformer scans markdown for GitHub Alert markers such as `[!NOTE]`, `[!TIP]`, `[!WARNING]`, `[!CAUTION]`, and `[!IMPORTANT]`, and converts each of them into the appropriate Confluence structured macro.
## Supported Alert Types
| GitHub Alert | Confluence Macro | Description |
|--------------|-----------------|-------------|
| `[!NOTE]` | `info` | General information |
| `[!TIP]` | `tip` | Helpful suggestions |
| `[!IMPORTANT]` | `info` | Critical information |
| `[!WARNING]` | `note` | Important warnings |
| `[!CAUTION]` | `warning` | Dangerous situations |
## Usage Example
### Input Markdown
```markdown
# Test GitHub Alerts
## Note Alert
> [!NOTE]
> This is a note alert with **markdown** formatting.
>
> - Item 1
> - Item 2
## Tip Alert
> [!TIP]
> This is a tip alert.
## Warning Alert
> [!WARNING]
> This is a warning alert.
## Regular Blockquote
> This is a regular blockquote without GitHub Alert syntax.
```
### Output (Confluence Storage Format)
The transformer converts GitHub Alert syntax into Confluence structured macros:
```xml
<ac:structured-macro ac:name="info">
<ac:parameter ac:name="icon">true</ac:parameter>
<ac:rich-text-body>
<p>Note</p>
<p>This is a note alert with <strong>markdown</strong> formatting.</p>
<ul>
<li>Item 1</li>
<li>Item 2</li>
</ul>
</ac:rich-text-body>
</ac:structured-macro>
```
## Key Features
- **GitHub Compatibility**: Full support for GitHub's alert syntax
- **Markdown Preservation**: All markdown formatting within alerts is preserved
- **Fallback Support**: Regular blockquotes without alert syntax remain unchanged
- **User-Friendly Labels**: Adds readable labels (Note, Tip, Warning, etc.) to alert content
- **Confluence Integration**: Maps to appropriate Confluence macro types for optimal display
## Implementation
The transformer works by:
1. **AST Transformation**: Modifies the goldmark AST before rendering
2. **Pattern Matching**: Identifies GitHub Alert patterns in blockquotes
3. **Content Enhancement**: Adds user-friendly labels and processes nested markdown
4. **Macro Generation**: Converts to appropriate Confluence structured macros
## Backward Compatibility
- Legacy `info:`, `tip:`, `warning:` syntax continues to work
- Regular blockquotes remain unchanged
- Full compatibility with existing Mark features
## Testing
The transformer is thoroughly tested with:
- All GitHub Alert types (`[!NOTE]`, `[!TIP]`, `[!WARNING]`, `[!CAUTION]`, `[!IMPORTANT]`)
- Nested markdown formatting (bold, italic, lists, etc.)
- Mixed content scenarios
- Backward compatibility with legacy syntax
- Edge cases and error conditions
See `../markdown/transformer_comparison_test.go` for comprehensive test coverage.

View File

@ -1,143 +0,0 @@
package transformer
import (
"strings"
"github.com/yuin/goldmark/ast"
"github.com/yuin/goldmark/parser"
"github.com/yuin/goldmark/text"
)
// GHAlertsTransformer rewrites blockquotes written with GitHub Alert syntax
// ([!NOTE], [!TIP], etc.) so they can later be rendered as Confluence macros.
type GHAlertsTransformer struct{}

// NewGHAlertsTransformer returns a ready-to-use GitHub Alerts transformer.
func NewGHAlertsTransformer() *GHAlertsTransformer {
	t := new(GHAlertsTransformer)
	return t
}
// Transform implements the parser.ASTTransformer interface: it walks the
// document and converts, in place, every blockquote that opens with GitHub
// Alert syntax.
func (t *GHAlertsTransformer) Transform(doc *ast.Document, reader text.Reader, pc parser.Context) {
	visit := func(node ast.Node, entering bool) (ast.WalkStatus, error) {
		blockquote, isBlockquote := node.(*ast.Blockquote)
		if entering && isBlockquote {
			// Only blockquotes that actually carry a [!TYPE] marker are touched;
			// regular blockquotes pass through unchanged.
			if alertType := t.extractAlertType(blockquote, reader); alertType != "" {
				t.transformBlockquote(blockquote, alertType, reader)
			}
		}
		return ast.WalkContinue, nil
	}
	_ = ast.Walk(doc, visit)
}
// extractAlertType returns the GitHub Alert type ("note", "tip", "important",
// "warning" or "caution") when the blockquote's first paragraph opens with the
// strict [!TYPE] pattern, or "" otherwise.
//
// Goldmark parses the opening marker into three adjacent text nodes
// ("[", "!TYPE", "]"). We follow GitHub's strict syntax: whitespace between
// the bracket and the exclamation mark (e.g. "[! NOTE]") is not recognized.
func (t *GHAlertsTransformer) extractAlertType(blockquote *ast.Blockquote, reader text.Reader) string {
	// The marker must live in the very first paragraph of the blockquote.
	firstChild := blockquote.FirstChild()
	if firstChild == nil || firstChild.Kind() != ast.KindParagraph {
		return ""
	}
	paragraph := firstChild.(*ast.Paragraph)

	// Collect up to three consecutive leading text nodes; anything else
	// (or fewer than three) cannot form the "[", "!TYPE", "]" triple.
	var parts []*ast.Text
	for node := paragraph.FirstChild(); node != nil && node.Kind() == ast.KindText && len(parts) < 3; node = node.NextSibling() {
		parts = append(parts, node.(*ast.Text))
	}
	if len(parts) < 3 {
		return ""
	}

	src := reader.Source()
	opening := string(parts[0].Segment.Value(src))
	marker := string(parts[1].Segment.Value(src))
	closing := string(parts[2].Segment.Value(src))

	if opening != "[" || closing != "]" || !strings.HasPrefix(marker, "!") {
		return ""
	}

	// Normalize and accept only the recognized GitHub Alert types.
	switch alertType := strings.ToLower(strings.TrimPrefix(marker, "!")); alertType {
	case "note", "tip", "important", "warning", "caution":
		return alertType
	}
	return ""
}
// transformBlockquote tags the blockquote with its alert type and strips the
// [!TYPE] marker from its leading paragraph.
func (t *GHAlertsTransformer) transformBlockquote(blockquote *ast.Blockquote, alertType string, reader text.Reader) {
	// The renderer reads this attribute to pick the Confluence macro.
	blockquote.SetAttribute([]byte("gh-alert-type"), []byte(alertType))

	if paragraph, ok := blockquote.FirstChild().(*ast.Paragraph); ok {
		t.splitAlertParagraph(blockquote, paragraph, alertType, reader)
	}
}
// splitAlertParagraph prepends a title paragraph ("Note", "Tip", ...) to the
// blockquote and removes the three text nodes that made up the [!TYPE] marker.
func (t *GHAlertsTransformer) splitAlertParagraph(blockquote *ast.Blockquote, paragraph *ast.Paragraph, alertType string, reader text.Reader) {
	// Capitalize the alert type to get a human-readable label.
	label := strings.ToUpper(alertType[:1]) + alertType[1:]

	// The title text node carries an empty source segment; the renderer is
	// expected to use the "replacement-content" attribute instead.
	titleParagraph := ast.NewParagraph()
	titleText := ast.NewText()
	titleText.Segment = text.NewSegment(0, 0)
	titleText.SetAttribute([]byte("replacement-content"), []byte(label))
	titleParagraph.AppendChild(titleParagraph, titleText)
	blockquote.InsertBefore(blockquote, paragraph, titleParagraph)

	// Drop the "[", "!TYPE" and "]" nodes from the original paragraph.
	for removed := 0; removed < 3; removed++ {
		node := paragraph.FirstChild()
		if node == nil {
			break
		}
		paragraph.RemoveChild(paragraph, node)
	}

	// If the marker was the paragraph's entire content, remove the now-empty
	// paragraph so it doesn't render as a blank line.
	if paragraph.FirstChild() == nil {
		blockquote.RemoveChild(blockquote, paragraph)
	}
}

View File

@ -2,11 +2,12 @@ package util
import (
"errors"
"fmt"
"io"
"net/url"
"os"
"strings"
"github.com/reconquest/karma-go"
)
type Credentials struct {
@ -39,7 +40,10 @@ func GetCredentials(
if password == "-" {
stdin, err := io.ReadAll(os.Stdin)
if err != nil {
return nil, fmt.Errorf("unable to read password from stdin: %w", err)
return nil, karma.Format(
err,
"unable to read password from stdin",
)
}
password = strings.TrimSpace(string(stdin))
@ -51,7 +55,10 @@ func GetCredentials(
url, err := url.Parse(targetURL)
if err != nil {
return nil, fmt.Errorf("unable to parse %q as url: %w", targetURL, err)
return nil, karma.Format(
err,
"unable to parse %q as url", targetURL,
)
}
if url.Host == "" && baseURL == "" {

View File

@ -7,9 +7,9 @@ import (
"path/filepath"
"strings"
"github.com/kovetskiy/lorg"
mark "github.com/kovetskiy/mark/v16"
"github.com/rs/zerolog"
"github.com/rs/zerolog/log"
"github.com/reconquest/pkg/log"
"github.com/urfave/cli/v3"
)
@ -18,54 +18,14 @@ func RunMark(ctx context.Context, cmd *cli.Command) error {
return err
}
zerolog.TimeFieldFormat = "2006-01-02 15:04:05.000"
output := zerolog.ConsoleWriter{
Out: os.Stderr,
TimeFormat: "2006-01-02 15:04:05.000",
FormatLevel: func(i any) string {
var l string
if ll, ok := i.(string); ok {
switch ll {
case "trace":
l = "TRACE"
case "debug":
l = "DEBUG"
case "info":
l = "INFO"
case "warn":
l = "WARNING"
case "error":
l = "ERROR"
case "fatal":
l = "FATAL"
case "panic":
l = "PANIC"
default:
l = strings.ToUpper(ll)
}
} else {
l = strings.ToUpper(fmt.Sprintf("%s", i))
}
return l
},
FormatFieldName: func(i any) string {
return ""
},
FormatFieldValue: func(i any) string {
return fmt.Sprintf("%s", i)
},
FormatErrFieldName: func(i any) string {
return ""
},
FormatErrFieldValue: func(i any) string {
return fmt.Sprintf("%s", i)
},
}
if cmd.String("color") == "never" {
output.NoColor = true
log.GetLogger().SetFormat(
lorg.NewFormat(
`${time:2006-01-02 15:04:05.000} ${level:%s:left:true} ${prefix}%s`,
),
)
log.GetLogger().SetOutput(os.Stderr)
}
log.Logger = zerolog.New(output).With().Timestamp().Logger()
creds, err := GetCredentials(
cmd.String("username"),
@ -78,13 +38,13 @@ func RunMark(ctx context.Context, cmd *cli.Command) error {
return err
}
log.Debug().Msg("config:")
log.Debug("config:")
for _, f := range cmd.Flags {
flag := f.Names()
if flag[0] == "password" {
log.Debug().Msgf("%20s: %v", flag[0], "******")
log.Debugf(nil, "%20s: %v", flag[0], "******")
} else {
log.Debug().Msgf("%20s: %v", flag[0], cmd.Value(flag[0]))
log.Debugf(nil, "%20s: %v", flag[0], cmd.Value(flag[0]))
}
}
@ -115,7 +75,6 @@ func RunMark(ctx context.Context, cmd *cli.Command) error {
VersionMessage: cmd.String("version-message"),
EditLock: cmd.Bool("edit-lock"),
ChangesOnly: cmd.Bool("changes-only"),
PreserveComments: cmd.Bool("preserve-comments"),
DropH1: cmd.Bool("drop-h1"),
StripLinebreaks: cmd.Bool("strip-linebreaks"),
@ -134,7 +93,7 @@ func RunMark(ctx context.Context, cmd *cli.Command) error {
func ConfigFilePath() string {
fp, err := os.UserConfigDir()
if err != nil {
log.Fatal().Err(err).Send()
log.Fatal(err)
}
return filepath.Join(fp, "mark.toml")
}
@ -142,18 +101,18 @@ func ConfigFilePath() string {
func SetLogLevel(cmd *cli.Command) error {
logLevel := cmd.String("log-level")
switch strings.ToUpper(logLevel) {
case "TRACE":
zerolog.SetGlobalLevel(zerolog.TraceLevel)
case "DEBUG":
zerolog.SetGlobalLevel(zerolog.DebugLevel)
case "INFO":
zerolog.SetGlobalLevel(zerolog.InfoLevel)
case "WARNING":
zerolog.SetGlobalLevel(zerolog.WarnLevel)
case "ERROR":
zerolog.SetGlobalLevel(zerolog.ErrorLevel)
case "FATAL":
zerolog.SetGlobalLevel(zerolog.FatalLevel)
case lorg.LevelTrace.String():
log.SetLevel(lorg.LevelTrace)
case lorg.LevelDebug.String():
log.SetLevel(lorg.LevelDebug)
case lorg.LevelInfo.String():
log.SetLevel(lorg.LevelInfo)
case lorg.LevelWarning.String():
log.SetLevel(lorg.LevelWarning)
case lorg.LevelError.String():
log.SetLevel(lorg.LevelError)
case lorg.LevelFatal.String():
log.SetLevel(lorg.LevelFatal)
default:
return fmt.Errorf("unknown log level: %s", logLevel)
}

View File

@ -4,7 +4,7 @@ import (
"context"
"testing"
"github.com/rs/zerolog"
"github.com/reconquest/pkg/log"
"github.com/stretchr/testify/assert"
"github.com/urfave/cli/v3"
)
@ -83,22 +83,22 @@ func Test_setLogLevel(t *testing.T) {
}
tests := map[string]struct {
args args
want zerolog.Level
want log.Level
expectedErr string
}{
"invalid": {args: args{lvl: "INVALID"}, want: zerolog.InfoLevel, expectedErr: "unknown log level: INVALID"},
"empty": {args: args{lvl: ""}, want: zerolog.InfoLevel, expectedErr: "unknown log level: "},
"info": {args: args{lvl: "INFO"}, want: zerolog.InfoLevel},
"debug": {args: args{lvl: "DEBUG"}, want: zerolog.DebugLevel},
"trace": {args: args{lvl: "TRACE"}, want: zerolog.TraceLevel},
"warning": {args: args{lvl: "WARNING"}, want: zerolog.WarnLevel},
"error": {args: args{lvl: "ERROR"}, want: zerolog.ErrorLevel},
"fatal": {args: args{lvl: "FATAL"}, want: zerolog.FatalLevel},
"invalid": {args: args{lvl: "INVALID"}, want: log.LevelInfo, expectedErr: "unknown log level: INVALID"},
"empty": {args: args{lvl: ""}, want: log.LevelInfo, expectedErr: "unknown log level: "},
"info": {args: args{lvl: log.LevelInfo.String()}, want: log.LevelInfo},
"debug": {args: args{lvl: log.LevelDebug.String()}, want: log.LevelDebug},
"trace": {args: args{lvl: log.LevelTrace.String()}, want: log.LevelTrace},
"warning": {args: args{lvl: log.LevelWarning.String()}, want: log.LevelWarning},
"error": {args: args{lvl: log.LevelError.String()}, want: log.LevelError},
"fatal": {args: args{lvl: log.LevelFatal.String()}, want: log.LevelFatal},
}
for name, tt := range tests {
t.Run(name, func(t *testing.T) {
prev := zerolog.GlobalLevel()
t.Cleanup(func() { zerolog.SetGlobalLevel(prev) })
prev := log.GetLevel()
t.Cleanup(func() { log.SetLevel(prev) })
cmd := &cli.Command{
Name: "test",
Flags: []cli.Flag{
@ -114,7 +114,7 @@ func Test_setLogLevel(t *testing.T) {
assert.EqualError(t, err, tt.expectedErr)
} else {
assert.NoError(t, err)
assert.Equal(t, tt.want, zerolog.GlobalLevel())
assert.Equal(t, tt.want, log.GetLevel())
}
})
}

View File

@ -1,7 +1,9 @@
package util
import (
"github.com/rs/zerolog/log"
"fmt"
"github.com/reconquest/pkg/log"
)
type FatalErrorHandler struct {
@ -14,19 +16,19 @@ func NewErrorHandler(continueOnError bool) *FatalErrorHandler {
}
}
func (h *FatalErrorHandler) Handle(err error, format string, args ...any) {
func (h *FatalErrorHandler) Handle(err error, format string, args ...interface{}) {
if err == nil {
if h.ContinueOnError {
log.Error().Msgf(format, args...)
log.Error(fmt.Sprintf(format, args...))
return
}
log.Fatal().Msgf(format, args...)
log.Fatal(fmt.Sprintf(format, args...))
}
if h.ContinueOnError {
log.Error().Err(err).Msgf(format, args...)
log.Errorf(err, format, args...)
return
}
log.Fatal().Err(err).Msgf(format, args...)
log.Fatalf(err, format, args...)
}

View File

@ -169,7 +169,7 @@ var Flags = []cli.Flag{
&cli.StringFlag{
Name: "content-appearance",
Value: "",
Usage: "default content appearance for pages without a Content-Appearance header. Possible values: full-width, fixed, default.",
Usage: "default content appearance for pages without a Content-Appearance header. Possible values: full-width, fixed.",
Sources: cli.NewValueSourceChain(
cli.EnvVar("MARK_CONTENT_APPEARANCE"),
altsrctoml.TOML("content-appearance", altsrc.NewStringPtrSourcer(&filename)),
@ -194,12 +194,6 @@ var Flags = []cli.Flag{
Usage: "Avoids re-uploading pages that haven't changed since the last run.",
Sources: cli.NewValueSourceChain(cli.EnvVar("MARK_CHANGES_ONLY"), altsrctoml.TOML("changes-only", altsrc.NewStringPtrSourcer(&filename))),
},
&cli.BoolFlag{
Name: "preserve-comments",
Value: false,
Usage: "Fetch and preserve inline comments on existing Confluence pages.",
Sources: cli.NewValueSourceChain(cli.EnvVar("MARK_PRESERVE_COMMENTS"), altsrctoml.TOML("preserve-comments", altsrc.NewStringPtrSourcer(&filename))),
},
&cli.FloatFlag{
Name: "d2-scale",
Value: 1.0,
@ -236,11 +230,11 @@ func CheckFlags(context context.Context, command *cli.Command) (context.Context,
contentAppearance := strings.TrimSpace(command.String("content-appearance"))
if contentAppearance != "" {
switch contentAppearance {
case "full-width", "fixed", "default":
case "full-width", "fixed":
// ok
default:
return context, fmt.Errorf(
"invalid value for --content-appearance: %q (expected: full-width, fixed, or default)",
"invalid value for --content-appearance: %q (expected: full-width or fixed)",
contentAppearance,
)
}