Mirror of https://github.com/kovetskiy/mark.git (synced 2025-06-08 15:32:40 +08:00)
Compare commits
504 Commits
Commit list: the compare view lists 504 commits by abbreviated SHA1 only, from bf542ab684 at the top of the list to 7145cb49b3 at the bottom; the author, date, and message columns (and the avatar images) were empty in the captured page and are omitted here.
.all-contributorsrc (new file, 413 lines)
@@ -0,0 +1,413 @@
{
  "projectName": "mark",
  "projectOwner": "kovetskiy",
  "repoType": "github",
  "repoHost": "https://github.com",
  "files": ["README.md"],
  "imageSize": 100,
  "commit": true,
  "commitConvention": "none",
  "contributors": [
    { "login": "mrueg", "name": "Manuel Rüger", "avatar_url": "https://avatars.githubusercontent.com/u/489370?v=4", "profile": "https://mastodon.social/@mrueg", "contributions": ["maintenance", "code"] },
    { "login": "kovetskiy", "name": "Egor Kovetskiy", "avatar_url": "https://avatars.githubusercontent.com/u/8445924?v=4", "profile": "https://github.com/kovetskiy", "contributions": ["maintenance", "code"] },
    { "login": "klauern", "name": "Nick Klauer", "avatar_url": "https://avatars.githubusercontent.com/u/4735?v=4", "profile": "https://klauer.dev/", "contributions": ["code"] },
    { "login": "rofafor", "name": "Rolf Ahrenberg", "avatar_url": "https://avatars.githubusercontent.com/u/9297850?v=4", "profile": "https://github.com/rofafor", "contributions": ["code"] },
    { "login": "csoutherland", "name": "Charles Southerland", "avatar_url": "https://avatars.githubusercontent.com/u/840471?v=4", "profile": "https://github.com/csoutherland", "contributions": ["code"] },
    { "login": "snejus", "name": "Šarūnas Nejus", "avatar_url": "https://avatars.githubusercontent.com/u/16212750?v=4", "profile": "https://github.com/snejus", "contributions": ["code"] },
    { "login": "brnv", "name": "Alexey Baranov", "avatar_url": "https://avatars.githubusercontent.com/u/1925213?v=4", "profile": "https://github.com/brnv", "contributions": ["code"] },
    { "login": "princespaghetti", "name": "Anthony Barbieri", "avatar_url": "https://avatars.githubusercontent.com/u/2935312?v=4", "profile": "https://github.com/princespaghetti", "contributions": ["code"] },
    { "login": "dauc", "name": "Devin Auclair", "avatar_url": "https://avatars.githubusercontent.com/u/29129213?v=4", "profile": "https://github.com/dauc", "contributions": ["code"] },
    { "login": "GezimSejdiu", "name": "Gezim Sejdiu", "avatar_url": "https://avatars.githubusercontent.com/u/5259296?v=4", "profile": "https://gezimsejdiu.github.io/", "contributions": ["code"] },
    { "login": "jcavar", "name": "Josip Ćavar", "avatar_url": "https://avatars.githubusercontent.com/u/3751289?v=4", "profile": "https://github.com/jcavar", "contributions": ["code"] },
    { "login": "Hi-Fi", "name": "Juho Saarinen", "avatar_url": "https://avatars.githubusercontent.com/u/1499780?v=4", "profile": "https://github.com/Hi-Fi", "contributions": ["code"] },
    { "login": "lukiffer", "name": "Luke Fritz", "avatar_url": "https://avatars.githubusercontent.com/u/2278911?v=4", "profile": "https://github.com/lukiffer", "contributions": ["code"] },
    { "login": "MattyRad", "name": "Matt Radford", "avatar_url": "https://avatars.githubusercontent.com/u/1143595?v=4", "profile": "https://github.com/MattyRad", "contributions": ["code"] },
    { "login": "Planktonette", "name": "Planktonette", "avatar_url": "https://avatars.githubusercontent.com/u/5514719?v=4", "profile": "https://github.com/Planktonette", "contributions": ["code"] },
    { "login": "teopost", "name": "Stefano Teodorani", "avatar_url": "https://avatars.githubusercontent.com/u/2573389?v=4", "profile": "http://www.stefanoteodorani.it/", "contributions": ["code"] },
    { "login": "tillepille", "name": "Tim Schrumpf", "avatar_url": "https://avatars.githubusercontent.com/u/16536696?v=4", "profile": "https://github.com/tillepille", "contributions": ["code"] },
    { "login": "tyler-copilot", "name": "Tyler Cole", "avatar_url": "https://avatars.githubusercontent.com/u/18539108?v=4", "profile": "https://github.com/tyler-copilot", "contributions": ["code"] },
    { "login": "elgreco247", "name": "elgreco247", "avatar_url": "https://avatars.githubusercontent.com/u/8968417?v=4", "profile": "https://github.com/elgreco247", "contributions": ["code"] },
    { "login": "emead-indeed", "name": "emead-indeed", "avatar_url": "https://avatars.githubusercontent.com/u/44018145?v=4", "profile": "https://github.com/emead-indeed", "contributions": ["code"] },
    { "login": "wbh1", "name": "Will Hegedus", "avatar_url": "https://avatars.githubusercontent.com/u/11506822?v=4", "profile": "https://wbhegedus.me/", "contributions": ["code"] },
    { "login": "carnei-ro", "name": "Leandro Carneiro", "avatar_url": "https://avatars.githubusercontent.com/u/42899277?v=4", "profile": "https://github.com/carnei-ro", "contributions": ["code"] },
    { "login": "beeme1mr", "name": "beeme1mr", "avatar_url": "https://avatars.githubusercontent.com/u/682996?v=4", "profile": "https://github.com/beeme1mr", "contributions": ["code"] },
    { "login": "Taldrain", "name": "Taldrain", "avatar_url": "https://avatars.githubusercontent.com/u/1081600?v=4", "profile": "https://github.com/Taldrain", "contributions": ["code"] },
    { "login": "eitchugo", "name": "Hugo Cisneiros", "avatar_url": "https://avatars.githubusercontent.com/u/349457?v=4", "profile": "http://www.devin.com.br/", "contributions": ["code"] },
    { "login": "jevfok", "name": "jevfok", "avatar_url": "https://avatars.githubusercontent.com/u/54530686?v=4", "profile": "https://github.com/jevfok", "contributions": ["code"] },
    { "login": "mmiranda", "name": "Mateus Miranda", "avatar_url": "https://avatars.githubusercontent.com/u/16670310?v=4", "profile": "https://dev.to/mmiranda", "contributions": ["code"] },
    { "login": "Skeeve", "name": "Stephan Hradek", "avatar_url": "https://avatars.githubusercontent.com/u/725404?v=4", "profile": "https://github.com/Skeeve", "contributions": ["code"] },
    { "login": "dreampuf", "name": "Dreampuf", "avatar_url": "https://avatars.githubusercontent.com/u/353644?v=4", "profile": "http://huangx.in/", "contributions": ["code"] },
    { "login": "JAndritsch", "name": "Joel Andritsch", "avatar_url": "https://avatars.githubusercontent.com/u/190611?v=4", "profile": "https://github.com/JAndritsch", "contributions": ["code"] },
    { "login": "guoweis-outreach", "name": "guoweis-outreach", "avatar_url": "https://avatars.githubusercontent.com/u/639243?v=4", "profile": "https://github.com/guoweis-outreach", "contributions": ["code"] },
    { "login": "klysunkin", "name": "klysunkin", "avatar_url": "https://avatars.githubusercontent.com/u/2611187?v=4", "profile": "https://github.com/klysunkin", "contributions": ["code"] },
    { "login": "EppO", "name": "Florent Monbillard", "avatar_url": "https://avatars.githubusercontent.com/u/6111?v=4", "profile": "https://github.com/EppO", "contributions": ["code"] },
    { "login": "jfreeland", "name": "Joey Freeland", "avatar_url": "https://avatars.githubusercontent.com/u/30938344?v=4", "profile": "https://github.com/jfreeland", "contributions": ["code"] },
    { "login": "prokod", "name": "Noam Asor", "avatar_url": "https://avatars.githubusercontent.com/u/877414?v=4", "profile": "https://github.com/prokod", "contributions": ["code"] },
    { "login": "PhilippReinke", "name": "Philipp", "avatar_url": "https://avatars.githubusercontent.com/u/81698819?v=4", "profile": "https://github.com/PhilippReinke", "contributions": ["code"] },
    { "login": "vpommier", "name": "Pommier Vincent", "avatar_url": "https://avatars.githubusercontent.com/u/8139328?v=4", "profile": "https://github.com/vpommier", "contributions": ["code"] },
    { "login": "ToruKawaguchi", "name": "Toru Kawaguchi", "avatar_url": "https://avatars.githubusercontent.com/u/17423222?v=4", "profile": "https://github.com/ToruKawaguchi", "contributions": ["code"] },
    { "login": "willgorman", "name": "Will Gorman", "avatar_url": "https://avatars.githubusercontent.com/u/49793?v=4", "profile": "https://coaxialflutter.com/", "contributions": ["code"] },
    { "login": "zgriesinger", "name": "Zackery Griesinger", "avatar_url": "https://avatars.githubusercontent.com/u/15172516?v=4", "profile": "https://zackery.dev/", "contributions": ["code"] },
    { "login": "chrisjaimon2012", "name": "cc-chris", "avatar_url": "https://avatars.githubusercontent.com/u/57173930?v=4", "profile": "https://github.com/chrisjaimon2012", "contributions": ["code"] },
    { "login": "datsickkunt", "name": "datsickkunt", "avatar_url": "https://avatars.githubusercontent.com/u/105289244?v=4", "profile": "https://github.com/datsickkunt", "contributions": ["code"] },
    { "login": "recrtl", "name": "recrtl", "avatar_url": "https://avatars.githubusercontent.com/u/14078835?v=4", "profile": "https://github.com/recrtl", "contributions": ["code"] },
    { "login": "seletskiy", "name": "Stanislav Seletskiy", "avatar_url": "https://avatars.githubusercontent.com/u/674812?v=4", "profile": "https://github.com/seletskiy", "contributions": ["code"] }
  ],
  "contributorsPerLine": 7
}
.dockerignore (new file, 1 line)
@@ -0,0 +1 @@
/docker
.github/ISSUE_TEMPLATE/bug_report.md (new file, 43 lines)
@@ -0,0 +1,43 @@
---
name: Report a bug
about: Create a bug report to help us improve mark
title: ''
labels: bug
assignees: ''

---

## What happened?

A clear and concise description of what the bug is.

## What did you expect to happen?

A clear and concise description of what you expected to happen.

## How can we reproduce the behavior you experienced?

Steps to reproduce the behavior:

1. Step 1
2. Step 2
3. Step 3
4. Step 4

In case this is related to specific markdown, please provide a minimal markdown example here.

## Further Information (please complete the following information)

* Mark Version (`mark --version`): [e.g. v9.1.4]
* Mark Parameters: [e.g. `--drop-h1 --title-from-h1`]
* Confluence Hosting: [e.g. Cloud, Server or Datacenter]
* Confluence Version: [e.g. v7.13]
* Environment specific Information: [e.g. running in Github Actions, or on Mac OS X, etc.]

## Logs or other output

Please provide logs, other kind of output here.

## Additional context

Add any other context about the problem here.
.github/ISSUE_TEMPLATE/config.yml (new file, 1 line)
@@ -0,0 +1 @@
blank_issues_enabled: true
.github/ISSUE_TEMPLATE/feature_request.md (new file, 20 lines)
@@ -0,0 +1,20 @@
---
name: Feature request
about: Suggest an idea for mark
title: ''
labels: feature
assignees: ''

---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is and what the feature provides.

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.

**Additional context**
Add any other context or screenshots about the feature request here.
.github/dependabot.yml (new file, 14 lines)
@@ -0,0 +1,14 @@
version: 2
updates:
  - package-ecosystem: "gomod"
    directory: "/"
    schedule:
      interval: "weekly"
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"
  - package-ecosystem: "docker"
    directory: "/"
    schedule:
      interval: "weekly"
.github/workflows/ci.yml (new file, 113 lines)
@@ -0,0 +1,113 @@
name: continuous-integration

on:
  push:
    branches:
      - master
    tags:
      - '*'
  pull_request:
    branches:
      - master

env:
  GO_VERSION: "~1.24"

jobs:
  # Runs Golangci-lint on the source code
  ci-go-lint:
    name: ci-go-lint
    runs-on: ubuntu-latest
    steps:
      - name: Check out code into the Go module directory
        uses: actions/checkout@v4

      - name: Set up Go 1.x
        uses: actions/setup-go@v5
        with:
          go-version: ${{ env.GO_VERSION }}
        id: go

      - name: golangci-lint
        uses: golangci/golangci-lint-action@v8

  # Runs markdown-lint on the markdown files
  ci-markdown-lint:
    name: ci-markdown-lint
    runs-on: ubuntu-latest
    steps:
      - name: Check out code into the Go module directory
        uses: actions/checkout@v4
      - name: markdownlint-cli2-action
        uses: DavidAnson/markdownlint-cli2-action@v20

  # Executes Unit Tests
  ci-unit-tests:
    name: ci-unit-tests
    runs-on: ubuntu-22.04
    steps:
      - name: Check out code into the Go module directory
        uses: actions/checkout@v4

      - name: Set up Go 1.x
        uses: actions/setup-go@v5
        with:
          go-version: ${{ env.GO_VERSION }}
        id: go

      - name: Run unit tests
        run: |
          make test

  # Builds mark binary
  ci-build:
    name: ci-build
    runs-on: ubuntu-latest
    steps:
      - name: Check out code into the Go module directory
        uses: actions/checkout@v4

      - name: Set up Go 1.x
        uses: actions/setup-go@v5
        with:
          go-version: ${{ env.GO_VERSION }}
        id: go

      - name: Build mark
        run: |
          make build

  # Build and push Dockerimage
  ci-docker-build:
    name: ci-docker-build
    runs-on: ubuntu-latest
    steps:
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Build only (on commits)
        uses: docker/build-push-action@v6
        if: ${{ github.ref_type != 'tag' }}
        with:
          push: false
          tags: kovetskiy/mark:latest

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        if: ${{ github.ref_type == 'tag' }}
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_PASSWORD }}

      - name: Build and push (on tag)
        uses: docker/build-push-action@v6
        if: ${{ github.ref_type == 'tag' }}
        with:
          push: true
          platforms: linux/amd64,linux/arm64
          tags: |
            kovetskiy/mark:${{ github.ref_name }}
            kovetskiy/mark:latest
.github/workflows/goreleaser.yml (modified, 12 lines changed; old and new lines appear consecutively)
@@ -10,18 +10,18 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Set Up Go
        uses: actions/setup-go@v2
        uses: actions/setup-go@v5
        with:
          go-version: 1.14
          go-version: "1.24"
      - name: Run GoReleaser
        uses: goreleaser/goreleaser-action@v2
        uses: goreleaser/goreleaser-action@v6
        with:
          version: latest
          args: release --rm-dist
          version: "~> 2"
          args: release --clean
        env:
          GOPATH: /home/runner/work/
          GITHUB_TOKEN: ${{ secrets.TOKEN }}
.gitignore (modified, 4 lines changed)
@@ -1,2 +1,6 @@
/mark
/docker
.idea/
/mark.test
/profile.cov
.vscode
@@ -1,5 +1,6 @@
# This is an example goreleaser.yaml file with some sane defaults.
# Make sure to check the documentation at http://goreleaser.com
version: 2
before:
  hooks:
    # You may remove this if you don't use go modules.
@@ -9,13 +10,21 @@ builds:
      - CGO_ENABLED=0
    gcflags:
      - all=-trimpath={{.Env.GOPATH}}
    goos:
      - darwin
      - linux
      # windows fails with an error https://github.com/kovetskiy/mark/runs/5034726426?check_suite_focus=true
      # - windows
    goarch:
      - amd64
      - arm64
archives:
  - replacements:
      darwin: Darwin
      linux: Linux
      windows: Windows
      386: i386
      amd64: x86_64
  - name_template: >-
      {{ .ProjectName }}_
      {{- title .Os }}_
      {{- if eq .Arch "amd64" }}x86_64
      {{- else if eq .Arch "386" }}i386
      {{- else }}{{ .Arch }}{{ end }}
checksum:
  name_template: 'checksums.txt'
snapshot:
@@ -26,3 +35,27 @@ changelog:
    exclude:
      - '^docs:'
      - '^test:'

# Publish on Homebrew Tap
brews:
  -
    name: mark
    repository:
      owner: kovetskiy
      name: homebrew-mark
      branch: master

    commit_author:
      name: Egor Kovetskiy
      email: e.kovetskiy@gmail.com

    commit_msg_template: "Brew formula update for {{ .ProjectName }} version {{ .Tag }}"

    directory: Formula

    homepage: "https://github.com/kovetskiy/mark"
    description: "Sync your markdown files with Confluence pages."
    license: "Apache 2.0"

    test: |
      system "#{bin}/program", "version"
.markdownlint-cli2.jsonc (new file, 12 lines)
@@ -0,0 +1,12 @@
{
  "globs": [
    "*.md",
    ".github/**/*.md"
  ],

  // ToDo: Following rules can't be fixed automatically. They should be enabled when fixed.
  "config": {
    "MD013": false, // https://github.com/markdownlint/markdownlint/blob/main/docs/RULES.md#md013---line-length
    "MD033": false // https://github.com/markdownlint/markdownlint/blob/main/docs/RULES.md#md033---inline-html
  }
}
Dockerfile (modified, 21 lines changed; old and new lines appear consecutively)
@@ -1,13 +1,18 @@
FROM golang:latest
FROM golang:1.24.4 AS builder
ENV GOPATH="/go"
WORKDIR /go/src/github.com/kovetskiy/mark
COPY / .
RUN make get
RUN make build
RUN make get \
    && make build

FROM alpine:latest
RUN apk --no-cache add ca-certificates bash
COPY --from=0 /go/src/github.com/kovetskiy/mark/mark /bin/
RUN mkdir -p /docs
FROM chromedp/headless-shell:latest
RUN apt-get update \
    && apt-get upgrade -qq \
    && apt-get install --no-install-recommends -qq ca-certificates bash sed git dumb-init \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

COPY --from=builder /go/src/github.com/kovetskiy/mark/mark /bin/
WORKDIR /docs
ENTRYPOINT ["/bin/mark"]

ENTRYPOINT ["dumb-init", "--"]
LICENSE (modified, 211 lines changed)
@@ -1,22 +1,201 @@
The previous LICENSE, the "Commons Clause" License Condition v1.0 applied on top of Apache 2.0
(Software: Mark, github.com/kovetskiy/mark; License: Apache 2.0; Licensor: Egor Kovetskiy), which
withheld the right to "Sell" the Software, is replaced by the full, unmodified text of the
Apache License, Version 2.0, January 2004 (http://www.apache.org/licenses/), Sections 1 through 9
plus the appendix, with the notice:

  Copyright 2024 Egor Kovetskiy

  Licensed under the Apache License, Version 2.0 (the "License");
  you may not use this file except in compliance with the License.
  You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

  Unless required by applicable law or agreed to in writing, software
  distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
Makefile (modified, 10 lines changed; old and new lines appear consecutively)
@@ -1,14 +1,9 @@
NAME = $(notdir $(PWD))

VERSION = $(shell printf "%s.%s" \
    $$(git rev-list --count HEAD) \
    $$(git rev-parse --short HEAD) \
)
VERSION = $(shell git describe --tags --abbrev=0)

GO111MODULE = on

BRANCH = $(shell git rev-parse --abbrev-ref HEAD)

REMOTE = kovetskiy

version:
@@ -23,6 +18,9 @@ build:
    -ldflags "-X main.version=$(VERSION)" \
    -gcflags "-trimpath $(GOPATH)/src"

test:
    go test -race -coverprofile=profile.cov ./... -v

image:
    @echo :: building image $(NAME):$(VERSION)
    @docker build -t $(NAME):$(VERSION) -f Dockerfile .
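The `-ldflags "-X main.version=$(VERSION)"` flag in the build target only takes effect if the main package declares a matching, non-constant string variable. The project's main.go is not part of this changeset, so the following is only an illustrative Go sketch of how such a variable is usually declared and consumed; the default value and the printing logic are assumptions, not code from this diff.

package main

import "fmt"

// version is intended to be overwritten at link time by the Makefile:
//
//	go build -ldflags "-X main.version=$(VERSION)"
//
// -X only works on package-level string variables, never on constants.
var version = "unknown" // placeholder default; the real default is not shown in this diff

func main() {
    // A --version style handler would simply print the injected value.
    fmt.Println("mark version", version)
}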
Taskfile.yml (modified, 34 lines changed; old and new lines appear consecutively)
@@ -1,14 +1,40 @@
version: '2'
version: '3'

vars:
  version: 7.13.0
  pwd:
    sh: pwd

tasks:
  confluence:
  volume:
    cmds:
      - docker run -v {{ .pwd }}/docker:/var/atlassian/application-data/confluence
      - mkdir -p docker/{{.version}}

  network:
    desc: create docker network
    cmds:
      - docker network create confluence || true

  postgres:
    desc: start postgres for confluence
    deps: [network, volume]
    cmds:
      - docker run -it -p 5432:5432
        --name confluence-postgres
        --network confluence
        -v {{.pwd}}/docker/{{.version}}/postgres:/var/lib/postgresql/data
        -e POSTGRES_PASSWORD=confluence
        -e POSTGRES_DB=confluence
        -e POSTGRES_USER=confluence
        postgres

  confluence:
    desc: start confluence server
    deps: [network, volume]
    cmds:
      - docker run -v {{ .pwd }}/docker/{{.version}}/confluence:/var/atlassian/application-data/confluence
        --name="confluence"
        --network confluence
        -p 8090:8090
        -p 8091:8091
        atlassian/confluence-server
        atlassian/confluence-server:{{.version}}
296
attachment/attachment.go
Normal file
296
attachment/attachment.go
Normal file
@ -0,0 +1,296 @@
|
||||
package attachment
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/sha256"
|
||||
"encoding/hex"
|
||||
"io"
|
||||
"net/url"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/kovetskiy/mark/confluence"
|
||||
"github.com/kovetskiy/mark/vfs"
|
||||
"github.com/reconquest/karma-go"
|
||||
"github.com/reconquest/pkg/log"
|
||||
)
|
||||
|
||||
const (
|
||||
AttachmentChecksumPrefix = `mark:checksum: `
|
||||
)
|
||||
|
||||
type Attachment struct {
|
||||
ID string
|
||||
Name string
|
||||
Filename string
|
||||
FileBytes []byte
|
||||
Checksum string
|
||||
Link string
|
||||
Width string
|
||||
Height string
|
||||
Replace string
|
||||
}
|
||||
|
||||
type Attacher interface {
|
||||
Attach(Attachment)
|
||||
}
|
||||
|
||||
func ResolveAttachments(
|
||||
api *confluence.API,
|
||||
page *confluence.PageInfo,
|
||||
attachments []Attachment,
|
||||
) ([]Attachment, error) {
|
||||
for i := range attachments {
|
||||
checksum, err := GetChecksum(bytes.NewReader(attachments[i].FileBytes))
|
||||
if err != nil {
|
||||
return nil, karma.Format(
|
||||
err,
|
||||
"unable to get checksum for attachment: %q", attachments[i].Name,
|
||||
)
|
||||
}
|
||||
|
||||
attachments[i].Checksum = checksum
|
||||
}
|
||||
|
||||
remotes, err := api.GetAttachments(page.ID)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
existing := []Attachment{}
|
||||
creating := []Attachment{}
|
||||
updating := []Attachment{}
|
||||
for _, attachment := range attachments {
|
||||
var found bool
|
||||
var same bool
|
||||
for _, remote := range remotes {
|
||||
if remote.Filename == attachment.Filename {
|
||||
same = attachment.Checksum == strings.TrimPrefix(
|
||||
remote.Metadata.Comment,
|
||||
AttachmentChecksumPrefix,
|
||||
)
|
||||
|
||||
attachment.ID = remote.ID
|
||||
attachment.Link = path.Join(
|
||||
remote.Links.Context,
|
||||
remote.Links.Download,
|
||||
)
|
||||
|
||||
found = true
|
||||
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if found {
|
||||
if same {
|
||||
existing = append(existing, attachment)
|
||||
} else {
|
||||
updating = append(updating, attachment)
|
||||
}
|
||||
} else {
|
||||
creating = append(creating, attachment)
|
||||
}
|
||||
}
|
||||
|
||||
for i, attachment := range creating {
|
||||
log.Infof(nil, "creating attachment: %q", attachment.Name)
|
||||
|
||||
info, err := api.CreateAttachment(
|
||||
page.ID,
|
||||
attachment.Filename,
|
||||
AttachmentChecksumPrefix+attachment.Checksum,
|
||||
bytes.NewReader(attachment.FileBytes),
|
||||
)
|
||||
if err != nil {
|
||||
return nil, karma.Format(
|
||||
err,
|
||||
"unable to create attachment %q",
|
||||
attachment.Name,
|
||||
)
|
||||
}
|
||||
|
||||
attachment.ID = info.ID
|
||||
attachment.Link = path.Join(
|
||||
info.Links.Context,
|
||||
info.Links.Download,
|
||||
)
|
||||
|
||||
creating[i] = attachment
|
||||
}
|
||||
|
||||
for i, attachment := range updating {
|
||||
log.Infof(nil, "updating attachment: %q", attachment.Name)
|
||||
|
||||
info, err := api.UpdateAttachment(
|
||||
page.ID,
|
||||
attachment.ID,
|
||||
attachment.Filename,
|
||||
AttachmentChecksumPrefix+attachment.Checksum,
|
||||
bytes.NewReader(attachment.FileBytes),
|
||||
)
|
||||
if err != nil {
|
||||
return nil, karma.Format(
|
||||
err,
|
||||
"unable to update attachment %q",
|
||||
attachment.Name,
|
||||
)
|
||||
}
|
||||
|
||||
attachment.Link = path.Join(
|
||||
info.Links.Context,
|
||||
info.Links.Download,
|
||||
)
|
||||
|
||||
updating[i] = attachment
|
||||
}
|
||||
|
||||
for i := range existing {
|
||||
log.Infof(nil, "keeping unmodified attachment: %q", attachments[i].Name)
|
||||
}
|
||||
|
||||
attachments = []Attachment{}
|
||||
attachments = append(attachments, existing...)
|
||||
attachments = append(attachments, creating...)
|
||||
attachments = append(attachments, updating...)
|
||||
|
||||
return attachments, nil
|
||||
}
|
||||
|
||||
func ResolveLocalAttachments(opener vfs.Opener, base string, replacements []string) ([]Attachment, error) {
|
||||
attachments, err := prepareAttachments(opener, base, replacements)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for _, attachment := range attachments {
|
||||
checksum, err := GetChecksum(bytes.NewReader(attachment.FileBytes))
|
||||
if err != nil {
|
||||
return nil, karma.Format(
|
||||
err,
|
||||
"unable to get checksum for attachment: %q", attachment.Name,
|
||||
)
|
||||
}
|
||||
|
||||
		attachment.Checksum = checksum
	}

	return attachments, err
}

// prepareAttachments creates an array of attachment objects based on an array of filepaths
func prepareAttachments(opener vfs.Opener, base string, replacements []string) ([]Attachment, error) {
	attachments := []Attachment{}
	for _, name := range replacements {
		attachment, err := prepareAttachment(opener, base, name)
		if err != nil {
			return nil, err
		}

		attachments = append(attachments, attachment)
	}

	return attachments, nil
}

// prepareAttachment opens the file, reads its content and creates an attachment object
func prepareAttachment(opener vfs.Opener, base, name string) (Attachment, error) {
	attachmentPath := filepath.Join(base, name)
	file, err := opener.Open(attachmentPath)
	if err != nil {
		return Attachment{}, karma.Format(err, "unable to open file: %q", attachmentPath)
	}
	defer func() {
		_ = file.Close()
	}()

	fileBytes, err := io.ReadAll(file)
	if err != nil {
		return Attachment{}, karma.Format(err, "unable to read file: %q", attachmentPath)
	}

	return Attachment{
		Name:      name,
		Filename:  strings.ReplaceAll(name, "/", "_"),
		FileBytes: fileBytes,
		Replace:   name,
	}, nil
}

func CompileAttachmentLinks(markdown []byte, attachments []Attachment) []byte {
	links := map[string]string{}
	replaces := []string{}

	for _, attachment := range attachments {
		links[attachment.Replace] = parseAttachmentLink(attachment.Link)
		replaces = append(replaces, attachment.Replace)
	}

	// sort by length so first items will have bigger length
	// it's helpful for replacing in case of following names
	// attachments/a.jpg
	// attachments/a.jpg.jpg
	// so we replace longer and then shorter
	sort.SliceStable(replaces, func(i, j int) bool {
		return len(replaces[i]) > len(replaces[j])
	})

	for _, replace := range replaces {
		to := links[replace]

		found := false
		if bytes.Contains(markdown, []byte("attachment://"+replace)) {
			from := "attachment://" + replace

			log.Debugf(nil, "replacing legacy link: %q -> %q", from, to)

			markdown = bytes.ReplaceAll(
				markdown,
				[]byte(from),
				[]byte(to),
			)

			found = true
		}

		if bytes.Contains(markdown, []byte(replace)) {
			from := replace

			log.Debugf(nil, "replacing link: %q -> %q", from, to)

			markdown = bytes.ReplaceAll(
				markdown,
				[]byte(from),
				[]byte(to),
			)

			found = true
		}

		if !found {
			log.Warningf(nil, "unused attachment: %s", replace)
		}
	}

	return markdown
}

func GetChecksum(reader io.Reader) (string, error) {
	hash := sha256.New()
	if _, err := io.Copy(hash, reader); err != nil {
		return "", err
	}

	return hex.EncodeToString(hash.Sum(nil)), nil
}

func parseAttachmentLink(attachLink string) string {
	uri, err := url.ParseRequestURI(attachLink)
	if err != nil {
		return strings.ReplaceAll(attachLink, "&", "&amp;")
	} else {
		return uri.Path +
			"?" + url.QueryEscape(uri.Query().Encode())
	}
}
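For orientation only (this is an illustrative sketch, not part of the change above): the exported pieces of this package are meant to be combined roughly as follows. The file name, link, and markdown are made up; GetChecksum feeds the re-upload decision, and CompileAttachmentLinks rewrites both plain paths and legacy attachment:// references once the Link field has been filled in from Confluence.

package main

import (
	"bytes"
	"fmt"

	"github.com/kovetskiy/mark/attachment"
)

func main() {
	data := []byte("fake image bytes")

	// The checksum is what decides whether an attachment needs re-uploading.
	sum, err := attachment.GetChecksum(bytes.NewReader(data))
	if err != nil {
		panic(err)
	}

	att := attachment.Attachment{
		Name:      "images/logo.png",
		Filename:  "images_logo.png",
		FileBytes: data,
		Checksum:  sum,
		Replace:   "images/logo.png",
		// Link is normally filled in after the attachment has been
		// created or found through the Confluence API; this one is made up.
		Link: "https://wiki.example.com/download/attachments/123/images_logo.png",
	}

	markdown := []byte("![logo](images/logo.png) ![legacy](attachment://images/logo.png)")

	// Both the plain path and the legacy attachment:// form get rewritten.
	out := attachment.CompileAttachmentLinks(markdown, []attachment.Attachment{att})
	fmt.Println(string(out))
}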
attachment/attachment_test.go | 90 (new file)
@@ -0,0 +1,90 @@
package attachment

import (
	"bytes"
	"io"
	"os"
	"testing"

	"github.com/stretchr/testify/assert"
)

var (
	replacements = []string{
		"image1.jpg",
		"images/image2.jpg",
		"../image3.jpg",
	}
)

type bufferCloser struct {
	*bytes.Buffer
}

func (bufferCloser) Close() error { return nil }

type virtualOpener struct {
	PathToBuf map[string]*bufferCloser
}

func (o *virtualOpener) Open(name string) (io.ReadWriteCloser, error) {
	if buf, ok := o.PathToBuf[name]; ok {
		return buf, nil
	}
	return nil, os.ErrNotExist
}

func TestPrepareAttachmentsWithWorkDirBase(t *testing.T) {

	testingOpener := &virtualOpener{
		PathToBuf: map[string]*bufferCloser{
			"image1.jpg":        {bytes.NewBuffer(nil)},
			"images/image2.jpg": {bytes.NewBuffer(nil)},
			"../image3.jpg":     {bytes.NewBuffer(nil)},
		},
	}

	attaches, err := prepareAttachments(testingOpener, ".", replacements)
	t.Logf("attaches: %v", err)
	if err != nil {
		println(err.Error())
		t.Fatal(err)
	}

	assert.Equal(t, "image1.jpg", attaches[0].Name)
	assert.Equal(t, "image1.jpg", attaches[0].Replace)

	assert.Equal(t, "images/image2.jpg", attaches[1].Name)
	assert.Equal(t, "images/image2.jpg", attaches[1].Replace)

	assert.Equal(t, "../image3.jpg", attaches[2].Name)
	assert.Equal(t, "../image3.jpg", attaches[2].Replace)

	assert.Equal(t, len(attaches), 3)
}

func TestPrepareAttachmentsWithSubDirBase(t *testing.T) {

	testingOpener := &virtualOpener{
		PathToBuf: map[string]*bufferCloser{
			"a/b/image1.jpg":        {bytes.NewBuffer(nil)},
			"a/b/images/image2.jpg": {bytes.NewBuffer(nil)},
			"a/image3.jpg":          {bytes.NewBuffer(nil)},
		},
	}

	attaches, err := prepareAttachments(testingOpener, "a/b", replacements)
	if err != nil {
		t.Fatal(err)
	}
	assert.Equal(t, "image1.jpg", attaches[0].Name)
	assert.Equal(t, "image1.jpg", attaches[0].Replace)

	assert.Equal(t, "images/image2.jpg", attaches[1].Name)
	assert.Equal(t, "images/image2.jpg", attaches[1].Replace)

	assert.Equal(t, "../image3.jpg", attaches[2].Name)
	assert.Equal(t, "../image3.jpg", attaches[2].Replace)

	assert.Equal(t, len(attaches), 3)
}
config.go | 27 (file removed)
@@ -1,27 +0,0 @@
package main

import (
	"os"

	"github.com/kovetskiy/ko"
)

type Config struct {
	Username string `env:"MARK_USERNAME" toml:"username"`
	Password string `env:"MARK_PASSWORD" toml:"password"`
	BaseURL  string `env:"MARK_BASE_URL" toml:"base_url"`
}

func LoadConfig(path string) (*Config, error) {
	config := &Config{}
	err := ko.Load(path, config)
	if err != nil {
		if os.IsNotExist(err) {
			return config, nil
		}

		return nil, err
	}

	return config, nil
}
@@ -2,21 +2,24 @@ package confluence

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"io/ioutil"
	"mime/multipart"
	"net/http"
	"os"
	"strings"
	"unicode/utf8"

	"github.com/bndr/gopencils"
	"github.com/kovetskiy/gopencils"
	"github.com/kovetskiy/lorg"
	"github.com/reconquest/karma-go"
	"github.com/reconquest/pkg/log"
)

type User struct {
	AccountID string `json:"accountId"`
	AccountID string `json:"accountId,omitempty"`
	UserKey   string `json:"userKey,omitempty"`
}

type API struct {
@@ -24,19 +27,34 @@ type API struct {

	// it's deprecated accordingly to Atlassian documentation,
	// but it's only way to set permissions
	json *gopencils.Resource
	json    *gopencils.Resource
	BaseURL string
}

type SpaceInfo struct {
	ID   int    `json:"id"`
	Key  string `json:"key"`
	Name string `json:"name"`

	Homepage PageInfo `json:"homepage"`

	Links struct {
		Full string `json:"webui"`
	} `json:"_links"`
}

type PageInfo struct {
	ID    string `json:"id"`
	Title string `json:"title"`
	Type  string `json:"type"`

	Version struct {
		Number int64 `json:"number"`
		Number  int64  `json:"number"`
		Message string `json:"message"`
	} `json:"version"`

	Ancestors []struct {
		Id    string `json:"id"`
		ID    string `json:"id"`
		Title string `json:"title"`
	} `json:"ancestors"`

@@ -57,26 +75,60 @@ type AttachmentInfo struct {
	} `json:"_links"`
}

type Label struct {
	ID     string `json:"id"`
	Prefix string `json:"prefix"`
	Name   string `json:"name"`
}
type LabelInfo struct {
	Labels []Label `json:"results"`
	Size   int     `json:"number"`
}
type form struct {
	buffer io.Reader
	writer *multipart.Writer
}

type tracer struct {
	prefix string
}

func (tracer *tracer) Printf(format string, args ...interface{}) {
	log.Tracef(nil, tracer.prefix+" "+format, args...)
}

func NewAPI(baseURL string, username string, password string) *API {
	auth := &gopencils.BasicAuth{username, password}
	var auth *gopencils.BasicAuth
	if username != "" {
		auth = &gopencils.BasicAuth{
			Username: username,
			Password: password,
		}
	}
	rest := gopencils.Api(baseURL+"/rest/api", auth, 3) // set option for 3 retries on failure
	if username == "" {
		if rest.Headers == nil {
			rest.Headers = http.Header{}
		}
		rest.SetHeader("Authorization", fmt.Sprintf("Bearer %s", password))
	}

	json := gopencils.Api(baseURL+"/rpc/json-rpc/confluenceservice-v2", auth, 3)

	if log.GetLevel() == lorg.LevelTrace {
		rest.Logger = &tracer{"rest:"}
		json.Logger = &tracer{"json-rpc:"}
	}

	return &API{
		rest: gopencils.Api(baseURL+"/rest/api", auth),

		json: gopencils.Api(
			baseURL+"/rpc/json-rpc/confluenceservice-v2",
			auth,
		),
		rest:    rest,
		json:    json,
		BaseURL: strings.TrimSuffix(baseURL, "/"),
	}
}

func (api *API) FindRootPage(space string) (*PageInfo, error) {
	page, err := api.FindPage(space, ``)
	page, err := api.FindPage(space, ``, "page")
	if err != nil {
		return nil, karma.Format(
			err,
@@ -97,12 +149,35 @@ func (api *API) FindRootPage(space string) (*PageInfo, error) {
	}

	return &PageInfo{
		ID:    page.Ancestors[0].Id,
		ID:    page.Ancestors[0].ID,
		Title: page.Ancestors[0].Title,
	}, nil
}

func (api *API) FindPage(space string, title string) (*PageInfo, error) {
func (api *API) FindHomePage(space string) (*PageInfo, error) {
	payload := map[string]string{
		"expand": "homepage",
	}

	request, err := api.rest.Res(
		"space/"+space, &SpaceInfo{},
	).Get(payload)
	if err != nil {
		return nil, err
	}

	if request.Raw.StatusCode == http.StatusNotFound || request.Raw.StatusCode != http.StatusOK {
		return nil, newErrorStatusNotOK(request)
	}

	return &request.Response.(*SpaceInfo).Homepage, nil
}

func (api *API) FindPage(
	space string,
	title string,
	pageType string,
) (*PageInfo, error) {
	result := struct {
		Results []PageInfo `json:"results"`
	}{}
@@ -110,6 +185,7 @@ func (api *API) FindPage(space string, title string) (*PageInfo, error) {
	payload := map[string]string{
		"spaceKey": space,
		"expand":   "ancestors,version",
		"type":     pageType,
	}

	if title != "" {
@@ -125,7 +201,7 @@ func (api *API) FindPage(space string, title string) (*PageInfo, error) {

	// allow 404 because it's fine if page is not found,
	// the function will return nil, nil
	if request.Raw.StatusCode != 404 && request.Raw.StatusCode != 200 {
	if request.Raw.StatusCode != http.StatusNotFound && request.Raw.StatusCode != http.StatusOK {
		return nil, newErrorStatusNotOK(request)
	}

@@ -140,11 +216,11 @@ func (api *API) CreateAttachment(
	pageID string,
	name string,
	comment string,
	path string,
	reader io.Reader,
) (AttachmentInfo, error) {
	var info AttachmentInfo

	form, err := getAttachmentPayload(name, comment, path)
	form, err := getAttachmentPayload(name, comment, reader)
	if err != nil {
		return AttachmentInfo{}, err
	}
@@ -161,7 +237,11 @@ func (api *API) CreateAttachment(
	)

	resource.Payload = form.buffer
	oldHeaders := resource.Headers.Clone()
	resource.Headers = http.Header{}
	if resource.Api.BasicAuth == nil {
		resource.Headers.Set("Authorization", oldHeaders.Get("Authorization"))
	}

	resource.SetHeader("Content-Type", form.writer.FormDataContentType())
	resource.SetHeader("X-Atlassian-Token", "no-check")
@@ -171,13 +251,13 @@ func (api *API) CreateAttachment(
		return info, err
	}

	if request.Raw.StatusCode != 200 {
	if request.Raw.StatusCode != http.StatusOK {
		return info, newErrorStatusNotOK(request)
	}

	if len(result.Results) == 0 {
		return info, errors.New(
			"Confluence REST API for creating attachments returned " +
			"the Confluence REST API for creating attachments returned " +
				"0 json objects, expected at least 1",
		)
	}
@@ -195,33 +275,43 @@ func (api *API) CreateAttachment(
	return info, nil
}

// UpdateAttachment uploads a new version of the same attachment if the
// checksums differs from the previous one.
// It also handles a case where Confluence returns sort of "short" variant of
// the response instead of an extended one.
func (api *API) UpdateAttachment(
	pageID string,
	attachID string,
	name string,
	comment string,
	path string,
	reader io.Reader,
) (AttachmentInfo, error) {
	var info AttachmentInfo

	form, err := getAttachmentPayload(name, comment, path)
	form, err := getAttachmentPayload(name, comment, reader)
	if err != nil {
		return AttachmentInfo{}, err
	}

	var result struct {
	var extendedResponse struct {
		Links struct {
			Context string `json:"context"`
		} `json:"_links"`
		Results []AttachmentInfo `json:"results"`
	}

	var result json.RawMessage

	resource := api.rest.Res(
		"content/"+pageID+"/child/attachment/"+attachID+"/data", &result,
	)

	resource.Payload = form.buffer
	oldHeaders := resource.Headers.Clone()
	resource.Headers = http.Header{}
	if resource.Api.BasicAuth == nil {
		resource.Headers.Set("Authorization", oldHeaders.Get("Authorization"))
	}

	resource.SetHeader("Content-Type", form.writer.FormDataContentType())
	resource.SetHeader("X-Atlassian-Token", "no-check")
@@ -231,47 +321,52 @@ func (api *API) UpdateAttachment(
		return info, err
	}

	if request.Raw.StatusCode != 200 {
	if request.Raw.StatusCode != http.StatusOK {
		return info, newErrorStatusNotOK(request)
	}

	if len(result.Results) == 0 {
		return info, errors.New(
			"Confluence REST API for creating attachments returned " +
				"0 json objects, expected at least 1",
	err = json.Unmarshal(result, &extendedResponse)
	if err != nil {
		return info, karma.Format(
			err,
			"unable to unmarshal JSON response as full response format: %s",
			string(result),
		)
	}

	for i, info := range result.Results {
		if info.Links.Context == "" {
			info.Links.Context = result.Links.Context
	if len(extendedResponse.Results) > 0 {
		for i, info := range extendedResponse.Results {
			if info.Links.Context == "" {
				info.Links.Context = extendedResponse.Links.Context
			}

			extendedResponse.Results[i] = info
		}

		result.Results[i] = info
		info = extendedResponse.Results[0]

		return info, nil
	}

	info = result.Results[0]
	var shortResponse AttachmentInfo
	err = json.Unmarshal(result, &shortResponse)
	if err != nil {
		return info, karma.Format(
			err,
			"unable to unmarshal JSON response as short response format: %s",
			string(result),
		)
	}

	return info, nil
	return shortResponse, nil
}

func getAttachmentPayload(name, comment, path string) (*form, error) {
func getAttachmentPayload(name, comment string, reader io.Reader) (*form, error) {
	var (
		payload = bytes.NewBuffer(nil)
		writer  = multipart.NewWriter(payload)
	)

	file, err := os.Open(path)
	if err != nil {
		return nil, karma.Format(
			err,
			"unable to open file: %q",
			path,
		)
	}

	defer file.Close()

	content, err := writer.CreateFormFile("file", name)
	if err != nil {
		return nil, karma.Format(
@@ -280,7 +375,7 @@ func getAttachmentPayload(name, comment, path string) (*form, error) {
		)
	}

	_, err = io.Copy(content, file)
	_, err = io.Copy(content, reader)
	if err != nil {
		return nil, karma.Format(
			err,
@@ -328,6 +423,7 @@ func (api *API) GetAttachments(pageID string) ([]AttachmentInfo, error) {

	payload := map[string]string{
		"expand": "version,container",
		"limit":  "1000",
	}

	request, err := api.rest.Res(
@@ -337,7 +433,7 @@ func (api *API) GetAttachments(pageID string) ([]AttachmentInfo, error) {
		return nil, err
	}

	if request.Raw.StatusCode != 200 {
	if request.Raw.StatusCode != http.StatusOK {
		return nil, newErrorStatusNotOK(request)
	}

@@ -360,7 +456,7 @@ func (api *API) GetPageByID(pageID string) (*PageInfo, error) {
		return nil, err
	}

	if request.Raw.StatusCode != 200 {
	if request.Raw.StatusCode != http.StatusOK {
		return nil, newErrorStatusNotOK(request)
	}

@@ -369,12 +465,13 @@ func (api *API) GetPageByID(pageID string) (*PageInfo, error) {

func (api *API) CreatePage(
	space string,
	pageType string,
	parent *PageInfo,
	title string,
	body string,
) (*PageInfo, error) {
	payload := map[string]interface{}{
		"type":  "page",
		"type":  pageType,
		"title": title,
		"space": map[string]interface{}{
			"key": space,
@@ -385,6 +482,13 @@ func (api *API) CreatePage(
				"value": body,
			},
		},
		"metadata": map[string]interface{}{
			"properties": map[string]interface{}{
				"editor": map[string]interface{}{
					"value": "v2",
				},
			},
		},
	}

	if parent != nil {
@@ -400,45 +504,66 @@ func (api *API) CreatePage(
		return nil, err
	}

	if request.Raw.StatusCode != 200 {
	if request.Raw.StatusCode != http.StatusOK {
		return nil, newErrorStatusNotOK(request)
	}

	return request.Response.(*PageInfo), nil
}

func (api *API) UpdatePage(
	page *PageInfo, newContent string,
) error {
func (api *API) UpdatePage(page *PageInfo, newContent string, minorEdit bool, versionMessage string, newLabels []string, appearance string, emojiString string) error {
	nextPageVersion := page.Version.Number + 1
	oldAncestors := []map[string]interface{}{}

	if len(page.Ancestors) == 0 {
		return fmt.Errorf(
			"page %q info does not contain any information about parents",
			page.ID,
		)
	if page.Type != "blogpost" && len(page.Ancestors) > 0 {
		// picking only the last one, which is required by confluence
		oldAncestors = []map[string]interface{}{
			{"id": page.Ancestors[len(page.Ancestors)-1].ID},
		}
	}

	// picking only the last one, which is required by confluence
	oldAncestors := []map[string]interface{}{
		{"id": page.Ancestors[len(page.Ancestors)-1].Id},
	properties := map[string]interface{}{
		// Fix to set full-width as has changed on Confluence APIs again.
		// https://jira.atlassian.com/browse/CONFCLOUD-65447
		//
		"content-appearance-published": map[string]interface{}{
			"value": appearance,
		},
		// content-appearance-draft should not be set as this is impacted by
		// the user editor default configurations - which caused the sporadic published widths.
	}

	if emojiString != "" {
		r, _ := utf8.DecodeRuneInString(emojiString)
		unicodeHex := fmt.Sprintf("%x", r)

		properties["emoji-title-draft"] = map[string]interface{}{
			"value": unicodeHex,
		}
		properties["emoji-title-published"] = map[string]interface{}{
			"value": unicodeHex,
		}
	}

	payload := map[string]interface{}{
		"id":    page.ID,
		"type":  "page",
		"type":  page.Type,
		"title": page.Title,
		"version": map[string]interface{}{
			"number":    nextPageVersion,
			"minorEdit": false,
			"minorEdit": minorEdit,
			"message":   versionMessage,
		},
		"ancestors": oldAncestors,
		"body": map[string]interface{}{
			"storage": map[string]interface{}{
				"value":          string(newContent),
				"value":          newContent,
				"representation": "storage",
			},
		},
		"metadata": map[string]interface{}{
			"properties": properties,
		},
	}

	request, err := api.rest.Res(
@@ -448,13 +573,73 @@ func (api *API) UpdatePage(
		return err
	}

	if request.Raw.StatusCode != 200 {
	if request.Raw.StatusCode != http.StatusOK {
		return newErrorStatusNotOK(request)
	}

	return nil
}

func (api *API) AddPageLabels(page *PageInfo, newLabels []string) (*LabelInfo, error) {

	labels := []map[string]interface{}{}
	for _, label := range newLabels {
		if label != "" {
			item := map[string]interface{}{
				"prefix": "global",
				"name":   label,
			}
			labels = append(labels, item)
		}
	}

	payload := labels

	request, err := api.rest.Res(
		"content/"+page.ID+"/label", &LabelInfo{},
	).Post(payload)
	if err != nil {
		return nil, err
	}

	if request.Raw.StatusCode != http.StatusOK {
		return nil, newErrorStatusNotOK(request)
	}

	return request.Response.(*LabelInfo), nil
}

func (api *API) DeletePageLabel(page *PageInfo, label string) (*LabelInfo, error) {

	request, err := api.rest.Res(
		"content/"+page.ID+"/label", &LabelInfo{},
	).SetQuery(map[string]string{"name": label}).Delete()
	if err != nil {
		return nil, err
	}

	if request.Raw.StatusCode != http.StatusOK && request.Raw.StatusCode != http.StatusNoContent {
		return nil, newErrorStatusNotOK(request)
	}

	return request.Response.(*LabelInfo), nil
}

func (api *API) GetPageLabels(page *PageInfo, prefix string) (*LabelInfo, error) {

	request, err := api.rest.Res(
		"content/"+page.ID+"/label", &LabelInfo{},
	).Get(map[string]string{"prefix": prefix})
	if err != nil {
		return nil, err
	}

	if request.Raw.StatusCode != http.StatusOK {
		return nil, newErrorStatusNotOK(request)
	}
	return request.Response.(*LabelInfo), nil
}

func (api *API) GetUserByName(name string) (*User, error) {
	var response struct {
		Results []struct {
@@ -462,6 +647,7 @@ func (api *API) GetUserByName(name string) (*User, error) {
		}
	}

	// Try the new path first
	_, err := api.rest.
		Res("search").
		Res("user", &response).
@@ -472,7 +658,20 @@ func (api *API) GetUserByName(name string) (*User, error) {
		return nil, err
	}

	// Try old path
	if len(response.Results) == 0 {
		_, err := api.rest.
			Res("search", &response).
			Get(map[string]string{
				"cql": fmt.Sprintf("user.fullname~%q", name),
			})
		if err != nil {
			return nil, err
		}
	}

	if len(response.Results) == 0 {

		return nil, karma.
			Describe("name", name).
			Reason(
@@ -481,7 +680,6 @@ func (api *API) GetUserByName(name string) (*User, error) {
	}

	return &response.Results[0].User, nil

}

func (api *API) GetCurrentUser() (*User, error) {
@@ -530,7 +728,7 @@ func (api *API) RestrictPageUpdatesCloud(
		return err
	}

	if request.Raw.StatusCode != 200 {
	if request.Raw.StatusCode != http.StatusOK {
		return newErrorStatusNotOK(request)
	}

@@ -551,15 +749,17 @@ func (api *API) RestrictPageUpdatesServer(
	).Post([]interface{}{
		page.ID,
		"Edit",
		map[string]interface{}{
			"userName": allowedUser,
		[]map[string]interface{}{
			{
				"userName": allowedUser,
			},
		},
	})
	if err != nil {
		return err
	}

	if request.Raw.StatusCode != 200 {
	if request.Raw.StatusCode != http.StatusOK {
		return newErrorStatusNotOK(request)
	}

@@ -579,7 +779,7 @@ func (api *API) RestrictPageUpdates(
) error {
	var err error

	if strings.HasSuffix(api.rest.Api.BaseUrl.Host, "atlassian.net") {
	if strings.HasSuffix(api.rest.Api.BaseUrl.Host, "jira.com") || strings.HasSuffix(api.rest.Api.BaseUrl.Host, "atlassian.net") {
		err = api.RestrictPageUpdatesCloud(page, allowedUser)
	} else {
		err = api.RestrictPageUpdatesServer(page, allowedUser)
@@ -589,24 +789,26 @@ func (api *API) RestrictPageUpdates(
}

func newErrorStatusNotOK(request *gopencils.Resource) error {
	if request.Raw.StatusCode == 401 {
	if request.Raw.StatusCode == http.StatusUnauthorized {
		return errors.New(
			"Confluence API returned unexpected status: 401 (Unauthorized)",
			"the Confluence API returned unexpected status: 401 (Unauthorized)",
		)
	}

	if request.Raw.StatusCode == 404 {
	if request.Raw.StatusCode == http.StatusNotFound {
		return errors.New(
			"Confluence API returned unexpected status: 404 (Not Found)",
			"the Confluence API returned unexpected status: 404 (Not Found)",
		)
	}

	output, _ := ioutil.ReadAll(request.Raw.Body)
	defer request.Raw.Body.Close()
	output, _ := io.ReadAll(request.Raw.Body)
	defer func() {
		_ = request.Raw.Body.Close()
	}()

	return fmt.Errorf(
		"Confluence API returned unexpected status: %v, "+
			"output: %s",
		"the Confluence API returned unexpected status: %v, "+
			"output: %q",
		request.Raw.Status, output,
	)
}
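A short usage sketch for the reworked client constructor (illustrative only, not part of this diff; the import path and the values are assumptions). With an empty username, NewAPI sends the password as a Bearer token, which is how personal access tokens are expected to be passed; with a username it falls back to basic auth:

package main

import (
	"fmt"

	// Assumed import path for the confluence package in this repository.
	"github.com/kovetskiy/mark/confluence"
)

func main() {
	// Token-based auth: empty username, token passed as the password argument.
	api := confluence.NewAPI("https://wiki.example.com", "", "my-personal-access-token")

	// FindPage now also takes the page type ("page" or "blogpost").
	page, err := api.FindPage("DOCS", "Release Notes", "page")
	if err != nil {
		panic(err)
	}
	if page == nil {
		// 404 is tolerated by FindPage and reported as nil, nil.
		fmt.Println("page not found")
		return
	}

	fmt.Println(page.ID, page.Title, page.Version.Number)
}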
d2/d2.go | 107 (new file)
@@ -0,0 +1,107 @@
package d2

import (
	"bytes"
	"context"
	"encoding/base64"
	"fmt"
	"strconv"
	"time"

	"github.com/chromedp/cdproto/dom"
	"github.com/chromedp/chromedp"

	"github.com/kovetskiy/mark/attachment"
	"github.com/reconquest/pkg/log"

	"oss.terrastruct.com/d2/d2graph"
	"oss.terrastruct.com/d2/d2layouts/d2dagrelayout"
	"oss.terrastruct.com/d2/d2lib"
	"oss.terrastruct.com/d2/d2renderers/d2svg"
	"oss.terrastruct.com/d2/d2themes/d2themescatalog"
	d2log "oss.terrastruct.com/d2/lib/log"
	"oss.terrastruct.com/d2/lib/textmeasure"
	"oss.terrastruct.com/util-go/go2"
)

var renderTimeout = 120 * time.Second

func ProcessD2(title string, d2Diagram []byte, scale float64) (attachment.Attachment, error) {
	ctx, cancel := context.WithTimeout(context.TODO(), renderTimeout)
	ctx = d2log.WithDefault(ctx)
	defer cancel()

	ruler, err := textmeasure.NewRuler()
	if err != nil {
		return attachment.Attachment{}, err
	}
	layoutResolver := func(engine string) (d2graph.LayoutGraph, error) {
		return d2dagrelayout.DefaultLayout, nil
	}
	renderOpts := &d2svg.RenderOpts{
		Pad:     go2.Pointer(int64(5)),
		ThemeID: &d2themescatalog.GrapeSoda.ID,
	}
	compileOpts := &d2lib.CompileOptions{
		LayoutResolver: layoutResolver,
		Ruler:          ruler,
	}

	diagram, _, err := d2lib.Compile(ctx, string(d2Diagram), compileOpts, renderOpts)
	if err != nil {
		return attachment.Attachment{}, err
	}

	out, err := d2svg.Render(diagram, renderOpts)
	if err != nil {
		return attachment.Attachment{}, err
	}

	log.Debugf(nil, "Rendering: %q", title)
	pngBytes, boxModel, err := convertSVGtoPNG(ctx, out, scale)
	if err != nil {
		return attachment.Attachment{}, err
	}

	checkSum, err := attachment.GetChecksum(bytes.NewReader(d2Diagram))
	log.Debugf(nil, "Checksum: %q -> %s", title, checkSum)

	if err != nil {
		return attachment.Attachment{}, err
	}
	if title == "" {
		title = checkSum
	}

	fileName := title + ".png"

	return attachment.Attachment{
		ID:        "",
		Name:      title,
		Filename:  fileName,
		FileBytes: pngBytes,
		Checksum:  checkSum,
		Replace:   title,
		Width:     strconv.FormatInt(boxModel.Width, 10),
		Height:    strconv.FormatInt(boxModel.Height, 10),
	}, nil
}

func convertSVGtoPNG(ctx context.Context, svg []byte, scale float64) (png []byte, m *dom.BoxModel, err error) {
	var (
		result []byte
		model  *dom.BoxModel
	)
	ctx, cancel := chromedp.NewContext(ctx)
	defer cancel()

	err = chromedp.Run(ctx,
		chromedp.Navigate(fmt.Sprintf("data:image/svg+xml;base64,%s", base64.StdEncoding.EncodeToString(svg))),
		chromedp.ScreenshotScale(`document.querySelector("svg > svg")`, scale, &result, chromedp.ByJSPath),
		chromedp.Dimensions(`document.querySelector("svg > svg")`, &model, chromedp.ByJSPath),
	)
	if err != nil {
		return nil, nil, err
	}
	return result, model, err
}
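And a sketch of how this new d2 package is meant to be driven (illustrative only, not part of the diff; the diagram source and title are made up). ProcessD2 compiles the D2 source, renders it to SVG, and then rasterizes it through headless Chrome via chromedp, so a local Chrome/Chromium is required:

package main

import (
	"fmt"
	"os"

	"github.com/kovetskiy/mark/d2"
)

func main() {
	// A tiny D2 diagram; the title becomes the attachment name and filename.
	source := []byte("x -> y: hello")

	att, err := d2.ProcessD2("example-diagram", source, 1.0)
	if err != nil {
		panic(err)
	}

	fmt.Println(att.Filename, att.Width, att.Height)

	// Write the rendered PNG to the current directory for inspection.
	_ = os.WriteFile(att.Filename, att.FileBytes, 0o644)
}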
d2/d2_test.go | 102 (new file)
@@ -0,0 +1,102 @@
package d2

import (
	"fmt"
	"testing"

	"github.com/kovetskiy/mark/attachment"
	"github.com/stretchr/testify/assert"
)

var diagram string = `d2
vars: {
  d2-config: {
    layout-engine: elk
    # Terminal theme code
    theme-id: 300
  }
}
network: {
  cell tower: {
    satellites: {
      shape: stored_data
      style.multiple: true
    }

    transmitter

    satellites -> transmitter: send
    satellites -> transmitter: send
    satellites -> transmitter: send
  }

  online portal: {
    ui: {shape: hexagon}
  }

  data processor: {
    storage: {
      shape: cylinder
      style.multiple: true
    }
  }

  cell tower.transmitter -> data processor.storage: phone logs
}

user: {
  shape: person
  width: 130
}

user -> network.cell tower: make call
user -> network.online portal.ui: access {
  style.stroke-dash: 3
}

api server -> network.online portal.ui: display
api server -> logs: persist
logs: {shape: page; style.multiple: true}

network.data processor -> api server
`

func TestExtractD2Image(t *testing.T) {
	tests := []struct {
		name     string
		markdown []byte
		scale    float64
		want     attachment.Attachment
		wantErr  assert.ErrorAssertionFunc
	}{
		{"example", []byte(diagram), 1.0, attachment.Attachment{
			// This is only the PNG Magic Header
			FileBytes: []byte{0x89, 0x50, 0x4e, 0x47, 0xd, 0xa, 0x1a, 0xa},
			Filename:  "example.png",
			Name:      "example",
			Replace:   "example",
			Checksum:  "58fa387384181445e2d8f90a8c7fda945cb75174f73e8b9853ff59b9e0103ddd",
			ID:        "",
			Width:     "198",
			Height:    "441",
		},
			assert.NoError},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := ProcessD2(tt.name, tt.markdown, tt.scale)
			if !tt.wantErr(t, err, fmt.Sprintf("processD2(%v, %v)", tt.name, string(tt.markdown))) {
				return
			}
			assert.Equal(t, tt.want.Filename, got.Filename, "processD2(%v, %v)", tt.name, string(tt.markdown))
			// We only test for the header as png changes based on system png library
			assert.Equal(t, tt.want.FileBytes, got.FileBytes[0:8], "processD2(%v, %v)", tt.name, string(tt.markdown))
			assert.Equal(t, tt.want.Name, got.Name, "processD2(%v, %v)", tt.name, string(tt.markdown))
			assert.Equal(t, tt.want.Replace, got.Replace, "processD2(%v, %v)", tt.name, string(tt.markdown))
			assert.Equal(t, tt.want.Checksum, got.Checksum, "processD2(%v, %v)", tt.name, string(tt.markdown))
			assert.Equal(t, tt.want.ID, got.ID, "processD2(%v, %v)", tt.name, string(tt.markdown))
			assert.Equal(t, tt.want.Width, got.Width, "processD2(%v, %v)", tt.name, string(tt.markdown))
			assert.Equal(t, tt.want.Height, got.Height, "processD2(%v, %v)", tt.name, string(tt.markdown))
		})
	}
}
docker-compose.yaml | 41 (new file)
@@ -0,0 +1,41 @@
version: "3.5"
services:

  markbuilder:
    image: golang:latest
    environment:
      # Set them in your environment or .env
      - GOOS=${GOOS?Missing GOOS.}
      - GOARCH=${GOARCH?Missing GOARCH.}
      # Example Values
      # MacOS 64-bit
      # - GOOS=darwin
      # - GOARCH=amd64

      # MacOS 32-bit
      # - GOOS=darwin
      # - GOARCH=386

      # Linux 64-bit
      # - GOOS=linux
      # - GOARCH=amd64

      # Linux 32-bit
      # - GOOS=linux
      # - GOARCH=386

      # Windows 64-bit
      # - GOOS=windows
      # - GOARCH=amd64

      # Windows 32-bit
      # - GOOS=windows
      # - GOARCH=386

    volumes:
      - type: bind
        source: ./
        target: /go/src/github.com/kovetskiy/mark

    working_dir: /go/src/github.com/kovetskiy/mark/
    command: make build
go.mod | 67
@@ -1,21 +1,58 @@
module github.com/kovetskiy/mark

go 1.14
go 1.24.0

toolchain go1.24.2

require (
	github.com/BurntSushi/toml v0.3.1 // indirect
	github.com/bndr/gopencils v0.0.0-20161113114152-22e283ad7611
	github.com/go-yaml/yaml v2.1.0+incompatible // indirect
	github.com/iancoleman/strcase v0.0.0-20191112232945-16388991a334 // indirect
	github.com/kovetskiy/godocs v0.0.0-20160817104724-2d9428f80f34
	github.com/kovetskiy/ko v0.0.0-20190324102900-26b8dd0988bf
	github.com/kovetskiy/lorg v0.0.0-20180412114932-05d42d7f98ba
	github.com/kovetskiy/toml v0.2.0 // indirect
	github.com/reconquest/cog v0.0.0-20190411204516-c6b6b90dcd40
	github.com/reconquest/karma-go v0.0.0-20190930125156-7b5c19ad6eab
	github.com/bmatcuk/doublestar/v4 v4.8.1
	github.com/chromedp/cdproto v0.0.0-20250403032234-65de8f5d025b
	github.com/chromedp/chromedp v0.13.6
	github.com/dreampuf/mermaid.go v0.0.27
	github.com/kovetskiy/gopencils v0.0.0-20250404051442-0b776066936a
	github.com/kovetskiy/lorg v1.2.1-0.20240830111423-ba4fe8b6f7c4
	github.com/reconquest/karma-go v1.5.0
	github.com/reconquest/pkg v1.3.1-0.20240901105413-68c2adbf2b64
	github.com/reconquest/regexputil-go v0.0.0-20160905154124-38573e70c1f4
	github.com/russross/blackfriday v1.5.2
	github.com/stretchr/testify v1.5.1 // indirect
	github.com/zazab/zhash v0.0.0-20170403032415-ad45b89afe7a // indirect
	gopkg.in/yaml.v2 v2.2.8
	github.com/stretchr/testify v1.10.0
	github.com/urfave/cli-altsrc/v3 v3.0.1
	github.com/urfave/cli/v3 v3.3.3
	github.com/yuin/goldmark v1.7.12
	golang.org/x/tools v0.34.0
	gopkg.in/yaml.v3 v3.0.1
	oss.terrastruct.com/d2 v0.7.0
	oss.terrastruct.com/util-go v0.0.0-20250213174338-243d8661088a
)

require (
	github.com/BurntSushi/toml v1.5.0 // indirect
	github.com/PuerkitoBio/goquery v1.10.0 // indirect
	github.com/alecthomas/chroma/v2 v2.14.0 // indirect
	github.com/andybalholm/cascadia v1.3.2 // indirect
	github.com/chromedp/sysutil v1.1.0 // indirect
	github.com/davecgh/go-spew v1.1.1 // indirect
	github.com/dlclark/regexp2 v1.11.4 // indirect
	github.com/dop251/goja v0.0.0-20240927123429-241b342198c2 // indirect
	github.com/go-json-experiment/json v0.0.0-20250211171154-1ae217ad3535 // indirect
	github.com/go-sourcemap/sourcemap v2.1.4+incompatible // indirect
	github.com/gobwas/httphead v0.1.0 // indirect
	github.com/gobwas/pool v0.2.1 // indirect
	github.com/gobwas/ws v1.4.0 // indirect
	github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 // indirect
	github.com/google/pprof v0.0.0-20240927180334-d43a67379298 // indirect
	github.com/kr/pretty v0.3.1 // indirect
	github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
	github.com/mazznoer/csscolorparser v0.1.5 // indirect
	github.com/pmezard/go-difflib v1.0.0 // indirect
	github.com/reconquest/cog v0.0.0-20240830113510-c7ba12d0beeb // indirect
	github.com/rivo/uniseg v0.4.7 // indirect
	github.com/zazab/zhash v0.0.0-20221031090444-2b0d50417446 // indirect
	golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 // indirect
	golang.org/x/image v0.20.0 // indirect
	golang.org/x/net v0.41.0 // indirect
	golang.org/x/sys v0.33.0 // indirect
	golang.org/x/text v0.26.0 // indirect
	golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect
	gonum.org/v1/plot v0.14.0 // indirect
	gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 // indirect
)
go.sum | 196
@@ -1,38 +1,168 @@
|
||||
github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
|
||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||
github.com/bndr/gopencils v0.0.0-20161113114152-22e283ad7611 h1:hqtAgYVdJDEoCOqABNtiNgVlGFXmn5zN0i7h7a/mh68=
|
||||
github.com/bndr/gopencils v0.0.0-20161113114152-22e283ad7611/go.mod h1:h/74eddHMsY5P4bCkKTVWWZ+J6nsKMNvDEetFHG7PIY=
|
||||
github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/go-yaml/yaml v2.1.0+incompatible h1:RYi2hDdss1u4YE7GwixGzWwVo47T8UQwnTLB6vQiq+o=
|
||||
github.com/go-yaml/yaml v2.1.0+incompatible/go.mod h1:w2MrLa16VYP0jy6N7M5kHaCkaLENm+P+Tv+MfurjSw0=
|
||||
github.com/iancoleman/strcase v0.0.0-20191112232945-16388991a334 h1:VHgatEHNcBFEB7inlalqfNqw65aNkM1lGX2yt3NmbS8=
|
||||
github.com/iancoleman/strcase v0.0.0-20191112232945-16388991a334/go.mod h1:SK73tn/9oHe+/Y0h39VT4UCxmurVJkR5NA7kMEAOgSE=
|
||||
github.com/kovetskiy/godocs v0.0.0-20160817104724-2d9428f80f34 h1:bUWVdQQT5QLGFudBaGJE9v3Zf0EdDiDd5h8vAj9sMVg=
|
||||
github.com/kovetskiy/godocs v0.0.0-20160817104724-2d9428f80f34/go.mod h1:u+dvDLWaaxNkHMvhUK1IBXTNS3fhDvdy3BqQrZM8w/k=
|
||||
github.com/kovetskiy/ko v0.0.0-20190324102900-26b8dd0988bf h1:4QsqgCcPoqDB91dcp4GffoV6TjwfVURaWpjKWFi0ae0=
|
||||
github.com/kovetskiy/ko v0.0.0-20190324102900-26b8dd0988bf/go.mod h1:5RTDadc76NCMKavfnEcGrGVdoQ02h8dLHBUEN4h3xsM=
|
||||
github.com/kovetskiy/lorg v0.0.0-20180412114932-05d42d7f98ba h1:684OcooHjET2b2XWy4ZyIkZJ8CJ3GhHSCqLDeVIwsBo=
|
||||
github.com/kovetskiy/lorg v0.0.0-20180412114932-05d42d7f98ba/go.mod h1:B8HeKAukXULNzWWsW5k/SQyDkiQZPn7lTBJDB46MZ9I=
|
||||
github.com/kovetskiy/toml v0.2.0 h1:tMsPGWE3ejTjXop10/17b/tDtbwQJZdBfc0e+l3WndA=
|
||||
github.com/kovetskiy/toml v0.2.0/go.mod h1:+nh++V8wCesSlfPA3DSXGO1hiAHDVHDqem4ixTsWuRY=
|
||||
git.sr.ht/~sbinet/gg v0.5.0 h1:6V43j30HM623V329xA9Ntq+WJrMjDxRjuAB1LFWF5m8=
|
||||
git.sr.ht/~sbinet/gg v0.5.0/go.mod h1:G2C0eRESqlKhS7ErsNey6HHrqU1PwsnCQlekFi9Q2Oo=
|
||||
github.com/BurntSushi/toml v1.5.0 h1:W5quZX/G/csjUnuI8SUYlsHs9M38FC7znL0lIO+DvMg=
|
||||
github.com/BurntSushi/toml v1.5.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho=
|
||||
github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0rYXWg0=
|
||||
github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ=
|
||||
github.com/PuerkitoBio/goquery v1.10.0 h1:6fiXdLuUvYs2OJSvNRqlNPoBm6YABE226xrbavY5Wv4=
|
||||
github.com/PuerkitoBio/goquery v1.10.0/go.mod h1:TjZZl68Q3eGHNBA8CWaxAN7rOU1EbDz3CWuolcO5Yu4=
|
||||
github.com/Shopify/toxiproxy/v2 v2.12.0 h1:d1x++lYZg/zijXPPcv7PH0MvHMzEI5aX/YuUi/Sw+yg=
|
||||
github.com/Shopify/toxiproxy/v2 v2.12.0/go.mod h1:R9Z38Pw6k2cGZWXHe7tbxjGW9azmY1KbDQJ1kd+h7Tk=
|
||||
github.com/ajstarks/svgo v0.0.0-20211024235047-1546f124cd8b h1:slYM766cy2nI3BwyRiyQj/Ud48djTMtMebDqepE95rw=
|
||||
github.com/ajstarks/svgo v0.0.0-20211024235047-1546f124cd8b/go.mod h1:1KcenG0jGWcpt8ov532z81sp/kMMUG485J2InIOyADM=
|
||||
github.com/alecthomas/assert/v2 v2.7.0 h1:QtqSACNS3tF7oasA8CU6A6sXZSBDqnm7RfpLl9bZqbE=
|
||||
github.com/alecthomas/assert/v2 v2.7.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k=
|
||||
github.com/alecthomas/chroma/v2 v2.14.0 h1:R3+wzpnUArGcQz7fCETQBzO5n9IMNi13iIs46aU4V9E=
|
||||
github.com/alecthomas/chroma/v2 v2.14.0/go.mod h1:QolEbTfmUHIMVpBqxeDnNBj2uoeI4EbYP4i6n68SG4I=
|
||||
github.com/alecthomas/repr v0.4.0 h1:GhI2A8MACjfegCPVq9f1FLvIBS+DrQ2KQBFZP1iFzXc=
|
||||
github.com/alecthomas/repr v0.4.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
|
||||
github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss=
|
||||
github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU=
|
||||
github.com/bmatcuk/doublestar/v4 v4.8.1 h1:54Bopc5c2cAvhLRAzqOGCYHYyhcDHsFF4wWIR5wKP38=
|
||||
github.com/bmatcuk/doublestar/v4 v4.8.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc=
|
||||
github.com/campoy/embedmd v1.0.0 h1:V4kI2qTJJLf4J29RzI/MAt2c3Bl4dQSYPuflzwFH2hY=
|
||||
github.com/campoy/embedmd v1.0.0/go.mod h1:oxyr9RCiSXg0M3VJ3ks0UGfp98BpSSGr0kpiX3MzVl8=
|
||||
github.com/chromedp/cdproto v0.0.0-20250403032234-65de8f5d025b h1:jJmiCljLNTaq/O1ju9Bzz2MPpFlmiTn0F7LwCoeDZVw=
|
||||
github.com/chromedp/cdproto v0.0.0-20250403032234-65de8f5d025b/go.mod h1:NItd7aLkcfOA/dcMXvl8p1u+lQqioRMq/SqDp71Pb/k=
|
||||
github.com/chromedp/chromedp v0.13.6 h1:xlNunMyzS5bu3r/QKrb3fzX6ow3WBQ6oao+J65PGZxk=
|
||||
github.com/chromedp/chromedp v0.13.6/go.mod h1:h8GPP6ZtLMLsU8zFbTcb7ZDGCvCy8j/vRoFmRltQx9A=
|
||||
github.com/chromedp/sysutil v1.1.0 h1:PUFNv5EcprjqXZD9nJb9b/c9ibAbxiYo4exNWZyipwM=
|
||||
github.com/chromedp/sysutil v1.1.0/go.mod h1:WiThHUdltqCNKGc4gaU50XgYjwjYIhKWoHGPTUfWTJ8=
|
||||
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/dlclark/regexp2 v1.11.4 h1:rPYF9/LECdNymJufQKmri9gV604RvvABwgOA8un7yAo=
|
||||
github.com/dlclark/regexp2 v1.11.4/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
|
||||
github.com/dop251/goja v0.0.0-20240927123429-241b342198c2 h1:Ux9RXuPQmTB4C1MKagNLme0krvq8ulewfor+ORO/QL4=
|
||||
github.com/dop251/goja v0.0.0-20240927123429-241b342198c2/go.mod h1:MxLav0peU43GgvwVgNbLAj1s/bSGboKkhuULvq/7hx4=
|
||||
github.com/dreampuf/mermaid.go v0.0.27 h1:uriWHpcc4clTaAUdJqpyDzyGvAZumeLb61n2VBxc0ZQ=
|
||||
github.com/dreampuf/mermaid.go v0.0.27/go.mod h1:13PeW5y49ouLGlP3RdZm6ke+lQIcz3z7rdVoqRkt5hY=
|
||||
github.com/go-fonts/liberation v0.3.1 h1:9RPT2NhUpxQ7ukUvz3jeUckmN42T9D9TpjtQcqK/ceM=
|
||||
github.com/go-fonts/liberation v0.3.1/go.mod h1:jdJ+cqF+F4SUL2V+qxBth8fvBpBDS7yloUL5Fi8GTGY=
|
||||
github.com/go-json-experiment/json v0.0.0-20250211171154-1ae217ad3535 h1:yE7argOs92u+sSCRgqqe6eF+cDaVhSPlioy1UkA0p/w=
|
||||
github.com/go-json-experiment/json v0.0.0-20250211171154-1ae217ad3535/go.mod h1:BWmvoE1Xia34f3l/ibJweyhrT+aROb/FQ6d+37F0e2s=
|
||||
github.com/go-latex/latex v0.0.0-20230307184459-12ec69307ad9 h1:NxXI5pTAtpEaU49bpLpQoDsu1zrteW/vxzTz8Cd2UAs=
|
||||
github.com/go-latex/latex v0.0.0-20230307184459-12ec69307ad9/go.mod h1:gWuR/CrFDDeVRFQwHPvsv9soJVB/iqymhuZQuJ3a9OM=
|
||||
github.com/go-pdf/fpdf v0.8.0 h1:IJKpdaagnWUeSkUFUjTcSzTppFxmv8ucGQyNPQWxYOQ=
|
||||
github.com/go-pdf/fpdf v0.8.0/go.mod h1:gfqhcNwXrsd3XYKte9a7vM3smvU/jB4ZRDrmWSxpfdc=
|
||||
github.com/go-sourcemap/sourcemap v2.1.4+incompatible h1:a+iTbH5auLKxaNwQFg0B+TCYl6lbukKPc7b5x0n1s6Q=
|
||||
github.com/go-sourcemap/sourcemap v2.1.4+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg=
|
||||
github.com/gobwas/httphead v0.1.0 h1:exrUm0f4YX0L7EBwZHuCF4GDp8aJfVeBrlLQrs6NqWU=
|
||||
github.com/gobwas/httphead v0.1.0/go.mod h1:O/RXo79gxV8G+RqlR/otEwx4Q36zl9rqC5u12GKvMCM=
|
||||
github.com/gobwas/pool v0.2.1 h1:xfeeEhW7pwmX8nuLVlqbzVc7udMDrwetjEv+TZIz1og=
|
||||
github.com/gobwas/pool v0.2.1/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw=
|
||||
github.com/gobwas/ws v1.4.0 h1:CTaoG1tojrh4ucGPcoJFiAQUAsEWekEWvLy7GsVNqGs=
|
||||
github.com/gobwas/ws v1.4.0/go.mod h1:G3gNqMNtPppf5XUz7O4shetPpcZ1VJ7zt18dlUeakrc=
|
||||
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 h1:DACJavvAHhabrF08vX0COfcOBJRhZ8lUbR+ZWIs0Y5g=
|
||||
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k=
|
||||
github.com/google/pprof v0.0.0-20240927180334-d43a67379298 h1:dMHbguTqGtorivvHTaOnbYp+tFzrw5M9gjkU4lCplgg=
|
||||
github.com/google/pprof v0.0.0-20240927180334-d43a67379298/go.mod h1:vavhavw2zAxS5dIdcRluK6cSGGPlZynqzFM8NdvU144=
|
||||
github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
|
||||
github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
|
||||
github.com/kovetskiy/gopencils v0.0.0-20250404051442-0b776066936a h1:OPt6gCghZXQ/WZpT6EhGkA7v+YMAYzcCb8SPQWmsb/8=
|
||||
github.com/kovetskiy/gopencils v0.0.0-20250404051442-0b776066936a/go.mod h1:gRW37oDEg9LzOHApv31YzxKBICcCmPtDogaImsxZ6xc=
|
||||
github.com/kovetskiy/lorg v1.2.1-0.20240830111423-ba4fe8b6f7c4 h1:2eV8tF1u58dqRJMlFUD/Df26BxcIlGVy71rZHN+aNoI=
|
||||
github.com/kovetskiy/lorg v1.2.1-0.20240830111423-ba4fe8b6f7c4/go.mod h1:p1RuSvyflTF/G4ubeATGurCRKWkULOrN/4PUAEFRq0s=
|
||||
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
|
||||
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
|
||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||
github.com/ledongthuc/pdf v0.0.0-20220302134840-0c2507a12d80 h1:6Yzfa6GP0rIo/kULo2bwGEkFvCePZ3qHDDTC3/J9Swo=
|
||||
github.com/ledongthuc/pdf v0.0.0-20220302134840-0c2507a12d80/go.mod h1:imJHygn/1yfhB7XSJJKlFZKl/J+dCPAknuiaGOshXAs=
|
||||
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
|
||||
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
|
||||
github.com/mazznoer/csscolorparser v0.1.5 h1:Wr4uNIE+pHWN3TqZn2SGpA2nLRG064gB7WdSfSS5cz4=
|
||||
github.com/mazznoer/csscolorparser v0.1.5/go.mod h1:OQRVvgCyHDCAquR1YWfSwwaDcM0LhnSffGnlbOew/3I=
|
||||
github.com/orisano/pixelmatch v0.0.0-20220722002657-fb0b55479cde h1:x0TT0RDC7UhAVbbWWBzr41ElhJx5tXPWkIHA2HWPRuw=
|
||||
github.com/orisano/pixelmatch v0.0.0-20220722002657-fb0b55479cde/go.mod h1:nZgzbfBr3hhjoZnS66nKrHmduYNpc34ny7RK4z5/HM0=
|
||||
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/reconquest/cog v0.0.0-20190411204516-c6b6b90dcd40 h1:zUobRDLI5W17xv1Y5Z6jgBGdrh6JQgzxpkgvI9ecbzU=
|
||||
github.com/reconquest/cog v0.0.0-20190411204516-c6b6b90dcd40/go.mod h1:IYiTfZ8/UKTz5svWOy+2ri5NuS+pJ3ynXMg8V0IHkXU=
|
||||
github.com/reconquest/karma-go v0.0.0-20190930125156-7b5c19ad6eab h1:NPOguOXVFBXRaBt4uPQLhvLkNwQgT0M+PZwxBGxgrrQ=
|
||||
github.com/reconquest/karma-go v0.0.0-20190930125156-7b5c19ad6eab/go.mod h1:oTXKs9J7KQ1gCpnvSwCbH9vlvELZFfUSbEbrr2ABeo0=
|
||||
github.com/reconquest/cog v0.0.0-20240830113510-c7ba12d0beeb h1:hJ1ExqE2lTMgTRmjmSiC2hm+sMXCCjjbyiGo3irbEW8=
|
||||
github.com/reconquest/cog v0.0.0-20240830113510-c7ba12d0beeb/go.mod h1:n+lvvNLeoQmYVvYTFGCtLvoyD9Wz46RO3yCk6GKyZ/4=
|
||||
github.com/reconquest/karma-go v1.5.0 h1:Chn4LtauwnvKfz13ZbmGNrRLKO1NciExHQSOBOsQqt4=
|
||||
github.com/reconquest/karma-go v1.5.0/go.mod h1:52XRXXa2ec/VNrlCirwasdJfNmjI1O87q098gmqILh0=
|
||||
github.com/reconquest/pkg v1.3.1-0.20240901105413-68c2adbf2b64 h1:OBNLiZay5PYLmGRXGIMEgWSIgbSjOj8nHZxqwLbSsF4=
|
||||
github.com/reconquest/pkg v1.3.1-0.20240901105413-68c2adbf2b64/go.mod h1:r1Z1JNh3in9xLWbhv5u7cdox9vvGFjlKp89VI10Jrdo=
|
||||
github.com/reconquest/regexputil-go v0.0.0-20160905154124-38573e70c1f4 h1:bcDXaTFC09IIg13Z8gfQHk4gSu001ET7ssW/wKRvPzg=
|
||||
github.com/reconquest/regexputil-go v0.0.0-20160905154124-38573e70c1f4/go.mod h1:OI1di2iiFSwX3D70iZjzdmCPPfssjOl+HX40tI3VaXA=
|
||||
github.com/russross/blackfriday v1.5.2 h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo=
|
||||
github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4=
|
||||
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
|
||||
github.com/zazab/zhash v0.0.0-20170403032415-ad45b89afe7a h1:8gf6DUwu6F8Fh3rN8Ei9TM66KkWrNC04FP3HlcbxPuQ=
|
||||
github.com/zazab/zhash v0.0.0-20170403032415-ad45b89afe7a/go.mod h1:P+yVThXQrjx7yGmgsdI4WQ/XDDmcyBMZzK1b39TXteA=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
|
||||
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
||||
github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8=
|
||||
github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
|
||||
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
|
||||
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||
github.com/urfave/cli-altsrc/v3 v3.0.1 h1:v+gHk59syLk8ao9rYybZs43+D5ut/gzj0omqQ1XYl8k=
|
||||
github.com/urfave/cli-altsrc/v3 v3.0.1/go.mod h1:8UtsKKcxFVzvaoySFPfvQOk413T+IXJhaCWyyoPW3yM=
|
||||
github.com/urfave/cli/v3 v3.3.3 h1:byCBaVdIXuLPIDm5CYZRVG6NvT7tv1ECqdU4YzlEa3I=
|
||||
github.com/urfave/cli/v3 v3.3.3/go.mod h1:FJSKtM/9AiiTOJL4fJ6TbMUkxBXn7GO9guZqoZtpYpo=
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
github.com/yuin/goldmark v1.7.12 h1:YwGP/rrea2/CnCtUHgjuolG/PnMxdQtPMO5PvaE2/nY=
|
||||
github.com/yuin/goldmark v1.7.12/go.mod h1:ip/1k0VRfGynBgxOz0yCqHrbZXhcjxyuS66Brc7iBKg=
|
||||
github.com/zazab/zhash v0.0.0-20221031090444-2b0d50417446 h1:75pcOSsb40+ub185cJI7g5uykl9Uu76rD5ONzK/4s40=
|
||||
github.com/zazab/zhash v0.0.0-20221031090444-2b0d50417446/go.mod h1:NtepZ8TEXErPsmQDMUoN72f8aIy4+xNinSJ3f1giess=
|
||||
go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
|
||||
go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 h1:e66Fs6Z+fZTbFBAxKfP3PALWBtpfqks2bwGcexMxgtk=
|
||||
golang.org/x/exp v0.0.0-20240909161429-701f63a606c0/go.mod h1:2TbTHSBQa924w8M6Xs1QcRcFwyucIwBGpK1p2f1YFFY=
|
||||
golang.org/x/image v0.20.0 h1:7cVCUjQwfL18gyBJOmYvptfSHS8Fb3YUDtfLIZ7Nbpw=
|
||||
golang.org/x/image v0.20.0/go.mod h1:0a88To4CYVBAHp5FXJm8o7QbUl37Vd85ply1vyD8auM=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||
golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns=
|
||||
golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw=
|
||||
golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw=
|
||||
golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||
golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
||||
golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M=
|
||||
golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
||||
golang.org/x/tools v0.34.0 h1:qIpSLOxeCYGg9TrcJokLBG4KFA6d795g0xkBkiESGlo=
|
||||
golang.org/x/tools v0.34.0/go.mod h1:pAP9OwEaY1CAW3HOmg3hLZC5Z0CCmzjAF2UQMSqNARg=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da h1:noIWHXmPHxILtqtCOPIhSt0ABwskkZKjD3bXGnZGpNY=
|
||||
golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90=
|
||||
gonum.org/v1/plot v0.14.0 h1:+LBDVFYwFe4LHhdP8coW6296MBEY4nQ+Y4vuUpJopcE=
|
||||
gonum.org/v1/plot v0.14.0/go.mod h1:MLdR9424SJed+5VqC6MsouEpig9pZX2VZ57H9ko2bXU=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10=
|
||||
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
|
||||
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
|
||||
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
oss.terrastruct.com/d2 v0.7.0 h1:nFTap/RgAQtm1aAmUOOJxO8vgSCj3SLILcOkStnyHeI=
|
||||
oss.terrastruct.com/d2 v0.7.0/go.mod h1:QseS95MrwfSRDJcFmVpBBIKuPIr8/RUoR3526QQ3rVk=
|
||||
oss.terrastruct.com/util-go v0.0.0-20250213174338-243d8661088a h1:UXF/Z9i9tOx/wqGUOn/T12wZeez1Gg0sAVKKl7YUDwM=
|
||||
oss.terrastruct.com/util-go v0.0.0-20250213174338-243d8661088a/go.mod h1:eMWv0sOtD9T2RUl90DLWfuShZCYp4NrsqNpI8eqO6U4=
|
||||
rsc.io/pdf v0.1.1 h1:k1MczvYDUvJBe93bYd7wrZLLUEcLZAuF824/I4e5Xr4=
|
||||
rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=
|
||||
|
@@ -3,31 +3,35 @@ package includes
import (
    "bytes"
    "fmt"
    "io/ioutil"
    "os"
    "path/filepath"
    "regexp"
    "strings"
    "text/template"

    "gopkg.in/yaml.v2"
    "gopkg.in/yaml.v3"

    "github.com/kovetskiy/mark/pkg/log"
    "github.com/reconquest/karma-go"
    "github.com/reconquest/pkg/log"
)

var (
    reIncludeDirective = regexp.MustCompile(
        // <!-- Include: <template path>
        //      <optional yaml data> -->

        `(?s)` + // dot capture newlines
            /**/ `<!--\s*Include:\s*(?P<template>\S+)\s*` +
            /* */ `(\n(?P<config>.*?))?-->`,
    )
// <!-- Include: <template path>
//
//      (Delims: (none | "<left>","<right>"))?
//      <optional yaml data> -->
var reIncludeDirective = regexp.MustCompile(
    `(?s)` +
        `<!--\s*Include:\s*(?P<template>.+?)\s*` +
        `(?:\n\s*Delims:\s*(?:(none|"(?P<left>.*?)"\s*,\s*"(?P<right>.*?)")))?\s*` +
        `(?:\n(?P<config>.*?))?-->`,
)
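A quick, self-contained sketch of what the new directive pattern captures. The regexp below is copied verbatim from the new reIncludeDirective above; the directive text itself (the footer.md path, the [[ ]] delimiters, and the Author key) is made up for illustration.

```go
package main

import (
	"fmt"
	"regexp"
)

// Same pattern as the new reIncludeDirective above.
var reIncludeDirective = regexp.MustCompile(
	`(?s)` +
		`<!--\s*Include:\s*(?P<template>.+?)\s*` +
		`(?:\n\s*Delims:\s*(?:(none|"(?P<left>.*?)"\s*,\s*"(?P<right>.*?)")))?\s*` +
		`(?:\n(?P<config>.*?))?-->`,
)

func main() {
	// An illustrative Include directive with custom delimiters and YAML data.
	directive := "<!-- Include: footer.md\n" +
		"     Delims: \"[[\", \"]]\"\n" +
		"     Author: Jane -->"

	match := reIncludeDirective.FindStringSubmatch(directive)
	for i, name := range reIncludeDirective.SubexpNames() {
		if name != "" {
			fmt.Printf("%-8s = %q\n", name, match[i])
		}
	}
	// Roughly: template = "footer.md", left = "[[", right = "]]",
	// and config holds the trailing YAML ("Author: Jane").
}
```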

func LoadTemplate(
    base string,
    includePath string,
    path string,
    left string,
    right string,
    templates *template.Template,
) (*template.Template, error) {
    var (
@@ -41,17 +45,28 @@ func LoadTemplate(

    var body []byte

    body, err := ioutil.ReadFile(path)
    body, err := os.ReadFile(filepath.Join(base, path))
    if err != nil {
        err = facts.Format(
            err,
            "unable to read template file",
        )
        if includePath != "" {
            body, err = os.ReadFile(filepath.Join(includePath, path))
        }
        if err != nil {
            err = facts.Format(
                err,
                "unable to read template file",
            )
            return nil, err
        }

        return nil, err
    }

    templates, err = templates.New(name).Parse(string(body))
    body = bytes.ReplaceAll(
        body,
        []byte("\r\n"),
        []byte("\n"),
    )

    templates, err = templates.New(name).Delims(left, right).Parse(string(body))
    if err != nil {
        err = facts.Format(
            err,
@@ -65,6 +80,8 @@ func LoadTemplate(
}

func ProcessIncludes(
    base string,
    includePath string,
    contents []byte,
    templates *template.Template,
) (*template.Template, []byte, bool, error) {
@@ -102,12 +119,21 @@ func ProcessIncludes(
        groups := reIncludeDirective.FindSubmatch(spec)

        var (
            path, config = string(groups[1]), groups[2]
            data         = map[string]interface{}{}
            path       = string(groups[1])
            delimsNone = string(groups[2])
            left       = string(groups[3])
            right      = string(groups[4])
            config     = groups[5]
            data       = map[string]interface{}{}

            facts = karma.Describe("path", path)
        )

        if delimsNone == "none" {
            left = "\x00"
            right = "\x01"
        }

        err = yaml.Unmarshal(config, &data)
        if err != nil {
            err = facts.
@@ -122,10 +148,9 @@ func ProcessIncludes(

        log.Tracef(vardump(facts, data), "including template %q", path)

        templates, err = LoadTemplate(path, templates)
        templates, err = LoadTemplate(base, includePath, path, left, right, templates)
        if err != nil {
            err = facts.Format(err, "unable to load template")

            return nil
        }
@ -7,11 +7,11 @@ import (
|
||||
"strings"
|
||||
"text/template"
|
||||
|
||||
"github.com/kovetskiy/mark/pkg/log"
|
||||
"github.com/kovetskiy/mark/pkg/mark/includes"
|
||||
"github.com/kovetskiy/mark/includes"
|
||||
"github.com/reconquest/karma-go"
|
||||
"github.com/reconquest/pkg/log"
|
||||
"github.com/reconquest/regexputil-go"
|
||||
"gopkg.in/yaml.v2"
|
||||
"gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
var reMacroDirective = regexp.MustCompile(
|
||||
@ -21,7 +21,7 @@ var reMacroDirective = regexp.MustCompile(
|
||||
|
||||
`(?s)` + // dot capture newlines
|
||||
/**/ `<!--\s*Macro:\s*(?P<expr>[^\n]+)\n` +
|
||||
/* */ `\s*Template:\s*(?P<template>\S+)\s*` +
|
||||
/* */ `\s*Template:\s*(?P<template>.+?)\s*` +
|
||||
/* */ `(?P<config>\n.*?)?-->`,
|
||||
)
|
||||
|
||||
@ -105,6 +105,8 @@ func (macro *Macro) configure(node interface{}, groups [][]byte) interface{} {
|
||||
}
|
||||
|
||||
func ExtractMacros(
|
||||
base string,
|
||||
includePath string,
|
||||
contents []byte,
|
||||
templates *template.Template,
|
||||
) ([]Macro, []byte, error) {
|
||||
@ -123,18 +125,55 @@ func ExtractMacros(
|
||||
|
||||
var (
|
||||
expr = regexputil.Subexp(reMacroDirective, groups, "expr")
|
||||
template = regexputil.Subexp(reMacroDirective, groups, "template")
|
||||
config = regexputil.Subexp(reMacroDirective, groups, "config")
|
||||
|
||||
macro Macro
|
||||
template = regexputil.Subexp(
|
||||
reMacroDirective,
|
||||
groups,
|
||||
"template",
|
||||
)
|
||||
config = regexputil.Subexp(reMacroDirective, groups, "config")
|
||||
)
|
||||
|
||||
macro.Template, err = includes.LoadTemplate(template, templates)
|
||||
var macro Macro
|
||||
|
||||
if err != nil {
|
||||
err = karma.Format(err, "unable to load template")
|
||||
if strings.HasPrefix(template, "#") {
|
||||
cfg := map[string]interface{}{}
|
||||
|
||||
return nil
|
||||
err = yaml.Unmarshal([]byte(config), &cfg)
|
||||
if err != nil {
|
||||
err = karma.Format(
|
||||
err,
|
||||
"unable to unmarshal macros config template",
|
||||
)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
body, ok := cfg[template[1:]].(string)
|
||||
if !ok {
|
||||
err = fmt.Errorf(
|
||||
"the template config doesn't have '%s' field",
|
||||
template[1:],
|
||||
)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
macro.Template, err = templates.New(template).Parse(body)
|
||||
if err != nil {
|
||||
err = karma.Format(
|
||||
err,
|
||||
"unable to parse template",
|
||||
)
|
||||
|
||||
return nil
|
||||
}
|
||||
} else {
|
||||
macro.Template, err = includes.LoadTemplate(base, includePath, template, "{{", "}}", templates)
|
||||
if err != nil {
|
||||
err = karma.Format(err, "unable to load template")
|
||||
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
facts := karma.
|
332  main.go
@@ -1,331 +1,33 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"context"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/kovetskiy/godocs"
|
||||
"github.com/kovetskiy/mark/pkg/confluence"
|
||||
"github.com/kovetskiy/mark/pkg/log"
|
||||
"github.com/kovetskiy/mark/pkg/mark"
|
||||
"github.com/kovetskiy/mark/pkg/mark/includes"
|
||||
"github.com/kovetskiy/mark/pkg/mark/macro"
|
||||
"github.com/kovetskiy/mark/pkg/mark/stdlib"
|
||||
"github.com/reconquest/karma-go"
|
||||
"github.com/kovetskiy/mark/util"
|
||||
"github.com/reconquest/pkg/log"
|
||||
"github.com/urfave/cli/v3"
|
||||
)
|
||||
|
||||
const (
|
||||
usage = `mark - tool for updating Atlassian Confluence pages from markdown.
|
||||
|
||||
This is very usable if you store documentation to your orthodox software in git
|
||||
repository and don't want to do a handjob with updating Confluence page using
|
||||
fucking tinymce wysiwyg enterprise core editor.
|
||||
|
||||
You can store a user credentials in the configuration file, which should be
|
||||
located in ~/.config/mark with following format:
|
||||
username = "smith"
|
||||
password = "matrixishere"
|
||||
base_url = "http://confluence.local"
|
||||
where 'smith' it's your username, 'matrixishere' it's your password and
|
||||
'http://confluence.local' is base URL for your Confluence instance.
|
||||
|
||||
Mark understands extended file format, which, still being valid markdown,
|
||||
contains several metadata headers, which can be used to locate page inside
|
||||
Confluence instance and update it accordingly.
|
||||
|
||||
File in extended format should follow specification:
|
||||
|
||||
<!-- Space: <space key> -->
|
||||
<!-- Parent: <parent 1> -->
|
||||
<!-- Parent: <parent 2> -->
|
||||
<!-- Title: <title> -->
|
||||
|
||||
<page contents>
|
||||
|
||||
There can be any number of 'Parent' headers, if mark can't find specified
|
||||
parent by title, it will be created.
|
||||
|
||||
Also, optional following headers are supported:
|
||||
|
||||
* <!-- Layout: (article|plain) -->
|
||||
|
||||
- (default) article: content will be put in narrow column for ease of
|
||||
reading;
|
||||
- plain: content will fill all page;
|
||||
|
||||
Mark supports Go templates, which can be included into article by using path
|
||||
to the template relative to current working dir, e.g.:
|
||||
|
||||
<!-- Include: <path> -->
|
||||
|
||||
Templates may accept configuration data in YAML format which immediately
|
||||
follows include tag:
|
||||
|
||||
<!-- Include: <path>
|
||||
<yaml-data> -->
|
||||
|
||||
Mark also supports macro definitions, which are defined as regexps which will
|
||||
be replaced with specified template:
|
||||
|
||||
<!-- Macro: <regexp>
|
||||
Template: <path>
|
||||
<yaml-data> -->
|
||||
|
||||
Capture groups can be defined in the macro's <regexp> which can be later
|
||||
referenced in the <yaml-data> using ${<number>} syntax, where <number> is
|
||||
number of a capture group in regexp (${0} is used for entire regexp match), for
|
||||
example:
|
||||
|
||||
<!-- Macro: MYJIRA-\d+
|
||||
Template: ac:jira:ticket
|
||||
Ticket: ${0} -->
|
||||
|
||||
By default, mark provides several built-in templates and macros:
|
||||
|
||||
* template 'ac:status' to include badge-like text, which accepts following
|
||||
parameters:
|
||||
- Title: text to display in the badge
|
||||
- Color: color to use as background/border for badge
|
||||
- Grey
|
||||
- Yellow
|
||||
- Red
|
||||
- Blue
|
||||
- Subtle: specify to fill badge with background or not
|
||||
- true
|
||||
- false
|
||||
|
||||
See: https://confluence.atlassian.com/conf59/status-macro-792499207.html
|
||||
|
||||
* template 'ac:jira:ticket' to include JIRA ticket link. Parameters:
|
||||
- Ticket: Jira ticket number like BUGS-123.
|
||||
|
||||
* macro '@{...}' to mention user by name specified in the braces.
|
||||
|
||||
Usage:
|
||||
mark [options] [-u <username>] [-p <token>] [-k] [-l <url>] -f <file>
|
||||
mark [options] [-u <username>] [-p <password>] [-k] [-b <url>] -f <file>
|
||||
mark [options] [-u <username>] [-p <password>] [-k] [-n] -c <file>
|
||||
mark -v | --version
|
||||
mark -h | --help
|
||||
|
||||
Options:
|
||||
-u <username> Use specified username for updating Confluence page.
|
||||
-p <token> Use specified token for updating Confluence page.
|
||||
-l <url> Edit specified Confluence page.
|
||||
If -l is not specified, file should contain metadata (see
|
||||
above).
|
||||
-b --base-url <url> Base URL for Confluence.
|
||||
Alternative option for base_url config field.
|
||||
-f <file> Use specified markdown file for converting to html.
|
||||
-k Lock page editing to current user only to prevent accidental
|
||||
manual edits over Confluence Web UI.
|
||||
--dry-run Resolve page and ancestry, show resulting HTML and exit.
|
||||
--compile-only Show resulting HTML and don't update Confluence page content.
|
||||
--debug Enable debug logs.
|
||||
--trace Enable trace logs.
|
||||
-h --help Show this screen and call 911.
|
||||
-v --version Show version.
|
||||
`
|
||||
version = "13.0.0"
|
||||
usage = "A tool for updating Atlassian Confluence pages from markdown."
|
||||
description = `Mark is a tool to update Atlassian Confluence pages from markdown. Documentation is available here: https://github.com/kovetskiy/mark`
|
||||
)
|
||||
|
||||
func main() {
|
||||
args, err := godocs.Parse(usage, "mark 2.0", godocs.UsePager)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
cmd := &cli.Command{
|
||||
Name: "mark",
|
||||
Usage: usage,
|
||||
Description: description,
|
||||
Version: version,
|
||||
Flags: util.Flags,
|
||||
EnableShellCompletion: true,
|
||||
HideHelpCommand: true,
|
||||
Action: util.RunMark,
|
||||
}
|
||||
|
||||
var (
|
||||
targetFile, _ = args["-f"].(string)
|
||||
compileOnly = args["--compile-only"].(bool)
|
||||
dryRun = args["--dry-run"].(bool)
|
||||
editLock = args["-k"].(bool)
|
||||
)
|
||||
|
||||
log.Init(args["--debug"].(bool), args["--trace"].(bool))
|
||||
|
||||
config, err := LoadConfig(filepath.Join(os.Getenv("HOME"), ".config/mark"))
|
||||
if err != nil {
|
||||
if err := cmd.Run(context.TODO(), os.Args); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
creds, err := GetCredentials(args, config)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
api := confluence.NewAPI(creds.BaseURL, creds.Username, creds.Password)
|
||||
|
||||
markdown, err := ioutil.ReadFile(targetFile)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
meta, markdown, err := mark.ExtractMeta(markdown)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
stdlib, err := stdlib.New(api)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
templates := stdlib.Templates
|
||||
|
||||
var recurse bool
|
||||
|
||||
for {
|
||||
templates, markdown, recurse, err = includes.ProcessIncludes(
|
||||
markdown,
|
||||
templates,
|
||||
)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
if !recurse {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
macros, markdown, err := macro.ExtractMacros(markdown, templates)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
macros = append(macros, stdlib.Macros...)
|
||||
|
||||
for _, macro := range macros {
|
||||
markdown, err = macro.Apply(markdown)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
if dryRun {
|
||||
compileOnly = true
|
||||
|
||||
_, _, err := mark.ResolvePage(dryRun, api, meta)
|
||||
if err != nil {
|
||||
log.Fatalf(err, "unable to resolve page location")
|
||||
}
|
||||
}
|
||||
|
||||
if compileOnly {
|
||||
fmt.Println(mark.CompileMarkdown(markdown, stdlib))
|
||||
os.Exit(0)
|
||||
}
|
||||
|
||||
if creds.PageID != "" && meta != nil {
|
||||
log.Warning(
|
||||
`specified file contains metadata, ` +
|
||||
`but it will be ignored due specified command line URL`,
|
||||
)
|
||||
|
||||
meta = nil
|
||||
}
|
||||
|
||||
if creds.PageID == "" && meta == nil {
|
||||
log.Fatal(
|
||||
`specified file doesn't contain metadata ` +
|
||||
`and URL is not specified via command line ` +
|
||||
`or doesn't contain pageId GET-parameter`,
|
||||
)
|
||||
}
|
||||
|
||||
var target *confluence.PageInfo
|
||||
|
||||
if meta != nil {
|
||||
parent, page, err := mark.ResolvePage(dryRun, api, meta)
|
||||
if err != nil {
|
||||
log.Fatalf(
|
||||
karma.Describe("title", meta.Title).Reason(err),
|
||||
"unable to resolve page",
|
||||
)
|
||||
}
|
||||
|
||||
if page == nil {
|
||||
page, err = api.CreatePage(meta.Space, parent, meta.Title, ``)
|
||||
if err != nil {
|
||||
log.Fatalf(
|
||||
err,
|
||||
"can't create page %q",
|
||||
meta.Title,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
target = page
|
||||
} else {
|
||||
if creds.PageID == "" {
|
||||
log.Fatalf(nil, "URL should provide 'pageId' GET-parameter")
|
||||
}
|
||||
|
||||
page, err := api.GetPageByID(creds.PageID)
|
||||
if err != nil {
|
||||
log.Fatalf(err, "unable to retrieve page by id")
|
||||
}
|
||||
|
||||
target = page
|
||||
}
|
||||
|
||||
attaches, err := mark.ResolveAttachments(api, target, ".", meta.Attachments)
|
||||
if err != nil {
|
||||
log.Fatalf(err, "unable to create/update attachments")
|
||||
}
|
||||
|
||||
markdown = mark.CompileAttachmentLinks(markdown, attaches)
|
||||
|
||||
html := mark.CompileMarkdown(markdown, stdlib)
|
||||
|
||||
{
|
||||
var buffer bytes.Buffer
|
||||
|
||||
err := stdlib.Templates.ExecuteTemplate(
|
||||
&buffer,
|
||||
"ac:layout",
|
||||
struct {
|
||||
Layout string
|
||||
Body string
|
||||
}{
|
||||
Layout: meta.Layout,
|
||||
Body: html,
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
html = buffer.String()
|
||||
}
|
||||
|
||||
err = api.UpdatePage(target, html)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
if editLock {
|
||||
log.Infof(
|
||||
nil,
|
||||
`edit locked on page %q by user %q to prevent manual edits`,
|
||||
target.Title,
|
||||
creds.Username,
|
||||
)
|
||||
|
||||
err := api.RestrictPageUpdates(
|
||||
target,
|
||||
creds.Username,
|
||||
)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
fmt.Printf(
|
||||
"page successfully updated: %s\n",
|
||||
creds.BaseURL+target.Links.Full,
|
||||
)
|
||||
}
|
||||
|
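The rewritten main() above delegates everything to a urfave/cli v3 Command. Below is a minimal sketch of that wiring, assuming the v3 API the new entrypoint relies on (Command, Flags, Action, Run); the "files" flag and program name here are illustrative, not mark's real flag set.

```go
package main

import (
	"context"
	"fmt"
	"os"

	"github.com/urfave/cli/v3"
)

func main() {
	cmd := &cli.Command{
		Name:  "mark-sketch",
		Usage: "demo of the cli/v3 command wiring",
		Flags: []cli.Flag{
			// Illustrative flag; mark's actual flags live in util.Flags.
			&cli.StringFlag{Name: "files", Usage: "glob of markdown files"},
		},
		Action: func(ctx context.Context, cmd *cli.Command) error {
			fmt.Println("files:", cmd.String("files"))
			return nil
		},
	}

	if err := cmd.Run(context.Background(), os.Args); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}
```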
51  main_test.go (new file)
@@ -0,0 +1,51 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/kovetskiy/mark/util"
|
||||
"github.com/reconquest/pkg/log"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/urfave/cli/v3"
|
||||
)
|
||||
|
||||
func Test_setLogLevel(t *testing.T) {
|
||||
type args struct {
|
||||
lvl string
|
||||
}
|
||||
tests := map[string]struct {
|
||||
args args
|
||||
want log.Level
|
||||
expectedErr string
|
||||
}{
|
||||
"invalid": {args: args{lvl: "INVALID"}, want: log.LevelInfo, expectedErr: "unknown log level: INVALID"},
|
||||
"empty": {args: args{lvl: ""}, want: log.LevelInfo, expectedErr: "unknown log level: "},
|
||||
"info": {args: args{lvl: log.LevelInfo.String()}, want: log.LevelInfo},
|
||||
"debug": {args: args{lvl: log.LevelDebug.String()}, want: log.LevelDebug},
|
||||
"trace": {args: args{lvl: log.LevelTrace.String()}, want: log.LevelTrace},
|
||||
"warning": {args: args{lvl: log.LevelWarning.String()}, want: log.LevelWarning},
|
||||
"error": {args: args{lvl: log.LevelError.String()}, want: log.LevelError},
|
||||
"fatal": {args: args{lvl: log.LevelFatal.String()}, want: log.LevelFatal},
|
||||
}
|
||||
for name, tt := range tests {
|
||||
t.Run(name, func(t *testing.T) {
|
||||
cmd := &cli.Command{
|
||||
Name: "test",
|
||||
Flags: []cli.Flag{
|
||||
&cli.StringFlag{
|
||||
Name: "log-level",
|
||||
Value: tt.args.lvl,
|
||||
Usage: "set the log level. Possible values: TRACE, DEBUG, INFO, WARNING, ERROR, FATAL.",
|
||||
},
|
||||
},
|
||||
}
|
||||
err := util.SetLogLevel(cmd)
|
||||
if tt.expectedErr != "" {
|
||||
assert.EqualError(t, err, tt.expectedErr)
|
||||
} else {
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, tt.want, log.GetLevel())
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
103  markdown/markdown.go (new file)
@@ -0,0 +1,103 @@
|
||||
package mark
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
|
||||
"github.com/kovetskiy/mark/attachment"
|
||||
cparser "github.com/kovetskiy/mark/parser"
|
||||
crenderer "github.com/kovetskiy/mark/renderer"
|
||||
"github.com/kovetskiy/mark/stdlib"
|
||||
"github.com/kovetskiy/mark/types"
|
||||
"github.com/reconquest/pkg/log"
|
||||
"github.com/yuin/goldmark"
|
||||
|
||||
"github.com/yuin/goldmark/extension"
|
||||
"github.com/yuin/goldmark/parser"
|
||||
"github.com/yuin/goldmark/renderer"
|
||||
"github.com/yuin/goldmark/renderer/html"
|
||||
"github.com/yuin/goldmark/util"
|
||||
)
|
||||
|
||||
// Renderer renders anchor [Node]s.
|
||||
type ConfluenceExtension struct {
|
||||
html.Config
|
||||
Stdlib *stdlib.Lib
|
||||
Path string
|
||||
MarkConfig types.MarkConfig
|
||||
Attachments []attachment.Attachment
|
||||
}
|
||||
|
||||
// NewConfluenceRenderer creates a new instance of the ConfluenceRenderer
|
||||
func NewConfluenceExtension(stdlib *stdlib.Lib, path string, cfg types.MarkConfig) *ConfluenceExtension {
|
||||
return &ConfluenceExtension{
|
||||
Config: html.NewConfig(),
|
||||
Stdlib: stdlib,
|
||||
Path: path,
|
||||
MarkConfig: cfg,
|
||||
Attachments: []attachment.Attachment{},
|
||||
}
|
||||
}
|
||||
|
||||
func (c *ConfluenceExtension) Attach(a attachment.Attachment) {
|
||||
c.Attachments = append(c.Attachments, a)
|
||||
}
|
||||
|
||||
func (c *ConfluenceExtension) Extend(m goldmark.Markdown) {
|
||||
|
||||
m.Renderer().AddOptions(renderer.WithNodeRenderers(
|
||||
util.Prioritized(crenderer.NewConfluenceTextRenderer(c.MarkConfig.StripNewlines), 100),
|
||||
util.Prioritized(crenderer.NewConfluenceBlockQuoteRenderer(), 100),
|
||||
util.Prioritized(crenderer.NewConfluenceCodeBlockRenderer(c.Stdlib, c.Path), 100),
|
||||
util.Prioritized(crenderer.NewConfluenceFencedCodeBlockRenderer(c.Stdlib, c, c.MarkConfig), 100),
|
||||
util.Prioritized(crenderer.NewConfluenceHTMLBlockRenderer(c.Stdlib), 100),
|
||||
util.Prioritized(crenderer.NewConfluenceHeadingRenderer(c.MarkConfig.DropFirstH1), 100),
|
||||
util.Prioritized(crenderer.NewConfluenceImageRenderer(c.Stdlib, c, c.Path), 100),
|
||||
util.Prioritized(crenderer.NewConfluenceParagraphRenderer(), 100),
|
||||
util.Prioritized(crenderer.NewConfluenceLinkRenderer(), 100),
|
||||
))
|
||||
|
||||
m.Parser().AddOptions(parser.WithInlineParsers(
|
||||
// Must be registered with a higher priority than goldmark's linkParser to make sure goldmark doesn't parse
|
||||
// the <ac:*/> tags.
|
||||
util.Prioritized(cparser.NewConfluenceTagParser(), 199),
|
||||
))
|
||||
}
|
||||
|
||||
func CompileMarkdown(markdown []byte, stdlib *stdlib.Lib, path string, cfg types.MarkConfig) (string, []attachment.Attachment) {
|
||||
log.Tracef(nil, "rendering markdown:\n%s", string(markdown))
|
||||
|
||||
confluenceExtension := NewConfluenceExtension(stdlib, path, cfg)
|
||||
|
||||
converter := goldmark.New(
|
||||
goldmark.WithExtensions(
|
||||
extension.Footnote,
|
||||
extension.DefinitionList,
|
||||
extension.NewTable(
|
||||
extension.WithTableCellAlignMethod(extension.TableCellAlignStyle),
|
||||
),
|
||||
confluenceExtension,
|
||||
extension.GFM,
|
||||
),
|
||||
goldmark.WithParserOptions(
|
||||
parser.WithAutoHeadingID(),
|
||||
),
|
||||
goldmark.WithRendererOptions(
|
||||
html.WithUnsafe(),
|
||||
html.WithXHTML(),
|
||||
))
|
||||
|
||||
ctx := parser.NewContext(parser.WithIDs(&cparser.ConfluenceIDs{Values: map[string]bool{}}))
|
||||
|
||||
var buf bytes.Buffer
|
||||
err := converter.Convert(markdown, &buf, parser.WithContext(ctx))
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
html := buf.Bytes()
|
||||
|
||||
log.Tracef(nil, "rendered markdown to html:\n%s", string(html))
|
||||
|
||||
return string(html), confluenceExtension.Attachments
|
||||
}
|
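A hedged usage sketch of the new CompileMarkdown entry point: it follows the package's own tests by building a stdlib with a nil API client and an empty MarkConfig. The input markdown and the "README.md" path are made up for the example.

```go
package main

import (
	"fmt"

	mark "github.com/kovetskiy/mark/markdown"
	"github.com/kovetskiy/mark/stdlib"
	"github.com/kovetskiy/mark/types"
)

func main() {
	// As in the package tests, a nil Confluence API is enough for compiling.
	lib, err := stdlib.New(nil)
	if err != nil {
		panic(err)
	}

	cfg := types.MarkConfig{
		MermaidProvider: "",
		MermaidScale:    1.0,
		DropFirstH1:     false,
		StripNewlines:   false,
		Features:        []string{},
	}

	html, attachments := mark.CompileMarkdown(
		[]byte("# Title\n\nSome **bold** text.\n"),
		lib, "README.md", cfg,
	)

	fmt.Println(html)
	fmt.Println("attachments:", len(attachments))
}
```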
183  markdown/markdown_test.go (new file)
@@ -0,0 +1,183 @@
|
||||
package mark_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
mark "github.com/kovetskiy/mark/markdown"
|
||||
"github.com/kovetskiy/mark/stdlib"
|
||||
"github.com/kovetskiy/mark/types"
|
||||
"github.com/kovetskiy/mark/util"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/urfave/cli/v3"
|
||||
)
|
||||
|
||||
func loadData(t *testing.T, filename, variant string) ([]byte, string, []byte) {
|
||||
t.Helper()
|
||||
basename := filepath.Base(filename)
|
||||
testname := strings.TrimSuffix(basename, ".md")
|
||||
htmlname := filepath.Join(filepath.Dir(filename), testname+variant+".html")
|
||||
|
||||
markdown, err := os.ReadFile(filename)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
html, err := os.ReadFile(htmlname)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
return markdown, htmlname, html
|
||||
}
|
||||
|
||||
func TestCompileMarkdown(t *testing.T) {
|
||||
_, filename, _, _ := runtime.Caller(0)
|
||||
dir := path.Join(path.Dir(filename), "..")
|
||||
err := os.Chdir(dir)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
test := assert.New(t)
|
||||
|
||||
testcases, err := filepath.Glob("testdata/*.md")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
for _, filename := range testcases {
|
||||
lib, err := stdlib.New(nil)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
markdown, htmlname, html := loadData(t, filename, "")
|
||||
|
||||
cfg := types.MarkConfig{
|
||||
MermaidProvider: "",
|
||||
MermaidScale: 1.0,
|
||||
DropFirstH1: false,
|
||||
StripNewlines: false,
|
||||
Features: []string{},
|
||||
}
|
||||
|
||||
actual, _ := mark.CompileMarkdown(markdown, lib, filename, cfg)
|
||||
test.EqualValues(strings.TrimSuffix(string(html), "\n"), strings.TrimSuffix(actual, "\n"), filename+" vs "+htmlname)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCompileMarkdownDropH1(t *testing.T) {
|
||||
_, filename, _, _ := runtime.Caller(0)
|
||||
dir := path.Join(path.Dir(filename), "..")
|
||||
err := os.Chdir(dir)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
test := assert.New(t)
|
||||
|
||||
testcases, err := filepath.Glob("testdata/*.md")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
for _, filename := range testcases {
|
||||
lib, err := stdlib.New(nil)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
var variant string
|
||||
switch filename {
|
||||
case "testdata/quotes.md", "testdata/header.md":
|
||||
variant = "-droph1"
|
||||
default:
|
||||
variant = ""
|
||||
}
|
||||
markdown, htmlname, html := loadData(t, filename, variant)
|
||||
|
||||
cfg := types.MarkConfig{
|
||||
MermaidProvider: "",
|
||||
MermaidScale: 1.0,
|
||||
DropFirstH1: true,
|
||||
StripNewlines: false,
|
||||
Features: []string{},
|
||||
}
|
||||
|
||||
actual, _ := mark.CompileMarkdown(markdown, lib, filename, cfg)
|
||||
test.EqualValues(strings.TrimSuffix(string(html), "\n"), strings.TrimSuffix(actual, "\n"), filename+" vs "+htmlname)
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
func TestCompileMarkdownStripNewlines(t *testing.T) {
|
||||
_, filename, _, _ := runtime.Caller(0)
|
||||
dir := path.Join(path.Dir(filename), "..")
|
||||
err := os.Chdir(dir)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
test := assert.New(t)
|
||||
|
||||
testcases, err := filepath.Glob("testdata/*.md")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
for _, filename := range testcases {
|
||||
lib, err := stdlib.New(nil)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
var variant string
|
||||
switch filename {
|
||||
case "testdata/quotes.md", "testdata/codes.md", "testdata/newlines.md", "testdata/macro-include.md":
|
||||
variant = "-stripnewlines"
|
||||
default:
|
||||
variant = ""
|
||||
}
|
||||
|
||||
markdown, htmlname, html := loadData(t, filename, variant)
|
||||
|
||||
cfg := types.MarkConfig{
|
||||
MermaidProvider: "",
|
||||
MermaidScale: 1.0,
|
||||
DropFirstH1: false,
|
||||
StripNewlines: true,
|
||||
Features: []string{},
|
||||
}
|
||||
|
||||
actual, _ := mark.CompileMarkdown(markdown, lib, filename, cfg)
|
||||
test.EqualValues(strings.TrimSuffix(string(html), "\n"), strings.TrimSuffix(actual, "\n"), filename+" vs "+htmlname)
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
func TestContinueOnError(t *testing.T) {
|
||||
cmd := &cli.Command{
|
||||
Name: "temp-mark",
|
||||
Usage: "test usage",
|
||||
Description: "mark unit tests",
|
||||
Version: "TEST-VERSION",
|
||||
Flags: util.Flags,
|
||||
EnableShellCompletion: true,
|
||||
HideHelpCommand: true,
|
||||
Action: util.RunMark,
|
||||
}
|
||||
|
||||
filePath := filepath.Join("testdata", "batch-tests", "*.md")
|
||||
argList := []string{
|
||||
"",
|
||||
"--log-level", "INFO",
|
||||
"--compile-only",
|
||||
"--continue-on-error",
|
||||
"--files", filePath,
|
||||
}
|
||||
|
||||
err := cmd.Run(context.TODO(), argList)
|
||||
assert.NoError(t, err, "App should run without errors when continue-on-error is enabled")
|
||||
}
|
55  mermaid/mermaid.go (new file)
@@ -0,0 +1,55 @@
|
||||
package mermaid
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
mermaid "github.com/dreampuf/mermaid.go"
|
||||
"github.com/kovetskiy/mark/attachment"
|
||||
"github.com/reconquest/pkg/log"
|
||||
)
|
||||
|
||||
var renderTimeout = 120 * time.Second
|
||||
|
||||
func ProcessMermaidLocally(title string, mermaidDiagram []byte, scale float64) (attachment.Attachment, error) {
|
||||
ctx, cancel := context.WithTimeout(context.TODO(), renderTimeout)
|
||||
defer cancel()
|
||||
|
||||
log.Debugf(nil, "Setting up Mermaid renderer: %q", title)
|
||||
renderer, err := mermaid.NewRenderEngine(ctx)
|
||||
|
||||
if err != nil {
|
||||
return attachment.Attachment{}, err
|
||||
}
|
||||
|
||||
log.Debugf(nil, "Rendering: %q", title)
|
||||
pngBytes, boxModel, err := renderer.RenderAsScaledPng(string(mermaidDiagram), scale)
|
||||
if err != nil {
|
||||
return attachment.Attachment{}, err
|
||||
}
|
||||
|
||||
checkSum, err := attachment.GetChecksum(bytes.NewReader(mermaidDiagram))
|
||||
log.Debugf(nil, "Checksum: %q -> %s", title, checkSum)
|
||||
|
||||
if err != nil {
|
||||
return attachment.Attachment{}, err
|
||||
}
|
||||
if title == "" {
|
||||
title = checkSum
|
||||
}
|
||||
|
||||
fileName := title + ".png"
|
||||
|
||||
return attachment.Attachment{
|
||||
ID: "",
|
||||
Name: title,
|
||||
Filename: fileName,
|
||||
FileBytes: pngBytes,
|
||||
Checksum: checkSum,
|
||||
Replace: title,
|
||||
Width: strconv.FormatInt(boxModel.Width, 10),
|
||||
Height: strconv.FormatInt(boxModel.Height, 10),
|
||||
}, nil
|
||||
}
|
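A usage sketch for the helper above: render one diagram and write the resulting PNG attachment to disk. The mermaid.go engine renders through a headless browser, so this assumes one is available at runtime; the diagram text and output name are illustrative.

```go
package main

import (
	"fmt"
	"os"

	"github.com/kovetskiy/mark/mermaid"
)

func main() {
	diagram := []byte("graph TD;\n    A-->B;")

	att, err := mermaid.ProcessMermaidLocally("example", diagram, 1.0)
	if err != nil {
		panic(err)
	}

	// att.Filename is "<title>.png"; att.FileBytes holds the rendered image.
	if err := os.WriteFile(att.Filename, att.FileBytes, 0o644); err != nil {
		panic(err)
	}
	fmt.Printf("wrote %s (%sx%s px)\n", att.Filename, att.Width, att.Height)
}
```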
49  mermaid/mermaid_test.go (new file)
@@ -0,0 +1,49 @@
|
||||
package mermaid
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/kovetskiy/mark/attachment"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestExtractMermaidImage(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
markdown []byte
|
||||
scale float64
|
||||
want attachment.Attachment
|
||||
wantErr assert.ErrorAssertionFunc
|
||||
}{
|
||||
{"example", []byte("graph TD;\n A-->B;"), 1.0, attachment.Attachment{
|
||||
// This is only the PNG Magic Header
|
||||
FileBytes: []byte{0x89, 0x50, 0x4e, 0x47, 0xd, 0xa, 0x1a, 0xa},
|
||||
Filename: "example.png",
|
||||
Name: "example",
|
||||
Replace: "example",
|
||||
Checksum: "1743a4f31ab66244591f06c8056e08053b8e0a554eb9a38709af6e9d145ac84f",
|
||||
ID: "",
|
||||
Width: "87",
|
||||
Height: "174",
|
||||
},
|
||||
assert.NoError},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
got, err := ProcessMermaidLocally(tt.name, tt.markdown, tt.scale)
|
||||
if !tt.wantErr(t, err, fmt.Sprintf("processMermaidLocally(%v, %v)", tt.name, string(tt.markdown))) {
|
||||
return
|
||||
}
|
||||
assert.Equal(t, tt.want.Filename, got.Filename, "processMermaidLocally(%v, %v)", tt.name, string(tt.markdown))
|
||||
// We only test for the header as png changes based on system png library
|
||||
assert.Equal(t, tt.want.FileBytes, got.FileBytes[0:8], "processMermaidLocally(%v, %v)", tt.name, string(tt.markdown))
|
||||
assert.Equal(t, tt.want.Name, got.Name, "processMermaidLocally(%v, %v)", tt.name, string(tt.markdown))
|
||||
assert.Equal(t, tt.want.Replace, got.Replace, "processMermaidLocally(%v, %v)", tt.name, string(tt.markdown))
|
||||
assert.Equal(t, tt.want.Checksum, got.Checksum, "processMermaidLocally(%v, %v)", tt.name, string(tt.markdown))
|
||||
assert.Equal(t, tt.want.ID, got.ID, "processMermaidLocally(%v, %v)", tt.name, string(tt.markdown))
|
||||
assert.Equal(t, tt.want.Width, got.Width, "processMermaidLocally(%v, %v)", tt.name, string(tt.markdown))
|
||||
assert.Equal(t, tt.want.Height, got.Height, "processMermaidLocally(%v, %v)", tt.name, string(tt.markdown))
|
||||
})
|
||||
}
|
||||
}
|
199  metadata/metadata.go (new file)
@@ -0,0 +1,199 @@
|
||||
package metadata
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"crypto/sha256"
|
||||
"fmt"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/reconquest/pkg/log"
|
||||
)
|
||||
|
||||
const (
|
||||
HeaderParent = `Parent`
|
||||
HeaderSpace = `Space`
|
||||
HeaderType = `Type`
|
||||
HeaderTitle = `Title`
|
||||
HeaderLayout = `Layout`
|
||||
HeaderEmoji = `Emoji`
|
||||
HeaderAttachment = `Attachment`
|
||||
HeaderLabel = `Label`
|
||||
HeaderInclude = `Include`
|
||||
HeaderSidebar = `Sidebar`
|
||||
ContentAppearance = `Content-Appearance`
|
||||
)
|
||||
|
||||
type Meta struct {
|
||||
Parents []string
|
||||
Space string
|
||||
Type string
|
||||
Title string
|
||||
Layout string
|
||||
Sidebar string
|
||||
Emoji string
|
||||
Attachments []string
|
||||
Labels []string
|
||||
ContentAppearance string
|
||||
}
|
||||
|
||||
const (
|
||||
FullWidthContentAppearance = "full-width"
|
||||
FixedContentAppearance = "fixed"
|
||||
)
|
||||
|
||||
var (
|
||||
reHeaderPatternV2 = regexp.MustCompile(`<!--\s*([^:]+):\s*(.*)\s*-->`)
|
||||
reHeaderPatternMacro = regexp.MustCompile(`<!-- Macro: .*`)
|
||||
)
|
||||
|
||||
func ExtractMeta(data []byte, spaceFromCli string, titleFromH1 bool, parents []string, titleAppendGeneratedHash bool) (*Meta, []byte, error) {
|
||||
var (
|
||||
meta *Meta
|
||||
offset int
|
||||
)
|
||||
|
||||
scanner := bufio.NewScanner(bytes.NewBuffer(data))
|
||||
for scanner.Scan() {
|
||||
line := scanner.Text()
|
||||
|
||||
if err := scanner.Err(); err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
offset += len(line) + 1
|
||||
|
||||
matches := reHeaderPatternV2.FindStringSubmatch(line)
|
||||
if matches == nil {
|
||||
matches = reHeaderPatternMacro.FindStringSubmatch(line)
|
||||
// If we have a match, then we started reading a macro.
|
||||
// We want to keep it in the document for it to be read by ExtractMacros
|
||||
if matches != nil {
|
||||
offset -= len(line) + 1
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
if meta == nil {
|
||||
meta = &Meta{}
|
||||
meta.Type = "page" // Default if not specified
|
||||
meta.ContentAppearance = FullWidthContentAppearance // Default to full-width for backwards compatibility
|
||||
}
|
||||
|
||||
//nolint:staticcheck
|
||||
header := strings.Title(matches[1])
|
||||
|
||||
var value string
|
||||
if len(matches) > 1 {
|
||||
value = strings.TrimSpace(matches[2])
|
||||
}
|
||||
|
||||
switch header {
|
||||
case HeaderParent:
|
||||
meta.Parents = append(meta.Parents, value)
|
||||
|
||||
case HeaderSpace:
|
||||
meta.Space = strings.TrimSpace(value)
|
||||
|
||||
case HeaderType:
|
||||
meta.Type = strings.TrimSpace(value)
|
||||
|
||||
case HeaderTitle:
|
||||
meta.Title = strings.TrimSpace(value)
|
||||
|
||||
case HeaderLayout:
|
||||
meta.Layout = strings.TrimSpace(value)
|
||||
|
||||
case HeaderSidebar:
|
||||
meta.Layout = "article"
|
||||
meta.Sidebar = strings.TrimSpace(value)
|
||||
|
||||
case HeaderEmoji:
|
||||
meta.Emoji = strings.TrimSpace(value)
|
||||
|
||||
case HeaderAttachment:
|
||||
meta.Attachments = append(meta.Attachments, value)
|
||||
|
||||
case HeaderLabel:
|
||||
meta.Labels = append(meta.Labels, value)
|
||||
|
||||
case HeaderInclude:
|
||||
// Includes are parsed by a different func
|
||||
continue
|
||||
|
||||
case ContentAppearance:
|
||||
if strings.TrimSpace(value) == FixedContentAppearance {
|
||||
meta.ContentAppearance = FixedContentAppearance
|
||||
} else {
|
||||
meta.ContentAppearance = FullWidthContentAppearance
|
||||
}
|
||||
|
||||
default:
|
||||
log.Errorf(
|
||||
nil,
|
||||
`encountered unknown header %q line: %#v`,
|
||||
header,
|
||||
line,
|
||||
)
|
||||
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
if titleFromH1 || spaceFromCli != "" {
|
||||
if meta == nil {
|
||||
meta = &Meta{}
|
||||
}
|
||||
|
||||
if meta.Type == "" {
|
||||
meta.Type = "page"
|
||||
}
|
||||
|
||||
if meta.ContentAppearance == "" {
|
||||
meta.ContentAppearance = FullWidthContentAppearance // Default to full-width for backwards compatibility
|
||||
}
|
||||
|
||||
if titleFromH1 && meta.Title == "" {
|
||||
meta.Title = ExtractDocumentLeadingH1(data)
|
||||
}
|
||||
if spaceFromCli != "" && meta.Space == "" {
|
||||
meta.Space = spaceFromCli
|
||||
}
|
||||
}
|
||||
|
||||
if meta == nil {
|
||||
return nil, data, nil
|
||||
}
|
||||
|
||||
// Prepend parent pages that are defined via the cli flag
|
||||
if len(parents) > 0 && parents[0] != "" {
|
||||
meta.Parents = append(parents, meta.Parents...)
|
||||
}
|
||||
|
||||
// deterministically generate a hash from the page's parents, space, and title
|
||||
if titleAppendGeneratedHash {
|
||||
path := strings.Join(append(meta.Parents, meta.Space, meta.Title), "/")
|
||||
pathHash := sha256.Sum256([]byte(path))
|
||||
// postfix is an 8-character hexadecimal string representation of the first 4 out of 32 bytes of the hash
|
||||
meta.Title = fmt.Sprintf("%s - %x", meta.Title, pathHash[0:4])
|
||||
log.Debugf(
|
||||
nil,
|
||||
"appended hash to page title: %s",
|
||||
meta.Title,
|
||||
)
|
||||
}
|
||||
|
||||
return meta, data[offset:], nil
|
||||
}
|
||||
|
||||
// ExtractDocumentLeadingH1 will extract leading H1 heading
|
||||
func ExtractDocumentLeadingH1(markdown []byte) string {
|
||||
h1 := regexp.MustCompile(`#[^#]\s*(.*)\s*\n`)
|
||||
groups := h1.FindSubmatch(markdown)
|
||||
if groups == nil {
|
||||
return ""
|
||||
} else {
|
||||
return string(groups[1])
|
||||
}
|
||||
}
|
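A sketch of the header block that ExtractMeta above consumes and what it returns. The space, parent, title, and label values are made up; the call signature matches the function as defined above (no space from the CLI, no title-from-H1, no parents from flags, no generated hash).

```go
package main

import (
	"fmt"

	"github.com/kovetskiy/mark/metadata"
)

func main() {
	page := []byte(`<!-- Space: DOCS -->
<!-- Parent: Handbook -->
<!-- Title: Release process -->
<!-- Label: release -->

# Release process

body text
`)

	meta, body, err := metadata.ExtractMeta(page, "", false, nil, false)
	if err != nil {
		panic(err)
	}

	// Headers are stripped from the returned body.
	fmt.Println(meta.Space, meta.Title, meta.Parents, meta.Labels)
	fmt.Println(string(body))
}
```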
30  metadata/metadata_test.go (new file)
@@ -0,0 +1,30 @@
|
||||
package metadata
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path"
|
||||
"runtime"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestExtractDocumentLeadingH1(t *testing.T) {
|
||||
_, filename, _, _ := runtime.Caller(0)
|
||||
dir := path.Join(path.Dir(filename), "..")
|
||||
err := os.Chdir(dir)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
filename = "testdata/header.md"
|
||||
|
||||
markdown, err := os.ReadFile(filename)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
actual := ExtractDocumentLeadingH1(markdown)
|
||||
|
||||
assert.Equal(t, "a", actual)
|
||||
}
|
@ -1,12 +1,12 @@
|
||||
package mark
|
||||
package page
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/kovetskiy/mark/pkg/confluence"
|
||||
"github.com/kovetskiy/mark/pkg/log"
|
||||
"github.com/kovetskiy/mark/confluence"
|
||||
"github.com/reconquest/karma-go"
|
||||
"github.com/reconquest/pkg/log"
|
||||
)
|
||||
|
||||
func EnsureAncestry(
|
||||
@ -20,7 +20,7 @@ func EnsureAncestry(
|
||||
rest := ancestry
|
||||
|
||||
for i, title := range ancestry {
|
||||
page, err := api.FindPage(space, title)
|
||||
page, err := api.FindPage(space, title, "page")
|
||||
if err != nil {
|
||||
return nil, karma.Format(
|
||||
err,
|
||||
@ -66,7 +66,7 @@ func EnsureAncestry(
|
||||
|
||||
if !dryRun {
|
||||
for _, title := range rest {
|
||||
page, err := api.CreatePage(space, parent, title, ``)
|
||||
page, err := api.CreatePage(space, "page", parent, title, ``)
|
||||
if err != nil {
|
||||
return nil, karma.Format(
|
||||
err,
|
||||
@ -95,7 +95,7 @@ func ValidateAncestry(
|
||||
space string,
|
||||
ancestry []string,
|
||||
) (*confluence.PageInfo, error) {
|
||||
page, err := api.FindPage(space, ancestry[len(ancestry)-1])
|
||||
page, err := api.FindPage(space, ancestry[len(ancestry)-1], "page")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -104,23 +104,62 @@ func ValidateAncestry(
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
isHomepage := false
|
||||
if len(page.Ancestors) < 1 {
|
||||
return nil, fmt.Errorf(`page %q has no parents`, page.Title)
|
||||
homepage, err := api.FindHomePage(space)
|
||||
if err != nil {
|
||||
return nil, karma.Format(
|
||||
err,
|
||||
"can't obtain home page from space %q",
|
||||
space,
|
||||
)
|
||||
}
|
||||
|
||||
if page.ID == homepage.ID {
|
||||
log.Debugf(nil, "page is homepage for space %q", space)
|
||||
isHomepage = true
|
||||
} else {
|
||||
return nil, fmt.Errorf(`page %q has no parents`, page.Title)
|
||||
}
|
||||
}
|
||||
|
||||
if len(page.Ancestors) < len(ancestry) {
|
||||
return nil, fmt.Errorf(
|
||||
"page %q has fewer parents than specified: %s",
|
||||
page.Title,
|
||||
strings.Join(ancestry, ` > `),
|
||||
)
|
||||
if !isHomepage && len(page.Ancestors) < len(ancestry) {
|
||||
actual := []string{}
|
||||
for _, ancestor := range page.Ancestors {
|
||||
actual = append(actual, ancestor.Title)
|
||||
}
|
||||
|
||||
valid := false
|
||||
|
||||
if len(actual) == len(ancestry)-1 {
|
||||
broken := false
|
||||
for i := 0; i < len(actual); i++ {
|
||||
if actual[i] != ancestry[i] {
|
||||
broken = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if !broken {
|
||||
if ancestry[len(ancestry)-1] == page.Title {
|
||||
valid = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !valid {
|
||||
return nil, karma.Describe("title", page.Title).
|
||||
Describe("actual", strings.Join(actual, " > ")).
|
||||
Describe("expected", strings.Join(ancestry, " > ")).
|
||||
Format(nil, "the page has fewer parents than expected")
|
||||
}
|
||||
}
|
||||
|
||||
for _, parent := range ancestry[:len(ancestry)-1] {
|
||||
found := false
|
||||
|
||||
// skipping root article title
|
||||
for _, ancestor := range page.Ancestors[1:] {
|
||||
for _, ancestor := range page.Ancestors {
|
||||
if ancestor.Title == parent {
|
||||
found = true
|
||||
break
|
||||
@ -130,7 +169,7 @@ func ValidateAncestry(
|
||||
if !found {
|
||||
list := []string{}
|
||||
|
||||
for _, ancestor := range page.Ancestors[1:] {
|
||||
for _, ancestor := range page.Ancestors {
|
||||
list = append(list, ancestor.Title)
|
||||
}
|
||||
|
220  page/link.go (new file)
@@ -0,0 +1,220 @@
|
||||
package page
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"net/url"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
|
||||
"github.com/kovetskiy/mark/confluence"
|
||||
"github.com/kovetskiy/mark/metadata"
|
||||
"github.com/reconquest/karma-go"
|
||||
"github.com/reconquest/pkg/log"
|
||||
"golang.org/x/tools/godoc/util"
|
||||
)
|
||||
|
||||
type LinkSubstitution struct {
|
||||
From string
|
||||
To string
|
||||
}
|
||||
|
||||
type markdownLink struct {
|
||||
full string
|
||||
filename string
|
||||
hash string
|
||||
}
|
||||
|
||||
func ResolveRelativeLinks(
|
||||
api *confluence.API,
|
||||
meta *metadata.Meta,
|
||||
markdown []byte,
|
||||
base string,
|
||||
spaceFromCli string,
|
||||
titleFromH1 bool,
|
||||
parents []string,
|
||||
titleAppendGeneratedHash bool,
|
||||
) ([]LinkSubstitution, error) {
|
||||
matches := parseLinks(string(markdown))
|
||||
|
||||
links := []LinkSubstitution{}
|
||||
for _, match := range matches {
|
||||
log.Tracef(
|
||||
nil,
|
||||
"found a relative link: full=%s filename=%s hash=%s",
|
||||
match.full,
|
||||
match.filename,
|
||||
match.hash,
|
||||
)
|
||||
resolved, err := resolveLink(api, base, match, spaceFromCli, titleFromH1, parents, titleAppendGeneratedHash)
|
||||
if err != nil {
|
||||
return nil, karma.Format(err, "resolve link: %q", match.full)
|
||||
}
|
||||
|
||||
if resolved == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
links = append(links, LinkSubstitution{
|
||||
From: match.full,
|
||||
To: resolved,
|
||||
})
|
||||
}
|
||||
|
||||
return links, nil
|
||||
}
|
||||
|
||||
func resolveLink(
|
||||
api *confluence.API,
|
||||
base string,
|
||||
link markdownLink,
|
||||
spaceFromCli string,
|
||||
titleFromH1 bool,
|
||||
parents []string,
|
||||
titleAppendGeneratedHash bool,
|
||||
) (string, error) {
|
||||
var result string
|
||||
|
||||
if len(link.filename) > 0 {
|
||||
filepath := filepath.Join(base, link.filename)
|
||||
|
||||
log.Tracef(nil, "filepath: %s", filepath)
|
||||
stat, err := os.Stat(filepath)
|
||||
if err != nil {
|
||||
return "", nil
|
||||
}
|
||||
|
||||
if stat.IsDir() {
|
||||
return "", nil
|
||||
}
|
||||
|
||||
linkContents, err := os.ReadFile(filepath)
|
||||
|
||||
if !util.IsText(linkContents) {
|
||||
return "", nil
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return "", karma.Format(err, "read file: %s", filepath)
|
||||
}
|
||||
|
||||
linkContents = bytes.ReplaceAll(
|
||||
linkContents,
|
||||
[]byte("\r\n"),
|
||||
[]byte("\n"),
|
||||
)
|
||||
|
||||
// This helps to determine if found link points to file that's
|
||||
// not markdown or have mark required metadata
|
||||
linkMeta, _, err := metadata.ExtractMeta(linkContents, spaceFromCli, titleFromH1, parents, titleAppendGeneratedHash)
|
||||
if err != nil {
|
||||
log.Errorf(
|
||||
err,
|
||||
"unable to extract metadata from %q; ignoring the relative link",
|
||||
filepath,
|
||||
)
|
||||
|
||||
return "", nil
|
||||
}
|
||||
|
||||
if linkMeta == nil {
|
||||
return "", nil
|
||||
}
|
||||
|
||||
log.Tracef(
|
||||
nil,
|
||||
"extracted metadata: space=%s title=%s",
|
||||
linkMeta.Space,
|
||||
linkMeta.Title,
|
||||
)
|
||||
|
||||
result, err = getConfluenceLink(api, linkMeta.Space, linkMeta.Title)
|
||||
if err != nil {
|
||||
return "", karma.Format(
|
||||
err,
|
||||
"find confluence page: %s / %s / %s",
|
||||
filepath,
|
||||
linkMeta.Space,
|
||||
linkMeta.Title,
|
||||
)
|
||||
}
|
||||
|
||||
if result == "" {
|
||||
return "", nil
|
||||
}
|
||||
}
|
||||
|
||||
if len(link.hash) > 0 {
|
||||
result = result + "#" + link.hash
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func SubstituteLinks(markdown []byte, links []LinkSubstitution) []byte {
|
||||
for _, link := range links {
|
||||
if link.From == link.To {
|
||||
continue
|
||||
}
|
||||
|
||||
log.Tracef(nil, "substitute link: %q -> %q", link.From, link.To)
|
||||
|
||||
markdown = bytes.ReplaceAll(
|
||||
markdown,
|
||||
[]byte(fmt.Sprintf("](%s)", link.From)),
|
||||
[]byte(fmt.Sprintf("](%s)", link.To)),
|
||||
)
|
||||
}
|
||||
|
||||
return markdown
|
||||
}
|
||||
|
||||
func parseLinks(markdown string) []markdownLink {
|
||||
// Matches links but not inline images
|
||||
re := regexp.MustCompile(`[^\!]\[.+\]\((([^\)#]+)?#?([^\)]+)?)\)`)
|
||||
matches := re.FindAllStringSubmatch(markdown, -1)
|
||||
|
||||
links := make([]markdownLink, len(matches))
|
||||
for i, match := range matches {
|
||||
links[i] = markdownLink{
|
||||
full: match[1],
|
||||
filename: match[2],
|
||||
hash: match[3],
|
||||
}
|
||||
}
|
||||
|
||||
return links
|
||||
}
|
||||
|
||||
// getConfluenceLink build (to be) link for Confluence, and tries to verify from
|
||||
// API if there's real link available
|
||||
func getConfluenceLink(
|
||||
api *confluence.API,
|
||||
space, title string,
|
||||
) (string, error) {
|
||||
link := fmt.Sprintf(
|
||||
"%s/display/%s/%s",
|
||||
api.BaseURL,
|
||||
space,
|
||||
url.QueryEscape(title),
|
||||
)
|
||||
|
||||
page, err := api.FindPage(space, title, "page")
|
||||
if err != nil {
|
||||
return "", karma.Format(err, "api: find page")
|
||||
}
|
||||
|
||||
if page != nil {
|
||||
link = api.BaseURL + page.Links.Full
|
||||
}
|
||||
|
||||
linkUrl, err := url.Parse(link)
|
||||
if err != nil {
|
||||
return "", karma.Format(err, "parse URL: %s", link)
|
||||
}
|
||||
// Confluence supports relative links to reference other pages:
|
||||
// https://confluence.atlassian.com/doc/links-776656293.html
|
||||
linkPath := linkUrl.Path
|
||||
return linkPath, nil
|
||||
}
|
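A sketch of the substitution step above. The resolved Confluence path normally comes from ResolveRelativeLinks; here it is hard-coded to keep the example self-contained, and the markdown text is made up.

```go
package main

import (
	"fmt"

	"github.com/kovetskiy/mark/page"
)

func main() {
	markdown := []byte("See [the other page](../docs/other.md#setup) for details.")

	links := []page.LinkSubstitution{
		{
			From: "../docs/other.md#setup",
			// Illustrative resolved path; real values come from the API lookup.
			To: "/display/DOCS/Other+page#setup",
		},
	}

	fmt.Println(string(page.SubstituteLinks(markdown, links)))
	// See [the other page](/display/DOCS/Other+page#setup) for details.
}
```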
53  page/link_test.go (new file)
@@ -0,0 +1,53 @@
|
||||
package page
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestParseLinks(t *testing.T) {
|
||||
markdown := `
|
||||
[example1](../path/to/example.md#second-heading)
|
||||
[example2](../path/to/example.md)
|
||||
[example3](#heading-in-document)
|
||||
[Text link that should be put as attachment](../path/to/example.txt)
|
||||
[Image link that should be put as attachment](../path/to/example.png)
|
||||
[relative link without dots](relative-link-without-dots.md)
|
||||
[relative link without dots but with hash](relative-link-without-dots-but-with-hash.md#hash)
|
||||
[example [example]](example.md)
|
||||
`
|
||||
|
||||
links := parseLinks(markdown)
|
||||
|
||||
assert.Equal(t, "../path/to/example.md#second-heading", links[0].full)
|
||||
assert.Equal(t, "../path/to/example.md", links[0].filename)
|
||||
assert.Equal(t, "second-heading", links[0].hash)
|
||||
|
||||
assert.Equal(t, "../path/to/example.md", links[1].full)
|
||||
assert.Equal(t, "../path/to/example.md", links[1].filename)
|
||||
assert.Equal(t, "", links[1].hash)
|
||||
|
||||
assert.Equal(t, "#heading-in-document", links[2].full)
|
||||
assert.Equal(t, "", links[2].filename)
|
||||
assert.Equal(t, "heading-in-document", links[2].hash)
|
||||
|
||||
assert.Equal(t, "../path/to/example.txt", links[3].full)
|
||||
assert.Equal(t, "../path/to/example.txt", links[3].filename)
|
||||
assert.Equal(t, "", links[3].hash)
|
||||
|
||||
assert.Equal(t, "../path/to/example.png", links[4].full)
|
||||
assert.Equal(t, "../path/to/example.png", links[4].filename)
|
||||
assert.Equal(t, "", links[4].hash)
|
||||
|
||||
assert.Equal(t, "relative-link-without-dots.md", links[5].full)
|
||||
assert.Equal(t, "relative-link-without-dots.md", links[5].filename)
|
||||
assert.Equal(t, "", links[5].hash)
|
||||
|
||||
assert.Equal(t, "relative-link-without-dots-but-with-hash.md#hash", links[6].full)
|
||||
assert.Equal(t, "relative-link-without-dots-but-with-hash.md", links[6].filename)
|
||||
assert.Equal(t, "hash", links[6].hash)
|
||||
|
||||
assert.Equal(t, "example.md", links[7].full)
|
||||
assert.Equal(t, len(links), 8)
|
||||
}
|
@ -1,19 +1,20 @@
|
||||
package mark
|
||||
package page
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"github.com/kovetskiy/mark/pkg/confluence"
|
||||
"github.com/kovetskiy/mark/pkg/log"
|
||||
"github.com/kovetskiy/mark/confluence"
|
||||
"github.com/kovetskiy/mark/metadata"
|
||||
"github.com/reconquest/karma-go"
|
||||
"github.com/reconquest/pkg/log"
|
||||
)
|
||||
|
||||
func ResolvePage(
|
||||
dryRun bool,
|
||||
api *confluence.API,
|
||||
meta *Meta,
|
||||
meta *metadata.Meta,
|
||||
) (*confluence.PageInfo, *confluence.PageInfo, error) {
|
||||
page, err := api.FindPage(meta.Space, meta.Title)
|
||||
page, err := api.FindPage(meta.Space, meta.Title, meta.Type)
|
||||
if err != nil {
|
||||
return nil, nil, karma.Format(
|
||||
err,
|
||||
@ -22,8 +23,35 @@ func ResolvePage(
|
||||
)
|
||||
}
|
||||
|
||||
if meta.Type == "blogpost" {
|
||||
log.Infof(
|
||||
nil,
|
||||
"blog post will be stored as: %s",
|
||||
meta.Title,
|
||||
)
|
||||
|
||||
return nil, page, nil
|
||||
}
|
||||
|
||||
// check to see if home page is in Parents
|
||||
homepage, err := api.FindHomePage(meta.Space)
|
||||
if err != nil {
|
||||
return nil, nil, karma.Format(
|
||||
err,
|
||||
"can't obtain home page from space %q",
|
||||
meta.Space,
|
||||
)
|
||||
}
|
||||
|
||||
skipHomeAncestry := false
|
||||
if len(meta.Parents) > 0 {
|
||||
if homepage.Title == meta.Parents[0] {
|
||||
skipHomeAncestry = true
|
||||
}
|
||||
}
|
||||
|
||||
ancestry := meta.Parents
|
||||
if page != nil {
|
||||
if page != nil && !skipHomeAncestry {
|
||||
ancestry = append(ancestry, page.Title)
|
||||
}
|
||||
|
55  parser/confluenceids.go (new file)
@@ -0,0 +1,55 @@
|
||||
package parser
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/yuin/goldmark/ast"
|
||||
"github.com/yuin/goldmark/util"
|
||||
)
|
||||
|
||||
type ConfluenceIDs struct {
|
||||
Values map[string]bool
|
||||
}
|
||||
|
||||
// https://github.com/yuin/goldmark/blob/d9c03f07f08c2d36f23afe52dda865f05320ac86/parser/parser.go#L75
|
||||
func (s *ConfluenceIDs) Generate(value []byte, kind ast.NodeKind) []byte {
|
||||
value = util.TrimLeftSpace(value)
|
||||
value = util.TrimRightSpace(value)
|
||||
result := []byte{}
|
||||
for i := 0; i < len(value); {
|
||||
v := value[i]
|
||||
l := util.UTF8Len(v)
|
||||
i += int(l)
|
||||
if l != 1 {
|
||||
continue
|
||||
}
|
||||
if util.IsAlphaNumeric(v) || v == '/' || v == '_' || v == '.' {
|
||||
result = append(result, v)
|
||||
} else if util.IsSpace(v) || v == '-' {
|
||||
result = append(result, '-')
|
||||
}
|
||||
}
|
||||
if len(result) == 0 {
|
||||
if kind == ast.KindHeading {
|
||||
result = []byte("heading")
|
||||
} else {
|
||||
result = []byte("id")
|
||||
}
|
||||
}
|
||||
if _, ok := s.Values[util.BytesToReadOnlyString(result)]; !ok {
|
||||
s.Values[util.BytesToReadOnlyString(result)] = true
|
||||
return result
|
||||
}
|
||||
for i := 1; ; i++ {
|
||||
newResult := fmt.Sprintf("%s-%d", result, i)
|
||||
if _, ok := s.Values[newResult]; !ok {
|
||||
s.Values[newResult] = true
|
||||
return []byte(newResult)
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
func (s *ConfluenceIDs) Put(value []byte) {
|
||||
s.Values[util.BytesToReadOnlyString(value)] = true
|
||||
}
|
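A sketch of how the ID generator above deduplicates heading anchors: repeated headings get "-1", "-2", ... suffixes, and characters outside the allowed set are dropped or turned into dashes. The heading titles are illustrative.

```go
package main

import (
	"fmt"

	cparser "github.com/kovetskiy/mark/parser"
	"github.com/yuin/goldmark/ast"
)

func main() {
	ids := &cparser.ConfluenceIDs{Values: map[string]bool{}}

	for _, title := range []string{"Setup", "Setup", "Usage & Tips"} {
		fmt.Println(string(ids.Generate([]byte(title), ast.KindHeading)))
	}
	// Setup
	// Setup-1
	// Usage--Tips
}
```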
114  parser/confluencetags.go (new file)
@@ -0,0 +1,114 @@
|
||||
package parser
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"regexp"
|
||||
|
||||
"github.com/yuin/goldmark/ast"
|
||||
"github.com/yuin/goldmark/parser"
|
||||
"github.com/yuin/goldmark/text"
|
||||
"github.com/yuin/goldmark/util"
|
||||
)
|
||||
|
||||
// NewConfluenceTagParser returns an inline parser that parses <ac:* /> and <ri:* /> tags to ensure that Confluence specific tags are parsed
|
||||
// as ast.KindRawHtml so they are not escaped at render time. The parser must be registered with a higher priority
|
||||
// than goldmark's linkParser. Otherwise, the linkParser would parse the <ac:* /> tags.
|
||||
func NewConfluenceTagParser() parser.InlineParser {
|
||||
return &confluenceTagParser{}
|
||||
}
|
||||
|
||||
var _ parser.InlineParser = (*confluenceTagParser)(nil)
|
||||
|
||||
// confluenceTagParser is a stripped down version of goldmark's rawHTMLParser.
|
||||
// See: https://github.com/yuin/goldmark/blob/master/parser/raw_html.go
|
||||
type confluenceTagParser struct {
|
||||
}
|
||||
|
||||
func (s *confluenceTagParser) Trigger() []byte {
|
||||
return []byte{'<'}
|
||||
}
|
||||
|
||||
func (s *confluenceTagParser) Parse(_ ast.Node, block text.Reader, pc parser.Context) ast.Node {
|
||||
line, _ := block.PeekLine()
|
||||
if len(line) > 1 && util.IsAlphaNumeric(line[1]) {
|
||||
return s.parseMultiLineRegexp(openTagRegexp, block, pc)
|
||||
}
|
||||
if len(line) > 2 && line[1] == '/' && util.IsAlphaNumeric(line[2]) {
|
||||
return s.parseMultiLineRegexp(closeTagRegexp, block, pc)
|
||||
}
|
||||
if len(line) > 2 && line[1] == '!' && line[2] >= 'A' && line[2] <= 'Z' {
|
||||
return s.parseUntil(block, closeDecl, pc)
|
||||
}
|
||||
if bytes.HasPrefix(line, openCDATA) {
|
||||
return s.parseUntil(block, closeCDATA, pc)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
var tagnamePattern = `([A-Za-z][A-Za-z0-9-]*)`
|
||||
|
||||
var spaceOrOneNewline = `(?:[ \t]|(?:\r\n|\n){0,1})`
|
||||
var attributePattern = `(?:[\r\n \t]+[a-zA-Z_:][a-zA-Z0-9:._-]*(?:[\r\n \t]*=[\r\n \t]*(?:[^\"'=<>` + "`" + `\x00-\x20]+|'[^']*'|"[^"]*"))?)`
|
||||
|
||||
// Only match <ac:*/> and <ri:*/> tags
|
||||
var openTagRegexp = regexp.MustCompile("^<(ac|ri):" + tagnamePattern + attributePattern + `*` + spaceOrOneNewline + `*/?>`)
|
||||
var closeTagRegexp = regexp.MustCompile("^</ac:" + tagnamePattern + spaceOrOneNewline + `*>`)
|
||||
|
||||
var openCDATA = []byte("<![CDATA[")
|
||||
var closeCDATA = []byte("]]>")
|
||||
var closeDecl = []byte(">")
|
||||
|
||||
func (s *confluenceTagParser) parseUntil(block text.Reader, closer []byte, _ parser.Context) ast.Node {
|
||||
savedLine, savedSegment := block.Position()
|
||||
node := ast.NewRawHTML()
|
||||
for {
|
||||
line, segment := block.PeekLine()
|
||||
if line == nil {
|
||||
break
|
||||
}
|
||||
index := bytes.Index(line, closer)
|
||||
if index > -1 {
|
||||
node.Segments.Append(segment.WithStop(segment.Start + index + len(closer)))
|
||||
block.Advance(index + len(closer))
|
||||
return node
|
||||
}
|
||||
node.Segments.Append(segment)
|
||||
block.AdvanceLine()
|
||||
}
|
||||
block.SetPosition(savedLine, savedSegment)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *confluenceTagParser) parseMultiLineRegexp(reg *regexp.Regexp, block text.Reader, _ parser.Context) ast.Node {
|
||||
sline, ssegment := block.Position()
|
||||
if block.Match(reg) {
|
||||
node := ast.NewRawHTML()
|
||||
eline, esegment := block.Position()
|
||||
block.SetPosition(sline, ssegment)
|
||||
for {
|
||||
line, segment := block.PeekLine()
|
||||
if line == nil {
|
||||
break
|
||||
}
|
||||
l, _ := block.Position()
|
||||
start := segment.Start
|
||||
if l == sline {
|
||||
start = ssegment.Start
|
||||
}
|
||||
end := segment.Stop
|
||||
if l == eline {
|
||||
end = esegment.Start
|
||||
}
|
||||
|
||||
node.Segments.Append(text.NewSegment(start, end))
|
||||
if l == eline {
|
||||
block.Advance(end - start)
|
||||
break
|
||||
} else {
|
||||
block.AdvanceLine()
|
||||
}
|
||||
}
|
||||
return node
|
||||
}
|
||||
return nil
|
||||
}
|
101  pkg/log/log.go (deleted)
@@ -1,101 +0,0 @@
|
||||
package log
|
||||
|
||||
import (
|
||||
"github.com/kovetskiy/lorg"
|
||||
"github.com/reconquest/cog"
|
||||
"github.com/reconquest/karma-go"
|
||||
)
|
||||
|
||||
var (
|
||||
log *cog.Logger
|
||||
)
|
||||
|
||||
func Init(debug, trace bool) {
|
||||
stderr := lorg.NewLog()
|
||||
stderr.SetIndentLines(true)
|
||||
stderr.SetFormat(
|
||||
lorg.NewFormat("${time} ${level:[%s]:right:short} ${prefix}%s"),
|
||||
)
|
||||
|
||||
log = cog.NewLogger(stderr)
|
||||
|
||||
if debug {
|
||||
log.SetLevel(lorg.LevelDebug)
|
||||
}
|
||||
|
||||
if trace {
|
||||
log.SetLevel(lorg.LevelTrace)
|
||||
}
|
||||
}
|
||||
|
||||
func Fatalf(
|
||||
reason error,
|
||||
message string,
|
||||
args ...interface{},
|
||||
) {
|
||||
log.Fatalf(reason, message, args...)
|
||||
}
|
||||
|
||||
func Errorf(
|
||||
reason error,
|
||||
message string,
|
||||
args ...interface{},
|
||||
) {
|
||||
log.Errorf(reason, message, args...)
|
||||
}
|
||||
|
||||
func Warningf(
|
||||
reason error,
|
||||
message string,
|
||||
args ...interface{},
|
||||
) {
|
||||
log.Warningf(reason, message, args...)
|
||||
}
|
||||
|
||||
func Infof(
|
||||
context *karma.Context,
|
||||
message string,
|
||||
args ...interface{},
|
||||
) {
|
||||
log.Infof(context, message, args...)
|
||||
}
|
||||
|
||||
func Debugf(
|
||||
context *karma.Context,
|
||||
message string,
|
||||
args ...interface{},
|
||||
) {
|
||||
log.Debugf(context, message, args...)
|
||||
}
|
||||
|
||||
func Tracef(
|
||||
context *karma.Context,
|
||||
message string,
|
||||
args ...interface{},
|
||||
) {
|
||||
log.Tracef(context, message, args...)
|
||||
}
|
||||
|
||||
func Fatal(values ...interface{}) {
|
||||
log.Fatal(values...)
|
||||
}
|
||||
|
||||
func Error(values ...interface{}) {
|
||||
log.Error(values...)
|
||||
}
|
||||
|
||||
func Warning(values ...interface{}) {
|
||||
log.Warning(values...)
|
||||
}
|
||||
|
||||
func Info(values ...interface{}) {
|
||||
log.Info(values...)
|
||||
}
|
||||
|
||||
func Debug(values ...interface{}) {
|
||||
log.Debug(values...)
|
||||
}
|
||||
|
||||
func Trace(values ...interface{}) {
|
||||
log.Trace(values...)
|
||||
}
|
@ -1,218 +0,0 @@
|
||||
package mark
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/sha256"
|
||||
"encoding/hex"
|
||||
"io"
|
||||
"net/url"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/kovetskiy/mark/pkg/confluence"
|
||||
"github.com/kovetskiy/mark/pkg/log"
|
||||
"github.com/reconquest/karma-go"
|
||||
)
|
||||
|
||||
const (
|
||||
AttachmentChecksumPrefix = `mark:checksum: `
|
||||
)
|
||||
|
||||
type Attachment struct {
|
||||
ID string
|
||||
Name string
|
||||
Filename string
|
||||
Path string
|
||||
Checksum string
|
||||
Link string
|
||||
}
|
||||
|
||||
func ResolveAttachments(
|
||||
api *confluence.API,
|
||||
page *confluence.PageInfo,
|
||||
base string,
|
||||
names []string,
|
||||
) ([]Attachment, error) {
|
||||
attaches := []Attachment{}
|
||||
for _, name := range names {
|
||||
attach := Attachment{
|
||||
Name: name,
|
||||
Filename: strings.ReplaceAll(name, "/", "_"),
|
||||
Path: filepath.Join(base, name),
|
||||
}
|
||||
|
||||
checksum, err := getChecksum(attach.Path)
|
||||
if err != nil {
|
||||
return nil, karma.Format(
|
||||
err,
|
||||
"unable to get checksum for attachment: %q", attach.Name,
|
||||
)
|
||||
}
|
||||
|
||||
attach.Checksum = checksum
|
||||
|
||||
attaches = append(attaches, attach)
|
||||
}
|
||||
|
||||
remotes, err := api.GetAttachments(page.ID)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
existing := []Attachment{}
|
||||
creating := []Attachment{}
|
||||
updating := []Attachment{}
|
||||
for _, attach := range attaches {
|
||||
var found bool
|
||||
var same bool
|
||||
for _, remote := range remotes {
|
||||
if remote.Filename == attach.Filename {
|
||||
same = attach.Checksum == strings.TrimPrefix(
|
||||
remote.Metadata.Comment,
|
||||
AttachmentChecksumPrefix,
|
||||
)
|
||||
|
||||
attach.ID = remote.ID
|
||||
attach.Link = path.Join(
|
||||
remote.Links.Context,
|
||||
remote.Links.Download,
|
||||
)
|
||||
|
||||
found = true
|
||||
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if found {
|
||||
if same {
|
||||
existing = append(existing, attach)
|
||||
} else {
|
||||
updating = append(updating, attach)
|
||||
}
|
||||
} else {
|
||||
creating = append(creating, attach)
|
||||
}
|
||||
}
|
||||
|
||||
for i, attach := range creating {
|
||||
log.Infof(nil, "creating attachment: %q", attach.Name)
|
||||
|
||||
info, err := api.CreateAttachment(
|
||||
page.ID,
|
||||
attach.Filename,
|
||||
AttachmentChecksumPrefix+attach.Checksum,
|
||||
attach.Path,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, karma.Format(
|
||||
err,
|
||||
"unable to create attachment %q",
|
||||
attach.Name,
|
||||
)
|
||||
}
|
||||
|
||||
attach.ID = info.ID
|
||||
attach.Link = path.Join(
|
||||
info.Links.Context,
|
||||
info.Links.Download,
|
||||
)
|
||||
|
||||
creating[i] = attach
|
||||
}
|
||||
|
||||
for i, attach := range updating {
|
||||
log.Infof(nil, "updating attachment: %q", attach.Name)
|
||||
|
||||
info, err := api.UpdateAttachment(
|
||||
page.ID,
|
||||
attach.ID,
|
||||
attach.Name,
|
||||
AttachmentChecksumPrefix+attach.Checksum,
|
||||
attach.Path,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, karma.Format(
|
||||
err,
|
||||
"unable to update attachment %q",
|
||||
attach.Name,
|
||||
)
|
||||
}
|
||||
|
||||
attach.Link = path.Join(
|
||||
info.Links.Context,
|
||||
info.Links.Download,
|
||||
)
|
||||
|
||||
updating[i] = attach
|
||||
}
|
||||
|
||||
attaches = []Attachment{}
|
||||
attaches = append(attaches, existing...)
|
||||
attaches = append(attaches, creating...)
|
||||
attaches = append(attaches, updating...)
|
||||
|
||||
return attaches, nil
|
||||
}
|
||||
|
||||
func CompileAttachmentLinks(markdown []byte, attaches []Attachment) []byte {
|
||||
links := map[string]string{}
|
||||
names := []string{}
|
||||
|
||||
for _, attach := range attaches {
|
||||
uri, err := url.ParseRequestURI(attach.Link)
|
||||
if err != nil {
|
||||
links[attach.Name] = strings.ReplaceAll(attach.Link, "&", "&amp;")
|
||||
} else {
|
||||
links[attach.Name] = uri.Path +
|
||||
"?" + url.QueryEscape(uri.Query().Encode())
|
||||
}
|
||||
|
||||
names = append(names, attach.Name)
|
||||
}
|
||||
|
||||
// Sort names by length, longest first. This matters when one attachment
// name is a prefix of another, for example:
//   attachments/a.jpg
//   attachments/a.jpg.jpg
// so the longer name is replaced before the shorter one.
|
||||
sort.SliceStable(names, func(i, j int) bool {
|
||||
return len(names[i]) > len(names[j])
|
||||
})
|
||||
|
||||
for _, name := range names {
|
||||
from := `attachment://` + name
|
||||
to := links[name]
|
||||
|
||||
log.Debugf(nil, "replacing: %q -> %q", from, to)
|
||||
|
||||
markdown = bytes.ReplaceAll(
|
||||
markdown,
|
||||
[]byte(from),
|
||||
[]byte(to),
|
||||
)
|
||||
}
|
||||
|
||||
return markdown
|
||||
}
|
||||
|
||||
func getChecksum(filename string) (string, error) {
|
||||
file, err := os.Open(filename)
|
||||
if err != nil {
|
||||
return "", karma.Format(
|
||||
err,
|
||||
"unable to open file",
|
||||
)
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
hash := sha256.New()
|
||||
if _, err := io.Copy(hash, file); err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return hex.EncodeToString(hash.Sum(nil)), nil
|
||||
}
|
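For context, a hypothetical sketch of how the resolved attachments feed into CompileAttachmentLinks (the names and links below are made up; assumes it compiles alongside this package):

package mark

import "fmt"

// exampleCompileAttachmentLinks shows how attachment:// references in the
// markdown body are rewritten to the links resolved above.
func exampleCompileAttachmentLinks() {
	attaches := []Attachment{
		{Name: "attachments/diagram.png", Link: "/download/attachments/123/diagram.png"},
	}

	markdown := []byte("![diagram](attachment://attachments/diagram.png)")

	// Every attachment://<name> occurrence is replaced with the resolved link,
	// longest names first so that overlapping names do not clobber each other.
	fmt.Println(string(CompileAttachmentLinks(markdown, attaches)))
}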
@ -1,92 +0,0 @@
|
||||
package mark
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"regexp"
|
||||
|
||||
"github.com/kovetskiy/mark/pkg/log"
|
||||
"github.com/kovetskiy/mark/pkg/mark/stdlib"
|
||||
"github.com/russross/blackfriday"
|
||||
)
|
||||
|
||||
type ConfluenceRenderer struct {
|
||||
blackfriday.Renderer
|
||||
|
||||
Stdlib *stdlib.Lib
|
||||
}
|
||||
|
||||
func (renderer ConfluenceRenderer) BlockCode(
|
||||
out *bytes.Buffer,
|
||||
text []byte,
|
||||
lang string,
|
||||
) {
|
||||
renderer.Stdlib.Templates.ExecuteTemplate(
|
||||
out,
|
||||
"ac:code",
|
||||
struct {
|
||||
Language string
|
||||
Text string
|
||||
}{
|
||||
lang,
|
||||
string(text),
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
// CompileMarkdown replaces tags like <ac:rich-text-body> with an escaped
|
||||
// equivalent, because the blackfriday markdown parser replaces those tags with
|
||||
// <a href="ac:rich-text-body">ac:rich-text-body</a> for whatever reason.
|
||||
func CompileMarkdown(
|
||||
markdown []byte,
|
||||
stdlib *stdlib.Lib,
|
||||
) string {
|
||||
log.Tracef(nil, "rendering markdown:\n%s", string(markdown))
|
||||
|
||||
colon := regexp.MustCompile(`---BLACKFRIDAY-COLON---`)
|
||||
|
||||
tags := regexp.MustCompile(`<(/?\S+?):(\S+?)>`)
|
||||
|
||||
markdown = tags.ReplaceAll(
|
||||
markdown,
|
||||
[]byte(`<$1`+colon.String()+`$2>`),
|
||||
)
|
||||
|
||||
renderer := ConfluenceRenderer{
|
||||
Renderer: blackfriday.HtmlRenderer(
|
||||
blackfriday.HTML_USE_XHTML|
|
||||
blackfriday.HTML_USE_SMARTYPANTS|
|
||||
blackfriday.HTML_SMARTYPANTS_FRACTIONS|
|
||||
blackfriday.HTML_SMARTYPANTS_DASHES|
|
||||
blackfriday.HTML_SMARTYPANTS_LATEX_DASHES,
|
||||
"", "",
|
||||
),
|
||||
|
||||
Stdlib: stdlib,
|
||||
}
|
||||
|
||||
html := blackfriday.MarkdownOptions(
|
||||
markdown,
|
||||
renderer,
|
||||
blackfriday.Options{
|
||||
Extensions: blackfriday.EXTENSION_NO_INTRA_EMPHASIS |
|
||||
blackfriday.EXTENSION_TABLES |
|
||||
blackfriday.EXTENSION_FENCED_CODE |
|
||||
blackfriday.EXTENSION_AUTOLINK |
|
||||
blackfriday.EXTENSION_LAX_HTML_BLOCKS |
|
||||
blackfriday.EXTENSION_STRIKETHROUGH |
|
||||
blackfriday.EXTENSION_SPACE_HEADERS |
|
||||
blackfriday.EXTENSION_HEADER_IDS |
|
||||
blackfriday.EXTENSION_AUTO_HEADER_IDS |
|
||||
blackfriday.EXTENSION_TITLEBLOCK |
|
||||
blackfriday.EXTENSION_BACKSLASH_LINE_BREAK |
|
||||
blackfriday.EXTENSION_DEFINITION_LISTS |
|
||||
blackfriday.EXTENSION_NO_EMPTY_LINE_BEFORE_BLOCK,
|
||||
},
|
||||
)
|
||||
|
||||
html = colon.ReplaceAll(html, []byte(`:`))
|
||||
|
||||
log.Tracef(nil, "rendered markdown to html:\n%s", string(html))
|
||||
|
||||
return string(html)
|
||||
}
|
pkg/mark/meta.go
@ -1,123 +0,0 @@
|
||||
package mark
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"fmt"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/kovetskiy/mark/pkg/log"
|
||||
)
|
||||
|
||||
const (
|
||||
HeaderParent = `Parent`
|
||||
HeaderSpace = `Space`
|
||||
HeaderTitle = `Title`
|
||||
HeaderLayout = `Layout`
|
||||
HeaderAttachment = `Attachment`
|
||||
)
|
||||
|
||||
type Meta struct {
|
||||
Parents []string
|
||||
Space string
|
||||
Title string
|
||||
Layout string
|
||||
Attachments []string
|
||||
}
|
||||
|
||||
var (
|
||||
reHeaderPatternV1 = regexp.MustCompile(`\[\]:\s*#\s*\(([^:]+):\s*(.*)\)`)
|
||||
reHeaderPatternV2 = regexp.MustCompile(`<!--\s*([^:]+):\s*(.*)\s*-->`)
|
||||
)
|
||||
|
||||
func ExtractMeta(data []byte) (*Meta, []byte, error) {
|
||||
var (
|
||||
meta *Meta
|
||||
offset int
|
||||
)
|
||||
|
||||
scanner := bufio.NewScanner(bytes.NewBuffer(data))
|
||||
for scanner.Scan() {
|
||||
line := scanner.Text()
|
||||
|
||||
if err := scanner.Err(); err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
offset += len(line) + 1
|
||||
|
||||
matches := reHeaderPatternV2.FindStringSubmatch(line)
|
||||
if matches == nil {
|
||||
matches = reHeaderPatternV1.FindStringSubmatch(line)
|
||||
if matches == nil {
|
||||
break
|
||||
}
|
||||
|
||||
log.Warningf(
|
||||
fmt.Errorf(`legacy header usage found: %s`, line),
|
||||
"please use new header format: <!-- %s: %s -->",
|
||||
matches[1],
|
||||
matches[2],
|
||||
)
|
||||
}
|
||||
|
||||
if meta == nil {
|
||||
meta = &Meta{}
|
||||
}
|
||||
|
||||
header := strings.Title(matches[1])
|
||||
|
||||
var value string
|
||||
if len(matches) > 1 {
|
||||
value = strings.TrimSpace(matches[2])
|
||||
}
|
||||
|
||||
switch header {
|
||||
case HeaderParent:
|
||||
meta.Parents = append(meta.Parents, value)
|
||||
|
||||
case HeaderSpace:
|
||||
meta.Space = strings.TrimSpace(value)
|
||||
|
||||
case HeaderTitle:
|
||||
meta.Title = strings.TrimSpace(value)
|
||||
|
||||
case HeaderLayout:
|
||||
meta.Layout = strings.TrimSpace(value)
|
||||
|
||||
case HeaderAttachment:
|
||||
meta.Attachments = append(meta.Attachments, value)
|
||||
|
||||
default:
|
||||
log.Errorf(
|
||||
nil,
|
||||
`encountered unknown header %q line: %#v`,
|
||||
header,
|
||||
line,
|
||||
)
|
||||
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
if meta == nil {
|
||||
return nil, data, nil
|
||||
}
|
||||
|
||||
if meta.Space == "" {
|
||||
return nil, nil, fmt.Errorf(
|
||||
"space key is not set (%s header is not set)",
|
||||
HeaderSpace,
|
||||
)
|
||||
}
|
||||
|
||||
if meta.Title == "" {
|
||||
return nil, nil, fmt.Errorf(
|
||||
"page title is not set (%s header is not set)",
|
||||
HeaderTitle,
|
||||
)
|
||||
}
|
||||
|
||||
return meta, data[offset:], nil
|
||||
}
|
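To make the header parsing concrete, a hypothetical sketch of the format ExtractMeta expects at the top of a page (assumes it compiles alongside this package; the space key and titles are made up):

package mark

import "fmt"

// exampleExtractMeta demonstrates the <!-- Key: value --> header block.
func exampleExtractMeta() {
	page := []byte(`<!-- Space: TEST -->
<!-- Parent: Parent Page -->
<!-- Title: Example Page -->

Page body starts here.
`)

	meta, body, err := ExtractMeta(page)
	if err != nil {
		panic(err)
	}

	// meta.Space == "TEST", meta.Title == "Example Page",
	// meta.Parents == []string{"Parent Page"}; body holds the markdown that
	// follows the header block.
	fmt.Println(meta.Space, meta.Title, len(body))
}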
@ -1,153 +0,0 @@
|
||||
package stdlib
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"text/template"
|
||||
|
||||
"github.com/kovetskiy/mark/pkg/confluence"
|
||||
"github.com/kovetskiy/mark/pkg/log"
|
||||
"github.com/kovetskiy/mark/pkg/mark/macro"
|
||||
|
||||
"github.com/reconquest/karma-go"
|
||||
)
|
||||
|
||||
type Lib struct {
|
||||
Macros []macro.Macro
|
||||
Templates *template.Template
|
||||
}
|
||||
|
||||
func New(api *confluence.API) (*Lib, error) {
|
||||
var (
|
||||
lib Lib
|
||||
err error
|
||||
)
|
||||
|
||||
lib.Templates, err = templates(api)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
lib.Macros, err = macros(lib.Templates)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &lib, nil
|
||||
}
|
||||
|
||||
func macros(templates *template.Template) ([]macro.Macro, error) {
|
||||
text := func(line ...string) []byte {
|
||||
return []byte(strings.Join(line, "\n"))
|
||||
}
|
||||
|
||||
macros, _, err := macro.ExtractMacros(
|
||||
[]byte(text(
|
||||
`<!-- Macro: @\{([^}]+)\}`,
|
||||
` Template: ac:link:user`,
|
||||
` Name: ${1} -->`,
|
||||
|
||||
// TODO(seletskiy): more macros here
|
||||
)),
|
||||
|
||||
templates,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return macros, nil
|
||||
}
|
||||
|
||||
func templates(api *confluence.API) (*template.Template, error) {
|
||||
text := func(line ...string) string {
|
||||
return strings.Join(line, ``)
|
||||
}
|
||||
|
||||
templates := template.New(`stdlib`).Funcs(
|
||||
template.FuncMap{
|
||||
"user": func(name string) *confluence.User {
|
||||
user, err := api.GetUserByName(name)
|
||||
if err != nil {
|
||||
log.Error(err)
|
||||
}
|
||||
|
||||
return user
|
||||
},
|
||||
|
||||
// The only way to escape CDATA end marker ']]>' is to split it
|
||||
// into two CDATA sections.
|
||||
"cdata": func(data string) string {
|
||||
return strings.ReplaceAll(
|
||||
data,
|
||||
"]]>",
|
||||
"]]><![CDATA[]]]]><![CDATA[>",
|
||||
)
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
var err error
|
||||
|
||||
for name, body := range map[string]string{
|
||||
// This template is used to select whole article layout
|
||||
`ac:layout`: text(
|
||||
`{{ if eq .Layout "article" }}`,
|
||||
/**/ `<ac:layout>`,
|
||||
/**/ `<ac:layout-section ac:type="two_right_sidebar">`,
|
||||
/**/ `<ac:layout-cell>{{ .Body }}</ac:layout-cell>`,
|
||||
/**/ `<ac:layout-cell></ac:layout-cell>`,
|
||||
/**/ `</ac:layout-section>`,
|
||||
/**/ `</ac:layout>`,
|
||||
`{{ else }}`,
|
||||
/**/ `{{ .Body }}`,
|
||||
`{{ end }}`,
|
||||
),
|
||||
|
||||
// This template is used for rendering code in ```
|
||||
`ac:code`: text(
|
||||
`<ac:structured-macro ac:name="code">`,
|
||||
`<ac:parameter ac:name="language">{{ .Language }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="collapse">false</ac:parameter>`,
|
||||
`<ac:plain-text-body><![CDATA[{{ .Text | cdata }}]]></ac:plain-text-body>`,
|
||||
`</ac:structured-macro>`,
|
||||
),
|
||||
|
||||
`ac:status`: text(
|
||||
`<ac:structured-macro ac:name="status">`,
|
||||
`<ac:parameter ac:name="colour">{{ or .Color "Grey" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="title">{{ or .Title .Color }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="subtle">{{ or .Subtle false }}</ac:parameter>`,
|
||||
`</ac:structured-macro>`,
|
||||
),
|
||||
|
||||
`ac:link:user`: text(
|
||||
`{{ with .Name | user }}`,
|
||||
/**/ `<ac:link>`,
|
||||
/**/ `<ri:user ri:account-id="{{ .AccountID }}"/>`,
|
||||
/**/ `</ac:link>`,
|
||||
`{{ else }}`,
|
||||
/**/ `{{ .Name }}`,
|
||||
`{{ end }}`,
|
||||
),
|
||||
|
||||
`ac:jira:ticket`: text(
|
||||
`<ac:structured-macro ac:name="jira">`,
|
||||
`<ac:parameter ac:name="key">{{ .Ticket }}</ac:parameter>`,
|
||||
`</ac:structured-macro>`,
|
||||
),
|
||||
|
||||
// TODO(seletskiy): more templates here
|
||||
} {
|
||||
templates, err = templates.New(name).Parse(body)
|
||||
if err != nil {
|
||||
return nil, karma.
|
||||
Describe("template", body).
|
||||
Format(
|
||||
err,
|
||||
"unable to parse template",
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
return templates, nil
|
||||
}
|
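The cdata helper above exists because a CDATA section cannot contain its own terminator. A standalone sketch (not part of the change) of the splitting trick:

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Same replacement as the "cdata" template function: a literal "]]>" is
	// split across two CDATA sections so the storage format stays well-formed.
	cdata := func(data string) string {
		return strings.ReplaceAll(data, "]]>", "]]><![CDATA[]]]]><![CDATA[>")
	}

	code := `if (a[b[0]]>c) { return }`
	fmt.Printf("<ac:plain-text-body><![CDATA[%s]]></ac:plain-text-body>\n", cdata(code))
}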
renderer/blockquote.go (new file)
@ -0,0 +1,221 @@
|
||||
package renderer
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"regexp"
|
||||
|
||||
"github.com/yuin/goldmark/ast"
|
||||
"github.com/yuin/goldmark/renderer"
|
||||
"github.com/yuin/goldmark/renderer/html"
|
||||
"github.com/yuin/goldmark/util"
|
||||
)
|
||||
|
||||
type ConfluenceBlockQuoteRenderer struct {
|
||||
html.Config
|
||||
LevelMap BlockQuoteLevelMap
|
||||
}
|
||||
|
||||
// NewConfluenceBlockQuoteRenderer creates a new instance of the ConfluenceBlockQuoteRenderer
|
||||
func NewConfluenceBlockQuoteRenderer(opts ...html.Option) renderer.NodeRenderer {
|
||||
return &ConfluenceBlockQuoteRenderer{
|
||||
Config: html.NewConfig(),
|
||||
LevelMap: nil,
|
||||
}
|
||||
}
|
||||
|
||||
// RegisterFuncs implements NodeRenderer.RegisterFuncs .
|
||||
func (r *ConfluenceBlockQuoteRenderer) RegisterFuncs(reg renderer.NodeRendererFuncRegisterer) {
|
||||
reg.Register(ast.KindBlockquote, r.renderBlockQuote)
|
||||
}
|
||||
|
||||
// Define BlockQuoteType enum
|
||||
type BlockQuoteType int
|
||||
|
||||
const (
|
||||
Info BlockQuoteType = iota
|
||||
Note
|
||||
Warn
|
||||
Tip
|
||||
None
|
||||
)
|
||||
|
||||
func (t BlockQuoteType) String() string {
|
||||
return []string{"info", "note", "warning", "tip", "none"}[t]
|
||||
}
|
||||
|
||||
type BlockQuoteLevelMap map[ast.Node]int
|
||||
|
||||
func (m BlockQuoteLevelMap) Level(node ast.Node) int {
|
||||
return m[node]
|
||||
}
|
||||
|
||||
type BlockQuoteClassifier struct {
|
||||
patternMap map[string]*regexp.Regexp
|
||||
}
|
||||
|
||||
func LegacyBlockQuoteClassifier() BlockQuoteClassifier {
|
||||
return BlockQuoteClassifier{
|
||||
patternMap: map[string]*regexp.Regexp{
|
||||
"info": regexp.MustCompile(`(?i)info`),
|
||||
"note": regexp.MustCompile(`(?i)note`),
|
||||
"warn": regexp.MustCompile(`(?i)warn`),
|
||||
"tip": regexp.MustCompile(`(?i)tip`),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func GHAlertsBlockQuoteClassifier() BlockQuoteClassifier {
|
||||
return BlockQuoteClassifier{
|
||||
patternMap: map[string]*regexp.Regexp{
|
||||
"info": regexp.MustCompile(`(?i)^\!(note|important)`),
|
||||
"note": regexp.MustCompile(`(?i)^\!warning`),
|
||||
"warn": regexp.MustCompile(`(?i)^\!caution`),
|
||||
"tip": regexp.MustCompile(`(?i)^\!tip`),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// ClassifyingBlockQuote compares a string against a set of patterns and returns a BlockQuoteType
|
||||
func (classifier BlockQuoteClassifier) ClassifyingBlockQuote(literal string) BlockQuoteType {
|
||||
|
||||
var t = None
|
||||
switch {
|
||||
case classifier.patternMap["info"].MatchString(literal):
|
||||
t = Info
|
||||
case classifier.patternMap["note"].MatchString(literal):
|
||||
t = Note
|
||||
case classifier.patternMap["warn"].MatchString(literal):
|
||||
t = Warn
|
||||
case classifier.patternMap["tip"].MatchString(literal):
|
||||
t = Tip
|
||||
}
|
||||
return t
|
||||
}
|
||||
|
||||
// ParseBlockQuoteType parses the first line of a blockquote and returns its type
|
||||
func ParseBlockQuoteType(node ast.Node, source []byte) BlockQuoteType {
|
||||
var t = None
|
||||
var legacyClassifier = LegacyBlockQuoteClassifier()
|
||||
var ghAlertsClassifier = GHAlertsBlockQuoteClassifier()
|
||||
|
||||
countParagraphs := 0
|
||||
_ = ast.Walk(node, func(node ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
|
||||
if node.Kind() == ast.KindParagraph && entering {
|
||||
countParagraphs += 1
|
||||
}
|
||||
// Type of block quote should be defined on the first blockquote line
|
||||
if countParagraphs < 2 && entering {
|
||||
if node.Kind() == ast.KindText {
|
||||
n := node.(*ast.Text)
|
||||
t = legacyClassifier.ClassifyingBlockQuote(string(n.Value(source)))
|
||||
// If the node is a text node but classification returned None, do not give up:
// find the next two sibling nodes, midNode and rightNode.
// 1. If both are also text nodes,
// 2. and the original node's text value is '[',
// 3. and rightNode's text value is ']',
// then with a high degree of confidence the original markdown contains a
// GitHub alert style blockquote; classifying the next text node (midNode)
// confirms that.
|
||||
if t == None {
|
||||
midNode := node.NextSibling()
|
||||
|
||||
if midNode != nil && midNode.Kind() == ast.KindText {
|
||||
rightNode := midNode.NextSibling()
|
||||
midTextNode := midNode.(*ast.Text)
|
||||
if rightNode != nil && rightNode.Kind() == ast.KindText {
|
||||
rightTextNode := rightNode.(*ast.Text)
|
||||
if string(n.Value(source)) == "[" && string(rightTextNode.Value(source)) == "]" {
|
||||
t = ghAlertsClassifier.ClassifyingBlockQuote(string(midTextNode.Value(source)))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
countParagraphs += 1
|
||||
}
|
||||
if node.Kind() == ast.KindHTMLBlock {
|
||||
|
||||
n := node.(*ast.HTMLBlock)
|
||||
for i := 0; i < n.BaseBlock.Lines().Len(); i++ {
|
||||
line := n.BaseBlock.Lines().At(i)
|
||||
t = legacyClassifier.ClassifyingBlockQuote(string(line.Value(source)))
|
||||
if t != None {
|
||||
break
|
||||
}
|
||||
}
|
||||
countParagraphs += 1
|
||||
}
|
||||
} else if countParagraphs > 1 && entering {
|
||||
return ast.WalkStop, nil
|
||||
}
|
||||
return ast.WalkContinue, nil
|
||||
})
|
||||
|
||||
return t
|
||||
}
|
||||
|
||||
// GenerateBlockQuoteLevel walks a given node and returns a map of blockquote levels
|
||||
func GenerateBlockQuoteLevel(someNode ast.Node) BlockQuoteLevelMap {
|
||||
|
||||
// We define a state variable that tracks the blockquote level while we walk the tree
|
||||
blockQuoteLevel := 0
|
||||
blockQuoteLevelMap := make(map[ast.Node]int)
|
||||
|
||||
rootNode := someNode
|
||||
for rootNode.Parent() != nil {
|
||||
rootNode = rootNode.Parent()
|
||||
}
|
||||
_ = ast.Walk(rootNode, func(node ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
if node.Kind() == ast.KindBlockquote && entering {
|
||||
blockQuoteLevelMap[node] = blockQuoteLevel
|
||||
blockQuoteLevel += 1
|
||||
}
|
||||
if node.Kind() == ast.KindBlockquote && !entering {
|
||||
blockQuoteLevel -= 1
|
||||
}
|
||||
return ast.WalkContinue, nil
|
||||
})
|
||||
return blockQuoteLevelMap
|
||||
}
|
||||
|
||||
// renderBlockQuote will render a BlockQuote
|
||||
func (r *ConfluenceBlockQuoteRenderer) renderBlockQuote(writer util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
// Initialize BlockQuote level map
|
||||
if r.LevelMap == nil {
|
||||
r.LevelMap = GenerateBlockQuoteLevel(node)
|
||||
}
|
||||
|
||||
quoteType := ParseBlockQuoteType(node, source)
|
||||
quoteLevel := r.LevelMap.Level(node)
|
||||
|
||||
if quoteLevel == 0 && entering && quoteType != None {
|
||||
prefix := fmt.Sprintf("<ac:structured-macro ac:name=\"%s\"><ac:parameter ac:name=\"icon\">true</ac:parameter><ac:rich-text-body>\n", quoteType)
|
||||
if _, err := writer.Write([]byte(prefix)); err != nil {
|
||||
return ast.WalkStop, err
|
||||
}
|
||||
return ast.WalkContinue, nil
|
||||
}
|
||||
if quoteLevel == 0 && !entering && quoteType != None {
|
||||
suffix := "</ac:rich-text-body></ac:structured-macro>\n"
|
||||
if _, err := writer.Write([]byte(suffix)); err != nil {
|
||||
return ast.WalkStop, err
|
||||
}
|
||||
return ast.WalkContinue, nil
|
||||
}
|
||||
return r.goldmarkRenderBlockquote(writer, source, node, entering)
|
||||
}
|
||||
|
||||
// goldmarkRenderBlockquote is the default renderBlockquote implementation from https://github.com/yuin/goldmark/blob/9d6f314b99ca23037c93d76f248be7b37de6220a/renderer/html/html.go#L286
|
||||
func (r *ConfluenceBlockQuoteRenderer) goldmarkRenderBlockquote(w util.BufWriter, source []byte, n ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
if entering {
|
||||
if n.Attributes() != nil {
|
||||
_, _ = w.WriteString("<blockquote")
|
||||
html.RenderAttributes(w, n, html.BlockquoteAttributeFilter)
|
||||
_ = w.WriteByte('>')
|
||||
} else {
|
||||
_, _ = w.WriteString("<blockquote>\n")
|
||||
}
|
||||
} else {
|
||||
_, _ = w.WriteString("</blockquote>\n")
|
||||
}
|
||||
return ast.WalkContinue, nil
|
||||
}
|
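A hypothetical sketch assuming it sits in this renderer package (not part of the original change): how the two classifiers map blockquote text onto Confluence macro types.

package renderer

import "fmt"

func exampleClassifiers() {
	legacy := LegacyBlockQuoteClassifier()
	gh := GHAlertsBlockQuoteClassifier()

	// Legacy style: "> Note: remember to ..." is matched by keyword anywhere in the line.
	fmt.Println(legacy.ClassifyingBlockQuote("Note: remember to ...")) // note

	// GitHub alert style: "> [!WARNING]" arrives as the text nodes "[", "!WARNING", "]";
	// the middle node is what gets classified, and a GH "warning" maps to the
	// Confluence "note" macro.
	fmt.Println(gh.ClassifyingBlockQuote("!WARNING")) // note
}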
renderer/codeblock.go (new file)
@ -0,0 +1,77 @@
|
||||
package renderer
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"github.com/kovetskiy/mark/stdlib"
|
||||
|
||||
"github.com/yuin/goldmark/ast"
|
||||
"github.com/yuin/goldmark/renderer"
|
||||
"github.com/yuin/goldmark/renderer/html"
|
||||
"github.com/yuin/goldmark/util"
|
||||
)
|
||||
|
||||
type ConfluenceCodeBlockRenderer struct {
|
||||
html.Config
|
||||
Stdlib *stdlib.Lib
|
||||
}
|
||||
|
||||
// NewConfluenceCodeBlockRenderer creates a new instance of the ConfluenceCodeBlockRenderer
|
||||
func NewConfluenceCodeBlockRenderer(stdlib *stdlib.Lib, path string, opts ...html.Option) renderer.NodeRenderer {
|
||||
return &ConfluenceCodeBlockRenderer{
|
||||
Config: html.NewConfig(),
|
||||
Stdlib: stdlib,
|
||||
}
|
||||
}
|
||||
|
||||
// RegisterFuncs implements NodeRenderer.RegisterFuncs .
|
||||
func (r *ConfluenceCodeBlockRenderer) RegisterFuncs(reg renderer.NodeRendererFuncRegisterer) {
|
||||
reg.Register(ast.KindCodeBlock, r.renderCodeBlock)
|
||||
}
|
||||
|
||||
// renderCodeBlock renders a CodeBlock
|
||||
func (r *ConfluenceCodeBlockRenderer) renderCodeBlock(writer util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
if !entering {
|
||||
return ast.WalkContinue, nil
|
||||
}
|
||||
linenumbers := false
|
||||
firstline := 0
|
||||
theme := ""
|
||||
collapse := false
|
||||
lang := ""
|
||||
title := ""
|
||||
|
||||
var lval []byte
|
||||
|
||||
lines := node.Lines().Len()
|
||||
for i := 0; i < lines; i++ {
|
||||
line := node.Lines().At(i)
|
||||
lval = append(lval, line.Value(source)...)
|
||||
}
|
||||
err := r.Stdlib.Templates.ExecuteTemplate(
|
||||
writer,
|
||||
"ac:code",
|
||||
struct {
|
||||
Language string
|
||||
Collapse bool
|
||||
Title string
|
||||
Theme string
|
||||
Linenumbers bool
|
||||
Firstline int
|
||||
Text string
|
||||
}{
|
||||
lang,
|
||||
collapse,
|
||||
title,
|
||||
theme,
|
||||
linenumbers,
|
||||
firstline,
|
||||
strings.TrimSuffix(string(lval), "\n"),
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
return ast.WalkStop, err
|
||||
}
|
||||
|
||||
return ast.WalkContinue, nil
|
||||
}
|
renderer/fencedcodeblock.go (new file)
@ -0,0 +1,221 @@
|
||||
package renderer
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"regexp"
|
||||
"slices"
|
||||
"strings"
|
||||
|
||||
"github.com/kovetskiy/mark/attachment"
|
||||
"github.com/kovetskiy/mark/d2"
|
||||
"github.com/kovetskiy/mark/mermaid"
|
||||
"github.com/kovetskiy/mark/stdlib"
|
||||
"github.com/kovetskiy/mark/types"
|
||||
"github.com/reconquest/pkg/log"
|
||||
|
||||
"github.com/yuin/goldmark/ast"
|
||||
"github.com/yuin/goldmark/renderer"
|
||||
"github.com/yuin/goldmark/renderer/html"
|
||||
"github.com/yuin/goldmark/util"
|
||||
)
|
||||
|
||||
type ConfluenceFencedCodeBlockRenderer struct {
|
||||
html.Config
|
||||
Stdlib *stdlib.Lib
|
||||
MarkConfig types.MarkConfig
|
||||
Attachments attachment.Attacher
|
||||
}
|
||||
|
||||
var reBlockDetails = regexp.MustCompile(
|
||||
// (<Lang>|-) (collapse|<theme>|\d)* (title <title>)?
|
||||
|
||||
`^(?:(\w*)|-)\s*\b(\S.*?\S?)??\s*(?:\btitle\s+(\S.*\S?))?$`,
|
||||
)
|
||||
|
||||
// NewConfluenceFencedCodeBlockRenderer creates a new instance of the ConfluenceFencedCodeBlockRenderer
|
||||
func NewConfluenceFencedCodeBlockRenderer(stdlib *stdlib.Lib, attachments attachment.Attacher, cfg types.MarkConfig, opts ...html.Option) renderer.NodeRenderer {
|
||||
return &ConfluenceFencedCodeBlockRenderer{
|
||||
Config: html.NewConfig(),
|
||||
Stdlib: stdlib,
|
||||
MarkConfig: cfg,
|
||||
Attachments: attachments,
|
||||
}
|
||||
}
|
||||
|
||||
// RegisterFuncs implements NodeRenderer.RegisterFuncs .
|
||||
func (r *ConfluenceFencedCodeBlockRenderer) RegisterFuncs(reg renderer.NodeRendererFuncRegisterer) {
|
||||
reg.Register(ast.KindFencedCodeBlock, r.renderFencedCodeBlock)
|
||||
}
|
||||
|
||||
func ParseLanguage(lang string) string {
|
||||
// lang takes the following form: language? "collapse"? ("title"? <any string>*)?
|
||||
// let's split it by spaces
|
||||
paramlist := strings.Fields(lang)
|
||||
|
||||
// get the word in question, aka the first one
|
||||
first := lang
|
||||
if len(paramlist) > 0 {
|
||||
first = paramlist[0]
|
||||
}
|
||||
|
||||
if first == "collapse" || first == "title" {
|
||||
// collapsing or including a title without a language
|
||||
return ""
|
||||
}
|
||||
// the default case with language being the first one
|
||||
return first
|
||||
}
|
||||
|
||||
func ParseTitle(lang string) string {
|
||||
index := strings.Index(lang, "title")
|
||||
if index >= 0 {
|
||||
// it's found, check if title is given and return it
|
||||
start := index + 6
|
||||
if len(lang) > start {
|
||||
return lang[start:]
|
||||
}
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// renderFencedCodeBlock renders a FencedCodeBlock
|
||||
func (r *ConfluenceFencedCodeBlockRenderer) renderFencedCodeBlock(writer util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
if !entering {
|
||||
return ast.WalkContinue, nil
|
||||
}
|
||||
var info []byte
|
||||
nodeFencedCodeBlock := node.(*ast.FencedCodeBlock)
|
||||
if nodeFencedCodeBlock.Info != nil {
|
||||
segment := nodeFencedCodeBlock.Info.Segment
|
||||
info = segment.Value(source)
|
||||
}
|
||||
groups := reBlockDetails.FindStringSubmatch(string(info))
|
||||
linenumbers := false
|
||||
firstline := 0
|
||||
theme := ""
|
||||
collapse := false
|
||||
lang := ""
|
||||
var options []string
|
||||
title := ""
|
||||
if len(groups) > 0 {
|
||||
lang, options, title = groups[1], strings.Fields(groups[2]), groups[3]
|
||||
for _, option := range options {
|
||||
if option == "collapse" {
|
||||
collapse = true
|
||||
continue
|
||||
}
|
||||
if option == "nocollapse" {
|
||||
collapse = false
|
||||
continue
|
||||
}
|
||||
var i int
|
||||
if _, err := fmt.Sscanf(option, "%d", &i); err == nil {
|
||||
linenumbers = i > 0
|
||||
firstline = i
|
||||
continue
|
||||
}
|
||||
theme = option
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
var lval []byte
|
||||
|
||||
lines := node.Lines().Len()
|
||||
for i := 0; i < lines; i++ {
|
||||
line := node.Lines().At(i)
|
||||
lval = append(lval, line.Value(source)...)
|
||||
}
|
||||
|
||||
if lang == "d2" && slices.Contains(r.MarkConfig.Features, "d2") {
|
||||
attachment, err := d2.ProcessD2(title, lval, r.MarkConfig.D2Scale)
|
||||
if err != nil {
|
||||
log.Debugf(nil, "error: %v", err)
|
||||
return ast.WalkStop, err
|
||||
}
|
||||
r.Attachments.Attach(attachment)
|
||||
err = r.Stdlib.Templates.ExecuteTemplate(
|
||||
writer,
|
||||
"ac:image",
|
||||
struct {
|
||||
Width string
|
||||
Height string
|
||||
Title string
|
||||
Alt string
|
||||
Attachment string
|
||||
Url string
|
||||
}{
|
||||
attachment.Width,
|
||||
attachment.Height,
|
||||
attachment.Name,
|
||||
"",
|
||||
attachment.Filename,
|
||||
"",
|
||||
},
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
return ast.WalkStop, err
|
||||
}
|
||||
|
||||
} else if lang == "mermaid" && slices.Contains(r.MarkConfig.Features, "mermaid") && r.MarkConfig.MermaidProvider == "mermaid-go" {
|
||||
attachment, err := mermaid.ProcessMermaidLocally(title, lval, r.MarkConfig.MermaidScale)
|
||||
if err != nil {
|
||||
log.Debugf(nil, "error: %v", err)
|
||||
return ast.WalkStop, err
|
||||
}
|
||||
r.Attachments.Attach(attachment)
|
||||
err = r.Stdlib.Templates.ExecuteTemplate(
|
||||
writer,
|
||||
"ac:image",
|
||||
struct {
|
||||
Width string
|
||||
Height string
|
||||
Title string
|
||||
Alt string
|
||||
Attachment string
|
||||
Url string
|
||||
}{
|
||||
attachment.Width,
|
||||
attachment.Height,
|
||||
attachment.Name,
|
||||
"",
|
||||
attachment.Filename,
|
||||
"",
|
||||
},
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
return ast.WalkStop, err
|
||||
}
|
||||
|
||||
} else {
|
||||
err := r.Stdlib.Templates.ExecuteTemplate(
|
||||
writer,
|
||||
"ac:code",
|
||||
struct {
|
||||
Language string
|
||||
Collapse bool
|
||||
Title string
|
||||
Theme string
|
||||
Linenumbers bool
|
||||
Firstline int
|
||||
Text string
|
||||
}{
|
||||
lang,
|
||||
collapse,
|
||||
title,
|
||||
theme,
|
||||
linenumbers,
|
||||
firstline,
|
||||
strings.TrimSuffix(string(lval), "\n"),
|
||||
},
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
return ast.WalkStop, err
|
||||
}
|
||||
}
|
||||
|
||||
return ast.WalkContinue, nil
|
||||
}
|
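A hypothetical sketch assuming it sits in this renderer package (not part of the original change): how reBlockDetails splits the info string of a fenced code block opened as ```go collapse 10 title Example into language, options and title.

package renderer

import (
	"fmt"
	"strings"
)

func exampleBlockDetails() {
	groups := reBlockDetails.FindStringSubmatch("go collapse 10 title Example")

	lang, options, title := groups[1], strings.Fields(groups[2]), groups[3]

	fmt.Println(lang)    // go
	fmt.Println(options) // [collapse 10]: collapse=true, line numbers starting at 10
	fmt.Println(title)   // Example
}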
renderer/heading.go (new file)
@ -0,0 +1,57 @@
|
||||
package renderer
|
||||
|
||||
import (
|
||||
"github.com/yuin/goldmark/ast"
|
||||
"github.com/yuin/goldmark/renderer"
|
||||
"github.com/yuin/goldmark/renderer/html"
|
||||
"github.com/yuin/goldmark/util"
|
||||
)
|
||||
|
||||
type ConfluenceHeadingRenderer struct {
|
||||
html.Config
|
||||
DropFirstH1 bool
|
||||
}
|
||||
|
||||
// NewConfluenceHeadingRenderer creates a new instance of the ConfluenceHeadingRenderer
|
||||
func NewConfluenceHeadingRenderer(dropFirstH1 bool, opts ...html.Option) renderer.NodeRenderer {
|
||||
return &ConfluenceHeadingRenderer{
|
||||
Config: html.NewConfig(),
|
||||
DropFirstH1: dropFirstH1,
|
||||
}
|
||||
}
|
||||
|
||||
// RegisterFuncs implements NodeRenderer.RegisterFuncs .
|
||||
func (r *ConfluenceHeadingRenderer) RegisterFuncs(reg renderer.NodeRendererFuncRegisterer) {
|
||||
reg.Register(ast.KindHeading, r.renderHeading)
|
||||
}
|
||||
|
||||
func (r *ConfluenceHeadingRenderer) renderHeading(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
n := node.(*ast.Heading)
|
||||
|
||||
// If this is the first h1 heading of the document and we want to drop it, let's not render it at all.
|
||||
if n.Level == 1 && r.DropFirstH1 {
|
||||
if !entering {
|
||||
r.DropFirstH1 = false
|
||||
}
|
||||
return ast.WalkSkipChildren, nil
|
||||
}
|
||||
|
||||
return r.goldmarkRenderHeading(w, source, node, entering)
|
||||
}
|
||||
|
||||
func (r *ConfluenceHeadingRenderer) goldmarkRenderHeading(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
n := node.(*ast.Heading)
|
||||
if entering {
|
||||
_, _ = w.WriteString("<h")
|
||||
_ = w.WriteByte("0123456"[n.Level])
|
||||
if n.Attributes() != nil {
|
||||
html.RenderAttributes(w, node, html.HeadingAttributeFilter)
|
||||
}
|
||||
_ = w.WriteByte('>')
|
||||
} else {
|
||||
_, _ = w.WriteString("</h")
|
||||
_ = w.WriteByte("0123456"[n.Level])
|
||||
_, _ = w.WriteString(">\n")
|
||||
}
|
||||
return ast.WalkContinue, nil
|
||||
}
|
renderer/htmlblock.go (new file)
@ -0,0 +1,110 @@
|
||||
package renderer
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"github.com/kovetskiy/mark/stdlib"
|
||||
|
||||
"github.com/yuin/goldmark/ast"
|
||||
"github.com/yuin/goldmark/renderer"
|
||||
"github.com/yuin/goldmark/renderer/html"
|
||||
"github.com/yuin/goldmark/util"
|
||||
)
|
||||
|
||||
type ConfluenceHTMLBlockRenderer struct {
|
||||
html.Config
|
||||
}
|
||||
|
||||
// NewConfluenceHTMLBlockRenderer creates a new instance of the ConfluenceHTMLBlockRenderer
|
||||
func NewConfluenceHTMLBlockRenderer(stdlib *stdlib.Lib, opts ...html.Option) renderer.NodeRenderer {
|
||||
return &ConfluenceHTMLBlockRenderer{
|
||||
Config: html.NewConfig(),
|
||||
}
|
||||
}
|
||||
|
||||
// RegisterFuncs implements NodeRenderer.RegisterFuncs .
|
||||
func (r *ConfluenceHTMLBlockRenderer) RegisterFuncs(reg renderer.NodeRendererFuncRegisterer) {
|
||||
reg.Register(ast.KindHTMLBlock, r.renderHTMLBlock)
|
||||
}
|
||||
|
||||
func (r *ConfluenceHTMLBlockRenderer) renderHTMLBlock(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
if !entering {
|
||||
return r.goldmarkRenderHTMLBlock(w, source, node, entering)
|
||||
}
|
||||
|
||||
n := node.(*ast.HTMLBlock)
|
||||
l := n.Lines().Len()
|
||||
for i := 0; i < l; i++ {
|
||||
line := n.Lines().At(i)
|
||||
|
||||
switch strings.Trim(string(line.Value(source)), "\n") {
|
||||
case "<!-- ac:layout -->":
|
||||
_, _ = w.WriteString("<ac:layout>\n")
|
||||
return ast.WalkContinue, nil
|
||||
case "<!-- ac:layout end -->":
|
||||
_, _ = w.WriteString("</ac:layout>\n")
|
||||
return ast.WalkContinue, nil
|
||||
case "<!-- ac:layout-section type:single -->":
|
||||
_, _ = w.WriteString("<ac:layout-section ac:type=\"single\">\n")
|
||||
return ast.WalkContinue, nil
|
||||
case "<!-- ac:layout-section type:two_equal -->":
|
||||
_, _ = w.WriteString("<ac:layout-section ac:type=\"two_equal\">\n")
|
||||
return ast.WalkContinue, nil
|
||||
case "<!-- ac:layout-section type:two_left_sidebar -->":
|
||||
_, _ = w.WriteString("<ac:layout-section ac:type=\"two_left_sidebar\">\n")
|
||||
return ast.WalkContinue, nil
|
||||
case "<!-- ac:layout-section type:two_right_sidebar -->":
|
||||
_, _ = w.WriteString("<ac:layout-section ac:type=\"two_right_sidebar\">\n")
|
||||
return ast.WalkContinue, nil
|
||||
case "<!-- ac:layout-section type:three -->":
|
||||
_, _ = w.WriteString("<ac:layout-section ac:type=\"three\">\n")
|
||||
return ast.WalkContinue, nil
|
||||
case "<!-- ac:layout-section type:three_with_sidebars -->":
|
||||
_, _ = w.WriteString("<ac:layout-section ac:type=\"three_with_sidebars\">\n")
|
||||
return ast.WalkContinue, nil
|
||||
case "<!-- ac:layout-section end -->":
|
||||
_, _ = w.WriteString("</ac:layout-section>\n")
|
||||
return ast.WalkContinue, nil
|
||||
case "<!-- ac:layout-cell -->":
|
||||
_, _ = w.WriteString("<ac:layout-cell>\n")
|
||||
return ast.WalkContinue, nil
|
||||
case "<!-- ac:layout-cell end -->":
|
||||
_, _ = w.WriteString("</ac:layout-cell>\n")
|
||||
return ast.WalkContinue, nil
|
||||
case "<!-- ac:placeholder -->":
|
||||
_, _ = w.WriteString("<ac:placeholder>\n")
|
||||
return ast.WalkContinue, nil
|
||||
case "<!-- ac:placeholder end -->":
|
||||
_, _ = w.WriteString("</ac:placeholder>\n")
|
||||
return ast.WalkContinue, nil
|
||||
|
||||
}
|
||||
}
|
||||
return r.goldmarkRenderHTMLBlock(w, source, node, entering)
|
||||
|
||||
}
|
||||
|
||||
func (r *ConfluenceHTMLBlockRenderer) goldmarkRenderHTMLBlock(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
n := node.(*ast.HTMLBlock)
|
||||
if entering {
|
||||
if r.Unsafe {
|
||||
l := n.Lines().Len()
|
||||
for i := 0; i < l; i++ {
|
||||
line := n.Lines().At(i)
|
||||
r.Writer.SecureWrite(w, line.Value(source))
|
||||
}
|
||||
} else {
|
||||
_, _ = w.WriteString("<!-- raw HTML omitted -->\n")
|
||||
}
|
||||
} else {
|
||||
if n.HasClosure() {
|
||||
if r.Unsafe {
|
||||
closure := n.ClosureLine
|
||||
r.Writer.SecureWrite(w, closure.Value(source))
|
||||
} else {
|
||||
_, _ = w.WriteString("<!-- raw HTML omitted -->\n")
|
||||
}
|
||||
}
|
||||
}
|
||||
return ast.WalkContinue, nil
|
||||
}
|
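A hypothetical sketch assuming it sits in this renderer package (not part of the original change): the HTML comments recognised by renderHTMLBlock let a markdown page opt into Confluence layouts.

package renderer

// A document shaped like this...
const layoutExample = `
<!-- ac:layout -->
<!-- ac:layout-section type:two_equal -->
<!-- ac:layout-cell -->
Left column content.
<!-- ac:layout-cell end -->
<!-- ac:layout-cell -->
Right column content.
<!-- ac:layout-cell end -->
<!-- ac:layout-section end -->
<!-- ac:layout end -->
`

// ...is rendered with <ac:layout>, <ac:layout-section ac:type="two_equal">
// and <ac:layout-cell> wrappers in place of the raw HTML comments.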
renderer/image.go (new file)
@ -0,0 +1,118 @@
|
||||
package renderer
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/kovetskiy/mark/attachment"
|
||||
"github.com/kovetskiy/mark/stdlib"
|
||||
"github.com/kovetskiy/mark/vfs"
|
||||
|
||||
"github.com/yuin/goldmark/ast"
|
||||
"github.com/yuin/goldmark/renderer"
|
||||
"github.com/yuin/goldmark/renderer/html"
|
||||
"github.com/yuin/goldmark/util"
|
||||
)
|
||||
|
||||
type ConfluenceImageRenderer struct {
|
||||
html.Config
|
||||
Stdlib *stdlib.Lib
|
||||
Path string
|
||||
Attachments attachment.Attacher
|
||||
}
|
||||
|
||||
// NewConfluenceImageRenderer creates a new instance of the ConfluenceImageRenderer
|
||||
func NewConfluenceImageRenderer(stdlib *stdlib.Lib, attachments attachment.Attacher, path string, opts ...html.Option) renderer.NodeRenderer {
|
||||
return &ConfluenceImageRenderer{
|
||||
Config: html.NewConfig(),
|
||||
Stdlib: stdlib,
|
||||
Path: path,
|
||||
Attachments: attachments,
|
||||
}
|
||||
}
|
||||
|
||||
// RegisterFuncs implements NodeRenderer.RegisterFuncs .
|
||||
func (r *ConfluenceImageRenderer) RegisterFuncs(reg renderer.NodeRendererFuncRegisterer) {
|
||||
reg.Register(ast.KindImage, r.renderImage)
|
||||
}
|
||||
|
||||
// renderImage renders an inline image
|
||||
func (r *ConfluenceImageRenderer) renderImage(writer util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
if !entering {
|
||||
return ast.WalkContinue, nil
|
||||
}
|
||||
n := node.(*ast.Image)
|
||||
|
||||
attachments, err := attachment.ResolveLocalAttachments(vfs.LocalOS, filepath.Dir(r.Path), []string{string(n.Destination)})
|
||||
|
||||
// We were unable to resolve it locally, treat as URL
|
||||
if err != nil {
|
||||
escapedURL := string(n.Destination)
|
||||
escapedURL = strings.ReplaceAll(escapedURL, "&", "&amp;")
|
||||
|
||||
err = r.Stdlib.Templates.ExecuteTemplate(
|
||||
writer,
|
||||
"ac:image",
|
||||
struct {
|
||||
Width string
|
||||
Height string
|
||||
Title string
|
||||
Alt string
|
||||
Attachment string
|
||||
Url string
|
||||
}{
|
||||
"",
|
||||
"",
|
||||
string(n.Title),
|
||||
string(nodeToHTMLText(n, source)),
|
||||
"",
|
||||
escapedURL,
|
||||
},
|
||||
)
|
||||
} else {
|
||||
|
||||
r.Attachments.Attach(attachments[0])
|
||||
|
||||
err = r.Stdlib.Templates.ExecuteTemplate(
|
||||
writer,
|
||||
"ac:image",
|
||||
struct {
|
||||
Width string
|
||||
Height string
|
||||
Title string
|
||||
Alt string
|
||||
Attachment string
|
||||
Url string
|
||||
}{
|
||||
"",
|
||||
"",
|
||||
string(n.Title),
|
||||
string(nodeToHTMLText(n, source)),
|
||||
attachments[0].Filename,
|
||||
"",
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return ast.WalkStop, err
|
||||
}
|
||||
|
||||
return ast.WalkSkipChildren, nil
|
||||
}
|
||||
|
||||
// https://github.com/yuin/goldmark/blob/c446c414ef3a41fb562da0ae5badd18f1502c42f/renderer/html/html.go
|
||||
func nodeToHTMLText(n ast.Node, source []byte) []byte {
|
||||
var buf bytes.Buffer
|
||||
for c := n.FirstChild(); c != nil; c = c.NextSibling() {
|
||||
if s, ok := c.(*ast.String); ok && s.IsCode() {
|
||||
buf.Write(s.Value)
|
||||
} else if t, ok := c.(*ast.Text); ok {
|
||||
buf.Write(util.EscapeHTML(t.Value(source)))
|
||||
} else {
|
||||
buf.Write(nodeToHTMLText(c, source))
|
||||
}
|
||||
}
|
||||
return buf.Bytes()
|
||||
}
|
renderer/link.go (new file)
@ -0,0 +1,92 @@
|
||||
package renderer
|
||||
|
||||
import (
|
||||
"github.com/yuin/goldmark/ast"
|
||||
"github.com/yuin/goldmark/renderer"
|
||||
"github.com/yuin/goldmark/renderer/html"
|
||||
"github.com/yuin/goldmark/util"
|
||||
)
|
||||
|
||||
type ConfluenceLinkRenderer struct {
|
||||
html.Config
|
||||
}
|
||||
|
||||
// NewConfluenceLinkRenderer creates a new instance of the ConfluenceLinkRenderer
|
||||
func NewConfluenceLinkRenderer(opts ...html.Option) renderer.NodeRenderer {
|
||||
return &ConfluenceLinkRenderer{
|
||||
Config: html.NewConfig(),
|
||||
}
|
||||
}
|
||||
|
||||
// RegisterFuncs implements NodeRenderer.RegisterFuncs .
|
||||
func (r *ConfluenceLinkRenderer) RegisterFuncs(reg renderer.NodeRendererFuncRegisterer) {
|
||||
reg.Register(ast.KindLink, r.renderLink)
|
||||
}
|
||||
|
||||
// renderLink renders links specifically for confluence
|
||||
func (r *ConfluenceLinkRenderer) renderLink(writer util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
n := node.(*ast.Link)
|
||||
if string(n.Destination[0:3]) == "ac:" {
|
||||
if entering {
|
||||
_, err := writer.Write([]byte("<ac:link><ri:page ri:content-title=\""))
|
||||
if err != nil {
|
||||
return ast.WalkStop, err
|
||||
}
|
||||
|
||||
if len(string(n.Destination)) < 4 {
|
||||
//nolint:staticcheck
|
||||
_, err := writer.Write(node.Text(source))
|
||||
if err != nil {
|
||||
return ast.WalkStop, err
|
||||
}
|
||||
} else {
|
||||
_, err := writer.Write(n.Destination[3:])
|
||||
if err != nil {
|
||||
return ast.WalkStop, err
|
||||
}
|
||||
|
||||
}
|
||||
_, err = writer.Write([]byte("\"/><ac:plain-text-link-body><![CDATA["))
|
||||
if err != nil {
|
||||
return ast.WalkStop, err
|
||||
}
|
||||
|
||||
//nolint:staticcheck
|
||||
_, err = writer.Write(node.Text(source))
|
||||
if err != nil {
|
||||
return ast.WalkStop, err
|
||||
}
|
||||
|
||||
_, err = writer.Write([]byte("]]></ac:plain-text-link-body></ac:link>"))
|
||||
if err != nil {
|
||||
return ast.WalkStop, err
|
||||
}
|
||||
}
|
||||
return ast.WalkSkipChildren, nil
|
||||
}
|
||||
return r.goldmarkRenderLink(writer, source, node, entering)
|
||||
}
|
||||
|
||||
// goldmarkRenderLink is the default renderLink implementation from https://github.com/yuin/goldmark/blob/9d6f314b99ca23037c93d76f248be7b37de6220a/renderer/html/html.go#L552
|
||||
func (r *ConfluenceLinkRenderer) goldmarkRenderLink(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
n := node.(*ast.Link)
|
||||
if entering {
|
||||
_, _ = w.WriteString("<a href=\"")
|
||||
if r.Unsafe || !html.IsDangerousURL(n.Destination) {
|
||||
_, _ = w.Write(util.EscapeHTML(util.URLEscape(n.Destination, true)))
|
||||
}
|
||||
_ = w.WriteByte('"')
|
||||
if n.Title != nil {
|
||||
_, _ = w.WriteString(` title="`)
|
||||
r.Writer.Write(w, n.Title)
|
||||
_ = w.WriteByte('"')
|
||||
}
|
||||
if n.Attributes() != nil {
|
||||
html.RenderAttributes(w, n, html.LinkAttributeFilter)
|
||||
}
|
||||
_ = w.WriteByte('>')
|
||||
} else {
|
||||
_, _ = w.WriteString("</a>")
|
||||
}
|
||||
return ast.WalkContinue, nil
|
||||
}
|
renderer/paragraph.go (new file)
@ -0,0 +1,44 @@
|
||||
package renderer
|
||||
|
||||
import (
|
||||
"github.com/yuin/goldmark/ast"
|
||||
"github.com/yuin/goldmark/renderer"
|
||||
"github.com/yuin/goldmark/renderer/html"
|
||||
"github.com/yuin/goldmark/util"
|
||||
)
|
||||
|
||||
type ConfluenceParagraphRenderer struct {
|
||||
html.Config
|
||||
}
|
||||
|
||||
// NewConfluenceParagraphRenderer creates a new instance of the ConfluenceParagraphRenderer
|
||||
func NewConfluenceParagraphRenderer(opts ...html.Option) renderer.NodeRenderer {
|
||||
return &ConfluenceParagraphRenderer{
|
||||
Config: html.NewConfig(),
|
||||
}
|
||||
}
|
||||
|
||||
// RegisterFuncs implements NodeRenderer.RegisterFuncs .
|
||||
func (r *ConfluenceParagraphRenderer) RegisterFuncs(reg renderer.NodeRendererFuncRegisterer) {
|
||||
reg.Register(ast.KindParagraph, r.renderParagraph)
|
||||
}
|
||||
|
||||
func (r *ConfluenceParagraphRenderer) renderParagraph(w util.BufWriter, source []byte, n ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
if entering {
|
||||
if n.FirstChild().Kind() != ast.KindRawHTML {
|
||||
if n.Attributes() != nil {
|
||||
_, _ = w.WriteString("<p")
|
||||
html.RenderAttributes(w, n, html.ParagraphAttributeFilter)
|
||||
_ = w.WriteByte('>')
|
||||
} else {
|
||||
_, _ = w.WriteString("<p>")
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if n.FirstChild().Kind() != ast.KindRawHTML {
|
||||
_, _ = w.WriteString("</p>")
|
||||
}
|
||||
_, _ = w.WriteString("\n")
|
||||
}
|
||||
return ast.WalkContinue, nil
|
||||
}
|
renderer/text.go (new file)
@ -0,0 +1,136 @@
|
||||
package renderer
|
||||
|
||||
import (
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
|
||||
"github.com/yuin/goldmark/ast"
|
||||
"github.com/yuin/goldmark/renderer"
|
||||
"github.com/yuin/goldmark/renderer/html"
|
||||
"github.com/yuin/goldmark/util"
|
||||
)
|
||||
|
||||
// ConfluenceTextRenderer slightly alters the default goldmark behavior for
|
||||
// inline text block. It allows for soft breaks
|
||||
// (c.f. https://spec.commonmark.org/0.30/#softbreak)
|
||||
// to be rendered into HTML as either '\n' (the goldmark default)
|
||||
// or as ' '.
|
||||
// This latter option is useful for Confluence,
|
||||
// which inserts <br> tags into uploaded HTML where it sees '\n'.
|
||||
// See also https://sembr.org/ for partial motivation.
|
||||
type ConfluenceTextRenderer struct {
|
||||
html.Config
|
||||
softBreak rune
|
||||
}
|
||||
|
||||
// NewConfluenceTextRenderer creates a new instance of the ConfluenceTextRenderer
|
||||
func NewConfluenceTextRenderer(stripNL bool, opts ...html.Option) renderer.NodeRenderer {
|
||||
sb := '\n'
|
||||
if stripNL {
|
||||
sb = ' '
|
||||
}
|
||||
return &ConfluenceTextRenderer{
|
||||
Config: html.NewConfig(),
|
||||
softBreak: sb,
|
||||
}
|
||||
}
|
||||
|
||||
// RegisterFuncs implements NodeRenderer.RegisterFuncs .
|
||||
func (r *ConfluenceTextRenderer) RegisterFuncs(reg renderer.NodeRendererFuncRegisterer) {
|
||||
reg.Register(ast.KindText, r.renderText)
|
||||
}
|
||||
|
||||
// This is taken from https://github.com/yuin/goldmark/blob/v1.6.0/renderer/html/html.go#L719
|
||||
// with the hardcoded '\n' for soft breaks swapped for the configurable r.softBreak
|
||||
func (r *ConfluenceTextRenderer) renderText(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
if !entering {
|
||||
return ast.WalkContinue, nil
|
||||
}
|
||||
n := node.(*ast.Text)
|
||||
segment := n.Segment
|
||||
if n.IsRaw() {
|
||||
r.Writer.RawWrite(w, segment.Value(source))
|
||||
} else {
|
||||
value := segment.Value(source)
|
||||
r.Writer.Write(w, value)
|
||||
if n.HardLineBreak() || (n.SoftLineBreak() && r.HardWraps) {
|
||||
if r.XHTML {
|
||||
_, _ = w.WriteString("<br />\n")
|
||||
} else {
|
||||
_, _ = w.WriteString("<br>\n")
|
||||
}
|
||||
} else if n.SoftLineBreak() {
|
||||
if r.EastAsianLineBreaks != html.EastAsianLineBreaksNone && len(value) != 0 {
|
||||
sibling := node.NextSibling()
|
||||
if sibling != nil && sibling.Kind() == ast.KindText {
|
||||
if siblingText := sibling.(*ast.Text).Value(source); len(siblingText) != 0 {
|
||||
thisLastRune := util.ToRune(value, len(value)-1)
|
||||
siblingFirstRune, _ := utf8.DecodeRune(siblingText)
|
||||
// Inline the softLineBreak function as it's not public
|
||||
writeLineBreak := false
|
||||
switch r.EastAsianLineBreaks {
|
||||
case html.EastAsianLineBreaksNone:
|
||||
writeLineBreak = false
|
||||
case html.EastAsianLineBreaksSimple:
|
||||
writeLineBreak = !util.IsEastAsianWideRune(thisLastRune) || !util.IsEastAsianWideRune(siblingFirstRune)
|
||||
case html.EastAsianLineBreaksCSS3Draft:
|
||||
writeLineBreak = eastAsianLineBreaksCSS3DraftSoftLineBreak(thisLastRune, siblingFirstRune)
|
||||
}
|
||||
|
||||
if writeLineBreak {
|
||||
_ = w.WriteByte(byte(r.softBreak))
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
_ = w.WriteByte(byte(r.softBreak))
|
||||
}
|
||||
}
|
||||
}
|
||||
return ast.WalkContinue, nil
|
||||
}
|
||||
|
||||
func eastAsianLineBreaksCSS3DraftSoftLineBreak(thisLastRune rune, siblingFirstRune rune) bool {
|
||||
// Implements CSS text level3 Segment Break Transformation Rules with some enhancements.
|
||||
// References:
|
||||
// - https://www.w3.org/TR/2020/WD-css-text-3-20200429/#line-break-transform
|
||||
// - https://github.com/w3c/csswg-drafts/issues/5086
|
||||
|
||||
// Rule1:
|
||||
// If the character immediately before or immediately after the segment break is
|
||||
// the zero-width space character (U+200B), then the break is removed, leaving behind the zero-width space.
|
||||
if thisLastRune == '\u200B' || siblingFirstRune == '\u200B' {
|
||||
return false
|
||||
}
|
||||
|
||||
// Rule2:
|
||||
// Otherwise, if the East Asian Width property of both the character before and after the segment break is
|
||||
// F, W, or H (not A), and neither side is Hangul, then the segment break is removed.
|
||||
thisLastRuneEastAsianWidth := util.EastAsianWidth(thisLastRune)
|
||||
siblingFirstRuneEastAsianWidth := util.EastAsianWidth(siblingFirstRune)
|
||||
if (thisLastRuneEastAsianWidth == "F" ||
|
||||
thisLastRuneEastAsianWidth == "W" ||
|
||||
thisLastRuneEastAsianWidth == "H") &&
|
||||
(siblingFirstRuneEastAsianWidth == "F" ||
|
||||
siblingFirstRuneEastAsianWidth == "W" ||
|
||||
siblingFirstRuneEastAsianWidth == "H") {
|
||||
return unicode.Is(unicode.Hangul, thisLastRune) || unicode.Is(unicode.Hangul, siblingFirstRune)
|
||||
}
|
||||
|
||||
// Rule3:
|
||||
// Otherwise, if either the character before or after the segment break belongs to
|
||||
// the space-discarding character set and it is a Unicode Punctuation (P*) or U+3000,
|
||||
// then the segment break is removed.
|
||||
if util.IsSpaceDiscardingUnicodeRune(thisLastRune) ||
|
||||
unicode.IsPunct(thisLastRune) ||
|
||||
thisLastRune == '\u3000' ||
|
||||
util.IsSpaceDiscardingUnicodeRune(siblingFirstRune) ||
|
||||
unicode.IsPunct(siblingFirstRune) ||
|
||||
siblingFirstRune == '\u3000' {
|
||||
return false
|
||||
}
|
||||
|
||||
// Rule4:
|
||||
// Otherwise, the segment break is converted to a space (U+0020).
|
||||
return true
|
||||
}
|
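A hypothetical wiring sketch (not part of the change; assumes it compiles alongside this package) showing the effect of stripNL on soft line breaks:

package renderer

import (
	"bytes"
	"fmt"

	"github.com/yuin/goldmark"
	"github.com/yuin/goldmark/renderer"
	"github.com/yuin/goldmark/util"
)

// exampleStripNewlines registers the text renderer with stripNL enabled, so
// soft line breaks are written as spaces instead of newlines.
func exampleStripNewlines() {
	md := goldmark.New(
		goldmark.WithRendererOptions(
			renderer.WithNodeRenderers(
				util.Prioritized(NewConfluenceTextRenderer(true), 100),
			),
		),
	)

	var out bytes.Buffer
	_ = md.Convert([]byte("first line\nsecond line"), &out)

	// Soft-wrapped source lines are joined with a space, so Confluence does
	// not turn every wrap into a <br>.
	fmt.Println(out.String())
}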
stdlib/stdlib.go (new file)
@ -0,0 +1,467 @@
|
||||
package stdlib
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"text/template"
|
||||
|
||||
"github.com/kovetskiy/mark/confluence"
|
||||
"github.com/kovetskiy/mark/macro"
|
||||
"github.com/reconquest/pkg/log"
|
||||
|
||||
"github.com/reconquest/karma-go"
|
||||
)
|
||||
|
||||
type Lib struct {
|
||||
Macros []macro.Macro
|
||||
Templates *template.Template
|
||||
}
|
||||
|
||||
func New(api *confluence.API) (*Lib, error) {
|
||||
var (
|
||||
lib Lib
|
||||
err error
|
||||
)
|
||||
|
||||
lib.Templates, err = templates(api)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
lib.Macros, err = macros(lib.Templates)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &lib, nil
|
||||
}
|
||||
|
||||
func macros(templates *template.Template) ([]macro.Macro, error) {
|
||||
text := func(line ...string) []byte {
|
||||
return []byte(strings.Join(line, "\n"))
|
||||
}
|
||||
|
||||
macros, _, err := macro.ExtractMacros(
|
||||
"",
|
||||
"",
|
||||
text(
|
||||
`<!-- Macro: @\{([^}]+)\}`,
|
||||
` Template: ac:link:user`,
|
||||
` Name: ${1} -->`,
|
||||
|
||||
// TODO(seletskiy): more macros here
|
||||
),
|
||||
|
||||
templates,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return macros, nil
|
||||
}
|
||||
|
||||
func templates(api *confluence.API) (*template.Template, error) {
|
||||
text := func(line ...string) string {
|
||||
return strings.Join(line, ``)
|
||||
}
|
||||
|
||||
templates := template.New(`stdlib`).Funcs(
|
||||
template.FuncMap{
|
||||
"user": func(name string) *confluence.User {
|
||||
user, err := api.GetUserByName(name)
|
||||
if err != nil {
|
||||
log.Error(err)
|
||||
}
|
||||
|
||||
return user
|
||||
},
|
||||
|
||||
// The only way to escape CDATA end marker ']]>' is to split it
|
||||
// into two CDATA sections.
|
||||
"cdata": func(data string) string {
|
||||
return strings.ReplaceAll(
|
||||
data,
|
||||
"]]>",
|
||||
"]]><![CDATA[]]]]><![CDATA[>",
|
||||
)
|
||||
},
|
||||
"convertAttachment": func(data string) string {
|
||||
return strings.ReplaceAll(
|
||||
data,
|
||||
"/",
|
||||
"_",
|
||||
)
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
var err error
|
||||
|
||||
for name, body := range map[string]string{
|
||||
// This template is used to select whole article layout
|
||||
`ac:layout`: text(
|
||||
`{{ if eq .Layout "article" }}`,
|
||||
/**/ `<ac:layout>`,
|
||||
/**/ `<ac:layout-section ac:type="two_right_sidebar">`,
|
||||
/**/ `<ac:layout-cell>{{ .Body }}</ac:layout-cell>`,
|
||||
/**/ `<ac:layout-cell>{{ .Sidebar }}</ac:layout-cell>`,
|
||||
/**/ `</ac:layout-section>`,
|
||||
/**/ `</ac:layout>`,
|
||||
`{{ else }}`,
|
||||
/**/ `{{ .Body }}`,
|
||||
`{{ end }}`,
|
||||
),
|
||||
|
||||
// This template is used for rendering code in ```
|
||||
`ac:code`: text(
|
||||
`<ac:structured-macro ac:name="{{ if eq .Language "mermaid" }}cloudscript-confluence-mermaid{{ else }}code{{ end }}">`,
|
||||
/**/ `{{ if eq .Language "mermaid" }}<ac:parameter ac:name="showSource">true</ac:parameter>{{ else }}`,
|
||||
/**/ `<ac:parameter ac:name="language">{{ .Language }}</ac:parameter>{{ end }}`,
|
||||
/**/ `<ac:parameter ac:name="collapse">{{ .Collapse }}</ac:parameter>`,
|
||||
/**/ `{{ if .Theme }}<ac:parameter ac:name="theme">{{ .Theme }}</ac:parameter>{{ end }}`,
|
||||
/**/ `{{ if .Linenumbers }}<ac:parameter ac:name="linenumbers">{{ .Linenumbers }}</ac:parameter>{{ end }}`,
|
||||
/**/ `{{ if .Firstline }}<ac:parameter ac:name="firstline">{{ .Firstline }}</ac:parameter>{{ end }}`,
|
||||
/**/ `{{ if .Title }}<ac:parameter ac:name="title">{{ .Title }}</ac:parameter>{{ end }}`,
|
||||
/**/ `<ac:plain-text-body><![CDATA[{{ .Text | cdata }}]]></ac:plain-text-body>`,
|
||||
`</ac:structured-macro>`,
|
||||
),
|
||||
|
||||
`ac:status`: text(
|
||||
`<ac:structured-macro ac:name="status">`,
|
||||
`<ac:parameter ac:name="colour">{{ or .Color "Grey" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="title">{{ or .Title .Color }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="subtle">{{ or .Subtle false }}</ac:parameter>`,
|
||||
`</ac:structured-macro>`,
|
||||
),
|
||||
|
||||
`ac:link:user`: text(
|
||||
`{{ with .Name | user }}`,
|
||||
/**/ `<ac:link>`,
|
||||
/**/ `{{ if .AccountID }}`,
|
||||
/****/ `<ri:user ri:account-id="{{ .AccountID }}" />`,
|
||||
/**/ `{{ else }}`,
|
||||
/****/ `<ri:user ri:userkey="{{ .UserKey }}" />`,
|
||||
/**/ `{{ end }}`,
|
||||
/**/ `</ac:link>`,
|
||||
`{{ else }}`,
|
||||
/**/ `{{ .Name }}`,
|
||||
`{{ end }}`,
|
||||
),
|
||||
|
||||
`ac:jira:ticket`: text(
|
||||
`<ac:structured-macro ac:name="jira">`,
|
||||
`<ac:parameter ac:name="key">{{ .Ticket }}</ac:parameter>`,
|
||||
`</ac:structured-macro>`,
|
||||
),
|
||||
|
||||
/* Used for rendering Jira Filters */
|
||||
|
||||
`ac:jira:filter`: text(
|
||||
`<ac:structured-macro ac:name="jira">`,
|
||||
`<ac:parameter ac:name="server">{{ or .Server "System JIRA" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="jqlQuery">{{ .JQL }}</ac:parameter>`,
|
||||
`</ac:structured-macro>`,
|
||||
),
|
||||
|
||||
/* https://confluence.atlassian.com/doc/jira-issues-macro-139380.html */
|
||||
`ac:jiraissues`: text(
|
||||
`<ac:structured-macro ac:name="jiraissues">`,
|
||||
`<ac:parameter ac:name="anonymous">{{ or .Anonymous false }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="baseurl"><ri:url ri:value="{{ or .BaseURL .URL }}" /></ac:parameter>`,
|
||||
`<ac:parameter ac:name="columns">{{ or .Columns "type;key;summary;assignee;reporter;priority;status;resolution;created;updated;due" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="count">{{ or .Count false }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="cache">{{ or .Cache "on" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="height">{{ or .Height 480 }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="renderMode">{{ or .RenderMode "static" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="title">{{ or .Title "Jira Issues" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="url"><ri:url ri:value="{{ .URL }}" /></ac:parameter>`,
|
||||
`<ac:parameter ac:name="width">{{ or .Width "100%" }}</ac:parameter>`,
|
||||
`</ac:structured-macro>`,
|
||||
),
|
||||
|
||||
/* https://confluence.atlassian.com/conf59/info-tip-note-and-warning-macros-792499127.html */
|
||||
|
||||
`ac:box`: text(
|
||||
`<ac:structured-macro ac:name="{{ .Name }}">`,
|
||||
`<ac:parameter ac:name="icon">{{ or .Icon "false" }}</ac:parameter>`,
|
||||
`{{ if .Title }}<ac:parameter ac:name="title">{{ .Title }}</ac:parameter>{{ end }}`,
|
||||
`<ac:rich-text-body>{{ .Body }}</ac:rich-text-body>`,
|
||||
`</ac:structured-macro>`,
|
||||
),
|
||||
|
||||
/* https://confluence.atlassian.com/conf59/table-of-contents-macro-792499210.html */
|
||||
|
||||
`ac:toc`: text(
|
||||
`<ac:structured-macro ac:name="toc">`,
|
||||
`<ac:parameter ac:name="printable">{{ or .Printable "true" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="style">{{ or .Style "disc" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="maxLevel">{{ or .MaxLevel "7" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="indent">{{ or .Indent "" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="minLevel">{{ or .MinLevel "1" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="exclude">{{ or .Exclude "" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="type">{{ or .Type "list" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="outline">{{ or .Outline "clear" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="include">{{ or .Include "" }}</ac:parameter>`,
|
||||
`</ac:structured-macro>`,
|
||||
),
|
||||
|
||||
/* https://confluence.atlassian.com/doc/children-display-macro-139501.html */
|
||||
|
||||
`ac:children`: text(
|
||||
`<ac:structured-macro ac:name="children">`,
|
||||
`{{ if .Reverse }}<ac:parameter ac:name="reverse">{{ or .Reverse }}</ac:parameter>{{ end }}`,
|
||||
`{{ if .Sort }}<ac:parameter ac:name="sort">{{ .Sort }}</ac:parameter>{{ end }}`,
|
||||
`{{ if .Style }}<ac:parameter ac:name="style">{{ .Style }}</ac:parameter>{{ end }}`,
|
||||
`{{ if .Page }}`,
|
||||
/**/ `<ac:parameter ac:name="page">`,
|
||||
/**/ `<ac:link>`,
|
||||
/**/ `<ri:page ri:content-title="{{ .Page }}"/>`,
|
||||
/**/ `</ac:link>`,
|
||||
/**/ `</ac:parameter>`,
|
||||
`{{ end }}`,
|
||||
`{{ if .Excerpt }}<ac:parameter ac:name="excerptType">{{ .Excerpt }}</ac:parameter>{{ end }}`,
|
||||
`{{ if .First }}<ac:parameter ac:name="first">{{ .First }}</ac:parameter>{{ end }}`,
|
||||
`{{ if .Depth }}<ac:parameter ac:name="depth">{{ .Depth }}</ac:parameter>{{ end }}`,
|
||||
`{{ if .All }}<ac:parameter ac:name="all">{{ .All }}</ac:parameter>{{ end }}`,
|
||||
`</ac:structured-macro>`,
|
||||
),
|
||||
|
||||
/* https://confluence.atlassian.com/doc/confluence-storage-format-790796544.html */
|
||||
|
||||
`ac:emoticon`: text(
|
||||
`<ac:emoticon ac:name="{{ .Name }}"/>`,
|
||||
),
|
||||
|
||||
`ac:image`: text(
|
||||
`<ac:image`,
|
||||
`{{ if .Width }} ac:width="{{ .Width }}"{{ end }}`,
|
||||
`{{ if .Height }} ac:height="{{ .Height }}"{{ end }}`,
|
||||
`{{ if .Title }} ac:title="{{ .Title }}"{{ end }}`,
|
||||
`{{ if .Alt }} ac:alt="{{ .Alt }}"{{ end }}>`,
|
||||
`{{ if .Attachment }}<ri:attachment ri:filename="{{ .Attachment | convertAttachment }}"/>{{ end }}`,
|
||||
`{{ if .Url }}<ri:url ri:value="{{ .Url }}"/>{{ end }}`,
|
||||
`</ac:image>`,
|
||||
),
|
||||
|
||||
/* https://confluence.atlassian.com/doc/widget-connector-macro-171180449.html#WidgetConnectorMacro-YouTube */
|
||||
|
||||
`ac:youtube`: text(
|
||||
`<ac:structured-macro ac:name="widget">`,
|
||||
`<ac:parameter ac:name="overlay">youtube</ac:parameter>`,
|
||||
`<ac:parameter ac:name="_template">com/atlassian/confluence/extra/widgetconnector/templates/youtube.vm</ac:parameter>`,
|
||||
`<ac:parameter ac:name="width">{{ or .Width "640px" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="height">{{ or .Height "360px" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="url"><ri:url ri:value="{{ .URL }}" /></ac:parameter>`,
|
||||
`</ac:structured-macro>`,
|
||||
),
|
||||
|
||||
/* https://support.atlassian.com/confluence-cloud/docs/insert-the-iframe-macro/ */
|
||||
|
||||
`ac:iframe`: text(
|
||||
`<ac:structured-macro ac:name="iframe">`,
|
||||
`<ac:parameter ac:name="src"><ri:url ri:value="{{ .URL }}" /></ac:parameter>`,
|
||||
`{{ if .Frameborder }}<ac:parameter ac:name="frameborder">{{ .Frameborder }}</ac:parameter>{{ end }}`,
|
||||
`{{ if .Scrolling }}<ac:parameter ac:name="id">{{ .Scrolling }}</ac:parameter>{{ end }}`,
|
||||
`{{ if .Align }}<ac:parameter ac:name="align">{{ .Align }}</ac:parameter>{{ end }}`,
|
||||
`<ac:parameter ac:name="width">{{ or .Width "640px" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="height">{{ or .Height "360px" }}</ac:parameter>`,
|
||||
`</ac:structured-macro>`,
|
||||
),
|
||||
|
||||
/* https://confluence.atlassian.com/doc/blog-posts-macro-139470.html */
|
||||
|
||||
`ac:blog-posts`: text(
|
||||
`<ac:structured-macro ac:name="blog-posts">`,
|
||||
`{{ if .Content }}<ac:parameter ac:name="content">{{ .Content }}</ac:parameter>{{ end }}`,
|
||||
`{{ if .Spaces }}<ac:parameter ac:name="spaces">{{ .Spaces }}</ac:parameter>{{ end }}`,
|
||||
`{{ if .Author }}<ac:parameter ac:name="author">{{ .Author }}</ac:parameter>{{ end }}`,
|
||||
`{{ if .Time }}<ac:parameter ac:name="time">{{ .Time }}</ac:parameter>{{ end }}`,
|
||||
`{{ if .Reverse }}<ac:parameter ac:name="reverse">{{ .Reverse }}</ac:parameter>{{ end }}`,
|
||||
`{{ if .Sort }}<ac:parameter ac:name="sort">{{ .Sort }}</ac:parameter>{{ end }}`,
|
||||
`{{ if .Max }}<ac:parameter ac:name="max">{{ .Max }}</ac:parameter>{{ end }}`,
|
||||
`{{ if .Label }}<ac:parameter ac:name="label">{{ .Label }}</ac:parameter>{{ end }}`,
|
||||
`</ac:structured-macro>`,
|
||||
),
|
||||
|
||||
/* https://confluence.atlassian.com/conf59/include-page-macro-792499125.html */
|
||||
|
||||
`ac:include`: text(
|
||||
`<ac:structured-macro ac:name="include">`,
|
||||
`<ac:parameter ac:name="">`,
|
||||
`<ac:link>`,
|
||||
`<ri:page ri:content-title="{{ .Page }}" {{if .Space }}ri:space-key="{{ .Space }}"{{ end }}/>`,
|
||||
`</ac:link>`,
|
||||
`</ac:parameter>`,
|
||||
`</ac:structured-macro>`,
|
||||
),
|
||||
|
||||
/* https://confluence.atlassian.com/conf59/excerpt-include-macro-792499101.html */
|
||||
/* https://support.atlassian.com/confluence-cloud/docs/insert-the-excerpt-include-macro/ */
|
||||
|
||||
`ac:excerpt-include`: text(
|
||||
`<ac:macro ac:name="excerpt-include">`,
|
||||
`{{ if .Name }}<ac:parameter ac:name="name">{{ .Name }}</ac:parameter>{{ end }}`,
|
||||
`<ac:parameter ac:name="nopanel">{{ if .NoPanel }}{{ .NoPanel }}{{ else }}false{{ end }}</ac:parameter>`,
|
||||
`<ac:default-parameter>{{ .Page }}</ac:default-parameter>`,
|
||||
`</ac:macro>`,
|
||||
),
|
||||
|
||||
/* https://confluence.atlassian.com/conf59/excerpt-macro-792499102.html */
|
||||
/* https://support.atlassian.com/confluence-cloud/docs/insert-the-excerpt-macro/ */
|
||||
|
||||
`ac:excerpt`: text(
|
||||
`<ac:structured-macro ac:name="excerpt">`,
|
||||
`{{ if .Name }}<ac:parameter ac:name="name">{{ .Name }}</ac:parameter>{{ end }}`,
|
||||
`<ac:parameter ac:name="hidden">{{ if .Hidden }}{{ .Hidden }}{{ else }}false{{ end }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="atlassian-macro-output-type">{{ if .OutputType }}{{ .OutputType }}{{ else }}BLOCK{{ end }}</ac:parameter>`,
|
||||
`<ac:rich-text-body>`,
|
||||
`{{ .Excerpt }}`,
|
||||
`</ac:rich-text-body>`,
|
||||
`</ac:structured-macro>`,
|
||||
),
|
||||
|
||||
/* https://confluence.atlassian.com/conf59/anchor-macro-792499068.html */
|
||||
|
||||
`ac:anchor`: text(
|
||||
`<ac:structured-macro ac:name="anchor">`,
|
||||
`<ac:parameter ac:name="">{{ .Anchor }}</ac:parameter>`,
|
||||
`</ac:structured-macro>`,
|
||||
),
|
||||
|
||||
/* https://confluence.atlassian.com/conf59/expand-macro-792499106.html */
|
||||
|
||||
`ac:expand`: text(
|
||||
`<ac:structured-macro ac:name="expand">`,
|
||||
`<ac:parameter ac:name="title">{{ .Title }}</ac:parameter>`,
|
||||
`<ac:rich-text-body>{{ .Body }}</ac:rich-text-body>`,
|
||||
`</ac:structured-macro>`,
|
||||
),
|
||||
|
||||
/* https://confluence.atlassian.com/conf59/user-profile-macro-792499223.html */
|
||||
|
||||
`ac:profile`: text(
|
||||
`{{ with .Name | user }}`,
|
||||
`<ac:structured-macro ac:name="profile">`,
|
||||
`<ac:parameter ac:name="user">`,
|
||||
`{{ if .AccountID }}`,
|
||||
/**/ `<ri:user ri:account-id="{{ .AccountID }}" />`,
|
||||
`{{ else }}`,
|
||||
/**/ `<ri:user ri:userkey="{{ .UserKey }}" />`,
|
||||
`{{ end }}`,
|
||||
`</ac:parameter>`,
|
||||
`</ac:structured-macro>`,
|
||||
`{{ end }}`,
|
||||
),
|
||||
|
||||
/* https://confluence.atlassian.com/conf59/content-by-label-macro-792499087.html */
|
||||
|
||||
`ac:contentbylabel`: text(
|
||||
`<ac:structured-macro ac:name="contentbylabel" ac:schema-version="3">`,
|
||||
`<ac:parameter ac:name="cql">{{ .CQL }}</ac:parameter>`,
|
||||
`</ac:structured-macro>`,
|
||||
),
|
||||
|
||||
/* https://confluence.atlassian.com/conf59/page-properties-report-macro-792499165.html */
|
||||
|
||||
`ac:detailssummary`: text(
|
||||
`<ac:structured-macro ac:name="detailssummary" ac:schema-version="2">`,
|
||||
`<ac:parameter ac:name="headings">{{ .Headings }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="firstcolumn">{{ .FirstColumn }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="sortBy">{{ .SortBy }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="cql">{{ .CQL }}</ac:parameter>`,
|
||||
`</ac:structured-macro>`,
|
||||
),
|
||||
|
||||
/* https://confluence.atlassian.com/conf59/page-properties-macro-792499154.html */
|
||||
|
||||
`ac:details`: text(
|
||||
`<ac:structured-macro ac:name="details" ac:schema-version="1">`,
|
||||
`<ac:rich-text-body>{{ .Body }}</ac:rich-text-body>`,
|
||||
`</ac:structured-macro>`,
|
||||
),
|
||||
|
||||
/* https://confluence.atlassian.com/conf59/page-tree-macro-792499177.html */
|
||||
|
||||
`ac:pagetree`: text(
|
||||
`<ac:structured-macro ac:name="pagetree" ac:schema-version="1">`,
|
||||
`<ac:parameter ac:name="root">`,
|
||||
`<ac:link>`,
|
||||
`<ri:page ri:content-title="{{ or .Title "@self" }}"/>`,
|
||||
`</ac:link>`,
|
||||
`</ac:parameter>`,
|
||||
`<ac:parameter ac:name="sort">{{ or .Sort "" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="excerpt">{{ or .Excerpt "" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="reverse">{{ or .Reverse "" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="searchBox">{{ or .SearchBox "" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="expandCollapseAll">{{ or .ExpandCollapseAll "" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="startDepth">{{ or .StartDepth "" }}</ac:parameter>`,
|
||||
`</ac:structured-macro>`,
|
||||
),
|
||||
|
||||
/* https://confluence.atlassian.com/conf59/page-tree-search-macro-792499178.html */
|
||||
|
||||
`ac:pagetreesearch`: text(
|
||||
`<ac:structured-macro ac:name="pagetreesearch">`,
|
||||
`{{ if .Root }}<ac:parameter ac:name="root">{{ .Root }}</ac:parameter>{{ end }}`,
|
||||
`</ac:structured-macro>`,
|
||||
),
|
||||
|
||||
/* https://confluence.atlassian.com/conf59/panel-macro-792499179.html */
|
||||
|
||||
`ac:panel`: text(
|
||||
`<ac:structured-macro ac:name="panel">`,
|
||||
`<ac:parameter ac:name="bgColor">{{ or .BGColor "" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="titleBGColor">{{ or .TitleBGColor "" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="title">{{ or .Title "" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="borderStyle">{{ or .BorderStyle "" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="borderColor">{{ or .BorderColor "" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="titleColor">{{ or .TitleColor "" }}</ac:parameter>`,
|
||||
`<ac:rich-text-body>{{ .Body }}</ac:rich-text-body>`,
|
||||
`</ac:structured-macro>`,
|
||||
),
|
||||
|
||||
/* https://confluence.atlassian.com/conf59/recently-updated-macro-792499187.html */
|
||||
`ac:recently-updated`: text(
|
||||
`<ac:structured-macro ac:name="recently-updated">`,
|
||||
`{{ if .Spaces }}<ac:parameter ac:name="spaces"><ri:space ri:space-key={{ .Spaces }}/></ac:parameter>{{ end }}`,
|
||||
`<ac:parameter ac:name="showProfilePic">{{ or .ShowProfilePic "" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="types">{{ or .Types "page, comment, blogpost" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="max">{{ or .Max "" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="labels">{{ or .Labels "" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="hideHeading">{{ or .HideHeading "" }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="theme">{{ or .Theme "" }}</ac:parameter>`,
|
||||
`</ac:structured-macro>`,
|
||||
),
|
||||
/* https://confluence.atlassian.com/conf59/column-macro-792499085.html */
|
||||
`ac:column`: text(
|
||||
`<ac:structured-macro ac:name="column">`,
|
||||
`<ac:parameter ac:name="width">{{ or .Width "" }}</ac:parameter>`,
|
||||
`<ac:rich-text-body>{{ or .Body "" }}</ac:rich-text-body>`,
|
||||
`</ac:structured-macro>`,
|
||||
),
|
||||
/* https://confluence.atlassian.com/conf59/multimedia-macro-792499140.html */
|
||||
`ac:multimedia`: text(
|
||||
`<ac:structured-macro ac:name="multimedia">`,
|
||||
`<ac:parameter ac:name="width">{{ or .Width 500 }}</ac:parameter>`,
|
||||
`<ac:parameter ac:name="name">`,
|
||||
`<ri:attachment ri:filename="{{ .Name | convertAttachment }}"/>`,
|
||||
`</ac:parameter>`,
|
||||
`<ac:parameter ac:name="autoplay">{{ or .AutoPlay "false"}}</ac:parameter>`,
|
||||
`</ac:structured-macro>`,
|
||||
),
|
||||
|
||||
// TODO(seletskiy): more templates here
|
||||
} {
|
||||
templates, err = templates.New(name).Parse(body)
|
||||
if err != nil {
|
||||
return nil, karma.
|
||||
Describe("template", body).
|
||||
Format(
|
||||
err,
|
||||
"unable to parse template",
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
return templates, nil
|
||||
}
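As a rough usage sketch (not part of this change; the "lib" value is assumed to come from stdlib.New above), any of these templates can be rendered directly through text/template:

	var buf bytes.Buffer
	err := lib.Templates.ExecuteTemplate(&buf, "ac:status", map[string]interface{}{
		"Color": "Green",
		"Title": "Done",
	})
	if err != nil {
		log.Error(err)
	}
	// buf now holds the <ac:structured-macro ac:name="status"> markup
	// with colour "Green" and title "Done".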
|
testdata/batch-tests/bad-test.md (vendored, new file, 6 lines)
@@ -0,0 +1,6 @@
|
||||
## Foo
|
||||
|
||||
> **TL;DR:** Thingy!
|
||||
> More stuff
|
||||
|
||||
Foo
|
testdata/batch-tests/errord-test.md (vendored, new file, 6 lines)
@@ -0,0 +1,6 @@
|
||||
## Foo
|
||||
|
||||
> **TL;DR:** Thingy!
|
||||
> More stuff
|
||||
|
||||
Foo
|
testdata/batch-tests/good-test.md (vendored, new file, 10 lines)
@@ -0,0 +1,10 @@
|
||||
<!-- Space: BatchTests -->
|
||||
<!-- Title: Hello World -->
|
||||
<!-- Title: Good Test -->
|
||||
|
||||
## Foo
|
||||
|
||||
> **TL;DR:** Thingy!
|
||||
> More stuff
|
||||
|
||||
Foo
|
testdata/batch-tests/invalid-test.md (vendored, new file, 15 lines)
@@ -0,0 +1,15 @@
|
||||
# a
|
||||
|
||||
## b
|
||||
|
||||
### c
|
||||
|
||||
#### d
|
||||
|
||||
##### e
|
||||
|
||||
# f
|
||||
|
||||
## g
|
||||
|
||||
# This/is some_Heading.yml
|
testdata/batch-tests/valid-test.md (vendored, new file, 19 lines)
@@ -0,0 +1,19 @@
|
||||
<!-- Space: BatchTests -->
|
||||
<!-- Title: Hello World -->
|
||||
<!-- Title: Working Test -->
|
||||
|
||||
# a
|
||||
|
||||
## b
|
||||
|
||||
### c
|
||||
|
||||
#### d
|
||||
|
||||
##### e
|
||||
|
||||
# f
|
||||
|
||||
## g
|
||||
|
||||
# This/is some_Heading.yml
|
testdata/codes-stripnewlines.html (vendored, new file, 68 lines)
@@ -0,0 +1,68 @@
|
||||
<p><code>inline</code></p>
|
||||
<ac:structured-macro ac:name="code"><ac:parameter ac:name="language"></ac:parameter><ac:parameter ac:name="collapse">false</ac:parameter><ac:plain-text-body><![CDATA[some code]]></ac:plain-text-body></ac:structured-macro><ac:structured-macro ac:name="code"><ac:parameter ac:name="language">bash</ac:parameter><ac:parameter ac:name="collapse">false</ac:parameter><ac:plain-text-body><![CDATA[code bash]]></ac:plain-text-body></ac:structured-macro><ac:structured-macro ac:name="code"><ac:parameter ac:name="language">bash</ac:parameter><ac:parameter ac:name="collapse">false</ac:parameter><ac:plain-text-body><![CDATA[with a newline
|
||||
]]></ac:plain-text-body></ac:structured-macro><ac:structured-macro ac:name="code"><ac:parameter ac:name="language">unknown</ac:parameter><ac:parameter ac:name="collapse">false</ac:parameter><ac:plain-text-body><![CDATA[unknown code]]></ac:plain-text-body></ac:structured-macro><p>text text 2</p>
|
||||
<ac:structured-macro ac:name="code"><ac:parameter ac:name="language">unknown</ac:parameter><ac:parameter ac:name="collapse">false</ac:parameter><ac:plain-text-body><![CDATA[unknown code 2]]></ac:plain-text-body></ac:structured-macro><ac:structured-macro ac:name="code"><ac:parameter ac:name="language">sh</ac:parameter><ac:parameter ac:name="collapse">false</ac:parameter><ac:parameter ac:name="title">A b c</ac:parameter><ac:plain-text-body><![CDATA[no-collapse-title]]></ac:plain-text-body></ac:structured-macro><ac:structured-macro ac:name="code"><ac:parameter ac:name="language">bash</ac:parameter><ac:parameter ac:name="collapse">true</ac:parameter><ac:parameter ac:name="title">A b c</ac:parameter><ac:plain-text-body><![CDATA[collapse-and-title]]></ac:plain-text-body></ac:structured-macro><ac:structured-macro ac:name="code"><ac:parameter ac:name="language">c</ac:parameter><ac:parameter ac:name="collapse">true</ac:parameter><ac:plain-text-body><![CDATA[collapse-no-title]]></ac:plain-text-body></ac:structured-macro><ac:structured-macro ac:name="code"><ac:parameter ac:name="language">nested</ac:parameter><ac:parameter ac:name="collapse">false</ac:parameter><ac:plain-text-body><![CDATA[code
|
||||
``` more code ```
|
||||
even more code]]></ac:plain-text-body></ac:structured-macro><ac:structured-macro ac:name="code"><ac:parameter ac:name="language"></ac:parameter><ac:parameter ac:name="collapse">false</ac:parameter><ac:plain-text-body><![CDATA[indented code block
|
||||
with multiple lines]]></ac:plain-text-body></ac:structured-macro><ac:structured-macro ac:name="cloudscript-confluence-mermaid"><ac:parameter ac:name="showSource">true</ac:parameter><ac:parameter ac:name="collapse">false</ac:parameter><ac:plain-text-body><![CDATA[graph TD;
|
||||
A-->B;
|
||||
A-->C;
|
||||
B-->D;
|
||||
C-->D;]]></ac:plain-text-body></ac:structured-macro><ac:structured-macro ac:name="cloudscript-confluence-mermaid"><ac:parameter ac:name="showSource">true</ac:parameter><ac:parameter ac:name="collapse">true</ac:parameter><ac:parameter ac:name="title">my mermaid graph</ac:parameter><ac:plain-text-body><![CDATA[graph TD;
|
||||
A-->B;
|
||||
A-->C;
|
||||
B-->D;
|
||||
C-->D;]]></ac:plain-text-body></ac:structured-macro><ac:structured-macro ac:name="cloudscript-confluence-mermaid"><ac:parameter ac:name="showSource">true</ac:parameter><ac:parameter ac:name="collapse">false</ac:parameter><ac:parameter ac:name="title">my mermaid graph</ac:parameter><ac:plain-text-body><![CDATA[graph TD;
|
||||
A-->B;
|
||||
A-->C;
|
||||
B-->D;
|
||||
C-->D;]]></ac:plain-text-body></ac:structured-macro><ac:structured-macro ac:name="code"><ac:parameter ac:name="language">d2</ac:parameter><ac:parameter ac:name="collapse">false</ac:parameter><ac:plain-text-body><![CDATA[vars: {
|
||||
d2-config: {
|
||||
layout-engine: elk
|
||||
# Terminal theme code
|
||||
theme-id: 300
|
||||
}
|
||||
}
|
||||
network: {
|
||||
cell tower: {
|
||||
satellites: {
|
||||
shape: stored_data
|
||||
style.multiple: true
|
||||
}
|
||||
|
||||
transmitter
|
||||
|
||||
satellites -> transmitter: send
|
||||
satellites -> transmitter: send
|
||||
satellites -> transmitter: send
|
||||
}
|
||||
|
||||
online portal: {
|
||||
ui: {shape: hexagon}
|
||||
}
|
||||
|
||||
data processor: {
|
||||
storage: {
|
||||
shape: cylinder
|
||||
style.multiple: true
|
||||
}
|
||||
}
|
||||
|
||||
cell tower.transmitter -> data processor.storage: phone logs
|
||||
}
|
||||
|
||||
user: {
|
||||
shape: person
|
||||
width: 130
|
||||
}
|
||||
|
||||
user -> network.cell tower: make call
|
||||
user -> network.online portal.ui: access {
|
||||
style.stroke-dash: 3
|
||||
}
|
||||
|
||||
api server -> network.online portal.ui: display
|
||||
api server -> logs: persist
|
||||
logs: {shape: page; style.multiple: true}
|
||||
|
||||
network.data processor -> api server]]></ac:plain-text-body></ac:structured-macro>
|
testdata/codes.html (vendored, new file, 69 lines)
@@ -0,0 +1,69 @@
|
||||
<p><code>inline</code></p>
|
||||
<ac:structured-macro ac:name="code"><ac:parameter ac:name="language"></ac:parameter><ac:parameter ac:name="collapse">false</ac:parameter><ac:plain-text-body><![CDATA[some code]]></ac:plain-text-body></ac:structured-macro><ac:structured-macro ac:name="code"><ac:parameter ac:name="language">bash</ac:parameter><ac:parameter ac:name="collapse">false</ac:parameter><ac:plain-text-body><![CDATA[code bash]]></ac:plain-text-body></ac:structured-macro><ac:structured-macro ac:name="code"><ac:parameter ac:name="language">bash</ac:parameter><ac:parameter ac:name="collapse">false</ac:parameter><ac:plain-text-body><![CDATA[with a newline
|
||||
]]></ac:plain-text-body></ac:structured-macro><ac:structured-macro ac:name="code"><ac:parameter ac:name="language">unknown</ac:parameter><ac:parameter ac:name="collapse">false</ac:parameter><ac:plain-text-body><![CDATA[unknown code]]></ac:plain-text-body></ac:structured-macro><p>text
|
||||
text 2</p>
|
||||
<ac:structured-macro ac:name="code"><ac:parameter ac:name="language">unknown</ac:parameter><ac:parameter ac:name="collapse">false</ac:parameter><ac:plain-text-body><![CDATA[unknown code 2]]></ac:plain-text-body></ac:structured-macro><ac:structured-macro ac:name="code"><ac:parameter ac:name="language">sh</ac:parameter><ac:parameter ac:name="collapse">false</ac:parameter><ac:parameter ac:name="title">A b c</ac:parameter><ac:plain-text-body><![CDATA[no-collapse-title]]></ac:plain-text-body></ac:structured-macro><ac:structured-macro ac:name="code"><ac:parameter ac:name="language">bash</ac:parameter><ac:parameter ac:name="collapse">true</ac:parameter><ac:parameter ac:name="title">A b c</ac:parameter><ac:plain-text-body><![CDATA[collapse-and-title]]></ac:plain-text-body></ac:structured-macro><ac:structured-macro ac:name="code"><ac:parameter ac:name="language">c</ac:parameter><ac:parameter ac:name="collapse">true</ac:parameter><ac:plain-text-body><![CDATA[collapse-no-title]]></ac:plain-text-body></ac:structured-macro><ac:structured-macro ac:name="code"><ac:parameter ac:name="language">nested</ac:parameter><ac:parameter ac:name="collapse">false</ac:parameter><ac:plain-text-body><![CDATA[code
|
||||
``` more code ```
|
||||
even more code]]></ac:plain-text-body></ac:structured-macro><ac:structured-macro ac:name="code"><ac:parameter ac:name="language"></ac:parameter><ac:parameter ac:name="collapse">false</ac:parameter><ac:plain-text-body><![CDATA[indented code block
|
||||
with multiple lines]]></ac:plain-text-body></ac:structured-macro><ac:structured-macro ac:name="cloudscript-confluence-mermaid"><ac:parameter ac:name="showSource">true</ac:parameter><ac:parameter ac:name="collapse">false</ac:parameter><ac:plain-text-body><![CDATA[graph TD;
|
||||
A-->B;
|
||||
A-->C;
|
||||
B-->D;
|
||||
C-->D;]]></ac:plain-text-body></ac:structured-macro><ac:structured-macro ac:name="cloudscript-confluence-mermaid"><ac:parameter ac:name="showSource">true</ac:parameter><ac:parameter ac:name="collapse">true</ac:parameter><ac:parameter ac:name="title">my mermaid graph</ac:parameter><ac:plain-text-body><![CDATA[graph TD;
|
||||
A-->B;
|
||||
A-->C;
|
||||
B-->D;
|
||||
C-->D;]]></ac:plain-text-body></ac:structured-macro><ac:structured-macro ac:name="cloudscript-confluence-mermaid"><ac:parameter ac:name="showSource">true</ac:parameter><ac:parameter ac:name="collapse">false</ac:parameter><ac:parameter ac:name="title">my mermaid graph</ac:parameter><ac:plain-text-body><![CDATA[graph TD;
|
||||
A-->B;
|
||||
A-->C;
|
||||
B-->D;
|
||||
C-->D;]]></ac:plain-text-body></ac:structured-macro><ac:structured-macro ac:name="code"><ac:parameter ac:name="language">d2</ac:parameter><ac:parameter ac:name="collapse">false</ac:parameter><ac:plain-text-body><![CDATA[vars: {
|
||||
d2-config: {
|
||||
layout-engine: elk
|
||||
# Terminal theme code
|
||||
theme-id: 300
|
||||
}
|
||||
}
|
||||
network: {
|
||||
cell tower: {
|
||||
satellites: {
|
||||
shape: stored_data
|
||||
style.multiple: true
|
||||
}
|
||||
|
||||
transmitter
|
||||
|
||||
satellites -> transmitter: send
|
||||
satellites -> transmitter: send
|
||||
satellites -> transmitter: send
|
||||
}
|
||||
|
||||
online portal: {
|
||||
ui: {shape: hexagon}
|
||||
}
|
||||
|
||||
data processor: {
|
||||
storage: {
|
||||
shape: cylinder
|
||||
style.multiple: true
|
||||
}
|
||||
}
|
||||
|
||||
cell tower.transmitter -> data processor.storage: phone logs
|
||||
}
|
||||
|
||||
user: {
|
||||
shape: person
|
||||
width: 130
|
||||
}
|
||||
|
||||
user -> network.cell tower: make call
|
||||
user -> network.online portal.ui: access {
|
||||
style.stroke-dash: 3
|
||||
}
|
||||
|
||||
api server -> network.online portal.ui: display
|
||||
api server -> logs: persist
|
||||
logs: {shape: page; style.multiple: true}
|
||||
|
||||
network.data processor -> api server]]></ac:plain-text-body></ac:structured-macro>
|
testdata/codes.md (vendored, new file, 120 lines)
@@ -0,0 +1,120 @@
|
||||
`inline`
|
||||
|
||||
```
|
||||
some code
|
||||
```
|
||||
```bash
|
||||
code bash
|
||||
```
|
||||
|
||||
```bash
|
||||
with a newline
|
||||
|
||||
```
|
||||
|
||||
```unknown
|
||||
unknown code
|
||||
```
|
||||
text
|
||||
text 2
|
||||
```unknown
|
||||
unknown code 2
|
||||
```
|
||||
|
||||
```sh title A b c
|
||||
no-collapse-title
|
||||
```
|
||||
|
||||
```bash collapse title A b c
|
||||
collapse-and-title
|
||||
```
|
||||
|
||||
```c collapse
|
||||
collapse-no-title
|
||||
```
|
||||
|
||||
```nested
|
||||
code
|
||||
``` more code ```
|
||||
even more code
|
||||
```
|
||||
|
||||
indented code block
|
||||
with multiple lines
|
||||
|
||||
```mermaid
|
||||
graph TD;
|
||||
A-->B;
|
||||
A-->C;
|
||||
B-->D;
|
||||
C-->D;
|
||||
```
|
||||
|
||||
```mermaid collapse title my mermaid graph
|
||||
graph TD;
|
||||
A-->B;
|
||||
A-->C;
|
||||
B-->D;
|
||||
C-->D;
|
||||
```
|
||||
|
||||
```mermaid title my mermaid graph
|
||||
graph TD;
|
||||
A-->B;
|
||||
A-->C;
|
||||
B-->D;
|
||||
C-->D;
|
||||
```
|
||||
|
||||
```d2
|
||||
vars: {
|
||||
d2-config: {
|
||||
layout-engine: elk
|
||||
# Terminal theme code
|
||||
theme-id: 300
|
||||
}
|
||||
}
|
||||
network: {
|
||||
cell tower: {
|
||||
satellites: {
|
||||
shape: stored_data
|
||||
style.multiple: true
|
||||
}
|
||||
|
||||
transmitter
|
||||
|
||||
satellites -> transmitter: send
|
||||
satellites -> transmitter: send
|
||||
satellites -> transmitter: send
|
||||
}
|
||||
|
||||
online portal: {
|
||||
ui: {shape: hexagon}
|
||||
}
|
||||
|
||||
data processor: {
|
||||
storage: {
|
||||
shape: cylinder
|
||||
style.multiple: true
|
||||
}
|
||||
}
|
||||
|
||||
cell tower.transmitter -> data processor.storage: phone logs
|
||||
}
|
||||
|
||||
user: {
|
||||
shape: person
|
||||
width: 130
|
||||
}
|
||||
|
||||
user -> network.cell tower: make call
|
||||
user -> network.online portal.ui: access {
|
||||
style.stroke-dash: 3
|
||||
}
|
||||
|
||||
api server -> network.online portal.ui: display
|
||||
api server -> logs: persist
|
||||
logs: {shape: page; style.multiple: true}
|
||||
|
||||
network.data processor -> api server
|
||||
```
|
testdata/header-droph1.html (vendored, new file, 7 lines)
@@ -0,0 +1,7 @@
|
||||
<h2 id="b">b</h2>
|
||||
<h3 id="c">c</h3>
|
||||
<h4 id="d">d</h4>
|
||||
<h5 id="e">e</h5>
|
||||
<h1 id="f">f</h1>
|
||||
<h2 id="g">g</h2>
|
||||
<h1 id="This/is-some_Heading.yml">This/is some_Heading.yml</h1>
|
testdata/header.html (vendored, new file, 8 lines)
@@ -0,0 +1,8 @@
|
||||
<h1 id="a">a</h1>
|
||||
<h2 id="b">b</h2>
|
||||
<h3 id="c">c</h3>
|
||||
<h4 id="d">d</h4>
|
||||
<h5 id="e">e</h5>
|
||||
<h1 id="f">f</h1>
|
||||
<h2 id="g">g</h2>
|
||||
<h1 id="This/is-some_Heading.yml">This/is some_Heading.yml</h1>
|
testdata/header.md (vendored, new file, 12 lines)
@@ -0,0 +1,12 @@
|
||||
|
||||
# a
|
||||
## b
|
||||
### c
|
||||
#### d
|
||||
##### e
|
||||
f
|
||||
=
|
||||
g
|
||||
-
|
||||
|
||||
# This/is some_Heading.yml
|
testdata/issue-64-broken-link.html (vendored, new file, 1 line)
@@ -0,0 +1 @@
|
||||
<p><a href="Page2#Page2-Releasev71-22-Feb-2018(intern)">v71</a></p>
|
testdata/issue-64-broken-link.md (vendored, new file, 1 line)
@@ -0,0 +1 @@
|
||||
[v71](Page2#Page2-Releasev71-22-Feb-2018(intern))
|
testdata/links.html (vendored, new file, 21 lines)
@@ -0,0 +1,21 @@
|
||||
<p>Use <a href="https://example.com">https://example.com</a></p>
|
||||
<p>Use <ac:rich-text-body>aaa</ac:rich-text-body></p>
|
||||
<p>Use <ac:link><ri:page ri:content-title="Page"/><ac:plain-text-link-body><![CDATA[page link]]></ac:plain-text-link-body></ac:link></p>
|
||||
<p>Use <ac:link><ri:page ri:content-title="AnotherPage"/><ac:plain-text-link-body><![CDATA[AnotherPage]]></ac:plain-text-link-body></ac:link></p>
|
||||
<p>Use <ac:link><ri:page ri:content-title="Another Page"/><ac:plain-text-link-body><![CDATA[Another Page]]></ac:plain-text-link-body></ac:link></p>
|
||||
<p>Use <ac:link><ri:page ri:content-title="test_link"/><ac:plain-text-link-body><![CDATA[Another Page]]></ac:plain-text-link-body></ac:link></p>
|
||||
<p>Use <ac:link><ri:page ri:content-title="Page With Space"/><ac:plain-text-link-body><![CDATA[page link with spaces]]></ac:plain-text-link-body></ac:link></p>
|
||||
<p><ac:image ac:alt="My Image"><ri:attachment ri:filename="test.png"/></ac:image></p>
|
||||
<p><ac:image ac:alt="My External Image"><ri:url ri:value="http://confluence.atlassian.com/images/logo/confluence_48_trans.png?key1=value1&key2=value2"/></ac:image></p>
|
||||
<p><ac:link><ri:page ri:content-title="test_link"/><ac:plain-text-link-body><![CDATA[My test_link]]></ac:plain-text-link-body></ac:link></p>
|
||||
<p><ac:link><ri:page ri:content-title="test_link_link"/><ac:plain-text-link-body><![CDATA[Another [Link]]]></ac:plain-text-link-body></ac:link></p>
|
||||
<p>Use footnotes link <sup id="fnref:1"><a href="#fn:1" class="footnote-ref" role="doc-noteref">1</a></sup></p>
|
||||
<p>Use <a href="foo">Link [Text]</a></p>
|
||||
<div class="footnotes" role="doc-endnotes">
|
||||
<hr />
|
||||
<ol>
|
||||
<li id="fn:1">
|
||||
<p>a footnote link <a href="#fnref:1" class="footnote-backref" role="doc-backlink">↩︎</a></p>
|
||||
</li>
|
||||
</ol>
|
||||
</div>
|
testdata/links.md (vendored, new file, 26 lines)
@@ -0,0 +1,26 @@
|
||||
Use <https://example.com>
|
||||
|
||||
Use <ac:rich-text-body>aaa</ac:rich-text-body>
|
||||
|
||||
Use [page link](ac:Page)
|
||||
|
||||
Use [AnotherPage](ac:)
|
||||
|
||||
Use [Another Page](ac:)
|
||||
|
||||
Use [Another Page](ac:test_link)
|
||||
|
||||
Use [page link with spaces](<ac:Page With Space>)
|
||||
|
||||

|
||||
|
||||

|
||||
|
||||
[My test_link](ac:test_link)
|
||||
|
||||
[Another [Link]](ac:test_link_link)
|
||||
|
||||
Use footnotes link [^1]
|
||||
[^1]: a footnote link
|
||||
|
||||
Use [Link [Text]](foo)
|
testdata/lists.html (vendored, new file, 21 lines)
@@ -0,0 +1,21 @@
|
||||
<ul>
|
||||
<li>dash 1-1</li>
|
||||
<li>dash 1-2</li>
|
||||
<li>dash 1-3
|
||||
<ul>
|
||||
<li>dash 1-3-1</li>
|
||||
<li>dash 1-3-2</li>
|
||||
<li>dash 1-3-3
|
||||
<ul>
|
||||
<li>dash 1-3-3-1</li>
|
||||
</ul>
|
||||
</li>
|
||||
</ul>
|
||||
</li>
|
||||
</ul>
|
||||
<p>text</p>
|
||||
<ul>
|
||||
<li>a</li>
|
||||
<li>b</li>
|
||||
<li>c</li>
|
||||
</ul>
|
testdata/lists.md (vendored, new file, 13 lines)
@@ -0,0 +1,13 @@
|
||||
- dash 1-1
|
||||
- dash 1-2
|
||||
- dash 1-3
|
||||
- dash 1-3-1
|
||||
- dash 1-3-2
|
||||
- dash 1-3-3
|
||||
- dash 1-3-3-1
|
||||
|
||||
text
|
||||
|
||||
* a
|
||||
* b
|
||||
* c
|
testdata/macro-include-stripnewlines.html (vendored, new file, 18 lines)
@@ -0,0 +1,18 @@
|
||||
<foo>bar</foo>
|
||||
<ac:structured-macro ac:name="info"> <ac:parameter ac:name="icon">true</ac:parameter> <ac:parameter ac:name="title">Attention</ac:parameter> <ac:rich-text-body>This is an info!</ac:rich-text-body> </ac:structured-macro>
|
||||
<ac:structured-macro ac:name="info"> <ac:parameter ac:name="icon">true</ac:parameter> <ac:parameter ac:name="title">Attention</ac:parameter> <ac:rich-text-body>
|
||||
<table>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Header 1</th>
|
||||
<th>Header 2</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td>Cell A</td>
|
||||
<td>Cell B</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</ac:rich-text-body> </ac:structured-macro>
|
testdata/macro-include.html (vendored, new file, 26 lines)
@@ -0,0 +1,26 @@
|
||||
<foo>bar</foo>
|
||||
<ac:structured-macro ac:name="info">
|
||||
<ac:parameter ac:name="icon">true</ac:parameter>
|
||||
<ac:parameter ac:name="title">Attention</ac:parameter>
|
||||
<ac:rich-text-body>This is an info!</ac:rich-text-body>
|
||||
</ac:structured-macro>
|
||||
<ac:structured-macro ac:name="info">
|
||||
<ac:parameter ac:name="icon">true</ac:parameter>
|
||||
<ac:parameter ac:name="title">Attention</ac:parameter>
|
||||
<ac:rich-text-body>
|
||||
<table>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Header 1</th>
|
||||
<th>Header 2</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td>Cell A</td>
|
||||
<td>Cell B</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</ac:rich-text-body>
|
||||
</ac:structured-macro>
|
testdata/macro-include.md (vendored, new file, 18 lines)
@@ -0,0 +1,18 @@
|
||||
<foo>bar</foo>
|
||||
|
||||
<ac:structured-macro ac:name="info">
|
||||
<ac:parameter ac:name="icon">true</ac:parameter>
|
||||
<ac:parameter ac:name="title">Attention</ac:parameter>
|
||||
<ac:rich-text-body>This is an info!</ac:rich-text-body>
|
||||
</ac:structured-macro>
|
||||
|
||||
<ac:structured-macro ac:name="info">
|
||||
<ac:parameter ac:name="icon">true</ac:parameter>
|
||||
<ac:parameter ac:name="title">Attention</ac:parameter>
|
||||
<ac:rich-text-body>
|
||||
| Header 1 | Header 2 |
|
||||
|---|---|
|
||||
| Cell A | Cell B |
|
||||
|
||||
</ac:rich-text-body>
|
||||
</ac:structured-macro>
|
testdata/newlines-stripnewlines.html (vendored, new file, 8 lines)
@@ -0,0 +1,8 @@
|
||||
<p>one-1 one-2</p>
|
||||
<p>two-1</p>
|
||||
<p>two-2</p>
|
||||
<p>three-1</p>
|
||||
<p>three-2</p>
|
||||
<p>space-1 space-2</p>
|
||||
<p>2space-1<br />
|
||||
2space-2</p>
|
testdata/newlines.html (vendored, new file, 10 lines)
@@ -0,0 +1,10 @@
|
||||
<p>one-1
|
||||
one-2</p>
|
||||
<p>two-1</p>
|
||||
<p>two-2</p>
|
||||
<p>three-1</p>
|
||||
<p>three-2</p>
|
||||
<p>space-1
|
||||
space-2</p>
|
||||
<p>2space-1<br />
|
||||
2space-2</p>
|
testdata/newlines.md (vendored, new file, 17 lines)
@@ -0,0 +1,17 @@
|
||||
one-1
|
||||
one-2
|
||||
|
||||
two-1
|
||||
|
||||
two-2
|
||||
|
||||
three-1
|
||||
|
||||
|
||||
three-2
|
||||
|
||||
space-1
|
||||
space-2
|
||||
|
||||
2space-1
|
||||
2space-2
|
testdata/pagelayout.html (vendored, new file, 18 lines)
@@ -0,0 +1,18 @@
|
||||
<ac:layout>
|
||||
<ac:layout-section ac:type="three_with_sidebars">
|
||||
<ac:layout-cell>
|
||||
<p>More Content</p>
|
||||
</ac:layout-cell>
|
||||
<ac:layout-cell>
|
||||
<p>More Content</p>
|
||||
</ac:layout-cell>
|
||||
<ac:layout-cell>
|
||||
<p>Even More Content</p>
|
||||
</ac:layout-cell>
|
||||
</ac:layout-section>
|
||||
<ac:layout-section ac:type="single">
|
||||
<ac:layout-cell>
|
||||
<p>Still More Content</p>
|
||||
</ac:layout-cell>
|
||||
</ac:layout-section>
|
||||
</ac:layout>
|
testdata/pagelayout.md (vendored, new file, 21 lines)
@@ -0,0 +1,21 @@
|
||||
<!-- ac:layout -->
|
||||
|
||||
<!-- ac:layout-section type:three_with_sidebars -->
|
||||
<!-- ac:layout-cell -->
|
||||
More Content
|
||||
<!-- ac:layout-cell end -->
|
||||
<!-- ac:layout-cell -->
|
||||
More Content
|
||||
<!-- ac:layout-cell end -->
|
||||
<!-- ac:layout-cell -->
|
||||
Even More Content
|
||||
<!-- ac:layout-cell end -->
|
||||
<!-- ac:layout-section end -->
|
||||
|
||||
<!-- ac:layout-section type:single -->
|
||||
<!-- ac:layout-cell -->
|
||||
Still More Content
|
||||
<!-- ac:layout-cell end -->
|
||||
<!-- ac:layout-section end -->
|
||||
|
||||
<!-- ac:layout end -->
|
testdata/quotes-droph1.html (vendored, new file, 104 lines)
@@ -0,0 +1,104 @@
|
||||
<h2 id="First-Heading">First Heading</h2>
|
||||
<ac:structured-macro ac:name="note"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
|
||||
<p><strong>NOTES:</strong></p>
|
||||
<ol>
|
||||
<li>Note number one</li>
|
||||
<li>Note number two</li>
|
||||
</ol>
|
||||
<blockquote>
|
||||
<p>a
|
||||
b</p>
|
||||
</blockquote>
|
||||
<p><strong>Warn (Should not be picked as blockquote type)</strong></p>
|
||||
</ac:rich-text-body></ac:structured-macro>
|
||||
<h2 id="Second-Heading">Second Heading</h2>
|
||||
<ac:structured-macro ac:name="warning"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
|
||||
<p><strong>Warn</strong></p>
|
||||
<ul>
|
||||
<li>Warn bullet 1</li>
|
||||
<li>Warn bullet 2</li>
|
||||
</ul>
|
||||
</ac:rich-text-body></ac:structured-macro>
|
||||
<ul>
|
||||
<li>Regular list
|
||||
that runs long</li>
|
||||
</ul>
|
||||
<h2 id="Third-Heading">Third Heading</h2>
|
||||
<ac:structured-macro ac:name="info"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
|
||||
<!-- Info -->
|
||||
<p>Test</p>
|
||||
</ac:rich-text-body></ac:structured-macro>
|
||||
<h2 id="Fourth-Heading---Warn-should-not-get-picked-as-block-quote">Fourth Heading - Warn should not get picked as block quote</h2>
|
||||
<ac:structured-macro ac:name="tip"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
|
||||
<p><strong>TIP:</strong></p>
|
||||
<ol>
|
||||
<li>Note number one</li>
|
||||
<li>Note number two</li>
|
||||
</ol>
|
||||
<blockquote>
|
||||
<p>a
|
||||
b</p>
|
||||
</blockquote>
|
||||
<p><strong>Warn (Should not be picked as blockquote type)</strong></p>
|
||||
</ac:rich-text-body></ac:structured-macro>
|
||||
<h2 id="Simple-Blockquote">Simple Blockquote</h2>
|
||||
<blockquote>
|
||||
<p>This paragraph is a simple blockquote</p>
|
||||
</blockquote>
|
||||
<h2 id="GH-Alerts-Heading">GH Alerts Heading</h2>
|
||||
<h3 id="Note-Type-Alert-Heading">Note Type Alert Heading</h3>
|
||||
<ac:structured-macro ac:name="info"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
|
||||
<p>[!NOTE]</p>
|
||||
<ul>
|
||||
<li>Note bullet 1</li>
|
||||
<li>Note bullet 2</li>
|
||||
</ul>
|
||||
</ac:rich-text-body></ac:structured-macro>
|
||||
<h3 id="Tip-Type-Alert-Heading">Tip Type Alert Heading</h3>
|
||||
<ac:structured-macro ac:name="tip"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
|
||||
<p>[!TIP]</p>
|
||||
<ul>
|
||||
<li>Tip bullet 1</li>
|
||||
<li>Tip bullet 2</li>
|
||||
</ul>
|
||||
</ac:rich-text-body></ac:structured-macro>
|
||||
<h3 id="Warning-Type-Alert-Heading">Warning Type Alert Heading</h3>
|
||||
<ac:structured-macro ac:name="note"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
|
||||
<p>[!WARNING]</p>
|
||||
<ul>
|
||||
<li>Warning bullet 1</li>
|
||||
<li>Warning bullet 2</li>
|
||||
</ul>
|
||||
</ac:rich-text-body></ac:structured-macro>
|
||||
<h3 id="Important/Caution-Type-Alert-Heading">Important/Caution Type Alert Heading</h3>
|
||||
<ac:structured-macro ac:name="info"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
|
||||
<p>[!IMPORTANT]</p>
|
||||
<ul>
|
||||
<li>Important bullet 1</li>
|
||||
<li>Important bullet 2</li>
|
||||
</ul>
|
||||
</ac:rich-text-body></ac:structured-macro>
|
||||
<ac:structured-macro ac:name="warning"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
|
||||
<p>[!CAUTION]</p>
|
||||
<ul>
|
||||
<li>Important bullet 1</li>
|
||||
<li>Important bullet 2</li>
|
||||
</ul>
|
||||
</ac:rich-text-body></ac:structured-macro>
|
||||
<h3 id="Should-not-be-picked-up-and-converted-into-blockquote-macro">Should not be picked up and converted into blockquote macro</h3>
|
||||
<blockquote>
|
||||
<p>[[!NOTE]</p>
|
||||
</blockquote>
|
||||
<blockquote>
|
||||
<p>[!NOTE</p>
|
||||
</blockquote>
|
||||
<blockquote>
|
||||
<p>[Hey !NOTE]</p>
|
||||
</blockquote>
|
||||
<blockquote>
|
||||
<p>[NOTE]</p>
|
||||
</blockquote>
|
||||
<blockquote>
|
||||
<p><strong>TL;DR:</strong> Thingy!
|
||||
More stuff</p>
|
||||
</blockquote>
|
testdata/quotes-stripnewlines.html (vendored, new file, 101 lines)
@@ -0,0 +1,101 @@
|
||||
<h1 id="Main-Heading">Main Heading</h1>
|
||||
<h2 id="First-Heading">First Heading</h2>
|
||||
<ac:structured-macro ac:name="note"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
|
||||
<p><strong>NOTES:</strong></p>
|
||||
<ol>
|
||||
<li>Note number one</li>
|
||||
<li>Note number two</li>
|
||||
</ol>
|
||||
<blockquote>
|
||||
<p>a b</p>
|
||||
</blockquote>
|
||||
<p><strong>Warn (Should not be picked as blockquote type)</strong></p>
|
||||
</ac:rich-text-body></ac:structured-macro>
|
||||
<h2 id="Second-Heading">Second Heading</h2>
|
||||
<ac:structured-macro ac:name="warning"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
|
||||
<p><strong>Warn</strong></p>
|
||||
<ul>
|
||||
<li>Warn bullet 1</li>
|
||||
<li>Warn bullet 2</li>
|
||||
</ul>
|
||||
</ac:rich-text-body></ac:structured-macro>
|
||||
<ul>
|
||||
<li>Regular list that runs long</li>
|
||||
</ul>
|
||||
<h2 id="Third-Heading">Third Heading</h2>
|
||||
<ac:structured-macro ac:name="info"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
|
||||
<!-- Info -->
|
||||
<p>Test</p>
|
||||
</ac:rich-text-body></ac:structured-macro>
|
||||
<h2 id="Fourth-Heading---Warn-should-not-get-picked-as-block-quote">Fourth Heading - Warn should not get picked as block quote</h2>
|
||||
<ac:structured-macro ac:name="tip"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
|
||||
<p><strong>TIP:</strong></p>
|
||||
<ol>
|
||||
<li>Note number one</li>
|
||||
<li>Note number two</li>
|
||||
</ol>
|
||||
<blockquote>
|
||||
<p>a b</p>
|
||||
</blockquote>
|
||||
<p><strong>Warn (Should not be picked as blockquote type)</strong></p>
|
||||
</ac:rich-text-body></ac:structured-macro>
|
||||
<h2 id="Simple-Blockquote">Simple Blockquote</h2>
|
||||
<blockquote>
|
||||
<p>This paragraph is a simple blockquote</p>
|
||||
</blockquote>
|
||||
<h2 id="GH-Alerts-Heading">GH Alerts Heading</h2>
|
||||
<h3 id="Note-Type-Alert-Heading">Note Type Alert Heading</h3>
|
||||
<ac:structured-macro ac:name="info"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
|
||||
<p>[!NOTE]</p>
|
||||
<ul>
|
||||
<li>Note bullet 1</li>
|
||||
<li>Note bullet 2</li>
|
||||
</ul>
|
||||
</ac:rich-text-body></ac:structured-macro>
|
||||
<h3 id="Tip-Type-Alert-Heading">Tip Type Alert Heading</h3>
|
||||
<ac:structured-macro ac:name="tip"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
|
||||
<p>[!TIP]</p>
|
||||
<ul>
|
||||
<li>Tip bullet 1</li>
|
||||
<li>Tip bullet 2</li>
|
||||
</ul>
|
||||
</ac:rich-text-body></ac:structured-macro>
|
||||
<h3 id="Warning-Type-Alert-Heading">Warning Type Alert Heading</h3>
|
||||
<ac:structured-macro ac:name="note"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
|
||||
<p>[!WARNING]</p>
|
||||
<ul>
|
||||
<li>Warning bullet 1</li>
|
||||
<li>Warning bullet 2</li>
|
||||
</ul>
|
||||
</ac:rich-text-body></ac:structured-macro>
|
||||
<h3 id="Important/Caution-Type-Alert-Heading">Important/Caution Type Alert Heading</h3>
|
||||
<ac:structured-macro ac:name="info"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
|
||||
<p>[!IMPORTANT]</p>
|
||||
<ul>
|
||||
<li>Important bullet 1</li>
|
||||
<li>Important bullet 2</li>
|
||||
</ul>
|
||||
</ac:rich-text-body></ac:structured-macro>
|
||||
<ac:structured-macro ac:name="warning"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
|
||||
<p>[!CAUTION]</p>
|
||||
<ul>
|
||||
<li>Important bullet 1</li>
|
||||
<li>Important bullet 2</li>
|
||||
</ul>
|
||||
</ac:rich-text-body></ac:structured-macro>
|
||||
<h3 id="Should-not-be-picked-up-and-converted-into-blockquote-macro">Should not be picked up and converted into blockquote macro</h3>
|
||||
<blockquote>
|
||||
<p>[[!NOTE]</p>
|
||||
</blockquote>
|
||||
<blockquote>
|
||||
<p>[!NOTE</p>
|
||||
</blockquote>
|
||||
<blockquote>
|
||||
<p>[Hey !NOTE]</p>
|
||||
</blockquote>
|
||||
<blockquote>
|
||||
<p>[NOTE]</p>
|
||||
</blockquote>
|
||||
<blockquote>
|
||||
<p><strong>TL;DR:</strong> Thingy! More stuff</p>
|
||||
</blockquote>
|
testdata/quotes.html (vendored, new file, 105 lines)
@@ -0,0 +1,105 @@
|
||||
<h1 id="Main-Heading">Main Heading</h1>
|
||||
<h2 id="First-Heading">First Heading</h2>
|
||||
<ac:structured-macro ac:name="note"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
|
||||
<p><strong>NOTES:</strong></p>
|
||||
<ol>
|
||||
<li>Note number one</li>
|
||||
<li>Note number two</li>
|
||||
</ol>
|
||||
<blockquote>
|
||||
<p>a
|
||||
b</p>
|
||||
</blockquote>
|
||||
<p><strong>Warn (Should not be picked as blockquote type)</strong></p>
|
||||
</ac:rich-text-body></ac:structured-macro>
|
||||
<h2 id="Second-Heading">Second Heading</h2>
|
||||
<ac:structured-macro ac:name="warning"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
|
||||
<p><strong>Warn</strong></p>
|
||||
<ul>
|
||||
<li>Warn bullet 1</li>
|
||||
<li>Warn bullet 2</li>
|
||||
</ul>
|
||||
</ac:rich-text-body></ac:structured-macro>
|
||||
<ul>
|
||||
<li>Regular list
|
||||
that runs long</li>
|
||||
</ul>
|
||||
<h2 id="Third-Heading">Third Heading</h2>
|
||||
<ac:structured-macro ac:name="info"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
|
||||
<!-- Info -->
|
||||
<p>Test</p>
|
||||
</ac:rich-text-body></ac:structured-macro>
|
||||
<h2 id="Fourth-Heading---Warn-should-not-get-picked-as-block-quote">Fourth Heading - Warn should not get picked as block quote</h2>
|
||||
<ac:structured-macro ac:name="tip"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
|
||||
<p><strong>TIP:</strong></p>
|
||||
<ol>
|
||||
<li>Note number one</li>
|
||||
<li>Note number two</li>
|
||||
</ol>
|
||||
<blockquote>
|
||||
<p>a
|
||||
b</p>
|
||||
</blockquote>
|
||||
<p><strong>Warn (Should not be picked as blockquote type)</strong></p>
|
||||
</ac:rich-text-body></ac:structured-macro>
|
||||
<h2 id="Simple-Blockquote">Simple Blockquote</h2>
|
||||
<blockquote>
|
||||
<p>This paragraph is a simple blockquote</p>
|
||||
</blockquote>
|
||||
<h2 id="GH-Alerts-Heading">GH Alerts Heading</h2>
|
||||
<h3 id="Note-Type-Alert-Heading">Note Type Alert Heading</h3>
|
||||
<ac:structured-macro ac:name="info"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
|
||||
<p>[!NOTE]</p>
|
||||
<ul>
|
||||
<li>Note bullet 1</li>
|
||||
<li>Note bullet 2</li>
|
||||
</ul>
|
||||
</ac:rich-text-body></ac:structured-macro>
|
||||
<h3 id="Tip-Type-Alert-Heading">Tip Type Alert Heading</h3>
|
||||
<ac:structured-macro ac:name="tip"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
|
||||
<p>[!TIP]</p>
|
||||
<ul>
|
||||
<li>Tip bullet 1</li>
|
||||
<li>Tip bullet 2</li>
|
||||
</ul>
|
||||
</ac:rich-text-body></ac:structured-macro>
|
||||
<h3 id="Warning-Type-Alert-Heading">Warning Type Alert Heading</h3>
|
||||
<ac:structured-macro ac:name="note"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
|
||||
<p>[!WARNING]</p>
|
||||
<ul>
|
||||
<li>Warning bullet 1</li>
|
||||
<li>Warning bullet 2</li>
|
||||
</ul>
|
||||
</ac:rich-text-body></ac:structured-macro>
|
||||
<h3 id="Important/Caution-Type-Alert-Heading">Important/Caution Type Alert Heading</h3>
|
||||
<ac:structured-macro ac:name="info"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
|
||||
<p>[!IMPORTANT]</p>
|
||||
<ul>
|
||||
<li>Important bullet 1</li>
|
||||
<li>Important bullet 2</li>
|
||||
</ul>
|
||||
</ac:rich-text-body></ac:structured-macro>
|
||||
<ac:structured-macro ac:name="warning"><ac:parameter ac:name="icon">true</ac:parameter><ac:rich-text-body>
|
||||
<p>[!CAUTION]</p>
|
||||
<ul>
|
||||
<li>Important bullet 1</li>
|
||||
<li>Important bullet 2</li>
|
||||
</ul>
|
||||
</ac:rich-text-body></ac:structured-macro>
|
||||
<h3 id="Should-not-be-picked-up-and-converted-into-blockquote-macro">Should not be picked up and converted into blockquote macro</h3>
|
||||
<blockquote>
|
||||
<p>[[!NOTE]</p>
|
||||
</blockquote>
|
||||
<blockquote>
|
||||
<p>[!NOTE</p>
|
||||
</blockquote>
|
||||
<blockquote>
|
||||
<p>[Hey !NOTE]</p>
|
||||
</blockquote>
|
||||
<blockquote>
|
||||
<p>[NOTE]</p>
|
||||
</blockquote>
|
||||
<blockquote>
|
||||
<p><strong>TL;DR:</strong> Thingy!
|
||||
More stuff</p>
|
||||
</blockquote>
|
testdata/quotes.md (vendored, new file, 95 lines)
@@ -0,0 +1,95 @@
|
||||
# Main Heading
|
||||
|
||||
## First Heading
|
||||
|
||||
> **NOTES:**
|
||||
>
|
||||
> 1. Note number one
|
||||
> 1. Note number two
|
||||
>
|
||||
>> a
|
||||
>> b
|
||||
>
|
||||
> **Warn (Should not be picked as blockquote type)**
|
||||
|
||||
## Second Heading
|
||||
|
||||
> **Warn**
|
||||
>
|
||||
> * Warn bullet 1
|
||||
> * Warn bullet 2
|
||||
|
||||
* Regular list
|
||||
that runs long
|
||||
|
||||
## Third Heading
|
||||
> <!-- Info -->
|
||||
> Test
|
||||
|
||||
## Fourth Heading - Warn should not get picked as block quote
|
||||
|
||||
> **TIP:**
|
||||
>
|
||||
> 1. Note number one
|
||||
> 1. Note number two
|
||||
>
|
||||
>> a
|
||||
>> b
|
||||
>
|
||||
> **Warn (Should not be picked as blockquote type)**
|
||||
|
||||
## Simple Blockquote
|
||||
|
||||
> This paragraph is a simple blockquote
|
||||
|
||||
## GH Alerts Heading
|
||||
|
||||
### Note Type Alert Heading
|
||||
|
||||
> [!NOTE]
|
||||
>
|
||||
> * Note bullet 1
|
||||
> * Note bullet 2
|
||||
|
||||
### Tip Type Alert Heading
|
||||
|
||||
> [!TIP]
|
||||
>
|
||||
> * Tip bullet 1
|
||||
> * Tip bullet 2
|
||||
|
||||
### Warning Type Alert Heading
|
||||
|
||||
> [!WARNING]
|
||||
>
|
||||
> * Warning bullet 1
|
||||
> * Warning bullet 2
|
||||
|
||||
### Important/Caution Type Alert Heading
|
||||
|
||||
> [!IMPORTANT]
|
||||
>
|
||||
> * Important bullet 1
|
||||
> * Important bullet 2
|
||||
|
||||
|
||||
> [!CAUTION]
|
||||
>
|
||||
> * Important bullet 1
|
||||
> * Important bullet 2
|
||||
|
||||
### Should not be picked up and converted into blockquote macro
|
||||
|
||||
> [[!NOTE]
|
||||
|
||||
|
||||
> [!NOTE
|
||||
|
||||
> [Hey !NOTE]
|
||||
|
||||
> [NOTE]
|
||||
|
||||
|
||||
|
||||
> **TL;DR:** Thingy!
|
||||
> More stuff
|
testdata/table.html (vendored, new file, 28 lines)
@@ -0,0 +1,28 @@
|
||||
<table>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>HEADER1</th>
|
||||
<th>HEADER2</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td>row1</td>
|
||||
<td>row2</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
<table>
|
||||
<thead>
|
||||
<tr>
|
||||
<th style="text-align:center">HEADER1</th>
|
||||
<th style="text-align:right">HEADER2</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td style="text-align:center">row1</td>
|
||||
<td style="text-align:right">row2</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
testdata/table.md (vendored, new file, 7 lines)
@@ -0,0 +1,7 @@
|
||||
|HEADER1|HEADER2|
|
||||
|---|---|
|
||||
|row1|row2|
|
||||
|
||||
|HEADER1|HEADER2|
|
||||
|:---:|---:|
|
||||
|row1|row2|
|
testdata/tags.html (vendored, new file, 6 lines)
@@ -0,0 +1,6 @@
|
||||
<b>bold</b>
|
||||
<p><strong>bold</strong></p>
|
||||
<i>vitalik</i>
|
||||
<p><em>vitalik</em></p>
|
||||
<s>strikethrough</s>
|
||||
<p><del>strikethrough</del></p>
|
testdata/tags.md (vendored, new file, 11 lines)
@@ -0,0 +1,11 @@
|
||||
<b>bold</b>
|
||||
|
||||
**bold**
|
||||
|
||||
<i>vitalik</i>
|
||||
|
||||
*vitalik*
|
||||
|
||||
<s>strikethrough</s>
|
||||
|
||||
~~strikethrough~~
|
testdata/test.png (vendored, new binary file, 3.9 KiB; binary content not shown)
types/types.go (new file, 10 lines)
@@ -0,0 +1,10 @@
|
||||
package types
|
||||
|
||||
type MarkConfig struct {
|
||||
MermaidProvider string
|
||||
MermaidScale float64
|
||||
D2Scale float64
|
||||
DropFirstH1 bool
|
||||
StripNewlines bool
|
||||
Features []string
|
||||
}
|
@@ -1,8 +1,10 @@
|
||||
package main
|
||||
package util
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"io"
|
||||
"net/url"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/reconquest/karma-go"
|
||||
@@ -16,35 +18,39 @@ type Credentials struct {
|
||||
}
|
||||
|
||||
func GetCredentials(
|
||||
args map[string]interface{},
|
||||
config *Config,
|
||||
) (*Credentials, error) {
|
||||
var (
|
||||
username, _ = args["-u"].(string)
|
||||
password, _ = args["-p"].(string)
|
||||
targetURL, _ = args["-l"].(string)
|
||||
)
|
||||
username string,
|
||||
password string,
|
||||
targetURL string,
|
||||
baseURL string,
|
||||
compileOnly bool,
|
||||
|
||||
) (*Credentials, error) {
|
||||
var err error
|
||||
|
||||
if username == "" {
|
||||
username = config.Username
|
||||
if username == "" {
|
||||
if password == "" {
|
||||
if !compileOnly {
|
||||
return nil, errors.New(
|
||||
"Confluence username should be specified using -u " +
|
||||
"confluence password should be specified using -p " +
|
||||
"flag or be stored in configuration file",
|
||||
)
|
||||
}
|
||||
password = "none"
|
||||
}
|
||||
|
||||
if password == "" {
|
||||
password = config.Password
|
||||
if password == "" {
|
||||
return nil, errors.New(
|
||||
"Confluence password should be specified using -p " +
|
||||
"flag or be stored in configuration file",
|
||||
if password == "-" {
|
||||
stdin, err := io.ReadAll(os.Stdin)
|
||||
if err != nil {
|
||||
return nil, karma.Format(
|
||||
err,
|
||||
"unable to read password from stdin",
|
||||
)
|
||||
}
|
||||
|
||||
password = string(stdin)
|
||||
}
|
||||
|
||||
if compileOnly && targetURL == "" {
|
||||
targetURL = "http://localhost"
|
||||
}
|
||||
|
||||
url, err := url.Parse(targetURL)
|
||||
@@ -55,20 +61,15 @@ func GetCredentials(
|
||||
)
|
||||
}
|
||||
|
||||
baseURL := url.Scheme + "://" + url.Host
|
||||
|
||||
if url.Host == "" {
|
||||
var ok bool
|
||||
baseURL, ok = args["--base-url"].(string)
|
||||
if !ok {
|
||||
baseURL = config.BaseURL
|
||||
if baseURL == "" {
|
||||
return nil, errors.New(
|
||||
"Confluence base URL should be specified using -l " +
|
||||
"flag or be stored in configuration file",
|
||||
)
|
||||
}
|
||||
if baseURL == "" {
|
||||
return nil, errors.New(
|
||||
"confluence base URL should be specified using -l " +
|
||||
"flag or be stored in configuration file",
|
||||
)
|
||||
}
|
||||
} else {
|
||||
baseURL = url.Scheme + "://" + url.Host
|
||||
}
|
||||
|
||||
baseURL = strings.TrimRight(baseURL, `/`)
|
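The diff above replaces the docopt-style args map with explicit parameters. Below is a hedged sketch of the new call shape, mirroring the call site in util/cli.go's RunMark; the literal values are hypothetical:

```go
package main

import (
	"fmt"

	"github.com/kovetskiy/mark/util"
)

func main() {
	creds, err := util.GetCredentials(
		"jane.doe",                 // username (-u)
		"s3cret-token",             // password or PAT (-p); "-" reads it from stdin
		"",                         // target URL (-l); empty means page metadata decides
		"https://wiki.example.com", // base URL (-b)
		false,                      // compile-only
	)
	if err != nil {
		panic(err)
	}
	fmt.Println(creds.BaseURL, creds.Username)
}
```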
514  util/cli.go  Normal file
@@ -0,0 +1,514 @@
|
||||
package util
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"crypto/sha1"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"slices"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/bmatcuk/doublestar/v4"
|
||||
"github.com/kovetskiy/lorg"
|
||||
"github.com/kovetskiy/mark/attachment"
|
||||
"github.com/kovetskiy/mark/confluence"
|
||||
"github.com/kovetskiy/mark/includes"
|
||||
"github.com/kovetskiy/mark/macro"
|
||||
mark "github.com/kovetskiy/mark/markdown"
|
||||
"github.com/kovetskiy/mark/metadata"
|
||||
"github.com/kovetskiy/mark/page"
|
||||
"github.com/kovetskiy/mark/stdlib"
|
||||
"github.com/kovetskiy/mark/types"
|
||||
"github.com/kovetskiy/mark/vfs"
|
||||
"github.com/reconquest/karma-go"
|
||||
"github.com/reconquest/pkg/log"
|
||||
"github.com/urfave/cli/v3"
|
||||
)
|
||||
|
||||
func RunMark(ctx context.Context, cmd *cli.Command) error {
|
||||
if err := SetLogLevel(cmd); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if cmd.String("color") == "never" {
|
||||
log.GetLogger().SetFormat(
|
||||
lorg.NewFormat(
|
||||
`${time:2006-01-02 15:04:05.000} ${level:%s:left:true} ${prefix}%s`,
|
||||
),
|
||||
)
|
||||
log.GetLogger().SetOutput(os.Stderr)
|
||||
}
|
||||
|
||||
creds, err := GetCredentials(cmd.String("username"), cmd.String("password"), cmd.String("target-url"), cmd.String("base-url"), cmd.Bool("compile-only"))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
api := confluence.NewAPI(creds.BaseURL, creds.Username, creds.Password)
|
||||
|
||||
files, err := doublestar.FilepathGlob(cmd.String("files"))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if len(files) == 0 {
|
||||
msg := "No files matched"
|
||||
if cmd.Bool("ci") {
|
||||
log.Warning(msg)
|
||||
} else {
|
||||
log.Fatal(msg)
|
||||
}
|
||||
}
|
||||
|
||||
log.Debug("config:")
|
||||
for _, f := range cmd.Flags {
|
||||
flag := f.Names()
|
||||
if flag[0] == "password" {
|
||||
log.Debugf(nil, "%20s: %v", flag[0], "******")
|
||||
} else {
|
||||
log.Debugf(nil, "%20s: %v", flag[0], cmd.Value(flag[0]))
|
||||
}
|
||||
}
|
||||
|
||||
fatalErrorHandler := NewErrorHandler(cmd.Bool("continue-on-error"))
|
||||
|
||||
// Loop through files matched by glob pattern
|
||||
for _, file := range files {
|
||||
log.Infof(
|
||||
nil,
|
||||
"processing %s",
|
||||
file,
|
||||
)
|
||||
|
||||
target := processFile(file, api, cmd, creds.PageID, creds.Username, fatalErrorHandler)
|
||||
|
||||
if target != nil { // on dry-run or compile-only, the target is nil
|
||||
log.Infof(
|
||||
nil,
|
||||
"page successfully updated: %s",
|
||||
creds.BaseURL+target.Links.Full,
|
||||
)
|
||||
fmt.Println(creds.BaseURL + target.Links.Full)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
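RunMark has the urfave/cli v3 action signature, so wiring it up amounts to handing Flags and RunMark to a cli.Command. The repository's actual main.go is not part of this hunk; the following is only a sketch under that assumption:

```go
package main

import (
	"context"
	"os"

	"github.com/kovetskiy/mark/util"
	"github.com/reconquest/pkg/log"
	"github.com/urfave/cli/v3"
)

func main() {
	cmd := &cli.Command{
		Name:   "mark",
		Usage:  "publish markdown files to Confluence",
		Flags:  util.Flags,
		Action: util.RunMark, // func(ctx context.Context, cmd *cli.Command) error
	}
	if err := cmd.Run(context.Background(), os.Args); err != nil {
		log.Fatal(err)
	}
}
```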
|
||||
func processFile(
|
||||
file string,
|
||||
api *confluence.API,
|
||||
cmd *cli.Command,
|
||||
pageID string,
|
||||
username string,
|
||||
fatalErrorHandler *FatalErrorHandler,
|
||||
) *confluence.PageInfo {
|
||||
markdown, err := os.ReadFile(file)
|
||||
if err != nil {
|
||||
fatalErrorHandler.Handle(err, "unable to read file %q", file)
|
||||
return nil
|
||||
}
|
||||
|
||||
markdown = bytes.ReplaceAll(markdown, []byte("\r\n"), []byte("\n"))
|
||||
|
||||
parents := strings.Split(cmd.String("parents"), cmd.String("parents-delimiter"))
|
||||
|
||||
meta, markdown, err := metadata.ExtractMeta(markdown, cmd.String("space"), cmd.Bool("title-from-h1"), parents, cmd.Bool("title-append-generated-hash"))
|
||||
if err != nil {
|
||||
fatalErrorHandler.Handle(err, "unable to extract metadata from file %q", file)
|
||||
return nil
|
||||
}
|
||||
|
||||
if pageID != "" && meta != nil {
|
||||
log.Warning(
|
||||
`specified file contains metadata, ` +
|
||||
`but it will be ignored due to the specified command line URL`,
|
||||
)
|
||||
|
||||
meta = nil
|
||||
}
|
||||
|
||||
if pageID == "" && meta == nil {
|
||||
fatalErrorHandler.Handle(nil, "specified file doesn't contain metadata and URL is not specified via command line or doesn't contain pageId GET-parameter")
|
||||
return nil
|
||||
}
|
||||
|
||||
if meta != nil {
|
||||
if meta.Space == "" {
|
||||
fatalErrorHandler.Handle(nil, "space is not set ('Space' header is not set and '--space' option is not set)")
|
||||
return nil
|
||||
}
|
||||
|
||||
if meta.Title == "" {
|
||||
fatalErrorHandler.Handle(nil, "page title is not set ('Title' header is not set and '--title-from-h1' option and 'h1-title' config is not set or there is no H1 in the file)")
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
stdlib, err := stdlib.New(api)
|
||||
if err != nil {
|
||||
fatalErrorHandler.Handle(err, "unable to retrieve standard library")
|
||||
return nil
|
||||
}
|
||||
|
||||
templates := stdlib.Templates
|
||||
|
||||
var recurse bool
|
||||
|
||||
for {
|
||||
templates, markdown, recurse, err = includes.ProcessIncludes(
|
||||
filepath.Dir(file),
|
||||
cmd.String("include-path"),
|
||||
markdown,
|
||||
templates,
|
||||
)
|
||||
if err != nil {
|
||||
fatalErrorHandler.Handle(err, "unable to process includes")
|
||||
return nil
|
||||
}
|
||||
|
||||
if !recurse {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
macros, markdown, err := macro.ExtractMacros(
|
||||
filepath.Dir(file),
|
||||
cmd.String("include-path"),
|
||||
markdown,
|
||||
templates,
|
||||
)
|
||||
if err != nil {
|
||||
fatalErrorHandler.Handle(err, "unable to extract macros")
|
||||
return nil
|
||||
}
|
||||
|
||||
macros = append(macros, stdlib.Macros...)
|
||||
|
||||
for _, macro := range macros {
|
||||
markdown, err = macro.Apply(markdown)
|
||||
if err != nil {
|
||||
fatalErrorHandler.Handle(err, "unable to apply macro")
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
links, err := page.ResolveRelativeLinks(api, meta, markdown, filepath.Dir(file), cmd.String("space"), cmd.Bool("title-from-h1"), parents, cmd.Bool("title-append-generated-hash"))
|
||||
if err != nil {
|
||||
fatalErrorHandler.Handle(err, "unable to resolve relative links")
|
||||
return nil
|
||||
}
|
||||
|
||||
markdown = page.SubstituteLinks(markdown, links)
|
||||
|
||||
if cmd.Bool("dry-run") {
|
||||
_, _, err := page.ResolvePage(cmd.Bool("dry-run"), api, meta)
|
||||
if err != nil {
|
||||
fatalErrorHandler.Handle(err, "unable to resolve page location")
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
if cmd.Bool("compile-only") || cmd.Bool("dry-run") {
|
||||
if cmd.Bool("drop-h1") {
|
||||
log.Info(
|
||||
"the leading H1 heading will be excluded from the Confluence output",
|
||||
)
|
||||
}
|
||||
|
||||
cfg := types.MarkConfig{
|
||||
MermaidProvider: cmd.String("mermaid-provider"),
|
||||
MermaidScale: cmd.Float("mermaid-scale"),
|
||||
DropFirstH1: cmd.Bool("drop-h1"),
|
||||
StripNewlines: cmd.Bool("strip-linebreaks"),
|
||||
Features: cmd.StringSlice("features"),
|
||||
}
|
||||
html, _ := mark.CompileMarkdown(markdown, stdlib, file, cfg)
|
||||
fmt.Println(html)
|
||||
return nil
|
||||
}
|
||||
|
||||
var target *confluence.PageInfo
|
||||
|
||||
if meta != nil {
|
||||
parent, page, err := page.ResolvePage(cmd.Bool("dry-run"), api, meta)
|
||||
if err != nil {
|
||||
fatalErrorHandler.Handle(karma.Describe("title", meta.Title).Reason(err), "unable to resolve %s", meta.Type)
|
||||
return nil
|
||||
}
|
||||
|
||||
if page == nil {
|
||||
page, err = api.CreatePage(
|
||||
meta.Space,
|
||||
meta.Type,
|
||||
parent,
|
||||
meta.Title,
|
||||
``,
|
||||
)
|
||||
if err != nil {
|
||||
fatalErrorHandler.Handle(err, "can't create %s %q", meta.Type, meta.Title)
|
||||
return nil
|
||||
}
|
||||
// (issues/139): A delay between the create and update call
|
||||
// helps mitigate a 409 conflict that can occur when attempting
|
||||
// to update a page just after it was created.
|
||||
time.Sleep(1 * time.Second)
|
||||
}
|
||||
|
||||
target = page
|
||||
} else {
|
||||
if pageID == "" {
|
||||
fatalErrorHandler.Handle(nil, "URL should provide 'pageId' GET-parameter")
|
||||
return nil
|
||||
}
|
||||
|
||||
page, err := api.GetPageByID(pageID)
|
||||
if err != nil {
|
||||
fatalErrorHandler.Handle(err, "unable to retrieve page by id")
|
||||
return nil
|
||||
}
|
||||
|
||||
target = page
|
||||
}
|
||||
|
||||
// Resolve attachments created from <!-- Attachment: --> directive
|
||||
localAttachments, err := attachment.ResolveLocalAttachments(vfs.LocalOS, filepath.Dir(file), meta.Attachments)
|
||||
if err != nil {
|
||||
fatalErrorHandler.Handle(err, "unable to locate attachments")
|
||||
return nil
|
||||
}
|
||||
|
||||
attaches, err := attachment.ResolveAttachments(
|
||||
api,
|
||||
target,
|
||||
localAttachments,
|
||||
)
|
||||
if err != nil {
|
||||
fatalErrorHandler.Handle(err, "unable to create/update attachments")
|
||||
return nil
|
||||
}
|
||||
|
||||
markdown = attachment.CompileAttachmentLinks(markdown, attaches)
|
||||
|
||||
if cmd.Bool("drop-h1") {
|
||||
log.Info(
|
||||
"the leading H1 heading will be excluded from the Confluence output",
|
||||
)
|
||||
}
|
||||
cfg := types.MarkConfig{
|
||||
MermaidProvider: cmd.String("mermaid-provider"),
|
||||
MermaidScale: cmd.Float("mermaid-scale"),
|
||||
DropFirstH1: cmd.Bool("drop-h1"),
|
||||
StripNewlines: cmd.Bool("strip-linebreaks"),
|
||||
Features: cmd.StringSlice("features"),
|
||||
}
|
||||
|
||||
html, inlineAttachments := mark.CompileMarkdown(markdown, stdlib, file, cfg)
|
||||
|
||||
// Resolve attachments detected from markdown
|
||||
_, err = attachment.ResolveAttachments(
|
||||
api,
|
||||
target,
|
||||
inlineAttachments,
|
||||
)
|
||||
if err != nil {
|
||||
fatalErrorHandler.Handle(err, "unable to create/update attachments")
|
||||
return nil
|
||||
}
|
||||
|
||||
{
|
||||
var buffer bytes.Buffer
|
||||
|
||||
err := stdlib.Templates.ExecuteTemplate(
|
||||
&buffer,
|
||||
"ac:layout",
|
||||
struct {
|
||||
Layout string
|
||||
Sidebar string
|
||||
Body string
|
||||
}{
|
||||
Layout: meta.Layout,
|
||||
Sidebar: meta.Sidebar,
|
||||
Body: html,
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
fatalErrorHandler.Handle(err, "unable to execute layout template")
|
||||
return nil
|
||||
}
|
||||
|
||||
html = buffer.String()
|
||||
}
|
||||
|
||||
var finalVersionMessage string
|
||||
var shouldUpdatePage = true
|
||||
|
||||
if cmd.Bool("changes-only") {
|
||||
contentHash := getSHA1Hash(html)
|
||||
|
||||
log.Debugf(
|
||||
nil,
|
||||
"content hash: %s",
|
||||
contentHash,
|
||||
)
|
||||
|
||||
versionPattern := `\[v([a-f0-9]{40})]$`
|
||||
re := regexp.MustCompile(versionPattern)
|
||||
|
||||
matches := re.FindStringSubmatch(target.Version.Message)
|
||||
|
||||
if len(matches) > 1 {
|
||||
log.Debugf(
|
||||
nil,
|
||||
"previous content hash: %s",
|
||||
matches[1],
|
||||
)
|
||||
|
||||
if matches[1] == contentHash {
|
||||
log.Infof(
|
||||
nil,
|
||||
"page %q is already up to date",
|
||||
target.Title,
|
||||
)
|
||||
shouldUpdatePage = false
|
||||
}
|
||||
}
|
||||
|
||||
finalVersionMessage = fmt.Sprintf("%s [v%s]", cmd.String("version-message"), contentHash)
|
||||
} else {
|
||||
finalVersionMessage = cmd.String("version-message")
|
||||
}
|
||||
|
||||
if shouldUpdatePage {
|
||||
err = api.UpdatePage(target, html, cmd.Bool("minor-edit"), finalVersionMessage, meta.Labels, meta.ContentAppearance, meta.Emoji)
|
||||
if err != nil {
|
||||
fatalErrorHandler.Handle(err, "unable to update page")
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
if !updateLabels(api, target, meta, fatalErrorHandler) { // on error updating labels, return nil
|
||||
return nil
|
||||
}
|
||||
|
||||
if cmd.Bool("edit-lock") {
|
||||
log.Infof(
|
||||
nil,
|
||||
`edit locked on page %q by user %q to prevent manual edits`,
|
||||
target.Title,
|
||||
username,
|
||||
)
|
||||
|
||||
err := api.RestrictPageUpdates(target, username)
|
||||
if err != nil {
|
||||
fatalErrorHandler.Handle(err, "unable to restrict page updates")
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
return target
|
||||
}
|
||||
|
||||
func updateLabels(api *confluence.API, target *confluence.PageInfo, meta *metadata.Meta, fatalErrorHandler *FatalErrorHandler) bool {
|
||||
labelInfo, err := api.GetPageLabels(target, "global")
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
log.Debug("Page Labels:")
|
||||
log.Debug(labelInfo.Labels)
|
||||
|
||||
log.Debug("Meta Labels:")
|
||||
log.Debug(meta.Labels)
|
||||
|
||||
delLabels := determineLabelsToRemove(labelInfo, meta)
|
||||
log.Debug("Del Labels:")
|
||||
log.Debug(delLabels)
|
||||
|
||||
addLabels := determineLabelsToAdd(meta, labelInfo)
|
||||
log.Debug("Add Labels:")
|
||||
log.Debug(addLabels)
|
||||
|
||||
if len(addLabels) > 0 {
|
||||
_, err = api.AddPageLabels(target, addLabels)
|
||||
if err != nil {
|
||||
fatalErrorHandler.Handle(err, "error adding labels")
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
for _, label := range delLabels {
|
||||
_, err = api.DeletePageLabel(target, label)
|
||||
if err != nil {
|
||||
fatalErrorHandler.Handle(err, "error deleting labels")
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Page has label but label not in Metadata
|
||||
func determineLabelsToRemove(labelInfo *confluence.LabelInfo, meta *metadata.Meta) []string {
|
||||
var labels []string
|
||||
for _, label := range labelInfo.Labels {
|
||||
if !slices.ContainsFunc(meta.Labels, func(metaLabel string) bool {
|
||||
return strings.EqualFold(metaLabel, label.Name)
|
||||
}) {
|
||||
labels = append(labels, label.Name)
|
||||
}
|
||||
}
|
||||
return labels
|
||||
}
|
||||
|
||||
// Metadata has label but Page does not have it
|
||||
func determineLabelsToAdd(meta *metadata.Meta, labelInfo *confluence.LabelInfo) []string {
|
||||
var labels []string
|
||||
for _, metaLabel := range meta.Labels {
|
||||
if !slices.ContainsFunc(labelInfo.Labels, func(label confluence.Label) bool {
|
||||
return strings.EqualFold(label.Name, metaLabel)
|
||||
}) {
|
||||
labels = append(labels, metaLabel)
|
||||
}
|
||||
}
|
||||
return labels
|
||||
}
|
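Taken together, the two helpers above compute a case-insensitive diff between the labels already on the page and the labels declared in the metadata. A self-contained restatement of that behavior on plain string slices (illustrative only, not the package's exported API):

```go
package main

import (
	"fmt"
	"slices"
	"strings"
)

// diffLabels mirrors determineLabelsToAdd/determineLabelsToRemove:
// labels are compared case-insensitively; anything on the page but not in the
// metadata is removed, anything in the metadata but not on the page is added.
func diffLabels(pageLabels, metaLabels []string) (add, del []string) {
	for _, l := range pageLabels {
		if !slices.ContainsFunc(metaLabels, func(m string) bool { return strings.EqualFold(m, l) }) {
			del = append(del, l)
		}
	}
	for _, m := range metaLabels {
		if !slices.ContainsFunc(pageLabels, func(l string) bool { return strings.EqualFold(l, m) }) {
			add = append(add, m)
		}
	}
	return add, del
}

func main() {
	add, del := diffLabels([]string{"docs", "Obsolete"}, []string{"DOCS", "runbook"})
	fmt.Println("add:", add) // [runbook]
	fmt.Println("del:", del) // [Obsolete]
}
```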
||||
|
||||
func ConfigFilePath() string {
|
||||
fp, err := os.UserConfigDir()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
return filepath.Join(fp, "mark.toml")
|
||||
}
|
||||
|
||||
func SetLogLevel(cmd *cli.Command) error {
|
||||
logLevel := cmd.String("log-level")
|
||||
switch strings.ToUpper(logLevel) {
|
||||
case lorg.LevelTrace.String():
|
||||
log.SetLevel(lorg.LevelTrace)
|
||||
case lorg.LevelDebug.String():
|
||||
log.SetLevel(lorg.LevelDebug)
|
||||
case lorg.LevelInfo.String():
|
||||
log.SetLevel(lorg.LevelInfo)
|
||||
case lorg.LevelWarning.String():
|
||||
log.SetLevel(lorg.LevelWarning)
|
||||
case lorg.LevelError.String():
|
||||
log.SetLevel(lorg.LevelError)
|
||||
case lorg.LevelFatal.String():
|
||||
log.SetLevel(lorg.LevelFatal)
|
||||
default:
|
||||
return fmt.Errorf("unknown log level: %s", logLevel)
|
||||
}
|
||||
log.GetLevel()
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func getSHA1Hash(input string) string {
|
||||
hash := sha1.New()
|
||||
hash.Write([]byte(input))
|
||||
return hex.EncodeToString(hash.Sum(nil))
|
||||
}
|
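getSHA1Hash closes the --changes-only loop started in processFile: the content hash is appended to the version message as `[v<sha1>]` and extracted on the next run with the regexp shown above. A minimal round-trip sketch:

```go
package main

import (
	"crypto/sha1"
	"encoding/hex"
	"fmt"
	"regexp"
)

func main() {
	html := "<p>rendered page body</p>"

	// Same hashing as getSHA1Hash above.
	sum := sha1.Sum([]byte(html))
	contentHash := hex.EncodeToString(sum[:])

	// On upload, the hash is appended to the version message ...
	versionMessage := fmt.Sprintf("%s [v%s]", "updated by mark", contentHash)

	// ... and on the next --changes-only run it is extracted and compared.
	re := regexp.MustCompile(`\[v([a-f0-9]{40})]$`)
	if m := re.FindStringSubmatch(versionMessage); len(m) > 1 && m[1] == contentHash {
		fmt.Println("page is already up to date, skipping upload")
	}
}
```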
34  util/error_handler.go  Normal file
@@ -0,0 +1,34 @@
|
||||
package util
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/reconquest/pkg/log"
|
||||
)
|
||||
|
||||
type FatalErrorHandler struct {
|
||||
ContinueOnError bool
|
||||
}
|
||||
|
||||
func NewErrorHandler(continueOnError bool) *FatalErrorHandler {
|
||||
return &FatalErrorHandler{
|
||||
ContinueOnError: continueOnError,
|
||||
}
|
||||
}
|
||||
|
||||
func (h *FatalErrorHandler) Handle(err error, format string, args ...interface{}) {
|
||||
|
||||
if err == nil {
|
||||
if h.ContinueOnError {
|
||||
log.Error(fmt.Sprintf(format, args...))
|
||||
return
|
||||
}
|
||||
log.Fatal(fmt.Sprintf(format, args...))
|
||||
}
|
||||
|
||||
if h.ContinueOnError {
|
||||
log.Errorf(err, format, args...)
|
||||
return
|
||||
}
|
||||
log.Fatalf(err, format, args...)
|
||||
}
|
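A short usage sketch of the handler, assuming the module path github.com/kovetskiy/mark/util; with continue-on-error enabled, Handle logs and returns so the caller can skip the current file, otherwise it exits the process:

```go
package main

import (
	"errors"

	"github.com/kovetskiy/mark/util"
)

func main() {
	// true corresponds to the --continue-on-error flag.
	handler := util.NewErrorHandler(true)

	err := errors.New("unable to reach Confluence")
	handler.Handle(err, "unable to process %q", "docs/page.md")

	// A nil error still reports the formatted message (see the nil branch above).
	handler.Handle(nil, "specified file %q doesn't contain metadata", "docs/page.md")
}
```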
196  util/flags.go  Normal file
@@ -0,0 +1,196 @@
|
||||
package util
|
||||
|
||||
import (
|
||||
altsrc "github.com/urfave/cli-altsrc/v3"
|
||||
altsrctoml "github.com/urfave/cli-altsrc/v3/toml"
|
||||
"github.com/urfave/cli/v3"
|
||||
)
|
||||
|
||||
var filename string
|
||||
|
||||
var Flags = []cli.Flag{
|
||||
&cli.StringFlag{
|
||||
Name: "files",
|
||||
Aliases: []string{"f"},
|
||||
Value: "",
|
||||
Usage: "use specified markdown file(s) for converting to html. Supports file globbing patterns (needs to be quoted).",
|
||||
TakesFile: true,
|
||||
Sources: cli.NewValueSourceChain(cli.EnvVar("MARK_FILES"), altsrctoml.TOML("files", altsrc.NewStringPtrSourcer(&filename))),
|
||||
},
|
||||
&cli.BoolFlag{
|
||||
Name: "continue-on-error",
|
||||
Value: false,
|
||||
Usage: "don't exit if an error occurs while processing a file, continue processing remaining files.",
|
||||
Sources: cli.NewValueSourceChain(cli.EnvVar("MARK_CONTINUE_ON_ERROR"), altsrctoml.TOML("continue-on-error", altsrc.NewStringPtrSourcer(&filename))),
|
||||
},
|
||||
&cli.BoolFlag{
|
||||
Name: "compile-only",
|
||||
Value: false,
|
||||
Usage: "show resulting HTML and don't update Confluence page content.",
|
||||
Sources: cli.NewValueSourceChain(cli.EnvVar("MARK_COMPILE_ONLY"), altsrctoml.TOML("compile-only", altsrc.NewStringPtrSourcer(&filename))),
|
||||
},
|
||||
&cli.BoolFlag{
|
||||
Name: "dry-run",
|
||||
Value: false,
|
||||
Usage: "resolve page and ancestry, show resulting HTML and exit.",
|
||||
Sources: cli.NewValueSourceChain(cli.EnvVar("MARK_DRY_RUN"), altsrctoml.TOML("dry-run", altsrc.NewStringPtrSourcer(&filename))),
|
||||
},
|
||||
&cli.BoolFlag{
|
||||
Name: "edit-lock",
|
||||
Value: false,
|
||||
Aliases: []string{"k"},
|
||||
Usage: "lock page editing to current user only to prevent accidental manual edits over Confluence Web UI.",
|
||||
Sources: cli.NewValueSourceChain(cli.EnvVar("MARK_EDIT_LOCK"), altsrctoml.TOML("edit-lock", altsrc.NewStringPtrSourcer(&filename))),
|
||||
},
|
||||
&cli.BoolFlag{
|
||||
Name: "drop-h1",
|
||||
Value: false,
|
||||
Usage: "don't include the first H1 heading in Confluence output.",
|
||||
Sources: cli.NewValueSourceChain(cli.EnvVar("MARK_DROP_H1"), altsrctoml.TOML("drop-h1", altsrc.NewStringPtrSourcer(&filename))),
|
||||
},
|
||||
&cli.BoolFlag{
|
||||
Name: "strip-linebreaks",
|
||||
Value: false,
|
||||
Aliases: []string{"L"},
|
||||
Usage: "remove linebreaks inside of tags, to accommodate non-standard Confluence behavior",
|
||||
Sources: cli.NewValueSourceChain(cli.EnvVar("MARK_STRIP_LINEBREAKS"), altsrctoml.TOML("strip-linebreaks", altsrc.NewStringPtrSourcer(&filename))),
|
||||
},
|
||||
&cli.BoolFlag{
|
||||
Name: "title-from-h1",
|
||||
Value: false,
|
||||
Usage: "extract page title from a leading H1 heading. If no H1 heading on a page exists, then title must be set in the page metadata.",
|
||||
Sources: cli.NewValueSourceChain(cli.EnvVar("MARK_TITLE_FROM_H1"), altsrctoml.TOML("title-from-h1", altsrc.NewStringPtrSourcer(&filename))),
|
||||
},
|
||||
&cli.BoolFlag{
|
||||
Name: "title-append-generated-hash",
|
||||
Value: false,
|
||||
Usage: "appends a short hash generated from the path of the page (space, parents, and title) to the title",
|
||||
Sources: cli.NewValueSourceChain(cli.EnvVar("MARK_TITLE_APPEND_GENERATED_HASH"), altsrctoml.TOML("title-append-generated-hash", altsrc.NewStringPtrSourcer(&filename))),
|
||||
},
|
||||
&cli.BoolFlag{
|
||||
Name: "minor-edit",
|
||||
Value: false,
|
||||
Usage: "don't send notifications while updating Confluence page.",
|
||||
Sources: cli.NewValueSourceChain(cli.EnvVar("MARK_MINOR_EDIT"), altsrctoml.TOML("minor-edit", altsrc.NewStringPtrSourcer(&filename))),
|
||||
},
|
||||
&cli.StringFlag{
|
||||
Name: "version-message",
|
||||
Value: "",
|
||||
Usage: "add a message to the page version, to explain the edit (default: \"\")",
|
||||
Sources: cli.NewValueSourceChain(cli.EnvVar("MARK_VERSION_MESSAGE"), altsrctoml.TOML("version-message", altsrc.NewStringPtrSourcer(&filename))),
|
||||
},
|
||||
&cli.StringFlag{
|
||||
Name: "color",
|
||||
Value: "auto",
|
||||
Usage: "display logs in color. Possible values: auto, never.",
|
||||
Sources: cli.NewValueSourceChain(cli.EnvVar("MARK_COLOR"),
|
||||
altsrctoml.TOML("color", altsrc.NewStringPtrSourcer(&filename))),
|
||||
},
|
||||
&cli.StringFlag{
|
||||
Name: "log-level",
|
||||
Value: "info",
|
||||
Usage: "set the log level. Possible values: TRACE, DEBUG, INFO, WARNING, ERROR, FATAL.",
|
||||
Sources: cli.NewValueSourceChain(cli.EnvVar("MARK_LOG_LEVEL"), altsrctoml.TOML("log-level", altsrc.NewStringPtrSourcer(&filename))),
|
||||
},
|
||||
&cli.StringFlag{
|
||||
Name: "username",
|
||||
Aliases: []string{"u"},
|
||||
Value: "",
|
||||
Usage: "use specified username for updating Confluence page.",
|
||||
Sources: cli.NewValueSourceChain(cli.EnvVar("MARK_USERNAME"),
|
||||
altsrctoml.TOML("username", altsrc.NewStringPtrSourcer(&filename))),
|
||||
},
|
||||
&cli.StringFlag{
|
||||
Name: "password",
|
||||
Aliases: []string{"p"},
|
||||
Value: "",
|
||||
Usage: "use specified token for updating Confluence page. Specify - as password to read password from stdin, or your Personal access token. Username is not mandatory if personal access token is provided. For more info please see: https://developer.atlassian.com/server/confluence/confluence-server-rest-api/#authentication.",
|
||||
Sources: cli.NewValueSourceChain(cli.EnvVar("MARK_PASSWORD"), altsrctoml.TOML("password", altsrc.NewStringPtrSourcer(&filename))),
|
||||
},
|
||||
&cli.StringFlag{
|
||||
Name: "target-url",
|
||||
Aliases: []string{"l"},
|
||||
Value: "",
|
||||
Usage: "edit specified Confluence page. If -l is not specified, file should contain metadata (see above).",
|
||||
Sources: cli.NewValueSourceChain(cli.EnvVar("MARK_TARGET_URL"), altsrctoml.TOML("target-url", altsrc.NewStringPtrSourcer(&filename))),
|
||||
},
|
||||
&cli.StringFlag{
|
||||
Name: "base-url",
|
||||
Aliases: []string{"b"},
|
||||
Value: "",
|
||||
Usage: "base URL for Confluence. Alternative option for base_url config field.",
|
||||
Sources: cli.NewValueSourceChain(cli.EnvVar("MARK_BASE_URL"),
|
||||
altsrctoml.TOML("base-url", altsrc.NewStringPtrSourcer(&filename))),
|
||||
},
|
||||
&cli.StringFlag{
|
||||
Name: "config",
|
||||
Aliases: []string{"c"},
|
||||
Value: ConfigFilePath(),
|
||||
Usage: "use the specified configuration file.",
|
||||
TakesFile: true,
|
||||
Sources: cli.NewValueSourceChain(cli.EnvVar("MARK_CONFIG")),
|
||||
Destination: &filename,
|
||||
},
|
||||
&cli.BoolFlag{
|
||||
Name: "ci",
|
||||
Value: false,
|
||||
Usage: "run in CI mode. It won't fail if files are not found.",
|
||||
Sources: cli.NewValueSourceChain(cli.EnvVar("MARK_CI"), altsrctoml.TOML("ci", altsrc.NewStringPtrSourcer(&filename))),
|
||||
},
|
||||
&cli.StringFlag{
|
||||
Name: "space",
|
||||
Value: "",
|
||||
Usage: "use specified space key. If the space key is not specified, it must be set in the page metadata.",
|
||||
Sources: cli.NewValueSourceChain(cli.EnvVar("MARK_SPACE"), altsrctoml.TOML("space", altsrc.NewStringPtrSourcer(&filename))),
|
||||
},
|
||||
&cli.StringFlag{
|
||||
Name: "parents",
|
||||
Value: "",
|
||||
Usage: "A list containing the parents of the document separated by parents-delimiter (default: '/'). These will be prepended to the ones defined in the document itself.",
|
||||
Sources: cli.NewValueSourceChain(cli.EnvVar("MARK_PARENTS"), altsrctoml.TOML("parents", altsrc.NewStringPtrSourcer(&filename))),
|
||||
},
|
||||
&cli.StringFlag{
|
||||
Name: "parents-delimiter",
|
||||
Value: "/",
|
||||
Usage: "The delimiter used for the parents list",
|
||||
Sources: cli.NewValueSourceChain(cli.EnvVar("MARK_PARENTS_DELIMITER"), altsrctoml.TOML("parents-delimiter", altsrc.NewStringPtrSourcer(&filename))),
|
||||
},
|
||||
&cli.StringFlag{
|
||||
Name: "mermaid-provider",
|
||||
Value: "cloudscript",
|
||||
Usage: "defines the mermaid provider to use. Supported options are: cloudscript, mermaid-go.",
|
||||
Sources: cli.NewValueSourceChain(cli.EnvVar("MARK_MERMAID_PROVIDER"), altsrctoml.TOML("mermaid-provider", altsrc.NewStringPtrSourcer(&filename))),
|
||||
},
|
||||
&cli.FloatFlag{
|
||||
Name: "mermaid-scale",
|
||||
Value: 1.0,
|
||||
Usage: "defines the scaling factor for mermaid renderings.",
|
||||
Sources: cli.NewValueSourceChain(cli.EnvVar("MARK_MERMAID_SCALE"), altsrctoml.TOML("mermaid-scale", altsrc.NewStringPtrSourcer(&filename))),
|
||||
},
|
||||
&cli.StringFlag{
|
||||
Name: "include-path",
|
||||
Value: "",
|
||||
Usage: "Path for shared includes, used as a fallback if the include doesn't exist in the current directory.",
|
||||
TakesFile: true,
|
||||
Sources: cli.NewValueSourceChain(cli.EnvVar("MARK_INCLUDE_PATH"), altsrctoml.TOML("include-path", altsrc.NewStringPtrSourcer(&filename))),
|
||||
},
|
||||
&cli.BoolFlag{
|
||||
Name: "changes-only",
|
||||
Value: false,
|
||||
Usage: "Avoids re-uploading pages that haven't changed since the last run.",
|
||||
Sources: cli.NewValueSourceChain(cli.EnvVar("MARK_CHANGES_ONLY"), altsrctoml.TOML("changes-only", altsrc.NewStringPtrSourcer(&filename))),
|
||||
},
|
||||
&cli.FloatFlag{
|
||||
Name: "d2-scale",
|
||||
Value: 1.0,
|
||||
Usage: "defines the scaling factor for d2 renderings.",
|
||||
Sources: cli.NewValueSourceChain(cli.EnvVar("MARK_D2_SCALE"), altsrctoml.TOML("d2-scale", altsrc.NewStringPtrSourcer(&filename))),
|
||||
},
|
||||
|
||||
&cli.StringSliceFlag{
|
||||
Name: "features",
|
||||
Value: []string{"mermaid"},
|
||||
Usage: "Enables optional features. Current features: d2, mermaid",
|
||||
Sources: cli.NewValueSourceChain(cli.EnvVar("MARK_FEATURES"), altsrctoml.TOML("features", altsrc.NewStringPtrSourcer(&filename))),
|
||||
},
|
||||
}
|
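Every flag above also registers a TOML value source keyed by the flag name, so the same options can live in the configuration file (mark.toml by default, see ConfigFilePath in util/cli.go). A hypothetical config sketch; the keys follow the altsrctoml registrations above, and the values are examples only:

```toml
# Hypothetical mark.toml (default location: <user config dir>/mark.toml).
files             = "docs/**/*.md"
space             = "DOCS"
base-url          = "https://wiki.example.com"
username          = "jane.doe"
parents           = "Engineering/Runbooks"
parents-delimiter = "/"
minor-edit        = true
changes-only      = true
features          = ["mermaid", "d2"]
```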
19  vfs/vfs.go  Normal file
@@ -0,0 +1,19 @@
|
||||
package vfs
|
||||
|
||||
import (
|
||||
"io"
|
||||
"os"
|
||||
)
|
||||
|
||||
type Opener interface {
|
||||
Open(name string) (io.ReadWriteCloser, error)
|
||||
}
|
||||
|
||||
type LocalOSOpener struct {
|
||||
}
|
||||
|
||||
func (o LocalOSOpener) Open(name string) (io.ReadWriteCloser, error) {
|
||||
return os.Open(name)
|
||||
}
|
||||
|
||||
var LocalOS = LocalOSOpener{}
|
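vfs.Opener exists so callers such as attachment.ResolveLocalAttachments can be handed something other than the local filesystem. Below is a hypothetical in-memory implementation next to the OS-backed LocalOS; memOpener and nopCloser are invented names for this sketch:

```go
package main

import (
	"bytes"
	"fmt"
	"io"

	"github.com/kovetskiy/mark/vfs"
)

// memOpener is a hypothetical in-memory vfs.Opener, handy when exercising
// attachment resolution without real files on disk.
type memOpener map[string][]byte

type nopCloser struct{ *bytes.Buffer }

func (nopCloser) Close() error { return nil }

func (m memOpener) Open(name string) (io.ReadWriteCloser, error) {
	data, ok := m[name]
	if !ok {
		return nil, fmt.Errorf("no such file: %s", name)
	}
	return nopCloser{bytes.NewBuffer(data)}, nil
}

func main() {
	var opener vfs.Opener = memOpener{"diagram.png": []byte("fake image bytes")}

	f, err := opener.Open("diagram.png")
	if err != nil {
		panic(err)
	}
	defer f.Close()

	content, _ := io.ReadAll(f)
	fmt.Println(len(content), "bytes read through the Opener interface")
}
```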