Compare commits
331 Commits
gt-crashed
...
master
Author | SHA1 | Date | |
---|---|---|---|
60294a40b9 | |||
6825d9f242 | |||
738697bc63 | |||
d822926612 | |||
af064db451 | |||
f85ed1803b | |||
92760a54a9 | |||
e73cbbb72a | |||
b89b3755f7 | |||
d2959856b9 | |||
ca3550a3f4 | |||
d470dd533c | |||
babbd48934 | |||
5ccff44fc9 | |||
33f3a9a0c2 | |||
96903d8627 | |||
4beca346be | |||
37d6000702 | |||
0ce6b89b6c | |||
629a9e32ca | |||
e26334425f | |||
58ec9eed76 | |||
bb7bab403e | |||
cea7ce5e6c | |||
65fb92964f | |||
cccf81e89b | |||
fa6303b762 | |||
02403b5ae3 | |||
e4d0880fea | |||
ee80105f44 | |||
ecb4321551 | |||
56ef3869ca | |||
8fe49106bb | |||
ff9fbb92f9 | |||
a6e49448fa | |||
45c4762201 | |||
8634047be9 | |||
f3929ceece | |||
b4d6940ec6 | |||
7bbf399ae1 | |||
fb64d5c1ae | |||
554fb9000e | |||
47eaafaf5c | |||
78900cc64d | |||
78f1b1474f | |||
94fdf3052f | |||
6072fd8971 | |||
6e58c5631d | |||
8cadf004dd | |||
2bc0b44fa2 | |||
882f33859c | |||
968bfff3bb | |||
f67a24c94c | |||
3c01731fdc | |||
1b66316d03 | |||
62a5c398d1 | |||
23335a7727 | |||
3202717cea | |||
27a28ce543 | |||
47aabb1d4f | |||
eeaf28127b | |||
141942ce3e | |||
c9b214f633 | |||
59b8387728 | |||
6433da508a | |||
5cf1d7bcc6 | |||
28a3d22911 | |||
da459b9d26 | |||
31e27acdff | |||
|
f592e29eef | ||
|
de61e736fa | ||
a22005da27 | |||
5e4db00352 | |||
ef982eb6a1 | |||
089eb8c2eb | |||
5ec6ea6377 | |||
93fc8ffe1c | |||
7dc0bdaac0 | |||
0c45ccb39e | |||
573c929845 | |||
13f9b8050e | |||
53910fa840 | |||
bf0ea4b46d | |||
977922d7a3 | |||
51e84c2404 | |||
8fb6373a83 | |||
9ae7a6ec62 | |||
7979007091 | |||
a264070d5c | |||
642712cdfd | |||
34d77ecedd | |||
3a2d096025 | |||
7cd3f30216 | |||
5c897886e0 | |||
2037af37a3 | |||
82405165e1 | |||
b8262d00ca | |||
6832d9024d | |||
da4c9bf9c4 | |||
2ca24978bc | |||
bba3daebfa | |||
f1f9a7b30c | |||
02bc11d6a7 | |||
48c1707fbe | |||
0c5ede8498 | |||
87fba41704 | |||
0305a68aca | |||
b2a4dc1839 | |||
e1c4f4fb52 | |||
428936c1a7 | |||
f0bca02f73 | |||
536d2a9326 | |||
60939fe2b8 | |||
72f3a633ae | |||
4399531c8d | |||
aec4b58e23 | |||
e7f2910a51 | |||
b4f2564f67 | |||
cd47d22480 | |||
50a8595aa7 | |||
5aeac92772 | |||
27a20e83b0 | |||
80afb07c77 | |||
0185c741a1 | |||
b7fdbb731c | |||
494bfc76a7 | |||
a51583e5fc | |||
e22eb1221d | |||
655ee2e935 | |||
b518ec0c7c | |||
f2b41dd546 | |||
6c4fc47900 | |||
172e72c1f6 | |||
31bdc6dbd2 | |||
1a380405a3 | |||
d876485db9 | |||
ac8fa682e3 | |||
9744a8c6b0 | |||
4c661e0b6d | |||
05269ac364 | |||
8143633b42 | |||
ac41b1721f | |||
c162b9b4bc | |||
4199ed0a6c | |||
004b286835 | |||
06ccc84c29 | |||
2a2a4bb5f1 | |||
fe57ba9497 | |||
3699111416 | |||
14160ee4ca | |||
b09f2310ef | |||
979facfd86 | |||
8983f25b3a | |||
8222d8c7d2 | |||
9fc938407a | |||
349c314318 | |||
efd11cc44c | |||
41779dfdde | |||
ef855a9ae5 | |||
486773af63 | |||
cc99feebac | |||
8b424f4f03 | |||
1d9cb57477 | |||
ebc093c6e8 | |||
0987d4520f | |||
bb75994dd0 | |||
95a457e31e | |||
1a2f5a3a4a | |||
1e97ae3489 | |||
f52e027edd | |||
372c6a6a55 | |||
5c4f6ab55b | |||
eda63bfc3a | |||
056dad228a | |||
9163d0b145 | |||
55e786f01c | |||
6143da2db9 | |||
253a5df692 | |||
55b304581c | |||
92a686de96 | |||
6a3c0332b3 | |||
60cfd01f0d | |||
9361094bf6 | |||
a84ead51e1 | |||
01a68d562c | |||
362659b584 | |||
8488531726 | |||
0a802799b2 | |||
dff10a6705 | |||
fbe8f8d1c3 | |||
90fa52653f | |||
d011f9c1c1 | |||
4f849d2e36 | |||
7a7dfa648d | |||
f5006572e8 | |||
e3f3a62078 | |||
e7411c2075 | |||
4b6853d920 | |||
356ee754bf | |||
473c262454 | |||
ae75337739 | |||
4944923069 | |||
4b0227454b | |||
146d762f5f | |||
9a4a87a45e | |||
ad3e89d891 | |||
5ab022b860 | |||
eeb2330fe8 | |||
baba99ec84 | |||
745e9d8e6e | |||
dc218111e6 | |||
e46329d9e1 | |||
7855e9e0c3 | |||
77ddf8a801 | |||
e20cca500e | |||
a3741918c9 | |||
fe0d65cf5a | |||
91e92c3e6a | |||
daef3951b7 | |||
2dd4377c4d | |||
093f989e28 | |||
d448e38047 | |||
a4cf5edacd | |||
7a14575090 | |||
62745b4612 | |||
7847f95192 | |||
4d913f4461 | |||
8dc8ce4b99 | |||
3ea02ee589 | |||
4a3f68bad3 | |||
d4577a6489 | |||
ed87c46402 | |||
298601dbc5 | |||
006b8c6663 | |||
51ad735e0e | |||
1ee1d2bab9 | |||
2e5a2cb3b0 | |||
51a5a09d6c | |||
cda799d210 | |||
1a44e25dc4 | |||
18bdabdb59 | |||
b7a82ab374 | |||
cf08d18fb2 | |||
57d138c21f | |||
8a2ac498e6 | |||
ee2ade0509 | |||
e9465349e3 | |||
557f1893e3 | |||
e90cf3058a | |||
59f4822e25 | |||
45fa06e715 | |||
68dff9bd8c | |||
71d1cbd9a6 | |||
2cb905e58f | |||
ad8e0f445f | |||
4b75042fab | |||
690ae86e2f | |||
a7931f6a96 | |||
ccab6093a9 | |||
1c935c0de0 | |||
a4c77629ff | |||
f365b3529e | |||
1fe6a8f548 | |||
11dc04e39f | |||
6eada9fe00 | |||
c8c50288cf | |||
781f6fde63 | |||
06b2aff27e | |||
b25234a5bb | |||
415c3f0b66 | |||
c38945da71 | |||
2bcf67a132 | |||
0b66b7513c | |||
b67df075b1 | |||
ed211f2fb8 | |||
a5a01e7ca6 | |||
429e04bef7 | |||
c8ce8d0068 | |||
7784d99153 | |||
2f9bef131f | |||
ae7d315699 | |||
2fe11cb753 | |||
128f5b8d7c | |||
4d4a3f6697 | |||
fdc8b8b265 | |||
726d2950b7 | |||
f0c0a537a0 | |||
84591efdcc | |||
2f3fa9a4d3 | |||
40703573ed | |||
a358c34c20 | |||
c3e3902cd0 | |||
a88af11245 | |||
79b2877154 | |||
a587817790 | |||
ac0912cb75 | |||
bd7f33a223 | |||
a3aed51bc8 | |||
78032b3967 | |||
f3171fa09e | |||
dbd9a495c2 | |||
1d58571d39 | |||
1ea7736b0f | |||
8e9cfe4345 | |||
7ea4c54a9f | |||
52eceaf1d5 | |||
973df93f58 | |||
d25f274289 | |||
c5ebe99e34 | |||
3134d36075 | |||
63e5588483 | |||
f647d51249 | |||
b0422d6a7f | |||
a5fc210776 | |||
|
ede8bf5bd7 | ||
|
06b18c4e50 | ||
77404b9b31 | |||
381257e55c | |||
0d28f0c9f5 | |||
e7b2df3776 | |||
53284f1983 | |||
dc7a4b93d1 | |||
a53cb5823e | |||
ef2b6e7190 | |||
1770fc89cc | |||
0a6dccba99 | |||
162d126aeb | |||
70a06b2c4d | |||
9a3d17e200 | |||
d7bc31b094 | |||
fd35707625 |
21
README.md
21
README.md
@ -1,3 +1,22 @@
|
||||
# MiniDocs
|
||||
|
||||
MiniDocs is a project that includes several minimalistic documentation tools used by the [Grafoscopio](https://mutabit.com/grafoscopio/en.html) community, starting with [Markdeep](https://casual-effects.com/markdeep/) and its integrations with [Lepiter](https://lepiter.io/feenk/introducing-lepiter--knowledge-management--e2p6apqsz5npq7m4xte0kkywn/).
|
||||
MiniDocs is a project that includes several minimalistic documentation tools used by the [Grafoscopio](https://mutabit.com/grafoscopio/en.html) community, starting with [Markdeep](https://casual-effects.com/markdeep/) and its integrations with [Lepiter](https://lepiter.io/feenk/introducing-lepiter--knowledge-management--e2p6apqsz5npq7m4xte0kkywn/) .
|
||||
|
||||
# Installation
|
||||
|
||||
To install it, *first* install [ExoRepo](https://code.tupale.co/Offray/ExoRepo) and then run from a playground:
|
||||
|
||||
```
|
||||
ExoRepo new
repository: 'https://code.sustrato.red/Offray/MiniDocs';
load.
|
||||
```
|
||||
|
||||
# Usage
|
||||
|
||||
Once you have installed MiniDocs, each Lepiter note will provide an export button (1), as showcased here:
|
||||
|
||||
![Exporting to Markdeep with MiniDocs.](https://i.imgur.com/bTZUG0Z.png)
|
||||
|
||||
If you click on it, you will get the right panel in the previous screenshot, showcasing the exported document.
|
||||
And if you click on the "Open in OS" button (2), you will see the document in your web browser, like this:
|
||||
|
||||
![Exported Lepiter note opened in the web browser](https://i.imgur.com/6fxkqZi.png)
|
||||
|
@ -11,27 +11,86 @@ BaselineOfMiniDocs >> baseline: spec [
|
||||
for: #common
|
||||
do: [
|
||||
"Dependencies"
|
||||
self setUpTeapot: spec.
|
||||
self setUpPetitParser: spec.
|
||||
self setUpLepiterBuildingBlocs: spec. "working in v1.0.993"
|
||||
spec
|
||||
baseline: 'Mustache' with: [ spec repository: 'github://noha/mustache'].
|
||||
"self xmlParserHTML: spec."
|
||||
baseline: 'Mustache' with: [ spec repository: 'github://noha/mustache' ];
|
||||
baseline: 'Temple' with: [ spec repository: 'github://astares/Pharo-Temple/src' ];
|
||||
baseline: 'Tealight' with: [ spec repository: 'github://astares/Tealight:main/src' ];
|
||||
baseline: 'DataFrame' with: [ spec repository: 'github://PolyMathOrg/DataFrame/src' ].
|
||||
|
||||
"self fossil: spec."
|
||||
self xmlParserHTML: spec.
|
||||
|
||||
"Packages"
|
||||
spec
|
||||
package: 'MiniDocs'
|
||||
with: [ spec requires: #('Mustache' "'XMLParserHTML'") ]
|
||||
package: 'PetitMarkdown' with: [ spec requires: #('PetitParser')];
|
||||
package: 'MiniDocs'
|
||||
with: [ spec requires: #(
|
||||
'Mustache' 'Temple' "Templating"
|
||||
'Teapot' 'Tealight' "Web server"
|
||||
'PetitMarkdown' 'PetitParser' "Parsers"
|
||||
'DataFrame' "Tabular data"
|
||||
'LepiterBuildingBlocs' "Lepiter utilities")].
|
||||
.
|
||||
|
||||
"Groups"
|
||||
|
||||
].
|
||||
spec
|
||||
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
BaselineOfMiniDocs >> xmlParserHTML: spec [
|
||||
Metacello new
|
||||
baseline: 'XMLParserHTML';
|
||||
repository: 'github://pharo-contributions/XML-XMLParserHTML/src';
|
||||
onConflict: [ :ex | ex useLoaded ];
|
||||
onUpgrade: [ :ex | ex useLoaded ];
|
||||
onDowngrade: [ :ex | ex useLoaded ];
|
||||
onWarningLog;
|
||||
load.
|
||||
spec baseline: 'XMLParserHTML' with: [spec repository: 'github://pharo-contributions/XML-XMLParserHTML/src']
|
||||
BaselineOfMiniDocs >> fossil: spec [
|
||||
| repo |
|
||||
repo := ExoRepo new
|
||||
repository: 'https://code.sustrato.red/Offray/Fossil'.
|
||||
repo load.
|
||||
spec baseline: 'Fossil' with: [ spec repository: 'gitlocal://', repo local fullName ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
BaselineOfMiniDocs >> semanticVersion [
|
||||
^ '0.2.0'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
BaselineOfMiniDocs >> setUpLepiterBuildingBlocs: spec [
|
||||
spec
|
||||
baseline: 'LepiterBuildingBlocs'
|
||||
with: [spec repository: 'github://botwhytho/LepiterBuildingBlocs:main/src']
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
BaselineOfMiniDocs >> setUpPetitParser: spec [
|
||||
spec
|
||||
baseline: 'PetitParser'
|
||||
with: [ spec
|
||||
repository: 'github://moosetechnology/PetitParser:v3.x.x/src';
|
||||
loads: #('Minimal' 'Core' 'Tests' 'Islands')];
|
||||
import: 'PetitParser'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
BaselineOfMiniDocs >> setUpTeapot: spec [
|
||||
|
||||
spec
|
||||
baseline: 'Teapot'
|
||||
with: [ spec
|
||||
repository: 'github://zeroflag/Teapot/source';
|
||||
loads: #('ALL') ];
|
||||
import: 'Teapot'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
BaselineOfMiniDocs >> xmlParserHTML: spec [
|
||||
|
||||
spec
|
||||
baseline: 'XMLParserHTML'
|
||||
with: [ spec
|
||||
repository: 'github://ruidajo/XML-XMLParserHTML/src';
|
||||
loads: #('ALL')];
|
||||
import: 'XMLParserHTML'
|
||||
]
|
||||
|
18
src/MiniDocs/AcroReport.class.st
Normal file
18
src/MiniDocs/AcroReport.class.st
Normal file
@ -0,0 +1,18 @@
|
||||
"
|
||||
I model a possible bridge between TaskWarrior and MiniDocs. (starting DRAFT).
|
||||
"
|
||||
Class {
|
||||
#name : #AcroReport,
|
||||
#superclass : #Object,
|
||||
#category : #MiniDocs
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
AcroReport class >> project: projectName [
|
||||
| jsonReport |
|
||||
jsonReport := (GtSubprocessWithInMemoryOutput new
|
||||
shellCommand: 'task project:', projectName , ' export';
|
||||
runAndWait;
|
||||
stdout).
|
||||
^ STONJSON fromString: jsonReport
|
||||
]
|
57
src/MiniDocs/AlphanumCounter.class.st
Normal file
57
src/MiniDocs/AlphanumCounter.class.st
Normal file
@ -0,0 +1,57 @@
|
||||
Class {
|
||||
#name : #AlphanumCounter,
|
||||
#superclass : #Object,
|
||||
#instVars : [
|
||||
'letters',
|
||||
'digits',
|
||||
'currentLetter',
|
||||
'currentDigit'
|
||||
],
|
||||
#category : #MiniDocs
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
AlphanumCounter >> current [
|
||||
^ self currentLetter asString, self currentDigit asString
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
AlphanumCounter >> currentDigit [
|
||||
|
||||
^ currentDigit ifNil: [ currentDigit := self digits first ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
AlphanumCounter >> currentLetter [
|
||||
^ currentLetter ifNil: [ currentLetter := self letters first ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
AlphanumCounter >> currentLetterIndex [
|
||||
^ self letters detectIndex: [:n | n = self currentLetter]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
AlphanumCounter >> digits [
|
||||
^ digits ifNil: [ digits := 1 to: 9 ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
AlphanumCounter >> digits: aNumbersArray [
|
||||
digits := aNumbersArray
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
AlphanumCounter >> increase [
|
||||
(self currentDigit < self digits last)
|
||||
ifTrue: [ currentDigit := currentDigit + 1 ]
|
||||
ifFalse: [
|
||||
currentLetter := self letters at: (self currentLetterIndex + 1).
|
||||
currentDigit := self digits first
|
||||
]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
AlphanumCounter >> letters [
|
||||
^ letters ifNil: [ letters := $A to: $Z ]
|
||||
]
|
45
src/MiniDocs/Array.extension.st
Normal file
45
src/MiniDocs/Array.extension.st
Normal file
@ -0,0 +1,45 @@
|
||||
Extension { #name : #Array }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
Array >> bagOfWordsFor: sentenceArray [
|
||||
"An utility machine training little algorithm.
|
||||
Inspired by https://youtu.be/8qwowmiXANQ?t=1144.
|
||||
This should be moved probably to [Polyglot](https://github.com/pharo-ai/Polyglot),
|
||||
but the repository is pretty innactive (with commits 2 or more years old and no reponse to issues).
|
||||
Meanwhile, it will be in MiniDocs.
|
||||
|
||||
Given the sentence := #('hello' 'how' 'are' 'you')
|
||||
and the testVocabulary := #('hi' 'hello' 'I' 'you' 'bye' 'thank' 'you')
|
||||
then
|
||||
|
||||
testVocabulary bagOfWordsFor: sentence.
|
||||
|
||||
Should give: #(0 1 0 1 0 0 0)
|
||||
"
|
||||
| bagOfWords |
|
||||
bagOfWords := Array new: self size.
|
||||
bagOfWords doWithIndex: [:each :i | bagOfWords at: i put: 0 ].
|
||||
sentenceArray do: [:token | |index|
|
||||
index := self indexOf: token.
|
||||
index > 0
|
||||
ifTrue: [bagOfWords at: index put: 1]
|
||||
].
|
||||
^ bagOfWords
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
Array >> replaceWithUniqueNilsAndBooleans [
|
||||
| response |
|
||||
(self includesAny: #(true false nil))
|
||||
ifFalse: [ response := self ]
|
||||
ifTrue: [ | newItem |
|
||||
response := OrderedCollection new.
|
||||
self do: [:item |
|
||||
(item isBoolean or: [ item isNil ])
|
||||
ifTrue: [ newItem := item asString, '-', (NanoID generate copyFrom: 1 to: 3) ]
|
||||
ifFalse: [ newItem := item ].
|
||||
response add: newItem.
|
||||
].
|
||||
].
|
||||
^ response
|
||||
]
|
23
src/MiniDocs/BrAsyncFileWidget.extension.st
Normal file
23
src/MiniDocs/BrAsyncFileWidget.extension.st
Normal file
@ -0,0 +1,23 @@
|
||||
Extension { #name : #BrAsyncFileWidget }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
BrAsyncFileWidget >> url: aUrl [
|
||||
|
||||
| realUrl imageUrl |
|
||||
realUrl := aUrl asZnUrl.
|
||||
|
||||
realUrl scheme = #file ifTrue: [
|
||||
^ self file: realUrl asFileReference ].
|
||||
imageUrl := realUrl.
|
||||
realUrl host = 'www.youtube.com' ifTrue: [ | video |
|
||||
video := LeRawYoutubeReferenceInfo fromYoutubeStringUrl: realUrl asString.
|
||||
imageUrl := (video rawData at: 'thumbnail_url') asUrl.
|
||||
].
|
||||
|
||||
self stencil: [
|
||||
(SkiaImage fromForm:
|
||||
(Form fromBase64String: imageUrl retrieveContents base64Encoded))
|
||||
asElement constraintsDo: [ :c |
|
||||
c horizontal matchParent.
|
||||
c vertical matchParent ] ]
|
||||
]
|
12
src/MiniDocs/ByteString.extension.st
Normal file
12
src/MiniDocs/ByteString.extension.st
Normal file
@ -0,0 +1,12 @@
|
||||
Extension { #name : #ByteString }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
ByteString >> asHTMLComment [
|
||||
^ '<!-- ', self , ' -->'
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
ByteString >> email [
|
||||
"Quick fix for importing Lepiter pages that have a plain ByteString field as email."
|
||||
^ self
|
||||
]
|
46
src/MiniDocs/DataFrame.extension.st
Normal file
46
src/MiniDocs/DataFrame.extension.st
Normal file
@ -0,0 +1,46 @@
|
||||
Extension { #name : #DataFrame }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
DataFrame >> asMarkdown [
|
||||
| response |
|
||||
response := '' writeStream.
|
||||
self columnNames do: [ :name | response nextPutAll: '| ' , name , ' ' ].
|
||||
response
|
||||
nextPutAll: '|';
|
||||
cr.
|
||||
self columns size timesRepeat: [ response nextPutAll: '|---' ].
|
||||
response
|
||||
nextPutAll: '|';
|
||||
cr.
|
||||
self asArrayOfRows
|
||||
do: [ :row |
|
||||
row do: [ :cell | response nextPutAll: '| ' , cell asString , ' ' ].
|
||||
response
|
||||
nextPutAll: '|';
|
||||
cr ].
|
||||
^ response contents accentedCharactersCorrection withInternetLineEndings.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
DataFrame >> viewDataFor: aView [
|
||||
<gtView>
|
||||
| columnedList |
|
||||
self numberOfRows >= 1 ifFalse: [ ^ aView empty ].
|
||||
columnedList := aView columnedList
|
||||
title: 'Data';
|
||||
items: [ self transposed columns ];
|
||||
priority: 40.
|
||||
self columnNames
|
||||
withIndexDo: [:aName :anIndex |
|
||||
columnedList
|
||||
column: aName
|
||||
text: [:anItem | anItem at: anIndex ]
|
||||
].
|
||||
^ columnedList
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
DataFrame >> webView [
|
||||
|
||||
^ Pandoc convertString: self asMarkdown from: 'markdown' to: 'html'
|
||||
]
|
6
src/MiniDocs/Dictionary.extension.st
Normal file
6
src/MiniDocs/Dictionary.extension.st
Normal file
@ -0,0 +1,6 @@
|
||||
Extension { #name : #Dictionary }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
Dictionary >> treeView [
|
||||
^ self asOrderedDictionary treeView
|
||||
]
|
53
src/MiniDocs/FileLocator.extension.st
Normal file
53
src/MiniDocs/FileLocator.extension.st
Normal file
@ -0,0 +1,53 @@
|
||||
Extension { #name : #FileLocator }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
FileLocator class >> aliases [
|
||||
| fileAliases |
|
||||
fileAliases := self fileAliases.
|
||||
fileAliases exists
|
||||
ifFalse: [ | initialConfig |
|
||||
initialConfig := Dictionary new.
|
||||
fileAliases ensureCreateFile.
|
||||
MarkupFile exportAsFileOn: fileAliases containing: initialConfig
|
||||
].
|
||||
^ STON fromString: fileAliases contents
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
FileLocator class >> atAlias: aString put: aFolderOrFile [
|
||||
| updatedAliases |
|
||||
updatedAliases:= self aliases
|
||||
at: aString put: aFolderOrFile;
|
||||
yourself.
|
||||
MarkupFile exportAsFileOn: self fileAliases containing: updatedAliases.
|
||||
^ updatedAliases
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
FileLocator >> extractMetadata [
|
||||
"I package the functionality from [[How to extract meta information using ExifTool]],
|
||||
from the GToolkit Book.
|
||||
I depend on the external tool ExifTool."
|
||||
|
||||
| process variablesList |
|
||||
process := GtSubprocessWithInMemoryOutput new
|
||||
command: 'exiftool';
|
||||
arguments: { self fullName}.
|
||||
process errorBlock: [ :proc | ^ self error: 'Failed to run exiftool' ].
|
||||
process runAndWait.
|
||||
variablesList := process stdout lines collect: [ :currentLine |
|
||||
| separatorIndex name value |
|
||||
separatorIndex := currentLine indexOf: $:.
|
||||
name := (currentLine copyFrom: 1 to: separatorIndex - 1) trimBoth.
|
||||
value := (currentLine
|
||||
copyFrom: separatorIndex + 1
|
||||
to: currentLine size) trimBoth.
|
||||
name -> value
|
||||
].
|
||||
^ variablesList asOrderedDictionary
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
FileLocator class >> fileAliases [
|
||||
^ MiniDocs appFolder / 'fileAliases.ston'
|
||||
]
|
342
src/MiniDocs/GrafoscopioNode.class.st
Normal file
342
src/MiniDocs/GrafoscopioNode.class.st
Normal file
@ -0,0 +1,342 @@
|
||||
Class {
|
||||
#name : #GrafoscopioNode,
|
||||
#superclass : #Object,
|
||||
#instVars : [
|
||||
'header',
|
||||
'body',
|
||||
'tags',
|
||||
'children',
|
||||
'parent',
|
||||
'links',
|
||||
'level',
|
||||
'created',
|
||||
'nodesInPreorder',
|
||||
'selected',
|
||||
'edited',
|
||||
'headers',
|
||||
'key',
|
||||
'output',
|
||||
'remoteLocations'
|
||||
],
|
||||
#category : #'MiniDocs-Legacy'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode class >> fromFile: aFileReference [
|
||||
|
||||
^ (STON fromString: aFileReference contents) first parent
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode class >> fromLink: aStonLink [
|
||||
| notebook |
|
||||
notebook := (STON fromString: aStonLink asUrl retrieveContents utf8Decoded) first parent.
|
||||
notebook addRemoteLocation: aStonLink.
|
||||
^ notebook
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> addRemoteLocation: anURL [
|
||||
self remoteLocations add: anURL
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> ancestors [
|
||||
"I return a collection of all the nodes wich are ancestors of the receiver node"
|
||||
| currentNode ancestors |
|
||||
|
||||
currentNode := self.
|
||||
ancestors := OrderedCollection new.
|
||||
[ currentNode parent notNil and: [ currentNode level > 0 ] ]
|
||||
whileTrue: [
|
||||
ancestors add: currentNode parent.
|
||||
currentNode := currentNode parent].
|
||||
ancestors := ancestors reversed.
|
||||
^ ancestors
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> asLePage [
|
||||
| page |
|
||||
self root populateTimestamps.
|
||||
page := LePage new
|
||||
initializeTitle: 'Grafoscopio Notebook (imported)'.
|
||||
self nodesInPreorder allButFirst
|
||||
do: [:node | page addSnippet: node asSnippet ].
|
||||
page latestEditTime: self root latestEditionDate.
|
||||
page createTime: self root earliestCreationDate.
|
||||
page optionAt: 'remoteLocations' put: self remoteLocations.
|
||||
^ page.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> asSnippet [
|
||||
| snippet child |
|
||||
snippet := LeTextSnippet new
|
||||
string: self header;
|
||||
createTime: (LeTime new
|
||||
time: self created);
|
||||
uid: LeUID new.
|
||||
(self tags includes: 'código')
|
||||
ifFalse: [
|
||||
child := LeTextSnippet new;
|
||||
string: self body. ]
|
||||
ifTrue: [
|
||||
child := LePharoSnippet new;
|
||||
code: self body ].
|
||||
child
|
||||
createTime: (LeTime new
|
||||
time: self created);
|
||||
uid: LeUID new.
|
||||
snippet addFirstSnippet: child.
|
||||
snippet optionAt: 'tags' put: self tags.
|
||||
^ snippet
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> body [
|
||||
^ body
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> body: anObject [
|
||||
body := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> children [
|
||||
^ children
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> children: anObject [
|
||||
children := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> created [
|
||||
created ifNotNil: [^created asDateAndTime].
|
||||
^ created
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> created: anObject [
|
||||
created := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> earliestCreationDate [
|
||||
| earliest |
|
||||
|
||||
self nodesWithCreationDates ifNotEmpty: [
|
||||
earliest := self nodesWithCreationDates first created]
|
||||
ifEmpty: [ earliest := self earliestRepositoryTimestamp - 3 hours].
|
||||
self nodesWithCreationDates do: [:node |
|
||||
node created <= earliest ifTrue: [ earliest := node created ] ].
|
||||
^ earliest
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> earliestRepositoryTimestamp [
|
||||
| remote fossilHost docSegments repo checkinInfo |
|
||||
remote := self remoteLocations first asUrl.
|
||||
fossilHost := 'https://mutabit.com/repos.fossil'.
|
||||
(remote asString includesSubstring: fossilHost) ifFalse: [ ^ false ].
|
||||
docSegments := remote segments copyFrom: 5 to: remote segments size.
|
||||
repo := FossilRepo new
|
||||
remote: (remote scheme, '://', remote host, '/', remote segments first, '/', remote segments second).
|
||||
checkinInfo := repo firstCheckinFor: ('/' join: docSegments).
|
||||
^ DateAndTime fromUnixTime: (checkinInfo at: 'timestamp')
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> edited [
|
||||
^ edited ifNotNil: [^ edited asDateAndTime ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> edited: anObject [
|
||||
edited := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> gtTextFor: aView [
|
||||
<gtView>
|
||||
^ aView textEditor
|
||||
title: 'Body';
|
||||
text: [ body ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> header [
|
||||
^ header
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> header: anObject [
|
||||
header := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> latestEditionDate [
|
||||
| latest |
|
||||
|
||||
latest := self nodesWithEditionDates first edited.
|
||||
self nodesWithEditionDates do: [:node |
|
||||
node edited >= latest ifTrue: [ latest := node edited ] ].
|
||||
^ latest
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> level [
|
||||
^ level
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> level: anObject [
|
||||
level := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> links [
|
||||
^ links
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> links: anObject [
|
||||
links := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> nodesInPreorder [
|
||||
^ nodesInPreorder
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> nodesInPreorder: anObject [
|
||||
nodesInPreorder := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> nodesWithCreationDates [
|
||||
^ self nodesInPreorder select: [ :each | each created isNotNil ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> nodesWithEditionDates [
|
||||
^ self nodesInPreorder select: [ :each | each edited isNotNil ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> parent [
|
||||
^ parent
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> parent: anObject [
|
||||
parent := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> populateTimestamps [
|
||||
| adhocCreationMarker adhocEditionMarker |
|
||||
adhocCreationMarker := 'adhoc creation timestamp'.
|
||||
adhocEditionMarker := 'adhoc edition timestamp'.
|
||||
(self nodesInPreorder size = self nodesWithCreationDates size
|
||||
and: [ self nodesInPreorder size = self nodesWithEditionDates size ])
|
||||
ifTrue: [ ^ self nodesInPreorder ].
|
||||
self nodesInPreorder allButFirst doWithIndex: [:node :i |
|
||||
node created ifNil: [
|
||||
node created: self earliestCreationDate + i.
|
||||
node tags add: adhocCreationMarker.
|
||||
].
|
||||
node edited ifNil: [
|
||||
node edited: self earliestCreationDate + i + 1.
|
||||
node tags add: 'adhoc edition timestamp'
|
||||
].
|
||||
].
|
||||
self root created ifNil: [
|
||||
self root created: self earliestCreationDate - 1.
|
||||
self root tags add: adhocCreationMarker.
|
||||
].
|
||||
self root edited ifNil: [
|
||||
self root edited: self latestEditionDate.
|
||||
self root tags add: adhocEditionMarker.
|
||||
].
|
||||
^ self nodesInPreorder
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> printOn: aStream [
|
||||
super printOn: aStream.
|
||||
aStream
|
||||
nextPutAll: '( ', self header, ' )'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> remoteLocations [
|
||||
^ remoteLocations ifNil: [ remoteLocations := OrderedCollection new]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> root [
|
||||
self level = 0 ifTrue: [ ^ self ].
|
||||
^ self ancestors first.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> selected [
|
||||
^ selected
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> selected: anObject [
|
||||
selected := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> tags [
|
||||
^ tags
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> tags: anObject [
|
||||
tags := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> viewBody [
|
||||
|
||||
| aText |
|
||||
aText := self header asRopedText.
|
||||
|
||||
self children do: [ :child |
|
||||
aText append: ' ' asRopedText.
|
||||
aText append: (child header asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor).
|
||||
aText append: ('= "' asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor).
|
||||
aText append: (child body asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor).
|
||||
aText append:
|
||||
('"' asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor) ].
|
||||
|
||||
|
||||
^ aText
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> viewChildrenFor: aView [
|
||||
<gtView>
|
||||
|
||||
children ifNil: [ ^ aView empty ].
|
||||
|
||||
^ aView columnedTree
|
||||
title: 'Children';
|
||||
priority: 1;
|
||||
items: [ { self } ];
|
||||
children: #children;
|
||||
column: 'Name' text: #viewBody;
|
||||
expandUpTo: 2
|
||||
]
|
15
src/MiniDocs/GrafoscopioNodeTest.class.st
Normal file
15
src/MiniDocs/GrafoscopioNodeTest.class.st
Normal file
@ -0,0 +1,15 @@
|
||||
Class {
|
||||
#name : #GrafoscopioNodeTest,
|
||||
#superclass : #TestCase,
|
||||
#category : #'MiniDocs-Legacy'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNodeTest >> testEarliestCreationNode [
|
||||
| notebook remoteNotebook offedingNodes |
|
||||
remoteNotebook := 'https://mutabit.com/repos.fossil/documentaton/raw/a63598382?at=documentaton.ston'.
|
||||
notebook := (STON fromString: remoteNotebook asUrl retrieveContents utf8Decoded) first parent.
|
||||
offedingNodes := notebook nodesInPreorder select: [:node |
|
||||
node created isNotNil and: [node created < notebook earliestCreationDate] ].
|
||||
self assert: offedingNodes size equals: 0
|
||||
]
|
72
src/MiniDocs/GtGQLSnippet.extension.st
Normal file
72
src/MiniDocs/GtGQLSnippet.extension.st
Normal file
@ -0,0 +1,72 @@
|
||||
Extension { #name : #GtGQLSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
GtGQLSnippet >> asMarkdeep [
|
||||
| output |
|
||||
output := WriteStream on: ''.
|
||||
(self metadata)
|
||||
at: 'operation' put: self operation;
|
||||
at: 'input' put: self input;
|
||||
at: 'context' put: self context;
|
||||
yourself.
|
||||
output
|
||||
nextPutAll: self metadataDiv;
|
||||
nextPutAll: self markdeepCustomOpener;
|
||||
nextPutAll: self asMarkdownString;
|
||||
nextPut: Character lf;
|
||||
nextPutAll: self markdeepCustomCloser;
|
||||
nextPut: Character lf;
|
||||
nextPutAll: '</div>';
|
||||
nextPut: Character lf;
|
||||
nextPut: Character lf.
|
||||
^ output contents withInternetLineEndings
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
GtGQLSnippet >> markdeepCustomCloser [
|
||||
^ self markdeepCustomOpener
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
GtGQLSnippet >> markdeepCustomOpener [
|
||||
^ '* * *'
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
GtGQLSnippet >> metadataDiv [
|
||||
"PENDING: Shared among several snippets. Should be abstracted further?"
|
||||
| output |
|
||||
output := WriteStream on: ''.
|
||||
output
|
||||
nextPutAll: '<div st-class="' , self class greaseString , '"';
|
||||
nextPut: Character lf;
|
||||
nextPutAll: ' st-data="' , (STON toStringPretty: self metadata) , '">';
|
||||
nextPut: Character lf.
|
||||
^ output contents withInternetLineEndings.
|
||||
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
GtGQLSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uid asString36;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
GtGQLSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
212
src/MiniDocs/HedgeDoc.class.st
Normal file
212
src/MiniDocs/HedgeDoc.class.st
Normal file
@ -0,0 +1,212 @@
|
||||
"
|
||||
I model the interface between a CodiMD (https://demo.codimd.org) documentation
|
||||
server and Grafoscopio.
|
||||
I enable the interaction between Grafoscopio notebooks and CodiMD documents,
|
||||
so one document can start online (as a CodiMD pad) and continue as a Grafoscopio
|
||||
notebook or viceversa.
|
||||
"
|
||||
Class {
|
||||
#name : #HedgeDoc,
|
||||
#superclass : #Markdown,
|
||||
#instVars : [
|
||||
'server',
|
||||
'pad',
|
||||
'url'
|
||||
],
|
||||
#category : #'MiniDocs-Core'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc class >> fromLink: aUrl [
|
||||
^ self new fromLink: aUrl
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
HedgeDoc class >> newDefault [
|
||||
^ self new
|
||||
defaultServer.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> asLePage [
|
||||
| newPage sanitizedMarkdown |
|
||||
sanitizedMarkdown := self bodyWithoutTitleHeader promoteMarkdownHeaders.
|
||||
newPage := LePage new
|
||||
initializeTitle: self title.
|
||||
sanitizedMarkdown := sanitizedMarkdown markdownSplitted.
|
||||
sanitizedMarkdown class = OrderedCollection ifTrue: [
|
||||
sanitizedMarkdown do: [:lines | | snippet |
|
||||
snippet := LeTextSnippet new
|
||||
string: lines asStringWithCr;
|
||||
uid: LeUID new.
|
||||
newPage
|
||||
addSnippet: snippet;
|
||||
yourself
|
||||
]
|
||||
].
|
||||
sanitizedMarkdown class = ByteString ifTrue: [ | snippet |
|
||||
snippet := LeTextSnippet new
|
||||
string: sanitizedMarkdown;
|
||||
uid: LeUID new.
|
||||
newPage
|
||||
addSnippet: snippet;
|
||||
yourself
|
||||
].
|
||||
newPage
|
||||
incomingLinks;
|
||||
splitAdmonitionSnippets.
|
||||
newPage editTime: DateAndTime now.
|
||||
newPage options
|
||||
at: 'HedgeDoc' at: 'yamlFrontmatter' put: self metadata;
|
||||
at: 'HedgeDoc' at: 'url' put: self url asString asHTMLComment.
|
||||
^ newPage
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> asMarkdeep [
|
||||
^ Markdeep new
|
||||
metadata: self metadata;
|
||||
body: self contents;
|
||||
file: self file, 'html'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> asMarkdownTiddler [
|
||||
self url ifNil: [ ^ self ].
|
||||
^ Tiddler new
|
||||
title: self url segments first;
|
||||
text: (self contents ifNil: [ self retrieveContents]);
|
||||
type: 'text/x-markdown';
|
||||
created: Tiddler nowLocal.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> bodyWithoutTitleHeader [
|
||||
| headerIndex |
|
||||
headerIndex := self body lines
|
||||
detectIndex: [ :line | line includesSubstring: self headerAsTitle ]
|
||||
ifNone: [ ^ self body].
|
||||
^ (self body lines copyWithoutIndex: headerIndex) asStringWithCr
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> contents [
|
||||
^ super contents
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> contents: anObject [
|
||||
body := anObject
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
HedgeDoc >> defaultServer [
|
||||
self server: 'https://docutopia.tupale.co'.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> fromLink: aString [
|
||||
self url: aString.
|
||||
self retrieveContents
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
HedgeDoc >> htmlUrl [
|
||||
| link |
|
||||
link := self url copy.
|
||||
link segments insert: 's' before: 1.
|
||||
^ link
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
HedgeDoc >> importContents [
|
||||
self contents: self retrieveContents
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> pad [
|
||||
^ pad
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> pad: anObject [
|
||||
pad := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> retrieveContents [
|
||||
self url ifNil: [ ^ self ].
|
||||
self fromString: (self url addPathSegment: 'download') retrieveContents.
|
||||
^ self.
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
HedgeDoc >> retrieveHtmlContents [
|
||||
| htmlContents |
|
||||
self url ifNil: [ ^ self ].
|
||||
htmlContents := self htmlUrl.
|
||||
^ htmlContents retrieveContents
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
HedgeDoc >> saveContentsToFile: aFileLocator [
|
||||
self url ifNil: [ ^ self ].
|
||||
^ (self url addPathSegment: 'download') saveContentsToFile: aFileLocator
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
HedgeDoc >> saveHtmlContentsToFile: aFileLocator [
|
||||
self url ifNil: [ ^ self ].
|
||||
^ self htmlUrl saveContentsToFile: aFileLocator
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> server [
|
||||
^ server
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> server: aUrlString [
|
||||
server := aUrlString
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> url [
|
||||
url ifNotNil: [ ^ url asUrl ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> url: anObject [
|
||||
| tempUrl html |
|
||||
tempUrl := anObject asZnUrl.
|
||||
html := XMLHTMLParser parse: tempUrl retrieveContents.
|
||||
(html xpath: '//head/meta[@name="application-name"][@content = "HedgeDoc - Ideas grow better together"]') isEmpty
|
||||
ifTrue: [ self inform: 'Not a hedgedoc url'.
|
||||
url := nil ].
|
||||
server := tempUrl host.
|
||||
url := anObject
|
||||
]
|
||||
|
||||
{ #category : #visiting }
|
||||
HedgeDoc >> visit [
|
||||
WebBrowser openOn: self server, '/', self pad.
|
||||
]
|
||||
|
||||
{ #category : #transformation }
|
||||
HedgeDoc >> youtubeEmbeddedLinksToMarkdeepFormat [
|
||||
"I replace the youtube embedded links from hedgedoc format to markdeep format."
|
||||
| linkDataCollection |
|
||||
linkDataCollection := (HedgeDocGrammar new youtubeEmbeddedLink parse: self contents)
|
||||
collect: [ :each | | parsedLink |
|
||||
parsedLink := OrderedCollection new.
|
||||
parsedLink
|
||||
add: ('' join:( each collect: [ :s | s value]));
|
||||
add: '![](https://youtu.be/',
|
||||
each second value trimmed , ')';
|
||||
add: (each first start to: each third stop);
|
||||
yourself ].
|
||||
linkDataCollection do: [ :each |
|
||||
self contents: (self contents
|
||||
copyReplaceAll: each first with: each second) ].
|
||||
^ self
|
||||
]
|
36
src/MiniDocs/HedgeDocExamples.class.st
Normal file
36
src/MiniDocs/HedgeDocExamples.class.st
Normal file
@ -0,0 +1,36 @@
|
||||
Class {
|
||||
#name : #HedgeDocExamples,
|
||||
#superclass : #Object,
|
||||
#category : #'MiniDocs-Examples'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDocExamples >> hedgeDocReplaceYoutubeEmbeddedLinkExample [
|
||||
<gtExample>
|
||||
| aSampleString hedgedocDoc parsedCollection hedgedocDocLinksReplaced |
|
||||
aSampleString := '---
|
||||
breaks: false
|
||||
|
||||
---
|
||||
|
||||
# Titulo
|
||||
|
||||
Un texto de ejemplo
|
||||
|
||||
# Enlaces youtube
|
||||
|
||||
{%youtube 1aw3XmTqFXA %}
|
||||
|
||||
otro video
|
||||
|
||||
{%youtube U7mpXaLN9Nc %}'.
|
||||
hedgedocDoc := HedgeDoc new
|
||||
contents: aSampleString.
|
||||
hedgedocDocLinksReplaced := HedgeDoc new contents: aSampleString; youtubeEmbeddedLinksToMarkdeepFormat.
|
||||
self assert: (hedgedocDoc contents
|
||||
includesSubstring: '{%youtube 1aw3XmTqFXA %}' ).
|
||||
self assert: (hedgedocDocLinksReplaced contents
|
||||
includesSubstring: '![](https://youtu.be/1aw3XmTqFXA)' ).
|
||||
^ { 'Original' -> hedgedocDoc .
|
||||
'Replaced' -> hedgedocDocLinksReplaced } asDictionary
|
||||
]
|
42
src/MiniDocs/HedgeDocGrammar.class.st
Normal file
42
src/MiniDocs/HedgeDocGrammar.class.st
Normal file
@ -0,0 +1,42 @@
|
||||
Class {
|
||||
#name : #HedgeDocGrammar,
|
||||
#superclass : #PP2CompositeNode,
|
||||
#instVars : [
|
||||
'youtubeEmbeddedLink'
|
||||
],
|
||||
#category : #'MiniDocs-Model'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDocGrammar >> metadataAsYAML [
|
||||
"I parse the header of the hedgedoc document for YAML metadata."
|
||||
^ '---' asPParser token, #any asPParser starLazy token, '---' asPParser token
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDocGrammar >> start [
|
||||
| any |
|
||||
any := #any asPParser.
|
||||
^ (self metadataAsYAML / any starLazy), youtubeEmbeddedLink
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDocGrammar >> youtubeEmbeddedLink [
|
||||
"I parse the youtube embedded links in a hedgedoc document."
|
||||
| link linkSea |
|
||||
link := self youtubeEmbeddedLinkOpen,
|
||||
#any asPParser starLazy token,
|
||||
self youtubeEmbeddedLinkClose.
|
||||
linkSea := link islandInSea star.
|
||||
^ linkSea
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDocGrammar >> youtubeEmbeddedLinkClose [
|
||||
^ '%}' asPParser token
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDocGrammar >> youtubeEmbeddedLinkOpen [
|
||||
^ '{%youtube' asPParser token
|
||||
]
|
19
src/MiniDocs/HedgeDocGrammarExamples.class.st
Normal file
19
src/MiniDocs/HedgeDocGrammarExamples.class.st
Normal file
@ -0,0 +1,19 @@
|
||||
Class {
|
||||
#name : #HedgeDocGrammarExamples,
|
||||
#superclass : #Object,
|
||||
#category : #'MiniDocs-Examples'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDocGrammarExamples >> hedgeDocParseYoutubeEmbeddedLinkExample [
|
||||
<gtExample>
|
||||
| aSampleString parsedStringTokens parsedCollection |
|
||||
aSampleString := '{%youtube 1aw3XmTqFXA %}'.
|
||||
parsedStringTokens := HedgeDocGrammar new youtubeEmbeddedLink parse: aSampleString.
|
||||
parsedCollection := parsedStringTokens first.
|
||||
self assert: parsedCollection size equals: 3.
|
||||
self assert: parsedCollection first value equals: '{%youtube'.
|
||||
self assert: parsedCollection second class equals: PP2Token.
|
||||
self assert: parsedCollection third value equals: '%}'.
|
||||
^ parsedStringTokens
|
||||
]
|
15
src/MiniDocs/HedgeDocGrammarTest.class.st
Normal file
15
src/MiniDocs/HedgeDocGrammarTest.class.st
Normal file
@ -0,0 +1,15 @@
|
||||
Class {
|
||||
#name : #HedgeDocGrammarTest,
|
||||
#superclass : #PP2CompositeNodeTest,
|
||||
#category : #'MiniDocs-Model'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDocGrammarTest >> parserClass [
|
||||
^ HedgeDocGrammar
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDocGrammarTest >> testYoutubeEmbeddedLink [
|
||||
^ self parse: '{%youtube U7mpXaLN9Nc %}' rule: #youtubeEmbeddedLink
|
||||
]
|
26
src/MiniDocs/LeChangesSnippet.extension.st
Normal file
26
src/MiniDocs/LeChangesSnippet.extension.st
Normal file
@ -0,0 +1,26 @@
|
||||
Extension { #name : #LeChangesSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeChangesSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uuid;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeChangesSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
21
src/MiniDocs/LeCodeSnippet.extension.st
Normal file
21
src/MiniDocs/LeCodeSnippet.extension.st
Normal file
@ -0,0 +1,21 @@
|
||||
Extension { #name : #LeCodeSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeCodeSnippet >> metadataUpdate [
|
||||
| surrogate |
|
||||
self parent
|
||||
ifNil: [ surrogate := nil]
|
||||
ifNotNil: [
|
||||
self parent isString
|
||||
ifTrue: [ surrogate := self parent]
|
||||
ifFalse: [ surrogate := self parent uidString ]
|
||||
].
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: surrogate;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: self createEmail asString withoutXMLTagDelimiters;
|
||||
at: 'modifier' put: self editEmail asString withoutXMLTagDelimiters;
|
||||
yourself
|
||||
]
|
@ -1,28 +1,17 @@
|
||||
Extension { #name : #LeDatabase }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> addPageFromMarkdeep: markdeepDocTree withRemote: externalDocLocation [
|
||||
| remoteMetadata divSnippets snippets page |
|
||||
divSnippets := (markdeepDocTree xpath: '//div[@st-class]') asOrderedCollection
|
||||
collect: [ :xmlElement | xmlElement postCopy ].
|
||||
snippets := divSnippets
|
||||
collect: [ :xmlElement |
|
||||
(xmlElement attributes at: 'st-class') = 'LeTextSnippet'
|
||||
ifTrue: [ LeTextSnippet new contentFrom: xmlElement ]
|
||||
ifFalse: [ (xmlElement attributes at: 'st-class') = 'LePharoSnippet'
|
||||
ifTrue: [ LePharoSnippet new contentFrom: xmlElement ] ] ].
|
||||
remoteMetadata := Markdeep new metadataFromXML: markdeepDocTree.
|
||||
page := LePage new
|
||||
title: (remoteMetadata at: 'title');
|
||||
basicUid: (UUID fromString36: (remoteMetadata at: 'id'));
|
||||
createTime: (LeTime new time: (remoteMetadata at: 'created') asDateAndTime);
|
||||
editTime: (LeTime new time: (remoteMetadata at: 'modified') asDateAndTime);
|
||||
latestEditTime: (LeTime new time: (remoteMetadata at: 'modified') asDateAndTime);
|
||||
createEmail: (LeEmail new email: (remoteMetadata at: 'creator'));
|
||||
editEmail: (LeEmail new email: (remoteMetadata at: 'modifier')).
|
||||
snippets do: [ :snippet | page addSnippet: snippet ].
|
||||
page children
|
||||
do: [ :snippet |
|
||||
LeDatabase >> addPage2FromMarkdeep: markdeepDocTree withRemote: externalDocLocation [
|
||||
| newPage |
|
||||
"^ { snippets . page }"
|
||||
"Rebulding partial subtrees"
|
||||
"Adding unrooted subtrees to the page"
|
||||
"^ newPage"
|
||||
newPage := self
|
||||
rebuildPageFromMarkdeep: markdeepDocTree
|
||||
withRemote: externalDocLocation.
|
||||
newPage
|
||||
childrenDo: [ :snippet |
|
||||
(self hasBlockUID: snippet uid)
|
||||
ifTrue: [ | existingPage |
|
||||
existingPage := self pages
|
||||
@ -31,6 +20,56 @@ LeDatabase >> addPageFromMarkdeep: markdeepDocTree withRemote: externalDocLocati
|
||||
^ self ]
|
||||
ifFalse: [ snippet database: self.
|
||||
self registerSnippet: snippet ] ].
|
||||
self addPage: newPage.
|
||||
^ newPage
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> addPageCopy: aLePage [
|
||||
| pageTitle timestamp shortID page |
|
||||
timestamp := DateAndTime now asString.
|
||||
pageTitle := 'Copy of ', aLePage title.
|
||||
page := aLePage duplicatePageWithNewName: pageTitle, timestamp.
|
||||
shortID := '(id: ', (page uid asString copyFrom: 1 to: 8), ')'.
|
||||
page title: (page title copyReplaceAll: timestamp with: shortID).
|
||||
^ page
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> addPageFromMarkdeep: markdeepDocTree withRemote: externalDocLocation [
|
||||
| remoteMetadata divSnippets dataSnippets page |
|
||||
divSnippets := (markdeepDocTree xpath: '//div[@st-class]') asOrderedCollection
|
||||
collect: [ :xmlElement | xmlElement postCopy ].
|
||||
remoteMetadata := Markdeep new metadataFromXML: markdeepDocTree.
|
||||
"Ensuring remote metadata has consistent data"
|
||||
remoteMetadata at: 'origin' put: externalDocLocation.
|
||||
remoteMetadata at: 'title' ifAbsentPut: [ markdeepDocTree detectMarkdeepTitle ].
|
||||
remoteMetadata at: 'id' ifAbsentPut: [UUID new asString36].
|
||||
remoteMetadata at: 'created' ifAbsentPut: [ DateAndTime now] .
|
||||
remoteMetadata at: 'creator' ifAbsentPut: [ 'unknown' ].
|
||||
remoteMetadata at: 'modified' ifAbsentPut: [ DateAndTime now].
|
||||
remoteMetadata at: 'modifier' ifAbsentPut: [ 'unknown' ].
|
||||
dataSnippets := self sanitizeMarkdeepSnippets: divSnippets withMetadata: remoteMetadata.
|
||||
page := LePage new.
|
||||
page fromDictionary: remoteMetadata.
|
||||
dataSnippets do: [:each | | snippet|
|
||||
snippet := each asLepiterSnippet.
|
||||
page addSnippet: snippet.
|
||||
].
|
||||
page children
|
||||
do: [ :snippet |
|
||||
(self hasBlockUID: snippet uid)
|
||||
ifTrue: [ | existingPage |
|
||||
existingPage := self pages
|
||||
detect: [ :pageTemp | pageTemp includesSnippetUid: snippet uid ]
|
||||
ifFound: [
|
||||
self importErrorForLocal: existingPage withRemote: externalDocLocation.
|
||||
^ self
|
||||
]
|
||||
ifNone: [ snippet database: self ].
|
||||
]
|
||||
ifFalse: [ snippet database: self ]
|
||||
].
|
||||
self addPage: page.
|
||||
^ page
|
||||
]
|
||||
@ -42,7 +81,7 @@ LeDatabase >> addPageFromMarkdeepUrl: aString [
|
||||
page
|
||||
ifNotNil: [ :arg |
|
||||
self importErrorForLocal: page withRemote: aString.
|
||||
^ self ].
|
||||
^ self errorCardFor: page uidString ].
|
||||
^ self addPageFromMarkdeep: (self docTreeForLink: aString) withRemote: aString
|
||||
]
|
||||
|
||||
@ -62,10 +101,10 @@ LeDatabase >> docTreeForLink: aString [
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> errorCardFor: error [
|
||||
LeDatabase >> errorCardFor: errorKey [
|
||||
|
||||
| keepButton overwriteButton backupButton errorMessageUI localPage errorKey |
|
||||
errorKey := error keys first.
|
||||
| keepButton overwriteButton loadCopyButton errorMessageUI localPage |
|
||||
|
||||
localPage := self pageWithID: errorKey.
|
||||
keepButton := BrButton new
|
||||
aptitude: BrGlamorousButtonWithIconAndLabelAptitude;
|
||||
@ -82,22 +121,28 @@ LeDatabase >> errorCardFor: error [
|
||||
icon: BrGlamorousVectorIcons edit;
|
||||
action: [ :aButton |
|
||||
self removePage: localPage.
|
||||
aButton phlow spawnObject: (self addPageFromMarkdeepUrl: (error at: errorKey at: 'remote')).
|
||||
aButton phlow spawnObject: (self addPageFromMarkdeepUrl: (self errors at: errorKey at: 'remote')).
|
||||
self errors removeKey: errorKey
|
||||
];
|
||||
margin: (BlInsets left: 10).
|
||||
backupButton := BrButton new
|
||||
loadCopyButton := BrButton new
|
||||
aptitude: BrGlamorousButtonWithIconAndLabelAptitude;
|
||||
label: 'Backup local page';
|
||||
label: 'Load remote page as a copy';
|
||||
icon: BrGlamorousVectorIcons changes;
|
||||
action: [ :aButton | ];
|
||||
action: [ :aButton | self ];
|
||||
margin: (BlInsets left: 10).
|
||||
|
||||
errorMessageUI := BrEditor new
|
||||
aptitude: BrGlamorousRegularEditorAptitude new ;
|
||||
text: (error at: errorKey at: 'message');
|
||||
text: (self errors at: errorKey at: 'message');
|
||||
vFitContent.
|
||||
^ { errorMessageUI. keepButton. overwriteButton. backupButton }
|
||||
^ BrHorizontalPane new
|
||||
matchParent;
|
||||
alignCenter;
|
||||
addChild:errorMessageUI;
|
||||
addChild: keepButton;
|
||||
addChild: overwriteButton;
|
||||
addChild: loadCopyButton
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
@ -124,6 +169,34 @@ LeDatabase >> gtViewErrorDetailsOn: aView [
|
||||
].
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> gtViewErrorDetailsOn: aView withKey: erroKey [
|
||||
<gtView>
|
||||
^ aView explicit
|
||||
title: 'Errors beta' translated;
|
||||
priority: 5;
|
||||
stencil: [ | container |
|
||||
container := BlElement new
|
||||
layout: BlFlowLayout new;
|
||||
constraintsDo: [ :c |
|
||||
c vertical fitContent.
|
||||
c horizontal matchParent ];
|
||||
padding: (BlInsets all: 10).
|
||||
container
|
||||
addChildren: (self errorCardFor: erroKey)
|
||||
].
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> importDocumentFrom: aURL [
|
||||
| doc |
|
||||
"Using file extension in URL as a cheap (non-robuts) way of detecting the kind of document.
|
||||
Better file type detection should be implemented in the future."
|
||||
(aURL endsWith: '.md.html') ifTrue: [ ^ self addPageFromMarkdeepUrl: aURL ].
|
||||
doc := HedgeDoc fromLink: aURL asString.
|
||||
^ self addPage: doc asLePage
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> importErrorForLocal: page withRemote: externalDocLocation [
|
||||
|
||||
@ -154,7 +227,8 @@ LeDatabase >> importErrorForLocal: page withRemote: externalDocLocation [
|
||||
at: 'remote' put: externalDocLocation;
|
||||
at: 'message' put: message ;
|
||||
yourself.
|
||||
self errors at: id put: error
|
||||
self errors at: id put: error.
|
||||
^ self errors at: id.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
@ -162,3 +236,77 @@ LeDatabase >> options [
|
||||
|
||||
^ options
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> previewSanitizedPageFromMarkdeep: markdeepDocTree withRemote: externalDocLocation [
|
||||
| remoteMetadata divSnippets divSnippetsSanitized |
|
||||
divSnippets := (markdeepDocTree xpath: '//div[@st-class]') asOrderedCollection
|
||||
collect: [ :xmlElement | xmlElement postCopy ].
|
||||
remoteMetadata := Markdeep new metadataFromXML: markdeepDocTree.
|
||||
remoteMetadata at: 'origin' put: externalDocLocation.
|
||||
divSnippetsSanitized := self sanitizeMarkdeepSnippets: divSnippets withMetadata: remoteMetadata.
|
||||
^ { divSnippets . divSnippetsSanitized . remoteMetadata }
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> rebuildPageFromMarkdeep: markdeepDocTree withRemote: externalDocLocation [
|
||||
| newPage snippets divSnippets remoteMetadata dataSnippets |
|
||||
divSnippets := (markdeepDocTree xpath: '//div[@st-class]') asOrderedCollection
|
||||
collect: [ :xmlElement | xmlElement postCopy ].
|
||||
remoteMetadata := Markdeep new metadataFromXML: markdeepDocTree.
|
||||
remoteMetadata at: 'origin' put: externalDocLocation.
|
||||
dataSnippets := self
|
||||
sanitizeMarkdeepSnippets: divSnippets
|
||||
withMetadata: remoteMetadata.
|
||||
snippets := dataSnippets collect: [ :each | each asLepiterSnippet ].
|
||||
newPage := LePage new
|
||||
title: (remoteMetadata at: 'title');
|
||||
basicUid: (UUID fromString36: (remoteMetadata at: 'id'));
|
||||
createTime: (LeTime new time: (remoteMetadata at: 'created') asDateAndTime);
|
||||
editTime: (LeTime new time: (remoteMetadata at: 'modified') asDateAndTime);
|
||||
latestEditTime: (LeTime new time: (remoteMetadata at: 'modified') asDateAndTime);
|
||||
createEmail: (remoteMetadata at: 'creator');
|
||||
editEmail: (remoteMetadata at: 'modifier'). "^ { snippets . page }" "Rebulding partial subtrees"
|
||||
snippets
|
||||
do: [ :currentSnippet |
|
||||
| parentSnippet |
|
||||
parentSnippet := snippets
|
||||
detect: [ :item | item uid asString = currentSnippet parent ]
|
||||
ifNone: [ parentSnippet := 'unrooted' ].
|
||||
currentSnippet parent: parentSnippet.
|
||||
parentSnippet class = ByteString
|
||||
ifFalse: [ parentSnippet children addChild: currentSnippet ] ]. "Adding unrooted subtrees to the page"
|
||||
"^ { unrooted . newPage }."
|
||||
snippets
|
||||
select: [ :each | each parent = 'unrooted' ]
|
||||
thenDo: [ :unrooted | newPage addSnippet: unrooted ].
|
||||
^ newPage
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
LeDatabase >> sanitizeMarkdeepSnippets: divSnippets withMetadata: remoteMetadata [
	^ divSnippets collectWithIndex: [:markdeepDiv :i | | snippetData creationTime modificationTime timestampWarning |
		snippetData := markdeepDiv asSnippetDictionary.
		creationTime := snippetData at: 'created'.
		modificationTime := snippetData at: 'modified'.
		timestampWarning := [:timestamp |
			'Modified timestamps: ', timestamp ,' date and time was replaced instead of nil value. See "origin" metadata for more historical traceability information.'
		].
		(creationTime = 'nil' and: [ modificationTime ~= 'nil' ])
			ifTrue: [
				snippetData redefineTimestampsBefore: modificationTime.
				snippetData addErrata: (timestampWarning value: 'creation').
				snippetData at: 'origin' put: (remoteMetadata at: 'origin').
			].
		(creationTime = 'nil' and: [ modificationTime = 'nil' ])
			ifTrue: [ | timeDiff |
				timeDiff := divSnippets size - i. "Suggesting that last snippets were modified after the first ones."
				modificationTime := (remoteMetadata at: 'created') asDateAndTime - timeDiff seconds.
				snippetData redefineTimestampsBefore: modificationTime.
				snippetData addErrata: (timestampWarning value: 'creation').
				snippetData addErrata: (timestampWarning value: 'modification').
				snippetData at: 'origin' put: (remoteMetadata at: 'origin').
			].
		snippetData.
	]
]
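A worked sketch of the timestamp fallback above, with made-up values: for a page created at 2023-01-01T00:00:00 and ten imported snippets, the third snippet whose 'created' and 'modified' fields are both 'nil' gets a surrogate time of the page creation minus (10 - 3) seconds.

	"Illustrative only; the date and collection size are assumptions."
	| created timeDiff |
	created := '2023-01-01T00:00:00' asDateAndTime.
	timeDiff := 10 - 3. "divSnippets size - i, so later snippets appear more recently modified"
	created - timeDiff seconds. "2022-12-31T23:59:53"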
26
src/MiniDocs/LeDockerSnippet.extension.st
Normal file
@ -0,0 +1,26 @@
Extension { #name : #LeDockerSnippet }

{ #category : #'*MiniDocs' }
LeDockerSnippet >> metadataUpdate [
	| createEmailSanitized editEmailSanitized |
	createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
	editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
	^ OrderedDictionary new
		at: 'id' put: self uidString;
		at: 'parent' put: self parent uuid;
		at: 'created' put: self createTime asString;
		at: 'modified' put: self latestEditTime asString;
		at: 'creator' put: createEmailSanitized;
		at: 'modifier' put: editEmailSanitized;
		yourself
]

{ #category : #'*MiniDocs' }
LeDockerSnippet >> sanitizeMetadata [
	self metadata keysAndValuesDo: [:k :v |
		(v includesAny: #($< $>))
			ifTrue: [
				self metadata at: k put: (v copyWithoutAll: #($< $>))
			]
	]
]
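The same metadataUpdate / sanitizeMetadata pair recurs in the snippet extensions below (LeExampleSnippet, LeGitHubSnippet, LeJenkinsSnippet, and so on). The sanitization simply strips angle brackets from metadata values, for example (the value is an illustrative assumption):

	'<someone@example.org>' copyWithoutAll: #($< $>). "'someone@example.org'"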
32
src/MiniDocs/LeExampleSnippet.extension.st
Normal file
@ -0,0 +1,32 @@
|
||||
Extension { #name : #LeExampleSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeExampleSnippet >> asMarkdeep [
|
||||
|
||||
^ (WriteStream on: '') contents
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeExampleSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uuid;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeExampleSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
26
src/MiniDocs/LeGitHubSnippet.extension.st
Normal file
@ -0,0 +1,26 @@
|
||||
Extension { #name : #LeGitHubSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeGitHubSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uuid;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeGitHubSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
8
src/MiniDocs/LeHeaderNode.extension.st
Normal file
@ -0,0 +1,8 @@
Extension { #name : #LeHeaderNode }

{ #category : #'*MiniDocs' }
LeHeaderNode >> headerFullName [
	^ self topParent completeSource
		copyFrom: self startPosition
		to: self stopPosition
]
56
src/MiniDocs/LeHomeDatabaseHeaderElement.extension.st
Normal file
@ -0,0 +1,56 @@
Extension { #name : #LeHomeDatabaseHeaderElement }

{ #category : #'*MiniDocs' }
LeHomeDatabaseHeaderElement >> importMinidocsButtonElement [
	^ self userData at: 'importMinidocsButtonElement' ifAbsentPut: [ self newImportMiniDocsButton ]
]

{ #category : #'*MiniDocs' }
LeHomeDatabaseHeaderElement >> initialize [
	super initialize.
	self initializeEditableTitleElement.
	self initializeButtons.

	self addChild: self toolbarElement as: #toolbar.
	self toolbarElement
		addItem: self editableTitleElement;
		addItem: self newAddNewPageButton;
		addItem: self removeButtonElement;
		addItem: self importButtonElement;
		addItem: self exportButtonElement;
		addItem: self importMinidocsButtonElement.

	self addAptitude: (BrLayoutResizerAptitude new
		hInherit;
		vAnyToFitContent;
		hInherit: self toolbarElement;
		vAnyToFitContent: self toolbarElement).
]

{ #category : #'*MiniDocs' }
LeHomeDatabaseHeaderElement >> initializeButtons [
	self initializeRemoveButton.
	self initializeImportButton.
	self initializeExportButton.
	self initializeMiniDocsImportButton.
]

{ #category : #'*MiniDocs' }
LeHomeDatabaseHeaderElement >> initializeMiniDocsImportButton [
	self userData at: 'importMinidocsButtonElement' put: self newImportMiniDocsButton.
]

{ #category : #'*MiniDocs' }
LeHomeDatabaseHeaderElement >> newImportMiniDocsButton [
	^ LeMiniDocsImport new
		tooltip: 'Import document from link';
		contentExtent: 200 @ 30
]

{ #category : #'*MiniDocs' }
LeHomeDatabaseHeaderElement >> updateToolbarButtons [
	self updateRemoveButtonElement.
	self exportButtonElement database: self database.
	self importButtonElement database: self database.
	self importMinidocsButtonElement database: self database.
]
26
src/MiniDocs/LeJenkinsSnippet.extension.st
Normal file
@ -0,0 +1,26 @@
|
||||
Extension { #name : #LeJenkinsSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeJenkinsSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uuid;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeJenkinsSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
89
src/MiniDocs/LeMiniDocsImport.class.st
Normal file
@ -0,0 +1,89 @@
Class {
	#name : #LeMiniDocsImport,
	#superclass : #BrButton,
	#instVars : [
		'contentExtent',
		'database'
	],
	#category : #'MiniDocs-UI'
}

{ #category : #accessing }
LeMiniDocsImport >> contentExtent [
	^ contentExtent
]

{ #category : #accessing }
LeMiniDocsImport >> contentExtent: aPoint [
	self
		assert: [ aPoint isNotNil ]
		description: [ 'Extent must be non-nil' ].
	contentExtent := aPoint
]

{ #category : #accessing }
LeMiniDocsImport >> createDropdownExpandedHandleButton [
	^ BrButton new
		icon: BrGlamorousVectorIcons downwards;
		label: self tooltip;
		aptitude: BrGlamorousButtonWithIconAndLabelAptitude
]

{ #category : #accessing }
LeMiniDocsImport >> createURLeditable [
	| base editable |
	base := BlElement new
		background: (Color white);
		size: 200 @ 30;
		margin: (BlInsets all: 10);
		yourself.
	editable := BrEditableLabel new
		aptitude: BrGlamorousEditableLabelAptitude new glamorousRegularFontAndSize;
		text: 'Document link';
		switchToEditor.
	editable when: BrEditorAcceptWish do: [ :aWish |
		self importDocumentFrom: aWish text asString.
	].
	base addChild: editable.
	^ base
]

{ #category : #accessing }
LeMiniDocsImport >> database [
	^ database
]

{ #category : #accessing }
LeMiniDocsImport >> database: aLeDatabase [
	database := aLeDatabase
]

{ #category : #accessing }
LeMiniDocsImport >> importDocumentFrom: aURL [
	^ self database importDocumentFrom: aURL.
]

{ #category : #accessing }
LeMiniDocsImport >> initialize [
	super initialize.

	self
		icon: BrGlamorousVectorIcons downwards;
		label: 'Add MiniDocs';
		aptitude: BrGlamorousButtonWithIconAndLabelAptitude.
	self addAptitude: (BrGlamorousWithDropdownAptitude
		handle: [ self createDropdownExpandedHandleButton ]
		content: [ self createURLeditable ]).

	self aptitude - BrGlamorousButtonExteriorAptitude.
]

{ #category : #accessing }
LeMiniDocsImport >> tooltip [
	^ self label
]

{ #category : #accessing }
LeMiniDocsImport >> tooltip: aString [
	self label: aString
]
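A sketch of how the new button class is meant to be wired up, mirroring newImportMiniDocsButton and updateToolbarButtons in LeHomeDatabaseHeaderElement above; aDatabase stands for any LeDatabase:

	| button |
	button := LeMiniDocsImport new
		tooltip: 'Import document from link';
		contentExtent: 200 @ 30.
	button database: aDatabase.
	"Expanding the dropdown shows an editable 'Document link' field; accepting the text sends importDocumentFrom: to the database."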
26
src/MiniDocs/LeMockedSnippet.extension.st
Normal file
@ -0,0 +1,26 @@
|
||||
Extension { #name : #LeMockedSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeMockedSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uuid;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeMockedSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
6
src/MiniDocs/LeNullDatabase.extension.st
Normal file
@ -0,0 +1,6 @@
Extension { #name : #LeNullDatabase }

{ #category : #'*MiniDocs' }
LeNullDatabase >> attachmentsDirectory [
	^ (FileLocator temp / 'lepiter' / 'attachments') ensureCreateDirectory.
]
@ -3,7 +3,7 @@ Extension { #name : #LePage }

{ #category : #'*MiniDocs' }
LePage >> asHtmlFile [

	self asMarkdownFile.
	self asMarkdownFileWithMetadataWrappers.
	self defaultPandocTemplate exists
		ifFalse: [ MarkupFile installTemplate: 'https://mutabit.com/repos.fossil/mutabit/doc/trunk/plantillas/Pandoc/clean-menu-mod.html' into: self defaultPandocTemplate parent ].

@ -21,61 +21,76 @@ LePage >> asHtmlFile [
LePage >> asMarkdeep [
	| bodyStream markdeep |
	bodyStream := '' writeStream.
	self preorderTraversal do: [:snippet |
		bodyStream nextPutAll: snippet asMarkdeep
	].
	bodyStream nextPutAll: self notebookMetadataSnippet asMarkdeep.
	self preorderTraversal
		do: [ :snippet | bodyStream nextPutAll: snippet asMarkdeep ].
	markdeep := Markdeep new
		title: self title;
		body: bodyStream contents;
		navTop: self navTop.
	self metadata keysAndValuesDo: [:k :v |
		k = 'lang'
			ifTrue: [
				markdeep head
					add: '<meta lang="', v,'">';
					yourself.
			]
			ifFalse: [
				markdeep head
					add: '<meta name="', k, '" content="', v,'">';
					yourself.
			]
	].
	self metadata at: 'authors' ifPresent: [:author | markdeep metadata at: 'authors' put: author ].
	self metadata at: 'version' ifPresent: [:version | markdeep metadata at: 'version' put: version ].
	^ markdeep.
		title: self title;
		body: bodyStream contents;
		metadata: self metadata;
		file: self storage / self markdeepFileName;
		navTop: self navTop.
	self metadata
		at: 'authors'
		ifPresent: [ :author | markdeep metadata at: 'authors' put: author ].
	self metadata
		at: 'version'
		ifPresent: [ :version | markdeep metadata at: 'version' put: version ].
	markdeep head: nil.
	^ markdeep
]
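A sketch of the export flow these methods support; aPage stands for any LePage in a database:

	aPage asMarkdeep.        "a Markdeep object carrying the page title, body, metadata and navTop"
	aPage asMarkdeepFile.    "writes the Markdeep export into the page storage folder (see asMarkdeepFile below)"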
{ #category : #'*MiniDocs' }
|
||||
LePage >> asMarkdeepFile [
|
||||
| folder |
|
||||
folder := self options at: 'storage' ifAbsent: [ FileLocator temp ].
|
||||
^ self asMarkdeep exportAsFileOn: folder / self markdeepFileName
|
||||
|
||||
^ self asMarkdeep notifyExportAsFileOn: self storage / self markdeepFileName
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> asMarkdown [
|
||||
"PENDING: to debug the output."
|
||||
| bodyStream markdown |
|
||||
bodyStream := '' writeStream.
|
||||
bodyStream
|
||||
nextPutAll: '---';
|
||||
nextPutAll: String lf.
|
||||
self metadata keysAndValuesDo: [ :k :v |
|
||||
bodyStream
|
||||
nextPutAll: k , ': "' , v, '"';
|
||||
nextPutAll: String lf ].
|
||||
bodyStream nextPutAll: '---' , String lf , String lf.
|
||||
nextPutAll: '# ', self title; cr; cr.
|
||||
self preorderTraversal
|
||||
do: [ :snippet | bodyStream nextPutAll: snippet asMarkdown ].
|
||||
markdown := Markdown new contents: bodyStream contents.
|
||||
markdown := Markdown new
|
||||
contents: bodyStream contents promoteMarkdownHeaders;
|
||||
metadata: (self metadata at: 'original' ifAbsentPut: Dictionary new).
|
||||
^ markdown
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> asMarkdownFile [
|
||||
LePage >> asMarkdownFileWithMetadataWrappers [
|
||||
| folder |
|
||||
folder := self options at: 'storage' ifAbsent: [ FileLocator temp ].
|
||||
^ MarkupFile exportAsFileOn: folder / self markdownFileName containing: self asMarkdown contents
|
||||
folder := self storage.
|
||||
^ MarkupFile exportAsFileOn: folder / self markdownFileName containing: self asMarkdownWithMetadataWrappers contents
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> asMarkdownWithMetadataWrappers [
|
||||
| bodyStream markdown |
|
||||
bodyStream := '' writeStream.
|
||||
"bodyStream
|
||||
nextPut: Character lf;
|
||||
nextPutAll: '# ', self title; cr; cr."
|
||||
self preorderTraversal
|
||||
do: [ :snippet | bodyStream nextPutAll: snippet asMarkdownWithMetadataWrappers ].
|
||||
markdown := Markdown new
|
||||
contents: bodyStream contents promoteMarkdownHeaders;
|
||||
title: self title;
|
||||
metadata: self metadata.
|
||||
^ markdown
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> config [
|
||||
| configFile |
|
||||
configFile := self storage / 'config.ston'.
|
||||
configFile exists
|
||||
ifTrue: [^ STON fromString: configFile contents ]
|
||||
ifFalse: [ ^ nil ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
@ -86,18 +101,52 @@ LePage >> defaultPandocTemplate [
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> detectParentSnippetWithUid: uidString [
|
||||
"Answer a boolean indicating whether the supplied uid is present"
|
||||
uidString = self uid asString36 ifTrue: [ ^ self ].
|
||||
^ self preorderTraversal detect: [ :snippet | snippet uidString = uidString ]
|
||||
]
|
||||
|
||||
^ self preorderTraversal detect: [ :snippet | snippet uidString = uidString ] ifNone: [ ^ self ]
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> exportMetadataToHead: markdeep [
|
||||
self metadata
|
||||
keysAndValuesDo: [ :k :v |
|
||||
k = 'lang'
|
||||
ifTrue: [ markdeep head
|
||||
add: '<meta lang="' , v , '">';
|
||||
yourself ]
|
||||
ifFalse: [ markdeep head
|
||||
add: '<meta name="' , k , '" content="' , v , '">';
|
||||
yourself ] ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
LePage >> exportedFileName [
	| sanitized |
	sanitized := self title asDashedLowercase copyWithoutAll: #($/).
	| sanitized titleWords shortTitle |
	titleWords := self title splitOn: Character space.
	(titleWords size > 11)
		ifTrue: [
			titleWords := titleWords copyFrom: 1 to: 3.
			shortTitle := titleWords joinUsing: Character space.
		]
		ifFalse: [shortTitle := self title].
	sanitized := shortTitle asDashedLowercase romanizeAccents copyWithoutAll: #($/ $: $🢒 $,).
	^ sanitized , '--' , (self uidString copyFrom: 1 to: 5)
]
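A worked sketch of the file name derivation above, with a made-up title and uid: a page titled 'Reproducible research workflows for community documentation in the rural Global South now' has 12 words, so only the first three are kept; 'Reproducible research workflows' becomes 'reproducible-research-workflows', and with a uid starting with '3A7KQ' the exported name is 'reproducible-research-workflows--3A7KQ'.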
{ #category : #'*MiniDocs' }
|
||||
LePage >> fromDictionary: aDictionary [
|
||||
self
|
||||
title: (aDictionary at: 'title');
|
||||
basicUid: (UUID fromString36: (aDictionary at: 'id'));
|
||||
createTime: (LeTime new
|
||||
time: (aDictionary at: 'created') asDateAndTime);
|
||||
editTime: (LeTime new
|
||||
time: (aDictionary at: 'modified') asDateAndTime);
|
||||
latestEditTime: (LeTime new
|
||||
time: (aDictionary at: 'modified') asDateAndTime);
|
||||
createEmail: (aDictionary at: 'creator');
|
||||
editEmail: (aDictionary at: 'modifier').
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> fromMarkdeepUrl: aString [
|
||||
| docTree pageMetadata |
|
||||
@ -127,6 +176,18 @@ LePage >> latestEditTime: aLeTime [
|
||||
latestEditTime := aLeTime
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> localHostAddress [
|
||||
| localUrl route |
|
||||
MiniDocsServer teapot server isRunning ifFalse: [ MiniDocsServer restart ].
|
||||
route := self storage path segments joinUsing: '/'.
|
||||
MiniDocsServer teapot
|
||||
serveStatic: ('/', route, '/', self markdeepFileName)
|
||||
from: self storage / self markdeepFileName.
|
||||
localUrl := MiniDocsServer teapot server localUrl asString.
|
||||
^ localUrl, route, '/', self markdeepFileName
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> markdeepFileName [
|
||||
|
||||
@ -141,17 +202,17 @@ LePage >> markdownFileName [
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> metadata [
|
||||
|
||||
^ self options at: 'metadata' ifAbsentPut: [ self metadataInit]
|
||||
^ self metadataUpdate
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> metadataInit [
|
||||
LePage >> metadataUpdate [
|
||||
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'title' put: self contentAsString;
|
||||
at: 'created' put: self createTime greaseString;
|
||||
at: 'modified' put: self latestEditTime greaseString;
|
||||
at: 'modified' put: self getLatestEditTime greaseString;
|
||||
at: 'creator' put: self createEmail greaseString;
|
||||
at: 'modifier' put: self editEmail greaseString;
|
||||
yourself
|
||||
@ -160,12 +221,36 @@ LePage >> metadataInit [
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> navTop [
|
||||
| topNavFile |
|
||||
topNavFile := ((self optionAt: 'storage' ifAbsentPut: [ FileLocator temp ]) / '_navtop.html').
|
||||
topNavFile := self storage / '_navtop.html'.
|
||||
topNavFile exists
|
||||
ifFalse: [ ^ '' ]
|
||||
ifTrue: [ ^ topNavFile contents ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> notebookMetadataSnippet [
|
||||
| response |
|
||||
response := LeTextSnippet new fromString: '<!-- See this snippet source code for this notebook''s metadata -->'.
|
||||
response parent: self.
|
||||
self optionAt: 'HedgeDoc' ifAbsent: [ ^ response ].
|
||||
(response extra)
|
||||
at: 'HedgeDoc' put: (self optionAt: 'HedgeDoc').
|
||||
^ response
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> olderChild [
|
||||
"I provide the last edited child node.
|
||||
I'm useful to recalculate the age of a notebook."
|
||||
| response|
|
||||
response := self preorderTraversal first.
|
||||
self preorderTraversal do: [:current |
|
||||
current editTime >= response editTime
|
||||
ifTrue: [ response := current ]
|
||||
].
|
||||
^ response
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> options [
|
||||
^ options
|
||||
@ -173,10 +258,7 @@ LePage >> options [
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> preorderTraversal [
|
||||
| output |
|
||||
output := OrderedCollection new.
|
||||
self withDeepCollect: [:each | each allChildrenBreadthFirstDo: [:child | output add: child]].
|
||||
^ output.
|
||||
^ self allChildrenDepthFirst
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
@ -186,6 +268,11 @@ LePage >> removeSnippetsMetadata [
|
||||
ifTrue: [ snippet options removeKey: 'metadata' ] ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> sanitizeMetadata [
|
||||
self allChildrenDepthFirst do: [:snippet | snippet sanitizeMetadata ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> sharedVariablesBindings [
|
||||
| codeSnippets shared |
|
||||
@ -211,8 +298,87 @@ LePage >> sharedVariablesBindings [
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> storage [
|
||||
|
||||
^ self optionAt: 'storage'
|
||||
ifAbsent: [ ^ FileLocator temp ]
|
||||
LePage >> splitAdmonitionSnippets [
|
||||
"I'm used to clean after importing from HedgeDoc to ensure that a snippet contains only admonitions and extra content is put in a new cell."
|
||||
| admonitionSnippets |
|
||||
admonitionSnippets := self children select: [:node | node string startsWithMarkdownAdmonition ].
|
||||
admonitionSnippets ifEmpty: [ ^ self ].
|
||||
admonitionSnippets do: [:node | | nodeContent |
|
||||
node ifNotNil: [
|
||||
nodeContent := node string.
|
||||
nodeContent startsWithMarkdownAdmonition
|
||||
ifTrue: [ | snippetCommand |
|
||||
snippetCommand := node splitSnippetCommandAtPosition: nodeContent admonitionEndingPosition.
|
||||
snippetCommand execute.
|
||||
node tagWith: (nodeContent lines first trimBoth withoutPrefix: ':::')
|
||||
]
|
||||
]
|
||||
]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> storage [
|
||||
| current |
|
||||
current := self database attachmentsDirectory parent.
|
||||
self optionAt: 'storage' ifAbsent: [ ^ current ].
|
||||
(self optionAt: 'storage') ifNil: [ ^ current ].
|
||||
^ self optionAt: 'storage'
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> uiAddCopyButtonFor: anAction [
|
||||
<lePageAction>
|
||||
^ anAction button
|
||||
tooltip: 'Export Page';
|
||||
icon: BrGlamorousVectorIcons changes;
|
||||
action: [:aButton | aButton phlow spawnObject: (self page database addPageCopy: self page) ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> uiDefineFolderFor: anAction [
|
||||
<lePageAction>
|
||||
| folderButton |
|
||||
folderButton := anAction dropdown
|
||||
icon: BrGlamorousIcons savetodisk;
|
||||
tooltip: 'Export folder'"";
|
||||
content: [:aButton | BlElement new
|
||||
background: (Color gray alpha: 0.2);
|
||||
size: 100 @ 100;
|
||||
margin: (BlInsets all: 10) ].
|
||||
^ folderButton
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> uiExportButtonFor: anAction [
|
||||
<lePageAction>
|
||||
^ anAction button
|
||||
tooltip: 'Export Page';
|
||||
icon: BrGlamorousVectorIcons down;
|
||||
action: [:aButton | aButton phlow spawnObject: self page asMarkdeepFile ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> uiRefreshWebPreviewButtonFor: anAction [
|
||||
<lePageAction>
|
||||
^ anAction button
|
||||
tooltip: 'Refresh web view';
|
||||
icon: BrGlamorousVectorIcons refresh;
|
||||
action: [
|
||||
self page asMarkdeep exportAsFileOn: (self page storage / self page markdeepFileName).
|
||||
GoogleChrome openWindowOn: self page localHostAddress.
|
||||
"TODO: If Chrome/Chromium are not installed, I should execute:"
|
||||
"WebBrowser openOn: self page localHostAddress" ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> youngerChild [
|
||||
"I provide the first create child node.
|
||||
I'm useful to recalculate the age of a notebook."
|
||||
| response|
|
||||
response := self preorderTraversal first.
|
||||
self preorderTraversal do: [:current |
|
||||
current createTime <= response createTime
|
||||
ifTrue: [ response := current ]
|
||||
].
|
||||
^ response
|
||||
]
|
||||
|
@ -1,15 +0,0 @@
|
||||
Extension { #name : #LePageHeaderBuilder }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePageHeaderBuilder >> addExportPageButton [
|
||||
<leHeaderAction>
|
||||
| newButton |
|
||||
|
||||
newButton := BrButton new
|
||||
aptitude: BrGlamorousButtonWithIconAptitude;
|
||||
label: 'Export Page';
|
||||
icon: BrGlamorousVectorIcons down;
|
||||
action: [ :aButton |
|
||||
aButton phlow spawnObject: self page asMarkdeepFile ].
|
||||
self toolbarElement addItem: newButton.
|
||||
]
|
26
src/MiniDocs/LePharoRewriteSnippet.extension.st
Normal file
@ -0,0 +1,26 @@
|
||||
Extension { #name : #LePharoRewriteSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePharoRewriteSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uuid;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePharoRewriteSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
@ -9,21 +9,26 @@ LePharoSnippet >> contentAsStringCustomized [
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePharoSnippet >> contentFrom: markdeepDiv [
|
||||
LePharoSnippet >> fromDictionary: anOrderedDictionary [
|
||||
self
|
||||
uid: (LeUID new uidString: (anOrderedDictionary at: 'id'));
|
||||
parent: (anOrderedDictionary at: 'parent');
|
||||
createTime: (LeTime new time: ((anOrderedDictionary at: 'created')asDateAndTime));
|
||||
editTime: (LeTime new time: ((anOrderedDictionary at: 'modified') asDateAndTime));
|
||||
editEmail: (anOrderedDictionary at: 'modifier');
|
||||
createEmail: (anOrderedDictionary at: 'creator').
|
||||
]
|
||||
|
||||
| sanitizedStringText metadata joinedText |
|
||||
metadata := STON fromString: (markdeepDiv attributes at: 'st-data').
|
||||
sanitizedStringText := markdeepDiv contentString lines.
|
||||
sanitizedStringText := sanitizedStringText copyFrom: 4 to: sanitizedStringText size -2.
|
||||
joinedText := '' writeStream.
|
||||
sanitizedStringText do: [ :line | joinedText nextPutAll: line; nextPut: Character lf ].
|
||||
self code: joinedText contents allButLast;
|
||||
uid: (LeUID new uidString: (metadata at: 'id'));
|
||||
parent: (metadata at: 'parent');
|
||||
createTime: (LeTime new time: ((metadata at: 'created')asDateAndTime));
|
||||
editTime: (LeTime new time: ((metadata at: 'modified') asDateAndTime));
|
||||
editEmail: (metadata at: 'modifier');
|
||||
createEmail: (metadata at: 'creator')
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePharoSnippet >> fromMarkdeep: markdeepDiv [
|
||||
|
||||
^ markdeepDiv asSnippetDictionary asLepiterSnippet
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePharoSnippet >> fromString: aString [
|
||||
|
||||
[ self coder forSource: aString ] onErrorDo: [ ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
|
@ -2,14 +2,137 @@ Extension { #name : #LePictureSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePictureSnippet >> asMarkdeep [
|
||||
| output |
|
||||
| output curatedCaption captionLines |
|
||||
captionLines := self caption lines.
|
||||
(captionLines size <= 1)
|
||||
ifTrue: [ curatedCaption := caption ]
|
||||
ifFalse: [
|
||||
curatedCaption := WriteStream on: ''.
|
||||
curatedCaption nextPutAll: captionLines first.
|
||||
captionLines allButFirstDo: [:line |
|
||||
curatedCaption nextPutAll: ' ', line.
|
||||
curatedCaption := curatedCaption contents.
|
||||
]
|
||||
].
|
||||
output := WriteStream on: ''.
|
||||
output
|
||||
nextPutAll: self metadataDiv;
|
||||
nextPutAll: self centeredFigure;
|
||||
nextPutAll: '![ ', curatedCaption ,' ](', self urlString, ')';
|
||||
nextPut: Character lf;
|
||||
nextPutAll: '</div>';
|
||||
nextPut: Character lf;
|
||||
nextPut: Character lf.
|
||||
^ output contents
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePictureSnippet >> asMarkdownWithMetadataWrappers [
|
||||
^ self asMarkdeep
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePictureSnippet >> contentFrom: markdeepDiv [
|
||||
| caption width |
|
||||
caption := markdeepDiv contentString.
|
||||
width := (markdeepDiv // 'img' @ 'width') stringValue.
|
||||
self
|
||||
optionAt: 'caption' put: caption;
|
||||
optionAt: 'width' put: width.
|
||||
self urlString: (markdeepDiv // 'img' @ 'src') stringValue.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePictureSnippet >> fromDictionary: anOrderedDictionary [
|
||||
| sanitizedUrl|
|
||||
sanitizedUrl := (anOrderedDictionary at: 'url').
|
||||
sanitizedUrl := sanitizedUrl copyFrom: 5 to: sanitizedUrl size - 3.
|
||||
self
|
||||
uid: (LeUID new uidString: (anOrderedDictionary at: 'id'));
|
||||
parent: (anOrderedDictionary at: 'parent');
|
||||
createTime: (LeTime new time: ((anOrderedDictionary at: 'created')asDateAndTime));
|
||||
editTime: (LeTime new time: ((anOrderedDictionary at: 'modified') asDateAndTime));
|
||||
editEmail: (anOrderedDictionary at: 'modifier');
|
||||
createEmail: (anOrderedDictionary at: 'creator');
|
||||
urlString: sanitizedUrl;
|
||||
caption: (anOrderedDictionary at: 'content') first
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePictureSnippet >> fromMarkdeep: markdeepDiv [
|
||||
^ markdeepDiv asSnippetDictionary asLepiterSnippet
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePictureSnippet >> fromString: aStringArray [
|
||||
"aStringArray should contain as first element the sanitized string and
|
||||
as second the full original image Link string, which may contains links in the description."
|
||||
| args urlTemp |
|
||||
|
||||
args := aStringArray second splitOn: ']('.
|
||||
urlTemp := args last.
|
||||
urlTemp := urlTemp copyFrom: 1 to: urlTemp size - 1.
|
||||
self caption: aStringArray first.
|
||||
self urlString: urlTemp.
|
||||
^ self
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePictureSnippet >> metadata [
|
||||
^ self metadataInit
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePictureSnippet >> metadataDiv [
|
||||
| output |
|
||||
output := WriteStream on: ''.
|
||||
output
|
||||
nextPutAll: '<div st-class="' , self class greaseString , '"';
|
||||
nextPut: Character lf;
|
||||
nextPutAll: ' st-data="' , (STON toStringPretty: self metadata) , '">';
|
||||
nextPut: Character lf.
|
||||
^ output contents withInternetLineEndings.
|
||||
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePictureSnippet >> metadataInit [
|
||||
| surrogate |
|
||||
self parent
|
||||
ifNil: [ surrogate := nil]
|
||||
ifNotNil: [ surrogate := self parent uidString ].
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: surrogate;
|
||||
at: 'url' put: '<!--',self contentAsString, '-->';
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: self createEmail asString withoutXMLTagDelimiters;
|
||||
at: 'modifier' put: self editEmail asString withoutXMLTagDelimiters;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePictureSnippet >> metadataUpdate [
|
||||
| surrogate |
|
||||
self parent
|
||||
ifNil: [ surrogate := nil]
|
||||
ifNotNil: [ surrogate := self parent uidString ].
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: surrogate;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: self createEmail asString withoutXMLTagDelimiters;
|
||||
at: 'modifier' put: self editEmail asString withoutXMLTagDelimiters;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePictureSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
||||
|
26
src/MiniDocs/LeSmaCCRewriteSnippet.extension.st
Normal file
@ -0,0 +1,26 @@
|
||||
Extension { #name : #LeSmaCCRewriteSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeSmaCCRewriteSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uuid;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeSmaCCRewriteSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
@ -7,6 +7,33 @@ LeSnippet class >> fromMetaMarkdeep: div [
|
||||
metadata := STON fromString:(div xpath: '@st-data') stringValue.
|
||||
snippet := className asClass new.
|
||||
snippet injectMetadataFrom: metadata.
|
||||
snippet contentFrom: div.
|
||||
snippet fromMarkdeep: div.
|
||||
^ snippet.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeSnippet >> metadata [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
self optionAt: 'metadata' ifAbsentPut: [ OrderedDictionary new ].
|
||||
^ (self optionAt: 'metadata')
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uid asString36;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeSnippet >> moveToPageTitled: pageName [
|
||||
| db origin destination thisSnippet |
|
||||
thisSnippet := self.
|
||||
db := self page database.
|
||||
destination := db pageNamed: pageName.
|
||||
origin := db pageNamed: thisSnippet page title.
|
||||
origin removeSnippet: thisSnippet.
|
||||
destination addSnippet: thisSnippet.
|
||||
]
|
||||
|
31
src/MiniDocs/LeTextCoderSnippetElement.extension.st
Normal file
@ -0,0 +1,31 @@
|
||||
Extension { #name : #LeTextCoderSnippetElement }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextCoderSnippetElement >> asLePage [
|
||||
| currentSnippet newPage |
|
||||
currentSnippet := self snippet.
|
||||
newPage := LePage new.
|
||||
newPage
|
||||
title: (currentSnippet text asString trimLeft: [:char | char = $# ]) trim.
|
||||
self page database
|
||||
addPage: newPage.
|
||||
currentSnippet allChildrenBreadthFirstDo: [:child |
|
||||
child moveToPageTitled: newPage title.
|
||||
].
|
||||
^ newPage
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextCoderSnippetElement >> asSnippetViewModel [
|
||||
^ self snippetContent
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextCoderSnippetElement >> moveToPageTitled: pageName [
|
||||
| db origin destination |
|
||||
db := self page database.
|
||||
destination := db pageNamed: pageName.
|
||||
origin := db pageNamed: self page title.
|
||||
origin removeSnippet: self.
|
||||
destination addSnippet: self .
|
||||
]
|
@ -1,39 +1,52 @@
|
||||
Extension { #name : #LeTextSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextSnippet >> contentFrom: markdeepDiv [
|
||||
LeTextSnippet >> asLePage [
|
||||
| page title currentSnippet |
|
||||
title := self contentAsString markdownHeaders associations first value.
|
||||
title := (title trimBoth: [:char | char = $# ]) trimmed.
|
||||
page := LePage new
|
||||
initializeTitle: title.
|
||||
currentSnippet := LeTextSnippet new
|
||||
string: self contentAsString.
|
||||
page addSnippet: currentSnippet.
|
||||
self database addPage: page.
|
||||
self childrenDo: [:child |
|
||||
child moveToPageTitled: page title
|
||||
].
|
||||
self removeSelfCommand.
|
||||
^ page.
|
||||
]
|
||||
|
||||
| sanitizedStringText metadata |
|
||||
metadata := STON fromString: (markdeepDiv attributes at: 'st-data').
|
||||
sanitizedStringText := markdeepDiv contentString.
|
||||
sanitizedStringText := sanitizedStringText allButFirst.
|
||||
sanitizedStringText := sanitizedStringText allButLast.
|
||||
self string: sanitizedStringText;
|
||||
uid: (LeUID new uidString: (metadata at: 'id'));
|
||||
parent: (metadata at: 'parent');
|
||||
createTime: (LeTime new time: ((metadata at: 'created')asDateAndTime));
|
||||
editTime: (LeTime new time: ((metadata at: 'modified') asDateAndTime));
|
||||
editEmail: (metadata at: 'modifier');
|
||||
createEmail: (metadata at: 'creator')
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextSnippet >> fromDictionary: anOrderedDictionary [
|
||||
self
|
||||
uid: (LeUID new uidString: (anOrderedDictionary at: 'id'));
|
||||
parent: (anOrderedDictionary at: 'parent');
|
||||
createTime: (LeTime new time: ((anOrderedDictionary at: 'created')asDateAndTime));
|
||||
editTime: (LeTime new time: ((anOrderedDictionary at: 'modified') asDateAndTime));
|
||||
editEmail: (anOrderedDictionary at: 'modifier');
|
||||
createEmail: (anOrderedDictionary at: 'creator')
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextSnippet >> fromMarkdeep: markdeepDiv [
|
||||
|
||||
^ markdeepDiv asSnippetDictionary asLepiterSnippet
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextSnippet >> fromString: aString [
|
||||
|
||||
self
|
||||
string: aString;
|
||||
uid: LeUID new.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextSnippet >> metadata [
|
||||
|
||||
^ self optionAt: 'metadata' ifAbsentPut: [ self metadataInit ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextSnippet >> metadataInit [
|
||||
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parentId;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: self createEmail asString;
|
||||
at: 'modifier' put: self editEmail asString;
|
||||
yourself
|
||||
|
||||
^ self metadataUpdate
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
@ -44,11 +57,23 @@ LeTextSnippet >> options [
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextSnippet >> parentId [
|
||||
self parent ifNil: [ ^ self ].
|
||||
(self parent isString) ifTrue: [^ self parent].
|
||||
^ self parent uidString.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextSnippet >> taggedWith: aString [
|
||||
self metadata at: 'tags' ifPresent: [ (self metadata at: 'tags') add: aString; yourself ] ifAbsentPut: [ Set new ].
|
||||
^ self metadata at: 'tags'
|
||||
LeTextSnippet >> tagWith: aString [
|
||||
self tags add: aString.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextSnippet >> withFollowingSnippets [
|
||||
"I'm the same method implemented for PharoSnippets,
|
||||
but present also here as a way to improve moving prose snippets from pages.
|
||||
"
|
||||
| snippets stop start |
|
||||
snippets := self parent children asArray.
|
||||
start := snippets indexOf: self.
|
||||
stop := snippets size.
|
||||
^ snippets copyFrom: start to: stop
|
||||
]
|
||||
|
@ -2,45 +2,62 @@ Extension { #name : #LeTextualSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> asMarkdeep [
|
||||
"Inspired by Alpine.js and Assembler CSS 'x-' properties, we are going to use
|
||||
'st-' properties as a way to extend divs metadata regarding its contents."
|
||||
|
||||
|
||||
| output |
|
||||
output := WriteStream on: ''.
|
||||
output
|
||||
nextPutAll: '<div st-class="' , self class greaseString , '"';
|
||||
nextPut: Character lf;
|
||||
nextPutAll: ' st-data="' , (STON toString: self metadata) , '">';
|
||||
nextPut: Character lf;
|
||||
nextPutAll: self metadataDiv;
|
||||
nextPutAll: self markdeepCustomOpener;
|
||||
nextPutAll: self contentAsString;
|
||||
nextPutAll: self contentAsStringAnnotated;
|
||||
nextPut: Character lf;
|
||||
nextPutAll: self markdeepCustomCloser;
|
||||
nextPutAll: '</div>';
|
||||
nextPut: Character lf;
|
||||
nextPut: Character lf.
|
||||
^ output contents
|
||||
^ output contents withInternetLineEndings
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> asMarkdown [
|
||||
|
||||
| output |
|
||||
output := '' writeStream.
|
||||
output
|
||||
nextPutAll: self contentAsStringCustomized; lf.
|
||||
^ output contents
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> asMarkdownWithMetadataWrappers [
|
||||
"Inspired by Alpine.js and Assembler CSS 'x-' properties, we are going to use
|
||||
'st-' properties as a way to extend divs metadata regarding its contents."
|
||||
| output |
|
||||
output := '' writeStream.
|
||||
output
|
||||
nextPutAll: '<div st-class="', self class asString, '"'; lf;
|
||||
nextPutAll: ' st-data="', (STON toString: self metadata), '">'; lf;
|
||||
nextPutAll: self metadataDiv;
|
||||
nextPutAll: self markdownCustomOpener;
|
||||
nextPutAll: self contentAsStringCustomized; lf;
|
||||
nextPutAll: self markdownCustomCloser;
|
||||
nextPutAll: '</div>'; lf; lf.
|
||||
^ output contents
|
||||
^ output contents withInternetLineEndings
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> contentAsStringAnnotated [
|
||||
self ast ifNotNil: [ ^ self processSnippetAnnotations ].
|
||||
^ self contentAsString
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> contentAsStringCustomized [
|
||||
^ self contentAsString
|
||||
(self contentAsString beginsWith: '#')
|
||||
ifTrue: [ ^ '#', self contentAsString ]
|
||||
ifFalse: [ ^ self contentAsString ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> extra [
|
||||
^ self optionAt: 'extra' ifAbsentPut: [ Dictionary new ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
@ -66,26 +83,84 @@ LeTextualSnippet >> markdownCustomOpener [
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> metadata [
|
||||
|
||||
^ self optionAt: 'metadata' ifAbsentPut: [ self metadataInit ]
|
||||
^ self metadataUpdate
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> metadataInit [
|
||||
| surrogate |
|
||||
self parent
|
||||
ifNil: [ surrogate := nil]
|
||||
ifNotNil: [ surrogate := self parent uidString ].
|
||||
LeTextualSnippet >> metadataDiv [
|
||||
"Inspired by Alpine.js and Assembler CSS 'x-' properties, we are going to use
|
||||
'st-' properties as a way to extend divs metadata regarding its contents."
|
||||
"PENDING: this is repeated in several snippets. Can be abstracted up in a common object of the class hierarchy?"
|
||||
| output |
|
||||
output := WriteStream on: ''.
|
||||
output
|
||||
nextPutAll: '<div st-class="' , self class greaseString , '"';
|
||||
nextPut: Character lf;
|
||||
nextPutAll: ' st-data="' , (STON toStringPretty: self metadata) , '">';
|
||||
nextPut: Character lf.
|
||||
^ output contents withInternetLineEndings.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: surrogate;
|
||||
at: 'parent' put: (self parent ifNotNil: [self parent uidString ]);
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: self createEmail asString;
|
||||
at: 'modifier' put: self editEmail asString;
|
||||
yourself
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
at: 'extra' put: self extra;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> processSnippetAnnotations [
|
||||
| exported substitutions annotations pageConfig |
|
||||
annotations := self ast parts
|
||||
select: [ :each | each className includesSubstring: 'AnnotationNode' ].
|
||||
annotations ifEmpty: [ ^ self contentAsString ].
|
||||
substitutions := OrderedDictionary new.
|
||||
pageConfig := self page config.
|
||||
annotations
|
||||
do: [ :each |
|
||||
| key type value color |
|
||||
key := each source.
|
||||
type := (key splitOn: ':') first copyWithoutAll: '{{'.
|
||||
value := key copyFrom: type size + 4 to: key size - 2.
|
||||
pageConfig
|
||||
ifNil: [ color := 'default' ]
|
||||
ifNotNil: [ | colors |
|
||||
colors := pageConfig at: 'annotationColors' ifAbsent: [ nil ].
|
||||
colors
|
||||
ifNotNil: [ color := colors
|
||||
at: type
|
||||
ifAbsent: [ colors at: 'defaultColor' ifAbsentPut: [ 'default' ] ] ] ].
|
||||
substitutions
|
||||
at: key
|
||||
put: '<span st-class="' , type , '" style="color:' , color , '">' , value , '</span>' ].
|
||||
exported := self contentAsString.
|
||||
substitutions
|
||||
keysAndValuesDo: [ :k :v | exported := exported copyReplaceAll: k with: v ].
|
||||
^ exported
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> sanitizeMetadata [
|
||||
self options ifNil: [^ self ].
|
||||
self options removeKey: 'metadata' ifAbsent: [^ self ].
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v asString includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v asString copyWithoutXMLDelimiters)
|
||||
]
|
||||
]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> tags [
|
||||
^ self metadata at: 'tags' ifAbsentPut: [ Set new ]
|
||||
^ self extra at: 'tags' ifAbsentPut: [ Set new ]
|
||||
]
|
||||
|
21
src/MiniDocs/LeUnknownSnippet.extension.st
Normal file
@ -0,0 +1,21 @@
|
||||
Extension { #name : #LeUnknownSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeUnknownSnippet >> metadataUpdate [
|
||||
| surrogate |
|
||||
self parent
|
||||
ifNil: [ surrogate := nil]
|
||||
ifNotNil: [
|
||||
self parent isString
|
||||
ifTrue: [ surrogate := self parent]
|
||||
ifFalse: [ surrogate := self parent uidString ]
|
||||
].
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: surrogate;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: self createEmail asString;
|
||||
at: 'modifier' put: self editEmail asString;
|
||||
yourself
|
||||
]
|
26
src/MiniDocs/LeWardleyMapSnippet.extension.st
Normal file
@ -0,0 +1,26 @@
|
||||
Extension { #name : #LeWardleyMapSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeWardleyMapSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uuid;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeWardleyMapSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
26
src/MiniDocs/LeWordSnippet.extension.st
Normal file
@ -0,0 +1,26 @@
|
||||
Extension { #name : #LeWordSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeWordSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uuid;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeWordSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
57
src/MiniDocs/LeYoutubeReferenceSnippet.extension.st
Normal file
@ -0,0 +1,57 @@
|
||||
Extension { #name : #LeYoutubeReferenceSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeYoutubeReferenceSnippet >> asMarkdeep [
|
||||
| output |
|
||||
output := WriteStream on: ''.
|
||||
output
|
||||
nextPutAll: self metadataDiv;
|
||||
nextPutAll: '![ ', self title, ' | ', self authorName, ' ](',self urlString, ')';
|
||||
nextPut: Character lf;
|
||||
nextPutAll: '</div>';
|
||||
nextPut: Character lf;
|
||||
nextPut: Character lf.
|
||||
^ output contents
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeYoutubeReferenceSnippet >> metadata [
|
||||
^ self optionAt: 'metadata' ifAbsentPut: [ self metadataUpdate ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeYoutubeReferenceSnippet >> metadataDiv [
|
||||
| output |
|
||||
output := WriteStream on: ''.
|
||||
output
|
||||
nextPutAll: '<div st-class="' , self class greaseString , '"';
|
||||
nextPut: Character lf;
|
||||
nextPutAll: ' st-data="' , (STON toStringPretty: self metadata) , '">'.
|
||||
^ output contents withInternetLineEndings.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeYoutubeReferenceSnippet >> metadataUpdate [
|
||||
| surrogate |
|
||||
self parent
|
||||
ifNil: [ surrogate := nil]
|
||||
ifNotNil: [ surrogate := self parent uidString ].
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: surrogate;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: self createEmail asString;
|
||||
at: 'modifier' put: self editEmail asString;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeYoutubeReferenceSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
33
src/MiniDocs/Logseq.class.st
Normal file
@ -0,0 +1,33 @@
|
||||
Class {
|
||||
#name : #Logseq,
|
||||
#superclass : #Object,
|
||||
#instVars : [
|
||||
'folder'
|
||||
],
|
||||
#category : #'MiniDocs-Model'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
Logseq >> assets [
|
||||
^ self folder / 'assets'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Logseq >> folder [
|
||||
^ folder
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Logseq >> folder: aFolder [
|
||||
folder := aFolder
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Logseq >> journals [
|
||||
self folder / 'journals'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Logseq >> pages [
|
||||
^self folder/ 'pages'
|
||||
]
|
@ -3,20 +3,17 @@ I model a Markdeep file as described in https://casual-effects.com/markdeep/
|
||||
"
|
||||
Class {
|
||||
#name : #Markdeep,
|
||||
#superclass : #Object,
|
||||
#superclass : #Markdown,
|
||||
#instVars : [
|
||||
'title',
|
||||
'body',
|
||||
'comments',
|
||||
'tail',
|
||||
'language',
|
||||
'config',
|
||||
'metadata',
|
||||
'head',
|
||||
'navTop',
|
||||
'options'
|
||||
],
|
||||
#category : #MiniDocs
|
||||
#category : #'MiniDocs-Core'
|
||||
}
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
@ -24,21 +21,38 @@ Markdeep class >> fromMarkdownFile: aFileReference [
|
||||
^ self new fromMarkdownFile: aFileReference.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep class >> fromPubPubTOC: orderedDictionary folder: folder index: ordinalPossitive [
|
||||
| contentSection testFile |
|
||||
contentSection := orderedDictionary associations at: ordinalPossitive.
|
||||
testFile := folder / (contentSection key,'--', contentSection value),'md'.
|
||||
^ self new fromMarkdownFile: testFile.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> asMarkdownWithMetadataWrappers [
|
||||
^ Markdown new
|
||||
metadata: self metadata;
|
||||
body: self body;
|
||||
file: self markdownFile
|
||||
]
|
||||
|
||||
{ #category : #'instance creation' }
|
||||
Markdeep >> authors [
|
||||
self metadata at: 'authors' ifPresent: [:k | ^ '**', k, '**' ].
|
||||
^ ''.
|
||||
self metadata at: 'authors' ifAbsentPut: [ Dictionary new ].
|
||||
"self metadata at: 'authors' ifNotEmpty: [:k | ^ '**', k, '**' ]
|
||||
" ^ ''.
|
||||
]
|
||||
|
||||
{ #category : #'instance creation' }
|
||||
Markdeep >> authorsString [
|
||||
self authors
|
||||
ifNil: [ ^ '' ] ifNotNil: [ ^ ' ', self authors ]
|
||||
ifEmpty: [ ^ '' ] ifNotEmpty: [ ^ ' ', self authors ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> body [
|
||||
^ body
|
||||
^ body ifNil: [^ '' ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
@ -46,6 +60,31 @@ Markdeep >> body: anObject [
|
||||
body := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> bodyReplaceAll: original with: replacement [
|
||||
self body: (self body copyReplaceAll: original with: replacement)
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> cleanMetadata [
|
||||
metadata := nil
|
||||
]
|
||||
|
||||
{ #category : #accessing }
Markdeep >> commentPubPubDelimiters [
	| commented openners |
	openners := #('::: {.pub-body-component}' '::: pub-body-component' '::: {.editor .Prosemirror}' '::: {.pub-notes}').
	commented := self body.
	openners do: [ :openner |
		commented := commented copyReplaceAll: openner with: '<!--@div-open ', openner, '-->' ].
	commented := commented
		copyReplaceAll: ':::
' with: '<!--@div-close ::: -->
'.
	self body: commented
]
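A short illustration of the rewriting above; the body is a stand-in string and only shows one of the four openers:

"PubPub div delimiters are turned into HTML comments Markdeep will ignore."
| doc |
doc := Markdeep new.
doc body: '::: {.pub-notes}
A note.
:::
'.
doc commentPubPubDelimiters.
doc body.
"The opener line now reads <!--@div-open ::: {.pub-notes}-->
and the bare ::: closer becomes <!--@div-close ::: -->."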
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> comments [
|
||||
^ comments ifNil: [ ^ comments := true ]
|
||||
@ -87,7 +126,9 @@ Markdeep >> commentsSupport [
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> config [
|
||||
|
||||
| configFile |
|
||||
configFile := self folder / 'config.ston'.
|
||||
configFile exists ifTrue: [ ^ config := STON fromString: configFile contents ].
|
||||
^ config ifNil: [ config := Dictionary new]
|
||||
]
|
||||
|
||||
@ -104,32 +145,92 @@ Markdeep >> contents [
|
||||
output := '' writeStream.
|
||||
output
|
||||
nextPutAll: self headContents; lf; lf;
|
||||
nextPutAll: ' **', self title, '**'; lf;
|
||||
nextPutAll: self authorsString ; lf;
|
||||
nextPutAll: ' ', self version; lf;
|
||||
nextPutAll: ' **', self title trimmed accentedCharactersCorrection, '**'; lf;
|
||||
nextPutAll: self authorsString ; lf;
|
||||
nextPutAll: '', self version; lf;
|
||||
nextPutAll: self navTop; lf; lf;
|
||||
nextPutAll: self body; lf; lf;
|
||||
nextPutAll: self body; lf; lf;
|
||||
nextPutAll: self tail; lf; lf; lf; lf;
|
||||
nextPutAll: self commentsSupport.
|
||||
^ output contents.
|
||||
]
|
||||
|
||||
{ #category : #persistence }
|
||||
Markdeep >> exportAsFile [
|
||||
| newFile |
|
||||
self markdownFile ifNil: [ self inform: 'Define an input Markdown file or use #exportAsFileOn: instead.' ].
|
||||
newFile := (self markdownFile fullName, '.html') asFileReference.
|
||||
self exportAsFileOn: newFile.
|
||||
{ #category : #accessing }
|
||||
Markdeep >> converPubPubFootnoteBetween: footnote and: nextFootnote in: footnotesArray [
|
||||
| currentNoteIndex nextNoteIndex response noteLines |
|
||||
currentNoteIndex := footnotesArray indexOf: '[^',footnote, ']: '.
|
||||
nextNoteIndex := footnotesArray indexOf: '[^',nextFootnote, ']: '.
|
||||
noteLines := footnotesArray copyFrom: currentNoteIndex to: nextNoteIndex - 1.
|
||||
response := '' writeStream.
|
||||
noteLines do: [:line |
|
||||
line
|
||||
ifNotEmpty: [ response nextPutAll: line, String lf ]
|
||||
"ifEmpty: [ response nextPutAll: ' ' ]?"
|
||||
].
|
||||
response nextPutAll: String lf.
|
||||
^ response contents
|
||||
]
|
||||
|
||||
{ #category : #persistence }
|
||||
Markdeep >> exportAsFileOn: aFileReference [
|
||||
aFileReference ensureDelete.
|
||||
aFileReference exists ifFalse: [ aFileReference ensureCreateFile ].
|
||||
aFileReference writeStreamDo: [ :stream |
|
||||
stream nextPutAll: self contents ].
|
||||
self inform: 'Exported as: ', String cr, aFileReference fullName.
|
||||
^ aFileReference
|
||||
{ #category : #accessing }
|
||||
Markdeep >> extractTitleFrom: docTree [
|
||||
| tempTitle |
|
||||
tempTitle := ((docTree children
|
||||
detect: [ :node | node className = 'PPCMIndentedCode' ]) children
|
||||
detect: [ :subnode | subnode text trimmed beginsWith: '**' ]) text trimmed.
|
||||
self title: (tempTitle copyFrom: 3 to: tempTitle size - 2).
|
||||
^ tempTitle
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> extractYamlMetadataFrom: documentTree [
|
||||
| yamlComment response |
|
||||
yamlComment := documentTree children
|
||||
detect: [:node | node className = 'PPCMHtmlBlock' and: [node text trimmed beginsWith: '<!--@yaml']]
|
||||
ifNone: [ ^ nil ].
|
||||
response := '' writeStream.
|
||||
yamlComment children allButFirst allButLast do: [:each |
|
||||
response nextPutAll: each text; cr
|
||||
].
|
||||
^ {YAML2JSON fromString: response contents . yamlComment }
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> file: aFileReference [
|
||||
file := aFileReference.
|
||||
self fillInContentsFrom: aFileReference
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> fillInContentsFrom: aFileReference [
|
||||
| docTree docTreeChildren headTree bodyStartLine bodyEndLine contentLines rawMetadata |
|
||||
aFileReference exists ifFalse: [ ^ self ].
|
||||
docTree := (Markdown new contents: aFileReference contents) documentTree.
|
||||
docTreeChildren := docTree children.
|
||||
headTree := docTreeChildren
|
||||
detect: [ :node |
|
||||
node className = 'PPCMParagraph'
|
||||
and: [ (node children detect: [ :subnode | subnode text = '<head>' ]) isNotNil ] ]
|
||||
ifNone: [ ^self ].
|
||||
headTree children allButFirst allButLast
|
||||
do: [ :node | node className = 'PPCMHtml' ifTrue: [ self head add: node text ] ].
|
||||
self head: self head asSet asOrderedCollection.
|
||||
rawMetadata := (self extractYamlMetadataFrom: docTree).
|
||||
rawMetadata ifNotNil: [self metadata: rawMetadata first].
|
||||
self title ifNil: [
|
||||
self title: (self metadata at: 'title' ifAbsent: [self extractTitleFrom: docTree]).
|
||||
self title: (self title trimBoth: [ :char | char = $" ]).
|
||||
self metadata at: 'title' put: self title].
|
||||
contentLines := self file contents lines.
|
||||
bodyStartLine := (contentLines
|
||||
detectIndex: [ :line | line includesSubstring: '<!--@yaml' ] ifNone: [ ^ self ]) + rawMetadata second children size.
|
||||
bodyEndLine := contentLines detectIndex: [:line | line includesSubstring: '<!-- Markdeep'] ifNone: [ 0 ].
|
||||
self body: (contentLines copyFrom: bodyStartLine to: bodyEndLine - 1 ) asStringWithCr.
|
||||
^ self .
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> folder [
|
||||
^ self file parent
|
||||
]
|
||||
|
||||
{ #category : #utilities }
|
||||
@ -143,9 +244,21 @@ Markdeep >> fontAwesomeHeader [
|
||||
Markdeep >> fromMarkdownFile: aFileReference [
|
||||
"I create a Markdeep document from a given Markdown file."
|
||||
self processMarkdownFor: aFileReference.
|
||||
self file: aFileReference, 'html'.
|
||||
^ self.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> fromPubPubToMarkdeep [
|
||||
self
|
||||
removeAutoGeneratedFileNotice;
|
||||
removeCCByLicenseDiv;
|
||||
commentPubPubDelimiters;
|
||||
replaceEscapedCharacters;
|
||||
renamePubPubFootnotes;
|
||||
removeAlternativeImagesArray
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> gtTextFor: aView [
|
||||
<gtView>
|
||||
@ -156,8 +269,11 @@ Markdeep >> gtTextFor: aView [
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> head [
|
||||
^ head ifNil: [ head := OrderedCollection new.
|
||||
head add: self fontAwesomeHeader; yourself ]
|
||||
|
||||
^ head ifNil: [
|
||||
head := OrderedCollection new.
|
||||
head add: self fontAwesomeHeader; yourself.
|
||||
].
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
@ -178,6 +294,11 @@ Markdeep >> headContents [
|
||||
nextPutAll: line;
|
||||
nextPut: Character lf
|
||||
].
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
k = 'lang'
|
||||
ifTrue: [ stream nextPutAll: ' <meta lang="', v,'">'; cr. ]
|
||||
ifFalse: [ stream nextPutAll: ' <meta name="', k, '" content="', v,'">'; cr. ]
|
||||
].
|
||||
stream
|
||||
nextPutAll: '</head>';
|
||||
nextPut: Character lf.
|
||||
@ -205,18 +326,28 @@ Markdeep >> markdeepScriptTag [
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> markdownFile [
|
||||
^ Markdown new fromFile: (self config at: 'markdownFile')
|
||||
self file ifNil: [
|
||||
self file: FileLocator temp / ('untitled--', NanoID generate, '.md.html') ].
|
||||
^ (self file fullName withoutSuffix: '.html') asFileReference.
|
||||
]
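After this change the Markdeep document and its Markdown source are paired purely by naming convention (file.md versus file.md.html). A small sketch, assuming the temporary file does not need to exist (fillInContentsFrom: returns early for missing files):

"The .md source reference is derived from the .md.html file name."
| doc |
doc := Markdeep new.
doc file: FileLocator temp / 'demo.md.html'.
doc markdownFile. "==> a FileReference to demo.md in the same folder"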
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> markdownFile: aFileReference [
|
||||
"Where the Mardown file associated with me is stored. Used for sync. and import/export purposes."
|
||||
self config at: 'markdownFile' put: aFileReference
|
||||
self file: aFileReference, 'html'
|
||||
]
|
||||
|
||||
{ #category : #'instance creation' }
|
||||
Markdeep >> metadata [
|
||||
^ metadata ifNil: [ metadata := OrderedDictionary new ]
|
||||
metadata ifNil: [^ metadata := OrderedDictionary new ].
|
||||
(metadata isNil and: [ self file contents isNil ])
|
||||
ifTrue: [ metadata := OrderedDictionary new ].
|
||||
^ metadata
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> metadata: anOrderedDictionary [
|
||||
metadata := anOrderedDictionary
|
||||
]
|
||||
|
||||
{ #category : #utilities }
|
||||
@ -231,7 +362,7 @@ Markdeep >> metadataFromXML: aXMLDocument [
|
||||
|
||||
{ #category : #'instance creation' }
|
||||
Markdeep >> navTop [
|
||||
^ navTop
|
||||
^ navTop ifNil: [ navTop := '' ]
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
@ -252,23 +383,250 @@ Markdeep >> options [
|
||||
]
|
||||
]
|
||||
|
||||
{ #category : #printing }
|
||||
Markdeep >> printOn: aStream [
|
||||
|
||||
super printOn: aStream.
|
||||
aStream
|
||||
nextPutAll: '( ', self title, ' )'
|
||||
]
|
||||
|
||||
{ #category : #'instance creation' }
|
||||
Markdeep >> processMarkdownFor: aFileReference [
|
||||
"comment stating purpose of message"
|
||||
| markdownContent |
|
||||
self markdownFile: aFileReference.
|
||||
self file: aFileReference, 'html'.
|
||||
markdownContent := Markdown fromFile: aFileReference.
|
||||
self metadata: markdownContent metadataAsYAML.
|
||||
self body: (markdownContent commentYAMLMetadata contents).
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> pubPubFootnoteMetadataFromString: string [
|
||||
| sanitized footnoteData altLine altString id |
|
||||
(string lines size <= 1) ifTrue: [ ^ nil ].
|
||||
sanitized := '' writeStream.
|
||||
altString := string copyReplaceAll: '.footnote' with: ''.
|
||||
altString := altString copyReplaceAll: ' node-type='
|
||||
with: '
|
||||
node-type= '.
|
||||
altString := altString copyReplaceAll: ' data-value=' with: '
|
||||
data-value='.
|
||||
altString := altString copyReplaceAll: ' date-structured-value=' with: '
|
||||
date-structured-value= '.
|
||||
altString lines allButFirstDo: [:line |
|
||||
(line beginsWith: '>')
|
||||
ifTrue: [ altLine := line allButFirst ]
|
||||
ifFalse: [ altLine := line ].
|
||||
sanitized
|
||||
nextPutAll: altLine trimBoth;
|
||||
nextPutAll: String lf
|
||||
].
|
||||
sanitized := sanitized contents.
|
||||
sanitized := sanitized copyReplaceAll: 'type=' with: 'type: '.
|
||||
sanitized := sanitized copyReplaceAll: 'value=' with: 'value: '.
|
||||
id := (altString lines first) allButFirst trimmed.
|
||||
footnoteData := { 'id' -> id } asDictionary.
|
||||
footnoteData addAll: (MiniDocs yamlToJson: sanitized trimmed).
|
||||
^ footnoteData
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> pubPubFootnoteRawLinks [
|
||||
^ self selectPubPubLinksWithSize: 2
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> pubPubFootnotesLinesRangeFor: contentSection [
|
||||
| beginningLine endingLine |
|
||||
beginningLine := contentSection lines size + 1.
|
||||
contentSection lines doWithIndex: [:line :i |
|
||||
((line includesSubstring: '::: {.pub-notes}') or: [line includesSubstring: '::: pub-notes'])
|
||||
ifTrue: [ beginningLine := i ].
|
||||
(i > beginningLine and: [ line beginsWith: ':::' ])
|
||||
ifTrue: [
|
||||
endingLine := i.
|
||||
^ {beginningLine . endingLine}
|
||||
]
|
||||
]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> pubPubFootnotesLinesRangeForBody [
|
||||
^ self pubPubFootnotesLinesRangeFor: self body
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> pubPubFootnotesLinesRangeForContents [
|
||||
^ self pubPubFootnotesLinesRangeFor: self contents
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> pubPubFootnotesText [
|
||||
| footnotesLines output |
|
||||
footnotesLines := self contents lines
|
||||
copyFrom: self pubPubFootnotesLinesRangeForContents first + 3
|
||||
to: self pubPubFootnotesLinesRangeForContents second - 1.
|
||||
output := '' writeStream.
|
||||
footnotesLines do: [:line |
|
||||
output
|
||||
nextPutAll: line;
|
||||
nextPutAll: String crlf.
|
||||
].
|
||||
^ output contents allButLast
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> pubPubImageLinks [
|
||||
^ self selectPubPubLinksWithSize: 3
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> pubPubImagesToMarkdeep [
|
||||
| sanitized parsedLinks |
|
||||
|
||||
parsedLinks := self pubPubImageLinks.
|
||||
parsedLinks ifEmpty: [ ^self ].
|
||||
sanitized := self body.
|
||||
parsedLinks do: [:link |
|
||||
sanitized := sanitized copyReplaceAll: '{', link third, '}' with: ''
|
||||
].
|
||||
self body: sanitized
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> pubPubRawLinks [
|
||||
| parser |
|
||||
parser := PubPubGrammar2 new document.
|
||||
^ (parser parse: self body)
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> reformatPubPubFootnotes [
|
||||
| footnotesLines footnotesIDs toReplace response |
|
||||
(self = self pubPubFootnotesLinesRangeForContents)
|
||||
ifTrue: [^self].
|
||||
footnotesLines := self contents lines
|
||||
copyFrom: self pubPubFootnotesLinesRangeForContents first
|
||||
to: self pubPubFootnotesLinesRangeForContents second.
|
||||
footnotesIDs := self replacePubPubFootnotesIdentifiers.
|
||||
toReplace := footnotesLines select: [:line |
|
||||
(line includesSubstring: ' [[]{.pub-note-content-component}]{#fn-')
|
||||
].
|
||||
toReplace doWithIndex: [:replacement :i | | index |
|
||||
index := footnotesLines indexOf: replacement.
|
||||
footnotesLines at: index put: '[^', (footnotesIDs at: i),']: '
|
||||
].
|
||||
response := '' writeStream.
|
||||
footnotesIDs allButLast doWithIndex: [:footnote :i |
|
||||
response
|
||||
nextPutAll:
|
||||
(self
|
||||
converPubPubFootnoteBetween: footnote
|
||||
and: (footnotesIDs at: i + 1)
|
||||
in: footnotesLines)
|
||||
].
|
||||
^ response contents
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> removeAlternativeImagesArray [
|
||||
| replacements |
|
||||
self body ifNil: [^ self].
|
||||
replacements := self selectPubPubLinksWithSize: 3.
|
||||
replacements ifEmpty: [^self].
|
||||
replacements do: [:replacement |
|
||||
self body:
|
||||
(self body copyReplaceAll: replacement third with: '' )
|
||||
].
|
||||
self body: (self body copyReplaceAll: '{srcset=}' with: '').
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> removeAutoGeneratedFileNotice [
|
||||
| autoGeneratedNotice |
|
||||
autoGeneratedNotice := '**Notice:** This file is an auto-generated download and, as such, might
|
||||
include minor display or rendering errors. For the version of record,
|
||||
please visit the HTML version or download the PDF.
|
||||
|
||||
------------------------------------------------------------------------'.
|
||||
self body: (self body copyReplaceAll: autoGeneratedNotice with: '')
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> removeCCByLicenseDiv [
|
||||
| licenseDiv|
|
||||
licenseDiv := '
|
||||
<div>
|
||||
|
||||
**License:** [Creative Commons Attribution 4.0 International License
|
||||
(CC-BY 4.0)](https://creativecommons.org/licenses/by/4.0/)
|
||||
|
||||
</div>'.
|
||||
self body: (self body copyReplaceAll: licenseDiv with: '')
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> renamePubPubFootnotes [
|
||||
| reformated bodyLines beforeFootnotes afterFootnotesRaw afterFootnotes newBodyLines response |
|
||||
reformated := self reformatPubPubFootnotes.
|
||||
(self pubPubFootnotesLinesRangeForBody class = Markdeep) ifTrue: [ ^self ].
|
||||
bodyLines := self body lines.
|
||||
beforeFootnotes := bodyLines copyFrom: 1 to: self pubPubFootnotesLinesRangeForBody first .
|
||||
afterFootnotesRaw := bodyLines copyFrom: self pubPubFootnotesLinesRangeForBody second to: bodyLines size.
|
||||
afterFootnotes := OrderedCollection new.
|
||||
afterFootnotesRaw do:[:line |
|
||||
(line beginsWith: ':::')
|
||||
ifTrue: [
|
||||
afterFootnotes
|
||||
add: (line copyReplaceAll: ':::' with: '<!--@div-closer ::: -->').
|
||||
]
|
||||
].
|
||||
newBodyLines :=
|
||||
(beforeFootnotes copyWithAll:
|
||||
(#('# Footnotes' '')
|
||||
copyWithAll:(reformated lines
|
||||
copyWithAll: afterFootnotes))).
|
||||
response := '' writeStream.
|
||||
newBodyLines do: [:line |
|
||||
response nextPutAll: line, String lf
|
||||
].
|
||||
self body: response contents.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> replaceBackslashBreaklines [
|
||||
self bodyReplaceAll: '\
|
||||
' with: '<br>
|
||||
'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> replaceEscapedCharacters [
|
||||
self
|
||||
title: (self title copyReplaceAll: '\#' with: '#');
|
||||
body: (self body copyReplaceAll: '\#' with: '#');
|
||||
body: (self body copyReplaceAll: '\[' with: '[');
|
||||
body: (self body copyReplaceAll: '\]' with: ']');
|
||||
body: (self body copyReplaceAll: '\*' with: '*')
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> replacePubPubFootnotesIdentifiers [
|
||||
| footnotes sanitized parsedLinks linkIdentifiers |
|
||||
footnotes := OrderedDictionary new.
|
||||
parsedLinks := self pubPubFootnoteRawLinks.
|
||||
parsedLinks ifEmpty: [ ^self ].
|
||||
sanitized := self body.
|
||||
linkIdentifiers := OrderedCollection new.
|
||||
parsedLinks do: [:link | | id currentLinkText |
|
||||
id := (link second splitOn: '.footnote') first trimmed.
|
||||
linkIdentifiers add: id.
|
||||
currentLinkText := '[', link first, ']{#',link second,'}'.
|
||||
sanitized := sanitized copyReplaceAll: currentLinkText with: '[^', id, ']'
|
||||
].
|
||||
self body: sanitized.
|
||||
^ linkIdentifiers
|
||||
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> selectPubPubLinksWithSize: naturalNumber [
|
||||
^ self pubPubRawLinks select: [ :each | each size = naturalNumber ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdeep >> tail [
|
||||
"I enable the document tail, which, in turn, enables a Markdeep document"
|
||||
@ -291,7 +649,7 @@ Markdeep >> tail: anObject [
|
||||
{ #category : #accessing }
|
||||
Markdeep >> title [
|
||||
|
||||
^ title
|
||||
^ title
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
|
@ -6,12 +6,13 @@ particularly the ones provided by Pandoc and/or Lunamark.
|
||||
"
|
||||
Class {
|
||||
#name : #Markdown,
|
||||
#superclass : #Object,
|
||||
#superclass : #MarkupFile,
|
||||
#instVars : [
|
||||
'contents',
|
||||
'file'
|
||||
'metadata',
|
||||
'body',
|
||||
'title'
|
||||
],
|
||||
#category : #MiniDocs
|
||||
#category : #'MiniDocs-Core'
|
||||
}
|
||||
|
||||
{ #category : #'instance creation' }
|
||||
@ -19,25 +20,43 @@ Markdown class >> fromFile: aFileReference [
|
||||
^ self new fromFile: aFileReference
|
||||
]
|
||||
|
||||
{ #category : #utilities }
|
||||
Markdown class >> yamlMetadataDelimiter [
|
||||
^ '---'
|
||||
{ #category : #accessing }
|
||||
Markdown >> asMarkdeep [
|
||||
^ Markdeep new
|
||||
body: self body;
|
||||
commentYAMLMetadata
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> asMarkdownTiddler [
|
||||
^ Tiddler new
|
||||
title: self title;
|
||||
text: self contents;
|
||||
type: 'text/x-markdown';
|
||||
created: Tiddler nowLocal.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> body [
|
||||
^ body
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> body: aString [
|
||||
body := aString
|
||||
]
|
||||
|
||||
{ #category : #operation }
|
||||
Markdown >> commentYAMLMetadata [
|
||||
| newContents |
|
||||
self detectYAMLMetadata ifFalse: [ ^ self ].
|
||||
self contents detectYAMLMetadata ifFalse: [ ^ self ].
|
||||
newContents := '' writeStream.
|
||||
newContents nextPutAll: '<!--@yaml:'; crlf.
|
||||
newContents nextPutAll: self extractYAMLMetadata.
|
||||
newContents nextPutAll: String cr.
|
||||
newContents nextPutAll: '-->'; crlf.
|
||||
newContents nextPutAll: '<!--@yaml'; lf.
|
||||
newContents nextPutAll: self yamlMetadataString.
|
||||
newContents nextPutAll: '-->'; lf; lf.
|
||||
(self lines copyFrom: self yamlMetadataClosingLineNumber + 2 to: self lines size) do: [ :line |
|
||||
newContents nextPutAll: line; crlf ].
|
||||
self contents: newContents contents.
|
||||
^ self contents
|
||||
newContents nextPutAll: line; lf ].
|
||||
^ newContents contents.
|
||||
]
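The intent of this rewrite is that a '---'-delimited YAML header ends up wrapped in an '<!--@yaml ... -->' HTML comment so Markdeep will not render it. A hedged sketch of that round trip (whether it runs exactly like this depends on the String>>detectYAMLMetadata and yamlMetadataString helpers, which are not shown in this hunk):

"Comment out the YAML header of a small Markdown document."
| doc |
doc := Markdown new.
doc contents: '---
title: Demo
lang: en
---
Body text.'.
doc commentYAMLMetadata.
"Expected shape of the result: 'title: Demo' and 'lang: en' survive,
but between '<!--@yaml' and '-->' markers instead of between '---' delimiters,
followed by the untouched body lines."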
|
||||
|
||||
{ #category : #utilities }
|
||||
@ -47,22 +66,49 @@ Markdown >> containsYAMLMetadataClosing [
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> contents [
|
||||
^ contents
|
||||
| response metadataString |
|
||||
response := WriteStream on: ''.
|
||||
metadataString := self metadataAsYAML
|
||||
ifEmpty: [ '' ]
|
||||
ifNotEmpty: [ '---', String cr, self metadataAsYAML, String cr, '---', String cr ].
|
||||
response
|
||||
nextPutAll: metadataString;
|
||||
nextPutAll: (self body ifNil: [ '' ]).
|
||||
^ response contents withInternetLineEndings
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> contents: anObject [
|
||||
contents := anObject
|
||||
Markdown >> contents: aString [
|
||||
body := aString
|
||||
]
|
||||
|
||||
{ #category : #utilities }
|
||||
Markdown >> detectYAMLMetadata [
|
||||
| lines |
|
||||
lines := self lines.
|
||||
^ self startsWithYAMLMetadataDelimiter
|
||||
and: [ lines allButFirst
|
||||
detect: [ :currentLine | currentLine beginsWith: self class yamlMetadataDelimiter ]
|
||||
ifFound: [ ^ true ] ifNone: [ ^ false ] ]
|
||||
{ #category : #accessing }
|
||||
Markdown >> documentTree [
|
||||
| parser|
|
||||
self contents ifNil: [^ nil].
|
||||
parser := PPCommonMarkBlockParser new parse: self body.
|
||||
^ parser accept: CMBlockVisitor new
|
||||
]
|
||||
|
||||
{ #category : #persistence }
|
||||
Markdown >> exportAsFile [
|
||||
| newFile |
|
||||
|
||||
newFile := (self file fullName ) asFileReference.
|
||||
^ self notifyExportAsFileOn: newFile.
|
||||
]
|
||||
|
||||
{ #category : #persistence }
|
||||
Markdown >> exportAsFileOn: aFileReference [
|
||||
aFileReference ensureDelete.
|
||||
aFileReference exists ifFalse: [ aFileReference ensureCreateFile ].
|
||||
aFileReference writeStreamDo: [ :stream |
|
||||
stream nextPutAll: self contents withInternetLineEndings ].
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> exportAsHTML [
|
||||
^ Pandoc markdownToHtml: self file
|
||||
]
|
||||
|
||||
{ #category : #operation }
|
||||
@ -90,26 +136,13 @@ Markdown >> exportMetadataAsJson [
|
||||
Markdown >> exportMetadataAsYaml [
|
||||
| exportedFile |
|
||||
exportedFile := FileLocator temp / 'metadata.yaml'.
|
||||
MarkupFile exportAsFileOn: exportedFile containing: self yamlMetadataAsString.
|
||||
MarkupFile exportAsFileOn: exportedFile containing: self yamlMetadataStringWithDelimiters.
|
||||
^ exportedFile
|
||||
]
|
||||
|
||||
{ #category : #operation }
|
||||
Markdown >> extractYAMLMetadata [
|
||||
| output yamlLines |
|
||||
self detectYAMLMetadata ifFalse: [ ^ nil ].
|
||||
yamlLines := self lines copyFrom: 2 to: self yamlMetadataClosingLineNumber - 1.
|
||||
output := '' writeStream.
|
||||
yamlLines do: [ :line |
|
||||
output
|
||||
nextPutAll: line;
|
||||
nextPut: Character cr. ].
|
||||
^ output contents
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> file [
|
||||
^ file
|
||||
^ file ifNil: [ file := FileLocator temp / 'temporalMarkdeep.md' ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
@ -120,8 +153,24 @@ Markdown >> file: aFileReference [
|
||||
|
||||
{ #category : #'instance creation' }
|
||||
Markdown >> fromFile: aFileReference [
|
||||
self contents: aFileReference contents.
|
||||
self file: aFileReference
|
||||
self fromString: aFileReference contents.
|
||||
self file: aFileReference.
|
||||
]
|
||||
|
||||
{ #category : #'instance creation' }
|
||||
Markdown >> fromString: markdownString [
|
||||
| yamlMetadataRaw bodyTemp |
|
||||
yamlMetadataRaw := (YamlHeaderParser parse: markdownString).
|
||||
bodyTemp := '' writeStream.
|
||||
(yamlMetadataRaw removeKey: 'body') do: [:paragraph |
|
||||
bodyTemp nextPutAll: paragraph; cr; cr
|
||||
].
|
||||
self body: bodyTemp contents withInternetLineEndings.
|
||||
(yamlMetadataRaw sanitizeMultilineValuesWith: markdownString)
|
||||
ifNotNil: [
|
||||
self metadata
|
||||
ifEmpty: [ self metadata: yamlMetadataRaw ]
|
||||
ifNotEmpty: [ self metadata at: 'hedgeDoc' put: yamlMetadataRaw ]].
|
||||
]
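A usage sketch of this parsing path, assuming YamlHeaderParser answers an ordered dictionary whose 'body' key holds the non-YAML paragraphs, as the code above implies:

"Build a Markdown object from a raw string with a YAML header."
| doc |
doc := Markdown new fromString: '---
title: Demo
author: Someone
---

First paragraph.'.
doc metadata at: 'title'. "==> 'Demo', per the parsed header"
doc body. "==> the paragraphs, normalized to Internet line endings"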
|
||||
|
||||
{ #category : #accessing }
|
||||
@ -132,50 +181,67 @@ Markdown >> gtTextFor: aView [
|
||||
text: [ self contents ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> headerAsTitle [
|
||||
| headerNode |
|
||||
headerNode := self documentTree children
|
||||
detect: [ :node | node className = 'PPCMHeader' and: [ node level = 1 ] ] ifNone: [ ^ 'Untitled' ].
|
||||
^ headerNode text
|
||||
]
|
||||
|
||||
{ #category : #utilities }
|
||||
Markdown >> lines [
|
||||
self file ifNotNil: [^ self file contents lines ].
|
||||
^ self contents lines.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> metadata [
|
||||
| rawMeta |
|
||||
rawMeta := PPYAMLGrammar new parse: self extractYAMLMetadata.
|
||||
rawMeta associationsDo: [ :assoc |
|
||||
assoc value = 'false' ifTrue: [ assoc value: false ].
|
||||
assoc value = 'true' ifTrue: [ assoc value: true ] ].
|
||||
^ rawMeta
|
||||
|
||||
^ metadata ifNil: [ metadata := Dictionary new].
|
||||
|
||||
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> metadata: rawMeta [
|
||||
|
||||
metadata := rawMeta
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> metadataAsYAML [
|
||||
self metadata isEmptyOrNil ifTrue: [ ^ '' ].
|
||||
^ (YQ jsonToYaml: self metadata) accentedCharactersCorrection
|
||||
]
|
||||
|
||||
{ #category : #persistence }
|
||||
Markdown >> notifyExportAsFileOn: aFileReference [
|
||||
self exportAsFileOn: aFileReference.
|
||||
self inform: 'Exported as: ', String cr, aFileReference fullName.
|
||||
^ aFileReference
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> options [
|
||||
^ self metadata at: 'options' ifAbsentPut: [ self defaultOptions]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> printOn: aStream [
|
||||
| response |
|
||||
super printOn: aStream.
|
||||
response := self title ifNil: [ 'Untitled' ].
|
||||
aStream
|
||||
nextPutAll: '( ', (self metadata at: 'title'), ' )'
|
||||
nextPutAll: '( ', response , ' )'
|
||||
]
|
||||
|
||||
{ #category : #utilities }
|
||||
Markdown >> startsWithYAMLMetadataDelimiter [
|
||||
^ self lines first beginsWith: self class yamlMetadataDelimiter
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> title [
|
||||
^ title ifNil: [ title:= self headerAsTitle ]
|
||||
]
|
||||
|
||||
{ #category : #utilities }
|
||||
Markdown >> yamlMetadataAsString [
|
||||
| output |
|
||||
self extractYAMLMetadata ifNil: [ ^ nil ].
|
||||
output := String new writeStream.
|
||||
output nextPutAll: self class yamlMetadataDelimiter; cr.
|
||||
output nextPutAll: self extractYAMLMetadata.
|
||||
output nextPutAll: self class yamlMetadataDelimiter; cr.
|
||||
^ output contents.
|
||||
]
|
||||
|
||||
{ #category : #utilities }
|
||||
Markdown >> yamlMetadataClosingLineNumber [
|
||||
"I return the line where the closing of the YAML metadata occurs or 0 if no closing is found."
|
||||
self startsWithYAMLMetadataDelimiter ifFalse: [ ^ self ].
|
||||
self lines allButFirst doWithIndex: [ :currentLine :i |
|
||||
(currentLine beginsWith: self class yamlMetadataDelimiter) ifTrue: [ ^ i + 1 ]]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> title: aString [
|
||||
title := aString
|
||||
]
|
||||
|
@ -7,17 +7,20 @@ Class {
|
||||
#instVars : [
|
||||
'file'
|
||||
],
|
||||
#category : #MiniDocs
|
||||
#category : #'MiniDocs-Core'
|
||||
}
|
||||
|
||||
{ #category : #persistence }
|
||||
MarkupFile class >> exportAsFileOn: aFileReferenceOrFileName containing: text [
|
||||
| file |
|
||||
MarkupFile class >> exportAsFileOn: aFileReferenceOrFileName containing: anObject [
|
||||
| file preprocessed |
|
||||
file := aFileReferenceOrFileName asFileReference.
|
||||
file ensureDelete.
|
||||
file exists ifFalse: [ file ensureCreateFile ].
|
||||
(#('String' 'ByteString' 'WideString') includes: anObject className )
|
||||
ifTrue: [ preprocessed := anObject ]
|
||||
ifFalse: [preprocessed := STON toStringPretty: anObject ].
|
||||
file writeStreamDo: [ :stream |
|
||||
stream nextPutAll: text withUnixLineEndings].
|
||||
stream nextPutAll: preprocessed ].
|
||||
self inform: 'Exported as: ', String cr, file fullName.
|
||||
^ file
|
||||
]
|
||||
|
@ -1,23 +1,69 @@
|
||||
"
|
||||
MiniDocs is a project that includes several minimalistic documentation tools used by the [Grafoscopio](https://mutabit.com/grafoscopio/en.html) community, starting with [Markdeep](https://casual-effects.com/markdeep/) and its integrations with [Lepiter](https://lepiter.io/feenk/introducing-lepiter--knowledge-management--e2p6apqsz5npq7m4xte0kkywn/) .
|
||||
"
|
||||
Class {
|
||||
#name : #MiniDocs,
|
||||
#superclass : #Object,
|
||||
#category : #MiniDocs
|
||||
#category : #'MiniDocs-Core'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> altKeys [
|
||||
^ BlAlternativeCombination new
|
||||
combination: (BlSingleKeyCombination key:BlKeyboardKey altLeft)
|
||||
or: (BlSingleKeyCombination key:BlKeyboardKey altRight)
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> altShiftLeftCombo [
|
||||
^ BlCompulsoryCombination new
|
||||
with: self altKeys;
|
||||
with: self shiftKeys;
|
||||
with: (BlSingleKeyCombination key: BlKeyboardKey arrowLeft);
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> altShiftRightCombo [
|
||||
^ BlCompulsoryCombination new
|
||||
with: self altKeys;
|
||||
with: self shiftKeys;
|
||||
with: (BlSingleKeyCombination key: BlKeyboardKey arrowRight);
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> appFolder [
|
||||
| tempFolder |
|
||||
tempFolder := FileLocator userData / 'Mutabit' / 'MiniDocs'.
|
||||
tempFolder := ExoRepo userDataFolder / 'Mutabit' / 'MiniDocs'.
|
||||
tempFolder exists ifFalse: [ tempFolder ensureCreateDirectory ].
|
||||
^ tempFolder
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> exportAsSton: anObject on: aFileReference [
|
||||
MarkupFile exportAsFileOn: aFileReference containing: (STON toStringPretty: anObject) withInternetLineEndings
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> importGrafoscopioFile: aFileReference [
|
||||
|
||||
^ (STON fromString: aFileReference) first parent
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> initialize [
|
||||
self keyboardShortcutsRemapping
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> installYamlToJson [
|
||||
"For the moment, only Gnu/Linux and Mac are supported.
|
||||
"For the moment, only Gnu/Linux and Mac are supported.
|
||||
IMPORTANT: Nimble, Nim's package manager should be installed, as this process doesn't verify its proper installation."
|
||||
self yamlToJsonBinary exists ifTrue: [ ^ MiniDocs appFolder ].
|
||||
Nimble install: 'commandeer'.
|
||||
Nimble
|
||||
install: 'yaml';
|
||||
install: 'commandeer'.
|
||||
OSSUnixSubprocess new
|
||||
command: 'nim';
|
||||
arguments: {'c'. self yamlToJsonSourceCode fullName};
|
||||
@ -27,8 +73,31 @@ MiniDocs class >> installYamlToJson [
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> yamlToJson: yamlString [
|
||||
MiniDocs class >> keyboardShortcutsRemapping [
|
||||
| primaryNewLine secondaryNewLine |
|
||||
primaryNewLine := LeSnippetElement keyboardShortcuts at: #NewLine.
|
||||
secondaryNewLine := LeSnippetElement keyboardShortcuts at: #SecondaryNewLine.
|
||||
^ LeSnippetElement keyboardShortcuts
|
||||
at: #NewLine put: secondaryNewLine;
|
||||
at: #SecondaryNewLine put: primaryNewLine;
|
||||
at: #IndentSnippet put: self altShiftRightCombo;
|
||||
at: #UnindentSnippet put: self altShiftLeftCombo;
|
||||
yourself
|
||||
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> shiftKeys [
|
||||
^ BlAlternativeCombination new
|
||||
combination: (BlSingleKeyCombination key:BlKeyboardKey shiftLeft)
|
||||
or: (BlSingleKeyCombination key:BlKeyboardKey shiftRight)
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> yamlToJson: yamlString [
|
||||
"This method uses a external binary written in Nim, as the native Pharo parser for YAML, written in PetitParser,
|
||||
was less robust and unable to parse correctly the same strings as the external one."
|
||||
yamlString ifNil: [ ^ Dictionary new ].
|
||||
self yamlToJsonBinary exists ifFalse: [ self installYamlToJson ].
|
||||
|
||||
OSSUnixSubprocess new
|
||||
@ -36,7 +105,7 @@ MiniDocs class >> yamlToJson: yamlString [
|
||||
arguments: {yamlString};
|
||||
redirectStdout;
|
||||
runAndWaitOnExitDo: [ :process :outString |
|
||||
^ (STONJSON fromString: outString allButFirst) first
|
||||
^ (STONJSON fromString: outString allButFirst accentedCharactersCorrection) first
|
||||
]
|
||||
]
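A usage sketch for the external bridge, assuming the Nim binary has already been built by installYamlToJson (otherwise the first call triggers that installation):

"Parse a small YAML document through the external yamlToJson binary."
MiniDocs yamlToJson: 'title: Demo
keywords:
  - pharo
  - minidocs'.
"==> a Dictionary-like object with 'title' -> 'Demo' and 'keywords' -> an array of two strings"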
|
||||
|
||||
@ -49,3 +118,25 @@ MiniDocs class >> yamlToJsonBinary [
|
||||
MiniDocs class >> yamlToJsonSourceCode [
|
||||
^ FileLocator image parent / 'pharo-local/iceberg/Offray/MiniDocs/src/yamlToJson.nim'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs >> installNimFileExporter [
|
||||
| folder |
|
||||
folder := (MiniDocs appFolder / 'scripts') ensureCreateDirectory.
|
||||
|
||||
ZnClient new
|
||||
url: 'https://mutabit.com/repos.fossil/mutabit/uv/wiki/scripts/stringAsFileInto';
|
||||
downloadTo: folder / 'stringAsFileInto'.
|
||||
|
||||
ZnClient new
|
||||
url: 'https://mutabit.com/repos.fossil/mutabit/doc/trunk/wiki/scripts/stringAsFileInto.nim';
|
||||
downloadTo: folder / 'stringAsFileInto.nim'.
|
||||
|
||||
OSSUnixSubprocess new
|
||||
command: 'chmod';
|
||||
arguments: { '+x' . (folder / 'stringAsFileInto') fullName };
|
||||
workingDirectory: folder fullName;
|
||||
redirectStdout;
|
||||
redirectStderr;
|
||||
runAndWaitOnExitDo: [ :process :outString | ^ outString ]
|
||||
]
|
||||
|
src/MiniDocs/MiniDocsServer.class.st (new file, 74 lines)
@@ -0,0 +1,74 @@
|
||||
Class {
|
||||
#name : #MiniDocsServer,
|
||||
#superclass : #TLWebserver,
|
||||
#instVars : [
|
||||
'storage'
|
||||
],
|
||||
#classInstVars : [
|
||||
'singleton'
|
||||
],
|
||||
#category : #'MiniDocs-Core'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocsServer class >> build [
|
||||
TLRESTAPIBuilder buildAPI.
|
||||
self start
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocsServer class >> defaultConfiguration [
|
||||
"Override to set more default values"
|
||||
^ {
|
||||
#port -> 1701
|
||||
}
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocsServer class >> listLepiterDocs: aRequest [
|
||||
<REST_API: 'GET' pattern: 'lepiter'>
|
||||
^ 'A list of Markdeep exported Lepiter docs will appear soon...'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocsServer class >> restart [
|
||||
Teapot stopAll.
|
||||
self build.
|
||||
^ self start
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocsServer class >> singleton [
|
||||
^ singleton ifNil: [ singleton := MiniDocsServer teapot ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocsServer >> addStorage: anObject [
|
||||
self storage add: anObject.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocsServer >> initRoutes [
|
||||
self storage: FileLocator documents / 'lepiter' / 'default'.
|
||||
self teapot
|
||||
serveStatic: '/lepiter/doc' from: self storage fullName.
|
||||
self teapot
|
||||
GET: '/lepiter' -> 'A list of Markdeep exported Lepiter docs will appear soon...'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocsServer >> start [
|
||||
self class defaultPort: 1701.
|
||||
self initRoutes.
|
||||
super start.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocsServer >> storage [
|
||||
^ storage
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocsServer >> storage: aFoldersOrderedCollection [
|
||||
storage := aFoldersOrderedCollection
|
||||
]
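A minimal start-up sketch; build wires the REST routes through TLRESTAPIBuilder and then starts the Teapot-backed server on port 1701, as defined above:

"Build the API and serve http://localhost:1701/lepiter ."
MiniDocsServer build.
"After changing routes or storage, recycle the server:"
MiniDocsServer restart.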
|
@ -1,53 +0,0 @@
|
||||
"
|
||||
I'm run an implementation of the [Nano ID](https://github.com/ai/nanoid) tiny, secure URL-friendly unique string ID generator via its [Nim implementation](https://github.com/icyphox/nanoid.nim).
|
||||
|
||||
The Nim script has hard coded:
|
||||
|
||||
* a [base 58 encoding](https://medium.com/concerning-pharo/understanding-base58-encoding-23e673e37ff6) alphabet to avoid similar looking letter and the use of non-alphanumeric characters.
|
||||
* a 12 characters length output, which gives [a pretty low probability collision](https://zelark.github.io/nano-id-cc/) for the previous alphabet:
|
||||
~616 years needed, in order to have a 1% probability of at least one collision at a speed of 1000 IDs per hour.
|
||||
This is more than enough for our unique IDs applications, mostly in the documentation context,
|
||||
which consists of hand crafted and/or programmatically produced notes ,
|
||||
for example in data narratives, book(lets) and TiddlyWiki tiddlers of tens or hundreds of notes at most,
|
||||
unevenly produced between hours, days and/or weeks..
|
||||
|
||||
"
|
||||
Class {
|
||||
#name : #NanoID,
|
||||
#superclass : #Object,
|
||||
#category : #'MiniDocs-MiniDocs'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
NanoID class >> binaryFile [
|
||||
^ MiniDocs appFolder / self scriptSourceCode basenameWithoutExtension
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
NanoID class >> generate [
|
||||
self binaryFile exists ifFalse: [ NanoID install].
|
||||
OSSUnixSubprocess new
|
||||
command: self binaryFile fullName;
|
||||
redirectStdout;
|
||||
redirectStdout;
|
||||
runAndWaitOnExitDo: [ :process :outString | ^ outString copyWithoutAll: (Character lf asString) ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
NanoID class >> install [
|
||||
"For the moment, only Gnu/Linux and Mac are supported.
|
||||
IMPORTANT: Nimble, Nim's package manager should be installed, as this process doesn't verify its proper installation."
|
||||
self binaryFile exists ifTrue: [ ^ MiniDocs appFolder ].
|
||||
Nimble install: 'nanoid'.
|
||||
OSSUnixSubprocess new
|
||||
command: 'nim';
|
||||
arguments: {'c'. self scriptSourceCode fullName};
|
||||
runAndWaitOnExitDo: [ :process :outString |
|
||||
(self scriptSourceCode parent / (self scriptSourceCode) basenameWithoutExtension) moveTo: MiniDocs appFolder asFileReference.
|
||||
^ MiniDocs appFolder ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
NanoID class >> scriptSourceCode [
|
||||
^ FileLocator image parent / 'pharo-local/iceberg/Offray/MiniDocs/src/nanoIdGen.nim'
|
||||
]
|
@ -1,66 +0,0 @@
|
||||
"
|
||||
I'm a helper class modelling the common uses of the Nim's [Nimble package manager](https://github.com/nim-lang/nimble).
|
||||
This was evolved in the context of the [Grafoscopio](mutabit.com/grafoscopio/en.html) community exploration and prototyping of interactive documentation.
|
||||
"
|
||||
Class {
|
||||
#name : #Nimble,
|
||||
#superclass : #Object,
|
||||
#category : #'MiniDocs-MiniDocs'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
Nimble class >> detect: packageName [
|
||||
^ self installed
|
||||
detect: [ :dependency | dependency beginsWith: packageName ]
|
||||
ifFound: [ ^ true ]
|
||||
ifNone: [ ^ false ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Nimble class >> install: packageName [
|
||||
(self detect: packageName) ifTrue: [ ^ self ].
|
||||
self installPackagesList.
|
||||
OSSUnixSubprocess new
|
||||
command: 'nimble';
|
||||
arguments: {'install'.
|
||||
packageName};
|
||||
redirectStdout;
|
||||
runAndWaitOnExitDo: [ :process :outString | ^ outString ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Nimble class >> installPackagesList [
|
||||
|
||||
(FileLocator home / '.nimble' / 'packages_official.json') exists
|
||||
ifTrue: [ ^ self ].
|
||||
OSSUnixSubprocess new
|
||||
command: 'nimble';
|
||||
arguments: #('refresh');
|
||||
redirectStdout;
|
||||
runAndWaitOnExitDo: [ :process :outString | ^ outString ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Nimble class >> installed [
|
||||
| installed |
|
||||
OSSUnixSubprocess new
|
||||
command: 'nimble';
|
||||
arguments: #('list' '--installed');
|
||||
redirectStdout;
|
||||
redirectStderr;
|
||||
runAndWaitOnExitDo: [ :process :outString :errString |
|
||||
process isSuccess
|
||||
ifTrue: [ ^ outString lines ];
|
||||
ifFalse: [ ^ nil ]
|
||||
]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Nimble class >> version [
|
||||
|
||||
OSSUnixSubprocess new
|
||||
command: 'nimble';
|
||||
arguments: #('--version');
|
||||
redirectStdout;
|
||||
runAndWaitOnExitDo: [ :process :outString | ^ outString ]
|
||||
]
|
src/MiniDocs/OrderedDictionary.extension.st (new file, 100 lines)
@@ -0,0 +1,100 @@
|
||||
Extension { #name : #OrderedDictionary }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
OrderedDictionary >> addErrata: noteString [
|
||||
self errata add: noteString
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
OrderedDictionary >> asLepiterSnippet [
|
||||
| response |
|
||||
self at: 'className' ifAbsent: [ ^ nil ].
|
||||
response := (self at: 'className') asClass new.
|
||||
[ response fromDictionary: self ] onErrorDo: [ ].
|
||||
[ response fromString: (self at: 'content') ] onErrorDo: [ ].
|
||||
self at: 'origin' ifPresent: [ response metadata at: 'origin' put: (self at: 'origin') ].
|
||||
self at: 'errata' ifPresent: [ response metadata at: 'errata' put: (self at: 'errata') ].
|
||||
^ response
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
OrderedDictionary >> asYAML [
|
||||
^ (YQ jsonToYaml: self) accentedCharactersCorrection.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
OrderedDictionary >> errata [
|
||||
^ self at: 'errata' ifAbsentPut: [ OrderedCollection new]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
OrderedDictionary >> redefineTimestampsBefore: dateAndTime [
|
||||
self at: 'modified' put: dateAndTime asDateAndTime.
|
||||
self at: 'created' put: dateAndTime asDateAndTime - 1 second.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
OrderedDictionary >> replaceNilsWith: aCharacter [
|
||||
self associationsDo: [:each |
|
||||
each value ifNil: [self at: each key put: aCharacter].
|
||||
each value isDictionary ifTrue: [each value replaceNilsWith: aCharacter].
|
||||
each value isArray ifTrue: [ | newArray|
|
||||
newArray := (each value asDataSeries replaceNilsWith: aCharacter) asArray.
|
||||
self at: each key put: newArray
|
||||
]
|
||||
]
|
||||
]
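A small sketch of the traversal above; simple string/nil values do not touch the DataFrame-dependent array branch:

"Replace nil values in place, recursing into nested dictionaries."
| record |
record := OrderedDictionary new.
record at: 'title' put: 'Demo'.
record at: 'subtitle' put: nil.
record replaceNilsWith: ''.
record at: 'subtitle'. "==> '' instead of nil"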
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
OrderedDictionary >> replaceWithUniqueNilsAndBooleansStartingAt: anInteger [
|
||||
| totalNils shortUID |
|
||||
totalNils := self flattened asDataSeries countNils.
|
||||
shortUID := [NanoID generate copyFrom: 1 to: 3].
|
||||
self associations doWithIndex: [:assoc :i | | subIndex |
|
||||
subIndex := anInteger asString, '-', i asString.
|
||||
assoc value
|
||||
ifNil: [ self at: assoc key put: 'nil-', subIndex ].
|
||||
assoc value isBoolean
|
||||
ifTrue: [ self at: assoc key put: assoc value asString, '-', subIndex ].
|
||||
assoc value isDictionary ifTrue: [assoc replaceWithUniqueNilsAndBooleansStartingAt: i].
|
||||
assoc value isArray
|
||||
ifTrue: [ self at: assoc key put: (assoc value replaceWithUniqueNilsAndBooleans)]
|
||||
]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
OrderedDictionary >> sanitizeMultilineValuesWith: aString [
|
||||
| toSanitize response |
|
||||
toSanitize := OrderedCollection new.
|
||||
response := OrderedCollection new.
|
||||
self keysAndValuesDo: [:k :v |
|
||||
(v isString and: [v lines size > 1])
|
||||
ifTrue: [
|
||||
aString lines
|
||||
detect: [:line | line includesSubstring: k ]
|
||||
ifFound: [:line | | sanitized|
|
||||
sanitized := (line withoutPrefix: k, ':'), String cr,
|
||||
v indentedWithExtraSpaces: 4.
|
||||
self at: k put: sanitized ]
|
||||
]
|
||||
].
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
OrderedDictionary >> treeView [
|
||||
| view |
|
||||
view := GtMondrian new.
|
||||
view nodes
|
||||
stencil: [ :x |
|
||||
BlElement new
|
||||
border: (BlBorder paint: Color black);
|
||||
geometry: BlEllipseGeometry new;
|
||||
layout: (BlLinearLayout new alignCenter);
|
||||
addChild: (BlTextElement text: (x asRopedText fontSize: 10)) ];
|
||||
with: (self flatCollectAsSet: #yourself) , self keys.
|
||||
view edges
|
||||
stencil: [ :x | BlLineElement new border: (BlBorder paint: (Color blue alpha: 0.5) width: 4) ];
|
||||
connect: self associations from: #key toAll: #value.
|
||||
view layout tree.
|
||||
^ view
|
||||
]
|
src/MiniDocs/Pandoc.class.st (new file, 162 lines)
@@ -0,0 +1,162 @@
|
||||
"
|
||||
I model the interaction between Pandoc and Grafoscopio.
|
||||
"
|
||||
Class {
|
||||
#name : #Pandoc,
|
||||
#superclass : #Object,
|
||||
#classInstVars : [
|
||||
'executable'
|
||||
],
|
||||
#category : #'MiniDocs-Core'
|
||||
}
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
Pandoc class >> convertString: aString from: inputFormat to: outputFormat [
|
||||
OSSUnixSubprocess new
|
||||
shellCommand: 'echo "', aString , '" | pandoc -f ', inputFormat,' -t ', outputFormat;
|
||||
redirectStdout;
|
||||
runAndWaitOnExitDo: [ :command :outString |
|
||||
^ outString
|
||||
].
|
||||
]
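A usage sketch; this shells out through OSSUnixSubprocess, so it assumes a Unix-like system with pandoc on the PATH:

"Convert a one-line Markdown string into HTML via the system pandoc."
Pandoc convertString: '# Hello MiniDocs' from: 'markdown' to: 'html'.
"==> an HTML string along the lines of <h1 id=hello-minidocs>Hello MiniDocs</h1>"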
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
Pandoc class >> downloadLuaFilters [
|
||||
self luaFilters do: [ :filter | | filterUrl |
|
||||
filterUrl := filter asUrl.
|
||||
(FileLocator temp asFileReference / (filterUrl segments last)) exists
|
||||
ifFalse: [
|
||||
ZnClient new
|
||||
url: filterUrl;
|
||||
downloadTo: FileLocator temp ] ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Pandoc class >> executable [
|
||||
^ executable ifNil: [ self executableLocation ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Pandoc class >> executable: aFileReference [
|
||||
executable := aFileReference
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Pandoc class >> executableLocation [
|
||||
| location |
|
||||
location := '/usr/bin/pandoc'.
|
||||
location asFileReference exists
|
||||
ifTrue: [ ^ location ]
|
||||
ifFalse: [ self definePandocExecutable ]
|
||||
]
|
||||
|
||||
{ #category : #utility }
|
||||
Pandoc class >> extractImagesInUnixFor: aFileReference withFilter: aLuaFilter [
|
||||
"I use Pandoc Lua scripting capabilities to extract al images links in aFileReference"
|
||||
|
||||
OSSUnixSubprocess new
|
||||
command: 'pandoc';
|
||||
arguments: {aFileReference fullName . '--lua-filter=',aLuaFilter fullName };
|
||||
redirectStdout;
|
||||
redirectStderr;
|
||||
runAndWaitOnExitDo: [ :process :outString :errString |
|
||||
process isSuccess
|
||||
ifTrue: [
|
||||
^ ((Soup fromString: outString) findAllTags: 'td') collect: [ :each | each next ] ]
|
||||
ifFalse: [
|
||||
"OSSUnixProcessExitStatus has a nice #printOn: "
|
||||
Transcript show: 'Command exit with error status: ', process exitStatusInterpreter printString; cr.
|
||||
Transcript show: 'Stderr contents: ', errString.
|
||||
]
|
||||
]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
Pandoc class >> htmlStringToMarkdown: aString [
	"Pipe an HTML string through the system pandoc and answer its Markdown rendering."
	OSSUnixSubprocess new
		shellCommand: 'echo "', aString , '" | pandoc -f html -t markdown';
		redirectStdout;
		runAndWaitOnExitDo: [ :command :outString |
			^ outString ].
]
|
||||
|
||||
{ #category : #converters }
|
||||
Pandoc class >> htmlToMarkdown: inputFile [
|
||||
|
||||
| outputFile |
|
||||
outputFile := FileLocator temp / 'body.md'.
|
||||
outputFile ensureDelete.
|
||||
outputFile ensureCreateFile.
|
||||
OSSUnixSubprocess new
|
||||
command: 'pandoc';
|
||||
arguments: {'-f'. 'html'. '-t'. 'markdown'. '--atx-headers'. inputFile fullName.
|
||||
'--output'. outputFile fullName };
|
||||
redirectStdout;
|
||||
redirectStderr;
|
||||
runAndWaitOnExitDo: [ :process :outString :errString |
|
||||
process isSuccess
|
||||
ifTrue: [ ^ outputFile contents ]
|
||||
ifFalse: [ ^inputFile contents ]
|
||||
]
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
Pandoc class >> listImagesFrom: aFileReference [
|
||||
"I provide a list of all images contained in aFile."
|
||||
|
||||
| filter commandString outputString |
|
||||
filter := FileLocator temp asFileReference / 'image-links.lua'.
|
||||
filter exists
|
||||
ifFalse: [ self downloadLuaFilters ].
|
||||
commandString := 'pandoc ' , aFileReference fullName
|
||||
, ' --lua-filter=' , filter fullName.
|
||||
^ self extractImagesInUnixFor: aFileReference withFilter: filter
|
||||
]
|
||||
|
||||
{ #category : #utility }
|
||||
Pandoc class >> luaFilters [
|
||||
"I define the location of set of scripts, that allows to change the default behaviour of Pandoc
|
||||
and/or the processing of supported markup languages.
|
||||
|
||||
For more information about Lua filters see:
|
||||
|
||||
https://pandoc.org/lua-filters.html
|
||||
"
|
||||
|
||||
| filters |
|
||||
filters := OrderedCollection new.
|
||||
filters
|
||||
add: 'http://mutabit.com/repos.fossil/dataweek/doc/tip/Artefactos/Scripts/image-links.lua'.
|
||||
^ filters
|
||||
]
|
||||
|
||||
{ #category : #converters }
|
||||
Pandoc class >> markdownToHtml: inputFile [
|
||||
|
||||
(Smalltalk os isUnix or: [ Smalltalk os isMacOS ]) ifTrue: [ ^ self markdownToHtmlOnUnix: inputFile ].
|
||||
Smalltalk os isWindows ifTrue: [ ^ self markdownToHtmlOnWindows: inputFile ].
|
||||
]
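Usage sketch for the dispatching converter above, assuming pandoc is installed; on Unix or macOS the HTML file is written next to the input file:

"Render a small Markdown file to standalone HTML."
| input |
input := FileLocator temp / 'demo.md'.
input writeStreamDo: [ :stream | stream nextPutAll: '# Demo' ].
Pandoc markdownToHtml: input.
"==> a FileReference to demo.html in the same temp folder"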
|
||||
|
||||
{ #category : #converters }
|
||||
Pandoc class >> markdownToHtmlOnUnix: inputFile [
|
||||
|
||||
| outputFile |
|
||||
|
||||
outputFile := inputFile parent / (inputFile basenameWithoutExtension , '.html').
|
||||
outputFile ensureDelete.
|
||||
outputFile ensureCreateFile.
|
||||
GtSubprocessWithInMemoryOutput new
|
||||
shellCommand: 'pandoc -f markdown+startnum+task_lists --standalone -t html ', inputFile fullName, ' --output ', outputFile fullName;
|
||||
runAndWait;
|
||||
stdout.
|
||||
^ outputFile.
|
||||
]
|
||||
|
||||
{ #category : #converters }
|
||||
Pandoc class >> markdownToHtmlOnWindows: inputFile [
|
||||
|
||||
"ToDo: This command still doesn't receive any arguments."
|
||||
^ (LibC resultOfCommand: 'pandoc ', inputFile fullName) correctAccentedCharacters.
|
||||
]
|
src/MiniDocs/Pandoc.extension.st (new file, 11 lines)
@@ -0,0 +1,11 @@
|
||||
Extension { #name : #Pandoc }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
Pandoc class >> convertString: aString from: inputFormat to: outputFormat [
|
||||
OSSUnixSubprocess new
|
||||
shellCommand: 'echo "', aString , '" | pandoc -f ', inputFormat,' -t ', outputFormat;
|
||||
redirectStdout;
|
||||
runAndWaitOnExitDo: [ :command :outString |
|
||||
^ outString
|
||||
].
|
||||
]
|
src/MiniDocs/PubPubContent.class.st (new file, 148 lines)
@@ -0,0 +1,148 @@
|
||||
Class {
|
||||
#name : #PubPubContent,
|
||||
#superclass : #Object,
|
||||
#instVars : [
|
||||
'title',
|
||||
'language',
|
||||
'url',
|
||||
'thumbnail',
|
||||
'work',
|
||||
'contents'
|
||||
],
|
||||
#category : #'MiniDocs-Model'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent class >> fromXML: anXMLElement [
|
||||
^ self new fromXML: anXMLElement
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> asMarkdeepFrontPageElement [
|
||||
| response anchorName anchorLink markdeepFile |
|
||||
response := '' writeStream.
|
||||
anchorName := '[', self title,']'.
|
||||
markdeepFile := './book/', self shortName,'--',self id,'.md.html'.
|
||||
anchorLink := '(', markdeepFile,')'.
|
||||
response
|
||||
nextPutAll: '<big>', anchorName, anchorLink,'</big><br><br>';
|
||||
nextPutAll: String lf.
|
||||
self thumbnail ifNotNil: [ |image|
|
||||
image := '
|
||||
<img
|
||||
src=', self thumbnail,
|
||||
' width="55%"
|
||||
style="width: 400px; height: 220px; object-fit: cover;"
|
||||
/>'.
|
||||
response nextPutAll: '<a href="',markdeepFile,'">', image, '</a>'
|
||||
].
|
||||
response
|
||||
nextPutAll: String lf, String lf.
|
||||
^ response contents
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> contents: anObject [
|
||||
contents := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> fileName [
|
||||
^ self shortName,'--', self id, '.md'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> fromXML: aXMLElement [
|
||||
| image anchor|
|
||||
image := aXMLElement contentNodes first xpath: './a/div'.
|
||||
image
|
||||
ifNotEmpty: [|style rawUrl|
|
||||
style := (image first attributeAt: 'style').
|
||||
rawUrl := (style splitOn: 'url') second.
|
||||
self
|
||||
thumbnail:(rawUrl copyFrom: 3 to: rawUrl size - 2)
|
||||
].
|
||||
anchor := (aXMLElement contentNodes second contentNodes first xpath: './div[@class="title-wrapper"]/a') first.
|
||||
self
|
||||
title: (anchor attributeAt: 'title');
|
||||
url: (anchor attributeAt: 'href').
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> id [
|
||||
^ (self url splitOn: $/) last
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
PubPubContent >> language: aString [
|
||||
language := aString
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> next [
|
||||
^ self nextInstance
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> previous [
|
||||
| index |
|
||||
index := self work tableOfContents detectIndex: [:pubContent | pubContent = self ] ifNone: [ ^ nil ].
|
||||
^ self work tableOfContents at: index - 1.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> printOn: aStream [
|
||||
super printOn: aStream.
|
||||
aStream
|
||||
nextPutAll: '( ', self title,' | ', self id, ' )'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> shortName [
|
||||
| sanitized |
|
||||
sanitized := (self title splitOn: $:) first.
|
||||
sanitized := sanitized copyReplaceAll: '’' with: ''.
|
||||
sanitized := sanitized asCamelCase.
|
||||
sanitized at: 1 put: sanitized first asLowercase.
|
||||
^ sanitized
|
||||
]
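shortName drops any subtitle after a colon, removes curly apostrophes, camel-cases what remains, and lowercases the first letter; for example:

"Derive the short file-name stem from a content title."
| content |
content := PubPubContent new.
content title: 'Writing Communities: An Offline Web Experiment'.
content shortName. "==> 'writingCommunities'"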
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> thumbnail [
|
||||
^ thumbnail
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> thumbnail: anURL [
|
||||
thumbnail := anURL
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> title [
|
||||
^ title
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> title: anObject [
|
||||
title := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> url [
|
||||
^url
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> url: anObject [
|
||||
url := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> work [
|
||||
^ work
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> work: aPubPubWork [
|
||||
work := aPubPubWork
|
||||
]
|
src/MiniDocs/PubPubGrammar.class.st (new file, 75 lines)
@@ -0,0 +1,75 @@
|
||||
Class {
|
||||
#name : #PubPubGrammar,
|
||||
#superclass : #PP2CompositeNode,
|
||||
#instVars : [
|
||||
'document',
|
||||
'link',
|
||||
'linkLabel',
|
||||
'linkContent',
|
||||
'imageLinkLabel',
|
||||
'imageLinkContent',
|
||||
'alternativeImages',
|
||||
'imageLink'
|
||||
],
|
||||
#category : #'MiniDocs-Model'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar >> alternativeImages [
|
||||
^ self linkContent
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar >> document [
|
||||
^ (link / imageLink ) islandInSea star
|
||||
]
|
||||
|
||||
{ #category : #links }
|
||||
PubPubGrammar >> imageLink [
|
||||
^ imageLinkLabel, imageLinkContent, alternativeImages
|
||||
]
|
||||
|
||||
{ #category : #links }
|
||||
PubPubGrammar >> imageLinkContent [
|
||||
^ '(' asPParser, #any asPParser starLazy flatten, ')' asPParser ==> #second
|
||||
]
|
||||
|
||||
{ #category : #links }
|
||||
PubPubGrammar >> imageLinkLabel [
|
||||
|
||||
| label |
|
||||
label := ("$] asPParser not /" #any asPParser) starLazy flatten.
|
||||
^ '![' asPParser, label, ']' asPParser ==> #second.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar >> imageLinkSea [
|
||||
^ imageLink sea ==> #second
|
||||
]
|
||||
|
||||
{ #category : #links }
|
||||
PubPubGrammar >> link [
|
||||
^ linkLabel, linkContent
|
||||
]
|
||||
|
||||
{ #category : #links }
|
||||
PubPubGrammar >> linkContent [
|
||||
^ '{' asPParser, #any asPParser starLazy flatten, '}' asPParser ==> #second.
|
||||
]
|
||||
|
||||
{ #category : #links }
|
||||
PubPubGrammar >> linkLabel [
|
||||
| label |
|
||||
label := ("$] asPParser not /" #any asPParser) starLazy flatten.
|
||||
^ $[ asPParser, label, $] asPParser ==> #second.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar >> linkSea [
|
||||
^ link sea ==> #second
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar >> start [
|
||||
^ document
|
||||
]
|
65
src/MiniDocs/PubPubGrammar2.class.st
Normal file
@@ -0,0 +1,65 @@
|
||||
Class {
|
||||
#name : #PubPubGrammar2,
|
||||
#superclass : #PP2CompositeNode,
|
||||
#instVars : [
|
||||
'imageLabel',
|
||||
'imageLink',
|
||||
'imagesArray',
|
||||
'imageLocation',
|
||||
'document',
|
||||
'footnote',
|
||||
'footnoteLabel',
|
||||
'footnoteContent'
|
||||
],
|
||||
#category : #'MiniDocs-Model'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar2 >> document [
|
||||
^ (imageLink / footnote) islandInSea star
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar2 >> footnote [
|
||||
^ footnoteLabel, footnoteContent
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar2 >> footnoteContent [
|
||||
^ '{#' asPParser, #any asPParser starLazy flatten, '}' asPParser ==> #second
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar2 >> footnoteLabel [
|
||||
^ '[' asPParser, #any asPParser starLazy flatten, ']' asPParser ==> #second
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar2 >> imageLabel [
|
||||
^ '![' asPParser, #any asPParser starLazy flatten, ']' asPParser ==> #second
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar2 >> imageLink [
|
||||
^ imageLabel, imageLocation, imagesArray
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar2 >> imageLocation [
|
||||
^ '(' asPParser, #any asPParser starLazy flatten, ')' asPParser ==> #second
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar2 >> imagesArray [
|
||||
^ '{srcset=' asPParser, #any asPParser starLazy flatten, '}' asPParser ==> #second
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar2 >> imagesContent [
|
||||
^ '{src=' asPParser, #any asPParser starLazy flatten, '}' asPParser ==> #second
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar2 >> start [
|
||||
^ document
|
||||
]
|
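An illustrative parse (commentary, not part of the diff) with this second grammar, which islands image links and footnotes inside arbitrary text. The input string is made up, and the usual PetitParser2 #parse: entry point is assumed:

PubPubGrammar2 new parse: 'See the map ![A map](maps/city.png){srcset=maps/city-2x.png 2x} and a note [1]{#note-1}.'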
59
src/MiniDocs/PubPubGrammarTest.class.st
Normal file
@@ -0,0 +1,59 @@
|
||||
Class {
|
||||
#name : #PubPubGrammarTest,
|
||||
#superclass : #PP2CompositeNodeTest,
|
||||
#category : #'MiniDocs-Model'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammarTest >> parserClass [
|
||||
^ PubPubGrammar
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammarTest >> testComposedImageLink [
|
||||
self
|
||||
parse: '![This is an image label with sublinks (bla bl)[blog]](this/is/an/image/link){this are alternate image sizes}'
|
||||
rule: #imageLink
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammarTest >> testImageLabel: label [
|
||||
self
|
||||
parse: label
|
||||
rule: #imageLinkLabel
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammarTest >> testImageLink [
|
||||
self
|
||||
parse: '![This is an image label](this/is/an/image/link){this are alternate image sizes}'
|
||||
rule: #imageLink
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammarTest >> testLabel: label [
|
||||
self
|
||||
parse: label
|
||||
rule: #linkLabel
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammarTest >> testLink [
|
||||
self
|
||||
parse: '[This is a label]{this/is/a/link}'
|
||||
rule: #link
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammarTest >> testNestedLabel [
|
||||
self
|
||||
parse: '[This is a label with [sublabels]]'
|
||||
rule: #linkLabel
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammarTest >> testSimpleLabel [
|
||||
self
|
||||
parse: '[This is a label]'
|
||||
rule: #linkLabel
|
||||
]
|
240
src/MiniDocs/PubPubWork.class.st
Normal file
@@ -0,0 +1,240 @@
|
||||
Class {
|
||||
#name : #PubPubWork,
|
||||
#superclass : #Object,
|
||||
#instVars : [
|
||||
'address',
|
||||
'tableOfContents',
|
||||
'titles',
|
||||
'folder',
|
||||
'currentLanguage',
|
||||
'languages'
|
||||
],
|
||||
#category : #'MiniDocs-Model'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> addTableOfContents: anOrderedDictionary [
|
||||
self tableOfContents
|
||||
at: (self currentLanguage) put: anOrderedDictionary;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> addTitle: aString [
|
||||
self titles
|
||||
at: (self currentLanguage) put: aString
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> address [
|
||||
^ address
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> address: anUrl [
|
||||
address := anUrl
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> bookishFolder [
|
||||
^ { 'en' -> 'book'.
|
||||
'es' -> 'libro'} asDictionary
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> currentLanguage [
|
||||
^ currentLanguage
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> currentLanguage: twoLettersInISO639_1 [
|
||||
currentLanguage := twoLettersInISO639_1
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> defaultOptions [
|
||||
^ { 'sourceCodeLink' -> true .
|
||||
'commentsProvider' -> 'Hypothesis' } asDictionary
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> defaultTitle [
|
||||
^ self titles associations first value
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> downloadContents [
|
||||
| workingDirectory |
|
||||
workingDirectory := self workingDirectory.
|
||||
self tableOfContentsDictionary
|
||||
keysAndValuesDo: [ :name :chapterAddress |
|
||||
| currentFileName |
|
||||
currentFileName := name , '--' , chapterAddress , '.md'.
|
||||
(workingDirectory / currentFileName) asFileReference ensureDelete.
|
||||
(workingDirectory / 'markdown') asFileReference ensureDelete.
|
||||
ZnClient new
|
||||
get: self address , 'pub/' , chapterAddress , '/download/markdown';
|
||||
downloadTo: workingDirectory.
|
||||
workingDirectory / 'markdown' renameTo: currentFileName ].
|
||||
^ workingDirectory
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> downloadContents2 [
|
||||
| workingDirectory |
|
||||
workingDirectory := self folder / self currentLanguage / 'book'.
|
||||
self tableOfContentsDictionary keysAndValuesDo: [ :name :chapterAddress | |currentFileName|
|
||||
currentFileName := name, '--', chapterAddress, '.md'.
|
||||
(workingDirectory / currentFileName) asFileReference ensureDelete.
|
||||
(workingDirectory / 'markdown') asFileReference ensureDelete.
|
||||
ZnClient new
|
||||
get: self address, 'pub/', chapterAddress, '/download/markdown';
|
||||
downloadTo: workingDirectory .
|
||||
workingDirectory / 'markdown' renameTo: currentFileName
|
||||
].
|
||||
^ workingDirectory
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> exportToHTML [
|
||||
self markdownFiles
|
||||
do: [ :file | | doc |
|
||||
doc := Markdown new fromFile: file.
|
||||
doc exportAsHTML ].
|
||||
^ self markdownFiles first parent
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> exportToMarkdeep [
|
||||
| markdeepDocs |
|
||||
|
||||
markdeepDocs := self markdownFiles
|
||||
collect: [ :file | Markdeep fromMarkdownFile: file ].
|
||||
markdeepDocs do: [ :each | each fromPubPubToMarkdeep exportAsFile ].
|
||||
^ self languageFolder
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> extractAllContentsRaw [
|
||||
^ self frontPage xpath: '//div[@class="layout-pubs-block"]'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> extractRawTableOfContents [
|
||||
^ self extractAllContentsRaw first xpath: '//div[contains(concat(" ",normalize-space(@class)," "), " pub-preview-component ")]'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> folder [
|
||||
^ folder ensureCreateDirectory
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> folder: localDirectory [
|
||||
folder := localDirectory
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> frontPage [
|
||||
"This should scrap contents of the book's front-page and translate them into Markdeep,
|
||||
according to our templates."
|
||||
^ (XMLHTMLParser on: (self address asUrl retrieveContents)) parseDocument
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> languageFolder [
|
||||
^ self folder / self currentLanguage
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> markdeepFrontPage [
|
||||
| frontPage markdeepIndex |
|
||||
frontPage := Markdeep new.
|
||||
frontPage
|
||||
title: self defaultTitle;
|
||||
file: self languageFolder / 'frontPage.md.html'.
|
||||
markdeepIndex := '' writeStream.
|
||||
self tableOfContents do: [:pubPubContent|
|
||||
markdeepIndex
|
||||
nextPutAll: pubPubContent asMarkdeepFrontPageElement
|
||||
].
|
||||
frontPage body: markdeepIndex contents.
|
||||
^ frontPage
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> markdownFiles [
|
||||
^ self languageFolder allChildren
|
||||
select: [ :file | file basename endsWith: '.md' ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> populateContents [
|
||||
self tableOfContents isEmptyOrNil
|
||||
ifTrue: [ self populateTableOfContents ].
|
||||
self workingDirectory children ifEmpty: [self downloadContents].
|
||||
self tableOfContents do: [:pubPubContent | | contentFile|
|
||||
contentFile := self workingDirectory / pubPubContent fileName.
|
||||
contentFile exists
|
||||
ifTrue: [ pubPubContent contents: (Markdown new fromFile: contentFile) ]
|
||||
]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> populateTableOfContents [
|
||||
| contentsCollection |
|
||||
contentsCollection := self extractRawTableOfContents collect: [:each |
|
||||
(PubPubContent fromXML: each)
|
||||
language: self currentLanguage;
|
||||
work: self
|
||||
].
|
||||
self addTableOfContents: contentsCollection asOrderedCollection
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> printOn: aStream [
|
||||
super printOn: aStream.
|
||||
aStream
|
||||
nextPutAll: '(',self defaultTitle, ' | ', self address, ' )'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> tableOfContents [
|
||||
tableOfContents ifNil: [ ^ tableOfContents := Dictionary new].
|
||||
^ tableOfContents at: self currentLanguage
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> tableOfContents: anObject [
|
||||
tableOfContents := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> tableOfContentsDictionary [
|
||||
| response |
|
||||
response := OrderedDictionary new.
|
||||
self tableOfContents do: [:content |
|
||||
response
|
||||
at: content shortName put: content id
|
||||
].
|
||||
^ response
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> titles [
|
||||
^ titles ifNil: [titles := OrderedDictionary new]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> viewContentsFor: aView [
|
||||
<gtView>
|
||||
^ aView list
|
||||
title: 'Contents';
|
||||
priority: 10;
|
||||
items: [ self tableOfContents ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> workingDirectory [
|
||||
^ self folder / self currentLanguage / (self bookishFolder at: self currentLanguage)
|
||||
]
|
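A rough end-to-end sketch (commentary, not part of the diff) of how the class above seems meant to be driven. The address and folder are hypothetical, and the order of calls simply mirrors what #populateContents and #exportToMarkdeep do internally:

| work |
work := PubPubWork new
	address: 'https://example.pubpub.org/';
	folder: FileLocator temp / 'pubpub-mirror';
	currentLanguage: 'en';
	yourself.
work populateTableOfContents. "scrape the front page into PubPubContent objects"
work downloadContents.        "fetch each chapter as Markdown into <folder>/en/book"
work populateContents.        "attach the downloaded Markdown to each content"
work exportToMarkdeep.        "write Markdeep versions next to the Markdown sources"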
@@ -1,8 +1,240 @@
|
||||
Extension { #name : #String }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> accentedCharactersCorrection [
|
||||
| modified corrections |
|
||||
corrections := {
|
||||
'ó' -> 'ó' . 'Ã' -> 'Ó' . 'ú' -> 'ú' . 'ñ' -> 'ñ' . 'Ã' -> 'Ñ' .
|
||||
'Ã' -> 'í' . 'á' -> 'á' . 'é' -> 'é' . 'â' -> $' asString} asDictionary.
|
||||
modified := self copy.
|
||||
corrections keysAndValuesDo: [ :k :v |
|
||||
modified := modified copyReplaceAll: k with: v
|
||||
].
|
||||
^ modified
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> admonitionBorderLines [
|
||||
| response |
|
||||
response := OrderedDictionary new.
|
||||
self lines doWithIndex: [:line :index |
|
||||
(self admonitionBorders includes: line trimBoth)
|
||||
ifTrue: [ response at: index put: line trimBoth ]
|
||||
].
|
||||
^ response
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> admonitionBorders [
|
||||
"For the moment I only work with the admonition starting border
|
||||
as adding the closing one would imply redoing the #markdownSplitted
|
||||
method with a proper parser, which, ATM, is overkill."
|
||||
| response |
|
||||
response := #('info' 'success' 'warning' 'danger') collect: [ :each | ':::', each ].
|
||||
^ response "copyWith: ':::'"
|
||||
]
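Illustrative values (commentary, not part of the diff) for the admonition helpers above, on a small HedgeDoc-style string:

| sample |
sample := ':::info
This chapter is an early draft.
:::'.
sample startsWithMarkdownAdmonition. "true"
sample admonitionBorders.            "#(':::info' ':::success' ':::warning' ':::danger')"
sample admonitionBorderLines.        "an OrderedDictionary with 1 -> ':::info'"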
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> admonitionEndingPosition [
|
||||
| response |
|
||||
response := 0.
|
||||
self startsWithMarkdownAdmonition ifFalse: [ ^ response ].
|
||||
self lines do: [:line |
|
||||
response > 0 ifTrue: [ response := response + 1 ].
|
||||
(line trimBoth = ':::')
|
||||
ifFalse: [ response := response + line size ]
|
||||
ifTrue: [ ^ response := response + line size. ]
|
||||
].
|
||||
^ response
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> asDashedLowercase [
|
||||
"I convert phrases like 'This is a phrase' into 'this-is-a-phrase'."
|
||||
|
||||
^ '-' join: (self substrings collect: [:each | each asLowercase ])
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> asInteger [
|
||||
"Return the integer present in the receiver, or nil. In case of float, returns the integer part."
|
||||
"'1' asInteger >>> 1"
|
||||
"'-1' asInteger >>> -1"
|
||||
"'10' asInteger >>> 10"
|
||||
"'a' asInteger >>> nil"
|
||||
"'1.234' asInteger >>> 1"
|
||||
^ (self copyWithoutAll: '_') asSignedInteger
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> contentsWithoutYAMLMetadata [
|
||||
| newContents |
|
||||
self detectYAMLMetadata ifFalse: [ ^ self ].
|
||||
newContents := '' writeStream.
|
||||
(self lines copyFrom: self yamlMetadataClosingLineNumber + 2 to: self lines size) do: [ :line |
|
||||
newContents nextPutAll: line; cr ].
|
||||
^ newContents contents.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> deleteYAMLMetadata [
|
||||
| newContents |
|
||||
self detectYAMLMetadata ifFalse: [ ^ self ].
|
||||
newContents := '' writeStream.
|
||||
(self lines copyFrom: self yamlMetadataClosingLineNumber + 1 to: self lines size) do: [ :line |
|
||||
newContents nextPutAll: line; lf;lf ].
|
||||
^ newContents contents.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> demoteMarkdownHeaders [
|
||||
| response |
|
||||
response := self contents lines.
|
||||
self markdownHeaders associations allButFirstDo: [ :assoc |
|
||||
response at: assoc key put: '#', assoc value ].
|
||||
^ response asStringWithCr withInternetLineEndings
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> detectYAMLMetadata [
|
||||
| lines |
|
||||
lines := self lines.
|
||||
^ self startsWithYAMLMetadataDelimiter
|
||||
and: [ lines allButFirst
|
||||
detect: [ :currentLine | currentLine beginsWith: self class yamlMetadataDelimiter ]
|
||||
ifFound: [ ^ true ] ifNone: [ ^ false ] ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> indentedWithExtraSpaces: spaceNumber [
|
||||
| response indent |
|
||||
response := '' writeStream.
|
||||
indent := String new.
|
||||
spaceNumber timesRepeat: [ indent := indent, ' ' ].
|
||||
self lines do: [:line | response nextPutAll: indent, line, String lf ].
|
||||
^ response contents
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> markdownHeaders [
|
||||
| response headers |
|
||||
headers := (LeTextSnippet string: self contents) ast // #LeHeaderNode collect: [ :each | each headerFullName asString ].
|
||||
response := OrderedDictionary new.
|
||||
self lines doWithIndex: [:line :index |
|
||||
(line beginsWithAnyOf: headers)
|
||||
ifTrue: [ response at: index put: line ]
|
||||
].
|
||||
^ response
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> markdownSplitLines [
|
||||
"I'm useful for conversions between the HedgeDoc Markdown variant and Lepiter page snippets.
|
||||
I provide the broad places where semantic breaks should be located in a page,
|
||||
depending on headers or admonitions, to create page snippets with similar divisions.
|
||||
Further page splits should be provided manually by the document author."
|
||||
| response |
|
||||
response := OrderedDictionary new.
|
||||
response := response
|
||||
addAll: self markdownHeaders;
|
||||
addAll: self admonitionBorderLines;
|
||||
yourself.
|
||||
^ (response associations sorted: [ :x :y | x key < y key ]) asOrderedDictionary
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> markdownSplitted [
|
||||
| response lastPart |
|
||||
self markdownSplitLines ifEmpty: [ ^ self ].
|
||||
response := OrderedCollection new.
|
||||
self markdownSplitLines keys allButLast doWithIndex: [:key :index | | nextLine part |
|
||||
nextLine := (self markdownSplitLines keys at: index + 1) - 1.
|
||||
part := self lines copyFrom: key to: nextLine.
|
||||
response add: part.
|
||||
].
|
||||
lastPart := self lines
|
||||
copyFrom: self markdownSplitLines keys last
|
||||
to: self lines size.
|
||||
response add: lastPart.
|
||||
^ response
|
||||
]
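A sketch (commentary, not part of the diff) of the splitting helpers above on a hypothetical document; note that #markdownHeaders relies on Lepiter's LeTextSnippet parser being loaded:

| doc |
doc := '# Title

Some prose.

:::warning
Handle with care.
:::

## Section'.
doc markdownSplitLines. "1 -> '# Title', 5 -> ':::warning', 9 -> '## Section'"
doc markdownSplitted.   "three groups of lines, one per marker, ready to become page snippets"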
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> promoteMarkdownHeaders [
|
||||
| response |
|
||||
response := self contents lines.
|
||||
self markdownHeaders associationsDo: [ :assoc |
|
||||
response at: assoc key put: assoc value allButFirst ].
|
||||
^ response asStringWithCr withInternetLineEndings
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> romanizeAccents [
|
||||
| modified corrections |
|
||||
corrections := {
|
||||
'ó' -> 'o' . 'ú' -> 'u' . 'ñ' -> 'n' .
|
||||
'í' -> 'i' . 'á' -> 'a' . 'é' -> 'e' } asDictionary.
|
||||
modified := self copy.
|
||||
corrections keysAndValuesDo: [ :k :v |
|
||||
modified := modified copyReplaceAll: k with: v
|
||||
].
|
||||
^ modified
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> startsWithMarkdownAdmonition [
|
||||
self lines ifEmpty: [ ^ false ].
|
||||
^ self admonitionBorders includes: self lines first trimBoth
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> startsWithYAMLMetadataDelimiter [
|
||||
self lines ifEmpty: [^false].
|
||||
^ self lines first beginsWith: self class yamlMetadataDelimiter
|
||||
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> withoutXMLTagDelimiters [
|
||||
^ self copyWithoutAll: #($< $>)
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> yamlMetadataClosingLineNumber [
|
||||
"I return the line where the closing of the YAML metadata occurs or 0 if no closing is found."
|
||||
self startsWithYAMLMetadataDelimiter ifFalse: [ ^ 0 ].
|
||||
self lines allButFirst doWithIndex: [ :currentLine :i |
|
||||
(currentLine beginsWith: self class yamlMetadataDelimiter) ifTrue: [ ^ i + 1 ]].
^ 0
|
||||
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String class >> yamlMetadataDelimiter [
|
||||
^ '---'
|
||||
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> yamlMetadataString [
|
||||
| output yamlLines |
|
||||
self detectYAMLMetadata ifFalse: [ ^nil ].
|
||||
self lines ifEmpty: [ ^nil ].
|
||||
yamlLines := self lines copyFrom: 2 to: self yamlMetadataClosingLineNumber - 1.
|
||||
output := '' writeStream.
|
||||
yamlLines do: [ :line |
|
||||
output
|
||||
nextPutAll: line;
|
||||
nextPut: Character lf. ].
|
||||
^ output contents
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> yamlMetadataStringWithDelimiters [
|
||||
| output |
|
||||
self yamlMetadataString ifNil: [ ^ nil ].
|
||||
output := String new writeStream.
|
||||
output nextPutAll: self class yamlMetadataDelimiter; cr.
|
||||
output nextPutAll: self yamlMetadataString.
|
||||
output nextPutAll: self class yamlMetadataDelimiter; cr.
|
||||
^ output contents.
|
||||
]
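Illustrative values (commentary, not part of the diff) for the YAML front-matter helpers above, on a hypothetical Markdown file:

| doc |
doc := '---
title: Example
language: en
---

# Body starts here'.
doc detectYAMLMetadata.            "true"
doc yamlMetadataClosingLineNumber. "4, the line holding the closing '---'"
doc yamlMetadataString.            "the two lines between the delimiters, one per line"
doc contentsWithoutYAMLMetadata.   "'# Body starts here', the body after the front matter"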
|
||||
|
6
src/MiniDocs/TeaCompositeRouter.extension.st
Normal file
@@ -0,0 +1,6 @@
|
||||
Extension { #name : #TeaCompositeRouter }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
TeaCompositeRouter >> staticRouters [
|
||||
^ routers
|
||||
]
|
6
src/MiniDocs/TeaStaticRouter.extension.st
Normal file
@@ -0,0 +1,6 @@
|
||||
Extension { #name : #TeaStaticRouter }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
TeaStaticRouter >> delegate [
|
||||
^ delegate
|
||||
]
|
6
src/MiniDocs/Teapot.extension.st
Normal file
@@ -0,0 +1,6 @@
|
||||
Extension { #name : #Teapot }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
Teapot >> staticRouter [
|
||||
^ staticRouter delegate
|
||||
]
|
10
src/MiniDocs/UnixChromePlatform.extension.st
Normal file
@@ -0,0 +1,10 @@
|
||||
Extension { #name : #UnixChromePlatform }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
UnixChromePlatform class >> defaultExecutableLocations [
|
||||
|
||||
^ #( '/opt/google/chrome/chrome'
|
||||
'/usr/bin/chromium-browser'
|
||||
'/usr/local/share/chromium/chrome'
|
||||
'/usr/bin/chromium' )
|
||||
]
|
9
src/MiniDocs/XMLDocument.extension.st
Normal file
@@ -0,0 +1,9 @@
|
||||
Extension { #name : #XMLDocument }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
XMLDocument >> detectMarkdeepTitle [
|
||||
| titleLine |
|
||||
titleLine := (self nodesCollect: [:node | node contentString ]) first lines
|
||||
detect: [:line | line includesSubstring: ' **'] ifNone: ['Untitled'].
|
||||
^ titleLine trimmed trimBoth: [:char | char = $* ]
|
||||
]
|
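A small sketch (commentary, not part of the diff) of #detectMarkdeepTitle on a hypothetical Markdeep page, using the same XMLHTMLParser entry point as PubPubWork >> #frontPage:

| page |
page := (XMLHTMLParser on: '<html><body><pre>
      **An Example Book**
   A subtitle line
</pre></body></html>') parseDocument.
page detectMarkdeepTitle. "'An Example Book'"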
53
src/MiniDocs/XMLElement.extension.st
Normal file
@@ -0,0 +1,53 @@
|
||||
Extension { #name : #XMLElement }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
XMLElement >> asSnippetDictionary [
|
||||
| response |
|
||||
response := STON fromString: (self attributes at: 'st-data').
|
||||
response at: 'className' put: (self attributes at: 'st-class').
|
||||
response at: 'content' put: self sanitizedContent.
|
||||
^ response
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
XMLElement >> extractMarkdownImageLinkData [
|
||||
| linkParserNodes sanitizedText linkParser |
|
||||
linkParser := (PPCommonMarkBlockParser parse: (self contentString trimBoth: [:each | each = Character lf]) allButFirst)
|
||||
accept: CMBlockVisitor new.
|
||||
linkParserNodes := linkParser children first children.
|
||||
linkParserNodes size = 1
|
||||
ifTrue: [ sanitizedText := linkParserNodes first label text ]
|
||||
ifFalse: [ sanitizedText := '' writeStream.
|
||||
linkParserNodes allButLast
|
||||
do: [ :each |
|
||||
each className = 'PPCMText'
|
||||
ifTrue: [ sanitizedText nextPutAll: each text allButFirst ].
|
||||
each className = 'PPCMLink'
|
||||
ifTrue: [ sanitizedText nextPutAll: each printString ] ].
|
||||
sanitizedText := sanitizedText contents ].
|
||||
^ {sanitizedText . self contentString }
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
XMLElement >> sanitizedContent [
|
||||
| className sanitizedText |
|
||||
className := self attributes at: 'st-class'.
|
||||
className = 'LeTextSnippet'
|
||||
ifTrue: [ sanitizedText := self contentString.
|
||||
sanitizedText := sanitizedText allButFirst.
|
||||
sanitizedText := sanitizedText allButLast ].
|
||||
className = 'LePharoSnippet'
|
||||
ifTrue: [ | joinedText |
|
||||
sanitizedText := self contentString lines.
|
||||
sanitizedText := sanitizedText copyFrom: 4 to: sanitizedText size - 2.
|
||||
joinedText := '' writeStream.
|
||||
sanitizedText
|
||||
do: [ :line |
|
||||
joinedText
|
||||
nextPutAll: line;
|
||||
nextPut: Character lf ].
|
||||
sanitizedText := joinedText contents allButLast ].
|
||||
className = 'LePictureSnippet'
|
||||
ifTrue: [ sanitizedText := self extractMarkdownImageLinkData ].
|
||||
^ sanitizedText
|
||||
]
|
10
src/MiniDocs/ZnConstants.extension.st
Normal file
@@ -0,0 +1,10 @@
|
||||
Extension { #name : #ZnConstants }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
ZnConstants class >> maximumLineLength [
|
||||
"Return the maximum line length to accept.
|
||||
Used by ZnLineReader and thus for reading request/status lines as well as headers.
|
||||
This helps to protect us from malicious content."
|
||||
|
||||
^ 5096 "8192"
|
||||
]
|
48
src/PetitMarkdown/CMBlockVisitor.class.st
Normal file
@@ -0,0 +1,48 @@
|
||||
Class {
|
||||
#name : #CMBlockVisitor,
|
||||
#superclass : #CMVisitor,
|
||||
#instVars : [
|
||||
'inlineParser'
|
||||
],
|
||||
#category : #'PetitMarkdown-Visitors'
|
||||
}
|
||||
|
||||
{ #category : #initialization }
|
||||
CMBlockVisitor >> initialize [
|
||||
inlineParser := PPCommonMarkInlinesParser new.
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
CMBlockVisitor >> visitLinkRefDef: node [
|
||||
inlineParser registerLinkRefDef: node.
|
||||
^ super visitLinkRefDef: node
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
CMBlockVisitor >> visitParagraph: node [
|
||||
| result text |
|
||||
self assert: (node children anySatisfy: [ :e | e isLine ]).
|
||||
text := Character cr join: (node children collect: [:e | e text]).
|
||||
|
||||
result := inlineParser parse: (text trimRight).
|
||||
^ PPCMParagraph new
|
||||
addChildren: result;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
CMBlockVisitor >> visitPlainLine: node [
|
||||
| result |
|
||||
self assert: node text isString.
|
||||
result := inlineParser parse: node text.
|
||||
^ PPCMLine new
|
||||
addChildren: result;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
CMBlockVisitor >> visitPlainText: node [
|
||||
^ PPCMText new
|
||||
text: node text;
|
||||
yourself
|
||||
]
|
458
src/PetitMarkdown/CMHTMLVisitor.class.st
Normal file
@@ -0,0 +1,458 @@
|
||||
Class {
|
||||
#name : #CMHTMLVisitor,
|
||||
#superclass : #CMVisitor,
|
||||
#instVars : [
|
||||
'links',
|
||||
'shouldEscape',
|
||||
'tight',
|
||||
'shouldHtmlSpecChars'
|
||||
],
|
||||
#category : #'PetitMarkdown-Visitors'
|
||||
}
|
||||
|
||||
{ #category : #support }
|
||||
CMHTMLVisitor >> encodeEntities: text [
|
||||
^ PPCommonMarkUtils instance encodeEntities: text
|
||||
]
|
||||
|
||||
{ #category : #escape }
|
||||
CMHTMLVisitor >> escape: string [
|
||||
| retval regex |
|
||||
self shouldEscape ifFalse: [ ^ string ].
|
||||
|
||||
retval := string.
|
||||
retval := retval copyReplaceAll: '\\' with: '\'.
|
||||
|
||||
"Remove backlashes, \! -> !"
|
||||
regex := '\\[!#$%''()*+,-./:;=?@^_`{|}~]' asRegex.
|
||||
retval := regex copy: retval translatingMatchesUsing: [ :match | match second asString ].
|
||||
|
||||
retval := retval copyReplaceAll: '\[' with: '['.
|
||||
retval := retval copyReplaceAll: '\]' with: ']'.
|
||||
retval := retval copyReplaceAll: '\\' with: '\'.
|
||||
|
||||
^ retval
|
||||
]
|
||||
|
||||
{ #category : #escape }
|
||||
CMHTMLVisitor >> forbidEscape [
|
||||
shouldEscape push: false
|
||||
]
|
||||
|
||||
{ #category : #support }
|
||||
CMHTMLVisitor >> forbidHtmlSpecChars [
|
||||
shouldHtmlSpecChars push: false
|
||||
]
|
||||
|
||||
{ #category : #initialization }
|
||||
CMHTMLVisitor >> initialize [
|
||||
super initialize.
|
||||
links := IdentityDictionary new.
|
||||
shouldEscape := Stack with: true.
|
||||
shouldHtmlSpecChars := Stack with: true.
|
||||
]
|
||||
|
||||
{ #category : #'string operations' }
|
||||
CMHTMLVisitor >> removeLeadingEmptyLines: collection [
|
||||
| retval |
|
||||
collection isEmpty ifTrue: [ ^ collection ].
|
||||
|
||||
retval := collection copy.
|
||||
[retval first text = ''] whileTrue: [
|
||||
retval removeFirst
|
||||
].
|
||||
|
||||
^ retval
|
||||
]
|
||||
|
||||
{ #category : #'string operations' }
|
||||
CMHTMLVisitor >> removeTrailingEmptyLines: collection [
|
||||
| retval |
|
||||
collection isEmpty ifTrue: [ ^ collection ].
|
||||
|
||||
retval := collection copy.
|
||||
[retval last text = ''] whileTrue: [
|
||||
retval removeLast
|
||||
].
|
||||
|
||||
^ retval
|
||||
]
|
||||
|
||||
{ #category : #escape }
|
||||
CMHTMLVisitor >> restoreEscape [
|
||||
shouldEscape pop
|
||||
]
|
||||
|
||||
{ #category : #'string operations' }
|
||||
CMHTMLVisitor >> restoreHtmlSpecChars [
|
||||
shouldHtmlSpecChars pop
|
||||
]
|
||||
|
||||
{ #category : #escape }
|
||||
CMHTMLVisitor >> shouldEscape [
|
||||
^ shouldEscape top
|
||||
]
|
||||
|
||||
{ #category : #support }
|
||||
CMHTMLVisitor >> shouldHtmlSpecChars [
|
||||
^ shouldHtmlSpecChars top
|
||||
]
|
||||
|
||||
{ #category : #'string operations' }
|
||||
CMHTMLVisitor >> trimLeadingEmptyLines: string [
|
||||
| retval |
|
||||
retval := string.
|
||||
|
||||
[retval beginsWith: String cr] whileTrue: [
|
||||
retval := retval copyFrom: 2 to: retval size.
|
||||
].
|
||||
|
||||
^ retval
|
||||
]
|
||||
|
||||
{ #category : #visiting }
|
||||
CMHTMLVisitor >> visitBlockQuote: node [
|
||||
| stream content |
|
||||
stream := WriteStream on: ''.
|
||||
stream nextPut: Character cr.
|
||||
stream nextPutAll: '<blockquote>'.
|
||||
|
||||
content := node child accept: self.
|
||||
content := content trimRight.
|
||||
|
||||
stream nextPutAll: content.
|
||||
stream nextPut: Character cr.
|
||||
stream nextPutAll: '</blockquote>'.
|
||||
^ stream contents.
|
||||
]
|
||||
|
||||
{ #category : #visiting }
|
||||
CMHTMLVisitor >> visitContainer: node [
|
||||
| parts concat |
|
||||
parts := node children collect: [ :child |
|
||||
child accept: self
|
||||
].
|
||||
|
||||
concat := (parts reject: [ :e | e = '' ]) inject: '' into: [ :string :e | string, e ].
|
||||
^ concat
|
||||
|
||||
]
|
||||
|
||||
{ #category : #visiting }
|
||||
CMHTMLVisitor >> visitDelegate: node [
|
||||
| parts |
|
||||
parts := node children collect: [ :child |
|
||||
child accept: self
|
||||
].
|
||||
|
||||
^ String cr join: parts
|
||||
|
||||
]
|
||||
|
||||
{ #category : #visiting }
|
||||
CMHTMLVisitor >> visitDocument: node [
|
||||
^ self trimLeadingEmptyLines: (self visitContainer: node)
|
||||
|
||||
]
|
||||
|
||||
{ #category : #visiting }
|
||||
CMHTMLVisitor >> visitEmphasize: node [
|
||||
| retval |
|
||||
retval:= WriteStream on: ''.
|
||||
|
||||
retval nextPutAll: '<em>'.
|
||||
node children do: [ :child |
|
||||
retval nextPutAll: (child accept: self)
|
||||
].
|
||||
|
||||
retval nextPutAll: '</em>'.
|
||||
^ retval contents
|
||||
]
|
||||
|
||||
{ #category : #visiting }
|
||||
CMHTMLVisitor >> visitFencedCode: node [
|
||||
| stream |
|
||||
stream := WriteStream on: ''.
|
||||
|
||||
stream nextPut: Character cr.
|
||||
stream nextPutAll: '<pre><code'.
|
||||
node infoString isNil ifFalse: [
|
||||
stream nextPutAll: ' class="language-'.
|
||||
stream nextPutAll: (self escape: node infoString trim).
|
||||
stream nextPutAll: '"'
|
||||
].
|
||||
stream nextPut: $>.
|
||||
|
||||
self forbidEscape.
|
||||
(node children) do: [ :child |
|
||||
stream nextPutAll: (child accept: self).
|
||||
stream nextPut: Character cr.
|
||||
].
|
||||
self restoreEscape.
|
||||
|
||||
stream nextPutAll: '</code></pre>'.
|
||||
^ stream contents.
|
||||
]
|
||||
|
||||
{ #category : #visiting }
|
||||
CMHTMLVisitor >> visitHRule: node [
|
||||
^ String cr, '<hr />'
|
||||
]
|
||||
|
||||
{ #category : #visiting }
|
||||
CMHTMLVisitor >> visitHardBreak: node [
|
||||
^ '<br />'
|
||||
|
||||
]
|
||||
|
||||
{ #category : #visiting }
|
||||
CMHTMLVisitor >> visitHeader: node [
|
||||
^ String cr, '<h', node level asString, '>',
|
||||
(node title accept: self) trim,
|
||||
'</h', node level asString, '>'
|
||||
]
|
||||
|
||||
{ #category : #visiting }
|
||||
CMHTMLVisitor >> visitHtml: node [
|
||||
^ node text
|
||||
|
||||
]
|
||||
|
||||
{ #category : #visiting }
|
||||
CMHTMLVisitor >> visitHtmlBlock: node [
|
||||
| parts |
|
||||
self forbidEscape.
|
||||
self forbidHtmlSpecChars.
|
||||
parts := node children collect: [ :child |
|
||||
child accept: self
|
||||
].
|
||||
self restoreHtmlSpecChars.
|
||||
self restoreEscape.
|
||||
|
||||
" ^ String cr join: parts "
|
||||
^ parts inject: '' into: [ :string :e | string, String cr, e ]
|
||||
|
||||
]
|
||||
|
||||
{ #category : #visiting }
|
||||
CMHTMLVisitor >> visitIndentedCode: node [
|
||||
| stream |
|
||||
stream := WriteStream on: ''.
|
||||
|
||||
stream nextPut: Character cr.
|
||||
stream nextPutAll: '<pre><code>'.
|
||||
|
||||
self forbidEscape.
|
||||
(self removeTrailingEmptyLines: (self removeLeadingEmptyLines: node children)) do: [ :child |
|
||||
stream nextPutAll: (child accept: self).
|
||||
stream nextPut: Character cr.
|
||||
].
|
||||
self restoreEscape.
|
||||
|
||||
stream nextPutAll: '</code></pre>'.
|
||||
^ stream contents.
|
||||
]
|
||||
|
||||
{ #category : #visiting }
|
||||
CMHTMLVisitor >> visitInlinedCode: node [
|
||||
| code code2 |
|
||||
code := node code.
|
||||
|
||||
code := code copyReplaceAll: (String cr) with: (String space).
|
||||
code := code.
|
||||
|
||||
code2 := code.
|
||||
[
|
||||
code := code2.
|
||||
code2 := code copyReplaceAll: ' ' with: ' '
|
||||
] doWhileFalse: [ code2 = code ].
|
||||
|
||||
^ '<code>', code trim , '</code>'
|
||||
]
|
||||
|
||||
{ #category : #visiting }
|
||||
CMHTMLVisitor >> visitLine: node [
|
||||
| stream |
|
||||
stream := WriteStream on: ''.
|
||||
|
||||
node children do: [ :child |
|
||||
stream nextPutAll: (child accept: self).
|
||||
].
|
||||
^ stream contents
|
||||
]
|
||||
|
||||
{ #category : #visiting }
|
||||
CMHTMLVisitor >> visitLink: node [
|
||||
| stream |
|
||||
stream := WriteStream on: ''.
|
||||
stream nextPutAll: '<a href="'.
|
||||
node destination isNil ifFalse: [
|
||||
stream nextPutAll: (self encodeEntities: (self escape: node destination)).
|
||||
] ifTrue: [ ].
|
||||
stream nextPutAll: '"'.
|
||||
node title isNil ifFalse: [
|
||||
stream nextPutAll: ' title="'.
|
||||
stream nextPutAll: (self escape: (self encodeEntities: node title)).
|
||||
stream nextPutAll: '"'.
|
||||
].
|
||||
stream nextPutAll: '>'.
|
||||
stream nextPutAll: (node label accept: self).
|
||||
stream nextPutAll: '</a>'.
|
||||
|
||||
^ stream contents
|
||||
]
|
||||
|
||||
{ #category : #visiting }
|
||||
CMHTMLVisitor >> visitLinkRef: node [
|
||||
| stream ref |
|
||||
stream := WriteStream on: ''.
|
||||
|
||||
ref := links at: node label text asLowercase asSymbol.
|
||||
|
||||
stream nextPutAll: '<a href="'.
|
||||
stream nextPutAll: (self escape: ref destination).
|
||||
stream nextPutAll: '"'.
|
||||
ref title isNil ifFalse: [
|
||||
stream nextPutAll: ' title="'.
|
||||
stream nextPutAll: (self escape: ref title).
|
||||
stream nextPutAll: '"'.
|
||||
].
|
||||
stream nextPutAll: '>'.
|
||||
stream nextPutAll: (self escape: node label text).
|
||||
stream nextPutAll: '</a>'.
|
||||
|
||||
^ stream contents
|
||||
]
|
||||
|
||||
{ #category : #visiting }
|
||||
CMHTMLVisitor >> visitLinkRefDef: node [
|
||||
links at: node label text asLowercase asSymbol ifAbsentPut: node.
|
||||
^ ''
|
||||
|
||||
]
|
||||
|
||||
{ #category : #visiting }
|
||||
CMHTMLVisitor >> visitLinkRefDefPlaceholder: node [
|
||||
^ ''
|
||||
|
||||
]
|
||||
|
||||
{ #category : #visiting }
|
||||
CMHTMLVisitor >> visitList: node [
|
||||
| stream tag tmp start |
|
||||
stream := WriteStream on: ''.
|
||||
|
||||
tmp := tight.
|
||||
tight := node isTight.
|
||||
start := ''.
|
||||
|
||||
(node type = #ordered) ifTrue: [
|
||||
tag := 'ol'.
|
||||
(node start = 1) ifFalse: [ start := ' start="', node start asString, '"' ]
|
||||
] ifFalse: [
|
||||
tag := 'ul'
|
||||
].
|
||||
|
||||
stream nextPut: Character cr.
|
||||
stream nextPut: $<.
|
||||
stream nextPutAll: tag.
|
||||
stream nextPutAll: start.
|
||||
stream nextPut: $>.
|
||||
|
||||
node children do: [ :child |
|
||||
child isBlankLine ifFalse: [
|
||||
stream nextPutAll: (child accept: self).
|
||||
]
|
||||
].
|
||||
|
||||
stream nextPut: Character cr.
|
||||
stream nextPutAll: '</'.
|
||||
stream nextPutAll: tag.
|
||||
stream nextPut: $>.
|
||||
|
||||
tight := tmp.
|
||||
^ stream contents
|
||||
|
||||
]
|
||||
|
||||
{ #category : #visiting }
|
||||
CMHTMLVisitor >> visitListItem: node [
|
||||
| stream nodeChildren |
|
||||
stream := WriteStream on: ''.
|
||||
nodeChildren := node child children reject: [:e | e isBlankLine ].
|
||||
|
||||
stream nextPut: Character cr.
|
||||
stream nextPutAll: '<li>'.
|
||||
nodeChildren do: [ :child |
|
||||
(child isParagraph and: [ tight ]) ifTrue: [
|
||||
child children do: [ :ch | stream nextPutAll: (ch accept: self) ]
|
||||
] ifFalse: [
|
||||
stream nextPutAll: (child accept: self).
|
||||
]
|
||||
].
|
||||
(nodeChildren isEmpty or:
|
||||
[nodeChildren last isParagraph and: [tight]]) ifFalse: [
|
||||
stream nextPut: Character cr
|
||||
].
|
||||
stream nextPutAll: '</li>'.
|
||||
|
||||
^ stream contents
|
||||
]
|
||||
|
||||
{ #category : #visiting }
|
||||
CMHTMLVisitor >> visitNode: node [
|
||||
^ ''
|
||||
|
||||
]
|
||||
|
||||
{ #category : #visiting }
|
||||
CMHTMLVisitor >> visitParagraph: node [
|
||||
| stream |
|
||||
stream := WriteStream on: ''.
|
||||
stream nextPut: Character cr.
|
||||
stream nextPutAll: '<p>'.
|
||||
node children do: [ :child |
|
||||
stream nextPutAll: (child accept: self)
|
||||
].
|
||||
stream nextPutAll: '</p>'.
|
||||
|
||||
^ stream contents
|
||||
]
|
||||
|
||||
{ #category : #visiting }
|
||||
CMHTMLVisitor >> visitPlainLine: node [
|
||||
^ self error: 'should not happen'
|
||||
|
||||
]
|
||||
|
||||
{ #category : #visiting }
|
||||
CMHTMLVisitor >> visitPlainText: node [
|
||||
^ self error: 'should not happen'
|
||||
|
||||
]
|
||||
|
||||
{ #category : #visiting }
|
||||
CMHTMLVisitor >> visitSoftBreak: node [
|
||||
^ String cr
|
||||
|
||||
]
|
||||
|
||||
{ #category : #visiting }
|
||||
CMHTMLVisitor >> visitStrong: node [
|
||||
| retval |
|
||||
retval:= WriteStream on: ''.
|
||||
|
||||
retval nextPutAll: '<strong>'.
|
||||
node children do: [ :child |
|
||||
retval nextPutAll: (child accept: self)
|
||||
].
|
||||
|
||||
retval nextPutAll: '</strong>'.
|
||||
^ retval contents
|
||||
]
|
||||
|
||||
{ #category : #visiting }
|
||||
CMHTMLVisitor >> visitText: node [
|
||||
^ node text
|
||||
|
||||
]
|
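A rough sketch (commentary, not part of the diff) of how these PetitMarkdown visitors chain together: the block grammar parses the document, CMBlockVisitor runs the inline parser over each paragraph (the same first two steps used in XMLElement >> #extractMarkdownImageLinkData above), and CMHTMLVisitor is assumed to render the resulting tree to HTML:

| ast html |
ast := PPCommonMarkBlockParser parse: '# Hello

Some *emphasized* text.'.
ast := ast accept: CMBlockVisitor new. "resolve inline markup inside each block"
html := ast accept: CMHTMLVisitor new. "render the tree to an HTML string"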
99
src/PetitMarkdown/CMVisitor.class.st
Normal file
@@ -0,0 +1,99 @@
|
||||
Class {
|
||||
#name : #CMVisitor,
|
||||
#superclass : #Object,
|
||||
#category : #'PetitMarkdown-Visitors'
|
||||
}
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
CMVisitor >> visitBlockQuote: node [
|
||||
^ self visitWhatever: node
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
CMVisitor >> visitContainer: node [
|
||||
^ self visitWhatever: node
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
CMVisitor >> visitDocument: node [
|
||||
^ self visitWhatever: node
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
CMVisitor >> visitFencedCode: node [
|
||||
^ self visitWhatever: node
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
CMVisitor >> visitHRule: node [
|
||||
^ node
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
CMVisitor >> visitHeader: node [
|
||||
^ self visitWhatever: node
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
CMVisitor >> visitHtml: node [
|
||||
^ node
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
CMVisitor >> visitHtmlBlock: node [
|
||||
^ self visitWhatever: node
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
CMVisitor >> visitIndentedCode: node [
|
||||
^ self visitWhatever: node
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
CMVisitor >> visitLine: node [
|
||||
^ self visitWhatever: node
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
CMVisitor >> visitLinkRefDef: node [
|
||||
^ node
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
CMVisitor >> visitLinkRefDefPlaceholder: node [
|
||||
^ node
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
CMVisitor >> visitList: node [
|
||||
^ self visitWhatever: node
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
CMVisitor >> visitListItem: node [
|
||||
^ self visitWhatever: node
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
CMVisitor >> visitNode: node [
|
||||
^ node
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
CMVisitor >> visitParagraph: node [
|
||||
^ self visitWhatever: node
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
CMVisitor >> visitText: node [
|
||||
^ node
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
CMVisitor >> visitWhatever: node [
|
||||
node children do: [ :child |
|
||||
node replace: child
|
||||
with: (child accept: self)
|
||||
].
|
||||
^ node
|
||||
]
|
30
src/PetitMarkdown/PPCMBlockQuote.class.st
Normal file
@@ -0,0 +1,30 @@
|
||||
Class {
|
||||
#name : #PPCMBlockQuote,
|
||||
#superclass : #PPCMDelegateNode,
|
||||
#instVars : [
|
||||
'code',
|
||||
'infoString'
|
||||
],
|
||||
#category : #'PetitMarkdown-AST'
|
||||
}
|
||||
|
||||
{ #category : #visiting }
|
||||
PPCMBlockQuote >> accept: visitor [
|
||||
^ visitor visitBlockQuote: self
|
||||
]
|
||||
|
||||
{ #category : #visiting }
|
||||
PPCMBlockQuote >> initialize [
|
||||
super initialize.
|
||||
children := Array new: 1.
|
||||
]
|
||||
|
||||
{ #category : #testing }
|
||||
PPCMBlockQuote >> isBlockLevel [
|
||||
^ true
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMBlockQuote >> viewBody [
|
||||
^ (self className ,' ', self text) asRopedText.
|
||||
]
|
25
src/PetitMarkdown/PPCMContainer.class.st
Normal file
@@ -0,0 +1,25 @@
|
||||
Class {
|
||||
#name : #PPCMContainer,
|
||||
#superclass : #PPCMDelegateNode,
|
||||
#category : #'PetitMarkdown-AST'
|
||||
}
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
PPCMContainer >> accept: visitor [
|
||||
^ visitor visitContainer: self
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMContainer >> viewBody [
|
||||
|
||||
| aText |
|
||||
aText := self className asRopedText.
|
||||
|
||||
self children do: [ :child |
|
||||
aText append: ' ' asRopedText.
|
||||
aText append: (child className asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor) ].
|
||||
|
||||
|
||||
^ aText
|
||||
]
|
92
src/PetitMarkdown/PPCMDelegateNode.class.st
Normal file
@@ -0,0 +1,92 @@
|
||||
Class {
|
||||
#name : #PPCMDelegateNode,
|
||||
#superclass : #PPCMNode,
|
||||
#instVars : [
|
||||
'children'
|
||||
],
|
||||
#category : #'PetitMarkdown-AST'
|
||||
}
|
||||
|
||||
{ #category : #visiting }
|
||||
PPCMDelegateNode >> accept: visitor [
|
||||
^ visitor visitDelegate: self
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMDelegateNode >> addChild: node [
|
||||
self assert: node isCommonMarkNode.
|
||||
|
||||
self children add: node.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMDelegateNode >> addChildFirst: node [
|
||||
self assert: node isCommonMarkNode.
|
||||
|
||||
self children addFirst: node.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMDelegateNode >> addChildren: nodes [
|
||||
nodes do: [ :node | self addChild: node ]
|
||||
]
|
||||
|
||||
{ #category : #enumerating }
|
||||
PPCMDelegateNode >> allChildren [
|
||||
| retval |
|
||||
retval := OrderedCollection new.
|
||||
self children do: [ :child | retval addAll: child allChildren ].
|
||||
^ retval
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMDelegateNode >> child [
|
||||
self assert: children size = 1.
|
||||
^ children first
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMDelegateNode >> child: whatever [
|
||||
children at: 1 put: whatever
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMDelegateNode >> children [
|
||||
^ children
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMDelegateNode >> children: whatever [
|
||||
children := whatever
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMDelegateNode >> firstChild [
|
||||
^ children at: 1
|
||||
]
|
||||
|
||||
{ #category : #initialization }
|
||||
PPCMDelegateNode >> initialize [
|
||||
children := OrderedCollection new
|
||||
]
|
||||
|
||||
{ #category : #replacing }
|
||||
PPCMDelegateNode >> replace: child with: anotherChild [
|
||||
children doWithIndex: [ :ch :index |
|
||||
(ch == child) ifTrue: [
|
||||
children at: index put: anotherChild .
|
||||
^ true
|
||||
]
|
||||
].
|
||||
^ false
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMDelegateNode >> secondChild [
|
||||
^ children at: 2
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMDelegateNode >> thirdChild [
|
||||
^ children at: 3
|
||||
]
|
52
src/PetitMarkdown/PPCMDocument.class.st
Normal file
@@ -0,0 +1,52 @@
|
||||
Class {
|
||||
#name : #PPCMDocument,
|
||||
#superclass : #PPCMDelegateNode,
|
||||
#category : #'PetitMarkdown-AST'
|
||||
}
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
PPCMDocument >> accept: visitor [
|
||||
^ visitor visitDocument: self
|
||||
]
|
||||
|
||||
{ #category : #testing }
|
||||
PPCMDocument >> isBlockLevel [
|
||||
^ true
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMDocument >> viewBody [
|
||||
|
||||
| aText |
|
||||
aText := self className asRopedText.
|
||||
|
||||
self children do: [ :child |
|
||||
aText append: ' ' asRopedText.
|
||||
aText append: (child className asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor).
|
||||
aText append: ('= "' asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor).
|
||||
aText append: (child className asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor).
|
||||
aText append:
|
||||
('"' asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor) ].
|
||||
|
||||
|
||||
^ aText
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMDocument >> viewChildrenFor: aView [
|
||||
<gtView>
|
||||
|
||||
children ifNil: [ ^ aView empty ].
|
||||
|
||||
^ aView columnedTree
|
||||
title: 'Document tree';
|
||||
priority: 1;
|
||||
items: [ { self } ];
|
||||
children: #children;
|
||||
column: 'Name' text: #viewBody;
|
||||
expandUpTo: 7
|
||||
]
|
32
src/PetitMarkdown/PPCMEmphasize.class.st
Normal file
@@ -0,0 +1,32 @@
|
||||
Class {
|
||||
#name : #PPCMEmphasize,
|
||||
#superclass : #PPCMDelegateNode,
|
||||
#category : #'PetitMarkdown-AST'
|
||||
}
|
||||
|
||||
{ #category : #visiting }
|
||||
PPCMEmphasize >> accept: visitor [
|
||||
^ visitor visitEmphasize: self
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMEmphasize >> viewBody [
|
||||
| aText |
|
||||
aText := (self className ,' ', self text) asRopedText.
|
||||
|
||||
self children do: [ :child |
|
||||
aText append: ' ' asRopedText.
|
||||
aText append: (child text asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor).
|
||||
aText append: ('= "' asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor).
|
||||
aText append: (child text asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor).
|
||||
aText append:
|
||||
('"' asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor)
|
||||
].
|
||||
|
||||
|
||||
^ aText
|
||||
]
|
34
src/PetitMarkdown/PPCMFencedCode.class.st
Normal file
@@ -0,0 +1,34 @@
|
||||
Class {
|
||||
#name : #PPCMFencedCode,
|
||||
#superclass : #PPCMDelegateNode,
|
||||
#instVars : [
|
||||
'infoString'
|
||||
],
|
||||
#category : #'PetitMarkdown-AST'
|
||||
}
|
||||
|
||||
{ #category : #visiting }
|
||||
PPCMFencedCode >> accept: visitor [
|
||||
^ visitor visitFencedCode: self
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMFencedCode >> code [
|
||||
"hackity hack, this should not be used except for tests..."
|
||||
^ String cr join: (self children collect: [ :e | e text ])
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMFencedCode >> infoString [
|
||||
^ infoString
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMFencedCode >> infoString: anObject [
|
||||
infoString := anObject
|
||||
]
|
||||
|
||||
{ #category : #testing }
|
||||
PPCMFencedCode >> isBlockLevel [
|
||||
^ true
|
||||
]
|
32
src/PetitMarkdown/PPCMHardBreak.class.st
Normal file
@@ -0,0 +1,32 @@
|
||||
Class {
|
||||
#name : #PPCMHardBreak,
|
||||
#superclass : #PPCMNode,
|
||||
#category : #'PetitMarkdown-AST'
|
||||
}
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
PPCMHardBreak >> accept: visitor [
|
||||
^ visitor visitHardBreak: self
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMHardBreak >> viewBody [
|
||||
| aText |
|
||||
aText := (self className ,' ', self text) asRopedText.
|
||||
|
||||
self children do: [ :child |
|
||||
aText append: ' ' asRopedText.
|
||||
aText append: (child text asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor).
|
||||
aText append: ('= "' asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor).
|
||||
aText append: (child text asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor).
|
||||
aText append:
|
||||
('"' asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor)
|
||||
].
|
||||
|
||||
|
||||
^ aText
|
||||
]
|
67
src/PetitMarkdown/PPCMHeader.class.st
Normal file
@@ -0,0 +1,67 @@
|
||||
Class {
|
||||
#name : #PPCMHeader,
|
||||
#superclass : #PPCMDelegateNode,
|
||||
#instVars : [
|
||||
'level',
|
||||
'title'
|
||||
],
|
||||
#category : #'PetitMarkdown-AST'
|
||||
}
|
||||
|
||||
{ #category : #visiting }
|
||||
PPCMHeader >> accept: visitor [
|
||||
^ visitor visitHeader: self
|
||||
]
|
||||
|
||||
{ #category : #initialization }
|
||||
PPCMHeader >> initialize [
|
||||
super initialize.
|
||||
children := Array new: 1.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMHeader >> isBlockLevel [
|
||||
^ true
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMHeader >> level [
|
||||
^ level
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMHeader >> level: anObject [
|
||||
level := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMHeader >> title [
|
||||
^ self child
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMHeader >> title: anObject [
|
||||
self children at: 1 put: anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMHeader >> viewBody [
|
||||
|
||||
| aText |
|
||||
aText := (self className, ' level: ', self level asString) asRopedText.
|
||||
|
||||
self children do: [ :child |
|
||||
aText append: ' ' asRopedText.
|
||||
aText append: (child className asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor).
|
||||
aText append: ('= "' asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor).
|
||||
aText append: (child text asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor).
|
||||
aText append:
|
||||
('"' asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor) ].
|
||||
|
||||
|
||||
^ aText
|
||||
]
|
47
src/PetitMarkdown/PPCMHrule.class.st
Normal file
@@ -0,0 +1,47 @@
|
||||
Class {
|
||||
#name : #PPCMHrule,
|
||||
#superclass : #PPCMNode,
|
||||
#instVars : [
|
||||
'rule'
|
||||
],
|
||||
#category : #'PetitMarkdown-AST'
|
||||
}
|
||||
|
||||
{ #category : #visiting }
|
||||
PPCMHrule >> accept: visitor [
|
||||
^ visitor visitHRule: self
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMHrule >> rule [
|
||||
^ rule
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMHrule >> rule: anObject [
|
||||
rule := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMHrule >> viewBody [
|
||||
|
||||
| aText |
|
||||
aText := (self className ,' ',
|
||||
self rule) asRopedText.
|
||||
|
||||
self children do: [ :child |
|
||||
aText append: ' ' asRopedText.
|
||||
aText append: (child destination asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor).
|
||||
aText append: ('= "' asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor).
|
||||
aText append: (child destination asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor).
|
||||
aText append:
|
||||
('"' asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor)
|
||||
].
|
||||
|
||||
|
||||
^ aText
|
||||
]
|
40
src/PetitMarkdown/PPCMHtml.class.st
Normal file
@@ -0,0 +1,40 @@
|
||||
Class {
|
||||
#name : #PPCMHtml,
|
||||
#superclass : #PPCMNode,
|
||||
#instVars : [
|
||||
'text'
|
||||
],
|
||||
#category : #'PetitMarkdown-AST'
|
||||
}
|
||||
|
||||
{ #category : #visiting }
|
||||
PPCMHtml >> accept: visitor [
|
||||
^ visitor visitHtml: self
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMHtml >> text [
|
||||
^ text
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMHtml >> text: anObject [
|
||||
text := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMHtml >> viewBody [
|
||||
|
||||
| aText |
|
||||
aText := (self className ,' ',
|
||||
self text) asRopedText.
|
||||
|
||||
self children do: [ :child |
|
||||
aText append: ' ' asRopedText.
|
||||
aText append: (child className asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor)
|
||||
].
|
||||
|
||||
|
||||
^ aText
|
||||
]
|
37
src/PetitMarkdown/PPCMHtmlBlock.class.st
Normal file
@@ -0,0 +1,37 @@
|
||||
Class {
|
||||
#name : #PPCMHtmlBlock,
|
||||
#superclass : #PPCMDelegateNode,
|
||||
#category : #'PetitMarkdown-AST'
|
||||
}
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
PPCMHtmlBlock >> accept: visitor [
|
||||
^ visitor visitHtmlBlock: self
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
PPCMHtmlBlock >> isBlockLevel [
|
||||
^ true
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMHtmlBlock >> viewBody [
|
||||
| aText |
|
||||
aText := (self className ,' ', self text) asRopedText.
|
||||
|
||||
self children do: [ :child |
|
||||
aText append: ' ' asRopedText.
|
||||
aText append: (child text asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor).
|
||||
aText append: ('= "' asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor).
|
||||
aText append: (child text asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor).
|
||||
aText append:
|
||||
('"' asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor)
|
||||
].
|
||||
|
||||
|
||||
^ aText
|
||||
]
|
42  src/PetitMarkdown/PPCMIndentedCode.class.st  Normal file
@@ -0,0 +1,42 @@
Class {
	#name : #PPCMIndentedCode,
	#superclass : #PPCMDelegateNode,
	#category : #'PetitMarkdown-AST'
}

{ #category : #'as yet unclassified' }
PPCMIndentedCode >> accept: visitor [
	^ visitor visitIndentedCode: self
]

{ #category : #'as yet unclassified' }
PPCMIndentedCode >> code [
	^ self text
]

{ #category : #'as yet unclassified' }
PPCMIndentedCode >> isBlockLevel [
	^ true
]

{ #category : #accessing }
PPCMIndentedCode >> viewBody [
	| aText |
	aText := (self className ,' ', self text) asRopedText.

	self children do: [ :child |
		aText append: ' ' asRopedText.
		aText append: (child text asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor).
		aText append: ('= "' asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor).
		aText append: (child text asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor).
		aText append:
			('"' asRopedText foreground:
				BrGlamorousColors disabledButtonTextColor)
	].

	^ aText
]
28  src/PetitMarkdown/PPCMInlinedCode.class.st  Normal file
@@ -0,0 +1,28 @@
Class {
	#name : #PPCMInlinedCode,
	#superclass : #PPCMNode,
	#instVars : [
		'code'
	],
	#category : #'PetitMarkdown-AST'
}

{ #category : #visiting }
PPCMInlinedCode >> accept: visitor [
	^ visitor visitInlinedCode: self
]

{ #category : #accessing }
PPCMInlinedCode >> code [
	^ code
]

{ #category : #accessing }
PPCMInlinedCode >> code: anObject [
	code := anObject
]

{ #category : #accessing }
PPCMInlinedCode >> text [
	^ code
]
60  src/PetitMarkdown/PPCMLine.class.st  Normal file
@@ -0,0 +1,60 @@
Class {
	#name : #PPCMLine,
	#superclass : #PPCMDelegateNode,
	#category : #'PetitMarkdown-AST'
}

{ #category : #'as yet unclassified' }
PPCMLine class >> empty [
	^ PPCMLine new
		addChild: (PPCMText empty);
		yourself
]

{ #category : #visiting }
PPCMLine >> accept: visitor [
	^ visitor visitLine: self
]

{ #category : #testing }
PPCMLine >> isBlankLine [
	^ self text = ''
]

{ #category : #testing }
PPCMLine >> isLine [
	^ true
]

{ #category : #visiting }
PPCMLine >> text [
	| stream |
	"hackity hack, this should not be used except for tests..."
	stream := WriteStream on: ''.
	children do: [ :child | stream nextPutAll: child text ].
	^ stream contents
]

{ #category : #accessing }
PPCMLine >> viewBody [
	| aText |
	aText := (self className ,' ', self text) asRopedText.

	self children do: [ :child |
		aText append: ' ' asRopedText.
		aText append: (child text asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor).
		aText append: ('= "' asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor).
		aText append: (child text asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor).
		aText append:
			('"' asRopedText foreground:
				BrGlamorousColors disabledButtonTextColor)
	].

	^ aText
]
78  src/PetitMarkdown/PPCMLink.class.st  Normal file
@@ -0,0 +1,78 @@
Class {
	#name : #PPCMLink,
	#superclass : #PPCMNode,
	#instVars : [
		'label',
		'destination',
		'title'
	],
	#category : #'PetitMarkdown-AST'
}

{ #category : #visiting }
PPCMLink >> accept: visitor [
	^ visitor visitLink: self
]

{ #category : #accessing }
PPCMLink >> destination [
	^ destination
]

{ #category : #accessing }
PPCMLink >> destination: anObject [
	destination := anObject
]

{ #category : #accessing }
PPCMLink >> label [
	^ label
]

{ #category : #accessing }
PPCMLink >> label: anObject [
	label := anObject
]

{ #category : #accessing }
PPCMLink >> printOn: aStream [
	super initialize.
	^ aStream
		nextPutAll:
			'[',self label text,']',
			'(',self destination,')'
]

{ #category : #accessing }
PPCMLink >> title [
	^ title
]

{ #category : #accessing }
PPCMLink >> title: anObject [
	title := anObject
]

{ #category : #accessing }
PPCMLink >> viewBody [
	| aText |
	aText := (self className ,' ',
		self label children first text, ' -> ', self destination) asRopedText.

	self children do: [ :child |
		aText append: ' ' asRopedText.
		aText append: (child destination asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor).
		aText append: ('= "' asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor).
		aText append: (child destination asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor).
		aText append:
			('"' asRopedText foreground:
				BrGlamorousColors disabledButtonTextColor)
	].

	^ aText
]
40  src/PetitMarkdown/PPCMLinkRef.class.st  Normal file
@@ -0,0 +1,40 @@
Class {
	#name : #PPCMLinkRef,
	#superclass : #PPCMNode,
	#instVars : [
		'label'
	],
	#category : #'PetitMarkdown-AST'
}

{ #category : #'as yet unclassified' }
PPCMLinkRef >> accept: visitor [
	^ visitor visitLinkRef: self
]

{ #category : #accessing }
PPCMLinkRef >> label [
	^ label
]

{ #category : #accessing }
PPCMLinkRef >> label: anObject [
	label := anObject
]

{ #category : #accessing }
PPCMLinkRef >> viewBody [
	| aText |
	aText := (self className ,' ',
		self label text) asRopedText.

	self children do: [ :child |
		aText append: ' ' asRopedText.
		aText append: (child className asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor)
	].

	^ aText
]
74  src/PetitMarkdown/PPCMLinkRefDef.class.st  Normal file
@@ -0,0 +1,74 @@
Class {
	#name : #PPCMLinkRefDef,
	#superclass : #PPCMNode,
	#instVars : [
		'label',
		'destination',
		'title'
	],
	#category : #'PetitMarkdown-AST'
}

{ #category : #visiting }
PPCMLinkRefDef >> accept: visitor [
	^ visitor visitLinkRefDef: self
]

{ #category : #accessing }
PPCMLinkRefDef >> destination [
	^ destination
]

{ #category : #accessing }
PPCMLinkRefDef >> destination: anObject [
	destination := anObject
]

{ #category : #testing }
PPCMLinkRefDef >> isBlockLevel [
	^ true
]

{ #category : #accessing }
PPCMLinkRefDef >> label [
	^ label
]

{ #category : #accessing }
PPCMLinkRefDef >> label: anObject [
	label := anObject
]

{ #category : #accessing }
PPCMLinkRefDef >> title [
	^ title
]

{ #category : #accessing }
PPCMLinkRefDef >> title: anObject [
	title := anObject
]

{ #category : #accessing }
PPCMLinkRefDef >> viewBody [
	| aText |
	aText := (self className ,' ',
		self label text, ' -> ', self destination) asRopedText.

	self children do: [ :child |
		aText append: ' ' asRopedText.
		aText append: (child destination asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor).
		aText append: ('= "' asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor).
		aText append: (child destination asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor).
		aText append:
			('"' asRopedText foreground:
				BrGlamorousColors disabledButtonTextColor)
	].

	^ aText
]
31  src/PetitMarkdown/PPCMLinkRefDefPlaceholder.class.st  Normal file
@@ -0,0 +1,31 @@
Class {
	#name : #PPCMLinkRefDefPlaceholder,
	#superclass : #PPCMNode,
	#category : #'PetitMarkdown-AST'
}

{ #category : #'as yet unclassified' }
PPCMLinkRefDefPlaceholder >> accept: visitor [
	^ visitor visitLinkRefDefPlaceholder: self
]

{ #category : #'as yet unclassified' }
PPCMLinkRefDefPlaceholder >> isBlockLevel [
	^ true
]

{ #category : #accessing }
PPCMLinkRefDefPlaceholder >> viewBody [
	| aText |
	aText := (self className ) asRopedText.

	self children do: [ :child |
		aText append: ' ' asRopedText.
		aText append: (child className asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor).
	].

	^ aText
]
111  src/PetitMarkdown/PPCMList.class.st  Normal file
@@ -0,0 +1,111 @@
Class {
	#name : #PPCMList,
	#superclass : #PPCMDelegateNode,
	#instVars : [
		'type',
		'start'
	],
	#category : #'PetitMarkdown-AST'
}

{ #category : #visiting }
PPCMList >> accept: visitor [
	^ visitor visitList: self
]

{ #category : #accessing }
PPCMList >> isBlockLevel [
	^ true
]

{ #category : #'as yet unclassified' }
PPCMList >> isLooseItem: item [
	| document size |
	"empty item case"
	(item children size == 0) ifTrue: [ ^ false ].

	document := item child.
	size := document children size.

	size < 3 ifTrue: [ ^ false ].

	(1 to: size - 2) do: [ :idx |
		((document children at: idx) isBlockLevel and:
			[(document children at: idx + 1) isBlankLine and:
			[(document children at: idx + 2) isBlockLevel] ]) ifTrue: [ ^ true ]
	].
	^ false
]

{ #category : #'as yet unclassified' }
PPCMList >> isLooseList [
	| size |
	size := children size.

	size < 3 ifTrue: [ ^ false ].

	(1 to: size - 2) do: [ :idx |
		((children at: idx) isBlockLevel and:
			[(children at: idx + 1) isBlankLine and:
			[(children at: idx + 2) isBlockLevel] ]) ifTrue: [ ^ true ]
	].
	^ false
]

{ #category : #'as yet unclassified' }
PPCMList >> isTight [
	"blanks in the list?"
	self isLooseList ifTrue: [
		^ false
	].

	"blanks in the items?"
	self children do: [ :listItem |
		(self isLooseItem: listItem) ifTrue: [
			^ false
		]
	].
	^ true
]

{ #category : #accessing }
PPCMList >> start [
	^ start
]

{ #category : #accessing }
PPCMList >> start: anObject [
	start := anObject
]

{ #category : #accessing }
PPCMList >> type [
	^ type
]

{ #category : #accessing }
PPCMList >> type: string [
	type := string
]

{ #category : #accessing }
PPCMList >> viewBody [
	| aText |
	aText := (self className, ' type: ', self type) asRopedText.

	self children do: [ :child |
		aText append: ' ' asRopedText.
		aText append: (child className asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor).
		aText append: ('= "' asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor).
		aText append: (child className asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor).
		aText append:
			('"' asRopedText foreground:
				BrGlamorousColors disabledButtonTextColor) ].

	^ aText
]
43  src/PetitMarkdown/PPCMListItem.class.st  Normal file
@@ -0,0 +1,43 @@
Class {
	#name : #PPCMListItem,
	#superclass : #PPCMDelegateNode,
	#category : #'PetitMarkdown-AST'
}

{ #category : #visiting }
PPCMListItem >> accept: visitor [
	^ visitor visitListItem: self
]

{ #category : #'as yet unclassified' }
PPCMListItem >> initialize [
	super initialize.
	children := Array new: 1.
]

{ #category : #'as yet unclassified' }
PPCMListItem >> isBlockLevel [
	^ true
]

{ #category : #accessing }
PPCMListItem >> viewBody [
	| aText |
	aText := self className asRopedText.

	self children do: [ :child |
		aText append: ' ' asRopedText.
		aText append: (child className asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor).
		aText append: ('= "' asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor).
		aText append: (child className asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor).
		aText append:
			('"' asRopedText foreground:
				BrGlamorousColors disabledButtonTextColor) ].

	^ aText
]
67  src/PetitMarkdown/PPCMNode.class.st  Normal file
@@ -0,0 +1,67 @@
Class {
	#name : #PPCMNode,
	#superclass : #Object,
	#category : #'PetitMarkdown-AST'
}

{ #category : #visiting }
PPCMNode >> accept: visitor [
	^ visitor visitNode: self
]

{ #category : #enumerating }
PPCMNode >> allChildren [
	^ Array with: self
]

{ #category : #accessing }
PPCMNode >> children [
	^ #()
]

{ #category : #gt }
PPCMNode >> gtTreeViewIn: composite [
	<gtInspectorPresentationOrder: 40>

	composite tree
		title: 'Tree';
		children: [:n | n children ];
		format: [:n| n name ifNil: [ n asString ] ifNotNil: [n name] ];
		shouldExpandToLevel: 6
]

{ #category : #testing }
PPCMNode >> isBlankLine [
	^ false
]

{ #category : #testing }
PPCMNode >> isBlockLevel [
	^ false
]

{ #category : #testing }
PPCMNode >> isCommonMarkNode [
	^ true
]

{ #category : #testing }
PPCMNode >> isLine [
	^ false
]

{ #category : #testing }
PPCMNode >> isParagraph [
	^ false
]

{ #category : #replacing }
PPCMNode >> replace: child with: anotherChild [
	^ false
]

{ #category : #accessing }
PPCMNode >> text [
	"hackity hack, this should not be used except for tests..."
	^ String cr join: (self children collect: [ :e | e text ])
]
47  src/PetitMarkdown/PPCMParagraph.class.st  Normal file
@@ -0,0 +1,47 @@
Class {
	#name : #PPCMParagraph,
	#superclass : #PPCMDelegateNode,
	#category : #'PetitMarkdown-AST'
}

{ #category : #visiting }
PPCMParagraph >> accept: visitor [
	^ visitor visitParagraph: self
]

{ #category : #accessing }
PPCMParagraph >> isBlockLevel [
	^ true
]

{ #category : #testing }
PPCMParagraph >> isParagraph [
	^ true
]

{ #category : #accessing }
PPCMParagraph >> text [
	"hackity hack, this should not be used except for tests..."
	^ String cr join: (self children collect: [ :e | e text ])
]

{ #category : #accessing }
PPCMParagraph >> viewBody [
	| aText |
	aText := self className asRopedText.

	self children do: [ :child |
		aText append: ' ' asRopedText.
		aText append: (child className asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor).
		aText append: ('= "' asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor).
		aText append: (child text asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor).
		aText append:
			('"' asRopedText foreground:
				BrGlamorousColors disabledButtonTextColor) ].

	^ aText
]
62  src/PetitMarkdown/PPCMPlainLine.class.st  Normal file
@@ -0,0 +1,62 @@
Class {
	#name : #PPCMPlainLine,
	#superclass : #PPCMNode,
	#instVars : [
		'text'
	],
	#category : #'PetitMarkdown-AST'
}

{ #category : #'as yet unclassified' }
PPCMPlainLine class >> empty [
	^ self new
		text: '';
		yourself
]

{ #category : #visiting }
PPCMPlainLine >> accept: visitor [
	^ visitor visitPlainLine: self
]

{ #category : #accessing }
PPCMPlainLine >> isBlankLine [
	^ self text = ''
]

{ #category : #accessing }
PPCMPlainLine >> isLine [
	^ true
]

{ #category : #visiting }
PPCMPlainLine >> text [
	^ text
]

{ #category : #visiting }
PPCMPlainLine >> text: whatever [
	text := whatever
]

{ #category : #accessing }
PPCMPlainLine >> viewBody [
	| aText |
	aText := (self className ,' ', self text) asRopedText.

	self children do: [ :child |
		aText append: ' ' asRopedText.
		aText append: (child text asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor).
		aText append: ('= "' asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor).
		aText append: (child text asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor).
		aText append:
			('"' asRopedText foreground:
				BrGlamorousColors disabledButtonTextColor) ].

	^ aText
]
45  src/PetitMarkdown/PPCMPlainText.class.st  Normal file
@@ -0,0 +1,45 @@
Class {
	#name : #PPCMPlainText,
	#superclass : #PPCMNode,
	#instVars : [
		'text'
	],
	#category : #'PetitMarkdown-AST'
}

{ #category : #'as yet unclassified' }
PPCMPlainText class >> empty [
	^ self new
		text: '';
		yourself
]

{ #category : #visiting }
PPCMPlainText >> accept: visitor [
	^ visitor visitPlainText: self
]

{ #category : #converting }
PPCMPlainText >> asString [
	^ text
]

{ #category : #printing }
PPCMPlainText >> printOn: aStream [
	super printOn: aStream.
	aStream nextPut: $(.
	aStream nextPut: $'.
	text isNil ifFalse: [ aStream nextPutAll: text ].
	aStream nextPut: $'.
	aStream nextPut: $).
]

{ #category : #accessing }
PPCMPlainText >> text [
	^ text
]

{ #category : #accessing }
PPCMPlainText >> text: anObject [
	text := anObject
]
src/PetitMarkdown/PPCMSoftBreak.class.st
Normal file
33
src/PetitMarkdown/PPCMSoftBreak.class.st
Normal file
@ -0,0 +1,33 @@
|
||||
Class {
|
||||
#name : #PPCMSoftBreak,
|
||||
#superclass : #PPCMNode,
|
||||
#category : #'PetitMarkdown-AST'
|
||||
}
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
PPCMSoftBreak >> accept: visitor [
|
||||
^ visitor visitSoftBreak: self
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PPCMSoftBreak >> viewBody [
|
||||
|
||||
| aText |
|
||||
aText := (self className ,' ', self text) asRopedText.
|
||||
|
||||
self children do: [ :child |
|
||||
aText append: ' ' asRopedText.
|
||||
aText append: (child text asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor).
|
||||
aText append: ('= "' asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor).
|
||||
aText append: (child text asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor).
|
||||
aText append:
|
||||
('"' asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor)
|
||||
].
|
||||
|
||||
|
||||
^ aText
|
||||
]
|
27  src/PetitMarkdown/PPCMStrong.class.st  Normal file
@@ -0,0 +1,27 @@
Class {
	#name : #PPCMStrong,
	#superclass : #PPCMDelegateNode,
	#category : #'PetitMarkdown-AST'
}

{ #category : #visiting }
PPCMStrong >> accept: visitor [
	^ visitor visitStrong: self
]

{ #category : #accessing }
PPCMStrong >> viewBody [
	| aText |
	aText := (self className ,' ',
		self text) asRopedText.

	self children do: [ :child |
		aText append: ' ' asRopedText.
		aText append: (child className asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor)
	].

	^ aText
]
78  src/PetitMarkdown/PPCMText.class.st  Normal file
@@ -0,0 +1,78 @@
Class {
	#name : #PPCMText,
	#superclass : #PPCMNode,
	#instVars : [
		'text'
	],
	#category : #'PetitMarkdown-AST'
}

{ #category : #'as yet unclassified' }
PPCMText class >> empty [
	^ self new
		text: '';
		yourself
]

{ #category : #comparing }
PPCMText >> = anObject [
	^ text = anObject
]

{ #category : #visiting }
PPCMText >> accept: visitor [
	^ visitor visitText: self
]

{ #category : #converting }
PPCMText >> asString [
	^ text
]

{ #category : #comparing }
PPCMText >> hash [
	^ text hash
]

{ #category : #printing }
PPCMText >> printOn: aStream [
	super printOn: aStream.
	aStream nextPut: $(.
	aStream nextPut: $'.
	text isNil ifFalse: [ aStream nextPutAll: text ].
	aStream nextPut: $'.
	aStream nextPut: $).
]

{ #category : #accessing }
PPCMText >> text [
	^ text ifNil: [ '' ]
]

{ #category : #accessing }
PPCMText >> text: anObject [
	text := anObject
]

{ #category : #accessing }
PPCMText >> viewBody [
	| aText |
	aText := (self className ,' ', self text) asRopedText.

	self children do: [ :child |
		aText append: ' ' asRopedText.
		aText append: (child text asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor).
		aText append: ('= "' asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor).
		aText append: (child text asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor).
		aText append:
			('"' asRopedText foreground:
				BrGlamorousColors disabledButtonTextColor)
	].

	^ aText
]
743  src/PetitMarkdown/PPCommonMarkBlockParser.class.st  Normal file
@@ -0,0 +1,743 @@
Class {
	#name : #PPCommonMarkBlockParser,
	#superclass : #PPCompositeParser,
	#instVars : [
		'space',
		'lineEnd',
		'linePrefix',
		'indentedCode',
		'fencedCode',
		'codeFirstFenceIndent',
		'newline',
		'codeFenceStart',
		'infoString',
		'prefix',
		'codeFenceStop',
		'codeFenceIndent',
		'codeLine',
		'prefixedEmptyLine',
		'documentEnd',
		'codeIndent',
		'emptyLine',
		'contentElement',
		'horizontalRule',
		'quoteBlock',
		'code',
		'list',
		'htmlBlock',
		'header',
		'linkRefDef',
		'paragraph',
		'document',
		'ATXHeader',
		'setextHeader',
		'setexLine',
		'setextHeaderUnderline',
		'listItem',
		'htmlTag',
		'plainLine',
		'quoteDedent',
		'quote',
		'listBegin',
		'listEmptyItem',
		'listEnd',
		'listOrderedMarker',
		'listBulletMarker',
		'listMarker',
		'listDoubleBlanks',
		'listBullet',
		'listContent',
		'listItemEnd',
		'quoteIndent',
		'paragraphLine',
		'lazyParagraphPrefix',
		'content',
		'linkLabel',
		'linkDestination',
		'linkTitle',
		'lineStart',
		'linkQuoteStart',
		'linkQuoteStop',
		'htmlBlockLine',
		'abstractLinkTitle'
	],
	#category : #'PetitMarkdown-Parser'
}

{ #category : #headers }
PPCommonMarkBlockParser >> ATXHeader [
	| begin end title |
	begin := (($# asParser plus) setMax: 6).
	end := ((space, $# asParser plus) optional trimRight: space), lineEnd.

	title := end negate plus flatten asPPCMPlainLine.

	^ linePrefix, begin, (end not, space, title ==> #last) optional, end

	map: [ :_lp :level :_titleLine :_end |
		| size titleLine |
		size := level size.
		titleLine := _titleLine ifNil: [ PPCMPlainLine empty ].

		PPCMHeader new
			level: size;
			title: titleLine;
			yourself
	]
]

{ #category : #links }
PPCommonMarkBlockParser >> abstractLinkTitle [
	^ (space preceeds / lineStart),
	linkQuoteStart,
	( (linkQuoteStop / (lineEnd, emptyLine)) not,
		(('\' asParser, linkQuoteStop) / #any asParser)
	) plus flatten,
	linkQuoteStop

	==> [ :e | self decodeEntities: (self escape: (e third)) ]
]

{ #category : #code }
PPCommonMarkBlockParser >> code [
	^ indentedCode / fencedCode
]

{ #category : #code }
PPCommonMarkBlockParser >> codeFenceIndent [
	^ [ :context |
		context codeFenceIndent parseOn: context
	] asParser
]

{ #category : #code }
PPCommonMarkBlockParser >> codeFenceStart [
	| tilde eh |
	tilde := ($~ asParser min: 3) >=> [ :context :cc |
		| retval |
		retval := cc value.
		retval isPetitFailure ifFalse: [
			context codeFence: ($~ asParser min: retval size).
		].
		retval
	].

	eh := ($` asParser min: 3) >=> [ :context :cc |
		| retval |
		retval := cc value.
		retval isPetitFailure ifFalse: [
			context codeFence: ($` asParser min: retval size).
		].
		retval
	].

	^ codeFirstFenceIndent, (tilde / eh)
]

{ #category : #code }
PPCommonMarkBlockParser >> codeFenceStop [
	^ ([ :context |
		context codeFence parseOn: context
	] asParser trimRight: space), lineEnd and
]

{ #category : #code }
PPCommonMarkBlockParser >> codeFirstFenceIndent [
	^ (space max: 3) >=> [ :context :cc |
		| result |
		result := cc value.
		result isPetitFailure ifFalse: [
			context codeFenceIndent: (space max: result size).
		].
		result
	]
]

{ #category : #code }
PPCommonMarkBlockParser >> codeIndent [
	^ ' ' asParser / Character tab asParser
]

{ #category : #code }
PPCommonMarkBlockParser >> codeLine [
	^ newline negate star flatten

	map: [ :_text |
		| textNode |
		textNode := PPCMText new
			text: (self encodeEntities: _text);
			yourself.

		PPCMLine new
			addChild: textNode;
			yourself
	]
]

{ #category : #document }
PPCommonMarkBlockParser >> content [
	^ contentElement,
	((prefix, contentElement) nonEmpty ==> #second) star

	map: [ :first :rest |
		PPCMContainer new
			addChild: first;
			addChildren: rest;
			yourself
	]
]

{ #category : #document }
PPCommonMarkBlockParser >> contentElement [
	^
	horizontalRule /
	quoteBlock /
	code /
	list /
	htmlBlock /
	header /
	linkRefDef /
	paragraph /
	((emptyLine, lineEnd) ==> #first)
]

{ #category : #support }
PPCommonMarkBlockParser >> decodeEntities: string [
	^ PPCommonMarkUtils instance decodeEntities: string
]

{ #category : #document }
PPCommonMarkBlockParser >> document [
	^ ((prefix, contentElement) nonEmpty ==> #second) star

	map: [ :elems |
		PPCMDocument new
			addChildren: elems;
			yourself
	]
]

{ #category : #'lines and whitespace' }
PPCommonMarkBlockParser >> documentEnd [
	^ #eof asParser
]

{ #category : #'lines and whitespace' }
PPCommonMarkBlockParser >> emptyLine [
	^ space star, #endOfLine asParser ==> [ :e |
		PPCMPlainLine empty
	]
]

{ #category : #support }
PPCommonMarkBlockParser >> encodeEntities: string [
	^ PPCommonMarkUtils instance encodeEntities: string
]

{ #category : #support }
PPCommonMarkBlockParser >> escape: string [
	^ PPCommonMarkUtils instance escape: string
]

{ #category : #support }
PPCommonMarkBlockParser >> escapeUrl: string [
	^ PPCommonMarkUtils instance escapeUrl: string
]

{ #category : #code }
PPCommonMarkBlockParser >> fencedCode [
	^ linePrefix and, codeFenceStart, infoString optional, lineEnd,
	(
		((
			(prefix, linePrefix, codeFenceStop) not, prefix, codeFenceIndent, codeLine, lineEnd) ==> #fourth /
			(prefixedEmptyLine, lineEnd ==> #first)
		) nonEmpty
	) star,
	((((prefix, linePrefix, codeFenceStop) / documentEnd), lineEnd) / prefix not)

	map: [ :_lp :_fenceStart :_info :_le :_code :_fenceStop |
		PPCMFencedCode new
			infoString: _info;
			addChildren: _code;
			yourself
	]
]

{ #category : #headers }
PPCommonMarkBlockParser >> header [
	^ ATXHeader / setextHeader
]

{ #category : #'horizontal rule' }
PPCommonMarkBlockParser >> horizontalRule [
	| stars minus under |
	stars := '*' asParser, (('*' asParser trim: space) min: 2).
	minus := '-' asParser, (('-' asParser trim: space) min: 2).
	under := '_' asParser, (('_' asParser trim: space) min: 2).

	^ linePrefix, ((stars / minus / under) flatten), space star, lineEnd
	map: [ :_prefix :_hrule :_space :_le |
		PPCMHrule new
			rule: _hrule;
			yourself
	]
]

{ #category : #'html blocks' }
PPCommonMarkBlockParser >> htmlBlock [
	^ (linePrefix, htmlTag) and, htmlBlockLine, lineEnd,
	(prefix, (emptyLine not), htmlBlockLine, lineEnd ==> #third) star

	map: [ :_pred :_line :_le :_rest |
		PPCMHtmlBlock new
			addChild: _line;
			addChildren: _rest;
			yourself
	]
]

{ #category : #'html blocks' }
PPCommonMarkBlockParser >> htmlBlockLine [
	^ newline negate star flatten

	map: [ :_text |
		| text |
		text := PPCMText new
			text: _text;
			yourself.

		PPCMLine new
			addChild: text;
			yourself
	]
]

{ #category : #'html blocks' }
PPCommonMarkBlockParser >> htmlTag [
	^ '<table' asParser /
	'<tr' asParser /
	'<td' asParser /
	'<div' asParser /
	'<DIV' asParser /
	'<p' asParser /
	'</table' asParser /
	'</tr' asParser /
	'</td' asParser /
	'</div' asParser /
	'</DIV' asParser /
	'</p' asParser /
	'<!--' asParser /
	('<?' asParser, #letter asParser plus) /
	'<![CDATA[' asParser
]

{ #category : #code }
PPCommonMarkBlockParser >> indentedCode [
	^ codeIndent, emptyLine not, codeLine, lineEnd,
	(
		((prefix, codeIndent, codeLine, lineEnd) ==> #third) /
		((prefix, emptyLine, lineEnd) nonEmpty ==> #second)
	) star

	map: [ :_cp :_pred :_first :_le :_rest |
		PPCMIndentedCode new
			addChild: _first;
			addChildren: _rest;
			yourself
	]
]

{ #category : #code }
PPCommonMarkBlockParser >> infoString [
	^ ((lineEnd / space / codeFenceStop / $` asParser) negate plus trimBlanks flatten),
	(lineEnd / $` asParser) negate star ==> [:e | self decodeEntities: e first ]
]

{ #category : #paragraphs }
PPCommonMarkBlockParser >> lazyParagraphPrefix [
	^ (prefix, quoteIndent) not,
	(quote / space) star
]

{ #category : #'lines and whitespace' }
PPCommonMarkBlockParser >> lineEnd [
	^ newline / documentEnd
]

{ #category : #'lines and whitespace' }
PPCommonMarkBlockParser >> linePrefix [
	^ ((PPPossessiveRepeatingParser on: (#blank asParser ))
		setMax: 3;
		yourself),
	(#blank asParser not)
	==> #first
]

{ #category : #'lines and whitespace' }
PPCommonMarkBlockParser >> lineStart [
	^ #startOfLine asParser
]

{ #category : #links }
PPCommonMarkBlockParser >> linkDestination [
	| parens escapedParen |
	"TODO: fix?"
	escapedParen := '\(' asParser / '\)' asParser.

	parens := PPDelegateParser new
		name: 'parens';
		yourself.
	"Parens cannot be nested!"
	parens setParser: $( asParser, (($( asParser / $) asParser) not, (escapedParen / #any asParser)) star, $) asParser.

	^ (($< asParser, ((($> asParser / newline) not, #any asParser) star) flatten, $> asParser)
		==> [ :e | self escapeUrl: (self escape: e second) ]) /
	((space / lineEnd / $) asParser) not, (parens / escapedParen / $( asParser negate)) plus flatten
		==> [ :e | self escapeUrl: (self escape: e) ]
]

{ #category : #links }
PPCommonMarkBlockParser >> linkLabel [
	| label |
	label := ($] asParser not, ('\]' asParser / #any asParser)) star flatten.

	^ $[ asParser, label, $] asParser

	map: [ :_start :_label :_end |
		PPCMText new
			text: (self escape: _label);
			yourself
	]
]

{ #category : #links }
PPCommonMarkBlockParser >> linkQuoteStart [
	^ PPFailingParser message: 'abstract quote start'.
]

{ #category : #links }
PPCommonMarkBlockParser >> linkQuoteStop [
	^ PPFailingParser message: 'abstract quote stop'
]

{ #category : #links }
PPCommonMarkBlockParser >> linkRefDef [
	^ (linePrefix, linkLabel, ':' asParser, (lineEnd optional trim: space), linkDestination, ((lineEnd optional trim: space), linkTitle ==> #second) optional, space star, lineEnd

	map: [ :_lp :_label :_semicolon :_ws1 :_dest :_title :_ws3 :_le |
		PPCMLinkRefDef new
			label: _label;
			title: _title;
			destination: _dest;
			yourself.
	])

	>=> [ :context :cc |
		| retval |
		retval := cc value.
		retval isPetitFailure ifFalse: [
			context registerLink: retval.
			retval := PPCMLinkRefDefPlaceholder new.
		].
		retval
	]
]

{ #category : #links }
PPCommonMarkBlockParser >> linkTitle [
	^
	((abstractLinkTitle
		where: linkQuoteStart is: $" asParser)
		where: linkQuoteStop is: $" asParser) /
	((abstractLinkTitle
		where: linkQuoteStart is: $' asParser)
		where: linkQuoteStop is: $' asParser) /
	((abstractLinkTitle
		where: linkQuoteStart is: $( asParser)
		where: linkQuoteStop is: $) asParser)
]

{ #category : #lists }
PPCommonMarkBlockParser >> list [
	^
	listBegin,
	listItem,
	(
		(prefix, listItem ==> #second) /
		"empty item is part of the list only if followed by normal item"
		(listEmptyItem, (prefix, listItem) and ==> #first)
	) star,
	listEnd

	map: [ :_start :_first :_rest :_end |
		PPCMList new
			type: _start second;
			start: _start first;
			addChild: _first;
			addChildren: _rest;
			yourself
	]
]

{ #category : #lists }
PPCommonMarkBlockParser >> listBegin [
	^ (linePrefix, (listOrderedMarker / listBulletMarker)) and ==> #second >=> [ :context :cc |
		| retval |
		retval := cc value.
		retval isPetitFailure ifFalse: [
			context listItemType: (retval third).
		].
		retval
	]
]

{ #category : #lists }
PPCommonMarkBlockParser >> listBullet [
	^
	"push content as spaces on the indent stack"
	(
		(linePrefix, listMarker, space, linePrefix optional) flatten and
			==> [:e | self spaces: (e size)]
		/
		(linePrefix, listMarker, lineEnd) flatten and
			==> [:e | self spaces: (e size)]
	) pushAsParser,
	"Consume marker and one space"
	(linePrefix, listMarker, (space / lineEnd and))
]

{ #category : #lists }
PPCommonMarkBlockParser >> listBulletMarker [
	^
	($- asParser /
	$* asParser /
	$+ asParser)

	"Start . type . parser to accept the same type"
	==> [ :e | { nil . #unordered . e asParser } ]
]

{ #category : #lists }
PPCommonMarkBlockParser >> listContent [
	^
	contentElement,
	(
		((prefix, contentElement) nonEmpty ==> #second) /
		"Empty line of the list content is part of the content only if followed by non-empty line"
		((prefixedEmptyLine, lineEnd, (prefix, contentElement) and) nonEmpty
			==> #first)
	) star

	map: [ :_first :_rest |
		PPCMContainer new
			addChild: _first;
			addChildren: _rest;
			yourself
	]
]

{ #category : #lists }
PPCommonMarkBlockParser >> listDoubleBlanks [
	^
	(prefixedEmptyLine, lineEnd) nonEmpty,
	(prefixedEmptyLine, lineEnd) nonEmpty
]

{ #category : #lists }
PPCommonMarkBlockParser >> listEmptyItem [
	^ (listDoubleBlanks not, prefixedEmptyLine, lineEnd) nonEmpty ==> #second
]

{ #category : #lists }
PPCommonMarkBlockParser >> listEnd [
	^ [ :context |
		context listItemStack pop
	] asParser
]

{ #category : #lists }
PPCommonMarkBlockParser >> listItem [
	^ horizontalRule not, listBullet, listContent, listItemEnd

	map: [ :_pred :_bullet :_content :_end |
		PPCMListItem new
			child: _content;
			yourself
	]
]

{ #category : #lists }
PPCommonMarkBlockParser >> listItemEnd [
	^ [ :context | context indentStack pop ] asParser
]

{ #category : #lists }
PPCommonMarkBlockParser >> listMarker [
	^ [ :context | context listItemType parseOn: context ] asParser
]

{ #category : #lists }
PPCommonMarkBlockParser >> listOrderedMarker [
	| dot bracket |
	dot := #digit asParser plus flatten, $. asParser.
	bracket := #digit asParser plus flatten, $) asParser.

	"Start . type . parser to accept the same type"
	^ (dot ==> [ :e | { e first asNumber . #ordered . dot } ]) /
	(bracket ==> [ :e | { e first asNumber . #ordered . bracket } ])
]

{ #category : #'lines and whitespace' }
PPCommonMarkBlockParser >> newline [
	^ #newline asParser
]

{ #category : #paragraphs }
PPCommonMarkBlockParser >> paragraph [
	^ linePrefix, (emptyLine) not, paragraphLine trimBlanks, lineEnd,
	(
		(prefix / lazyParagraphPrefix),
		(emptyLine / ATXHeader / horizontalRule / fencedCode / htmlBlock / list / quote) not,
		paragraphLine trimBlanks,
		lineEnd ==> #third
	) nonEmpty star
	map: [ :_lp :_pred :_line :_end :_rest |
		| para |
		para := PPCMParagraph new.
		para addChild: _line.
		_rest do: [ :anotherLine | para addChild: anotherLine ].
		para
	]
]

{ #category : #paragraphs }
PPCommonMarkBlockParser >> paragraphLine [
	^ plainLine
]

{ #category : #'lines and whitespace' }
PPCommonMarkBlockParser >> plainLine [
	^ newline negate star flatten

	map: [ :_text |
		PPCMPlainLine new
			text: _text;
			yourself
	]
]

{ #category : #'lines and whitespace' }
PPCommonMarkBlockParser >> prefix [
	^ #prefix asParser
]

{ #category : #'lines and whitespace' }
PPCommonMarkBlockParser >> prefixedEmptyLine [
	"empty line with appropriate number of quotes, but with arbitrary whitespaces"
	^ (quoteDedent not, (quote / space) star, #endOfLine asParser) ==> [ :e | PPCMPlainLine empty ]
]

{ #category : #quotes }
PPCommonMarkBlockParser >> quote [
	^ (linePrefix, $> asParser, space optional) flatten
]

{ #category : #quotes }
PPCommonMarkBlockParser >> quoteBlock [
	^ quoteIndent,
	content,
	quoteDedent

	map: [ :indent :_content :dedent |
		PPCMBlockQuote new
			child: _content;
			yourself
	]
]

{ #category : #quotes }
PPCommonMarkBlockParser >> quoteDedent [
	^ (prefix not, quote pop) flatten
]

{ #category : #quotes }
PPCommonMarkBlockParser >> quoteIndent [
	^ (quote ==> [ :e | quote ]) pushAsParser
]

{ #category : #headers }
PPCommonMarkBlockParser >> setexLine [
	^ plainLine
]

{ #category : #headers }
PPCommonMarkBlockParser >> setextHeader [
	^ linePrefix, emptyLine not, setexLine, lineEnd, setextHeaderUnderline

	map: [ :_prefix :_predicates :_text :_nl :_underline |
		PPCMHeader new
			title: _text;
			level: _underline;
			yourself
	]
]

{ #category : #headers }
PPCommonMarkBlockParser >> setextHeaderUnderline [
	| equal minus |
	equal := '=' asParser plus ==> [:t | 1].
	minus := '-' asParser plus ==> [:t | 2].

	^ prefix, listItem not, linePrefix, ((equal / minus) trimRight: space), lineEnd ==> #fourth
]

{ #category : #'lines and whitespace' }
PPCommonMarkBlockParser >> space [
	^ Character space asParser
]

{ #category : #lists }
PPCommonMarkBlockParser >> spaces: length [
	| retval |
	retval := ''.
	length timesRepeat: [
		retval := retval, ' '.
	].
	^ retval
]

{ #category : #document }
PPCommonMarkBlockParser >> start [
	^ document >=> [ :context :cc |
		| retval |
		retval := cc value.
		retval isPetitFailure ifFalse: [
			context links do: [ :link |
				retval addChildFirst: link.
			]
		].
		retval
	]
]

{ #category : #initialization }
PPCommonMarkBlockParser >> utils [
	^ PPCommonMarkUtils instance
]

{ #category : #'lines and whitespace' }
PPCommonMarkBlockParser >> whitespace [
	^ #space asParser
]
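PPCommonMarkBlockParser composes the block-level grammar (document, paragraphs, headers, lists, quotes, code blocks, HTML blocks, link reference definitions). A rough usage sketch, assuming the usual PetitParser convention of instantiating a composite parser and sending #parse:; the package is expected to extend PPContext with the indentStack and link registry the productions rely on, as the tests below suggest, so treat this as illustrative rather than a documented entry point:

| parser document |
parser := PPCommonMarkBlockParser new.
document := parser parse: '# Title

Some paragraph text.'.
"document is expected to be a PPCMDocument; its children are the parsed blocks"
document children do: [ :node | Transcript show: node className; cr ].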
732  src/PetitMarkdown/PPCommonMarkBlockTest.class.st  Normal file
@@ -0,0 +1,732 @@
Class {
	#name : #PPCommonMarkBlockTest,
	#superclass : #PPCompositeParserTest,
	#instVars : [
		'context',
		'quote',
		'string',
		'expected'
	],
	#category : #'PetitMarkdown-Tests'
}

{ #category : #'as yet unclassified' }
PPCommonMarkBlockTest >> assert: something type: type [
	self assert: (something isKindOf: type).
]

{ #category : #'as yet unclassified' }
PPCommonMarkBlockTest >> assertResult: expectedResult [
	self assert: expectedResult = result.

	"
	(TextDiffBuilder from: result to: expectedResult) buildDisplayPatch.
	"
]

{ #category : #'as yet unclassified' }
PPCommonMarkBlockTest >> context [
	^ context
]

{ #category : #'as yet unclassified' }
PPCommonMarkBlockTest >> parse: input rule: rule to: expectedResult [
	self parse: input rule: rule.
	self assert: expectedResult = result.

	"
	(TextDiffBuilder from: result to: expectedResult) buildDisplayPatch.
	"
]

{ #category : #'as yet unclassified' }
PPCommonMarkBlockTest >> parserClass [
	^ PPCommonMarkBlockParser
]

{ #category : #'as yet unclassified' }
PPCommonMarkBlockTest >> setUp [
	context := PPContext new.

	quote := self parserInstanceFor: #quote
]

{ #category : #'test-headers' }
PPCommonMarkBlockTest >> testATXHeader [
	self parse: '# foo' rule: #ATXHeader.
	self assert: result title text = 'foo'.

	self parse: '# foo#' rule: #ATXHeader.
	self assert: result title text = 'foo#'.

	self parse: '# foo #' rule: #ATXHeader.
	self assert: result title text = 'foo'.
]

{ #category : #'test-headers' }
PPCommonMarkBlockTest >> testATXHeader2 [
	self parse: '#' rule: #ATXHeader.
	self assert: result title text = ''.

	self parse: '# ' rule: #ATXHeader.
	self assert: result title text = ''.

	self parse: '# #' rule: #ATXHeader.
	self assert: result title text = ''.

	self parse: '### ###' rule: #ATXHeader.
	self assert: result title text = ''.
]

{ #category : #'test-code' }
PPCommonMarkBlockTest >> testFencedCode [
	self parse: '```
abc

def
```' rule: #code.
	self assert: result type: PPCMFencedCode.
	self assert: result code = 'abc

def'.
]

{ #category : #'test-code' }
PPCommonMarkBlockTest >> testFencedCode2 [
	context := PPContext new.
	context indentStack push: ' ' asParser.
	self parse: '```
abc

def
```' rule: #code.

	self assert: result type: PPCMFencedCode.
	self assert: result code = 'abc

def'.
]

{ #category : #'as yet unclassified' }
PPCommonMarkBlockTest >> testHorizontalRule [
	self parse: '***' rule: #horizontalRule.

	self parse: ' - - -' rule: #horizontalRule.
]

{ #category : #'test-html blocks' }
PPCommonMarkBlockTest >> testHtmlBlock [
	self parse: '<table>
</table>' rule: #htmlBlock.

	self assert: result type: PPCMHtmlBlock.
]

{ #category : #'test-code' }
PPCommonMarkBlockTest >> testIndentedCode [
	self parse: ' abc' rule: #code.
	self assert: result type: PPCMIndentedCode.
	self assert: result code = 'abc'.

	self parse: ' abc
def' rule: #code.
	self assert: result code = 'abc
def'.

	self parse: ' this is a
code' rule: #code.
	self assert: result code = 'this is a
code'.

	self parse: ' this is

a code' rule: #code.
	self assert: result code = ' this is

a code'.

	self parse: ' this is

a code
' rule: #code.
	self assert: result code = ' this is

a code'.


	self parse: ' chunk1

chunk2



chunk3' rule: #code.

	self assert: result code = 'chunk1

chunk2



chunk3'.

	self parse: ' chunk1

chunk2' rule: #code.
	self assert: result code = 'chunk1

chunk2'.
]

{ #category : #'test-paragraph' }
PPCommonMarkBlockTest >> testLazyParagraphPrefix [
	self parse: '' rule: #lazyParagraphPrefix.

	context := PPContext new.
	context indentStack push: quote.
	self parse: '> ' rule: #lazyParagraphPrefix.

	context := PPContext new.
	context indentStack push: quote.
	self fail: '> >' rule: #lazyParagraphPrefix.

	context := PPContext new.
	context indentStack push: quote.
	context indentStack push: quote.
	self parse: ' > >' rule: #lazyParagraphPrefix.
]

{ #category : #'test-links' }
PPCommonMarkBlockTest >> testLinkRef [
	self parse: '[foo]' rule: #paragraph.
	self assert: result type: PPCMParagraph.
	self assert: result text = '[foo]'.
]

{ #category : #'test-links' }
PPCommonMarkBlockTest >> testLinkRefDef [
	self parse: '[foo]: /url "title"' rule: #linkRefDef.
	self assert: result type: PPCMLinkRefDefPlaceholder.
	self assert: context links size = 1.
	self assert: context links anyOne type: PPCMLinkRefDef.
]

{ #category : #'test-lists' }
PPCommonMarkBlockTest >> testList [

	context := PPContext new.
	self parse: '- one' rule: #list.
	self assert: result type: PPCMList.
	self assert: result children size = 1.
	self assert: result child text = 'one'.

	self assert: context indentStack isEmpty.

	context := PPContext new.
	self parse: '- one
- two' rule: #list.
	self assert: result type: PPCMList.
	self assert: result children size = 2.
	self assert: result firstChild text = 'one'.
	self assert: result secondChild text = 'two'.
	self assert: context indentStack isEmpty.

	context := PPContext new.
	self parse: '- one

- two' rule: #list.
	self assert: result type: PPCMList.
	self assert: result children size = 3.
	self assert: result firstChild text trim = 'one'.
	self assert: result thirdChild text = 'two'.
	self assert: context indentStack isEmpty.
	self assert: context indentStack isEmpty.

	context := PPContext new.
	context indentStack push: quote.
	self parse: '- one
>- two' rule: #list.
	self assert: result type: PPCMList.
	self assert: result children size = 2.
	self assert: result firstChild text = 'one'.
	self assert: result secondChild text = 'two'.
	self assert: context indentStack size = 1.

	context := PPContext new.
	self parse: '- one
- ' rule: #list.
	self assert: result type: PPCMList.
	self assert: result children size = 2.
	self assert: result firstChild text = 'one'.
	self assert: result secondChild text = ''.
	self assert: context indentStack isEmpty.
]

{ #category : #'test-lists' }
PPCommonMarkBlockTest >> testList2 [

	context := PPContext new.
	self parse: '1. one' rule: #list.
	self assert: result type: PPCMList.
	self assert: result children size = 1.
	self assert: result child text = 'one'.

	self assert: context indentStack isEmpty.
]

{ #category : #'test-lists' }
PPCommonMarkBlockTest >> testListBullet [
	context := PPContext new.
	context listItemType: $- asParser.
	self parse: '- ' rule: #listBullet.
	self assert: context indentStack size = 1.
	self assert: context indentStack top literal = ' '.


	context := PPContext new.
	context listItemType: $- asParser.
	self parse: ' - ' rule: #listBullet.

	self assert: context indentStack size = 1.
	self assert: context indentStack top literal = ' '.

	context := PPContext new.
	context listItemType: $- asParser.
	parser := self parserInstanceFor: #listBullet.
	self assert: parser parse: ' - ' end: 3.

	self assert: context indentStack size = 1.
	self assert: context indentStack top literal = ' '.
]

{ #category : #'test-lists' }
PPCommonMarkBlockTest >> testListBullet2 [
	context := PPContext new.
	context listItemType: $* asParser.
	self fail: '- ' rule: #listBullet.
]

{ #category : #'test-lists' }
PPCommonMarkBlockTest >> testListContent [
	context := PPContext new.
	context indentStack push: quote.
	context indentStack push: ' ' asParser.

	self parse: 'one
> two' rule: #listContent.

	self assert: result text = 'one
two'.

	context := PPContext new.
	context indentStack push: quote.
	context indentStack push: ' ' asParser.

	self parse: 'one
> two' rule: #listContent.

	self assert: result text = 'one
two'.

	context := PPContext new.
	context indentStack push: quote.
	context indentStack push: ' ' asParser.

	self parse: '> one
> > two' rule: #listContent.

	self assert: result firstChild type: PPCMBlockQuote.
	self assert: result firstChild text = 'one
two'.
]

{ #category : #'test-lists' }
PPCommonMarkBlockTest >> testListItem [

	context := PPContext new.
	context listItemType: $- asParser.
	self parse: '- one' rule: #listItem.

	self assert: result type: PPCMListItem.
	self assert: result text = 'one'.
	self assert: context indentStack size = 0.

	context := PPContext new.
	context listItemType: $- asParser.
	context indentStack push: quote.
	self parse: '- > one
> > two' rule: #listItem.
	self assert: result type: PPCMListItem.
	self assert: result child child type: PPCMBlockQuote.
	self assert: result child child text = 'one
two'.


	context := PPContext new.
	context indentStack push: quote.
	context listItemType: $- asParser.
	self parse: '- > one
>
> > two' rule: #listItem.
	self assert: result type: PPCMListItem.
	self assert: result child children size = 3.
	self assert: result child children first type: PPCMBlockQuote.
	self assert: result child children third type: PPCMIndentedCode.

	context := PPContext new.
	context listItemType: $- asParser.

	self parse: '- ' rule: #listItem.
	self assert: result type: PPCMListItem.
	self assert: result text = ''.
]

{ #category : #'test-lists' }
PPCommonMarkBlockTest >> testListItemCode [
	context := PPContext new.
	context listItemType: $- asParser.
	self parse: '- one' rule: #listItem.
	self assert: result child child type: PPCMIndentedCode.
]

{ #category : #'test-lists' }
PPCommonMarkBlockTest >> testListItemEmpty [

	context := PPContext new.
	context listItemType: $- asParser.
	self parse: '- ' rule: #listItem.

	self assert: result type: PPCMListItem.
	self assert: result text = ''.
	self assert: context indentStack size = 0.

	context := PPContext new.
	context listItemType: $- asParser.
	self parse: '-' rule: #listItem.

	self assert: result type: PPCMListItem.
	self assert: result text = ''.
	self assert: context indentStack size = 0.
]

{ #category : #'test-lists' }
PPCommonMarkBlockTest >> testListNested01 [

	context := PPContext new.
	self parse: '- one
- two' rule: #list.
	self assert: result type: PPCMList.
	self assert: result children size = 1.
	self assert: result child child firstChild text = 'one'.
	self assert: result child child secondChild type: PPCMList.
	self assert: result child child secondChild child text = 'two'.

	self assert: context indentStack isEmpty.
]

{ #category : #'test-lists' }
PPCommonMarkBlockTest >> testListNested02 [

	context := PPContext new.
	self parse: '- one
- two
- three' rule: #list.
	self assert: result type: PPCMList.
	self assert: result children size = 1.
	self assert: result child child firstChild text = 'one'.
	self assert: result child child secondChild type: PPCMList.
	self assert: result child child secondChild child child firstChild text = 'two'.
	self assert: result child child secondChild child child secondChild type: PPCMList.
	self assert: result child child secondChild child child secondChild text = 'three'.

	self assert: context indentStack isEmpty.
]

{ #category : #'test-lists' }
PPCommonMarkBlockTest >> testListNested03 [

	context := PPContext new.
	self parse: '- one
- two
- three
- four' rule: #list.

	self assert: result type: PPCMList.
	self assert: result children size = 1.
	self assert: result child child firstChild text = 'one'.
	self assert: result child child secondChild type: PPCMList.
	self assert: result child child secondChild firstChild child firstChild text = 'two'.
	self assert: result child child secondChild firstChild child secondChild type: PPCMList.
	self assert: result child child secondChild firstChild child secondChild text = 'three'.
	self assert: result child child secondChild secondChild child firstChild text = 'four'.
|
||||
|
||||
self assert: context indentStack isEmpty.
|
||||
]
|
||||
|
||||
{ #category : #'test-lists' }
|
||||
PPCommonMarkBlockTest >> testListNested04 [
|
||||
|
||||
context := PPContext new.
|
||||
self parse: '- one
|
||||
- two
|
||||
- three
|
||||
- four
|
||||
|
||||
five' rule: #list.
|
||||
self assert: result type: PPCMList.
|
||||
self assert: result children size = 1.
|
||||
self assert: result child child firstChild text = 'one'.
|
||||
self assert: result child child secondChild type: PPCMList.
|
||||
self assert: result child child secondChild firstChild child firstChild text = 'two'.
|
||||
self assert: result child child secondChild firstChild child secondChild type: PPCMList.
|
||||
self assert: result child child secondChild firstChild child secondChild text = 'three'.
|
||||
self assert: result child child secondChild secondChild child firstChild text = 'four'.
|
||||
self assert: result child child secondChild secondChild child thirdChild text = 'five'.
|
||||
|
||||
self assert: context indentStack isEmpty.
|
||||
]
|
||||
|
||||
{ #category : #'test-lists' }
|
||||
PPCommonMarkBlockTest >> testListTight [
|
||||
|
||||
context := PPContext new.
|
||||
self parse: '- one
|
||||
- two' rule: #list.
|
||||
|
||||
self assert: result type: PPCMList.
|
||||
self assert: result isTight.
|
||||
]
|
||||
|
||||
{ #category : #'test-lists' }
|
||||
PPCommonMarkBlockTest >> testListTight2 [
|
||||
|
||||
context := PPContext new.
|
||||
self parse: '- one
|
||||
|
||||
- two' rule: #list.
|
||||
|
||||
|
||||
self assert: result type: PPCMList.
|
||||
self assert: result children size = 3.
|
||||
self assert: result isTight not.
|
||||
]
|
||||
|
||||
{ #category : #'test-lists' }
|
||||
PPCommonMarkBlockTest >> testListTight3 [
|
||||
|
||||
context := PPContext new.
|
||||
self parse: '- one
|
||||
two' rule: #list.
|
||||
|
||||
self assert: result type: PPCMList.
|
||||
self assert: result isTight.
|
||||
]
|
||||
|
||||
{ #category : #'test-lists' }
|
||||
PPCommonMarkBlockTest >> testListTight4 [
|
||||
|
||||
context := PPContext new.
|
||||
self parse: '- one
|
||||
|
||||
two' rule: #list.
|
||||
|
||||
self assert: result type: PPCMList.
|
||||
self assert: result isTight.
|
||||
]
|
||||
|
||||
{ #category : #'test-paragraph' }
|
||||
PPCommonMarkBlockTest >> testParagraph [
|
||||
self parse: 'abc
|
||||
def' rule: #paragraph.
|
||||
self assert: result text = 'abc
|
||||
def'.
|
||||
|
||||
self parse: 'abc
|
||||
def' rule: #paragraph.
|
||||
self assert: result text = 'abc
|
||||
def'.
|
||||
|
||||
context := PPContext new.
|
||||
context indentStack push: quote.
|
||||
self parse: ' abc
|
||||
def' rule: #paragraph.
|
||||
self assert: result text = 'abc
|
||||
def'.
|
||||
|
||||
context := PPContext new.
|
||||
context indentStack push: quote.
|
||||
self parse: 'abc
|
||||
> def' rule: #paragraph.
|
||||
self assert: result text = 'abc
|
||||
def'.
|
||||
]
|
||||
|
||||
{ #category : #'test-paragraph' }
|
||||
PPCommonMarkBlockTest >> testParagraph2 [
|
||||
self parse: 'foo
|
||||
# bar' rule: #paragraph.
|
||||
self assert: result text = 'foo
|
||||
# bar'.
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
PPCommonMarkBlockTest >> testPrefix [
|
||||
self parse: '' rule: #prefix.
|
||||
|
||||
context := PPContext new.
|
||||
context indentStack push: quote.
|
||||
self parse: '> ' rule: #prefix
|
||||
|
||||
]
|
||||
|
||||
{ #category : #'test-quotes' }
|
||||
PPCommonMarkBlockTest >> testQuote [
|
||||
self parse: '>' rule: #quote.
|
||||
self assertResult: '>'.
|
||||
|
||||
self parse: '> ' rule: #quote.
|
||||
self assertResult: '> '.
|
||||
|
||||
self fail: ('>', String cr) rule: #quote.
|
||||
]
|
||||
|
||||
{ #category : #'test-quotes' }
|
||||
PPCommonMarkBlockTest >> testQuoteBlock [
|
||||
self parse: '> foo' rule: #quoteBlock.
|
||||
self assert: result type: PPCMBlockQuote.
|
||||
self assert: result children size = 1.
|
||||
self assert: result child text = 'foo'.
|
||||
|
||||
|
||||
context := PPContext new.
|
||||
self parse: '> foo
|
||||
> bar' rule: #quoteBlock.
|
||||
self assert: result type: PPCMBlockQuote.
|
||||
self assert: result children size = 1.
|
||||
self assert: result child text = 'foo
|
||||
bar'.
|
||||
|
||||
context := PPContext new.
|
||||
self parse: '>> foo
|
||||
>> bar' rule: #quoteBlock.
|
||||
self assert: result type: PPCMBlockQuote.
|
||||
self assert: result child child type: PPCMBlockQuote.
|
||||
self assert: result child child text = 'foo
|
||||
bar'.
|
||||
|
||||
context := PPContext new.
|
||||
self parse: '># Foo' rule: #quoteBlock.
|
||||
self assert: result type: PPCMBlockQuote.
|
||||
self assert: result child child type: PPCMHeader.
|
||||
self assert: result child child text = 'Foo'.
|
||||
|
||||
context := PPContext new.
|
||||
self parse: '> foo
|
||||
>
|
||||
> bar' rule: #quoteBlock.
|
||||
self assert: result type: PPCMBlockQuote.
|
||||
self assert: result child child type: PPCMIndentedCode.
|
||||
|
||||
|
||||
context := PPContext new.
|
||||
self parse: '>' rule: #quoteBlock.
|
||||
self assert: result type: PPCMBlockQuote.
|
||||
|
||||
|
||||
context := PPContext new.
|
||||
self parse: '>
|
||||
>
|
||||
> ' rule: #quoteBlock.
|
||||
self assert: result type: PPCMBlockQuote.
|
||||
]
|
||||
|
||||
{ #category : #'test-quotes' }
|
||||
PPCommonMarkBlockTest >> testQuoteDedent [
|
||||
parser := self parserInstanceFor: #quoteDedent.
|
||||
|
||||
context := PPContext new.
|
||||
context indentStack push: quote.
|
||||
self assert: parser parse: '' end: 0.
|
||||
self assert: context indentStack size = 0.
|
||||
self assert: parser fail: '' end: 0.
|
||||
|
||||
context := PPContext new.
|
||||
self assert: parser fail: ''.
|
||||
|
||||
context := PPContext new.
|
||||
context indentStack push: quote.
|
||||
self assert: parser fail: '>' end: 0.
|
||||
|
||||
context := PPContext new.
|
||||
context indentStack push: quote.
|
||||
context indentStack push: quote.
|
||||
self assert: parser parse: ' > ' end: 0.
|
||||
|
||||
context := PPContext new.
|
||||
context indentStack push: quote.
|
||||
context indentStack push: ' ' asParser.
|
||||
self assert: parser fail: ' > ' end: 0.
|
||||
|
||||
context := PPContext new.
|
||||
context indentStack push: quote.
|
||||
context indentStack push: quote.
|
||||
context indentStack push: quote.
|
||||
self assert: parser parse: ' > > ' end: 0.
|
||||
|
||||
|
||||
context := PPContext new.
|
||||
context indentStack push: quote.
|
||||
context indentStack push: quote.
|
||||
self assert: parser parse: '' end: 0.
|
||||
self assert: parser parse: '' end: 0.
|
||||
self assert: parser fail: '' end: 0.
|
||||
]
|
||||
|
||||
{ #category : #'test-quotes' }
|
||||
PPCommonMarkBlockTest >> testQuoteIndent [
|
||||
parser := self parserInstanceFor: #quoteIndent.
|
||||
|
||||
context := PPContext new.
|
||||
self assert: parser parse: '>' end: 1.
|
||||
self assert: context indentStack size = 1.
|
||||
self assert: context indentStack top = quote.
|
||||
|
||||
context := PPContext new.
|
||||
self assert: parser parse: ' > ' end: 5.
|
||||
|
||||
context := PPContext new.
|
||||
self assert: parser parse: ' >' end: 3.
|
||||
|
||||
context := PPContext new.
|
||||
self assert: parser parse: ' >' end: 2.
|
||||
|
||||
context := PPContext new.
|
||||
self assert: parser fail: ' >'.
|
||||
|
||||
context := PPContext new.
|
||||
context indentStack push: quote.
|
||||
self assert: parser parse: '>' end: 1.
|
||||
self assert: context indentStack size = 2.
|
||||
self assert: context indentStack top = quote.
|
||||
|
||||
context := PPContext new.
|
||||
context indentStack push: quote.
|
||||
self assert: parser parse: '> > ' end: 2.
|
||||
|
||||
context := PPContext new.
|
||||
context indentStack push: quote.
|
||||
self assert: parser parse: ' > > ' end: 3.
|
||||
]
|
||||
|
||||
{ #category : #'test-headers' }
|
||||
PPCommonMarkBlockTest >> testSetextHeader [
|
||||
self parse: 'Foo
|
||||
---' rule: #setextHeader.
|
||||
self assert: result title text = 'Foo'.
|
||||
]
|