Compare commits

..

No commits in common. "master" and "gt-crashed" have entirely different histories.

115 changed files with 430 additions and 14686 deletions

View File

@ -1,22 +1,3 @@
# MiniDocs
MiniDocs is a project that includes several minimalistic documentation tools used by the [Grafoscopio](https://mutabit.com/grafoscopio/en.html) community, starting with [Markdeep](https://casual-effects.com/markdeep/) and its integrations with [Lepiter](https://lepiter.io/feenk/introducing-lepiter--knowledge-management--e2p6apqsz5npq7m4xte0kkywn/) .
# Installation
To install it, *first* install [ExoRepo](https://code.tupale.co/Offray/ExoRepo) and then run from a playground:
```
ExoRepo new repository: 'https://code.sustrato.red/Offray/MiniDocs'; load.
```
# Usage
Once you have installed MiniDocs, each Lepiter note will provide an export button (1), as showcased here:
![Exporting to Markdeep with MiniDocs.](https://i.imgur.com/bTZUG0Z.png)
If you click on it, you will get the right panel in the previous screenshot, showcasing the exported document.
And if you click on the "Open in OS" button (2), you will see the document in your web browser, like this:
![Exported Lepiter note opened in the web browser](https://i.imgur.com/6fxkqZi.png)
MiniDocs is a project that includes several minimalistic documentation tools used by the [Grafoscopio](https://mutabit.com/grafoscopio/en.html) community, starting with [Markdeep](https://casual-effects.com/markdeep/) and its integrations with [Lepiter](https://lepiter.io/feenk/introducing-lepiter--knowledge-management--e2p6apqsz5npq7m4xte0kkywn/).

View File

@ -11,86 +11,27 @@ BaselineOfMiniDocs >> baseline: spec [
for: #common
do: [
"Dependencies"
self setUpTeapot: spec.
self setUpPetitParser: spec.
self setUpLepiterBuildingBlocs: spec. "working in v1.0.993"
spec
baseline: 'Mustache' with: [ spec repository: 'github://noha/mustache' ];
baseline: 'Temple' with: [ spec repository: 'github://astares/Pharo-Temple/src' ];
baseline: 'Tealight' with: [ spec repository: 'github://astares/Tealight:main/src' ];
baseline: 'DataFrame' with: [ spec repository: 'github://PolyMathOrg/DataFrame/src' ].
"self fossil: spec."
self xmlParserHTML: spec.
baseline: 'Mustache' with: [ spec repository: 'github://noha/mustache'].
"self xmlParserHTML: spec."
"Packages"
spec
package: 'PetitMarkdown' with: [ spec requires: #('PetitParser')];
package: 'MiniDocs'
with: [ spec requires: #(
'Mustache' 'Temple' "Templating"
'Teapot' 'Tealight' "Web server"
'PetitMarkdown' 'PetitParser' "Parsers"
'DataFrame' "Tabular data"
'LepiterBuildingBlocs' "Lepiter utilities")].
.
"Groups"
package: 'MiniDocs'
with: [ spec requires: #('Mustache' "'XMLParserHTML'") ]
].
spec
]
{ #category : #accessing }
BaselineOfMiniDocs >> fossil: spec [
"Load the Fossil project from its remote ExoRepo repository and register
the locally cloned copy on spec as a 'gitlocal://' baseline.
NOTE(review): its only sender in #baseline: is commented out — confirm
whether this dependency is still needed."
| repo |
repo := ExoRepo new
repository: 'https://code.sustrato.red/Offray/Fossil'.
repo load.
spec baseline: 'Fossil' with: [ spec repository: 'gitlocal://', repo local fullName ]
]
{ #category : #accessing }
BaselineOfMiniDocs >> semanticVersion [
"Answer the semantic version string of MiniDocs."
^ '0.2.0'
]
{ #category : #accessing }
BaselineOfMiniDocs >> setUpLepiterBuildingBlocs: spec [
"Register the LepiterBuildingBlocs dependency (Lepiter utilities) on spec."
spec
baseline: 'LepiterBuildingBlocs'
with: [spec repository: 'github://botwhytho/LepiterBuildingBlocs:main/src']
]
{ #category : #accessing }
BaselineOfMiniDocs >> setUpPetitParser: spec [
"Register the PetitParser v3 dependency on spec, loading only the groups
MiniDocs needs (parsing core plus island grammars)."
spec
baseline: 'PetitParser'
with: [ spec
repository: 'github://moosetechnology/PetitParser:v3.x.x/src';
loads: #('Minimal' 'Core' 'Tests' 'Islands')];
import: 'PetitParser'
]
{ #category : #accessing }
BaselineOfMiniDocs >> setUpTeapot: spec [
"Register the Teapot web-server dependency on spec, loading all its groups."
spec
baseline: 'Teapot'
with: [ spec
repository: 'github://zeroflag/Teapot/source';
loads: #('ALL') ];
import: 'Teapot'
]
{ #category : #accessing }
BaselineOfMiniDocs >> xmlParserHTML: spec [
	"Load XMLParserHTML eagerly with Metacello — resolving version conflicts
	in favor of whatever is already loaded — and then register it on spec so
	dependent packages can require it.
	Fixed: the previous version mixed three conflicting load mechanisms
	pointing at two different repositories (ruidajo vs pharo-contributions)
	and was missing the statement separator after the first spec
	registration, which made the method unparsable."
	Metacello new
		baseline: 'XMLParserHTML';
		repository: 'github://pharo-contributions/XML-XMLParserHTML/src';
		onConflict: [ :ex | ex useLoaded ];
		onUpgrade: [ :ex | ex useLoaded ];
		onDowngrade: [ :ex | ex useLoaded ];
		onWarningLog;
		load.
	spec baseline: 'XMLParserHTML' with: [spec repository: 'github://pharo-contributions/XML-XMLParserHTML/src']
]

View File

@ -1,18 +0,0 @@
"
I model a possible bridge between TaskWarrior and MiniDocs (early draft).
"
Class {
#name : #AcroReport,
#superclass : #Object,
#category : #MiniDocs
}
{ #category : #accessing }
AcroReport class >> project: projectName [
"Answer the parsed JSON export of the TaskWarrior tasks belonging to
projectName. Depends on the external `task` command-line tool being
available on the host system."
| jsonReport |
jsonReport := (GtSubprocessWithInMemoryOutput new
shellCommand: 'task project:', projectName , ' export';
runAndWait;
stdout).
^ STONJSON fromString: jsonReport
]

View File

@ -1,57 +0,0 @@
"
I am an alphanumeric counter producing identifiers such as 'A1', 'A2' ... 'A9', 'B1' ...
By default I count over the letters A-Z and the digits 1-9; both ranges are configurable.
"
Class {
#name : #AlphanumCounter,
#superclass : #Object,
#instVars : [
'letters',
'digits',
'currentLetter',
'currentDigit'
],
#category : #MiniDocs
}
{ #category : #accessing }
AlphanumCounter >> current [
"Answer the current counter value as a string, e.g. 'A1'."
^ self currentLetter asString, self currentDigit asString
]
{ #category : #accessing }
AlphanumCounter >> currentDigit [
"Lazily initialized to the first configured digit."
^ currentDigit ifNil: [ currentDigit := self digits first ]
]
{ #category : #accessing }
AlphanumCounter >> currentLetter [
"Lazily initialized to the first configured letter."
^ currentLetter ifNil: [ currentLetter := self letters first ]
]
{ #category : #accessing }
AlphanumCounter >> currentLetterIndex [
"Answer the 1-based position of the current letter within the letter range."
^ self letters detectIndex: [:n | n = self currentLetter]
]
{ #category : #accessing }
AlphanumCounter >> digits [
"Digit range used by the counter. Defaults to 1-9."
^ digits ifNil: [ digits := 1 to: 9 ]
]
{ #category : #accessing }
AlphanumCounter >> digits: aNumbersArray [
digits := aNumbersArray
]
{ #category : #accessing }
AlphanumCounter >> increase [
"Advance the counter: bump the digit, rolling over to the next letter when
the digit range is exhausted.
NOTE(review): increasing past the last letter/digit pair signals an
out-of-bounds error on #at: — confirm whether wrap-around is intended."
(self currentDigit < self digits last)
ifTrue: [ currentDigit := currentDigit + 1 ]
ifFalse: [
currentLetter := self letters at: (self currentLetterIndex + 1).
currentDigit := self digits first
]
]
{ #category : #accessing }
AlphanumCounter >> letters [
"Letter range used by the counter. Defaults to A-Z."
^ letters ifNil: [ letters := $A to: $Z ]
]

View File

@ -1,45 +0,0 @@
Extension { #name : #Array }
{ #category : #'*MiniDocs' }
Array >> bagOfWordsFor: sentenceArray [
	"A utility for little machine-training algorithms.
	Inspired by https://youtu.be/8qwowmiXANQ?t=1144.
	This should probably be moved to [Polyglot](https://github.com/pharo-ai/Polyglot),
	but that repository is pretty inactive (commits 2 or more years old and no response
	to issues). Meanwhile, it will live in MiniDocs.
	Given the sentence := #('hello' 'how' 'are' 'you')
	and the testVocabulary := #('hi' 'hello' 'I' 'you' 'bye' 'thank' 'you')
	then
	testVocabulary bagOfWordsFor: sentence.
	Should give: #(0 1 0 1 0 0 0)
	Fixed: the result array is now created pre-filled with zeros instead of
	being zeroed by iterating an uninitialized array with the deprecated
	#doWithIndex:."
	| bagOfWords |
	bagOfWords := Array new: self size withAll: 0.
	sentenceArray do: [:token | |index|
		index := self indexOf: token.
		index > 0
			ifTrue: [bagOfWords at: index put: 1]
	].
	^ bagOfWords
]
{ #category : #'*MiniDocs' }
Array >> replaceWithUniqueNilsAndBooleans [
"Answer a collection where every nil/true/false element is replaced by a
unique string such as 'nil-x4f', so lossy values survive round-trips
through formats that can't distinguish repeated nils/booleans.
Answers the receiver unchanged when no such element is present;
otherwise answers a new OrderedCollection. Uses NanoID for uniqueness."
| response |
(self includesAny: #(true false nil))
ifFalse: [ response := self ]
ifTrue: [ | newItem |
response := OrderedCollection new.
self do: [:item |
(item isBoolean or: [ item isNil ])
ifTrue: [ newItem := item asString, '-', (NanoID generate copyFrom: 1 to: 3) ]
ifFalse: [ newItem := item ].
response add: newItem.
].
].
^ response
]

View File

@ -1,23 +0,0 @@
Extension { #name : #BrAsyncFileWidget }
{ #category : #'*MiniDocs' }
BrAsyncFileWidget >> url: aUrl [
"Render the resource at aUrl in this widget: file:// urls are shown
directly from disk; YouTube page urls are replaced by their thumbnail
image; any other url is fetched and shown as an image."
| realUrl imageUrl |
realUrl := aUrl asZnUrl.
realUrl scheme = #file ifTrue: [
^ self file: realUrl asFileReference ].
imageUrl := realUrl.
realUrl host = 'www.youtube.com' ifTrue: [ | video |
video := LeRawYoutubeReferenceInfo fromYoutubeStringUrl: realUrl asString.
imageUrl := (video rawData at: 'thumbnail_url') asUrl.
].
"NOTE(review): the bytes are re-encoded to base64 only to be decoded
again by Form — confirm a direct Form-from-bytes path isn't available."
self stencil: [
(SkiaImage fromForm:
(Form fromBase64String: imageUrl retrieveContents base64Encoded))
asElement constraintsDo: [ :c |
c horizontal matchParent.
c vertical matchParent ] ]
]

View File

@ -1,12 +0,0 @@
Extension { #name : #ByteString }
{ #category : #'*MiniDocs' }
ByteString >> asHTMLComment [
	"Answer the receiver wrapped in HTML comment delimiters."
	^ String streamContents: [ :stream |
		stream
			nextPutAll: '<!-- ';
			nextPutAll: self;
			nextPutAll: ' -->' ]
]
{ #category : #'*MiniDocs' }
ByteString >> email [
	"Quick fix for importing Lepiter pages that have a plain ByteString field as email:
	answer the receiver unchanged, mimicking an email object's accessor."
	^ self
]

View File

@ -1,46 +0,0 @@
Extension { #name : #DataFrame }
{ #category : #'*MiniDocs' }
DataFrame >> asMarkdown [
"Answer the receiver rendered as a Markdown pipe table: a header row of
column names, a separator row, then one row per data row. Accented
characters are corrected and internet (CRLF) line endings are used."
| response |
response := '' writeStream.
self columnNames do: [ :name | response nextPutAll: '| ' , name , ' ' ].
response
nextPutAll: '|';
cr.
self columns size timesRepeat: [ response nextPutAll: '|---' ].
response
nextPutAll: '|';
cr.
self asArrayOfRows
do: [ :row |
row do: [ :cell | response nextPutAll: '| ' , cell asString , ' ' ].
response
nextPutAll: '|';
cr ].
^ response contents accentedCharactersCorrection withInternetLineEndings.
]
{ #category : #'*MiniDocs' }
DataFrame >> viewDataFor: aView [
<gtView>
"GT view showing the frame's data as a columned list, one column per
DataFrame column. Empty frames get an empty view."
| columnedList |
self numberOfRows >= 1 ifFalse: [ ^ aView empty ].
columnedList := aView columnedList
title: 'Data';
items: [ self transposed columns ];
priority: 40.
self columnNames
withIndexDo: [:aName :anIndex |
columnedList
column: aName
text: [:anItem | anItem at: anIndex ]
].
^ columnedList
]
{ #category : #'*MiniDocs' }
DataFrame >> webView [
"Answer an HTML rendering of the receiver by converting its Markdown
representation with Pandoc (external dependency)."
^ Pandoc convertString: self asMarkdown from: 'markdown' to: 'html'
]

View File

@ -1,6 +0,0 @@
Extension { #name : #Dictionary }
{ #category : #'*MiniDocs' }
Dictionary >> treeView [
"Answer a tree view of the receiver by delegating to the OrderedDictionary implementation."
^ self asOrderedDictionary treeView
]

View File

@ -1,53 +0,0 @@
Extension { #name : #FileLocator }
{ #category : #'*MiniDocs' }
FileLocator class >> aliases [
"Answer the dictionary of file aliases persisted in the app folder as STON,
creating an empty aliases file on first use."
| fileAliases |
fileAliases := self fileAliases.
fileAliases exists
ifFalse: [ | initialConfig |
initialConfig := Dictionary new.
fileAliases ensureCreateFile.
MarkupFile exportAsFileOn: fileAliases containing: initialConfig
].
^ STON fromString: fileAliases contents
]
{ #category : #'*MiniDocs' }
FileLocator class >> atAlias: aString put: aFolderOrFile [
"Store aFolderOrFile under the alias aString and persist the updated
aliases dictionary. Answer the updated dictionary."
| updatedAliases |
updatedAliases:= self aliases
at: aString put: aFolderOrFile;
yourself.
MarkupFile exportAsFileOn: self fileAliases containing: updatedAliases.
^ updatedAliases
]
{ #category : #'*MiniDocs' }
FileLocator >> extractMetadata [
"I package the functionality from [[How to extract meta information using ExifTool]],
from the GToolkit Book.
I depend on the external tool ExifTool.
Answer an ordered dictionary of field name -> value parsed from exiftool's output."
| process variablesList |
process := GtSubprocessWithInMemoryOutput new
command: 'exiftool';
arguments: { self fullName}.
process errorBlock: [ :proc | ^ self error: 'Failed to run exiftool' ].
process runAndWait.
variablesList := process stdout lines collect: [ :currentLine |
| separatorIndex name value |
separatorIndex := currentLine indexOf: $:.
name := (currentLine copyFrom: 1 to: separatorIndex - 1) trimBoth.
value := (currentLine
copyFrom: separatorIndex + 1
to: currentLine size) trimBoth.
name -> value
].
^ variablesList asOrderedDictionary
]
{ #category : #'*MiniDocs' }
FileLocator class >> fileAliases [
"Location of the persisted aliases dictionary inside the MiniDocs app folder."
^ MiniDocs appFolder / 'fileAliases.ston'
]

View File

@ -1,342 +0,0 @@
"
I am a legacy Grafoscopio notebook node: a tree element with a header, a body
and children, carrying creation/edition timestamps and tags. I exist mainly to
import old Grafoscopio notebooks (STON files) into Lepiter pages and snippets.
"
Class {
#name : #GrafoscopioNode,
#superclass : #Object,
#instVars : [
'header',
'body',
'tags',
'children',
'parent',
'links',
'level',
'created',
'nodesInPreorder',
'selected',
'edited',
'headers',
'key',
'output',
'remoteLocations'
],
#category : #'MiniDocs-Legacy'
}
{ #category : #accessing }
GrafoscopioNode class >> fromFile: aFileReference [
"Answer the root node of the notebook stored as STON in aFileReference."
^ (STON fromString: aFileReference contents) first parent
]
{ #category : #accessing }
GrafoscopioNode class >> fromLink: aStonLink [
"Answer the root node of the notebook published at the STON url aStonLink,
remembering the url as a remote location of the notebook."
| notebook |
notebook := (STON fromString: aStonLink asUrl retrieveContents utf8Decoded) first parent.
notebook addRemoteLocation: aStonLink.
^ notebook
]
{ #category : #accessing }
GrafoscopioNode >> addRemoteLocation: anURL [
self remoteLocations add: anURL
]
{ #category : #accessing }
GrafoscopioNode >> ancestors [
"I return a collection of all the nodes which are ancestors of the receiver node,
ordered from the root down to the immediate parent."
| currentNode ancestors |
currentNode := self.
ancestors := OrderedCollection new.
[ currentNode parent notNil and: [ currentNode level > 0 ] ]
whileTrue: [
ancestors add: currentNode parent.
currentNode := currentNode parent].
ancestors := ancestors reversed.
^ ancestors
]
{ #category : #accessing }
GrafoscopioNode >> asLePage [
"Convert the notebook rooted at the receiver into a LePage: timestamps are
populated first, then every node except the root becomes a snippet."
| page |
self root populateTimestamps.
page := LePage new
initializeTitle: 'Grafoscopio Notebook (imported)'.
self nodesInPreorder allButFirst
do: [:node | page addSnippet: node asSnippet ].
page latestEditTime: self root latestEditionDate.
page createTime: self root earliestCreationDate.
page optionAt: 'remoteLocations' put: self remoteLocations.
^ page.
]
{ #category : #accessing }
GrafoscopioNode >> asSnippet [
	"Convert this node into a Lepiter snippet tree: a LeTextSnippet for the
	header, with one child snippet for the body — a LePharoSnippet when the
	node is tagged as code ('código'), a LeTextSnippet otherwise.
	Fixed: the previous cascades (child := LeTextSnippet new; string: ...)
	sent #string:/#code: to the CLASS instead of the freshly created
	instance and bound child to that cascade's result."
	| snippet child |
	snippet := LeTextSnippet new
		string: self header;
		createTime: (LeTime new
			time: self created);
		uid: LeUID new.
	(self tags includes: 'código')
		ifFalse: [
			child := LeTextSnippet new.
			child string: self body ]
		ifTrue: [
			child := LePharoSnippet new.
			child code: self body ].
	child
		createTime: (LeTime new
			time: self created);
		uid: LeUID new.
	snippet addFirstSnippet: child.
	snippet optionAt: 'tags' put: self tags.
	^ snippet
]
{ #category : #accessing }
GrafoscopioNode >> body [
^ body
]
{ #category : #accessing }
GrafoscopioNode >> body: anObject [
body := anObject
]
{ #category : #accessing }
GrafoscopioNode >> children [
^ children
]
{ #category : #accessing }
GrafoscopioNode >> children: anObject [
children := anObject
]
{ #category : #accessing }
GrafoscopioNode >> created [
"Answer the creation timestamp as a DateAndTime, or nil when unset."
created ifNotNil: [^created asDateAndTime].
^ created
]
{ #category : #accessing }
GrafoscopioNode >> created: anObject [
created := anObject
]
{ #category : #accessing }
GrafoscopioNode >> earliestCreationDate [
"Answer the earliest creation date among all nodes, falling back to the
repository's first check-in (minus 3 hours) when no node has one."
| earliest |
self nodesWithCreationDates ifNotEmpty: [
earliest := self nodesWithCreationDates first created]
ifEmpty: [ earliest := self earliestRepositoryTimestamp - 3 hours].
self nodesWithCreationDates do: [:node |
node created <= earliest ifTrue: [ earliest := node created ] ].
^ earliest
]
{ #category : #accessing }
GrafoscopioNode >> earliestRepositoryTimestamp [
"Answer the DateAndTime of the first Fossil check-in of the document
behind the notebook's first remote location.
NOTE(review): answers the Boolean false — not a DateAndTime — when the
remote is not hosted on mutabit.com; confirm callers handle this."
| remote fossilHost docSegments repo checkinInfo |
remote := self remoteLocations first asUrl.
fossilHost := 'https://mutabit.com/repos.fossil'.
(remote asString includesSubstring: fossilHost) ifFalse: [ ^ false ].
docSegments := remote segments copyFrom: 5 to: remote segments size.
repo := FossilRepo new
remote: (remote scheme, '://', remote host, '/', remote segments first, '/', remote segments second).
checkinInfo := repo firstCheckinFor: ('/' join: docSegments).
^ DateAndTime fromUnixTime: (checkinInfo at: 'timestamp')
]
{ #category : #accessing }
GrafoscopioNode >> edited [
"Answer the edition timestamp as a DateAndTime, or nil when unset."
^ edited ifNotNil: [^ edited asDateAndTime ]
]
{ #category : #accessing }
GrafoscopioNode >> edited: anObject [
edited := anObject
]
{ #category : #accessing }
GrafoscopioNode >> gtTextFor: aView [
<gtView>
"GT view showing the node's body as an editable text."
^ aView textEditor
title: 'Body';
text: [ body ]
]
{ #category : #accessing }
GrafoscopioNode >> header [
^ header
]
{ #category : #accessing }
GrafoscopioNode >> header: anObject [
header := anObject
]
{ #category : #accessing }
GrafoscopioNode >> latestEditionDate [
"Answer the most recent edition date among all nodes.
NOTE(review): fails when no node has an edition date — confirm callers
run populateTimestamps first."
| latest |
latest := self nodesWithEditionDates first edited.
self nodesWithEditionDates do: [:node |
node edited >= latest ifTrue: [ latest := node edited ] ].
^ latest
]
{ #category : #accessing }
GrafoscopioNode >> level [
^ level
]
{ #category : #accessing }
GrafoscopioNode >> level: anObject [
level := anObject
]
{ #category : #accessing }
GrafoscopioNode >> links [
^ links
]
{ #category : #accessing }
GrafoscopioNode >> links: anObject [
links := anObject
]
{ #category : #accessing }
GrafoscopioNode >> nodesInPreorder [
"NOTE(review): stored value set at import time, not computed from children."
^ nodesInPreorder
]
{ #category : #accessing }
GrafoscopioNode >> nodesInPreorder: anObject [
nodesInPreorder := anObject
]
{ #category : #accessing }
GrafoscopioNode >> nodesWithCreationDates [
^ self nodesInPreorder select: [ :each | each created isNotNil ]
]
{ #category : #accessing }
GrafoscopioNode >> nodesWithEditionDates [
^ self nodesInPreorder select: [ :each | each edited isNotNil ]
]
{ #category : #accessing }
GrafoscopioNode >> parent [
^ parent
]
{ #category : #accessing }
GrafoscopioNode >> parent: anObject [
parent := anObject
]
{ #category : #accessing }
GrafoscopioNode >> populateTimestamps [
	"Ensure every node in the tree has creation and edition timestamps.
	Nodes lacking them get sequential ad hoc timestamps derived from the
	earliest known creation date and are tagged so the synthetic data can
	be recognized later. Answers the nodes in preorder.
	Fixed: the loop now tags edited nodes with the adhocEditionMarker temp
	instead of a duplicated string literal, and uses #withIndexDo: instead
	of the deprecated #doWithIndex:."
	| adhocCreationMarker adhocEditionMarker |
	adhocCreationMarker := 'adhoc creation timestamp'.
	adhocEditionMarker := 'adhoc edition timestamp'.
	"Nothing to do when every node already carries both timestamps."
	(self nodesInPreorder size = self nodesWithCreationDates size
		and: [ self nodesInPreorder size = self nodesWithEditionDates size ])
		ifTrue: [ ^ self nodesInPreorder ].
	self nodesInPreorder allButFirst withIndexDo: [:node :i |
		node created ifNil: [
			node created: self earliestCreationDate + i.
			node tags add: adhocCreationMarker.
		].
		node edited ifNil: [
			node edited: self earliestCreationDate + i + 1.
			node tags add: adhocEditionMarker
		].
	].
	self root created ifNil: [
		self root created: self earliestCreationDate - 1.
		self root tags add: adhocCreationMarker.
	].
	self root edited ifNil: [
		self root edited: self latestEditionDate.
		self root tags add: adhocEditionMarker.
	].
	^ self nodesInPreorder
]
{ #category : #accessing }
GrafoscopioNode >> printOn: aStream [
"Append '( <header> )' to the default printed representation."
super printOn: aStream.
aStream
nextPutAll: '( ', self header, ' )'
]
{ #category : #accessing }
GrafoscopioNode >> remoteLocations [
"Urls where this notebook is published; lazily initialized."
^ remoteLocations ifNil: [ remoteLocations := OrderedCollection new]
]
{ #category : #accessing }
GrafoscopioNode >> root [
"Answer the root node of the tree containing the receiver."
self level = 0 ifTrue: [ ^ self ].
^ self ancestors first.
]
{ #category : #accessing }
GrafoscopioNode >> selected [
^ selected
]
{ #category : #accessing }
GrafoscopioNode >> selected: anObject [
selected := anObject
]
{ #category : #accessing }
GrafoscopioNode >> tags [
^ tags
]
{ #category : #accessing }
GrafoscopioNode >> tags: anObject [
tags := anObject
]
{ #category : #accessing }
GrafoscopioNode >> viewBody [
"Answer a rich text summarizing the node: its header followed by each
child's header and quoted body, dimmed."
| aText |
aText := self header asRopedText.
self children do: [ :child |
aText append: ' ' asRopedText.
aText append: (child header asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append: ('= "' asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append: (child body asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append:
('"' asRopedText foreground:
BrGlamorousColors disabledButtonTextColor) ].
^ aText
]
{ #category : #accessing }
GrafoscopioNode >> viewChildrenFor: aView [
<gtView>
"GT view showing the node subtree as a columned tree, expanded two levels."
children ifNil: [ ^ aView empty ].
^ aView columnedTree
title: 'Children';
priority: 1;
items: [ { self } ];
children: #children;
column: 'Name' text: #viewBody;
expandUpTo: 2
]

View File

@ -1,15 +0,0 @@
Class {
#name : #GrafoscopioNodeTest,
#superclass : #TestCase,
#category : #'MiniDocs-Legacy'
}
{ #category : #accessing }
GrafoscopioNodeTest >> testEarliestCreationNode [
	"No node of the imported remote notebook should have a creation date
	earlier than the notebook's computed earliest creation date."
	| notebookUrl notebook offendingNodes |
	notebookUrl := 'https://mutabit.com/repos.fossil/documentaton/raw/a63598382?at=documentaton.ston'.
	notebook := (STON fromString: notebookUrl asUrl retrieveContents utf8Decoded) first parent.
	offendingNodes := notebook nodesInPreorder
		select: [ :node |
			node created isNotNil
				and: [ node created < notebook earliestCreationDate ] ].
	self assert: offendingNodes size equals: 0
]

View File

@ -1,72 +0,0 @@
Extension { #name : #GtGQLSnippet }
{ #category : #'*MiniDocs' }
GtGQLSnippet >> asMarkdeep [
"Answer the Markdeep export of this GraphQL snippet: a metadata div,
the snippet content between custom fences, and a closing div."
| output |
output := WriteStream on: ''.
(self metadata)
at: 'operation' put: self operation;
at: 'input' put: self input;
at: 'context' put: self context;
yourself.
output
nextPutAll: self metadataDiv;
nextPutAll: self markdeepCustomOpener;
nextPutAll: self asMarkdownString;
nextPut: Character lf;
nextPutAll: self markdeepCustomCloser;
nextPut: Character lf;
nextPutAll: '</div>';
nextPut: Character lf;
nextPut: Character lf.
^ output contents withInternetLineEndings
]
{ #category : #'*MiniDocs' }
GtGQLSnippet >> markdeepCustomCloser [
"The closing fence is identical to the opening one."
^ self markdeepCustomOpener
]
{ #category : #'*MiniDocs' }
GtGQLSnippet >> markdeepCustomOpener [
^ '* * *'
]
{ #category : #'*MiniDocs' }
GtGQLSnippet >> metadataDiv [
"Answer the opening <div> carrying the snippet class and its STON metadata.
PENDING: Shared among several snippets. Should be abstracted further?"
| output |
output := WriteStream on: ''.
output
nextPutAll: '<div st-class="' , self class greaseString , '"';
nextPut: Character lf;
nextPutAll: ' st-data="' , (STON toStringPretty: self metadata) , '">';
nextPut: Character lf.
^ output contents withInternetLineEndings.
]
{ #category : #'*MiniDocs' }
GtGQLSnippet >> metadataUpdate [
"Answer a fresh metadata dictionary for this snippet, with creator and
modifier emails stripped of XML tag delimiters."
| createEmailSanitized editEmailSanitized |
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
^ OrderedDictionary new
at: 'id' put: self uidString;
at: 'parent' put: self parent uid asString36;
at: 'created' put: self createTime asString;
at: 'modified' put: self latestEditTime asString;
at: 'creator' put: createEmailSanitized;
at: 'modifier' put: editEmailSanitized;
yourself
]
{ #category : #'*MiniDocs' }
GtGQLSnippet >> sanitizeMetadata [
"Strip angle brackets from every metadata value so the values can be
embedded safely in HTML/XML attributes."
self metadata keysAndValuesDo: [:k :v |
(v includesAny: #($< $>))
ifTrue: [
self metadata at: k put: (v copyWithoutAll: #($< $>))
]
]
]

View File

@ -1,212 +0,0 @@
"
I model the interface between a CodiMD/HedgeDoc (https://demo.codimd.org)
documentation server and Grafoscopio.
I enable the interaction between Grafoscopio notebooks and CodiMD documents,
so one document can start online (as a CodiMD pad) and continue as a Grafoscopio
notebook, or vice versa.
"
Class {
#name : #HedgeDoc,
#superclass : #Markdown,
#instVars : [
'server',
'pad',
'url'
],
#category : #'MiniDocs-Core'
}
{ #category : #accessing }
HedgeDoc class >> fromLink: aUrl [
"Answer a new HedgeDoc whose contents are retrieved from aUrl."
^ self new
fromLink: aUrl
]
{ #category : #'as yet unclassified' }
HedgeDoc class >> newDefault [
"Answer a new HedgeDoc pointing at the default server."
^ self new
defaultServer.
]
{ #category : #accessing }
HedgeDoc >> asLePage [
"Convert this HedgeDoc document into a LePage: the body (minus its title
header, with headers promoted one level) is split into text snippets,
and the YAML frontmatter and source url are kept as page options."
| newPage sanitizedMarkdown |
sanitizedMarkdown := self bodyWithoutTitleHeader promoteMarkdownHeaders.
newPage := LePage new
initializeTitle: self title.
sanitizedMarkdown := sanitizedMarkdown markdownSplitted.
"markdownSplitted answers either a collection of line groups or a single string."
sanitizedMarkdown class = OrderedCollection ifTrue: [
sanitizedMarkdown do: [:lines | | snippet |
snippet := LeTextSnippet new
string: lines asStringWithCr;
uid: LeUID new.
newPage
addSnippet: snippet;
yourself
]
].
sanitizedMarkdown class = ByteString ifTrue: [ | snippet |
snippet := LeTextSnippet new
string: sanitizedMarkdown;
uid: LeUID new.
newPage
addSnippet: snippet;
yourself
].
newPage
incomingLinks;
splitAdmonitionSnippets.
newPage editTime: DateAndTime now.
newPage options
at: 'HedgeDoc' at: 'yamlFrontmatter' put: self metadata;
at: 'HedgeDoc' at: 'url' put: self url asString asHTMLComment.
^ newPage
]
{ #category : #accessing }
HedgeDoc >> asMarkdeep [
"Answer a Markdeep document with this document's metadata and contents,
targeting an '.html'-suffixed file."
^ Markdeep new
metadata: self metadata;
body: self contents;
file: self file, 'html'
]
{ #category : #accessing }
HedgeDoc >> asMarkdownTiddler [
"Answer a TiddlyWiki tiddler wrapping this document as markdown.
Answers self (not a Tiddler) when no url is set."
self url ifNil: [ ^ self ].
^ Tiddler new
title: self url segments first;
text: (self contents ifNil: [ self retrieveContents]);
type: 'text/x-markdown';
created: Tiddler nowLocal.
]
{ #category : #accessing }
HedgeDoc >> bodyWithoutTitleHeader [
"Answer the body with the line containing the title header removed,
or the body unchanged when no such line exists."
| headerIndex |
headerIndex := self body lines
detectIndex: [ :line | line includesSubstring: self headerAsTitle ]
ifNone: [ ^ self body].
^ (self body lines copyWithoutIndex: headerIndex) asStringWithCr
]
{ #category : #accessing }
HedgeDoc >> contents [
^ super contents
]
{ #category : #accessing }
HedgeDoc >> contents: anObject [
"Store into the inherited body instance variable."
body := anObject
]
{ #category : #'as yet unclassified' }
HedgeDoc >> defaultServer [
self server: 'https://docutopia.tupale.co'.
]
{ #category : #accessing }
HedgeDoc >> fromLink: aString [
"Point this document at aString's url and download its contents."
self url: aString.
self retrieveContents
]
{ #category : #'as yet unclassified' }
HedgeDoc >> htmlUrl [
"Answer the url of the rendered (HTML) view of the pad, which HedgeDoc
serves under an '/s/' path prefix."
| link |
link := self url copy.
link segments insert: 's' before: 1.
^ link
]
{ #category : #'as yet unclassified' }
HedgeDoc >> importContents [
self contents: self retrieveContents
]
{ #category : #accessing }
HedgeDoc >> pad [
^ pad
]
{ #category : #accessing }
HedgeDoc >> pad: anObject [
pad := anObject
]
{ #category : #accessing }
HedgeDoc >> retrieveContents [
"Download the pad's markdown (via its '/download' endpoint) and parse it
into the receiver. Answers self; does nothing when no url is set."
self url ifNil: [ ^ self ].
self fromString: (self url addPathSegment: 'download') retrieveContents.
^ self.
]
{ #category : #'as yet unclassified' }
HedgeDoc >> retrieveHtmlContents [
"Answer the raw HTML of the rendered pad, or self when no url is set."
| htmlContents |
self url ifNil: [ ^ self ].
htmlContents := self htmlUrl.
^ htmlContents retrieveContents
]
{ #category : #'as yet unclassified' }
HedgeDoc >> saveContentsToFile: aFileLocator [
"Save the pad's markdown to aFileLocator; no-op when no url is set."
self url ifNil: [ ^ self ].
^ (self url addPathSegment: 'download') saveContentsToFile: aFileLocator
]
{ #category : #'as yet unclassified' }
HedgeDoc >> saveHtmlContentsToFile: aFileLocator [
"Save the pad's rendered HTML to aFileLocator; no-op when no url is set."
self url ifNil: [ ^ self ].
^ self htmlUrl saveContentsToFile: aFileLocator
]
{ #category : #accessing }
HedgeDoc >> server [
^ server
]
{ #category : #accessing }
HedgeDoc >> server: aUrlString [
server := aUrlString
]
{ #category : #accessing }
HedgeDoc >> url [
"Answer the document url as a ZnUrl, or nil (implicitly) when unset."
url ifNotNil: [ ^ url asUrl ]
]
{ #category : #accessing }
HedgeDoc >> url: anObject [
	"Set the receiver's url after verifying it points to a HedgeDoc server
	(detected via the application-name meta tag of the served page), and
	derive the server from the url's host.
	Fixed: on a non-HedgeDoc url the old code set url to nil but then fell
	through and unconditionally overwrote it with anObject, making the
	validation dead code; now it informs the user and aborts."
	| tempUrl html |
	tempUrl := anObject asZnUrl.
	html := XMLHTMLParser parse: tempUrl retrieveContents.
	(html xpath: '//head/meta[@name="application-name"][@content = "HedgeDoc - Ideas grow better together"]') isEmpty
		ifTrue: [
			self inform: 'Not a hedgedoc url'.
			url := nil.
			^ self ].
	server := tempUrl host.
	url := anObject
]
{ #category : #visiting }
HedgeDoc >> visit [
"Open the pad in the default web browser."
WebBrowser openOn: self server, '/', self pad.
]
{ #category : #transformation }
HedgeDoc >> youtubeEmbeddedLinksToMarkdeepFormat [
"I replace the youtube embedded links from hedgedoc format to markdeep format.
Each parsed {%youtube ... %} tag is rewritten as a ![](https://youtu.be/...)
image link. Answers self."
| linkDataCollection |
linkDataCollection := (HedgeDocGrammar new youtubeEmbeddedLink parse: self contents)
collect: [ :each | | parsedLink |
parsedLink := OrderedCollection new.
parsedLink
add: ('' join:( each collect: [ :s | s value]));
add: '![](https://youtu.be/',
each second value trimmed , ')';
add: (each first start to: each third stop);
yourself ].
"NOTE(review): only the first two elements of each triple are used below;
the stored interval looks unused — confirm before removing."
linkDataCollection do: [ :each |
self contents: (self contents
copyReplaceAll: each first with: each second) ].
^ self
]

View File

@ -1,36 +0,0 @@
"
I hold GT examples exercising the HedgeDoc document transformations.
"
Class {
#name : #HedgeDocExamples,
#superclass : #Object,
#category : #'MiniDocs-Examples'
}
{ #category : #accessing }
HedgeDocExamples >> hedgeDocReplaceYoutubeEmbeddedLinkExample [
	<gtExample>
	"Show that #youtubeEmbeddedLinksToMarkdeepFormat rewrites HedgeDoc
	{%youtube ...%} tags as Markdeep image links while an untouched copy
	keeps the original tags.
	Fixed: removed the declared-but-unused local parsedCollection."
	| aSampleString hedgedocDoc hedgedocDocLinksReplaced |
	aSampleString := '---
breaks: false
---
# Titulo
Un texto de ejemplo
# Enlaces youtube
{%youtube 1aw3XmTqFXA %}
otro video
{%youtube U7mpXaLN9Nc %}'.
	hedgedocDoc := HedgeDoc new
		contents: aSampleString.
	hedgedocDocLinksReplaced := HedgeDoc new contents: aSampleString; youtubeEmbeddedLinksToMarkdeepFormat.
	self assert: (hedgedocDoc contents
		includesSubstring: '{%youtube 1aw3XmTqFXA %}' ).
	self assert: (hedgedocDocLinksReplaced contents
		includesSubstring: '![](https://youtu.be/1aw3XmTqFXA)' ).
	^ { 'Original' -> hedgedocDoc .
		'Replaced' -> hedgedocDocLinksReplaced } asDictionary
]

View File

@ -1,42 +0,0 @@
"
I am a PetitParser2 composite grammar for HedgeDoc documents, currently
recognizing the YAML frontmatter and embedded {%youtube ... %} tags.
"
Class {
#name : #HedgeDocGrammar,
#superclass : #PP2CompositeNode,
#instVars : [
'youtubeEmbeddedLink'
],
#category : #'MiniDocs-Model'
}
{ #category : #accessing }
HedgeDocGrammar >> metadataAsYAML [
"I parse the header of the hedgedoc document for YAML metadata:
a '---' fence, lazy content, and a closing '---' fence."
^ '---' asPParser token, #any asPParser starLazy token, '---' asPParser token
]
{ #category : #accessing }
HedgeDocGrammar >> start [
"Entry production: optional YAML frontmatter (or anything), then the
youtubeEmbeddedLink production (wired by PP2CompositeNode)."
| any |
any := #any asPParser.
^ (self metadataAsYAML / any starLazy), youtubeEmbeddedLink
]
{ #category : #accessing }
HedgeDocGrammar >> youtubeEmbeddedLink [
"I parse the youtube embedded links in a hedgedoc document,
as islands in a sea of other content."
| link linkSea |
link := self youtubeEmbeddedLinkOpen,
#any asPParser starLazy token,
self youtubeEmbeddedLinkClose.
linkSea := link islandInSea star.
^ linkSea
]
{ #category : #accessing }
HedgeDocGrammar >> youtubeEmbeddedLinkClose [
^ '%}' asPParser token
]
{ #category : #accessing }
HedgeDocGrammar >> youtubeEmbeddedLinkOpen [
^ '{%youtube' asPParser token
]

View File

@ -1,19 +0,0 @@
"
I hold GT examples exercising the HedgeDocGrammar parser productions.
"
Class {
#name : #HedgeDocGrammarExamples,
#superclass : #Object,
#category : #'MiniDocs-Examples'
}
{ #category : #accessing }
HedgeDocGrammarExamples >> hedgeDocParseYoutubeEmbeddedLinkExample [
<gtExample>
"Parse a single {%youtube ... %} tag and check the three resulting tokens:
opener, video id token, and closer."
| aSampleString parsedStringTokens parsedCollection |
aSampleString := '{%youtube 1aw3XmTqFXA %}'.
parsedStringTokens := HedgeDocGrammar new youtubeEmbeddedLink parse: aSampleString.
parsedCollection := parsedStringTokens first.
self assert: parsedCollection size equals: 3.
self assert: parsedCollection first value equals: '{%youtube'.
self assert: parsedCollection second class equals: PP2Token.
self assert: parsedCollection third value equals: '%}'.
^ parsedStringTokens
]

View File

@ -1,15 +0,0 @@
"
I test the HedgeDocGrammar productions using the PetitParser2 test harness.
"
Class {
#name : #HedgeDocGrammarTest,
#superclass : #PP2CompositeNodeTest,
#category : #'MiniDocs-Model'
}
{ #category : #accessing }
HedgeDocGrammarTest >> parserClass [
"Grammar under test, required by PP2CompositeNodeTest."
^ HedgeDocGrammar
]
{ #category : #accessing }
HedgeDocGrammarTest >> testYoutubeEmbeddedLink [
^ self parse: '{%youtube U7mpXaLN9Nc %}' rule: #youtubeEmbeddedLink
]

View File

@ -1,26 +0,0 @@
Extension { #name : #LeChangesSnippet }
{ #category : #'*MiniDocs' }
LeChangesSnippet >> metadataUpdate [
"Answer a fresh metadata dictionary for this snippet, with creator and
modifier emails stripped of XML tag delimiters."
| createEmailSanitized editEmailSanitized |
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
^ OrderedDictionary new
at: 'id' put: self uidString;
at: 'parent' put: self parent uuid;
at: 'created' put: self createTime asString;
at: 'modified' put: self latestEditTime asString;
at: 'creator' put: createEmailSanitized;
at: 'modifier' put: editEmailSanitized;
yourself
]
{ #category : #'*MiniDocs' }
LeChangesSnippet >> sanitizeMetadata [
"Strip angle brackets from every metadata value so the values can be
embedded safely in HTML/XML attributes."
self metadata keysAndValuesDo: [:k :v |
(v includesAny: #($< $>))
ifTrue: [
self metadata at: k put: (v copyWithoutAll: #($< $>))
]
]
]

View File

@ -1,21 +0,0 @@
Extension { #name : #LeCodeSnippet }
{ #category : #'*MiniDocs' }
LeCodeSnippet >> metadataUpdate [
"Answer a fresh metadata dictionary for this snippet. The parent id is
nil for orphan snippets, the parent itself when it is already a string,
or the parent's uid string otherwise."
| surrogate |
self parent
ifNil: [ surrogate := nil]
ifNotNil: [
self parent isString
ifTrue: [ surrogate := self parent]
ifFalse: [ surrogate := self parent uidString ]
].
^ OrderedDictionary new
at: 'id' put: self uidString;
at: 'parent' put: surrogate;
at: 'created' put: self createTime asString;
at: 'modified' put: self latestEditTime asString;
at: 'creator' put: self createEmail asString withoutXMLTagDelimiters;
at: 'modifier' put: self editEmail asString withoutXMLTagDelimiters;
yourself
]

View File

@ -1,17 +1,28 @@
Extension { #name : #LeDatabase }
{ #category : #'*MiniDocs' }
LeDatabase >> addPage2FromMarkdeep: markdeepDocTree withRemote: externalDocLocation [
| newPage |
"^ { snippets . page }"
"Rebulding partial subtrees"
"Adding unrooted subtrees to the page"
"^ newPage"
newPage := self
rebuildPageFromMarkdeep: markdeepDocTree
withRemote: externalDocLocation.
newPage
childrenDo: [ :snippet |
LeDatabase >> addPageFromMarkdeep: markdeepDocTree withRemote: externalDocLocation [
| remoteMetadata divSnippets snippets page |
divSnippets := (markdeepDocTree xpath: '//div[@st-class]') asOrderedCollection
collect: [ :xmlElement | xmlElement postCopy ].
snippets := divSnippets
collect: [ :xmlElement |
(xmlElement attributes at: 'st-class') = 'LeTextSnippet'
ifTrue: [ LeTextSnippet new contentFrom: xmlElement ]
ifFalse: [ (xmlElement attributes at: 'st-class') = 'LePharoSnippet'
ifTrue: [ LePharoSnippet new contentFrom: xmlElement ] ] ].
remoteMetadata := Markdeep new metadataFromXML: markdeepDocTree.
page := LePage new
title: (remoteMetadata at: 'title');
basicUid: (UUID fromString36: (remoteMetadata at: 'id'));
createTime: (LeTime new time: (remoteMetadata at: 'created') asDateAndTime);
editTime: (LeTime new time: (remoteMetadata at: 'modified') asDateAndTime);
latestEditTime: (LeTime new time: (remoteMetadata at: 'modified') asDateAndTime);
createEmail: (LeEmail new email: (remoteMetadata at: 'creator'));
editEmail: (LeEmail new email: (remoteMetadata at: 'modifier')).
snippets do: [ :snippet | page addSnippet: snippet ].
page children
do: [ :snippet |
(self hasBlockUID: snippet uid)
ifTrue: [ | existingPage |
existingPage := self pages
@ -20,56 +31,6 @@ LeDatabase >> addPage2FromMarkdeep: markdeepDocTree withRemote: externalDocLocat
^ self ]
ifFalse: [ snippet database: self.
self registerSnippet: snippet ] ].
self addPage: newPage.
^ newPage
]
{ #category : #'*MiniDocs' }
LeDatabase >> addPageCopy: aLePage [
	"Duplicate aLePage in me and answer the copy.
	The copy's title becomes 'Copy of <original title> (id: <first 8 chars of new uid>)'.
	A timestamp is used as a unique placeholder during duplication and then
	replaced with the short id, so repeated copies get distinct titles."
	| pageTitle timestamp shortID page |
	timestamp := DateAndTime now asString.
	pageTitle := 'Copy of ', aLePage title.
	page := aLePage duplicatePageWithNewName: pageTitle, timestamp.
	"Swap the placeholder timestamp for a short, stable id suffix."
	shortID := '(id: ', (page uid asString copyFrom: 1 to: 8), ')'.
	page title: (page title copyReplaceAll: timestamp with: shortID).
	^ page
]
{ #category : #'*MiniDocs' }
LeDatabase >> addPageFromMarkdeep: markdeepDocTree withRemote: externalDocLocation [
	"Import a page from the Markdeep document tree markdeepDocTree fetched from
	externalDocLocation. Answer the imported LePage, or answer self when a snippet
	uid collision with an existing local page is detected (an import error is
	recorded in that case and nothing is added)."
	| remoteMetadata divSnippets dataSnippets page |
	divSnippets := (markdeepDocTree xpath: '//div[@st-class]') asOrderedCollection
		collect: [ :xmlElement | xmlElement postCopy ].
	remoteMetadata := Markdeep new metadataFromXML: markdeepDocTree.
	"Ensuring remote metadata has consistent data: missing keys get sensible defaults."
	remoteMetadata at: 'origin' put: externalDocLocation.
	remoteMetadata at: 'title' ifAbsentPut: [ markdeepDocTree detectMarkdeepTitle ].
	remoteMetadata at: 'id' ifAbsentPut: [UUID new asString36].
	remoteMetadata at: 'created' ifAbsentPut: [ DateAndTime now] .
	remoteMetadata at: 'creator' ifAbsentPut: [ 'unknown' ].
	remoteMetadata at: 'modified' ifAbsentPut: [ DateAndTime now].
	remoteMetadata at: 'modifier' ifAbsentPut: [ 'unknown' ].
	dataSnippets := self sanitizeMarkdeepSnippets: divSnippets withMetadata: remoteMetadata.
	page := LePage new.
	page fromDictionary: remoteMetadata.
	dataSnippets do: [:each | | snippet|
		snippet := each asLepiterSnippet.
		page addSnippet: snippet.
	].
	"Abort (recording an import error) if any imported snippet uid already exists locally."
	page children
		do: [ :snippet |
			(self hasBlockUID: snippet uid)
				ifTrue: [ | existingPage |
					existingPage := self pages
						detect: [ :pageTemp | pageTemp includesSnippetUid: snippet uid ]
						ifFound: [
							self importErrorForLocal: existingPage withRemote: externalDocLocation.
							^ self
						]
						ifNone: [ snippet database: self ].
				]
				ifFalse: [ snippet database: self ]
		].
	self addPage: page.
	^ page
]
@ -81,7 +42,7 @@ LeDatabase >> addPageFromMarkdeepUrl: aString [
page
ifNotNil: [ :arg |
self importErrorForLocal: page withRemote: aString.
^ self errorCardFor: page uidString ].
^ self ].
^ self addPageFromMarkdeep: (self docTreeForLink: aString) withRemote: aString
]
@ -101,10 +62,10 @@ LeDatabase >> docTreeForLink: aString [
]
{ #category : #'*MiniDocs' }
LeDatabase >> errorCardFor: errorKey [
LeDatabase >> errorCardFor: error [
| keepButton overwriteButton loadCopyButton errorMessageUI localPage |
| keepButton overwriteButton backupButton errorMessageUI localPage errorKey |
errorKey := error keys first.
localPage := self pageWithID: errorKey.
keepButton := BrButton new
aptitude: BrGlamorousButtonWithIconAndLabelAptitude;
@ -121,28 +82,22 @@ LeDatabase >> errorCardFor: errorKey [
icon: BrGlamorousVectorIcons edit;
action: [ :aButton |
self removePage: localPage.
aButton phlow spawnObject: (self addPageFromMarkdeepUrl: (self errors at: errorKey at: 'remote')).
aButton phlow spawnObject: (self addPageFromMarkdeepUrl: (error at: errorKey at: 'remote')).
self errors removeKey: errorKey
];
margin: (BlInsets left: 10).
loadCopyButton := BrButton new
backupButton := BrButton new
aptitude: BrGlamorousButtonWithIconAndLabelAptitude;
label: 'Load remote page as a copy';
label: 'Backup local page';
icon: BrGlamorousVectorIcons changes;
action: [ :aButton | self ];
action: [ :aButton | ];
margin: (BlInsets left: 10).
errorMessageUI := BrEditor new
aptitude: BrGlamorousRegularEditorAptitude new ;
text: (self errors at: errorKey at: 'message');
text: (error at: errorKey at: 'message');
vFitContent.
^ BrHorizontalPane new
matchParent;
alignCenter;
addChild:errorMessageUI;
addChild: keepButton;
addChild: overwriteButton;
addChild: loadCopyButton
^ { errorMessageUI. keepButton. overwriteButton. backupButton }
]
{ #category : #'*MiniDocs' }
@ -169,34 +124,6 @@ LeDatabase >> gtViewErrorDetailsOn: aView [
].
]
{ #category : #'*MiniDocs' }
LeDatabase >> gtViewErrorDetailsOn: aView withKey: errorKey [
	"Answer a GT view showing the import-error card stored under errorKey.
	The card widgets come from #errorCardFor: and are laid out in a flow container
	that fits its content vertically and matches the parent horizontally.
	(Fixes the misspelled parameter name 'erroKey'; selector is unchanged.)"
	<gtView>
	^ aView explicit
		title: 'Errors beta' translated;
		priority: 5;
		stencil: [ | container |
			container := BlElement new
				layout: BlFlowLayout new;
				constraintsDo: [ :c |
					c vertical fitContent.
					c horizontal matchParent ];
				padding: (BlInsets all: 10).
			container
				addChildren: (self errorCardFor: errorKey)
		].
]
{ #category : #'*MiniDocs' }
LeDatabase >> importDocumentFrom: aURL [
	"Import the remote document at aURL into me and answer the resulting page.
	Markdeep documents ('.md.html') go through addPageFromMarkdeepUrl:;
	anything else is assumed to be a HedgeDoc document."
	| doc |
	"Using the file extension in the URL as a cheap (non-robust) way of detecting
	the kind of document. Better file type detection should be implemented in the future."
	(aURL endsWith: '.md.html') ifTrue: [ ^ self addPageFromMarkdeepUrl: aURL ].
	doc := HedgeDoc fromLink: aURL asString.
	^ self addPage: doc asLePage
]
{ #category : #'*MiniDocs' }
LeDatabase >> importErrorForLocal: page withRemote: externalDocLocation [
@ -227,8 +154,7 @@ LeDatabase >> importErrorForLocal: page withRemote: externalDocLocation [
at: 'remote' put: externalDocLocation;
at: 'message' put: message ;
yourself.
self errors at: id put: error.
^ self errors at: id.
self errors at: id put: error
]
{ #category : #'*MiniDocs' }
@ -236,77 +162,3 @@ LeDatabase >> options [
^ options
]
{ #category : #'*MiniDocs' }
LeDatabase >> previewSanitizedPageFromMarkdeep: markdeepDocTree withRemote: externalDocLocation [
	"Answer a debugging preview of a Markdeep import without modifying me.
	The result is a three-element array: the raw snippet <div> elements, their
	sanitized counterparts, and the remote document metadata (with 'origin'
	set to externalDocLocation)."
	| remoteMetadata divSnippets divSnippetsSanitized |
	divSnippets := (markdeepDocTree xpath: '//div[@st-class]') asOrderedCollection
		collect: [ :xmlElement | xmlElement postCopy ].
	remoteMetadata := Markdeep new metadataFromXML: markdeepDocTree.
	remoteMetadata at: 'origin' put: externalDocLocation.
	divSnippetsSanitized := self sanitizeMarkdeepSnippets: divSnippets withMetadata: remoteMetadata.
	^ { divSnippets . divSnippetsSanitized . remoteMetadata }
]
{ #category : #'*MiniDocs' }
LeDatabase >> rebuildPageFromMarkdeep: markdeepDocTree withRemote: externalDocLocation [
	"Answer a new LePage rebuilt from markdeepDocTree: snippet <div>s are sanitized,
	converted back into Lepiter snippets, relinked to their parent snippets, and
	the subtrees whose parent is not present ('unrooted') are attached to the page."
	| newPage snippets divSnippets remoteMetadata dataSnippets |
	divSnippets := (markdeepDocTree xpath: '//div[@st-class]') asOrderedCollection
		collect: [ :xmlElement | xmlElement postCopy ].
	remoteMetadata := Markdeep new metadataFromXML: markdeepDocTree.
	remoteMetadata at: 'origin' put: externalDocLocation.
	dataSnippets := self
		sanitizeMarkdeepSnippets: divSnippets
		withMetadata: remoteMetadata.
	snippets := dataSnippets collect: [ :each | each asLepiterSnippet ].
	newPage := LePage new
		title: (remoteMetadata at: 'title');
		basicUid: (UUID fromString36: (remoteMetadata at: 'id'));
		createTime: (LeTime new time: (remoteMetadata at: 'created') asDateAndTime);
		editTime: (LeTime new time: (remoteMetadata at: 'modified') asDateAndTime);
		latestEditTime: (LeTime new time: (remoteMetadata at: 'modified') asDateAndTime);
		createEmail: (remoteMetadata at: 'creator');
		editEmail: (remoteMetadata at: 'modifier').
	"Rebuilding partial subtrees: link each snippet to its parent when present,
	otherwise mark it with the string 'unrooted' (hence the ByteString check below)."
	snippets
		do: [ :currentSnippet |
			| parentSnippet |
			parentSnippet := snippets
				detect: [ :item | item uid asString = currentSnippet parent ]
				ifNone: [ parentSnippet := 'unrooted' ].
			currentSnippet parent: parentSnippet.
			parentSnippet class = ByteString
				ifFalse: [ parentSnippet children addChild: currentSnippet ] ].
	"Adding unrooted subtrees to the page."
	snippets
		select: [ :each | each parent = 'unrooted' ]
		thenDo: [ :unrooted | newPage addSnippet: unrooted ].
	^ newPage
]
{ #category : #'*MiniDocs' }
LeDatabase >> sanitizeMarkdeepSnippets: divSnippets withMetadata: remoteMetadata [
	"Answer snippet dictionaries for divSnippets with repaired timestamps.
	Snippets whose 'created' (and possibly 'modified') values are the string 'nil'
	get plausible replacement timestamps, and a warning is recorded in their errata
	pointing at the 'origin' metadata for historical traceability."
	^ divSnippets collectWithIndex: [:markdeepDiv :i | | snippetData creationTime modificationTime timestampWarning |
		snippetData := markdeepDiv asSnippetDictionary.
		creationTime := snippetData at: 'created'.
		modificationTime := snippetData at: 'modified'.
		timestampWarning := [:timestamp |
			'Modified timestamps: ', timestamp ,' date and time was replaced instead of nil value. See "origin" metadata for more historical traceability information.'
		].
		"Creation missing but modification known: backdate the creation time."
		(creationTime = 'nil' and: [ modificationTime ~= 'nil' ])
			ifTrue: [
				snippetData redefineTimestampsBefore: modificationTime.
				snippetData addErrata: (timestampWarning value: 'creation').
				snippetData at: 'origin' put: (remoteMetadata at: 'origin').
			].
		"Both missing: derive times from the document creation time and snippet position."
		(creationTime = 'nil' and: [ modificationTime = 'nil' ])
			ifTrue: [ | timeDiff |
				timeDiff := divSnippets size - i. "Suggesting that last snippets were modified after the first ones."
				modificationTime := (remoteMetadata at: 'created') asDateAndTime - timeDiff seconds.
				snippetData redefineTimestampsBefore: modificationTime.
				snippetData addErrata: (timestampWarning value: 'creation').
				snippetData addErrata: (timestampWarning value: 'modification').
				snippetData at: 'origin' put: (remoteMetadata at: 'origin').
			].
		snippetData.
	]
]

View File

@ -1,26 +0,0 @@
Extension { #name : #LeDockerSnippet }

{ #category : #'*MiniDocs' }
LeDockerSnippet >> metadataUpdate [
	"Answer an OrderedDictionary with my export metadata (id, parent, timestamps,
	creator and modifier). XML tag delimiters are stripped from the emails so the
	values are safe to embed in exported markup."
	| createEmailSanitized editEmailSanitized |
	createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
	editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
	^ OrderedDictionary new
		at: 'id' put: self uidString;
		at: 'parent' put: self parent uuid;
		at: 'created' put: self createTime asString;
		at: 'modified' put: self latestEditTime asString;
		at: 'creator' put: createEmailSanitized;
		at: 'modifier' put: editEmailSanitized;
		yourself
]

{ #category : #'*MiniDocs' }
LeDockerSnippet >> sanitizeMetadata [
	"Remove '<' and '>' from every metadata value, so stored metadata cannot
	carry tag delimiters into exported documents."
	self metadata keysAndValuesDo: [:k :v |
		(v includesAny: #($< $>))
			ifTrue: [
				self metadata at: k put: (v copyWithoutAll: #($< $>))
			]
	]
]

View File

@ -1,32 +0,0 @@
Extension { #name : #LeExampleSnippet }

{ #category : #'*MiniDocs' }
LeExampleSnippet >> asMarkdeep [
	"Example snippets have no Markdeep representation yet: answer an empty string
	so they are silently skipped in exported documents."
	^ (WriteStream on: '') contents
]

{ #category : #'*MiniDocs' }
LeExampleSnippet >> metadataUpdate [
	"Answer an OrderedDictionary with my export metadata (id, parent, timestamps,
	creator and modifier). XML tag delimiters are stripped from the emails so the
	values are safe to embed in exported markup."
	| createEmailSanitized editEmailSanitized |
	createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
	editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
	^ OrderedDictionary new
		at: 'id' put: self uidString;
		at: 'parent' put: self parent uuid;
		at: 'created' put: self createTime asString;
		at: 'modified' put: self latestEditTime asString;
		at: 'creator' put: createEmailSanitized;
		at: 'modifier' put: editEmailSanitized;
		yourself
]

{ #category : #'*MiniDocs' }
LeExampleSnippet >> sanitizeMetadata [
	"Remove '<' and '>' from every metadata value, so stored metadata cannot
	carry tag delimiters into exported documents."
	self metadata keysAndValuesDo: [:k :v |
		(v includesAny: #($< $>))
			ifTrue: [
				self metadata at: k put: (v copyWithoutAll: #($< $>))
			]
	]
]

View File

@ -1,26 +0,0 @@
Extension { #name : #LeGitHubSnippet }

{ #category : #'*MiniDocs' }
LeGitHubSnippet >> metadataUpdate [
	"Answer an OrderedDictionary with my export metadata (id, parent, timestamps,
	creator and modifier). XML tag delimiters are stripped from the emails so the
	values are safe to embed in exported markup."
	| createEmailSanitized editEmailSanitized |
	createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
	editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
	^ OrderedDictionary new
		at: 'id' put: self uidString;
		at: 'parent' put: self parent uuid;
		at: 'created' put: self createTime asString;
		at: 'modified' put: self latestEditTime asString;
		at: 'creator' put: createEmailSanitized;
		at: 'modifier' put: editEmailSanitized;
		yourself
]

{ #category : #'*MiniDocs' }
LeGitHubSnippet >> sanitizeMetadata [
	"Remove '<' and '>' from every metadata value, so stored metadata cannot
	carry tag delimiters into exported documents."
	self metadata keysAndValuesDo: [:k :v |
		(v includesAny: #($< $>))
			ifTrue: [
				self metadata at: k put: (v copyWithoutAll: #($< $>))
			]
	]
]

View File

@ -1,8 +0,0 @@
Extension { #name : #LeHeaderNode }

{ #category : #'*MiniDocs' }
LeHeaderNode >> headerFullName [
	"Answer the full header text as it appears in the complete source of my top parent."
	| fullSource |
	fullSource := self topParent completeSource.
	^ fullSource copyFrom: self startPosition to: self stopPosition
]

View File

@ -1,56 +0,0 @@
Extension { #name : #LeHomeDatabaseHeaderElement }

{ #category : #'*MiniDocs' }
LeHomeDatabaseHeaderElement >> importMinidocsButtonElement [
	"Answer the MiniDocs import button, creating and caching it in userData on first access."
	^ self userData at: 'importMinidocsButtonElement' ifAbsentPut: [ self newImportMiniDocsButton]
]

{ #category : #'*MiniDocs' }
LeHomeDatabaseHeaderElement >> initialize [
	"Build the header toolbar, extending the default items with the MiniDocs import button."
	super initialize.
	self initializeEditableTitleElement.
	self initializeButtons.
	self addChild: self toolbarElement as: #toolbar.
	self toolbarElement
		addItem: self editableTitleElement;
		addItem: self newAddNewPageButton;
		addItem: self removeButtonElement;
		addItem: self importButtonElement;
		addItem: self exportButtonElement;
		addItem: self importMinidocsButtonElement.
	self addAptitude: (BrLayoutResizerAptitude new
		hInherit;
		vAnyToFitContent;
		hInherit: self toolbarElement;
		vAnyToFitContent: self toolbarElement).
]

{ #category : #'*MiniDocs' }
LeHomeDatabaseHeaderElement >> initializeButtons [
	"Create all toolbar buttons, including the MiniDocs import button."
	self initializeRemoveButton.
	self initializeImportButton.
	self initializeExportButton.
	self initializeMiniDocsImportButton.
]

{ #category : #'*MiniDocs' }
LeHomeDatabaseHeaderElement >> initializeMiniDocsImportButton [
	"Create the MiniDocs import button and cache it in userData."
	self userData at: 'importMinidocsButtonElement' put: self newImportMiniDocsButton.
]

{ #category : #'*MiniDocs' }
LeHomeDatabaseHeaderElement >> newImportMiniDocsButton [
	"Answer a freshly configured MiniDocs import widget."
	^ LeMiniDocsImport new
		tooltip: 'Import document from link';
		contentExtent: 200 @ 30
]

{ #category : #'*MiniDocs' }
LeHomeDatabaseHeaderElement >> updateToolbarButtons [
	"Propagate the current database to every database-dependent toolbar button."
	self updateRemoveButtonElement.
	self exportButtonElement database: self database.
	self importButtonElement database: self database.
	self importMinidocsButtonElement database: self database.
]

View File

@ -1,26 +0,0 @@
Extension { #name : #LeJenkinsSnippet }

{ #category : #'*MiniDocs' }
LeJenkinsSnippet >> metadataUpdate [
	"Answer an OrderedDictionary with my export metadata (id, parent, timestamps,
	creator and modifier). XML tag delimiters are stripped from the emails so the
	values are safe to embed in exported markup."
	| createEmailSanitized editEmailSanitized |
	createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
	editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
	^ OrderedDictionary new
		at: 'id' put: self uidString;
		at: 'parent' put: self parent uuid;
		at: 'created' put: self createTime asString;
		at: 'modified' put: self latestEditTime asString;
		at: 'creator' put: createEmailSanitized;
		at: 'modifier' put: editEmailSanitized;
		yourself
]

{ #category : #'*MiniDocs' }
LeJenkinsSnippet >> sanitizeMetadata [
	"Remove '<' and '>' from every metadata value, so stored metadata cannot
	carry tag delimiters into exported documents."
	self metadata keysAndValuesDo: [:k :v |
		(v includesAny: #($< $>))
			ifTrue: [
				self metadata at: k put: (v copyWithoutAll: #($< $>))
			]
	]
]

View File

@ -1,89 +0,0 @@
Class {
	#name : #LeMiniDocsImport,
	#superclass : #BrButton,
	#instVars : [
		'contentExtent',
		'database'
	],
	#category : #'MiniDocs-UI'
}

{ #category : #accessing }
LeMiniDocsImport >> contentExtent [
	"Answer the extent (a Point) used for the dropdown content."
	^ contentExtent
]

{ #category : #accessing }
LeMiniDocsImport >> contentExtent: aPoint [
	"Set the extent used for the dropdown content. aPoint must not be nil."
	self
		assert: [ aPoint isNotNil ]
		description: [ 'Extent must be non-nil' ].
	contentExtent := aPoint
]

{ #category : #accessing }
LeMiniDocsImport >> createDropdownExpandedHandleButton [
	"Answer the button shown as the expanded dropdown handle."
	^ BrButton new
		icon: BrGlamorousVectorIcons downwards;
		label: self tooltip;
		aptitude: BrGlamorousButtonWithIconAndLabelAptitude
]

{ #category : #accessing }
LeMiniDocsImport >> createURLeditable [
	"Answer the dropdown content: an editable label where the user types a
	document URL. Accepting the editor triggers the import."
	| base editable |
	base := BlElement new
		background: (Color white);
		size: 200 @ 30;
		margin: (BlInsets all: 10);
		yourself.
	editable := BrEditableLabel new
		aptitude: BrGlamorousEditableLabelAptitude new glamorousRegularFontAndSize;
		text: 'Document link';
		switchToEditor.
	editable when: BrEditorAcceptWish do: [ :aWish |
		self importDocumentFrom: aWish text asString.
	].
	base addChild: editable.
	^ base
]

{ #category : #accessing }
LeMiniDocsImport >> database [
	"Answer the LeDatabase documents are imported into."
	^ database
]

{ #category : #accessing }
LeMiniDocsImport >> database: aLeDatabase [
	"Set the target database for imports."
	database := aLeDatabase
]

{ #category : #accessing }
LeMiniDocsImport >> importDocumentFrom: aURL [
	"Delegate the import of the document at aURL to my database."
	^ self database importDocumentFrom: aURL.
]

{ #category : #accessing }
LeMiniDocsImport >> initialize [
	"Configure me as an 'Add MiniDocs' button whose dropdown shows the URL editor."
	super initialize.
	self
		icon: BrGlamorousVectorIcons downwards;
		label: 'Add MiniDocs';
		aptitude: BrGlamorousButtonWithIconAndLabelAptitude.
	self addAptitude: (BrGlamorousWithDropdownAptitude
		handle: [ self createDropdownExpandedHandleButton ]
		content: [ self createURLeditable ]).
	"Drop the exterior styling so only the dropdown look remains."
	self aptitude - BrGlamorousButtonExteriorAptitude.
]

{ #category : #accessing }
LeMiniDocsImport >> tooltip [
	"The tooltip doubles as the button label."
	^ self label
]

{ #category : #accessing }
LeMiniDocsImport >> tooltip: aString [
	"Setting the tooltip sets the button label."
	self label: aString
]

View File

@ -1,26 +0,0 @@
Extension { #name : #LeMockedSnippet }

{ #category : #'*MiniDocs' }
LeMockedSnippet >> metadataUpdate [
	"Answer an OrderedDictionary with my export metadata (id, parent, timestamps,
	creator and modifier). XML tag delimiters are stripped from the emails so the
	values are safe to embed in exported markup."
	| createEmailSanitized editEmailSanitized |
	createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
	editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
	^ OrderedDictionary new
		at: 'id' put: self uidString;
		at: 'parent' put: self parent uuid;
		at: 'created' put: self createTime asString;
		at: 'modified' put: self latestEditTime asString;
		at: 'creator' put: createEmailSanitized;
		at: 'modifier' put: editEmailSanitized;
		yourself
]

{ #category : #'*MiniDocs' }
LeMockedSnippet >> sanitizeMetadata [
	"Remove '<' and '>' from every metadata value, so stored metadata cannot
	carry tag delimiters into exported documents."
	self metadata keysAndValuesDo: [:k :v |
		(v includesAny: #($< $>))
			ifTrue: [
				self metadata at: k put: (v copyWithoutAll: #($< $>))
			]
	]
]

View File

@ -1,6 +0,0 @@
Extension { #name : #LeNullDatabase }

{ #category : #'*MiniDocs' }
LeNullDatabase >> attachmentsDirectory [
	"Answer a temporary attachments directory, since a null database has no
	storage of its own. The directory is created if it does not exist yet."
	^ (FileLocator temp / 'lepiter' / 'attachments') ensureCreateDirectory.
]

View File

@ -3,7 +3,7 @@ Extension { #name : #LePage }
{ #category : #'*MiniDocs' }
LePage >> asHtmlFile [
self asMarkdownFileWithMetadataWrappers.
self asMarkdownFile.
self defaultPandocTemplate exists
ifFalse: [ MarkupFile installTemplate: 'https://mutabit.com/repos.fossil/mutabit/doc/trunk/plantillas/Pandoc/clean-menu-mod.html' into: self defaultPandocTemplate parent ].
@ -21,76 +21,61 @@ LePage >> asHtmlFile [
LePage >> asMarkdeep [
| bodyStream markdeep |
bodyStream := '' writeStream.
bodyStream nextPutAll: self notebookMetadataSnippet asMarkdeep.
self preorderTraversal
do: [ :snippet | bodyStream nextPutAll: snippet asMarkdeep ].
self preorderTraversal do: [:snippet |
bodyStream nextPutAll: snippet asMarkdeep
].
markdeep := Markdeep new
title: self title;
body: bodyStream contents;
metadata: self metadata;
file: self storage / self markdeepFileName;
navTop: self navTop.
self metadata
at: 'authors'
ifPresent: [ :author | markdeep metadata at: 'authors' put: author ].
self metadata
at: 'version'
ifPresent: [ :version | markdeep metadata at: 'version' put: version ].
markdeep head: nil.
^ markdeep
title: self title;
body: bodyStream contents;
navTop: self navTop.
self metadata keysAndValuesDo: [:k :v |
k = 'lang'
ifTrue: [
markdeep head
add: '<meta lang="', v,'">';
yourself.
]
ifFalse: [
markdeep head
add: '<meta name="', k, '" content="', v,'">';
yourself.
]
].
self metadata at: 'authors' ifPresent: [:author | markdeep metadata at: 'authors' put: author ].
self metadata at: 'version' ifPresent: [:version | markdeep metadata at: 'version' put: version ].
^ markdeep.
]
{ #category : #'*MiniDocs' }
LePage >> asMarkdeepFile [
^ self asMarkdeep notifyExportAsFileOn: self storage / self markdeepFileName
| folder |
folder := self options at: 'storage' ifAbsent: [ FileLocator temp ].
^ self asMarkdeep exportAsFileOn: folder / self markdeepFileName
]
{ #category : #'*MiniDocs' }
LePage >> asMarkdown [
"PENDING: to debug the output."
| bodyStream markdown |
bodyStream := '' writeStream.
bodyStream
nextPutAll: '# ', self title; cr; cr.
nextPutAll: '---';
nextPutAll: String lf.
self metadata keysAndValuesDo: [ :k :v |
bodyStream
nextPutAll: k , ': "' , v, '"';
nextPutAll: String lf ].
bodyStream nextPutAll: '---' , String lf , String lf.
self preorderTraversal
do: [ :snippet | bodyStream nextPutAll: snippet asMarkdown ].
markdown := Markdown new
contents: bodyStream contents promoteMarkdownHeaders;
metadata: (self metadata at: 'original' ifAbsentPut: Dictionary new).
markdown := Markdown new contents: bodyStream contents.
^ markdown
]
{ #category : #'*MiniDocs' }
LePage >> asMarkdownFileWithMetadataWrappers [
LePage >> asMarkdownFile [
| folder |
folder := self storage.
^ MarkupFile exportAsFileOn: folder / self markdownFileName containing: self asMarkdownWithMetadataWrappers contents
]
{ #category : #'*MiniDocs' }
LePage >> asMarkdownWithMetadataWrappers [
	"Answer a Markdown document built from all my snippets (preorder traversal),
	with headers promoted one level and my title and metadata attached."
	| bodyStream markdown |
	bodyStream := '' writeStream.
	self preorderTraversal
		do: [ :snippet | bodyStream nextPutAll: snippet asMarkdownWithMetadataWrappers ].
	markdown := Markdown new
		contents: bodyStream contents promoteMarkdownHeaders;
		title: self title;
		metadata: self metadata.
	^ markdown
]
{ #category : #'*MiniDocs' }
LePage >> config [
| configFile |
configFile := self storage / 'config.ston'.
configFile exists
ifTrue: [^ STON fromString: configFile contents ]
ifFalse: [ ^ nil ]
folder := self options at: 'storage' ifAbsent: [ FileLocator temp ].
^ MarkupFile exportAsFileOn: folder / self markdownFileName containing: self asMarkdown contents
]
{ #category : #'*MiniDocs' }
@ -101,52 +86,18 @@ LePage >> defaultPandocTemplate [
{ #category : #'*MiniDocs' }
LePage >> detectParentSnippetWithUid: uidString [
uidString = self uid asString36 ifTrue: [ ^ self ].
^ self preorderTraversal detect: [ :snippet | snippet uidString = uidString ]
]
"Answer a boolean indicating whether the supplied uid is present"
{ #category : #'*MiniDocs' }
LePage >> exportMetadataToHead: markdeep [
self metadata
keysAndValuesDo: [ :k :v |
k = 'lang'
ifTrue: [ markdeep head
add: '<meta lang="' , v , '">';
yourself ]
ifFalse: [ markdeep head
add: '<meta name="' , k , '" content="' , v , '">';
yourself ] ]
^ self preorderTraversal detect: [ :snippet | snippet uidString = uidString ] ifNone: [ ^ self ]
]
{ #category : #'*MiniDocs' }
LePage >> exportedFileName [
| sanitized titleWords shortTitle |
titleWords := self title splitOn: Character space.
(titleWords size > 11)
ifTrue: [
titleWords := titleWords copyFrom: 1 to: 3.
shortTitle := titleWords joinUsing: Character space.
]
ifFalse: [shortTitle := self title].
sanitized := shortTitle asDashedLowercase romanizeAccents copyWithoutAll: #($/ $: $🢒 $,).
| sanitized |
sanitized := self title asDashedLowercase copyWithoutAll: #($/).
^ sanitized , '--' , (self uidString copyFrom: 1 to: 5)
]
{ #category : #'*MiniDocs' }
LePage >> fromDictionary: aDictionary [
	"Populate me from aDictionary with keys 'title', 'id', 'created', 'modified',
	'creator' and 'modifier', as produced when importing an exported document."
	self
		title: (aDictionary at: 'title');
		basicUid: (UUID fromString36: (aDictionary at: 'id'));
		createTime: (LeTime new
			time: (aDictionary at: 'created') asDateAndTime);
		editTime: (LeTime new
			time: (aDictionary at: 'modified') asDateAndTime);
		latestEditTime: (LeTime new
			time: (aDictionary at: 'modified') asDateAndTime);
		createEmail: (aDictionary at: 'creator');
		editEmail: (aDictionary at: 'modifier').
]
{ #category : #'*MiniDocs' }
LePage >> fromMarkdeepUrl: aString [
| docTree pageMetadata |
@ -176,18 +127,6 @@ LePage >> latestEditTime: aLeTime [
latestEditTime := aLeTime
]
{ #category : #'*MiniDocs' }
LePage >> localHostAddress [
| localUrl route |
MiniDocsServer teapot server isRunning ifFalse: [ MiniDocsServer restart ].
route := self storage path segments joinUsing: '/'.
MiniDocsServer teapot
serveStatic: ('/', route, '/', self markdeepFileName)
from: self storage / self markdeepFileName.
localUrl := MiniDocsServer teapot server localUrl asString.
^ localUrl, route, '/', self markdeepFileName
]
{ #category : #'*MiniDocs' }
LePage >> markdeepFileName [
@ -202,17 +141,17 @@ LePage >> markdownFileName [
{ #category : #'*MiniDocs' }
LePage >> metadata [
^ self metadataUpdate
^ self options at: 'metadata' ifAbsentPut: [ self metadataInit]
]
{ #category : #'*MiniDocs' }
LePage >> metadataUpdate [
LePage >> metadataInit [
^ OrderedDictionary new
at: 'id' put: self uidString;
at: 'title' put: self contentAsString;
at: 'created' put: self createTime greaseString;
at: 'modified' put: self getLatestEditTime greaseString;
at: 'modified' put: self latestEditTime greaseString;
at: 'creator' put: self createEmail greaseString;
at: 'modifier' put: self editEmail greaseString;
yourself
@ -221,36 +160,12 @@ LePage >> metadataUpdate [
{ #category : #'*MiniDocs' }
LePage >> navTop [
| topNavFile |
topNavFile := self storage / '_navtop.html'.
topNavFile := ((self optionAt: 'storage' ifAbsentPut: [ FileLocator temp ]) / '_navtop.html').
topNavFile exists
ifFalse: [ ^ '' ]
ifTrue: [ ^ topNavFile contents ]
]
{ #category : #'*MiniDocs' }
LePage >> notebookMetadataSnippet [
	"Answer a text snippet carrying this notebook's metadata, used as the first
	snippet of exported documents. When a 'HedgeDoc' option is present it is
	copied into the snippet's extra data."
	| response |
	response := LeTextSnippet new fromString: '<!-- See this snippet source code for this notebook''s metadata -->'.
	response parent: self.
	self optionAt: 'HedgeDoc' ifAbsent: [ ^ response ].
	(response extra)
		at: 'HedgeDoc' put: (self optionAt: 'HedgeDoc').
	^ response
]
{ #category : #'*MiniDocs' }
LePage >> olderChild [
	"I provide the last edited child node.
	I'm useful to recalculate the age of a notebook."
	| response|
	response := self preorderTraversal first.
	self preorderTraversal do: [:current |
		"Keep the child with the most recent edit time."
		current editTime >= response editTime
			ifTrue: [ response := current ]
	].
	^ response
]
{ #category : #'*MiniDocs' }
LePage >> options [
^ options
@ -258,7 +173,10 @@ LePage >> options [
{ #category : #'*MiniDocs' }
LePage >> preorderTraversal [
^ self allChildrenDepthFirst
| output |
output := OrderedCollection new.
self withDeepCollect: [:each | each allChildrenBreadthFirstDo: [:child | output add: child]].
^ output.
]
{ #category : #'*MiniDocs' }
@ -268,11 +186,6 @@ LePage >> removeSnippetsMetadata [
ifTrue: [ snippet options removeKey: 'metadata' ] ]
]
{ #category : #'*MiniDocs' }
LePage >> sanitizeMetadata [
	"Sanitize the metadata of every snippet in me (depth-first traversal)."
	self allChildrenDepthFirst do: [:snippet | snippet sanitizeMetadata ]
]
{ #category : #'*MiniDocs' }
LePage >> sharedVariablesBindings [
| codeSnippets shared |
@ -297,88 +210,9 @@ LePage >> sharedVariablesBindings [
^ shared asDictionary
]
{ #category : #'*MiniDocs' }
LePage >> splitAdmonitionSnippets [
	"I'm used to clean after importing from HedgeDoc to ensure that a snippet
	contains only admonitions and extra content is put in a new cell."
	| admonitionSnippets |
	admonitionSnippets := self children select: [:node | node string startsWithMarkdownAdmonition ].
	admonitionSnippets ifEmpty: [ ^ self ].
	admonitionSnippets do: [:node | | nodeContent |
		node ifNotNil: [
			nodeContent := node string.
			nodeContent startsWithMarkdownAdmonition
				ifTrue: [ | snippetCommand |
					"Split right after the admonition ends and tag the remaining
					admonition snippet with its type (the text after ':::')."
					snippetCommand := node splitSnippetCommandAtPosition: nodeContent admonitionEndingPosition.
					snippetCommand execute.
					node tagWith: (nodeContent lines first trimBoth withoutPrefix: ':::')
				]
			]
		]
	]
{ #category : #'*MiniDocs' }
LePage >> storage [
	"Answer the folder where my exported files are saved.
	Defaults to the parent of my database's attachments directory unless a
	non-nil 'storage' option overrides it."
	| current |
	current := self database attachmentsDirectory parent.
	self optionAt: 'storage' ifAbsent: [ ^ current ].
	(self optionAt: 'storage') ifNil: [ ^ current ].
	^ self optionAt: 'storage'
]
{ #category : #'*MiniDocs' }
LePage >> uiAddCopyButtonFor: anAction [
	"Page action button that adds a copy of this page to its database and spawns
	the copy. (Fixes the tooltip, which said 'Export Page' — copied from the
	export button — although the action copies the page.)"
	<lePageAction>
	^ anAction button
		tooltip: 'Copy Page';
		icon: BrGlamorousVectorIcons changes;
		action: [:aButton | aButton phlow spawnObject: (self page database addPageCopy: self page) ]
]
{ #category : #'*MiniDocs' }
LePage >> uiDefineFolderFor: anAction [
	"Page action dropdown intended to let the user pick the export folder.
	NOTE(review): the dropdown content is still a placeholder gray element."
	<lePageAction>
	| folderButton |
	folderButton := anAction dropdown
		icon: BrGlamorousIcons savetodisk;
		tooltip: 'Export folder';
		content: [:aButton | BlElement new
			background: (Color gray alpha: 0.2);
			size: 100 @ 100;
			margin: (BlInsets all: 10) ].
	^ folderButton
]
{ #category : #'*MiniDocs' }
LePage >> uiExportButtonFor: anAction [
	"Page action button that exports this page as a Markdeep file and spawns the result."
	<lePageAction>
	^ anAction button
		tooltip: 'Export Page';
		icon: BrGlamorousVectorIcons down;
		action: [:aButton | aButton phlow spawnObject: self page asMarkdeepFile ]
]
{ #category : #'*MiniDocs' }
LePage >> uiRefreshWebPreviewButtonFor: anAction [
	"Page action button that re-exports this page as Markdeep and opens/reloads it
	in the web browser through the local MiniDocs server address."
	<lePageAction>
	^ anAction button
		tooltip: 'Refresh web view';
		icon: BrGlamorousVectorIcons refresh;
		action: [
			self page asMarkdeep exportAsFileOn: (self page storage / self page markdeepFileName).
			GoogleChrome openWindowOn: self page localHostAddress.
			"TODO: If Chrome/Chromium are not installed, I should execute:"
			"WebBrowser openOn: self page localHostAddress" ]
]
{ #category : #'*MiniDocs' }
LePage >> youngerChild [
"I provide the first create child node.
I'm useful to recalculate the age of a notebook."
| response|
response := self preorderTraversal first.
self preorderTraversal do: [:current |
current createTime <= response createTime
ifTrue: [ response := current ]
].
^ response
^ self optionAt: 'storage'
ifAbsent: [ ^ FileLocator temp ]
]

View File

@ -0,0 +1,15 @@
Extension { #name : #LePageHeaderBuilder }

{ #category : #'*MiniDocs' }
LePageHeaderBuilder >> addExportPageButton [
	"Add an 'Export Page' button to the page header toolbar; clicking it exports
	the page as a Markdeep file and spawns the result."
	<leHeaderAction>
	| newButton |
	newButton := BrButton new
		aptitude: BrGlamorousButtonWithIconAptitude;
		label: 'Export Page';
		icon: BrGlamorousVectorIcons down;
		action: [ :aButton |
			aButton phlow spawnObject: self page asMarkdeepFile ].
	self toolbarElement addItem: newButton.
]

View File

@ -1,26 +0,0 @@
Extension { #name : #LePharoRewriteSnippet }

{ #category : #'*MiniDocs' }
LePharoRewriteSnippet >> metadataUpdate [
	"Answer an OrderedDictionary with my export metadata (id, parent, timestamps,
	creator and modifier). XML tag delimiters are stripped from the emails so the
	values are safe to embed in exported markup."
	| createEmailSanitized editEmailSanitized |
	createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
	editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
	^ OrderedDictionary new
		at: 'id' put: self uidString;
		at: 'parent' put: self parent uuid;
		at: 'created' put: self createTime asString;
		at: 'modified' put: self latestEditTime asString;
		at: 'creator' put: createEmailSanitized;
		at: 'modifier' put: editEmailSanitized;
		yourself
]

{ #category : #'*MiniDocs' }
LePharoRewriteSnippet >> sanitizeMetadata [
	"Remove '<' and '>' from every metadata value, so stored metadata cannot
	carry tag delimiters into exported documents."
	self metadata keysAndValuesDo: [:k :v |
		(v includesAny: #($< $>))
			ifTrue: [
				self metadata at: k put: (v copyWithoutAll: #($< $>))
			]
	]
]

View File

@ -9,26 +9,21 @@ LePharoSnippet >> contentAsStringCustomized [
]
{ #category : #'*MiniDocs' }
LePharoSnippet >> fromDictionary: anOrderedDictionary [
	"Populate me from an imported snippet dictionary with keys 'id', 'parent',
	'created', 'modified', 'creator' and 'modifier'."
	self
		uid: (LeUID new uidString: (anOrderedDictionary at: 'id'));
		parent: (anOrderedDictionary at: 'parent');
		createTime: (LeTime new time: ((anOrderedDictionary at: 'created')asDateAndTime));
		editTime: (LeTime new time: ((anOrderedDictionary at: 'modified') asDateAndTime));
		editEmail: (anOrderedDictionary at: 'modifier');
		createEmail: (anOrderedDictionary at: 'creator').
]
LePharoSnippet >> contentFrom: markdeepDiv [
{ #category : #'*MiniDocs' }
LePharoSnippet >> fromMarkdeep: markdeepDiv [
	"Answer a Lepiter snippet rebuilt from a Markdeep <div> element."
	^ markdeepDiv asSnippetDictionary asLepiterSnippet
]
{ #category : #'*MiniDocs' }
LePharoSnippet >> fromString: aString [
[ self coder forSource: aString ] onErrorDo: [ ]
| sanitizedStringText metadata joinedText |
metadata := STON fromString: (markdeepDiv attributes at: 'st-data').
sanitizedStringText := markdeepDiv contentString lines.
sanitizedStringText := sanitizedStringText copyFrom: 4 to: sanitizedStringText size -2.
joinedText := '' writeStream.
sanitizedStringText do: [ :line | joinedText nextPutAll: line; nextPut: Character lf ].
self code: joinedText contents allButLast;
uid: (LeUID new uidString: (metadata at: 'id'));
parent: (metadata at: 'parent');
createTime: (LeTime new time: ((metadata at: 'created')asDateAndTime));
editTime: (LeTime new time: ((metadata at: 'modified') asDateAndTime));
editEmail: (metadata at: 'modifier');
createEmail: (metadata at: 'creator')
]
{ #category : #'*MiniDocs' }

View File

@ -2,137 +2,14 @@ Extension { #name : #LePictureSnippet }
{ #category : #'*MiniDocs' }
LePictureSnippet >> asMarkdeep [
| output curatedCaption captionLines |
captionLines := self caption lines.
(captionLines size <= 1)
ifTrue: [ curatedCaption := caption ]
ifFalse: [
curatedCaption := WriteStream on: ''.
curatedCaption nextPutAll: captionLines first.
captionLines allButFirstDo: [:line |
curatedCaption nextPutAll: ' ', line.
curatedCaption := curatedCaption contents.
]
].
| output |
output := WriteStream on: ''.
output
nextPutAll: self metadataDiv;
nextPutAll: '![ ', curatedCaption ,' ](', self urlString, ')';
nextPutAll: self centeredFigure;
nextPut: Character lf;
nextPutAll: '</div>';
nextPut: Character lf;
nextPut: Character lf.
^ output contents
]
{ #category : #'*MiniDocs' }
LePictureSnippet >> asMarkdownWithMetadataWrappers [
	"For picture snippets the Markdown-with-wrappers export is identical to the
	 Markdeep export, so delegate to #asMarkdeep."
^ self asMarkdeep
]
{ #category : #'*MiniDocs' }
LePictureSnippet >> contentFrom: markdeepDiv [
	"Extract my caption, width and image url from markdeepDiv, an XML element
	 queried with XPath ('img' child node; '@width' and '@src' attributes).
	 Caption and width are stored as snippet options."
| caption width |
caption := markdeepDiv contentString.
width := (markdeepDiv // 'img' @ 'width') stringValue.
self
optionAt: 'caption' put: caption;
optionAt: 'width' put: width.
self urlString: (markdeepDiv // 'img' @ 'src') stringValue.
]
{ #category : #'*MiniDocs' }
LePictureSnippet >> fromDictionary: anOrderedDictionary [
	"Populate my identity, timestamps, url and caption from anOrderedDictionary,
	 as produced by #metadataInit. The 'url' value is stored wrapped in HTML
	 comment delimiters ('<!--' ... '-->'; see #metadataInit), so copyFrom: 5
	 to: size - 3 strips the 4 leading and 3 trailing delimiter characters."
| sanitizedUrl|
sanitizedUrl := (anOrderedDictionary at: 'url').
sanitizedUrl := sanitizedUrl copyFrom: 5 to: sanitizedUrl size - 3.
self
uid: (LeUID new uidString: (anOrderedDictionary at: 'id'));
parent: (anOrderedDictionary at: 'parent');
createTime: (LeTime new time: ((anOrderedDictionary at: 'created')asDateAndTime));
editTime: (LeTime new time: ((anOrderedDictionary at: 'modified') asDateAndTime));
editEmail: (anOrderedDictionary at: 'modifier');
createEmail: (anOrderedDictionary at: 'creator');
urlString: sanitizedUrl;
caption: (anOrderedDictionary at: 'content') first
]
{ #category : #'*MiniDocs' }
LePictureSnippet >> fromMarkdeep: markdeepDiv [
	"Rebuild a Lepiter snippet from a Markdeep div element, by converting the div
	 to a snippet dictionary and then to a snippet."
^ markdeepDiv asSnippetDictionary asLepiterSnippet
]
{ #category : #'*MiniDocs' }
LePictureSnippet >> fromString: aStringArray [
"aStringArray should contain as first element the sanitized string and
as second the full original image Link string, which may contains links in the description."
	"Split the Markdown image link on ']ic(' boundaries; taking #last keeps the
	 final '(...)' group even when the description itself contains links. The
	 trailing $) is dropped with copyFrom: 1 to: size - 1."
| args urlTemp |
args := aStringArray second splitOn: ']('.
urlTemp := args last.
urlTemp := urlTemp copyFrom: 1 to: urlTemp size - 1.
self caption: aStringArray first.
self urlString: urlTemp.
^ self
]
{ #category : #'*MiniDocs' }
LePictureSnippet >> metadata [
	"Answer my sharing metadata, freshly computed by #metadataInit on every call."
^ self metadataInit
]
{ #category : #'*MiniDocs' }
LePictureSnippet >> metadataDiv [
	"Answer the opening HTML div tag carrying my class name ('st-class') and my
	 STON-serialized metadata ('st-data'), used by the Markdeep export.
	 Only the opening tag is written here; the matching '</div>' is emitted by
	 #asMarkdeep."
| output |
output := WriteStream on: ''.
output
nextPutAll: '<div st-class="' , self class greaseString , '"';
nextPut: Character lf;
nextPutAll: ' st-data="' , (STON toStringPretty: self metadata) , '">';
nextPut: Character lf.
^ output contents withInternetLineEndings.
]
{ #category : #'*MiniDocs' }
LePictureSnippet >> metadataInit [
	"Build my sharing metadata dictionary: identity, parenthood (nil surrogate
	 when I have no parent), url, timestamps and sanitized author emails.
	 The 'url' value wraps my content in HTML comment delimiters so it survives
	 the Markdeep rendering; #fromDictionary: strips them back out."
| surrogate |
self parent
ifNil: [ surrogate := nil]
ifNotNil: [ surrogate := self parent uidString ].
^ OrderedDictionary new
at: 'id' put: self uidString;
at: 'parent' put: surrogate;
at: 'url' put: '<!--',self contentAsString, '-->';
at: 'created' put: self createTime asString;
at: 'modified' put: self latestEditTime asString;
at: 'creator' put: self createEmail asString withoutXMLTagDelimiters;
at: 'modifier' put: self editEmail asString withoutXMLTagDelimiters;
yourself
]
{ #category : #'*MiniDocs' }
LePictureSnippet >> metadataUpdate [
	"Answer a fresh sharing-metadata dictionary (identity, parenthood, timestamps
	 and sanitized author emails). Unlike #metadataInit, no 'url' entry is
	 included here."
| surrogate |
self parent
ifNil: [ surrogate := nil]
ifNotNil: [ surrogate := self parent uidString ].
^ OrderedDictionary new
at: 'id' put: self uidString;
at: 'parent' put: surrogate;
at: 'created' put: self createTime asString;
at: 'modified' put: self latestEditTime asString;
at: 'creator' put: self createEmail asString withoutXMLTagDelimiters;
at: 'modifier' put: self editEmail asString withoutXMLTagDelimiters;
yourself
]
{ #category : #'*MiniDocs' }
LePictureSnippet >> sanitizeMetadata [
	"Strip XML tag delimiters ($< and $>) from every string value in my metadata,
	 so the exported metadata can be embedded safely in HTML/Markdeep attributes.
	 Fix: guard non-string values (e.g. a nil parent surrogate), which previously
	 signaled a doesNotUnderstand when sent #includesAny:."
	self metadata keysAndValuesDo: [ :k :v |
		(v isString and: [ v includesAny: #($< $>) ])
			ifTrue: [ self metadata at: k put: (v copyWithoutAll: #($< $>)) ] ]
]

View File

@ -1,26 +0,0 @@
Extension { #name : #LeSmaCCRewriteSnippet }
{ #category : #'*MiniDocs' }
LeSmaCCRewriteSnippet >> metadataUpdate [
	"Answer an OrderedDictionary with my current sharing metadata: identity,
	 parenthood, timestamps and sanitized author emails, used by the export.
	 Fix: guard against a nil parent, which previously signaled a
	 doesNotUnderstand when #uuid was sent to nil (sibling snippet classes,
	 e.g. LeUnknownSnippet, already guard this case)."
	| createEmailSanitized editEmailSanitized parentSurrogate |
	createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
	editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
	parentSurrogate := self parent ifNotNil: [ :parent | parent uuid ].
	^ OrderedDictionary new
		at: 'id' put: self uidString;
		at: 'parent' put: parentSurrogate;
		at: 'created' put: self createTime asString;
		at: 'modified' put: self latestEditTime asString;
		at: 'creator' put: createEmailSanitized;
		at: 'modifier' put: editEmailSanitized;
		yourself
]
{ #category : #'*MiniDocs' }
LeSmaCCRewriteSnippet >> sanitizeMetadata [
	"Strip XML tag delimiters ($< and $>) from every string value in my metadata,
	 so the exported metadata can be embedded safely in HTML/Markdeep attributes.
	 Fix: guard non-string values (e.g. a nil parent surrogate), which previously
	 signaled a doesNotUnderstand when sent #includesAny:."
	self metadata keysAndValuesDo: [ :k :v |
		(v isString and: [ v includesAny: #($< $>) ])
			ifTrue: [ self metadata at: k put: (v copyWithoutAll: #($< $>)) ] ]
]

View File

@ -7,33 +7,6 @@ LeSnippet class >> fromMetaMarkdeep: div [
metadata := STON fromString:(div xpath: '@st-data') stringValue.
snippet := className asClass new.
snippet injectMetadataFrom: metadata.
snippet fromMarkdeep: div.
snippet contentFrom: div.
^ snippet.
]
{ #category : #'*MiniDocs' }
LeSnippet >> metadata [
	"Answer my sharing metadata dictionary, cached under the 'metadata' option
	 and refreshed on every call with my current id, parent id (base-36),
	 timestamps and sanitized author emails.
	 Fix: guard against a nil parent, which previously signaled a
	 doesNotUnderstand when #uid was sent to nil."
	| createEmailSanitized editEmailSanitized |
	createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
	editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
	self optionAt: 'metadata' ifAbsentPut: [ OrderedDictionary new ].
	^ (self optionAt: 'metadata')
		at: 'id' put: self uidString;
		at: 'parent' put: (self parent ifNotNil: [ :parent | parent uid asString36 ]);
		at: 'created' put: self createTime asString;
		at: 'modified' put: self latestEditTime asString;
		at: 'creator' put: createEmailSanitized;
		at: 'modifier' put: editEmailSanitized;
		yourself
]
{ #category : #'*MiniDocs' }
LeSnippet >> moveToPageTitled: pageName [
	"Move the receiver from its current page to the page named pageName,
	 both looked up in the receiver's database. The snippet is removed from
	 its origin page and appended to the destination page."
| db origin destination thisSnippet |
thisSnippet := self.
db := self page database.
destination := db pageNamed: pageName.
origin := db pageNamed: thisSnippet page title.
origin removeSnippet: thisSnippet.
destination addSnippet: thisSnippet.
]

View File

@ -1,31 +0,0 @@
Extension { #name : #LeTextCoderSnippetElement }
{ #category : #'*MiniDocs' }
LeTextCoderSnippetElement >> asLePage [
	"Promote my snippet into a new LePage: the page title is the snippet text
	 with leading '#' characters stripped, the page is added to the database,
	 and all the snippet's children are moved (breadth-first) onto the new page.
	 Answers the new page."
| currentSnippet newPage |
currentSnippet := self snippet.
newPage := LePage new.
newPage
title: (currentSnippet text asString trimLeft: [:char | char = $# ]) trim.
self page database
addPage: newPage.
currentSnippet allChildrenBreadthFirstDo: [:child |
child moveToPageTitled: newPage title.
].
^ newPage
]
{ #category : #'*MiniDocs' }
LeTextCoderSnippetElement >> asSnippetViewModel [
	"NOTE(review): answers #snippetContent, presumably the snippet view model
	 for this element — confirm against LeTextCoderSnippetElement's protocol."
^ self snippetContent
]
{ #category : #'*MiniDocs' }
LeTextCoderSnippetElement >> moveToPageTitled: pageName [
	"Move the receiver from its current page to the page named pageName.
	 NOTE(review): #removeSnippet:/#addSnippet: are sent with the element
	 itself rather than its snippet — confirm this is intended (compare
	 LeSnippet>>moveToPageTitled:, which moves the snippet)."
| db origin destination |
db := self page database.
destination := db pageNamed: pageName.
origin := db pageNamed: self page title.
origin removeSnippet: self.
destination addSnippet: self .
]

View File

@ -1,52 +1,39 @@
Extension { #name : #LeTextSnippet }
{ #category : #'*MiniDocs' }
LeTextSnippet >> asLePage [
	"Promote the receiver into a new LePage: the title comes from my first
	 Markdown header (leading '#' stripped), my content becomes the page's
	 first text snippet, my children are moved onto the new page, and I am
	 removed from my original page. Answers the new page."
| page title currentSnippet |
title := self contentAsString markdownHeaders associations first value.
title := (title trimBoth: [:char | char = $# ]) trimmed.
page := LePage new
initializeTitle: title.
currentSnippet := LeTextSnippet new
string: self contentAsString.
page addSnippet: currentSnippet.
self database addPage: page.
self childrenDo: [:child |
child moveToPageTitled: page title
].
self removeSelfCommand.
^ page.
]
LeTextSnippet >> contentFrom: markdeepDiv [
{ #category : #'*MiniDocs' }
LeTextSnippet >> fromDictionary: anOrderedDictionary [
	"Populate my identity and timestamps from anOrderedDictionary (keys: 'id',
	 'parent', 'created', 'modified', 'modifier', 'creator'), as produced by
	 the snippet metadata export."
self
uid: (LeUID new uidString: (anOrderedDictionary at: 'id'));
parent: (anOrderedDictionary at: 'parent');
createTime: (LeTime new time: ((anOrderedDictionary at: 'created')asDateAndTime));
editTime: (LeTime new time: ((anOrderedDictionary at: 'modified') asDateAndTime));
editEmail: (anOrderedDictionary at: 'modifier');
createEmail: (anOrderedDictionary at: 'creator')
]
{ #category : #'*MiniDocs' }
LeTextSnippet >> fromMarkdeep: markdeepDiv [
^ markdeepDiv asSnippetDictionary asLepiterSnippet
]
{ #category : #'*MiniDocs' }
LeTextSnippet >> fromString: aString [
self
string: aString;
uid: LeUID new.
| sanitizedStringText metadata |
metadata := STON fromString: (markdeepDiv attributes at: 'st-data').
sanitizedStringText := markdeepDiv contentString.
sanitizedStringText := sanitizedStringText allButFirst.
sanitizedStringText := sanitizedStringText allButLast.
self string: sanitizedStringText;
uid: (LeUID new uidString: (metadata at: 'id'));
parent: (metadata at: 'parent');
createTime: (LeTime new time: ((metadata at: 'created')asDateAndTime));
editTime: (LeTime new time: ((metadata at: 'modified') asDateAndTime));
editEmail: (metadata at: 'modifier');
createEmail: (metadata at: 'creator')
]
{ #category : #'*MiniDocs' }
LeTextSnippet >> metadata [
^ self metadataUpdate
^ self optionAt: 'metadata' ifAbsentPut: [ self metadataInit ]
]
{ #category : #'*MiniDocs' }
LeTextSnippet >> metadataInit [
	"Build my sharing metadata dictionary: identity, parent id (via #parentId),
	 timestamps and author emails."
^ OrderedDictionary new
at: 'id' put: self uidString;
at: 'parent' put: self parentId;
at: 'created' put: self createTime asString;
at: 'modified' put: self latestEditTime asString;
at: 'creator' put: self createEmail asString;
at: 'modifier' put: self editEmail asString;
yourself
]
{ #category : #'*MiniDocs' }
@ -57,23 +44,11 @@ LeTextSnippet >> options [
{ #category : #'*MiniDocs' }
LeTextSnippet >> parentId [
	"Answer my parent's identifier: the parent itself when it is already a
	 string id (e.g. set by #fromDictionary:), otherwise its uidString.
	 NOTE(review): when parent is nil this answers the snippet itself, not a
	 string — confirm callers (e.g. #metadataInit) tolerate that."
self parent ifNil: [ ^ self ].
(self parent isString) ifTrue: [^ self parent].
^ self parent uidString.
]
{ #category : #'*MiniDocs' }
LeTextSnippet >> tagWith: aString [
	"Add aString to my tag collection."
self tags add: aString.
]
{ #category : #'*MiniDocs' }
LeTextSnippet >> withFollowingSnippets [
"I'm the same method implemented for PharoSnippets,
but present also here as a way to improve moving prose snippets from pages.
"
| snippets stop start |
snippets := self parent children asArray.
start := snippets indexOf: self.
stop := snippets size.
^ snippets copyFrom: start to: stop
LeTextSnippet >> taggedWith: aString [
self metadata at: 'tags' ifPresent: [ (self metadata at: 'tags') add: aString; yourself ] ifAbsentPut: [ Set new ].
^ self metadata at: 'tags'
]

View File

@ -2,62 +2,45 @@ Extension { #name : #LeTextualSnippet }
{ #category : #'*MiniDocs' }
LeTextualSnippet >> asMarkdeep [
"Inspired by Alpine.js and Assembler CSS 'x-' properties, we are going to use
'st-' properties as a way to extend divs metadata regarding its contents."
| output |
output := WriteStream on: ''.
output
nextPutAll: self metadataDiv;
nextPutAll: '<div st-class="' , self class greaseString , '"';
nextPut: Character lf;
nextPutAll: ' st-data="' , (STON toString: self metadata) , '">';
nextPut: Character lf;
nextPutAll: self markdeepCustomOpener;
nextPutAll: self contentAsStringAnnotated;
nextPutAll: self contentAsString;
nextPut: Character lf;
nextPutAll: self markdeepCustomCloser;
nextPutAll: '</div>';
nextPut: Character lf;
nextPut: Character lf.
^ output contents withInternetLineEndings
]
{ #category : #'*MiniDocs' }
LeTextualSnippet >> asMarkdown [
	"Answer my content as plain Markdown (no metadata wrappers), using the
	 customized content string followed by a line feed."
| output |
output := '' writeStream.
output
nextPutAll: self contentAsStringCustomized; lf.
^ output contents
]
{ #category : #'*MiniDocs' }
LeTextualSnippet >> asMarkdownWithMetadataWrappers [
LeTextualSnippet >> asMarkdown [
"Inspired by Alpine.js and Assembler CSS 'x-' properties, we are going to use
'st-' properties as a way to extend divs metadata regarding its contents."
| output |
output := '' writeStream.
output
nextPutAll: self metadataDiv;
nextPutAll: '<div st-class="', self class asString, '"'; lf;
nextPutAll: ' st-data="', (STON toString: self metadata), '">'; lf;
nextPutAll: self markdownCustomOpener;
nextPutAll: self contentAsStringCustomized; lf;
nextPutAll: self markdownCustomCloser;
nextPutAll: '</div>'; lf; lf.
^ output contents withInternetLineEndings
]
{ #category : #'*MiniDocs' }
LeTextualSnippet >> contentAsStringAnnotated [
self ast ifNotNil: [ ^ self processSnippetAnnotations ].
^ self contentAsString
^ output contents
]
{ #category : #'*MiniDocs' }
LeTextualSnippet >> contentAsStringCustomized [
	"When my content starts with a Markdown header marker, prepend one more '#',
	 demoting the header one level in the exported document; otherwise answer
	 the content unchanged."
(self contentAsString beginsWith: '#')
ifTrue: [ ^ '#', self contentAsString ]
ifFalse: [ ^ self contentAsString ]
]
{ #category : #'*MiniDocs' }
LeTextualSnippet >> extra [
^ self optionAt: 'extra' ifAbsentPut: [ Dictionary new ]
^ self contentAsString
]
{ #category : #'*MiniDocs' }
@ -83,84 +66,26 @@ LeTextualSnippet >> markdownCustomOpener [
{ #category : #'*MiniDocs' }
LeTextualSnippet >> metadata [
^ self metadataUpdate
^ self optionAt: 'metadata' ifAbsentPut: [ self metadataInit ]
]
{ #category : #'*MiniDocs' }
LeTextualSnippet >> metadataDiv [
"Inspired by Alpine.js and Assembler CSS 'x-' properties, we are going to use
'st-' properties as a way to extend divs metadata regarding its contents."
"PENDING: this is repeated in several snippets. Can be abstracted up in a common object of the class hierarchy?"
	"Only the opening div tag is written; the matching '</div>' is emitted by
	 the snippet export methods."
| output |
output := WriteStream on: ''.
output
nextPutAll: '<div st-class="' , self class greaseString , '"';
nextPut: Character lf;
nextPutAll: ' st-data="' , (STON toStringPretty: self metadata) , '">';
nextPut: Character lf.
^ output contents withInternetLineEndings.
]
{ #category : #'*MiniDocs' }
LeTextualSnippet >> metadataUpdate [
| createEmailSanitized editEmailSanitized |
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
LeTextualSnippet >> metadataInit [
| surrogate |
self parent
ifNil: [ surrogate := nil]
ifNotNil: [ surrogate := self parent uidString ].
^ OrderedDictionary new
at: 'id' put: self uidString;
at: 'parent' put: (self parent ifNotNil: [self parent uidString ]);
at: 'parent' put: surrogate;
at: 'created' put: self createTime asString;
at: 'modified' put: self latestEditTime asString;
at: 'creator' put: createEmailSanitized;
at: 'modifier' put: editEmailSanitized;
at: 'extra' put: self extra;
yourself
]
{ #category : #'*MiniDocs' }
LeTextualSnippet >> processSnippetAnnotations [
	"Replace every Lepiter annotation ('{{type:value}}') in my content with a
	 colored HTML span. The color per annotation type is looked up in the page
	 config under 'annotationColors' (falling back to its 'defaultColor', then
	 to 'default'). Answers the content string with the substitutions applied."
| exported substitutions annotations pageConfig |
annotations := self ast parts
select: [ :each | each className includesSubstring: 'AnnotationNode' ].
annotations ifEmpty: [ ^ self contentAsString ].
substitutions := OrderedDictionary new.
pageConfig := self page config.
annotations
do: [ :each |
| key type value color |
key := each source.
	"key looks like '{{type:value}}': strip '{{' to get the type, then slice
	 out the value between 'type:' and the closing '}}'."
type := (key splitOn: ':') first copyWithoutAll: '{{'.
value := key copyFrom: type size + 4 to: key size - 2.
pageConfig
ifNil: [ color := 'default' ]
ifNotNil: [ | colors |
colors := pageConfig at: 'annotationColors' ifAbsent: [ nil ].
colors
ifNotNil: [ color := colors
at: type
ifAbsent: [ colors at: 'defaultColor' ifAbsentPut: [ 'default' ] ] ] ].
substitutions
at: key
put: '<span st-class="' , type , '" style="color:' , color , '">' , value , '</span>' ].
exported := self contentAsString.
substitutions
keysAndValuesDo: [ :k :v | exported := exported copyReplaceAll: k with: v ].
^ exported
]
{ #category : #'*MiniDocs' }
LeTextualSnippet >> sanitizeMetadata [
self options ifNil: [^ self ].
self options removeKey: 'metadata' ifAbsent: [^ self ].
self metadata keysAndValuesDo: [:k :v |
(v asString includesAny: #($< $>))
ifTrue: [
self metadata at: k put: (v asString copyWithoutXMLDelimiters)
]
]
at: 'creator' put: self createEmail asString;
at: 'modifier' put: self editEmail asString;
yourself
]
{ #category : #'*MiniDocs' }
LeTextualSnippet >> tags [
^ self extra at: 'tags' ifAbsentPut: [ Set new ]
^ self metadata at: 'tags' ifAbsentPut: [ Set new ]
]

View File

@ -1,21 +0,0 @@
Extension { #name : #LeUnknownSnippet }
{ #category : #'*MiniDocs' }
LeUnknownSnippet >> metadataUpdate [
	"Answer an OrderedDictionary with my current sharing metadata. The parent
	 surrogate is nil when I have no parent, the parent itself when it is
	 already a string id, and the parent's uidString otherwise."
| surrogate |
self parent
ifNil: [ surrogate := nil]
ifNotNil: [
self parent isString
ifTrue: [ surrogate := self parent]
ifFalse: [ surrogate := self parent uidString ]
].
^ OrderedDictionary new
at: 'id' put: self uidString;
at: 'parent' put: surrogate;
at: 'created' put: self createTime asString;
at: 'modified' put: self latestEditTime asString;
at: 'creator' put: self createEmail asString;
at: 'modifier' put: self editEmail asString;
yourself
]

View File

@ -1,26 +0,0 @@
Extension { #name : #LeWardleyMapSnippet }
{ #category : #'*MiniDocs' }
LeWardleyMapSnippet >> metadataUpdate [
	"Answer an OrderedDictionary with my current sharing metadata: identity,
	 parenthood, timestamps and sanitized author emails, used by the export.
	 Fix: guard against a nil parent, which previously signaled a
	 doesNotUnderstand when #uuid was sent to nil (sibling snippet classes,
	 e.g. LeUnknownSnippet, already guard this case)."
	| createEmailSanitized editEmailSanitized parentSurrogate |
	createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
	editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
	parentSurrogate := self parent ifNotNil: [ :parent | parent uuid ].
	^ OrderedDictionary new
		at: 'id' put: self uidString;
		at: 'parent' put: parentSurrogate;
		at: 'created' put: self createTime asString;
		at: 'modified' put: self latestEditTime asString;
		at: 'creator' put: createEmailSanitized;
		at: 'modifier' put: editEmailSanitized;
		yourself
]
{ #category : #'*MiniDocs' }
LeWardleyMapSnippet >> sanitizeMetadata [
	"Strip XML tag delimiters ($< and $>) from every string value in my metadata,
	 so the exported metadata can be embedded safely in HTML/Markdeep attributes.
	 Fix: guard non-string values (e.g. a nil parent surrogate), which previously
	 signaled a doesNotUnderstand when sent #includesAny:."
	self metadata keysAndValuesDo: [ :k :v |
		(v isString and: [ v includesAny: #($< $>) ])
			ifTrue: [ self metadata at: k put: (v copyWithoutAll: #($< $>)) ] ]
]

View File

@ -1,26 +0,0 @@
Extension { #name : #LeWordSnippet }
{ #category : #'*MiniDocs' }
LeWordSnippet >> metadataUpdate [
	"Answer an OrderedDictionary with my current sharing metadata: identity,
	 parenthood, timestamps and sanitized author emails, used by the export.
	 Fix: guard against a nil parent, which previously signaled a
	 doesNotUnderstand when #uuid was sent to nil (sibling snippet classes,
	 e.g. LeUnknownSnippet, already guard this case)."
	| createEmailSanitized editEmailSanitized parentSurrogate |
	createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
	editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
	parentSurrogate := self parent ifNotNil: [ :parent | parent uuid ].
	^ OrderedDictionary new
		at: 'id' put: self uidString;
		at: 'parent' put: parentSurrogate;
		at: 'created' put: self createTime asString;
		at: 'modified' put: self latestEditTime asString;
		at: 'creator' put: createEmailSanitized;
		at: 'modifier' put: editEmailSanitized;
		yourself
]
{ #category : #'*MiniDocs' }
LeWordSnippet >> sanitizeMetadata [
	"Strip XML tag delimiters ($< and $>) from every string value in my metadata,
	 so the exported metadata can be embedded safely in HTML/Markdeep attributes.
	 Fix: guard non-string values (e.g. a nil parent surrogate), which previously
	 signaled a doesNotUnderstand when sent #includesAny:."
	self metadata keysAndValuesDo: [ :k :v |
		(v isString and: [ v includesAny: #($< $>) ])
			ifTrue: [ self metadata at: k put: (v copyWithoutAll: #($< $>)) ] ]
]

View File

@ -1,57 +0,0 @@
Extension { #name : #LeYoutubeReferenceSnippet }
{ #category : #'*MiniDocs' }
LeYoutubeReferenceSnippet >> asMarkdeep [
	"Export me as Markdeep: a metadata div (opened by #metadataDiv) wrapping a
	 Markdown image-style embed labeled 'title | author', then the closing
	 '</div>' and a blank line."
| output |
output := WriteStream on: ''.
output
nextPutAll: self metadataDiv;
nextPutAll: '![ ', self title, ' | ', self authorName, ' ](',self urlString, ')';
nextPut: Character lf;
nextPutAll: '</div>';
nextPut: Character lf;
nextPut: Character lf.
^ output contents
]
{ #category : #'*MiniDocs' }
LeYoutubeReferenceSnippet >> metadata [
	"Answer my sharing metadata, cached under the 'metadata' option and computed
	 on first access by #metadataUpdate."
^ self optionAt: 'metadata' ifAbsentPut: [ self metadataUpdate ]
]
{ #category : #'*MiniDocs' }
LeYoutubeReferenceSnippet >> metadataDiv [
	"Answer the opening HTML div tag carrying my class name ('st-class') and my
	 STON-serialized metadata ('st-data'). The matching '</div>' is emitted by
	 #asMarkdeep."
| output |
output := WriteStream on: ''.
output
nextPutAll: '<div st-class="' , self class greaseString , '"';
nextPut: Character lf;
nextPutAll: ' st-data="' , (STON toStringPretty: self metadata) , '">'.
^ output contents withInternetLineEndings.
]
{ #category : #'*MiniDocs' }
LeYoutubeReferenceSnippet >> metadataUpdate [
	"Answer an OrderedDictionary with my current sharing metadata: identity,
	 parenthood (nil surrogate when I have no parent), timestamps and author
	 emails."
| surrogate |
self parent
ifNil: [ surrogate := nil]
ifNotNil: [ surrogate := self parent uidString ].
^ OrderedDictionary new
at: 'id' put: self uidString;
at: 'parent' put: surrogate;
at: 'created' put: self createTime asString;
at: 'modified' put: self latestEditTime asString;
at: 'creator' put: self createEmail asString;
at: 'modifier' put: self editEmail asString;
yourself
]
{ #category : #'*MiniDocs' }
LeYoutubeReferenceSnippet >> sanitizeMetadata [
	"Strip XML tag delimiters ($< and $>) from every string value in my metadata,
	 so the exported metadata can be embedded safely in HTML/Markdeep attributes.
	 Fix: guard non-string values (e.g. a nil parent surrogate), which previously
	 signaled a doesNotUnderstand when sent #includesAny:."
	self metadata keysAndValuesDo: [ :k :v |
		(v isString and: [ v includesAny: #($< $>) ])
			ifTrue: [ self metadata at: k put: (v copyWithoutAll: #($< $>)) ] ]
]

View File

@ -1,33 +0,0 @@
Class {
	#name : #Logseq,
	#superclass : #Object,
	#instVars : [
		'folder'
	],
	#category : #'MiniDocs-Model'
}

{ #category : #accessing }
Logseq >> assets [
	"Answer the 'assets' subfolder of the Logseq graph folder I model."
	^ self folder / 'assets'
]

{ #category : #accessing }
Logseq >> folder [
	"Answer the root folder of the Logseq graph I model."
	^ folder
]

{ #category : #accessing }
Logseq >> folder: aFolder [
	"Set the root folder of the Logseq graph I model."
	folder := aFolder
]

{ #category : #accessing }
Logseq >> journals [
	"Answer the 'journals' subfolder of the graph.
	 Fix: the previous version lacked the return caret and answered self,
	 inconsistently with #assets and #pages."
	^ self folder / 'journals'
]

{ #category : #accessing }
Logseq >> pages [
	"Answer the 'pages' subfolder of the graph."
	^ self folder / 'pages'
]

View File

@ -3,17 +3,20 @@ I model a Mardeep file as described in https://casual-effects.com/markdeep/
"
Class {
#name : #Markdeep,
#superclass : #Markdown,
#superclass : #Object,
#instVars : [
'title',
'body',
'comments',
'tail',
'language',
'config',
'metadata',
'head',
'navTop',
'options'
],
#category : #'MiniDocs-Core'
#category : #MiniDocs
}
{ #category : #'as yet unclassified' }
@ -21,38 +24,21 @@ Markdeep class >> fromMarkdownFile: aFileReference [
^ self new fromMarkdownFile: aFileReference.
]
{ #category : #accessing }
Markdeep class >> fromPubPubTOC: orderedDictionary folder: folder index: ordinalPossitive [
	"Build a Markdeep document for the TOC entry at position ordinalPossitive:
	 the source file inside folder is named '<key>--<value>' with an 'md'
	 extension appended via FileReference #, "
| contentSection testFile |
contentSection := orderedDictionary associations at: ordinalPossitive.
testFile := folder / (contentSection key,'--', contentSection value),'md'.
^ self new fromMarkdownFile: testFile.
]
{ #category : #accessing }
Markdeep >> asMarkdownWithMetadataWrappers [
	"Convert me back to a Markdown document carrying over my metadata, body and
	 associated Markdown file."
^ Markdown new
metadata: self metadata;
body: self body;
file: self markdownFile
]
{ #category : #'instance creation' }
Markdeep >> authors [
self metadata at: 'authors' ifAbsentPut: [ Dictionary new ].
"self metadata at: 'authors' ifNotEmpty: [:k | ^ '**', k, '**' ]
" ^ ''.
self metadata at: 'authors' ifPresent: [:k | ^ '**', k, '**' ].
^ ''.
]
{ #category : #'instance creation' }
Markdeep >> authorsString [
self authors
ifEmpty: [ ^ '' ] ifNotEmpty: [ ^ ' ', self authors ]
ifNil: [ ^ '' ] ifNotNil: [ ^ ' ', self authors ]
]
{ #category : #accessing }
Markdeep >> body [
^ body ifNil: [^ '' ]
^ body
]
{ #category : #accessing }
@ -60,31 +46,6 @@ Markdeep >> body: anObject [
body := anObject
]
{ #category : #accessing }
Markdeep >> bodyReplaceAll: original with: replacement [
	"Replace every occurrence of original with replacement in my body."
self body: (self body copyReplaceAll: original with: replacement)
]
{ #category : #accessing }
Markdeep >> cleanMetadata [
	"Discard my metadata; the next access will lazily recreate it."
metadata := nil
]
{ #category : #accessing }
Markdeep >> commentPubPubDelimiters [
	"Wrap PubPub Pandoc-style div delimiters in my body inside HTML comments
	 ('<!--@div-open ...-->' / '<!--@div-close ... -->') so Markdeep does not
	 render them. Closing ':::' lines (followed by a newline) are commented too."
| commented openners |
openners := #('::: {.pub-body-component}' '::: pub-body-component' '::: {.editor .Prosemirror}' '::: {.pub-notes}').
commented := self body.
openners do: [:openner |
commented := commented copyReplaceAll: openner with: '<!--@div-open ', openner, '-->'
].
commented := commented
copyReplaceAll: ':::
' with: '<!--@div-close ::: -->
'.
self body: commented
]
{ #category : #accessing }
Markdeep >> comments [
^ comments ifNil: [ ^ comments := true ]
@ -126,9 +87,7 @@ Markdeep >> commentsSupport [
{ #category : #accessing }
Markdeep >> config [
	"Answer my configuration: read from a 'config.ston' file next to my source
	 file when present (re-read on every call), otherwise a lazily-created
	 Dictionary."
| configFile |
configFile := self folder / 'config.ston'.
configFile exists ifTrue: [ ^ config := STON fromString: configFile contents ].
^ config ifNil: [ config := Dictionary new]
]
@ -145,92 +104,32 @@ Markdeep >> contents [
output := '' writeStream.
output
nextPutAll: self headContents; lf; lf;
nextPutAll: ' **', self title trimmed accentedCharactersCorrection, '**'; lf;
nextPutAll: self authorsString ; lf;
nextPutAll: '', self version; lf;
nextPutAll: ' **', self title, '**'; lf;
nextPutAll: self authorsString ; lf;
nextPutAll: ' ', self version; lf;
nextPutAll: self navTop; lf; lf;
nextPutAll: self body; lf; lf;
nextPutAll: self body; lf; lf;
nextPutAll: self tail; lf; lf; lf; lf;
nextPutAll: self commentsSupport.
^ output contents.
]
{ #category : #accessing }
Markdeep >> converPubPubFootnoteBetween: footnote and: nextFootnote in: footnotesArray [
| currentNoteIndex nextNoteIndex response noteLines |
currentNoteIndex := footnotesArray indexOf: '[^',footnote, ']: '.
nextNoteIndex := footnotesArray indexOf: '[^',nextFootnote, ']: '.
noteLines := footnotesArray copyFrom: currentNoteIndex to: nextNoteIndex - 1.
response := '' writeStream.
noteLines do: [:line |
line
ifNotEmpty: [ response nextPutAll: line, String lf ]
"ifEmpty: [ response nextPutAll: ' ' ]?"
].
response nextPutAll: String lf.
^ response contents
{ #category : #persistence }
Markdeep >> exportAsFile [
	"Export my contents as an '.html' file next to my source Markdown file.
	 NOTE(review): when markdownFile is nil the inform: does not abort the
	 method; the subsequent #fullName send would fail — confirm whether an
	 early return is intended."
| newFile |
self markdownFile ifNil: [ self inform: 'Define an input Markdown file or use #exportAsFileOn: instead.' ].
newFile := (self markdownFile fullName, '.html') asFileReference.
self exportAsFileOn: newFile.
]
{ #category : #accessing }
Markdeep >> extractTitleFrom: docTree [
	"Find the Markdeep title inside docTree: the first '**'-wrapped line of the
	 indented-code node. Sets my title to the inner text (the '**' markers
	 stripped via copyFrom: 3 to: size - 2) but answers the still-wrapped
	 tempTitle."
| tempTitle |
tempTitle := ((docTree children
detect: [ :node | node className = 'PPCMIndentedCode' ]) children
detect: [ :subnode | subnode text trimmed beginsWith: '**' ]) text trimmed.
self title: (tempTitle copyFrom: 3 to: tempTitle size - 2).
^ tempTitle
]
{ #category : #accessing }
Markdeep >> extractYamlMetadataFrom: documentTree [
	"Locate the '<!--@yaml' HTML-block comment in documentTree and parse its
	 inner lines (first and last child dropped) as YAML. Answers a two-element
	 array {parsed metadata . the comment node}, or nil when no such comment
	 exists."
| yamlComment response |
yamlComment := documentTree children
detect: [:node | node className = 'PPCMHtmlBlock' and: [node text trimmed beginsWith: '<!--@yaml']]
ifNone: [ ^ nil ].
response := '' writeStream.
yamlComment children allButFirst allButLast do: [:each |
response nextPutAll: each text; cr
].
^ {YAML2JSON fromString: response contents . yamlComment }
]
{ #category : #accessing }
Markdeep >> file: aFileReference [
	"Set my source file and immediately (re)populate my contents from it."
file := aFileReference.
self fillInContentsFrom: aFileReference
]
{ #category : #accessing }
Markdeep >> fillInContentsFrom: aFileReference [
	"Parse aFileReference (when it exists) and populate my head, metadata, title
	 and body:
	 - head: the HTML lines of the paragraph node containing '<head>', deduplicated;
	 - metadata: the YAML embedded in a '<!--@yaml' comment (see
	   #extractYamlMetadataFrom:);
	 - title: from the 'title' metadata key, or extracted from the document tree,
	   with surrounding double quotes trimmed;
	 - body: the lines between the end of the YAML comment and the
	   '<!-- Markdeep' footer marker.
	 Bails out early when there is no head paragraph or no YAML marker."
| docTree docTreeChildren headTree bodyStartLine bodyEndLine contentLines rawMetadata |
aFileReference exists ifFalse: [ ^ self ].
docTree := (Markdown new contents: aFileReference contents) documentTree.
docTreeChildren := docTree children.
headTree := docTreeChildren
detect: [ :node |
node className = 'PPCMParagraph'
and: [ (node children detect: [ :subnode | subnode text = '<head>' ]) isNotNil ] ]
ifNone: [ ^self ].
headTree children allButFirst allButLast
do: [ :node | node className = 'PPCMHtml' ifTrue: [ self head add: node text ] ].
self head: self head asSet asOrderedCollection.
rawMetadata := (self extractYamlMetadataFrom: docTree).
rawMetadata ifNotNil: [self metadata: rawMetadata first].
self title ifNil: [
self title: (self metadata at: 'title' ifAbsent: [self extractTitleFrom: docTree]).
self title: (self title trimBoth: [ :char | char = $" ]).
self metadata at: 'title' put: self title].
contentLines := self file contents lines.
bodyStartLine := (contentLines
detectIndex: [ :line | line includesSubstring: '<!--@yaml' ] ifNone: [ ^ self ]) + rawMetadata second children size.
bodyEndLine := contentLines detectIndex: [:line | line includesSubstring: '<!-- Markdeep'] ifNone: [ 0 ].
self body: (contentLines copyFrom: bodyStartLine to: bodyEndLine - 1 ) asStringWithCr.
^ self .
]
{ #category : #accessing }
Markdeep >> folder [
^ self file parent
{ #category : #persistence }
Markdeep >> exportAsFileOn: aFileReference [
	"Write my full contents to aFileReference, replacing any existing file, then
	 inform the user of the destination. Answers aFileReference.
	 (The exists check is redundant right after #ensureDelete, but harmless.)"
aFileReference ensureDelete.
aFileReference exists ifFalse: [ aFileReference ensureCreateFile ].
aFileReference writeStreamDo: [ :stream |
stream nextPutAll: self contents ].
self inform: 'Exported as: ', String cr, aFileReference fullName.
^ aFileReference
]
{ #category : #utilities }
@ -244,21 +143,9 @@ Markdeep >> fontAwesomeHeader [
Markdeep >> fromMarkdownFile: aFileReference [
"I create a Markdeep document from a given Markdown file."
self processMarkdownFor: aFileReference.
self file: aFileReference, 'html'.
^ self.
]
{ #category : #accessing }
Markdeep >> fromPubPubToMarkdeep [
	"Apply, in order, the cleanups that convert a PubPub Markdown export into a
	 Markdeep-friendly body: drop the auto-generated notice and CC-BY div,
	 comment out PubPub div delimiters, unescape characters, rename footnotes
	 and remove alternative image arrays."
self
removeAutoGeneratedFileNotice;
removeCCByLicenseDiv;
commentPubPubDelimiters;
replaceEscapedCharacters;
renamePubPubFootnotes;
removeAlternativeImagesArray
]
{ #category : #accessing }
Markdeep >> gtTextFor: aView [
<gtView>
@ -269,11 +156,8 @@ Markdeep >> gtTextFor: aView [
{ #category : #accessing }
Markdeep >> head [
^ head ifNil: [
head := OrderedCollection new.
head add: self fontAwesomeHeader; yourself.
].
^ head ifNil: [ head := OrderedCollection new.
head add: self fontAwesomeHeader; yourself ]
]
{ #category : #accessing }
@ -294,11 +178,6 @@ Markdeep >> headContents [
nextPutAll: line;
nextPut: Character lf
].
self metadata keysAndValuesDo: [:k :v |
k = 'lang'
ifTrue: [ stream nextPutAll: ' <meta lang="', v,'">'; cr. ]
ifFalse: [ stream nextPutAll: ' <meta name="', k, '" content="', v,'">'; cr. ]
].
stream
nextPutAll: '</head>';
nextPut: Character lf.
@ -326,28 +205,18 @@ Markdeep >> markdeepScriptTag [
{ #category : #accessing }
Markdeep >> markdownFile [
self file ifNil: [
self file: FileLocator temp / ('untitled--', NanoID generate, '.md.html') ].
^ (self file fullName withoutSuffix: '.html') asFileReference.
^ Markdown new fromFile: (self config at: 'markdownFile')
]
{ #category : #accessing }
Markdeep >> markdownFile: aFileReference [
"Where the Mardown file associated with me is stored. Used for sync. and import/export purposes."
self file: aFileReference, 'html'
self config at: 'markdownFile' put: aFileReference
]
{ #category : #'instance creation' }
Markdeep >> metadata [
metadata ifNil: [^ metadata := OrderedDictionary new ].
(metadata isNil and: [ self file contents isNil ])
ifTrue: [ metadata := OrderedDictionary new ].
^ metadata
]
{ #category : #accessing }
Markdeep >> metadata: anOrderedDictionary [
metadata := anOrderedDictionary
^ metadata ifNil: [ metadata := OrderedDictionary new ]
]
{ #category : #utilities }
@ -362,7 +231,7 @@ Markdeep >> metadataFromXML: aXMLDocument [
{ #category : #'instance creation' }
Markdeep >> navTop [
^ navTop ifNil: [ navTop := '' ]
^ navTop
]
{ #category : #'as yet unclassified' }
@ -383,250 +252,23 @@ Markdeep >> options [
]
]
{ #category : #printing }
Markdeep >> printOn: aStream [
super printOn: aStream.
aStream
nextPutAll: '( ', self title, ' )'
]
{ #category : #'instance creation' }
Markdeep >> processMarkdownFor: aFileReference [
"comment stating purpose of message"
| markdownContent |
self file: aFileReference, 'html'.
self markdownFile: aFileReference.
markdownContent := Markdown fromFile: aFileReference.
self metadata: markdownContent metadataAsYAML.
self body: (markdownContent commentYAMLMetadata contents).
]
{ #category : #accessing }
Markdeep >> pubPubFootnoteMetadataFromString: string [
| sanitized footnoteData altLine altString id |
(string lines size <= 1) ifTrue: [ ^ nil ].
sanitized := '' writeStream.
altString := string copyReplaceAll: '.footnote' with: ''.
altString := altString copyReplaceAll: ' node-type='
with: '
node-type= '.
altString := altString copyReplaceAll: ' data-value=' with: '
data-value='.
altString := altString copyReplaceAll: ' date-structured-value=' with: '
date-structured-value= '.
altString lines allButFirstDo: [:line |
(line beginsWith: '>')
ifTrue: [ altLine := line allButFirst ]
ifFalse: [ altLine := line ].
sanitized
nextPutAll: altLine trimBoth;
nextPutAll: String lf
].
sanitized := sanitized contents.
sanitized := sanitized copyReplaceAll: 'type=' with: 'type: '.
sanitized := sanitized copyReplaceAll: 'value=' with: 'value: '.
id := (altString lines first) allButFirst trimmed.
footnoteData := { 'id' -> id } asDictionary.
footnoteData addAll: (MiniDocs yamlToJson: sanitized trimmed).
^ footnoteData
]
{ #category : #accessing }
Markdeep >> pubPubFootnoteRawLinks [
^ self selectPubPubLinksWithSize: 2
]
{ #category : #accessing }
Markdeep >> pubPubFootnotesLinesRangeFor: contentSection [
| beginningLine endingLine |
beginningLine := contentSection lines size + 1.
contentSection lines doWithIndex: [:line :i |
((line includesSubstring: '::: {.pub-notes}') or: [line includesSubstring: '::: pub-notes'])
ifTrue: [ beginningLine := i ].
(i > beginningLine and: [ line beginsWith: ':::' ])
ifTrue: [
endingLine := i.
^ {beginningLine . endingLine}
]
]
]
{ #category : #accessing }
Markdeep >> pubPubFootnotesLinesRangeForBody [
^ self pubPubFootnotesLinesRangeFor: self body
]
{ #category : #accessing }
Markdeep >> pubPubFootnotesLinesRangeForContents [
^ self pubPubFootnotesLinesRangeFor: self contents
]
{ #category : #accessing }
Markdeep >> pubPubFootnotesText [
| footnotesLines output |
footnotesLines := self contents lines
copyFrom: self pubPubFootnotesLinesRangeForContents first + 3
to: self pubPubFootnotesLinesRangeForContents second - 1.
output := '' writeStream.
footnotesLines do: [:line |
output
nextPutAll: line;
nextPutAll: String crlf.
].
^ output contents allButLast
]
{ #category : #accessing }
Markdeep >> pubPubImageLinks [
^ self selectPubPubLinksWithSize: 3
]
{ #category : #accessing }
Markdeep >> pubPubImagesToMarkdeep [
| sanitized parsedLinks |
parsedLinks := self pubPubImageLinks.
parsedLinks ifEmpty: [ ^self ].
sanitized := self body.
parsedLinks do: [:link |
sanitized := sanitized copyReplaceAll: '{', link third, '}' with: ''
].
self body: sanitized
]
{ #category : #accessing }
Markdeep >> pubPubRawLinks [
| parser |
parser := PubPubGrammar2 new document.
^ (parser parse: self body)
]
{ #category : #accessing }
Markdeep >> reformatPubPubFootnotes [
| footnotesLines footnotesIDs toReplace response |
(self = self pubPubFootnotesLinesRangeForContents)
ifTrue: [^self].
footnotesLines := self contents lines
copyFrom: self pubPubFootnotesLinesRangeForContents first
to: self pubPubFootnotesLinesRangeForContents second.
footnotesIDs := self replacePubPubFootnotesIdentifiers.
toReplace := footnotesLines select: [:line |
(line includesSubstring: ' [[]{.pub-note-content-component}]{#fn-')
].
toReplace doWithIndex: [:replacement :i | | index |
index := footnotesLines indexOf: replacement.
footnotesLines at: index put: '[^', (footnotesIDs at: i),']: '
].
response := '' writeStream.
footnotesIDs allButLast doWithIndex: [:footnote :i |
response
nextPutAll:
(self
converPubPubFootnoteBetween: footnote
and: (footnotesIDs at: i + 1)
in: footnotesLines)
].
^ response contents
]
{ #category : #accessing }
Markdeep >> removeAlternativeImagesArray [
| replacements |
self body ifNil: [^ self].
replacements := self selectPubPubLinksWithSize: 3.
replacements ifEmpty: [^self].
replacements do: [:replacement |
self body:
(self body copyReplaceAll: replacement third with: '' )
].
self body: (self body copyReplaceAll: '{srcset=}' with: '').
]
{ #category : #accessing }
Markdeep >> removeAutoGeneratedFileNotice [
| autoGeneratedNotice |
autoGeneratedNotice := '**Notice:** This file is an auto-generated download and, as such, might
include minor display or rendering errors. For the version of record,
please visit the HTML version or download the PDF.
------------------------------------------------------------------------'.
self body: (self body copyReplaceAll: autoGeneratedNotice with: '')
]
{ #category : #accessing }
Markdeep >> removeCCByLicenseDiv [
| licenseDiv|
licenseDiv := '
<div>
**License:** [Creative Commons Attribution 4.0 International License
(CC-BY 4.0)](https://creativecommons.org/licenses/by/4.0/)
</div>'.
self body: (self body copyReplaceAll: licenseDiv with: '')
]
{ #category : #accessing }
Markdeep >> renamePubPubFootnotes [
| reformated bodyLines beforeFootnotes afterFootnotesRaw afterFootnotes newBodyLines response |
reformated := self reformatPubPubFootnotes.
(self pubPubFootnotesLinesRangeForBody class = Markdeep) ifTrue: [ ^self ].
bodyLines := self body lines.
beforeFootnotes := bodyLines copyFrom: 1 to: self pubPubFootnotesLinesRangeForBody first .
afterFootnotesRaw := bodyLines copyFrom: self pubPubFootnotesLinesRangeForBody second to: bodyLines size.
afterFootnotes := OrderedCollection new.
afterFootnotesRaw do:[:line |
(line beginsWith: ':::')
ifTrue: [
afterFootnotes
add: (line copyReplaceAll: ':::' with: '<!--@div-closer ::: -->').
]
].
newBodyLines :=
(beforeFootnotes copyWithAll:
(#('# Footnotes' '')
copyWithAll:(reformated lines
copyWithAll: afterFootnotes))).
response := '' writeStream.
newBodyLines do: [:line |
response nextPutAll: line, String lf
].
self body: response contents.
]
{ #category : #accessing }
Markdeep >> replaceBackslashBreaklines [
self bodyReplaceAll: '\
' with: '<br>
'
]
{ #category : #accessing }
Markdeep >> replaceEscapedCharacters [
self
title: (self title copyReplaceAll: '\#' with: '#');
body: (self body copyReplaceAll: '\#' with: '#');
body: (self body copyReplaceAll: '\[' with: '[');
body: (self body copyReplaceAll: '\]' with: ']');
body: (self body copyReplaceAll: '\*' with: '*')
]
{ #category : #accessing }
Markdeep >> replacePubPubFootnotesIdentifiers [
| footnotes sanitized parsedLinks linkIdentifiers |
footnotes := OrderedDictionary new.
parsedLinks := self pubPubFootnoteRawLinks.
parsedLinks ifEmpty: [ ^self ].
sanitized := self body.
linkIdentifiers := OrderedCollection new.
parsedLinks do: [:link | | id currentLinkText |
id := (link second splitOn: '.footnote') first trimmed.
linkIdentifiers add: id.
currentLinkText := '[', link first, ']{#',link second,'}'.
sanitized := sanitized copyReplaceAll: currentLinkText with: '[^', id, ']'
].
self body: sanitized.
^ linkIdentifiers
]
{ #category : #accessing }
Markdeep >> selectPubPubLinksWithSize: naturalNumber [
^ self pubPubRawLinks select: [ :each | each size = naturalNumber ]
]
{ #category : #accessing }
Markdeep >> tail [
"I enable the document tail, which, in turn, enables a Markdeep document"
@ -649,7 +291,7 @@ Markdeep >> tail: anObject [
{ #category : #accessing }
Markdeep >> title [
^ title
^ title
]
{ #category : #accessing }

View File

@ -6,13 +6,12 @@ particularly the ones provided by Pandoc and/or Lunamark.
"
Class {
#name : #Markdown,
#superclass : #MarkupFile,
#superclass : #Object,
#instVars : [
'metadata',
'body',
'title'
'contents',
'file'
],
#category : #'MiniDocs-Core'
#category : #MiniDocs
}
{ #category : #'instance creation' }
@ -20,43 +19,25 @@ Markdown class >> fromFile: aFileReference [
^ self new fromFile: aFileReference
]
{ #category : #accessing }
Markdown >> asMarkdeep [
^ Markdeep new
body: self body;
commentYAMLMetadata
]
{ #category : #utilities }
Markdown class >> yamlMetadataDelimiter [
^ '---'
{ #category : #accessing }
Markdown >> asMarkdownTiddler [
^ Tiddler new
title: self title;
text: self contents;
type: 'text/x-markdown';
created: Tiddler nowLocal.
]
{ #category : #accessing }
Markdown >> body [
^ body
]
{ #category : #accessing }
Markdown >> body: aString [
body := aString
]
{ #category : #operation }
Markdown >> commentYAMLMetadata [
| newContents |
self contents detectYAMLMetadata ifFalse: [ ^ self ].
self detectYAMLMetadata ifFalse: [ ^ self ].
newContents := '' writeStream.
newContents nextPutAll: '<!--@yaml'; lf.
newContents nextPutAll: self yamlMetadataString.
newContents nextPutAll: '-->'; lf; lf.
newContents nextPutAll: '<!--@yaml:'; crlf.
newContents nextPutAll: self extractYAMLMetadata.
newContents nextPutAll: String cr.
newContents nextPutAll: '-->'; crlf.
(self lines copyFrom: self yamlMetadataClosingLineNumber + 2 to: self lines size) do: [ :line |
newContents nextPutAll: line; lf ].
^ newContents contents.
newContents nextPutAll: line; crlf ].
self contents: newContents contents.
^ self contents
]
{ #category : #utilities }
@ -66,49 +47,22 @@ Markdown >> containsYAMLMetadataClosing [
{ #category : #accessing }
Markdown >> contents [
| response metadataString |
response := WriteStream on: ''.
metadataString := self metadataAsYAML
ifEmpty: [ '' ]
ifNotEmpty: [ '---', String cr, self metadataAsYAML, String cr, '---', String cr ].
response
nextPutAll: metadataString;
nextPutAll: (self body ifNil: [ '' ]).
^ response contents withInternetLineEndings
^ contents
]
{ #category : #accessing }
Markdown >> contents: aString [
body := aString
Markdown >> contents: anObject [
contents := anObject
]
{ #category : #accessing }
Markdown >> documentTree [
| parser|
self contents ifNil: [^ nil].
parser := PPCommonMarkBlockParser new parse: self body.
^ parser accept: CMBlockVisitor new
]
{ #category : #persistence }
Markdown >> exportAsFile [
| newFile |
newFile := (self file fullName ) asFileReference.
^ self notifyExportAsFileOn: newFile.
]
{ #category : #persistence }
Markdown >> exportAsFileOn: aFileReference [
aFileReference ensureDelete.
aFileReference exists ifFalse: [ aFileReference ensureCreateFile ].
aFileReference writeStreamDo: [ :stream |
stream nextPutAll: self contents withInternetLineEndings ].
]
{ #category : #accessing }
Markdown >> exportAsHTML [
^ Pandoc markdownToHtml: self file
{ #category : #utilities }
Markdown >> detectYAMLMetadata [
| lines |
lines := self lines.
^ self startsWithYAMLMetadataDelimiter
and: [ lines allButFirst
detect: [ :currentLine | currentLine beginsWith: self class yamlMetadataDelimiter ]
ifFound: [ ^ true ] ifNone: [ ^ false ] ]
]
{ #category : #operation }
@ -136,13 +90,26 @@ Markdown >> exportMetadataAsJson [
Markdown >> exportMetadataAsYaml [
| exportedFile |
exportedFile := FileLocator temp / 'metadata.yaml'.
MarkupFile exportAsFileOn: exportedFile containing: self yamlMetadataStringWithDelimiters.
MarkupFile exportAsFileOn: exportedFile containing: self yamlMetadataAsString.
^ exportedFile
]
{ #category : #operation }
Markdown >> extractYAMLMetadata [
| output yamlLines |
self detectYAMLMetadata ifFalse: [ ^ nil ].
yamlLines := self lines copyFrom: 2 to: self yamlMetadataClosingLineNumber - 1.
output := '' writeStream.
yamlLines do: [ :line |
output
nextPutAll: line;
nextPut: Character cr. ].
^ output contents
]
{ #category : #accessing }
Markdown >> file [
^ file ifNil: [ file := FileLocator temp / 'temporalMarkdeep.md' ]
^ file
]
{ #category : #accessing }
@ -153,24 +120,8 @@ Markdown >> file: aFileReference [
{ #category : #'instance creation' }
Markdown >> fromFile: aFileReference [
self fromString: aFileReference contents.
self file: aFileReference.
]
{ #category : #'instance creation' }
Markdown >> fromString: markdownString [
| yamlMetadataRaw bodyTemp |
yamlMetadataRaw := (YamlHeaderParser parse: markdownString).
bodyTemp := '' writeStream.
(yamlMetadataRaw removeKey: 'body') do: [:paragraph |
bodyTemp nextPutAll: paragraph; cr; cr
].
self body: bodyTemp contents withInternetLineEndings.
(yamlMetadataRaw sanitizeMultilineValuesWith: markdownString)
ifNotNil: [
self metadata
ifEmpty: [ self metadata: yamlMetadataRaw ]
ifNotEmpty: [ self metadata at: 'hedgeDoc' put: yamlMetadataRaw ]].
self contents: aFileReference contents.
self file: aFileReference
]
{ #category : #accessing }
@ -181,67 +132,50 @@ Markdown >> gtTextFor: aView [
text: [ self contents ]
]
{ #category : #accessing }
Markdown >> headerAsTitle [
| headerNode |
headerNode := self documentTree children
detect: [ :node | node className = 'PPCMHeader' and: [ node level = 1 ] ] ifNone: [ ^ 'Untitled' ].
^ headerNode text
]
{ #category : #utilities }
Markdown >> lines [
self file ifNotNil: [^ self file contents lines ].
^ self contents lines.
]
{ #category : #accessing }
Markdown >> metadata [
^ metadata ifNil: [ metadata := Dictionary new].
]
{ #category : #accessing }
Markdown >> metadata: rawMeta [
metadata := rawMeta
]
{ #category : #accessing }
Markdown >> metadataAsYAML [
self metadata isEmptyOrNil ifTrue: [ ^ '' ].
^ (YQ jsonToYaml: self metadata) accentedCharactersCorrection
]
{ #category : #persistence }
Markdown >> notifyExportAsFileOn: aFileReference [
self exportAsFileOn: aFileReference.
self inform: 'Exported as: ', String cr, aFileReference fullName.
^ aFileReference
]
{ #category : #accessing }
Markdown >> options [
^ self metadata at: 'options' ifAbsentPut: [ self defaultOptions]
| rawMeta |
rawMeta := PPYAMLGrammar new parse: self extractYAMLMetadata.
rawMeta associationsDo: [ :assoc |
assoc value = 'false' ifTrue: [ assoc value: false ].
assoc value = 'true' ifTrue: [ assoc value: true ] ].
^ rawMeta
]
{ #category : #accessing }
Markdown >> printOn: aStream [
| response |
super printOn: aStream.
response := self title ifNil: [ 'Untitled' ].
aStream
nextPutAll: '( ', response , ' )'
nextPutAll: '( ', (self metadata at: 'title'), ' )'
]
{ #category : #accessing }
Markdown >> title [
^ title ifNil: [ title:= self headerAsTitle ]
{ #category : #utilities }
Markdown >> startsWithYAMLMetadataDelimiter [
^ self lines first beginsWith: self class yamlMetadataDelimiter
]
{ #category : #accessing }
Markdown >> title: aString [
title := aString
{ #category : #utilities }
Markdown >> yamlMetadataAsString [
| output |
self extractYAMLMetadata ifNil: [ ^ nil ].
output := String new writeStream.
output nextPutAll: self class yamlMetadataDelimiter; cr.
output nextPutAll: self extractYAMLMetadata.
output nextPutAll: self class yamlMetadataDelimiter; cr.
^ output contents.
]
{ #category : #utilities }
Markdown >> yamlMetadataClosingLineNumber [
"I return the line where the closing of the YAML metadata occurs or 0 if no closing is found."
self startsWithYAMLMetadataDelimiter ifFalse: [ ^ self ].
self lines allButFirst doWithIndex: [ :currentLine :i |
(currentLine beginsWith: self class yamlMetadataDelimiter) ifTrue: [ ^ i + 1 ]]
]

View File

@ -7,20 +7,17 @@ Class {
#instVars : [
'file'
],
#category : #'MiniDocs-Core'
#category : #MiniDocs
}
{ #category : #persistence }
MarkupFile class >> exportAsFileOn: aFileReferenceOrFileName containing: anObject [
| file preprocessed |
MarkupFile class >> exportAsFileOn: aFileReferenceOrFileName containing: text [
| file |
file := aFileReferenceOrFileName asFileReference.
file ensureDelete.
file exists ifFalse: [ file ensureCreateFile ].
(#('String' 'ByteString' 'WideString') includes: anObject className )
ifTrue: [ preprocessed := anObject ]
ifFalse: [preprocessed := STON toStringPretty: anObject ].
file writeStreamDo: [ :stream |
stream nextPutAll: preprocessed ].
stream nextPutAll: text withUnixLineEndings].
self inform: 'Exported as: ', String cr, file fullName.
^ file
]

View File

@ -1,69 +1,23 @@
"
MiniDocs is a project that includes several minimalistic documentation tools used by the [Grafoscopio](https://mutabit.com/grafoscopio/en.html) community, starting with [Markdeep](https://casual-effects.com/markdeep/) and its integrations with [Lepiter](https://lepiter.io/feenk/introducing-lepiter--knowledge-management--e2p6apqsz5npq7m4xte0kkywn/).
"
Class {
#name : #MiniDocs,
#superclass : #Object,
#category : #'MiniDocs-Core'
#category : #MiniDocs
}
{ #category : #accessing }
MiniDocs class >> altKeys [
^ BlAlternativeCombination new
combination: (BlSingleKeyCombination key:BlKeyboardKey altLeft)
or: (BlSingleKeyCombination key:BlKeyboardKey altRight)
]
{ #category : #accessing }
MiniDocs class >> altShiftLeftCombo [
^ BlCompulsoryCombination new
with: self altKeys;
with: self shiftKeys;
with: (BlSingleKeyCombination key: BlKeyboardKey arrowLeft);
yourself
]
{ #category : #accessing }
MiniDocs class >> altShiftRightCombo [
^ BlCompulsoryCombination new
with: self altKeys;
with: self shiftKeys;
with: (BlSingleKeyCombination key: BlKeyboardKey arrowRight);
yourself
]
{ #category : #accessing }
MiniDocs class >> appFolder [
| tempFolder |
tempFolder := ExoRepo userDataFolder / 'Mutabit' / 'MiniDocs'.
tempFolder := FileLocator userData / 'Mutabit' / 'MiniDocs'.
tempFolder exists ifFalse: [ tempFolder ensureCreateDirectory ].
^ tempFolder
]
{ #category : #accessing }
MiniDocs class >> exportAsSton: anObject on: aFileReference [
MarkupFile exportAsFileOn: aFileReference containing: (STON toStringPretty: anObject) withInternetLineEndings
]
{ #category : #accessing }
MiniDocs class >> importGrafoscopioFile: aFileReference [
^ (STON fromString: aFileReference) first parent
]
{ #category : #accessing }
MiniDocs class >> initialize [
self keyboardShortcutsRemapping
]
{ #category : #accessing }
MiniDocs class >> installYamlToJson [
"For the moment, only Gnu/Linux and Mac are supported.
"For the moment, only Gnu/Linux and Mac are supported.
IMPORTANT: Nimble, Nim's package manager should be installed, as this process doesn't verify its proper installation."
self yamlToJsonBinary exists ifTrue: [ ^ MiniDocs appFolder ].
Nimble
install: 'yaml';
install: 'commandeer'.
Nimble install: 'commandeer'.
OSSUnixSubprocess new
command: 'nim';
arguments: {'c'. self yamlToJsonSourceCode fullName};
@ -72,32 +26,9 @@ MiniDocs class >> installYamlToJson [
^ MiniDocs appFolder ]
]
{ #category : #accessing }
MiniDocs class >> keyboardShortcutsRemapping [
| primaryNewLine secondaryNewLine |
primaryNewLine := LeSnippetElement keyboardShortcuts at: #NewLine.
secondaryNewLine := LeSnippetElement keyboardShortcuts at: #SecondaryNewLine.
^ LeSnippetElement keyboardShortcuts
at: #NewLine put: secondaryNewLine;
at: #SecondaryNewLine put: primaryNewLine;
at: #IndentSnippet put: self altShiftRightCombo;
at: #UnindentSnippet put: self altShiftLeftCombo;
yourself
]
{ #category : #accessing }
MiniDocs class >> shiftKeys [
^ BlAlternativeCombination new
combination: (BlSingleKeyCombination key:BlKeyboardKey shiftLeft)
or: (BlSingleKeyCombination key:BlKeyboardKey shiftRight)
]
{ #category : #accessing }
MiniDocs class >> yamlToJson: yamlString [
"This method uses a external binary written in Nim, as the native Pharo parser for YAML, written in PetitParser,
was less robust and unable to parse correctly the same strings as the external one."
yamlString ifNil: [ ^ Dictionary new ].
self yamlToJsonBinary exists ifFalse: [ self installYamlToJson ].
OSSUnixSubprocess new
@ -105,7 +36,7 @@ MiniDocs class >> yamlToJson: yamlString [
arguments: {yamlString};
redirectStdout;
runAndWaitOnExitDo: [ :process :outString |
^ (STONJSON fromString: outString allButFirst accentedCharactersCorrection) first
^ (STONJSON fromString: outString allButFirst) first
]
]
@ -118,25 +49,3 @@ MiniDocs class >> yamlToJsonBinary [
MiniDocs class >> yamlToJsonSourceCode [
^ FileLocator image parent / 'pharo-local/iceberg/Offray/MiniDocs/src/yamlToJson.nim'
]
{ #category : #accessing }
MiniDocs >> installNimFileExporter [
| folder |
folder := (MiniDocs appFolder / 'scripts') ensureCreateDirectory.
ZnClient new
url: 'https://mutabit.com/repos.fossil/mutabit/uv/wiki/scripts/stringAsFileInto';
downloadTo: folder / 'stringAsFileInto'.
ZnClient new
url: 'https://mutabit.com/repos.fossil/mutabit/doc/trunk/wiki/scripts/stringAsFileInto.nim';
downloadTo: folder / 'stringAsFileInto.nim'.
OSSUnixSubprocess new
command: 'chmod';
arguments: { '+x' . (folder / 'stringAsFileInto') fullName };
workingDirectory: folder fullName;
redirectStdout;
redirectStderr;
runAndWaitOnExitDo: [ :process :outString | ^ outString ]
]

View File

@ -1,74 +0,0 @@
Class {
#name : #MiniDocsServer,
#superclass : #TLWebserver,
#instVars : [
'storage'
],
#classInstVars : [
'singleton'
],
#category : #'MiniDocs-Core'
}
{ #category : #accessing }
MiniDocsServer class >> build [
TLRESTAPIBuilder buildAPI.
self start
]
{ #category : #accessing }
MiniDocsServer class >> defaultConfiguration [
"Override to set more default values"
^ {
#port -> 1701
}
]
{ #category : #accessing }
MiniDocsServer class >> listLepiterDocs: aRequest [
<REST_API: 'GET' pattern: 'lepiter'>
^ 'A list of Mardeep exported Lepiter docs will appear soon...'
]
{ #category : #accessing }
MiniDocsServer class >> restart [
Teapot stopAll.
self build.
^ self start
]
{ #category : #accessing }
MiniDocsServer class >> singleton [
^ singleton ifNil: [ singleton := MiniDocsServer teapot ]
]
{ #category : #accessing }
MiniDocsServer >> addStorage: anObject [
self storage add: anObject.
]
{ #category : #accessing }
MiniDocsServer >> initRoutes [
self storage: FileLocator documents / 'lepiter' / 'default'.
self teapot
serveStatic: '/lepiter/doc' from: self storage fullName.
self teapot
GET: '/lepiter' -> 'A list of Mardeep exported Lepiter docs will appear soon...'
]
{ #category : #accessing }
MiniDocsServer >> start [
self class defaultPort: 1701.
self initRoutes.
super start.
]
{ #category : #accessing }
MiniDocsServer >> storage [
^ storage
]
{ #category : #accessing }
MiniDocsServer >> storage: aFoldersOrderedCollection [
storage := aFoldersOrderedCollection
]

View File

@ -0,0 +1,53 @@
"
I run an implementation of the [Nano ID](https://github.com/ai/nanoid) tiny, secure, URL-friendly unique string ID generator via its [Nim implementation](https://github.com/icyphox/nanoid.nim).
The Nim script has hard coded:
* a [base 58 encoding](https://medium.com/concerning-pharo/understanding-base58-encoding-23e673e37ff6) alphabet to avoid similar looking letter and the use of non-alphanumeric characters.
* a 12 characters length output, which gives [a pretty low probability collision](https://zelark.github.io/nano-id-cc/) for the previous alphabet:
~616 years needed, in order to have a 1% probability of at least one collision at a speed of 1000 IDs per hour.
This is more than enough for our unique ID applications, mostly in the documentation context,
which consists of hand-crafted and/or programmatically produced notes,
for example in data narratives, book(lets) and TiddlyWiki tiddlers of tens or hundreds of notes at most,
unevenly produced between hours, days and/or weeks.
"
Class {
	#name : #NanoID,
	#superclass : #Object,
	#category : #'MiniDocs-MiniDocs'
}

{ #category : #accessing }
NanoID class >> binaryFile [
	"Answer the file reference of the compiled Nano ID generator binary inside the MiniDocs application folder."
	^ MiniDocs appFolder / self scriptSourceCode basenameWithoutExtension
]

{ #category : #accessing }
NanoID class >> generate [
	"Answer a freshly generated unique ID string produced by the external Nim binary,
	installing that binary first when it is not present.
	The trailing line feed emitted by the binary is stripped from the answer."
	self binaryFile exists ifFalse: [ NanoID install ].
	OSSUnixSubprocess new
		command: self binaryFile fullName;
		redirectStdout;
		runAndWaitOnExitDo: [ :process :outString | ^ outString copyWithoutAll: Character lf asString ]
]

{ #category : #accessing }
NanoID class >> install [
	"Compile the Nim source of the Nano ID generator and move the resulting binary into the MiniDocs application folder, which is answered.
	For the moment, only Gnu/Linux and Mac are supported.
	IMPORTANT: Nimble, Nim's package manager, should be installed, as this process doesn't verify its proper installation."
	self binaryFile exists ifTrue: [ ^ MiniDocs appFolder ].
	Nimble install: 'nanoid'.
	OSSUnixSubprocess new
		command: 'nim';
		arguments: {'c'. self scriptSourceCode fullName};
		runAndWaitOnExitDo: [ :process :outString |
			(self scriptSourceCode parent / self scriptSourceCode basenameWithoutExtension)
				moveTo: MiniDocs appFolder asFileReference.
			^ MiniDocs appFolder ]
]

{ #category : #accessing }
NanoID class >> scriptSourceCode [
	"Answer the file reference of the Nim source code for the ID generator, as cloned by Iceberg."
	^ FileLocator image parent / 'pharo-local/iceberg/Offray/MiniDocs/src/nanoIdGen.nim'
]

View File

@ -0,0 +1,66 @@
"
I'm a helper class modelling the common uses of the Nim's [Nimble package manager](https://github.com/nim-lang/nimble).
This class evolved in the context of the [Grafoscopio](https://mutabit.com/grafoscopio/en.html) community's exploration and prototyping of interactive documentation.
"
Class {
	#name : #Nimble,
	#superclass : #Object,
	#category : #'MiniDocs-MiniDocs'
}

{ #category : #accessing }
Nimble class >> detect: packageName [
	"Answer true when an installed Nimble package name begins with packageName, false otherwise."
	^ self installed
		detect: [ :dependency | dependency beginsWith: packageName ]
		ifFound: [ ^ true ]
		ifNone: [ ^ false ]
]

{ #category : #accessing }
Nimble class >> install: packageName [
	"Install packageName through the external 'nimble' command, refreshing the official package list first.
	Does nothing when the package is already installed; otherwise answers the command output."
	(self detect: packageName) ifTrue: [ ^ self ].
	self installPackagesList.
	OSSUnixSubprocess new
		command: 'nimble';
		arguments: {'install'. packageName};
		redirectStdout;
		runAndWaitOnExitDo: [ :process :outString | ^ outString ]
]

{ #category : #accessing }
Nimble class >> installPackagesList [
	"Download Nimble's official package list ('nimble refresh') unless it is already cached in the user's home folder."
	(FileLocator home / '.nimble' / 'packages_official.json') exists
		ifTrue: [ ^ self ].
	OSSUnixSubprocess new
		command: 'nimble';
		arguments: #('refresh');
		redirectStdout;
		runAndWaitOnExitDo: [ :process :outString | ^ outString ]
]

{ #category : #accessing }
Nimble class >> installed [
	"Answer the output lines listing locally installed Nimble packages, or nil when the command fails."
	OSSUnixSubprocess new
		command: 'nimble';
		arguments: #('list' '--installed');
		redirectStdout;
		redirectStderr;
		runAndWaitOnExitDo: [ :process :outString :errString |
			"Single ifTrue:ifFalse: message; the former version accidentally cascaded ifTrue:; ifFalse: as two messages to the Boolean."
			process isSuccess
				ifTrue: [ ^ outString lines ]
				ifFalse: [ ^ nil ] ]
]

{ #category : #accessing }
Nimble class >> version [
	"Answer the version string reported by the 'nimble' executable."
	OSSUnixSubprocess new
		command: 'nimble';
		arguments: #('--version');
		redirectStdout;
		runAndWaitOnExitDo: [ :process :outString | ^ outString ]
]

View File

@ -1,100 +0,0 @@
Extension { #name : #OrderedDictionary }
{ #category : #'*MiniDocs' }
OrderedDictionary >> addErrata: noteString [
self errata add: noteString
]
{ #category : #'*MiniDocs' }
OrderedDictionary >> asLepiterSnippet [
| response |
self at: 'className' ifAbsent: [ ^ nil ].
response := (self at: 'className') asClass new.
[ response fromDictionary: self ] onErrorDo: [ ].
[ response fromString: (self at: 'content') ] onErrorDo: [ ].
self at: 'origin' ifPresent: [ response metadata at: 'origin' put: (self at: 'origin') ].
self at: 'errata' ifPresent: [ response metadata at: 'errata' put: (self at: 'errata') ].
^ response
]
{ #category : #'*MiniDocs' }
OrderedDictionary >> asYAML [
^ (YQ jsonToYaml: self) accentedCharactersCorrection.
]
{ #category : #'*MiniDocs' }
OrderedDictionary >> errata [
^ self at: 'errata' ifAbsentPut: [ OrderedCollection new]
]
{ #category : #'*MiniDocs' }
OrderedDictionary >> redefineTimestampsBefore: dateAndTime [
self at: 'modified' put: dateAndTime asDateAndTime.
self at: 'created' put: dateAndTime asDateAndTime - 1 second.
]
{ #category : #'*MiniDocs' }
OrderedDictionary >> replaceNilsWith: aCharacter [
self associationsDo: [:each |
each value ifNil: [self at: each key put: aCharacter].
each value isDictionary ifTrue: [each value replaceNilsWith: aCharacter].
each value isArray ifTrue: [ | newArray|
newArray := (each value asDataSeries replaceNilsWith: aCharacter) asArray.
self at: each key put: newArray
]
]
]
{ #category : #'*MiniDocs' }
OrderedDictionary >> replaceWithUniqueNilsAndBooleansStartingAt: anInteger [
| totalNils shortUID |
totalNils := self flattened asDataSeries countNils.
shortUID := [NanoID generate copyFrom: 1 to: 3].
self associations doWithIndex: [:assoc :i | | subIndex |
subIndex := anInteger asString, '-', i asString.
assoc value
ifNil: [ self at: assoc key put: 'nil-', subIndex ].
assoc value isBoolean
ifTrue: [ self at: assoc key put: assoc value asString, '-', subIndex ].
assoc value isDictionary ifTrue: [assoc replaceWithUniqueNilsAndBooleansStartingAt: i].
assoc value isArray
ifTrue: [ self at: assoc key put: (assoc value replaceWithUniqueNilsAndBooleans)]
]
]
{ #category : #'*MiniDocs' }
OrderedDictionary >> sanitizeMultilineValuesWith: aString [
| toSanitize response |
toSanitize := OrderedCollection new.
response := OrderedCollection new.
self keysAndValuesDo: [:k :v |
(v isString and: [v lines size > 1])
ifTrue: [
aString lines
detect: [:line | line includesSubstring: k ]
ifFound: [:line | | sanitized|
sanitized := (line withoutPrefix: k, ':'), String cr,
v indentedWithExtraSpaces: 4.
self at: k put: sanitized ]
]
].
]
{ #category : #'*MiniDocs' }
OrderedDictionary >> treeView [
| view |
view := GtMondrian new.
view nodes
stencil: [ :x |
BlElement new
border: (BlBorder paint: Color black);
geometry: BlEllipseGeometry new;
layout: (BlLinearLayout new alignCenter);
addChild: (BlTextElement text: (x asRopedText fontSize: 10)) ];
with: (self flatCollectAsSet: #yourself) , self keys.
view edges
stencil: [ :x | BlLineElement new border: (BlBorder paint: (Color blue alpha: 0.5) width: 4) ];
connect: self associations from: #key toAll: #value.
view layout tree.
^ view
]

View File

@ -1,162 +0,0 @@
"
I model the interaction between Pandoc and Grafoscopio.
"
Class {
#name : #Pandoc,
#superclass : #Object,
#classInstVars : [
'executable'
],
#category : #'MiniDocs-Core'
}
{ #category : #'*MiniDocs' }
Pandoc class >> convertString: aString from: inputFormat to: outputFormat [
OSSUnixSubprocess new
shellCommand: 'echo "', aString , '" | pandoc -f ', inputFormat,' -t ', outputFormat;
redirectStdout;
runAndWaitOnExitDo: [ :command :outString |
^ outString
].
]
{ #category : #'as yet unclassified' }
Pandoc class >> downloadLuaFilters [
self luaFilters do: [ :filter | | filterUrl |
filterUrl := filter asUrl.
(FileLocator temp asFileReference / (filterUrl segments last)) exists
ifFalse: [
ZnClient new
url: filterUrl;
downloadTo: FileLocator temp ] ]
]
{ #category : #accessing }
Pandoc class >> executable [
^ executable ifNil: [ self executableLocation ]
]
{ #category : #accessing }
Pandoc class >> executable: aFileReference [
executable := aFileReference
]
{ #category : #accessing }
Pandoc class >> executableLocation [
| location |
location := '/usr/bin/pandoc'.
location asFileReference exists
ifTrue: [ ^ location ]
ifFalse: [ self definePandocExecutable ]
]
{ #category : #utility }
Pandoc class >> extractImagesInUnixFor: aFileReference withFilter: aLuaFilter [
"I use Pandoc Lua scripting capabilities to extract al images links in aFileReference"
OSSUnixSubprocess new
command: 'pandoc';
arguments: {aFileReference fullName . '--lua-filter=',aLuaFilter fullName };
redirectStdout;
redirectStderr;
runAndWaitOnExitDo: [ :process :outString :errString |
process isSuccess
ifTrue: [
^ ((Soup fromString: outString) findAllTags: 'td') collect: [ :each | each next ] ]
ifFalse: [
"OSSUnixProcessExitStatus has a nice #printOn: "
Transcript show: 'Command exit with error status: ', process exitStatusInterpreter printString; cr.
Transcript show: 'Stderr contents: ', errString.
]
]
]
{ #category : #accessing }
Pandoc class >> htmlStringToMarkdown: aString [
OSSUnixSubprocess new
shellCommand: 'echo "', aString , '" | pandoc -f markdown -t html';
redirectStdout;
runAndWaitOnExitDo: [ :command :outString |
^ outString
].
]
{ #category : #converters }
Pandoc class >> htmlToMarkdown: inputFile [
| outputFile |
outputFile := FileLocator temp / 'body.md'.
outputFile ensureDelete.
outputFile ensureCreateFile.
OSSUnixSubprocess new
command: 'pandoc';
arguments: {'-f'. 'html'. '-t'. 'markdown'. '--atx-headers'. inputFile fullName.
'--output'. outputFile fullName };
redirectStdout;
redirectStderr;
runAndWaitOnExitDo: [ :process :outString :errString |
process isSuccess
ifTrue: [ ^ outputFile contents ]
ifFalse: [ ^inputFile contents ]
]
]
{ #category : #'as yet unclassified' }
Pandoc class >> listImagesFrom: aFileReference [
"I provide a list of all images contained in aFile."
| filter commandString outputString |
filter := FileLocator temp asFileReference / 'image-links.lua'.
filter exists
ifFalse: [ self downloadLuaFilters ].
commandString := 'pandoc ' , aFileReference fullName
, ' --lua-filter=' , filter fullName.
^ self extractImagesInUnixFor: aFileReference withFilter: filter
]
{ #category : #utility }
Pandoc class >> luaFilters [
"I define the location of set of scripts, that allows to change the default behaviour of Pandoc
and/or the processing of supported markup languages.
For more information about Lua filters see:
https://pandoc.org/lua-filters.html
"
| filters |
filters := OrderedCollection new.
filters
add: 'http://mutabit.com/repos.fossil/dataweek/doc/tip/Artefactos/Scripts/image-links.lua'.
^ filters
]
{ #category : #converters }
Pandoc class >> markdownToHtml: inputFile [
(Smalltalk os isUnix or: [ Smalltalk os isMacOS ]) ifTrue: [ ^ self markdownToHtmlOnUnix: inputFile ].
Smalltalk os isWindows ifTrue: [ ^ self markdownToHtmlOnWindows: inputFile ].
]
{ #category : #converters }
Pandoc class >> markdownToHtmlOnUnix: inputFile [
| outputFile |
outputFile := inputFile parent / (inputFile basenameWithoutExtension , '.html').
outputFile ensureDelete.
outputFile ensureCreateFile.
GtSubprocessWithInMemoryOutput new
shellCommand: 'pandoc -f markdown+startnum+task_lists --standalone -t html ', inputFile fullName, ' --output ', outputFile fullName;
runAndWait;
stdout.
^ outputFile.
]
{ #category : #converters }
Pandoc class >> markdownToHtmlOnWindows: inputFile [
"ToDo: This command still doesn't receive any arguments."
^ (LibC resultOfCommand: 'pandoc ', inputFile fullName) correctAccentedCharacters.
]

View File

@ -1,11 +0,0 @@
Extension { #name : #Pandoc }
{ #category : #'*MiniDocs' }
Pandoc class >> convertString: aString from: inputFormat to: outputFormat [
	"Convert aString between two pandoc-supported markup formats by piping it
	 through an external pandoc process; answer pandoc's standard output.
	 NOTE(review): aString is interpolated inside a double-quoted shell
	 string, so embedded double quotes, backticks or '$' can break or inject
	 into the command — consider feeding the input via stdin instead."
	OSSUnixSubprocess new
		shellCommand: 'echo "', aString , '" | pandoc -f ', inputFormat,' -t ', outputFormat;
		redirectStdout;
		runAndWaitOnExitDo: [ :command :outString |
			^ outString
		].
]

View File

@ -1,148 +0,0 @@
Class {
#name : #PubPubContent,
#superclass : #Object,
#instVars : [
'title',
'language',
'url',
'thumbnail',
'work',
'contents'
],
#category : #'MiniDocs-Model'
}
{ #category : #accessing }
PubPubContent class >> fromXML: anXMLElement [
	"Instance creation: build a content entry by scraping anXMLElement,
	 a pub preview node taken from a PubPub front page (see the instance
	 side #fromXML: for the fields extracted)."
	^ self new fromXML: anXMLElement
]
{ #category : #accessing }
PubPubContent >> asMarkdeepFrontPageElement [
	"Render me as a Markdeep fragment for the work's front page: a <big>
	 Markdeep link '[title](./book/shortName--id.md.html)' followed — when a
	 thumbnail is known — by the same link wrapped around the thumbnail <img>."
	| response anchorName anchorLink markdeepFile |
	response := '' writeStream.
	anchorName := '[', self title,']'.
	markdeepFile := './book/', self shortName,'--',self id,'.md.html'.
	anchorLink := '(', markdeepFile,')'.
	response
		nextPutAll: '<big>', anchorName, anchorLink,'</big><br><br>';
		nextPutAll: String lf.
	"The <img> literal keeps its embedded line breaks: they are emitted
	 verbatim into the exported page."
	self thumbnail ifNotNil: [ |image|
		image := '
<img
src=', self thumbnail,
' width="55%"
style="width: 400px; height: 220px; object-fit: cover;"
/>'.
		response nextPutAll: '<a href="',markdeepFile,'">', image, '</a>'
	].
	response
		nextPutAll: String lf, String lf.
	^ response contents
]
{ #category : #accessing }
PubPubContent >> contents: anObject [
contents := anObject
]
{ #category : #accessing }
PubPubContent >> fileName [
^ self shortName,'--', self id, '.md'
]
{ #category : #accessing }
PubPubContent >> fromXML: aXMLElement [
| image anchor|
image := aXMLElement contentNodes first xpath: './a/div'.
image
ifNotEmpty: [|style rawUrl|
style := (image first attributeAt: 'style').
rawUrl := (style splitOn: 'url') second.
self
thumbnail:(rawUrl copyFrom: 3 to: rawUrl size - 2)
].
anchor := (aXMLElement contentNodes second contentNodes first xpath: './div[@class="title-wrapper"]/a') first.
self
title: (anchor attributeAt: 'title');
url: (anchor attributeAt: 'href').
]
{ #category : #accessing }
PubPubContent >> id [
^ (self url splitOn: $/) last
]
{ #category : #'as yet unclassified' }
PubPubContent >> language: aString [
language := aString
]
{ #category : #accessing }
PubPubContent >> next [
	"Answer the content that follows me in my work's table of contents, or
	 nil when I am the last one (or not part of the table at all).
	 Fixed: this used to answer #nextInstance, the reflection primitive that
	 walks object memory — unrelated to the table of contents and asymmetric
	 with #previous."
	| index |
	index := self work tableOfContents detectIndex: [ :pubContent | pubContent = self ] ifNone: [ ^ nil ].
	^ self work tableOfContents at: index + 1 ifAbsent: [ nil ]
]
{ #category : #accessing }
PubPubContent >> previous [
	"Answer the content that precedes me in my work's table of contents, or
	 nil when I am the first one (or not part of the table at all).
	 Fixed: when I am the first content, 'at: index - 1' used to signal an
	 out-of-bounds error instead of answering nil."
	| index |
	index := self work tableOfContents detectIndex: [ :pubContent | pubContent = self ] ifNone: [ ^ nil ].
	index = 1 ifTrue: [ ^ nil ].
	^ self work tableOfContents at: index - 1.
]
{ #category : #accessing }
PubPubContent >> printOn: aStream [
super printOn: aStream.
aStream
nextPutAll: '( ', self title,' | ', self id, ' )'
]
{ #category : #accessing }
PubPubContent >> shortName [
	"Answer a camelCase identifier derived from my title (the text before the
	 first ':'), with its first letter lowered; used to build chapter file names."
	| sanitized |
	sanitized := (self title splitOn: $:) first.
	"NOTE(review): the replacement below reads as a no-op ('' -> '') — the
	 search string may be a non-ASCII character lost in transcoding; verify
	 against the repository history before touching it."
	sanitized := sanitized copyReplaceAll: '' with: ''.
	sanitized := sanitized asCamelCase.
	"In-place mutation is acceptable here: asCamelCase answered a fresh copy."
	sanitized at: 1 put: sanitized first asLowercase.
	^ sanitized
]
{ #category : #accessing }
PubPubContent >> thumbnail [
^ thumbnail
]
{ #category : #accessing }
PubPubContent >> thumbnail: anURL [
thumbnail := anURL
]
{ #category : #accessing }
PubPubContent >> title [
^ title
]
{ #category : #accessing }
PubPubContent >> title: anObject [
title := anObject
]
{ #category : #accessing }
PubPubContent >> url [
^url
]
{ #category : #accessing }
PubPubContent >> url: anObject [
url := anObject
]
{ #category : #accessing }
PubPubContent >> work [
^ work
]
{ #category : #accessing }
PubPubContent >> work: aPubPubWork [
work := aPubPubWork
]

View File

@ -1,75 +0,0 @@
Class {
#name : #PubPubGrammar,
#superclass : #PP2CompositeNode,
#instVars : [
'document',
'link',
'linkLabel',
'linkContent',
'imageLinkLabel',
'imageLinkContent',
'alternativeImages',
'imageLink'
],
#category : #'MiniDocs-Model'
}
{ #category : #accessing }
PubPubGrammar >> alternativeImages [
^ self linkContent
]
{ #category : #accessing }
PubPubGrammar >> document [
^ (link / imageLink ) islandInSea star
]
{ #category : #links }
PubPubGrammar >> imageLink [
^ imageLinkLabel, imageLinkContent, alternativeImages
]
{ #category : #links }
PubPubGrammar >> imageLinkContent [
^ '(' asPParser, #any asPParser starLazy flatten, ')' asPParser ==> #second
]
{ #category : #links }
PubPubGrammar >> imageLinkLabel [
| label |
label := ("$] asPParser not /" #any asPParser) starLazy flatten.
^ '![' asPParser, label, ']' asPParser ==> #second.
]
{ #category : #accessing }
PubPubGrammar >> imageLinkSea [
^ imageLink sea ==> #second
]
{ #category : #links }
PubPubGrammar >> link [
^ linkLabel, linkContent
]
{ #category : #links }
PubPubGrammar >> linkContent [
^ '{' asPParser, #any asPParser starLazy flatten, '}' asPParser ==> #second.
]
{ #category : #links }
PubPubGrammar >> linkLabel [
| label |
label := ("$] asPParser not /" #any asPParser) starLazy flatten.
^ $[ asPParser, label, $] asPParser ==> #second.
]
{ #category : #accessing }
PubPubGrammar >> linkSea [
^ link sea ==> #second
]
{ #category : #accessing }
PubPubGrammar >> start [
^ document
]

View File

@ -1,65 +0,0 @@
Class {
#name : #PubPubGrammar2,
#superclass : #PP2CompositeNode,
#instVars : [
'imageLabel',
'imageLink',
'imagesArray',
'imageLocation',
'document',
'footnote',
'footnoteLabel',
'footnoteContent'
],
#category : #'MiniDocs-Model'
}
{ #category : #accessing }
PubPubGrammar2 >> document [
^ (imageLink / footnote) islandInSea star
]
{ #category : #accessing }
PubPubGrammar2 >> footnote [
^ footnoteLabel, footnoteContent
]
{ #category : #accessing }
PubPubGrammar2 >> footnoteContent [
^ '{#' asPParser, #any asPParser starLazy flatten, '}' asPParser ==> #second
]
{ #category : #accessing }
PubPubGrammar2 >> footnoteLabel [
^ '[' asPParser, #any asPParser starLazy flatten, ']' asPParser ==> #second
]
{ #category : #accessing }
PubPubGrammar2 >> imageLabel [
^ '![' asPParser, #any asPParser starLazy flatten, ']' asPParser ==> #second
]
{ #category : #accessing }
PubPubGrammar2 >> imageLink [
^ imageLabel, imageLocation, imagesArray
]
{ #category : #accessing }
PubPubGrammar2 >> imageLocation [
^ '(' asPParser, #any asPParser starLazy flatten, ')' asPParser ==> #second
]
{ #category : #accessing }
PubPubGrammar2 >> imagesArray [
^ '{srcset=' asPParser, #any asPParser starLazy flatten, '}' asPParser ==> #second
]
{ #category : #accessing }
PubPubGrammar2 >> imagesContent [
^ '{src=' asPParser, #any asPParser starLazy flatten, '}' asPParser ==> #second
]
{ #category : #accessing }
PubPubGrammar2 >> start [
^ document
]

View File

@ -1,59 +0,0 @@
Class {
#name : #PubPubGrammarTest,
#superclass : #PP2CompositeNodeTest,
#category : #'MiniDocs-Model'
}
{ #category : #accessing }
PubPubGrammarTest >> parserClass [
^ PubPubGrammar
]
{ #category : #accessing }
PubPubGrammarTest >> testComposedImageLink [
self
parse: '![This is an image label with sublinks (bla bl)[blog]](this/is/an/image/link){this are alternate image sizes}'
rule: #imageLink
]
{ #category : #accessing }
PubPubGrammarTest >> testImageLabel: label [
self
parse: label
rule: #imageLinkLabel
]
{ #category : #accessing }
PubPubGrammarTest >> testImageLink [
self
parse: '![This is an image label](this/is/an/image/link){this are alternate image sizes}'
rule: #imageLink
]
{ #category : #accessing }
PubPubGrammarTest >> testLabel: label [
self
parse: label
rule: #linkLabel
]
{ #category : #accessing }
PubPubGrammarTest >> testLink [
self
parse: '[This is a label]{this/is/a/link}'
rule: #link
]
{ #category : #accessing }
PubPubGrammarTest >> testNestedLabel [
self
parse: '[This is a label with [sublabels]]'
rule: #linkLabel
]
{ #category : #accessing }
PubPubGrammarTest >> testSimpleLabel [
self
parse: '[This is a label]'
rule: #linkLabel
]

View File

@ -1,240 +0,0 @@
Class {
#name : #PubPubWork,
#superclass : #Object,
#instVars : [
'address',
'tableOfContents',
'titles',
'folder',
'currentLanguage',
'languages'
],
#category : #'MiniDocs-Model'
}
{ #category : #accessing }
PubPubWork >> addTableOfContents: anOrderedDictionary [
self tableOfContents
at: (self currentLanguage) put: anOrderedDictionary;
yourself
]
{ #category : #accessing }
PubPubWork >> addTitle: aString [
self titles
at: (self currentLanguage) put: aString
]
{ #category : #accessing }
PubPubWork >> address [
^ address
]
{ #category : #accessing }
PubPubWork >> address: anUrl [
address := anUrl
]
{ #category : #accessing }
PubPubWork >> bookishFolder [
	"Per-language name of the folder that holds the book's chapters."
	^ Dictionary new
		at: 'en' put: 'book';
		at: 'es' put: 'libro';
		yourself
]
{ #category : #accessing }
PubPubWork >> currentLanguage [
^ currentLanguage
]
{ #category : #accessing }
PubPubWork >> currentLanguage: twoLettersInISO639_1 [
currentLanguage := twoLettersInISO639_1
]
{ #category : #accessing }
PubPubWork >> defaultOptions [
^ { 'sourceCodeLink' -> true .
'commentsProvider' -> 'Hypothesis' } asDictionary
]
{ #category : #accessing }
PubPubWork >> defaultTitle [
^ self titles associations first value
]
{ #category : #accessing }
PubPubWork >> downloadContents [
	"Download every chapter of this work as Markdown into the working
	 directory ('<folder>/<lang>/<book|libro>'), naming each file
	 '<shortName>--<id>.md'; answer the working directory.
	 PubPub serves every download under the fixed name 'markdown', hence the
	 delete-then-rename dance around each request.
	 NOTE(review): #downloadContents2 duplicates this with a hard-coded
	 'book' folder — candidate for deletion."
	| workingDirectory |
	workingDirectory := self workingDirectory.
	self tableOfContentsDictionary
		keysAndValuesDo: [ :name :chapterAddress |
			| currentFileName |
			currentFileName := name , '--' , chapterAddress , '.md'.
			(workingDirectory / currentFileName) asFileReference ensureDelete.
			(workingDirectory / 'markdown') asFileReference ensureDelete.
			ZnClient new
				get: self address , 'pub/' , chapterAddress , '/download/markdown';
				downloadTo: workingDirectory.
			workingDirectory / 'markdown' renameTo: currentFileName ].
	^ workingDirectory
]
{ #category : #accessing }
PubPubWork >> downloadContents2 [
| workingDirectory |
workingDirectory := self folder / self currentLanguage / 'book'.
self tableOfContentsDictionary keysAndValuesDo: [ :name :chapterAddress | |currentFileName|
currentFileName := name, '--', chapterAddress, '.md'.
(workingDirectory / currentFileName) asFileReference ensureDelete.
(workingDirectory / 'markdown') asFileReference ensureDelete.
ZnClient new
get: self address, 'pub/', chapterAddress, '/download/markdown';
downloadTo: workingDirectory .
workingDirectory / 'markdown' renameTo: currentFileName
].
^ workingDirectory
]
{ #category : #accessing }
PubPubWork >> exportToHTML [
	"Export every downloaded Markdown chapter as HTML next to its source
	 and answer the folder that contains them."
	| files |
	files := self markdownFiles.
	files do: [ :each | (Markdown new fromFile: each) exportAsHTML ].
	^ files first parent
]
{ #category : #accessing }
PubPubWork >> exportToMarkdeep [
	"Convert every downloaded Markdown chapter into a Markdeep file on disk
	 and answer the language folder that now contains them."
	self markdownFiles do: [ :each |
		(Markdeep fromMarkdownFile: each) fromPubPubToMarkdeep exportAsFile ].
	^ self languageFolder
]
{ #category : #accessing }
PubPubWork >> extractAllContentsRaw [
^ self frontPage xpath: '//div[@class="layout-pubs-block"]'
]
{ #category : #accessing }
PubPubWork >> extractRawTableOfContents [
^ self extractAllContentsRaw first xpath: '//div[contains(concat(" ",normalize-space(@class)," "), " pub-preview-component ")]'
]
{ #category : #accessing }
PubPubWork >> folder [
^ folder ensureCreateDirectory
]
{ #category : #accessing }
PubPubWork >> folder: localDirectory [
folder := localDirectory
]
{ #category : #accessing }
PubPubWork >> frontPage [
"This should scrap contents of the book's front-page and translate them into Markdeep,
according to our templates."
^ (XMLHTMLParser on: (self address asUrl retrieveContents)) parseDocument
]
{ #category : #accessing }
PubPubWork >> languageFolder [
^ self folder / self currentLanguage
]
{ #category : #accessing }
PubPubWork >> markdeepFrontPage [
	"Build the Markdeep front page for this work: its default title plus one
	 index entry per content in the table of contents."
	| page index |
	page := Markdeep new.
	page
		title: self defaultTitle;
		file: self languageFolder / 'frontPage.md.html'.
	index := '' writeStream.
	self tableOfContents do: [ :pubPubContent |
		index nextPutAll: pubPubContent asMarkdeepFrontPageElement ].
	page body: index contents.
	^ page
]
{ #category : #accessing }
PubPubWork >> markdownFiles [
^ self languageFolder allChildren
select: [ :file | file basename endsWith: '.md' ]
]
{ #category : #accessing }
PubPubWork >> populateContents [
self tableOfContents isEmptyOrNil
ifTrue: [ self populateTableOfContents ].
self workingDirectory children ifEmpty: [self downloadContents].
self tableOfContents do: [:pubPubContent | | contentFile|
contentFile := self workingDirectory / pubPubContent fileName.
contentFile exists
ifTrue: [ pubPubContent contents: (Markdown new fromFile: contentFile) ]
]
]
{ #category : #accessing }
PubPubWork >> populateTableOfContents [
| contentsCollection |
contentsCollection := self extractRawTableOfContents collect: [:each |
(PubPubContent fromXML: each)
language: self currentLanguage;
work: self
].
self addTableOfContents: contentsCollection asOrderedCollection
]
{ #category : #accessing }
PubPubWork >> printOn: aStream [
super printOn: aStream.
aStream
nextPutAll: '(',self defaultTitle, ' | ', self address, ' )'
]
{ #category : #accessing }
PubPubWork >> tableOfContents [
	"Answer the table of contents for the current language.
	 On first access this initializes the backing dictionary and answers it
	 (empty) rather than a per-language entry — callers rely on that through
	 #isEmptyOrNil checks before populating.
	 NOTE(review): once initialized, an unknown current language signals
	 KeyNotFound here; also #addTableOfContents: sends at:put: to whatever
	 this answers, which on later calls is the language's collection, not
	 the dictionary — both interactions look fragile, confirm before reuse."
	tableOfContents ifNil: [ ^ tableOfContents := Dictionary new].
	^ tableOfContents at: self currentLanguage
]
{ #category : #accessing }
PubPubWork >> tableOfContents: anObject [
tableOfContents := anObject
]
{ #category : #accessing }
PubPubWork >> tableOfContentsDictionary [
	"Map each content's short name to its PubPub id, preserving the table
	 of contents order."
	| mapping |
	mapping := OrderedDictionary new.
	self tableOfContents do: [ :content |
		mapping at: content shortName put: content id ].
	^ mapping
]
{ #category : #accessing }
PubPubWork >> titles [
^ titles ifNil: [titles := OrderedDictionary new]
]
{ #category : #accessing }
PubPubWork >> viewContentsFor: aView [
<gtView>
^ aView list
title: 'Contents';
priority: 10;
items: [ self tableOfContents ]
]
{ #category : #accessing }
PubPubWork >> workingDirectory [
^ self folder / self currentLanguage / (self bookishFolder at: self currentLanguage)
]

View File

@ -1,240 +1,8 @@
Extension { #name : #String }
{ #category : #'*MiniDocs' }
String >> accentedCharactersCorrection [
	"Answer a copy of me where common UTF-8-read-as-Latin-1 mojibake
	 sequences are restored to their intended characters (Spanish accented
	 vowels, enye and the right single quote).
	 NOTE(review): the dictionary keys are themselves mojibake and may have
	 been further garbled in transit — verify them byte-by-byte against the
	 repository before editing."
	| modified corrections |
	corrections := {
	'ó' -> 'ó' . 'Ó' -> 'Ó' . 'ú' -> 'ú' . 'ñ' -> 'ñ' . 'Ñ' -> 'Ñ' .
	'í' -> 'í' . 'á' -> 'á' . 'é' -> 'é' . '’' -> $' asString} asDictionary.
	modified := self copy.
	corrections keysAndValuesDo: [ :k :v |
		modified := modified copyReplaceAll: k with: v
	].
	^ modified
]
{ #category : #'*MiniDocs' }
String >> admonitionBorderLines [
| response |
response := OrderedDictionary new.
self lines doWithIndex: [:line :index |
(self admonitionBorders includes: line trimBoth)
ifTrue: [ response at: index put: line trimBoth ]
].
^ response
]
{ #category : #'*MiniDocs' }
String >> admonitionBorders [
"For the moment I only work with the admonition starting border
as adding the closing one would imply to redo the #markdownSplitted
method implementing a proper parser, which, ATM is overkill."
| response |
response := #('info' 'success' 'warning' 'danger') collect: [ :each | ':::', each ].
^ response "copyWith: ':::'"
]
{ #category : #'*MiniDocs' }
String >> admonitionEndingPosition [
| response |
response := 0.
self startsWithMarkdownAdmonition ifFalse: [ ^ response ].
self lines do: [:line |
response > 0 ifTrue: [ response := response + 1 ].
(line trimBoth = ':::')
ifFalse: [ response := response + line size ]
ifTrue: [ ^ response := response + line size. ]
].
^ response
]
{ #category : #'*MiniDocs' }
String >> asDashedLowercase [
	"I convert phrases like 'This is a phrase' into 'this-is-a-phrase'."
	| words |
	words := self substrings collect: [ :word | word asLowercase ].
	^ '-' join: words
]
{ #category : #'*MiniDocs' }
String >> asInteger [
"Return the integer present in the receiver, or nil. In case of float, returns the integer part."
"'1' asInteger >>> 1"
"'-1' asInteger >>> -1"
"'10' asInteger >>> 10"
"'a' asInteger >>> nil"
"'1.234' asInteger >>> 1"
^ (self copyWithoutAll: '_') asSignedInteger
]
{ #category : #'*MiniDocs' }
String >> contentsWithoutYAMLMetadata [
| newContents |
self detectYAMLMetadata ifFalse: [ ^ self ].
newContents := '' writeStream.
(self lines copyFrom: self yamlMetadataClosingLineNumber + 2 to: self lines size) do: [ :line |
newContents nextPutAll: line; cr ].
^ newContents contents.
]
{ #category : #'*MiniDocs' }
String >> deleteYAMLMetadata [
| newContents |
self detectYAMLMetadata ifFalse: [ ^ self ].
newContents := '' writeStream.
(self lines copyFrom: self yamlMetadataClosingLineNumber + 1 to: self lines size) do: [ :line |
newContents nextPutAll: line; lf;lf ].
^ newContents contents.
]
{ #category : #'*MiniDocs' }
String >> demoteMarkdownHeaders [
| response |
response := self contents lines.
self markdownHeaders associations allButFirstDo: [ :assoc |
response at: assoc key put: '#', assoc value ].
^ response asStringWithCr withInternetLineEndings
]
{ #category : #'*MiniDocs' }
String >> detectYAMLMetadata [
	"Answer true when I open with a YAML front matter block: my first line
	 begins with the '---' delimiter and some later line begins with it again."
	self startsWithYAMLMetadataDelimiter ifFalse: [ ^ false ].
	^ self lines allButFirst
		anySatisfy: [ :currentLine | currentLine beginsWith: self class yamlMetadataDelimiter ]
]
{ #category : #'*MiniDocs' }
String >> indentedWithExtraSpaces: spaceNumber [
	"Answer a copy of me where every line is prefixed with spaceNumber spaces
	 (each line terminated with a line feed)."
	| prefix output |
	prefix := String new: spaceNumber withAll: Character space.
	output := '' writeStream.
	self lines do: [ :line |
		output nextPutAll: prefix; nextPutAll: line; nextPutAll: String lf ].
	^ output contents
]
{ #category : #'*MiniDocs' }
String >> markdownHeaders [
| response headers |
headers := (LeTextSnippet string: self contents) ast // #LeHeaderNode collect: [ :each | each headerFullName asString ].
response := OrderedDictionary new.
self lines doWithIndex: [:line :index |
(line beginsWithAnyOf: headers)
ifTrue: [ response at: index put: line ]
].
^ response
]
{ #category : #'*MiniDocs' }
String >> markdownSplitLines [
"I'm useful for conversions between the HedgeDoc Markdown variant and Lepiter page snippets.
I provide broad places to where semantic breaks should be located in a page,
depending on headers or admonitions to create pages snippets with similar divisions.
Further page splits should be provided manually by the document author."
| response |
response := OrderedDictionary new.
response := response
addAll: self markdownHeaders;
addAll: self admonitionBorderLines;
yourself.
^ (response associations sorted: [ :x :y | x key < y key ]) asOrderedDictionary
]
{ #category : #'*MiniDocs' }
String >> markdownSplitted [
| response lastPart |
self markdownSplitLines ifEmpty: [ ^ self ].
response := OrderedCollection new.
self markdownSplitLines keys allButLast doWithIndex: [:key :index | | nextLine part |
nextLine := (self markdownSplitLines keys at: index + 1) - 1.
part := self lines copyFrom: key to: nextLine.
response add: part.
].
lastPart := self lines
copyFrom: self markdownSplitLines keys last
to: self lines size.
response add: lastPart.
^ response
]
{ #category : #'*MiniDocs' }
String >> promoteMarkdownHeaders [
| response |
response := self contents lines.
self markdownHeaders associationsDo: [ :assoc |
response at: assoc key put: assoc value allButFirst ].
^ response asStringWithCr withInternetLineEndings
]
{ #category : #'*MiniDocs' }
String >> romanizeAccents [
| modified corrections |
corrections := {
'ó' -> 'o' . 'ú' -> 'u' . 'ñ' -> 'n' .
'í' -> 'i' . 'á' -> 'a' . 'é' -> 'e' } asDictionary.
modified := self copy.
corrections keysAndValuesDo: [ :k :v |
modified := modified copyReplaceAll: k with: v
].
^ modified
]
{ #category : #'*MiniDocs' }
String >> startsWithMarkdownAdmonition [
self lines ifEmpty: [ ^ false ].
^ self admonitionBorders includes: self lines first trimBoth
]
{ #category : #'*MiniDocs' }
String >> startsWithYAMLMetadataDelimiter [
self lines ifEmpty: [^false].
^ self lines first beginsWith: self class yamlMetadataDelimiter
]
{ #category : #'*MiniDocs' }
String >> withoutXMLTagDelimiters [
	"Answer a copy of me with every '<' and '>' character removed."
	^ self reject: [ :character | character = $< or: [ character = $> ] ]
]
{ #category : #'*MiniDocs' }
String >> yamlMetadataClosingLineNumber [
	"I return the line where the closing of the YAML metadata occurs or 0 if no closing is found.
	 Fixed: both no-closing paths used to fall through and answer self (the
	 receiver string) instead of the documented 0. Existing callers only
	 invoke me after #detectYAMLMetadata answered true, so they are unaffected."
	self startsWithYAMLMetadataDelimiter ifFalse: [ ^ 0 ].
	self lines allButFirst doWithIndex: [ :currentLine :i |
		(currentLine beginsWith: self class yamlMetadataDelimiter) ifTrue: [ ^ i + 1 ]].
	^ 0
]
{ #category : #'*MiniDocs' }
String class >> yamlMetadataDelimiter [
^ '---'
]
{ #category : #'*MiniDocs' }
String >> yamlMetadataString [
| output yamlLines |
self detectYAMLMetadata ifFalse: [ ^nil ].
self lines ifEmpty: [ ^nil ].
yamlLines := self lines copyFrom: 2 to: self yamlMetadataClosingLineNumber - 1.
output := '' writeStream.
yamlLines do: [ :line |
output
nextPutAll: line;
nextPut: Character lf. ].
^ output contents
]
{ #category : #'*MiniDocs' }
String >> yamlMetadataStringWithDelimiters [
| output |
self yamlMetadataString ifNil: [ ^ nil ].
output := String new writeStream.
output nextPutAll: self class yamlMetadataDelimiter; cr.
output nextPutAll: self yamlMetadataString.
output nextPutAll: self class yamlMetadataDelimiter; cr.
^ output contents.
]

View File

@ -1,6 +0,0 @@
Extension { #name : #TeaCompositeRouter }
{ #category : #'*MiniDocs' }
TeaCompositeRouter >> staticRouters [
	"Expose the private routers collection so MiniDocs can inspect the
	 routes registered in a Teapot server."
	^ routers
]

View File

@ -1,6 +0,0 @@
Extension { #name : #TeaStaticRouter }
{ #category : #'*MiniDocs' }
TeaStaticRouter >> delegate [
	"Expose the private delegate so MiniDocs can reach the underlying
	 static-file router."
	^ delegate
]

View File

@ -1,6 +0,0 @@
Extension { #name : #Teapot }
{ #category : #'*MiniDocs' }
Teapot >> staticRouter [
	"Answer the delegate of this server's static router (via the companion
	 TeaStaticRouter>>#delegate extension); used by MiniDocs to serve
	 exported files."
	^ staticRouter delegate
]

View File

@ -1,10 +0,0 @@
Extension { #name : #UnixChromePlatform }
{ #category : #'*MiniDocs' }
UnixChromePlatform class >> defaultExecutableLocations [
^ #( '/opt/google/chrome/chrome'
'/usr/bin/chromium-browser'
'/usr/local/share/chromium/chrome'
'/usr/bin/chromium' )
]

View File

@ -1,9 +0,0 @@
Extension { #name : #XMLDocument }
{ #category : #'*MiniDocs' }
XMLDocument >> detectMarkdeepTitle [
	"Extract this Markdeep page's title: the first line of the first node's
	 text that contains ' **' (Markdeep's title markup), with surrounding
	 asterisks stripped; answers 'Untitled' when no such line exists."
	| titleLine |
	titleLine := (self nodesCollect: [:node | node contentString ]) first lines
		detect: [:line | line includesSubstring: ' **'] ifNone: ['Untitled'].
	^ titleLine trimmed trimBoth: [:char | char = $* ]
]

View File

@ -1,53 +0,0 @@
Extension { #name : #XMLElement }
{ #category : #'*MiniDocs' }
XMLElement >> asSnippetDictionary [
| response |
response := STON fromString: (self attributes at: 'st-data').
response at: 'className' put: (self attributes at: 'st-class').
response at: 'content' put: self sanitizedContent.
^ response
]
{ #category : #'*MiniDocs' }
XMLElement >> extractMarkdownImageLinkData [
| linkParserNodes sanitizedText linkParser |
linkParser := (PPCommonMarkBlockParser parse: (self contentString trimBoth: [:each | each = Character lf]) allButFirst)
accept: CMBlockVisitor new.
linkParserNodes := linkParser children first children.
linkParserNodes size = 1
ifTrue: [ sanitizedText := linkParserNodes first label text ]
ifFalse: [ sanitizedText := '' writeStream.
linkParserNodes allButLast
do: [ :each |
each className = 'PPCMText'
ifTrue: [ sanitizedText nextPutAll: each text allButFirst ].
each className = 'PPCMLink'
ifTrue: [ sanitizedText nextPutAll: each printString ] ].
sanitizedText := sanitizedText contents ].
^ {sanitizedText . self contentString }
]
{ #category : #'*MiniDocs' }
XMLElement >> sanitizedContent [
| className sanitizedText |
className := self attributes at: 'st-class'.
className = 'LeTextSnippet'
ifTrue: [ sanitizedText := self contentString.
sanitizedText := sanitizedText allButFirst.
sanitizedText := sanitizedText allButLast ].
className = 'LePharoSnippet'
ifTrue: [ | joinedText |
sanitizedText := self contentString lines.
sanitizedText := sanitizedText copyFrom: 4 to: sanitizedText size - 2.
joinedText := '' writeStream.
sanitizedText
do: [ :line |
joinedText
nextPutAll: line;
nextPut: Character lf ].
sanitizedText := joinedText contents allButLast ].
className = 'LePictureSnippet'
ifTrue: [ sanitizedText := self extractMarkdownImageLinkData ].
^ sanitizedText
]

View File

@ -1,10 +0,0 @@
Extension { #name : #ZnConstants }
{ #category : #'*MiniDocs' }
ZnConstants class >> maximumLineLength [
"Return the maximum line length to accept.
Used by ZnLineReader and thus for reading request/status lines as well as headers.
This helps to protect us from malicious content."
^ 5096 "8192"
]

View File

@ -1,48 +0,0 @@
Class {
#name : #CMBlockVisitor,
#superclass : #CMVisitor,
#instVars : [
'inlineParser'
],
#category : #'PetitMarkdown-Visitors'
}
{ #category : #initialization }
CMBlockVisitor >> initialize [
inlineParser := PPCommonMarkInlinesParser new.
]
{ #category : #'as yet unclassified' }
CMBlockVisitor >> visitLinkRefDef: node [
inlineParser registerLinkRefDef: node.
^ super visitLinkRefDef: node
]
{ #category : #'as yet unclassified' }
CMBlockVisitor >> visitParagraph: node [
	"Re-parse the paragraph's accumulated lines with the inline parser and
	 answer a PPCMParagraph holding the resulting inline nodes."
	| result text |
	"Sanity check: a paragraph node is expected to contain at least one line."
	self assert: (node children anySatisfy: [ :e | e isLine ]).
	text := Character cr join: (node children collect: [:e | e text]).
	"trimRight drops the paragraph's trailing newline before inline parsing."
	result := inlineParser parse: (text trimRight).
	^ PPCMParagraph new
		addChildren: result;
		yourself
]
{ #category : #'as yet unclassified' }
CMBlockVisitor >> visitPlainLine: node [
| result |
self assert: node text isString.
result := inlineParser parse: node text.
^ PPCMLine new
addChildren: result;
yourself
]
{ #category : #'as yet unclassified' }
CMBlockVisitor >> visitPlainText: node [
^ PPCMText new
text: node text;
yourself
]

View File

@ -1,458 +0,0 @@
Class {
#name : #CMHTMLVisitor,
#superclass : #CMVisitor,
#instVars : [
'links',
'shouldEscape',
'tight',
'shouldHtmlSpecChars'
],
#category : #'PetitMarkdown-Visitors'
}
{ #category : #support }
CMHTMLVisitor >> encodeEntities: text [
^ PPCommonMarkUtils instance encodeEntities: text
]
{ #category : #escape }
CMHTMLVisitor >> escape: string [
	"Resolve CommonMark backslash escapes in string ('\!' -> '!', '\[' -> '['
	 and so on), unless escaping is currently disabled (see #forbidEscape,
	 used inside code blocks and raw HTML).
	 NOTE(review): '\\' is collapsed both before and after the regex pass;
	 the second pass can further collapse sequences the first one produced —
	 check against the CommonMark spec's backslash-escape examples."
	| retval regex |
	self shouldEscape ifFalse: [ ^ string ].
	retval := string.
	retval := retval copyReplaceAll: '\\' with: '\'.
	"Remove backlashes, \! -> !"
	regex := '\\[!#$%''()*+,-./:;=?@^_`{|}~]' asRegex.
	retval := regex copy: retval translatingMatchesUsing: [ :match | match second asString ].
	retval := retval copyReplaceAll: '\[' with: '['.
	retval := retval copyReplaceAll: '\]' with: ']'.
	retval := retval copyReplaceAll: '\\' with: '\'.
	^ retval
]
{ #category : #escape }
CMHTMLVisitor >> forbidEscape [
shouldEscape push: false
]
{ #category : #support }
CMHTMLVisitor >> forbidHtmlSpecChars [
shouldHtmlSpecChars push: false
]
{ #category : #initialization }
CMHTMLVisitor >> initialize [
super initialize.
links := IdentityDictionary new.
shouldEscape := Stack with: true.
shouldHtmlSpecChars := Stack with: true.
]
{ #category : #'string operations' }
CMHTMLVisitor >> removeLeadingEmptyLines: collection [
	"Answer a copy of collection without its leading empty-text line nodes.
	 Fixed: when every element had empty text, the unguarded loop exhausted
	 the copy and 'retval first' signaled an error; the loop now stops once
	 the copy is empty."
	| retval |
	collection isEmpty ifTrue: [ ^ collection ].
	retval := collection copy.
	[ retval notEmpty and: [ retval first text = '' ] ] whileTrue: [
		retval removeFirst
	].
	^ retval
]
{ #category : #'string operations' }
CMHTMLVisitor >> removeTrailingEmptyLines: collection [
	"Answer a copy of collection without its trailing empty-text line nodes.
	 Fixed: when every element had empty text, the unguarded loop exhausted
	 the copy and 'retval last' signaled an error; the loop now stops once
	 the copy is empty."
	| retval |
	collection isEmpty ifTrue: [ ^ collection ].
	retval := collection copy.
	[ retval notEmpty and: [ retval last text = '' ] ] whileTrue: [
		retval removeLast
	].
	^ retval
]
{ #category : #escape }
CMHTMLVisitor >> restoreEscape [
shouldEscape pop
]
{ #category : #'string operations' }
CMHTMLVisitor >> restoreHtmlSpecChars [
shouldHtmlSpecChars pop
]
{ #category : #escape }
CMHTMLVisitor >> shouldEscape [
^ shouldEscape top
]
{ #category : #support }
CMHTMLVisitor >> shouldHtmlSpecChars [
^ shouldHtmlSpecChars top
]
{ #category : #'string operations' }
CMHTMLVisitor >> trimLeadingEmptyLines: string [
| retval |
retval := string.
[retval beginsWith: String cr] whileTrue: [
retval := retval copyFrom: 2 to: retval size.
].
^ retval
]
{ #category : #visiting }
CMHTMLVisitor >> visitBlockQuote: node [
| stream content |
stream := WriteStream on: ''.
stream nextPut: Character cr.
stream nextPutAll: '<blockquote>'.
content := node child accept: self.
content := content trimRight.
stream nextPutAll: content.
stream nextPut: Character cr.
stream nextPutAll: '</blockquote>'.
^ stream contents.
]
{ #category : #visiting }
CMHTMLVisitor >> visitContainer: node [
| parts concat |
parts := node children collect: [ :child |
child accept: self
].
concat := (parts reject: [ :e | e = '' ]) inject: '' into: [ :string :e | string, e ].
^ concat
]
{ #category : #visiting }
CMHTMLVisitor >> visitDelegate: node [
| parts |
parts := node children collect: [ :child |
child accept: self
].
^ String cr join: parts
]
{ #category : #visiting }
CMHTMLVisitor >> visitDocument: node [
^ self trimLeadingEmptyLines: (self visitContainer: node)
]
{ #category : #visiting }
CMHTMLVisitor >> visitEmphasize: node [
| retval |
retval:= WriteStream on: ''.
retval nextPutAll: '<em>'.
node children do: [ :child |
retval nextPutAll: (child accept: self)
].
retval nextPutAll: '</em>'.
^ retval contents
]
{ #category : #visiting }
CMHTMLVisitor >> visitFencedCode: node [
| stream |
stream := WriteStream on: ''.
stream nextPut: Character cr.
stream nextPutAll: '<pre><code'.
node infoString isNil ifFalse: [
stream nextPutAll: ' class="language-'.
stream nextPutAll: (self escape: node infoString trim).
stream nextPutAll: '"'
].
stream nextPut: $>.
self forbidEscape.
(node children) do: [ :child |
stream nextPutAll: (child accept: self).
stream nextPut: Character cr.
].
self restoreEscape.
stream nextPutAll: '</code></pre>'.
^ stream contents.
]
{ #category : #visiting }
CMHTMLVisitor >> visitHRule: node [
^ String cr, '<hr />'
]
{ #category : #visiting }
CMHTMLVisitor >> visitHardBreak: node [
^ '<br />'
]
{ #category : #visiting }
CMHTMLVisitor >> visitHeader: node [
^ String cr, '<h', node level asString, '>',
(node title accept: self) trim,
'</h', node level asString, '>'
]
{ #category : #visiting }
CMHTMLVisitor >> visitHtml: node [
^ node text
]
{ #category : #visiting }
CMHTMLVisitor >> visitHtmlBlock: node [
| parts |
self forbidEscape.
self forbidHtmlSpecChars.
parts := node children collect: [ :child |
child accept: self
].
self restoreHtmlSpecChars.
self restoreEscape.
" ^ String cr join: parts "
^ parts inject: '' into: [ :string :e | string, String cr, e ]
]
{ #category : #visiting }
CMHTMLVisitor >> visitIndentedCode: node [
| stream |
stream := WriteStream on: ''.
stream nextPut: Character cr.
stream nextPutAll: '<pre><code>'.
self forbidEscape.
(self removeTrailingEmptyLines: (self removeLeadingEmptyLines: node children)) do: [ :child |
stream nextPutAll: (child accept: self).
stream nextPut: Character cr.
].
self restoreEscape.
stream nextPutAll: '</code></pre>'.
^ stream contents.
]
{ #category : #visiting }
CMHTMLVisitor >> visitInlinedCode: node [
	"Render inline code as <code>…</code>: newlines become spaces, then the
	 loop collapses space runs until a fixed point is reached.
	 NOTE(review): 'code := code.' is a dead self-assignment. Also both
	 copyReplaceAll: literals display here as single spaces — the search
	 string is presumably a double space (otherwise the loop exits after one
	 pass); verify against the original source before editing."
	| code code2 |
	code := node code.
	code := code copyReplaceAll: (String cr) with: (String space).
	code := code.
	code2 := code.
	[
	code := code2.
	code2 := code copyReplaceAll: ' ' with: ' '
	] doWhileFalse: [ code2 = code ].
	^ '<code>', code trim , '</code>'
]
{ #category : #visiting }
CMHTMLVisitor >> visitLine: node [
| stream |
stream := WriteStream on: ''.
node children do: [ :child |
stream nextPutAll: (child accept: self).
].
^ stream contents
]
{ #category : #visiting }
CMHTMLVisitor >> visitLink: node [
| stream |
stream := WriteStream on: ''.
stream nextPutAll: '<a href="'.
node destination isNil ifFalse: [
stream nextPutAll: (self encodeEntities: (self escape: node destination)).
] ifTrue: [ ].
stream nextPutAll: '"'.
node title isNil ifFalse: [
stream nextPutAll: ' title="'.
stream nextPutAll: (self escape: (self encodeEntities: node title)).
stream nextPutAll: '"'.
].
stream nextPutAll: '>'.
stream nextPutAll: (node label accept: self).
stream nextPutAll: '</a>'.
^ stream contents
]
{ #category : #visiting }
CMHTMLVisitor >> visitLinkRef: node [
| stream ref |
stream := WriteStream on: ''.
ref := links at: node label text asLowercase asSymbol.
stream nextPutAll: '<a href="'.
stream nextPutAll: (self escape: ref destination).
stream nextPutAll: '"'.
ref title isNil ifFalse: [
stream nextPutAll: ' title="'.
stream nextPutAll: (self escape: ref title).
stream nextPutAll: '"'.
].
stream nextPutAll: '>'.
stream nextPutAll: (self escape: node label text).
stream nextPutAll: '</a>'.
^ stream contents
]
{ #category : #visiting }
CMHTMLVisitor >> visitLinkRefDef: node [
links at: node label text asLowercase asSymbol ifAbsentPut: node.
^ ''
]
{ #category : #visiting }
CMHTMLVisitor >> visitLinkRefDefPlaceholder: node [
^ ''
]
{ #category : #visiting }
CMHTMLVisitor >> visitList: node [
| stream tag tmp start |
stream := WriteStream on: ''.
tmp := tight.
tight := node isTight.
start := ''.
(node type = #ordered) ifTrue: [
tag := 'ol'.
(node start = 1) ifFalse: [ start := ' start="', node start asString, '"' ]
] ifFalse: [
tag := 'ul'
].
stream nextPut: Character cr.
stream nextPut: $<.
stream nextPutAll: tag.
stream nextPutAll: start.
stream nextPut: $>.
node children do: [ :child |
child isBlankLine ifFalse: [
stream nextPutAll: (child accept: self).
]
].
stream nextPut: Character cr.
stream nextPutAll: '</'.
stream nextPutAll: tag.
stream nextPut: $>.
tight := tmp.
^ stream contents
]
{ #category : #visiting }
CMHTMLVisitor >> visitListItem: node [
| stream nodeChildren |
stream := WriteStream on: ''.
nodeChildren := node child children reject: [:e | e isBlankLine ].
stream nextPut: Character cr.
stream nextPutAll: '<li>'.
nodeChildren do: [ :child |
(child isParagraph and: [ tight ]) ifTrue: [
child children do: [ :ch | stream nextPutAll: (ch accept: self) ]
] ifFalse: [
stream nextPutAll: (child accept: self).
]
].
(nodeChildren isEmpty or:
[nodeChildren last isParagraph and: [tight]]) ifFalse: [
stream nextPut: Character cr
].
stream nextPutAll: '</li>'.
^ stream contents
]
{ #category : #visiting }
CMHTMLVisitor >> visitNode: node [
^ ''
]
{ #category : #visiting }
CMHTMLVisitor >> visitParagraph: node [
| stream |
stream := WriteStream on: ''.
stream nextPut: Character cr.
stream nextPutAll: '<p>'.
node children do: [ :child |
stream nextPutAll: (child accept: self)
].
stream nextPutAll: '</p>'.
^ stream contents
]
{ #category : #visiting }
CMHTMLVisitor >> visitPlainLine: node [
^ self error: 'should not happen'
]
{ #category : #visiting }
CMHTMLVisitor >> visitPlainText: node [
^ self error: 'should not happen'
]
{ #category : #visiting }
CMHTMLVisitor >> visitSoftBreak: node [
^ String cr
]
{ #category : #visiting }
CMHTMLVisitor >> visitStrong: node [
| retval |
retval:= WriteStream on: ''.
retval nextPutAll: '<strong>'.
node children do: [ :child |
retval nextPutAll: (child accept: self)
].
retval nextPutAll: '</strong>'.
^ retval contents
]
{ #category : #visiting }
CMHTMLVisitor >> visitText: node [
^ node text
]

View File

@ -1,99 +0,0 @@
"
CMVisitor is the base tree-rewriting visitor for the CommonMark AST.
Each visit method answers the (possibly replaced) node. The default
behavior, visitWhatever:, rewrites the node's children in place;
terminal node types (HRule, Html, LinkRefDef, Text, ...) are answered
unchanged.
"
Class {
#name : #CMVisitor,
#superclass : #Object,
#category : #'PetitMarkdown-Visitors'
}
{ #category : #'as yet unclassified' }
CMVisitor >> visitBlockQuote: node [
^ self visitWhatever: node
]
{ #category : #'as yet unclassified' }
CMVisitor >> visitContainer: node [
^ self visitWhatever: node
]
{ #category : #'as yet unclassified' }
CMVisitor >> visitDocument: node [
^ self visitWhatever: node
]
{ #category : #'as yet unclassified' }
CMVisitor >> visitFencedCode: node [
^ self visitWhatever: node
]
{ #category : #'as yet unclassified' }
CMVisitor >> visitHRule: node [
"Terminal node: answered unchanged."
^ node
]
{ #category : #'as yet unclassified' }
CMVisitor >> visitHeader: node [
^ self visitWhatever: node
]
{ #category : #'as yet unclassified' }
CMVisitor >> visitHtml: node [
"Terminal node: answered unchanged."
^ node
]
{ #category : #'as yet unclassified' }
CMVisitor >> visitHtmlBlock: node [
^ self visitWhatever: node
]
{ #category : #'as yet unclassified' }
CMVisitor >> visitIndentedCode: node [
^ self visitWhatever: node
]
{ #category : #'as yet unclassified' }
CMVisitor >> visitLine: node [
^ self visitWhatever: node
]
{ #category : #'as yet unclassified' }
CMVisitor >> visitLinkRefDef: node [
"Terminal node: answered unchanged."
^ node
]
{ #category : #'as yet unclassified' }
CMVisitor >> visitLinkRefDefPlaceholder: node [
"Terminal node: answered unchanged."
^ node
]
{ #category : #'as yet unclassified' }
CMVisitor >> visitList: node [
^ self visitWhatever: node
]
{ #category : #'as yet unclassified' }
CMVisitor >> visitListItem: node [
^ self visitWhatever: node
]
{ #category : #'as yet unclassified' }
CMVisitor >> visitNode: node [
"Fallback for node types without a dedicated hook: answered unchanged."
^ node
]
{ #category : #'as yet unclassified' }
CMVisitor >> visitParagraph: node [
^ self visitWhatever: node
]
{ #category : #'as yet unclassified' }
CMVisitor >> visitText: node [
"Terminal node: answered unchanged."
^ node
]
{ #category : #'as yet unclassified' }
CMVisitor >> visitWhatever: node [
"Default transformation: visit each child and splice the result back into
the node (in-place mutation via replace:with:), then answer the node."
node children do: [ :child |
node replace: child
with: (child accept: self)
].
^ node
]

View File

@ -1,30 +0,0 @@
"
PPCMBlockQuote is the AST node for a CommonMark block quote ('> ...').
It holds exactly one child: the quoted inner document.
"
Class {
#name : #PPCMBlockQuote,
#superclass : #PPCMDelegateNode,
#instVars : [
'code',
'infoString'
],
#category : #'PetitMarkdown-AST'
}
{ #category : #visiting }
PPCMBlockQuote >> accept: visitor [
"Double-dispatch to the visitor's block-quote hook."
^ visitor visitBlockQuote: self
]
{ #category : #initialization }
PPCMBlockQuote >> initialize [
"A block quote holds exactly one child slot (its inner document).
Fixed: this method was miscategorized under #visiting."
super initialize.
children := Array new: 1.
]
{ #category : #testing }
PPCMBlockQuote >> isBlockLevel [
"Block quotes are block-level elements."
^ true
]
{ #category : #accessing }
PPCMBlockQuote >> viewBody [
"Inspector label: class name plus the flattened text of the children.
NOTE(review): instVars 'code' and 'infoString' are not used by any method
visible here — they look copied from PPCMFencedCode; confirm before removing."
^ (self className ,' ', self text) asRopedText.
]

View File

@ -1,25 +0,0 @@
"
PPCMContainer is a generic grouping AST node that simply holds children.
"
Class {
#name : #PPCMContainer,
#superclass : #PPCMDelegateNode,
#category : #'PetitMarkdown-AST'
}
{ #category : #'as yet unclassified' }
PPCMContainer >> accept: visitor [
"Double-dispatch to the visitor's container hook."
^ visitor visitContainer: self
]
{ #category : #accessing }
PPCMContainer >> viewBody [
"Inspector label: own class name followed by each child's class name, dimmed."
| body |
body := self className asRopedText.
self children do: [ :each |
body
append: ' ' asRopedText;
append: (each className asRopedText
foreground: BrGlamorousColors disabledButtonTextColor) ].
^ body
]

View File

@ -1,92 +0,0 @@
"
PPCMDelegateNode is the abstract superclass for AST nodes that hold
children. It provides child storage, ordered add/replace operations and
positional accessors. Subclasses that hold a fixed number of children
override #initialize to allocate a fixed-size Array instead of the
default OrderedCollection.
"
Class {
#name : #PPCMDelegateNode,
#superclass : #PPCMNode,
#instVars : [
'children'
],
#category : #'PetitMarkdown-AST'
}
{ #category : #visiting }
PPCMDelegateNode >> accept: visitor [
^ visitor visitDelegate: self
]
{ #category : #accessing }
PPCMDelegateNode >> addChild: node [
"Append node; only CommonMark AST nodes are accepted."
self assert: node isCommonMarkNode.
self children add: node.
]
{ #category : #accessing }
PPCMDelegateNode >> addChildFirst: node [
"Prepend node; only CommonMark AST nodes are accepted."
self assert: node isCommonMarkNode.
self children addFirst: node.
]
{ #category : #accessing }
PPCMDelegateNode >> addChildren: nodes [
nodes do: [ :node | self addChild: node ]
]
{ #category : #enumerating }
PPCMDelegateNode >> allChildren [
"All nodes of the subtree, collected depth-first (leaves answer themselves)."
| retval |
retval := OrderedCollection new.
self children do: [ :child | retval addAll: child allChildren ].
^ retval
]
{ #category : #accessing }
PPCMDelegateNode >> child [
"The sole child; asserts that exactly one exists."
self assert: children size = 1.
^ children first
]
{ #category : #accessing }
PPCMDelegateNode >> child: whatever [
"Set the first child slot."
children at: 1 put: whatever
]
{ #category : #accessing }
PPCMDelegateNode >> children [
^ children
]
{ #category : #accessing }
PPCMDelegateNode >> children: whatever [
children := whatever
]
{ #category : #accessing }
PPCMDelegateNode >> firstChild [
^ children at: 1
]
{ #category : #initialization }
PPCMDelegateNode >> initialize [
"Default storage is a growable collection; fixed-arity subclasses replace it."
children := OrderedCollection new
]
{ #category : #replacing }
PPCMDelegateNode >> replace: child with: anotherChild [
"Replace the first child that is identical (==) to child.
Answers true when a replacement happened (via non-local return), else false."
children doWithIndex: [ :ch :index |
(ch == child) ifTrue: [
children at: index put: anotherChild .
^ true
]
].
^ false
]
{ #category : #accessing }
PPCMDelegateNode >> secondChild [
^ children at: 2
]
{ #category : #accessing }
PPCMDelegateNode >> thirdChild [
^ children at: 3
]

View File

@ -1,52 +0,0 @@
"
PPCMDocument is the root AST node of a parsed CommonMark document.
It provides a Glamorous Toolkit tree inspector view over its subtree.
"
Class {
#name : #PPCMDocument,
#superclass : #PPCMDelegateNode,
#category : #'PetitMarkdown-AST'
}
{ #category : #'as yet unclassified' }
PPCMDocument >> accept: visitor [
^ visitor visitDocument: self
]
{ #category : #testing }
PPCMDocument >> isBlockLevel [
^ true
]
{ #category : #accessing }
PPCMDocument >> viewBody [
"Inspector label for the tree view.
NOTE(review): each child's className is appended twice around '= \"' —
this looks copy-pasted from the text-bearing variants (which show
className = \"text\"); confirm whether child text was intended."
| aText |
aText := self className asRopedText.
self children do: [ :child |
aText append: ' ' asRopedText.
aText append: (child className asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append: ('= "' asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append: (child className asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append:
('"' asRopedText foreground:
BrGlamorousColors disabledButtonTextColor) ].
^ aText
]
{ #category : #accessing }
PPCMDocument >> viewChildrenFor: aView [
"GT inspector view: columned tree of the whole AST, expanded 7 levels."
<gtView>
children ifNil: [ ^ aView empty ].
^ aView columnedTree
title: 'Document tree';
priority: 1;
items: [ { self } ];
children: #children;
column: 'Name' text: #viewBody;
expandUpTo: 7
]

View File

@ -1,32 +0,0 @@
"
PPCMEmphasize is the AST node for emphasized inline content (*...* / _..._).
"
Class {
#name : #PPCMEmphasize,
#superclass : #PPCMDelegateNode,
#category : #'PetitMarkdown-AST'
}
{ #category : #visiting }
PPCMEmphasize >> accept: visitor [
^ visitor visitEmphasize: self
]
{ #category : #accessing }
PPCMEmphasize >> viewBody [
"Inspector label: class name, own text, then each child as text = \"text\", dimmed."
| aText |
aText := (self className ,' ', self text) asRopedText.
self children do: [ :child |
aText append: ' ' asRopedText.
aText append: (child text asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append: ('= "' asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append: (child text asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append:
('"' asRopedText foreground:
BrGlamorousColors disabledButtonTextColor)
].
^ aText
]

View File

@ -1,34 +0,0 @@
"
PPCMFencedCode is the AST node for a fenced code block (``` or ~~~),
holding its code lines as children and the fence's info string.
"
Class {
#name : #PPCMFencedCode,
#superclass : #PPCMDelegateNode,
#instVars : [
'infoString'
],
#category : #'PetitMarkdown-AST'
}
{ #category : #visiting }
PPCMFencedCode >> accept: visitor [
^ visitor visitFencedCode: self
]
{ #category : #accessing }
PPCMFencedCode >> code [
"hackity hack, this should not be used except for tests..."
^ String cr join: (self children collect: [ :e | e text ])
]
{ #category : #accessing }
PPCMFencedCode >> infoString [
"The text after the opening fence (e.g. the language tag)."
^ infoString
]
{ #category : #accessing }
PPCMFencedCode >> infoString: anObject [
infoString := anObject
]
{ #category : #testing }
PPCMFencedCode >> isBlockLevel [
^ true
]

View File

@ -1,32 +0,0 @@
"
PPCMHardBreak is the AST node for a hard line break.
"
Class {
#name : #PPCMHardBreak,
#superclass : #PPCMNode,
#category : #'PetitMarkdown-AST'
}
{ #category : #'as yet unclassified' }
PPCMHardBreak >> accept: visitor [
^ visitor visitHardBreak: self
]
{ #category : #accessing }
PPCMHardBreak >> viewBody [
"Inspector label.
NOTE(review): PPCMNode>>children answers #() and this class adds no child
storage, so the loop below never runs — likely boilerplate from a template."
| aText |
aText := (self className ,' ', self text) asRopedText.
self children do: [ :child |
aText append: ' ' asRopedText.
aText append: (child text asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append: ('= "' asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append: (child text asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append:
('"' asRopedText foreground:
BrGlamorousColors disabledButtonTextColor)
].
^ aText
]

View File

@ -1,67 +0,0 @@
"
PPCMHeader is the AST node for an ATX or Setext heading.
It stores the heading level (1-6) and holds the title line as its
single child (slot 1 of a fixed-size Array).
"
Class {
#name : #PPCMHeader,
#superclass : #PPCMDelegateNode,
#instVars : [
'level',
'title'
],
#category : #'PetitMarkdown-AST'
}
{ #category : #visiting }
PPCMHeader >> accept: visitor [
^ visitor visitHeader: self
]
{ #category : #initialization }
PPCMHeader >> initialize [
"Exactly one child slot: the title line."
super initialize.
children := Array new: 1.
]
{ #category : #accessing }
PPCMHeader >> isBlockLevel [
^ true
]
{ #category : #accessing }
PPCMHeader >> level [
"Heading level, 1 through 6."
^ level
]
{ #category : #accessing }
PPCMHeader >> level: anObject [
level := anObject
]
{ #category : #accessing }
PPCMHeader >> title [
"The title is stored as the single child, not in the 'title' instVar."
^ self child
]
{ #category : #accessing }
PPCMHeader >> title: anObject [
self children at: 1 put: anObject
]
{ #category : #accessing }
PPCMHeader >> viewBody [
"Inspector label: class name, level, then each child as className = \"text\"."
| aText |
aText := (self className, ' level: ', self level asString) asRopedText.
self children do: [ :child |
aText append: ' ' asRopedText.
aText append: (child className asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append: ('= "' asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append: (child text asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append:
('"' asRopedText foreground:
BrGlamorousColors disabledButtonTextColor) ].
^ aText
]

View File

@ -1,47 +0,0 @@
"
PPCMHrule is the AST node for a thematic break (horizontal rule),
keeping the literal rule text (e.g. '---').
"
Class {
#name : #PPCMHrule,
#superclass : #PPCMNode,
#instVars : [
'rule'
],
#category : #'PetitMarkdown-AST'
}
{ #category : #visiting }
PPCMHrule >> accept: visitor [
^ visitor visitHRule: self
]
{ #category : #accessing }
PPCMHrule >> rule [
"The literal characters that formed the rule."
^ rule
]
{ #category : #accessing }
PPCMHrule >> rule: anObject [
rule := anObject
]
{ #category : #accessing }
PPCMHrule >> viewBody [
"Inspector label.
NOTE(review): PPCMNode>>children answers #(), so the loop never runs;
if it ever did, 'child destination' would fail — boilerplate copied from
PPCMLink, most likely."
| aText |
aText := (self className ,' ',
self rule) asRopedText.
self children do: [ :child |
aText append: ' ' asRopedText.
aText append: (child destination asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append: ('= "' asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append: (child destination asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append:
('"' asRopedText foreground:
BrGlamorousColors disabledButtonTextColor)
].
^ aText
]

View File

@ -1,40 +0,0 @@
"
PPCMHtml is the AST node for a raw inline HTML fragment, stored verbatim.
"
Class {
#name : #PPCMHtml,
#superclass : #PPCMNode,
#instVars : [
'text'
],
#category : #'PetitMarkdown-AST'
}
{ #category : #visiting }
PPCMHtml >> accept: visitor [
^ visitor visitHtml: self
]
{ #category : #accessing }
PPCMHtml >> text [
"The raw HTML source."
^ text
]
{ #category : #accessing }
PPCMHtml >> text: anObject [
text := anObject
]
{ #category : #accessing }
PPCMHtml >> viewBody [
"Inspector label. NOTE(review): children is always #() for this PPCMNode
subclass, so the loop below is dead boilerplate."
| aText |
aText := (self className ,' ',
self text) asRopedText.
self children do: [ :child |
aText append: ' ' asRopedText.
aText append: (child className asRopedText foreground:
BrGlamorousColors disabledButtonTextColor)
].
^ aText
]

View File

@ -1,37 +0,0 @@
"
PPCMHtmlBlock is the AST node for a block-level raw HTML section,
holding its lines as children.
"
Class {
#name : #PPCMHtmlBlock,
#superclass : #PPCMDelegateNode,
#category : #'PetitMarkdown-AST'
}
{ #category : #'as yet unclassified' }
PPCMHtmlBlock >> accept: visitor [
^ visitor visitHtmlBlock: self
]
{ #category : #'as yet unclassified' }
PPCMHtmlBlock >> isBlockLevel [
^ true
]
{ #category : #accessing }
PPCMHtmlBlock >> viewBody [
"Inspector label: class name, own text, then each child as text = \"text\", dimmed."
| aText |
aText := (self className ,' ', self text) asRopedText.
self children do: [ :child |
aText append: ' ' asRopedText.
aText append: (child text asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append: ('= "' asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append: (child text asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append:
('"' asRopedText foreground:
BrGlamorousColors disabledButtonTextColor)
].
^ aText
]

View File

@ -1,42 +0,0 @@
"
PPCMIndentedCode is the AST node for an indented (4-space) code block.
"
Class {
#name : #PPCMIndentedCode,
#superclass : #PPCMDelegateNode,
#category : #'PetitMarkdown-AST'
}
{ #category : #'as yet unclassified' }
PPCMIndentedCode >> accept: visitor [
^ visitor visitIndentedCode: self
]
{ #category : #'as yet unclassified' }
PPCMIndentedCode >> code [
"The code content is the concatenated text of the line children."
^ self text
]
{ #category : #'as yet unclassified' }
PPCMIndentedCode >> isBlockLevel [
^ true
]
{ #category : #accessing }
PPCMIndentedCode >> viewBody [
"Inspector label: class name, own text, then each child as text = \"text\", dimmed."
| aText |
aText := (self className ,' ', self text) asRopedText.
self children do: [ :child |
aText append: ' ' asRopedText.
aText append: (child text asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append: ('= "' asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append: (child text asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append:
('"' asRopedText foreground:
BrGlamorousColors disabledButtonTextColor)
].
^ aText
]

View File

@ -1,28 +0,0 @@
"
PPCMInlinedCode is the AST node for an inline code span (`code`).
"
Class {
#name : #PPCMInlinedCode,
#superclass : #PPCMNode,
#instVars : [
'code'
],
#category : #'PetitMarkdown-AST'
}
{ #category : #visiting }
PPCMInlinedCode >> accept: visitor [
^ visitor visitInlinedCode: self
]
{ #category : #accessing }
PPCMInlinedCode >> code [
^ code
]
{ #category : #accessing }
PPCMInlinedCode >> code: anObject [
code := anObject
]
{ #category : #accessing }
PPCMInlinedCode >> text [
"The node's text is its code content (alias of #code)."
^ code
]

View File

@ -1,60 +0,0 @@
"
PPCMLine is the AST node for one source line, holding inline nodes as
children. A line whose concatenated text is empty counts as blank.
"
Class {
#name : #PPCMLine,
#superclass : #PPCMDelegateNode,
#category : #'PetitMarkdown-AST'
}
{ #category : #'as yet unclassified' }
PPCMLine class >> empty [
"An empty line: a line with a single empty text child."
^ PPCMLine new
addChild: (PPCMText empty);
yourself
]
{ #category : #visiting }
PPCMLine >> accept: visitor [
^ visitor visitLine: self
]
{ #category : #testing }
PPCMLine >> isBlankLine [
"A line is blank when it carries no text at all."
^ self text isEmpty
]
{ #category : #testing }
PPCMLine >> isLine [
^ true
]
{ #category : #visiting }
PPCMLine >> text [
"hackity hack, this should not be used except for tests...
Concatenation of all child texts."
^ children
inject: ''
into: [ :acc :each | acc , each text ]
]
{ #category : #accessing }
PPCMLine >> viewBody [
"Inspector label: class name, own text, then each child as text = \"text\", dimmed."
| body dim |
dim := [ :string |
string asRopedText foreground: BrGlamorousColors disabledButtonTextColor ].
body := (self className , ' ' , self text) asRopedText.
self children do: [ :each |
body
append: ' ' asRopedText;
append: (dim value: each text);
append: (dim value: '= "');
append: (dim value: each text);
append: (dim value: '"') ].
^ body
]

View File

@ -1,78 +0,0 @@
"
PPCMLink is the AST node for an inline link: [label](destination \"title\").
"
Class {
#name : #PPCMLink,
#superclass : #PPCMNode,
#instVars : [
'label',
'destination',
'title'
],
#category : #'PetitMarkdown-AST'
}
{ #category : #visiting }
PPCMLink >> accept: visitor [
^ visitor visitLink: self
]
{ #category : #accessing }
PPCMLink >> destination [
"The link target URL."
^ destination
]
{ #category : #accessing }
PPCMLink >> destination: anObject [
destination := anObject
]
{ #category : #accessing }
PPCMLink >> label [
"The link label node (its #text is the visible label)."
^ label
]
{ #category : #accessing }
PPCMLink >> label: anObject [
label := anObject
]
{ #category : #printing }
PPCMLink >> printOn: aStream [
"Print as the markdown source form: [label](destination).
Fixed: this method erroneously sent 'super initialize' (a copy-paste
slip) — printOn: must not touch the receiver's state. Also
recategorized from #accessing to #printing."
^ aStream
nextPutAll:
'[',self label text,']',
'(',self destination,')'
]
{ #category : #accessing }
PPCMLink >> title [
"Optional link title, or nil."
^ title
]
{ #category : #accessing }
PPCMLink >> title: anObject [
title := anObject
]
{ #category : #accessing }
PPCMLink >> viewBody [
"Inspector label: 'label -> destination'.
NOTE(review): children is always #() for this PPCMNode subclass, so the
loop below is dead boilerplate."
| aText |
aText := (self className ,' ',
self label children first text, ' -> ', self destination) asRopedText.
self children do: [ :child |
aText append: ' ' asRopedText.
aText append: (child destination asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append: ('= "' asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append: (child destination asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append:
('"' asRopedText foreground:
BrGlamorousColors disabledButtonTextColor)
].
^ aText
]

View File

@ -1,40 +0,0 @@
"
PPCMLinkRef is the AST node for a reference-style link usage ([label][ref]).
"
Class {
#name : #PPCMLinkRef,
#superclass : #PPCMNode,
#instVars : [
'label'
],
#category : #'PetitMarkdown-AST'
}
{ #category : #'as yet unclassified' }
PPCMLinkRef >> accept: visitor [
^ visitor visitLinkRef: self
]
{ #category : #accessing }
PPCMLinkRef >> label [
"The reference label node."
^ label
]
{ #category : #accessing }
PPCMLinkRef >> label: anObject [
label := anObject
]
{ #category : #accessing }
PPCMLinkRef >> viewBody [
"Inspector label. NOTE(review): children is always #() for this PPCMNode
subclass, so the loop is dead boilerplate."
| aText |
aText := (self className ,' ',
self label text) asRopedText.
self children do: [ :child |
aText append: ' ' asRopedText.
aText append: (child className asRopedText foreground:
BrGlamorousColors disabledButtonTextColor)
].
^ aText
]

View File

@ -1,74 +0,0 @@
"
PPCMLinkRefDef is the AST node for a link reference definition:
[label]: destination \"title\". It is block-level.
"
Class {
#name : #PPCMLinkRefDef,
#superclass : #PPCMNode,
#instVars : [
'label',
'destination',
'title'
],
#category : #'PetitMarkdown-AST'
}
{ #category : #visiting }
PPCMLinkRefDef >> accept: visitor [
^ visitor visitLinkRefDef: self
]
{ #category : #accessing }
PPCMLinkRefDef >> destination [
"The URL this reference resolves to."
^ destination
]
{ #category : #accessing }
PPCMLinkRefDef >> destination: anObject [
destination := anObject
]
{ #category : #testing }
PPCMLinkRefDef >> isBlockLevel [
^ true
]
{ #category : #accessing }
PPCMLinkRefDef >> label [
"The reference label node."
^ label
]
{ #category : #accessing }
PPCMLinkRefDef >> label: anObject [
label := anObject
]
{ #category : #accessing }
PPCMLinkRefDef >> title [
"Optional title, or nil."
^ title
]
{ #category : #accessing }
PPCMLinkRefDef >> title: anObject [
title := anObject
]
{ #category : #accessing }
PPCMLinkRefDef >> viewBody [
"Inspector label: 'label -> destination'.
NOTE(review): children is always #() for this PPCMNode subclass, so the
loop is dead boilerplate."
| aText |
aText := (self className ,' ',
self label text, ' -> ', self destination) asRopedText.
self children do: [ :child |
aText append: ' ' asRopedText.
aText append: (child destination asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append: ('= "' asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append: (child destination asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append:
('"' asRopedText foreground:
BrGlamorousColors disabledButtonTextColor)
].
^ aText
]

View File

@ -1,31 +0,0 @@
"
PPCMLinkRefDefPlaceholder marks the spot where a link reference
definition stood, after the definition itself has been collected.
"
Class {
#name : #PPCMLinkRefDefPlaceholder,
#superclass : #PPCMNode,
#category : #'PetitMarkdown-AST'
}
{ #category : #'as yet unclassified' }
PPCMLinkRefDefPlaceholder >> accept: visitor [
^ visitor visitLinkRefDefPlaceholder: self
]
{ #category : #'as yet unclassified' }
PPCMLinkRefDefPlaceholder >> isBlockLevel [
^ true
]
{ #category : #accessing }
PPCMLinkRefDefPlaceholder >> viewBody [
"Inspector label. NOTE(review): children is always #() for this PPCMNode
subclass, so the loop is dead boilerplate."
| aText |
aText := (self className ) asRopedText.
self children do: [ :child |
aText append: ' ' asRopedText.
aText append: (child className asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
].
^ aText
]

View File

@ -1,111 +0,0 @@
"
PPCMList is the AST node for a bullet or ordered list. It knows its
marker type, its start number (for ordered lists) and whether it is
tight or loose in the CommonMark sense: a list is loose when a blank
line separates two block-level elements either between its items or
inside any single item.
"
Class {
#name : #PPCMList,
#superclass : #PPCMDelegateNode,
#instVars : [
'type',
'start'
],
#category : #'PetitMarkdown-AST'
}
{ #category : #visiting }
PPCMList >> accept: visitor [
^ visitor visitList: self
]
{ #category : #private }
PPCMList >> containsLooseTriple: nodes [
"Answer true when nodes contains, in direct succession, a block-level
node, a blank line, and another block-level node — the CommonMark
signal for looseness. Shared by isLooseList and isLooseItem: (which
previously duplicated this scan)."
| size |
size := nodes size.
size < 3 ifTrue: [ ^ false ].
1 to: size - 2 do: [ :idx |
((nodes at: idx) isBlockLevel and:
[(nodes at: idx + 1) isBlankLine and:
[(nodes at: idx + 2) isBlockLevel] ]) ifTrue: [ ^ true ]
].
^ false
]
{ #category : #accessing }
PPCMList >> isBlockLevel [
^ true
]
{ #category : #'as yet unclassified' }
PPCMList >> isLooseItem: item [
"An item is loose when its inner document contains a block/blank/block run."
"empty item case"
item children isEmpty ifTrue: [ ^ false ].
^ self containsLooseTriple: item child children
]
{ #category : #'as yet unclassified' }
PPCMList >> isLooseList [
"The list itself is loose when its direct children contain a block/blank/block run."
^ self containsLooseTriple: children
]
{ #category : #'as yet unclassified' }
PPCMList >> isTight [
"A list is tight when neither the list itself nor any of its items is loose."
"blanks in the list?"
self isLooseList ifTrue: [
^ false
].
"blanks in the items?"
self children do: [ :listItem |
(self isLooseItem: listItem) ifTrue: [
^ false
]
].
^ true
]
{ #category : #accessing }
PPCMList >> start [
"Start number of an ordered list."
^ start
]
{ #category : #accessing }
PPCMList >> start: anObject [
start := anObject
]
{ #category : #accessing }
PPCMList >> type [
"Marker type string (bullet vs. ordered)."
^ type
]
{ #category : #accessing }
PPCMList >> type: string [
type := string
]
{ #category : #accessing }
PPCMList >> viewBody [
"Inspector label: class name, type, then each child's class name, dimmed."
| aText |
aText := (self className, ' type: ', self type) asRopedText.
self children do: [ :child |
aText append: ' ' asRopedText.
aText append: (child className asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append: ('= "' asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append: (child className asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append:
('"' asRopedText foreground:
BrGlamorousColors disabledButtonTextColor) ].
^ aText
]

View File

@ -1,43 +0,0 @@
"
PPCMListItem is the AST node for one list item; it holds exactly one
child (its inner document) in a fixed one-slot Array.
"
Class {
#name : #PPCMListItem,
#superclass : #PPCMDelegateNode,
#category : #'PetitMarkdown-AST'
}
{ #category : #visiting }
PPCMListItem >> accept: visitor [
^ visitor visitListItem: self
]
{ #category : #'as yet unclassified' }
PPCMListItem >> initialize [
"Exactly one child slot: the item's inner document."
super initialize.
children := Array new: 1.
]
{ #category : #'as yet unclassified' }
PPCMListItem >> isBlockLevel [
^ true
]
{ #category : #accessing }
PPCMListItem >> viewBody [
"Inspector label: class name, then each child's class name, dimmed."
| aText |
aText := self className asRopedText.
self children do: [ :child |
aText append: ' ' asRopedText.
aText append: (child className asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append: ('= "' asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append: (child className asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append:
('"' asRopedText foreground:
BrGlamorousColors disabledButtonTextColor) ].
^ aText
]

View File

@ -1,67 +0,0 @@
"
PPCMNode is the abstract root of the CommonMark AST hierarchy. Leaf
nodes inherit childless behavior (#children answers #()); nodes with
children subclass PPCMDelegateNode instead. The is* testing methods
give safe defaults that specific subclasses override.
"
Class {
#name : #PPCMNode,
#superclass : #Object,
#category : #'PetitMarkdown-AST'
}
{ #category : #visiting }
PPCMNode >> accept: visitor [
"Double-dispatch entry point; subclasses route to a specific visit method."
^ visitor visitNode: self
]
{ #category : #enumerating }
PPCMNode >> allChildren [
"A leaf contributes only itself to the flattened subtree."
^ Array with: self
]
{ #category : #accessing }
PPCMNode >> children [
"Leaves have no children."
^ #()
]
{ #category : #gt }
PPCMNode >> gtTreeViewIn: composite [
"GTInspector (legacy Glamour) tree presentation of the subtree."
<gtInspectorPresentationOrder: 40>
composite tree
title: 'Tree';
children: [:n | n children ];
format: [:n| n name ifNil: [ n asString ] ifNotNil: [n name] ];
shouldExpandToLevel: 6
]
{ #category : #testing }
PPCMNode >> isBlankLine [
^ false
]
{ #category : #testing }
PPCMNode >> isBlockLevel [
^ false
]
{ #category : #testing }
PPCMNode >> isCommonMarkNode [
"Marker used by PPCMDelegateNode's add-child assertions."
^ true
]
{ #category : #testing }
PPCMNode >> isLine [
^ false
]
{ #category : #testing }
PPCMNode >> isParagraph [
^ false
]
{ #category : #replacing }
PPCMNode >> replace: child with: anotherChild [
"Leaves have nothing to replace; answer false (no replacement happened)."
^ false
]
{ #category : #accessing }
PPCMNode >> text [
"hackity hack, this should not be used except for tests..."
^ String cr join: (self children collect: [ :e | e text ])
]

View File

@ -1,47 +0,0 @@
"
PPCMParagraph is the AST node for a paragraph, holding inline/line
children.
"
Class {
#name : #PPCMParagraph,
#superclass : #PPCMDelegateNode,
#category : #'PetitMarkdown-AST'
}
{ #category : #visiting }
PPCMParagraph >> accept: visitor [
^ visitor visitParagraph: self
]
{ #category : #accessing }
PPCMParagraph >> isBlockLevel [
^ true
]
{ #category : #testing }
PPCMParagraph >> isParagraph [
^ true
]
{ #category : #accessing }
PPCMParagraph >> text [
"hackity hack, this should not be used except for tests..."
^ String cr join: (self children collect: [ :e | e text ])
]
{ #category : #accessing }
PPCMParagraph >> viewBody [
"Inspector label: class name, then each child as className = \"text\", dimmed."
| aText |
aText := self className asRopedText.
self children do: [ :child |
aText append: ' ' asRopedText.
aText append: (child className asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append: ('= "' asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append: (child text asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append:
('"' asRopedText foreground:
BrGlamorousColors disabledButtonTextColor) ].
^ aText
]

View File

@ -1,62 +0,0 @@
"
PPCMPlainLine is a raw, not-yet-inline-parsed source line holding its
text directly (contrast with PPCMLine, which holds inline children).
"
Class {
#name : #PPCMPlainLine,
#superclass : #PPCMNode,
#instVars : [
'text'
],
#category : #'PetitMarkdown-AST'
}
{ #category : #'as yet unclassified' }
PPCMPlainLine class >> empty [
"A blank plain line."
^ self new
text: '';
yourself
]
{ #category : #visiting }
PPCMPlainLine >> accept: visitor [
^ visitor visitPlainLine: self
]
{ #category : #accessing }
PPCMPlainLine >> isBlankLine [
^ self text = ''
]
{ #category : #accessing }
PPCMPlainLine >> isLine [
^ true
]
{ #category : #visiting }
PPCMPlainLine >> text [
^ text
]
{ #category : #visiting }
PPCMPlainLine >> text: whatever [
text := whatever
]
{ #category : #accessing }
PPCMPlainLine >> viewBody [
"Inspector label. NOTE(review): children is always #() for this PPCMNode
subclass, so the loop is dead boilerplate."
| aText |
aText := (self className ,' ', self text) asRopedText.
self children do: [ :child |
aText append: ' ' asRopedText.
aText append: (child text asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append: ('= "' asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append: (child text asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append:
('"' asRopedText foreground:
BrGlamorousColors disabledButtonTextColor) ].
^ aText
]

View File

@ -1,45 +0,0 @@
"
PPCMPlainText is a raw, not-yet-inline-parsed text leaf (contrast with
PPCMText, the fully parsed text leaf).
"
Class {
#name : #PPCMPlainText,
#superclass : #PPCMNode,
#instVars : [
'text'
],
#category : #'PetitMarkdown-AST'
}
{ #category : #'as yet unclassified' }
PPCMPlainText class >> empty [
"An empty text leaf."
^ self new
text: '';
yourself
]
{ #category : #visiting }
PPCMPlainText >> accept: visitor [
^ visitor visitPlainText: self
]
{ #category : #converting }
PPCMPlainText >> asString [
^ text
]
{ #category : #printing }
PPCMPlainText >> printOn: aStream [
"Prints as: aPPCMPlainText('its text')."
super printOn: aStream.
aStream nextPut: $(.
aStream nextPut: $'.
text isNil ifFalse: [ aStream nextPutAll: text ].
aStream nextPut: $'.
aStream nextPut: $).
]
{ #category : #accessing }
PPCMPlainText >> text [
^ text
]
{ #category : #accessing }
PPCMPlainText >> text: anObject [
text := anObject
]

View File

@ -1,33 +0,0 @@
"
PPCMSoftBreak is the AST node for a soft line break.
"
Class {
#name : #PPCMSoftBreak,
#superclass : #PPCMNode,
#category : #'PetitMarkdown-AST'
}
{ #category : #'as yet unclassified' }
PPCMSoftBreak >> accept: visitor [
^ visitor visitSoftBreak: self
]
{ #category : #accessing }
PPCMSoftBreak >> viewBody [
"Inspector label. NOTE(review): children is always #() for this PPCMNode
subclass, so the loop is dead boilerplate."
| aText |
aText := (self className ,' ', self text) asRopedText.
self children do: [ :child |
aText append: ' ' asRopedText.
aText append: (child text asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append: ('= "' asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append: (child text asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append:
('"' asRopedText foreground:
BrGlamorousColors disabledButtonTextColor)
].
^ aText
]

View File

@ -1,27 +0,0 @@
"
PPCMStrong is the AST node for strongly emphasized inline content (**...**).
"
Class {
#name : #PPCMStrong,
#superclass : #PPCMDelegateNode,
#category : #'PetitMarkdown-AST'
}
{ #category : #visiting }
PPCMStrong >> accept: visitor [
^ visitor visitStrong: self
]
{ #category : #accessing }
PPCMStrong >> viewBody [
"Inspector label: class name, own text, then each child's class name, dimmed."
| aText |
aText := (self className ,' ',
self text) asRopedText.
self children do: [ :child |
aText append: ' ' asRopedText.
aText append: (child className asRopedText foreground:
BrGlamorousColors disabledButtonTextColor)
].
^ aText
]

View File

@ -1,78 +0,0 @@
"
PPCMText is the fully parsed text leaf of the AST.
"
Class {
#name : #PPCMText,
#superclass : #PPCMNode,
#instVars : [
'text'
],
#category : #'PetitMarkdown-AST'
}
{ #category : #'as yet unclassified' }
PPCMText class >> empty [
"An empty text leaf."
^ self new
text: '';
yourself
]
{ #category : #comparing }
PPCMText >> = anObject [
"Delegates equality to the underlying text.
NOTE(review): this is asymmetric — aPPCMText = 'foo' can be true while
'foo' = aPPCMText is false; confirm callers rely on this before changing."
^ text = anObject
]
{ #category : #visiting }
PPCMText >> accept: visitor [
^ visitor visitText: self
]
{ #category : #converting }
PPCMText >> asString [
^ text
]
{ #category : #comparing }
PPCMText >> hash [
"Consistent with #=: hash of the underlying text."
^ text hash
]
{ #category : #printing }
PPCMText >> printOn: aStream [
"Prints as: aPPCMText('its text')."
super printOn: aStream.
aStream nextPut: $(.
aStream nextPut: $'.
text isNil ifFalse: [ aStream nextPutAll: text ].
aStream nextPut: $'.
aStream nextPut: $).
]
{ #category : #accessing }
PPCMText >> text [
"Never answers nil; an unset text reads as empty."
^ text ifNil: [ '' ]
]
{ #category : #accessing }
PPCMText >> text: anObject [
text := anObject
]
{ #category : #accessing }
PPCMText >> viewBody [
"Inspector label. NOTE(review): children is always #() for this PPCMNode
subclass, so the loop is dead boilerplate."
| aText |
aText := (self className ,' ', self text) asRopedText.
self children do: [ :child |
aText append: ' ' asRopedText.
aText append: (child text asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append: ('= "' asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append: (child text asRopedText foreground:
BrGlamorousColors disabledButtonTextColor).
aText append:
('"' asRopedText foreground:
BrGlamorousColors disabledButtonTextColor)
].
^ aText
]

View File

@ -1,743 +0,0 @@
"
PPCommonMarkBlockParser is a PetitParser composite parser for the
block-level structure of CommonMark documents. Each instance variable
names a production; PPCompositeParser wires them together by reflecting
on same-named methods.
"
Class {
#name : #PPCommonMarkBlockParser,
#superclass : #PPCompositeParser,
#instVars : [
'space',
'lineEnd',
'linePrefix',
'indentedCode',
'fencedCode',
'codeFirstFenceIndent',
'newline',
'codeFenceStart',
'infoString',
'prefix',
'codeFenceStop',
'codeFenceIndent',
'codeLine',
'prefixedEmptyLine',
'documentEnd',
'codeIndent',
'emptyLine',
'contentElement',
'horizontalRule',
'quoteBlock',
'code',
'list',
'htmlBlock',
'header',
'linkRefDef',
'paragraph',
'document',
'ATXHeader',
'setextHeader',
'setexLine',
'setextHeaderUnderline',
'listItem',
'htmlTag',
'plainLine',
'quoteDedent',
'quote',
'listBegin',
'listEmptyItem',
'listEnd',
'listOrderedMarker',
'listBulletMarker',
'listMarker',
'listDoubleBlanks',
'listBullet',
'listContent',
'listItemEnd',
'quoteIndent',
'paragraphLine',
'lazyParagraphPrefix',
'content',
'linkLabel',
'linkDestination',
'linkTitle',
'lineStart',
'linkQuoteStart',
'linkQuoteStop',
'htmlBlockLine',
'abstractLinkTitle'
],
#category : #'PetitMarkdown-Parser'
}
{ #category : #headers }
PPCommonMarkBlockParser >> ATXHeader [
| begin end title |
begin := (($# asParser plus) setMax: 6).
end := ((space, $# asParser plus) optional trimRight: space), lineEnd.
title := end negate plus flatten asPPCMPlainLine.
^ linePrefix, begin, (end not, space, title ==> #last) optional, end
map: [ :_lp :level :_titleLine :_end |
| size titleLine |
size := level size.
titleLine := _titleLine ifNil: [ PPCMPlainLine empty ].
PPCMHeader new
level: size;
title: titleLine;
yourself
]
]
{ #category : #links }
PPCommonMarkBlockParser >> abstractLinkTitle [
^ (space preceeds / lineStart),
linkQuoteStart,
( (linkQuoteStop / (lineEnd, emptyLine)) not,
(('\' asParser, linkQuoteStop) / #any asParser)
) plus flatten,
linkQuoteStop
==> [ :e | self decodeEntities: (self escape: (e third)) ]
]
{ #category : #code }
PPCommonMarkBlockParser >> code [
^ indentedCode / fencedCode
]
{ #category : #code }
PPCommonMarkBlockParser >> codeFenceIndent [
^ [ :context |
context codeFenceIndent parseOn: context
] asParser
]
{ #category : #code }
PPCommonMarkBlockParser >> codeFenceStart [
| tilde eh |
tilde := ($~ asParser min: 3) >=> [ :context :cc |
| retval |
retval := cc value.
retval isPetitFailure ifFalse: [
context codeFence: ($~ asParser min: retval size).
].
retval
].
eh := ($` asParser min: 3) >=> [ :context :cc |
| retval |
retval := cc value.
retval isPetitFailure ifFalse: [
context codeFence: ($` asParser min: retval size).
].
retval
].
^ codeFirstFenceIndent, (tilde / eh)
]
{ #category : #code }
PPCommonMarkBlockParser >> codeFenceStop [
^ ([ :context |
context codeFence parseOn: context
] asParser trimRight: space), lineEnd and
]
{ #category : #code }
PPCommonMarkBlockParser >> codeFirstFenceIndent [
^ (space max: 3) >=> [ :context :cc |
| result |
result := cc value.
result isPetitFailure ifFalse: [
context codeFenceIndent: (space max: result size).
].
result
]
]
{ #category : #code }
PPCommonMarkBlockParser >> codeIndent [
^ ' ' asParser / Character tab asParser
]
{ #category : #code }
PPCommonMarkBlockParser >> codeLine [
^ newline negate star flatten
map: [ :_text |
| textNode |
textNode := PPCMText new
text: (self encodeEntities: _text);
yourself.
PPCMLine new
addChild: textNode;
yourself
]
]
{ #category : #document }
PPCommonMarkBlockParser >> content [
^ contentElement,
((prefix, contentElement) nonEmpty ==> #second) star
map: [ :first :rest |
| |
PPCMContainer new
addChild: first;
addChildren: rest;
yourself
]
]
{ #category : #document }
PPCommonMarkBlockParser >> contentElement [
^
horizontalRule /
quoteBlock /
code /
list /
htmlBlock /
header /
linkRefDef /
paragraph /
((emptyLine, lineEnd) ==> #first)
]
{ #category : #support }
PPCommonMarkBlockParser >> decodeEntities: string [
^ PPCommonMarkUtils instance decodeEntities: string
]
{ #category : #document }
PPCommonMarkBlockParser >> document [
^ ((prefix, contentElement) nonEmpty ==> #second) star
map: [ :elems |
PPCMDocument new
addChildren: elems;
yourself
]
]
{ #category : #'lines and whitespace' }
PPCommonMarkBlockParser >> documentEnd [
^ #eof asParser
]
{ #category : #'lines and whitespace' }
PPCommonMarkBlockParser >> emptyLine [
^ space star, #endOfLine asParser ==> [ :e |
PPCMPlainLine empty
]
]
{ #category : #support }
PPCommonMarkBlockParser >> encodeEntities: string [
^ PPCommonMarkUtils instance encodeEntities: string
]
{ #category : #support }
PPCommonMarkBlockParser >> escape: string [
^ PPCommonMarkUtils instance escape: string
]
{ #category : #support }
PPCommonMarkBlockParser >> escapeUrl: string [
^ PPCommonMarkUtils instance escapeUrl: string
]
{ #category : #code }
PPCommonMarkBlockParser >> fencedCode [
^ linePrefix and, codeFenceStart, infoString optional, lineEnd,
(
((
(prefix, linePrefix, codeFenceStop) not, prefix, codeFenceIndent, codeLine, lineEnd) ==> #fourth /
(prefixedEmptyLine, lineEnd ==> #first)
) nonEmpty
) star,
((((prefix, linePrefix, codeFenceStop) / documentEnd), lineEnd) / prefix not)
map: [ :_lp :_fenceStart :_info :_le :_code :_fenceStop |
PPCMFencedCode new
infoString: _info;
addChildren: _code;
yourself
]
]
{ #category : #headers }
PPCommonMarkBlockParser >> header [
^ ATXHeader / setextHeader
]
{ #category : #'horizontal rule' }
PPCommonMarkBlockParser >> horizontalRule [
| stars minus under |
stars := '*' asParser, (('*' asParser trim: space) min: 2).
minus := '-' asParser, (('-' asParser trim: space) min: 2).
under := '_' asParser, (('_' asParser trim: space) min: 2).
^ linePrefix, ((stars / minus / under) flatten), space star, lineEnd
map: [ :_prefix :_hrule :_space :_le |
PPCMHrule new
rule: _hrule;
yourself
]
]
{ #category : #'html blocks' }
PPCommonMarkBlockParser >> htmlBlock [
^ (linePrefix, htmlTag) and, htmlBlockLine, lineEnd,
(prefix, (emptyLine not), htmlBlockLine, lineEnd ==> #third) star
map: [ :_pred :_line :_le :_rest |
PPCMHtmlBlock new
addChild: _line;
addChildren: _rest;
yourself
]
]
{ #category : #'html blocks' }
PPCommonMarkBlockParser >> htmlBlockLine [
^ newline negate star flatten
map: [ :_text |
| text |
text := PPCMText new
text: _text;
yourself.
PPCMLine new
addChild: text;
yourself
]
]
{ #category : #'html blocks' }
PPCommonMarkBlockParser >> htmlTag [
^ '<table' asParser /
'<tr' asParser /
'<td' asParser /
'<div' asParser /
'<DIV' asParser /
'<p' asParser /
'</table' asParser /
'</tr' asParser /
'</td' asParser /
'</div' asParser /
'</DIV' asParser /
'</p' asParser /
'<!--' asParser /
('<?' asParser, #letter asParser plus) /
'<![CDATA[' asParser
]
{ #category : #code }
PPCommonMarkBlockParser >> indentedCode [
^ codeIndent, emptyLine not, codeLine, lineEnd,
(
((prefix, codeIndent, codeLine, lineEnd) ==> #third) /
((prefix, emptyLine, lineEnd) nonEmpty ==> #second)
) star
map: [ :_cp :_pred :_first :_le :_rest |
PPCMIndentedCode new
addChild: _first;
addChildren: _rest;
yourself
]
]
{ #category : #code }
PPCommonMarkBlockParser >> infoString [
^ ((lineEnd / space / codeFenceStop / $` asParser) negate plus trimBlanks flatten),
(lineEnd / $` asParser) negate star ==> [:e | self decodeEntities: e first ]
]
{ #category : #paragraphs }
PPCommonMarkBlockParser >> lazyParagraphPrefix [
^ (prefix, quoteIndent) not,
(quote / space) star
]
{ #category : #'lines and whitespace' }
PPCommonMarkBlockParser >> lineEnd [
^ newline / documentEnd
]
{ #category : #'lines and whitespace' }
PPCommonMarkBlockParser >> linePrefix [
^ ((PPPossessiveRepeatingParser on: (#blank asParser ))
setMax: 3;
yourself),
(#blank asParser not)
==> #first
]
{ #category : #'lines and whitespace' }
PPCommonMarkBlockParser >> lineStart [
^ #startOfLine asParser
]
{ #category : #links }
PPCommonMarkBlockParser >> linkDestination [
| parens escapedParen |
"TODO: fix?"
escapedParen := '\(' asParser / '\)' asParser.
parens := PPDelegateParser new
name: 'parens';
yourself.
"Parens cannot be nested!"
parens setParser: $( asParser, (($( asParser / $) asParser) not, (escapedParen / #any asParser)) star, $) asParser.
^ (($< asParser, ((($> asParser / newline) not, #any asParser) star) flatten, $> asParser)
==> [ :e | self escapeUrl: (self escape: e second) ]) /
((space / lineEnd / $) asParser) not, (parens / escapedParen / $( asParser negate)) plus flatten
==> [ :e | self escapeUrl: (self escape: e) ]
]
{ #category : #links }
PPCommonMarkBlockParser >> linkLabel [
| label |
label := ($] asParser not, ('\]' asParser / #any asParser)) star flatten.
^ $[ asParser, label, $] asParser
map: [ :_start :_label :_end |
PPCMText new
text: (self escape: _label);
yourself
]
]
{ #category : #links }
PPCommonMarkBlockParser >> linkQuoteStart [
^ PPFailingParser message: 'abstract quote start'.
]
{ #category : #links }
PPCommonMarkBlockParser >> linkQuoteStop [
^ PPFailingParser message: 'abstract quote stop'
]
{ #category : #links }
PPCommonMarkBlockParser >> linkRefDef [
^ (linePrefix, linkLabel, ':' asParser, (lineEnd optional trim: space), linkDestination, ((lineEnd optional trim: space), linkTitle ==> #second) optional, space star, lineEnd
map: [ :_lp :_label :_semicolon :_ws1 :_dest :_title :_ws3 :_le |
PPCMLinkRefDef new
label: _label;
title: _title;
destination: _dest;
yourself.
])
>=> [ :context :cc |
| retval |
retval := cc value.
retval isPetitFailure ifFalse: [
context registerLink: retval.
retval := PPCMLinkRefDefPlaceholder new.
].
retval
]
]
{ #category : #links }
PPCommonMarkBlockParser >> linkTitle [
^
((abstractLinkTitle
where: linkQuoteStart is: $" asParser)
where: linkQuoteStop is: $" asParser) /
((abstractLinkTitle
where: linkQuoteStart is: $' asParser)
where: linkQuoteStop is: $' asParser) /
((abstractLinkTitle
where: linkQuoteStart is: $( asParser)
where: linkQuoteStop is: $) asParser)
]
{ #category : #lists }
PPCommonMarkBlockParser >> list [
^
listBegin,
listItem,
(
(prefix, listItem ==> #second) /
"empty item is part of the list only if followed by normal item"
(listEmptyItem, (prefix, listItem) and ==> #first)
) star,
listEnd
map: [ :_start :_first :_rest :_end |
PPCMList new
type: _start second;
start: _start first;
addChild: _first;
addChildren: _rest;
yourself
]
]
{ #category : #lists }
PPCommonMarkBlockParser >> listBegin [
^ (linePrefix, (listOrderedMarker / listBulletMarker)) and ==> #second >=> [ :context :cc |
| retval |
retval := cc value.
retval isPetitFailure ifFalse: [
context listItemType: (retval third).
].
retval
]
]
{ #category : #lists }
PPCommonMarkBlockParser >> listBullet [
^
"push content as spaces on the indent stack"
(
(linePrefix, listMarker, space, linePrefix optional) flatten and
==> [:e | self spaces: (e size)]
/
(linePrefix, listMarker, lineEnd) flatten and
==> [:e | self spaces: (e size)]
) pushAsParser,
"Consume marker and one space"
(linePrefix, listMarker, (space / lineEnd and))
]
{ #category : #lists }
PPCommonMarkBlockParser >> listBulletMarker [
^
($- asParser /
$* asParser /
$+ asParser)
"Start . type . parser to accept the same type"
==> [ :e | { nil . #unordered . e asParser } ]
]
{ #category : #lists }
PPCommonMarkBlockParser >> listContent [
^
contentElement,
(
((prefix, contentElement) nonEmpty ==> #second) /
"Empty line of the list content is part of the content only if followed by non-empty line"
((prefixedEmptyLine, lineEnd, (prefix, contentElement) and) nonEmpty
==> #first)
) star
map: [ :_first :_rest |
| |
PPCMContainer new
addChild: _first;
addChildren: _rest;
yourself
]
]
{ #category : #lists }
PPCommonMarkBlockParser >> listDoubleBlanks [
^
(prefixedEmptyLine, lineEnd) nonEmpty,
(prefixedEmptyLine, lineEnd) nonEmpty
]
{ #category : #lists }
PPCommonMarkBlockParser >> listEmptyItem [
^ (listDoubleBlanks not, prefixedEmptyLine, lineEnd) nonEmpty ==> #second
]
{ #category : #lists }
PPCommonMarkBlockParser >> listEnd [
^ [ :context |
context listItemStack pop
] asParser
]
{ #category : #lists }
PPCommonMarkBlockParser >> listItem [
^ horizontalRule not, listBullet, listContent, listItemEnd
map: [ :_pred :_bullet :_content :_end |
PPCMListItem new
child: _content;
yourself
]
]
{ #category : #lists }
PPCommonMarkBlockParser >> listItemEnd [
^ [ :context | context indentStack pop ] asParser
]
{ #category : #lists }
PPCommonMarkBlockParser >> listMarker [
^ [ :context | context listItemType parseOn: context ] asParser
]
{ #category : #lists }
PPCommonMarkBlockParser >> listOrderedMarker [
| dot bracket |
dot := #digit asParser plus flatten, $. asParser.
bracket := #digit asParser plus flatten, $) asParser.
"Start . type . parser to accept the same type"
^ (dot ==> [ :e | { e first asNumber . #ordered . dot } ]) /
(bracket ==> [ :e | { e first asNumber . #ordered . bracket } ])
]
{ #category : #'lines and whitespace' }
PPCommonMarkBlockParser >> newline [
^ #newline asParser
]
{ #category : #paragraphs }
PPCommonMarkBlockParser >> paragraph [
^ linePrefix, (emptyLine) not, paragraphLine trimBlanks, lineEnd,
(
(prefix / lazyParagraphPrefix),
(emptyLine / ATXHeader / horizontalRule / fencedCode / htmlBlock / list / quote) not,
paragraphLine trimBlanks,
lineEnd ==> #third
) nonEmpty star
map: [ :_lp :_pred :_line :_end :_rest |
| para |
para := PPCMParagraph new.
para addChild: _line.
_rest do: [ :anotherLine | para addChild: anotherLine ].
para
]
]
{ #category : #paragraphs }
PPCommonMarkBlockParser >> paragraphLine [
^ plainLine
]
{ #category : #'lines and whitespace' }
PPCommonMarkBlockParser >> plainLine [
^ newline negate star flatten
map: [ :_text |
PPCMPlainLine new
text: _text;
yourself
]
]
{ #category : #'lines and whitespace' }
PPCommonMarkBlockParser >> prefix [
^ #prefix asParser
]
{ #category : #'lines and whitespace' }
PPCommonMarkBlockParser >> prefixedEmptyLine [
"empty line with appropriate number of quotes, but with arbitrary whitespaces"
^ (quoteDedent not, (quote / space) star, #endOfLine asParser) ==> [ :e | PPCMPlainLine empty ]
]
{ #category : #quotes }
PPCommonMarkBlockParser >> quote [
^ (linePrefix, $> asParser, space optional) flatten
]
{ #category : #quotes }
PPCommonMarkBlockParser >> quoteBlock [
^ quoteIndent,
content,
quoteDedent
map: [ :indent :_content :dedent |
PPCMBlockQuote new
child: _content;
yourself
]
]
{ #category : #quotes }
PPCommonMarkBlockParser >> quoteDedent [
^ (prefix not, quote pop) flatten
]
{ #category : #quotes }
PPCommonMarkBlockParser >> quoteIndent [
^ (quote ==> [ :e | quote ]) pushAsParser
]
{ #category : #headers }
PPCommonMarkBlockParser >> setexLine [
^ plainLine
]
{ #category : #headers }
PPCommonMarkBlockParser >> setextHeader [
^ linePrefix, emptyLine not, setexLine, lineEnd, setextHeaderUnderline
map: [ :_prefix :_predicates :_text :_nl :_underline |
PPCMHeader new
title: _text;
level: _underline;
yourself
]
]
{ #category : #headers }
PPCommonMarkBlockParser >> setextHeaderUnderline [
| equal minus |
equal := '=' asParser plus ==> [:t | 1].
minus := '-' asParser plus ==> [:t | 2].
^ prefix, listItem not, linePrefix, ((equal / minus) trimRight: space), lineEnd ==> #fourth
]
{ #category : #'lines and whitespace' }
PPCommonMarkBlockParser >> space [
^ Character space asParser
]
{ #category : #lists }
PPCommonMarkBlockParser >> spaces: length [
| retval |
retval := ''.
length timesRepeat: [
retval := retval, ' '.
].
^ retval
]
{ #category : #document }
PPCommonMarkBlockParser >> start [
^ document >=> [ :context :cc |
| retval |
retval := cc value.
retval isPetitFailure ifFalse: [
context links do: [ :link |
retval addChildFirst: link.
]
].
retval
]
]
{ #category : #initialization }
PPCommonMarkBlockParser >> utils [
^ PPCommonMarkUtils instance
]
{ #category : #'lines and whitespace' }
PPCommonMarkBlockParser >> whitespace [
^ #space asParser
]

View File

@ -1,732 +0,0 @@
"
Tests for PPCommonMarkBlockParser. Each test parses a snippet with a given
start rule and inspects the resulting PPCM* node tree. Where a production
depends on parser state, a fresh PPContext is installed first, with quote
markers or indents pushed on its indentStack (or a listItemType set).
NOTE(review): the instance variables string and expected are not used by
any method visible here - confirm they are still needed.
"
Class {
#name : #PPCommonMarkBlockTest,
#superclass : #PPCompositeParserTest,
#instVars : [
'context',
'quote',
'string',
'expected'
],
#category : #'PetitMarkdown-Tests'
}
{ #category : #'as yet unclassified' }
PPCommonMarkBlockTest >> assert: something type: type [
"Assert that something is an instance of type (or a subclass)."
self assert: (something isKindOf: type).
]
{ #category : #'as yet unclassified' }
PPCommonMarkBlockTest >> assertResult: expectedResult [
"Assert that the last parse result equals expectedResult; the commented
line shows a diff when debugging a failure."
self assert: expectedResult = result.
"
(TextDiffBuilder from: result to: expectedResult) buildDisplayPatch.
"
]
{ #category : #'as yet unclassified' }
PPCommonMarkBlockTest >> context [
"The PPContext shared with the parser under test."
^ context
]
{ #category : #'as yet unclassified' }
PPCommonMarkBlockTest >> parse: input rule: rule to: expectedResult [
"Parse input with the given start rule and assert the exact result."
self parse: input rule: rule.
self assert: expectedResult = result.
"
(TextDiffBuilder from: result to: expectedResult) buildDisplayPatch.
"
]
{ #category : #'as yet unclassified' }
PPCommonMarkBlockTest >> parserClass [
"The composite parser class under test."
^ PPCommonMarkBlockParser
]
{ #category : #'as yet unclassified' }
PPCommonMarkBlockTest >> setUp [
"Fresh context per test plus a reusable #quote production instance."
context := PPContext new.
quote := self parserInstanceFor: #quote
]
{ #category : #'test-headers' }
PPCommonMarkBlockTest >> testATXHeader [
"ATX headings with and without a closing # run."
self parse: '# foo' rule: #ATXHeader.
self assert: result title text = 'foo'.
self parse: '# foo#' rule: #ATXHeader.
self assert: result title text = 'foo#'.
self parse: '# foo #' rule: #ATXHeader.
self assert: result title text = 'foo'.
]
{ #category : #'test-headers' }
PPCommonMarkBlockTest >> testATXHeader2 [
"ATX headings with an empty title."
self parse: '#' rule: #ATXHeader.
self assert: result title text = ''.
self parse: '# ' rule: #ATXHeader.
self assert: result title text = ''.
self parse: '# #' rule: #ATXHeader.
self assert: result title text = ''.
self parse: '### ###' rule: #ATXHeader.
self assert: result title text = ''.
]
{ #category : #'test-code' }
PPCommonMarkBlockTest >> testFencedCode [
"Backtick-fenced code keeps the inner lines verbatim."
self parse: '```
abc
def
```' rule: #code.
self assert: result type: PPCMFencedCode.
self assert: result code = 'abc
def'.
]
{ #category : #'test-code' }
PPCommonMarkBlockTest >> testFencedCode2 [
"Fenced code still parses with an indent pushed on the stack."
context := PPContext new.
context indentStack push: '   ' asParser.
self parse: '```
abc
def
```' rule: #code.
self assert: result type: PPCMFencedCode.
self assert: result code = 'abc
def'.
]
{ #category : #'as yet unclassified' }
PPCommonMarkBlockTest >> testHorizontalRule [
"Thematic breaks, including spaced markers."
self parse: '***' rule: #horizontalRule.
self parse: ' - - -' rule: #horizontalRule.
]
{ #category : #'test-html blocks' }
PPCommonMarkBlockTest >> testHtmlBlock [
"A table element parses as an HTML block."
self parse: '<table>
</table>' rule: #htmlBlock.
self assert: result type: PPCMHtmlBlock.
]
{ #category : #'test-code' }
PPCommonMarkBlockTest >> testIndentedCode [
"Indented code: indentation beyond four spaces is preserved, trailing
blank lines are dropped, embedded blank lines are kept."
self parse: '    abc' rule: #code.
self assert: result type: PPCMIndentedCode.
self assert: result code = 'abc'.
self parse: '    abc
    def' rule: #code.
self assert: result code = 'abc
def'.
self parse: '    this is a
    code' rule: #code.
self assert: result code = 'this is a
code'.
self parse: '      this is
      a code' rule: #code.
self assert: result code = '  this is
  a code'.
self parse: '      this is
      a code
	' rule: #code.
self assert: result code = '  this is
  a code'.
self parse: '    chunk1
 
    chunk2
 
 
 
    chunk3' rule: #code.
self assert: result code = 'chunk1
 
chunk2
 
 
 
chunk3'.
self parse: '    chunk1
      
      chunk2' rule: #code.
self assert: result code = 'chunk1
  
  chunk2'.
]
{ #category : #'test-paragraph' }
PPCommonMarkBlockTest >> testLazyParagraphPrefix [
"Lazy continuation prefixes against various indent stacks."
self parse: '' rule: #lazyParagraphPrefix.
context := PPContext new.
context indentStack push: quote.
self parse: '> ' rule: #lazyParagraphPrefix.
context := PPContext new.
context indentStack push: quote.
self fail: '> >' rule: #lazyParagraphPrefix.
context := PPContext new.
context indentStack push: quote.
context indentStack push: quote.
self parse: ' >  >' rule: #lazyParagraphPrefix.
]
{ #category : #'test-links' }
PPCommonMarkBlockTest >> testLinkRef [
"A bare bracketed label with no definition stays paragraph text."
self parse: '[foo]' rule: #paragraph.
self assert: result type: PPCMParagraph.
self assert: result text = '[foo]'.
]
{ #category : #'test-links' }
PPCommonMarkBlockTest >> testLinkRefDef [
"A link reference definition registers on the context and leaves a
placeholder node."
self parse: '[foo]: /url "title"' rule: #linkRefDef.
self assert: result type: PPCMLinkRefDefPlaceholder.
self assert: context links size = 1.
self assert: context links anyOne type: PPCMLinkRefDef.
]
{ #category : #'test-lists' }
PPCommonMarkBlockTest >> testList [
"Bullet lists: single item, two items, blank-separated items, items
inside a quote, and a trailing empty item.
NOTE(review): the indentStack isEmpty assertion in the third section is
duplicated."
context := PPContext new.
self parse: '- one' rule: #list.
self assert: result type: PPCMList.
self assert: result children size = 1.
self assert: result child text = 'one'.
self assert: context indentStack isEmpty.
context := PPContext new.
self parse: '- one
- two' rule: #list.
self assert: result type: PPCMList.
self assert: result children size = 2.
self assert: result firstChild text = 'one'.
self assert: result secondChild text = 'two'.
self assert: context indentStack isEmpty.
context := PPContext new.
self parse: '- one

- two' rule: #list.
self assert: result type: PPCMList.
self assert: result children size = 3.
self assert: result firstChild text trim = 'one'.
self assert: result thirdChild text = 'two'.
self assert: context indentStack isEmpty.
self assert: context indentStack isEmpty.
context := PPContext new.
context indentStack push: quote.
self parse: '- one
>- two' rule: #list.
self assert: result type: PPCMList.
self assert: result children size = 2.
self assert: result firstChild text = 'one'.
self assert: result secondChild text = 'two'.
self assert: context indentStack size = 1.
context := PPContext new.
self parse: '- one
- ' rule: #list.
self assert: result type: PPCMList.
self assert: result children size = 2.
self assert: result firstChild text = 'one'.
self assert: result secondChild text = ''.
self assert: context indentStack isEmpty.
]
{ #category : #'test-lists' }
PPCommonMarkBlockTest >> testList2 [
"An ordered list with a single item."
context := PPContext new.
self parse: '1. one' rule: #list.
self assert: result type: PPCMList.
self assert: result children size = 1.
self assert: result child text = 'one'.
self assert: context indentStack isEmpty.
]
{ #category : #'test-lists' }
PPCommonMarkBlockTest >> testListBullet [
"The bullet pushes an indent of the consumed width onto the stack."
context := PPContext new.
context listItemType: $- asParser.
self parse: '- ' rule: #listBullet.
self assert: context indentStack size = 1.
self assert: context indentStack top literal = '  '.
context := PPContext new.
context listItemType: $- asParser.
self parse: ' -  ' rule: #listBullet.
self assert: context indentStack size = 1.
self assert: context indentStack top literal = '    '.
context := PPContext new.
context listItemType: $- asParser.
parser := self parserInstanceFor: #listBullet.
self assert: parser parse: ' -  ' end: 3.
self assert: context indentStack size = 1.
self assert: context indentStack top literal = '    '.
]
{ #category : #'test-lists' }
PPCommonMarkBlockTest >> testListBullet2 [
"A bullet of the wrong marker type fails."
context := PPContext new.
context listItemType: $* asParser.
self fail: '- ' rule: #listBullet.
]
{ #category : #'test-lists' }
PPCommonMarkBlockTest >> testListContent [
"List item content joined across quoted continuation lines."
context := PPContext new.
context indentStack push: quote.
context indentStack push: ' ' asParser.
self parse: 'one
> two' rule: #listContent.
self assert: result text = 'one
two'.
context := PPContext new.
context indentStack push: quote.
context indentStack push: '  ' asParser.
self parse: 'one
>  two' rule: #listContent.
self assert: result text = 'one
two'.
context := PPContext new.
context indentStack push: quote.
context indentStack push: '  ' asParser.
self parse: '> one
>  > two' rule: #listContent.
self assert: result firstChild type: PPCMBlockQuote.
self assert: result firstChild text = 'one
two'.
]
{ #category : #'test-lists' }
PPCommonMarkBlockTest >> testListItem [
"List items with nested quotes, embedded code and an empty body; the
bullet's indent must be popped again afterwards."
context := PPContext new.
context listItemType: $- asParser.
self parse: '- one' rule: #listItem.
self assert: result type: PPCMListItem.
self assert: result text = 'one'.
self assert: context indentStack size = 0.
context := PPContext new.
context listItemType: $- asParser.
context indentStack push: quote.
self parse: '- > one
>   > two' rule: #listItem.
self assert: result type: PPCMListItem.
self assert: result child child type: PPCMBlockQuote.
self assert: result child child text = 'one
two'.
context := PPContext new.
context indentStack push: quote.
context listItemType: $- asParser.
self parse: '- > one
>
>      > two' rule: #listItem.
self assert: result type: PPCMListItem.
self assert: result child children size = 3.
self assert: result child children first type: PPCMBlockQuote.
self assert: result child children third type: PPCMIndentedCode.
context := PPContext new.
context listItemType: $- asParser.
self parse: '- ' rule: #listItem.
self assert: result type: PPCMListItem.
self assert: result text = ''.
]
{ #category : #'test-lists' }
PPCommonMarkBlockTest >> testListItemCode [
"Extra indentation after the bullet yields indented code."
context := PPContext new.
context listItemType: $- asParser.
self parse: '-     one' rule: #listItem.
self assert: result child child type: PPCMIndentedCode.
]
{ #category : #'test-lists' }
PPCommonMarkBlockTest >> testListItemEmpty [
"Items with no content, with and without a trailing space."
context := PPContext new.
context listItemType: $- asParser.
self parse: '- ' rule: #listItem.
self assert: result type: PPCMListItem.
self assert: result text = ''.
self assert: context indentStack size = 0.
context := PPContext new.
context listItemType: $- asParser.
self parse: '-' rule: #listItem.
self assert: result type: PPCMListItem.
self assert: result text = ''.
self assert: context indentStack size = 0.
]
{ #category : #'test-lists' }
PPCommonMarkBlockTest >> testListNested01 [
"A nested list becomes the second child of the outer item."
context := PPContext new.
self parse: '- one
  - two' rule: #list.
self assert: result type: PPCMList.
self assert: result children size = 1.
self assert: result child child firstChild text = 'one'.
self assert: result child child secondChild type: PPCMList.
self assert: result child child secondChild child text = 'two'.
self assert: context indentStack isEmpty.
]
{ #category : #'test-lists' }
PPCommonMarkBlockTest >> testListNested02 [
"Two levels of nesting."
context := PPContext new.
self parse: '- one
  - two
    - three' rule: #list.
self assert: result type: PPCMList.
self assert: result children size = 1.
self assert: result child child firstChild text = 'one'.
self assert: result child child secondChild type: PPCMList.
self assert: result child child secondChild child child firstChild text = 'two'.
self assert: result child child secondChild child child secondChild type: PPCMList.
self assert: result child child secondChild child child secondChild text = 'three'.
self assert: context indentStack isEmpty.
]
{ #category : #'test-lists' }
PPCommonMarkBlockTest >> testListNested03 [
"Nesting followed by a sibling at the intermediate level."
context := PPContext new.
self parse: '- one
  - two
    - three
  - four' rule: #list.
self assert: result type: PPCMList.
self assert: result children size = 1.
self assert: result child child firstChild text = 'one'.
self assert: result child child secondChild type: PPCMList.
self assert: result child child secondChild firstChild child firstChild text = 'two'.
self assert: result child child secondChild firstChild child secondChild type: PPCMList.
self assert: result child child secondChild firstChild child secondChild text = 'three'.
self assert: result child child secondChild secondChild child firstChild text = 'four'.
self assert: context indentStack isEmpty.
]
{ #category : #'test-lists' }
PPCommonMarkBlockTest >> testListNested04 [
"Nesting with a lazy continuation line on the last item."
context := PPContext new.
self parse: '- one
  - two
    - three
  - four
five' rule: #list.
self assert: result type: PPCMList.
self assert: result children size = 1.
self assert: result child child firstChild text = 'one'.
self assert: result child child secondChild type: PPCMList.
self assert: result child child secondChild firstChild child firstChild text = 'two'.
self assert: result child child secondChild firstChild child secondChild type: PPCMList.
self assert: result child child secondChild firstChild child secondChild text = 'three'.
self assert: result child child secondChild secondChild child firstChild text = 'four'.
self assert: result child child secondChild secondChild child thirdChild text = 'five'.
self assert: context indentStack isEmpty.
]
{ #category : #'test-lists' }
PPCommonMarkBlockTest >> testListTight [
"Adjacent items make a tight list."
context := PPContext new.
self parse: '- one
- two' rule: #list.
self assert: result type: PPCMList.
self assert: result isTight.
]
{ #category : #'test-lists' }
PPCommonMarkBlockTest >> testListTight2 [
"A blank line between items makes the list loose."
context := PPContext new.
self parse: '- one

- two' rule: #list.
self assert: result type: PPCMList.
self assert: result children size = 3.
self assert: result isTight not.
]
{ #category : #'test-lists' }
PPCommonMarkBlockTest >> testListTight3 [
"An indented continuation line keeps the list tight."
context := PPContext new.
self parse: '- one
  two' rule: #list.
self assert: result type: PPCMList.
self assert: result isTight.
]
{ #category : #'test-lists' }
PPCommonMarkBlockTest >> testListTight4 [
"A lazy continuation line keeps the list tight."
context := PPContext new.
self parse: '- one
two' rule: #list.
self assert: result type: PPCMList.
self assert: result isTight.
]
{ #category : #'test-paragraph' }
PPCommonMarkBlockTest >> testParagraph [
"Paragraph continuation lines, including inside block quotes and with
lazy prefixes."
self parse: 'abc
def' rule: #paragraph.
self assert: result text = 'abc
def'.
self parse: 'abc
 def' rule: #paragraph.
self assert: result text = 'abc
def'.
context := PPContext new.
context indentStack push: quote.
self parse: ' abc
 def' rule: #paragraph.
self assert: result text = 'abc
def'.
context := PPContext new.
context indentStack push: quote.
self parse: 'abc
> def' rule: #paragraph.
self assert: result text = 'abc
def'.
]
{ #category : #'test-paragraph' }
PPCommonMarkBlockTest >> testParagraph2 [
"A heading-like continuation line stays in the paragraph when it is
not a valid ATX heading."
self parse: 'foo
    # bar' rule: #paragraph.
self assert: result text = 'foo
# bar'.
]
{ #category : #'as yet unclassified' }
PPCommonMarkBlockTest >> testPrefix [
"The dynamic prefix matches whatever is on the indent stack."
self parse: '' rule: #prefix.
context := PPContext new.
context indentStack push: quote.
self parse: '> ' rule: #prefix
]
{ #category : #'test-quotes' }
PPCommonMarkBlockTest >> testQuote [
"A quote marker consumes > plus an optional space but never the
newline."
self parse: '>' rule: #quote.
self assertResult: '>'.
self parse: '> ' rule: #quote.
self assertResult: '> '.
self fail: ('>', String cr) rule: #quote.
]
{ #category : #'test-quotes' }
PPCommonMarkBlockTest >> testQuoteBlock [
"Block quotes: multi-line, nested, containing a heading, containing
indented code, and empty."
self parse: '> foo' rule: #quoteBlock.
self assert: result type: PPCMBlockQuote.
self assert: result children size = 1.
self assert: result child text = 'foo'.
context := PPContext new.
self parse: '> foo
> bar' rule: #quoteBlock.
self assert: result type: PPCMBlockQuote.
self assert: result children size = 1.
self assert: result child text = 'foo
bar'.
context := PPContext new.
self parse: '>> foo
>> bar' rule: #quoteBlock.
self assert: result type: PPCMBlockQuote.
self assert: result child child type: PPCMBlockQuote.
self assert: result child child text = 'foo
bar'.
context := PPContext new.
self parse: '># Foo' rule: #quoteBlock.
self assert: result type: PPCMBlockQuote.
self assert: result child child type: PPCMHeader.
self assert: result child child text = 'Foo'.
context := PPContext new.
self parse: '> foo
>
>     bar' rule: #quoteBlock.
self assert: result type: PPCMBlockQuote.
self assert: result child child type: PPCMIndentedCode.
context := PPContext new.
self parse: '>' rule: #quoteBlock.
self assert: result type: PPCMBlockQuote.
context := PPContext new.
self parse: '>
>
> ' rule: #quoteBlock.
self assert: result type: PPCMBlockQuote.
]
{ #category : #'test-quotes' }
PPCommonMarkBlockTest >> testQuoteDedent [
"Dedent pops exactly one quote from the stack per success and fails
once the stack is empty or the prefix still matches."
parser := self parserInstanceFor: #quoteDedent.
context := PPContext new.
context indentStack push: quote.
self assert: parser parse: '' end: 0.
self assert: context indentStack size = 0.
self assert: parser fail: '' end: 0.
context := PPContext new.
self assert: parser fail: ''.
context := PPContext new.
context indentStack push: quote.
self assert: parser fail: '>' end: 0.
context := PPContext new.
context indentStack push: quote.
context indentStack push: quote.
self assert: parser parse: ' > ' end: 0.
context := PPContext new.
context indentStack push: quote.
context indentStack push: '   ' asParser.
self assert: parser fail: ' > ' end: 0.
context := PPContext new.
context indentStack push: quote.
context indentStack push: quote.
context indentStack push: quote.
self assert: parser parse: ' >  > ' end: 0.
context := PPContext new.
context indentStack push: quote.
context indentStack push: quote.
self assert: parser parse: '' end: 0.
self assert: parser parse: '' end: 0.
self assert: parser fail: '' end: 0.
]
{ #category : #'test-quotes' }
PPCommonMarkBlockTest >> testQuoteIndent [
"Indent consumes a quote marker and pushes the quote parser; nesting
and leading spaces are exercised."
parser := self parserInstanceFor: #quoteIndent.
context := PPContext new.
self assert: parser parse: '>' end: 1.
self assert: context indentStack size = 1.
self assert: context indentStack top = quote.
context := PPContext new.
self assert: parser parse: '   >   ' end: 5.
context := PPContext new.
self assert: parser parse: '   >' end: 3.
context := PPContext new.
self assert: parser parse: '  >' end: 2.
context := PPContext new.
self assert: parser fail: '    >'.
context := PPContext new.
context indentStack push: quote.
self assert: parser parse: '>' end: 1.
self assert: context indentStack size = 2.
self assert: context indentStack top = quote.
context := PPContext new.
context indentStack push: quote.
self assert: parser parse: '> > ' end: 2.
context := PPContext new.
context indentStack push: quote.
self assert: parser parse: ' > > ' end: 3.
]
{ #category : #'test-headers' }
PPCommonMarkBlockTest >> testSetextHeader [
"A dash underline yields a setext heading."
self parse: 'Foo
---' rule: #setextHeader.
self assert: result title text = 'Foo'.
]

Some files were not shown because too many files have changed in this diff Show More