Starting multiOS support for binary files.
This commit is contained in:
parent
5e4db00352
commit
a22005da27
@ -1,28 +1,28 @@
|
||||
Extension { #name : #Array }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
Array >> bagOfWordsFor: sentenceArray [
|
||||
"An utility machine training little algorithm.
|
||||
Inspired by https://youtu.be/8qwowmiXANQ?t=1144.
|
||||
This should be moved probably to [Polyglot](https://github.com/pharo-ai/Polyglot),
|
||||
but the repository is pretty innactive (with commits 2 or more years old and no reponse to issues).
|
||||
Meanwhile, it will be in MiniDocs.
|
||||
|
||||
Given the sentence := #('hello' 'how' 'are' 'you')
|
||||
and the testVocabulary := #('hi' 'hello' 'I' 'you' 'bye' 'thank' 'you')
|
||||
then
|
||||
|
||||
testVocabulary bagOfWordsFor: sentence.
|
||||
|
||||
Should give: #(0 1 0 1 0 0 0)
|
||||
"
|
||||
| bagOfWords |
|
||||
bagOfWords := Array new: self size.
|
||||
bagOfWords doWithIndex: [:each :i | bagOfWords at: i put: 0 ].
|
||||
sentenceArray do: [:token | |index|
|
||||
index := self indexOf: token.
|
||||
index > 0
|
||||
ifTrue: [bagOfWords at: index put: 1]
|
||||
].
|
||||
^ bagOfWords
|
||||
]
|
||||
Extension { #name : #Array }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
Array >> bagOfWordsFor: sentenceArray [
|
||||
"An utility machine training little algorithm.
|
||||
Inspired by https://youtu.be/8qwowmiXANQ?t=1144.
|
||||
This should be moved probably to [Polyglot](https://github.com/pharo-ai/Polyglot),
|
||||
but the repository is pretty innactive (with commits 2 or more years old and no reponse to issues).
|
||||
Meanwhile, it will be in MiniDocs.
|
||||
|
||||
Given the sentence := #('hello' 'how' 'are' 'you')
|
||||
and the testVocabulary := #('hi' 'hello' 'I' 'you' 'bye' 'thank' 'you')
|
||||
then
|
||||
|
||||
testVocabulary bagOfWordsFor: sentence.
|
||||
|
||||
Should give: #(0 1 0 1 0 0 0)
|
||||
"
|
||||
| bagOfWords |
|
||||
bagOfWords := Array new: self size.
|
||||
bagOfWords doWithIndex: [:each :i | bagOfWords at: i put: 0 ].
|
||||
sentenceArray do: [:token | |index|
|
||||
index := self indexOf: token.
|
||||
index > 0
|
||||
ifTrue: [bagOfWords at: index put: 1]
|
||||
].
|
||||
^ bagOfWords
|
||||
]
|
||||
|
@ -1,23 +1,23 @@
|
||||
Extension { #name : #BrAsyncFileWidget }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
BrAsyncFileWidget >> url: aUrl [
|
||||
|
||||
| realUrl imageUrl |
|
||||
realUrl := aUrl asZnUrl.
|
||||
|
||||
realUrl scheme = #file ifTrue: [
|
||||
^ self file: realUrl asFileReference ].
|
||||
imageUrl := realUrl.
|
||||
realUrl host = 'www.youtube.com' ifTrue: [ | video |
|
||||
video := LeRawYoutubeReferenceInfo fromYoutubeStringUrl: realUrl asString.
|
||||
imageUrl := (video rawData at: 'thumbnail_url') asUrl.
|
||||
].
|
||||
|
||||
self stencil: [
|
||||
(SkiaImage fromForm:
|
||||
(Form fromBase64String: imageUrl retrieveContents base64Encoded))
|
||||
asElement constraintsDo: [ :c |
|
||||
c horizontal matchParent.
|
||||
c vertical matchParent ] ]
|
||||
]
|
||||
Extension { #name : #BrAsyncFileWidget }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
BrAsyncFileWidget >> url: aUrl [
|
||||
|
||||
| realUrl imageUrl |
|
||||
realUrl := aUrl asZnUrl.
|
||||
|
||||
realUrl scheme = #file ifTrue: [
|
||||
^ self file: realUrl asFileReference ].
|
||||
imageUrl := realUrl.
|
||||
realUrl host = 'www.youtube.com' ifTrue: [ | video |
|
||||
video := LeRawYoutubeReferenceInfo fromYoutubeStringUrl: realUrl asString.
|
||||
imageUrl := (video rawData at: 'thumbnail_url') asUrl.
|
||||
].
|
||||
|
||||
self stencil: [
|
||||
(SkiaImage fromForm:
|
||||
(Form fromBase64String: imageUrl retrieveContents base64Encoded))
|
||||
asElement constraintsDo: [ :c |
|
||||
c horizontal matchParent.
|
||||
c vertical matchParent ] ]
|
||||
]
|
||||
|
@ -1,7 +1,7 @@
|
||||
Extension { #name : #ByteString }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
ByteString >> email [
|
||||
"Quick fix for importing Lepiter pages that have a plain ByteString field as email."
|
||||
^ self
|
||||
]
|
||||
Extension { #name : #ByteString }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
ByteString >> email [
|
||||
"Quick fix for importing Lepiter pages that have a plain ByteString field as email."
|
||||
^ self
|
||||
]
|
||||
|
@ -1,46 +1,46 @@
|
||||
Extension { #name : #DataFrame }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
DataFrame >> asMarkdown [
|
||||
| response |
|
||||
response := '' writeStream.
|
||||
self columnNames do: [ :name | response nextPutAll: '| ' , name , ' ' ].
|
||||
response
|
||||
nextPutAll: '|';
|
||||
cr.
|
||||
self columns size timesRepeat: [ response nextPutAll: '|---' ].
|
||||
response
|
||||
nextPutAll: '|';
|
||||
cr.
|
||||
self asArrayOfRows
|
||||
do: [ :row |
|
||||
row do: [ :cell | response nextPutAll: '| ' , cell asString , ' ' ].
|
||||
response
|
||||
nextPutAll: '|';
|
||||
cr ].
|
||||
^ response contents accentedCharactersCorrection withInternetLineEndings.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
DataFrame >> viewDataFor: aView [
|
||||
<gtView>
|
||||
| columnedList |
|
||||
self numberOfRows >= 1 ifFalse: [ ^ aView empty ].
|
||||
columnedList := aView columnedList
|
||||
title: 'Data';
|
||||
items: [ self transposed columns ];
|
||||
priority: 40.
|
||||
self columnNames
|
||||
withIndexDo: [:aName :anIndex |
|
||||
columnedList
|
||||
column: aName
|
||||
text: [:anItem | anItem at: anIndex ]
|
||||
].
|
||||
^ columnedList
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
DataFrame >> webView [
|
||||
|
||||
^ Pandoc convertString: self asMarkdown from: 'markdown' to: 'html'
|
||||
]
|
||||
Extension { #name : #DataFrame }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
DataFrame >> asMarkdown [
|
||||
| response |
|
||||
response := '' writeStream.
|
||||
self columnNames do: [ :name | response nextPutAll: '| ' , name , ' ' ].
|
||||
response
|
||||
nextPutAll: '|';
|
||||
cr.
|
||||
self columns size timesRepeat: [ response nextPutAll: '|---' ].
|
||||
response
|
||||
nextPutAll: '|';
|
||||
cr.
|
||||
self asArrayOfRows
|
||||
do: [ :row |
|
||||
row do: [ :cell | response nextPutAll: '| ' , cell asString , ' ' ].
|
||||
response
|
||||
nextPutAll: '|';
|
||||
cr ].
|
||||
^ response contents accentedCharactersCorrection withInternetLineEndings.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
DataFrame >> viewDataFor: aView [
|
||||
<gtView>
|
||||
| columnedList |
|
||||
self numberOfRows >= 1 ifFalse: [ ^ aView empty ].
|
||||
columnedList := aView columnedList
|
||||
title: 'Data';
|
||||
items: [ self transposed columns ];
|
||||
priority: 40.
|
||||
self columnNames
|
||||
withIndexDo: [:aName :anIndex |
|
||||
columnedList
|
||||
column: aName
|
||||
text: [:anItem | anItem at: anIndex ]
|
||||
].
|
||||
^ columnedList
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
DataFrame >> webView [
|
||||
|
||||
^ Pandoc convertString: self asMarkdown from: 'markdown' to: 'html'
|
||||
]
|
||||
|
@ -1,342 +1,342 @@
|
||||
Class {
|
||||
#name : #GrafoscopioNode,
|
||||
#superclass : #Object,
|
||||
#instVars : [
|
||||
'header',
|
||||
'body',
|
||||
'tags',
|
||||
'children',
|
||||
'parent',
|
||||
'links',
|
||||
'level',
|
||||
'created',
|
||||
'nodesInPreorder',
|
||||
'selected',
|
||||
'edited',
|
||||
'headers',
|
||||
'key',
|
||||
'output',
|
||||
'remoteLocations'
|
||||
],
|
||||
#category : #'MiniDocs-Legacy'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode class >> fromFile: aFileReference [
|
||||
|
||||
^ (STON fromString: aFileReference contents) first parent
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode class >> fromLink: aStonLink [
|
||||
| notebook |
|
||||
notebook := (STON fromString: aStonLink asUrl retrieveContents utf8Decoded) first parent.
|
||||
notebook addRemoteLocation: aStonLink.
|
||||
^ notebook
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> addRemoteLocation: anURL [
|
||||
self remoteLocations add: anURL
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> ancestors [
|
||||
"I return a collection of all the nodes wich are ancestors of the receiver node"
|
||||
| currentNode ancestors |
|
||||
|
||||
currentNode := self.
|
||||
ancestors := OrderedCollection new.
|
||||
[ currentNode parent notNil and: [ currentNode level > 0 ] ]
|
||||
whileTrue: [
|
||||
ancestors add: currentNode parent.
|
||||
currentNode := currentNode parent].
|
||||
ancestors := ancestors reversed.
|
||||
^ ancestors
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> asLePage [
|
||||
| page |
|
||||
self root populateTimestamps.
|
||||
page := LePage new
|
||||
initializeTitle: 'Grafoscopio Notebook (imported)'.
|
||||
self nodesInPreorder allButFirst
|
||||
do: [:node | page addSnippet: node asSnippet ].
|
||||
page latestEditTime: self root latestEditionDate.
|
||||
page createTime: self root earliestCreationDate.
|
||||
page optionAt: 'remoteLocations' put: self remoteLocations.
|
||||
^ page.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> asSnippet [
|
||||
| snippet child |
|
||||
snippet := LeTextSnippet new
|
||||
string: self header;
|
||||
createTime: (LeTime new
|
||||
time: self created);
|
||||
uid: LeUID new.
|
||||
(self tags includes: 'código')
|
||||
ifFalse: [
|
||||
child := LeTextSnippet new;
|
||||
string: self body. ]
|
||||
ifTrue: [
|
||||
child := LePharoSnippet new;
|
||||
code: self body ].
|
||||
child
|
||||
createTime: (LeTime new
|
||||
time: self created);
|
||||
uid: LeUID new.
|
||||
snippet addFirstSnippet: child.
|
||||
snippet optionAt: 'tags' put: self tags.
|
||||
^ snippet
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> body [
|
||||
^ body
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> body: anObject [
|
||||
body := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> children [
|
||||
^ children
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> children: anObject [
|
||||
children := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> created [
|
||||
created ifNotNil: [^created asDateAndTime].
|
||||
^ created
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> created: anObject [
|
||||
created := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> earliestCreationDate [
|
||||
| earliest |
|
||||
|
||||
self nodesWithCreationDates ifNotEmpty: [
|
||||
earliest := self nodesWithCreationDates first created]
|
||||
ifEmpty: [ earliest := self earliestRepositoryTimestamp - 3 hours].
|
||||
self nodesWithCreationDates do: [:node |
|
||||
node created <= earliest ifTrue: [ earliest := node created ] ].
|
||||
^ earliest
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> earliestRepositoryTimestamp [
|
||||
| remote fossilHost docSegments repo checkinInfo |
|
||||
remote := self remoteLocations first asUrl.
|
||||
fossilHost := 'https://mutabit.com/repos.fossil'.
|
||||
(remote asString includesSubstring: fossilHost) ifFalse: [ ^ false ].
|
||||
docSegments := remote segments copyFrom: 5 to: remote segments size.
|
||||
repo := FossilRepo new
|
||||
remote: (remote scheme, '://', remote host, '/', remote segments first, '/', remote segments second).
|
||||
checkinInfo := repo firstCheckinFor: ('/' join: docSegments).
|
||||
^ DateAndTime fromUnixTime: (checkinInfo at: 'timestamp')
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> edited [
|
||||
^ edited ifNotNil: [^ edited asDateAndTime ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> edited: anObject [
|
||||
edited := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> gtTextFor: aView [
|
||||
<gtView>
|
||||
^ aView textEditor
|
||||
title: 'Body';
|
||||
text: [ body ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> header [
|
||||
^ header
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> header: anObject [
|
||||
header := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> latestEditionDate [
|
||||
| latest |
|
||||
|
||||
latest := self nodesWithEditionDates first edited.
|
||||
self nodesWithEditionDates do: [:node |
|
||||
node edited >= latest ifTrue: [ latest := node edited ] ].
|
||||
^ latest
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> level [
|
||||
^ level
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> level: anObject [
|
||||
level := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> links [
|
||||
^ links
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> links: anObject [
|
||||
links := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> nodesInPreorder [
|
||||
^ nodesInPreorder
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> nodesInPreorder: anObject [
|
||||
nodesInPreorder := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> nodesWithCreationDates [
|
||||
^ self nodesInPreorder select: [ :each | each created isNotNil ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> nodesWithEditionDates [
|
||||
^ self nodesInPreorder select: [ :each | each edited isNotNil ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> parent [
|
||||
^ parent
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> parent: anObject [
|
||||
parent := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> populateTimestamps [
|
||||
| adhocCreationMarker adhocEditionMarker |
|
||||
adhocCreationMarker := 'adhoc creation timestamp'.
|
||||
adhocEditionMarker := 'adhoc edition timestamp'.
|
||||
(self nodesInPreorder size = self nodesWithCreationDates size
|
||||
and: [ self nodesInPreorder size = self nodesWithEditionDates size ])
|
||||
ifTrue: [ ^ self nodesInPreorder ].
|
||||
self nodesInPreorder allButFirst doWithIndex: [:node :i |
|
||||
node created ifNil: [
|
||||
node created: self earliestCreationDate + i.
|
||||
node tags add: adhocCreationMarker.
|
||||
].
|
||||
node edited ifNil: [
|
||||
node edited: self earliestCreationDate + i + 1.
|
||||
node tags add: 'adhoc edition timestamp'
|
||||
].
|
||||
].
|
||||
self root created ifNil: [
|
||||
self root created: self earliestCreationDate - 1.
|
||||
self root tags add: adhocCreationMarker.
|
||||
].
|
||||
self root edited ifNil: [
|
||||
self root edited: self latestEditionDate.
|
||||
self root tags add: adhocEditionMarker.
|
||||
].
|
||||
^ self nodesInPreorder
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> printOn: aStream [
|
||||
super printOn: aStream.
|
||||
aStream
|
||||
nextPutAll: '( ', self header, ' )'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> remoteLocations [
|
||||
^ remoteLocations ifNil: [ remoteLocations := OrderedCollection new]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> root [
|
||||
self level = 0 ifTrue: [ ^ self ].
|
||||
^ self ancestors first.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> selected [
|
||||
^ selected
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> selected: anObject [
|
||||
selected := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> tags [
|
||||
^ tags
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> tags: anObject [
|
||||
tags := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> viewBody [
|
||||
|
||||
| aText |
|
||||
aText := self header asRopedText.
|
||||
|
||||
self children do: [ :child |
|
||||
aText append: ' ' asRopedText.
|
||||
aText append: (child header asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor).
|
||||
aText append: ('= "' asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor).
|
||||
aText append: (child body asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor).
|
||||
aText append:
|
||||
('"' asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor) ].
|
||||
|
||||
|
||||
^ aText
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> viewChildrenFor: aView [
|
||||
<gtView>
|
||||
|
||||
children ifNil: [ ^ aView empty ].
|
||||
|
||||
^ aView columnedTree
|
||||
title: 'Children';
|
||||
priority: 1;
|
||||
items: [ { self } ];
|
||||
children: #children;
|
||||
column: 'Name' text: #viewBody;
|
||||
expandUpTo: 2
|
||||
]
|
||||
Class {
|
||||
#name : #GrafoscopioNode,
|
||||
#superclass : #Object,
|
||||
#instVars : [
|
||||
'header',
|
||||
'body',
|
||||
'tags',
|
||||
'children',
|
||||
'parent',
|
||||
'links',
|
||||
'level',
|
||||
'created',
|
||||
'nodesInPreorder',
|
||||
'selected',
|
||||
'edited',
|
||||
'headers',
|
||||
'key',
|
||||
'output',
|
||||
'remoteLocations'
|
||||
],
|
||||
#category : #'MiniDocs-Legacy'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode class >> fromFile: aFileReference [
|
||||
|
||||
^ (STON fromString: aFileReference contents) first parent
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode class >> fromLink: aStonLink [
|
||||
| notebook |
|
||||
notebook := (STON fromString: aStonLink asUrl retrieveContents utf8Decoded) first parent.
|
||||
notebook addRemoteLocation: aStonLink.
|
||||
^ notebook
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> addRemoteLocation: anURL [
|
||||
self remoteLocations add: anURL
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> ancestors [
|
||||
"I return a collection of all the nodes wich are ancestors of the receiver node"
|
||||
| currentNode ancestors |
|
||||
|
||||
currentNode := self.
|
||||
ancestors := OrderedCollection new.
|
||||
[ currentNode parent notNil and: [ currentNode level > 0 ] ]
|
||||
whileTrue: [
|
||||
ancestors add: currentNode parent.
|
||||
currentNode := currentNode parent].
|
||||
ancestors := ancestors reversed.
|
||||
^ ancestors
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> asLePage [
|
||||
| page |
|
||||
self root populateTimestamps.
|
||||
page := LePage new
|
||||
initializeTitle: 'Grafoscopio Notebook (imported)'.
|
||||
self nodesInPreorder allButFirst
|
||||
do: [:node | page addSnippet: node asSnippet ].
|
||||
page latestEditTime: self root latestEditionDate.
|
||||
page createTime: self root earliestCreationDate.
|
||||
page optionAt: 'remoteLocations' put: self remoteLocations.
|
||||
^ page.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> asSnippet [
|
||||
| snippet child |
|
||||
snippet := LeTextSnippet new
|
||||
string: self header;
|
||||
createTime: (LeTime new
|
||||
time: self created);
|
||||
uid: LeUID new.
|
||||
(self tags includes: 'código')
|
||||
ifFalse: [
|
||||
child := LeTextSnippet new;
|
||||
string: self body. ]
|
||||
ifTrue: [
|
||||
child := LePharoSnippet new;
|
||||
code: self body ].
|
||||
child
|
||||
createTime: (LeTime new
|
||||
time: self created);
|
||||
uid: LeUID new.
|
||||
snippet addFirstSnippet: child.
|
||||
snippet optionAt: 'tags' put: self tags.
|
||||
^ snippet
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> body [
|
||||
^ body
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> body: anObject [
|
||||
body := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> children [
|
||||
^ children
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> children: anObject [
|
||||
children := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> created [
|
||||
created ifNotNil: [^created asDateAndTime].
|
||||
^ created
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> created: anObject [
|
||||
created := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> earliestCreationDate [
|
||||
| earliest |
|
||||
|
||||
self nodesWithCreationDates ifNotEmpty: [
|
||||
earliest := self nodesWithCreationDates first created]
|
||||
ifEmpty: [ earliest := self earliestRepositoryTimestamp - 3 hours].
|
||||
self nodesWithCreationDates do: [:node |
|
||||
node created <= earliest ifTrue: [ earliest := node created ] ].
|
||||
^ earliest
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> earliestRepositoryTimestamp [
|
||||
| remote fossilHost docSegments repo checkinInfo |
|
||||
remote := self remoteLocations first asUrl.
|
||||
fossilHost := 'https://mutabit.com/repos.fossil'.
|
||||
(remote asString includesSubstring: fossilHost) ifFalse: [ ^ false ].
|
||||
docSegments := remote segments copyFrom: 5 to: remote segments size.
|
||||
repo := FossilRepo new
|
||||
remote: (remote scheme, '://', remote host, '/', remote segments first, '/', remote segments second).
|
||||
checkinInfo := repo firstCheckinFor: ('/' join: docSegments).
|
||||
^ DateAndTime fromUnixTime: (checkinInfo at: 'timestamp')
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> edited [
|
||||
^ edited ifNotNil: [^ edited asDateAndTime ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> edited: anObject [
|
||||
edited := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> gtTextFor: aView [
|
||||
<gtView>
|
||||
^ aView textEditor
|
||||
title: 'Body';
|
||||
text: [ body ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> header [
|
||||
^ header
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> header: anObject [
|
||||
header := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> latestEditionDate [
|
||||
| latest |
|
||||
|
||||
latest := self nodesWithEditionDates first edited.
|
||||
self nodesWithEditionDates do: [:node |
|
||||
node edited >= latest ifTrue: [ latest := node edited ] ].
|
||||
^ latest
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> level [
|
||||
^ level
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> level: anObject [
|
||||
level := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> links [
|
||||
^ links
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> links: anObject [
|
||||
links := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> nodesInPreorder [
|
||||
^ nodesInPreorder
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> nodesInPreorder: anObject [
|
||||
nodesInPreorder := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> nodesWithCreationDates [
|
||||
^ self nodesInPreorder select: [ :each | each created isNotNil ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> nodesWithEditionDates [
|
||||
^ self nodesInPreorder select: [ :each | each edited isNotNil ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> parent [
|
||||
^ parent
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> parent: anObject [
|
||||
parent := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> populateTimestamps [
|
||||
| adhocCreationMarker adhocEditionMarker |
|
||||
adhocCreationMarker := 'adhoc creation timestamp'.
|
||||
adhocEditionMarker := 'adhoc edition timestamp'.
|
||||
(self nodesInPreorder size = self nodesWithCreationDates size
|
||||
and: [ self nodesInPreorder size = self nodesWithEditionDates size ])
|
||||
ifTrue: [ ^ self nodesInPreorder ].
|
||||
self nodesInPreorder allButFirst doWithIndex: [:node :i |
|
||||
node created ifNil: [
|
||||
node created: self earliestCreationDate + i.
|
||||
node tags add: adhocCreationMarker.
|
||||
].
|
||||
node edited ifNil: [
|
||||
node edited: self earliestCreationDate + i + 1.
|
||||
node tags add: 'adhoc edition timestamp'
|
||||
].
|
||||
].
|
||||
self root created ifNil: [
|
||||
self root created: self earliestCreationDate - 1.
|
||||
self root tags add: adhocCreationMarker.
|
||||
].
|
||||
self root edited ifNil: [
|
||||
self root edited: self latestEditionDate.
|
||||
self root tags add: adhocEditionMarker.
|
||||
].
|
||||
^ self nodesInPreorder
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> printOn: aStream [
|
||||
super printOn: aStream.
|
||||
aStream
|
||||
nextPutAll: '( ', self header, ' )'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> remoteLocations [
|
||||
^ remoteLocations ifNil: [ remoteLocations := OrderedCollection new]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> root [
|
||||
self level = 0 ifTrue: [ ^ self ].
|
||||
^ self ancestors first.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> selected [
|
||||
^ selected
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> selected: anObject [
|
||||
selected := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> tags [
|
||||
^ tags
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> tags: anObject [
|
||||
tags := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> viewBody [
|
||||
|
||||
| aText |
|
||||
aText := self header asRopedText.
|
||||
|
||||
self children do: [ :child |
|
||||
aText append: ' ' asRopedText.
|
||||
aText append: (child header asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor).
|
||||
aText append: ('= "' asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor).
|
||||
aText append: (child body asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor).
|
||||
aText append:
|
||||
('"' asRopedText foreground:
|
||||
BrGlamorousColors disabledButtonTextColor) ].
|
||||
|
||||
|
||||
^ aText
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNode >> viewChildrenFor: aView [
|
||||
<gtView>
|
||||
|
||||
children ifNil: [ ^ aView empty ].
|
||||
|
||||
^ aView columnedTree
|
||||
title: 'Children';
|
||||
priority: 1;
|
||||
items: [ { self } ];
|
||||
children: #children;
|
||||
column: 'Name' text: #viewBody;
|
||||
expandUpTo: 2
|
||||
]
|
||||
|
@ -1,15 +1,15 @@
|
||||
Class {
|
||||
#name : #GrafoscopioNodeTest,
|
||||
#superclass : #TestCase,
|
||||
#category : #'MiniDocs-Legacy'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNodeTest >> testEarliestCreationNode [
|
||||
| notebook remoteNotebook offedingNodes |
|
||||
remoteNotebook := 'https://mutabit.com/repos.fossil/documentaton/raw/a63598382?at=documentaton.ston'.
|
||||
notebook := (STON fromString: remoteNotebook asUrl retrieveContents utf8Decoded) first parent.
|
||||
offedingNodes := notebook nodesInPreorder select: [:node |
|
||||
node created isNotNil and: [node created < notebook earliestCreationDate] ].
|
||||
self assert: offedingNodes size equals: 0
|
||||
]
|
||||
Class {
|
||||
#name : #GrafoscopioNodeTest,
|
||||
#superclass : #TestCase,
|
||||
#category : #'MiniDocs-Legacy'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
GrafoscopioNodeTest >> testEarliestCreationNode [
|
||||
| notebook remoteNotebook offedingNodes |
|
||||
remoteNotebook := 'https://mutabit.com/repos.fossil/documentaton/raw/a63598382?at=documentaton.ston'.
|
||||
notebook := (STON fromString: remoteNotebook asUrl retrieveContents utf8Decoded) first parent.
|
||||
offedingNodes := notebook nodesInPreorder select: [:node |
|
||||
node created isNotNil and: [node created < notebook earliestCreationDate] ].
|
||||
self assert: offedingNodes size equals: 0
|
||||
]
|
||||
|
@ -1,26 +1,26 @@
|
||||
Extension { #name : #GtGQLSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
GtGQLSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uuid;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
GtGQLSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
||||
Extension { #name : #GtGQLSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
GtGQLSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uuid;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
GtGQLSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
||||
|
@ -1,192 +1,192 @@
|
||||
"
|
||||
I model the interface between a CodiMD (https://demo.codimd.org) documentation
|
||||
server and Grafoscopio.
|
||||
I enable the interaction between Grafoscopio notebooks and CodiMD documents,
|
||||
so one document can start online (as a CodiMD pad) and continue as a Grafoscopio
|
||||
notebook or viceversa.
|
||||
"
|
||||
Class {
|
||||
#name : #HedgeDoc,
|
||||
#superclass : #Markdown,
|
||||
#instVars : [
|
||||
'server',
|
||||
'pad',
|
||||
'url'
|
||||
],
|
||||
#category : #'MiniDocs-Core'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc class >> fromLink: aUrl [
|
||||
^ self new fromLink: aUrl
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
HedgeDoc class >> newDefault [
|
||||
^ self new
|
||||
defaultServer.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> asLePage [
|
||||
| newPage snippet |
|
||||
snippet := LeTextSnippet new
|
||||
string: self bodyWithoutTitleHeader promoteMarkdownHeaders.
|
||||
newPage := LePage new
|
||||
initializeTitle: self title;
|
||||
addSnippet: snippet;
|
||||
yourself.
|
||||
newPage incomingLinks.
|
||||
newPage metadata addAll: self metadata.
|
||||
^ newPage
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> asMarkdeep [
|
||||
^ Markdeep new
|
||||
metadata: self metadata;
|
||||
body: self contents;
|
||||
file: self file, 'html'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> asMarkdownTiddler [
|
||||
self url ifNil: [ ^ self ].
|
||||
^ Tiddler new
|
||||
title: self url segments first;
|
||||
text: (self contents ifNil: [ self retrieveContents]);
|
||||
type: 'text/x-markdown';
|
||||
created: Tiddler nowLocal.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> bodyWithoutTitleHeader [
|
||||
| headerIndex |
|
||||
headerIndex := self body lines
|
||||
detectIndex: [ :line | line includesSubstring: self headerAsTitle ]
|
||||
ifNone: [ ^ self body].
|
||||
^ (self body lines copyWithoutIndex: headerIndex) asStringWithCr
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> contents [
|
||||
^ super contents
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> contents: anObject [
|
||||
body := anObject
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
HedgeDoc >> defaultServer [
|
||||
self server: 'https://docutopia.tupale.co'.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> fromLink: aString [
|
||||
self url: aString.
|
||||
self retrieveContents
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
HedgeDoc >> htmlUrl [
|
||||
| link |
|
||||
link := self url copy.
|
||||
link segments insert: 's' before: 1.
|
||||
^ link
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
HedgeDoc >> importContents [
|
||||
self contents: self retrieveContents
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> pad [
|
||||
^ pad
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> pad: anObject [
|
||||
pad := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> retrieveContents [
|
||||
self url ifNil: [ ^ self ].
|
||||
self fromString: (self url addPathSegment: 'download') retrieveContents.
|
||||
^ self.
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
HedgeDoc >> retrieveHtmlContents [
|
||||
| htmlContents |
|
||||
self url ifNil: [ ^ self ].
|
||||
htmlContents := self htmlUrl.
|
||||
^ htmlContents retrieveContents
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
HedgeDoc >> saveContentsToFile: aFileLocator [
|
||||
self url ifNil: [ ^ self ].
|
||||
^ (self url addPathSegment: 'download') saveContentsToFile: aFileLocator
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
HedgeDoc >> saveHtmlContentsToFile: aFileLocator [
|
||||
self url ifNil: [ ^ self ].
|
||||
^ self htmlUrl saveContentsToFile: aFileLocator
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> server [
|
||||
^ server
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> server: aUrlString [
|
||||
server := aUrlString
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> url [
|
||||
^ url asUrl
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> url: anObject [
|
||||
| tempUrl html |
|
||||
tempUrl := anObject asZnUrl.
|
||||
html := XMLHTMLParser parse: tempUrl retrieveContents.
|
||||
(html xpath: '//head/meta[@name="application-name"][@content = "HedgeDoc - Ideas grow better together"]') isEmpty
|
||||
ifTrue: [ self inform: 'Not a hedgedoc url'.
|
||||
url := nil ].
|
||||
self metadata at: 'title' put: tempUrl firstPathSegment.
|
||||
server := tempUrl host.
|
||||
url := anObject
|
||||
]
|
||||
|
||||
{ #category : #visiting }
|
||||
HedgeDoc >> visit [
|
||||
WebBrowser openOn: self server, '/', self pad.
|
||||
]
|
||||
|
||||
{ #category : #transformation }
|
||||
HedgeDoc >> youtubeEmbeddedLinksToMarkdeepFormat [
|
||||
"I replace the youtube embedded links from hedgedoc format to markdeep format."
|
||||
| linkDataCollection |
|
||||
linkDataCollection := (HedgeDocGrammar new youtubeEmbeddedLink parse: self contents)
|
||||
collect: [ :each | | parsedLink |
|
||||
parsedLink := OrderedCollection new.
|
||||
parsedLink
|
||||
add: ('' join:( each collect: [ :s | s value]));
|
||||
add: '![](https://youtu.be/',
|
||||
each second value trimmed , ')';
|
||||
add: (each first start to: each third stop);
|
||||
yourself ].
|
||||
linkDataCollection do: [ :each |
|
||||
self contents: (self contents
|
||||
copyReplaceAll: each first with: each second) ].
|
||||
^ self
|
||||
]
|
||||
"
|
||||
I model the interface between a CodiMD (https://demo.codimd.org) documentation
|
||||
server and Grafoscopio.
|
||||
I enable the interaction between Grafoscopio notebooks and CodiMD documents,
|
||||
so one document can start online (as a CodiMD pad) and continue as a Grafoscopio
|
||||
notebook or viceversa.
|
||||
"
|
||||
Class {
|
||||
#name : #HedgeDoc,
|
||||
#superclass : #Markdown,
|
||||
#instVars : [
|
||||
'server',
|
||||
'pad',
|
||||
'url'
|
||||
],
|
||||
#category : #'MiniDocs-Core'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc class >> fromLink: aUrl [
|
||||
^ self new fromLink: aUrl
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
HedgeDoc class >> newDefault [
|
||||
^ self new
|
||||
defaultServer.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> asLePage [
|
||||
| newPage snippet |
|
||||
snippet := LeTextSnippet new
|
||||
string: self bodyWithoutTitleHeader promoteMarkdownHeaders.
|
||||
newPage := LePage new
|
||||
initializeTitle: self title;
|
||||
addSnippet: snippet;
|
||||
yourself.
|
||||
newPage incomingLinks.
|
||||
newPage metadata addAll: self metadata.
|
||||
^ newPage
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> asMarkdeep [
|
||||
^ Markdeep new
|
||||
metadata: self metadata;
|
||||
body: self contents;
|
||||
file: self file, 'html'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> asMarkdownTiddler [
|
||||
self url ifNil: [ ^ self ].
|
||||
^ Tiddler new
|
||||
title: self url segments first;
|
||||
text: (self contents ifNil: [ self retrieveContents]);
|
||||
type: 'text/x-markdown';
|
||||
created: Tiddler nowLocal.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> bodyWithoutTitleHeader [
|
||||
| headerIndex |
|
||||
headerIndex := self body lines
|
||||
detectIndex: [ :line | line includesSubstring: self headerAsTitle ]
|
||||
ifNone: [ ^ self body].
|
||||
^ (self body lines copyWithoutIndex: headerIndex) asStringWithCr
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> contents [
|
||||
^ super contents
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> contents: anObject [
|
||||
body := anObject
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
HedgeDoc >> defaultServer [
|
||||
self server: 'https://docutopia.tupale.co'.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> fromLink: aString [
|
||||
self url: aString.
|
||||
self retrieveContents
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
HedgeDoc >> htmlUrl [
|
||||
| link |
|
||||
link := self url copy.
|
||||
link segments insert: 's' before: 1.
|
||||
^ link
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
HedgeDoc >> importContents [
|
||||
self contents: self retrieveContents
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> pad [
|
||||
^ pad
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> pad: anObject [
|
||||
pad := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> retrieveContents [
|
||||
self url ifNil: [ ^ self ].
|
||||
self fromString: (self url addPathSegment: 'download') retrieveContents.
|
||||
^ self.
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
HedgeDoc >> retrieveHtmlContents [
|
||||
| htmlContents |
|
||||
self url ifNil: [ ^ self ].
|
||||
htmlContents := self htmlUrl.
|
||||
^ htmlContents retrieveContents
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
HedgeDoc >> saveContentsToFile: aFileLocator [
|
||||
self url ifNil: [ ^ self ].
|
||||
^ (self url addPathSegment: 'download') saveContentsToFile: aFileLocator
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
HedgeDoc >> saveHtmlContentsToFile: aFileLocator [
|
||||
self url ifNil: [ ^ self ].
|
||||
^ self htmlUrl saveContentsToFile: aFileLocator
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> server [
|
||||
^ server
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> server: aUrlString [
|
||||
server := aUrlString
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> url [
|
||||
^ url asUrl
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDoc >> url: anObject [
|
||||
| tempUrl html |
|
||||
tempUrl := anObject asZnUrl.
|
||||
html := XMLHTMLParser parse: tempUrl retrieveContents.
|
||||
(html xpath: '//head/meta[@name="application-name"][@content = "HedgeDoc - Ideas grow better together"]') isEmpty
|
||||
ifTrue: [ self inform: 'Not a hedgedoc url'.
|
||||
url := nil ].
|
||||
self metadata at: 'title' put: tempUrl firstPathSegment.
|
||||
server := tempUrl host.
|
||||
url := anObject
|
||||
]
|
||||
|
||||
{ #category : #visiting }
|
||||
HedgeDoc >> visit [
|
||||
WebBrowser openOn: self server, '/', self pad.
|
||||
]
|
||||
|
||||
{ #category : #transformation }
|
||||
HedgeDoc >> youtubeEmbeddedLinksToMarkdeepFormat [
|
||||
"I replace the youtube embedded links from hedgedoc format to markdeep format."
|
||||
| linkDataCollection |
|
||||
linkDataCollection := (HedgeDocGrammar new youtubeEmbeddedLink parse: self contents)
|
||||
collect: [ :each | | parsedLink |
|
||||
parsedLink := OrderedCollection new.
|
||||
parsedLink
|
||||
add: ('' join:( each collect: [ :s | s value]));
|
||||
add: '![](https://youtu.be/',
|
||||
each second value trimmed , ')';
|
||||
add: (each first start to: each third stop);
|
||||
yourself ].
|
||||
linkDataCollection do: [ :each |
|
||||
self contents: (self contents
|
||||
copyReplaceAll: each first with: each second) ].
|
||||
^ self
|
||||
]
|
||||
|
@ -1,36 +1,36 @@
|
||||
Class {
|
||||
#name : #HedgeDocExamples,
|
||||
#superclass : #Object,
|
||||
#category : #'MiniDocs-Examples'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDocExamples >> hedgeDocReplaceYoutubeEmbeddedLinkExample [
|
||||
<gtExample>
|
||||
| aSampleString hedgedocDoc parsedCollection hedgedocDocLinksReplaced |
|
||||
aSampleString := '---
|
||||
breaks: false
|
||||
|
||||
---
|
||||
|
||||
# Titulo
|
||||
|
||||
Un texto de ejemplo
|
||||
|
||||
# Enlaces youtube
|
||||
|
||||
{%youtube 1aw3XmTqFXA %}
|
||||
|
||||
otro video
|
||||
|
||||
{%youtube U7mpXaLN9Nc %}'.
|
||||
hedgedocDoc := HedgeDoc new
|
||||
contents: aSampleString.
|
||||
hedgedocDocLinksReplaced := HedgeDoc new contents: aSampleString; youtubeEmbeddedLinksToMarkdeepFormat.
|
||||
self assert: (hedgedocDoc contents
|
||||
includesSubstring: '{%youtube 1aw3XmTqFXA %}' ).
|
||||
self assert: (hedgedocDocLinksReplaced contents
|
||||
includesSubstring: '![](https://youtu.be/1aw3XmTqFXA)' ).
|
||||
^ { 'Original' -> hedgedocDoc .
|
||||
'Replaced' -> hedgedocDocLinksReplaced } asDictionary
|
||||
]
|
||||
Class {
|
||||
#name : #HedgeDocExamples,
|
||||
#superclass : #Object,
|
||||
#category : #'MiniDocs-Examples'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDocExamples >> hedgeDocReplaceYoutubeEmbeddedLinkExample [
|
||||
<gtExample>
|
||||
| aSampleString hedgedocDoc parsedCollection hedgedocDocLinksReplaced |
|
||||
aSampleString := '---
|
||||
breaks: false
|
||||
|
||||
---
|
||||
|
||||
# Titulo
|
||||
|
||||
Un texto de ejemplo
|
||||
|
||||
# Enlaces youtube
|
||||
|
||||
{%youtube 1aw3XmTqFXA %}
|
||||
|
||||
otro video
|
||||
|
||||
{%youtube U7mpXaLN9Nc %}'.
|
||||
hedgedocDoc := HedgeDoc new
|
||||
contents: aSampleString.
|
||||
hedgedocDocLinksReplaced := HedgeDoc new contents: aSampleString; youtubeEmbeddedLinksToMarkdeepFormat.
|
||||
self assert: (hedgedocDoc contents
|
||||
includesSubstring: '{%youtube 1aw3XmTqFXA %}' ).
|
||||
self assert: (hedgedocDocLinksReplaced contents
|
||||
includesSubstring: '![](https://youtu.be/1aw3XmTqFXA)' ).
|
||||
^ { 'Original' -> hedgedocDoc .
|
||||
'Replaced' -> hedgedocDocLinksReplaced } asDictionary
|
||||
]
|
||||
|
@ -1,42 +1,42 @@
|
||||
Class {
|
||||
#name : #HedgeDocGrammar,
|
||||
#superclass : #PP2CompositeNode,
|
||||
#instVars : [
|
||||
'youtubeEmbeddedLink'
|
||||
],
|
||||
#category : #'MiniDocs-Model'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDocGrammar >> metadataAsYAML [
|
||||
"I parse the header of the hedgedoc document for YAML metadata."
|
||||
^ '---' asPParser token, #any asPParser starLazy token, '---' asPParser token
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDocGrammar >> start [
|
||||
| any |
|
||||
any := #any asPParser.
|
||||
^ (self metadataAsYAML / any starLazy), youtubeEmbeddedLink
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDocGrammar >> youtubeEmbeddedLink [
|
||||
"I parse the youtube embedded links in a hedgedoc document."
|
||||
| link linkSea |
|
||||
link := self youtubeEmbeddedLinkOpen,
|
||||
#any asPParser starLazy token,
|
||||
self youtubeEmbeddedLinkClose.
|
||||
linkSea := link islandInSea star.
|
||||
^ linkSea
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDocGrammar >> youtubeEmbeddedLinkClose [
|
||||
^ '%}' asPParser token
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDocGrammar >> youtubeEmbeddedLinkOpen [
|
||||
^ '{%youtube' asPParser token
|
||||
]
|
||||
Class {
|
||||
#name : #HedgeDocGrammar,
|
||||
#superclass : #PP2CompositeNode,
|
||||
#instVars : [
|
||||
'youtubeEmbeddedLink'
|
||||
],
|
||||
#category : #'MiniDocs-Model'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDocGrammar >> metadataAsYAML [
|
||||
"I parse the header of the hedgedoc document for YAML metadata."
|
||||
^ '---' asPParser token, #any asPParser starLazy token, '---' asPParser token
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDocGrammar >> start [
|
||||
| any |
|
||||
any := #any asPParser.
|
||||
^ (self metadataAsYAML / any starLazy), youtubeEmbeddedLink
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDocGrammar >> youtubeEmbeddedLink [
|
||||
"I parse the youtube embedded links in a hedgedoc document."
|
||||
| link linkSea |
|
||||
link := self youtubeEmbeddedLinkOpen,
|
||||
#any asPParser starLazy token,
|
||||
self youtubeEmbeddedLinkClose.
|
||||
linkSea := link islandInSea star.
|
||||
^ linkSea
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDocGrammar >> youtubeEmbeddedLinkClose [
|
||||
^ '%}' asPParser token
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDocGrammar >> youtubeEmbeddedLinkOpen [
|
||||
^ '{%youtube' asPParser token
|
||||
]
|
||||
|
@ -1,19 +1,19 @@
|
||||
Class {
|
||||
#name : #HedgeDocGrammarExamples,
|
||||
#superclass : #Object,
|
||||
#category : #'MiniDocs-Examples'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDocGrammarExamples >> hedgeDocParseYoutubeEmbeddedLinkExample [
|
||||
<gtExample>
|
||||
| aSampleString parsedStringTokens parsedCollection |
|
||||
aSampleString := '{%youtube 1aw3XmTqFXA %}'.
|
||||
parsedStringTokens := HedgeDocGrammar new youtubeEmbeddedLink parse: aSampleString.
|
||||
parsedCollection := parsedStringTokens first.
|
||||
self assert: parsedCollection size equals: 3.
|
||||
self assert: parsedCollection first value equals: '{%youtube'.
|
||||
self assert: parsedCollection second class equals: PP2Token.
|
||||
self assert: parsedCollection third value equals: '%}'.
|
||||
^ parsedStringTokens
|
||||
]
|
||||
Class {
|
||||
#name : #HedgeDocGrammarExamples,
|
||||
#superclass : #Object,
|
||||
#category : #'MiniDocs-Examples'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDocGrammarExamples >> hedgeDocParseYoutubeEmbeddedLinkExample [
|
||||
<gtExample>
|
||||
| aSampleString parsedStringTokens parsedCollection |
|
||||
aSampleString := '{%youtube 1aw3XmTqFXA %}'.
|
||||
parsedStringTokens := HedgeDocGrammar new youtubeEmbeddedLink parse: aSampleString.
|
||||
parsedCollection := parsedStringTokens first.
|
||||
self assert: parsedCollection size equals: 3.
|
||||
self assert: parsedCollection first value equals: '{%youtube'.
|
||||
self assert: parsedCollection second class equals: PP2Token.
|
||||
self assert: parsedCollection third value equals: '%}'.
|
||||
^ parsedStringTokens
|
||||
]
|
||||
|
@ -1,15 +1,15 @@
|
||||
Class {
|
||||
#name : #HedgeDocGrammarTest,
|
||||
#superclass : #PP2CompositeNodeTest,
|
||||
#category : #'MiniDocs-Model'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDocGrammarTest >> parserClass [
|
||||
^ HedgeDocGrammar
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDocGrammarTest >> testYoutubeEmbeddedLink [
|
||||
^ self parse: '{%youtube U7mpXaLN9Nc %}' rule: #youtubeEmbeddedLink
|
||||
]
|
||||
Class {
|
||||
#name : #HedgeDocGrammarTest,
|
||||
#superclass : #PP2CompositeNodeTest,
|
||||
#category : #'MiniDocs-Model'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDocGrammarTest >> parserClass [
|
||||
^ HedgeDocGrammar
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
HedgeDocGrammarTest >> testYoutubeEmbeddedLink [
|
||||
^ self parse: '{%youtube U7mpXaLN9Nc %}' rule: #youtubeEmbeddedLink
|
||||
]
|
||||
|
@ -1,26 +1,26 @@
|
||||
Extension { #name : #LeChangesSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeChangesSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uuid;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeChangesSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
||||
Extension { #name : #LeChangesSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeChangesSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uuid;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeChangesSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
||||
|
@ -1,21 +1,21 @@
|
||||
Extension { #name : #LeCodeSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeCodeSnippet >> metadataUpdate [
|
||||
| surrogate |
|
||||
self parent
|
||||
ifNil: [ surrogate := nil]
|
||||
ifNotNil: [
|
||||
self parent isString
|
||||
ifTrue: [ surrogate := self parent]
|
||||
ifFalse: [ surrogate := self parent uidString ]
|
||||
].
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: surrogate;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: self createEmail asString withoutXMLTagDelimiters;
|
||||
at: 'modifier' put: self editEmail asString withoutXMLTagDelimiters;
|
||||
yourself
|
||||
]
|
||||
Extension { #name : #LeCodeSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeCodeSnippet >> metadataUpdate [
|
||||
| surrogate |
|
||||
self parent
|
||||
ifNil: [ surrogate := nil]
|
||||
ifNotNil: [
|
||||
self parent isString
|
||||
ifTrue: [ surrogate := self parent]
|
||||
ifFalse: [ surrogate := self parent uidString ]
|
||||
].
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: surrogate;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: self createEmail asString withoutXMLTagDelimiters;
|
||||
at: 'modifier' put: self editEmail asString withoutXMLTagDelimiters;
|
||||
yourself
|
||||
]
|
||||
|
@ -1,313 +1,313 @@
|
||||
Extension { #name : #LeDatabase }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> addPage2FromMarkdeep: markdeepDocTree withRemote: externalDocLocation [
|
||||
| newPage |
|
||||
"^ { snippets . page }"
|
||||
"Rebulding partial subtrees"
|
||||
"Adding unrooted subtrees to the page"
|
||||
"^ newPage"
|
||||
newPage := self
|
||||
rebuildPageFromMarkdeep: markdeepDocTree
|
||||
withRemote: externalDocLocation.
|
||||
newPage
|
||||
childrenDo: [ :snippet |
|
||||
(self hasBlockUID: snippet uid)
|
||||
ifTrue: [ | existingPage |
|
||||
existingPage := self pages
|
||||
detect: [ :pageTemp | pageTemp includesSnippetUid: snippet uid ].
|
||||
self importErrorForLocal: existingPage withRemote: externalDocLocation.
|
||||
^ self ]
|
||||
ifFalse: [ snippet database: self.
|
||||
self registerSnippet: snippet ] ].
|
||||
self addPage: newPage.
|
||||
^ newPage
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> addPageCopy: aLePage [
|
||||
| pageTitle timestamp shortID page |
|
||||
timestamp := DateAndTime now asString.
|
||||
pageTitle := 'Copy of ', aLePage title.
|
||||
page := aLePage duplicatePageWithNewName: pageTitle, timestamp.
|
||||
shortID := '(id: ', (page uid asString copyFrom: 1 to: 8), ')'.
|
||||
page title: (page title copyReplaceAll: timestamp with: shortID).
|
||||
^ page
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> addPageFromMarkdeep: markdeepDocTree withRemote: externalDocLocation [
|
||||
| remoteMetadata divSnippets dataSnippets snippets page |
|
||||
divSnippets := (markdeepDocTree xpath: '//div[@st-class]') asOrderedCollection
|
||||
collect: [ :xmlElement | xmlElement postCopy ].
|
||||
remoteMetadata := Markdeep new metadataFromXML: markdeepDocTree.
|
||||
remoteMetadata at: 'origin' put: externalDocLocation.
|
||||
dataSnippets := self sanitizeMarkdeepSnippets: divSnippets withMetadata: remoteMetadata.
|
||||
snippets := dataSnippets collect: [ :each | each asLepiterSnippet ].
|
||||
page := LePage new.
|
||||
page
|
||||
title: (remoteMetadata at: 'title' ifAbsent: [ page detectMarkdeepTitleFrom: markdeepDocTree ]);
|
||||
basicUid: (UUID fromString36: (remoteMetadata at: 'id' ifAbsent: [UUID new asString36]));
|
||||
createTime: (LeTime new
|
||||
time: (remoteMetadata at: 'created' ifAbsent: [ DateAndTime now]) asDateAndTime);
|
||||
editTime: (LeTime new
|
||||
time: (remoteMetadata at: 'modified' ifAbsent: [ DateAndTime now]) asDateAndTime);
|
||||
latestEditTime: (LeTime new
|
||||
time: (remoteMetadata at: 'modified' ifAbsent: [ DateAndTime now]) asDateAndTime);
|
||||
createEmail: (remoteMetadata at: 'creator' ifAbsent: [ 'unknown' ]);
|
||||
editEmail: (remoteMetadata at: 'modifier' ifAbsent: [ 'unknown' ]).
|
||||
snippets do: [ :snippet | "| currentParent |"
|
||||
page addSnippet: snippet.
|
||||
"currentParent := page detectParentSnippetWithUid: (snippet metadata at: 'parent').
|
||||
snippet parent: currentParent."
|
||||
].
|
||||
page children
|
||||
do: [ :snippet |
|
||||
(self hasBlockUID: snippet uid)
|
||||
ifTrue: [ | existingPage |
|
||||
existingPage := self pages
|
||||
detect: [ :pageTemp | pageTemp includesSnippetUid: snippet uid ].
|
||||
self importErrorForLocal: existingPage withRemote: externalDocLocation.
|
||||
^ self ]
|
||||
ifFalse: [ snippet database: self.
|
||||
self registerSnippet: snippet ] ].
|
||||
self addPage: page.
|
||||
^ page
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> addPageFromMarkdeepUrl: aString [
|
||||
| page |
|
||||
page := self detectLocalPageForRemote: aString.
|
||||
page
|
||||
ifNotNil: [ :arg |
|
||||
self importErrorForLocal: page withRemote: aString.
|
||||
^ self errorCardFor: page uidString ].
|
||||
^ self addPageFromMarkdeep: (self docTreeForLink: aString) withRemote: aString
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> detectLocalPageForRemote: markdeepDocUrl [
|
||||
| markdeepHelper id remoteMetadata docTree |
|
||||
markdeepHelper := Markdeep new.
|
||||
docTree := self docTreeForLink: markdeepDocUrl.
|
||||
remoteMetadata := markdeepHelper metadataFromXML: docTree.
|
||||
id := remoteMetadata at: 'id' ifAbsent: [ nil ].
|
||||
^ self pageWithID: id ifAbsent: [ ^ nil ].
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> docTreeForLink: aString [
|
||||
^ (XMLHTMLParser on: aString asUrl retrieveContents) parseDocument
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> errorCardFor: errorKey [
|
||||
|
||||
| keepButton overwriteButton loadCopyButton errorMessageUI localPage |
|
||||
|
||||
localPage := self pageWithID: errorKey.
|
||||
keepButton := BrButton new
|
||||
aptitude: BrGlamorousButtonWithIconAndLabelAptitude;
|
||||
label: 'Keep existing local page';
|
||||
icon: BrGlamorousVectorIcons cancel;
|
||||
margin: (BlInsets left: 10);
|
||||
action: [ :aButton |
|
||||
aButton phlow spawnObject: localPage.
|
||||
self errors removeKey: errorKey
|
||||
].
|
||||
overwriteButton := BrButton new
|
||||
aptitude: BrGlamorousButtonWithIconAndLabelAptitude;
|
||||
label: 'Overwrite with remote page';
|
||||
icon: BrGlamorousVectorIcons edit;
|
||||
action: [ :aButton |
|
||||
self removePage: localPage.
|
||||
aButton phlow spawnObject: (self addPageFromMarkdeepUrl: (self errors at: errorKey at: 'remote')).
|
||||
self errors removeKey: errorKey
|
||||
];
|
||||
margin: (BlInsets left: 10).
|
||||
loadCopyButton := BrButton new
|
||||
aptitude: BrGlamorousButtonWithIconAndLabelAptitude;
|
||||
label: 'Load remote page as a copy';
|
||||
icon: BrGlamorousVectorIcons changes;
|
||||
action: [ :aButton | self ];
|
||||
margin: (BlInsets left: 10).
|
||||
|
||||
errorMessageUI := BrEditor new
|
||||
aptitude: BrGlamorousRegularEditorAptitude new ;
|
||||
text: (self errors at: errorKey at: 'message');
|
||||
vFitContent.
|
||||
^ BrHorizontalPane new
|
||||
matchParent;
|
||||
alignCenter;
|
||||
addChild:errorMessageUI;
|
||||
addChild: keepButton;
|
||||
addChild: overwriteButton;
|
||||
addChild: loadCopyButton
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> errors [
|
||||
|
||||
^ self optionAt: 'errors' ifAbsentPut: [ Dictionary new ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> gtViewErrorDetailsOn: aView [
|
||||
<gtView>
|
||||
^ aView explicit
|
||||
title: 'Errors' translated;
|
||||
priority: 5;
|
||||
stencil: [ | container |
|
||||
container := BlElement new
|
||||
layout: BlFlowLayout new;
|
||||
constraintsDo: [ :c |
|
||||
c vertical fitContent.
|
||||
c horizontal matchParent ];
|
||||
padding: (BlInsets all: 10).
|
||||
container
|
||||
addChildren: (self errorCardFor: self errors)
|
||||
].
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> gtViewErrorDetailsOn: aView withKey: erroKey [
|
||||
<gtView>
|
||||
^ aView explicit
|
||||
title: 'Errors beta' translated;
|
||||
priority: 5;
|
||||
stencil: [ | container |
|
||||
container := BlElement new
|
||||
layout: BlFlowLayout new;
|
||||
constraintsDo: [ :c |
|
||||
c vertical fitContent.
|
||||
c horizontal matchParent ];
|
||||
padding: (BlInsets all: 10).
|
||||
container
|
||||
addChildren: (self errorCardFor: erroKey)
|
||||
].
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> importDocumentFrom: aURL [
|
||||
| doc |
|
||||
"Using file extension in URL as a cheap (non-robuts) way of detecting the kind of document.
|
||||
Better file type detection should be implemented in the future."
|
||||
(aURL endsWith: '.md.html') ifTrue: [ ^ self addPageFromMarkdeepUrl: aURL ].
|
||||
doc := HedgeDoc fromLink: aURL asString.
|
||||
^ self addPage: doc asLePage
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> importErrorForLocal: page withRemote: externalDocLocation [
|
||||
|
||||
| message id error |
|
||||
id := page uidString.
|
||||
message := String streamContents: [ :stream |
|
||||
stream
|
||||
nextPutAll: 'IMPORTATION ERROR: A page with
|
||||
';
|
||||
nextPut: Character lf;
|
||||
nextPutAll: ' id: ' , id;
|
||||
nextPut: Character lf;
|
||||
nextPutAll: ' title: ' , page contentAsString;
|
||||
nextPut: Character lf;
|
||||
nextPut: Character lf;
|
||||
nextPutAll: 'already exists in this database and includes overlapping contents';
|
||||
nextPut: Character lf;
|
||||
nextPutAll: 'with the page you are trying to import from:
|
||||
';
|
||||
nextPut: Character lf;
|
||||
nextPutAll: externalDocLocation;
|
||||
nextPut: Character lf;
|
||||
nextPut: Character lf;
|
||||
nextPutAll:
|
||||
'Please choose one of the following options to addres the issue:
|
||||
' ].
|
||||
error := Dictionary new
|
||||
at: 'remote' put: externalDocLocation;
|
||||
at: 'message' put: message ;
|
||||
yourself.
|
||||
self errors at: id put: error.
|
||||
^ self errors at: id.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> options [
|
||||
|
||||
^ options
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> previewSanitizedPageFromMarkdeep: markdeepDocTree withRemote: externalDocLocation [
|
||||
| remoteMetadata divSnippets divSnippetsSanitized |
|
||||
divSnippets := (markdeepDocTree xpath: '//div[@st-class]') asOrderedCollection
|
||||
collect: [ :xmlElement | xmlElement postCopy ].
|
||||
remoteMetadata := Markdeep new metadataFromXML: markdeepDocTree.
|
||||
remoteMetadata at: 'origin' put: externalDocLocation.
|
||||
divSnippetsSanitized := self sanitizeMarkdeepSnippets: divSnippets withMetadata: remoteMetadata.
|
||||
^ { divSnippets . divSnippetsSanitized . remoteMetadata }
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> rebuildPageFromMarkdeep: markdeepDocTree withRemote: externalDocLocation [
|
||||
| newPage snippets divSnippets remoteMetadata dataSnippets |
|
||||
divSnippets := (markdeepDocTree xpath: '//div[@st-class]') asOrderedCollection
|
||||
collect: [ :xmlElement | xmlElement postCopy ].
|
||||
remoteMetadata := Markdeep new metadataFromXML: markdeepDocTree.
|
||||
remoteMetadata at: 'origin' put: externalDocLocation.
|
||||
dataSnippets := self
|
||||
sanitizeMarkdeepSnippets: divSnippets
|
||||
withMetadata: remoteMetadata.
|
||||
snippets := dataSnippets collect: [ :each | each asLepiterSnippet ].
|
||||
newPage := LePage new
|
||||
title: (remoteMetadata at: 'title');
|
||||
basicUid: (UUID fromString36: (remoteMetadata at: 'id'));
|
||||
createTime: (LeTime new time: (remoteMetadata at: 'created') asDateAndTime);
|
||||
editTime: (LeTime new time: (remoteMetadata at: 'modified') asDateAndTime);
|
||||
latestEditTime: (LeTime new time: (remoteMetadata at: 'modified') asDateAndTime);
|
||||
createEmail: (remoteMetadata at: 'creator');
|
||||
editEmail: (remoteMetadata at: 'modifier'). "^ { snippets . page }" "Rebulding partial subtrees"
|
||||
snippets
|
||||
do: [ :currentSnippet |
|
||||
| parentSnippet |
|
||||
parentSnippet := snippets
|
||||
detect: [ :item | item uid asString = currentSnippet parent ]
|
||||
ifNone: [ parentSnippet := 'unrooted' ].
|
||||
currentSnippet parent: parentSnippet.
|
||||
parentSnippet class = ByteString
|
||||
ifFalse: [ parentSnippet children addChild: currentSnippet ] ]. "Adding unrooted subtrees to the page"
|
||||
"^ { unrooted . newPage }."
|
||||
snippets
|
||||
select: [ :each | each parent = 'unrooted' ]
|
||||
thenDo: [ :unrooted | newPage addSnippet: unrooted ].
|
||||
^ newPage
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> sanitizeMarkdeepSnippets: divSnippets withMetadata: remoteMetadata [
|
||||
^ divSnippets collectWithIndex: [:markdeepDiv :i | | snippetData creationTime modificationTime timestampWarning |
|
||||
snippetData := markdeepDiv asSnippetDictionary.
|
||||
creationTime := snippetData at: 'created'.
|
||||
modificationTime := snippetData at: 'modified'.
|
||||
timestampWarning := [:timestamp |
|
||||
'Modified timestamps: ', timestamp ,' date and time was replaced instead of nil value. See "origin" metadata for more historical traceability information.'
|
||||
].
|
||||
(creationTime = 'nil' and: [ modificationTime ~= 'nil' ])
|
||||
ifTrue: [
|
||||
snippetData redefineTimestampsBefore: modificationTime.
|
||||
snippetData addErrata: (timestampWarning value: 'creation').
|
||||
snippetData at: 'origin' put: (remoteMetadata at: 'origin').
|
||||
].
|
||||
(creationTime = 'nil' and: [ modificationTime = 'nil' ])
|
||||
ifTrue: [ | timeDiff |
|
||||
timeDiff := divSnippets size - i. "Suggesting that last snippets were modified after the first ones."
|
||||
modificationTime := (remoteMetadata at: 'created') asDateAndTime - timeDiff seconds.
|
||||
snippetData redefineTimestampsBefore: modificationTime.
|
||||
snippetData addErrata: (timestampWarning value: 'creation').
|
||||
snippetData addErrata: (timestampWarning value: 'modification').
|
||||
snippetData at: 'origin' put: (remoteMetadata at: 'origin').
|
||||
].
|
||||
snippetData.
|
||||
]
|
||||
]
|
||||
Extension { #name : #LeDatabase }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> addPage2FromMarkdeep: markdeepDocTree withRemote: externalDocLocation [
|
||||
| newPage |
|
||||
"^ { snippets . page }"
|
||||
"Rebulding partial subtrees"
|
||||
"Adding unrooted subtrees to the page"
|
||||
"^ newPage"
|
||||
newPage := self
|
||||
rebuildPageFromMarkdeep: markdeepDocTree
|
||||
withRemote: externalDocLocation.
|
||||
newPage
|
||||
childrenDo: [ :snippet |
|
||||
(self hasBlockUID: snippet uid)
|
||||
ifTrue: [ | existingPage |
|
||||
existingPage := self pages
|
||||
detect: [ :pageTemp | pageTemp includesSnippetUid: snippet uid ].
|
||||
self importErrorForLocal: existingPage withRemote: externalDocLocation.
|
||||
^ self ]
|
||||
ifFalse: [ snippet database: self.
|
||||
self registerSnippet: snippet ] ].
|
||||
self addPage: newPage.
|
||||
^ newPage
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> addPageCopy: aLePage [
|
||||
| pageTitle timestamp shortID page |
|
||||
timestamp := DateAndTime now asString.
|
||||
pageTitle := 'Copy of ', aLePage title.
|
||||
page := aLePage duplicatePageWithNewName: pageTitle, timestamp.
|
||||
shortID := '(id: ', (page uid asString copyFrom: 1 to: 8), ')'.
|
||||
page title: (page title copyReplaceAll: timestamp with: shortID).
|
||||
^ page
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> addPageFromMarkdeep: markdeepDocTree withRemote: externalDocLocation [
|
||||
| remoteMetadata divSnippets dataSnippets snippets page |
|
||||
divSnippets := (markdeepDocTree xpath: '//div[@st-class]') asOrderedCollection
|
||||
collect: [ :xmlElement | xmlElement postCopy ].
|
||||
remoteMetadata := Markdeep new metadataFromXML: markdeepDocTree.
|
||||
remoteMetadata at: 'origin' put: externalDocLocation.
|
||||
dataSnippets := self sanitizeMarkdeepSnippets: divSnippets withMetadata: remoteMetadata.
|
||||
snippets := dataSnippets collect: [ :each | each asLepiterSnippet ].
|
||||
page := LePage new.
|
||||
page
|
||||
title: (remoteMetadata at: 'title' ifAbsent: [ page detectMarkdeepTitleFrom: markdeepDocTree ]);
|
||||
basicUid: (UUID fromString36: (remoteMetadata at: 'id' ifAbsent: [UUID new asString36]));
|
||||
createTime: (LeTime new
|
||||
time: (remoteMetadata at: 'created' ifAbsent: [ DateAndTime now]) asDateAndTime);
|
||||
editTime: (LeTime new
|
||||
time: (remoteMetadata at: 'modified' ifAbsent: [ DateAndTime now]) asDateAndTime);
|
||||
latestEditTime: (LeTime new
|
||||
time: (remoteMetadata at: 'modified' ifAbsent: [ DateAndTime now]) asDateAndTime);
|
||||
createEmail: (remoteMetadata at: 'creator' ifAbsent: [ 'unknown' ]);
|
||||
editEmail: (remoteMetadata at: 'modifier' ifAbsent: [ 'unknown' ]).
|
||||
snippets do: [ :snippet | "| currentParent |"
|
||||
page addSnippet: snippet.
|
||||
"currentParent := page detectParentSnippetWithUid: (snippet metadata at: 'parent').
|
||||
snippet parent: currentParent."
|
||||
].
|
||||
page children
|
||||
do: [ :snippet |
|
||||
(self hasBlockUID: snippet uid)
|
||||
ifTrue: [ | existingPage |
|
||||
existingPage := self pages
|
||||
detect: [ :pageTemp | pageTemp includesSnippetUid: snippet uid ].
|
||||
self importErrorForLocal: existingPage withRemote: externalDocLocation.
|
||||
^ self ]
|
||||
ifFalse: [ snippet database: self.
|
||||
self registerSnippet: snippet ] ].
|
||||
self addPage: page.
|
||||
^ page
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> addPageFromMarkdeepUrl: aString [
|
||||
| page |
|
||||
page := self detectLocalPageForRemote: aString.
|
||||
page
|
||||
ifNotNil: [ :arg |
|
||||
self importErrorForLocal: page withRemote: aString.
|
||||
^ self errorCardFor: page uidString ].
|
||||
^ self addPageFromMarkdeep: (self docTreeForLink: aString) withRemote: aString
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> detectLocalPageForRemote: markdeepDocUrl [
|
||||
| markdeepHelper id remoteMetadata docTree |
|
||||
markdeepHelper := Markdeep new.
|
||||
docTree := self docTreeForLink: markdeepDocUrl.
|
||||
remoteMetadata := markdeepHelper metadataFromXML: docTree.
|
||||
id := remoteMetadata at: 'id' ifAbsent: [ nil ].
|
||||
^ self pageWithID: id ifAbsent: [ ^ nil ].
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> docTreeForLink: aString [
|
||||
^ (XMLHTMLParser on: aString asUrl retrieveContents) parseDocument
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> errorCardFor: errorKey [
|
||||
|
||||
| keepButton overwriteButton loadCopyButton errorMessageUI localPage |
|
||||
|
||||
localPage := self pageWithID: errorKey.
|
||||
keepButton := BrButton new
|
||||
aptitude: BrGlamorousButtonWithIconAndLabelAptitude;
|
||||
label: 'Keep existing local page';
|
||||
icon: BrGlamorousVectorIcons cancel;
|
||||
margin: (BlInsets left: 10);
|
||||
action: [ :aButton |
|
||||
aButton phlow spawnObject: localPage.
|
||||
self errors removeKey: errorKey
|
||||
].
|
||||
overwriteButton := BrButton new
|
||||
aptitude: BrGlamorousButtonWithIconAndLabelAptitude;
|
||||
label: 'Overwrite with remote page';
|
||||
icon: BrGlamorousVectorIcons edit;
|
||||
action: [ :aButton |
|
||||
self removePage: localPage.
|
||||
aButton phlow spawnObject: (self addPageFromMarkdeepUrl: (self errors at: errorKey at: 'remote')).
|
||||
self errors removeKey: errorKey
|
||||
];
|
||||
margin: (BlInsets left: 10).
|
||||
loadCopyButton := BrButton new
|
||||
aptitude: BrGlamorousButtonWithIconAndLabelAptitude;
|
||||
label: 'Load remote page as a copy';
|
||||
icon: BrGlamorousVectorIcons changes;
|
||||
action: [ :aButton | self ];
|
||||
margin: (BlInsets left: 10).
|
||||
|
||||
errorMessageUI := BrEditor new
|
||||
aptitude: BrGlamorousRegularEditorAptitude new ;
|
||||
text: (self errors at: errorKey at: 'message');
|
||||
vFitContent.
|
||||
^ BrHorizontalPane new
|
||||
matchParent;
|
||||
alignCenter;
|
||||
addChild:errorMessageUI;
|
||||
addChild: keepButton;
|
||||
addChild: overwriteButton;
|
||||
addChild: loadCopyButton
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> errors [
|
||||
|
||||
^ self optionAt: 'errors' ifAbsentPut: [ Dictionary new ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> gtViewErrorDetailsOn: aView [
|
||||
<gtView>
|
||||
^ aView explicit
|
||||
title: 'Errors' translated;
|
||||
priority: 5;
|
||||
stencil: [ | container |
|
||||
container := BlElement new
|
||||
layout: BlFlowLayout new;
|
||||
constraintsDo: [ :c |
|
||||
c vertical fitContent.
|
||||
c horizontal matchParent ];
|
||||
padding: (BlInsets all: 10).
|
||||
container
|
||||
addChildren: (self errorCardFor: self errors)
|
||||
].
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> gtViewErrorDetailsOn: aView withKey: erroKey [
|
||||
<gtView>
|
||||
^ aView explicit
|
||||
title: 'Errors beta' translated;
|
||||
priority: 5;
|
||||
stencil: [ | container |
|
||||
container := BlElement new
|
||||
layout: BlFlowLayout new;
|
||||
constraintsDo: [ :c |
|
||||
c vertical fitContent.
|
||||
c horizontal matchParent ];
|
||||
padding: (BlInsets all: 10).
|
||||
container
|
||||
addChildren: (self errorCardFor: erroKey)
|
||||
].
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> importDocumentFrom: aURL [
|
||||
| doc |
|
||||
"Using file extension in URL as a cheap (non-robuts) way of detecting the kind of document.
|
||||
Better file type detection should be implemented in the future."
|
||||
(aURL endsWith: '.md.html') ifTrue: [ ^ self addPageFromMarkdeepUrl: aURL ].
|
||||
doc := HedgeDoc fromLink: aURL asString.
|
||||
^ self addPage: doc asLePage
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> importErrorForLocal: page withRemote: externalDocLocation [
|
||||
|
||||
| message id error |
|
||||
id := page uidString.
|
||||
message := String streamContents: [ :stream |
|
||||
stream
|
||||
nextPutAll: 'IMPORTATION ERROR: A page with
|
||||
';
|
||||
nextPut: Character lf;
|
||||
nextPutAll: ' id: ' , id;
|
||||
nextPut: Character lf;
|
||||
nextPutAll: ' title: ' , page contentAsString;
|
||||
nextPut: Character lf;
|
||||
nextPut: Character lf;
|
||||
nextPutAll: 'already exists in this database and includes overlapping contents';
|
||||
nextPut: Character lf;
|
||||
nextPutAll: 'with the page you are trying to import from:
|
||||
';
|
||||
nextPut: Character lf;
|
||||
nextPutAll: externalDocLocation;
|
||||
nextPut: Character lf;
|
||||
nextPut: Character lf;
|
||||
nextPutAll:
|
||||
'Please choose one of the following options to addres the issue:
|
||||
' ].
|
||||
error := Dictionary new
|
||||
at: 'remote' put: externalDocLocation;
|
||||
at: 'message' put: message ;
|
||||
yourself.
|
||||
self errors at: id put: error.
|
||||
^ self errors at: id.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> options [
|
||||
|
||||
^ options
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> previewSanitizedPageFromMarkdeep: markdeepDocTree withRemote: externalDocLocation [
|
||||
| remoteMetadata divSnippets divSnippetsSanitized |
|
||||
divSnippets := (markdeepDocTree xpath: '//div[@st-class]') asOrderedCollection
|
||||
collect: [ :xmlElement | xmlElement postCopy ].
|
||||
remoteMetadata := Markdeep new metadataFromXML: markdeepDocTree.
|
||||
remoteMetadata at: 'origin' put: externalDocLocation.
|
||||
divSnippetsSanitized := self sanitizeMarkdeepSnippets: divSnippets withMetadata: remoteMetadata.
|
||||
^ { divSnippets . divSnippetsSanitized . remoteMetadata }
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> rebuildPageFromMarkdeep: markdeepDocTree withRemote: externalDocLocation [
|
||||
| newPage snippets divSnippets remoteMetadata dataSnippets |
|
||||
divSnippets := (markdeepDocTree xpath: '//div[@st-class]') asOrderedCollection
|
||||
collect: [ :xmlElement | xmlElement postCopy ].
|
||||
remoteMetadata := Markdeep new metadataFromXML: markdeepDocTree.
|
||||
remoteMetadata at: 'origin' put: externalDocLocation.
|
||||
dataSnippets := self
|
||||
sanitizeMarkdeepSnippets: divSnippets
|
||||
withMetadata: remoteMetadata.
|
||||
snippets := dataSnippets collect: [ :each | each asLepiterSnippet ].
|
||||
newPage := LePage new
|
||||
title: (remoteMetadata at: 'title');
|
||||
basicUid: (UUID fromString36: (remoteMetadata at: 'id'));
|
||||
createTime: (LeTime new time: (remoteMetadata at: 'created') asDateAndTime);
|
||||
editTime: (LeTime new time: (remoteMetadata at: 'modified') asDateAndTime);
|
||||
latestEditTime: (LeTime new time: (remoteMetadata at: 'modified') asDateAndTime);
|
||||
createEmail: (remoteMetadata at: 'creator');
|
||||
editEmail: (remoteMetadata at: 'modifier'). "^ { snippets . page }" "Rebulding partial subtrees"
|
||||
snippets
|
||||
do: [ :currentSnippet |
|
||||
| parentSnippet |
|
||||
parentSnippet := snippets
|
||||
detect: [ :item | item uid asString = currentSnippet parent ]
|
||||
ifNone: [ parentSnippet := 'unrooted' ].
|
||||
currentSnippet parent: parentSnippet.
|
||||
parentSnippet class = ByteString
|
||||
ifFalse: [ parentSnippet children addChild: currentSnippet ] ]. "Adding unrooted subtrees to the page"
|
||||
"^ { unrooted . newPage }."
|
||||
snippets
|
||||
select: [ :each | each parent = 'unrooted' ]
|
||||
thenDo: [ :unrooted | newPage addSnippet: unrooted ].
|
||||
^ newPage
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDatabase >> sanitizeMarkdeepSnippets: divSnippets withMetadata: remoteMetadata [
|
||||
^ divSnippets collectWithIndex: [:markdeepDiv :i | | snippetData creationTime modificationTime timestampWarning |
|
||||
snippetData := markdeepDiv asSnippetDictionary.
|
||||
creationTime := snippetData at: 'created'.
|
||||
modificationTime := snippetData at: 'modified'.
|
||||
timestampWarning := [:timestamp |
|
||||
'Modified timestamps: ', timestamp ,' date and time was replaced instead of nil value. See "origin" metadata for more historical traceability information.'
|
||||
].
|
||||
(creationTime = 'nil' and: [ modificationTime ~= 'nil' ])
|
||||
ifTrue: [
|
||||
snippetData redefineTimestampsBefore: modificationTime.
|
||||
snippetData addErrata: (timestampWarning value: 'creation').
|
||||
snippetData at: 'origin' put: (remoteMetadata at: 'origin').
|
||||
].
|
||||
(creationTime = 'nil' and: [ modificationTime = 'nil' ])
|
||||
ifTrue: [ | timeDiff |
|
||||
timeDiff := divSnippets size - i. "Suggesting that last snippets were modified after the first ones."
|
||||
modificationTime := (remoteMetadata at: 'created') asDateAndTime - timeDiff seconds.
|
||||
snippetData redefineTimestampsBefore: modificationTime.
|
||||
snippetData addErrata: (timestampWarning value: 'creation').
|
||||
snippetData addErrata: (timestampWarning value: 'modification').
|
||||
snippetData at: 'origin' put: (remoteMetadata at: 'origin').
|
||||
].
|
||||
snippetData.
|
||||
]
|
||||
]
|
||||
|
@ -1,26 +1,26 @@
|
||||
Extension { #name : #LeDockerSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDockerSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uuid;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDockerSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
||||
Extension { #name : #LeDockerSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDockerSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uuid;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeDockerSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
||||
|
@ -1,32 +1,32 @@
|
||||
Extension { #name : #LeExampleSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeExampleSnippet >> asMarkdeep [
|
||||
|
||||
^ (WriteStream on: '') contents
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeExampleSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uuid;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeExampleSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
||||
Extension { #name : #LeExampleSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeExampleSnippet >> asMarkdeep [
|
||||
|
||||
^ (WriteStream on: '') contents
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeExampleSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uuid;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeExampleSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
||||
|
@ -1,26 +1,26 @@
|
||||
Extension { #name : #LeGitHubSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeGitHubSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uuid;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeGitHubSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
||||
Extension { #name : #LeGitHubSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeGitHubSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uuid;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeGitHubSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
||||
|
@ -1,8 +1,8 @@
|
||||
Extension { #name : #LeHeaderNode }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeHeaderNode >> headerFullName [
|
||||
^ self topParent completeSource
|
||||
copyFrom: self startPosition
|
||||
to: self stopPosition
|
||||
]
|
||||
Extension { #name : #LeHeaderNode }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeHeaderNode >> headerFullName [
|
||||
^ self topParent completeSource
|
||||
copyFrom: self startPosition
|
||||
to: self stopPosition
|
||||
]
|
||||
|
@ -1,56 +1,56 @@
|
||||
Extension { #name : #LeHomeDatabaseHeaderElement }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeHomeDatabaseHeaderElement >> importMinidocsButtonElement [
|
||||
^ self userData at: 'importMinidocsButtonElement' ifAbsentPut: [ self newImportMiniDocsButton]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeHomeDatabaseHeaderElement >> initialize [
|
||||
super initialize.
|
||||
self initializeEditableTitleElement.
|
||||
self initializeButtons.
|
||||
|
||||
self addChild: self toolbarElement as: #toolbar.
|
||||
self toolbarElement
|
||||
addItem: self editableTitleElement;
|
||||
addItem: self newAddNewPageButton;
|
||||
addItem: self removeButtonElement;
|
||||
addItem: self importButtonElement;
|
||||
addItem: self exportButtonElement;
|
||||
addItem: self importMinidocsButtonElement.
|
||||
|
||||
self addAptitude: (BrLayoutResizerAptitude new
|
||||
hInherit;
|
||||
vAnyToFitContent;
|
||||
hInherit: self toolbarElement;
|
||||
vAnyToFitContent: self toolbarElement).
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeHomeDatabaseHeaderElement >> initializeButtons [
|
||||
self initializeRemoveButton.
|
||||
self initializeImportButton.
|
||||
self initializeExportButton.
|
||||
self initializeMiniDocsImportButton.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeHomeDatabaseHeaderElement >> initializeMiniDocsImportButton [
|
||||
self userData at: 'importMinidocsButtonElement' put: self newImportMiniDocsButton.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeHomeDatabaseHeaderElement >> newImportMiniDocsButton [
|
||||
^ LeMiniDocsImport new
|
||||
tooltip: 'Import document from link';
|
||||
contentExtent: 200 @ 30
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeHomeDatabaseHeaderElement >> updateToolbarButtons [
|
||||
self updateRemoveButtonElement.
|
||||
self exportButtonElement database: self database.
|
||||
self importButtonElement database: self database.
|
||||
self importMinidocsButtonElement database: self database.
|
||||
]
|
||||
Extension { #name : #LeHomeDatabaseHeaderElement }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeHomeDatabaseHeaderElement >> importMinidocsButtonElement [
|
||||
^ self userData at: 'importMinidocsButtonElement' ifAbsentPut: [ self newImportMiniDocsButton]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeHomeDatabaseHeaderElement >> initialize [
|
||||
super initialize.
|
||||
self initializeEditableTitleElement.
|
||||
self initializeButtons.
|
||||
|
||||
self addChild: self toolbarElement as: #toolbar.
|
||||
self toolbarElement
|
||||
addItem: self editableTitleElement;
|
||||
addItem: self newAddNewPageButton;
|
||||
addItem: self removeButtonElement;
|
||||
addItem: self importButtonElement;
|
||||
addItem: self exportButtonElement;
|
||||
addItem: self importMinidocsButtonElement.
|
||||
|
||||
self addAptitude: (BrLayoutResizerAptitude new
|
||||
hInherit;
|
||||
vAnyToFitContent;
|
||||
hInherit: self toolbarElement;
|
||||
vAnyToFitContent: self toolbarElement).
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeHomeDatabaseHeaderElement >> initializeButtons [
|
||||
self initializeRemoveButton.
|
||||
self initializeImportButton.
|
||||
self initializeExportButton.
|
||||
self initializeMiniDocsImportButton.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeHomeDatabaseHeaderElement >> initializeMiniDocsImportButton [
|
||||
self userData at: 'importMinidocsButtonElement' put: self newImportMiniDocsButton.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeHomeDatabaseHeaderElement >> newImportMiniDocsButton [
|
||||
^ LeMiniDocsImport new
|
||||
tooltip: 'Import document from link';
|
||||
contentExtent: 200 @ 30
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeHomeDatabaseHeaderElement >> updateToolbarButtons [
|
||||
self updateRemoveButtonElement.
|
||||
self exportButtonElement database: self database.
|
||||
self importButtonElement database: self database.
|
||||
self importMinidocsButtonElement database: self database.
|
||||
]
|
||||
|
@ -1,26 +1,26 @@
|
||||
Extension { #name : #LeJenkinsSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeJenkinsSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uuid;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeJenkinsSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
||||
Extension { #name : #LeJenkinsSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeJenkinsSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uuid;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeJenkinsSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
||||
|
@ -1,89 +1,89 @@
|
||||
Class {
|
||||
#name : #LeMiniDocsImport,
|
||||
#superclass : #BrButton,
|
||||
#instVars : [
|
||||
'contentExtent',
|
||||
'database'
|
||||
],
|
||||
#category : #'MiniDocs-UI'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
LeMiniDocsImport >> contentExtent [
|
||||
^ contentExtent
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
LeMiniDocsImport >> contentExtent: aPoint [
|
||||
self
|
||||
assert: [ aPoint isNotNil ]
|
||||
description: [ 'Extent must be non-nil' ].
|
||||
contentExtent := aPoint
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
LeMiniDocsImport >> createDropdownExpandedHandleButton [
|
||||
^ BrButton new
|
||||
icon: BrGlamorousVectorIcons downwards;
|
||||
label: self tooltip;
|
||||
aptitude: BrGlamorousButtonWithIconAndLabelAptitude
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
LeMiniDocsImport >> createURLeditable [
|
||||
| base editable |
|
||||
base := BlElement new
|
||||
background: (Color white);
|
||||
size: 200 @ 30;
|
||||
margin: (BlInsets all: 10);
|
||||
yourself.
|
||||
editable := BrEditableLabel new
|
||||
aptitude: BrGlamorousEditableLabelAptitude new glamorousRegularFontAndSize;
|
||||
text: 'Document link';
|
||||
switchToEditor.
|
||||
editable when: BrEditorAcceptWish do: [ :aWish |
|
||||
self importDocumentFrom: aWish text asString.
|
||||
].
|
||||
base addChild: editable.
|
||||
^ base
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
LeMiniDocsImport >> database [
|
||||
^ database
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
LeMiniDocsImport >> database: aLeDatabase [
|
||||
database := aLeDatabase
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
LeMiniDocsImport >> importDocumentFrom: aURL [
|
||||
^ self database importDocumentFrom: aURL.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
LeMiniDocsImport >> initialize [
|
||||
super initialize.
|
||||
|
||||
self
|
||||
icon: BrGlamorousVectorIcons downwards;
|
||||
label: 'Add MiniDocs';
|
||||
aptitude: BrGlamorousButtonWithIconAndLabelAptitude.
|
||||
self addAptitude: (BrGlamorousWithDropdownAptitude
|
||||
handle: [ self createDropdownExpandedHandleButton ]
|
||||
content: [ self createURLeditable ]).
|
||||
|
||||
self aptitude - BrGlamorousButtonExteriorAptitude.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
LeMiniDocsImport >> tooltip [
|
||||
^ self label
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
LeMiniDocsImport >> tooltip: aString [
|
||||
self label: aString
|
||||
]
|
||||
Class {
|
||||
#name : #LeMiniDocsImport,
|
||||
#superclass : #BrButton,
|
||||
#instVars : [
|
||||
'contentExtent',
|
||||
'database'
|
||||
],
|
||||
#category : #'MiniDocs-UI'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
LeMiniDocsImport >> contentExtent [
|
||||
^ contentExtent
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
LeMiniDocsImport >> contentExtent: aPoint [
|
||||
self
|
||||
assert: [ aPoint isNotNil ]
|
||||
description: [ 'Extent must be non-nil' ].
|
||||
contentExtent := aPoint
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
LeMiniDocsImport >> createDropdownExpandedHandleButton [
|
||||
^ BrButton new
|
||||
icon: BrGlamorousVectorIcons downwards;
|
||||
label: self tooltip;
|
||||
aptitude: BrGlamorousButtonWithIconAndLabelAptitude
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
LeMiniDocsImport >> createURLeditable [
|
||||
| base editable |
|
||||
base := BlElement new
|
||||
background: (Color white);
|
||||
size: 200 @ 30;
|
||||
margin: (BlInsets all: 10);
|
||||
yourself.
|
||||
editable := BrEditableLabel new
|
||||
aptitude: BrGlamorousEditableLabelAptitude new glamorousRegularFontAndSize;
|
||||
text: 'Document link';
|
||||
switchToEditor.
|
||||
editable when: BrEditorAcceptWish do: [ :aWish |
|
||||
self importDocumentFrom: aWish text asString.
|
||||
].
|
||||
base addChild: editable.
|
||||
^ base
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
LeMiniDocsImport >> database [
|
||||
^ database
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
LeMiniDocsImport >> database: aLeDatabase [
|
||||
database := aLeDatabase
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
LeMiniDocsImport >> importDocumentFrom: aURL [
|
||||
^ self database importDocumentFrom: aURL.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
LeMiniDocsImport >> initialize [
|
||||
super initialize.
|
||||
|
||||
self
|
||||
icon: BrGlamorousVectorIcons downwards;
|
||||
label: 'Add MiniDocs';
|
||||
aptitude: BrGlamorousButtonWithIconAndLabelAptitude.
|
||||
self addAptitude: (BrGlamorousWithDropdownAptitude
|
||||
handle: [ self createDropdownExpandedHandleButton ]
|
||||
content: [ self createURLeditable ]).
|
||||
|
||||
self aptitude - BrGlamorousButtonExteriorAptitude.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
LeMiniDocsImport >> tooltip [
|
||||
^ self label
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
LeMiniDocsImport >> tooltip: aString [
|
||||
self label: aString
|
||||
]
|
||||
|
@ -1,26 +1,26 @@
|
||||
Extension { #name : #LeMockedSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeMockedSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uuid;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeMockedSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
||||
Extension { #name : #LeMockedSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeMockedSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uuid;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeMockedSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
||||
|
@ -1,6 +1,6 @@
|
||||
Extension { #name : #LeNullDatabase }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeNullDatabase >> attachmentsDirectory [
|
||||
^ (FileLocator temp / 'lepiter' / 'attachments') ensureCreateDirectory.
|
||||
]
|
||||
Extension { #name : #LeNullDatabase }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeNullDatabase >> attachmentsDirectory [
|
||||
^ (FileLocator temp / 'lepiter' / 'attachments') ensureCreateDirectory.
|
||||
]
|
||||
|
@ -1,333 +1,333 @@
|
||||
Extension { #name : #LePage }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> asHtmlFile [
|
||||
|
||||
self asMarkdownFile.
|
||||
self defaultPandocTemplate exists
|
||||
ifFalse: [ MarkupFile installTemplate: 'https://mutabit.com/repos.fossil/mutabit/doc/trunk/plantillas/Pandoc/clean-menu-mod.html' into: self defaultPandocTemplate parent ].
|
||||
|
||||
OSSUnixSubprocess new
|
||||
command: 'pandoc' ;
|
||||
arguments: {
|
||||
self markdownFileName. '-o'. self htmlFileName .
|
||||
'--toc' .
|
||||
'--template=', self defaultPandocTemplate basenameWithoutExtension };
|
||||
workingDirectory: self storage fullName;
|
||||
runAndWaitOnExitDo: [ :process :outString | ^ self storage / self htmlFileName].
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> asMarkdeep [
|
||||
| bodyStream markdeep |
|
||||
bodyStream := '' writeStream.
|
||||
self preorderTraversal
|
||||
do: [ :snippet | bodyStream nextPutAll: snippet asMarkdeep ].
|
||||
markdeep := Markdeep new
|
||||
title: self title;
|
||||
body: bodyStream contents;
|
||||
metadata: self metadata;
|
||||
file: self storage / self markdeepFileName;
|
||||
navTop: self navTop.
|
||||
self metadata
|
||||
at: 'authors'
|
||||
ifPresent: [ :author | markdeep metadata at: 'authors' put: author ].
|
||||
self metadata
|
||||
at: 'version'
|
||||
ifPresent: [ :version | markdeep metadata at: 'version' put: version ].
|
||||
markdeep head: nil.
|
||||
^ markdeep
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> asMarkdeepFile [
|
||||
|
||||
^ self asMarkdeep notifyExportAsFileOn: self storage / self markdeepFileName
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> asMarkdown [
|
||||
| bodyStream markdown |
|
||||
bodyStream := '' writeStream.
|
||||
bodyStream
|
||||
nextPutAll: '# ', self title; cr; cr.
|
||||
self preorderTraversal
|
||||
do: [ :snippet | bodyStream nextPutAll: snippet asMarkdown ].
|
||||
markdown := Markdown new
|
||||
contents: bodyStream contents demoteMarkdownHeaders;
|
||||
metadata: (self metadata at: 'original' ifAbsentPut: Dictionary new).
|
||||
^ markdown
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> asMarkdownFile [
|
||||
| folder |
|
||||
folder := self storage.
|
||||
^ MarkupFile exportAsFileOn: folder / self markdownFileName containing: self asMarkdownWithMetadataWrappers contents
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> asMarkdownWithMetadataWrappers [
|
||||
| bodyStream markdown |
|
||||
bodyStream := '' writeStream.
|
||||
bodyStream
|
||||
nextPutAll: '# ', self title; cr; cr.
|
||||
self preorderTraversal
|
||||
do: [ :snippet | bodyStream nextPutAll: snippet asMarkdownWithMetadataWrappers ].
|
||||
markdown := Markdown new
|
||||
contents: bodyStream contents demoteMarkdownHeaders;
|
||||
metadata: (self metadata at: 'original' ifAbsentPut: Dictionary new).
|
||||
^ markdown
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> config [
|
||||
| configFile |
|
||||
configFile := self storage / 'config.ston'.
|
||||
configFile exists
|
||||
ifTrue: [^ STON fromString: configFile contents ]
|
||||
ifFalse: [ ^ nil ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> defaultPandocTemplate [
|
||||
|
||||
^ FileLocator home / '.pandoc' / 'templates' / 'clean-menu-mod.html'
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> detectMarkdeepTitleFrom: xmlSubtree [
|
||||
| titleLine |
|
||||
titleLine := (xmlSubtree nodesCollect: [:node | node contentString ]) first lines
|
||||
detect: [:line | line includesSubstring: ' **'] ifNone: ['Untitled'].
|
||||
^ titleLine trimmed trimBoth: [:char | char = $* ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> detectParentSnippetWithUid: uidString [
|
||||
uidString = self uid asString36 ifTrue: [ ^ self ].
|
||||
^ self preorderTraversal detect: [ :snippet | snippet uidString = uidString ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> exportMetadataToHead: markdeep [
|
||||
self metadata
|
||||
keysAndValuesDo: [ :k :v |
|
||||
k = 'lang'
|
||||
ifTrue: [ markdeep head
|
||||
add: '<meta lang="' , v , '">';
|
||||
yourself ]
|
||||
ifFalse: [ markdeep head
|
||||
add: '<meta name="' , k , '" content="' , v , '">';
|
||||
yourself ] ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> exportedFileName [
|
||||
| sanitized |
|
||||
sanitized := self title asDashedLowercase romanizeAccents copyWithoutAll: #($/ $: $🢒).
|
||||
^ sanitized , '--' , (self uidString copyFrom: 1 to: 5)
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> fromMarkdeepUrl: aString [
|
||||
| docTree pageMetadata |
|
||||
docTree := GrafoscopioUtils xmlFromUrl: aString.
|
||||
pageMetadata := Markdeep new metadataFromXML: docTree.
|
||||
self
|
||||
basicUid: (pageMetadata at: 'id');
|
||||
title: (pageMetadata at: 'title');
|
||||
createTime: (pageMetadata at: 'created') asDateAndTime;
|
||||
editTime: (pageMetadata at: 'modified') asDateAndTime;
|
||||
createEmail: (pageMetadata at: 'creator');
|
||||
editEmail: (pageMetadata at: 'modifier');
|
||||
optionAt: 'metadata' put: pageMetadata.
|
||||
self populateChildrenFrom: (docTree xpath: '//div')
|
||||
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> htmlFileName [
|
||||
^ self exportedFileName, '.html'
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> latestEditTime: aLeTime [
|
||||
"Used for adding a LePage to database from a shared markdeep LePage version."
|
||||
|
||||
latestEditTime := aLeTime
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> localHostAddress [
|
||||
| localUrl route |
|
||||
MiniDocsServer teapot server isRunning ifFalse: [ MiniDocsServer restart ].
|
||||
route := MiniDocsServer teapot staticRouter prefix joinUsing: '/'.
|
||||
localUrl := MiniDocsServer teapot server localUrl asString.
|
||||
^ localUrl, route, '/', self markdeepFileName
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> markdeepFileName [
|
||||
|
||||
^ self markdownFileName , '.html'
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> markdownFileName [
|
||||
^ self exportedFileName, '.md'
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> metadata [
|
||||
|
||||
^ self metadataUpdate
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> metadataUpdate [
|
||||
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'title' put: self contentAsString;
|
||||
at: 'created' put: self createTime greaseString;
|
||||
at: 'modified' put: self getLatestEditTime greaseString;
|
||||
at: 'creator' put: self createEmail greaseString;
|
||||
at: 'modifier' put: self editEmail greaseString;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> navTop [
|
||||
| topNavFile |
|
||||
topNavFile := self storage / '_navtop.html'.
|
||||
topNavFile exists
|
||||
ifFalse: [ ^ '' ]
|
||||
ifTrue: [ ^ topNavFile contents ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> olderChild [
|
||||
"I provide the last edited child node.
|
||||
I'm useful to recalculate the age of a notebook."
|
||||
| response|
|
||||
response := self preorderTraversal first.
|
||||
self preorderTraversal do: [:current |
|
||||
current editTime >= response editTime
|
||||
ifTrue: [ response := current ]
|
||||
].
|
||||
^ response
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> options [
|
||||
^ options
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> preorderTraversal [
|
||||
^ self allChildrenDepthFirst
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> removeSnippetsMetadata [
|
||||
self preorderTraversal do: [ :snippet |
|
||||
(snippet options isNotNil and: [ snippet options includesKey: 'metadata' ])
|
||||
ifTrue: [ snippet options removeKey: 'metadata' ] ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> sanitizeMetadata [
|
||||
self allChildrenDepthFirst do: [:snippet | snippet sanitizeMetadata ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> sharedVariablesBindings [
|
||||
| codeSnippets shared |
|
||||
codeSnippets := self preorderTraversal select: [:snippet |
|
||||
snippet class = LePharoSnippet and: [ snippet code includesSubstring: ':=']
|
||||
].
|
||||
|
||||
codeSnippets first in: [:snippet | | context |
|
||||
context := snippet coder evaluationContext.
|
||||
snippet coder doItInContext: context.
|
||||
shared := context bindingStrategy bindings detect: [:each |
|
||||
each isKindOf: GtSharedVariablesBindings
|
||||
]
|
||||
].
|
||||
|
||||
codeSnippets asArray allButFirstDo: [:snippet| | context|
|
||||
context := snippet coder evaluationContext.
|
||||
context addBindings: shared.
|
||||
snippet coder doItInContext: context
|
||||
].
|
||||
|
||||
^ shared asDictionary
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> storage [
|
||||
| current |
|
||||
current := self database attachmentsDirectory parent.
|
||||
self optionAt: 'storage' ifAbsent: [ ^ current ].
|
||||
(self optionAt: 'storage') ifNil: [ ^ current ].
|
||||
^ self optionAt: 'storage'
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> uiAddCopyButtonFor: anAction [
|
||||
<lePageAction>
|
||||
^ anAction button
|
||||
tooltip: 'Export Page';
|
||||
icon: BrGlamorousVectorIcons changes;
|
||||
action: [:aButton | aButton phlow spawnObject: (self page database addPageCopy: self page) ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> uiDefineFolderFor: anAction [
|
||||
<lePageAction>
|
||||
| folderButton |
|
||||
folderButton := anAction dropdown
|
||||
icon: BrGlamorousIcons savetodisk;
|
||||
tooltip: 'Export folder'"";
|
||||
content: [:aButton | BlElement new
|
||||
background: (Color gray alpha: 0.2);
|
||||
size: 100 @ 100;
|
||||
margin: (BlInsets all: 10) ].
|
||||
^ folderButton
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> uiExportButtonFor: anAction [
|
||||
<lePageAction>
|
||||
^ anAction button
|
||||
tooltip: 'Export Page';
|
||||
icon: BrGlamorousVectorIcons down;
|
||||
action: [:aButton | aButton phlow spawnObject: self page asMarkdeepFile ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> uiRefreshWebPreviewButtonFor: anAction [
|
||||
<lePageAction>
|
||||
^ anAction button
|
||||
tooltip: 'Refresh web view';
|
||||
icon: BrGlamorousVectorIcons refresh;
|
||||
action: [
|
||||
self page asMarkdeep exportAsFileOn: (self page storage / self page markdeepFileName).
|
||||
GoogleChrome openWindowOn: self page localHostAddress.
|
||||
"TODO: If Chrome/Chromium are not installed, I should execute:"
|
||||
"WebBrowser openOn: self page localHostAddress" ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> youngerChild [
|
||||
"I provide the first create child node.
|
||||
I'm useful to recalculate the age of a notebook."
|
||||
| response|
|
||||
response := self preorderTraversal first.
|
||||
self preorderTraversal do: [:current |
|
||||
current createTime <= response createTime
|
||||
ifTrue: [ response := current ]
|
||||
].
|
||||
^ response
|
||||
]
|
||||
Extension { #name : #LePage }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> asHtmlFile [
|
||||
|
||||
self asMarkdownFile.
|
||||
self defaultPandocTemplate exists
|
||||
ifFalse: [ MarkupFile installTemplate: 'https://mutabit.com/repos.fossil/mutabit/doc/trunk/plantillas/Pandoc/clean-menu-mod.html' into: self defaultPandocTemplate parent ].
|
||||
|
||||
OSSUnixSubprocess new
|
||||
command: 'pandoc' ;
|
||||
arguments: {
|
||||
self markdownFileName. '-o'. self htmlFileName .
|
||||
'--toc' .
|
||||
'--template=', self defaultPandocTemplate basenameWithoutExtension };
|
||||
workingDirectory: self storage fullName;
|
||||
runAndWaitOnExitDo: [ :process :outString | ^ self storage / self htmlFileName].
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> asMarkdeep [
|
||||
| bodyStream markdeep |
|
||||
bodyStream := '' writeStream.
|
||||
self preorderTraversal
|
||||
do: [ :snippet | bodyStream nextPutAll: snippet asMarkdeep ].
|
||||
markdeep := Markdeep new
|
||||
title: self title;
|
||||
body: bodyStream contents;
|
||||
metadata: self metadata;
|
||||
file: self storage / self markdeepFileName;
|
||||
navTop: self navTop.
|
||||
self metadata
|
||||
at: 'authors'
|
||||
ifPresent: [ :author | markdeep metadata at: 'authors' put: author ].
|
||||
self metadata
|
||||
at: 'version'
|
||||
ifPresent: [ :version | markdeep metadata at: 'version' put: version ].
|
||||
markdeep head: nil.
|
||||
^ markdeep
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> asMarkdeepFile [
|
||||
|
||||
^ self asMarkdeep notifyExportAsFileOn: self storage / self markdeepFileName
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> asMarkdown [
|
||||
| bodyStream markdown |
|
||||
bodyStream := '' writeStream.
|
||||
bodyStream
|
||||
nextPutAll: '# ', self title; cr; cr.
|
||||
self preorderTraversal
|
||||
do: [ :snippet | bodyStream nextPutAll: snippet asMarkdown ].
|
||||
markdown := Markdown new
|
||||
contents: bodyStream contents demoteMarkdownHeaders;
|
||||
metadata: (self metadata at: 'original' ifAbsentPut: Dictionary new).
|
||||
^ markdown
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> asMarkdownFile [
|
||||
| folder |
|
||||
folder := self storage.
|
||||
^ MarkupFile exportAsFileOn: folder / self markdownFileName containing: self asMarkdownWithMetadataWrappers contents
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> asMarkdownWithMetadataWrappers [
|
||||
| bodyStream markdown |
|
||||
bodyStream := '' writeStream.
|
||||
bodyStream
|
||||
nextPutAll: '# ', self title; cr; cr.
|
||||
self preorderTraversal
|
||||
do: [ :snippet | bodyStream nextPutAll: snippet asMarkdownWithMetadataWrappers ].
|
||||
markdown := Markdown new
|
||||
contents: bodyStream contents demoteMarkdownHeaders;
|
||||
metadata: (self metadata at: 'original' ifAbsentPut: Dictionary new).
|
||||
^ markdown
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> config [
|
||||
| configFile |
|
||||
configFile := self storage / 'config.ston'.
|
||||
configFile exists
|
||||
ifTrue: [^ STON fromString: configFile contents ]
|
||||
ifFalse: [ ^ nil ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> defaultPandocTemplate [
|
||||
|
||||
^ FileLocator home / '.pandoc' / 'templates' / 'clean-menu-mod.html'
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> detectMarkdeepTitleFrom: xmlSubtree [
|
||||
| titleLine |
|
||||
titleLine := (xmlSubtree nodesCollect: [:node | node contentString ]) first lines
|
||||
detect: [:line | line includesSubstring: ' **'] ifNone: ['Untitled'].
|
||||
^ titleLine trimmed trimBoth: [:char | char = $* ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> detectParentSnippetWithUid: uidString [
|
||||
uidString = self uid asString36 ifTrue: [ ^ self ].
|
||||
^ self preorderTraversal detect: [ :snippet | snippet uidString = uidString ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> exportMetadataToHead: markdeep [
|
||||
self metadata
|
||||
keysAndValuesDo: [ :k :v |
|
||||
k = 'lang'
|
||||
ifTrue: [ markdeep head
|
||||
add: '<meta lang="' , v , '">';
|
||||
yourself ]
|
||||
ifFalse: [ markdeep head
|
||||
add: '<meta name="' , k , '" content="' , v , '">';
|
||||
yourself ] ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> exportedFileName [
|
||||
| sanitized |
|
||||
sanitized := self title asDashedLowercase romanizeAccents copyWithoutAll: #($/ $: $🢒).
|
||||
^ sanitized , '--' , (self uidString copyFrom: 1 to: 5)
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> fromMarkdeepUrl: aString [
|
||||
| docTree pageMetadata |
|
||||
docTree := GrafoscopioUtils xmlFromUrl: aString.
|
||||
pageMetadata := Markdeep new metadataFromXML: docTree.
|
||||
self
|
||||
basicUid: (pageMetadata at: 'id');
|
||||
title: (pageMetadata at: 'title');
|
||||
createTime: (pageMetadata at: 'created') asDateAndTime;
|
||||
editTime: (pageMetadata at: 'modified') asDateAndTime;
|
||||
createEmail: (pageMetadata at: 'creator');
|
||||
editEmail: (pageMetadata at: 'modifier');
|
||||
optionAt: 'metadata' put: pageMetadata.
|
||||
self populateChildrenFrom: (docTree xpath: '//div')
|
||||
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> htmlFileName [
|
||||
^ self exportedFileName, '.html'
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> latestEditTime: aLeTime [
|
||||
"Used for adding a LePage to database from a shared markdeep LePage version."
|
||||
|
||||
latestEditTime := aLeTime
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> localHostAddress [
|
||||
| localUrl route |
|
||||
MiniDocsServer teapot server isRunning ifFalse: [ MiniDocsServer restart ].
|
||||
route := MiniDocsServer teapot staticRouter prefix joinUsing: '/'.
|
||||
localUrl := MiniDocsServer teapot server localUrl asString.
|
||||
^ localUrl, route, '/', self markdeepFileName
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> markdeepFileName [
|
||||
|
||||
^ self markdownFileName , '.html'
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> markdownFileName [
|
||||
^ self exportedFileName, '.md'
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> metadata [
|
||||
|
||||
^ self metadataUpdate
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> metadataUpdate [
|
||||
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'title' put: self contentAsString;
|
||||
at: 'created' put: self createTime greaseString;
|
||||
at: 'modified' put: self getLatestEditTime greaseString;
|
||||
at: 'creator' put: self createEmail greaseString;
|
||||
at: 'modifier' put: self editEmail greaseString;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> navTop [
|
||||
| topNavFile |
|
||||
topNavFile := self storage / '_navtop.html'.
|
||||
topNavFile exists
|
||||
ifFalse: [ ^ '' ]
|
||||
ifTrue: [ ^ topNavFile contents ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> olderChild [
|
||||
"I provide the last edited child node.
|
||||
I'm useful to recalculate the age of a notebook."
|
||||
| response|
|
||||
response := self preorderTraversal first.
|
||||
self preorderTraversal do: [:current |
|
||||
current editTime >= response editTime
|
||||
ifTrue: [ response := current ]
|
||||
].
|
||||
^ response
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> options [
|
||||
^ options
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> preorderTraversal [
|
||||
^ self allChildrenDepthFirst
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> removeSnippetsMetadata [
|
||||
self preorderTraversal do: [ :snippet |
|
||||
(snippet options isNotNil and: [ snippet options includesKey: 'metadata' ])
|
||||
ifTrue: [ snippet options removeKey: 'metadata' ] ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> sanitizeMetadata [
|
||||
self allChildrenDepthFirst do: [:snippet | snippet sanitizeMetadata ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> sharedVariablesBindings [
|
||||
| codeSnippets shared |
|
||||
codeSnippets := self preorderTraversal select: [:snippet |
|
||||
snippet class = LePharoSnippet and: [ snippet code includesSubstring: ':=']
|
||||
].
|
||||
|
||||
codeSnippets first in: [:snippet | | context |
|
||||
context := snippet coder evaluationContext.
|
||||
snippet coder doItInContext: context.
|
||||
shared := context bindingStrategy bindings detect: [:each |
|
||||
each isKindOf: GtSharedVariablesBindings
|
||||
]
|
||||
].
|
||||
|
||||
codeSnippets asArray allButFirstDo: [:snippet| | context|
|
||||
context := snippet coder evaluationContext.
|
||||
context addBindings: shared.
|
||||
snippet coder doItInContext: context
|
||||
].
|
||||
|
||||
^ shared asDictionary
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> storage [
|
||||
| current |
|
||||
current := self database attachmentsDirectory parent.
|
||||
self optionAt: 'storage' ifAbsent: [ ^ current ].
|
||||
(self optionAt: 'storage') ifNil: [ ^ current ].
|
||||
^ self optionAt: 'storage'
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> uiAddCopyButtonFor: anAction [
|
||||
<lePageAction>
|
||||
^ anAction button
|
||||
tooltip: 'Export Page';
|
||||
icon: BrGlamorousVectorIcons changes;
|
||||
action: [:aButton | aButton phlow spawnObject: (self page database addPageCopy: self page) ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> uiDefineFolderFor: anAction [
|
||||
<lePageAction>
|
||||
| folderButton |
|
||||
folderButton := anAction dropdown
|
||||
icon: BrGlamorousIcons savetodisk;
|
||||
tooltip: 'Export folder'"";
|
||||
content: [:aButton | BlElement new
|
||||
background: (Color gray alpha: 0.2);
|
||||
size: 100 @ 100;
|
||||
margin: (BlInsets all: 10) ].
|
||||
^ folderButton
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> uiExportButtonFor: anAction [
|
||||
<lePageAction>
|
||||
^ anAction button
|
||||
tooltip: 'Export Page';
|
||||
icon: BrGlamorousVectorIcons down;
|
||||
action: [:aButton | aButton phlow spawnObject: self page asMarkdeepFile ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> uiRefreshWebPreviewButtonFor: anAction [
|
||||
<lePageAction>
|
||||
^ anAction button
|
||||
tooltip: 'Refresh web view';
|
||||
icon: BrGlamorousVectorIcons refresh;
|
||||
action: [
|
||||
self page asMarkdeep exportAsFileOn: (self page storage / self page markdeepFileName).
|
||||
GoogleChrome openWindowOn: self page localHostAddress.
|
||||
"TODO: If Chrome/Chromium are not installed, I should execute:"
|
||||
"WebBrowser openOn: self page localHostAddress" ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePage >> youngerChild [
|
||||
"I provide the first create child node.
|
||||
I'm useful to recalculate the age of a notebook."
|
||||
| response|
|
||||
response := self preorderTraversal first.
|
||||
self preorderTraversal do: [:current |
|
||||
current createTime <= response createTime
|
||||
ifTrue: [ response := current ]
|
||||
].
|
||||
^ response
|
||||
]
|
||||
|
@ -1,26 +1,26 @@
|
||||
Extension { #name : #LePharoRewriteSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePharoRewriteSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uuid;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePharoRewriteSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
||||
Extension { #name : #LePharoRewriteSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePharoRewriteSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uuid;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePharoRewriteSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
||||
|
@ -1,58 +1,58 @@
|
||||
Extension { #name : #LePharoSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePharoSnippet >> contentAsStringCustomized [
|
||||
| thisObject |
|
||||
(self tags includes: 'output') ifFalse: [ ^ self contentAsString ].
|
||||
thisObject := ((self page sharedVariablesBindings) at: self detectObject) value.
|
||||
^ thisObject perform: self detectMessage trimmed asSymbol.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePharoSnippet >> fromMarkdeep: markdeepDiv [
|
||||
|
||||
^ markdeepDiv asSnippetDictionary asLepiterSnippet
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePharoSnippet >> fromString: aString [
|
||||
|
||||
self code: aString
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePharoSnippet >> markdeepCustomCloser [
|
||||
^ String streamContents: [ :stream |
|
||||
stream
|
||||
nextPutAll: '~~~'; lf;
|
||||
nextPutAll: '</script>'; lf.
|
||||
]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePharoSnippet >> markdeepCustomOpener [
|
||||
^ String streamContents: [ :stream |
|
||||
stream
|
||||
nextPutAll: '<script type="preformatted">'; lf;
|
||||
nextPutAll: '~~~ Smalltalk'; lf
|
||||
]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePharoSnippet >> markdownCustomCloser [
|
||||
(self tags includes: 'output') ifTrue: [^ String with: Character lf].
|
||||
^ String streamContents: [:stream |
|
||||
stream
|
||||
nextPutAll: '~~~'; lf
|
||||
]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePharoSnippet >> markdownCustomOpener [
|
||||
(self tags includes: 'output') ifTrue: [ ^ String with: Character lf ].
|
||||
^ String
|
||||
streamContents: [ :stream |
|
||||
stream
|
||||
nextPutAll: '~~~ Smalltalk';
|
||||
lf ]
|
||||
]
|
||||
Extension { #name : #LePharoSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePharoSnippet >> contentAsStringCustomized [
|
||||
| thisObject |
|
||||
(self tags includes: 'output') ifFalse: [ ^ self contentAsString ].
|
||||
thisObject := ((self page sharedVariablesBindings) at: self detectObject) value.
|
||||
^ thisObject perform: self detectMessage trimmed asSymbol.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePharoSnippet >> fromMarkdeep: markdeepDiv [
|
||||
|
||||
^ markdeepDiv asSnippetDictionary asLepiterSnippet
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePharoSnippet >> fromString: aString [
|
||||
|
||||
self code: aString
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePharoSnippet >> markdeepCustomCloser [
|
||||
^ String streamContents: [ :stream |
|
||||
stream
|
||||
nextPutAll: '~~~'; lf;
|
||||
nextPutAll: '</script>'; lf.
|
||||
]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePharoSnippet >> markdeepCustomOpener [
|
||||
^ String streamContents: [ :stream |
|
||||
stream
|
||||
nextPutAll: '<script type="preformatted">'; lf;
|
||||
nextPutAll: '~~~ Smalltalk'; lf
|
||||
]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePharoSnippet >> markdownCustomCloser [
|
||||
(self tags includes: 'output') ifTrue: [^ String with: Character lf].
|
||||
^ String streamContents: [:stream |
|
||||
stream
|
||||
nextPutAll: '~~~'; lf
|
||||
]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePharoSnippet >> markdownCustomOpener [
|
||||
(self tags includes: 'output') ifTrue: [ ^ String with: Character lf ].
|
||||
^ String
|
||||
streamContents: [ :stream |
|
||||
stream
|
||||
nextPutAll: '~~~ Smalltalk';
|
||||
lf ]
|
||||
]
|
||||
|
@ -1,122 +1,122 @@
|
||||
Extension { #name : #LePictureSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePictureSnippet >> asMarkdeep [
	"Export this picture snippet as a Markdeep image block wrapped in its
	 metadata <div>. A multi-line caption is folded into one line, joining the
	 lines with single spaces.
	 Fixes two defects in the previous version:
	 1. the single-line branch read the undeclared variable 'caption'
	    instead of 'self caption';
	 2. 'curatedCaption := curatedCaption contents' ran INSIDE the
	    allButFirstDo: loop, replacing the WriteStream with a String after the
	    first extra line, so any further nextPutAll: failed."
	| output curatedCaption captionLines captionStream |
	captionLines := self caption lines.
	captionLines size <= 1
		ifTrue: [ curatedCaption := self caption ]
		ifFalse: [
			captionStream := WriteStream on: ''.
			captionStream nextPutAll: captionLines first.
			captionLines allButFirstDo: [:line |
				captionStream nextPutAll: ' ', line ].
			"Convert to a String only once, after the whole caption is assembled"
			curatedCaption := captionStream contents ].
	output := WriteStream on: ''.
	output
		nextPutAll: self metadataDiv;
		nextPutAll: '![ ', curatedCaption ,' ](', self urlString, ')';
		nextPut: Character lf;
		nextPutAll: '</div>';
		nextPut: Character lf;
		nextPut: Character lf.
	^ output contents
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePictureSnippet >> asMarkdownWithMetadataWrappers [
	"Markdown export delegates to the Markdeep export: pictures use the same
	 wrapper markup in both formats."
	^ self asMarkdeep
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePictureSnippet >> contentFrom: markdeepDiv [
	"Populate this snippet's caption, width and image URL from a Markdeep
	 <div> that wraps an <img> element."
	| caption width |
	caption := markdeepDiv contentString.
	width := (markdeepDiv // 'img' @ 'width') stringValue.
	self
		optionAt: 'caption' put: caption;
		optionAt: 'width' put: width.
	self urlString: (markdeepDiv // 'img' @ 'src') stringValue.
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePictureSnippet >> fromMarkdeep: markdeepDiv [
|
||||
^ markdeepDiv asSnippetDictionary asLepiterSnippet
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePictureSnippet >> fromString: aStringArray [
	"aStringArray holds the sanitized caption as its first element and the
	 full original image link string as its second (the link text may itself
	 contain bracketed links)."
	| linkParts url |
	linkParts := aStringArray second splitOn: ']('.
	"Drop the trailing close-parenthesis of the Markdown image link"
	url := linkParts last allButLast.
	self caption: aStringArray first.
	self urlString: url.
	^ self
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePictureSnippet >> metadata [
	"Answer this snippet's export metadata.
	 NOTE(review): metadataInit builds a fresh dictionary on every call, so
	 mutations of the answered dictionary are not persisted."
	^ self metadataInit
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePictureSnippet >> metadataDiv [
	"Answer the opening <div> tag carrying this snippet's class name and its
	 STON-encoded metadata as 'st-' attributes, using internet (CRLF) line
	 endings and ending with a newline."
	| newLine |
	newLine := String with: Character lf.
	^ ('<div st-class="', self class greaseString, '"', newLine,
		' st-data="', (STON toStringPretty: self metadata), '">', newLine)
			withInternetLineEndings
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePictureSnippet >> metadataInit [
	"Answer a fresh ordered dictionary describing this snippet: identity,
	 parent surrogate (nil when there is no parent), the content embedded as
	 an HTML comment under 'url', timestamps, and author emails with XML tag
	 delimiters stripped."
	| surrogate |
	surrogate := self parent ifNotNil: [ :parentSnippet | parentSnippet uidString ].
	^ OrderedDictionary new
		at: 'id' put: self uidString;
		at: 'parent' put: surrogate;
		at: 'url' put: '<!--', self contentAsString, '-->';
		at: 'created' put: self createTime asString;
		at: 'modified' put: self latestEditTime asString;
		at: 'creator' put: self createEmail asString withoutXMLTagDelimiters;
		at: 'modifier' put: self editEmail asString withoutXMLTagDelimiters;
		yourself
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePictureSnippet >> metadataUpdate [
	"Answer a fresh ordered dictionary of this snippet's export metadata:
	 identity, parent surrogate (nil when there is no parent), timestamps,
	 and author emails with XML tag delimiters stripped."
	| surrogate |
	surrogate := self parent ifNotNil: [ :parentSnippet | parentSnippet uidString ].
	^ OrderedDictionary new
		at: 'id' put: self uidString;
		at: 'parent' put: surrogate;
		at: 'created' put: self createTime asString;
		at: 'modified' put: self latestEditTime asString;
		at: 'creator' put: self createEmail asString withoutXMLTagDelimiters;
		at: 'modifier' put: self editEmail asString withoutXMLTagDelimiters;
		yourself
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePictureSnippet >> sanitizeMetadata [
	"Strip XML tag delimiters ($< and $>) from this snippet's metadata string
	 values. Non-string values (e.g. a nil 'parent' surrogate) are skipped
	 instead of raising doesNotUnderstand on #includesAny:, matching the
	 guarded LeTextualSnippet>>sanitizeMetadata variant."
	self metadata keysAndValuesDo: [:k :v |
		(v isString and: [ v includesAny: #($< $>) ])
			ifTrue: [
				self metadata at: k put: (v copyWithoutAll: #($< $>))
			]
	]
]
|
||||
Extension { #name : #LePictureSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePictureSnippet >> asMarkdeep [
	"Export this picture snippet as a Markdeep image block wrapped in its
	 metadata <div>. A multi-line caption is folded into one line, joining the
	 lines with single spaces.
	 Fixes two defects in the previous version:
	 1. the single-line branch read the undeclared variable 'caption'
	    instead of 'self caption';
	 2. 'curatedCaption := curatedCaption contents' ran INSIDE the
	    allButFirstDo: loop, replacing the WriteStream with a String after the
	    first extra line, so any further nextPutAll: failed."
	| output curatedCaption captionLines captionStream |
	captionLines := self caption lines.
	captionLines size <= 1
		ifTrue: [ curatedCaption := self caption ]
		ifFalse: [
			captionStream := WriteStream on: ''.
			captionStream nextPutAll: captionLines first.
			captionLines allButFirstDo: [:line |
				captionStream nextPutAll: ' ', line ].
			"Convert to a String only once, after the whole caption is assembled"
			curatedCaption := captionStream contents ].
	output := WriteStream on: ''.
	output
		nextPutAll: self metadataDiv;
		nextPutAll: '![ ', curatedCaption ,' ](', self urlString, ')';
		nextPut: Character lf;
		nextPutAll: '</div>';
		nextPut: Character lf;
		nextPut: Character lf.
	^ output contents
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePictureSnippet >> asMarkdownWithMetadataWrappers [
|
||||
^ self asMarkdeep
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePictureSnippet >> contentFrom: markdeepDiv [
|
||||
| caption width |
|
||||
caption := markdeepDiv contentString.
|
||||
width := (markdeepDiv // 'img' @ 'width') stringValue.
|
||||
self
|
||||
optionAt: 'caption' put: caption;
|
||||
optionAt: 'width' put: width.
|
||||
self urlString: (markdeepDiv // 'img' @ 'src') stringValue.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePictureSnippet >> fromMarkdeep: markdeepDiv [
|
||||
^ markdeepDiv asSnippetDictionary asLepiterSnippet
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePictureSnippet >> fromString: aStringArray [
|
||||
"aStringArray should contain as first element the sanitized string and
|
||||
as second the full original image Link string, which may contains links in the description."
|
||||
| args urlTemp |
|
||||
|
||||
args := aStringArray second splitOn: ']('.
|
||||
urlTemp := args last.
|
||||
urlTemp := urlTemp copyFrom: 1 to: urlTemp size - 1.
|
||||
self caption: aStringArray first.
|
||||
self urlString: urlTemp.
|
||||
^ self
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePictureSnippet >> metadata [
|
||||
^ self metadataInit
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePictureSnippet >> metadataDiv [
|
||||
| output |
|
||||
output := WriteStream on: ''.
|
||||
output
|
||||
nextPutAll: '<div st-class="' , self class greaseString , '"';
|
||||
nextPut: Character lf;
|
||||
nextPutAll: ' st-data="' , (STON toStringPretty: self metadata) , '">';
|
||||
nextPut: Character lf.
|
||||
^ output contents withInternetLineEndings.
|
||||
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePictureSnippet >> metadataInit [
|
||||
| surrogate |
|
||||
self parent
|
||||
ifNil: [ surrogate := nil]
|
||||
ifNotNil: [ surrogate := self parent uidString ].
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: surrogate;
|
||||
at: 'url' put: '<!--',self contentAsString, '-->';
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: self createEmail asString withoutXMLTagDelimiters;
|
||||
at: 'modifier' put: self editEmail asString withoutXMLTagDelimiters;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePictureSnippet >> metadataUpdate [
|
||||
| surrogate |
|
||||
self parent
|
||||
ifNil: [ surrogate := nil]
|
||||
ifNotNil: [ surrogate := self parent uidString ].
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: surrogate;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: self createEmail asString withoutXMLTagDelimiters;
|
||||
at: 'modifier' put: self editEmail asString withoutXMLTagDelimiters;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LePictureSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
||||
|
@ -1,26 +1,26 @@
|
||||
Extension { #name : #LeSmaCCRewriteSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeSmaCCRewriteSnippet >> metadataUpdate [
	"Answer a fresh ordered dictionary of this snippet's export metadata:
	 identity, parent reference, timestamps, and author emails with XML tag
	 delimiters stripped.
	 NOTE(review): sibling snippet classes store the parent as 'uidString';
	 here it is 'self parent uuid' — confirm which representation is intended."
	| createEmailSanitized editEmailSanitized |
	createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
	editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
	^ OrderedDictionary new
		at: 'id' put: self uidString;
		at: 'parent' put: self parent uuid;
		at: 'created' put: self createTime asString;
		at: 'modified' put: self latestEditTime asString;
		at: 'creator' put: createEmailSanitized;
		at: 'modifier' put: editEmailSanitized;
		yourself
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeSmaCCRewriteSnippet >> sanitizeMetadata [
	"Strip XML tag delimiters ($< and $>) from this snippet's metadata string
	 values. Non-string values are skipped instead of raising
	 doesNotUnderstand on #includesAny:, matching the guarded
	 LeTextualSnippet>>sanitizeMetadata variant."
	self metadata keysAndValuesDo: [:k :v |
		(v isString and: [ v includesAny: #($< $>) ])
			ifTrue: [
				self metadata at: k put: (v copyWithoutAll: #($< $>))
			]
	]
]
|
||||
Extension { #name : #LeSmaCCRewriteSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeSmaCCRewriteSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uuid;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeSmaCCRewriteSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
||||
|
@ -1,28 +1,28 @@
|
||||
Extension { #name : #LeSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeSnippet class >> fromMetaMarkdeep: div [
	"Recreate a snippet from a Markdeep <div> export: instantiate the class
	 named in the div's 'st-class' attribute, inject the STON-decoded metadata
	 from 'st-data', then let the new instance parse the div's content."
	| className metadata snippet |
	className := (div xpath: '@st-class') stringValue.
	metadata := STON fromString:(div xpath: '@st-data') stringValue.
	"The class name comes from the document itself, so this trusts the input"
	snippet := className asClass new.
	snippet injectMetadataFrom: metadata.
	snippet fromMarkdeep: div.
	^ snippet.
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeSnippet >> metadata [
	"Answer this snippet's export metadata.
	 NOTE(review): metadataUpdate builds a fresh dictionary on every call, so
	 mutations of the answered dictionary are not persisted."
	^ self metadataUpdate
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeSnippet >> moveToPageTitled: pageName [
	"Detach this snippet from its current page and append it to the page
	 named pageName within the same database."
	| database destination origin |
	database := self page database.
	destination := database pageNamed: pageName.
	origin := database pageNamed: self page title.
	origin removeSnippet: self.
	destination addSnippet: self.
]
|
||||
Extension { #name : #LeSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeSnippet class >> fromMetaMarkdeep: div [
|
||||
| className metadata snippet |
|
||||
className := (div xpath: '@st-class') stringValue.
|
||||
metadata := STON fromString:(div xpath: '@st-data') stringValue.
|
||||
snippet := className asClass new.
|
||||
snippet injectMetadataFrom: metadata.
|
||||
snippet fromMarkdeep: div.
|
||||
^ snippet.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeSnippet >> metadata [
|
||||
^ self metadataUpdate
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeSnippet >> moveToPageTitled: pageName [
|
||||
| db origin destination thisSnippet |
|
||||
thisSnippet := self.
|
||||
db := self page database.
|
||||
destination := db pageNamed: pageName.
|
||||
origin := db pageNamed: thisSnippet page title.
|
||||
origin removeSnippet: thisSnippet.
|
||||
destination addSnippet: thisSnippet.
|
||||
]
|
||||
|
@ -1,11 +1,11 @@
|
||||
Extension { #name : #LeTextCoderSnippetElement }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextCoderSnippetElement >> moveToPageTitled: pageName [
	"Detach this snippet element from its current page and append it to the
	 page named pageName within the same database."
	| database target |
	database := self page database.
	target := database pageNamed: pageName.
	(database pageNamed: self page title) removeSnippet: self.
	target addSnippet: self
]
|
||||
Extension { #name : #LeTextCoderSnippetElement }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextCoderSnippetElement >> moveToPageTitled: pageName [
|
||||
| db origin destination |
|
||||
db := self page database.
|
||||
destination := db pageNamed: pageName.
|
||||
origin := db pageNamed: self page title.
|
||||
origin removeSnippet: self.
|
||||
destination addSnippet: self .
|
||||
]
|
||||
|
@ -1,55 +1,55 @@
|
||||
Extension { #name : #LeTextSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextSnippet >> asLePage [
	"Promote this text snippet to a new page in the same database: the page
	 title is taken from the snippet's first Markdown header, the snippet's
	 own content becomes the page's first snippet, all children are moved to
	 the new page, and this snippet is removed from its original location.
	 NOTE(review): assumes markdownHeaders answers at least one association —
	 a snippet with no header would fail on 'associations first'."
	| page title currentSnippet |
	title := self contentAsString markdownHeaders associations first value.
	"Strip the leading '#' header markers and surrounding whitespace"
	title := (title trimBoth: [:char | char = $# ]) trimmed.
	page := LePage new
		initializeTitle: title.
	currentSnippet := LeTextSnippet new
		string: self contentAsString.
	page addSnippet: currentSnippet.
	self database addPage: page.
	"Re-parent all child snippets onto the newly created page"
	self childrenDo: [:child |
		child moveToPageTitled: page title
	].
	"Remove this snippet from its original page"
	self removeSelfCommand.
	^ page.
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextSnippet >> fromMarkdeep: markdeepDiv [
|
||||
|
||||
^ markdeepDiv asSnippetDictionary asLepiterSnippet
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextSnippet >> fromString: aString [
|
||||
|
||||
self string: aString
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextSnippet >> metadata [
|
||||
|
||||
^ self metadataUpdate
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextSnippet >> options [
|
||||
^ options
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextSnippet >> parentId [
	"Answer the parent's identifier: the parent itself when it is already a
	 string id, its uidString otherwise. Answers self when there is no parent."
	| currentParent |
	currentParent := self parent.
	currentParent ifNil: [ ^ self ].
	^ currentParent isString
		ifTrue: [ currentParent ]
		ifFalse: [ currentParent uidString ]
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextSnippet >> taggedWith: aString [
	"Add aString to this snippet's 'tags' metadata set and answer the set.
	 Fixes the previous version, where ifAbsentPut: installed an EMPTY set
	 without adding aString, and the final '^ self metadata at: ''tags'''
	 re-derived a fresh metadata dictionary lacking the 'tags' key.
	 NOTE(review): #metadata answers a freshly built dictionary, so this
	 mutation may not persist across calls — confirm intended storage."
	| tags |
	tags := self metadata at: 'tags' ifAbsentPut: [ Set new ].
	tags add: aString.
	^ tags
]
|
||||
Extension { #name : #LeTextSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextSnippet >> asLePage [
|
||||
| page title currentSnippet |
|
||||
title := self contentAsString markdownHeaders associations first value.
|
||||
title := (title trimBoth: [:char | char = $# ]) trimmed.
|
||||
page := LePage new
|
||||
initializeTitle: title.
|
||||
currentSnippet := LeTextSnippet new
|
||||
string: self contentAsString.
|
||||
page addSnippet: currentSnippet.
|
||||
self database addPage: page.
|
||||
self childrenDo: [:child |
|
||||
child moveToPageTitled: page title
|
||||
].
|
||||
self removeSelfCommand.
|
||||
^ page.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextSnippet >> fromMarkdeep: markdeepDiv [
|
||||
|
||||
^ markdeepDiv asSnippetDictionary asLepiterSnippet
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextSnippet >> fromString: aString [
|
||||
|
||||
self string: aString
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextSnippet >> metadata [
|
||||
|
||||
^ self metadataUpdate
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextSnippet >> options [
|
||||
^ options
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextSnippet >> parentId [
|
||||
self parent ifNil: [ ^ self ].
|
||||
(self parent isString) ifTrue: [^ self parent].
|
||||
^ self parent uidString.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextSnippet >> taggedWith: aString [
	"Add aString to this snippet's 'tags' metadata set and answer the set.
	 Fixes the previous version, where ifAbsentPut: installed an EMPTY set
	 without adding aString, and the final '^ self metadata at: ''tags'''
	 re-derived a fresh metadata dictionary lacking the 'tags' key.
	 NOTE(review): #metadata answers a freshly built dictionary, so this
	 mutation may not persist across calls — confirm intended storage."
	| tags |
	tags := self metadata at: 'tags' ifAbsentPut: [ Set new ].
	tags add: aString.
	^ tags
]
|
||||
|
@ -1,145 +1,145 @@
|
||||
Extension { #name : #LeTextualSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> asMarkdeep [
	"Export this snippet as a Markdeep <div> block.
	 Inspired by Alpine.js and Assembler CSS 'x-' properties, we are going to use
	 'st-' properties as a way to extend divs metadata regarding its contents.
	 The div carries the snippet class in 'st-class' and its STON-encoded
	 metadata in 'st-data'; the body is the annotated content wrapped in the
	 subclass-specific opener/closer. Answered with internet (CRLF) endings."

	| output |
	output := WriteStream on: ''.
	output
		nextPutAll: '<div st-class="' , self class greaseString , '"';
		nextPut: Character lf;
		nextPutAll: ' st-data="' , (STON toStringPretty: self metadata) , '">';
		nextPut: Character lf;
		nextPutAll: self markdeepCustomOpener;
		nextPutAll: self contentAsStringAnnotated;
		nextPut: Character lf;
		nextPutAll: self markdeepCustomCloser;
		nextPutAll: '</div>';
		nextPut: Character lf;
		nextPut: Character lf.
	^ output contents withInternetLineEndings
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> asMarkdown [
|
||||
|
||||
| output |
|
||||
output := '' writeStream.
|
||||
output
|
||||
nextPutAll: self contentAsStringCustomized; lf.
|
||||
^ output contents
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> asMarkdownWithMetadataWrappers [
|
||||
"Inspired by Alpine.js and Assembler CSS 'x-' properties, we are going to use
|
||||
'st-' properties as a way to extend divs metadata regarding its contents."
|
||||
| output |
|
||||
output := '' writeStream.
|
||||
output
|
||||
nextPutAll: '<div st-class="', self class asString, '"'; lf;
|
||||
nextPutAll: ' st-data="', (STON toString: self metadata), '">'; lf;
|
||||
nextPutAll: self markdownCustomOpener;
|
||||
nextPutAll: self contentAsStringCustomized; lf;
|
||||
nextPutAll: self markdownCustomCloser;
|
||||
nextPutAll: '</div>'; lf; lf.
|
||||
^ output contents
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> contentAsStringAnnotated [
	"Answer the snippet content with Lepiter {{type:value}} annotations
	 rewritten as colored <span st-class=...> HTML elements. Colors come from
	 the page config's 'annotationColors' dictionary, keyed by annotation
	 type, falling back to its 'defaultColor' or 'default'.
	 Answers the raw content when there is no AST or no annotations."
	| annotations substitutions exported pageConfig|
	self ast ifNil: [ ^ self contentAsString ].
	"Collect annotation nodes from the parsed content"
	annotations := self ast parts select: [:each | each className includesSubstring: 'AnnotationNode' ].
	annotations ifEmpty: [ ^ self contentAsString ].
	substitutions := OrderedDictionary new.
	pageConfig := self page config.
	annotations do: [ :each | | key type value color |
		key := each source.
		"An annotation source looks like '{{type:value}}'"
		type := (key splitOn: ':') first copyWithoutAll: '{{'.
		"Skip '{{', the type, and ':' at the front; drop '}}' at the end"
		value := key copyFrom: type size + 4 to: key size - 2.
		pageConfig
			ifNil: [ color := 'default' ]
			ifNotNil: [ | colors |
				colors := pageConfig at: 'annotationColors' ifAbsent: [ nil ].
				"NOTE(review): when pageConfig exists but has no
				 'annotationColors', color stays nil and the span
				 concatenation below would fail — confirm configs always
				 define it"
				colors ifNotNil: [
					color := colors at: type ifAbsent: [ colors at: 'defaultColor' ifAbsentPut: ['default'] ]
				]
			].
		substitutions
			at: key
			put: '<span st-class="',type,'" style="color:', color, '">', value,'</span>'.
	].
	"Replace every annotation occurrence in the exported content"
	exported := self contentAsString.
	substitutions keysAndValuesDo: [:k :v |
		exported := exported copyReplaceAll: k with: v.
	].
	^ exported
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> contentAsStringCustomized [
	"Answer the snippet content, demoting a leading Markdown header one level
	 by prefixing an extra '#'; other content is answered unchanged."
	| content |
	content := self contentAsString.
	^ (content beginsWith: '#')
		ifTrue: [ '#', content ]
		ifFalse: [ content ]
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> markdeepCustomCloser [
|
||||
^ ''
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> markdeepCustomOpener [
|
||||
^ ''
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> markdownCustomCloser [
|
||||
^ self markdeepCustomCloser
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> markdownCustomOpener [
|
||||
^ self markdeepCustomOpener
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> metadata [
|
||||
|
||||
^ self metadataUpdate
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uidString;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> sanitizeMetadata [
	"Remove any stale 'metadata' option, then strip XML tag delimiters from
	 this snippet's metadata values (compared via their string form).
	 Answers early when there are no options or no 'metadata' option."
	self options ifNil: [^ self ].
	"removeKey:ifAbsent: returns from the whole method when the key is missing"
	self options removeKey: 'metadata' ifAbsent: [^ self ].
	self metadata keysAndValuesDo: [:k :v |
		(v asString includesAny: #($< $>))
			ifTrue: [
				self metadata at: k put: (v asString copyWithoutXMLDelimiters)
			]
	]
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> tags [
|
||||
^ self metadata at: 'tags' ifAbsentPut: [ Set new ]
|
||||
]
|
||||
Extension { #name : #LeTextualSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> asMarkdeep [
|
||||
"Inspired by Alpine.js and Assembler CSS 'x-' properties, we are going to use
|
||||
'st-' properties as a way to extend divs metadata regarding its contents."
|
||||
|
||||
| output |
|
||||
output := WriteStream on: ''.
|
||||
output
|
||||
nextPutAll: '<div st-class="' , self class greaseString , '"';
|
||||
nextPut: Character lf;
|
||||
nextPutAll: ' st-data="' , (STON toStringPretty: self metadata) , '">';
|
||||
nextPut: Character lf;
|
||||
nextPutAll: self markdeepCustomOpener;
|
||||
nextPutAll: self contentAsStringAnnotated;
|
||||
nextPut: Character lf;
|
||||
nextPutAll: self markdeepCustomCloser;
|
||||
nextPutAll: '</div>';
|
||||
nextPut: Character lf;
|
||||
nextPut: Character lf.
|
||||
^ output contents withInternetLineEndings
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> asMarkdown [
|
||||
|
||||
| output |
|
||||
output := '' writeStream.
|
||||
output
|
||||
nextPutAll: self contentAsStringCustomized; lf.
|
||||
^ output contents
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> asMarkdownWithMetadataWrappers [
|
||||
"Inspired by Alpine.js and Assembler CSS 'x-' properties, we are going to use
|
||||
'st-' properties as a way to extend divs metadata regarding its contents."
|
||||
| output |
|
||||
output := '' writeStream.
|
||||
output
|
||||
nextPutAll: '<div st-class="', self class asString, '"'; lf;
|
||||
nextPutAll: ' st-data="', (STON toString: self metadata), '">'; lf;
|
||||
nextPutAll: self markdownCustomOpener;
|
||||
nextPutAll: self contentAsStringCustomized; lf;
|
||||
nextPutAll: self markdownCustomCloser;
|
||||
nextPutAll: '</div>'; lf; lf.
|
||||
^ output contents
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> contentAsStringAnnotated [
|
||||
| annotations substitutions exported pageConfig|
|
||||
self ast ifNil: [ ^ self contentAsString ].
|
||||
annotations := self ast parts select: [:each | each className includesSubstring: 'AnnotationNode' ].
|
||||
annotations ifEmpty: [ ^ self contentAsString ].
|
||||
substitutions := OrderedDictionary new.
|
||||
pageConfig := self page config.
|
||||
annotations do: [ :each | | key type value color |
|
||||
key := each source.
|
||||
type := (key splitOn: ':') first copyWithoutAll: '{{'.
|
||||
value := key copyFrom: type size + 4 to: key size - 2.
|
||||
pageConfig
|
||||
ifNil: [ color := 'default' ]
|
||||
ifNotNil: [ | colors |
|
||||
colors := pageConfig at: 'annotationColors' ifAbsent: [ nil ].
|
||||
colors ifNotNil: [
|
||||
color := colors at: type ifAbsent: [ colors at: 'defaultColor' ifAbsentPut: ['default'] ]
|
||||
]
|
||||
].
|
||||
substitutions
|
||||
at: key
|
||||
put: '<span st-class="',type,'" style="color:', color, '">', value,'</span>'.
|
||||
].
|
||||
exported := self contentAsString.
|
||||
substitutions keysAndValuesDo: [:k :v |
|
||||
exported := exported copyReplaceAll: k with: v.
|
||||
].
|
||||
^ exported
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> contentAsStringCustomized [
|
||||
(self contentAsString beginsWith: '#')
|
||||
ifTrue: [ ^ '#', self contentAsString ]
|
||||
ifFalse: [ ^ self contentAsString ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> markdeepCustomCloser [
|
||||
^ ''
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> markdeepCustomOpener [
|
||||
^ ''
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> markdownCustomCloser [
|
||||
^ self markdeepCustomCloser
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> markdownCustomOpener [
|
||||
^ self markdeepCustomOpener
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> metadata [
|
||||
|
||||
^ self metadataUpdate
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uidString;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> sanitizeMetadata [
|
||||
self options ifNil: [^ self ].
|
||||
self options removeKey: 'metadata' ifAbsent: [^ self ].
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v asString includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v asString copyWithoutXMLDelimiters)
|
||||
]
|
||||
]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeTextualSnippet >> tags [
|
||||
^ self metadata at: 'tags' ifAbsentPut: [ Set new ]
|
||||
]
|
||||
|
@ -1,21 +1,21 @@
|
||||
Extension { #name : #LeUnknownSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeUnknownSnippet >> metadataUpdate [
	"Answer a fresh ordered dictionary of this snippet's export metadata.
	 The parent surrogate is the parent itself when it is already a string
	 id, its uidString otherwise, or nil when there is no parent."
	| surrogate |
	surrogate := self parent ifNotNil: [ :currentParent |
		currentParent isString
			ifTrue: [ currentParent ]
			ifFalse: [ currentParent uidString ] ].
	^ OrderedDictionary new
		at: 'id' put: self uidString;
		at: 'parent' put: surrogate;
		at: 'created' put: self createTime asString;
		at: 'modified' put: self latestEditTime asString;
		at: 'creator' put: self createEmail asString;
		at: 'modifier' put: self editEmail asString;
		yourself
]
|
||||
Extension { #name : #LeUnknownSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeUnknownSnippet >> metadataUpdate [
|
||||
| surrogate |
|
||||
self parent
|
||||
ifNil: [ surrogate := nil]
|
||||
ifNotNil: [
|
||||
self parent isString
|
||||
ifTrue: [ surrogate := self parent]
|
||||
ifFalse: [ surrogate := self parent uidString ]
|
||||
].
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: surrogate;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: self createEmail asString;
|
||||
at: 'modifier' put: self editEmail asString;
|
||||
yourself
|
||||
]
|
||||
|
@ -1,26 +1,26 @@
|
||||
Extension { #name : #LeWardleyMapSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeWardleyMapSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uuid;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeWardleyMapSnippet >> sanitizeMetadata [
	"Strip XML tag delimiters ($< and $>) from this snippet's metadata string
	 values. Non-string values are skipped instead of raising
	 doesNotUnderstand on #includesAny:, matching the guarded
	 LeTextualSnippet>>sanitizeMetadata variant."
	self metadata keysAndValuesDo: [:k :v |
		(v isString and: [ v includesAny: #($< $>) ])
			ifTrue: [
				self metadata at: k put: (v copyWithoutAll: #($< $>))
			]
	]
]
|
||||
Extension { #name : #LeWardleyMapSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeWardleyMapSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uuid;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeWardleyMapSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
||||
|
@ -1,26 +1,26 @@
|
||||
Extension { #name : #LeWordSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeWordSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uuid;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeWordSnippet >> sanitizeMetadata [
	"Strip XML tag delimiters ($< and $>) from this snippet's metadata string
	 values. Non-string values are skipped instead of raising
	 doesNotUnderstand on #includesAny:, matching the guarded
	 LeTextualSnippet>>sanitizeMetadata variant."
	self metadata keysAndValuesDo: [:k :v |
		(v isString and: [ v includesAny: #($< $>) ])
			ifTrue: [
				self metadata at: k put: (v copyWithoutAll: #($< $>))
			]
	]
]
|
||||
Extension { #name : #LeWordSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeWordSnippet >> metadataUpdate [
|
||||
| createEmailSanitized editEmailSanitized |
|
||||
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
|
||||
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: self parent uuid;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: createEmailSanitized;
|
||||
at: 'modifier' put: editEmailSanitized;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeWordSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
||||
|
@ -1,57 +1,57 @@
|
||||
Extension { #name : #LeYoutubeReferenceSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeYoutubeReferenceSnippet >> asMarkdeep [
|
||||
| output |
|
||||
output := WriteStream on: ''.
|
||||
output
|
||||
nextPutAll: self metadataDiv;
|
||||
nextPutAll: '![ ', self title, ' | ', self authorName, ' ](',self urlString, ')';
|
||||
nextPut: Character lf;
|
||||
nextPutAll: '</div>';
|
||||
nextPut: Character lf;
|
||||
nextPut: Character lf.
|
||||
^ output contents
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeYoutubeReferenceSnippet >> metadata [
|
||||
^ self optionAt: 'metadata' ifAbsentPut: [ self metadataUpdate ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeYoutubeReferenceSnippet >> metadataDiv [
|
||||
| output |
|
||||
output := WriteStream on: ''.
|
||||
output
|
||||
nextPutAll: '<div st-class="' , self class greaseString , '"';
|
||||
nextPut: Character lf;
|
||||
nextPutAll: ' st-data="' , (STON toStringPretty: self metadata) , '">'.
|
||||
^ output contents withInternetLineEndings.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeYoutubeReferenceSnippet >> metadataUpdate [
|
||||
| surrogate |
|
||||
self parent
|
||||
ifNil: [ surrogate := nil]
|
||||
ifNotNil: [ surrogate := self parent uidString ].
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: surrogate;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: self createEmail asString;
|
||||
at: 'modifier' put: self editEmail asString;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeYoutubeReferenceSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
||||
Extension { #name : #LeYoutubeReferenceSnippet }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeYoutubeReferenceSnippet >> asMarkdeep [
|
||||
| output |
|
||||
output := WriteStream on: ''.
|
||||
output
|
||||
nextPutAll: self metadataDiv;
|
||||
nextPutAll: '![ ', self title, ' | ', self authorName, ' ](',self urlString, ')';
|
||||
nextPut: Character lf;
|
||||
nextPutAll: '</div>';
|
||||
nextPut: Character lf;
|
||||
nextPut: Character lf.
|
||||
^ output contents
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeYoutubeReferenceSnippet >> metadata [
|
||||
^ self optionAt: 'metadata' ifAbsentPut: [ self metadataUpdate ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeYoutubeReferenceSnippet >> metadataDiv [
|
||||
| output |
|
||||
output := WriteStream on: ''.
|
||||
output
|
||||
nextPutAll: '<div st-class="' , self class greaseString , '"';
|
||||
nextPut: Character lf;
|
||||
nextPutAll: ' st-data="' , (STON toStringPretty: self metadata) , '">'.
|
||||
^ output contents withInternetLineEndings.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeYoutubeReferenceSnippet >> metadataUpdate [
|
||||
| surrogate |
|
||||
self parent
|
||||
ifNil: [ surrogate := nil]
|
||||
ifNotNil: [ surrogate := self parent uidString ].
|
||||
^ OrderedDictionary new
|
||||
at: 'id' put: self uidString;
|
||||
at: 'parent' put: surrogate;
|
||||
at: 'created' put: self createTime asString;
|
||||
at: 'modified' put: self latestEditTime asString;
|
||||
at: 'creator' put: self createEmail asString;
|
||||
at: 'modifier' put: self editEmail asString;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
LeYoutubeReferenceSnippet >> sanitizeMetadata [
|
||||
self metadata keysAndValuesDo: [:k :v |
|
||||
(v includesAny: #($< $>))
|
||||
ifTrue: [
|
||||
self metadata at: k put: (v copyWithoutAll: #($< $>))
|
||||
]
|
||||
]
|
||||
]
|
||||
|
@ -1,33 +1,33 @@
|
||||
Class {
|
||||
#name : #Logseq,
|
||||
#superclass : #Object,
|
||||
#instVars : [
|
||||
'folder'
|
||||
],
|
||||
#category : #MiniDocs
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
Logseq >> assets [
|
||||
^ self folder / 'assets'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Logseq >> folder [
|
||||
^ folder
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Logseq >> folder: aFolder [
|
||||
folder := aFolder
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Logseq >> journals [
|
||||
self folder / 'journals'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Logseq >> pages [
|
||||
self folder/ 'pages'
|
||||
]
|
||||
Class {
|
||||
#name : #Logseq,
|
||||
#superclass : #Object,
|
||||
#instVars : [
|
||||
'folder'
|
||||
],
|
||||
#category : #MiniDocs
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
Logseq >> assets [
|
||||
^ self folder / 'assets'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Logseq >> folder [
|
||||
^ folder
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Logseq >> folder: aFolder [
|
||||
folder := aFolder
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Logseq >> journals [
|
||||
self folder / 'journals'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Logseq >> pages [
|
||||
self folder/ 'pages'
|
||||
]
|
||||
|
@ -1,23 +1,23 @@
|
||||
"
|
||||
Please describe the package using the class comment of the included manifest class. The manifest class also includes other additional metadata for the package. These meta data are used by other tools such as the SmalllintManifestChecker and the critics Browser
|
||||
"
|
||||
Class {
|
||||
#name : #ManifestMiniDocs,
|
||||
#superclass : #PackageManifest,
|
||||
#category : #'MiniDocs-Manifest'
|
||||
}
|
||||
|
||||
{ #category : #'code-critics' }
|
||||
ManifestMiniDocs class >> ruleCascadedNextPutAllsRuleV1FalsePositive [
|
||||
^ #(#(#(#RGMethodDefinition #(#LeTextualSnippet #asMarkdeep #false)) #'2022-09-09T12:31:08.106585-05:00') )
|
||||
]
|
||||
|
||||
{ #category : #'code-critics' }
|
||||
ManifestMiniDocs class >> ruleExcessiveVariablesRuleV1FalsePositive [
|
||||
^ #(#(#(#RGClassDefinition #(#Markdeep)) #'2022-07-16T12:24:34.695032-05:00') )
|
||||
]
|
||||
|
||||
{ #category : #'code-critics' }
|
||||
ManifestMiniDocs class >> ruleParseTreeLintRuleV1FalsePositive [
|
||||
^ #(#(#(#RGPackageDefinition #(#MiniDocs)) #'2022-07-25T09:28:50.156394-05:00') )
|
||||
]
|
||||
"
|
||||
Please describe the package using the class comment of the included manifest class. The manifest class also includes other additional metadata for the package. These meta data are used by other tools such as the SmalllintManifestChecker and the critics Browser
|
||||
"
|
||||
Class {
|
||||
#name : #ManifestMiniDocs,
|
||||
#superclass : #PackageManifest,
|
||||
#category : #'MiniDocs-Manifest'
|
||||
}
|
||||
|
||||
{ #category : #'code-critics' }
|
||||
ManifestMiniDocs class >> ruleCascadedNextPutAllsRuleV1FalsePositive [
|
||||
^ #(#(#(#RGMethodDefinition #(#LeTextualSnippet #asMarkdeep #false)) #'2022-09-09T12:31:08.106585-05:00') )
|
||||
]
|
||||
|
||||
{ #category : #'code-critics' }
|
||||
ManifestMiniDocs class >> ruleExcessiveVariablesRuleV1FalsePositive [
|
||||
^ #(#(#(#RGClassDefinition #(#Markdeep)) #'2022-07-16T12:24:34.695032-05:00') )
|
||||
]
|
||||
|
||||
{ #category : #'code-critics' }
|
||||
ManifestMiniDocs class >> ruleParseTreeLintRuleV1FalsePositive [
|
||||
^ #(#(#(#RGPackageDefinition #(#MiniDocs)) #'2022-07-25T09:28:50.156394-05:00') )
|
||||
]
|
||||
|
File diff suppressed because it is too large
Load Diff
@ -1,221 +1,221 @@
|
||||
"
|
||||
I model a Markdown document.
|
||||
At some point the idea is to have a full native parser implemented to deal
|
||||
with my syntax, but meanwhile I will be collaborating with external parsers,
|
||||
particularly the ones provided by Pandoc and/or Lunamark.
|
||||
"
|
||||
Class {
|
||||
#name : #Markdown,
|
||||
#superclass : #MarkupFile,
|
||||
#instVars : [
|
||||
'metadata',
|
||||
'body',
|
||||
'title'
|
||||
],
|
||||
#category : #'MiniDocs-Core'
|
||||
}
|
||||
|
||||
{ #category : #'instance creation' }
|
||||
Markdown class >> fromFile: aFileReference [
|
||||
^ self new fromFile: aFileReference
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> asMarkdeep [
|
||||
^ Markdeep new
|
||||
body: self body;
|
||||
commentYAMLMetadata
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> body [
|
||||
^ body
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> body: aString [
|
||||
body := aString
|
||||
]
|
||||
|
||||
{ #category : #operation }
|
||||
Markdown >> commentYAMLMetadata [
|
||||
| newContents |
|
||||
self detectYAMLMetadata ifFalse: [ ^ self ].
|
||||
newContents := '' writeStream.
|
||||
newContents nextPutAll: '<!--@yaml'; lf.
|
||||
newContents nextPutAll: self yamlMetadataString.
|
||||
newContents nextPutAll: '-->'; lf; lf.
|
||||
(self lines copyFrom: self yamlMetadataClosingLineNumber + 2 to: self lines size) do: [ :line |
|
||||
newContents nextPutAll: line; lf ].
|
||||
^ newContents contents.
|
||||
]
|
||||
|
||||
{ #category : #utilities }
|
||||
Markdown >> containsYAMLMetadataClosing [
|
||||
^ self yamlMetadataClosingLineNumber > 0
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> contents [
|
||||
| response |
|
||||
response := WriteStream on: ''.
|
||||
response
|
||||
nextPutAll: '---'; cr;
|
||||
nextPutAll: self metadataAsYAML; cr;
|
||||
nextPutAll: '---'; cr;
|
||||
nextPutAll: self body.
|
||||
^ response contents
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> contents: aString [
|
||||
body := aString
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> documentTree [
|
||||
| parser|
|
||||
self contents ifNil: [^ nil].
|
||||
parser := PPCommonMarkBlockParser new parse: self body.
|
||||
^ parser accept: CMBlockVisitor new
|
||||
]
|
||||
|
||||
{ #category : #persistence }
|
||||
Markdown >> exportAsFile [
|
||||
| newFile |
|
||||
|
||||
newFile := (self file fullName ) asFileReference.
|
||||
^ self notifyExportAsFileOn: newFile.
|
||||
]
|
||||
|
||||
{ #category : #persistence }
|
||||
Markdown >> exportAsFileOn: aFileReference [
|
||||
aFileReference ensureDelete.
|
||||
aFileReference exists ifFalse: [ aFileReference ensureCreateFile ].
|
||||
aFileReference writeStreamDo: [ :stream |
|
||||
stream nextPutAll: self contents withInternetLineEndings ].
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> exportAsHTML [
|
||||
^ Pandoc markdownToHtml: self file
|
||||
]
|
||||
|
||||
{ #category : #operation }
|
||||
Markdown >> exportMetadataAsJson [
|
||||
"TBD: Lua scripts should be checked and installed when missing. Maybe a shared location
|
||||
in '.local/share/Grafoscopio/Scripts' should be developed in the near future."
|
||||
| output luaScript |
|
||||
luaScript := FileLocator home / '.local/share/Brea/scripts/meta-to-json.lua'.
|
||||
Smalltalk platformName = 'unix' ifTrue: [
|
||||
OSSUnixSubprocess new
|
||||
workingDirectory: self file parent fullName;
|
||||
command: 'pandoc';
|
||||
arguments: { '--lua-filter=', luaScript fullName . self file basename };
|
||||
redirectStdout;
|
||||
redirectStdin;
|
||||
runAndWaitOnExitDo: [ :process :outString :errString |
|
||||
output := process isSuccess
|
||||
ifTrue: [ outString ]
|
||||
ifFalse: [ errString ]
|
||||
]].
|
||||
^ output correctAccentedCharacters
|
||||
]
|
||||
|
||||
{ #category : #operation }
|
||||
Markdown >> exportMetadataAsYaml [
|
||||
| exportedFile |
|
||||
exportedFile := FileLocator temp / 'metadata.yaml'.
|
||||
MarkupFile exportAsFileOn: exportedFile containing: self yamlMetadataStringWithDelimiters.
|
||||
^ exportedFile
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> file [
|
||||
^ file ifNil: [ file := FileLocator temp / 'temporalMarkdeep.md.html' ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> file: aFileReference [
|
||||
"I store the origen/destination of the Markdown contents."
|
||||
file := aFileReference
|
||||
]
|
||||
|
||||
{ #category : #'instance creation' }
|
||||
Markdown >> fromFile: aFileReference [
|
||||
self fromString: aFileReference contents.
|
||||
self file: aFileReference.
|
||||
]
|
||||
|
||||
{ #category : #'instance creation' }
|
||||
Markdown >> fromString: markdownString [
|
||||
(self metadata) at: 'original' put: markdownString yamlMetadata.
|
||||
self body: markdownString contentsWithoutYAMLMetadata
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> gtTextFor: aView [
|
||||
<gtView>
|
||||
^ aView textEditor
|
||||
title: 'Text';
|
||||
text: [ self contents ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> headerAsTitle [
|
||||
| headerNode |
|
||||
headerNode := self documentTree children
|
||||
detect: [ :node | node className = 'PPCMHeader' and: [ node level = 1 ] ] ifNone: [ ^ 'Untitled' ].
|
||||
^ headerNode text
|
||||
]
|
||||
|
||||
{ #category : #utilities }
|
||||
Markdown >> lines [
|
||||
self file ifNotNil: [^ self file contents lines ].
|
||||
^ self contents lines.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> metadata [
|
||||
|
||||
^ metadata ifNil: [ metadata := Dictionary new].
|
||||
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> metadata: rawMeta [
|
||||
|
||||
metadata := rawMeta
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> metadataAsYAML [
|
||||
self metadata isEmptyOrNil ifTrue: [ ^ '' ].
|
||||
^ (YQ jsonToYaml: self metadata) accentedCharactersCorrection
|
||||
]
|
||||
|
||||
{ #category : #persistence }
|
||||
Markdown >> notifyExportAsFileOn: aFileReference [
|
||||
self exportAsFileOn: aFileReference.
|
||||
self inform: 'Exported as: ', String cr, aFileReference fullName.
|
||||
^ aFileReference
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> options [
|
||||
^ self metadata at: 'options' ifAbsentPut: [ self defaultOptions]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> printOn: aStream [
|
||||
| response |
|
||||
super printOn: aStream.
|
||||
response := self title ifNil: [ 'Untitled' ].
|
||||
aStream
|
||||
nextPutAll: '( ', response , ' )'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> title [
|
||||
^ title ifNil: [ title:= self headerAsTitle ]
|
||||
]
|
||||
"
|
||||
I model a Markdown document.
|
||||
At some point the idea is to have a full native parser implemented to deal
|
||||
with my syntax, but meanwhile I will be collaborating with external parsers,
|
||||
particularly the ones provided by Pandoc and/or Lunamark.
|
||||
"
|
||||
Class {
|
||||
#name : #Markdown,
|
||||
#superclass : #MarkupFile,
|
||||
#instVars : [
|
||||
'metadata',
|
||||
'body',
|
||||
'title'
|
||||
],
|
||||
#category : #'MiniDocs-Core'
|
||||
}
|
||||
|
||||
{ #category : #'instance creation' }
|
||||
Markdown class >> fromFile: aFileReference [
|
||||
^ self new fromFile: aFileReference
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> asMarkdeep [
|
||||
^ Markdeep new
|
||||
body: self body;
|
||||
commentYAMLMetadata
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> body [
|
||||
^ body
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> body: aString [
|
||||
body := aString
|
||||
]
|
||||
|
||||
{ #category : #operation }
|
||||
Markdown >> commentYAMLMetadata [
|
||||
| newContents |
|
||||
self detectYAMLMetadata ifFalse: [ ^ self ].
|
||||
newContents := '' writeStream.
|
||||
newContents nextPutAll: '<!--@yaml'; lf.
|
||||
newContents nextPutAll: self yamlMetadataString.
|
||||
newContents nextPutAll: '-->'; lf; lf.
|
||||
(self lines copyFrom: self yamlMetadataClosingLineNumber + 2 to: self lines size) do: [ :line |
|
||||
newContents nextPutAll: line; lf ].
|
||||
^ newContents contents.
|
||||
]
|
||||
|
||||
{ #category : #utilities }
|
||||
Markdown >> containsYAMLMetadataClosing [
|
||||
^ self yamlMetadataClosingLineNumber > 0
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> contents [
|
||||
| response |
|
||||
response := WriteStream on: ''.
|
||||
response
|
||||
nextPutAll: '---'; cr;
|
||||
nextPutAll: self metadataAsYAML; cr;
|
||||
nextPutAll: '---'; cr;
|
||||
nextPutAll: self body.
|
||||
^ response contents
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> contents: aString [
|
||||
body := aString
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> documentTree [
|
||||
| parser|
|
||||
self contents ifNil: [^ nil].
|
||||
parser := PPCommonMarkBlockParser new parse: self body.
|
||||
^ parser accept: CMBlockVisitor new
|
||||
]
|
||||
|
||||
{ #category : #persistence }
|
||||
Markdown >> exportAsFile [
|
||||
| newFile |
|
||||
|
||||
newFile := (self file fullName ) asFileReference.
|
||||
^ self notifyExportAsFileOn: newFile.
|
||||
]
|
||||
|
||||
{ #category : #persistence }
|
||||
Markdown >> exportAsFileOn: aFileReference [
|
||||
aFileReference ensureDelete.
|
||||
aFileReference exists ifFalse: [ aFileReference ensureCreateFile ].
|
||||
aFileReference writeStreamDo: [ :stream |
|
||||
stream nextPutAll: self contents withInternetLineEndings ].
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> exportAsHTML [
|
||||
^ Pandoc markdownToHtml: self file
|
||||
]
|
||||
|
||||
{ #category : #operation }
|
||||
Markdown >> exportMetadataAsJson [
|
||||
"TBD: Lua scripts should be checked and installed when missing. Maybe a shared location
|
||||
in '.local/share/Grafoscopio/Scripts' should be developed in the near future."
|
||||
| output luaScript |
|
||||
luaScript := FileLocator home / '.local/share/Brea/scripts/meta-to-json.lua'.
|
||||
Smalltalk platformName = 'unix' ifTrue: [
|
||||
OSSUnixSubprocess new
|
||||
workingDirectory: self file parent fullName;
|
||||
command: 'pandoc';
|
||||
arguments: { '--lua-filter=', luaScript fullName . self file basename };
|
||||
redirectStdout;
|
||||
redirectStdin;
|
||||
runAndWaitOnExitDo: [ :process :outString :errString |
|
||||
output := process isSuccess
|
||||
ifTrue: [ outString ]
|
||||
ifFalse: [ errString ]
|
||||
]].
|
||||
^ output correctAccentedCharacters
|
||||
]
|
||||
|
||||
{ #category : #operation }
|
||||
Markdown >> exportMetadataAsYaml [
|
||||
| exportedFile |
|
||||
exportedFile := FileLocator temp / 'metadata.yaml'.
|
||||
MarkupFile exportAsFileOn: exportedFile containing: self yamlMetadataStringWithDelimiters.
|
||||
^ exportedFile
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> file [
|
||||
^ file ifNil: [ file := FileLocator temp / 'temporalMarkdeep.md.html' ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> file: aFileReference [
|
||||
"I store the origen/destination of the Markdown contents."
|
||||
file := aFileReference
|
||||
]
|
||||
|
||||
{ #category : #'instance creation' }
|
||||
Markdown >> fromFile: aFileReference [
|
||||
self fromString: aFileReference contents.
|
||||
self file: aFileReference.
|
||||
]
|
||||
|
||||
{ #category : #'instance creation' }
|
||||
Markdown >> fromString: markdownString [
|
||||
(self metadata) at: 'original' put: markdownString yamlMetadata.
|
||||
self body: markdownString contentsWithoutYAMLMetadata
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> gtTextFor: aView [
|
||||
<gtView>
|
||||
^ aView textEditor
|
||||
title: 'Text';
|
||||
text: [ self contents ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> headerAsTitle [
|
||||
| headerNode |
|
||||
headerNode := self documentTree children
|
||||
detect: [ :node | node className = 'PPCMHeader' and: [ node level = 1 ] ] ifNone: [ ^ 'Untitled' ].
|
||||
^ headerNode text
|
||||
]
|
||||
|
||||
{ #category : #utilities }
|
||||
Markdown >> lines [
|
||||
self file ifNotNil: [^ self file contents lines ].
|
||||
^ self contents lines.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> metadata [
|
||||
|
||||
^ metadata ifNil: [ metadata := Dictionary new].
|
||||
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> metadata: rawMeta [
|
||||
|
||||
metadata := rawMeta
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> metadataAsYAML [
|
||||
self metadata isEmptyOrNil ifTrue: [ ^ '' ].
|
||||
^ (YQ jsonToYaml: self metadata) accentedCharactersCorrection
|
||||
]
|
||||
|
||||
{ #category : #persistence }
|
||||
Markdown >> notifyExportAsFileOn: aFileReference [
|
||||
self exportAsFileOn: aFileReference.
|
||||
self inform: 'Exported as: ', String cr, aFileReference fullName.
|
||||
^ aFileReference
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> options [
|
||||
^ self metadata at: 'options' ifAbsentPut: [ self defaultOptions]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> printOn: aStream [
|
||||
| response |
|
||||
super printOn: aStream.
|
||||
response := self title ifNil: [ 'Untitled' ].
|
||||
aStream
|
||||
nextPutAll: '( ', response , ' )'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Markdown >> title [
|
||||
^ title ifNil: [ title:= self headerAsTitle ]
|
||||
]
|
||||
|
@ -1,40 +1,40 @@
|
||||
"
|
||||
I model common operations made with several markup files.
|
||||
"
|
||||
Class {
|
||||
#name : #MarkupFile,
|
||||
#superclass : #Object,
|
||||
#instVars : [
|
||||
'file'
|
||||
],
|
||||
#category : #'MiniDocs-Core'
|
||||
}
|
||||
|
||||
{ #category : #persistence }
|
||||
MarkupFile class >> exportAsFileOn: aFileReferenceOrFileName containing: anObject [
|
||||
| file preprocessed |
|
||||
file := aFileReferenceOrFileName asFileReference.
|
||||
file ensureDelete.
|
||||
file exists ifFalse: [ file ensureCreateFile ].
|
||||
(#('String' 'ByteString' 'WideString') includes: anObject className )
|
||||
ifTrue: [ preprocessed := anObject ]
|
||||
ifFalse: [preprocessed := STON toStringPretty: anObject ].
|
||||
file writeStreamDo: [ :stream |
|
||||
stream nextPutAll: preprocessed ].
|
||||
self inform: 'Exported as: ', String cr, file fullName.
|
||||
^ file
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MarkupFile class >> installTemplate: anUrl into: aFolder [
|
||||
|
||||
| fileName |
|
||||
fileName := anUrl asUrl segments last.
|
||||
(aFolder / fileName) exists
|
||||
ifTrue: [ (aFolder / fileName) ensureDeleteFile ]
|
||||
ifFalse: [ aFolder ensureCreateDirectory ].
|
||||
ZnClient new
|
||||
url: anUrl;
|
||||
downloadTo: aFolder.
|
||||
^ aFolder
|
||||
]
|
||||
"
|
||||
I model common operations made with several markup files.
|
||||
"
|
||||
Class {
|
||||
#name : #MarkupFile,
|
||||
#superclass : #Object,
|
||||
#instVars : [
|
||||
'file'
|
||||
],
|
||||
#category : #'MiniDocs-Core'
|
||||
}
|
||||
|
||||
{ #category : #persistence }
|
||||
MarkupFile class >> exportAsFileOn: aFileReferenceOrFileName containing: anObject [
|
||||
| file preprocessed |
|
||||
file := aFileReferenceOrFileName asFileReference.
|
||||
file ensureDelete.
|
||||
file exists ifFalse: [ file ensureCreateFile ].
|
||||
(#('String' 'ByteString' 'WideString') includes: anObject className )
|
||||
ifTrue: [ preprocessed := anObject ]
|
||||
ifFalse: [preprocessed := STON toStringPretty: anObject ].
|
||||
file writeStreamDo: [ :stream |
|
||||
stream nextPutAll: preprocessed ].
|
||||
self inform: 'Exported as: ', String cr, file fullName.
|
||||
^ file
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MarkupFile class >> installTemplate: anUrl into: aFolder [
|
||||
|
||||
| fileName |
|
||||
fileName := anUrl asUrl segments last.
|
||||
(aFolder / fileName) exists
|
||||
ifTrue: [ (aFolder / fileName) ensureDeleteFile ]
|
||||
ifFalse: [ aFolder ensureCreateDirectory ].
|
||||
ZnClient new
|
||||
url: anUrl;
|
||||
downloadTo: aFolder.
|
||||
^ aFolder
|
||||
]
|
||||
|
@ -1,145 +1,142 @@
|
||||
"
|
||||
MiniDocs is a project that includes several minimalistic documentation tools used by the [Grafoscopio](https://mutabit.com/grafoscopio/en.html) community, starting with [Markdeep](https://casual-effects.com/markdeep/) and its integrations with [Lepiter](https://lepiter.io/feenk/introducing-lepiter--knowledge-management--e2p6apqsz5npq7m4xte0kkywn/) .
|
||||
"
|
||||
Class {
|
||||
#name : #MiniDocs,
|
||||
#superclass : #Object,
|
||||
#category : #'MiniDocs-Core'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> altKeys [
|
||||
^ BlAlternativeCombination new
|
||||
combination: (BlSingleKeyCombination key:BlKeyboardKey altLeft)
|
||||
or: (BlSingleKeyCombination key:BlKeyboardKey altRight)
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> altShiftLeftCombo [
|
||||
^ BlCompulsoryCombination new
|
||||
with: self altKeys;
|
||||
with: self shiftKeys;
|
||||
with: (BlSingleKeyCombination key: BlKeyboardKey arrowLeft);
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> altShiftRightCombo [
|
||||
^ BlCompulsoryCombination new
|
||||
with: self altKeys;
|
||||
with: self shiftKeys;
|
||||
with: (BlSingleKeyCombination key: BlKeyboardKey arrowRight);
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> appFolder [
|
||||
| tempFolder userDataFolder |
|
||||
userDataFolder := Smalltalk os isWindows
|
||||
ifTrue: [ FileLocator home / 'AppData' / 'Local' ]
|
||||
ifFalse: [ FileLocator userData ].
|
||||
tempFolder := userDataFolder / 'Mutabit' / 'MiniDocs'.
|
||||
tempFolder exists ifFalse: [ tempFolder ensureCreateDirectory ].
|
||||
^ tempFolder
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> exportAsSton: anObject on: aFileReference [
|
||||
MarkupFile exportAsFileOn: aFileReference containing: (STON toStringPretty: anObject) withInternetLineEndings
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> importGrafoscopioFile: aFileReference [
|
||||
|
||||
^ (STON fromString: aFileReference) first parent
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> initialize [
|
||||
self keyboardShortcutsRemapping
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> installYamlToJson [
|
||||
"For the moment, only Gnu/Linux and Mac are supported.
|
||||
IMPORTANT: Nimble, Nim's package manager should be installed, as this process doesn't verify its proper installation."
|
||||
self yamlToJsonBinary exists ifTrue: [ ^ MiniDocs appFolder ].
|
||||
Nimble
|
||||
install: 'yaml';
|
||||
install: 'commandeer'.
|
||||
OSSUnixSubprocess new
|
||||
command: 'nim';
|
||||
arguments: {'c'. self yamlToJsonSourceCode fullName};
|
||||
runAndWaitOnExitDo: [ :process :outString |
|
||||
(self yamlToJsonSourceCode parent / self yamlToJsonSourceCode basenameWithoutExtension) moveTo: MiniDocs appFolder asFileReference.
|
||||
^ MiniDocs appFolder ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> keyboardShortcutsRemapping [
|
||||
| primaryNewLine secondaryNewLine |
|
||||
primaryNewLine := LeSnippetElement keyboardShortcuts at: #NewLine.
|
||||
secondaryNewLine := LeSnippetElement keyboardShortcuts at: #SecondaryNewLine.
|
||||
^ LeSnippetElement keyboardShortcuts
|
||||
at: #NewLine put: secondaryNewLine;
|
||||
at: #SecondaryNewLine put: primaryNewLine;
|
||||
at: #IndentSnippet put: self altShiftRightCombo;
|
||||
at: #UnindentSnippet put: self altShiftLeftCombo;
|
||||
yourself
|
||||
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> shiftKeys [
|
||||
^ BlAlternativeCombination new
|
||||
combination: (BlSingleKeyCombination key:BlKeyboardKey shiftLeft)
|
||||
or: (BlSingleKeyCombination key:BlKeyboardKey shiftRight)
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> yamlToJson: yamlString [
|
||||
"This method uses a external binary written in Nim, as the native Pharo parser for YAML, written in PetitParser,
|
||||
was less robust and unable to parse correctly the same strings as the external one."
|
||||
yamlString ifNil: [ ^ Dictionary new ].
|
||||
self yamlToJsonBinary exists ifFalse: [ self installYamlToJson ].
|
||||
|
||||
OSSUnixSubprocess new
|
||||
command: self yamlToJsonBinary fullName;
|
||||
arguments: {yamlString};
|
||||
redirectStdout;
|
||||
runAndWaitOnExitDo: [ :process :outString |
|
||||
^ (STONJSON fromString: outString allButFirst accentedCharactersCorrection) first
|
||||
]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> yamlToJsonBinary [
|
||||
^ self appFolder / 'yamlToJson'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> yamlToJsonSourceCode [
|
||||
^ FileLocator image parent / 'pharo-local/iceberg/Offray/MiniDocs/src/yamlToJson.nim'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs >> installNimFileExporter [
|
||||
| folder |
|
||||
folder := (MiniDocs appFolder / 'scripts') ensureCreateDirectory.
|
||||
|
||||
ZnClient new
|
||||
url: 'https://mutabit.com/repos.fossil/mutabit/uv/wiki/scripts/stringAsFileInto';
|
||||
downloadTo: folder / 'stringAsFileInto'.
|
||||
|
||||
ZnClient new
|
||||
url: 'https://mutabit.com/repos.fossil/mutabit/doc/trunk/wiki/scripts/stringAsFileInto.nim';
|
||||
downloadTo: folder / 'stringAsFileInto.nim'.
|
||||
|
||||
OSSUnixSubprocess new
|
||||
command: 'chmod';
|
||||
arguments: { '+x' . (folder / 'stringAsFileInto') fullName };
|
||||
workingDirectory: folder fullName;
|
||||
redirectStdout;
|
||||
redirectStderr;
|
||||
runAndWaitOnExitDo: [ :process :outString | ^ outString ]
|
||||
]
|
||||
"
|
||||
MiniDocs is a project that includes several minimalistic documentation tools used by the [Grafoscopio](https://mutabit.com/grafoscopio/en.html) community, starting with [Markdeep](https://casual-effects.com/markdeep/) and its integrations with [Lepiter](https://lepiter.io/feenk/introducing-lepiter--knowledge-management--e2p6apqsz5npq7m4xte0kkywn/) .
|
||||
"
|
||||
Class {
|
||||
#name : #MiniDocs,
|
||||
#superclass : #Object,
|
||||
#category : #'MiniDocs-Core'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> altKeys [
|
||||
^ BlAlternativeCombination new
|
||||
combination: (BlSingleKeyCombination key:BlKeyboardKey altLeft)
|
||||
or: (BlSingleKeyCombination key:BlKeyboardKey altRight)
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> altShiftLeftCombo [
|
||||
^ BlCompulsoryCombination new
|
||||
with: self altKeys;
|
||||
with: self shiftKeys;
|
||||
with: (BlSingleKeyCombination key: BlKeyboardKey arrowLeft);
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> altShiftRightCombo [
|
||||
^ BlCompulsoryCombination new
|
||||
with: self altKeys;
|
||||
with: self shiftKeys;
|
||||
with: (BlSingleKeyCombination key: BlKeyboardKey arrowRight);
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> appFolder [
|
||||
| tempFolder |
|
||||
tempFolder := ExoRepo userDataFolder / 'Mutabit' / 'MiniDocs'.
|
||||
tempFolder exists ifFalse: [ tempFolder ensureCreateDirectory ].
|
||||
^ tempFolder
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> exportAsSton: anObject on: aFileReference [
|
||||
MarkupFile exportAsFileOn: aFileReference containing: (STON toStringPretty: anObject) withInternetLineEndings
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> importGrafoscopioFile: aFileReference [
|
||||
|
||||
^ (STON fromString: aFileReference) first parent
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> initialize [
|
||||
self keyboardShortcutsRemapping
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> installYamlToJson [
	"Compile the Nim YAML-to-JSON helper and move the resulting binary into the MiniDocs app folder.
	Answers the app folder. If the binary already exists, answers immediately without recompiling.
	For the moment, only Gnu/Linux and Mac are supported.
	IMPORTANT: Nimble, Nim's package manager, should be installed, as this process doesn't verify its proper installation."
	self yamlToJsonBinary exists ifTrue: [ ^ MiniDocs appFolder ].
	"Nim library dependencies of the yamlToJson script."
	Nimble
		install: 'yaml';
		install: 'commandeer'.
	OSSUnixSubprocess new
		command: 'nim';
		arguments: {'c'. self yamlToJsonSourceCode fullName};
		runAndWaitOnExitDo: [ :process :outString |
			"nim c leaves the binary next to the source; relocate it to the shared app folder."
			(self yamlToJsonSourceCode parent / self yamlToJsonSourceCode basenameWithoutExtension) moveTo: MiniDocs appFolder asFileReference.
			^ MiniDocs appFolder ]
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> keyboardShortcutsRemapping [
|
||||
| primaryNewLine secondaryNewLine |
|
||||
primaryNewLine := LeSnippetElement keyboardShortcuts at: #NewLine.
|
||||
secondaryNewLine := LeSnippetElement keyboardShortcuts at: #SecondaryNewLine.
|
||||
^ LeSnippetElement keyboardShortcuts
|
||||
at: #NewLine put: secondaryNewLine;
|
||||
at: #SecondaryNewLine put: primaryNewLine;
|
||||
at: #IndentSnippet put: self altShiftRightCombo;
|
||||
at: #UnindentSnippet put: self altShiftLeftCombo;
|
||||
yourself
|
||||
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> shiftKeys [
|
||||
^ BlAlternativeCombination new
|
||||
combination: (BlSingleKeyCombination key:BlKeyboardKey shiftLeft)
|
||||
or: (BlSingleKeyCombination key:BlKeyboardKey shiftRight)
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> yamlToJson: yamlString [
	"Parse yamlString by delegating to an external binary written in Nim, as the native Pharo
	parser for YAML, written in PetitParser, was less robust and unable to parse correctly
	the same strings as the external one.
	Answers the first element of the parsed JSON; answers an empty Dictionary when yamlString is nil.
	Installs the helper binary on demand."
	yamlString ifNil: [ ^ Dictionary new ].
	self yamlToJsonBinary exists ifFalse: [ self installYamlToJson ].

	OSSUnixSubprocess new
		command: self yamlToJsonBinary fullName;
		arguments: {yamlString};
		redirectStdout;
		runAndWaitOnExitDo: [ :process :outString |
			"NOTE(review): #allButFirst drops the first character of the binary's output —
			presumably a spurious leading character emitted by the helper; confirm against the Nim script."
			^ (STONJSON fromString: outString allButFirst accentedCharactersCorrection) first
		]
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> yamlToJsonBinary [
|
||||
^ self appFolder / 'yamlToJson'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs class >> yamlToJsonSourceCode [
|
||||
^ FileLocator image parent / 'pharo-local/iceberg/Offray/MiniDocs/src/yamlToJson.nim'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocs >> installNimFileExporter [
|
||||
| folder |
|
||||
folder := (MiniDocs appFolder / 'scripts') ensureCreateDirectory.
|
||||
|
||||
ZnClient new
|
||||
url: 'https://mutabit.com/repos.fossil/mutabit/uv/wiki/scripts/stringAsFileInto';
|
||||
downloadTo: folder / 'stringAsFileInto'.
|
||||
|
||||
ZnClient new
|
||||
url: 'https://mutabit.com/repos.fossil/mutabit/doc/trunk/wiki/scripts/stringAsFileInto.nim';
|
||||
downloadTo: folder / 'stringAsFileInto.nim'.
|
||||
|
||||
OSSUnixSubprocess new
|
||||
command: 'chmod';
|
||||
arguments: { '+x' . (folder / 'stringAsFileInto') fullName };
|
||||
workingDirectory: folder fullName;
|
||||
redirectStdout;
|
||||
redirectStderr;
|
||||
runAndWaitOnExitDo: [ :process :outString | ^ outString ]
|
||||
]
|
||||
|
@ -1,74 +1,74 @@
|
||||
Class {
|
||||
#name : #MiniDocsServer,
|
||||
#superclass : #TLWebserver,
|
||||
#instVars : [
|
||||
'storage'
|
||||
],
|
||||
#classInstVars : [
|
||||
'singleton'
|
||||
],
|
||||
#category : #'MiniDocs-Core'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocsServer class >> build [
|
||||
TLRESTAPIBuilder buildAPI.
|
||||
self start
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocsServer class >> defaultConfiguration [
|
||||
"Override to set more default values"
|
||||
^ {
|
||||
#port -> 1701
|
||||
}
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocsServer class >> listLepiterDocs: aRequest [
|
||||
<REST_API: 'GET' pattern: 'lepiter'>
|
||||
^ 'A list of Mardeep exported Lepiter docs will appear soon...'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocsServer class >> restart [
|
||||
Teapot stopAll.
|
||||
self build.
|
||||
^ self start
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocsServer class >> singleton [
|
||||
^ singleton ifNil: [ singleton := MiniDocsServer teapot ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocsServer >> addStorage: anObject [
|
||||
self storage add: anObject.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocsServer >> initRoutes [
|
||||
self storage: FileLocator documents / 'lepiter' / 'default'.
|
||||
self teapot
|
||||
serveStatic: '/lepiter/doc' from: self storage fullName.
|
||||
self teapot
|
||||
GET: '/lepiter' -> 'A list of Mardeep exported Lepiter docs will appear soon...'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocsServer >> start [
|
||||
self class defaultPort: 1701.
|
||||
self initRoutes.
|
||||
super start.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocsServer >> storage [
|
||||
^ storage
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocsServer >> storage: aFoldersOrderedCollection [
|
||||
storage := aFoldersOrderedCollection
|
||||
]
|
||||
Class {
|
||||
#name : #MiniDocsServer,
|
||||
#superclass : #TLWebserver,
|
||||
#instVars : [
|
||||
'storage'
|
||||
],
|
||||
#classInstVars : [
|
||||
'singleton'
|
||||
],
|
||||
#category : #'MiniDocs-Core'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocsServer class >> build [
|
||||
TLRESTAPIBuilder buildAPI.
|
||||
self start
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocsServer class >> defaultConfiguration [
|
||||
"Override to set more default values"
|
||||
^ {
|
||||
#port -> 1701
|
||||
}
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocsServer class >> listLepiterDocs: aRequest [
|
||||
<REST_API: 'GET' pattern: 'lepiter'>
|
||||
^ 'A list of Mardeep exported Lepiter docs will appear soon...'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocsServer class >> restart [
|
||||
Teapot stopAll.
|
||||
self build.
|
||||
^ self start
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocsServer class >> singleton [
|
||||
^ singleton ifNil: [ singleton := MiniDocsServer teapot ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocsServer >> addStorage: anObject [
	"Add anObject to the server's storage.
	NOTE(review): #initRoutes assigns a single FileReference to storage, which does not
	respond to #add: — confirm whether storage is meant to be a collection of folders
	(the setter's argument name aFoldersOrderedCollection suggests so)."
	self storage add: anObject.
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocsServer >> initRoutes [
	"Configure the Teapot routes: serve the default Lepiter folder statically under
	/lepiter/doc, and answer a placeholder string on GET /lepiter."
	self storage: FileLocator documents / 'lepiter' / 'default'.
	self teapot
		serveStatic: '/lepiter/doc' from: self storage fullName.
	self teapot
		GET: '/lepiter' -> 'A list of Mardeep exported Lepiter docs will appear soon...'
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocsServer >> start [
|
||||
self class defaultPort: 1701.
|
||||
self initRoutes.
|
||||
super start.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocsServer >> storage [
|
||||
^ storage
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
MiniDocsServer >> storage: aFoldersOrderedCollection [
|
||||
storage := aFoldersOrderedCollection
|
||||
]
|
||||
|
@ -1,64 +1,66 @@
|
||||
"
|
||||
I'm run an implementation of the [Nano ID](https://github.com/ai/nanoid) tiny, secure URL-friendly unique string ID generator via its [Nim implementation](https://github.com/icyphox/nanoid.nim).
|
||||
|
||||
The Nim script has hard coded:
|
||||
|
||||
* a [base 58 encoding](https://medium.com/concerning-pharo/understanding-base58-encoding-23e673e37ff6) alphabet to avoid similar looking letter and the use of non-alphanumeric characters.
|
||||
* a 12 characters length output, which gives [a pretty low probability collision](https://zelark.github.io/nano-id-cc/) for the previous alphabet:
|
||||
~616 years needed, in order to have a 1% probability of at least one collision at a speed of 1000 IDs per hour.
|
||||
This is more than enough for our unique IDs applications, mostly in the documentation context,
|
||||
which consists of hand crafted and/or programmatically produced notes ,
|
||||
for example in data narratives, book(lets) and TiddlyWiki tiddlers of tens or hundreds of notes at most,
|
||||
unevenly produced between hours, days and/or weeks..
|
||||
|
||||
The `External` tag is related on its dependency on other programming languages and frameworks,
|
||||
though the dependency should be loaded by just loading a small binary with no dependencies.
|
||||
"
|
||||
Class {
|
||||
#name : #NanoID,
|
||||
#superclass : #Object,
|
||||
#category : #'MiniDocs-External'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
NanoID class >> binaryFile [
|
||||
^ MiniDocs appFolder / self scriptSourceCode basenameWithoutExtension
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
NanoID class >> generate [
|
||||
self binaryFile exists ifFalse: [ NanoID install].
|
||||
Smalltalk os isWindows
|
||||
ifTrue: [ ^ (LibC resultOfCommand:self binaryFile fullName) copyWithoutAll: (Character lf asString) ].
|
||||
OSSUnixSubprocess new
|
||||
command: self binaryFile fullName;
|
||||
redirectStdout;
|
||||
redirectStdout;
|
||||
runAndWaitOnExitDo: [ :process :outString | ^ outString copyWithoutAll: (Character lf asString) ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
NanoID class >> install [
|
||||
"For the moment, only Gnu/Linux and Mac are supported.
|
||||
IMPORTANT: Nimble, Nim's package manager should be installed, as this process doesn't verify its proper installation."
|
||||
self binaryFile exists ifTrue: [ ^ MiniDocs appFolder ].
|
||||
Nimble install: 'nanoid'.
|
||||
Smalltalk os isWindows
|
||||
ifTrue: [ ^ LibC resultOfCommand: 'nanoid c ',self scriptSourceCode fullName ].
|
||||
OSSUnixSubprocess new
|
||||
command: 'nim';
|
||||
arguments: {'c'. self scriptSourceCode fullName};
|
||||
runAndWaitOnExitDo: [ :process :outString |
|
||||
(self scriptSourceCode parent / (self scriptSourceCode) basenameWithoutExtension) moveToPageTitled: MiniDocs appFolder asFileReference.
|
||||
^ MiniDocs appFolder ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
NanoID class >> isInstalled [
|
||||
^ self binaryFile exists
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
NanoID class >> scriptSourceCode [
|
||||
^ FileLocator image parent / 'pharo-local/iceberg/Offray/MiniDocs/src/nanoIdGen.nim'
|
||||
]
|
||||
"
|
||||
I run an implementation of the [Nano ID](https://github.com/ai/nanoid) tiny, secure, URL-friendly unique string ID generator via its [Nim implementation](https://github.com/icyphox/nanoid.nim).
|
||||
|
||||
The Nim script has hard coded:
|
||||
|
||||
* a [base 58 encoding](https://medium.com/concerning-pharo/understanding-base58-encoding-23e673e37ff6) alphabet to avoid similar looking letter and the use of non-alphanumeric characters.
|
||||
* a 12 characters length output, which gives [a pretty low probability collision](https://zelark.github.io/nano-id-cc/) for the previous alphabet:
|
||||
~616 years needed, in order to have a 1% probability of at least one collision at a speed of 1000 IDs per hour.
|
||||
This is more than enough for our unique IDs applications, mostly in the documentation context,
|
||||
which consists of hand crafted and/or programmatically produced notes ,
|
||||
for example in data narratives, book(lets) and TiddlyWiki tiddlers of tens or hundreds of notes at most,
|
||||
unevenly produced between hours, days and/or weeks..
|
||||
|
||||
The `External` tag relates to its dependency on other programming languages and frameworks,
though the dependency should be satisfied by just loading a small self-contained binary.
|
||||
"
|
||||
Class {
|
||||
#name : #NanoID,
|
||||
#superclass : #Object,
|
||||
#category : #'MiniDocs-External'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
NanoID class >> binaryFile [
	"Answer the file reference of the NanoID generator binary for the current platform.
	On Unix/Mac the binary lives in the MiniDocs app folder, named after the Nim source;
	on Windows it lives in its own NanoId data folder."
	Smalltalk os isWindows
		ifFalse: [ ^ MiniDocs appFolder / self scriptSourceCode basenameWithoutExtension ]
		ifTrue: [ ^ ExoRepo userDataFolder / 'NanoId' / 'nanoid' ]
]
|
||||
|
||||
{ #category : #accessing }
|
||||
NanoID class >> generate [
	"Answer a freshly generated NanoID string, with trailing line feeds stripped
	from the external binary's output. Installs the generator binary on demand.
	Fixed: the subprocess configuration sent #redirectStdout twice; the duplicate send is removed."
	self binaryFile exists ifFalse: [ NanoID install ].
	Smalltalk os isWindows
		ifTrue: [ ^ (LibC resultOfCommand: self binaryFile fullName) copyWithoutAll: (Character lf asString) ].
	OSSUnixSubprocess new
		command: self binaryFile fullName;
		redirectStdout;
		runAndWaitOnExitDo: [ :process :outString | ^ outString copyWithoutAll: (Character lf asString) ]
]
|
||||
|
||||
{ #category : #accessing }
|
||||
NanoID class >> install [
	"Compile the NanoID generator from its Nim source and install the binary.
	Answers the MiniDocs app folder; if the binary already exists, answers immediately.
	IMPORTANT: Nimble, Nim's package manager, should be installed, as this process doesn't verify its proper installation.
	Fixes: the Windows branch invoked 'nanoid c' instead of the Nim compiler ('nim c',
	as the Unix branch does), and the compiled binary was moved with #moveToPageTitled:
	(a Lepiter page selector) instead of FileReference>>#moveTo:, as done in
	MiniDocs class>>#installYamlToJson."
	self binaryFile exists ifTrue: [ ^ MiniDocs appFolder ].
	Nimble install: 'nanoid'.
	Smalltalk os isWindows
		ifTrue: [ ^ LibC resultOfCommand: 'nim c ', self scriptSourceCode fullName ].
	OSSUnixSubprocess new
		command: 'nim';
		arguments: {'c'. self scriptSourceCode fullName};
		runAndWaitOnExitDo: [ :process :outString |
			"nim c leaves the binary next to the source; relocate it to the shared app folder."
			(self scriptSourceCode parent / self scriptSourceCode basenameWithoutExtension) moveTo: MiniDocs appFolder asFileReference.
			^ MiniDocs appFolder ]
]
|
||||
|
||||
{ #category : #accessing }
|
||||
NanoID class >> isInstalled [
|
||||
^ self binaryFile exists
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
NanoID class >> scriptSourceCode [
|
||||
^ FileLocator image parent / 'pharo-local/iceberg/Offray/MiniDocs/src/nanoIdGen.nim'
|
||||
]
|
||||
|
@ -1,35 +1,35 @@
|
||||
Extension { #name : #OrderedDictionary }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
OrderedDictionary >> addErrata: noteString [
|
||||
self errata add: noteString
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
OrderedDictionary >> asLepiterSnippet [
|
||||
| response |
|
||||
self at: 'className' ifAbsent: [ ^ nil ].
|
||||
response := (self at: 'className') asClass new.
|
||||
response fromString: (self at: 'content').
|
||||
response
|
||||
uid: (LeUID new uidString: (self at: 'id'));
|
||||
parent: (self at: 'parent');
|
||||
createTime: (LeTime new time: ((self at: 'created')asDateAndTime));
|
||||
editTime: (LeTime new time: ((self at: 'modified') asDateAndTime));
|
||||
editEmail: (self at: 'modifier');
|
||||
createEmail: (self at: 'creator').
|
||||
self at: 'origin' ifPresent: [ response metadata at: 'origin' put: (self at: 'origin') ].
|
||||
self at: 'errata' ifPresent: [ response metadata at: 'errata' put: (self at: 'errata') ].
|
||||
^ response
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
OrderedDictionary >> errata [
|
||||
^ self at: 'errata' ifAbsentPut: [ OrderedCollection new]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
OrderedDictionary >> redefineTimestampsBefore: dateAndTime [
|
||||
self at: 'modified' put: dateAndTime asDateAndTime.
|
||||
self at: 'created' put: dateAndTime asDateAndTime - 1 second.
|
||||
]
|
||||
Extension { #name : #OrderedDictionary }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
OrderedDictionary >> addErrata: noteString [
|
||||
self errata add: noteString
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
OrderedDictionary >> asLepiterSnippet [
	"Rebuild a Lepiter snippet object from this dictionary's serialized fields.
	Answers nil when the 'className' key is absent; otherwise instantiates the class
	named by 'className', populates it from 'content', and copies identity, parent,
	timestamps and author metadata. 'origin' and 'errata' keys, when present, are
	stored in the snippet's metadata."
	| response |
	self at: 'className' ifAbsent: [ ^ nil ].
	response := (self at: 'className') asClass new.
	response fromString: (self at: 'content').
	response
		uid: (LeUID new uidString: (self at: 'id'));
		parent: (self at: 'parent');
		createTime: (LeTime new time: ((self at: 'created')asDateAndTime));
		editTime: (LeTime new time: ((self at: 'modified') asDateAndTime));
		editEmail: (self at: 'modifier');
		createEmail: (self at: 'creator').
	self at: 'origin' ifPresent: [ response metadata at: 'origin' put: (self at: 'origin') ].
	self at: 'errata' ifPresent: [ response metadata at: 'errata' put: (self at: 'errata') ].
	^ response
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
OrderedDictionary >> errata [
|
||||
^ self at: 'errata' ifAbsentPut: [ OrderedCollection new]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
OrderedDictionary >> redefineTimestampsBefore: dateAndTime [
|
||||
self at: 'modified' put: dateAndTime asDateAndTime.
|
||||
self at: 'created' put: dateAndTime asDateAndTime - 1 second.
|
||||
]
|
||||
|
@ -1,168 +1,168 @@
|
||||
"
|
||||
I model the interaction between Pandoc and Grafoscopio.
|
||||
"
|
||||
Class {
|
||||
#name : #Pandoc,
|
||||
#superclass : #Object,
|
||||
#classInstVars : [
|
||||
'executable'
|
||||
],
|
||||
#category : #'MiniDocs-Core'
|
||||
}
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
Pandoc class >> convertString: aString from: inputFormat to: outputFormat [
|
||||
OSSUnixSubprocess new
|
||||
shellCommand: 'echo "', aString , '" | pandoc -f ', inputFormat,' -t ', outputFormat;
|
||||
redirectStdout;
|
||||
runAndWaitOnExitDo: [ :command :outString |
|
||||
^ outString
|
||||
].
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
Pandoc class >> downloadLuaFilters [
|
||||
self luaFilters do: [ :filter | | filterUrl |
|
||||
filterUrl := filter asUrl.
|
||||
(FileLocator temp asFileReference / (filterUrl segments last)) exists
|
||||
ifFalse: [
|
||||
ZnClient new
|
||||
url: filterUrl;
|
||||
downloadTo: FileLocator temp ] ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Pandoc class >> executable [
|
||||
^ executable ifNil: [ self executableLocation ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Pandoc class >> executable: aFileReference [
|
||||
executable := aFileReference
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Pandoc class >> executableLocation [
|
||||
| location |
|
||||
location := '/usr/bin/pandoc'.
|
||||
location asFileReference exists
|
||||
ifTrue: [ ^ location ]
|
||||
ifFalse: [ self definePandocExecutable ]
|
||||
]
|
||||
|
||||
{ #category : #utility }
|
||||
Pandoc class >> extractImagesInUnixFor: aFileReference withFilter: aLuaFilter [
|
||||
"I use Pandoc Lua scripting capabilities to extract al images links in aFileReference"
|
||||
|
||||
OSSUnixSubprocess new
|
||||
command: 'pandoc';
|
||||
arguments: {aFileReference fullName . '--lua-filter=',aLuaFilter fullName };
|
||||
redirectStdout;
|
||||
redirectStderr;
|
||||
runAndWaitOnExitDo: [ :process :outString :errString |
|
||||
process isSuccess
|
||||
ifTrue: [
|
||||
^ ((Soup fromString: outString) findAllTags: 'td') collect: [ :each | each next ] ]
|
||||
ifFalse: [
|
||||
"OSSUnixProcessExitStatus has a nice #printOn: "
|
||||
Transcript show: 'Command exit with error status: ', process exitStatusInterpreter printString; cr.
|
||||
Transcript show: 'Stderr contents: ', errString.
|
||||
]
|
||||
]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Pandoc class >> htmlStringToMarkdown: aString [
|
||||
|
||||
OSSUnixSubprocess new
|
||||
shellCommand: 'echo "', aString , '" | pandoc -f markdown -t html';
|
||||
redirectStdout;
|
||||
runAndWaitOnExitDo: [ :command :outString |
|
||||
^ outString
|
||||
].
|
||||
]
|
||||
|
||||
{ #category : #converters }
|
||||
Pandoc class >> htmlToMarkdown: inputFile [
|
||||
|
||||
| outputFile |
|
||||
outputFile := FileLocator temp / 'body.md'.
|
||||
outputFile ensureDelete.
|
||||
outputFile ensureCreateFile.
|
||||
OSSUnixSubprocess new
|
||||
command: 'pandoc';
|
||||
arguments: {'-f'. 'html'. '-t'. 'markdown'. '--atx-headers'. inputFile fullName.
|
||||
'--output'. outputFile fullName };
|
||||
redirectStdout;
|
||||
redirectStderr;
|
||||
runAndWaitOnExitDo: [ :process :outString :errString |
|
||||
process isSuccess
|
||||
ifTrue: [ ^ outputFile contents ]
|
||||
ifFalse: [ ^inputFile contents ]
|
||||
]
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
Pandoc class >> listImagesFrom: aFileReference [
|
||||
"I provide a list of all images contained in aFile."
|
||||
|
||||
| filter commandString outputString |
|
||||
filter := FileLocator temp asFileReference / 'image-links.lua'.
|
||||
filter exists
|
||||
ifFalse: [ self downloadLuaFilters ].
|
||||
commandString := 'pandoc ' , aFileReference fullName
|
||||
, ' --lua-filter=' , filter fullName.
|
||||
^ self extractImagesInUnixFor: aFileReference withFilter: filter
|
||||
]
|
||||
|
||||
{ #category : #utility }
|
||||
Pandoc class >> luaFilters [
|
||||
"I define the location of set of scripts, that allows to change the default behaviour of Pandoc
|
||||
and/or the processing of supported markup languages.
|
||||
|
||||
For more information about Lua filters see:
|
||||
|
||||
https://pandoc.org/lua-filters.html
|
||||
"
|
||||
|
||||
| filters |
|
||||
filters := OrderedCollection new.
|
||||
filters
|
||||
add: 'http://mutabit.com/repos.fossil/dataweek/doc/tip/Artefactos/Scripts/image-links.lua'.
|
||||
^ filters
|
||||
]
|
||||
|
||||
{ #category : #converters }
|
||||
Pandoc class >> markdownToHtml: inputFile [
|
||||
|
||||
(Smalltalk os isUnix or: [ Smalltalk os isMacOS ]) ifTrue: [ ^ self markdownToHtmlOnUnix: inputFile ].
|
||||
Smalltalk os isWindows ifTrue: [ ^ self markdownToHtmlOnWindows: inputFile ].
|
||||
]
|
||||
|
||||
{ #category : #converters }
|
||||
Pandoc class >> markdownToHtmlOnUnix: inputFile [
|
||||
|
||||
| outputFile |
|
||||
|
||||
outputFile := inputFile parent / (inputFile basenameWithoutExtension , '.html').
|
||||
outputFile ensureDelete.
|
||||
outputFile ensureCreateFile.
|
||||
OSSUnixSubprocess new
|
||||
command: 'pandoc';
|
||||
arguments: {'-f'. 'markdown+startnum+task_lists'. '--standalone'. '-t'. 'html'. inputFile fullName.
|
||||
'--output'. outputFile fullName };
|
||||
redirectStdout;
|
||||
redirectStderr;
|
||||
runAndWaitOnExitDo: [ :process :outString :errString |
|
||||
process isSuccess
|
||||
ifTrue: [ ^ outputFile ]
|
||||
ifFalse: [ ^ inputFile ]
|
||||
]
|
||||
]
|
||||
|
||||
{ #category : #converters }
|
||||
Pandoc class >> markdownToHtmlOnWindows: inputFile [
|
||||
|
||||
"ToDo: This command still doesn't receive any arguments."
|
||||
^ (LibC resultOfCommand: 'pandoc ', inputFile fullName) correctAccentedCharacters.
|
||||
]
|
||||
"
|
||||
I model the interaction between Pandoc and Grafoscopio.
|
||||
"
|
||||
Class {
|
||||
#name : #Pandoc,
|
||||
#superclass : #Object,
|
||||
#classInstVars : [
|
||||
'executable'
|
||||
],
|
||||
#category : #'MiniDocs-Core'
|
||||
}
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
Pandoc class >> convertString: aString from: inputFormat to: outputFormat [
|
||||
OSSUnixSubprocess new
|
||||
shellCommand: 'echo "', aString , '" | pandoc -f ', inputFormat,' -t ', outputFormat;
|
||||
redirectStdout;
|
||||
runAndWaitOnExitDo: [ :command :outString |
|
||||
^ outString
|
||||
].
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
Pandoc class >> downloadLuaFilters [
|
||||
self luaFilters do: [ :filter | | filterUrl |
|
||||
filterUrl := filter asUrl.
|
||||
(FileLocator temp asFileReference / (filterUrl segments last)) exists
|
||||
ifFalse: [
|
||||
ZnClient new
|
||||
url: filterUrl;
|
||||
downloadTo: FileLocator temp ] ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Pandoc class >> executable [
|
||||
^ executable ifNil: [ self executableLocation ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Pandoc class >> executable: aFileReference [
|
||||
executable := aFileReference
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Pandoc class >> executableLocation [
|
||||
| location |
|
||||
location := '/usr/bin/pandoc'.
|
||||
location asFileReference exists
|
||||
ifTrue: [ ^ location ]
|
||||
ifFalse: [ self definePandocExecutable ]
|
||||
]
|
||||
|
||||
{ #category : #utility }
|
||||
Pandoc class >> extractImagesInUnixFor: aFileReference withFilter: aLuaFilter [
|
||||
"I use Pandoc Lua scripting capabilities to extract al images links in aFileReference"
|
||||
|
||||
OSSUnixSubprocess new
|
||||
command: 'pandoc';
|
||||
arguments: {aFileReference fullName . '--lua-filter=',aLuaFilter fullName };
|
||||
redirectStdout;
|
||||
redirectStderr;
|
||||
runAndWaitOnExitDo: [ :process :outString :errString |
|
||||
process isSuccess
|
||||
ifTrue: [
|
||||
^ ((Soup fromString: outString) findAllTags: 'td') collect: [ :each | each next ] ]
|
||||
ifFalse: [
|
||||
"OSSUnixProcessExitStatus has a nice #printOn: "
|
||||
Transcript show: 'Command exit with error status: ', process exitStatusInterpreter printString; cr.
|
||||
Transcript show: 'Stderr contents: ', errString.
|
||||
]
|
||||
]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
Pandoc class >> htmlStringToMarkdown: aString [
	"Convert aString, containing HTML markup, to Markdown via the external pandoc binary,
	answering pandoc's standard output.
	Fixed: the pipeline previously ran 'pandoc -f markdown -t html' — the inverse of what
	this selector promises; the source and target formats are now html -> markdown.
	NOTE(review): aString is interpolated unescaped inside shell double quotes; inputs
	containing quotes or shell metacharacters will break or be interpreted by the shell."
	OSSUnixSubprocess new
		shellCommand: 'echo "', aString , '" | pandoc -f html -t markdown';
		redirectStdout;
		runAndWaitOnExitDo: [ :command :outString |
			^ outString
		].
]
|
||||
|
||||
{ #category : #converters }
|
||||
Pandoc class >> htmlToMarkdown: inputFile [
	"Convert inputFile (HTML) to Markdown with ATX-style headers via the external pandoc
	binary, writing to a temp file and answering its contents. On pandoc failure, answers
	the unconverted contents of inputFile instead."

	| outputFile |
	outputFile := FileLocator temp / 'body.md'.
	"Start from a clean, empty output file so stale contents can't leak through."
	outputFile ensureDelete.
	outputFile ensureCreateFile.
	OSSUnixSubprocess new
		command: 'pandoc';
		arguments: {'-f'. 'html'. '-t'. 'markdown'. '--atx-headers'. inputFile fullName.
			'--output'. outputFile fullName };
		redirectStdout;
		redirectStderr;
		runAndWaitOnExitDo: [ :process :outString :errString |
			process isSuccess
				ifTrue: [ ^ outputFile contents ]
				ifFalse: [ ^inputFile contents ]
		]
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
Pandoc class >> listImagesFrom: aFileReference [
	"I provide a list of all images contained in aFile, by running the image-links.lua
	Pandoc filter over it (downloaded on demand into the temp folder).
	Cleanup: the previous version declared an unused outputString temporary and built a
	commandString that was never used; both are removed."

	| filter |
	filter := FileLocator temp asFileReference / 'image-links.lua'.
	filter exists
		ifFalse: [ self downloadLuaFilters ].
	^ self extractImagesInUnixFor: aFileReference withFilter: filter
]
|
||||
|
||||
{ #category : #utility }
|
||||
Pandoc class >> luaFilters [
|
||||
"I define the location of set of scripts, that allows to change the default behaviour of Pandoc
|
||||
and/or the processing of supported markup languages.
|
||||
|
||||
For more information about Lua filters see:
|
||||
|
||||
https://pandoc.org/lua-filters.html
|
||||
"
|
||||
|
||||
| filters |
|
||||
filters := OrderedCollection new.
|
||||
filters
|
||||
add: 'http://mutabit.com/repos.fossil/dataweek/doc/tip/Artefactos/Scripts/image-links.lua'.
|
||||
^ filters
|
||||
]
|
||||
|
||||
{ #category : #converters }
|
||||
Pandoc class >> markdownToHtml: inputFile [
	"Convert inputFile from Markdown to HTML, dispatching to the platform-specific
	implementation.
	NOTE(review): on a platform that is neither Unix, Mac nor Windows this method
	falls through and answers self — confirm whether that is intended."
	(Smalltalk os isUnix or: [ Smalltalk os isMacOS ]) ifTrue: [ ^ self markdownToHtmlOnUnix: inputFile ].
	Smalltalk os isWindows ifTrue: [ ^ self markdownToHtmlOnWindows: inputFile ].
]
|
||||
|
||||
{ #category : #converters }
|
||||
Pandoc class >> markdownToHtmlOnUnix: inputFile [
|
||||
|
||||
| outputFile |
|
||||
|
||||
outputFile := inputFile parent / (inputFile basenameWithoutExtension , '.html').
|
||||
outputFile ensureDelete.
|
||||
outputFile ensureCreateFile.
|
||||
OSSUnixSubprocess new
|
||||
command: 'pandoc';
|
||||
arguments: {'-f'. 'markdown+startnum+task_lists'. '--standalone'. '-t'. 'html'. inputFile fullName.
|
||||
'--output'. outputFile fullName };
|
||||
redirectStdout;
|
||||
redirectStderr;
|
||||
runAndWaitOnExitDo: [ :process :outString :errString |
|
||||
process isSuccess
|
||||
ifTrue: [ ^ outputFile ]
|
||||
ifFalse: [ ^ inputFile ]
|
||||
]
|
||||
]
|
||||
|
||||
{ #category : #converters }
|
||||
Pandoc class >> markdownToHtmlOnWindows: inputFile [
|
||||
|
||||
"ToDo: This command still doesn't receive any arguments."
|
||||
^ (LibC resultOfCommand: 'pandoc ', inputFile fullName) correctAccentedCharacters.
|
||||
]
|
||||
|
@ -1,11 +1,11 @@
|
||||
Extension { #name : #Pandoc }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
Pandoc class >> convertString: aString from: inputFormat to: outputFormat [
|
||||
OSSUnixSubprocess new
|
||||
shellCommand: 'echo "', aString , '" | pandoc -f ', inputFormat,' -t ', outputFormat;
|
||||
redirectStdout;
|
||||
runAndWaitOnExitDo: [ :command :outString |
|
||||
^ outString
|
||||
].
|
||||
]
|
||||
Extension { #name : #Pandoc }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
Pandoc class >> convertString: aString from: inputFormat to: outputFormat [
|
||||
OSSUnixSubprocess new
|
||||
shellCommand: 'echo "', aString , '" | pandoc -f ', inputFormat,' -t ', outputFormat;
|
||||
redirectStdout;
|
||||
runAndWaitOnExitDo: [ :command :outString |
|
||||
^ outString
|
||||
].
|
||||
]
|
||||
|
@ -1,148 +1,148 @@
|
||||
Class {
|
||||
#name : #PubPubContent,
|
||||
#superclass : #Object,
|
||||
#instVars : [
|
||||
'title',
|
||||
'language',
|
||||
'url',
|
||||
'thumbnail',
|
||||
'work',
|
||||
'contents'
|
||||
],
|
||||
#category : #'MiniDocs-Model'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent class >> fromXML: anXMLElement [
|
||||
^ self new fromXML: anXMLElement
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> asMarkdeepFrontPageElement [
|
||||
| response anchorName anchorLink markdeepFile |
|
||||
response := '' writeStream.
|
||||
anchorName := '[', self title,']'.
|
||||
markdeepFile := './book/', self shortName,'--',self id,'.md.html'.
|
||||
anchorLink := '(', markdeepFile,')'.
|
||||
response
|
||||
nextPutAll: '<big>', anchorName, anchorLink,'</big><br><br>';
|
||||
nextPutAll: String lf.
|
||||
self thumbnail ifNotNil: [ |image|
|
||||
image := '
|
||||
<img
|
||||
src=', self thumbnail,
|
||||
' width="55%"
|
||||
style="width: 400px; height: 220px; object-fit: cover;"
|
||||
/>'.
|
||||
response nextPutAll: '<a href="',markdeepFile,'">', image, '</a>'
|
||||
].
|
||||
response
|
||||
nextPutAll: String lf, String lf.
|
||||
^ response contents
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> contents: anObject [
|
||||
contents := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> fileName [
|
||||
^ self shortName,'--', self id, '.md'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> fromXML: aXMLElement [
|
||||
| image anchor|
|
||||
image := aXMLElement contentNodes first xpath: './a/div'.
|
||||
image
|
||||
ifNotEmpty: [|style rawUrl|
|
||||
style := (image first attributeAt: 'style').
|
||||
rawUrl := (style splitOn: 'url') second.
|
||||
self
|
||||
thumbnail:(rawUrl copyFrom: 3 to: rawUrl size - 2)
|
||||
].
|
||||
anchor := (aXMLElement contentNodes second contentNodes first xpath: './div[@class="title-wrapper"]/a') first.
|
||||
self
|
||||
title: (anchor attributeAt: 'title');
|
||||
url: (anchor attributeAt: 'href').
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> id [
|
||||
^ (self url splitOn: $/) last
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
PubPubContent >> language: aString [
|
||||
language := aString
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> next [
|
||||
^ self nextInstance
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> previous [
|
||||
| index |
|
||||
index := self work tableOfContents detectIndex: [:pubContent | pubContent = self ] ifNone: [ ^ nil ].
|
||||
^ self work tableOfContents at: index - 1.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> printOn: aStream [
|
||||
super printOn: aStream.
|
||||
aStream
|
||||
nextPutAll: '( ', self title,' | ', self id, ' )'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> shortName [
|
||||
| sanitized |
|
||||
sanitized := (self title splitOn: $:) first.
|
||||
sanitized := sanitized copyReplaceAll: '’' with: ''.
|
||||
sanitized := sanitized asCamelCase.
|
||||
sanitized at: 1 put: sanitized first asLowercase.
|
||||
^ sanitized
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> thumbnail [
|
||||
^ thumbnail
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> thumbnail: anURL [
|
||||
thumbnail := anURL
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> title [
|
||||
^ title
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> title: anObject [
|
||||
title := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> url [
|
||||
^url
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> url: anObject [
|
||||
url := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> work [
|
||||
^ work
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> work: aPubPubWork [
|
||||
work := aPubPubWork
|
||||
]
|
||||
Class {
|
||||
#name : #PubPubContent,
|
||||
#superclass : #Object,
|
||||
#instVars : [
|
||||
'title',
|
||||
'language',
|
||||
'url',
|
||||
'thumbnail',
|
||||
'work',
|
||||
'contents'
|
||||
],
|
||||
#category : #'MiniDocs-Model'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent class >> fromXML: anXMLElement [
|
||||
^ self new fromXML: anXMLElement
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> asMarkdeepFrontPageElement [
|
||||
| response anchorName anchorLink markdeepFile |
|
||||
response := '' writeStream.
|
||||
anchorName := '[', self title,']'.
|
||||
markdeepFile := './book/', self shortName,'--',self id,'.md.html'.
|
||||
anchorLink := '(', markdeepFile,')'.
|
||||
response
|
||||
nextPutAll: '<big>', anchorName, anchorLink,'</big><br><br>';
|
||||
nextPutAll: String lf.
|
||||
self thumbnail ifNotNil: [ |image|
|
||||
image := '
|
||||
<img
|
||||
src=', self thumbnail,
|
||||
' width="55%"
|
||||
style="width: 400px; height: 220px; object-fit: cover;"
|
||||
/>'.
|
||||
response nextPutAll: '<a href="',markdeepFile,'">', image, '</a>'
|
||||
].
|
||||
response
|
||||
nextPutAll: String lf, String lf.
|
||||
^ response contents
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> contents: anObject [
|
||||
contents := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> fileName [
|
||||
^ self shortName,'--', self id, '.md'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> fromXML: aXMLElement [
|
||||
| image anchor|
|
||||
image := aXMLElement contentNodes first xpath: './a/div'.
|
||||
image
|
||||
ifNotEmpty: [|style rawUrl|
|
||||
style := (image first attributeAt: 'style').
|
||||
rawUrl := (style splitOn: 'url') second.
|
||||
self
|
||||
thumbnail:(rawUrl copyFrom: 3 to: rawUrl size - 2)
|
||||
].
|
||||
anchor := (aXMLElement contentNodes second contentNodes first xpath: './div[@class="title-wrapper"]/a') first.
|
||||
self
|
||||
title: (anchor attributeAt: 'title');
|
||||
url: (anchor attributeAt: 'href').
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> id [
|
||||
^ (self url splitOn: $/) last
|
||||
]
|
||||
|
||||
{ #category : #'as yet unclassified' }
|
||||
PubPubContent >> language: aString [
|
||||
language := aString
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> next [
|
||||
^ self nextInstance
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> previous [
|
||||
| index |
|
||||
index := self work tableOfContents detectIndex: [:pubContent | pubContent = self ] ifNone: [ ^ nil ].
|
||||
^ self work tableOfContents at: index - 1.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> printOn: aStream [
|
||||
super printOn: aStream.
|
||||
aStream
|
||||
nextPutAll: '( ', self title,' | ', self id, ' )'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> shortName [
|
||||
| sanitized |
|
||||
sanitized := (self title splitOn: $:) first.
|
||||
sanitized := sanitized copyReplaceAll: '’' with: ''.
|
||||
sanitized := sanitized asCamelCase.
|
||||
sanitized at: 1 put: sanitized first asLowercase.
|
||||
^ sanitized
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> thumbnail [
|
||||
^ thumbnail
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> thumbnail: anURL [
|
||||
thumbnail := anURL
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> title [
|
||||
^ title
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> title: anObject [
|
||||
title := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> url [
|
||||
^url
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> url: anObject [
|
||||
url := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> work [
|
||||
^ work
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubContent >> work: aPubPubWork [
|
||||
work := aPubPubWork
|
||||
]
|
||||
|
@ -1,75 +1,75 @@
|
||||
Class {
|
||||
#name : #PubPubGrammar,
|
||||
#superclass : #PP2CompositeNode,
|
||||
#instVars : [
|
||||
'document',
|
||||
'link',
|
||||
'linkLabel',
|
||||
'linkContent',
|
||||
'imageLinkLabel',
|
||||
'imageLinkContent',
|
||||
'alternativeImages',
|
||||
'imageLink'
|
||||
],
|
||||
#category : #'MiniDocs-Model'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar >> alternativeImages [
|
||||
^ self linkContent
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar >> document [
|
||||
^ (link / imageLink ) islandInSea star
|
||||
]
|
||||
|
||||
{ #category : #links }
|
||||
PubPubGrammar >> imageLink [
|
||||
^ imageLinkLabel, imageLinkContent, alternativeImages
|
||||
]
|
||||
|
||||
{ #category : #links }
|
||||
PubPubGrammar >> imageLinkContent [
|
||||
^ '(' asPParser, #any asPParser starLazy flatten, ')' asPParser ==> #second
|
||||
]
|
||||
|
||||
{ #category : #links }
|
||||
PubPubGrammar >> imageLinkLabel [
|
||||
|
||||
| label |
|
||||
label := ("$] asPParser not /" #any asPParser) starLazy flatten.
|
||||
^ '![' asPParser, label, ']' asPParser ==> #second.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar >> imageLinkSea [
|
||||
^ imageLink sea ==> #second
|
||||
]
|
||||
|
||||
{ #category : #links }
|
||||
PubPubGrammar >> link [
|
||||
^ linkLabel, linkContent
|
||||
]
|
||||
|
||||
{ #category : #links }
|
||||
PubPubGrammar >> linkContent [
|
||||
^ '{' asPParser, #any asPParser starLazy flatten, '}' asPParser ==> #second.
|
||||
]
|
||||
|
||||
{ #category : #links }
|
||||
PubPubGrammar >> linkLabel [
|
||||
| label |
|
||||
label := ("$] asPParser not /" #any asPParser) starLazy flatten.
|
||||
^ $[ asPParser, label, $] asPParser ==> #second.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar >> linkSea [
|
||||
^ link sea ==> #second
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar >> start [
|
||||
^ document
|
||||
]
|
||||
Class {
|
||||
#name : #PubPubGrammar,
|
||||
#superclass : #PP2CompositeNode,
|
||||
#instVars : [
|
||||
'document',
|
||||
'link',
|
||||
'linkLabel',
|
||||
'linkContent',
|
||||
'imageLinkLabel',
|
||||
'imageLinkContent',
|
||||
'alternativeImages',
|
||||
'imageLink'
|
||||
],
|
||||
#category : #'MiniDocs-Model'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar >> alternativeImages [
|
||||
^ self linkContent
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar >> document [
|
||||
^ (link / imageLink ) islandInSea star
|
||||
]
|
||||
|
||||
{ #category : #links }
|
||||
PubPubGrammar >> imageLink [
|
||||
^ imageLinkLabel, imageLinkContent, alternativeImages
|
||||
]
|
||||
|
||||
{ #category : #links }
|
||||
PubPubGrammar >> imageLinkContent [
|
||||
^ '(' asPParser, #any asPParser starLazy flatten, ')' asPParser ==> #second
|
||||
]
|
||||
|
||||
{ #category : #links }
|
||||
PubPubGrammar >> imageLinkLabel [
|
||||
|
||||
| label |
|
||||
label := ("$] asPParser not /" #any asPParser) starLazy flatten.
|
||||
^ '![' asPParser, label, ']' asPParser ==> #second.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar >> imageLinkSea [
|
||||
^ imageLink sea ==> #second
|
||||
]
|
||||
|
||||
{ #category : #links }
|
||||
PubPubGrammar >> link [
|
||||
^ linkLabel, linkContent
|
||||
]
|
||||
|
||||
{ #category : #links }
|
||||
PubPubGrammar >> linkContent [
|
||||
^ '{' asPParser, #any asPParser starLazy flatten, '}' asPParser ==> #second.
|
||||
]
|
||||
|
||||
{ #category : #links }
|
||||
PubPubGrammar >> linkLabel [
|
||||
| label |
|
||||
label := ("$] asPParser not /" #any asPParser) starLazy flatten.
|
||||
^ $[ asPParser, label, $] asPParser ==> #second.
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar >> linkSea [
|
||||
^ link sea ==> #second
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar >> start [
|
||||
^ document
|
||||
]
|
||||
|
@ -1,65 +1,65 @@
|
||||
Class {
|
||||
#name : #PubPubGrammar2,
|
||||
#superclass : #PP2CompositeNode,
|
||||
#instVars : [
|
||||
'imageLabel',
|
||||
'imageLink',
|
||||
'imagesArray',
|
||||
'imageLocation',
|
||||
'document',
|
||||
'footnote',
|
||||
'footnoteLabel',
|
||||
'footnoteContent'
|
||||
],
|
||||
#category : #MiniDocs
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar2 >> document [
|
||||
^ (imageLink / footnote) islandInSea star
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar2 >> footnote [
|
||||
^ footnoteLabel, footnoteContent
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar2 >> footnoteContent [
|
||||
^ '{#' asPParser, #any asPParser starLazy flatten, '}' asPParser ==> #second
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar2 >> footnoteLabel [
|
||||
^ '[' asPParser, #any asPParser starLazy flatten, ']' asPParser ==> #second
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar2 >> imageLabel [
|
||||
^ '![' asPParser, #any asPParser starLazy flatten, ']' asPParser ==> #second
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar2 >> imageLink [
|
||||
^ imageLabel, imageLocation, imagesArray
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar2 >> imageLocation [
|
||||
^ '(' asPParser, #any asPParser starLazy flatten, ')' asPParser ==> #second
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar2 >> imagesArray [
|
||||
^ '{srcset=' asPParser, #any asPParser starLazy flatten, '}' asPParser ==> #second
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar2 >> imagesContent [
|
||||
^ '{src=' asPParser, #any asPParser starLazy flatten, '}' asPParser ==> #second
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar2 >> start [
|
||||
^ document
|
||||
]
|
||||
Class {
|
||||
#name : #PubPubGrammar2,
|
||||
#superclass : #PP2CompositeNode,
|
||||
#instVars : [
|
||||
'imageLabel',
|
||||
'imageLink',
|
||||
'imagesArray',
|
||||
'imageLocation',
|
||||
'document',
|
||||
'footnote',
|
||||
'footnoteLabel',
|
||||
'footnoteContent'
|
||||
],
|
||||
#category : #MiniDocs
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar2 >> document [
|
||||
^ (imageLink / footnote) islandInSea star
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar2 >> footnote [
|
||||
^ footnoteLabel, footnoteContent
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar2 >> footnoteContent [
|
||||
^ '{#' asPParser, #any asPParser starLazy flatten, '}' asPParser ==> #second
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar2 >> footnoteLabel [
|
||||
^ '[' asPParser, #any asPParser starLazy flatten, ']' asPParser ==> #second
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar2 >> imageLabel [
|
||||
^ '![' asPParser, #any asPParser starLazy flatten, ']' asPParser ==> #second
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar2 >> imageLink [
|
||||
^ imageLabel, imageLocation, imagesArray
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar2 >> imageLocation [
|
||||
^ '(' asPParser, #any asPParser starLazy flatten, ')' asPParser ==> #second
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar2 >> imagesArray [
|
||||
^ '{srcset=' asPParser, #any asPParser starLazy flatten, '}' asPParser ==> #second
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar2 >> imagesContent [
|
||||
^ '{src=' asPParser, #any asPParser starLazy flatten, '}' asPParser ==> #second
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammar2 >> start [
|
||||
^ document
|
||||
]
|
||||
|
@ -1,59 +1,59 @@
|
||||
Class {
|
||||
#name : #PubPubGrammarTest,
|
||||
#superclass : #PP2CompositeNodeTest,
|
||||
#category : #'MiniDocs-Model'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammarTest >> parserClass [
|
||||
^ PubPubGrammar
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammarTest >> testComposedImageLink [
|
||||
self
|
||||
parse: '![This is an image label with sublinks (bla bl)[blog]](this/is/an/image/link){this are alternate image sizes}'
|
||||
rule: #imageLink
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammarTest >> testImageLabel: label [
|
||||
self
|
||||
parse: label
|
||||
rule: #imageLinkLabel
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammarTest >> testImageLink [
|
||||
self
|
||||
parse: '![This is an image label](this/is/an/image/link){this are alternate image sizes}'
|
||||
rule: #imageLink
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammarTest >> testLabel: label [
|
||||
self
|
||||
parse: label
|
||||
rule: #linkLabel
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammarTest >> testLink [
|
||||
self
|
||||
parse: '[This is a label]{this/is/a/link}'
|
||||
rule: #link
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammarTest >> testNestedLabel [
|
||||
self
|
||||
parse: '[This is a label with [sublabels]]'
|
||||
rule: #linkLabel
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammarTest >> testSimpleLabel [
|
||||
self
|
||||
parse: '[This is a label]'
|
||||
rule: #linkLabel
|
||||
]
|
||||
Class {
|
||||
#name : #PubPubGrammarTest,
|
||||
#superclass : #PP2CompositeNodeTest,
|
||||
#category : #'MiniDocs-Model'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammarTest >> parserClass [
|
||||
^ PubPubGrammar
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammarTest >> testComposedImageLink [
|
||||
self
|
||||
parse: '![This is an image label with sublinks (bla bl)[blog]](this/is/an/image/link){this are alternate image sizes}'
|
||||
rule: #imageLink
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammarTest >> testImageLabel: label [
|
||||
self
|
||||
parse: label
|
||||
rule: #imageLinkLabel
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammarTest >> testImageLink [
|
||||
self
|
||||
parse: '![This is an image label](this/is/an/image/link){this are alternate image sizes}'
|
||||
rule: #imageLink
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammarTest >> testLabel: label [
|
||||
self
|
||||
parse: label
|
||||
rule: #linkLabel
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammarTest >> testLink [
|
||||
self
|
||||
parse: '[This is a label]{this/is/a/link}'
|
||||
rule: #link
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammarTest >> testNestedLabel [
|
||||
self
|
||||
parse: '[This is a label with [sublabels]]'
|
||||
rule: #linkLabel
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubGrammarTest >> testSimpleLabel [
|
||||
self
|
||||
parse: '[This is a label]'
|
||||
rule: #linkLabel
|
||||
]
|
||||
|
@ -1,240 +1,240 @@
|
||||
Class {
|
||||
#name : #PubPubWork,
|
||||
#superclass : #Object,
|
||||
#instVars : [
|
||||
'address',
|
||||
'tableOfContents',
|
||||
'titles',
|
||||
'folder',
|
||||
'currentLanguage',
|
||||
'languages'
|
||||
],
|
||||
#category : #'MiniDocs-Model'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> addTableOfContents: anOrderedDictionary [
|
||||
self tableOfContents
|
||||
at: (self currentLanguage) put: anOrderedDictionary;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> addTitle: aString [
|
||||
self titles
|
||||
at: (self currentLanguage) put: aString
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> address [
|
||||
^ address
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> address: anUrl [
|
||||
address := anUrl
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> bookishFolder [
|
||||
^ { 'en' -> 'book'.
|
||||
'es' -> 'libro'} asDictionary
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> currentLanguage [
|
||||
^ currentLanguage
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> currentLanguage: twoLettersInISO639_1 [
|
||||
currentLanguage := twoLettersInISO639_1
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> defaultOptions [
|
||||
^ { 'sourceCodeLink' -> true .
|
||||
'commentsProvider' -> 'Hypothesis' } asDictionary
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> defaultTitle [
|
||||
^ self titles associations first value
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> downloadContents [
|
||||
| workingDirectory |
|
||||
workingDirectory := self workingDirectory.
|
||||
self tableOfContentsDictionary
|
||||
keysAndValuesDo: [ :name :chapterAddress |
|
||||
| currentFileName |
|
||||
currentFileName := name , '--' , chapterAddress , '.md'.
|
||||
(workingDirectory / currentFileName) asFileReference ensureDelete.
|
||||
(workingDirectory / 'markdown') asFileReference ensureDelete.
|
||||
ZnClient new
|
||||
get: self address , 'pub/' , chapterAddress , '/download/markdown';
|
||||
downloadTo: workingDirectory.
|
||||
workingDirectory / 'markdown' renameTo: currentFileName ].
|
||||
^ workingDirectory
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> downloadContents2 [
|
||||
| workingDirectory |
|
||||
workingDirectory := self folder / self currentLanguage / 'book'.
|
||||
self tableOfContentsDictionary keysAndValuesDo: [ :name :chapterAddress | |currentFileName|
|
||||
currentFileName := name, '--', chapterAddress, '.md'.
|
||||
(workingDirectory / currentFileName) asFileReference ensureDelete.
|
||||
(workingDirectory / 'markdown') asFileReference ensureDelete.
|
||||
ZnClient new
|
||||
get: self address, 'pub/', chapterAddress, '/download/markdown';
|
||||
downloadTo: workingDirectory .
|
||||
workingDirectory / 'markdown' renameTo: currentFileName
|
||||
].
|
||||
^ workingDirectory
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> exportToHTML [
|
||||
self markdownFiles
|
||||
do: [ :file | | doc |
|
||||
doc := Markdown new fromFile: file.
|
||||
doc exportAsHTML ].
|
||||
^ self markdownFiles first parent
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> exportToMarkdeep [
|
||||
| markdeepDocs |
|
||||
|
||||
markdeepDocs := self markdownFiles
|
||||
collect: [ :file | Markdeep fromMarkdownFile: file ].
|
||||
markdeepDocs do: [ :each | each fromPubPubToMarkdeep exportAsFile ].
|
||||
^ self languageFolder
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> extractAllContentsRaw [
|
||||
^ self frontPage xpath: '//div[@class="layout-pubs-block"]'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> extractRawTableOfContents [
|
||||
^ self extractAllContentsRaw first xpath: '//div[contains(concat(" ",normalize-space(@class)," "), " pub-preview-component ")]'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> folder [
|
||||
^ folder ensureCreateDirectory
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> folder: localDirectory [
|
||||
folder := localDirectory
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> frontPage [
|
||||
"This should scrap contents of the book's front-page and translate them into Markdeep,
|
||||
according to our templates."
|
||||
^ (XMLHTMLParser on: (self address asUrl retrieveContents)) parseDocument
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> languageFolder [
|
||||
^ self folder / self currentLanguage
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> markdeepFrontPage [
|
||||
| frontPage markdeepIndex |
|
||||
frontPage := Markdeep new.
|
||||
frontPage
|
||||
title: self defaultTitle;
|
||||
file: self languageFolder / 'frontPage.md.html'.
|
||||
markdeepIndex := '' writeStream.
|
||||
self tableOfContents do: [:pubPubContent|
|
||||
markdeepIndex
|
||||
nextPutAll: pubPubContent asMarkdeepFrontPageElement
|
||||
].
|
||||
frontPage body: markdeepIndex contents.
|
||||
^ frontPage
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> markdownFiles [
|
||||
^ self languageFolder allChildren
|
||||
select: [ :file | file basename endsWith: '.md' ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> populateContents [
|
||||
self tableOfContents isEmptyOrNil
|
||||
ifTrue: [ self populateTableOfContents ].
|
||||
self workingDirectory children ifEmpty: [self downloadContents].
|
||||
self tableOfContents do: [:pubPubContent | | contentFile|
|
||||
contentFile := self workingDirectory / pubPubContent fileName.
|
||||
contentFile exists
|
||||
ifTrue: [ pubPubContent contents: (Markdown new fromFile: contentFile) ]
|
||||
]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> populateTableOfContents [
|
||||
| contentsCollection |
|
||||
contentsCollection := self extractRawTableOfContents collect: [:each |
|
||||
(PubPubContent fromXML: each)
|
||||
language: self currentLanguage;
|
||||
work: self
|
||||
].
|
||||
self addTableOfContents: contentsCollection asOrderedCollection
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> printOn: aStream [
|
||||
super printOn: aStream.
|
||||
aStream
|
||||
nextPutAll: '(',self defaultTitle, ' | ', self address, ' )'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> tableOfContents [
|
||||
tableOfContents ifNil: [ ^ tableOfContents := Dictionary new].
|
||||
^ tableOfContents at: self currentLanguage
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> tableOfContents: anObject [
|
||||
tableOfContents := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> tableOfContentsDictionary [
|
||||
| response |
|
||||
response := OrderedDictionary new.
|
||||
self tableOfContents do: [:content |
|
||||
response
|
||||
at: content shortName put: content id
|
||||
].
|
||||
^ response
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> titles [
|
||||
^ titles ifNil: [titles := OrderedDictionary new]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> viewContentsFor: aView [
|
||||
<gtView>
|
||||
^ aView list
|
||||
title: 'Contents';
|
||||
priority: 10;
|
||||
items: [ self tableOfContents ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> workingDirectory [
|
||||
^ self folder / self currentLanguage / (self bookishFolder at: self currentLanguage)
|
||||
]
|
||||
Class {
|
||||
#name : #PubPubWork,
|
||||
#superclass : #Object,
|
||||
#instVars : [
|
||||
'address',
|
||||
'tableOfContents',
|
||||
'titles',
|
||||
'folder',
|
||||
'currentLanguage',
|
||||
'languages'
|
||||
],
|
||||
#category : #'MiniDocs-Model'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> addTableOfContents: anOrderedDictionary [
|
||||
self tableOfContents
|
||||
at: (self currentLanguage) put: anOrderedDictionary;
|
||||
yourself
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> addTitle: aString [
|
||||
self titles
|
||||
at: (self currentLanguage) put: aString
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> address [
|
||||
^ address
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> address: anUrl [
|
||||
address := anUrl
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> bookishFolder [
|
||||
^ { 'en' -> 'book'.
|
||||
'es' -> 'libro'} asDictionary
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> currentLanguage [
|
||||
^ currentLanguage
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> currentLanguage: twoLettersInISO639_1 [
|
||||
currentLanguage := twoLettersInISO639_1
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> defaultOptions [
|
||||
^ { 'sourceCodeLink' -> true .
|
||||
'commentsProvider' -> 'Hypothesis' } asDictionary
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> defaultTitle [
|
||||
^ self titles associations first value
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> downloadContents [
|
||||
| workingDirectory |
|
||||
workingDirectory := self workingDirectory.
|
||||
self tableOfContentsDictionary
|
||||
keysAndValuesDo: [ :name :chapterAddress |
|
||||
| currentFileName |
|
||||
currentFileName := name , '--' , chapterAddress , '.md'.
|
||||
(workingDirectory / currentFileName) asFileReference ensureDelete.
|
||||
(workingDirectory / 'markdown') asFileReference ensureDelete.
|
||||
ZnClient new
|
||||
get: self address , 'pub/' , chapterAddress , '/download/markdown';
|
||||
downloadTo: workingDirectory.
|
||||
workingDirectory / 'markdown' renameTo: currentFileName ].
|
||||
^ workingDirectory
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> downloadContents2 [
|
||||
| workingDirectory |
|
||||
workingDirectory := self folder / self currentLanguage / 'book'.
|
||||
self tableOfContentsDictionary keysAndValuesDo: [ :name :chapterAddress | |currentFileName|
|
||||
currentFileName := name, '--', chapterAddress, '.md'.
|
||||
(workingDirectory / currentFileName) asFileReference ensureDelete.
|
||||
(workingDirectory / 'markdown') asFileReference ensureDelete.
|
||||
ZnClient new
|
||||
get: self address, 'pub/', chapterAddress, '/download/markdown';
|
||||
downloadTo: workingDirectory .
|
||||
workingDirectory / 'markdown' renameTo: currentFileName
|
||||
].
|
||||
^ workingDirectory
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> exportToHTML [
|
||||
self markdownFiles
|
||||
do: [ :file | | doc |
|
||||
doc := Markdown new fromFile: file.
|
||||
doc exportAsHTML ].
|
||||
^ self markdownFiles first parent
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> exportToMarkdeep [
|
||||
| markdeepDocs |
|
||||
|
||||
markdeepDocs := self markdownFiles
|
||||
collect: [ :file | Markdeep fromMarkdownFile: file ].
|
||||
markdeepDocs do: [ :each | each fromPubPubToMarkdeep exportAsFile ].
|
||||
^ self languageFolder
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> extractAllContentsRaw [
|
||||
^ self frontPage xpath: '//div[@class="layout-pubs-block"]'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> extractRawTableOfContents [
|
||||
^ self extractAllContentsRaw first xpath: '//div[contains(concat(" ",normalize-space(@class)," "), " pub-preview-component ")]'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> folder [
|
||||
^ folder ensureCreateDirectory
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> folder: localDirectory [
|
||||
folder := localDirectory
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> frontPage [
|
||||
"This should scrap contents of the book's front-page and translate them into Markdeep,
|
||||
according to our templates."
|
||||
^ (XMLHTMLParser on: (self address asUrl retrieveContents)) parseDocument
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> languageFolder [
|
||||
^ self folder / self currentLanguage
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> markdeepFrontPage [
|
||||
| frontPage markdeepIndex |
|
||||
frontPage := Markdeep new.
|
||||
frontPage
|
||||
title: self defaultTitle;
|
||||
file: self languageFolder / 'frontPage.md.html'.
|
||||
markdeepIndex := '' writeStream.
|
||||
self tableOfContents do: [:pubPubContent|
|
||||
markdeepIndex
|
||||
nextPutAll: pubPubContent asMarkdeepFrontPageElement
|
||||
].
|
||||
frontPage body: markdeepIndex contents.
|
||||
^ frontPage
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> markdownFiles [
|
||||
^ self languageFolder allChildren
|
||||
select: [ :file | file basename endsWith: '.md' ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> populateContents [
|
||||
self tableOfContents isEmptyOrNil
|
||||
ifTrue: [ self populateTableOfContents ].
|
||||
self workingDirectory children ifEmpty: [self downloadContents].
|
||||
self tableOfContents do: [:pubPubContent | | contentFile|
|
||||
contentFile := self workingDirectory / pubPubContent fileName.
|
||||
contentFile exists
|
||||
ifTrue: [ pubPubContent contents: (Markdown new fromFile: contentFile) ]
|
||||
]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> populateTableOfContents [
|
||||
| contentsCollection |
|
||||
contentsCollection := self extractRawTableOfContents collect: [:each |
|
||||
(PubPubContent fromXML: each)
|
||||
language: self currentLanguage;
|
||||
work: self
|
||||
].
|
||||
self addTableOfContents: contentsCollection asOrderedCollection
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> printOn: aStream [
|
||||
super printOn: aStream.
|
||||
aStream
|
||||
nextPutAll: '(',self defaultTitle, ' | ', self address, ' )'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> tableOfContents [
|
||||
tableOfContents ifNil: [ ^ tableOfContents := Dictionary new].
|
||||
^ tableOfContents at: self currentLanguage
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> tableOfContents: anObject [
|
||||
tableOfContents := anObject
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> tableOfContentsDictionary [
|
||||
| response |
|
||||
response := OrderedDictionary new.
|
||||
self tableOfContents do: [:content |
|
||||
response
|
||||
at: content shortName put: content id
|
||||
].
|
||||
^ response
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> titles [
|
||||
^ titles ifNil: [titles := OrderedDictionary new]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> viewContentsFor: aView [
|
||||
<gtView>
|
||||
^ aView list
|
||||
title: 'Contents';
|
||||
priority: 10;
|
||||
items: [ self tableOfContents ]
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
PubPubWork >> workingDirectory [
|
||||
^ self folder / self currentLanguage / (self bookishFolder at: self currentLanguage)
|
||||
]
|
||||
|
@ -1,162 +1,162 @@
|
||||
Extension { #name : #String }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> accentedCharactersCorrection [
|
||||
| modified corrections |
|
||||
corrections := {
|
||||
'ó' -> 'ó' . 'ú' -> 'ú' . 'ñ' -> 'ñ' .
|
||||
'Ã' -> 'í' . 'á' -> 'á' . 'é' -> 'é' . 'â' -> $' asString} asDictionary.
|
||||
modified := self copy.
|
||||
corrections keysAndValuesDo: [ :k :v |
|
||||
modified := modified copyReplaceAll: k with: v
|
||||
].
|
||||
^ modified
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> asDashedLowercase [
|
||||
"I convert phrases like 'This is a phrase' into 'this-is-a-phrase'."
|
||||
|
||||
^ '-' join: (self substrings collect: [:each | each asLowercase ])
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> asInteger [
|
||||
"Return the integer present in the receiver, or nil. In case of float, returns the integer part."
|
||||
"'1' asInteger >>> 1"
|
||||
"'-1' asInteger >>> -1"
|
||||
"'10' asInteger >>> 10"
|
||||
"'a' asInteger >>> nil"
|
||||
"'1.234' asInteger >>> 1"
|
||||
^ (self copyWithoutAll: '_') asSignedInteger
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> contentsWithoutYAMLMetadata [
|
||||
| newContents |
|
||||
self detectYAMLMetadata ifFalse: [ ^ self ].
|
||||
newContents := '' writeStream.
|
||||
(self lines copyFrom: self yamlMetadataClosingLineNumber + 2 to: self lines size) do: [ :line |
|
||||
newContents nextPutAll: line; cr ].
|
||||
^ newContents contents.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> deleteYAMLMetadata [
|
||||
| newContents |
|
||||
self detectYAMLMetadata ifFalse: [ ^ self ].
|
||||
newContents := '' writeStream.
|
||||
(self lines copyFrom: self yamlMetadataClosingLineNumber + 1 to: self lines size) do: [ :line |
|
||||
newContents nextPutAll: line; lf;lf ].
|
||||
^ newContents contents.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> demoteMarkdownHeaders [
|
||||
| response |
|
||||
response := self contents lines.
|
||||
self markdownHeaders associations allButFirstDo: [ :assoc |
|
||||
response at: assoc key put: '#', assoc value ].
|
||||
^ response asStringWithCr withInternetLineEndings
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> detectYAMLMetadata [
|
||||
| lines |
|
||||
lines := self lines.
|
||||
^ self startsWithYAMLMetadataDelimiter
|
||||
and: [ lines allButFirst
|
||||
detect: [ :currentLine | currentLine beginsWith: self class yamlMetadataDelimiter ]
|
||||
ifFound: [ ^ true ] ifNone: [ ^ false ] ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> markdownHeaders [
|
||||
| response headers |
|
||||
headers := (LeTextSnippet string: self contents) ast // #LeHeaderNode collect: [ :each | each headerFullName asString ].
|
||||
response := OrderedDictionary new.
|
||||
self lines doWithIndex: [:line :index |
|
||||
(line beginsWithAnyOf: headers)
|
||||
ifTrue: [ response at: index put: line ]
|
||||
].
|
||||
^ response
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> promoteMarkdownHeaders [
|
||||
| response |
|
||||
response := self contents lines.
|
||||
self markdownHeaders associationsDo: [ :assoc |
|
||||
response at: assoc key put: assoc value allButFirst ].
|
||||
^ response asStringWithCr withInternetLineEndings
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> romanizeAccents [
|
||||
| modified corrections |
|
||||
corrections := {
|
||||
'ó' -> 'o' . 'ú' -> 'u' . 'ñ' -> 'n' .
|
||||
'í' -> 'i' . 'á' -> 'a' . 'é' -> 'e' } asDictionary.
|
||||
modified := self copy.
|
||||
corrections keysAndValuesDo: [ :k :v |
|
||||
modified := modified copyReplaceAll: k with: v
|
||||
].
|
||||
^ modified
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> startsWithYAMLMetadataDelimiter [
|
||||
self lines ifEmpty: [^false].
|
||||
^ self lines first beginsWith: self class yamlMetadataDelimiter
|
||||
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> withoutXMLTagDelimiters [
|
||||
^ self copyWithoutAll: #($< $>)
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> yamlMetadata [
|
||||
^ (YAML2JSON fromString: self yamlMetadataString)
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> yamlMetadataClosingLineNumber [
|
||||
"I return the line where the closing of the YAML metadata occurs or 0 if no closing is found."
|
||||
self startsWithYAMLMetadataDelimiter ifFalse: [ ^ self ].
|
||||
self lines allButFirst doWithIndex: [ :currentLine :i |
|
||||
(currentLine beginsWith: self class yamlMetadataDelimiter) ifTrue: [ ^ i + 1 ]]
|
||||
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String class >> yamlMetadataDelimiter [
|
||||
^ '---'
|
||||
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> yamlMetadataString [
|
||||
| output yamlLines |
|
||||
self detectYAMLMetadata ifFalse: [ ^nil ].
|
||||
self lines ifEmpty: [ ^nil ].
|
||||
yamlLines := self lines copyFrom: 2 to: self yamlMetadataClosingLineNumber - 1.
|
||||
output := '' writeStream.
|
||||
yamlLines do: [ :line |
|
||||
output
|
||||
nextPutAll: line;
|
||||
nextPut: Character lf. ].
|
||||
^ output contents
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> yamlMetadataStringWithDelimiters [
|
||||
| output |
|
||||
self yamlMetadataString ifNil: [ ^ nil ].
|
||||
output := String new writeStream.
|
||||
output nextPutAll: self class yamlMetadataDelimiter; cr.
|
||||
output nextPutAll: self yamlMetadataString.
|
||||
output nextPutAll: self class yamlMetadataDelimiter; cr.
|
||||
^ output contents.
|
||||
]
|
||||
Extension { #name : #String }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> accentedCharactersCorrection [
|
||||
| modified corrections |
|
||||
corrections := {
|
||||
'ó' -> 'ó' . 'ú' -> 'ú' . 'ñ' -> 'ñ' .
|
||||
'Ã' -> 'í' . 'á' -> 'á' . 'é' -> 'é' . 'â' -> $' asString} asDictionary.
|
||||
modified := self copy.
|
||||
corrections keysAndValuesDo: [ :k :v |
|
||||
modified := modified copyReplaceAll: k with: v
|
||||
].
|
||||
^ modified
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> asDashedLowercase [
|
||||
"I convert phrases like 'This is a phrase' into 'this-is-a-phrase'."
|
||||
|
||||
^ '-' join: (self substrings collect: [:each | each asLowercase ])
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> asInteger [
|
||||
"Return the integer present in the receiver, or nil. In case of float, returns the integer part."
|
||||
"'1' asInteger >>> 1"
|
||||
"'-1' asInteger >>> -1"
|
||||
"'10' asInteger >>> 10"
|
||||
"'a' asInteger >>> nil"
|
||||
"'1.234' asInteger >>> 1"
|
||||
^ (self copyWithoutAll: '_') asSignedInteger
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> contentsWithoutYAMLMetadata [
|
||||
| newContents |
|
||||
self detectYAMLMetadata ifFalse: [ ^ self ].
|
||||
newContents := '' writeStream.
|
||||
(self lines copyFrom: self yamlMetadataClosingLineNumber + 2 to: self lines size) do: [ :line |
|
||||
newContents nextPutAll: line; cr ].
|
||||
^ newContents contents.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> deleteYAMLMetadata [
|
||||
| newContents |
|
||||
self detectYAMLMetadata ifFalse: [ ^ self ].
|
||||
newContents := '' writeStream.
|
||||
(self lines copyFrom: self yamlMetadataClosingLineNumber + 1 to: self lines size) do: [ :line |
|
||||
newContents nextPutAll: line; lf;lf ].
|
||||
^ newContents contents.
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> demoteMarkdownHeaders [
|
||||
| response |
|
||||
response := self contents lines.
|
||||
self markdownHeaders associations allButFirstDo: [ :assoc |
|
||||
response at: assoc key put: '#', assoc value ].
|
||||
^ response asStringWithCr withInternetLineEndings
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> detectYAMLMetadata [
|
||||
| lines |
|
||||
lines := self lines.
|
||||
^ self startsWithYAMLMetadataDelimiter
|
||||
and: [ lines allButFirst
|
||||
detect: [ :currentLine | currentLine beginsWith: self class yamlMetadataDelimiter ]
|
||||
ifFound: [ ^ true ] ifNone: [ ^ false ] ]
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> markdownHeaders [
|
||||
| response headers |
|
||||
headers := (LeTextSnippet string: self contents) ast // #LeHeaderNode collect: [ :each | each headerFullName asString ].
|
||||
response := OrderedDictionary new.
|
||||
self lines doWithIndex: [:line :index |
|
||||
(line beginsWithAnyOf: headers)
|
||||
ifTrue: [ response at: index put: line ]
|
||||
].
|
||||
^ response
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> promoteMarkdownHeaders [
|
||||
| response |
|
||||
response := self contents lines.
|
||||
self markdownHeaders associationsDo: [ :assoc |
|
||||
response at: assoc key put: assoc value allButFirst ].
|
||||
^ response asStringWithCr withInternetLineEndings
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> romanizeAccents [
|
||||
| modified corrections |
|
||||
corrections := {
|
||||
'ó' -> 'o' . 'ú' -> 'u' . 'ñ' -> 'n' .
|
||||
'í' -> 'i' . 'á' -> 'a' . 'é' -> 'e' } asDictionary.
|
||||
modified := self copy.
|
||||
corrections keysAndValuesDo: [ :k :v |
|
||||
modified := modified copyReplaceAll: k with: v
|
||||
].
|
||||
^ modified
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> startsWithYAMLMetadataDelimiter [
|
||||
self lines ifEmpty: [^false].
|
||||
^ self lines first beginsWith: self class yamlMetadataDelimiter
|
||||
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> withoutXMLTagDelimiters [
|
||||
^ self copyWithoutAll: #($< $>)
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> yamlMetadata [
|
||||
^ (YAML2JSON fromString: self yamlMetadataString)
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> yamlMetadataClosingLineNumber [
|
||||
"I return the line where the closing of the YAML metadata occurs or 0 if no closing is found."
|
||||
self startsWithYAMLMetadataDelimiter ifFalse: [ ^ self ].
|
||||
self lines allButFirst doWithIndex: [ :currentLine :i |
|
||||
(currentLine beginsWith: self class yamlMetadataDelimiter) ifTrue: [ ^ i + 1 ]]
|
||||
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String class >> yamlMetadataDelimiter [
|
||||
^ '---'
|
||||
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> yamlMetadataString [
|
||||
| output yamlLines |
|
||||
self detectYAMLMetadata ifFalse: [ ^nil ].
|
||||
self lines ifEmpty: [ ^nil ].
|
||||
yamlLines := self lines copyFrom: 2 to: self yamlMetadataClosingLineNumber - 1.
|
||||
output := '' writeStream.
|
||||
yamlLines do: [ :line |
|
||||
output
|
||||
nextPutAll: line;
|
||||
nextPut: Character lf. ].
|
||||
^ output contents
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
String >> yamlMetadataStringWithDelimiters [
|
||||
| output |
|
||||
self yamlMetadataString ifNil: [ ^ nil ].
|
||||
output := String new writeStream.
|
||||
output nextPutAll: self class yamlMetadataDelimiter; cr.
|
||||
output nextPutAll: self yamlMetadataString.
|
||||
output nextPutAll: self class yamlMetadataDelimiter; cr.
|
||||
^ output contents.
|
||||
]
|
||||
|
@ -1,6 +1,6 @@
|
||||
Extension { #name : #TeaCompositeRouter }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
TeaCompositeRouter >> staticRouters [
|
||||
^ routers
|
||||
]
|
||||
Extension { #name : #TeaCompositeRouter }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
TeaCompositeRouter >> staticRouters [
|
||||
^ routers
|
||||
]
|
||||
|
@ -1,6 +1,6 @@
|
||||
Extension { #name : #TeaStaticRouter }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
TeaStaticRouter >> delegate [
|
||||
^ delegate
|
||||
]
|
||||
Extension { #name : #TeaStaticRouter }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
TeaStaticRouter >> delegate [
|
||||
^ delegate
|
||||
]
|
||||
|
@ -1,6 +1,6 @@
|
||||
Extension { #name : #Teapot }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
Teapot >> staticRouter [
|
||||
^ staticRouter delegate
|
||||
]
|
||||
Extension { #name : #Teapot }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
Teapot >> staticRouter [
|
||||
^ staticRouter delegate
|
||||
]
|
||||
|
@ -1,10 +1,10 @@
|
||||
Extension { #name : #UnixChromePlatform }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
UnixChromePlatform class >> defaultExecutableLocations [
|
||||
|
||||
^ #( '/opt/google/chrome/chrome'
|
||||
'/usr/bin/chromium-browser'
|
||||
'/usr/local/share/chromium/chrome'
|
||||
'/usr/bin/chromium' )
|
||||
]
|
||||
Extension { #name : #UnixChromePlatform }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
UnixChromePlatform class >> defaultExecutableLocations [
|
||||
|
||||
^ #( '/opt/google/chrome/chrome'
|
||||
'/usr/bin/chromium-browser'
|
||||
'/usr/local/share/chromium/chrome'
|
||||
'/usr/bin/chromium' )
|
||||
]
|
||||
|
@ -1,53 +1,53 @@
|
||||
Extension { #name : #XMLElement }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
XMLElement >> asSnippetDictionary [
|
||||
| response |
|
||||
response := STON fromString: (self attributes at: 'st-data').
|
||||
response at: 'className' put: (self attributes at: 'st-class').
|
||||
response at: 'content' put: self sanitizedContent.
|
||||
^ response
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
XMLElement >> extractMarkdownImageLinkData [
|
||||
| linkParserNodes sanitizedText linkParser |
|
||||
linkParser := (PPCommonMarkBlockParser parse: (self contentString trimBoth: [:each | each = Character lf]) allButFirst)
|
||||
accept: CMBlockVisitor new.
|
||||
linkParserNodes := linkParser children first children.
|
||||
linkParserNodes size = 1
|
||||
ifTrue: [ sanitizedText := linkParserNodes first label text ]
|
||||
ifFalse: [ sanitizedText := '' writeStream.
|
||||
linkParserNodes allButLast
|
||||
do: [ :each |
|
||||
each className = 'PPCMText'
|
||||
ifTrue: [ sanitizedText nextPutAll: each text allButFirst ].
|
||||
each className = 'PPCMLink'
|
||||
ifTrue: [ sanitizedText nextPutAll: each printString ] ].
|
||||
sanitizedText := sanitizedText contents ].
|
||||
^ {sanitizedText . self contentString }
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
XMLElement >> sanitizedContent [
|
||||
| className sanitizedText |
|
||||
className := self attributes at: 'st-class'.
|
||||
className = 'LeTextSnippet'
|
||||
ifTrue: [ sanitizedText := self contentString.
|
||||
sanitizedText := sanitizedText allButFirst.
|
||||
sanitizedText := sanitizedText allButLast ].
|
||||
className = 'LePharoSnippet'
|
||||
ifTrue: [ | joinedText |
|
||||
sanitizedText := self contentString lines.
|
||||
sanitizedText := sanitizedText copyFrom: 4 to: sanitizedText size - 2.
|
||||
joinedText := '' writeStream.
|
||||
sanitizedText
|
||||
do: [ :line |
|
||||
joinedText
|
||||
nextPutAll: line;
|
||||
nextPut: Character lf ].
|
||||
sanitizedText := joinedText contents allButLast ].
|
||||
className = 'LePictureSnippet'
|
||||
ifTrue: [ sanitizedText := self extractMarkdownImageLinkData ].
|
||||
^ sanitizedText
|
||||
]
|
||||
Extension { #name : #XMLElement }
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
XMLElement >> asSnippetDictionary [
|
||||
| response |
|
||||
response := STON fromString: (self attributes at: 'st-data').
|
||||
response at: 'className' put: (self attributes at: 'st-class').
|
||||
response at: 'content' put: self sanitizedContent.
|
||||
^ response
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
XMLElement >> extractMarkdownImageLinkData [
|
||||
| linkParserNodes sanitizedText linkParser |
|
||||
linkParser := (PPCommonMarkBlockParser parse: (self contentString trimBoth: [:each | each = Character lf]) allButFirst)
|
||||
accept: CMBlockVisitor new.
|
||||
linkParserNodes := linkParser children first children.
|
||||
linkParserNodes size = 1
|
||||
ifTrue: [ sanitizedText := linkParserNodes first label text ]
|
||||
ifFalse: [ sanitizedText := '' writeStream.
|
||||
linkParserNodes allButLast
|
||||
do: [ :each |
|
||||
each className = 'PPCMText'
|
||||
ifTrue: [ sanitizedText nextPutAll: each text allButFirst ].
|
||||
each className = 'PPCMLink'
|
||||
ifTrue: [ sanitizedText nextPutAll: each printString ] ].
|
||||
sanitizedText := sanitizedText contents ].
|
||||
^ {sanitizedText . self contentString }
|
||||
]
|
||||
|
||||
{ #category : #'*MiniDocs' }
|
||||
XMLElement >> sanitizedContent [
|
||||
| className sanitizedText |
|
||||
className := self attributes at: 'st-class'.
|
||||
className = 'LeTextSnippet'
|
||||
ifTrue: [ sanitizedText := self contentString.
|
||||
sanitizedText := sanitizedText allButFirst.
|
||||
sanitizedText := sanitizedText allButLast ].
|
||||
className = 'LePharoSnippet'
|
||||
ifTrue: [ | joinedText |
|
||||
sanitizedText := self contentString lines.
|
||||
sanitizedText := sanitizedText copyFrom: 4 to: sanitizedText size - 2.
|
||||
joinedText := '' writeStream.
|
||||
sanitizedText
|
||||
do: [ :line |
|
||||
joinedText
|
||||
nextPutAll: line;
|
||||
nextPut: Character lf ].
|
||||
sanitizedText := joinedText contents allButLast ].
|
||||
className = 'LePictureSnippet'
|
||||
ifTrue: [ sanitizedText := self extractMarkdownImageLinkData ].
|
||||
^ sanitizedText
|
||||
]
|
||||
|
@ -1,52 +1,52 @@
|
||||
"
|
||||
The `External` tag is related on its dependency on other programming languages and frameworks,
|
||||
though the dependency should be loaded by just loading a small binary with no dependencies.
|
||||
"
|
||||
Class {
|
||||
#name : #YQ,
|
||||
#superclass : #Object,
|
||||
#category : #'MiniDocs-External'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
YQ class >> binaryDownloadLinkFor: operativeSystem on: processor [
|
||||
| binaryName binaryDownloadData |
|
||||
binaryName := 'yq_', operativeSystem , '_', processor.
|
||||
binaryDownloadData := ((self lastReleaseData at: 'assets')
|
||||
select: [:each | (each at: 'name') beginsWith: binaryName ]) first.
|
||||
^ binaryDownloadData at: 'browser_download_url'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
YQ class >> binaryFile [
|
||||
"Starting with location on Arch Linux and its derivates. Multidistro and multiOS support should be added."
|
||||
^ FileLocator root / 'usr/bin/yq'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
YQ class >> install [
|
||||
^ self lastReleaseData
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
YQ class >> jsonToYaml: aDictionary [
|
||||
| jsonFile |
|
||||
self binaryFile exists ifFalse: [ YQ install].
|
||||
jsonFile := MarkupFile exportAsFileOn: FileLocator temp / 'data.json' containing: aDictionary.
|
||||
(Smalltalk os isUnix or: [ Smalltalk os isMacOS ])
|
||||
ifTrue: [
|
||||
OSSUnixSubprocess new
|
||||
shellCommand: 'cat ', jsonFile fullName,' | yq -y';
|
||||
redirectStdout;
|
||||
runAndWaitOnExitDo: [ :command :outString |
|
||||
^ outString
|
||||
]].
|
||||
Smalltalk os isWindows
|
||||
ifTrue: [ ^ LibC resultOfCommand: 'yq -p=json ', jsonFile fullName ].
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
YQ class >> lastReleaseData [
|
||||
^ (STONJSON
|
||||
fromString: 'https://api.github.com/repos/mikefarah/yq/releases' asUrl retrieveContents) first
|
||||
]
|
||||
"
|
||||
The `External` tag is related on its dependency on other programming languages and frameworks,
|
||||
though the dependency should be loaded by just loading a small binary with no dependencies.
|
||||
"
|
||||
Class {
|
||||
#name : #YQ,
|
||||
#superclass : #Object,
|
||||
#category : #'MiniDocs-External'
|
||||
}
|
||||
|
||||
{ #category : #accessing }
|
||||
YQ class >> binaryDownloadLinkFor: operativeSystem on: processor [
|
||||
| binaryName binaryDownloadData |
|
||||
binaryName := 'yq_', operativeSystem , '_', processor.
|
||||
binaryDownloadData := ((self lastReleaseData at: 'assets')
|
||||
select: [:each | (each at: 'name') beginsWith: binaryName ]) first.
|
||||
^ binaryDownloadData at: 'browser_download_url'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
YQ class >> binaryFile [
|
||||
"Starting with location on Arch Linux and its derivates. Multidistro and multiOS support should be added."
|
||||
^ FileLocator root / 'usr/bin/yq'
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
YQ class >> install [
|
||||
^ self lastReleaseData
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
YQ class >> jsonToYaml: aDictionary [
|
||||
| jsonFile |
|
||||
self binaryFile exists ifFalse: [ YQ install].
|
||||
jsonFile := MarkupFile exportAsFileOn: FileLocator temp / 'data.json' containing: aDictionary.
|
||||
(Smalltalk os isUnix or: [ Smalltalk os isMacOS ])
|
||||
ifTrue: [
|
||||
OSSUnixSubprocess new
|
||||
shellCommand: 'cat ', jsonFile fullName,' | yq -y';
|
||||
redirectStdout;
|
||||
runAndWaitOnExitDo: [ :command :outString |
|
||||
^ outString
|
||||
]].
|
||||
Smalltalk os isWindows
|
||||
ifTrue: [ ^ LibC resultOfCommand: 'yq -p=json ', jsonFile fullName ].
|
||||
]
|
||||
|
||||
{ #category : #accessing }
|
||||
YQ class >> lastReleaseData [
|
||||
^ (STONJSON
|
||||
fromString: 'https://api.github.com/repos/mikefarah/yq/releases' asUrl retrieveContents) first
|
||||
]
|
||||
|
@ -1 +1 @@
|
||||
Package { #name : #MiniDocs }
|
||||
Package { #name : #MiniDocs }
|
||||
|
Loading…
Reference in New Issue
Block a user