Starting multiOS support for binary files.

This commit is contained in:
Offray Vladimir Luna Cárdenas 2024-04-29 17:28:20 -05:00
parent 5e4db00352
commit a22005da27
61 changed files with 4752 additions and 4753 deletions

View File

@@ -1,28 +1,28 @@
Extension { #name : #Array }
{ #category : #'*MiniDocs' }
Array >> bagOfWordsFor: sentenceArray [
	"A small utility for a machine learning training algorithm.
	Inspired by https://youtu.be/8qwowmiXANQ?t=1144.
	This should probably be moved to [Polyglot](https://github.com/pharo-ai/Polyglot),
	but the repository is pretty inactive (with commits 2 or more years old and no response to issues).
	Meanwhile, it will be in MiniDocs.
	Given the sentence := #('hello' 'how' 'are' 'you')
	and the testVocabulary := #('hi' 'hello' 'I' 'you' 'bye' 'thank' 'you')
	then
	testVocabulary bagOfWordsFor: sentence.
	Should give: #(0 1 0 1 0 0 0)
	"
	| bagOfWords |
	bagOfWords := Array new: self size.
	bagOfWords doWithIndex: [:each :i | bagOfWords at: i put: 0 ].
	sentenceArray do: [:token | | index |
		index := self indexOf: token.
		index > 0
			ifTrue: [bagOfWords at: index put: 1]
	].
	^ bagOfWords
]
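
A minimal usage sketch for the extension above, using the same values as its method comment (Playground code, not part of the commit):

| vocabulary sentence |
vocabulary := #('hi' 'hello' 'I' 'you' 'bye' 'thank' 'you').
sentence := #('hello' 'how' 'are' 'you').
vocabulary bagOfWordsFor: sentence.
"Expected result: #(0 1 0 1 0 0 0)"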

View File

@@ -1,23 +1,23 @@
Extension { #name : #BrAsyncFileWidget }
{ #category : #'*MiniDocs' }
BrAsyncFileWidget >> url: aUrl [
	| realUrl imageUrl |
	realUrl := aUrl asZnUrl.
	realUrl scheme = #file ifTrue: [
		^ self file: realUrl asFileReference ].
	imageUrl := realUrl.
	realUrl host = 'www.youtube.com' ifTrue: [ | video |
		video := LeRawYoutubeReferenceInfo fromYoutubeStringUrl: realUrl asString.
		imageUrl := (video rawData at: 'thumbnail_url') asUrl.
	].
	self stencil: [
		(SkiaImage fromForm:
			(Form fromBase64String: imageUrl retrieveContents base64Encoded))
			asElement constraintsDo: [ :c |
				c horizontal matchParent.
				c vertical matchParent ] ]
]

View File

@@ -1,7 +1,7 @@
Extension { #name : #ByteString }
{ #category : #'*MiniDocs' }
ByteString >> email [
	"Quick fix for importing Lepiter pages that have a plain ByteString field as email."
	^ self
]

View File

@@ -1,46 +1,46 @@
Extension { #name : #DataFrame }
{ #category : #'*MiniDocs' }
DataFrame >> asMarkdown [
	| response |
	response := '' writeStream.
	self columnNames do: [ :name | response nextPutAll: '| ' , name , ' ' ].
	response
		nextPutAll: '|';
		cr.
	self columns size timesRepeat: [ response nextPutAll: '|---' ].
	response
		nextPutAll: '|';
		cr.
	self asArrayOfRows
		do: [ :row |
			row do: [ :cell | response nextPutAll: '| ' , cell asString , ' ' ].
			response
				nextPutAll: '|';
				cr ].
	^ response contents accentedCharactersCorrection withInternetLineEndings.
]
{ #category : #'*MiniDocs' }
DataFrame >> viewDataFor: aView [
	<gtView>
	| columnedList |
	self numberOfRows >= 1 ifFalse: [ ^ aView empty ].
	columnedList := aView columnedList
		title: 'Data';
		items: [ self transposed columns ];
		priority: 40.
	self columnNames
		withIndexDo: [:aName :anIndex |
			columnedList
				column: aName
				text: [:anItem | anItem at: anIndex ]
		].
	^ columnedList
]
{ #category : #'*MiniDocs' }
DataFrame >> webView [
	^ Pandoc convertString: self asMarkdown from: 'markdown' to: 'html'
]
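
A minimal sketch of DataFrame >> asMarkdown in use, assuming the withRows:columnNames: constructor from the DataFrame library; the data is illustrative only:

| df |
df := DataFrame
	withRows: #(#('Colombia' 'Bogota') #('Peru' 'Lima'))
	columnNames: #('country' 'capital').
df asMarkdown.
"Expected output, as built by the method above:
| country | capital |
|---|---|
| Colombia | Bogota |
| Peru | Lima |
"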

View File

@@ -1,342 +1,342 @@
Class {
	#name : #GrafoscopioNode,
	#superclass : #Object,
	#instVars : [
		'header',
		'body',
		'tags',
		'children',
		'parent',
		'links',
		'level',
		'created',
		'nodesInPreorder',
		'selected',
		'edited',
		'headers',
		'key',
		'output',
		'remoteLocations'
	],
	#category : #'MiniDocs-Legacy'
}
{ #category : #accessing }
GrafoscopioNode class >> fromFile: aFileReference [
	^ (STON fromString: aFileReference contents) first parent
]
{ #category : #accessing }
GrafoscopioNode class >> fromLink: aStonLink [
	| notebook |
	notebook := (STON fromString: aStonLink asUrl retrieveContents utf8Decoded) first parent.
	notebook addRemoteLocation: aStonLink.
	^ notebook
]
{ #category : #accessing }
GrafoscopioNode >> addRemoteLocation: anURL [
	self remoteLocations add: anURL
]
{ #category : #accessing }
GrafoscopioNode >> ancestors [
	"I return a collection of all the nodes which are ancestors of the receiver node."
	| currentNode ancestors |
	currentNode := self.
	ancestors := OrderedCollection new.
	[ currentNode parent notNil and: [ currentNode level > 0 ] ]
		whileTrue: [
			ancestors add: currentNode parent.
			currentNode := currentNode parent].
	ancestors := ancestors reversed.
	^ ancestors
]
{ #category : #accessing }
GrafoscopioNode >> asLePage [
	| page |
	self root populateTimestamps.
	page := LePage new
		initializeTitle: 'Grafoscopio Notebook (imported)'.
	self nodesInPreorder allButFirst
		do: [:node | page addSnippet: node asSnippet ].
	page latestEditTime: self root latestEditionDate.
	page createTime: self root earliestCreationDate.
	page optionAt: 'remoteLocations' put: self remoteLocations.
	^ page.
]
{ #category : #accessing }
GrafoscopioNode >> asSnippet [
	| snippet child |
	snippet := LeTextSnippet new
		string: self header;
		createTime: (LeTime new
			time: self created);
		uid: LeUID new.
	(self tags includes: 'código')
		ifFalse: [
			child := LeTextSnippet new.
			child string: self body ]
		ifTrue: [
			child := LePharoSnippet new.
			child code: self body ].
	child
		createTime: (LeTime new
			time: self created);
		uid: LeUID new.
	snippet addFirstSnippet: child.
	snippet optionAt: 'tags' put: self tags.
	^ snippet
]
{ #category : #accessing }
GrafoscopioNode >> body [
	^ body
]
{ #category : #accessing }
GrafoscopioNode >> body: anObject [
	body := anObject
]
{ #category : #accessing }
GrafoscopioNode >> children [
	^ children
]
{ #category : #accessing }
GrafoscopioNode >> children: anObject [
	children := anObject
]
{ #category : #accessing }
GrafoscopioNode >> created [
	created ifNotNil: [^created asDateAndTime].
	^ created
]
{ #category : #accessing }
GrafoscopioNode >> created: anObject [
	created := anObject
]
{ #category : #accessing }
GrafoscopioNode >> earliestCreationDate [
	| earliest |
	self nodesWithCreationDates
		ifNotEmpty: [ earliest := self nodesWithCreationDates first created]
		ifEmpty: [ earliest := self earliestRepositoryTimestamp - 3 hours].
	self nodesWithCreationDates do: [:node |
		node created <= earliest ifTrue: [ earliest := node created ] ].
	^ earliest
]
{ #category : #accessing }
GrafoscopioNode >> earliestRepositoryTimestamp [
	| remote fossilHost docSegments repo checkinInfo |
	remote := self remoteLocations first asUrl.
	fossilHost := 'https://mutabit.com/repos.fossil'.
	(remote asString includesSubstring: fossilHost) ifFalse: [ ^ false ].
	docSegments := remote segments copyFrom: 5 to: remote segments size.
	repo := FossilRepo new
		remote: (remote scheme, '://', remote host, '/', remote segments first, '/', remote segments second).
	checkinInfo := repo firstCheckinFor: ('/' join: docSegments).
	^ DateAndTime fromUnixTime: (checkinInfo at: 'timestamp')
]
{ #category : #accessing }
GrafoscopioNode >> edited [
	^ edited ifNotNil: [^ edited asDateAndTime ]
]
{ #category : #accessing }
GrafoscopioNode >> edited: anObject [
	edited := anObject
]
{ #category : #accessing }
GrafoscopioNode >> gtTextFor: aView [
	<gtView>
	^ aView textEditor
		title: 'Body';
		text: [ body ]
]
{ #category : #accessing }
GrafoscopioNode >> header [
	^ header
]
{ #category : #accessing }
GrafoscopioNode >> header: anObject [
	header := anObject
]
{ #category : #accessing }
GrafoscopioNode >> latestEditionDate [
	| latest |
	latest := self nodesWithEditionDates first edited.
	self nodesWithEditionDates do: [:node |
		node edited >= latest ifTrue: [ latest := node edited ] ].
	^ latest
]
{ #category : #accessing }
GrafoscopioNode >> level [
	^ level
]
{ #category : #accessing }
GrafoscopioNode >> level: anObject [
	level := anObject
]
{ #category : #accessing }
GrafoscopioNode >> links [
	^ links
]
{ #category : #accessing }
GrafoscopioNode >> links: anObject [
	links := anObject
]
{ #category : #accessing }
GrafoscopioNode >> nodesInPreorder [
	^ nodesInPreorder
]
{ #category : #accessing }
GrafoscopioNode >> nodesInPreorder: anObject [
	nodesInPreorder := anObject
]
{ #category : #accessing }
GrafoscopioNode >> nodesWithCreationDates [
	^ self nodesInPreorder select: [ :each | each created isNotNil ]
]
{ #category : #accessing }
GrafoscopioNode >> nodesWithEditionDates [
	^ self nodesInPreorder select: [ :each | each edited isNotNil ]
]
{ #category : #accessing }
GrafoscopioNode >> parent [
	^ parent
]
{ #category : #accessing }
GrafoscopioNode >> parent: anObject [
	parent := anObject
]
{ #category : #accessing }
GrafoscopioNode >> populateTimestamps [
	| adhocCreationMarker adhocEditionMarker |
	adhocCreationMarker := 'adhoc creation timestamp'.
	adhocEditionMarker := 'adhoc edition timestamp'.
	(self nodesInPreorder size = self nodesWithCreationDates size
		and: [ self nodesInPreorder size = self nodesWithEditionDates size ])
		ifTrue: [ ^ self nodesInPreorder ].
	self nodesInPreorder allButFirst doWithIndex: [:node :i |
		node created ifNil: [
			node created: self earliestCreationDate + i.
			node tags add: adhocCreationMarker.
		].
		node edited ifNil: [
			node edited: self earliestCreationDate + i + 1.
			node tags add: adhocEditionMarker
		].
	].
	self root created ifNil: [
		self root created: self earliestCreationDate - 1.
		self root tags add: adhocCreationMarker.
	].
	self root edited ifNil: [
		self root edited: self latestEditionDate.
		self root tags add: adhocEditionMarker.
	].
	^ self nodesInPreorder
]
{ #category : #accessing }
GrafoscopioNode >> printOn: aStream [
	super printOn: aStream.
	aStream
		nextPutAll: '( ', self header, ' )'
]
{ #category : #accessing }
GrafoscopioNode >> remoteLocations [
	^ remoteLocations ifNil: [ remoteLocations := OrderedCollection new]
]
{ #category : #accessing }
GrafoscopioNode >> root [
	self level = 0 ifTrue: [ ^ self ].
	^ self ancestors first.
]
{ #category : #accessing }
GrafoscopioNode >> selected [
	^ selected
]
{ #category : #accessing }
GrafoscopioNode >> selected: anObject [
	selected := anObject
]
{ #category : #accessing }
GrafoscopioNode >> tags [
	^ tags
]
{ #category : #accessing }
GrafoscopioNode >> tags: anObject [
	tags := anObject
]
{ #category : #accessing }
GrafoscopioNode >> viewBody [
	| aText |
	aText := self header asRopedText.
	self children do: [ :child |
		aText append: ' ' asRopedText.
		aText append: (child header asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor).
		aText append: ('= "' asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor).
		aText append: (child body asRopedText foreground:
			BrGlamorousColors disabledButtonTextColor).
		aText append:
			('"' asRopedText foreground:
				BrGlamorousColors disabledButtonTextColor) ].
	^ aText
]
{ #category : #accessing }
GrafoscopioNode >> viewChildrenFor: aView [
	<gtView>
	children ifNil: [ ^ aView empty ].
	^ aView columnedTree
		title: 'Children';
		priority: 1;
		items: [ { self } ];
		children: #children;
		column: 'Name' text: #viewBody;
		expandUpTo: 2
]
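
A minimal sketch of the legacy import flow these methods support, assuming a Grafoscopio notebook stored as STON; the file path is hypothetical:

| notebook page |
"Load a legacy Grafoscopio notebook from a STON file (hypothetical path)."
notebook := GrafoscopioNode fromFile: FileLocator home / 'mydocs' / 'notebook.ston'.
"Fill in missing timestamps and convert the whole node tree into a Lepiter page."
page := notebook asLePage.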

View File

@@ -1,15 +1,15 @@
Class {
	#name : #GrafoscopioNodeTest,
	#superclass : #TestCase,
	#category : #'MiniDocs-Legacy'
}
{ #category : #accessing }
GrafoscopioNodeTest >> testEarliestCreationNode [
	| notebook remoteNotebook offendingNodes |
	remoteNotebook := 'https://mutabit.com/repos.fossil/documentaton/raw/a63598382?at=documentaton.ston'.
	notebook := (STON fromString: remoteNotebook asUrl retrieveContents utf8Decoded) first parent.
	offendingNodes := notebook nodesInPreorder select: [:node |
		node created isNotNil and: [node created < notebook earliestCreationDate] ].
	self assert: offendingNodes size equals: 0
]

View File

@@ -1,26 +1,26 @@
Extension { #name : #GtGQLSnippet }
{ #category : #'*MiniDocs' }
GtGQLSnippet >> metadataUpdate [
	| createEmailSanitized editEmailSanitized |
	createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
	editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
	^ OrderedDictionary new
		at: 'id' put: self uidString;
		at: 'parent' put: self parent uuid;
		at: 'created' put: self createTime asString;
		at: 'modified' put: self latestEditTime asString;
		at: 'creator' put: createEmailSanitized;
		at: 'modifier' put: editEmailSanitized;
		yourself
]
{ #category : #'*MiniDocs' }
GtGQLSnippet >> sanitizeMetadata [
	self metadata keysAndValuesDo: [:k :v |
		(v includesAny: #($< $>))
			ifTrue: [
				self metadata at: k put: (v copyWithoutAll: #($< $>))
			]
	]
]

View File

@@ -1,192 +1,192 @@
"
I model the interface between a CodiMD (https://demo.codimd.org) documentation
server and Grafoscopio.
I enable the interaction between Grafoscopio notebooks and CodiMD documents,
so one document can start online (as a CodiMD pad) and continue as a Grafoscopio
notebook, or vice versa.
"
Class {
	#name : #HedgeDoc,
	#superclass : #Markdown,
	#instVars : [
		'server',
		'pad',
		'url'
	],
	#category : #'MiniDocs-Core'
}
{ #category : #accessing }
HedgeDoc class >> fromLink: aUrl [
	^ self new fromLink: aUrl
]
{ #category : #'as yet unclassified' }
HedgeDoc class >> newDefault [
	^ self new
		defaultServer.
]
{ #category : #accessing }
HedgeDoc >> asLePage [
	| newPage snippet |
	snippet := LeTextSnippet new
		string: self bodyWithoutTitleHeader promoteMarkdownHeaders.
	newPage := LePage new
		initializeTitle: self title;
		addSnippet: snippet;
		yourself.
	newPage incomingLinks.
	newPage metadata addAll: self metadata.
	^ newPage
]
{ #category : #accessing }
HedgeDoc >> asMarkdeep [
	^ Markdeep new
		metadata: self metadata;
		body: self contents;
		file: self file, 'html'
]
{ #category : #accessing }
HedgeDoc >> asMarkdownTiddler [
	self url ifNil: [ ^ self ].
	^ Tiddler new
		title: self url segments first;
		text: (self contents ifNil: [ self retrieveContents]);
		type: 'text/x-markdown';
		created: Tiddler nowLocal.
]
{ #category : #accessing }
HedgeDoc >> bodyWithoutTitleHeader [
	| headerIndex |
	headerIndex := self body lines
		detectIndex: [ :line | line includesSubstring: self headerAsTitle ]
		ifNone: [ ^ self body].
	^ (self body lines copyWithoutIndex: headerIndex) asStringWithCr
]
{ #category : #accessing }
HedgeDoc >> contents [
	^ super contents
]
{ #category : #accessing }
HedgeDoc >> contents: anObject [
	body := anObject
]
{ #category : #'as yet unclassified' }
HedgeDoc >> defaultServer [
	self server: 'https://docutopia.tupale.co'.
]
{ #category : #accessing }
HedgeDoc >> fromLink: aString [
	self url: aString.
	self retrieveContents
]
{ #category : #'as yet unclassified' }
HedgeDoc >> htmlUrl [
	| link |
	link := self url copy.
	link segments insert: 's' before: 1.
	^ link
]
{ #category : #'as yet unclassified' }
HedgeDoc >> importContents [
	self contents: self retrieveContents
]
{ #category : #accessing }
HedgeDoc >> pad [
	^ pad
]
{ #category : #accessing }
HedgeDoc >> pad: anObject [
	pad := anObject
]
{ #category : #accessing }
HedgeDoc >> retrieveContents [
	self url ifNil: [ ^ self ].
	self fromString: (self url addPathSegment: 'download') retrieveContents.
	^ self.
]
{ #category : #'as yet unclassified' }
HedgeDoc >> retrieveHtmlContents [
	| htmlContents |
	self url ifNil: [ ^ self ].
	htmlContents := self htmlUrl.
	^ htmlContents retrieveContents
]
{ #category : #'as yet unclassified' }
HedgeDoc >> saveContentsToFile: aFileLocator [
	self url ifNil: [ ^ self ].
	^ (self url addPathSegment: 'download') saveContentsToFile: aFileLocator
]
{ #category : #'as yet unclassified' }
HedgeDoc >> saveHtmlContentsToFile: aFileLocator [
	self url ifNil: [ ^ self ].
	^ self htmlUrl saveContentsToFile: aFileLocator
]
{ #category : #accessing }
HedgeDoc >> server [
	^ server
]
{ #category : #accessing }
HedgeDoc >> server: aUrlString [
	server := aUrlString
]
{ #category : #accessing }
HedgeDoc >> url [
	^ url asUrl
]
{ #category : #accessing }
HedgeDoc >> url: anObject [
	| tempUrl html |
	tempUrl := anObject asZnUrl.
	html := XMLHTMLParser parse: tempUrl retrieveContents.
	(html xpath: '//head/meta[@name="application-name"][@content = "HedgeDoc - Ideas grow better together"]') isEmpty
		ifTrue: [ self inform: 'Not a hedgedoc url'.
			url := nil ].
	self metadata at: 'title' put: tempUrl firstPathSegment.
	server := tempUrl host.
	url := anObject
]
{ #category : #visiting }
HedgeDoc >> visit [
	WebBrowser openOn: self server, '/', self pad.
]
{ #category : #transformation }
HedgeDoc >> youtubeEmbeddedLinksToMarkdeepFormat [
	"I replace embedded YouTube links in HedgeDoc format with their Markdeep format equivalents."
	| linkDataCollection |
	linkDataCollection := (HedgeDocGrammar new youtubeEmbeddedLink parse: self contents)
		collect: [ :each | | parsedLink |
			parsedLink := OrderedCollection new.
			parsedLink
				add: ('' join: (each collect: [ :s | s value]));
				add: '![](https://youtu.be/',
					each second value trimmed , ')';
				add: (each first start to: each third stop);
				yourself ].
	linkDataCollection do: [ :each |
		self contents: (self contents
			copyReplaceAll: each first with: each second) ].
	^ self
]
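
A minimal sketch of the round trip the class comment describes, assuming a reachable HedgeDoc pad; the pad URL is hypothetical:

| doc page |
"Fetch a HedgeDoc pad (hypothetical URL) and turn it into a Lepiter page."
doc := HedgeDoc fromLink: 'https://docutopia.tupale.co/example-pad'.
page := doc
	youtubeEmbeddedLinksToMarkdeepFormat;
	asLePage.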

View File

@@ -1,36 +1,36 @@
Class {
	#name : #HedgeDocExamples,
	#superclass : #Object,
	#category : #'MiniDocs-Examples'
}
{ #category : #accessing }
HedgeDocExamples >> hedgeDocReplaceYoutubeEmbeddedLinkExample [
	<gtExample>
	| aSampleString hedgedocDoc hedgedocDocLinksReplaced |
	aSampleString := '---
breaks: false
---
# Titulo
Un texto de ejemplo
# Enlaces youtube
{%youtube 1aw3XmTqFXA %}
otro video
{%youtube U7mpXaLN9Nc %}'.
	hedgedocDoc := HedgeDoc new
		contents: aSampleString.
	hedgedocDocLinksReplaced := HedgeDoc new contents: aSampleString; youtubeEmbeddedLinksToMarkdeepFormat.
	self assert: (hedgedocDoc contents
		includesSubstring: '{%youtube 1aw3XmTqFXA %}' ).
	self assert: (hedgedocDocLinksReplaced contents
		includesSubstring: '![](https://youtu.be/1aw3XmTqFXA)' ).
	^ { 'Original' -> hedgedocDoc .
		'Replaced' -> hedgedocDocLinksReplaced } asDictionary
]

View File

@@ -1,42 +1,42 @@
Class {
	#name : #HedgeDocGrammar,
	#superclass : #PP2CompositeNode,
	#instVars : [
		'youtubeEmbeddedLink'
	],
	#category : #'MiniDocs-Model'
}
{ #category : #accessing }
HedgeDocGrammar >> metadataAsYAML [
	"I parse the header of the hedgedoc document for YAML metadata."
	^ '---' asPParser token, #any asPParser starLazy token, '---' asPParser token
]
{ #category : #accessing }
HedgeDocGrammar >> start [
	| any |
	any := #any asPParser.
	^ (self metadataAsYAML / any starLazy), youtubeEmbeddedLink
]
{ #category : #accessing }
HedgeDocGrammar >> youtubeEmbeddedLink [
	"I parse the youtube embedded links in a hedgedoc document."
	| link linkSea |
	link := self youtubeEmbeddedLinkOpen,
		#any asPParser starLazy token,
		self youtubeEmbeddedLinkClose.
	linkSea := link islandInSea star.
	^ linkSea
]
{ #category : #accessing }
HedgeDocGrammar >> youtubeEmbeddedLinkClose [
	^ '%}' asPParser token
]
{ #category : #accessing }
HedgeDocGrammar >> youtubeEmbeddedLinkOpen [
	^ '{%youtube' asPParser token
]

View File

@@ -1,19 +1,19 @@
Class {
	#name : #HedgeDocGrammarExamples,
	#superclass : #Object,
	#category : #'MiniDocs-Examples'
}
{ #category : #accessing }
HedgeDocGrammarExamples >> hedgeDocParseYoutubeEmbeddedLinkExample [
	<gtExample>
	| aSampleString parsedStringTokens parsedCollection |
	aSampleString := '{%youtube 1aw3XmTqFXA %}'.
	parsedStringTokens := HedgeDocGrammar new youtubeEmbeddedLink parse: aSampleString.
	parsedCollection := parsedStringTokens first.
	self assert: parsedCollection size equals: 3.
	self assert: parsedCollection first value equals: '{%youtube'.
	self assert: parsedCollection second class equals: PP2Token.
	self assert: parsedCollection third value equals: '%}'.
	^ parsedStringTokens
]

View File

@@ -1,15 +1,15 @@
Class {
	#name : #HedgeDocGrammarTest,
	#superclass : #PP2CompositeNodeTest,
	#category : #'MiniDocs-Model'
}
{ #category : #accessing }
HedgeDocGrammarTest >> parserClass [
	^ HedgeDocGrammar
]
{ #category : #accessing }
HedgeDocGrammarTest >> testYoutubeEmbeddedLink [
	^ self parse: '{%youtube U7mpXaLN9Nc %}' rule: #youtubeEmbeddedLink
]

View File

@@ -1,26 +1,26 @@
Extension { #name : #LeChangesSnippet }
{ #category : #'*MiniDocs' }
LeChangesSnippet >> metadataUpdate [
	| createEmailSanitized editEmailSanitized |
	createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
	editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
	^ OrderedDictionary new
		at: 'id' put: self uidString;
		at: 'parent' put: self parent uuid;
		at: 'created' put: self createTime asString;
		at: 'modified' put: self latestEditTime asString;
		at: 'creator' put: createEmailSanitized;
		at: 'modifier' put: editEmailSanitized;
		yourself
]
{ #category : #'*MiniDocs' }
LeChangesSnippet >> sanitizeMetadata [
	self metadata keysAndValuesDo: [:k :v |
		(v includesAny: #($< $>))
			ifTrue: [
				self metadata at: k put: (v copyWithoutAll: #($< $>))
			]
	]
]

View File

@@ -1,21 +1,21 @@
Extension { #name : #LeCodeSnippet }
{ #category : #'*MiniDocs' }
LeCodeSnippet >> metadataUpdate [
	| surrogate |
	self parent
		ifNil: [ surrogate := nil]
		ifNotNil: [
			self parent isString
				ifTrue: [ surrogate := self parent]
				ifFalse: [ surrogate := self parent uidString ]
		].
	^ OrderedDictionary new
		at: 'id' put: self uidString;
		at: 'parent' put: surrogate;
		at: 'created' put: self createTime asString;
		at: 'modified' put: self latestEditTime asString;
		at: 'creator' put: self createEmail asString withoutXMLTagDelimiters;
		at: 'modifier' put: self editEmail asString withoutXMLTagDelimiters;
		yourself
]

View File

@@ -1,313 +1,313 @@
Extension { #name : #LeDatabase }
{ #category : #'*MiniDocs' }
LeDatabase >> addPage2FromMarkdeep: markdeepDocTree withRemote: externalDocLocation [
	| newPage |
	"^ { snippets . page }"
	"Rebuilding partial subtrees"
	"Adding unrooted subtrees to the page"
	"^ newPage"
	newPage := self
		rebuildPageFromMarkdeep: markdeepDocTree
		withRemote: externalDocLocation.
	newPage
		childrenDo: [ :snippet |
			(self hasBlockUID: snippet uid)
				ifTrue: [ | existingPage |
					existingPage := self pages
						detect: [ :pageTemp | pageTemp includesSnippetUid: snippet uid ].
					self importErrorForLocal: existingPage withRemote: externalDocLocation.
					^ self ]
				ifFalse: [ snippet database: self.
					self registerSnippet: snippet ] ].
	self addPage: newPage.
	^ newPage
]
{ #category : #'*MiniDocs' }
LeDatabase >> addPageCopy: aLePage [
	| pageTitle timestamp shortID page |
	timestamp := DateAndTime now asString.
	pageTitle := 'Copy of ', aLePage title.
	page := aLePage duplicatePageWithNewName: pageTitle, timestamp.
	shortID := '(id: ', (page uid asString copyFrom: 1 to: 8), ')'.
	page title: (page title copyReplaceAll: timestamp with: shortID).
	^ page
]
{ #category : #'*MiniDocs' }
LeDatabase >> addPageFromMarkdeep: markdeepDocTree withRemote: externalDocLocation [
	| remoteMetadata divSnippets dataSnippets snippets page |
	divSnippets := (markdeepDocTree xpath: '//div[@st-class]') asOrderedCollection
		collect: [ :xmlElement | xmlElement postCopy ].
	remoteMetadata := Markdeep new metadataFromXML: markdeepDocTree.
	remoteMetadata at: 'origin' put: externalDocLocation.
	dataSnippets := self sanitizeMarkdeepSnippets: divSnippets withMetadata: remoteMetadata.
	snippets := dataSnippets collect: [ :each | each asLepiterSnippet ].
	page := LePage new.
	page
		title: (remoteMetadata at: 'title' ifAbsent: [ page detectMarkdeepTitleFrom: markdeepDocTree ]);
		basicUid: (UUID fromString36: (remoteMetadata at: 'id' ifAbsent: [UUID new asString36]));
		createTime: (LeTime new
			time: (remoteMetadata at: 'created' ifAbsent: [ DateAndTime now]) asDateAndTime);
		editTime: (LeTime new
			time: (remoteMetadata at: 'modified' ifAbsent: [ DateAndTime now]) asDateAndTime);
		latestEditTime: (LeTime new
			time: (remoteMetadata at: 'modified' ifAbsent: [ DateAndTime now]) asDateAndTime);
		createEmail: (remoteMetadata at: 'creator' ifAbsent: [ 'unknown' ]);
		editEmail: (remoteMetadata at: 'modifier' ifAbsent: [ 'unknown' ]).
	snippets do: [ :snippet | "| currentParent |"
		page addSnippet: snippet.
		"currentParent := page detectParentSnippetWithUid: (snippet metadata at: 'parent').
		snippet parent: currentParent."
	].
	page children
		do: [ :snippet |
			(self hasBlockUID: snippet uid)
				ifTrue: [ | existingPage |
					existingPage := self pages
						detect: [ :pageTemp | pageTemp includesSnippetUid: snippet uid ].
					self importErrorForLocal: existingPage withRemote: externalDocLocation.
					^ self ]
				ifFalse: [ snippet database: self.
					self registerSnippet: snippet ] ].
	self addPage: page.
	^ page
]
{ #category : #'*MiniDocs' }
LeDatabase >> addPageFromMarkdeepUrl: aString [
	| page |
	page := self detectLocalPageForRemote: aString.
	page
		ifNotNil: [ :arg |
			self importErrorForLocal: page withRemote: aString.
			^ self errorCardFor: page uidString ].
	^ self addPageFromMarkdeep: (self docTreeForLink: aString) withRemote: aString
]
{ #category : #'*MiniDocs' }
LeDatabase >> detectLocalPageForRemote: markdeepDocUrl [
	| markdeepHelper id remoteMetadata docTree |
	markdeepHelper := Markdeep new.
	docTree := self docTreeForLink: markdeepDocUrl.
	remoteMetadata := markdeepHelper metadataFromXML: docTree.
	id := remoteMetadata at: 'id' ifAbsent: [ nil ].
	^ self pageWithID: id ifAbsent: [ ^ nil ].
]
{ #category : #'*MiniDocs' }
LeDatabase >> docTreeForLink: aString [
	^ (XMLHTMLParser on: aString asUrl retrieveContents) parseDocument
]
{ #category : #'*MiniDocs' }
LeDatabase >> errorCardFor: errorKey [
	| keepButton overwriteButton loadCopyButton errorMessageUI localPage |
	localPage := self pageWithID: errorKey.
	keepButton := BrButton new
		aptitude: BrGlamorousButtonWithIconAndLabelAptitude;
		label: 'Keep existing local page';
		icon: BrGlamorousVectorIcons cancel;
		margin: (BlInsets left: 10);
		action: [ :aButton |
			aButton phlow spawnObject: localPage.
			self errors removeKey: errorKey
		].
	overwriteButton := BrButton new
		aptitude: BrGlamorousButtonWithIconAndLabelAptitude;
		label: 'Overwrite with remote page';
		icon: BrGlamorousVectorIcons edit;
		action: [ :aButton |
			self removePage: localPage.
			aButton phlow spawnObject: (self addPageFromMarkdeepUrl: (self errors at: errorKey at: 'remote')).
			self errors removeKey: errorKey
		];
		margin: (BlInsets left: 10).
	loadCopyButton := BrButton new
		aptitude: BrGlamorousButtonWithIconAndLabelAptitude;
		label: 'Load remote page as a copy';
		icon: BrGlamorousVectorIcons changes;
		action: [ :aButton | self ];
		margin: (BlInsets left: 10).
	errorMessageUI := BrEditor new
		aptitude: BrGlamorousRegularEditorAptitude new ;
		text: (self errors at: errorKey at: 'message');
		vFitContent.
	^ BrHorizontalPane new
		matchParent;
		alignCenter;
		addChild: errorMessageUI;
		addChild: keepButton;
		addChild: overwriteButton;
		addChild: loadCopyButton
]
{ #category : #'*MiniDocs' }
LeDatabase >> errors [
	^ self optionAt: 'errors' ifAbsentPut: [ Dictionary new ]
]
{ #category : #'*MiniDocs' }
LeDatabase >> gtViewErrorDetailsOn: aView [
	<gtView>
	^ aView explicit
		title: 'Errors' translated;
		priority: 5;
		stencil: [ | container |
			container := BlElement new
				layout: BlFlowLayout new;
				constraintsDo: [ :c |
					c vertical fitContent.
					c horizontal matchParent ];
				padding: (BlInsets all: 10).
			container
				addChildren: (self errorCardFor: self errors)
		].
]
{ #category : #'*MiniDocs' }
LeDatabase >> gtViewErrorDetailsOn: aView withKey: errorKey [
	<gtView>
	^ aView explicit
		title: 'Errors beta' translated;
		priority: 5;
		stencil: [ | container |
			container := BlElement new
				layout: BlFlowLayout new;
				constraintsDo: [ :c |
					c vertical fitContent.
					c horizontal matchParent ];
				padding: (BlInsets all: 10).
			container
				addChildren: (self errorCardFor: errorKey)
		].
]
{ #category : #'*MiniDocs' }
LeDatabase >> importDocumentFrom: aURL [
	| doc |
	"Using the file extension in the URL as a cheap (non-robust) way of detecting the kind of document.
	Better file type detection should be implemented in the future."
	(aURL endsWith: '.md.html') ifTrue: [ ^ self addPageFromMarkdeepUrl: aURL ].
	doc := HedgeDoc fromLink: aURL asString.
	^ self addPage: doc asLePage
]
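A minimal usage sketch for importDocumentFrom:, against whichever LeDatabase you already have at hand; the document URL is hypothetical:

| database |
"Markdeep documents are detected by their '.md.html' extension; anything else is treated as a HedgeDoc pad."
database := LeDatabase new.
database importDocumentFrom: 'https://mutabit.com/repos.fossil/example/doc/tip/index.md.html'.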
{ #category : #'*MiniDocs' }
LeDatabase >> importErrorForLocal: page withRemote: externalDocLocation [
| message id error |
id := page uidString.
message := String streamContents: [ :stream |
stream
nextPutAll: 'IMPORTATION ERROR: A page with
';
nextPut: Character lf;
nextPutAll: ' id: ' , id;
nextPut: Character lf;
nextPutAll: ' title: ' , page contentAsString;
nextPut: Character lf;
nextPut: Character lf;
nextPutAll: 'already exists in this database and includes overlapping contents';
nextPut: Character lf;
nextPutAll: 'with the page you are trying to import from:
';
nextPut: Character lf;
nextPutAll: externalDocLocation;
nextPut: Character lf;
nextPut: Character lf;
nextPutAll:
'Please choose one of the following options to address the issue:
' ].
error := Dictionary new
at: 'remote' put: externalDocLocation;
at: 'message' put: message;
yourself.
self errors at: id put: error.
^ self errors at: id.
]
{ #category : #'*MiniDocs' }
LeDatabase >> options [
^ options
]
{ #category : #'*MiniDocs' }
LeDatabase >> previewSanitizedPageFromMarkdeep: markdeepDocTree withRemote: externalDocLocation [
| remoteMetadata divSnippets divSnippetsSanitized |
divSnippets := (markdeepDocTree xpath: '//div[@st-class]') asOrderedCollection
collect: [ :xmlElement | xmlElement postCopy ].
remoteMetadata := Markdeep new metadataFromXML: markdeepDocTree.
remoteMetadata at: 'origin' put: externalDocLocation.
divSnippetsSanitized := self sanitizeMarkdeepSnippets: divSnippets withMetadata: remoteMetadata.
^ { divSnippets . divSnippetsSanitized . remoteMetadata }
]
{ #category : #'*MiniDocs' }
LeDatabase >> rebuildPageFromMarkdeep: markdeepDocTree withRemote: externalDocLocation [
| newPage snippets divSnippets remoteMetadata dataSnippets |
divSnippets := (markdeepDocTree xpath: '//div[@st-class]') asOrderedCollection
collect: [ :xmlElement | xmlElement postCopy ].
remoteMetadata := Markdeep new metadataFromXML: markdeepDocTree.
remoteMetadata at: 'origin' put: externalDocLocation.
dataSnippets := self
sanitizeMarkdeepSnippets: divSnippets
withMetadata: remoteMetadata.
snippets := dataSnippets collect: [ :each | each asLepiterSnippet ].
newPage := LePage new
title: (remoteMetadata at: 'title');
basicUid: (UUID fromString36: (remoteMetadata at: 'id'));
createTime: (LeTime new time: (remoteMetadata at: 'created') asDateAndTime);
editTime: (LeTime new time: (remoteMetadata at: 'modified') asDateAndTime);
latestEditTime: (LeTime new time: (remoteMetadata at: 'modified') asDateAndTime);
createEmail: (remoteMetadata at: 'creator');
editEmail: (remoteMetadata at: 'modifier'). "^ { snippets . page }" "Rebuilding partial subtrees"
snippets
do: [ :currentSnippet |
| parentSnippet |
parentSnippet := snippets
detect: [ :item | item uid asString = currentSnippet parent ]
ifNone: [ parentSnippet := 'unrooted' ].
currentSnippet parent: parentSnippet.
parentSnippet class = ByteString
ifFalse: [ parentSnippet children addChild: currentSnippet ] ]. "Adding unrooted subtrees to the page"
"^ { unrooted . newPage }."
snippets
select: [ :each | each parent = 'unrooted' ]
thenDo: [ :unrooted | newPage addSnippet: unrooted ].
^ newPage
]
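"Usage sketch (an assumption, not part of the changeset): rebuilding a local LePage from a
remote Markdeep export and adding it to a database. The URL is a hypothetical placeholder.

	| db url docTree page |
	db := LeDatabase new.
	url := 'https://example.org/notes/my-notebook.md.html'.
	docTree := GrafoscopioUtils xmlFromUrl: url.
	page := db rebuildPageFromMarkdeep: docTree withRemote: url.
	db addPage: page.
"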
{ #category : #'*MiniDocs' }
LeDatabase >> sanitizeMarkdeepSnippets: divSnippets withMetadata: remoteMetadata [
^ divSnippets collectWithIndex: [:markdeepDiv :i | | snippetData creationTime modificationTime timestampWarning |
snippetData := markdeepDiv asSnippetDictionary.
creationTime := snippetData at: 'created'.
modificationTime := snippetData at: 'modified'.
timestampWarning := [:timestamp |
'Modified timestamps: the ', timestamp, ' date and time was nil and has been replaced. See "origin" metadata for more historical traceability information.'
].
(creationTime = 'nil' and: [ modificationTime ~= 'nil' ])
ifTrue: [
snippetData redefineTimestampsBefore: modificationTime.
snippetData addErrata: (timestampWarning value: 'creation').
snippetData at: 'origin' put: (remoteMetadata at: 'origin').
].
(creationTime = 'nil' and: [ modificationTime = 'nil' ])
ifTrue: [ | timeDiff |
timeDiff := divSnippets size - i. "Suggesting that the last snippets were modified after the first ones."
modificationTime := (remoteMetadata at: 'created') asDateAndTime - timeDiff seconds.
snippetData redefineTimestampsBefore: modificationTime.
snippetData addErrata: (timestampWarning value: 'creation').
snippetData addErrata: (timestampWarning value: 'modification').
snippetData at: 'origin' put: (remoteMetadata at: 'origin').
].
snippetData.
]
]
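"Usage sketch (an assumption, not part of the changeset): previewing how the div snippets of
a remote Markdeep document would be sanitized before committing to an import. The last
element of the returned array is the remote metadata dictionary, including the 'origin' key
added above.

	| url docTree |
	url := 'https://example.org/notes/my-notebook.md.html'.
	docTree := GrafoscopioUtils xmlFromUrl: url.
	LeDatabase new previewSanitizedPageFromMarkdeep: docTree withRemote: url.
"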
@ -1,26 +1,26 @@
Extension { #name : #LeDockerSnippet }
{ #category : #'*MiniDocs' }
LeDockerSnippet >> metadataUpdate [
| createEmailSanitized editEmailSanitized |
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
^ OrderedDictionary new
at: 'id' put: self uidString;
at: 'parent' put: self parent uuid;
at: 'created' put: self createTime asString;
at: 'modified' put: self latestEditTime asString;
at: 'creator' put: createEmailSanitized;
at: 'modifier' put: editEmailSanitized;
yourself
]
{ #category : #'*MiniDocs' }
LeDockerSnippet >> sanitizeMetadata [
self metadata keysAndValuesDo: [:k :v |
(v includesAny: #($< $>))
ifTrue: [
self metadata at: k put: (v copyWithoutAll: #($< $>))
]
]
]
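"Usage sketch (an assumption, not part of the changeset): metadataUpdate strips XML tag
delimiters from the creator and modifier emails, so an author recorded as
'<offray@mutabit.com>' is exported as 'offray@mutabit.com', and sanitizeMetadata applies the
same cleanup to any other metadata value. The same metadataUpdate / sanitizeMetadata pair is
repeated below for the other snippet classes that MiniDocs exports. Assuming aSnippet is a
snippet already attached to a page:

	aSnippet sanitizeMetadata.
	aSnippet metadataUpdate.
"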
@ -1,32 +1,32 @@
Extension { #name : #LeExampleSnippet }
{ #category : #'*MiniDocs' }
LeExampleSnippet >> asMarkdeep [
^ (WriteStream on: '') contents
]
{ #category : #'*MiniDocs' }
LeExampleSnippet >> metadataUpdate [
| createEmailSanitized editEmailSanitized |
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
^ OrderedDictionary new
at: 'id' put: self uidString;
at: 'parent' put: self parent uuid;
at: 'created' put: self createTime asString;
at: 'modified' put: self latestEditTime asString;
at: 'creator' put: createEmailSanitized;
at: 'modifier' put: editEmailSanitized;
yourself
]
{ #category : #'*MiniDocs' }
LeExampleSnippet >> sanitizeMetadata [
self metadata keysAndValuesDo: [:k :v |
(v includesAny: #($< $>))
ifTrue: [
self metadata at: k put: (v copyWithoutAll: #($< $>))
]
]
]
@ -1,26 +1,26 @@
Extension { #name : #LeGitHubSnippet }
{ #category : #'*MiniDocs' }
LeGitHubSnippet >> metadataUpdate [
| createEmailSanitized editEmailSanitized |
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
^ OrderedDictionary new
at: 'id' put: self uidString;
at: 'parent' put: self parent uuid;
at: 'created' put: self createTime asString;
at: 'modified' put: self latestEditTime asString;
at: 'creator' put: createEmailSanitized;
at: 'modifier' put: editEmailSanitized;
yourself
]
{ #category : #'*MiniDocs' }
LeGitHubSnippet >> sanitizeMetadata [
self metadata keysAndValuesDo: [:k :v |
(v includesAny: #($< $>))
ifTrue: [
self metadata at: k put: (v copyWithoutAll: #($< $>))
]
]
]
@ -1,8 +1,8 @@
Extension { #name : #LeHeaderNode }
{ #category : #'*MiniDocs' }
LeHeaderNode >> headerFullName [
^ self topParent completeSource
copyFrom: self startPosition
to: self stopPosition
]
@ -1,56 +1,56 @@
Extension { #name : #LeHomeDatabaseHeaderElement }
{ #category : #'*MiniDocs' }
LeHomeDatabaseHeaderElement >> importMinidocsButtonElement [
^ self userData at: 'importMinidocsButtonElement' ifAbsentPut: [ self newImportMiniDocsButton]
]
{ #category : #'*MiniDocs' }
LeHomeDatabaseHeaderElement >> initialize [
super initialize.
self initializeEditableTitleElement.
self initializeButtons.
self addChild: self toolbarElement as: #toolbar.
self toolbarElement
addItem: self editableTitleElement;
addItem: self newAddNewPageButton;
addItem: self removeButtonElement;
addItem: self importButtonElement;
addItem: self exportButtonElement;
addItem: self importMinidocsButtonElement.
self addAptitude: (BrLayoutResizerAptitude new
hInherit;
vAnyToFitContent;
hInherit: self toolbarElement;
vAnyToFitContent: self toolbarElement).
]
{ #category : #'*MiniDocs' }
LeHomeDatabaseHeaderElement >> initializeButtons [
self initializeRemoveButton.
self initializeImportButton.
self initializeExportButton.
self initializeMiniDocsImportButton.
]
{ #category : #'*MiniDocs' }
LeHomeDatabaseHeaderElement >> initializeMiniDocsImportButton [
self userData at: 'importMinidocsButtonElement' put: self newImportMiniDocsButton.
]
{ #category : #'*MiniDocs' }
LeHomeDatabaseHeaderElement >> newImportMiniDocsButton [
^ LeMiniDocsImport new
tooltip: 'Import document from link';
contentExtent: 200 @ 30
]
{ #category : #'*MiniDocs' }
LeHomeDatabaseHeaderElement >> updateToolbarButtons [
self updateRemoveButtonElement.
self exportButtonElement database: self database.
self importButtonElement database: self database.
self importMinidocsButtonElement database: self database.
]
@ -1,26 +1,26 @@
Extension { #name : #LeJenkinsSnippet }
{ #category : #'*MiniDocs' }
LeJenkinsSnippet >> metadataUpdate [
| createEmailSanitized editEmailSanitized |
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
^ OrderedDictionary new
at: 'id' put: self uidString;
at: 'parent' put: self parent uuid;
at: 'created' put: self createTime asString;
at: 'modified' put: self latestEditTime asString;
at: 'creator' put: createEmailSanitized;
at: 'modifier' put: editEmailSanitized;
yourself
]
{ #category : #'*MiniDocs' }
LeJenkinsSnippet >> sanitizeMetadata [
self metadata keysAndValuesDo: [:k :v |
(v includesAny: #($< $>))
ifTrue: [
self metadata at: k put: (v copyWithoutAll: #($< $>))
]
]
]
@ -1,89 +1,89 @@
Class {
#name : #LeMiniDocsImport,
#superclass : #BrButton,
#instVars : [
'contentExtent',
'database'
],
#category : #'MiniDocs-UI'
}
{ #category : #accessing }
LeMiniDocsImport >> contentExtent [
^ contentExtent
]
{ #category : #accessing }
LeMiniDocsImport >> contentExtent: aPoint [
self
assert: [ aPoint isNotNil ]
description: [ 'Extent must be non-nil' ].
contentExtent := aPoint
]
{ #category : #accessing }
LeMiniDocsImport >> createDropdownExpandedHandleButton [
^ BrButton new
icon: BrGlamorousVectorIcons downwards;
label: self tooltip;
aptitude: BrGlamorousButtonWithIconAndLabelAptitude
]
{ #category : #accessing }
LeMiniDocsImport >> createURLeditable [
| base editable |
base := BlElement new
background: (Color white);
size: 200 @ 30;
margin: (BlInsets all: 10);
yourself.
editable := BrEditableLabel new
aptitude: BrGlamorousEditableLabelAptitude new glamorousRegularFontAndSize;
text: 'Document link';
switchToEditor.
editable when: BrEditorAcceptWish do: [ :aWish |
self importDocumentFrom: aWish text asString.
].
base addChild: editable.
^ base
]
{ #category : #accessing }
LeMiniDocsImport >> database [
^ database
]
{ #category : #accessing }
LeMiniDocsImport >> database: aLeDatabase [
database := aLeDatabase
]
{ #category : #accessing }
LeMiniDocsImport >> importDocumentFrom: aURL [
^ self database importDocumentFrom: aURL.
]
{ #category : #accessing }
LeMiniDocsImport >> initialize [
super initialize.
self
icon: BrGlamorousVectorIcons downwards;
label: 'Add MiniDocs';
aptitude: BrGlamorousButtonWithIconAndLabelAptitude.
self addAptitude: (BrGlamorousWithDropdownAptitude
handle: [ self createDropdownExpandedHandleButton ]
content: [ self createURLeditable ]).
self aptitude - BrGlamorousButtonExteriorAptitude.
]
{ #category : #accessing }
LeMiniDocsImport >> tooltip [
^ self label
]
{ #category : #accessing }
LeMiniDocsImport >> tooltip: aString [
self label: aString
]
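"Usage sketch (an assumption, not part of the changeset): LeMiniDocsImport is the toolbar
button wired into LeHomeDatabaseHeaderElement above; it shows an editable URL field in a
dropdown and forwards the accepted link to the database importer. A standalone instance can
be configured like this:

	LeMiniDocsImport new
		database: LeDatabase new;
		tooltip: 'Import document from link';
		contentExtent: 200 @ 30.
"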
@ -1,26 +1,26 @@
Extension { #name : #LeMockedSnippet }
{ #category : #'*MiniDocs' }
LeMockedSnippet >> metadataUpdate [
| createEmailSanitized editEmailSanitized |
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
^ OrderedDictionary new
at: 'id' put: self uidString;
at: 'parent' put: self parent uuid;
at: 'created' put: self createTime asString;
at: 'modified' put: self latestEditTime asString;
at: 'creator' put: createEmailSanitized;
at: 'modifier' put: editEmailSanitized;
yourself
]
{ #category : #'*MiniDocs' }
LeMockedSnippet >> sanitizeMetadata [
self metadata keysAndValuesDo: [:k :v |
(v includesAny: #($< $>))
ifTrue: [
self metadata at: k put: (v copyWithoutAll: #($< $>))
]
]
]
@ -1,6 +1,6 @@
Extension { #name : #LeNullDatabase }
{ #category : #'*MiniDocs' }
LeNullDatabase >> attachmentsDirectory [
^ (FileLocator temp / 'lepiter' / 'attachments') ensureCreateDirectory.
]
@ -1,333 +1,333 @@
Extension { #name : #LePage }
{ #category : #'*MiniDocs' }
LePage >> asHtmlFile [
self asMarkdownFile.
self defaultPandocTemplate exists
ifFalse: [ MarkupFile installTemplate: 'https://mutabit.com/repos.fossil/mutabit/doc/trunk/plantillas/Pandoc/clean-menu-mod.html' into: self defaultPandocTemplate parent ].
OSSUnixSubprocess new
command: 'pandoc';
arguments: {
self markdownFileName. '-o'. self htmlFileName .
'--toc' .
'--template=', self defaultPandocTemplate basenameWithoutExtension };
workingDirectory: self storage fullName;
runAndWaitOnExitDo: [ :process :outString | ^ self storage / self htmlFileName].
]
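"Usage sketch (not part of the changeset): asHtmlFile first exports the page as Markdown and
then shells out to pandoc via OSSUnixSubprocess, so it assumes a Unix-like host with pandoc
on the PATH; the Pandoc template is downloaded on first use. Assuming aPage is a LePage
stored in a database:

	aPage asMarkdownFile.
	aPage asHtmlFile.
"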
{ #category : #'*MiniDocs' }
LePage >> asMarkdeep [
| bodyStream markdeep |
bodyStream := '' writeStream.
self preorderTraversal
do: [ :snippet | bodyStream nextPutAll: snippet asMarkdeep ].
markdeep := Markdeep new
title: self title;
body: bodyStream contents;
metadata: self metadata;
file: self storage / self markdeepFileName;
navTop: self navTop.
self metadata
at: 'authors'
ifPresent: [ :author | markdeep metadata at: 'authors' put: author ].
self metadata
at: 'version'
ifPresent: [ :version | markdeep metadata at: 'version' put: version ].
markdeep head: nil.
^ markdeep
]
{ #category : #'*MiniDocs' }
LePage >> asMarkdeepFile [
^ self asMarkdeep notifyExportAsFileOn: self storage / self markdeepFileName
]
{ #category : #'*MiniDocs' }
LePage >> asMarkdown [
| bodyStream markdown |
bodyStream := '' writeStream.
bodyStream
nextPutAll: '# ', self title; cr; cr.
self preorderTraversal
do: [ :snippet | bodyStream nextPutAll: snippet asMarkdown ].
markdown := Markdown new
contents: bodyStream contents demoteMarkdownHeaders;
metadata: (self metadata at: 'original' ifAbsentPut: Dictionary new).
^ markdown
]
{ #category : #'*MiniDocs' }
LePage >> asMarkdownFile [
| folder |
folder := self storage.
^ MarkupFile exportAsFileOn: folder / self markdownFileName containing: self asMarkdownWithMetadataWrappers contents
]
{ #category : #'*MiniDocs' }
LePage >> asMarkdownWithMetadataWrappers [
| bodyStream markdown |
bodyStream := '' writeStream.
bodyStream
nextPutAll: '# ', self title; cr; cr.
self preorderTraversal
do: [ :snippet | bodyStream nextPutAll: snippet asMarkdownWithMetadataWrappers ].
markdown := Markdown new
contents: bodyStream contents demoteMarkdownHeaders;
metadata: (self metadata at: 'original' ifAbsentPut: Dictionary new).
^ markdown
]
{ #category : #'*MiniDocs' }
LePage >> config [
| configFile |
configFile := self storage / 'config.ston'.
configFile exists
ifTrue: [^ STON fromString: configFile contents ]
ifFalse: [ ^ nil ]
]
{ #category : #'*MiniDocs' }
LePage >> defaultPandocTemplate [
^ FileLocator home / '.pandoc' / 'templates' / 'clean-menu-mod.html'
]
{ #category : #'*MiniDocs' }
LePage >> detectMarkdeepTitleFrom: xmlSubtree [
| titleLine |
titleLine := (xmlSubtree nodesCollect: [:node | node contentString ]) first lines
detect: [:line | line includesSubstring: ' **'] ifNone: ['Untitled'].
^ titleLine trimmed trimBoth: [:char | char = $* ]
]
{ #category : #'*MiniDocs' }
LePage >> detectParentSnippetWithUid: uidString [
uidString = self uid asString36 ifTrue: [ ^ self ].
^ self preorderTraversal detect: [ :snippet | snippet uidString = uidString ]
]
{ #category : #'*MiniDocs' }
LePage >> exportMetadataToHead: markdeep [
self metadata
keysAndValuesDo: [ :k :v |
k = 'lang'
ifTrue: [ markdeep head
add: '<meta lang="' , v , '">';
yourself ]
ifFalse: [ markdeep head
add: '<meta name="' , k , '" content="' , v , '">';
yourself ] ]
]
{ #category : #'*MiniDocs' }
LePage >> exportedFileName [
| sanitized |
sanitized := self title asDashedLowercase romanizeAccents copyWithoutAll: #($/ $: $🢒).
^ sanitized , '--' , (self uidString copyFrom: 1 to: 5)
]
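"Usage sketch (an assumption, not part of the changeset): exportedFileName derives the
export basename from the page title plus a short uid prefix, so a page titled
'Pruebas: multiOS' whose uid starts with '3fk2p' would be exported to files named roughly
'pruebas-multios--3fk2p.md' and 'pruebas-multios--3fk2p.md.html'.

	aPage exportedFileName.
	aPage markdownFileName.
	aPage markdeepFileName.
"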
{ #category : #'*MiniDocs' }
LePage >> fromMarkdeepUrl: aString [
| docTree pageMetadata |
docTree := GrafoscopioUtils xmlFromUrl: aString.
pageMetadata := Markdeep new metadataFromXML: docTree.
self
basicUid: (pageMetadata at: 'id');
title: (pageMetadata at: 'title');
createTime: (pageMetadata at: 'created') asDateAndTime;
editTime: (pageMetadata at: 'modified') asDateAndTime;
createEmail: (pageMetadata at: 'creator');
editEmail: (pageMetadata at: 'modifier');
optionAt: 'metadata' put: pageMetadata.
self populateChildrenFrom: (docTree xpath: '//div')
]
{ #category : #'*MiniDocs' }
LePage >> htmlFileName [
^ self exportedFileName, '.html'
]
{ #category : #'*MiniDocs' }
LePage >> latestEditTime: aLeTime [
"Used for adding a LePage to the database from a shared Markdeep LePage version."
latestEditTime := aLeTime
]
{ #category : #'*MiniDocs' }
LePage >> localHostAddress [
| localUrl route |
MiniDocsServer teapot server isRunning ifFalse: [ MiniDocsServer restart ].
route := MiniDocsServer teapot staticRouter prefix joinUsing: '/'.
localUrl := MiniDocsServer teapot server localUrl asString.
^ localUrl, route, '/', self markdeepFileName
]
{ #category : #'*MiniDocs' }
LePage >> markdeepFileName [
^ self markdownFileName , '.html'
]
{ #category : #'*MiniDocs' }
LePage >> markdownFileName [
^ self exportedFileName, '.md'
]
{ #category : #'*MiniDocs' }
LePage >> metadata [
^ self metadataUpdate
]
{ #category : #'*MiniDocs' }
LePage >> metadataUpdate [
^ OrderedDictionary new
at: 'id' put: self uidString;
at: 'title' put: self contentAsString;
at: 'created' put: self createTime greaseString;
at: 'modified' put: self getLatestEditTime greaseString;
at: 'creator' put: self createEmail greaseString;
at: 'modifier' put: self editEmail greaseString;
yourself
]
{ #category : #'*MiniDocs' }
LePage >> navTop [
| topNavFile |
topNavFile := self storage / '_navtop.html'.
topNavFile exists
ifFalse: [ ^ '' ]
ifTrue: [ ^ topNavFile contents ]
]
{ #category : #'*MiniDocs' }
LePage >> olderChild [
"I provide the last edited child node.
I'm useful to recalculate the age of a notebook."
| response |
response := self preorderTraversal first.
self preorderTraversal do: [:current |
current editTime >= response editTime
ifTrue: [ response := current ]
].
^ response
]
{ #category : #'*MiniDocs' }
LePage >> options [
^ options
]
{ #category : #'*MiniDocs' }
LePage >> preorderTraversal [
^ self allChildrenDepthFirst
]
{ #category : #'*MiniDocs' }
LePage >> removeSnippetsMetadata [
self preorderTraversal do: [ :snippet |
(snippet options isNotNil and: [ snippet options includesKey: 'metadata' ])
ifTrue: [ snippet options removeKey: 'metadata' ] ]
]
{ #category : #'*MiniDocs' }
LePage >> sanitizeMetadata [
self allChildrenDepthFirst do: [:snippet | snippet sanitizeMetadata ]
]
{ #category : #'*MiniDocs' }
LePage >> sharedVariablesBindings [
| codeSnippets shared |
codeSnippets := self preorderTraversal select: [:snippet |
snippet class = LePharoSnippet and: [ snippet code includesSubstring: ':=']
].
codeSnippets first in: [:snippet | | context |
context := snippet coder evaluationContext.
snippet coder doItInContext: context.
shared := context bindingStrategy bindings detect: [:each |
each isKindOf: GtSharedVariablesBindings
]
].
codeSnippets asArray allButFirstDo: [:snippet | | context |
context := snippet coder evaluationContext.
context addBindings: shared.
snippet coder doItInContext: context
].
^ shared asDictionary
]
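"Usage sketch (an assumption, not part of the changeset): sharedVariablesBindings evaluates
every Pharo snippet in the page that contains an assignment and collects the variables they
share, so later snippets see names defined in earlier ones; it answers a Dictionary mapping
the shared variable names to their values. Assuming aPage contains such snippets:

	aPage sharedVariablesBindings.
"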
{ #category : #'*MiniDocs' }
LePage >> storage [
| current |
current := self database attachmentsDirectory parent.
self optionAt: 'storage' ifAbsent: [ ^ current ].
(self optionAt: 'storage') ifNil: [ ^ current ].
^ self optionAt: 'storage'
]
{ #category : #'*MiniDocs' }
LePage >> uiAddCopyButtonFor: anAction [
<lePageAction>
^ anAction button
tooltip: 'Copy page';
icon: BrGlamorousVectorIcons changes;
action: [:aButton | aButton phlow spawnObject: (self page database addPageCopy: self page) ]
]
{ #category : #'*MiniDocs' }
LePage >> uiDefineFolderFor: anAction [
<lePageAction>
| folderButton |
folderButton := anAction dropdown
icon: BrGlamorousIcons savetodisk;
tooltip: 'Export folder';
content: [:aButton | BlElement new
background: (Color gray alpha: 0.2);
size: 100 @ 100;
margin: (BlInsets all: 10) ].
^ folderButton
]
{ #category : #'*MiniDocs' }
LePage >> uiExportButtonFor: anAction [
<lePageAction>
^ anAction button
tooltip: 'Export Page';
icon: BrGlamorousVectorIcons down;
action: [:aButton | aButton phlow spawnObject: self page asMarkdeepFile ]
]
{ #category : #'*MiniDocs' }
LePage >> uiRefreshWebPreviewButtonFor: anAction [
<lePageAction>
^ anAction button
tooltip: 'Refresh web view';
icon: BrGlamorousVectorIcons refresh;
action: [
self page asMarkdeep exportAsFileOn: (self page storage / self page markdeepFileName).
GoogleChrome openWindowOn: self page localHostAddress.
"TODO: If Chrome/Chromium are not installed, I should execute:"
"WebBrowser openOn: self page localHostAddress" ]
]
{ #category : #'*MiniDocs' }
LePage >> youngerChild [
"I provide the first created child node.
I'm useful to recalculate the age of a notebook."
| response |
response := self preorderTraversal first.
self preorderTraversal do: [:current |
current createTime <= response createTime
ifTrue: [ response := current ]
].
^ response
]
@ -1,26 +1,26 @@
Extension { #name : #LePharoRewriteSnippet }
{ #category : #'*MiniDocs' }
LePharoRewriteSnippet >> metadataUpdate [
| createEmailSanitized editEmailSanitized |
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
^ OrderedDictionary new
at: 'id' put: self uidString;
at: 'parent' put: self parent uuid;
at: 'created' put: self createTime asString;
at: 'modified' put: self latestEditTime asString;
at: 'creator' put: createEmailSanitized;
at: 'modifier' put: editEmailSanitized;
yourself
]
{ #category : #'*MiniDocs' }
LePharoRewriteSnippet >> sanitizeMetadata [
self metadata keysAndValuesDo: [:k :v |
(v includesAny: #($< $>))
ifTrue: [
self metadata at: k put: (v copyWithoutAll: #($< $>))
]
]
]
@ -1,58 +1,58 @@
Extension { #name : #LePharoSnippet }
{ #category : #'*MiniDocs' }
LePharoSnippet >> contentAsStringCustomized [
| thisObject |
(self tags includes: 'output') ifFalse: [ ^ self contentAsString ].
thisObject := ((self page sharedVariablesBindings) at: self detectObject) value.
^ thisObject perform: self detectMessage trimmed asSymbol.
]
{ #category : #'*MiniDocs' }
LePharoSnippet >> fromMarkdeep: markdeepDiv [
^ markdeepDiv asSnippetDictionary asLepiterSnippet
]
{ #category : #'*MiniDocs' }
LePharoSnippet >> fromString: aString [
self code: aString
]
{ #category : #'*MiniDocs' }
LePharoSnippet >> markdeepCustomCloser [
^ String streamContents: [ :stream |
stream
nextPutAll: '~~~'; lf;
nextPutAll: '</script>'; lf.
]
]
{ #category : #'*MiniDocs' }
LePharoSnippet >> markdeepCustomOpener [
^ String streamContents: [ :stream |
stream
nextPutAll: '<script type="preformatted">'; lf;
nextPutAll: '~~~ Smalltalk'; lf
]
]
{ #category : #'*MiniDocs' }
LePharoSnippet >> markdownCustomCloser [
(self tags includes: 'output') ifTrue: [^ String with: Character lf].
^ String streamContents: [:stream |
stream
nextPutAll: '~~~'; lf
]
]
{ #category : #'*MiniDocs' }
LePharoSnippet >> markdownCustomOpener [
(self tags includes: 'output') ifTrue: [ ^ String with: Character lf ].
^ String
streamContents: [ :stream |
stream
nextPutAll: '~~~ Smalltalk';
lf ]
]
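"Export sketch (an assumption, not part of the changeset): in the Markdeep export a Pharo
snippet's code is wrapped in a preformatted script element plus a '~~~ Smalltalk' fence
built by markdeepCustomOpener and markdeepCustomCloser; in the Markdown export, snippets
tagged with 'output' skip the fence and emit the evaluated result of
contentAsStringCustomized instead.

	aPharoSnippet asMarkdeep.
	aPharoSnippet asMarkdownWithMetadataWrappers.
"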
@ -1,122 +1,122 @@
Extension { #name : #LePictureSnippet }
{ #category : #'*MiniDocs' }
LePictureSnippet >> asMarkdeep [
| output curatedCaption captionLines |
captionLines := self caption lines.
(captionLines size <= 1)
ifTrue: [ curatedCaption := caption ]
ifFalse: [
curatedCaption := WriteStream on: ''.
curatedCaption nextPutAll: captionLines first.
captionLines allButFirstDo: [:line |
curatedCaption nextPutAll: ' ', line ].
curatedCaption := curatedCaption contents.
].
output := WriteStream on: ''.
output
nextPutAll: self metadataDiv;
nextPutAll: '![ ', curatedCaption ,' ](', self urlString, ')';
nextPut: Character lf;
nextPutAll: '</div>';
nextPut: Character lf;
nextPut: Character lf.
^ output contents
]
{ #category : #'*MiniDocs' }
LePictureSnippet >> asMarkdownWithMetadataWrappers [
^ self asMarkdeep
]
{ #category : #'*MiniDocs' }
LePictureSnippet >> contentFrom: markdeepDiv [
| caption width |
caption := markdeepDiv contentString.
width := (markdeepDiv // 'img' @ 'width') stringValue.
self
optionAt: 'caption' put: caption;
optionAt: 'width' put: width.
self urlString: (markdeepDiv // 'img' @ 'src') stringValue.
]
{ #category : #'*MiniDocs' }
LePictureSnippet >> fromMarkdeep: markdeepDiv [
^ markdeepDiv asSnippetDictionary asLepiterSnippet
]
{ #category : #'*MiniDocs' }
LePictureSnippet >> fromString: aStringArray [
"aStringArray should contain as its first element the sanitized caption string and
as its second the full original image link string, which may contain links in the description."
| args urlTemp |
args := aStringArray second splitOn: ']('.
urlTemp := args last.
urlTemp := urlTemp copyFrom: 1 to: urlTemp size - 1.
self caption: aStringArray first.
self urlString: urlTemp.
^ self
]
{ #category : #'*MiniDocs' }
LePictureSnippet >> metadata [
^ self metadataInit
]
{ #category : #'*MiniDocs' }
LePictureSnippet >> metadataDiv [
| output |
output := WriteStream on: ''.
output
nextPutAll: '<div st-class="' , self class greaseString , '"';
nextPut: Character lf;
nextPutAll: ' st-data="' , (STON toStringPretty: self metadata) , '">';
nextPut: Character lf.
^ output contents withInternetLineEndings.
]
{ #category : #'*MiniDocs' }
LePictureSnippet >> metadataInit [
| surrogate |
self parent
ifNil: [ surrogate := nil]
ifNotNil: [ surrogate := self parent uidString ].
^ OrderedDictionary new
at: 'id' put: self uidString;
at: 'parent' put: surrogate;
at: 'url' put: '<!--',self contentAsString, '-->';
at: 'created' put: self createTime asString;
at: 'modified' put: self latestEditTime asString;
at: 'creator' put: self createEmail asString withoutXMLTagDelimiters;
at: 'modifier' put: self editEmail asString withoutXMLTagDelimiters;
yourself
]
{ #category : #'*MiniDocs' }
LePictureSnippet >> metadataUpdate [
| surrogate |
self parent
ifNil: [ surrogate := nil]
ifNotNil: [ surrogate := self parent uidString ].
^ OrderedDictionary new
at: 'id' put: self uidString;
at: 'parent' put: surrogate;
at: 'created' put: self createTime asString;
at: 'modified' put: self latestEditTime asString;
at: 'creator' put: self createEmail asString withoutXMLTagDelimiters;
at: 'modifier' put: self editEmail asString withoutXMLTagDelimiters;
yourself
]
{ #category : #'*MiniDocs' }
LePictureSnippet >> sanitizeMetadata [
self metadata keysAndValuesDo: [:k :v |
(v includesAny: #($< $>))
ifTrue: [
self metadata at: k put: (v copyWithoutAll: #($< $>))
]
]
]
@ -1,26 +1,26 @@
Extension { #name : #LeSmaCCRewriteSnippet }
{ #category : #'*MiniDocs' }
LeSmaCCRewriteSnippet >> metadataUpdate [
| createEmailSanitized editEmailSanitized |
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
^ OrderedDictionary new
at: 'id' put: self uidString;
at: 'parent' put: self parent uuid;
at: 'created' put: self createTime asString;
at: 'modified' put: self latestEditTime asString;
at: 'creator' put: createEmailSanitized;
at: 'modifier' put: editEmailSanitized;
yourself
]
{ #category : #'*MiniDocs' }
LeSmaCCRewriteSnippet >> sanitizeMetadata [
self metadata keysAndValuesDo: [:k :v |
(v includesAny: #($< $>))
ifTrue: [
self metadata at: k put: (v copyWithoutAll: #($< $>))
]
]
]
@ -1,28 +1,28 @@
Extension { #name : #LeSnippet }
{ #category : #'*MiniDocs' }
LeSnippet class >> fromMetaMarkdeep: div [
| className metadata snippet |
className := (div xpath: '@st-class') stringValue.
metadata := STON fromString: (div xpath: '@st-data') stringValue.
snippet := className asClass new.
snippet injectMetadataFrom: metadata.
snippet fromMarkdeep: div.
^ snippet.
]
{ #category : #'*MiniDocs' }
LeSnippet >> metadata [
^ self metadataUpdate
]
{ #category : #'*MiniDocs' }
LeSnippet >> moveToPageTitled: pageName [
| db origin destination thisSnippet |
thisSnippet := self.
db := self page database.
destination := db pageNamed: pageName.
origin := db pageNamed: thisSnippet page title.
origin removeSnippet: thisSnippet.
destination addSnippet: thisSnippet.
]
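"Usage sketch (not part of the changeset): moving a snippet between pages of the same
database by title. Assuming aSnippet belongs to a page and a page titled 'Inbox' already
exists in that database:

	aSnippet moveToPageTitled: 'Inbox'.
"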
@ -1,11 +1,11 @@
Extension { #name : #LeTextCoderSnippetElement }
{ #category : #'*MiniDocs' }
LeTextCoderSnippetElement >> moveToPageTitled: pageName [
| db origin destination |
db := self page database.
destination := db pageNamed: pageName.
origin := db pageNamed: self page title.
origin removeSnippet: self.
destination addSnippet: self.
]
@ -1,55 +1,55 @@
Extension { #name : #LeTextSnippet }
{ #category : #'*MiniDocs' }
LeTextSnippet >> asLePage [
| page title currentSnippet |
title := self contentAsString markdownHeaders associations first value.
title := (title trimBoth: [:char | char = $# ]) trimmed.
page := LePage new
initializeTitle: title.
currentSnippet := LeTextSnippet new
string: self contentAsString.
page addSnippet: currentSnippet.
self database addPage: page.
self childrenDo: [:child |
child moveToPageTitled: page title
].
self removeSelfCommand.
^ page.
]
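"Usage sketch (an assumption, not part of the changeset): promoting a text snippet to its
own page. A snippet whose content starts with the Markdown header '# Field notes' becomes a
new page titled 'Field notes' in the same database, its children are moved under that page,
and the original snippet is removed.

	aTextSnippet asLePage.
"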
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
LeTextSnippet >> fromMarkdeep: markdeepDiv [ LeTextSnippet >> fromMarkdeep: markdeepDiv [
^ markdeepDiv asSnippetDictionary asLepiterSnippet ^ markdeepDiv asSnippetDictionary asLepiterSnippet
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
LeTextSnippet >> fromString: aString [ LeTextSnippet >> fromString: aString [
self string: aString self string: aString
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
LeTextSnippet >> metadata [ LeTextSnippet >> metadata [
^ self metadataUpdate ^ self metadataUpdate
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
LeTextSnippet >> options [ LeTextSnippet >> options [
^ options ^ options
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
LeTextSnippet >> parentId [ LeTextSnippet >> parentId [
self parent ifNil: [ ^ self ]. self parent ifNil: [ ^ self ].
(self parent isString) ifTrue: [^ self parent]. (self parent isString) ifTrue: [^ self parent].
^ self parent uidString. ^ self parent uidString.
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
LeTextSnippet >> taggedWith: aString [ LeTextSnippet >> taggedWith: aString [
(self metadata at: 'tags' ifAbsentPut: [ Set new ]) add: aString. (self metadata at: 'tags' ifAbsentPut: [ Set new ]) add: aString.
^ self metadata at: 'tags' ^ self metadata at: 'tags'
] ]

View File

@ -1,145 +1,145 @@
Extension { #name : #LeTextualSnippet } Extension { #name : #LeTextualSnippet }
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
LeTextualSnippet >> asMarkdeep [ LeTextualSnippet >> asMarkdeep [
"Inspired by Alpine.js and Assembler CSS 'x-' properties, we are going to use "Inspired by Alpine.js and Assembler CSS 'x-' properties, we are going to use
'st-' properties as a way to extend divs' metadata regarding their contents." 'st-' properties as a way to extend divs' metadata regarding their contents."
| output | | output |
output := WriteStream on: ''. output := WriteStream on: ''.
output output
nextPutAll: '<div st-class="' , self class greaseString , '"'; nextPutAll: '<div st-class="' , self class greaseString , '"';
nextPut: Character lf; nextPut: Character lf;
nextPutAll: ' st-data="' , (STON toStringPretty: self metadata) , '">'; nextPutAll: ' st-data="' , (STON toStringPretty: self metadata) , '">';
nextPut: Character lf; nextPut: Character lf;
nextPutAll: self markdeepCustomOpener; nextPutAll: self markdeepCustomOpener;
nextPutAll: self contentAsStringAnnotated; nextPutAll: self contentAsStringAnnotated;
nextPut: Character lf; nextPut: Character lf;
nextPutAll: self markdeepCustomCloser; nextPutAll: self markdeepCustomCloser;
nextPutAll: '</div>'; nextPutAll: '</div>';
nextPut: Character lf; nextPut: Character lf;
nextPut: Character lf. nextPut: Character lf.
^ output contents withInternetLineEndings ^ output contents withInternetLineEndings
] ]
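As a hedged illustration of the 'st-' convention described above, the wrapper that asMarkdeep is expected to emit looks roughly like this (the class name, id, parent and timestamp below are invented):

<div st-class="LeTextSnippet"
 st-data="{ 'id' : 'abc123def456', 'parent' : 'f0e1d2c3b4a5', 'created' : '2024-04-29T17:00:00-05:00' }">
Snippet contents, with annotations already rewritten as <span st-class="..."> elements.
</div>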
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
LeTextualSnippet >> asMarkdown [ LeTextualSnippet >> asMarkdown [
| output | | output |
output := '' writeStream. output := '' writeStream.
output output
nextPutAll: self contentAsStringCustomized; lf. nextPutAll: self contentAsStringCustomized; lf.
^ output contents ^ output contents
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
LeTextualSnippet >> asMarkdownWithMetadataWrappers [ LeTextualSnippet >> asMarkdownWithMetadataWrappers [
"Inspired by Alpine.js and Assembler CSS 'x-' properties, we are going to use "Inspired by Alpine.js and Assembler CSS 'x-' properties, we are going to use
'st-' properties as a way to extend divs' metadata regarding their contents." 'st-' properties as a way to extend divs' metadata regarding their contents."
| output | | output |
output := '' writeStream. output := '' writeStream.
output output
nextPutAll: '<div st-class="', self class asString, '"'; lf; nextPutAll: '<div st-class="', self class asString, '"'; lf;
nextPutAll: ' st-data="', (STON toString: self metadata), '">'; lf; nextPutAll: ' st-data="', (STON toString: self metadata), '">'; lf;
nextPutAll: self markdownCustomOpener; nextPutAll: self markdownCustomOpener;
nextPutAll: self contentAsStringCustomized; lf; nextPutAll: self contentAsStringCustomized; lf;
nextPutAll: self markdownCustomCloser; nextPutAll: self markdownCustomCloser;
nextPutAll: '</div>'; lf; lf. nextPutAll: '</div>'; lf; lf.
^ output contents ^ output contents
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
LeTextualSnippet >> contentAsStringAnnotated [ LeTextualSnippet >> contentAsStringAnnotated [
| annotations substitutions exported pageConfig| | annotations substitutions exported pageConfig|
self ast ifNil: [ ^ self contentAsString ]. self ast ifNil: [ ^ self contentAsString ].
annotations := self ast parts select: [:each | each className includesSubstring: 'AnnotationNode' ]. annotations := self ast parts select: [:each | each className includesSubstring: 'AnnotationNode' ].
annotations ifEmpty: [ ^ self contentAsString ]. annotations ifEmpty: [ ^ self contentAsString ].
substitutions := OrderedDictionary new. substitutions := OrderedDictionary new.
pageConfig := self page config. pageConfig := self page config.
annotations do: [ :each | | key type value color | annotations do: [ :each | | key type value color |
key := each source. key := each source.
type := (key splitOn: ':') first copyWithoutAll: '{{'. type := (key splitOn: ':') first copyWithoutAll: '{{'.
value := key copyFrom: type size + 4 to: key size - 2. value := key copyFrom: type size + 4 to: key size - 2.
pageConfig pageConfig
ifNil: [ color := 'default' ] ifNil: [ color := 'default' ]
ifNotNil: [ | colors | ifNotNil: [ | colors |
colors := pageConfig at: 'annotationColors' ifAbsent: [ nil ]. colors := pageConfig at: 'annotationColors' ifAbsent: [ nil ].
colors ifNotNil: [ colors ifNotNil: [
color := colors at: type ifAbsent: [ colors at: 'defaultColor' ifAbsentPut: ['default'] ] color := colors at: type ifAbsent: [ colors at: 'defaultColor' ifAbsentPut: ['default'] ]
] ]
]. ].
substitutions substitutions
at: key at: key
put: '<span st-class="',type,'" style="color:', color, '">', value,'</span>'. put: '<span st-class="',type,'" style="color:', color, '">', value,'</span>'.
]. ].
exported := self contentAsString. exported := self contentAsString.
substitutions keysAndValuesDo: [:k :v | substitutions keysAndValuesDo: [:k :v |
exported := exported copyReplaceAll: k with: v. exported := exported copyReplaceAll: k with: v.
]. ].
^ exported ^ exported
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
LeTextualSnippet >> contentAsStringCustomized [ LeTextualSnippet >> contentAsStringCustomized [
(self contentAsString beginsWith: '#') (self contentAsString beginsWith: '#')
ifTrue: [ ^ '#', self contentAsString ] ifTrue: [ ^ '#', self contentAsString ]
ifFalse: [ ^ self contentAsString ] ifFalse: [ ^ self contentAsString ]
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
LeTextualSnippet >> markdeepCustomCloser [ LeTextualSnippet >> markdeepCustomCloser [
^ '' ^ ''
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
LeTextualSnippet >> markdeepCustomOpener [ LeTextualSnippet >> markdeepCustomOpener [
^ '' ^ ''
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
LeTextualSnippet >> markdownCustomCloser [ LeTextualSnippet >> markdownCustomCloser [
^ self markdeepCustomCloser ^ self markdeepCustomCloser
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
LeTextualSnippet >> markdownCustomOpener [ LeTextualSnippet >> markdownCustomOpener [
^ self markdeepCustomOpener ^ self markdeepCustomOpener
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
LeTextualSnippet >> metadata [ LeTextualSnippet >> metadata [
^ self metadataUpdate ^ self metadataUpdate
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
LeTextualSnippet >> metadataUpdate [ LeTextualSnippet >> metadataUpdate [
| createEmailSanitized editEmailSanitized | | createEmailSanitized editEmailSanitized |
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters. createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters. editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
^ OrderedDictionary new ^ OrderedDictionary new
at: 'id' put: self uidString; at: 'id' put: self uidString;
at: 'parent' put: self parent uidString; at: 'parent' put: self parent uidString;
at: 'created' put: self createTime asString; at: 'created' put: self createTime asString;
at: 'modified' put: self latestEditTime asString; at: 'modified' put: self latestEditTime asString;
at: 'creator' put: createEmailSanitized; at: 'creator' put: createEmailSanitized;
at: 'modifier' put: editEmailSanitized; at: 'modifier' put: editEmailSanitized;
yourself yourself
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
LeTextualSnippet >> sanitizeMetadata [ LeTextualSnippet >> sanitizeMetadata [
self options ifNil: [^ self ]. self options ifNil: [^ self ].
self options removeKey: 'metadata' ifAbsent: [^ self ]. self options removeKey: 'metadata' ifAbsent: [^ self ].
self metadata keysAndValuesDo: [:k :v | self metadata keysAndValuesDo: [:k :v |
(v asString includesAny: #($< $>)) (v asString includesAny: #($< $>))
ifTrue: [ ifTrue: [
self metadata at: k put: (v asString copyWithoutXMLDelimiters) self metadata at: k put: (v asString copyWithoutXMLDelimiters)
] ]
] ]
] ]
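A small sketch of the intended effect, assuming aSnippet is a hypothetical textual snippet whose options dictionary is already initialized; angle brackets are stripped from every metadata value:

aSnippet metadata at: 'creator' put: '<someone@example.com>'.
aSnippet sanitizeMetadata.
aSnippet metadata at: 'creator'. "expected to answer 'someone@example.com'"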
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
LeTextualSnippet >> tags [ LeTextualSnippet >> tags [
^ self metadata at: 'tags' ifAbsentPut: [ Set new ] ^ self metadata at: 'tags' ifAbsentPut: [ Set new ]
] ]

View File

@ -1,21 +1,21 @@
Extension { #name : #LeUnknownSnippet } Extension { #name : #LeUnknownSnippet }
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
LeUnknownSnippet >> metadataUpdate [ LeUnknownSnippet >> metadataUpdate [
| surrogate | | surrogate |
self parent self parent
ifNil: [ surrogate := nil] ifNil: [ surrogate := nil]
ifNotNil: [ ifNotNil: [
self parent isString self parent isString
ifTrue: [ surrogate := self parent] ifTrue: [ surrogate := self parent]
ifFalse: [ surrogate := self parent uidString ] ifFalse: [ surrogate := self parent uidString ]
]. ].
^ OrderedDictionary new ^ OrderedDictionary new
at: 'id' put: self uidString; at: 'id' put: self uidString;
at: 'parent' put: surrogate; at: 'parent' put: surrogate;
at: 'created' put: self createTime asString; at: 'created' put: self createTime asString;
at: 'modified' put: self latestEditTime asString; at: 'modified' put: self latestEditTime asString;
at: 'creator' put: self createEmail asString; at: 'creator' put: self createEmail asString;
at: 'modifier' put: self editEmail asString; at: 'modifier' put: self editEmail asString;
yourself yourself
] ]

View File

@ -1,26 +1,26 @@
Extension { #name : #LeWardleyMapSnippet } Extension { #name : #LeWardleyMapSnippet }
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
LeWardleyMapSnippet >> metadataUpdate [ LeWardleyMapSnippet >> metadataUpdate [
| createEmailSanitized editEmailSanitized | | createEmailSanitized editEmailSanitized |
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters. createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters. editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
^ OrderedDictionary new ^ OrderedDictionary new
at: 'id' put: self uidString; at: 'id' put: self uidString;
at: 'parent' put: self parent uuid; at: 'parent' put: self parent uuid;
at: 'created' put: self createTime asString; at: 'created' put: self createTime asString;
at: 'modified' put: self latestEditTime asString; at: 'modified' put: self latestEditTime asString;
at: 'creator' put: createEmailSanitized; at: 'creator' put: createEmailSanitized;
at: 'modifier' put: editEmailSanitized; at: 'modifier' put: editEmailSanitized;
yourself yourself
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
LeWardleyMapSnippet >> sanitizeMetadata [ LeWardleyMapSnippet >> sanitizeMetadata [
self metadata keysAndValuesDo: [:k :v | self metadata keysAndValuesDo: [:k :v |
(v includesAny: #($< $>)) (v includesAny: #($< $>))
ifTrue: [ ifTrue: [
self metadata at: k put: (v copyWithoutAll: #($< $>)) self metadata at: k put: (v copyWithoutAll: #($< $>))
] ]
] ]
] ]

View File

@ -1,26 +1,26 @@
Extension { #name : #LeWordSnippet } Extension { #name : #LeWordSnippet }
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
LeWordSnippet >> metadataUpdate [ LeWordSnippet >> metadataUpdate [
| createEmailSanitized editEmailSanitized | | createEmailSanitized editEmailSanitized |
createEmailSanitized := self createEmail asString withoutXMLTagDelimiters. createEmailSanitized := self createEmail asString withoutXMLTagDelimiters.
editEmailSanitized := self editEmail asString withoutXMLTagDelimiters. editEmailSanitized := self editEmail asString withoutXMLTagDelimiters.
^ OrderedDictionary new ^ OrderedDictionary new
at: 'id' put: self uidString; at: 'id' put: self uidString;
at: 'parent' put: self parent uuid; at: 'parent' put: self parent uuid;
at: 'created' put: self createTime asString; at: 'created' put: self createTime asString;
at: 'modified' put: self latestEditTime asString; at: 'modified' put: self latestEditTime asString;
at: 'creator' put: createEmailSanitized; at: 'creator' put: createEmailSanitized;
at: 'modifier' put: editEmailSanitized; at: 'modifier' put: editEmailSanitized;
yourself yourself
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
LeWordSnippet >> sanitizeMetadata [ LeWordSnippet >> sanitizeMetadata [
self metadata keysAndValuesDo: [:k :v | self metadata keysAndValuesDo: [:k :v |
(v includesAny: #($< $>)) (v includesAny: #($< $>))
ifTrue: [ ifTrue: [
self metadata at: k put: (v copyWithoutAll: #($< $>)) self metadata at: k put: (v copyWithoutAll: #($< $>))
] ]
] ]
] ]

View File

@ -1,57 +1,57 @@
Extension { #name : #LeYoutubeReferenceSnippet } Extension { #name : #LeYoutubeReferenceSnippet }
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
LeYoutubeReferenceSnippet >> asMarkdeep [ LeYoutubeReferenceSnippet >> asMarkdeep [
| output | | output |
output := WriteStream on: ''. output := WriteStream on: ''.
output output
nextPutAll: self metadataDiv; nextPutAll: self metadataDiv;
nextPutAll: '![ ', self title, ' | ', self authorName, ' ](',self urlString, ')'; nextPutAll: '![ ', self title, ' | ', self authorName, ' ](',self urlString, ')';
nextPut: Character lf; nextPut: Character lf;
nextPutAll: '</div>'; nextPutAll: '</div>';
nextPut: Character lf; nextPut: Character lf;
nextPut: Character lf. nextPut: Character lf.
^ output contents ^ output contents
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
LeYoutubeReferenceSnippet >> metadata [ LeYoutubeReferenceSnippet >> metadata [
^ self optionAt: 'metadata' ifAbsentPut: [ self metadataUpdate ] ^ self optionAt: 'metadata' ifAbsentPut: [ self metadataUpdate ]
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
LeYoutubeReferenceSnippet >> metadataDiv [ LeYoutubeReferenceSnippet >> metadataDiv [
| output | | output |
output := WriteStream on: ''. output := WriteStream on: ''.
output output
nextPutAll: '<div st-class="' , self class greaseString , '"'; nextPutAll: '<div st-class="' , self class greaseString , '"';
nextPut: Character lf; nextPut: Character lf;
nextPutAll: ' st-data="' , (STON toStringPretty: self metadata) , '">'. nextPutAll: ' st-data="' , (STON toStringPretty: self metadata) , '">'.
^ output contents withInternetLineEndings. ^ output contents withInternetLineEndings.
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
LeYoutubeReferenceSnippet >> metadataUpdate [ LeYoutubeReferenceSnippet >> metadataUpdate [
| surrogate | | surrogate |
self parent self parent
ifNil: [ surrogate := nil] ifNil: [ surrogate := nil]
ifNotNil: [ surrogate := self parent uidString ]. ifNotNil: [ surrogate := self parent uidString ].
^ OrderedDictionary new ^ OrderedDictionary new
at: 'id' put: self uidString; at: 'id' put: self uidString;
at: 'parent' put: surrogate; at: 'parent' put: surrogate;
at: 'created' put: self createTime asString; at: 'created' put: self createTime asString;
at: 'modified' put: self latestEditTime asString; at: 'modified' put: self latestEditTime asString;
at: 'creator' put: self createEmail asString; at: 'creator' put: self createEmail asString;
at: 'modifier' put: self editEmail asString; at: 'modifier' put: self editEmail asString;
yourself yourself
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
LeYoutubeReferenceSnippet >> sanitizeMetadata [ LeYoutubeReferenceSnippet >> sanitizeMetadata [
self metadata keysAndValuesDo: [:k :v | self metadata keysAndValuesDo: [:k :v |
(v includesAny: #($< $>)) (v includesAny: #($< $>))
ifTrue: [ ifTrue: [
self metadata at: k put: (v copyWithoutAll: #($< $>)) self metadata at: k put: (v copyWithoutAll: #($< $>))
] ]
] ]
] ]

View File

@ -1,33 +1,33 @@
Class { Class {
#name : #Logseq, #name : #Logseq,
#superclass : #Object, #superclass : #Object,
#instVars : [ #instVars : [
'folder' 'folder'
], ],
#category : #MiniDocs #category : #MiniDocs
} }
{ #category : #accessing } { #category : #accessing }
Logseq >> assets [ Logseq >> assets [
^ self folder / 'assets' ^ self folder / 'assets'
] ]
{ #category : #accessing } { #category : #accessing }
Logseq >> folder [ Logseq >> folder [
^ folder ^ folder
] ]
{ #category : #accessing } { #category : #accessing }
Logseq >> folder: aFolder [ Logseq >> folder: aFolder [
folder := aFolder folder := aFolder
] ]
{ #category : #accessing } { #category : #accessing }
Logseq >> journals [ Logseq >> journals [
^ self folder / 'journals' ^ self folder / 'journals'
] ]
{ #category : #accessing } { #category : #accessing }
Logseq >> pages [ Logseq >> pages [
^ self folder / 'pages' ^ self folder / 'pages'
] ]

View File

@ -1,23 +1,23 @@
" "
Please describe the package using the class comment of the included manifest class. The manifest class also includes other additional metadata for the package. These meta data are used by other tools such as the SmalllintManifestChecker and the critics Browser Please describe the package using the class comment of the included manifest class. The manifest class also includes other additional metadata for the package. These meta data are used by other tools such as the SmalllintManifestChecker and the critics Browser
" "
Class { Class {
#name : #ManifestMiniDocs, #name : #ManifestMiniDocs,
#superclass : #PackageManifest, #superclass : #PackageManifest,
#category : #'MiniDocs-Manifest' #category : #'MiniDocs-Manifest'
} }
{ #category : #'code-critics' } { #category : #'code-critics' }
ManifestMiniDocs class >> ruleCascadedNextPutAllsRuleV1FalsePositive [ ManifestMiniDocs class >> ruleCascadedNextPutAllsRuleV1FalsePositive [
^ #(#(#(#RGMethodDefinition #(#LeTextualSnippet #asMarkdeep #false)) #'2022-09-09T12:31:08.106585-05:00') ) ^ #(#(#(#RGMethodDefinition #(#LeTextualSnippet #asMarkdeep #false)) #'2022-09-09T12:31:08.106585-05:00') )
] ]
{ #category : #'code-critics' } { #category : #'code-critics' }
ManifestMiniDocs class >> ruleExcessiveVariablesRuleV1FalsePositive [ ManifestMiniDocs class >> ruleExcessiveVariablesRuleV1FalsePositive [
^ #(#(#(#RGClassDefinition #(#Markdeep)) #'2022-07-16T12:24:34.695032-05:00') ) ^ #(#(#(#RGClassDefinition #(#Markdeep)) #'2022-07-16T12:24:34.695032-05:00') )
] ]
{ #category : #'code-critics' } { #category : #'code-critics' }
ManifestMiniDocs class >> ruleParseTreeLintRuleV1FalsePositive [ ManifestMiniDocs class >> ruleParseTreeLintRuleV1FalsePositive [
^ #(#(#(#RGPackageDefinition #(#MiniDocs)) #'2022-07-25T09:28:50.156394-05:00') ) ^ #(#(#(#RGPackageDefinition #(#MiniDocs)) #'2022-07-25T09:28:50.156394-05:00') )
] ]

File diff suppressed because it is too large

View File

@ -1,221 +1,221 @@
" "
I model a Markdown document. I model a Markdown document.
At some point the idea is to have a full native parser implemented to deal At some point the idea is to have a full native parser implemented to deal
with my syntax, but meanwhile I will be collaborating with external parsers, with my syntax, but meanwhile I will be collaborating with external parsers,
particularly the ones provided by Pandoc and/or Lunamark. particularly the ones provided by Pandoc and/or Lunamark.
" "
Class { Class {
#name : #Markdown, #name : #Markdown,
#superclass : #MarkupFile, #superclass : #MarkupFile,
#instVars : [ #instVars : [
'metadata', 'metadata',
'body', 'body',
'title' 'title'
], ],
#category : #'MiniDocs-Core' #category : #'MiniDocs-Core'
} }
{ #category : #'instance creation' } { #category : #'instance creation' }
Markdown class >> fromFile: aFileReference [ Markdown class >> fromFile: aFileReference [
^ self new fromFile: aFileReference ^ self new fromFile: aFileReference
] ]
{ #category : #accessing } { #category : #accessing }
Markdown >> asMarkdeep [ Markdown >> asMarkdeep [
^ Markdeep new ^ Markdeep new
body: self body; body: self body;
commentYAMLMetadata commentYAMLMetadata
] ]
{ #category : #accessing } { #category : #accessing }
Markdown >> body [ Markdown >> body [
^ body ^ body
] ]
{ #category : #accessing } { #category : #accessing }
Markdown >> body: aString [ Markdown >> body: aString [
body := aString body := aString
] ]
{ #category : #operation } { #category : #operation }
Markdown >> commentYAMLMetadata [ Markdown >> commentYAMLMetadata [
| newContents | | newContents |
self detectYAMLMetadata ifFalse: [ ^ self ]. self detectYAMLMetadata ifFalse: [ ^ self ].
newContents := '' writeStream. newContents := '' writeStream.
newContents nextPutAll: '<!--@yaml'; lf. newContents nextPutAll: '<!--@yaml'; lf.
newContents nextPutAll: self yamlMetadataString. newContents nextPutAll: self yamlMetadataString.
newContents nextPutAll: '-->'; lf; lf. newContents nextPutAll: '-->'; lf; lf.
(self lines copyFrom: self yamlMetadataClosingLineNumber + 2 to: self lines size) do: [ :line | (self lines copyFrom: self yamlMetadataClosingLineNumber + 2 to: self lines size) do: [ :line |
newContents nextPutAll: line; lf ]. newContents nextPutAll: line; lf ].
^ newContents contents. ^ newContents contents.
] ]
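For clarity, a hedged before/after sketch of what commentYAMLMetadata is meant to produce (the metadata keys are invented). Given a document starting with:

---
title: Example note
author: Someone
---

the YAML block is expected to be rewritten as an HTML comment, so Markdeep renders the document without showing the metadata:

<!--@yaml
title: Example note
author: Someone
-->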
{ #category : #utilities } { #category : #utilities }
Markdown >> containsYAMLMetadataClosing [ Markdown >> containsYAMLMetadataClosing [
^ self yamlMetadataClosingLineNumber > 0 ^ self yamlMetadataClosingLineNumber > 0
] ]
{ #category : #accessing } { #category : #accessing }
Markdown >> contents [ Markdown >> contents [
| response | | response |
response := WriteStream on: ''. response := WriteStream on: ''.
response response
nextPutAll: '---'; cr; nextPutAll: '---'; cr;
nextPutAll: self metadataAsYAML; cr; nextPutAll: self metadataAsYAML; cr;
nextPutAll: '---'; cr; nextPutAll: '---'; cr;
nextPutAll: self body. nextPutAll: self body.
^ response contents ^ response contents
] ]
{ #category : #accessing } { #category : #accessing }
Markdown >> contents: aString [ Markdown >> contents: aString [
body := aString body := aString
] ]
{ #category : #accessing } { #category : #accessing }
Markdown >> documentTree [ Markdown >> documentTree [
| parser| | parser|
self contents ifNil: [^ nil]. self contents ifNil: [^ nil].
parser := PPCommonMarkBlockParser new parse: self body. parser := PPCommonMarkBlockParser new parse: self body.
^ parser accept: CMBlockVisitor new ^ parser accept: CMBlockVisitor new
] ]
{ #category : #persistence } { #category : #persistence }
Markdown >> exportAsFile [ Markdown >> exportAsFile [
| newFile | | newFile |
newFile := (self file fullName ) asFileReference. newFile := (self file fullName ) asFileReference.
^ self notifyExportAsFileOn: newFile. ^ self notifyExportAsFileOn: newFile.
] ]
{ #category : #persistence } { #category : #persistence }
Markdown >> exportAsFileOn: aFileReference [ Markdown >> exportAsFileOn: aFileReference [
aFileReference ensureDelete. aFileReference ensureDelete.
aFileReference exists ifFalse: [ aFileReference ensureCreateFile ]. aFileReference exists ifFalse: [ aFileReference ensureCreateFile ].
aFileReference writeStreamDo: [ :stream | aFileReference writeStreamDo: [ :stream |
stream nextPutAll: self contents withInternetLineEndings ]. stream nextPutAll: self contents withInternetLineEndings ].
] ]
{ #category : #accessing } { #category : #accessing }
Markdown >> exportAsHTML [ Markdown >> exportAsHTML [
^ Pandoc markdownToHtml: self file ^ Pandoc markdownToHtml: self file
] ]
{ #category : #operation } { #category : #operation }
Markdown >> exportMetadataAsJson [ Markdown >> exportMetadataAsJson [
"TBD: Lua scripts should be checked and installed when missing. Maybe a shared location "TBD: Lua scripts should be checked and installed when missing. Maybe a shared location
in '.local/share/Grafoscopio/Scripts' should be developed in the near future." in '.local/share/Grafoscopio/Scripts' should be developed in the near future."
| output luaScript | | output luaScript |
luaScript := FileLocator home / '.local/share/Brea/scripts/meta-to-json.lua'. luaScript := FileLocator home / '.local/share/Brea/scripts/meta-to-json.lua'.
Smalltalk platformName = 'unix' ifTrue: [ Smalltalk platformName = 'unix' ifTrue: [
OSSUnixSubprocess new OSSUnixSubprocess new
workingDirectory: self file parent fullName; workingDirectory: self file parent fullName;
command: 'pandoc'; command: 'pandoc';
arguments: { '--lua-filter=', luaScript fullName . self file basename }; arguments: { '--lua-filter=', luaScript fullName . self file basename };
redirectStdout; redirectStdout;
redirectStdin; redirectStdin;
runAndWaitOnExitDo: [ :process :outString :errString | runAndWaitOnExitDo: [ :process :outString :errString |
output := process isSuccess output := process isSuccess
ifTrue: [ outString ] ifTrue: [ outString ]
ifFalse: [ errString ] ifFalse: [ errString ]
]]. ]].
^ output correctAccentedCharacters ^ output correctAccentedCharacters
] ]
{ #category : #operation } { #category : #operation }
Markdown >> exportMetadataAsYaml [ Markdown >> exportMetadataAsYaml [
| exportedFile | | exportedFile |
exportedFile := FileLocator temp / 'metadata.yaml'. exportedFile := FileLocator temp / 'metadata.yaml'.
MarkupFile exportAsFileOn: exportedFile containing: self yamlMetadataStringWithDelimiters. MarkupFile exportAsFileOn: exportedFile containing: self yamlMetadataStringWithDelimiters.
^ exportedFile ^ exportedFile
] ]
{ #category : #accessing } { #category : #accessing }
Markdown >> file [ Markdown >> file [
^ file ifNil: [ file := FileLocator temp / 'temporalMarkdeep.md.html' ] ^ file ifNil: [ file := FileLocator temp / 'temporalMarkdeep.md.html' ]
] ]
{ #category : #accessing } { #category : #accessing }
Markdown >> file: aFileReference [ Markdown >> file: aFileReference [
"I store the origen/destination of the Markdown contents." "I store the origen/destination of the Markdown contents."
file := aFileReference file := aFileReference
] ]
{ #category : #'instance creation' } { #category : #'instance creation' }
Markdown >> fromFile: aFileReference [ Markdown >> fromFile: aFileReference [
self fromString: aFileReference contents. self fromString: aFileReference contents.
self file: aFileReference. self file: aFileReference.
] ]
{ #category : #'instance creation' } { #category : #'instance creation' }
Markdown >> fromString: markdownString [ Markdown >> fromString: markdownString [
(self metadata) at: 'original' put: markdownString yamlMetadata. (self metadata) at: 'original' put: markdownString yamlMetadata.
self body: markdownString contentsWithoutYAMLMetadata self body: markdownString contentsWithoutYAMLMetadata
] ]
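A minimal sketch of building an instance from a string (the metadata and body below are invented): the YAML front matter is kept under the 'original' metadata key and the body is stored without it.

| doc |
doc := Markdown new fromString: '---
title: Example
---
Body text.'.
doc body. "expected to answer the contents without the YAML block"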
{ #category : #accessing } { #category : #accessing }
Markdown >> gtTextFor: aView [ Markdown >> gtTextFor: aView [
<gtView> <gtView>
^ aView textEditor ^ aView textEditor
title: 'Text'; title: 'Text';
text: [ self contents ] text: [ self contents ]
] ]
{ #category : #accessing } { #category : #accessing }
Markdown >> headerAsTitle [ Markdown >> headerAsTitle [
| headerNode | | headerNode |
headerNode := self documentTree children headerNode := self documentTree children
detect: [ :node | node className = 'PPCMHeader' and: [ node level = 1 ] ] ifNone: [ ^ 'Untitled' ]. detect: [ :node | node className = 'PPCMHeader' and: [ node level = 1 ] ] ifNone: [ ^ 'Untitled' ].
^ headerNode text ^ headerNode text
] ]
{ #category : #utilities } { #category : #utilities }
Markdown >> lines [ Markdown >> lines [
self file ifNotNil: [^ self file contents lines ]. self file ifNotNil: [^ self file contents lines ].
^ self contents lines. ^ self contents lines.
] ]
{ #category : #accessing } { #category : #accessing }
Markdown >> metadata [ Markdown >> metadata [
^ metadata ifNil: [ metadata := Dictionary new]. ^ metadata ifNil: [ metadata := Dictionary new].
] ]
{ #category : #accessing } { #category : #accessing }
Markdown >> metadata: rawMeta [ Markdown >> metadata: rawMeta [
metadata := rawMeta metadata := rawMeta
] ]
{ #category : #accessing } { #category : #accessing }
Markdown >> metadataAsYAML [ Markdown >> metadataAsYAML [
self metadata isEmptyOrNil ifTrue: [ ^ '' ]. self metadata isEmptyOrNil ifTrue: [ ^ '' ].
^ (YQ jsonToYaml: self metadata) accentedCharactersCorrection ^ (YQ jsonToYaml: self metadata) accentedCharactersCorrection
] ]
{ #category : #persistence } { #category : #persistence }
Markdown >> notifyExportAsFileOn: aFileReference [ Markdown >> notifyExportAsFileOn: aFileReference [
self exportAsFileOn: aFileReference. self exportAsFileOn: aFileReference.
self inform: 'Exported as: ', String cr, aFileReference fullName. self inform: 'Exported as: ', String cr, aFileReference fullName.
^ aFileReference ^ aFileReference
] ]
{ #category : #accessing } { #category : #accessing }
Markdown >> options [ Markdown >> options [
^ self metadata at: 'options' ifAbsentPut: [ self defaultOptions] ^ self metadata at: 'options' ifAbsentPut: [ self defaultOptions]
] ]
{ #category : #accessing } { #category : #accessing }
Markdown >> printOn: aStream [ Markdown >> printOn: aStream [
| response | | response |
super printOn: aStream. super printOn: aStream.
response := self title ifNil: [ 'Untitled' ]. response := self title ifNil: [ 'Untitled' ].
aStream aStream
nextPutAll: '( ', response , ' )' nextPutAll: '( ', response , ' )'
] ]
{ #category : #accessing } { #category : #accessing }
Markdown >> title [ Markdown >> title [
^ title ifNil: [ title := self headerAsTitle ] ^ title ifNil: [ title := self headerAsTitle ]
] ]

View File

@ -1,40 +1,40 @@
" "
I model common operations made with several markup files. I model common operations made with several markup files.
" "
Class { Class {
#name : #MarkupFile, #name : #MarkupFile,
#superclass : #Object, #superclass : #Object,
#instVars : [ #instVars : [
'file' 'file'
], ],
#category : #'MiniDocs-Core' #category : #'MiniDocs-Core'
} }
{ #category : #persistence } { #category : #persistence }
MarkupFile class >> exportAsFileOn: aFileReferenceOrFileName containing: anObject [ MarkupFile class >> exportAsFileOn: aFileReferenceOrFileName containing: anObject [
| file preprocessed | | file preprocessed |
file := aFileReferenceOrFileName asFileReference. file := aFileReferenceOrFileName asFileReference.
file ensureDelete. file ensureDelete.
file exists ifFalse: [ file ensureCreateFile ]. file exists ifFalse: [ file ensureCreateFile ].
(#('String' 'ByteString' 'WideString') includes: anObject className ) (#('String' 'ByteString' 'WideString') includes: anObject className )
ifTrue: [ preprocessed := anObject ] ifTrue: [ preprocessed := anObject ]
ifFalse: [preprocessed := STON toStringPretty: anObject ]. ifFalse: [preprocessed := STON toStringPretty: anObject ].
file writeStreamDo: [ :stream | file writeStreamDo: [ :stream |
stream nextPutAll: preprocessed ]. stream nextPutAll: preprocessed ].
self inform: 'Exported as: ', String cr, file fullName. self inform: 'Exported as: ', String cr, file fullName.
^ file ^ file
] ]
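A minimal usage sketch (the target path is hypothetical): plain strings are written as-is, while any other object is serialized with STON before writing.

MarkupFile
	exportAsFileOn: FileLocator temp / 'example.ston'
	containing: (OrderedDictionary new at: 'title' put: 'Example'; yourself).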
{ #category : #accessing } { #category : #accessing }
MarkupFile class >> installTemplate: anUrl into: aFolder [ MarkupFile class >> installTemplate: anUrl into: aFolder [
| fileName | | fileName |
fileName := anUrl asUrl segments last. fileName := anUrl asUrl segments last.
(aFolder / fileName) exists (aFolder / fileName) exists
ifTrue: [ (aFolder / fileName) ensureDeleteFile ] ifTrue: [ (aFolder / fileName) ensureDeleteFile ]
ifFalse: [ aFolder ensureCreateDirectory ]. ifFalse: [ aFolder ensureCreateDirectory ].
ZnClient new ZnClient new
url: anUrl; url: anUrl;
downloadTo: aFolder. downloadTo: aFolder.
^ aFolder ^ aFolder
] ]

View File

@ -1,145 +1,142 @@
" "
MiniDocs is a project that includes several minimalistic documentation tools used by the [Grafoscopio](https://mutabit.com/grafoscopio/en.html) community, starting with [Markdeep](https://casual-effects.com/markdeep/) and its integrations with [Lepiter](https://lepiter.io/feenk/introducing-lepiter--knowledge-management--e2p6apqsz5npq7m4xte0kkywn/) . MiniDocs is a project that includes several minimalistic documentation tools used by the [Grafoscopio](https://mutabit.com/grafoscopio/en.html) community, starting with [Markdeep](https://casual-effects.com/markdeep/) and its integrations with [Lepiter](https://lepiter.io/feenk/introducing-lepiter--knowledge-management--e2p6apqsz5npq7m4xte0kkywn/) .
" "
Class { Class {
#name : #MiniDocs, #name : #MiniDocs,
#superclass : #Object, #superclass : #Object,
#category : #'MiniDocs-Core' #category : #'MiniDocs-Core'
} }
{ #category : #accessing } { #category : #accessing }
MiniDocs class >> altKeys [ MiniDocs class >> altKeys [
^ BlAlternativeCombination new ^ BlAlternativeCombination new
combination: (BlSingleKeyCombination key:BlKeyboardKey altLeft) combination: (BlSingleKeyCombination key:BlKeyboardKey altLeft)
or: (BlSingleKeyCombination key:BlKeyboardKey altRight) or: (BlSingleKeyCombination key:BlKeyboardKey altRight)
] ]
{ #category : #accessing } { #category : #accessing }
MiniDocs class >> altShiftLeftCombo [ MiniDocs class >> altShiftLeftCombo [
^ BlCompulsoryCombination new ^ BlCompulsoryCombination new
with: self altKeys; with: self altKeys;
with: self shiftKeys; with: self shiftKeys;
with: (BlSingleKeyCombination key: BlKeyboardKey arrowLeft); with: (BlSingleKeyCombination key: BlKeyboardKey arrowLeft);
yourself yourself
] ]
{ #category : #accessing } { #category : #accessing }
MiniDocs class >> altShiftRightCombo [ MiniDocs class >> altShiftRightCombo [
^ BlCompulsoryCombination new ^ BlCompulsoryCombination new
with: self altKeys; with: self altKeys;
with: self shiftKeys; with: self shiftKeys;
with: (BlSingleKeyCombination key: BlKeyboardKey arrowRight); with: (BlSingleKeyCombination key: BlKeyboardKey arrowRight);
yourself yourself
] ]
{ #category : #accessing } { #category : #accessing }
MiniDocs class >> appFolder [ MiniDocs class >> appFolder [
| tempFolder userDataFolder | | tempFolder |
userDataFolder := Smalltalk os isWindows tempFolder := ExoRepo userDataFolder / 'Mutabit' / 'MiniDocs'.
ifTrue: [ FileLocator home / 'AppData' / 'Local' ] tempFolder exists ifFalse: [ tempFolder ensureCreateDirectory ].
ifFalse: [ FileLocator userData ]. ^ tempFolder
tempFolder := userDataFolder / 'Mutabit' / 'MiniDocs'. ]
tempFolder exists ifFalse: [ tempFolder ensureCreateDirectory ].
^ tempFolder { #category : #accessing }
] MiniDocs class >> exportAsSton: anObject on: aFileReference [
MarkupFile exportAsFileOn: aFileReference containing: (STON toStringPretty: anObject) withInternetLineEndings
{ #category : #accessing } ]
MiniDocs class >> exportAsSton: anObject on: aFileReference [
MarkupFile exportAsFileOn: aFileReference containing: (STON toStringPretty: anObject) withInternetLineEndings { #category : #accessing }
] MiniDocs class >> importGrafoscopioFile: aFileReference [
{ #category : #accessing } ^ (STON fromString: aFileReference) first parent
MiniDocs class >> importGrafoscopioFile: aFileReference [ ]
^ (STON fromString: aFileReference) first parent { #category : #accessing }
] MiniDocs class >> initialize [
self keyboardShortcutsRemapping
{ #category : #accessing } ]
MiniDocs class >> initialize [
self keyboardShortcutsRemapping { #category : #accessing }
] MiniDocs class >> installYamlToJson [
"For the moment, only Gnu/Linux and Mac are supported.
{ #category : #accessing } IMPORTANT: Nimble, Nim's package manager should be installed, as this process doesn't verify its proper installation."
MiniDocs class >> installYamlToJson [ self yamlToJsonBinary exists ifTrue: [ ^ MiniDocs appFolder ].
"For the moment, only Gnu/Linux and Mac are supported. Nimble
IMPORTANT: Nimble, Nim's package manager should be installed, as this process doesn't verify its proper installation." install: 'yaml';
self yamlToJsonBinary exists ifTrue: [ ^ MiniDocs appFolder ]. install: 'commandeer'.
Nimble OSSUnixSubprocess new
install: 'yaml'; command: 'nim';
install: 'commandeer'. arguments: {'c'. self yamlToJsonSourceCode fullName};
OSSUnixSubprocess new runAndWaitOnExitDo: [ :process :outString |
command: 'nim'; (self yamlToJsonSourceCode parent / self yamlToJsonSourceCode basenameWithoutExtension) moveTo: MiniDocs appFolder asFileReference.
arguments: {'c'. self yamlToJsonSourceCode fullName}; ^ MiniDocs appFolder ]
runAndWaitOnExitDo: [ :process :outString | ]
(self yamlToJsonSourceCode parent / self yamlToJsonSourceCode basenameWithoutExtension) moveTo: MiniDocs appFolder asFileReference.
^ MiniDocs appFolder ] { #category : #accessing }
] MiniDocs class >> keyboardShortcutsRemapping [
| primaryNewLine secondaryNewLine |
{ #category : #accessing } primaryNewLine := LeSnippetElement keyboardShortcuts at: #NewLine.
MiniDocs class >> keyboardShortcutsRemapping [ secondaryNewLine := LeSnippetElement keyboardShortcuts at: #SecondaryNewLine.
| primaryNewLine secondaryNewLine | ^ LeSnippetElement keyboardShortcuts
primaryNewLine := LeSnippetElement keyboardShortcuts at: #NewLine. at: #NewLine put: secondaryNewLine;
secondaryNewLine := LeSnippetElement keyboardShortcuts at: #SecondaryNewLine. at: #SecondaryNewLine put: primaryNewLine;
^ LeSnippetElement keyboardShortcuts at: #IndentSnippet put: self altShiftRightCombo;
at: #NewLine put: secondaryNewLine; at: #UnindentSnippet put: self altShiftLeftCombo;
at: #SecondaryNewLine put: primaryNewLine; yourself
at: #IndentSnippet put: self altShiftRightCombo;
at: #UnindentSnippet put: self altShiftLeftCombo; ]
yourself
{ #category : #accessing }
] MiniDocs class >> shiftKeys [
^ BlAlternativeCombination new
{ #category : #accessing } combination: (BlSingleKeyCombination key:BlKeyboardKey shiftLeft)
MiniDocs class >> shiftKeys [ or: (BlSingleKeyCombination key:BlKeyboardKey shiftRight)
^ BlAlternativeCombination new ]
combination: (BlSingleKeyCombination key:BlKeyboardKey shiftLeft)
or: (BlSingleKeyCombination key:BlKeyboardKey shiftRight) { #category : #accessing }
] MiniDocs class >> yamlToJson: yamlString [
"This method uses a external binary written in Nim, as the native Pharo parser for YAML, written in PetitParser,
{ #category : #accessing } was less robust and unable to parse correctly the same strings as the external one."
MiniDocs class >> yamlToJson: yamlString [ yamlString ifNil: [ ^ Dictionary new ].
"This method uses a external binary written in Nim, as the native Pharo parser for YAML, written in PetitParser, self yamlToJsonBinary exists ifFalse: [ self installYamlToJson ].
was less robust and unable to parse correctly the same strings as the external one."
yamlString ifNil: [ ^ Dictionary new ]. OSSUnixSubprocess new
self yamlToJsonBinary exists ifFalse: [ self installYamlToJson ]. command: self yamlToJsonBinary fullName;
arguments: {yamlString};
OSSUnixSubprocess new redirectStdout;
command: self yamlToJsonBinary fullName; runAndWaitOnExitDo: [ :process :outString |
arguments: {yamlString}; ^ (STONJSON fromString: outString allButFirst accentedCharactersCorrection) first
redirectStdout; ]
runAndWaitOnExitDo: [ :process :outString | ]
^ (STONJSON fromString: outString allButFirst accentedCharactersCorrection) first
] { #category : #accessing }
] MiniDocs class >> yamlToJsonBinary [
^ self appFolder / 'yamlToJson'
{ #category : #accessing } ]
MiniDocs class >> yamlToJsonBinary [
^ self appFolder / 'yamlToJson' { #category : #accessing }
] MiniDocs class >> yamlToJsonSourceCode [
^ FileLocator image parent / 'pharo-local/iceberg/Offray/MiniDocs/src/yamlToJson.nim'
{ #category : #accessing } ]
MiniDocs class >> yamlToJsonSourceCode [
^ FileLocator image parent / 'pharo-local/iceberg/Offray/MiniDocs/src/yamlToJson.nim' { #category : #accessing }
] MiniDocs >> installNimFileExporter [
| folder |
{ #category : #accessing } folder := (MiniDocs appFolder / 'scripts') ensureCreateDirectory.
MiniDocs >> installNimFileExporter [
| folder | ZnClient new
folder := (MiniDocs appFolder / 'scripts') ensureCreateDirectory. url: 'https://mutabit.com/repos.fossil/mutabit/uv/wiki/scripts/stringAsFileInto';
downloadTo: folder / 'stringAsFileInto'.
ZnClient new
url: 'https://mutabit.com/repos.fossil/mutabit/uv/wiki/scripts/stringAsFileInto'; ZnClient new
downloadTo: folder / 'stringAsFileInto'. url: 'https://mutabit.com/repos.fossil/mutabit/doc/trunk/wiki/scripts/stringAsFileInto.nim';
downloadTo: folder / 'stringAsFileInto.nim'.
ZnClient new
url: 'https://mutabit.com/repos.fossil/mutabit/doc/trunk/wiki/scripts/stringAsFileInto.nim'; OSSUnixSubprocess new
downloadTo: folder / 'stringAsFileInto.nim'. command: 'chmod';
arguments: { '+x' . (folder / 'stringAsFileInto') fullName };
OSSUnixSubprocess new workingDirectory: folder fullName;
command: 'chmod'; redirectStdout;
arguments: { '+x' . (folder / 'stringAsFileInto') fullName }; redirectStderr;
workingDirectory: folder fullName; runAndWaitOnExitDo: [ :process :outString | ^ outString ]
redirectStdout; ]
redirectStderr;
runAndWaitOnExitDo: [ :process :outString | ^ outString ]
]

View File

@ -1,74 +1,74 @@
Class { Class {
#name : #MiniDocsServer, #name : #MiniDocsServer,
#superclass : #TLWebserver, #superclass : #TLWebserver,
#instVars : [ #instVars : [
'storage' 'storage'
], ],
#classInstVars : [ #classInstVars : [
'singleton' 'singleton'
], ],
#category : #'MiniDocs-Core' #category : #'MiniDocs-Core'
} }
{ #category : #accessing } { #category : #accessing }
MiniDocsServer class >> build [ MiniDocsServer class >> build [
TLRESTAPIBuilder buildAPI. TLRESTAPIBuilder buildAPI.
self start self start
] ]
{ #category : #accessing } { #category : #accessing }
MiniDocsServer class >> defaultConfiguration [ MiniDocsServer class >> defaultConfiguration [
"Override to set more default values" "Override to set more default values"
^ { ^ {
#port -> 1701 #port -> 1701
} }
] ]
{ #category : #accessing } { #category : #accessing }
MiniDocsServer class >> listLepiterDocs: aRequest [ MiniDocsServer class >> listLepiterDocs: aRequest [
<REST_API: 'GET' pattern: 'lepiter'> <REST_API: 'GET' pattern: 'lepiter'>
^ 'A list of Markdeep exported Lepiter docs will appear soon...' ^ 'A list of Markdeep exported Lepiter docs will appear soon...'
] ]
{ #category : #accessing } { #category : #accessing }
MiniDocsServer class >> restart [ MiniDocsServer class >> restart [
Teapot stopAll. Teapot stopAll.
self build. self build.
^ self start ^ self start
] ]
{ #category : #accessing } { #category : #accessing }
MiniDocsServer class >> singleton [ MiniDocsServer class >> singleton [
^ singleton ifNil: [ singleton := MiniDocsServer teapot ] ^ singleton ifNil: [ singleton := MiniDocsServer teapot ]
] ]
{ #category : #accessing } { #category : #accessing }
MiniDocsServer >> addStorage: anObject [ MiniDocsServer >> addStorage: anObject [
self storage add: anObject. self storage add: anObject.
] ]
{ #category : #accessing } { #category : #accessing }
MiniDocsServer >> initRoutes [ MiniDocsServer >> initRoutes [
self storage: FileLocator documents / 'lepiter' / 'default'. self storage: FileLocator documents / 'lepiter' / 'default'.
self teapot self teapot
serveStatic: '/lepiter/doc' from: self storage fullName. serveStatic: '/lepiter/doc' from: self storage fullName.
self teapot self teapot
GET: '/lepiter' -> 'A list of Markdeep exported Lepiter docs will appear soon...' GET: '/lepiter' -> 'A list of Markdeep exported Lepiter docs will appear soon...'
] ]
{ #category : #accessing } { #category : #accessing }
MiniDocsServer >> start [ MiniDocsServer >> start [
self class defaultPort: 1701. self class defaultPort: 1701.
self initRoutes. self initRoutes.
super start. super start.
] ]
{ #category : #accessing } { #category : #accessing }
MiniDocsServer >> storage [ MiniDocsServer >> storage [
^ storage ^ storage
] ]
{ #category : #accessing } { #category : #accessing }
MiniDocsServer >> storage: aFoldersOrderedCollection [ MiniDocsServer >> storage: aFoldersOrderedCollection [
storage := aFoldersOrderedCollection storage := aFoldersOrderedCollection
] ]

View File

@ -1,64 +1,66 @@
" "
I run an implementation of the [Nano ID](https://github.com/ai/nanoid) tiny, secure, URL-friendly unique string ID generator via its [Nim implementation](https://github.com/icyphox/nanoid.nim). I run an implementation of the [Nano ID](https://github.com/ai/nanoid) tiny, secure, URL-friendly unique string ID generator via its [Nim implementation](https://github.com/icyphox/nanoid.nim).
The Nim script hard codes: The Nim script hard codes:
* a [base 58 encoding](https://medium.com/concerning-pharo/understanding-base58-encoding-23e673e37ff6) alphabet to avoid similar-looking letters and the use of non-alphanumeric characters. * a [base 58 encoding](https://medium.com/concerning-pharo/understanding-base58-encoding-23e673e37ff6) alphabet to avoid similar-looking letters and the use of non-alphanumeric characters.
* a 12-character output length, which gives [a pretty low collision probability](https://zelark.github.io/nano-id-cc/) for the previous alphabet: * a 12-character output length, which gives [a pretty low collision probability](https://zelark.github.io/nano-id-cc/) for the previous alphabet:
~616 years would be needed to have a 1% probability of at least one collision at a speed of 1000 IDs per hour. ~616 years would be needed to have a 1% probability of at least one collision at a speed of 1000 IDs per hour.
This is more than enough for our unique ID applications, mostly in the documentation context, This is more than enough for our unique ID applications, mostly in the documentation context,
which consists of handcrafted and/or programmatically produced notes, which consists of handcrafted and/or programmatically produced notes,
for example in data narratives, book(lets) and TiddlyWiki tiddlers of tens or hundreds of notes at most, for example in data narratives, book(lets) and TiddlyWiki tiddlers of tens or hundreds of notes at most,
unevenly produced between hours, days and/or weeks. unevenly produced between hours, days and/or weeks.
The `External` tag relates to its dependency on other programming languages and frameworks, The `External` tag relates to its dependency on other programming languages and frameworks,
though the dependency should be satisfied by just loading a small binary with no dependencies. though the dependency should be satisfied by just loading a small binary with no dependencies.
" "
Class { Class {
#name : #NanoID, #name : #NanoID,
#superclass : #Object, #superclass : #Object,
#category : #'MiniDocs-External' #category : #'MiniDocs-External'
} }
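A usage sketch: the first call compiles and installs the Nim helper, later calls just run the binary and answer a 12-character base-58 string (the value shown below is only illustrative).

NanoID isInstalled ifFalse: [ NanoID install ].
NanoID generate. "e.g. 'mGT3qkZ7wBfH'"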
{ #category : #accessing } { #category : #accessing }
NanoID class >> binaryFile [ NanoID class >> binaryFile [
^ MiniDocs appFolder / self scriptSourceCode basenameWithoutExtension Smalltalk os isWindows
] ifFalse: [ ^ MiniDocs appFolder / self scriptSourceCode basenameWithoutExtension ]
ifTrue: [ ^ ExoRepo userDataFolder / 'NanoId' / 'nanoid' ]
{ #category : #accessing } ]
NanoID class >> generate [
self binaryFile exists ifFalse: [ NanoID install]. { #category : #accessing }
Smalltalk os isWindows NanoID class >> generate [
ifTrue: [ ^ (LibC resultOfCommand:self binaryFile fullName) copyWithoutAll: (Character lf asString) ]. self binaryFile exists ifFalse: [ NanoID install].
OSSUnixSubprocess new Smalltalk os isWindows
command: self binaryFile fullName; ifTrue: [ ^ (LibC resultOfCommand:self binaryFile fullName) copyWithoutAll: (Character lf asString) ].
redirectStdout; OSSUnixSubprocess new
redirectStdout; command: self binaryFile fullName;
runAndWaitOnExitDo: [ :process :outString | ^ outString copyWithoutAll: (Character lf asString) ] redirectStdout;
] redirectStdout;
runAndWaitOnExitDo: [ :process :outString | ^ outString copyWithoutAll: (Character lf asString) ]
{ #category : #accessing } ]
NanoID class >> install [
"For the moment, only Gnu/Linux and Mac are supported. { #category : #accessing }
IMPORTANT: Nimble, Nim's package manager should be installed, as this process doesn't verify its proper installation." NanoID class >> install [
self binaryFile exists ifTrue: [ ^ MiniDocs appFolder ]. "For the moment, only Gnu/Linux and Mac are supported.
Nimble install: 'nanoid'. IMPORTANT: Nimble, Nim's package manager should be installed, as this process doesn't verify its proper installation."
Smalltalk os isWindows self binaryFile exists ifTrue: [ ^ MiniDocs appFolder ].
ifTrue: [ ^ LibC resultOfCommand: 'nanoid c ',self scriptSourceCode fullName ]. Nimble install: 'nanoid'.
OSSUnixSubprocess new Smalltalk os isWindows
command: 'nim'; ifTrue: [ ^ LibC resultOfCommand: 'nanoid c ',self scriptSourceCode fullName ].
arguments: {'c'. self scriptSourceCode fullName}; OSSUnixSubprocess new
runAndWaitOnExitDo: [ :process :outString | command: 'nim';
(self scriptSourceCode parent / (self scriptSourceCode) basenameWithoutExtension) moveToPageTitled: MiniDocs appFolder asFileReference. arguments: {'c'. self scriptSourceCode fullName};
^ MiniDocs appFolder ] runAndWaitOnExitDo: [ :process :outString |
] (self scriptSourceCode parent / (self scriptSourceCode) basenameWithoutExtension) moveToPageTitled: MiniDocs appFolder asFileReference.
^ MiniDocs appFolder ]
{ #category : #accessing } ]
NanoID class >> isInstalled [
^ self binaryFile exists { #category : #accessing }
] NanoID class >> isInstalled [
^ self binaryFile exists
{ #category : #accessing } ]
NanoID class >> scriptSourceCode [
^ FileLocator image parent / 'pharo-local/iceberg/Offray/MiniDocs/src/nanoIdGen.nim' { #category : #accessing }
] NanoID class >> scriptSourceCode [
^ FileLocator image parent / 'pharo-local/iceberg/Offray/MiniDocs/src/nanoIdGen.nim'
]

View File

@ -1,35 +1,35 @@
Extension { #name : #OrderedDictionary } Extension { #name : #OrderedDictionary }
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
OrderedDictionary >> addErrata: noteString [ OrderedDictionary >> addErrata: noteString [
self errata add: noteString self errata add: noteString
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
OrderedDictionary >> asLepiterSnippet [ OrderedDictionary >> asLepiterSnippet [
| response | | response |
self at: 'className' ifAbsent: [ ^ nil ]. self at: 'className' ifAbsent: [ ^ nil ].
response := (self at: 'className') asClass new. response := (self at: 'className') asClass new.
response fromString: (self at: 'content'). response fromString: (self at: 'content').
response response
uid: (LeUID new uidString: (self at: 'id')); uid: (LeUID new uidString: (self at: 'id'));
parent: (self at: 'parent'); parent: (self at: 'parent');
createTime: (LeTime new time: ((self at: 'created')asDateAndTime)); createTime: (LeTime new time: ((self at: 'created')asDateAndTime));
editTime: (LeTime new time: ((self at: 'modified') asDateAndTime)); editTime: (LeTime new time: ((self at: 'modified') asDateAndTime));
editEmail: (self at: 'modifier'); editEmail: (self at: 'modifier');
createEmail: (self at: 'creator'). createEmail: (self at: 'creator').
self at: 'origin' ifPresent: [ response metadata at: 'origin' put: (self at: 'origin') ]. self at: 'origin' ifPresent: [ response metadata at: 'origin' put: (self at: 'origin') ].
self at: 'errata' ifPresent: [ response metadata at: 'errata' put: (self at: 'errata') ]. self at: 'errata' ifPresent: [ response metadata at: 'errata' put: (self at: 'errata') ].
^ response ^ response
] ]
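A hedged sketch of the kind of dictionary asLepiterSnippet expects; the keys mirror the ones read above and every value below is invented.

| raw |
raw := OrderedDictionary new
	at: 'className' put: 'LeTextSnippet';
	at: 'content' put: 'Some plain text';
	at: 'id' put: 'abc123def456';
	at: 'parent' put: 'f0e1d2c3b4a5';
	at: 'created' put: '2024-04-29T17:00:00-05:00';
	at: 'modified' put: '2024-04-29T17:05:00-05:00';
	at: 'creator' put: 'someone@example.com';
	at: 'modifier' put: 'someone@example.com';
	yourself.
raw asLepiterSnippet.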
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
OrderedDictionary >> errata [ OrderedDictionary >> errata [
^ self at: 'errata' ifAbsentPut: [ OrderedCollection new] ^ self at: 'errata' ifAbsentPut: [ OrderedCollection new]
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
OrderedDictionary >> redefineTimestampsBefore: dateAndTime [ OrderedDictionary >> redefineTimestampsBefore: dateAndTime [
self at: 'modified' put: dateAndTime asDateAndTime. self at: 'modified' put: dateAndTime asDateAndTime.
self at: 'created' put: dateAndTime asDateAndTime - 1 second. self at: 'created' put: dateAndTime asDateAndTime - 1 second.
] ]

View File

@ -1,168 +1,168 @@
" "
I model the interaction between Pandoc and Grafoscopio. I model the interaction between Pandoc and Grafoscopio.
" "
Class { Class {
#name : #Pandoc, #name : #Pandoc,
#superclass : #Object, #superclass : #Object,
#classInstVars : [ #classInstVars : [
'executable' 'executable'
], ],
#category : #'MiniDocs-Core' #category : #'MiniDocs-Core'
} }
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
Pandoc class >> convertString: aString from: inputFormat to: outputFormat [ Pandoc class >> convertString: aString from: inputFormat to: outputFormat [
OSSUnixSubprocess new OSSUnixSubprocess new
shellCommand: 'echo "', aString , '" | pandoc -f ', inputFormat,' -t ', outputFormat; shellCommand: 'echo "', aString , '" | pandoc -f ', inputFormat,' -t ', outputFormat;
redirectStdout; redirectStdout;
runAndWaitOnExitDo: [ :command :outString | runAndWaitOnExitDo: [ :command :outString |
^ outString ^ outString
]. ].
] ]
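A usage sketch, assuming pandoc is available on the system path:

Pandoc convertString: '# Hello' from: 'markdown' to: 'html'.
"expected to answer an <h1> element wrapping the word Hello"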
{ #category : #'as yet unclassified' } { #category : #'as yet unclassified' }
Pandoc class >> downloadLuaFilters [ Pandoc class >> downloadLuaFilters [
self luaFilters do: [ :filter | | filterUrl | self luaFilters do: [ :filter | | filterUrl |
filterUrl := filter asUrl. filterUrl := filter asUrl.
(FileLocator temp asFileReference / (filterUrl segments last)) exists (FileLocator temp asFileReference / (filterUrl segments last)) exists
ifFalse: [ ifFalse: [
ZnClient new ZnClient new
url: filterUrl; url: filterUrl;
downloadTo: FileLocator temp ] ] downloadTo: FileLocator temp ] ]
] ]
{ #category : #accessing } { #category : #accessing }
Pandoc class >> executable [ Pandoc class >> executable [
^ executable ifNil: [ self executableLocation ] ^ executable ifNil: [ self executableLocation ]
] ]
{ #category : #accessing } { #category : #accessing }
Pandoc class >> executable: aFileReference [ Pandoc class >> executable: aFileReference [
executable := aFileReference executable := aFileReference
] ]
{ #category : #accessing } { #category : #accessing }
Pandoc class >> executableLocation [ Pandoc class >> executableLocation [
| location | | location |
location := '/usr/bin/pandoc'. location := '/usr/bin/pandoc'.
location asFileReference exists location asFileReference exists
ifTrue: [ ^ location ] ifTrue: [ ^ location ]
ifFalse: [ self definePandocExecutable ] ifFalse: [ self definePandocExecutable ]
] ]
{ #category : #utility } { #category : #utility }
Pandoc class >> extractImagesInUnixFor: aFileReference withFilter: aLuaFilter [ Pandoc class >> extractImagesInUnixFor: aFileReference withFilter: aLuaFilter [
"I use Pandoc Lua scripting capabilities to extract al images links in aFileReference" "I use Pandoc Lua scripting capabilities to extract al images links in aFileReference"
OSSUnixSubprocess new OSSUnixSubprocess new
command: 'pandoc'; command: 'pandoc';
arguments: {aFileReference fullName . '--lua-filter=',aLuaFilter fullName }; arguments: {aFileReference fullName . '--lua-filter=',aLuaFilter fullName };
redirectStdout; redirectStdout;
redirectStderr; redirectStderr;
runAndWaitOnExitDo: [ :process :outString :errString | runAndWaitOnExitDo: [ :process :outString :errString |
process isSuccess process isSuccess
ifTrue: [ ifTrue: [
^ ((Soup fromString: outString) findAllTags: 'td') collect: [ :each | each next ] ] ^ ((Soup fromString: outString) findAllTags: 'td') collect: [ :each | each next ] ]
ifFalse: [ ifFalse: [
"OSSUnixProcessExitStatus has a nice #printOn: " "OSSUnixProcessExitStatus has a nice #printOn: "
Transcript show: 'Command exit with error status: ', process exitStatusInterpreter printString; cr. Transcript show: 'Command exit with error status: ', process exitStatusInterpreter printString; cr.
Transcript show: 'Stderr contents: ', errString. Transcript show: 'Stderr contents: ', errString.
] ]
] ]
] ]
{ #category : #accessing } { #category : #accessing }
Pandoc class >> htmlStringToMarkdown: aString [ Pandoc class >> htmlStringToMarkdown: aString [
OSSUnixSubprocess new OSSUnixSubprocess new
shellCommand: 'echo "', aString , '" | pandoc -f markdown -t html'; shellCommand: 'echo "', aString , '" | pandoc -f markdown -t html';
redirectStdout; redirectStdout;
runAndWaitOnExitDo: [ :command :outString | runAndWaitOnExitDo: [ :command :outString |
^ outString ^ outString
]. ].
] ]
{ #category : #converters } { #category : #converters }
Pandoc class >> htmlToMarkdown: inputFile [ Pandoc class >> htmlToMarkdown: inputFile [
| outputFile | | outputFile |
outputFile := FileLocator temp / 'body.md'. outputFile := FileLocator temp / 'body.md'.
outputFile ensureDelete. outputFile ensureDelete.
outputFile ensureCreateFile. outputFile ensureCreateFile.
OSSUnixSubprocess new OSSUnixSubprocess new
command: 'pandoc'; command: 'pandoc';
arguments: {'-f'. 'html'. '-t'. 'markdown'. '--atx-headers'. inputFile fullName. arguments: {'-f'. 'html'. '-t'. 'markdown'. '--atx-headers'. inputFile fullName.
'--output'. outputFile fullName }; '--output'. outputFile fullName };
redirectStdout; redirectStdout;
redirectStderr; redirectStderr;
runAndWaitOnExitDo: [ :process :outString :errString | runAndWaitOnExitDo: [ :process :outString :errString |
process isSuccess process isSuccess
ifTrue: [ ^ outputFile contents ] ifTrue: [ ^ outputFile contents ]
ifFalse: [ ^inputFile contents ] ifFalse: [ ^inputFile contents ]
] ]
] ]
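Usage sketch (the HTML file name is an assumption): on success the Markdown contents are answered, otherwise the original file contents are returned unchanged.
	Pandoc htmlToMarkdown: FileLocator temp / 'page.html'.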
{ #category : #'as yet unclassified' } { #category : #'as yet unclassified' }
Pandoc class >> listImagesFrom: aFileReference [
	"I provide a list of all images contained in aFileReference."
	| filter |
	filter := FileLocator temp asFileReference / 'image-links.lua'.
	filter exists
		ifFalse: [ self downloadLuaFilters ].
	^ self extractImagesInUnixFor: aFileReference withFilter: filter
]
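Usage sketch with a placeholder file name; the Lua filter is fetched on demand before the images are listed.
	Pandoc listImagesFrom: FileLocator home / 'chapter.md'.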
{ #category : #utility } { #category : #utility }
Pandoc class >> luaFilters [ Pandoc class >> luaFilters [
"I define the location of set of scripts, that allows to change the default behaviour of Pandoc "I define the location of set of scripts, that allows to change the default behaviour of Pandoc
and/or the processing of supported markup languages. and/or the processing of supported markup languages.
For more information about Lua filters see: For more information about Lua filters see:
https://pandoc.org/lua-filters.html https://pandoc.org/lua-filters.html
" "
| filters | | filters |
filters := OrderedCollection new. filters := OrderedCollection new.
filters filters
add: 'http://mutabit.com/repos.fossil/dataweek/doc/tip/Artefactos/Scripts/image-links.lua'. add: 'http://mutabit.com/repos.fossil/dataweek/doc/tip/Artefactos/Scripts/image-links.lua'.
^ filters ^ filters
] ]
{ #category : #converters } { #category : #converters }
Pandoc class >> markdownToHtml: inputFile [ Pandoc class >> markdownToHtml: inputFile [
(Smalltalk os isUnix or: [ Smalltalk os isMacOS ]) ifTrue: [ ^ self markdownToHtmlOnUnix: inputFile ]. (Smalltalk os isUnix or: [ Smalltalk os isMacOS ]) ifTrue: [ ^ self markdownToHtmlOnUnix: inputFile ].
Smalltalk os isWindows ifTrue: [ ^ self markdownToHtmlOnWindows: inputFile ]. Smalltalk os isWindows ifTrue: [ ^ self markdownToHtmlOnWindows: inputFile ].
] ]
{ #category : #converters } { #category : #converters }
Pandoc class >> markdownToHtmlOnUnix: inputFile [ Pandoc class >> markdownToHtmlOnUnix: inputFile [
| outputFile | | outputFile |
outputFile := inputFile parent / (inputFile basenameWithoutExtension , '.html'). outputFile := inputFile parent / (inputFile basenameWithoutExtension , '.html').
outputFile ensureDelete. outputFile ensureDelete.
outputFile ensureCreateFile. outputFile ensureCreateFile.
OSSUnixSubprocess new OSSUnixSubprocess new
command: 'pandoc'; command: 'pandoc';
arguments: {'-f'. 'markdown+startnum+task_lists'. '--standalone'. '-t'. 'html'. inputFile fullName. arguments: {'-f'. 'markdown+startnum+task_lists'. '--standalone'. '-t'. 'html'. inputFile fullName.
'--output'. outputFile fullName }; '--output'. outputFile fullName };
redirectStdout; redirectStdout;
redirectStderr; redirectStderr;
runAndWaitOnExitDo: [ :process :outString :errString | runAndWaitOnExitDo: [ :process :outString :errString |
process isSuccess process isSuccess
ifTrue: [ ^ outputFile ] ifTrue: [ ^ outputFile ]
ifFalse: [ ^ inputFile ] ifFalse: [ ^ inputFile ]
] ]
] ]
{ #category : #converters } { #category : #converters }
Pandoc class >> markdownToHtmlOnWindows: inputFile [ Pandoc class >> markdownToHtmlOnWindows: inputFile [
"ToDo: This command still doesn't receive any arguments." "ToDo: This command still doesn't receive any arguments."
^ (LibC resultOfCommand: 'pandoc ', inputFile fullName) correctAccentedCharacters. ^ (LibC resultOfCommand: 'pandoc ', inputFile fullName) correctAccentedCharacters.
] ]
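Usage sketch covering the three converters above: the class side dispatches to the Unix/macOS or Windows variant automatically, and 'README.md' is a placeholder file.
	Pandoc markdownToHtml: FileLocator home / 'README.md'.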


@ -1,11 +1,11 @@
Extension { #name : #Pandoc } Extension { #name : #Pandoc }
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
Pandoc class >> convertString: aString from: inputFormat to: outputFormat [ Pandoc class >> convertString: aString from: inputFormat to: outputFormat [
OSSUnixSubprocess new OSSUnixSubprocess new
shellCommand: 'echo "', aString , '" | pandoc -f ', inputFormat,' -t ', outputFormat; shellCommand: 'echo "', aString , '" | pandoc -f ', inputFormat,' -t ', outputFormat;
redirectStdout; redirectStdout;
runAndWaitOnExitDo: [ :command :outString | runAndWaitOnExitDo: [ :command :outString |
^ outString ^ outString
]. ].
] ]
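Usage sketch for ad hoc conversion of a small string between any two formats supported by Pandoc:
	Pandoc convertString: '# Hello' from: 'markdown' to: 'html'.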


@ -1,148 +1,148 @@
Class { Class {
#name : #PubPubContent, #name : #PubPubContent,
#superclass : #Object, #superclass : #Object,
#instVars : [ #instVars : [
'title', 'title',
'language', 'language',
'url', 'url',
'thumbnail', 'thumbnail',
'work', 'work',
'contents' 'contents'
], ],
#category : #'MiniDocs-Model' #category : #'MiniDocs-Model'
} }
{ #category : #accessing } { #category : #accessing }
PubPubContent class >> fromXML: anXMLElement [ PubPubContent class >> fromXML: anXMLElement [
^ self new fromXML: anXMLElement ^ self new fromXML: anXMLElement
] ]
{ #category : #accessing } { #category : #accessing }
PubPubContent >> asMarkdeepFrontPageElement [ PubPubContent >> asMarkdeepFrontPageElement [
| response anchorName anchorLink markdeepFile | | response anchorName anchorLink markdeepFile |
response := '' writeStream. response := '' writeStream.
anchorName := '[', self title,']'. anchorName := '[', self title,']'.
markdeepFile := './book/', self shortName,'--',self id,'.md.html'. markdeepFile := './book/', self shortName,'--',self id,'.md.html'.
anchorLink := '(', markdeepFile,')'. anchorLink := '(', markdeepFile,')'.
response response
nextPutAll: '<big>', anchorName, anchorLink,'</big><br><br>'; nextPutAll: '<big>', anchorName, anchorLink,'</big><br><br>';
nextPutAll: String lf. nextPutAll: String lf.
self thumbnail ifNotNil: [ |image| self thumbnail ifNotNil: [ |image|
image := ' image := '
<img <img
src=', self thumbnail, src=', self thumbnail,
' width="55%" ' width="55%"
style="width: 400px; height: 220px; object-fit: cover;" style="width: 400px; height: 220px; object-fit: cover;"
/>'. />'.
response nextPutAll: '<a href="',markdeepFile,'">', image, '</a>' response nextPutAll: '<a href="',markdeepFile,'">', image, '</a>'
]. ].
response response
nextPutAll: String lf, String lf. nextPutAll: String lf, String lf.
^ response contents ^ response contents
] ]
{ #category : #accessing } { #category : #accessing }
PubPubContent >> contents: anObject [ PubPubContent >> contents: anObject [
contents := anObject contents := anObject
] ]
{ #category : #accessing } { #category : #accessing }
PubPubContent >> fileName [ PubPubContent >> fileName [
^ self shortName,'--', self id, '.md' ^ self shortName,'--', self id, '.md'
] ]
{ #category : #accessing } { #category : #accessing }
PubPubContent >> fromXML: aXMLElement [ PubPubContent >> fromXML: aXMLElement [
| image anchor| | image anchor|
image := aXMLElement contentNodes first xpath: './a/div'. image := aXMLElement contentNodes first xpath: './a/div'.
image image
ifNotEmpty: [|style rawUrl| ifNotEmpty: [|style rawUrl|
style := (image first attributeAt: 'style'). style := (image first attributeAt: 'style').
rawUrl := (style splitOn: 'url') second. rawUrl := (style splitOn: 'url') second.
self self
thumbnail:(rawUrl copyFrom: 3 to: rawUrl size - 2) thumbnail:(rawUrl copyFrom: 3 to: rawUrl size - 2)
]. ].
anchor := (aXMLElement contentNodes second contentNodes first xpath: './div[@class="title-wrapper"]/a') first. anchor := (aXMLElement contentNodes second contentNodes first xpath: './div[@class="title-wrapper"]/a') first.
self self
title: (anchor attributeAt: 'title'); title: (anchor attributeAt: 'title');
url: (anchor attributeAt: 'href'). url: (anchor attributeAt: 'href').
] ]
{ #category : #accessing } { #category : #accessing }
PubPubContent >> id [ PubPubContent >> id [
^ (self url splitOn: $/) last ^ (self url splitOn: $/) last
] ]
{ #category : #'as yet unclassified' } { #category : #'as yet unclassified' }
PubPubContent >> language: aString [ PubPubContent >> language: aString [
language := aString language := aString
] ]
{ #category : #accessing } { #category : #accessing }
PubPubContent >> next [ PubPubContent >> next [
^ self nextInstance ^ self nextInstance
] ]
{ #category : #accessing } { #category : #accessing }
PubPubContent >> previous [ PubPubContent >> previous [
| index | | index |
index := self work tableOfContents detectIndex: [:pubContent | pubContent = self ] ifNone: [ ^ nil ]. index := self work tableOfContents detectIndex: [:pubContent | pubContent = self ] ifNone: [ ^ nil ].
^ self work tableOfContents at: index - 1. ^ self work tableOfContents at: index - 1.
] ]
{ #category : #accessing } { #category : #accessing }
PubPubContent >> printOn: aStream [ PubPubContent >> printOn: aStream [
super printOn: aStream. super printOn: aStream.
aStream aStream
nextPutAll: '( ', self title,' | ', self id, ' )' nextPutAll: '( ', self title,' | ', self id, ' )'
] ]
{ #category : #accessing } { #category : #accessing }
PubPubContent >> shortName [ PubPubContent >> shortName [
| sanitized | | sanitized |
sanitized := (self title splitOn: $:) first. sanitized := (self title splitOn: $:) first.
sanitized := sanitized copyReplaceAll: '' with: ''. sanitized := sanitized copyReplaceAll: '' with: ''.
sanitized := sanitized asCamelCase. sanitized := sanitized asCamelCase.
sanitized at: 1 put: sanitized first asLowercase. sanitized at: 1 put: sanitized first asLowercase.
^ sanitized ^ sanitized
] ]
{ #category : #accessing } { #category : #accessing }
PubPubContent >> thumbnail [ PubPubContent >> thumbnail [
^ thumbnail ^ thumbnail
] ]
{ #category : #accessing } { #category : #accessing }
PubPubContent >> thumbnail: anURL [ PubPubContent >> thumbnail: anURL [
thumbnail := anURL thumbnail := anURL
] ]
{ #category : #accessing } { #category : #accessing }
PubPubContent >> title [ PubPubContent >> title [
^ title ^ title
] ]
{ #category : #accessing } { #category : #accessing }
PubPubContent >> title: anObject [ PubPubContent >> title: anObject [
title := anObject title := anObject
] ]
{ #category : #accessing } { #category : #accessing }
PubPubContent >> url [ PubPubContent >> url [
^url ^url
] ]
{ #category : #accessing } { #category : #accessing }
PubPubContent >> url: anObject [ PubPubContent >> url: anObject [
url := anObject url := anObject
] ]
{ #category : #accessing } { #category : #accessing }
PubPubContent >> work [ PubPubContent >> work [
^ work ^ work
] ]
{ #category : #accessing } { #category : #accessing }
PubPubContent >> work: aPubPubWork [ PubPubContent >> work: aPubPubWork [
work := aPubPubWork work := aPubPubWork
] ]


@ -1,75 +1,75 @@
Class { Class {
#name : #PubPubGrammar, #name : #PubPubGrammar,
#superclass : #PP2CompositeNode, #superclass : #PP2CompositeNode,
#instVars : [ #instVars : [
'document', 'document',
'link', 'link',
'linkLabel', 'linkLabel',
'linkContent', 'linkContent',
'imageLinkLabel', 'imageLinkLabel',
'imageLinkContent', 'imageLinkContent',
'alternativeImages', 'alternativeImages',
'imageLink' 'imageLink'
], ],
#category : #'MiniDocs-Model' #category : #'MiniDocs-Model'
} }
{ #category : #accessing } { #category : #accessing }
PubPubGrammar >> alternativeImages [ PubPubGrammar >> alternativeImages [
^ self linkContent ^ self linkContent
] ]
{ #category : #accessing } { #category : #accessing }
PubPubGrammar >> document [ PubPubGrammar >> document [
^ (link / imageLink ) islandInSea star ^ (link / imageLink ) islandInSea star
] ]
{ #category : #links } { #category : #links }
PubPubGrammar >> imageLink [ PubPubGrammar >> imageLink [
^ imageLinkLabel, imageLinkContent, alternativeImages ^ imageLinkLabel, imageLinkContent, alternativeImages
] ]
{ #category : #links } { #category : #links }
PubPubGrammar >> imageLinkContent [ PubPubGrammar >> imageLinkContent [
^ '(' asPParser, #any asPParser starLazy flatten, ')' asPParser ==> #second ^ '(' asPParser, #any asPParser starLazy flatten, ')' asPParser ==> #second
] ]
{ #category : #links } { #category : #links }
PubPubGrammar >> imageLinkLabel [ PubPubGrammar >> imageLinkLabel [
| label | | label |
label := ("$] asPParser not /" #any asPParser) starLazy flatten. label := ("$] asPParser not /" #any asPParser) starLazy flatten.
^ '![' asPParser, label, ']' asPParser ==> #second. ^ '![' asPParser, label, ']' asPParser ==> #second.
] ]
{ #category : #accessing } { #category : #accessing }
PubPubGrammar >> imageLinkSea [ PubPubGrammar >> imageLinkSea [
^ imageLink sea ==> #second ^ imageLink sea ==> #second
] ]
{ #category : #links } { #category : #links }
PubPubGrammar >> link [ PubPubGrammar >> link [
^ linkLabel, linkContent ^ linkLabel, linkContent
] ]
{ #category : #links } { #category : #links }
PubPubGrammar >> linkContent [ PubPubGrammar >> linkContent [
^ '{' asPParser, #any asPParser starLazy flatten, '}' asPParser ==> #second. ^ '{' asPParser, #any asPParser starLazy flatten, '}' asPParser ==> #second.
] ]
{ #category : #links } { #category : #links }
PubPubGrammar >> linkLabel [ PubPubGrammar >> linkLabel [
| label | | label |
label := ("$] asPParser not /" #any asPParser) starLazy flatten. label := ("$] asPParser not /" #any asPParser) starLazy flatten.
^ $[ asPParser, label, $] asPParser ==> #second. ^ $[ asPParser, label, $] asPParser ==> #second.
] ]
{ #category : #accessing } { #category : #accessing }
PubPubGrammar >> linkSea [ PubPubGrammar >> linkSea [
^ link sea ==> #second ^ link sea ==> #second
] ]
{ #category : #accessing } { #category : #accessing }
PubPubGrammar >> start [ PubPubGrammar >> start [
^ document ^ document
] ]
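A parsing sketch, assuming PetitParser2 is loaded as usual for PP2CompositeNode subclasses; the input text is made up:
	PubPubGrammar new parse: 'Intro text with a link [a label]{path/to/pub} and more prose.'.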


@ -1,65 +1,65 @@
Class { Class {
#name : #PubPubGrammar2, #name : #PubPubGrammar2,
#superclass : #PP2CompositeNode, #superclass : #PP2CompositeNode,
#instVars : [ #instVars : [
'imageLabel', 'imageLabel',
'imageLink', 'imageLink',
'imagesArray', 'imagesArray',
'imageLocation', 'imageLocation',
'document', 'document',
'footnote', 'footnote',
'footnoteLabel', 'footnoteLabel',
'footnoteContent' 'footnoteContent'
], ],
#category : #MiniDocs #category : #MiniDocs
} }
{ #category : #accessing } { #category : #accessing }
PubPubGrammar2 >> document [ PubPubGrammar2 >> document [
^ (imageLink / footnote) islandInSea star ^ (imageLink / footnote) islandInSea star
] ]
{ #category : #accessing } { #category : #accessing }
PubPubGrammar2 >> footnote [ PubPubGrammar2 >> footnote [
^ footnoteLabel, footnoteContent ^ footnoteLabel, footnoteContent
] ]
{ #category : #accessing } { #category : #accessing }
PubPubGrammar2 >> footnoteContent [ PubPubGrammar2 >> footnoteContent [
^ '{#' asPParser, #any asPParser starLazy flatten, '}' asPParser ==> #second ^ '{#' asPParser, #any asPParser starLazy flatten, '}' asPParser ==> #second
] ]
{ #category : #accessing } { #category : #accessing }
PubPubGrammar2 >> footnoteLabel [ PubPubGrammar2 >> footnoteLabel [
^ '[' asPParser, #any asPParser starLazy flatten, ']' asPParser ==> #second ^ '[' asPParser, #any asPParser starLazy flatten, ']' asPParser ==> #second
] ]
{ #category : #accessing } { #category : #accessing }
PubPubGrammar2 >> imageLabel [ PubPubGrammar2 >> imageLabel [
^ '![' asPParser, #any asPParser starLazy flatten, ']' asPParser ==> #second ^ '![' asPParser, #any asPParser starLazy flatten, ']' asPParser ==> #second
] ]
{ #category : #accessing } { #category : #accessing }
PubPubGrammar2 >> imageLink [ PubPubGrammar2 >> imageLink [
^ imageLabel, imageLocation, imagesArray ^ imageLabel, imageLocation, imagesArray
] ]
{ #category : #accessing } { #category : #accessing }
PubPubGrammar2 >> imageLocation [ PubPubGrammar2 >> imageLocation [
^ '(' asPParser, #any asPParser starLazy flatten, ')' asPParser ==> #second ^ '(' asPParser, #any asPParser starLazy flatten, ')' asPParser ==> #second
] ]
{ #category : #accessing } { #category : #accessing }
PubPubGrammar2 >> imagesArray [ PubPubGrammar2 >> imagesArray [
^ '{srcset=' asPParser, #any asPParser starLazy flatten, '}' asPParser ==> #second ^ '{srcset=' asPParser, #any asPParser starLazy flatten, '}' asPParser ==> #second
] ]
{ #category : #accessing } { #category : #accessing }
PubPubGrammar2 >> imagesContent [ PubPubGrammar2 >> imagesContent [
^ '{src=' asPParser, #any asPParser starLazy flatten, '}' asPParser ==> #second ^ '{src=' asPParser, #any asPParser starLazy flatten, '}' asPParser ==> #second
] ]
{ #category : #accessing } { #category : #accessing }
PubPubGrammar2 >> start [ PubPubGrammar2 >> start [
^ document ^ document
] ]


@ -1,59 +1,59 @@
Class { Class {
#name : #PubPubGrammarTest, #name : #PubPubGrammarTest,
#superclass : #PP2CompositeNodeTest, #superclass : #PP2CompositeNodeTest,
#category : #'MiniDocs-Model' #category : #'MiniDocs-Model'
} }
{ #category : #accessing } { #category : #accessing }
PubPubGrammarTest >> parserClass [ PubPubGrammarTest >> parserClass [
^ PubPubGrammar ^ PubPubGrammar
] ]
{ #category : #accessing } { #category : #accessing }
PubPubGrammarTest >> testComposedImageLink [ PubPubGrammarTest >> testComposedImageLink [
self self
parse: '![This is an image label with sublinks (bla bl)[blog]](this/is/an/image/link){this are alternate image sizes}' parse: '![This is an image label with sublinks (bla bl)[blog]](this/is/an/image/link){this are alternate image sizes}'
rule: #imageLink rule: #imageLink
] ]
{ #category : #accessing } { #category : #accessing }
PubPubGrammarTest >> testImageLabel: label [ PubPubGrammarTest >> testImageLabel: label [
self self
parse: label parse: label
rule: #imageLinkLabel rule: #imageLinkLabel
] ]
{ #category : #accessing } { #category : #accessing }
PubPubGrammarTest >> testImageLink [ PubPubGrammarTest >> testImageLink [
self self
parse: '![This is an image label](this/is/an/image/link){this are alternate image sizes}' parse: '![This is an image label](this/is/an/image/link){this are alternate image sizes}'
rule: #imageLink rule: #imageLink
] ]
{ #category : #accessing } { #category : #accessing }
PubPubGrammarTest >> testLabel: label [ PubPubGrammarTest >> testLabel: label [
self self
parse: label parse: label
rule: #linkLabel rule: #linkLabel
] ]
{ #category : #accessing } { #category : #accessing }
PubPubGrammarTest >> testLink [ PubPubGrammarTest >> testLink [
self self
parse: '[This is a label]{this/is/a/link}' parse: '[This is a label]{this/is/a/link}'
rule: #link rule: #link
] ]
{ #category : #accessing } { #category : #accessing }
PubPubGrammarTest >> testNestedLabel [ PubPubGrammarTest >> testNestedLabel [
self self
parse: '[This is a label with [sublabels]]' parse: '[This is a label with [sublabels]]'
rule: #linkLabel rule: #linkLabel
] ]
{ #category : #accessing } { #category : #accessing }
PubPubGrammarTest >> testSimpleLabel [ PubPubGrammarTest >> testSimpleLabel [
self self
parse: '[This is a label]' parse: '[This is a label]'
rule: #linkLabel rule: #linkLabel
] ]


@ -1,240 +1,240 @@
Class { Class {
#name : #PubPubWork, #name : #PubPubWork,
#superclass : #Object, #superclass : #Object,
#instVars : [ #instVars : [
'address', 'address',
'tableOfContents', 'tableOfContents',
'titles', 'titles',
'folder', 'folder',
'currentLanguage', 'currentLanguage',
'languages' 'languages'
], ],
#category : #'MiniDocs-Model' #category : #'MiniDocs-Model'
} }
{ #category : #accessing } { #category : #accessing }
PubPubWork >> addTableOfContents: anOrderedDictionary [ PubPubWork >> addTableOfContents: anOrderedDictionary [
self tableOfContents self tableOfContents
at: (self currentLanguage) put: anOrderedDictionary; at: (self currentLanguage) put: anOrderedDictionary;
yourself yourself
] ]
{ #category : #accessing } { #category : #accessing }
PubPubWork >> addTitle: aString [ PubPubWork >> addTitle: aString [
self titles self titles
at: (self currentLanguage) put: aString at: (self currentLanguage) put: aString
] ]
{ #category : #accessing } { #category : #accessing }
PubPubWork >> address [ PubPubWork >> address [
^ address ^ address
] ]
{ #category : #accessing } { #category : #accessing }
PubPubWork >> address: anUrl [ PubPubWork >> address: anUrl [
address := anUrl address := anUrl
] ]
{ #category : #accessing } { #category : #accessing }
PubPubWork >> bookishFolder [ PubPubWork >> bookishFolder [
^ { 'en' -> 'book'. ^ { 'en' -> 'book'.
'es' -> 'libro'} asDictionary 'es' -> 'libro'} asDictionary
] ]
{ #category : #accessing } { #category : #accessing }
PubPubWork >> currentLanguage [ PubPubWork >> currentLanguage [
^ currentLanguage ^ currentLanguage
] ]
{ #category : #accessing } { #category : #accessing }
PubPubWork >> currentLanguage: twoLettersInISO639_1 [ PubPubWork >> currentLanguage: twoLettersInISO639_1 [
currentLanguage := twoLettersInISO639_1 currentLanguage := twoLettersInISO639_1
] ]
{ #category : #accessing } { #category : #accessing }
PubPubWork >> defaultOptions [ PubPubWork >> defaultOptions [
^ { 'sourceCodeLink' -> true . ^ { 'sourceCodeLink' -> true .
'commentsProvider' -> 'Hypothesis' } asDictionary 'commentsProvider' -> 'Hypothesis' } asDictionary
] ]
{ #category : #accessing } { #category : #accessing }
PubPubWork >> defaultTitle [ PubPubWork >> defaultTitle [
^ self titles associations first value ^ self titles associations first value
] ]
{ #category : #accessing } { #category : #accessing }
PubPubWork >> downloadContents [ PubPubWork >> downloadContents [
| workingDirectory | | workingDirectory |
workingDirectory := self workingDirectory. workingDirectory := self workingDirectory.
self tableOfContentsDictionary self tableOfContentsDictionary
keysAndValuesDo: [ :name :chapterAddress | keysAndValuesDo: [ :name :chapterAddress |
| currentFileName | | currentFileName |
currentFileName := name , '--' , chapterAddress , '.md'. currentFileName := name , '--' , chapterAddress , '.md'.
(workingDirectory / currentFileName) asFileReference ensureDelete. (workingDirectory / currentFileName) asFileReference ensureDelete.
(workingDirectory / 'markdown') asFileReference ensureDelete. (workingDirectory / 'markdown') asFileReference ensureDelete.
ZnClient new ZnClient new
get: self address , 'pub/' , chapterAddress , '/download/markdown'; get: self address , 'pub/' , chapterAddress , '/download/markdown';
downloadTo: workingDirectory. downloadTo: workingDirectory.
workingDirectory / 'markdown' renameTo: currentFileName ]. workingDirectory / 'markdown' renameTo: currentFileName ].
^ workingDirectory ^ workingDirectory
] ]
{ #category : #accessing } { #category : #accessing }
PubPubWork >> downloadContents2 [ PubPubWork >> downloadContents2 [
| workingDirectory | | workingDirectory |
workingDirectory := self folder / self currentLanguage / 'book'. workingDirectory := self folder / self currentLanguage / 'book'.
self tableOfContentsDictionary keysAndValuesDo: [ :name :chapterAddress | |currentFileName| self tableOfContentsDictionary keysAndValuesDo: [ :name :chapterAddress | |currentFileName|
currentFileName := name, '--', chapterAddress, '.md'. currentFileName := name, '--', chapterAddress, '.md'.
(workingDirectory / currentFileName) asFileReference ensureDelete. (workingDirectory / currentFileName) asFileReference ensureDelete.
(workingDirectory / 'markdown') asFileReference ensureDelete. (workingDirectory / 'markdown') asFileReference ensureDelete.
ZnClient new ZnClient new
get: self address, 'pub/', chapterAddress, '/download/markdown'; get: self address, 'pub/', chapterAddress, '/download/markdown';
downloadTo: workingDirectory . downloadTo: workingDirectory .
workingDirectory / 'markdown' renameTo: currentFileName workingDirectory / 'markdown' renameTo: currentFileName
]. ].
^ workingDirectory ^ workingDirectory
] ]
{ #category : #accessing } { #category : #accessing }
PubPubWork >> exportToHTML [ PubPubWork >> exportToHTML [
self markdownFiles self markdownFiles
do: [ :file | | doc | do: [ :file | | doc |
doc := Markdown new fromFile: file. doc := Markdown new fromFile: file.
doc exportAsHTML ]. doc exportAsHTML ].
^ self markdownFiles first parent ^ self markdownFiles first parent
] ]
{ #category : #accessing } { #category : #accessing }
PubPubWork >> exportToMarkdeep [ PubPubWork >> exportToMarkdeep [
| markdeepDocs | | markdeepDocs |
markdeepDocs := self markdownFiles markdeepDocs := self markdownFiles
collect: [ :file | Markdeep fromMarkdownFile: file ]. collect: [ :file | Markdeep fromMarkdownFile: file ].
markdeepDocs do: [ :each | each fromPubPubToMarkdeep exportAsFile ]. markdeepDocs do: [ :each | each fromPubPubToMarkdeep exportAsFile ].
^ self languageFolder ^ self languageFolder
] ]
{ #category : #accessing } { #category : #accessing }
PubPubWork >> extractAllContentsRaw [ PubPubWork >> extractAllContentsRaw [
^ self frontPage xpath: '//div[@class="layout-pubs-block"]' ^ self frontPage xpath: '//div[@class="layout-pubs-block"]'
] ]
{ #category : #accessing } { #category : #accessing }
PubPubWork >> extractRawTableOfContents [ PubPubWork >> extractRawTableOfContents [
^ self extractAllContentsRaw first xpath: '//div[contains(concat(" ",normalize-space(@class)," "), " pub-preview-component ")]' ^ self extractAllContentsRaw first xpath: '//div[contains(concat(" ",normalize-space(@class)," "), " pub-preview-component ")]'
] ]
{ #category : #accessing } { #category : #accessing }
PubPubWork >> folder [ PubPubWork >> folder [
^ folder ensureCreateDirectory ^ folder ensureCreateDirectory
] ]
{ #category : #accessing } { #category : #accessing }
PubPubWork >> folder: localDirectory [ PubPubWork >> folder: localDirectory [
folder := localDirectory folder := localDirectory
] ]
{ #category : #accessing } { #category : #accessing }
PubPubWork >> frontPage [ PubPubWork >> frontPage [
"This should scrap contents of the book's front-page and translate them into Markdeep, "This should scrap contents of the book's front-page and translate them into Markdeep,
according to our templates." according to our templates."
^ (XMLHTMLParser on: (self address asUrl retrieveContents)) parseDocument ^ (XMLHTMLParser on: (self address asUrl retrieveContents)) parseDocument
] ]
{ #category : #accessing } { #category : #accessing }
PubPubWork >> languageFolder [ PubPubWork >> languageFolder [
^ self folder / self currentLanguage ^ self folder / self currentLanguage
] ]
{ #category : #accessing } { #category : #accessing }
PubPubWork >> markdeepFrontPage [ PubPubWork >> markdeepFrontPage [
| frontPage markdeepIndex | | frontPage markdeepIndex |
frontPage := Markdeep new. frontPage := Markdeep new.
frontPage frontPage
title: self defaultTitle; title: self defaultTitle;
file: self languageFolder / 'frontPage.md.html'. file: self languageFolder / 'frontPage.md.html'.
markdeepIndex := '' writeStream. markdeepIndex := '' writeStream.
self tableOfContents do: [:pubPubContent| self tableOfContents do: [:pubPubContent|
markdeepIndex markdeepIndex
nextPutAll: pubPubContent asMarkdeepFrontPageElement nextPutAll: pubPubContent asMarkdeepFrontPageElement
]. ].
frontPage body: markdeepIndex contents. frontPage body: markdeepIndex contents.
^ frontPage ^ frontPage
] ]
{ #category : #accessing } { #category : #accessing }
PubPubWork >> markdownFiles [ PubPubWork >> markdownFiles [
^ self languageFolder allChildren ^ self languageFolder allChildren
select: [ :file | file basename endsWith: '.md' ] select: [ :file | file basename endsWith: '.md' ]
] ]
{ #category : #accessing } { #category : #accessing }
PubPubWork >> populateContents [ PubPubWork >> populateContents [
self tableOfContents isEmptyOrNil self tableOfContents isEmptyOrNil
ifTrue: [ self populateTableOfContents ]. ifTrue: [ self populateTableOfContents ].
self workingDirectory children ifEmpty: [self downloadContents]. self workingDirectory children ifEmpty: [self downloadContents].
self tableOfContents do: [:pubPubContent | | contentFile| self tableOfContents do: [:pubPubContent | | contentFile|
contentFile := self workingDirectory / pubPubContent fileName. contentFile := self workingDirectory / pubPubContent fileName.
contentFile exists contentFile exists
ifTrue: [ pubPubContent contents: (Markdown new fromFile: contentFile) ] ifTrue: [ pubPubContent contents: (Markdown new fromFile: contentFile) ]
] ]
] ]
{ #category : #accessing } { #category : #accessing }
PubPubWork >> populateTableOfContents [ PubPubWork >> populateTableOfContents [
| contentsCollection | | contentsCollection |
contentsCollection := self extractRawTableOfContents collect: [:each | contentsCollection := self extractRawTableOfContents collect: [:each |
(PubPubContent fromXML: each) (PubPubContent fromXML: each)
language: self currentLanguage; language: self currentLanguage;
work: self work: self
]. ].
self addTableOfContents: contentsCollection asOrderedCollection self addTableOfContents: contentsCollection asOrderedCollection
] ]
{ #category : #accessing } { #category : #accessing }
PubPubWork >> printOn: aStream [ PubPubWork >> printOn: aStream [
super printOn: aStream. super printOn: aStream.
aStream aStream
nextPutAll: '(',self defaultTitle, ' | ', self address, ' )' nextPutAll: '(',self defaultTitle, ' | ', self address, ' )'
] ]
{ #category : #accessing } { #category : #accessing }
PubPubWork >> tableOfContents [ PubPubWork >> tableOfContents [
tableOfContents ifNil: [ ^ tableOfContents := Dictionary new]. tableOfContents ifNil: [ ^ tableOfContents := Dictionary new].
^ tableOfContents at: self currentLanguage ^ tableOfContents at: self currentLanguage
] ]
{ #category : #accessing } { #category : #accessing }
PubPubWork >> tableOfContents: anObject [ PubPubWork >> tableOfContents: anObject [
tableOfContents := anObject tableOfContents := anObject
] ]
{ #category : #accessing } { #category : #accessing }
PubPubWork >> tableOfContentsDictionary [ PubPubWork >> tableOfContentsDictionary [
| response | | response |
response := OrderedDictionary new. response := OrderedDictionary new.
self tableOfContents do: [:content | self tableOfContents do: [:content |
response response
at: content shortName put: content id at: content shortName put: content id
]. ].
^ response ^ response
] ]
{ #category : #accessing } { #category : #accessing }
PubPubWork >> titles [ PubPubWork >> titles [
^ titles ifNil: [titles := OrderedDictionary new] ^ titles ifNil: [titles := OrderedDictionary new]
] ]
{ #category : #accessing } { #category : #accessing }
PubPubWork >> viewContentsFor: aView [ PubPubWork >> viewContentsFor: aView [
<gtView> <gtView>
^ aView list ^ aView list
title: 'Contents'; title: 'Contents';
priority: 10; priority: 10;
items: [ self tableOfContents ] items: [ self tableOfContents ]
] ]
{ #category : #accessing } { #category : #accessing }
PubPubWork >> workingDirectory [ PubPubWork >> workingDirectory [
^ self folder / self currentLanguage / (self bookishFolder at: self currentLanguage) ^ self folder / self currentLanguage / (self bookishFolder at: self currentLanguage)
] ]
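A sketch of the intended workflow, with a placeholder address and folder: populate the table of contents, download the chapters and export them as Markdeep.
	| work |
	work := PubPubWork new.
	work
		address: 'https://example.pubpub.org/';
		folder: FileLocator temp / 'example-work';
		currentLanguage: 'en'.
	work populateContents.
	work exportToMarkdeep.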


@ -1,162 +1,162 @@
Extension { #name : #String } Extension { #name : #String }
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
String >> accentedCharactersCorrection [ String >> accentedCharactersCorrection [
| modified corrections | | modified corrections |
corrections := { corrections := {
'ó' -> 'ó' . 'ú' -> 'ú' . 'ñ' -> 'ñ' . 'ó' -> 'ó' . 'ú' -> 'ú' . 'ñ' -> 'ñ' .
'í' -> 'í' . 'á' -> 'á' . 'é' -> 'é' . '’' -> $' asString} asDictionary. 'í' -> 'í' . 'á' -> 'á' . 'é' -> 'é' . '’' -> $' asString} asDictionary.
modified := self copy. modified := self copy.
corrections keysAndValuesDo: [ :k :v | corrections keysAndValuesDo: [ :k :v |
modified := modified copyReplaceAll: k with: v modified := modified copyReplaceAll: k with: v
]. ].
^ modified ^ modified
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
String >> asDashedLowercase [ String >> asDashedLowercase [
"I convert phrases like 'This is a phrase' into 'this-is-a-phrase'." "I convert phrases like 'This is a phrase' into 'this-is-a-phrase'."
^ '-' join: (self substrings collect: [:each | each asLowercase ]) ^ '-' join: (self substrings collect: [:each | each asLowercase ])
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
String >> asInteger [ String >> asInteger [
"Return the integer present in the receiver, or nil. In case of float, returns the integer part." "Return the integer present in the receiver, or nil. In case of float, returns the integer part."
"'1' asInteger >>> 1" "'1' asInteger >>> 1"
"'-1' asInteger >>> -1" "'-1' asInteger >>> -1"
"'10' asInteger >>> 10" "'10' asInteger >>> 10"
"'a' asInteger >>> nil" "'a' asInteger >>> nil"
"'1.234' asInteger >>> 1" "'1.234' asInteger >>> 1"
^ (self copyWithoutAll: '_') asSignedInteger ^ (self copyWithoutAll: '_') asSignedInteger
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
String >> contentsWithoutYAMLMetadata [ String >> contentsWithoutYAMLMetadata [
| newContents | | newContents |
self detectYAMLMetadata ifFalse: [ ^ self ]. self detectYAMLMetadata ifFalse: [ ^ self ].
newContents := '' writeStream. newContents := '' writeStream.
(self lines copyFrom: self yamlMetadataClosingLineNumber + 2 to: self lines size) do: [ :line | (self lines copyFrom: self yamlMetadataClosingLineNumber + 2 to: self lines size) do: [ :line |
newContents nextPutAll: line; cr ]. newContents nextPutAll: line; cr ].
^ newContents contents. ^ newContents contents.
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
String >> deleteYAMLMetadata [ String >> deleteYAMLMetadata [
| newContents | | newContents |
self detectYAMLMetadata ifFalse: [ ^ self ]. self detectYAMLMetadata ifFalse: [ ^ self ].
newContents := '' writeStream. newContents := '' writeStream.
(self lines copyFrom: self yamlMetadataClosingLineNumber + 1 to: self lines size) do: [ :line | (self lines copyFrom: self yamlMetadataClosingLineNumber + 1 to: self lines size) do: [ :line |
newContents nextPutAll: line; lf;lf ]. newContents nextPutAll: line; lf;lf ].
^ newContents contents. ^ newContents contents.
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
String >> demoteMarkdownHeaders [ String >> demoteMarkdownHeaders [
| response | | response |
response := self contents lines. response := self contents lines.
self markdownHeaders associations allButFirstDo: [ :assoc | self markdownHeaders associations allButFirstDo: [ :assoc |
response at: assoc key put: '#', assoc value ]. response at: assoc key put: '#', assoc value ].
^ response asStringWithCr withInternetLineEndings ^ response asStringWithCr withInternetLineEndings
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
String >> detectYAMLMetadata [ String >> detectYAMLMetadata [
| lines | | lines |
lines := self lines. lines := self lines.
^ self startsWithYAMLMetadataDelimiter ^ self startsWithYAMLMetadataDelimiter
and: [ lines allButFirst and: [ lines allButFirst
detect: [ :currentLine | currentLine beginsWith: self class yamlMetadataDelimiter ] detect: [ :currentLine | currentLine beginsWith: self class yamlMetadataDelimiter ]
ifFound: [ ^ true ] ifNone: [ ^ false ] ] ifFound: [ ^ true ] ifNone: [ ^ false ] ]
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
String >> markdownHeaders [ String >> markdownHeaders [
| response headers | | response headers |
headers := (LeTextSnippet string: self contents) ast // #LeHeaderNode collect: [ :each | each headerFullName asString ]. headers := (LeTextSnippet string: self contents) ast // #LeHeaderNode collect: [ :each | each headerFullName asString ].
response := OrderedDictionary new. response := OrderedDictionary new.
self lines doWithIndex: [:line :index | self lines doWithIndex: [:line :index |
(line beginsWithAnyOf: headers) (line beginsWithAnyOf: headers)
ifTrue: [ response at: index put: line ] ifTrue: [ response at: index put: line ]
]. ].
^ response ^ response
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
String >> promoteMarkdownHeaders [ String >> promoteMarkdownHeaders [
| response | | response |
response := self contents lines. response := self contents lines.
self markdownHeaders associationsDo: [ :assoc | self markdownHeaders associationsDo: [ :assoc |
response at: assoc key put: assoc value allButFirst ]. response at: assoc key put: assoc value allButFirst ].
^ response asStringWithCr withInternetLineEndings ^ response asStringWithCr withInternetLineEndings
] ]
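A small usage sketch, assuming Lepiter's Markdown parser is available: promoting shifts every detected header one level up, while #demoteMarkdownHeaders goes in the opposite direction.
	'## Section
### Subsection' promoteMarkdownHeaders.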
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
String >> romanizeAccents [ String >> romanizeAccents [
| modified corrections | | modified corrections |
corrections := { corrections := {
'ó' -> 'o' . 'ú' -> 'u' . 'ñ' -> 'n' . 'ó' -> 'o' . 'ú' -> 'u' . 'ñ' -> 'n' .
'í' -> 'i' . 'á' -> 'a' . 'é' -> 'e' } asDictionary. 'í' -> 'i' . 'á' -> 'a' . 'é' -> 'e' } asDictionary.
modified := self copy. modified := self copy.
corrections keysAndValuesDo: [ :k :v | corrections keysAndValuesDo: [ :k :v |
modified := modified copyReplaceAll: k with: v modified := modified copyReplaceAll: k with: v
]. ].
^ modified ^ modified
] ]
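A quick example of the accent romanization, handy for building ASCII safe file names:
	'Canción política' romanizeAccents.
	"answers 'Cancion politica'"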
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
String >> startsWithYAMLMetadataDelimiter [ String >> startsWithYAMLMetadataDelimiter [
self lines ifEmpty: [^false]. self lines ifEmpty: [^false].
^ self lines first beginsWith: self class yamlMetadataDelimiter ^ self lines first beginsWith: self class yamlMetadataDelimiter
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
String >> withoutXMLTagDelimiters [ String >> withoutXMLTagDelimiters [
^ self copyWithoutAll: #($< $>) ^ self copyWithoutAll: #($< $>)
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
String >> yamlMetadata [ String >> yamlMetadata [
^ (YAML2JSON fromString: self yamlMetadataString) ^ (YAML2JSON fromString: self yamlMetadataString)
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
String >> yamlMetadataClosingLineNumber [ String >> yamlMetadataClosingLineNumber [
"I return the line where the closing of the YAML metadata occurs or 0 if no closing is found." "I return the line where the closing of the YAML metadata occurs or 0 if no closing is found."
self startsWithYAMLMetadataDelimiter ifFalse: [ ^ self ]. self startsWithYAMLMetadataDelimiter ifFalse: [ ^ self ].
self lines allButFirst doWithIndex: [ :currentLine :i | self lines allButFirst doWithIndex: [ :currentLine :i |
(currentLine beginsWith: self class yamlMetadataDelimiter) ifTrue: [ ^ i + 1 ]] (currentLine beginsWith: self class yamlMetadataDelimiter) ifTrue: [ ^ i + 1 ]]
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
String class >> yamlMetadataDelimiter [ String class >> yamlMetadataDelimiter [
^ '---' ^ '---'
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
String >> yamlMetadataString [ String >> yamlMetadataString [
| output yamlLines | | output yamlLines |
self detectYAMLMetadata ifFalse: [ ^nil ]. self detectYAMLMetadata ifFalse: [ ^nil ].
self lines ifEmpty: [ ^nil ]. self lines ifEmpty: [ ^nil ].
yamlLines := self lines copyFrom: 2 to: self yamlMetadataClosingLineNumber - 1. yamlLines := self lines copyFrom: 2 to: self yamlMetadataClosingLineNumber - 1.
output := '' writeStream. output := '' writeStream.
yamlLines do: [ :line | yamlLines do: [ :line |
output output
nextPutAll: line; nextPutAll: line;
nextPut: Character lf. ]. nextPut: Character lf. ].
^ output contents ^ output contents
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
String >> yamlMetadataStringWithDelimiters [ String >> yamlMetadataStringWithDelimiters [
| output | | output |
self yamlMetadataString ifNil: [ ^ nil ]. self yamlMetadataString ifNil: [ ^ nil ].
output := String new writeStream. output := String new writeStream.
output nextPutAll: self class yamlMetadataDelimiter; cr. output nextPutAll: self class yamlMetadataDelimiter; cr.
output nextPutAll: self yamlMetadataString. output nextPutAll: self yamlMetadataString.
output nextPutAll: self class yamlMetadataDelimiter; cr. output nextPutAll: self class yamlMetadataDelimiter; cr.
^ output contents. ^ output contents.
] ]
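A usage sketch of the YAML front matter helpers on a made up Markdown string (YAML2JSON is expected to be provided by the loaded dependencies):
	| doc |
	doc := '---
title: Example
---
Body text.'.
	doc detectYAMLMetadata.
	doc yamlMetadata.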


@ -1,6 +1,6 @@
Extension { #name : #TeaCompositeRouter } Extension { #name : #TeaCompositeRouter }
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
TeaCompositeRouter >> staticRouters [ TeaCompositeRouter >> staticRouters [
^ routers ^ routers
] ]


@ -1,6 +1,6 @@
Extension { #name : #TeaStaticRouter } Extension { #name : #TeaStaticRouter }
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
TeaStaticRouter >> delegate [ TeaStaticRouter >> delegate [
^ delegate ^ delegate
] ]


@ -1,6 +1,6 @@
Extension { #name : #Teapot } Extension { #name : #Teapot }
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
Teapot >> staticRouter [ Teapot >> staticRouter [
^ staticRouter delegate ^ staticRouter delegate
] ]


@ -1,10 +1,10 @@
Extension { #name : #UnixChromePlatform } Extension { #name : #UnixChromePlatform }
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
UnixChromePlatform class >> defaultExecutableLocations [ UnixChromePlatform class >> defaultExecutableLocations [
^ #( '/opt/google/chrome/chrome' ^ #( '/opt/google/chrome/chrome'
'/usr/bin/chromium-browser' '/usr/bin/chromium-browser'
'/usr/local/share/chromium/chrome' '/usr/local/share/chromium/chrome'
'/usr/bin/chromium' ) '/usr/bin/chromium' )
] ]


@ -1,53 +1,53 @@
Extension { #name : #XMLElement } Extension { #name : #XMLElement }
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
XMLElement >> asSnippetDictionary [ XMLElement >> asSnippetDictionary [
| response | | response |
response := STON fromString: (self attributes at: 'st-data'). response := STON fromString: (self attributes at: 'st-data').
response at: 'className' put: (self attributes at: 'st-class'). response at: 'className' put: (self attributes at: 'st-class').
response at: 'content' put: self sanitizedContent. response at: 'content' put: self sanitizedContent.
^ response ^ response
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
XMLElement >> extractMarkdownImageLinkData [ XMLElement >> extractMarkdownImageLinkData [
| linkParserNodes sanitizedText linkParser | | linkParserNodes sanitizedText linkParser |
linkParser := (PPCommonMarkBlockParser parse: (self contentString trimBoth: [:each | each = Character lf]) allButFirst) linkParser := (PPCommonMarkBlockParser parse: (self contentString trimBoth: [:each | each = Character lf]) allButFirst)
accept: CMBlockVisitor new. accept: CMBlockVisitor new.
linkParserNodes := linkParser children first children. linkParserNodes := linkParser children first children.
linkParserNodes size = 1 linkParserNodes size = 1
ifTrue: [ sanitizedText := linkParserNodes first label text ] ifTrue: [ sanitizedText := linkParserNodes first label text ]
ifFalse: [ sanitizedText := '' writeStream. ifFalse: [ sanitizedText := '' writeStream.
linkParserNodes allButLast linkParserNodes allButLast
do: [ :each | do: [ :each |
each className = 'PPCMText' each className = 'PPCMText'
ifTrue: [ sanitizedText nextPutAll: each text allButFirst ]. ifTrue: [ sanitizedText nextPutAll: each text allButFirst ].
each className = 'PPCMLink' each className = 'PPCMLink'
ifTrue: [ sanitizedText nextPutAll: each printString ] ]. ifTrue: [ sanitizedText nextPutAll: each printString ] ].
sanitizedText := sanitizedText contents ]. sanitizedText := sanitizedText contents ].
^ {sanitizedText . self contentString } ^ {sanitizedText . self contentString }
] ]
{ #category : #'*MiniDocs' } { #category : #'*MiniDocs' }
XMLElement >> sanitizedContent [ XMLElement >> sanitizedContent [
| className sanitizedText | | className sanitizedText |
className := self attributes at: 'st-class'. className := self attributes at: 'st-class'.
className = 'LeTextSnippet' className = 'LeTextSnippet'
ifTrue: [ sanitizedText := self contentString. ifTrue: [ sanitizedText := self contentString.
sanitizedText := sanitizedText allButFirst. sanitizedText := sanitizedText allButFirst.
sanitizedText := sanitizedText allButLast ]. sanitizedText := sanitizedText allButLast ].
className = 'LePharoSnippet' className = 'LePharoSnippet'
ifTrue: [ | joinedText | ifTrue: [ | joinedText |
sanitizedText := self contentString lines. sanitizedText := self contentString lines.
sanitizedText := sanitizedText copyFrom: 4 to: sanitizedText size - 2. sanitizedText := sanitizedText copyFrom: 4 to: sanitizedText size - 2.
joinedText := '' writeStream. joinedText := '' writeStream.
sanitizedText sanitizedText
do: [ :line | do: [ :line |
joinedText joinedText
nextPutAll: line; nextPutAll: line;
nextPut: Character lf ]. nextPut: Character lf ].
sanitizedText := joinedText contents allButLast ]. sanitizedText := joinedText contents allButLast ].
className = 'LePictureSnippet' className = 'LePictureSnippet'
ifTrue: [ sanitizedText := self extractMarkdownImageLinkData ]. ifTrue: [ sanitizedText := self extractMarkdownImageLinkData ].
^ sanitizedText ^ sanitizedText
] ]


@ -1,52 +1,52 @@
" "
The `External` tag refers to this class' dependency on other programming languages and frameworks,
though the dependency should be satisfied by just loading a small binary with no further dependencies.
" "
Class { Class {
#name : #YQ, #name : #YQ,
#superclass : #Object, #superclass : #Object,
#category : #'MiniDocs-External' #category : #'MiniDocs-External'
} }
{ #category : #accessing } { #category : #accessing }
YQ class >> binaryDownloadLinkFor: operativeSystem on: processor [ YQ class >> binaryDownloadLinkFor: operativeSystem on: processor [
| binaryName binaryDownloadData | | binaryName binaryDownloadData |
binaryName := 'yq_', operativeSystem , '_', processor. binaryName := 'yq_', operativeSystem , '_', processor.
binaryDownloadData := ((self lastReleaseData at: 'assets') binaryDownloadData := ((self lastReleaseData at: 'assets')
select: [:each | (each at: 'name') beginsWith: binaryName ]) first. select: [:each | (each at: 'name') beginsWith: binaryName ]) first.
^ binaryDownloadData at: 'browser_download_url' ^ binaryDownloadData at: 'browser_download_url'
] ]
{ #category : #accessing } { #category : #accessing }
YQ class >> binaryFile [ YQ class >> binaryFile [
"Starting with location on Arch Linux and its derivates. Multidistro and multiOS support should be added." "Starting with location on Arch Linux and its derivates. Multidistro and multiOS support should be added."
^ FileLocator root / 'usr/bin/yq' ^ FileLocator root / 'usr/bin/yq'
] ]
{ #category : #accessing } { #category : #accessing }
YQ class >> install [ YQ class >> install [
^ self lastReleaseData ^ self lastReleaseData
] ]
{ #category : #accessing } { #category : #accessing }
YQ class >> jsonToYaml: aDictionary [ YQ class >> jsonToYaml: aDictionary [
| jsonFile | | jsonFile |
self binaryFile exists ifFalse: [ YQ install]. self binaryFile exists ifFalse: [ YQ install].
jsonFile := MarkupFile exportAsFileOn: FileLocator temp / 'data.json' containing: aDictionary. jsonFile := MarkupFile exportAsFileOn: FileLocator temp / 'data.json' containing: aDictionary.
(Smalltalk os isUnix or: [ Smalltalk os isMacOS ]) (Smalltalk os isUnix or: [ Smalltalk os isMacOS ])
ifTrue: [ ifTrue: [
OSSUnixSubprocess new OSSUnixSubprocess new
shellCommand: 'cat ', jsonFile fullName,' | yq -y'; shellCommand: 'cat ', jsonFile fullName,' | yq -y';
redirectStdout; redirectStdout;
runAndWaitOnExitDo: [ :command :outString | runAndWaitOnExitDo: [ :command :outString |
^ outString ^ outString
]]. ]].
Smalltalk os isWindows Smalltalk os isWindows
ifTrue: [ ^ LibC resultOfCommand: 'yq -p=json ', jsonFile fullName ]. ifTrue: [ ^ LibC resultOfCommand: 'yq -p=json ', jsonFile fullName ].
] ]
{ #category : #accessing } { #category : #accessing }
YQ class >> lastReleaseData [ YQ class >> lastReleaseData [
^ (STONJSON ^ (STONJSON
fromString: 'https://api.github.com/repos/mikefarah/yq/releases' asUrl retrieveContents) first fromString: 'https://api.github.com/repos/mikefarah/yq/releases' asUrl retrieveContents) first
] ]
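Usage sketch, assuming the yq binary is already installed at the expected location: convert a small dictionary to YAML through the external tool.
	YQ jsonToYaml: { 'name' -> 'MiniDocs' . 'multiOS' -> true } asDictionary.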


@ -1 +1 @@
Package { #name : #MiniDocs } Package { #name : #MiniDocs }