Resyncing after detachment.
commit 93fc8ffe1c (parent f1f9a7b30c)
@@ -13,14 +13,16 @@ BaselineOfMiniDocs >> baseline: spec [
 "Dependencies"
 self setUpTeapot: spec.
 self setUpPetitParser: spec.
-self setUpLepiterBuildingBlocs: spec.
+"LepiterBuildingBlocs commented while resolving the conflict with the internal gtoolkit renaming."
+"self setUpLepiterBuildingBlocs: spec"
 spec
 baseline: 'Mustache' with: [ spec repository: 'github://noha/mustache' ];
 baseline: 'Temple' with: [ spec repository: 'github://astares/Pharo-Temple/src' ];
-baseline: 'Tealight' with: [ spec repository: 'github://astares/Tealight:main/src' ].
+baseline: 'Tealight' with: [ spec repository: 'github://astares/Tealight:main/src' ];
+baseline: 'DataFrame' with: [ spec repository: 'github://PolyMathOrg/DataFrame/src' ].

 "self fossil: spec."
-"self xmlParserHTML: spec"
+self xmlParserHTML: spec.

 "Packages"
 spec
@@ -31,6 +33,7 @@ BaselineOfMiniDocs >> baseline: spec [
 'Teapot' 'Tealight' "Web server"
 'PetitMarkdown' 'PetitParser' "Parsers"
 'DataFrame' "Tabular data")].
+.

 "Groups"

@@ -90,7 +93,7 @@ BaselineOfMiniDocs >> xmlParserHTML: spec [
 spec
 baseline: 'XMLParserHTML'
 with: [ spec
-repository: 'github://pharo-contributions/XML-XMLParserHTML/src';
+repository: 'github://ruidajo/XML-XMLParserHTML/src';
 loads: #('ALL')];
 import: 'XMLParserHTML'
 ]
@@ -1,5 +1,26 @@
 Extension { #name : #DataFrame }

+{ #category : #'*MiniDocs' }
+DataFrame >> asMarkdown [
+| response |
+response := '' writeStream.
+self columnNames do: [ :name | response nextPutAll: '| ' , name , ' ' ].
+response
+nextPutAll: '|';
+cr.
+self columns size timesRepeat: [ response nextPutAll: '|---' ].
+response
+nextPutAll: '|';
+cr.
+self asArrayOfRows
+do: [ :row |
+row do: [ :cell | response nextPutAll: '| ' , cell asString , ' ' ].
+response
+nextPutAll: '|';
+cr ].
+^ response contents accentedCharactersCorrection withInternetLineEndings.
+]
+
 { #category : #'*MiniDocs' }
 DataFrame >> viewDataFor: aView [
 <gtView>
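A brief usage sketch of the new DataFrame >> asMarkdown; the sample data and the withRows:columnNames: constructor are assumptions about the PolyMath DataFrame API, not part of the commit:

    | frame |
    frame := DataFrame
        withRows: #(#('Ecuador' 'Quito') #('Peru' 'Lima'))
        columnNames: #('country' 'capital').
    frame asMarkdown
    "Expected to answer, roughly:
    | country | capital |
    |---|---|
    | Ecuador | Quito |
    | Peru | Lima |"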
@@ -17,3 +38,9 @@ DataFrame >> viewDataFor: aView [
 ].
 ^ columnedList
 ]
+
+{ #category : #'*MiniDocs' }
+DataFrame >> webView [
+
+^ Pandoc convertString: self asMarkdown from: 'markdown' to: 'html'
+]
@@ -44,14 +44,18 @@ LeDatabase >> addPageFromMarkdeep: markdeepDocTree withRemote: externalDocLocation [
 remoteMetadata at: 'origin' put: externalDocLocation.
 dataSnippets := self sanitizeMarkdeepSnippets: divSnippets withMetadata: remoteMetadata.
 snippets := dataSnippets collect: [ :each | each asLepiterSnippet ].
-page := LePage new
-title: (remoteMetadata at: 'title');
-basicUid: (UUID fromString36: (remoteMetadata at: 'id'));
-createTime: (LeTime new time: (remoteMetadata at: 'created') asDateAndTime);
-editTime: (LeTime new time: (remoteMetadata at: 'modified') asDateAndTime);
-latestEditTime: (LeTime new time: (remoteMetadata at: 'modified') asDateAndTime);
-createEmail: (remoteMetadata at: 'creator');
-editEmail: (remoteMetadata at: 'modifier').
+page := LePage new.
+page
+title: (remoteMetadata at: 'title' ifAbsent: [ page detectMarkdeepTitleFrom: markdeepDocTree ]);
+basicUid: (UUID fromString36: (remoteMetadata at: 'id' ifAbsent: [UUID new asString36]));
+createTime: (LeTime new
+time: (remoteMetadata at: 'created' ifAbsent: [ DateAndTime now]) asDateAndTime);
+editTime: (LeTime new
+time: (remoteMetadata at: 'modified' ifAbsent: [ DateAndTime now]) asDateAndTime);
+latestEditTime: (LeTime new
+time: (remoteMetadata at: 'modified' ifAbsent: [ DateAndTime now]) asDateAndTime);
+createEmail: (remoteMetadata at: 'creator' ifAbsent: [ 'unknown' ]);
+editEmail: (remoteMetadata at: 'modifier' ifAbsent: [ 'unknown' ]).
 snippets do: [ :snippet | "| currentParent |"
 page addSnippet: snippet.
 "currentParent := page detectParentSnippetWithUid: (snippet metadata at: 'parent').
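The at:ifAbsent: guards above are what let a Markdeep page with incomplete front matter still import; in isolation the fallback behaviour looks like this (the sample dictionary is hypothetical):

    | remoteMetadata |
    remoteMetadata := Dictionary new.
    remoteMetadata at: 'title' put: 'Example notes'.
    remoteMetadata at: 'title' ifAbsent: [ 'Untitled' ]. "key present: answers 'Example notes'"
    remoteMetadata at: 'creator' ifAbsent: [ 'unknown' ]. "key missing: answers 'unknown'"
    remoteMetadata at: 'created' ifAbsent: [ DateAndTime now ]. "key missing: answers the current timestamp"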
@@ -184,6 +188,16 @@ LeDatabase >> gtViewErrorDetailsOn: aView withKey: erroKey [
 ].
 ]

+{ #category : #'*MiniDocs' }
+LeDatabase >> importDocumentFrom: aURL [
+| doc |
+"Using file extension in URL as a cheap (non-robust) way of detecting the kind of document.
+Better file type detection should be implemented in the future."
+(aURL endsWith: '.md.html') ifTrue: [ ^ self addPageFromMarkdeepUrl: aURL ].
+doc := HedgeDoc fromLink: aURL asString.
+^ self addPage: doc asLePage
+]
+
 { #category : #'*MiniDocs' }
 LeDatabase >> importErrorForLocal: page withRemote: externalDocLocation [

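A hedged sketch of the new import entry point; the URLs are placeholders and the suffix check is the cheap one shown above:

    | database |
    database := LeDatabase new.
    "a '.md.html' suffix routes to the Markdeep importer"
    database importDocumentFrom: 'https://example.org/docs/manual.md.html'.
    "anything else is treated as a HedgeDoc link"
    database importDocumentFrom: 'https://example.org/pad/project-notes'.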
@@ -42,22 +42,25 @@ LeMiniDocsImport >> createURLeditable [
 text: 'Document link';
 switchToEditor.
 editable when: BrEditorAcceptWish do: [ :aWish |
-self importHedgedoc: aWish text asString.
+self importDocumentFrom: aWish text asString.
 ].
 base addChild: editable.
 ^ base
 ]

+{ #category : #accessing }
+LeMiniDocsImport >> database [
+^ database
+]
+
 { #category : #accessing }
 LeMiniDocsImport >> database: aLeDatabase [
 database := aLeDatabase
 ]

 { #category : #accessing }
-LeMiniDocsImport >> importHedgedoc: aURL [
-| doc |
-doc := HedgeDoc fromLink: aURL asString.
-^ database addPage: doc asLePage
+LeMiniDocsImport >> importDocumentFrom: aURL [
+^ self database importDocumentFrom: aURL.
 ]

 { #category : #accessing }
@@ -26,15 +26,16 @@ LePage >> asMarkdeep [
 markdeep := Markdeep new
 title: self title;
 body: bodyStream contents;
+metadata: self metadata;
 file: self storage / self markdeepFileName;
 navTop: self navTop.
-"self exportMetadataToHead: markdeep."
 self metadata
 at: 'authors'
 ifPresent: [ :author | markdeep metadata at: 'authors' put: author ].
 self metadata
 at: 'version'
 ifPresent: [ :version | markdeep metadata at: 'version' put: version ].
+markdeep head: nil.
 ^ markdeep
 ]

@@ -49,17 +50,12 @@ LePage >> asMarkdown [
 | bodyStream markdown |
 bodyStream := '' writeStream.
 bodyStream
-nextPutAll: '---';
-nextPutAll: String lf.
-self metadata keysAndValuesDo: [ :k :v |
-bodyStream
-nextPutAll: k , ': "' , v, '"';
-nextPutAll: String lf ].
-bodyStream nextPutAll: '---' , String lf , String lf, String lf.
-bodyStream nextPutAll: '# ', self title, String lf , String lf.
+nextPutAll: '# ', self title; cr; cr.
 self preorderTraversal
 do: [ :snippet | bodyStream nextPutAll: snippet asMarkdown ].
-markdown := Markdown new contents: bodyStream contents.
+markdown := Markdown new
+contents: bodyStream contents demoteMarkdownHeaders;
+metadata: (self metadata at: 'original' ifAbsentPut: Dictionary new).
 ^ markdown
 ]

@@ -67,7 +63,21 @@ LePage >> asMarkdown [
 LePage >> asMarkdownFile [
 | folder |
 folder := self storage.
-^ MarkupFile exportAsFileOn: folder / self markdownFileName containing: self asMarkdown contents
+^ MarkupFile exportAsFileOn: folder / self markdownFileName containing: self asMarkdownWithMetadataWrappers contents
+]
+
+{ #category : #'*MiniDocs' }
+LePage >> asMarkdownWithMetadataWrappers [
+| bodyStream markdown |
+bodyStream := '' writeStream.
+bodyStream
+nextPutAll: '# ', self title; cr; cr.
+self preorderTraversal
+do: [ :snippet | bodyStream nextPutAll: snippet asMarkdownWithMetadataWrappers ].
+markdown := Markdown new
+contents: bodyStream contents demoteMarkdownHeaders;
+metadata: (self metadata at: 'original' ifAbsentPut: Dictionary new).
+^ markdown
 ]

 { #category : #'*MiniDocs' }
@@ -85,6 +95,14 @@ LePage >> defaultPandocTemplate [
 ^ FileLocator home / '.pandoc' / 'templates' / 'clean-menu-mod.html'
 ]

+{ #category : #'*MiniDocs' }
+LePage >> detectMarkdeepTitleFrom: xmlSubtree [
+| titleLine |
+titleLine := (xmlSubtree nodesCollect: [:node | node contentString ]) first lines
+detect: [:line | line includesSubstring: ' **'] ifNone: ['Untitled'].
+^ titleLine trimmed trimBoth: [:char | char = $* ]
+]
+
 { #category : #'*MiniDocs' }
 LePage >> detectParentSnippetWithUid: uidString [
 uidString = self uid asString36 ifTrue: [ ^ self ].
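The detection leans on Markdeep's convention of a bold title line; the trimming step on its own behaves roughly like this (the sample line is made up):

    | titleLine |
    titleLine := '            **MiniDocs handbook**'.
    titleLine trimmed trimBoth: [:char | char = $* ] "=> 'MiniDocs handbook'"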
@@ -163,7 +181,7 @@ LePage >> markdownFileName [
 { #category : #'*MiniDocs' }
 LePage >> metadata [

-^ self options at: 'metadata' ifAbsentPut: [ self metadataUpdate]
+^ self metadataUpdate
 ]

 { #category : #'*MiniDocs' }
@@ -173,7 +191,7 @@ LePage >> metadataUpdate [
 at: 'id' put: self uidString;
 at: 'title' put: self contentAsString;
 at: 'created' put: self createTime greaseString;
-at: 'modified' put: self latestEditTime greaseString;
+at: 'modified' put: self getLatestEditTime greaseString;
 at: 'creator' put: self createEmail greaseString;
 at: 'modifier' put: self editEmail greaseString;
 yourself
@@ -188,6 +206,19 @@ LePage >> navTop [
 ifTrue: [ ^ topNavFile contents ]
 ]

+{ #category : #'*MiniDocs' }
+LePage >> olderChild [
+"I provide the last edited child node.
+I'm useful to recalculate the age of a notebook."
+| response|
+response := self preorderTraversal first.
+self preorderTraversal do: [:current |
+current editTime >= response editTime
+ifTrue: [ response := current ]
+].
+^ response
+]
+
 { #category : #'*MiniDocs' }
 LePage >> options [
 ^ options
@@ -287,3 +318,16 @@ LePage >> uiRefreshWebPreviewButtonFor: anAction [
 "TODO: If Chrome/Chromium are not installed, I should execute:"
 "WebBrowser openOn: self page localHostAddress" ]
 ]
+
+{ #category : #'*MiniDocs' }
+LePage >> youngerChild [
+"I provide the first created child node.
+I'm useful to recalculate the age of a notebook."
+| response|
+response := self preorderTraversal first.
+self preorderTraversal do: [:current |
+current createTime <= response createTime
+ifTrue: [ response := current ]
+].
+^ response
+]
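Taken as a pair, youngerChild and olderChild bracket a notebook's lifetime; a hedged sketch of the intended use, assuming a page that already holds snippets:

    | page |
    page := LePage new. "assume snippets have already been added to it"
    page youngerChild createTime. "creation time of the earliest snippet"
    page olderChild editTime. "latest edit time across all snippets"
    "the span between the two is the notebook age the method comments refer to"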
@@ -26,7 +26,7 @@ LePictureSnippet >> asMarkdeep [
 ]

 { #category : #'*MiniDocs' }
-LePictureSnippet >> asMarkdown [
+LePictureSnippet >> asMarkdownWithMetadataWrappers [
 ^ self asMarkdeep
 ]

@@ -2,9 +2,14 @@ Extension { #name : #LeTextSnippet }

 { #category : #'*MiniDocs' }
 LeTextSnippet >> asLePage [
-| page |
+| page title currentSnippet |
+title := self contentAsString markdownHeaders associations first value.
+title := (title trimBoth: [:char | char = $# ]) trimmed.
 page := LePage new
-initializeTitle: self contentAsString.
+initializeTitle: title.
+currentSnippet := LeTextSnippet new
+string: self contentAsString.
+page addSnippet: currentSnippet.
 self database addPage: page.
 self childrenDo: [:child |
 child moveToPageTitled: page title
@@ -24,6 +24,16 @@ LeTextualSnippet >> asMarkdeep [

 { #category : #'*MiniDocs' }
 LeTextualSnippet >> asMarkdown [
+
+| output |
+output := '' writeStream.
+output
+nextPutAll: self contentAsStringCustomized; lf.
+^ output contents
+]
+
+{ #category : #'*MiniDocs' }
+LeTextualSnippet >> asMarkdownWithMetadataWrappers [
 "Inspired by Alpine.js and Assembler CSS 'x-' properties, we are going to use
 'st-' properties as a way to extend divs metadata regarding its contents."
 | output |
@@ -30,7 +30,7 @@ Markdeep class >> fromPubPubTOC: orderedDictionary folder: folder index: ordina
 ]

 { #category : #accessing }
-Markdeep >> asMarkdown [
+Markdeep >> asMarkdownWithMetadataWrappers [
 ^ Markdown new
 metadata: self metadata;
 body: self body;
@@ -294,6 +294,11 @@ Markdeep >> headContents [
 nextPutAll: line;
 nextPut: Character lf
 ].
+self metadata keysAndValuesDo: [:k :v |
+k = 'lang'
+ifTrue: [ stream nextPutAll: ' <meta lang="', v,'">'; cr. ]
+ifFalse: [ stream nextPutAll: ' <meta name="', k, '" content="', v,'">'; cr. ]
+].
 stream
 nextPutAll: '</head>';
 nextPut: Character lf.
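A small sketch of what the new loop adds to the Markdeep head section, using a hypothetical metadata dictionary and the same branching on the 'lang' key:

    | metadata stream |
    metadata := OrderedDictionary new.
    metadata at: 'lang' put: 'es'.
    metadata at: 'creator' put: 'author@example.org'.
    stream := '' writeStream.
    metadata keysAndValuesDo: [:k :v |
    k = 'lang'
    ifTrue: [ stream nextPutAll: ' <meta lang="', v, '">'; cr ]
    ifFalse: [ stream nextPutAll: ' <meta name="', k, '" content="', v, '">'; cr ] ].
    stream contents "one meta tag with a lang attribute, plus one name/content meta tag per remaining key"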
@@ -378,13 +383,6 @@ Markdeep >> options [
 ]
 ]

-{ #category : #printing }
-Markdeep >> printOn: aStream [
-super printOn: aStream.
-aStream
-nextPutAll: '( ', self title, ' )'
-]
-
 { #category : #'instance creation' }
 Markdeep >> processMarkdownFor: aFileReference [
 "comment stating purpose of message"
@@ -149,7 +149,7 @@ Markdown >> fromFile: aFileReference [

 { #category : #'instance creation' }
 Markdown >> fromString: markdownString [
-self metadata: markdownString yamlMetadata.
+(self metadata) at: 'original' put: markdownString yamlMetadata.
 self body: markdownString contentsWithoutYAMLMetadata
 ]

@@ -208,9 +208,11 @@ Markdown >> options [

 { #category : #accessing }
 Markdown >> printOn: aStream [
+| response |
 super printOn: aStream.
+response := self title ifNil: [ 'Untitled' ].
 aStream
-nextPutAll: '( ', self title , ' )'
+nextPutAll: '( ', response , ' )'
 ]

 { #category : #accessing }
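With the nil guard, printing a Markdown document whose title is still nil no longer fails; a quick check (assuming a fresh instance has no title yet):

    Markdown new printString "now ends with '( Untitled )' rather than failing on a nil title"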
@@ -28,6 +28,8 @@ NanoID class >> binaryFile [
 { #category : #accessing }
 NanoID class >> generate [
 self binaryFile exists ifFalse: [ NanoID install].
+Smalltalk os isWindows
+ifTrue: [ ^ (LibC resultOfCommand:self binaryFile fullName) copyWithoutAll: (Character lf asString) ].
 OSSUnixSubprocess new
 command: self binaryFile fullName;
 redirectStdout;
@@ -41,6 +43,8 @@ NanoID class >> install [
 IMPORTANT: Nimble, Nim's package manager should be installed, as this process doesn't verify its proper installation."
 self binaryFile exists ifTrue: [ ^ MiniDocs appFolder ].
 Nimble install: 'nanoid'.
+Smalltalk os isWindows
+ifTrue: [ ^ LibC resultOfCommand: 'nanoid c ',self scriptSourceCode fullName ].
 OSSUnixSubprocess new
 command: 'nim';
 arguments: {'c'. self scriptSourceCode fullName};
|
@ -20,6 +20,8 @@ Nimble class >> detect: packageName [
|
|||||||
Nimble class >> install: packageName [
|
Nimble class >> install: packageName [
|
||||||
(self detect: packageName) ifTrue: [ ^ self ].
|
(self detect: packageName) ifTrue: [ ^ self ].
|
||||||
self installPackagesList.
|
self installPackagesList.
|
||||||
|
Smalltalk os isWindows
|
||||||
|
ifTrue: [ ^ LibC runCommand: 'nimble install ', packageName ].
|
||||||
OSSUnixSubprocess new
|
OSSUnixSubprocess new
|
||||||
command: 'nimble';
|
command: 'nimble';
|
||||||
arguments: {'install'.
|
arguments: {'install'.
|
||||||
@@ -41,11 +43,16 @@ Nimble class >> installPackagesList [

 (FileLocator home / '.nimble' / 'packages_official.json') exists
 ifTrue: [ ^ self ].
-OSSUnixSubprocess new
-command: 'nimble';
-arguments: #('refresh');
-redirectStdout;
-runAndWaitOnExitDo: [ :process :outString | ^ outString ]
+(Smalltalk os isUnix or: [ Smalltalk os isMacOS ])
+ifTrue: [
+OSSUnixSubprocess new
+command: 'nimble';
+arguments: #('refresh');
+redirectStdout;
+runAndWaitOnExitDo: [ :process :outString | ^ outString ].
+].
+Smalltalk os isWindows
+ifTrue: [ ^ LibC resultOfCommand: 'nimble refresh' ]
 ]

 { #category : #accessing }
@@ -10,6 +10,16 @@ Class {
 #category : #'MiniDocs-Core'
 }

+{ #category : #'*MiniDocs' }
+Pandoc class >> convertString: aString from: inputFormat to: outputFormat [
+OSSUnixSubprocess new
+shellCommand: 'echo "', aString , '" | pandoc -f ', inputFormat,' -t ', outputFormat;
+redirectStdout;
+runAndWaitOnExitDo: [ :command :outString |
+^ outString
+].
+]
+
 { #category : #'as yet unclassified' }
 Pandoc class >> downloadLuaFilters [
 self luaFilters do: [ :filter | | filterUrl |
@@ -61,6 +71,17 @@ Pandoc class >> extractImagesInUnixFor: aFileReference withFilter: aLuaFilter [
 ]
 ]

+{ #category : #accessing }
+Pandoc class >> htmlStringToMarkdown: aString [
+
+OSSUnixSubprocess new
+shellCommand: 'echo "', aString , '" | pandoc -f markdown -t html';
+redirectStdout;
+runAndWaitOnExitDo: [ :command :outString |
+^ outString
+].
+]
+
 { #category : #converters }
 Pandoc class >> htmlToMarkdown: inputFile [

src/MiniDocs/Pandoc.extension.st (new file, 11 lines)
@@ -0,0 +1,11 @@
+Extension { #name : #Pandoc }
+
+{ #category : #'*MiniDocs' }
+Pandoc class >> convertString: aString from: inputFormat to: outputFormat [
+OSSUnixSubprocess new
+shellCommand: 'echo "', aString , '" | pandoc -f ', inputFormat,' -t ', outputFormat;
+redirectStdout;
+runAndWaitOnExitDo: [ :command :outString |
+^ outString
+].
+]
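A usage sketch of the new Pandoc bridge, assuming a working pandoc binary on the PATH:

    Pandoc convertString: '# Hello' from: 'markdown' to: 'html'
    "answers an HTML fragment along the lines of an h1 element wrapping Hello"

Because the string is interpolated into a shell echo call, inputs containing double quotes or backticks would need escaping before they reach the shell.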
@@ -51,6 +51,15 @@ String >> deleteYAMLMetadata [
 ^ newContents contents.
 ]

+{ #category : #'*MiniDocs' }
+String >> demoteMarkdownHeaders [
+| response |
+response := self contents lines.
+self markdownHeaders associations allButFirstDo: [ :assoc |
+response at: assoc key put: '#', assoc value ].
+^ response asStringWithCr withInternetLineEndings
+]
+
 { #category : #'*MiniDocs' }
 String >> detectYAMLMetadata [
 | lines |
@@ -62,15 +71,26 @@ String >> detectYAMLMetadata [
 ]

 { #category : #'*MiniDocs' }
-String >> promoteMarkdownHeaders [
-| headers response |
-response := self contents.
-headers := (LeTextSnippet string: response) ast // #LeHeaderNode collect: [ :each | each headerFullName asString ].
-headers do: [ :each |
-response := response copyReplaceAll: each with: each allButFirst ].
+String >> markdownHeaders [
+| response headers |
+headers := (LeTextSnippet string: self contents) ast // #LeHeaderNode collect: [ :each | each headerFullName asString ].
+response := OrderedDictionary new.
+self lines doWithIndex: [:line :index |
+(line beginsWithAnyOf: headers)
+ifTrue: [ response at: index put: line ]
+].
 ^ response
 ]

+{ #category : #'*MiniDocs' }
+String >> promoteMarkdownHeaders [
+| response |
+response := self contents lines.
+self markdownHeaders associationsDo: [ :assoc |
+response at: assoc key put: assoc value allButFirst ].
+^ response asStringWithCr withInternetLineEndings
+]
+
 { #category : #'*MiniDocs' }
 String >> romanizeAccents [
 | modified corrections |
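A rough sketch of how the three String extensions above relate (exact keys depend on the Lepiter header parser):

    | source |
    source := '# Title
    ## Section
    Some prose.'.
    source markdownHeaders. "an OrderedDictionary of line index -> header line, roughly 1 -> '# Title' and 2 -> '## Section'"
    source demoteMarkdownHeaders. "keeps the first header and prepends # to the rest: '## Section' becomes '### Section'"
    source promoteMarkdownHeaders. "drops one leading character from every header line"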
@@ -9,23 +9,45 @@ XMLElement >> asSnippetDictionary [
 ^ response
 ]

+{ #category : #'*MiniDocs' }
+XMLElement >> extractMarkdownImageLinkData [
+| linkParserNodes sanitizedText linkParser |
+linkParser := (PPCommonMarkBlockParser parse: (self contentString trimBoth: [:each | each = Character lf]) allButFirst)
+accept: CMBlockVisitor new.
+linkParserNodes := linkParser children first children.
+linkParserNodes size = 1
+ifTrue: [ sanitizedText := linkParserNodes first label text ]
+ifFalse: [ sanitizedText := '' writeStream.
+linkParserNodes allButLast
+do: [ :each |
+each className = 'PPCMText'
+ifTrue: [ sanitizedText nextPutAll: each text allButFirst ].
+each className = 'PPCMLink'
+ifTrue: [ sanitizedText nextPutAll: each printString ] ].
+sanitizedText := sanitizedText contents ].
+^ {sanitizedText . self contentString }
+]
+
 { #category : #'*MiniDocs' }
 XMLElement >> sanitizedContent [
 | className sanitizedText |
 className := self attributes at: 'st-class'.
-((className = 'LeTextSnippet') or: [className = 'LePictureSnippet'])
-ifTrue: [
-sanitizedText := self contentString.
-sanitizedText := sanitizedText allButFirst.
-sanitizedText := sanitizedText allButLast.
-].
-(className = 'LePharoSnippet') ifTrue: [ | joinedText |
-sanitizedText := self contentString lines.
-sanitizedText := sanitizedText copyFrom: 4 to: sanitizedText size -2.
-joinedText := '' writeStream.
-sanitizedText do: [ :line | joinedText nextPutAll: line; nextPut: Character lf ].
-sanitizedText := joinedText contents allButLast.
-].
+className = 'LeTextSnippet'
+ifTrue: [ sanitizedText := self contentString.
+sanitizedText := sanitizedText allButFirst.
+sanitizedText := sanitizedText allButLast ].
+className = 'LePharoSnippet'
+ifTrue: [ | joinedText |
+sanitizedText := self contentString lines.
+sanitizedText := sanitizedText copyFrom: 4 to: sanitizedText size - 2.
+joinedText := '' writeStream.
+sanitizedText
+do: [ :line |
+joinedText
+nextPutAll: line;
+nextPut: Character lf ].
+sanitizedText := joinedText contents allButLast ].
+className = 'LePictureSnippet'
+ifTrue: [ sanitizedText := self extractMarkdownImageLinkData ].
 ^ sanitizedText

 ]
@@ -33,12 +33,16 @@ YQ class >> jsonToYaml: aDictionary [
 | jsonFile |
 self binaryFile exists ifFalse: [ YQ install].
 jsonFile := MarkupFile exportAsFileOn: FileLocator temp / 'data.json' containing: aDictionary.
-OSSUnixSubprocess new
-shellCommand: 'cat ', jsonFile fullName,' | yq -y';
-redirectStdout;
-runAndWaitOnExitDo: [ :command :outString |
-^ outString
-].
+(Smalltalk os isUnix or: [ Smalltalk os isMacOS ])
+ifTrue: [
+OSSUnixSubprocess new
+shellCommand: 'cat ', jsonFile fullName,' | yq -y';
+redirectStdout;
+runAndWaitOnExitDo: [ :command :outString |
+^ outString
+]].
+Smalltalk os isWindows
+ifTrue: [ ^ LibC resultOfCommand: 'yq -p=json ', jsonFile fullName ].
 ]

 { #category : #accessing }
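The same platform dispatch now guards every external tool call in NanoID, Nimble and YQ; schematically (SomeTool and runExternally: are hypothetical names for the shared shape):

    SomeTool class >> runExternally: command [
    "Hypothetical helper showing the shared Unix/macOS versus Windows branching."
    (Smalltalk os isUnix or: [ Smalltalk os isMacOS ])
    ifTrue: [ OSSUnixSubprocess new
    shellCommand: command;
    redirectStdout;
    runAndWaitOnExitDo: [ :process :outString | ^ outString ] ].
    Smalltalk os isWindows
    ifTrue: [ ^ LibC resultOfCommand: command ]
    ]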
@@ -39,9 +39,8 @@ PPCMLink >> printOn: aStream [
 super initialize.
 ^ aStream
 nextPutAll:
-self label text,
-' -> ',
-self destination
+'[',self label text,']',
+'(',self destination,')'
 ]

 { #category : #accessing }