index.js: 145 changes (66 additions & 79 deletions)
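The change drops the custom block tokenizer and fs-based loader in favour of a unist-util-visit transformer: text nodes matching @include are resolved with to-vfile against the cwd option, the including file's directory, and vfile.cwd (trying the bare name, then .md, then .markdown), parsed with the host processor, expanded recursively, and spliced in place of the matched node.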
@@ -1,89 +1,76 @@
-var VFile = require('vfile')
-var path = require('path')
-var fs = require('fs')
-
-var parseInclude = /^@include (.*)(\n|$)/
-
-module.exports = function (options) {
-  var proc = this;
-  options = options || {}
-  var cwd = options.cwd || process.cwd()
-
-  var prt = proc.Parser.prototype
-  prt.blockTokenizers.include = tokenizer
-  prt.blockMethods.unshift('include')
-
-  return function transformer(ast, file) {
-    var children = ast.children
-
-    for (var i = 0; i < children.length; i++) {
-      var child = children[i]
-      if (child.type === 'include') {
-        // Load file and create VFile
-        // console.log(cwd, file)
-        // var file = toFile(path.join(file.dirname || cwd, child.value))
-
-        // Parse vfile contents
-        // var parser = new processor.Parser(file, null, processor)
-        var root = proc.runSync(proc.parse(
-          toFile(path.join(child.source.dirname || cwd, child.value))
-        ))
-
-        // Split and merge the head and tail around the new children
-        var head = children.slice(0, i)
-        var tail = children.slice(i + 1)
-        children = head.concat(root.children).concat(tail)
-
-        // Remember to update the offset!
-        i += root.children.length - 1
+/**
+ * Remark plugin for including external files.
+ */
+const path = require('path')
+const visit = require('unist-util-visit')
+const VFile = require('to-vfile')
+
+const parseInclude = /^@include (.*)(\n|$)/
+
+function loadFile (cwd, vfile, filename) {
+  // Add CWD, VFile dir, and VFile CWD
+  const dirs = [ cwd ]
+    .concat(vfile.history.length > 0
+      ? path.dirname(vfile.history[vfile.history.length - 1])
+      : ''
+    )
+    .concat(vfile.cwd)
+    .filter(Boolean)
+
+  // Create array of filenames
+  const files = dirs
+    .map(dir => [
+      path.resolve(dir, filename),
+      path.resolve(dir, filename + '.md'),
+      path.resolve(dir, filename + '.markdown')
+    ])
+    .flat()
+
+  const ret = files
+    .map(name => {
+      try {
+        return VFile.readSync(name)
+      } catch (e) {
+        return false
       }
-    }
+    })
+    .filter(Boolean)
 
-    ast.children = children
+  if (ret.length < 1) {
+    throw new Error('Unable to include ' + filename)
   }
+
+  return ret[0]
 }
 
-function tokenizer (eat, value, silent) {
-  var self = this
-  var settings = self.options
-  var length = value.length + 1
-  var index = -1
-  var now = eat.now()
-  var node
+function transformer (tree, file, cwd, processor) {
+  visit(tree, [ 'text' ], (node, i, parent) => {
+    if (!parseInclude.test(node.value)) {
+      return
+    }
+
+    const [ , filename ] = node.value.match(parseInclude)
+
+    const vfile = loadFile(cwd, file, filename)
+
+    const root = processor.parse(vfile)
+
+    // Recurse
+    transformer(root, vfile, cwd, processor)
+
+    const { children } = root
+
+    parent.children.splice(i, 1, ...children)
+  })
+}
 
-  if (silent && parseInclude.test(value)) {
-    return true
-  }
+function include (options = {}) {
+  const cwd = options.cwd || process.cwd()
+  const processor = this
 
-  // Replace all lines beginning with @include
-  while (parseInclude.test(value)) {
-    var file = value.match(parseInclude)[1]
-    var frag = '@include ' + file
-    value = value.slice(frag.length)
-    eat(frag)({
-      type: 'include',
-      source: this.file,
-      value: file
-    })
+  return (tree, file) => {
+    transformer(tree, file, cwd, processor)
   }
-
-  return node
 }
 
-function toFile(full) {
-  return new VFile({path: full, contents: loadContent(full).toString('utf8')})
-}
-
-function loadContent(file) {
-  // console.log('loading', file)
-  try { return fs.readFileSync(file) }
-  catch (e) {}
-
-  try { return fs.readFileSync(file + '.md') }
-  catch (e) {}
-
-  try { return fs.readFileSync(file + '.markdown') }
-  catch (e) {}
-
-  throw new Error('Unable to include ' + file)
-}
+module.exports = include
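
For orientation, a minimal usage sketch of the plugin as rewritten above. Only the @include syntax, the cwd option, and the .md/.markdown resolution come from the diff; the require paths, the sample document, and the other-section.md file are illustrative assumptions.

// Usage sketch: assumes the file above is saved as ./index.js and that a
// sibling file other-section.md exists (both are assumptions, not part of the PR).
const remark = require('remark')
const include = require('./index.js')

const output = remark()
  .use(include, { cwd: __dirname }) // cwd option from the diff; defaults to process.cwd()
  .processSync('# Intro\n\n@include other-section\n')

// loadFile() resolves "other-section" to other-section.md, its parsed children
// replace the @include text node, and the result is stringified back to markdown.
console.log(String(output))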