Here are a few of my mu modules that I use in most of my projects.
What is mu?

readFile.js

'use strict'

const path = require('path')
const fsp = require('fs').promises
const parseFrontMatterJSON = require(`./parseFrontMatterJSON.js`)


/*
 * Read `source` file {string} (path/to/file.ext),
 * parse JSON front matter if it exists,
 * return file {object}
 */

const readFile = async (source, parseFrontMatter = true) => {

    const sourcePath = path.parse(source)
    const sourceStat = await fsp.stat(source)

    if (!sourceStat.isFile()) return 

    const file = {
        source: source,
        created: sourceStat.birthtime, 
        modified: sourceStat.mtime,
        createdMs: sourceStat.birthtimeMs,
        modifiedMs: sourceStat.mtimeMs,
        name: sourcePath.name,
        ext: sourcePath.ext,
        destination: ''
    }

    file.content = await fsp.readFile(source, 'utf8')

    if (parseFrontMatter) {

        const markdownExtensions = ['.md', '.markdown', '.mkdown']

        if (markdownExtensions.includes(file.ext)) {            
            const frontmatter = parseFrontMatterJSON(file.content)
            file.content = frontmatter.body

            // fm is most convenient as properties on the file object
            Object.assign(file, frontmatter.attributes) 

            // but sometimes you need to work with just fm
            // the duplication is wasteful except when it's useful
            file.fm = frontmatter.attributes
        }
    }

    return file
}

module.exports = readFile
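
A quick usage sketch (the posts/hello.md path and its front matter are made up for illustration, and readFile.js is assumed to sit next to the calling script):

'use strict'

const readFile = require('./readFile.js')

// posts/hello.md (hypothetical) starts with:
// ---
// { "title": "Hello", "tags": ["mu"] }
// ---
// # Hello world

const main = async () => {
    const file = await readFile('posts/hello.md')
    console.log(file.title)   // 'Hello' (front matter merged onto the file object)
    console.log(file.fm)      // { title: 'Hello', tags: [ 'mu' ] }
    console.log(file.content) // '# Hello world' (body with the front matter stripped)
}

main()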

readFiles.js

'use strict'

const path = require('path')
const fsp = require('fs').promises
const readFile = require(`./readFile.js`)

// Treat dotfiles (.gitignore, .DS_Store, etc.) as hidden
const isHidden = filename => (/(^|\/)\.[^\/\.]/).test(filename)

/*
 * Read each file in `directory`.
 * return array of file objects.
 */

const readFiles = async (directory) => {

    const files = []
    const names = (await fsp.readdir(directory)).filter(file => !isHidden(file))

    for (let name of names) {
        const file = await readFile(path.join(directory, name))
        if (file) files.push(file)
    }

    console.log(`Read: ${directory} (${files.length} files)`)

    return files
}

module.exports = readFiles
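
Usage is a one-liner; the posts directory below is hypothetical:

'use strict'

const readFiles = require('./readFiles.js')

const main = async () => {
    const files = await readFiles('posts') // hypothetical directory of markdown files
    for (const file of files) {
        console.log(file.name, file.modified)
    }
}

main()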

writeFile.js

'use strict'

const path = require('path')
const fsp = require('fs').promises

const isWritable = file => {
    return file.destination && file.name && file.ext && file.content
}

const writeFile = async (file) => {

    if (!isWritable(file)) {
        throw new Error(`Cannot write file. Make sure it has a name, extension, content and destination.`)
    }

    // With { recursive: true }, mkdir resolves silently if the directory already exists.
    await fsp.mkdir(file.destination, { recursive: true })

    let destination = path.join(file.destination, file.name) + file.ext

    await fsp.writeFile(destination, file.content)

    console.log(`Write: ${destination}`)

} 

module.exports = writeFile
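
A minimal sketch of a writable file object; the public output directory is an assumption here, and it gets created if it doesn't exist:

'use strict'

const writeFile = require('./writeFile.js')

writeFile({
    name: 'hello',
    ext: '.html',
    destination: 'public',          // hypothetical output directory
    content: '<h1>Hello</h1>'
}).catch(console.error)
// -> writes public/hello.html and logs "Write: public/hello.html"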

bundleFiles.js

'use strict'

const path = require('path')
const readFiles = require(`./readFiles.js`)
const writeFile = require(`./writeFile.js`)

/*
 * Read and concatenate the files in the `source` directory,
 * write the result to `destination`.
 */

const bundleFiles = async (source, destination) => {

    const p = path.parse(destination)

    const bundle = {
        name: p.name,
        ext: p.ext,
        destination: p.dir,
        content: ''
    }

    const files = await readFiles(source)

    for (let file of files) { 
        bundle.content += file.content + '\n'
    }

    await writeFile(bundle)
}

module.exports = bundleFiles
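
For example, to concatenate a directory of stylesheets (the paths are made up for illustration):

'use strict'

const bundleFiles = require('./bundleFiles.js')

// Concatenate everything in src/css into a single public/bundle.css
bundleFiles('src/css', 'public/bundle.css').catch(console.error)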

parseFrontMatterJSON.js

'use strict'

/*
 * Parse JSON front matter in `str` {string}.
 * Valid front matter must be at the start of the file between triple dashes.
 * return {object}
 *     `attributes` {object} front matter
 *     `body` {string} content with the front matter removed
 */

const parseFrontMatterJSON = (str) => {

    const fm = {}
    const start = str.indexOf('---\n')
    const end = str.indexOf('\n---')

    // indexOf returns -1 when there is no closing delimiter, so check for it explicitly
    if (start === 0 && end !== -1) {
        fm.attributes = JSON.parse(str.slice(3, end))
        fm.body = str.slice(end + 4).trim()
    }
    else {
        fm.attributes = {}
        fm.body = str
    }

    return fm
}

module.exports = parseFrontMatterJSON
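
For instance, given a string with a JSON block between triple dashes (the post below is invented):

'use strict'

const parseFrontMatterJSON = require('./parseFrontMatterJSON.js')

const post = [
    '---',
    '{ "title": "Hello", "draft": false }',
    '---',
    '# Hello world'
].join('\n')

const { attributes, body } = parseFrontMatterJSON(post)

console.log(attributes) // { title: 'Hello', draft: false }
console.log(body)       // '# Hello world'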

paginateFiles.js

'use strict'

/*
 * Add next and previous links to each file in an array of `files`.
 */

const paginateFiles = (files) => {

    let pageNumber = 1

    for (let [index, page] of files.entries()) {

        if (pageNumber < files.length) {
            page.nextPage = files[index + 1].name
            page.nextPageTitle = files[index + 1].title || page.nextPage
        }

        if (pageNumber > 1) {
            page.previousPage = files[index - 1].name
            page.previousPageTitle = files[index - 1].title || page.previousPage
        }

        page.pageNumber = pageNumber

        pageNumber += 1
    }
}

module.exports = paginateFiles
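
A small sketch with three hypothetical pages (the output of readFiles works just as well):

'use strict'

const paginateFiles = require('./paginateFiles.js')

const pages = [
    { name: 'intro', title: 'Introduction' },
    { name: 'setup' },
    { name: 'usage', title: 'Usage' }
]

paginateFiles(pages)

console.log(pages[1].previousPageTitle) // 'Introduction'
console.log(pages[1].nextPage)          // 'usage'
console.log(pages[1].pageNumber)        // 2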

mergeFiles.js

'use strict'

/*
 * Merge an array of `files` into one or more pages.
 * If `perPage` is undefined, all `files` are merged into a single page.
 * return {array} of file objects
 */

const mergeFiles = (files, perPage) => { 

  const filesPerPage = perPage || files.length 
  const numberOfPages = Math.ceil(files.length / filesPerPage)

  const merged = []
  let pageNumber = 1

  while (pageNumber <= numberOfPages) {
    const page = {
      name: pageNumber === 1 ? 'index' : (pageNumber).toString(),
      content: ''
    }

    const first = (pageNumber - 1) * filesPerPage
    // slice's end index is exclusive, so stop one past the last file on this page
    const last = pageNumber * filesPerPage

    for (let file of files.slice(first, last)) {
      page.content += file.content + '\n'
    }

    merged.push(page)
    pageNumber += 1
  }

  return merged
}

module.exports = mergeFiles
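
For example, merging five hypothetical files three per page produces two pages, index and 2:

'use strict'

const mergeFiles = require('./mergeFiles.js')

const files = [1, 2, 3, 4, 5].map(n => ({ content: `post ${n}` }))

const pages = mergeFiles(files, 3)

console.log(pages.map(page => page.name)) // [ 'index', '2' ]
console.log(pages[0].content)             // 'post 1\npost 2\npost 3\n'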