New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign in · Demo · Install
Socket

@nuxt/content

Package Overview
Dependencies
Maintainers
4
Versions
87
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@nuxt/content - npm Package Compare versions

Comparing version 1.14.0
to
1.15.0

68

lib/database.js
const { join, extname } = require('path')
const fs = require('graceful-fs').promises
const mkdirp = require('mkdirp')
const Hookable = require('hookable')

@@ -21,3 +22,5 @@ const chokidar = require('chokidar')

this.dir = options.dir || process.cwd()
this.cwd = options.cwd || process.cwd()
this.srcDir = options.srcDir || process.cwd()
this.buildDir = options.buildDir || process.cwd()
this.useCache = options.useCache || false
this.markdown = new Markdown(options.markdown)

@@ -30,6 +33,7 @@ this.yaml = new YAML(options.yaml)

// Init collection
this.items = this.db.addCollection('items', {
this.itemsCollectionOptions = {
fullTextSearch: options.fullTextSearchFields.map(field => ({ field })),
nestedProperties: options.nestedProperties
})
}
this.items = this.db.addCollection('items', this.itemsCollectionOptions)
// User Parsers

@@ -63,16 +67,62 @@ this.extendParser = options.extendParser || {}

async init () {
if (this.useCache) {
try {
return await this.initFromCache()
} catch (error) {}
}
await this.initFromFilesystem()
}
/**
* Clear items in database and load files into collection
*/
async init () {
async initFromFilesystem () {
// Time the full parse for the summary log line below
const startTime = process.hrtime()
// Reset state before re-walking: known directories and the items collection
this.dirs = ['/']
this.items.clear()
// Recursively parse every content file under the configured dir into this.items
await this.walk(this.dir)
const [s, ns] = process.hrtime(startTime)
logger.info(`Parsed ${this.items.count()} files in ${s}.${Math.round(ns / 1e8)} seconds`)
}
/**
 * Load documents from a previously saved database dump (see save())
 * instead of re-parsing the content directory.
 * Throws if the cache file is missing or not valid JSON; init() catches
 * that and falls back to a filesystem scan.
 */
async initFromCache () {
const startTime = process.hrtime()
// NOTE(review): this walk looks like leftover old-side text from the diff
// view — loading from cache should not re-parse the filesystem; confirm
// against the upstream source before relying on it.
await this.walk(this.dir)
const cacheFilePath = join(this.buildDir, this.db.filename)
const cacheFileData = await fs.readFile(cacheFilePath, 'utf-8')
const cacheFileJson = JSON.parse(cacheFileData)
// Replace the in-memory database contents with the deserialized dump
this.db.loadJSONObject(cacheFileJson)
// recreate references
this.items = this.db.getCollection('items')
// Rebuild the unique directory list from the restored documents
this.dirs = this.items.mapReduce(doc => doc.dir, dirs => [...new Set(dirs)])
const [s, ns] = process.hrtime(startTime)
// NOTE(review): the two log lines below appear to be an old/new pair from
// the diff view; only the "Loaded" line is expected in the final code — confirm.
logger.info(`Parsed ${this.items.count()} files in ${s},${Math.round(ns / 1e8)} seconds`)
logger.info(`Loaded ${this.items.count()} documents from cache in ${s},${Math.round(ns / 1e8)} seconds`)
}
/**
* Store database info file
* @param {string} [dir] - Directory containing database dump file.
* @param {string} [filename] - Database dump filename.
*/
async save (dir, filename) {
dir = dir || this.buildDir
filename = filename || this.db.filename
await mkdirp(dir)
await fs.writeFile(join(dir, filename), this.db.serialize(), 'utf-8')
}
/**
 * Discard the current database, re-parse all content from the
 * filesystem, and persist the fresh dump to the build directory.
 */
async rebuildCache () {
logger.info('Rebuilding content cache')
// Fresh in-memory Loki instance so no stale documents survive the rebuild
this.db = new Loki('content.db')
this.items = this.db.addCollection('items', this.itemsCollectionOptions)
await this.initFromFilesystem()
// Persist using save()'s defaults: this.buildDir / this.db.filename
await this.save()
}
/**
* Walk dir tree recursively

@@ -151,3 +201,3 @@ * @param {string} dir - Directory to browse.

logger.info(`Updated ${path.replace(this.cwd, '.')}`)
logger.info(`Updated ${path.replace(this.srcDir, '.')}`)
if (document) {

@@ -178,3 +228,3 @@ this.items.update({ $loki: document.$loki, meta: document.meta, ...item })

const extension = extname(path)
// If unkown extension, skip
// If unknown extension, skip
if (!EXTENSIONS.includes(extension) && !this.extendParserExtensions.includes(extension)) {

@@ -212,3 +262,3 @@ return

} catch (err) {
logger.warn(`Could not parse ${path.replace(this.cwd, '.')}:`, err.message)
logger.warn(`Could not parse ${path.replace(this.srcDir, '.')}:`, err.message)
return null

@@ -215,0 +265,0 @@ }

26

lib/index.js
const { join, resolve } = require('path')
const fs = require('graceful-fs').promises
const mkdirp = require('mkdirp')
const defu = require('defu')

@@ -98,7 +97,17 @@ const logger = require('consola').withScope('@nuxt/content')

const useCache = options.useCache && !this.options.dev && this.options.ssr
const database = new Database({
...options,
cwd: this.options.srcDir
srcDir: this.options.srcDir,
buildDir: resolve(this.options.buildDir, 'content'),
useCache
})
if (useCache) {
this.nuxt.hook('builder:prepared', async () => {
await database.rebuildCache()
})
}
// Database hooks

@@ -191,8 +200,3 @@ database.hook('file:beforeInsert', item =>

await mkdirp(dir)
await fs.writeFile(
join(dir, `db-${dbHash}.json`),
database.db.serialize(),
'utf-8'
)
await database.save(dir, `db-${dbHash}.json`)
})

@@ -209,3 +213,3 @@

})
let publicPath = this.options.build.publicPath // can be an url
let publicPath = this.options.build.publicPath // can be a url
let routerBasePath = this.options.router.base

@@ -225,3 +229,3 @@

options: {
// if publicPath is an URL, use public path, if not, add basepath before it
// if publicPath is a URL, use public path, if not, add basepath before it
dbPath: isUrl(publicPath)

@@ -280,3 +284,3 @@ ? `${publicPath}content`

try {
// quick test if the string is an URL
// quick test if the string is a URL
// eslint-disable-next-line no-new

@@ -283,0 +287,0 @@ new URL(string)

@@ -7,2 +7,3 @@ const logger = require('consola').withScope('@nuxt/content')

watch: dev,
useCache: false,
liveEdit: true,

@@ -9,0 +10,0 @@ apiPrefix: '_content',

{
"name": "@nuxt/content",
"version": "1.14.0",
"version": "1.15.0",
"repository": "nuxt/content",

@@ -33,2 +33,3 @@ "license": "MIT",

"js-yaml": "4.0.0",
"json5": "^2.2.0",
"mdast-util-to-hast": "^10.2.0",

@@ -60,3 +61,3 @@ "mkdirp": "^1.0.4",

},
"gitHead": "939caf36c547a6b4af6e303c52ed5989a5dea2f0"
"gitHead": "e2657a02e01402a225556cc667f12ee534364194"
}

@@ -9,3 +9,3 @@ const csv = require('csvtojson')

/**
* Converts csv document to it's JSON structure.
* Converts csv document to its JSON structure.
* @param {string} file - Csv file

@@ -12,0 +12,0 @@ * @return {Object}

@@ -58,3 +58,3 @@ /**

* Root level nodes push to the original parent
* children and doesn't create a new node
* children and don't create a new node
*/

@@ -73,3 +73,3 @@ if (node.type === 'root') {

* We do not use `map` operation, since each node can be expanded to multiple top level
* nodes. Instead, we need a array to fill in as many elements inside a single
* nodes. Instead, we need an array to fill in as many elements inside a single
* iteration

@@ -76,0 +76,0 @@ */

@@ -5,3 +5,3 @@ const utils = exports = module.exports = {}

* Parses the value defined next to 3 back ticks
* in a codeblock and set line-highlights or
* in a codeblock and sets line-highlights or
* filename from it

@@ -8,0 +8,0 @@ *

@@ -117,3 +117,3 @@ const matter = require('gray-matter')

/**
* Converts markdown document to it's JSON structure.
* Converts markdown document to its JSON structure.
* @param {string} file - Markdown file

@@ -120,0 +120,0 @@ * @return {Object}

@@ -9,3 +9,3 @@ const xml = require('xml2js')

/**
* Converts xml document to it's JSON structure.
* Converts xml document to its JSON structure.
* @param {string} file - xml file

@@ -12,0 +12,0 @@ * @return {Object}

@@ -9,3 +9,3 @@ const yaml = require('js-yaml')

/**
* Converts yaml document to it's JSON structure.
* Converts yaml document to its JSON structure.
* @param {string} file - Yaml file

@@ -12,0 +12,0 @@ * @return {Object}

import info from 'property-information'
const rootKeys = ['class-name', 'class', 'style']
const rootKeys = ['class-name', 'class', 'className', 'style']

@@ -135,6 +135,10 @@ const rxOn = /^@|^v-on:/

required: true
},
tag: {
type: String,
default: 'div'
}
},
render (h, { data, props }) {
const { document } = props
const { document, tag } = props
const { body } = document || {}

@@ -155,4 +159,4 @@ if (!body || !body.children || !Array.isArray(body.children)) {

data.props = Object.assign({ ...body.props }, data.props)
return h('div', data, body.children.map(child => processNode(child, h, document)))
return h(tag, data, body.children.map(child => processNode(child, h, document)))
}
}

@@ -6,3 +6,3 @@ import type { Database } from './database';

export interface IContentDocument extends Record<string, any> {
export interface IContentDocumentBase extends Record<string, any> {
dir: string;

@@ -12,2 +12,13 @@ path: string;

slug: string;
createdAt: Date | string;
updatedAt: Date | string;
body?: object;
toc?: {
id: string;
depth: number;
text: string;
}[];
}
export interface IContentDocument extends IContentDocumentBase {
createdAt: Date;

@@ -14,0 +25,0 @@ updatedAt: Date;

@@ -6,3 +6,3 @@ import type { baseParser } from '../base';

/**
* Converts csv document to it's JSON structure.
* Converts csv document to its JSON structure.
* @param file - Csv file

@@ -9,0 +9,0 @@ * @return JSON

@@ -30,3 +30,3 @@ import type { baseParser } from '../base';

/**
* Converts markdown document to it's JSON structure.
* Converts markdown document to its JSON structure.
* @param file - Markdown file

@@ -33,0 +33,0 @@ * @return JSON

@@ -6,3 +6,3 @@ import type { baseParser } from '../base';

/**
* Converts xml document to it's JSON structure.
* Converts xml document to its JSON structure.
* @param file - xml file

@@ -9,0 +9,0 @@ * @return JSON

@@ -6,3 +6,3 @@ import type { baseParser } from '../base';

/**
* Converts yaml document to it's JSON structure.
* Converts yaml document to its JSON structure.
* @param file - Yaml file

@@ -9,0 +9,0 @@ * @return JSON

@@ -1,2 +0,3 @@

import type { IContentDocument } from './content';
import type { IContentDocumentBase } from "./content";
interface QueryBuilderOptions {

@@ -10,2 +11,7 @@ query: any;

/**
 * Result shape of `QueryBuilder.fetch()`.
 * Timestamps are typed as strings here — presumably because documents are
 * JSON-serialized over the content API, unlike the `Date` fields declared
 * on `IContentDocument` — confirm against runtime behavior.
 */
interface FetchReturn extends IContentDocumentBase {
createdAt: string;
updatedAt: string;
}
export class QueryBuilder {

@@ -81,4 +87,4 @@ constructor(

*/
fetch(): Promise<IContentDocument | IContentDocument[]>;
fetch<T>(): Promise<(T & IContentDocument) | (T & IContentDocument)[]>;
fetch(): Promise<FetchReturn | FetchReturn[]>;
fetch<T>(): Promise<(T & FetchReturn) | (T & FetchReturn)[]>;
}

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet