Comparing version 1.0.0 to 2.0.0
index.js
'use strict'; | ||
var through = require('through'); | ||
var os = require('os'); | ||
var gutil = require('gulp-util'); | ||
var PluginError = gutil.PluginError; | ||
var File = gutil.File; | ||
var _ = require('lodash'); | ||
var path = require('path'); | ||
var extend = require('lodash').extend; | ||
/** | ||
* Convert text files to a website with nice urls, extra file.data and a website.map tree of content | ||
* Add url, root, parent, siblings and children properties to files, reflecting the structure of | ||
* the directories. Each property points to other file objects. | ||
* | ||
* @param object website An object to attach the map to, reference added to each file | ||
* Values are assigned to the `data` property of each file. Follows `gulp-data` convention so | ||
* won't override any other data already added to the file. | ||
* | ||
* @param string options.baseUrl The base url of the final website | ||
* @param string options.sort The property to sort by | ||
* @param array options.sectionProperties List of properties to copy from index file to section | ||
* @return stream | ||
*/ | ||
module.exports = function(website, options) { | ||
website = website || {}; | ||
options = _.extend({ | ||
module.exports = function(options) { | ||
var buffer = {}; | ||
options = extend({ | ||
baseUrl: '', | ||
sort: 'url', | ||
sectionProperties: [] | ||
sort: 'url' | ||
}, options || {}); | ||
// remove trailing slash from baseUrl | ||
if (options.baseUrl && options.baseUrl.length > 1 && options.baseUrl.substr(-1) === '/') { | ||
options.baseUrl = options.baseUrl.substr(0, options.baseUrl.length - 1); | ||
} | ||
// Normalize trailing slash on base URL | ||
options.baseUrl = options.baseUrl.replace(/\/$/, '') + '/'; | ||
var buffer = []; | ||
return through(bufferContents, endStream); | ||
/** | ||
* Rename each file and add properties to `data` | ||
* Add URL and buffer all files up into an object | ||
* | ||
* @param object file | ||
* @param {object} file | ||
*/ | ||
@@ -49,24 +44,9 @@ function bufferContents(file) { | ||
} | ||
var basename = path.basename(file.relative, path.extname(file.path)), | ||
isIndex = basename === 'index', | ||
originalDir = rename(file), | ||
isHome = isIndex && originalDir === '.', | ||
fileUrl = isHome ? options.baseUrl + '/' : url(file, options.baseUrl); | ||
file.data = _.extend({ | ||
website: website, | ||
name: basename, | ||
isIndex: isIndex, | ||
isHome: isHome, | ||
url: fileUrl, | ||
sectionUrl: sectionUrl(fileUrl, isIndex) | ||
}, file.data || {}); | ||
buffer.push(file); | ||
var fileUrl = url(file); | ||
file.data = extend({ url: fileUrl }, file.data || {}); | ||
buffer[fileUrl] = file; | ||
} | ||
/** | ||
* At the end of the stream build the website map, sort, then emit the file data. | ||
* This ensures the full map is built before the next pipe sees the file. | ||
* Emit file data at end of stream | ||
*/ | ||
@@ -78,25 +58,15 @@ function endStream() { | ||
if (options.sort) { | ||
buffer.sort(function(a, b) { | ||
var aDepth = a.data.url.split('/').length; | ||
var bDepth = b.data.url.split('/').length; | ||
if (aDepth < bDepth) { | ||
return -1; | ||
} | ||
if (bDepth < aDepth) { | ||
return 1; | ||
} | ||
if (a.isIndex) { | ||
return -1; | ||
} | ||
Object.keys(buffer).forEach(function(url) { | ||
var file = buffer[url]; | ||
file.data = extend({ | ||
root: buffer['/'] || null, | ||
parent: parent(url), | ||
children: children(url), | ||
siblings: siblings(url) | ||
}, file.data || {}); | ||
return a.data[options.sort] >= b.data[options.sort] ? 1 : -1; | ||
}); | ||
} | ||
}.bind(this)); | ||
website.map = treeify(options.baseUrl, buffer); | ||
addSectionToFiles(website.map); | ||
buffer.forEach(function(file) { | ||
this.emit('data', file); | ||
Object.keys(buffer).forEach(function(url) { | ||
this.emit('data', buffer[url]); | ||
}.bind(this)); | ||
@@ -108,156 +78,87 @@ | ||
/** | ||
* Copy options.sectionProperties from file data to section | ||
* Get the parent file for a given URL from the buffer | ||
* | ||
* @param object data | ||
* @return object | ||
* @param {string} url | ||
*/ | ||
function copySectionProperties(data) { | ||
if (typeof options.sectionProperties.forEach !== 'function') { | ||
return; | ||
function parent(url) { | ||
if (url === options.baseUrl) { | ||
return null; | ||
} | ||
var props = {}; | ||
options.sectionProperties.forEach(function(prop) { | ||
if (typeof data[prop] !== 'undefined') { | ||
props[prop] = data[prop]; | ||
} | ||
}); | ||
return props; | ||
return buffer[parentUrl(url)] || null; | ||
} | ||
/** | ||
* Converts flat files into a tree structure of sections | ||
* | ||
* @param string baseUrl | ||
* @return object | ||
*/ | ||
function treeify(baseUrl) { | ||
var currentList, | ||
foundAtIndex, | ||
baseUrlReplace = new RegExp('^' + baseUrl), | ||
sectionsToFiles = mapSectionsToFiles(buffer), | ||
contentTree = { | ||
sections: [], | ||
files: sectionsToFiles[baseUrl + '/'] | ||
}; | ||
buffer.forEach(function(file) { | ||
if (file.data.isHome) { | ||
contentTree.name = 'root'; | ||
contentTree.url = file.data.url; | ||
contentTree = _.extend(contentTree, copySectionProperties(file.data)); | ||
return; | ||
} | ||
if (!file.data.isIndex) { | ||
return; | ||
} | ||
currentList = contentTree.sections; | ||
file.data.url.replace(baseUrlReplace, '').split('/').filter(function(t) { | ||
return t !== ''; | ||
}).forEach(function(token, index) { | ||
foundAtIndex = -1; | ||
currentList.forEach(function(item, index) { | ||
if (item.name === token) { | ||
foundAtIndex = index; | ||
currentList = currentList[index].sections; | ||
} | ||
}); | ||
if (foundAtIndex === -1) { | ||
currentList.push(_.extend({ | ||
name: token, | ||
url: file.data.url, | ||
sections: [], | ||
files: sectionsToFiles[file.data.sectionUrl] | ||
}, copySectionProperties(file.data))); | ||
currentList = currentList[currentList.length-1].sections; | ||
} | ||
}); | ||
}); | ||
return contentTree; | ||
function parentUrl(url) { | ||
return url | ||
.replace(/\..+$/, '') | ||
.replace(/\/$/, '') | ||
.split('/') | ||
.slice(0, -1) | ||
.join('/') + '/'; | ||
} | ||
/** | ||
* Map each section URL to a list of files | ||
* Get the child files for a given URL from the buffer | ||
* | ||
* @return object | ||
* @param {string} url | ||
*/ | ||
function mapSectionsToFiles() { | ||
var map = {}; | ||
buffer.forEach(function(file) { | ||
if (typeof map[file.data.sectionUrl] === 'undefined') { | ||
map[file.data.sectionUrl] = []; | ||
} | ||
map[file.data.sectionUrl].push(file); | ||
}); | ||
function children(url) { | ||
// Filter to find files with this url as parent | ||
var ch = filter(new RegExp('^' + url + '[^/]+/?$')); | ||
sort(ch); | ||
return map; | ||
return ch; | ||
} | ||
/** | ||
* Give each file data a reference back to its section | ||
* Get the sibling files for a given URL from the buffer | ||
* | ||
* @param object map The website map | ||
* @param {string} url | ||
*/ | ||
function addSectionToFiles(map) { | ||
if (!map.files.length) { | ||
return; | ||
function siblings(url) { | ||
if (url === options.baseUrl) { | ||
return []; | ||
} | ||
map.files.forEach(function(file) { | ||
file.data.section = map; | ||
if (!map.sections.length) { | ||
return; | ||
} | ||
}); | ||
// Recurse over nested sections | ||
map.sections.forEach(function(section) { | ||
addSectionToFiles(section); | ||
}); | ||
// Filter to find files with same parent URL | ||
var sb = filter(new RegExp('^' + parentUrl(url) + '[^/]+/?$')); | ||
sort(sb); | ||
return sb; | ||
} | ||
/** | ||
* Rename the file to path/to/index.html | ||
* Generate a URL for the file, adding base url and trimming any index.html | ||
* or index.htm | ||
* | ||
* @param object file | ||
* @return string The original directory name | ||
* @param {object} file | ||
*/ | ||
function rename(file) { | ||
var dirname = path.dirname(file.relative), | ||
basename = path.basename(file.relative, path.extname(file.relative)); | ||
file.path = file.base + | ||
(basename !== 'index' ? dirname + '/' + basename : dirname) + | ||
'/index.html'; | ||
return dirname; | ||
function url(file) { | ||
return options.baseUrl + file.relative.replace(/index\..+$/, ''); | ||
} | ||
/** | ||
* Generate URL from renamed path | ||
* Sort an array of files on `options.sort` property of `data` | ||
* | ||
* @param object file | ||
* @param string baseUrl | ||
* @return string url | ||
* @param {array} files | ||
*/ | ||
function url(file, baseUrl) { | ||
var dirname = path.dirname(file.relative).replace(/\\/g, '/'); | ||
return baseUrl + '/' + dirname.replace(/^\.\//, '') + '/'; | ||
function sort(files) { | ||
if (!options.sort) { | ||
return; | ||
} | ||
files.sort(function(a, b) { | ||
return a.data[options.sort] >= b.data[options.sort] ? 1 : -1; | ||
}); | ||
} | ||
/** | ||
* Generate a section URL from file url | ||
* | ||
* @param object file | ||
* @return string url | ||
* Filter buffer to return array of files with URLs matching given regex | ||
*/ | ||
function sectionUrl(url, isIndex) { | ||
return isIndex ? url : url.split('/').slice(0, -2).join('/') + '/'; | ||
function filter(rx) { | ||
return Object.keys(buffer).reduce(function(files, val) { | ||
if (rx.test(val)) { | ||
files.push(buffer[val]); | ||
} | ||
return files; | ||
}, []); | ||
} | ||
}; |
{ | ||
"name": "gulp-ssg", | ||
"version": "1.0.0", | ||
"version": "2.0.0", | ||
"main": "index.js", | ||
"homepage": "https://github.com/paulwib/gulp-ssg", | ||
"bugs": "https://github.com/paulwib/gulp-ssg/issues", | ||
"description": "Generate a static site with gulpjs", | ||
"description": "Generate a static website content tree", | ||
"directories": { | ||
@@ -20,3 +20,3 @@ "test": "test" | ||
"dependencies": { | ||
"lodash": "~2.4.1", | ||
"lodash": "~3.5.0", | ||
"gulp-util": "~3.0.1", | ||
@@ -27,11 +27,11 @@ "through": "~2.3.4" | ||
"chai": "~1.9.0", | ||
"event-stream": "^3.1.7", | ||
"front-matter": "^0.2.0", | ||
"gulp": "~3.8.9", | ||
"gulp-data": "^1.0.2", | ||
"del": "^1.1.1", | ||
"gray-matter": "^1.3.0", | ||
"gulp-data": "^1.2.0", | ||
"gulp-markdown": "^1.0.0", | ||
"gulp-rename": "^1.2.0", | ||
"gulp-wrap": "^0.11.0", | ||
"hogan.js": "^3.0.2", | ||
"marked": "^0.3.2", | ||
"mocha": "~2.0.1", | ||
"mversion": "^1.6.1", | ||
"should": "~4.1.0" | ||
"mversion": "^1.6.1" | ||
}, | ||
@@ -38,0 +38,0 @@ "engines": { |
README.md
@@ -1,5 +0,5 @@ | ||
gulp-ssg [![NPM version][npm-image]][npm-url] [![Dependency Status][depstat-image]][depstat-url] [![Build Status][travis-image]][travis-url] | ||
[gulp][]-ssg [![NPM version][npm-image]][npm-url] [![Dependency Status][depstat-image]][depstat-url] [![Build Status][travis-image]][travis-url] | ||
=== | ||
A [gulp][] plugin to generate a static site. | ||
A [gulp][] plugin to help generate a static website from a bunch of files. | ||
@@ -16,9 +16,6 @@ ## Installation | ||
var ssg = require('gulp-ssg'); | ||
var website = { | ||
title: 'My site' | ||
}; | ||
gulp.task('html', function() { | ||
return gulp.src('content/**/*.md') | ||
.pipe(ssg(website)) | ||
return gulp.src('content/**/*.html') | ||
.pipe(ssg()) | ||
.pipe(gulp.dest('public/')); | ||
@@ -28,37 +25,19 @@ }); | ||
This will rename the files so they have pretty URLs e.g. | ||
This will add properties to each file's `data` property: | ||
content/index.md -> public/index.html | ||
content/foo.md -> public/foo/index.html | ||
content/bar/index.md -> public/bar/index.html | ||
content/bar/hello.md -> public/bar/hello/index.html | ||
* `file.data.url` - A URL, which is the `file.relative` with a slash prepended and any trailing `index.html` removed | ||
* `file.data.root` - A pointer to the root file | ||
* `file.data.parent` - A pointer to the parent file | ||
* `file.data.children` - An array of pointers to child files | ||
* `file.data.siblings` - An array of pointers to sibling files | ||
It will add properties to each file's `data` property: | ||
To explain these a bit more: | ||
* `file.data.url` - `string` The full page URL | ||
* `file.data.isHome` - `boolean` Is it the root index page? | ||
* `file.data.isIndex` - `boolean` Is it a directory index page? | ||
* `file.data.sectionUrl` - `string` The URL of the section this page is in | ||
* `file.data.section` - `object` A pointer to the section in the website map | ||
* `file.data.website` - `object` The original passed in website object | ||
* `file.data.website.map` - `object` A map of all the files | ||
* The `root` file is the root `index.html` file. If there isn't one then `root` will be `null`. | ||
* The `parent` file is the parent `index.html` file. If there isn't one then `parent` will be `null`. | ||
* The `children` are all the files that have a URL that starts with the current file's path plus at least one more token in their path. Because `index.html` is truncated from URLs this means `/foo/bar/` and `/foo/fred.html` are both children of `/foo/index.html`. | ||
* The `siblings` are all the files that have a common parent URL (a short sketch of using these pointers follows below). | ||
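For illustration, here is a minimal sketch (not part of the plugin docs) of reading these pointers in a later pipe with [gulp-data][] to build a navigation array; the `title` property is an assumption, e.g. added earlier from front-matter:

```javascript
var gulp = require('gulp');
var ssg = require('gulp-ssg');
var data = require('gulp-data');

gulp.task('html', function() {
    return gulp.src('content/**/*.html')
        .pipe(ssg())
        // ssg() emits files only once the whole tree is built, so the
        // pointers are safe to read in any later pipe.
        .pipe(data(function(file) {
            var children = file.data.children || [];
            return {
                // Hypothetical `nav` array for a template; `title` is assumed
                // to have been added by an earlier front-matter step.
                nav: children.map(function(child) {
                    return { url: child.data.url, title: child.data.title };
                })
            };
        }))
        .pipe(gulp.dest('public/'));
});
```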
The `file.data.website.map` represents a tree map of all files in the website. This can be used for things like generating global navigation, or making a single-page website. It looks like: | ||
This plug-in follows the [gulp-data][] convention of using `file.data`, so anything returned from a `gulp-data` pipe will be merged with the properties above. | ||
```javascript | ||
{ | ||
name: 'root', | ||
url: '/', | ||
files: [<index.html>, <foo/index.html> ] // All files in this section | ||
sections: [ | ||
{ | ||
name: 'bar', | ||
url: '/bar/', | ||
files: [<bar/index.html>, <bar/foo/index.html>] | ||
} | ||
] | ||
} | ||
``` | ||
Also each file has a reference back to its section in the tree, so it's possible to generate sub-navigation too with `file.data.section.files`. | ||
## Example | ||
@@ -69,50 +48,45 @@ | ||
```javascript | ||
var ssg = require('../'); | ||
var gulp = require('gulp'); | ||
var ssg = require('gulp-ssg'); | ||
var rename = require('gulp-rename'); | ||
var data = require('gulp-data'); | ||
var fm = require('front-matter'); | ||
var marked = require('marked'); | ||
var fs = require('fs'); | ||
var es = require('event-stream'); | ||
var hogan = require('hogan.js'); | ||
var matter = require('gray-matter'); | ||
var markdown = require('gulp-markdown'); | ||
var wrap = require('gulp-wrap'); | ||
var del = require('del'); | ||
var website = { | ||
title: 'My site' | ||
}; | ||
gulp.task('default', function() { | ||
gulp.task('html', function() { | ||
return gulp.src('src/content/*.md') | ||
// Compile a template for rendering each page | ||
var template = hogan.compile(String(fs.readFileSync('templates/template.html'))); | ||
return gulp.src('content/**/*.md') | ||
// Extract YAML front-matter, convert content to markdown via gulp-data | ||
// Extract YAML front-matter and assign with gulp-data | ||
.pipe(data(function(file) { | ||
var content = fm(String(file.contents)); | ||
file.contents = new Buffer(marked(content.body)); | ||
return content.attributes; | ||
var m = matter(String(file.contents)); | ||
file.contents = new Buffer(m.content); | ||
return m.data; | ||
})) | ||
// Run through gulp-ssg, copy title from YAML to section | ||
.pipe(ssg(website, { sectionProperties: ['title'] })) | ||
// markdown -> HTML | ||
.pipe(markdown()) | ||
// Run each file through a template | ||
.pipe(es.map(function(file, cb) { | ||
file.contents = new Buffer(template.render(file)); | ||
cb(null, file); | ||
})) | ||
// Rename to .html | ||
.pipe(rename({ extname: '.html' })) | ||
// Run through gulp-ssg | ||
.pipe(ssg()) | ||
// Wrap file in template | ||
.pipe(wrap( | ||
{ src: 'src/templates/template.html' }, | ||
{ siteTitle: 'Example Website'}, | ||
{ engine: 'hogan' } | ||
)) | ||
// Output to build directory | ||
.pipe(gulp.dest('build/')); | ||
.pipe(gulp.dest('public/')); | ||
}); | ||
``` | ||
This plug-in follows the [gulp-data][] convention of using `file.data`, so anything returned from a `gulp-data` pipe will be merged with the properties above. | ||
There are [complete examples with templates](https://github.com/paulwib/gulp-ssg/tree/master/example) in the git repo. | ||
## Caveats | ||
* Each directory *must* contain a file with a base name of `index` (e.g. `index.md`) to have the site index fully traversed. | ||
## Options | ||
@@ -126,9 +100,5 @@ | ||
A property to sort pages by, defaults to `url`. For example, this could be a property like `order` extracted from the YAML front-matter, giving content editors full control over the order of pages. | ||
A property to sort pages by, defaults to `url`. For example, this could be a property like `order` extracted from the YAML front-matter. | ||
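As a rough sketch (assuming front-matter like `order: 2` in each content file, extracted with [gulp-data][] and gray-matter as in the example above), sorting by `order` might look like:

```javascript
var gulp = require('gulp');
var ssg = require('gulp-ssg');
var data = require('gulp-data');
var matter = require('gray-matter');

gulp.task('html', function() {
    return gulp.src('content/**/*.html')
        // Extract YAML front-matter (e.g. `order: 2`) into file.data
        .pipe(data(function(file) {
            var m = matter(String(file.contents));
            file.contents = new Buffer(m.content);
            return m.data;
        }))
        // children and siblings arrays will now be sorted by `order`
        .pipe(ssg({ sort: 'order' }))
        .pipe(gulp.dest('public/'));
});
```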
### sectionProperties `array` | ||
A list of properties to extract from index pages to add to the section, defaults to an empty list. For example, you could add a `sectionTitle` to front-matter in your `index.md` files, then use it for link text in your global navigation. | ||
[gulp]:http://gulpjs.com | ||
@@ -135,0 +105,0 @@ [gulp-data]:https://github.com/colynb/gulp-data |
test/test.js
@@ -5,3 +5,2 @@ 'use strict'; | ||
var expect = require('chai').expect; | ||
var should = require('should'); | ||
var fs = require('fs'); | ||
@@ -12,437 +11,266 @@ var path = require('path'); | ||
function getMarkdownFile(path, content) { | ||
return new File({ | ||
cwd: '', | ||
base: 'test/', | ||
path: path, | ||
contents: new Buffer(content) | ||
}); | ||
} | ||
describe('gulp-ssg()', function() { | ||
describe('in buffer mode', function() { | ||
/* jshint camelcase: false */ | ||
it('should rename indexes to path/index.html', function(done) { | ||
var stream = ssg({}); | ||
var file = getMarkdownFile('test/index.md', 'test'); | ||
stream.on('end', function() { | ||
var newFilePath = path.resolve(file.path); | ||
var expectedFilePath = path.resolve('test/index.html'); | ||
newFilePath.should.equal(expectedFilePath); | ||
file.relative.should.equal('index.html'); | ||
Buffer.isBuffer(file.contents).should.equal(true); | ||
done(); | ||
}); | ||
stream.write(file); | ||
stream.end(); | ||
function mockFile(path, content) { | ||
content = content || 'test'; | ||
return new File({ | ||
cwd: '', | ||
base: 'test/', | ||
path: path, | ||
contents: new Buffer(content) | ||
}); | ||
} | ||
it('should rename non-indexes to path/basename/index.html', function(done) { | ||
var stream = ssg({}); | ||
var file = getMarkdownFile('test/hello.md', 'test'); | ||
describe('in buffer mode', function() { | ||
stream.on('end', function() { | ||
var newFilePath = path.resolve(file.path); | ||
var expectedFilePath = path.resolve('test/hello/index.html'); | ||
newFilePath.should.equal(expectedFilePath); | ||
file.relative.should.equal(path.normalize('hello/index.html')); | ||
Buffer.isBuffer(file.contents).should.equal(true); | ||
done(); | ||
}); | ||
it('should assign urls, truncating "index" (regardless of file extension)', function(done) { | ||
var stream = ssg(); | ||
var h = mockFile('test/index.md'); | ||
var p1 = mockFile('test/hello.xhtml'); | ||
var p2 = mockFile('test/foo/index.html'); | ||
var p2_1 = mockFile('test/foo/bar.xml'); | ||
stream.write(file); | ||
stream.end(); | ||
}); | ||
it('should assign booleans for isHome and isIndex', function(done) { | ||
var website = {}; | ||
var stream = ssg(website); | ||
var home = getMarkdownFile('test/index.md', 'home'); | ||
var page = getMarkdownFile('test/hello.md', 'page'); | ||
var sectionIndex = getMarkdownFile('test/foo/index.md', 'index'); | ||
var sectionPage = getMarkdownFile('test/foo/bar.md', 'section page'); | ||
stream.on('end', function() { | ||
expect(home.data.isHome).to.be.true; | ||
expect(home.data.isIndex).to.be.true; | ||
expect(page.data.isHome).to.be.false; | ||
expect(page.data.isIndex).to.be.false; | ||
expect(sectionIndex.data.isHome).to.be.false; | ||
expect(sectionIndex.data.isIndex).to.be.true; | ||
expect(sectionPage.data.isHome).to.be.false; | ||
expect(sectionPage.data.isIndex).to.be.false; | ||
expect(h.data.url).to.equal('/'); | ||
expect(p1.data.url).to.equal('/hello.xhtml'); | ||
expect(p2.data.url).to.equal('/foo/'); | ||
expect(p2_1.data.url).to.equal('/foo/bar.xml'); | ||
done(); | ||
}); | ||
stream.write(home); | ||
stream.write(page); | ||
stream.write(sectionIndex); | ||
stream.write(sectionPage); | ||
stream.write(h); | ||
stream.write(p1); | ||
stream.write(p2); | ||
stream.write(p2_1); | ||
stream.end(); | ||
}); | ||
it('should assign a name unique within the section', function(done) { | ||
var website = {}; | ||
var stream = ssg(website); | ||
var home = getMarkdownFile('test/index.md', 'home'); | ||
var page = getMarkdownFile('test/hello.md', 'page'); | ||
var sectionIndex = getMarkdownFile('test/foo/index.md', 'index'); | ||
var sectionPage = getMarkdownFile('test/foo/bar.md', 'section page'); | ||
it('should give each file a pointer to the root', function(done) { | ||
var stream = ssg(); | ||
var h = mockFile('test/index.html'); | ||
var p1 = mockFile('test/hello.html'); | ||
var p2 = mockFile('test/foo/index.html'); | ||
var p2_1 = mockFile('test/foo/bar.html'); | ||
stream.on('end', function() { | ||
expect(home.data.name).to.equal('index'); | ||
expect(page.data.name).to.equal('hello'); | ||
expect(sectionIndex.data.name).to.equal('index'); | ||
expect(sectionPage.data.name).to.equal('bar'); | ||
expect(h.data.root.data.url).to.equal('/'); | ||
expect(p1.data.root.data.url).to.equal('/'); | ||
expect(p2.data.root.data.url).to.equal('/'); | ||
expect(p2_1.data.root.data.url).to.equal('/'); | ||
done(); | ||
}); | ||
stream.write(home); | ||
stream.write(page); | ||
stream.write(sectionIndex); | ||
stream.write(sectionPage); | ||
stream.write(h); | ||
stream.write(p1); | ||
stream.write(p2); | ||
stream.write(p2_1); | ||
stream.end(); | ||
}); | ||
it('should not override properties assigned to the site', function(done) { | ||
var website = { title: 'My Site' }; | ||
var stream = ssg(website); | ||
var file1 = getMarkdownFile('test/index.md', 'home'); | ||
it('should give each file a pointer to its parent', function(done) { | ||
var stream = ssg(); | ||
var h = mockFile('test/index.html'); | ||
var p1 = mockFile('test/hello.html'); | ||
var p2 = mockFile('test/foo/index.html'); | ||
var p2_1 = mockFile('test/foo/bar.html'); | ||
stream.on('end', function() { | ||
expect(website.title).to.equal('My Site'); | ||
expect(h.data.parent).to.equal(null); | ||
expect(p1.data.parent.data.url).to.equal('/'); | ||
expect(p2.data.parent.data.url).to.equal('/'); | ||
expect(p2_1.data.parent.data.url).to.equal('/foo/'); | ||
done(); | ||
}); | ||
stream.write(file1); | ||
stream.write(h); | ||
stream.write(p1); | ||
stream.write(p2); | ||
stream.write(p2_1); | ||
stream.end(); | ||
}); | ||
it('should assign urls', function(done) { | ||
var website = {}; | ||
var stream = ssg(website); | ||
var home = getMarkdownFile('test/index.md', 'home'); | ||
var page = getMarkdownFile('test/hello.md', 'page'); | ||
var sectionIndex = getMarkdownFile('test/foo/index.md', 'section index'); | ||
var sectionPage = getMarkdownFile('test/foo/bar.md', 'section page'); | ||
it('should give each file a pointer to its children', function(done) { | ||
var stream = ssg(); | ||
var h = mockFile('test/index.html'); | ||
var p1 = mockFile('test/hello.html'); | ||
var p2 = mockFile('test/foo/index.html'); | ||
var p2_1 = mockFile('test/foo/bar.html'); | ||
stream.on('end', function() { | ||
expect(home.data.url).to.equal('/'); | ||
expect(page.data.url).to.equal('/hello/'); | ||
expect(sectionIndex.data.url).to.equal('/foo/'); | ||
expect(sectionPage.data.url).to.equal('/foo/bar/'); | ||
expect(h.data.children[0].data.url).to.equal('/foo/'); | ||
expect(h.data.children[1].data.url).to.equal('/hello.html'); | ||
expect(p1.data.children.length).to.equal(0); | ||
expect(p2.data.children[0].data.url).to.equal('/foo/bar.html'); | ||
expect(p2_1.data.children.length).to.equal(0); | ||
done(); | ||
}); | ||
stream.write(home); | ||
stream.write(page); | ||
stream.write(sectionIndex); | ||
stream.write(sectionPage); | ||
stream.write(h); | ||
stream.write(p1); | ||
stream.write(p2); | ||
stream.write(p2_1); | ||
stream.end(); | ||
}); | ||
it('should assign section urls', function(done) { | ||
var website = {}; | ||
var stream = ssg(website); | ||
var home = getMarkdownFile('test/index.md', 'home'); | ||
var page = getMarkdownFile('test/hello.md', 'page'); | ||
var sectionIndex = getMarkdownFile('test/foo/index.md', 'section index'); | ||
var sectionPage = getMarkdownFile('test/foo/bar.md', 'sectionPage'); | ||
it('should give each file a pointer to its siblings', function(done) { | ||
stream.on('end', function() { | ||
expect(home.data.sectionUrl).to.equal('/'); | ||
expect(page.data.sectionUrl).to.equal('/'); | ||
expect(sectionIndex.data.sectionUrl).to.equal('/foo/'); | ||
expect(sectionPage.data.sectionUrl).to.equal('/foo/'); | ||
done(); | ||
}); | ||
var stream = ssg(); | ||
var h = mockFile('test/index.html'); | ||
var p1 = mockFile('test/hello.html'); | ||
var p2 = mockFile('test/foo/index.html'); | ||
var p2_1 = mockFile('test/foo/bar.html'); | ||
stream.write(home); | ||
stream.write(page); | ||
stream.write(sectionIndex); | ||
stream.write(sectionPage); | ||
stream.end(); | ||
}); | ||
it('should use the specified base url', function(done) { | ||
var website = {}; | ||
var options = { | ||
baseUrl: '/path/to/site' | ||
}; | ||
var stream = ssg(website, options); | ||
var home = getMarkdownFile('test/index.md', 'home'); | ||
var page = getMarkdownFile('test/hello.md', 'page'); | ||
var sectionIndex = getMarkdownFile('test/foo/index.md', 'section index'); | ||
var sectionPage = getMarkdownFile('test/foo/bar.md', 'section page'); | ||
stream.on('end', function() { | ||
expect(home.data.url).to.equal('/path/to/site/'); | ||
expect(page.data.url).to.equal('/path/to/site/hello/'); | ||
expect(sectionIndex.data.url).to.equal('/path/to/site/foo/'); | ||
expect(sectionPage.data.url).to.equal('/path/to/site/foo/bar/'); | ||
expect(h.data.siblings.length).to.equal(0); | ||
expect(p1.data.siblings[0].data.url).to.equal('/foo/'); | ||
expect(p1.data.siblings[1].data.url).to.equal('/hello.html'); | ||
expect(p2.data.siblings[0].data.url).to.equal('/foo/'); | ||
expect(p2.data.siblings[1].data.url).to.equal('/hello.html'); | ||
expect(p2_1.data.siblings.length).to.equal(1); | ||
// Siblings include self, so there will always be at least one | ||
expect(p2_1.data.siblings[0].data.url).to.equal(p2_1.data.url); | ||
done(); | ||
}); | ||
stream.write(home); | ||
stream.write(page); | ||
stream.write(sectionIndex); | ||
stream.write(sectionPage); | ||
stream.write(h); | ||
stream.write(p1); | ||
stream.write(p2); | ||
stream.write(p2_1); | ||
stream.end(); | ||
}); | ||
it('should remove a trailing slash from the specified base url', function(done) { | ||
var website = {}; | ||
var options = { | ||
baseUrl: '/path/to/site/' | ||
}; | ||
var stream = ssg(website, options); | ||
var home = getMarkdownFile('test/index.md', 'home'); | ||
var page = getMarkdownFile('test/hello.md', 'page'); | ||
var sectionIndex = getMarkdownFile('test/foo/index.md', 'section index'); | ||
var sectionPage = getMarkdownFile('test/foo/bar.md', 'section page'); | ||
it('should handle deeply nested trees', function(done) { | ||
stream.on('end', function() { | ||
expect(home.data.url).to.equal('/path/to/site/'); | ||
expect(page.data.url).to.equal('/path/to/site/hello/'); | ||
expect(sectionIndex.data.url).to.equal('/path/to/site/foo/'); | ||
expect(sectionPage.data.url).to.equal('/path/to/site/foo/bar/'); | ||
done(); | ||
}); | ||
var stream = ssg(); | ||
// Files named like level[n]page[n] | ||
var h = mockFile('test/index.html'); | ||
var p1 = mockFile('test/hello.html'); | ||
var p2 = mockFile('test/foo/index.html'); | ||
var p2_1 = mockFile('test/foo/bar.html'); | ||
var p2_2 = mockFile('test/foo/qux.html'); | ||
var p2_3 = mockFile('test/foo/fred/index.html'); | ||
var p2_3_1 = mockFile('test/foo/fred/foo/index.html'); | ||
var p2_3_2 = mockFile('test/foo/fred/bar.html'); | ||
stream.write(home); | ||
stream.write(page); | ||
stream.write(sectionIndex); | ||
stream.write(sectionPage); | ||
stream.end(); | ||
}); | ||
it('should generate an index tree of sections', function(done) { | ||
var website = {}; | ||
var stream = ssg(website); | ||
var home = getMarkdownFile('test/index.md', 'home'); | ||
var sectionIndex = getMarkdownFile('test/foo/index.md', 'section index'); | ||
var subsectionIndex = getMarkdownFile('test/foo/bar/index.md', 'sub-section page'); | ||
stream.on('end', function() { | ||
expect(website.map).to.not.be.undefined; | ||
expect(website.map.sections).to.not.be.undefined; | ||
expect(website.map.sections[0].name).to.equal('foo'); | ||
expect(website.map.sections[0].url).to.equal('/foo/'); | ||
expect(website.map.sections[0].sections[0].name).to.equal('bar'); | ||
expect(website.map.sections[0].sections[0].url).to.equal('/foo/bar/'); | ||
expect(website.map.sections[0].sections[0].sections).to.be.empty; | ||
done(); | ||
}); | ||
stream.write(home); | ||
stream.write(sectionIndex); | ||
stream.write(subsectionIndex); | ||
stream.end(); | ||
}); | ||
// Siblings | ||
expect(h.data.siblings.length).to.equal(0); | ||
it('should generate an index tree of sections with correct baseUrl', function(done) { | ||
var website = {}; | ||
var options = { | ||
baseUrl: '/path/to/site' | ||
}; | ||
var stream = ssg(website, options); | ||
var home = getMarkdownFile('test/index.md', 'home'); | ||
var sectionIndex = getMarkdownFile('test/foo/index.md', 'section index'); | ||
var subsectionIndex = getMarkdownFile('test/foo/bar/index.md', 'sub-section page'); | ||
expect(p1.data.siblings.map(function(f) { return f.data.url; })) | ||
.to.deep.equal(['/foo/', '/hello.html']); | ||
stream.on('end', function() { | ||
expect(website.map).to.not.be.undefined; | ||
expect(website.map.sections).to.not.be.undefined; | ||
expect(website.map.sections[0].name).to.equal('foo'); | ||
expect(website.map.sections[0].url).to.equal('/path/to/site/foo/'); | ||
expect(website.map.sections[0].sections[0].name).to.equal('bar'); | ||
expect(website.map.sections[0].sections[0].url).to.equal('/path/to/site/foo/bar/'); | ||
expect(website.map.sections[0].sections[0].sections).to.be.empty; | ||
done(); | ||
}); | ||
expect(p2.data.siblings.map(function(f) { return f.data.url; })) | ||
.to.deep.equal(['/foo/', '/hello.html']); | ||
stream.write(home); | ||
stream.write(sectionIndex); | ||
stream.write(subsectionIndex); | ||
stream.end(); | ||
}); | ||
expect(p2_1.data.siblings.map(function(f) { return f.data.url; })) | ||
.to.deep.equal(['/foo/bar.html', '/foo/fred/', '/foo/qux.html']); | ||
it('should allow overriding section name in tree', function(done) { | ||
var website = {}; | ||
var stream = ssg(website, { | ||
sectionProperties: ['sectionTitle'] | ||
}); | ||
var home = getMarkdownFile('test/index.md', 'home'); | ||
var sectionIndex = getMarkdownFile('test/foo/index.md', 'section index'); | ||
var subsectionIndex = getMarkdownFile('test/foo/bar/index.md', 'sub-section page'); | ||
expect(p2_2.data.siblings.map(function(f) { return f.data.url; })) | ||
.to.deep.equal(['/foo/bar.html', '/foo/fred/', '/foo/qux.html']); | ||
sectionIndex.data = { sectionTitle: 'This is foo' }; | ||
subsectionIndex.data = { sectionTitle: 'This is bar' }; | ||
expect(p2_3.data.siblings.map(function(f) { return f.data.url; })) | ||
.to.deep.equal(['/foo/bar.html', '/foo/fred/', '/foo/qux.html']); | ||
stream.on('end', function() { | ||
expect(website.map).to.not.be.undefined; | ||
expect(website.map.sections).to.not.be.undefined; | ||
expect(website.map.sections[0].name).to.equal('foo'); | ||
expect(website.map.sections[0].sectionTitle).to.equal('This is foo'); | ||
expect(website.map.sections[0].url).to.equal('/foo/'); | ||
expect(website.map.sections[0].sections[0].name).to.equal('bar'); | ||
expect(website.map.sections[0].sections[0].sectionTitle).to.equal('This is bar'); | ||
expect(website.map.sections[0].sections[0].url).to.equal('/foo/bar/'); | ||
expect(website.map.sections[0].sections[0].sections).to.be.empty; | ||
done(); | ||
}); | ||
// Children | ||
expect(h.data.children.map(function(f) { return f.data.url; })) | ||
.to.deep.equal(['/foo/', '/hello.html']); | ||
stream.write(home); | ||
stream.write(sectionIndex); | ||
stream.write(subsectionIndex); | ||
stream.end(); | ||
}); | ||
expect(p2.data.children.map(function(f) { return f.data.url; })) | ||
.to.deep.equal(['/foo/bar.html', '/foo/fred/', '/foo/qux.html']); | ||
it('should add files to the section tree', function(done) { | ||
var website = {}; | ||
var stream = ssg(website); | ||
var home = getMarkdownFile('test/index.md', 'home'); | ||
var page1 = getMarkdownFile('test/hello.md', 'page'); | ||
var page2 = getMarkdownFile('test/goodbye.md', 'page'); | ||
var sectionIndex = getMarkdownFile('test/foo/index.md', 'section index'); | ||
var sectionPage1 = getMarkdownFile('test/foo/page1.md', 'section page'); | ||
var sectionPage2 = getMarkdownFile('test/foo/page2.md', 'section page'); | ||
var sectionPage3 = getMarkdownFile('test/foo/page3.md', 'section page'); | ||
var subsectionIndex = getMarkdownFile('test/foo/bar/index.md', 'subsection index'); | ||
var subsectionPage1 = getMarkdownFile('test/foo/bar/page1.md', 'subsection page'); | ||
var subsectionPage2 = getMarkdownFile('test/foo/bar/page2.md', 'subsection page'); | ||
expect(p2_3.data.children.map(function(f) { return f.data.url; })) | ||
.to.deep.equal(['/foo/fred/bar.html', '/foo/fred/foo/']); | ||
stream.on('end', function() { | ||
expect(website.map).to.not.be.undefined; | ||
expect(website.map.files.length).to.equal(3); | ||
expect(website.map.sections[0].files.length).to.equal(4); | ||
expect(website.map.sections[0].sections[0].files.length).to.equal(3); | ||
done(); | ||
}); | ||
stream.write(home); | ||
stream.write(page1); | ||
stream.write(page2); | ||
stream.write(sectionIndex); | ||
stream.write(sectionPage1); | ||
stream.write(sectionPage2); | ||
stream.write(sectionPage3); | ||
stream.write(subsectionIndex); | ||
stream.write(subsectionPage1); | ||
stream.write(subsectionPage2); | ||
stream.write(h); | ||
stream.write(p1); | ||
stream.write(p2); | ||
stream.write(p2_1); | ||
stream.write(p2_2); | ||
stream.write(p2_3); | ||
stream.write(p2_3_1); | ||
stream.write(p2_3_2); | ||
stream.end(); | ||
}); | ||
it('should break if you have no index in a directory', function(done) { | ||
// ideally the inverse of this should pass, but it's difficult | ||
var website = {}; | ||
var stream = ssg(website); | ||
var home = getMarkdownFile('test/index.md', 'home'); | ||
var page1 = getMarkdownFile('test/hello.md', 'page'); | ||
var page2 = getMarkdownFile('test/goodbye.md', 'page'); | ||
var sectionPage1 = getMarkdownFile('test/foo/page1.md', 'section page'); | ||
var sectionPage2 = getMarkdownFile('test/foo/page2.md', 'section page'); | ||
var sectionPage3 = getMarkdownFile('test/foo/page3.md', 'section page'); | ||
var subsectionPage1 = getMarkdownFile('test/foo/bar/page1.md', 'subsection page'); | ||
var subsectionPage2 = getMarkdownFile('test/foo/bar/page2.md', 'subsection page'); | ||
it('should use the specified base url', function(done) { | ||
var options = { | ||
baseUrl: '/path/to/site' | ||
}; | ||
var stream = ssg(options); | ||
var h = mockFile('test/index.html'); | ||
var p1 = mockFile('test/hello.html'); | ||
var p2 = mockFile('test/foo/index.html'); | ||
var p2_1 = mockFile('test/foo/bar.html'); | ||
stream.on('end', function() { | ||
expect(website.map).to.not.be.undefined; | ||
expect(website.map.files.length).to.equal(3); | ||
expect(typeof website.map.sections[0]).to.equal('undefined'); | ||
expect(h.data.url).to.equal('/path/to/site/'); | ||
expect(p1.data.url).to.equal('/path/to/site/hello.html'); | ||
expect(p2.data.url).to.equal('/path/to/site/foo/'); | ||
expect(p2_1.data.url).to.equal('/path/to/site/foo/bar.html'); | ||
done(); | ||
}); | ||
stream.write(home); | ||
stream.write(page1); | ||
stream.write(page2); | ||
stream.write(sectionPage1); | ||
stream.write(sectionPage2); | ||
stream.write(sectionPage3); | ||
stream.write(subsectionPage1); | ||
stream.write(subsectionPage2); | ||
stream.write(h); | ||
stream.write(p1); | ||
stream.write(p2); | ||
stream.write(p2_1); | ||
stream.end(); | ||
}); | ||
it('should give each file a section reference', function(done) { | ||
var website = {}; | ||
var stream = ssg(website); | ||
var home = getMarkdownFile('test/index.md', 'home'); | ||
var page1 = getMarkdownFile('test/hello.md', 'page'); | ||
var page2 = getMarkdownFile('test/goodbye.md', 'page'); | ||
var sectionIndex = getMarkdownFile('test/foo/index.md', 'section index'); | ||
var sectionPage1 = getMarkdownFile('test/foo/page1.md', 'section page'); | ||
var sectionPage2 = getMarkdownFile('test/foo/page2.md', 'section page'); | ||
var sectionPage3 = getMarkdownFile('test/foo/page3.md', 'section page'); | ||
var subsectionIndex = getMarkdownFile('test/foo/bar/index.md', 'subsection index'); | ||
var subsectionPage1 = getMarkdownFile('test/foo/bar/page2.md', 'subsection page'); | ||
var subsectionPage2 = getMarkdownFile('test/foo/bar/page3.md', 'subsection page'); | ||
it('should remove a trailing slash from the specified base url', function(done) { | ||
var options = { | ||
baseUrl: '/path/to/site/' | ||
}; | ||
var stream = ssg(options); | ||
var h = mockFile('test/index.html'); | ||
var p1 = mockFile('test/hello.html'); | ||
var p2 = mockFile('test/foo/index.html'); | ||
var p2_1 = mockFile('test/foo/bar.html'); | ||
stream.on('end', function() { | ||
expect(home.data.section).to.not.be.undefined; | ||
expect(home.data.section.name).to.equal('root'); | ||
expect(page1.data.section.name).to.equal('root'); | ||
expect(page2.data.section.name).to.equal('root'); | ||
expect(home.data.section.files).to.not.be.undefined; | ||
expect(page1.data.section.files).to.not.be.undefined; | ||
expect(page2.data.section.files).to.not.be.undefined; | ||
expect(sectionIndex.data.section.name).to.equal('foo'); | ||
expect(sectionPage1.data.section.name).to.equal('foo'); | ||
expect(sectionPage2.data.section.name).to.equal('foo'); | ||
expect(sectionIndex.data.section.files).to.not.be.undefined; | ||
expect(sectionPage1.data.section.files).to.not.be.undefined; | ||
expect(sectionPage2.data.section.files).to.not.be.undefined; | ||
expect(subsectionIndex.data.section.name).to.equal('bar'); | ||
expect(subsectionPage1.data.section.name).to.equal('bar'); | ||
expect(subsectionPage2.data.section.name).to.equal('bar'); | ||
expect(subsectionIndex.data.section.files).to.not.be.undefined; | ||
expect(subsectionPage1.data.section.files).to.not.be.undefined; | ||
expect(subsectionPage2.data.section.files).to.not.be.undefined; | ||
expect(h.data.url).to.equal('/path/to/site/'); | ||
expect(p1.data.url).to.equal('/path/to/site/hello.html'); | ||
expect(p2.data.url).to.equal('/path/to/site/foo/'); | ||
expect(p2_1.data.url).to.equal('/path/to/site/foo/bar.html'); | ||
done(); | ||
}); | ||
stream.write(home); | ||
stream.write(page1); | ||
stream.write(page2); | ||
stream.write(sectionIndex); | ||
stream.write(sectionPage1); | ||
stream.write(sectionPage2); | ||
stream.write(sectionPage3); | ||
stream.write(subsectionIndex); | ||
stream.write(subsectionPage1); | ||
stream.write(subsectionPage2); | ||
stream.write(h); | ||
stream.write(p1); | ||
stream.write(p2); | ||
stream.write(p2_1); | ||
stream.end(); | ||
}); | ||
it('should default to sort by url', function(done) { | ||
var website = {}; | ||
var stream = ssg(website); | ||
var home = getMarkdownFile('test/index.md', 'home'); | ||
var page1 = getMarkdownFile('test/xyz.md', 'page'); | ||
var page2 = getMarkdownFile('test/abc.md', 'page'); | ||
var sectionIndex = getMarkdownFile('test/foo/index.md', 'section index'); | ||
var sectionPage1 = getMarkdownFile('test/foo/10-hello.md', 'section page'); | ||
var sectionPage2 = getMarkdownFile('test/foo/05-goodbye.md', 'section page'); | ||
it('should sort by url by default', function(done) { | ||
var stream = ssg(); | ||
var h = mockFile('test/index.html'); | ||
var p1 = mockFile('test/xyz.html'); | ||
var p2 = mockFile('test/abc.html'); | ||
var p3 = mockFile('test/foo/index.html'); | ||
var p3_1 = mockFile('test/foo/10-hello.html', 'child page'); | ||
var p3_2 = mockFile('test/foo/05-goodbye.html', 'child page'); | ||
stream.on('end', function() { | ||
var urls = website.map.files.map(function(file) { | ||
var urls = h.data.children.map(function(file) { | ||
return file.data.url; | ||
}); | ||
expect(urls).to.deep.equal([ | ||
'/', | ||
'/abc/', | ||
'/xyz/' | ||
'/abc.html', | ||
'/foo/', | ||
'/xyz.html' | ||
]); | ||
var sectionUrls = website.map.sections[0].files.map(function(file) { | ||
var childUrls = p3.data.children.map(function(file) { | ||
return file.data.url; | ||
}); | ||
expect(sectionUrls).to.deep.equal([ | ||
'/foo/', | ||
'/foo/05-goodbye/', | ||
'/foo/10-hello/' | ||
expect(childUrls).to.deep.equal([ | ||
'/foo/05-goodbye.html', | ||
'/foo/10-hello.html' | ||
]); | ||
@@ -452,48 +280,47 @@ done(); | ||
stream.write(home); | ||
stream.write(page1); | ||
stream.write(page2); | ||
stream.write(sectionIndex); | ||
stream.write(sectionPage1); | ||
stream.write(sectionPage2); | ||
stream.write(h); | ||
stream.write(p1); | ||
stream.write(p2); | ||
stream.write(p3); | ||
stream.write(p3_1); | ||
stream.write(p3_2); | ||
stream.end(); | ||
}); | ||
it('should be possible to sort pages by assigned property', function(done) { | ||
var website = {}; | ||
it('should sort pages by options.sort', function(done) { | ||
var options = { | ||
sort: 'order' | ||
}; | ||
var stream = ssg(website, options); | ||
var home = getMarkdownFile('test/index.md', 'home'); | ||
var page1 = getMarkdownFile('test/xyz.md', 'page'); | ||
var page2 = getMarkdownFile('test/abc.md', 'page'); | ||
var page3 = getMarkdownFile('test/def.md', 'page'); | ||
var sectionIndex = getMarkdownFile('test/foo/index.md', 'section index'); | ||
var sectionPage1 = getMarkdownFile('test/foo/10-hello.md', 'section page'); | ||
var sectionPage2 = getMarkdownFile('test/foo/05-goodbye.md', 'section page'); | ||
var stream = ssg(options); | ||
var h = mockFile('test/index.html'); | ||
var p1 = mockFile('test/xyz.html'); | ||
var p2 = mockFile('test/abc.html'); | ||
var p3 = mockFile('test/def.html'); | ||
var p4 = mockFile('test/foo/index.html'); | ||
var p4_1 = mockFile('test/foo/10-hello.html'); | ||
var p4_2 = mockFile('test/foo/05-goodbye.html'); | ||
page1.data = { order: 1 }; | ||
page2.data = { order: 12 }; | ||
page3.data = { order: 6 }; | ||
sectionPage1.data = { order: 1 }; | ||
sectionPage2.data = { order: 2 }; | ||
p1.data = { order: 1 }; | ||
p2.data = { order: 12 }; | ||
p3.data = { order: 6 }; | ||
p4.data = { order: 2 }; | ||
p4_1.data = { order: 1 }; | ||
p4_2.data = { order: 2 }; | ||
stream.on('end', function() { | ||
var urls = website.map.files.map(function(file) { | ||
var urls = h.data.children.map(function(file) { | ||
return file.data.url; | ||
}); | ||
expect(urls).to.deep.equal([ | ||
'/', | ||
'/xyz/', | ||
'/def/', | ||
'/abc/' | ||
'/xyz.html', | ||
'/foo/', | ||
'/def.html', | ||
'/abc.html' | ||
]); | ||
var sectionUrls = website.map.sections[0].files.map(function(file) { | ||
var childUrls = p4.data.children.map(function(file) { | ||
return file.data.url; | ||
}); | ||
expect(sectionUrls).to.deep.equal([ | ||
'/foo/', | ||
'/foo/10-hello/', | ||
'/foo/05-goodbye/' | ||
expect(childUrls).to.deep.equal([ | ||
'/foo/10-hello.html', | ||
'/foo/05-goodbye.html' | ||
]); | ||
@@ -503,123 +330,37 @@ done(); | ||
stream.write(home); | ||
stream.write(page2); | ||
stream.write(page1); | ||
stream.write(sectionPage1); | ||
stream.write(page3); | ||
stream.write(sectionIndex); | ||
stream.write(sectionPage2); | ||
stream.write(h); | ||
stream.write(p2); | ||
stream.write(p1); | ||
stream.write(p4); | ||
stream.write(p3); | ||
stream.write(p4_2); | ||
stream.write(p4_1); | ||
stream.end(); | ||
}); | ||
it('should be possible to sort indexes in section (but indexes always come first in their own section)', function(done) { | ||
var website = {}; | ||
var options = { | ||
sort: 'order' | ||
}; | ||
var stream = ssg(website, options); | ||
var home = getMarkdownFile('test/index.md', 'home'); | ||
var section1Index = getMarkdownFile('test/foo/index.md', 'section index'); | ||
var section1Page1 = getMarkdownFile('test/foo/10-hello.md', 'section page'); | ||
var section1Page2 = getMarkdownFile('test/foo/05-goodbye.md', 'section page'); | ||
var section2Index = getMarkdownFile('test/bar/index.md', 'section index'); | ||
var section2Page1 = getMarkdownFile('test/bar/10-hello.md', 'section page'); | ||
var section2Page2 = getMarkdownFile('test/bar/05-goodbye.md', 'section page'); | ||
var section3Index = getMarkdownFile('test/xyz/index.md', 'section index'); | ||
var section3Page1 = getMarkdownFile('test/xyz/10-hello.md', 'section page'); | ||
var section3Page2 = getMarkdownFile('test/xyz/05-goodbye.md', 'section page'); | ||
it('should not break if there is no root file', function(done) { | ||
section1Index.data = { order: 5 }; | ||
section1Page1.data = { order: 1 }; | ||
section1Page2.data = { order: 2 }; | ||
var stream = ssg(); | ||
var p1 = mockFile('test/hello.html'); | ||
var p2 = mockFile('test/foo/index.html'); | ||
var p2_1 = mockFile('test/foo/bar.html'); | ||
section2Index.data = { order: 3 }; | ||
section2Page1.data = { order: 1 }; | ||
section2Page2.data = { order: 2 }; | ||
section3Index.data = { order: 1 }; | ||
section3Page1.data = { order: 1 }; | ||
section3Page2.data = { order: 2 }; | ||
stream.on('end', function() { | ||
var sectionUrls = website.map.sections.map(function(section) { | ||
return section.url; | ||
}); | ||
expect(sectionUrls).to.deep.equal([ | ||
'/xyz/', | ||
'/bar/', | ||
'/foo/' | ||
]); | ||
var section1Urls = website.map.sections[0].files.map(function(file) { | ||
return file.data.url; | ||
}); | ||
expect(section1Urls).to.deep.equal([ | ||
'/xyz/', | ||
'/xyz/10-hello/', | ||
'/xyz/05-goodbye/' | ||
]); | ||
var section2Urls = website.map.sections[1].files.map(function(file) { | ||
return file.data.url; | ||
}); | ||
expect(section2Urls).to.deep.equal([ | ||
'/bar/', | ||
'/bar/10-hello/', | ||
'/bar/05-goodbye/' | ||
]); | ||
var section3Urls = website.map.sections[2].files.map(function(file) { | ||
return file.data.url; | ||
}); | ||
expect(section3Urls).to.deep.equal([ | ||
'/foo/', | ||
'/foo/10-hello/', | ||
'/foo/05-goodbye/' | ||
]); | ||
expect(p1.data.root).to.equal(null); | ||
expect(p1.data.siblings[0].data.url).to.equal('/foo/'); | ||
expect(p1.data.siblings[1].data.url).to.equal('/hello.html'); | ||
expect(p2.data.siblings[0].data.url).to.equal('/foo/'); | ||
expect(p2.data.siblings[1].data.url).to.equal('/hello.html'); | ||
expect(p2_1.data.siblings.length).to.equal(1); | ||
done(); | ||
}); | ||
stream.write(home); | ||
stream.write(section1Index); | ||
stream.write(section1Page1); | ||
stream.write(section1Page2); | ||
stream.write(section2Index); | ||
stream.write(section2Page1); | ||
stream.write(section2Page2); | ||
stream.write(section3Index); | ||
stream.write(section3Page1); | ||
stream.write(section3Page2); | ||
stream.write(p1); | ||
stream.write(p2); | ||
stream.write(p2_1); | ||
stream.end(); | ||
}); | ||
it('should emit file data after the full index is created', function(done) { | ||
var website = {}; | ||
var stream = ssg(website); | ||
var home = getMarkdownFile('test/index.md', 'home'); | ||
var sectionIndex = getMarkdownFile('test/foo/index.md', 'section index'); | ||
var subsectionIndex = getMarkdownFile('test/foo/bar/index.md', 'sub-section page'); | ||
var testCount = 0; | ||
stream.on('data', function() { | ||
expect(website.map).to.not.be.undefined; | ||
expect(website.map.sections).to.not.be.undefined; | ||
expect(website.map.sections[0].name).to.equal('foo'); | ||
expect(website.map.sections[0].url).to.equal('/foo/'); | ||
expect(website.map.sections[0].sections[0].name).to.equal('bar'); | ||
expect(website.map.sections[0].sections[0].url).to.equal('/foo/bar/'); | ||
expect(website.map.sections[0].sections[0].sections).to.be.empty; | ||
if (testCount++ === 2) { | ||
done(); | ||
} | ||
}); | ||
stream.write(home); | ||
stream.write(sectionIndex); | ||
stream.write(subsectionIndex); | ||
stream.end(); | ||
}); | ||
}); | ||
}); |
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
+ Added lodash@3.5.0 (transitive)
- Removed lodash@2.4.2 (transitive)
Updated lodash@~3.5.0