commit
fc7ae5ce0c
@ -0,0 +1,121 @@
|
|||||||
|
# ---> Node
|
||||||
|
# Logs
|
||||||
|
logs
|
||||||
|
*.log
|
||||||
|
npm-debug.log*
|
||||||
|
yarn-debug.log*
|
||||||
|
yarn-error.log*
|
||||||
|
lerna-debug.log*
|
||||||
|
.pnpm-debug.log*
|
||||||
|
|
||||||
|
# Diagnostic reports (https://nodejs.org/api/report.html)
|
||||||
|
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
|
||||||
|
|
||||||
|
# Runtime data
|
||||||
|
pids
|
||||||
|
*.pid
|
||||||
|
*.seed
|
||||||
|
*.pid.lock
|
||||||
|
|
||||||
|
# Directory for instrumented libs generated by jscoverage/JSCover
|
||||||
|
lib-cov
|
||||||
|
|
||||||
|
# Coverage directory used by tools like istanbul
|
||||||
|
coverage
|
||||||
|
*.lcov
|
||||||
|
|
||||||
|
# nyc test coverage
|
||||||
|
.nyc_output
|
||||||
|
|
||||||
|
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
|
||||||
|
.grunt
|
||||||
|
|
||||||
|
# Bower dependency directory (https://bower.io/)
|
||||||
|
bower_components
|
||||||
|
|
||||||
|
# node-waf configuration
|
||||||
|
.lock-wscript
|
||||||
|
|
||||||
|
# Compiled binary addons (https://nodejs.org/api/addons.html)
|
||||||
|
build/Release
|
||||||
|
|
||||||
|
# Dependency directories
|
||||||
|
node_modules/
|
||||||
|
jspm_packages/
|
||||||
|
|
||||||
|
# Snowpack dependency directory (https://snowpack.dev/)
|
||||||
|
web_modules/
|
||||||
|
|
||||||
|
# TypeScript cache
|
||||||
|
*.tsbuildinfo
|
||||||
|
|
||||||
|
# Optional npm cache directory
|
||||||
|
.npm
|
||||||
|
|
||||||
|
# Optional eslint cache
|
||||||
|
.eslintcache
|
||||||
|
|
||||||
|
# Microbundle cache
|
||||||
|
.rpt2_cache/
|
||||||
|
.rts2_cache_cjs/
|
||||||
|
.rts2_cache_es/
|
||||||
|
.rts2_cache_umd/
|
||||||
|
|
||||||
|
# Optional REPL history
|
||||||
|
.node_repl_history
|
||||||
|
|
||||||
|
# Output of 'npm pack'
|
||||||
|
*.tgz
|
||||||
|
|
||||||
|
# Yarn Integrity file
|
||||||
|
.yarn-integrity
|
||||||
|
|
||||||
|
# dotenv environment variables file
|
||||||
|
.env
|
||||||
|
.env.test
|
||||||
|
.env.production
|
||||||
|
|
||||||
|
# parcel-bundler cache (https://parceljs.org/)
|
||||||
|
.cache
|
||||||
|
.parcel-cache
|
||||||
|
|
||||||
|
# Next.js build output
|
||||||
|
.next
|
||||||
|
out
|
||||||
|
|
||||||
|
# Nuxt.js build / generate output
|
||||||
|
.nuxt
|
||||||
|
dist
|
||||||
|
|
||||||
|
# Gatsby files
|
||||||
|
.cache/
|
||||||
|
# Uncomment the public line below if your project uses Gatsby and not Next.js
|
||||||
|
# https://nextjs.org/blog/next-9-1#public-directory-support
|
||||||
|
# public
|
||||||
|
|
||||||
|
# vuepress build output
|
||||||
|
.vuepress/dist
|
||||||
|
|
||||||
|
# Serverless directories
|
||||||
|
.serverless/
|
||||||
|
|
||||||
|
# FuseBox cache
|
||||||
|
.fusebox/
|
||||||
|
|
||||||
|
# DynamoDB Local files
|
||||||
|
.dynamodb/
|
||||||
|
|
||||||
|
# TernJS port file
|
||||||
|
.tern-port
|
||||||
|
|
||||||
|
# Stores VSCode versions used for testing VSCode extensions
|
||||||
|
.vscode-test
|
||||||
|
|
||||||
|
# yarn v2
|
||||||
|
.yarn/cache
|
||||||
|
.yarn/unplugged
|
||||||
|
.yarn/build-state.yml
|
||||||
|
.yarn/install-state.gz
|
||||||
|
.pnp.*
|
||||||
|
|
||||||
|
public
|
@ -0,0 +1,229 @@
|
|||||||
|
# Site-O-Mat - Core
|
||||||
|
|
||||||
|
Generating a Website as Html-Files from a Markdown-File Structure.
|
||||||
|
|
||||||
|
Why? I mainly had to update some websites, but realized there was no benefit in
using a full CMS or a headless CMS like Directus. Re-rendering the same, rarely
updated pages seems like a waste of energy — why not generate them from a
hierarchical file structure? Luckily I had developed a CMS a few years ago that
runs on markdown files; it was never finished and only a proof of concept, but
now it works for creating an entire website.
|
||||||
|
|
||||||
|
## Roadmap
|
||||||
|
|
||||||
|
Next will be,
|
||||||
|
|
||||||
|
* Some Tests
|
||||||
|
* Standalone, handle Webpack only as wrapper
|
||||||
|
|
||||||
|
Maybe later,
|
||||||
|
|
||||||
|
* Integrate Eta.js and LiquidJS
|
||||||
|
* Hooks for handle generic content
|
||||||
|
|
||||||
|
## Additional Packages
|
||||||
|
|
||||||
|
[@site-o-mat/webpack-plugin](https://gitea.node001.net/site-o-mat/webpack-plugin) - Wrapper for Core to use as Webpack Plugin
|
||||||
|
[@site-o-mat/query](https://gitea.node001.net/site-o-mat/query) - Query for Filter, OrderBy and Reduce Data
|
||||||
|
[@site-o-mat/api](https://gitea.node001.net/site-o-mat/api) - Api for getting Data from JSON
|
||||||
|
[@site-o-mat/blog](https://gitea.node001.net/site-o-mat/blog) - Example for Blog
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
Setup this registry in your project .npmrc file:
|
||||||
|
|
||||||
|
```
|
||||||
|
@helpers:registry=https://gitea.node001.net/api/packages/site-o-mat/npm/
|
||||||
|
```
|
||||||
|
|
||||||
|
Install with npm or yarn
|
||||||
|
|
||||||
|
```
|
||||||
|
npm i --save-dev @site-o-mat/core
|
||||||
|
yarn add --dev @site-o-mat/core
|
||||||
|
```
|
||||||
|
|
||||||
|
## Configuration
|
||||||
|
|
||||||
|
Basic Usage:
|
||||||
|
|
||||||
|
```
|
||||||
|
const Siteomat = require('@site-o-mat/core')
|
||||||
|
|
||||||
|
const siteomat = new Siteomat(<source>, <destination>, {
|
||||||
|
<options>
|
||||||
|
})
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
| Name | Type | Default | Description |
|
||||||
|
|-------------|-----------|---------|-------------|
|
||||||
|
| destination | {String} | null | If not set, it will use the public path |
|
||||||
|
| htmlMinify | {Boolean} | true | Minify Html and remove all Whitespace |
|
||||||
|
|
||||||
|
## Pages
|
||||||
|
|
||||||
|
Pages are markdown files, separated into two parts. The first part is a yaml section,
|
||||||
|
|
||||||
|
```
|
||||||
|
---
|
||||||
|
title: "health goth DIY tattooed"
|
||||||
|
view: "home.njk"
|
||||||
|
meta:
|
||||||
|
description: "La"
|
||||||
|
media:
|
||||||
|
teaser:
|
||||||
|
src: "_images/test.jpeg"
|
||||||
|
alt: "cold-pressed"
|
||||||
|
---
|
||||||
|
```
|
||||||
|
|
||||||
|
The yaml-Section will be parsed as an Object and available in the Templates. The
|
||||||
|
second part of the File will be parsed as Markdown, but it could be also empty.
|
||||||
|
|
||||||
|
Default type for Pages is **html**.
|
||||||
|
|
||||||
|
## Nesting
|
||||||
|
|
||||||
|
A page can be a single markdown file, or a directory with an index file inside.
The name of a file or a directory will be the name of the html file. To create sub-pages,
create sub-directories.
|
||||||
|
|
||||||
|
This Structure,
|
||||||
|
|
||||||
|
```
|
||||||
|
index.md
|
||||||
|
about-me.md
|
||||||
|
blog
|
||||||
|
└ index.md
|
||||||
|
belly-polaroid-subway.md
|
||||||
|
```
|
||||||
|
|
||||||
|
will be,
|
||||||
|
|
||||||
|
```
|
||||||
|
index.html
|
||||||
|
about-me.html
|
||||||
|
blog.html
|
||||||
|
blog/belly-polaroid-subway.html
|
||||||
|
```
|
||||||
|
|
||||||
|
## Blocks
|
||||||
|
|
||||||
|
Each Page can have Blocks. Blocks are like Pages, but they are only accessible
|
||||||
|
for a single Page. To add Blocks to a page, add a "_blocks"-Directory
|
||||||
|
to the Directory of the Page.
|
||||||
|
|
||||||
|
Markdown-Files in a "_blocks"-Directory will be automatic accessible for a Page. The yaml-Section is Optional.
|
||||||
|
|
||||||
|
```
|
||||||
|
recipes
|
||||||
|
└ index.md
|
||||||
|
_blocks
|
||||||
|
└ hero-1.md
|
||||||
|
hero-2.md
|
||||||
|
hero-3.md
|
||||||
|
```
|
||||||
|
|
||||||
|
Blocks will be grouped by their name, and sorted by the number at the end. The "hero" files
|
||||||
|
can be used like this,
|
||||||
|
|
||||||
|
```
|
||||||
|
{% for hero in page.blocks.hero %}
    {{ hero.content }}
{% endfor %}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Queries
|
||||||
|
|
||||||
|
Queries can be used in Templates to get Pages.
|
||||||
|
|
||||||
|
### Pages
|
||||||
|
|
||||||
|
Basic Usage:
|
||||||
|
|
||||||
|
```
|
||||||
|
pageQuery.find()
|
||||||
|
```
|
||||||
|
|
||||||
|
or with options,
|
||||||
|
|
||||||
|
| Name | Type | Default | Description |
|
||||||
|
|-------------|-----------|---------|-------------|
|
||||||
|
| parent | {String} | / | Directory for start query |
|
||||||
|
| deep | {Integer} | -1 | Deep of Recursive |
|
||||||
|
| orderBy | {Array} | null | Name of field sorting, a "-" in front of the. Nested fields are also possible. |
|
||||||
|
| limit | {Integer} | null | Limit results |
|
||||||
|
| filter | {Object} | null | Filtering results by Fields in yaml |
|
||||||
|
|
||||||
|
#### Filter
|
||||||
|
|
||||||
|
Basic Usage:
|
||||||
|
|
||||||
|
```
|
||||||
|
{
|
||||||
|
<fieldname>: {
|
||||||
|
<operator>: <value>
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
| Name | Description |
|
||||||
|
|-------------|-------------|
|
||||||
|
| _eq | Equal Value |
|
||||||
|
|
||||||
|
## Sitemap
|
||||||
|
|
||||||
|
The sitemap is generated from the pages. A page is only added to the sitemap if its
meta robots value is set to "index". The default for pages is "index".
|
||||||
|
|
||||||
|
## Templates
|
||||||
|
|
||||||
|
At this time only [Nunjucks](https://mozilla.github.io/nunjucks/) is used for templating.
|
||||||
|
|
||||||
|
### Nunjunks
|
||||||
|
|
||||||
|
#### Functions
|
||||||
|
|
||||||
|
##### asset(path)
|
||||||
|
|
||||||
|
This function handles the manifest file from [Laravel Mix](https://laravel-mix.com/).
|
||||||
|
|
||||||
|
```
|
||||||
|
<script src="{{ asset('js/app.js') }}"></script>
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Filters
|
||||||
|
|
||||||
|
##### resize
|
||||||
|
|
||||||
|
The filter uses [sharp](https://github.com/lovell/sharp) to crop, resize and
optimize images. The filter needs a relative path in the file structure.
|
||||||
|
|
||||||
|
Basic Usage:
|
||||||
|
|
||||||
|
```
|
||||||
|
{{ page.teaser.src | resize({ 'width': '300' }) }}
|
||||||
|
```
|
||||||
|
|
||||||
|
Add options:
|
||||||
|
|
||||||
|
```
|
||||||
|
{{ page.teaser.src | resize({ 'width': '300' }, { sigma: 2 }) }}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Json
|
||||||
|
|
||||||
|
Results from PageQuery can also be written to a JSON file, which can be used with a
simple API ([https://gitea.node001.net/site-o-mat/api](https://gitea.node001.net/site-o-mat/api)).
Create a file "json.yml" and add options.
|
||||||
|
|
||||||
|
Basic Usage:
|
||||||
|
|
||||||
|
```
|
||||||
|
posts:
|
||||||
|
orderBy:
|
||||||
|
- '-date_published'
|
||||||
|
filter:
|
||||||
|
view:
|
||||||
|
_eq: 'post.njk'
|
||||||
|
```
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,36 @@
|
|||||||
|
{
|
||||||
|
"name": "@site-o-mat/webpack-plugin",
|
||||||
|
"version": "0.5.0",
|
||||||
|
"build": "webpack",
|
||||||
|
"author": "Björn Hase <me@herr-hase.wtf>",
|
||||||
|
"main": "index.js",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git@gitea.node001.net:site-o-mat/webpack-plugin.git"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"test": "mocha"
|
||||||
|
},
|
||||||
|
"description": "A Webpack Plugin for generating a Website as Html-Files from a Markdown File Structure",
|
||||||
|
"dependencies": {
|
||||||
|
"@site-o-mat/query": "^0.1.0",
|
||||||
|
"assign-deep": "^1.0.1",
|
||||||
|
"crypto": "^1.0.1",
|
||||||
|
"dayjs": "^1.11.6",
|
||||||
|
"fast-xml-parser": "^4.0.11",
|
||||||
|
"html-minifier": "^4.0.0",
|
||||||
|
"js-yaml": "^4.1.0",
|
||||||
|
"lodash.merge": "^4.6.2",
|
||||||
|
"lodash.orderby": "^4.6.0",
|
||||||
|
"marked": "^4.1.1",
|
||||||
|
"mkdirp": "^1.0.4",
|
||||||
|
"nunjucks": "^3.2.3",
|
||||||
|
"sharp": "^0.31.1",
|
||||||
|
"slugify": "^1.6.5",
|
||||||
|
"yaml": "^2.1.3"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"chai": "^4.3.7",
|
||||||
|
"mocha": "^10.2.0"
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,14 @@
|
|||||||
|
---
|
||||||
|
title: "health goth DIY tattooed"
|
||||||
|
---
|
||||||
|
## Normcore cold-pressed ramps DSA
|
||||||
|
|
||||||
|
Normcore cold-pressed ramps DSA yes plz hot chicken green juice succulents leggings messenger bag truffaut iceland pabst ethical godard. Semiotics air plant marfa, drinking vinegar authentic iceland pug fit cloud bread cronut kickstarter glossier crucifix tumeric. Chicharrones polaroid flexitarian, seitan lumbersexual viral fam master cleanse four dollar toast scenester. Succulents poutine vegan keffiyeh meh, health goth DIY tattooed. Praxis roof party celiac chartreuse banjo butcher you probably haven't heard of them schlitz beard. Ethical tattooed kinfolk, cliche vegan messenger bag mukbang dreamcatcher cloud bread farm-to-table gatekeep trust fund.
|
||||||
|
|
||||||
|
## Palo santo leggings normcore aesthetic
|
||||||
|
|
||||||
|
bicycle rights sartorial godard slow-carb thundercats art party cray JOMO. Truffaut four dollar toast hoodie pour-over. Fanny pack iPhone jean shorts tote bag, master cleanse succulents tbh fixie gatekeep pok pok letterpress cornhole. Dreamcatcher tattooed hot chicken gatekeep, glossier salvia 8-bit cred. Fit lomo chillwave cold-pressed humblebrag narwhal. Meggings edison bulb fanny pack irony af pug pok pok whatever vexillologist vibecession cred butcher trust fund chia.
|
||||||
|
|
||||||
|
## Bitters kale chips chambray activated charcoal
|
||||||
|
|
||||||
|
wolf keffiyeh hell of selfies. Wolf readymade shoreditch flexitarian venmo single-origin coffee, knausgaard fit actually street art cold-pressed iPhone gatekeep. Migas bruh adaptogen semiotics marfa pickled yuccie. Locavore normcore lomo, shoreditch fashion axe actually glossier iPhone photo booth blue bottle DIY XOXO williamsburg. Pinterest whatever taxidermy, kale chips prism XOXO schlitz twee tote bag woke swag. Wayfarers fashion axe heirloom humblebrag synth. Whatever succulents PBR&B, pop-up enamel pin echo park tonx stumptown taiyaki.
|
@ -0,0 +1,14 @@
|
|||||||
|
---
|
||||||
|
title: "health goth DIY tattooed"
|
||||||
|
---
|
||||||
|
## Normcore cold-pressed ramps DSA
|
||||||
|
|
||||||
|
Normcore cold-pressed ramps DSA yes plz hot chicken green juice succulents leggings messenger bag truffaut iceland pabst ethical godard. Semiotics air plant marfa, drinking vinegar authentic iceland pug fit cloud bread cronut kickstarter glossier crucifix tumeric. Chicharrones polaroid flexitarian, seitan lumbersexual viral fam master cleanse four dollar toast scenester. Succulents poutine vegan keffiyeh meh, health goth DIY tattooed. Praxis roof party celiac chartreuse banjo butcher you probably haven't heard of them schlitz beard. Ethical tattooed kinfolk, cliche vegan messenger bag mukbang dreamcatcher cloud bread farm-to-table gatekeep trust fund.
|
||||||
|
|
||||||
|
## Palo santo leggings normcore aesthetic
|
||||||
|
|
||||||
|
bicycle rights sartorial godard slow-carb thundercats art party cray JOMO. Truffaut four dollar toast hoodie pour-over. Fanny pack iPhone jean shorts tote bag, master cleanse succulents tbh fixie gatekeep pok pok letterpress cornhole. Dreamcatcher tattooed hot chicken gatekeep, glossier salvia 8-bit cred. Fit lomo chillwave cold-pressed humblebrag narwhal. Meggings edison bulb fanny pack irony af pug pok pok whatever vexillologist vibecession cred butcher trust fund chia.
|
||||||
|
|
||||||
|
## Bitters kale chips chambray activated charcoal
|
||||||
|
|
||||||
|
wolf keffiyeh hell of selfies. Wolf readymade shoreditch flexitarian venmo single-origin coffee, knausgaard fit actually street art cold-pressed iPhone gatekeep. Migas bruh adaptogen semiotics marfa pickled yuccie. Locavore normcore lomo, shoreditch fashion axe actually glossier iPhone photo booth blue bottle DIY XOXO williamsburg. Pinterest whatever taxidermy, kale chips prism XOXO schlitz twee tote bag woke swag. Wayfarers fashion axe heirloom humblebrag synth. Whatever succulents PBR&B, pop-up enamel pin echo park tonx stumptown taiyaki.
|
@ -0,0 +1,17 @@
|
|||||||
|
---
|
||||||
|
title: "health goth DIY tattooed"
|
||||||
|
view: "page.njk"
|
||||||
|
meta:
|
||||||
|
description: "DSA yes plz hot chicken green juice"
|
||||||
|
---
|
||||||
|
## Normcore cold-pressed ramps DSA
|
||||||
|
|
||||||
|
Normcore cold-pressed ramps DSA yes plz hot chicken green juice succulents leggings messenger bag truffaut iceland pabst ethical godard. Semiotics air plant marfa, drinking vinegar authentic iceland pug fit cloud bread cronut kickstarter glossier crucifix tumeric. Chicharrones polaroid flexitarian, seitan lumbersexual viral fam master cleanse four dollar toast scenester. Succulents poutine vegan keffiyeh meh, health goth DIY tattooed. Praxis roof party celiac chartreuse banjo butcher you probably haven't heard of them schlitz beard. Ethical tattooed kinfolk, cliche vegan messenger bag mukbang dreamcatcher cloud bread farm-to-table gatekeep trust fund.
|
||||||
|
|
||||||
|
## Palo santo leggings normcore aesthetic
|
||||||
|
|
||||||
|
bicycle rights sartorial godard slow-carb thundercats art party cray JOMO. Truffaut four dollar toast hoodie pour-over. Fanny pack iPhone jean shorts tote bag, master cleanse succulents tbh fixie gatekeep pok pok letterpress cornhole. Dreamcatcher tattooed hot chicken gatekeep, glossier salvia 8-bit cred. Fit lomo chillwave cold-pressed humblebrag narwhal. Meggings edison bulb fanny pack irony af pug pok pok whatever vexillologist vibecession cred butcher trust fund chia.
|
||||||
|
|
||||||
|
## Bitters kale chips chambray activated charcoal
|
||||||
|
|
||||||
|
wolf keffiyeh hell of selfies. Wolf readymade shoreditch flexitarian venmo single-origin coffee, knausgaard fit actually street art cold-pressed iPhone gatekeep. Migas bruh adaptogen semiotics marfa pickled yuccie. Locavore normcore lomo, shoreditch fashion axe actually glossier iPhone photo booth blue bottle DIY XOXO williamsburg. Pinterest whatever taxidermy, kale chips prism XOXO schlitz twee tote bag woke swag. Wayfarers fashion axe heirloom humblebrag synth. Whatever succulents PBR&B, pop-up enamel pin echo park tonx stumptown taiyaki.
|
@ -0,0 +1,52 @@
|
|||||||
|
/**
 * ConfigStore
 *
 * Singleton key/value store shared across the build: the first construction
 * creates the instance, every later `new ConfigStore()` returns the same one.
 *
 * @author Björn Hase <me@herr-hase.wtf>
 * @license http://opensource.org/licenses/MIT The MIT License
 * @link https://gitea.node001.net/HerrHase/siteomat-webpack-plugin.git
 *
 */
class ConfigStore {

    /**
     * always hands back the one shared instance; only the very first
     * call initialises the empty data store
     */
    constructor() {
        if (!ConfigStore.instance) {
            this._data = {}
            ConfigStore.instance = this
        }

        return ConfigStore.instance
    }

    /**
     * store a value under the given key
     *
     * @param {String} key
     * @param {String|Object} value
     *
     */
    set(key, value) {
        this._data[key] = value
    }

    /**
     * read a value back by key; throws when nothing (truthy) is stored
     *
     * @param {String} key
     * @return {String|Object}
     * @throws {Error} when the key is not present
     *
     */
    get(key) {
        const value = this._data?.[key]

        if (!value) {
            throw new Error(key + ' not found in ConfigStore!')
        }

        return value
    }
}
|
||||||
|
|
||||||
|
// create instance
|
||||||
|
const instance = new ConfigStore();
|
||||||
|
|
||||||
|
module.exports = instance
|
@ -0,0 +1,53 @@
|
|||||||
|
const path = require('path')
|
||||||
|
const fs = require('fs')
|
||||||
|
|
||||||
|
const Media = require('./../factories/media.js')
|
||||||
|
|
||||||
|
/**
 * asset - checks mix-manifest.json for the given path and returns the
 * file path with a cache-busting id when the manifest contains an entry
 *
 * Falls back to the given path when no manifest file exists or the path
 * is not listed in it.
 *
 * @param {String} staticPath path as referenced in the templates
 * @return {String} resolved path from the manifest, or staticPath unchanged
 *
 */
function asset(staticPath) {

    let result = staticPath

    // path to mix-manifest; path.join inserts the separator — the previous
    // string concatenation produced "<cwd>mix-manifest.json", so the
    // manifest was never found
    const file = path.join(path.resolve(), 'mix-manifest.json')

    if (fs.existsSync(file)) {

        const manifest = fs.readFileSync(file)
        const files = JSON.parse(manifest)

        if (files[staticPath]) {
            result = files[staticPath]
        }
    }

    return result
}
|
||||||
|
|
||||||
|
/**
 * resize - nunjucks async filter that crops/resizes an image via Media
 *
 * @param {String} src relative path of the source image
 * @param {Object} sizes sizes object handed to the resize
 * @param {Object} options additional options for the resize
 * @param {Function} done nunjucks async-filter callback (error, result)
 *
 */
async function resize(src, sizes, options, done) {
    try {
        const media = new Media()
        done(null, await media.resize(src, sizes, options))
    } catch (error) {
        // without this a failed resize would never invoke done() and
        // nunjucks would wait forever on the async filter
        done(error)
    }
}
|
||||||
|
|
||||||
|
module.exports = { asset, resize }
|
@ -0,0 +1,99 @@
|
|||||||
|
const path = require('path')
|
||||||
|
const fs = require('fs')
|
||||||
|
const sharp = require('sharp')
|
||||||
|
const mkdirp = require('mkdirp')
|
||||||
|
const crypto = require('crypto')
|
||||||
|
const slugify = require('slugify')
|
||||||
|
|
||||||
|
const configStore = require('./../config.js')
|
||||||
|
|
||||||
|
/**
 * Media
 *
 * Resizes and optimizes an image with sharp and writes the result into a
 * content-addressed directory below "/assets/" in the destination. Source
 * and destination directories are read from the shared ConfigStore
 * ("source" / "destination" keys).
 *
 */
class Media {

    constructor() {
        // all generated files are written below this directory,
        // relative to the configured destination
        this._DIR_ASSETS = '/assets/'
    }

    /**
     * resize an image and write the optimized result to the destination
     *
     * @param {string} src image path relative to the configured "source" dir
     * @param {object} sizes sizes object handed to sharp's resize()
     * @param {Object} [options={}] extra resize options for sharp
     * @return {string} public path of the written file — note the extension
     *                  may differ from src (non-gifs become .webp)
     *
     */
    async resize(src, sizes, options = {}) {

        this._extension = path.extname(src)
        this._filename = slugify(path.basename(src, this._extension))

        // NOTE(review): sharp() returns a plain instance, so this await is
        // a no-op — kept as-is to preserve behavior
        this._process = await sharp(configStore.get('source') + '/' + src)

        // resize without options and with options
        // NOTE(review): only the no-options branch awaits; sharp's resize()
        // is chainable/synchronous, so both branches behave the same
        if (Object.getOwnPropertyNames(options).length === 0) {
            await this._process
                .resize(sizes)
        } else {
            this._process
                .resize(sizes, options)
        }

        // optimize (may switch this._extension to .webp)
        this._optimize()

        const fileBuffer = await this._process
            .toBuffer()

        // content-addressed sub-directory derived from the file's hash
        const relativeDestinationPath = this._DIR_ASSETS + this._resolveRelativeDestinationPath(fileBuffer)

        // create directories and write file
        mkdirp.sync(configStore.get('destination') + relativeDestinationPath)
        fs.writeFileSync(configStore.get('destination') + relativeDestinationPath + '/' + this._filename + this._extension, fileBuffer)

        return relativeDestinationPath + '/' + this._filename + this._extension
    }

    /**
     * re-encode the image: gifs are reoptimised in place, everything else
     * is converted to lossless webp (this also changes this._extension)
     *
     * @TODO much nicer to add a hook system so behavior can be change
     *
     */
    _optimize() {
        if (this._extension === '.gif') {
            this._process
                .gif({
                    reoptimise: true
                })
        } else {

            // change extension
            this._extension = '.webp'
            this._process
                .webp({
                    lossless: true
                })
        }
    }

    /**
     * resolve the relative path to write the file to: the sha1 of the file
     * content is split into 8-character chunks that become directory levels
     * (e.g. "a1b2c3d4/e5f6...")
     *
     * @param {object} fileBuffer
     * @return {string}
     *
     */
    _resolveRelativeDestinationPath(fileBuffer) {
        const hash = crypto.createHash('sha1')
        hash.update(fileBuffer)

        return hash.digest('hex').match(new RegExp('.{1,8}', 'g')).join('/')
    }
}
|
||||||
|
|
||||||
|
module.exports = Media
|
@ -0,0 +1,111 @@
|
|||||||
|
const { XMLParser, XMLBuilder, XMLValidator} = require('fast-xml-parser')
|
||||||
|
const dayjs = require('dayjs')
|
||||||
|
|
||||||
|
/**
 * Sitemap
 *
 * collects page urls and renders them as a sitemap xml document
 *
 * @author Björn Hase <me@herr-hase.wtf>
 * @license http://opensource.org/licenses/MIT The MIT License
 * @link https://gitea.node001.net/HerrHase/siteomat-webpack-plugin.git
 *
 */
class Sitemap {

    /**
     *
     * @param {object} site site configuration — only "domain" is used here
     *
     */
    constructor(site) {
        this._site = site
        this._urls = []
    }

    /**
     * adding page to urls of sitemap, check if page is valid for sitemap
     *
     * @param {object} page
     *
     */
    addPage(page) {
        if (this._isValid(page)) {
            this._urls.push({
                loc: 'https://' + this._site.domain + page.pathname + '/' + page.filename,
                lastmod: dayjs().format()
            })
        }
    }

    /**
     * get xml as string
     *
     * @return {string}
     *
     */
    getXmlAsString() {
        return this._createXml(this._urls)
    }

    /**
     * check if a page belongs into the sitemap: it must be of type "html"
     * and must not declare "noindex" in its meta robots entry
     *
     * @param {object} page
     * @return {boolean}
     *
     */
    _isValid(page) {

        // only rendered html pages can appear in the sitemap
        if (page.type !== 'html') {
            return false
        }

        // iterate the meta entries without reassigning page.meta — the
        // previous version replaced the caller's meta object with an
        // entries array (corrupting the page for later use) and then read
        // .name/.content on [key, value] pairs, which never matched
        if (page.meta) {
            for (const [name, content] of Object.entries(page.meta)) {
                if (name === 'robots' && String(content).includes('noindex')) {
                    return false
                }
            }
        }

        return true
    }

    /**
     * create xml with urls and return it as string
     *
     * @param {object} urls
     * @return {string}
     *
     */
    _createXml(urls) {

        // builder for XML
        const builder = new XMLBuilder({
            format: true,
            processEntities: false,
            ignoreAttributes: false,
            attributeNamePrefix: '@'
        })

        const xmlString = builder.build({
            '?xml': {
                '@version': '1.0',
                '@encoding': 'UTF-8'
            },
            'urlset': {
                '@xmlns': 'http://www.sitemaps.org/schemas/sitemap/0.9',
                'url': urls
            }
        })

        return xmlString
    }
}
|
||||||
|
|
||||||
|
module.exports = Sitemap
|
@ -0,0 +1,42 @@
|
|||||||
|
const yaml = require('js-yaml')
|
||||||
|
const { marked } = require('marked')
|
||||||
|
|
||||||
|
/**
 * parse string of file, parse yaml and parse markdown
 *
 * Splits an optional "---" yaml front-matter section from the markdown
 * body; the yaml becomes `fields`, the rendered markdown becomes `content`.
 * Both parts are optional.
 *
 * @author Björn Hase <me@herr-hase.wtf>
 * @license http://opensource.org/licenses/MIT The MIT License
 * @link https://gitea.node001.net/HerrHase/siteomat-webpack-plugin.git
 *
 * @param {string} fileString raw file content
 * @return {{fields: (Object|undefined), content: string}}
 * @throws {Error} when the yaml section cannot be parsed
 *
 */
function parseMarkdownFile(fileString) {

    // regex get yaml section and markdown
    // thanks to, https://github.com/getgrav/grav
    // (regex literal created per call, so the /g flag carries no state)
    const regex = /^(---\n(.+?)\n---){0,}(.*)$/gs
    const matches = regex.exec(fileString)

    const result = {
        fields: undefined,
        content: ''
    }

    // parse yaml section if present; keep the original parse error
    // attached as cause instead of discarding it
    if (matches?.[2]) {
        try {
            result.fields = yaml.load(matches[2])
        } catch (error) {
            throw new Error('Yaml has errors!', { cause: error })
        }
    }

    // if markdown section exists parse it to html
    if (matches?.[3]) {
        result.content = marked.parse(matches[3])
    }

    return result
}
|
||||||
|
|
||||||
|
module.exports = parseMarkdownFile
|
@ -0,0 +1,25 @@
|
|||||||
|
const yaml = require('js-yaml')
|
||||||
|
|
||||||
|
/**
 * parse string of file and only parse yaml
 *
 * @author Björn Hase <me@herr-hase.wtf>
 * @license http://opensource.org/licenses/MIT The MIT License
 * @link https://gitea.node001.net/HerrHase/siteomat-webpack-plugin.git
 *
 * @param {string} file raw yaml file content
 * @return {*} parsed yaml document
 * @throws {Error} when the yaml cannot be parsed
 *
 */
function parseYamlFile(file) {

    let config

    try {
        config = yaml.load(file)
    } catch (error) {
        // keep the underlying yaml error (line/column info) as cause
        // instead of discarding it
        throw new Error('parseYamlFile: Yaml has errors!', { cause: error })
    }

    return config
}
|
||||||
|
|
||||||
|
module.exports = parseYamlFile
|
@ -0,0 +1,130 @@
|
|||||||
|
const fs = require('fs')
|
||||||
|
const path = require('path')
|
||||||
|
|
||||||
|
const BlockFactory = require('./../factories/block.js')
|
||||||
|
|
||||||
|
/**
 * Blocks - search and collect the block files of a single page
 *
 * Blocks live in a "_blocks" directory next to the page's index file;
 * every markdown file found there (recursively) is parsed and grouped
 * by its base name.
 *
 * @author Björn Hase <me@herr-hase.wtf>
 * @license http://opensource.org/licenses/MIT The MIT License
 * @link https://gitea.node001.net/HerrHase/siteomat-webpack-plugin.git
 *
 */
class Blocks {

    /**
     *
     * @param {string} dirPath directory of the page the blocks belong to
     *
     */
    constructor(dirPath) {

        this.FILE_EXTENSION = '.md'
        this.DIRECTORY_BLOCKS = '_blocks'

        this._dirPath = dirPath + '/' + this.DIRECTORY_BLOCKS;
        this._results = {}
    }

    /**
     * collect all blocks; an empty object when no "_blocks" dir exists
     *
     * @return {object} blocks grouped by name
     */
    find() {

        if (fs.existsSync(this._dirPath)) {
            this._findFiles(this._dirPath)
        }

        return this._results
    }

    /**
     * find files recursively below the blocks directory
     *
     * @param {string} dirPath root blocks directory
     * @param {string} [parent=''] sub-path currently visited, relative to dirPath
     *
     */
    _findFiles(dirPath, parent = '') {

        // list the directory currently visited (dirPath + parent) — the
        // previous version always re-read the root directory, which made
        // the recursion re-discover the same sub-directory forever
        const files = fs.readdirSync(dirPath + parent, {
            withFileTypes: true
        })

        files.forEach((file) => {

            // descend into sub-directories, nothing else to do for them
            if (file.isDirectory()) {
                this._findFiles(dirPath, parent + '/' + file.name)
                return;
            }

            // skip for file that is not markdown
            if (path.extname(file.name) !== this.FILE_EXTENSION) {
                return;
            }

            // get file
            const fileString = this._getFile(file, dirPath + parent)

            // skip if empty
            if (!fileString) {
                return;
            }

            // create block object and add it to its group
            const block = new BlockFactory(fileString)
            const blockname = this._parseBlockname(file.name)

            if (!this._results[blockname]) {
                this._results[blockname] = []
            }

            this._results[blockname].push(block.get())
        })
    }

    /**
     * remove '.md' and also ordering number from filename
     * ("hero-1.md" -> "hero")
     *
     * @param {string} filename
     * @return {string}
     *
     */
    _parseBlockname(filename) {
        // "." escaped — the previous pattern treated it as "any character"
        const regex = /[-_]?[0-9]*\.md$/
        return filename.replace(regex, '')
    }

    /**
     * get file content
     *
     * @param {fs.Dirent} file directory entry of the candidate file
     * @param {string} dirPath directory the entry lives in
     * @return {string|null} file content, or null when not a readable file
     *
     */
    _getFile(file, dirPath) {

        let result = null

        let filePath = dirPath + '/' + file.name

        if (fs.existsSync(filePath) && file.isFile()) {
            result = fs.readFileSync(filePath, 'utf8')
        }

        return result
    }

}
|
||||||
|
|
||||||
|
// CommonJS export of the Blocks query
module.exports = Blocks
|
@ -0,0 +1,188 @@
|
|||||||
|
const fs = require('fs')
|
||||||
|
const path = require('path')
|
||||||
|
const {
|
||||||
|
Filter, orderBy, reduce
|
||||||
|
} = require('@helpers/siteomat-query')
|
||||||
|
|
||||||
|
const PageFactory = require('./../factories/page.js')
|
||||||
|
const BlocksQuery = require('./../queries/blocks.js')
|
||||||
|
|
||||||
|
/**
 * Pages - search, filter and find pages
 *
 * @author Björn Hase <me@herr-hase.wtf>
 * @license http://opensource.org/licenses/MIT The MIT License
 * @link https://gitea.node001.net/HerrHase/siteomat-webpack-plugin.git
 *
 */
class Pages {

    /**
     * @param {string} dirPath - root directory that is searched for pages
     *
     */
    constructor(dirPath) {

        // constants
        this.FILE_EXTENSION = '.md'
        this.FILE_INDEX = 'index'
        this.DIRECTORY_BLOCKS = '_blocks'

        // default options for find, deep = -1 means unlimited recursion
        this._options = {
            parent: '',
            deep: -1
        }

        this._dirPath = dirPath
        this._results = []
    }

    /**
     * find pages
     *
     * @param {Object} [options={}] - filter, orderBy, limit, offset, parent, deep
     * @return {array}
     *
     */
    find(options = {}) {
        this._results = []
        this._count = 0

        // reset filter so a previous find() call cannot leak its filter
        // into this one
        this._filter = null

        options = Object.assign({}, this._options, options)

        if (options.filter) {
            this._filter = new Filter(options.filter)
        }

        this._findFiles(this._dirPath, options)

        // sorting results - must be written back to _results, which is
        // what reduce() and the return below read
        if (options.orderBy && options.orderBy.length > 0) {
            this._results = orderBy(options.orderBy, this._results)
        }

        // apply limit / offset
        if (options.limit || options.offset) {
            this._results = reduce(options, this._results)
        }

        return this._results
    }

    /**
     * find files recursively and push matching pages to this._results
     *
     * @param {string} dirPath - root directory
     * @param {Object} options - carries parent (path relative to dirPath)
     *                           and deep (levels of recursion left, -1 = unlimited)
     *
     */
    _findFiles(dirPath, options) {

        // getting all files of the current directory
        const files = fs.readdirSync(dirPath + options.parent, {
            withFileTypes: true
        })

        files.forEach((file) => {

            // skip for file that is not markdown
            if (file.isFile() && path.extname(file.name) !== this.FILE_EXTENSION) {
                return;
            }

            // skip for file that is index but not root
            if (file.isFile() && file.name === (this.FILE_INDEX + this.FILE_EXTENSION) && options.parent !== '') {
                return;
            }

            // skip for directory that contains partials
            if (file.isDirectory() && file.name === this.DIRECTORY_BLOCKS) {
                return;
            }

            // if directory going deep; the remaining depth is decremented on
            // a copy so sibling directories at this level are not affected
            if (file.isDirectory() && (options.deep > 0 || options.deep === -1)) {

                const childrenOptions = Object.assign({}, options, {
                    parent: options.parent + '/' + file.name,
                    deep: options.deep === -1 ? -1 : options.deep - 1
                })

                this._findFiles(dirPath, childrenOptions)
            }

            // get file - a directory may still resolve to its index file
            // inside _getFile, so no early return after recursing
            const content = this._getFile(file, dirPath + options.parent)

            // skip if empty
            if (!content) {
                return;
            }

            // collect the blocks that belong to this page
            const blocks = this._getBlocks(dirPath + options.parent + '/' + file.name)

            // create page object and add to results
            const page = new PageFactory(file, options.parent, content, blocks)

            // check for filters and skip
            if (this._filter && !this._filter.validate(page.get())) {
                return;
            }

            this._results.push(page.get())
        })
    }

    /**
     * query all blocks below a page path
     *
     * @param {string} dirPath
     * @return {array}
     *
     */
    _getBlocks(dirPath) {
        const blocksQuery = new BlocksQuery(dirPath)
        return blocksQuery.find()
    }

    /**
     * get file content, falling back to '<dir>/index.md' for directories
     *
     * @param {fs.Dirent} file
     * @param {string} dirPath
     * @return {mixed} content as utf8 string, or null
     *
     */
    _getFile(file, dirPath) {

        // file
        let result = null

        // path of file, first try the entry itself
        let filePath = dirPath + '/' + file.name

        if (fs.existsSync(filePath) && file.isFile()) {
            result = fs.readFileSync(filePath, 'utf8')
        } else {

            // fall back to the index file inside the directory
            filePath = dirPath + '/' + file.name + '/' + this.FILE_INDEX + this.FILE_EXTENSION

            if (fs.existsSync(filePath)) {
                result = fs.readFileSync(filePath, 'utf8')
            }
        }

        return result
    }

}
|
||||||
|
|
||||||
|
// CommonJS export of the Pages query
module.exports = Pages
|
@ -0,0 +1,24 @@
|
|||||||
|
const { assert } = require('chai')
|
||||||
|
const fs = require('fs')
|
||||||
|
|
||||||
|
describe('Blocks', function () {

    // run the blocks query once against the test ressources directory
    const BlocksQuery = require('./../src/queries/blocks.js')

    const query = new BlocksQuery('./ressources')
    const blocks = query.find()

    // assertions on the grouped result
    it('block is array', () => {
        assert.isArray(blocks.block)
    })

    it('block has length of 2', () => {
        assert.equal(blocks.block.length, 2)
    })

    it('title in first block are equal', () => {
        assert.equal(blocks.block[0].title, 'health goth DIY tattooed')
    })
})
|
@ -0,0 +1,41 @@
|
|||||||
|
const { assert } = require('chai')
|
||||||
|
const fs = require('fs')
|
||||||
|
|
||||||
|
describe('Parser Markdown', function () {

    // load the markdown parser and run it once over the default fixture
    const parseMarkdownFile = require('./../src/parsers/markdown.js')

    const raw = fs.readFileSync('./ressources/default.md', 'utf8')
    const result = parseMarkdownFile(raw)

    // assertions on the parsed front matter
    it('fields exists', () => {
        assert.notEqual(result.fields, undefined)
    })

    it('fields is object', () => {
        assert.isObject(result.fields)
    })

    it('fields are valid', () => {
        assert.deepEqual(result.fields, {
            title: 'health goth DIY tattooed',
            view: 'page.njk',
            meta: {
                description: 'DSA yes plz hot chicken green juice'
            }
        })
    })

    // assertions on the rendered body
    it('content exists', () => {
        assert.notEqual(result.content, '')
    })

    it('content has html', () => {
        assert.match(result.content, /<h2 id="normcore-cold-pressed-ramps-dsa">Normcore cold-pressed ramps DSA<\/h2>/)
    })
})
|
Loading…
Reference in new issue