HerrHase 2 years ago
parent 494e79c86e
commit 0efa8adaab

@ -12,8 +12,8 @@ finished, it was only a proof of concept. But now it works for creating an entire
Next will be,
* Some Tests
* Standalone, handle Webpack only as wrapper
* Some more Tests
* Image Handling in Markdown
Maybe later,
@ -148,6 +148,54 @@ can be used like this,
{% endFor %}
```
## Media
Image Files can be added to the Markdown-Structure and will be processed by [Sharp](https://sharp.pixelplumbing.com/).
```
recipes
└ index.md
_images
└ dog.jpg
```
In Fields, all keys named "src" will be handled as Paths to Images. Files will be searched first
in the current Directory of the Page; if nothing is found, the search continues in the Root-Directory of the
Markdown-Structure. Blocks can also have their own Images.
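For example, an Image that belongs only to a single Page can be placed in an `_images` Directory next to it (a possible layout, following the resolution order described above):
```
recipes
└ index.md
└ _images
  └ dog.jpg
```
The Image is then referenced in the Front Matter of the Page: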
```
---
title: "health goth DIY tattooed"
view: "home.njk"
meta:
description: "La"
media:
teaser:
src: "_images/dog.jpg"
alt: "cold-pressed"
---
```
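During the build the `src` value is replaced with the Path of the generated File: the Image is converted to webp (gifs are only re-optimized and keep their extension) and written below `/assets/`, into Directories derived from the sha1 of the Source File. The exact Hash depends on the File, so the Result looks roughly like this:
```
media:
  teaser:
    src: "/assets/a6c45d17/11bf0a4e/a2b1d75d/dc85ca56/71c63294/dog.webp"
```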
It is also possible to add Sizes, so that multiple Variants of an Image will be generated,
```
---
title: "health goth DIY tattooed"
view: "home.njk"
meta:
description: "La"
media:
teaser:
src:
src: '_images/dog.jpg'
sizes:
- width: 300
- width: 500
height: 100
alt: "cold-pressed"
---
```
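With Sizes, the resolved `src` becomes an Object with one Key per Variant (the Width, or Width and Height joined by `x`) plus `full` for the unresized Conversion, as expected by the new Page Tests in this Commit:
```
media:
  teaser:
    src:
      300: "/assets/<hash>/dog300.webp"
      500x100: "/assets/<hash>/dog500x100.webp"
      full: "/assets/<hash>/dog.webp"
```
Here `<hash>` stands for the sha1-based Directory Segments of the Source File.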
## Queries
Queries can be used in Templates to get Pages.
@ -207,25 +255,6 @@ This function handles the manifest-File from [Laravel-Mix](https://laravel-mix.com/).
<script src="{{ asset('js/app.js') }}"></script>
```
#### Filters
##### resize
The Filter is using [sharp](https://github.com/lovell/sharp), for crop, resize and
optimize Images. The Filter needs a relative Path in the File Structure.
Basic Usage:
```
{% page.teaser.src | resize({ 'width': '300' }) %}
```
Add options:
```
{% page.teaser.src | resize({ 'width': '300' }, { sigma: 2 }) %}
```
## Json
Results from a PageQuery can also be created as a json-File. They can be used with a

@ -1,6 +1,6 @@
{
"name": "@site-o-mat/core",
"version": "0.2.0",
"version": "0.3.0",
"build": "webpack",
"author": "Björn Hase <me@herr-hase.wtf>",
"main": "index.js",

Binary file not shown.


@ -1,5 +1,7 @@
---
title: "Data"
media:
src: '_images/dog-tired.webp'
---
## Normcore cold-pressed ramps DSA

Binary file not shown.


Binary file not shown.


@ -3,6 +3,8 @@ title: "article"
view: "article.njk"
meta:
description: "DSA yes plz hot chicken green juice"
media:
src: '_images/dog.jpg'
---
## Normcore cold-pressed ramps DSA

@ -3,6 +3,13 @@ title: "blog"
view: "page.njk"
meta:
description: "DSA yes plz hot chicken green juice"
media:
src:
src: '_images/dog.jpg'
sizes:
- width: 300
- width: 500
height: 100
---
## Normcore cold-pressed ramps DSA

@ -3,6 +3,13 @@ title: "health goth DIY tattooed"
view: "page.njk"
meta:
description: "DSA yes plz hot chicken green juice"
media:
src:
src: '_images/dog.jpg'
sizes:
- width: 300
- width: 500
height: 100
---
## Normcore cold-pressed ramps DSA

@ -1,8 +1,6 @@
const path = require('path')
const fs = require('fs')
const Media = require('./../factories/media.js')
/**
* asset - checks manifest.json for given path and return
* file path with id for cache busting
@ -33,21 +31,4 @@ function asset(staticPath) {
return result
}
/**
* asset - checks manifest.json for given path and return
* file path with id for cache busting
*
*
* @param {String} publicPath
*
*/
async function resize(src, sizes, options, done)
{
const media = new Media()
src = await media.resize(src, sizes, options)
done(null, src)
}
module.exports = { asset, resize }
module.exports = { asset, media }

@ -4,7 +4,7 @@ const assign = require('assign-deep')
const { minify } = require('html-minifier')
const configStore = require('./../config.js')
const { asset, resize } = require('./helpers.js')
const { asset } = require('./helpers.js')
const PageQuery = require('./../queries/pages.js')
const dayjs = require('dayjs')
@ -37,14 +37,6 @@ class Engine {
this.nunjucks = nunjucks.configure(views, this._options)
// add filter: resize
this.nunjucks.addFilter('resize', (...args) => {
const done = args.pop()
const options = args?.[2] ? {} : args[2]
resize(args[0], args[1], options, done)
}, true)
// adding defaults for view, data from site.yml, functions and pageQuery
this._defaults = assign(this._options.defaults, {
site: site,

@ -1,6 +1,5 @@
const path = require('path')
const parseMarkdownFile = require('./../parsers/markdown.js')
const assign = require('assign-deep')
/**
@ -23,11 +22,13 @@ class Block {
* @param {string} fileString
*
*/
constructor(fileString) {
constructor(fileString, dirPath) {
// parse string of file
const parsedFile = parseMarkdownFile(fileString)
this._dirPath = dirPath
// getting parsed data
this._content = parsedFile.content
this._fields = parsedFile.fields

@ -8,16 +8,25 @@ const slugify = require('slugify')
const configStore = require('./../config.js')
/**
* Media
*
* change size, optimize and copy media to assets
*
* @author Björn Hase <me@herr-hase.wtf>
* @license http://opensource.org/licenses/MIT The MIT License
* @link https://gitea.node001.net/HerrHase/siteomat-webpack-plugin.git
*
*/
class Media {
constructor() {
constructor(path = null) {
this._path = path
this._DIR_ASSETS = '/assets/'
}
/**
* resolve media
*
* @param {string} src
* @param {object} sizes
@ -25,74 +34,182 @@ class Media {
* @return {string}
*
*/
async resize(src, sizes, options = {}) {
resolve(src, sizes = {}, options = {}) {
this._extension = path.extname(src)
this._filename = slugify(path.basename(src, this._extension))
let extension = path.extname(src)
let sourcePath
this._process = await sharp(configStore.get('source') + '/' + src)
const filename = slugify(path.basename(src, extension))
// resize without options and with options
if (Object.getOwnPropertyNames(options).length === 0) {
await this._process
.resize(sizes)
// check for images in path
if (this._path && fs.existsSync(configStore.get('source') + this._path + '/' + src)) {
sourcePath = configStore.get('source') + this._path + '/' + src
} else {
sourcePath = configStore.get('source') + '/' + src
}
// getting sharp
const process = sharp(sourcePath)
if (extension === '.gif') {
process
.gif({
reoptimise: true
})
} else {
this._process
.resize(sizes, options)
// change extension
extension = '.webp'
process
.webp({
lossless: true
})
}
// optimize
this._optimize()
// destination
const destinationPath = this._getDestinationPath(sourcePath)
const fileBuffer = await this._process
.toBuffer()
// create files to write
const filesToWrite = this._getFilesToWrite(filename, extension, destinationPath, sizes)
const relativeDestinationPath = this._DIR_ASSETS + this._resolveRelativeDestinationPath(fileBuffer)
// results contains only paths as strings
const results = {}
// create directories and write file
mkdirp.sync(configStore.get('destination') + relativeDestinationPath)
fs.writeFileSync(configStore.get('destination') + relativeDestinationPath + '/' + this._filename + this._extension, fileBuffer)
// create path if not exists
if (!fs.existsSync(configStore.get('destination') + destinationPath)) {
mkdirp.sync(configStore.get('destination') + destinationPath)
}
return relativeDestinationPath + '/' + this._filename + this._extension
filesToWrite.forEach((file) => {
if (!fs.existsSync(configStore.get('destination') + file.path)) {
this._writeFile(file, process, options)
}
results[file.name] = file.path
})
return this._reduce(results)
}
/**
* @TODO much nicer to add a hook system so behavior can be changed
* if only 'full' is in results, reduce object to string
*
* @param {Object} results
* @return {mixed}
*
* @param {string} extension
*/
_reduce(results) {
if (Object.getOwnPropertyNames(results).length === 1) {
results = results['full']
}
return results
}
/**
* getting files to write
*
* @param {string} src
* @param {string} extension
* @param {Object} sizes
* @return {string}
*/
_optimize() {
if (this._extension === '.gif') {
this._process
.gif({
reoptimise: true
})
} else {
_getFilesToWrite(filename, extension, destinationPath, sizes) {
// change extension
this._extension = '.webp'
this._process
.webp({
lossless: true
const results = []
// add original
results.push(this._getFile(filename, destinationPath, extension))
// check for sizes
if (typeof sizes === 'object' && !Array.isArray(sizes)) {
results.push(this._getFile(filename, destinationPath, extension, sizes))
} else if (Array.isArray(sizes)) {
sizes.forEach((size) => {
results.push(this._getFile(filename, destinationPath, extension, size))
})
}
return results
}
/**
* resize file if sizes are given and
* write it to destination
*
* @param {string} file
* @param {object} process
* @param {Object} options
*/
_writeFile(file, process, options) {
// resize without options and with options
if (file.sizes && Object.getOwnPropertyNames(options).length === 0 && Object.getOwnPropertyNames(file.sizes).length > 0) {
process.resize(file.sizes)
} else if (file.sizes && Object.getOwnPropertyNames(options).length > 0 && Object.getOwnPropertyNames(file.sizes).length > 0) {
process.resize(file.sizes, options)
}
process.toFile(configStore.get('destination') + file.path)
}
/**
* generate destination path from hash of file
*
* @param {string} sourcePath
* @return {string}
*
*/
_resolveRelativeDestinationPath(fileBuffer) {
_getDestinationPath(sourcePath) {
const hash = crypto.createHash('sha1')
hash.update(fileBuffer)
const file = fs.readFileSync(sourcePath)
// getting hash from file
hash.update(file)
return this._DIR_ASSETS + hash.digest('hex').match(new RegExp('.{1,8}', 'g')).join('/')
}
/**
* create file as object, adding path, name with sizes
*
* @param {string} filename
* @param {string} destinationPath
* @param {string} extension
* @param {Object} sizes
* @return {object}
*/
_getFile(filename, destinationPath, extension, sizes = undefined) {
let file = {
name: ''
}
let prefix = ''
// check for sizes
if (sizes && sizes.width) {
file.name += sizes.width
}
if (sizes && sizes.height) {
if (sizes.width) {
prefix = 'x'
}
file.name += prefix + sizes.height
}
// create path before name is set to 'full' as fallback
file.path = destinationPath + '/' + filename + file.name + extension
if (!file.name) {
file.name = 'full'
}
if (sizes) {
file.sizes = sizes
}
return hash.digest('hex').match(new RegExp('.{1,8}', 'g')).join('/')
return file
}
}
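A minimal sketch of how the reworked `Media` factory can be used on its own; the require paths and the `configStore` values are assumptions taken from the test setup in this commit:
```
const configStore = require('./src/config.js')
const Media = require('./src/factories/media.js')

// source and destination directories, mirroring the test setup
configStore.set('source', './ressources')
configStore.set('destination', './dist')

// resolve a root-level image into a 300px wide variant;
// the full (unresized) conversion is always generated as well
const media = new Media()
const result = media.resolve('_images/dog.jpg', [{ width: 300 }])

// result is an object of generated paths, e.g.
// { "300": "/assets/…/dog300.webp", "full": "/assets/…/dog.webp" }
console.log(result)
```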

@ -4,10 +4,11 @@ const merge = require('lodash.merge')
const nunjucks = require('nunjucks')
const assign = require('assign-deep')
const Media = require('./../factories/media.js')
const parseMarkdownFile = require('./../parsers/markdown.js')
/**
* Page
* Page - building from markdown file
*
*
* @author Björn Hase <me@herr-hase.wtf>
@ -56,6 +57,24 @@ class Page {
this._content = result.content
this._blocks = blocks
// check for fields and resolve media
if (this._fields) {
this._fields = this._resolveMedia(this._fields, this._dirPath)
}
// check for blocks and resolve media
if (this._blocks) {
for (const key of Object.keys(this._blocks)) {
if (Array.isArray(this._blocks[key])) {
this._blocks[key].forEach((fields, index) => {
this._blocks[key][index] = this._resolveMedia(fields, this._dirPath + '/_blocks')
})
} else {
this._blocks[key] = this._resolveMedia(this._blocks[key], this._dirPath + '/_blocks')
}
}
}
}
/**
@ -76,6 +95,38 @@ class Page {
}, this._fields)
}
/**
* walk fields recursively and resolve every key named "src"
*
* @param {Object} fields
* @param {string} dirPath
* @return {Object}
*/
_resolveMedia(fields, dirPath) {
for (const key of Object.keys(fields)) {
if (key === 'src') {
fields[key] = this._resolveMediaSrc(fields[key], dirPath)
}
if (toString.call(fields[key]) === '[object Object]') {
fields[key] = this._resolveMedia(fields[key], dirPath)
}
}
return fields
}
_resolveMediaSrc(field, dirPath) {
const media = new Media(dirPath)
if (typeof field === 'string' || field instanceof String) {
field = media.resolve(field)
}
if (typeof field === 'object' || field instanceof Object) {
field = media.resolve(field.src, field.sizes)
}
return field
}
/**
*
*

@ -43,7 +43,6 @@ class Blocks {
* @return {array}
*/
find() {
if (fs.existsSync(this._dirPath)) {
this._findFiles(this._dirPath)
}
@ -72,11 +71,6 @@ class Blocks {
return
}
// if directory going deep
if (file.isDirectory()) {
this._findFiles(dirPath, parent + '/' + file.name)
}
// get file
const fileString = this._getFile(file, dirPath + parent)
@ -86,7 +80,7 @@ class Blocks {
}
// create block object and add to blocks
const block = new BlockFactory(fileString)
const block = new BlockFactory(fileString, this._dirPath)
const blockname = this._parseBlockname(file.name)
if (this._isArray(file.name)) {
@ -150,4 +144,4 @@ class Blocks {
}
module.exports =Blocks
module.exports = Blocks

@ -135,7 +135,7 @@ class Pages {
// check for filters and skip
if (this._filter && !this._filter.validate(page.get())) {
return;
return
}
this._results.push(page.get())

@ -12,7 +12,7 @@ const PagesQuery = require('./queries/pages.js')
const parseYamlFile = require('./parsers/yaml.js')
/**
* Siteomat
* Site-O-Mat
*
*
*

@ -1,8 +1,13 @@
const { assert } = require('chai')
const fs = require('fs')
const configStore = require('./../src/config.js')
describe('Blocks / Array', function () {
configStore.set('source', './ressources')
configStore.set('destination', './dist')
// get function parseMarkdownFile
const BlocksQuery = require('./../src/queries/blocks.js')
@ -25,6 +30,9 @@ describe('Blocks / Array', function () {
describe('Blocks / Single', function () {
configStore.set('source', './ressources')
configStore.set('destination', './dist')
// get function parseMarkdownFile
const BlocksQuery = require('./../src/queries/blocks.js')

@ -2,9 +2,13 @@ const { assert } = require('chai')
const fs = require('fs')
const PagesQuery = require('./../src/queries/pages.js')
const configStore = require('./../src/config.js')
describe('Page /index.md', function () {
configStore.set('source', './ressources')
configStore.set('destination', './dist')
const query = new PagesQuery('./ressources');
const results = query.find()
@ -29,6 +33,9 @@ describe('Page /index.md', function () {
describe('Page /blog/index.md', function () {
configStore.set('source', './ressources')
configStore.set('destination', './dist')
const query = new PagesQuery('./ressources');
const results = query.find()
@ -49,10 +56,21 @@ describe('Page /blog/index.md', function () {
it('permalink', function() {
assert.equal(page.permalink, '/blog')
})
it('fields has media src', function() {
assert.deepEqual(page.media.src, {
"300": "/assets/88c010ea/4ca9b5f5/6024c57d/05899fae/a33d9a45/dog300.webp",
"500x100": "/assets/88c010ea/4ca9b5f5/6024c57d/05899fae/a33d9a45/dog500x100.webp",
"full": "/assets/88c010ea/4ca9b5f5/6024c57d/05899fae/a33d9a45/dog.webp"
})
})
})
describe('Page /blog/article.md', function () {
configStore.set('source', './ressources')
configStore.set('destination', './dist')
const query = new PagesQuery('./ressources');
const results = query.find()
@ -69,4 +87,8 @@ describe('Page /blog/article.md', function () {
it('permalink', function() {
assert.equal(page.permalink, '/blog/article')
})
it('fields has media src', function() {
assert.equal(page.media.src, '/assets/a6c45d17/11bf0a4e/a2b1d75d/dc85ca56/71c63294/dog.webp')
})
})

@ -23,6 +23,19 @@ describe('Parser Markdown', function () {
it('fields are valid', function() {
assert.deepEqual(result.fields, {
media: {
src: {
sizes: [{
width: 300
},
{
height: 100,
width: 500
}
],
src: '_images/dog.jpg'
}
},
title: 'health goth DIY tattooed',
view: 'page.njk',
meta: {
