@@ -8,16 +8,25 @@ const slugify = require('slugify')
 const configStore = require('./../config.js')

 /**
  * Media
  *
  * change size, optimize and copy media to assets
  *
  * @author Björn Hase <me@herr-hase.wtf>
  * @license http://opensource.org/licenses/MIT The MIT License
  * @link https://gitea.node001.net/HerrHase/siteomat-webpack-plugin.git
  *
  */

 class Media {

-    constructor() {
+    constructor(path = null) {
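+        // optional base path, media is looked up there first before falling back to the source root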
+        this._path = path
         this._DIR_ASSETS = '/assets/'
     }

     /**
      * resolve media
      *
      * @param {string} src
      * @param {object} sizes
@@ -25,74 +34,182 @@ class Media {
      * @return {string}
      *
      */
-    async resize(src, sizes, options = {}) {
+    resolve(src, sizes = {}, options = {}) {

-        this._extension = path.extname(src)
-        this._filename = slugify(path.basename(src, this._extension))
+        let extension = path.extname(src)
+        let sourcePath

-        this._process = await sharp(configStore.get('source') + '/' + src)
+        const filename = slugify(path.basename(src, extension))

-        // resize without options and with options
-        if (Object.getOwnPropertyNames(options).length === 0) {
-            await this._process
-                .resize(sizes)
+        // check for images in path
+        if (this._path && fs.existsSync(configStore.get('source') + this._path + '/' + src)) {
+            sourcePath = configStore.get('source') + this._path + '/' + src
+        } else {
+            sourcePath = configStore.get('source') + '/' + src
+        }
+
+        // getting sharp
+        const process = sharp(sourcePath)
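+
+        // gif keeps its format and is re-optimised, every other format is converted to webp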
+        if (extension === '.gif') {
+            process
+                .gif({
+                    reoptimise: true
+                })
         } else {
-            this._process
-                .resize(sizes, options)
+
+            // change extension
+            extension = '.webp'
+            process
+                .webp({
+                    lossless: true
+                })
         }

-        // optimize
-        this._optimize()
+        // destination
+        const destinationPath = this._getDestinationPath(sourcePath)

-        const fileBuffer = await this._process
-            .toBuffer()
+        // create files to write
+        const filesToWrite = this._getFilesToWrite(filename, extension, destinationPath, sizes)
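+        // filesToWrite holds the original file plus one entry per requested size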

-        const relativeDestinationPath = this._DIR_ASSETS + this._resolveRelativeDestinationPath(fileBuffer)
+        // results contains only paths as strings
+        const results = {}

-        // create directories and write file
-        mkdirp.sync(configStore.get('destination') + relativeDestinationPath)
-        fs.writeFileSync(configStore.get('destination') + relativeDestinationPath + '/' + this._filename + this._extension, fileBuffer)
+        // create path if it does not exist
+        if (!fs.existsSync(configStore.get('destination') + destinationPath)) {
+            mkdirp.sync(configStore.get('destination') + destinationPath)
+        }

-        return relativeDestinationPath + '/' + this._filename + this._extension
+        filesToWrite.forEach((file) => {
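+            // only write a file if it does not exist in the destination yet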
+            if (!fs.existsSync(configStore.get('destination') + file.path)) {
+                this._writeFile(file, process, options)
+            }
+
+            results[file.name] = file.path
+        })
+
+        return this._reduce(results)
     }

     /**
+     * @TODO much nicer to add a hook system so the behavior can be changed
+     * if only 'full' is in results, reduce object to string
      *
+     * @param {Object} results
+     * @return {mixed}
      *
-     * @param {string} extension
      */
+    _reduce(results) {
+        if (Object.getOwnPropertyNames(results).length === 1) {
+            results = results['full']
+        }
+
+        return results
+    }
+
+    /**
+     * getting files to write
+     *
+     * @param {string} filename
+     * @param {string} extension
+     * @param {Object} sizes
+     * @return {Array}
+     */
-    _optimize() {
-        if (this._extension === '.gif') {
-            this._process
-                .gif({
-                    reoptimise: true
-                })
-        } else {
-            // change extension
-            this._extension = '.webp'
-            this._process
-                .webp({
-                    lossless: true
-                })
-        }
-    }
+    _getFilesToWrite(filename, extension, destinationPath, sizes) {
+
+        const results = []
+
+        // add original
+        results.push(this._getFile(filename, destinationPath, extension))
+
+        // check for sizes
+        if (typeof sizes === 'object' && !Array.isArray(sizes)) {
+            results.push(this._getFile(filename, destinationPath, extension, sizes))
+        } else if (Array.isArray(sizes)) {
+            sizes.forEach((size) => {
+                results.push(this._getFile(filename, destinationPath, extension, size))
+            })
+        }
+
+        return results
+    }

     /**
-     * resolve path to write file, hash will be get from fileBuffer
+     * write files to destination, resize if sizes are set
      *
+     * @param {Object} file
+     * @param {object} process
+     * @param {Object} options
      */
+    _writeFile(file, process, options) {
+
+        // resize without options and with options
+        if (file.sizes && Object.getOwnPropertyNames(options).length === 0 && Object.getOwnPropertyNames(file.sizes).length > 0) {
+            process.resize(file.sizes)
+        } else if (file.sizes && Object.getOwnPropertyNames(options).length > 0 && Object.getOwnPropertyNames(file.sizes).length > 0) {
+            process.resize(file.sizes, options)
+        }
+
+        process.toFile(configStore.get('destination') + file.path)
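+        // note: toFile() returns a promise that is not awaited here, the write happens in the background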
+    }

     /**
      * generate destination path from hash of file
      *
-     * @param {object} fileBuffer
+     * @param {string} sourcePath
      * @return {string}
      *
      */
-    _resolveRelativeDestinationPath(fileBuffer) {
+    _getDestinationPath(sourcePath) {
         const hash = crypto.createHash('sha1')
-        hash.update(fileBuffer)
+        const file = fs.readFileSync(sourcePath)
+
+        // getting hash from file
+        hash.update(file)
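+
+        // the sha1 hex digest is split into 8-character segments to build a nested directory path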
-        return hash.digest('hex').match(new RegExp('.{1,8}', 'g')).join('/')
+        return this._DIR_ASSETS + hash.digest('hex').match(new RegExp('.{1,8}', 'g')).join('/')
     }

+    /**
+     * create file as object, adding path and name with sizes
+     *
+     * @param {string} filename
+     * @param {string} destinationPath
+     * @param {string} extension
+     * @param {Object} sizes
+     * @return {object}
+     */
+    _getFile(filename, destinationPath, extension, sizes = undefined) {
+
+        let file = {
+            name: ''
+        }
+
+        let prefix = ''
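+
+        // build a size suffix for the file name, e.g. "300" (width), "200" (height) or "300x200"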
+        // check for sizes
+        if (sizes && sizes.width) {
+            file.name += sizes.width
+        }
+
+        if (sizes && sizes.height) {
+            if (sizes.width) {
+                prefix = 'x'
+            }
+
+            file.name += prefix + sizes.height
+        }
+
+        // create path before the name falls back to 'full' for the original file
+        file.path = destinationPath + '/' + filename + file.name + extension
+
+        if (!file.name) {
+            file.name = 'full'
+        }
+
+        if (sizes) {
+            file.sizes = sizes
+        }
+
+        return file
+    }
 }
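
Usage sketch, not part of the diff: assuming the plugin creates a Media instance with the
directory of the current page and a config where `source` and `destination` are set; the
require path, file name and sizes below are made up.

    const Media = require('./media.js')

    const media = new Media('/home')
    const paths = media.resolve('header.jpg', [{ width: 480 }, { width: 960 }])

    // paths.full, paths['480'] and paths['960'] now point at the generated
    // webp files below /assets/<hash of the source file>/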