main
HerrHase 8 months ago
parent a410c13d89
commit 9106af7e24

8
.gitignore vendored

@ -132,3 +132,11 @@ dist
#
storage/*
resources/actions/*
!resources/actions/.gitkeep
resources/config/enabled/*
!resources/config/enabled/.gitkeep
resources/config/available/*
!resources/config/available/.gitkeep

@ -1,2 +1,11 @@
# super-hog
Super Hog is a process runner. Tasks are configured in YAML files placed under `resources/config/enabled/`, for example:
```
name: "Lorem Ipsum RSS"
url: "https://lorem-rss.herokuapp.com/feed"
cron: "0 1 * * *"
actions:
- class: "RssExample"
```
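
The `class` entry names an action file under `resources/actions/`. As a sketch (mirroring the `RssExample` action added in this commit), such an action can look like this:
```
// resources/actions/RssExample.ts — fetches the feed from config.url,
// parses it and keeps the result for the next action
import { XMLParser } from 'fast-xml-parser'
import Action from './../../packages/runner/actions/action.ts'

class RssExample extends Action {

    async run() {
        // url comes from the YAML task file
        const response = await fetch(this.config.url)
        const body = await response.text()

        // whatever is stored in this.result is handed to the next action
        this.result = new XMLParser().parse(body)
    }
}

export default RssExample
```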

@ -1,5 +0,0 @@
# super-hog
```
yarn install
```

Binary file not shown.

@ -0,0 +1,24 @@
import Action from './../../packages/runner/actions/action.ts'
/**
* getting json from url, parse response and send to console
*
* @author Björn Hase <me@herr-hase.wtf>
* @license http://opensource.org/licenses/MIT The MIT License
* @link https://git.node001.net/HerrHase/super-hog.git
*
*/
class JsonExample extends Action {
async run() {
const response = await fetch(this.config.url)
if (response.status === 200) {
this.result = await response.json()
}
console.log(this.result)
}
}
export default JsonExample

@ -0,0 +1,25 @@
import { XMLParser } from 'fast-xml-parser'
import Action from './../../packages/runner/actions/action.ts'
/**
* getting rss feed from url, parse response and send to console
*
* @author Björn Hase <me@herr-hase.wtf>
* @license http://opensource.org/licenses/MIT The MIT License
* @link https://git.node001.net/HerrHase/super-hog.git
*
*/
class RssExample extends Action {
async run() {
const response = await fetch(this.config.url)
const body = await response.text()
const parser = new XMLParser()
this.result = parser.parse(body)
console.log(this.result)
}
}
export default RssExample

@ -0,0 +1,5 @@
name: "Lorem Ipsum RSS"
url: "https://lorem-rss.herokuapp.com/feed"
cron: "0 1 * * *"
actions:
- class: "RssExample"

3466
package-lock.json generated

File diff suppressed because it is too large

@ -1,28 +1,10 @@
 {
 "private": true,
- "name": "runner",
- "version": "0.1.0",
- "scripts": {
- "start": "node src/runner/index.js"
- },
- "type": "module",
- "dependencies": {
- "@directus/sdk": "^10.3.3",
- "better-sqlite3": "^8.2.0",
- "chalk": "^5.1.2",
- "dayjs": "^1.11.7",
- "dotenv": "^16.0.3",
- "es6-interface": "^3.2.1",
- "fast-xml-parser": "^4.1.3",
- "got": "^12.5.2",
- "js-yaml": "^4.1.0",
- "lowdb": "^5.1.0",
- "node-cron": "^3.0.2",
- "node-html-parser": "^6.1.5",
- "pino": "^8.7.0",
- "pino-pretty": "^10.0.0",
- "puppeteer": "^19.8.3",
- "turndown": "^7.1.2",
- "yargs": "^17.6.0"
- }
+ "name": "super-hog",
+ "workspaces": [
+ "packages/*"
+ ],
+ "scripts": {
+ "start": "bun run packages/runner/index.ts"
+ }
 }

@ -0,0 +1,4 @@
import path from 'path'
import dotenv from 'dotenv'
// getting .env
dotenv.config({ path: path.join(path.resolve(), '/.env') })

@ -0,0 +1,48 @@
import dayjs from 'dayjs'
import { JSONFilePreset } from 'lowdb/node'
import merge from 'deepmerge'
const db = await JSONFilePreset('./storage/db.json', { runs: [] })
/**
 * wraps the lowdb json store, tracking runs by their config slug
 *
* @author Björn Hase <me@herr-hase.wtf>
* @license http://opensource.org/licenses/MIT The MIT License
* @link https://git.node001.net/HerrHase/super-hog.git
*
*/
class Db {
private slug: string
private data: object
public constructor(slug: string) {
this.slug = slug
this.data = db.data.runs.find((run) => run.slug === this.slug)
}
}
public async get() {
if (this.data === undefined) {
this.data = {
'slug': this.slug,
'date_created_at': dayjs().toISOString(),
'date_run_at': dayjs().toISOString()
}
db.data.runs.push(this.data)
await db.write()
}
return this.data
}
public async update(data) {
const index = db.data.runs.findIndex((run) => run.slug === this.slug)
if (index !== -1) {
db.data.runs[index] = merge(db.data.runs[index], data)
}
await db.write()
}
}
export default Db
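
A minimal sketch of how the runner uses `Db` (mirroring `run()` below); the slug value is only an example:
```
import dayjs from 'dayjs'
import Db from './_db.ts'

const db = new Db('lorem-ipsum-rss')

// creates the entry in storage/db.json on first access
const run = await db.get()
console.log(run)

// merge new values into the stored entry
await db.update({ 'date_run_at': dayjs().toISOString() })
```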

@ -0,0 +1,53 @@
/**
* Docket
*
* Is used to hold all data and configs that run through the actions
*
* @author Björn Hase <me@herr-hase.wtf>
* @license http://opensource.org/licenses/MIT The MIT License
* @link https://git.node001.net/HerrHase/super-hog.git
*
*/
class Docket {
// config for action
private config: object
// data
private data: object
private options: object
private db: object
/**
*
*
     * @param object config
     * @param object db
*
*/
public constructor(config: object, db: object) {
this.config = config
this.db = db
}
public getData() {
return this.data
}
public setData(data: object) {
this.data = data
}
public setOptions(options: object) {
this.options = options
}
public getConfig(): object {
return this.config
}
public getDb(): object {
return this.db
}
}
export default Docket
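
A short sketch of the docket flow, assuming a config has already been loaded; this mirrors how `run()` and the `Action` base class use it:
```
import Db from './_db.ts'
import Docket from './_docket.ts'

const config = { slug: 'lorem-ipsum-rss', url: 'https://lorem-rss.herokuapp.com/feed', actions: [] }
const docket = new Docket(config, new Db(config.slug))

// an action writes its result back via getDocket(), the next action
// reads it again through getData()
docket.setData({ items: [] })
console.log(docket.getData())
console.log(docket.getConfig())
```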

@ -0,0 +1,57 @@
import dayjs from 'dayjs'
import Db from './_db.ts'
import Docket from './_docket.ts'
import { resolveActionClass } from './helpers/resolver.ts'
import logger from './helpers/logger.ts'
/**
* run through a single config, getting each action with config
* and run them, the docket will be used to hold data and configs
*
* @author Björn Hase <me@herr-hase.wtf>
* @license http://opensource.org/licenses/MIT The MIT License
* @link https://git.node001.net/HerrHase/super-hog.git
*
*/
async function run(config: object) {
let db = new Db(config.slug)
let docket = new Docket(config, db)
logger(config.slug).info('has started')
for (const actionConfig of config.actions) {
// resolve action class
const ActionClass = await import(resolveActionClass(actionConfig.class))
// if options exist, add them to the docket
if (actionConfig.hasOwnProperty('options')) {
docket.setOptions(actionConfig.options)
}
try {
const action = new ActionClass.default(docket)
logger(config.slug).info('action / ' + actionConfig.class)
await action.run()
docket = action.getDocket()
} catch(error) {
logger(config.slug).error('action / ' + actionConfig.class + ' / ' + error)
}
}
logger(config.slug).info('has finished')
// update db
await db.update({
'date_run_at': dayjs().toISOString()
})
return docket
}
export default run

@ -0,0 +1,27 @@
import logger from './helpers/logger.ts'
/**
* Webhook
*
* @author Björn Hase <me@herr-hase.wtf>
* @license http://opensource.org/licenses/MIT The MIT License
* @link https://git.node001.net/HerrHase/super-hog.git
*
*/
class Webhook {
static async send(message: string) {
const query = new URLSearchParams({
'message': message
})
try {
await fetch(process.env.WEBHOOK_URL + '?' + query.toString())
} catch(error) {
logger().error(error)
}
}
}
export default Webhook
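
A usage sketch, assuming `WEBHOOK_URL` is set in `.env`; the import path is an assumption, since the diff view omits file names:
```
// file name _webhook.ts is assumed here
import Webhook from './_webhook.ts'

await Webhook.send('lorem-ipsum-rss has finished')
```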

@ -0,0 +1,46 @@
import ActionInterface from './actionInterface.ts'
import Docket from './../_docket.ts'
/**
* Action
*
* @author Björn Hase <me@herr-hase.wtf>
* @license http://opensource.org/licenses/MIT The MIT License
* @link https://git.node001.net/HerrHase/super-hog.git
*
*/
class Action implements ActionInterface {
protected docket: Docket
protected data: object
protected result: object
protected options: object
protected config: object
/**
 * @param Docket docket
 * @param object options
*/
public constructor(docket: Docket, options: object = {}) {
this.docket = docket
this.data = docket.getData()
// current data will be set as data
this.result = this.data
this.options = options
this.config = docket.getConfig()
}
/**
* set result as data
*
*/
public getDocket() {
this.docket.setData(this.result)
return this.docket
}
}
export default Action
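
As a sketch, a second (hypothetical) action chained after `RssExample` would read `this.data` and write `this.result`, which `getDocket()` then hands to the next action:
```
// resources/actions/CountItemsExample.ts — hypothetical, not part of this commit
import Action from './../../packages/runner/actions/action.ts'

class CountItemsExample extends Action {

    async run() {
        // this.data holds the result of the previous action
        const feed: any = this.data

        // this.result is written back to the docket by getDocket()
        this.result = {
            count: feed?.rss?.channel?.item?.length ?? 0
        }
    }
}

export default CountItemsExample
```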

@ -0,0 +1,14 @@
/**
* Interface for Action
*
* @author Björn Hase <me@herr-hase.wtf>
* @license http://opensource.org/licenses/MIT The MIT License
* @link https://git.node001.net/HerrHase/super-hog.git
*
*/
interface ActionInterface {
run(): Promise<any>
}
export default ActionInterface

@ -0,0 +1,36 @@
import pino from 'pino'
import pretty from 'pino-pretty'
import dayjs from 'dayjs'
import { mkdirSync } from "node:fs"
/**
* Wrapper for logger
*
* @author Björn Hase <me@herr-hase.wtf>
* @license http://opensource.org/licenses/MIT The MIT License
* @link https://git.node001.net/HerrHase/super-hog.git
*
*/
function logger(slug?: string) {
let destination = './storage/logs/'
// if a slug is set, add it as a subdirectory
if (slug) {
destination += slug + '/'
}
// make sure the log directory exists
mkdirSync(destination, { recursive: true })
destination += dayjs().format('DD-MM-YYYY') + '.log'
return pino({
timestamp: () => {
return `, "time":"${new Date(Date.now()).toISOString()}"`
},
},
pino.destination(destination)
)
}
export default logger
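
A usage sketch, mirroring how `run()` calls the wrapper; the slug is only an example:
```
import logger from './helpers/logger.ts'

// writes to ./storage/logs/lorem-ipsum-rss/<DD-MM-YYYY>.log
logger('lorem-ipsum-rss').info('has started')

// without a slug it writes to ./storage/logs/<DD-MM-YYYY>.log
logger().error('something went wrong')
```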

@ -0,0 +1,71 @@
import fs from 'fs'
import path from 'path'
import yaml from 'js-yaml'
import slug from 'slug'
/**
* resolve action class
*
* @author Björn Hase <me@herr-hase.wtf>
* @license http://opensource.org/licenses/MIT The MIT License
* @link https://git.node001.net/HerrHase/super-hog.git
*
*/
function resolveActionClass(className) {
let classPath = path.join(path.resolve(), 'resources/actions/' + className + '.ts')
let result = undefined
if (fs.existsSync(classPath)) {
result = classPath
}
if (!result) {
throw new Error('Action Class ' + className + ' / ' + classPath + ' not found!')
}
return result
}
/**
* loading all configs from enabled directory
*
* @author Björn Hase <me@herr-hase.wtf>
* @license http://opensource.org/licenses/MIT The MIT License
* @link https://git.node001.net/HerrHase/super-hog.git
*
*/
function resolveEnabledConfig(name?: string) {
const configs = []
const directoryPath = path.join(path.resolve(), 'resources/config')
// load files from enabled
const files = fs.readdirSync(directoryPath + '/enabled')
for (let index in files) {
let file = files[index]
if (!file.endsWith('.yml')) {
continue
}
if (name !== undefined && file !== (name + '.yml')) {
continue
}
const result = yaml.load(fs.readFileSync(directoryPath + '/enabled/' + file))
if (result) {
result.slug = slug(result.name)
configs.push(result)
}
}
return configs
}
export {
resolveActionClass,
resolveEnabledConfig
}

@ -0,0 +1,40 @@
import cron from 'node-cron'
import { resolveEnabledConfig } from './helpers/resolver.ts'
import logger from './helpers/logger.ts'
import run from './_run.ts'
/**
* Run all Configs that are in Directory /resources/configs/enabled
*
* @author Björn Hase <me@herr-hase.wtf>
* @license http://opensource.org/licenses/MIT The MIT License
* @link https://git.node001.net/HerrHase/super-hog.git
*
*/
// get all configs from resolveEnabledConfig
const configs = resolveEnabledConfig()
// check if there is any valid config
if (configs.length === 0) {
throw new Error('No valid config found!')
}
// running through configs
for (const config of configs) {
// skip configs without a cron definition
if (!config.hasOwnProperty('cron')) {
continue
}
/**
* adding task to schedule, using cron
*
*/
cron.schedule(config.cron, async () => {
await run(config)
})
}

@ -0,0 +1,20 @@
{
"private": true,
"name": "runner",
"version": "0.1.0",
"type": "module",
"dependencies": {
"chalk": "^5.1.2",
"dayjs": "^1.11.7",
"deepmerge": "^4.3.1",
"dotenv": "^16.0.3",
"fast-xml-parser": "^4.1.3",
"js-yaml": "^4.1.0",
"lowdb": "^7.0.1",
"node-cron": "^3.0.2",
"pino": "^8.7.0",
"pino-pretty": "^10.0.0",
"slug": "^9.0.0",
"yargs": "^17.6.0"
}
}

@ -0,0 +1,35 @@
import chalk from 'chalk'
import { resolveEnabledConfig } from './helpers/resolver.ts'
import Docket from './docket.ts'
import logger from './helpers/logger.ts'
import run from './_run.ts'
/**
* Run single Config that is in Directory /resources/configs/enabled
*
* @author Björn Hase <me@herr-hase.wtf>
* @license http://opensource.org/licenses/MIT The MIT License
* @link https://git.node001.net/HerrHase/super-hog.git
*
*/
const args = process.argv.slice(2)
if (args[0] === undefined) {
console.log(chalk.red('Required name for config'))
process.exit(1)
}
// get all configs from resolveEnabledConfig
const configs = resolveEnabledConfig(args[0])
// check if there is any valid config
if (configs.length === 0) {
console.log(chalk.red('Config ' + args[0] + ' not found!'))
process.exit(1)
}
const config = configs[0]
const docket = await run(config)

@ -1,58 +0,0 @@
import got from 'got'
import url from 'url'
import path from 'path'
import dayjs from 'dayjs'
import { createWriteStream } from 'fs'
import stream from 'node:stream'
import { promisify } from 'util'
import Action from './../../src/runner/actions/action.js'
import logger from './../../src/runner/helpers/logger.js'
import EventStore from './../directus/event.js'
import { Directus } from '@directus/sdk';
/**
* Download Episodes from a RSS-Feed,
*
*
* @extends Action
*
*/
class MindenErlebenApi extends Action {
async run() {
const eventStore = new EventStore()
// adding result
let result
if (this.lastResult) {
for (let data of this.lastResult) {
if (data.mediaUrl) {
// import media to directus
const media = await eventStore.importMediaFromUrl(data.mediaUrl)
// if import was successful, add to data
if (media) {
data.media = {
id: media
}
}
delete data.mediaUrl
}
result = await eventStore.create(data)
if (result && result.id) {
logger.info(data.source_host + ' / update, create ' + result.title)
}
}
}
}
}
export default MindenErlebenApi

@ -1,138 +0,0 @@
import got from 'got'
import path from 'path'
import dayjs from 'dayjs'
import { createWriteStream } from 'fs'
import stream from 'node:stream'
import { promisify } from 'util'
import crypto from 'crypto'
import { parse } from 'node-html-parser'
import Action from './../../src/runner/actions/action.js'
import logger from './../../src/runner/helpers/logger.js'
import EventStore from './../directus/event.js'
import { createHash } from 'node:crypto'
import url from 'node:url'
import TurndownService from 'turndown'
/**
* Download Episodes from a RSS-Feed,
*
*
* @extends Action
*
*/
class OberealtstadtEvents extends Action
{
async run() {
const results = []
const turndownService = new TurndownService()
const eventStore = new EventStore()
const currentUrl = new URL(this.config.url)
for (let item of this.data) {
const hash = createHash('sha256').update(item.id.toString()).digest('hex')
if (item.status !== 'publish') {
continue
}
const event = await eventStore.findOneBySource(currentUrl.host, hash)
if (event.data.length > 0 && event.data[0].date_updated_source <= item.modified) {
continue
}
let result = {
title: item.title.rendered,
date_updated_source: item.modified,
content: turndownService.turndown(item.content.rendered),
source_host: currentUrl.host,
source_url: item.link,
source_hash: hash
}
const htmlString = await got(item.guid.rendered, {
resolveBodyOnly: true
})
const html = parse(htmlString)
result = this._parseDates(html, result)
result = this._parseMedia(html, result)
results.push(result)
}
return results
}
/**
*
*
*/
_parseDates(html, result) {
const dates = html.querySelectorAll('.wpem-event-date-time-text')
if (dates[0]) {
result.start = this._getDate(dates[0])
}
if (dates[1]) {
result.end = this._getDate(dates[1])
}
return result
}
/**
*
*/
_getDate(string) {
const dateString = string.textContent.replace('um ', '').trim()
const dateArray = dateString.split(' ')
if (dateArray.length === 2) {
let time = dateString.split(' ')[1]
let day = dateString.split(' ')[0]
// getting day and time
day = day.split('.')[2] + '-' + day.split('.')[1] + '-' + day.split('.')[0]
time = time.split(':')[0] + ':' + time.split(':')[1]
const date = dayjs(day + ' ' + time)
if (date.isValid()) {
return date.format()
}
}
return false
}
/**
*
*
*/
_parseMedia(html, result) {
const img = html.querySelector('.wpem-event-single-image img')
if (img) {
result.mediaUrl = img.getAttribute('src')
}
return result
}
}
export default OberealtstadtEvents

@ -1,9 +0,0 @@
name: "Insert Moin"
url: "https://steadyhq.com/rss/insertmoin?auth=d37bffc9-9a84-4eed-95f6-3b6cb77c2406"
cron: "0 1 * * *"
request:
class: "rss"
actions:
- class: "podcast"
options:
destination: "/home/herrhase/Downloads/x"

@ -1,3 +0,0 @@
notifications:
type: "webhook"
url: ""

@ -1,8 +0,0 @@
name: "oberealtstadt.de"
url: "https://oberealtstadt.de/wp-json/wp/v2/event_listing"
cron: "0 */6 * * *"
request:
class: "json"
actions:
- class: "oberealtstadtEvents"
- class: "mindenErlebenApi"

@ -1,40 +0,0 @@
import { Directus } from '@directus/sdk';
/**
* Abstract Class for handling Directus Api
*
* @author Björn Hase <me@herr-hase.wtf>
* @license http://opensource.org/licenses/MIT The MIT License
* @link https://gitea.node001.net/HerrHase/super-fastify-directus.git
*
*/
class DirectusAbstractStore {
/**
*
*
*
* @param {string} endpoint
*
*/
constructor(endpoint) {
this.endpoint = endpoint
// if endpoint not set throw Error
if (!this.endpoint) {
throw new Error('Endpoint in ' + this.constructor.name + ' missing!')
}
this.directus = new Directus(process.env.DIRECTUS_API_URL, {
auth: {
staticToken: process.env.DIRECTUS_API_TOKEN
}
})
// create items
this.items = this.directus.items(this.endpoint)
}
}
export default DirectusAbstractStore

@ -1,98 +0,0 @@
import DirectusAbstractStore from './abstract.js'
import logger from './../../src/runner/helpers/logger.js'
/**
* Pages from Directus
*
* @author Björn Hase <me@herr-hase.wtf>
* @license http://opensource.org/licenses/MIT The MIT License
* @link https://gitea.node001.net/HerrHase/super-fastify-directus.git
*
*/
class EventStore extends DirectusAbstractStore {
/**
* init PageStore
*
*
* @param {string} endpoint
*
*/
constructor() {
super('event')
}
/**
* getting page by permalink
*
*
* @param {string} permalink
* @return {object}
*
*/
findOneBySource(sourceHost, sourceHash) {
return this.items.readByQuery({
fields: [
'title',
'status',
'source_host',
'source_url',
'source_hash',
'start',
'end',
'excerpt',
'content',
'tags',
'parent',
'media.id',
'media.description',
'permalink',
'date_updated_source'
],
filter: {
source_host : sourceHost,
source_hash : sourceHash
},
limit: 1
})
}
/**
* import media from url
*
* @param {string} url
* @return {object}
*/
async importMediaFromUrl(url) {
try {
const file = await this.directus.files.import({
url: url
})
if (file) {
return file.id
}
} catch(error) {
logger.error(error)
}
return false
}
/**
* getting page by permalink
*
*
* @param {object} data
* @return {object}
*
*/
create(data) {
return this.items.createOne(data)
}
}
export default EventStore

@ -1,25 +0,0 @@
import got from 'got'
import RequestHandler from './../../src/runner/requests/requestHandler.js'
/**
*
*
* @author Björn Hase <me@herr-hase.wtf>
*
*/
class JsonHandler extends RequestHandler {
/**
* getting rss feed from url
*
*
*/
async send() {
const json = await got(this.config.url, {}).json()
this.processActions(json)
}
}
export default JsonHandler

@ -1,36 +0,0 @@
import got from 'got'
import { XMLParser } from 'fast-xml-parser'
import RequestHandler from './../../src/runner/requests/requestHandler.js'
/**
* RssHandler
*
* request rss from url and parse to object
*
* @author Björn Hase <me@herr-hase.wtf>
*
*/
class RssHandler extends RequestHandler {
/**
* getting rss feed from url
*
*
*/
async send() {
const buffer = await got(this.config.url, {
responseType: 'buffer',
resolveBodyOnly: true
})
const parser = new XMLParser()
const feed = parser.parse(buffer.toString())
this.processActions(feed)
}
}
export default RssHandler

@ -1,37 +0,0 @@
import Interface from 'es6-interface'
import ActionInterface from './actionInterface.js'
import StateStore from './../stores/state.js'
/**
* Single Action to process Data from Remote Server
*
*
*
*/
class Action extends Interface(ActionInterface) {
/**
*
*
* @param {[type]} config
* @param {[type]} local
* @param {[type]} data
* @param {[type]} lastResult
*
*/
constructor(config, options, data, lastResult) {
super()
this.config = config
this.options = options
this.data = data
this.lastResult = lastResult
this.stateStore = new StateStore()
this.state = this.stateStore.findOneByName(this.config.name)
}
}
export default Action

@ -1,14 +0,0 @@
/**
*
*
*
*
*/
const ActionInterface = {
run: function() {
}
}
export default ActionInterface

@ -1,32 +0,0 @@
import Database from 'better-sqlite3'
/**
*
*
* @return {[Object]}
*
*/
function getDB() {
const db = new Database('./storage/db/main.db')
db.pragma('journal_mode = WAL')
return db
}
/**
* creating db
*
*
*/
function migrateDB() {
const db = getDB()
db.prepare('CREATE TABLE IF NOT EXISTS states (id INTEGER PRIMARY KEY, name TEXT NOT NULL, date_lastrun TEXT)').run()
}
export {
migrateDB,
getDB
}

@ -1,35 +0,0 @@
import fs from 'fs'
import path from 'path'
import yaml from 'js-yaml'
/**
*
*
*
*/
function loadEnabledConfig() {
const configs = []
const directoryPath = path.join(path.resolve(), 'resources/config')
// load file for main config
const mainConfig = yaml.load(fs.readFileSync(directoryPath + '/config.yml'))
// load files from enabled
const configFiles = fs.readdirSync(directoryPath + '/enabled')
configFiles.forEach((file, index) => {
const result = yaml.load(fs.readFileSync(directoryPath + '/enabled/' + file))
if (result) {
configs.push(result)
}
})
return configs
}
export {
loadEnabledConfig
}

@ -1,11 +0,0 @@
import pino from 'pino'
import pretty from 'pino-pretty'
import dayjs from 'dayjs'
export default pino({
timestamp: () => {
return `, "time":"${new Date(Date.now()).toISOString()}"`
},
},
pino.destination('./storage/logs/' + dayjs().format('DD-MM-YYYY') + '.log')
)

@ -1,31 +0,0 @@
import path from 'path'
import fs from 'fs'
/**
*
*
* @param {[string]} prefix
* @param {[string]} className
* @return {[string]}
*
*/
function resolveClassPath(prefix, className) {
let classPath = path.join(path.resolve(), 'resources/' + prefix + '/' + className + '.js')
let result = undefined
if (fs.existsSync(classPath)) {
result = classPath
}
if (!result) {
throw new Error('Class ' + prefix + ' / ' + className + ' / ' + classPath + ' not found!')
}
return result
}
export {
resolveClassPath
}

@ -1,43 +0,0 @@
import cron from 'node-cron'
import path from 'path'
import dotenv from 'dotenv'
import { loadEnabledConfig } from './helpers/config.js'
import { resolveClassPath } from './helpers/resolver.js'
import { migrateDB, getDB } from './db.js'
import logger from './helpers/logger.js'
// loading config-files from /resources/configs/enabled
const configs = loadEnabledConfig()
// getting .env
dotenv.config({ path: path.join(path.resolve(), '/.env') })
// check if there is any valid config
if (configs.length === 0) {
throw new Error('No valid config found!')
}
// running migrate to prepare db
migrateDB()
// let it rain and add schedule for each config a cron and start
configs.forEach(async (config) => {
// const task = cron.schedule(config.cron, async () => {
const requestClass = resolveClassPath('requests', config.request.class)
const RequestClass = await import(requestClass)
const request = new RequestClass.default(config)
//try {
request.send()
// } catch(error) {
// logger.error(error)
// }
//})
//
// task.start()
})

@ -1,11 +0,0 @@
/**
*
*
*
*/
class Webhook {
webhook(url) {
}
}

@ -1,64 +0,0 @@
import Interface from 'es6-interface'
import RequestHandlerInterface from './requestHandlerInterface.js'
import { resolveClassPath } from './../helpers/resolver.js'
import logger from './../helpers/logger.js'
import StateStore from './../stores/state.js'
/**
*
*
*
*
*/
class RequestHandler extends Interface(RequestHandlerInterface)
{
/**
*
*
* @param {object} config
*
*/
constructor(config) {
super()
this.config = config
this.stateStore = new StateStore()
this.state = this.stateStore.findOneByName(this.config.name)
}
/**
* process actions that are saved in a source
*
*
* @param {object} data
*
*/
async processActions(data)
{
//
let lastResult = false
for (const config of this.config.actions) {
// getting class path
const classPath = resolveClassPath('actions', config.class)
// import class from action
const Action = await import(classPath)
// create action and call run
const action = new Action.default(this.config, config.options, data, lastResult)
try {
lastResult = await action.run()
} catch (error) {
logger.error(error)
}
}
}
}
export default RequestHandler

@ -1,19 +0,0 @@
/**
*
*
*
*/
const RequestHandlerInterface = {
/**
*
*
*/
send: function() {
}
}
export default RequestHandlerInterface