refactor(runner): rename preprocessing to runner and update directory structure

Haoyu Xu
2023-01-17 10:08:22 -05:00
parent 09ca22d7c7
commit 4fa886be90
16 changed files with 34 additions and 30 deletions

libs/alpha_composite.js Normal file (+31)

@@ -0,0 +1,31 @@
import sharp from "sharp";
import path from "path";

export default class AlphaComposite {
  #config
  #operatorName
  #operatorSourceFolder

  constructor(config, operatorName, rootDir) {
    this.#config = config
    this.#operatorName = operatorName
    this.#operatorSourceFolder = path.join(rootDir, this.#config.folder.operator, this.#operatorName)
  }

  // Recombine an RGB texture with its separately shipped "<name>[alpha].png" mask.
  async process(filename, extractedDir) {
    const image = sharp(path.join(extractedDir, filename))
      .removeAlpha()
    const imageMeta = await image.metadata()
    const imageBuffer = await image.toBuffer()
    const mask = await sharp(path.join(extractedDir, `${path.parse(filename).name}[alpha].png`))
      .extractChannel("blue")
      .resize(imageMeta.width, imageMeta.height)
      .toBuffer();
    return sharp(imageBuffer)
      .joinChannel(mask)
      .toBuffer()
  }
}
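A minimal usage sketch of AlphaComposite; the config shape, operator name, and file paths below are assumptions for illustration, not part of this commit:

// Hypothetical usage: recombine an RGB texture with its "[alpha]" mask.
import AlphaComposite from './libs/alpha_composite.js'
import { write } from './libs/file.js'

const config = { folder: { operator: 'operators' } }            // assumed config shape
const compositer = new AlphaComposite(config, 'amiya', process.cwd())

// Expects './extracted/texture.png' and './extracted/texture[alpha].png' to exist.
const buffer = await compositer.process('texture.png', './extracted')
await write(buffer, './out/texture.png')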

libs/assets_processor.js Normal file (+44)

@@ -0,0 +1,44 @@
import path from 'path'
import { copy, read, write } from './file.js'
import AlphaComposite from './alpha_composite.js'

export default class AssetsProcessor {
  #config
  #operatorName
  #operatorSourceFolder
  #alphaCompositer

  constructor(config, operatorName, rootDir) {
    this.#config = config
    this.#operatorName = operatorName
    this.#operatorSourceFolder = path.join(rootDir, this.#config.folder.operator)
    this.#alphaCompositer = new AlphaComposite(config, operatorName, rootDir)
  }

  // Inline the operator's .skel, .atlas, and textures as base64 data URIs keyed by asset path.
  async process(publicAssetsDir, extractedDir) {
    const BASE64_BINARY_PREFIX = 'data:application/octet-stream;base64,'
    const BASE64_PNG_PREFIX = 'data:image/png;base64,'
    const assetsJson = {}
    const skelFilename = `${this.#config.operators[this.#operatorName].filename}.skel`
    const skel = await read(path.join(extractedDir, skelFilename), null)
    const atlasFilename = `${this.#config.operators[this.#operatorName].filename}.atlas`
    const atlas = await read(path.join(extractedDir, atlasFilename))
    // The atlas "size: <w>,<h>" line becomes ['<w>', '<h>'].
    const dimensions = atlas.match(/^size:(.*),(.*)/gm)[0].replace('size: ', '').split(',')
    const matches = atlas.match(/(.*)\.png/g)
    for (const item of matches) {
      const buffer = await this.#alphaCompositer.process(item, extractedDir)
      assetsJson[`./assets/${item}`] = BASE64_PNG_PREFIX + buffer.toString('base64')
    }
    assetsJson[`./assets/${skelFilename.replace('#', '%23')}`] = BASE64_BINARY_PREFIX + skel.toString('base64')
    assetsJson[`./assets/${atlasFilename.replace('#', '%23')}`] = BASE64_BINARY_PREFIX + Buffer.from(atlas).toString('base64')
    const fallbackFilename = `${this.#config.operators[this.#operatorName].fallback_name}.png`
    const fallbackBuffer = await this.#alphaCompositer.process(fallbackFilename, extractedDir)
    await write(fallbackBuffer, path.join(this.#operatorSourceFolder, this.#operatorName, fallbackFilename))
    await copy(path.join(this.#operatorSourceFolder, this.#operatorName, fallbackFilename), path.join(publicAssetsDir, fallbackFilename))
    return {
      dimensions,
      assetsJson
    }
  }
}
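A sketch of how the processor's output might be consumed; the config values, operator name, and output paths are illustrative assumptions:

// Hypothetical wiring around AssetsProcessor.
import AssetsProcessor from './libs/assets_processor.js'
import { writeSync } from './libs/file.js'

const config = {                                                 // assumed minimal config shape
  folder: { operator: 'operators' },
  operators: { amiya: { filename: 'char_amiya', fallback_name: 'fallback' } },
}
const processor = new AssetsProcessor(config, 'amiya', process.cwd())
const { dimensions, assetsJson } = await processor.process('./public/assets', './extracted/amiya')

// assetsJson maps "./assets/<file>" to a base64 data URI; dimensions is [width, height] from the atlas.
writeSync(JSON.stringify(assetsJson), './src/assets.json')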

libs/config.js Normal file (+9)

@@ -0,0 +1,9 @@
import path from 'path'
import { read } from './yaml.js'

// Read config.yaml from the given directory and expose it together with the base directory.
export default function (dirname) {
  return {
    basedir: dirname,
    ...read(path.join(dirname, 'config.yaml'))
  }
}

libs/content_processor.js Normal file (+72)

@@ -0,0 +1,72 @@
export default class Matcher {
  #start
  #end
  #content
  #reExp
  #config

  constructor(content, start, end, config) {
    this.#start = start
    this.#end = end
    this.#content = content
    this.#reExp = new RegExp(`\\${start}.+?${end}`, 'g')
    this.#config = config
  }

  match() {
    return this.#content.match(this.#reExp)
  }

  // Replace every ${var:...} / ${func:...} placeholder in the content.
  process() {
    const matches = this.match()
    if (matches !== null) {
      matches.forEach((match) => {
        const matchTypeName = match.replace(this.#start, '').replace(this.#end, '')
        const type = matchTypeName.split(':')[0]
        const name = matchTypeName.split(':')[1]
        switch (type) {
          case 'var': {
            // Walk the config object along the "a->b->c" path, then substitute the resolved value.
            let replaceValue = this.#config
            name.split('->').forEach((item) => {
              try {
                replaceValue = replaceValue[item]
              } catch (e) {
                throw new Error(`Cannot find variable ${name}.`)
              }
            })
            this.#content = this.#content.replace(match, replaceValue)
            break
          }
          case 'func':
            try {
              this.#content = this.#content.replace(match, (new Function('Evalable', 'config', `return new Evalable(config).${name}`))(Evalable, this.#config))
            } catch (e) {
              throw new Error(e)
            }
            break
          default:
            throw new Error(`Cannot find type ${type}.`)
        }
      })
    }
    return this.#content
  }
}

class Evalable {
  #config

  constructor(config) {
    this.#config = config
  }

  // Resolve an "a->b->c" path in the config, then split the resulting string.
  split(varName, separator) {
    varName.split("->").forEach((item) => {
      try {
        this.#config = this.#config[item]
      } catch (e) {
        throw new Error(`Cannot split ${varName} with separator ${separator}.`)
      }
    })
    return this.#config.split(separator)
  }
}
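An illustrative run of Matcher against the two placeholder types it handles; the content string and config values are assumptions:

// ${var:...} walks an "a->b" path in the config; ${func:...} calls a method on Evalable.
import Matcher from './libs/content_processor.js'

const operatorConfig = { title: 'Amiya', tags: 'caster,medic' }
const text = 'Title: ${var:title}, first tag: ${func:split("tags", ",")[0]}'

console.log(new Matcher(text, '${', '}', operatorConfig).process())
// -> Title: Amiya, first tag: caster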

libs/directory.js Normal file (+19)

@@ -0,0 +1,19 @@
import path from 'path'
import { writeSync, copy, rmdir } from './file.js'

export default function (config, rootDir) {
  const targetFolder = path.join(rootDir, config.folder.release, config.folder.directory);
  const sourceFolder = path.join(rootDir, config.folder.operator);
  rmdir(targetFolder);
  const filesToCopy = [];
  const directoryJson = []
  for (const [key, value] of Object.entries(config.operators)) {
    filesToCopy.push(key);
    directoryJson.push(value);
  }
  writeSync(JSON.stringify(directoryJson, null), path.join(targetFolder, "directory.json"))
  filesToCopy.forEach((key) => {
    const filename = `${config.operators[key].filename}.json`;
    copy(path.join(sourceFolder, key, filename), path.join(targetFolder, filename))
  })
}

libs/env_generator.js Normal file (+32)

@@ -0,0 +1,32 @@
import path from 'path'

export default class EnvGenerator {
  #config

  constructor(config, operatorName) {
    this.#config = config.operators[operatorName]
  }

  async generate(dimensions) {
    return await this.#promise(dimensions)
  }

  #promise(dimensions) {
    return new Promise((resolve, reject) => {
      resolve([
        `VITE_TITLE="${this.#config.title}"`,
        `VITE_FILENAME=${this.#config.filename.replace('#', '%23')}`,
        `VITE_LOGO_FILENAME=${this.#config.logo}`,
        `VITE_FALLBACK_FILENAME=${this.#config.fallback_name.replace('#', '%23')}`,
        `VITE_VIEWPORT_LEFT=${this.#config.viewport_left}`,
        `VITE_VIEWPORT_RIGHT=${this.#config.viewport_right}`,
        `VITE_VIEWPORT_TOP=${this.#config.viewport_top}`,
        `VITE_VIEWPORT_BOTTOM=${this.#config.viewport_bottom}`,
        `VITE_INVERT_FILTER=${this.#config.invert_filter}`,
        `VITE_IMAGE_WIDTH=${dimensions[0]}`,
        `VITE_IMAGE_HEIGHT=${dimensions[1]}`,
      ].join('\n'))
    })
  }
}
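A usage sketch for EnvGenerator; the output path, the sample dimensions, and the O environment variable convention (mirroring exec.js) are assumptions:

// Hypothetical: write the generated env file for the operator selected via O=<name>.
import loadConfig from './libs/config.js'
import EnvGenerator from './libs/env_generator.js'
import { write } from './libs/file.js'

const config = loadConfig(process.cwd())                         // reads config.yaml from the project root
const env = await new EnvGenerator(config, process.env.O).generate(['2048', '2048'])
await write(env, '.env.local')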

libs/exec.js Normal file (+12)

@@ -0,0 +1,12 @@
import { execSync } from 'child_process'

export function buildAll(config) {
  for (const key of Object.keys(config.operators)) {
    // Skip keys prefixed with "_" (e.g. the template entry).
    if (key.startsWith('_')) continue;
    console.log(execSync(`O=${key} node runner.js && O=${key} pnpm run build`).toString());
  }
}

export function runDev(config) {
}

libs/file.js Normal file (+47)

@@ -0,0 +1,47 @@
import fs, { promises as fsP } from 'fs'
import path from 'path'

export async function write(content, filePath) {
  mkdir(path.dirname(filePath))
  return await fsP.writeFile(filePath, content, { flag: 'w' })
}

export function writeSync(content, filePath) {
  mkdir(path.dirname(filePath))
  return fs.writeFileSync(filePath, content, { flag: 'w' })
}

// Pass encoding = null to get a raw Buffer instead of a string.
export async function read(filePath, encoding = 'utf8') {
  return await fsP.readFile(filePath, { encoding, flag: 'r' })
}

export function exists(filePath) {
  return fs.existsSync(filePath)
}

export function rmdir(dir) {
  if (exists(dir)) {
    fs.rmSync(dir, { recursive: true })
  }
}

export function rm(file) {
  if (exists(file)) {
    fs.rmSync(file)
  }
}

export function mkdir(dir) {
  if (!exists(dir)) {
    fs.mkdirSync(dir, { recursive: true })
  }
}

export async function copy(sourcePath, targetPath) {
  if (!exists(sourcePath)) {
    console.warn(`Source file ${sourcePath} does not exist.`)
    return
  }
  mkdir(path.dirname(targetPath))
  return await fsP.copyFile(sourcePath, targetPath)
}

libs/initializer.js Normal file (+7)

@@ -0,0 +1,7 @@
import path from 'path'
import { mkdir, copy } from './file.js'

// Create the extraction directory and scaffold config/<operatorName>.yaml from the template.
export default function init(operatorName, __dirname, extractedDir) {
  mkdir(extractedDir)
  copy(path.join(__dirname, 'config', '_template.yaml'), path.join(__dirname, 'config', `${operatorName}.yaml`))
}

libs/project_json.js Normal file (+70)

@@ -0,0 +1,70 @@
import path from 'path'
import Matcher from './content_processor.js'
import { read, exists } from './file.js'

export default class ProjectJson {
  #json
  #config
  #operatorName
  #operatorSourceFolder
  #operatorShareFolder

  constructor(config, operatorName, __dirname, operatorShareFolder) {
    this.#config = config
    this.#operatorName = operatorName
    this.#operatorSourceFolder = path.join(__dirname, this.#config.folder.operator)
    this.#operatorShareFolder = operatorShareFolder
  }

  // Load the project.json from file and fill it in from the operator's config.
  async load() {
    this.#json = JSON.parse(await read(this.#getPath()))
    this.#process()
    return this.#json
  }

  // Prefer the operator-specific project.json; otherwise fall back to the shared template.
  #getPath() {
    const defaultPath = path.join(this.#operatorSourceFolder, this.#operatorName, 'project.json')
    if (exists(defaultPath)) {
      return defaultPath
    } else {
      return path.join(this.#operatorShareFolder, 'project.json')
    }
  }

  #process() {
    // Expand ${...} placeholders in the description, then override title and viewport padding.
    const matcher = new Matcher(this.#json.description, '${', '}', this.#config.operators[this.#operatorName])
    if (matcher.match() !== null) {
      this.#json.description = matcher.process()
    }
    this.#json = {
      ...this.#json,
      description: this.#json.description,
      title: this.#config.operators[this.#operatorName].title,
      general: {
        ...this.#json.general,
        properties: {
          ...this.#json.general.properties,
          paddingbottom: {
            ...this.#json.general.properties.paddingbottom,
            value: this.#config.operators[this.#operatorName].viewport_bottom
          },
          paddingleft: {
            ...this.#json.general.properties.paddingleft,
            value: this.#config.operators[this.#operatorName].viewport_left
          },
          paddingright: {
            ...this.#json.general.properties.paddingright,
            value: this.#config.operators[this.#operatorName].viewport_right
          },
          paddingtop: {
            ...this.#json.general.properties.paddingtop,
            value: this.#config.operators[this.#operatorName].viewport_top
          },
        }
      },
    }
  }
}
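A sketch of producing the final project.json; the share folder and output path are assumptions:

// Hypothetical: build project.json for the operator selected via O=<name>.
import loadConfig from './libs/config.js'
import ProjectJson from './libs/project_json.js'
import { writeSync } from './libs/file.js'

const config = loadConfig(process.cwd())
const projectJson = new ProjectJson(config, process.env.O, process.cwd(), './share')
writeSync(JSON.stringify(await projectJson.load(), null, 2), './public/project.json')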

libs/yaml.js Normal file (+19)

@@ -0,0 +1,19 @@
import path from 'path'
import { parse } from 'yaml'
import fs from 'fs'

// Read a YAML file, resolving !include tags relative to the including file.
export function read(file_dir) {
  const include = {
    identify: value => value.startsWith('!include'),
    tag: '!include',
    resolve(str) {
      const dir = path.resolve(path.dirname(file_dir), str)
      const data = read(dir)
      return data
    }
  }
  const file = fs.readFileSync(file_dir, 'utf8')
  return parse(file, {
    customTags: [include],
  })
}
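An illustrative pair of files showing the !include tag in use; the file names and contents are assumptions:

// config.yaml:
//   folder:
//     operator: operators
//   operators:
//     amiya: !include config/amiya.yaml
//
// config/amiya.yaml:
//   title: Amiya
//   filename: char_amiya
import { read } from './libs/yaml.js'

const config = read('./config.yaml')
console.log(config.operators.amiya.title)   // -> Amiya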