chore: moved to a new branch to save space
This commit is contained in:
31
libs/alpha_composite.js
Normal file
31
libs/alpha_composite.js
Normal file
@@ -0,0 +1,31 @@
|
||||
import sharp from "sharp";
|
||||
import path from "path";
|
||||
|
||||
/**
 * Recombines game textures whose colour and alpha data are shipped as two
 * separate PNG files, and crops rotated sprite rects out of an atlas.
 */
export default class AlphaComposite {

    /**
     * Merge a colour texture with its separate alpha-mask texture.
     * @param {string} filename - colour texture file name
     * @param {string} maskFilename - alpha-mask texture file name
     * @param {string} extractedDir - directory containing both files
     * @returns {Promise<Buffer>} RGBA PNG buffer
     */
    async process(filename, maskFilename, extractedDir) {
        // Drop any existing alpha channel; the real alpha comes from the mask.
        const image = sharp(path.join(extractedDir, filename))
            .removeAlpha()
        const imageMeta = await image.metadata()
        const imageBuffer = await image.toBuffer()
        // The mask stores alpha in its blue channel; resize it to match the
        // colour image before joining.
        const mask = await sharp(path.join(extractedDir, maskFilename))
            .extractChannel("blue")
            .resize(imageMeta.width, imageMeta.height)
            .toBuffer();

        // Append the mask as the alpha channel of the RGB image.
        return sharp(imageBuffer)
            .joinChannel(mask)
            .toBuffer()
    }

    /**
     * Crop a sprite rect out of an atlas buffer.
     * NOTE(review): x/y and w/h are deliberately swapped and the image is
     * rotated 90 degrees first — the atlas appears to store sprites rotated;
     * confirm against the atlas exporter before changing.
     * @param {Buffer} buffer - atlas image
     * @param {{x:number,y:number,w:number,h:number,rotate:number}} rect
     * @returns {Promise<Buffer>}
     */
    async crop(buffer, rect) {
        const left = rect.y
        const top = rect.x
        const width = rect.h
        const height = rect.w
        // Sprites marked rotate === 0 are rotated back by -90 at the end.
        const rotate = rect.rotate === 0 ? -90 : 0
        const newImage = await sharp(buffer).rotate(90).extract({ left: left, top: top, width: width, height: height }).resize(width, height).extract({ left: 0, top: 0, width: width, height: height }).toBuffer()
        return await sharp(newImage).rotate(rotate).toBuffer()
    }

}
|
||||
19
libs/append.js
Normal file
19
libs/append.js
Normal file
@@ -0,0 +1,19 @@
|
||||
/* eslint-disable no-undef */
|
||||
import path from 'path'
|
||||
import { appendSync, readSync } from './file.js'
|
||||
|
||||
/**
 * Append a table row for the given operator to the repository README,
 * linking its showcase page and Steam Workshop item.
 * @param {string} operatorName - key into __config.operators
 */
export function appendReadme(operatorName) {
    const operatorConfig = __config.operators[operatorName]
    const projectJsonPath = path.join(__projectRoot, __config.folder.operator, operatorName, 'project.json')
    const projectJson = JSON.parse(readSync(projectJsonPath))
    const row = `\n| ${operatorConfig.codename["en-US"]} | [Link](https://arknights.halyul.dev/${operatorConfig.link}/?settings) | [Link](https://steamcommunity.com/sharedfiles/filedetails/?id=${projectJson.workshopid}) |`
    appendSync(row, path.join(__projectRoot, 'README.md'))
}
|
||||
|
||||
/**
 * Register an operator's config file in the root config.yaml via an
 * `!include` entry.
 * @param {string} operatorName - key into __config.operators
 */
export function appendMainConfig(operatorName) {
    const entry = `\n  ${operatorName}: !include config/${operatorName}.yaml`
    appendSync(entry, path.join(__projectRoot, 'config.yaml'))
}
|
||||
64
libs/assets_processor.js
Normal file
64
libs/assets_processor.js
Normal file
@@ -0,0 +1,64 @@
|
||||
/* eslint-disable no-undef */
|
||||
import path from 'path'
|
||||
import { read, write, readSync } from './file.js'
|
||||
import AlphaComposite from './alpha_composite.js'
|
||||
|
||||
/**
 * Builds the distributable asset bundle (fallback image, cropped portrait,
 * base64-inlined Spine skeleton/atlas/textures) for one operator.
 */
export default class AssetsProcessor {
    #operatorSourceFolder
    #alphaCompositer
    #operatorName
    #shareFolder

    /**
     * @param {string} operatorName - key into __config.operators
     * @param {string} shareFolder - folder holding shared extracted assets
     */
    constructor(operatorName, shareFolder) {
        this.#operatorSourceFolder = path.join(__projectRoot, __config.folder.operator)
        this.#alphaCompositer = new AlphaComposite()
        this.#operatorName = operatorName
        this.#shareFolder = shareFolder
    }

    /**
     * Generate the fallback image and the cropped portrait for the operator,
     * then build the base64 asset bundle for its Spine files.
     * @param {string} extractedDir - folder with the extracted texture files
     * @returns {Promise<{dimensions: string[], assetsJson: Object}>}
     */
    async process(extractedDir) {
        const fallback_name = __config.operators[this.#operatorName].fallback_name
        const fallbackFilename = `${fallback_name}.png`
        // Recombine the colour texture with its separate `[alpha]` mask texture.
        const fallbackBuffer = await this.#alphaCompositer.process(fallbackFilename, `${path.parse(fallbackFilename).name}[alpha].png`, extractedDir)
        await write(fallbackBuffer, path.join(this.#operatorSourceFolder, this.#operatorName, fallbackFilename))

        // generate portrait: the hub file maps the sprite name to its atlas,
        // and the per-atlas json supplies the crop rect.
        const portraitDir = path.join(this.#shareFolder, "portraits")
        const portraitHub = JSON.parse(readSync(path.join(portraitDir, "MonoBehaviour", "portrait_hub.json")))
        const portraitAtlas = portraitHub._sprites.find((item) => item.name === fallback_name).atlas
        const portraitJson = JSON.parse(readSync(path.join(portraitDir, "MonoBehaviour", `portraits#${portraitAtlas}.json`)))
        const item = portraitJson._sprites.find((item) => item.name === fallback_name)
        const rect = {
            ...item.rect,
            rotate: item.rotate
        }
        const portraitFilename = `portraits#${portraitAtlas}.png`
        // Portrait atlases use an `...a.png` suffix for their alpha mask.
        const portraitBuffer = await this.#alphaCompositer.process(portraitFilename, `${path.parse(portraitFilename).name}a.png`, path.join(portraitDir, "Texture2D"))
        const croppedBuffer = await this.#alphaCompositer.crop(portraitBuffer, rect)
        await write(croppedBuffer, path.join(this.#operatorSourceFolder, this.#operatorName, `${fallback_name}_portrait.png`))

        return await this.generateAssets(__config.operators[this.#operatorName].filename, extractedDir)
    }

    /**
     * Inline the Spine skeleton, atlas and every referenced texture as base64
     * data URIs, keyed by their `./assets/...` path.
     * @param {string} filename - base name of the .skel/.atlas pair
     * @param {string} extractedDir - folder with the extracted files
     * @returns {Promise<{dimensions: string[], assetsJson: Object}>}
     */
    async generateAssets(filename, extractedDir) {
        const BASE64_BINARY_PREFIX = 'data:application/octet-stream;base64,'
        const BASE64_PNG_PREFIX = 'data:image/png;base64,'
        const assetsJson = {}
        // BUG FIX: these two names previously used the literal `$(unknown)`
        // instead of interpolating the `filename` parameter, which was unused.
        const skelFilename = `${filename}.skel`
        // Read the skeleton as a raw Buffer (encoding null).
        const skel = await read(path.join(extractedDir, skelFilename), null)
        const atlasFilename = `${filename}.atlas`
        const atlas = await read(path.join(extractedDir, atlasFilename))
        // The first "size: w,h" line of the atlas gives the texture dimensions.
        const dimensions = atlas.match(new RegExp(/^size:(.*),(.*)/gm))[0].replace('size: ', '').split(',')
        const matches = atlas.match(new RegExp(/(.*).png/g))
        for (const item of matches) {
            const buffer = await this.#alphaCompositer.process(item, `${path.parse(item).name}[alpha].png`, extractedDir)
            assetsJson[`./assets/${item}`] = BASE64_PNG_PREFIX + buffer.toString('base64')
        }
        // '#' must be URL-encoded since these keys are consumed as URLs.
        assetsJson[`./assets/${skelFilename.replace('#', '%23')}`] = BASE64_BINARY_PREFIX + skel.toString('base64')
        assetsJson[`./assets/${atlasFilename.replace('#', '%23')}`] = BASE64_BINARY_PREFIX + Buffer.from(atlas).toString('base64')
        return {
            dimensions,
            assetsJson
        }
    }
}
|
||||
64
libs/background.js
Normal file
64
libs/background.js
Normal file
@@ -0,0 +1,64 @@
|
||||
/* eslint-disable no-undef */
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
import sharp from "sharp";
|
||||
|
||||
/**
 * Stitches split background textures (`*_left.png` + `*_right.png`) from the
 * extracted folder into single side-by-side images in the background folder.
 */
export default class Background {
    #backgroundFolder
    #extractFolder
    #files

    constructor() {
        this.#backgroundFolder = path.join(__projectRoot, __config.folder.operator, __config.folder.share, __config.folder.background);
        this.#extractFolder = path.join(this.#backgroundFolder, 'extracted');
    }

    /**
     * Composite every left/right pair, unless the background folder already
     * appears fully populated.
     */
    async process() {
        this.#files = fs.readdirSync(this.#extractFolder).filter((f) => {
            return f.endsWith('.png') && f.includes('_left');
        })
        // NOTE(review): the +2 offset presumably accounts for two non-output
        // entries in the background folder (e.g. the 'extracted' subfolder) —
        // confirm before relying on this skip heuristic.
        if (this.#files.length + 2 !== fs.readdirSync(this.#backgroundFolder).length) {
            await Promise.all(this.#files.map(async (f) => {
                const filenamePrefix = path.parse(f).name.replace('_left', '');
                await this.#composite(filenamePrefix, '.png');
            }))
        } else {
            console.log('Background images already exist, skip generation.')
        }
    }

    /**
     * Paste `<prefix>_right` next to `<prefix>_left` on a double-width
     * transparent canvas and write `<prefix><ext>` to the background folder.
     */
    async #composite(filenamePrefix, fileExt) {
        const image = sharp(path.join(this.#extractFolder, `${filenamePrefix}_left${fileExt}`))
        const metadata = await image.metadata()

        // BUG FIX: `.toFile()` returns a Promise that was previously dropped,
        // so process()'s Promise.all could resolve before any output was
        // written to disk. Await the pipeline so completion is observable.
        await image
            .resize(2 * metadata.width, metadata.height, {
                kernel: sharp.kernel.nearest,
                fit: 'contain',
                position: 'left top',
                background: { r: 255, g: 255, b: 255, alpha: 0 }
            })
            .composite([
                {
                    input: path.join(this.#extractFolder, `${filenamePrefix}_right${fileExt}`),
                    top: 0,
                    left: metadata.width,
                },
            ])
            .toFile(path.join(this.#backgroundFolder, `${filenamePrefix}${fileExt}`));
    }

    /** Output file names (left/right pairs collapsed to one name). */
    get files() {
        return this.#files.map(f => f.replace('_left', ''))
    }

    /**
     * Describe the composited files for the deploy copy step.
     * @param {string} publicAssetsDir - destination assets directory
     */
    getFilesToCopy(publicAssetsDir) {
        return this.#files.map((f) => {
            return {
                filename: f.replace('_left', ''),
                source: path.join(this.#backgroundFolder),
                target: path.join(publicAssetsDir, __config.folder.background)
            };
        })
    }
}
|
||||
246
libs/charword_table.js
Normal file
246
libs/charword_table.js
Normal file
@@ -0,0 +1,246 @@
|
||||
/* eslint-disable no-undef */
|
||||
import fetch from "node-fetch"
|
||||
import path from "path"
|
||||
import dotenv from "dotenv"
|
||||
import { exists, writeSync, readdirSync, rm, readSync } from "./file.js"
|
||||
|
||||
dotenv.config()
|
||||
|
||||
// zh_TW uses an older version of charword_table.json
|
||||
const REGIONS = ["zh_CN", "en_US", "ja_JP", "ko_KR", "zh_TW"]
|
||||
const DEFAULT_REGION = REGIONS[0]
|
||||
const NICKNAME = {
|
||||
"zh_CN": "博士",
|
||||
"en_US": "Doctor",
|
||||
"ja_JP": "ドクター",
|
||||
"ko_KR": "박사",
|
||||
"zh_TW": "博士",
|
||||
}
|
||||
|
||||
/**
 * Derive the operator id from an operator config by stripping the
 * `dyn_illust_` prefix from its asset filename.
 * A non-matching filename is returned unchanged.
 * @param {{filename: string}} operatorConfig
 * @returns {string} e.g. 'dyn_illust_char_123_abc' -> 'char_123_abc'
 */
export function getOperatorId(operatorConfig) {
    const dynIllustPattern = /^(dyn_illust_)(char_[\d]+)(_[\w]+)(|(_.+))$/g;
    const { filename } = operatorConfig;
    return filename.replace(dynIllustPattern, '$2$3$4');
}
|
||||
|
||||
/**
 * Downloads and caches per-region charword tables (voice lines + voice actor
 * info) from the Kengxxiao/ArknightsGameData repository and folds them into a
 * single charword_table.json keyed by operator id.
 */
export default class CharwordTable {
    // Operator ids derived from each configured operator's asset filename.
    #operatorIDs = Object.values(__config.operators).map(operator => { return getOperatorId(operator) })
    #charwordTablePath = path.join(__projectRoot, __config.folder.operator, __config.folder.share)
    #charwordTableFile = path.join(this.#charwordTablePath, 'charword_table.json')
    // Previously generated table, or a fresh skeleton when the file is absent
    // (readSync returns null and JSON.parse(null) yields null).
    #charwordTable = JSON.parse(readSync(this.#charwordTableFile)) || {
        config: {
            default_region: DEFAULT_REGION,
            regions: REGIONS,
        },
        operators: {},
    }

    /**
     * Rebuild the table for every configured operator across all REGIONS and
     * persist it. The default region is loaded first because it establishes
     * the `infile`/`ref` flags the other regions rely on.
     */
    async process() {
        // One empty bucket per region.
        const regionObject = REGIONS.reduce((acc, cur) => ({ ...acc, [cur]: {} }), {})
        this.#operatorIDs.forEach(id => {
            this.#charwordTable.operators[id] = {
                // Base operator id with any skin suffix removed.
                alternativeId: id.replace(/^(char_)([\d]+)(_[\w]+)(|(_.+))$/g, '$1$2$3'),
                voice: JSON.parse(JSON.stringify(regionObject)), // deep copy
                info: JSON.parse(JSON.stringify(regionObject)), // deep copy
            }
        })
        await this.#load(DEFAULT_REGION)
        await Promise.all(REGIONS.slice(1).map(async (region) => {await this.#load(region)}))
        writeSync(JSON.stringify(this.#charwordTable), this.#charwordTableFile)
    }

    /**
     * Look up the table entry for an operator name, following the `ref`
     * indirection to the base operator's entry when set.
     * @param {string} operatorName - key into __config.operators
     */
    lookup(operatorName) {
        const operatorId = getOperatorId(__config.operators[operatorName])
        const operatorBlock = this.#charwordTable.operators[operatorId]
        return {
            config: this.#charwordTable.config,
            operator: operatorBlock.ref ? this.#charwordTable.operators[operatorBlock.alternativeId] : operatorBlock,
        }
    }

    /**
     * Load one region's data into the table. zh_TW uses an older data layout
     * and is handled by its own loader.
     */
    async #load(region) {
        if (region === 'zh_TW') {
            return await this.#zhTWLoad()
        }

        const data = await this.#download(region)

        // put voice actor info into charword_table
        for (const [id, element] of Object.entries(this.#charwordTable.operators)) {
            let operatorId = id
            let useAlternativeId = false
            // Fall back to the base operator id when the skin id is absent.
            if (typeof data.voiceLangDict[operatorId] === 'undefined') {
                operatorId = element.alternativeId
                useAlternativeId = true
            }
            // Only the default region decides the infile/ref flags.
            if (region === DEFAULT_REGION) {
                element.infile = this.#operatorIDs.includes(operatorId);
                element.ref = useAlternativeId && element.infile;
            }
            // not available in other region
            if (typeof data.voiceLangDict[operatorId] === 'undefined') {
                console.log(`Voice actor info of ${id} is not available in ${region}.`)
                continue
            }

            if (element.infile && useAlternativeId) {
                // if using alternative id and infile is true, means data can be
                // refered inside the file
                // if infile is false, useAlternativeId is always true
                // if useAlternativeId is false, infile is always true
                // | case | infile | useAlternativeId | Note |
                // | ------------------- | ------ | ---------------- | --------------- |
                // | lee_trust_your_eyes | false | true | skin only |
                // | nearl_relight | true | true | skin, operator, no voice |
                // | nearl | true | false | operator only |
                // | w_fugue | true | false | skin, operator, voice |
                continue
            }
            Object.values(data.voiceLangDict[operatorId].dict).forEach(item => {
                if (typeof element.info[region][item.wordkey] === 'undefined') {
                    element.info[region][item.wordkey] = {}
                }
                // cvName may be a single string or an array; normalise to array.
                element.info[region][item.wordkey][item.voiceLangType] = [...(typeof item.cvName === 'string' ? [item.cvName] : item.cvName)]
            })
        }

        // put voice lines into charword_table
        Object.values(data.charWords).forEach(item => {
            // Attach the line to the first operator whose info references it.
            const operatorInfo = Object.values(this.#charwordTable.operators).filter(element => element.info[region][item.wordKey])
            if (operatorInfo.length > 0) {
                if (typeof operatorInfo[0].voice[region][item.wordKey] === 'undefined') {
                    operatorInfo[0].voice[region][item.wordKey] = {}
                }
                operatorInfo[0].voice[region][item.wordKey][item.voiceId] = {
                    title: item.voiceTitle,
                    text: item.voiceText.replace(/{@nickname}/g, NICKNAME[region]),
                }
            }
        })
    }

    /**
     * Fetch a region's charword_table.json, cached on disk keyed by the
     * upstream file's last commit timestamp; prunes older cached copies.
     */
    async #download(region) {
        const historyResponse = await fetch(`https://api.github.com/repos/Kengxxiao/ArknightsGameData/commits?path=${region}/gamedata/excel/charword_table.json`)
        const historyData = await historyResponse.json()
        const lastCommit = historyData[0]
        const lastCommitDate = new Date(lastCommit.commit.committer.date)
        const filepath = path.join(this.#charwordTablePath, `charword_table_${region}_${lastCommitDate.getTime()}.json`)
        console.log(`Last commit date: ${lastCommitDate.getTime()}`)

        if (exists(filepath)) {
            console.log(`charword_table_${region}.json is the latest version.`)
            return JSON.parse(readSync(filepath))
        }
        const response = await fetch(`https://raw.githubusercontent.com/Kengxxiao/ArknightsGameData/master/${region}/gamedata/excel/charword_table.json`)
        const data = await response.json()
        writeSync(JSON.stringify(data), filepath)
        console.log(`charword_table_${region}.json is updated.`)

        // remove old file
        const files = readdirSync(path.join(__projectRoot, __config.folder.operator, __config.folder.share))
        for (const file of files) {
            if (file.startsWith(`charword_table_${region}`) && file !== path.basename(filepath)) {
                rm(path.join(__projectRoot, __config.folder.operator, __config.folder.share, file))
            }
        }
        return data
    }

    /**
     * zh_TW loader: the older data layout keeps voice-actor names in
     * handbook_info_table.json rather than in the charword table.
     */
    async #zhTWLoad() {
        const region = 'zh_TW'
        const downloaded = await this.#zhTWDownload()
        const data = downloaded.data
        const handbook = downloaded.handbook

        // put voice actor info into charword_table
        for (const [id, element] of Object.entries(this.#charwordTable.operators)) {
            let operatorId = id
            let useAlternativeId = false
            if (typeof handbook.handbookDict[operatorId] === 'undefined') {
                operatorId = element.alternativeId
                useAlternativeId = true
            }
            // not available in other region
            if (typeof handbook.handbookDict[operatorId] === 'undefined') {
                console.log(`Voice actor info of ${id} is not available in ${region}.`)
                continue
            }
            if (element.infile && useAlternativeId) {
                continue
            }
            const charId = handbook.handbookDict[operatorId].charID
            if (typeof element.info[region][charId] === 'undefined') {
                element.info[region][charId] = {}
            }
            // The old layout only exposes the Japanese voice actor.
            element.info[region][charId].JP = [...[handbook.handbookDict[operatorId].infoName]]
        }

        // put voice lines into charword_table
        Object.values(data).forEach(item => {
            const operatorInfo = Object.values(this.#charwordTable.operators).filter(element => element.info[region][item.wordKey])
            if (operatorInfo.length > 0) {
                if (typeof operatorInfo[0].voice[region][item.wordKey] === 'undefined') {
                    operatorInfo[0].voice[region][item.wordKey] = {}
                }
                operatorInfo[0].voice[region][item.wordKey][item.voiceId] = {
                    title: item.voiceTitle,
                    text: item.voiceText.replace(/{@nickname}/g, NICKNAME[region]),
                }
            }
        })

    }

    /**
     * Fetch zh_TW charword_table.json and handbook_info_table.json with the
     * same commit-timestamp disk cache and pruning as #download.
     * @returns {Promise<{data: Object, handbook: Object}>}
     */
    async #zhTWDownload() {
        const output = {}
        const region = 'zh_TW'
        const historyResponse = await fetch(`https://api.github.com/repos/Kengxxiao/ArknightsGameData/commits?path=${region}/gamedata/excel/charword_table.json`)
        const handbookHistoryResponse = await fetch(`https://api.github.com/repos/Kengxxiao/ArknightsGameData/commits?path=${region}/gamedata/excel/handbook_info_table.json`)
        const historyData = await historyResponse.json()
        const handbookHistoryData = await handbookHistoryResponse.json()
        const lastCommit = historyData[0]
        const handboookLastCommit = handbookHistoryData[0]
        const lastCommitDate = new Date(lastCommit.commit.committer.date)
        const handbookLastCommitDate = new Date(handboookLastCommit.commit.committer.date)
        const filepath = path.join(this.#charwordTablePath, `charword_table_${region}_${lastCommitDate.getTime()}.json`)
        const handbookFilepath = path.join(this.#charwordTablePath, `handbook_info_table_${region}_${handbookLastCommitDate.getTime()}.json`)
        console.log(`Last commit date: ${lastCommitDate.getTime()}`)
        console.log(`Handbook last commit date: ${handbookLastCommitDate.getTime()}`)

        if (exists(filepath)) {
            console.log(`charword_table_${region}.json is the latest version.`)
            output.data = JSON.parse(readSync(filepath))
        } else {
            const response = await fetch(`https://raw.githubusercontent.com/Kengxxiao/ArknightsGameData/master/${region}/gamedata/excel/charword_table.json`)
            const data = await response.json()
            writeSync(JSON.stringify(data), filepath)
            console.log(`charword_table_${region}.json is updated.`)

            // remove old file
            const files = readdirSync(path.join(__projectRoot, __config.folder.operator, __config.folder.share))
            for (const file of files) {
                if (file.startsWith(`charword_table_${region}`) && file !== path.basename(filepath)) {
                    rm(path.join(__projectRoot, __config.folder.operator, __config.folder.share, file))
                }
            }
            output.data = data
        }
        if (exists(handbookFilepath)) {
            console.log(`handbook_info_table_${region}.json is the latest version.`)
            output.handbook = JSON.parse(readSync(handbookFilepath))
        } else {
            const response = await fetch(`https://raw.githubusercontent.com/Kengxxiao/ArknightsGameData/master/${region}/gamedata/excel/handbook_info_table.json`)
            const data = await response.json()
            writeSync(JSON.stringify(data), handbookFilepath)
            console.log(`handbook_info_table_${region}.json is updated.`)

            // remove old file
            const files = readdirSync(path.join(__projectRoot, __config.folder.operator, __config.folder.share))
            for (const file of files) {
                if (file.startsWith(`handbook_info_table_${region}`) && file !== path.basename(handbookFilepath)) {
                    rm(path.join(__projectRoot, __config.folder.operator, __config.folder.share, file))
                }
            }
            output.handbook = data
        }
        return output
    }
}
|
||||
32
libs/config.js
Normal file
32
libs/config.js
Normal file
@@ -0,0 +1,32 @@
|
||||
/* eslint-disable no-undef */
|
||||
import path from 'path'
|
||||
import { read } from './yaml.js'
|
||||
import { read as readVersion } from './version.js'
|
||||
import { getOperatorId } from './charword_table.js'
|
||||
|
||||
/**
 * Load the root config.yaml and return it enriched with derived
 * per-operator fields and build versions.
 */
export default function () {
    const rawConfig = read(path.join(__projectRoot, 'config.yaml'))
    return process(rawConfig)
}
|
||||
|
||||
/**
 * Enrich the parsed config in place: derive title, type, link and numeric id
 * for every operator, then attach the showcase/directory build versions.
 * @param {Object} config - parsed config.yaml contents
 * @returns {Object} the same config object, mutated
 */
function process(config) {
    for (const [operatorName, operator] of Object.entries(config.operators)) {
        const en = operator.codename["en-US"]
        const zh = operator.codename["zh-CN"]
        // Bilingual page title.
        operator.title = `${config.share.title["en-US"]}${en} - ${config.share.title["zh-CN"]}${zh}`
        // A '·' in the Chinese codename marks a skin rather than a base operator.
        operator.type = zh.includes('·') ? 'skin' : 'operator'
        operator.link = operatorName
        // Numeric character id, e.g. char_123_name -> 123.
        operator.id = getOperatorId(operator).replace(/^(char_)(\d+)(_.+)$/g, '$2')
    }

    config.version = {
        showcase: readVersion(path.join(__projectRoot)),
        directory: readVersion(path.join(__projectRoot, 'directory')),
    }

    return config
}
|
||||
68
libs/content_processor.js
Normal file
68
libs/content_processor.js
Normal file
@@ -0,0 +1,68 @@
|
||||
/* eslint-disable no-undef */
|
||||
/**
 * Template expander: finds `start...end` delimited expressions in
 * `this.content` and replaces each with the result of evaluating the
 * expression as a method call on Evalable.
 */
export default class Matcher {
    #start
    #end
    #reExp
    #config
    #assets

    /**
     * @param {string} start - opening delimiter (also a regex fragment)
     * @param {string} end - closing delimiter (also a regex fragment)
     * @param {Object} config - data exposed to expressions as `config`
     * @param {Object} assets - data exposed to expressions as `assets`
     */
    constructor(start, end, config, assets) {
        this.#start = start
        this.#end = end
        // Non-greedy so several placeholders on one line match separately.
        this.#reExp = new RegExp(`${start}.+?${end}`, 'g')
        this.#config = config
        this.#assets = assets
    }

    /**
     * Expand all placeholders in `this.content` (set by the caller before
     * reading this getter) and return the expanded value. With exactly one
     * match the raw evaluation result is returned (may be a non-string);
     * with several, each match is string-substituted in place.
     *
     * NOTE(review): `new Function` executes arbitrary template content —
     * acceptable only because templates are trusted, repository-local files.
     */
    get result() {
        const matches = this.content.match(this.#reExp)
        if (matches !== null) {
            matches.forEach((match) => {
                // Strip the delimiters to get the Evalable expression.
                const name = match.replace(this.#start, '').replace(this.#end, '')
                const result = (new Function(
                    'Evalable',
                    'config',
                    'assets',
                    `return new Evalable(config, assets).${name}`)
                )(Evalable, this.#config, this.#assets)
                this.content = matches.length > 1 ? this.content.replace(match, result) : result
            })
        }
        return this.content
    }
}
|
||||
|
||||
/**
 * Expression helpers available inside template placeholders. Each public
 * method resolves values out of the config or assets objects.
 */
class Evalable {
    #config
    #assets

    constructor(config, assets) {
        this.#config = config
        this.#assets = assets
    }

    /**
     * Resolve a value and split it on `separator`.
     */
    split(location, varName, separator) {
        const value = this.#step(location, varName)
        return value.split(separator)
    }

    /**
     * Resolve a value by its `a->b->c` path.
     */
    var(location, varName) {
        return this.#step(location, varName)
    }

    /**
     * Wrap the current showcase version with a prefix/suffix.
     */
    version(prefix, suffix) {
        return `${prefix}${__config.version.showcase}${suffix}`
    }

    /**
     * Walk an `a->b->c` path through config (default) or assets.
     * @throws {Error} when a path segment cannot be read.
     */
    #step(location, varName) {
        let node = location === 'assets' ? this.#assets : this.#config
        for (const segment of varName.split("->")) {
            try {
                node = node[segment]
            } catch (e) {
                throw new Error(`Cannot step ${varName}.`)
            }
        }
        return node
    }
}
|
||||
99
libs/directory.js
Normal file
99
libs/directory.js
Normal file
@@ -0,0 +1,99 @@
|
||||
/* eslint-disable no-undef */
|
||||
import path from 'path'
|
||||
import { writeSync, copy, readSync as readFile } from './file.js'
|
||||
import { read } from './yaml.js';
|
||||
import AssetsProcessor from './assets_processor.js'
|
||||
import EnvGenerator from './env_generator.js'
|
||||
|
||||
/**
 * Build the directory site's release artifacts: directory.json (operators
 * grouped by date), version.json, changelogs.json, backgrounds.json, error
 * asset bundles, the Vite .env file, and copies of per-operator assets.
 * @param {{backgrounds: Object, musicMapping: Object}} param0
 */
export default function ({ backgrounds, musicMapping }) {
    const extractedFolder = path.join(__projectRoot, __config.folder.operator, '_directory')
    const targetFolder = path.join(__projectRoot, __config.folder.release, __config.folder.directory);
    const sourceFolder = path.join(__projectRoot, __config.folder.operator);
    const filesToCopy = Object.keys(__config.operators)
    const directoryJson = {
        // Operators grouped by release date, newest group first.
        operators: Object.values(
            Object.values(__config.operators)
                .reduce((acc, cur) => {
                    const date = cur.date
                    if (acc[date]) {
                        acc[date].push(cur)
                    } else {
                        acc[date] = [cur]
                    }

                    // Attach the Steam workshop id; tolerate a missing or
                    // unparsable project.json (readFile returns null then).
                    cur.workshopId = null
                    try {
                        cur.workshopId = JSON.parse(readFile(path.join(__projectRoot, __config.folder.operator, cur.link, 'project.json'))).workshopid
                    } catch (e) {
                        console.log(`No workshop id for ${cur.link}!`, e)
                    }

                    return acc
                }, {}))
            .sort((a, b) => Date.parse(b[0].date) - Date.parse(a[0].date)),
    }
    const versionJson = __config.version

    // Flatten changelogs.yaml into one array of {key, date, content} per key.
    const changelogs = read(path.join(__projectRoot, 'changelogs.yaml'))
    const changelogsArray = Object.keys(changelogs).reduce((acc, cur) => {
        const array = []
        Object.keys(changelogs[cur]).map((item) => {
            array.push({
                key: cur,
                date: item,
                content: [...changelogs[cur][item]]
            })
        })
        acc.push(array)
        return acc
    }, [])

    // Bundle the error-page spine assets. NOTE(review): these promises are
    // not awaited, so the process may still be writing when this returns.
    __config.directory.error.files.forEach((key) => {
        const assetsProcessor = new AssetsProcessor()
        assetsProcessor.generateAssets(key.key, extractedFolder).then((content) => {
            writeSync(JSON.stringify(content.assetsJson, null), path.join(targetFolder, `${key.key}.json`))
        })
    })

    // Emit the Vite environment file consumed by the directory frontend.
    writeSync((new EnvGenerator()).generate([
        {
            key: "app_title",
            value: __config.directory.title
        }, {
            key: "app_voice_url",
            value: __config.directory.voice
        }, {
            key: "voice_folders",
            value: JSON.stringify(__config.folder.voice)
        }, {
            key: "directory_folder",
            value: JSON.stringify(__config.folder.directory)
        }, {
            key: "background_folder",
            value: JSON.stringify(__config.folder.background)
        }, {
            key: "available_operators",
            value: JSON.stringify(Object.keys(__config.operators))
        }, {
            // NOTE(review): String.replace with a string pattern only
            // substitutes the FIRST '#'; confirm multiple error files with
            // '#' in their names cannot occur.
            key: "error_files",
            value: JSON.stringify(__config.directory.error).replace('#', '%23')
        }, {
            key: "music_folder",
            value: __config.folder.music
        }, {
            key: "music_mapping",
            value: JSON.stringify(musicMapping)
        }
    ]), path.join(__projectRoot, 'directory', '.env'))

    writeSync(JSON.stringify(directoryJson, null), path.join(targetFolder, "directory.json"))
    writeSync(JSON.stringify(versionJson, null), path.join(targetFolder, "version.json"))
    writeSync(JSON.stringify(changelogsArray, null), path.join(targetFolder, "changelogs.json"))
    writeSync(JSON.stringify(backgrounds, null), path.join(targetFolder, "backgrounds.json"))
    // Copy each operator's asset bundle and voice table into the release.
    filesToCopy.forEach((key) => {
        copy(path.join(sourceFolder, key, 'assets.json'), path.join(targetFolder, `${__config.operators[key].filename}.json`))
        copy(path.join(sourceFolder, key, 'charword_table.json'), path.join(targetFolder, `voice_${key}.json`))
    })
    copy(path.join(extractedFolder, __config.directory.error.voice), path.join(targetFolder, `error.ogg`))
}
|
||||
7
libs/env_generator.js
Normal file
7
libs/env_generator.js
Normal file
@@ -0,0 +1,7 @@
|
||||
/**
 * Renders key/value pairs as Vite environment-variable lines
 * (`VITE_<KEY>=<value>`), one per line.
 */
export default class EnvGenerator {
    /**
     * @param {{key: string, value: *}[]} values - entries to render
     * @returns {string} newline-joined env file contents
     */
    generate(values) {
        const lines = []
        for (const { key, value } of values) {
            lines.push(`VITE_${key.toUpperCase()}=${value}`)
        }
        return lines.join('\n')
    }
}
|
||||
71
libs/file.js
Normal file
71
libs/file.js
Normal file
@@ -0,0 +1,71 @@
|
||||
import fs, { promises as fsP } from 'fs'
|
||||
import path from 'path'
|
||||
|
||||
/**
 * Asynchronously write `content` to `filePath`, creating the parent
 * directory first if needed.
 * @param {string|Buffer} content
 * @param {string} filePath
 * @returns {Promise<void>}
 */
export async function write(content, filePath) {
    const parent = path.dirname(filePath)
    mkdir(parent)
    return await fsP.writeFile(filePath, content, { flag: 'w' })
}
|
||||
|
||||
/**
 * Synchronously write `content` to `filePath`, creating the parent
 * directory first if needed.
 * @param {string|Buffer} content
 * @param {string} filePath
 */
export function writeSync(content, filePath) {
    const parent = path.dirname(filePath)
    mkdir(parent)
    return fs.writeFileSync(filePath, content, { flag: 'w' })
}
|
||||
|
||||
/**
 * Asynchronously read a file.
 * @param {string} filePath
 * @param {string|null} [encoding='utf8'] - null returns a raw Buffer
 * @returns {Promise<string|Buffer>}
 */
export async function read(filePath, encoding = 'utf8') {
    // BUG FIX: fs.promises.readFile takes a single options argument; the
    // previous call passed the flag as a third argument, which is ignored.
    return await fsP.readFile(filePath, { encoding, flag: 'r' })
}
|
||||
|
||||
/**
 * Synchronously read a file, or return null when it does not exist.
 * @param {string} filePath
 * @param {string|null} [encoding='utf8'] - null returns a raw Buffer
 * @returns {string|Buffer|null}
 */
export function readSync(filePath, encoding = 'utf8') {
    if (exists(filePath)) {
        // BUG FIX: fs.readFileSync takes (path, options); the previous call
        // passed the flag as a third argument, which is ignored.
        return fs.readFileSync(filePath, { encoding, flag: 'r' })
    }
    return null
}
|
||||
|
||||
/**
 * Check whether a path exists on disk.
 * @param {string} filePath
 * @returns {boolean}
 */
export function exists(filePath) {
    const found = fs.existsSync(filePath)
    return found
}
|
||||
|
||||
/**
 * Remove a directory tree if it exists; no-op otherwise.
 * @param {string} dir
 */
export function rmdir(dir) {
    if (!exists(dir)) return
    fs.rmSync(dir, { recursive: true })
}
|
||||
|
||||
/**
 * Remove a single file if it exists; no-op otherwise.
 * @param {string} file
 */
export function rm(file) {
    if (!exists(file)) return
    fs.rmSync(file)
}
|
||||
|
||||
/**
 * Create a directory (and any missing parents) if it does not already exist.
 * @param {string} dir
 */
export function mkdir(dir) {
    if (exists(dir)) return
    fs.mkdirSync(dir, { recursive: true })
}
|
||||
|
||||
/**
 * Copy a file, creating the target's parent directory first. Warns and
 * returns undefined when the source is missing.
 * @param {string} sourcePath
 * @param {string} targetPath
 * @returns {Promise<void>|undefined}
 */
export async function copy(sourcePath, targetPath) {
    if (exists(sourcePath)) {
        mkdir(path.dirname(targetPath))
        return await fsP.copyFile(sourcePath, targetPath)
    }
    console.warn(`Source file ${sourcePath} does not exist.`)
}
|
||||
|
||||
/**
 * Recursively copy a directory, creating the target first. Warns and
 * returns undefined when the source is missing.
 * @param {string} sourcePath
 * @param {string} targetPath
 * @returns {Promise<void>|undefined}
 */
export async function copyDir(sourcePath, targetPath) {
    if (exists(sourcePath)) {
        mkdir(targetPath)
        return await fsP.cp(sourcePath, targetPath, { recursive: true })
    }
    console.warn(`Source file ${sourcePath} does not exist.`)
}
|
||||
|
||||
/**
 * Append UTF-8 text to a file synchronously.
 * @param {string} content
 * @param {string} filePath
 */
export function appendSync(content, filePath) {
    const result = fs.appendFileSync(filePath, content, 'utf8');
    return result;
}
|
||||
|
||||
/**
 * List the entries of a directory synchronously.
 * @param {string} dir
 * @returns {string[]}
 */
export function readdirSync(dir) {
    const entries = fs.readdirSync(dir)
    return entries
}
|
||||
18
libs/initializer.js
Normal file
18
libs/initializer.js
Normal file
@@ -0,0 +1,18 @@
|
||||
/* eslint-disable no-undef */
|
||||
import path from 'path'
|
||||
import { stringify } from 'yaml'
|
||||
import { read as readYAML } from './yaml.js'
|
||||
import { mkdir, writeSync } from './file.js'
|
||||
import { appendMainConfig } from './append.js'
|
||||
|
||||
/**
 * Scaffold a new operator: create its extraction directories, write a dated
 * config file from the template, and register it in the main config.
 * @param {string} operatorName - new operator key
 * @param {string[]} extractedDir - directories to create
 */
export default function init(operatorName, extractedDir) {
    for (const dir of extractedDir) {
        mkdir(dir)
    }
    const now = new Date()
    // Zero-padded YYYY/MM release date.
    const month = (now.getMonth() + 1).toString().padStart(2, '0')
    const template = readYAML(path.join(__projectRoot, 'config', '_template.yaml'))
    template.link = operatorName
    template.date = `${now.getFullYear()}/${month}`
    writeSync(stringify(template), path.join(__projectRoot, 'config', `${operatorName}.yaml`))
    appendMainConfig(operatorName)
}
|
||||
19
libs/music.js
Normal file
19
libs/music.js
Normal file
@@ -0,0 +1,19 @@
|
||||
/* eslint-disable no-undef */
|
||||
import path from 'path';
|
||||
import { read } from './yaml.js';
|
||||
|
||||
/**
 * Load the music mapping and derive the list of music files to copy into
 * the release (null entries in the mapping are skipped).
 * @returns {{musicToCopy: {filename: string, source: string}[], musicMapping: Object}}
 */
export default function () {
    const musicFolder = path.join(__projectRoot, __config.folder.operator, __config.folder.share, __config.folder.music);
    const musicMapping = read(path.join(musicFolder, 'mapping.yaml'));
    const musicToCopy = Object.values(musicMapping)
        .flatMap((entry) => Object.values(entry))
        .filter((filename) => filename !== null)
        .map((filename) => ({ filename, source: musicFolder }));
    return { musicToCopy, musicMapping };
}
|
||||
90
libs/project_json.js
Normal file
90
libs/project_json.js
Normal file
@@ -0,0 +1,90 @@
|
||||
/* eslint-disable no-undef */
|
||||
import path from 'path'
|
||||
import Matcher from './content_processor.js'
|
||||
import { read as readFile, exists } from './file.js'
|
||||
import { read as readYAML } from './yaml.js'
|
||||
|
||||
/**
 * Loads an operator's Wallpaper Engine project.json and overlays it with
 * values rendered from the `_project_json.yaml` template, expanding `~{...}`
 * placeholders against the operator config and asset lists.
 */
export default class ProjectJson {
    #json
    #operatorName
    #operatorSourceFolder
    #operatorShareFolder
    #assets
    #template

    /**
     * @param {string} operatorName - key into __config.operators
     * @param {string} operatorShareFolder - fallback folder for project.json
     * @param {Object} assets - asset name lists exposed to the template
     */
    constructor(operatorName, operatorShareFolder, assets) {
        this.#operatorName = operatorName
        this.#operatorSourceFolder = path.join(__projectRoot, __config.folder.operator)
        this.#operatorShareFolder = operatorShareFolder
        this.#assets = assets
    }

    /**
     * Read the base project.json, render the YAML template (with the custom
     * `!match` tag expanding `~{...}` placeholders), merge, and return the
     * resulting json.
     * @returns {Promise<Object>}
     */
    async load() {
        // load json from file
        this.#json = JSON.parse(await readFile(this.#getPath()))
        const matcher = new Matcher('~{', '}', __config.operators[this.#operatorName], {
            ...this.#assets,
            // For every asset list also expose a `<key>Options` list of
            // {label, value} pairs for select-style properties.
            ...(() => {
                const output = {}
                for (const [key, value] of Object.entries(this.#assets)) {
                    output[`${key}Options`] = value.map((b) => {
                        return {
                            "label": b,
                            "value": b
                        }
                    })
                }
                return output
            })()
        })
        // YAML tag hook: values tagged !match run through the matcher.
        const match = {
            identify: value => value.startsWith('!match'),
            tag: '!match',
            resolve(str) {
                matcher.content = str
                return matcher.result
            }
        }
        this.#template = readYAML(path.join(__projectRoot, 'config', '_project_json.yaml'), [match])
        this.#process()
        return this.#json
    }

    /**
     * Prefer the operator's own project.json; fall back to the shared one.
     */
    #getPath() {
        // if exists, do not use the template
        const defaultPath = path.join(this.#operatorSourceFolder, this.#operatorName, 'project.json')
        if (exists(defaultPath)) {
            return defaultPath
        } else {
            return path.join(this.#operatorShareFolder, 'project.json')
        }
    }

    /**
     * Merge the rendered template into the loaded json (description, title,
     * localization and ordered general properties).
     */
    #process() {
        this.#json = {
            ...this.#json,
            description: this.#template.description,
            title: __config.operators[this.#operatorName].title,
            general: {
                ...this.#json.general,
                localization: this.#template.localization,
                properties: {
                    ...this.#properties
                }
            },
        }
    }

    /**
     * Template properties keyed by their `key`, with stable index/order
     * fields derived from their position in the template list.
     */
    get #properties() {
        const properties = this.#template.properties
        const output = {}
        for (let i = 0; i < properties.length; i++) {
            output[properties[i].key] = {
                index: i,
                order: 100 + i,
                ...properties[i].value
            }
        }
        return output
    }
}
|
||||
15
libs/version.js
Normal file
15
libs/version.js
Normal file
@@ -0,0 +1,15 @@
|
||||
import path from 'path'
|
||||
import { readSync, writeSync } from './file.js'
|
||||
|
||||
/**
 * Read the contents of the `Version` file inside `dir`.
 * @param {string} dir
 * @returns {string|null} version string, or null when the file is missing
 */
export function read(dir) {
    const versionFile = path.join(dir, 'Version')
    return readSync(versionFile)
}
|
||||
|
||||
/**
 * Bump the patch component of the `Version` file inside `dir` and persist it.
 * @param {string} dir
 * @returns {string} the new version string
 */
export function increase(dir) {
    // release version will be lagged by 0.0.1
    const [major, minor, patch] = read(dir).split('.')
    const bumped = [major, minor, Number(patch) + 1].join('.')
    writeSync(bumped, path.join(dir, 'Version'))
    return bumped
}
|
||||
19
libs/yaml.js
Normal file
19
libs/yaml.js
Normal file
@@ -0,0 +1,19 @@
|
||||
import path from 'path'
|
||||
import { parse } from 'yaml'
|
||||
import fs from 'fs'
|
||||
|
||||
/**
 * Parse a YAML file, supporting a custom `!include` tag that loads the
 * referenced file (relative to the current document) recursively.
 * @param {string} file_dir - path of the YAML file
 * @param {Object[]} [customTags=[]] - additional yaml custom tags
 * @returns {*} parsed document
 */
export function read(file_dir, customTags = []) {
    const include = {
        identify: (value) => value.startsWith('!include'),
        tag: '!include',
        resolve(str) {
            // Resolve relative to the including document, then recurse.
            return read(path.resolve(path.dirname(file_dir), str))
        },
    }
    const contents = fs.readFileSync(file_dir, 'utf8')
    return parse(contents, { customTags: [include, ...customTags] })
}
|
||||
Reference in New Issue
Block a user