Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add Dockerfile, read raw romfs with zstd+sarc+byml #10

Open
wants to merge 2 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
# Container image for the radar map server: Node for the server/build script,
# plus the romfs extraction tools that build.ts shells out to.
FROM node:18
WORKDIR /radar
COPY . .
# ts-node is invoked from node_modules in CMD; typescript is also installed
# globally for ad-hoc use inside the container.
RUN npm install && npm install typescript -g
# zstd decompresses the game's .zs files; sarc/byml (pip) unpack archives and
# convert binary YAML. --break-system-packages bypasses PEP 668's
# externally-managed-environment guard on newer Debian-based images.
RUN apt-get update && apt-get install -y zstd python3-pip && pip3 install sarc byml --break-system-packages
# Build the map database from the romfs mounted at /romfs, then start the dev
# server (see README for the matching `docker run -v` invocation).
CMD ./node_modules/.bin/ts-node ./build.ts -r /romfs -e ./tools && \
npm run dev
8 changes: 8 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,3 +6,11 @@ Run build.ts to generate a map database before starting the server for the first
ts-node build.ts -d ../totk/Banc

This assumes the `totk/Banc` directory contains the YAML data object map files

ts-node build.ts -r ../totk -e tools

This assumes the `totk` directory contains the unaltered romfs contents.

For docker usage: `docker build -t radar .; docker run -it --rm --name radar -v /path/to/your/romfs:/romfs radar`

It's possible to build the db within docker and copy it out for the server to use, if you'd rather not install the extraction tools used in build.ts on your local machine.
3 changes: 1 addition & 2 deletions beco.ts
Original file line number Diff line number Diff line change
Expand Up @@ -28,9 +28,8 @@ export class Beco {
// Offsets to row data, divided by 2 and relative to the start of the row section
offsets: number[]; // u32, size num_rows
segments: BecoSegment[][]; // Rows x Segments
constructor(file: string) {
constructor(buf: Buffer) {
let little = true;
let buf = fs.readFileSync(file);
let arr = new Uint8Array(buf.byteLength);
buf.copy(arr, 0, 0, buf.byteLength);
let dv = new DataView(arr.buffer);
Expand Down
73 changes: 56 additions & 17 deletions build.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import { execSync } from 'child_process';
import sqlite3 from 'better-sqlite3';
import fs from 'fs';
import yaml from 'js-yaml';
Expand All @@ -6,17 +7,22 @@ import { Beco } from './beco';

let parseArgs = require('minimist');
let argv = parseArgs(process.argv);
if (!argv.d || !argv.b || !argv.e) {
console.log("Error: Must specify paths to directories with ");
const validRomfsArgs = (argv.e && argv.r);
const validFolderArgs = (argv.e && argv.b && argv.d);
if (!validRomfsArgs && !validFolderArgs) {
console.log("Error: Must specify paths to directories with -e and either -r or (-b and -d)");
console.log(" -d Banc extracted YAML files");
console.log(" -b field map area beco files");
console.log(" -e Ecosystem json files");
console.log(" -r Bare game romfs");
console.log(" e.g. % ts-node build.ts -d path/to/Banc -b path/to/beco -e path/to/Ecosystem")
console.log(" or: % ts-node build.ts -r path/to/romfs -e path/to/Ecosystem")
process.exit(1);
}
const totkData = argv.d
const becoPath = argv.b;
const ecoPath = argv.e;
const romfsPath = argv.r;
const totkData = argv.d || path.join(romfsPath, 'Banc');
const becoPath = argv.b || path.join(romfsPath, 'Ecosystem', 'FieldMapArea');

fs.rmSync('map.db.tmp', { force: true });
const db = sqlite3('map.db.tmp');
Expand Down Expand Up @@ -85,14 +91,29 @@ const LOCATIONS = JSON.parse(fs.readFileSync('LocationMarker.json', 'utf8'))
const KOROKS = JSON.parse(fs.readFileSync('koroks_id.json', 'utf8'))
const DROP_TABLES = JSON.parse(fs.readFileSync('drop_tables.json', 'utf8'))

const BCETT_YAML_SUFFIXES = /\.bcett\.b?yml(\.zs)?$/;

const DropTableDefault = "Default";
const DROP_TYPE_ACTOR = "Actor";
const DROP_TYPE_TABLE = "Table";

const BecoGround = new Beco(path.join(becoPath, 'Ground.beco'));
const BecoMinus = new Beco(path.join(becoPath, 'MinusField.beco'));
const BecoSky = new Beco(path.join(becoPath, 'Sky.beco'));
const BecoCave = new Beco(path.join(becoPath, 'Cave.beco'));
// Lazily extract the game's zstd dictionaries (ZsDic.pack) on first use and
// memoize the extraction directory, so the external zstd/sarc tools run at
// most once per build invocation.
const getZsDicPath = (() => {
  let cachedDir = "";
  return (): string => {
    if (cachedDir) {
      return cachedDir;
    }
    cachedDir = fs.mkdtempSync('zsdicpack');
    execSync(`zstd -d "${romfsPath}/Pack/ZsDic.pack.zs" -o "${cachedDir}/ZsDic.pack"`);
    execSync(`sarc x --directory "${cachedDir}" "${cachedDir}/ZsDic.pack"`);
    return cachedDir;
  };
})();

const BecoGround = new Beco(readRawBeco('Ground'));
const BecoMinus = new Beco(readRawBeco('MinusField'));
const BecoSky = new Beco(readRawBeco('Sky'));
const BecoCave = new Beco(readRawBeco('Cave'));

// Should probably be yaml not json for consistency
const Ecosystem = Object.fromEntries(['Cave', 'Ground', 'MinusField', 'Sky'].map(name => {
Expand Down Expand Up @@ -192,6 +213,27 @@ function parseHash(hash: string) {
return '0x' + BigInt(hash).toString(16).padStart(16, '0');
}

// Read a field-map-area beco file, preferring the raw file and falling back
// to the zstd-compressed .zs variant (decompressed with the game's shared
// dictionary). Returns the raw beco bytes; throws if neither form exists.
function readRawBeco(name: string): Buffer {
  const filePath = path.join(becoPath, name + '.beco');
  if (fs.existsSync(filePath)) {
    return fs.readFileSync(filePath);
  } else if (fs.existsSync(filePath + '.zs')) {
    // Quote the input path (the dictionary path already was) so directories
    // containing spaces or shell metacharacters don't break the command.
    return execSync(`zstd -D "${getZsDicPath()}/zs.zsdic" -d "${filePath}.zs" -c`, { maxBuffer: 1073741824 });
  }
  throw Error(`No beco file found for ${name}`);
}

// Load a map file as YAML text, handling the three on-disk forms: plain
// .yml (read directly), binary .byml (converted via byml_to_yml), and
// zstd-compressed .byml.zs (dictionary-decompressed, then converted).
// Throws if the path matches none of the known suffixes.
function readRawYaml(filePath: string): string {
  if (filePath.endsWith('.yml')) {
    return fs.readFileSync(filePath, 'utf-8').toString();
  } else if (filePath.endsWith('.byml')) {
    // Quote the path so spaces/shell metacharacters survive the shell.
    return execSync(`byml_to_yml "${filePath}" -`, { maxBuffer: 1073741824 }).toString();
  } else if (filePath.endsWith('.byml.zs')) {
    return execSync(`zstd -D "${getZsDicPath()}/bcett.byml.zsdic" -d "${filePath}" -c | byml_to_yml - -`, { maxBuffer: 1073741824 }).toString();
  }
  throw Error(`No yml file found at ${filePath}`);
}

function getKorokType(hideType: number | undefined, name: string) {
if (name == 'KorokCarryProgressKeeper') {
return 'Korok Friends';
Expand All @@ -215,9 +257,7 @@ function getKorokType(hideType: number | undefined, name: string) {
function processBanc(filePath: string, mapType: string, mapName: string) {
let doc: any = null;
try {
doc = yaml.load(fs.readFileSync(filePath, 'utf-8'),
{ schema: schema }
);
doc = yaml.load(readRawYaml(filePath), { schema: schema });
} catch (e: any) {
console.log("Error: ", e);
process.exit(1);
Expand Down Expand Up @@ -443,13 +483,13 @@ function processBancs() {
const dirPath = path.join(totkData, field);
let files = fs.readdirSync(dirPath);
for (const file of files) {
if (!file.endsWith('.bcett.yml'))
if (!file.match(BCETT_YAML_SUFFIXES))
continue;
let filePath = path.join(dirPath, file);

const fieldParts = field.split("/");
let mapName = file
.replace(".bcett.yml", "")
.replace(BCETT_YAML_SUFFIXES, "")
.replace("_Static", "")
.replace("_Dynamic", "");
const mapType = fieldParts[0];
Expand All @@ -463,12 +503,12 @@ function processBancs() {
for (const mapType of ["SmallDungeon", "LargeDungeon", "NormalStage"]) {
const dirPath = path.join(totkData, mapType);
for (const file of fs.readdirSync(dirPath)) {
if (!file.endsWith('.bcett.yml'))
if (!file.match(BCETT_YAML_SUFFIXES))
continue;

const filePath = path.join(dirPath, file);
const mapName = file
.replace(".bcett.yml", "")
.replace(BCETT_YAML_SUFFIXES, "")
.replace("_Static", "")
.replace("_Dynamic", "");
processBanc(filePath, mapType, mapName);
Expand Down Expand Up @@ -496,8 +536,7 @@ function processRecycleBox() {
console.log("process recyclebox: ", filePath)
let doc: any = null;
try {
doc = yaml.load(fs.readFileSync(filePath, 'utf-8'),
{ schema: schema });
doc = yaml.load(readRawYaml(filePath), { schema: schema });
} catch (e: any) {
console.log("Error: ", e);
process.exit(1);
Expand Down