feat: warframe-items build refactor to esm
AyAyEm committed Nov 29, 2022
1 parent 99bde60 commit 002ddfd
Showing 28 changed files with 178 additions and 180 deletions.
1 change: 1 addition & 0 deletions build/.eslintrc.yaml
@@ -1,3 +1,4 @@
extends: '@wfcd/eslint-config/esm'
rules:
no-console: off
import/no-extraneous-dependencies: off
58 changes: 31 additions & 27 deletions build/build.js → build/build.mjs 100755 → 100644
@@ -1,20 +1,22 @@
'use strict';

const fs = require('fs/promises');
const path = require('path');
const crypto = require('crypto');
const minify = require('imagemin');
const minifyPng = require('imagemin-pngquant');
const minifyJpeg = require('imagemin-jpegtran');
const fetch = require('node-fetch');
const sharp = require('sharp');
const Progress = require('./progress');
const stringify = require('./stringify');
const scraper = require('./scraper');
const parser = require('./parser');
const hashManager = require('./hashManager');
const imageCache = require('../data/cache/.images.json');
const exportCache = require('../data/cache/.export.json');
import fs from 'node:fs/promises';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import crypto from 'crypto';
import minify from 'imagemin';
import minifyPng from 'imagemin-pngquant';
import minifyJpeg from 'imagemin-jpegtran';
import fetch from 'node-fetch';
import sharp from 'sharp';

import Progress from './progress.mjs';
import stringify from './stringify.mjs';
import scraper from './scraper.mjs';
import parser from './parser.mjs';
import hashManager from './hashManager.mjs';
import readJson from './readJson.mjs';

const imageCache = await readJson(new URL('../data/cache/.images.json', import.meta.url));
const exportCache = await readJson(new URL('../data/cache/.export.json', import.meta.url));

const allowedCustomCategories = ['SentinelWeapons'];

@@ -125,15 +127,15 @@ class Build {
const data = categories[category].sort(sort);
all = all.concat(data);
await fs.writeFile(
path.join(__dirname, `../data/json/${category}.json`),
new URL(`../data/json/${category}.json`, import.meta.url),
JSON.stringify(JSON.parse(stringify(data)))
);
}

// All.json (all items in one file)
all.sort(sort);
await fs.writeFile(path.join(__dirname, '../data/json/All.json'), stringify(all));
await fs.writeFile(path.join(__dirname, '../data/json/i18n.json'), stringify(i18n));
await fs.writeFile(new URL('../data/json/All.json', import.meta.url), stringify(all));
await fs.writeFile(new URL('../data/json/i18n.json', import.meta.url), stringify(i18n));

return all;
}
@@ -160,7 +162,7 @@ class Build {
* @param {Warnings} warnings warnings to save to file
*/
async saveWarnings(warnings) {
return fs.writeFile(path.join(__dirname, '../data/warnings.json'), stringify(warnings));
return fs.writeFile(new URL('../data/warnings.json', import.meta.url), stringify(warnings));
}

/**
@@ -194,11 +196,14 @@ class Build {

// write the manifests after images have all succeeded
exportCache.Manifest.hash = manifestHash;
await fs.writeFile(path.join(__dirname, '../data/cache/.export.json'), JSON.stringify(exportCache, undefined, 1));
await fs.writeFile(
new URL('../data/cache/.export.json', import.meta.url),
JSON.stringify(exportCache, undefined, 1)
);

// Write new cache to disk
await fs.writeFile(
path.join(__dirname, '../data/cache/.images.json'),
new URL('../data/cache/.images.json', import.meta.url),
JSON.stringify(
imageCache.filter((i) => i.hash),
undefined,
@@ -220,7 +225,7 @@ class Build {
if (!imageBase) return;
const imageStub = imageBase.textureLocation.replace(/\\/g, '/').replace('xport/', '');
const imageUrl = `https://content.warframe.com/PublicExport/${imageStub}`;
const basePath = path.join(__dirname, '../data/img/');
const basePath = fileURLToPath(new URL('../data/img/', import.meta.url));
const filePath = path.join(basePath, item.imageName);
const hash = manifest.find((i) => i.uniqueName === item.uniqueName).fileTime;
const cached = imageCache.find((c) => c.uniqueName === item.uniqueName);
@@ -293,14 +298,13 @@ class Build {
* @param {module:warframe-patchlogs.Patchlogs} patchlogs for pulling the latest update
*/
async updateReadme(patchlogs) {
// eslint-disable-next-line import/no-dynamic-require
const logob64 = require(path.join(__dirname, '../data/logo.json'));
const logob64 = await readJson(new URL('../data/logo.json', import.meta.url));
const version = patchlogs.posts[0].name
.replace(/ \+ /g, '--')
.replace(/[^0-9\-.]/g, '')
.trim();
const { url } = patchlogs.posts[0];
const readmeLocation = path.join(__dirname, '../README.md');
const readmeLocation = new URL('../README.md', import.meta.url);
const readmeOld = await fs.readFile(readmeLocation, 'utf-8');
const readmeNew = readmeOld.replace(
/\[!\[warframe update.*/,
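The change repeated throughout build.mjs is swapping path.join(__dirname, ...) for new URL(relativePath, import.meta.url), since ESM has no __dirname; the fs/promises APIs accept URL objects directly, and fileURLToPath is reserved for the one spot where a plain path string is still needed (the image directory joined with item.imageName). A minimal sketch of that pattern, assuming hypothetical example.json and example.png targets:

import fs from 'node:fs/promises';
import path from 'node:path';
import { fileURLToPath } from 'node:url';

// fs/promises accepts WHATWG file: URLs, so no string path is required here.
await fs.writeFile(new URL('../data/json/example.json', import.meta.url), JSON.stringify({ ok: true }));

// When a later path.join needs a string, as in the image-saving hunk above, convert the directory URL first.
const imgDir = fileURLToPath(new URL('../data/img/', import.meta.url));
const filePath = path.join(imgDir, 'example.png');
console.log(filePath);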
6 changes: 2 additions & 4 deletions build/dedupe.js → build/dedupe.mjs
@@ -1,13 +1,11 @@
'use strict';

const objectsort = require('./objectsort');
import objectsort from './objectsort.mjs';

/**
* Simple deduplication leveraging reduce (and equality based on stringification)
* @param {Iterable<*>} iter Should be an iterable object, but function will check first.
* @returns {Iterable<*>} type will be whatever was originally passed in
*/
module.exports = (iter) => {
export default (iter) => {
return Array.isArray(iter)
? iter
.reduce(
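A quick usage sketch of the deduplicator, based on the stringification equality described in the JSDoc above (the sample items are made up):

import dedupe from './dedupe.mjs';

// Entries that serialize to identical JSON count as duplicates.
const hits = [{ name: 'Braton' }, { name: 'Braton' }, { name: 'Lato' }];
console.log(dedupe(hits)); // expected: [ { name: 'Braton' }, { name: 'Lato' } ]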
16 changes: 8 additions & 8 deletions build/hashManager.js → build/hashManager.mjs
@@ -1,10 +1,10 @@
'use strict';
import fs from 'node:fs/promises';

const fs = require('fs/promises');
const path = require('path');
const scraper = require('./scraper');
const exportCache = require('../data/cache/.export.json');
const locales = require('../config/locales.json');
import scraper from './scraper.mjs';
import readJson from './readJson.mjs';

const exportCache = await readJson(new URL('../data/cache/.export.json', import.meta.url));
const locales = await readJson(new URL('../config/locales.json', import.meta.url));

const exportKeyWhitelist = ['Manifest', 'DropChances', 'Patchlogs'];

@@ -31,7 +31,7 @@ class HashManager {

async saveExportCache() {
await fs.writeFile(
path.join(__dirname, '../data/cache/.export.json'),
new URL('../data/cache/.export.json', import.meta.url),
JSON.stringify(this.exportCache, undefined, 1)
);
}
@@ -55,4 +55,4 @@ class HashManager {
}
}

module.exports = new HashManager();
export default new HashManager();
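Exporting new HashManager() as the default keeps the previous singleton behaviour: ES modules are evaluated once and cached, and the top-level await readJson(...) calls settle before any importer's code runs. A small sketch from a hypothetical consumer's point of view:

// consumer.mjs (hypothetical)
import hashManager from './hashManager.mjs';
import theSameManager from './hashManager.mjs';

// Both bindings resolve to the single cached instance created in hashManager.mjs.
console.log(hashManager === theSameManager); // true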
4 changes: 1 addition & 3 deletions build/objectsort.js → build/objectsort.mjs
@@ -1,12 +1,10 @@
'use strict';

/**
* Sorting objects by keys
* https://www.w3docs.com/snippets/javascript/how-to-sort-javascript-object-by-key.html
* @param {Record<string, *>} obj object to be sorted
* @returns {Record<string, *>} same as {@param obj} but sorted keys
*/
module.exports = (obj) => {
export default (obj) => {
return Object.keys(obj)
.sort()
.reduce(function (result, key) {
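Usage is unchanged apart from the import; per the JSDoc, keys come back sorted and values are untouched:

import objectsort from './objectsort.mjs';

console.log(objectsort({ zephyr: 3, ash: 1, mesa: 2 }));
// → { ash: 1, mesa: 2, zephyr: 3 }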
32 changes: 17 additions & 15 deletions build/parser.js → build/parser.mjs 100755 → 100644
@@ -1,13 +1,18 @@
'use strict';

const _ = require('lodash');
const Progress = require('./progress');
const previousBuild = require('../data/json/All.json');
const watson = require('../config/dt_map.json');
const bpConflicts = require('../config/bpConflicts.json');
const { prefixes, suffixes } = require('../config/variants.json');
const dedupe = require('./dedupe');
const tradable = require('./tradable');
import _ from 'lodash';

import Progress from './progress.mjs';
import dedupe from './dedupe.mjs';
import tradable from './tradable.mjs';
import readJson from './readJson.mjs';

const previousBuild = await readJson(new URL('../data/json/All.json', import.meta.url));
const watson = await readJson(new URL('../config/dt_map.json', import.meta.url));
const bpConflicts = await readJson(new URL('../config/bpConflicts.json', import.meta.url));
const { prefixes, suffixes } = await readJson(new URL('../config/variants.json', import.meta.url));
const grades = await readJson(new URL('../config/relicGrades.json', import.meta.url));
const polarities = await readJson(new URL('../config/polarities.json', import.meta.url));
const types = await readJson(new URL('../config/itemTypes.json', import.meta.url));
const overrides = await readJson(new URL('../config/overrides.json', import.meta.url));

/**
* Titlecase a string
@@ -372,7 +377,6 @@ class Parser {

// Relics don't have their grade in the name for some reason
if (item.type === 'Relic') {
const grades = require('../config/relicGrades.json');
// eslint-disable-next-line no-restricted-syntax
for (const grade of grades) {
if (item.uniqueName.includes(grade.id)) {
@@ -396,7 +400,6 @@ class Parser {

// Use proper polarity names
if (item.polarity) {
const polarities = require('../config/polarities.json');
const polarity = polarities.find((p) => p.id === item.polarity);
if (polarity) {
item.polarity = polarity.name;
@@ -424,7 +427,6 @@ class Parser {
*/
addType(item) {
if (item.parent) return;
const types = require('../config/itemTypes.json');
// eslint-disable-next-line no-restricted-syntax
for (const type of types) {
const contains = type.regex ? new RegExp(type.id, 'ig').test(item.uniqueName) : item.uniqueName.includes(type.id);
@@ -980,7 +982,7 @@ class Parser {
applyOverrides(item) {
// universal polarity casing override
if (item.polarity) item.polarity = item.polarity.toLowerCase();
const override = require('../config/overrides.json')[item.uniqueName];
const override = overrides[item.uniqueName];
if (override) {
Object.keys(override).forEach((key) => {
item[key] = override[key];
@@ -1080,4 +1082,4 @@ class Parser {
}
}

module.exports = new Parser();
export default new Parser();
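Besides the import swap, the parser hoists the config reads that used to be inline require() calls (relicGrades, polarities, itemTypes, overrides) up to module scope as top-level await readJson(...) loads, so the configs are loaded up front rather than resolved inside the parsing methods. The shape of that change, sketched on relicGrades.json:

import readJson from './readJson.mjs';

// Before (CJS): resolved lazily inside the method.
//   const grades = require('../config/relicGrades.json');

// After (ESM): loaded once while the module is evaluated, then reused by every call.
const grades = await readJson(new URL('../config/relicGrades.json', import.meta.url));

// Hypothetical helper mirroring the relic-grade lookup in the hunk above.
const gradeFor = (uniqueName) => grades.find((grade) => uniqueName.includes(grade.id));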
7 changes: 3 additions & 4 deletions build/progress.js → build/progress.mjs 100755 → 100644
@@ -1,8 +1,7 @@
'use strict';
import ProgressBar from 'progress';
import colors from 'colors/safe.js';

const prod = process.env.NODE_ENV === 'production';
const ProgressBar = require('progress');
const colors = require('colors/safe');

/**
* Simple progress bar
@@ -26,7 +25,7 @@ class Progress extends ProgressBar {
* Use dummy object in prod because pm2 won't render
* the progress bar properly.
*/
module.exports = prod
export default prod
? class Empty {
interrupt() {}
tick() {}
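The conditional default export preserves the old behaviour: in production, where pm2 cannot render the bar, importers receive a class whose methods are no-ops, so call sites never have to branch on the environment. A stripped-down sketch of that pattern (illustrative, not the module itself):

// progressPattern.mjs (illustrative only)
const prod = process.env.NODE_ENV === 'production';

class RealProgress {
  constructor(label) {
    this.label = label;
  }

  tick() {
    console.log(`${this.label}: tick`);
  }
}

class NoopProgress {
  interrupt() {}

  tick() {}
}

export default prod ? NoopProgress : RealProgress;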
16 changes: 16 additions & 0 deletions build/readJson.mjs
@@ -0,0 +1,16 @@
import fs from 'node:fs/promises';

const jsonCache = new Map();

/**
* @param {URL} jsonURL URL to json file
* @returns {Promise<Record<string, *> | Array<*>>}
*/
export default async (jsonURL) => {
const { pathname } = jsonURL;
if (jsonCache.has(pathname)) {
return jsonCache.get(pathname);
}

return JSON.parse(await fs.readFile(jsonURL, { encoding: 'utf-8' }));
};
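Call sites pass a URL built from import.meta.url and await the parsed value at module scope, which is what replaces the old synchronous require() of JSON files. (As committed, jsonCache is checked but never written to, so every call falls through to fs.readFile.) A usage sketch against the existing locales config:

import readJson from './readJson.mjs';

// Top-level await keeps call sites nearly as terse as the old require().
const locales = await readJson(new URL('../config/locales.json', import.meta.url));
console.log(locales);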
48 changes: 27 additions & 21 deletions build/scraper.js → build/scraper.mjs 100755 → 100644
@@ -1,23 +1,24 @@
'use strict';
import Agent from 'socks5-http-client/lib/Agent.js';
import fetch from 'node-fetch';
import crypto from 'crypto';
import lzma from 'lzma';
import fs from 'node:fs/promises';
import cheerio from 'cheerio';

const prod = process.env.NODE_ENV === 'production';
const Agent = require('socks5-http-client/lib/Agent');
const fetch = require('node-fetch');
const crypto = require('crypto');
const lzma = require('lzma');
const fs = require('node:fs/promises');
const path = require('path');
const cheerio = require('cheerio');
import { Generator as RelicGenerator } from '@wfcd/relics';
import patchlogs from 'warframe-patchlogs';

import Progress from './progress.mjs';
import ModScraper from './wikia/scrapers/ModScraper.mjs';
import WeaponScraper from './wikia/scrapers/WeaponScraper.mjs';
import WarframeScraper from './wikia/scrapers/WarframeScraper.mjs';
import VersionScraper from './wikia/scrapers/VersionScraper.mjs';
import readJson from './readJson.mjs';

const { Generator: RelicGenerator } = require('@wfcd/relics');
const exportCache = await readJson(new URL('../data/cache/.export.json', import.meta.url));
const locales = await readJson(new URL('../config/locales.json', import.meta.url));

const Progress = require('./progress');
const exportCache = require('../data/cache/.export.json');
const locales = require('../config/locales.json');
const ModScraper = require('./wikia/scrapers/ModScraper');
const WeaponScraper = require('./wikia/scrapers/WeaponScraper');
const WarframeScraper = require('./wikia/scrapers/WarframeScraper');
const VersionScraper = require('./wikia/scrapers/VersionScraper');
const prod = process.env.NODE_ENV === 'production';
// eslint-disable-next-line no-control-regex
const sanitize = (str) => str.replace(/\\r|\r?\n|\x09/g, '').replace(/\\\\"/g, "'");
const agent = process.env.SOCK5_HOST
@@ -149,7 +150,10 @@ class Scraper {
// Update checksum
if (changed) {
exportCache.DropChances.hash = ratesHash;
await fs.writeFile(path.join(__dirname, '../data/cache/.export.json'), JSON.stringify(exportCache, undefined, 1));
await fs.writeFile(
new URL('../data/cache/.export.json', import.meta.url),
JSON.stringify(exportCache, undefined, 1)
);
}

bar.tick();
@@ -170,13 +174,15 @@ class Scraper {
*/
async fetchPatchLogs() {
const bar = new Progress('Fetching Patchlogs', 1);
const patchlogs = require('warframe-patchlogs');
const patchlogsHash = crypto.createHash('md5').update(JSON.stringify(patchlogs.posts)).digest('hex');
const changed = exportCache.Patchlogs.hash !== patchlogsHash;

if (changed) {
exportCache.Patchlogs.hash = patchlogsHash;
await fs.writeFile(path.join(__dirname, '../data/cache/.export.json'), JSON.stringify(exportCache, undefined, 1));
await fs.writeFile(
new URL('../data/cache/.export.json', import.meta.url),
JSON.stringify(exportCache, undefined, 1)
);
}

bar.tick();
@@ -287,4 +293,4 @@ class Scraper {
}
}

module.exports = new Scraper();
export default new Scraper();
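Both hunks above share the same change-detection idiom: hash the fetched payload with md5, compare it to the hash stored in .export.json, and rewrite the cache file only when it differs. A condensed sketch of that idiom (payload is a stand-in for the drop rates or patchlog posts):

import crypto from 'node:crypto';
import fs from 'node:fs/promises';

const cacheUrl = new URL('../data/cache/.export.json', import.meta.url);
const exportCache = JSON.parse(await fs.readFile(cacheUrl, 'utf-8'));

const payload = { example: true }; // stand-in for the fetched data
const hash = crypto.createHash('md5').update(JSON.stringify(payload)).digest('hex');

if (exportCache.DropChances.hash !== hash) {
  exportCache.DropChances.hash = hash;
  await fs.writeFile(cacheUrl, JSON.stringify(exportCache, undefined, 1));
}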
6 changes: 2 additions & 4 deletions build/stringify.js → build/stringify.mjs 100755 → 100644
@@ -1,6 +1,4 @@
'use strict';

const dedupe = require('./dedupe');
import dedupe from './dedupe.mjs';

/**
* Pretty print JSON as it should be.
@@ -15,4 +13,4 @@ const replacer = (key, value) => (isArrayOfPrimitive(value) ? format(value) : value);
const expand = (str) => str.replace(/"\^\^\^(\[ .* ])"/g, (match, a) => a.replace(/\\"/g, '"')).replace(/\\\\"/g, "'");
const stringify = (obj) => expand(JSON.stringify(Array.isArray(obj) ? dedupe(obj) : obj, replacer, 2));

module.exports = stringify;
export default stringify;
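A usage sketch of the formatter: arrays of primitives are collapsed onto a single line by the replacer/expand pair, while everything else keeps the regular two-space indentation (output shape inferred from the code above, not verified against a build):

import stringify from './stringify.mjs';

console.log(stringify({ name: 'Braton', damage: [20, 10, 5] }));
// Roughly:
// {
//   "name": "Braton",
//   "damage": [ 20, 10, 5 ]
// }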
4 changes: 1 addition & 3 deletions build/tradable.js → build/tradable.mjs
@@ -1,5 +1,3 @@
'use strict';

const builtUntradable = ['Warframe', 'Throwing', 'Shotgun', 'Rifle', 'Pistol', 'Melee', 'Sword And Shield'];
const tradableConditions = (item) => !(builtUntradable.includes(item.type) && item.name.match(/Prime/gi));

@@ -51,7 +49,7 @@ const untradableRegex =
* @param {module:warframe-items.Item} item Item to determine tradability
* @returns {boolean}
*/
module.exports = (item) => {
export default (item) => {
const notFiltered =
!untradableTypes.includes(item.type) &&
!item.name.match(untradableRegex) &&
