Skip to content

Commit

Permalink
feat: warframe-items build refactor to esm
Browse files Browse the repository at this point in the history
  • Loading branch information
AyAyEm committed Nov 26, 2022
1 parent 99bde60 commit cf46e22
Show file tree
Hide file tree
Showing 31 changed files with 292 additions and 255 deletions.
8 changes: 8 additions & 0 deletions build/.eslintrc.yaml
Original file line number Diff line number Diff line change
@@ -1,3 +1,11 @@
extends: '@wfcd/eslint-config/esm'
parserOptions:
babelOptions:
plugins:
- '@babel/plugin-syntax-import-assertions'
rules:
no-console: off
import/no-extraneous-dependencies: off
import/extensions:
- error
- always
66 changes: 34 additions & 32 deletions build/build.js → build/build.mjs
100755 → 100644
Original file line number Diff line number Diff line change
@@ -1,20 +1,18 @@
'use strict';

const fs = require('fs/promises');
const path = require('path');
const crypto = require('crypto');
const minify = require('imagemin');
const minifyPng = require('imagemin-pngquant');
const minifyJpeg = require('imagemin-jpegtran');
const fetch = require('node-fetch');
const sharp = require('sharp');
const Progress = require('./progress');
const stringify = require('./stringify');
const scraper = require('./scraper');
const parser = require('./parser');
const hashManager = require('./hashManager');
const imageCache = require('../data/cache/.images.json');
const exportCache = require('../data/cache/.export.json');
import fs from 'fs/promises';
import { createHash } from 'crypto';
import minify from 'imagemin';
import minifyPng from 'imagemin-pngquant';
import minifyJpeg from 'imagemin-jpegtran';
import fetch from 'node-fetch';
import sharp from 'sharp';

import Progress from './progress.mjs';
import { stringify } from './stringify.mjs';
import { scraper } from './scraper.mjs';
import { parser } from './parser.mjs';
import { hashManager } from './hashManager.mjs';
import imagesCache from '../data/cache/.images.json' assert { type: 'json' };
import exportCache from '../data/cache/.export.json' assert { type: 'json' };

const allowedCustomCategories = ['SentinelWeapons'];

Expand Down Expand Up @@ -125,15 +123,15 @@ class Build {
const data = categories[category].sort(sort);
all = all.concat(data);
await fs.writeFile(
path.join(__dirname, `../data/json/${category}.json`),
new URL(`../data/json/${category}.json`, import.meta.url),
JSON.stringify(JSON.parse(stringify(data)))
);
}

// All.json (all items in one file)
all.sort(sort);
await fs.writeFile(path.join(__dirname, '../data/json/All.json'), stringify(all));
await fs.writeFile(path.join(__dirname, '../data/json/i18n.json'), stringify(i18n));
await fs.writeFile(new URL('../data/json/All.json', import.meta.url), stringify(all));
await fs.writeFile(new URL('../data/json/i18n.json', import.meta.url), stringify(i18n));

return all;
}
Expand All @@ -160,7 +158,7 @@ class Build {
* @param {Warnings} warnings warnings to save to file
*/
async saveWarnings(warnings) {
return fs.writeFile(path.join(__dirname, '../data/warnings.json'), stringify(warnings));
return fs.writeFile(new URL('../data/warnings.json', import.meta.url), stringify(warnings));
}

/**
Expand All @@ -170,7 +168,7 @@ class Build {
* @async
*/
async saveImages(items, manifest) {
const manifestHash = crypto.createHash('md5').update(JSON.stringify(manifest)).digest('hex');
const manifestHash = createHash('md5').update(JSON.stringify(manifest)).digest('hex');
// No need to go through every item if the manifest didn't change. I'm
// guessing the `fileTime` key in each element works more or less like a
// hash, so any change to that changes the hash of the full thing.
Expand All @@ -194,13 +192,16 @@ class Build {

// write the manifests after images have all succeeded
exportCache.Manifest.hash = manifestHash;
await fs.writeFile(path.join(__dirname, '../data/cache/.export.json'), JSON.stringify(exportCache, undefined, 1));
await fs.writeFile(
new URL('../data/cache/.export.json', import.meta.url),
JSON.stringify(exportCache, undefined, 1)
);

// Write new cache to disk
await fs.writeFile(
path.join(__dirname, '../data/cache/.images.json'),
new URL('../data/cache/.images.json', import.meta.url),
JSON.stringify(
imageCache.filter((i) => i.hash),
imagesCache.filter((i) => i.hash),
undefined,
1
)
Expand All @@ -220,10 +221,10 @@ class Build {
if (!imageBase) return;
const imageStub = imageBase.textureLocation.replace(/\\/g, '/').replace('xport/', '');
const imageUrl = `https://content.warframe.com/PublicExport/${imageStub}`;
const basePath = path.join(__dirname, '../data/img/');
const filePath = path.join(basePath, item.imageName);
const basePath = new URL('../data/img/', import.meta.url);
const filePath = new URL(item.imageName, basePath);
const hash = manifest.find((i) => i.uniqueName === item.uniqueName).fileTime;
const cached = imageCache.find((c) => c.uniqueName === item.uniqueName);
const cached = imagesCache.find((c) => c.uniqueName === item.uniqueName);

// We'll use a custom blueprint image
if (item.name === 'Blueprint') return;
Expand Down Expand Up @@ -277,7 +278,7 @@ class Build {
*/
updateCache(item, cached, hash, isComponent) {
if (!cached) {
imageCache.push({
imagesCache.push({
uniqueName: item.uniqueName,
hash,
isComponent,
Expand All @@ -293,14 +294,15 @@ class Build {
* @param {module:warframe-patchlogs.Patchlogs} patchlogs for pulling the latest update
*/
async updateReadme(patchlogs) {
// eslint-disable-next-line import/no-dynamic-require
const logob64 = require(path.join(__dirname, '../data/logo.json'));
const { default: logob64 } = await import(new URL('../data/logo.json', import.meta.url), {
assert: { type: 'json' },
});
const version = patchlogs.posts[0].name
.replace(/ \+ /g, '--')
.replace(/[^0-9\-.]/g, '')
.trim();
const { url } = patchlogs.posts[0];
const readmeLocation = path.join(__dirname, '../README.md');
const readmeLocation = new URL('../README.md', import.meta.url);
const readmeOld = await fs.readFile(readmeLocation, 'utf-8');
const readmeNew = readmeOld.replace(
/\[!\[warframe update.*/,
Expand Down
12 changes: 6 additions & 6 deletions build/dedupe.js → build/dedupe.mjs
Original file line number Diff line number Diff line change
@@ -1,21 +1,21 @@
'use strict';

const objectsort = require('./objectsort');
import { sortObject } from './sortObject.mjs';

/**
* Simple deduplication leveraging reduce (and equality based on stringification)
* @param {Iterable<*>} iter Should be an iterable object, but function will check first.
* @returns {Iterable<*>} type will be whatever was originally passed in
*/
module.exports = (iter) => {
/**
 * Deduplicate an array using structural equality (key-order-insensitive),
 * based on the JSON stringification of each element after key-sorting via
 * `sortObject`. Non-array inputs are returned unchanged.
 *
 * Note: kept elements are re-parsed from their canonical JSON form, so
 * returned objects are key-sorted copies, not the original references —
 * this matches the previous implementation's behavior.
 *
 * @param {Iterable<*>} iter Should be an array; anything else passes through.
 * @returns {Iterable<*>} Deduplicated array (first occurrence wins), or the
 *   original value when `iter` is not an array.
 */
export function dedupe(iter) {
  if (!Array.isArray(iter)) return iter;

  // Track canonical JSON keys in a Set for O(1) membership checks.
  // The old reduce-based version called `acc.includes` per element (O(n²)).
  const seen = new Set();
  const result = [];
  for (const item of iter) {
    const key = JSON.stringify(sortObject(item));
    if (!seen.has(key)) {
      seen.add(key);
      // Parse back from the canonical form, as the original did.
      result.push(JSON.parse(key));
    }
  }
  return result;
}

export default dedupe;
18 changes: 9 additions & 9 deletions build/hashManager.js → build/hashManager.mjs
Original file line number Diff line number Diff line change
@@ -1,14 +1,12 @@
'use strict';
import fs from 'fs/promises';

const fs = require('fs/promises');
const path = require('path');
const scraper = require('./scraper');
const exportCache = require('../data/cache/.export.json');
const locales = require('../config/locales.json');
import { scraper } from './scraper.mjs';
import exportCache from '../data/cache/.export.json' assert { type: 'json' };
import locales from '../config/locales.json' assert { type: 'json' };

const exportKeyWhitelist = ['Manifest', 'DropChances', 'Patchlogs'];

class HashManager {
export class HashManager {
constructor() {
this.exportCache = Object.fromEntries(
exportKeyWhitelist.filter((key) => key in exportCache).map((key) => [key, exportCache[key]])
Expand All @@ -31,7 +29,7 @@ class HashManager {

async saveExportCache() {
await fs.writeFile(
path.join(__dirname, '../data/cache/.export.json'),
new URL('../data/cache/.export.json', import.meta.url),
JSON.stringify(this.exportCache, undefined, 1)
);
}
Expand All @@ -55,4 +53,6 @@ class HashManager {
}
}

module.exports = new HashManager();
export const hashManager = new HashManager();

export default hashManager;
51 changes: 28 additions & 23 deletions build/parser.js → build/parser.mjs
100755 → 100644
Original file line number Diff line number Diff line change
@@ -1,13 +1,16 @@
'use strict';

const _ = require('lodash');
const Progress = require('./progress');
const previousBuild = require('../data/json/All.json');
const watson = require('../config/dt_map.json');
const bpConflicts = require('../config/bpConflicts.json');
const { prefixes, suffixes } = require('../config/variants.json');
const dedupe = require('./dedupe');
const tradable = require('./tradable');
import _ from 'lodash';

import Progress from './progress.mjs';
import { dedupe } from './dedupe.mjs';
import { isTradable } from './tradable.mjs';
import previousItems from '../data/json/All.json' assert { type: 'json' };
import watson from '../config/dt_map.json' assert { type: 'json' };
import bpConflicts from '../config/bpConflicts.json' assert { type: 'json' };
import variants from '../config/variants.json' assert { type: 'json' };
import overrides from '../config/overrides.json' assert { type: 'json' };
import relicGrades from '../config/relicGrades.json' assert { type: 'json' };
import itemTypes from '../config/itemTypes.json' assert { type: 'json' };
import polarities from '../config/polarities.json' assert { type: 'json' };

/**
* Titlecase a string
Expand All @@ -31,7 +34,10 @@ const warnings = {
const filterBps = (blueprint) => !bpConflicts.includes(blueprint.uniqueName);
const primeExcludeRegex = /(^Noggle .*|Extractor .*|^[A-Z] Prime$|^Excalibur .*|^Lato .*|^Skana .*)/i;
const prefixed = (name) =>
new RegExp(`(((?:${prefixes.join('|')})\\s?${name}.*)|(?:${name}\\s?(?:${suffixes.join('|')})\\s?.*))+`, 'i');
new RegExp(
`(((?:${variants.prefixes.join('|')})\\s?${name}.*)|(?:${name}\\s?(?:${variants.suffixes.join('|')})\\s?.*))+`,
'i'
);

/**
* Drop comparator
Expand Down Expand Up @@ -90,7 +96,7 @@ const dropMap = (drop) => {
/**
* Parse API data into a more clear or complete format.
*/
class Parser {
export class Parser {
/**
* @typedef {Object} ParsedData
* @property {Array<Item>} data
Expand Down Expand Up @@ -372,9 +378,8 @@ class Parser {

// Relics don't have their grade in the name for some reason
if (item.type === 'Relic') {
const grades = require('../config/relicGrades.json');
// eslint-disable-next-line no-restricted-syntax
for (const grade of grades) {
for (const grade of relicGrades) {
if (item.uniqueName.includes(grade.id)) {
item.name = item.name.replace('Relic', grade.refinement);
}
Expand All @@ -396,7 +401,6 @@ class Parser {

// Use proper polarity names
if (item.polarity) {
const polarities = require('../config/polarities.json');
const polarity = polarities.find((p) => p.id === item.polarity);
if (polarity) {
item.polarity = polarity.name;
Expand Down Expand Up @@ -424,9 +428,8 @@ class Parser {
*/
addType(item) {
if (item.parent) return;
const types = require('../config/itemTypes.json');
// eslint-disable-next-line no-restricted-syntax
for (const type of types) {
for (const type of itemTypes) {
const contains = type.regex ? new RegExp(type.id, 'ig').test(item.uniqueName) : item.uniqueName.includes(type.id);
if (contains) {
if (type.append) item.type = `${item.type}${type.name}`;
Expand Down Expand Up @@ -661,7 +664,7 @@ class Parser {
* @param {Item} item to have tradability applied
*/
addTradable(item) {
item.tradable = tradable(item);
item.tradable = isTradable(item);
}

/**
Expand Down Expand Up @@ -696,7 +699,7 @@ class Parser {
if (item.components) {
// eslint-disable-next-line no-restricted-syntax
for (const component of item.components) {
const previous = previousBuild.find((i) => i.name === item.name && item.category !== 'Node');
const previous = previousItems.find((i) => i.name === item.name && item.category !== 'Node');
if (!previous || !previous.components) return;

const saved = previous.components.find((c) => c.name === component.name);
Expand All @@ -710,7 +713,7 @@ class Parser {
}
} else {
// Otherwise attach to main item
const saved = previousBuild.find((i) => i.name === item.name);
const saved = previousItems.find((i) => i.name === item.name);
if (saved?.drops) {
// chances were written as strings, caused by previous bad data
saved.drops.forEach((drop) => {
Expand Down Expand Up @@ -780,7 +783,7 @@ class Parser {
// This process takes a lot of cpu time, so we won't repeat it unless the
// patchlog hash changed.
if (!patchlogs.changed) {
const previous = previousBuild.find((i) => i.name === item.name);
const previous = previousItems.find((i) => i.name === item.name);
if (previous && previous.patchlogs) item.patchlogs = previous.patchlogs;
return;
}
Expand Down Expand Up @@ -980,7 +983,7 @@ class Parser {
applyOverrides(item) {
// universal polarity casing override
if (item.polarity) item.polarity = item.polarity.toLowerCase();
const override = require('../config/overrides.json')[item.uniqueName];
const override = overrides[item.uniqueName];
if (override) {
Object.keys(override).forEach((key) => {
item[key] = override[key];
Expand Down Expand Up @@ -1080,4 +1083,6 @@ class Parser {
}
}

module.exports = new Parser();
export const parser = new Parser();

export default parser;
34 changes: 0 additions & 34 deletions build/progress.js

This file was deleted.

0 comments on commit cf46e22

Please sign in to comment.