-
Notifications
You must be signed in to change notification settings - Fork 8
/
Copy pathbuild.js
89 lines (74 loc) · 3.07 KB
/
build.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
// Build script: validates the payload wordlists in ./src against schema.json,
// assigns each payload a deterministic id, and aggregates everything into
// dist/Payloads.js as an ES-module export.
const fs = require('fs');
const path = require('path');
const util = require('util');
const Ajv = require('ajv/dist/2020'); // Ajv for JSON Schema draft 2020-12
const crypto = require('crypto');
// Promisified fs helpers used by the async pipeline below.
const readdir = util.promisify(fs.readdir);
const readFile = util.promisify(fs.readFile);
const writeFile = util.promisify(fs.writeFile);
// Source directory of wordlist JSON files, resolved relative to __dirname.
const ROOT = './src/';
// JSON Schema every wordlist file must satisfy.
const SCHEMA_PATH = './schema.json';
// Output directory and the aggregated module filename written into it.
const DIST_DIR = 'dist';
const DIST_FILE = 'Payloads.js';
/**
 * Escapes every non-ASCII character of `str` as JSON-compatible `\uXXXX`
 * sequences; ASCII characters pass through unchanged.
 *
 * Iterates by code point (`Array.from`) and, for characters outside the BMP,
 * escapes BOTH UTF-16 surrogate halves. The previous version escaped only
 * `charCodeAt(0)`, so astral characters (e.g. emoji) were emitted as a lone
 * high-surrogate escape — an invalid sequence that corrupted the output.
 *
 * @param {string} str - Input text.
 * @returns {string} ASCII-only string with `\uXXXX` escapes.
 */
function encodeUnicode(str) {
  const escapeUnit = (unit) => `\\u${unit.toString(16).padStart(4, '0')}`;
  return Array.from(str).map(char => {
    if (char.codePointAt(0) < 128) return char;
    // `char` is one code point: 1 UTF-16 unit for BMP characters,
    // 2 units (a surrogate pair) for astral ones — escape each unit.
    return char
      .split('')
      .map(unit => escapeUnit(unit.charCodeAt(0)))
      .join('');
  }).join('');
}
/**
 * Derives a stable, deterministic id for a payload entry: the lowercase hex
 * SHA-1 digest of the prefix, payload, and suffix concatenated in order.
 *
 * @param {string} prefix - Payload prefix.
 * @param {string} payload - Core payload string.
 * @param {string} suffix - Payload suffix.
 * @returns {string} 40-character lowercase hex digest.
 */
function generateId(prefix, payload, suffix) {
  return crypto
    .createHash('sha1')
    .update(`${prefix}${payload}${suffix}`)
    .digest('hex');
}
/**
 * Reads a wordlist JSON file and validates it against the compiled schema.
 *
 * @param {Function} schemaValidator - Compiled Ajv validator function; on a
 *   failed call its `.errors` property holds the validation errors.
 * @param {string} filePath - Path of the wordlist file to load.
 * @returns {Promise<Object>} The parsed wordlist object.
 * @throws {Error} If the file is not valid JSON (message includes the file
 *   path — a bare SyntaxError would give no hint WHICH file is broken) or if
 *   it does not satisfy the schema.
 */
async function validateWordlist(schemaValidator, filePath) {
  const content = await fs.promises.readFile(filePath, 'utf8');
  let wordlist;
  try {
    wordlist = JSON.parse(content);
  } catch (err) {
    throw new Error(`Invalid JSON in ${filePath}: ${err.message}`, { cause: err });
  }
  if (!schemaValidator(wordlist)) {
    throw new Error(`Validation failed for ${filePath}:\n${JSON.stringify(schemaValidator.errors, null, 2)}`);
  }
  return wordlist;
}
/**
 * Verifies that no payload id occurs more than once across all wordlists.
 *
 * @param {Map<string, Object>} wordlists - Filename -> wordlist objects,
 *   each carrying a `payloads` array of `{ id }` entries.
 * @throws {Error} Listing every duplicated id, if any exist.
 */
function checkUniqueIds(wordlists) {
  const counts = new Map();
  for (const { payloads } of wordlists.values()) {
    for (const { id } of payloads) {
      counts.set(id, (counts.get(id) || 0) + 1);
    }
  }
  const duplicates = [...counts.keys()].filter((id) => counts.get(id) > 1);
  if (duplicates.length > 0) {
    throw new Error(`Non-unique IDs found: ${duplicates.join(', ')}`);
  }
}
/**
 * Build pipeline entry point:
 *  1. Compile the JSON Schema and validate every `*.json` file under ROOT.
 *  2. Assign each payload a deterministic SHA-1 id and sort payloads.
 *  3. Re-emit each validated wordlist into DIST_DIR (Unicode-escaped).
 *  4. Write the aggregated `PAYLOADS` ES-module (DIST_FILE).
 *
 * @returns {Promise<void>}
 * @throws {Error} On schema-validation failure or duplicate payload ids.
 */
async function aggregateJson() {
  // strict:false — the schema may use keywords Ajv's strict mode rejects.
  const ajv = new Ajv({ strict: false });
  const schema = JSON.parse(await readFile(path.resolve(__dirname, SCHEMA_PATH), 'utf-8'));
  const validate = ajv.compile(schema);

  const payloadsDir = path.resolve(__dirname, ROOT);
  const files = await readdir(payloadsDir);
  const wordlists = new Map();
  for (const file of files.filter((f) => f.endsWith('.json'))) {
    const filePath = path.join(payloadsDir, file);
    const wordlist = await validateWordlist(validate, filePath);
    // Deterministic id per payload so rebuilds are stable and duplicate
    // entries (same prefix/payload/suffix) become detectable below.
    wordlist.payloads.forEach((element) => {
      element.id = generateId(element.prefix, element.payload, element.suffix);
    });
    // Sort in place for stable, diff-friendly output.
    wordlist.payloads.sort((a, b) => a.payload.localeCompare(b.payload));
    wordlists.set(file, wordlist);
  }
  checkUniqueIds(wordlists);

  const distPath = path.join(__dirname, DIST_DIR);
  // recursive:true makes mkdir a no-op when the directory already exists,
  // so no racy existsSync guard is needed.
  fs.mkdirSync(distPath, { recursive: true });

  for (const [key, value] of wordlists) {
    await writeFile(path.join(distPath, key), encodeUnicode(JSON.stringify(value, null, 2)));
  }
  const output = `export const PAYLOADS = ${encodeUnicode(JSON.stringify(Array.from(wordlists.values()), null, 2))};`;
  await writeFile(path.join(distPath, DIST_FILE), output);
  console.log(`Aggregated JSON files into ${path.join(distPath, DIST_FILE)}`);
}
// Run the build; any failure is reported and mapped to a non-zero exit code.
(async () => {
  try {
    await aggregateJson();
  } catch (err) {
    console.error('Failed to aggregate JSON files:', err);
    process.exit(1);
  }
})();