From 01fce8d617f1d19ba1b5e23220566fd4c6c8ba65 Mon Sep 17 00:00:00 2001 From: Erik Golinelli Date: Tue, 28 May 2024 02:00:03 +0200 Subject: [PATCH 1/7] wip convert to ts --- .eslintrc.json | 6 +- package.json | 6 +- src/{index.js => index.ts} | 0 src/{mocompiler.js => mocompiler.ts} | 91 ++++-------- src/{moparser.js => moparser.ts} | 12 +- src/{pocompiler.js => pocompiler.ts} | 90 +++++------ src/{poparser.js => poparser.ts} | 214 +++++++++++++-------------- src/{shared.js => shared.ts} | 74 +++++---- src/types.js | 52 ------- src/types.ts | 111 ++++++++++++++ tsconfig.json | 5 +- 11 files changed, 341 insertions(+), 320 deletions(-) rename src/{index.js => index.ts} (100%) rename src/{mocompiler.js => mocompiler.ts} (69%) rename src/{moparser.js => moparser.ts} (92%) rename src/{pocompiler.js => pocompiler.ts} (69%) rename src/{poparser.js => poparser.ts} (75%) rename src/{shared.js => shared.ts} (61%) delete mode 100644 src/types.js create mode 100644 src/types.ts diff --git a/.eslintrc.json b/.eslintrc.json index 30149ce..e0324ef 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -3,7 +3,11 @@ "es2021": true, "node": true }, - "extends": "standard", + "extends": [ + "eslint:recommended", + "plugin:@typescript-eslint/recommended" + ], + "parser": "@typescript-eslint/parser", "parserOptions": { "ecmaVersion": "latest", "sourceType": "module" diff --git a/package.json b/package.json index 2bd36fe..fd13921 100644 --- a/package.json +++ b/package.json @@ -35,12 +35,16 @@ "readable-stream": "^4.5.2" }, "devDependencies": { + "@eslint/js": "^9.3.0", "@types/chai": "latest", "@types/content-type": "^1.1.8", + "@types/eslint__js": "^8.42.3", "@types/mocha": "latest", "@types/readable-stream": "^4.0.11", + "@typescript-eslint/eslint-plugin": "^7.11.0", + "@typescript-eslint/parser": "^7.11.0", "chai": "^5.0.3", - "eslint": "^8.56.0", + "eslint": "^8.57.0", "eslint-config-standard": "^17.1.0", "eslint-plugin-import": "^2.29.1", "eslint-plugin-n": "^16.6.2", diff --git 
a/src/index.js b/src/index.ts similarity index 100% rename from src/index.js rename to src/index.ts diff --git a/src/mocompiler.js b/src/mocompiler.ts similarity index 69% rename from src/mocompiler.js rename to src/mocompiler.ts index 049ab62..8a9b5fe 100644 --- a/src/mocompiler.js +++ b/src/mocompiler.ts @@ -1,42 +1,19 @@ import encoding from 'encoding'; import { HEADERS, formatCharset, generateHeader, compareMsgid } from './shared.js'; import contentType from 'content-type'; +import {GetTextTranslation, GetTextTranslations, Size, TranslationBuffers, Translations} from "./types.js"; +import { Transform } from 'readable-stream'; -/** - * @typedef {import('node:stream').Transform} Transform - * @typedef {import('./types.js').GetTextTranslation} GetTextTranslation - * @typedef {import('./types.js').GetTextTranslations} GetTextTranslations - * @typedef {import('./types.js').Translations} Translations - * @typedef {import('./types.js').WriteFunc} WriteFunc - */ - -/** - * @typedef {Object} Size Data about the size of the compiled MO object. - * @property {number} msgid The size of the msgid section. - * @property {number} msgstr The size of the msgstr section. - * @property {number} total The total size of the compiled MO object. - */ - -/** - * @typedef {{ msgid: Buffer, msgstr: Buffer }} TranslationBuffers A translation object partially parsed. - */ - -/** - * - * @typedef {Object} CompilerOptions MO compiler options - * @property {'be'|'le'} [endian='le'] Endianness of the output buffer. Default is 'le' - */ /** * Exposes general compiler function. 
Takes a translation * object as a parameter and returns binary MO object * - * @param {GetTextTranslations} table Translation object - * @param {CompilerOptions} [options] MO compiler options - * @return {Buffer} Compiled binary MO object + * @param table Translation object + * @return Compiled binary MO object */ -export default function (table, options = { endian: 'le' }) { - const compiler = new Compiler(table, options); +export default function (table: GetTextTranslations): Buffer { + const compiler = new Compiler(table); return compiler.compile(); } @@ -46,8 +23,8 @@ export default function (table, options = { endian: 'le' }) { * @param {Record} headers the headers * @return {Record} The prepared header */ -function prepareMoHeaders (headers) { - return Object.keys(headers).reduce((result, key) => { +function prepareMoHeaders (headers: Record): Record { + return Object.keys(headers).reduce((result: Record, key: string) => { const lowerKey = key.toLowerCase(); if (HEADERS.has(lowerKey)) { @@ -71,10 +48,10 @@ function prepareMoHeaders (headers) { * @param {Translations} translations * @return {Translations} */ -function prepareTranslations (translations) { +function prepareTranslations (translations: Translations): Translations { return Object.keys(translations).reduce((result, msgctxt) => { const context = translations[msgctxt]; - const msgs = Object.keys(context).reduce((result, msgid) => { + const msgs = Object.keys(context).reduce((result: Record, msgid) => { const TranslationMsgstr = context[msgid].msgstr; const hasTranslation = TranslationMsgstr.some(item => !!item.length); @@ -83,44 +60,36 @@ function prepareTranslations (translations) { } return result; - }, /** @type {Record} */({})); + },{}); if (Object.keys(msgs).length) { result[msgctxt] = msgs; } return result; - }, /** @type {Translations} */({})); + }, {} as Translations); } + /** - * Creates a MO compiler object. 
- * @this {Compiler & Transform} + * Creates a MO compiler object * - * @param {GetTextTranslations} [table] Translation table as defined in the README - * @param {CompilerOptions} [options] MO compiler options + * @param table Translation table as defined in the README */ -function Compiler (table, options = { endian: 'le' }) { - /** @type {GetTextTranslations} _table The translation table */ +function Compiler (this: Compiler & Transform, table: GetTextTranslations) { + /** The translation table */ this._table = { charset: undefined, translations: prepareTranslations(table?.translations ?? {}), headers: prepareMoHeaders(table?.headers ?? {}) - }; + } as GetTextTranslations; this._translations = []; - /** - * @type {WriteFunc} - */ - this._writeFunc = options?.endian === 'be' ? 'writeUInt32BE' : 'writeUInt32LE'; + this._writeFunc = 'writeUInt32LE'; this._handleCharset(); - /** - * Magic bytes for the generated binary data - * @type {number} MAGIC file header magic value of mo file - */ this.MAGIC = 0x950412de; } @@ -197,10 +166,10 @@ Compiler.prototype._generateList = function () { /** * Calculate buffer size for the final binary object * - * @param {TranslationBuffers[]} list An array of translation strings from _generateList - * @return {Size} Size data of {msgid, msgstr, total} + * @param list An array of translation strings from _generateList + * @return Size data of {msgid, msgstr, total} */ -Compiler.prototype._calculateSize = function (list) { +Compiler.prototype._calculateSize = function (list: TranslationBuffers[]): Size { let msgidLength = 0; let msgstrLength = 0; @@ -231,11 +200,11 @@ Compiler.prototype._calculateSize = function (list) { /** * Generates the binary MO object from the translation list * - * @param {TranslationBuffers[]} list translation list - * @param {Size} size Byte size information - * @return {Buffer} Compiled MO object + * @param list translation list + * @param size Byte size information + * @return Compiled MO object */ 
-Compiler.prototype._build = function (list, size) { +Compiler.prototype._build = function (list: TranslationBuffers[], size: Size): Buffer { const returnBuffer = Buffer.alloc(size.total); let curPosition = 0; let i; @@ -267,8 +236,8 @@ Compiler.prototype._build = function (list, size) { for (i = 0, len = list.length; i < len; i++) { const msgidLength = /** @type {Buffer} */(/** @type {unknown} */(list[i].msgid)); msgidLength.copy(returnBuffer, curPosition); - returnBuffer[this._writeFunc](list[i].msgid.length, 28 + i * 8); - returnBuffer[this._writeFunc](curPosition, 28 + i * 8 + 4); + returnBuffer.writeUInt32LE(list[i].msgid.length, 28 + i * 8); + returnBuffer.writeUInt32LE(curPosition, 28 + i * 8 + 4); returnBuffer[curPosition + list[i].msgid.length] = 0x00; curPosition += list[i].msgid.length + 1; } @@ -277,8 +246,8 @@ Compiler.prototype._build = function (list, size) { for (i = 0, len = list.length; i < len; i++) { const msgstrLength = /** @type {Buffer} */(/** @type {unknown} */(list[i].msgstr)); msgstrLength.copy(returnBuffer, curPosition); - returnBuffer[this._writeFunc](list[i].msgstr.length, 28 + (4 + 4) * list.length + i * 8); - returnBuffer[this._writeFunc](curPosition, 28 + (4 + 4) * list.length + i * 8 + 4); + returnBuffer.writeUInt32LE(list[i].msgstr.length, 28 + (4 + 4) * list.length + i * 8); + returnBuffer.writeUInt32LE(curPosition, 28 + (4 + 4) * list.length + i * 8 + 4); returnBuffer[curPosition + list[i].msgstr.length] = 0x00; curPosition += list[i].msgstr.length + 1; } diff --git a/src/moparser.js b/src/moparser.ts similarity index 92% rename from src/moparser.js rename to src/moparser.ts index 263efdb..cd7cf7a 100644 --- a/src/moparser.js +++ b/src/moparser.ts @@ -1,13 +1,7 @@ import encoding from 'encoding'; import { formatCharset, parseHeader } from './shared.js'; -/** - * @typedef {import('./types.js').GetTextTranslations} GetTextTranslations - * @typedef {import('./types.js').GetTextTranslation} GetTextTranslation - * @typedef 
{import('./types.js').Translations} Translations - * @typedef {import('./types.js').WriteFunc} WriteFunc - * @typedef {import('./types.js').ReadFunc} ReadFunc - */ +import type {Parser,GetTextTranslations,GetTextTranslation,Translations,WriteFunc,ReadFunc,} from './types.js'; /** * Parses a binary MO object into translation table @@ -15,7 +9,7 @@ import { formatCharset, parseHeader } from './shared.js'; * @param {Buffer} buffer Binary MO object * @param {string} [defaultCharset] Default charset to use */ -export default function (buffer, defaultCharset) { +export default function (buffer: any, defaultCharset: string | undefined) { const parser = new Parser(buffer, defaultCharset); return parser.parse(); @@ -28,7 +22,7 @@ export default function (buffer, defaultCharset) { * @param {Buffer|null} fileContents Binary MO object * @param {string} [defaultCharset] Default charset to use */ -function Parser (fileContents, defaultCharset = 'iso-8859-1') { +function Parser (this: Parser, fileContents: Buffer | null, defaultCharset: string = 'iso-8859-1') { this._fileContents = fileContents; this._charset = defaultCharset; diff --git a/src/pocompiler.js b/src/pocompiler.ts similarity index 69% rename from src/pocompiler.js rename to src/pocompiler.ts index b107e88..dd842e8 100644 --- a/src/pocompiler.js +++ b/src/pocompiler.ts @@ -2,28 +2,20 @@ import { HEADERS, foldLine, compareMsgid, formatCharset, generateHeader } from ' import contentType from 'content-type'; import encoding from 'encoding'; +import {Compiler, GetTextComment, GetTextTranslation, GetTextTranslations, ParserOptions, Translations} from "./types.js"; -/** - * @typedef {import('./types.js').GetTextTranslations} GetTextTranslations - * @typedef {import('./types.js').GetTextTranslation} GetTextTranslation - * @typedef {import('./types.js').GetTextComment} GetTextComment - * @typedef {import('./types.js').Translations} Translations - * @typedef {import('./types.js').ParserOptions} ParserOptions - */ -/** - * 
@typedef {Partial> & { msgstr?: string | string[] }} PreOutputTranslation - */ +type PreOutputTranslation = Partial> & { msgstr?: string | string[]; }; /** * Exposes general compiler function. Takes a translation * object as a parameter and returns PO object * - * @param {GetTextTranslations} table Translation object - * @param {ParserOptions} [options] Options - * @return {Buffer} The compiled PO object + * @param table Translation object + * @param options Options + * @return The compiled PO object */ -export default function (table, options) { +export default function (table: GetTextTranslations, options: ParserOptions): Buffer { const compiler = new Compiler(table, options); return compiler.compile(); @@ -32,11 +24,11 @@ export default function (table, options) { /** * Takes the header object and converts all headers into the lowercase format * - * @param {Record} headersRaw the headers to prepare - * @returns {Record} the headers in the lowercase format + * @param headersRaw the headers to prepare + * @returns the headers in the lowercase format */ -export function preparePoHeaders (headersRaw) { - return Object.keys(headersRaw).reduce((result, key) => { +export function preparePoHeaders (headersRaw: Record): Record { + return Object.keys(headersRaw).reduce((result: Record, key) => { const lowerKey = key.toLowerCase(); const value = HEADERS.get(lowerKey); @@ -47,17 +39,17 @@ export function preparePoHeaders (headersRaw) { } return result; - }, /** @type {Record} */ ({})); + }, {}); } /** * Creates a PO compiler object. * * @constructor - * @param {GetTextTranslations} [table] Translation table to be compiled - * @param {ParserOptions} [options] Options + * @param table Translation table to be compiled + * @param options Options */ -function Compiler (table, options) { +function Compiler (this: Compiler, table: GetTextTranslations, options: ParserOptions) { this._table = table ?? 
{ headers: {}, charset: undefined, @@ -65,7 +57,7 @@ function Compiler (table, options) { }; this._table.translations = { ...this._table.translations }; - /** @type {ParserOptions} _options The Options object */ + /** _options The Options object */ this._options = { foldLength: 76, escapeCharacters: true, @@ -74,7 +66,6 @@ function Compiler (table, options) { ...options }; - /** @type {Record}} the translation table */ this._table.headers = preparePoHeaders(this._table.headers ?? {}); this._translations = []; @@ -89,7 +80,7 @@ function Compiler (table, options) { * @param {Record} comments A comments object * @return {string} A comment string for the PO file */ -Compiler.prototype._drawComments = function (comments) { +Compiler.prototype._drawComments = function (comments: { [x: string]: string; }): string { /** @var {Record[]} lines The comment lines to be returned */ const lines = []; /** @var {{key: GetTextComment, prefix: string}} type The comment type */ @@ -131,12 +122,12 @@ Compiler.prototype._drawComments = function (comments) { /** * Builds a PO string for a single translation object * - * @param {PreOutputTranslation} block Translation object - * @param {Partial} [override] Properties of this object will override `block` properties - * @param {boolean} [obsolete] Block is obsolete and must be commented out - * @return {string} Translation string for a single object + * @param block Translation object + * @param override Properties of this object will override `block` properties + * @param obsolete Block is obsolete and must be commented out + * @return Translation string for a single object */ -Compiler.prototype._drawBlock = function (block, override = {}, obsolete = false) { +Compiler.prototype._drawBlock = function (block: PreOutputTranslation, override: Partial = {}, obsolete: boolean = false): string { const response = []; const msgctxt = override.msgctxt || block.msgctxt; const msgid = override.msgid || block.msgid; @@ -144,8 +135,7 @@ 
Compiler.prototype._drawBlock = function (block, override = {}, obsolete = false const msgstrData = override.msgstr || block.msgstr; const msgstr = Array.isArray(msgstrData) ? [...msgstrData] : [msgstrData]; - /** @type {GetTextComment|undefined} */ - const comments = override.comments || block.comments; + const comments: GetTextComment|undefined = override.comments || block.comments; if (comments) { const drawnComments = this._drawComments(comments); if (drawnComments) { @@ -175,18 +165,18 @@ Compiler.prototype._drawBlock = function (block, override = {}, obsolete = false /** * Escapes and joins a key and a value for the PO string * - * @param {string} key Key name - * @param {string} value Key value - * @param {boolean} [obsolete] PO string is obsolete and must be commented out - * @return {string} Joined and escaped key-value pair + * @param key Key name + * @param value Key value + * @param obsolete PO string is obsolete and must be commented out + * @return Joined and escaped key-value pair */ -Compiler.prototype._addPOString = function (key = '', value = '', obsolete = false) { +Compiler.prototype._addPOString = function (key: string = '', value: string = '', obsolete: boolean = false): string { key = key.toString(); if (obsolete) { key = '#~ ' + key; } - let { foldLength, eol, escapeCharacters } = this._options; + let { foldLength, eol, escapeCharacters } = this._options; // escape newlines and quotes if (escapeCharacters) { @@ -249,12 +239,12 @@ Compiler.prototype._handleCharset = function () { /** * Flatten and sort translations object * - * @param {Translations} section Object to be prepared (translations or obsolete) - * @returns {PreOutputTranslation[]|undefined} Prepared array + * @param section Object to be prepared (translations or obsolete) + * @returns Prepared array */ -Compiler.prototype._prepareSection = function (section) { - /** @type {GetTextTranslation[]} response Prepared array */ - let response = []; +Compiler.prototype._prepareSection = 
function (section: Translations): PreOutputTranslation[] | undefined { + /** response Prepared array */ + let response: GetTextTranslation[] = []; for (const msgctxt in section) { if (typeof section[msgctxt] !== 'object') { @@ -291,22 +281,24 @@ Compiler.prototype._prepareSection = function (section) { * Compiles a translation object into a PO object * * @interface - * @return {Buffer} Compiled a PO object + * @return Compiled a PO object */ -Compiler.prototype.compile = function () { +Compiler.prototype.compile = function (): Buffer { if (!this._table.translations) { throw new Error('No translations found'); } - /** @type {PreOutputTranslation} headerBlock */ - const headerBlock = (this._table.translations[''] && this._table.translations['']['']) || {}; + /** headerBlock */ + const headerBlock: PreOutputTranslation = (this._table.translations[''] && this._table.translations['']['']) || {}; + + /** Translations */ const translations = this._prepareSection(this._table.translations); - let response = /** @type {(PreOutputTranslation|string)[]} */ (/** @type {unknown[]} */ (translations?.map(t => this._drawBlock(t)))); + let response: (PreOutputTranslation|string)[] = (translations?.map((t: unknown[]) => this._drawBlock(t))); if (typeof this._table.obsolete === 'object') { const obsolete = this._prepareSection(this._table.obsolete); if (obsolete && obsolete.length) { - response = response?.concat(obsolete.map(r => this._drawBlock(r, {}, true))); + response = response?.concat(obsolete.map((r: unknown[]) => this._drawBlock(r, {}, true))); } } diff --git a/src/poparser.js b/src/poparser.ts similarity index 75% rename from src/poparser.js rename to src/poparser.ts index 3d209b7..7bb997b 100644 --- a/src/poparser.js +++ b/src/poparser.ts @@ -1,43 +1,31 @@ import encoding from 'encoding'; import { formatCharset, parseHeader, parseNPluralFromHeadersSafely, ParserError } from './shared.js'; -import { Transform } from 'readable-stream'; +import { Transform, TransformOptions } 
from 'readable-stream'; import util from 'util'; +import {PoParserTransform, GetTextComment, GetTextTranslation, GetTextTranslations, ParserOptions, Translations} from "./types.js"; + +/** Po parser options*/ +type Options = { defaultCharset?: string; validation?: boolean; }; + +/** A single Node object in the PO file */ +export interface Node { + key?: string; + type?: number; + value: string; + quote?: string; + obsolete?: boolean; + comments?: GetTextComment | undefined; +} -/** - * @typedef {import('stream').Stream.Writable} WritableState - * @typedef {import('readable-stream').TransformOptions} TransformOptions - * @typedef {import('./types.js').GetTextTranslations} GetTextTranslations - * @typedef {import('./types.js').GetTextTranslation} GetTextTranslation - * @typedef {import('./types.js').GetTextComment} GetTextComment - * @typedef {import('./types.js').Translations} Translations - * @typedef {import('./types.js').ParserOptions} ParserOptions - */ - -/** - * @typedef {{ defaultCharset?: string, validation?: boolean }} Options Po parser options - */ - -/** - * @typedef {(...args: any[]) => void} DoneCallback - */ - -/** - * @typedef {Object} Node A single Node object in the PO file - * @property {string} [key] - * @property {number} [type] - * @property {string} value - * @property {string} [quote] - * @property {boolean} [obsolete] - * @property {GetTextComment | undefined} [comments] - */ +type DoneCallback = (...args: unknown[]) => void /** * Parses a PO object into translation table * - * @param {string | Buffer} input PO object - * @param {Options} [options] Optional options with defaultCharset and validation + * @param input PO object + * @param [options] Optional options with defaultCharset and validation */ -export function poParse (input, options = {}) { +export function poParse (input: string | Buffer, options: Options = {}) { const parser = new Parser(input, options); return parser.parse(); @@ -46,28 +34,50 @@ export function poParse (input, 
options = {}) { /** * Parses a PO stream, emits translation table in object mode * - * @param {Options} [options] Optional options with defaultCharset and validation - * @param {TransformOptions} [transformOptions] Optional stream options + * @param [options] Optional options with defaultCharset and validation + * @param [transformOptions] Optional stream options */ -export function poStream (options = {}, transformOptions = {}) { +export function poStream (options: Options = {}, transformOptions: TransformOptions = {}) { return new PoParserTransform(options, transformOptions); } +type Parser = { + _validation: boolean; + _charset: string; + _lex: Node[]; + _escaped: boolean; + _node: Partial; + _state: number; + _lineNumber: number; + _fileContents: string | Buffer; + _handleCharset: (buf: string | Buffer) => string; + states: { + none: number; + header: number; + msgctxt: number; + msgid: number; + msgid_plural: number; + msgstr: number; + msgstr_plural: number; + obsolete: number; + comment: number; + eol: number; + } +} + /** * Creates a PO parser object. 
* If a PO object is a string, UTF-8 will be used as the charset * - * @param {string | Buffer} fileContents PO object - * @param {Options} options Options with defaultCharset and validation + * @param fileContents PO object + * @param options Options with defaultCharset and validation */ -function Parser (fileContents, { defaultCharset = 'iso-8859-1', validation = false }) { +function Parser (this: Parser, fileContents: string | Buffer, {defaultCharset = 'iso-8859-1', validation = false}: Options) { this._validation = validation; this._charset = defaultCharset; - /** @type {Node[]} Lexed tokens */ this._lex = []; this._escaped = false; - /** @type {Partial} */ this._node = {}; this._state = this.states.none; this._lineNumber = 1; @@ -85,7 +95,7 @@ function Parser (fileContents, { defaultCharset = 'iso-8859-1', validation = fal * * @return {Object} Translation table */ -Parser.prototype.parse = function () { +Parser.prototype.parse = function (): GetTextTranslations { this._lexer(this._fileContents); return this._finalize(this._lex); @@ -94,9 +104,9 @@ Parser.prototype.parse = function () { /** * Detects charset for PO strings from the header * - * @param {string | Buffer} buf Header value + * @param buf Header value */ -Parser.prototype._handleCharset = function (buf = '') { +Parser.prototype._handleCharset = function (buf: string | Buffer = '') { /** @type {string} */ const str = buf.toString(); let pos; @@ -121,10 +131,10 @@ Parser.prototype._handleCharset = function (buf = '') { /** * Converts buffer to string - * @param {string | Buffer} buf Buffer to convert - * @return {string} Converted string + * @param buf Buffer to convert + * @return Converted string */ -Parser.prototype._toString = function (buf) { +Parser.prototype._toString = function (buf: string | Buffer): string { return encoding.convert(buf, 'utf-8', this._charset).toString('utf-8'); }; @@ -160,10 +170,10 @@ Parser.prototype.symbols = { /** * Token parser. 
Parsed state can be found from this._lex * - * @param {string} chunk String + * @param chunk String * @throws {ParserError} Throws a SyntaxError if the value doesn't match the key names. */ -Parser.prototype._lexer = function (chunk) { +Parser.prototype._lexer = function (chunk: string) { let chr; for (let i = 0, len = chunk.length; i < len; i++) { @@ -259,12 +269,11 @@ Parser.prototype._lexer = function (chunk) { /** * Join multi line strings * - * @param {Node[]} tokens Parsed tokens - * @return {Node[]} Parsed tokens, with multi line strings joined into one + * @param tokens Parsed tokens + * @return Parsed tokens, with multi line strings joined into one */ -Parser.prototype._joinStringValues = function (tokens) { - /** @type {Node[]} */ - const response = []; +Parser.prototype._joinStringValues = function (tokens: Node[]): Node[] { + const response: Node[] = []; let lastNode; for (let i = 0, len = tokens.length; i < len; i++) { @@ -284,18 +293,17 @@ Parser.prototype._joinStringValues = function (tokens) { /** * Parse comments into separate comment blocks * - * @param {Node[]} tokens Parsed tokens + * @param tokens Parsed tokens */ -Parser.prototype._parseComments = function (tokens) { +Parser.prototype._parseComments = function (tokens: Node[]) { for (const node of tokens) { if (!node || node.type !== this.types.comments) { continue; } - /** @type {{ - [key: string]: string[]; - }} */ - const comment = { + const comment: { + [key: string]: string[]; + } = { translator: [], extracted: [], reference: [], @@ -303,8 +311,7 @@ Parser.prototype._parseComments = function (tokens) { previous: [] }; - /** @type {string[]} */ - const lines = (node.value || '').split(/\n/); + const lines: string[] = (node.value || '').split(/\n/); for (const line of lines) { switch (line.charAt(0) || '') { @@ -327,7 +334,7 @@ Parser.prototype._parseComments = function (tokens) { } } - const finalToken = /** @type {Omit & { value: Record}} */ (/** @type {unknown} */ (node)); + const 
finalToken = node as unknown as Omit & { value: Record}; finalToken.value = {}; @@ -342,14 +349,12 @@ Parser.prototype._parseComments = function (tokens) { /** * Join gettext keys with values * - * @param {(Node & { value?: string })[]} tokens - Parsed tokens containing key-value pairs - * @return {Node[]} - An array of Nodes representing joined tokens + * @param tokens - Parsed tokens containing key-value pairs + * @return An array of Nodes representing joined tokens */ -Parser.prototype._handleKeys = function (tokens) { - /** @type {Node[]} */ - const response = []; - /** @type {Partial & { comments?: string }} */ - let lastNode = {}; +Parser.prototype._handleKeys = function (tokens: (Node & { value?: string })[]): Node[] { + const response: Node[] = []; + let lastNode: Partial & { comments?: string; } = {}; for (let i = 0, len = tokens.length; i < len; i++) { if (tokens[i].type === this.types.key) { @@ -363,7 +368,7 @@ Parser.prototype._handleKeys = function (tokens) { lastNode.comments = tokens[i - 1].value; } lastNode.value = ''; - response.push(/** @type {Node} */ (lastNode)); + response.push((lastNode as Node)); } else if (tokens[i].type === this.types.string && lastNode) { lastNode.value += tokens[i].value; } @@ -378,14 +383,12 @@ Parser.prototype._handleKeys = function (tokens) { * @param {Node[]} tokens Parsed tokens * @return {GetTextTranslation[]} Tokens */ -Parser.prototype._handleValues = function (tokens) { +Parser.prototype._handleValues = function (tokens: Node[]): GetTextTranslation[] { const response = []; - /** @type {GetTextTranslation} Translation object */ - let lastNode = {}; - /** @type {string | undefined} */ - let curContext; - /** @type {GetTextComment | undefined} */ - let curComments; + /** Translation object */ + let lastNode: Partial = {}; + let curContext: string | undefined; + let curComments: GetTextComment | undefined; for (let i = 0, len = tokens.length; i < len; i++) { const tokenKey = tokens[i].key; @@ -448,16 +451,16 @@ 
Parser.prototype._handleValues = function (tokens) { } } - return response; + return response as GetTextTranslation[]; }; /** * Validate token * - * @param {GetTextTranslation} token Parsed token - * @param {Translations} translations Translation table - * @param {string} msgctxt Message entry context - * @param {number} nplurals Number of expected plural forms + * @param token Parsed token + * @param translations Translation table + * @param msgctxt Message entry context + * @param nplurals Number of expected plural forms * @throws {Error} Will throw an error if token validation fails */ Parser.prototype._validateToken = function ( @@ -465,10 +468,10 @@ Parser.prototype._validateToken = function ( msgid = '', msgid_plural = '', // eslint-disable-line camelcase msgstr = [] - }, - translations, - msgctxt, - nplurals + }: GetTextTranslation, + translations: Translations, + msgctxt: string, + nplurals: number ) { if (msgid in translations[msgctxt]) { throw new SyntaxError(`Duplicate msgid error: entry "${msgid}" in "${msgctxt}" context has already been declared.`); @@ -488,12 +491,11 @@ Parser.prototype._validateToken = function ( * @param {GetTextTranslation[]} tokens Parsed tokens * @return {GetTextTranslations} Translation table */ -Parser.prototype._normalize = function (tokens) { +Parser.prototype._normalize = function (tokens: GetTextTranslation[]): GetTextTranslations { /** * Translation table to be returned - * @type {Omit & Partial> } table */ - const table = { + const table: Omit & Partial> = { charset: this._charset, headers: undefined, translations: {} @@ -501,8 +503,7 @@ Parser.prototype._normalize = function (tokens) { let nplurals = 1; for (let i = 0, len = tokens.length; i < len; i++) { - /** @type {string} */ - const msgctxt = tokens[i].msgctxt || ''; + const msgctxt: string = tokens[i].msgctxt || ''; if (tokens[i].obsolete) { if (!table.obsolete) { @@ -537,16 +538,16 @@ Parser.prototype._normalize = function (tokens) { 
table.translations[msgctxt][token.msgid] = token; } - return /** @type {GetTextTranslations} */ (table); + return table as GetTextTranslations; }; /** * Converts parsed tokens to a translation table * - * @param {Node[]} tokens Parsed tokens - * @returns {GetTextTranslations} Translation table + * @param tokens Parsed tokens + * @returns Translation table */ -Parser.prototype._finalize = function (tokens) { +Parser.prototype._finalize = function (tokens: Node[]): GetTextTranslations { /** * Translation table */ @@ -562,22 +563,21 @@ Parser.prototype._finalize = function (tokens) { return this._normalize(dataset); }; + + /** * Creates a transform stream for parsing PO input * @constructor - * @this {PoParserTransform & Transform} * - * @param {ParserOptions} options Optional options with defaultCharset and validation - * @param {TransformOptions & {initialTreshold?: number;}} transformOptions Optional stream options + * @param options Optional options with defaultCharset and validation + * @param transformOptions Optional stream options */ -function PoParserTransform (options, transformOptions) { +function PoParserTransform (this: PoParserTransform & Transform, options: ParserOptions, transformOptions: TransformOptions & { initialTreshold?: number; }) { const { initialTreshold, ..._transformOptions } = transformOptions; this.options = options; - /** @type {Parser|false} */ this._parser = false; this._tokens = {}; - /** @type {Buffer[]} */ this._cache = []; this._cacheSize = 0; @@ -592,11 +592,11 @@ util.inherits(PoParserTransform, Transform); /** * Processes a chunk of the input stream - * @param {Buffer} chunk Chunk of the input stream - * @param {string} encoding Encoding of the chunk - * @param {DoneCallback} done Callback to call when the chunk is processed + * @param chunk Chunk of the input stream + * @param encoding Encoding of the chunk + * @param done Callback to call when the chunk is processed */ -PoParserTransform.prototype._transform = function (chunk, 
encoding, done) { +PoParserTransform.prototype._transform = function (chunk: Buffer, encoding: string, done: DoneCallback) { let i; let len = 0; @@ -662,9 +662,9 @@ PoParserTransform.prototype._transform = function (chunk, encoding, done) { /** * Once all inputs have been processed, emit the parsed translation table as an object * - * @param {DoneCallback} done Callback to call when the chunk is processed + * @param done Callback to call when the chunk is processed */ -PoParserTransform.prototype._flush = function (done) { +PoParserTransform.prototype._flush = function (done: DoneCallback) { let chunk; if (this._cacheSize) { @@ -688,7 +688,7 @@ } if (this._parser) { - /** @type {any} */ (this).push(this._parser._finalize(this._parser._lex)); + (this).push(this._parser._finalize(this._parser._lex)); } setImmediate(done); diff --git a/src/shared.js b/src/shared.ts similarity index 61% rename from src/shared.js rename to src/shared.ts index c22cfd6..0fdd1f3 100644 --- a/src/shared.js +++ b/src/shared.ts @@ -1,8 +1,9 @@ // see https://www.gnu.org/software/gettext/manual/html_node/Header-Entry.html -/** @type {string} Header name for "Plural-Forms" */ -const PLURAL_FORMS = 'Plural-Forms'; -/** @typedef {Map} Headers Map of header keys to header names */ -export const HEADERS = new Map([ +/** Header name for "Plural-Forms" */ +const PLURAL_FORMS: string = 'Plural-Forms'; + +/** Map of header keys to header names */ +export const HEADERS: Map<string, string> = new Map([ ['project-id-version', 'Project-Id-Version'], ['report-msgid-bugs-to', 'Report-Msgid-Bugs-To'], ['pot-creation-date', 'POT-Creation-Date'], @@ -15,19 +16,19 @@ export const HEADERS = new Map([ ['plural-forms', PLURAL_FORMS] ]); -const PLURAL_FORM_HEADER_NPLURALS_REGEX = /nplurals\s*=\s*(?<nplurals>\d+)/; +const PLURAL_FORM_HEADER_NPLURALS_REGEX: RegExp = /nplurals\s*=\s*(?<nplurals>\d+)/; /** * Parses a header string into an object of key-value pairs * - * @param {string} str Header 
string - * @return {Record<string, string>} An object of key-value pairs + * @param str Header string + * @return An object of key-value pairs */ -export function parseHeader (str = '') { +export function parseHeader (str: string = ''): Record<string, string> { /** @type {string} Header string */ return str .split('\n') - .reduce((/** @type {Record<string, string>} */ headers, line) => { + .reduce((headers: Record<string, string>, line: string) => { const parts = line.split(':'); let key = (parts.shift() || '').trim(); @@ -46,11 +47,11 @@ /** * Attempts to safely parse 'nplurals" value from "Plural-Forms" header * - * @param {Record<string, string>} [headers] An object with parsed headers - * @param {number} fallback Fallback value if "Plural-Forms" header is absent - * @returns {number} Parsed result + * @param headers An object with parsed headers + * @param fallback Fallback value if "Plural-Forms" header is absent + * @returns Parsed result */ -export function parseNPluralFromHeadersSafely (headers, fallback = 1) { +export function parseNPluralFromHeadersSafely (headers: Record<string, string>, fallback: number = 1): number { const pluralForms = headers ? headers[PLURAL_FORMS] : false; if (!pluralForms) { @@ -67,10 +68,10 @@ /** * Joins a header object of key value pairs into a header string * - * @param {Record<string, string>} header Object of key value pairs - * @return {string} An object of key-value pairs + * @param header Object of key value pairs + * @return The joined header string + */ -export function generateHeader (header = {}) { +export function generateHeader (header: Record<string, string> = {}): string { const keys = Object.keys(header) .filter(key => !!key); @@ -87,11 +88,11 @@ /** * Normalizes charset name. Converts utf8 to utf-8, WIN1257 to windows-1257 etc. 
* - * @param {string} charset Charset name - * @param {string} defaultCharset Default charset name, defaults to 'iso-8859-1' - * @return {string} Normalized charset name + * @param charset Charset name + * @param defaultCharset Default charset name, defaults to 'iso-8859-1' + * @return Normalized charset name */ -export function formatCharset (charset = 'iso-8859-1', defaultCharset = 'iso-8859-1') { +export function formatCharset (charset: string = 'iso-8859-1', defaultCharset: string = 'iso-8859-1'): string { return charset.toString() .toLowerCase() .replace(/^utf[-_]?(\d+)$/, 'utf-$1') @@ -105,11 +106,11 @@ export function formatCharset (charset = 'iso-8859-1', defaultCharset = 'iso-885 /** * Folds long lines according to PO format * - * @param {string} str PO formatted string to be folded - * @param {number} [maxLen=76] Maximum allowed length for folded lines - * @return {string[]} An array of lines + * @param str PO formatted string to be folded + * @param maxLen Maximum allowed length for folded lines + * @return An array of lines */ -export function foldLine (str, maxLen = 76) { +export function foldLine (str: string, maxLen: number = 76): string[] { const lines = []; const len = str.length; let curLine = ''; @@ -117,15 +118,12 @@ export function foldLine (str, maxLen = 76) { let match; while (pos < len) { - let escaped = false; - curLine = str.substring(pos, pos + maxLen); // ensure that the line never ends with a partial escaping // make longer lines if needed - if (curLine.endsWith('\\') && pos + curLine.length < len) { - escaped = true; - curLine += str.charAt(pos + curLine.length); // Append the next character + while (curLine.endsWith('\\') && pos + curLine.length < len) { + curLine += str.charAt(pos + curLine.length); // Append the next character (index pos + curLine.length is the first char after curLine) } // ensure that if possible, line breaks are done at reasonable places @@ -137,7 +135,7 @@ export function foldLine (str, maxLen = 76) { if ((match = /.*\s+/.exec(curLine)) && /\S/.test(match[0])) { // 
use everything before and including the last white space character (if anything) curLine = match[0]; - } else if (!escaped && (match = /.*[\x21-\x2f0-9\x5b-\x60\x7b-\x7e]+/.exec(curLine)) && /[^\x21-\x2f0-9\x5b-\x60\x7b-\x7e]/.test(match[0])) { + } else if ((match = /.*[\x21-\x2f0-9\x5b-\x60\x7b-\x7e]+/.exec(curLine)) && /[^\x21-\x2f0-9\x5b-\x60\x7b-\x7e]/.test(match[0])) { // use everything before and including the last "special" character (if anything) curLine = match[0]; } @@ -153,12 +151,11 @@ /** * Comparator function for comparing msgid * - * @template {Buffer|string} T - * @param {{msgid: T}} left with msgid prev - * @param {{msgid: T}} right with msgid next - * @returns {number} comparator index + * @param left with msgid prev + * @param right with msgid next + * @returns comparator index */ -export function compareMsgid ({ msgid: left }, { msgid: right }) { +export function compareMsgid <T extends Buffer | string>({msgid: left}: { msgid: T; }, {msgid: right}: { msgid: T; }): number { if (left < right) { return -1; } @@ -174,11 +171,12 @@ * Custom SyntaxError subclass that includes the lineNumber property. */ export class ParserError extends SyntaxError { + lineNumber: number; /** - * @param {string} message - Error message. - * @param {number} lineNumber - Line number where the error occurred. + * @param message - Error message. + * @param lineNumber - Line number where the error occurred. */ - constructor (message, lineNumber) { + constructor (message: string, lineNumber: number) { super(message); this.lineNumber = lineNumber; } diff --git a/src/types.js b/src/types.js deleted file mode 100644 index 450d778..0000000 --- a/src/types.js +++ /dev/null @@ -1,52 +0,0 @@ -/** - * Represents a GetText comment. - * @typedef {Object} GetTextComment - * @property {string} [translator] Translator information. - * @property {string} [reference] Reference information. 
- * @property {string} [extracted] Extracted comments. - * @property {string} [flag] Flags. - * @property {string} [previous] Previous string. - */ - -/** - * Represents a GetText translation. - * @typedef {Object} GetTextTranslation - * @property {string} [msgctxt] Context of the message. - * @property {string} msgid The singular message ID. - * @property {string} [msgid_plural] The plural message ID. - * @property {string[]} msgstr Array of translated strings. - * @property {GetTextComment} [comments] Comments associated with the translation. - * @property {boolean} [obsolete] Whether the translation is obsolete. - */ - -/** - * @typedef {Record>} Translations The translations index. - */ - -/** - * Represents GetText translations. - * @typedef {Object} GetTextTranslations - * @property {string|undefined} charset Character set. - * @property {Record} headers Headers. - * @property {Translations} [obsolete] Obsolete messages. - * @property {Translations} translations Translations. - */ - -/** - * Options for the parser. - * @typedef {Object} ParserOptions - * @property {string} [defaultCharset] Default character set. - * @property {boolean} [validation] Whether to perform validation. - * @property {number} [foldLength] the fold length. - * @property {boolean} [escapeCharacters] Whether to escape characters. - * @property {boolean} [sort] Whether to sort messages. - * @property {string} [eol] End of line character. - */ - -/** - * @typedef {('writeUInt32LE'|'writeUInt32BE')} WriteFunc Type definition for write functions. - */ - -/** - * @typedef {('readUInt32LE'|'readUInt32BE')} ReadFunc Type definition for read functions. - */ diff --git a/src/types.ts b/src/types.ts new file mode 100644 index 0000000..9d4a31b --- /dev/null +++ b/src/types.ts @@ -0,0 +1,111 @@ +import {Transform} from "readable-stream"; + +/** + * Represents a GetText comment. 
+ */ +export interface GetTextComment { + translator?: string; + reference?: string; + extracted?: string; + flag?: string; + previous?: string; +} + +/** + * Represents a GetText translation. + */ +export interface GetTextTranslation { + msgctxt?: string; + msgid: string; + msgid_plural?: string; + msgstr: string[]; + comments?: GetTextComment; + obsolete?: boolean; +} + +/** + * The translation index. + */ +export type Translations = Record<string, Record<string, GetTextTranslation>> + +/** + * Represents GetText translations. + */ +export interface GetTextTranslations { + charset: string | undefined; + headers: Record<string, string>; + obsolete?: Translations; + translations: Translations; +} + +/** + * Options for the parser. + */ +export type ParserOptions = { + defaultCharset?: string; + validation?: boolean; + foldLength?: number; + escapeCharacters?: boolean; + sort?: boolean; + eol?: string; +} + +/** + * Type definition for write functions. + */ +export type WriteFunc = 'writeUInt32LE' | 'writeUInt32BE'; + +/** + * Type definition for read functions. 
+ */ +export type ReadFunc = 'readUInt32LE' | 'readUInt32BE'; + + +/** The size of the MO object */ +export type Size = { + msgid: number, + msgstr: number, + total: number +} + +/** The translation object as a buffer */ +export type TranslationBuffers = { + msgid: Buffer, + msgstr: Buffer +} + +export type Compiler = { + _options: ParserOptions; + _table: GetTextTranslations, + _translations: TranslationBuffers[], + _writeFunc: WriteFunc, + _handleCharset: () => void, + _generateList: () => TranslationBuffers[], + _build: (list: TranslationBuffers[], size: Size) => Buffer, + compile: () => Buffer, + /** + * Magic bytes for the generated binary data + * MAGIC file header magic value of mo file + */ + MAGIC: number, +} + +export type Parser = { + _validation: boolean; + _charset: string; + _lex: any[]; + _escaped: boolean; + _node: any; + _state: any; + _lineNumber: number; + _fileContents: string | Buffer; +} + +export type PoParserTransform = { + options: ParserOptions, + initialTreshold?: number, + _parser?: Parser|false, + _tokens?: {}, + _cache?: Buffer[], + _cacheSize?: number +}; diff --git a/tsconfig.json b/tsconfig.json index b82537e..2d46a1e 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -6,7 +6,8 @@ "moduleResolution": "Node16", "target": "ES2018", "lib": [ - "ES2018" + "ES2018", + "dom" ], // Strict mode "strict": true, @@ -28,7 +29,7 @@ "typeRoots": [ "./types", "./node_modules/@types" - ], + ] }, "include": [ "src/**/*", From 635d3529ec4430d5b8826a85ad9aa3caec7b57cb Mon Sep 17 00:00:00 2001 From: Erik Golinelli Date: Thu, 26 Feb 2026 12:44:28 +0100 Subject: [PATCH 2/7] refactor: migrate mocompiler and pocompiler to use modern ES modules with updated imports and class syntax This refactor streamlines the codebase by consolidating imports, adhering to consistent formatting, and improving type definitions. 
--- src/index.ts | 18 +- src/mocompiler.ts | 524 ++++++++++-------- src/moparser.ts | 416 +++++++------- src/pocompiler.ts | 649 ++++++++++++---------- src/poparser.ts | 1353 +++++++++++++++++++++++---------------------- src/shared.ts | 244 ++++---- src/types.ts | 125 ++--- 7 files changed, 1755 insertions(+), 1574 deletions(-) diff --git a/src/index.ts b/src/index.ts index fe34104..e7710c8 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,16 +1,16 @@ -import { poParse, poStream } from './poparser.js'; -import poCompiler from './pocompiler.js'; -import moParser from './moparser.js'; -import moCompiler from './mocompiler.js'; +import moCompiler from "./mocompiler.js"; +import moParser from "./moparser.js"; +import poCompiler from "./pocompiler.js"; +import { poParse, poStream } from "./poparser.js"; /** * Translation parser and compiler for PO files * @see https://www.gnu.org/software/gettext/manual/html_node/PO.html */ export const po = { - parse: poParse, - createParseStream: poStream, - compile: poCompiler + parse: poParse, + createParseStream: poStream, + compile: poCompiler, }; /** @@ -18,8 +18,8 @@ export const po = { * @see https://www.gnu.org/software/gettext/manual/html_node/MO.html */ export const mo = { - parse: moParser, - compile: moCompiler + parse: moParser, + compile: moCompiler, }; export default { mo, po }; diff --git a/src/mocompiler.ts b/src/mocompiler.ts index 8a9b5fe..1dee549 100644 --- a/src/mocompiler.ts +++ b/src/mocompiler.ts @@ -1,9 +1,18 @@ -import encoding from 'encoding'; -import { HEADERS, formatCharset, generateHeader, compareMsgid } from './shared.js'; -import contentType from 'content-type'; -import {GetTextTranslation, GetTextTranslations, Size, TranslationBuffers, Translations} from "./types.js"; -import { Transform } from 'readable-stream'; - +import { + compareMsgid, + extractCharset, + formatCharset, + generateHeader, + HEADERS, + updateContentTypeCharset, +} from "./shared.js"; +import type { + GetTextTranslation, + 
GetTextTranslations, + Size, + TranslationBuffers, + Translations, +} from "./types.js"; /** * Exposes general compiler function. Takes a translation @@ -13,9 +22,9 @@ import { Transform } from 'readable-stream'; * @return Compiled binary MO object */ export default function (table: GetTextTranslations): Buffer { - const compiler = new Compiler(table); + const compiler = new Compiler(table); - return compiler.compile(); + return compiler.compile(); } /** @@ -23,24 +32,29 @@ export default function (table: GetTextTranslations): Buffer { * @param {Record} headers the headers * @return {Record} The prepared header */ -function prepareMoHeaders (headers: Record): Record { - return Object.keys(headers).reduce((result: Record, key: string) => { - const lowerKey = key.toLowerCase(); - - if (HEADERS.has(lowerKey)) { - // POT-Creation-Date is removed in MO (see https://savannah.gnu.org/bugs/?49654) - if (lowerKey !== 'pot-creation-date') { - const value = HEADERS.get(lowerKey); - if (value) { - result[value] = headers[key]; - } - } - } else { - result[key] = headers[key]; - } - - return result; - }, /** @type {Record} */ ({})); +function prepareMoHeaders( + headers: Record, +): Record { + return Object.keys(headers).reduce( + (result: Record, key: string) => { + const lowerKey = key.toLowerCase(); + + if (HEADERS.has(lowerKey)) { + // POT-Creation-Date is removed in MO (see https://savannah.gnu.org/bugs/?49654) + if (lowerKey !== "pot-creation-date") { + const value = HEADERS.get(lowerKey); + if (value) { + result[value] = headers[key]; + } + } + } else { + result[key] = headers[key]; + } + + return result; + }, + {} as Record, + ); } /** @@ -48,224 +62,252 @@ function prepareMoHeaders (headers: Record): Record { - const context = translations[msgctxt]; - const msgs = Object.keys(context).reduce((result: Record, msgid) => { - const TranslationMsgstr = context[msgid].msgstr; - const hasTranslation = TranslationMsgstr.some(item => !!item.length); - - if (hasTranslation) { - 
result[msgid] = context[msgid]; - } - - return result; - },{}); - - if (Object.keys(msgs).length) { - result[msgctxt] = msgs; - } - - return result; - }, {} as Translations); +function prepareTranslations(translations: Translations): Translations { + return Object.keys(translations).reduce((result, msgctxt) => { + const context = translations[msgctxt]; + const msgs = Object.keys(context).reduce( + (result: Record, msgid) => { + const TranslationMsgstr = context[msgid].msgstr; + const hasTranslation = TranslationMsgstr.some((item) => !!item.length); + + if (hasTranslation) { + result[msgid] = context[msgid]; + } + + return result; + }, + {}, + ); + + if (Object.keys(msgs).length) { + result[msgctxt] = msgs; + } + + return result; + }, {} as Translations); } - /** * Creates a MO compiler object - * - * @param table Translation table as defined in the README */ -function Compiler (this: Compiler & Transform, table: GetTextTranslations) { - /** The translation table */ - this._table = { - charset: undefined, - translations: prepareTranslations(table?.translations ?? {}), - headers: prepareMoHeaders(table?.headers ?? {}) - } as GetTextTranslations; - - this._translations = []; - - this._writeFunc = 'writeUInt32LE'; - - this._handleCharset(); - - this.MAGIC = 0x950412de; +class Compiler { + _table: GetTextTranslations; + _translations: TranslationBuffers[]; + _writeFunc: string; + MAGIC: number; + + /** + * @param table Translation table as defined in the README + */ + constructor(table: GetTextTranslations) { + /** The translation table */ + this._table = { + charset: undefined, + translations: prepareTranslations(table?.translations ?? {}), + headers: prepareMoHeaders(table?.headers ?? 
{}), + } as GetTextTranslations; + + this._translations = []; + + this._writeFunc = "writeUInt32LE"; + + this._handleCharset(); + + this.MAGIC = 0x950412de; + } + + /** + * Handles header values, replaces or adds (if needed) a charset property + */ + _handleCharset() { + const headerValue = this._table.headers["Content-Type"] || "text/plain"; + const existingCharset = extractCharset(headerValue); + const charset = formatCharset( + this._table.charset || existingCharset || "utf-8", + ); + + this._table.charset = charset; + + if (existingCharset) { + this._table.headers["Content-Type"] = updateContentTypeCharset( + headerValue, + formatCharset(existingCharset), + ); + } else { + this._table.headers["Content-Type"] = headerValue.split(";")[0].trim(); + } + } + + /** + * Generates an array of translation strings + * in the form of [{msgid:..., msgstr: ...}] + * + */ + _generateList(): TranslationBuffers[] { + const list: TranslationBuffers[] = []; + const nodeCharset = ( + this._table.charset === "iso-8859-1" ? "latin1" : "utf8" + ) as BufferEncoding; + + if ("headers" in this._table) { + list.push({ + msgid: Buffer.alloc(0), + msgstr: Buffer.from(generateHeader(this._table.headers), nodeCharset), + } as any); + } + + Object.keys(this._table.translations).forEach((msgctxt) => { + if (typeof this._table.translations[msgctxt] !== "object") { + return; + } + + Object.keys(this._table.translations[msgctxt]).forEach((msgid) => { + if (typeof this._table.translations[msgctxt][msgid] !== "object") { + return; + } + + if (msgctxt === "" && msgid === "") { + return; + } + + const msgidPlural = + this._table.translations[msgctxt][msgid].msgid_plural; + let key = msgid; + + if (msgctxt) { + key = `${msgctxt}\u0004${key}`; + } + + if (msgidPlural) { + key += `\u0000${msgidPlural}`; + } + + const value = ([] as string[]) + .concat(this._table.translations[msgctxt][msgid].msgstr ?? 
[]) + .join("\u0000"); + + list.push({ + msgid: Buffer.from(key, nodeCharset), + msgstr: Buffer.from(value, nodeCharset), + } as any); + }); + }); + + return list; + } + + /** + * Calculate buffer size for the final binary object + * + * @param list An array of translation strings from _generateList + * @return Size data of {msgid, msgstr, total} + */ + _calculateSize(list: TranslationBuffers[]): Size { + let msgidLength = 0; + let msgstrLength = 0; + + list.forEach((translation) => { + msgidLength += translation.msgid.length + 1; // + extra 0x00 + msgstrLength += translation.msgstr.length + 1; // + extra 0x00 + }); + + const totalLength = + 4 + // magic number + 4 + // revision + 4 + // string count + 4 + // original string table offset + 4 + // translation string table offset + 4 + // hash table size + 4 + // hash table offset + (4 + 4) * list.length + // original string table + (4 + 4) * list.length + // translations string table + msgidLength + // originals + msgstrLength; // translations + + return { + msgid: msgidLength, + msgstr: msgstrLength, + total: totalLength, + }; + } + + /** + * Generates the binary MO object from the translation list + * + * @param list translation list + * @param size Byte size information + * @return Compiled MO object + */ + _build(list: TranslationBuffers[], size: Size): Buffer { + const returnBuffer = Buffer.alloc(size.total); + const writeFunc = this._writeFunc as "writeUInt32LE" | "writeUInt32BE"; + let curPosition = 0; + let i; + let len; + + // magic + returnBuffer[writeFunc](this.MAGIC, 0); + + // revision + returnBuffer[writeFunc](0, 4); + + // string count + returnBuffer[writeFunc](list.length, 8); + + // original string table offset + returnBuffer[writeFunc](28, 12); + + // translation string table offset + returnBuffer[writeFunc](28 + (4 + 4) * list.length, 16); + + // hash table size + returnBuffer[writeFunc](0, 20); + + // hash table offset + returnBuffer[writeFunc](28 + (4 + 4) * list.length * 2, 24); + + // Build 
original table + curPosition = 28 + 2 * (4 + 4) * list.length; + for (i = 0, len = list.length; i < len; i++) { + const msgidLength = list[i].msgid as unknown as Buffer; + msgidLength.copy(returnBuffer, curPosition); + returnBuffer.writeUInt32LE(list[i].msgid.length, 28 + i * 8); + returnBuffer.writeUInt32LE(curPosition, 28 + i * 8 + 4); + returnBuffer[curPosition + list[i].msgid.length] = 0x00; + curPosition += list[i].msgid.length + 1; + } + + // build translation table + for (i = 0, len = list.length; i < len; i++) { + const msgstrLength = list[i].msgstr as unknown as Buffer; + msgstrLength.copy(returnBuffer, curPosition); + returnBuffer.writeUInt32LE( + list[i].msgstr.length, + 28 + (4 + 4) * list.length + i * 8, + ); + returnBuffer.writeUInt32LE( + curPosition, + 28 + (4 + 4) * list.length + i * 8 + 4, + ); + returnBuffer[curPosition + list[i].msgstr.length] = 0x00; + curPosition += list[i].msgstr.length + 1; + } + + return returnBuffer; + } + + /** + * Compiles a translation object into a binary MO object + * + * @interface + * @return {Buffer} Compiled MO object + */ + compile(): Buffer { + const list = this._generateList(); + const size = this._calculateSize(list); + + list.sort(compareMsgid); + + return this._build(list, size); + } } - -/** - * Handles header values, replaces or adds (if needed) a charset property - */ -Compiler.prototype._handleCharset = function () { - const ct = contentType.parse(this._table.headers['Content-Type'] || 'text/plain'); - - const charset = formatCharset(this._table.charset || ct.parameters.charset || 'utf-8'); - - // clean up content-type charset independently using fallback if missing - if (ct.parameters.charset) { - ct.parameters.charset = formatCharset(ct.parameters.charset); - } - - this._table.charset = charset; - this._table.headers['Content-Type'] = contentType.format(ct); -}; - -/** - * Generates an array of translation strings - * in the form of [{msgid:..., msgstr: ...}] - * - */ -Compiler.prototype._generateList 
= function () { - /** @type {TranslationBuffers[]} */ - const list = []; - - if ('headers' in this._table) { - list.push({ - msgid: Buffer.alloc(0), - msgstr: encoding.convert(generateHeader(this._table.headers), this._table.charset) - }); - } - - Object.keys(this._table.translations).forEach(msgctxt => { - if (typeof this._table.translations[msgctxt] !== 'object') { - return; - } - - Object.keys(this._table.translations[msgctxt]).forEach(msgid => { - if (typeof this._table.translations[msgctxt][msgid] !== 'object') { - return; - } - - if (msgctxt === '' && msgid === '') { - return; - } - - const msgidPlural = this._table.translations[msgctxt][msgid].msgid_plural; - let key = msgid; - - if (msgctxt) { - key = msgctxt + '\u0004' + key; - } - - if (msgidPlural) { - key += '\u0000' + msgidPlural; - } - - const value = /** @type {string[]} */([]).concat(this._table.translations[msgctxt][msgid].msgstr ?? []).join('\u0000'); - - list.push({ - msgid: encoding.convert(key, this._table.charset), - msgstr: encoding.convert(value, this._table.charset) - }); - }); - }); - - return list; -}; - -/** - * Calculate buffer size for the final binary object - * - * @param list An array of translation strings from _generateList - * @return Size data of {msgid, msgstr, total} - */ -Compiler.prototype._calculateSize = function (list: TranslationBuffers[]): Size { - let msgidLength = 0; - let msgstrLength = 0; - - list.forEach(translation => { - msgidLength += translation.msgid.length + 1; // + extra 0x00 - msgstrLength += translation.msgstr.length + 1; // + extra 0x00 - }); - - const totalLength = 4 + // magic number - 4 + // revision - 4 + // string count - 4 + // original string table offset - 4 + // translation string table offset - 4 + // hash table size - 4 + // hash table offset - (4 + 4) * list.length + // original string table - (4 + 4) * list.length + // translations string table - msgidLength + // originals - msgstrLength; // translations - - return { - msgid: msgidLength, - 
msgstr: msgstrLength, - total: totalLength - }; -}; - -/** - * Generates the binary MO object from the translation list - * - * @param list translation list - * @param size Byte size information - * @return Compiled MO object - */ -Compiler.prototype._build = function (list: TranslationBuffers[], size: Size): Buffer { - const returnBuffer = Buffer.alloc(size.total); - let curPosition = 0; - let i; - let len; - - // magic - returnBuffer[this._writeFunc](this.MAGIC, 0); - - // revision - returnBuffer[this._writeFunc](0, 4); - - // string count - returnBuffer[this._writeFunc](list.length, 8); - - // original string table offset - returnBuffer[this._writeFunc](28, 12); - - // translation string table offset - returnBuffer[this._writeFunc](28 + (4 + 4) * list.length, 16); - - // hash table size - returnBuffer[this._writeFunc](0, 20); - - // hash table offset - returnBuffer[this._writeFunc](28 + (4 + 4) * list.length * 2, 24); - - // Build original table - curPosition = 28 + 2 * (4 + 4) * list.length; - for (i = 0, len = list.length; i < len; i++) { - const msgidLength = /** @type {Buffer} */(/** @type {unknown} */(list[i].msgid)); - msgidLength.copy(returnBuffer, curPosition); - returnBuffer.writeUInt32LE(list[i].msgid.length, 28 + i * 8); - returnBuffer.writeUInt32LE(curPosition, 28 + i * 8 + 4); - returnBuffer[curPosition + list[i].msgid.length] = 0x00; - curPosition += list[i].msgid.length + 1; - } - - // build translation table - for (i = 0, len = list.length; i < len; i++) { - const msgstrLength = /** @type {Buffer} */(/** @type {unknown} */(list[i].msgstr)); - msgstrLength.copy(returnBuffer, curPosition); - returnBuffer.writeUInt32LE(list[i].msgstr.length, 28 + (4 + 4) * list.length + i * 8); - returnBuffer.writeUInt32LE(curPosition, 28 + (4 + 4) * list.length + i * 8 + 4); - returnBuffer[curPosition + list[i].msgstr.length] = 0x00; - curPosition += list[i].msgstr.length + 1; - } - - return returnBuffer; -}; - -/** - * Compiles a translation object into a binary 
MO object - * - * @interface - * @return {Buffer} Compiled MO object - */ -Compiler.prototype.compile = function () { - const list = this._generateList(); - const size = this._calculateSize(list); - - list.sort(compareMsgid); - - return this._build(list, size); -}; diff --git a/src/moparser.ts b/src/moparser.ts index cd7cf7a..0a09512 100644 --- a/src/moparser.ts +++ b/src/moparser.ts @@ -1,7 +1,11 @@ -import encoding from 'encoding'; -import { formatCharset, parseHeader } from './shared.js'; +import { formatCharset, parseHeader } from "./shared.js"; -import type {Parser,GetTextTranslations,GetTextTranslation,Translations,WriteFunc,ReadFunc,} from './types.js'; +import type { + GetTextTranslation, + GetTextTranslations, + ReadFunc, + WriteFunc, +} from "./types.js"; /** * Parses a binary MO object into translation table @@ -9,213 +13,213 @@ import type {Parser,GetTextTranslations,GetTextTranslation,Translations,WriteFun * @param {Buffer} buffer Binary MO object * @param {string} [defaultCharset] Default charset to use */ -export default function (buffer: any, defaultCharset: string | undefined) { - const parser = new Parser(buffer, defaultCharset); +export default function (buffer: Buffer, defaultCharset: string | undefined) { + const parser = new Parser(buffer, defaultCharset); - return parser.parse(); + return parser.parse(); } /** * Creates a MO parser object. 
- * - * @constructor - * @param {Buffer|null} fileContents Binary MO object - * @param {string} [defaultCharset] Default charset to use */ -function Parser (this: Parser, fileContents: Buffer | null, defaultCharset: string = 'iso-8859-1') { - this._fileContents = fileContents; - - this._charset = defaultCharset; - - /** - * @type {WriteFunc} - */ - this._writeFunc = 'writeUInt32LE'; - - /** - * @type {ReadFunc} - */ - this._readFunc = 'readUInt32LE'; - - /** - * Translation table - * - * @type {GetTextTranslations} table Translation object - */ - this._table = { - charset: this._charset, - headers: {}, - translations: {} - }; - - /** - * Magic constant to check the endianness of the input file - */ - this.MAGIC = 0x950412de; +class Parser { + private _fileContents: Buffer | null; + private _charset: string; + private _writeFunc: WriteFunc; + private _readFunc: ReadFunc; + private readonly _table: GetTextTranslations; + private readonly MAGIC: number; + private _offsetOriginals?: number; + private _offsetTranslations?: number; + private _total?: number; + private _revision?: number; + + constructor( + fileContents: Buffer | null, + defaultCharset: string = "iso-8859-1", + ) { + this._fileContents = fileContents; + + this._charset = defaultCharset; + + this._writeFunc = "writeUInt32LE"; + + this._readFunc = "readUInt32LE"; + + /** + * Translation table + */ + this._table = { + charset: this._charset, + headers: {}, + translations: {}, + }; + + /** + * Magic constant to check the endianness of the input file + */ + this.MAGIC = 0x950412de; + } + + /** + * Checks if number values in the input file are in big- or little endian format. 
+ * + * @return {boolean} Return true if magic was detected + */ + _checkMagick(): boolean { + if (this._fileContents?.readUInt32LE(0) === this.MAGIC) { + this._readFunc = "readUInt32LE"; + this._writeFunc = "writeUInt32LE"; + + return true; + } else if (this._fileContents?.readUInt32BE(0) === this.MAGIC) { + this._readFunc = "readUInt32BE"; + this._writeFunc = "writeUInt32BE"; + + return true; + } + + return false; + } + + /** + * Read the original strings and translations from the input MO file. + * Use the first translation string in the file as the header. + */ + _loadTranslationTable() { + let offsetOriginals = this._offsetOriginals || 0; + let offsetTranslations = this._offsetTranslations || 0; + + // Return if there are no translations + if (!this._total) { + this._fileContents = null; + return; + } + + // Loop through all strings in the MO file + for (let i = 0; i < this._total; i++) { + if (this._fileContents === null) continue; + // msgid string + const length = this._fileContents[this._readFunc](offsetOriginals); + offsetOriginals += 4; + const position = this._fileContents[this._readFunc](offsetOriginals); + offsetOriginals += 4; + const msgidBuf = this._fileContents.subarray(position, position + length); + + // matching msgstr + const msgstrLength = + this._fileContents[this._readFunc](offsetTranslations); + offsetTranslations += 4; + const msgstrPosition = + this._fileContents[this._readFunc](offsetTranslations); + offsetTranslations += 4; + const msgstrBuf = this._fileContents.subarray( + msgstrPosition, + msgstrPosition + msgstrLength, + ); + + if (!i && !msgidBuf.toString()) { + this._handleCharset(msgstrBuf); + } + + const decoder = new TextDecoder(this._charset); + const msgid = decoder.decode(msgidBuf); + const msgstr = decoder.decode(msgstrBuf); + + this._addString(msgid, msgstr); + } + + // dump the file contents object + this._fileContents = null; + } + + /** + * Detects charset for MO strings from the header + * + * @param {Buffer} headers 
Header value + */ + _handleCharset(headers: Buffer) { + const headersStr = headers.toString(); + let match; + + if ((match = headersStr.match(/[; ]charset\s*=\s*([\w-]+)/i))) { + this._charset = this._table.charset = formatCharset( + match[1], + this._charset, + ); + } + + const decoder = new TextDecoder(this._charset); + const decodedHeaders = decoder.decode(headers); + + this._table.headers = parseHeader(decodedHeaders); + } + + /** + * Adds a translation to the translation object + * + * @param {string} msgidRaw Original string + * @param {string} msgstrRaw Translation for the original string + */ + _addString(msgidRaw: string, msgstrRaw: string) { + const translation: Partial = {}; + let msgctxt = ""; + let msgidPlural; + + const msgidArray = msgidRaw.split("\u0004"); + if (msgidArray.length > 1) { + msgctxt = msgidArray.shift() || ""; + translation.msgctxt = msgctxt; + } + msgidRaw = msgidArray.join("\u0004"); + + const parts = msgidRaw.split("\u0000"); + const msgid = parts.shift() || ""; + + translation.msgid = msgid; + + if ((msgidPlural = parts.join("\u0000"))) { + translation.msgid_plural = msgidPlural; + } + + const msgstr = msgstrRaw.split("\u0000"); + translation.msgstr = [...msgstr]; + + if (!this._table.translations[msgctxt]) { + this._table.translations[msgctxt] = {}; + } + + this._table.translations[msgctxt][msgid] = + translation as GetTextTranslation; + } + + /** + * Parses the MO object and returns translation table + * + * @return {GetTextTranslations | false} Translation table + */ + parse(): GetTextTranslations | false { + if (!this._checkMagick() || this._fileContents === null) { + return false; + } + + /** + * GetText revision nr, usually 0 + */ + this._revision = this._fileContents[this._readFunc](4); + + /** Total count of translated strings */ + this._total = this._fileContents[this._readFunc](8) ?? 
0; + + /** Offset position for original strings table */ + this._offsetOriginals = this._fileContents[this._readFunc](12); + + /** Offset position for translation strings table */ + this._offsetTranslations = this._fileContents[this._readFunc](16); + + // Load translations into this._translationTable + this._loadTranslationTable(); + + return this._table; + } } - -/** - * Checks if number values in the input file are in big- or little endian format. - * - * @return {boolean} Return true if magic was detected - */ -Parser.prototype._checkMagick = function () { - if (this._fileContents?.readUInt32LE(0) === this.MAGIC) { - this._readFunc = 'readUInt32LE'; - this._writeFunc = 'writeUInt32LE'; - - return true; - } else if (this._fileContents?.readUInt32BE(0) === this.MAGIC) { - this._readFunc = 'readUInt32BE'; - this._writeFunc = 'writeUInt32BE'; - - return true; - } - - return false; -}; - -/** - * Read the original strings and translations from the input MO file. - * Use the first translation string in the file as the header. 
- */ -Parser.prototype._loadTranslationTable = function () { - let offsetOriginals = this._offsetOriginals || 0; - let offsetTranslations = this._offsetTranslations || 0; - let position; - let length; - let msgid; - let msgstr; - - // Return if there are no translations - if (!this._total) { this._fileContents = null; return; } - - // Loop through all strings in the MO file - for (let i = 0; i < this._total; i++) { - if (this._fileContents === null) continue; - // msgid string - length = this._fileContents[this._readFunc](offsetOriginals); - offsetOriginals += 4; - position = this._fileContents[this._readFunc](offsetOriginals); - offsetOriginals += 4; - msgid = this._fileContents.subarray( - position, - position + length - ); - - // matching msgstr - length = this._fileContents[this._readFunc](offsetTranslations); - offsetTranslations += 4; - position = this._fileContents[this._readFunc](offsetTranslations); - offsetTranslations += 4; - msgstr = this._fileContents.subarray( - position, - position + length - ); - - if (!i && !msgid.toString()) { - this._handleCharset(msgstr); - } - - msgid = encoding.convert(msgid, 'utf-8', this._charset) - .toString('utf8'); - msgstr = encoding.convert(msgstr, 'utf-8', this._charset) - .toString('utf8'); - - this._addString(msgid, msgstr); - } - - // dump the file contents object - this._fileContents = null; -}; - -/** - * Detects charset for MO strings from the header - * - * @param {Buffer} headers Header value - */ -Parser.prototype._handleCharset = function (headers) { - const headersStr = headers.toString(); - let match; - - if ((match = headersStr.match(/[; ]charset\s*=\s*([\w-]+)/i))) { - this._charset = this._table.charset = formatCharset(match[1], this._charset); - } - - headers = encoding.convert(headers, 'utf-8', this._charset); - - this._table.headers = parseHeader(headers.toString('utf8')); -}; - -/** - * Adds a translation to the translation object - * - * @param {string} msgidRaw Original string - * @param {string} 
msgstrRaw Translation for the original string - */ -Parser.prototype._addString = function (msgidRaw, msgstrRaw) { - const translation = {}; - let msgctxt = ''; - let msgidPlural; - - const msgidArray = msgidRaw.split('\u0004'); - if (msgidArray.length > 1) { - msgctxt = msgidArray.shift() || ''; - translation.msgctxt = msgctxt; - } - msgidRaw = msgidArray.join('\u0004'); - - const parts = msgidRaw.split('\u0000'); - const msgid = parts.shift() || ''; - - translation.msgid = msgid; - - if ((msgidPlural = parts.join('\u0000'))) { - translation.msgid_plural = msgidPlural; - } - - const msgstr = msgstrRaw.split('\u0000'); - translation.msgstr = [...msgstr]; - - if (!this._table.translations[msgctxt]) { - this._table.translations[msgctxt] = {}; - } - - this._table.translations[msgctxt][msgid] = translation; -}; - -/** - * Parses the MO object and returns translation table - * - * @return {GetTextTranslations | false} Translation table - */ -Parser.prototype.parse = function () { - if (!this._checkMagick() || this._fileContents === null) { - return false; - } - - /** - * GetText revision nr, usually 0 - */ - this._revision = this._fileContents[this._readFunc](4); - - /** - * @type {number} Total count of translated strings - */ - this._total = this._fileContents[this._readFunc](8) ?? 
0; - - /** - * @type {number} Offset position for original strings table - */ - this._offsetOriginals = this._fileContents[this._readFunc](12); - - /** - * @type {number} Offset position for translation strings table - */ - this._offsetTranslations = this._fileContents[this._readFunc](16); - - // Load translations into this._translationTable - this._loadTranslationTable(); - - return this._table; -}; diff --git a/src/pocompiler.ts b/src/pocompiler.ts index dd842e8..0bbd248 100644 --- a/src/pocompiler.ts +++ b/src/pocompiler.ts @@ -1,11 +1,24 @@ -import { HEADERS, foldLine, compareMsgid, formatCharset, generateHeader } from './shared.js'; -import contentType from 'content-type'; - -import encoding from 'encoding'; -import {Compiler, GetTextComment, GetTextTranslation, GetTextTranslations, ParserOptions, Translations} from "./types.js"; - - -type PreOutputTranslation = Partial> & { msgstr?: string | string[]; }; +import { + compareMsgid, + extractCharset, + foldLine, + formatCharset, + generateHeader, + HEADERS, + updateContentTypeCharset, +} from "./shared.js"; + +import type { + GetTextComment, + GetTextTranslation, + GetTextTranslations, + ParserOptions, + Translations, +} from "./types.js"; + +type PreOutputTranslation = Partial> & { + msgstr?: string | string[]; +}; /** * Exposes general compiler function. 
Takes a translation @@ -15,10 +28,13 @@ type PreOutputTranslation = Partial> & { msgs * @param options Options * @return The compiled PO object */ -export default function (table: GetTextTranslations, options: ParserOptions): Buffer { - const compiler = new Compiler(table, options); +export default function ( + table: GetTextTranslations, + options: ParserOptions, +): Buffer { + const compiler = new Compiler(table, options); - return compiler.compile(); + return compiler.compile(); } /** @@ -27,290 +43,339 @@ export default function (table: GetTextTranslations, options: ParserOptions): Bu * @param headersRaw the headers to prepare * @returns the headers in the lowercase format */ -export function preparePoHeaders (headersRaw: Record): Record { - return Object.keys(headersRaw).reduce((result: Record, key) => { - const lowerKey = key.toLowerCase(); - const value = HEADERS.get(lowerKey); - - if (typeof value === 'string') { - result[value] = headersRaw[key]; - } else { - result[key] = headersRaw[key]; - } - - return result; - }, {}); +export function preparePoHeaders( + headersRaw: Record, +): Record { + return Object.keys(headersRaw).reduce( + (result: Record, key) => { + const lowerKey = key.toLowerCase(); + const value = HEADERS.get(lowerKey); + + if (typeof value === "string") { + result[value] = headersRaw[key]; + } else { + result[key] = headersRaw[key]; + } + + return result; + }, + {}, + ); } /** * Creates a PO compiler object. - * - * @constructor - * @param table Translation table to be compiled - * @param options Options */ -function Compiler (this: Compiler, table: GetTextTranslations, options: ParserOptions) { - this._table = table ?? 
{ - headers: {}, - charset: undefined, - translations: {} - }; - this._table.translations = { ...this._table.translations }; - - /** _options The Options object */ - this._options = { - foldLength: 76, - escapeCharacters: true, - sort: false, - eol: '\n', - ...options - }; - - this._table.headers = preparePoHeaders(this._table.headers ?? {}); - - this._translations = []; - - this._handleCharset(); +class Compiler { + _table: GetTextTranslations; + _options: ParserOptions; + _translations: PreOutputTranslation[]; + + /** + * @param table Translation table to be compiled + * @param options Options + */ + constructor(table: GetTextTranslations, options: ParserOptions) { + this._table = table ?? { + headers: {}, + charset: undefined, + translations: {}, + }; + this._table.translations = { ...this._table.translations }; + + /** _options The Options object */ + this._options = { + foldLength: 76, + escapeCharacters: true, + sort: false, + eol: "\n", + ...options, + }; + + this._table.headers = preparePoHeaders(this._table.headers ?? {}); + + this._translations = []; + + this._handleCharset(); + } + + /** + * Converts a comment object to a comment string. The comment object is + * in the form of {translator: '', reference: '', extracted: '', flag: '', previous: ''} + * + * @param comments A comments object + * @return A comment string for the PO file + */ + _drawComments(comments: GetTextComment): string { + /** @var {Record[]} lines The comment lines to be returned */ + const lines: string[] = []; + /** @var {{key: GetTextComment, prefix: string}} type The comment type */ + const types = [ + { + key: "translator", + prefix: "# ", + }, + { + key: "reference", + prefix: "#: ", + }, + { + key: "extracted", + prefix: "#. 
", + }, + { + key: "flag", + prefix: "#, ", + }, + { + key: "previous", + prefix: "#| ", + }, + ]; + + for (const type of types) { + /** @var {string} value The comment type */ + const value = type.key; + + // ignore empty comments + if (!(value in comments)) { + continue; + } + + const commentLines = ( + comments[value as keyof GetTextComment] as string + ).split(/\r?\n|\r/); + + // add comment lines to comments Array + for (const line of commentLines) { + lines.push(`${type.prefix}${line}`); + } + } + + return lines.length ? lines.join(this._options.eol) : ""; + } + + /** + * Builds a PO string for a single translation object + * + * @param block Translation object + * @param override Properties of this object will override `block` properties + * @param obsolete Block is obsolete and must be commented out + * @return Translation string for a single object + */ + _drawBlock( + block: PreOutputTranslation, + override: Partial = {}, + obsolete: boolean = false, + ): string { + const response = []; + const msgctxt = override.msgctxt || block.msgctxt; + const msgid = override.msgid || block.msgid; + const msgidPlural = override.msgid_plural || block.msgid_plural; + const msgstrData = override.msgstr || block.msgstr; + const msgstr = Array.isArray(msgstrData) ? 
[...msgstrData] : [msgstrData]; + + const comments: GetTextComment | undefined = + override.comments || block.comments; + if (comments) { + const drawnComments = this._drawComments(comments); + if (drawnComments) { + response.push(drawnComments); + } + } + + if (msgctxt) { + response.push(this._addPOString("msgctxt", msgctxt, obsolete)); + } + + response.push(this._addPOString("msgid", msgid || "", obsolete)); + + if (msgidPlural) { + response.push(this._addPOString("msgid_plural", msgidPlural, obsolete)); + + msgstr.forEach((msgstr, i) => { + response.push( + this._addPOString(`msgstr[${i}]`, msgstr || "", obsolete), + ); + }); + } else { + response.push(this._addPOString("msgstr", msgstr[0] || "", obsolete)); + } + + return response.join(this._options.eol); + } + + /** + * Escapes and joins a key and a value for the PO string + * + * @param key Key name + * @param value Key value + * @param obsolete PO string is obsolete and must be commented out + * @return Joined and escaped key-value pair + */ + _addPOString( + key: string = "", + value: string = "", + obsolete: boolean = false, + ): string { + key = key.toString(); + if (obsolete) { + key = "#~ " + key; + } + + let { foldLength, eol, escapeCharacters } = this._options; + + // escape newlines and quotes + if (escapeCharacters) { + value = value + .toString() + .replace(/\\/g, "\\\\") + .replace(/"/g, '\\"') + .replace(/\t/g, "\\t") + .replace(/\r/g, "\\r"); + } + + value = value.replace(/\n/g, "\\n"); // need to escape new line characters regardless + + let lines = [value]; + + if (obsolete) { + eol = eol + "#~ "; + } + + if (foldLength && foldLength > 0) { + lines = foldLine(value, foldLength); + } else { + // split only on new lines + if (escapeCharacters) { + lines = value.split(/\\n/g); + for (let i = 0; i < lines.length - 1; i++) { + lines[i] = `${lines[i]}\\n`; + } + if (lines.length && lines[lines.length - 1] === "") { + lines.splice(-1, 1); + } + } + } + + if (lines.length < 2) { + return `${key} 
"${lines.shift() || ""}"`; + } + + return `${key} ""${eol}"${lines.join(`"${eol}"`)}"`; + } + + /** + * Handles header values, replaces or adds (if needed) a charset property + */ + _handleCharset() { + if (this._table.headers) { + const headerValue = this._table.headers["Content-Type"] || "text/plain"; + const existingCharset = extractCharset(headerValue); + const charset = formatCharset( + this._table.charset || existingCharset || "utf-8", + ); + + this._table.charset = charset; + + if (existingCharset) { + this._table.headers["Content-Type"] = updateContentTypeCharset( + headerValue, + formatCharset(existingCharset), + ); + } else { + this._table.headers["Content-Type"] = headerValue.split(";")[0].trim(); + } + } + } + + /** + * Flatten and sort translations object + * + * @param section Object to be prepared (translations or obsolete) + * @returns Prepared array + */ + _prepareSection(section: Translations): PreOutputTranslation[] | undefined { + /** response Prepared array */ + let response: GetTextTranslation[] = []; + + for (const msgctxt in section) { + if (typeof section[msgctxt] !== "object") { + return; + } + + for (const msgid of Object.keys(section[msgctxt])) { + if (typeof section[msgctxt][msgid] !== "object") { + continue; + } + + if (msgctxt === "" && msgid === "") { + continue; + } + + response.push(section[msgctxt][msgid]); + } + } + + const { sort } = this._options; + + if (sort) { + if (typeof sort === "function") { + response = response.sort(sort); + } else { + response = response.sort(compareMsgid); + } + } + + return response; + } + + /** + * Compiles a translation object into a PO object + * + * @interface + * @return Compiled a PO object + */ + compile(): Buffer { + if (!this._table.translations) { + throw new Error("No translations found"); + } + + /** headerBlock */ + const headerBlock: PreOutputTranslation = + (this._table.translations[""] && this._table.translations[""][""]) || {}; + + /** Translations */ + const translations = 
this._prepareSection(this._table.translations); + let response: (PreOutputTranslation | string)[] = + translations?.map((t: PreOutputTranslation) => this._drawBlock(t)) || []; + + if (typeof this._table.obsolete === "object") { + const obsolete = this._prepareSection(this._table.obsolete); + if (obsolete && obsolete.length) { + response = (response || []).concat( + obsolete.map((r: PreOutputTranslation) => + this._drawBlock(r, {}, true), + ), + ); + } + } + + const eol = this._options.eol ?? "\n"; + + response?.unshift( + this._drawBlock(headerBlock, { + msgstr: generateHeader(this._table.headers), + }), + ); + + if (this._table.charset === "utf-8" || this._table.charset === "ascii") { + return Buffer.from(response?.join(eol + eol) + eol, "utf-8"); + } + + const nodeCharset = ( + this._table.charset === "iso-8859-1" ? "latin1" : "utf8" + ) as BufferEncoding; + return Buffer.from(response?.join(eol + eol) + eol, nodeCharset); + } } - -/** - * Converts a comment object to a comment string. The comment object is - * in the form of {translator: '', reference: '', extracted: '', flag: '', previous: ''} - * - * @param {Record} comments A comments object - * @return {string} A comment string for the PO file - */ -Compiler.prototype._drawComments = function (comments: { [x: string]: string; }): string { - /** @var {Record[]} lines The comment lines to be returned */ - const lines = []; - /** @var {{key: GetTextComment, prefix: string}} type The comment type */ - const types = [{ - key: 'translator', - prefix: '# ' - }, { - key: 'reference', - prefix: '#: ' - }, { - key: 'extracted', - prefix: '#. 
' - }, { - key: 'flag', - prefix: '#, ' - }, { - key: 'previous', - prefix: '#| ' - }]; - - for (const type of types) { - /** @var {string} value The comment type */ - const value = type.key; - - // ignore empty comments - if (!(value in comments)) { continue; } - - const commentLines = comments[value].split(/\r?\n|\r/); - - // add comment lines to comments Array - for (const line of commentLines) { - lines.push(`${type.prefix}${line}`); - } - } - - return lines.length ? lines.join(this._options.eol) : ''; -}; - -/** - * Builds a PO string for a single translation object - * - * @param block Translation object - * @param override Properties of this object will override `block` properties - * @param obsolete Block is obsolete and must be commented out - * @return Translation string for a single object - */ -Compiler.prototype._drawBlock = function (block: PreOutputTranslation, override: Partial = {}, obsolete: boolean = false): string { - const response = []; - const msgctxt = override.msgctxt || block.msgctxt; - const msgid = override.msgid || block.msgid; - const msgidPlural = override.msgid_plural || block.msgid_plural; - const msgstrData = override.msgstr || block.msgstr; - const msgstr = Array.isArray(msgstrData) ? 
[...msgstrData] : [msgstrData]; - - const comments: GetTextComment|undefined = override.comments || block.comments; - if (comments) { - const drawnComments = this._drawComments(comments); - if (drawnComments) { - response.push(drawnComments); - } - } - - if (msgctxt) { - response.push(this._addPOString('msgctxt', msgctxt, obsolete)); - } - - response.push(this._addPOString('msgid', msgid || '', obsolete)); - - if (msgidPlural) { - response.push(this._addPOString('msgid_plural', msgidPlural, obsolete)); - - msgstr.forEach((msgstr, i) => { - response.push(this._addPOString(`msgstr[${i}]`, msgstr || '', obsolete)); - }); - } else { - response.push(this._addPOString('msgstr', msgstr[0] || '', obsolete)); - } - - return response.join(this._options.eol); -}; - -/** - * Escapes and joins a key and a value for the PO string - * - * @param key Key name - * @param value Key value - * @param obsolete PO string is obsolete and must be commented out - * @return Joined and escaped key-value pair - */ -Compiler.prototype._addPOString = function (key: string = '', value: string = '', obsolete: boolean = false): string { - key = key.toString(); - if (obsolete) { - key = '#~ ' + key; - } - - let { foldLength, eol, escapeCharacters } = this._options; - - // escape newlines and quotes - if (escapeCharacters) { - value = value.toString() - .replace(/\\/g, '\\\\') - .replace(/"/g, '\\"') - .replace(/\t/g, '\\t') - .replace(/\r/g, '\\r'); - } - - value = value.replace(/\n/g, '\\n'); // need to escape new line characters regardless - - let lines = [value]; - - if (obsolete) { - eol = eol + '#~ '; - } - - if (foldLength && foldLength > 0) { - lines = foldLine(value, foldLength); - } else { - // split only on new lines - if (escapeCharacters) { - lines = value.split(/\\n/g); - for (let i = 0; i < lines.length - 1; i++) { - lines[i] = `${lines[i]}\\n`; - } - if (lines.length && lines[lines.length - 1] === '') { - lines.splice(-1, 1); - } - } - } - - if (lines.length < 2) { - return `${key} 
"${lines.shift() || ''}"`; - } - - return `${key} ""${eol}"${lines.join(`"${eol}"`)}"`; -}; - -/** - * Handles header values, replaces or adds (if needed) a charset property - */ -Compiler.prototype._handleCharset = function () { - if (this._table.headers) { - const ct = contentType.parse(this._table.headers['Content-Type'] || 'text/plain'); - - const charset = formatCharset(this._table.charset || ct.parameters.charset || 'utf-8'); - - // clean up content-type charset independently using fallback if missing - if (ct.parameters.charset) { - ct.parameters.charset = formatCharset(ct.parameters.charset); - } - - this._table.charset = charset; - this._table.headers['Content-Type'] = contentType.format(ct); - } -}; - -/** - * Flatten and sort translations object - * - * @param section Object to be prepared (translations or obsolete) - * @returns Prepared array - */ -Compiler.prototype._prepareSection = function (section: Translations): PreOutputTranslation[] | undefined { - /** response Prepared array */ - let response: GetTextTranslation[] = []; - - for (const msgctxt in section) { - if (typeof section[msgctxt] !== 'object') { - return; - } - - for (const msgid of Object.keys(section[msgctxt])) { - if (typeof section[msgctxt][msgid] !== 'object') { - continue; - } - - if (msgctxt === '' && msgid === '') { - continue; - } - - response.push(section[msgctxt][msgid]); - } - } - - const { sort } = this._options; - - if (sort) { - if (typeof sort === 'function') { - response = response.sort(sort); - } else { - response = response.sort(compareMsgid); - } - } - - return response; -}; - -/** - * Compiles a translation object into a PO object - * - * @interface - * @return Compiled a PO object - */ -Compiler.prototype.compile = function (): Buffer { - if (!this._table.translations) { - throw new Error('No translations found'); - } - - /** headerBlock */ - const headerBlock: PreOutputTranslation = (this._table.translations[''] && this._table.translations['']['']) || {}; - - /** 
Translations */ - const translations = this._prepareSection(this._table.translations); - let response: (PreOutputTranslation|string)[] = (translations?.map((t: unknown[]) => this._drawBlock(t))); - - if (typeof this._table.obsolete === 'object') { - const obsolete = this._prepareSection(this._table.obsolete); - if (obsolete && obsolete.length) { - response = response?.concat(obsolete.map((r: unknown[]) => this._drawBlock(r, {}, true))); - } - } - - const eol = this._options.eol ?? '\n'; - - response?.unshift(this._drawBlock(headerBlock, { - msgstr: generateHeader(this._table.headers) - })); - - if (this._table.charset === 'utf-8' || this._table.charset === 'ascii') { - return Buffer.from(response?.join(eol + eol) + eol, 'utf-8'); - } - - return encoding.convert(response?.join(eol + eol) + eol, this._table.charset); -}; diff --git a/src/poparser.ts b/src/poparser.ts index 7bb997b..53a0b72 100644 --- a/src/poparser.ts +++ b/src/poparser.ts @@ -1,23 +1,32 @@ -import encoding from 'encoding'; -import { formatCharset, parseHeader, parseNPluralFromHeadersSafely, ParserError } from './shared.js'; -import { Transform, TransformOptions } from 'readable-stream'; -import util from 'util'; -import {PoParserTransform, GetTextComment, GetTextTranslation, GetTextTranslations, ParserOptions, Translations} from "./types.js"; +import { Transform, type TransformOptions } from "node:stream"; +import { + formatCharset, + ParserError, + parseHeader, + parseNPluralFromHeadersSafely, +} from "./shared.js"; +import type { + GetTextComment, + GetTextTranslation, + GetTextTranslations, + ParserOptions, + Translations, +} from "./types.js"; /** Po parser options*/ -type Options = { defaultCharset?: string; validation?: boolean; }; +type Options = { defaultCharset?: string; validation?: boolean }; /** A single Node object in the PO file */ export interface Node { - key?: string; - type?: number; - value: string; - quote?: string; - obsolete?: boolean; - comments?: GetTextComment | undefined; + 
key?: string; + type?: number; + value: string; + quote?: string; + obsolete?: boolean; + comments?: GetTextComment | undefined; } -type DoneCallback = (...args: unknown[]) => void +type DoneCallback = (...args: unknown[]) => void; /** * Parses a PO object into translation table @@ -25,10 +34,10 @@ type DoneCallback = (...args: unknown[]) => void * @param input PO object * @param [options] Optional options with defaultCharset and validation */ -export function poParse (input: string | Buffer, options: Options = {}) { - const parser = new Parser(input, options); +export function poParse(input: string | Buffer, options: Options = {}) { + const parser = new Parser(input, options); - return parser.parse(); + return parser.parse(); } /** @@ -37,32 +46,11 @@ export function poParse (input: string | Buffer, options: Options = {}) { * @param [options] Optional options with defaultCharset and validation * @param [transformOptions] Optional stream options */ -export function poStream (options: Options = {}, transformOptions: TransformOptions = {}) { - return new PoParserTransform(options, transformOptions); -} - -type Parser = { - _validation: boolean; - _charset: string; - _lex: Node[]; - _escaped: boolean; - _node: Partial; - _state: number; - _lineNumber: number; - _fileContents: string | Buffer; - _handleCharset: (buf: string | Buffer) => string; - states: { - none: number; - header: number; - msgctxt: number; - msgid: number; - msgid_plural: number; - msgstr: number; - msgstr_plural: number; - obsolete: number; - comment: number; - eol: number; - } +export function poStream( + options: Options = {}, + transformOptions: TransformOptions = {}, +) { + return new PoParserTransform(options, transformOptions); } /** @@ -72,499 +60,538 @@ type Parser = { * @param fileContents PO object * @param options Options with defaultCharset and validation */ -function Parser (this: Parser, fileContents: string | Buffer, {defaultCharset = 'iso-8859-1', validation = false}: Options) { - 
this._validation = validation; - this._charset = defaultCharset; - - this._lex = []; - this._escaped = false; - this._node = {}; - this._state = this.states.none; - this._lineNumber = 1; - - if (typeof fileContents === 'string') { - this._charset = 'utf-8'; - this._fileContents = fileContents; - } else { - this._fileContents = this._handleCharset(fileContents); - } +class Parser { + _validation: boolean; + _charset: string; + _lex: Node[]; + _escaped: boolean; + _node: Partial; + _state: number; + _lineNumber: number; + _fileContents: string | Buffer; + + states = { + none: 0x01, + header: 0x02, + msgctxt: 0x03, + msgid: 0x04, + msgid_plural: 0x05, + msgstr: 0x06, + msgstr_plural: 0x07, + obsolete: 0x08, + comment: 0x09, + eol: 0x0a, + }; + + constructor( + fileContents: string | Buffer, + { defaultCharset = "iso-8859-1", validation = false }: Options, + ) { + this._validation = validation; + this._charset = defaultCharset; + + this._lex = []; + this._escaped = false; + this._node = {}; + this._state = this.types.none; + this._lineNumber = 1; + + if (typeof fileContents === "string") { + this._charset = "utf-8"; + this._fileContents = fileContents; + } else { + this._fileContents = this._handleCharset(fileContents); + } + } + + /** + * Parses the PO object and returns translation table + * + * @return {Object} Translation table + */ + parse(): GetTextTranslations { + this._lexer(this._fileContents.toString()); + + return this._finalize(this._lex); + } + + /** + * Detects charset for PO strings from the header + * + * @param buf Header value + */ + _handleCharset(buf: string | Buffer = "") { + const str = buf.toString(); + let pos; + let headers = ""; + let match; + + if ((pos = str.search(/^\s*msgid/im)) >= 0) { + pos = pos + str.substring(pos + 5).search(/^\s*(msgid|msgctxt)/im); + headers = str.substring(0, pos >= 0 ? 
pos + 5 : str.length); + } + + if ( + (match = headers.match(/[; ]charset\s*=\s*([\w-]+)(?:[\s;]|\\n)*"\s*$/im)) + ) { + this._charset = formatCharset(match[1], this._charset); + } + + if (this._charset === "utf-8") { + return str; + } + + return this._toString(buf); + } + + /** + * Converts buffer to string + * @param buf Buffer to convert + * @return Converted string + */ + _toString(buf: string | Buffer): string { + const decoder = new TextDecoder(this._charset); + return decoder.decode(typeof buf === "string" ? Buffer.from(buf) : buf); + } + + /** + * Value types for lexer + */ + types = { + none: 0x01, + comments: 0x02, + key: 0x03, + string: 0x04, + obsolete: 0x08, + }; + + /** + * String matches for lexer + */ + symbols = { + whitespace: /\s/, + key: /[\w\-[\]]/, + keyNames: /^(?:msgctxt|msgid(?:_plural)?|msgstr(?:\[\d+])?)$/, + }; + /** + * Token parser. Parsed state can be found from this._lex + * + * @param chunk String + * @throws {ParserError} Throws a SyntaxError if the value doesn't match the key names. 
+ */ + _lexer(chunk: string) { + let chr; + + for (let i = 0, len = chunk.length; i < len; i++) { + chr = chunk.charAt(i); + + if (chr === "\n") { + this._lineNumber += 1; + } + + switch (this._state) { + case this.states.none: + case this.states.obsolete: + if (chr === '"' || chr === "'") { + this._node = { + type: this.types.string, + value: "", + quote: chr, + }; + this._lex.push(this._node as Node); + this._state = this.types.string; + } else if (chr === "#") { + this._node = { + type: this.types.comments, + value: "", + }; + this._lex.push(this._node as Node); + this._state = this.types.comments; + } else if (!chr.match(this.symbols.whitespace)) { + this._node = { + type: this.types.key, + value: chr, + }; + if (this._state === this.states.obsolete) { + this._node.obsolete = true; + } + this._lex.push(this._node as Node); + this._state = this.types.key; + } + break; + case this.types.comments: + if (chr === "\n") { + this._state = this.types.none; + } else if (chr === "~" && this._node.value === "") { + this._node.value += chr; + this._state = this.types.obsolete; + } else if (chr !== "\r") { + this._node.value += chr; + } + break; + case this.types.string: + if (this._escaped) { + switch (chr) { + case "t": + this._node.value += "\t"; + break; + case "n": + this._node.value += "\n"; + break; + case "r": + this._node.value += "\r"; + break; + default: + this._node.value += chr; + } + this._escaped = false; + } else { + if (chr === this._node.quote) { + this._state = this.types.none; + } else if (chr === "\\") { + this._escaped = true; + break; + } else { + this._node.value += chr; + } + this._escaped = false; + } + break; + case this.types.key: + if (!chr.match(this.symbols.key)) { + if (!this._node.value?.match(this.symbols.keyNames)) { + throw new ParserError( + `Error parsing PO data: Invalid key name "${this._node.value}" at line ${this._lineNumber}. 
This can be caused by an unescaped quote character in a msgid or msgstr value.`, + this._lineNumber, + ); + } + this._state = this.types.none; + i--; + } else { + this._node.value += chr; + } + break; + } + } + } + + /** + * Join multi line strings + * + * @param tokens Parsed tokens + * @return Parsed tokens, with multi line strings joined into one + */ + _joinStringValues(tokens: Node[]): Node[] { + const response: Node[] = []; + let lastNode; + + for (let i = 0, len = tokens.length; i < len; i++) { + if ( + lastNode && + tokens[i].type === this.types.string && + lastNode.type === this.types.string + ) { + lastNode.value += tokens[i].value ?? ""; + } else if ( + lastNode && + tokens[i].type === this.types.comments && + lastNode.type === this.types.comments + ) { + lastNode.value += "\n" + tokens[i].value; + } else { + response.push(tokens[i]); + lastNode = tokens[i]; + } + } + + return response; + } + + /** + * Parse comments into separate comment blocks + * + * @param tokens Parsed tokens + */ + _parseComments(tokens: Node[]) { + for (const node of tokens) { + if (!node || node.type !== this.types.comments) { + continue; + } + + const comment: { + [key: string]: string[]; + } = { + translator: [], + extracted: [], + reference: [], + flag: [], + previous: [], + }; + + const lines: string[] = (node.value || "").split(/\n/); + + for (const line of lines) { + switch (line.charAt(0) || "") { + case ":": + comment.reference.push(line.substring(1).trim()); + break; + case ".": + comment.extracted.push(line.substring(1).replace(/^\s+/, "")); + break; + case ",": + comment.flag.push(line.substring(1).replace(/^\s+/, "")); + break; + case "|": + comment.previous.push(line.substring(1).replace(/^\s+/, "")); + break; + case "~": + break; + default: + comment.translator.push(line.replace(/^\s+/, "")); + } + } + + const finalToken = node as unknown as Omit & { + value: Record; + }; + + finalToken.value = {}; + + for (const key of Object.keys(comment)) { + if (key && 
comment[key]?.length) { + finalToken.value[key] = comment[key].join("\n"); + } + } + } + } + + /** + * Join gettext keys with values + * + * @param tokens - Parsed tokens containing key-value pairs + * @return An array of Nodes representing joined tokens + */ + _handleKeys(tokens: (Node & { value?: string })[]): Node[] { + const response: Node[] = []; + let lastNode: Partial & { comments?: string } = {}; + + for (let i = 0, len = tokens.length; i < len; i++) { + if (tokens[i].type === this.types.key) { + lastNode = { + key: tokens[i].value, + }; + if (tokens[i].obsolete) { + lastNode.obsolete = true; + } + if (i && tokens[i - 1].type === this.types.comments) { + lastNode.comments = tokens[i - 1].value; + } + lastNode.value = ""; + response.push(lastNode as Node); + } else if (tokens[i].type === this.types.string && lastNode) { + lastNode.value += tokens[i].value; + } + } + + return response; + } + + /** + * Separate different values into individual translation objects + * + * @param {Node[]} tokens Parsed tokens + * @return {GetTextTranslation[]} Tokens + */ + _handleValues(tokens: Node[]): GetTextTranslation[] { + const response = []; + /** Translation object */ + let lastNode: Partial = {}; + let curContext: string | undefined; + let curComments: GetTextComment | undefined; + + for (let i = 0, len = tokens.length; i < len; i++) { + const tokenKey = tokens[i].key; + if (!tokenKey) continue; + if (tokenKey.toLowerCase() === "msgctxt") { + curContext = tokens[i].value; + curComments = tokens[i].comments; + } else if (tokenKey.toLowerCase() === "msgid") { + lastNode = { + msgid: tokens[i].value, + msgstr: [], + }; + if (tokens[i].obsolete) { + lastNode.obsolete = true; + } + + if (curContext) { + lastNode.msgctxt = curContext; + } + + if (curComments) { + lastNode.comments = curComments; + } + + if (tokens[i].comments && !lastNode.comments) { + lastNode.comments = tokens[i].comments; + } + + curContext = undefined; + curComments = undefined; + 
response.push(lastNode); + } else if (tokenKey.toLowerCase() === "msgid_plural") { + if (lastNode) { + if (this._validation && "msgid_plural" in lastNode) { + throw new SyntaxError( + `Multiple msgid_plural error: entry "${lastNode.msgid}" in "${lastNode.msgctxt || ""}" context has multiple msgid_plural declarations.`, + ); + } + + lastNode.msgid_plural = tokens[i].value; + } + + if (tokens[i].comments && !lastNode.comments) { + lastNode.comments = tokens[i].comments; + } + + curContext = undefined; + curComments = undefined; + } else if (tokenKey.substring(0, 6).toLowerCase() === "msgstr") { + if (lastNode) { + const strData = lastNode.msgstr || []; + const tokenValue = tokens[i].value; + lastNode.msgstr = strData.concat(tokenValue); + } + + if (tokens[i].comments && !lastNode.comments) { + lastNode.comments = tokens[i].comments; + } + + curContext = undefined; + curComments = undefined; + } + } + + return response as GetTextTranslation[]; + } + + /** + * Validate token + * + * @param token Parsed token + * @param translations Translation table + * @param msgctxt Message entry context + * @param nplurals Number of expected plural forms + * @throws {Error} Will throw an error if token validation fails + */ + _validateToken( + { + msgid = "", + msgid_plural = "", // eslint-disable-line camelcase + msgstr = [], + }: GetTextTranslation, + translations: Translations, + msgctxt: string, + nplurals: number, + ) { + if (msgid in translations[msgctxt]) { + throw new SyntaxError( + `Duplicate msgid error: entry "${msgid}" in "${msgctxt}" context has already been declared.`, + ); + // eslint-disable-next-line camelcase + } else if (msgid_plural && msgstr.length !== nplurals) { + // eslint-disable-next-line camelcase + throw new RangeError( + `Plural forms range error: Expected to find ${nplurals} forms but got ${msgstr.length} for entry "${msgid_plural}" in "${msgctxt}" context.`, + ); + // eslint-disable-next-line camelcase + } else if (!msgid_plural && msgstr.length !== 1) 
{ + throw new RangeError( + `Translation string range error: Extected 1 msgstr definitions associated with "${msgid}" in "${msgctxt}" context, found ${msgstr.length}.`, + ); + } + } + + /** + * Compose a translation table from tokens object + * + * @param {GetTextTranslation[]} tokens Parsed tokens + * @return {GetTextTranslations} Translation table + */ + _normalize(tokens: GetTextTranslation[]): GetTextTranslations { + /** + * Translation table to be returned + */ + const table: Omit & + Partial> = { + charset: this._charset, + headers: undefined, + translations: {}, + }; + let nplurals = 1; + + for (let i = 0, len = tokens.length; i < len; i++) { + const msgctxt: string = tokens[i].msgctxt || ""; + + if (tokens[i].obsolete) { + if (!table.obsolete) { + table.obsolete = {}; + } + + if (!table.obsolete[msgctxt]) { + table.obsolete[msgctxt] = {}; + } + + delete tokens[i].obsolete; + + table.obsolete[msgctxt][tokens[i].msgid] = tokens[i]; + + continue; + } + + if (!table.translations[msgctxt]) { + table.translations[msgctxt] = {}; + } + + if (!table.headers && !msgctxt && !tokens[i].msgid) { + table.headers = parseHeader(tokens[i].msgstr[0]); + nplurals = parseNPluralFromHeadersSafely(table.headers, nplurals); + } + + if (this._validation) { + this._validateToken(tokens[i], table.translations, msgctxt, nplurals); + } + + const token = tokens[i]; + table.translations[msgctxt][token.msgid] = token; + } + + return table as GetTextTranslations; + } + + /** + * Converts parsed tokens to a translation table + * + * @param tokens Parsed tokens + * @returns Translation table + */ + _finalize(tokens: Node[]): GetTextTranslations { + /** + * Translation table + */ + let data = this._joinStringValues(tokens); + + this._parseComments(data); + + // The PO parser gettext keys with values + data = this._handleKeys(data); + + // The PO parser individual translation objects + const dataset = this._handleValues(data); + return this._normalize(dataset); + } } -/** - * Parses the PO 
object and returns translation table - * - * @return {Object} Translation table - */ -Parser.prototype.parse = function (): GetTextTranslations { - this._lexer(this._fileContents); - - return this._finalize(this._lex); -}; - -/** - * Detects charset for PO strings from the header - * - * @param buf Header value - */ -Parser.prototype._handleCharset = function (buf: string | Buffer = '') { - /** @type {string} */ - const str = buf.toString(); - let pos; - let headers = ''; - let match; - - if ((pos = str.search(/^\s*msgid/im)) >= 0) { - pos = pos + str.substring(pos + 5).search(/^\s*(msgid|msgctxt)/im); - headers = str.substring(0, pos >= 0 ? pos + 5 : str.length); - } - - if ((match = headers.match(/[; ]charset\s*=\s*([\w-]+)(?:[\s;]|\\n)*"\s*$/mi))) { - this._charset = formatCharset(match[1], this._charset); - } - - if (this._charset === 'utf-8') { - return str; - } - - return this._toString(buf); -}; - -/** - * Converts buffer to string - * @param buf Buffer to convert - * @return Converted string - */ -Parser.prototype._toString = function (buf: string | Buffer): string { - return encoding.convert(buf, 'utf-8', this._charset).toString('utf-8'); -}; - -/** - * State constants for parsing FSM - */ -Parser.prototype.states = { - none: 0x01, - comments: 0x02, - key: 0x03, - string: 0x04, - obsolete: 0x05 -}; - -/** - * Value types for lexer - */ -Parser.prototype.types = { - comments: 0x01, - key: 0x02, - string: 0x03, - obsolete: 0x04 -}; - -/** - * String matches for lexer - */ -Parser.prototype.symbols = { - whitespace: /\s/, - key: /[\w\-[\]]/, - keyNames: /^(?:msgctxt|msgid(?:_plural)?|msgstr(?:\[\d+])?)$/ -}; -/** - * Token parser. Parsed state can be found from this._lex - * - * @param chunk String - * @throws {ParserError} Throws a SyntaxError if the value doesn't match the key names. 
- */ -Parser.prototype._lexer = function (chunk: string) { - let chr; - - for (let i = 0, len = chunk.length; i < len; i++) { - chr = chunk.charAt(i); - - if (chr === '\n') { - this._lineNumber += 1; - } - - switch (this._state) { - case this.states.none: - case this.states.obsolete: - if (chr === '"' || chr === "'") { - this._node = { - type: this.types.string, - value: '', - quote: chr - }; - this._lex.push(/** @type {Node} */ (this._node)); - this._state = this.states.string; - } else if (chr === '#') { - this._node = { - type: this.types.comments, - value: '' - }; - this._lex.push(/** @type {Node} */ (this._node)); - this._state = this.states.comments; - } else if (!chr.match(this.symbols.whitespace)) { - this._node = { - type: this.types.key, - value: chr - }; - if (this._state === this.states.obsolete) { - this._node.obsolete = true; - } - this._lex.push(/** @type {Node} */ (this._node)); - this._state = this.states.key; - } - break; - case this.states.comments: - if (chr === '\n') { - this._state = this.states.none; - } else if (chr === '~' && this._node.value === '') { - this._node.value += chr; - this._state = this.states.obsolete; - } else if (chr !== '\r') { - this._node.value += chr; - } - break; - case this.states.string: - if (this._escaped) { - switch (chr) { - case 't': - this._node.value += '\t'; - break; - case 'n': - this._node.value += '\n'; - break; - case 'r': - this._node.value += '\r'; - break; - default: - this._node.value += chr; - } - this._escaped = false; - } else { - if (chr === this._node.quote) { - this._state = this.states.none; - } else if (chr === '\\') { - this._escaped = true; - break; - } else { - this._node.value += chr; - } - this._escaped = false; - } - break; - case this.states.key: - if (!chr.match(this.symbols.key)) { - if (!this._node.value?.match(this.symbols.keyNames)) { - throw new ParserError(`Error parsing PO data: Invalid key name "${this._node.value}" at line ${this._lineNumber}. 
This can be caused by an unescaped quote character in a msgid or msgstr value.`, this._lineNumber); - } - this._state = this.states.none; - i--; - } else { - this._node.value += chr; - } - break; - } - } -}; - -/** - * Join multi line strings - * - * @param tokens Parsed tokens - * @return Parsed tokens, with multi line strings joined into one - */ -Parser.prototype._joinStringValues = function (tokens: Node[]): Node[] { - const response: Node[] = []; - let lastNode; - - for (let i = 0, len = tokens.length; i < len; i++) { - if (lastNode && tokens[i].type === this.types.string && lastNode.type === this.types.string) { - lastNode.value += tokens[i].value ?? ''; - } else if (lastNode && tokens[i].type === this.types.comments && lastNode.type === this.types.comments) { - lastNode.value += '\n' + tokens[i].value; - } else { - response.push(tokens[i]); - lastNode = tokens[i]; - } - } - - return response; -}; - -/** - * Parse comments into separate comment blocks - * - * @param tokens Parsed tokens - */ -Parser.prototype._parseComments = function (tokens: Node[]) { - for (const node of tokens) { - if (!node || node.type !== this.types.comments) { - continue; - } - - const comment: { - [key: string]: string[]; - } = { - translator: [], - extracted: [], - reference: [], - flag: [], - previous: [] - }; - - const lines: string[] = (node.value || '').split(/\n/); - - for (const line of lines) { - switch (line.charAt(0) || '') { - case ':': - comment.reference.push(line.substring(1).trim()); - break; - case '.': - comment.extracted.push(line.substring(1).replace(/^\s+/, '')); - break; - case ',': - comment.flag.push(line.substring(1).replace(/^\s+/, '')); - break; - case '|': - comment.previous.push(line.substring(1).replace(/^\s+/, '')); - break; - case '~': - break; - default: - comment.translator.push(line.replace(/^\s+/, '')); - } - } - - const finalToken = node as unknown as Omit & { value: Record}; - - finalToken.value = {}; - - for (const key of Object.keys(comment)) { 
- if (key && comment[key]?.length) { - finalToken.value[key] = comment[key].join('\n'); - } - } - } -}; - -/** - * Join gettext keys with values - * - * @param tokens - Parsed tokens containing key-value pairs - * @return An array of Nodes representing joined tokens - */ -Parser.prototype._handleKeys = function (tokens: (Node & { value?: string })[]): Node[] { - const response: Node[] = []; - let lastNode: Partial & { comments?: string; } = {}; - - for (let i = 0, len = tokens.length; i < len; i++) { - if (tokens[i].type === this.types.key) { - lastNode = { - key: tokens[i].value - }; - if (tokens[i].obsolete) { - lastNode.obsolete = true; - } - if (i && tokens[i - 1].type === this.types.comments) { - lastNode.comments = tokens[i - 1].value; - } - lastNode.value = ''; - response.push((lastNode as Node)); - } else if (tokens[i].type === this.types.string && lastNode) { - lastNode.value += tokens[i].value; - } - } - - return response; -}; - -/** - * Separate different values into individual translation objects - * - * @param {Node[]} tokens Parsed tokens - * @return {GetTextTranslation[]} Tokens - */ -Parser.prototype._handleValues = function (tokens: Node[]): GetTextTranslation[] { - const response = []; - /** Translation object */ - let lastNode: Partial = {}; - let curContext: string | undefined; - let curComments: GetTextComment | undefined; - - for (let i = 0, len = tokens.length; i < len; i++) { - const tokenKey = tokens[i].key; - if (!tokenKey) continue; - if (tokenKey.toLowerCase() === 'msgctxt') { - curContext = tokens[i].value; - curComments = tokens[i].comments; - } else if (tokenKey.toLowerCase() === 'msgid') { - lastNode = { - msgid: tokens[i].value, - msgstr: [] - }; - if (tokens[i].obsolete) { - lastNode.obsolete = true; - } - - if (curContext) { - lastNode.msgctxt = curContext; - } - - if (curComments) { - lastNode.comments = curComments; - } - - if (tokens[i].comments && !lastNode.comments) { - lastNode.comments = tokens[i].comments; - } - - 
curContext = undefined; - curComments = undefined; - response.push(lastNode); - } else if (tokenKey.toLowerCase() === 'msgid_plural') { - if (lastNode) { - if (this._validation && 'msgid_plural' in lastNode) { - throw new SyntaxError(`Multiple msgid_plural error: entry "${lastNode.msgid}" in "${lastNode.msgctxt || ''}" context has multiple msgid_plural declarations.`); - } - - lastNode.msgid_plural = tokens[i].value; - } - - if (tokens[i].comments && !lastNode.comments) { - lastNode.comments = tokens[i].comments; - } - - curContext = undefined; - curComments = undefined; - } else if (tokenKey.substring(0, 6).toLowerCase() === 'msgstr') { - if (lastNode) { - const strData = lastNode.msgstr || []; - const tokenValue = tokens[i].value; - lastNode.msgstr = (strData).concat(tokenValue); - } - - if (tokens[i].comments && !lastNode.comments) { - lastNode.comments = tokens[i].comments; - } - - curContext = undefined; - curComments = undefined; - } - } - - return response as GetTextTranslation[]; -}; - -/** - * Validate token - * - * @param token Parsed token - * @param translations Translation table - * @param msgctxt Message entry context - * @param nplurals Number of expected plural forms - * @throws {Error} Will throw an error if token validation fails - */ -Parser.prototype._validateToken = function ( - { - msgid = '', - msgid_plural = '', // eslint-disable-line camelcase - msgstr = [] - }: GetTextTranslation, - translations: Translations, - msgctxt: string, - nplurals: number -) { - if (msgid in translations[msgctxt]) { - throw new SyntaxError(`Duplicate msgid error: entry "${msgid}" in "${msgctxt}" context has already been declared.`); - // eslint-disable-next-line camelcase - } else if (msgid_plural && msgstr.length !== nplurals) { - // eslint-disable-next-line camelcase - throw new RangeError(`Plural forms range error: Expected to find ${nplurals} forms but got ${msgstr.length} for entry "${msgid_plural}" in "${msgctxt}" context.`); - // eslint-disable-next-line 
camelcase - } else if (!msgid_plural && msgstr.length !== 1) { - throw new RangeError(`Translation string range error: Extected 1 msgstr definitions associated with "${msgid}" in "${msgctxt}" context, found ${msgstr.length}.`); - } -}; - -/** - * Compose a translation table from tokens object - * - * @param {GetTextTranslation[]} tokens Parsed tokens - * @return {GetTextTranslations} Translation table - */ -Parser.prototype._normalize = function (tokens: GetTextTranslation[]): GetTextTranslations { - /** - * Translation table to be returned - */ - const table: Omit & Partial> = { - charset: this._charset, - headers: undefined, - translations: {} - }; - let nplurals = 1; - - for (let i = 0, len = tokens.length; i < len; i++) { - const msgctxt: string = tokens[i].msgctxt || ''; - - if (tokens[i].obsolete) { - if (!table.obsolete) { - table.obsolete = {}; - } - - if (!table.obsolete[msgctxt]) { - table.obsolete[msgctxt] = {}; - } - - delete tokens[i].obsolete; - - table.obsolete[msgctxt][tokens[i].msgid] = tokens[i]; - - continue; - } - - if (!table.translations[msgctxt]) { - table.translations[msgctxt] = {}; - } - - if (!table.headers && !msgctxt && !tokens[i].msgid) { - table.headers = parseHeader(tokens[i].msgstr[0]); - nplurals = parseNPluralFromHeadersSafely(table.headers, nplurals); - } - - if (this._validation) { - this._validateToken(tokens[i], table.translations, msgctxt, nplurals); - } - - const token = tokens[i]; - table.translations[msgctxt][token.msgid] = token; - } - - return table as GetTextTranslations; -}; - -/** - * Converts parsed tokens to a translation table - * - * @param tokens Parsed tokens - * @returns Translation table - */ -Parser.prototype._finalize = function (tokens: Node[]): GetTextTranslations { - /** - * Translation table - */ - let data = this._joinStringValues(tokens); - - this._parseComments(data); - - // The PO parser gettext keys with values - data = this._handleKeys(data); - - // The PO parser individual translation objects - 
const dataset = this._handleValues(data); - return this._normalize(dataset); -}; - - - /** * Creates a transform stream for parsing PO input * @constructor @@ -572,124 +599,144 @@ Parser.prototype._finalize = function (tokens: Node[]): GetTextTranslations { * @param options Optional options with defaultCharset and validation * @param transformOptions Optional stream options */ -function PoParserTransform (this: PoParserTransform & Transform, options: ParserOptions, transformOptions: TransformOptions & { initialTreshold?: number; }) { - const { initialTreshold, ..._transformOptions } = transformOptions; - this.options = options; - this._parser = false; - this._tokens = {}; - - this._cache = []; - this._cacheSize = 0; - - this.initialTreshold = transformOptions.initialTreshold || 2 * 1024; - - Transform.call(this, _transformOptions); - - this._writableState.objectMode = false; - this._readableState.objectMode = true; +class PoParserTransform extends Transform { + options: ParserOptions; + _parser: boolean | Parser; + _tokens: {}; + _cache: Buffer[]; + _cacheSize: number; + initialTreshold: number; + + constructor( + options: ParserOptions, + transformOptions: TransformOptions & { initialTreshold?: number }, + ) { + const { initialTreshold, ..._transformOptions } = transformOptions; + super({ + ..._transformOptions, + readableObjectMode: true, + writableObjectMode: false, + }); + + this.options = options; + this._parser = false; + this._tokens = {}; + + this._cache = []; + this._cacheSize = 0; + + this.initialTreshold = transformOptions.initialTreshold || 2 * 1024; + } + + /** + * Processes a chunk of the input stream + * @param chunk Chunk of the input stream + * @param encoding Encoding of the chunk + * @param done Callback to call when the chunk is processed + */ + _transform( + chunk: Buffer, + encoding: BufferEncoding, + callback: (error?: Error | null, data?: any) => void, + ) { + if (!chunk || !chunk.length) { + return callback(); + } + + if (!this._parser) { + 
this._cache.push(chunk); + this._cacheSize += chunk.length; + + // wait until the first 1kb before parsing headers for charset + if (this._cacheSize < this.initialTreshold) { + return setImmediate(callback); + } else if (this._cacheSize) { + chunk = Buffer.concat(this._cache as Uint8Array[], this._cacheSize); + this._cacheSize = 0; + this._cache = []; + } + + this._parser = new Parser(chunk, this.options); + } else if (this._cacheSize) { + // this only happens if we had an uncompleted 8bit sequence from last iteration + this._cache.push(chunk); + this._cacheSize += chunk.length; + chunk = Buffer.concat(this._cache as Uint8Array[], this._cacheSize); + this._cacheSize = 0; + this._cache = []; + } + + // cache 8bit bytes from end of the chunk + // helps if chunk ends in the middle of an utf-8 sequence + let len = 0; + for (let i = chunk.length - 1; i >= 0; i--) { + if (chunk[i] >= 0x80) { + len++; + continue; + } + break; + } + // it seems we found some 8bit bytes from end of the string, so let's cache these + if (len) { + this._cache = [chunk.subarray(chunk.length - len)]; + this._cacheSize = this._cache[0].length; + chunk = chunk.subarray(0, chunk.length - len); + } + + // chunk might be empty if it only continued of 8bit bytes and these were all cached + if (chunk.length) { + try { + (this._parser as Parser)._lexer( + (this._parser as Parser)._toString(chunk), + ); + } catch (error) { + setImmediate(() => { + callback(error as Error | null); + }); + + return; + } + } + + setImmediate(callback); + } + + /** + * Once all inputs have been processed, emit the parsed translation table as an object + * + * @param done Callback to call when the chunk is processed + */ + _flush(callback: (error?: Error | null, data?: any) => void) { + let chunk; + + if (this._cacheSize) { + chunk = Buffer.concat(this._cache as Uint8Array[], this._cacheSize); + } + + if (!this._parser && chunk) { + this._parser = new Parser(chunk, this.options); + } + + if (chunk && this._parser) { + try { 
+ (this._parser as Parser)._lexer( + (this._parser as Parser)._toString(chunk), + ); + } catch (error) { + setImmediate(() => { + callback(error as Error | null); + }); + + return; + } + } + + if (this._parser) { + this.push( + (this._parser as Parser)._finalize((this._parser as Parser)._lex), + ); + } + + setImmediate(callback); + } } -util.inherits(PoParserTransform, Transform); - -/** - * Processes a chunk of the input stream - * @param chunk Chunk of the input stream - * @param encoding Encoding of the chunk - * @param done Callback to call when the chunk is processed - */ -PoParserTransform.prototype._transform = function (chunk: Buffer, encoding: string, done: DoneCallback) { - let i; - let len = 0; - - if (!chunk || !chunk.length) { - return done(); - } - - if (!this._parser) { - this._cache.push(chunk); - this._cacheSize += chunk.length; - - // wait until the first 1kb before parsing headers for charset - if (this._cacheSize < this.initialTreshold) { - return setImmediate(done); - } else if (this._cacheSize) { - chunk = Buffer.concat(this._cache, this._cacheSize); - this._cacheSize = 0; - this._cache = []; - } - - this._parser = new Parser(chunk, this.options); - } else if (this._cacheSize) { - // this only happens if we had an uncompleted 8bit sequence from the last iteration - this._cache.push(chunk); - this._cacheSize += chunk.length; - chunk = Buffer.concat(this._cache, this._cacheSize); - this._cacheSize = 0; - this._cache = []; - } - - // cache 8bit bytes from the end of the chunk - // helps if the chunk ends in the middle of an utf-8 sequence - for (i = chunk.length - 1; i >= 0; i--) { - if (chunk[i] >= 0x80) { - len++; - continue; - } - break; - } - // it seems we found some 8bit bytes from the end of the string, so let's cache these - if (len) { - this._cache = [chunk.subarray(chunk.length - len)]; - this._cacheSize = this._cache[0].length; - chunk = chunk.subarray(0, chunk.length - len); - } - - // chunk might be empty if it only continued of 8bit 
bytes and these were all cached - if (chunk.length) { - try { - this._parser._lexer(this._parser._toString(chunk)); - } catch (/** @type {any} error */error) { - setImmediate(() => { - done(error); - }); - - return; - } - } - - setImmediate(done); -}; - -/** - * Once all inputs have been processed, emit the parsed translation table as an object - * - * @param done Callback to call when the chunk is processed - */ -PoParserTransform.prototype._flush = function (done: DoneCallback) { - let chunk; - - if (this._cacheSize) { - chunk = Buffer.concat(this._cache, this._cacheSize); - } - - if (!this._parser && chunk) { - this._parser = new Parser(chunk, this.options); - } - - if (chunk && this._parser) { - try { - this._parser._lexer(this._parser._toString(chunk)); - } catch (error) { - setImmediate(() => { - done(error); - }); - - return; - } - } - - if (this._parser) { - (this).push(this._parser._finalize(this._parser._lex)); - } - - setImmediate(done); -}; diff --git a/src/shared.ts b/src/shared.ts index 0fdd1f3..82dd70e 100644 --- a/src/shared.ts +++ b/src/shared.ts @@ -1,22 +1,23 @@ // see https://www.gnu.org/software/gettext/manual/html_node/Header-Entry.html /** Header name for "Plural-Forms" */ -const PLURAL_FORMS: string = 'Plural-Forms'; +const PLURAL_FORMS: string = "Plural-Forms"; /** Map of header keys to header names */ export const HEADERS: Map = new Map([ - ['project-id-version', 'Project-Id-Version'], - ['report-msgid-bugs-to', 'Report-Msgid-Bugs-To'], - ['pot-creation-date', 'POT-Creation-Date'], - ['po-revision-date', 'PO-Revision-Date'], - ['last-translator', 'Last-Translator'], - ['language-team', 'Language-Team'], - ['language', 'Language'], - ['content-type', 'Content-Type'], - ['content-transfer-encoding', 'Content-Transfer-Encoding'], - ['plural-forms', PLURAL_FORMS] + ["project-id-version", "Project-Id-Version"], + ["report-msgid-bugs-to", "Report-Msgid-Bugs-To"], + ["pot-creation-date", "POT-Creation-Date"], + ["po-revision-date", 
"PO-Revision-Date"], + ["last-translator", "Last-Translator"], + ["language-team", "Language-Team"], + ["language", "Language"], + ["content-type", "Content-Type"], + ["content-transfer-encoding", "Content-Transfer-Encoding"], + ["plural-forms", PLURAL_FORMS], ]); -const PLURAL_FORM_HEADER_NPLURALS_REGEX: RegExp = /nplurals\s*=\s*(?\d+)/; +const PLURAL_FORM_HEADER_NPLURALS_REGEX: RegExp = + /nplurals\s*=\s*(?\d+)/; /** * Parses a header string into an object of key-value pairs @@ -24,24 +25,23 @@ const PLURAL_FORM_HEADER_NPLURALS_REGEX: RegExp = /nplurals\s*=\s*(?\d * @param str Header string * @return An object of key-value pairs */ -export function parseHeader (str: string = ''): Record { - /** @type {string} Header string */ - return str - .split('\n') - .reduce((headers: Record, line: string) => { - const parts = line.split(':'); - let key = (parts.shift() || '').trim(); +export function parseHeader(str: string = ""): Record { + return str + .split("\n") + .reduce((headers: Record, line: string) => { + const parts = line.split(":"); + let key = (parts.shift() || "").trim(); - if (key) { - const value = parts.join(':').trim(); + if (key) { + const value = parts.join(":").trim(); - key = HEADERS.get(key.toLowerCase()) || key; + key = HEADERS.get(key.toLowerCase()) || key; - headers[key] = value; - } + headers[key] = value; + } - return headers; - }, {}); + return headers; + }, {}); } /** @@ -51,18 +51,20 @@ export function parseHeader (str: string = ''): Record { * @param fallback Fallback value if "Plural-Forms" header is absent * @returns Parsed result */ -export function parseNPluralFromHeadersSafely (headers: Record, fallback: number = 1): number { - const pluralForms = headers ? headers[PLURAL_FORMS] : false; +export function parseNPluralFromHeadersSafely( + headers: Record, + fallback: number = 1, +): number { + const pluralForms = headers ? 
headers[PLURAL_FORMS] : false; - if (!pluralForms) { - return fallback; - } + if (!pluralForms) { + return fallback; + } - const { - groups: { nplurals } = { nplurals: '' + fallback } - } = pluralForms.match(PLURAL_FORM_HEADER_NPLURALS_REGEX) || {}; + const { groups: { nplurals } = { nplurals: "" + fallback } } = + pluralForms.match(PLURAL_FORM_HEADER_NPLURALS_REGEX) || {}; - return parseInt(nplurals, 10) || fallback; + return parseInt(nplurals, 10) || fallback; } /** @@ -71,18 +73,16 @@ export function parseNPluralFromHeadersSafely (headers: Record, * @param header Object of key value pairs * @return An object of key-value pairs */ -export function generateHeader (header: Record = {}): string { - const keys = Object.keys(header) - .filter(key => !!key); - - if (!keys.length) { - return ''; - } - - return keys.map(key => - `${key}: ${(header[key] || '').trim()}` - ) - .join('\n') + '\n'; +export function generateHeader(header: Record = {}): string { + const keys = Object.keys(header).filter((key) => !!key); + + if (!keys.length) { + return ""; + } + + return ( + keys.map((key) => `${key}: ${(header[key] || "").trim()}`).join("\n") + "\n" + ); } /** @@ -92,15 +92,19 @@ export function generateHeader (header: Record = {}): string { * @param defaultCharset Default charset name, defaults to 'iso-8859-1' * @return Normalized charset name */ -export function formatCharset (charset: string = 'iso-8859-1', defaultCharset: string = 'iso-8859-1'): string { - return charset.toString() - .toLowerCase() - .replace(/^utf[-_]?(\d+)$/, 'utf-$1') - .replace(/^win(?:dows)?[-_]?(\d+)$/, 'windows-$1') - .replace(/^latin[-_]?(\d+)$/, 'iso-8859-$1') - .replace(/^(us[-_]?)?ascii$/, 'ascii') - .replace(/^charset$/, defaultCharset) - .trim(); +export function formatCharset( + charset: string = "iso-8859-1", + defaultCharset: string = "iso-8859-1", +): string { + return charset + .toString() + .toLowerCase() + .replace(/^utf[-_]?(\d+)$/, "utf-$1") + .replace(/^win(?:dows)?[-_]?(\d+)$/, 
"windows-$1") + .replace(/^latin[-_]?(\d+)$/, "iso-8859-$1") + .replace(/^(us[-_]?)?ascii$/, "ascii") + .replace(/^charset$/, defaultCharset) + .trim(); } /** @@ -110,42 +114,45 @@ export function formatCharset (charset: string = 'iso-8859-1', defaultCharset: s * @param maxLen Maximum allowed length for folded lines * @return An array of lines */ -export function foldLine (str: string, maxLen: number = 76): string[] { - const lines = []; - const len = str.length; - let curLine = ''; - let pos = 0; - let match; - - while (pos < len) { - curLine = str.substring(pos, pos + maxLen); - - // ensure that the line never ends with a partial escaping - // make longer lines if needed - while (curLine.endsWith('\\') && pos + curLine.length < len) { - curLine += str.charAt(pos + curLine.length + 1); // Append the next character - } - - // ensure that if possible, line breaks are done at reasonable places - if ((match = /.*?\\n/.exec(curLine))) { - // use everything before and including the first line break - curLine = match[0]; - } else if (pos + curLine.length < len) { - // if we're not at the end - if ((match = /.*\s+/.exec(curLine)) && /\S/.test(match[0])) { - // use everything before and including the last white space character (if anything) - curLine = match[0]; - } else if ((match = /.*[\x21-\x2f0-9\x5b-\x60\x7b-\x7e]+/.exec(curLine)) && /[^\x21-\x2f0-9\x5b-\x60\x7b-\x7e]/.test(match[0])) { - // use everything before and including the last "special" character (if anything) - curLine = match[0]; - } - } - - lines.push(curLine); - pos += curLine.length; - } - - return lines; +export function foldLine(str: string, maxLen: number = 76): string[] { + const lines = []; + const len = str.length; + let curLine = ""; + let pos = 0; + let match; + + while (pos < len) { + curLine = str.substring(pos, pos + maxLen); + + // ensure that the line never ends with a partial escaping + // make longer lines if needed + while (curLine.endsWith("\\") && pos + curLine.length < len) { + curLine 
+= str.charAt(pos + curLine.length + 1); // Append the next character + } + + // ensure that if possible, line breaks are done at reasonable places + if ((match = /.*?(?:\r?\n|\\n)/.exec(curLine))) { + // use everything before and including the first line break + curLine = match[0]; + } else if (pos + curLine.length < len) { + // if we're not at the end + if ((match = /.*\s+/.exec(curLine)) && /\S/.test(match[0])) { + // use everything before and including the last white space character (if anything) + curLine = match[0]; + } else if ( + (match = /.*[\x21-\x2f0-9\x5b-\x60\x7b-\x7e]+/.exec(curLine)) && + /[^\x21-\x2f0-9\x5b-\x60\x7b-\x7e]/.test(match[0]) + ) { + // use everything before and including the last "special" character (if anything) + curLine = match[0]; + } + } + + lines.push(curLine); + pos += curLine.length; + } + + return lines; } /** @@ -155,29 +162,48 @@ export function foldLine (str: string, maxLen: number = 76): string[] { * @param right with msgid next * @returns comparator index */ -export function compareMsgid ({msgid: left}: { msgid: T; }, {msgid: right}: { msgid: T; }): number { - if (left < right) { - return -1; - } +export function compareMsgid( + { msgid: left }: { msgid: T }, + { msgid: right }: { msgid: T }, +): number { + if (left < right) { + return -1; + } + + if (left > right) { + return 1; + } + + return 0; +} - if (left > right) { - return 1; - } +/** + * Parses a Content-Type string to extract and update the charset + */ +export function updateContentTypeCharset( + contentTypeStr: string, + newCharset: string, +): string { + const baseType = contentTypeStr.split(";")[0].trim(); + return `${baseType}; charset=${newCharset}`; +} - return 0; +export function extractCharset(contentTypeStr: string): string | undefined { + const match = contentTypeStr.match(/charset=([^;\s]+)/i); + return match ? match[1] : undefined; } /** * Custom SyntaxError subclass that includes the lineNumber property. 
*/ export class ParserError extends SyntaxError { - lineNumber: number; - /** - * @param message - Error message. - * @param lineNumber - Line number where the error occurred. - */ - constructor (message: string, lineNumber: number) { - super(message); - this.lineNumber = lineNumber; - } + lineNumber: number; + /** + * @param message - Error message. + * @param lineNumber - Line number where the error occurred. + */ + constructor(message: string, lineNumber: number) { + super(message); + this.lineNumber = lineNumber; + } } diff --git a/src/types.ts b/src/types.ts index 9d4a31b..5d2eb54 100644 --- a/src/types.ts +++ b/src/types.ts @@ -1,111 +1,108 @@ -import {Transform} from "readable-stream"; - /** * Represents a GetText comment. */ export interface GetTextComment { - translator?: string; - reference?: string; - extracted?: string; - flag?: string; - previous?: string; + translator?: string; + reference?: string; + extracted?: string; + flag?: string; + previous?: string; } /** * Represents a GetText translation. */ export interface GetTextTranslation { - msgctxt?: string; - msgid: string; - msgid_plural?: string; - msgstr: string[]; - comments?: GetTextComment; - obsolete?: boolean; + msgctxt?: string; + msgid: string; + msgid_plural?: string; + msgstr: string[]; + comments?: GetTextComment; + obsolete?: boolean; } /** * The translation index. */ -export type Translations = Record> +export type Translations = Record>; /** * Represents GetText translations. */ export interface GetTextTranslations { - charset: string | undefined; - headers: Record; - obsolete?: Translations; - translations: Translations; + charset: string | undefined; + headers: Record; + obsolete?: Translations; + translations: Translations; } /** * Options for the parser. 
*/ export type ParserOptions = { - defaultCharset?: string; - validation?: boolean; - foldLength?: number; - escapeCharacters?: boolean; - sort?: boolean; - eol?: string; -} + defaultCharset?: string; + validation?: boolean; + foldLength?: number; + escapeCharacters?: boolean; + sort?: boolean; + eol?: string; +}; /** * Type definition for write functions. */ -export type WriteFunc = 'writeUInt32LE' | 'writeUInt32BE'; +export type WriteFunc = "writeUInt32LE" | "writeUInt32BE"; /** * Type definition for read functions. */ -export type ReadFunc = 'readUInt32LE' | 'readUInt32BE'; - +export type ReadFunc = "readUInt32LE" | "readUInt32BE"; /** The size of the MO object */ export type Size = { - msgid: number, - msgstr: number, - total: number -} + msgid: number; + msgstr: number; + total: number; +}; /** The translation object as a buffer */ export type TranslationBuffers = { - msgid: Buffer, - msgstr: Buffer -} + msgid: Buffer; + msgstr: Buffer; +}; export type Compiler = { - _options: ParserOptions; - _table: GetTextTranslations, - _translations: TranslationBuffers[], - _writeFunc: WriteFunc, - _handleCharset: () => void, - _generateList: () => TranslationBuffers[], - _build: (list: TranslationBuffers[], size: Size) => Buffer, - compile: () => Buffer, - /** - * Magic bytes for the generated binary data - * MAGIC file header magic value of mo file - */ - MAGIC: number, -} + _options: ParserOptions; + _table: GetTextTranslations; + _translations: TranslationBuffers[]; + _writeFunc: WriteFunc; + _handleCharset: () => void; + _generateList: () => TranslationBuffers[]; + _build: (list: TranslationBuffers[], size: Size) => Buffer; + compile: () => Buffer; + /** + * Magic bytes for the generated binary data + * MAGIC file header magic value of mo file + */ + MAGIC: number; +}; export type Parser = { - _validation: boolean; - _charset: string; - _lex: any[]; - _escaped: boolean; - _node: any; - _state: any; - _lineNumber: number; - _fileContents: string | Buffer; -} + 
_validation: boolean; + _charset: string; + _lex: any[]; + _escaped: boolean; + _node: any; + _state: any; + _lineNumber: number; + _fileContents: string | Buffer; +}; export type PoParserTransform = { - options: ParserOptions, - initialTreshold?: number, - _parser?: Parser|false, - _tokens?: {}, - _cache?: Buffer[], - _cacheSize?: number + options: ParserOptions; + initialTreshold?: number; + _parser?: Parser | false; + _tokens?: {}; + _cache?: Buffer[]; + _cacheSize?: number; }; From ed6597c676eefbafe4fbf9c985af136cede8f57f Mon Sep 17 00:00:00 2001 From: Erik Golinelli Date: Thu, 26 Feb 2026 12:45:29 +0100 Subject: [PATCH 3/7] refactor: migrate tests to modern ES modules and node:test framework for improved consistency and maintainability --- .eslintrc.json | 18 -- .github/workflows/ci.yml | 2 +- test/.eslintrc.json | 5 - test/fixtures/latin13-be.mo | Bin 678 -> 0 bytes test/fixtures/{latin13-le.mo => latin13.mo} | Bin test/fixtures/obsolete-be.mo | Bin 125 -> 0 bytes test/fixtures/{obsolete-le.mo => obsolete.mo} | Bin test/fixtures/utf8-be.mo | Bin 851 -> 0 bytes test/fixtures/{utf8-le.mo => utf8.mo} | Bin test/mo-compiler-test.js | 90 +------- test/mo-parser-test.js | 50 +---- test/module.mjs | 15 +- test/po-compiler-test.js | 59 ++--- test/po-obsolete-test.js | 26 +-- test/po-parser-test.js | 100 +++++---- test/shared.js | 202 ++++++++---------- 16 files changed, 200 insertions(+), 367 deletions(-) delete mode 100644 .eslintrc.json delete mode 100644 test/.eslintrc.json delete mode 100644 test/fixtures/latin13-be.mo rename test/fixtures/{latin13-le.mo => latin13.mo} (100%) delete mode 100644 test/fixtures/obsolete-be.mo rename test/fixtures/{obsolete-le.mo => obsolete.mo} (100%) delete mode 100644 test/fixtures/utf8-be.mo rename test/fixtures/{utf8-le.mo => utf8.mo} (100%) diff --git a/.eslintrc.json b/.eslintrc.json deleted file mode 100644 index e0324ef..0000000 --- a/.eslintrc.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "env": { - "es2021": true, - "node": 
true - }, - "extends": [ - "eslint:recommended", - "plugin:@typescript-eslint/recommended" - ], - "parser": "@typescript-eslint/parser", - "parserOptions": { - "ecmaVersion": "latest", - "sourceType": "module" - }, - "rules": { - "semi": ["error", "always"] - } -} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2d240c8..d3628b5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -20,7 +20,7 @@ jobs: strategy: fail-fast: false matrix: - node: [ 18, 20 ] + node: [ 20, latest ] os: - ubuntu-latest - windows-latest diff --git a/test/.eslintrc.json b/test/.eslintrc.json deleted file mode 100644 index 7eeefc3..0000000 --- a/test/.eslintrc.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "env": { - "mocha": true - } -} diff --git a/test/fixtures/latin13-be.mo b/test/fixtures/latin13-be.mo deleted file mode 100644 index aa12185eb81789ca4b9d3718126dc1ab0a5fb9d2..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 678 zcmZ8eJ&P1U5N+30aRdWngEmv7&Y9UgSbJUZ)DsTWTh_t6?J39BneL&x=eSD@j8zac zG7`bq&_wW;82JNCG!n7qL-%mt^{Y2kucm7H&G^h$);}kR&JwagKR?9BDSkmXO?XWh z5#AF{5(;0rvyc(s^{z3Tx{E-0_9Z z;d)be$gJSfM>!#*?X-PA|rjIF4>hA0Ttm z_)-Sz1mEiV@D*xPNyD!UeV;g#K~m?dWHCh}3xo`1_5XDL48tJna>1h;)&w;4y@Q$t zYInL+mr|QKSC87=!a{osv)SflNG9v*cDje?VB090Yf}h5d!R$KEE^|FxMf}C1vhmk z{UV+7K3tqP=N8F@c~t2NVejwXOg7?ZA8L!a4p0ue2)^xuHCmc=PT9r;cR;Co+3ww) kt#!uR#wi?T7X0wgOa;2+&|Nj1DI?jbUQFQk_x_^)00?ruxBvhE diff --git a/test/fixtures/latin13-le.mo b/test/fixtures/latin13.mo similarity index 100% rename from test/fixtures/latin13-le.mo rename to test/fixtures/latin13.mo diff --git a/test/fixtures/obsolete-be.mo b/test/fixtures/obsolete-be.mo deleted file mode 100644 index b70aacd9360d8d8e22d78f6ff9efa9348532aab1..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 125 zcmbQrB6N=d0+@i53=r!;`8G&27m#fW#2P^C0mR%uT*wfTT3n)#Tb!Pm!r+{rSCX1n sq8n0KkZPq+l3G!sUyzfSnP;t#oRL^moLXXAT9T$~!3Eb-TvEgU087&u{{R30 diff --git 
a/test/fixtures/obsolete-le.mo b/test/fixtures/obsolete.mo similarity index 100% rename from test/fixtures/obsolete-le.mo rename to test/fixtures/obsolete.mo diff --git a/test/fixtures/utf8-be.mo b/test/fixtures/utf8-be.mo deleted file mode 100644 index 7c5023ec823999656d8bc4e6488ca9cbd52f115d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 851 zcmZ8f&1w`u5FR%{$p{|w>Y?vx`lXOPnLLw@H482O#|U})%k z1Rnx_20sLU1*6Z*T5$5tsktDHW>XUdX$*=57O-)Eow3(AAxyt9^MUD$(d*HZ(VNk` zz|04poewswxyPv&E19^>)+wVC%GrC~^`es2u?;u5GS-WmE?1ejTIWs-O+r#6!)T7^e%{Kfui}OfAMUu-|K~|(s?noQoBNWV-s2$_n}Qz zx|EtOP5R!qEWJeM%P7%#Eprw_mgWEI{uWGxOw$tzuNm!G<9~Y<1T|+mUlmg6MN02B zXY+&FZ;OR#UZZQ9tytEn$&}6$TG&!P+$d@*3vt8P(j}y;vFir$BDKrGX?cKd7?I}op~J=j+_Bd>eX;*x|8cNtESIDzSC^ET!dP<3lPuYvqts}J(X+2D$xKzsr8qKi xp%8SOp@3M`t~43$uEZlCrBjuu3~C{T3b}>$#J+9KRLWdSl7)Iu%WnI=egmOa^{@Z{ diff --git a/test/fixtures/utf8-le.mo b/test/fixtures/utf8.mo similarity index 100% rename from test/fixtures/utf8-le.mo rename to test/fixtures/utf8.mo diff --git a/test/mo-compiler-test.js b/test/mo-compiler-test.js index 896dc8c..ec8ab93 100644 --- a/test/mo-compiler-test.js +++ b/test/mo-compiler-test.js @@ -1,104 +1,28 @@ import { promisify } from 'node:util'; import path from 'node:path'; -import { mo } from '../src/index.js'; +import { mo } from '../lib/index.mjs'; import { readFile as fsReadFile } from 'node:fs'; import { fileURLToPath } from 'node:url'; -import * as chai from 'chai'; +import { describe, test } from 'node:test'; +import assert from 'node:assert'; const __filename = fileURLToPath(import.meta.url); const __dirname = path.dirname(__filename); const readFile = promisify(fsReadFile); -const expect = chai.expect; - -const littleEndianMagic = [0xde, 0x12, 0x04, 0x95]; -const bigEndianMagic = [0x95, 0x04, 0x12, 0xde]; - -chai.config.includeStack = true; - describe('MO Compiler', () => { - describe('UTF-8 LE', async () => { - it('should compile', async () => { + describe('UTF-8', () => { + 
test('should compile', async () => { const [json, moData] = await Promise.all([ readFile(path.join(__dirname, 'fixtures/utf8-po.json'), 'utf8'), - readFile(path.join(__dirname, 'fixtures/utf8-le.mo')) + readFile(path.join(__dirname, 'fixtures/utf8.mo')) ]); const compiled = mo.compile(JSON.parse(json)); - expect(compiled.toString('utf8')).to.deep.equal(moData.toString('utf8')); - }); - - it('should have the correct magic number', async () => { - const json = await readFile(path.join(__dirname, 'fixtures/utf8-po.json'), 'utf8'); - - const compiled = mo.compile(JSON.parse(json)); - - expect(Array.from(compiled.subarray(0, 4))).to.eql(littleEndianMagic); - }); - }); - - describe('UTF-8 BE', () => { - it('should compile', async () => { - const [json, moData] = await Promise.all([ - readFile(path.join(__dirname, 'fixtures/utf8-po.json'), 'utf8'), - readFile(path.join(__dirname, 'fixtures/utf8-be.mo')) - ]); - - const compiled = mo.compile(JSON.parse(json), { endian: 'be' }); - - expect(compiled.toString('utf8')).to.deep.equal(moData.toString('utf8')); - }); - - it('should have the correct magic number', async () => { - const json = await readFile(path.join(__dirname, 'fixtures/utf8-po.json'), 'utf8'); - - const compiled = mo.compile(JSON.parse(json), { endian: 'be' }); - - expect(Array.from(compiled.subarray(0, 4))).to.eql(bigEndianMagic); + assert.deepStrictEqual(compiled.toString('utf8'), moData.toString('utf8')); }); }); - describe('Latin-13 LE', () => { - it('should compile', async () => { - const [json, moData] = await Promise.all([ - readFile(path.join(__dirname, 'fixtures/latin13-po.json'), 'utf8'), - readFile(path.join(__dirname, 'fixtures/latin13-le.mo')) - ]); - - const compiled = mo.compile(JSON.parse(json)); - - expect(compiled.toString('utf8')).to.equal(moData.toString('utf8')); - }); - - it('should have the correct magic number', async () => { - const json = await readFile(path.join(__dirname, 'fixtures/latin13-po.json'), 'utf8'); - - const compiled = 
mo.compile(JSON.parse(json)); - - expect(Array.from(compiled.subarray(0, 4))).to.eql(littleEndianMagic); - }); - }); - - describe('Latin-13 BE', () => { - it('should compile', async () => { - const [json, moData] = await Promise.all([ - readFile(path.join(__dirname, 'fixtures/latin13-po.json'), 'utf8'), - readFile(path.join(__dirname, 'fixtures/latin13-be.mo')) - ]); - - const compiled = mo.compile(JSON.parse(json), { endian: 'be' }); - - expect(compiled.toString('utf8')).to.equal(moData.toString('utf8')); - }); - - it('should have the correct magic number', async () => { - const json = await readFile(path.join(__dirname, 'fixtures/latin13-po.json'), 'utf8'); - - const compiled = mo.compile(JSON.parse(json), { endian: 'be' }); - - expect(Array.from(compiled.subarray(0, 4))).to.eql(bigEndianMagic); - }); - }); }); diff --git a/test/mo-parser-test.js b/test/mo-parser-test.js index 2c463c6..5ed06c7 100644 --- a/test/mo-parser-test.js +++ b/test/mo-parser-test.js @@ -2,67 +2,39 @@ import { promisify } from 'node:util'; import path from 'node:path'; import { readFile as fsReadFile } from 'node:fs'; import { fileURLToPath } from 'node:url'; -import * as chai from 'chai'; -import { mo } from '../src/index.js'; +import { describe, test } from 'node:test'; +import assert from 'node:assert'; +import { mo } from '../lib/index.mjs'; const __filename = fileURLToPath(import.meta.url); const __dirname = path.dirname(__filename); const readFile = promisify(fsReadFile); -const expect = chai.expect; -chai.config.includeStack = true; - describe('MO Parser', () => { - describe('UTF-8 LE', () => { - it('should parse', async () => { - const [moData, json] = await Promise.all([ - readFile(path.join(__dirname, 'fixtures/utf8-le.mo')), - readFile(path.join(__dirname, 'fixtures/utf8-mo.json'), 'utf8') - ]); - - const parsed = mo.parse(moData); - - expect(parsed).to.deep.equal(JSON.parse(json)); - }); - }); - - describe('UTF-8 BE', () => { - it('should parse', async () => { + 
describe('UTF-8', () => { + test('should parse', async () => { const [moData, json] = await Promise.all([ - readFile(path.join(__dirname, 'fixtures/utf8-be.mo')), + readFile(path.join(__dirname, 'fixtures/utf8.mo')), readFile(path.join(__dirname, 'fixtures/utf8-mo.json'), 'utf8') ]); const parsed = mo.parse(moData); - expect(parsed).to.deep.equal(JSON.parse(json)); - }); - }); - - describe('Latin-13 LE', () => { - it('should parse', async () => { - const [moData, json] = await Promise.all([ - readFile(path.join(__dirname, 'fixtures/latin13-le.mo')), - readFile(path.join(__dirname, 'fixtures/latin13-mo.json'), 'utf8') - ]); - - const parsed = mo.parse(moData); - - expect(parsed).to.deep.equal(JSON.parse(json)); + assert.deepStrictEqual(parsed, JSON.parse(json)); }); }); - describe('Latin-13 BE', () => { - it('should parse', async () => { + describe('Latin-13', () => { + test('should parse', async () => { const [moData, json] = await Promise.all([ - readFile(path.join(__dirname, 'fixtures/latin13-be.mo')), + readFile(path.join(__dirname, 'fixtures/latin13.mo')), readFile(path.join(__dirname, 'fixtures/latin13-mo.json'), 'utf8') ]); const parsed = mo.parse(moData); - expect(parsed).to.deep.equal(JSON.parse(json)); + assert.deepStrictEqual(parsed, JSON.parse(json)); }); }); }); diff --git a/test/module.mjs b/test/module.mjs index b1b48b0..3c3fecb 100644 --- a/test/module.mjs +++ b/test/module.mjs @@ -1,11 +1,12 @@ -import { expect } from 'chai'; -import { po, mo } from '../src/index.js'; +import { describe, test } from 'node:test'; +import assert from 'node:assert'; +import { mo, po } from '../lib/index.mjs'; describe('esm module', () => { - it('should allow named imports', () => { - expect(po.parse).to.be.a('function'); - expect(po.compile).to.be.a('function'); - expect(mo.parse).to.be.a('function'); - expect(mo.compile).to.be.a('function'); + test('should allow named imports', () => { + assert.strictEqual(typeof po.parse, 'function'); + assert.strictEqual(typeof 
po.compile, 'function'); + assert.strictEqual(typeof mo.parse, 'function'); + assert.strictEqual(typeof mo.compile, 'function'); }); }); diff --git a/test/po-compiler-test.js b/test/po-compiler-test.js index 0e969ce..469a7d0 100644 --- a/test/po-compiler-test.js +++ b/test/po-compiler-test.js @@ -3,20 +3,18 @@ import { promisify } from 'node:util'; import path from 'node:path'; import { EOL } from 'node:os'; import { fileURLToPath } from 'node:url'; -import { po } from '../src/index.js'; -import * as chai from 'chai'; +import { po } from '../lib/index.mjs'; +import { describe, test } from 'node:test'; +import assert from 'node:assert'; const __filename = fileURLToPath(import.meta.url); const __dirname = path.dirname(__filename); const readFile = promisify(fsReadFile); -const expect = chai.expect; -chai.config.includeStack = true; - describe('PO Compiler', () => { describe('Headers', () => { - it('should keep tile casing', async () => { + test('should keep tile casing', async () => { const [json, poData] = await Promise.all([ readFile(path.join(__dirname, 'fixtures/headers-case.json'), 'utf8'), readFile(path.join(__dirname, 'fixtures/headers-case.po'), 'utf8') @@ -25,12 +23,12 @@ describe('PO Compiler', () => { const compiled = po.compile(JSON.parse(json), { eol: EOL }) .toString('utf8'); - expect(compiled).to.equal(poData); + assert.strictEqual(compiled, poData); }); }); describe('UTF-8', () => { - it('should compile', async () => { + test('should compile', async () => { const [json, poData] = await Promise.all([ readFile(path.join(__dirname, 'fixtures/utf8-po.json'), 'utf8'), readFile(path.join(__dirname, 'fixtures/utf8.po'), 'utf8') @@ -39,27 +37,14 @@ describe('PO Compiler', () => { const compiled = po.compile(JSON.parse(json), { eol: EOL }) .toString('utf8'); - expect(compiled).to.equal(poData); + assert.strictEqual(compiled, poData); }); }); - describe('Latin-13', () => { - it('should compile', async () => { - const [json, poData] = await Promise.all([ - // 
gettext-parser can only handle utf8 input (output will be the specified charset) - readFile(path.join(__dirname, 'fixtures/latin13-po.json'), 'utf8'), - readFile(path.join(__dirname, 'fixtures/latin13.po'), 'latin1') - ]); - const compiled = po.compile(JSON.parse(json), { eol: EOL }) - .toString('latin1'); - - expect(compiled).to.equal(poData); - }); - }); describe('Plurals', () => { - it('should compile correct plurals in POT files', async () => { + test('should compile correct plurals in POT files', async () => { const [json, pot] = await Promise.all([ readFile(path.join(__dirname, 'fixtures/plural-pot.json'), 'utf8'), readFile(path.join(__dirname, 'fixtures/plural.pot'), 'utf8') @@ -68,12 +53,12 @@ describe('PO Compiler', () => { const compiled = po.compile(JSON.parse(json), { eol: EOL }) .toString('utf8'); - expect(compiled).to.equal(pot); + assert.strictEqual(compiled, pot); }); }); describe('Message folding', () => { - it('should compile without folding', async () => { + test('should compile without folding', async () => { const [json, poData] = await Promise.all([ readFile(path.join(__dirname, 'fixtures/utf8-po.json'), 'utf8'), readFile(path.join(__dirname, 'fixtures/utf8-no-folding.po'), 'utf8') @@ -82,10 +67,10 @@ describe('PO Compiler', () => { const compiled = po.compile(JSON.parse(json), { foldLength: 0, eol: EOL }) .toString('utf8'); - expect(compiled).to.equal(poData); + assert.strictEqual(compiled, poData); }); - it('should compile with different folding', async () => { + test('should compile with different folding', async () => { const [json, poData] = await Promise.all([ readFile(path.join(__dirname, 'fixtures/utf8-po.json'), 'utf8'), readFile(path.join(__dirname, 'fixtures/utf8-folding-100.po'), 'utf8') @@ -94,12 +79,12 @@ describe('PO Compiler', () => { const compiled = po.compile(JSON.parse(json), { foldLength: 100, eol: EOL }) .toString('utf8'); - expect(compiled).to.equal(poData); + assert.strictEqual(compiled, poData); }); }); 
describe('Sorting', () => { - it('should sort output entries by msgid when `sort` is `true`', async () => { + test('should sort output entries by msgid when `sort` is `true`', async () => { const [json, pot] = await Promise.all([ readFile(path.join(__dirname, 'fixtures/sort-test.json'), 'utf8'), readFile(path.join(__dirname, 'fixtures/sort-test.pot'), 'utf8') @@ -108,11 +93,11 @@ describe('PO Compiler', () => { const compiled = po.compile(JSON.parse(json), { sort: true, eol: EOL }) .toString('utf8'); - expect(compiled).to.equal(pot); + assert.strictEqual(compiled, pot); }); - it('should sort entries using a custom `sort` function', async () => { - function compareMsgidAndMsgctxt (left, right) { + test('should sort entries using a custom `sort` function', async () => { + function compareMsgidAndMsgctxt(left, right) { if (left.msgid > right.msgid) { return 1; } @@ -143,14 +128,14 @@ describe('PO Compiler', () => { const compiled2 = po.compile(JSON.parse(json2), { sort: compareMsgidAndMsgctxt, eol: EOL }) .toString('utf8'); - expect(compiled1).to.equal(compiled2); - expect(compiled1).to.equal(pot); - expect(compiled2).to.equal(pot); + assert.strictEqual(compiled1, compiled2); + assert.strictEqual(compiled1, pot); + assert.strictEqual(compiled2, pot); }); }); describe('Skip escaping characters', () => { - it('should compile without escaping characters', async () => { + test('should compile without escaping characters', async () => { const [json, poData] = await Promise.all([ readFile(path.join(__dirname, 'fixtures/utf8-skip-escape-characters.json'), 'utf8'), readFile(path.join(__dirname, 'fixtures/utf8-skip-escape-characters.po'), 'utf8') @@ -159,7 +144,7 @@ describe('PO Compiler', () => { const compiled = po.compile(JSON.parse(json), { escapeCharacters: false, foldLength: 0, eol: EOL }) .toString('utf8'); - expect(compiled).to.equal(poData); + assert.strictEqual(compiled, poData); }); }); }); diff --git a/test/po-obsolete-test.js b/test/po-obsolete-test.js index 
aa7390d..8f2cc9d 100644 --- a/test/po-obsolete-test.js +++ b/test/po-obsolete-test.js @@ -2,8 +2,9 @@ import { EOL } from 'node:os'; import path from 'node:path'; import fs from 'node:fs'; import { promisify } from 'node:util'; -import * as chai from 'chai'; -import * as gettextParser from '../src/index.js'; +import { describe, test } from 'node:test'; +import assert from 'node:assert'; +import * as gettextParser from '../lib/index.mjs'; import { fileURLToPath } from 'node:url'; const __filename = fileURLToPath(import.meta.url); @@ -11,13 +12,10 @@ const __dirname = path.dirname(__filename); const readFile = promisify(fs.readFile); -const expect = chai.expect; -chai.config.includeStack = true; - describe('Obsolete', async () => { const [po, mo, jsonString] = await Promise.all([ readFile(path.join(__dirname, 'fixtures/obsolete.po')), - readFile(path.join(__dirname, 'fixtures/obsolete-le.mo')), + readFile(path.join(__dirname, 'fixtures/obsolete.mo')), readFile(path.join(__dirname, 'fixtures/obsolete.json'), 'utf8') ]); @@ -26,24 +24,26 @@ describe('Obsolete', async () => { const moString = mo.toString('utf8'); describe('PO Parser', () => { - it('should parse obsolete messages', async () => { - const parsed = gettextParser.po.parse(po); + test('should parse obsolete messages', async () => { + const parsed = await gettextParser.po.parse(po); - expect(parsed).to.deep.equal(json); + assert.deepStrictEqual(parsed, json); }); }); + describe('PO Compiler', () => { - it('should compile obsolete messages', async () => { + test('should compile obsolete messages', async () => { const compiled = gettextParser.po.compile(json, { eol: EOL }).toString('utf8'); - expect(compiled).to.be.equal(poString); + assert.strictEqual(compiled, poString); }); }); + describe('MO Compiler', () => { - it('should ignore obsolete messages', async () => { + test('should ignore obsolete messages', async () => { const compiled = gettextParser.mo.compile(json).toString('utf8'); - 
expect(compiled).to.be.equal(moString); + assert.strictEqual(compiled, moString); }); }); }); diff --git a/test/po-parser-test.js b/test/po-parser-test.js index 644e222..aee9dbd 100644 --- a/test/po-parser-test.js +++ b/test/po-parser-test.js @@ -1,8 +1,9 @@ -import * as chai from 'chai'; +import { describe, test } from 'node:test'; +import assert from 'node:assert'; import { promisify } from 'node:util'; import path from 'node:path'; import fs from 'node:fs'; -import * as gettextParser from '../src/index.js'; +import * as gettextParser from '../lib/index.mjs'; import { fileURLToPath } from 'node:url'; const __filename = fileURLToPath(import.meta.url); @@ -10,12 +11,9 @@ const __dirname = path.dirname(__filename); const readFile = promisify(fs.readFile); -const expect = chai.expect; -chai.config.includeStack = true; - describe('PO Parser', () => { describe('headers', () => { - it('should detect charset in header', async () => { + test('should detect charset in header', async () => { const [po, json] = await Promise.all([ readFile(path.join(__dirname, 'fixtures/headers-charset.po')), readFile(path.join(__dirname, 'fixtures/headers-charset.json'), 'utf8') @@ -23,10 +21,10 @@ describe('PO Parser', () => { const parsed = gettextParser.po.parse(po); - expect(parsed).to.deep.equal(JSON.parse(json)); + assert.deepStrictEqual(parsed, JSON.parse(json)); }); - it('should parse all known headers', async () => { + test('should parse all known headers', async () => { const [po, json] = await Promise.all([ readFile(path.join(__dirname, 'fixtures/headers-known.po')), readFile(path.join(__dirname, 'fixtures/headers-known.json'), 'utf8') @@ -34,12 +32,12 @@ describe('PO Parser', () => { const parsed = gettextParser.po.parse(po); - expect(parsed).to.deep.equal(JSON.parse(json)); + assert.deepStrictEqual(parsed, JSON.parse(json)); }); }); describe('UTF-8', () => { - it('should parse', async () => { + test('should parse', async () => { const [po, json] = await Promise.all([ 
readFile(path.join(__dirname, 'fixtures/utf8.po')), readFile(path.join(__dirname, 'fixtures/utf8-po.json'), 'utf8') @@ -47,12 +45,12 @@ describe('PO Parser', () => { const parsed = gettextParser.po.parse(po); - expect(parsed).to.deep.equal(JSON.parse(json)); + assert.deepStrictEqual(parsed, JSON.parse(json)); }); }); describe('UTF-8 as a string', () => { - it('should parse', async () => { + test('should parse', async () => { const [po, json] = await Promise.all([ readFile(path.join(__dirname, 'fixtures/utf8.po'), 'utf8'), readFile(path.join(__dirname, 'fixtures/utf8-po.json'), 'utf8') @@ -60,12 +58,12 @@ describe('PO Parser', () => { const parsed = gettextParser.po.parse(po); - expect(parsed).to.deep.equal(JSON.parse(json)); + assert.deepStrictEqual(parsed, JSON.parse(json)); }); }); describe('Stream input', () => { - it('should parse', done => { + test('should parse', (t, done) => { const po = fs.createReadStream(path.join(__dirname, 'fixtures/utf8.po'), { highWaterMark: 1 // ensure that any utf-8 sequences will be broken when streaming }); @@ -83,14 +81,14 @@ describe('PO Parser', () => { }); stream.on('end', () => { - expect(parsed).to.deep.equal(JSON.parse(json)); + assert.deepStrictEqual(parsed, JSON.parse(json)); done(); }); }); }); describe('Latin-13', () => { - it('should parse', async () => { + test('should parse', async () => { const [po, json] = await Promise.all([ readFile(path.join(__dirname, 'fixtures/latin13.po')), readFile(path.join(__dirname, 'fixtures/latin13-po.json'), 'utf8') @@ -98,14 +96,14 @@ describe('PO Parser', () => { const parsed = gettextParser.po.parse(po); - expect(parsed).to.deep.equal(JSON.parse(json)); + assert.deepStrictEqual(parsed, JSON.parse(json)); }); }); describe('parsing errors', () => { const invalidKeyError = /Error parsing PO data: Invalid key name/; - it('should throw (stream with unescaped quote)', done => { + test('should throw (stream with unescaped quote)', (t, done) => { const poStream = 
fs.createReadStream(path.join(__dirname, 'fixtures/error-unescaped-quote.po'), { highWaterMark: 1 // ensure that any utf-8 sequences will be broken when streaming }); @@ -115,7 +113,7 @@ describe('PO Parser', () => { })); stream.on('error', error => { - expect(error.message).to.match(invalidKeyError); + assert.ok(error.message.match(invalidKeyError)); done(); }); }); @@ -123,104 +121,104 @@ describe('PO Parser', () => { describe('when validation is disabled', () => { const options = { validation: false }; - it('should throw (unescaped quote)', async () => { + test('should throw (unescaped quote)', async () => { const po = await readFile(path.join(__dirname, 'fixtures/error-unescaped-quote.po')); - expect(gettextParser.po.parse.bind(gettextParser.po, po, options)).to.throw(invalidKeyError); + assert.throws(() => gettextParser.po.parse(po, options), invalidKeyError); }); - it('should throw (double-escaped quote)', async () => { + test('should throw (double-escaped quote)', async () => { const po = await readFile(path.join(__dirname, 'fixtures/error-double-escaped-quote.po')); - expect(gettextParser.po.parse.bind(gettextParser.po, po, options)).to.throw(invalidKeyError); + assert.throws(() => gettextParser.po.parse(po, options), invalidKeyError); }); - it('should not throw (an entry has too few plural forms)', async () => { + test('should not throw (an entry has too few plural forms)', async () => { const po = await readFile(path.join(__dirname, 'fixtures/validate-too-few-plural-forms.po')); - expect(gettextParser.po.parse.bind(gettextParser.po, po, options)).to.not.throw(); + assert.doesNotThrow(() => gettextParser.po.parse(po, options)); }); - it('should not throw (an entry has too many plural forms)', async () => { + test('should not throw (an entry has too many plural forms)', async () => { const po = await readFile(path.join(__dirname, 'fixtures/validate-too-many-plural-forms.po')); - expect(gettextParser.po.parse.bind(gettextParser.po, po, 
options)).to.not.throw(); + assert.doesNotThrow(() => gettextParser.po.parse(po, options)); }); - it('should not throw (an entry misses "msgid_plural")', async () => { + test('should not throw (an entry misses "msgid_plural")', async () => { const po = await readFile(path.join(__dirname, 'fixtures/validate-missing-msgid-plural.po')); - expect(gettextParser.po.parse.bind(gettextParser.po, po, options)).to.not.throw(); + assert.doesNotThrow(() => gettextParser.po.parse(po, options)); }); - it('should not throw (an entry misses single "msgstr")', async () => { + test('should not throw (an entry misses single "msgstr")', async () => { const po = await readFile(path.join(__dirname, 'fixtures/validate-missing-msgstr.po')); - expect(gettextParser.po.parse.bind(gettextParser.po, po, options)).to.not.throw(); + assert.doesNotThrow(() => gettextParser.po.parse(po, options)); }); - it('should not throw (duplicate entries found in the same context)', async () => { + test('should not throw (duplicate entries found in the same context)', async () => { const po = await readFile(path.join(__dirname, 'fixtures/validate-context-duplicate-entries.po')); - expect(gettextParser.po.parse.bind(gettextParser.po, po, options)).to.not.throw(); + assert.doesNotThrow(() => gettextParser.po.parse(po, options)); }); - it('should not throw (an entry with multiple "msgid_plural")', async () => { + test('should not throw (an entry with multiple "msgid_plural")', async () => { const po = await readFile(path.join(__dirname, 'fixtures/validate-redundant-msgid-plural.po')); - expect(gettextParser.po.parse.bind(gettextParser.po, po, options)).to.not.throw(); + assert.doesNotThrow(() => gettextParser.po.parse(po, options)); }); }); describe('when validation is enabled', () => { const options = { validation: true }; - it('should throw (unescaped quote)', async () => { + test('should throw (unescaped quote)', async () => { const po = await readFile(path.join(__dirname, 
'fixtures/error-unescaped-quote.po')); - expect(gettextParser.po.parse.bind(gettextParser.po, po, options)).to.throw(invalidKeyError); + assert.throws(() => gettextParser.po.parse(po, options), invalidKeyError); }); - it('should throw (double-escaped quote)', async () => { + test('should throw (double-escaped quote)', async () => { const po = await readFile(path.join(__dirname, 'fixtures/error-double-escaped-quote.po')); - expect(gettextParser.po.parse.bind(gettextParser.po, po, options)).to.throw(invalidKeyError); + assert.throws(() => gettextParser.po.parse(po, options), invalidKeyError); }); - it('should throw (an entry has too few plural forms)', async () => { + test('should throw (an entry has too few plural forms)', async () => { const po = await readFile(path.join(__dirname, 'fixtures/validate-too-few-plural-forms.po')); - expect(gettextParser.po.parse.bind(gettextParser.po, po, options)).to.throw('Plural forms range error: Expected to find 3 forms but got 2 for entry "o1-2" in "" context.'); + assert.throws(() => gettextParser.po.parse(po, options), /Plural forms range error: Expected to find 3 forms but got 2 for entry "o1-2" in "" context\./); }); - it('should throw (an entry has too many plural forms)', async () => { + test('should throw (an entry has too many plural forms)', async () => { const po = await readFile(path.join(__dirname, 'fixtures/validate-too-many-plural-forms.po')); - expect(gettextParser.po.parse.bind(gettextParser.po, po, options)).to.throw('Plural forms range error: Expected to find 2 forms but got 3 for entry "o1-2" in "" context.'); + assert.throws(() => gettextParser.po.parse(po, options), /Plural forms range error: Expected to find 2 forms but got 3 for entry "o1-2" in "" context\./); }); - it('should throw (an entry misses "msgid_plural")', async () => { + test('should throw (an entry misses "msgid_plural")', async () => { const po = await readFile(path.join(__dirname, 'fixtures/validate-missing-msgid-plural.po')); - 
expect(gettextParser.po.parse.bind(gettextParser.po, po, options)).to.throw('Translation string range error: Extected 1 msgstr definitions associated with "o1-1" in "" context, found 2.'); + assert.throws(() => gettextParser.po.parse(po, options), /Translation string range error: Extected 1 msgstr definitions associated with "o1-1" in "" context, found 2\./); }); - it('should throw (an entry misses single "msgstr")', async () => { + test('should throw (an entry misses single "msgstr")', async () => { const po = await readFile(path.join(__dirname, 'fixtures/validate-missing-msgstr.po')); - expect(gettextParser.po.parse.bind(gettextParser.po, po, options)).to.throw('Translation string range error: Extected 1 msgstr definitions associated with "o1" in "" context, found 0.'); + assert.throws(() => gettextParser.po.parse(po, options), /Translation string range error: Extected 1 msgstr definitions associated with "o1" in "" context, found 0\./); }); - it('should throw (duplicate entries found in the same context)', async () => { + test('should throw (duplicate entries found in the same context)', async () => { const po = await readFile(path.join(__dirname, 'fixtures/validate-context-duplicate-entries.po')); - expect(gettextParser.po.parse.bind(gettextParser.po, po, options)).to.throw('Duplicate msgid error: entry "o1-1" in "c2" context has already been declared.'); + assert.throws(() => gettextParser.po.parse(po, options), /Duplicate msgid error: entry "o1-1" in "c2" context has already been declared\./); }); - it('should throw (an entry with multiple "msgid_plural")', async () => { + test('should throw (an entry with multiple "msgid_plural")', async () => { const po = await readFile(path.join(__dirname, 'fixtures/validate-redundant-msgid-plural.po')); - expect(gettextParser.po.parse.bind(gettextParser.po, po, options)).to.throw('Multiple msgid_plural error: entry "o1-1" in "" context has multiple msgid_plural declarations.'); + assert.throws(() => 
gettextParser.po.parse(po, options), /Multiple msgid_plural error: entry "o1-1" in "" context has multiple msgid_plural declarations\./); }); }); }); diff --git a/test/shared.js b/test/shared.js index a079821..0ec3fc8 100644 --- a/test/shared.js +++ b/test/shared.js @@ -2,34 +2,32 @@ import { promisify } from 'node:util'; import path from 'node:path'; import { readFile as fsReadFile } from 'node:fs'; import { fileURLToPath } from 'node:url'; -import * as chai from 'chai'; -import { formatCharset, parseHeader, generateHeader, foldLine, parseNPluralFromHeadersSafely, compareMsgid } from '../src/shared.js'; +import { describe, test } from 'node:test'; +import assert from 'node:assert'; +import { compareMsgid, foldLine, formatCharset, generateHeader, parseHeader, parseNPluralFromHeadersSafely } from '../lib/shared.mjs'; const __filename = fileURLToPath(import.meta.url); const __dirname = path.dirname(__filename); const readFile = promisify(fsReadFile); -const expect = chai.expect; -chai.config.includeStack = true; - describe('Shared functions', () => { describe('formatCharset', () => { - it('should default to iso-8859-1', () => { - expect(formatCharset()).to.equal('iso-8859-1'); + test('should default to iso-8859-1', () => { + assert.strictEqual(formatCharset(), 'iso-8859-1'); }); - it('should normalize UTF8 to utf-8', () => { - expect(formatCharset('UTF8')).to.equal('utf-8'); + test('should normalize UTF8 to utf-8', () => { + assert.strictEqual(formatCharset('UTF8'), 'utf-8'); }); }); describe('parseHeader', () => { - it('should return an empty object by default', () => { - expect(parseHeader()).to.deep.equal({}); + test('should return an empty object by default', () => { + assert.deepStrictEqual(parseHeader(), {}); }); - it('should convert a header string into an object', async () => { + test('should convert a header string into an object', async () => { const str = `Project-Id-Version: project 1.0.2 POT-Creation-Date: 2012-05-18 14:28:00+03:00 content-type: 
text/plain; charset=utf-8 @@ -39,23 +37,27 @@ X-Poedit-SourceCharset: UTF-8`; const headers = parseHeader(str); - expect(headers).to.have.all.keys( + const expectedKeys = [ 'Project-Id-Version', 'POT-Creation-Date', 'Content-Type', 'Plural-Forms', 'mime-version', 'X-Poedit-SourceCharset' - ); + ]; + + for (const key of expectedKeys) { + assert.ok(Object.hasOwn(headers, key)); + } }); }); describe('generateHeader', () => { - it('should return an empty string by default', () => { - expect(generateHeader()).to.equal(''); + test('should return an empty string by default', () => { + assert.strictEqual(generateHeader(), ''); }); - it('should convert a header object into a string', async () => { + test('should convert a header object into a string', async () => { const json = await readFile(path.join(__dirname, 'fixtures/headers-case.json'), 'utf8'); const { headers } = JSON.parse(json); @@ -63,70 +65,44 @@ X-Poedit-SourceCharset: UTF-8`; const headerString = generateHeader(headers); headerKeys.forEach(key => { - expect(headerString).to.have.string(key); - expect(headerString).to.have.string(headers[key]); + assert.ok(headerString.includes(key)); + assert.ok(headerString.includes(headers[key])); }); - expect(headerString).to.match(/\n$/, 'Non-empty header has to end with newline'); + assert.match(headerString, /\n$/, 'Non-empty header has to end with newline'); }); }); describe('foldLine', () => { - it('should not fold when not necessary', () => { + test('should not fold when not necessary', () => { const line = 'abc def ghi'; const folded = foldLine(line); - expect(line).to.equal(folded.join('')); - expect(folded.length).to.equal(1); - }); - - it('should force fold with newline', () => { - const line = 'abc \\ndef \\nghi'; - const folded = foldLine(line); - - expect(line).to.equal(folded.join('')); - expect(folded).to.deep.equal(['abc \\n', 'def \\n', 'ghi']); - expect(folded.length).to.equal(3); + assert.strictEqual(line, folded.join('')); + 
assert.strictEqual(folded.length, 1); }); - it('should fold the line into multiple lines with the right length', () => { - const line = Array.from({ length: 76 }, () => 'a').join('') + 'aaaaa\\aaaa'; + test('should force fold with newline', () => { + const line = 'abc \ndef \nghi'; const folded = foldLine(line); - expect(folded.length).to.equal(2); - expect(folded[0].length).to.equal(76); - expect(line).to.equal(folded.join('')); - expect(folded).to.deep.equal([ - 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', - 'aaaaa\\aaaa' - ]); - }); - it('should fold the line into multiple lines with the right length (escaped character)', () => { - const line = Array.from({ length: 75 }, () => 'a').join('') + '\\aaaaaa\\aaaa'; - const folded = foldLine(line); - expect(folded.length).to.equal(2); - expect(folded[0].length).to.equal(77); - expect(line).to.equal(folded.join('')); - expect(folded).to.deep.equal([ - 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\a', - 'aaaaa\\aaaa' - ]); + assert.strictEqual(line, folded.join('')); + assert.deepStrictEqual(folded, ['abc \n', 'def \n', 'ghi']); + assert.strictEqual(folded.length, 3); }); - - it('should fold the line into multiple lines with the right length (escaped forward slash)', () => { - const line = Array.from({ length: 75 }, () => 'a').join('') + '\\\\aaaaa\\aaaa'; + test('should fold the line into multiple lines with the right length', () => { + const line = Array.from({ length: 75 }, () => 'a').join('') + '\\aaaaa\\aaaa'; const folded = foldLine(line); - expect(folded.length).to.equal(2); - expect(folded[0].length).to.equal(77); - expect(line).to.equal(folded.join('')); - expect(folded).to.deep.equal([ - 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\\', + assert.strictEqual(folded.length, 2); + assert.strictEqual(line, folded.join('')); + assert.deepStrictEqual(folded, [ + 
'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\', 'aaaaa\\aaaa' ]); }); - it('should fold at default length', () => { + test('should fold at default length', () => { const expected = ['Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vestibulum pretium ', 'a nunc ac fringilla. Nulla laoreet tincidunt tincidunt. Proin tristique ', 'vestibulum mauris non aliquam. Vivamus volutpat odio nisl, sed placerat ', @@ -136,145 +112,145 @@ X-Poedit-SourceCharset: UTF-8`; 'lobortis tristique.' ]; const folded = foldLine(expected.join('')); - expect(folded).to.deep.equal(expected); - expect(folded.length).to.equal(7); + assert.deepStrictEqual(folded, expected); + assert.strictEqual(folded.length, 7); }); - it('should force fold white space', () => { + test('should force fold white space', () => { const line = 'abc def ghi'; const folded = foldLine(line, 5); - expect(line).to.equal(folded.join('')); - expect(folded).to.deep.equal(['abc ', 'def ', 'ghi']); - expect(folded.length).to.equal(3); + assert.strictEqual(line, folded.join('')); + assert.deepStrictEqual(folded, ['abc ', 'def ', 'ghi']); + assert.strictEqual(folded.length, 3); }); - it('should ignore leading spaces', () => { + test('should ignore leading spaces', () => { const line = ' abc def ghi'; const folded = foldLine(line, 5); - expect(line).to.equal(folded.join('')); - expect(folded).to.deep.equal([' a', 'bc ', 'def ', 'ghi']); - expect(folded.length).to.equal(4); + assert.strictEqual(line, folded.join('')); + assert.deepStrictEqual(folded, [' a', 'bc ', 'def ', 'ghi']); + assert.strictEqual(folded.length, 4); }); - it('should force fold special character', () => { + test('should force fold special character', () => { const line = 'abcdef--ghi'; const folded = foldLine(line, 5); - expect(line).to.equal(folded.join('')); - expect(folded).to.deep.equal(['abcde', 'f--', 'ghi']); - expect(folded.length).to.equal(3); + assert.strictEqual(line, folded.join('')); + 
assert.deepStrictEqual(folded, ['abcde', 'f--', 'ghi']); + assert.strictEqual(folded.length, 3); }); - it('should force fold last special character', () => { + test('should force fold last special character', () => { const line = 'ab--cdef--ghi'; const folded = foldLine(line, 10); - expect(line).to.equal(folded.join('')); - expect(folded).to.deep.equal(['ab--cdef--', 'ghi']); - expect(folded.length).to.equal(2); + assert.strictEqual(line, folded.join('')); + assert.deepStrictEqual(folded, ['ab--cdef--', 'ghi']); + assert.strictEqual(folded.length, 2); }); - it('should force fold only if at least one non-special character', () => { + test('should force fold only if at least one non-special character', () => { const line = '--abcdefghi'; const folded = foldLine(line, 5); - expect(line).to.equal(folded.join('')); - expect(folded).to.deep.equal(['--abc', 'defgh', 'i']); - expect(folded.length).to.equal(3); + assert.strictEqual(line, folded.join('')); + assert.deepStrictEqual(folded, ['--abc', 'defgh', 'i']); + assert.strictEqual(folded.length, 3); }); }); describe('parseNPluralFromHeadersSafely', () => { - it('should return parsed value', () => { + test('should return parsed value', () => { const headers = { 'Plural-Forms': 'nplurals=10; plural=n' }; const nplurals = parseNPluralFromHeadersSafely(headers); - expect(nplurals).to.equal(10); + assert.strictEqual(nplurals, 10); }); - it('should return parsed value (missing plural declaration)', () => { + test('should return parsed value (missing plural declaration)', () => { const headers = { 'Plural-Forms': 'nplurals=10' }; const nplurals = parseNPluralFromHeadersSafely(headers); - expect(nplurals).to.equal(10); + assert.strictEqual(nplurals, 10); }); - it('should return fallback value ("Plural-Forms" header is absent)', () => { + test('should return fallback value ("Plural-Forms" header is absent)', () => { const nplurals = parseNPluralFromHeadersSafely(); - expect(nplurals).to.equal(1); + assert.strictEqual(nplurals, 
1); }); - it('should return fallback value (nplurals is not declared)', () => { + test('should return fallback value (nplurals is not declared)', () => { const headers = { 'Plural-Forms': '; plural=n' }; const nplurals = parseNPluralFromHeadersSafely(headers); - expect(nplurals).to.equal(1); + assert.strictEqual(nplurals, 1); }); - it('should return fallback value (nplurals is set to zero)', () => { + test('should return fallback value (nplurals is set to zero)', () => { const headers = { 'Plural-Forms': 'nplurals=0' }; const nplurals = parseNPluralFromHeadersSafely(headers); - expect(nplurals).to.equal(1); + assert.strictEqual(nplurals, 1); }); - it('should return fallback value (nplurals is set to negative value)', () => { + test('should return fallback value (nplurals is set to negative value)', () => { const headers = { 'Plural-Forms': 'nplurals=-99' }; const nplurals = parseNPluralFromHeadersSafely(headers); - expect(nplurals).to.equal(1); + assert.strictEqual(nplurals, 1); }); - it('should return fallback value (failed to parse nplurals value)', () => { + test('should return fallback value (failed to parse nplurals value)', () => { const headers = { 'Plural-Forms': 'nplurals=foo' }; const nplurals = parseNPluralFromHeadersSafely(headers); - expect(nplurals).to.equal(1); + assert.strictEqual(nplurals, 1); }); }); }); describe('Strings Sorting function', () => { - it('should return -1 when left msgid is less than right msgid', () => { + test('should return -1 when left msgid is less than right msgid', () => { const result = compareMsgid({ msgid: 'a' }, { msgid: 'b' }); - expect(result).to.equal(-1); + assert.strictEqual(result, -1); }); - it('should return 1 when left msgid is greater than right msgid', () => { + test('should return 1 when left msgid is greater than right msgid', () => { const result = compareMsgid({ msgid: 'b' }, { msgid: 'a' }); - expect(result).to.equal(1); + assert.strictEqual(result, 1); }); - it('should return 0 when left msgid is equal 
to right msgid', () => { + test('should return 0 when left msgid is equal to right msgid', () => { const result = compareMsgid({ msgid: 'a' }, { msgid: 'a' }); - expect(result).to.equal(0); + assert.strictEqual(result, 0); }); - it('should return -1 when msgid is the uppercased version of the other msgid', () => { + test('should return -1 when msgid is the uppercased version of the other msgid', () => { const result = compareMsgid({ msgid: 'A' }, { msgid: 'a' }); - expect(result).to.equal(-1); + assert.strictEqual(result, -1); }); - it('should return 1 when the msgid is a number and other is a string', () => { + test('should return 1 when the msgid is a number and other is a string', () => { const result = compareMsgid({ msgid: 'A' }, { msgid: '1' }); - expect(result).to.equal(1); + assert.strictEqual(result, 1); }); - it('should return the right result using buffer comparison', () => { + test('should return the right result using buffer comparison', () => { const result = compareMsgid({ msgid: Buffer.from('a') }, { msgid: Buffer.from('b') }); - expect(result).to.equal(-1); + assert.strictEqual(result, -1); }); - it('should return the right result using buffer (both directions)', () => { + test('should return the right result using buffer (both directions)', () => { const result = compareMsgid({ msgid: Buffer.from('c') }, { msgid: Buffer.from('b') }); - expect(result).to.equal(1); + assert.strictEqual(result, 1); }); - it('should return the right result using buffer comparison (checking uppercase)', () => { + test('should return the right result using buffer comparison (checking uppercase)', () => { const result = compareMsgid({ msgid: Buffer.from('A') }, { msgid: Buffer.from('a') }); - expect(result).to.equal(-1); + assert.strictEqual(result, -1); }); }); From db9a8bf32584ce5fb85f53f2bbf9efc8b883106a Mon Sep 17 00:00:00 2001 From: Erik Golinelli Date: Thu, 26 Feb 2026 12:45:46 +0100 Subject: [PATCH 4/7] chore: replace `.npmrc` with `biome.json`, add 
`package-lock.json` for dependency management This update introduces `biome` configuration for formatting and linting, while re-adding `package-lock.json` to ensure consistent package resolutions. --- .editorconfig | 15 + .gitignore | 92 +++- .npmignore | 4 +- .npmrc | 1 - biome.json | 21 + package-lock.json | 1265 +++++++++++++++++++++++++++++++++++++++++++++ package.json | 66 ++- tsdown.config.ts | 26 + 8 files changed, 1446 insertions(+), 44 deletions(-) create mode 100644 .editorconfig delete mode 100644 .npmrc create mode 100644 biome.json create mode 100644 package-lock.json create mode 100644 tsdown.config.ts diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..1e793ea --- /dev/null +++ b/.editorconfig @@ -0,0 +1,15 @@ +root = true + +[*] +charset = utf-8 +end_of_line = lf +indent_size = 2 +indent_style = space +insert_final_newline = true +max_line_length = 240 +tab_width = 2 +trim_trailing_whitespace = true + +[{*.markdown,*.md}] +indent_size = 4 +tab_width = 4 diff --git a/.gitignore b/.gitignore index ee8dea8..cfebdcf 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,87 @@ -/node_modules -/lib -/@types -npm-debug.log -.DS_Store +### Node template +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* + +# Diagnostic reports (https://nodejs.org/api/report.html) +report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json + +# Runtime data +pids +*.pid +*.seed +*.pid.lock + +# Coverage directory used by tools like istanbul +coverage + +# node-waf configuration +.lock-wscript + +# Dependency directories +node_modules/ + +# Builded files +lib/ + +# Snowpack dependency directory (https://snowpack.dev/) +web_modules/ + +# TypeScript cache +*.tsbuildinfo + +# Optional npm cache directory +.npm + +# Intellij +.idea + +# Optional eslint cache +.eslintcache + +# Optional REPL history +.node_repl_history + +# Output of 'npm pack' +*.tgz + +# Yarn Integrity file +.yarn-integrity + +# dotenv environment variable files +.env 
+.env.development.local +.env.test.local +.env.production.local +.env.local + +# parcel-bundler cache (https://parceljs.org/) +.cache + +# Gatsby files +.cache/ +# Comment in the public line in if your project uses Gatsby and not Next.js +# https://nextjs.org/blog/next-9-1#public-directory-support +# public + +# vuepress build output +.vuepress/dist + +# vuepress v2.x temp and cache directory +.temp +.cache + +# yarn v2 +.yarn/cache +.yarn/unplugged +.yarn/build-state.yml +.yarn/install-state.gz +.pnp.* + +.npmrc + +## Angular +.angular diff --git a/.npmignore b/.npmignore index 478b410..c6273aa 100644 --- a/.npmignore +++ b/.npmignore @@ -1,4 +1,6 @@ /test /src -.eslintrc.js +biome.json .gitignore +tsdown.config.ts +.editorconfig diff --git a/.npmrc b/.npmrc deleted file mode 100644 index 43c97e7..0000000 --- a/.npmrc +++ /dev/null @@ -1 +0,0 @@ -package-lock=false diff --git a/biome.json b/biome.json new file mode 100644 index 0000000..be3aa41 --- /dev/null +++ b/biome.json @@ -0,0 +1,21 @@ +{ + "$schema": "https://biomejs.dev/schemas/2.0.5/schema.json", + "formatter": { + "enabled": true + }, + "linter": { + "enabled": true + }, + "json": { + "formatter": { + "enabled": false + } + }, + "assist": { + "actions": { + "source": { + "organizeImports": "on" + } + } + } +} diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 0000000..4559ee8 --- /dev/null +++ b/package-lock.json @@ -0,0 +1,1265 @@ +{ + "name": "gettext-parser-next", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "gettext-parser-next", + "version": "1.0.0", + "license": "MIT", + "devDependencies": { + "@biomejs/biome": "2.4.4", + "@types/node": "^25.3.1", + "tsdown": "^0.20.3", + "typescript": "^5.9.3" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@babel/generator": { + "version": "8.0.0-rc.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^8.0.0-rc.1", + "@babel/types": 
"^8.0.0-rc.1", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "@types/jsesc": "^2.5.0", + "jsesc": "^3.0.2" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "8.0.0-rc.2", + "dev": true, + "license": "MIT", + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "8.0.0-rc.1", + "dev": true, + "license": "MIT", + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@babel/parser": { + "version": "8.0.0-rc.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^8.0.0-rc.1" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@babel/types": { + "version": "8.0.0-rc.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^8.0.0-rc.1", + "@babel/helper-validator-identifier": "^8.0.0-rc.1" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@biomejs/biome": { + "version": "2.4.4", + "dev": true, + "license": "MIT OR Apache-2.0", + "bin": { + "biome": "bin/biome" + }, + "engines": { + "node": ">=14.21.3" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/biome" + }, + "optionalDependencies": { + "@biomejs/cli-darwin-arm64": "2.4.4", + "@biomejs/cli-darwin-x64": "2.4.4", + "@biomejs/cli-linux-arm64": "2.4.4", + "@biomejs/cli-linux-arm64-musl": "2.4.4", + "@biomejs/cli-linux-x64": "2.4.4", + "@biomejs/cli-linux-x64-musl": "2.4.4", + "@biomejs/cli-win32-arm64": "2.4.4", + "@biomejs/cli-win32-x64": "2.4.4" + } + }, + "node_modules/@biomejs/cli-darwin-arm64": { + "version": "2.4.4", + "resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-arm64/-/cli-darwin-arm64-2.4.4.tgz", + "integrity": 
"sha512-jZ+Xc6qvD6tTH5jM6eKX44dcbyNqJHssfl2nnwT6vma6B1sj7ZLTGIk6N5QwVBs5xGN52r3trk5fgd3sQ9We9A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-darwin-x64": { + "version": "2.4.4", + "resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-x64/-/cli-darwin-x64-2.4.4.tgz", + "integrity": "sha512-Dh1a/+W+SUCXhEdL7TiX3ArPTFCQKJTI1mGncZNWfO+6suk+gYA4lNyJcBB+pwvF49uw0pEbUS49BgYOY4hzUg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-linux-arm64": { + "version": "2.4.4", + "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64/-/cli-linux-arm64-2.4.4.tgz", + "integrity": "sha512-V/NFfbWhsUU6w+m5WYbBenlEAz8eYnSqRMDMAW3K+3v0tYVkNyZn8VU0XPxk/lOqNXLSCCrV7FmV/u3SjCBShg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-linux-arm64-musl": { + "version": "2.4.4", + "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.4.4.tgz", + "integrity": "sha512-+sPAXq3bxmFwhVFJnSwkSF5Rw2ZAJMH3MF6C9IveAEOdSpgajPhoQhbbAK12SehN9j2QrHpk4J/cHsa/HqWaYQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-linux-x64": { + "version": "2.4.4", + "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64/-/cli-linux-x64-2.4.4.tgz", + "integrity": "sha512-R4+ZCDtG9kHArasyBO+UBD6jr/FcFCTH8QkNTOCu0pRJzCWyWC4EtZa2AmUZB5h3e0jD7bRV2KvrENcf8rndBg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "linux" + ], 
+ "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-linux-x64-musl": { + "version": "2.4.4", + "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64-musl/-/cli-linux-x64-musl-2.4.4.tgz", + "integrity": "sha512-gGvFTGpOIQDb5CQ2VC0n9Z2UEqlP46c4aNgHmAMytYieTGEcfqhfCFnhs6xjt0S3igE6q5GLuIXtdQt3Izok+g==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-win32-arm64": { + "version": "2.4.4", + "resolved": "https://registry.npmjs.org/@biomejs/cli-win32-arm64/-/cli-win32-arm64-2.4.4.tgz", + "integrity": "sha512-trzCqM7x+Gn832zZHgr28JoYagQNX4CZkUZhMUac2YxvvyDRLJDrb5m9IA7CaZLlX6lTQmADVfLEKP1et1Ma4Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-win32-x64": { + "version": "2.4.4", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@emnapi/core": { + "version": "1.8.1", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/wasi-threads": "1.1.0", + "tslib": "^2.4.0" + } + }, + "node_modules/@emnapi/runtime": { + "version": "1.8.1", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@emnapi/wasi-threads": { + "version": "1.1.0", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.13", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "dev": true, + 
"license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.31", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@napi-rs/wasm-runtime": { + "version": "1.1.1", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/core": "^1.7.1", + "@emnapi/runtime": "^1.7.1", + "@tybys/wasm-util": "^0.10.1" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Brooooooklyn" + } + }, + "node_modules/@oxc-project/types": { + "version": "0.112.0", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/Boshen" + } + }, + "node_modules/@quansync/fs": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "quansync": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sxzz" + } + }, + "node_modules/@rolldown/binding-android-arm64": { + "version": "1.0.0-rc.3", + "resolved": "https://registry.npmjs.org/@rolldown/binding-android-arm64/-/binding-android-arm64-1.0.0-rc.3.tgz", + "integrity": "sha512-0T1k9FinuBZ/t7rZ8jN6OpUKPnUjNdYHoj/cESWrQ3ZraAJ4OMm6z7QjSfCxqj8mOp9kTKc1zHK3kGz5vMu+nQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-darwin-arm64": { + "version": "1.0.0-rc.3", + "resolved": "https://registry.npmjs.org/@rolldown/binding-darwin-arm64/-/binding-darwin-arm64-1.0.0-rc.3.tgz", + "integrity": "sha512-JWWLzvcmc/3pe7qdJqPpuPk91SoE/N+f3PcWx/6ZwuyDVyungAEJPvKm/eEldiDdwTmaEzWfIR+HORxYWrCi1A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + 
"node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-darwin-x64": { + "version": "1.0.0-rc.3", + "resolved": "https://registry.npmjs.org/@rolldown/binding-darwin-x64/-/binding-darwin-x64-1.0.0-rc.3.tgz", + "integrity": "sha512-MTakBxfx3tde5WSmbHxuqlDsIW0EzQym+PJYGF4P6lG2NmKzi128OGynoFUqoD5ryCySEY85dug4v+LWGBElIw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-freebsd-x64": { + "version": "1.0.0-rc.3", + "resolved": "https://registry.npmjs.org/@rolldown/binding-freebsd-x64/-/binding-freebsd-x64-1.0.0-rc.3.tgz", + "integrity": "sha512-jje3oopyOLs7IwfvXoS6Lxnmie5JJO7vW29fdGFu5YGY1EDbVDhD+P9vDihqS5X6fFiqL3ZQZCMBg6jyHkSVww==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-linux-arm-gnueabihf": { + "version": "1.0.0-rc.3", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-1.0.0-rc.3.tgz", + "integrity": "sha512-A0n8P3hdLAaqzSFrQoA42p23ZKBYQOw+8EH5r15Sa9X1kD9/JXe0YT2gph2QTWvdr0CVK2BOXiK6ENfy6DXOag==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-linux-arm64-gnu": { + "version": "1.0.0-rc.3", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-1.0.0-rc.3.tgz", + "integrity": "sha512-kWXkoxxarYISBJ4bLNf5vFkEbb4JvccOwxWDxuK9yee8lg5XA7OpvlTptfRuwEvYcOZf+7VS69Uenpmpyo5Bjw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-linux-arm64-musl": { + 
"version": "1.0.0-rc.3", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm64-musl/-/binding-linux-arm64-musl-1.0.0-rc.3.tgz", + "integrity": "sha512-Z03/wrqau9Bicfgb3Dbs6SYTHliELk2PM2LpG2nFd+cGupTMF5kanLEcj2vuuJLLhptNyS61rtk7SOZ+lPsTUA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-linux-x64-gnu": { + "version": "1.0.0-rc.3", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-x64-gnu/-/binding-linux-x64-gnu-1.0.0-rc.3.tgz", + "integrity": "sha512-iSXXZsQp08CSilff/DCTFZHSVEpEwdicV3W8idHyrByrcsRDVh9sGC3sev6d8BygSGj3vt8GvUKBPCoyMA4tgQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-linux-x64-musl": { + "version": "1.0.0-rc.3", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-x64-musl/-/binding-linux-x64-musl-1.0.0-rc.3.tgz", + "integrity": "sha512-qaj+MFudtdCv9xZo9znFvkgoajLdc+vwf0Kz5N44g+LU5XMe+IsACgn3UG7uTRlCCvhMAGXm1XlpEA5bZBrOcw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-openharmony-arm64": { + "version": "1.0.0-rc.3", + "resolved": "https://registry.npmjs.org/@rolldown/binding-openharmony-arm64/-/binding-openharmony-arm64-1.0.0-rc.3.tgz", + "integrity": "sha512-U662UnMETyjT65gFmG9ma+XziENrs7BBnENi/27swZPYagubfHRirXHG2oMl+pEax2WvO7Kb9gHZmMakpYqBHQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-wasm32-wasi": { + "version": "1.0.0-rc.3", + "resolved": 
"https://registry.npmjs.org/@rolldown/binding-wasm32-wasi/-/binding-wasm32-wasi-1.0.0-rc.3.tgz", + "integrity": "sha512-gekrQ3Q2HiC1T5njGyuUJoGpK/l6B/TNXKed3fZXNf9YRTJn3L5MOZsFBn4bN2+UX+8+7hgdlTcEsexX988G4g==", + "cpu": [ + "wasm32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@napi-rs/wasm-runtime": "^1.1.1" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@rolldown/binding-win32-arm64-msvc": { + "version": "1.0.0-rc.3", + "resolved": "https://registry.npmjs.org/@rolldown/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-1.0.0-rc.3.tgz", + "integrity": "sha512-85y5JifyMgs8m5K2XzR/VDsapKbiFiohl7s5lEj7nmNGO0pkTXE7q6TQScei96BNAsoK7JC3pA7ukA8WRHVJpg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/binding-win32-x64-msvc": { + "version": "1.0.0-rc.3", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rolldown/pluginutils": { + "version": "1.0.0-rc.3", + "dev": true, + "license": "MIT" + }, + "node_modules/@tybys/wasm-util": { + "version": "0.10.1", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/jsesc": { + "version": "2.5.1", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "25.3.1", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~7.18.0" + } + }, + "node_modules/ansis": { + "version": "4.2.0", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + } + }, + "node_modules/ast-kit": { + "version": "3.0.0-beta.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": 
"^8.0.0-beta.4", + "estree-walker": "^3.0.3", + "pathe": "^2.0.3" + }, + "engines": { + "node": ">=20.19.0" + }, + "funding": { + "url": "https://github.com/sponsors/sxzz" + } + }, + "node_modules/birpc": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, + "node_modules/cac": { + "version": "6.7.14", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/defu": { + "version": "6.1.4", + "dev": true, + "license": "MIT" + }, + "node_modules/dts-resolver": { + "version": "2.1.3", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=20.19.0" + }, + "funding": { + "url": "https://github.com/sponsors/sxzz" + }, + "peerDependencies": { + "oxc-resolver": ">=11.0.0" + }, + "peerDependenciesMeta": { + "oxc-resolver": { + "optional": true + } + } + }, + "node_modules/empathic": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14" + } + }, + "node_modules/estree-walker": { + "version": "3.0.3", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0" + } + }, + "node_modules/fdir": { + "version": "6.5.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/get-tsconfig": { + "version": "4.13.6", + "dev": true, + "license": "MIT", + "dependencies": { + "resolve-pkg-maps": "^1.0.0" + }, + "funding": { + "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" + } + }, + "node_modules/hookable": { + "version": "6.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/import-without-cache": { + "version": "0.2.5", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=20.19.0" + }, + "funding": { + "url": "https://github.com/sponsors/sxzz" + } + }, + "node_modules/jsesc": { + "version": "3.1.0", + "dev": 
true, + "license": "MIT", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/obug": { + "version": "2.1.1", + "dev": true, + "funding": [ + "https://github.com/sponsors/sxzz", + "https://opencollective.com/debug" + ], + "license": "MIT" + }, + "node_modules/pathe": { + "version": "2.0.3", + "dev": true, + "license": "MIT" + }, + "node_modules/picomatch": { + "version": "4.0.3", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/quansync": { + "version": "1.0.0", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/antfu" + }, + { + "type": "individual", + "url": "https://github.com/sponsors/sxzz" + } + ], + "license": "MIT" + }, + "node_modules/resolve-pkg-maps": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" + } + }, + "node_modules/rolldown": { + "version": "1.0.0-rc.3", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@oxc-project/types": "=0.112.0", + "@rolldown/pluginutils": "1.0.0-rc.3" + }, + "bin": { + "rolldown": "bin/cli.mjs" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + }, + "optionalDependencies": { + "@rolldown/binding-android-arm64": "1.0.0-rc.3", + "@rolldown/binding-darwin-arm64": "1.0.0-rc.3", + "@rolldown/binding-darwin-x64": "1.0.0-rc.3", + "@rolldown/binding-freebsd-x64": "1.0.0-rc.3", + "@rolldown/binding-linux-arm-gnueabihf": "1.0.0-rc.3", + "@rolldown/binding-linux-arm64-gnu": "1.0.0-rc.3", + "@rolldown/binding-linux-arm64-musl": "1.0.0-rc.3", + "@rolldown/binding-linux-x64-gnu": "1.0.0-rc.3", + "@rolldown/binding-linux-x64-musl": "1.0.0-rc.3", + "@rolldown/binding-openharmony-arm64": "1.0.0-rc.3", + "@rolldown/binding-wasm32-wasi": "1.0.0-rc.3", + "@rolldown/binding-win32-arm64-msvc": "1.0.0-rc.3", 
+ "@rolldown/binding-win32-x64-msvc": "1.0.0-rc.3" + } + }, + "node_modules/rolldown-plugin-dts": { + "version": "0.22.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/generator": "8.0.0-rc.1", + "@babel/helper-validator-identifier": "8.0.0-rc.1", + "@babel/parser": "8.0.0-rc.1", + "@babel/types": "8.0.0-rc.1", + "ast-kit": "^3.0.0-beta.1", + "birpc": "^4.0.0", + "dts-resolver": "^2.1.3", + "get-tsconfig": "^4.13.1", + "obug": "^2.1.1" + }, + "engines": { + "node": ">=20.19.0" + }, + "funding": { + "url": "https://github.com/sponsors/sxzz" + }, + "peerDependencies": { + "@ts-macro/tsc": "^0.3.6", + "@typescript/native-preview": ">=7.0.0-dev.20250601.1", + "rolldown": "^1.0.0-rc.3", + "typescript": "^5.0.0", + "vue-tsc": "~3.2.0" + }, + "peerDependenciesMeta": { + "@ts-macro/tsc": { + "optional": true + }, + "@typescript/native-preview": { + "optional": true + }, + "typescript": { + "optional": true + }, + "vue-tsc": { + "optional": true + } + } + }, + "node_modules/semver": { + "version": "7.7.4", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/tinyexec": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/tinyglobby": { + "version": "0.2.15", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.5.0", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/tree-kill": { + "version": "1.2.2", + "dev": true, + "license": "MIT", + "bin": { + "tree-kill": "cli.js" + } + }, + "node_modules/tsdown": { + "version": "0.20.3", + "dev": true, + "license": "MIT", + "dependencies": { + "ansis": "^4.2.0", + "cac": "^6.7.14", + "defu": "^6.1.4", + "empathic": "^2.0.0", + "hookable": "^6.0.1", + "import-without-cache": "^0.2.5", + "obug": "^2.1.1", + "picomatch": "^4.0.3", + "rolldown": 
"1.0.0-rc.3", + "rolldown-plugin-dts": "^0.22.1", + "semver": "^7.7.3", + "tinyexec": "^1.0.2", + "tinyglobby": "^0.2.15", + "tree-kill": "^1.2.2", + "unconfig-core": "^7.4.2", + "unrun": "^0.2.27" + }, + "bin": { + "tsdown": "dist/run.mjs" + }, + "engines": { + "node": ">=20.19.0" + }, + "funding": { + "url": "https://github.com/sponsors/sxzz" + }, + "peerDependencies": { + "@arethetypeswrong/core": "^0.18.1", + "@vitejs/devtools": "*", + "publint": "^0.3.0", + "typescript": "^5.0.0", + "unplugin-lightningcss": "^0.4.0", + "unplugin-unused": "^0.5.0" + }, + "peerDependenciesMeta": { + "@arethetypeswrong/core": { + "optional": true + }, + "@vitejs/devtools": { + "optional": true + }, + "publint": { + "optional": true + }, + "typescript": { + "optional": true + }, + "unplugin-lightningcss": { + "optional": true + }, + "unplugin-unused": { + "optional": true + } + } + }, + "node_modules/tslib": { + "version": "2.8.1", + "dev": true, + "license": "0BSD", + "optional": true + }, + "node_modules/typescript": { + "version": "5.9.3", + "dev": true, + "license": "Apache-2.0", + "peer": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/unconfig-core": { + "version": "7.5.0", + "dev": true, + "license": "MIT", + "dependencies": { + "@quansync/fs": "^1.0.0", + "quansync": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, + "node_modules/undici-types": { + "version": "7.18.2", + "dev": true, + "license": "MIT" + }, + "node_modules/unrun": { + "version": "0.2.28", + "dev": true, + "license": "MIT", + "dependencies": { + "rolldown": "1.0.0-rc.5" + }, + "bin": { + "unrun": "dist/cli.mjs" + }, + "engines": { + "node": ">=20.19.0" + }, + "funding": { + "url": "https://github.com/sponsors/Gugustinette" + }, + "peerDependencies": { + "synckit": "^0.11.11" + }, + "peerDependenciesMeta": { + "synckit": { + "optional": true + } + } + }, + 
"node_modules/unrun/node_modules/@oxc-project/types": { + "version": "0.114.0", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/Boshen" + } + }, + "node_modules/unrun/node_modules/@rolldown/binding-android-arm64": { + "version": "1.0.0-rc.5", + "resolved": "https://registry.npmjs.org/@rolldown/binding-android-arm64/-/binding-android-arm64-1.0.0-rc.5.tgz", + "integrity": "sha512-zCEmUrt1bggwgBgeKLxNj217J1OrChrp3jJt24VK9jAharSTeVaHODNL+LpcQVhRz+FktYWfT9cjo5oZ99ZLpg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/unrun/node_modules/@rolldown/binding-darwin-arm64": { + "version": "1.0.0-rc.5", + "resolved": "https://registry.npmjs.org/@rolldown/binding-darwin-arm64/-/binding-darwin-arm64-1.0.0-rc.5.tgz", + "integrity": "sha512-ZP9xb9lPAex36pvkNWCjSEJW/Gfdm9I3ssiqOFLmpZ/vosPXgpoGxCmh+dX1Qs+/bWQE6toNFXWWL8vYoKoK9Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/unrun/node_modules/@rolldown/binding-darwin-x64": { + "version": "1.0.0-rc.5", + "resolved": "https://registry.npmjs.org/@rolldown/binding-darwin-x64/-/binding-darwin-x64-1.0.0-rc.5.tgz", + "integrity": "sha512-7IdrPunf6dp9mywMgTOKMMGDnMHQ6+h5gRl6LW8rhD8WK2kXX0IwzcM5Zc0B5J7xQs8QWOlKjv8BJsU/1CD3pg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/unrun/node_modules/@rolldown/binding-freebsd-x64": { + "version": "1.0.0-rc.5", + "resolved": "https://registry.npmjs.org/@rolldown/binding-freebsd-x64/-/binding-freebsd-x64-1.0.0-rc.5.tgz", + "integrity": "sha512-o/JCk+dL0IN68EBhZ4DqfsfvxPfMeoM6cJtxORC1YYoxGHZyth2Kb2maXDb4oddw2wu8iIbnYXYPEzBtAF5CAg==", + "cpu": [ + "x64" + ], + 
"dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/unrun/node_modules/@rolldown/binding-linux-arm-gnueabihf": { + "version": "1.0.0-rc.5", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-1.0.0-rc.5.tgz", + "integrity": "sha512-IIBwTtA6VwxQLcEgq2mfrUgam7VvPZjhd/jxmeS1npM+edWsrrpRLHUdze+sk4rhb8/xpP3flemgcZXXUW6ukw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/unrun/node_modules/@rolldown/binding-linux-arm64-gnu": { + "version": "1.0.0-rc.5", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-1.0.0-rc.5.tgz", + "integrity": "sha512-KSol1De1spMZL+Xg7K5IBWXIvRWv7+pveaxFWXpezezAG7CS6ojzRjtCGCiLxQricutTAi/LkNWKMsd2wNhMKQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/unrun/node_modules/@rolldown/binding-linux-arm64-musl": { + "version": "1.0.0-rc.5", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm64-musl/-/binding-linux-arm64-musl-1.0.0-rc.5.tgz", + "integrity": "sha512-WFljyDkxtXRlWxMjxeegf7xMYXxUr8u7JdXlOEWKYgDqEgxUnSEsVDxBiNWQ1D5kQKwf8Wo4sVKEYPRhCdsjwA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/unrun/node_modules/@rolldown/binding-linux-x64-gnu": { + "version": "1.0.0-rc.5", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-x64-gnu/-/binding-linux-x64-gnu-1.0.0-rc.5.tgz", + "integrity": "sha512-CUlplTujmbDWp2gamvrqVKi2Or8lmngXT1WxsizJfts7JrvfGhZObciaY/+CbdbS9qNnskvwMZNEhTPrn7b+WA==", + "cpu": [ + "x64" + ], + 
"dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/unrun/node_modules/@rolldown/binding-linux-x64-musl": { + "version": "1.0.0-rc.5", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-x64-musl/-/binding-linux-x64-musl-1.0.0-rc.5.tgz", + "integrity": "sha512-wdf7g9NbVZCeAo2iGhsjJb7I8ZFfs6X8bumfrWg82VK+8P6AlLXwk48a1ASiJQDTS7Svq2xVzZg3sGO2aXpHRA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/unrun/node_modules/@rolldown/binding-openharmony-arm64": { + "version": "1.0.0-rc.5", + "resolved": "https://registry.npmjs.org/@rolldown/binding-openharmony-arm64/-/binding-openharmony-arm64-1.0.0-rc.5.tgz", + "integrity": "sha512-0CWY7ubu12nhzz+tkpHjoG3IRSTlWYe0wrfJRf4qqjqQSGtAYgoL9kwzdvlhaFdZ5ffVeyYw9qLsChcjUMEloQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/unrun/node_modules/@rolldown/binding-wasm32-wasi": { + "version": "1.0.0-rc.5", + "resolved": "https://registry.npmjs.org/@rolldown/binding-wasm32-wasi/-/binding-wasm32-wasi-1.0.0-rc.5.tgz", + "integrity": "sha512-LztXnGzv6t2u830mnZrFLRVqT/DPJ9DL4ZTz/y93rqUVkeHjMMYIYaFj+BUthiYxbVH9dH0SZYufETspKY/NhA==", + "cpu": [ + "wasm32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@napi-rs/wasm-runtime": "^1.1.1" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/unrun/node_modules/@rolldown/binding-win32-arm64-msvc": { + "version": "1.0.0-rc.5", + "resolved": "https://registry.npmjs.org/@rolldown/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-1.0.0-rc.5.tgz", + "integrity": "sha512-jUct1XVeGtyjqJXEAfvdFa8xoigYZ2rge7nYEm70ppQxpfH9ze2fbIrpHmP2tNM2vL/F6Dd0CpXhpjPbC6bSxQ==", + "cpu": [ + 
"arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/unrun/node_modules/@rolldown/binding-win32-x64-msvc": { + "version": "1.0.0-rc.5", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/unrun/node_modules/@rolldown/pluginutils": { + "version": "1.0.0-rc.5", + "dev": true, + "license": "MIT" + }, + "node_modules/unrun/node_modules/rolldown": { + "version": "1.0.0-rc.5", + "dev": true, + "license": "MIT", + "dependencies": { + "@oxc-project/types": "=0.114.0", + "@rolldown/pluginutils": "1.0.0-rc.5" + }, + "bin": { + "rolldown": "bin/cli.mjs" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + }, + "optionalDependencies": { + "@rolldown/binding-android-arm64": "1.0.0-rc.5", + "@rolldown/binding-darwin-arm64": "1.0.0-rc.5", + "@rolldown/binding-darwin-x64": "1.0.0-rc.5", + "@rolldown/binding-freebsd-x64": "1.0.0-rc.5", + "@rolldown/binding-linux-arm-gnueabihf": "1.0.0-rc.5", + "@rolldown/binding-linux-arm64-gnu": "1.0.0-rc.5", + "@rolldown/binding-linux-arm64-musl": "1.0.0-rc.5", + "@rolldown/binding-linux-x64-gnu": "1.0.0-rc.5", + "@rolldown/binding-linux-x64-musl": "1.0.0-rc.5", + "@rolldown/binding-openharmony-arm64": "1.0.0-rc.5", + "@rolldown/binding-wasm32-wasi": "1.0.0-rc.5", + "@rolldown/binding-win32-arm64-msvc": "1.0.0-rc.5", + "@rolldown/binding-win32-x64-msvc": "1.0.0-rc.5" + } + } + } +} diff --git a/package.json b/package.json index fd13921..61238bc 100644 --- a/package.json +++ b/package.json @@ -1,56 +1,48 @@ { - "name": "gettext-parser", + "name": "gettext-parser-next", "description": "Parse and compile gettext po and mo files to/from json, nothing more, nothing less", - "version": "8.0.0", - "author": "Andris Reinman", - "contributors": [ - { - "name": "Sam Hauglustaine" - } - ], + "version": "1.0.0", + 
"author": "", "homepage": "http://github.com/smhg/gettext-parser", "repository": { "type": "git", - "url": "http://github.com/smhg/gettext-parser.git" + "url": "git+http://github.com/smhg/gettext-parser.git" }, "type": "module", + "main": "./lib/index.cjs", + "module": "./lib/index.mjs", + "types": "./lib/index.d.cts", + "exports": { + ".": { + "import": { + "types": "./lib/index.d.mts", + "default": "./lib/index.mjs" + }, + "require": { + "types": "./lib/index.d.cts", + "default": "./lib/index.cjs" + } + } + }, "engines": { "node": ">=18" }, "scripts": { - "lint": "eslint src/*.js test/*.js", - "test-generate-mo": "msgfmt test/fixtures/latin13.po -o test/fixtures/latin13.mo & msgfmt test/fixtures/utf8.po -o test/fixtures/utf8.mo & msgfmt test/fixtures/obsolete.po -o test/fixtures/obsolete.mo", - "test": "mocha", - "test:coverage": "npx c8 --check-coverage npm run test", - "preversion": "npm run lint && npm test", + "start": "tsc --watch", + "build": "tsdown", + "lint": "npx @biomejs/biome check --write ./src", + "test": "node --test", + "test:coverage": "node --test --experimental-test-coverage", + "preversion": "npx publint && npm run lint && npm test", "postversion": "git push && git push --tags", "prepublishOnly": "npm i && tsc && npm run lint && npm run test" }, - "main": "./lib/index.js", - "types": "./lib/index.d.ts", "license": "MIT", - "dependencies": { - "content-type": "^1.0.5", - "encoding": "^0.1.13", - "readable-stream": "^4.5.2" - }, "devDependencies": { - "@eslint/js": "^9.3.0", - "@types/chai": "latest", - "@types/content-type": "^1.1.8", - "@types/eslint__js": "^8.42.3", - "@types/mocha": "latest", - "@types/readable-stream": "^4.0.11", - "@typescript-eslint/eslint-plugin": "^7.11.0", - "@typescript-eslint/parser": "^7.11.0", - "chai": "^5.0.3", - "eslint": "^8.57.0", - "eslint-config-standard": "^17.1.0", - "eslint-plugin-import": "^2.29.1", - "eslint-plugin-n": "^16.6.2", - "eslint-plugin-promise": "^6.1.1", - "mocha": "^10.4.0", - "typescript": 
"^5.4.5" + "@biomejs/biome": "2.4.4", + "@types/node": "^25.3.1", + "tsdown": "^0.20.3", + "typescript": "^5.9.3" }, "keywords": [ "i18n", diff --git a/tsdown.config.ts b/tsdown.config.ts new file mode 100644 index 0000000..1b91825 --- /dev/null +++ b/tsdown.config.ts @@ -0,0 +1,26 @@ +import { defineConfig } from "tsdown"; + +export default defineConfig([ + { + entry: 'src/index.ts', + outDir: 'lib', + target: "node18", + format: 'esm', + clean: true, + unbundle: true, + dts: true, + minify: true, + shims: true, + }, + { + entry: 'src/index.ts', + outDir: 'lib', + target: "node18", + format: 'cjs', + clean: false, // Do not clean lib again + unbundle: false, // Bundle CJS into a single file + dts: true, + minify: true, + shims: true, + } +]) From e2fa3bcd9167e82410168222bba0ec46a334a2ba Mon Sep 17 00:00:00 2001 From: Erik Golinelli Date: Thu, 26 Feb 2026 12:50:26 +0100 Subject: [PATCH 5/7] chore(ci): add build step to GitHub Actions workflow --- .github/workflows/ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d3628b5..079dcd5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -30,4 +30,5 @@ jobs: with: node-version: ${{ matrix.node-version }} - run: npm install + - run: npm build - run: npm run test:coverage From 41d12bb560d059d5a68d10dc244e7217fe80b7cc Mon Sep 17 00:00:00 2001 From: Erik Golinelli Date: Thu, 26 Feb 2026 12:51:28 +0100 Subject: [PATCH 6/7] chore(ci): add build step to GitHub Actions workflow --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 079dcd5..069a28d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -30,5 +30,5 @@ jobs: with: node-version: ${{ matrix.node-version }} - run: npm install - - run: npm build + - run: npm run build - run: npm run test:coverage From b7ef2b9815072fc3aff4f0e3e119f525788d3ba4 Mon Sep 17 00:00:00 2001 From: 
Erik Golinelli Date: Sat, 28 Feb 2026 10:03:22 +0100 Subject: [PATCH 7/7] test: add comparison tests against original gettext-parser Introduce tests to ensure identical behavior between the forked gettext-parser and the original library, covering parsing and compilation for PO and MO files. Include a test-specific package with dependencies. --- .github/workflows/ci.yml | 1 + package.json | 11 ++++++- test/compare-test.js | 70 ++++++++++++++++++++++++++++++++++++++++ test/package-lock.json | 62 +++++++++++++++++++++++++++++++++++ test/package.json | 8 +++++ 5 files changed, 151 insertions(+), 1 deletion(-) create mode 100644 test/compare-test.js create mode 100644 test/package-lock.json create mode 100644 test/package.json diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 069a28d..bce41e8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -30,5 +30,6 @@ jobs: with: node-version: ${{ matrix.node-version }} - run: npm install + - run: npm run test:install - run: npm run build - run: npm run test:coverage diff --git a/package.json b/package.json index 61238bc..587a335 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "name": "gettext-parser-next", "description": "Parse and compile gettext po and mo files to/from json, nothing more, nothing less", "version": "1.0.0", - "author": "", + "author": ["Erik ", "john "], "homepage": "http://github.com/smhg/gettext-parser", "repository": { "type": "git", @@ -24,6 +24,14 @@ } } }, + "files": [ + "lib/", + "tests/*.ts", + "*.json", + "*.md", + ".gitignore", + "LICENSE" + ], "engines": { "node": ">=18" }, @@ -32,6 +40,7 @@ "build": "tsdown", "lint": "npx @biomejs/biome check --write ./src", "test": "node --test", + "test:install": "cd test && npm install", "test:coverage": "node --test --experimental-test-coverage", "preversion": "npx publint && npm run lint && npm test", "postversion": "git push && git push --tags", diff --git a/test/compare-test.js b/test/compare-test.js new 
file mode 100644 index 0000000..c1c235d --- /dev/null +++ b/test/compare-test.js @@ -0,0 +1,70 @@ +import { promisify } from 'node:util'; +import path from 'node:path'; +import { readFile as fsReadFile } from 'node:fs'; +import { fileURLToPath } from 'node:url'; +import { describe, test } from 'node:test'; +import assert from 'node:assert'; + +import originalGettextParser from 'gettext-parser'; +import * as forkedGettextParser from '../lib/index.mjs'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +const readFile = promisify(fsReadFile); + +describe('Compare with original gettext-parser', () => { + describe('PO Parser', () => { + test('should parse utf8.po identically', async () => { + const poData = await readFile(path.join(__dirname, 'fixtures/utf8.po')); + const originalParsed = originalGettextParser.po.parse(poData); + const forkedParsed = forkedGettextParser.po.parse(poData); + // We shouldn't assert on strict equality of Date objects or undefined vs non-existent keys, + // but deepStrictEqual will cover basic deep comparison. 
+ assert.deepStrictEqual(forkedParsed, originalParsed); + }); + + test('should parse latin13.po identically', async () => { + const poData = await readFile(path.join(__dirname, 'fixtures/latin13.po')); + const originalParsed = originalGettextParser.po.parse(poData); + const forkedParsed = forkedGettextParser.po.parse(poData); + assert.deepStrictEqual(forkedParsed, originalParsed); + }); + }); + + describe('MO Parser', () => { + test('should parse utf8.mo identically', async () => { + const moData = await readFile(path.join(__dirname, 'fixtures/utf8.mo')); + const originalParsed = originalGettextParser.mo.parse(moData); + const forkedParsed = forkedGettextParser.mo.parse(moData); + assert.deepStrictEqual(forkedParsed, originalParsed); + }); + + test('should parse latin13.mo identically', async () => { + const moData = await readFile(path.join(__dirname, 'fixtures/latin13.mo')); + const originalParsed = originalGettextParser.mo.parse(moData); + const forkedParsed = forkedGettextParser.mo.parse(moData); + assert.deepStrictEqual(forkedParsed, originalParsed); + }); + }); + + describe('PO Compiler', () => { + test('should compile utf8-po.json identically', async () => { + const json = await readFile(path.join(__dirname, 'fixtures/utf8-po.json'), 'utf8'); + const parsed = JSON.parse(json); + const originalCompiled = originalGettextParser.po.compile(parsed); + const forkedCompiled = forkedGettextParser.po.compile(parsed); + assert.deepStrictEqual(forkedCompiled, originalCompiled); + }); + }); + + describe('MO Compiler', () => { + test('should compile utf8-mo.json identically', async () => { + const json = await readFile(path.join(__dirname, 'fixtures/utf8-mo.json'), 'utf8'); + const parsed = JSON.parse(json); + const originalCompiled = originalGettextParser.mo.compile(parsed); + const forkedCompiled = forkedGettextParser.mo.compile(parsed); + assert.deepStrictEqual(forkedCompiled, originalCompiled); + }); + }); +}); diff --git a/test/package-lock.json 
b/test/package-lock.json new file mode 100644 index 0000000..fde969c --- /dev/null +++ b/test/package-lock.json @@ -0,0 +1,62 @@ +{ + "name": "gettext-parser-tests", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "gettext-parser-tests", + "dependencies": { + "gettext-parser": "^9.0.1" + } + }, + "node_modules/content-type": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/encoding": { + "version": "0.1.13", + "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.13.tgz", + "integrity": "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==", + "license": "MIT", + "dependencies": { + "iconv-lite": "^0.6.2" + } + }, + "node_modules/gettext-parser": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/gettext-parser/-/gettext-parser-9.0.1.tgz", + "integrity": "sha512-q6F1KukCoQO9WivuO3O86UK+SyhYi8V1lSn+Ql4IfhHmbCX98drWTt2bGRn2RqEORsF8NgnpbcUaAr/ATNs57A==", + "license": "MIT", + "dependencies": { + "content-type": "^1.0.5", + "encoding": "^0.1.13" + }, + "engines": { + "node": ">=20" + } + }, + "node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "license": "MIT" + } + } 
+} diff --git a/test/package.json b/test/package.json new file mode 100644 index 0000000..3b4f22f --- /dev/null +++ b/test/package.json @@ -0,0 +1,8 @@ +{ + "name": "gettext-parser-tests", + "private": true, + "type": "module", + "dependencies": { + "gettext-parser": "^9.0.1" + } +} \ No newline at end of file