diff --git a/packages/js-sdk/package.json b/packages/js-sdk/package.json index 4db9c19562..6f3f5ed240 100644 --- a/packages/js-sdk/package.json +++ b/packages/js-sdk/package.json @@ -63,6 +63,7 @@ "openapi-typescript": "^7.9.1", "playwright": "^1.55.1", "react": "^18.3.1", + "tar": "^7.5.2", "tsup": "^8.4.0", "typedoc": "0.26.8", "typedoc-plugin-markdown": "4.2.7", @@ -97,9 +98,9 @@ "compare-versions": "^6.1.0", "dockerfile-ast": "^0.7.1", "glob": "^11.1.0", + "modern-tar": "^0.7.3", "openapi-fetch": "^0.14.1", - "platform": "^1.3.6", - "tar": "^7.5.2" + "platform": "^1.3.6" }, "engines": { "node": ">=20" diff --git a/packages/js-sdk/src/template/buildApi.ts b/packages/js-sdk/src/template/buildApi.ts index 505817db1e..58e548f272 100644 --- a/packages/js-sdk/src/template/buildApi.ts +++ b/packages/js-sdk/src/template/buildApi.ts @@ -88,7 +88,7 @@ export async function getFileUploadLink( export async function uploadFile( options: { - fileName: string + filePath: string fileContextPath: string url: string ignorePatterns: string[] @@ -96,11 +96,11 @@ export async function uploadFile( }, stackTrace: string | undefined ) { - const { fileName, url, fileContextPath, ignorePatterns, resolveSymlinks } = + const { filePath, url, fileContextPath, ignorePatterns, resolveSymlinks } = options try { const { contentLength, uploadStream } = await tarFileStreamUpload( - fileName, + filePath, fileContextPath, ignorePatterns, resolveSymlinks diff --git a/packages/js-sdk/src/template/index.ts b/packages/js-sdk/src/template/index.ts index 177ffc1310..a8510bdbd3 100644 --- a/packages/js-sdk/src/template/index.ts +++ b/packages/js-sdk/src/template/index.ts @@ -39,6 +39,7 @@ import { padOctal, readDockerignore, readGCPServiceAccountJSON, + relativizePath, } from './utils' /** @@ -358,7 +359,7 @@ export class TemplateBase for (const src of srcs) { const args = [ - src.toString(), + relativizePath(src, this.fileContextPath), dest.toString(), options?.user ?? '', options?.mode ? 
padOctal(options.mode) : '', @@ -369,6 +370,7 @@ export class TemplateBase args, force: options?.forceUpload || this.forceNextLayer, forceUpload: options?.forceUpload, + filePath: src, resolveSymlinks: options?.resolveSymlinks, }) } @@ -895,9 +897,11 @@ export class TemplateBase return } - const src = instruction.args.length > 0 ? instruction.args[0] : null + const fileName = + instruction.args.length > 0 ? instruction.args[0] : null + const filePath = instruction.filePath ?? null const filesHash = instruction.filesHash ?? null - if (src === null || filesHash === null) { + if (!fileName || !filePath || !filesHash) { throw new Error('Source path and files hash are required') } @@ -922,7 +926,7 @@ export class TemplateBase ) { await uploadFile( { - fileName: src, + filePath: filePath.toString(), fileContextPath: this.fileContextPath.toString(), url, ignorePatterns: [ @@ -934,14 +938,14 @@ export class TemplateBase stackTrace ) options.onBuildLogs?.( - new LogEntry(new Date(), 'info', `Uploaded '${src}'`) + new LogEntry(new Date(), 'info', `Uploaded '${filePath}'`) ) } else { options.onBuildLogs?.( new LogEntry( new Date(), 'info', - `Skipping upload of '${src}', already cached` + `Skipping upload of '${filePath}', already cached` ) ) } @@ -984,9 +988,8 @@ export class TemplateBase return instruction } - const src = instruction.args.length > 0 ? instruction.args[0] : null const dest = instruction.args.length > 1 ? 
instruction.args[1] : null - if (src === null || dest === null) { + if (!instruction.filePath || !dest) { throw new Error('Source path and destination path are required') } @@ -998,7 +1001,7 @@ export class TemplateBase return { ...instruction, filesHash: await calculateFilesHash( - src, + instruction.filePath.toString(), dest, this.fileContextPath.toString(), [ diff --git a/packages/js-sdk/src/template/types.ts b/packages/js-sdk/src/template/types.ts index eb10ca435e..bdec52dc7b 100644 --- a/packages/js-sdk/src/template/types.ts +++ b/packages/js-sdk/src/template/types.ts @@ -101,6 +101,7 @@ export type Instruction = { forceUpload?: true filesHash?: string resolveSymlinks?: boolean + filePath?: PathLike } /** diff --git a/packages/js-sdk/src/template/utils.ts b/packages/js-sdk/src/template/utils.ts index d1224b9782..edc1f6507d 100644 --- a/packages/js-sdk/src/template/utils.ts +++ b/packages/js-sdk/src/template/utils.ts @@ -1,5 +1,5 @@ import crypto from 'node:crypto' -import fs from 'node:fs' +import fs, { PathLike } from 'node:fs' import path from 'node:path' import { dynamicImport, dynamicRequire } from '../utils' import { BASE_STEP_NAME, FINALIZE_STEP_NAME } from './consts' @@ -54,7 +54,9 @@ export async function getAllFilesInPath( ignore: ignorePatterns, withFileTypes: true, // this is required so that the ignore pattern is relative to the file path - cwd: contextPath, + // if src is absolute, we don't need to set the cwd; otherwise use contextPath + // to anchor relative patterns to the template's context directory + cwd: path.isAbsolute(src) ? 
undefined : contextPath, }) for (const file of globFiles) { @@ -72,6 +74,7 @@ export async function getAllFilesInPath( const dirFiles = await glob(dirPattern, { ignore: ignorePatterns, withFileTypes: true, + // dirPattern is always relative (constructed from file.relative()), so use contextPath cwd: contextPath, }) dirFiles.forEach((f) => files.set(f.fullpath(), f)) @@ -106,7 +109,7 @@ export async function calculateFilesHash( resolveSymlinks: boolean, stackTrace: string | undefined ): Promise { - const srcPath = path.join(contextPath, src) + const srcPath = path.isAbsolute(src) ? src : path.join(contextPath, src) const hash = crypto.createHash('sha256') const content = `COPY ${src} ${dest}` @@ -255,38 +258,66 @@ export function padOctal(mode: number): string { /** * Create a compressed tar stream of files matching a pattern. * - * @param fileName Glob pattern for files to include + * @param filePath Glob pattern for files to include + * (archive entry paths are relativized against fileContextPath) * @param fileContextPath Base directory for resolving file paths * @param ignorePatterns Ignore patterns to exclude from the archive * @param resolveSymlinks Whether to follow symbolic links * @returns A readable stream of the gzipped tar archive */ export async function tarFileStream( - fileName: string, + filePath: string, fileContextPath: string, ignorePatterns: string[], resolveSymlinks: boolean ) { - const { create } = await dynamicImport('tar') + const { packTar } = + await dynamicImport('modern-tar/fs') + const { createGzip } = + await dynamicImport('node:zlib') const allFiles = await getAllFilesInPath( - fileName, + filePath, fileContextPath, ignorePatterns, true ) - const filePaths = allFiles.map((file) => file.relativePosix()) + const sources: Array<{ + type: 'file' | 'directory' + source: string + target: string + }> = [] - return create( - { - gzip: true, - cwd: fileContextPath, - follow: resolveSymlinks, - noDirRecurse: true, - }, - filePaths - ) + for (const file of 
allFiles) { + const sourcePath = file.fullpathPosix() + const targetPath = relativizePath(sourcePath, fileContextPath) + + if (file.isDirectory()) { + sources.push({ + type: 'directory', + source: sourcePath, + target: targetPath, + }) + } else { + sources.push({ + type: 'file', + source: sourcePath, + target: targetPath, + }) + } + } + + // Create tar stream with gzip compression + const tarStream = packTar(sources, { + dereference: resolveSymlinks, // Follow symlinks when resolveSymlinks is true + }) + + // Pipe through gzip compression + const gzipStream = createGzip() + tarStream.pipe(gzipStream) + + return gzipStream } /** @@ -298,14 +329,14 @@ export async function tarFileStream( * @returns Object containing the content length and upload stream */ export async function tarFileStreamUpload( - fileName: string, + filePath: string, fileContextPath: string, ignorePatterns: string[], resolveSymlinks: boolean ) { // First pass: calculate the compressed size const sizeCalculationStream = await tarFileStream( - fileName, + filePath, fileContextPath, ignorePatterns, resolveSymlinks @@ -318,7 +349,7 @@ export async function tarFileStreamUpload( return { contentLength, uploadStream: await tarFileStream( - fileName, + filePath, fileContextPath, ignorePatterns, resolveSymlinks @@ -369,3 +400,32 @@ export function readGCPServiceAccountJSON( } return JSON.stringify(pathOrContent) } + +/** + * Convert absolute paths to relativized paths. + * In addition to converting absolute paths to relative paths, + * it strips up directories (../ or ..\ on Windows). 
+ * + * @param src Absolute path to convert + * @param fileContextPath Base directory for resolving relative paths + * @returns Relative path with forward slashes (for tar/cross-platform compatibility) + */ +export function relativizePath( + src: PathLike, + fileContextPath: PathLike +): string { + let rewrittenPath = src.toString() + + // Convert absolute paths to relative paths + if (path.isAbsolute(rewrittenPath)) { + const contextPath = path.resolve(fileContextPath.toString()) + const relativePath = path.relative(contextPath, rewrittenPath) + rewrittenPath = relativePath + } + + // Strip up directories (../ or ..\ on Windows) + rewrittenPath = rewrittenPath.replace(/\.\.(\/|\\)/g, '') + + // Normalize to forward slashes for cross-platform compatibility (tar archives require forward slashes) + return normalizePath(rewrittenPath) +} diff --git a/packages/js-sdk/tests/template/build.test.ts b/packages/js-sdk/tests/template/build.test.ts index 241e8f172a..6ebfcf00ad 100644 --- a/packages/js-sdk/tests/template/build.test.ts +++ b/packages/js-sdk/tests/template/build.test.ts @@ -72,3 +72,20 @@ buildTemplateTest( await buildTemplate(template) } ) + +buildTemplateTest( + 'build template with absolute paths', + async ({ buildTemplate }) => { + const packageTxt = path.resolve(process.cwd(), folderPath, 'test.txt') + + const template = Template() + // using base image to avoid re-building ubuntu:22.04 image + .fromBaseImage() + .skipCache() + .copy(packageTxt, 'text.txt', { forceUpload: true }) + .copy('../../../../package.json', 'package.json', { forceUpload: true }) + .runCmd(['ls -l .', 'cat text.txt', 'cat package.json']) + + await buildTemplate(template, {}, defaultBuildLogger()) + } +) diff --git a/packages/python-sdk/e2b/template/main.py b/packages/python-sdk/e2b/template/main.py index 9d20c5fc3c..d3ee224927 100644 --- a/packages/python-sdk/e2b/template/main.py +++ b/packages/python-sdk/e2b/template/main.py @@ -21,6 +21,7 @@ read_dockerignore, 
read_gcp_service_account_json, get_caller_frame, + relativize_path, ) from types import TracebackType @@ -66,7 +67,7 @@ def copy( for src_item in srcs: args = [ - str(src_item), + relativize_path(str(src_item), self._template._file_context_path), str(dest), user or "", pad_octal(mode) if mode else "", @@ -78,6 +79,7 @@ def copy( "force": force_upload or self._template._force_next_layer, "forceUpload": force_upload, "resolveSymlinks": resolve_symlinks, + "filePath": src_item, } self._template._instructions.append(instruction) @@ -1283,14 +1285,15 @@ def _instructions_with_hashes( stack_trace = self._stack_traces[index + 1] args = instruction.get("args", []) - src = args[0] if len(args) > 0 else None + file_path = instruction.get("filePath") dest = args[1] if len(args) > 1 else None - if src is None or dest is None: + if file_path is None or dest is None: raise ValueError("Source path and destination path are required") resolve_symlinks = instruction.get("resolveSymlinks") + step["filePath"] = file_path step["filesHash"] = calculate_files_hash( - src, + str(file_path), dest, self._file_context_path, [ diff --git a/packages/python-sdk/e2b/template/types.py b/packages/python-sdk/e2b/template/types.py index a1e8406940..59e15e77bc 100644 --- a/packages/python-sdk/e2b/template/types.py +++ b/packages/python-sdk/e2b/template/types.py @@ -42,6 +42,7 @@ class Instruction(TypedDict): forceUpload: NotRequired[Optional[Literal[True]]] filesHash: NotRequired[Optional[str]] resolveSymlinks: NotRequired[Optional[bool]] + filePath: NotRequired[Optional[Union[str, Path]]] class GenericDockerRegistry(TypedDict): diff --git a/packages/python-sdk/e2b/template/utils.py b/packages/python-sdk/e2b/template/utils.py index 413c58a4f7..b12a7933a9 100644 --- a/packages/python-sdk/e2b/template/utils.py +++ b/packages/python-sdk/e2b/template/utils.py @@ -67,7 +67,7 @@ def get_all_files_in_path( files_glob = glob.glob( src, flags=glob.GLOBSTAR, - root_dir=abs_context_path, + 
root_dir=abs_context_path if not os.path.isabs(src) else None, exclude=ignore_patterns, ) @@ -82,7 +82,7 @@ def get_all_files_in_path( dir_files = glob.glob( normalize_path(file) + "/**/*", flags=glob.GLOBSTAR, - root_dir=abs_context_path, + root_dir=abs_context_path if not os.path.isabs(src) else None, exclude=ignore_patterns, ) for dir_file in dir_files: @@ -119,7 +119,7 @@ def calculate_files_hash( :raises ValueError: If no files match the source pattern """ - src_path = os.path.join(context_path, src) + src_path = src if os.path.isabs(src) else os.path.join(context_path, src) hash_obj = hashlib.sha256() content = f"COPY {src} {dest}" @@ -166,15 +166,15 @@ def hash_stats(stat_info: os.stat_result) -> None: def tar_file_stream( - file_name: str, + file_path: str, file_context_path: str, ignore_patterns: List[str], resolve_symlinks: bool, ) -> io.BytesIO: """ - Create a tar stream of files matching a pattern. + Create a tar stream of files matching the given path or pattern. - :param file_name: Glob pattern for files to include + :param file_path: Path to the file (absolute, or relative to file_context_path) :param file_context_path: Base directory for resolving file paths :param ignore_patterns: Ignore patterns :param resolve_symlinks: Whether to resolve symbolic links @@ -188,11 +188,11 @@ def tar_file_stream( dereference=resolve_symlinks, ) as tar: files = get_all_files_in_path( - file_name, file_context_path, ignore_patterns, True + file_path, file_context_path, ignore_patterns, True ) for file in files: tar.add( - file, arcname=os.path.relpath(file, file_context_path), recursive=False + file, arcname=relativize_path(file, file_context_path), recursive=False ) return tar_buffer @@ -318,3 +318,29 @@ def read_gcp_service_account_json( return f.read() else: return json.dumps(path_or_content) + + +def relativize_path(src: str, file_context_path: str) -> str: + r""" + Convert absolute paths to relativized paths. 
+ In addition to converting absolute paths to relative paths, + it strips up directories (../ or ..\ on Windows). + + :param src: Absolute path to convert + :param file_context_path: Base directory for resolving relative paths + + :return: Relative path + """ + rewritten_path = str(src) + + # Convert absolute paths to relative paths + if os.path.isabs(rewritten_path): + context_path = os.path.abspath(file_context_path) + relative_path = os.path.relpath(rewritten_path, context_path) + rewritten_path = relative_path + + # Strip up directories (../ or ..\ on Windows) + rewritten_path = re.sub(r"\.\.[/\\]", "", rewritten_path) + + # Normalize to forward slashes for cross-platform compatibility (tar archives require forward slashes) + return normalize_path(rewritten_path) diff --git a/packages/python-sdk/e2b/template_async/build_api.py b/packages/python-sdk/e2b/template_async/build_api.py index 13ff984d83..8c2da40e00 100644 --- a/packages/python-sdk/e2b/template_async/build_api.py +++ b/packages/python-sdk/e2b/template_async/build_api.py @@ -79,7 +79,7 @@ async def get_file_upload_link( async def upload_file( api_client: AuthenticatedClient, - file_name: str, + file_path: str, context_path: str, url: str, ignore_patterns: List[str], @@ -88,7 +88,7 @@ async def upload_file( ): try: tar_buffer = tar_file_stream( - file_name, context_path, ignore_patterns, resolve_symlinks + file_path, context_path, ignore_patterns, resolve_symlinks ) client = api_client.get_async_httpx_client() diff --git a/packages/python-sdk/e2b/template_async/main.py b/packages/python-sdk/e2b/template_async/main.py index 52ecdcce26..7b48e24571 100644 --- a/packages/python-sdk/e2b/template_async/main.py +++ b/packages/python-sdk/e2b/template_async/main.py @@ -86,13 +86,12 @@ async def _build( if file_upload["type"] != InstructionType.COPY: continue - args = file_upload.get("args", []) - src = args[0] if len(args) > 0 else None + file_path = file_upload.get("filePath") force_upload = 
file_upload.get("forceUpload") files_hash = file_upload.get("filesHash", None) resolve_symlinks = file_upload.get("resolveSymlinks", RESOLVE_SYMLINKS) - if src is None or files_hash is None: + if file_path is None or files_hash is None: raise ValueError("Source path and files hash are required") stack_trace = None @@ -108,7 +107,7 @@ async def _build( ): await upload_file( api_client, - src, + str(file_path), template._template._file_context_path, file_info.url, [ @@ -123,7 +122,7 @@ async def _build( LogEntry( timestamp=datetime.now(), level="info", - message=f"Uploaded '{src}'", + message=f"Uploaded '{file_path}'", ) ) else: @@ -132,7 +131,7 @@ async def _build( LogEntry( timestamp=datetime.now(), level="info", - message=f"Skipping upload of '{src}', already cached", + message=f"Skipping upload of '{file_path}', already cached", ) ) diff --git a/packages/python-sdk/e2b/template_sync/build_api.py b/packages/python-sdk/e2b/template_sync/build_api.py index 392737cd4a..8ec3ae1027 100644 --- a/packages/python-sdk/e2b/template_sync/build_api.py +++ b/packages/python-sdk/e2b/template_sync/build_api.py @@ -79,7 +79,7 @@ def get_file_upload_link( def upload_file( api_client: AuthenticatedClient, - file_name: str, + file_path: str, context_path: str, url: str, ignore_patterns: List[str], @@ -88,7 +88,7 @@ def upload_file( ): try: tar_buffer = tar_file_stream( - file_name, context_path, ignore_patterns, resolve_symlinks + file_path, context_path, ignore_patterns, resolve_symlinks ) client = api_client.get_httpx_client() response = client.put(url, content=tar_buffer.getvalue()) diff --git a/packages/python-sdk/e2b/template_sync/main.py b/packages/python-sdk/e2b/template_sync/main.py index fe41afdfd6..2f154db007 100644 --- a/packages/python-sdk/e2b/template_sync/main.py +++ b/packages/python-sdk/e2b/template_sync/main.py @@ -86,13 +86,12 @@ def _build( if file_upload["type"] != InstructionType.COPY: continue - args = file_upload.get("args", []) - src = args[0] if len(args) > 
0 else None + file_path = file_upload.get("filePath") force_upload = file_upload.get("forceUpload") files_hash = file_upload.get("filesHash", None) resolve_symlinks = file_upload.get("resolveSymlinks", RESOLVE_SYMLINKS) - if src is None or files_hash is None: + if file_path is None or files_hash is None: raise ValueError("Source path and files hash are required") stack_trace = None @@ -108,7 +107,7 @@ def _build( ): upload_file( api_client, - src, + str(file_path), template._template._file_context_path, file_info.url, [ @@ -123,7 +122,7 @@ def _build( LogEntry( timestamp=datetime.now(), level="info", - message=f"Uploaded '{src}'", + message=f"Uploaded '{file_path}'", ) ) else: @@ -132,7 +131,7 @@ def _build( LogEntry( timestamp=datetime.now(), level="info", - message=f"Skipping upload of '{src}', already cached", + message=f"Skipping upload of '{file_path}', already cached", ) ) diff --git a/packages/python-sdk/tests/async/template_async/test_build.py b/packages/python-sdk/tests/async/template_async/test_build.py index f16da0bd4b..6733586319 100644 --- a/packages/python-sdk/tests/async/template_async/test_build.py +++ b/packages/python-sdk/tests/async/template_async/test_build.py @@ -92,11 +92,23 @@ async def test_build_template_with_resolve_symlinks(async_build, setup_test_fold @pytest.mark.skip_debug() -async def test_build_template_with_skip_cache(async_build, setup_test_folder): +async def test_build_template_with_absolute_paths(async_build, setup_test_folder): + folder_path = os.path.join(setup_test_folder, "folder") + + # Absolute path to test.txt in the folder + package_txt = os.path.abspath(os.path.join(folder_path, "test.txt")) + + # Absolute path to package.json in the repo root + root_json = os.path.abspath(os.path.join(os.getcwd(), "..", "..", "package.json")) + template = ( AsyncTemplate(file_context_path=setup_test_folder) + # using base image to avoid re-building ubuntu:22.04 image + .from_base_image() .skip_cache() - .from_image("ubuntu:22.04") + 
.copy(package_txt, "text.txt", force_upload=True) + .copy(root_json, "package.json", force_upload=True) + .run_cmd(["ls -l .", "cat text.txt", "cat package.json"]) ) - await async_build(template) + await async_build(template, on_build_logs=default_build_logger()) diff --git a/packages/python-sdk/tests/sync/template_sync/test_build.py b/packages/python-sdk/tests/sync/template_sync/test_build.py index 423e809c4c..60566a08db 100644 --- a/packages/python-sdk/tests/sync/template_sync/test_build.py +++ b/packages/python-sdk/tests/sync/template_sync/test_build.py @@ -44,7 +44,6 @@ def setup_test_folder(): def test_build_template(build, setup_test_folder): template = ( Template(file_context_path=setup_test_folder) - # using base image to avoid re-building ubuntu:22.04 image .from_base_image() .copy("folder/*", "folder", force_upload=True) .run_cmd("cat folder/test.txt") @@ -90,3 +89,26 @@ def test_build_template_with_resolve_symlinks(build, setup_test_folder): ) build(template) + + +@pytest.mark.skip_debug() +def test_build_template_with_absolute_paths(build, setup_test_folder): + folder_path = os.path.join(setup_test_folder, "folder") + + # Absolute path to test.txt in the folder + package_txt = os.path.abspath(os.path.join(folder_path, "test.txt")) + + # Absolute path to package.json in the repo root + root_json = os.path.abspath(os.path.join(os.getcwd(), "..", "..", "package.json")) + + template = ( + Template(file_context_path=setup_test_folder) + # using base image to avoid re-building ubuntu:22.04 image + .from_base_image() + .skip_cache() + .copy(package_txt, "text.txt", force_upload=True) + .copy(root_json, "package.json", force_upload=True) + .run_cmd(["ls -l .", "cat text.txt", "cat package.json"]) + ) + + build(template, on_build_logs=default_build_logger()) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index d975530089..09f6c3962a 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -368,15 +368,15 @@ importers: glob: specifier: ^11.1.0 version: 11.1.0 + 
modern-tar: + specifier: ^0.7.3 + version: 0.7.3 openapi-fetch: specifier: ^0.14.1 version: 0.14.1 platform: specifier: ^1.3.6 version: 1.3.6 - tar: - specifier: ^7.5.2 - version: 7.5.2 devDependencies: '@testing-library/react': specifier: ^16.2.0 @@ -432,6 +432,9 @@ importers: react: specifier: ^18.3.1 version: 18.3.1 + tar: + specifier: ^7.5.2 + version: 7.5.2 tsup: specifier: ^8.4.0 version: 8.4.0(jiti@2.4.2)(postcss@8.5.6)(typescript@5.5.3)(yaml@2.5.1) @@ -5709,6 +5712,10 @@ packages: engines: {node: '>=10'} hasBin: true + modern-tar@0.7.3: + resolution: {integrity: sha512-4W79zekKGyYU4JXVmB78DOscMFaJth2gGhgfTl2alWE4rNe3nf4N2pqenQ0rEtIewrnD79M687Ouba3YGTLOvg==} + engines: {node: '>=18.0.0'} + module-details-from-path@1.0.3: resolution: {integrity: sha512-ySViT69/76t8VhE1xXHK6Ch4NcDd26gx0MzKXLO+F7NOtnqH68d9zF94nT8ZWSxXh8ELOERsnJO/sWt1xZYw5A==} @@ -14089,6 +14096,8 @@ snapshots: mkdirp@1.0.4: {} + modern-tar@0.7.3: {} + module-details-from-path@1.0.3: {} mri@1.2.0: {}